Bug Summary

File: jdk/src/hotspot/share/runtime/sharedRuntime.cpp
Warning: line 1025, column 11
Value stored to 'kname' during its initialization is never read
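
This is the analyzer's deadcode.DeadStores diagnostic: a local variable is given an initial value that is overwritten or falls out of scope before it is ever read. Line 1025 itself is not part of the excerpt below, so the following is only a hypothetical C++ sketch of the flagged pattern; the variable name 'kname' is taken from the warning, everything else is a placeholder, and the second function shows the usual fix of initializing at the point of first real use.

// Hypothetical illustration of "Value stored to 'kname' during its
// initialization is never read"; not the actual HotSpot code at line 1025.
#include <string>

static std::string klass_name_for(bool resolved) {
  std::string kname = "unknown";   // dead store: overwritten on every path below
  if (resolved) {
    kname = "resolved_klass_name";
  } else {
    kname = "unresolved_klass_name";
  }
  return kname;                    // first read of kname happens only here
}

// One way to silence the warning: initialize with the value that is actually used.
static std::string klass_name_for_fixed(bool resolved) {
  const std::string kname = resolved ? "resolved_klass_name"
                                     : "unresolved_klass_name";
  return kname;
}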

Annotated Source Code

clang -cc1 -triple x86_64-pc-linux-gnu -analyze -disable-free -disable-llvm-verifier -discard-value-names -main-file-name sharedRuntime.cpp -analyzer-store=region -analyzer-opt-analyze-nested-blocks -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -mrelocation-model pic -pic-level 2 -mthread-model posix -fno-delete-null-pointer-checks -mframe-pointer=all -relaxed-aliasing -fmath-errno -fno-rounding-math -masm-verbose -mconstructor-aliases -munwind-tables -target-cpu x86-64 -dwarf-column-info -fno-split-dwarf-inlining -debugger-tuning=gdb -resource-dir /usr/lib/llvm-10/lib/clang/10.0.0 -I /home/daniel/Projects/java/jdk/build/linux-x86_64-server-fastdebug/hotspot/variant-server/libjvm/objs/precompiled -D __STDC_FORMAT_MACROS -D __STDC_LIMIT_MACROS -D __STDC_CONSTANT_MACROS -D _GNU_SOURCE -D _REENTRANT -D LIBC=gnu -D LINUX -D VM_LITTLE_ENDIAN -D _LP64=1 -D ASSERT -D CHECK_UNHANDLED_OOPS -D TARGET_ARCH_x86 -D INCLUDE_SUFFIX_OS=_linux -D INCLUDE_SUFFIX_CPU=_x86 -D INCLUDE_SUFFIX_COMPILER=_gcc -D TARGET_COMPILER_gcc -D AMD64 -D HOTSPOT_LIB_ARCH="amd64" -D COMPILER1 -D COMPILER2 -I /home/daniel/Projects/java/jdk/build/linux-x86_64-server-fastdebug/hotspot/variant-server/gensrc/adfiles -I /home/daniel/Projects/java/jdk/src/hotspot/share -I /home/daniel/Projects/java/jdk/src/hotspot/os/linux -I /home/daniel/Projects/java/jdk/src/hotspot/os/posix -I /home/daniel/Projects/java/jdk/src/hotspot/cpu/x86 -I /home/daniel/Projects/java/jdk/src/hotspot/os_cpu/linux_x86 -I /home/daniel/Projects/java/jdk/build/linux-x86_64-server-fastdebug/hotspot/variant-server/gensrc -I /home/daniel/Projects/java/jdk/src/hotspot/share/precompiled -I /home/daniel/Projects/java/jdk/src/hotspot/share/include -I /home/daniel/Projects/java/jdk/src/hotspot/os/posix/include -I /home/daniel/Projects/java/jdk/build/linux-x86_64-server-fastdebug/support/modules_include/java.base -I /home/daniel/Projects/java/jdk/build/linux-x86_64-server-fastdebug/support/modules_include/java.base/linux -I /home/daniel/Projects/java/jdk/src/java.base/share/native/libjimage -I /home/daniel/Projects/java/jdk/build/linux-x86_64-server-fastdebug/hotspot/variant-server/gensrc/adfiles -I /home/daniel/Projects/java/jdk/src/hotspot/share -I /home/daniel/Projects/java/jdk/src/hotspot/os/linux -I /home/daniel/Projects/java/jdk/src/hotspot/os/posix -I /home/daniel/Projects/java/jdk/src/hotspot/cpu/x86 -I /home/daniel/Projects/java/jdk/src/hotspot/os_cpu/linux_x86 -I /home/daniel/Projects/java/jdk/build/linux-x86_64-server-fastdebug/hotspot/variant-server/gensrc -D _FORTIFY_SOURCE=2 -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/7.5.0/../../../../include/c++/7.5.0 -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/7.5.0/../../../../include/x86_64-linux-gnu/c++/7.5.0 -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/7.5.0/../../../../include/x86_64-linux-gnu/c++/7.5.0 -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/7.5.0/../../../../include/c++/7.5.0/backward -internal-isystem /usr/local/include -internal-isystem /usr/lib/llvm-10/lib/clang/10.0.0/include 
-internal-externc-isystem /usr/include/x86_64-linux-gnu -internal-externc-isystem /include -internal-externc-isystem /usr/include -O3 -Wno-format-zero-length -Wno-unused-parameter -Wno-unused -Wno-parentheses -Wno-comment -Wno-unknown-pragmas -Wno-address -Wno-delete-non-virtual-dtor -Wno-char-subscripts -Wno-array-bounds -Wno-int-in-bool-context -Wno-ignored-qualifiers -Wno-missing-field-initializers -Wno-implicit-fallthrough -Wno-empty-body -Wno-strict-overflow -Wno-sequence-point -Wno-maybe-uninitialized -Wno-misleading-indentation -Wno-cast-function-type -Wno-shift-negative-value -std=c++14 -fdeprecated-macro -fdebug-compilation-dir /home/daniel/Projects/java/jdk/make/hotspot -ferror-limit 19 -fmessage-length 0 -fvisibility hidden -stack-protector 1 -fno-rtti -fgnuc-version=4.2.1 -fobjc-runtime=gcc -fdiagnostics-show-option -vectorize-loops -vectorize-slp -analyzer-output=html -faddrsig -o /home/daniel/Projects/java/scan/2021-12-21-193737-8510-1 -x c++ /home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp
1/*
2 * Copyright (c) 1997, 2021, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25#include "precompiled.hpp"
26#include "classfile/javaClasses.hpp"
27#include "jvm.h"
28#include "classfile/stringTable.hpp"
29#include "classfile/vmClasses.hpp"
30#include "classfile/vmSymbols.hpp"
31#include "code/codeCache.hpp"
32#include "code/compiledIC.hpp"
33#include "code/icBuffer.hpp"
34#include "code/compiledMethod.inline.hpp"
35#include "code/scopeDesc.hpp"
36#include "code/vtableStubs.hpp"
37#include "compiler/abstractCompiler.hpp"
38#include "compiler/compileBroker.hpp"
39#include "compiler/disassembler.hpp"
40#include "gc/shared/barrierSet.hpp"
41#include "gc/shared/collectedHeap.hpp"
42#include "gc/shared/gcLocker.inline.hpp"
43#include "interpreter/interpreter.hpp"
44#include "interpreter/interpreterRuntime.hpp"
45#include "jfr/jfrEvents.hpp"
46#include "logging/log.hpp"
47#include "memory/resourceArea.hpp"
48#include "memory/universe.hpp"
49#include "oops/compiledICHolder.inline.hpp"
50#include "oops/klass.hpp"
51#include "oops/method.inline.hpp"
52#include "oops/objArrayKlass.hpp"
53#include "oops/oop.inline.hpp"
54#include "prims/forte.hpp"
55#include "prims/jvmtiExport.hpp"
56#include "prims/methodHandles.hpp"
57#include "prims/nativeLookup.hpp"
58#include "runtime/atomic.hpp"
59#include "runtime/frame.inline.hpp"
60#include "runtime/handles.inline.hpp"
61#include "runtime/init.hpp"
62#include "runtime/interfaceSupport.inline.hpp"
63#include "runtime/java.hpp"
64#include "runtime/javaCalls.hpp"
65#include "runtime/sharedRuntime.hpp"
66#include "runtime/stackWatermarkSet.hpp"
67#include "runtime/stubRoutines.hpp"
68#include "runtime/synchronizer.hpp"
69#include "runtime/vframe.inline.hpp"
70#include "runtime/vframeArray.hpp"
71#include "runtime/vm_version.hpp"
72#include "utilities/copy.hpp"
73#include "utilities/dtrace.hpp"
74#include "utilities/events.hpp"
75#include "utilities/hashtable.inline.hpp"
76#include "utilities/macros.hpp"
77#include "utilities/xmlstream.hpp"
78#ifdef COMPILER1
79#include "c1/c1_Runtime1.hpp"
80#endif
81
82// Shared stub locations
83RuntimeStub* SharedRuntime::_wrong_method_blob;
84RuntimeStub* SharedRuntime::_wrong_method_abstract_blob;
85RuntimeStub* SharedRuntime::_ic_miss_blob;
86RuntimeStub* SharedRuntime::_resolve_opt_virtual_call_blob;
87RuntimeStub* SharedRuntime::_resolve_virtual_call_blob;
88RuntimeStub* SharedRuntime::_resolve_static_call_blob;
89address SharedRuntime::_resolve_static_call_entry;
90
91DeoptimizationBlob* SharedRuntime::_deopt_blob;
92SafepointBlob* SharedRuntime::_polling_page_vectors_safepoint_handler_blob;
93SafepointBlob* SharedRuntime::_polling_page_safepoint_handler_blob;
94SafepointBlob* SharedRuntime::_polling_page_return_handler_blob;
95
96#ifdef COMPILER2
97UncommonTrapBlob* SharedRuntime::_uncommon_trap_blob;
98#endif // COMPILER2
99
100
101//----------------------------generate_stubs-----------------------------------
102void SharedRuntime::generate_stubs() {
103 _wrong_method_blob = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method)((address)((address_word)(SharedRuntime::handle_wrong_method)
))
, "wrong_method_stub");
104 _wrong_method_abstract_blob = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method_abstract)((address)((address_word)(SharedRuntime::handle_wrong_method_abstract
)))
, "wrong_method_abstract_stub");
105 _ic_miss_blob = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method_ic_miss)((address)((address_word)(SharedRuntime::handle_wrong_method_ic_miss
)))
, "ic_miss_stub");
106 _resolve_opt_virtual_call_blob = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_opt_virtual_call_C)((address)((address_word)(SharedRuntime::resolve_opt_virtual_call_C
)))
, "resolve_opt_virtual_call");
107 _resolve_virtual_call_blob = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_virtual_call_C)((address)((address_word)(SharedRuntime::resolve_virtual_call_C
)))
, "resolve_virtual_call");
108 _resolve_static_call_blob = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_static_call_C)((address)((address_word)(SharedRuntime::resolve_static_call_C
)))
, "resolve_static_call");
109 _resolve_static_call_entry = _resolve_static_call_blob->entry_point();
110
111 AdapterHandlerLibrary::initialize();
112
113#if COMPILER2_OR_JVMCI
114 // Vectors are generated only by C2 and JVMCI.
115 bool support_wide = is_wide_vector(MaxVectorSize);
116 if (support_wide) {
117 _polling_page_vectors_safepoint_handler_blob = generate_handler_blob(CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception)((address)((address_word)(SafepointSynchronize::handle_polling_page_exception
)))
, POLL_AT_VECTOR_LOOP);
118 }
119#endif // COMPILER2_OR_JVMCI
120 _polling_page_safepoint_handler_blob = generate_handler_blob(CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception)((address)((address_word)(SafepointSynchronize::handle_polling_page_exception
)))
, POLL_AT_LOOP);
121 _polling_page_return_handler_blob = generate_handler_blob(CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception)((address)((address_word)(SafepointSynchronize::handle_polling_page_exception
)))
, POLL_AT_RETURN);
122
123 generate_deopt_blob();
124
125#ifdef COMPILER2
126 generate_uncommon_trap_blob();
127#endif // COMPILER2
128}
129
130#include <math.h>
131
132// Implementation of SharedRuntime
133
134#ifndef PRODUCT
135// For statistics
136int SharedRuntime::_ic_miss_ctr = 0;
137int SharedRuntime::_wrong_method_ctr = 0;
138int SharedRuntime::_resolve_static_ctr = 0;
139int SharedRuntime::_resolve_virtual_ctr = 0;
140int SharedRuntime::_resolve_opt_virtual_ctr = 0;
141int SharedRuntime::_implicit_null_throws = 0;
142int SharedRuntime::_implicit_div0_throws = 0;
143
144int64_t SharedRuntime::_nof_normal_calls = 0;
145int64_t SharedRuntime::_nof_optimized_calls = 0;
146int64_t SharedRuntime::_nof_inlined_calls = 0;
147int64_t SharedRuntime::_nof_megamorphic_calls = 0;
148int64_t SharedRuntime::_nof_static_calls = 0;
149int64_t SharedRuntime::_nof_inlined_static_calls = 0;
150int64_t SharedRuntime::_nof_interface_calls = 0;
151int64_t SharedRuntime::_nof_optimized_interface_calls = 0;
152int64_t SharedRuntime::_nof_inlined_interface_calls = 0;
153int64_t SharedRuntime::_nof_megamorphic_interface_calls = 0;
154
155int SharedRuntime::_new_instance_ctr=0;
156int SharedRuntime::_new_array_ctr=0;
157int SharedRuntime::_multi2_ctr=0;
158int SharedRuntime::_multi3_ctr=0;
159int SharedRuntime::_multi4_ctr=0;
160int SharedRuntime::_multi5_ctr=0;
161int SharedRuntime::_mon_enter_stub_ctr=0;
162int SharedRuntime::_mon_exit_stub_ctr=0;
163int SharedRuntime::_mon_enter_ctr=0;
164int SharedRuntime::_mon_exit_ctr=0;
165int SharedRuntime::_partial_subtype_ctr=0;
166int SharedRuntime::_jbyte_array_copy_ctr=0;
167int SharedRuntime::_jshort_array_copy_ctr=0;
168int SharedRuntime::_jint_array_copy_ctr=0;
169int SharedRuntime::_jlong_array_copy_ctr=0;
170int SharedRuntime::_oop_array_copy_ctr=0;
171int SharedRuntime::_checkcast_array_copy_ctr=0;
172int SharedRuntime::_unsafe_array_copy_ctr=0;
173int SharedRuntime::_generic_array_copy_ctr=0;
174int SharedRuntime::_slow_array_copy_ctr=0;
175int SharedRuntime::_find_handler_ctr=0;
176int SharedRuntime::_rethrow_ctr=0;
177
178int SharedRuntime::_ICmiss_index = 0;
179int SharedRuntime::_ICmiss_count[SharedRuntime::maxICmiss_count];
180address SharedRuntime::_ICmiss_at[SharedRuntime::maxICmiss_count];
181
182
183void SharedRuntime::trace_ic_miss(address at) {
184 for (int i = 0; i < _ICmiss_index; i++) {
185 if (_ICmiss_at[i] == at) {
186 _ICmiss_count[i]++;
187 return;
188 }
189 }
190 int index = _ICmiss_index++;
191 if (_ICmiss_index >= maxICmiss_count) _ICmiss_index = maxICmiss_count - 1;
192 _ICmiss_at[index] = at;
193 _ICmiss_count[index] = 1;
194}
195
196void SharedRuntime::print_ic_miss_histogram() {
197 if (ICMissHistogram) {
198 tty->print_cr("IC Miss Histogram:");
199 int tot_misses = 0;
200 for (int i = 0; i < _ICmiss_index; i++) {
201 tty->print_cr(" at: " INTPTR_FORMAT"0x%016" "l" "x" " nof: %d", p2i(_ICmiss_at[i]), _ICmiss_count[i]);
202 tot_misses += _ICmiss_count[i];
203 }
204 tty->print_cr("Total IC misses: %7d", tot_misses);
205 }
206}
207#endif // PRODUCT
208
209
210JRT_LEAF(jlong, SharedRuntime::lmul(jlong y, jlong x))jlong SharedRuntime::lmul(jlong y, jlong x) { NoHandleMark __hm
; ; os::verify_stack_alignment(); NoSafepointVerifier __nsv;
211 return x * y;
212JRT_END}
213
214
215JRT_LEAF(jlong, SharedRuntime::ldiv(jlong y, jlong x))jlong SharedRuntime::ldiv(jlong y, jlong x) { NoHandleMark __hm
; ; os::verify_stack_alignment(); NoSafepointVerifier __nsv;
216 if (x == min_jlong && y == CONST64(-1)(-1LL)) {
217 return x;
218 } else {
219 return x / y;
220 }
221JRT_END}
222
223
224JRT_LEAF(jlong, SharedRuntime::lrem(jlong y, jlong x))jlong SharedRuntime::lrem(jlong y, jlong x) { NoHandleMark __hm
; ; os::verify_stack_alignment(); NoSafepointVerifier __nsv;
225 if (x == min_jlong && y == CONST64(-1)(-1LL)) {
226 return 0;
227 } else {
228 return x % y;
229 }
230JRT_END}
231
232
233const juint float_sign_mask = 0x7FFFFFFF;
234const juint float_infinity = 0x7F800000;
235const julong double_sign_mask = CONST64(0x7FFFFFFFFFFFFFFF)(0x7FFFFFFFFFFFFFFFLL);
236const julong double_infinity = CONST64(0x7FF0000000000000)(0x7FF0000000000000LL);
237
238JRT_LEAF(jfloat, SharedRuntime::frem(jfloat x, jfloat y))jfloat SharedRuntime::frem(jfloat x, jfloat y) { NoHandleMark
__hm; ; os::verify_stack_alignment(); NoSafepointVerifier __nsv
;
239#ifdef _WIN64
240 // 64-bit Windows on amd64 returns the wrong values for
241 // infinity operands.
242 union { jfloat f; juint i; } xbits, ybits;
243 xbits.f = x;
244 ybits.f = y;
245 // x Mod Infinity == x unless x is infinity
246 if (((xbits.i & float_sign_mask) != float_infinity) &&
247 ((ybits.i & float_sign_mask) == float_infinity) ) {
248 return x;
249 }
250 return ((jfloat)fmod_winx64((double)x, (double)y));
251#else
252 return ((jfloat)fmod((double)x,(double)y));
253#endif
254JRT_END}
255
256
257JRT_LEAF(jdouble, SharedRuntime::drem(jdouble x, jdouble y))jdouble SharedRuntime::drem(jdouble x, jdouble y) { NoHandleMark
__hm; ; os::verify_stack_alignment(); NoSafepointVerifier __nsv
;
258#ifdef _WIN64
259 union { jdouble d; julong l; } xbits, ybits;
260 xbits.d = x;
261 ybits.d = y;
262 // x Mod Infinity == x unless x is infinity
263 if (((xbits.l & double_sign_mask) != double_infinity) &&
264 ((ybits.l & double_sign_mask) == double_infinity) ) {
265 return x;
266 }
267 return ((jdouble)fmod_winx64((double)x, (double)y));
268#else
269 return ((jdouble)fmod((double)x,(double)y));
270#endif
271JRT_END}
272
273#ifdef __SOFTFP__
274JRT_LEAF(jfloat, SharedRuntime::fadd(jfloat x, jfloat y))jfloat SharedRuntime::fadd(jfloat x, jfloat y) { NoHandleMark
__hm; ; os::verify_stack_alignment(); NoSafepointVerifier __nsv
;
275 return x + y;
276JRT_END}
277
278JRT_LEAF(jfloat, SharedRuntime::fsub(jfloat x, jfloat y))jfloat SharedRuntime::fsub(jfloat x, jfloat y) { NoHandleMark
__hm; ; os::verify_stack_alignment(); NoSafepointVerifier __nsv
;
279 return x - y;
280JRT_END}
281
282JRT_LEAF(jfloat, SharedRuntime::fmul(jfloat x, jfloat y))jfloat SharedRuntime::fmul(jfloat x, jfloat y) { NoHandleMark
__hm; ; os::verify_stack_alignment(); NoSafepointVerifier __nsv
;
283 return x * y;
284JRT_END}
285
286JRT_LEAF(jfloat, SharedRuntime::fdiv(jfloat x, jfloat y))jfloat SharedRuntime::fdiv(jfloat x, jfloat y) { NoHandleMark
__hm; ; os::verify_stack_alignment(); NoSafepointVerifier __nsv
;
287 return x / y;
288JRT_END}
289
290JRT_LEAF(jdouble, SharedRuntime::dadd(jdouble x, jdouble y))jdouble SharedRuntime::dadd(jdouble x, jdouble y) { NoHandleMark
__hm; ; os::verify_stack_alignment(); NoSafepointVerifier __nsv
;
291 return x + y;
292JRT_END}
293
294JRT_LEAF(jdouble, SharedRuntime::dsub(jdouble x, jdouble y))jdouble SharedRuntime::dsub(jdouble x, jdouble y) { NoHandleMark
__hm; ; os::verify_stack_alignment(); NoSafepointVerifier __nsv
;
295 return x - y;
296JRT_END}
297
298JRT_LEAF(jdouble, SharedRuntime::dmul(jdouble x, jdouble y))jdouble SharedRuntime::dmul(jdouble x, jdouble y) { NoHandleMark
__hm; ; os::verify_stack_alignment(); NoSafepointVerifier __nsv
;
299 return x * y;
300JRT_END}
301
302JRT_LEAF(jdouble, SharedRuntime::ddiv(jdouble x, jdouble y))jdouble SharedRuntime::ddiv(jdouble x, jdouble y) { NoHandleMark
__hm; ; os::verify_stack_alignment(); NoSafepointVerifier __nsv
;
303 return x / y;
304JRT_END}
305
306JRT_LEAF(jfloat, SharedRuntime::i2f(jint x))jfloat SharedRuntime::i2f(jint x) { NoHandleMark __hm; ; os::
verify_stack_alignment(); NoSafepointVerifier __nsv;
307 return (jfloat)x;
308JRT_END}
309
310JRT_LEAF(jdouble, SharedRuntime::i2d(jint x))jdouble SharedRuntime::i2d(jint x) { NoHandleMark __hm; ; os::
verify_stack_alignment(); NoSafepointVerifier __nsv;
311 return (jdouble)x;
312JRT_END}
313
314JRT_LEAF(jdouble, SharedRuntime::f2d(jfloat x))jdouble SharedRuntime::f2d(jfloat x) { NoHandleMark __hm; ; os
::verify_stack_alignment(); NoSafepointVerifier __nsv;
315 return (jdouble)x;
316JRT_END}
317
318JRT_LEAF(int, SharedRuntime::fcmpl(float x, float y))int SharedRuntime::fcmpl(float x, float y) { NoHandleMark __hm
; ; os::verify_stack_alignment(); NoSafepointVerifier __nsv;
319 return x>y ? 1 : (x==y ? 0 : -1); /* x<y or is_nan*/
320JRT_END}
321
322JRT_LEAF(int, SharedRuntime::fcmpg(float x, float y))int SharedRuntime::fcmpg(float x, float y) { NoHandleMark __hm
; ; os::verify_stack_alignment(); NoSafepointVerifier __nsv;
323 return x<y ? -1 : (x==y ? 0 : 1); /* x>y or is_nan */
324JRT_END}
325
326JRT_LEAF(int, SharedRuntime::dcmpl(double x, double y))int SharedRuntime::dcmpl(double x, double y) { NoHandleMark __hm
; ; os::verify_stack_alignment(); NoSafepointVerifier __nsv;
327 return x>y ? 1 : (x==y ? 0 : -1); /* x<y or is_nan */
328JRT_END}
329
330JRT_LEAF(int, SharedRuntime::dcmpg(double x, double y))int SharedRuntime::dcmpg(double x, double y) { NoHandleMark __hm
; ; os::verify_stack_alignment(); NoSafepointVerifier __nsv;
331 return x<y ? -1 : (x==y ? 0 : 1); /* x>y or is_nan */
332JRT_END}
333
334// Functions to return the opposite of the aeabi functions for nan.
335JRT_LEAF(int, SharedRuntime::unordered_fcmplt(float x, float y))int SharedRuntime::unordered_fcmplt(float x, float y) { NoHandleMark
__hm; ; os::verify_stack_alignment(); NoSafepointVerifier __nsv
;
336 return (x < y) ? 1 : ((g_isnan(x) || g_isnan(y)) ? 1 : 0);
337JRT_END}
338
339JRT_LEAF(int, SharedRuntime::unordered_dcmplt(double x, double y))int SharedRuntime::unordered_dcmplt(double x, double y) { NoHandleMark
__hm; ; os::verify_stack_alignment(); NoSafepointVerifier __nsv
;
340 return (x < y) ? 1 : ((g_isnan(x) || g_isnan(y)) ? 1 : 0);
341JRT_END}
342
343JRT_LEAF(int, SharedRuntime::unordered_fcmple(float x, float y))int SharedRuntime::unordered_fcmple(float x, float y) { NoHandleMark
__hm; ; os::verify_stack_alignment(); NoSafepointVerifier __nsv
;
344 return (x <= y) ? 1 : ((g_isnan(x) || g_isnan(y)) ? 1 : 0);
345JRT_END}
346
347JRT_LEAF(int, SharedRuntime::unordered_dcmple(double x, double y))int SharedRuntime::unordered_dcmple(double x, double y) { NoHandleMark
__hm; ; os::verify_stack_alignment(); NoSafepointVerifier __nsv
;
348 return (x <= y) ? 1 : ((g_isnan(x) || g_isnan(y)) ? 1 : 0);
349JRT_END}
350
351JRT_LEAF(int, SharedRuntime::unordered_fcmpge(float x, float y))int SharedRuntime::unordered_fcmpge(float x, float y) { NoHandleMark
__hm; ; os::verify_stack_alignment(); NoSafepointVerifier __nsv
;
352 return (x >= y) ? 1 : ((g_isnan(x) || g_isnan(y)) ? 1 : 0);
353JRT_END}
354
355JRT_LEAF(int, SharedRuntime::unordered_dcmpge(double x, double y))int SharedRuntime::unordered_dcmpge(double x, double y) { NoHandleMark
__hm; ; os::verify_stack_alignment(); NoSafepointVerifier __nsv
;
356 return (x >= y) ? 1 : ((g_isnan(x) || g_isnan(y)) ? 1 : 0);
357JRT_END}
358
359JRT_LEAF(int, SharedRuntime::unordered_fcmpgt(float x, float y))int SharedRuntime::unordered_fcmpgt(float x, float y) { NoHandleMark
__hm; ; os::verify_stack_alignment(); NoSafepointVerifier __nsv
;
360 return (x > y) ? 1 : ((g_isnan(x) || g_isnan(y)) ? 1 : 0);
361JRT_END}
362
363JRT_LEAF(int, SharedRuntime::unordered_dcmpgt(double x, double y))int SharedRuntime::unordered_dcmpgt(double x, double y) { NoHandleMark
__hm; ; os::verify_stack_alignment(); NoSafepointVerifier __nsv
;
364 return (x > y) ? 1 : ((g_isnan(x) || g_isnan(y)) ? 1 : 0);
365JRT_END}
366
367// Intrinsics make gcc generate code for these.
368float SharedRuntime::fneg(float f) {
369 return -f;
370}
371
372double SharedRuntime::dneg(double f) {
373 return -f;
374}
375
376#endif // __SOFTFP__
377
378#if defined(__SOFTFP__) || defined(E500V2)
379// Intrinsics make gcc generate code for these.
380double SharedRuntime::dabs(double f) {
381 return (f <= (double)0.0) ? (double)0.0 - f : f;
382}
383
384#endif
385
386#if defined(__SOFTFP__) || defined(PPC)
387double SharedRuntime::dsqrt(double f) {
388 return sqrt(f);
389}
390#endif
391
392JRT_LEAF(jint, SharedRuntime::f2i(jfloat x))jint SharedRuntime::f2i(jfloat x) { NoHandleMark __hm; ; os::
verify_stack_alignment(); NoSafepointVerifier __nsv;
393 if (g_isnan(x))
394 return 0;
395 if (x >= (jfloat) max_jint)
396 return max_jint;
397 if (x <= (jfloat) min_jint)
398 return min_jint;
399 return (jint) x;
400JRT_END}
401
402
403JRT_LEAF(jlong, SharedRuntime::f2l(jfloat x))jlong SharedRuntime::f2l(jfloat x) { NoHandleMark __hm; ; os::
verify_stack_alignment(); NoSafepointVerifier __nsv;
404 if (g_isnan(x))
405 return 0;
406 if (x >= (jfloat) max_jlong)
407 return max_jlong;
408 if (x <= (jfloat) min_jlong)
409 return min_jlong;
410 return (jlong) x;
411JRT_END}
412
413
414JRT_LEAF(jint, SharedRuntime::d2i(jdouble x))jint SharedRuntime::d2i(jdouble x) { NoHandleMark __hm; ; os::
verify_stack_alignment(); NoSafepointVerifier __nsv;
415 if (g_isnan(x))
416 return 0;
417 if (x >= (jdouble) max_jint)
418 return max_jint;
419 if (x <= (jdouble) min_jint)
420 return min_jint;
421 return (jint) x;
422JRT_END}
423
424
425JRT_LEAF(jlong, SharedRuntime::d2l(jdouble x))jlong SharedRuntime::d2l(jdouble x) { NoHandleMark __hm; ; os
::verify_stack_alignment(); NoSafepointVerifier __nsv;
426 if (g_isnan(x))
427 return 0;
428 if (x >= (jdouble) max_jlong)
429 return max_jlong;
430 if (x <= (jdouble) min_jlong)
431 return min_jlong;
432 return (jlong) x;
433JRT_END}
434
435
436JRT_LEAF(jfloat, SharedRuntime::d2f(jdouble x))jfloat SharedRuntime::d2f(jdouble x) { NoHandleMark __hm; ; os
::verify_stack_alignment(); NoSafepointVerifier __nsv;
437 return (jfloat)x;
438JRT_END}
439
440
441JRT_LEAF(jfloat, SharedRuntime::l2f(jlong x))jfloat SharedRuntime::l2f(jlong x) { NoHandleMark __hm; ; os::
verify_stack_alignment(); NoSafepointVerifier __nsv;
442 return (jfloat)x;
443JRT_END}
444
445
446JRT_LEAF(jdouble, SharedRuntime::l2d(jlong x))jdouble SharedRuntime::l2d(jlong x) { NoHandleMark __hm; ; os
::verify_stack_alignment(); NoSafepointVerifier __nsv;
447 return (jdouble)x;
448JRT_END}
449
450// Exception handling across interpreter/compiler boundaries
451//
452// exception_handler_for_return_address(...) returns the continuation address.
453// The continuation address is the entry point of the exception handler of the
454// previous frame depending on the return address.
455
456address SharedRuntime::raw_exception_handler_for_return_address(JavaThread* current, address return_address) {
457 // Note: This is called when we have unwound the frame of the callee that did
458 // throw an exception. So far, no check has been performed by the StackWatermarkSet.
459 // Notably, the stack is not walkable at this point, and hence the check must
460 // be deferred until later. Specifically, any of the handlers returned here in
461 // this function, will get dispatched to, and call deferred checks to
462 // StackWatermarkSet::after_unwind at a point where the stack is walkable.
463 assert(frame::verify_return_pc(return_address), "must be a return address: " INTPTR_FORMAT, p2i(return_address))do { if (!(frame::verify_return_pc(return_address))) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp"
, 463, "assert(" "frame::verify_return_pc(return_address)" ") failed"
, "must be a return address: " "0x%016" "l" "x", p2i(return_address
)); ::breakpoint(); } } while (0)
;
464 assert(current->frames_to_pop_failed_realloc() == 0 || Interpreter::contains(return_address), "missed frames to pop?")do { if (!(current->frames_to_pop_failed_realloc() == 0 ||
Interpreter::contains(return_address))) { (*g_assert_poison)
= 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp"
, 464, "assert(" "current->frames_to_pop_failed_realloc() == 0 || Interpreter::contains(return_address)"
") failed", "missed frames to pop?"); ::breakpoint(); } } while
(0)
;
465
466 // Reset method handle flag.
467 current->set_is_method_handle_return(false);
468
469#if INCLUDE_JVMCI
470 // JVMCI's ExceptionHandlerStub expects the thread local exception PC to be clear
471 // and other exception handler continuations do not read it
472 current->set_exception_pc(NULL__null);
473#endif // INCLUDE_JVMCI
474
475 // The fastest case first
476 CodeBlob* blob = CodeCache::find_blob(return_address);
477 CompiledMethod* nm = (blob != NULL__null) ? blob->as_compiled_method_or_null() : NULL__null;
478 if (nm != NULL__null) {
479 // Set flag if return address is a method handle call site.
480 current->set_is_method_handle_return(nm->is_method_handle_return(return_address));
481 // native nmethods don't have exception handlers
482 assert(!nm->is_native_method(), "no exception handler")do { if (!(!nm->is_native_method())) { (*g_assert_poison) =
'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp"
, 482, "assert(" "!nm->is_native_method()" ") failed", "no exception handler"
); ::breakpoint(); } } while (0)
;
483 assert(nm->header_begin() != nm->exception_begin(), "no exception handler")do { if (!(nm->header_begin() != nm->exception_begin())
) { (*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp"
, 483, "assert(" "nm->header_begin() != nm->exception_begin()"
") failed", "no exception handler"); ::breakpoint(); } } while
(0)
;
484 if (nm->is_deopt_pc(return_address)) {
485 // If we come here because of a stack overflow, the stack may be
486 // unguarded. Reguard the stack otherwise if we return to the
487 // deopt blob and the stack bang causes a stack overflow we
488 // crash.
489 StackOverflow* overflow_state = current->stack_overflow_state();
490 bool guard_pages_enabled = overflow_state->reguard_stack_if_needed();
491 if (overflow_state->reserved_stack_activation() != current->stack_base()) {
492 overflow_state->set_reserved_stack_activation(current->stack_base());
493 }
494 assert(guard_pages_enabled, "stack banging in deopt blob may cause crash")do { if (!(guard_pages_enabled)) { (*g_assert_poison) = 'X';;
report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp"
, 494, "assert(" "guard_pages_enabled" ") failed", "stack banging in deopt blob may cause crash"
); ::breakpoint(); } } while (0)
;
495 // The deferred StackWatermarkSet::after_unwind check will be performed in
496 // Deoptimization::fetch_unroll_info (with exec_mode == Unpack_exception)
497 return SharedRuntime::deopt_blob()->unpack_with_exception();
498 } else {
499 // The deferred StackWatermarkSet::after_unwind check will be performed in
500 // * OptoRuntime::handle_exception_C_helper for C2 code
501 // * exception_handler_for_pc_helper via Runtime1::handle_exception_from_callee_id for C1 code
502 return nm->exception_begin();
503 }
504 }
505
506 // Entry code
507 if (StubRoutines::returns_to_call_stub(return_address)) {
508 // The deferred StackWatermarkSet::after_unwind check will be performed in
509 // JavaCallWrapper::~JavaCallWrapper
510 return StubRoutines::catch_exception_entry();
511 }
512 if (blob != NULL__null && blob->is_optimized_entry_blob()) {
513 return ((OptimizedEntryBlob*)blob)->exception_handler();
514 }
515 // Interpreted code
516 if (Interpreter::contains(return_address)) {
517 // The deferred StackWatermarkSet::after_unwind check will be performed in
518 // InterpreterRuntime::exception_handler_for_exception
519 return Interpreter::rethrow_exception_entry();
520 }
521
522 guarantee(blob == NULL || !blob->is_runtime_stub(), "caller should have skipped stub")do { if (!(blob == __null || !blob->is_runtime_stub())) { (
*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp"
, 522, "guarantee(" "blob == NULL || !blob->is_runtime_stub()"
") failed", "caller should have skipped stub"); ::breakpoint
(); } } while (0)
;
523 guarantee(!VtableStubs::contains(return_address), "NULL exceptions in vtables should have been handled already!")do { if (!(!VtableStubs::contains(return_address))) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp"
, 523, "guarantee(" "!VtableStubs::contains(return_address)" ") failed"
, "NULL exceptions in vtables should have been handled already!"
); ::breakpoint(); } } while (0)
;
524
525#ifndef PRODUCT
526 { ResourceMark rm;
527 tty->print_cr("No exception handler found for exception at " INTPTR_FORMAT"0x%016" "l" "x" " - potential problems:", p2i(return_address));
528 tty->print_cr("a) exception happened in (new?) code stubs/buffers that is not handled here");
529 tty->print_cr("b) other problem");
530 }
531#endif // PRODUCT
532
533 ShouldNotReachHere()do { (*g_assert_poison) = 'X';; report_should_not_reach_here(
"/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp"
, 533); ::breakpoint(); } while (0)
;
534 return NULL__null;
535}
536
537
538JRT_LEAF(address, SharedRuntime::exception_handler_for_return_address(JavaThread* current, address return_address))address SharedRuntime::exception_handler_for_return_address(JavaThread
* current, address return_address) { NoHandleMark __hm; ; os::
verify_stack_alignment(); NoSafepointVerifier __nsv;
539 return raw_exception_handler_for_return_address(current, return_address);
540JRT_END}
541
542
543address SharedRuntime::get_poll_stub(address pc) {
544 address stub;
545 // Look up the code blob
546 CodeBlob *cb = CodeCache::find_blob(pc);
547
548 // Should be an nmethod
549 guarantee(cb != NULL && cb->is_compiled(), "safepoint polling: pc must refer to an nmethod")do { if (!(cb != __null && cb->is_compiled())) { (
*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp"
, 549, "guarantee(" "cb != NULL && cb->is_compiled()"
") failed", "safepoint polling: pc must refer to an nmethod"
); ::breakpoint(); } } while (0)
;
550
551 // Look up the relocation information
552 assert(((CompiledMethod*)cb)->is_at_poll_or_poll_return(pc),do { if (!(((CompiledMethod*)cb)->is_at_poll_or_poll_return
(pc))) { (*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp"
, 553, "assert(" "((CompiledMethod*)cb)->is_at_poll_or_poll_return(pc)"
") failed", "safepoint polling: type must be poll"); ::breakpoint
(); } } while (0)
553 "safepoint polling: type must be poll")do { if (!(((CompiledMethod*)cb)->is_at_poll_or_poll_return
(pc))) { (*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp"
, 553, "assert(" "((CompiledMethod*)cb)->is_at_poll_or_poll_return(pc)"
") failed", "safepoint polling: type must be poll"); ::breakpoint
(); } } while (0)
;
554
555#ifdef ASSERT
556 if (!((NativeInstruction*)pc)->is_safepoint_poll()) {
557 tty->print_cr("bad pc: " PTR_FORMAT"0x%016" "l" "x", p2i(pc));
558 Disassembler::decode(cb);
559 fatal("Only polling locations are used for safepoint")do { (*g_assert_poison) = 'X';; report_fatal(INTERNAL_ERROR, "/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp"
, 559, "Only polling locations are used for safepoint"); ::breakpoint
(); } while (0)
;
560 }
561#endif
562
563 bool at_poll_return = ((CompiledMethod*)cb)->is_at_poll_return(pc);
564 bool has_wide_vectors = ((CompiledMethod*)cb)->has_wide_vectors();
565 if (at_poll_return) {
566 assert(SharedRuntime::polling_page_return_handler_blob() != NULL,do { if (!(SharedRuntime::polling_page_return_handler_blob() !=
__null)) { (*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp"
, 567, "assert(" "SharedRuntime::polling_page_return_handler_blob() != __null"
") failed", "polling page return stub not created yet"); ::breakpoint
(); } } while (0)
567 "polling page return stub not created yet")do { if (!(SharedRuntime::polling_page_return_handler_blob() !=
__null)) { (*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp"
, 567, "assert(" "SharedRuntime::polling_page_return_handler_blob() != __null"
") failed", "polling page return stub not created yet"); ::breakpoint
(); } } while (0)
;
568 stub = SharedRuntime::polling_page_return_handler_blob()->entry_point();
569 } else if (has_wide_vectors) {
570 assert(SharedRuntime::polling_page_vectors_safepoint_handler_blob() != NULL,do { if (!(SharedRuntime::polling_page_vectors_safepoint_handler_blob
() != __null)) { (*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp"
, 571, "assert(" "SharedRuntime::polling_page_vectors_safepoint_handler_blob() != __null"
") failed", "polling page vectors safepoint stub not created yet"
); ::breakpoint(); } } while (0)
571 "polling page vectors safepoint stub not created yet")do { if (!(SharedRuntime::polling_page_vectors_safepoint_handler_blob
() != __null)) { (*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp"
, 571, "assert(" "SharedRuntime::polling_page_vectors_safepoint_handler_blob() != __null"
") failed", "polling page vectors safepoint stub not created yet"
); ::breakpoint(); } } while (0)
;
572 stub = SharedRuntime::polling_page_vectors_safepoint_handler_blob()->entry_point();
573 } else {
574 assert(SharedRuntime::polling_page_safepoint_handler_blob() != NULL,do { if (!(SharedRuntime::polling_page_safepoint_handler_blob
() != __null)) { (*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp"
, 575, "assert(" "SharedRuntime::polling_page_safepoint_handler_blob() != __null"
") failed", "polling page safepoint stub not created yet"); ::
breakpoint(); } } while (0)
575 "polling page safepoint stub not created yet")do { if (!(SharedRuntime::polling_page_safepoint_handler_blob
() != __null)) { (*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp"
, 575, "assert(" "SharedRuntime::polling_page_safepoint_handler_blob() != __null"
") failed", "polling page safepoint stub not created yet"); ::
breakpoint(); } } while (0)
;
576 stub = SharedRuntime::polling_page_safepoint_handler_blob()->entry_point();
577 }
578 log_debug(safepoint)(!(LogImpl<(LogTag::_safepoint), (LogTag::__NO_TAG), (LogTag
::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::
__NO_TAG)>::is_level(LogLevel::Debug))) ? (void)0 : LogImpl
<(LogTag::_safepoint), (LogTag::__NO_TAG), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG)
>::write<LogLevel::Debug>
("... found polling page %s exception at pc = "
579 INTPTR_FORMAT"0x%016" "l" "x" ", stub =" INTPTR_FORMAT"0x%016" "l" "x",
580 at_poll_return ? "return" : "loop",
581 (intptr_t)pc, (intptr_t)stub);
582 return stub;
583}
584
585
586oop SharedRuntime::retrieve_receiver( Symbol* sig, frame caller ) {
587 assert(caller.is_interpreted_frame(), "")do { if (!(caller.is_interpreted_frame())) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp"
, 587, "assert(" "caller.is_interpreted_frame()" ") failed", ""
); ::breakpoint(); } } while (0)
;
588 int args_size = ArgumentSizeComputer(sig).size() + 1;
589 assert(args_size <= caller.interpreter_frame_expression_stack_size(), "receiver must be on interpreter stack")do { if (!(args_size <= caller.interpreter_frame_expression_stack_size
())) { (*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp"
, 589, "assert(" "args_size <= caller.interpreter_frame_expression_stack_size()"
") failed", "receiver must be on interpreter stack"); ::breakpoint
(); } } while (0)
;
590 oop result = cast_to_oop(*caller.interpreter_frame_tos_at(args_size - 1));
591 assert(Universe::heap()->is_in(result) && oopDesc::is_oop(result), "receiver must be an oop")do { if (!(Universe::heap()->is_in(result) && oopDesc
::is_oop(result))) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp"
, 591, "assert(" "Universe::heap()->is_in(result) && oopDesc::is_oop(result)"
") failed", "receiver must be an oop"); ::breakpoint(); } } while
(0)
;
592 return result;
593}
594
595
596void SharedRuntime::throw_and_post_jvmti_exception(JavaThread* current, Handle h_exception) {
597 if (JvmtiExport::can_post_on_exceptions()) {
598 vframeStream vfst(current, true);
599 methodHandle method = methodHandle(current, vfst.method());
600 address bcp = method()->bcp_from(vfst.bci());
601 JvmtiExport::post_exception_throw(current, method(), bcp, h_exception());
602 }
603
604#if INCLUDE_JVMCI
605 if (EnableJVMCI && UseJVMCICompiler) {
606 vframeStream vfst(current, true);
607 methodHandle method = methodHandle(current, vfst.method());
608 int bci = vfst.bci();
609 MethodData* trap_mdo = method->method_data();
610 if (trap_mdo != NULL__null) {
611 // Set exception_seen if the exceptional bytecode is an invoke
612 Bytecode_invoke call = Bytecode_invoke_check(method, bci);
613 if (call.is_valid()) {
614 ResourceMark rm(current);
615 ProfileData* pdata = trap_mdo->allocate_bci_to_data(bci, NULL__null);
616 if (pdata != NULL__null && pdata->is_BitData()) {
617 BitData* bit_data = (BitData*) pdata;
618 bit_data->set_exception_seen();
619 }
620 }
621 }
622 }
623#endif
624
625 Exceptions::_throw(current, __FILE__"/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp", __LINE__625, h_exception);
626}
627
628void SharedRuntime::throw_and_post_jvmti_exception(JavaThread* current, Symbol* name, const char *message) {
629 Handle h_exception = Exceptions::new_exception(current, name, message);
630 throw_and_post_jvmti_exception(current, h_exception);
631}
632
633// The interpreter code to call this tracing function is only
634// called/generated when UL is on for redefine, class and has the right level
635// and tags. Since obsolete methods are never compiled, we don't have
636// to modify the compilers to generate calls to this function.
637//
638JRT_LEAF(int, SharedRuntime::rc_trace_method_entry(int SharedRuntime::rc_trace_method_entry( JavaThread* thread,
Method* method) { NoHandleMark __hm; ; os::verify_stack_alignment
(); NoSafepointVerifier __nsv;
639 JavaThread* thread, Method* method))int SharedRuntime::rc_trace_method_entry( JavaThread* thread,
Method* method) { NoHandleMark __hm; ; os::verify_stack_alignment
(); NoSafepointVerifier __nsv;
640 if (method->is_obsolete()) {
641 // We are calling an obsolete method, but this is not necessarily
642 // an error. Our method could have been redefined just after we
643 // fetched the Method* from the constant pool.
644 ResourceMark rm;
645 log_trace(redefine, class, obsolete)(!(LogImpl<(LogTag::_redefine), (LogTag::_class), (LogTag::
_obsolete), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::
__NO_TAG)>::is_level(LogLevel::Trace))) ? (void)0 : LogImpl
<(LogTag::_redefine), (LogTag::_class), (LogTag::_obsolete
), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG)
>::write<LogLevel::Trace>
("calling obsolete method '%s'", method->name_and_sig_as_C_string());
646 }
647 return 0;
648JRT_END}
649
650// ret_pc points into caller; we are returning caller's exception handler
651// for given exception
652address SharedRuntime::compute_compiled_exc_handler(CompiledMethod* cm, address ret_pc, Handle& exception,
653 bool force_unwind, bool top_frame_only, bool& recursive_exception_occurred) {
654 assert(cm != NULL, "must exist")do { if (!(cm != __null)) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp"
, 654, "assert(" "cm != __null" ") failed", "must exist"); ::
breakpoint(); } } while (0)
;
655 ResourceMark rm;
656
657#if INCLUDE_JVMCI
658 if (cm->is_compiled_by_jvmci()) {
659 // lookup exception handler for this pc
660 int catch_pco = ret_pc - cm->code_begin();
661 ExceptionHandlerTable table(cm);
662 HandlerTableEntry *t = table.entry_for(catch_pco, -1, 0);
663 if (t != NULL__null) {
664 return cm->code_begin() + t->pco();
665 } else {
666 return Deoptimization::deoptimize_for_missing_exception_handler(cm);
667 }
668 }
669#endif // INCLUDE_JVMCI
670
671 nmethod* nm = cm->as_nmethod();
672 ScopeDesc* sd = nm->scope_desc_at(ret_pc);
673 // determine handler bci, if any
674 EXCEPTION_MARKExceptionMark __em; JavaThread* __the_thread__ = __em.thread(
);
;
675
676 int handler_bci = -1;
677 int scope_depth = 0;
678 if (!force_unwind) {
679 int bci = sd->bci();
680 bool recursive_exception = false;
681 do {
682 bool skip_scope_increment = false;
683 // exception handler lookup
684 Klass* ek = exception->klass();
685 methodHandle mh(THREAD__the_thread__, sd->method());
686 handler_bci = Method::fast_exception_handler_bci_for(mh, ek, bci, THREAD__the_thread__);
687 if (HAS_PENDING_EXCEPTION(((ThreadShadow*)__the_thread__)->has_pending_exception())) {
688 recursive_exception = true;
689 // We threw an exception while trying to find the exception handler.
690 // Transfer the new exception to the exception handle which will
691 // be set into thread local storage, and do another lookup for an
692 // exception handler for this exception, this time starting at the
693 // BCI of the exception handler which caused the exception to be
694 // thrown (bugs 4307310 and 4546590). Set "exception" reference
695 // argument to ensure that the correct exception is thrown (4870175).
696 recursive_exception_occurred = true;
697 exception = Handle(THREAD__the_thread__, PENDING_EXCEPTION(((ThreadShadow*)__the_thread__)->pending_exception()));
698 CLEAR_PENDING_EXCEPTION(((ThreadShadow*)__the_thread__)->clear_pending_exception(
))
;
699 if (handler_bci >= 0) {
700 bci = handler_bci;
701 handler_bci = -1;
702 skip_scope_increment = true;
703 }
704 }
705 else {
706 recursive_exception = false;
707 }
708 if (!top_frame_only && handler_bci < 0 && !skip_scope_increment) {
709 sd = sd->sender();
710 if (sd != NULL__null) {
711 bci = sd->bci();
712 }
713 ++scope_depth;
714 }
715 } while (recursive_exception || (!top_frame_only && handler_bci < 0 && sd != NULL__null));
716 }
717
718 // found handling method => lookup exception handler
719 int catch_pco = ret_pc - nm->code_begin();
720
721 ExceptionHandlerTable table(nm);
722 HandlerTableEntry *t = table.entry_for(catch_pco, handler_bci, scope_depth);
723 if (t == NULL__null && (nm->is_compiled_by_c1() || handler_bci != -1)) {
724 // Allow abbreviated catch tables. The idea is to allow a method
725 // to materialize its exceptions without committing to the exact
726 // routing of exceptions. In particular this is needed for adding
727 // a synthetic handler to unlock monitors when inlining
728 // synchronized methods since the unlock path isn't represented in
729 // the bytecodes.
730 t = table.entry_for(catch_pco, -1, 0);
731 }
732
733#ifdef COMPILER1
734 if (t == NULL__null && nm->is_compiled_by_c1()) {
735 assert(nm->unwind_handler_begin() != NULL, "")do { if (!(nm->unwind_handler_begin() != __null)) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp"
, 735, "assert(" "nm->unwind_handler_begin() != __null" ") failed"
, ""); ::breakpoint(); } } while (0)
;
736 return nm->unwind_handler_begin();
737 }
738#endif
739
740 if (t == NULL__null) {
741 ttyLocker ttyl;
742 tty->print_cr("MISSING EXCEPTION HANDLER for pc " INTPTR_FORMAT"0x%016" "l" "x" " and handler bci %d", p2i(ret_pc), handler_bci);
743 tty->print_cr(" Exception:");
744 exception->print();
745 tty->cr();
746 tty->print_cr(" Compiled exception table :");
747 table.print();
748 nm->print_code();
749 guarantee(false, "missing exception handler")do { if (!(false)) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp"
, 749, "guarantee(" "false" ") failed", "missing exception handler"
); ::breakpoint(); } } while (0)
;
750 return NULL__null;
751 }
752
753 return nm->code_begin() + t->pco();
754}
755
756JRT_ENTRY(void, SharedRuntime::throw_AbstractMethodError(JavaThread* current))void SharedRuntime::throw_AbstractMethodError(JavaThread* current
) { do { if (!(current == JavaThread::current())) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp"
, 756, "assert(" "current == JavaThread::current()" ") failed"
, "Must be"); ::breakpoint(); } } while (0); ; ThreadInVMfromJava
__tiv(current); HandleMarkCleaner __hm(current); JavaThread*
__the_thread__ = current; os::verify_stack_alignment(); VMEntryWrapper
__vew;
757 // These errors occur only at call sites
758 throw_and_post_jvmti_exception(current, vmSymbols::java_lang_AbstractMethodError());
759JRT_END}
760
761JRT_ENTRY(void, SharedRuntime::throw_IncompatibleClassChangeError(JavaThread* current))void SharedRuntime::throw_IncompatibleClassChangeError(JavaThread
* current) { do { if (!(current == JavaThread::current())) { (
*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp"
, 761, "assert(" "current == JavaThread::current()" ") failed"
, "Must be"); ::breakpoint(); } } while (0); ; ThreadInVMfromJava
__tiv(current); HandleMarkCleaner __hm(current); JavaThread*
__the_thread__ = current; os::verify_stack_alignment(); VMEntryWrapper
__vew;
762 // These errors occur only at call sites
763 throw_and_post_jvmti_exception(current, vmSymbols::java_lang_IncompatibleClassChangeError(), "vtable stub");
764JRT_END}
765
766JRT_ENTRY(void, SharedRuntime::throw_ArithmeticException(JavaThread* current))void SharedRuntime::throw_ArithmeticException(JavaThread* current
) { do { if (!(current == JavaThread::current())) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp"
, 766, "assert(" "current == JavaThread::current()" ") failed"
, "Must be"); ::breakpoint(); } } while (0); ; ThreadInVMfromJava
__tiv(current); HandleMarkCleaner __hm(current); JavaThread*
__the_thread__ = current; os::verify_stack_alignment(); VMEntryWrapper
__vew;
767 throw_and_post_jvmti_exception(current, vmSymbols::java_lang_ArithmeticException(), "/ by zero");
768JRT_END}
769
770JRT_ENTRY(void, SharedRuntime::throw_NullPointerException(JavaThread* current))void SharedRuntime::throw_NullPointerException(JavaThread* current
) { do { if (!(current == JavaThread::current())) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp"
, 770, "assert(" "current == JavaThread::current()" ") failed"
, "Must be"); ::breakpoint(); } } while (0); ; ThreadInVMfromJava
__tiv(current); HandleMarkCleaner __hm(current); JavaThread*
__the_thread__ = current; os::verify_stack_alignment(); VMEntryWrapper
__vew;
771 throw_and_post_jvmti_exception(current, vmSymbols::java_lang_NullPointerException(), NULL__null);
772JRT_END}
773
774JRT_ENTRY(void, SharedRuntime::throw_NullPointerException_at_call(JavaThread* current))void SharedRuntime::throw_NullPointerException_at_call(JavaThread
* current) { do { if (!(current == JavaThread::current())) { (
*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp"
, 774, "assert(" "current == JavaThread::current()" ") failed"
, "Must be"); ::breakpoint(); } } while (0); ; ThreadInVMfromJava
__tiv(current); HandleMarkCleaner __hm(current); JavaThread*
__the_thread__ = current; os::verify_stack_alignment(); VMEntryWrapper
__vew;
775 // This entry point is effectively only used for NullPointerExceptions which occur at inline
776 // cache sites (when the callee activation is not yet set up) so we are at a call site
777 throw_and_post_jvmti_exception(current, vmSymbols::java_lang_NullPointerException(), NULL__null);
778JRT_END}
779
780JRT_ENTRY(void, SharedRuntime::throw_StackOverflowError(JavaThread* current))void SharedRuntime::throw_StackOverflowError(JavaThread* current
) { do { if (!(current == JavaThread::current())) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp"
, 780, "assert(" "current == JavaThread::current()" ") failed"
, "Must be"); ::breakpoint(); } } while (0); ; ThreadInVMfromJava
__tiv(current); HandleMarkCleaner __hm(current); JavaThread*
__the_thread__ = current; os::verify_stack_alignment(); VMEntryWrapper
__vew;
781 throw_StackOverflowError_common(current, false);
782JRT_END}
783
784JRT_ENTRY(void, SharedRuntime::throw_delayed_StackOverflowError(JavaThread* current))void SharedRuntime::throw_delayed_StackOverflowError(JavaThread
* current) { do { if (!(current == JavaThread::current())) { (
*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp"
, 784, "assert(" "current == JavaThread::current()" ") failed"
, "Must be"); ::breakpoint(); } } while (0); ; ThreadInVMfromJava
__tiv(current); HandleMarkCleaner __hm(current); JavaThread*
__the_thread__ = current; os::verify_stack_alignment(); VMEntryWrapper
__vew;
785 throw_StackOverflowError_common(current, true);
786JRT_END}
787
788void SharedRuntime::throw_StackOverflowError_common(JavaThread* current, bool delayed) {
789 // We avoid using the normal exception construction in this case because
790 // it performs an upcall to Java, and we're already out of stack space.
791 JavaThread* THREAD__the_thread__ = current; // For exception macros.
792 Klass* k = vmClasses::StackOverflowError_klass();
793 oop exception_oop = InstanceKlass::cast(k)->allocate_instance(CHECK__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return ; (void)(0
);
794 if (delayed) {
795 java_lang_Throwable::set_message(exception_oop,
796 Universe::delayed_stack_overflow_error_message());
797 }
798 Handle exception (current, exception_oop);
799 if (StackTraceInThrowable) {
800 java_lang_Throwable::fill_in_stack_trace(exception);
801 }
802 // Increment counter for hs_err file reporting
803 Atomic::inc(&Exceptions::_stack_overflow_errors);
804 throw_and_post_jvmti_exception(current, exception);
805}
806
807address SharedRuntime::continuation_for_implicit_exception(JavaThread* current,
808 address pc,
809 ImplicitExceptionKind exception_kind)
810{
811 address target_pc = NULL__null;
812
813 if (Interpreter::contains(pc)) {
814 switch (exception_kind) {
815 case IMPLICIT_NULL: return Interpreter::throw_NullPointerException_entry();
816 case IMPLICIT_DIVIDE_BY_ZERO: return Interpreter::throw_ArithmeticException_entry();
817 case STACK_OVERFLOW: return Interpreter::throw_StackOverflowError_entry();
818 default: ShouldNotReachHere()do { (*g_assert_poison) = 'X';; report_should_not_reach_here(
"/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp"
, 818); ::breakpoint(); } while (0)
;
819 }
820 } else {
821 switch (exception_kind) {
822 case STACK_OVERFLOW: {
823 // Stack overflow only occurs upon frame setup; the callee is
824 // going to be unwound. Dispatch to a shared runtime stub
825 // which will cause the StackOverflowError to be fabricated
826 // and processed.
827 // Stack overflow should never occur during deoptimization:
828 // the compiled method bangs the stack by as much as the
829 // interpreter would need in case of a deoptimization. The
830 // deoptimization blob and uncommon trap blob bang the stack
831 // in a debug VM to verify the correctness of the compiled
832 // method stack banging.
833 assert(current->deopt_mark() == NULL, "no stack overflow from deopt blob/uncommon trap")do { if (!(current->deopt_mark() == __null)) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp"
, 833, "assert(" "current->deopt_mark() == __null" ") failed"
, "no stack overflow from deopt blob/uncommon trap"); ::breakpoint
(); } } while (0)
;
834 Events::log_exception(current, "StackOverflowError at " INTPTR_FORMAT"0x%016" "l" "x", p2i(pc));
835 return StubRoutines::throw_StackOverflowError_entry();
836 }
837
838 case IMPLICIT_NULL: {
839 if (VtableStubs::contains(pc)) {
840 // We haven't yet entered the callee frame. Fabricate an
841 // exception and begin dispatching it in the caller. Since
842 // the caller was at a call site, it's safe to destroy all
843 // caller-saved registers, as these entry points do.
844 VtableStub* vt_stub = VtableStubs::stub_containing(pc);
845
846 // If vt_stub is NULL, then return NULL to signal handler to report the SEGV error.
847 if (vt_stub == NULL__null) return NULL__null;
848
849 if (vt_stub->is_abstract_method_error(pc)) {
850 assert(!vt_stub->is_vtable_stub(), "should never see AbstractMethodErrors from vtable-type VtableStubs")do { if (!(!vt_stub->is_vtable_stub())) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp"
, 850, "assert(" "!vt_stub->is_vtable_stub()" ") failed", "should never see AbstractMethodErrors from vtable-type VtableStubs"
); ::breakpoint(); } } while (0)
;
851 Events::log_exception(current, "AbstractMethodError at " INTPTR_FORMAT"0x%016" "l" "x", p2i(pc));
852 // Instead of throwing the abstract method error here directly, we re-resolve
853 // and will throw the AbstractMethodError during resolve. As a result, we'll
854 // get a more detailed error message.
855 return SharedRuntime::get_handle_wrong_method_stub();
856 } else {
857 Events::log_exception(current, "NullPointerException at vtable entry " INTPTR_FORMAT"0x%016" "l" "x", p2i(pc));
858 // Assert that the signal comes from the expected location in stub code.
859 assert(vt_stub->is_null_pointer_exception(pc),
860        "obtained signal from unexpected location in stub code");
861 return StubRoutines::throw_NullPointerException_at_call_entry();
862 }
863 } else {
864 CodeBlob* cb = CodeCache::find_blob(pc);
865
866 // If code blob is NULL, then return NULL to signal handler to report the SEGV error.
867 if (cb == NULL) return NULL;
868
869 // Exception happened in CodeCache. Must be either:
870 // 1. Inline-cache check in C2I handler blob,
871 // 2. Inline-cache check in nmethod, or
872 // 3. Implicit null exception in nmethod
873
874 if (!cb->is_compiled()) {
875 bool is_in_blob = cb->is_adapter_blob() || cb->is_method_handles_adapter_blob();
876 if (!is_in_blob) {
877 // Allow normal crash reporting to handle this
878 return NULL;
879 }
880 Events::log_exception(current, "NullPointerException in code blob at " INTPTR_FORMAT, p2i(pc));
881 // There is no handler here, so we will simply unwind.
882 return StubRoutines::throw_NullPointerException_at_call_entry();
883 }
884
885 // Otherwise, it's a compiled method. Consult its exception handlers.
886 CompiledMethod* cm = (CompiledMethod*)cb;
887 if (cm->inlinecache_check_contains(pc)) {
888 // exception happened inside inline-cache check code
889 // => the nmethod is not yet active (i.e., the frame
890 // is not set up yet) => use return address pushed by
891 // caller => don't push another return address
892 Events::log_exception(current, "NullPointerException in IC check " INTPTR_FORMAT, p2i(pc));
893 return StubRoutines::throw_NullPointerException_at_call_entry();
894 }
895
896 if (cm->method()->is_method_handle_intrinsic()) {
897 // exception happened inside MH dispatch code, similar to a vtable stub
898 Events::log_exception(current, "NullPointerException in MH adapter " INTPTR_FORMAT, p2i(pc));
899 return StubRoutines::throw_NullPointerException_at_call_entry();
900 }
901
902#ifndef PRODUCT
903 _implicit_null_throws++;
904#endif
905 target_pc = cm->continuation_for_implicit_null_exception(pc);
906 // If there's an unexpected fault, target_pc might be NULL,
907 // in which case we want to fall through into the normal
908 // error handling code.
909 }
910
911 break; // fall through
912 }
913
914
915 case IMPLICIT_DIVIDE_BY_ZERO: {
916 CompiledMethod* cm = CodeCache::find_compiled(pc);
917 guarantee(cm != NULL, "must have containing compiled method for implicit division-by-zero exceptions");
918#ifndef PRODUCT
919 _implicit_div0_throws++;
920#endif
921 target_pc = cm->continuation_for_implicit_div0_exception(pc);
922 // If there's an unexpected fault, target_pc might be NULL,
923 // in which case we want to fall through into the normal
924 // error handling code.
925 break; // fall through
926 }
927
928 default: ShouldNotReachHere();
929 }
930
931 assert(exception_kind == IMPLICIT_NULL || exception_kind == IMPLICIT_DIVIDE_BY_ZERO, "wrong implicit exception kind");
932
933 if (exception_kind == IMPLICIT_NULL) {
934#ifndef PRODUCT
935 // for AbortVMOnException flag
936 Exceptions::debug_check_abort("java.lang.NullPointerException");
937#endif //PRODUCT
938 Events::log_exception(current, "Implicit null exception at " INTPTR_FORMAT " to " INTPTR_FORMAT, p2i(pc), p2i(target_pc));
939 } else {
940#ifndef PRODUCT
941 // for AbortVMOnException flag
942 Exceptions::debug_check_abort("java.lang.ArithmeticException");
943#endif //PRODUCT
944 Events::log_exception(current, "Implicit division by zero exception at " INTPTR_FORMAT " to " INTPTR_FORMAT, p2i(pc), p2i(target_pc));
945 }
946 return target_pc;
947 }
948
949 ShouldNotReachHere();
950 return NULL;
951}
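The dispatch above is what the platform signal handler relies on when compiled code takes a SIGSEGV or SIGFPE for an implicit exception. As a rough, hypothetical illustration (not HotSpot code; Linux/x86-64 and a stand-in helper are assumed), a handler would translate the faulting pc into the continuation stub and resume execution there:

#include <signal.h>
#include <ucontext.h>

// Stand-in for SharedRuntime::continuation_for_implicit_exception(); a real VM
// would map the faulting pc to an exception-throwing stub here, or return NULL
// when the fault should go to normal crash reporting.
static void* continuation_for(void* /*faulting_pc*/) { return nullptr; }

static void implicit_exception_handler(int /*sig*/, siginfo_t* /*info*/, void* raw_ctx) {
  ucontext_t* ctx = static_cast<ucontext_t*>(raw_ctx);
  void* pc = reinterpret_cast<void*>(ctx->uc_mcontext.gregs[REG_RIP]);  // x86-64 Linux
  if (void* target = continuation_for(pc)) {
    // Redirect execution to the stub that materializes the Java exception.
    ctx->uc_mcontext.gregs[REG_RIP] = reinterpret_cast<greg_t>(target);
  }
}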
952
953
954/**
955 * Throws a java/lang/UnsatisfiedLinkError. The address of this method is
956 * installed in the native function entry of all native Java methods before
957 * they get linked to their actual native methods.
958 *
959 * \note
960 * This method actually never gets called! The reason is because
961 * the interpreter's native entries call NativeLookup::lookup() which
962 * throws the exception when the lookup fails. The exception is then
963 * caught and forwarded on the return from NativeLookup::lookup() call
964 * before the call to the native function. This might change in the future.
965 */
966JNI_ENTRY(void*, throw_unsatisfied_link_error(JNIEnv* env, ...))
967{
968 // We return a bad value here to make sure that the exception is
969 // forwarded before we look at the return value.
970 THROW_(vmSymbols::java_lang_UnsatisfiedLinkError(), (void*)badAddress);
971}
972JNI_END
973
974address SharedRuntime::native_method_throw_unsatisfied_link_error_entry() {
975 return CAST_FROM_FN_PTR(address, &throw_unsatisfied_link_error);
976}
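The comment above describes a lazy-linking scheme: every native method starts out pointing at an entry that would raise UnsatisfiedLinkError, and the real target is swapped in once lookup succeeds. A minimal standalone sketch of that idea (hypothetical names, not HotSpot code):

#include <stdexcept>

using NativeEntry = void* (*)();

// Placeholder installed before linking, analogous to throw_unsatisfied_link_error above.
static void* unsatisfied_link_stub() {
  throw std::runtime_error("UnsatisfiedLinkError: native method not yet linked");
}

struct NativeMethodSlot {
  NativeEntry entry = unsatisfied_link_stub;              // default until linked
  void link(NativeEntry resolved) { entry = resolved; }   // install the real function
  void* invoke() { return entry(); }
};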
977
978JRT_ENTRY_NO_ASYNC(void, SharedRuntime::register_finalizer(JavaThread* current, oopDesc* obj))
979#if INCLUDE_JVMCI
980 if (!obj->klass()->has_finalizer()) {
981 return;
982 }
983#endif // INCLUDE_JVMCI
984 assert(oopDesc::is_oop(obj), "must be a valid oop");
985 assert(obj->klass()->has_finalizer(), "shouldn't be here otherwise");
986 InstanceKlass::register_finalizer(instanceOop(obj), CHECK);
987JRT_END
988
989jlong SharedRuntime::get_java_tid(Thread* thread) {
990 if (thread != NULL) {
991 if (thread->is_Java_thread()) {
992 oop obj = JavaThread::cast(thread)->threadObj();
993 return (obj == NULL) ? 0 : java_lang_Thread::thread_id(obj);
994 }
995 }
996 return 0;
997}
998
999/**
1000 * This function ought to be a void function, but cannot be because
1001 * it gets turned into a tail-call on sparc, which runs into dtrace bug
1002 * 6254741. Once that is fixed we can remove the dummy return value.
1003 */
1004int SharedRuntime::dtrace_object_alloc(oopDesc* o) {
1005 return dtrace_object_alloc(Thread::current(), o, o->size());
1006}
1007
1008int SharedRuntime::dtrace_object_alloc(Thread* thread, oopDesc* o) {
1009 return dtrace_object_alloc(thread, o, o->size());
1010}
1011
1012int SharedRuntime::dtrace_object_alloc(Thread* thread, oopDesc* o, size_t size) {
1013 assert(DTraceAllocProbes, "wrong call");
1014 Klass* klass = o->klass();
1015 Symbol* name = klass->name();
1016 HOTSPOT_OBJECT_ALLOC(
1017 get_java_tid(thread),
1018 (char *) name->bytes(), name->utf8_length(), size * HeapWordSize);
1019 return 0;
1020}
1021
1022JRT_LEAF(int, SharedRuntime::dtrace_method_entry(
1023 JavaThread* current, Method* method))
1024 assert(DTraceMethodProbes, "wrong call");
1025 Symbol* kname = method->klass_name();
Value stored to 'kname' during its initialization is never read
1026 Symbol* name = method->name();
1027 Symbol* sig = method->signature();
1028 HOTSPOT_METHOD_ENTRY(
1029 get_java_tid(current),
1030 (char *) kname->bytes(), kname->utf8_length(),
1031 (char *) name->bytes(), name->utf8_length(),
1032 (char *) sig->bytes(), sig->utf8_length());
1033 return 0;
1034JRT_END
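The dead-store finding reported at line 1025 is easiest to see in isolation: the three locals only feed the HOTSPOT_METHOD_ENTRY probe, so in a configuration where that probe macro expands to nothing the initializations are never read. A minimal standalone sketch of that pattern (hypothetical names, not HotSpot code):

// Assume the probe compiles away, as it can in builds without dtrace/SDT support.
#define PROBE_METHOD_ENTRY(tid, kname, klen, name, nlen, sig, slen) /* no-op */

static int probe_method_entry_sketch(long tid, const char* k, const char* n, const char* s) {
  const char* kname = k;   // analyzer: value stored to 'kname' is never read
  const char* name  = n;   // the same reasoning applies here
  const char* sig   = s;   // and here
  PROBE_METHOD_ENTRY(tid, kname, 0, name, 0, sig, 0);
  return 0;
}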
1035
1036JRT_LEAF(int, SharedRuntime::dtrace_method_exit(
1037 JavaThread* current, Method* method))
1038 assert(DTraceMethodProbes, "wrong call");
1039 Symbol* kname = method->klass_name();
1040 Symbol* name = method->name();
1041 Symbol* sig = method->signature();
1042 HOTSPOT_METHOD_RETURN(
1043 get_java_tid(current),
1044 (char *) kname->bytes(), kname->utf8_length(),
1045 (char *) name->bytes(), name->utf8_length(),
1046 (char *) sig->bytes(), sig->utf8_length());
1047 return 0;
1048JRT_END
1049
1050
1051// Finds receiver, CallInfo (i.e. receiver method), and calling bytecode
1052// for a call currently in progress, i.e., arguments have been pushed on the stack
1053// but the callee has not been invoked yet. Used by: resolve virtual/static,
1054// vtable updates, etc. Caller frame must be compiled.
1055Handle SharedRuntime::find_callee_info(Bytecodes::Code& bc, CallInfo& callinfo, TRAPS) {
1056 JavaThread* current = THREAD;
1057 ResourceMark rm(current);
1058
1059 // last java frame on stack (which includes native call frames)
1060 vframeStream vfst(current, true); // Do not skip any javaCalls
1061
1062 return find_callee_info_helper(vfst, bc, callinfo, THREAD);
1063}
1064
1065Method* SharedRuntime::extract_attached_method(vframeStream& vfst) {
1066 CompiledMethod* caller = vfst.nm();
1067
1068 nmethodLocker caller_lock(caller);
1069
1070 address pc = vfst.frame_pc();
1071 { // Get call instruction under lock because another thread may be busy patching it.
1072 CompiledICLocker ic_locker(caller);
1073 return caller->attached_method_before_pc(pc);
1074 }
1075 return NULL;
1076}
1077
1078// Finds receiver, CallInfo (i.e. receiver method), and calling bytecode
1079// for a call currently in progress, i.e., arguments have been pushed on the stack
1080// but the callee has not been invoked yet. Caller frame must be compiled.
1081Handle SharedRuntime::find_callee_info_helper(vframeStream& vfst, Bytecodes::Code& bc,
1082 CallInfo& callinfo, TRAPS) {
1083 Handle receiver;
1084 Handle nullHandle; // create a handy null handle for exception returns
1085 JavaThread* current = THREAD;
1086
1087 assert(!vfst.at_end(), "Java frame must exist");
1088
1089 // Find caller and bci from vframe
1090 methodHandle caller(current, vfst.method());
1091 int bci = vfst.bci();
1092
1093 Bytecode_invoke bytecode(caller, bci);
1094 int bytecode_index = bytecode.index();
1095 bc = bytecode.invoke_code();
1096
1097 methodHandle attached_method(current, extract_attached_method(vfst));
1098 if (attached_method.not_null()) {
1099 Method* callee = bytecode.static_target(CHECK_NH);
1100 vmIntrinsics::ID id = callee->intrinsic_id();
1101 // When VM replaces MH.invokeBasic/linkTo* call with a direct/virtual call,
1102 // it attaches statically resolved method to the call site.
1103 if (MethodHandles::is_signature_polymorphic(id) &&
1104 MethodHandles::is_signature_polymorphic_intrinsic(id)) {
1105 bc = MethodHandles::signature_polymorphic_intrinsic_bytecode(id);
1106
1107 // Adjust invocation mode according to the attached method.
1108 switch (bc) {
1109 case Bytecodes::_invokevirtual:
1110 if (attached_method->method_holder()->is_interface()) {
1111 bc = Bytecodes::_invokeinterface;
1112 }
1113 break;
1114 case Bytecodes::_invokeinterface:
1115 if (!attached_method->method_holder()->is_interface()) {
1116 bc = Bytecodes::_invokevirtual;
1117 }
1118 break;
1119 case Bytecodes::_invokehandle:
1120 if (!MethodHandles::is_signature_polymorphic_method(attached_method())) {
1121 bc = attached_method->is_static() ? Bytecodes::_invokestatic
1122 : Bytecodes::_invokevirtual;
1123 }
1124 break;
1125 default:
1126 break;
1127 }
1128 }
1129 }
1130
1131 assert(bc != Bytecodes::_illegal, "not initialized");
1132
1133 bool has_receiver = bc != Bytecodes::_invokestatic &&
1134 bc != Bytecodes::_invokedynamic &&
1135 bc != Bytecodes::_invokehandle;
1136
1137 // Find receiver for non-static call
1138 if (has_receiver) {
1139 // This register map must be updated since we need to find the receiver for
1140 // compiled frames. The receiver might be in a register.
1141 RegisterMap reg_map2(current);
1142 frame stubFrame = current->last_frame();
1143 // Caller-frame is a compiled frame
1144 frame callerFrame = stubFrame.sender(&reg_map2);
1145
1146 if (attached_method.is_null()) {
1147 Method* callee = bytecode.static_target(CHECK_NH);
1148 if (callee == NULL) {
1149 THROW_(vmSymbols::java_lang_NoSuchMethodException(), nullHandle);
1150 }
1151 }
1152
1153 // Retrieve from a compiled argument list
1154 receiver = Handle(current, callerFrame.retrieve_receiver(&reg_map2));
1155
1156 if (receiver.is_null()) {
1157 THROW_(vmSymbols::java_lang_NullPointerException(), nullHandle);
1158 }
1159 }
1160
1161 // Resolve method
1162 if (attached_method.not_null()) {
1163 // Parameterized by attached method.
1164 LinkResolver::resolve_invoke(callinfo, receiver, attached_method, bc, CHECK_NH);
1165 } else {
1166 // Parameterized by bytecode.
1167 constantPoolHandle constants(current, caller->constants());
1168 LinkResolver::resolve_invoke(callinfo, receiver, constants, bytecode_index, bc, CHECK_NH);
1169 }
1170
1171#ifdef ASSERT
1172 // Check that the receiver klass is of the right subtype and that it is initialized for virtual calls
1173 if (has_receiver) {
1174 assert(receiver.not_null(), "should have thrown exception");
1175 Klass* receiver_klass = receiver->klass();
1176 Klass* rk = NULL;
1177 if (attached_method.not_null()) {
1178 // In case there's resolved method attached, use its holder during the check.
1179 rk = attached_method->method_holder();
1180 } else {
1181 // Klass is already loaded.
1182 constantPoolHandle constants(current, caller->constants());
1183 rk = constants->klass_ref_at(bytecode_index, CHECK_NH);
1184 }
1185 Klass* static_receiver_klass = rk;
1186 assert(receiver_klass->is_subtype_of(static_receiver_klass),
1187        "actual receiver must be subclass of static receiver klass");
1188 if (receiver_klass->is_instance_klass()) {
1189 if (InstanceKlass::cast(receiver_klass)->is_not_initialized()) {
1190 tty->print_cr("ERROR: Klass not yet initialized!!");
1191 receiver_klass->print();
1192 }
1193 assert(!InstanceKlass::cast(receiver_klass)->is_not_initialized(), "receiver_klass must be initialized");
1194 }
1195 }
1196#endif
1197
1198 return receiver;
1199}
1200
1201methodHandle SharedRuntime::find_callee_method(TRAPS) {
1202 JavaThread* current = THREAD;
1203 ResourceMark rm(current);
1204 // We need first to check if any Java activations (compiled, interpreted)
1205 // exist on the stack since last JavaCall. If not, we need
1206 // to get the target method from the JavaCall wrapper.
1207 vframeStream vfst(current, true); // Do not skip any javaCalls
1208 methodHandle callee_method;
1209 if (vfst.at_end()) {
1210 // No Java frames were found on stack since we did the JavaCall.
1211 // Hence the stack can only contain an entry_frame. We need to
1212 // find the target method from the stub frame.
1213 RegisterMap reg_map(current, false);
1214 frame fr = current->last_frame();
1215 assert(fr.is_runtime_frame(), "must be a runtimeStub");
1216 fr = fr.sender(&reg_map);
1217 assert(fr.is_entry_frame(), "must be");
1218 // fr is now pointing to the entry frame.
1219 callee_method = methodHandle(current, fr.entry_frame_call_wrapper()->callee_method());
1220 } else {
1221 Bytecodes::Code bc;
1222 CallInfo callinfo;
1223 find_callee_info_helper(vfst, bc, callinfo, CHECK_(methodHandle()));
1224 callee_method = methodHandle(current, callinfo.selected_method());
1225 }
1226 assert(callee_method()->is_method(), "must be");
1227 return callee_method;
1228}
1229
1230// Resolves a call.
1231methodHandle SharedRuntime::resolve_helper(bool is_virtual, bool is_optimized, TRAPS) {
1232 methodHandle callee_method;
1233 callee_method = resolve_sub_helper(is_virtual, is_optimized, THREAD);
1234 if (JvmtiExport::can_hotswap_or_post_breakpoint()) {
1235 int retry_count = 0;
1236 while (!HAS_PENDING_EXCEPTION && callee_method->is_old() &&
1237 callee_method->method_holder() != vmClasses::Object_klass()) {
1238 // If has a pending exception then there is no need to re-try to
1239 // resolve this method.
1240 // If the method has been redefined, we need to try again.
1241 // Hack: we have no way to update the vtables of arrays, so don't
1242 // require that java.lang.Object has been updated.
1243
1244 // It is very unlikely that method is redefined more than 100 times
1245 // in the middle of resolve. If it is looping here more than 100 times
1246 // it probably means there is a bug here.
1247 guarantee((retry_count++ < 100),
1248           "Could not resolve to latest version of redefined method");
1249 // method is redefined in the middle of resolve so re-try.
1250 callee_method = resolve_sub_helper(is_virtual, is_optimized, THREAD);
1251 }
1252 }
1253 return callee_method;
1254}
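The loop above is a bounded re-resolve: if the callee is redefined (JVMTI hotswap) while resolution is in flight, resolution is simply retried, and the guarantee turns a potential livelock into a reported bug after 100 attempts. A stripped-down sketch of the same shape (hypothetical names, not HotSpot code):

#include <cassert>

template <typename ResolveFn, typename StaleFn>
auto resolve_with_retry(ResolveFn resolve, StaleFn is_stale, int max_retries = 100) {
  auto result = resolve();
  int retry_count = 0;
  while (is_stale(result)) {
    // The target may have been redefined in the middle of resolution; retry,
    // but fail loudly if this keeps happening so a bug does not hang the caller.
    assert(retry_count++ < max_retries && "could not resolve to latest version");
    result = resolve();
  }
  return result;
}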
1255
1256// This fails if resolution required refilling of IC stubs
1257bool SharedRuntime::resolve_sub_helper_internal(methodHandle callee_method, const frame& caller_frame,
1258 CompiledMethod* caller_nm, bool is_virtual, bool is_optimized,
1259 Handle receiver, CallInfo& call_info, Bytecodes::Code invoke_code, TRAPS) {
1260 StaticCallInfo static_call_info;
1261 CompiledICInfo virtual_call_info;
1262
1263 // Make sure the callee nmethod does not get deoptimized and removed before
1264 // we are done patching the code.
1265 CompiledMethod* callee = callee_method->code();
1266
1267 if (callee != NULL) {
1268 assert(callee->is_compiled(), "must be nmethod for patching");
1270
1271 if (callee != NULL && !callee->is_in_use()) {
1272 // Patch call site to C2I adapter if callee nmethod is deoptimized or unloaded.
1273 callee = NULL;
1274 }
1275 nmethodLocker nl_callee(callee);
1276#ifdef ASSERT
1277 address dest_entry_point = callee == NULL ? 0 : callee->entry_point(); // used below
1278#endif
1279
1280 bool is_nmethod = caller_nm->is_nmethod();
1281
1282 if (is_virtual) {
1283 assert(receiver.not_null() || invoke_code == Bytecodes::_invokehandle, "sanity check");
1284 bool static_bound = call_info.resolved_method()->can_be_statically_bound();
1285 Klass* klass = invoke_code == Bytecodes::_invokehandle ? NULL : receiver->klass();
1286 CompiledIC::compute_monomorphic_entry(callee_method, klass,
1287 is_optimized, static_bound, is_nmethod, virtual_call_info,
1288 CHECK_false);
1289 } else {
1290 // static call
1291 CompiledStaticCall::compute_entry(callee_method, is_nmethod, static_call_info);
1292 }
1293
1294 // grab lock, check for deoptimization and potentially patch caller
1295 {
1296 CompiledICLocker ml(caller_nm);
1297
1298 // Lock blocks for safepoint during which both nmethods can change state.
1299
1300 // Now that we are ready to patch if the Method* was redefined then
1301 // don't update call site and let the caller retry.
1302 // Don't update call site if callee nmethod was unloaded or deoptimized.
1303 // Don't update call site if callee nmethod was replaced by another nmethod,
1304 // which may happen when multiple alive nmethods (tiered compilation)
1305 // will be supported.
1306 if (!callee_method->is_old() &&
1307 (callee == NULL || (callee->is_in_use() && callee_method->code() == callee))) {
1308 NoSafepointVerifier nsv;
1309#ifdef ASSERT
1310 // We must not try to patch to jump to an already unloaded method.
1311 if (dest_entry_point != 0) {
1312 CodeBlob* cb = CodeCache::find_blob(dest_entry_point);
1313 assert((cb != NULL) && cb->is_compiled() && (((CompiledMethod*)cb) == callee),
1314        "should not call unloaded nmethod");
1315 }
1316#endif
1317 if (is_virtual) {
1318 CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1319 if (inline_cache->is_clean()) {
1320 if (!inline_cache->set_to_monomorphic(virtual_call_info)) {
1321 return false;
1322 }
1323 }
1324 } else {
1325 if (VM_Version::supports_fast_class_init_checks() &&
1326 invoke_code == Bytecodes::_invokestatic &&
1327 callee_method->needs_clinit_barrier() &&
1328 callee != NULL && callee->is_compiled_by_jvmci()) {
1329 return true; // skip patching for JVMCI
1330 }
1331 CompiledStaticCall* ssc = caller_nm->compiledStaticCall_before(caller_frame.pc());
1332 if (ssc->is_clean()) ssc->set(static_call_info);
1333 }
1334 }
1335 } // unlock CompiledICLocker
1336 return true;
1337}
1338
1339// Resolves a call. The compilers generate code for calls that go here
1340// and are patched with the real destination of the call.
1341methodHandle SharedRuntime::resolve_sub_helper(bool is_virtual, bool is_optimized, TRAPS) {
1342 JavaThread* current = THREAD;
1343 ResourceMark rm(current);
1344 RegisterMap cbl_map(current, false);
1345 frame caller_frame = current->last_frame().sender(&cbl_map);
1346
1347 CodeBlob* caller_cb = caller_frame.cb();
1348 guarantee(caller_cb != NULL && caller_cb->is_compiled(), "must be called from compiled method");
1349 CompiledMethod* caller_nm = caller_cb->as_compiled_method_or_null();
1350
1351 // make sure caller is not getting deoptimized
1352 // and removed before we are done with it.
1353 // CLEANUP - with lazy deopt shouldn't need this lock
1354 nmethodLocker caller_lock(caller_nm);
1355
1356 // determine call info & receiver
1357 // note: a) receiver is NULL for static calls
1358 // b) an exception is thrown if receiver is NULL for non-static calls
1359 CallInfo call_info;
1360 Bytecodes::Code invoke_code = Bytecodes::_illegal;
1361 Handle receiver = find_callee_info(invoke_code, call_info, CHECK_(methodHandle()));
1362 methodHandle callee_method(current, call_info.selected_method());
1363
1364 assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
1365        (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
1366        (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1367        (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1368        ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1369
1370 assert(caller_nm->is_alive() && !caller_nm->is_unloading(), "It should be alive");
1371
1372#ifndef PRODUCT
1373 // tracing/debugging/statistics
1374 int *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1375 (is_virtual) ? (&_resolve_virtual_ctr) :
1376 (&_resolve_static_ctr);
1377 Atomic::inc(addr);
1378
1379 if (TraceCallFixup) {
1380 ResourceMark rm(current);
1381 tty->print("resolving %s%s (%s) call to",
1382 (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1383 Bytecodes::name(invoke_code));
1384 callee_method->print_short_name(tty);
1385 tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
1386 p2i(caller_frame.pc()), p2i(callee_method->code()));
1387 }
1388#endif
1389
1390 if (invoke_code == Bytecodes::_invokestatic) {
1391 assert(callee_method->method_holder()->is_initialized() ||
1392        callee_method->method_holder()->is_reentrant_initialization(current),
1393        "invalid class initialization state for invoke_static");
1394 if (!VM_Version::supports_fast_class_init_checks() && callee_method->needs_clinit_barrier()) {
1395 // In order to keep class initialization check, do not patch call
1396 // site for static call when the class is not fully initialized.
1397 // Proper check is enforced by call site re-resolution on every invocation.
1398 //
1399 // When fast class initialization checks are supported (VM_Version::supports_fast_class_init_checks() == true),
1400 // explicit class initialization check is put in nmethod entry (VEP).
1401 assert(callee_method->method_holder()->is_linked(), "must be");
1402 return callee_method;
1403 }
1404 }
1405
1406 // JSR 292 key invariant:
1407 // If the resolved method is a MethodHandle invoke target, the call
1408 // site must be a MethodHandle call site, because the lambda form might tail-call
1409 // leaving the stack in a state unknown to either caller or callee
1410 // TODO detune for now but we might need it again
1411// assert(!callee_method->is_compiled_lambda_form() ||
1412// caller_nm->is_method_handle_return(caller_frame.pc()), "must be MH call site");
1413
1414 // Compute entry points. This might require generation of C2I converter
1415 // frames, so we cannot be holding any locks here. Furthermore, the
1416 // computation of the entry points is independent of patching the call. We
1417 // always return the entry-point, but we only patch the stub if the call has
1418 // not been deoptimized. Return values: For a virtual call this is an
1419 // (cached_oop, destination address) pair. For a static call/optimized
1420 // virtual this is just a destination address.
1421
1422 // Patching IC caches may fail if we run out of transition stubs.
1423 // We refill the ic stubs then and try again.
1424 for (;;) {
1425 ICRefillVerifier ic_refill_verifier;
1426 bool successful = resolve_sub_helper_internal(callee_method, caller_frame, caller_nm,
1427 is_virtual, is_optimized, receiver,
1428 call_info, invoke_code, CHECK_(methodHandle()));
1429 if (successful) {
1430 return callee_method;
1431 } else {
1432 InlineCacheBuffer::refill_ic_stubs();
1433 }
1434 }
1435
1436}
1437
1438
1439// Inline caches exist only in compiled code
1440JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* current))
1441#ifdef ASSERT
1442 RegisterMap reg_map(current, false);
1443 frame stub_frame = current->last_frame();
1444 assert(stub_frame.is_runtime_frame(), "sanity check");
1445 frame caller_frame = stub_frame.sender(&reg_map);
1446 assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame() && !caller_frame.is_optimized_entry_frame(), "unexpected frame");
1447#endif /* ASSERT */
1448
1449 methodHandle callee_method;
1450 JRT_BLOCK
1451 callee_method = SharedRuntime::handle_ic_miss_helper(CHECK_NULL);
1452 // Return Method* through TLS
1453 current->set_vm_result_2(callee_method());
1454 JRT_BLOCK_END
1455 // return compiled code entry point after potential safepoints
1456 assert(callee_method->verified_code_entry() != NULL, " Jump to zero!");
1457 return callee_method->verified_code_entry();
1458JRT_END
1459
1460
1461// Handle call site that has been made non-entrant
1462JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* current))
1463 // 6243940 We might end up in here if the callee is deoptimized
1464 // as we race to call it. We don't want to take a safepoint if
1465 // the caller was interpreted because the caller frame will look
1466 // interpreted to the stack walkers and arguments are now
1467 // "compiled" so it is much better to make this transition
1468 // invisible to the stack walking code. The i2c path will
1469 // place the callee method in the callee_target. It is stashed
1470 // there because if we try to find the callee by normal means a
1471 // safepoint is possible and we would have trouble gc'ing the compiled args.
1472 RegisterMap reg_map(current, false);
1473 frame stub_frame = current->last_frame();
1474 assert(stub_frame.is_runtime_frame(), "sanity check");
1475 frame caller_frame = stub_frame.sender(&reg_map);
1476
1477 if (caller_frame.is_interpreted_frame() ||
1478 caller_frame.is_entry_frame() ||
1479 caller_frame.is_optimized_entry_frame()) {
1480 Method* callee = current->callee_target();
1481 guarantee(callee != NULL && callee->is_method(), "bad handshake");
1482 current->set_vm_result_2(callee);
1483 current->set_callee_target(NULL);
1484 if (caller_frame.is_entry_frame() && VM_Version::supports_fast_class_init_checks()) {
1485 // Bypass class initialization checks in c2i when caller is in native.
1486 // JNI calls to static methods don't have class initialization checks.
1487 // Fast class initialization checks are present in c2i adapters and call into
1488 // SharedRuntime::handle_wrong_method() on the slow path.
1489 //
1490 // JVM upcalls may land here as well, but there's a proper check present in
1491 // LinkResolver::resolve_static_call (called from JavaCalls::call_static),
1492 // so bypassing it in c2i adapter is benign.
1493 return callee->get_c2i_no_clinit_check_entry();
1494 } else {
1495 return callee->get_c2i_entry();
1496 }
1497 }
1498
1499 // Must be compiled to compiled path which is safe to stackwalk
1500 methodHandle callee_method;
1501 JRT_BLOCK
1502 // Force resolving of caller (if we called from compiled frame)
1503 callee_method = SharedRuntime::reresolve_call_site(CHECK_NULL);
1504 current->set_vm_result_2(callee_method());
1505 JRT_BLOCK_END
1506 // return compiled code entry point after potential safepoints
1507 assert(callee_method->verified_code_entry() != NULL, " Jump to zero!");
1508 return callee_method->verified_code_entry();
1509JRT_END
1510
1511// Handle abstract method call
1512JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* current))
1513 // Verbose error message for AbstractMethodError.
1514 // Get the called method from the invoke bytecode.
1515 vframeStream vfst(current, true);
1516 assert(!vfst.at_end(), "Java frame must exist");
1517 methodHandle caller(current, vfst.method());
1518 Bytecode_invoke invoke(caller, vfst.bci());
1519 DEBUG_ONLY( invoke.verify(); )
1520
1521 // Find the compiled caller frame.
1522 RegisterMap reg_map(current);
1523 frame stubFrame = current->last_frame();
1524 assert(stubFrame.is_runtime_frame(), "must be");
1525 frame callerFrame = stubFrame.sender(&reg_map);
1526 assert(callerFrame.is_compiled_frame(), "must be");
1527
1528 // Install exception and return forward entry.
1529 address res = StubRoutines::throw_AbstractMethodError_entry();
1530 JRT_BLOCK
1531 methodHandle callee(current, invoke.static_target(current));
1532 if (!callee.is_null()) {
1533 oop recv = callerFrame.retrieve_receiver(&reg_map);
1534 Klass *recv_klass = (recv != NULL) ? recv->klass() : NULL;
1535 res = StubRoutines::forward_exception_entry();
1536 LinkResolver::throw_abstract_method_error(callee, recv_klass, CHECK_(res));
1537 }
1538 JRT_BLOCK_END
1539 return res;
1540JRT_END
1541
1542
1543// resolve a static call and patch code
1544JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread* current ))
1545 methodHandle callee_method;
1546 JRT_BLOCK
1547 callee_method = SharedRuntime::resolve_helper(false, false, CHECK_NULL);
1548 current->set_vm_result_2(callee_method());
1549 JRT_BLOCK_END
1550 // return compiled code entry point after potential safepoints
1551 assert(callee_method->verified_code_entry() != NULL, " Jump to zero!");
1552 return callee_method->verified_code_entry();
1553JRT_END
1554
1555
1556// resolve virtual call and update inline cache to monomorphic
1557JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread* current))
1558 methodHandle callee_method;
1559 JRT_BLOCK
1560 callee_method = SharedRuntime::resolve_helper(true, false, CHECK_NULL);
1561 current->set_vm_result_2(callee_method());
1562 JRT_BLOCK_END
1563 // return compiled code entry point after potential safepoints
1564 assert(callee_method->verified_code_entry() != NULL, " Jump to zero!");
1565 return callee_method->verified_code_entry();
1566JRT_END
1567
1568
1569// Resolve a virtual call that can be statically bound (e.g., always
1570// monomorphic, so it has no inline cache). Patch code to resolved target.
1571JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread* current))
1572 methodHandle callee_method;
1573 JRT_BLOCK
1574 callee_method = SharedRuntime::resolve_helper(true, true, CHECK_NULL);
1575 current->set_vm_result_2(callee_method());
1576 JRT_BLOCK_END}
1577 // return compiled code entry point after potential safepoints
1578 assert(callee_method->verified_code_entry() != NULL, " Jump to zero!")do { if (!(callee_method->verified_code_entry() != __null)
) { (*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/sharedRuntime.cpp"
, 1578, "assert(" "callee_method->verified_code_entry() != __null"
") failed", " Jump to zero!"); ::breakpoint(); } } while (0)
;
1579 return callee_method->verified_code_entry();
1580JRT_END}
1581
1582// The handle_ic_miss_helper_internal function returns false if it failed due
1583// to either running out of vtable stubs or ic stubs due to IC transitions
1584// to transitional states. The needs_ic_stub_refill value will be set if
1585// the failure was due to running out of IC stubs, in which case handle_ic_miss_helper
1586// refills the IC stubs and tries again.
1587bool SharedRuntime::handle_ic_miss_helper_internal(Handle receiver, CompiledMethod* caller_nm,
1588 const frame& caller_frame, methodHandle callee_method,
1589 Bytecodes::Code bc, CallInfo& call_info,
1590                                                    bool& needs_ic_stub_refill, TRAPS) {
1591 CompiledICLocker ml(caller_nm);
1592 CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1593 bool should_be_mono = false;
1594 if (inline_cache->is_optimized()) {
1595 if (TraceCallFixup) {
1596      ResourceMark rm(THREAD);
1597      tty->print("OPTIMIZED IC miss (%s) call to", Bytecodes::name(bc));
1598      callee_method->print_short_name(tty);
1599      tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1600 }
1601 should_be_mono = true;
1602 } else if (inline_cache->is_icholder_call()) {
1603 CompiledICHolder* ic_oop = inline_cache->cached_icholder();
1604    if (ic_oop != NULL) {
1605 if (!ic_oop->is_loader_alive()) {
1606 // Deferred IC cleaning due to concurrent class unloading
1607 if (!inline_cache->set_to_clean()) {
1608 needs_ic_stub_refill = true;
1609 return false;
1610 }
1611 } else if (receiver()->klass() == ic_oop->holder_klass()) {
1612 // This isn't a real miss. We must have seen that compiled code
1613 // is now available and we want the call site converted to a
1614 // monomorphic compiled call site.
1615 // We can't assert for callee_method->code() != NULL because it
1616 // could have been deoptimized in the meantime
1617 if (TraceCallFixup) {
1618          ResourceMark rm(THREAD);
1619          tty->print("FALSE IC miss (%s) converting to compiled call to", Bytecodes::name(bc));
1620          callee_method->print_short_name(tty);
1621          tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1622 }
1623 should_be_mono = true;
1624 }
1625 }
1626 }
1627
1628 if (should_be_mono) {
1629 // We have a path that was monomorphic but was going interpreted
1630 // and now we have (or had) a compiled entry. We correct the IC
1631 // by using a new icBuffer.
1632 CompiledICInfo info;
1633 Klass* receiver_klass = receiver()->klass();
1634 inline_cache->compute_monomorphic_entry(callee_method,
1635 receiver_klass,
1636 inline_cache->is_optimized(),
1637 false, caller_nm->is_nmethod(),
1638                                            info, CHECK_false);
1639 if (!inline_cache->set_to_monomorphic(info)) {
1640 needs_ic_stub_refill = true;
1641 return false;
1642 }
1643 } else if (!inline_cache->is_megamorphic() && !inline_cache->is_clean()) {
1644 // Potential change to megamorphic
1645
1646    bool successful = inline_cache->set_to_megamorphic(&call_info, bc, needs_ic_stub_refill, CHECK_false);
1647 if (needs_ic_stub_refill) {
1648 return false;
1649 }
1650 if (!successful) {
1651 if (!inline_cache->set_to_clean()) {
1652 needs_ic_stub_refill = true;
1653 return false;
1654 }
1655 }
1656 } else {
1657 // Either clean or megamorphic
1658 }
1659 return true;
1660}
1661
1662methodHandle SharedRuntime::handle_ic_miss_helper(TRAPS) {
1663  JavaThread* current = THREAD;
1664 ResourceMark rm(current);
1665 CallInfo call_info;
1666 Bytecodes::Code bc;
1667
1668 // receiver is NULL for static calls. An exception is thrown for NULL
1669 // receivers for non-static calls
1670  Handle receiver = find_callee_info(bc, call_info, CHECK_(methodHandle()));
1671 // Compiler1 can produce virtual call sites that can actually be statically bound
1672 // If we fell thru to below we would think that the site was going megamorphic
1673 // when in fact the site can never miss. Worse because we'd think it was megamorphic
1674 // we'd try and do a vtable dispatch however methods that can be statically bound
1675 // don't have vtable entries (vtable_index < 0) and we'd blow up. So we force a
1676 // reresolution of the call site (as if we did a handle_wrong_method and not an
1677 // plain ic_miss) and the site will be converted to an optimized virtual call site
1678 // never to miss again. I don't believe C2 will produce code like this but if it
1679 // did this would still be the correct thing to do for it too, hence no ifdef.
1680 //
1681 if (call_info.resolved_method()->can_be_statically_bound()) {
1682    methodHandle callee_method = SharedRuntime::reresolve_call_site(CHECK_(methodHandle()));
1683 if (TraceCallFixup) {
1684 RegisterMap reg_map(current, false);
1685 frame caller_frame = current->last_frame().sender(&reg_map);
1686 ResourceMark rm(current);
1687 tty->print("converting IC miss to reresolve (%s) call to", Bytecodes::name(bc));
1688 callee_method->print_short_name(tty);
1689      tty->print_cr(" from pc: " INTPTR_FORMAT, p2i(caller_frame.pc()));
1690      tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1691 }
1692 return callee_method;
1693 }
1694
1695 methodHandle callee_method(current, call_info.selected_method());
1696
1697#ifndef PRODUCT
1698 Atomic::inc(&_ic_miss_ctr);
1699
1700 // Statistics & Tracing
1701 if (TraceCallFixup) {
1702 ResourceMark rm(current);
1703 tty->print("IC miss (%s) call to", Bytecodes::name(bc));
1704 callee_method->print_short_name(tty);
1705    tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1706 }
1707
1708 if (ICMissHistogram) {
1709 MutexLocker m(VMStatistic_lock);
1710 RegisterMap reg_map(current, false);
1711 frame f = current->last_frame().real_sender(&reg_map);// skip runtime stub
1712 // produce statistics under the lock
1713 trace_ic_miss(f.pc());
1714 }
1715#endif
1716
1717 // install an event collector so that when a vtable stub is created the
1718 // profiler can be notified via a DYNAMIC_CODE_GENERATED event. The
1719 // event can't be posted when the stub is created as locks are held
1720 // - instead the event will be deferred until the event collector goes
1721 // out of scope.
1722 JvmtiDynamicCodeEventCollector event_collector;
1723
1724 // Update inline cache to megamorphic. Skip update if we are called from interpreted.
1725 // Transitioning IC caches may require transition stubs. If we run out
1726 // of transition stubs, we have to drop locks and perform a safepoint
1727 // that refills them.
1728 RegisterMap reg_map(current, false);
1729 frame caller_frame = current->last_frame().sender(&reg_map);
1730 CodeBlob* cb = caller_frame.cb();
1731 CompiledMethod* caller_nm = cb->as_compiled_method();
1732
1733 for (;;) {
1734 ICRefillVerifier ic_refill_verifier;
1735 bool needs_ic_stub_refill = false;
1736    bool successful = handle_ic_miss_helper_internal(receiver, caller_nm, caller_frame, callee_method,
1737                                                     bc, call_info, needs_ic_stub_refill, CHECK_(methodHandle()));
1738 if (successful || !needs_ic_stub_refill) {
1739 return callee_method;
1740 } else {
1741 InlineCacheBuffer::refill_ic_stubs();
1742 }
1743 }
1744}
1745
1746static bool clear_ic_at_addr(CompiledMethod* caller_nm, address call_addr, bool is_static_call) {
1747 CompiledICLocker ml(caller_nm);
1748 if (is_static_call) {
1749 CompiledStaticCall* ssc = caller_nm->compiledStaticCall_at(call_addr);
1750 if (!ssc->is_clean()) {
1751 return ssc->set_to_clean();
1752 }
1753 } else {
1754 // compiled, dispatched call (which used to call an interpreted method)
1755 CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1756 if (!inline_cache->is_clean()) {
1757 return inline_cache->set_to_clean();
1758 }
1759 }
1760 return true;
1761}
1762
1763//
1764// Resets a call-site in compiled code so it will get resolved again.
1765// This routine handles virtual call sites, optimized virtual call
1766// sites, and static call sites. It is typically used to change a call site's
1767// destination from compiled to interpreted.
1768//
1769methodHandle SharedRuntime::reresolve_call_site(TRAPS) {
1770  JavaThread* current = THREAD;
1771 ResourceMark rm(current);
1772 RegisterMap reg_map(current, false);
1773 frame stub_frame = current->last_frame();
1774  assert(stub_frame.is_runtime_frame(), "must be a runtimeStub");
1775 frame caller = stub_frame.sender(&reg_map);
1776
1777 // Do nothing if the frame isn't a live compiled frame.
1778 // nmethod could be deoptimized by the time we get here
1779 // so no update to the caller is needed.
1780
1781 if (caller.is_compiled_frame() && !caller.is_deoptimized_frame()) {
1782
1783 address pc = caller.pc();
1784
1785 // Check for static or virtual call
1786 bool is_static_call = false;
1787 CompiledMethod* caller_nm = CodeCache::find_compiled(pc);
1788
1789 // Default call_addr is the location of the "basic" call.
1790    // Determine the address of the call we are reresolving. With
1791 // Inline Caches we will always find a recognizable call.
1792 // With Inline Caches disabled we may or may not find a
1793 // recognizable call. We will always find a call for static
1794 // calls and for optimized virtual calls. For vanilla virtual
1795 // calls it depends on the state of the UseInlineCaches switch.
1796 //
1797 // With Inline Caches disabled we can get here for a virtual call
1798 // for two reasons:
1799 // 1 - calling an abstract method. The vtable for abstract methods
1800 // will run us thru handle_wrong_method and we will eventually
1801 // end up in the interpreter to throw the ame.
1802 // 2 - a racing deoptimization. We could be doing a vanilla vtable
1803 // call and between the time we fetch the entry address and
1804 // we jump to it the target gets deoptimized. Similar to 1
1805    //      we will wind up in the interpreter (thru a c2i with c2).
1806 //
1807    address call_addr = NULL;
1808 {
1809 // Get call instruction under lock because another thread may be
1810 // busy patching it.
1811 CompiledICLocker ml(caller_nm);
1812 // Location of call instruction
1813 call_addr = caller_nm->call_instruction_address(pc);
1814 }
1815 // Make sure nmethod doesn't get deoptimized and removed until
1816 // this is done with it.
1817 // CLEANUP - with lazy deopt shouldn't need this lock
1818 nmethodLocker nmlock(caller_nm);
1819
1820    if (call_addr != NULL) {
1821 RelocIterator iter(caller_nm, call_addr, call_addr+1);
1822 int ret = iter.next(); // Get item
1823 if (ret) {
1824        assert(iter.addr() == call_addr, "must find call");
1825 if (iter.type() == relocInfo::static_call_type) {
1826 is_static_call = true;
1827 } else {
1828          assert(iter.type() == relocInfo::virtual_call_type ||
1829                 iter.type() == relocInfo::opt_virtual_call_type
1830                , "unexpected relocInfo. type");
1831 }
1832 } else {
1833        assert(!UseInlineCaches, "relocation info. must exist for this address");
1834 }
1835
1836 // Cleaning the inline cache will force a new resolve. This is more robust
1837 // than directly setting it to the new destination, since resolving of calls
1838 // is always done through the same code path. (experience shows that it
1839 // leads to very hard to track down bugs, if an inline cache gets updated
1840 // to a wrong method). It should not be performance critical, since the
1841 // resolve is only done once.
1842
1843 for (;;) {
1844 ICRefillVerifier ic_refill_verifier;
1845 if (!clear_ic_at_addr(caller_nm, call_addr, is_static_call)) {
1846 InlineCacheBuffer::refill_ic_stubs();
1847 } else {
1848 break;
1849 }
1850 }
1851 }
1852 }
1853
1854  methodHandle callee_method = find_callee_method(CHECK_(methodHandle()));
1855
1856
1857#ifndef PRODUCT
1858 Atomic::inc(&_wrong_method_ctr);
1859
1860 if (TraceCallFixup) {
1861 ResourceMark rm(current);
1862 tty->print("handle_wrong_method reresolving call to");
1863 callee_method->print_short_name(tty);
1864    tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1865 }
1866#endif
1867
1868 return callee_method;
1869}
1870
1871address SharedRuntime::handle_unsafe_access(JavaThread* thread, address next_pc) {
1872 // The faulting unsafe accesses should be changed to throw the error
1873 // synchronously instead. Meanwhile the faulting instruction will be
1874 // skipped over (effectively turning it into a no-op) and an
1875 // asynchronous exception will be raised which the thread will
1876 // handle at a later point. If the instruction is a load it will
1877 // return garbage.
1878
1879 // Request an async exception.
1880 thread->set_pending_unsafe_access_error();
1881
1882 // Return address of next instruction to execute.
1883 return next_pc;
1884}
1885
1886#ifdef ASSERT
1887void SharedRuntime::check_member_name_argument_is_last_argument(const methodHandle& method,
1888 const BasicType* sig_bt,
1889 const VMRegPair* regs) {
1890 ResourceMark rm;
1891 const int total_args_passed = method->size_of_parameters();
1892 const VMRegPair* regs_with_member_name = regs;
1893  VMRegPair* regs_without_member_name = NEW_RESOURCE_ARRAY(VMRegPair, total_args_passed - 1);
1894
1895 const int member_arg_pos = total_args_passed - 1;
1896  assert(member_arg_pos >= 0 && member_arg_pos < total_args_passed, "oob");
1897  assert(sig_bt[member_arg_pos] == T_OBJECT, "dispatch argument must be an object");
1898
1899 int comp_args_on_stack = java_calling_convention(sig_bt, regs_without_member_name, total_args_passed - 1);
1900
1901 for (int i = 0; i < member_arg_pos; i++) {
1902 VMReg a = regs_with_member_name[i].first();
1903 VMReg b = regs_without_member_name[i].first();
1904    assert(a->value() == b->value(), "register allocation mismatch: a=" INTX_FORMAT ", b=" INTX_FORMAT, a->value(), b->value());
1905 }
1906  assert(regs_with_member_name[member_arg_pos].first()->is_valid(), "bad member arg");
1907}
1908#endif
1909
1910bool SharedRuntime::should_fixup_call_destination(address destination, address entry_point, address caller_pc, Method* moop, CodeBlob* cb) {
1911 if (destination != entry_point) {
1912 CodeBlob* callee = CodeCache::find_blob(destination);
1913 // callee == cb seems weird. It means calling interpreter thru stub.
1914    if (callee != NULL && (callee == cb || callee->is_adapter_blob())) {
1915 // static call or optimized virtual
1916 if (TraceCallFixup) {
1917        tty->print("fixup callsite at " INTPTR_FORMAT " to compiled code for", p2i(caller_pc));
1918        moop->print_short_name(tty);
1919        tty->print_cr(" to " INTPTR_FORMAT, p2i(entry_point));
1920 }
1921 return true;
1922 } else {
1923 if (TraceCallFixup) {
1924        tty->print("failed to fixup callsite at " INTPTR_FORMAT " to compiled code for", p2i(caller_pc));
1925        moop->print_short_name(tty);
1926        tty->print_cr(" to " INTPTR_FORMAT, p2i(entry_point));
1927 }
1928 // assert is too strong could also be resolve destinations.
1929 // assert(InlineCacheBuffer::contains(destination) || VtableStubs::contains(destination), "must be");
1930 }
1931 } else {
1932 if (TraceCallFixup) {
1933      tty->print("already patched callsite at " INTPTR_FORMAT " to compiled code for", p2i(caller_pc));
1934      moop->print_short_name(tty);
1935      tty->print_cr(" to " INTPTR_FORMAT, p2i(entry_point));
1936 }
1937 }
1938 return false;
1939}
1940
1941// ---------------------------------------------------------------------------
1942// We are calling the interpreter via a c2i. Normally this would mean that
1943// we were called by a compiled method. However we could have lost a race
1944// where we went int -> i2c -> c2i and so the caller could in fact be
1945// interpreted. If the caller is compiled we attempt to patch the caller
1946// so he no longer calls into the interpreter.
1947JRT_LEAF(void, SharedRuntime::fixup_callers_callsite(Method* method, address caller_pc))
1948 Method* moop(method);
1949
1950 address entry_point = moop->from_compiled_entry_no_trampoline();
1951
1952 // It's possible that deoptimization can occur at a call site which hasn't
1953 // been resolved yet, in which case this function will be called from
1954 // an nmethod that has been patched for deopt and we can ignore the
1955 // request for a fixup.
1956 // Also it is possible that we lost a race in that from_compiled_entry
1957 // is now back to the i2c in that case we don't need to patch and if
1958 // we did we'd leap into space because the callsite needs to use
1959 // "to interpreter" stub in order to load up the Method*. Don't
1960 // ask me how I know this...
1961
1962 CodeBlob* cb = CodeCache::find_blob(caller_pc);
1963  if (cb == NULL || !cb->is_compiled() || entry_point == moop->get_c2i_entry()) {
1964 return;
1965 }
1966
1967 // The check above makes sure this is a nmethod.
1968 CompiledMethod* nm = cb->as_compiled_method_or_null();
1969  assert(nm, "must be");
1970
1971 // Get the return PC for the passed caller PC.
1972 address return_pc = caller_pc + frame::pc_return_offset;
1973
1974 // There is a benign race here. We could be attempting to patch to a compiled
1975 // entry point at the same time the callee is being deoptimized. If that is
1976 // the case then entry_point may in fact point to a c2i and we'd patch the
1977 // call site with the same old data. clear_code will set code() to NULL
1978 // at the end of it. If we happen to see that NULL then we can skip trying
1979 // to patch. If we hit the window where the callee has a c2i in the
1980 // from_compiled_entry and the NULL isn't present yet then we lose the race
1981 // and patch the code with the same old data. Asi es la vida.
1982
1983  if (moop->code() == NULL) return;
1984
1985 if (nm->is_in_use()) {
1986 // Expect to find a native call there (unless it was no-inline cache vtable dispatch)
1987 CompiledICLocker ic_locker(nm);
1988 if (NativeCall::is_call_before(return_pc)) {
1989 ResourceMark mark;
1990 NativeCallWrapper* call = nm->call_wrapper_before(return_pc);
1991 //
1992 // bug 6281185. We might get here after resolving a call site to a vanilla
1993 // virtual call. Because the resolvee uses the verified entry it may then
1994 // see compiled code and attempt to patch the site by calling us. This would
1995 // then incorrectly convert the call site to optimized and its downhill from
1996 // there. If you're lucky you'll get the assert in the bugid, if not you've
1997 // just made a call site that could be megamorphic into a monomorphic site
1998 // for the rest of its life! Just another racing bug in the life of
1999 // fixup_callers_callsite ...
2000 //
2001 RelocIterator iter(nm, call->instruction_address(), call->next_instruction_address());
2002 iter.next();
2003      assert(iter.has_current(), "must have a reloc at java call site");
2004 relocInfo::relocType typ = iter.reloc()->type();
2005 if (typ != relocInfo::static_call_type &&
2006 typ != relocInfo::opt_virtual_call_type &&
2007 typ != relocInfo::static_stub_type) {
2008 return;
2009 }
2010 address destination = call->destination();
2011 if (should_fixup_call_destination(destination, entry_point, caller_pc, moop, cb)) {
2012 call->set_destination_mt_safe(entry_point);
2013 }
2014 }
2015 }
2016JRT_END
2017
2018
2019// same as JVM_Arraycopy, but called directly from compiled code
2020JRT_ENTRY(void, SharedRuntime::slow_arraycopy_C(oopDesc* src,  jint src_pos,
2021                                                oopDesc* dest, jint dest_pos,
2022                                                jint length,
2023                                                JavaThread* current)) {
2024#ifndef PRODUCT
2025 _slow_array_copy_ctr++;
2026#endif
2027 // Check if we have null pointers
2028  if (src == NULL || dest == NULL) {
2029    THROW(vmSymbols::java_lang_NullPointerException());
2030 }
2031 // Do the copy. The casts to arrayOop are necessary to the copy_array API,
2032 // even though the copy_array API also performs dynamic checks to ensure
2033 // that src and dest are truly arrays (and are conformable).
2034 // The copy_array mechanism is awkward and could be removed, but
2035 // the compilers don't call this function except as a last resort,
2036 // so it probably doesn't matter.
2037 src->klass()->copy_array((arrayOopDesc*)src, src_pos,
2038 (arrayOopDesc*)dest, dest_pos,
2039 length, current);
2040}
2041JRT_END
2042
2043// The caller of generate_class_cast_message() (or one of its callers)
2044// must use a ResourceMark in order to correctly free the result.
2045char* SharedRuntime::generate_class_cast_message(
2046 JavaThread* thread, Klass* caster_klass) {
2047
2048 // Get target class name from the checkcast instruction
2049 vframeStream vfst(thread, true);
2050  assert(!vfst.at_end(), "Java frame must exist");
2051 Bytecode_checkcast cc(vfst.method(), vfst.method()->bcp_from(vfst.bci()));
2052 constantPoolHandle cpool(thread, vfst.method()->constants());
2053 Klass* target_klass = ConstantPool::klass_at_if_loaded(cpool, cc.index());
2054  Symbol* target_klass_name = NULL;
2055  if (target_klass == NULL) {
2056 // This klass should be resolved, but just in case, get the name in the klass slot.
2057 target_klass_name = cpool->klass_name_at(cc.index());
2058 }
2059 return generate_class_cast_message(caster_klass, target_klass, target_klass_name);
2060}
2061
2062
2063// The caller of generate_class_cast_message() (or one of its callers)
2064// must use a ResourceMark in order to correctly free the result.
2065char* SharedRuntime::generate_class_cast_message(
2066 Klass* caster_klass, Klass* target_klass, Symbol* target_klass_name) {
2067 const char* caster_name = caster_klass->external_name();
2068
2069  assert(target_klass != NULL || target_klass_name != NULL, "one must be provided");
2070  const char* target_name = target_klass == NULL ? target_klass_name->as_klass_external_name() :
2071 target_klass->external_name();
2072
2073 size_t msglen = strlen(caster_name) + strlen("class ") + strlen(" cannot be cast to class ") + strlen(target_name) + 1;
2074
2075 const char* caster_klass_description = "";
2076 const char* target_klass_description = "";
2077 const char* klass_separator = "";
2078  if (target_klass != NULL && caster_klass->module() == target_klass->module()) {
2079 caster_klass_description = caster_klass->joint_in_module_of_loader(target_klass);
2080 } else {
2081 caster_klass_description = caster_klass->class_in_module_of_loader();
2082    target_klass_description = (target_klass != NULL) ? target_klass->class_in_module_of_loader() : "";
2083    klass_separator = (target_klass != NULL) ? "; " : "";
2084 }
2085
2086  // add 3 for the parentheses and preceding space
2087 msglen += strlen(caster_klass_description) + strlen(target_klass_description) + strlen(klass_separator) + 3;
2088
2089  char* message = NEW_RESOURCE_ARRAY_RETURN_NULL(char, msglen);
2090  if (message == NULL) {
2091 // Shouldn't happen, but don't cause even more problems if it does
2092 message = const_cast<char*>(caster_klass->external_name());
2093 } else {
2094 jio_snprintf(message,
2095 msglen,
2096 "class %s cannot be cast to class %s (%s%s%s)",
2097 caster_name,
2098 target_name,
2099 caster_klass_description,
2100 klass_separator,
2101 target_klass_description
2102 );
2103 }
2104 return message;
2105}
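// Added annotation (not part of the original source): for classes in different
// modules, the jio_snprintf above yields a message of roughly this shape, using
// hypothetical class names purely for illustration:
//   class com.a.Foo cannot be cast to class com.b.Bar (com.a.Foo is in module m1 ...; com.b.Bar is in module m2 ...)
// When both classes are in the same module, a single joint description from
// joint_in_module_of_loader() is used and klass_separator stays empty.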
2106
2107JRT_LEAF(void, SharedRuntime::reguard_yellow_pages())
2108 (void) JavaThread::current()->stack_overflow_state()->reguard_stack();
2109JRT_END
2110
2111void SharedRuntime::monitor_enter_helper(oopDesc* obj, BasicLock* lock, JavaThread* current) {
2112 if (!SafepointSynchronize::is_synchronizing()) {
2113 // Only try quick_enter() if we're not trying to reach a safepoint
2114 // so that the calling thread reaches the safepoint more quickly.
2115 if (ObjectSynchronizer::quick_enter(obj, current, lock)) return;
2116 }
2117 // NO_ASYNC required because an async exception on the state transition destructor
2118 // would leave you with the lock held and it would never be released.
2119 // The normal monitorenter NullPointerException is thrown without acquiring a lock
2120 // and the model is that an exception implies the method failed.
2121  JRT_BLOCK_NO_ASYNC
2122    Handle h_obj(THREAD, obj);
2123 ObjectSynchronizer::enter(h_obj, lock, current);
2124    assert(!HAS_PENDING_EXCEPTION, "Should have no exception here");
2125  JRT_BLOCK_END
2126}
2127
2128// Handles the uncommon case in locking, i.e., contention or an inflated lock.
2129JRT_BLOCK_ENTRY(void, SharedRuntime::complete_monitor_locking_C(oopDesc* obj, BasicLock* lock, JavaThread* current))
2130 SharedRuntime::monitor_enter_helper(obj, lock, current);
2131JRT_END
2132
2133void SharedRuntime::monitor_exit_helper(oopDesc* obj, BasicLock* lock, JavaThread* current) {
2134  assert(JavaThread::current() == current, "invariant");
2135 // Exit must be non-blocking, and therefore no exceptions can be thrown.
2136 ExceptionMark em(current);
2137 // The object could become unlocked through a JNI call, which we have no other checks for.
2138 // Give a fatal message if CheckJNICalls. Otherwise we ignore it.
2139 if (obj->is_unlocked()) {
2140 if (CheckJNICalls) {
2141      fatal("Object has been unlocked by JNI");
2142 }
2143 return;
2144 }
2145 ObjectSynchronizer::exit(obj, lock, current);
2146}
2147
2148// Handles the uncommon cases of monitor unlocking in compiled code
2149JRT_LEAF(void, SharedRuntime::complete_monitor_unlocking_C(oopDesc* obj, BasicLock* lock, JavaThread* current))
2150 SharedRuntime::monitor_exit_helper(obj, lock, current);
2151JRT_END
2152
2153#ifndef PRODUCT
2154
2155void SharedRuntime::print_statistics() {
2156 ttyLocker ttyl;
2157  if (xtty != NULL) xtty->head("statistics type='SharedRuntime'");
2158
2159 SharedRuntime::print_ic_miss_histogram();
2160
2161 // Dump the JRT_ENTRY counters
2162 if (_new_instance_ctr) tty->print_cr("%5d new instance requires GC", _new_instance_ctr);
2163 if (_new_array_ctr) tty->print_cr("%5d new array requires GC", _new_array_ctr);
2164 if (_multi2_ctr) tty->print_cr("%5d multianewarray 2 dim", _multi2_ctr);
2165 if (_multi3_ctr) tty->print_cr("%5d multianewarray 3 dim", _multi3_ctr);
2166 if (_multi4_ctr) tty->print_cr("%5d multianewarray 4 dim", _multi4_ctr);
2167 if (_multi5_ctr) tty->print_cr("%5d multianewarray 5 dim", _multi5_ctr);
2168
2169 tty->print_cr("%5d inline cache miss in compiled", _ic_miss_ctr);
2170 tty->print_cr("%5d wrong method", _wrong_method_ctr);
2171 tty->print_cr("%5d unresolved static call site", _resolve_static_ctr);
2172 tty->print_cr("%5d unresolved virtual call site", _resolve_virtual_ctr);
2173 tty->print_cr("%5d unresolved opt virtual call site", _resolve_opt_virtual_ctr);
2174
2175 if (_mon_enter_stub_ctr) tty->print_cr("%5d monitor enter stub", _mon_enter_stub_ctr);
2176 if (_mon_exit_stub_ctr) tty->print_cr("%5d monitor exit stub", _mon_exit_stub_ctr);
2177 if (_mon_enter_ctr) tty->print_cr("%5d monitor enter slow", _mon_enter_ctr);
2178 if (_mon_exit_ctr) tty->print_cr("%5d monitor exit slow", _mon_exit_ctr);
2179 if (_partial_subtype_ctr) tty->print_cr("%5d slow partial subtype", _partial_subtype_ctr);
2180 if (_jbyte_array_copy_ctr) tty->print_cr("%5d byte array copies", _jbyte_array_copy_ctr);
2181 if (_jshort_array_copy_ctr) tty->print_cr("%5d short array copies", _jshort_array_copy_ctr);
2182 if (_jint_array_copy_ctr) tty->print_cr("%5d int array copies", _jint_array_copy_ctr);
2183 if (_jlong_array_copy_ctr) tty->print_cr("%5d long array copies", _jlong_array_copy_ctr);
2184 if (_oop_array_copy_ctr) tty->print_cr("%5d oop array copies", _oop_array_copy_ctr);
2185 if (_checkcast_array_copy_ctr) tty->print_cr("%5d checkcast array copies", _checkcast_array_copy_ctr);
2186 if (_unsafe_array_copy_ctr) tty->print_cr("%5d unsafe array copies", _unsafe_array_copy_ctr);
2187 if (_generic_array_copy_ctr) tty->print_cr("%5d generic array copies", _generic_array_copy_ctr);
2188 if (_slow_array_copy_ctr) tty->print_cr("%5d slow array copies", _slow_array_copy_ctr);
2189 if (_find_handler_ctr) tty->print_cr("%5d find exception handler", _find_handler_ctr);
2190 if (_rethrow_ctr) tty->print_cr("%5d rethrow handler", _rethrow_ctr);
2191
2192 AdapterHandlerLibrary::print_statistics();
2193
2194  if (xtty != NULL) xtty->tail("statistics");
2195}
2196
2197inline double percent(int x, int y) {
2198 return 100.0 * x / MAX2(y, 1);
2199}
2200
2201inline double percent(int64_t x, int64_t y) {
2202 return 100.0 * x / MAX2(y, (int64_t)1);
2203}
2204
2205class MethodArityHistogram {
2206 public:
2207 enum { MAX_ARITY = 256 };
2208 private:
2209 static uint64_t _arity_histogram[MAX_ARITY]; // histogram of #args
2210 static uint64_t _size_histogram[MAX_ARITY]; // histogram of arg size in words
2211 static uint64_t _total_compiled_calls;
2212 static uint64_t _max_compiled_calls_per_method;
2213 static int _max_arity; // max. arity seen
2214 static int _max_size; // max. arg size seen
2215
2216 static void add_method_to_histogram(nmethod* nm) {
2217    Method* method = (nm == NULL) ? NULL : nm->method();
2218    if ((method != NULL) && nm->is_alive()) {
2219 ArgumentCount args(method->signature());
2220 int arity = args.size() + (method->is_static() ? 0 : 1);
2221 int argsize = method->size_of_parameters();
2222 arity = MIN2(arity, MAX_ARITY-1);
2223 argsize = MIN2(argsize, MAX_ARITY-1);
2224 uint64_t count = (uint64_t)method->compiled_invocation_count();
2225 _max_compiled_calls_per_method = count > _max_compiled_calls_per_method ? count : _max_compiled_calls_per_method;
2226 _total_compiled_calls += count;
2227 _arity_histogram[arity] += count;
2228 _size_histogram[argsize] += count;
2229 _max_arity = MAX2(_max_arity, arity);
2230 _max_size = MAX2(_max_size, argsize);
2231 }
2232 }
2233
2234 void print_histogram_helper(int n, uint64_t* histo, const char* name) {
2235 const int N = MIN2(9, n);
2236 double sum = 0;
2237 double weighted_sum = 0;
2238 for (int i = 0; i <= n; i++) { sum += histo[i]; weighted_sum += i*histo[i]; }
2239 if (sum >= 1.0) { // prevent divide by zero or divide overflow
2240 double rest = sum;
2241 double percent = sum / 100;
2242 for (int i = 0; i <= N; i++) {
2243 rest -= histo[i];
2244        tty->print_cr("%4d: " UINT64_FORMAT_W(12) " (%5.1f%%)", i, histo[i], histo[i] / percent);
2245      }
2246      tty->print_cr("rest: " INT64_FORMAT_W(12) " (%5.1f%%)", (int64_t)rest, rest / percent);
2247      tty->print_cr("(avg. %s = %3.1f, max = %d)", name, weighted_sum / sum, n);
2248      tty->print_cr("(total # of compiled calls = " INT64_FORMAT_W(14) ")", _total_compiled_calls);
2249      tty->print_cr("(max # of compiled calls = " INT64_FORMAT_W(14) ")", _max_compiled_calls_per_method);
2250 } else {
2251 tty->print_cr("Histogram generation failed for %s. n = %d, sum = %7.5f", name, n, sum);
2252 }
2253 }
2254
2255 void print_histogram() {
2256 tty->print_cr("\nHistogram of call arity (incl. rcvr, calls to compiled methods only):");
2257 print_histogram_helper(_max_arity, _arity_histogram, "arity");
2258 tty->print_cr("\nHistogram of parameter block size (in words, incl. rcvr):");
2259 print_histogram_helper(_max_size, _size_histogram, "size");
2260 tty->cr();
2261 }
2262
2263 public:
2264 MethodArityHistogram() {
2265 // Take the Compile_lock to protect against changes in the CodeBlob structures
2266 MutexLocker mu1(Compile_lock, Mutex::_safepoint_check_flag);
2267 // Take the CodeCache_lock to protect against changes in the CodeHeap structure
2268 MutexLocker mu2(CodeCache_lock, Mutex::_no_safepoint_check_flag);
2269 _max_arity = _max_size = 0;
2270 _total_compiled_calls = 0;
2271 _max_compiled_calls_per_method = 0;
2272 for (int i = 0; i < MAX_ARITY; i++) _arity_histogram[i] = _size_histogram[i] = 0;
2273 CodeCache::nmethods_do(add_method_to_histogram);
2274 print_histogram();
2275 }
2276};
2277
2278uint64_t MethodArityHistogram::_arity_histogram[MethodArityHistogram::MAX_ARITY];
2279uint64_t MethodArityHistogram::_size_histogram[MethodArityHistogram::MAX_ARITY];
2280uint64_t MethodArityHistogram::_total_compiled_calls;
2281uint64_t MethodArityHistogram::_max_compiled_calls_per_method;
2282int MethodArityHistogram::_max_arity;
2283int MethodArityHistogram::_max_size;
2284
2285void SharedRuntime::print_call_statistics(uint64_t comp_total) {
2286 tty->print_cr("Calls from compiled code:");
2287 int64_t total = _nof_normal_calls + _nof_interface_calls + _nof_static_calls;
2288 int64_t mono_c = _nof_normal_calls - _nof_optimized_calls - _nof_megamorphic_calls;
2289 int64_t mono_i = _nof_interface_calls - _nof_optimized_interface_calls - _nof_megamorphic_interface_calls;
2290  tty->print_cr("\t" INT64_FORMAT_W(12) " (100%%)  total non-inlined   ", total);
2291  tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- virtual calls       ", _nof_normal_calls, percent(_nof_normal_calls, total));
2292  tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- inlined          ", _nof_inlined_calls, percent(_nof_inlined_calls, _nof_normal_calls));
2293  tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- optimized        ", _nof_optimized_calls, percent(_nof_optimized_calls, _nof_normal_calls));
2294  tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- monomorphic      ", mono_c, percent(mono_c, _nof_normal_calls));
2295  tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- megamorphic      ", _nof_megamorphic_calls, percent(_nof_megamorphic_calls, _nof_normal_calls));
2296  tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- interface calls     ", _nof_interface_calls, percent(_nof_interface_calls, total));
2297  tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- inlined          ", _nof_inlined_interface_calls, percent(_nof_inlined_interface_calls, _nof_interface_calls));
2298  tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- optimized        ", _nof_optimized_interface_calls, percent(_nof_optimized_interface_calls, _nof_interface_calls));
2299  tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- monomorphic      ", mono_i, percent(mono_i, _nof_interface_calls));
2300  tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- megamorphic      ", _nof_megamorphic_interface_calls, percent(_nof_megamorphic_interface_calls, _nof_interface_calls));
2301  tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- static/special calls", _nof_static_calls, percent(_nof_static_calls, total));
2302  tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- inlined          ", _nof_inlined_static_calls, percent(_nof_inlined_static_calls, _nof_static_calls));
2303 tty->cr();
2304 tty->print_cr("Note 1: counter updates are not MT-safe.");
2305 tty->print_cr("Note 2: %% in major categories are relative to total non-inlined calls;");
2306 tty->print_cr(" %% in nested categories are relative to their category");
2307 tty->print_cr(" (and thus add up to more than 100%% with inlining)");
2308 tty->cr();
2309
2310 MethodArityHistogram h;
2311}
2312#endif
2313
2314
2315// A simple wrapper class around the calling convention information
2316// that allows sharing of adapters for the same calling convention.
2317class AdapterFingerPrint : public CHeapObj<mtCode> {
2318 private:
2319 enum {
2320 _basic_type_bits = 4,
2321    _basic_type_mask = right_n_bits(_basic_type_bits),
2322 _basic_types_per_int = BitsPerInt / _basic_type_bits,
2323 _compact_int_count = 3
2324 };
2325 // TO DO: Consider integrating this with a more global scheme for compressing signatures.
2326 // For now, 4 bits per components (plus T_VOID gaps after double/long) is not excessive.
2327
2328 union {
2329 int _compact[_compact_int_count];
2330 int* _fingerprint;
2331 } _value;
2332 int _length; // A negative length indicates the fingerprint is in the compact form,
2333 // Otherwise _value._fingerprint is the array.
2334
2335 // Remap BasicTypes that are handled equivalently by the adapters.
2336 // These are correct for the current system but someday it might be
2337 // necessary to make this mapping platform dependent.
2338 static int adapter_encoding(BasicType in) {
2339 switch (in) {
2340 case T_BOOLEAN:
2341 case T_BYTE:
2342 case T_SHORT:
2343 case T_CHAR:
2344      // These are all promoted to T_INT in the calling convention
2345 return T_INT;
2346
2347 case T_OBJECT:
2348 case T_ARRAY:
2349 // In other words, we assume that any register good enough for
2350 // an int or long is good enough for a managed pointer.
2351#ifdef _LP64
2352 return T_LONG;
2353#else
2354 return T_INT;
2355#endif
2356
2357 case T_INT:
2358 case T_LONG:
2359 case T_FLOAT:
2360 case T_DOUBLE:
2361 case T_VOID:
2362 return in;
2363
2364 default:
2365        ShouldNotReachHere();
2366 return T_CONFLICT;
2367 }
2368 }
2369
2370 public:
2371 AdapterFingerPrint(int total_args_passed, BasicType* sig_bt) {
2372 // The fingerprint is based on the BasicType signature encoded
2373 // into an array of ints with eight entries per int.
2374 int* ptr;
2375 int len = (total_args_passed + (_basic_types_per_int-1)) / _basic_types_per_int;
2376 if (len <= _compact_int_count) {
2377      assert(_compact_int_count == 3, "else change next line");
2378 _value._compact[0] = _value._compact[1] = _value._compact[2] = 0;
2379 // Storing the signature encoded as signed chars hits about 98%
2380 // of the time.
2381 _length = -len;
2382 ptr = _value._compact;
2383 } else {
2384 _length = len;
2385      _value._fingerprint = NEW_C_HEAP_ARRAY(int, _length, mtCode);
2386 ptr = _value._fingerprint;
2387 }
2388
2389 // Now pack the BasicTypes with 8 per int
2390 int sig_index = 0;
2391 for (int index = 0; index < len; index++) {
2392 int value = 0;
2393 for (int byte = 0; sig_index < total_args_passed && byte < _basic_types_per_int; byte++) {
2394 int bt = adapter_encoding(sig_bt[sig_index++]);
2395        assert((bt & _basic_type_mask) == bt, "must fit in 4 bits");
2396 value = (value << _basic_type_bits) | bt;
2397 }
2398 ptr[index] = value;
2399 }
2400 }
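// Added illustration (annotation, not in the original source): with
// _basic_type_bits == 4 and _basic_types_per_int == 8, a hypothetical signature
// of (receiver, int, long) reaches this constructor as
// sig_bt = { T_OBJECT, T_INT, T_LONG, T_VOID } (the T_VOID slot follows the long),
// fits the compact form, and the loop above packs it roughly as:
//
//   int value = 0;
//   value = (value << 4) | T_LONG;   // T_OBJECT remapped by adapter_encoding() on LP64
//   value = (value << 4) | T_INT;
//   value = (value << 4) | T_LONG;
//   value = (value << 4) | T_VOID;
//   // remaining high nibbles stay zero; earlier arguments end up in higher bits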
2401
2402 ~AdapterFingerPrint() {
2403 if (_length > 0) {
2404      FREE_C_HEAP_ARRAY(int, _value._fingerprint);
2405 }
2406 }
2407
2408 int value(int index) {
2409 if (_length < 0) {
2410 return _value._compact[index];
2411 }
2412 return _value._fingerprint[index];
2413 }
2414 int length() {
2415 if (_length < 0) return -_length;
2416 return _length;
2417 }
2418
2419 bool is_compact() {
2420 return _length <= 0;
2421 }
2422
2423 unsigned int compute_hash() {
2424 int hash = 0;
2425 for (int i = 0; i < length(); i++) {
2426 int v = value(i);
2427 hash = (hash << 8) ^ v ^ (hash >> 5);
2428 }
2429 return (unsigned int)hash;
2430 }
2431
2432 const char* as_string() {
2433 stringStream st;
2434 st.print("0x");
2435 for (int i = 0; i < length(); i++) {
2436 st.print("%x", value(i));
2437 }
2438 return st.as_string();
2439 }
2440
2441#ifndef PRODUCT
2442 // Reconstitutes the basic type arguments from the fingerprint,
2443 // producing strings like LIJDF
2444 const char* as_basic_args_string() {
2445 stringStream st;
2446 bool long_prev = false;
2447 for (int i = 0; i < length(); i++) {
2448 unsigned val = (unsigned)value(i);
2449 // args are packed so that first/lower arguments are in the highest
2450 // bits of each int value, so iterate from highest to the lowest
2451 for (int j = 32 - _basic_type_bits; j >= 0; j -= _basic_type_bits) {
2452 unsigned v = (val >> j) & _basic_type_mask;
2453 if (v == 0) {
2454          assert(i == length() - 1, "Only expect zeroes in the last word");
2455 continue;
2456 }
2457 if (long_prev) {
2458 long_prev = false;
2459 if (v == T_VOID) {
2460 st.print("J");
2461 } else {
2462 st.print("L");
2463 }
2464 }
2465 switch (v) {
2466 case T_INT: st.print("I"); break;
2467 case T_LONG: long_prev = true; break;
2468 case T_FLOAT: st.print("F"); break;
2469 case T_DOUBLE: st.print("D"); break;
2470 case T_VOID: break;
2471          default: ShouldNotReachHere();
2472 }
2473 }
2474 }
2475 if (long_prev) {
2476 st.print("L");
2477 }
2478 return st.as_string();
2479 }
2480#endif // !product
2481
2482 bool equals(AdapterFingerPrint* other) {
2483 if (other->_length != _length) {
2484 return false;
2485 }
2486 if (_length < 0) {
2487      assert(_compact_int_count == 3, "else change next line");
2488 return _value._compact[0] == other->_value._compact[0] &&
2489 _value._compact[1] == other->_value._compact[1] &&
2490 _value._compact[2] == other->_value._compact[2];
2491 } else {
2492 for (int i = 0; i < _length; i++) {
2493 if (_value._fingerprint[i] != other->_value._fingerprint[i]) {
2494 return false;
2495 }
2496 }
2497 }
2498 return true;
2499 }
2500};
2501
2502
2503// A hashtable mapping from AdapterFingerPrints to AdapterHandlerEntries
2504class AdapterHandlerTable : public BasicHashtable<mtCode> {
2505 friend class AdapterHandlerTableIterator;
2506
2507 private:
2508
2509#ifndef PRODUCT
2510 static int _lookups; // number of calls to lookup
2511 static int _buckets; // number of buckets checked
2512 static int _equals; // number of buckets checked with matching hash
2513 static int _hits; // number of successful lookups
2514 static int _compact; // number of equals calls with compact signature
2515#endif
2516
2517 AdapterHandlerEntry* bucket(int i) {
2518 return (AdapterHandlerEntry*)BasicHashtable<mtCode>::bucket(i);
2519 }
2520
2521 public:
2522 AdapterHandlerTable()
2523 : BasicHashtable<mtCode>(293, (sizeof(AdapterHandlerEntry))) { }
2524
2525 // Create a new entry suitable for insertion in the table
2526 AdapterHandlerEntry* new_entry(AdapterFingerPrint* fingerprint, address i2c_entry, address c2i_entry, address c2i_unverified_entry, address c2i_no_clinit_check_entry) {
2527 AdapterHandlerEntry* entry = (AdapterHandlerEntry*)BasicHashtable<mtCode>::new_entry(fingerprint->compute_hash());
2528 entry->init(fingerprint, i2c_entry, c2i_entry, c2i_unverified_entry, c2i_no_clinit_check_entry);
2529 return entry;
2530 }
2531
2532 // Insert an entry into the table
2533 void add(AdapterHandlerEntry* entry) {
2534 int index = hash_to_index(entry->hash());
2535 add_entry(index, entry);
2536 }
2537
2538 void free_entry(AdapterHandlerEntry* entry) {
2539 entry->deallocate();
2540 BasicHashtable<mtCode>::free_entry(entry);
2541 }
2542
2543 // Find a entry with the same fingerprint if it exists
2544 AdapterHandlerEntry* lookup(int total_args_passed, BasicType* sig_bt) {
2545    NOT_PRODUCT(_lookups++);
2546    AdapterFingerPrint fp(total_args_passed, sig_bt);
2547    unsigned int hash = fp.compute_hash();
2548    int index = hash_to_index(hash);
2549    for (AdapterHandlerEntry* e = bucket(index); e != NULL; e = e->next()) {
2550      NOT_PRODUCT(_buckets++);
2551      if (e->hash() == hash) {
2552        NOT_PRODUCT(_equals++);
2553 if (fp.equals(e->fingerprint())) {
2554#ifndef PRODUCT
2555 if (fp.is_compact()) _compact++;
2556 _hits++;
2557#endif
2558 return e;
2559 }
2560 }
2561 }
2562    return NULL;
2563 }
2564
2565#ifndef PRODUCT
2566 void print_statistics() {
2567 ResourceMark rm;
2568 int longest = 0;
2569 int empty = 0;
2570 int total = 0;
2571 int nonempty = 0;
2572 for (int index = 0; index < table_size(); index++) {
2573 int count = 0;
2574      for (AdapterHandlerEntry* e = bucket(index); e != NULL; e = e->next()) {
2575 count++;
2576 }
2577 if (count != 0) nonempty++;
2578 if (count == 0) empty++;
2579 if (count > longest) longest = count;
2580 total += count;
2581 }
2582 tty->print_cr("AdapterHandlerTable: empty %d longest %d total %d average %f",
2583 empty, longest, total, total / (double)nonempty);
2584 tty->print_cr("AdapterHandlerTable: lookups %d buckets %d equals %d hits %d compact %d",
2585 _lookups, _buckets, _equals, _hits, _compact);
2586 }
2587#endif
2588};
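// Added sketch (not in the original source): a caller-side view of the
// lookup/new_entry/add contract of this table. The real wiring lives in
// AdapterHandlerLibrary; entry points such as i2c_entry below are placeholders:
//
//   AdapterHandlerEntry* entry = _adapters->lookup(total_args_passed, sig_bt);
//   if (entry == NULL) {
//     AdapterFingerPrint* fp = new AdapterFingerPrint(total_args_passed, sig_bt);
//     entry = _adapters->new_entry(fp, i2c_entry, c2i_entry, c2i_unverified_entry, c2i_no_clinit_check_entry);
//     _adapters->add(entry);
//   }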
2589
2590
2591#ifndef PRODUCT
2592
2593int AdapterHandlerTable::_lookups;
2594int AdapterHandlerTable::_buckets;
2595int AdapterHandlerTable::_equals;
2596int AdapterHandlerTable::_hits;
2597int AdapterHandlerTable::_compact;
2598
2599#endif
2600
2601class AdapterHandlerTableIterator : public StackObj {
2602 private:
2603 AdapterHandlerTable* _table;
2604 int _index;
2605 AdapterHandlerEntry* _current;
2606
2607 void scan() {
2608 while (_index < _table->table_size()) {
2609 AdapterHandlerEntry* a = _table->bucket(_index);
2610 _index++;
2611        if (a != NULL) {
2612 _current = a;
2613 return;
2614 }
2615 }
2616 }
2617
2618 public:
2619  AdapterHandlerTableIterator(AdapterHandlerTable* table): _table(table), _index(0), _current(NULL) {
2620    scan();
2621  }
2622  bool has_next() {
2623    return _current != NULL;
2624  }
2625  AdapterHandlerEntry* next() {
2626    if (_current != NULL) {
2627      AdapterHandlerEntry* result = _current;
2628      _current = _current->next();
2629      if (_current == NULL) scan();
2630      return result;
2631    } else {
2632      return NULL;
2633 }
2634 }
2635};
2636
2637
2638// ---------------------------------------------------------------------------
2639// Implementation of AdapterHandlerLibrary
2640 AdapterHandlerTable* AdapterHandlerLibrary::_adapters = NULL;
2641 AdapterHandlerEntry* AdapterHandlerLibrary::_abstract_method_handler = NULL;
2642 AdapterHandlerEntry* AdapterHandlerLibrary::_no_arg_handler = NULL;
2643 AdapterHandlerEntry* AdapterHandlerLibrary::_int_arg_handler = NULL;
2644 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_arg_handler = NULL;
2645 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_int_arg_handler = NULL;
2646 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_obj_arg_handler = NULL;
2647 const int AdapterHandlerLibrary_size = 16*K;
2648 BufferBlob* AdapterHandlerLibrary::_buffer = NULL;
2649
2650BufferBlob* AdapterHandlerLibrary::buffer_blob() {
2651 return _buffer;
2652}
2653
2654extern "C" void unexpected_adapter_call() {
2655 ShouldNotCallThis();
2656}
2657
2658static void post_adapter_creation(const AdapterBlob* new_adapter, const AdapterHandlerEntry* entry) {
2659 char blob_id[256];
2660 jio_snprintf(blob_id,
2661 sizeof(blob_id),
2662 "%s(%s)",
2663 new_adapter->name(),
2664 entry->fingerprint()->as_string());
2665 Forte::register_stub(blob_id, new_adapter->content_begin(), new_adapter->content_end());
2666
2667 if (JvmtiExport::should_post_dynamic_code_generated()) {
2668 JvmtiExport::post_dynamic_code_generated(blob_id, new_adapter->content_begin(), new_adapter->content_end());
2669 }
2670}
2671
2672void AdapterHandlerLibrary::initialize() {
2673 ResourceMark rm;
2674 AdapterBlob* no_arg_blob = NULL;
2675 AdapterBlob* int_arg_blob = NULL;
2676 AdapterBlob* obj_arg_blob = NULL;
2677 AdapterBlob* obj_int_arg_blob = NULL;
2678 AdapterBlob* obj_obj_arg_blob = NULL;
2679 {
2680 MutexLocker mu(AdapterHandlerLibrary_lock);
2681 assert(_adapters == NULL, "Initializing more than once");
2682
2683 _adapters = new AdapterHandlerTable();
2684
2685 // Create a special handler for abstract methods. Abstract methods
2686 // are never compiled so an i2c entry is somewhat meaningless, but
2687 // throw AbstractMethodError just in case.
2688 // Pass wrong_method_abstract for the c2i transitions to return
2689 // AbstractMethodError for invalid invocations.
2690 address wrong_method_abstract = SharedRuntime::get_handle_wrong_method_abstract_stub();
2691 _abstract_method_handler = AdapterHandlerLibrary::new_entry(new AdapterFingerPrint(0, NULL),
2692 StubRoutines::throw_AbstractMethodError_entry(),
2693 wrong_method_abstract, wrong_method_abstract);
2694
2695 _buffer = BufferBlob::create("adapters", AdapterHandlerLibrary_size);
2696
2697 _no_arg_handler = create_adapter(no_arg_blob, 0, NULL, true);
2698
2699 BasicType obj_args[] = { T_OBJECT };
2700 _obj_arg_handler = create_adapter(obj_arg_blob, 1, obj_args, true);
2701
2702 BasicType int_args[] = { T_INT };
2703 _int_arg_handler = create_adapter(int_arg_blob, 1, int_args, true);
2704
2705 BasicType obj_int_args[] = { T_OBJECT, T_INT };
2706 _obj_int_arg_handler = create_adapter(obj_int_arg_blob, 2, obj_int_args, true);
2707
2708 BasicType obj_obj_args[] = { T_OBJECT, T_OBJECT };
2709 _obj_obj_arg_handler = create_adapter(obj_obj_arg_blob, 2, obj_obj_args, true);
2710
2711 assert(no_arg_blob != NULL &&
2712 obj_arg_blob != NULL &&
2713 int_arg_blob != NULL &&
2714 obj_int_arg_blob != NULL &&
2715 obj_obj_arg_blob != NULL, "Initial adapters must be properly created");
2716 }
2717
2718 // Outside of the lock
2719 post_adapter_creation(no_arg_blob, _no_arg_handler);
2720 post_adapter_creation(obj_arg_blob, _obj_arg_handler);
2721 post_adapter_creation(int_arg_blob, _int_arg_handler);
2722 post_adapter_creation(obj_int_arg_blob, _obj_int_arg_handler);
2723 post_adapter_creation(obj_obj_arg_blob, _obj_obj_arg_handler);
2724}
2725
2726AdapterHandlerEntry* AdapterHandlerLibrary::new_entry(AdapterFingerPrint* fingerprint,
2727 address i2c_entry,
2728 address c2i_entry,
2729 address c2i_unverified_entry,
2730 address c2i_no_clinit_check_entry) {
2731 return _adapters->new_entry(fingerprint, i2c_entry, c2i_entry, c2i_unverified_entry, c2i_no_clinit_check_entry);
2732}
2733
2734AdapterHandlerEntry* AdapterHandlerLibrary::get_simple_adapter(const methodHandle& method) {
2735 if (method->is_abstract()) {
2736 return _abstract_method_handler;
2737 }
2738 int total_args_passed = method->size_of_parameters(); // All args on stack
2739 if (total_args_passed == 0) {
2740 return _no_arg_handler;
2741 } else if (total_args_passed == 1) {
2742 if (!method->is_static()) {
2743 return _obj_arg_handler;
2744 }
2745 switch (method->signature()->char_at(1)) {
2746 case JVM_SIGNATURE_CLASS:
2747 case JVM_SIGNATURE_ARRAY:
2748 return _obj_arg_handler;
2749 case JVM_SIGNATURE_INT:
2750 case JVM_SIGNATURE_BOOLEAN:
2751 case JVM_SIGNATURE_CHAR:
2752 case JVM_SIGNATURE_BYTE:
2753 case JVM_SIGNATURE_SHORT:
2754 return _int_arg_handler;
2755 }
2756 } else if (total_args_passed == 2 &&
2757 !method->is_static()) {
2758 switch (method->signature()->char_at(1)) {
2759 case JVM_SIGNATURE_CLASS:
2760 case JVM_SIGNATURE_ARRAY:
2761 return _obj_obj_arg_handler;
2762 case JVM_SIGNATURE_INT:
2763 case JVM_SIGNATURE_BOOLEAN:
2764 case JVM_SIGNATURE_CHAR:
2765 case JVM_SIGNATURE_BYTE:
2766 case JVM_SIGNATURE_SHORT:
2767 return _obj_int_arg_handler;
2768 }
2769 }
2770 return NULL;
2771}
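A few hand-worked examples of how this fast path resolves, assuming the JVM_SIGNATURE_* constants carry their usual descriptor characters ('L', '[', 'I', ...); these are illustrative cases, not an exhaustive list:

// static void m()                  -> _no_arg_handler        (0 argument slots)
// void m()              (instance) -> _obj_arg_handler       (receiver only)
// static void m(int)               -> _int_arg_handler       (signature "(I)V", char_at(1) == 'I')
// static void m(Object)            -> _obj_arg_handler       (char_at(1) == 'L')
// void m(String)        (instance) -> _obj_obj_arg_handler   (receiver + object)
// void m(int)           (instance) -> _obj_int_arg_handler   (receiver + int-like)
// void m(long)          (instance) -> NULL                   (3 slots; falls through to the general get_adapter path)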
2772
2773class AdapterSignatureIterator : public SignatureIterator {
2774 private:
2775 BasicType stack_sig_bt[16];
2776 BasicType* sig_bt;
2777 int index;
2778
2779 public:
2780 AdapterSignatureIterator(Symbol* signature,
2781 fingerprint_t fingerprint,
2782 bool is_static,
2783 int total_args_passed) :
2784 SignatureIterator(signature, fingerprint),
2785 index(0)
2786 {
2787 sig_bt = (total_args_passed <= 16) ? stack_sig_bt : NEW_RESOURCE_ARRAY(BasicType, total_args_passed);
2788 if (!is_static) { // Pass in receiver first
2789 sig_bt[index++] = T_OBJECT;
2790 }
2791 do_parameters_on(this);
2792 }
2793
2794 BasicType* basic_types() {
2795 return sig_bt;
2796 }
2797
2798#ifdef ASSERT
2799 int slots() {
2800 return index;
2801 }
2802#endif
2803
2804 private:
2805
2806 friend class SignatureIterator; // so do_parameters_on can call do_type
2807 void do_type(BasicType type) {
2808 sig_bt[index++] = type;
2809 if (type == T_LONG || type == T_DOUBLE) {
2810 sig_bt[index++] = T_VOID; // Longs & doubles take 2 Java slots
2811 }
2812 }
2813};
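The effect of do_type above is that each parameter contributes one BasicType slot, with an extra T_VOID filler after every long or double, and instance methods get a leading T_OBJECT for the receiver. A small free-standing sketch of the same flattening rule (illustrative only, not the HotSpot iterator):

#include <vector>

enum BasicType { T_INT, T_LONG, T_FLOAT, T_DOUBLE, T_OBJECT, T_VOID };

// Flatten an already-parsed parameter list into Java-slot form.
std::vector<BasicType> flatten(const std::vector<BasicType>& params, bool is_static) {
  std::vector<BasicType> sig_bt;
  if (!is_static) sig_bt.push_back(T_OBJECT);                    // receiver occupies slot 0
  for (BasicType t : params) {
    sig_bt.push_back(t);
    if (t == T_LONG || t == T_DOUBLE) sig_bt.push_back(T_VOID);  // second Java slot
  }
  return sig_bt;
}

// Example: an instance method taking (int, long) flattens to
// { T_OBJECT, T_INT, T_LONG, T_VOID } -- four Java slots.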
2814
2815AdapterHandlerEntry* AdapterHandlerLibrary::get_adapter(const methodHandle& method) {
2816 // Use customized signature handler. Need to lock around updates to
2817 // the AdapterHandlerTable (it is not safe for concurrent readers
2818 // and a single writer: this could be fixed if it becomes a
2819 // problem).
2820 assert(_adapters != NULL, "Uninitialized");
2821
2822 // Fast-path for trivial adapters
2823 AdapterHandlerEntry* entry = get_simple_adapter(method);
2824 if (entry != NULL) {
2825 return entry;
2826 }
2827
2828 ResourceMark rm;
2829 AdapterBlob* new_adapter = NULL;
2830
2831 // Fill in the signature array, for the calling-convention call.
2832 int total_args_passed = method->size_of_parameters(); // All args on stack
2833
2834 AdapterSignatureIterator si(method->signature(), method->constMethod()->fingerprint(),
2835 method->is_static(), total_args_passed);
2836 assert(si.slots() == total_args_passed, "");
2837 BasicType* sig_bt = si.basic_types();
2838 {
2839 MutexLocker mu(AdapterHandlerLibrary_lock);
2840
2841 // Lookup method signature's fingerprint
2842 entry = _adapters->lookup(total_args_passed, sig_bt);
2843
2844 if (entry != NULL) {
2845#ifdef ASSERT
2846 if (VerifyAdapterSharing) {
2847 AdapterBlob* comparison_blob = NULL;
2848 AdapterHandlerEntry* comparison_entry = create_adapter(comparison_blob, total_args_passed, sig_bt, false);
2849 assert(comparison_blob == NULL, "no blob should be created when creating an adapter for comparison");
2850 assert(comparison_entry->compare_code(entry), "code must match");
2851 // Release the one just created and return the original
2852 _adapters->free_entry(comparison_entry);
2853 }
2854#endif
2855 return entry;
2856 }
2857
2858 entry = create_adapter(new_adapter, total_args_passed, sig_bt, /* allocate_code_blob */ true);
2859 }
2860
2861 // Outside of the lock
2862 if (new_adapter != NULL) {
2863 post_adapter_creation(new_adapter, entry);
2864 }
2865 return entry;
2866}
2867
2868AdapterHandlerEntry* AdapterHandlerLibrary::create_adapter(AdapterBlob*& new_adapter,
2869 int total_args_passed,
2870 BasicType* sig_bt,
2871 bool allocate_code_blob) {
2872
2873 // StubRoutines::code2() is initialized after this function can be called. As a result,
2874 // VerifyAdapterCalls and VerifyAdapterSharing can fail if we re-use code that was generated
2875 // prior to StubRoutines::code2() being set. Checks refer to checks generated in an I2C
2876 // stub that ensure that an I2C stub is called from an interpreter frame.
2877 bool contains_all_checks = StubRoutines::code2() != NULL;
2878
2879 VMRegPair stack_regs[16];
2880 VMRegPair* regs = (total_args_passed <= 16) ? stack_regs : NEW_RESOURCE_ARRAY(VMRegPair, total_args_passed);
2881
2882 // Get a description of the compiled java calling convention and the largest used (VMReg) stack slot usage
2883 int comp_args_on_stack = SharedRuntime::java_calling_convention(sig_bt, regs, total_args_passed);
2884 BufferBlob* buf = buffer_blob(); // the temporary code buffer in CodeCache
2885 CodeBuffer buffer(buf);
2886 short buffer_locs[20];
2887 buffer.insts()->initialize_shared_locs((relocInfo*)buffer_locs,
2888 sizeof(buffer_locs)/sizeof(relocInfo));
2889
2890 // Make a C heap allocated version of the fingerprint to store in the adapter
2891 AdapterFingerPrint* fingerprint = new AdapterFingerPrint(total_args_passed, sig_bt);
2892 MacroAssembler _masm(&buffer);
2893 AdapterHandlerEntry* entry = SharedRuntime::generate_i2c2i_adapters(&_masm,
2894 total_args_passed,
2895 comp_args_on_stack,
2896 sig_bt,
2897 regs,
2898 fingerprint);
2899
2900#ifdef ASSERT
2901 if (VerifyAdapterSharing) {
2902 entry->save_code(buf->code_begin(), buffer.insts_size());
2903 if (!allocate_code_blob) {
2904 return entry;
2905 }
2906 }
2907#endif
2908
2909 new_adapter = AdapterBlob::create(&buffer);
2910 NOT_PRODUCT(int insts_size = buffer.insts_size());
2911 if (new_adapter == NULL) {
2912 // CodeCache is full, disable compilation
2913 // Ought to log this but compile log is only per compile thread
2914 // and we're some nondescript Java thread.
2915 return NULL;
2916 }
2917 entry->relocate(new_adapter->content_begin());
2918#ifndef PRODUCT
2919 // debugging support
2920 if (PrintAdapterHandlers || PrintStubCode) {
2921 ttyLocker ttyl;
2922 entry->print_adapter_on(tty);
2923 tty->print_cr("i2c argument handler #%d for: %s %s (%d bytes generated)",
2924 _adapters->number_of_entries(), fingerprint->as_basic_args_string(),
2925 fingerprint->as_string(), insts_size);
2926 tty->print_cr("c2i argument handler starts at %p", entry->get_c2i_entry());
2927 if (Verbose || PrintStubCode) {
2928 address first_pc = entry->base_address();
2929 if (first_pc != NULL) {
2930 Disassembler::decode(first_pc, first_pc + insts_size, tty
2931 NOT_PRODUCT(COMMA &new_adapter->asm_remarks()));
2932 tty->cr();
2933 }
2934 }
2935 }
2936#endif
2937
2938 // Add the entry only if the entry contains all required checks (see sharedRuntime_xxx.cpp)
2939 // The checks are inserted only if -XX:+VerifyAdapterCalls is specified.
2940 if (contains_all_checks || !VerifyAdapterCalls) {
2941 _adapters->add(entry);
2942 }
2943 return entry;
2944}
2945
2946address AdapterHandlerEntry::base_address() {
2947 address base = _i2c_entry;
2948 if (base == NULL) base = _c2i_entry;
2949 assert(base <= _c2i_entry || _c2i_entry == NULL, "");
2950 assert(base <= _c2i_unverified_entry || _c2i_unverified_entry == NULL, "");
2951 assert(base <= _c2i_no_clinit_check_entry || _c2i_no_clinit_check_entry == NULL, "");
2952 return base;
2953}
2954
2955void AdapterHandlerEntry::relocate(address new_base) {
2956 address old_base = base_address();
2957 assert(old_base != NULL, "");
2958 ptrdiff_t delta = new_base - old_base;
2959 if (_i2c_entry != NULL)
2960 _i2c_entry += delta;
2961 if (_c2i_entry != NULL)
2962 _c2i_entry += delta;
2963 if (_c2i_unverified_entry != NULL)
2964 _c2i_unverified_entry += delta;
2965 if (_c2i_no_clinit_check_entry != NULL)
2966 _c2i_no_clinit_check_entry += delta;
2967 assert(base_address() == new_base, "");
2968}
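relocate() is a pure base shift: every non-NULL entry point moves by the same delta between the old and new blob base, so offsets within the adapter code are preserved. A minimal sketch of the idea, using hypothetical buffers rather than real adapter blobs:

#include <cassert>
#include <cstddef>
#include <cstdint>

// Shift an entry point by the distance between the old and new base;
// absent (null) entries stay null, as in AdapterHandlerEntry::relocate.
static uint8_t* shift(uint8_t* entry, ptrdiff_t delta) {
  return entry == nullptr ? nullptr : entry + delta;
}

void relocate_demo() {
  static uint8_t old_blob[64];
  static uint8_t new_blob[64];
  uint8_t* i2c = old_blob + 0;             // hypothetical entry offsets inside the blob
  uint8_t* c2i = old_blob + 16;
  ptrdiff_t delta = new_blob - old_blob;   // one delta shared by every entry point
  i2c = shift(i2c, delta);
  c2i = shift(c2i, delta);
  assert(i2c == new_blob + 0 && c2i == new_blob + 16);  // relative offsets preserved
}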
2969
2970
2971void AdapterHandlerEntry::deallocate() {
2972 delete _fingerprint;
2973#ifdef ASSERT
2974 FREE_C_HEAP_ARRAY(unsigned char, _saved_code);
2975#endif
2976}
2977
2978
2979#ifdef ASSERT
2980// Capture the code before relocation so that it can be compared
2981// against other versions. If the code is captured after relocation
2982// then relative instructions won't be equivalent.
2983void AdapterHandlerEntry::save_code(unsigned char* buffer, int length) {
2984 _saved_code = NEW_C_HEAP_ARRAY(unsigned char, length, mtCode);
2985 _saved_code_length = length;
2986 memcpy(_saved_code, buffer, length);
2987}
2988
2989
2990bool AdapterHandlerEntry::compare_code(AdapterHandlerEntry* other) {
2991 assert(_saved_code != NULL && other->_saved_code != NULL, "code not saved");
2992
2993 if (other->_saved_code_length != _saved_code_length) {
2994 return false;
2995 }
2996
2997 return memcmp(other->_saved_code, _saved_code, _saved_code_length) == 0;
2998}
2999#endif
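Under VerifyAdapterSharing, the pre-relocation instruction bytes captured by save_code() act as the adapter's identity: two entries that share a fingerprint must have captured identical bytes (comparing after relocation would differ wherever encodings are PC-relative). A trimmed-down sketch of the comparison itself, with hypothetical field names:

#include <cstring>

struct SavedCode {
  unsigned char* code;    // bytes captured before relocation (as in save_code)
  int            length;
};

// Mirrors compare_code(): lengths first, then a byte-for-byte memcmp.
bool same_adapter_code(const SavedCode& a, const SavedCode& b) {
  if (a.length != b.length) return false;
  return std::memcmp(a.code, b.code, a.length) == 0;
}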
3000
3001
3002/**
3003 * Create a native wrapper for this native method. The wrapper converts the
3004 * Java-compiled calling convention to the native convention, handles
3005 * arguments, and transitions to native. On return from the native call we
3006 * transition back to Java, blocking if a safepoint is in progress.
3007 */
3008void AdapterHandlerLibrary::create_native_wrapper(const methodHandle& method) {
3009 ResourceMark rm;
3010 nmethod* nm = NULL;
3011
3012 assert(method->is_native(), "must be native");
3013 assert(method->is_method_handle_intrinsic() ||
3014 method->has_native_function(), "must have something valid to call!");
3015
3016 {
3017 // Perform the work while holding the lock, but perform any printing outside the lock
3018 MutexLocker mu(AdapterHandlerLibrary_lock);
3019 // See if somebody beat us to it
3020 if (method->code() != NULL) {
3021 return;
3022 }
3023
3024 const int compile_id = CompileBroker::assign_compile_id(method, CompileBroker::standard_entry_bci);
3025 assert(compile_id > 0, "Must generate native wrapper");
3026
3027
3028 ResourceMark rm;
3029 BufferBlob* buf = buffer_blob(); // the temporary code buffer in CodeCache
3030 if (buf != NULL) {
3031 CodeBuffer buffer(buf);
3032 struct { double data[20]; } locs_buf;
3033 buffer.insts()->initialize_shared_locs((relocInfo*)&locs_buf, sizeof(locs_buf) / sizeof(relocInfo));
3034#if defined(AARCH64)
3035 // On AArch64 with ZGC and nmethod entry barriers, we need all oops to be
3036 // in the constant pool to ensure ordering between the barrier and oops
3037 // accesses. For native_wrappers we need a constant.
3038 buffer.initialize_consts_size(8);
3039#endif
3040 MacroAssembler _masm(&buffer);
3041
3042 // Fill in the signature array, for the calling-convention call.
3043 const int total_args_passed = method->size_of_parameters();
3044
3045 VMRegPair stack_regs[16];
3046 VMRegPair* regs = (total_args_passed <= 16) ? stack_regs : NEW_RESOURCE_ARRAY(VMRegPair, total_args_passed);
3047
3048 AdapterSignatureIterator si(method->signature(), method->constMethod()->fingerprint(),
3049 method->is_static(), total_args_passed);
3050 BasicType* sig_bt = si.basic_types();
3051 assert(si.slots() == total_args_passed, "");
3052 BasicType ret_type = si.return_type();
3053
3054 // Now get the compiled-Java arguments layout.
3055 int comp_args_on_stack = SharedRuntime::java_calling_convention(sig_bt, regs, total_args_passed);
3056
3057 // Generate the compiled-to-native wrapper code
3058 nm = SharedRuntime::generate_native_wrapper(&_masm, method, compile_id, sig_bt, regs, ret_type);
3059
3060 if (nm != NULL) {
3061 {
3062 MutexLocker pl(CompiledMethod_lock, Mutex::_no_safepoint_check_flag);
3063 if (nm->make_in_use()) {
3064 method->set_code(method, nm);
3065 }
3066 }
3067
3068 DirectiveSet* directive = DirectivesStack::getDefaultDirective(CompileBroker::compiler(CompLevel_simple));
3069 if (directive->PrintAssemblyOption) {
3070 nm->print_code();
3071 }
3072 DirectivesStack::release(directive);
3073 }
3074 }
3075 } // Unlock AdapterHandlerLibrary_lock
3076
3077
3078 // Install the generated code.
3079 if (nm != NULL) {
3080 const char *msg = method->is_static() ? "(static)" : "";
3081 CompileTask::print_ul(nm, msg);
3082 if (PrintCompilation) {
3083 ttyLocker ttyl;
3084 CompileTask::print(tty, nm, msg);
3085 }
3086 nm->post_compiled_method_load_event();
3087 }
3088}
3089
3090// -------------------------------------------------------------------------
3091// Java-Java calling convention
3092// (what you use when Java calls Java)
3093
3094//------------------------------name_for_receiver----------------------------------
3095// For a given signature, return the VMReg for parameter 0.
3096VMReg SharedRuntime::name_for_receiver() {
3097 VMRegPair regs;
3098 BasicType sig_bt = T_OBJECT;
3099 (void) java_calling_convention(&sig_bt, &regs, 1);
3100 // Return argument 0 register. In the LP64 build pointers
3101 // take 2 registers, but the VM wants only the 'main' name.
3102 return regs.first();
3103}
3104
3105VMRegPair *SharedRuntime::find_callee_arguments(Symbol* sig, bool has_receiver, bool has_appendix, int* arg_size) {
3106 // This method is returning a data structure allocated as a
3107 // ResourceObject, so do not put any ResourceMarks in here.
3108
3109 BasicType *sig_bt = NEW_RESOURCE_ARRAY(BasicType, 256);
3110 VMRegPair *regs = NEW_RESOURCE_ARRAY(VMRegPair, 256);
3111 int cnt = 0;
3112 if (has_receiver) {
3113 sig_bt[cnt++] = T_OBJECT; // Receiver is argument 0; not in signature
3114 }
3115
3116 for (SignatureStream ss(sig); !ss.at_return_type(); ss.next()) {
3117 BasicType type = ss.type();
3118 sig_bt[cnt++] = type;
3119 if (is_double_word_type(type))
3120 sig_bt[cnt++] = T_VOID;
3121 }
3122
3123 if (has_appendix) {
3124 sig_bt[cnt++] = T_OBJECT;
3125 }
3126
3127 assert(cnt < 256, "grow table size");
3128
3129 int comp_args_on_stack;
3130 comp_args_on_stack = java_calling_convention(sig_bt, regs, cnt);
3131
3132 // the calling convention doesn't count out_preserve_stack_slots so
3133 // we must add that in to get "true" stack offsets.
3134
3135 if (comp_args_on_stack) {
3136 for (int i = 0; i < cnt; i++) {
3137 VMReg reg1 = regs[i].first();
3138 if (reg1->is_stack()) {
3139 // Yuck
3140 reg1 = reg1->bias(out_preserve_stack_slots());
3141 }
3142 VMReg reg2 = regs[i].second();
3143 if (reg2->is_stack()) {
3144 // Yuck
3145 reg2 = reg2->bias(out_preserve_stack_slots());
3146 }
3147 regs[i].set_pair(reg2, reg1);
3148 }
3149 }
3150
3151 // results
3152 *arg_size = cnt;
3153 return regs;
3154}
3155
3156// OSR Migration Code
3157//
3158 // This code is used to convert interpreter frames into compiled frames. It is
3159 // called from the very start of a compiled OSR nmethod. A temp array is
3160// allocated to hold the interesting bits of the interpreter frame. All
3161// active locks are inflated to allow them to move. The displaced headers and
3162// active interpreter locals are copied into the temp buffer. Then we return
3163// back to the compiled code. The compiled code then pops the current
3164// interpreter frame off the stack and pushes a new compiled frame. Then it
3165// copies the interpreter locals and displaced headers where it wants.
3166// Finally it calls back to free the temp buffer.
3167//
3168// All of this is done NOT at any Safepoint, nor is any safepoint or GC allowed.
3169
3170 JRT_LEAF(intptr_t*, SharedRuntime::OSR_migration_begin( JavaThread *current) )
3171 // During OSR migration, we unwind the interpreted frame and replace it with a compiled
3172 // frame. The stack watermark code below ensures that the interpreted frame is processed
3173 // before it gets unwound. This is helpful as the size of the compiled frame could be
3174 // larger than the interpreted frame, which could result in the new frame not being
3175 // processed correctly.
3176 StackWatermarkSet::before_unwind(current);
3177
3178 //
3179 // This code is dependent on the memory layout of the interpreter local
3180 // array and the monitors. On all of our platforms the layout is identical
3181 // so this code is shared. If some platform lays their arrays out
3182 // differently, then this code could move to platform specific code or
3183 // the code here could be modified to copy items one at a time using
3184 // frame accessor methods and be platform independent.
3185
3186 frame fr = current->last_frame();
3187 assert(fr.is_interpreted_frame(), "");
3188 assert(fr.interpreter_frame_expression_stack_size()==0, "only handle empty stacks");
3189
3190 // Figure out how many monitors are active.
3191 int active_monitor_count = 0;
3192 for (BasicObjectLock *kptr = fr.interpreter_frame_monitor_end();
3193 kptr < fr.interpreter_frame_monitor_begin();
3194 kptr = fr.next_monitor_in_interpreter_frame(kptr) ) {
3195 if (kptr->obj() != NULL) active_monitor_count++;
3196 }
3197
3198 // QQQ we could place number of active monitors in the array so that compiled code
3199 // could double check it.
3200
3201 Method* moop = fr.interpreter_frame_method();
3202 int max_locals = moop->max_locals();
3203 // Allocate temp buffer, 1 word per local & 2 per active monitor
3204 int buf_size_words = max_locals + active_monitor_count * BasicObjectLock::size();
3205 intptr_t *buf = NEW_C_HEAP_ARRAY(intptr_t,buf_size_words, mtCode);
3206
3207 // Copy the locals. Order is preserved so that loading of longs works.
3208 // Since there's no GC I can copy the oops blindly.
3209 assert(sizeof(HeapWord)==sizeof(intptr_t), "fix this code");
3210 Copy::disjoint_words((HeapWord*)fr.interpreter_frame_local_at(max_locals-1),
3211 (HeapWord*)&buf[0],
3212 max_locals);
3213
3214 // Inflate locks. Copy the displaced headers. Be careful, there can be holes.
3215 int i = max_locals;
3216 for (BasicObjectLock *kptr2 = fr.interpreter_frame_monitor_end();
3217 kptr2 < fr.interpreter_frame_monitor_begin();
3218 kptr2 = fr.next_monitor_in_interpreter_frame(kptr2) ) {
3219 if (kptr2->obj() != NULL) { // Avoid 'holes' in the monitor array
3220 BasicLock *lock = kptr2->lock();
3221 // Inflate so the object's header no longer refers to the BasicLock.
3222 if (lock->displaced_header().is_unlocked()) {
3223 // The object is locked and the resulting ObjectMonitor* will also be
3224 // locked so it can't be async deflated until ownership is dropped.
3225 // See the big comment in basicLock.cpp: BasicLock::move_to().
3226 ObjectSynchronizer::inflate_helper(kptr2->obj());
3227 }
3228 // Now the displaced header is free to move because the
3229 // object's header no longer refers to it.
3230 buf[i++] = (intptr_t)lock->displaced_header().value();
3231 buf[i++] = cast_from_oop<intptr_t>(kptr2->obj());
3232 }
3233 }
3234 assert(i - max_locals == active_monitor_count*2, "found the expected number of monitors");
3235
3236 return buf;
3237JRT_END
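The buffer returned to the compiled OSR entry therefore has a fixed layout: max_locals words of interpreter locals, followed by two words (displaced header, then object) for each active monitor. A small sketch of that layout with hypothetical sizes:

// Layout of the OSR migration buffer built above:
//
//   buf[0 .. max_locals-1]    interpreter locals
//   buf[max_locals ..]        { displaced_header, object } per active monitor
//
// For example, with hypothetical values max_locals == 5, two active monitors,
// and a 2-word BasicObjectLock:
const int max_locals_example           = 5;
const int active_monitor_count_example = 2;
const int basic_object_lock_words      = 2;

const int buf_size_words_example =
    max_locals_example + active_monitor_count_example * basic_object_lock_words;  // == 9

// buf[0..4] locals; buf[5],buf[6] header/object of monitor 0; buf[7],buf[8] of monitor 1.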
3238
3239 JRT_LEAF(void, SharedRuntime::OSR_migration_end( intptr_t* buf) )
3240 FREE_C_HEAP_ARRAY(intptr_t, buf);
3241JRT_END
3242
3243bool AdapterHandlerLibrary::contains(const CodeBlob* b) {
3244 AdapterHandlerTableIterator iter(_adapters);
3245 while (iter.has_next()) {
3246 AdapterHandlerEntry* a = iter.next();
3247 if (b == CodeCache::find_blob(a->get_i2c_entry())) return true;
3248 }
3249 return false;
3250}
3251
3252void AdapterHandlerLibrary::print_handler_on(outputStream* st, const CodeBlob* b) {
3253 AdapterHandlerTableIterator iter(_adapters);
3254 while (iter.has_next()) {
3255 AdapterHandlerEntry* a = iter.next();
3256 if (b == CodeCache::find_blob(a->get_i2c_entry())) {
3257 st->print("Adapter for signature: ");
3258 a->print_adapter_on(tty);
3259 return;
3260 }
3261 }
3262 assert(false, "Should have found handler");
3263}
3264
3265void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
3266 st->print("AHE@" INTPTR_FORMAT ": %s", p2i(this), fingerprint()->as_string());
3267 if (get_i2c_entry() != NULL) {
3268 st->print(" i2c: " INTPTR_FORMAT, p2i(get_i2c_entry()));
3269 }
3270 if (get_c2i_entry() != NULL) {
3271 st->print(" c2i: " INTPTR_FORMAT, p2i(get_c2i_entry()));
3272 }
3273 if (get_c2i_unverified_entry() != NULL) {
3274 st->print(" c2iUV: " INTPTR_FORMAT, p2i(get_c2i_unverified_entry()));
3275 }
3276 if (get_c2i_no_clinit_check_entry() != NULL) {
3277 st->print(" c2iNCI: " INTPTR_FORMAT, p2i(get_c2i_no_clinit_check_entry()));
3278 }
3279 st->cr();
3280}
3281
3282#ifndef PRODUCT
3283
3284void AdapterHandlerLibrary::print_statistics() {
3285 _adapters->print_statistics();
3286}
3287
3288#endif /* PRODUCT */
3289
3290 JRT_LEAF(void, SharedRuntime::enable_stack_reserved_zone(JavaThread* current))
3291 StackOverflow* overflow_state = current->stack_overflow_state();
3292 overflow_state->enable_stack_reserved_zone(/*check_if_disabled*/true);
3293 overflow_state->set_reserved_stack_activation(current->stack_base());
3294JRT_END
3295
3296frame SharedRuntime::look_for_reserved_stack_annotated_method(JavaThread* current, frame fr) {
3297 ResourceMark rm(current);
3298 frame activation;
3299 CompiledMethod* nm = NULL;
3300 int count = 1;
3301
3302 assert(fr.is_java_frame(), "Must start on Java frame");
3303
3304 while (true) {
3305 Method* method = NULL;
3306 bool found = false;
3307 if (fr.is_interpreted_frame()) {
3308 method = fr.interpreter_frame_method();
3309 if (method != NULL && method->has_reserved_stack_access()) {
3310 found = true;
3311 }
3312 } else {
3313 CodeBlob* cb = fr.cb();
3314 if (cb != NULL && cb->is_compiled()) {
3315 nm = cb->as_compiled_method();
3316 method = nm->method();
3317 // scope_desc_near() must be used, instead of scope_desc_at() because on
3318 // SPARC, the pcDesc can be on the delay slot after the call instruction.
3319 for (ScopeDesc *sd = nm->scope_desc_near(fr.pc()); sd != NULL; sd = sd->sender()) {
3320 method = sd->method();
3321 if (method != NULL && method->has_reserved_stack_access()) {
3322 found = true;
3323 }
3324 }
3325 }
3326 }
3327 if (found) {
3328 activation = fr;
3329 warning("Potentially dangerous stack overflow in "
3330 "ReservedStackAccess annotated method %s [%d]",
3331 method->name_and_sig_as_C_string(), count++);
3332 EventReservedStackActivation event;
3333 if (event.should_commit()) {
3334 event.set_method(method);
3335 event.commit();
3336 }
3337 }
3338 if (fr.is_first_java_frame()) {
3339 break;
3340 } else {
3341 fr = fr.java_sender();
3342 }
3343 }
3344 return activation;
3345}
3346
3347void SharedRuntime::on_slowpath_allocation_exit(JavaThread* current) {
3348 // After any safepoint, just before going back to compiled code,
3349 // we inform the GC that we will be doing initializing writes to
3350 // this object in the future without emitting card-marks, so
3351 // GC may take any compensating steps.
3352
3353 oop new_obj = current->vm_result();
3354 if (new_obj == NULL) return;
3355
3356 BarrierSet *bs = BarrierSet::barrier_set();
3357 bs->on_slowpath_allocation_exit(current, new_obj);
3358}