Bug Summary

File: jdk/src/hotspot/share/opto/doCall.cpp
Warning: line 693, column 13
Value stored to 'cast' during its initialization is never read
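
For reference, a minimal sketch of the pattern behind this diagnostic (clang's dead-store check, enabled here via -analyzer-checker=deadcode), using hypothetical compute()/flagged()/fixed() functions rather than the JDK code: a local variable is initialized from a call whose result is never read afterwards, so the local itself is redundant. At doCall.cpp:693 the flagged local is 'cast', where cast_not_null(receiver) is presumably kept for its side effects (the commented-out assert on line 694 suggests the result was once compared against the receiver).

    // Hypothetical illustration of the diagnosed pattern (not JDK code).
    static int compute(int x) { return x + 1; }

    static int flagged(int receiver) {
      int cast = compute(receiver);  // value stored to 'cast' during its initialization is never read
      return receiver;               // 'cast' is never used again
    }

    static int fixed(int receiver) {
      (void) compute(receiver);      // keep the call for its side effects, drop the dead local
      return receiver;
    }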

Annotated Source Code


clang -cc1 -triple x86_64-pc-linux-gnu -analyze -disable-free -disable-llvm-verifier -discard-value-names -main-file-name doCall.cpp -analyzer-store=region -analyzer-opt-analyze-nested-blocks -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -mrelocation-model pic -pic-level 2 -mthread-model posix -fno-delete-null-pointer-checks -mframe-pointer=all -relaxed-aliasing -fmath-errno -fno-rounding-math -masm-verbose -mconstructor-aliases -munwind-tables -target-cpu x86-64 -dwarf-column-info -fno-split-dwarf-inlining -debugger-tuning=gdb -resource-dir /usr/lib/llvm-10/lib/clang/10.0.0 -I /home/daniel/Projects/java/jdk/build/linux-x86_64-server-fastdebug/hotspot/variant-server/libjvm/objs/precompiled -D __STDC_FORMAT_MACROS -D __STDC_LIMIT_MACROS -D __STDC_CONSTANT_MACROS -D _GNU_SOURCE -D _REENTRANT -D LIBC=gnu -D LINUX -D VM_LITTLE_ENDIAN -D _LP64=1 -D ASSERT -D CHECK_UNHANDLED_OOPS -D TARGET_ARCH_x86 -D INCLUDE_SUFFIX_OS=_linux -D INCLUDE_SUFFIX_CPU=_x86 -D INCLUDE_SUFFIX_COMPILER=_gcc -D TARGET_COMPILER_gcc -D AMD64 -D HOTSPOT_LIB_ARCH="amd64" -D COMPILER1 -D COMPILER2 -I /home/daniel/Projects/java/jdk/build/linux-x86_64-server-fastdebug/hotspot/variant-server/gensrc/adfiles -I /home/daniel/Projects/java/jdk/src/hotspot/share -I /home/daniel/Projects/java/jdk/src/hotspot/os/linux -I /home/daniel/Projects/java/jdk/src/hotspot/os/posix -I /home/daniel/Projects/java/jdk/src/hotspot/cpu/x86 -I /home/daniel/Projects/java/jdk/src/hotspot/os_cpu/linux_x86 -I /home/daniel/Projects/java/jdk/build/linux-x86_64-server-fastdebug/hotspot/variant-server/gensrc -I /home/daniel/Projects/java/jdk/src/hotspot/share/precompiled -I /home/daniel/Projects/java/jdk/src/hotspot/share/include -I /home/daniel/Projects/java/jdk/src/hotspot/os/posix/include -I /home/daniel/Projects/java/jdk/build/linux-x86_64-server-fastdebug/support/modules_include/java.base -I /home/daniel/Projects/java/jdk/build/linux-x86_64-server-fastdebug/support/modules_include/java.base/linux -I /home/daniel/Projects/java/jdk/src/java.base/share/native/libjimage -I /home/daniel/Projects/java/jdk/build/linux-x86_64-server-fastdebug/hotspot/variant-server/gensrc/adfiles -I /home/daniel/Projects/java/jdk/src/hotspot/share -I /home/daniel/Projects/java/jdk/src/hotspot/os/linux -I /home/daniel/Projects/java/jdk/src/hotspot/os/posix -I /home/daniel/Projects/java/jdk/src/hotspot/cpu/x86 -I /home/daniel/Projects/java/jdk/src/hotspot/os_cpu/linux_x86 -I /home/daniel/Projects/java/jdk/build/linux-x86_64-server-fastdebug/hotspot/variant-server/gensrc -D _FORTIFY_SOURCE=2 -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/7.5.0/../../../../include/c++/7.5.0 -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/7.5.0/../../../../include/x86_64-linux-gnu/c++/7.5.0 -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/7.5.0/../../../../include/x86_64-linux-gnu/c++/7.5.0 -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/7.5.0/../../../../include/c++/7.5.0/backward -internal-isystem /usr/local/include -internal-isystem /usr/lib/llvm-10/lib/clang/10.0.0/include 
-internal-externc-isystem /usr/include/x86_64-linux-gnu -internal-externc-isystem /include -internal-externc-isystem /usr/include -O3 -Wno-format-zero-length -Wno-unused-parameter -Wno-unused -Wno-parentheses -Wno-comment -Wno-unknown-pragmas -Wno-address -Wno-delete-non-virtual-dtor -Wno-char-subscripts -Wno-array-bounds -Wno-int-in-bool-context -Wno-ignored-qualifiers -Wno-missing-field-initializers -Wno-implicit-fallthrough -Wno-empty-body -Wno-strict-overflow -Wno-sequence-point -Wno-maybe-uninitialized -Wno-misleading-indentation -Wno-cast-function-type -Wno-shift-negative-value -std=c++14 -fdeprecated-macro -fdebug-compilation-dir /home/daniel/Projects/java/jdk/make/hotspot -ferror-limit 19 -fmessage-length 0 -fvisibility hidden -stack-protector 1 -fno-rtti -fgnuc-version=4.2.1 -fobjc-runtime=gcc -fdiagnostics-show-option -vectorize-loops -vectorize-slp -analyzer-output=html -faddrsig -o /home/daniel/Projects/java/scan/2021-12-21-193737-8510-1 -x c++ /home/daniel/Projects/java/jdk/src/hotspot/share/opto/doCall.cpp
1/*
2 * Copyright (c) 1998, 2021, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25#include "precompiled.hpp"
26#include "ci/ciCallSite.hpp"
27#include "ci/ciMethodHandle.hpp"
28#include "ci/ciSymbols.hpp"
29#include "classfile/vmSymbols.hpp"
30#include "compiler/compileBroker.hpp"
31#include "compiler/compileLog.hpp"
32#include "interpreter/linkResolver.hpp"
33#include "opto/addnode.hpp"
34#include "opto/callGenerator.hpp"
35#include "opto/castnode.hpp"
36#include "opto/cfgnode.hpp"
37#include "opto/mulnode.hpp"
38#include "opto/parse.hpp"
39#include "opto/rootnode.hpp"
40#include "opto/runtime.hpp"
41#include "opto/subnode.hpp"
42#include "prims/methodHandles.hpp"
43#include "runtime/sharedRuntime.hpp"
44
45void trace_type_profile(Compile* C, ciMethod *method, int depth, int bci, ciMethod *prof_method, ciKlass *prof_klass, int site_count, int receiver_count) {
46 if (TraceTypeProfile || C->print_inlining()) {
47 outputStream* out = tty;
48 if (!C->print_inlining()) {
49 if (!PrintOpto && !PrintCompilation) {
50 method->print_short_name();
51 tty->cr();
52 }
53 CompileTask::print_inlining_tty(prof_method, depth, bci);
54 } else {
55 out = C->print_inlining_stream();
56 }
57 CompileTask::print_inline_indent(depth, out);
58 out->print(" \\-> TypeProfile (%d/%d counts) = ", receiver_count, site_count);
59 stringStream ss;
60 prof_klass->name()->print_symbol_on(&ss);
61 out->print("%s", ss.as_string());
62 out->cr();
63 }
64}
65
66CallGenerator* Compile::call_generator(ciMethod* callee, int vtable_index, bool call_does_dispatch,
67 JVMState* jvms, bool allow_inline,
68 float prof_factor, ciKlass* speculative_receiver_type,
69 bool allow_intrinsics) {
70 ciMethod* caller = jvms->method();
71 int bci = jvms->bci();
72 Bytecodes::Code bytecode = caller->java_code_at_bci(bci);
73 guarantee(callee != NULL, "failed method resolution");
74
75 const bool is_virtual_or_interface = (bytecode == Bytecodes::_invokevirtual) ||
76 (bytecode == Bytecodes::_invokeinterface);
77
78 // Dtrace currently doesn't work unless all calls are vanilla
79 if (env()->dtrace_method_probes()) {
80 allow_inline = false;
81 }
82
83 // Note: When we get profiling during stage-1 compiles, we want to pull
84 // from more specific profile data which pertains to this inlining.
85 // Right now, ignore the information in jvms->caller(), and do method[bci].
86 ciCallProfile profile = caller->call_profile_at_bci(bci);
87
88 // See how many times this site has been invoked.
89 int site_count = profile.count();
90 int receiver_count = -1;
91 if (call_does_dispatch && UseTypeProfile && profile.has_receiver(0)) {
92 // Receivers in the profile structure are ordered by call counts
93 // so that the most called (major) receiver is profile.receiver(0).
94 receiver_count = profile.receiver_count(0);
95 }
96
97 CompileLog* log = this->log();
98 if (log != NULL) {
99 int rid = (receiver_count >= 0)? log->identify(profile.receiver(0)): -1;
100 int r2id = (rid != -1 && profile.has_receiver(1))? log->identify(profile.receiver(1)):-1;
101 log->begin_elem("call method='%d' count='%d' prof_factor='%f'",
102 log->identify(callee), site_count, prof_factor);
103 if (call_does_dispatch) log->print(" virtual='1'");
104 if (allow_inline) log->print(" inline='1'");
105 if (receiver_count >= 0) {
106 log->print(" receiver='%d' receiver_count='%d'", rid, receiver_count);
107 if (profile.has_receiver(1)) {
108 log->print(" receiver2='%d' receiver2_count='%d'", r2id, profile.receiver_count(1));
109 }
110 }
111 if (callee->is_method_handle_intrinsic()) {
112 log->print(" method_handle_intrinsic='1'");
113 }
114 log->end_elem();
115 }
116
117 // Special case the handling of certain common, profitable library
118 // methods. If these methods are replaced with specialized code,
119 // then we return it as the inlined version of the call.
120 CallGenerator* cg_intrinsic = NULL;
121 if (allow_inline && allow_intrinsics) {
122 CallGenerator* cg = find_intrinsic(callee, call_does_dispatch);
123 if (cg != NULL) {
124 if (cg->is_predicated()) {
125 // Code without intrinsic but, hopefully, inlined.
126 CallGenerator* inline_cg = this->call_generator(callee,
127 vtable_index, call_does_dispatch, jvms, allow_inline, prof_factor, speculative_receiver_type, false);
128 if (inline_cg != NULL) {
129 cg = CallGenerator::for_predicated_intrinsic(cg, inline_cg);
130 }
131 }
132
133 // If intrinsic does the virtual dispatch, we try to use the type profile
134 // first, and hopefully inline it as the regular virtual call below.
135 // We will retry the intrinsic if nothing had claimed it afterwards.
136 if (cg->does_virtual_dispatch()) {
137 cg_intrinsic = cg;
138 cg = NULL;
139 } else if (IncrementalInline && should_delay_vector_inlining(callee, jvms)) {
140 return CallGenerator::for_late_inline(callee, cg);
141 } else {
142 return cg;
143 }
144 }
145 }
146
147 // Do method handle calls.
148 // NOTE: This must happen before normal inlining logic below since
149 // MethodHandle.invoke* are native methods which obviously don't
150 // have bytecodes and so normal inlining fails.
151 if (callee->is_method_handle_intrinsic()) {
152 CallGenerator* cg = CallGenerator::for_method_handle_call(jvms, caller, callee, allow_inline);
153 return cg;
154 }
155
156 // Attempt to inline...
157 if (allow_inline) {
158 // The profile data is only partly attributable to this caller,
159 // scale back the call site information.
160 float past_uses = jvms->method()->scale_count(site_count, prof_factor);
161 // This is the number of times we expect the call code to be used.
162 float expected_uses = past_uses;
163
164 // Try inlining a bytecoded method:
165 if (!call_does_dispatch) {
166 InlineTree* ilt = InlineTree::find_subtree_from_root(this->ilt(), jvms->caller(), jvms->method());
167 bool should_delay = AlwaysIncrementalInline;
168 if (ilt->ok_to_inline(callee, jvms, profile, should_delay)) {
169 CallGenerator* cg = CallGenerator::for_inline(callee, expected_uses);
170 // For optimized virtual calls assert at runtime that receiver object
171 // is a subtype of the inlined method holder. CHA can report a method
172 // as a unique target under an abstract method, but receiver type
173 // sometimes has a broader type. Similar scenario is possible with
174 // default methods when type system loses information about implemented
175 // interfaces.
176 if (cg != NULL && is_virtual_or_interface && !callee->is_static()) {
177 CallGenerator* trap_cg = CallGenerator::for_uncommon_trap(callee,
178 Deoptimization::Reason_receiver_constraint, Deoptimization::Action_none);
179
180 cg = CallGenerator::for_guarded_call(callee->holder(), trap_cg, cg);
181 }
182 if (cg != NULL) {
183 // Delay the inlining of this method to give us the
184 // opportunity to perform some high level optimizations
185 // first.
186 if (should_delay_string_inlining(callee, jvms)) {
187 return CallGenerator::for_string_late_inline(callee, cg);
188 } else if (should_delay_boxing_inlining(callee, jvms)) {
189 return CallGenerator::for_boxing_late_inline(callee, cg);
190 } else if (should_delay_vector_reboxing_inlining(callee, jvms)) {
191 return CallGenerator::for_vector_reboxing_late_inline(callee, cg);
192 } else if (should_delay) {
193 return CallGenerator::for_late_inline(callee, cg);
194 } else {
195 return cg;
196 }
197 }
198 }
199 }
200
201 // Try using the type profile.
202 if (call_does_dispatch && site_count > 0 && UseTypeProfile) {
203 // The major receiver's count >= TypeProfileMajorReceiverPercent of site_count.
204 bool have_major_receiver = profile.has_receiver(0) && (100.*profile.receiver_prob(0) >= (float)TypeProfileMajorReceiverPercent);
205 ciMethod* receiver_method = NULL;
206
207 int morphism = profile.morphism();
208 if (speculative_receiver_type != NULL) {
209 if (!too_many_traps_or_recompiles(caller, bci, Deoptimization::Reason_speculate_class_check)) {
210 // We have a speculative type, we should be able to resolve
211 // the call. We do that before looking at the profiling at
212 // this invoke because it may lead to bimorphic inlining which
213 // a speculative type should help us avoid.
214 receiver_method = callee->resolve_invoke(jvms->method()->holder(),
215 speculative_receiver_type);
216 if (receiver_method == NULL) {
217 speculative_receiver_type = NULL;
218 } else {
219 morphism = 1;
220 }
221 } else {
222 // speculation failed before. Use profiling at the call
223 // (could allow bimorphic inlining for instance).
224 speculative_receiver_type = NULL;
225 }
226 }
227 if (receiver_method == NULL &&
228 (have_major_receiver || morphism == 1 ||
229 (morphism == 2 && UseBimorphicInlining))) {
230 // receiver_method = profile.method();
231 // Profiles do not suggest methods now. Look it up in the major receiver.
232 receiver_method = callee->resolve_invoke(jvms->method()->holder(),
233 profile.receiver(0));
234 }
235 if (receiver_method != NULL) {
236 // The single majority receiver sufficiently outweighs the minority.
237 CallGenerator* hit_cg = this->call_generator(receiver_method,
238 vtable_index, !call_does_dispatch, jvms, allow_inline, prof_factor);
239 if (hit_cg != NULL) {
240 // Look up second receiver.
241 CallGenerator* next_hit_cg = NULL;
242 ciMethod* next_receiver_method = NULL;
243 if (morphism == 2 && UseBimorphicInlining) {
244 next_receiver_method = callee->resolve_invoke(jvms->method()->holder(),
245 profile.receiver(1));
246 if (next_receiver_method != NULL) {
247 next_hit_cg = this->call_generator(next_receiver_method,
248 vtable_index, !call_does_dispatch, jvms,
249 allow_inline, prof_factor);
250 if (next_hit_cg != NULL && !next_hit_cg->is_inline() &&
251 have_major_receiver && UseOnlyInlinedBimorphic) {
252 // Skip if we can't inline second receiver's method
253 next_hit_cg = NULL;
254 }
255 }
256 }
257 CallGenerator* miss_cg;
258 Deoptimization::DeoptReason reason = (morphism == 2
259 ? Deoptimization::Reason_bimorphic
260 : Deoptimization::reason_class_check(speculative_receiver_type != NULL));
261 if ((morphism == 1 || (morphism == 2 && next_hit_cg != NULL)) &&
262 !too_many_traps_or_recompiles(caller, bci, reason)
263 ) {
264 // Generate uncommon trap for class check failure path
265 // in case of monomorphic or bimorphic virtual call site.
266 miss_cg = CallGenerator::for_uncommon_trap(callee, reason,
267 Deoptimization::Action_maybe_recompile);
268 } else {
269 // Generate virtual call for class check failure path
270 // in case of polymorphic virtual call site.
271 miss_cg = (IncrementalInlineVirtual ? CallGenerator::for_late_inline_virtual(callee, vtable_index, prof_factor)
272 : CallGenerator::for_virtual_call(callee, vtable_index));
273 }
274 if (miss_cg != NULL) {
275 if (next_hit_cg != NULL) {
276 assert(speculative_receiver_type == NULL, "shouldn't end up here if we used speculation");
277 trace_type_profile(C, jvms->method(), jvms->depth() - 1, jvms->bci(), next_receiver_method, profile.receiver(1), site_count, profile.receiver_count(1));
278 // We don't need to record dependency on a receiver here and below.
279 // Whenever we inline, the dependency is added by Parse::Parse().
280 miss_cg = CallGenerator::for_predicted_call(profile.receiver(1), miss_cg, next_hit_cg, PROB_MAX);
281 }
282 if (miss_cg != NULL) {
283 ciKlass* k = speculative_receiver_type != NULL ? speculative_receiver_type : profile.receiver(0);
284 trace_type_profile(C, jvms->method(), jvms->depth() - 1, jvms->bci(), receiver_method, k, site_count, receiver_count);
285 float hit_prob = speculative_receiver_type != NULL ? 1.0 : profile.receiver_prob(0);
286 CallGenerator* cg = CallGenerator::for_predicted_call(k, miss_cg, hit_cg, hit_prob);
287 if (cg != NULL) return cg;
288 }
289 }
290 }
291 }
292 }
293
294 // If there is only one implementor of this interface then we
295 // may be able to bind this invoke directly to the implementing
296 // klass but we need both a dependence on the single interface
297 // and on the method we bind to. Additionally since all we know
298 // about the receiver type is that it's supposed to implement the
299 // interface we have to insert a check that it's the class we
300 // expect. Interface types are not checked by the verifier so
301 // they are roughly equivalent to Object.
302 // The number of implementors for declared_interface is less or
303 // equal to the number of implementors for target->holder() so
304 // if number of implementors of target->holder() == 1 then
305 // number of implementors for decl_interface is 0 or 1. If
306 // it's 0 then no class implements decl_interface and there's
307 // no point in inlining.
308 if (call_does_dispatch && bytecode == Bytecodes::_invokeinterface) {
309 ciInstanceKlass* declared_interface =
310 caller->get_declared_method_holder_at_bci(bci)->as_instance_klass();
311 ciInstanceKlass* singleton = declared_interface->unique_implementor();
312
313 if (singleton != NULL) {
314 assert(singleton != declared_interface, "not a unique implementor");
315
316 ciMethod* cha_monomorphic_target =
317 callee->find_monomorphic_target(caller->holder(), declared_interface, singleton);
318
319 if (cha_monomorphic_target != NULL &&
320 cha_monomorphic_target->holder() != env()->Object_klass()) { // subtype check against Object is useless
321 ciKlass* holder = cha_monomorphic_target->holder();
322
323 // Try to inline the method found by CHA. Inlined method is guarded by the type check.
324 CallGenerator* hit_cg = call_generator(cha_monomorphic_target,
325 vtable_index, !call_does_dispatch, jvms, allow_inline, prof_factor);
326
327 // Deoptimize on type check fail. The interpreter will throw ICCE for us.
328 CallGenerator* miss_cg = CallGenerator::for_uncommon_trap(callee,
329 Deoptimization::Reason_class_check, Deoptimization::Action_none);
330
331 CallGenerator* cg = CallGenerator::for_guarded_call(holder, miss_cg, hit_cg);
332 if (hit_cg != NULL && cg != NULL) {
333 dependencies()->assert_unique_concrete_method(declared_interface, cha_monomorphic_target, declared_interface, callee);
334 return cg;
335 }
336 }
337 }
338 } // call_does_dispatch && bytecode == Bytecodes::_invokeinterface
339
340 // Nothing claimed the intrinsic, we go with straight-forward inlining
341 // for already discovered intrinsic.
342 if (allow_intrinsics && cg_intrinsic != NULL) {
343 assert(cg_intrinsic->does_virtual_dispatch(), "sanity");
344 return cg_intrinsic;
345 }
346 } // allow_inline
347
348 // There was no special inlining tactic, or it bailed out.
349 // Use a more generic tactic, like a simple call.
350 if (call_does_dispatch) {
351 const char* msg = "virtual call";
352 if (C->print_inlining()) {
353 print_inlining(callee, jvms->depth() - 1, jvms->bci(), msg);
354 }
355 C->log_inline_failure(msg);
356 if (IncrementalInlineVirtual && allow_inline) {
357 return CallGenerator::for_late_inline_virtual(callee, vtable_index, prof_factor); // attempt to inline through virtual call later
358 } else {
359 return CallGenerator::for_virtual_call(callee, vtable_index);
360 }
361 } else {
362 // Class Hierarchy Analysis or Type Profile reveals a unique target, or it is a static or special call.
363 CallGenerator* cg = CallGenerator::for_direct_call(callee, should_delay_inlining(callee, jvms));
364 // For optimized virtual calls assert at runtime that receiver object
365 // is a subtype of the method holder.
366 if (cg != NULL && is_virtual_or_interface && !callee->is_static()) {
367 CallGenerator* trap_cg = CallGenerator::for_uncommon_trap(callee,
368 Deoptimization::Reason_receiver_constraint, Deoptimization::Action_none);
369 cg = CallGenerator::for_guarded_call(callee->holder(), trap_cg, cg);
370 }
371 return cg;
372 }
373}
374
375// Return true for methods that shouldn't be inlined early so that
376// they are easier to analyze and optimize as intrinsics.
377bool Compile::should_delay_string_inlining(ciMethod* call_method, JVMState* jvms) {
378 if (has_stringbuilder()) {
379
380 if ((call_method->holder() == C->env()->StringBuilder_klass() ||
381 call_method->holder() == C->env()->StringBuffer_klass()) &&
382 (jvms->method()->holder() == C->env()->StringBuilder_klass() ||
383 jvms->method()->holder() == C->env()->StringBuffer_klass())) {
384 // Delay SB calls only when called from non-SB code
385 return false;
386 }
387
388 switch (call_method->intrinsic_id()) {
389 case vmIntrinsics::_StringBuilder_void:
390 case vmIntrinsics::_StringBuilder_int:
391 case vmIntrinsics::_StringBuilder_String:
392 case vmIntrinsics::_StringBuilder_append_char:
393 case vmIntrinsics::_StringBuilder_append_int:
394 case vmIntrinsics::_StringBuilder_append_String:
395 case vmIntrinsics::_StringBuilder_toString:
396 case vmIntrinsics::_StringBuffer_void:
397 case vmIntrinsics::_StringBuffer_int:
398 case vmIntrinsics::_StringBuffer_String:
399 case vmIntrinsics::_StringBuffer_append_char:
400 case vmIntrinsics::_StringBuffer_append_int:
401 case vmIntrinsics::_StringBuffer_append_String:
402 case vmIntrinsics::_StringBuffer_toString:
403 case vmIntrinsics::_Integer_toString:
404 return true;
405
406 case vmIntrinsics::_String_String:
407 {
408 Node* receiver = jvms->map()->in(jvms->argoff() + 1);
409 if (receiver->is_Proj() && receiver->in(0)->is_CallStaticJava()) {
410 CallStaticJavaNode* csj = receiver->in(0)->as_CallStaticJava();
411 ciMethod* m = csj->method();
412 if (m != NULL &&
413 (m->intrinsic_id() == vmIntrinsics::_StringBuffer_toString ||
414 m->intrinsic_id() == vmIntrinsics::_StringBuilder_toString))
415 // Delay String.<init>(new SB())
416 return true;
417 }
418 return false;
419 }
420
421 default:
422 return false;
423 }
424 }
425 return false;
426}
427
428bool Compile::should_delay_boxing_inlining(ciMethod* call_method, JVMState* jvms) {
429 if (eliminate_boxing() && call_method->is_boxing_method()) {
430 set_has_boxed_value(true);
431 return aggressive_unboxing();
432 }
433 return false;
434}
435
436bool Compile::should_delay_vector_inlining(ciMethod* call_method, JVMState* jvms) {
437 return EnableVectorSupport && call_method->is_vector_method();
438}
439
440bool Compile::should_delay_vector_reboxing_inlining(ciMethod* call_method, JVMState* jvms) {
441 return EnableVectorSupport && (call_method->intrinsic_id() == vmIntrinsics::_VectorRebox);
442}
443
444// uncommon-trap call-sites where callee is unloaded, uninitialized or will not link
445bool Parse::can_not_compile_call_site(ciMethod *dest_method, ciInstanceKlass* klass) {
446 // Additional inputs to consider...
447 // bc = bc()
448 // caller = method()
449 // iter().get_method_holder_index()
450 assert( dest_method->is_loaded(), "ciTypeFlow should not let us get here" );
451 // Interface classes can be loaded & linked and never get around to
452 // being initialized. Uncommon-trap for not-initialized static or
453 // v-calls. Let interface calls happen.
454 ciInstanceKlass* holder_klass = dest_method->holder();
455 if (!holder_klass->is_being_initialized() &&
456 !holder_klass->is_initialized() &&
457 !holder_klass->is_interface()) {
458 uncommon_trap(Deoptimization::Reason_uninitialized,
459 Deoptimization::Action_reinterpret,
460 holder_klass);
461 return true;
462 }
463
464 assert(dest_method->is_loaded(), "dest_method: typeflow responsibility");
465 return false;
466}
467
468#ifdef ASSERT
469static bool check_call_consistency(JVMState* jvms, CallGenerator* cg) {
470 ciMethod* symbolic_info = jvms->method()->get_method_at_bci(jvms->bci());
471 ciMethod* resolved_method = cg->method();
472 if (!ciMethod::is_consistent_info(symbolic_info, resolved_method)) {
473 tty->print_cr("JVMS:");
474 jvms->dump();
475 tty->print_cr("Bytecode info:");
476 jvms->method()->get_method_at_bci(jvms->bci())->print(); tty->cr();
477 tty->print_cr("Resolved method:");
478 cg->method()->print(); tty->cr();
479 return false;
480 }
481 return true;
482}
483#endif // ASSERT
484
485//------------------------------do_call----------------------------------------
486// Handle your basic call. Inline if we can & want to, else just setup call.
487void Parse::do_call() {
488 // It's likely we are going to add debug info soon.
489 // Also, if we inline a guy who eventually needs debug info for this JVMS,
490 // our contribution to it is cleaned up right here.
491 kill_dead_locals();
492
493 C->print_inlining_assert_ready();
494
495 // Set frequently used booleans
496 const bool is_virtual = bc() == Bytecodes::_invokevirtual;
497 const bool is_virtual_or_interface = is_virtual || bc() == Bytecodes::_invokeinterface;
498 const bool has_receiver = Bytecodes::has_receiver(bc());
499
500 // Find target being called
501 bool will_link;
502 ciSignature* declared_signature = NULL;
503 ciMethod* orig_callee = iter().get_method(will_link, &declared_signature); // callee in the bytecode
504 ciInstanceKlass* holder_klass = orig_callee->holder();
505 ciKlass* holder = iter().get_declared_method_holder();
506 ciInstanceKlass* klass = ciEnv::get_instance_klass_for_declared_method_holder(holder);
507 assert(declared_signature != NULL, "cannot be null");
508
509 // Bump max node limit for JSR292 users
510 if (bc() == Bytecodes::_invokedynamic || orig_callee->is_method_handle_intrinsic()) {
511 C->set_max_node_limit(3*MaxNodeLimit);
512 }
513
514 // uncommon-trap when callee is unloaded, uninitialized or will not link
515 // bailout when too many arguments for register representation
516 if (!will_link || can_not_compile_call_site(orig_callee, klass)) {
517 if (PrintOpto && (Verbose || WizardMode)) {
518 method()->print_name(); tty->print_cr(" can not compile call at bci %d to:", bci());
519 orig_callee->print_name(); tty->cr();
520 }
521 return;
522 }
523 assert(holder_klass->is_loaded(), "");
524 //assert((bc_callee->is_static() || is_invokedynamic) == !has_receiver , "must match bc"); // XXX invokehandle (cur_bc_raw)
525 // Note: this takes into account invokeinterface of methods declared in java/lang/Object,
526 // which should be invokevirtuals but according to the VM spec may be invokeinterfaces
527 assert(holder_klass->is_interface() || holder_klass->super() == NULL || (bc() != Bytecodes::_invokeinterface), "must match bc");
528 // Note: In the absence of miranda methods, an abstract class K can perform
529 // an invokevirtual directly on an interface method I.m if K implements I.
530
531 // orig_callee is the resolved callee which's signature includes the
532 // appendix argument.
533 const int nargs = orig_callee->arg_size();
534 const bool is_signature_polymorphic = MethodHandles::is_signature_polymorphic(orig_callee->intrinsic_id());
535
536 // Push appendix argument (MethodType, CallSite, etc.), if one.
537 if (iter().has_appendix()) {
538 ciObject* appendix_arg = iter().get_appendix();
539 const TypeOopPtr* appendix_arg_type = TypeOopPtr::make_from_constant(appendix_arg, /* require_const= */ true);
540 Node* appendix_arg_node = _gvn.makecon(appendix_arg_type);
541 push(appendix_arg_node);
542 }
543
544 // ---------------------
545 // Does Class Hierarchy Analysis reveal only a single target of a v-call?
546 // Then we may inline or make a static call, but become dependent on there being only 1 target.
547 // Does the call-site type profile reveal only one receiver?
548 // Then we may introduce a run-time check and inline on the path where it succeeds.
549 // The other path may uncommon_trap, check for another receiver, or do a v-call.
550
551 // Try to get the most accurate receiver type
552 ciMethod* callee = orig_callee;
553 int vtable_index = Method::invalid_vtable_index;
554 bool call_does_dispatch = false;
555
556 // Speculative type of the receiver if any
557 ciKlass* speculative_receiver_type = NULL;
558 if (is_virtual_or_interface) {
559 Node* receiver_node = stack(sp() - nargs);
560 const TypeOopPtr* receiver_type = _gvn.type(receiver_node)->isa_oopptr();
561 // call_does_dispatch and vtable_index are out-parameters. They might be changed.
562 // For arrays, klass below is Object. When vtable calls are used,
563 // resolving the call with Object would allow an illegal call to
564 // finalize() on an array. We use holder instead: illegal calls to
565 // finalize() won't be compiled as vtable calls (IC call
566 // resolution will catch the illegal call) and the few legal calls
567 // on array types won't be either.
568 callee = C->optimize_virtual_call(method(), klass, holder, orig_callee,
569 receiver_type, is_virtual,
570 call_does_dispatch, vtable_index); // out-parameters
571 speculative_receiver_type = receiver_type != NULL ? receiver_type->speculative_type() : NULL;
572 }
573
574 // Additional receiver subtype checks for interface calls via invokespecial or invokeinterface.
575 ciKlass* receiver_constraint = NULL;
576 if (iter().cur_bc_raw() == Bytecodes::_invokespecial && !orig_callee->is_object_initializer()) {
577 ciInstanceKlass* calling_klass = method()->holder();
578 ciInstanceKlass* sender_klass = calling_klass;
579 if (sender_klass->is_interface()) {
580 receiver_constraint = sender_klass;
581 }
582 } else if (iter().cur_bc_raw() == Bytecodes::_invokeinterface && orig_callee->is_private()) {
583 assert(holder->is_interface(), "How did we get a non-interface method here!");
584 receiver_constraint = holder;
585 }
586
587 if (receiver_constraint != NULL) {
588 Node* receiver_node = stack(sp() - nargs);
589 Node* cls_node = makecon(TypeKlassPtr::make(receiver_constraint));
590 Node* bad_type_ctrl = NULL;
591 Node* casted_receiver = gen_checkcast(receiver_node, cls_node, &bad_type_ctrl);
592 if (bad_type_ctrl != NULL) {
593 PreserveJVMState pjvms(this);
594 set_control(bad_type_ctrl);
595 uncommon_trap(Deoptimization::Reason_class_check,
596 Deoptimization::Action_none);
597 }
598 if (stopped()) {
599 return; // MUST uncommon-trap?
600 }
601 set_stack(sp() - nargs, casted_receiver);
602 }
603
604 // Note: It's OK to try to inline a virtual call.
605 // The call generator will not attempt to inline a polymorphic call
606 // unless it knows how to optimize the receiver dispatch.
607 bool try_inline = (C->do_inlining() || InlineAccessors);
608
609 // ---------------------
610 dec_sp(nargs); // Temporarily pop args for JVM state of call
611 JVMState* jvms = sync_jvms();
612
613 // ---------------------
614 // Decide call tactic.
615 // This call checks with CHA, the interpreter profile, intrinsics table, etc.
616 // It decides whether inlining is desirable or not.
617 CallGenerator* cg = C->call_generator(callee, vtable_index, call_does_dispatch, jvms, try_inline, prof_factor(), speculative_receiver_type);
618
619 // NOTE: Don't use orig_callee and callee after this point! Use cg->method() instead.
620 orig_callee = callee = NULL;
621
622 // ---------------------
623 // Round double arguments before call
624 round_double_arguments(cg->method());
625
626 // Feed profiling data for arguments to the type system so it can
627 // propagate it as speculative types
628 record_profiled_arguments_for_speculation(cg->method(), bc());
629
630#ifndef PRODUCT
631 // bump global counters for calls
632 count_compiled_calls(/*at_method_entry*/ false, cg->is_inline());
633
634 // Record first part of parsing work for this call
635 parse_histogram()->record_change();
636#endif // not PRODUCT
637
638 assert(jvms == this->jvms(), "still operating on the right JVMS");
639 assert(jvms_in_sync(), "jvms must carry full info into CG");
640
641 // save across call, for a subsequent cast_not_null.
642 Node* receiver = has_receiver ? argument(0) : NULL;
643
644 // The extra CheckCastPPs for speculative types mess with PhaseStringOpts
645 if (receiver != NULL && !call_does_dispatch && !cg->is_string_late_inline()) {
646 // Feed profiling data for a single receiver to the type system so
647 // it can propagate it as a speculative type
648 receiver = record_profiled_receiver_for_speculation(receiver);
649 }
650
651 JVMState* new_jvms = cg->generate(jvms);
652 if (new_jvms == NULL) {
653 // When inlining attempt fails (e.g., too many arguments),
654 // it may contaminate the current compile state, making it
655 // impossible to pull back and try again. Once we call
656 // cg->generate(), we are committed. If it fails, the whole
657 // compilation task is compromised.
658 if (failing()) return;
659
660 // This can happen if a library intrinsic is available, but refuses
661 // the call site, perhaps because it did not match a pattern the
662 // intrinsic was expecting to optimize. Should always be possible to
663 // get a normal java call that may inline in that case
664 cg = C->call_generator(cg->method(), vtable_index, call_does_dispatch, jvms, try_inline, prof_factor(), speculative_receiver_type, /* allow_intrinsics= */ false);
665 new_jvms = cg->generate(jvms);
666 if (new_jvms == NULL) {
667 guarantee(failing(), "call failed to generate: calls should work");
668 return;
669 }
670 }
671
672 if (cg->is_inline()) {
673 // Accumulate has_loops estimate
674 C->env()->notice_inlined_method(cg->method());
675 }
676
677 // Reset parser state from [new_]jvms, which now carries results of the call.
678 // Return value (if any) is already pushed on the stack by the cg.
679 add_exception_states_from(new_jvms);
680 if (new_jvms->map()->control() == top()) {
681 stop_and_kill_map();
682 } else {
683 assert(new_jvms->same_calls_as(jvms), "method/bci left unchanged");
684 set_jvms(new_jvms);
685 }
686
687 assert(check_call_consistency(jvms, cg), "inconsistent info");
688
689 if (!stopped()) {
690 // This was some sort of virtual call, which did a null check for us.
691 // Now we can assert receiver-not-null, on the normal return path.
692 if (receiver != NULL && cg->is_virtual()) {
693 Node* cast = cast_not_null(receiver);
Value stored to 'cast' during its initialization is never read
694 // %%% assert(receiver == cast, "should already have cast the receiver");
695 }
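Editor's note: a hedged sketch of one way this dead initialization could be addressed, assuming cast_not_null() is kept purely for its side effect of updating the map; this is an illustration, not the fix adopted in the JDK:
      // possible shape of the guarded block without the dead local:
      if (receiver != NULL && cg->is_virtual()) {
        (void) cast_not_null(receiver);   // discard the unused result explicitly
      }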
696
697 ciType* rtype = cg->method()->return_type();
698 ciType* ctype = declared_signature->return_type();
699
700 if (Bytecodes::has_optional_appendix(iter().cur_bc_raw()) || is_signature_polymorphic) {
701 // Be careful here with return types.
702 if (ctype != rtype) {
703 BasicType rt = rtype->basic_type();
704 BasicType ct = ctype->basic_type();
705 if (ct == T_VOID) {
706 // It's OK for a method to return a value that is discarded.
707 // The discarding does not require any special action from the caller.
708 // The Java code knows this, at VerifyType.isNullConversion.
709 pop_node(rt); // whatever it was, pop it
710 } else if (rt == T_INT || is_subword_type(rt)) {
711 // Nothing. These cases are handled in lambda form bytecode.
712 assert(ct == T_INT || is_subword_type(ct), "must match: rt=%s, ct=%s", type2name(rt), type2name(ct));
713 } else if (is_reference_type(rt)) {
714 assert(is_reference_type(ct), "rt=%s, ct=%s", type2name(rt), type2name(ct));
715 if (ctype->is_loaded()) {
716 const TypeOopPtr* arg_type = TypeOopPtr::make_from_klass(rtype->as_klass());
717 const Type* sig_type = TypeOopPtr::make_from_klass(ctype->as_klass());
718 if (arg_type != NULL && !arg_type->higher_equal(sig_type)) {
719 Node* retnode = pop();
720 Node* cast_obj = _gvn.transform(new CheckCastPPNode(control(), retnode, sig_type));
721 push(cast_obj);
722 }
723 }
724 } else {
725 assert(rt == ct, "unexpected mismatch: rt=%s, ct=%s", type2name(rt), type2name(ct));
726 // push a zero; it's better than getting an oop/int mismatch
727 pop_node(rt);
728 Node* retnode = zerocon(ct);
729 push_node(ct, retnode);
730 }
731 // Now that the value is well-behaved, continue with the call-site type.
732 rtype = ctype;
733 }
734 } else {
735 // Symbolic resolution enforces the types to be the same.
736 // NOTE: We must relax the assert for unloaded types because two
737 // different ciType instances of the same unloaded class type
738 // can appear to be "loaded" by different loaders (depending on
739 // the accessing class).
740 assert(!rtype->is_loaded() || !ctype->is_loaded() || rtype == ctype,
741 "mismatched return types: rtype=%s, ctype=%s", rtype->name(), ctype->name());
742 }
743
744 // If the return type of the method is not loaded, assert that the
745 // value we got is a null. Otherwise, we need to recompile.
746 if (!rtype->is_loaded()) {
747 if (PrintOpto && (Verbose || WizardMode)) {
748 method()->print_name(); tty->print_cr(" asserting nullness of result at bci: %d", bci());
749 cg->method()->print_name(); tty->cr();
750 }
751 if (C->log() != NULL) {
752 C->log()->elem("assert_null reason='return' klass='%d'",
753 C->log()->identify(rtype));
754 }
755 // If there is going to be a trap, put it at the next bytecode:
756 set_bci(iter().next_bci());
757 null_assert(peek());
758 set_bci(iter().cur_bci()); // put it back
759 }
760 BasicType ct = ctype->basic_type();
761 if (is_reference_type(ct)) {
762 record_profiled_return_for_speculation();
763 }
764 }
765
766 // Restart record of parsing work after possible inlining of call
767#ifndef PRODUCT
768 parse_histogram()->set_initial_state(bc());
769#endif
770}
771
772//---------------------------catch_call_exceptions-----------------------------
773// Put a Catch and CatchProj nodes behind a just-created call.
774// Send their caught exceptions to the proper handler.
775// This may be used after a call to the rethrow VM stub,
776// when it is needed to process unloaded exception classes.
777void Parse::catch_call_exceptions(ciExceptionHandlerStream& handlers) {
778 // Exceptions are delivered through this channel:
779 Node* i_o = this->i_o();
780
781 // Add a CatchNode.
782 GrowableArray<int>* bcis = new (C->node_arena()) GrowableArray<int>(C->node_arena(), 8, 0, -1);
783 GrowableArray<const Type*>* extypes = new (C->node_arena()) GrowableArray<const Type*>(C->node_arena(), 8, 0, NULL);
784 GrowableArray<int>* saw_unloaded = new (C->node_arena()) GrowableArray<int>(C->node_arena(), 8, 0, 0);
785
786 bool default_handler = false;
787 for (; !handlers.is_done(); handlers.next()) {
788 ciExceptionHandler* h = handlers.handler();
789 int h_bci = h->handler_bci();
790 ciInstanceKlass* h_klass = h->is_catch_all() ? env()->Throwable_klass() : h->catch_klass();
791 // Do not introduce unloaded exception types into the graph:
792 if (!h_klass->is_loaded()) {
793 if (saw_unloaded->contains(h_bci)) {
794 /* We've already seen an unloaded exception with h_bci,
795 so don't duplicate. Duplication will cause the CatchNode to be
796 unnecessarily large. See 4713716. */
797 continue;
798 } else {
799 saw_unloaded->append(h_bci);
800 }
801 }
802 const Type* h_extype = TypeOopPtr::make_from_klass(h_klass);
803 // (We use make_from_klass because it respects UseUniqueSubclasses.)
804 h_extype = h_extype->join(TypeInstPtr::NOTNULL);
805 assert(!h_extype->empty(), "sanity");
806 // Note: It's OK if the BCIs repeat themselves.
807 bcis->append(h_bci);
808 extypes->append(h_extype);
809 if (h_bci == -1) {
810 default_handler = true;
811 }
812 }
813
814 if (!default_handler) {
815 bcis->append(-1);
816 extypes->append(TypeOopPtr::make_from_klass(env()->Throwable_klass())->is_instptr());
817 }
818
819 int len = bcis->length();
820 CatchNode *cn = new CatchNode(control(), i_o, len+1);
821 Node *catch_ = _gvn.transform(cn);
822
823 // now branch with the exception state to each of the (potential)
824 // handlers
825 for(int i=0; i < len; i++) {
826 // Setup JVM state to enter the handler.
827 PreserveJVMState pjvms(this);
828 // Locals are just copied from before the call.
829 // Get control from the CatchNode.
830 int handler_bci = bcis->at(i);
831 Node* ctrl = _gvn.transform( new CatchProjNode(catch_, i+1,handler_bci));
832 // This handler cannot happen?
833 if (ctrl == top()) continue;
834 set_control(ctrl);
835
836 // Create exception oop
837 const TypeInstPtr* extype = extypes->at(i)->is_instptr();
838 Node *ex_oop = _gvn.transform(new CreateExNode(extypes->at(i), ctrl, i_o));
839
840 // Handle unloaded exception classes.
841 if (saw_unloaded->contains(handler_bci)) {
842 // An unloaded exception type is coming here. Do an uncommon trap.
843#ifndef PRODUCT
844 // We do not expect the same handler bci to take both cold unloaded
845 // and hot loaded exceptions. But, watch for it.
846 if ((Verbose || WizardMode) && extype->is_loaded()) {
847 tty->print("Warning: Handler @%d takes mixed loaded/unloaded exceptions in ", bci());
848 method()->print_name(); tty->cr();
849 } else if (PrintOpto && (Verbose || WizardMode)) {
850 tty->print("Bailing out on unloaded exception type ");
851 extype->klass()->print_name();
852 tty->print(" at bci:%d in ", bci());
853 method()->print_name(); tty->cr();
854 }
855#endif
856 // Emit an uncommon trap instead of processing the block.
857 set_bci(handler_bci);
858 push_ex_oop(ex_oop);
859 uncommon_trap(Deoptimization::Reason_unloaded,
860 Deoptimization::Action_reinterpret,
861 extype->klass(), "!loaded exception");
862 set_bci(iter().cur_bci()); // put it back
863 continue;
864 }
865
866 // go to the exception handler
867 if (handler_bci < 0) { // merge with corresponding rethrow node
868 throw_to_exit(make_exception_state(ex_oop));
869 } else { // Else jump to corresponding handle
870 push_ex_oop(ex_oop); // Clear stack and push just the oop.
871 merge_exception(handler_bci);
872 }
873 }
874
875 // The first CatchProj is for the normal return.
876 // (Note: If this is a call to rethrow_Java, this node goes dead.)
877 set_control(_gvn.transform( new CatchProjNode(catch_, CatchProjNode::fall_through_index, CatchProjNode::no_handler_bci)));
878}
879
880
881//----------------------------catch_inline_exceptions--------------------------
882// Handle all exceptions thrown by an inlined method or individual bytecode.
883// Common case 1: we have no handler, so all exceptions merge right into
884// the rethrow case.
885// Case 2: we have some handlers, with loaded exception klasses that have
886// no subklasses. We do a Deutsch-Shiffman style type-check on the incoming
887// exception oop and branch to the handler directly.
888// Case 3: We have some handlers with subklasses or are not loaded at
889// compile-time. We have to call the runtime to resolve the exception.
890// So we insert a RethrowCall and all the logic that goes with it.
891void Parse::catch_inline_exceptions(SafePointNode* ex_map) {
892 // Caller is responsible for saving away the map for normal control flow!
893 assert(stopped(), "call set_map(NULL) first");
894 assert(method()->has_exception_handlers(), "don't come here w/o work to do");
895
896 Node* ex_node = saved_ex_oop(ex_map);
897 if (ex_node == top()) {
898 // No action needed.
899 return;
900 }
901 const TypeInstPtr* ex_type = _gvn.type(ex_node)->isa_instptr();
902 NOT_PRODUCT(if (ex_type==NULL) tty->print_cr("*** Exception not InstPtr"));
903 if (ex_type == NULL)
904 ex_type = TypeOopPtr::make_from_klass(env()->Throwable_klass())->is_instptr();
905
906 // determine potential exception handlers
907 ciExceptionHandlerStream handlers(method(), bci(),
908 ex_type->klass()->as_instance_klass(),
909 ex_type->klass_is_exact());
910
911 // Start executing from the given throw state. (Keep its stack, for now.)
912 // Get the exception oop as known at compile time.
913 ex_node = use_exception_state(ex_map);
914
915 // Get the exception oop klass from its header
916 Node* ex_klass_node = NULL;
917 if (has_ex_handler() && !ex_type->klass_is_exact()) {
918 Node* p = basic_plus_adr( ex_node, ex_node, oopDesc::klass_offset_in_bytes());
919 ex_klass_node = _gvn.transform(LoadKlassNode::make(_gvn, NULL, immutable_memory(), p, TypeInstPtr::KLASS, TypeInstKlassPtr::OBJECT));
920
921 // Compute the exception klass a little more cleverly.
922 // Obvious solution is to simple do a LoadKlass from the 'ex_node'.
923 // However, if the ex_node is a PhiNode, I'm going to do a LoadKlass for
924 // each arm of the Phi. If I know something clever about the exceptions
925 // I'm loading the class from, I can replace the LoadKlass with the
926 // klass constant for the exception oop.
927 if (ex_node->is_Phi()) {
928 ex_klass_node = new PhiNode(ex_node->in(0), TypeInstKlassPtr::OBJECT);
929 for (uint i = 1; i < ex_node->req(); i++) {
930 Node* ex_in = ex_node->in(i);
931 if (ex_in == top() || ex_in == NULL) {
932 // This path was not taken.
933 ex_klass_node->init_req(i, top());
934 continue;
935 }
936 Node* p = basic_plus_adr(ex_in, ex_in, oopDesc::klass_offset_in_bytes());
937 Node* k = _gvn.transform( LoadKlassNode::make(_gvn, NULL, immutable_memory(), p, TypeInstPtr::KLASS, TypeInstKlassPtr::OBJECT));
938 ex_klass_node->init_req( i, k );
939 }
940 _gvn.set_type(ex_klass_node, TypeInstKlassPtr::OBJECT);
941
942 }
943 }
944
945 // Scan the exception table for applicable handlers.
946 // If none, we can call rethrow() and be done!
947 // If precise (loaded with no subklasses), insert a D.S. style
948 // pointer compare to the correct handler and loop back.
949 // If imprecise, switch to the Rethrow VM-call style handling.
950
951 int remaining = handlers.count_remaining();
952
953 // iterate through all entries sequentially
954 for (;!handlers.is_done(); handlers.next()) {
955 ciExceptionHandler* handler = handlers.handler();
956
957 if (handler->is_rethrow()) {
958 // If we fell off the end of the table without finding an imprecise
959 // exception klass (and without finding a generic handler) then we
960 // know this exception is not handled in this method. We just rethrow
961 // the exception into the caller.
962 throw_to_exit(make_exception_state(ex_node));
963 return;
964 }
965
966 // exception handler bci range covers throw_bci => investigate further
967 int handler_bci = handler->handler_bci();
968
969 if (remaining == 1) {
970 push_ex_oop(ex_node); // Push exception oop for handler
971 if (PrintOpto && WizardMode) {
972 tty->print_cr(" Catching every inline exception bci:%d -> handler_bci:%d", bci(), handler_bci);
973 }
974 merge_exception(handler_bci); // jump to handler
975 return; // No more handling to be done here!
976 }
977
978 // Get the handler's klass
979 ciInstanceKlass* klass = handler->catch_klass();
980
981 if (!klass->is_loaded()) { // klass is not loaded?
982 // fall through into catch_call_exceptions which will emit a
983 // handler with an uncommon trap.
984 break;
985 }
986
987 if (klass->is_interface()) // should not happen, but...
988 break; // bail out
989
990 // Check the type of the exception against the catch type
991 const TypeKlassPtr *tk = TypeKlassPtr::make(klass);
992 Node* con = _gvn.makecon(tk);
993 Node* not_subtype_ctrl = gen_subtype_check(ex_klass_node, con);
994 if (!stopped()) {
995 PreserveJVMState pjvms(this);
996 const TypeInstPtr* tinst = TypeOopPtr::make_from_klass_unique(klass)->cast_to_ptr_type(TypePtr::NotNull)->is_instptr();
997 assert(klass->has_subklass() || tinst->klass_is_exact(), "lost exactness");
998 Node* ex_oop = _gvn.transform(new CheckCastPPNode(control(), ex_node, tinst));
999 push_ex_oop(ex_oop); // Push exception oop for handler
1000 if (PrintOpto && WizardMode) {
1001 tty->print(" Catching inline exception bci:%d -> handler_bci:%d -- ", bci(), handler_bci);
1002 klass->print_name();
1003 tty->cr();
1004 }
1005 merge_exception(handler_bci);
1006 }
1007 set_control(not_subtype_ctrl);
1008
1009 // Come here if exception does not match handler.
1010 // Carry on with more handler checks.
1011 --remaining;
1012 }
1013
1014 assert(!stopped(), "you should return if you finish the chain");
1015
1016 // Oops, need to call into the VM to resolve the klasses at runtime.
1017 // Note: This call must not deoptimize, since it is not a real at this bci!
1018 kill_dead_locals();
1019
1020 make_runtime_call(RC_NO_LEAF | RC_MUST_THROW,
1021 OptoRuntime::rethrow_Type(),
1022 OptoRuntime::rethrow_stub(),
1023 NULL, NULL,
1024 ex_node);
1025
1026 // Rethrow is a pure call, no side effects, only a result.
1027 // The result cannot be allocated, so we use I_O
1028
1029 // Catch exceptions from the rethrow
1030 catch_call_exceptions(handlers);
1031}
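
The loop above walks the exception table in order: a rethrow entry means the exception escapes to the caller, the last remaining handler catches unconditionally, and every other entry is guarded by a subtype check on the exception klass before jumping to its handler bci. Below is a minimal, self-contained C++ sketch of that matching order (not part of doCall.cpp or HotSpot; the Ex, Handler and dispatch names are invented for illustration, with dynamic_cast standing in for gen_subtype_check):

// Editor's sketch, assuming hypothetical types rather than the real ci* classes.
#include <cstdio>
#include <vector>

struct Ex           { virtual ~Ex() {} };
struct IoEx  : Ex   {};
struct EofEx : IoEx {};

struct Handler {
  bool is_rethrow;               // "fell off the table" entry: not handled here
  bool (*matches)(const Ex&);    // stand-in for the subtype check on the catch type
  int  handler_bci;
};

// Mirrors the loop: rethrow entry -> caller handles it; the last remaining
// handler catches every exception; otherwise test the catch type and fall
// through to the next entry on a mismatch.
int dispatch(const std::vector<Handler>& handlers, const Ex& ex) {
  int remaining = (int)handlers.size();
  for (const Handler& h : handlers) {
    if (h.is_rethrow)   return -1;            // rethrow into the caller
    if (remaining == 1) return h.handler_bci; // catches unconditionally
    if (h.matches(ex))  return h.handler_bci; // exception is a subtype of the catch type
    --remaining;                              // carry on with more handler checks
  }
  return -1;
}

int main() {
  std::vector<Handler> table = {
    { false, [](const Ex& e) { return dynamic_cast<const EofEx*>(&e) != nullptr; }, 10 },
    { false, [](const Ex& e) { return dynamic_cast<const IoEx*>(&e)  != nullptr; }, 20 },
    { true,  nullptr, -1 },
  };
  printf("EofEx -> bci %d\n", dispatch(table, EofEx())); // 10
  printf("IoEx  -> bci %d\n", dispatch(table, IoEx()));  // 20
  printf("Ex    -> bci %d\n", dispatch(table, Ex()));    // -1 (rethrown)
  return 0;
}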
1032
1033
1034// (Note: Moved add_debug_info into GraphKit::add_safepoint_edges.)
1035
1036
1037#ifndef PRODUCT
1038void Parse::count_compiled_calls(bool at_method_entry, bool is_inline) {
1039 if( CountCompiledCalls ) {
1040 if( at_method_entry ) {
1041 // bump invocation counter if top method (for statistics)
1042 if (CountCompiledCalls && depth() == 1) {
1043 const TypePtr* addr_type = TypeMetadataPtr::make(method());
1044 Node* adr1 = makecon(addr_type);
1045 Node* adr2 = basic_plus_adr(adr1, adr1, in_bytes(Method::compiled_invocation_counter_offset()));
1046 increment_counter(adr2);
1047 }
1048 } else if (is_inline) {
1049 switch (bc()) {
1050 case Bytecodes::_invokevirtual: increment_counter(SharedRuntime::nof_inlined_calls_addr()); break;
1051 case Bytecodes::_invokeinterface: increment_counter(SharedRuntime::nof_inlined_interface_calls_addr()); break;
1052 case Bytecodes::_invokestatic:
1053 case Bytecodes::_invokedynamic:
1054 case Bytecodes::_invokespecial: increment_counter(SharedRuntime::nof_inlined_static_calls_addr()); break;
1055 default: fatal("unexpected call bytecode");
1056 }
1057 } else {
1058 switch (bc()) {
1059 case Bytecodes::_invokevirtual: increment_counter(SharedRuntime::nof_normal_calls_addr()); break;
1060 case Bytecodes::_invokeinterface: increment_counter(SharedRuntime::nof_interface_calls_addr()); break;
1061 case Bytecodes::_invokestatic:
1062 case Bytecodes::_invokedynamic:
1063 case Bytecodes::_invokespecial: increment_counter(SharedRuntime::nof_static_calls_addr()); break;
1064 default: fatal("unexpected call bytecode");
1065 }
1066 }
1067 }
1068}
1069#endif //PRODUCT
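
At run time, the IR that count_compiled_calls builds for the method-entry case (makecon of the Method*, basic_plus_adr with the counter offset, increment_counter) amounts to a read-modify-write at a fixed byte offset inside a metadata object. A rough stand-alone sketch of that lowering follows; FakeMethod and increment_counter_at are hypothetical names, not HotSpot code:

// Editor's sketch of the pointer arithmetic, under the assumptions above.
#include <cstddef>
#include <cstdint>
#include <cstdio>

struct FakeMethod {                 // hypothetical stand-in for Method*
  char     pad[32];
  uint64_t compiled_invocation_counter;
};

// makecon(method) + basic_plus_adr(base, offset) + increment_counter(adr)
// roughly lowers to this: compute base + offset, load, add one, store back.
static void increment_counter_at(void* base, ptrdiff_t offset_in_bytes) {
  uint64_t* counter = reinterpret_cast<uint64_t*>(
      static_cast<char*>(base) + offset_in_bytes);
  *counter += 1;
}

int main() {
  FakeMethod m = {};
  increment_counter_at(&m, offsetof(FakeMethod, compiled_invocation_counter));
  printf("counter = %llu\n", (unsigned long long)m.compiled_invocation_counter);
  return 0;
}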
1070
1071
1072ciMethod* Compile::optimize_virtual_call(ciMethod* caller, ciInstanceKlass* klass,
1073 ciKlass* holder, ciMethod* callee,
1074 const TypeOopPtr* receiver_type, bool is_virtual,
1075 bool& call_does_dispatch, int& vtable_index,
1076 bool check_access) {
1077 // Set default values for out-parameters.
1078 call_does_dispatch = true;
1079 vtable_index = Method::invalid_vtable_index;
1080
1081 // Choose call strategy.
1082 ciMethod* optimized_virtual_method = optimize_inlining(caller, klass, holder, callee,
1083 receiver_type, check_access);
1084
1085 // Has the call been sufficiently improved such that it is no longer a virtual call?
1086 if (optimized_virtual_method != NULL) {
1087 callee = optimized_virtual_method;
1088 call_does_dispatch = false;
1089 } else if (!UseInlineCaches && is_virtual && callee->is_loaded()) {
1090 // We can make a vtable call at this site
1091 vtable_index = callee->resolve_vtable_index(caller->holder(), holder);
1092 }
1093 return callee;
1094}
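
A hypothetical caller-side sketch of the out-parameter protocol used by optimize_virtual_call: when the callee is devirtualized, call_does_dispatch is cleared and a direct call can be emitted; otherwise a resolved vtable index (if any) selects a vtable call, and failing that an inline cache is used. The choose_call_kind function and its inputs below are invented for illustration only:

// Editor's sketch, not HotSpot code.
#include <cstdio>

static const char* choose_call_kind(bool devirtualized, int resolved_vtable_index,
                                    bool& call_does_dispatch, int& vtable_index) {
  call_does_dispatch = true;        // defaults, as in optimize_virtual_call
  vtable_index       = -1;          // stand-in for Method::invalid_vtable_index
  if (devirtualized) {
    call_does_dispatch = false;     // bind statically: emit a direct call
    return "direct";
  }
  if (resolved_vtable_index >= 0) {
    vtable_index = resolved_vtable_index;
    return "vtable";                // dispatch through a known vtable slot
  }
  return "inline-cache";            // fall back to an inline cache
}

int main() {
  bool dispatch; int index;
  printf("%s\n", choose_call_kind(true,  -1, dispatch, index)); // direct
  printf("%s\n", choose_call_kind(false,  7, dispatch, index)); // vtable
  printf("%s\n", choose_call_kind(false, -1, dispatch, index)); // inline-cache
  return 0;
}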
1095
1096// Identify possible target method and inlining style
1097ciMethod* Compile::optimize_inlining(ciMethod* caller, ciInstanceKlass* klass, ciKlass* holder,
1098 ciMethod* callee, const TypeOopPtr* receiver_type,
1099 bool check_access) {
1100 // only use for virtual or interface calls
1101
1102 // If it is obviously final, do not bother to call find_monomorphic_target,
1103 // because the class hierarchy checks are not needed, and may fail due to
1104 // incompletely loaded classes. Since we do our own class loading checks
1105 // in this module, we may confidently bind to any method.
1106 if (callee->can_be_statically_bound()) {
1107 return callee;
1108 }
1109
1110 if (receiver_type == NULL) {
1111 return NULL; // no receiver type info
1112 }
1113
1114 // Attempt to improve the receiver
1115 bool actual_receiver_is_exact = false;
1116 ciInstanceKlass* actual_receiver = klass;
1117 // Array methods are all inherited from Object, and are monomorphic.
1118 // finalize() call on array is not allowed.
1119 if (receiver_type->isa_aryptr() &&
1120 callee->holder() == env()->Object_klass() &&
1121 callee->name() != ciSymbols::finalize_method_name()) {
1122 return callee;
1123 }
1124
1125 // All other interesting cases are instance klasses.
1126 if (!receiver_type->isa_instptr()) {
1127 return NULL;
1128 }
1129
1130 ciInstanceKlass* receiver_klass = receiver_type->klass()->as_instance_klass();
1131 if (receiver_klass->is_loaded() && receiver_klass->is_initialized() && !receiver_klass->is_interface() &&
1132 (receiver_klass == actual_receiver || receiver_klass->is_subtype_of(actual_receiver))) {
1133 // receiver_klass is the same or a better type than the original actual_receiver,
1134 // e.g. static receiver from bytecodes.
1135 actual_receiver = receiver_klass;
1136 // Is the actual_receiver exact?
1137 actual_receiver_is_exact = receiver_type->klass_is_exact();
1138 }
1139
1140 ciInstanceKlass* calling_klass = caller->holder();
1141 ciMethod* cha_monomorphic_target = callee->find_monomorphic_target(calling_klass, klass, actual_receiver, check_access);
1142
1143 if (cha_monomorphic_target != NULL) {
1144 // Hardwiring a virtual.
1145 assert(!callee->can_be_statically_bound(), "should have been handled earlier");
1146 assert(!cha_monomorphic_target->is_abstract(), "");
1147 if (!cha_monomorphic_target->can_be_statically_bound(actual_receiver)) {
1148 // If we inlined because CHA revealed only a single target method,
1149 // then we are dependent on that target method not getting overridden
1150 // by dynamic class loading. Be sure to test the "static" receiver
1151 // callee here, as opposed to the actual receiver, which may
1152 // falsely lead us to believe that the receiver is final or private.
1153 dependencies()->assert_unique_concrete_method(actual_receiver, cha_monomorphic_target, holder, callee);
1154 }
1155 return cha_monomorphic_target;
1156 }
1157
1158 // If the type is exact, we can still bind the method w/o a vcall.
1159 // (This case comes after CHA so we can see how much extra work it does.)
1160 if (actual_receiver_is_exact) {
1161 // In case of evolution, there is a dependence on every inlined method, since each
1162 // such method can be changed when its class is redefined.
1163 ciMethod* exact_method = callee->resolve_invoke(calling_klass, actual_receiver);
1164 if (exact_method != NULL) {
1165 return exact_method;
1166 }
1167 }
1168
1169 return NULL;
1170}
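
To summarize the precedence of the checks in optimize_inlining, here is a simplified, self-contained sketch that reduces each query to a boolean input. The classify function and its parameters are hypothetical stand-ins, not the real ci* API, and the real code additionally records a CHA dependency via assert_unique_concrete_method when it hardwires a unique target:

// Editor's sketch of the decision order, under the assumptions above.
#include <cstdio>

enum class Binding { None, Static, ArrayObjectMethod, ChaUniqueTarget, ExactReceiver };

static Binding classify(bool statically_bindable,
                        bool have_receiver_type,
                        bool array_receiver_object_method,   // and not finalize()
                        bool receiver_is_instance,
                        bool cha_found_unique_target,
                        bool receiver_type_is_exact) {
  if (statically_bindable)          return Binding::Static;            // final/private: bind directly
  if (!have_receiver_type)          return Binding::None;              // nothing to improve
  if (array_receiver_object_method) return Binding::ArrayObjectMethod; // array calls are monomorphic
  if (!receiver_is_instance)        return Binding::None;
  if (cha_found_unique_target)      return Binding::ChaUniqueTarget;   // needs a CHA dependency
  if (receiver_type_is_exact)       return Binding::ExactReceiver;     // resolve_invoke on exact type
  return Binding::None;                                                // keep the virtual call
}

int main() {
  // e.g. a virtual call where class hierarchy analysis sees a single implementor:
  Binding b = classify(false, true, false, true, true, false);
  printf("%d\n", b == Binding::ChaUniqueTarget ? 1 : 0);  // prints 1
  return 0;
}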