Bug Summary

File: jdk/src/hotspot/share/opto/library_call.cpp
Warning: line 5322, column 11
Value stored to 'call' during its initialization is never read
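
This diagnostic comes from the analyzer's deadcode checkers (enabled below via -analyzer-checker=deadcode): a local variable's initializer is computed and stored, but that value is overwritten or discarded before it is ever read. Line 5322 itself is outside the excerpt reproduced below, so the snippet that follows is only a minimal, self-contained sketch of the pattern and its usual fix; the names are illustrative placeholders, not the actual HotSpot code.

    // Hypothetical illustration of the "dead store during initialization" pattern;
    // the names are placeholders, not the HotSpot code at line 5322.
    static int make_call()  { return 1; }
    static int other_call() { return 2; }

    int flagged_pattern() {
      int call = make_call();   // "Value stored to 'call' during its initialization is never read"
      call = other_call();      // ... because it is overwritten before any use
      return call;
    }

    int fixed_pattern() {
      int call = other_call();  // initialize with the value that is actually used
      return call;
    }

The usual remedies are to drop the unused initialization, fold the two assignments into one, or actually use the initial value; which applies at line 5322 depends on the surrounding code, which is not shown in this excerpt.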

Annotated Source Code


clang -cc1 -triple x86_64-pc-linux-gnu -analyze -disable-free -disable-llvm-verifier -discard-value-names -main-file-name library_call.cpp -analyzer-store=region -analyzer-opt-analyze-nested-blocks -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -mrelocation-model pic -pic-level 2 -mthread-model posix -fno-delete-null-pointer-checks -mframe-pointer=all -relaxed-aliasing -fmath-errno -fno-rounding-math -masm-verbose -mconstructor-aliases -munwind-tables -target-cpu x86-64 -dwarf-column-info -fno-split-dwarf-inlining -debugger-tuning=gdb -resource-dir /usr/lib/llvm-10/lib/clang/10.0.0 -I /home/daniel/Projects/java/jdk/build/linux-x86_64-server-fastdebug/hotspot/variant-server/libjvm/objs/precompiled -D __STDC_FORMAT_MACROS -D __STDC_LIMIT_MACROS -D __STDC_CONSTANT_MACROS -D _GNU_SOURCE -D _REENTRANT -D LIBC=gnu -D LINUX -D VM_LITTLE_ENDIAN -D _LP64=1 -D ASSERT -D CHECK_UNHANDLED_OOPS -D TARGET_ARCH_x86 -D INCLUDE_SUFFIX_OS=_linux -D INCLUDE_SUFFIX_CPU=_x86 -D INCLUDE_SUFFIX_COMPILER=_gcc -D TARGET_COMPILER_gcc -D AMD64 -D HOTSPOT_LIB_ARCH="amd64" -D COMPILER1 -D COMPILER2 -I /home/daniel/Projects/java/jdk/build/linux-x86_64-server-fastdebug/hotspot/variant-server/gensrc/adfiles -I /home/daniel/Projects/java/jdk/src/hotspot/share -I /home/daniel/Projects/java/jdk/src/hotspot/os/linux -I /home/daniel/Projects/java/jdk/src/hotspot/os/posix -I /home/daniel/Projects/java/jdk/src/hotspot/cpu/x86 -I /home/daniel/Projects/java/jdk/src/hotspot/os_cpu/linux_x86 -I /home/daniel/Projects/java/jdk/build/linux-x86_64-server-fastdebug/hotspot/variant-server/gensrc -I /home/daniel/Projects/java/jdk/src/hotspot/share/precompiled -I /home/daniel/Projects/java/jdk/src/hotspot/share/include -I /home/daniel/Projects/java/jdk/src/hotspot/os/posix/include -I /home/daniel/Projects/java/jdk/build/linux-x86_64-server-fastdebug/support/modules_include/java.base -I /home/daniel/Projects/java/jdk/build/linux-x86_64-server-fastdebug/support/modules_include/java.base/linux -I /home/daniel/Projects/java/jdk/src/java.base/share/native/libjimage -I /home/daniel/Projects/java/jdk/build/linux-x86_64-server-fastdebug/hotspot/variant-server/gensrc/adfiles -I /home/daniel/Projects/java/jdk/src/hotspot/share -I /home/daniel/Projects/java/jdk/src/hotspot/os/linux -I /home/daniel/Projects/java/jdk/src/hotspot/os/posix -I /home/daniel/Projects/java/jdk/src/hotspot/cpu/x86 -I /home/daniel/Projects/java/jdk/src/hotspot/os_cpu/linux_x86 -I /home/daniel/Projects/java/jdk/build/linux-x86_64-server-fastdebug/hotspot/variant-server/gensrc -D _FORTIFY_SOURCE=2 -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/7.5.0/../../../../include/c++/7.5.0 -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/7.5.0/../../../../include/x86_64-linux-gnu/c++/7.5.0 -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/7.5.0/../../../../include/x86_64-linux-gnu/c++/7.5.0 -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/7.5.0/../../../../include/c++/7.5.0/backward -internal-isystem /usr/local/include -internal-isystem /usr/lib/llvm-10/lib/clang/10.0.0/include 
-internal-externc-isystem /usr/include/x86_64-linux-gnu -internal-externc-isystem /include -internal-externc-isystem /usr/include -O3 -Wno-format-zero-length -Wno-unused-parameter -Wno-unused -Wno-parentheses -Wno-comment -Wno-unknown-pragmas -Wno-address -Wno-delete-non-virtual-dtor -Wno-char-subscripts -Wno-array-bounds -Wno-int-in-bool-context -Wno-ignored-qualifiers -Wno-missing-field-initializers -Wno-implicit-fallthrough -Wno-empty-body -Wno-strict-overflow -Wno-sequence-point -Wno-maybe-uninitialized -Wno-misleading-indentation -Wno-cast-function-type -Wno-shift-negative-value -std=c++14 -fdeprecated-macro -fdebug-compilation-dir /home/daniel/Projects/java/jdk/make/hotspot -ferror-limit 19 -fmessage-length 0 -fvisibility hidden -stack-protector 1 -fno-rtti -fgnuc-version=4.2.1 -fobjc-runtime=gcc -fdiagnostics-show-option -vectorize-loops -vectorize-slp -analyzer-output=html -faddrsig -o /home/daniel/Projects/java/scan/2021-12-21-193737-8510-1 -x c++ /home/daniel/Projects/java/jdk/src/hotspot/share/opto/library_call.cpp
1/*
2 * Copyright (c) 1999, 2021, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25#include "precompiled.hpp"
26#include "asm/macroAssembler.hpp"
27#include "ci/ciUtilities.inline.hpp"
28#include "classfile/vmIntrinsics.hpp"
29#include "compiler/compileBroker.hpp"
30#include "compiler/compileLog.hpp"
31#include "gc/shared/barrierSet.hpp"
32#include "jfr/support/jfrIntrinsics.hpp"
33#include "memory/resourceArea.hpp"
34#include "oops/klass.inline.hpp"
35#include "oops/objArrayKlass.hpp"
36#include "opto/addnode.hpp"
37#include "opto/arraycopynode.hpp"
38#include "opto/c2compiler.hpp"
39#include "opto/castnode.hpp"
40#include "opto/cfgnode.hpp"
41#include "opto/convertnode.hpp"
42#include "opto/countbitsnode.hpp"
43#include "opto/idealKit.hpp"
44#include "opto/library_call.hpp"
45#include "opto/mathexactnode.hpp"
46#include "opto/mulnode.hpp"
47#include "opto/narrowptrnode.hpp"
48#include "opto/opaquenode.hpp"
49#include "opto/parse.hpp"
50#include "opto/runtime.hpp"
51#include "opto/rootnode.hpp"
52#include "opto/subnode.hpp"
53#include "prims/unsafe.hpp"
54#include "runtime/objectMonitor.hpp"
55#include "runtime/sharedRuntime.hpp"
56#include "runtime/stubRoutines.hpp"
57#include "utilities/macros.hpp"
58#include "utilities/powerOfTwo.hpp"
59
60#if INCLUDE_JFR
61#include "jfr/jfr.hpp"
62#endif
63
64//---------------------------make_vm_intrinsic----------------------------
65CallGenerator* Compile::make_vm_intrinsic(ciMethod* m, bool is_virtual) {
66 vmIntrinsicID id = m->intrinsic_id();
67 assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
68
69 if (!m->is_loaded()) {
70 // Do not attempt to inline unloaded methods.
71 return NULL;
72 }
73
74 C2Compiler* compiler = (C2Compiler*)CompileBroker::compiler(CompLevel_full_optimization);
75 bool is_available = false;
76
77 {
78 // For calling is_intrinsic_supported and is_intrinsic_disabled_by_flag
79 // the compiler must transition to '_thread_in_vm' state because both
80 // methods access VM-internal data.
81 VM_ENTRY_MARK;
82 methodHandle mh(THREAD, m->get_Method());
83 is_available = compiler != NULL && compiler->is_intrinsic_supported(mh, is_virtual) &&
84 !C->directive()->is_intrinsic_disabled(mh) &&
85 !vmIntrinsics::is_disabled_by_flags(mh);
86
87 }
88
89 if (is_available) {
90 assert(id <= vmIntrinsics::LAST_COMPILER_INLINE, "caller responsibility");
91 assert(id != vmIntrinsics::_Object_init && id != vmIntrinsics::_invoke, "enum out of order?");
92 return new LibraryIntrinsic(m, is_virtual,
93 vmIntrinsics::predicates_needed(id),
94 vmIntrinsics::does_virtual_dispatch(id),
95 id);
96 } else {
97 return NULL;
98 }
99}
100
101JVMState* LibraryIntrinsic::generate(JVMState* jvms) {
102 LibraryCallKit kit(jvms, this);
103 Compile* C = kit.C;
104 int nodes = C->unique();
105#ifndef PRODUCT
106 if ((C->print_intrinsics() || C->print_inlining()) && Verbose) {
107 char buf[1000];
108 const char* str = vmIntrinsics::short_name_as_C_string(intrinsic_id(), buf, sizeof(buf));
109 tty->print_cr("Intrinsic %s", str);
110 }
111#endif
112 ciMethod* callee = kit.callee();
113 const int bci = kit.bci();
114#ifdef ASSERT
115 Node* ctrl = kit.control();
116#endif
117 // Try to inline the intrinsic.
118 if (callee->check_intrinsic_candidate() &&
119 kit.try_to_inline(_last_predicate)) {
120 const char *inline_msg = is_virtual() ? "(intrinsic, virtual)"
121 : "(intrinsic)";
122 CompileTask::print_inlining_ul(callee, jvms->depth() - 1, bci, inline_msg);
123 if (C->print_intrinsics() || C->print_inlining()) {
124 C->print_inlining(callee, jvms->depth() - 1, bci, inline_msg);
125 }
126 C->gather_intrinsic_statistics(intrinsic_id(), is_virtual(), Compile::_intrinsic_worked);
127 if (C->log()) {
128 C->log()->elem("intrinsic id='%s'%s nodes='%d'",
129 vmIntrinsics::name_at(intrinsic_id()),
130 (is_virtual() ? " virtual='1'" : ""),
131 C->unique() - nodes);
132 }
133 // Push the result from the inlined method onto the stack.
134 kit.push_result();
135 C->print_inlining_update(this);
136 return kit.transfer_exceptions_into_jvms();
137 }
138
139 // The intrinsic bailed out
140 assert(ctrl == kit.control(), "Control flow was added although the intrinsic bailed out");
141 if (jvms->has_method()) {
142 // Not a root compile.
143 const char* msg;
144 if (callee->intrinsic_candidate()) {
145 msg = is_virtual() ? "failed to inline (intrinsic, virtual)" : "failed to inline (intrinsic)";
146 } else {
147 msg = is_virtual() ? "failed to inline (intrinsic, virtual), method not annotated"
148 : "failed to inline (intrinsic), method not annotated";
149 }
150 CompileTask::print_inlining_ul(callee, jvms->depth() - 1, bci, msg);
151 if (C->print_intrinsics() || C->print_inlining()) {
152 C->print_inlining(callee, jvms->depth() - 1, bci, msg);
153 }
154 } else {
155 // Root compile
156 ResourceMark rm;
157 stringStream msg_stream;
158 msg_stream.print("Did not generate intrinsic %s%s at bci:%d in",
159 vmIntrinsics::name_at(intrinsic_id()),
160 is_virtual() ? " (virtual)" : "", bci);
161 const char *msg = msg_stream.as_string();
162 log_debug(jit, inlining)("%s", msg);
163 if (C->print_intrinsics() || C->print_inlining()) {
164 tty->print("%s", msg);
165 }
166 }
167 C->gather_intrinsic_statistics(intrinsic_id(), is_virtual(), Compile::_intrinsic_failed);
168 C->print_inlining_update(this);
169
170 return NULL;
171}
172
173Node* LibraryIntrinsic::generate_predicate(JVMState* jvms, int predicate) {
174 LibraryCallKit kit(jvms, this);
175 Compile* C = kit.C;
176 int nodes = C->unique();
177 _last_predicate = predicate;
178#ifndef PRODUCT
179 assert(is_predicated() && predicate < predicates_count(), "sanity");
180 if ((C->print_intrinsics() || C->print_inlining()) && Verbose) {
181 char buf[1000];
182 const char* str = vmIntrinsics::short_name_as_C_string(intrinsic_id(), buf, sizeof(buf));
183 tty->print_cr("Predicate for intrinsic %s", str);
184 }
185#endif
186 ciMethod* callee = kit.callee();
187 const int bci = kit.bci();
188
189 Node* slow_ctl = kit.try_to_predicate(predicate);
190 if (!kit.failing()) {
191 const char *inline_msg = is_virtual() ? "(intrinsic, virtual, predicate)"
192 : "(intrinsic, predicate)";
193 CompileTask::print_inlining_ul(callee, jvms->depth() - 1, bci, inline_msg);
194 if (C->print_intrinsics() || C->print_inlining()) {
195 C->print_inlining(callee, jvms->depth() - 1, bci, inline_msg);
196 }
197 C->gather_intrinsic_statistics(intrinsic_id(), is_virtual(), Compile::_intrinsic_worked);
198 if (C->log()) {
199 C->log()->elem("predicate_intrinsic id='%s'%s nodes='%d'",
200 vmIntrinsics::name_at(intrinsic_id()),
201 (is_virtual() ? " virtual='1'" : ""),
202 C->unique() - nodes);
203 }
204 return slow_ctl; // Could be NULL if the check folds.
205 }
206
207 // The intrinsic bailed out
208 if (jvms->has_method()) {
209 // Not a root compile.
210 const char* msg = "failed to generate predicate for intrinsic";
211 CompileTask::print_inlining_ul(kit.callee(), jvms->depth() - 1, bci, msg);
212 if (C->print_intrinsics() || C->print_inlining()) {
213 C->print_inlining(kit.callee(), jvms->depth() - 1, bci, msg);
214 }
215 } else {
216 // Root compile
217 ResourceMark rm;
218 stringStream msg_stream;
219 msg_stream.print("Did not generate intrinsic %s%s at bci:%d in",
220 vmIntrinsics::name_at(intrinsic_id()),
221 is_virtual() ? " (virtual)" : "", bci);
222 const char *msg = msg_stream.as_string();
223 log_debug(jit, inlining)("%s", msg);
224 if (C->print_intrinsics() || C->print_inlining()) {
225 C->print_inlining_stream()->print("%s", msg);
226 }
227 }
228 C->gather_intrinsic_statistics(intrinsic_id(), is_virtual(), Compile::_intrinsic_failed);
229 return NULL;
230}
231
232bool LibraryCallKit::try_to_inline(int predicate) {
233 // Handle symbolic names for otherwise undistinguished boolean switches:
234 const bool is_store = true;
235 const bool is_compress = true;
236 const bool is_static = true;
237 const bool is_volatile = true;
238
239 if (!jvms()->has_method()) {
240 // Root JVMState has a null method.
241 assert(map()->memory()->Opcode() == Op_Parm, "");
242 // Insert the memory aliasing node
243 set_all_memory(reset_memory());
244 }
245 assert(merged_memory(), "");
246
247 switch (intrinsic_id()) {
248 case vmIntrinsics::_hashCode: return inline_native_hashcode(intrinsic()->is_virtual(), !is_static);
249 case vmIntrinsics::_identityHashCode: return inline_native_hashcode(/*!virtual*/ false, is_static);
250 case vmIntrinsics::_getClass: return inline_native_getClass();
251
252 case vmIntrinsics::_ceil:
253 case vmIntrinsics::_floor:
254 case vmIntrinsics::_rint:
255 case vmIntrinsics::_dsin:
256 case vmIntrinsics::_dcos:
257 case vmIntrinsics::_dtan:
258 case vmIntrinsics::_dabs:
259 case vmIntrinsics::_fabs:
260 case vmIntrinsics::_iabs:
261 case vmIntrinsics::_labs:
262 case vmIntrinsics::_datan2:
263 case vmIntrinsics::_dsqrt:
264 case vmIntrinsics::_dsqrt_strict:
265 case vmIntrinsics::_dexp:
266 case vmIntrinsics::_dlog:
267 case vmIntrinsics::_dlog10:
268 case vmIntrinsics::_dpow:
269 case vmIntrinsics::_dcopySign:
270 case vmIntrinsics::_fcopySign:
271 case vmIntrinsics::_dsignum:
272 case vmIntrinsics::_fsignum: return inline_math_native(intrinsic_id());
273
274 case vmIntrinsics::_notify:
275 case vmIntrinsics::_notifyAll:
276 return inline_notify(intrinsic_id());
277
278 case vmIntrinsics::_addExactI: return inline_math_addExactI(false /* add */);
279 case vmIntrinsics::_addExactL: return inline_math_addExactL(false /* add */);
280 case vmIntrinsics::_decrementExactI: return inline_math_subtractExactI(true /* decrement */);
281 case vmIntrinsics::_decrementExactL: return inline_math_subtractExactL(true /* decrement */);
282 case vmIntrinsics::_incrementExactI: return inline_math_addExactI(true /* increment */);
283 case vmIntrinsics::_incrementExactL: return inline_math_addExactL(true /* increment */);
284 case vmIntrinsics::_multiplyExactI: return inline_math_multiplyExactI();
285 case vmIntrinsics::_multiplyExactL: return inline_math_multiplyExactL();
286 case vmIntrinsics::_multiplyHigh: return inline_math_multiplyHigh();
287 case vmIntrinsics::_unsignedMultiplyHigh: return inline_math_unsignedMultiplyHigh();
288 case vmIntrinsics::_negateExactI: return inline_math_negateExactI();
289 case vmIntrinsics::_negateExactL: return inline_math_negateExactL();
290 case vmIntrinsics::_subtractExactI: return inline_math_subtractExactI(false /* subtract */);
291 case vmIntrinsics::_subtractExactL: return inline_math_subtractExactL(false /* subtract */);
292
293 case vmIntrinsics::_arraycopy: return inline_arraycopy();
294
295 case vmIntrinsics::_compareToL: return inline_string_compareTo(StrIntrinsicNode::LL);
296 case vmIntrinsics::_compareToU: return inline_string_compareTo(StrIntrinsicNode::UU);
297 case vmIntrinsics::_compareToLU: return inline_string_compareTo(StrIntrinsicNode::LU);
298 case vmIntrinsics::_compareToUL: return inline_string_compareTo(StrIntrinsicNode::UL);
299
300 case vmIntrinsics::_indexOfL: return inline_string_indexOf(StrIntrinsicNode::LL);
301 case vmIntrinsics::_indexOfU: return inline_string_indexOf(StrIntrinsicNode::UU);
302 case vmIntrinsics::_indexOfUL: return inline_string_indexOf(StrIntrinsicNode::UL);
303 case vmIntrinsics::_indexOfIL: return inline_string_indexOfI(StrIntrinsicNode::LL);
304 case vmIntrinsics::_indexOfIU: return inline_string_indexOfI(StrIntrinsicNode::UU);
305 case vmIntrinsics::_indexOfIUL: return inline_string_indexOfI(StrIntrinsicNode::UL);
306 case vmIntrinsics::_indexOfU_char: return inline_string_indexOfChar(StrIntrinsicNode::U);
307 case vmIntrinsics::_indexOfL_char: return inline_string_indexOfChar(StrIntrinsicNode::L);
308
309 case vmIntrinsics::_equalsL: return inline_string_equals(StrIntrinsicNode::LL);
310 case vmIntrinsics::_equalsU: return inline_string_equals(StrIntrinsicNode::UU);
311
312 case vmIntrinsics::_toBytesStringU: return inline_string_toBytesU();
313 case vmIntrinsics::_getCharsStringU: return inline_string_getCharsU();
314 case vmIntrinsics::_getCharStringU: return inline_string_char_access(!is_store);
315 case vmIntrinsics::_putCharStringU: return inline_string_char_access( is_store);
316
317 case vmIntrinsics::_compressStringC:
318 case vmIntrinsics::_compressStringB: return inline_string_copy( is_compress);
319 case vmIntrinsics::_inflateStringC:
320 case vmIntrinsics::_inflateStringB: return inline_string_copy(!is_compress);
321
322 case vmIntrinsics::_getReference: return inline_unsafe_access(!is_store, T_OBJECT, Relaxed, false);
323 case vmIntrinsics::_getBoolean: return inline_unsafe_access(!is_store, T_BOOLEAN, Relaxed, false);
324 case vmIntrinsics::_getByte: return inline_unsafe_access(!is_store, T_BYTE, Relaxed, false);
325 case vmIntrinsics::_getShort: return inline_unsafe_access(!is_store, T_SHORT, Relaxed, false);
326 case vmIntrinsics::_getChar: return inline_unsafe_access(!is_store, T_CHAR, Relaxed, false);
327 case vmIntrinsics::_getInt: return inline_unsafe_access(!is_store, T_INT, Relaxed, false);
328 case vmIntrinsics::_getLong: return inline_unsafe_access(!is_store, T_LONG, Relaxed, false);
329 case vmIntrinsics::_getFloat: return inline_unsafe_access(!is_store, T_FLOAT, Relaxed, false);
330 case vmIntrinsics::_getDouble: return inline_unsafe_access(!is_store, T_DOUBLE, Relaxed, false);
331
332 case vmIntrinsics::_putReference: return inline_unsafe_access( is_store, T_OBJECT, Relaxed, false);
333 case vmIntrinsics::_putBoolean: return inline_unsafe_access( is_store, T_BOOLEAN, Relaxed, false);
334 case vmIntrinsics::_putByte: return inline_unsafe_access( is_store, T_BYTE, Relaxed, false);
335 case vmIntrinsics::_putShort: return inline_unsafe_access( is_store, T_SHORT, Relaxed, false);
336 case vmIntrinsics::_putChar: return inline_unsafe_access( is_store, T_CHAR, Relaxed, false);
337 case vmIntrinsics::_putInt: return inline_unsafe_access( is_store, T_INT, Relaxed, false);
338 case vmIntrinsics::_putLong: return inline_unsafe_access( is_store, T_LONG, Relaxed, false);
339 case vmIntrinsics::_putFloat: return inline_unsafe_access( is_store, T_FLOAT, Relaxed, false);
340 case vmIntrinsics::_putDouble: return inline_unsafe_access( is_store, T_DOUBLE, Relaxed, false);
341
342 case vmIntrinsics::_getReferenceVolatile: return inline_unsafe_access(!is_store, T_OBJECT, Volatile, false);
343 case vmIntrinsics::_getBooleanVolatile: return inline_unsafe_access(!is_store, T_BOOLEAN, Volatile, false);
344 case vmIntrinsics::_getByteVolatile: return inline_unsafe_access(!is_store, T_BYTE, Volatile, false);
345 case vmIntrinsics::_getShortVolatile: return inline_unsafe_access(!is_store, T_SHORT, Volatile, false);
346 case vmIntrinsics::_getCharVolatile: return inline_unsafe_access(!is_store, T_CHAR, Volatile, false);
347 case vmIntrinsics::_getIntVolatile: return inline_unsafe_access(!is_store, T_INT, Volatile, false);
348 case vmIntrinsics::_getLongVolatile: return inline_unsafe_access(!is_store, T_LONG, Volatile, false);
349 case vmIntrinsics::_getFloatVolatile: return inline_unsafe_access(!is_store, T_FLOAT, Volatile, false);
350 case vmIntrinsics::_getDoubleVolatile: return inline_unsafe_access(!is_store, T_DOUBLE, Volatile, false);
351
352 case vmIntrinsics::_putReferenceVolatile: return inline_unsafe_access( is_store, T_OBJECT, Volatile, false);
353 case vmIntrinsics::_putBooleanVolatile: return inline_unsafe_access( is_store, T_BOOLEAN, Volatile, false);
354 case vmIntrinsics::_putByteVolatile: return inline_unsafe_access( is_store, T_BYTE, Volatile, false);
355 case vmIntrinsics::_putShortVolatile: return inline_unsafe_access( is_store, T_SHORT, Volatile, false);
356 case vmIntrinsics::_putCharVolatile: return inline_unsafe_access( is_store, T_CHAR, Volatile, false);
357 case vmIntrinsics::_putIntVolatile: return inline_unsafe_access( is_store, T_INT, Volatile, false);
358 case vmIntrinsics::_putLongVolatile: return inline_unsafe_access( is_store, T_LONG, Volatile, false);
359 case vmIntrinsics::_putFloatVolatile: return inline_unsafe_access( is_store, T_FLOAT, Volatile, false);
360 case vmIntrinsics::_putDoubleVolatile: return inline_unsafe_access( is_store, T_DOUBLE, Volatile, false);
361
362 case vmIntrinsics::_getShortUnaligned: return inline_unsafe_access(!is_store, T_SHORT, Relaxed, true);
363 case vmIntrinsics::_getCharUnaligned: return inline_unsafe_access(!is_store, T_CHAR, Relaxed, true);
364 case vmIntrinsics::_getIntUnaligned: return inline_unsafe_access(!is_store, T_INT, Relaxed, true);
365 case vmIntrinsics::_getLongUnaligned: return inline_unsafe_access(!is_store, T_LONG, Relaxed, true);
366
367 case vmIntrinsics::_putShortUnaligned: return inline_unsafe_access( is_store, T_SHORT, Relaxed, true);
368 case vmIntrinsics::_putCharUnaligned: return inline_unsafe_access( is_store, T_CHAR, Relaxed, true);
369 case vmIntrinsics::_putIntUnaligned: return inline_unsafe_access( is_store, T_INT, Relaxed, true);
370 case vmIntrinsics::_putLongUnaligned: return inline_unsafe_access( is_store, T_LONG, Relaxed, true);
371
372 case vmIntrinsics::_getReferenceAcquire: return inline_unsafe_access(!is_store, T_OBJECT, Acquire, false);
373 case vmIntrinsics::_getBooleanAcquire: return inline_unsafe_access(!is_store, T_BOOLEAN, Acquire, false);
374 case vmIntrinsics::_getByteAcquire: return inline_unsafe_access(!is_store, T_BYTE, Acquire, false);
375 case vmIntrinsics::_getShortAcquire: return inline_unsafe_access(!is_store, T_SHORT, Acquire, false);
376 case vmIntrinsics::_getCharAcquire: return inline_unsafe_access(!is_store, T_CHAR, Acquire, false);
377 case vmIntrinsics::_getIntAcquire: return inline_unsafe_access(!is_store, T_INT, Acquire, false);
378 case vmIntrinsics::_getLongAcquire: return inline_unsafe_access(!is_store, T_LONG, Acquire, false);
379 case vmIntrinsics::_getFloatAcquire: return inline_unsafe_access(!is_store, T_FLOAT, Acquire, false);
380 case vmIntrinsics::_getDoubleAcquire: return inline_unsafe_access(!is_store, T_DOUBLE, Acquire, false);
381
382 case vmIntrinsics::_putReferenceRelease: return inline_unsafe_access( is_store, T_OBJECT, Release, false);
383 case vmIntrinsics::_putBooleanRelease: return inline_unsafe_access( is_store, T_BOOLEAN, Release, false);
384 case vmIntrinsics::_putByteRelease: return inline_unsafe_access( is_store, T_BYTE, Release, false);
385 case vmIntrinsics::_putShortRelease: return inline_unsafe_access( is_store, T_SHORT, Release, false);
386 case vmIntrinsics::_putCharRelease: return inline_unsafe_access( is_store, T_CHAR, Release, false);
387 case vmIntrinsics::_putIntRelease: return inline_unsafe_access( is_store, T_INT, Release, false);
388 case vmIntrinsics::_putLongRelease: return inline_unsafe_access( is_store, T_LONG, Release, false);
389 case vmIntrinsics::_putFloatRelease: return inline_unsafe_access( is_store, T_FLOAT, Release, false);
390 case vmIntrinsics::_putDoubleRelease: return inline_unsafe_access( is_store, T_DOUBLE, Release, false);
391
392 case vmIntrinsics::_getReferenceOpaque: return inline_unsafe_access(!is_store, T_OBJECT, Opaque, false);
393 case vmIntrinsics::_getBooleanOpaque: return inline_unsafe_access(!is_store, T_BOOLEAN, Opaque, false);
394 case vmIntrinsics::_getByteOpaque: return inline_unsafe_access(!is_store, T_BYTE, Opaque, false);
395 case vmIntrinsics::_getShortOpaque: return inline_unsafe_access(!is_store, T_SHORT, Opaque, false);
396 case vmIntrinsics::_getCharOpaque: return inline_unsafe_access(!is_store, T_CHAR, Opaque, false);
397 case vmIntrinsics::_getIntOpaque: return inline_unsafe_access(!is_store, T_INT, Opaque, false);
398 case vmIntrinsics::_getLongOpaque: return inline_unsafe_access(!is_store, T_LONG, Opaque, false);
399 case vmIntrinsics::_getFloatOpaque: return inline_unsafe_access(!is_store, T_FLOAT, Opaque, false);
400 case vmIntrinsics::_getDoubleOpaque: return inline_unsafe_access(!is_store, T_DOUBLE, Opaque, false);
401
402 case vmIntrinsics::_putReferenceOpaque: return inline_unsafe_access( is_store, T_OBJECT, Opaque, false);
403 case vmIntrinsics::_putBooleanOpaque: return inline_unsafe_access( is_store, T_BOOLEAN, Opaque, false);
404 case vmIntrinsics::_putByteOpaque: return inline_unsafe_access( is_store, T_BYTE, Opaque, false);
405 case vmIntrinsics::_putShortOpaque: return inline_unsafe_access( is_store, T_SHORT, Opaque, false);
406 case vmIntrinsics::_putCharOpaque: return inline_unsafe_access( is_store, T_CHAR, Opaque, false);
407 case vmIntrinsics::_putIntOpaque: return inline_unsafe_access( is_store, T_INT, Opaque, false);
408 case vmIntrinsics::_putLongOpaque: return inline_unsafe_access( is_store, T_LONG, Opaque, false);
409 case vmIntrinsics::_putFloatOpaque: return inline_unsafe_access( is_store, T_FLOAT, Opaque, false);
410 case vmIntrinsics::_putDoubleOpaque: return inline_unsafe_access( is_store, T_DOUBLE, Opaque, false);
411
412 case vmIntrinsics::_compareAndSetReference: return inline_unsafe_load_store(T_OBJECT, LS_cmp_swap, Volatile);
413 case vmIntrinsics::_compareAndSetByte: return inline_unsafe_load_store(T_BYTE, LS_cmp_swap, Volatile);
414 case vmIntrinsics::_compareAndSetShort: return inline_unsafe_load_store(T_SHORT, LS_cmp_swap, Volatile);
415 case vmIntrinsics::_compareAndSetInt: return inline_unsafe_load_store(T_INT, LS_cmp_swap, Volatile);
416 case vmIntrinsics::_compareAndSetLong: return inline_unsafe_load_store(T_LONG, LS_cmp_swap, Volatile);
417
418 case vmIntrinsics::_weakCompareAndSetReferencePlain: return inline_unsafe_load_store(T_OBJECT, LS_cmp_swap_weak, Relaxed);
419 case vmIntrinsics::_weakCompareAndSetReferenceAcquire: return inline_unsafe_load_store(T_OBJECT, LS_cmp_swap_weak, Acquire);
420 case vmIntrinsics::_weakCompareAndSetReferenceRelease: return inline_unsafe_load_store(T_OBJECT, LS_cmp_swap_weak, Release);
421 case vmIntrinsics::_weakCompareAndSetReference: return inline_unsafe_load_store(T_OBJECT, LS_cmp_swap_weak, Volatile);
422 case vmIntrinsics::_weakCompareAndSetBytePlain: return inline_unsafe_load_store(T_BYTE, LS_cmp_swap_weak, Relaxed);
423 case vmIntrinsics::_weakCompareAndSetByteAcquire: return inline_unsafe_load_store(T_BYTE, LS_cmp_swap_weak, Acquire);
424 case vmIntrinsics::_weakCompareAndSetByteRelease: return inline_unsafe_load_store(T_BYTE, LS_cmp_swap_weak, Release);
425 case vmIntrinsics::_weakCompareAndSetByte: return inline_unsafe_load_store(T_BYTE, LS_cmp_swap_weak, Volatile);
426 case vmIntrinsics::_weakCompareAndSetShortPlain: return inline_unsafe_load_store(T_SHORT, LS_cmp_swap_weak, Relaxed);
427 case vmIntrinsics::_weakCompareAndSetShortAcquire: return inline_unsafe_load_store(T_SHORT, LS_cmp_swap_weak, Acquire);
428 case vmIntrinsics::_weakCompareAndSetShortRelease: return inline_unsafe_load_store(T_SHORT, LS_cmp_swap_weak, Release);
429 case vmIntrinsics::_weakCompareAndSetShort: return inline_unsafe_load_store(T_SHORT, LS_cmp_swap_weak, Volatile);
430 case vmIntrinsics::_weakCompareAndSetIntPlain: return inline_unsafe_load_store(T_INT, LS_cmp_swap_weak, Relaxed);
431 case vmIntrinsics::_weakCompareAndSetIntAcquire: return inline_unsafe_load_store(T_INT, LS_cmp_swap_weak, Acquire);
432 case vmIntrinsics::_weakCompareAndSetIntRelease: return inline_unsafe_load_store(T_INT, LS_cmp_swap_weak, Release);
433 case vmIntrinsics::_weakCompareAndSetInt: return inline_unsafe_load_store(T_INT, LS_cmp_swap_weak, Volatile);
434 case vmIntrinsics::_weakCompareAndSetLongPlain: return inline_unsafe_load_store(T_LONG, LS_cmp_swap_weak, Relaxed);
435 case vmIntrinsics::_weakCompareAndSetLongAcquire: return inline_unsafe_load_store(T_LONG, LS_cmp_swap_weak, Acquire);
436 case vmIntrinsics::_weakCompareAndSetLongRelease: return inline_unsafe_load_store(T_LONG, LS_cmp_swap_weak, Release);
437 case vmIntrinsics::_weakCompareAndSetLong: return inline_unsafe_load_store(T_LONG, LS_cmp_swap_weak, Volatile);
438
439 case vmIntrinsics::_compareAndExchangeReference: return inline_unsafe_load_store(T_OBJECT, LS_cmp_exchange, Volatile);
440 case vmIntrinsics::_compareAndExchangeReferenceAcquire: return inline_unsafe_load_store(T_OBJECT, LS_cmp_exchange, Acquire);
441 case vmIntrinsics::_compareAndExchangeReferenceRelease: return inline_unsafe_load_store(T_OBJECT, LS_cmp_exchange, Release);
442 case vmIntrinsics::_compareAndExchangeByte: return inline_unsafe_load_store(T_BYTE, LS_cmp_exchange, Volatile);
443 case vmIntrinsics::_compareAndExchangeByteAcquire: return inline_unsafe_load_store(T_BYTE, LS_cmp_exchange, Acquire);
444 case vmIntrinsics::_compareAndExchangeByteRelease: return inline_unsafe_load_store(T_BYTE, LS_cmp_exchange, Release);
445 case vmIntrinsics::_compareAndExchangeShort: return inline_unsafe_load_store(T_SHORT, LS_cmp_exchange, Volatile);
446 case vmIntrinsics::_compareAndExchangeShortAcquire: return inline_unsafe_load_store(T_SHORT, LS_cmp_exchange, Acquire);
447 case vmIntrinsics::_compareAndExchangeShortRelease: return inline_unsafe_load_store(T_SHORT, LS_cmp_exchange, Release);
448 case vmIntrinsics::_compareAndExchangeInt: return inline_unsafe_load_store(T_INT, LS_cmp_exchange, Volatile);
449 case vmIntrinsics::_compareAndExchangeIntAcquire: return inline_unsafe_load_store(T_INT, LS_cmp_exchange, Acquire);
450 case vmIntrinsics::_compareAndExchangeIntRelease: return inline_unsafe_load_store(T_INT, LS_cmp_exchange, Release);
451 case vmIntrinsics::_compareAndExchangeLong: return inline_unsafe_load_store(T_LONG, LS_cmp_exchange, Volatile);
452 case vmIntrinsics::_compareAndExchangeLongAcquire: return inline_unsafe_load_store(T_LONG, LS_cmp_exchange, Acquire);
453 case vmIntrinsics::_compareAndExchangeLongRelease: return inline_unsafe_load_store(T_LONG, LS_cmp_exchange, Release);
454
455 case vmIntrinsics::_getAndAddByte: return inline_unsafe_load_store(T_BYTE, LS_get_add, Volatile);
456 case vmIntrinsics::_getAndAddShort: return inline_unsafe_load_store(T_SHORT, LS_get_add, Volatile);
457 case vmIntrinsics::_getAndAddInt: return inline_unsafe_load_store(T_INT, LS_get_add, Volatile);
458 case vmIntrinsics::_getAndAddLong: return inline_unsafe_load_store(T_LONG, LS_get_add, Volatile);
459
460 case vmIntrinsics::_getAndSetByte: return inline_unsafe_load_store(T_BYTE, LS_get_set, Volatile);
461 case vmIntrinsics::_getAndSetShort: return inline_unsafe_load_store(T_SHORT, LS_get_set, Volatile);
462 case vmIntrinsics::_getAndSetInt: return inline_unsafe_load_store(T_INT, LS_get_set, Volatile);
463 case vmIntrinsics::_getAndSetLong: return inline_unsafe_load_store(T_LONG, LS_get_set, Volatile);
464 case vmIntrinsics::_getAndSetReference: return inline_unsafe_load_store(T_OBJECT, LS_get_set, Volatile);
465
466 case vmIntrinsics::_loadFence:
467 case vmIntrinsics::_storeFence:
468 case vmIntrinsics::_storeStoreFence:
469 case vmIntrinsics::_fullFence: return inline_unsafe_fence(intrinsic_id());
470
471 case vmIntrinsics::_onSpinWait: return inline_onspinwait();
472
473 case vmIntrinsics::_currentThread: return inline_native_currentThread();
474
475#ifdef JFR_HAVE_INTRINSICS
476 case vmIntrinsics::_counterTime: return inline_native_time_funcs(CAST_FROM_FN_PTR(address, JFR_TIME_FUNCTION), "counterTime");
477 case vmIntrinsics::_getClassId: return inline_native_classID();
478 case vmIntrinsics::_getEventWriter: return inline_native_getEventWriter();
479#endif
480 case vmIntrinsics::_currentTimeMillis: return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeMillis), "currentTimeMillis");
481 case vmIntrinsics::_nanoTime: return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeNanos), "nanoTime");
482 case vmIntrinsics::_writeback0: return inline_unsafe_writeback0();
483 case vmIntrinsics::_writebackPreSync0: return inline_unsafe_writebackSync0(true);
484 case vmIntrinsics::_writebackPostSync0: return inline_unsafe_writebackSync0(false);
485 case vmIntrinsics::_allocateInstance: return inline_unsafe_allocate();
486 case vmIntrinsics::_copyMemory: return inline_unsafe_copyMemory();
487 case vmIntrinsics::_getLength: return inline_native_getLength();
488 case vmIntrinsics::_copyOf: return inline_array_copyOf(false);
489 case vmIntrinsics::_copyOfRange: return inline_array_copyOf(true);
490 case vmIntrinsics::_equalsB: return inline_array_equals(StrIntrinsicNode::LL);
491 case vmIntrinsics::_equalsC: return inline_array_equals(StrIntrinsicNode::UU);
492 case vmIntrinsics::_Preconditions_checkIndex: return inline_preconditions_checkIndex(T_INT);
493 case vmIntrinsics::_Preconditions_checkLongIndex: return inline_preconditions_checkIndex(T_LONG);
494 case vmIntrinsics::_clone: return inline_native_clone(intrinsic()->is_virtual());
495
496 case vmIntrinsics::_allocateUninitializedArray: return inline_unsafe_newArray(true);
497 case vmIntrinsics::_newArray: return inline_unsafe_newArray(false);
498
499 case vmIntrinsics::_isAssignableFrom: return inline_native_subtype_check();
500
501 case vmIntrinsics::_isInstance:
502 case vmIntrinsics::_getModifiers:
503 case vmIntrinsics::_isInterface:
504 case vmIntrinsics::_isArray:
505 case vmIntrinsics::_isPrimitive:
506 case vmIntrinsics::_isHidden:
507 case vmIntrinsics::_getSuperclass:
508 case vmIntrinsics::_getClassAccessFlags: return inline_native_Class_query(intrinsic_id());
509
510 case vmIntrinsics::_floatToRawIntBits:
511 case vmIntrinsics::_floatToIntBits:
512 case vmIntrinsics::_intBitsToFloat:
513 case vmIntrinsics::_doubleToRawLongBits:
514 case vmIntrinsics::_doubleToLongBits:
515 case vmIntrinsics::_longBitsToDouble: return inline_fp_conversions(intrinsic_id());
516
517 case vmIntrinsics::_numberOfLeadingZeros_i:
518 case vmIntrinsics::_numberOfLeadingZeros_l:
519 case vmIntrinsics::_numberOfTrailingZeros_i:
520 case vmIntrinsics::_numberOfTrailingZeros_l:
521 case vmIntrinsics::_bitCount_i:
522 case vmIntrinsics::_bitCount_l:
523 case vmIntrinsics::_reverseBytes_i:
524 case vmIntrinsics::_reverseBytes_l:
525 case vmIntrinsics::_reverseBytes_s:
526 case vmIntrinsics::_reverseBytes_c: return inline_number_methods(intrinsic_id());
527
528 case vmIntrinsics::_getCallerClass: return inline_native_Reflection_getCallerClass();
529
530 case vmIntrinsics::_Reference_get: return inline_reference_get();
531 case vmIntrinsics::_Reference_refersTo0: return inline_reference_refersTo0(false);
532 case vmIntrinsics::_PhantomReference_refersTo0: return inline_reference_refersTo0(true);
533
534 case vmIntrinsics::_Class_cast: return inline_Class_cast();
535
536 case vmIntrinsics::_aescrypt_encryptBlock:
537 case vmIntrinsics::_aescrypt_decryptBlock: return inline_aescrypt_Block(intrinsic_id());
538
539 case vmIntrinsics::_cipherBlockChaining_encryptAESCrypt:
540 case vmIntrinsics::_cipherBlockChaining_decryptAESCrypt:
541 return inline_cipherBlockChaining_AESCrypt(intrinsic_id());
542
543 case vmIntrinsics::_electronicCodeBook_encryptAESCrypt:
544 case vmIntrinsics::_electronicCodeBook_decryptAESCrypt:
545 return inline_electronicCodeBook_AESCrypt(intrinsic_id());
546
547 case vmIntrinsics::_counterMode_AESCrypt:
548 return inline_counterMode_AESCrypt(intrinsic_id());
549
550 case vmIntrinsics::_galoisCounterMode_AESCrypt:
551 return inline_galoisCounterMode_AESCrypt();
552
553 case vmIntrinsics::_md5_implCompress:
554 case vmIntrinsics::_sha_implCompress:
555 case vmIntrinsics::_sha2_implCompress:
556 case vmIntrinsics::_sha5_implCompress:
557 case vmIntrinsics::_sha3_implCompress:
558 return inline_digestBase_implCompress(intrinsic_id());
559
560 case vmIntrinsics::_digestBase_implCompressMB:
561 return inline_digestBase_implCompressMB(predicate);
562
563 case vmIntrinsics::_multiplyToLen:
564 return inline_multiplyToLen();
565
566 case vmIntrinsics::_squareToLen:
567 return inline_squareToLen();
568
569 case vmIntrinsics::_mulAdd:
570 return inline_mulAdd();
571
572 case vmIntrinsics::_montgomeryMultiply:
573 return inline_montgomeryMultiply();
574 case vmIntrinsics::_montgomerySquare:
575 return inline_montgomerySquare();
576
577 case vmIntrinsics::_bigIntegerRightShiftWorker:
578 return inline_bigIntegerShift(true);
579 case vmIntrinsics::_bigIntegerLeftShiftWorker:
580 return inline_bigIntegerShift(false);
581
582 case vmIntrinsics::_vectorizedMismatch:
583 return inline_vectorizedMismatch();
584
585 case vmIntrinsics::_ghash_processBlocks:
586 return inline_ghash_processBlocks();
587 case vmIntrinsics::_base64_encodeBlock:
588 return inline_base64_encodeBlock();
589 case vmIntrinsics::_base64_decodeBlock:
590 return inline_base64_decodeBlock();
591
592 case vmIntrinsics::_encodeISOArray:
593 case vmIntrinsics::_encodeByteISOArray:
594 return inline_encodeISOArray(false);
595 case vmIntrinsics::_encodeAsciiArray:
596 return inline_encodeISOArray(true);
597
598 case vmIntrinsics::_updateCRC32:
599 return inline_updateCRC32();
600 case vmIntrinsics::_updateBytesCRC32:
601 return inline_updateBytesCRC32();
602 case vmIntrinsics::_updateByteBufferCRC32:
603 return inline_updateByteBufferCRC32();
604
605 case vmIntrinsics::_updateBytesCRC32C:
606 return inline_updateBytesCRC32C();
607 case vmIntrinsics::_updateDirectByteBufferCRC32C:
608 return inline_updateDirectByteBufferCRC32C();
609
610 case vmIntrinsics::_updateBytesAdler32:
611 return inline_updateBytesAdler32();
612 case vmIntrinsics::_updateByteBufferAdler32:
613 return inline_updateByteBufferAdler32();
614
615 case vmIntrinsics::_profileBoolean:
616 return inline_profileBoolean();
617 case vmIntrinsics::_isCompileConstant:
618 return inline_isCompileConstant();
619
620 case vmIntrinsics::_hasNegatives:
621 return inline_hasNegatives();
622
623 case vmIntrinsics::_fmaD:
624 case vmIntrinsics::_fmaF:
625 return inline_fma(intrinsic_id());
626
627 case vmIntrinsics::_isDigit:
628 case vmIntrinsics::_isLowerCase:
629 case vmIntrinsics::_isUpperCase:
630 case vmIntrinsics::_isWhitespace:
631 return inline_character_compare(intrinsic_id());
632
633 case vmIntrinsics::_min:
634 case vmIntrinsics::_max:
635 case vmIntrinsics::_min_strict:
636 case vmIntrinsics::_max_strict:
637 return inline_min_max(intrinsic_id());
638
639 case vmIntrinsics::_maxF:
640 case vmIntrinsics::_minF:
641 case vmIntrinsics::_maxD:
642 case vmIntrinsics::_minD:
643 case vmIntrinsics::_maxF_strict:
644 case vmIntrinsics::_minF_strict:
645 case vmIntrinsics::_maxD_strict:
646 case vmIntrinsics::_minD_strict:
647 return inline_fp_min_max(intrinsic_id());
648
649 case vmIntrinsics::_VectorUnaryOp:
650 return inline_vector_nary_operation(1);
651 case vmIntrinsics::_VectorBinaryOp:
652 return inline_vector_nary_operation(2);
653 case vmIntrinsics::_VectorTernaryOp:
654 return inline_vector_nary_operation(3);
655 case vmIntrinsics::_VectorFromBitsCoerced:
656 return inline_vector_frombits_coerced();
657 case vmIntrinsics::_VectorShuffleIota:
658 return inline_vector_shuffle_iota();
659 case vmIntrinsics::_VectorMaskOp:
660 return inline_vector_mask_operation();
661 case vmIntrinsics::_VectorShuffleToVector:
662 return inline_vector_shuffle_to_vector();
663 case vmIntrinsics::_VectorLoadOp:
664 return inline_vector_mem_operation(/*is_store=*/false);
665 case vmIntrinsics::_VectorLoadMaskedOp:
666 return inline_vector_mem_masked_operation(/*is_store*/false);
667 case vmIntrinsics::_VectorStoreOp:
668 return inline_vector_mem_operation(/*is_store=*/true);
669 case vmIntrinsics::_VectorStoreMaskedOp:
670 return inline_vector_mem_masked_operation(/*is_store=*/true);
671 case vmIntrinsics::_VectorGatherOp:
672 return inline_vector_gather_scatter(/*is_scatter*/ false);
673 case vmIntrinsics::_VectorScatterOp:
674 return inline_vector_gather_scatter(/*is_scatter*/ true);
675 case vmIntrinsics::_VectorReductionCoerced:
676 return inline_vector_reduction();
677 case vmIntrinsics::_VectorTest:
678 return inline_vector_test();
679 case vmIntrinsics::_VectorBlend:
680 return inline_vector_blend();
681 case vmIntrinsics::_VectorRearrange:
682 return inline_vector_rearrange();
683 case vmIntrinsics::_VectorCompare:
684 return inline_vector_compare();
685 case vmIntrinsics::_VectorBroadcastInt:
686 return inline_vector_broadcast_int();
687 case vmIntrinsics::_VectorConvert:
688 return inline_vector_convert();
689 case vmIntrinsics::_VectorInsert:
690 return inline_vector_insert();
691 case vmIntrinsics::_VectorExtract:
692 return inline_vector_extract();
693
694 case vmIntrinsics::_getObjectSize:
695 return inline_getObjectSize();
696
697 case vmIntrinsics::_blackhole:
698 return inline_blackhole();
699
700 default:
701 // If you get here, it may be that someone has added a new intrinsic
702 // to the list in vmIntrinsics.hpp without implementing it here.
703#ifndef PRODUCT
704 if ((PrintMiscellaneous && (Verbose || WizardMode)) || PrintOpto) {
705 tty->print_cr("*** Warning: Unimplemented intrinsic %s(%d)",
706 vmIntrinsics::name_at(intrinsic_id()), vmIntrinsics::as_int(intrinsic_id()));
707 }
708#endif
709 return false;
710 }
711}
712
713Node* LibraryCallKit::try_to_predicate(int predicate) {
714 if (!jvms()->has_method()) {
715 // Root JVMState has a null method.
716 assert(map()->memory()->Opcode() == Op_Parm, "");
717 // Insert the memory aliasing node
718 set_all_memory(reset_memory());
719 }
720 assert(merged_memory(), "");
721
722 switch (intrinsic_id()) {
723 case vmIntrinsics::_cipherBlockChaining_encryptAESCrypt:
724 return inline_cipherBlockChaining_AESCrypt_predicate(false);
725 case vmIntrinsics::_cipherBlockChaining_decryptAESCrypt:
726 return inline_cipherBlockChaining_AESCrypt_predicate(true);
727 case vmIntrinsics::_electronicCodeBook_encryptAESCrypt:
728 return inline_electronicCodeBook_AESCrypt_predicate(false);
729 case vmIntrinsics::_electronicCodeBook_decryptAESCrypt:
730 return inline_electronicCodeBook_AESCrypt_predicate(true);
731 case vmIntrinsics::_counterMode_AESCrypt:
732 return inline_counterMode_AESCrypt_predicate();
733 case vmIntrinsics::_digestBase_implCompressMB:
734 return inline_digestBase_implCompressMB_predicate(predicate);
735 case vmIntrinsics::_galoisCounterMode_AESCrypt:
736 return inline_galoisCounterMode_AESCrypt_predicate();
737
738 default:
739 // If you get here, it may be that someone has added a new intrinsic
740 // to the list in vmIntrinsics.hpp without implementing it here.
741#ifndef PRODUCT
742 if ((PrintMiscellaneous && (Verbose || WizardMode)) || PrintOpto) {
743 tty->print_cr("*** Warning: Unimplemented predicate for intrinsic %s(%d)",
744 vmIntrinsics::name_at(intrinsic_id()), vmIntrinsics::as_int(intrinsic_id()));
745 }
746#endif
747 Node* slow_ctl = control();
748 set_control(top()); // No fast path intrinsic
749 return slow_ctl;
750 }
751}
752
753//------------------------------set_result-------------------------------
754// Helper function for finishing intrinsics.
755void LibraryCallKit::set_result(RegionNode* region, PhiNode* value) {
756 record_for_igvn(region);
757 set_control(_gvn.transform(region));
758 set_result( _gvn.transform(value));
759 assert(value->type()->basic_type() == result()->bottom_type()->basic_type(), "sanity");
760}
761
762//------------------------------generate_guard---------------------------
763// Helper function for generating guarded fast-slow graph structures.
764// The given 'test', if true, guards a slow path. If the test fails
765// then a fast path can be taken. (We generally hope it fails.)
766// In all cases, GraphKit::control() is updated to the fast path.
767// The returned value represents the control for the slow path.
768// The return value is never 'top'; it is either a valid control
769// or NULL if it is obvious that the slow path can never be taken.
770// Also, if region and the slow control are not NULL, the slow edge
771// is appended to the region.
772Node* LibraryCallKit::generate_guard(Node* test, RegionNode* region, float true_prob) {
773 if (stopped()) {
774 // Already short circuited.
775 return NULL;
776 }
777
778 // Build an if node and its projections.
779 // If test is true we take the slow path, which we assume is uncommon.
780 if (_gvn.type(test) == TypeInt::ZERO) {
781 // The slow branch is never taken. No need to build this guard.
782 return NULL;
783 }
784
785 IfNode* iff = create_and_map_if(control(), test, true_prob, COUNT_UNKNOWN);
786
787 Node* if_slow = _gvn.transform(new IfTrueNode(iff));
788 if (if_slow == top()) {
789 // The slow branch is never taken. No need to build this guard.
790 return NULL;
791 }
792
793 if (region != NULL)
794 region->add_req(if_slow);
795
796 Node* if_fast = _gvn.transform(new IfFalseNode(iff));
797 set_control(if_fast);
798
799 return if_slow;
800}
801
802inline Node* LibraryCallKit::generate_slow_guard(Node* test, RegionNode* region) {
803 return generate_guard(test, region, PROB_UNLIKELY_MAG(3));
804}
805inline Node* LibraryCallKit::generate_fair_guard(Node* test, RegionNode* region) {
806 return generate_guard(test, region, PROB_FAIR);
807}
808
809inline Node* LibraryCallKit::generate_negative_guard(Node* index, RegionNode* region,
810 Node* *pos_index) {
811 if (stopped())
812 return NULL; // already stopped
813 if (_gvn.type(index)->higher_equal(TypeInt::POS)) // [0,maxint]
814 return NULL; // index is already adequately typed
815 Node* cmp_lt = _gvn.transform(new CmpINode(index, intcon(0)));
816 Node* bol_lt = _gvn.transform(new BoolNode(cmp_lt, BoolTest::lt));
817 Node* is_neg = generate_guard(bol_lt, region, PROB_MIN);
818 if (is_neg != NULL && pos_index != NULL) {
819 // Emulate effect of Parse::adjust_map_after_if.
820 Node* ccast = new CastIINode(index, TypeInt::POS);
821 ccast->set_req(0, control());
822 (*pos_index) = _gvn.transform(ccast);
823 }
824 return is_neg;
825}
826
827// Make sure that 'position' is a valid limit index, in [0..length].
828// There are two equivalent plans for checking this:
829// A. (offset + copyLength) unsigned<= arrayLength
830// B. offset <= (arrayLength - copyLength)
831// We require that all of the values above, except for the sum and
832// difference, are already known to be non-negative.
833// Plan A is robust in the face of overflow, if offset and copyLength
834// are both hugely positive.
835//
836// Plan B is less direct and intuitive, but it does not overflow at
837// all, since the difference of two non-negatives is always
838// representable. Whenever Java methods must perform the equivalent
839// check they generally use Plan B instead of Plan A.
840// For the moment we use Plan A.
841inline Node* LibraryCallKit::generate_limit_guard(Node* offset,
842 Node* subseq_length,
843 Node* array_length,
844 RegionNode* region) {
845 if (stopped())
846 return NULL; // already stopped
847 bool zero_offset = _gvn.type(offset) == TypeInt::ZERO;
848 if (zero_offset && subseq_length->eqv_uncast(array_length))
849 return NULL; // common case of whole-array copy
850 Node* last = subseq_length;
851 if (!zero_offset) // last += offset
852 last = _gvn.transform(new AddINode(last, offset));
853 Node* cmp_lt = _gvn.transform(new CmpUNode(array_length, last));
854 Node* bol_lt = _gvn.transform(new BoolNode(cmp_lt, BoolTest::lt));
855 Node* is_over = generate_guard(bol_lt, region, PROB_MIN);
856 return is_over;
857}
858
859// Emit range checks for the given String.value byte array
860void LibraryCallKit::generate_string_range_check(Node* array, Node* offset, Node* count, bool char_count) {
861 if (stopped()) {
862 return; // already stopped
863 }
864 RegionNode* bailout = new RegionNode(1);
865 record_for_igvn(bailout);
866 if (char_count) {
867 // Convert char count to byte count
868 count = _gvn.transform(new LShiftINode(count, intcon(1)));
869 }
870
871 // Offset and count must not be negative
872 generate_negative_guard(offset, bailout);
873 generate_negative_guard(count, bailout);
874 // Offset + count must not exceed length of array
875 generate_limit_guard(offset, count, load_array_length(array), bailout);
876
877 if (bailout->req() > 1) {
878 PreserveJVMState pjvms(this);
879 set_control(_gvn.transform(bailout));
880 uncommon_trap(Deoptimization::Reason_intrinsic,
881 Deoptimization::Action_maybe_recompile);
882 }
883}
884
885//--------------------------generate_current_thread--------------------
886Node* LibraryCallKit::generate_current_thread(Node* &tls_output) {
887 ciKlass* thread_klass = env()->Thread_klass();
888 const Type* thread_type = TypeOopPtr::make_from_klass(thread_klass)->cast_to_ptr_type(TypePtr::NotNull);
889 Node* thread = _gvn.transform(new ThreadLocalNode());
890 Node* p = basic_plus_adr(top()/*!oop*/, thread, in_bytes(JavaThread::threadObj_offset()));
891 tls_output = thread;
892 Node* thread_obj_handle = LoadNode::make(_gvn, NULL, immutable_memory(), p, p->bottom_type()->is_ptr(), TypeRawPtr::NOTNULL, T_ADDRESS, MemNode::unordered);
893 thread_obj_handle = _gvn.transform(thread_obj_handle);
894 return access_load(thread_obj_handle, thread_type, T_OBJECT, IN_NATIVE | C2_IMMUTABLE_MEMORY);
895}
896
897
898//------------------------------make_string_method_node------------------------
899// Helper method for String intrinsic functions. This version is called with
900// str1 and str2 pointing to byte[] nodes containing Latin1 or UTF16 encoded
901// characters (depending on 'is_byte'). cnt1 and cnt2 are pointing to Int nodes
902// containing the lengths of str1 and str2.
903Node* LibraryCallKit::make_string_method_node(int opcode, Node* str1_start, Node* cnt1, Node* str2_start, Node* cnt2, StrIntrinsicNode::ArgEnc ae) {
904 Node* result = NULL;
905 switch (opcode) {
906 case Op_StrIndexOf:
907 result = new StrIndexOfNode(control(), memory(TypeAryPtr::BYTES),
908 str1_start, cnt1, str2_start, cnt2, ae);
909 break;
910 case Op_StrComp:
911 result = new StrCompNode(control(), memory(TypeAryPtr::BYTES),
912 str1_start, cnt1, str2_start, cnt2, ae);
913 break;
914 case Op_StrEquals:
915 // We already know that cnt1 == cnt2 here (checked in 'inline_string_equals').
916 // Use the constant length if there is one because optimized match rule may exist.
917 result = new StrEqualsNode(control(), memory(TypeAryPtr::BYTES),
918 str1_start, str2_start, cnt2->is_Con() ? cnt2 : cnt1, ae);
919 break;
920 default:
921 ShouldNotReachHere();
922 return NULL;
923 }
924
925 // All these intrinsics have checks.
926 C->set_has_split_ifs(true); // Has chance for split-if optimization
927 clear_upper_avx();
928
929 return _gvn.transform(result);
930}
931
932//------------------------------inline_string_compareTo------------------------
933bool LibraryCallKit::inline_string_compareTo(StrIntrinsicNode::ArgEnc ae) {
934 Node* arg1 = argument(0);
935 Node* arg2 = argument(1);
936
937 arg1 = must_be_not_null(arg1, true);
938 arg2 = must_be_not_null(arg2, true);
939
940 // Get start addr and length of first argument
941 Node* arg1_start = array_element_address(arg1, intcon(0), T_BYTE);
942 Node* arg1_cnt = load_array_length(arg1);
943
944 // Get start addr and length of second argument
945 Node* arg2_start = array_element_address(arg2, intcon(0), T_BYTE);
946 Node* arg2_cnt = load_array_length(arg2);
947
948 Node* result = make_string_method_node(Op_StrComp, arg1_start, arg1_cnt, arg2_start, arg2_cnt, ae);
949 set_result(result);
950 return true;
951}
952
953//------------------------------inline_string_equals------------------------
954bool LibraryCallKit::inline_string_equals(StrIntrinsicNode::ArgEnc ae) {
955 Node* arg1 = argument(0);
956 Node* arg2 = argument(1);
957
958 // paths (plus control) merge
959 RegionNode* region = new RegionNode(3);
960 Node* phi = new PhiNode(region, TypeInt::BOOL);
961
962 if (!stopped()) {
963
964 arg1 = must_be_not_null(arg1, true);
965 arg2 = must_be_not_null(arg2, true);
966
967 // Get start addr and length of first argument
968 Node* arg1_start = array_element_address(arg1, intcon(0), T_BYTE);
969 Node* arg1_cnt = load_array_length(arg1);
970
971 // Get start addr and length of second argument
972 Node* arg2_start = array_element_address(arg2, intcon(0), T_BYTE);
973 Node* arg2_cnt = load_array_length(arg2);
974
975 // Check for arg1_cnt != arg2_cnt
976 Node* cmp = _gvn.transform(new CmpINode(arg1_cnt, arg2_cnt));
977 Node* bol = _gvn.transform(new BoolNode(cmp, BoolTest::ne));
978 Node* if_ne = generate_slow_guard(bol, NULL);
979 if (if_ne != NULL) {
980 phi->init_req(2, intcon(0));
981 region->init_req(2, if_ne);
982 }
983
984 // Check for count == 0 is done by assembler code for StrEquals.
985
986 if (!stopped()) {
987 Node* equals = make_string_method_node(Op_StrEquals, arg1_start, arg1_cnt, arg2_start, arg2_cnt, ae);
988 phi->init_req(1, equals);
989 region->init_req(1, control());
990 }
991 }
992
993 // post merge
994 set_control(_gvn.transform(region));
995 record_for_igvn(region);
996
997 set_result(_gvn.transform(phi));
998 return true;
999}
1000
1001//------------------------------inline_array_equals----------------------------
1002bool LibraryCallKit::inline_array_equals(StrIntrinsicNode::ArgEnc ae) {
1003 assert(ae == StrIntrinsicNode::UU || ae == StrIntrinsicNode::LL, "unsupported array types");
1004 Node* arg1 = argument(0);
1005 Node* arg2 = argument(1);
1006
1007 const TypeAryPtr* mtype = (ae == StrIntrinsicNode::UU) ? TypeAryPtr::CHARS : TypeAryPtr::BYTES;
1008 set_result(_gvn.transform(new AryEqNode(control(), memory(mtype), arg1, arg2, ae)));
1009 clear_upper_avx();
1010
1011 return true;
1012}
1013
1014//------------------------------inline_hasNegatives------------------------------
1015bool LibraryCallKit::inline_hasNegatives() {
1016 if (too_many_traps(Deoptimization::Reason_intrinsic)) {
1017 return false;
1018 }
1019
1020 assert(callee()->signature()->size() == 3, "hasNegatives has 3 parameters");
1021 // no receiver since it is static method
1022 Node* ba = argument(0);
1023 Node* offset = argument(1);
1024 Node* len = argument(2);
1025
1026 ba = must_be_not_null(ba, true);
1027
1028 // Range checks
1029 generate_string_range_check(ba, offset, len, false);
1030 if (stopped()) {
1031 return true;
1032 }
1033 Node* ba_start = array_element_address(ba, offset, T_BYTE);
1034 Node* result = new HasNegativesNode(control(), memory(TypeAryPtr::BYTES), ba_start, len);
1035 set_result(_gvn.transform(result));
1036 return true;
1037}
1038
1039bool LibraryCallKit::inline_preconditions_checkIndex(BasicType bt) {
1040 Node* index = argument(0);
1041 Node* length = bt == T_INT ? argument(1) : argument(2);
1042 if (too_many_traps(Deoptimization::Reason_intrinsic) || too_many_traps(Deoptimization::Reason_range_check)) {
1043 return false;
1044 }
1045
1046 // check that length is positive
1047 Node* len_pos_cmp = _gvn.transform(CmpNode::make(length, integercon(0, bt), bt));
1048 Node* len_pos_bol = _gvn.transform(new BoolNode(len_pos_cmp, BoolTest::ge));
1049
1050 {
1051 BuildCutout unless(this, len_pos_bol, PROB_MAX);
1052 uncommon_trap(Deoptimization::Reason_intrinsic,
1053 Deoptimization::Action_make_not_entrant);
1054 }
1055
1056 if (stopped()) {
1057 // Length is known to be always negative during compilation and the IR graph so far constructed is good so return success
1058 return true;
1059 }
1060
1061 // length is now known positive, add a cast node to make this explicit
1062 jlong upper_bound = _gvn.type(length)->is_integer(bt)->hi_as_long();
1063 Node* casted_length = ConstraintCastNode::make(control(), length, TypeInteger::make(0, upper_bound, Type::WidenMax, bt), bt);
1064 casted_length = _gvn.transform(casted_length);
1065 replace_in_map(length, casted_length);
1066 length = casted_length;
1067
1068 // Use an unsigned comparison for the range check itself
1069 Node* rc_cmp = _gvn.transform(CmpNode::make(index, length, bt, true));
1070 BoolTest::mask btest = BoolTest::lt;
1071 Node* rc_bool = _gvn.transform(new BoolNode(rc_cmp, btest));
1072 RangeCheckNode* rc = new RangeCheckNode(control(), rc_bool, PROB_MAX, COUNT_UNKNOWN);
1073 _gvn.set_type(rc, rc->Value(&_gvn));
1074 if (!rc_bool->is_Con()) {
1075 record_for_igvn(rc);
1076 }
1077 set_control(_gvn.transform(new IfTrueNode(rc)));
1078 {
1079 PreserveJVMState pjvms(this);
1080 set_control(_gvn.transform(new IfFalseNode(rc)));
1081 uncommon_trap(Deoptimization::Reason_range_check,
1082 Deoptimization::Action_make_not_entrant);
1083 }
1084
1085 if (stopped()) {
1086 // Range check is known to always fail during compilation and the IR graph so far constructed is good so return success
1087 return true;
1088 }
1089
1090 // index is now known to be >= 0 and < length, cast it
1091 Node* result = ConstraintCastNode::make(control(), index, TypeInteger::make(0, upper_bound, Type::WidenMax, bt), bt);
1092 result = _gvn.transform(result);
1093 set_result(result);
1094 replace_in_map(index, result);
1095 clear_upper_avx();
1096 return true;
1097}
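// --- Editorial sketch (not part of library_call.cpp) -------------------------
// A minimal standalone illustration of the unsigned-compare trick the intrinsic
// above relies on: once length >= 0 has been established by the first guard, a
// single unsigned comparison checks both 0 <= index and index < length. The
// helper name `in_range` is hypothetical.
#include <cassert>
#include <cstdint>

static bool in_range(int32_t index, int32_t length) {
  // assumes length >= 0, as the guard in the intrinsic guarantees
  return static_cast<uint32_t>(index) < static_cast<uint32_t>(length);
}

int main() {
  assert(in_range(0, 10) && in_range(9, 10));      // in bounds
  assert(!in_range(-1, 10) && !in_range(10, 10));  // negative index and index == length both rejected
  return 0;
}
// -----------------------------------------------------------------------------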
1098
1099//------------------------------inline_string_indexOf------------------------
1100bool LibraryCallKit::inline_string_indexOf(StrIntrinsicNode::ArgEnc ae) {
1101 if (!Matcher::match_rule_supported(Op_StrIndexOf)) {
1102 return false;
1103 }
1104 Node* src = argument(0);
1105 Node* tgt = argument(1);
1106
1107 // Make the merge point
1108 RegionNode* result_rgn = new RegionNode(4);
1109 Node* result_phi = new PhiNode(result_rgn, TypeInt::INT);
1110
1111 src = must_be_not_null(src, true);
1112 tgt = must_be_not_null(tgt, true);
1113
1114 // Get start addr and length of source string
1115 Node* src_start = array_element_address(src, intcon(0), T_BYTE);
1116 Node* src_count = load_array_length(src);
1117
1118 // Get start addr and length of substring
1119 Node* tgt_start = array_element_address(tgt, intcon(0), T_BYTE);
1120 Node* tgt_count = load_array_length(tgt);
1121
1122 if (ae == StrIntrinsicNode::UU || ae == StrIntrinsicNode::UL) {
1123 // Divide src size by 2 if String is UTF16 encoded
1124 src_count = _gvn.transform(new RShiftINode(src_count, intcon(1)));
1125 }
1126 if (ae == StrIntrinsicNode::UU) {
1127 // Divide substring size by 2 if String is UTF16 encoded
1128 tgt_count = _gvn.transform(new RShiftINode(tgt_count, intcon(1)));
1129 }
1130
1131 Node* result = make_indexOf_node(src_start, src_count, tgt_start, tgt_count, result_rgn, result_phi, ae);
1132 if (result != NULL) {
1133 result_phi->init_req(3, result);
1134 result_rgn->init_req(3, control());
1135 }
1136 set_control(_gvn.transform(result_rgn));
1137 record_for_igvn(result_rgn);
1138 set_result(_gvn.transform(result_phi));
1139
1140 return true;
1141}
1142
1143//-----------------------------inline_string_indexOf-----------------------
1144bool LibraryCallKit::inline_string_indexOfI(StrIntrinsicNode::ArgEnc ae) {
1145 if (too_many_traps(Deoptimization::Reason_intrinsic)) {
1146 return false;
1147 }
1148 if (!Matcher::match_rule_supported(Op_StrIndexOf)) {
1149 return false;
1150 }
1151 assert(callee()->signature()->size() == 5, "String.indexOf() has 5 arguments");
1152 Node* src = argument(0); // byte[]
1153 Node* src_count = argument(1); // char count
1154 Node* tgt = argument(2); // byte[]
1155 Node* tgt_count = argument(3); // char count
1156 Node* from_index = argument(4); // char index
1157
1158 src = must_be_not_null(src, true);
1159 tgt = must_be_not_null(tgt, true);
1160
1161 // Multiply byte array index by 2 if String is UTF16 encoded
1162 Node* src_offset = (ae == StrIntrinsicNode::LL) ? from_index : _gvn.transform(new LShiftINode(from_index, intcon(1)));
1163 src_count = _gvn.transform(new SubINode(src_count, from_index));
1164 Node* src_start = array_element_address(src, src_offset, T_BYTE);
1165 Node* tgt_start = array_element_address(tgt, intcon(0), T_BYTE);
1166
1167 // Range checks
1168 generate_string_range_check(src, src_offset, src_count, ae != StrIntrinsicNode::LL);
1169 generate_string_range_check(tgt, intcon(0), tgt_count, ae == StrIntrinsicNode::UU);
1170 if (stopped()) {
1171 return true;
1172 }
1173
1174 RegionNode* region = new RegionNode(5);
1175 Node* phi = new PhiNode(region, TypeInt::INT);
1176
1177 Node* result = make_indexOf_node(src_start, src_count, tgt_start, tgt_count, region, phi, ae);
1178 if (result != NULL) {
1179 // The result is index relative to from_index if substring was found, -1 otherwise.
1180 // Generate code which will fold into cmove.
1181 Node* cmp = _gvn.transform(new CmpINode(result, intcon(0)));
1182 Node* bol = _gvn.transform(new BoolNode(cmp, BoolTest::lt));
1183
1184 Node* if_lt = generate_slow_guard(bol, NULL);
1185 if (if_lt != NULL) {
1186 // result == -1
1187 phi->init_req(3, result);
1188 region->init_req(3, if_lt);
1189 }
1190 if (!stopped()) {
1191 result = _gvn.transform(new AddINode(result, from_index));
1192 phi->init_req(4, result);
1193 region->init_req(4, control());
1194 }
1195 }
1196
1197 set_control(_gvn.transform(region));
1198 record_for_igvn(region);
1199 set_result(_gvn.transform(phi));
1200 clear_upper_avx();
1201
1202 return true;
1203}
1204
1205// Create StrIndexOfNode with fast path checks
1206Node* LibraryCallKit::make_indexOf_node(Node* src_start, Node* src_count, Node* tgt_start, Node* tgt_count,
1207 RegionNode* region, Node* phi, StrIntrinsicNode::ArgEnc ae) {
1208 // Check for substr count > string count
1209 Node* cmp = _gvn.transform(new CmpINode(tgt_count, src_count));
1210 Node* bol = _gvn.transform(new BoolNode(cmp, BoolTest::gt));
1211 Node* if_gt = generate_slow_guard(bol, NULL);
1212 if (if_gt != NULL) {
1213 phi->init_req(1, intcon(-1));
1214 region->init_req(1, if_gt);
1215 }
1216 if (!stopped()) {
1217 // Check for substr count == 0
1218 cmp = _gvn.transform(new CmpINode(tgt_count, intcon(0)));
1219 bol = _gvn.transform(new BoolNode(cmp, BoolTest::eq));
1220 Node* if_zero = generate_slow_guard(bol, NULL);
1221 if (if_zero != NULL) {
1222 phi->init_req(2, intcon(0));
1223 region->init_req(2, if_zero);
1224 }
1225 }
1226 if (!stopped()) {
1227 return make_string_method_node(Op_StrIndexOf, src_start, src_count, tgt_start, tgt_count, ae);
1228 }
1229 return NULL;
1230}
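// --- Editorial sketch (not part of library_call.cpp) -------------------------
// The two fast-path answers encoded by the guards above, before control falls
// through to the StrIndexOf node: a needle longer than the haystack can never
// match (-1), and an empty needle matches at offset 0. The helper name is
// hypothetical; -2 stands in for "no fast answer, run the full search".
#include <cassert>

static int index_of_fast_path(int src_count, int tgt_count) {
  if (tgt_count > src_count) return -1;  // substr count > string count
  if (tgt_count == 0)        return 0;   // substr count == 0
  return -2;                             // fall through to StrIndexOf
}

int main() {
  assert(index_of_fast_path(3, 5) == -1);
  assert(index_of_fast_path(5, 0) == 0);
  assert(index_of_fast_path(5, 3) == -2);
  return 0;
}
// -----------------------------------------------------------------------------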
1231
1232//-----------------------------inline_string_indexOfChar-----------------------
1233bool LibraryCallKit::inline_string_indexOfChar(StrIntrinsicNode::ArgEnc ae) {
1234 if (too_many_traps(Deoptimization::Reason_intrinsic)) {
1235 return false;
1236 }
1237 if (!Matcher::match_rule_supported(Op_StrIndexOfChar)) {
1238 return false;
1239 }
1240 assert(callee()->signature()->size() == 4, "String.indexOfChar() has 4 arguments");
1241 Node* src = argument(0); // byte[]
1242 Node* tgt = argument(1); // tgt is int ch
1243 Node* from_index = argument(2);
1244 Node* max = argument(3);
1245
1246 src = must_be_not_null(src, true);
1247
1248 Node* src_offset = ae == StrIntrinsicNode::L ? from_index : _gvn.transform(new LShiftINode(from_index, intcon(1)));
1249 Node* src_start = array_element_address(src, src_offset, T_BYTE);
1250 Node* src_count = _gvn.transform(new SubINode(max, from_index));
1251
1252 // Range checks
1253 generate_string_range_check(src, src_offset, src_count, ae == StrIntrinsicNode::U);
1254 if (stopped()) {
1255 return true;
1256 }
1257
1258 RegionNode* region = new RegionNode(3);
1259 Node* phi = new PhiNode(region, TypeInt::INT);
1260
1261 Node* result = new StrIndexOfCharNode(control(), memory(TypeAryPtr::BYTES), src_start, src_count, tgt, ae);
1262 C->set_has_split_ifs(true); // Has chance for split-if optimization
1263 _gvn.transform(result);
1264
1265 Node* cmp = _gvn.transform(new CmpINode(result, intcon(0)));
1266 Node* bol = _gvn.transform(new BoolNode(cmp, BoolTest::lt));
1267
1268 Node* if_lt = generate_slow_guard(bol, NULL);
1269 if (if_lt != NULL) {
1270 // result == -1
1271 phi->init_req(2, result);
1272 region->init_req(2, if_lt);
1273 }
1274 if (!stopped()) {
1275 result = _gvn.transform(new AddINode(result, from_index));
1276 phi->init_req(1, result);
1277 region->init_req(1, control());
1278 }
1279 set_control(_gvn.transform(region));
1280 record_for_igvn(region);
1281 set_result(_gvn.transform(phi));
1282
1283 return true;
1284}
1285//---------------------------inline_string_copy---------------------
1286// compressIt == true --> generate a compressed copy operation (compress char[]/byte[] to byte[])
1287// int StringUTF16.compress(char[] src, int srcOff, byte[] dst, int dstOff, int len)
1288// int StringUTF16.compress(byte[] src, int srcOff, byte[] dst, int dstOff, int len)
1289// compressIt == false --> generate an inflated copy operation (inflate byte[] to char[]/byte[])
1290// void StringLatin1.inflate(byte[] src, int srcOff, char[] dst, int dstOff, int len)
1291// void StringLatin1.inflate(byte[] src, int srcOff, byte[] dst, int dstOff, int len)
1292bool LibraryCallKit::inline_string_copy(bool compress) {
1293 if (too_many_traps(Deoptimization::Reason_intrinsic)) {
1294 return false;
1295 }
1296 int nargs = 5; // 2 oops, 3 ints
1297 assert(callee()->signature()->size() == nargs, "string copy has 5 arguments");
1298
1299 Node* src = argument(0);
1300 Node* src_offset = argument(1);
1301 Node* dst = argument(2);
1302 Node* dst_offset = argument(3);
1303 Node* length = argument(4);
1304
1305 // Check for allocation before we add nodes that would confuse
1306 // tightly_coupled_allocation()
1307 AllocateArrayNode* alloc = tightly_coupled_allocation(dst);
1308
1309 // Figure out the size and type of the elements we will be copying.
1310 const Type* src_type = src->Value(&_gvn);
1311 const Type* dst_type = dst->Value(&_gvn);
1312 BasicType src_elem = src_type->isa_aryptr()->klass()->as_array_klass()->element_type()->basic_type();
1313 BasicType dst_elem = dst_type->isa_aryptr()->klass()->as_array_klass()->element_type()->basic_type();
1314 assert((compress && dst_elem == T_BYTE && (src_elem == T_BYTE || src_elem == T_CHAR)) ||
1315 (!compress && src_elem == T_BYTE && (dst_elem == T_BYTE || dst_elem == T_CHAR)),
1316 "Unsupported array types for inline_string_copy");
1317
1318 src = must_be_not_null(src, true);
1319 dst = must_be_not_null(dst, true);
1320
1321 // Convert char[] offsets to byte[] offsets
1322 bool convert_src = (compress && src_elem == T_BYTE);
1323 bool convert_dst = (!compress && dst_elem == T_BYTE);
1324 if (convert_src) {
1325 src_offset = _gvn.transform(new LShiftINode(src_offset, intcon(1)));
1326 } else if (convert_dst) {
1327 dst_offset = _gvn.transform(new LShiftINode(dst_offset, intcon(1)));
1328 }
1329
1330 // Range checks
1331 generate_string_range_check(src, src_offset, length, convert_src);
1332 generate_string_range_check(dst, dst_offset, length, convert_dst);
1333 if (stopped()) {
1334 return true;
1335 }
1336
1337 Node* src_start = array_element_address(src, src_offset, src_elem);
1338 Node* dst_start = array_element_address(dst, dst_offset, dst_elem);
1339 // 'src_start' points to src array + scaled offset
1340 // 'dst_start' points to dst array + scaled offset
1341 Node* count = NULL;
1342 if (compress) {
1343 count = compress_string(src_start, TypeAryPtr::get_array_body_type(src_elem), dst_start, length);
1344 } else {
1345 inflate_string(src_start, dst_start, TypeAryPtr::get_array_body_type(dst_elem), length);
1346 }
1347
1348 if (alloc != NULL) {
1349 if (alloc->maybe_set_complete(&_gvn)) {
1350 // "You break it, you buy it."
1351 InitializeNode* init = alloc->initialization();
1352 assert(init->is_complete(), "we just did this");
1353 init->set_complete_with_arraycopy();
1354 assert(dst->is_CheckCastPP(), "sanity");
1355 assert(dst->in(0)->in(0) == init, "dest pinned");
1356 }
1357 // Do not let stores that initialize this object be reordered with
1358 // a subsequent store that would make this object accessible by
1359 // other threads.
1360 // Record what AllocateNode this StoreStore protects so that
1361 // escape analysis can go from the MemBarStoreStoreNode to the
1362 // AllocateNode and eliminate the MemBarStoreStoreNode if possible
1363 // based on the escape status of the AllocateNode.
1364 insert_mem_bar(Op_MemBarStoreStore, alloc->proj_out_or_null(AllocateNode::RawAddress));
1365 }
1366 if (compress) {
1367 set_result(_gvn.transform(count));
1368 }
1369 clear_upper_avx();
1370
1371 return true;
1372}
1373
1374#ifdef _LP64
1375#define XTOP ,top() /*additional argument*/
1376#else //_LP64
1377#define XTOP /*no additional argument*/
1378#endif //_LP64
1379
1380//------------------------inline_string_toBytesU--------------------------
1381// public static byte[] StringUTF16.toBytes(char[] value, int off, int len)
1382bool LibraryCallKit::inline_string_toBytesU() {
1383 if (too_many_traps(Deoptimization::Reason_intrinsic)) {
1384 return false;
1385 }
1386 // Get the arguments.
1387 Node* value = argument(0);
1388 Node* offset = argument(1);
1389 Node* length = argument(2);
1390
1391 Node* newcopy = NULL;
1392
1393 // Set the original stack and the reexecute bit for the interpreter to reexecute
1394 // the bytecode that invokes StringUTF16.toBytes() if deoptimization happens.
1395 { PreserveReexecuteState preexecs(this);
1396 jvms()->set_should_reexecute(true);
1397
1398 // Check if a null path was taken unconditionally.
1399 value = null_check(value);
1400
1401 RegionNode* bailout = new RegionNode(1);
1402 record_for_igvn(bailout);
1403
1404 // Range checks
1405 generate_negative_guard(offset, bailout);
1406 generate_negative_guard(length, bailout);
1407 generate_limit_guard(offset, length, load_array_length(value), bailout);
1408 // Make sure that resulting byte[] length does not overflow Integer.MAX_VALUE
1409 generate_limit_guard(length, intcon(0), intcon(max_jint/2), bailout);
1410
1411 if (bailout->req() > 1) {
1412 PreserveJVMState pjvms(this);
1413 set_control(_gvn.transform(bailout));
1414 uncommon_trap(Deoptimization::Reason_intrinsic,
1415 Deoptimization::Action_maybe_recompile);
1416 }
1417 if (stopped()) {
1418 return true;
1419 }
1420
1421 Node* size = _gvn.transform(new LShiftINode(length, intcon(1)));
1422 Node* klass_node = makecon(TypeKlassPtr::make(ciTypeArrayKlass::make(T_BYTE)));
1423 newcopy = new_array(klass_node, size, 0); // no arguments to push
1424 AllocateArrayNode* alloc = tightly_coupled_allocation(newcopy);
1425 guarantee(alloc != NULL, "created above");
1426
1427 // Calculate starting addresses.
1428 Node* src_start = array_element_address(value, offset, T_CHAR);
1429 Node* dst_start = basic_plus_adr(newcopy, arrayOopDesc::base_offset_in_bytes(T_BYTE));
1430
1431 // Check if src array address is aligned to HeapWordSize (dst is always aligned)
1432 const TypeInt* toffset = gvn().type(offset)->is_int();
1433 bool aligned = toffset->is_con() && ((toffset->get_con() * type2aelembytes(T_CHAR)) % HeapWordSize == 0);
1434
1435 // Figure out which arraycopy runtime method to call (disjoint, uninitialized).
1436 const char* copyfunc_name = "arraycopy";
1437 address copyfunc_addr = StubRoutines::select_arraycopy_function(T_CHAR, aligned, true, copyfunc_name, true);
1438 Node* call = make_runtime_call(RC_LEAF|RC_NO_FP,
1439 OptoRuntime::fast_arraycopy_Type(),
1440 copyfunc_addr, copyfunc_name, TypeRawPtr::BOTTOM,
1441 src_start, dst_start, ConvI2X(length) XTOP);
1442 // Do not let reads from the cloned object float above the arraycopy.
1443 if (alloc->maybe_set_complete(&_gvn)) {
1444 // "You break it, you buy it."
1445 InitializeNode* init = alloc->initialization();
1446 assert(init->is_complete(), "we just did this");
1447 init->set_complete_with_arraycopy();
1448 assert(newcopy->is_CheckCastPP(), "sanity");
1449 assert(newcopy->in(0)->in(0) == init, "dest pinned");
1450 }
1451 // Do not let stores that initialize this object be reordered with
1452 // a subsequent store that would make this object accessible by
1453 // other threads.
1454 // Record what AllocateNode this StoreStore protects so that
1455 // escape analysis can go from the MemBarStoreStoreNode to the
1456 // AllocateNode and eliminate the MemBarStoreStoreNode if possible
1457 // based on the escape status of the AllocateNode.
1458 insert_mem_bar(Op_MemBarStoreStore, alloc->proj_out_or_null(AllocateNode::RawAddress));
1459 } // original reexecute is set back here
1460
1461 C->set_has_split_ifs(true); // Has chance for split-if optimization
1462 if (!stopped()) {
1463 set_result(newcopy);
1464 }
1465 clear_upper_avx();
1466
1467 return true;
1468}
1469
1470//------------------------inline_string_getCharsU--------------------------
1471// public void StringUTF16.getChars(byte[] src, int srcBegin, int srcEnd, char dst[], int dstBegin)
1472bool LibraryCallKit::inline_string_getCharsU() {
1473 if (too_many_traps(Deoptimization::Reason_intrinsic)) {
1474 return false;
1475 }
1476
1477 // Get the arguments.
1478 Node* src = argument(0);
1479 Node* src_begin = argument(1);
1480 Node* src_end = argument(2); // exclusive offset (i < src_end)
1481 Node* dst = argument(3);
1482 Node* dst_begin = argument(4);
1483
1484 // Check for allocation before we add nodes that would confuse
1485 // tightly_coupled_allocation()
1486 AllocateArrayNode* alloc = tightly_coupled_allocation(dst);
1487
1488 // Check if a null path was taken unconditionally.
1489 src = null_check(src);
1490 dst = null_check(dst);
1491 if (stopped()) {
1492 return true;
1493 }
1494
1495 // Get length and convert char[] offset to byte[] offset
1496 Node* length = _gvn.transform(new SubINode(src_end, src_begin));
1497 src_begin = _gvn.transform(new LShiftINode(src_begin, intcon(1)));
1498
1499 // Range checks
1500 generate_string_range_check(src, src_begin, length, true);
1501 generate_string_range_check(dst, dst_begin, length, false);
1502 if (stopped()) {
1503 return true;
1504 }
1505
1506 if (!stopped()) {
1507 // Calculate starting addresses.
1508 Node* src_start = array_element_address(src, src_begin, T_BYTE);
1509 Node* dst_start = array_element_address(dst, dst_begin, T_CHAR);
1510
1511 // Check if array addresses are aligned to HeapWordSize
1512 const TypeInt* tsrc = gvn().type(src_begin)->is_int();
1513 const TypeInt* tdst = gvn().type(dst_begin)->is_int();
1514 bool aligned = tsrc->is_con() && ((tsrc->get_con() * type2aelembytes(T_BYTE)) % HeapWordSize == 0) &&
1515 tdst->is_con() && ((tdst->get_con() * type2aelembytes(T_CHAR)) % HeapWordSize == 0);
1516
1517 // Figure out which arraycopy runtime method to call (disjoint, uninitialized).
1518 const char* copyfunc_name = "arraycopy";
1519 address copyfunc_addr = StubRoutines::select_arraycopy_function(T_CHAR, aligned, true, copyfunc_name, true);
1520 Node* call = make_runtime_call(RC_LEAF|RC_NO_FP,
1521 OptoRuntime::fast_arraycopy_Type(),
1522 copyfunc_addr, copyfunc_name, TypeRawPtr::BOTTOM,
1523 src_start, dst_start, ConvI2X(length) XTOP);
1524 // Do not let reads from the cloned object float above the arraycopy.
1525 if (alloc != NULL) {
1526 if (alloc->maybe_set_complete(&_gvn)) {
1527 // "You break it, you buy it."
1528 InitializeNode* init = alloc->initialization();
1529 assert(init->is_complete(), "we just did this");
1530 init->set_complete_with_arraycopy();
1531 assert(dst->is_CheckCastPP(), "sanity");
1532 assert(dst->in(0)->in(0) == init, "dest pinned");
1533 }
1534 // Do not let stores that initialize this object be reordered with
1535 // a subsequent store that would make this object accessible by
1536 // other threads.
1537 // Record what AllocateNode this StoreStore protects so that
1538 // escape analysis can go from the MemBarStoreStoreNode to the
1539 // AllocateNode and eliminate the MemBarStoreStoreNode if possible
1540 // based on the escape status of the AllocateNode.
1541 insert_mem_bar(Op_MemBarStoreStore, alloc->proj_out_or_null(AllocateNode::RawAddress));
1542 } else {
1543 insert_mem_bar(Op_MemBarCPUOrder);
1544 }
1545 }
1546
1547 C->set_has_split_ifs(true); // Has chance for split-if optimization
1548 return true;
1549}
1550
1551//----------------------inline_string_char_access----------------------------
1552// Store/Load char to/from byte[] array.
1553// static void StringUTF16.putChar(byte[] val, int index, int c)
1554// static char StringUTF16.getChar(byte[] val, int index)
1555bool LibraryCallKit::inline_string_char_access(bool is_store) {
1556 Node* value = argument(0);
1557 Node* index = argument(1);
1558 Node* ch = is_store ? argument(2) : NULL;
1559
1560 // This intrinsic accesses byte[] array as char[] array. Computing the offsets
1561 // correctly requires matched array shapes.
1562 assert (arrayOopDesc::base_offset_in_bytes(T_CHAR) == arrayOopDesc::base_offset_in_bytes(T_BYTE),
1563 "sanity: byte[] and char[] bases agree");
1564 assert (type2aelembytes(T_CHAR) == type2aelembytes(T_BYTE)*2,
1565 "sanity: byte[] and char[] scales agree");
1566
1567 // Bail when getChar over constants is requested: constant folding would
1568 // reject folding mismatched char access over byte[]. A normal inlining for getChar
1569 // Java method would constant fold nicely instead.
1570 if (!is_store && value->is_Con() && index->is_Con()) {
1571 return false;
1572 }
1573
1574 value = must_be_not_null(value, true);
1575
1576 Node* adr = array_element_address(value, index, T_CHAR);
1577 if (adr->is_top()) {
1578 return false;
1579 }
1580 if (is_store) {
1581 access_store_at(value, adr, TypeAryPtr::BYTES, ch, TypeInt::CHAR, T_CHAR, IN_HEAP | MO_UNORDERED | C2_MISMATCHED);
1582 } else {
1583 ch = access_load_at(value, adr, TypeAryPtr::BYTES, TypeInt::CHAR, T_CHAR, IN_HEAP | MO_UNORDERED | C2_MISMATCHED | C2_CONTROL_DEPENDENT_LOAD);
1584 set_result(ch);
1585 }
1586 return true;
1587}
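// --- Editorial sketch (not part of library_call.cpp) -------------------------
// The addressing the intrinsic performs: a char at logical index i is read from
// byte offset 2*i of the byte[] backing store (a "mismatched" access in C2
// terms). The helper below is hypothetical and endian-agnostic because it
// builds the byte view from native chars first.
#include <cassert>
#include <cstdint>
#include <cstring>

static uint16_t get_char(const uint8_t* value, size_t index) {
  uint16_t c;
  std::memcpy(&c, value + 2 * index, sizeof c);  // byte[] accessed as char[]
  return c;
}

int main() {
  uint16_t chars[2] = { 'A', 'B' };
  uint8_t bytes[sizeof chars];
  std::memcpy(bytes, chars, sizeof bytes);       // byte[] view of the char data
  assert(get_char(bytes, 0) == 'A' && get_char(bytes, 1) == 'B');
  return 0;
}
// -----------------------------------------------------------------------------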
1588
1589//--------------------------round_double_node--------------------------------
1590// Round a double node if necessary.
1591Node* LibraryCallKit::round_double_node(Node* n) {
1592 if (Matcher::strict_fp_requires_explicit_rounding) {
1593#ifdef IA32
1594 if (UseSSE < 2) {
1595 n = _gvn.transform(new RoundDoubleNode(NULL, n));
1596 }
1597#else
1598 Unimplemented();
1599#endif // IA32
1600 }
1601 return n;
1602}
1603
1604//------------------------------inline_math-----------------------------------
1605// public static double Math.abs(double)
1606// public static double Math.sqrt(double)
1607// public static double Math.log(double)
1608// public static double Math.log10(double)
1609bool LibraryCallKit::inline_double_math(vmIntrinsics::ID id) {
1610 Node* arg = round_double_node(argument(0));
1611 Node* n = NULL;
1612 switch (id) {
1613 case vmIntrinsics::_dabs: n = new AbsDNode( arg); break;
1614 case vmIntrinsics::_dsqrt:
1615 case vmIntrinsics::_dsqrt_strict:
1616 n = new SqrtDNode(C, control(), arg); break;
1617 case vmIntrinsics::_ceil: n = RoundDoubleModeNode::make(_gvn, arg, RoundDoubleModeNode::rmode_ceil); break;
1618 case vmIntrinsics::_floor: n = RoundDoubleModeNode::make(_gvn, arg, RoundDoubleModeNode::rmode_floor); break;
1619 case vmIntrinsics::_rint: n = RoundDoubleModeNode::make(_gvn, arg, RoundDoubleModeNode::rmode_rint); break;
1620 case vmIntrinsics::_dcopySign: n = CopySignDNode::make(_gvn, arg, round_double_node(argument(2))); break;
1621 case vmIntrinsics::_dsignum: n = SignumDNode::make(_gvn, arg); break;
1622 default: fatal_unexpected_iid(id); break;
1623 }
1624 set_result(_gvn.transform(n));
1625 return true;
1626}
1627
1628//------------------------------inline_math-----------------------------------
1629// public static float Math.abs(float)
1630// public static int Math.abs(int)
1631// public static long Math.abs(long)
1632bool LibraryCallKit::inline_math(vmIntrinsics::ID id) {
1633 Node* arg = argument(0);
1634 Node* n = NULL;
1635 switch (id) {
1636 case vmIntrinsics::_fabs: n = new AbsFNode( arg); break;
1637 case vmIntrinsics::_iabs: n = new AbsINode( arg); break;
1638 case vmIntrinsics::_labs: n = new AbsLNode( arg); break;
1639 case vmIntrinsics::_fcopySign: n = new CopySignFNode(arg, argument(1)); break;
1640 case vmIntrinsics::_fsignum: n = SignumFNode::make(_gvn, arg); break;
1641 default: fatal_unexpected_iid(id); break;
1642 }
1643 set_result(_gvn.transform(n));
1644 return true;
1645}
1646
1647//------------------------------runtime_math-----------------------------
1648bool LibraryCallKit::runtime_math(const TypeFunc* call_type, address funcAddr, const char* funcName) {
1649 assert(call_type == OptoRuntime::Math_DD_D_Type() || call_type == OptoRuntime::Math_D_D_Type(),
1650 "must be (DD)D or (D)D type");
1651
1652 // Inputs
1653 Node* a = round_double_node(argument(0));
1654 Node* b = (call_type == OptoRuntime::Math_DD_D_Type()) ? round_double_node(argument(2)) : NULL;
1655
1656 const TypePtr* no_memory_effects = NULL;
1657 Node* trig = make_runtime_call(RC_LEAF, call_type, funcAddr, funcName,
1658 no_memory_effects,
1659 a, top(), b, b ? top() : NULL);
1660 Node* value = _gvn.transform(new ProjNode(trig, TypeFunc::Parms+0));
1661#ifdef ASSERT
1662 Node* value_top = _gvn.transform(new ProjNode(trig, TypeFunc::Parms+1));
1663 assert(value_top == top(), "second value must be top");
1664#endif
1665
1666 set_result(value);
1667 return true;
1668}
1669
1670//------------------------------inline_math_pow-----------------------------
1671bool LibraryCallKit::inline_math_pow() {
1672 Node* exp = round_double_node(argument(2));
1673 const TypeD* d = _gvn.type(exp)->isa_double_constant();
1674 if (d != NULL) {
1675 if (d->getd() == 2.0) {
1676 // Special case: pow(x, 2.0) => x * x
1677 Node* base = round_double_node(argument(0));
1678 set_result(_gvn.transform(new MulDNode(base, base)));
1679 return true;
1680 } else if (d->getd() == 0.5 && Matcher::match_rule_supported(Op_SqrtD)) {
1681 // Special case: pow(x, 0.5) => sqrt(x)
1682 Node* base = round_double_node(argument(0));
1683 Node* zero = _gvn.zerocon(T_DOUBLE);
1684
1685 RegionNode* region = new RegionNode(3);
1686 Node* phi = new PhiNode(region, Type::DOUBLE);
1687
1688 Node* cmp = _gvn.transform(new CmpDNode(base, zero));
1689 // According to the API specs, pow(-0.0, 0.5) = 0.0 and sqrt(-0.0) = -0.0.
1690 // So pow(-0.0, 0.5) shouldn't be replaced with sqrt(-0.0).
1691 // -0.0/+0.0 are both excluded since floating-point comparison doesn't distinguish -0.0 from +0.0.
1692 Node* test = _gvn.transform(new BoolNode(cmp, BoolTest::le));
1693
1694 Node* if_pow = generate_slow_guard(test, NULL);
1695 Node* value_sqrt = _gvn.transform(new SqrtDNode(C, control(), base));
1696 phi->init_req(1, value_sqrt);
1697 region->init_req(1, control());
1698
1699 if (if_pow != NULL) {
1700 set_control(if_pow);
1701 address target = StubRoutines::dpow() != NULL ? StubRoutines::dpow() :
1702 CAST_FROM_FN_PTR(address, SharedRuntime::dpow);
1703 const TypePtr* no_memory_effects = NULL;
1704 Node* trig = make_runtime_call(RC_LEAF, OptoRuntime::Math_DD_D_Type(), target, "POW",
1705 no_memory_effects, base, top(), exp, top());
1706 Node* value_pow = _gvn.transform(new ProjNode(trig, TypeFunc::Parms+0));
1707#ifdef ASSERT
1708 Node* value_top = _gvn.transform(new ProjNode(trig, TypeFunc::Parms+1));
1709 assert(value_top == top(), "second value must be top");
1710#endif
1711 phi->init_req(2, value_pow);
1712 region->init_req(2, _gvn.transform(new ProjNode(trig, TypeFunc::Control)));
1713 }
1714
1715 C->set_has_split_ifs(true); // Has chance for split-if optimization
1716 set_control(_gvn.transform(region));
1717 record_for_igvn(region);
1718 set_result(_gvn.transform(phi));
1719
1720 return true;
1721 }
1722 }
1723
1724 return StubRoutines::dpow() != NULL ?
1725 runtime_math(OptoRuntime::Math_DD_D_Type(), StubRoutines::dpow(), "dpow") :
1726 runtime_math(OptoRuntime::Math_DD_D_Type(), CAST_FROM_FN_PTR(address, SharedRuntime::dpow), "POW");
1727}
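// --- Editorial sketch (not part of library_call.cpp) -------------------------
// Why pow(x, 0.5) cannot be rewritten unconditionally as sqrt(x): the two
// disagree for -0.0 and -infinity, which is exactly what the BoolTest::le guard
// above routes to the runtime POW call. A standalone IEEE-754 check:
#include <cmath>
#include <cstdio>

int main() {
  const double inputs[] = { -0.0, 4.0, -INFINITY };
  for (double x : inputs) {
    // pow(-0.0, 0.5) == +0.0 but sqrt(-0.0) == -0.0;
    // pow(-inf, 0.5) == +inf but sqrt(-inf) == NaN.
    std::printf("x=%g  pow(x,0.5)=%g  sqrt(x)=%g\n", x, std::pow(x, 0.5), std::sqrt(x));
  }
  return 0;
}
// -----------------------------------------------------------------------------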
1728
1729//------------------------------inline_math_native-----------------------------
1730bool LibraryCallKit::inline_math_native(vmIntrinsics::ID id) {
1731#define FN_PTR(f) CAST_FROM_FN_PTR(address, f)
1732 switch (id) {
1733 // These intrinsics are not properly supported on all hardware
1734 case vmIntrinsics::_dsin:
1735 return StubRoutines::dsin() != NULL ?
1736 runtime_math(OptoRuntime::Math_D_D_Type(), StubRoutines::dsin(), "dsin") :
1737 runtime_math(OptoRuntime::Math_D_D_Type(), FN_PTR(SharedRuntime::dsin), "SIN");
1738 case vmIntrinsics::_dcos:
1739 return StubRoutines::dcos() != NULL ?
1740 runtime_math(OptoRuntime::Math_D_D_Type(), StubRoutines::dcos(), "dcos") :
1741 runtime_math(OptoRuntime::Math_D_D_Type(), FN_PTR(SharedRuntime::dcos), "COS");
1742 case vmIntrinsics::_dtan:
1743 return StubRoutines::dtan() != NULL ?
1744 runtime_math(OptoRuntime::Math_D_D_Type(), StubRoutines::dtan(), "dtan") :
1745 runtime_math(OptoRuntime::Math_D_D_Type(), FN_PTR(SharedRuntime::dtan), "TAN");
1746 case vmIntrinsics::_dlog:
1747 return StubRoutines::dlog() != NULL ?
1748 runtime_math(OptoRuntime::Math_D_D_Type(), StubRoutines::dlog(), "dlog") :
1749 runtime_math(OptoRuntime::Math_D_D_Type(), FN_PTR(SharedRuntime::dlog), "LOG");
1750 case vmIntrinsics::_dlog10:
1751 return StubRoutines::dlog10() != NULL ?
1752 runtime_math(OptoRuntime::Math_D_D_Type(), StubRoutines::dlog10(), "dlog10") :
1753 runtime_math(OptoRuntime::Math_D_D_Type(), FN_PTR(SharedRuntime::dlog10), "LOG10");
1754
1755 // These intrinsics are supported on all hardware
1756 case vmIntrinsics::_ceil:
1757 case vmIntrinsics::_floor:
1758 case vmIntrinsics::_rint: return Matcher::match_rule_supported(Op_RoundDoubleMode) ? inline_double_math(id) : false;
1759 case vmIntrinsics::_dsqrt:
1760 case vmIntrinsics::_dsqrt_strict:
1761 return Matcher::match_rule_supported(Op_SqrtD) ? inline_double_math(id) : false;
1762 case vmIntrinsics::_dabs: return Matcher::has_match_rule(Op_AbsD) ? inline_double_math(id) : false;
1763 case vmIntrinsics::_fabs: return Matcher::match_rule_supported(Op_AbsF) ? inline_math(id) : false;
1764 case vmIntrinsics::_iabs: return Matcher::match_rule_supported(Op_AbsI) ? inline_math(id) : false;
1765 case vmIntrinsics::_labs: return Matcher::match_rule_supported(Op_AbsL) ? inline_math(id) : false;
1766
1767 case vmIntrinsics::_dexp:
1768 return StubRoutines::dexp() != NULL ?
1769 runtime_math(OptoRuntime::Math_D_D_Type(), StubRoutines::dexp(), "dexp") :
1770 runtime_math(OptoRuntime::Math_D_D_Type(), FN_PTR(SharedRuntime::dexp), "EXP");
1771#undef FN_PTR
1772
1773 case vmIntrinsics::_dpow: return inline_math_pow();
1774 case vmIntrinsics::_dcopySign: return inline_double_math(id);
1775 case vmIntrinsics::_fcopySign: return inline_math(id);
1776 case vmIntrinsics::_dsignum: return Matcher::match_rule_supported(Op_SignumD) ? inline_double_math(id) : false;
1777 case vmIntrinsics::_fsignum: return Matcher::match_rule_supported(Op_SignumF) ? inline_math(id) : false;
1778
1779 // These intrinsics are not yet correctly implemented
1780 case vmIntrinsics::_datan2:
1781 return false;
1782
1783 default:
1784 fatal_unexpected_iid(id);
1785 return false;
1786 }
1787}
1788
1789static bool is_simple_name(Node* n) {
1790 return (n->req() == 1 // constant
1791 || (n->is_Type() && n->as_Type()->type()->singleton())
1792 || n->is_Proj() // parameter or return value
1793 || n->is_Phi() // local of some sort
1794 );
1795}
1796
1797//----------------------------inline_notify-----------------------------------*
1798bool LibraryCallKit::inline_notify(vmIntrinsics::ID id) {
1799 const TypeFunc* ftype = OptoRuntime::monitor_notify_Type();
1800 address func;
1801 if (id == vmIntrinsics::_notify) {
1802 func = OptoRuntime::monitor_notify_Java();
1803 } else {
1804 func = OptoRuntime::monitor_notifyAll_Java();
1805 }
1806 Node* call = make_runtime_call(RC_NO_LEAF, ftype, func, NULL, TypeRawPtr::BOTTOM, argument(0));
1807 make_slow_call_ex(call, env()->Throwable_klass(), false);
1808 return true;
1809}
1810
1811
1812//----------------------------inline_min_max-----------------------------------
1813bool LibraryCallKit::inline_min_max(vmIntrinsics::ID id) {
1814 set_result(generate_min_max(id, argument(0), argument(1)));
1815 return true;
1816}
1817
1818void LibraryCallKit::inline_math_mathExact(Node* math, Node *test) {
1819 Node* bol = _gvn.transform( new BoolNode(test, BoolTest::overflow) );
1820 IfNode* check = create_and_map_if(control(), bol, PROB_UNLIKELY_MAG(3), COUNT_UNKNOWN);
1821 Node* fast_path = _gvn.transform( new IfFalseNode(check));
1822 Node* slow_path = _gvn.transform( new IfTrueNode(check) );
1823
1824 {
1825 PreserveJVMState pjvms(this);
1826 PreserveReexecuteState preexecs(this);
1827 jvms()->set_should_reexecute(true);
1828
1829 set_control(slow_path);
1830 set_i_o(i_o());
1831
1832 uncommon_trap(Deoptimization::Reason_intrinsic,
1833 Deoptimization::Action_none);
1834 }
1835
1836 set_control(fast_path);
1837 set_result(math);
1838}
1839
1840template <typename OverflowOp>
1841bool LibraryCallKit::inline_math_overflow(Node* arg1, Node* arg2) {
1842 typedef typename OverflowOp::MathOp MathOp;
1843
1844 MathOp* mathOp = new MathOp(arg1, arg2);
1845 Node* operation = _gvn.transform( mathOp );
1846 Node* ofcheck = _gvn.transform( new OverflowOp(arg1, arg2) );
1847 inline_math_mathExact(operation, ofcheck);
1848 return true;
1849}
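// --- Editorial sketch (not part of library_call.cpp) -------------------------
// The overflow predicate an OverflowAddINode models: a 32-bit signed addition
// overflows exactly when both operands have the same sign and the wrapped sum's
// sign differs. A hypothetical standalone check of that rule:
#include <cassert>
#include <cstdint>

static bool add_overflows(int32_t a, int32_t b) {
  // compute the wrapped sum via unsigned arithmetic (no UB), then test signs
  int32_t sum = static_cast<int32_t>(static_cast<uint32_t>(a) + static_cast<uint32_t>(b));
  return ((a ^ sum) & (b ^ sum)) < 0;  // sign bit set => overflow
}

int main() {
  assert(add_overflows(INT32_MAX, 1));
  assert(add_overflows(INT32_MIN, -1));
  assert(!add_overflows(INT32_MAX, 0) && !add_overflows(-5, 3));
  return 0;
}
// -----------------------------------------------------------------------------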
1850
1851bool LibraryCallKit::inline_math_addExactI(bool is_increment) {
1852 return inline_math_overflow<OverflowAddINode>(argument(0), is_increment ? intcon(1) : argument(1));
1853}
1854
1855bool LibraryCallKit::inline_math_addExactL(bool is_increment) {
1856 return inline_math_overflow<OverflowAddLNode>(argument(0), is_increment ? longcon(1) : argument(2));
1857}
1858
1859bool LibraryCallKit::inline_math_subtractExactI(bool is_decrement) {
1860 return inline_math_overflow<OverflowSubINode>(argument(0), is_decrement ? intcon(1) : argument(1));
1861}
1862
1863bool LibraryCallKit::inline_math_subtractExactL(bool is_decrement) {
1864 return inline_math_overflow<OverflowSubLNode>(argument(0), is_decrement ? longcon(1) : argument(2));
1865}
1866
1867bool LibraryCallKit::inline_math_negateExactI() {
1868 return inline_math_overflow<OverflowSubINode>(intcon(0), argument(0));
1869}
1870
1871bool LibraryCallKit::inline_math_negateExactL() {
1872 return inline_math_overflow<OverflowSubLNode>(longcon(0), argument(0));
1873}
1874
1875bool LibraryCallKit::inline_math_multiplyExactI() {
1876 return inline_math_overflow<OverflowMulINode>(argument(0), argument(1));
1877}
1878
1879bool LibraryCallKit::inline_math_multiplyExactL() {
1880 return inline_math_overflow<OverflowMulLNode>(argument(0), argument(2));
1881}
1882
1883bool LibraryCallKit::inline_math_multiplyHigh() {
1884 set_result(_gvn.transform(new MulHiLNode(argument(0), argument(2))));
1885 return true;
1886}
1887
1888bool LibraryCallKit::inline_math_unsignedMultiplyHigh() {
1889 set_result(_gvn.transform(new UMulHiLNode(argument(0), argument(2))));
1890 return true;
1891}
1892
1893Node*
1894LibraryCallKit::generate_min_max(vmIntrinsics::ID id, Node* x0, Node* y0) {
1895 // These are the candidate return value:
1896 Node* xvalue = x0;
1897 Node* yvalue = y0;
1898
1899 if (xvalue == yvalue) {
1900 return xvalue;
1901 }
1902
1903 bool want_max = (id == vmIntrinsics::_max || id == vmIntrinsics::_max_strict);
1904
1905 const TypeInt* txvalue = _gvn.type(xvalue)->isa_int();
1906 const TypeInt* tyvalue = _gvn.type(yvalue)->isa_int();
1907 if (txvalue == NULL || tyvalue == NULL) return top();
1908 // This is not really necessary, but it is consistent with a
1909 // hypothetical MaxINode::Value method:
1910 int widen = MAX2(txvalue->_widen, tyvalue->_widen);
1911
1912 // %%% This folding logic should (ideally) be in a different place.
1913 // Some should be inside IfNode, and there to be a more reliable
1914 // transformation of ?: style patterns into cmoves. We also want
1915 // more powerful optimizations around cmove and min/max.
1916
1917 // Try to find a dominating comparison of these guys.
1918 // It can simplify the index computation for Arrays.copyOf
1919 // and similar uses of System.arraycopy.
1920 // First, compute the normalized version of CmpI(x, y).
1921 int cmp_op = Op_CmpI;
1922 Node* xkey = xvalue;
1923 Node* ykey = yvalue;
1924 Node* ideal_cmpxy = _gvn.transform(new CmpINode(xkey, ykey));
1925 if (ideal_cmpxy->is_Cmp()) {
1926 // E.g., if we have CmpI(length - offset, count),
1927 // it might idealize to CmpI(length, count + offset)
1928 cmp_op = ideal_cmpxy->Opcode();
1929 xkey = ideal_cmpxy->in(1);
1930 ykey = ideal_cmpxy->in(2);
1931 }
1932
1933 // Start by locating any relevant comparisons.
1934 Node* start_from = (xkey->outcnt() < ykey->outcnt()) ? xkey : ykey;
1935 Node* cmpxy = NULL;
1936 Node* cmpyx = NULL;
1937 for (DUIterator_Fast kmax, k = start_from->fast_outs(kmax); k < kmax; k++) {
1938 Node* cmp = start_from->fast_out(k);
1939 if (cmp->outcnt() > 0 && // must have prior uses
1940 cmp->in(0) == NULL && // must be context-independent
1941 cmp->Opcode() == cmp_op) { // right kind of compare
1942 if (cmp->in(1) == xkey && cmp->in(2) == ykey) cmpxy = cmp;
1943 if (cmp->in(1) == ykey && cmp->in(2) == xkey) cmpyx = cmp;
1944 }
1945 }
1946
1947 const int NCMPS = 2;
1948 Node* cmps[NCMPS] = { cmpxy, cmpyx };
1949 int cmpn;
1950 for (cmpn = 0; cmpn < NCMPS; cmpn++) {
1951 if (cmps[cmpn] != NULL) break; // find a result
1952 }
1953 if (cmpn < NCMPS) {
1954 // Look for a dominating test that tells us the min and max.
1955 int depth = 0; // Limit search depth for speed
1956 Node* dom = control();
1957 for (; dom != NULL; dom = IfNode::up_one_dom(dom, true)) {
1958 if (++depth >= 100) break;
1959 Node* ifproj = dom;
1960 if (!ifproj->is_Proj()) continue;
1961 Node* iff = ifproj->in(0);
1962 if (!iff->is_If()) continue;
1963 Node* bol = iff->in(1);
1964 if (!bol->is_Bool()) continue;
1965 Node* cmp = bol->in(1);
1966 if (cmp == NULL) continue;
1967 for (cmpn = 0; cmpn < NCMPS; cmpn++)
1968 if (cmps[cmpn] == cmp) break;
1969 if (cmpn == NCMPS) continue;
1970 BoolTest::mask btest = bol->as_Bool()->_test._test;
1971 if (ifproj->is_IfFalse()) btest = BoolTest(btest).negate();
1972 if (cmp->in(1) == ykey) btest = BoolTest(btest).commute();
1973 // At this point, we know that 'x btest y' is true.
1974 switch (btest) {
1975 case BoolTest::eq:
1976 // They are proven equal, so we can collapse the min/max.
1977 // Either value is the answer. Choose the simpler.
1978 if (is_simple_name(yvalue) && !is_simple_name(xvalue))
1979 return yvalue;
1980 return xvalue;
1981 case BoolTest::lt: // x < y
1982 case BoolTest::le: // x <= y
1983 return (want_max ? yvalue : xvalue);
1984 case BoolTest::gt: // x > y
1985 case BoolTest::ge: // x >= y
1986 return (want_max ? xvalue : yvalue);
1987 default:
1988 break;
1989 }
1990 }
1991 }
1992
1993 // We failed to find a dominating test.
1994 // Let's pick a test that might GVN with prior tests.
1995 Node* best_bol = NULL;
1996 BoolTest::mask best_btest = BoolTest::illegal;
1997 for (cmpn = 0; cmpn < NCMPS; cmpn++) {
1998 Node* cmp = cmps[cmpn];
1999 if (cmp == NULL) continue;
2000 for (DUIterator_Fast jmax, j = cmp->fast_outs(jmax); j < jmax; j++) {
2001 Node* bol = cmp->fast_out(j);
2002 if (!bol->is_Bool()) continue;
2003 BoolTest::mask btest = bol->as_Bool()->_test._test;
2004 if (btest == BoolTest::eq || btest == BoolTest::ne) continue;
2005 if (cmp->in(1) == ykey) btest = BoolTest(btest).commute();
2006 if (bol->outcnt() > (best_bol == NULL ? 0 : best_bol->outcnt())) {
2007 best_bol = bol->as_Bool();
2008 best_btest = btest;
2009 }
2010 }
2011 }
2012
2013 Node* answer_if_true = NULL;
2014 Node* answer_if_false = NULL;
2015 switch (best_btest) {
2016 default:
2017 if (cmpxy == NULL)
2018 cmpxy = ideal_cmpxy;
2019 best_bol = _gvn.transform(new BoolNode(cmpxy, BoolTest::lt));
2020 // and fall through:
2021 case BoolTest::lt: // x < y
2022 case BoolTest::le: // x <= y
2023 answer_if_true = (want_max ? yvalue : xvalue);
2024 answer_if_false = (want_max ? xvalue : yvalue);
2025 break;
2026 case BoolTest::gt: // x > y
2027 case BoolTest::ge: // x >= y
2028 answer_if_true = (want_max ? xvalue : yvalue);
2029 answer_if_false = (want_max ? yvalue : xvalue);
2030 break;
2031 }
2032
2033 jint hi, lo;
2034 if (want_max) {
2035 // We can sharpen the minimum.
2036 hi = MAX2(txvalue->_hi, tyvalue->_hi);
2037 lo = MAX2(txvalue->_lo, tyvalue->_lo);
2038 } else {
2039 // We can sharpen the maximum.
2040 hi = MIN2(txvalue->_hi, tyvalue->_hi);
2041 lo = MIN2(txvalue->_lo, tyvalue->_lo);
2042 }
2043
2044 // Use a flow-free graph structure, to avoid creating excess control edges
2045 // which could hinder other optimizations.
2046 // Since Math.min/max is often used with arraycopy, we want
2047 // tightly_coupled_allocation to be able to see beyond min/max expressions.
2048 Node* cmov = CMoveNode::make(NULL, best_bol,
2049 answer_if_false, answer_if_true,
2050 TypeInt::make(lo, hi, widen));
2051
2052 return _gvn.transform(cmov);
2053
2054 /*
2055 // This is not as desirable as it may seem, since Min and Max
2056 // nodes do not have a full set of optimizations.
2057 // And they would interfere, anyway, with 'if' optimizations
2058 // and with CMoveI canonical forms.
2059 switch (id) {
2060 case vmIntrinsics::_min:
2061 result_val = _gvn.transform(new (C, 3) MinINode(x,y)); break;
2062 case vmIntrinsics::_max:
2063 result_val = _gvn.transform(new (C, 3) MaxINode(x,y)); break;
2064 default:
2065 ShouldNotReachHere();
2066 }
2067 */
2068}
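// --- Editorial sketch (not part of library_call.cpp) -------------------------
// The flow-free shape the code above ultimately builds when no dominating test
// is found: one comparison feeding a select (the CMove), so no extra control
// edges are introduced around the min/max. The helper name is hypothetical.
#include <cassert>

static int max_via_select(int x, int y) {
  bool x_lt_y = x < y;   // the BoolNode(cmpxy, BoolTest::lt)
  return x_lt_y ? y : x; // the CMoveI picking answer_if_true / answer_if_false
}

int main() {
  assert(max_via_select(3, 7) == 7);
  assert(max_via_select(7, 3) == 7);
  assert(max_via_select(5, 5) == 5);
  return 0;
}
// -----------------------------------------------------------------------------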
2069
2070inline int
2071LibraryCallKit::classify_unsafe_addr(Node* &base, Node* &offset, BasicType type) {
2072 const TypePtr* base_type = TypePtr::NULL_PTR;
2073 if (base != NULL) base_type = _gvn.type(base)->isa_ptr();
2074 if (base_type == NULL) {
2075 // Unknown type.
2076 return Type::AnyPtr;
2077 } else if (base_type == TypePtr::NULL_PTR) {
2078 // Since this is a NULL+long form, we have to switch to a rawptr.
2079 base = _gvn.transform(new CastX2PNode(offset));
2080 offset = MakeConX(0);
2081 return Type::RawPtr;
2082 } else if (base_type->base() == Type::RawPtr) {
2083 return Type::RawPtr;
2084 } else if (base_type->isa_oopptr()) {
2085 // Base is never null => always a heap address.
2086 if (!TypePtr::NULL_PTR->higher_equal(base_type)) {
2087 return Type::OopPtr;
2088 }
2089 // Offset is small => always a heap address.
2090 const TypeX* offset_type = _gvn.type(offset)->isa_intptr_t();
2091 if (offset_type != NULL &&
2092 base_type->offset() == 0 && // (should always be?)
2093 offset_type->_lo >= 0 &&
2094 !MacroAssembler::needs_explicit_null_check(offset_type->_hi)) {
2095 return Type::OopPtr;
2096 } else if (type == T_OBJECT) {
2097 // off heap access to an oop doesn't make any sense. Has to be on
2098 // heap.
2099 return Type::OopPtr;
2100 }
2101 // Otherwise, it might either be oop+off or NULL+addr.
2102 return Type::AnyPtr;
2103 } else {
2104 // No information:
2105 return Type::AnyPtr;
2106 }
2107}
2108
2109Node* LibraryCallKit::make_unsafe_address(Node*& base, Node* offset, BasicType type, bool can_cast) {
2110 Node* uncasted_base = base;
2111 int kind = classify_unsafe_addr(uncasted_base, offset, type);
2112 if (kind == Type::RawPtr) {
2113 return basic_plus_adr(top(), uncasted_base, offset);
2114 } else if (kind == Type::AnyPtr) {
2115 assert(base == uncasted_base, "unexpected base change");
2116 if (can_cast) {
2117 if (!_gvn.type(base)->speculative_maybe_null() &&
2118 !too_many_traps(Deoptimization::Reason_speculate_null_check)) {
2119 // According to profiling, this access is always on
2120 // heap. Casting the base to not null and thus avoiding membars
2121 // around the access should allow better optimizations
2122 Node* null_ctl = top();
2123 base = null_check_oop(base, &null_ctl, true, true, true);
2124 assert(null_ctl->is_top(), "no null control here");
2125 return basic_plus_adr(base, offset);
2126 } else if (_gvn.type(base)->speculative_always_null() &&
2127 !too_many_traps(Deoptimization::Reason_speculate_null_assert)) {
2128 // According to profiling, this access is always off
2129 // heap.
2130 base = null_assert(base);
2131 Node* raw_base = _gvn.transform(new CastX2PNode(offset));
2132 offset = MakeConX(0);
2133 return basic_plus_adr(top(), raw_base, offset);
2134 }
2135 }
2136 // We don't know if it's an on heap or off heap access. Fall back
2137 // to raw memory access.
2138 Node* raw = _gvn.transform(new CheckCastPPNode(control(), base, TypeRawPtr::BOTTOM));
2139 return basic_plus_adr(top(), raw, offset);
2140 } else {
2141 assert(base == uncasted_base, "unexpected base change");
2142 // We know it's an on heap access so base can't be null
2143 if (TypePtr::NULL_PTR->higher_equal(_gvn.type(base))) {
2144 base = must_be_not_null(base, true);
2145 }
2146 return basic_plus_adr(base, offset);
2147 }
2148}
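// Rough sketch of the outcomes above: a RawPtr kind is addressed directly off
// the (possibly cast) offset; an AnyPtr base may be speculatively cast to
// not-null (treated as on-heap) or null-asserted (treated as off-heap) when
// profiling allows, and otherwise falls back to a CheckCastPP to
// TypeRawPtr::BOTTOM; an on-heap kind only gets a not-null cast when its type
// still allows NULL.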
2149
2150//--------------------------inline_number_methods-----------------------------
2151// inline int Integer.numberOfLeadingZeros(int)
2152// inline int Long.numberOfLeadingZeros(long)
2153//
2154// inline int Integer.numberOfTrailingZeros(int)
2155// inline int Long.numberOfTrailingZeros(long)
2156//
2157// inline int Integer.bitCount(int)
2158// inline int Long.bitCount(long)
2159//
2160// inline char Character.reverseBytes(char)
2161// inline short Short.reverseBytes(short)
2162// inline int Integer.reverseBytes(int)
2163// inline long Long.reverseBytes(long)
2164bool LibraryCallKit::inline_number_methods(vmIntrinsics::ID id) {
2165 Node* arg = argument(0);
2166 Node* n = NULL;
2167 switch (id) {
2168 case vmIntrinsics::_numberOfLeadingZeros_i: n = new CountLeadingZerosINode( arg); break;
2169 case vmIntrinsics::_numberOfLeadingZeros_l: n = new CountLeadingZerosLNode( arg); break;
2170 case vmIntrinsics::_numberOfTrailingZeros_i: n = new CountTrailingZerosINode(arg); break;
2171 case vmIntrinsics::_numberOfTrailingZeros_l: n = new CountTrailingZerosLNode(arg); break;
2172 case vmIntrinsics::_bitCount_i: n = new PopCountINode( arg); break;
2173 case vmIntrinsics::_bitCount_l: n = new PopCountLNode( arg); break;
2174 case vmIntrinsics::_reverseBytes_c: n = new ReverseBytesUSNode(0, arg); break;
2175 case vmIntrinsics::_reverseBytes_s: n = new ReverseBytesSNode( 0, arg); break;
2176 case vmIntrinsics::_reverseBytes_i: n = new ReverseBytesINode( 0, arg); break;
2177 case vmIntrinsics::_reverseBytes_l: n = new ReverseBytesLNode( 0, arg); break;
2178 default: fatal_unexpected_iid(id); break;
2179 }
2180 set_result(_gvn.transform(n));
2181 return true;
2182}
2183
2184//----------------------------inline_unsafe_access----------------------------
2185
2186const TypeOopPtr* LibraryCallKit::sharpen_unsafe_type(Compile::AliasType* alias_type, const TypePtr *adr_type) {
2187 // Attempt to infer a sharper value type from the offset and base type.
2188 ciKlass* sharpened_klass = NULL;
2189
2190 // See if it is an instance field, with an object type.
2191 if (alias_type->field() != NULL) {
2192 if (alias_type->field()->type()->is_klass()) {
2193 sharpened_klass = alias_type->field()->type()->as_klass();
2194 }
2195 }
2196
2197 // See if it is a narrow oop array.
2198 if (adr_type->isa_aryptr()) {
2199 if (adr_type->offset() >= objArrayOopDesc::base_offset_in_bytes()) {
2200 const TypeOopPtr* elem_type = adr_type->is_aryptr()->elem()->make_oopptr();
2201 if (elem_type != NULL) {
2202 sharpened_klass = elem_type->klass();
2203 }
2204 }
2205 }
2206
2207 // The sharpened class might be unloaded if there is no class loader
2208 // constraint in place.
2209 if (sharpened_klass != NULL && sharpened_klass->is_loaded()) {
2210 const TypeOopPtr* tjp = TypeOopPtr::make_from_klass(sharpened_klass);
2211
2212#ifndef PRODUCT
2213 if (C->print_intrinsics() || C->print_inlining()) {
2214 tty->print(" from base type: "); adr_type->dump(); tty->cr();
2215 tty->print(" sharpened value: "); tjp->dump(); tty->cr();
2216 }
2217#endif
2218 // Sharpen the value type.
2219 return tjp;
2220 }
2221 return NULL;
2222}
2223
2224DecoratorSet LibraryCallKit::mo_decorator_for_access_kind(AccessKind kind) {
2225 switch (kind) {
2226 case Relaxed:
2227 return MO_UNORDERED;
2228 case Opaque:
2229 return MO_RELAXED;
2230 case Acquire:
2231 return MO_ACQUIRE;
2232 case Release:
2233 return MO_RELEASE;
2234 case Volatile:
2235 return MO_SEQ_CST;
2236 default:
2237 ShouldNotReachHere();
2238 return 0;
2239 }
2240}
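// For example, a Relaxed unsafe access is emitted with MO_UNORDERED ordering
// while a Volatile access gets MO_SEQ_CST; at the Java level this corresponds
// (roughly) to Unsafe.getInt() vs. Unsafe.getIntVolatile().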
2241
2242bool LibraryCallKit::inline_unsafe_access(bool is_store, const BasicType type, const AccessKind kind, const bool unaligned) {
2243 if (callee()->is_static()) return false; // caller must have the capability!
2244 DecoratorSet decorators = C2_UNSAFE_ACCESS;
2245 guarantee(!is_store || kind != Acquire, "Acquire accesses can be produced only for loads");
2246 guarantee( is_store || kind != Release, "Release accesses can be produced only for stores");
2247 assert(type != T_OBJECT || !unaligned, "unaligned access not supported with object type");
2248
2249 if (is_reference_type(type)) {
2250 decorators |= ON_UNKNOWN_OOP_REF;
2251 }
2252
2253 if (unaligned) {
2254 decorators |= C2_UNALIGNED;
2255 }
2256
2257#ifndef PRODUCT
2258 {
2259 ResourceMark rm;
2260 // Check the signatures.
2261 ciSignature* sig = callee()->signature();
2262#ifdef ASSERT
2263 if (!is_store) {
2264 // Object getReference(Object base, int/long offset), etc.
2265 BasicType rtype = sig->return_type()->basic_type();
2266 assert(rtype == type, "getter must return the expected value");
2267 assert(sig->count() == 2, "oop getter has 2 arguments");
2268 assert(sig->type_at(0)->basic_type() == T_OBJECT, "getter base is object");
2269 assert(sig->type_at(1)->basic_type() == T_LONG, "getter offset is correct");
2270 } else {
2271 // void putReference(Object base, int/long offset, Object x), etc.
2272 assert(sig->return_type()->basic_type() == T_VOID, "putter must not return a value");
2273 assert(sig->count() == 3, "oop putter has 3 arguments");
2274 assert(sig->type_at(0)->basic_type() == T_OBJECT, "putter base is object");
2275 assert(sig->type_at(1)->basic_type() == T_LONG, "putter offset is correct");
2276 BasicType vtype = sig->type_at(sig->count()-1)->basic_type();
2277 assert(vtype == type, "putter must accept the expected value");
2278 }
2279#endif // ASSERT
2280 }
2281#endif //PRODUCT
2282
2283 C->set_has_unsafe_access(true); // Mark eventual nmethod as "unsafe".
2284
2285 Node* receiver = argument(0); // type: oop
2286
2287 // Build address expression.
2288 Node* heap_base_oop = top();
2289
2290 // The base is either a Java object or a value produced by Unsafe.staticFieldBase
2291 Node* base = argument(1); // type: oop
2292 // The offset is a value produced by Unsafe.staticFieldOffset or Unsafe.objectFieldOffset
2293 Node* offset = argument(2); // type: long
2294 // We currently rely on the cookies produced by Unsafe.xxxFieldOffset
2295 // to be plain byte offsets, which are also the same as those accepted
2296 // by oopDesc::field_addr.
2297 assert(Unsafe_field_offset_to_byte_offset(11) == 11,
2298 "fieldOffset must be byte-scaled");
2299 // 32-bit machines ignore the high half!
2300 offset = ConvL2X(offset);
2301
2302 // Save state and restore on bailout
2303 uint old_sp = sp();
2304 SafePointNode* old_map = clone_map();
2305
2306 Node* adr = make_unsafe_address(base, offset, type, kind == Relaxed);
2307
2308 if (_gvn.type(base)->isa_ptr() == TypePtr::NULL_PTR) {
2309 if (type != T_OBJECT) {
2310 decorators |= IN_NATIVE; // off-heap primitive access
2311 } else {
2312 set_map(old_map);
2313 set_sp(old_sp);
2314 return false; // off-heap oop accesses are not supported
2315 }
2316 } else {
2317 heap_base_oop = base; // on-heap or mixed access
2318 }
2319
2320 // Can base be NULL? Otherwise, always on-heap access.
2321 bool can_access_non_heap = TypePtr::NULL_PTR->higher_equal(_gvn.type(base));
2322
2323 if (!can_access_non_heap) {
2324 decorators |= IN_HEAP;
2325 }
2326
2327 Node* val = is_store ? argument(4) : NULL;
2328
2329 const TypePtr* adr_type = _gvn.type(adr)->isa_ptr();
2330 if (adr_type == TypePtr::NULL_PTR) {
2331 set_map(old_map);
2332 set_sp(old_sp);
2333 return false; // off-heap access with zero address
2334 }
2335
2336 // Try to categorize the address.
2337 Compile::AliasType* alias_type = C->alias_type(adr_type);
2338 assert(alias_type->index() != Compile::AliasIdxBot, "no bare pointers here");
2339
2340 if (alias_type->adr_type() == TypeInstPtr::KLASS ||
2341 alias_type->adr_type() == TypeAryPtr::RANGE) {
2342 set_map(old_map);
2343 set_sp(old_sp);
2344 return false; // not supported
2345 }
2346
2347 bool mismatched = false;
2348 BasicType bt = alias_type->basic_type();
2349 if (bt != T_ILLEGAL) {
2350 assert(alias_type->adr_type()->is_oopptr(), "should be on-heap access");
2351 if (bt == T_BYTE && adr_type->isa_aryptr()) {
2352 // Alias type doesn't differentiate between byte[] and boolean[].
2353 // Use address type to get the element type.
2354 bt = adr_type->is_aryptr()->elem()->array_element_basic_type();
2355 }
2356 if (bt == T_ARRAY || bt == T_NARROWOOP) {
2357 // accessing an array field with getReference is not a mismatch
2358 bt = T_OBJECT;
2359 }
2360 if ((bt == T_OBJECT) != (type == T_OBJECT)) {
2361 // Don't intrinsify mismatched object accesses
2362 set_map(old_map);
2363 set_sp(old_sp);
2364 return false;
2365 }
2366 mismatched = (bt != type);
2367 } else if (alias_type->adr_type()->isa_oopptr()) {
2368 mismatched = true; // conservatively mark all "wide" on-heap accesses as mismatched
2369 }
2370
2371 old_map->destruct(&_gvn);
2372 assert(!mismatched || alias_type->adr_type()->is_oopptr(), "off-heap access can't be mismatched");
2373
2374 if (mismatched) {
2375 decorators |= C2_MISMATCHED;
2376 }
2377
2378 // First guess at the value type.
2379 const Type *value_type = Type::get_const_basic_type(type);
2380
2381 // Figure out the memory ordering.
2382 decorators |= mo_decorator_for_access_kind(kind);
2383
2384 if (!is_store && type == T_OBJECT) {
2385 const TypeOopPtr* tjp = sharpen_unsafe_type(alias_type, adr_type);
2386 if (tjp != NULL) {
2387 value_type = tjp;
2388 }
2389 }
2390
2391 receiver = null_check(receiver);
2392 if (stopped()) {
2393 return true;
2394 }
2395 // Heap pointers get a null-check from the interpreter,
2396 // as a courtesy. However, this is not guaranteed by Unsafe,
2397 // and it is not possible to fully distinguish unintended nulls
2398 // from intended ones in this API.
2399
2400 if (!is_store) {
2401 Node* p = NULL;
2402 // Try to constant fold a load from a constant field
2403 ciField* field = alias_type->field();
2404 if (heap_base_oop != top() && field != NULL && field->is_constant() && !mismatched) {
2405 // final or stable field
2406 p = make_constant_from_field(field, heap_base_oop);
2407 }
2408
2409 if (p == NULL) { // Could not constant fold the load
2410 p = access_load_at(heap_base_oop, adr, adr_type, value_type, type, decorators);
2411 // Normalize the value returned by getBoolean in the following cases
2412 if (type == T_BOOLEAN &&
2413 (mismatched ||
2414 heap_base_oop == top() || // - heap_base_oop is NULL or
2415 (can_access_non_heap && field == NULL)) // - heap_base_oop is potentially NULL
2416 // and the unsafe access is made to large offset
2417 // (i.e., larger than the maximum offset necessary for any
2418 // field access)
2419 ) {
2420 IdealKit ideal = IdealKit(this);
2421#define __ ideal.
2422 IdealVariable normalized_result(ideal);
2423 __ declarations_done();
2424 __ set(normalized_result, p);
2425 __ if_then(p, BoolTest::ne, ideal.ConI(0));
2426 __ set(normalized_result, ideal.ConI(1));
2427 ideal.end_if();
2428 final_sync(ideal);
2429 p = __ value(normalized_result);
2430#undef __
2431 }
2432 }
2433 if (type == T_ADDRESS) {
2434 p = gvn().transform(new CastP2XNode(NULL, p));
2435 p = ConvX2UL(p);
2436 }
2437 // The load node has the control of the preceding MemBarCPUOrder. All
2438 // following nodes will have the control of the MemBarCPUOrder inserted at
2439 // the end of this method. So, pushing the load onto the stack at a later
2440 // point is fine.
2441 set_result(p);
2442 } else {
2443 if (bt == T_ADDRESS) {
2444 // Repackage the long as a pointer.
2445 val = ConvL2X(val);
2446 val = gvn().transform(new CastX2PNode(val));
2447 }
2448 access_store_at(heap_base_oop, adr, adr_type, val, value_type, type, decorators);
2449 }
2450
2451 return true;
2452}
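// As a concrete example (assuming a plain, matched, on-heap Unsafe.getInt()):
// the decorator set works out to C2_UNSAFE_ACCESS | IN_HEAP | MO_UNORDERED,
// no oop-type sharpening applies, and the load is emitted through
// access_load_at() with value_type == TypeInt::INT.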
2453
2454//----------------------------inline_unsafe_load_store----------------------------
2455// This method serves a couple of different customers (depending on LoadStoreKind):
2456//
2457// LS_cmp_swap:
2458//
2459// boolean compareAndSetReference(Object o, long offset, Object expected, Object x);
2460// boolean compareAndSetInt( Object o, long offset, int expected, int x);
2461// boolean compareAndSetLong( Object o, long offset, long expected, long x);
2462//
2463// LS_cmp_swap_weak:
2464//
2465// boolean weakCompareAndSetReference( Object o, long offset, Object expected, Object x);
2466// boolean weakCompareAndSetReferencePlain( Object o, long offset, Object expected, Object x);
2467// boolean weakCompareAndSetReferenceAcquire(Object o, long offset, Object expected, Object x);
2468// boolean weakCompareAndSetReferenceRelease(Object o, long offset, Object expected, Object x);
2469//
2470// boolean weakCompareAndSetInt( Object o, long offset, int expected, int x);
2471// boolean weakCompareAndSetIntPlain( Object o, long offset, int expected, int x);
2472// boolean weakCompareAndSetIntAcquire( Object o, long offset, int expected, int x);
2473// boolean weakCompareAndSetIntRelease( Object o, long offset, int expected, int x);
2474//
2475// boolean weakCompareAndSetLong( Object o, long offset, long expected, long x);
2476// boolean weakCompareAndSetLongPlain( Object o, long offset, long expected, long x);
2477// boolean weakCompareAndSetLongAcquire( Object o, long offset, long expected, long x);
2478// boolean weakCompareAndSetLongRelease( Object o, long offset, long expected, long x);
2479//
2480// LS_cmp_exchange:
2481//
2482// Object compareAndExchangeReferenceVolatile(Object o, long offset, Object expected, Object x);
2483// Object compareAndExchangeReferenceAcquire( Object o, long offset, Object expected, Object x);
2484// Object compareAndExchangeReferenceRelease( Object o, long offset, Object expected, Object x);
2485//
2486// Object compareAndExchangeIntVolatile( Object o, long offset, Object expected, Object x);
2487// Object compareAndExchangeIntAcquire( Object o, long offset, Object expected, Object x);
2488// Object compareAndExchangeIntRelease( Object o, long offset, Object expected, Object x);
2489//
2490// Object compareAndExchangeLongVolatile( Object o, long offset, Object expected, Object x);
2491// Object compareAndExchangeLongAcquire( Object o, long offset, Object expected, Object x);
2492// Object compareAndExchangeLongRelease( Object o, long offset, Object expected, Object x);
2493//
2494// LS_get_add:
2495//
2496// int getAndAddInt( Object o, long offset, int delta)
2497// long getAndAddLong(Object o, long offset, long delta)
2498//
2499// LS_get_set:
2500//
2501// int getAndSet(Object o, long offset, int newValue)
2502// long getAndSet(Object o, long offset, long newValue)
2503// Object getAndSet(Object o, long offset, Object newValue)
2504//
2505bool LibraryCallKit::inline_unsafe_load_store(const BasicType type, const LoadStoreKind kind, const AccessKind access_kind) {
2506 // This basic scheme here is the same as inline_unsafe_access, but
2507 // differs in enough details that combining them would make the code
2508 // overly confusing. (This is a true fact! I originally combined
2509 // them, but even I was confused by it!) As much code/comments as
2510 // possible are retained from inline_unsafe_access though to make
2511 // the correspondences clearer. - dl
2512
2513 if (callee()->is_static()) return false; // caller must have the capability!
2514
2515 DecoratorSet decorators = C2_UNSAFE_ACCESS;
2516 decorators |= mo_decorator_for_access_kind(access_kind);
2517
2518#ifndef PRODUCT
2519 BasicType rtype;
2520 {
2521 ResourceMark rm;
2522 // Check the signatures.
2523 ciSignature* sig = callee()->signature();
2524 rtype = sig->return_type()->basic_type();
2525 switch(kind) {
2526 case LS_get_add:
2527 case LS_get_set: {
2528 // Check the signatures.
2529#ifdef ASSERT
2530 assert(rtype == type, "get and set must return the expected type");
2531 assert(sig->count() == 3, "get and set has 3 arguments");
2532 assert(sig->type_at(0)->basic_type() == T_OBJECT, "get and set base is object");
2533 assert(sig->type_at(1)->basic_type() == T_LONG, "get and set offset is long");
2534 assert(sig->type_at(2)->basic_type() == type, "get and set must take expected type as new value/delta");
2535 assert(access_kind == Volatile, "mo is not passed to intrinsic nodes in current implementation");
2536#endif // ASSERT
2537 break;
2538 }
2539 case LS_cmp_swap:
2540 case LS_cmp_swap_weak: {
2541 // Check the signatures.
2542#ifdef ASSERT
2543 assert(rtype == T_BOOLEAN, "CAS must return boolean");
2544 assert(sig->count() == 4, "CAS has 4 arguments");
2545 assert(sig->type_at(0)->basic_type() == T_OBJECT, "CAS base is object");
2546 assert(sig->type_at(1)->basic_type() == T_LONG, "CAS offset is long");
2547#endif // ASSERT
2548 break;
2549 }
2550 case LS_cmp_exchange: {
2551 // Check the signatures.
2552#ifdef ASSERT
2553 assert(rtype == type, "CAS must return the expected type");
2554 assert(sig->count() == 4, "CAS has 4 arguments");
2555 assert(sig->type_at(0)->basic_type() == T_OBJECT, "CAS base is object");
2556 assert(sig->type_at(1)->basic_type() == T_LONG, "CAS offset is long");
2557#endif // ASSERT
2558 break;
2559 }
2560 default:
2561 ShouldNotReachHere();
2562 }
2563 }
2564#endif //PRODUCT
2565
2566 C->set_has_unsafe_access(true); // Mark eventual nmethod as "unsafe".
2567
2568 // Get arguments:
2569 Node* receiver = NULL;
2570 Node* base = NULL;
2571 Node* offset = NULL;
2572 Node* oldval = NULL;
2573 Node* newval = NULL;
2574 switch(kind) {
2575 case LS_cmp_swap:
2576 case LS_cmp_swap_weak:
2577 case LS_cmp_exchange: {
2578 const bool two_slot_type = type2size[type] == 2;
2579 receiver = argument(0); // type: oop
2580 base = argument(1); // type: oop
2581 offset = argument(2); // type: long
2582 oldval = argument(4); // type: oop, int, or long
2583 newval = argument(two_slot_type ? 6 : 5); // type: oop, int, or long
2584 break;
2585 }
2586 case LS_get_add:
2587 case LS_get_set: {
2588 receiver = argument(0); // type: oop
2589 base = argument(1); // type: oop
2590 offset = argument(2); // type: long
2591 oldval = NULL;
2592 newval = argument(4); // type: oop, int, or long
2593 break;
2594 }
2595 default:
2596 ShouldNotReachHere();
2597 }
2598
2599 // Build field offset expression.
2600 // We currently rely on the cookies produced by Unsafe.xxxFieldOffset
2601 // to be plain byte offsets, which are also the same as those accepted
2602 // by oopDesc::field_addr.
2603 assert(Unsafe_field_offset_to_byte_offset(11) == 11, "fieldOffset must be byte-scaled");
2604 // 32-bit machines ignore the high half of long offsets
2605 offset = ConvL2X(offset);
2606 // Save state and restore on bailout
2607 uint old_sp = sp();
2608 SafePointNode* old_map = clone_map();
2609 Node* adr = make_unsafe_address(base, offset,type, false);
2610 const TypePtr *adr_type = _gvn.type(adr)->isa_ptr();
2611
2612 Compile::AliasType* alias_type = C->alias_type(adr_type);
2613 BasicType bt = alias_type->basic_type();
2614 if (bt != T_ILLEGAL &&
2615 (is_reference_type(bt) != (type == T_OBJECT))) {
2616 // Don't intrinsify mismatched object accesses.
2617 set_map(old_map);
2618 set_sp(old_sp);
2619 return false;
2620 }
2621
2622 old_map->destruct(&_gvn);
2623
2624 // For CAS, unlike inline_unsafe_access, there seems no point in
2625 // trying to refine types. Just use the coarse types here.
2626 assert(alias_type->index() != Compile::AliasIdxBot, "no bare pointers here");
2627 const Type *value_type = Type::get_const_basic_type(type);
2628
2629 switch (kind) {
2630 case LS_get_set:
2631 case LS_cmp_exchange: {
2632 if (type == T_OBJECT) {
2633 const TypeOopPtr* tjp = sharpen_unsafe_type(alias_type, adr_type);
2634 if (tjp != NULL) {
2635 value_type = tjp;
2636 }
2637 }
2638 break;
2639 }
2640 case LS_cmp_swap:
2641 case LS_cmp_swap_weak:
2642 case LS_get_add:
2643 break;
2644 default:
2645 ShouldNotReachHere();
2646 }
2647
2648 // Null check receiver.
2649 receiver = null_check(receiver);
2650 if (stopped()) {
2651 return true;
2652 }
2653
2654 int alias_idx = C->get_alias_index(adr_type);
2655
2656 if (is_reference_type(type)) {
2657 decorators |= IN_HEAP | ON_UNKNOWN_OOP_REF;
2658
2659 // Transformation of a value which could be NULL pointer (CastPP #NULL)
2660 // could be delayed during Parse (for example, in adjust_map_after_if()).
2661 // Execute transformation here to avoid barrier generation in such case.
2662 if (_gvn.type(newval) == TypePtr::NULL_PTR)
2663 newval = _gvn.makecon(TypePtr::NULL_PTR);
2664
2665 if (oldval != NULL && _gvn.type(oldval) == TypePtr::NULL_PTR) {
2666 // Refine the value to a null constant, when it is known to be null
2667 oldval = _gvn.makecon(TypePtr::NULL_PTR);
2668 }
2669 }
2670
2671 Node* result = NULL;
2672 switch (kind) {
2673 case LS_cmp_exchange: {
2674 result = access_atomic_cmpxchg_val_at(base, adr, adr_type, alias_idx,
2675 oldval, newval, value_type, type, decorators);
2676 break;
2677 }
2678 case LS_cmp_swap_weak:
2679 decorators |= C2_WEAK_CMPXCHG;
2680 case LS_cmp_swap: {
2681 result = access_atomic_cmpxchg_bool_at(base, adr, adr_type, alias_idx,
2682 oldval, newval, value_type, type, decorators);
2683 break;
2684 }
2685 case LS_get_set: {
2686 result = access_atomic_xchg_at(base, adr, adr_type, alias_idx,
2687 newval, value_type, type, decorators);
2688 break;
2689 }
2690 case LS_get_add: {
2691 result = access_atomic_add_at(base, adr, adr_type, alias_idx,
2692 newval, value_type, type, decorators);
2693 break;
2694 }
2695 default:
2696 ShouldNotReachHere();
2697 }
2698
2699 assert(type2size[result->bottom_type()->basic_type()] == type2size[rtype], "result type should match");
2700 set_result(result);
2701 return true;
2702}
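// For example, compareAndSetInt() (LS_cmp_swap) ends up in
// access_atomic_cmpxchg_bool_at() and returns a boolean; the weak variants
// take the same path but additionally carry C2_WEAK_CMPXCHG; getAndAddInt()
// (LS_get_add) goes through access_atomic_add_at() instead.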
2703
2704bool LibraryCallKit::inline_unsafe_fence(vmIntrinsics::ID id) {
2705 // Regardless of form, don't allow previous ld/st to move down,
2706 // then issue acquire, release, or volatile mem_bar.
2707 insert_mem_bar(Op_MemBarCPUOrder);
2708 switch(id) {
2709 case vmIntrinsics::_loadFence:
2710 insert_mem_bar(Op_LoadFence);
2711 return true;
2712 case vmIntrinsics::_storeFence:
2713 insert_mem_bar(Op_StoreFence);
2714 return true;
2715 case vmIntrinsics::_storeStoreFence:
2716 insert_mem_bar(Op_StoreStoreFence);
2717 return true;
2718 case vmIntrinsics::_fullFence:
2719 insert_mem_bar(Op_MemBarVolatile);
2720 return true;
2721 default:
2722 fatal_unexpected_iid(id);
2723 return false;
2724 }
2725}
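// For example, Unsafe.loadFence() becomes MemBarCPUOrder followed by
// LoadFence, and fullFence() becomes MemBarCPUOrder followed by
// MemBarVolatile; the leading MemBarCPUOrder keeps earlier loads and stores
// from drifting below the fence.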
2726
2727bool LibraryCallKit::inline_onspinwait() {
2728 insert_mem_bar(Op_OnSpinWait);
2729 return true;
2730}
2731
2732bool LibraryCallKit::klass_needs_init_guard(Node* kls) {
2733 if (!kls->is_Con()) {
2734 return true;
2735 }
2736 const TypeKlassPtr* klsptr = kls->bottom_type()->isa_klassptr();
2737 if (klsptr == NULL) {
2738 return true;
2739 }
2740 ciInstanceKlass* ik = klsptr->klass()->as_instance_klass();
2741 // don't need a guard for a klass that is already initialized
2742 return !ik->is_initialized();
2743}
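// In other words, the init guard can only be skipped for a constant klass
// that is already known to be initialized at compile time; a non-constant
// klass (or one without a usable klassptr type) always needs the guard.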
2744
2745//----------------------------inline_unsafe_writeback0-------------------------
2746// public native void Unsafe.writeback0(long address)
2747bool LibraryCallKit::inline_unsafe_writeback0() {
2748 if (!Matcher::has_match_rule(Op_CacheWB)) {
2749 return false;
2750 }
2751#ifndef PRODUCT
2752 assert(Matcher::has_match_rule(Op_CacheWBPreSync), "found match rule for CacheWB but not CacheWBPreSync");
2753 assert(Matcher::has_match_rule(Op_CacheWBPostSync), "found match rule for CacheWB but not CacheWBPostSync");
2754 ciSignature* sig = callee()->signature();
2755 assert(sig->type_at(0)->basic_type() == T_LONG, "Unsafe_writeback0 address is long!");
2756#endif
2757 null_check_receiver(); // null-check, then ignore
2758 Node *addr = argument(1);
2759 addr = new CastX2PNode(addr);
2760 addr = _gvn.transform(addr);
2761 Node *flush = new CacheWBNode(control(), memory(TypeRawPtr::BOTTOM), addr);
2762 flush = _gvn.transform(flush);
2763 set_memory(flush, TypeRawPtr::BOTTOM);
2764 return true;
2765}
2766
2767//-------------------------inline_unsafe_writebackSync0------------------------
2768// public native void Unsafe.writebackPreSync0(); public native void Unsafe.writebackPostSync0()
2769bool LibraryCallKit::inline_unsafe_writebackSync0(bool is_pre) {
2770 if (is_pre && !Matcher::has_match_rule(Op_CacheWBPreSync)) {
2771 return false;
2772 }
2773 if (!is_pre && !Matcher::has_match_rule(Op_CacheWBPostSync)) {
2774 return false;
2775 }
2776#ifndef PRODUCT
2777 assert(Matcher::has_match_rule(Op_CacheWB),
2778 (is_pre ? "found match rule for CacheWBPreSync but not CacheWB"
2779 : "found match rule for CacheWBPostSync but not CacheWB"));
2780
2781#endif
2782 null_check_receiver(); // null-check, then ignore
2783 Node *sync;
2784 if (is_pre) {
2785 sync = new CacheWBPreSyncNode(control(), memory(TypeRawPtr::BOTTOM));
2786 } else {
2787 sync = new CacheWBPostSyncNode(control(), memory(TypeRawPtr::BOTTOM));
2788 }
2789 sync = _gvn.transform(sync);
2790 set_memory(sync, TypeRawPtr::BOTTOM);
2791 return true;
2792}
2793
2794//----------------------------inline_unsafe_allocate---------------------------
2795// public native Object Unsafe.allocateInstance(Class<?> cls);
2796bool LibraryCallKit::inline_unsafe_allocate() {
2797 if (callee()->is_static()) return false; // caller must have the capability!
2798
2799 null_check_receiver(); // null-check, then ignore
2800 Node* cls = null_check(argument(1));
2801 if (stopped()) return true;
2802
2803 Node* kls = load_klass_from_mirror(cls, false, NULL, 0);
2804 kls = null_check(kls);
2805 if (stopped()) return true; // argument was like int.class
2806
2807 Node* test = NULL;
2808 if (LibraryCallKit::klass_needs_init_guard(kls)) {
2809 // Note: The argument might still be an illegal value like
2810 // Serializable.class or Object[].class. The runtime will handle it.
2811 // But we must make an explicit check for initialization.
2812 Node* insp = basic_plus_adr(kls, in_bytes(InstanceKlass::init_state_offset()));
2813 // Use T_BOOLEAN for InstanceKlass::_init_state so the compiler
2814 // can generate code to load it as unsigned byte.
2815 Node* inst = make_load(NULL, insp, TypeInt::UBYTE, T_BOOLEAN, MemNode::unordered);
2816 Node* bits = intcon(InstanceKlass::fully_initialized);
2817 test = _gvn.transform(new SubINode(inst, bits));
2818 // The 'test' is non-zero if we need to take a slow path.
2819 }
2820
2821 Node* obj = new_instance(kls, test);
2822 set_result(obj);
2823 return true;
2824}
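// Sketch of the guard above: InstanceKlass::_init_state is loaded as an
// unsigned byte and compared with fully_initialized; the non-zero difference
// is handed to new_instance() as its slow-path test, so only classes that are
// not yet fully initialized take the runtime path.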
2825
2826//------------------------inline_native_time_funcs--------------
2827// inline code for System.currentTimeMillis() and System.nanoTime()
2828// these have the same type and signature
2829bool LibraryCallKit::inline_native_time_funcs(address funcAddr, const char* funcName) {
2830 const TypeFunc* tf = OptoRuntime::void_long_Type();
2831 const TypePtr* no_memory_effects = NULL;
2832 Node* time = make_runtime_call(RC_LEAF, tf, funcAddr, funcName, no_memory_effects);
2833 Node* value = _gvn.transform(new ProjNode(time, TypeFunc::Parms+0));
2834#ifdef ASSERT
2835 Node* value_top = _gvn.transform(new ProjNode(time, TypeFunc::Parms+1));
2836 assert(value_top == top(), "second value must be top");
2837#endif
2838 set_result(value);
2839 return true;
2840}
2841
2842#ifdef JFR_HAVE_INTRINSICS
2843
2844/**
2845 * if oop->klass != null
2846 * // normal class
2847 * epoch = _epoch_state ? 2 : 1
2848 * if oop->klass->trace_id & ((epoch << META_SHIFT) | epoch)) != epoch {
2849 * ... // enter slow path when the klass is first recorded or the epoch of JFR shifts
2850 * }
2851 * id = oop->klass->trace_id >> TRACE_ID_SHIFT // normal class path
2852 * else
2853 * // primitive class
2854 * if oop->array_klass != null
2855 * id = (oop->array_klass->trace_id >> TRACE_ID_SHIFT) + 1 // primitive class path
2856 * else
2857 * id = LAST_TYPE_ID + 1 // void class path
2858 * if (!signaled)
2859 * signaled = true
2860 */
2861bool LibraryCallKit::inline_native_classID() {
2862 Node* cls = argument(0);
2863
2864 IdealKit ideal(this);
2865#define __ ideal.
2866 IdealVariable result(ideal); __ declarations_done();
2867 Node* kls = _gvn.transform(LoadKlassNode::make(_gvn, NULL, immutable_memory(),
2868 basic_plus_adr(cls, java_lang_Class::klass_offset()),
2869 TypeRawPtr::BOTTOM, TypeInstKlassPtr::OBJECT_OR_NULL));
2870
2871
2872 __ if_then(kls, BoolTest::ne, null()); {
2873 Node* kls_trace_id_addr = basic_plus_adr(kls, in_bytes(KLASS_TRACE_ID_OFFSET));
2874 Node* kls_trace_id_raw = ideal.load(ideal.ctrl(), kls_trace_id_addr,TypeLong::LONG, T_LONG, Compile::AliasIdxRaw);
2875
2876 Node* epoch_address = makecon(TypeRawPtr::make(Jfr::epoch_address()));
2877 Node* epoch = ideal.load(ideal.ctrl(), epoch_address, TypeInt::BOOL, T_BOOLEAN, Compile::AliasIdxRaw);
2878 epoch = _gvn.transform(new LShiftLNode(longcon(1), epoch));
2879 Node* mask = _gvn.transform(new LShiftLNode(epoch, intcon(META_SHIFT)));
2880 mask = _gvn.transform(new OrLNode(mask, epoch));
2881 Node* kls_trace_id_raw_and_mask = _gvn.transform(new AndLNode(kls_trace_id_raw, mask));
2882
2883 float unlikely = PROB_UNLIKELY(0.999);
2884 __ if_then(kls_trace_id_raw_and_mask, BoolTest::ne, epoch, unlikely); {
2885 sync_kit(ideal);
2886 make_runtime_call(RC_LEAF,
2887 OptoRuntime::get_class_id_intrinsic_Type(),
2888 CAST_FROM_FN_PTR(address, Jfr::get_class_id_intrinsic),
2889 "get_class_id_intrinsic",
2890 TypePtr::BOTTOM,
2891 kls);
2892 ideal.sync_kit(this);
2893 } __ end_if();
2894
2895 ideal.set(result, _gvn.transform(new URShiftLNode(kls_trace_id_raw, ideal.ConI(TRACE_ID_SHIFT))));
2896 } __ else_(); {
2897 Node* array_kls = _gvn.transform(LoadKlassNode::make(_gvn, NULL, immutable_memory(),
2898 basic_plus_adr(cls, java_lang_Class::array_klass_offset()),
2899 TypeRawPtr::BOTTOM, TypeInstKlassPtr::OBJECT_OR_NULL));
2900 __ if_then(array_kls, BoolTest::ne, null()); {
2901 Node* array_kls_trace_id_addr = basic_plus_adr(array_kls, in_bytes(KLASS_TRACE_ID_OFFSET));
2902 Node* array_kls_trace_id_raw = ideal.load(ideal.ctrl(), array_kls_trace_id_addr, TypeLong::LONG, T_LONG, Compile::AliasIdxRaw);
2903 Node* array_kls_trace_id = _gvn.transform(new URShiftLNode(array_kls_trace_id_raw, ideal.ConI(TRACE_ID_SHIFT)));
2904 ideal.set(result, _gvn.transform(new AddLNode(array_kls_trace_id, longcon(1))));
2905 } __ else_(); {
2906 // void class case
2907 ideal.set(result, _gvn.transform(longcon(LAST_TYPE_ID + 1)));
2908 } __ end_if();
2909
2910 Node* signaled_flag_address = makecon(TypeRawPtr::make(Jfr::signal_address()));
2911 Node* signaled = ideal.load(ideal.ctrl(), signaled_flag_address, TypeInt::BOOL, T_BOOLEAN, Compile::AliasIdxRaw, true, MemNode::acquire);
2912 __ if_then(signaled, BoolTest::ne, ideal.ConI(1)); {
2913 ideal.store(ideal.ctrl(), signaled_flag_address, ideal.ConI(1), T_BOOLEAN, Compile::AliasIdxRaw, MemNode::release, true);
2914 } __ end_if();
2915 } __ end_if();
2916
2917 final_sync(ideal);
2918 set_result(ideal.value(result));
2919#undef __
2920 return true;
2921}
2922
2923bool LibraryCallKit::inline_native_getEventWriter() {
2924 Node* tls_ptr = _gvn.transform(new ThreadLocalNode());
2925
2926 Node* jobj_ptr = basic_plus_adr(top(), tls_ptr,
2927 in_bytes(THREAD_LOCAL_WRITER_OFFSET_JFR));
2928
2929 Node* jobj = make_load(control(), jobj_ptr, TypeRawPtr::BOTTOM, T_ADDRESS, MemNode::unordered);
2930
2931 Node* jobj_cmp_null = _gvn.transform( new CmpPNode(jobj, null()) );
2932 Node* test_jobj_eq_null = _gvn.transform( new BoolNode(jobj_cmp_null, BoolTest::eq) );
2933
2934 IfNode* iff_jobj_null =
2935 create_and_map_if(control(), test_jobj_eq_null, PROB_MIN, COUNT_UNKNOWN);
2936
2937 enum { _normal_path = 1,
2938 _null_path = 2,
2939 PATH_LIMIT };
2940
2941 RegionNode* result_rgn = new RegionNode(PATH_LIMIT);
2942 PhiNode* result_val = new PhiNode(result_rgn, TypeInstPtr::BOTTOM);
2943
2944 Node* jobj_is_null = _gvn.transform(new IfTrueNode(iff_jobj_null));
2945 result_rgn->init_req(_null_path, jobj_is_null);
2946 result_val->init_req(_null_path, null());
2947
2948 Node* jobj_is_not_null = _gvn.transform(new IfFalseNode(iff_jobj_null));
2949 set_control(jobj_is_not_null);
2950 Node* res = access_load(jobj, TypeInstPtr::NOTNULL, T_OBJECT,
2951 IN_NATIVE | C2_CONTROL_DEPENDENT_LOAD);
2952 result_rgn->init_req(_normal_path, control());
2953 result_val->init_req(_normal_path, res);
2954
2955 set_result(result_rgn, result_val);
2956
2957 return true;
2958}
2959
2960#endif // JFR_HAVE_INTRINSICS
2961
2962//------------------------inline_native_currentThread------------------
2963bool LibraryCallKit::inline_native_currentThread() {
2964 Node* junk = NULL;
2965 set_result(generate_current_thread(junk));
2966 return true;
2967}
2968
2969//---------------------------load_mirror_from_klass----------------------------
2970// Given a klass oop, load its java mirror (a java.lang.Class oop).
2971Node* LibraryCallKit::load_mirror_from_klass(Node* klass) {
2972 Node* p = basic_plus_adr(klass, in_bytes(Klass::java_mirror_offset()));
2973 Node* load = make_load(NULL, p, TypeRawPtr::NOTNULL, T_ADDRESS, MemNode::unordered);
2974 // mirror = ((OopHandle)mirror)->resolve();
2975 return access_load(load, TypeInstPtr::MIRROR, T_OBJECT, IN_NATIVE);
2976}
2977
2978//-----------------------load_klass_from_mirror_common-------------------------
2979// Given a java mirror (a java.lang.Class oop), load its corresponding klass oop.
2980// Test the klass oop for null (signifying a primitive Class like Integer.TYPE),
2981// and branch to the given path on the region.
2982// If never_see_null, take an uncommon trap on null, so we can optimistically
2983// compile for the non-null case.
2984// If the region is NULL, force never_see_null = true.
2985Node* LibraryCallKit::load_klass_from_mirror_common(Node* mirror,
2986 bool never_see_null,
2987 RegionNode* region,
2988 int null_path,
2989 int offset) {
2990 if (region == NULL) never_see_null = true;
2991 Node* p = basic_plus_adr(mirror, offset);
2992 const TypeKlassPtr* kls_type = TypeInstKlassPtr::OBJECT_OR_NULL;
2993 Node* kls = _gvn.transform(LoadKlassNode::make(_gvn, NULL, immutable_memory(), p, TypeRawPtr::BOTTOM, kls_type));
2994 Node* null_ctl = top();
2995 kls = null_check_oop(kls, &null_ctl, never_see_null);
2996 if (region != NULL) {
2997 // Set region->in(null_path) if the mirror is a primitive (e.g., int.class).
2998 region->init_req(null_path, null_ctl);
2999 } else {
3000 assert(null_ctl == top(), "no loose ends");
3001 }
3002 return kls;
3003}
3004
3005//--------------------(inline_native_Class_query helpers)---------------------
3006// Use this for JVM_ACC_INTERFACE, JVM_ACC_IS_CLONEABLE_FAST, JVM_ACC_HAS_FINALIZER.
3007// Fall through if (mods & mask) == bits, take the guard otherwise.
3008Node* LibraryCallKit::generate_access_flags_guard(Node* kls, int modifier_mask, int modifier_bits, RegionNode* region) {
3009 // Branch around if the given klass has the given modifier bit set.
3010 // Like generate_guard, adds a new path onto the region.
3011 Node* modp = basic_plus_adr(kls, in_bytes(Klass::access_flags_offset()));
3012 Node* mods = make_load(NULL, modp, TypeInt::INT, T_INT, MemNode::unordered);
3013 Node* mask = intcon(modifier_mask);
3014 Node* bits = intcon(modifier_bits);
3015 Node* mbit = _gvn.transform(new AndINode(mods, mask));
3016 Node* cmp = _gvn.transform(new CmpINode(mbit, bits));
3017 Node* bol = _gvn.transform(new BoolNode(cmp, BoolTest::ne));
3018 return generate_fair_guard(bol, region);
3019}
3020Node* LibraryCallKit::generate_interface_guard(Node* kls, RegionNode* region) {
3021 return generate_access_flags_guard(kls, JVM_ACC_INTERFACE, 0, region);
3022}
3023Node* LibraryCallKit::generate_hidden_class_guard(Node* kls, RegionNode* region) {
3024 return generate_access_flags_guard(kls, JVM_ACC_IS_HIDDEN_CLASS, 0, region);
3025}
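// So, for example, generate_interface_guard() branches to the region when the
// klass has JVM_ACC_INTERFACE set and falls through otherwise, and
// generate_hidden_class_guard() does the same for JVM_ACC_IS_HIDDEN_CLASS.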
3026
3027//-------------------------inline_native_Class_query-------------------
3028bool LibraryCallKit::inline_native_Class_query(vmIntrinsics::ID id) {
3029 const Type* return_type = TypeInt::BOOL;
3030 Node* prim_return_value = top(); // what happens if it's a primitive class?
3031 bool never_see_null = !too_many_traps(Deoptimization::Reason_null_check);
3032 bool expect_prim = false; // most of these guys expect to work on refs
3033
3034 enum { _normal_path = 1, _prim_path = 2, PATH_LIMIT };
3035
3036 Node* mirror = argument(0);
3037 Node* obj = top();
3038
3039 switch (id) {
3040 case vmIntrinsics::_isInstance:
3041 // nothing is an instance of a primitive type
3042 prim_return_value = intcon(0);
3043 obj = argument(1);
3044 break;
3045 case vmIntrinsics::_getModifiers:
3046 prim_return_value = intcon(JVM_ACC_ABSTRACT | JVM_ACC_FINAL | JVM_ACC_PUBLIC);
3047 assert(is_power_of_2((int)JVM_ACC_WRITTEN_FLAGS+1), "change next line");
3048 return_type = TypeInt::make(0, JVM_ACC_WRITTEN_FLAGS, Type::WidenMin);
3049 break;
3050 case vmIntrinsics::_isInterface:
3051 prim_return_value = intcon(0);
3052 break;
3053 case vmIntrinsics::_isArray:
3054 prim_return_value = intcon(0);
3055 expect_prim = true; // cf. ObjectStreamClass.getClassSignature
3056 break;
3057 case vmIntrinsics::_isPrimitive:
3058 prim_return_value = intcon(1);
3059 expect_prim = true; // obviously
3060 break;
3061 case vmIntrinsics::_isHidden:
3062 prim_return_value = intcon(0);
3063 break;
3064 case vmIntrinsics::_getSuperclass:
3065 prim_return_value = null();
3066 return_type = TypeInstPtr::MIRROR->cast_to_ptr_type(TypePtr::BotPTR);
3067 break;
3068 case vmIntrinsics::_getClassAccessFlags:
3069 prim_return_value = intcon(JVM_ACC_ABSTRACT | JVM_ACC_FINAL | JVM_ACC_PUBLIC);
3070 return_type = TypeInt::INT; // not bool! 6297094
3071 break;
3072 default:
3073 fatal_unexpected_iid(id);
3074 break;
3075 }
3076
3077 const TypeInstPtr* mirror_con = _gvn.type(mirror)->isa_instptr();
3078 if (mirror_con == NULL) return false; // cannot happen?
3079
3080#ifndef PRODUCT
3081 if (C->print_intrinsics() || C->print_inlining()) {
3082 ciType* k = mirror_con->java_mirror_type();
3083 if (k) {
3084 tty->print("Inlining %s on constant Class ", vmIntrinsics::name_at(intrinsic_id()));
3085 k->print_name();
3086 tty->cr();
3087 }
3088 }
3089#endif
3090
3091 // Null-check the mirror, and the mirror's klass ptr (in case it is a primitive).
3092 RegionNode* region = new RegionNode(PATH_LIMIT);
3093 record_for_igvn(region);
3094 PhiNode* phi = new PhiNode(region, return_type);
3095
3096 // The mirror will never be null for Reflection.getClassAccessFlags, however
3097 // it may be null for Class.isInstance or Class.getModifiers. Throw an NPE
3098 // if it is. See bug 4774291.
3099
3100 // For Reflection.getClassAccessFlags(), the null check occurs in
3101 // the wrong place; see inline_unsafe_access(), above, for a similar
3102 // situation.
3103 mirror = null_check(mirror);
3104 // If mirror or obj is dead, only null-path is taken.
3105 if (stopped()) return true;
3106
3107 if (expect_prim) never_see_null = false; // expect nulls (meaning prims)
3108
3109 // Now load the mirror's klass metaobject, and null-check it.
3110 // Side-effects region with the control path if the klass is null.
3111 Node* kls = load_klass_from_mirror(mirror, never_see_null, region, _prim_path);
3112 // If kls is null, we have a primitive mirror.
3113 phi->init_req(_prim_path, prim_return_value);
3114 if (stopped()) { set_result(region, phi); return true; }
3115 bool safe_for_replace = (region->in(_prim_path) == top());
3116
3117 Node* p; // handy temp
3118 Node* null_ctl;
3119
3120 // Now that we have the non-null klass, we can perform the real query.
3121 // For constant classes, the query will constant-fold in LoadNode::Value.
3122 Node* query_value = top();
3123 switch (id) {
3124 case vmIntrinsics::_isInstance:
3125 // nothing is an instance of a primitive type
3126 query_value = gen_instanceof(obj, kls, safe_for_replace);
3127 break;
3128
3129 case vmIntrinsics::_getModifiers:
3130 p = basic_plus_adr(kls, in_bytes(Klass::modifier_flags_offset()));
3131 query_value = make_load(NULL, p, TypeInt::INT, T_INT, MemNode::unordered);
3132 break;
3133
3134 case vmIntrinsics::_isInterface:
3135 // (To verify this code sequence, check the asserts in JVM_IsInterface.)
3136 if (generate_interface_guard(kls, region) != NULL)
3137 // A guard was added. If the guard is taken, it was an interface.
3138 phi->add_req(intcon(1));
3139 // If we fall through, it's a plain class.
3140 query_value = intcon(0);
3141 break;
3142
3143 case vmIntrinsics::_isArray:
3144 // (To verify this code sequence, check the asserts in JVM_IsArrayClass.)
3145 if (generate_array_guard(kls, region) != NULL)
3146 // A guard was added. If the guard is taken, it was an array.
3147 phi->add_req(intcon(1));
3148 // If we fall through, it's a plain class.
3149 query_value = intcon(0);
3150 break;
3151
3152 case vmIntrinsics::_isPrimitive:
3153 query_value = intcon(0); // "normal" path produces false
3154 break;
3155
3156 case vmIntrinsics::_isHidden:
3157 // (To verify this code sequence, check the asserts in JVM_IsHiddenClass.)
3158    if (generate_hidden_class_guard(kls, region) != NULL)
3159      // A guard was added. If the guard is taken, it was a hidden class.
3160 phi->add_req(intcon(1));
3161 // If we fall through, it's a plain class.
3162 query_value = intcon(0);
3163 break;
3164
3165
3166 case vmIntrinsics::_getSuperclass:
3167 // The rules here are somewhat unfortunate, but we can still do better
3168 // with random logic than with a JNI call.
3169 // Interfaces store null or Object as _super, but must report null.
3170 // Arrays store an intermediate super as _super, but must report Object.
3171 // Other types can report the actual _super.
3172 // (To verify this code sequence, check the asserts in JVM_IsInterface.)
3173    if (generate_interface_guard(kls, region) != NULL)
3174 // A guard was added. If the guard is taken, it was an interface.
3175 phi->add_req(null());
3176    if (generate_array_guard(kls, region) != NULL)
3177 // A guard was added. If the guard is taken, it was an array.
3178 phi->add_req(makecon(TypeInstPtr::make(env()->Object_klass()->java_mirror())));
3179 // If we fall through, it's a plain class. Get its _super.
3180 p = basic_plus_adr(kls, in_bytes(Klass::super_offset()));
3181    kls = _gvn.transform(LoadKlassNode::make(_gvn, NULL, immutable_memory(), p, TypeRawPtr::BOTTOM, TypeInstKlassPtr::OBJECT_OR_NULL));
3182 null_ctl = top();
3183 kls = null_check_oop(kls, &null_ctl);
3184 if (null_ctl != top()) {
3185 // If the guard is taken, Object.superClass is null (both klass and mirror).
3186 region->add_req(null_ctl);
3187 phi ->add_req(null());
3188 }
3189 if (!stopped()) {
3190 query_value = load_mirror_from_klass(kls);
3191 }
3192 break;
3193
3194 case vmIntrinsics::_getClassAccessFlags:
3195 p = basic_plus_adr(kls, in_bytes(Klass::access_flags_offset()));
3196    query_value = make_load(NULL, p, TypeInt::INT, T_INT, MemNode::unordered);
3197 break;
3198
3199 default:
3200 fatal_unexpected_iid(id);
3201 break;
3202 }
3203
3204 // Fall-through is the normal case of a query to a real class.
3205 phi->init_req(1, query_value);
3206 region->init_req(1, control());
3207
3208 C->set_has_split_ifs(true); // Has chance for split-if optimization
3209 set_result(region, phi);
3210 return true;
3211}
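// A minimal, standalone sketch (not HotSpot code) of the getSuperclass
// reporting rules encoded above: interfaces report null, arrays report
// Object, everything else reports its actual superclass (null for Object
// itself). The Kls struct and names are illustrative assumptions only.
#include <string>

struct Kls {
  std::string name;
  bool is_interface;
  bool is_array;
  const Kls* super;   // raw _super link; for arrays this may be an intermediate type
};

static const Kls* reported_superclass(const Kls* k, const Kls* object_kls) {
  if (k->is_interface) return nullptr;      // interfaces must report null
  if (k->is_array)     return object_kls;   // arrays must report Object
  return k->super;                          // plain classes report _super (null for Object)
}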
3212
3213//-------------------------inline_Class_cast-------------------
3214bool LibraryCallKit::inline_Class_cast() {
3215 Node* mirror = argument(0); // Class
3216 Node* obj = argument(1);
3217 const TypeInstPtr* mirror_con = _gvn.type(mirror)->isa_instptr();
3218  if (mirror_con == NULL) {
3219 return false; // dead path (mirror->is_top()).
3220 }
3221  if (obj == NULL || obj->is_top()) {
3222 return false; // dead path
3223 }
3224 const TypeOopPtr* tp = _gvn.type(obj)->isa_oopptr();
3225
3226 // First, see if Class.cast() can be folded statically.
3227 // java_mirror_type() returns non-null for compile-time Class constants.
3228 ciType* tm = mirror_con->java_mirror_type();
3229  if (tm != NULL && tm->is_klass() &&
3230      tp != NULL && tp->klass() != NULL) {
3231 if (!tp->klass()->is_loaded()) {
3232 // Don't use intrinsic when class is not loaded.
3233 return false;
3234 } else {
3235 int static_res = C->static_subtype_check(tm->as_klass(), tp->klass());
3236 if (static_res == Compile::SSC_always_true) {
3237 // isInstance() is true - fold the code.
3238 set_result(obj);
3239 return true;
3240 } else if (static_res == Compile::SSC_always_false) {
3241 // Don't use intrinsic, have to throw ClassCastException.
3242 // If the reference is null, the non-intrinsic bytecode will
3243 // be optimized appropriately.
3244 return false;
3245 }
3246 }
3247 }
3248
3249  // Bail out of the intrinsic and do normal inlining if the exception path is frequent.
3250 if (too_many_traps(Deoptimization::Reason_intrinsic)) {
3251 return false;
3252 }
3253
3254 // Generate dynamic checks.
3255  // Class.cast() is the Java implementation of the checkcast bytecode.
3256 // Do checkcast (Parse::do_checkcast()) optimizations here.
3257
3258 mirror = null_check(mirror);
3259 // If mirror is dead, only null-path is taken.
3260 if (stopped()) {
3261 return true;
3262 }
3263
3264  // Paths: the object is not a subtype, or the mirror's klass ptr is NULL (in case it is a primitive).
3265 enum { _bad_type_path = 1, _prim_path = 2, PATH_LIMIT };
3266 RegionNode* region = new RegionNode(PATH_LIMIT);
3267 record_for_igvn(region);
3268
3269 // Now load the mirror's klass metaobject, and null-check it.
3270 // If kls is null, we have a primitive mirror and
3271 // nothing is an instance of a primitive type.
3272 Node* kls = load_klass_from_mirror(mirror, false, region, _prim_path);
3273
3274 Node* res = top();
3275 if (!stopped()) {
3276 Node* bad_type_ctrl = top();
3277 // Do checkcast optimizations.
3278 res = gen_checkcast(obj, kls, &bad_type_ctrl);
3279 region->init_req(_bad_type_path, bad_type_ctrl);
3280 }
3281 if (region->in(_prim_path) != top() ||
3282 region->in(_bad_type_path) != top()) {
3283 // Let Interpreter throw ClassCastException.
3284 PreserveJVMState pjvms(this);
3285 set_control(_gvn.transform(region));
3286 uncommon_trap(Deoptimization::Reason_intrinsic,
3287 Deoptimization::Action_maybe_recompile);
3288 }
3289 if (!stopped()) {
3290 set_result(res);
3291 }
3292 return true;
3293}
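// A standalone sketch (illustrative only) of the three-way decision that
// inline_Class_cast derives from the static subtype check above. The enum
// and function names are assumptions for the example, not the real Compile
// API.
enum StaticSubtypeResult { SSC_ALWAYS_TRUE, SSC_ALWAYS_FALSE, SSC_UNKNOWN };

enum CastPlan {
  FOLD_TO_IDENTITY,   // cast statically known to succeed: the result is obj itself
  USE_BYTECODE,       // cast statically fails (or type unloaded): let checkcast throw
  EMIT_DYNAMIC_CHECK  // otherwise: generate gen_checkcast guarded by an uncommon trap
};

static CastPlan plan_class_cast(StaticSubtypeResult r, bool klass_loaded) {
  if (!klass_loaded)          return USE_BYTECODE;
  if (r == SSC_ALWAYS_TRUE)   return FOLD_TO_IDENTITY;
  if (r == SSC_ALWAYS_FALSE)  return USE_BYTECODE;
  return EMIT_DYNAMIC_CHECK;
}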
3294
3295
3296//--------------------------inline_native_subtype_check------------------------
3297// This intrinsic takes the JNI calls out of the heart of
3298// UnsafeFieldAccessorImpl.set, which improves Field.set, readObject, etc.
3299bool LibraryCallKit::inline_native_subtype_check() {
3300 // Pull both arguments off the stack.
3301 Node* args[2]; // two java.lang.Class mirrors: superc, subc
3302 args[0] = argument(0);
3303 args[1] = argument(1);
3304 Node* klasses[2]; // corresponding Klasses: superk, subk
3305 klasses[0] = klasses[1] = top();
3306
3307 enum {
3308 // A full decision tree on {superc is prim, subc is prim}:
3309 _prim_0_path = 1, // {P,N} => false
3310 // {P,P} & superc!=subc => false
3311 _prim_same_path, // {P,P} & superc==subc => true
3312 _prim_1_path, // {N,P} => false
3313 _ref_subtype_path, // {N,N} & subtype check wins => true
3314 _both_ref_path, // {N,N} & subtype check loses => false
3315 PATH_LIMIT
3316 };
3317
3318 RegionNode* region = new RegionNode(PATH_LIMIT);
3319 Node* phi = new PhiNode(region, TypeInt::BOOL);
3320 record_for_igvn(region);
3321
3322 const TypePtr* adr_type = TypeRawPtr::BOTTOM; // memory type of loads
3323 const TypeKlassPtr* kls_type = TypeInstKlassPtr::OBJECT_OR_NULL;
3324 int class_klass_offset = java_lang_Class::klass_offset();
3325
3326 // First null-check both mirrors and load each mirror's klass metaobject.
3327 int which_arg;
3328 for (which_arg = 0; which_arg <= 1; which_arg++) {
3329 Node* arg = args[which_arg];
3330 arg = null_check(arg);
3331 if (stopped()) break;
3332 args[which_arg] = arg;
3333
3334 Node* p = basic_plus_adr(arg, class_klass_offset);
3335    Node* kls = LoadKlassNode::make(_gvn, NULL, immutable_memory(), p, adr_type, kls_type);
3336 klasses[which_arg] = _gvn.transform(kls);
3337 }
3338
3339 // Having loaded both klasses, test each for null.
3340 bool never_see_null = !too_many_traps(Deoptimization::Reason_null_check);
3341 for (which_arg = 0; which_arg <= 1; which_arg++) {
3342 Node* kls = klasses[which_arg];
3343 Node* null_ctl = top();
3344 kls = null_check_oop(kls, &null_ctl, never_see_null);
3345 int prim_path = (which_arg == 0 ? _prim_0_path : _prim_1_path);
3346 region->init_req(prim_path, null_ctl);
3347 if (stopped()) break;
3348 klasses[which_arg] = kls;
3349 }
3350
3351 if (!stopped()) {
3352 // now we have two reference types, in klasses[0..1]
3353 Node* subk = klasses[1]; // the argument to isAssignableFrom
3354 Node* superk = klasses[0]; // the receiver
3355 region->set_req(_both_ref_path, gen_subtype_check(subk, superk));
3356 // now we have a successful reference subtype check
3357 region->set_req(_ref_subtype_path, control());
3358 }
3359
3360 // If both operands are primitive (both klasses null), then
3361 // we must return true when they are identical primitives.
3362 // It is convenient to test this after the first null klass check.
3363 set_control(region->in(_prim_0_path)); // go back to first null check
3364 if (!stopped()) {
3365 // Since superc is primitive, make a guard for the superc==subc case.
3366 Node* cmp_eq = _gvn.transform(new CmpPNode(args[0], args[1]));
3367 Node* bol_eq = _gvn.transform(new BoolNode(cmp_eq, BoolTest::eq));
3368    generate_guard(bol_eq, region, PROB_FAIR);
3369 if (region->req() == PATH_LIMIT+1) {
3370 // A guard was added. If the added guard is taken, superc==subc.
3371 region->swap_edges(PATH_LIMIT, _prim_same_path);
3372 region->del_req(PATH_LIMIT);
3373 }
3374 region->set_req(_prim_0_path, control()); // Not equal after all.
3375 }
3376
3377 // these are the only paths that produce 'true':
3378 phi->set_req(_prim_same_path, intcon(1));
3379 phi->set_req(_ref_subtype_path, intcon(1));
3380
3381 // pull together the cases:
3382  assert(region->req() == PATH_LIMIT, "sane region");
3383 for (uint i = 1; i < region->req(); i++) {
3384 Node* ctl = region->in(i);
3385    if (ctl == NULL || ctl == top()) {
3386 region->set_req(i, top());
3387 phi ->set_req(i, top());
3388    } else if (phi->in(i) == NULL) {
3389 phi->set_req(i, intcon(0)); // all other paths produce 'false'
3390 }
3391 }
3392
3393 set_control(_gvn.transform(region));
3394 set_result(_gvn.transform(phi));
3395 return true;
3396}
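// A standalone truth-table sketch of the isAssignableFrom decision tree that
// the region/phi above encodes. "Primitive" stands for a null klass loaded
// from the mirror; the helper below is illustrative, not VM code.
static bool is_assignable_from(bool superc_is_prim, bool subc_is_prim,
                               bool same_mirror, bool subtype_check_wins) {
  if (superc_is_prim && subc_is_prim) return same_mirror;   // {P,P}: true only if identical
  if (superc_is_prim || subc_is_prim) return false;         // {P,N} or {N,P}: false
  return subtype_check_wins;                                // {N,N}: defer to the subtype check
}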
3397
3398//---------------------generate_array_guard_common------------------------
3399Node* LibraryCallKit::generate_array_guard_common(Node* kls, RegionNode* region,
3400 bool obj_array, bool not_array) {
3401
3402 if (stopped()) {
3403    return NULL;
3404 }
3405
3406 // If obj_array/non_array==false/false:
3407 // Branch around if the given klass is in fact an array (either obj or prim).
3408 // If obj_array/non_array==false/true:
3409 // Branch around if the given klass is not an array klass of any kind.
3410 // If obj_array/non_array==true/true:
3411 // Branch around if the kls is not an oop array (kls is int[], String, etc.)
3412 // If obj_array/non_array==true/false:
3413 // Branch around if the kls is an oop array (Object[] or subtype)
3414 //
3415 // Like generate_guard, adds a new path onto the region.
3416 jint layout_con = 0;
3417 Node* layout_val = get_layout_helper(kls, layout_con);
3418  if (layout_val == NULL) {
3419 bool query = (obj_array
3420 ? Klass::layout_helper_is_objArray(layout_con)
3421 : Klass::layout_helper_is_array(layout_con));
3422 if (query == not_array) {
3423      return NULL;                      // never a branch
3424 } else { // always a branch
3425 Node* always_branch = control();
3426      if (region != NULL)
3427 region->add_req(always_branch);
3428 set_control(top());
3429 return always_branch;
3430 }
3431 }
3432 // Now test the correct condition.
3433 jint nval = (obj_array
3434 ? (jint)(Klass::_lh_array_tag_type_value
3435 << Klass::_lh_array_tag_shift)
3436 : Klass::_lh_neutral_value);
3437 Node* cmp = _gvn.transform(new CmpINode(layout_val, intcon(nval)));
3438 BoolTest::mask btest = BoolTest::lt; // correct for testing is_[obj]array
3439 // invert the test if we are looking for a non-array
3440 if (not_array) btest = BoolTest(btest).negate();
3441 Node* bol = _gvn.transform(new BoolNode(cmp, btest));
3442 return generate_fair_guard(bol, region);
3443}
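// A standalone sketch (not VM code) of the layout-helper trick used above:
// array klasses store a negative layout helper with a 2-bit tag in the top
// bits (assumed here: 0b11 for primitive arrays, 0b10 for object arrays),
// so "is an array" is a signed compare against 0 and "is an object array"
// is a signed compare against the primitive-array tag threshold, matching
// the BoolTest::lt comparison generated above.
#include <cstdint>

static const int32_t kTypeArrayThreshold = (int32_t)((uint32_t)~0x0u << 30);  // 0xC0000000

static bool lh_is_array(int32_t layout_helper)    { return layout_helper < 0; }
static bool lh_is_objArray(int32_t layout_helper) { return layout_helper < kTypeArrayThreshold; }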
3444
3445
3446//-----------------------inline_native_newArray--------------------------
3447  // private static native Object java.lang.reflect.Array.newArray(Class<?> componentType, int length);
3448// private native Object Unsafe.allocateUninitializedArray0(Class<?> cls, int size);
3449bool LibraryCallKit::inline_unsafe_newArray(bool uninitialized) {
3450 Node* mirror;
3451 Node* count_val;
3452 if (uninitialized) {
3453 mirror = argument(1);
3454 count_val = argument(2);
3455 } else {
3456 mirror = argument(0);
3457 count_val = argument(1);
3458 }
3459
3460 mirror = null_check(mirror);
3461 // If mirror or obj is dead, only null-path is taken.
3462 if (stopped()) return true;
3463
3464 enum { _normal_path = 1, _slow_path = 2, PATH_LIMIT };
3465 RegionNode* result_reg = new RegionNode(PATH_LIMIT);
3466 PhiNode* result_val = new PhiNode(result_reg, TypeInstPtr::NOTNULL);
3467 PhiNode* result_io = new PhiNode(result_reg, Type::ABIO);
3468 PhiNode* result_mem = new PhiNode(result_reg, Type::MEMORY, TypePtr::BOTTOM);
3469
3470 bool never_see_null = !too_many_traps(Deoptimization::Reason_null_check);
3471 Node* klass_node = load_array_klass_from_mirror(mirror, never_see_null,
3472 result_reg, _slow_path);
3473 Node* normal_ctl = control();
3474 Node* no_array_ctl = result_reg->in(_slow_path);
3475
3476 // Generate code for the slow case. We make a call to newArray().
3477 set_control(no_array_ctl);
3478 if (!stopped()) {
3479 // Either the input type is void.class, or else the
3480 // array klass has not yet been cached. Either the
3481 // ensuing call will throw an exception, or else it
3482 // will cache the array klass for next time.
3483 PreserveJVMState pjvms(this);
3484    CallJavaNode* slow_call = NULL;
3485 if (uninitialized) {
3486 // Generate optimized virtual call (holder class 'Unsafe' is final)
3487 slow_call = generate_method_call(vmIntrinsics::_allocateUninitializedArray, false, false);
3488 } else {
3489 slow_call = generate_method_call_static(vmIntrinsics::_newArray);
3490 }
3491 Node* slow_result = set_results_for_java_call(slow_call);
3492 // this->control() comes from set_results_for_java_call
3493 result_reg->set_req(_slow_path, control());
3494 result_val->set_req(_slow_path, slow_result);
3495 result_io ->set_req(_slow_path, i_o());
3496 result_mem->set_req(_slow_path, reset_memory());
3497 }
3498
3499 set_control(normal_ctl);
3500 if (!stopped()) {
3501 // Normal case: The array type has been cached in the java.lang.Class.
3502 // The following call works fine even if the array type is polymorphic.
3503 // It could be a dynamic mix of int[], boolean[], Object[], etc.
3504 Node* obj = new_array(klass_node, count_val, 0); // no arguments to push
3505 result_reg->init_req(_normal_path, control());
3506 result_val->init_req(_normal_path, obj);
3507 result_io ->init_req(_normal_path, i_o());
3508 result_mem->init_req(_normal_path, reset_memory());
3509
3510 if (uninitialized) {
3511 // Mark the allocation so that zeroing is skipped
3512 AllocateArrayNode* alloc = AllocateArrayNode::Ideal_array_allocation(obj, &_gvn);
3513 alloc->maybe_set_complete(&_gvn);
3514 }
3515 }
3516
3517 // Return the combined state.
3518 set_i_o( _gvn.transform(result_io) );
3519 set_all_memory( _gvn.transform(result_mem));
3520
3521 C->set_has_split_ifs(true); // Has chance for split-if optimization
3522 set_result(result_reg, result_val);
3523 return true;
3524}
3525
3526//----------------------inline_native_getLength--------------------------
3527// public static native int java.lang.reflect.Array.getLength(Object array);
3528bool LibraryCallKit::inline_native_getLength() {
3529 if (too_many_traps(Deoptimization::Reason_intrinsic)) return false;
3530
3531 Node* array = null_check(argument(0));
3532 // If array is dead, only null-path is taken.
3533 if (stopped()) return true;
3534
3535 // Deoptimize if it is a non-array.
3536  Node* non_array = generate_non_array_guard(load_object_klass(array), NULL);
3537
3538  if (non_array != NULL) {
3539 PreserveJVMState pjvms(this);
3540 set_control(non_array);
3541 uncommon_trap(Deoptimization::Reason_intrinsic,
3542 Deoptimization::Action_maybe_recompile);
3543 }
3544
3545 // If control is dead, only non-array-path is taken.
3546 if (stopped()) return true;
3547
3548  // This works fine even if the array type is polymorphic.
3549 // It could be a dynamic mix of int[], boolean[], Object[], etc.
3550 Node* result = load_array_length(array);
3551
3552 C->set_has_split_ifs(true); // Has chance for split-if optimization
3553 set_result(result);
3554 return true;
3555}
3556
3557//------------------------inline_array_copyOf----------------------------
3558// public static <T,U> T[] java.util.Arrays.copyOf( U[] original, int newLength, Class<? extends T[]> newType);
3559// public static <T,U> T[] java.util.Arrays.copyOfRange(U[] original, int from, int to, Class<? extends T[]> newType);
3560bool LibraryCallKit::inline_array_copyOf(bool is_copyOfRange) {
3561 if (too_many_traps(Deoptimization::Reason_intrinsic)) return false;
3562
3563 // Get the arguments.
3564 Node* original = argument(0);
3565 Node* start = is_copyOfRange? argument(1): intcon(0);
3566 Node* end = is_copyOfRange? argument(2): argument(1);
3567 Node* array_type_mirror = is_copyOfRange? argument(3): argument(2);
3568
3569  Node* newcopy = NULL;
3570
3571 // Set the original stack and the reexecute bit for the interpreter to reexecute
3572 // the bytecode that invokes Arrays.copyOf if deoptimization happens.
3573 { PreserveReexecuteState preexecs(this);
3574 jvms()->set_should_reexecute(true);
3575
3576 array_type_mirror = null_check(array_type_mirror);
3577 original = null_check(original);
3578
3579 // Check if a null path was taken unconditionally.
3580 if (stopped()) return true;
3581
3582 Node* orig_length = load_array_length(original);
3583
3584    Node* klass_node = load_klass_from_mirror(array_type_mirror, false, NULL, 0);
3585 klass_node = null_check(klass_node);
3586
3587 RegionNode* bailout = new RegionNode(1);
3588 record_for_igvn(bailout);
3589
3590 // Despite the generic type of Arrays.copyOf, the mirror might be int, int[], etc.
3591 // Bail out if that is so.
3592 Node* not_objArray = generate_non_objArray_guard(klass_node, bailout);
3593    if (not_objArray != NULL) {
3594 // Improve the klass node's type from the new optimistic assumption:
3595 ciKlass* ak = ciArrayKlass::make(env()->Object_klass());
3596 const Type* akls = TypeKlassPtr::make(TypePtr::NotNull, ak, 0/*offset*/);
3597 Node* cast = new CastPPNode(klass_node, akls);
3598 cast->init_req(0, control());
3599 klass_node = _gvn.transform(cast);
3600 }
3601
3602 // Bail out if either start or end is negative.
3603 generate_negative_guard(start, bailout, &start);
3604 generate_negative_guard(end, bailout, &end);
3605
3606 Node* length = end;
3607 if (_gvn.type(start) != TypeInt::ZERO) {
3608 length = _gvn.transform(new SubINode(end, start));
3609 }
3610
3611 // Bail out if length is negative.
3612    // Without this, new_array would throw
3613    // NegativeArraySizeException, but IllegalArgumentException is what
3614    // should be thrown.
3615 generate_negative_guard(length, bailout, &length);
3616
3617 if (bailout->req() > 1) {
3618 PreserveJVMState pjvms(this);
3619 set_control(_gvn.transform(bailout));
3620 uncommon_trap(Deoptimization::Reason_intrinsic,
3621 Deoptimization::Action_maybe_recompile);
3622 }
3623
3624 if (!stopped()) {
3625 // How many elements will we copy from the original?
3626 // The answer is MinI(orig_length - start, length).
3627 Node* orig_tail = _gvn.transform(new SubINode(orig_length, start));
3628 Node* moved = generate_min_max(vmIntrinsics::_min, orig_tail, length);
3629
3630 // Generate a direct call to the right arraycopy function(s).
3631 // We know the copy is disjoint but we might not know if the
3632 // oop stores need checking.
3633 // Extreme case: Arrays.copyOf((Integer[])x, 10, String[].class).
3634 // This will fail a store-check if x contains any non-nulls.
3635
3636 // ArrayCopyNode:Ideal may transform the ArrayCopyNode to
3637 // loads/stores but it is legal only if we're sure the
3638 // Arrays.copyOf would succeed. So we need all input arguments
3639 // to the copyOf to be validated, including that the copy to the
3640 // new array won't trigger an ArrayStoreException. That subtype
3641 // check can be optimized if we know something on the type of
3642 // the input array from type speculation.
3643 if (_gvn.type(klass_node)->singleton()) {
3644 ciKlass* subk = _gvn.type(load_object_klass(original))->is_klassptr()->klass();
3645 ciKlass* superk = _gvn.type(klass_node)->is_klassptr()->klass();
3646
3647 int test = C->static_subtype_check(superk, subk);
3648 if (test != Compile::SSC_always_true && test != Compile::SSC_always_false) {
3649 const TypeOopPtr* t_original = _gvn.type(original)->is_oopptr();
3650        if (t_original->speculative_type() != NULL) {
3651 original = maybe_cast_profiled_obj(original, t_original->speculative_type(), true);
3652 }
3653 }
3654 }
3655
3656 bool validated = false;
3657 // Reason_class_check rather than Reason_intrinsic because we
3658 // want to intrinsify even if this traps.
3659 if (!too_many_traps(Deoptimization::Reason_class_check)) {
3660 Node* not_subtype_ctrl = gen_subtype_check(original, klass_node);
3661
3662 if (not_subtype_ctrl != top()) {
3663 PreserveJVMState pjvms(this);
3664 set_control(not_subtype_ctrl);
3665 uncommon_trap(Deoptimization::Reason_class_check,
3666 Deoptimization::Action_make_not_entrant);
3667        assert(stopped(), "Should be stopped");
3668 }
3669 validated = true;
3670 }
3671
3672 if (!stopped()) {
3673 newcopy = new_array(klass_node, length, 0); // no arguments to push
3674
3675 ArrayCopyNode* ac = ArrayCopyNode::make(this, true, original, start, newcopy, intcon(0), moved, true, false,
3676 load_object_klass(original), klass_node);
3677 if (!is_copyOfRange) {
3678 ac->set_copyof(validated);
3679 } else {
3680 ac->set_copyofrange(validated);
3681 }
3682 Node* n = _gvn.transform(ac);
3683 if (n == ac) {
3684 ac->connect_outputs(this);
3685 } else {
3686          assert(validated, "shouldn't transform if all arguments not validated");
3687 set_all_memory(n);
3688 }
3689 }
3690 }
3691 } // original reexecute is set back here
3692
3693 C->set_has_split_ifs(true); // Has chance for split-if optimization
3694 if (!stopped()) {
3695 set_result(newcopy);
3696 }
3697 return true;
3698}
3699
3700
3701//----------------------generate_virtual_guard---------------------------
3702// Helper for hashCode and clone. Peeks inside the vtable to avoid a call.
3703Node* LibraryCallKit::generate_virtual_guard(Node* obj_klass,
3704 RegionNode* slow_region) {
3705 ciMethod* method = callee();
3706 int vtable_index = method->vtable_index();
3707  assert(vtable_index >= 0 || vtable_index == Method::nonvirtual_vtable_index,
3708         "bad index %d", vtable_index);
3709 // Get the Method* out of the appropriate vtable entry.
3710 int entry_offset = in_bytes(Klass::vtable_start_offset()) +
3711 vtable_index*vtableEntry::size_in_bytes() +
3712 vtableEntry::method_offset_in_bytes();
3713 Node* entry_addr = basic_plus_adr(obj_klass, entry_offset);
3714 Node* target_call = make_load(NULL__null, entry_addr, TypePtr::NOTNULL, T_ADDRESS, MemNode::unordered);
3715
3716 // Compare the target method with the expected method (e.g., Object.hashCode).
3717 const TypePtr* native_call_addr = TypeMetadataPtr::make(method);
3718
3719 Node* native_call = makecon(native_call_addr);
3720 Node* chk_native = _gvn.transform(new CmpPNode(target_call, native_call));
3721 Node* test_native = _gvn.transform(new BoolNode(chk_native, BoolTest::ne));
3722
3723 return generate_slow_guard(test_native, slow_region);
3724}
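// A standalone sketch of the address arithmetic above: the Method* for a
// virtual call lives at a fixed offset from the klass, computed from the
// vtable start offset, the per-entry size, and the method slot within an
// entry. The parameters are placeholders, not real VM layout constants.
#include <cstddef>

static size_t vtable_method_entry_offset(size_t vtable_start_offset,
                                         size_t entry_size_in_bytes,
                                         size_t method_offset_in_entry,
                                         int    vtable_index) {
  return vtable_start_offset
       + (size_t)vtable_index * entry_size_in_bytes
       + method_offset_in_entry;
}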
3725
3726//-----------------------generate_method_call----------------------------
3727// Use generate_method_call to make a slow-call to the real
3728// method if the fast path fails. An alternative would be to
3729// use a stub like OptoRuntime::slow_arraycopy_Java.
3730// This only works for expanding the current library call,
3731// not another intrinsic. (E.g., don't use this for making an
3732// arraycopy call inside of the copyOf intrinsic.)
3733CallJavaNode*
3734LibraryCallKit::generate_method_call(vmIntrinsics::ID method_id, bool is_virtual, bool is_static) {
3735 // When compiling the intrinsic method itself, do not use this technique.
3736  guarantee(callee() != C->method(), "cannot make slow-call to self");
3737
3738 ciMethod* method = callee();
3739 // ensure the JVMS we have will be correct for this call
3740  guarantee(method_id == method->intrinsic_id(), "must match");
3741
3742 const TypeFunc* tf = TypeFunc::make(method);
3743 CallJavaNode* slow_call;
3744 if (is_static) {
3745    assert(!is_virtual, "");
3746 slow_call = new CallStaticJavaNode(C, tf,
3747 SharedRuntime::get_resolve_static_call_stub(), method);
3748 } else if (is_virtual) {
3749 null_check_receiver();
3750 int vtable_index = Method::invalid_vtable_index;
3751 if (UseInlineCaches) {
3752 // Suppress the vtable call
3753 } else {
3754      // hashCode and clone are not miranda methods,
3755 // so the vtable index is fixed.
3756 // No need to use the linkResolver to get it.
3757 vtable_index = method->vtable_index();
3758      assert(vtable_index >= 0 || vtable_index == Method::nonvirtual_vtable_index,
3759             "bad index %d", vtable_index);
3760 }
3761 slow_call = new CallDynamicJavaNode(tf,
3762 SharedRuntime::get_resolve_virtual_call_stub(),
3763 method, vtable_index);
3764 } else { // neither virtual nor static: opt_virtual
3765 null_check_receiver();
3766 slow_call = new CallStaticJavaNode(C, tf,
3767 SharedRuntime::get_resolve_opt_virtual_call_stub(), method);
3768 slow_call->set_optimized_virtual(true);
3769 }
3770 if (CallGenerator::is_inlined_method_handle_intrinsic(this->method(), bci(), callee())) {
3771 // To be able to issue a direct call (optimized virtual or virtual)
3772 // and skip a call to MH.linkTo*/invokeBasic adapter, additional information
3773 // about the method being invoked should be attached to the call site to
3774 // make resolution logic work (see SharedRuntime::resolve_{virtual,opt_virtual}_call_C).
3775 slow_call->set_override_symbolic_info(true);
3776 }
3777 set_arguments_for_java_call(slow_call);
3778 set_edges_for_java_call(slow_call);
3779 return slow_call;
3780}
3781
3782
3783/**
3784 * Build special case code for calls to hashCode on an object. This call may
3785 * be virtual (invokevirtual) or bound (invokespecial). For each case we generate
3786 * slightly different code.
3787 */
3788bool LibraryCallKit::inline_native_hashcode(bool is_virtual, bool is_static) {
3789  assert(is_static == callee()->is_static(), "correct intrinsic selection");
3790  assert(!(is_virtual && is_static), "either virtual, special, or static");
3791
3792 enum { _slow_path = 1, _fast_path, _null_path, PATH_LIMIT };
3793
3794 RegionNode* result_reg = new RegionNode(PATH_LIMIT);
3795 PhiNode* result_val = new PhiNode(result_reg, TypeInt::INT);
3796 PhiNode* result_io = new PhiNode(result_reg, Type::ABIO);
3797 PhiNode* result_mem = new PhiNode(result_reg, Type::MEMORY, TypePtr::BOTTOM);
3798  Node* obj = NULL;
3799 if (!is_static) {
3800 // Check for hashing null object
3801 obj = null_check_receiver();
3802 if (stopped()) return true; // unconditionally null
3803 result_reg->init_req(_null_path, top());
3804 result_val->init_req(_null_path, top());
3805 } else {
3806 // Do a null check, and return zero if null.
3807 // System.identityHashCode(null) == 0
3808 obj = argument(0);
3809 Node* null_ctl = top();
3810 obj = null_check_oop(obj, &null_ctl);
3811 result_reg->init_req(_null_path, null_ctl);
3812 result_val->init_req(_null_path, _gvn.intcon(0));
3813 }
3814
3815 // Unconditionally null? Then return right away.
3816 if (stopped()) {
3817 set_control( result_reg->in(_null_path));
3818 if (!stopped())
3819 set_result(result_val->in(_null_path));
3820 return true;
3821 }
3822
3823 // We only go to the fast case code if we pass a number of guards. The
3824 // paths which do not pass are accumulated in the slow_region.
3825 RegionNode* slow_region = new RegionNode(1);
3826 record_for_igvn(slow_region);
3827
3828 // If this is a virtual call, we generate a funny guard. We pull out
3829 // the vtable entry corresponding to hashCode() from the target object.
3830 // If the target method which we are calling happens to be the native
3831 // Object hashCode() method, we pass the guard. We do not need this
3832 // guard for non-virtual calls -- the caller is known to be the native
3833 // Object hashCode().
3834 if (is_virtual) {
3835 // After null check, get the object's klass.
3836 Node* obj_klass = load_object_klass(obj);
3837 generate_virtual_guard(obj_klass, slow_region);
3838 }
3839
3840 // Get the header out of the object, use LoadMarkNode when available
3841 Node* header_addr = basic_plus_adr(obj, oopDesc::mark_offset_in_bytes());
3842 // The control of the load must be NULL. Otherwise, the load can move before
3843 // the null check after castPP removal.
3844  Node* no_ctrl = NULL;
3845  Node* header = make_load(no_ctrl, header_addr, TypeX_X, TypeX_X->basic_type(), MemNode::unordered);
3846
3847 // Test the header to see if it is unlocked.
3848  Node *lock_mask      = _gvn.MakeConX(markWord::lock_mask_in_place);
3849  Node *lmasked_header = _gvn.transform(new AndXNode(header, lock_mask));
3850  Node *unlocked_val   = _gvn.MakeConX(markWord::unlocked_value);
3851  Node *chk_unlocked   = _gvn.transform(new CmpXNode( lmasked_header, unlocked_val));
3852 Node *test_unlocked = _gvn.transform(new BoolNode( chk_unlocked, BoolTest::ne));
3853
3854 generate_slow_guard(test_unlocked, slow_region);
3855
3856 // Get the hash value and check to see that it has been properly assigned.
3857 // We depend on hash_mask being at most 32 bits and avoid the use of
3858 // hash_mask_in_place because it could be larger than 32 bits in a 64-bit
3859 // vm: see markWord.hpp.
3860 Node *hash_mask = _gvn.intcon(markWord::hash_mask);
3861 Node *hash_shift = _gvn.intcon(markWord::hash_shift);
3862  Node *hshifted_header= _gvn.transform(new URShiftXNode(header, hash_shift));
3863 // This hack lets the hash bits live anywhere in the mark object now, as long
3864 // as the shift drops the relevant bits into the low 32 bits. Note that
3865 // Java spec says that HashCode is an int so there's no point in capturing
3866 // an 'X'-sized hashcode (32 in 32-bit build or 64 in 64-bit build).
3867  hshifted_header      = ConvX2I(hshifted_header);
3868 Node *hash_val = _gvn.transform(new AndINode(hshifted_header, hash_mask));
3869
3870 Node *no_hash_val = _gvn.intcon(markWord::no_hash);
3871 Node *chk_assigned = _gvn.transform(new CmpINode( hash_val, no_hash_val));
3872 Node *test_assigned = _gvn.transform(new BoolNode( chk_assigned, BoolTest::eq));
3873
3874 generate_slow_guard(test_assigned, slow_region);
3875
3876 Node* init_mem = reset_memory();
3877 // fill in the rest of the null path:
3878 result_io ->init_req(_null_path, i_o());
3879 result_mem->init_req(_null_path, init_mem);
3880
3881 result_val->init_req(_fast_path, hash_val);
3882 result_reg->init_req(_fast_path, control());
3883 result_io ->init_req(_fast_path, i_o());
3884 result_mem->init_req(_fast_path, init_mem);
3885
3886 // Generate code for the slow case. We make a call to hashCode().
3887 set_control(_gvn.transform(slow_region));
3888 if (!stopped()) {
3889 // No need for PreserveJVMState, because we're using up the present state.
3890 set_all_memory(init_mem);
3891 vmIntrinsics::ID hashCode_id = is_static ? vmIntrinsics::_identityHashCode : vmIntrinsics::_hashCode;
3892 CallJavaNode* slow_call = generate_method_call(hashCode_id, is_virtual, is_static);
3893 Node* slow_result = set_results_for_java_call(slow_call);
3894 // this->control() comes from set_results_for_java_call
3895 result_reg->init_req(_slow_path, control());
3896 result_val->init_req(_slow_path, slow_result);
3897 result_io ->set_req(_slow_path, i_o());
3898 result_mem ->set_req(_slow_path, reset_memory());
3899 }
3900
3901 // Return the combined state.
3902 set_i_o( _gvn.transform(result_io) );
3903 set_all_memory( _gvn.transform(result_mem));
3904
3905 set_result(result_reg, result_val);
3906 return true;
3907}
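// A standalone sketch of the fast path above: take the header (mark word),
// check that the lock bits say "unlocked", then shift and mask out the
// identity hash, treating the "no hash yet" sentinel as a miss that falls
// back to the slow call. The bit-field widths and sentinel are passed in as
// parameters here because the real values live in markWord.hpp.
#include <cstdint>

struct FastHash { bool hit; int32_t hash; };

static FastHash fast_identity_hash(uint64_t mark,
                                   uint64_t lock_mask, uint64_t unlocked_value,
                                   unsigned hash_shift, uint32_t hash_mask,
                                   uint32_t no_hash_sentinel) {
  FastHash r = { false, 0 };
  if ((mark & lock_mask) != unlocked_value) return r;       // locked/inflated: slow path
  uint32_t h = (uint32_t)(mark >> hash_shift) & hash_mask;  // shift, then mask in 32 bits
  if (h == no_hash_sentinel) return r;                      // hash not assigned yet: slow path
  r.hit = true;
  r.hash = (int32_t)h;
  return r;
}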
3908
3909//---------------------------inline_native_getClass----------------------------
3910// public final native Class<?> java.lang.Object.getClass();
3911//
3912// Build special case code for calls to getClass on an object.
3913bool LibraryCallKit::inline_native_getClass() {
3914 Node* obj = null_check_receiver();
3915 if (stopped()) return true;
3916 set_result(load_mirror_from_klass(load_object_klass(obj)));
3917 return true;
3918}
3919
3920//-----------------inline_native_Reflection_getCallerClass---------------------
3921// public static native Class<?> sun.reflect.Reflection.getCallerClass();
3922//
3923// In the presence of deep enough inlining, getCallerClass() becomes a no-op.
3924//
3925// NOTE: This code must perform the same logic as JVM_GetCallerClass
3926// in that it must skip particular security frames and checks for
3927// caller sensitive methods.
3928bool LibraryCallKit::inline_native_Reflection_getCallerClass() {
3929#ifndef PRODUCT
3930 if ((C->print_intrinsics() || C->print_inlining()) && Verbose) {
3931 tty->print_cr("Attempting to inline sun.reflect.Reflection.getCallerClass");
3932 }
3933#endif
3934
3935 if (!jvms()->has_method()) {
3936#ifndef PRODUCT
3937 if ((C->print_intrinsics() || C->print_inlining()) && Verbose) {
3938 tty->print_cr(" Bailing out because intrinsic was inlined at top level");
3939 }
3940#endif
3941 return false;
3942 }
3943
3944 // Walk back up the JVM state to find the caller at the required
3945 // depth.
3946 JVMState* caller_jvms = jvms();
3947
3948 // Cf. JVM_GetCallerClass
3949 // NOTE: Start the loop at depth 1 because the current JVM state does
3950 // not include the Reflection.getCallerClass() frame.
3951  for (int n = 1; caller_jvms != NULL; caller_jvms = caller_jvms->caller(), n++) {
3952 ciMethod* m = caller_jvms->method();
3953 switch (n) {
3954 case 0:
3955      fatal("current JVM state does not include the Reflection.getCallerClass frame");
3956 break;
3957 case 1:
3958 // Frame 0 and 1 must be caller sensitive (see JVM_GetCallerClass).
3959 if (!m->caller_sensitive()) {
3960#ifndef PRODUCT
3961 if ((C->print_intrinsics() || C->print_inlining()) && Verbose) {
3962 tty->print_cr(" Bailing out: CallerSensitive annotation expected at frame %d", n);
3963 }
3964#endif
3965 return false; // bail-out; let JVM_GetCallerClass do the work
3966 }
3967 break;
3968 default:
3969 if (!m->is_ignored_by_security_stack_walk()) {
3970 // We have reached the desired frame; return the holder class.
3971 // Acquire method holder as java.lang.Class and push as constant.
3972 ciInstanceKlass* caller_klass = caller_jvms->method()->holder();
3973 ciInstance* caller_mirror = caller_klass->java_mirror();
3974 set_result(makecon(TypeInstPtr::make(caller_mirror)));
3975
3976#ifndef PRODUCT
3977 if ((C->print_intrinsics() || C->print_inlining()) && Verbose) {
3978 tty->print_cr(" Succeeded: caller = %d) %s.%s, JVMS depth = %d", n, caller_klass->name()->as_utf8(), caller_jvms->method()->name()->as_utf8(), jvms()->depth());
3979 tty->print_cr(" JVM state at this point:");
3980 for (int i = jvms()->depth(), n = 1; i >= 1; i--, n++) {
3981 ciMethod* m = jvms()->of_depth(i)->method();
3982 tty->print_cr(" %d) %s.%s", n, m->holder()->name()->as_utf8(), m->name()->as_utf8());
3983 }
3984 }
3985#endif
3986 return true;
3987 }
3988 break;
3989 }
3990 }
3991
3992#ifndef PRODUCT
3993 if ((C->print_intrinsics() || C->print_inlining()) && Verbose) {
3994 tty->print_cr(" Bailing out because caller depth exceeded inlining depth = %d", jvms()->depth());
3995 tty->print_cr(" JVM state at this point:");
3996 for (int i = jvms()->depth(), n = 1; i >= 1; i--, n++) {
3997 ciMethod* m = jvms()->of_depth(i)->method();
3998 tty->print_cr(" %d) %s.%s", n, m->holder()->name()->as_utf8(), m->name()->as_utf8());
3999 }
4000 }
4001#endif
4002
4003 return false; // bail-out; let JVM_GetCallerClass do the work
4004}
4005
4006bool LibraryCallKit::inline_fp_conversions(vmIntrinsics::ID id) {
4007 Node* arg = argument(0);
4008  Node* result = NULL;
4009
4010 switch (id) {
4011 case vmIntrinsics::_floatToRawIntBits: result = new MoveF2INode(arg); break;
4012 case vmIntrinsics::_intBitsToFloat: result = new MoveI2FNode(arg); break;
4013 case vmIntrinsics::_doubleToRawLongBits: result = new MoveD2LNode(arg); break;
4014 case vmIntrinsics::_longBitsToDouble: result = new MoveL2DNode(arg); break;
4015
4016 case vmIntrinsics::_doubleToLongBits: {
4017 // two paths (plus control) merge in a wood
4018 RegionNode *r = new RegionNode(3);
4019 Node *phi = new PhiNode(r, TypeLong::LONG);
4020
4021 Node *cmpisnan = _gvn.transform(new CmpDNode(arg, arg));
4022 // Build the boolean node
4023 Node *bolisnan = _gvn.transform(new BoolNode(cmpisnan, BoolTest::ne));
4024
4025 // Branch either way.
4026 // NaN case is less traveled, which makes all the difference.
4027    IfNode *ifisnan = create_and_xform_if(control(), bolisnan, PROB_STATIC_FREQUENT, COUNT_UNKNOWN);
4028 Node *opt_isnan = _gvn.transform(ifisnan);
4029    assert( opt_isnan->is_If(), "Expect an IfNode");
4030 IfNode *opt_ifisnan = (IfNode*)opt_isnan;
4031 Node *iftrue = _gvn.transform(new IfTrueNode(opt_ifisnan));
4032
4033 set_control(iftrue);
4034
4035    static const jlong nan_bits = CONST64(0x7ff8000000000000);
4036 Node *slow_result = longcon(nan_bits); // return NaN
4037 phi->init_req(1, _gvn.transform( slow_result ));
4038 r->init_req(1, iftrue);
4039
4040 // Else fall through
4041 Node *iffalse = _gvn.transform(new IfFalseNode(opt_ifisnan));
4042 set_control(iffalse);
4043
4044 phi->init_req(2, _gvn.transform(new MoveD2LNode(arg)));
4045 r->init_req(2, iffalse);
4046
4047 // Post merge
4048 set_control(_gvn.transform(r));
4049 record_for_igvn(r);
4050
4051 C->set_has_split_ifs(true); // Has chance for split-if optimization
4052 result = phi;
4053    assert(result->bottom_type()->isa_long(), "must be");
4054 break;
4055 }
4056
4057 case vmIntrinsics::_floatToIntBits: {
4058 // two paths (plus control) merge in a wood
4059 RegionNode *r = new RegionNode(3);
4060 Node *phi = new PhiNode(r, TypeInt::INT);
4061
4062 Node *cmpisnan = _gvn.transform(new CmpFNode(arg, arg));
4063 // Build the boolean node
4064 Node *bolisnan = _gvn.transform(new BoolNode(cmpisnan, BoolTest::ne));
4065
4066 // Branch either way.
4067 // NaN case is less traveled, which makes all the difference.
4068    IfNode *ifisnan = create_and_xform_if(control(), bolisnan, PROB_STATIC_FREQUENT, COUNT_UNKNOWN);
4069 Node *opt_isnan = _gvn.transform(ifisnan);
4070    assert( opt_isnan->is_If(), "Expect an IfNode");
4071 IfNode *opt_ifisnan = (IfNode*)opt_isnan;
4072 Node *iftrue = _gvn.transform(new IfTrueNode(opt_ifisnan));
4073
4074 set_control(iftrue);
4075
4076 static const jint nan_bits = 0x7fc00000;
4077 Node *slow_result = makecon(TypeInt::make(nan_bits)); // return NaN
4078 phi->init_req(1, _gvn.transform( slow_result ));
4079 r->init_req(1, iftrue);
4080
4081 // Else fall through
4082 Node *iffalse = _gvn.transform(new IfFalseNode(opt_ifisnan));
4083 set_control(iffalse);
4084
4085 phi->init_req(2, _gvn.transform(new MoveF2INode(arg)));
4086 r->init_req(2, iffalse);
4087
4088 // Post merge
4089 set_control(_gvn.transform(r));
4090 record_for_igvn(r);
4091
4092 C->set_has_split_ifs(true); // Has chance for split-if optimization
4093 result = phi;
4094    assert(result->bottom_type()->isa_int(), "must be");
4095 break;
4096 }
4097
4098 default:
4099 fatal_unexpected_iid(id);
4100 break;
4101 }
4102 set_result(_gvn.transform(result));
4103 return true;
4104}
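// A standalone sketch of the doubleToLongBits path above: any NaN input is
// collapsed to the canonical NaN bit pattern, otherwise the raw bits are
// returned unchanged (memcpy stands in for the MoveD2L node).
#include <cstdint>
#include <cstring>

static int64_t double_to_long_bits(double d) {
  if (d != d) {                               // NaN is the only value not equal to itself
    return INT64_C(0x7ff8000000000000);       // canonical NaN, as in the slow_result above
  }
  int64_t bits;
  std::memcpy(&bits, &d, sizeof(bits));       // raw bit copy, no numeric conversion
  return bits;
}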
4105
4106//----------------------inline_unsafe_copyMemory-------------------------
4107// public native void Unsafe.copyMemory0(Object srcBase, long srcOffset, Object destBase, long destOffset, long bytes);
4108
4109static bool has_wide_mem(PhaseGVN& gvn, Node* addr, Node* base) {
4110 const TypeAryPtr* addr_t = gvn.type(addr)->isa_aryptr();
4111 const Type* base_t = gvn.type(base);
4112
4113 bool in_native = (base_t == TypePtr::NULL_PTR);
4114 bool in_heap = !TypePtr::NULL_PTR->higher_equal(base_t);
4115 bool is_mixed = !in_heap && !in_native;
4116
4117 if (is_mixed) {
4118 return true; // mixed accesses can touch both on-heap and off-heap memory
4119 }
4120 if (in_heap) {
4121    bool is_prim_array = (addr_t != NULL) && (addr_t->elem() != Type::BOTTOM);
4122 if (!is_prim_array) {
4123 // Though Unsafe.copyMemory() ensures at runtime for on-heap accesses that base is a primitive array,
4124 // there's not enough type information available to determine proper memory slice for it.
4125 return true;
4126 }
4127 }
4128 return false;
4129}
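// A standalone sketch of the classification has_wide_mem() performs on an
// Unsafe base/offset pair: a statically-null base means off-heap memory, a
// provably-non-null base means on-heap, and anything that could be either is
// "mixed" and must be treated as touching all of memory. The enum is an
// illustrative stand-in for the TypePtr lattice queries used above.
enum BaseKind { BASE_DEFINITELY_NULL, BASE_DEFINITELY_NONNULL, BASE_MAYBE_NULL };

static bool needs_wide_memory_state(BaseKind base, bool is_primitive_array) {
  if (base == BASE_MAYBE_NULL) return true;            // mixed: may be on- and off-heap
  if (base == BASE_DEFINITELY_NONNULL && !is_primitive_array) {
    return true;                                       // on-heap but the memory slice is unknown
  }
  return false;                                        // off-heap, or a known primitive-array slice
}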
4130
4131bool LibraryCallKit::inline_unsafe_copyMemory() {
4132 if (callee()->is_static()) return false; // caller must have the capability!
4133 null_check_receiver(); // null-check receiver
4134 if (stopped()) return true;
4135
4136 C->set_has_unsafe_access(true); // Mark eventual nmethod as "unsafe".
4137
4138 Node* src_base = argument(1); // type: oop
4139  Node* src_off  = ConvL2X(argument(2));  // type: long
4140  Node* dst_base = argument(4);  // type: oop
4141  Node* dst_off  = ConvL2X(argument(5));  // type: long
4142  Node* size     = ConvL2X(argument(7));  // type: long
4143
4144  assert(Unsafe_field_offset_to_byte_offset(11) == 11,
4145         "fieldOffset must be byte-scaled");
4146
4147 Node* src_addr = make_unsafe_address(src_base, src_off);
4148 Node* dst_addr = make_unsafe_address(dst_base, dst_off);
4149
4150 Node* thread = _gvn.transform(new ThreadLocalNode());
4151 Node* doing_unsafe_access_addr = basic_plus_adr(top(), thread, in_bytes(JavaThread::doing_unsafe_access_offset()));
4152 BasicType doing_unsafe_access_bt = T_BYTE;
4153  assert((sizeof(bool) * CHAR_BIT) == 8, "not implemented");
4154
4155 // update volatile field
4156 store_to_memory(control(), doing_unsafe_access_addr, intcon(1), doing_unsafe_access_bt, Compile::AliasIdxRaw, MemNode::unordered);
4157
4158 int flags = RC_LEAF | RC_NO_FP;
4159
4160 const TypePtr* dst_type = TypePtr::BOTTOM;
4161
4162 // Adjust memory effects of the runtime call based on input values.
4163 if (!has_wide_mem(_gvn, src_addr, src_base) &&
4164 !has_wide_mem(_gvn, dst_addr, dst_base)) {
4165 dst_type = _gvn.type(dst_addr)->is_ptr(); // narrow out memory
4166
4167 const TypePtr* src_type = _gvn.type(src_addr)->is_ptr();
4168 if (C->get_alias_index(src_type) == C->get_alias_index(dst_type)) {
4169 flags |= RC_NARROW_MEM; // narrow in memory
4170 }
4171 }
4172
4173 // Call it. Note that the length argument is not scaled.
4174 make_runtime_call(flags,
4175 OptoRuntime::fast_arraycopy_Type(),
4176 StubRoutines::unsafe_arraycopy(),
4177 "unsafe_arraycopy",
4178 dst_type,
4179 src_addr, dst_addr, size XTOP);
4180
4181 store_to_memory(control(), doing_unsafe_access_addr, intcon(0), doing_unsafe_access_bt, Compile::AliasIdxRaw, MemNode::unordered);
4182
4183 return true;
4184}
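// A standalone sketch of the protocol the intrinsic above follows around the
// raw copy: the per-thread doing_unsafe_access flag is raised before the
// unchecked copy and lowered afterwards, so a fault inside the copy can be
// recognized as an Unsafe access. The struct and function are illustrative
// placeholders, not the real JavaThread layout or arraycopy stub.
#include <cstring>

struct ThreadStateSketch { volatile unsigned char doing_unsafe_access; };

static void unsafe_copy_memory_sketch(ThreadStateSketch* self,
                                      void* dst, const void* src, size_t bytes) {
  self->doing_unsafe_access = 1;     // mark: a fault here is an Unsafe access
  std::memcpy(dst, src, bytes);      // stands in for StubRoutines::unsafe_arraycopy()
  self->doing_unsafe_access = 0;     // unmark
}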
4185
4186#undef XTOP
4187
4188//------------------------copy_to_clone-----------------------------------
4189// Helper function for inline_native_clone.
4190void LibraryCallKit::copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array) {
4191  assert(obj_size != NULL, "");
4192 Node* raw_obj = alloc_obj->in(1);
4193  assert(alloc_obj->is_CheckCastPP() && raw_obj->is_Proj() && raw_obj->in(0)->is_Allocate(), "");
4194
4195  AllocateNode* alloc = NULL;
4196 if (ReduceBulkZeroing) {
4197 // We will be completely responsible for initializing this object -
4198 // mark Initialize node as complete.
4199 alloc = AllocateNode::Ideal_allocation(alloc_obj, &_gvn);
4200    // The object was just allocated - there should be no stores yet!
4201    guarantee(alloc != NULL && alloc->maybe_set_complete(&_gvn), "");
4202 // Mark as complete_with_arraycopy so that on AllocateNode
4203 // expansion, we know this AllocateNode is initialized by an array
4204 // copy and a StoreStore barrier exists after the array copy.
4205 alloc->initialization()->set_complete_with_arraycopy();
4206 }
4207
4208 Node* size = _gvn.transform(obj_size);
4209 access_clone(obj, alloc_obj, size, is_array);
4210
4211 // Do not let reads from the cloned object float above the arraycopy.
4212  if (alloc != NULL) {
4213 // Do not let stores that initialize this object be reordered with
4214 // a subsequent store that would make this object accessible by
4215 // other threads.
4216 // Record what AllocateNode this StoreStore protects so that
4217 // escape analysis can go from the MemBarStoreStoreNode to the
4218 // AllocateNode and eliminate the MemBarStoreStoreNode if possible
4219 // based on the escape status of the AllocateNode.
4220 insert_mem_bar(Op_MemBarStoreStore, alloc->proj_out_or_null(AllocateNode::RawAddress));
4221 } else {
4222 insert_mem_bar(Op_MemBarCPUOrder);
4223 }
4224}
4225
4226//------------------------inline_native_clone----------------------------
4227// protected native Object java.lang.Object.clone();
4228//
4229// Here are the simple edge cases:
4230// null receiver => normal trap
4231// virtual and clone was overridden => slow path to out-of-line clone
4232// not cloneable or finalizer => slow path to out-of-line Object.clone
4233//
4234// The general case has two steps, allocation and copying.
4235// Allocation has two cases, and uses GraphKit::new_instance or new_array.
4236//
4237// Copying also has two cases, oop arrays and everything else.
4238// Oop arrays use arrayof_oop_arraycopy (same as System.arraycopy).
4239// Everything else uses the tight inline loop supplied by CopyArrayNode.
4240//
4241// These steps fold up nicely if and when the cloned object's klass
4242// can be sharply typed as an object array, a type array, or an instance.
4243//
4244bool LibraryCallKit::inline_native_clone(bool is_virtual) {
4245 PhiNode* result_val;
4246
4247 // Set the reexecute bit for the interpreter to reexecute
4248 // the bytecode that invokes Object.clone if deoptimization happens.
4249 { PreserveReexecuteState preexecs(this);
4250 jvms()->set_should_reexecute(true);
4251
4252 Node* obj = null_check_receiver();
4253 if (stopped()) return true;
4254
4255 const TypeOopPtr* obj_type = _gvn.type(obj)->is_oopptr();
4256
4257 // If we are going to clone an instance, we need its exact type to
4258 // know the number and types of fields to convert the clone to
4259 // loads/stores. Maybe a speculative type can help us.
4260 if (!obj_type->klass_is_exact() &&
4261 obj_type->speculative_type() != NULL__null &&
4262 obj_type->speculative_type()->is_instance_klass()) {
4263 ciInstanceKlass* spec_ik = obj_type->speculative_type()->as_instance_klass();
4264 if (spec_ik->nof_nonstatic_fields() <= ArrayCopyLoadStoreMaxElem &&
4265 !spec_ik->has_injected_fields()) {
4266 ciKlass* k = obj_type->klass();
4267 if (!k->is_instance_klass() ||
4268 k->as_instance_klass()->is_interface() ||
4269 k->as_instance_klass()->has_subklass()) {
4270 obj = maybe_cast_profiled_obj(obj, obj_type->speculative_type(), false);
4271 }
4272 }
4273 }
4274
4275 // Conservatively insert a memory barrier on all memory slices.
4276 // Do not let writes into the original float below the clone.
4277 insert_mem_bar(Op_MemBarCPUOrder);
4278
4279 // paths into result_reg:
4280 enum {
4281 _slow_path = 1, // out-of-line call to clone method (virtual or not)
4282 _objArray_path, // plain array allocation, plus arrayof_oop_arraycopy
4283 _array_path, // plain array allocation, plus arrayof_long_arraycopy
4284 _instance_path, // plain instance allocation, plus arrayof_long_arraycopy
4285 PATH_LIMIT
4286 };
4287 RegionNode* result_reg = new RegionNode(PATH_LIMIT);
4288 result_val = new PhiNode(result_reg, TypeInstPtr::NOTNULL);
4289 PhiNode* result_i_o = new PhiNode(result_reg, Type::ABIO);
4290 PhiNode* result_mem = new PhiNode(result_reg, Type::MEMORY, TypePtr::BOTTOM);
4291 record_for_igvn(result_reg);
4292
4293 Node* obj_klass = load_object_klass(obj);
4294    Node* array_ctl = generate_array_guard(obj_klass, (RegionNode*)NULL);
4295    if (array_ctl != NULL) {
4296 // It's an array.
4297 PreserveJVMState pjvms(this);
4298 set_control(array_ctl);
4299 Node* obj_length = load_array_length(obj);
4300      Node* obj_size  = NULL;
4301 Node* alloc_obj = new_array(obj_klass, obj_length, 0, &obj_size, /*deoptimize_on_exception=*/true);
4302
4303 BarrierSetC2* bs = BarrierSet::barrier_set()->barrier_set_c2();
4304 if (bs->array_copy_requires_gc_barriers(true, T_OBJECT, true, false, BarrierSetC2::Parsing)) {
4305 // If it is an oop array, it requires very special treatment,
4306 // because gc barriers are required when accessing the array.
4307      Node* is_obja = generate_objArray_guard(obj_klass, (RegionNode*)NULL);
4308      if (is_obja != NULL) {
4309 PreserveJVMState pjvms2(this);
4310 set_control(is_obja);
4311 // Generate a direct call to the right arraycopy function(s).
4312 // Clones are always tightly coupled.
4313 ArrayCopyNode* ac = ArrayCopyNode::make(this, true, obj, intcon(0), alloc_obj, intcon(0), obj_length, true, false);
4314 ac->set_clone_oop_array();
4315 Node* n = _gvn.transform(ac);
4316          assert(n == ac, "cannot disappear");
4317 ac->connect_outputs(this, /*deoptimize_on_exception=*/true);
4318
4319 result_reg->init_req(_objArray_path, control());
4320 result_val->init_req(_objArray_path, alloc_obj);
4321 result_i_o ->set_req(_objArray_path, i_o());
4322 result_mem ->set_req(_objArray_path, reset_memory());
4323 }
4324 }
4325 // Otherwise, there are no barriers to worry about.
4326 // (We can dispense with card marks if we know the allocation
4327 // comes out of eden (TLAB)... In fact, ReduceInitialCardMarks
4328 // causes the non-eden paths to take compensating steps to
4329 // simulate a fresh allocation, so that no further
4330 // card marks are required in compiled code to initialize
4331 // the object.)
4332
4333 if (!stopped()) {
4334 copy_to_clone(obj, alloc_obj, obj_size, true);
4335
4336 // Present the results of the copy.
4337 result_reg->init_req(_array_path, control());
4338 result_val->init_req(_array_path, alloc_obj);
4339 result_i_o ->set_req(_array_path, i_o());
4340 result_mem ->set_req(_array_path, reset_memory());
4341 }
4342 }
4343
4344 // We only go to the instance fast case code if we pass a number of guards.
4345 // The paths which do not pass are accumulated in the slow_region.
4346 RegionNode* slow_region = new RegionNode(1);
4347 record_for_igvn(slow_region);
4348 if (!stopped()) {
4349 // It's an instance (we did array above). Make the slow-path tests.
4350 // If this is a virtual call, we generate a funny guard. We grab
4351 // the vtable entry corresponding to clone() from the target object.
4352 // If the target method which we are calling happens to be the
4353 // Object clone() method, we pass the guard. We do not need this
4354 // guard for non-virtual calls; the caller is known to be the native
4355 // Object clone().
4356 if (is_virtual) {
4357 generate_virtual_guard(obj_klass, slow_region);
4358 }
4359
4360 // The object must be easily cloneable and must not have a finalizer.
4361 // Both of these conditions may be checked in a single test.
4362 // We could optimize the test further, but we don't care.
4363 generate_access_flags_guard(obj_klass,
4364 // Test both conditions:
4365 JVM_ACC_IS_CLONEABLE_FAST | JVM_ACC_HAS_FINALIZER,
4366 // Must be cloneable but not finalizer:
4367 JVM_ACC_IS_CLONEABLE_FAST,
4368 slow_region);
4369 }
4370
4371 if (!stopped()) {
4372 // It's an instance, and it passed the slow-path tests.
4373 PreserveJVMState pjvms(this);
4374 Node* obj_size = NULL__null;
4375 // Need to deoptimize on exception from allocation since Object.clone intrinsic
4376 // is reexecuted if deoptimization occurs and there could be problems when merging
4377 // exception state between multiple Object.clone versions (reexecute=true vs reexecute=false).
4378 Node* alloc_obj = new_instance(obj_klass, NULL__null, &obj_size, /*deoptimize_on_exception=*/true);
4379
4380 copy_to_clone(obj, alloc_obj, obj_size, false);
4381
4382 // Present the results of the slow call.
4383 result_reg->init_req(_instance_path, control());
4384 result_val->init_req(_instance_path, alloc_obj);
4385 result_i_o ->set_req(_instance_path, i_o());
4386 result_mem ->set_req(_instance_path, reset_memory());
4387 }
4388
4389 // Generate code for the slow case. We make a call to clone().
4390 set_control(_gvn.transform(slow_region));
4391 if (!stopped()) {
4392 PreserveJVMState pjvms(this);
4393 CallJavaNode* slow_call = generate_method_call(vmIntrinsics::_clone, is_virtual);
4394 // We need to deoptimize on exception (see comment above)
4395 Node* slow_result = set_results_for_java_call(slow_call, false, /* deoptimize */ true);
4396 // this->control() comes from set_results_for_java_call
4397 result_reg->init_req(_slow_path, control());
4398 result_val->init_req(_slow_path, slow_result);
4399 result_i_o ->set_req(_slow_path, i_o());
4400 result_mem ->set_req(_slow_path, reset_memory());
4401 }
4402
4403 // Return the combined state.
4404 set_control( _gvn.transform(result_reg));
4405 set_i_o( _gvn.transform(result_i_o));
4406 set_all_memory( _gvn.transform(result_mem));
4407 } // original reexecute is set back here
4408
4409 set_result(_gvn.transform(result_val));
4410 return true;
4411}
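
The clone intrinsic above joins its object-array, plain-array, instance and slow-call paths through one RegionNode plus a value/i_o/memory PhiNode group. Below is a minimal sketch of that merge idiom, assuming the same LibraryCallKit/GraphKit context and reusing only calls that already appear in this listing; the two-path layout and the names fast_result/slow_result are illustrative, not taken from library_call.cpp.

enum { _fast_path = 1, _slow_path, PATH_LIMIT };                  // illustrative path ids
RegionNode* region  = new RegionNode(PATH_LIMIT);
PhiNode*    val_phi = new PhiNode(region, TypeInstPtr::NOTNULL);
PhiNode*    io_phi  = new PhiNode(region, Type::ABIO);
PhiNode*    mem_phi = new PhiNode(region, Type::MEMORY, TypePtr::BOTTOM);
record_for_igvn(region);

// After generating the fast path, park its state in slot _fast_path.
region->init_req (_fast_path, control());
val_phi->init_req(_fast_path, fast_result);                       // fast_result: hypothetical
io_phi->init_req (_fast_path, i_o());
mem_phi->init_req(_fast_path, reset_memory());

// After generating the slow path, park its state in slot _slow_path.
region->init_req (_slow_path, control());
val_phi->init_req(_slow_path, slow_result);                       // slow_result: hypothetical
io_phi->init_req (_slow_path, i_o());
mem_phi->init_req(_slow_path, reset_memory());

// Rejoin the paths, exactly as the code above does for its four paths.
set_control   (_gvn.transform(region));
set_i_o       (_gvn.transform(io_phi));
set_all_memory(_gvn.transform(mem_phi));
set_result    (_gvn.transform(val_phi));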
4412
4413// If we have a tightly coupled allocation, the arraycopy may take care
4414// of the array initialization. If one of the guards we insert between
4415// the allocation and the arraycopy causes a deoptimization, an
4416// uninitialized array will escape the compiled method. To prevent that
4417// we set the JVM state for uncommon traps between the allocation and
4418// the arraycopy to the state before the allocation so, in case of
4419// deoptimization, we'll reexecute the allocation and the
4420// initialization.
4421JVMState* LibraryCallKit::arraycopy_restore_alloc_state(AllocateArrayNode* alloc, int& saved_reexecute_sp) {
4422 if (alloc != NULL__null) {
4423 ciMethod* trap_method = alloc->jvms()->method();
4424 int trap_bci = alloc->jvms()->bci();
4425
4426 if (!C->too_many_traps(trap_method, trap_bci, Deoptimization::Reason_intrinsic) &&
4427 !C->too_many_traps(trap_method, trap_bci, Deoptimization::Reason_null_check)) {
4428 // Make sure there's no store between the allocation and the
4429 // arraycopy, otherwise visible side effects could be reexecuted
4430 // in case of deoptimization and cause incorrect execution.
4431 bool no_interfering_store = true;
4432 Node* mem = alloc->in(TypeFunc::Memory);
4433 if (mem->is_MergeMem()) {
4434 for (MergeMemStream mms(merged_memory(), mem->as_MergeMem()); mms.next_non_empty2(); ) {
4435 Node* n = mms.memory();
4436 if (n != mms.memory2() && !(n->is_Proj() && n->in(0) == alloc->initialization())) {
4437 assert(n->is_Store(), "what else?");
4438 no_interfering_store = false;
4439 break;
4440 }
4441 }
4442 } else {
4443 for (MergeMemStream mms(merged_memory()); mms.next_non_empty(); ) {
4444 Node* n = mms.memory();
4445 if (n != mem && !(n->is_Proj() && n->in(0) == alloc->initialization())) {
4446 assert(n->is_Store(), "what else?");
4447 no_interfering_store = false;
4448 break;
4449 }
4450 }
4451 }
4452
4453 if (no_interfering_store) {
4454 JVMState* old_jvms = alloc->jvms()->clone_shallow(C);
4455 uint size = alloc->req();
4456 SafePointNode* sfpt = new SafePointNode(size, old_jvms);
4457 old_jvms->set_map(sfpt);
4458 for (uint i = 0; i < size; i++) {
4459 sfpt->init_req(i, alloc->in(i));
4460 }
4461 // re-push array length for deoptimization
4462 sfpt->ins_req(old_jvms->stkoff() + old_jvms->sp(), alloc->in(AllocateNode::ALength));
4463 old_jvms->set_sp(old_jvms->sp()+1);
4464 old_jvms->set_monoff(old_jvms->monoff()+1);
4465 old_jvms->set_scloff(old_jvms->scloff()+1);
4466 old_jvms->set_endoff(old_jvms->endoff()+1);
4467 old_jvms->set_should_reexecute(true);
4468
4469 sfpt->set_i_o(map()->i_o());
4470 sfpt->set_memory(map()->memory());
4471 sfpt->set_control(map()->control());
4472
4473 JVMState* saved_jvms = jvms();
4474 saved_reexecute_sp = _reexecute_sp;
4475
4476 set_jvms(sfpt->jvms());
4477 _reexecute_sp = jvms()->sp();
4478
4479 return saved_jvms;
4480 }
4481 }
4482 }
4483 return NULL__null;
4484}
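
A note on the protocol: arraycopy_restore_alloc_state() returns the caller's previous JVMState (or NULL when no tightly coupled allocation can be handled) and hands back the previous reexecute stack pointer through its out-parameter; the caller emits its guards against the rewound state and then passes both values to arraycopy_move_allocation_here(). A short sketch of that call pattern, mirroring inline_arraycopy() below:

int saved_reexecute_sp = -1;
JVMState* saved_jvms = arraycopy_restore_alloc_state(alloc, saved_reexecute_sp);
// ... emit null checks and guards against the pre-allocation JVM state ...
arraycopy_move_allocation_here(alloc, dest, saved_jvms, saved_reexecute_sp, new_idx);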
4485
4486// In case of a deoptimization, we restart execution at the
4487// allocation, allocating a new array. We would leave an uninitialized
4488// array in the heap that GCs wouldn't expect. Move the allocation
4489// after the traps so we don't allocate the array if we
4490// deoptimize. This is possible because tightly_coupled_allocation()
4491// guarantees there's no observer of the allocated array at this point
4492// and the control flow is simple enough.
4493void LibraryCallKit::arraycopy_move_allocation_here(AllocateArrayNode* alloc, Node* dest, JVMState* saved_jvms,
4494 int saved_reexecute_sp, uint new_idx) {
4495 if (saved_jvms != NULL__null && !stopped()) {
4496 assert(alloc != NULL, "only with a tightly coupled allocation");
4497 // restore JVM state to the state at the arraycopy
4498 saved_jvms->map()->set_control(map()->control());
4499 assert(saved_jvms->map()->memory() == map()->memory(), "memory state changed?");
4500 assert(saved_jvms->map()->i_o() == map()->i_o(), "IO state changed?");
4501 // If we've improved the types of some nodes (null check) while
4502 // emitting the guards, propagate them to the current state
4503 map()->replaced_nodes().apply(saved_jvms->map(), new_idx);
4504 set_jvms(saved_jvms);
4505 _reexecute_sp = saved_reexecute_sp;
4506
4507 // Remove the allocation from above the guards
4508 CallProjections callprojs;
4509 alloc->extract_projections(&callprojs, true);
4510 InitializeNode* init = alloc->initialization();
4511 Node* alloc_mem = alloc->in(TypeFunc::Memory);
4512 C->gvn_replace_by(callprojs.fallthrough_ioproj, alloc->in(TypeFunc::I_O));
4513 C->gvn_replace_by(init->proj_out(TypeFunc::Memory), alloc_mem);
4514
4515 // The CastIINode created in GraphKit::new_array (in AllocateArrayNode::make_ideal_length) must stay below
4516 // the allocation (i.e. is only valid if the allocation succeeds):
4517 // 1) replace CastIINode with AllocateArrayNode's length here
4518 // 2) Create CastIINode again once allocation has moved (see below) at the end of this method
4519 //
4520 // Multiple identical CastIINodes might exist here. Each GraphKit::load_array_length() call will generate
4521 // new separate CastIINode (arraycopy guard checks or any array length use between array allocation and arraycopy)
4522 Node* init_control = init->proj_out(TypeFunc::Control);
4523 Node* alloc_length = alloc->Ideal_length();
4524#ifdef ASSERT1
4525 Node* prev_cast = NULL__null;
4526#endif
4527 for (uint i = 0; i < init_control->outcnt(); i++) {
4528 Node* init_out = init_control->raw_out(i);
4529 if (init_out->is_CastII() && init_out->in(TypeFunc::Control) == init_control && init_out->in(1) == alloc_length) {
4530#ifdef ASSERT1
4531 if (prev_cast == NULL__null) {
4532 prev_cast = init_out;
4533 } else {
4534 if (prev_cast->cmp(*init_out) == false) {
4535 prev_cast->dump();
4536 init_out->dump();
4537 assert(false, "not equal CastIINode");
4538 }
4539 }
4540#endif
4541 C->gvn_replace_by(init_out, alloc_length);
4542 }
4543 }
4544 C->gvn_replace_by(init->proj_out(TypeFunc::Control), alloc->in(0));
4545
4546 // move the allocation here (after the guards)
4547 _gvn.hash_delete(alloc);
4548 alloc->set_req(TypeFunc::Control, control());
4549 alloc->set_req(TypeFunc::I_O, i_o());
4550 Node *mem = reset_memory();
4551 set_all_memory(mem);
4552 alloc->set_req(TypeFunc::Memory, mem);
4553 set_control(init->proj_out_or_null(TypeFunc::Control));
4554 set_i_o(callprojs.fallthrough_ioproj);
4555
4556 // Update memory as done in GraphKit::set_output_for_allocation()
4557 const TypeInt* length_type = _gvn.find_int_type(alloc->in(AllocateNode::ALength));
4558 const TypeOopPtr* ary_type = _gvn.type(alloc->in(AllocateNode::KlassNode))->is_klassptr()->as_instance_type();
4559 if (ary_type->isa_aryptr() && length_type != NULL__null) {
4560 ary_type = ary_type->is_aryptr()->cast_to_size(length_type);
4561 }
4562 const TypePtr* telemref = ary_type->add_offset(Type::OffsetBot);
4563 int elemidx = C->get_alias_index(telemref);
4564 set_memory(init->proj_out_or_null(TypeFunc::Memory), Compile::AliasIdxRaw);
4565 set_memory(init->proj_out_or_null(TypeFunc::Memory), elemidx);
4566
4567 Node* allocx = _gvn.transform(alloc);
4568 assert(allocx == alloc, "where has the allocation gone?");
4569 assert(dest->is_CheckCastPP(), "not an allocation result?");
4570
4571 _gvn.hash_delete(dest);
4572 dest->set_req(0, control());
4573 Node* destx = _gvn.transform(dest);
4574 assert(destx == dest, "where has the allocation result gone?");
4575
4576 array_ideal_length(alloc, ary_type, true);
4577 }
4578}
4579
4580
4581//------------------------------inline_arraycopy-----------------------
4582// public static native void java.lang.System.arraycopy(Object src, int srcPos,
4583// Object dest, int destPos,
4584// int length);
4585bool LibraryCallKit::inline_arraycopy() {
4586 // Get the arguments.
4587 Node* src = argument(0); // type: oop
4588 Node* src_offset = argument(1); // type: int
4589 Node* dest = argument(2); // type: oop
4590 Node* dest_offset = argument(3); // type: int
4591 Node* length = argument(4); // type: int
4592
4593 uint new_idx = C->unique();
4594
4595 // Check for allocation before we add nodes that would confuse
4596 // tightly_coupled_allocation()
4597 AllocateArrayNode* alloc = tightly_coupled_allocation(dest);
4598
4599 int saved_reexecute_sp = -1;
4600 JVMState* saved_jvms = arraycopy_restore_alloc_state(alloc, saved_reexecute_sp);
4601 // See arraycopy_restore_alloc_state() comment
4602 // if alloc == NULL we don't have to worry about a tightly coupled allocation so we can emit all needed guards
4603 // if saved_jvms != NULL (then alloc != NULL) then we can handle guards and a tightly coupled allocation
4604 // if saved_jvms == NULL and alloc != NULL, we can't emit any guards
4605 bool can_emit_guards = (alloc == NULL__null || saved_jvms != NULL__null);
4606
4607 // The following tests must be performed
4608 // (1) src and dest are arrays.
4609 // (2) src and dest arrays must have elements of the same BasicType
4610 // (3) src and dest must not be null.
4611 // (4) src_offset must not be negative.
4612 // (5) dest_offset must not be negative.
4613 // (6) length must not be negative.
4614 // (7) src_offset + length must not exceed length of src.
4615 // (8) dest_offset + length must not exceed length of dest.
4616 // (9) each element of an oop array must be assignable
4617
4618 // (3) src and dest must not be null.
4619 // always do this here because we need the JVM state for uncommon traps
4620 Node* null_ctl = top();
4621 src = saved_jvms != NULL__null ? null_check_oop(src, &null_ctl, true, true) : null_check(src, T_ARRAY);
4622 assert(null_ctl->is_top(), "no null control here");
4623 dest = null_check(dest, T_ARRAY);
4624
4625 if (!can_emit_guards) {
4626 // if saved_jvms == NULL and alloc != NULL, we don't emit any
4627 // guards but the arraycopy node could still take advantage of a
4628 // tightly coupled allocation. tightly_coupled_allocation() is
4629 // called again to make sure it takes the null check above into
4630 // account: the null check is mandatory and if it caused an
4631 // uncommon trap to be emitted then the allocation can't be
4632 // considered tightly coupled in this context.
4633 alloc = tightly_coupled_allocation(dest);
4634 }
4635
4636 bool validated = false;
4637
4638 const Type* src_type = _gvn.type(src);
4639 const Type* dest_type = _gvn.type(dest);
4640 const TypeAryPtr* top_src = src_type->isa_aryptr();
4641 const TypeAryPtr* top_dest = dest_type->isa_aryptr();
4642
4643 // Do we have the type of src?
4644 bool has_src = (top_src != NULL__null && top_src->klass() != NULL__null);
4645 // Do we have the type of dest?
4646 bool has_dest = (top_dest != NULL__null && top_dest->klass() != NULL__null);
4647 // Is the type for src from speculation?
4648 bool src_spec = false;
4649 // Is the type for dest from speculation?
4650 bool dest_spec = false;
4651
4652 if ((!has_src || !has_dest) && can_emit_guards) {
4653 // We don't have sufficient type information, let's see if
4654 // speculative types can help. We need to have types for both src
4655 // and dest so that it pays off.
4656
4657 // Do we already have or could we have type information for src
4658 bool could_have_src = has_src;
4659 // Do we already have or could we have type information for dest
4660 bool could_have_dest = has_dest;
4661
4662 ciKlass* src_k = NULL__null;
4663 if (!has_src) {
4664 src_k = src_type->speculative_type_not_null();
4665 if (src_k != NULL__null && src_k->is_array_klass()) {
4666 could_have_src = true;
4667 }
4668 }
4669
4670 ciKlass* dest_k = NULL__null;
4671 if (!has_dest) {
4672 dest_k = dest_type->speculative_type_not_null();
4673 if (dest_k != NULL__null && dest_k->is_array_klass()) {
4674 could_have_dest = true;
4675 }
4676 }
4677
4678 if (could_have_src && could_have_dest) {
4679 // This is going to pay off so emit the required guards
4680 if (!has_src) {
4681 src = maybe_cast_profiled_obj(src, src_k, true);
4682 src_type = _gvn.type(src);
4683 top_src = src_type->isa_aryptr();
4684 has_src = (top_src != NULL__null && top_src->klass() != NULL__null);
4685 src_spec = true;
4686 }
4687 if (!has_dest) {
4688 dest = maybe_cast_profiled_obj(dest, dest_k, true);
4689 dest_type = _gvn.type(dest);
4690 top_dest = dest_type->isa_aryptr();
4691 has_dest = (top_dest != NULL__null && top_dest->klass() != NULL__null);
4692 dest_spec = true;
4693 }
4694 }
4695 }
4696
4697 if (has_src && has_dest && can_emit_guards) {
4698 BasicType src_elem = top_src->klass()->as_array_klass()->element_type()->basic_type();
4699 BasicType dest_elem = top_dest->klass()->as_array_klass()->element_type()->basic_type();
4700 if (is_reference_type(src_elem)) src_elem = T_OBJECT;
4701 if (is_reference_type(dest_elem)) dest_elem = T_OBJECT;
4702
4703 if (src_elem == dest_elem && src_elem == T_OBJECT) {
4704 // If both arrays are object arrays then having the exact types
4705 // for both will remove the need for a subtype check at runtime
4706 // before the call and may make it possible to pick a faster copy
4707 // routine (without a subtype check on every element)
4708 // Do we have the exact type of src?
4709 bool could_have_src = src_spec;
4710 // Do we have the exact type of dest?
4711 bool could_have_dest = dest_spec;
4712 ciKlass* src_k = top_src->klass();
4713 ciKlass* dest_k = top_dest->klass();
4714 if (!src_spec) {
4715 src_k = src_type->speculative_type_not_null();
4716 if (src_k != NULL__null && src_k->is_array_klass()) {
4717 could_have_src = true;
4718 }
4719 }
4720 if (!dest_spec) {
4721 dest_k = dest_type->speculative_type_not_null();
4722 if (dest_k != NULL__null && dest_k->is_array_klass()) {
4723 could_have_dest = true;
4724 }
4725 }
4726 if (could_have_src && could_have_dest) {
4727 // If we can have both exact types, emit the missing guards
4728 if (could_have_src && !src_spec) {
4729 src = maybe_cast_profiled_obj(src, src_k, true);
4730 }
4731 if (could_have_dest && !dest_spec) {
4732 dest = maybe_cast_profiled_obj(dest, dest_k, true);
4733 }
4734 }
4735 }
4736 }
4737
4738 ciMethod* trap_method = method();
4739 int trap_bci = bci();
4740 if (saved_jvms != NULL__null) {
4741 trap_method = alloc->jvms()->method();
4742 trap_bci = alloc->jvms()->bci();
4743 }
4744
4745 bool negative_length_guard_generated = false;
4746
4747 if (!C->too_many_traps(trap_method, trap_bci, Deoptimization::Reason_intrinsic) &&
4748 can_emit_guards &&
4749 !src->is_top() && !dest->is_top()) {
4750 // validate arguments: enables transformation of the ArrayCopyNode
4751 validated = true;
4752
4753 RegionNode* slow_region = new RegionNode(1);
4754 record_for_igvn(slow_region);
4755
4756 // (1) src and dest are arrays.
4757 generate_non_array_guard(load_object_klass(src), slow_region);
4758 generate_non_array_guard(load_object_klass(dest), slow_region);
4759
4760 // (2) src and dest arrays must have elements of the same BasicType
4761 // done at macro expansion or at Ideal transformation time
4762
4763 // (4) src_offset must not be negative.
4764 generate_negative_guard(src_offset, slow_region);
4765
4766 // (5) dest_offset must not be negative.
4767 generate_negative_guard(dest_offset, slow_region);
4768
4769 // (7) src_offset + length must not exceed length of src.
4770 generate_limit_guard(src_offset, length,
4771 load_array_length(src),
4772 slow_region);
4773
4774 // (8) dest_offset + length must not exceed length of dest.
4775 generate_limit_guard(dest_offset, length,
4776 load_array_length(dest),
4777 slow_region);
4778
4779 // (6) length must not be negative.
4780 // This is also checked in generate_arraycopy() during macro expansion, but
4781 // we also have to check it here for the case where the ArrayCopyNode will
4782 // be eliminated by Escape Analysis.
4783 if (EliminateAllocations) {
4784 generate_negative_guard(length, slow_region);
4785 negative_length_guard_generated = true;
4786 }
4787
4788 // (9) each element of an oop array must be assignable
4789 Node* dest_klass = load_object_klass(dest);
4790 if (src != dest) {
4791 Node* not_subtype_ctrl = gen_subtype_check(src, dest_klass);
4792
4793 if (not_subtype_ctrl != top()) {
4794 PreserveJVMState pjvms(this);
4795 set_control(not_subtype_ctrl);
4796 uncommon_trap(Deoptimization::Reason_intrinsic,
4797 Deoptimization::Action_make_not_entrant);
4798 assert(stopped(), "Should be stopped");
4799 }
4800 }
4801 {
4802 PreserveJVMState pjvms(this);
4803 set_control(_gvn.transform(slow_region));
4804 uncommon_trap(Deoptimization::Reason_intrinsic,
4805 Deoptimization::Action_make_not_entrant);
4806 assert(stopped(), "Should be stopped");
4807 }
4808
4809 const TypeKlassPtr* dest_klass_t = _gvn.type(dest_klass)->is_klassptr();
4810 const Type *toop = TypeOopPtr::make_from_klass(dest_klass_t->klass());
4811 src = _gvn.transform(new CheckCastPPNode(control(), src, toop));
4812 }
4813
4814 arraycopy_move_allocation_here(alloc, dest, saved_jvms, saved_reexecute_sp, new_idx);
4815
4816 if (stopped()) {
4817 return true;
4818 }
4819
4820 ArrayCopyNode* ac = ArrayCopyNode::make(this, true, src, src_offset, dest, dest_offset, length, alloc != NULL__null, negative_length_guard_generated,
4821 // Create LoadRange and LoadKlass nodes for use during macro expansion here
4822 // so the compiler has a chance to eliminate them: during macro expansion,
4823 // we have to set their control (CastPP nodes are eliminated).
4824 load_object_klass(src), load_object_klass(dest),
4825 load_array_length(src), load_array_length(dest));
4826
4827 ac->set_arraycopy(validated);
4828
4829 Node* n = _gvn.transform(ac);
4830 if (n == ac) {
4831 ac->connect_outputs(this);
4832 } else {
4833 assert(validated, "shouldn't transform if all arguments not validated");
4834 set_all_memory(n);
4835 }
4836 clear_upper_avx();
4837
4838
4839 return true;
4840}
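
Both the oop-array branch of the clone intrinsic above and inline_arraycopy() end with the same ArrayCopyNode idiom: build the node, GVN-transform it, and only wire up its outputs if GVN did not replace it. A condensed sketch of that idiom under the same assumptions (the inline comments paraphrase the boolean arguments; they are not the declared parameter names):

ArrayCopyNode* ac = ArrayCopyNode::make(this, true /* arraycopy */,
                                        src, src_offset, dest, dest_offset, length,
                                        false /* no tightly coupled allocation */,
                                        false /* no negative length guard */);
Node* n = _gvn.transform(ac);
if (n == ac) {
  ac->connect_outputs(this);   // node survived: hook up its control/memory/io projections
} else {
  set_all_memory(n);           // node was folded away; keep the memory state GVN produced
}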
4841
4842
4843// Helper function which determines if an arraycopy immediately follows
4844// an allocation, with no intervening tests or other escapes for the object.
4845AllocateArrayNode*
4846LibraryCallKit::tightly_coupled_allocation(Node* ptr) {
4847 if (stopped()) return NULL__null; // no fast path
4848 if (C->AliasLevel() == 0) return NULL__null; // no MergeMems around
4849
4850 AllocateArrayNode* alloc = AllocateArrayNode::Ideal_array_allocation(ptr, &_gvn);
4851 if (alloc == NULL__null) return NULL__null;
4852
4853 Node* rawmem = memory(Compile::AliasIdxRaw);
4854 // Is the allocation's memory state untouched?
4855 if (!(rawmem->is_Proj() && rawmem->in(0)->is_Initialize())) {
4856 // Bail out if there have been raw-memory effects since the allocation.
4857 // (Example: There might have been a call or safepoint.)
4858 return NULL__null;
4859 }
4860 rawmem = rawmem->in(0)->as_Initialize()->memory(Compile::AliasIdxRaw);
4861 if (!(rawmem->is_Proj() && rawmem->in(0) == alloc)) {
4862 return NULL__null;
4863 }
4864
4865 // There must be no unexpected observers of this allocation.
4866 for (DUIterator_Fast imax, i = ptr->fast_outs(imax); i < imax; i++) {
4867 Node* obs = ptr->fast_out(i);
4868 if (obs != this->map()) {
4869 return NULL__null;
4870 }
4871 }
4872
4873 // This arraycopy must unconditionally follow the allocation of the ptr.
4874 Node* alloc_ctl = ptr->in(0);
4875 Node* ctl = control();
4876 while (ctl != alloc_ctl) {
4877 // There may be guards which feed into the slow_region.
4878 // Any other control flow means that we might not get a chance
4879 // to finish initializing the allocated object.
4880 if ((ctl->is_IfFalse() || ctl->is_IfTrue()) && ctl->in(0)->is_If()) {
4881 IfNode* iff = ctl->in(0)->as_If();
4882 Node* not_ctl = iff->proj_out_or_null(1 - ctl->as_Proj()->_con);
4883 assert(not_ctl != NULL && not_ctl != ctl, "found alternate");
4884 // One more try: Various low-level checks bottom out in
4885 // uncommon traps. If the debug-info of the trap omits
4886 // any reference to the allocation, as we've already
4887 // observed, then there can be no objection to the trap.
4888 bool found_trap = false;
4889 for (DUIterator_Fast jmax, j = not_ctl->fast_outs(jmax); j < jmax; j++) {
4890 Node* obs = not_ctl->fast_out(j);
4891 if (obs->in(0) == not_ctl && obs->is_Call() &&
4892 (obs->as_Call()->entry_point() == SharedRuntime::uncommon_trap_blob()->entry_point())) {
4893 found_trap = true; break;
4894 }
4895 }
4896 if (found_trap) {
4897 ctl = iff->in(0); // This test feeds a harmless uncommon trap.
4898 continue;
4899 }
4900 }
4901 return NULL__null;
4902 }
4903
4904 // If we get this far, we have an allocation which immediately
4905 // precedes the arraycopy, and we can take over zeroing the new object.
4906 // The arraycopy will finish the initialization, and provide
4907 // a new control state to which we will anchor the destination pointer.
4908
4909 return alloc;
4910}
4911
4912//-------------inline_encodeISOArray-----------------------------------
4913// encode char[] to byte[] in ISO_8859_1 or ASCII
4914bool LibraryCallKit::inline_encodeISOArray(bool ascii) {
4915 assert(callee()->signature()->size() == 5, "encodeISOArray has 5 parameters");
4916 // no receiver since it is static method
4917 Node *src = argument(0);
4918 Node *src_offset = argument(1);
4919 Node *dst = argument(2);
4920 Node *dst_offset = argument(3);
4921 Node *length = argument(4);
4922
4923 src = must_be_not_null(src, true);
4924 dst = must_be_not_null(dst, true);
4925
4926 const Type* src_type = src->Value(&_gvn);
4927 const Type* dst_type = dst->Value(&_gvn);
4928 const TypeAryPtr* top_src = src_type->isa_aryptr();
4929 const TypeAryPtr* top_dest = dst_type->isa_aryptr();
4930 if (top_src == NULL__null || top_src->klass() == NULL__null ||
4931 top_dest == NULL__null || top_dest->klass() == NULL__null) {
4932 // failed array check
4933 return false;
4934 }
4935
4936 // Figure out the size and type of the elements we will be copying.
4937 BasicType src_elem = src_type->isa_aryptr()->klass()->as_array_klass()->element_type()->basic_type();
4938 BasicType dst_elem = dst_type->isa_aryptr()->klass()->as_array_klass()->element_type()->basic_type();
4939 if (!((src_elem == T_CHAR) || (src_elem== T_BYTE)) || dst_elem != T_BYTE) {
4940 return false;
4941 }
4942
4943 Node* src_start = array_element_address(src, src_offset, T_CHAR);
4944 Node* dst_start = array_element_address(dst, dst_offset, dst_elem);
4945 // 'src_start' points to src array + scaled offset
4946 // 'dst_start' points to dst array + scaled offset
4947
4948 const TypeAryPtr* mtype = TypeAryPtr::BYTES;
4949 Node* enc = new EncodeISOArrayNode(control(), memory(mtype), src_start, dst_start, length, ascii);
4950 enc = _gvn.transform(enc);
4951 Node* res_mem = _gvn.transform(new SCMemProjNode(enc));
4952 set_memory(res_mem, mtype);
4953 set_result(enc);
4954 clear_upper_avx();
4955
4956 return true;
4957}
4958
4959//-------------inline_multiplyToLen-----------------------------------
4960bool LibraryCallKit::inline_multiplyToLen() {
4961 assert(UseMultiplyToLenIntrinsic, "not implemented on this platform");
4962
4963 address stubAddr = StubRoutines::multiplyToLen();
4964 if (stubAddr == NULL__null) {
4965 return false; // Intrinsic's stub is not implemented on this platform
4966 }
4967 const char* stubName = "multiplyToLen";
4968
4969 assert(callee()->signature()->size() == 5, "multiplyToLen has 5 parameters");
4970
4971 // no receiver because it is a static method
4972 Node* x = argument(0);
4973 Node* xlen = argument(1);
4974 Node* y = argument(2);
4975 Node* ylen = argument(3);
4976 Node* z = argument(4);
4977
4978 x = must_be_not_null(x, true);
4979 y = must_be_not_null(y, true);
4980
4981 const Type* x_type = x->Value(&_gvn);
4982 const Type* y_type = y->Value(&_gvn);
4983 const TypeAryPtr* top_x = x_type->isa_aryptr();
4984 const TypeAryPtr* top_y = y_type->isa_aryptr();
4985 if (top_x == NULL__null || top_x->klass() == NULL__null ||
4986 top_y == NULL__null || top_y->klass() == NULL__null) {
4987 // failed array check
4988 return false;
4989 }
4990
4991 BasicType x_elem = x_type->isa_aryptr()->klass()->as_array_klass()->element_type()->basic_type();
4992 BasicType y_elem = y_type->isa_aryptr()->klass()->as_array_klass()->element_type()->basic_type();
4993 if (x_elem != T_INT || y_elem != T_INT) {
4994 return false;
4995 }
4996
4997 // Set the original stack and the reexecute bit for the interpreter to reexecute
4998 // the bytecode that invokes BigInteger.multiplyToLen() if deoptimization happens
4999 // on the return from z array allocation in runtime.
5000 { PreserveReexecuteState preexecs(this);
5001 jvms()->set_should_reexecute(true);
5002
5003 Node* x_start = array_element_address(x, intcon(0), x_elem);
5004 Node* y_start = array_element_address(y, intcon(0), y_elem);
5005 // 'x_start' points to x array + scaled xlen
5006 // 'y_start' points to y array + scaled ylen
5007
5008 // Allocate the result array
5009 Node* zlen = _gvn.transform(new AddINode(xlen, ylen));
5010 ciKlass* klass = ciTypeArrayKlass::make(T_INT);
5011 Node* klass_node = makecon(TypeKlassPtr::make(klass));
5012
5013 IdealKit ideal(this);
5014
5015#define __ ideal.
5016 Node* one = __ ConI(1);
5017 Node* zero = __ ConI(0);
5018 IdealVariable need_alloc(ideal), z_alloc(ideal); __ declarations_done();
5019 __ set(need_alloc, zero);
5020 __ set(z_alloc, z);
5021 __ if_then(z, BoolTest::eq, null()); {
5022 __ increment (need_alloc, one);
5023 } __ else_(); {
5024 // Update graphKit memory and control from IdealKit.
5025 sync_kit(ideal);
5026 Node *cast = new CastPPNode(z, TypePtr::NOTNULL);
5027 cast->init_req(0, control());
5028 _gvn.set_type(cast, cast->bottom_type());
5029 C->record_for_igvn(cast);
5030
5031 Node* zlen_arg = load_array_length(cast);
5032 // Update IdealKit memory and control from graphKit.
5033 __ sync_kit(this);
5034 __ if_then(zlen_arg, BoolTest::lt, zlen); {
5035 __ increment (need_alloc, one);
5036 } __ end_if();
5037 } __ end_if();
5038
5039 __ if_then(__ value(need_alloc), BoolTest::ne, zero); {
5040 // Update graphKit memory and control from IdealKit.
5041 sync_kit(ideal);
5042 Node * narr = new_array(klass_node, zlen, 1);
5043 // Update IdealKit memory and control from graphKit.
5044 __ sync_kit(this);
5045 __ set(z_alloc, narr);
5046 } __ end_if();
5047
5048 sync_kit(ideal);
5049 z = __ value(z_alloc);
5050 // Can't use TypeAryPtr::INTS which uses Bottom offset.
5051 _gvn.set_type(z, TypeOopPtr::make_from_klass(klass));
5052 // Final sync IdealKit and GraphKit.
5053 final_sync(ideal);
5054#undef __
5055
5056 Node* z_start = array_element_address(z, intcon(0), T_INT);
5057
5058 Node* call = make_runtime_call(RC_LEAF|RC_NO_FP,
5059 OptoRuntime::multiplyToLen_Type(),
5060 stubAddr, stubName, TypePtr::BOTTOM,
5061 x_start, xlen, y_start, ylen, z_start, zlen);
5062 } // original reexecute is set back here
5063
5064 C->set_has_split_ifs(true); // Has chance for split-if optimization
5065 set_result(z);
5066 return true;
5067}
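
inline_multiplyToLen() above uses the IdealKit "#define __ ideal." idiom to build the "allocate z only if it is missing or too short" control flow. The sketch below isolates that idiom, assuming the same LibraryCallKit context; some_node and the single flag variable are illustrative only.

IdealKit ideal(this);
#define __ ideal.
IdealVariable flag(ideal); __ declarations_done();
__ set(flag, __ ConI(0));
__ if_then(some_node, BoolTest::eq, null()); {   // some_node: hypothetical input
  __ set(flag, __ ConI(1));
} __ else_(); {
  sync_kit(ideal);       // hand control/memory over to GraphKit before using its helpers
  // ... GraphKit work, e.g. load_array_length(...) ...
  __ sync_kit(this);     // push the updated GraphKit state back into IdealKit
} __ end_if();
final_sync(ideal);       // final sync before leaving IdealKit
#undef __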
5068
5069//-------------inline_squareToLen------------------------------------
5070bool LibraryCallKit::inline_squareToLen() {
5071 assert(UseSquareToLenIntrinsic, "not implemented on this platform");
5072
5073 address stubAddr = StubRoutines::squareToLen();
5074 if (stubAddr == NULL__null) {
5075 return false; // Intrinsic's stub is not implemented on this platform
5076 }
5077 const char* stubName = "squareToLen";
5078
5079 assert(callee()->signature()->size() == 4, "implSquareToLen has 4 parameters");
5080
5081 Node* x = argument(0);
5082 Node* len = argument(1);
5083 Node* z = argument(2);
5084 Node* zlen = argument(3);
5085
5086 x = must_be_not_null(x, true);
5087 z = must_be_not_null(z, true);
5088
5089 const Type* x_type = x->Value(&_gvn);
5090 const Type* z_type = z->Value(&_gvn);
5091 const TypeAryPtr* top_x = x_type->isa_aryptr();
5092 const TypeAryPtr* top_z = z_type->isa_aryptr();
5093 if (top_x == NULL__null || top_x->klass() == NULL__null ||
5094 top_z == NULL__null || top_z->klass() == NULL__null) {
5095 // failed array check
5096 return false;
5097 }
5098
5099 BasicType x_elem = x_type->isa_aryptr()->klass()->as_array_klass()->element_type()->basic_type();
5100 BasicType z_elem = z_type->isa_aryptr()->klass()->as_array_klass()->element_type()->basic_type();
5101 if (x_elem != T_INT || z_elem != T_INT) {
5102 return false;
5103 }
5104
5105
5106 Node* x_start = array_element_address(x, intcon(0), x_elem);
5107 Node* z_start = array_element_address(z, intcon(0), z_elem);
5108
5109 Node* call = make_runtime_call(RC_LEAF|RC_NO_FP,
5110 OptoRuntime::squareToLen_Type(),
5111 stubAddr, stubName, TypePtr::BOTTOM,
5112 x_start, len, z_start, zlen);
5113
5114 set_result(z);
5115 return true;
5116}
5117
5118//-------------inline_mulAdd------------------------------------------
5119bool LibraryCallKit::inline_mulAdd() {
5120 assert(UseMulAddIntrinsic, "not implemented on this platform");
5121
5122 address stubAddr = StubRoutines::mulAdd();
5123 if (stubAddr == NULL__null) {
5124 return false; // Intrinsic's stub is not implemented on this platform
5125 }
5126 const char* stubName = "mulAdd";
5127
5128 assert(callee()->signature()->size() == 5, "mulAdd has 5 parameters");
5129
5130 Node* out = argument(0);
5131 Node* in = argument(1);
5132 Node* offset = argument(2);
5133 Node* len = argument(3);
5134 Node* k = argument(4);
5135
5136 out = must_be_not_null(out, true);
5137
5138 const Type* out_type = out->Value(&_gvn);
5139 const Type* in_type = in->Value(&_gvn);
5140 const TypeAryPtr* top_out = out_type->isa_aryptr();
5141 const TypeAryPtr* top_in = in_type->isa_aryptr();
5142 if (top_out == NULL__null || top_out->klass() == NULL__null ||
5143 top_in == NULL__null || top_in->klass() == NULL__null) {
5144 // failed array check
5145 return false;
5146 }
5147
5148 BasicType out_elem = out_type->isa_aryptr()->klass()->as_array_klass()->element_type()->basic_type();
5149 BasicType in_elem = in_type->isa_aryptr()->klass()->as_array_klass()->element_type()->basic_type();
5150 if (out_elem != T_INT || in_elem != T_INT) {
5151 return false;
5152 }
5153
5154 Node* outlen = load_array_length(out);
5155 Node* new_offset = _gvn.transform(new SubINode(outlen, offset));
5156 Node* out_start = array_element_address(out, intcon(0), out_elem);
5157 Node* in_start = array_element_address(in, intcon(0), in_elem);
5158
5159 Node* call = make_runtime_call(RC_LEAF|RC_NO_FP,
5160 OptoRuntime::mulAdd_Type(),
5161 stubAddr, stubName, TypePtr::BOTTOM,
5162 out_start,in_start, new_offset, len, k);
5163 Node* result = _gvn.transform(new ProjNode(call, TypeFunc::Parms));
5164 set_result(result);
5165 return true;
5166}
5167
5168//-------------inline_montgomeryMultiply-----------------------------------
5169bool LibraryCallKit::inline_montgomeryMultiply() {
5170 address stubAddr = StubRoutines::montgomeryMultiply();
5171 if (stubAddr == NULL__null) {
5172 return false; // Intrinsic's stub is not implemented on this platform
5173 }
5174
5175 assert(UseMontgomeryMultiplyIntrinsic, "not implemented on this platform");
5176 const char* stubName = "montgomery_multiply";
5177
5178 assert(callee()->signature()->size() == 7, "montgomeryMultiply has 7 parameters");
5179
5180 Node* a = argument(0);
5181 Node* b = argument(1);
5182 Node* n = argument(2);
5183 Node* len = argument(3);
5184 Node* inv = argument(4);
5185 Node* m = argument(6);
5186
5187 const Type* a_type = a->Value(&_gvn);
5188 const TypeAryPtr* top_a = a_type->isa_aryptr();
5189 const Type* b_type = b->Value(&_gvn);
5190 const TypeAryPtr* top_b = b_type->isa_aryptr();
5191 const Type* n_type = a->Value(&_gvn);
5192 const TypeAryPtr* top_n = n_type->isa_aryptr();
5193 const Type* m_type = a->Value(&_gvn);
5194 const TypeAryPtr* top_m = m_type->isa_aryptr();
5195 if (top_a == NULL__null || top_a->klass() == NULL__null ||
5196 top_b == NULL__null || top_b->klass() == NULL__null ||
5197 top_n == NULL__null || top_n->klass() == NULL__null ||
5198 top_m == NULL__null || top_m->klass() == NULL__null) {
5199 // failed array check
5200 return false;
5201 }
5202
5203 BasicType a_elem = a_type->isa_aryptr()->klass()->as_array_klass()->element_type()->basic_type();
5204 BasicType b_elem = b_type->isa_aryptr()->klass()->as_array_klass()->element_type()->basic_type();
5205 BasicType n_elem = n_type->isa_aryptr()->klass()->as_array_klass()->element_type()->basic_type();
5206 BasicType m_elem = m_type->isa_aryptr()->klass()->as_array_klass()->element_type()->basic_type();
5207 if (a_elem != T_INT || b_elem != T_INT || n_elem != T_INT || m_elem != T_INT) {
5208 return false;
5209 }
5210
5211 // Make the call
5212 {
5213 Node* a_start = array_element_address(a, intcon(0), a_elem);
5214 Node* b_start = array_element_address(b, intcon(0), b_elem);
5215 Node* n_start = array_element_address(n, intcon(0), n_elem);
5216 Node* m_start = array_element_address(m, intcon(0), m_elem);
5217
5218 Node* call = make_runtime_call(RC_LEAF,
5219 OptoRuntime::montgomeryMultiply_Type(),
5220 stubAddr, stubName, TypePtr::BOTTOM,
5221 a_start, b_start, n_start, len, inv, top(),
5222 m_start);
5223 set_result(m);
5224 }
5225
5226 return true;
5227}
5228
5229bool LibraryCallKit::inline_montgomerySquare() {
5230 address stubAddr = StubRoutines::montgomerySquare();
5231 if (stubAddr == NULL__null) {
5232 return false; // Intrinsic's stub is not implemented on this platform
5233 }
5234
5235 assert(UseMontgomerySquareIntrinsic, "not implemented on this platform");
5236 const char* stubName = "montgomery_square";
5237
5238 assert(callee()->signature()->size() == 6, "montgomerySquare has 6 parameters");
5239
5240 Node* a = argument(0);
5241 Node* n = argument(1);
5242 Node* len = argument(2);
5243 Node* inv = argument(3);
5244 Node* m = argument(5);
5245
5246 const Type* a_type = a->Value(&_gvn);
5247 const TypeAryPtr* top_a = a_type->isa_aryptr();
5248 const Type* n_type = a->Value(&_gvn);
5249 const TypeAryPtr* top_n = n_type->isa_aryptr();
5250 const Type* m_type = a->Value(&_gvn);
5251 const TypeAryPtr* top_m = m_type->isa_aryptr();
5252 if (top_a == NULL__null || top_a->klass() == NULL__null ||
5253 top_n == NULL__null || top_n->klass() == NULL__null ||
5254 top_m == NULL__null || top_m->klass() == NULL__null) {
5255 // failed array check
5256 return false;
5257 }
5258
5259 BasicType a_elem = a_type->isa_aryptr()->klass()->as_array_klass()->element_type()->basic_type();
5260 BasicType n_elem = n_type->isa_aryptr()->klass()->as_array_klass()->element_type()->basic_type();
5261 BasicType m_elem = m_type->isa_aryptr()->klass()->as_array_klass()->element_type()->basic_type();
5262 if (a_elem != T_INT || n_elem != T_INT || m_elem != T_INT) {
5263 return false;
5264 }
5265
5266 // Make the call
5267 {
5268 Node* a_start = array_element_address(a, intcon(0), a_elem);
5269 Node* n_start = array_element_address(n, intcon(0), n_elem);
5270 Node* m_start = array_element_address(m, intcon(0), m_elem);
5271
5272 Node* call = make_runtime_call(RC_LEAF,
5273 OptoRuntime::montgomerySquare_Type(),
5274 stubAddr, stubName, TypePtr::BOTTOM,
5275 a_start, n_start, len, inv, top(),
5276 m_start);
5277 set_result(m);
5278 }
5279
5280 return true;
5281}
5282
5283bool LibraryCallKit::inline_bigIntegerShift(bool isRightShift) {
5284 address stubAddr = NULL__null;
5285 const char* stubName = NULL__null;
5286
5287 stubAddr = isRightShift? StubRoutines::bigIntegerRightShift(): StubRoutines::bigIntegerLeftShift();
5288 if (stubAddr == NULL__null) {
5289 return false; // Intrinsic's stub is not implemented on this platform
5290 }
5291
5292 stubName = isRightShift? "bigIntegerRightShiftWorker" : "bigIntegerLeftShiftWorker";
5293
5294 assert(callee()->signature()->size() == 5, "expected 5 arguments");
5295
5296 Node* newArr = argument(0);
5297 Node* oldArr = argument(1);
5298 Node* newIdx = argument(2);
5299 Node* shiftCount = argument(3);
5300 Node* numIter = argument(4);
5301
5302 const Type* newArr_type = newArr->Value(&_gvn);
5303 const TypeAryPtr* top_newArr = newArr_type->isa_aryptr();
5304 const Type* oldArr_type = oldArr->Value(&_gvn);
5305 const TypeAryPtr* top_oldArr = oldArr_type->isa_aryptr();
5306 if (top_newArr == NULL__null || top_newArr->klass() == NULL__null || top_oldArr == NULL__null
5307 || top_oldArr->klass() == NULL__null) {
5308 return false;
5309 }
5310
5311 BasicType newArr_elem = newArr_type->isa_aryptr()->klass()->as_array_klass()->element_type()->basic_type();
5312 BasicType oldArr_elem = oldArr_type->isa_aryptr()->klass()->as_array_klass()->element_type()->basic_type();
5313 if (newArr_elem != T_INT || oldArr_elem != T_INT) {
5314 return false;
5315 }
5316
5317 // Make the call
5318 {
5319 Node* newArr_start = array_element_address(newArr, intcon(0), newArr_elem);
5320 Node* oldArr_start = array_element_address(oldArr, intcon(0), oldArr_elem);
5321
5322 Node* call = make_runtime_call(RC_LEAF,
Value stored to 'call' during its initialization is never read
5323 OptoRuntime::bigIntegerShift_Type(),
5324 stubAddr,
5325 stubName,
5326 TypePtr::BOTTOM,
5327 newArr_start,
5328 oldArr_start,
5329 newIdx,
5330 shiftCount,
5331 numIter);
5332 }
5333
5334 return true;
5335}
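
The warning this report is about points at the "Node* call = make_runtime_call(...)" initialization above (line 5322): unlike inline_mulAdd() or the CRC32 intrinsics, the big-integer shift stub produces no value that the intrinsic consumes through "new ProjNode(call, TypeFunc::Parms)", so the local is written and never read. One possible cleanup, sketched here and not a committed JDK change, is to call the stub for its side effects only:

make_runtime_call(RC_LEAF,
                  OptoRuntime::bigIntegerShift_Type(),
                  stubAddr, stubName, TypePtr::BOTTOM,
                  newArr_start, oldArr_start, newIdx, shiftCount, numIter);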
5336
5337//-------------inline_vectorizedMismatch------------------------------
5338bool LibraryCallKit::inline_vectorizedMismatch() {
5339 assert(UseVectorizedMismatchIntrinsic, "not implemented on this platform");
5340
5341 assert(callee()->signature()->size() == 8, "vectorizedMismatch has 6 parameters");
5342 Node* obja = argument(0); // Object
5343 Node* aoffset = argument(1); // long
5344 Node* objb = argument(3); // Object
5345 Node* boffset = argument(4); // long
5346 Node* length = argument(6); // int
5347 Node* scale = argument(7); // int
5348
5349 const TypeAryPtr* obja_t = _gvn.type(obja)->isa_aryptr();
5350 const TypeAryPtr* objb_t = _gvn.type(objb)->isa_aryptr();
5351 if (obja_t == NULL__null || obja_t->klass() == NULL__null ||
5352 objb_t == NULL__null || objb_t->klass() == NULL__null ||
5353 scale == top()) {
5354 return false; // failed input validation
5355 }
5356
5357 Node* obja_adr = make_unsafe_address(obja, aoffset);
5358 Node* objb_adr = make_unsafe_address(objb, boffset);
5359
5360 // Partial inlining handling for inputs smaller than ArrayOperationPartialInlineSize bytes in size.
5361 //
5362 // inline_limit = ArrayOperationPartialInlineSize / element_size;
5363 // if (length <= inline_limit) {
5364 // inline_path:
5365 // vmask = VectorMaskGen length
5366 // vload1 = LoadVectorMasked obja, vmask
5367 // vload2 = LoadVectorMasked objb, vmask
5368 // result1 = VectorCmpMasked vload1, vload2, vmask
5369 // } else {
5370 // call_stub_path:
5371 // result2 = call vectorizedMismatch_stub(obja, objb, length, scale)
5372 // }
5373 // exit_block:
5374 // return Phi(result1, result2);
5375 //
5376 enum { inline_path = 1, // input is small enough to process it all at once
5377 stub_path = 2, // input is too large; call into the VM
5378 PATH_LIMIT = 3
5379 };
5380
5381 Node* exit_block = new RegionNode(PATH_LIMIT);
5382 Node* result_phi = new PhiNode(exit_block, TypeInt::INT);
5383 Node* memory_phi = new PhiNode(exit_block, Type::MEMORY, TypePtr::BOTTOM);
5384
5385 Node* call_stub_path = control();
5386
5387 BasicType elem_bt = T_ILLEGAL;
5388
5389 const TypeInt* scale_t = _gvn.type(scale)->is_int();
5390 if (scale_t->is_con()) {
5391 switch (scale_t->get_con()) {
5392 case 0: elem_bt = T_BYTE; break;
5393 case 1: elem_bt = T_SHORT; break;
5394 case 2: elem_bt = T_INT; break;
5395 case 3: elem_bt = T_LONG; break;
5396
5397 default: elem_bt = T_ILLEGAL; break; // not supported
5398 }
5399 }
5400
5401 int inline_limit = 0;
5402 bool do_partial_inline = false;
5403
5404 if (elem_bt != T_ILLEGAL && ArrayOperationPartialInlineSize > 0) {
5405 inline_limit = ArrayOperationPartialInlineSize / type2aelembytes(elem_bt);
5406 do_partial_inline = inline_limit >= 16;
5407 }
5408
5409 if (do_partial_inline) {
5410 assert(elem_bt != T_ILLEGAL, "sanity");
5411
5412 if (Matcher::match_rule_supported_vector(Op_VectorMaskGen, inline_limit, elem_bt) &&
5413 Matcher::match_rule_supported_vector(Op_LoadVectorMasked, inline_limit, elem_bt) &&
5414 Matcher::match_rule_supported_vector(Op_VectorCmpMasked, inline_limit, elem_bt)) {
5415
5416 const TypeVect* vt = TypeVect::make(elem_bt, inline_limit);
5417 Node* cmp_length = _gvn.transform(new CmpINode(length, intcon(inline_limit)));
5418 Node* bol_gt = _gvn.transform(new BoolNode(cmp_length, BoolTest::gt));
5419
5420 call_stub_path = generate_guard(bol_gt, NULL__null, PROB_MIN(1e-6f));
5421
5422 if (!stopped()) {
5423 Node* casted_length = _gvn.transform(new CastIINode(control(), length, TypeInt::make(0, inline_limit, Type::WidenMin)));
5424
5425 const TypePtr* obja_adr_t = _gvn.type(obja_adr)->isa_ptr();
5426 const TypePtr* objb_adr_t = _gvn.type(objb_adr)->isa_ptr();
5427 Node* obja_adr_mem = memory(C->get_alias_index(obja_adr_t));
5428 Node* objb_adr_mem = memory(C->get_alias_index(objb_adr_t));
5429
5430 Node* vmask = _gvn.transform(VectorMaskGenNode::make(ConvI2X(casted_length)ConvI2L(casted_length), elem_bt));
5431 Node* vload_obja = _gvn.transform(new LoadVectorMaskedNode(control(), obja_adr_mem, obja_adr, obja_adr_t, vt, vmask));
5432 Node* vload_objb = _gvn.transform(new LoadVectorMaskedNode(control(), objb_adr_mem, objb_adr, objb_adr_t, vt, vmask));
5433 Node* result = _gvn.transform(new VectorCmpMaskedNode(vload_obja, vload_objb, vmask, TypeInt::INT));
5434
5435 exit_block->init_req(inline_path, control());
5436 memory_phi->init_req(inline_path, map()->memory());
5437 result_phi->init_req(inline_path, result);
5438
5439 C->set_max_vector_size(MAX2((uint)ArrayOperationPartialInlineSize, C->max_vector_size()));
5440 clear_upper_avx();
5441 }
5442 }
5443 }
5444
5445 if (call_stub_path != NULL__null) {
5446 set_control(call_stub_path);
5447
5448 Node* call = make_runtime_call(RC_LEAF,
5449 OptoRuntime::vectorizedMismatch_Type(),
5450 StubRoutines::vectorizedMismatch(), "vectorizedMismatch", TypePtr::BOTTOM,
5451 obja_adr, objb_adr, length, scale);
5452
5453 exit_block->init_req(stub_path, control());
5454 memory_phi->init_req(stub_path, map()->memory());
5455 result_phi->init_req(stub_path, _gvn.transform(new ProjNode(call, TypeFunc::Parms)));
5456 }
5457
5458 exit_block = _gvn.transform(exit_block);
5459 memory_phi = _gvn.transform(memory_phi);
5460 result_phi = _gvn.transform(result_phi);
5461
5462 set_control(exit_block);
5463 set_all_memory(memory_phi);
5464 set_result(result_phi);
5465
5466 return true;
5467}
5468
5469/**
5470 * Calculate CRC32 for byte.
5471 * int java.util.zip.CRC32.update(int crc, int b)
5472 */
5473bool LibraryCallKit::inline_updateCRC32() {
5474 assert(UseCRC32Intrinsics, "need AVX and LCMUL instructions support");
5475 assert(callee()->signature()->size() == 2, "update has 2 parameters");
5476 // no receiver since it is static method
5477 Node* crc = argument(0); // type: int
5478 Node* b = argument(1); // type: int
5479
5480 /*
5481 * int c = ~ crc;
5482 * b = timesXtoThe32[(b ^ c) & 0xFF];
5483 * b = b ^ (c >>> 8);
5484 * crc = ~b;
5485 */
5486
5487 Node* M1 = intcon(-1);
5488 crc = _gvn.transform(new XorINode(crc, M1));
5489 Node* result = _gvn.transform(new XorINode(crc, b));
5490 result = _gvn.transform(new AndINode(result, intcon(0xFF)));
5491
5492 Node* base = makecon(TypeRawPtr::make(StubRoutines::crc_table_addr()));
5493 Node* offset = _gvn.transform(new LShiftINode(result, intcon(0x2)));
5494 Node* adr = basic_plus_adr(top(), base, ConvI2X(offset)ConvI2L(offset));
5495 result = make_load(control(), adr, TypeInt::INT, T_INT, MemNode::unordered);
5496
5497 crc = _gvn.transform(new URShiftINode(crc, intcon(8)));
5498 result = _gvn.transform(new XorINode(crc, result));
5499 result = _gvn.transform(new XorINode(result, M1));
5500 set_result(result);
5501 return true;
5502}
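
The comment block above (source lines 5480-5485) describes the scalar, table-driven CRC32 byte update that the emitted nodes mirror: complement the running crc, use the low byte of (crc ^ b) as a table index (scaled by 4 when addressing the int table), xor with crc >>> 8, and complement the result back. A standalone C++ rendering of that update follows; crc_table is a hypothetical name for the 256-entry table that StubRoutines::crc_table_addr() points at, assumed to be the standard reflected CRC-32 (polynomial 0xEDB88320) table.

#include <cstdint>

extern const uint32_t crc_table[256];            // assumed standard CRC-32 lookup table

uint32_t crc32_update_byte(uint32_t crc, uint8_t b) {
  uint32_t c = ~crc;                             // int c = ~crc;
  uint32_t t = crc_table[(c ^ b) & 0xFF];        // b = timesXtoThe32[(b ^ c) & 0xFF];
  return ~(t ^ (c >> 8));                        // b = b ^ (c >>> 8); crc = ~b;
}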
5503
5504/**
5505 * Calculate CRC32 for byte[] array.
5506 * int java.util.zip.CRC32.updateBytes(int crc, byte[] buf, int off, int len)
5507 */
5508bool LibraryCallKit::inline_updateBytesCRC32() {
5509 assert(UseCRC32Intrinsics, "need AVX and LCMUL instructions support");
5510 assert(callee()->signature()->size() == 4, "updateBytes has 4 parameters");
5511 // no receiver since it is static method
5512 Node* crc = argument(0); // type: int
5513 Node* src = argument(1); // type: oop
5514 Node* offset = argument(2); // type: int
5515 Node* length = argument(3); // type: int
5516
5517 const Type* src_type = src->Value(&_gvn);
5518 const TypeAryPtr* top_src = src_type->isa_aryptr();
5519 if (top_src == NULL__null || top_src->klass() == NULL__null) {
5520 // failed array check
5521 return false;
5522 }
5523
5524 // Figure out the size and type of the elements we will be copying.
5525 BasicType src_elem = src_type->isa_aryptr()->klass()->as_array_klass()->element_type()->basic_type();
5526 if (src_elem != T_BYTE) {
5527 return false;
5528 }
5529
5530 // 'src_start' points to src array + scaled offset
5531 src = must_be_not_null(src, true);
5532 Node* src_start = array_element_address(src, offset, src_elem);
5533
5534 // We assume that range check is done by caller.
5535 // TODO: generate range check (offset+length < src.length) in debug VM.
5536
5537 // Call the stub.
5538 address stubAddr = StubRoutines::updateBytesCRC32();
5539 const char *stubName = "updateBytesCRC32";
5540
5541 Node* call = make_runtime_call(RC_LEAF|RC_NO_FP, OptoRuntime::updateBytesCRC32_Type(),
5542 stubAddr, stubName, TypePtr::BOTTOM,
5543 crc, src_start, length);
5544 Node* result = _gvn.transform(new ProjNode(call, TypeFunc::Parms));
5545 set_result(result);
5546 return true;
5547}
5548
5549/**
5550 * Calculate CRC32 for ByteBuffer.
5551 * int java.util.zip.CRC32.updateByteBuffer(int crc, long buf, int off, int len)
5552 */
5553bool LibraryCallKit::inline_updateByteBufferCRC32() {
5554 assert(UseCRC32Intrinsics, "need AVX and CLMUL instructions support");
5555 assert(callee()->signature()->size() == 5, "updateByteBuffer has 4 parameters and one is long");
5556 // no receiver since it is static method
5557 Node* crc = argument(0); // type: int
5558 Node* src = argument(1); // type: long
5559 Node* offset = argument(3); // type: int
5560 Node* length = argument(4); // type: int
5561
5562 src = ConvL2X(src); // adjust Java long to machine word
5563 Node* base = _gvn.transform(new CastX2PNode(src));
5564 offset = ConvI2X(offset);
5565
5566 // 'src_start' points to src array + scaled offset
5567 Node* src_start = basic_plus_adr(top(), base, offset);
5568
5569 // Call the stub.
5570 address stubAddr = StubRoutines::updateBytesCRC32();
5571 const char *stubName = "updateBytesCRC32";
5572
5573 Node* call = make_runtime_call(RC_LEAF|RC_NO_FP, OptoRuntime::updateBytesCRC32_Type(),
5574 stubAddr, stubName, TypePtr::BOTTOM,
5575 crc, src_start, length);
5576 Node* result = _gvn.transform(new ProjNode(call, TypeFunc::Parms));
5577 set_result(result);
5578 return true;
5579}
5580
5581//------------------------------get_table_from_crc32c_class-----------------------
5582Node * LibraryCallKit::get_table_from_crc32c_class(ciInstanceKlass *crc32c_class) {
5583 Node* table = load_field_from_object(NULL, "byteTable", "[I", /*decorators*/ IN_HEAP, /*is_static*/ true, crc32c_class);
5584 assert (table != NULL, "wrong version of java.util.zip.CRC32C");
5585
5586 return table;
5587}
5588
5589//------------------------------inline_updateBytesCRC32C-----------------------
5590//
5591// Calculate CRC32C for byte[] array.
5592// int java.util.zip.CRC32C.updateBytes(int crc, byte[] buf, int off, int end)
5593//
5594bool LibraryCallKit::inline_updateBytesCRC32C() {
5595 assert(UseCRC32CIntrinsics, "need CRC32C instruction support");
5596 assert(callee()->signature()->size() == 4, "updateBytes has 4 parameters");
5597 assert(callee()->holder()->is_loaded(), "CRC32C class must be loaded");
5598 // no receiver since it is a static method
5599 Node* crc = argument(0); // type: int
5600 Node* src = argument(1); // type: oop
5601 Node* offset = argument(2); // type: int
5602 Node* end = argument(3); // type: int
5603
5604 Node* length = _gvn.transform(new SubINode(end, offset));
5605
5606 const Type* src_type = src->Value(&_gvn);
5607 const TypeAryPtr* top_src = src_type->isa_aryptr();
5608 if (top_src == NULL || top_src->klass() == NULL) {
5609 // failed array check
5610 return false;
5611 }
5612
5613 // Figure out the size and type of the elements we will be copying.
5614 BasicType src_elem = src_type->isa_aryptr()->klass()->as_array_klass()->element_type()->basic_type();
5615 if (src_elem != T_BYTE) {
5616 return false;
5617 }
5618
5619 // 'src_start' points to src array + scaled offset
5620 src = must_be_not_null(src, true);
5621 Node* src_start = array_element_address(src, offset, src_elem);
5622
5623 // static final int[] byteTable in class CRC32C
5624 Node* table = get_table_from_crc32c_class(callee()->holder());
5625 table = must_be_not_null(table, true);
5626 Node* table_start = array_element_address(table, intcon(0), T_INT);
5627
5628 // We assume that range check is done by caller.
5629 // TODO: generate range check (offset+length < src.length) in debug VM.
5630
5631 // Call the stub.
5632 address stubAddr = StubRoutines::updateBytesCRC32C();
5633 const char *stubName = "updateBytesCRC32C";
5634
5635 Node* call = make_runtime_call(RC_LEAF, OptoRuntime::updateBytesCRC32C_Type(),
5636 stubAddr, stubName, TypePtr::BOTTOM,
5637 crc, src_start, length, table_start);
5638 Node* result = _gvn.transform(new ProjNode(call, TypeFunc::Parms));
5639 set_result(result);
5640 return true;
5641}
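
Note the API difference that the SubINode above accounts for: CRC32.updateBytes receives a length, while CRC32C.updateBytes receives an exclusive end index, so the intrinsic derives the length itself. A sketch of that conversion with illustrative variable names:

  // Illustrative only: the CRC32C entry point passes 'end' rather than 'len'.
  int length = end - offset;   // what the SubINode(end, offset) above computes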
5642
5643//------------------------------inline_updateDirectByteBufferCRC32C-----------------------
5644//
5645// Calculate CRC32C for DirectByteBuffer.
5646// int java.util.zip.CRC32C.updateDirectByteBuffer(int crc, long buf, int off, int end)
5647//
5648bool LibraryCallKit::inline_updateDirectByteBufferCRC32C() {
5649 assert(UseCRC32CIntrinsics, "need CRC32C instruction support");
5650 assert(callee()->signature()->size() == 5, "updateDirectByteBuffer has 4 parameters and one is long");
5651 assert(callee()->holder()->is_loaded(), "CRC32C class must be loaded");
5652 // no receiver since it is a static method
5653 Node* crc = argument(0); // type: int
5654 Node* src = argument(1); // type: long
5655 Node* offset = argument(3); // type: int
5656 Node* end = argument(4); // type: int
5657
5658 Node* length = _gvn.transform(new SubINode(end, offset));
5659
5660 src = ConvL2X(src); // adjust Java long to machine word
5661 Node* base = _gvn.transform(new CastX2PNode(src));
5662 offset = ConvI2X(offset);
5663
5664 // 'src_start' points to src array + scaled offset
5665 Node* src_start = basic_plus_adr(top(), base, offset);
5666
5667 // static final int[] byteTable in class CRC32C
5668 Node* table = get_table_from_crc32c_class(callee()->holder());
5669 table = must_be_not_null(table, true);
5670 Node* table_start = array_element_address(table, intcon(0), T_INT);
5671
5672 // Call the stub.
5673 address stubAddr = StubRoutines::updateBytesCRC32C();
5674 const char *stubName = "updateBytesCRC32C";
5675
5676 Node* call = make_runtime_call(RC_LEAF, OptoRuntime::updateBytesCRC32C_Type(),
5677 stubAddr, stubName, TypePtr::BOTTOM,
5678 crc, src_start, length, table_start);
5679 Node* result = _gvn.transform(new ProjNode(call, TypeFunc::Parms));
5680 set_result(result);
5681 return true;
5682}
5683
5684//------------------------------inline_updateBytesAdler32----------------------
5685//
5686// Calculate Adler32 checksum for byte[] array.
5687// int java.util.zip.Adler32.updateBytes(int crc, byte[] buf, int off, int len)
5688//
5689bool LibraryCallKit::inline_updateBytesAdler32() {
5690 assert(UseAdler32Intrinsics, "Adler32 Intrinsic support needed"); // check if we actually need to check this flag or check a different one
5691 assert(callee()->signature()->size() == 4, "updateBytes has 4 parameters");
5692 assert(callee()->holder()->is_loaded(), "Adler32 class must be loaded");
5693 // no receiver since it is static method
5694 Node* crc = argument(0); // type: int
5695 Node* src = argument(1); // type: oop
5696 Node* offset = argument(2); // type: int
5697 Node* length = argument(3); // type: int
5698
5699 const Type* src_type = src->Value(&_gvn);
5700 const TypeAryPtr* top_src = src_type->isa_aryptr();
5701 if (top_src == NULL || top_src->klass() == NULL) {
5702 // failed array check
5703 return false;
5704 }
5705
5706 // Figure out the size and type of the elements we will be copying.
5707 BasicType src_elem = src_type->isa_aryptr()->klass()->as_array_klass()->element_type()->basic_type();
5708 if (src_elem != T_BYTE) {
5709 return false;
5710 }
5711
5712 // 'src_start' points to src array + scaled offset
5713 Node* src_start = array_element_address(src, offset, src_elem);
5714
5715 // We assume that range check is done by caller.
5716 // TODO: generate range check (offset+length < src.length) in debug VM.
5717
5718 // Call the stub.
5719 address stubAddr = StubRoutines::updateBytesAdler32();
5720 const char *stubName = "updateBytesAdler32";
5721
5722 Node* call = make_runtime_call(RC_LEAF, OptoRuntime::updateBytesAdler32_Type(),
5723 stubAddr, stubName, TypePtr::BOTTOM,
5724 crc, src_start, length);
5725 Node* result = _gvn.transform(new ProjNode(call, TypeFunc::Parms));
5726 set_result(result);
5727 return true;
5728}
5729
5730//------------------------------inline_updateByteBufferAdler32---------------
5731//
5732// Calculate Adler32 checksum for DirectByteBuffer.
5733// int java.util.zip.Adler32.updateByteBuffer(int crc, long buf, int off, int len)
5734//
5735bool LibraryCallKit::inline_updateByteBufferAdler32() {
5736 assert(UseAdler32Intrinsics, "Adler32 Intrinsic support needed"); // check if we actually need to check this flag or check a different one
5737 assert(callee()->signature()->size() == 5, "updateByteBuffer has 4 parameters and one is long");
5738 assert(callee()->holder()->is_loaded(), "Adler32 class must be loaded");
5739 // no receiver since it is static method
5740 Node* crc = argument(0); // type: int
5741 Node* src = argument(1); // type: long
5742 Node* offset = argument(3); // type: int
5743 Node* length = argument(4); // type: int
5744
5745 src = ConvL2X(src); // adjust Java long to machine word
5746 Node* base = _gvn.transform(new CastX2PNode(src));
5747 offset = ConvI2X(offset);
5748
5749 // 'src_start' points to src array + scaled offset
5750 Node* src_start = basic_plus_adr(top(), base, offset);
5751
5752 // Call the stub.
5753 address stubAddr = StubRoutines::updateBytesAdler32();
5754 const char *stubName = "updateBytesAdler32";
5755
5756 Node* call = make_runtime_call(RC_LEAF, OptoRuntime::updateBytesAdler32_Type(),
5757 stubAddr, stubName, TypePtr::BOTTOM,
5758 crc, src_start, length);
5759
5760 Node* result = _gvn.transform(new ProjNode(call, TypeFunc::Parms));
5761 set_result(result);
5762 return true;
5763}
5764
5765//----------------------------inline_reference_get----------------------------
5766// public T java.lang.ref.Reference.get();
5767bool LibraryCallKit::inline_reference_get() {
5768 const int referent_offset = java_lang_ref_Reference::referent_offset();
5769
5770 // Get the argument:
5771 Node* reference_obj = null_check_receiver();
5772 if (stopped()) return true;
5773
5774 DecoratorSet decorators = IN_HEAP | ON_WEAK_OOP_REF;
5775 Node* result = load_field_from_object(reference_obj, "referent", "Ljava/lang/Object;",
5776 decorators, /*is_static*/ false, NULL);
5777 if (result == NULL) return false;
5778
5779 // Add memory barrier to prevent commoning reads from this field
5780 // across safepoint since GC can change its value.
5781 insert_mem_bar(Op_MemBarCPUOrder);
5782
5783 set_result(result);
5784 return true;
5785}
5786
5787//----------------------------inline_reference_refersTo0----------------------------
5788// bool java.lang.ref.Reference.refersTo0();
5789// bool java.lang.ref.PhantomReference.refersTo0();
5790bool LibraryCallKit::inline_reference_refersTo0(bool is_phantom) {
5791 // Get arguments:
5792 Node* reference_obj = null_check_receiver();
5793 Node* other_obj = argument(1);
5794 if (stopped()) return true;
5795
5796 DecoratorSet decorators = IN_HEAP | AS_NO_KEEPALIVE;
5797 decorators |= (is_phantom ? ON_PHANTOM_OOP_REF : ON_WEAK_OOP_REF);
5798 Node* referent = load_field_from_object(reference_obj, "referent", "Ljava/lang/Object;",
5799 decorators, /*is_static*/ false, NULL);
5800 if (referent == NULL) return false;
5801
5802 // Add memory barrier to prevent commoning reads from this field
5803 // across safepoint since GC can change its value.
5804 insert_mem_bar(Op_MemBarCPUOrder);
5805
5806 Node* cmp = _gvn.transform(new CmpPNode(referent, other_obj));
5807 Node* bol = _gvn.transform(new BoolNode(cmp, BoolTest::eq));
5808 IfNode* if_node = create_and_map_if(control(), bol, PROB_FAIR, COUNT_UNKNOWN);
5809
5810 RegionNode* region = new RegionNode(3);
5811 PhiNode* phi = new PhiNode(region, TypeInt::BOOL);
5812
5813 Node* if_true = _gvn.transform(new IfTrueNode(if_node));
5814 region->init_req(1, if_true);
5815 phi->init_req(1, intcon(1));
5816
5817 Node* if_false = _gvn.transform(new IfFalseNode(if_node));
5818 region->init_req(2, if_false);
5819 phi->init_req(2, intcon(0));
5820
5821 set_control(_gvn.transform(region));
5822 record_for_igvn(region);
5823 set_result(_gvn.transform(phi));
5824 return true;
5825}
5826
5827
5828Node* LibraryCallKit::load_field_from_object(Node* fromObj, const char* fieldName, const char* fieldTypeString,
5829 DecoratorSet decorators = IN_HEAP, bool is_static = false,
5830 ciInstanceKlass* fromKls = NULL) {
5831 if (fromKls == NULL) {
5832 const TypeInstPtr* tinst = _gvn.type(fromObj)->isa_instptr();
5833 assert(tinst != NULL, "obj is null");
5834 assert(tinst->klass()->is_loaded(), "obj is not loaded");
5835 fromKls = tinst->klass()->as_instance_klass();
5836 } else {
5837 assert(is_static, "only for static field access");
5838 }
5839 ciField* field = fromKls->get_field_by_name(ciSymbol::make(fieldName),
5840 ciSymbol::make(fieldTypeString),
5841 is_static);
5842
5843 assert (field != NULL, "undefined field");
5844 if (field == NULL) return (Node *) NULL;
5845
5846 if (is_static) {
5847 const TypeInstPtr* tip = TypeInstPtr::make(fromKls->java_mirror());
5848 fromObj = makecon(tip);
5849 }
5850
5851 // Next code copied from Parse::do_get_xxx():
5852
5853 // Compute address and memory type.
5854 int offset = field->offset_in_bytes();
5855 bool is_vol = field->is_volatile();
5856 ciType* field_klass = field->type();
5857 assert(field_klass->is_loaded(), "should be loaded");
5858 const TypePtr* adr_type = C->alias_type(field)->adr_type();
5859 Node *adr = basic_plus_adr(fromObj, fromObj, offset);
5860 BasicType bt = field->layout_type();
5861
5862 // Build the resultant type of the load
5863 const Type *type;
5864 if (bt == T_OBJECT) {
5865 type = TypeOopPtr::make_from_klass(field_klass->as_klass());
5866 } else {
5867 type = Type::get_const_basic_type(bt);
5868 }
5869
5870 if (is_vol) {
5871 decorators |= MO_SEQ_CST;
5872 }
5873
5874 return access_load_at(fromObj, adr, adr_type, type, bt, decorators);
5875}
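
As a usage sketch, the cipher intrinsics below call this helper with just an object, a field name, and a field signature; the decorator and static-field parameters then fall back to their defaults (IN_HEAP, non-static). The example is lifted from the CBC intrinsic further down:

  // Load the embeddedCipher field, or bail out to the Java path if the
  // expected field is not found (load_field_from_object returns NULL).
  Node* embeddedCipherObj = load_field_from_object(cipherBlockChaining_object,
                                                   "embeddedCipher",
                                                   "Lcom/sun/crypto/provider/SymmetricCipher;");
  if (embeddedCipherObj == NULL) return false;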
5876
5877Node * LibraryCallKit::field_address_from_object(Node * fromObj, const char * fieldName, const char * fieldTypeString,
5878 bool is_exact = true, bool is_static = false,
5879 ciInstanceKlass * fromKls = NULL) {
5880 if (fromKls == NULL) {
5881 const TypeInstPtr* tinst = _gvn.type(fromObj)->isa_instptr();
5882 assert(tinst != NULL, "obj is null");
5883 assert(tinst->klass()->is_loaded(), "obj is not loaded");
5884 assert(!is_exact || tinst->klass_is_exact(), "klass not exact");
5885 fromKls = tinst->klass()->as_instance_klass();
5886 }
5887 else {
5888 assert(is_static, "only for static field access");
5889 }
5890 ciField* field = fromKls->get_field_by_name(ciSymbol::make(fieldName),
5891 ciSymbol::make(fieldTypeString),
5892 is_static);
5893
5894 assert(field != NULL, "undefined field");
5895 assert(!field->is_volatile(), "not defined for volatile fields");
5896
5897 if (is_static) {
5898 const TypeInstPtr* tip = TypeInstPtr::make(fromKls->java_mirror());
5899 fromObj = makecon(tip);
5900 }
5901
5902 // Next code copied from Parse::do_get_xxx():
5903
5904 // Compute address and memory type.
5905 int offset = field->offset_in_bytes();
5906 Node *adr = basic_plus_adr(fromObj, fromObj, offset);
5907
5908 return adr;
5909}
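
A usage sketch: unlike load_field_from_object, this helper returns the address of the field rather than its value. The CTR intrinsic below uses it to hand the stub a pointer to the 'used' int field, presumably so the stub can read and update it in place:

  // From inline_counterMode_AESCrypt below.
  Node* used = field_address_from_object(counterMode_object, "used", "I", /*is_exact*/ false);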
5910
5911//------------------------------inline_aescrypt_Block-----------------------
5912bool LibraryCallKit::inline_aescrypt_Block(vmIntrinsics::ID id) {
5913 address stubAddr = NULL;
5914 const char *stubName;
5915 assert(UseAES, "need AES instruction support");
5916
5917 switch(id) {
5918 case vmIntrinsics::_aescrypt_encryptBlock:
5919 stubAddr = StubRoutines::aescrypt_encryptBlock();
5920 stubName = "aescrypt_encryptBlock";
5921 break;
5922 case vmIntrinsics::_aescrypt_decryptBlock:
5923 stubAddr = StubRoutines::aescrypt_decryptBlock();
5924 stubName = "aescrypt_decryptBlock";
5925 break;
5926 default:
5927 break;
5928 }
5929 if (stubAddr == NULL) return false;
5930
5931 Node* aescrypt_object = argument(0);
5932 Node* src = argument(1);
5933 Node* src_offset = argument(2);
5934 Node* dest = argument(3);
5935 Node* dest_offset = argument(4);
5936
5937 src = must_be_not_null(src, true);
5938 dest = must_be_not_null(dest, true);
5939
5940 // (1) src and dest are arrays.
5941 const Type* src_type = src->Value(&_gvn);
5942 const Type* dest_type = dest->Value(&_gvn);
5943 const TypeAryPtr* top_src = src_type->isa_aryptr();
5944 const TypeAryPtr* top_dest = dest_type->isa_aryptr();
5945 assert (top_src != NULL && top_src->klass() != NULL && top_dest != NULL && top_dest->klass() != NULL, "args are strange");
5946
5947 // for the quick and dirty code we will skip all the checks.
5948 // we are just trying to get the call to be generated.
5949 Node* src_start = src;
5950 Node* dest_start = dest;
5951 if (src_offset != NULL || dest_offset != NULL) {
5952 assert(src_offset != NULL && dest_offset != NULL, "");
5953 src_start = array_element_address(src, src_offset, T_BYTE);
5954 dest_start = array_element_address(dest, dest_offset, T_BYTE);
5955 }
5956
5957 // now need to get the start of its expanded key array
5958 // this requires a newer class file that has this array as littleEndian ints, otherwise we revert to java
5959 Node* k_start = get_key_start_from_aescrypt_object(aescrypt_object);
5960 if (k_start == NULL) return false;
5961
5962 // Call the stub.
5963 make_runtime_call(RC_LEAF|RC_NO_FP, OptoRuntime::aescrypt_block_Type(),
5964 stubAddr, stubName, TypePtr::BOTTOM,
5965 src_start, dest_start, k_start);
5966
5967 return true;
5968}
5969
5970//------------------------------inline_cipherBlockChaining_AESCrypt-----------------------
5971bool LibraryCallKit::inline_cipherBlockChaining_AESCrypt(vmIntrinsics::ID id) {
5972 address stubAddr = NULL;
5973 const char *stubName = NULL;
5974
5975 assert(UseAES, "need AES instruction support");
5976
5977 switch(id) {
5978 case vmIntrinsics::_cipherBlockChaining_encryptAESCrypt:
5979 stubAddr = StubRoutines::cipherBlockChaining_encryptAESCrypt();
5980 stubName = "cipherBlockChaining_encryptAESCrypt";
5981 break;
5982 case vmIntrinsics::_cipherBlockChaining_decryptAESCrypt:
5983 stubAddr = StubRoutines::cipherBlockChaining_decryptAESCrypt();
5984 stubName = "cipherBlockChaining_decryptAESCrypt";
5985 break;
5986 default:
5987 break;
5988 }
5989 if (stubAddr == NULL) return false;
5990
5991 Node* cipherBlockChaining_object = argument(0);
5992 Node* src = argument(1);
5993 Node* src_offset = argument(2);
5994 Node* len = argument(3);
5995 Node* dest = argument(4);
5996 Node* dest_offset = argument(5);
5997
5998 src = must_be_not_null(src, false);
5999 dest = must_be_not_null(dest, false);
6000
6001 // (1) src and dest are arrays.
6002 const Type* src_type = src->Value(&_gvn);
6003 const Type* dest_type = dest->Value(&_gvn);
6004 const TypeAryPtr* top_src = src_type->isa_aryptr();
6005 const TypeAryPtr* top_dest = dest_type->isa_aryptr();
6006 assert (top_src != NULL && top_src->klass() != NULL
6007 && top_dest != NULL && top_dest->klass() != NULL, "args are strange");
6008
6009 // checks are the responsibility of the caller
6010 Node* src_start = src;
6011 Node* dest_start = dest;
6012 if (src_offset != NULL || dest_offset != NULL) {
6013 assert(src_offset != NULL && dest_offset != NULL, "");
6014 src_start = array_element_address(src, src_offset, T_BYTE);
6015 dest_start = array_element_address(dest, dest_offset, T_BYTE);
6016 }
6017
6018 // if we are in this set of code, we "know" the embeddedCipher is an AESCrypt object
6019 // (because of the predicated logic executed earlier).
6020 // so we cast it here safely.
6021 // this requires a newer class file that has this array as littleEndian ints, otherwise we revert to java
6022
6023 Node* embeddedCipherObj = load_field_from_object(cipherBlockChaining_object, "embeddedCipher", "Lcom/sun/crypto/provider/SymmetricCipher;");
6024 if (embeddedCipherObj == NULL) return false;
6025
6026 // cast it to what we know it will be at runtime
6027 const TypeInstPtr* tinst = _gvn.type(cipherBlockChaining_object)->isa_instptr();
6028 assert(tinst != NULL, "CBC obj is null");
6029 assert(tinst->klass()->is_loaded(), "CBC obj is not loaded");
6030 ciKlass* klass_AESCrypt = tinst->klass()->as_instance_klass()->find_klass(ciSymbol::make("com/sun/crypto/provider/AESCrypt"));
6031 assert(klass_AESCrypt->is_loaded(), "predicate checks that this class is loaded");
6032
6033 ciInstanceKlass* instklass_AESCrypt = klass_AESCrypt->as_instance_klass();
6034 const TypeKlassPtr* aklass = TypeKlassPtr::make(instklass_AESCrypt);
6035 const TypeOopPtr* xtype = aklass->as_instance_type()->cast_to_ptr_type(TypePtr::NotNull);
6036 Node* aescrypt_object = new CheckCastPPNode(control(), embeddedCipherObj, xtype);
6037 aescrypt_object = _gvn.transform(aescrypt_object);
6038
6039 // we need to get the start of the aescrypt_object's expanded key array
6040 Node* k_start = get_key_start_from_aescrypt_object(aescrypt_object);
6041 if (k_start == NULL) return false;
6042
6043 // similarly, get the start address of the r vector
6044 Node* objRvec = load_field_from_object(cipherBlockChaining_object, "r", "[B");
6045 if (objRvec == NULL) return false;
6046 Node* r_start = array_element_address(objRvec, intcon(0), T_BYTE);
6047
6048 // Call the stub, passing src_start, dest_start, k_start, r_start and src_len
6049 Node* cbcCrypt = make_runtime_call(RC_LEAF|RC_NO_FP,
6050 OptoRuntime::cipherBlockChaining_aescrypt_Type(),
6051 stubAddr, stubName, TypePtr::BOTTOM,
6052 src_start, dest_start, k_start, r_start, len);
6053
6054 // return cipher length (int)
6055 Node* retvalue = _gvn.transform(new ProjNode(cbcCrypt, TypeFunc::Parms));
6056 set_result(retvalue);
6057 return true;
6058}
6059
6060//------------------------------inline_electronicCodeBook_AESCrypt-----------------------
6061bool LibraryCallKit::inline_electronicCodeBook_AESCrypt(vmIntrinsics::ID id) {
6062 address stubAddr = NULL;
6063 const char *stubName = NULL;
6064
6065 assert(UseAES, "need AES instruction support");
6066
6067 switch (id) {
6068 case vmIntrinsics::_electronicCodeBook_encryptAESCrypt:
6069 stubAddr = StubRoutines::electronicCodeBook_encryptAESCrypt();
6070 stubName = "electronicCodeBook_encryptAESCrypt";
6071 break;
6072 case vmIntrinsics::_electronicCodeBook_decryptAESCrypt:
6073 stubAddr = StubRoutines::electronicCodeBook_decryptAESCrypt();
6074 stubName = "electronicCodeBook_decryptAESCrypt";
6075 break;
6076 default:
6077 break;
6078 }
6079
6080 if (stubAddr == NULL) return false;
6081
6082 Node* electronicCodeBook_object = argument(0);
6083 Node* src = argument(1);
6084 Node* src_offset = argument(2);
6085 Node* len = argument(3);
6086 Node* dest = argument(4);
6087 Node* dest_offset = argument(5);
6088
6089 // (1) src and dest are arrays.
6090 const Type* src_type = src->Value(&_gvn);
6091 const Type* dest_type = dest->Value(&_gvn);
6092 const TypeAryPtr* top_src = src_type->isa_aryptr();
6093 const TypeAryPtr* top_dest = dest_type->isa_aryptr();
6094 assert(top_src != NULL && top_src->klass() != NULL
6095 && top_dest != NULL && top_dest->klass() != NULL, "args are strange");
6096
6097 // checks are the responsibility of the caller
6098 Node* src_start = src;
6099 Node* dest_start = dest;
6100 if (src_offset != NULL || dest_offset != NULL) {
6101 assert(src_offset != NULL && dest_offset != NULL, "");
6102 src_start = array_element_address(src, src_offset, T_BYTE);
6103 dest_start = array_element_address(dest, dest_offset, T_BYTE);
6104 }
6105
6106 // if we are in this set of code, we "know" the embeddedCipher is an AESCrypt object
6107 // (because of the predicated logic executed earlier).
6108 // so we cast it here safely.
6109 // this requires a newer class file that has this array as littleEndian ints, otherwise we revert to java
6110
6111 Node* embeddedCipherObj = load_field_from_object(electronicCodeBook_object, "embeddedCipher", "Lcom/sun/crypto/provider/SymmetricCipher;");
6112 if (embeddedCipherObj == NULL) return false;
6113
6114 // cast it to what we know it will be at runtime
6115 const TypeInstPtr* tinst = _gvn.type(electronicCodeBook_object)->isa_instptr();
6116 assert(tinst != NULL, "ECB obj is null");
6117 assert(tinst->klass()->is_loaded(), "ECB obj is not loaded");
6118 ciKlass* klass_AESCrypt = tinst->klass()->as_instance_klass()->find_klass(ciSymbol::make("com/sun/crypto/provider/AESCrypt"));
6119 assert(klass_AESCrypt->is_loaded(), "predicate checks that this class is loaded");
6120
6121 ciInstanceKlass* instklass_AESCrypt = klass_AESCrypt->as_instance_klass();
6122 const TypeKlassPtr* aklass = TypeKlassPtr::make(instklass_AESCrypt);
6123 const TypeOopPtr* xtype = aklass->as_instance_type()->cast_to_ptr_type(TypePtr::NotNull);
6124 Node* aescrypt_object = new CheckCastPPNode(control(), embeddedCipherObj, xtype);
6125 aescrypt_object = _gvn.transform(aescrypt_object);
6126
6127 // we need to get the start of the aescrypt_object's expanded key array
6128 Node* k_start = get_key_start_from_aescrypt_object(aescrypt_object);
6129 if (k_start == NULL) return false;
6130
6131 // Call the stub, passing src_start, dest_start, k_start, r_start and src_len
6132 Node* ecbCrypt = make_runtime_call(RC_LEAF | RC_NO_FP,
6133 OptoRuntime::electronicCodeBook_aescrypt_Type(),
6134 stubAddr, stubName, TypePtr::BOTTOM,
6135 src_start, dest_start, k_start, len);
6136
6137 // return cipher length (int)
6138 Node* retvalue = _gvn.transform(new ProjNode(ecbCrypt, TypeFunc::Parms));
6139 set_result(retvalue);
6140 return true;
6141}
6142
6143//------------------------------inline_counterMode_AESCrypt-----------------------
6144bool LibraryCallKit::inline_counterMode_AESCrypt(vmIntrinsics::ID id) {
6145 assert(UseAES, "need AES instruction support");
6146 if (!UseAESCTRIntrinsics) return false;
6147
6148 address stubAddr = NULL;
6149 const char *stubName = NULL;
6150 if (id == vmIntrinsics::_counterMode_AESCrypt) {
6151 stubAddr = StubRoutines::counterMode_AESCrypt();
6152 stubName = "counterMode_AESCrypt";
6153 }
6154 if (stubAddr == NULL) return false;
6155
6156 Node* counterMode_object = argument(0);
6157 Node* src = argument(1);
6158 Node* src_offset = argument(2);
6159 Node* len = argument(3);
6160 Node* dest = argument(4);
6161 Node* dest_offset = argument(5);
6162
6163 // (1) src and dest are arrays.
6164 const Type* src_type = src->Value(&_gvn);
6165 const Type* dest_type = dest->Value(&_gvn);
6166 const TypeAryPtr* top_src = src_type->isa_aryptr();
6167 const TypeAryPtr* top_dest = dest_type->isa_aryptr();
6168 assert(top_src != NULL && top_src->klass() != NULL &&
6169 top_dest != NULL && top_dest->klass() != NULL, "args are strange");
6170
6171 // checks are the responsibility of the caller
6172 Node* src_start = src;
6173 Node* dest_start = dest;
6174 if (src_offset != NULL || dest_offset != NULL) {
6175 assert(src_offset != NULL && dest_offset != NULL, "");
6176 src_start = array_element_address(src, src_offset, T_BYTE);
6177 dest_start = array_element_address(dest, dest_offset, T_BYTE);
6178 }
6179
6180 // if we are in this set of code, we "know" the embeddedCipher is an AESCrypt object
6181 // (because of the predicated logic executed earlier).
6182 // so we cast it here safely.
6183 // this requires a newer class file that has this array as littleEndian ints, otherwise we revert to java
6184 Node* embeddedCipherObj = load_field_from_object(counterMode_object, "embeddedCipher", "Lcom/sun/crypto/provider/SymmetricCipher;");
6185 if (embeddedCipherObj == NULL) return false;
6186 // cast it to what we know it will be at runtime
6187 const TypeInstPtr* tinst = _gvn.type(counterMode_object)->isa_instptr();
6188 assert(tinst != NULL, "CTR obj is null");
6189 assert(tinst->klass()->is_loaded(), "CTR obj is not loaded");
6190 ciKlass* klass_AESCrypt = tinst->klass()->as_instance_klass()->find_klass(ciSymbol::make("com/sun/crypto/provider/AESCrypt"));
6191 assert(klass_AESCrypt->is_loaded(), "predicate checks that this class is loaded");
6192 ciInstanceKlass* instklass_AESCrypt = klass_AESCrypt->as_instance_klass();
6193 const TypeKlassPtr* aklass = TypeKlassPtr::make(instklass_AESCrypt);
6194 const TypeOopPtr* xtype = aklass->as_instance_type()->cast_to_ptr_type(TypePtr::NotNull);
6195 Node* aescrypt_object = new CheckCastPPNode(control(), embeddedCipherObj, xtype);
6196 aescrypt_object = _gvn.transform(aescrypt_object);
6197 // we need to get the start of the aescrypt_object's expanded key array
6198 Node* k_start = get_key_start_from_aescrypt_object(aescrypt_object);
6199 if (k_start == NULL) return false;
6200 // similarly, get the start address of the r vector
6201 Node* obj_counter = load_field_from_object(counterMode_object, "counter", "[B");
6202 if (obj_counter == NULL) return false;
6203 Node* cnt_start = array_element_address(obj_counter, intcon(0), T_BYTE);
6204
6205 Node* saved_encCounter = load_field_from_object(counterMode_object, "encryptedCounter", "[B");
6206 if (saved_encCounter == NULL) return false;
6207 Node* saved_encCounter_start = array_element_address(saved_encCounter, intcon(0), T_BYTE);
6208 Node* used = field_address_from_object(counterMode_object, "used", "I", /*is_exact*/ false);
6209
6210 // Call the stub, passing src_start, dest_start, k_start, r_start and src_len
6211 Node* ctrCrypt = make_runtime_call(RC_LEAF|RC_NO_FP,
6212 OptoRuntime::counterMode_aescrypt_Type(),
6213 stubAddr, stubName, TypePtr::BOTTOM,
6214 src_start, dest_start, k_start, cnt_start, len, saved_encCounter_start, used);
6215
6216 // return cipher length (int)
6217 Node* retvalue = _gvn.transform(new ProjNode(ctrCrypt, TypeFunc::Parms));
6218 set_result(retvalue);
6219 return true;
6220}
6221
6222//------------------------------get_key_start_from_aescrypt_object-----------------------
6223Node * LibraryCallKit::get_key_start_from_aescrypt_object(Node *aescrypt_object) {
6224#if defined(PPC64) || defined(S390)
6225 // MixColumns for decryption can be reduced by preprocessing MixColumns with round keys.
6226 // Intel's extension is based on this optimization and AESCrypt generates round keys by preprocessing MixColumns.
6227 // However, ppc64 vncipher processes MixColumns and requires the same round keys with encryption.
6228 // The ppc64 stubs of encryption and decryption use the same round keys (sessionK[0]).
6229 Node* objSessionK = load_field_from_object(aescrypt_object, "sessionK", "[[I");
6230 assert (objSessionK != NULL, "wrong version of com.sun.crypto.provider.AESCrypt");
6231 if (objSessionK == NULL) {
6232 return (Node *) NULL;
6233 }
6234 Node* objAESCryptKey = load_array_element(objSessionK, intcon(0), TypeAryPtr::OOPS, /* set_ctrl */ true);
6235#else
6236 Node* objAESCryptKey = load_field_from_object(aescrypt_object, "K", "[I");
6237#endif // PPC64
6238 assert (objAESCryptKey != NULL, "wrong version of com.sun.crypto.provider.AESCrypt");
6239 if (objAESCryptKey == NULL) return (Node *) NULL;
6240
6241 // now have the array, need to get the start address of the K array
6242 Node* k_start = array_element_address(objAESCryptKey, intcon(0), T_INT);
6243 return k_start;
6244}
6245
6246//----------------------------inline_cipherBlockChaining_AESCrypt_predicate----------------------------
6247// Return node representing slow path of predicate check.
6248// the pseudo code we want to emulate with this predicate is:
6249// for encryption:
6250// if (embeddedCipherObj instanceof AESCrypt) do_intrinsic, else do_javapath
6251// for decryption:
6252// if ((embeddedCipherObj instanceof AESCrypt) && (cipher!=plain)) do_intrinsic, else do_javapath
6253// note cipher==plain is more conservative than the original java code but that's OK
6254//
6255Node* LibraryCallKit::inline_cipherBlockChaining_AESCrypt_predicate(bool decrypting) {
6256 // The receiver was checked for NULL already.
6257 Node* objCBC = argument(0);
6258
6259 Node* src = argument(1);
6260 Node* dest = argument(4);
6261
6262 // Load embeddedCipher field of CipherBlockChaining object.
6263 Node* embeddedCipherObj = load_field_from_object(objCBC, "embeddedCipher", "Lcom/sun/crypto/provider/SymmetricCipher;");
6264
6265 // get AESCrypt klass for instanceOf check
6266 // AESCrypt might not be loaded yet if some other SymmetricCipher got us to this compile point
6267 // will have same classloader as CipherBlockChaining object
6268 const TypeInstPtr* tinst = _gvn.type(objCBC)->isa_instptr();
6269 assert(tinst != NULL, "CBCobj is null");
6270 assert(tinst->klass()->is_loaded(), "CBCobj is not loaded");
6271
6272 // we want to do an instanceof comparison against the AESCrypt class
6273 ciKlass* klass_AESCrypt = tinst->klass()->as_instance_klass()->find_klass(ciSymbol::make("com/sun/crypto/provider/AESCrypt"));
6274 if (!klass_AESCrypt->is_loaded()) {
6275 // if AESCrypt is not even loaded, we never take the intrinsic fast path
6276 Node* ctrl = control();
6277 set_control(top()); // no regular fast path
6278 return ctrl;
6279 }
6280
6281 src = must_be_not_null(src, true);
6282 dest = must_be_not_null(dest, true);
6283
6284 // Resolve oops to stable for CmpP below.
6285 ciInstanceKlass* instklass_AESCrypt = klass_AESCrypt->as_instance_klass();
6286
6287 Node* instof = gen_instanceof(embeddedCipherObj, makecon(TypeKlassPtr::make(instklass_AESCrypt)));
6288 Node* cmp_instof = _gvn.transform(new CmpINode(instof, intcon(1)));
6289 Node* bool_instof = _gvn.transform(new BoolNode(cmp_instof, BoolTest::ne));
6290
6291 Node* instof_false = generate_guard(bool_instof, NULL, PROB_MIN);
6292
6293 // for encryption, we are done
6294 if (!decrypting)
6295 return instof_false; // even if it is NULL
6296
6297 // for decryption, we need to add a further check to avoid
6298 // taking the intrinsic path when cipher and plain are the same
6299 // see the original java code for why.
6300 RegionNode* region = new RegionNode(3);
6301 region->init_req(1, instof_false);
6302
6303 Node* cmp_src_dest = _gvn.transform(new CmpPNode(src, dest));
6304 Node* bool_src_dest = _gvn.transform(new BoolNode(cmp_src_dest, BoolTest::eq));
6305 Node* src_dest_conjoint = generate_guard(bool_src_dest, NULL, PROB_MIN);
6306 region->init_req(2, src_dest_conjoint);
6307
6308 record_for_igvn(region);
6309 return _gvn.transform(region);
6310}
6311
6312//----------------------------inline_electronicCodeBook_AESCrypt_predicate----------------------------
6313// Return node representing slow path of predicate check.
6314// the pseudo code we want to emulate with this predicate is:
6315// for encryption:
6316// if (embeddedCipherObj instanceof AESCrypt) do_intrinsic, else do_javapath
6317// for decryption:
6318// if ((embeddedCipherObj instanceof AESCrypt) && (cipher!=plain)) do_intrinsic, else do_javapath
6319// note cipher==plain is more conservative than the original java code but that's OK
6320//
6321Node* LibraryCallKit::inline_electronicCodeBook_AESCrypt_predicate(bool decrypting) {
6322 // The receiver was checked for NULL already.
6323 Node* objECB = argument(0);
6324
6325 // Load embeddedCipher field of ElectronicCodeBook object.
6326 Node* embeddedCipherObj = load_field_from_object(objECB, "embeddedCipher", "Lcom/sun/crypto/provider/SymmetricCipher;");
6327
6328 // get AESCrypt klass for instanceOf check
6329 // AESCrypt might not be loaded yet if some other SymmetricCipher got us to this compile point
6330 // will have same classloader as ElectronicCodeBook object
6331 const TypeInstPtr* tinst = _gvn.type(objECB)->isa_instptr();
6332 assert(tinst != NULL, "ECBobj is null");
6333 assert(tinst->klass()->is_loaded(), "ECBobj is not loaded");
6334
6335 // we want to do an instanceof comparison against the AESCrypt class
6336 ciKlass* klass_AESCrypt = tinst->klass()->as_instance_klass()->find_klass(ciSymbol::make("com/sun/crypto/provider/AESCrypt"));
6337 if (!klass_AESCrypt->is_loaded()) {
6338 // if AESCrypt is not even loaded, we never take the intrinsic fast path
6339 Node* ctrl = control();
6340 set_control(top()); // no regular fast path
6341 return ctrl;
6342 }
6343 ciInstanceKlass* instklass_AESCrypt = klass_AESCrypt->as_instance_klass();
6344
6345 Node* instof = gen_instanceof(embeddedCipherObj, makecon(TypeKlassPtr::make(instklass_AESCrypt)));
6346 Node* cmp_instof = _gvn.transform(new CmpINode(instof, intcon(1)));
6347 Node* bool_instof = _gvn.transform(new BoolNode(cmp_instof, BoolTest::ne));
6348
6349 Node* instof_false = generate_guard(bool_instof, NULL, PROB_MIN);
6350
6351 // for encryption, we are done
6352 if (!decrypting)
6353 return instof_false; // even if it is NULL
6354
6355 // for decryption, we need to add a further check to avoid
6356 // taking the intrinsic path when cipher and plain are the same
6357 // see the original java code for why.
6358 RegionNode* region = new RegionNode(3);
6359 region->init_req(1, instof_false);
6360 Node* src = argument(1);
6361 Node* dest = argument(4);
6362 Node* cmp_src_dest = _gvn.transform(new CmpPNode(src, dest));
6363 Node* bool_src_dest = _gvn.transform(new BoolNode(cmp_src_dest, BoolTest::eq));
6364 Node* src_dest_conjoint = generate_guard(bool_src_dest, NULL, PROB_MIN);
6365 region->init_req(2, src_dest_conjoint);
6366
6367 record_for_igvn(region);
6368 return _gvn.transform(region);
6369}
6370
6371//----------------------------inline_counterMode_AESCrypt_predicate----------------------------
6372// Return node representing slow path of predicate check.
6373// the pseudo code we want to emulate with this predicate is:
6374// for encryption:
6375// if (embeddedCipherObj instanceof AESCrypt) do_intrinsic, else do_javapath
6376// for decryption:
6377// if ((embeddedCipherObj instanceof AESCrypt) && (cipher!=plain)) do_intrinsic, else do_javapath
6378// note cipher==plain is more conservative than the original java code but that's OK
6379//
6380
6381Node* LibraryCallKit::inline_counterMode_AESCrypt_predicate() {
6382 // The receiver was checked for NULL already.
6383 Node* objCTR = argument(0);
6384
6385 // Load embeddedCipher field of CipherBlockChaining object.
6386 Node* embeddedCipherObj = load_field_from_object(objCTR, "embeddedCipher", "Lcom/sun/crypto/provider/SymmetricCipher;");
6387
6388 // get AESCrypt klass for instanceOf check
6389 // AESCrypt might not be loaded yet if some other SymmetricCipher got us to this compile point
6390 // will have same classloader as CipherBlockChaining object
6391 const TypeInstPtr* tinst = _gvn.type(objCTR)->isa_instptr();
6392 assert(tinst != NULL, "CTRobj is null");
6393 assert(tinst->klass()->is_loaded(), "CTRobj is not loaded");
6394
6395 // we want to do an instanceof comparison against the AESCrypt class
6396 ciKlass* klass_AESCrypt = tinst->klass()->as_instance_klass()->find_klass(ciSymbol::make("com/sun/crypto/provider/AESCrypt"));
6397 if (!klass_AESCrypt->is_loaded()) {
6398 // if AESCrypt is not even loaded, we never take the intrinsic fast path
6399 Node* ctrl = control();
6400 set_control(top()); // no regular fast path
6401 return ctrl;
6402 }
6403
6404 ciInstanceKlass* instklass_AESCrypt = klass_AESCrypt->as_instance_klass();
6405 Node* instof = gen_instanceof(embeddedCipherObj, makecon(TypeKlassPtr::make(instklass_AESCrypt)));
6406 Node* cmp_instof = _gvn.transform(new CmpINode(instof, intcon(1)));
6407 Node* bool_instof = _gvn.transform(new BoolNode(cmp_instof, BoolTest::ne));
6408  Node* instof_false = generate_guard(bool_instof, NULL, PROB_MIN);
6409
6410 return instof_false; // even if it is NULL
6411}
6412
6413//------------------------------inline_ghash_processBlocks
6414bool LibraryCallKit::inline_ghash_processBlocks() {
6415 address stubAddr;
6416 const char *stubName;
6417  assert(UseGHASHIntrinsics, "need GHASH intrinsics support");
6418
6419 stubAddr = StubRoutines::ghash_processBlocks();
6420 stubName = "ghash_processBlocks";
6421
6422 Node* data = argument(0);
6423 Node* offset = argument(1);
6424 Node* len = argument(2);
6425 Node* state = argument(3);
6426 Node* subkeyH = argument(4);
6427
6428 state = must_be_not_null(state, true);
6429 subkeyH = must_be_not_null(subkeyH, true);
6430 data = must_be_not_null(data, true);
6431
6432 Node* state_start = array_element_address(state, intcon(0), T_LONG);
6433  assert(state_start, "state is NULL");
6434  Node* subkeyH_start = array_element_address(subkeyH, intcon(0), T_LONG);
6435  assert(subkeyH_start, "subkeyH is NULL");
6436  Node* data_start = array_element_address(data, offset, T_BYTE);
6437  assert(data_start, "data is NULL");
6438
6439 Node* ghash = make_runtime_call(RC_LEAF|RC_NO_FP,
6440 OptoRuntime::ghash_processBlocks_Type(),
6441 stubAddr, stubName, TypePtr::BOTTOM,
6442 state_start, subkeyH_start, data_start, len);
6443 return true;
6444}
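
// Illustrative sketch, not part of library_call.cpp: what the repeated
// array_element_address() calls above compute -- the address of element `index`
// of a Java array, i.e. object base + array header + (index << log2(element size)).
// The 16-byte header below is only an assumption for illustration; the real
// offset comes from arrayOopDesc::base_offset_in_bytes() for the element type.
#include <cstdint>
#include <cstdio>

static std::uintptr_t element_address(std::uintptr_t array_base,
                                      std::size_t header_bytes,
                                      std::size_t index,
                                      unsigned log2_elem_size) {
  return array_base + header_bytes + (index << log2_elem_size);
}

int main() {
  // e.g. data_start = array_element_address(data, offset, T_BYTE) with offset == 5
  std::printf("0x%zx\n", (std::size_t)element_address(0x1000, 16, 5, 0));  // byte[]: 0x1015
  // e.g. state_start = array_element_address(state, intcon(0), T_LONG)
  std::printf("0x%zx\n", (std::size_t)element_address(0x1000, 16, 0, 3));  // long[]: 0x1010
  return 0;
}
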
6445
6446bool LibraryCallKit::inline_base64_encodeBlock() {
6447 address stubAddr;
6448 const char *stubName;
6449  assert(UseBASE64Intrinsics, "need Base64 intrinsics support");
6450  assert(callee()->signature()->size() == 6, "base64_encodeBlock has 6 parameters");
6451 stubAddr = StubRoutines::base64_encodeBlock();
6452 stubName = "encodeBlock";
6453
6454 if (!stubAddr) return false;
6455 Node* base64obj = argument(0);
6456 Node* src = argument(1);
6457 Node* offset = argument(2);
6458 Node* len = argument(3);
6459 Node* dest = argument(4);
6460 Node* dp = argument(5);
6461 Node* isURL = argument(6);
6462
6463 src = must_be_not_null(src, true);
6464 dest = must_be_not_null(dest, true);
6465
6466 Node* src_start = array_element_address(src, intcon(0), T_BYTE);
6467  assert(src_start, "source array is NULL");
6468  Node* dest_start = array_element_address(dest, intcon(0), T_BYTE);
6469  assert(dest_start, "destination array is NULL");
6470
6471 Node* base64 = make_runtime_call(RC_LEAF,
6472 OptoRuntime::base64_encodeBlock_Type(),
6473 stubAddr, stubName, TypePtr::BOTTOM,
6474 src_start, offset, len, dest_start, dp, isURL);
6475 return true;
6476}
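
// Illustrative sketch, not part of library_call.cpp: the per-group arithmetic a
// Base64 encodeBlock stub boils down to -- every 3 source bytes become 4 output
// characters, with '+' and '/' swapped for '-' and '_' when isURL is set. This
// is a plain reference rendering, not the HotSpot stub itself.
#include <cstdio>

static void encode_group(const unsigned char b[3], char out[4], bool isURL) {
  static const char std_tbl[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
  static const char url_tbl[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_";
  const char* tbl = isURL ? url_tbl : std_tbl;
  out[0] = tbl[  b[0] >> 2 ];
  out[1] = tbl[((b[0] & 0x03) << 4) | (b[1] >> 4)];
  out[2] = tbl[((b[1] & 0x0f) << 2) | (b[2] >> 6)];
  out[3] = tbl[  b[2] & 0x3f ];
}

int main() {
  unsigned char in[3] = { 'M', 'a', 'n' };
  char out[5] = { 0 };
  encode_group(in, out, false);
  std::printf("%s\n", out);  // prints "TWFu"
  return 0;
}
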
6477
6478bool LibraryCallKit::inline_base64_decodeBlock() {
6479 address stubAddr;
6480 const char *stubName;
6481  assert(UseBASE64Intrinsics, "need Base64 intrinsics support");
6482  assert(callee()->signature()->size() == 7, "base64_decodeBlock has 7 parameters");
6483 stubAddr = StubRoutines::base64_decodeBlock();
6484 stubName = "decodeBlock";
6485
6486 if (!stubAddr) return false;
6487 Node* base64obj = argument(0);
6488 Node* src = argument(1);
6489 Node* src_offset = argument(2);
6490 Node* len = argument(3);
6491 Node* dest = argument(4);
6492 Node* dest_offset = argument(5);
6493 Node* isURL = argument(6);
6494 Node* isMIME = argument(7);
6495
6496 src = must_be_not_null(src, true);
6497 dest = must_be_not_null(dest, true);
6498
6499 Node* src_start = array_element_address(src, intcon(0), T_BYTE);
6500  assert(src_start, "source array is NULL");
6501  Node* dest_start = array_element_address(dest, intcon(0), T_BYTE);
6502  assert(dest_start, "destination array is NULL");
6503
6504 Node* call = make_runtime_call(RC_LEAF,
6505 OptoRuntime::base64_decodeBlock_Type(),
6506 stubAddr, stubName, TypePtr::BOTTOM,
6507 src_start, src_offset, len, dest_start, dest_offset, isURL, isMIME);
6508 Node* result = _gvn.transform(new ProjNode(call, TypeFunc::Parms));
6509 set_result(result);
6510 return true;
6511}
6512
6513//------------------------------inline_digestBase_implCompress-----------------------
6514//
6515// Calculate MD5 for single-block byte[] array.
6516// void com.sun.security.provider.MD5.implCompress(byte[] buf, int ofs)
6517//
6518// Calculate SHA (i.e., SHA-1) for single-block byte[] array.
6519// void com.sun.security.provider.SHA.implCompress(byte[] buf, int ofs)
6520//
6521 // Calculate SHA2 (i.e., SHA-224 or SHA-256) for single-block byte[] array.
6522// void com.sun.security.provider.SHA2.implCompress(byte[] buf, int ofs)
6523//
6524// Calculate SHA5 (i.e., SHA-384 or SHA-512) for single-block byte[] array.
6525// void com.sun.security.provider.SHA5.implCompress(byte[] buf, int ofs)
6526//
6527// Calculate SHA3 (i.e., SHA3-224 or SHA3-256 or SHA3-384 or SHA3-512) for single-block byte[] array.
6528// void com.sun.security.provider.SHA3.implCompress(byte[] buf, int ofs)
6529//
6530bool LibraryCallKit::inline_digestBase_implCompress(vmIntrinsics::ID id) {
6531  assert(callee()->signature()->size() == 2, "sha_implCompress has 2 parameters");
6532
6533 Node* digestBase_obj = argument(0);
6534 Node* src = argument(1); // type oop
6535 Node* ofs = argument(2); // type int
6536
6537 const Type* src_type = src->Value(&_gvn);
6538 const TypeAryPtr* top_src = src_type->isa_aryptr();
6539  if (top_src == NULL || top_src->klass() == NULL) {
6540 // failed array check
6541 return false;
6542 }
6543 // Figure out the size and type of the elements we will be copying.
6544 BasicType src_elem = src_type->isa_aryptr()->klass()->as_array_klass()->element_type()->basic_type();
6545 if (src_elem != T_BYTE) {
6546 return false;
6547 }
6548 // 'src_start' points to src array + offset
6549 src = must_be_not_null(src, true);
6550 Node* src_start = array_element_address(src, ofs, src_elem);
6551  Node* state = NULL;
6552  Node* digest_length = NULL;
6553 address stubAddr;
6554 const char *stubName;
6555
6556 switch(id) {
6557 case vmIntrinsics::_md5_implCompress:
6558    assert(UseMD5Intrinsics, "need MD5 instruction support");
6559 state = get_state_from_digest_object(digestBase_obj, "[I");
6560 stubAddr = StubRoutines::md5_implCompress();
6561 stubName = "md5_implCompress";
6562 break;
6563 case vmIntrinsics::_sha_implCompress:
6564    assert(UseSHA1Intrinsics, "need SHA1 instruction support");
6565 state = get_state_from_digest_object(digestBase_obj, "[I");
6566 stubAddr = StubRoutines::sha1_implCompress();
6567 stubName = "sha1_implCompress";
6568 break;
6569 case vmIntrinsics::_sha2_implCompress:
6570    assert(UseSHA256Intrinsics, "need SHA256 instruction support");
6571 state = get_state_from_digest_object(digestBase_obj, "[I");
6572 stubAddr = StubRoutines::sha256_implCompress();
6573 stubName = "sha256_implCompress";
6574 break;
6575 case vmIntrinsics::_sha5_implCompress:
6576    assert(UseSHA512Intrinsics, "need SHA512 instruction support");
6577 state = get_state_from_digest_object(digestBase_obj, "[J");
6578 stubAddr = StubRoutines::sha512_implCompress();
6579 stubName = "sha512_implCompress";
6580 break;
6581 case vmIntrinsics::_sha3_implCompress:
6582    assert(UseSHA3Intrinsics, "need SHA3 instruction support");
6583 state = get_state_from_digest_object(digestBase_obj, "[B");
6584 stubAddr = StubRoutines::sha3_implCompress();
6585 stubName = "sha3_implCompress";
6586 digest_length = get_digest_length_from_digest_object(digestBase_obj);
6587    if (digest_length == NULL) return false;
6588 break;
6589 default:
6590 fatal_unexpected_iid(id);
6591 return false;
6592 }
6593  if (state == NULL) return false;
6594
6595  assert(stubAddr != NULL, "Stub is generated");
6596  if (stubAddr == NULL) return false;
6597
6598  // Call the stub.
6599  Node* call;
6600  if (digest_length == NULL) {
6601 call = make_runtime_call(RC_LEAF|RC_NO_FP, OptoRuntime::digestBase_implCompress_Type(false),
6602 stubAddr, stubName, TypePtr::BOTTOM,
6603 src_start, state);
6604 } else {
6605 call = make_runtime_call(RC_LEAF|RC_NO_FP, OptoRuntime::digestBase_implCompress_Type(true),
6606 stubAddr, stubName, TypePtr::BOTTOM,
6607 src_start, state, digest_length);
6608 }
6609
6610 return true;
6611}
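
// Illustrative sketch, not part of library_call.cpp: the dispatch the switch
// above performs, as a plain table. Each digest intrinsic reads its internal
// state from a differently typed array ("[I" = int[], "[J" = long[], "[B" =
// byte[]) and targets its own stub; only SHA3 additionally needs digestLength.
#include <cstdio>

struct DigestDispatch {
  const char* stub_name;     // as named in the switch above
  const char* state_type;    // JVM field descriptor of the 'state' array
  bool needs_digest_length;  // true only for the SHA3 case
};

static const DigestDispatch kDigestTable[] = {
  { "md5_implCompress",    "[I", false },
  { "sha1_implCompress",   "[I", false },
  { "sha256_implCompress", "[I", false },
  { "sha512_implCompress", "[J", false },
  { "sha3_implCompress",   "[B", true  },
};

int main() {
  for (const DigestDispatch& d : kDigestTable) {
    std::printf("%-20s state=%s digestLength=%s\n",
                d.stub_name, d.state_type, d.needs_digest_length ? "yes" : "no");
  }
  return 0;
}
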
6612
6613//------------------------------inline_digestBase_implCompressMB-----------------------
6614//
6615// Calculate MD5/SHA/SHA2/SHA5/SHA3 for multi-block byte[] array.
6616// int com.sun.security.provider.DigestBase.implCompressMultiBlock(byte[] b, int ofs, int limit)
6617//
6618bool LibraryCallKit::inline_digestBase_implCompressMB(int predicate) {
6619  assert(UseMD5Intrinsics || UseSHA1Intrinsics || UseSHA256Intrinsics || UseSHA512Intrinsics || UseSHA3Intrinsics,
6620         "need MD5/SHA1/SHA256/SHA512/SHA3 instruction support");
6621  assert((uint)predicate < 5, "sanity");
6622  assert(callee()->signature()->size() == 3, "digestBase_implCompressMB has 3 parameters");
6623
6624 Node* digestBase_obj = argument(0); // The receiver was checked for NULL already.
6625 Node* src = argument(1); // byte[] array
6626 Node* ofs = argument(2); // type int
6627 Node* limit = argument(3); // type int
6628
6629 const Type* src_type = src->Value(&_gvn);
6630 const TypeAryPtr* top_src = src_type->isa_aryptr();
6631  if (top_src == NULL || top_src->klass() == NULL) {
6632 // failed array check
6633 return false;
6634 }
6635 // Figure out the size and type of the elements we will be copying.
6636 BasicType src_elem = src_type->isa_aryptr()->klass()->as_array_klass()->element_type()->basic_type();
6637 if (src_elem != T_BYTE) {
6638 return false;
6639 }
6640 // 'src_start' points to src array + offset
6641 src = must_be_not_null(src, false);
6642 Node* src_start = array_element_address(src, ofs, src_elem);
6643
6644  const char* klass_digestBase_name = NULL;
6645  const char* stub_name = NULL;
6646  address stub_addr = NULL;
6647 const char* state_type = "[I";
6648
6649 switch (predicate) {
6650 case 0:
6651 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_md5_implCompress)) {
6652 klass_digestBase_name = "sun/security/provider/MD5";
6653 stub_name = "md5_implCompressMB";
6654 stub_addr = StubRoutines::md5_implCompressMB();
6655 }
6656 break;
6657 case 1:
6658 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_sha_implCompress)) {
6659 klass_digestBase_name = "sun/security/provider/SHA";
6660 stub_name = "sha1_implCompressMB";
6661 stub_addr = StubRoutines::sha1_implCompressMB();
6662 }
6663 break;
6664 case 2:
6665 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_sha2_implCompress)) {
6666 klass_digestBase_name = "sun/security/provider/SHA2";
6667 stub_name = "sha256_implCompressMB";
6668 stub_addr = StubRoutines::sha256_implCompressMB();
6669 }
6670 break;
6671 case 3:
6672 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_sha5_implCompress)) {
6673 klass_digestBase_name = "sun/security/provider/SHA5";
6674 stub_name = "sha512_implCompressMB";
6675 stub_addr = StubRoutines::sha512_implCompressMB();
6676 state_type = "[J";
6677 }
6678 break;
6679 case 4:
6680 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_sha3_implCompress)) {
6681 klass_digestBase_name = "sun/security/provider/SHA3";
6682 stub_name = "sha3_implCompressMB";
6683 stub_addr = StubRoutines::sha3_implCompressMB();
6684 state_type = "[B";
6685 }
6686 break;
6687 default:
6688    fatal("unknown DigestBase intrinsic predicate: %d", predicate);
6689  }
6690  if (klass_digestBase_name != NULL) {
6691    assert(stub_addr != NULL, "Stub is generated");
6692    if (stub_addr == NULL) return false;
6693
6694 // get DigestBase klass to lookup for SHA klass
6695 const TypeInstPtr* tinst = _gvn.type(digestBase_obj)->isa_instptr();
6696    assert(tinst != NULL, "digestBase_obj is not instance???");
6697    assert(tinst->klass()->is_loaded(), "DigestBase is not loaded");
6698
6699 ciKlass* klass_digestBase = tinst->klass()->as_instance_klass()->find_klass(ciSymbol::make(klass_digestBase_name));
6700    assert(klass_digestBase->is_loaded(), "predicate checks that this class is loaded");
6701 ciInstanceKlass* instklass_digestBase = klass_digestBase->as_instance_klass();
6702 return inline_digestBase_implCompressMB(digestBase_obj, instklass_digestBase, state_type, stub_addr, stub_name, src_start, ofs, limit);
6703 }
6704 return false;
6705}
6706
6707//------------------------------inline_digestBase_implCompressMB-----------------------
6708bool LibraryCallKit::inline_digestBase_implCompressMB(Node* digestBase_obj, ciInstanceKlass* instklass_digestBase,
6709 const char* state_type, address stubAddr, const char *stubName,
6710 Node* src_start, Node* ofs, Node* limit) {
6711 const TypeKlassPtr* aklass = TypeKlassPtr::make(instklass_digestBase);
6712 const TypeOopPtr* xtype = aklass->as_instance_type()->cast_to_ptr_type(TypePtr::NotNull);
6713 Node* digest_obj = new CheckCastPPNode(control(), digestBase_obj, xtype);
6714 digest_obj = _gvn.transform(digest_obj);
6715
6716 Node* state = get_state_from_digest_object(digest_obj, state_type);
6717  if (state == NULL) return false;
6718
6719  Node* digest_length = NULL;
6720  if (strcmp("sha3_implCompressMB", stubName) == 0) {
6721    digest_length = get_digest_length_from_digest_object(digest_obj);
6722    if (digest_length == NULL) return false;
6723  }
6724
6725  // Call the stub.
6726  Node* call;
6727  if (digest_length == NULL) {
6728 call = make_runtime_call(RC_LEAF|RC_NO_FP,
6729 OptoRuntime::digestBase_implCompressMB_Type(false),
6730 stubAddr, stubName, TypePtr::BOTTOM,
6731 src_start, state, ofs, limit);
6732 } else {
6733 call = make_runtime_call(RC_LEAF|RC_NO_FP,
6734 OptoRuntime::digestBase_implCompressMB_Type(true),
6735 stubAddr, stubName, TypePtr::BOTTOM,
6736 src_start, state, digest_length, ofs, limit);
6737 }
6738
6739 // return ofs (int)
6740 Node* result = _gvn.transform(new ProjNode(call, TypeFunc::Parms));
6741 set_result(result);
6742
6743 return true;
6744}
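
// Illustrative sketch, not part of library_call.cpp: the contract of the
// multi-block call built above, written as a plain loop. Conceptually the stub
// compresses whole blocks between ofs and limit and returns the updated offset,
// which is the int the ProjNode result carries back. The exact bound convention
// for `limit` and the blockSize parameter are assumptions made for illustration.
#include <cstdio>

static int compress_multi_block_reference(const unsigned char* buf, int ofs, int limit,
                                          int blockSize,
                                          void (*compress_one)(const unsigned char*, int)) {
  for (; ofs + blockSize <= limit; ofs += blockSize) {
    compress_one(buf, ofs);  // one single-block compression per iteration
  }
  return ofs;                // new offset, i.e. the intrinsic's int result
}

static void dummy_compress(const unsigned char*, int ofs) {
  std::printf("compress block at %d\n", ofs);
}

int main() {
  unsigned char buf[200] = { 0 };
  std::printf("new ofs = %d\n",
              compress_multi_block_reference(buf, 0, 200, 64, dummy_compress));  // 192
  return 0;
}
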
6745
6746//------------------------------inline_galoisCounterMode_AESCrypt-----------------------
6747bool LibraryCallKit::inline_galoisCounterMode_AESCrypt() {
6748  assert(UseAES, "need AES instruction support");
6749  address stubAddr = NULL;
6750  const char *stubName = NULL;
6751 stubAddr = StubRoutines::galoisCounterMode_AESCrypt();
6752 stubName = "galoisCounterMode_AESCrypt";
6753
6754  if (stubAddr == NULL) return false;
6755
6756 Node* in = argument(0);
6757 Node* inOfs = argument(1);
6758 Node* len = argument(2);
6759 Node* ct = argument(3);
6760 Node* ctOfs = argument(4);
6761 Node* out = argument(5);
6762 Node* outOfs = argument(6);
6763 Node* gctr_object = argument(7);
6764 Node* ghash_object = argument(8);
6765
6766 // (1) in, ct and out are arrays.
6767 const Type* in_type = in->Value(&_gvn);
6768 const Type* ct_type = ct->Value(&_gvn);
6769 const Type* out_type = out->Value(&_gvn);
6770 const TypeAryPtr* top_in = in_type->isa_aryptr();
6771 const TypeAryPtr* top_ct = ct_type->isa_aryptr();
6772 const TypeAryPtr* top_out = out_type->isa_aryptr();
6773  assert(top_in != NULL && top_in->klass() != NULL &&
6774         top_ct != NULL && top_ct->klass() != NULL &&
6775         top_out != NULL && top_out->klass() != NULL, "args are strange");
6776
6777 // checks are the responsibility of the caller
6778 Node* in_start = in;
6779 Node* ct_start = ct;
6780 Node* out_start = out;
6781  if (inOfs != NULL || ctOfs != NULL || outOfs != NULL) {
6782    assert(inOfs != NULL && ctOfs != NULL && outOfs != NULL, "");
6783 in_start = array_element_address(in, inOfs, T_BYTE);
6784 ct_start = array_element_address(ct, ctOfs, T_BYTE);
6785 out_start = array_element_address(out, outOfs, T_BYTE);
6786 }
6787
6788 // if we are in this set of code, we "know" the embeddedCipher is an AESCrypt object
6789 // (because of the predicated logic executed earlier).
6790 // so we cast it here safely.
6791 // this requires a newer class file that has this array as littleEndian ints, otherwise we revert to java
6792 Node* embeddedCipherObj = load_field_from_object(gctr_object, "embeddedCipher", "Lcom/sun/crypto/provider/SymmetricCipher;");
6793 Node* counter = load_field_from_object(gctr_object, "counter", "[B");
6794 Node* subkeyHtbl = load_field_from_object(ghash_object, "subkeyHtbl", "[J");
6795 Node* state = load_field_from_object(ghash_object, "state", "[J");
6796
6797  if (embeddedCipherObj == NULL || counter == NULL || subkeyHtbl == NULL || state == NULL) {
6798 return false;
6799 }
6800 // cast it to what we know it will be at runtime
6801 const TypeInstPtr* tinst = _gvn.type(gctr_object)->isa_instptr();
6802  assert(tinst != NULL, "GCTR obj is null");
6803  assert(tinst->klass()->is_loaded(), "GCTR obj is not loaded");
6804  ciKlass* klass_AESCrypt = tinst->klass()->as_instance_klass()->find_klass(ciSymbol::make("com/sun/crypto/provider/AESCrypt"));
6805  assert(klass_AESCrypt->is_loaded(), "predicate checks that this class is loaded");
6806 ciInstanceKlass* instklass_AESCrypt = klass_AESCrypt->as_instance_klass();
6807 const TypeKlassPtr* aklass = TypeKlassPtr::make(instklass_AESCrypt);
6808 const TypeOopPtr* xtype = aklass->as_instance_type();
6809 Node* aescrypt_object = new CheckCastPPNode(control(), embeddedCipherObj, xtype);
6810 aescrypt_object = _gvn.transform(aescrypt_object);
6811 // we need to get the start of the aescrypt_object's expanded key array
6812 Node* k_start = get_key_start_from_aescrypt_object(aescrypt_object);
6813  if (k_start == NULL) return false;
6814
6815 // similarly, get the start address of the r vector
6816 Node* cnt_start = array_element_address(counter, intcon(0), T_BYTE);
6817 Node* state_start = array_element_address(state, intcon(0), T_LONG);
6818 Node* subkeyHtbl_start = array_element_address(subkeyHtbl, intcon(0), T_LONG);
6819
6820 ciKlass* klass = ciTypeArrayKlass::make(T_LONG);
6821 Node* klass_node = makecon(TypeKlassPtr::make(klass));
6822
6823 // Does this target support this intrinsic?
6824 if (Matcher::htbl_entries == -1) return false;
6825
6826 Node* subkeyHtbl_48_entries_start;
6827 if (Matcher::htbl_entries != 0) {
6828 // new array to hold 48 computed htbl entries
6829 Node* subkeyHtbl_48_entries = new_array(klass_node, intcon(Matcher::htbl_entries), 0);
6830    if (subkeyHtbl_48_entries == NULL) return false;
6831    subkeyHtbl_48_entries_start = array_element_address(subkeyHtbl_48_entries, intcon(0), T_LONG);
6832  } else {
6833    // This target doesn't need the extra-large Htbl.
6834    subkeyHtbl_48_entries_start = ConvL2X(intcon(0));
6835 }
6836
6837 // Call the stub, passing params
6838 Node* gcmCrypt = make_runtime_call(RC_LEAF|RC_NO_FP,
6839 OptoRuntime::galoisCounterMode_aescrypt_Type(),
6840 stubAddr, stubName, TypePtr::BOTTOM,
6841 in_start, len, ct_start, out_start, k_start, state_start, subkeyHtbl_start, subkeyHtbl_48_entries_start, cnt_start);
6842
6843 // return cipher length (int)
6844 Node* retvalue = _gvn.transform(new ProjNode(gcmCrypt, TypeFunc::Parms));
6845 set_result(retvalue);
6846 return true;
6847}
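
// Illustrative sketch, not part of library_call.cpp: the three-way decision on
// Matcher::htbl_entries made above -- -1 means the target does not support the
// GCM intrinsic at all, 0 means no extra subkey table is needed, and a positive
// value means a scratch long[htbl_entries] array (48 entries in the variable
// names above) is allocated and its element-0 address is passed to the stub.
#include <cstdio>
#include <vector>

static bool plan_gcm_scratch_table(int htbl_entries, std::vector<long>& scratch,
                                   bool& pass_scratch) {
  if (htbl_entries == -1) return false;                          // intrinsic unsupported
  if (htbl_entries == 0) { pass_scratch = false; return true; }  // no extra table needed
  scratch.assign(static_cast<std::size_t>(htbl_entries), 0L);    // fresh scratch array
  pass_scratch = true;
  return true;
}

int main() {
  std::vector<long> scratch;
  bool pass_scratch = false;
  bool supported = plan_gcm_scratch_table(48, scratch, pass_scratch);
  std::printf("supported=%d pass_scratch=%d entries=%zu\n",
              (int)supported, (int)pass_scratch, scratch.size());
  return 0;
}
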
6848
6849//----------------------------inline_galoisCounterMode_AESCrypt_predicate----------------------------
6850// Return node representing slow path of predicate check.
6851// the pseudo code we want to emulate with this predicate is:
6852// for encryption:
6853// if (embeddedCipherObj instanceof AESCrypt) do_intrinsic, else do_javapath
6854// for decryption:
6855// if ((embeddedCipherObj instanceof AESCrypt) && (cipher!=plain)) do_intrinsic, else do_javapath
6856// note cipher==plain is more conservative than the original java code but that's OK
6857//
6858
6859Node* LibraryCallKit::inline_galoisCounterMode_AESCrypt_predicate() {
6860 // The receiver was checked for NULL already.
6861 Node* objGCTR = argument(7);
6862 // Load embeddedCipher field of GCTR object.
6863 Node* embeddedCipherObj = load_field_from_object(objGCTR, "embeddedCipher", "Lcom/sun/crypto/provider/SymmetricCipher;");
6864  assert(embeddedCipherObj != NULL, "embeddedCipherObj is null");
6865
6866 // get AESCrypt klass for instanceOf check
6867 // AESCrypt might not be loaded yet if some other SymmetricCipher got us to this compile point
6868  // will have same classloader as the GCTR object
6869 const TypeInstPtr* tinst = _gvn.type(objGCTR)->isa_instptr();
6870  assert(tinst != NULL, "GCTR obj is null");
6871  assert(tinst->klass()->is_loaded(), "GCTR obj is not loaded");
6872
6873 // we want to do an instanceof comparison against the AESCrypt class
6874 ciKlass* klass_AESCrypt = tinst->klass()->as_instance_klass()->find_klass(ciSymbol::make("com/sun/crypto/provider/AESCrypt"));
6875 if (!klass_AESCrypt->is_loaded()) {
6876 // if AESCrypt is not even loaded, we never take the intrinsic fast path
6877 Node* ctrl = control();
6878 set_control(top()); // no regular fast path
6879 return ctrl;
6880 }
6881
6882 ciInstanceKlass* instklass_AESCrypt = klass_AESCrypt->as_instance_klass();
6883 Node* instof = gen_instanceof(embeddedCipherObj, makecon(TypeKlassPtr::make(instklass_AESCrypt)));
6884 Node* cmp_instof = _gvn.transform(new CmpINode(instof, intcon(1)));
6885 Node* bool_instof = _gvn.transform(new BoolNode(cmp_instof, BoolTest::ne));
6886  Node* instof_false = generate_guard(bool_instof, NULL, PROB_MIN);
6887
6888 return instof_false; // even if it is NULL
6889}
6890
6891//------------------------------get_state_from_digest_object-----------------------
6892Node * LibraryCallKit::get_state_from_digest_object(Node *digest_object, const char *state_type) {
6893 Node* digest_state = load_field_from_object(digest_object, "state", state_type);
6894  assert (digest_state != NULL, "wrong version of sun.security.provider.MD5/SHA/SHA2/SHA5/SHA3");
6895  if (digest_state == NULL) return (Node *) NULL;
6896
6897 // now have the array, need to get the start address of the state array
6898 Node* state = array_element_address(digest_state, intcon(0), T_INT);
6899 return state;
6900}
6901
6902//------------------------------get_digest_length_from_sha3_object----------------------------------
6903Node * LibraryCallKit::get_digest_length_from_digest_object(Node *digest_object) {
6904 Node* digest_length = load_field_from_object(digest_object, "digestLength", "I");
6905  assert (digest_length != NULL, "sanity");
6906 return digest_length;
6907}
6908
6909//----------------------------inline_digestBase_implCompressMB_predicate----------------------------
6910// Return node representing slow path of predicate check.
6911// the pseudo code we want to emulate with this predicate is:
6912// if (digestBaseObj instanceof MD5/SHA/SHA2/SHA5/SHA3) do_intrinsic, else do_javapath
6913//
6914Node* LibraryCallKit::inline_digestBase_implCompressMB_predicate(int predicate) {
6915  assert(UseMD5Intrinsics || UseSHA1Intrinsics || UseSHA256Intrinsics || UseSHA512Intrinsics || UseSHA3Intrinsics,
6916         "need MD5/SHA1/SHA256/SHA512/SHA3 instruction support");
6917  assert((uint)predicate < 5, "sanity");
6918
6919 // The receiver was checked for NULL already.
6920 Node* digestBaseObj = argument(0);
6921
6922 // get DigestBase klass for instanceOf check
6923 const TypeInstPtr* tinst = _gvn.type(digestBaseObj)->isa_instptr();
6924  assert(tinst != NULL, "digestBaseObj is null");
6925  assert(tinst->klass()->is_loaded(), "DigestBase is not loaded");
6926
6927  const char* klass_name = NULL;
6928 switch (predicate) {
6929 case 0:
6930 if (UseMD5Intrinsics) {
6931 // we want to do an instanceof comparison against the MD5 class
6932 klass_name = "sun/security/provider/MD5";
6933 }
6934 break;
6935 case 1:
6936 if (UseSHA1Intrinsics) {
6937 // we want to do an instanceof comparison against the SHA class
6938 klass_name = "sun/security/provider/SHA";
6939 }
6940 break;
6941 case 2:
6942 if (UseSHA256Intrinsics) {
6943 // we want to do an instanceof comparison against the SHA2 class
6944 klass_name = "sun/security/provider/SHA2";
6945 }
6946 break;
6947 case 3:
6948 if (UseSHA512Intrinsics) {
6949 // we want to do an instanceof comparison against the SHA5 class
6950 klass_name = "sun/security/provider/SHA5";
6951 }
6952 break;
6953 case 4:
6954 if (UseSHA3Intrinsics) {
6955 // we want to do an instanceof comparison against the SHA3 class
6956 klass_name = "sun/security/provider/SHA3";
6957 }
6958 break;
6959 default:
6960    fatal("unknown SHA intrinsic predicate: %d", predicate);
6961  }
6962
6963  ciKlass* klass = NULL;
6964  if (klass_name != NULL) {
6965    klass = tinst->klass()->as_instance_klass()->find_klass(ciSymbol::make(klass_name));
6966  }
6967  if ((klass == NULL) || !klass->is_loaded()) {
6968 // if none of MD5/SHA/SHA2/SHA5 is loaded, we never take the intrinsic fast path
6969 Node* ctrl = control();
6970 set_control(top()); // no intrinsic path
6971 return ctrl;
6972 }
6973 ciInstanceKlass* instklass = klass->as_instance_klass();
6974
6975 Node* instof = gen_instanceof(digestBaseObj, makecon(TypeKlassPtr::make(instklass)));
6976 Node* cmp_instof = _gvn.transform(new CmpINode(instof, intcon(1)));
6977 Node* bool_instof = _gvn.transform(new BoolNode(cmp_instof, BoolTest::ne));
6978  Node* instof_false = generate_guard(bool_instof, NULL, PROB_MIN);
6979
6980 return instof_false; // even if it is NULL
6981}
6982
6983//-------------inline_fma-----------------------------------
6984bool LibraryCallKit::inline_fma(vmIntrinsics::ID id) {
6985  Node *a = NULL;
6986  Node *b = NULL;
6987  Node *c = NULL;
6988  Node* result = NULL;
6989 switch (id) {
6990 case vmIntrinsics::_fmaD:
6991    assert(callee()->signature()->size() == 6, "fma has 3 parameters of size 2 each.");
6992 // no receiver since it is static method
6993 a = round_double_node(argument(0));
6994 b = round_double_node(argument(2));
6995 c = round_double_node(argument(4));
6996 result = _gvn.transform(new FmaDNode(control(), a, b, c));
6997 break;
6998 case vmIntrinsics::_fmaF:
6999    assert(callee()->signature()->size() == 3, "fma has 3 parameters of size 1 each.");
7000 a = argument(0);
7001 b = argument(1);
7002 c = argument(2);
7003 result = _gvn.transform(new FmaFNode(control(), a, b, c));
7004 break;
7005 default:
7006 fatal_unexpected_iid(id); break;
7007 }
7008 set_result(result);
7009 return true;
7010}
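
// Illustrative sketch, not part of library_call.cpp: what the FmaD/FmaF nodes
// compute -- fma(a, b, c) = a * b + c with a single rounding step, which can
// differ from the separately rounded a * b + c. std::fma is used here purely
// for illustration of the semantics.
#include <cmath>
#include <cstdio>

int main() {
  double a = 1.0 + 0x1.0p-52;          // 1 + 2^-52
  double b = 1.0 - 0x1.0p-52;          // 1 - 2^-52
  double c = -1.0;
  double fused   = std::fma(a, b, c);  // exact a*b + c = -2^-104 survives the single rounding
  double unfused = a * b + c;          // a*b rounds to 1.0 first, so this is exactly 0.0
  std::printf("fused=%g unfused=%g\n", fused, unfused);
  return 0;
}
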
7011
7012bool LibraryCallKit::inline_character_compare(vmIntrinsics::ID id) {
7013 // argument(0) is receiver
7014 Node* codePoint = argument(1);
7015  Node* n = NULL;
7016
7017 switch (id) {
7018 case vmIntrinsics::_isDigit :
7019 n = new DigitNode(control(), codePoint);
7020 break;
7021 case vmIntrinsics::_isLowerCase :
7022 n = new LowerCaseNode(control(), codePoint);
7023 break;
7024 case vmIntrinsics::_isUpperCase :
7025 n = new UpperCaseNode(control(), codePoint);
7026 break;
7027 case vmIntrinsics::_isWhitespace :
7028 n = new WhitespaceNode(control(), codePoint);
7029 break;
7030 default:
7031 fatal_unexpected_iid(id);
7032 }
7033
7034 set_result(_gvn.transform(n));
7035 return true;
7036}
7037
7038//------------------------------inline_fp_min_max------------------------------
7039bool LibraryCallKit::inline_fp_min_max(vmIntrinsics::ID id) {
7040/* DISABLED BECAUSE METHOD DATA ISN'T COLLECTED PER CALL-SITE, SEE JDK-8015416.
7041
7042 // The intrinsic should be used only when the API branches aren't predictable,
7043 // the last one performing the most important comparison. The following heuristic
7044 // uses the branch statistics to eventually bail out if necessary.
7045
7046 ciMethodData *md = callee()->method_data();
7047
7048 if ( md != NULL && md->is_mature() && md->invocation_count() > 0 ) {
7049 ciCallProfile cp = caller()->call_profile_at_bci(bci());
7050
7051 if ( ((double)cp.count()) / ((double)md->invocation_count()) < 0.8 ) {
7052 // Bail out if the call-site didn't contribute enough to the statistics.
7053 return false;
7054 }
7055
7056 uint taken = 0, not_taken = 0;
7057
7058 for (ciProfileData *p = md->first_data(); md->is_valid(p); p = md->next_data(p)) {
7059 if (p->is_BranchData()) {
7060 taken = ((ciBranchData*)p)->taken();
7061 not_taken = ((ciBranchData*)p)->not_taken();
7062 }
7063 }
7064
7065 double balance = (((double)taken) - ((double)not_taken)) / ((double)md->invocation_count());
7066 balance = balance < 0 ? -balance : balance;
7067 if ( balance > 0.2 ) {
7068 // Bail out if the most important branch is predictable enough.
7069 return false;
7070 }
7071 }
7072*/
7073
7074  Node *a = NULL;
7075  Node *b = NULL;
7076  Node *n = NULL;
7077 switch (id) {
7078 case vmIntrinsics::_maxF:
7079 case vmIntrinsics::_minF:
7080 case vmIntrinsics::_maxF_strict:
7081 case vmIntrinsics::_minF_strict:
7082    assert(callee()->signature()->size() == 2, "minF/maxF has 2 parameters of size 1 each.");
7083 a = argument(0);
7084 b = argument(1);
7085 break;
7086 case vmIntrinsics::_maxD:
7087 case vmIntrinsics::_minD:
7088 case vmIntrinsics::_maxD_strict:
7089 case vmIntrinsics::_minD_strict:
7090    assert(callee()->signature()->size() == 4, "minD/maxD has 2 parameters of size 2 each.");
7091 a = round_double_node(argument(0));
7092 b = round_double_node(argument(2));
7093 break;
7094 default:
7095 fatal_unexpected_iid(id);
7096 break;
7097 }
7098 switch (id) {
7099 case vmIntrinsics::_maxF:
7100 case vmIntrinsics::_maxF_strict:
7101 n = new MaxFNode(a, b);
7102 break;
7103 case vmIntrinsics::_minF:
7104 case vmIntrinsics::_minF_strict:
7105 n = new MinFNode(a, b);
7106 break;
7107 case vmIntrinsics::_maxD:
7108 case vmIntrinsics::_maxD_strict:
7109 n = new MaxDNode(a, b);
7110 break;
7111 case vmIntrinsics::_minD:
7112 case vmIntrinsics::_minD_strict:
7113 n = new MinDNode(a, b);
7114 break;
7115 default:
7116 fatal_unexpected_iid(id);
7117 break;
7118 }
7119 set_result(_gvn.transform(n));
7120 return true;
7121}
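
// Illustrative sketch, not part of library_call.cpp: the disabled profile
// heuristic quoted in the comment block above, as plain arithmetic. It would
// bail out of the intrinsic when the call site contributes less than 80% of the
// method's invocations, or when the decisive branch is already predictable
// (|taken - not_taken| exceeds 20% of the invocation count).
#include <cmath>
#include <cstdio>

static bool bail_out_of_min_max_intrinsic(double callsite_count, double invocation_count,
                                          double taken, double not_taken) {
  if (invocation_count <= 0.0) return false;                 // no mature profile: keep it
  if (callsite_count / invocation_count < 0.8) return true;  // call site under-represented
  double balance = std::fabs(taken - not_taken) / invocation_count;
  return balance > 0.2;                                      // branch predictable enough
}

int main() {
  std::printf("%d\n", bail_out_of_min_max_intrinsic(1000, 1000, 520, 480));  // 0: keep intrinsic
  std::printf("%d\n", bail_out_of_min_max_intrinsic(1000, 1000, 900, 100));  // 1: bail out
  return 0;
}
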
7122
7123bool LibraryCallKit::inline_profileBoolean() {
7124 Node* counts = argument(1);
7125  const TypeAryPtr* ary = NULL;
7126  ciArray* aobj = NULL;
7127  if (counts->is_Con()
7128      && (ary = counts->bottom_type()->isa_aryptr()) != NULL
7129      && (aobj = ary->const_oop()->as_array()) != NULL
7130 && (aobj->length() == 2)) {
7131 // Profile is int[2] where [0] and [1] correspond to false and true value occurrences respectively.
7132 jint false_cnt = aobj->element_value(0).as_int();
7133 jint true_cnt = aobj->element_value(1).as_int();
7134
7135    if (C->log() != NULL) {
7136 C->log()->elem("observe source='profileBoolean' false='%d' true='%d'",
7137 false_cnt, true_cnt);
7138 }
7139
7140 if (false_cnt + true_cnt == 0) {
7141 // According to profile, never executed.
7142 uncommon_trap_exact(Deoptimization::Reason_intrinsic,
7143 Deoptimization::Action_reinterpret);
7144 return true;
7145 }
7146
7147 // result is a boolean (0 or 1) and its profile (false_cnt & true_cnt)
7148 // is a number of each value occurrences.
7149 Node* result = argument(0);
7150 if (false_cnt == 0 || true_cnt == 0) {
7151 // According to profile, one value has been never seen.
7152 int expected_val = (false_cnt == 0) ? 1 : 0;
7153
7154 Node* cmp = _gvn.transform(new CmpINode(result, intcon(expected_val)));
7155 Node* test = _gvn.transform(new BoolNode(cmp, BoolTest::eq));
7156
7157      IfNode* check = create_and_map_if(control(), test, PROB_ALWAYS, COUNT_UNKNOWN);
7158 Node* fast_path = _gvn.transform(new IfTrueNode(check));
7159 Node* slow_path = _gvn.transform(new IfFalseNode(check));
7160
7161 { // Slow path: uncommon trap for never seen value and then reexecute
7162 // MethodHandleImpl::profileBoolean() to bump the count, so JIT knows
7163 // the value has been seen at least once.
7164 PreserveJVMState pjvms(this);
7165 PreserveReexecuteState preexecs(this);
7166 jvms()->set_should_reexecute(true);
7167
7168 set_control(slow_path);
7169 set_i_o(i_o());
7170
7171 uncommon_trap_exact(Deoptimization::Reason_intrinsic,
7172 Deoptimization::Action_reinterpret);
7173 }
7174 // The guard for never seen value enables sharpening of the result and
7175 // returning a constant. It allows to eliminate branches on the same value
7176 // later on.
7177 set_control(fast_path);
7178 result = intcon(expected_val);
7179 }
7180 // Stop profiling.
7181 // MethodHandleImpl::profileBoolean() has profiling logic in its bytecode.
7182 // By replacing method body with profile data (represented as ProfileBooleanNode
7183 // on IR level) we effectively disable profiling.
7184 // It enables full speed execution once optimized code is generated.
7185 Node* profile = _gvn.transform(new ProfileBooleanNode(result, false_cnt, true_cnt));
7186 C->record_for_igvn(profile);
7187 set_result(profile);
7188 return true;
7189 } else {
7190 // Continue profiling.
7191 // Profile data isn't available at the moment. So, execute method's bytecode version.
7192 // Usually, when GWT LambdaForms are profiled it means that a stand-alone nmethod
7193 // is compiled and counters aren't available since corresponding MethodHandle
7194 // isn't a compile-time constant.
7195 return false;
7196 }
7197}
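
// Illustrative sketch, not part of library_call.cpp: the three outcomes the code
// above selects from the constant int[2] profile -- trap if the call was never
// reached, sharpen the result to a constant (behind a guard) if only one value
// was ever observed, otherwise just attach the counts as a ProfileBooleanNode.
#include <cstdio>

enum class ProfileAction { Trap, SharpenToFalse, SharpenToTrue, AttachProfile };

static ProfileAction classify_profile(int false_cnt, int true_cnt) {
  if (false_cnt + true_cnt == 0) return ProfileAction::Trap;  // never executed
  if (true_cnt == 0)  return ProfileAction::SharpenToFalse;   // only false ever seen
  if (false_cnt == 0) return ProfileAction::SharpenToTrue;    // only true ever seen
  return ProfileAction::AttachProfile;                        // both values seen
}

int main() {
  std::printf("%d %d %d %d\n",
              (int)classify_profile(0, 0), (int)classify_profile(7, 0),
              (int)classify_profile(0, 7), (int)classify_profile(3, 4));
  return 0;
}
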
7198
7199bool LibraryCallKit::inline_isCompileConstant() {
7200 Node* n = argument(0);
7201 set_result(n->is_Con() ? intcon(1) : intcon(0));
7202 return true;
7203}
7204
7205//------------------------------- inline_getObjectSize --------------------------------------
7206//
7207// Calculate the runtime size of the object/array.
7208// native long sun.instrument.InstrumentationImpl.getObjectSize0(long nativeAgent, Object objectToSize);
7209//
7210bool LibraryCallKit::inline_getObjectSize() {
7211 Node* obj = argument(3);
7212 Node* klass_node = load_object_klass(obj);
7213
7214 jint layout_con = Klass::_lh_neutral_value;
7215 Node* layout_val = get_layout_helper(klass_node, layout_con);
7216  int layout_is_con = (layout_val == NULL);
7217
7218 if (layout_is_con) {
7219 // Layout helper is constant, can figure out things at compile time.
7220
7221 if (Klass::layout_helper_is_instance(layout_con)) {
7222 // Instance case: layout_con contains the size itself.
7223 Node *size = longcon(Klass::layout_helper_size_in_bytes(layout_con));
7224 set_result(size);
7225 } else {
7226 // Array case: size is round(header + element_size*arraylength).
7227 // Since arraylength is different for every array instance, we have to
7228 // compute the whole thing at runtime.
7229
7230 Node* arr_length = load_array_length(obj);
7231
7232 int round_mask = MinObjAlignmentInBytes - 1;
7233 int hsize = Klass::layout_helper_header_size(layout_con);
7234 int eshift = Klass::layout_helper_log2_element_size(layout_con);
7235
7236      if ((round_mask & ~right_n_bits(eshift)) == 0) {
7237        round_mask = 0;  // strength-reduce it if it goes away completely
7238      }
7239      assert((hsize & right_n_bits(eshift)) == 0, "hsize is pre-rounded");
7240 Node* header_size = intcon(hsize + round_mask);
7241
7242      Node* lengthx = ConvI2X(arr_length);
7243      Node* headerx = ConvI2X(header_size);
7244
7245      Node* abody = lengthx;
7246      if (eshift != 0) {
7247        abody = _gvn.transform(new LShiftXNode(lengthx, intcon(eshift)));
7248      }
7249      Node* size = _gvn.transform( new AddXNode(headerx, abody) );
7250      if (round_mask != 0) {
7251        size = _gvn.transform( new AndXNode(size, MakeConX(~round_mask)) );
7252      }
7253      size = ConvX2L(size);
7254 set_result(size);
7255 }
7256 } else {
7257 // Layout helper is not constant, need to test for array-ness at runtime.
7258
7259 enum { _instance_path = 1, _array_path, PATH_LIMIT };
7260 RegionNode* result_reg = new RegionNode(PATH_LIMIT);
7261 PhiNode* result_val = new PhiNode(result_reg, TypeLong::LONG);
7262 record_for_igvn(result_reg);
7263
7264    Node* array_ctl = generate_array_guard(klass_node, NULL);
7265    if (array_ctl != NULL) {
7266 // Array case: size is round(header + element_size*arraylength).
7267 // Since arraylength is different for every array instance, we have to
7268 // compute the whole thing at runtime.
7269
7270 PreserveJVMState pjvms(this);
7271 set_control(array_ctl);
7272 Node* arr_length = load_array_length(obj);
7273
7274 int round_mask = MinObjAlignmentInBytes - 1;
7275 Node* mask = intcon(round_mask);
7276
7277 Node* hss = intcon(Klass::_lh_header_size_shift);
7278 Node* hsm = intcon(Klass::_lh_header_size_mask);
7279 Node* header_size = _gvn.transform(new URShiftINode(layout_val, hss));
7280 header_size = _gvn.transform(new AndINode(header_size, hsm));
7281 header_size = _gvn.transform(new AddINode(header_size, mask));
7282
7283 // There is no need to mask or shift this value.
7284 // The semantics of LShiftINode include an implicit mask to 0x1F.
7285      assert(Klass::_lh_log2_element_size_shift == 0, "use shift in place");
7286 Node* elem_shift = layout_val;
7287
7288      Node* lengthx = ConvI2X(arr_length);
7289      Node* headerx = ConvI2X(header_size);
7290
7291      Node* abody = _gvn.transform(new LShiftXNode(lengthx, elem_shift));
7292      Node* size = _gvn.transform(new AddXNode(headerx, abody));
7293      if (round_mask != 0) {
7294        size = _gvn.transform(new AndXNode(size, MakeConX(~round_mask)));
7295      }
7296      size = ConvX2L(size);
7297
7298 result_reg->init_req(_array_path, control());
7299 result_val->init_req(_array_path, size);
7300 }
7301
7302 if (!stopped()) {
7303 // Instance case: the layout helper gives us instance size almost directly,
7304 // but we need to mask out the _lh_instance_slow_path_bit.
7305      Node* size = ConvI2X(layout_val);
7306      assert((int) Klass::_lh_instance_slow_path_bit < BytesPerLong, "clear bit");
7307      Node* mask = MakeConX(~(intptr_t) right_n_bits(LogBytesPerLong));
7308      size = _gvn.transform(new AndXNode(size, mask));
7309      size = ConvX2L(size);
7310
7311 result_reg->init_req(_instance_path, control());
7312 result_val->init_req(_instance_path, size);
7313 }
7314
7315 set_result(result_reg, result_val);
7316 }
7317
7318 return true;
7319}
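
// Illustrative sketch, not part of library_call.cpp: the two size computations
// built above as IR, done directly in C++. For arrays the size is
// round_up(header + (length << log2_element_size)); for instances the layout
// helper already holds the size and only the low bits below BytesPerLong (which
// include the slow-path bit) are masked off. The 8-byte alignment is an
// assumption standing in for MinObjAlignmentInBytes.
#include <cstdint>
#include <cstdio>

static std::int64_t array_size_in_bytes(int header_size, int log2_elem_size,
                                        std::int64_t length, int align = 8) {
  std::int64_t round_mask = align - 1;
  std::int64_t size = header_size + (length << log2_elem_size) + round_mask;
  return size & ~round_mask;                // round up to the alignment
}

static std::int64_t instance_size_in_bytes(std::int64_t layout_helper) {
  return layout_helper & ~(std::int64_t)7;  // clear bits below BytesPerLong
}

int main() {
  std::printf("%lld\n", (long long)array_size_in_bytes(16, 0, 10));  // byte[10]: 16+10 -> 32
  std::printf("%lld\n", (long long)array_size_in_bytes(16, 3, 10));  // long[10]: 16+80 -> 96
  std::printf("%lld\n", (long long)instance_size_in_bytes(24));      // plain instance: 24
  return 0;
}
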
7320
7321//------------------------------- inline_blackhole --------------------------------------
7322//
7323// Make sure all arguments to this node are alive.
7324// This matches methods that were requested to be blackholed through compile commands.
7325//
7326bool LibraryCallKit::inline_blackhole() {
7327  assert(callee()->is_static(), "Should have been checked before: only static methods here");
7328  assert(callee()->is_empty(), "Should have been checked before: only empty methods here");
7329  assert(callee()->holder()->is_loaded(), "Should have been checked before: only methods for loaded classes here");
7330
7331 // Bind call arguments as blackhole arguments to keep them alive
7332 Node* bh = insert_mem_bar(Op_Blackhole);
7333 uint nargs = callee()->arg_size();
7334 for (uint i = 0; i < nargs; i++) {
7335 bh->add_req(argument(i));
7336 }
7337
7338 return true;
7339}
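
// Illustrative sketch, not part of library_call.cpp: the effect inline_blackhole()
// has, shown only by analogy. Wiring every argument into the Blackhole memory
// barrier keeps the values observably "used"; the GCC/Clang compiler-barrier
// idiom below keeps `v` alive in optimized C++ builds in a loosely similar way.
// It is an analogy for the intent, not how HotSpot implements the node.
static inline void blackhole(const void* p) {
  asm volatile("" : : "g"(p) : "memory");  // consumes the value, emits no code
}

int main() {
  int v = 42;
  blackhole(&v);  // v cannot be dead-code-eliminated before this point
  return 0;
}
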