Bug Summary

File: jdk/src/hotspot/share/runtime/atomic.hpp
Warning: line 499, column 32
Dereference of null pointer (loaded from variable 'dest')

Annotated Source Code

Press '?' to see keyboard shortcuts

clang -cc1 -triple x86_64-pc-linux-gnu -analyze -disable-free -disable-llvm-verifier -discard-value-names -main-file-name instanceKlass.cpp -analyzer-store=region -analyzer-opt-analyze-nested-blocks -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -mrelocation-model pic -pic-level 2 -mthread-model posix -fno-delete-null-pointer-checks -mframe-pointer=all -relaxed-aliasing -fmath-errno -fno-rounding-math -masm-verbose -mconstructor-aliases -munwind-tables -target-cpu x86-64 -dwarf-column-info -fno-split-dwarf-inlining -debugger-tuning=gdb -resource-dir /usr/lib/llvm-10/lib/clang/10.0.0 -I /home/daniel/Projects/java/jdk/build/linux-x86_64-server-fastdebug/hotspot/variant-server/libjvm/objs/precompiled -D __STDC_FORMAT_MACROS -D __STDC_LIMIT_MACROS -D __STDC_CONSTANT_MACROS -D _GNU_SOURCE -D _REENTRANT -D LIBC=gnu -D LINUX -D VM_LITTLE_ENDIAN -D _LP64=1 -D ASSERT -D CHECK_UNHANDLED_OOPS -D TARGET_ARCH_x86 -D INCLUDE_SUFFIX_OS=_linux -D INCLUDE_SUFFIX_CPU=_x86 -D INCLUDE_SUFFIX_COMPILER=_gcc -D TARGET_COMPILER_gcc -D AMD64 -D HOTSPOT_LIB_ARCH="amd64" -D COMPILER1 -D COMPILER2 -I /home/daniel/Projects/java/jdk/build/linux-x86_64-server-fastdebug/hotspot/variant-server/gensrc/adfiles -I /home/daniel/Projects/java/jdk/src/hotspot/share -I /home/daniel/Projects/java/jdk/src/hotspot/os/linux -I /home/daniel/Projects/java/jdk/src/hotspot/os/posix -I /home/daniel/Projects/java/jdk/src/hotspot/cpu/x86 -I /home/daniel/Projects/java/jdk/src/hotspot/os_cpu/linux_x86 -I 
/home/daniel/Projects/java/jdk/build/linux-x86_64-server-fastdebug/hotspot/variant-server/gensrc -I /home/daniel/Projects/java/jdk/src/hotspot/share/precompiled -I /home/daniel/Projects/java/jdk/src/hotspot/share/include -I /home/daniel/Projects/java/jdk/src/hotspot/os/posix/include -I /home/daniel/Projects/java/jdk/build/linux-x86_64-server-fastdebug/support/modules_include/java.base -I /home/daniel/Projects/java/jdk/build/linux-x86_64-server-fastdebug/support/modules_include/java.base/linux -I /home/daniel/Projects/java/jdk/src/java.base/share/native/libjimage -I /home/daniel/Projects/java/jdk/build/linux-x86_64-server-fastdebug/hotspot/variant-server/gensrc/adfiles -I /home/daniel/Projects/java/jdk/src/hotspot/share -I /home/daniel/Projects/java/jdk/src/hotspot/os/linux -I /home/daniel/Projects/java/jdk/src/hotspot/os/posix -I /home/daniel/Projects/java/jdk/src/hotspot/cpu/x86 -I /home/daniel/Projects/java/jdk/src/hotspot/os_cpu/linux_x86 -I /home/daniel/Projects/java/jdk/build/linux-x86_64-server-fastdebug/hotspot/variant-server/gensrc -D _FORTIFY_SOURCE=2 -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/7.5.0/../../../../include/c++/7.5.0 -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/7.5.0/../../../../include/x86_64-linux-gnu/c++/7.5.0 -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/7.5.0/../../../../include/x86_64-linux-gnu/c++/7.5.0 -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/7.5.0/../../../../include/c++/7.5.0/backward -internal-isystem /usr/local/include -internal-isystem /usr/lib/llvm-10/lib/clang/10.0.0/include -internal-externc-isystem /usr/include/x86_64-linux-gnu -internal-externc-isystem /include -internal-externc-isystem /usr/include -O3 -Wno-format-zero-length -Wno-unused-parameter -Wno-unused -Wno-parentheses -Wno-comment -Wno-unknown-pragmas -Wno-address -Wno-delete-non-virtual-dtor -Wno-char-subscripts -Wno-array-bounds -Wno-int-in-bool-context -Wno-ignored-qualifiers -Wno-missing-field-initializers -Wno-implicit-fallthrough -Wno-empty-body 
-Wno-strict-overflow -Wno-sequence-point -Wno-maybe-uninitialized -Wno-misleading-indentation -Wno-cast-function-type -Wno-shift-negative-value -std=c++14 -fdeprecated-macro -fdebug-compilation-dir /home/daniel/Projects/java/jdk/make/hotspot -ferror-limit 19 -fmessage-length 0 -fvisibility hidden -stack-protector 1 -fno-rtti -fgnuc-version=4.2.1 -fobjc-runtime=gcc -fdiagnostics-show-option -vectorize-loops -vectorize-slp -analyzer-output=html -faddrsig -o /home/daniel/Projects/java/scan/2021-12-21-193737-8510-1 -x c++ /home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp

/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp

1/*
2 * Copyright (c) 1997, 2021, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25#include "precompiled.hpp"
26#include "jvm.h"
27#include "cds/archiveUtils.hpp"
28#include "cds/classListWriter.hpp"
29#include "cds/metaspaceShared.hpp"
30#include "classfile/classFileParser.hpp"
31#include "classfile/classFileStream.hpp"
32#include "classfile/classLoader.hpp"
33#include "classfile/classLoaderData.inline.hpp"
34#include "classfile/javaClasses.hpp"
35#include "classfile/moduleEntry.hpp"
36#include "classfile/resolutionErrors.hpp"
37#include "classfile/symbolTable.hpp"
38#include "classfile/systemDictionary.hpp"
39#include "classfile/systemDictionaryShared.hpp"
40#include "classfile/verifier.hpp"
41#include "classfile/vmClasses.hpp"
42#include "classfile/vmSymbols.hpp"
43#include "code/codeCache.hpp"
44#include "code/dependencyContext.hpp"
45#include "compiler/compilationPolicy.hpp"
46#include "compiler/compileBroker.hpp"
47#include "gc/shared/collectedHeap.inline.hpp"
48#include "interpreter/oopMapCache.hpp"
49#include "interpreter/rewriter.hpp"
50#include "jvmtifiles/jvmti.h"
51#include "logging/log.hpp"
52#include "logging/logMessage.hpp"
53#include "logging/logStream.hpp"
54#include "memory/allocation.inline.hpp"
55#include "memory/iterator.inline.hpp"
56#include "memory/metadataFactory.hpp"
57#include "memory/metaspaceClosure.hpp"
58#include "memory/oopFactory.hpp"
59#include "memory/resourceArea.hpp"
60#include "memory/universe.hpp"
61#include "oops/fieldStreams.inline.hpp"
62#include "oops/constantPool.hpp"
63#include "oops/instanceClassLoaderKlass.hpp"
64#include "oops/instanceKlass.inline.hpp"
65#include "oops/instanceMirrorKlass.hpp"
66#include "oops/instanceOop.hpp"
67#include "oops/klass.inline.hpp"
68#include "oops/method.hpp"
69#include "oops/oop.inline.hpp"
70#include "oops/recordComponent.hpp"
71#include "oops/symbol.hpp"
72#include "prims/jvmtiExport.hpp"
73#include "prims/jvmtiRedefineClasses.hpp"
74#include "prims/jvmtiThreadState.hpp"
75#include "prims/methodComparator.hpp"
76#include "runtime/arguments.hpp"
77#include "runtime/atomic.hpp"
78#include "runtime/fieldDescriptor.inline.hpp"
79#include "runtime/handles.inline.hpp"
80#include "runtime/javaCalls.hpp"
81#include "runtime/mutexLocker.hpp"
82#include "runtime/orderAccess.hpp"
83#include "runtime/reflectionUtils.hpp"
84#include "runtime/thread.inline.hpp"
85#include "services/classLoadingService.hpp"
86#include "services/finalizerService.hpp"
87#include "services/threadService.hpp"
88#include "utilities/dtrace.hpp"
89#include "utilities/events.hpp"
90#include "utilities/macros.hpp"
91#include "utilities/stringUtils.hpp"
92#include "utilities/pair.hpp"
93#ifdef COMPILER11
94#include "c1/c1_Compiler.hpp"
95#endif
96#if INCLUDE_JFR1
97#include "jfr/jfrEvents.hpp"
98#endif
99
100#ifdef DTRACE_ENABLED
101
102
103#define HOTSPOT_CLASS_INITIALIZATION_required HOTSPOT_CLASS_INITIALIZATION_REQUIRED
104#define HOTSPOT_CLASS_INITIALIZATION_recursive HOTSPOT_CLASS_INITIALIZATION_RECURSIVE
105#define HOTSPOT_CLASS_INITIALIZATION_concurrent HOTSPOT_CLASS_INITIALIZATION_CONCURRENT
106#define HOTSPOT_CLASS_INITIALIZATION_erroneous HOTSPOT_CLASS_INITIALIZATION_ERRONEOUS
107#define HOTSPOT_CLASS_INITIALIZATION_super__failed HOTSPOT_CLASS_INITIALIZATION_SUPER_FAILED
108#define HOTSPOT_CLASS_INITIALIZATION_clinit HOTSPOT_CLASS_INITIALIZATION_CLINIT
109#define HOTSPOT_CLASS_INITIALIZATION_error HOTSPOT_CLASS_INITIALIZATION_ERROR
110#define HOTSPOT_CLASS_INITIALIZATION_end HOTSPOT_CLASS_INITIALIZATION_END
111#define DTRACE_CLASSINIT_PROBE(type, thread_type) \
112 { \
113 char* data = NULL__null; \
114 int len = 0; \
115 Symbol* clss_name = name(); \
116 if (clss_name != NULL__null) { \
117 data = (char*)clss_name->bytes(); \
118 len = clss_name->utf8_length(); \
119 } \
120 HOTSPOT_CLASS_INITIALIZATION_##type( \
121 data, len, (void*)class_loader(), thread_type); \
122 }
123
124#define DTRACE_CLASSINIT_PROBE_WAIT(type, thread_type, wait) \
125 { \
126 char* data = NULL__null; \
127 int len = 0; \
128 Symbol* clss_name = name(); \
129 if (clss_name != NULL__null) { \
130 data = (char*)clss_name->bytes(); \
131 len = clss_name->utf8_length(); \
132 } \
133 HOTSPOT_CLASS_INITIALIZATION_##type( \
134 data, len, (void*)class_loader(), thread_type, wait); \
135 }
136
137#else // ndef DTRACE_ENABLED
138
139#define DTRACE_CLASSINIT_PROBE(type, thread_type)
140#define DTRACE_CLASSINIT_PROBE_WAIT(type, thread_type, wait)
141
142#endif // ndef DTRACE_ENABLED
143
144bool InstanceKlass::_finalization_enabled = true;
145
146static inline bool is_class_loader(const Symbol* class_name,
147 const ClassFileParser& parser) {
148 assert(class_name != NULL, "invariant")do { if (!(class_name != __null)) { (*g_assert_poison) = 'X';
; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 148, "assert(" "class_name != __null" ") failed", "invariant"
); ::breakpoint(); } } while (0)
;
149
150 if (class_name == vmSymbols::java_lang_ClassLoader()) {
151 return true;
152 }
153
154 if (vmClasses::ClassLoader_klass_loaded()) {
155 const Klass* const super_klass = parser.super_klass();
156 if (super_klass != NULL__null) {
157 if (super_klass->is_subtype_of(vmClasses::ClassLoader_klass())) {
158 return true;
159 }
160 }
161 }
162 return false;
163}
164
165// private: called to verify that k is a static member of this nest.
166// We know that k is an instance class in the same package and hence the
167// same classloader.
168bool InstanceKlass::has_nest_member(JavaThread* current, InstanceKlass* k) const {
169 assert(!is_hidden(), "unexpected hidden class")do { if (!(!is_hidden())) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 169, "assert(" "!is_hidden()" ") failed", "unexpected hidden class"
); ::breakpoint(); } } while (0)
;
170 if (_nest_members == NULL__null || _nest_members == Universe::the_empty_short_array()) {
171 if (log_is_enabled(Trace, class, nestmates)(LogImpl<(LogTag::_class), (LogTag::_nestmates), (LogTag::
__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG
)>::is_level(LogLevel::Trace))
) {
172 ResourceMark rm(current);
173 log_trace(class, nestmates)(!(LogImpl<(LogTag::_class), (LogTag::_nestmates), (LogTag
::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::
__NO_TAG)>::is_level(LogLevel::Trace))) ? (void)0 : LogImpl
<(LogTag::_class), (LogTag::_nestmates), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG)
>::write<LogLevel::Trace>
("Checked nest membership of %s in non-nest-host class %s",
174 k->external_name(), this->external_name());
175 }
176 return false;
177 }
178
179 if (log_is_enabled(Trace, class, nestmates)(LogImpl<(LogTag::_class), (LogTag::_nestmates), (LogTag::
__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG
)>::is_level(LogLevel::Trace))
) {
180 ResourceMark rm(current);
181 log_trace(class, nestmates)(!(LogImpl<(LogTag::_class), (LogTag::_nestmates), (LogTag
::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::
__NO_TAG)>::is_level(LogLevel::Trace))) ? (void)0 : LogImpl
<(LogTag::_class), (LogTag::_nestmates), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG)
>::write<LogLevel::Trace>
("Checking nest membership of %s in %s",
182 k->external_name(), this->external_name());
183 }
184
185 // Check for the named class in _nest_members.
186 // We don't resolve, or load, any classes.
187 for (int i = 0; i < _nest_members->length(); i++) {
188 int cp_index = _nest_members->at(i);
189 Symbol* name = _constants->klass_name_at(cp_index);
190 if (name == k->name()) {
191 log_trace(class, nestmates)(!(LogImpl<(LogTag::_class), (LogTag::_nestmates), (LogTag
::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::
__NO_TAG)>::is_level(LogLevel::Trace))) ? (void)0 : LogImpl
<(LogTag::_class), (LogTag::_nestmates), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG)
>::write<LogLevel::Trace>
("- named class found at nest_members[%d] => cp[%d]", i, cp_index);
192 return true;
193 }
194 }
195 log_trace(class, nestmates)(!(LogImpl<(LogTag::_class), (LogTag::_nestmates), (LogTag
::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::
__NO_TAG)>::is_level(LogLevel::Trace))) ? (void)0 : LogImpl
<(LogTag::_class), (LogTag::_nestmates), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG)
>::write<LogLevel::Trace>
("- class is NOT a nest member!");
196 return false;
197}
198
199// Called to verify that k is a permitted subclass of this class
200bool InstanceKlass::has_as_permitted_subclass(const InstanceKlass* k) const {
201 Thread* current = Thread::current();
202 assert(k != NULL, "sanity check")do { if (!(k != __null)) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 202, "assert(" "k != __null" ") failed", "sanity check"); ::
breakpoint(); } } while (0)
;
203 assert(_permitted_subclasses != NULL && _permitted_subclasses != Universe::the_empty_short_array(),do { if (!(_permitted_subclasses != __null && _permitted_subclasses
!= Universe::the_empty_short_array())) { (*g_assert_poison) =
'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 204, "assert(" "_permitted_subclasses != __null && _permitted_subclasses != Universe::the_empty_short_array()"
") failed", "unexpected empty _permitted_subclasses array");
::breakpoint(); } } while (0)
204 "unexpected empty _permitted_subclasses array")do { if (!(_permitted_subclasses != __null && _permitted_subclasses
!= Universe::the_empty_short_array())) { (*g_assert_poison) =
'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 204, "assert(" "_permitted_subclasses != __null && _permitted_subclasses != Universe::the_empty_short_array()"
") failed", "unexpected empty _permitted_subclasses array");
::breakpoint(); } } while (0)
;
205
206 if (log_is_enabled(Trace, class, sealed)(LogImpl<(LogTag::_class), (LogTag::_sealed), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG)
>::is_level(LogLevel::Trace))
) {
207 ResourceMark rm(current);
208 log_trace(class, sealed)(!(LogImpl<(LogTag::_class), (LogTag::_sealed), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG)
>::is_level(LogLevel::Trace))) ? (void)0 : LogImpl<(LogTag
::_class), (LogTag::_sealed), (LogTag::__NO_TAG), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG)>::write<LogLevel
::Trace>
("Checking for permitted subclass of %s in %s",
209 k->external_name(), this->external_name());
210 }
211
212 // Check that the class and its super are in the same module.
213 if (k->module() != this->module()) {
214 ResourceMark rm(current);
215 log_trace(class, sealed)(!(LogImpl<(LogTag::_class), (LogTag::_sealed), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG)
>::is_level(LogLevel::Trace))) ? (void)0 : LogImpl<(LogTag
::_class), (LogTag::_sealed), (LogTag::__NO_TAG), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG)>::write<LogLevel
::Trace>
("Check failed for same module of permitted subclass %s and sealed class %s",
216 k->external_name(), this->external_name());
217 return false;
218 }
219
220 if (!k->is_public() && !is_same_class_package(k)) {
221 ResourceMark rm(current);
222 log_trace(class, sealed)(!(LogImpl<(LogTag::_class), (LogTag::_sealed), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG)
>::is_level(LogLevel::Trace))) ? (void)0 : LogImpl<(LogTag
::_class), (LogTag::_sealed), (LogTag::__NO_TAG), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG)>::write<LogLevel
::Trace>
("Check failed, subclass %s not public and not in the same package as sealed class %s",
223 k->external_name(), this->external_name());
224 return false;
225 }
226
227 for (int i = 0; i < _permitted_subclasses->length(); i++) {
228 int cp_index = _permitted_subclasses->at(i);
229 Symbol* name = _constants->klass_name_at(cp_index);
230 if (name == k->name()) {
231 log_trace(class, sealed)(!(LogImpl<(LogTag::_class), (LogTag::_sealed), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG)
>::is_level(LogLevel::Trace))) ? (void)0 : LogImpl<(LogTag
::_class), (LogTag::_sealed), (LogTag::__NO_TAG), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG)>::write<LogLevel
::Trace>
("- Found it at permitted_subclasses[%d] => cp[%d]", i, cp_index);
232 return true;
233 }
234 }
235 log_trace(class, sealed)(!(LogImpl<(LogTag::_class), (LogTag::_sealed), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG)
>::is_level(LogLevel::Trace))) ? (void)0 : LogImpl<(LogTag
::_class), (LogTag::_sealed), (LogTag::__NO_TAG), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG)>::write<LogLevel
::Trace>
("- class is NOT a permitted subclass!");
236 return false;
237}
238
239// Return nest-host class, resolving, validating and saving it if needed.
240// In cases where this is called from a thread that cannot do classloading
241// (such as a native JIT thread) then we simply return NULL, which in turn
242// causes the access check to return false. Such code will retry the access
243// from a more suitable environment later. Otherwise the _nest_host is always
244// set once this method returns.
245// Any errors from nest-host resolution must be preserved so they can be queried
246// from higher-level access checking code, and reported as part of access checking
247// exceptions.
248// VirtualMachineErrors are propagated with a NULL return.
249// Under any conditions where the _nest_host can be set to non-NULL the resulting
250// value of it and, if applicable, the nest host resolution/validation error,
251// are idempotent.
252InstanceKlass* InstanceKlass::nest_host(TRAPSJavaThread* __the_thread__) {
253 InstanceKlass* nest_host_k = _nest_host;
254 if (nest_host_k != NULL__null) {
255 return nest_host_k;
256 }
257
258 ResourceMark rm(THREAD__the_thread__);
259
260 // need to resolve and save our nest-host class.
261 if (_nest_host_index != 0) { // we have a real nest_host
262 // Before trying to resolve check if we're in a suitable context
263 bool can_resolve = THREAD__the_thread__->can_call_java();
264 if (!can_resolve && !_constants->tag_at(_nest_host_index).is_klass()) {
265 log_trace(class, nestmates)(!(LogImpl<(LogTag::_class), (LogTag::_nestmates), (LogTag
::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::
__NO_TAG)>::is_level(LogLevel::Trace))) ? (void)0 : LogImpl
<(LogTag::_class), (LogTag::_nestmates), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG)
>::write<LogLevel::Trace>
("Rejected resolution of nest-host of %s in unsuitable thread",
266 this->external_name());
267 return NULL__null; // sentinel to say "try again from a different context"
268 }
269
270 log_trace(class, nestmates)(!(LogImpl<(LogTag::_class), (LogTag::_nestmates), (LogTag
::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::
__NO_TAG)>::is_level(LogLevel::Trace))) ? (void)0 : LogImpl
<(LogTag::_class), (LogTag::_nestmates), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG)
>::write<LogLevel::Trace>
("Resolving nest-host of %s using cp entry for %s",
271 this->external_name(),
272 _constants->klass_name_at(_nest_host_index)->as_C_string());
273
274 Klass* k = _constants->klass_at(_nest_host_index, THREAD__the_thread__);
275 if (HAS_PENDING_EXCEPTION(((ThreadShadow*)__the_thread__)->has_pending_exception())) {
276 if (PENDING_EXCEPTION(((ThreadShadow*)__the_thread__)->pending_exception())->is_a(vmClasses::VirtualMachineError_klass())) {
277 return NULL__null; // propagate VMEs
278 }
279 stringStream ss;
280 char* target_host_class = _constants->klass_name_at(_nest_host_index)->as_C_string();
281 ss.print("Nest host resolution of %s with host %s failed: ",
282 this->external_name(), target_host_class);
283 java_lang_Throwable::print(PENDING_EXCEPTION(((ThreadShadow*)__the_thread__)->pending_exception()), &ss);
284 const char* msg = ss.as_string(true /* on C-heap */);
285 constantPoolHandle cph(THREAD__the_thread__, constants());
286 SystemDictionary::add_nest_host_error(cph, _nest_host_index, msg);
287 CLEAR_PENDING_EXCEPTION(((ThreadShadow*)__the_thread__)->clear_pending_exception(
))
;
288
289 log_trace(class, nestmates)(!(LogImpl<(LogTag::_class), (LogTag::_nestmates), (LogTag
::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::
__NO_TAG)>::is_level(LogLevel::Trace))) ? (void)0 : LogImpl
<(LogTag::_class), (LogTag::_nestmates), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG)
>::write<LogLevel::Trace>
("%s", msg);
290 } else {
291 // A valid nest-host is an instance class in the current package that lists this
292 // class as a nest member. If any of these conditions are not met the class is
293 // its own nest-host.
294 const char* error = NULL__null;
295
296 // JVMS 5.4.4 indicates package check comes first
297 if (is_same_class_package(k)) {
298 // Now check actual membership. We can't be a member if our "host" is
299 // not an instance class.
300 if (k->is_instance_klass()) {
301 nest_host_k = InstanceKlass::cast(k);
302 bool is_member = nest_host_k->has_nest_member(THREAD__the_thread__, this);
303 if (is_member) {
304 _nest_host = nest_host_k; // save resolved nest-host value
305
306 log_trace(class, nestmates)(!(LogImpl<(LogTag::_class), (LogTag::_nestmates), (LogTag
::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::
__NO_TAG)>::is_level(LogLevel::Trace))) ? (void)0 : LogImpl
<(LogTag::_class), (LogTag::_nestmates), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG)
>::write<LogLevel::Trace>
("Resolved nest-host of %s to %s",
307 this->external_name(), k->external_name());
308 return nest_host_k;
309 } else {
310 error = "current type is not listed as a nest member";
311 }
312 } else {
313 error = "host is not an instance class";
314 }
315 } else {
316 error = "types are in different packages";
317 }
318
319 // something went wrong, so record what and log it
320 {
321 stringStream ss;
322 ss.print("Type %s (loader: %s) is not a nest member of type %s (loader: %s): %s",
323 this->external_name(),
324 this->class_loader_data()->loader_name_and_id(),
325 k->external_name(),
326 k->class_loader_data()->loader_name_and_id(),
327 error);
328 const char* msg = ss.as_string(true /* on C-heap */);
329 constantPoolHandle cph(THREAD__the_thread__, constants());
330 SystemDictionary::add_nest_host_error(cph, _nest_host_index, msg);
331 log_trace(class, nestmates)(!(LogImpl<(LogTag::_class), (LogTag::_nestmates), (LogTag
::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::
__NO_TAG)>::is_level(LogLevel::Trace))) ? (void)0 : LogImpl
<(LogTag::_class), (LogTag::_nestmates), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG)
>::write<LogLevel::Trace>
("%s", msg);
332 }
333 }
334 } else {
335 log_trace(class, nestmates)(!(LogImpl<(LogTag::_class), (LogTag::_nestmates), (LogTag
::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::
__NO_TAG)>::is_level(LogLevel::Trace))) ? (void)0 : LogImpl
<(LogTag::_class), (LogTag::_nestmates), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG)
>::write<LogLevel::Trace>
("Type %s is not part of a nest: setting nest-host to self",
336 this->external_name());
337 }
338
339 // Either not in an explicit nest, or else an error occurred, so
340 // the nest-host is set to `this`. Any thread that sees this assignment
341 // will also see any setting of nest_host_error(), if applicable.
342 return (_nest_host = this);
343}
344
345// Dynamic nest member support: set this class's nest host to the given class.
346// This occurs as part of the class definition, as soon as the instanceKlass
347// has been created and doesn't require further resolution. The code:
348// lookup().defineHiddenClass(bytes_for_X, NESTMATE);
349// results in:
350// class_of_X.set_nest_host(lookup().lookupClass().getNestHost())
351// If it has an explicit _nest_host_index or _nest_members, these will be ignored.
352// We also know the "host" is a valid nest-host in the same package so we can
353// assert some of those facts.
354void InstanceKlass::set_nest_host(InstanceKlass* host) {
355 assert(is_hidden(), "must be a hidden class")do { if (!(is_hidden())) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 355, "assert(" "is_hidden()" ") failed", "must be a hidden class"
); ::breakpoint(); } } while (0)
;
356 assert(host != NULL, "NULL nest host specified")do { if (!(host != __null)) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 356, "assert(" "host != __null" ") failed", "NULL nest host specified"
); ::breakpoint(); } } while (0)
;
357 assert(_nest_host == NULL, "current class has resolved nest-host")do { if (!(_nest_host == __null)) { (*g_assert_poison) = 'X';
; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 357, "assert(" "_nest_host == __null" ") failed", "current class has resolved nest-host"
); ::breakpoint(); } } while (0)
;
358 assert(nest_host_error() == NULL, "unexpected nest host resolution error exists: %s",do { if (!(nest_host_error() == __null)) { (*g_assert_poison)
= 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 359, "assert(" "nest_host_error() == __null" ") failed", "unexpected nest host resolution error exists: %s"
, nest_host_error()); ::breakpoint(); } } while (0)
359 nest_host_error())do { if (!(nest_host_error() == __null)) { (*g_assert_poison)
= 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 359, "assert(" "nest_host_error() == __null" ") failed", "unexpected nest host resolution error exists: %s"
, nest_host_error()); ::breakpoint(); } } while (0)
;
360 assert((host->_nest_host == NULL && host->_nest_host_index == 0) ||do { if (!((host->_nest_host == __null && host->
_nest_host_index == 0) || (host->_nest_host == host))) { (
*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 361, "assert(" "(host->_nest_host == __null && host->_nest_host_index == 0) || (host->_nest_host == host)"
") failed", "proposed host is not a valid nest-host"); ::breakpoint
(); } } while (0)
361 (host->_nest_host == host), "proposed host is not a valid nest-host")do { if (!((host->_nest_host == __null && host->
_nest_host_index == 0) || (host->_nest_host == host))) { (
*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 361, "assert(" "(host->_nest_host == __null && host->_nest_host_index == 0) || (host->_nest_host == host)"
") failed", "proposed host is not a valid nest-host"); ::breakpoint
(); } } while (0)
;
362 // Can't assert this as package is not set yet:
363 // assert(is_same_class_package(host), "proposed host is in wrong package");
364
365 if (log_is_enabled(Trace, class, nestmates)(LogImpl<(LogTag::_class), (LogTag::_nestmates), (LogTag::
__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG
)>::is_level(LogLevel::Trace))
) {
366 ResourceMark rm;
367 const char* msg = "";
368 // a hidden class does not expect a statically defined nest-host
369 if (_nest_host_index > 0) {
370 msg = "(the NestHost attribute in the current class is ignored)";
371 } else if (_nest_members != NULL__null && _nest_members != Universe::the_empty_short_array()) {
372 msg = "(the NestMembers attribute in the current class is ignored)";
373 }
374 log_trace(class, nestmates)(!(LogImpl<(LogTag::_class), (LogTag::_nestmates), (LogTag
::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::
__NO_TAG)>::is_level(LogLevel::Trace))) ? (void)0 : LogImpl
<(LogTag::_class), (LogTag::_nestmates), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG)
>::write<LogLevel::Trace>
("Injected type %s into the nest of %s %s",
375 this->external_name(),
376 host->external_name(),
377 msg);
378 }
379 // set dynamic nest host
380 _nest_host = host;
381 // Record dependency to keep nest host from being unloaded before this class.
382 ClassLoaderData* this_key = class_loader_data();
383 this_key->record_dependency(host);
384}
385
386// check if 'this' and k are nestmates (same nest_host), or k is our nest_host,
387// or we are k's nest_host - all of which is covered by comparing the two
388// resolved_nest_hosts.
389// Any exceptions (i.e. VMEs) are propagated.
390bool InstanceKlass::has_nestmate_access_to(InstanceKlass* k, TRAPSJavaThread* __the_thread__) {
391
392 assert(this != k, "this should be handled by higher-level code")do { if (!(this != k)) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 392, "assert(" "this != k" ") failed", "this should be handled by higher-level code"
); ::breakpoint(); } } while (0)
;
393
394 // Per JVMS 5.4.4 we first resolve and validate the current class, then
395 // the target class k.
396
397 InstanceKlass* cur_host = nest_host(CHECK_false__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return false; (void)(0
);
398 if (cur_host == NULL__null) {
399 return false;
400 }
401
402 Klass* k_nest_host = k->nest_host(CHECK_false__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return false; (void)(0
);
403 if (k_nest_host == NULL__null) {
404 return false;
405 }
406
407 bool access = (cur_host == k_nest_host);
408
409 ResourceMark rm(THREAD__the_thread__);
410 log_trace(class, nestmates)(!(LogImpl<(LogTag::_class), (LogTag::_nestmates), (LogTag
::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::
__NO_TAG)>::is_level(LogLevel::Trace))) ? (void)0 : LogImpl
<(LogTag::_class), (LogTag::_nestmates), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG)
>::write<LogLevel::Trace>
("Class %s does %shave nestmate access to %s",
411 this->external_name(),
412 access ? "" : "NOT ",
413 k->external_name());
414 return access;
415}
416
417const char* InstanceKlass::nest_host_error() {
418 if (_nest_host_index == 0) {
419 return NULL__null;
420 } else {
421 constantPoolHandle cph(Thread::current(), constants());
422 return SystemDictionary::find_nest_host_error(cph, (int)_nest_host_index);
423 }
424}
425
426InstanceKlass* InstanceKlass::allocate_instance_klass(const ClassFileParser& parser, TRAPSJavaThread* __the_thread__) {
427 const int size = InstanceKlass::size(parser.vtable_size(),
428 parser.itable_size(),
429 nonstatic_oop_map_size(parser.total_oop_map_count()),
430 parser.is_interface());
431
432 const Symbol* const class_name = parser.class_name();
433 assert(class_name != NULL, "invariant")do { if (!(class_name != __null)) { (*g_assert_poison) = 'X';
; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 433, "assert(" "class_name != __null" ") failed", "invariant"
); ::breakpoint(); } } while (0)
;
434 ClassLoaderData* loader_data = parser.loader_data();
435 assert(loader_data != NULL, "invariant")do { if (!(loader_data != __null)) { (*g_assert_poison) = 'X'
;; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 435, "assert(" "loader_data != __null" ") failed", "invariant"
); ::breakpoint(); } } while (0)
;
436
437 InstanceKlass* ik;
438
439 // Allocation
440 if (REF_NONE == parser.reference_type()) {
441 if (class_name == vmSymbols::java_lang_Class()) {
442 // mirror
443 ik = new (loader_data, size, THREAD__the_thread__) InstanceMirrorKlass(parser);
444 }
445 else if (is_class_loader(class_name, parser)) {
446 // class loader
447 ik = new (loader_data, size, THREAD__the_thread__) InstanceClassLoaderKlass(parser);
448 } else {
449 // normal
450 ik = new (loader_data, size, THREAD__the_thread__) InstanceKlass(parser, InstanceKlass::_kind_other);
451 }
452 } else {
453 // reference
454 ik = new (loader_data, size, THREAD__the_thread__) InstanceRefKlass(parser);
455 }
456
457 // Check for pending exception before adding to the loader data and incrementing
458 // class count. Can get OOM here.
459 if (HAS_PENDING_EXCEPTION(((ThreadShadow*)__the_thread__)->has_pending_exception())) {
460 return NULL__null;
461 }
462
463 return ik;
464}
465
466
467// copy method ordering from resource area to Metaspace
468void InstanceKlass::copy_method_ordering(const intArray* m, TRAPSJavaThread* __the_thread__) {
469 if (m != NULL__null) {
470 // allocate a new array and copy contents (memcpy?)
471 _method_ordering = MetadataFactory::new_array<int>(class_loader_data(), m->length(), CHECK__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return ; (void)(0
);
472 for (int i = 0; i < m->length(); i++) {
473 _method_ordering->at_put(i, m->at(i));
474 }
475 } else {
476 _method_ordering = Universe::the_empty_int_array();
477 }
478}
479
480// create a new array of vtable_indices for default methods
481Array<int>* InstanceKlass::create_new_default_vtable_indices(int len, TRAPSJavaThread* __the_thread__) {
482 Array<int>* vtable_indices = MetadataFactory::new_array<int>(class_loader_data(), len, CHECK_NULL__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return __null; (void)(0
);
483 assert(default_vtable_indices() == NULL, "only create once")do { if (!(default_vtable_indices() == __null)) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 483, "assert(" "default_vtable_indices() == __null" ") failed"
, "only create once"); ::breakpoint(); } } while (0)
;
484 set_default_vtable_indices(vtable_indices);
485 return vtable_indices;
486}
487
488InstanceKlass::InstanceKlass(const ClassFileParser& parser, unsigned kind, KlassID id) :
489 Klass(id),
490 _nest_members(NULL__null),
491 _nest_host(NULL__null),
492 _permitted_subclasses(NULL__null),
493 _record_components(NULL__null),
494 _static_field_size(parser.static_field_size()),
495 _nonstatic_oop_map_size(nonstatic_oop_map_size(parser.total_oop_map_count())),
496 _itable_len(parser.itable_size()),
497 _nest_host_index(0),
498 _init_state(allocated),
499 _reference_type(parser.reference_type()),
500 _init_thread(NULL__null)
501{
502 set_vtable_length(parser.vtable_size());
503 set_kind(kind);
504 set_access_flags(parser.access_flags());
505 if (parser.is_hidden()) set_is_hidden();
506 set_layout_helper(Klass::instance_layout_helper(parser.layout_size(),
507 false));
508
509 assert(NULL == _methods, "underlying memory not zeroed?")do { if (!(__null == _methods)) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 509, "assert(" "__null == _methods" ") failed", "underlying memory not zeroed?"
); ::breakpoint(); } } while (0)
;
510 assert(is_instance_klass(), "is layout incorrect?")do { if (!(is_instance_klass())) { (*g_assert_poison) = 'X';;
report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 510, "assert(" "is_instance_klass()" ") failed", "is layout incorrect?"
); ::breakpoint(); } } while (0)
;
511 assert(size_helper() == parser.layout_size(), "incorrect size_helper?")do { if (!(size_helper() == parser.layout_size())) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 511, "assert(" "size_helper() == parser.layout_size()" ") failed"
, "incorrect size_helper?"); ::breakpoint(); } } while (0)
;
512}
513
514void InstanceKlass::deallocate_methods(ClassLoaderData* loader_data,
515 Array<Method*>* methods) {
516 if (methods != NULL__null && methods != Universe::the_empty_method_array() &&
517 !methods->is_shared()) {
518 for (int i = 0; i < methods->length(); i++) {
519 Method* method = methods->at(i);
520 if (method == NULL__null) continue; // maybe null if error processing
521 // Only want to delete methods that are not executing for RedefineClasses.
522 // The previous version will point to them so they're not totally dangling
523 assert (!method->on_stack(), "shouldn't be called with methods on stack")do { if (!(!method->on_stack())) { (*g_assert_poison) = 'X'
;; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 523, "assert(" "!method->on_stack()" ") failed", "shouldn't be called with methods on stack"
); ::breakpoint(); } } while (0)
;
524 MetadataFactory::free_metadata(loader_data, method);
525 }
526 MetadataFactory::free_array<Method*>(loader_data, methods);
527 }
528}
529
530void InstanceKlass::deallocate_interfaces(ClassLoaderData* loader_data,
531 const Klass* super_klass,
532 Array<InstanceKlass*>* local_interfaces,
533 Array<InstanceKlass*>* transitive_interfaces) {
534 // Only deallocate transitive interfaces if not empty, same as super class
535 // or same as local interfaces. See code in parseClassFile.
536 Array<InstanceKlass*>* ti = transitive_interfaces;
537 if (ti != Universe::the_empty_instance_klass_array() && ti != local_interfaces) {
538 // check that the interfaces don't come from super class
539 Array<InstanceKlass*>* sti = (super_klass == NULL__null) ? NULL__null :
540 InstanceKlass::cast(super_klass)->transitive_interfaces();
541 if (ti != sti && ti != NULL__null && !ti->is_shared()) {
542 MetadataFactory::free_array<InstanceKlass*>(loader_data, ti);
543 }
544 }
545
546 // local interfaces can be empty
547 if (local_interfaces != Universe::the_empty_instance_klass_array() &&
548 local_interfaces != NULL__null && !local_interfaces->is_shared()) {
549 MetadataFactory::free_array<InstanceKlass*>(loader_data, local_interfaces);
550 }
551}
552
553void InstanceKlass::deallocate_record_components(ClassLoaderData* loader_data,
554 Array<RecordComponent*>* record_components) {
555 if (record_components != NULL__null && !record_components->is_shared()) {
556 for (int i = 0; i < record_components->length(); i++) {
557 RecordComponent* record_component = record_components->at(i);
558 MetadataFactory::free_metadata(loader_data, record_component);
559 }
560 MetadataFactory::free_array<RecordComponent*>(loader_data, record_components);
561 }
562}
563
564// This function deallocates the metadata and C heap pointers that the
565// InstanceKlass points to.
566void InstanceKlass::deallocate_contents(ClassLoaderData* loader_data) {
567
568 // Orphan the mirror first, CMS thinks it's still live.
569 if (java_mirror() != NULL__null) {
570 java_lang_Class::set_klass(java_mirror(), NULL__null);
571 }
572
573 // Also remove mirror from handles
574 loader_data->remove_handle(_java_mirror);
575
576 // Need to take this class off the class loader data list.
577 loader_data->remove_class(this);
578
579 // The array_klass for this class is created later, after error handling.
580 // For class redefinition, we keep the original class so this scratch class
581 // doesn't have an array class. Either way, assert that there is nothing
582 // to deallocate.
583 assert(array_klasses() == NULL, "array classes shouldn't be created for this class yet")do { if (!(array_klasses() == __null)) { (*g_assert_poison) =
'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 583, "assert(" "array_klasses() == __null" ") failed", "array classes shouldn't be created for this class yet"
); ::breakpoint(); } } while (0)
;
584
585 // Release C heap allocated data that this points to, which includes
586 // reference counting symbol names.
587 // Can't release the constant pool here because the constant pool can be
588 // deallocated separately from the InstanceKlass for default methods and
589 // redefine classes.
590 release_C_heap_structures(/* release_constant_pool */ false);
591
592 deallocate_methods(loader_data, methods());
593 set_methods(NULL__null);
594
595 deallocate_record_components(loader_data, record_components());
596 set_record_components(NULL__null);
597
598 if (method_ordering() != NULL__null &&
599 method_ordering() != Universe::the_empty_int_array() &&
600 !method_ordering()->is_shared()) {
601 MetadataFactory::free_array<int>(loader_data, method_ordering());
602 }
603 set_method_ordering(NULL__null);
604
605 // default methods can be empty
606 if (default_methods() != NULL__null &&
607 default_methods() != Universe::the_empty_method_array() &&
608 !default_methods()->is_shared()) {
609 MetadataFactory::free_array<Method*>(loader_data, default_methods());
610 }
611 // Do NOT deallocate the default methods, they are owned by superinterfaces.
612 set_default_methods(NULL__null);
613
614 // default methods vtable indices can be empty
615 if (default_vtable_indices() != NULL__null &&
616 !default_vtable_indices()->is_shared()) {
617 MetadataFactory::free_array<int>(loader_data, default_vtable_indices());
618 }
619 set_default_vtable_indices(NULL__null);
620
621
622 // This array is in Klass, but remove it with the InstanceKlass since
623 // this place would be the only caller and it can share memory with transitive
624 // interfaces.
625 if (secondary_supers() != NULL__null &&
626 secondary_supers() != Universe::the_empty_klass_array() &&
627 // see comments in compute_secondary_supers about the following cast
628 (address)(secondary_supers()) != (address)(transitive_interfaces()) &&
629 !secondary_supers()->is_shared()) {
630 MetadataFactory::free_array<Klass*>(loader_data, secondary_supers());
631 }
632 set_secondary_supers(NULL__null);
633
634 deallocate_interfaces(loader_data, super(), local_interfaces(), transitive_interfaces());
635 set_transitive_interfaces(NULL__null);
636 set_local_interfaces(NULL__null);
637
638 if (fields() != NULL__null && !fields()->is_shared()) {
639 MetadataFactory::free_array<jushort>(loader_data, fields());
640 }
641 set_fields(NULL__null, 0);
642
643 // If a method from a redefined class is using this constant pool, don't
644 // delete it, yet. The new class's previous version will point to this.
645 if (constants() != NULL__null) {
646 assert (!constants()->on_stack(), "shouldn't be called if anything is onstack")do { if (!(!constants()->on_stack())) { (*g_assert_poison)
= 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 646, "assert(" "!constants()->on_stack()" ") failed", "shouldn't be called if anything is onstack"
); ::breakpoint(); } } while (0)
;
647 if (!constants()->is_shared()) {
648 MetadataFactory::free_metadata(loader_data, constants());
649 }
650 // Delete any cached resolution errors for the constant pool
651 SystemDictionary::delete_resolution_error(constants());
652
653 set_constants(NULL__null);
654 }
655
656 if (inner_classes() != NULL__null &&
657 inner_classes() != Universe::the_empty_short_array() &&
658 !inner_classes()->is_shared()) {
659 MetadataFactory::free_array<jushort>(loader_data, inner_classes());
660 }
661 set_inner_classes(NULL__null);
662
663 if (nest_members() != NULL__null &&
664 nest_members() != Universe::the_empty_short_array() &&
665 !nest_members()->is_shared()) {
666 MetadataFactory::free_array<jushort>(loader_data, nest_members());
667 }
668 set_nest_members(NULL__null);
669
670 if (permitted_subclasses() != NULL__null &&
671 permitted_subclasses() != Universe::the_empty_short_array() &&
672 !permitted_subclasses()->is_shared()) {
673 MetadataFactory::free_array<jushort>(loader_data, permitted_subclasses());
674 }
675 set_permitted_subclasses(NULL__null);
676
677 // We should deallocate the Annotations instance if it's not in shared spaces.
678 if (annotations() != NULL__null && !annotations()->is_shared()) {
679 MetadataFactory::free_metadata(loader_data, annotations());
680 }
681 set_annotations(NULL__null);
682
683 SystemDictionaryShared::handle_class_unloading(this);
684}
685
686bool InstanceKlass::is_record() const {
687 return _record_components != NULL__null &&
688 is_final() &&
689 java_super() == vmClasses::Record_klass();
690}
691
692bool InstanceKlass::is_sealed() const {
693 return _permitted_subclasses != NULL__null &&
694 _permitted_subclasses != Universe::the_empty_short_array();
695}
696
697bool InstanceKlass::should_be_initialized() const {
698 return !is_initialized();
699}
700
701klassItable InstanceKlass::itable() const {
702 return klassItable(const_cast<InstanceKlass*>(this));
703}
704
705void InstanceKlass::eager_initialize(Thread *thread) {
706 if (!EagerInitialization) return;
707
708 if (this->is_not_initialized()) {
709 // abort if the the class has a class initializer
710 if (this->class_initializer() != NULL__null) return;
711
712 // abort if it is java.lang.Object (initialization is handled in genesis)
713 Klass* super_klass = super();
714 if (super_klass == NULL__null) return;
715
716 // abort if the super class should be initialized
717 if (!InstanceKlass::cast(super_klass)->is_initialized()) return;
718
719 // call body to expose the this pointer
720 eager_initialize_impl();
721 }
722}
723
724// JVMTI spec thinks there are signers and protection domain in the
725// instanceKlass. These accessors pretend these fields are there.
726// The hprof specification also thinks these fields are in InstanceKlass.
727oop InstanceKlass::protection_domain() const {
728 // return the protection_domain from the mirror
729 return java_lang_Class::protection_domain(java_mirror());
730}
731
732objArrayOop InstanceKlass::signers() const {
733 // return the signers from the mirror
734 return java_lang_Class::signers(java_mirror());
735}
736
737oop InstanceKlass::init_lock() const {
738 // return the init lock from the mirror
739 oop lock = java_lang_Class::init_lock(java_mirror());
740 // Prevent reordering with any access of initialization state
741 OrderAccess::loadload();
742 assert(lock != NULL || !is_not_initialized(), // initialized or in_error statedo { if (!(lock != __null || !is_not_initialized())) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 743, "assert(" "lock != __null || !is_not_initialized()" ") failed"
, "only fully initialized state can have a null lock"); ::breakpoint
(); } } while (0)
743 "only fully initialized state can have a null lock")do { if (!(lock != __null || !is_not_initialized())) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 743, "assert(" "lock != __null || !is_not_initialized()" ") failed"
, "only fully initialized state can have a null lock"); ::breakpoint
(); } } while (0)
;
744 return lock;
745}
746
747// Set the initialization lock to null so the object can be GC'ed. Any racing
748// threads to get this lock will see a null lock and will not lock.
749// That's okay because they all check for initialized state after getting
750// the lock and return.
751void InstanceKlass::fence_and_clear_init_lock() {
752 // make sure previous stores are all done, notably the init_state.
753 OrderAccess::storestore();
754 java_lang_Class::clear_init_lock(java_mirror());
755 assert(!is_not_initialized(), "class must be initialized now")do { if (!(!is_not_initialized())) { (*g_assert_poison) = 'X'
;; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 755, "assert(" "!is_not_initialized()" ") failed", "class must be initialized now"
); ::breakpoint(); } } while (0)
;
756}
757
758void InstanceKlass::eager_initialize_impl() {
759 EXCEPTION_MARKExceptionMark __em; JavaThread* __the_thread__ = __em.thread(
);
;
760 HandleMark hm(THREAD__the_thread__);
761 Handle h_init_lock(THREAD__the_thread__, init_lock());
762 ObjectLocker ol(h_init_lock, THREAD__the_thread__);
763
764 // abort if someone beat us to the initialization
765 if (!is_not_initialized()) return; // note: not equivalent to is_initialized()
766
767 ClassState old_state = init_state();
768 link_class_impl(THREAD__the_thread__);
769 if (HAS_PENDING_EXCEPTION(((ThreadShadow*)__the_thread__)->has_pending_exception())) {
770 CLEAR_PENDING_EXCEPTION(((ThreadShadow*)__the_thread__)->clear_pending_exception(
))
;
771 // Abort if linking the class throws an exception.
772
773 // Use a test to avoid redundantly resetting the state if there's
774 // no change. Set_init_state() asserts that state changes make
775 // progress, whereas here we might just be spinning in place.
776 if (old_state != _init_state)
777 set_init_state(old_state);
778 } else {
779 // linking successfull, mark class as initialized
780 set_init_state(fully_initialized);
781 fence_and_clear_init_lock();
782 // trace
783 if (log_is_enabled(Info, class, init)(LogImpl<(LogTag::_class), (LogTag::_init), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG)
>::is_level(LogLevel::Info))
) {
784 ResourceMark rm(THREAD__the_thread__);
785 log_info(class, init)(!(LogImpl<(LogTag::_class), (LogTag::_init), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG)
>::is_level(LogLevel::Info))) ? (void)0 : LogImpl<(LogTag
::_class), (LogTag::_init), (LogTag::__NO_TAG), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG)>::write<LogLevel
::Info>
("[Initialized %s without side effects]", external_name());
786 }
787 }
788}
789
790
791// See "The Virtual Machine Specification" section 2.16.5 for a detailed explanation of the class initialization
792// process. The step comments refers to the procedure described in that section.
793// Note: implementation moved to static method to expose the this pointer.
794void InstanceKlass::initialize(TRAPSJavaThread* __the_thread__) {
795 if (this->should_be_initialized()) {
796 initialize_impl(CHECK__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return ; (void)(0
);
797 // Note: at this point the class may be initialized
798 // OR it may be in the state of being initialized
799 // in case of recursive initialization!
800 } else {
801 assert(is_initialized(), "sanity check")do { if (!(is_initialized())) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 801, "assert(" "is_initialized()" ") failed", "sanity check"
); ::breakpoint(); } } while (0)
;
802 }
803}
804
805
806bool InstanceKlass::verify_code(TRAPSJavaThread* __the_thread__) {
807 // 1) Verify the bytecodes
808 return Verifier::verify(this, should_verify_class(), THREAD__the_thread__);
809}
810
811void InstanceKlass::link_class(TRAPSJavaThread* __the_thread__) {
812 assert(is_loaded(), "must be loaded")do { if (!(is_loaded())) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 812, "assert(" "is_loaded()" ") failed", "must be loaded");
::breakpoint(); } } while (0)
;
813 if (!is_linked()) {
814 link_class_impl(CHECK__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return ; (void)(0
);
815 }
816}
817
818// Called to verify that a class can link during initialization, without
819// throwing a VerifyError.
820bool InstanceKlass::link_class_or_fail(TRAPSJavaThread* __the_thread__) {
821 assert(is_loaded(), "must be loaded")do { if (!(is_loaded())) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 821, "assert(" "is_loaded()" ") failed", "must be loaded");
::breakpoint(); } } while (0)
;
822 if (!is_linked()) {
823 link_class_impl(CHECK_false__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return false; (void)(0
);
824 }
825 return is_linked();
826}
827
828bool InstanceKlass::link_class_impl(TRAPSJavaThread* __the_thread__) {
829 if (DumpSharedSpaces && SystemDictionaryShared::has_class_failed_verification(this)) {
830 // This is for CDS dumping phase only -- we use the in_error_state to indicate that
831 // the class has failed verification. Throwing the NoClassDefFoundError here is just
832 // a convenient way to stop repeat attempts to verify the same (bad) class.
833 //
834 // Note that the NoClassDefFoundError is not part of the JLS, and should not be thrown
835 // if we are executing Java code. This is not a problem for CDS dumping phase since
836 // it doesn't execute any Java code.
837 ResourceMark rm(THREAD__the_thread__);
838 Exceptions::fthrow(THREAD_AND_LOCATION__the_thread__, "/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 838
,
839 vmSymbols::java_lang_NoClassDefFoundError(),
840 "Class %s, or one of its supertypes, failed class initialization",
841 external_name());
842 return false;
843 }
844 // return if already verified
845 if (is_linked()) {
846 return true;
847 }
848
849 // Timing
850 // timer handles recursion
851 JavaThread* jt = THREAD__the_thread__;
852
853 // link super class before linking this class
854 Klass* super_klass = super();
855 if (super_klass != NULL__null) {
856 if (super_klass->is_interface()) { // check if super class is an interface
857 ResourceMark rm(THREAD__the_thread__);
858 Exceptions::fthrow(
859 THREAD_AND_LOCATION__the_thread__, "/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 859
,
860 vmSymbols::java_lang_IncompatibleClassChangeError(),
861 "class %s has interface %s as super class",
862 external_name(),
863 super_klass->external_name()
864 );
865 return false;
866 }
867
868 InstanceKlass* ik_super = InstanceKlass::cast(super_klass);
869 ik_super->link_class_impl(CHECK_false__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return false; (void)(0
);
870 }
871
872 // link all interfaces implemented by this class before linking this class
873 Array<InstanceKlass*>* interfaces = local_interfaces();
874 int num_interfaces = interfaces->length();
875 for (int index = 0; index < num_interfaces; index++) {
876 InstanceKlass* interk = interfaces->at(index);
877 interk->link_class_impl(CHECK_false__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return false; (void)(0
);
878 }
879
880 // in case the class is linked in the process of linking its superclasses
881 if (is_linked()) {
882 return true;
883 }
884
885 // trace only the link time for this klass that includes
886 // the verification time
887 PerfClassTraceTime vmtimer(ClassLoader::perf_class_link_time(),
888 ClassLoader::perf_class_link_selftime(),
889 ClassLoader::perf_classes_linked(),
890 jt->get_thread_stat()->perf_recursion_counts_addr(),
891 jt->get_thread_stat()->perf_timers_addr(),
892 PerfClassTraceTime::CLASS_LINK);
893
894 // verification & rewriting
895 {
896 HandleMark hm(THREAD__the_thread__);
897 Handle h_init_lock(THREAD__the_thread__, init_lock());
898 ObjectLocker ol(h_init_lock, jt);
899 // rewritten will have been set if loader constraint error found
900 // on an earlier link attempt
901 // don't verify or rewrite if already rewritten
902 //
903
904 if (!is_linked()) {
905 if (!is_rewritten()) {
906 if (is_shared()) {
907 assert(!verified_at_dump_time(), "must be")do { if (!(!verified_at_dump_time())) { (*g_assert_poison) = 'X'
;; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 907, "assert(" "!verified_at_dump_time()" ") failed", "must be"
); ::breakpoint(); } } while (0)
;
908 }
909 {
910 bool verify_ok = verify_code(THREAD__the_thread__);
911 if (!verify_ok) {
912 return false;
913 }
914 }
915
916 // Just in case a side-effect of verify linked this class already
917 // (which can sometimes happen since the verifier loads classes
918 // using custom class loaders, which are free to initialize things)
919 if (is_linked()) {
920 return true;
921 }
922
923 // also sets rewritten
924 rewrite_class(CHECK_false__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return false; (void)(0
);
925 } else if (is_shared()) {
926 SystemDictionaryShared::check_verification_constraints(this, CHECK_false__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return false; (void)(0
);
927 }
928
929 // relocate jsrs and link methods after they are all rewritten
930 link_methods(CHECK_false__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return false; (void)(0
);
931
932 // Initialize the vtable and interface table after
933 // methods have been rewritten since rewrite may
934 // fabricate new Method*s.
935 // also does loader constraint checking
936 //
937 // initialize_vtable and initialize_itable need to be rerun
938 // for a shared class if
939 // 1) the class is loaded by custom class loader or
940 // 2) the class is loaded by built-in class loader but failed to add archived loader constraints or
941 // 3) the class was not verified during dump time
942 bool need_init_table = true;
943 if (is_shared() && verified_at_dump_time() &&
944 SystemDictionaryShared::check_linking_constraints(THREAD__the_thread__, this)) {
945 need_init_table = false;
946 }
947 if (need_init_table) {
948 vtable().initialize_vtable_and_check_constraints(CHECK_false__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return false; (void)(0
);
949 itable().initialize_itable_and_check_constraints(CHECK_false__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return false; (void)(0
);
950 }
951#ifdef ASSERT1
952 vtable().verify(tty, true);
953 // In case itable verification is ever added.
954 // itable().verify(tty, true);
955#endif
956 if (UseVtableBasedCHA) {
957 MutexLocker ml(THREAD__the_thread__, Compile_lock);
958 set_init_state(linked);
959
960 // Now flush all code that assume the class is not linked.
961 if (Universe::is_fully_initialized()) {
962 CodeCache::flush_dependents_on(this);
963 }
964 } else {
965 set_init_state(linked);
966 }
967 if (JvmtiExport::should_post_class_prepare()) {
968 JvmtiExport::post_class_prepare(THREAD__the_thread__, this);
969 }
970 }
971 }
972 return true;
973}
974
975// Rewrite the byte codes of all of the methods of a class.
976// The rewriter must be called exactly once. Rewriting must happen after
977// verification but before the first method of the class is executed.
978void InstanceKlass::rewrite_class(TRAPSJavaThread* __the_thread__) {
979 assert(is_loaded(), "must be loaded")do { if (!(is_loaded())) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 979, "assert(" "is_loaded()" ") failed", "must be loaded");
::breakpoint(); } } while (0)
;
980 if (is_rewritten()) {
981 assert(is_shared(), "rewriting an unshared class?")do { if (!(is_shared())) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 981, "assert(" "is_shared()" ") failed", "rewriting an unshared class?"
); ::breakpoint(); } } while (0)
;
982 return;
983 }
984 Rewriter::rewrite(this, CHECK__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return ; (void)(0
);
985 set_rewritten();
986}
987
988// Now relocate and link method entry points after class is rewritten.
989// This is outside is_rewritten flag. In case of an exception, it can be
990// executed more than once.
991void InstanceKlass::link_methods(TRAPSJavaThread* __the_thread__) {
992 int len = methods()->length();
993 for (int i = len-1; i >= 0; i--) {
994 methodHandle m(THREAD__the_thread__, methods()->at(i));
995
996 // Set up method entry points for compiler and interpreter .
997 m->link_method(m, CHECK__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return ; (void)(0
);
998 }
999}
1000
1001// Eagerly initialize superinterfaces that declare default methods (concrete instance: any access)
1002void InstanceKlass::initialize_super_interfaces(TRAPSJavaThread* __the_thread__) {
1003 assert (has_nonstatic_concrete_methods(), "caller should have checked this")do { if (!(has_nonstatic_concrete_methods())) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 1003, "assert(" "has_nonstatic_concrete_methods()" ") failed"
, "caller should have checked this"); ::breakpoint(); } } while
(0)
;
1004 for (int i = 0; i < local_interfaces()->length(); ++i) {
1005 InstanceKlass* ik = local_interfaces()->at(i);
1006
1007 // Initialization is depth first search ie. we start with top of the inheritance tree
1008 // has_nonstatic_concrete_methods drives searching superinterfaces since it
1009 // means has_nonstatic_concrete_methods in its superinterface hierarchy
1010 if (ik->has_nonstatic_concrete_methods()) {
1011 ik->initialize_super_interfaces(CHECK__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return ; (void)(0
);
1012 }
1013
1014 // Only initialize() interfaces that "declare" concrete methods.
1015 if (ik->should_be_initialized() && ik->declares_nonstatic_concrete_methods()) {
1016 ik->initialize(CHECK__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return ; (void)(0
);
1017 }
1018 }
1019}
1020
1021ResourceHashtable<const InstanceKlass*, OopHandle, 107, ResourceObj::C_HEAP, mtClass>
1022 _initialization_error_table;
1023
1024void InstanceKlass::add_initialization_error(JavaThread* current, Handle exception) {
1025 // Create the same exception with a message indicating the thread name,
1026 // and the StackTraceElements.
1027 // If the initialization error is OOM, this might not work, but if GC kicks in
1028 // this would be still be helpful.
1029 JavaThread* THREAD__the_thread__ = current;
1030 Handle cause = java_lang_Throwable::get_cause_with_stack_trace(exception, THREAD__the_thread__);
1031 if (HAS_PENDING_EXCEPTION(((ThreadShadow*)__the_thread__)->has_pending_exception()) || cause.is_null()) {
1032 CLEAR_PENDING_EXCEPTION(((ThreadShadow*)__the_thread__)->clear_pending_exception(
))
;
1033 return;
1034 }
1035
1036 MutexLocker ml(THREAD__the_thread__, ClassInitError_lock);
1037 OopHandle elem = OopHandle(Universe::vm_global(), cause());
1038 bool created = false;
1039 _initialization_error_table.put_if_absent(this, elem, &created);
1040 assert(created, "Initialization is single threaded")do { if (!(created)) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 1040, "assert(" "created" ") failed", "Initialization is single threaded"
); ::breakpoint(); } } while (0)
;
1041 ResourceMark rm(THREAD__the_thread__);
1042 log_trace(class, init)(!(LogImpl<(LogTag::_class), (LogTag::_init), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG)
>::is_level(LogLevel::Trace))) ? (void)0 : LogImpl<(LogTag
::_class), (LogTag::_init), (LogTag::__NO_TAG), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG)>::write<LogLevel
::Trace>
("Initialization error added for class %s", external_name());
1043}
1044
1045oop InstanceKlass::get_initialization_error(JavaThread* current) {
1046 MutexLocker ml(current, ClassInitError_lock);
1047 OopHandle* h = _initialization_error_table.get(this);
1048 return (h != nullptr) ? h->resolve() : nullptr;
1049}
1050
1051// Need to remove entries for unloaded classes.
1052void InstanceKlass::clean_initialization_error_table() {
1053 struct InitErrorTableCleaner {
1054 bool do_entry(const InstanceKlass* ik, OopHandle h) {
1055 if (!ik->is_loader_alive()) {
1056 h.release(Universe::vm_global());
1057 return true;
1058 } else {
1059 return false;
1060 }
1061 }
1062 };
1063
1064 assert_locked_or_safepoint(ClassInitError_lock);
1065 InitErrorTableCleaner cleaner;
1066 _initialization_error_table.unlink(&cleaner);
1067}
1068
1069void InstanceKlass::initialize_impl(TRAPSJavaThread* __the_thread__) {
1070 HandleMark hm(THREAD__the_thread__);
1071
1072 // Make sure klass is linked (verified) before initialization
1073 // A class could already be verified, since it has been reflected upon.
1074 link_class(CHECK__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return ; (void)(0
);
1075
1076 DTRACE_CLASSINIT_PROBE(required, -1);
1077
1078 bool wait = false;
1079
1080 JavaThread* jt = THREAD__the_thread__;
1081
1082 // refer to the JVM book page 47 for description of steps
1083 // Step 1
1084 {
1085 Handle h_init_lock(THREAD__the_thread__, init_lock());
1086 ObjectLocker ol(h_init_lock, jt);
1087
1088 // Step 2
1089 // If we were to use wait() instead of waitInterruptibly() then
1090 // we might end up throwing IE from link/symbol resolution sites
1091 // that aren't expected to throw. This would wreak havoc. See 6320309.
1092 while (is_being_initialized() && !is_reentrant_initialization(jt)) {
1093 wait = true;
1094 jt->set_class_to_be_initialized(this);
1095 ol.wait_uninterruptibly(jt);
1096 jt->set_class_to_be_initialized(NULL__null);
1097 }
1098
1099 // Step 3
1100 if (is_being_initialized() && is_reentrant_initialization(jt)) {
1101 DTRACE_CLASSINIT_PROBE_WAIT(recursive, -1, wait);
1102 return;
1103 }
1104
1105 // Step 4
1106 if (is_initialized()) {
1107 DTRACE_CLASSINIT_PROBE_WAIT(concurrent, -1, wait);
1108 return;
1109 }
1110
1111 // Step 5
1112 if (is_in_error_state()) {
1113 DTRACE_CLASSINIT_PROBE_WAIT(erroneous, -1, wait);
1114 ResourceMark rm(THREAD__the_thread__);
1115 Handle cause(THREAD__the_thread__, get_initialization_error(THREAD__the_thread__));
1116
1117 stringStream ss;
1118 ss.print("Could not initialize class %s", external_name());
1119 if (cause.is_null()) {
1120 THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), ss.as_string()){ Exceptions::_throw_msg(__the_thread__, "/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 1120, vmSymbols::java_lang_NoClassDefFoundError(), ss.as_string
()); return; }
;
1121 } else {
1122 THROW_MSG_CAUSE(vmSymbols::java_lang_NoClassDefFoundError(),{ Exceptions::_throw_msg_cause(__the_thread__, "/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 1123, vmSymbols::java_lang_NoClassDefFoundError(), ss.as_string
(), cause); return; }
1123 ss.as_string(), cause){ Exceptions::_throw_msg_cause(__the_thread__, "/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 1123, vmSymbols::java_lang_NoClassDefFoundError(), ss.as_string
(), cause); return; }
;
1124 }
1125 }
1126
1127 // Step 6
1128 set_init_state(being_initialized);
1129 set_init_thread(jt);
1130 }
1131
1132 // Step 7
1133 // Next, if C is a class rather than an interface, initialize it's super class and super
1134 // interfaces.
1135 if (!is_interface()) {
1136 Klass* super_klass = super();
1137 if (super_klass != NULL__null && super_klass->should_be_initialized()) {
1138 super_klass->initialize(THREAD__the_thread__);
1139 }
1140 // If C implements any interface that declares a non-static, concrete method,
1141 // the initialization of C triggers initialization of its super interfaces.
1142 // Only need to recurse if has_nonstatic_concrete_methods which includes declaring and
1143 // having a superinterface that declares, non-static, concrete methods
1144 if (!HAS_PENDING_EXCEPTION(((ThreadShadow*)__the_thread__)->has_pending_exception()) && has_nonstatic_concrete_methods()) {
1145 initialize_super_interfaces(THREAD__the_thread__);
1146 }
1147
1148 // If any exceptions, complete abruptly, throwing the same exception as above.
1149 if (HAS_PENDING_EXCEPTION(((ThreadShadow*)__the_thread__)->has_pending_exception())) {
1150 Handle e(THREAD__the_thread__, PENDING_EXCEPTION(((ThreadShadow*)__the_thread__)->pending_exception()));
1151 CLEAR_PENDING_EXCEPTION(((ThreadShadow*)__the_thread__)->clear_pending_exception(
))
;
1152 {
1153 EXCEPTION_MARKExceptionMark __em; JavaThread* __the_thread__ = __em.thread(
);
;
1154 add_initialization_error(THREAD__the_thread__, e);
1155 // Locks object, set state, and notify all waiting threads
1156 set_initialization_state_and_notify(initialization_error, THREAD__the_thread__);
1157 CLEAR_PENDING_EXCEPTION(((ThreadShadow*)__the_thread__)->clear_pending_exception(
))
;
1158 }
1159 DTRACE_CLASSINIT_PROBE_WAIT(super__failed, -1, wait);
1160 THROW_OOP(e()){ Exceptions::_throw_oop(__the_thread__, "/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 1160, e()); return; }
;
1161 }
1162 }
1163
1164
1165 // Step 8
1166 {
1167 DTRACE_CLASSINIT_PROBE_WAIT(clinit, -1, wait);
1168 if (class_initializer() != NULL__null) {
1169 // Timer includes any side effects of class initialization (resolution,
1170 // etc), but not recursive entry into call_class_initializer().
1171 PerfClassTraceTime timer(ClassLoader::perf_class_init_time(),
1172 ClassLoader::perf_class_init_selftime(),
1173 ClassLoader::perf_classes_inited(),
1174 jt->get_thread_stat()->perf_recursion_counts_addr(),
1175 jt->get_thread_stat()->perf_timers_addr(),
1176 PerfClassTraceTime::CLASS_CLINIT);
1177 call_class_initializer(THREAD__the_thread__);
1178 } else {
1179 // The elapsed time is so small it's not worth counting.
1180 if (UsePerfData) {
1181 ClassLoader::perf_classes_inited()->inc();
1182 }
1183 call_class_initializer(THREAD__the_thread__);
1184 }
1185 }
1186
1187 // Step 9
1188 if (!HAS_PENDING_EXCEPTION(((ThreadShadow*)__the_thread__)->has_pending_exception())) {
1189 set_initialization_state_and_notify(fully_initialized, CHECK__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return ; (void)(0
);
1190 debug_only(vtable().verify(tty, true);)vtable().verify(tty, true);
1191 }
1192 else {
1193 // Step 10 and 11
1194 Handle e(THREAD__the_thread__, PENDING_EXCEPTION(((ThreadShadow*)__the_thread__)->pending_exception()));
1195 CLEAR_PENDING_EXCEPTION(((ThreadShadow*)__the_thread__)->clear_pending_exception(
))
;
1196 // JVMTI has already reported the pending exception
1197 // JVMTI internal flag reset is needed in order to report ExceptionInInitializerError
1198 JvmtiExport::clear_detected_exception(jt);
1199 {
1200 EXCEPTION_MARKExceptionMark __em; JavaThread* __the_thread__ = __em.thread(
);
;
1201 add_initialization_error(THREAD__the_thread__, e);
1202 set_initialization_state_and_notify(initialization_error, THREAD__the_thread__);
1203 CLEAR_PENDING_EXCEPTION(((ThreadShadow*)__the_thread__)->clear_pending_exception(
))
; // ignore any exception thrown, class initialization error is thrown below
1204 // JVMTI has already reported the pending exception
1205 // JVMTI internal flag reset is needed in order to report ExceptionInInitializerError
1206 JvmtiExport::clear_detected_exception(jt);
1207 }
1208 DTRACE_CLASSINIT_PROBE_WAIT(error, -1, wait);
1209 if (e->is_a(vmClasses::Error_klass())) {
1210 THROW_OOP(e()){ Exceptions::_throw_oop(__the_thread__, "/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 1210, e()); return; }
;
1211 } else {
1212 JavaCallArguments args(e);
1213 THROW_ARG(vmSymbols::java_lang_ExceptionInInitializerError(),{ Exceptions::_throw_args(__the_thread__, "/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 1215, vmSymbols::java_lang_ExceptionInInitializerError(), vmSymbols
::throwable_void_signature(), &args); return; }
1214 vmSymbols::throwable_void_signature(),{ Exceptions::_throw_args(__the_thread__, "/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 1215, vmSymbols::java_lang_ExceptionInInitializerError(), vmSymbols
::throwable_void_signature(), &args); return; }
1215 &args){ Exceptions::_throw_args(__the_thread__, "/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 1215, vmSymbols::java_lang_ExceptionInInitializerError(), vmSymbols
::throwable_void_signature(), &args); return; }
;
1216 }
1217 }
1218 DTRACE_CLASSINIT_PROBE_WAIT(end, -1, wait);
1219}
1220
1221
1222void InstanceKlass::set_initialization_state_and_notify(ClassState state, TRAPSJavaThread* __the_thread__) {
1223 Handle h_init_lock(THREAD__the_thread__, init_lock());
1224 if (h_init_lock() != NULL__null) {
1225 ObjectLocker ol(h_init_lock, THREAD__the_thread__);
1226 set_init_thread(NULL__null); // reset _init_thread before changing _init_state
1227 set_init_state(state);
1228 fence_and_clear_init_lock();
1229 ol.notify_all(CHECK__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return ; (void)(0
);
1230 } else {
1231 assert(h_init_lock() != NULL, "The initialization state should never be set twice")do { if (!(h_init_lock() != __null)) { (*g_assert_poison) = 'X'
;; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 1231, "assert(" "h_init_lock() != __null" ") failed", "The initialization state should never be set twice"
); ::breakpoint(); } } while (0)
;
1232 set_init_thread(NULL__null); // reset _init_thread before changing _init_state
1233 set_init_state(state);
1234 }
1235}
1236
1237InstanceKlass* InstanceKlass::implementor() const {
1238 InstanceKlass* volatile* ik = adr_implementor();
1239 if (ik == NULL__null) {
1240 return NULL__null;
1241 } else {
1242 // This load races with inserts, and therefore needs acquire.
1243 InstanceKlass* ikls = Atomic::load_acquire(ik);
1244 if (ikls != NULL__null && !ikls->is_loader_alive()) {
1245 return NULL__null; // don't return unloaded class
1246 } else {
1247 return ikls;
1248 }
1249 }
1250}
1251
1252
1253void InstanceKlass::set_implementor(InstanceKlass* ik) {
1254 assert_locked_or_safepoint(Compile_lock);
1255 assert(is_interface(), "not interface")do { if (!(is_interface())) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 1255, "assert(" "is_interface()" ") failed", "not interface"
); ::breakpoint(); } } while (0)
;
1256 InstanceKlass* volatile* addr = adr_implementor();
1257 assert(addr != NULL, "null addr")do { if (!(addr != __null)) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 1257, "assert(" "addr != __null" ") failed", "null addr"); ::
breakpoint(); } } while (0)
;
1258 if (addr != NULL__null) {
1259 Atomic::release_store(addr, ik);
1260 }
1261}
1262
1263int InstanceKlass::nof_implementors() const {
1264 InstanceKlass* ik = implementor();
1265 if (ik == NULL__null) {
1266 return 0;
1267 } else if (ik != this) {
1268 return 1;
1269 } else {
1270 return 2;
1271 }
1272}
1273
1274// The embedded _implementor field can only record one implementor.
1275// When there are more than one implementors, the _implementor field
1276// is set to the interface Klass* itself. Following are the possible
1277// values for the _implementor field:
1278// NULL - no implementor
1279// implementor Klass* - one implementor
1280// self - more than one implementor
1281//
1282// The _implementor field only exists for interfaces.
1283void InstanceKlass::add_implementor(InstanceKlass* ik) {
1284 if (Universe::is_fully_initialized()) {
1285 assert_lock_strong(Compile_lock);
1286 }
1287 assert(is_interface(), "not interface")do { if (!(is_interface())) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 1287, "assert(" "is_interface()" ") failed", "not interface"
); ::breakpoint(); } } while (0)
;
1288 // Filter out my subinterfaces.
1289 // (Note: Interfaces are never on the subklass list.)
1290 if (ik->is_interface()) return;
1291
1292 // Filter out subclasses whose supers already implement me.
1293 // (Note: CHA must walk subclasses of direct implementors
1294 // in order to locate indirect implementors.)
1295 InstanceKlass* super_ik = ik->java_super();
1296 if (super_ik != NULL__null && super_ik->implements_interface(this))
1297 // We only need to check one immediate superclass, since the
1298 // implements_interface query looks at transitive_interfaces.
1299 // Any supers of the super have the same (or fewer) transitive_interfaces.
1300 return;
1301
1302 InstanceKlass* iklass = implementor();
1303 if (iklass == NULL__null) {
1304 set_implementor(ik);
1305 } else if (iklass != this && iklass != ik) {
1306 // There is already an implementor. Use itself as an indicator of
1307 // more than one implementors.
1308 set_implementor(this);
1309 }
1310
1311 // The implementor also implements the transitive_interfaces
1312 for (int index = 0; index < local_interfaces()->length(); index++) {
1313 local_interfaces()->at(index)->add_implementor(ik);
1314 }
1315}
1316
1317void InstanceKlass::init_implementor() {
1318 if (is_interface()) {
1319 set_implementor(NULL__null);
1320 }
1321}
1322
1323
1324void InstanceKlass::process_interfaces() {
1325 // link this class into the implementors list of every interface it implements
1326 for (int i = local_interfaces()->length() - 1; i >= 0; i--) {
1327 assert(local_interfaces()->at(i)->is_klass(), "must be a klass")do { if (!(local_interfaces()->at(i)->is_klass())) { (*
g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 1327, "assert(" "local_interfaces()->at(i)->is_klass()"
") failed", "must be a klass"); ::breakpoint(); } } while (0
)
;
1328 InstanceKlass* interf = local_interfaces()->at(i);
1329 assert(interf->is_interface(), "expected interface")do { if (!(interf->is_interface())) { (*g_assert_poison) =
'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 1329, "assert(" "interf->is_interface()" ") failed", "expected interface"
); ::breakpoint(); } } while (0)
;
1330 interf->add_implementor(this);
1331 }
1332}
1333
1334bool InstanceKlass::can_be_primary_super_slow() const {
1335 if (is_interface())
1336 return false;
1337 else
1338 return Klass::can_be_primary_super_slow();
1339}
1340
1341GrowableArray<Klass*>* InstanceKlass::compute_secondary_supers(int num_extra_slots,
1342 Array<InstanceKlass*>* transitive_interfaces) {
1343 // The secondaries are the implemented interfaces.
1344 Array<InstanceKlass*>* interfaces = transitive_interfaces;
1345 int num_secondaries = num_extra_slots + interfaces->length();
1346 if (num_secondaries == 0) {
1347 // Must share this for correct bootstrapping!
1348 set_secondary_supers(Universe::the_empty_klass_array());
1349 return NULL__null;
1350 } else if (num_extra_slots == 0) {
1351 // The secondary super list is exactly the same as the transitive interfaces, so
1352 // let's use it instead of making a copy.
1353 // Redefine classes has to be careful not to delete this!
1354 // We need the cast because Array<Klass*> is NOT a supertype of Array<InstanceKlass*>,
1355 // (but it's safe to do here because we won't write into _secondary_supers from this point on).
1356 set_secondary_supers((Array<Klass*>*)(address)interfaces);
1357 return NULL__null;
1358 } else {
1359 // Copy transitive interfaces to a temporary growable array to be constructed
1360 // into the secondary super list with extra slots.
1361 GrowableArray<Klass*>* secondaries = new GrowableArray<Klass*>(interfaces->length());
1362 for (int i = 0; i < interfaces->length(); i++) {
1363 secondaries->push(interfaces->at(i));
1364 }
1365 return secondaries;
1366 }
1367}
1368
1369bool InstanceKlass::implements_interface(Klass* k) const {
1370 if (this == k) return true;
1371 assert(k->is_interface(), "should be an interface class")do { if (!(k->is_interface())) { (*g_assert_poison) = 'X';
; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 1371, "assert(" "k->is_interface()" ") failed", "should be an interface class"
); ::breakpoint(); } } while (0)
;
1372 for (int i = 0; i < transitive_interfaces()->length(); i++) {
1373 if (transitive_interfaces()->at(i) == k) {
1374 return true;
1375 }
1376 }
1377 return false;
1378}
1379
1380bool InstanceKlass::is_same_or_direct_interface(Klass *k) const {
1381 // Verify direct super interface
1382 if (this == k) return true;
1383 assert(k->is_interface(), "should be an interface class")do { if (!(k->is_interface())) { (*g_assert_poison) = 'X';
; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 1383, "assert(" "k->is_interface()" ") failed", "should be an interface class"
); ::breakpoint(); } } while (0)
;
1384 for (int i = 0; i < local_interfaces()->length(); i++) {
1385 if (local_interfaces()->at(i) == k) {
1386 return true;
1387 }
1388 }
1389 return false;
1390}
1391
1392objArrayOop InstanceKlass::allocate_objArray(int n, int length, TRAPSJavaThread* __the_thread__) {
1393 check_array_allocation_length(length, arrayOopDesc::max_array_length(T_OBJECT), CHECK_NULL__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return __null; (void)(0
);
1394 size_t size = objArrayOopDesc::object_size(length);
1395 Klass* ak = array_klass(n, CHECK_NULL__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return __null; (void)(0
);
1396 objArrayOop o = (objArrayOop)Universe::heap()->array_allocate(ak, size, length,
1397 /* do_zero */ true, CHECK_NULL__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return __null; (void)(0
);
1398 return o;
1399}
1400
1401instanceOop InstanceKlass::register_finalizer(instanceOop i, TRAPSJavaThread* __the_thread__) {
1402 if (TraceFinalizerRegistration) {
1403 tty->print("Registered ");
1404 i->print_value_on(tty);
1405 tty->print_cr(" (" INTPTR_FORMAT"0x%016" "l" "x" ") as finalizable", p2i(i));
1406 }
1407 instanceHandle h_i(THREAD__the_thread__, i);
1408 // Pass the handle as argument, JavaCalls::call expects oop as jobjects
1409 JavaValue result(T_VOID);
1410 JavaCallArguments args(h_i);
1411 methodHandle mh(THREAD__the_thread__, Universe::finalizer_register_method());
1412 JavaCalls::call(&result, mh, &args, CHECK_NULL__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return __null; (void)(0
);
1413 MANAGEMENT_ONLY(FinalizerService::on_register(h_i(), THREAD);)FinalizerService::on_register(h_i(), __the_thread__);
1414 return h_i();
1415}
1416
1417instanceOop InstanceKlass::allocate_instance(TRAPSJavaThread* __the_thread__) {
1418 bool has_finalizer_flag = has_finalizer(); // Query before possible GC
1419 size_t size = size_helper(); // Query before forming handle.
1420
1421 instanceOop i;
1422
1423 i = (instanceOop)Universe::heap()->obj_allocate(this, size, CHECK_NULL__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return __null; (void)(0
);
1424 if (has_finalizer_flag && !RegisterFinalizersAtInit) {
1425 i = register_finalizer(i, CHECK_NULL__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return __null; (void)(0
);
1426 }
1427 return i;
1428}
1429
1430instanceHandle InstanceKlass::allocate_instance_handle(TRAPSJavaThread* __the_thread__) {
1431 return instanceHandle(THREAD__the_thread__, allocate_instance(THREAD__the_thread__));
1432}
1433
1434void InstanceKlass::check_valid_for_instantiation(bool throwError, TRAPSJavaThread* __the_thread__) {
1435 if (is_interface() || is_abstract()) {
1436 ResourceMark rm(THREAD__the_thread__);
1437 THROW_MSG(throwError ? vmSymbols::java_lang_InstantiationError(){ Exceptions::_throw_msg(__the_thread__, "/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 1438, throwError ? vmSymbols::java_lang_InstantiationError(
) : vmSymbols::java_lang_InstantiationException(), external_name
()); return; }
1438 : vmSymbols::java_lang_InstantiationException(), external_name()){ Exceptions::_throw_msg(__the_thread__, "/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 1438, throwError ? vmSymbols::java_lang_InstantiationError(
) : vmSymbols::java_lang_InstantiationException(), external_name
()); return; }
;
1439 }
1440 if (this == vmClasses::Class_klass()) {
1441 ResourceMark rm(THREAD__the_thread__);
1442 THROW_MSG(throwError ? vmSymbols::java_lang_IllegalAccessError(){ Exceptions::_throw_msg(__the_thread__, "/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 1443, throwError ? vmSymbols::java_lang_IllegalAccessError(
) : vmSymbols::java_lang_IllegalAccessException(), external_name
()); return; }
1443 : vmSymbols::java_lang_IllegalAccessException(), external_name()){ Exceptions::_throw_msg(__the_thread__, "/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 1443, throwError ? vmSymbols::java_lang_IllegalAccessError(
) : vmSymbols::java_lang_IllegalAccessException(), external_name
()); return; }
;
1444 }
1445}
1446
1447Klass* InstanceKlass::array_klass(int n, TRAPSJavaThread* __the_thread__) {
1448 // Need load-acquire for lock-free read
1449 if (array_klasses_acquire() == NULL__null) {
1450 ResourceMark rm(THREAD__the_thread__);
1451 JavaThread *jt = THREAD__the_thread__;
1452 {
1453 // Atomic creation of array_klasses
1454 MutexLocker ma(THREAD__the_thread__, MultiArray_lock);
1455
1456 // Check if update has already taken place
1457 if (array_klasses() == NULL__null) {
1458 ObjArrayKlass* k = ObjArrayKlass::allocate_objArray_klass(class_loader_data(), 1, this, CHECK_NULL__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return __null; (void)(0
);
1459 // use 'release' to pair with lock-free load
1460 release_set_array_klasses(k);
1461 }
1462 }
1463 }
1464 // array_klasses() will always be set at this point
1465 ObjArrayKlass* oak = array_klasses();
1466 return oak->array_klass(n, THREAD__the_thread__);
1467}
1468
1469Klass* InstanceKlass::array_klass_or_null(int n) {
1470 // Need load-acquire for lock-free read
1471 ObjArrayKlass* oak = array_klasses_acquire();
1472 if (oak == NULL__null) {
1473 return NULL__null;
1474 } else {
1475 return oak->array_klass_or_null(n);
1476 }
1477}
1478
1479Klass* InstanceKlass::array_klass(TRAPSJavaThread* __the_thread__) {
1480 return array_klass(1, THREAD__the_thread__);
1481}
1482
1483Klass* InstanceKlass::array_klass_or_null() {
1484 return array_klass_or_null(1);
1485}
1486
static int call_class_initializer_counter = 0;   // for debugging
1488
1489Method* InstanceKlass::class_initializer() const {
1490 Method* clinit = find_method(
1491 vmSymbols::class_initializer_name(), vmSymbols::void_method_signature());
1492 if (clinit != NULL__null && clinit->has_valid_initializer_flags()) {
1493 return clinit;
1494 }
1495 return NULL__null;
1496}
1497
1498void InstanceKlass::call_class_initializer(TRAPSJavaThread* __the_thread__) {
1499 if (ReplayCompiles &&
1500 (ReplaySuppressInitializers == 1 ||
1501 (ReplaySuppressInitializers >= 2 && class_loader() != NULL__null))) {
1502 // Hide the existence of the initializer for the purpose of replaying the compile
1503 return;
1504 }
1505
1506 methodHandle h_method(THREAD__the_thread__, class_initializer());
1507 assert(!is_initialized(), "we cannot initialize twice")do { if (!(!is_initialized())) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 1507, "assert(" "!is_initialized()" ") failed", "we cannot initialize twice"
); ::breakpoint(); } } while (0)
;
1508 LogTarget(Info, class, init)LogTargetImpl<LogLevel::Info, (LogTag::_class), (LogTag::_init
), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG)
, (LogTag::__NO_TAG)>
lt;
1509 if (lt.is_enabled()) {
1510 ResourceMark rm(THREAD__the_thread__);
1511 LogStream ls(lt);
1512 ls.print("%d Initializing ", call_class_initializer_counter++);
1513 name()->print_value_on(&ls);
1514 ls.print_cr("%s (" INTPTR_FORMAT"0x%016" "l" "x" ")", h_method() == NULL__null ? "(no method)" : "", p2i(this));
1515 }
1516 if (h_method() != NULL__null) {
1517 JavaCallArguments args; // No arguments
1518 JavaValue result(T_VOID);
1519 JavaCalls::call(&result, h_method, &args, CHECK__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return ; (void)(0
); // Static call (no args)
1520 }
1521}
1522
1523
1524void InstanceKlass::mask_for(const methodHandle& method, int bci,
1525 InterpreterOopMap* entry_for) {
1526 // Lazily create the _oop_map_cache at first request
1527 // Lock-free access requires load_acquire.
1528 OopMapCache* oop_map_cache = Atomic::load_acquire(&_oop_map_cache);
1529 if (oop_map_cache == NULL__null) {
1530 MutexLocker x(OopMapCacheAlloc_lock);
1531 // Check if _oop_map_cache was allocated while we were waiting for this lock
1532 if ((oop_map_cache = _oop_map_cache) == NULL__null) {
1533 oop_map_cache = new OopMapCache();
1534 // Ensure _oop_map_cache is stable, since it is examined without a lock
1535 Atomic::release_store(&_oop_map_cache, oop_map_cache);
1536 }
1537 }
1538 // _oop_map_cache is constant after init; lookup below does its own locking.
1539 oop_map_cache->lookup(method, bci, entry_for);
1540}
1541
1542bool InstanceKlass::contains_field_offset(int offset) {
1543 fieldDescriptor fd;
1544 return find_field_from_offset(offset, false, &fd);
1545}
1546
1547bool InstanceKlass::find_local_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
1548 for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
1549 Symbol* f_name = fs.name();
1550 Symbol* f_sig = fs.signature();
1551 if (f_name == name && f_sig == sig) {
1552 fd->reinitialize(const_cast<InstanceKlass*>(this), fs.index());
1553 return true;
1554 }
1555 }
1556 return false;
1557}
1558
1559
1560Klass* InstanceKlass::find_interface_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
1561 const int n = local_interfaces()->length();
1562 for (int i = 0; i < n; i++) {
1563 Klass* intf1 = local_interfaces()->at(i);
1564 assert(intf1->is_interface(), "just checking type")do { if (!(intf1->is_interface())) { (*g_assert_poison) = 'X'
;; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 1564, "assert(" "intf1->is_interface()" ") failed", "just checking type"
); ::breakpoint(); } } while (0)
;
1565 // search for field in current interface
1566 if (InstanceKlass::cast(intf1)->find_local_field(name, sig, fd)) {
1567 assert(fd->is_static(), "interface field must be static")do { if (!(fd->is_static())) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 1567, "assert(" "fd->is_static()" ") failed", "interface field must be static"
); ::breakpoint(); } } while (0)
;
1568 return intf1;
1569 }
1570 // search for field in direct superinterfaces
1571 Klass* intf2 = InstanceKlass::cast(intf1)->find_interface_field(name, sig, fd);
1572 if (intf2 != NULL__null) return intf2;
1573 }
1574 // otherwise field lookup fails
1575 return NULL__null;
1576}
1577
1578
1579Klass* InstanceKlass::find_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
1580 // search order according to newest JVM spec (5.4.3.2, p.167).
1581 // 1) search for field in current klass
1582 if (find_local_field(name, sig, fd)) {
1583 return const_cast<InstanceKlass*>(this);
1584 }
1585 // 2) search for field recursively in direct superinterfaces
1586 { Klass* intf = find_interface_field(name, sig, fd);
1587 if (intf != NULL__null) return intf;
1588 }
1589 // 3) apply field lookup recursively if superclass exists
1590 { Klass* supr = super();
1591 if (supr != NULL__null) return InstanceKlass::cast(supr)->find_field(name, sig, fd);
1592 }
1593 // 4) otherwise field lookup fails
1594 return NULL__null;
1595}
1596
1597
1598Klass* InstanceKlass::find_field(Symbol* name, Symbol* sig, bool is_static, fieldDescriptor* fd) const {
1599 // search order according to newest JVM spec (5.4.3.2, p.167).
1600 // 1) search for field in current klass
1601 if (find_local_field(name, sig, fd)) {
1602 if (fd->is_static() == is_static) return const_cast<InstanceKlass*>(this);
1603 }
1604 // 2) search for field recursively in direct superinterfaces
1605 if (is_static) {
1606 Klass* intf = find_interface_field(name, sig, fd);
1607 if (intf != NULL__null) return intf;
1608 }
1609 // 3) apply field lookup recursively if superclass exists
1610 { Klass* supr = super();
1611 if (supr != NULL__null) return InstanceKlass::cast(supr)->find_field(name, sig, is_static, fd);
1612 }
1613 // 4) otherwise field lookup fails
1614 return NULL__null;
1615}
1616
1617
1618bool InstanceKlass::find_local_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const {
1619 for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
1620 if (fs.offset() == offset) {
1621 fd->reinitialize(const_cast<InstanceKlass*>(this), fs.index());
1622 if (fd->is_static() == is_static) return true;
1623 }
1624 }
1625 return false;
1626}
1627
1628
1629bool InstanceKlass::find_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const {
1630 Klass* klass = const_cast<InstanceKlass*>(this);
1631 while (klass != NULL__null) {
1632 if (InstanceKlass::cast(klass)->find_local_field_from_offset(offset, is_static, fd)) {
1633 return true;
1634 }
1635 klass = klass->super();
1636 }
1637 return false;
1638}
1639
1640
1641void InstanceKlass::methods_do(void f(Method* method)) {
1642 // Methods aren't stable until they are loaded. This can be read outside
1643 // a lock through the ClassLoaderData for profiling
1644 // Redefined scratch classes are on the list and need to be cleaned
1645 if (!is_loaded() && !is_scratch_class()) {
1646 return;
1647 }
1648
1649 int len = methods()->length();
1650 for (int index = 0; index < len; index++) {
1651 Method* m = methods()->at(index);
1652 assert(m->is_method(), "must be method")do { if (!(m->is_method())) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 1652, "assert(" "m->is_method()" ") failed", "must be method"
); ::breakpoint(); } } while (0)
;
1653 f(m);
1654 }
1655}
1656
1657
1658void InstanceKlass::do_local_static_fields(FieldClosure* cl) {
1659 for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
1660 if (fs.access_flags().is_static()) {
1661 fieldDescriptor& fd = fs.field_descriptor();
1662 cl->do_field(&fd);
1663 }
1664 }
1665}
1666
1667
1668void InstanceKlass::do_local_static_fields(void f(fieldDescriptor*, Handle, TRAPSJavaThread* __the_thread__), Handle mirror, TRAPSJavaThread* __the_thread__) {
1669 for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
1670 if (fs.access_flags().is_static()) {
1671 fieldDescriptor& fd = fs.field_descriptor();
1672 f(&fd, mirror, CHECK__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return ; (void)(0
);
1673 }
1674 }
1675}
1676
1677void InstanceKlass::do_nonstatic_fields(FieldClosure* cl) {
1678 InstanceKlass* super = superklass();
1679 if (super != NULL__null) {
1680 super->do_nonstatic_fields(cl);
1681 }
1682 fieldDescriptor fd;
1683 int length = java_fields_count();
1684 for (int i = 0; i < length; i += 1) {
1685 fd.reinitialize(this, i);
1686 if (!fd.is_static()) {
1687 cl->do_field(&fd);
1688 }
1689 }
1690}
1691
1692// first in Pair is offset, second is index.
1693static int compare_fields_by_offset(Pair<int,int>* a, Pair<int,int>* b) {
1694 return a->first - b->first;
1695}
1696
1697void InstanceKlass::print_nonstatic_fields(FieldClosure* cl) {
1698 InstanceKlass* super = superklass();
1699 if (super != NULL__null) {
1700 super->print_nonstatic_fields(cl);
1701 }
1702 ResourceMark rm;
1703 fieldDescriptor fd;
1704 // In DebugInfo nonstatic fields are sorted by offset.
1705 GrowableArray<Pair<int,int> > fields_sorted;
1706 int i = 0;
1707 for (AllFieldStream fs(this); !fs.done(); fs.next()) {
1708 if (!fs.access_flags().is_static()) {
1709 fd = fs.field_descriptor();
1710 Pair<int,int> f(fs.offset(), fs.index());
1711 fields_sorted.push(f);
1712 i++;
1713 }
1714 }
1715 if (i > 0) {
1716 int length = i;
1717 assert(length == fields_sorted.length(), "duh")do { if (!(length == fields_sorted.length())) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 1717, "assert(" "length == fields_sorted.length()" ") failed"
, "duh"); ::breakpoint(); } } while (0)
;
1718 // _sort_Fn is defined in growableArray.hpp.
1719 fields_sorted.sort(compare_fields_by_offset);
1720 for (int i = 0; i < length; i++) {
1721 fd.reinitialize(this, fields_sorted.at(i).second);
1722 assert(!fd.is_static() && fd.offset() == fields_sorted.at(i).first, "only nonstatic fields")do { if (!(!fd.is_static() && fd.offset() == fields_sorted
.at(i).first)) { (*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 1722, "assert(" "!fd.is_static() && fd.offset() == fields_sorted.at(i).first"
") failed", "only nonstatic fields"); ::breakpoint(); } } while
(0)
;
1723 cl->do_field(&fd);
1724 }
1725 }
1726}
1727
#ifdef ASSERT
// Debug-only exhaustive scan used to cross-check the binary search in
// find_method_index: returns the index of the first method matching both
// name and signature, or -1 if none.
static int linear_search(const Array<Method*>* methods,
                         const Symbol* name,
                         const Symbol* signature) {
  const int count = methods->length();
  for (int i = 0; i < count; i++) {
    const Method* const candidate = methods->at(i);
    assert(candidate->is_method(), "must be method");
    if (candidate->signature() == signature && candidate->name() == name) {
      return i;
    }
  }
  return -1;
}
#endif
1743
1744bool InstanceKlass::_disable_method_binary_search = false;
1745
1746NOINLINE__attribute__ ((noinline)) int linear_search(const Array<Method*>* methods, const Symbol* name) {
1747 int len = methods->length();
1748 int l = 0;
1749 int h = len - 1;
1750 while (l <= h) {
1751 Method* m = methods->at(l);
1752 if (m->name() == name) {
1753 return l;
1754 }
1755 l++;
1756 }
1757 return -1;
1758}
1759
1760inline int InstanceKlass::quick_search(const Array<Method*>* methods, const Symbol* name) {
1761 if (_disable_method_binary_search) {
1762 assert(DynamicDumpSharedSpaces, "must be")do { if (!(DynamicDumpSharedSpaces)) { (*g_assert_poison) = 'X'
;; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 1762, "assert(" "DynamicDumpSharedSpaces" ") failed", "must be"
); ::breakpoint(); } } while (0)
;
1763 // At the final stage of dynamic dumping, the methods array may not be sorted
1764 // by ascending addresses of their names, so we can't use binary search anymore.
1765 // However, methods with the same name are still laid out consecutively inside the
1766 // methods array, so let's look for the first one that matches.
1767 return linear_search(methods, name);
1768 }
1769
1770 int len = methods->length();
1771 int l = 0;
1772 int h = len - 1;
1773
1774 // methods are sorted by ascending addresses of their names, so do binary search
1775 while (l <= h) {
1776 int mid = (l + h) >> 1;
1777 Method* m = methods->at(mid);
1778 assert(m->is_method(), "must be method")do { if (!(m->is_method())) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 1778, "assert(" "m->is_method()" ") failed", "must be method"
); ::breakpoint(); } } while (0)
;
1779 int res = m->name()->fast_compare(name);
1780 if (res == 0) {
1781 return mid;
1782 } else if (res < 0) {
1783 l = mid + 1;
1784 } else {
1785 h = mid - 1;
1786 }
1787 }
1788 return -1;
1789}
1790
1791// find_method looks up the name/signature in the local methods array
1792Method* InstanceKlass::find_method(const Symbol* name,
1793 const Symbol* signature) const {
1794 return find_method_impl(name, signature,
1795 OverpassLookupMode::find,
1796 StaticLookupMode::find,
1797 PrivateLookupMode::find);
1798}
1799
1800Method* InstanceKlass::find_method_impl(const Symbol* name,
1801 const Symbol* signature,
1802 OverpassLookupMode overpass_mode,
1803 StaticLookupMode static_mode,
1804 PrivateLookupMode private_mode) const {
1805 return InstanceKlass::find_method_impl(methods(),
1806 name,
1807 signature,
1808 overpass_mode,
1809 static_mode,
1810 private_mode);
1811}
1812
1813// find_instance_method looks up the name/signature in the local methods array
1814// and skips over static methods
1815Method* InstanceKlass::find_instance_method(const Array<Method*>* methods,
1816 const Symbol* name,
1817 const Symbol* signature,
1818 PrivateLookupMode private_mode) {
1819 Method* const meth = InstanceKlass::find_method_impl(methods,
1820 name,
1821 signature,
1822 OverpassLookupMode::find,
1823 StaticLookupMode::skip,
1824 private_mode);
1825 assert(((meth == NULL) || !meth->is_static()),do { if (!(((meth == __null) || !meth->is_static()))) { (*
g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 1826, "assert(" "((meth == __null) || !meth->is_static())"
") failed", "find_instance_method should have skipped statics"
); ::breakpoint(); } } while (0)
1826 "find_instance_method should have skipped statics")do { if (!(((meth == __null) || !meth->is_static()))) { (*
g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 1826, "assert(" "((meth == __null) || !meth->is_static())"
") failed", "find_instance_method should have skipped statics"
); ::breakpoint(); } } while (0)
;
1827 return meth;
1828}
1829
1830// find_instance_method looks up the name/signature in the local methods array
1831// and skips over static methods
1832Method* InstanceKlass::find_instance_method(const Symbol* name,
1833 const Symbol* signature,
1834 PrivateLookupMode private_mode) const {
1835 return InstanceKlass::find_instance_method(methods(), name, signature, private_mode);
1836}
1837
1838// Find looks up the name/signature in the local methods array
1839// and filters on the overpass, static and private flags
1840// This returns the first one found
1841// note that the local methods array can have up to one overpass, one static
1842// and one instance (private or not) with the same name/signature
1843Method* InstanceKlass::find_local_method(const Symbol* name,
1844 const Symbol* signature,
1845 OverpassLookupMode overpass_mode,
1846 StaticLookupMode static_mode,
1847 PrivateLookupMode private_mode) const {
1848 return InstanceKlass::find_method_impl(methods(),
1849 name,
1850 signature,
1851 overpass_mode,
1852 static_mode,
1853 private_mode);
1854}
1855
1856// Find looks up the name/signature in the local methods array
1857// and filters on the overpass, static and private flags
1858// This returns the first one found
1859// note that the local methods array can have up to one overpass, one static
1860// and one instance (private or not) with the same name/signature
1861Method* InstanceKlass::find_local_method(const Array<Method*>* methods,
1862 const Symbol* name,
1863 const Symbol* signature,
1864 OverpassLookupMode overpass_mode,
1865 StaticLookupMode static_mode,
1866 PrivateLookupMode private_mode) {
1867 return InstanceKlass::find_method_impl(methods,
1868 name,
1869 signature,
1870 overpass_mode,
1871 static_mode,
1872 private_mode);
1873}
1874
1875Method* InstanceKlass::find_method(const Array<Method*>* methods,
1876 const Symbol* name,
1877 const Symbol* signature) {
1878 return InstanceKlass::find_method_impl(methods,
1879 name,
1880 signature,
1881 OverpassLookupMode::find,
1882 StaticLookupMode::find,
1883 PrivateLookupMode::find);
1884}
1885
1886Method* InstanceKlass::find_method_impl(const Array<Method*>* methods,
1887 const Symbol* name,
1888 const Symbol* signature,
1889 OverpassLookupMode overpass_mode,
1890 StaticLookupMode static_mode,
1891 PrivateLookupMode private_mode) {
1892 int hit = find_method_index(methods, name, signature, overpass_mode, static_mode, private_mode);
1893 return hit >= 0 ? methods->at(hit): NULL__null;
1894}
1895
1896// true if method matches signature and conforms to skipping_X conditions.
1897static bool method_matches(const Method* m,
1898 const Symbol* signature,
1899 bool skipping_overpass,
1900 bool skipping_static,
1901 bool skipping_private) {
1902 return ((m->signature() == signature) &&
1903 (!skipping_overpass || !m->is_overpass()) &&
1904 (!skipping_static || !m->is_static()) &&
1905 (!skipping_private || !m->is_private()));
1906}
1907
1908// Used directly for default_methods to find the index into the
1909// default_vtable_indices, and indirectly by find_method
1910// find_method_index looks in the local methods array to return the index
1911// of the matching name/signature. If, overpass methods are being ignored,
1912// the search continues to find a potential non-overpass match. This capability
1913// is important during method resolution to prefer a static method, for example,
1914// over an overpass method.
1915// There is the possibility in any _method's array to have the same name/signature
1916// for a static method, an overpass method and a local instance method
1917// To correctly catch a given method, the search criteria may need
1918// to explicitly skip the other two. For local instance methods, it
1919// is often necessary to skip private methods
1920int InstanceKlass::find_method_index(const Array<Method*>* methods,
1921 const Symbol* name,
1922 const Symbol* signature,
1923 OverpassLookupMode overpass_mode,
1924 StaticLookupMode static_mode,
1925 PrivateLookupMode private_mode) {
1926 const bool skipping_overpass = (overpass_mode == OverpassLookupMode::skip);
1927 const bool skipping_static = (static_mode == StaticLookupMode::skip);
1928 const bool skipping_private = (private_mode == PrivateLookupMode::skip);
1929 const int hit = quick_search(methods, name);
1930 if (hit != -1) {
1931 const Method* const m = methods->at(hit);
1932
1933 // Do linear search to find matching signature. First, quick check
1934 // for common case, ignoring overpasses if requested.
1935 if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) {
1936 return hit;
1937 }
1938
1939 // search downwards through overloaded methods
1940 int i;
1941 for (i = hit - 1; i >= 0; --i) {
1942 const Method* const m = methods->at(i);
1943 assert(m->is_method(), "must be method")do { if (!(m->is_method())) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 1943, "assert(" "m->is_method()" ") failed", "must be method"
); ::breakpoint(); } } while (0)
;
1944 if (m->name() != name) {
1945 break;
1946 }
1947 if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) {
1948 return i;
1949 }
1950 }
1951 // search upwards
1952 for (i = hit + 1; i < methods->length(); ++i) {
1953 const Method* const m = methods->at(i);
1954 assert(m->is_method(), "must be method")do { if (!(m->is_method())) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 1954, "assert(" "m->is_method()" ") failed", "must be method"
); ::breakpoint(); } } while (0)
;
1955 if (m->name() != name) {
1956 break;
1957 }
1958 if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) {
1959 return i;
1960 }
1961 }
1962 // not found
1963#ifdef ASSERT1
1964 const int index = (skipping_overpass || skipping_static || skipping_private) ? -1 :
1965 linear_search(methods, name, signature);
1966 assert(-1 == index, "binary search should have found entry %d", index)do { if (!(-1 == index)) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 1966, "assert(" "-1 == index" ") failed", "binary search should have found entry %d"
, index); ::breakpoint(); } } while (0)
;
1967#endif
1968 }
1969 return -1;
1970}
1971
1972int InstanceKlass::find_method_by_name(const Symbol* name, int* end) const {
1973 return find_method_by_name(methods(), name, end);
1974}
1975
1976int InstanceKlass::find_method_by_name(const Array<Method*>* methods,
1977 const Symbol* name,
1978 int* end_ptr) {
1979 assert(end_ptr != NULL, "just checking")do { if (!(end_ptr != __null)) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 1979, "assert(" "end_ptr != __null" ") failed", "just checking"
); ::breakpoint(); } } while (0)
;
1980 int start = quick_search(methods, name);
1981 int end = start + 1;
1982 if (start != -1) {
1983 while (start - 1 >= 0 && (methods->at(start - 1))->name() == name) --start;
1984 while (end < methods->length() && (methods->at(end))->name() == name) ++end;
1985 *end_ptr = end;
1986 return start;
1987 }
1988 return -1;
1989}
1990
1991// uncached_lookup_method searches both the local class methods array and all
1992// superclasses methods arrays, skipping any overpass methods in superclasses,
1993// and possibly skipping private methods.
1994Method* InstanceKlass::uncached_lookup_method(const Symbol* name,
1995 const Symbol* signature,
1996 OverpassLookupMode overpass_mode,
1997 PrivateLookupMode private_mode) const {
1998 OverpassLookupMode overpass_local_mode = overpass_mode;
1999 const Klass* klass = this;
2000 while (klass != NULL__null) {
2001 Method* const method = InstanceKlass::cast(klass)->find_method_impl(name,
2002 signature,
2003 overpass_local_mode,
2004 StaticLookupMode::find,
2005 private_mode);
2006 if (method != NULL__null) {
2007 return method;
2008 }
2009 klass = klass->super();
2010 overpass_local_mode = OverpassLookupMode::skip; // Always ignore overpass methods in superclasses
2011 }
2012 return NULL__null;
2013}
2014
#ifdef ASSERT
// Debug-only: search through the class hierarchy and return true if this
// class or one of its superclasses was redefined.
bool InstanceKlass::has_redefined_this_or_super() const {
  for (const Klass* k = this; k != NULL; k = k->super()) {
    if (InstanceKlass::cast(k)->has_been_redefined()) {
      return true;
    }
  }
  return false;
}
#endif
2029
2030// lookup a method in the default methods list then in all transitive interfaces
2031// Do NOT return private or static methods
2032Method* InstanceKlass::lookup_method_in_ordered_interfaces(Symbol* name,
2033 Symbol* signature) const {
2034 Method* m = NULL__null;
2035 if (default_methods() != NULL__null) {
2036 m = find_method(default_methods(), name, signature);
2037 }
2038 // Look up interfaces
2039 if (m == NULL__null) {
2040 m = lookup_method_in_all_interfaces(name, signature, DefaultsLookupMode::find);
2041 }
2042 return m;
2043}
2044
2045// lookup a method in all the interfaces that this class implements
2046// Do NOT return private or static methods, new in JDK8 which are not externally visible
2047// They should only be found in the initial InterfaceMethodRef
2048Method* InstanceKlass::lookup_method_in_all_interfaces(Symbol* name,
2049 Symbol* signature,
2050 DefaultsLookupMode defaults_mode) const {
2051 Array<InstanceKlass*>* all_ifs = transitive_interfaces();
2052 int num_ifs = all_ifs->length();
2053 InstanceKlass *ik = NULL__null;
2054 for (int i = 0; i < num_ifs; i++) {
2055 ik = all_ifs->at(i);
2056 Method* m = ik->lookup_method(name, signature);
2057 if (m != NULL__null && m->is_public() && !m->is_static() &&
2058 ((defaults_mode != DefaultsLookupMode::skip) || !m->is_default_method())) {
2059 return m;
2060 }
2061 }
2062 return NULL__null;
2063}
2064
2065/* jni_id_for for jfieldIds only */
2066JNIid* InstanceKlass::jni_id_for(int offset) {
2067 MutexLocker ml(JfieldIdCreation_lock);
2068 JNIid* probe = jni_ids() == NULL__null ? NULL__null : jni_ids()->find(offset);
2069 if (probe == NULL__null) {
2070 // Allocate new static field identifier
2071 probe = new JNIid(this, offset, jni_ids());
2072 set_jni_ids(probe);
2073 }
2074 return probe;
2075}
2076
2077u2 InstanceKlass::enclosing_method_data(int offset) const {
2078 const Array<jushort>* const inner_class_list = inner_classes();
2079 if (inner_class_list == NULL__null) {
2080 return 0;
2081 }
2082 const int length = inner_class_list->length();
2083 if (length % inner_class_next_offset == 0) {
2084 return 0;
2085 }
2086 const int index = length - enclosing_method_attribute_size;
2087 assert(offset < enclosing_method_attribute_size, "invalid offset")do { if (!(offset < enclosing_method_attribute_size)) { (*
g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 2087, "assert(" "offset < enclosing_method_attribute_size"
") failed", "invalid offset"); ::breakpoint(); } } while (0)
;
2088 return inner_class_list->at(index + offset);
2089}
2090
2091void InstanceKlass::set_enclosing_method_indices(u2 class_index,
2092 u2 method_index) {
2093 Array<jushort>* inner_class_list = inner_classes();
2094 assert (inner_class_list != NULL, "_inner_classes list is not set up")do { if (!(inner_class_list != __null)) { (*g_assert_poison) =
'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 2094, "assert(" "inner_class_list != __null" ") failed", "_inner_classes list is not set up"
); ::breakpoint(); } } while (0)
;
2095 int length = inner_class_list->length();
2096 if (length % inner_class_next_offset == enclosing_method_attribute_size) {
2097 int index = length - enclosing_method_attribute_size;
2098 inner_class_list->at_put(
2099 index + enclosing_method_class_index_offset, class_index);
2100 inner_class_list->at_put(
2101 index + enclosing_method_method_index_offset, method_index);
2102 }
2103}
2104
2105// Lookup or create a jmethodID.
2106// This code is called by the VMThread and JavaThreads so the
2107// locking has to be done very carefully to avoid deadlocks
2108// and/or other cache consistency problems.
2109//
2110jmethodID InstanceKlass::get_jmethod_id(const methodHandle& method_h) {
2111 size_t idnum = (size_t)method_h->method_idnum();
2112 jmethodID* jmeths = methods_jmethod_ids_acquire();
2113 size_t length = 0;
2114 jmethodID id = NULL__null;
2115
2116 // We use a double-check locking idiom here because this cache is
2117 // performance sensitive. In the normal system, this cache only
2118 // transitions from NULL to non-NULL which is safe because we use
2119 // release_set_methods_jmethod_ids() to advertise the new cache.
2120 // A partially constructed cache should never be seen by a racing
2121 // thread. We also use release_store() to save a new jmethodID
2122 // in the cache so a partially constructed jmethodID should never be
2123 // seen either. Cache reads of existing jmethodIDs proceed without a
2124 // lock, but cache writes of a new jmethodID requires uniqueness and
2125 // creation of the cache itself requires no leaks so a lock is
2126 // generally acquired in those two cases.
2127 //
2128 // If the RedefineClasses() API has been used, then this cache can
2129 // grow and we'll have transitions from non-NULL to bigger non-NULL.
2130 // Cache creation requires no leaks and we require safety between all
2131 // cache accesses and freeing of the old cache so a lock is generally
2132 // acquired when the RedefineClasses() API has been used.
2133
2134 if (jmeths != NULL__null) {
1
Assuming 'jmeths' is not equal to NULL
2
Taking true branch
2135 // the cache already exists
2136 if (!idnum_can_increment()) {
3
Taking true branch
2137 // the cache can't grow so we can just get the current values
2138 get_jmethod_id_length_value(jmeths, idnum, &length, &id);
2139 } else {
2140 // cache can grow so we have to be more careful
2141 if (Threads::number_of_threads() == 0 ||
2142 SafepointSynchronize::is_at_safepoint()) {
2143 // we're single threaded or at a safepoint - no locking needed
2144 get_jmethod_id_length_value(jmeths, idnum, &length, &id);
2145 } else {
2146 MutexLocker ml(JmethodIdCreation_lock, Mutex::_no_safepoint_check_flag);
2147 get_jmethod_id_length_value(jmeths, idnum, &length, &id);
2148 }
2149 }
2150 }
2151 // implied else:
2152 // we need to allocate a cache so default length and id values are good
2153
2154 if (jmeths
3.1
'jmeths' is not equal to NULL
3.1
'jmeths' is not equal to NULL
3.1
'jmeths' is not equal to NULL
3.1
'jmeths' is not equal to NULL
== NULL__null || // no cache yet
5
Taking true branch
2155 length
3.2
'length' is > 'idnum'
3.2
'length' is > 'idnum'
3.2
'length' is > 'idnum'
3.2
'length' is > 'idnum'
<= idnum || // cache is too short
2156 id == NULL__null) { // cache doesn't contain entry
4
Assuming 'id' is equal to NULL
2157
2158 // This function can be called by the VMThread so we have to do all
2159 // things that might block on a safepoint before grabbing the lock.
2160 // Otherwise, we can deadlock with the VMThread or have a cache
2161 // consistency issue. These vars keep track of what we might have
2162 // to free after the lock is dropped.
2163 jmethodID to_dealloc_id = NULL__null;
2164 jmethodID* to_dealloc_jmeths = NULL__null;
2165
2166 // may not allocate new_jmeths or use it if we allocate it
2167 jmethodID* new_jmeths = NULL__null;
6
'new_jmeths' initialized to a null pointer value
2168 if (length
6.1
'length' is > 'idnum'
6.1
'length' is > 'idnum'
6.1
'length' is > 'idnum'
6.1
'length' is > 'idnum'
<= idnum) {
7
Taking false branch
2169 // allocate a new cache that might be used
2170 size_t size = MAX2(idnum+1, (size_t)idnum_allocated_count());
2171 new_jmeths = NEW_C_HEAP_ARRAY(jmethodID, size+1, mtClass)(jmethodID*) (AllocateHeap((size+1) * sizeof(jmethodID), mtClass
))
;
2172 memset(new_jmeths, 0, (size+1)*sizeof(jmethodID));
2173 // cache size is stored in element[0], other elements offset by one
2174 new_jmeths[0] = (jmethodID)size;
2175 }
2176
2177 // allocate a new jmethodID that might be used
2178 jmethodID new_id = NULL__null;
2179 if (method_h->is_old() && !method_h->is_obsolete()) {
8
Calling 'Method::is_old'
14
Returning from 'Method::is_old'
15
Calling 'Method::is_obsolete'
21
Returning from 'Method::is_obsolete'
22
Taking true branch
2180 // The method passed in is old (but not obsolete), we need to use the current version
2181 Method* current_method = method_with_idnum((int)idnum);
2182 assert(current_method != NULL, "old and but not obsolete, so should exist")do { if (!(current_method != __null)) { (*g_assert_poison) = 'X'
;; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 2182, "assert(" "current_method != __null" ") failed", "old and but not obsolete, so should exist"
); ::breakpoint(); } } while (0)
;
23
Assuming the condition is true
24
Taking false branch
25
Loop condition is false. Exiting loop
2183 new_id = Method::make_jmethod_id(class_loader_data(), current_method);
2184 } else {
2185 // It is the current version of the method or an obsolete method,
2186 // use the version passed in
2187 new_id = Method::make_jmethod_id(class_loader_data(), method_h());
2188 }
2189
2190 if (Threads::number_of_threads() == 0 ||
26
Assuming the condition is true
2191 SafepointSynchronize::is_at_safepoint()) {
2192 // we're single threaded or at a safepoint - no locking needed
2193 id = get_jmethod_id_fetch_or_update(idnum, new_id, new_jmeths,
27
Passing null pointer value via 3rd parameter 'new_jmeths'
28
Calling 'InstanceKlass::get_jmethod_id_fetch_or_update'
2194 &to_dealloc_id, &to_dealloc_jmeths);
2195 } else {
2196 MutexLocker ml(JmethodIdCreation_lock, Mutex::_no_safepoint_check_flag);
2197 id = get_jmethod_id_fetch_or_update(idnum, new_id, new_jmeths,
2198 &to_dealloc_id, &to_dealloc_jmeths);
2199 }
2200
2201 // The lock has been dropped so we can free resources.
2202 // Free up either the old cache or the new cache if we allocated one.
2203 if (to_dealloc_jmeths != NULL__null) {
2204 FreeHeap(to_dealloc_jmeths);
2205 }
2206 // free up the new ID since it wasn't needed
2207 if (to_dealloc_id != NULL__null) {
2208 Method::destroy_jmethod_id(class_loader_data(), to_dealloc_id);
2209 }
2210 }
2211 return id;
2212}
2213
2214// Figure out how many jmethodIDs haven't been allocated, and make
2215// sure space for them is pre-allocated. This makes getting all
2216// method ids much, much faster with classes with more than 8
2217// methods, and has a *substantial* effect on performance with jvmti
2218// code that loads all jmethodIDs for all classes.
2219void InstanceKlass::ensure_space_for_methodids(int start_offset) {
2220 int new_jmeths = 0;
2221 int length = methods()->length();
2222 for (int index = start_offset; index < length; index++) {
2223 Method* m = methods()->at(index);
2224 jmethodID id = m->find_jmethod_id_or_null();
2225 if (id == NULL__null) {
2226 new_jmeths++;
2227 }
2228 }
2229 if (new_jmeths != 0) {
2230 Method::ensure_jmethod_ids(class_loader_data(), new_jmeths);
2231 }
2232}
2233
2234// Common code to fetch the jmethodID from the cache or update the
2235// cache with the new jmethodID. This function should never do anything
2236// that causes the caller to go to a safepoint or we can deadlock with
2237// the VMThread or have cache consistency issues.
2238//
2239jmethodID InstanceKlass::get_jmethod_id_fetch_or_update(
2240 size_t idnum, jmethodID new_id,
2241 jmethodID* new_jmeths, jmethodID* to_dealloc_id_p,
2242 jmethodID** to_dealloc_jmeths_p) {
2243 assert(new_id != NULL, "sanity check")do { if (!(new_id != __null)) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 2243, "assert(" "new_id != __null" ") failed", "sanity check"
); ::breakpoint(); } } while (0)
;
29
Assuming the condition is true
30
Taking false branch
31
Loop condition is false. Exiting loop
2244 assert(to_dealloc_id_p != NULL, "sanity check")do { if (!(to_dealloc_id_p != __null)) { (*g_assert_poison) =
'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 2244, "assert(" "to_dealloc_id_p != __null" ") failed", "sanity check"
); ::breakpoint(); } } while (0)
;
32
Taking false branch
33
Loop condition is false. Exiting loop
2245 assert(to_dealloc_jmeths_p != NULL, "sanity check")do { if (!(to_dealloc_jmeths_p != __null)) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 2245, "assert(" "to_dealloc_jmeths_p != __null" ") failed",
"sanity check"); ::breakpoint(); } } while (0)
;
34
Taking false branch
35
Loop condition is false. Exiting loop
2246 assert(Threads::number_of_threads() == 0 ||do { if (!(Threads::number_of_threads() == 0 || SafepointSynchronize
::is_at_safepoint() || JmethodIdCreation_lock->owned_by_self
())) { (*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 2248, "assert(" "Threads::number_of_threads() == 0 || SafepointSynchronize::is_at_safepoint() || JmethodIdCreation_lock->owned_by_self()"
") failed", "sanity check"); ::breakpoint(); } } while (0)
36
Taking false branch
37
Loop condition is false. Exiting loop
2247 SafepointSynchronize::is_at_safepoint() ||do { if (!(Threads::number_of_threads() == 0 || SafepointSynchronize
::is_at_safepoint() || JmethodIdCreation_lock->owned_by_self
())) { (*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 2248, "assert(" "Threads::number_of_threads() == 0 || SafepointSynchronize::is_at_safepoint() || JmethodIdCreation_lock->owned_by_self()"
") failed", "sanity check"); ::breakpoint(); } } while (0)
2248 JmethodIdCreation_lock->owned_by_self(), "sanity check")do { if (!(Threads::number_of_threads() == 0 || SafepointSynchronize
::is_at_safepoint() || JmethodIdCreation_lock->owned_by_self
())) { (*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 2248, "assert(" "Threads::number_of_threads() == 0 || SafepointSynchronize::is_at_safepoint() || JmethodIdCreation_lock->owned_by_self()"
") failed", "sanity check"); ::breakpoint(); } } while (0)
;
2249
2250 // reacquire the cache - we are locked, single threaded or at a safepoint
2251 jmethodID* jmeths = methods_jmethod_ids_acquire();
2252 jmethodID id = NULL__null;
2253 size_t length = 0;
2254
2255 if (jmeths == NULL__null || // no cache yet
38
Assuming 'jmeths' is equal to NULL
2256 (length = (size_t)jmeths[0]) <= idnum) { // cache is too short
2257 if (jmeths
38.1
'jmeths' is equal to NULL
38.1
'jmeths' is equal to NULL
38.1
'jmeths' is equal to NULL
38.1
'jmeths' is equal to NULL
!= NULL__null) {
39
Taking false branch
2258 // copy any existing entries from the old cache
2259 for (size_t index = 0; index < length; index++) {
2260 new_jmeths[index+1] = jmeths[index+1];
2261 }
2262 *to_dealloc_jmeths_p = jmeths; // save old cache for later delete
2263 }
2264 release_set_methods_jmethod_ids(jmeths = new_jmeths);
40
Null pointer value stored to 'jmeths'
2265 } else {
2266 // fetch jmethodID (if any) from the existing cache
2267 id = jmeths[idnum+1];
2268 *to_dealloc_jmeths_p = new_jmeths; // save new cache for later delete
2269 }
2270 if (id
40.1
'id' is equal to NULL
40.1
'id' is equal to NULL
40.1
'id' is equal to NULL
40.1
'id' is equal to NULL
== NULL__null) {
41
Taking true branch
2271 // No matching jmethodID in the existing cache or we have a new
2272 // cache or we just grew the cache. This cache write is done here
2273 // by the first thread to win the foot race because a jmethodID
2274 // needs to be unique once it is generally available.
2275 id = new_id;
2276
2277 // The jmethodID cache can be read while unlocked so we have to
2278 // make sure the new jmethodID is complete before installing it
2279 // in the cache.
2280 Atomic::release_store(&jmeths[idnum+1], id);
42
Passing null pointer value via 1st parameter 'p'
43
Calling 'Atomic::release_store'
2281 } else {
2282 *to_dealloc_id_p = new_id; // save new id for later delete
2283 }
2284 return id;
2285}
2286
2287
2288// Common code to get the jmethodID cache length and the jmethodID
2289// value at index idnum if there is one.
2290//
// Reads the jmethodID cache: writes the cache length to *length_p and the
// entry at idnum (or NULL if the cache is shorter than idnum+1) to *id_p.
// All three pointer arguments are asserted non-NULL before use.
2291void InstanceKlass::get_jmethod_id_length_value(jmethodID* cache,
2292 size_t idnum, size_t *length_p, jmethodID* id_p) {
2293 assert(cache != NULL, "sanity check")do { if (!(cache != __null)) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 2293, "assert(" "cache != __null" ") failed", "sanity check"
); ::breakpoint(); } } while (0)
;
2294 assert(length_p != NULL, "sanity check")do { if (!(length_p != __null)) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 2294, "assert(" "length_p != __null" ") failed", "sanity check"
); ::breakpoint(); } } while (0)
;
2295 assert(id_p != NULL, "sanity check")do { if (!(id_p != __null)) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 2295, "assert(" "id_p != __null" ") failed", "sanity check"
); ::breakpoint(); } } while (0)
;
2296
2297 // cache size is stored in element[0], other elements offset by one
2298 *length_p = (size_t)cache[0];
2299 if (*length_p <= idnum) { // cache is too short
2300 *id_p = NULL__null;
2301 } else {
2302 *id_p = cache[idnum+1]; // fetch jmethodID (if any)
2303 }
2304}
2305
2306
2307// Lookup a jmethodID, NULL if not found. Do no blocking, no allocations, no handles
2308jmethodID InstanceKlass::jmethod_id_or_null(Method* method) {
2309 size_t idnum = (size_t)method->method_idnum();
2310 jmethodID* jmeths = methods_jmethod_ids_acquire();
2311 size_t length; // length assigned as debugging crumb
2312 jmethodID id = NULL__null;
2313 if (jmeths != NULL__null && // If there is a cache
2314 (length = (size_t)jmeths[0]) > idnum) { // and if it is long enough,
2315 id = jmeths[idnum+1]; // Look up the id (may be NULL)
2316 }
2317 return id;
2318}
2319
// Builds a DependencyContext view over this klass's _dep_context and
// _dep_context_last_cleaned fields; the context does not own the storage.
2320inline DependencyContext InstanceKlass::dependencies() {
2321 DependencyContext dep_context(&_dep_context, &_dep_context_last_cleaned);
2322 return dep_context;
2323}
2324
// Delegates to the dependency context; returns its count of marked nmethods.
2325int InstanceKlass::mark_dependent_nmethods(KlassDepChange& changes) {
2326 return dependencies().mark_dependent_nmethods(changes);
2327}
2328
// Registers nm with this klass's dependency context.
2329void InstanceKlass::add_dependent_nmethod(nmethod* nm) {
2330 dependencies().add_dependent_nmethod(nm);
2331}
2332
// Unregisters nm from this klass's dependency context.
2333void InstanceKlass::remove_dependent_nmethod(nmethod* nm) {
2334 dependencies().remove_dependent_nmethod(nm);
2335}
2336
// Purges unloading entries from the dependency context.
2337void InstanceKlass::clean_dependency_context() {
2338 dependencies().clean_unloading_dependents();
2339}
2340
// Debug-build-only helpers: both simply delegate to the dependency context.
2341#ifndef PRODUCT
2342void InstanceKlass::print_dependent_nmethods(bool verbose) {
2343 dependencies().print_dependent_nmethods(verbose);
2344}
2345
2346bool InstanceKlass::is_dependent_nmethod(nmethod* nm) {
2347 return dependencies().is_dependent_nmethod(nm);
2348}
2349#endif //PRODUCT
2350
// Clears weak links held by this klass: the implementor field (for
// interfaces) and stale method-data entries.
2351void InstanceKlass::clean_weak_instanceklass_links() {
2352 clean_implementors_list();
2353 clean_method_data();
2354}
2355
// For interfaces only: if the recorded implementor's class loader is dead,
// CAS the implementor slot back to NULL. The load_acquire/cmpxchg pair
// races against concurrent inserts; the loop retries until the CAS wins
// or there is nothing to clear.
2356void InstanceKlass::clean_implementors_list() {
2357 assert(is_loader_alive(), "this klass should be live")do { if (!(is_loader_alive())) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 2357, "assert(" "is_loader_alive()" ") failed", "this klass should be live"
); ::breakpoint(); } } while (0)
;
2358 if (is_interface()) {
2359 assert (ClassUnloading, "only called for ClassUnloading")do { if (!(ClassUnloading)) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 2359, "assert(" "ClassUnloading" ") failed", "only called for ClassUnloading"
); ::breakpoint(); } } while (0)
;
2360 for (;;) {
2361 // Use load_acquire due to competing with inserts
2362 InstanceKlass* impl = Atomic::load_acquire(adr_implementor());
2363 if (impl != NULL__null && !impl->is_loader_alive()) {
2364 // NULL this field, might be an unloaded instance klass or NULL
2365 InstanceKlass* volatile* iklass = adr_implementor();
2366 if (Atomic::cmpxchg(iklass, impl, (InstanceKlass*)NULL__null) == impl) {
2367 // Successfully unlinking implementor.
2368 if (log_is_enabled(Trace, class, unload)(LogImpl<(LogTag::_class), (LogTag::_unload), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG)
>::is_level(LogLevel::Trace))
) {
2369 ResourceMark rm;
2370 log_trace(class, unload)(!(LogImpl<(LogTag::_class), (LogTag::_unload), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG)
>::is_level(LogLevel::Trace))) ? (void)0 : LogImpl<(LogTag
::_class), (LogTag::_unload), (LogTag::__NO_TAG), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG)>::write<LogLevel
::Trace>
("unlinking class (implementor): %s", impl->external_name());
2371 }
2372 return;
2373 }
2374 } else {
2375 return;
2376 }
2377 }
2378 }
2379}
2380
// Walks every method's MethodData (if present) and cleans it. The extra
// data lock is skipped when already at a safepoint (MutexLocker on NULL).
2381void InstanceKlass::clean_method_data() {
2382 for (int m = 0; m < methods()->length(); m++) {
2383 MethodData* mdo = methods()->at(m)->method_data();
2384 if (mdo != NULL__null) {
2385 MutexLocker ml(SafepointSynchronize::is_at_safepoint() ? NULL__null : mdo->extra_data_lock());
2386 mdo->clean_method_data(/*always_clean*/false);
2387 }
2388 }
2389}
2390
// Pushes every metaspace pointer owned by this klass to the closure:
// annotations, array klasses, constants, inner classes, method arrays,
// interface arrays, itable entries, nest members, permitted subclasses
// and record components. Constants/default-vtable-indices are pushed as
// _writable when the klass is not yet rewritten.
2391void InstanceKlass::metaspace_pointers_do(MetaspaceClosure* it) {
2392 Klass::metaspace_pointers_do(it);
2393
2394 if (log_is_enabled(Trace, cds)(LogImpl<(LogTag::_cds), (LogTag::__NO_TAG), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG)
>::is_level(LogLevel::Trace))
) {
2395 ResourceMark rm;
2396 log_trace(cds)(!(LogImpl<(LogTag::_cds), (LogTag::__NO_TAG), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG)
>::is_level(LogLevel::Trace))) ? (void)0 : LogImpl<(LogTag
::_cds), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG)>::write<LogLevel
::Trace>
("Iter(InstanceKlass): %p (%s)", this, external_name());
2397 }
2398
2399 it->push(&_annotations);
2400 it->push((Klass**)&_array_klasses);
2401 if (!is_rewritten()) {
2402 it->push(&_constants, MetaspaceClosure::_writable);
2403 } else {
2404 it->push(&_constants);
2405 }
2406 it->push(&_inner_classes);
2407#if INCLUDE_JVMTI1
2408 it->push(&_previous_versions);
2409#endif
2410 it->push(&_methods);
2411 it->push(&_default_methods);
2412 it->push(&_local_interfaces);
2413 it->push(&_transitive_interfaces);
2414 it->push(&_method_ordering);
2415 if (!is_rewritten()) {
2416 it->push(&_default_vtable_indices, MetaspaceClosure::_writable);
2417 } else {
2418 it->push(&_default_vtable_indices);
2419 }
2420 it->push(&_fields);
2421
2422 if (itable_length() > 0) {
2423 itableOffsetEntry* ioe = (itableOffsetEntry*)start_of_itable();
2424 int method_table_offset_in_words = ioe->offset()/wordSize;
2425 int nof_interfaces = (method_table_offset_in_words - itable_offset_in_words())
2426 / itableOffsetEntry::size();
2427
2428 for (int i = 0; i < nof_interfaces; i ++, ioe ++) {
2429 if (ioe->interface_klass() != NULL__null) {
2430 it->push(ioe->interface_klass_addr());
2431 itableMethodEntry* ime = ioe->first_method_entry(this);
2432 int n = klassItable::method_count_for_interface(ioe->interface_klass());
2433 for (int index = 0; index < n; index ++) {
2434 it->push(ime[index].method_addr());
2435 }
2436 }
2437 }
2438 }
2439
2440 it->push(&_nest_members);
2441 it->push(&_permitted_subclasses);
2442 it->push(&_record_components);
2443}
2444
// CDS dump-time preparation: strips runtime-only state before archiving.
// Resets init state to 'allocated', clears the implementor, recursively
// strips constants, methods and array klasses, and NULLs all non-metaspace
// fields (jmethodID cache, JNI ids, oop map cache, JVMTI caches, etc.).
// Bails out early for classes that failed verification during dumping.
2445void InstanceKlass::remove_unshareable_info() {
2446
2447 if (is_linked()) {
2448 assert(can_be_verified_at_dumptime(), "must be")do { if (!(can_be_verified_at_dumptime())) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 2448, "assert(" "can_be_verified_at_dumptime()" ") failed",
"must be"); ::breakpoint(); } } while (0)
;
2449 // Remember this so we can avoid walking the hierarchy at runtime.
2450 set_verified_at_dump_time();
2451 }
2452
2453 Klass::remove_unshareable_info();
2454
2455 if (SystemDictionaryShared::has_class_failed_verification(this)) {
2456 // Classes are attempted to link during dumping and may fail,
2457 // but these classes are still in the dictionary and class list in CLD.
2458 // If the class has failed verification, there is nothing else to remove.
2459 return;
2460 }
2461
2462 // Reset to the 'allocated' state to prevent any premature accessing to
2463 // a shared class at runtime while the class is still being loaded and
2464 // restored. A class' init_state is set to 'loaded' at runtime when it's
2465 // being added to class hierarchy (see SystemDictionary:::add_to_hierarchy()).
2466 _init_state = allocated;
2467
2468 { // Otherwise this needs to take out the Compile_lock.
2469 assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint")do { if (!(SafepointSynchronize::is_at_safepoint())) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 2469, "assert(" "SafepointSynchronize::is_at_safepoint()" ") failed"
, "only called at safepoint"); ::breakpoint(); } } while (0)
;
2470 init_implementor();
2471 }
2472
2473 constants()->remove_unshareable_info();
2474
2475 for (int i = 0; i < methods()->length(); i++) {
2476 Method* m = methods()->at(i);
2477 m->remove_unshareable_info();
2478 }
2479
2480 // do array classes also.
2481 if (array_klasses() != NULL__null) {
2482 array_klasses()->remove_unshareable_info();
2483 }
2484
2485 // These are not allocated from metaspace. They are safe to set to NULL.
2486 _source_debug_extension = NULL__null;
2487 _dep_context = NULL__null;
2488 _osr_nmethods_head = NULL__null;
2489#if INCLUDE_JVMTI1
2490 _breakpoints = NULL__null;
2491 _previous_versions = NULL__null;
2492 _cached_class_file = NULL__null;
2493 _jvmti_cached_class_field_map = NULL__null;
2494#endif
2495
2496 _init_thread = NULL__null;
2497 _methods_jmethod_ids = NULL__null;
2498 _jni_ids = NULL__null;
2499 _oop_map_cache = NULL__null;
2500 // clear _nest_host to ensure re-load at runtime
2501 _nest_host = NULL__null;
2502 init_shared_package_entry();
2503 _dep_context_last_cleaned = 0;
2504}
2505
// Drops the java mirror for this klass and, recursively, for its array
// klasses (if any).
2506void InstanceKlass::remove_java_mirror() {
2507 Klass::remove_java_mirror();
2508
2509 // do array classes also.
2510 if (array_klasses() != NULL__null) {
2511 array_klasses()->remove_java_mirror();
2512 }
2513}
2514
// Decides what to archive in _package_entry at CDS dump time:
// - no CDS java heap / no full module graph: NULL
// - dynamic dump: keep only if already in shared metaspace
// - static dump: NULL for unregistered classes, else the archived entry.
// The pointer is then registered with ArchivePtrMarker for relocation.
2515void InstanceKlass::init_shared_package_entry() {
2516#if !INCLUDE_CDS_JAVA_HEAP1
2517 _package_entry = NULL__null;
2518#else
2519 if (!MetaspaceShared::use_full_module_graph()) {
2520 _package_entry = NULL__null;
2521 } else if (DynamicDumpSharedSpaces) {
2522 if (!MetaspaceShared::is_in_shared_metaspace(_package_entry)) {
2523 _package_entry = NULL__null;
2524 }
2525 } else {
2526 if (is_shared_unregistered_class()) {
2527 _package_entry = NULL__null;
2528 } else {
2529 _package_entry = PackageEntry::get_archived_entry(_package_entry);
2530 }
2531 }
2532 ArchivePtrMarker::mark_pointer((address**)&_package_entry);
2533#endif
2534}
2535
// Runtime counterpart of remove_unshareable_info(): re-attaches a shared
// class to a live loader. Sets the package, restores Klass-level state,
// each method, the constant pool's resolved references, and array klasses
// (under MultiArray_lock). Each CHECK-expanded call returns early if an
// exception is pending. Also re-inits vtable/itable if JVMTI has ever
// redefined a class, and applies the @ValueBased flag when configured.
2536void InstanceKlass::restore_unshareable_info(ClassLoaderData* loader_data, Handle protection_domain,
2537 PackageEntry* pkg_entry, TRAPSJavaThread* __the_thread__) {
2538 // SystemDictionary::add_to_hierarchy() sets the init_state to loaded
2539 // before the InstanceKlass is added to the SystemDictionary. Make
2540 // sure the current state is <loaded.
2541 assert(!is_loaded(), "invalid init state")do { if (!(!is_loaded())) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 2541, "assert(" "!is_loaded()" ") failed", "invalid init state"
); ::breakpoint(); } } while (0)
;
2542 assert(!shared_loading_failed(), "Must not try to load failed class again")do { if (!(!shared_loading_failed())) { (*g_assert_poison) = 'X'
;; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 2542, "assert(" "!shared_loading_failed()" ") failed", "Must not try to load failed class again"
); ::breakpoint(); } } while (0)
;
2543 set_package(loader_data, pkg_entry, CHECK__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return ; (void)(0
);
2544 Klass::restore_unshareable_info(loader_data, protection_domain, CHECK__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return ; (void)(0
);
2545
2546 Array<Method*>* methods = this->methods();
2547 int num_methods = methods->length();
2548 for (int index = 0; index < num_methods; ++index) {
2549 methods->at(index)->restore_unshareable_info(CHECK__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return ; (void)(0
);
2550 }
2551#if INCLUDE_JVMTI1
2552 if (JvmtiExport::has_redefined_a_class()) {
2553 // Reinitialize vtable because RedefineClasses may have changed some
2554 // entries in this vtable for super classes so the CDS vtable might
2555 // point to old or obsolete entries. RedefineClasses doesn't fix up
2556 // vtables in the shared system dictionary, only the main one.
2557 // It also redefines the itable too so fix that too.
2558 // First fix any default methods that point to a super class that may
2559 // have been redefined.
2560 bool trace_name_printed = false;
2561 adjust_default_methods(&trace_name_printed);
2562 vtable().initialize_vtable();
2563 itable().initialize_itable();
2564 }
2565#endif
2566
2567 // restore constant pool resolved references
2568 constants()->restore_unshareable_info(CHECK__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return ; (void)(0
);
2569
2570 if (array_klasses() != NULL__null) {
2571 // To get a consistent list of classes we need MultiArray_lock to ensure
2572 // array classes aren't observed while they are being restored.
2573 MutexLocker ml(MultiArray_lock);
2574 // Array classes have null protection domain.
2575 // --> see ArrayKlass::complete_create_array_klass()
2576 array_klasses()->restore_unshareable_info(ClassLoaderData::the_null_class_loader_data(), Handle(), CHECK__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return ; (void)(0
);
2577 }
2578
2579 // Initialize @ValueBased class annotation
2580 if (DiagnoseSyncOnValueBasedClasses && has_value_based_class_annotation()) {
2581 set_is_value_based();
2582 }
2583}
2584
2585// Check if a class or any of its supertypes has a version older than 50.
2586// CDS will not perform verification of old classes during dump time because
2587// without changing the old verifier, the verification constraint cannot be
2588// retrieved during dump time.
2589// Verification of archived old classes will be performed during run time.
// Returns true when this class and its whole supertype graph (super class
// chain and local interfaces, checked recursively) have class-file major
// version >= 50, or when the class already lives in the base CDS archive
// (in which case it was verified at its own dump time).
2590bool InstanceKlass::can_be_verified_at_dumptime() const {
2591 if (MetaspaceShared::is_in_shared_metaspace(this)) {
2592 // This is a class that was dumped into the base archive, so we know
2593 // it was verified at dump time.
2594 return true;
2595 }
2596 if (major_version() < 50 /*JAVA_6_VERSION*/) {
2597 return false;
2598 }
2599 if (java_super() != NULL__null && !java_super()->can_be_verified_at_dumptime()) {
2600 return false;
2601 }
2602 Array<InstanceKlass*>* interfaces = local_interfaces();
2603 int len = interfaces->length();
2604 for (int i = 0; i < len; i++) {
2605 if (!interfaces->at(i)->can_be_verified_at_dumptime()) {
2606 return false;
2607 }
2608 }
2609 return true;
2610}
2611
// ORs the _misc_flags bit matching the given CDS loader type; any other
// value is a programming error (ShouldNotReachHere).
2612void InstanceKlass::set_shared_class_loader_type(s2 loader_type) {
2613 switch (loader_type) {
2614 case ClassLoader::BOOT_LOADER:
2615 _misc_flags |= _misc_is_shared_boot_class;
2616 break;
2617 case ClassLoader::PLATFORM_LOADER:
2618 _misc_flags |= _misc_is_shared_platform_class;
2619 break;
2620 case ClassLoader::APP_LOADER:
2621 _misc_flags |= _misc_is_shared_app_class;
2622 break;
2623 default:
2624 ShouldNotReachHere()do { (*g_assert_poison) = 'X';; report_should_not_reach_here(
"/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 2624); ::breakpoint(); } while (0)
;
2625 break;
2626 }
2627}
2628
// Derives the shared-class loader-type flag from this klass's CLD.
// Note: a CLD matching none of the three builtin loaders sets no flag.
2629void InstanceKlass::assign_class_loader_type() {
2630 ClassLoaderData *cld = class_loader_data();
2631 if (cld->is_boot_class_loader_data()) {
2632 set_shared_class_loader_type(ClassLoader::BOOT_LOADER);
2633 }
2634 else if (cld->is_platform_class_loader_data()) {
2635 set_shared_class_loader_type(ClassLoader::PLATFORM_LOADER);
2636 }
2637 else if (cld->is_system_class_loader_data()) {
2638 set_shared_class_loader_type(ClassLoader::APP_LOADER);
2639 }
2640}
2641
// JVMTI-only file-local adapter so methods_do() can clear breakpoints.
2642#if INCLUDE_JVMTI1
2643static void clear_all_breakpoints(Method* m) {
2644 m->clear_all_breakpoints();
2645}
2646#endif
2647
// Class-unload notification fan-out: drops dependents, posts the JVMTI
// unload event, notifies ClassLoadingService and the shared dictionary,
// logs, records an Events entry, and (when JFR is built in) commits an
// EventClassUnload.
2648void InstanceKlass::unload_class(InstanceKlass* ik) {
2649 // Release dependencies.
2650 ik->dependencies().remove_all_dependents();
2651
2652 // notify the debugger
2653 if (JvmtiExport::should_post_class_unload()) {
2654 JvmtiExport::post_class_unload(ik);
2655 }
2656
2657 // notify ClassLoadingService of class unload
2658 ClassLoadingService::notify_class_unloaded(ik);
2659
2660 SystemDictionaryShared::handle_class_unloading(ik);
2661
2662 if (log_is_enabled(Info, class, unload)(LogImpl<(LogTag::_class), (LogTag::_unload), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG)
>::is_level(LogLevel::Info))
) {
2663 ResourceMark rm;
2664 log_info(class, unload)(!(LogImpl<(LogTag::_class), (LogTag::_unload), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG)
>::is_level(LogLevel::Info))) ? (void)0 : LogImpl<(LogTag
::_class), (LogTag::_unload), (LogTag::__NO_TAG), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG)>::write<LogLevel
::Info>
("unloading class %s " INTPTR_FORMAT"0x%016" "l" "x", ik->external_name(), p2i(ik));
2665 }
2666
2667 Events::log_class_unloading(Thread::current(), ik);
2668
2669#if INCLUDE_JFR1
2670 assert(ik != NULL, "invariant")do { if (!(ik != __null)) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 2670, "assert(" "ik != __null" ") failed", "invariant"); ::
breakpoint(); } } while (0)
;
2671 EventClassUnload event;
2672 event.set_unloadedClass(ik);
2673 event.set_definingClassLoader(ik->class_loader_data());
2674 event.commit();
2675#endif
2676}
2677
// File-local adapter so methods_do() can release per-method C-heap state.
2678static void method_release_C_heap_structures(Method* m) {
2679 m->release_C_heap_structures();
2680}
2681
2682// Called also by InstanceKlass::deallocate_contents, with false for release_constant_pool.
// Frees all C-heap-allocated structures hung off this klass: per-method
// state, the oop map cache, JNI field ids, the jmethodID cache, JVMTI
// breakpoints and cached class file, the source debug extension, and
// (optionally) the constant pool's C-heap data. Asserts the dependency
// context was already cleaned.
2683void InstanceKlass::release_C_heap_structures(bool release_constant_pool) {
2684 // Clean up C heap
2685 Klass::release_C_heap_structures();
2686
2687 // Deallocate and call destructors for MDO mutexes
2688 methods_do(method_release_C_heap_structures);
2689
2690 // Deallocate oop map cache
2691 if (_oop_map_cache != NULL__null) {
2692 delete _oop_map_cache;
2693 _oop_map_cache = NULL__null;
2694 }
2695
2696 // Deallocate JNI identifiers for jfieldIDs
2697 JNIid::deallocate(jni_ids());
2698 set_jni_ids(NULL__null);
2699
2700 jmethodID* jmeths = methods_jmethod_ids_acquire();
2701 if (jmeths != (jmethodID*)NULL__null) {
2702 release_set_methods_jmethod_ids(NULL__null);
2703 FreeHeap(jmeths);
2704 }
2705
2706 assert(_dep_context == NULL,do { if (!(_dep_context == __null)) { (*g_assert_poison) = 'X'
;; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 2707, "assert(" "_dep_context == __null" ") failed", "dependencies should already be cleaned"
); ::breakpoint(); } } while (0)
2707 "dependencies should already be cleaned")do { if (!(_dep_context == __null)) { (*g_assert_poison) = 'X'
;; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 2707, "assert(" "_dep_context == __null" ") failed", "dependencies should already be cleaned"
); ::breakpoint(); } } while (0)
;
2708
2709#if INCLUDE_JVMTI1
2710 // Deallocate breakpoint records
2711 if (breakpoints() != 0x0) {
2712 methods_do(clear_all_breakpoints);
2713 assert(breakpoints() == 0x0, "should have cleared breakpoints")do { if (!(breakpoints() == 0x0)) { (*g_assert_poison) = 'X';
; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 2713, "assert(" "breakpoints() == 0x0" ") failed", "should have cleared breakpoints"
); ::breakpoint(); } } while (0)
;
2714 }
2715
2716 // deallocate the cached class file
2717 if (_cached_class_file != NULL__null) {
2718 os::free(_cached_class_file);
2719 _cached_class_file = NULL__null;
2720 }
2721#endif
2722
2723 FREE_C_HEAP_ARRAY(char, _source_debug_extension)FreeHeap((char*)(_source_debug_extension));
2724
2725 if (release_constant_pool) {
2726 constants()->release_C_heap_structures();
2727 }
2728}
2729
// Stores a NUL-terminated C-heap copy of the SourceDebugExtension bytes
// (or NULL when array is NULL). Overflow of length+1 is assert-checked.
2730void InstanceKlass::set_source_debug_extension(const char* array, int length) {
2731 if (array == NULL__null) {
2732 _source_debug_extension = NULL__null;
2733 } else {
2734 // Adding one to the attribute length in order to store a null terminator
2735 // character could cause an overflow because the attribute length is
2736 // already coded with an u4 in the classfile, but in practice, it's
2737 // unlikely to happen.
2738 assert((length+1) > length, "Overflow checking")do { if (!((length+1) > length)) { (*g_assert_poison) = 'X'
;; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 2738, "assert(" "(length+1) > length" ") failed", "Overflow checking"
); ::breakpoint(); } } while (0)
;
2739 char* sde = NEW_C_HEAP_ARRAY(char, (length + 1), mtClass)(char*) (AllocateHeap(((length + 1)) * sizeof(char), mtClass)
)
;
2740 for (int i = 0; i < length; i++) {
2741 sde[i] = array[i];
2742 }
2743 sde[length] = '\0';
2744 _source_debug_extension = sde;
2745 }
2746}
2747
// Builds the JVM signature form of the class name ("Lpkg/Name;") in a
// resource-allocated buffer. For hidden classes the last '+' in the name
// is replaced by '.'. hash_len is currently always 0, so the hash-append
// loop is a no-op; the buffer reserves room for 'L', ';', NUL and hash.
2748const char* InstanceKlass::signature_name() const {
2749 int hash_len = 0;
2750 char hash_buf[40];
2751
2752 // Get the internal name as a c string
2753 const char* src = (const char*) (name()->as_C_string());
2754 const int src_length = (int)strlen(src);
2755
2756 char* dest = NEW_RESOURCE_ARRAY(char, src_length + hash_len + 3)(char*) resource_allocate_bytes((src_length + hash_len + 3) *
sizeof(char))
;
2757
2758 // Add L as type indicator
2759 int dest_index = 0;
2760 dest[dest_index++] = JVM_SIGNATURE_CLASS;
2761
2762 // Add the actual class name
2763 for (int src_index = 0; src_index < src_length; ) {
2764 dest[dest_index++] = src[src_index++];
2765 }
2766
2767 if (is_hidden()) { // Replace the last '+' with a '.'.
2768 for (int index = (int)src_length; index > 0; index--) {
2769 if (dest[index] == '+') {
2770 dest[index] = JVM_SIGNATURE_DOT;
2771 break;
2772 }
2773 }
2774 }
2775
2776 // If we have a hash, append it
2777 for (int hash_index = 0; hash_index < hash_len; ) {
2778 dest[dest_index++] = hash_buf[hash_index++];
2779 }
2780
2781 // Add the semicolon and the NULL
2782 dest[dest_index++] = JVM_SIGNATURE_ENDCLASS;
2783 dest[dest_index] = '\0';
2784 return dest;
2785}
2786
// Resolves the ModuleEntry for this class. Special case first: a
// non-strong hidden class in an unnamed package uses its defining
// loader's unnamed module (its class-mirror-holder CLD has none of its
// own). Otherwise a named package answers via _package_entry->module(),
// and an unnamed package falls back to the CLD's unnamed module.
2787ModuleEntry* InstanceKlass::module() const {
2788 if (is_hidden() &&
2789 in_unnamed_package() &&
2790 class_loader_data()->has_class_mirror_holder()) {
2791 // For a non-strong hidden class defined to an unnamed package,
2792 // its (class held) CLD will not have an unnamed module created for it.
2793 // Two choices to find the correct ModuleEntry:
2794 // 1. If hidden class is within a nest, use nest host's module
2795 // 2. Find the unnamed module off from the class loader
2796 // For now option #2 is used since a nest host is not set until
2797 // after the instance class is created in jvm_lookup_define_class().
2798 if (class_loader_data()->is_boot_class_loader_data()) {
2799 return ClassLoaderData::the_null_class_loader_data()->unnamed_module();
2800 } else {
2801 oop module = java_lang_ClassLoader::unnamedModule(class_loader_data()->class_loader());
2802 assert(java_lang_Module::is_instance(module), "Not an instance of java.lang.Module")do { if (!(java_lang_Module::is_instance(module))) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 2802, "assert(" "java_lang_Module::is_instance(module)" ") failed"
, "Not an instance of java.lang.Module"); ::breakpoint(); } }
while (0)
;
2803 return java_lang_Module::module_entry(module);
2804 }
2805 }
2806
2807 // Class is in a named package
2808 if (!in_unnamed_package()) {
2809 return _package_entry->module();
2810 }
2811
2812 // Class is in an unnamed package, return its loader's unnamed module
2813 return class_loader_data()->unnamed_module();
2814}
2815
// Resolves and stores _package_entry for this class. Rejects prohibited
// "java/" packages for non-shared classes, reuses an archived entry when
// the full module graph matches, otherwise looks the package up (or has
// it defined) in the loader's package table — in java.base before the
// module system is booted, else in the loader's unnamed module. Classes
// with no package name keep _package_entry untouched and only log.
2816void InstanceKlass::set_package(ClassLoaderData* loader_data, PackageEntry* pkg_entry, TRAPSJavaThread* __the_thread__) {
2817
2818 // ensure java/ packages only loaded by boot or platform builtin loaders
2819 // not needed for shared class since CDS does not archive prohibited classes.
2820 if (!is_shared()) {
2821 check_prohibited_package(name(), loader_data, CHECK__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return ; (void)(0
);
2822 }
2823
2824 if (is_shared() && _package_entry != NULL__null) {
2825 if (MetaspaceShared::use_full_module_graph() && _package_entry == pkg_entry) {
2826 // we can use the saved package
2827 assert(MetaspaceShared::is_in_shared_metaspace(_package_entry), "must be")do { if (!(MetaspaceShared::is_in_shared_metaspace(_package_entry
))) { (*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 2827, "assert(" "MetaspaceShared::is_in_shared_metaspace(_package_entry)"
") failed", "must be"); ::breakpoint(); } } while (0)
;
2828 return;
2829 } else {
2830 _package_entry = NULL__null;
2831 }
2832 }
2833
2834 // ClassLoader::package_from_class_name has already incremented the refcount of the symbol
2835 // it returns, so we need to decrement it when the current function exits.
2836 TempNewSymbol from_class_name =
2837 (pkg_entry != NULL__null) ? NULL__null : ClassLoader::package_from_class_name(name());
2838
2839 Symbol* pkg_name;
2840 if (pkg_entry != NULL__null) {
2841 pkg_name = pkg_entry->name();
2842 } else {
2843 pkg_name = from_class_name;
2844 }
2845
2846 if (pkg_name != NULL__null && loader_data != NULL__null) {
2847
2848 // Find in class loader's package entry table.
2849 _package_entry = pkg_entry != NULL__null ? pkg_entry : loader_data->packages()->lookup_only(pkg_name);
2850
2851 // If the package name is not found in the loader's package
2852 // entry table, it is an indication that the package has not
2853 // been defined. Consider it defined within the unnamed module.
2854 if (_package_entry == NULL__null) {
2855
2856 if (!ModuleEntryTable::javabase_defined()) {
2857 // Before java.base is defined during bootstrapping, define all packages in
2858 // the java.base module. If a non-java.base package is erroneously placed
2859 // in the java.base module it will be caught later when java.base
2860 // is defined by ModuleEntryTable::verify_javabase_packages check.
2861 assert(ModuleEntryTable::javabase_moduleEntry() != NULL, JAVA_BASE_NAME " module is NULL")do { if (!(ModuleEntryTable::javabase_moduleEntry() != __null
)) { (*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 2861, "assert(" "ModuleEntryTable::javabase_moduleEntry() != __null"
") failed", "java.base" " module is NULL"); ::breakpoint(); }
} while (0)
;
2862 _package_entry = loader_data->packages()->lookup(pkg_name, ModuleEntryTable::javabase_moduleEntry());
2863 } else {
2864 assert(loader_data->unnamed_module() != NULL, "unnamed module is NULL")do { if (!(loader_data->unnamed_module() != __null)) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 2864, "assert(" "loader_data->unnamed_module() != __null"
") failed", "unnamed module is NULL"); ::breakpoint(); } } while
(0)
;
2865 _package_entry = loader_data->packages()->lookup(pkg_name,
2866 loader_data->unnamed_module());
2867 }
2868
2869 // A package should have been successfully created
2870 DEBUG_ONLY(ResourceMark rm(THREAD))ResourceMark rm(__the_thread__);
2871 assert(_package_entry != NULL, "Package entry for class %s not found, loader %s",do { if (!(_package_entry != __null)) { (*g_assert_poison) = 'X'
;; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 2872, "assert(" "_package_entry != __null" ") failed", "Package entry for class %s not found, loader %s"
, name()->as_C_string(), loader_data->loader_name_and_id
()); ::breakpoint(); } } while (0)
2872 name()->as_C_string(), loader_data->loader_name_and_id())do { if (!(_package_entry != __null)) { (*g_assert_poison) = 'X'
;; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 2872, "assert(" "_package_entry != __null" ") failed", "Package entry for class %s not found, loader %s"
, name()->as_C_string(), loader_data->loader_name_and_id
()); ::breakpoint(); } } while (0)
;
2873 }
2874
2875 if (log_is_enabled(Debug, module)(LogImpl<(LogTag::_module), (LogTag::__NO_TAG), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG)
>::is_level(LogLevel::Debug))
) {
2876 ResourceMark rm(THREAD__the_thread__);
2877 ModuleEntry* m = _package_entry->module();
2878 log_trace(module)(!(LogImpl<(LogTag::_module), (LogTag::__NO_TAG), (LogTag::
__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG
)>::is_level(LogLevel::Trace))) ? (void)0 : LogImpl<(LogTag
::_module), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::
__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG)>::write<
LogLevel::Trace>
("Setting package: class: %s, package: %s, loader: %s, module: %s",
2879 external_name(),
2880 pkg_name->as_C_string(),
2881 loader_data->loader_name_and_id(),
2882 (m->is_named() ? m->name()->as_C_string() : UNNAMED_MODULE"unnamed module"));
2883 }
2884 } else {
2885 ResourceMark rm(THREAD__the_thread__);
2886 log_trace(module)(!(LogImpl<(LogTag::_module), (LogTag::__NO_TAG), (LogTag::
__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG
)>::is_level(LogLevel::Trace))) ? (void)0 : LogImpl<(LogTag
::_module), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::
__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG)>::write<
LogLevel::Trace>
("Setting package: class: %s, package: unnamed, loader: %s, module: %s",
2887 external_name(),
2888 (loader_data != NULL__null) ? loader_data->loader_name_and_id() : "NULL",
2889 UNNAMED_MODULE"unnamed module");
2890 }
2891}
2892
2893// Function set_classpath_index ensures that for a non-null _package_entry
2894// of the InstanceKlass, the entry is in the boot loader's package entry table.
2895// It then sets the classpath_index in the package entry record.
2896//
2897// The classpath_index field is used to find the entry on the boot loader class
2898// path for packages with classes loaded by the boot loader from -Xbootclasspath/a
2899// in an unnamed module. It is also used to indicate (for all packages whose
2900// classes are loaded by the boot loader) that at least one of the package's
2901// classes has been loaded.
// Records the boot class path index on this class's package entry (no-op
// when there is no package). Debug builds verify the entry is the one in
// the boot loader's package table and that path_index is valid.
2902void InstanceKlass::set_classpath_index(s2 path_index) {
2903 if (_package_entry != NULL__null) {
2904 DEBUG_ONLY(PackageEntryTable* pkg_entry_tbl = ClassLoaderData::the_null_class_loader_data()->packages();)PackageEntryTable* pkg_entry_tbl = ClassLoaderData::the_null_class_loader_data
()->packages();
2905 assert(pkg_entry_tbl->lookup_only(_package_entry->name()) == _package_entry, "Should be same")do { if (!(pkg_entry_tbl->lookup_only(_package_entry->name
()) == _package_entry)) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 2905, "assert(" "pkg_entry_tbl->lookup_only(_package_entry->name()) == _package_entry"
") failed", "Should be same"); ::breakpoint(); } } while (0)
;
2906 assert(path_index != -1, "Unexpected classpath_index")do { if (!(path_index != -1)) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 2906, "assert(" "path_index != -1" ") failed", "Unexpected classpath_index"
); ::breakpoint(); } } while (0)
;
2907 _package_entry->set_classpath_index(path_index);
2908 }
2909}
2910
2911// different versions of is_same_class_package
2912
2913bool InstanceKlass::is_same_class_package(const Klass* class2) const {
2914 oop classloader1 = this->class_loader();
2915 PackageEntry* classpkg1 = this->package();
2916 if (class2->is_objArray_klass()) {
2917 class2 = ObjArrayKlass::cast(class2)->bottom_klass();
2918 }
2919
2920 oop classloader2;
2921 PackageEntry* classpkg2;
2922 if (class2->is_instance_klass()) {
2923 classloader2 = class2->class_loader();
2924 classpkg2 = class2->package();
2925 } else {
2926 assert(class2->is_typeArray_klass(), "should be type array")do { if (!(class2->is_typeArray_klass())) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 2926, "assert(" "class2->is_typeArray_klass()" ") failed"
, "should be type array"); ::breakpoint(); } } while (0)
;
2927 classloader2 = NULL__null;
2928 classpkg2 = NULL__null;
2929 }
2930
2931 // Same package is determined by comparing class loader
2932 // and package entries. Both must be the same. This rule
2933 // applies even to classes that are defined in the unnamed
2934 // package, they still must have the same class loader.
2935 if ((classloader1 == classloader2) && (classpkg1 == classpkg2)) {
2936 return true;
2937 }
2938
2939 return false;
2940}
2941
2942// return true if this class and other_class are in the same package. Classloader
2943// and classname information is enough to determine a class's package
2944bool InstanceKlass::is_same_class_package(oop other_class_loader,
2945 const Symbol* other_class_name) const {
2946 if (class_loader() != other_class_loader) {
2947 return false;
2948 }
2949 if (name()->fast_compare(other_class_name) == 0) {
2950 return true;
2951 }
2952
2953 {
2954 ResourceMark rm;
2955
2956 bool bad_class_name = false;
2957 TempNewSymbol other_pkg = ClassLoader::package_from_class_name(other_class_name, &bad_class_name);
2958 if (bad_class_name) {
2959 return false;
2960 }
2961 // Check that package_from_class_name() returns NULL, not "", if there is no package.
2962 assert(other_pkg == NULL || other_pkg->utf8_length() > 0, "package name is empty string")do { if (!(other_pkg == __null || other_pkg->utf8_length()
> 0)) { (*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 2962, "assert(" "other_pkg == __null || other_pkg->utf8_length() > 0"
") failed", "package name is empty string"); ::breakpoint();
} } while (0)
;
2963
2964 const Symbol* const this_package_name =
2965 this->package() != NULL__null ? this->package()->name() : NULL__null;
2966
2967 if (this_package_name == NULL__null || other_pkg == NULL__null) {
2968 // One of the two doesn't have a package. Only return true if the other
2969 // one also doesn't have a package.
2970 return this_package_name == other_pkg;
2971 }
2972
2973 // Check if package is identical
2974 return this_package_name->fast_compare(other_pkg) == 0;
2975 }
2976}
2977
2978static bool is_prohibited_package_slow(Symbol* class_name) {
2979 // Caller has ResourceMark
2980 int length;
2981 jchar* unicode = class_name->as_unicode(length);
2982 return (length >= 5 &&
2983 unicode[0] == 'j' &&
2984 unicode[1] == 'a' &&
2985 unicode[2] == 'v' &&
2986 unicode[3] == 'a' &&
2987 unicode[4] == '/');
2988}
2989
2990// Only boot and platform class loaders can define classes in "java/" packages.
2991void InstanceKlass::check_prohibited_package(Symbol* class_name,
2992 ClassLoaderData* loader_data,
2993 TRAPSJavaThread* __the_thread__) {
2994 if (!loader_data->is_boot_class_loader_data() &&
2995 !loader_data->is_platform_class_loader_data() &&
2996 class_name != NULL__null && class_name->utf8_length() >= 5) {
2997 ResourceMark rm(THREAD__the_thread__);
2998 bool prohibited;
2999 const u1* base = class_name->base();
3000 if ((base[0] | base[1] | base[2] | base[3] | base[4]) & 0x80) {
3001 prohibited = is_prohibited_package_slow(class_name);
3002 } else {
3003 char* name = class_name->as_C_string();
3004 prohibited = (strncmp(name, JAVAPKG"java", JAVAPKG_LEN4) == 0 && name[JAVAPKG_LEN4] == '/');
3005 }
3006 if (prohibited) {
3007 TempNewSymbol pkg_name = ClassLoader::package_from_class_name(class_name);
3008 assert(pkg_name != NULL, "Error in parsing package name starting with 'java/'")do { if (!(pkg_name != __null)) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3008, "assert(" "pkg_name != __null" ") failed", "Error in parsing package name starting with 'java/'"
); ::breakpoint(); } } while (0)
;
3009 char* name = pkg_name->as_C_string();
3010 const char* class_loader_name = loader_data->loader_name_and_id();
3011 StringUtils::replace_no_expand(name, "/", ".");
3012 const char* msg_text1 = "Class loader (instance of): ";
3013 const char* msg_text2 = " tried to load prohibited package name: ";
3014 size_t len = strlen(msg_text1) + strlen(class_loader_name) + strlen(msg_text2) + strlen(name) + 1;
3015 char* message = NEW_RESOURCE_ARRAY_IN_THREAD(THREAD, char, len)(char*) resource_allocate_bytes(__the_thread__, (len) * sizeof
(char))
;
3016 jio_snprintf(message, len, "%s%s%s%s", msg_text1, class_loader_name, msg_text2, name);
3017 THROW_MSG(vmSymbols::java_lang_SecurityException(), message){ Exceptions::_throw_msg(__the_thread__, "/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3017, vmSymbols::java_lang_SecurityException(), message); return
; }
;
3018 }
3019 }
3020 return;
3021}
3022
3023bool InstanceKlass::find_inner_classes_attr(int* ooff, int* noff, TRAPSJavaThread* __the_thread__) const {
3024 constantPoolHandle i_cp(THREAD__the_thread__, constants());
3025 for (InnerClassesIterator iter(this); !iter.done(); iter.next()) {
3026 int ioff = iter.inner_class_info_index();
3027 if (ioff != 0) {
3028 // Check to see if the name matches the class we're looking for
3029 // before attempting to find the class.
3030 if (i_cp->klass_name_at_matches(this, ioff)) {
3031 Klass* inner_klass = i_cp->klass_at(ioff, CHECK_false__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return false; (void)(0
);
3032 if (this == inner_klass) {
3033 *ooff = iter.outer_class_info_index();
3034 *noff = iter.inner_name_index();
3035 return true;
3036 }
3037 }
3038 }
3039 }
3040 return false;
3041}
3042
3043InstanceKlass* InstanceKlass::compute_enclosing_class(bool* inner_is_member, TRAPSJavaThread* __the_thread__) const {
3044 InstanceKlass* outer_klass = NULL__null;
3045 *inner_is_member = false;
3046 int ooff = 0, noff = 0;
3047 bool has_inner_classes_attr = find_inner_classes_attr(&ooff, &noff, THREAD__the_thread__);
3048 if (has_inner_classes_attr) {
3049 constantPoolHandle i_cp(THREAD__the_thread__, constants());
3050 if (ooff != 0) {
3051 Klass* ok = i_cp->klass_at(ooff, CHECK_NULL__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return __null; (void)(0
);
3052 if (!ok->is_instance_klass()) {
3053 // If the outer class is not an instance klass then it cannot have
3054 // declared any inner classes.
3055 ResourceMark rm(THREAD__the_thread__);
3056 Exceptions::fthrow(
3057 THREAD_AND_LOCATION__the_thread__, "/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3057
,
3058 vmSymbols::java_lang_IncompatibleClassChangeError(),
3059 "%s and %s disagree on InnerClasses attribute",
3060 ok->external_name(),
3061 external_name());
3062 return NULL__null;
3063 }
3064 outer_klass = InstanceKlass::cast(ok);
3065 *inner_is_member = true;
3066 }
3067 if (NULL__null == outer_klass) {
3068 // It may be a local class; try for that.
3069 int encl_method_class_idx = enclosing_method_class_index();
3070 if (encl_method_class_idx != 0) {
3071 Klass* ok = i_cp->klass_at(encl_method_class_idx, CHECK_NULL__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return __null; (void)(0
);
3072 outer_klass = InstanceKlass::cast(ok);
3073 *inner_is_member = false;
3074 }
3075 }
3076 }
3077
3078 // If no inner class attribute found for this class.
3079 if (NULL__null == outer_klass) return NULL__null;
3080
3081 // Throws an exception if outer klass has not declared k as an inner klass
3082 // We need evidence that each klass knows about the other, or else
3083 // the system could allow a spoof of an inner class to gain access rights.
3084 Reflection::check_for_inner_class(outer_klass, this, *inner_is_member, CHECK_NULL__the_thread__); if ((((ThreadShadow*)__the_thread__)->has_pending_exception
())) return __null; (void)(0
);
3085 return outer_klass;
3086}
3087
3088jint InstanceKlass::compute_modifier_flags() const {
3089 jint access = access_flags().as_int();
3090
3091 // But check if it happens to be member class.
3092 InnerClassesIterator iter(this);
3093 for (; !iter.done(); iter.next()) {
3094 int ioff = iter.inner_class_info_index();
3095 // Inner class attribute can be zero, skip it.
3096 // Strange but true: JVM spec. allows null inner class refs.
3097 if (ioff == 0) continue;
3098
3099 // only look at classes that are already loaded
3100 // since we are looking for the flags for our self.
3101 Symbol* inner_name = constants()->klass_name_at(ioff);
3102 if (name() == inner_name) {
3103 // This is really a member class.
3104 access = iter.inner_access_flags();
3105 break;
3106 }
3107 }
3108 // Remember to strip ACC_SUPER bit
3109 return (access & (~JVM_ACC_SUPER)) & JVM_ACC_WRITTEN_FLAGS;
3110}
3111
3112jint InstanceKlass::jvmti_class_status() const {
3113 jint result = 0;
3114
3115 if (is_linked()) {
3116 result |= JVMTI_CLASS_STATUS_VERIFIED | JVMTI_CLASS_STATUS_PREPARED;
3117 }
3118
3119 if (is_initialized()) {
3120 assert(is_linked(), "Class status is not consistent")do { if (!(is_linked())) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3120, "assert(" "is_linked()" ") failed", "Class status is not consistent"
); ::breakpoint(); } } while (0)
;
3121 result |= JVMTI_CLASS_STATUS_INITIALIZED;
3122 }
3123 if (is_in_error_state()) {
3124 result |= JVMTI_CLASS_STATUS_ERROR;
3125 }
3126 return result;
3127}
3128
3129Method* InstanceKlass::method_at_itable(InstanceKlass* holder, int index, TRAPSJavaThread* __the_thread__) {
3130 bool implements_interface; // initialized by method_at_itable_or_null
3131 Method* m = method_at_itable_or_null(holder, index,
3132 implements_interface); // out parameter
3133 if (m != NULL__null) {
3134 assert(implements_interface, "sanity")do { if (!(implements_interface)) { (*g_assert_poison) = 'X';
; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3134, "assert(" "implements_interface" ") failed", "sanity"
); ::breakpoint(); } } while (0)
;
3135 return m;
3136 } else if (implements_interface) {
3137 // Throw AbstractMethodError since corresponding itable slot is empty.
3138 THROW_NULL(vmSymbols::java_lang_AbstractMethodError()){ Exceptions::_throw_msg(__the_thread__, "/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3138, vmSymbols::java_lang_AbstractMethodError(), __null); return
__null; }
;
3139 } else {
3140 // If the interface isn't implemented by the receiver class,
3141 // the VM should throw IncompatibleClassChangeError.
3142 ResourceMark rm(THREAD__the_thread__);
3143 stringStream ss;
3144 bool same_module = (module() == holder->module());
3145 ss.print("Receiver class %s does not implement "
3146 "the interface %s defining the method to be called "
3147 "(%s%s%s)",
3148 external_name(), holder->external_name(),
3149 (same_module) ? joint_in_module_of_loader(holder) : class_in_module_of_loader(),
3150 (same_module) ? "" : "; ",
3151 (same_module) ? "" : holder->class_in_module_of_loader());
3152 THROW_MSG_NULL(vmSymbols::java_lang_IncompatibleClassChangeError(), ss.as_string()){ Exceptions::_throw_msg(__the_thread__, "/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3152, vmSymbols::java_lang_IncompatibleClassChangeError(), ss
.as_string()); return __null; }
;
3153 }
3154}
3155
3156Method* InstanceKlass::method_at_itable_or_null(InstanceKlass* holder, int index, bool& implements_interface) {
3157 klassItable itable(this);
3158 for (int i = 0; i < itable.size_offset_table(); i++) {
3159 itableOffsetEntry* offset_entry = itable.offset_entry(i);
3160 if (offset_entry->interface_klass() == holder) {
3161 implements_interface = true;
3162 itableMethodEntry* ime = offset_entry->first_method_entry(this);
3163 Method* m = ime[index].method();
3164 return m;
3165 }
3166 }
3167 implements_interface = false;
3168 return NULL__null; // offset entry not found
3169}
3170
3171int InstanceKlass::vtable_index_of_interface_method(Method* intf_method) {
3172 assert(is_linked(), "required")do { if (!(is_linked())) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3172, "assert(" "is_linked()" ") failed", "required"); ::breakpoint
(); } } while (0)
;
3173 assert(intf_method->method_holder()->is_interface(), "not an interface method")do { if (!(intf_method->method_holder()->is_interface()
)) { (*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3173, "assert(" "intf_method->method_holder()->is_interface()"
") failed", "not an interface method"); ::breakpoint(); } } while
(0)
;
3174 assert(is_subtype_of(intf_method->method_holder()), "interface not implemented")do { if (!(is_subtype_of(intf_method->method_holder()))) {
(*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3174, "assert(" "is_subtype_of(intf_method->method_holder())"
") failed", "interface not implemented"); ::breakpoint(); } }
while (0)
;
3175
3176 int vtable_index = Method::invalid_vtable_index;
3177 Symbol* name = intf_method->name();
3178 Symbol* signature = intf_method->signature();
3179
3180 // First check in default method array
3181 if (!intf_method->is_abstract() && default_methods() != NULL__null) {
3182 int index = find_method_index(default_methods(),
3183 name, signature,
3184 Klass::OverpassLookupMode::find,
3185 Klass::StaticLookupMode::find,
3186 Klass::PrivateLookupMode::find);
3187 if (index >= 0) {
3188 vtable_index = default_vtable_indices()->at(index);
3189 }
3190 }
3191 if (vtable_index == Method::invalid_vtable_index) {
3192 // get vtable_index for miranda methods
3193 klassVtable vt = vtable();
3194 vtable_index = vt.index_of_miranda(name, signature);
3195 }
3196 return vtable_index;
3197}
3198
3199#if INCLUDE_JVMTI1
3200// update default_methods for redefineclasses for methods that are
3201// not yet in the vtable due to concurrent subclass define and superinterface
3202// redefinition
3203// Note: those in the vtable, should have been updated via adjust_method_entries
3204void InstanceKlass::adjust_default_methods(bool* trace_name_printed) {
3205 // search the default_methods for uses of either obsolete or EMCP methods
3206 if (default_methods() != NULL__null) {
3207 for (int index = 0; index < default_methods()->length(); index ++) {
3208 Method* old_method = default_methods()->at(index);
3209 if (old_method == NULL__null || !old_method->is_old()) {
3210 continue; // skip uninteresting entries
3211 }
3212 assert(!old_method->is_deleted(), "default methods may not be deleted")do { if (!(!old_method->is_deleted())) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3212, "assert(" "!old_method->is_deleted()" ") failed", "default methods may not be deleted"
); ::breakpoint(); } } while (0)
;
3213 Method* new_method = old_method->get_new_method();
3214 default_methods()->at_put(index, new_method);
3215
3216 if (log_is_enabled(Info, redefine, class, update)(LogImpl<(LogTag::_redefine), (LogTag::_class), (LogTag::_update
), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG)
>::is_level(LogLevel::Info))
) {
3217 ResourceMark rm;
3218 if (!(*trace_name_printed)) {
3219 log_info(redefine, class, update)(!(LogImpl<(LogTag::_redefine), (LogTag::_class), (LogTag::
_update), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG
)>::is_level(LogLevel::Info))) ? (void)0 : LogImpl<(LogTag
::_redefine), (LogTag::_class), (LogTag::_update), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG)>::write<LogLevel
::Info>
3220 ("adjust: klassname=%s default methods from name=%s",
3221 external_name(), old_method->method_holder()->external_name());
3222 *trace_name_printed = true;
3223 }
3224 log_debug(redefine, class, update, vtables)(!(LogImpl<(LogTag::_redefine), (LogTag::_class), (LogTag::
_update), (LogTag::_vtables), (LogTag::__NO_TAG), (LogTag::__NO_TAG
)>::is_level(LogLevel::Debug))) ? (void)0 : LogImpl<(LogTag
::_redefine), (LogTag::_class), (LogTag::_update), (LogTag::_vtables
), (LogTag::__NO_TAG), (LogTag::__NO_TAG)>::write<LogLevel
::Debug>
3225 ("default method update: %s(%s) ",
3226 new_method->name()->as_C_string(), new_method->signature()->as_C_string());
3227 }
3228 }
3229 }
3230}
3231#endif // INCLUDE_JVMTI
3232
3233// On-stack replacement stuff
3234void InstanceKlass::add_osr_nmethod(nmethod* n) {
3235 assert_lock_strong(CompiledMethod_lock);
3236#ifndef PRODUCT
3237 nmethod* prev = lookup_osr_nmethod(n->method(), n->osr_entry_bci(), n->comp_level(), true);
3238 assert(prev == NULL || !prev->is_in_use() COMPILER2_PRESENT(|| StressRecompilation),do { if (!(prev == __null || !prev->is_in_use() || StressRecompilation
)) { (*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3239, "assert(" "prev == __null || !prev->is_in_use() || StressRecompilation"
") failed", "redundant OSR recompilation detected. memory leak in CodeCache!"
); ::breakpoint(); } } while (0)
3239 "redundant OSR recompilation detected. memory leak in CodeCache!")do { if (!(prev == __null || !prev->is_in_use() || StressRecompilation
)) { (*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3239, "assert(" "prev == __null || !prev->is_in_use() || StressRecompilation"
") failed", "redundant OSR recompilation detected. memory leak in CodeCache!"
); ::breakpoint(); } } while (0)
;
3240#endif
3241 // only one compilation can be active
3242 assert(n->is_osr_method(), "wrong kind of nmethod")do { if (!(n->is_osr_method())) { (*g_assert_poison) = 'X'
;; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3242, "assert(" "n->is_osr_method()" ") failed", "wrong kind of nmethod"
); ::breakpoint(); } } while (0)
;
3243 n->set_osr_link(osr_nmethods_head());
3244 set_osr_nmethods_head(n);
3245 // Raise the highest osr level if necessary
3246 n->method()->set_highest_osr_comp_level(MAX2(n->method()->highest_osr_comp_level(), n->comp_level()));
3247
3248 // Get rid of the osr methods for the same bci that have lower levels.
3249 for (int l = CompLevel_limited_profile; l < n->comp_level(); l++) {
3250 nmethod *inv = lookup_osr_nmethod(n->method(), n->osr_entry_bci(), l, true);
3251 if (inv != NULL__null && inv->is_in_use()) {
3252 inv->make_not_entrant();
3253 }
3254 }
3255}
3256
3257// Remove osr nmethod from the list. Return true if found and removed.
3258bool InstanceKlass::remove_osr_nmethod(nmethod* n) {
3259 // This is a short non-blocking critical region, so the no safepoint check is ok.
3260 MutexLocker ml(CompiledMethod_lock->owned_by_self() ? NULL__null : CompiledMethod_lock
3261 , Mutex::_no_safepoint_check_flag);
3262 assert(n->is_osr_method(), "wrong kind of nmethod")do { if (!(n->is_osr_method())) { (*g_assert_poison) = 'X'
;; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3262, "assert(" "n->is_osr_method()" ") failed", "wrong kind of nmethod"
); ::breakpoint(); } } while (0)
;
3263 nmethod* last = NULL__null;
3264 nmethod* cur = osr_nmethods_head();
3265 int max_level = CompLevel_none; // Find the max comp level excluding n
3266 Method* m = n->method();
3267 // Search for match
3268 bool found = false;
3269 while(cur != NULL__null && cur != n) {
3270 if (m == cur->method()) {
3271 // Find max level before n
3272 max_level = MAX2(max_level, cur->comp_level());
3273 }
3274 last = cur;
3275 cur = cur->osr_link();
3276 }
3277 nmethod* next = NULL__null;
3278 if (cur == n) {
3279 found = true;
3280 next = cur->osr_link();
3281 if (last == NULL__null) {
3282 // Remove first element
3283 set_osr_nmethods_head(next);
3284 } else {
3285 last->set_osr_link(next);
3286 }
3287 }
3288 n->set_osr_link(NULL__null);
3289 cur = next;
3290 while (cur != NULL__null) {
3291 // Find max level after n
3292 if (m == cur->method()) {
3293 max_level = MAX2(max_level, cur->comp_level());
3294 }
3295 cur = cur->osr_link();
3296 }
3297 m->set_highest_osr_comp_level(max_level);
3298 return found;
3299}
3300
3301int InstanceKlass::mark_osr_nmethods(const Method* m) {
3302 MutexLocker ml(CompiledMethod_lock->owned_by_self() ? NULL__null : CompiledMethod_lock,
3303 Mutex::_no_safepoint_check_flag);
3304 nmethod* osr = osr_nmethods_head();
3305 int found = 0;
3306 while (osr != NULL__null) {
3307 assert(osr->is_osr_method(), "wrong kind of nmethod found in chain")do { if (!(osr->is_osr_method())) { (*g_assert_poison) = 'X'
;; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3307, "assert(" "osr->is_osr_method()" ") failed", "wrong kind of nmethod found in chain"
); ::breakpoint(); } } while (0)
;
3308 if (osr->method() == m) {
3309 osr->mark_for_deoptimization();
3310 found++;
3311 }
3312 osr = osr->osr_link();
3313 }
3314 return found;
3315}
3316
3317nmethod* InstanceKlass::lookup_osr_nmethod(const Method* m, int bci, int comp_level, bool match_level) const {
3318 MutexLocker ml(CompiledMethod_lock->owned_by_self() ? NULL__null : CompiledMethod_lock,
3319 Mutex::_no_safepoint_check_flag);
3320 nmethod* osr = osr_nmethods_head();
3321 nmethod* best = NULL__null;
3322 while (osr != NULL__null) {
3323 assert(osr->is_osr_method(), "wrong kind of nmethod found in chain")do { if (!(osr->is_osr_method())) { (*g_assert_poison) = 'X'
;; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3323, "assert(" "osr->is_osr_method()" ") failed", "wrong kind of nmethod found in chain"
); ::breakpoint(); } } while (0)
;
3324 // There can be a time when a c1 osr method exists but we are waiting
3325 // for a c2 version. When c2 completes its osr nmethod we will trash
3326 // the c1 version and only be able to find the c2 version. However
3327 // while we overflow in the c1 code at back branches we don't want to
3328 // try and switch to the same code as we are already running
3329
3330 if (osr->method() == m &&
3331 (bci == InvocationEntryBci || osr->osr_entry_bci() == bci)) {
3332 if (match_level) {
3333 if (osr->comp_level() == comp_level) {
3334 // Found a match - return it.
3335 return osr;
3336 }
3337 } else {
3338 if (best == NULL__null || (osr->comp_level() > best->comp_level())) {
3339 if (osr->comp_level() == CompilationPolicy::highest_compile_level()) {
3340 // Found the best possible - return it.
3341 return osr;
3342 }
3343 best = osr;
3344 }
3345 }
3346 }
3347 osr = osr->osr_link();
3348 }
3349
3350 assert(match_level == false || best == NULL, "shouldn't pick up anything if match_level is set")do { if (!(match_level == false || best == __null)) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3350, "assert(" "match_level == false || best == __null" ") failed"
, "shouldn't pick up anything if match_level is set"); ::breakpoint
(); } } while (0)
;
3351 if (best != NULL__null && best->comp_level() >= comp_level) {
3352 return best;
3353 }
3354 return NULL__null;
3355}
3356
3357// -----------------------------------------------------------------------------------------------------
3358// Printing
3359
3360#define BULLET" - " " - "
3361
// Human-readable names for the class init states, indexed by _init_state
// (see print_on() below, which prints state_names[_init_state]).
3362static const char* state_names[] = {
3363 "allocated", "loaded", "linked", "being_initialized", "fully_initialized", "initialization_error"
3364};
3365
3366static void print_vtable(intptr_t* start, int len, outputStream* st) {
3367 for (int i = 0; i < len; i++) {
3368 intptr_t e = start[i];
3369 st->print("%d : " INTPTR_FORMAT"0x%016" "l" "x", i, e);
3370 if (MetaspaceObj::is_valid((Metadata*)e)) {
3371 st->print(" ");
3372 ((Metadata*)e)->print_value_on(st);
3373 }
3374 st->cr();
3375 }
3376}
3377
3378static void print_vtable(vtableEntry* start, int len, outputStream* st) {
3379 return print_vtable(reinterpret_cast<intptr_t*>(start), len, st);
3380}
3381
3382void InstanceKlass::print_on(outputStream* st) const {
3383 assert(is_klass(), "must be klass")do { if (!(is_klass())) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3383, "assert(" "is_klass()" ") failed", "must be klass"); ::
breakpoint(); } } while (0)
;
3384 Klass::print_on(st);
3385
3386 st->print(BULLET" - ""instance size: %d", size_helper()); st->cr();
3387 st->print(BULLET" - ""klass size: %d", size()); st->cr();
3388 st->print(BULLET" - ""access: "); access_flags().print_on(st); st->cr();
3389 st->print(BULLET" - ""state: "); st->print_cr("%s", state_names[_init_state]);
3390 st->print(BULLET" - ""name: "); name()->print_value_on(st); st->cr();
3391 st->print(BULLET" - ""super: "); Metadata::print_value_on_maybe_null(st, super()); st->cr();
3392 st->print(BULLET" - ""sub: ");
3393 Klass* sub = subklass();
3394 int n;
3395 for (n = 0; sub != NULL__null; n++, sub = sub->next_sibling()) {
3396 if (n < MaxSubklassPrintSize) {
3397 sub->print_value_on(st);
3398 st->print(" ");
3399 }
3400 }
3401 if (n >= MaxSubklassPrintSize) st->print("(" INTX_FORMAT"%" "l" "d" " more klasses...)", n - MaxSubklassPrintSize);
3402 st->cr();
3403
3404 if (is_interface()) {
3405 st->print_cr(BULLET" - ""nof implementors: %d", nof_implementors());
3406 if (nof_implementors() == 1) {
3407 st->print_cr(BULLET" - ""implementor: ");
3408 st->print(" ");
3409 implementor()->print_value_on(st);
3410 st->cr();
3411 }
3412 }
3413
3414 st->print(BULLET" - ""arrays: "); Metadata::print_value_on_maybe_null(st, array_klasses()); st->cr();
3415 st->print(BULLET" - ""methods: "); methods()->print_value_on(st); st->cr();
3416 if (Verbose || WizardMode) {
3417 Array<Method*>* method_array = methods();
3418 for (int i = 0; i < method_array->length(); i++) {
3419 st->print("%d : ", i); method_array->at(i)->print_value(); st->cr();
3420 }
3421 }
3422 st->print(BULLET" - ""method ordering: "); method_ordering()->print_value_on(st); st->cr();
3423 st->print(BULLET" - ""default_methods: "); default_methods()->print_value_on(st); st->cr();
3424 if (Verbose && default_methods() != NULL__null) {
3425 Array<Method*>* method_array = default_methods();
3426 for (int i = 0; i < method_array->length(); i++) {
3427 st->print("%d : ", i); method_array->at(i)->print_value(); st->cr();
3428 }
3429 }
3430 if (default_vtable_indices() != NULL__null) {
3431 st->print(BULLET" - ""default vtable indices: "); default_vtable_indices()->print_value_on(st); st->cr();
3432 }
3433 st->print(BULLET" - ""local interfaces: "); local_interfaces()->print_value_on(st); st->cr();
3434 st->print(BULLET" - ""trans. interfaces: "); transitive_interfaces()->print_value_on(st); st->cr();
3435 st->print(BULLET" - ""constants: "); constants()->print_value_on(st); st->cr();
3436 if (class_loader_data() != NULL__null) {
3437 st->print(BULLET" - ""class loader data: ");
3438 class_loader_data()->print_value_on(st);
3439 st->cr();
3440 }
3441 if (source_file_name() != NULL__null) {
3442 st->print(BULLET" - ""source file: ");
3443 source_file_name()->print_value_on(st);
3444 st->cr();
3445 }
3446 if (source_debug_extension() != NULL__null) {
3447 st->print(BULLET" - ""source debug extension: ");
3448 st->print("%s", source_debug_extension());
3449 st->cr();
3450 }
3451 st->print(BULLET" - ""class annotations: "); class_annotations()->print_value_on(st); st->cr();
3452 st->print(BULLET" - ""class type annotations: "); class_type_annotations()->print_value_on(st); st->cr();
3453 st->print(BULLET" - ""field annotations: "); fields_annotations()->print_value_on(st); st->cr();
3454 st->print(BULLET" - ""field type annotations: "); fields_type_annotations()->print_value_on(st); st->cr();
3455 {
3456 bool have_pv = false;
3457 // previous versions are linked together through the InstanceKlass
3458 for (InstanceKlass* pv_node = previous_versions();
3459 pv_node != NULL__null;
3460 pv_node = pv_node->previous_versions()) {
3461 if (!have_pv)
3462 st->print(BULLET" - ""previous version: ");
3463 have_pv = true;
3464 pv_node->constants()->print_value_on(st);
3465 }
3466 if (have_pv) st->cr();
3467 }
3468
3469 if (generic_signature() != NULL__null) {
3470 st->print(BULLET" - ""generic signature: ");
3471 generic_signature()->print_value_on(st);
3472 st->cr();
3473 }
3474 st->print(BULLET" - ""inner classes: "); inner_classes()->print_value_on(st); st->cr();
3475 st->print(BULLET" - ""nest members: "); nest_members()->print_value_on(st); st->cr();
3476 if (record_components() != NULL__null) {
3477 st->print(BULLET" - ""record components: "); record_components()->print_value_on(st); st->cr();
3478 }
3479 st->print(BULLET" - ""permitted subclasses: "); permitted_subclasses()->print_value_on(st); st->cr();
3480 if (java_mirror() != NULL__null) {
3481 st->print(BULLET" - ""java mirror: ");
3482 java_mirror()->print_value_on(st);
3483 st->cr();
3484 } else {
3485 st->print_cr(BULLET" - ""java mirror: NULL");
3486 }
3487 st->print(BULLET" - ""vtable length %d (start addr: " INTPTR_FORMAT"0x%016" "l" "x" ")", vtable_length(), p2i(start_of_vtable())); st->cr();
3488 if (vtable_length() > 0 && (Verbose || WizardMode)) print_vtable(start_of_vtable(), vtable_length(), st);
3489 st->print(BULLET" - ""itable length %d (start addr: " INTPTR_FORMAT"0x%016" "l" "x" ")", itable_length(), p2i(start_of_itable())); st->cr();
3490 if (itable_length() > 0 && (Verbose || WizardMode)) print_vtable(start_of_itable(), itable_length(), st);
3491 st->print_cr(BULLET" - ""---- static fields (%d words):", static_field_size());
3492 FieldPrinter print_static_field(st);
3493 ((InstanceKlass*)this)->do_local_static_fields(&print_static_field);
3494 st->print_cr(BULLET" - ""---- non-static fields (%d words):", nonstatic_field_size());
3495 FieldPrinter print_nonstatic_field(st);
3496 InstanceKlass* ik = const_cast<InstanceKlass*>(this);
3497 ik->print_nonstatic_fields(&print_nonstatic_field);
3498
3499 st->print(BULLET" - ""non-static oop maps: ");
3500 OopMapBlock* map = start_of_nonstatic_oop_maps();
3501 OopMapBlock* end_map = map + nonstatic_oop_map_count();
3502 while (map < end_map) {
3503 st->print("%d-%d ", map->offset(), map->offset() + heapOopSize*(map->count() - 1));
3504 map++;
3505 }
3506 st->cr();
3507}
3508
3509void InstanceKlass::print_value_on(outputStream* st) const {
3510 assert(is_klass(), "must be klass")do { if (!(is_klass())) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3510, "assert(" "is_klass()" ") failed", "must be klass"); ::
breakpoint(); } } while (0)
;
3511 if (Verbose || WizardMode) access_flags().print_on(st);
3512 name()->print_value_on(st);
3513}
3514
3515void FieldPrinter::do_field(fieldDescriptor* fd) {
3516 _st->print(BULLET" - ");
3517 if (_obj == NULL__null) {
3518 fd->print_on(_st);
3519 _st->cr();
3520 } else {
3521 fd->print_on_for(_st, _obj);
3522 _st->cr();
3523 }
3524}
3525
3526
3527void InstanceKlass::oop_print_on(oop obj, outputStream* st) {
3528 Klass::oop_print_on(obj, st);
3529
3530 if (this == vmClasses::String_klass()) {
3531 typeArrayOop value = java_lang_String::value(obj);
3532 juint length = java_lang_String::length(obj);
3533 if (value != NULL__null &&
3534 value->is_typeArray() &&
3535 length <= (juint) value->length()) {
3536 st->print(BULLET" - ""string: ");
3537 java_lang_String::print(obj, st);
3538 st->cr();
3539 }
3540 }
3541
3542 st->print_cr(BULLET" - ""---- fields (total size " SIZE_FORMAT"%" "l" "u" " words):", oop_size(obj));
3543 FieldPrinter print_field(st, obj);
3544 print_nonstatic_fields(&print_field);
3545
3546 if (this == vmClasses::Class_klass()) {
3547 st->print(BULLET" - ""signature: ");
3548 java_lang_Class::print_signature(obj, st);
3549 st->cr();
3550 Klass* real_klass = java_lang_Class::as_Klass(obj);
3551 if (real_klass != NULL__null && real_klass->is_instance_klass()) {
3552 st->print_cr(BULLET" - ""---- static fields (%d):", java_lang_Class::static_oop_field_count(obj));
3553 InstanceKlass::cast(real_klass)->do_local_static_fields(&print_field);
3554 }
3555 } else if (this == vmClasses::MethodType_klass()) {
3556 st->print(BULLET" - ""signature: ");
3557 java_lang_invoke_MethodType::print_signature(obj, st);
3558 st->cr();
3559 }
3560}
3561
3562#ifndef PRODUCT
3563
3564bool InstanceKlass::verify_itable_index(int i) {
3565 int method_count = klassItable::method_count_for_interface(this);
3566 assert(i >= 0 && i < method_count, "index out of bounds")do { if (!(i >= 0 && i < method_count)) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3566, "assert(" "i >= 0 && i < method_count" ") failed"
, "index out of bounds"); ::breakpoint(); } } while (0)
;
3567 return true;
3568}
3569
3570#endif //PRODUCT
3571
3572void InstanceKlass::oop_print_value_on(oop obj, outputStream* st) {
3573 st->print("a ");
3574 name()->print_value_on(st);
3575 obj->print_address_on(st);
3576 if (this == vmClasses::String_klass()
3577 && java_lang_String::value(obj) != NULL__null) {
3578 ResourceMark rm;
3579 int len = java_lang_String::length(obj);
3580 int plen = (len < 24 ? len : 12);
3581 char* str = java_lang_String::as_utf8_string(obj, 0, plen);
3582 st->print(" = \"%s\"", str);
3583 if (len > plen)
3584 st->print("...[%d]", len);
3585 } else if (this == vmClasses::Class_klass()) {
3586 Klass* k = java_lang_Class::as_Klass(obj);
3587 st->print(" = ");
3588 if (k != NULL__null) {
3589 k->print_value_on(st);
3590 } else {
3591 const char* tname = type2name(java_lang_Class::primitive_type(obj));
3592 st->print("%s", tname ? tname : "type?");
3593 }
3594 } else if (this == vmClasses::MethodType_klass()) {
3595 st->print(" = ");
3596 java_lang_invoke_MethodType::print_signature(obj, st);
3597 } else if (java_lang_boxing_object::is_instance(obj)) {
3598 st->print(" = ");
3599 java_lang_boxing_object::print(obj, st);
3600 } else if (this == vmClasses::LambdaForm_klass()) {
3601 oop vmentry = java_lang_invoke_LambdaForm::vmentry(obj);
3602 if (vmentry != NULL__null) {
3603 st->print(" => ");
3604 vmentry->print_value_on(st);
3605 }
3606 } else if (this == vmClasses::MemberName_klass()) {
3607 Metadata* vmtarget = java_lang_invoke_MemberName::vmtarget(obj);
3608 if (vmtarget != NULL__null) {
3609 st->print(" = ");
3610 vmtarget->print_value_on(st);
3611 } else {
3612 oop clazz = java_lang_invoke_MemberName::clazz(obj);
3613 oop name = java_lang_invoke_MemberName::name(obj);
3614 if (clazz != NULL__null) {
3615 clazz->print_value_on(st);
3616 } else {
3617 st->print("NULL");
3618 }
3619 st->print(".");
3620 if (name != NULL__null) {
3621 name->print_value_on(st);
3622 } else {
3623 st->print("NULL");
3624 }
3625 }
3626 }
3627}
3628
3629const char* InstanceKlass::internal_name() const {
3630 return external_name();
3631}
3632
3633void InstanceKlass::print_class_load_logging(ClassLoaderData* loader_data,
3634 const ModuleEntry* module_entry,
3635 const ClassFileStream* cfs) const {
3636 if (ClassListWriter::is_enabled()) {
3637 ClassListWriter::write(this, cfs);
3638 }
3639
3640 if (!log_is_enabled(Info, class, load)(LogImpl<(LogTag::_class), (LogTag::_load), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG)
>::is_level(LogLevel::Info))
) {
3641 return;
3642 }
3643
3644 ResourceMark rm;
3645 LogMessage(class, load)LogMessageImpl<(LogTag::_class), (LogTag::_load), (LogTag::
__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG
)>
msg;
3646 stringStream info_stream;
3647
3648 // Name and class hierarchy info
3649 info_stream.print("%s", external_name());
3650
3651 // Source
3652 if (cfs != NULL__null) {
3653 if (cfs->source() != NULL__null) {
3654 const char* module_name = (module_entry->name() == NULL__null) ? UNNAMED_MODULE"unnamed module" : module_entry->name()->as_C_string();
3655 if (module_name != NULL__null) {
3656 // When the boot loader created the stream, it didn't know the module name
3657 // yet. Let's format it now.
3658 if (cfs->from_boot_loader_modules_image()) {
3659 info_stream.print(" source: jrt:/%s", module_name);
3660 } else {
3661 info_stream.print(" source: %s", cfs->source());
3662 }
3663 } else {
3664 info_stream.print(" source: %s", cfs->source());
3665 }
3666 } else if (loader_data == ClassLoaderData::the_null_class_loader_data()) {
3667 Thread* current = Thread::current();
3668 Klass* caller = current->is_Java_thread() ?
3669 JavaThread::cast(current)->security_get_caller_class(1):
3670 NULL__null;
3671 // caller can be NULL, for example, during a JVMTI VM_Init hook
3672 if (caller != NULL__null) {
3673 info_stream.print(" source: instance of %s", caller->external_name());
3674 } else {
3675 // source is unknown
3676 }
3677 } else {
3678 oop class_loader = loader_data->class_loader();
3679 info_stream.print(" source: %s", class_loader->klass()->external_name());
3680 }
3681 } else {
3682 assert(this->is_shared(), "must be")do { if (!(this->is_shared())) { (*g_assert_poison) = 'X';
; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3682, "assert(" "this->is_shared()" ") failed", "must be"
); ::breakpoint(); } } while (0)
;
3683 if (MetaspaceShared::is_shared_dynamic((void*)this)) {
3684 info_stream.print(" source: shared objects file (top)");
3685 } else {
3686 info_stream.print(" source: shared objects file");
3687 }
3688 }
3689
3690 msg.info("%s", info_stream.as_string());
3691
3692 if (log_is_enabled(Debug, class, load)(LogImpl<(LogTag::_class), (LogTag::_load), (LogTag::__NO_TAG
), (LogTag::__NO_TAG), (LogTag::__NO_TAG), (LogTag::__NO_TAG)
>::is_level(LogLevel::Debug))
) {
3693 stringStream debug_stream;
3694
3695 // Class hierarchy info
3696 debug_stream.print(" klass: " INTPTR_FORMAT"0x%016" "l" "x" " super: " INTPTR_FORMAT"0x%016" "l" "x",
3697 p2i(this), p2i(superklass()));
3698
3699 // Interfaces
3700 if (local_interfaces() != NULL__null && local_interfaces()->length() > 0) {
3701 debug_stream.print(" interfaces:");
3702 int length = local_interfaces()->length();
3703 for (int i = 0; i < length; i++) {
3704 debug_stream.print(" " INTPTR_FORMAT"0x%016" "l" "x",
3705 p2i(InstanceKlass::cast(local_interfaces()->at(i))));
3706 }
3707 }
3708
3709 // Class loader
3710 debug_stream.print(" loader: [");
3711 loader_data->print_value_on(&debug_stream);
3712 debug_stream.print("]");
3713
3714 // Classfile checksum
3715 if (cfs) {
3716 debug_stream.print(" bytes: %d checksum: %08x",
3717 cfs->length(),
3718 ClassLoader::crc32(0, (const char*)cfs->buffer(),
3719 cfs->length()));
3720 }
3721
3722 msg.debug("%s", debug_stream.as_string());
3723 }
3724}
3725
3726// Verification
3727
3728class VerifyFieldClosure: public BasicOopIterateClosure {
3729 protected:
3730 template <class T> void do_oop_work(T* p) {
3731 oop obj = RawAccess<>::oop_load(p);
3732 if (!oopDesc::is_oop_or_null(obj)) {
3733 tty->print_cr("Failed: " PTR_FORMAT"0x%016" "l" "x" " -> " PTR_FORMAT"0x%016" "l" "x", p2i(p), p2i(obj));
3734 Universe::print_on(tty);
3735 guarantee(false, "boom")do { if (!(false)) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3735, "guarantee(" "false" ") failed", "boom"); ::breakpoint
(); } } while (0)
;
3736 }
3737 }
3738 public:
3739 virtual void do_oop(oop* p) { VerifyFieldClosure::do_oop_work(p); }
3740 virtual void do_oop(narrowOop* p) { VerifyFieldClosure::do_oop_work(p); }
3741};
3742
3743void InstanceKlass::verify_on(outputStream* st) {
3744#ifndef PRODUCT
3745 // Avoid redundant verifies, this really should be in product.
3746 if (_verify_count == Universe::verify_count()) return;
3747 _verify_count = Universe::verify_count();
3748#endif
3749
3750 // Verify Klass
3751 Klass::verify_on(st);
3752
3753 // Verify that klass is present in ClassLoaderData
3754 guarantee(class_loader_data()->contains_klass(this),do { if (!(class_loader_data()->contains_klass(this))) { (
*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3755, "guarantee(" "class_loader_data()->contains_klass(this)"
") failed", "this class isn't found in class loader data"); ::
breakpoint(); } } while (0)
3755 "this class isn't found in class loader data")do { if (!(class_loader_data()->contains_klass(this))) { (
*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3755, "guarantee(" "class_loader_data()->contains_klass(this)"
") failed", "this class isn't found in class loader data"); ::
breakpoint(); } } while (0)
;
3756
3757 // Verify vtables
3758 if (is_linked()) {
3759 // $$$ This used to be done only for m/s collections. Doing it
3760 // always seemed a valid generalization. (DLD -- 6/00)
3761 vtable().verify(st);
3762 }
3763
3764 // Verify first subklass
3765 if (subklass() != NULL__null) {
3766 guarantee(subklass()->is_klass(), "should be klass")do { if (!(subklass()->is_klass())) { (*g_assert_poison) =
'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3766, "guarantee(" "subklass()->is_klass()" ") failed", "should be klass"
); ::breakpoint(); } } while (0)
;
3767 }
3768
3769 // Verify siblings
3770 Klass* super = this->super();
3771 Klass* sib = next_sibling();
3772 if (sib != NULL__null) {
3773 if (sib == this) {
3774 fatal("subclass points to itself " PTR_FORMAT, p2i(sib))do { (*g_assert_poison) = 'X';; report_fatal(INTERNAL_ERROR, "/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3774, "subclass points to itself " "0x%016" "l" "x", p2i(sib
)); ::breakpoint(); } while (0)
;
3775 }
3776
3777 guarantee(sib->is_klass(), "should be klass")do { if (!(sib->is_klass())) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3777, "guarantee(" "sib->is_klass()" ") failed", "should be klass"
); ::breakpoint(); } } while (0)
;
3778 guarantee(sib->super() == super, "siblings should have same superklass")do { if (!(sib->super() == super)) { (*g_assert_poison) = 'X'
;; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3778, "guarantee(" "sib->super() == super" ") failed", "siblings should have same superklass"
); ::breakpoint(); } } while (0)
;
3779 }
3780
3781 // Verify local interfaces
3782 if (local_interfaces()) {
3783 Array<InstanceKlass*>* local_interfaces = this->local_interfaces();
3784 for (int j = 0; j < local_interfaces->length(); j++) {
3785 InstanceKlass* e = local_interfaces->at(j);
3786 guarantee(e->is_klass() && e->is_interface(), "invalid local interface")do { if (!(e->is_klass() && e->is_interface()))
{ (*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3786, "guarantee(" "e->is_klass() && e->is_interface()"
") failed", "invalid local interface"); ::breakpoint(); } } while
(0)
;
3787 }
3788 }
3789
3790 // Verify transitive interfaces
3791 if (transitive_interfaces() != NULL__null) {
3792 Array<InstanceKlass*>* transitive_interfaces = this->transitive_interfaces();
3793 for (int j = 0; j < transitive_interfaces->length(); j++) {
3794 InstanceKlass* e = transitive_interfaces->at(j);
3795 guarantee(e->is_klass() && e->is_interface(), "invalid transitive interface")do { if (!(e->is_klass() && e->is_interface()))
{ (*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3795, "guarantee(" "e->is_klass() && e->is_interface()"
") failed", "invalid transitive interface"); ::breakpoint();
} } while (0)
;
3796 }
3797 }
3798
3799 // Verify methods
3800 if (methods() != NULL__null) {
3801 Array<Method*>* methods = this->methods();
3802 for (int j = 0; j < methods->length(); j++) {
3803 guarantee(methods->at(j)->is_method(), "non-method in methods array")do { if (!(methods->at(j)->is_method())) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3803, "guarantee(" "methods->at(j)->is_method()" ") failed"
, "non-method in methods array"); ::breakpoint(); } } while (
0)
;
3804 }
3805 for (int j = 0; j < methods->length() - 1; j++) {
3806 Method* m1 = methods->at(j);
3807 Method* m2 = methods->at(j + 1);
3808 guarantee(m1->name()->fast_compare(m2->name()) <= 0, "methods not sorted correctly")do { if (!(m1->name()->fast_compare(m2->name()) <=
0)) { (*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3808, "guarantee(" "m1->name()->fast_compare(m2->name()) <= 0"
") failed", "methods not sorted correctly"); ::breakpoint();
} } while (0)
;
3809 }
3810 }
3811
3812 // Verify method ordering
3813 if (method_ordering() != NULL__null) {
3814 Array<int>* method_ordering = this->method_ordering();
3815 int length = method_ordering->length();
3816 if (JvmtiExport::can_maintain_original_method_order() ||
3817 ((UseSharedSpaces || Arguments::is_dumping_archive()) && length != 0)) {
3818 guarantee(length == methods()->length(), "invalid method ordering length")do { if (!(length == methods()->length())) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3818, "guarantee(" "length == methods()->length()" ") failed"
, "invalid method ordering length"); ::breakpoint(); } } while
(0)
;
3819 jlong sum = 0;
3820 for (int j = 0; j < length; j++) {
3821 int original_index = method_ordering->at(j);
3822 guarantee(original_index >= 0, "invalid method ordering index")do { if (!(original_index >= 0)) { (*g_assert_poison) = 'X'
;; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3822, "guarantee(" "original_index >= 0" ") failed", "invalid method ordering index"
); ::breakpoint(); } } while (0)
;
3823 guarantee(original_index < length, "invalid method ordering index")do { if (!(original_index < length)) { (*g_assert_poison) =
'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3823, "guarantee(" "original_index < length" ") failed",
"invalid method ordering index"); ::breakpoint(); } } while (
0)
;
3824 sum += original_index;
3825 }
3826 // Verify sum of indices 0,1,...,length-1
3827 guarantee(sum == ((jlong)length*(length-1))/2, "invalid method ordering sum")do { if (!(sum == ((jlong)length*(length-1))/2)) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3827, "guarantee(" "sum == ((jlong)length*(length-1))/2" ") failed"
, "invalid method ordering sum"); ::breakpoint(); } } while (
0)
;
3828 } else {
3829 guarantee(length == 0, "invalid method ordering length")do { if (!(length == 0)) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3829, "guarantee(" "length == 0" ") failed", "invalid method ordering length"
); ::breakpoint(); } } while (0)
;
3830 }
3831 }
3832
3833 // Verify default methods
3834 if (default_methods() != NULL__null) {
3835 Array<Method*>* methods = this->default_methods();
3836 for (int j = 0; j < methods->length(); j++) {
3837 guarantee(methods->at(j)->is_method(), "non-method in methods array")do { if (!(methods->at(j)->is_method())) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3837, "guarantee(" "methods->at(j)->is_method()" ") failed"
, "non-method in methods array"); ::breakpoint(); } } while (
0)
;
3838 }
3839 for (int j = 0; j < methods->length() - 1; j++) {
3840 Method* m1 = methods->at(j);
3841 Method* m2 = methods->at(j + 1);
3842 guarantee(m1->name()->fast_compare(m2->name()) <= 0, "methods not sorted correctly")do { if (!(m1->name()->fast_compare(m2->name()) <=
0)) { (*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3842, "guarantee(" "m1->name()->fast_compare(m2->name()) <= 0"
") failed", "methods not sorted correctly"); ::breakpoint();
} } while (0)
;
3843 }
3844 }
3845
3846 // Verify JNI static field identifiers
3847 if (jni_ids() != NULL__null) {
3848 jni_ids()->verify(this);
3849 }
3850
3851 // Verify other fields
3852 if (constants() != NULL__null) {
3853 guarantee(constants()->is_constantPool(), "should be constant pool")do { if (!(constants()->is_constantPool())) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3853, "guarantee(" "constants()->is_constantPool()" ") failed"
, "should be constant pool"); ::breakpoint(); } } while (0)
;
3854 }
3855}
3856
3857void InstanceKlass::oop_verify_on(oop obj, outputStream* st) {
3858 Klass::oop_verify_on(obj, st);
3859 VerifyFieldClosure blk;
3860 obj->oop_iterate(&blk);
3861}
3862
3863
3864// JNIid class for jfieldIDs only
3865// Note to reviewers:
3866// These JNI functions are just moved over to column 1 and not changed
3867// in the compressed oops workspace.
3868JNIid::JNIid(Klass* holder, int offset, JNIid* next) {
3869 _holder = holder;
3870 _offset = offset;
3871 _next = next;
3872 debug_only(_is_static_field_id = false;)_is_static_field_id = false;
3873}
3874
3875
3876JNIid* JNIid::find(int offset) {
3877 JNIid* current = this;
3878 while (current != NULL__null) {
3879 if (current->offset() == offset) return current;
3880 current = current->next();
3881 }
3882 return NULL__null;
3883}
3884
3885void JNIid::deallocate(JNIid* current) {
3886 while (current != NULL__null) {
3887 JNIid* next = current->next();
3888 delete current;
3889 current = next;
3890 }
3891}
3892
3893
3894void JNIid::verify(Klass* holder) {
3895 int first_field_offset = InstanceMirrorKlass::offset_of_static_fields();
3896 int end_field_offset;
3897 end_field_offset = first_field_offset + (InstanceKlass::cast(holder)->static_field_size() * wordSize);
3898
3899 JNIid* current = this;
3900 while (current != NULL__null) {
3901 guarantee(current->holder() == holder, "Invalid klass in JNIid")do { if (!(current->holder() == holder)) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3901, "guarantee(" "current->holder() == holder" ") failed"
, "Invalid klass in JNIid"); ::breakpoint(); } } while (0)
;
3902#ifdef ASSERT1
3903 int o = current->offset();
3904 if (current->is_static_field_id()) {
3905 guarantee(o >= first_field_offset && o < end_field_offset, "Invalid static field offset in JNIid")do { if (!(o >= first_field_offset && o < end_field_offset
)) { (*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3905, "guarantee(" "o >= first_field_offset && o < end_field_offset"
") failed", "Invalid static field offset in JNIid"); ::breakpoint
(); } } while (0)
;
3906 }
3907#endif
3908 current = current->next();
3909 }
3910}
3911
3912void InstanceKlass::set_init_state(ClassState state) {
3913#ifdef ASSERT1
3914 bool good_state = is_shared() ? (_init_state <= state)
3915 : (_init_state < state);
3916 assert(good_state || state == allocated, "illegal state transition")do { if (!(good_state || state == allocated)) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3916, "assert(" "good_state || state == allocated" ") failed"
, "illegal state transition"); ::breakpoint(); } } while (0)
;
3917#endif
3918 assert(_init_thread == NULL, "should be cleared before state change")do { if (!(_init_thread == __null)) { (*g_assert_poison) = 'X'
;; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3918, "assert(" "_init_thread == __null" ") failed", "should be cleared before state change"
); ::breakpoint(); } } while (0)
;
3919 _init_state = (u1)state;
3920}
3921
3922#if INCLUDE_JVMTI1
3923
3924// RedefineClasses() support for previous versions
3925
3926// Globally, there is at least one previous version of a class to walk
3927// during class unloading, which is saved because old methods in the class
3928// are still running. Otherwise the previous version list is cleaned up.
3929bool InstanceKlass::_has_previous_versions = false;
3930
3931// Returns true if there are previous versions of a class for class
3932// unloading only. Also resets the flag to false. purge_previous_version
3933// will set the flag to true if there are any left, i.e., if there's any
3934// work to do for next time. This is to avoid the expensive code cache
3935// walk in CLDG::clean_deallocate_lists().
3936bool InstanceKlass::has_previous_versions_and_reset() {
3937 bool ret = _has_previous_versions;
3938 log_trace(redefine, class, iklass, purge)(!(LogImpl<(LogTag::_redefine), (LogTag::_class), (LogTag::
_iklass), (LogTag::_purge), (LogTag::__NO_TAG), (LogTag::__NO_TAG
)>::is_level(LogLevel::Trace))) ? (void)0 : LogImpl<(LogTag
::_redefine), (LogTag::_class), (LogTag::_iklass), (LogTag::_purge
), (LogTag::__NO_TAG), (LogTag::__NO_TAG)>::write<LogLevel
::Trace>
("Class unloading: has_previous_versions = %s",
3939 ret ? "true" : "false");
3940 _has_previous_versions = false;
3941 return ret;
3942}
3943
3944// Purge previous versions before adding new previous versions of the class and
3945// during class unloading.
3946void InstanceKlass::purge_previous_version_list() {
3947 assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint")do { if (!(SafepointSynchronize::is_at_safepoint())) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3947, "assert(" "SafepointSynchronize::is_at_safepoint()" ") failed"
, "only called at safepoint"); ::breakpoint(); } } while (0)
;
3948 assert(has_been_redefined(), "Should only be called for main class")do { if (!(has_been_redefined())) { (*g_assert_poison) = 'X';
; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3948, "assert(" "has_been_redefined()" ") failed", "Should only be called for main class"
); ::breakpoint(); } } while (0)
;
3949
3950 // Quick exit.
3951 if (previous_versions() == NULL__null) {
3952 return;
3953 }
3954
3955 // This klass has previous versions so see what we can cleanup
3956 // while it is safe to do so.
3957
3958 int deleted_count = 0; // leave debugging breadcrumbs
3959 int live_count = 0;
3960 ClassLoaderData* loader_data = class_loader_data();
3961 assert(loader_data != NULL, "should never be null")do { if (!(loader_data != __null)) { (*g_assert_poison) = 'X'
;; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3961, "assert(" "loader_data != __null" ") failed", "should never be null"
); ::breakpoint(); } } while (0)
;
3962
3963 ResourceMark rm;
3964 log_trace(redefine, class, iklass, purge)(!(LogImpl<(LogTag::_redefine), (LogTag::_class), (LogTag::
_iklass), (LogTag::_purge), (LogTag::__NO_TAG), (LogTag::__NO_TAG
)>::is_level(LogLevel::Trace))) ? (void)0 : LogImpl<(LogTag
::_redefine), (LogTag::_class), (LogTag::_iklass), (LogTag::_purge
), (LogTag::__NO_TAG), (LogTag::__NO_TAG)>::write<LogLevel
::Trace>
("%s: previous versions", external_name());
3965
3966 // previous versions are linked together through the InstanceKlass
3967 InstanceKlass* pv_node = previous_versions();
3968 InstanceKlass* last = this;
3969 int version = 0;
3970
3971 // check the previous versions list
3972 for (; pv_node != NULL__null; ) {
3973
3974 ConstantPool* pvcp = pv_node->constants();
3975 assert(pvcp != NULL, "cp ref was unexpectedly cleared")do { if (!(pvcp != __null)) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3975, "assert(" "pvcp != __null" ") failed", "cp ref was unexpectedly cleared"
); ::breakpoint(); } } while (0)
;
3976
3977 if (!pvcp->on_stack()) {
3978 // If the constant pool isn't on stack, none of the methods
3979 // are executing. Unlink this previous_version.
3980 // The previous version InstanceKlass is on the ClassLoaderData deallocate list
3981 // so will be deallocated during the next phase of class unloading.
3982 log_trace(redefine, class, iklass, purge)(!(LogImpl<(LogTag::_redefine), (LogTag::_class), (LogTag::
_iklass), (LogTag::_purge), (LogTag::__NO_TAG), (LogTag::__NO_TAG
)>::is_level(LogLevel::Trace))) ? (void)0 : LogImpl<(LogTag
::_redefine), (LogTag::_class), (LogTag::_iklass), (LogTag::_purge
), (LogTag::__NO_TAG), (LogTag::__NO_TAG)>::write<LogLevel
::Trace>
3983 ("previous version " INTPTR_FORMAT"0x%016" "l" "x" " is dead.", p2i(pv_node));
3984 // Unlink from previous version list.
3985 assert(pv_node->class_loader_data() == loader_data, "wrong loader_data")do { if (!(pv_node->class_loader_data() == loader_data)) {
(*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3985, "assert(" "pv_node->class_loader_data() == loader_data"
") failed", "wrong loader_data"); ::breakpoint(); } } while (
0)
;
3986 InstanceKlass* next = pv_node->previous_versions();
3987 pv_node->link_previous_versions(NULL__null); // point next to NULL
3988 last->link_previous_versions(next);
3989 // Delete this node directly. Nothing is referring to it and we don't
3990 // want it to increase the counter for metadata to delete in CLDG.
3991 MetadataFactory::free_metadata(loader_data, pv_node);
3992 pv_node = next;
3993 deleted_count++;
3994 version++;
3995 continue;
3996 } else {
3997 log_trace(redefine, class, iklass, purge)(!(LogImpl<(LogTag::_redefine), (LogTag::_class), (LogTag::
_iklass), (LogTag::_purge), (LogTag::__NO_TAG), (LogTag::__NO_TAG
)>::is_level(LogLevel::Trace))) ? (void)0 : LogImpl<(LogTag
::_redefine), (LogTag::_class), (LogTag::_iklass), (LogTag::_purge
), (LogTag::__NO_TAG), (LogTag::__NO_TAG)>::write<LogLevel
::Trace>
("previous version " INTPTR_FORMAT"0x%016" "l" "x" " is alive", p2i(pv_node));
3998 assert(pvcp->pool_holder() != NULL, "Constant pool with no holder")do { if (!(pvcp->pool_holder() != __null)) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3998, "assert(" "pvcp->pool_holder() != __null" ") failed"
, "Constant pool with no holder"); ::breakpoint(); } } while (
0)
;
3999 guarantee (!loader_data->is_unloading(), "unloaded classes can't be on the stack")do { if (!(!loader_data->is_unloading())) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 3999, "guarantee(" "!loader_data->is_unloading()" ") failed"
, "unloaded classes can't be on the stack"); ::breakpoint(); }
} while (0)
;
4000 live_count++;
4001 // found a previous version for next time we do class unloading
4002 _has_previous_versions = true;
4003 }
4004
4005 // next previous version
4006 last = pv_node;
4007 pv_node = pv_node->previous_versions();
4008 version++;
4009 }
4010 log_trace(redefine, class, iklass, purge)(!(LogImpl<(LogTag::_redefine), (LogTag::_class), (LogTag::
_iklass), (LogTag::_purge), (LogTag::__NO_TAG), (LogTag::__NO_TAG
)>::is_level(LogLevel::Trace))) ? (void)0 : LogImpl<(LogTag
::_redefine), (LogTag::_class), (LogTag::_iklass), (LogTag::_purge
), (LogTag::__NO_TAG), (LogTag::__NO_TAG)>::write<LogLevel
::Trace>
4011 ("previous version stats: live=%d, deleted=%d", live_count, deleted_count);
4012}
4013
4014void InstanceKlass::mark_newly_obsolete_methods(Array<Method*>* old_methods,
4015 int emcp_method_count) {
4016 int obsolete_method_count = old_methods->length() - emcp_method_count;
4017
4018 if (emcp_method_count != 0 && obsolete_method_count != 0 &&
4019 _previous_versions != NULL__null) {
4020 // We have a mix of obsolete and EMCP methods so we have to
4021 // clear out any matching EMCP method entries the hard way.
4022 int local_count = 0;
4023 for (int i = 0; i < old_methods->length(); i++) {
4024 Method* old_method = old_methods->at(i);
4025 if (old_method->is_obsolete()) {
4026 // only obsolete methods are interesting
4027 Symbol* m_name = old_method->name();
4028 Symbol* m_signature = old_method->signature();
4029
4030 // previous versions are linked together through the InstanceKlass
4031 int j = 0;
4032 for (InstanceKlass* prev_version = _previous_versions;
4033 prev_version != NULL__null;
4034 prev_version = prev_version->previous_versions(), j++) {
4035
4036 Array<Method*>* method_refs = prev_version->methods();
4037 for (int k = 0; k < method_refs->length(); k++) {
4038 Method* method = method_refs->at(k);
4039
4040 if (!method->is_obsolete() &&
4041 method->name() == m_name &&
4042 method->signature() == m_signature) {
4043 // The current RedefineClasses() call has made all EMCP
4044 // versions of this method obsolete so mark it as obsolete
4045 log_trace(redefine, class, iklass, add)(!(LogImpl<(LogTag::_redefine), (LogTag::_class), (LogTag::
_iklass), (LogTag::_add), (LogTag::__NO_TAG), (LogTag::__NO_TAG
)>::is_level(LogLevel::Trace))) ? (void)0 : LogImpl<(LogTag
::_redefine), (LogTag::_class), (LogTag::_iklass), (LogTag::_add
), (LogTag::__NO_TAG), (LogTag::__NO_TAG)>::write<LogLevel
::Trace>
4046 ("%s(%s): flush obsolete method @%d in version @%d",
4047 m_name->as_C_string(), m_signature->as_C_string(), k, j);
4048
4049 method->set_is_obsolete();
4050 break;
4051 }
4052 }
4053
4054 // The previous loop may not find a matching EMCP method, but
4055 // that doesn't mean that we can optimize and not go any
4056 // further back in the PreviousVersion generations. The EMCP
4057 // method for this generation could have already been made obsolete,
4058 // but there still may be an older EMCP method that has not
4059 // been made obsolete.
4060 }
4061
4062 if (++local_count >= obsolete_method_count) {
4063 // no more obsolete methods so bail out now
4064 break;
4065 }
4066 }
4067 }
4068 }
4069}
4070
4071// Save the scratch_class as the previous version if any of the methods are running.
4072// The previous_versions are used to set breakpoints in EMCP methods and they are
4073// also used to clean MethodData links to redefined methods that are no longer running.
4074void InstanceKlass::add_previous_version(InstanceKlass* scratch_class,
4075 int emcp_method_count) {
4076 assert(Thread::current()->is_VM_thread(),do { if (!(Thread::current()->is_VM_thread())) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 4077, "assert(" "Thread::current()->is_VM_thread()" ") failed"
, "only VMThread can add previous versions"); ::breakpoint();
} } while (0)
4077 "only VMThread can add previous versions")do { if (!(Thread::current()->is_VM_thread())) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 4077, "assert(" "Thread::current()->is_VM_thread()" ") failed"
, "only VMThread can add previous versions"); ::breakpoint();
} } while (0)
;
4078
4079 ResourceMark rm;
4080 log_trace(redefine, class, iklass, add)(!(LogImpl<(LogTag::_redefine), (LogTag::_class), (LogTag::
_iklass), (LogTag::_add), (LogTag::__NO_TAG), (LogTag::__NO_TAG
)>::is_level(LogLevel::Trace))) ? (void)0 : LogImpl<(LogTag
::_redefine), (LogTag::_class), (LogTag::_iklass), (LogTag::_add
), (LogTag::__NO_TAG), (LogTag::__NO_TAG)>::write<LogLevel
::Trace>
4081 ("adding previous version ref for %s, EMCP_cnt=%d", scratch_class->external_name(), emcp_method_count);
4082
4083 // Clean out old previous versions for this class
4084 purge_previous_version_list();
4085
4086 // Mark newly obsolete methods in remaining previous versions. An EMCP method from
4087 // a previous redefinition may be made obsolete by this redefinition.
4088 Array<Method*>* old_methods = scratch_class->methods();
4089 mark_newly_obsolete_methods(old_methods, emcp_method_count);
4090
4091 // If the constant pool for this previous version of the class
4092 // is not marked as being on the stack, then none of the methods
4093 // in this previous version of the class are on the stack so
4094 // we don't need to add this as a previous version.
4095 ConstantPool* cp_ref = scratch_class->constants();
4096 if (!cp_ref->on_stack()) {
4097 log_trace(redefine, class, iklass, add)(!(LogImpl<(LogTag::_redefine), (LogTag::_class), (LogTag::
_iklass), (LogTag::_add), (LogTag::__NO_TAG), (LogTag::__NO_TAG
)>::is_level(LogLevel::Trace))) ? (void)0 : LogImpl<(LogTag
::_redefine), (LogTag::_class), (LogTag::_iklass), (LogTag::_add
), (LogTag::__NO_TAG), (LogTag::__NO_TAG)>::write<LogLevel
::Trace>
("scratch class not added; no methods are running");
4098 scratch_class->class_loader_data()->add_to_deallocate_list(scratch_class);
4099 return;
4100 }
4101
4102 // Add previous version if any methods are still running.
4103 // Set has_previous_version flag for processing during class unloading.
4104 _has_previous_versions = true;
4105 log_trace(redefine, class, iklass, add)(!(LogImpl<(LogTag::_redefine), (LogTag::_class), (LogTag::
_iklass), (LogTag::_add), (LogTag::__NO_TAG), (LogTag::__NO_TAG
)>::is_level(LogLevel::Trace))) ? (void)0 : LogImpl<(LogTag
::_redefine), (LogTag::_class), (LogTag::_iklass), (LogTag::_add
), (LogTag::__NO_TAG), (LogTag::__NO_TAG)>::write<LogLevel
::Trace>
("scratch class added; one of its methods is on_stack.");
4106 assert(scratch_class->previous_versions() == NULL, "shouldn't have a previous version")do { if (!(scratch_class->previous_versions() == __null)) {
(*g_assert_poison) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 4106, "assert(" "scratch_class->previous_versions() == __null"
") failed", "shouldn't have a previous version"); ::breakpoint
(); } } while (0)
;
4107 scratch_class->link_previous_versions(previous_versions());
4108 link_previous_versions(scratch_class);
4109} // end add_previous_version()
4110
4111#endif // INCLUDE_JVMTI
4112
4113Method* InstanceKlass::method_with_idnum(int idnum) {
4114 Method* m = NULL__null;
4115 if (idnum < methods()->length()) {
4116 m = methods()->at(idnum);
4117 }
4118 if (m == NULL__null || m->method_idnum() != idnum) {
4119 for (int index = 0; index < methods()->length(); ++index) {
4120 m = methods()->at(index);
4121 if (m->method_idnum() == idnum) {
4122 return m;
4123 }
4124 }
4125 // None found, return null for the caller to handle.
4126 return NULL__null;
4127 }
4128 return m;
4129}
4130
4131
4132Method* InstanceKlass::method_with_orig_idnum(int idnum) {
4133 if (idnum >= methods()->length()) {
4134 return NULL__null;
4135 }
4136 Method* m = methods()->at(idnum);
4137 if (m != NULL__null && m->orig_method_idnum() == idnum) {
4138 return m;
4139 }
4140 // Obsolete method idnum does not match the original idnum
4141 for (int index = 0; index < methods()->length(); ++index) {
4142 m = methods()->at(index);
4143 if (m->orig_method_idnum() == idnum) {
4144 return m;
4145 }
4146 }
4147 // None found, return null for the caller to handle.
4148 return NULL__null;
4149}
4150
4151
4152Method* InstanceKlass::method_with_orig_idnum(int idnum, int version) {
4153 InstanceKlass* holder = get_klass_version(version);
4154 if (holder == NULL__null) {
4155 return NULL__null; // The version of klass is gone, no method is found
4156 }
4157 Method* method = holder->method_with_orig_idnum(idnum);
4158 return method;
4159}
4160
4161#if INCLUDE_JVMTI1
4162JvmtiCachedClassFileData* InstanceKlass::get_cached_class_file() {
4163 return _cached_class_file;
4164}
4165
4166jint InstanceKlass::get_cached_class_file_len() {
4167 return VM_RedefineClasses::get_cached_class_file_len(_cached_class_file);
4168}
4169
4170unsigned char * InstanceKlass::get_cached_class_file_bytes() {
4171 return VM_RedefineClasses::get_cached_class_file_bytes(_cached_class_file);
4172}
4173#endif
4174
4175// Make a step iterating over the class hierarchy under the root class.
4176// Skips subclasses if requested.
4177void ClassHierarchyIterator::next() {
4178 assert(_current != NULL, "required")do { if (!(_current != __null)) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/instanceKlass.cpp"
, 4178, "assert(" "_current != __null" ") failed", "required"
); ::breakpoint(); } } while (0)
;
4179 if (_visit_subclasses && _current->subklass() != NULL__null) {
4180 _current = _current->subklass();
4181 return; // visit next subclass
4182 }
4183 _visit_subclasses = true; // reset
4184 while (_current->next_sibling() == NULL__null && _current != _root) {
4185 _current = _current->superklass(); // backtrack; no more sibling subclasses left
4186 }
4187 if (_current == _root) {
4188 // Iteration is over (back at root after backtracking). Invalidate the iterator.
4189 _current = NULL__null;
4190 return;
4191 }
4192 _current = _current->next_sibling();
4193 return; // visit next sibling subclass
4194}

/home/daniel/Projects/java/jdk/src/hotspot/share/oops/method.hpp

1/*
2 * Copyright (c) 1997, 2021, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25#ifndef SHARE_OOPS_METHOD_HPP
26#define SHARE_OOPS_METHOD_HPP
27
28#include "code/compressedStream.hpp"
29#include "compiler/compilerDefinitions.hpp"
30#include "interpreter/invocationCounter.hpp"
31#include "oops/annotations.hpp"
32#include "oops/constantPool.hpp"
33#include "oops/methodCounters.hpp"
34#include "oops/instanceKlass.hpp"
35#include "oops/oop.hpp"
36#include "oops/typeArrayOop.hpp"
37#include "utilities/accessFlags.hpp"
38#include "utilities/align.hpp"
39#include "utilities/growableArray.hpp"
40#include "utilities/macros.hpp"
41#include "utilities/vmEnums.hpp"
42#if INCLUDE_JFR1
43#include "jfr/support/jfrTraceIdExtension.hpp"
44#endif
45
46
47// A Method represents a Java method.
48//
49// Note that most applications load thousands of methods, so keeping the size of this
50// class small has a big impact on footprint.
51//
52// Note that native_function and signature_handler have to be at fixed offsets
53// (required by the interpreter)
54//
55// Method embedded field layout (after declared fields):
56// [EMBEDDED native_function (present only if native) ]
57// [EMBEDDED signature_handler (present only if native) ]
58
59class CheckedExceptionElement;
60class LocalVariableTableElement;
61class AdapterHandlerEntry;
62class MethodData;
63class MethodCounters;
64class ConstMethod;
65class InlineTableSizes;
66class CompiledMethod;
67class InterpreterOopMap;
68
69class Method : public Metadata {
70 friend class VMStructs;
71 friend class JVMCIVMStructs;
72 private:
73 // If you add a new field that points to any metaspace object, you
74 // must add this field to Method::metaspace_pointers_do().
75 ConstMethod* _constMethod; // Method read-only data.
76 MethodData* _method_data;
77 MethodCounters* _method_counters;
78 AdapterHandlerEntry* _adapter;
79 AccessFlags _access_flags; // Access flags
80 int _vtable_index; // vtable index of this method (see VtableIndexFlag)
81 // note: can have vtables with >2**16 elements (because of inheritance)
82 u2 _intrinsic_id; // vmSymbols::intrinsic_id (0 == _none)
83
84 // Flags
85 enum Flags {
86 _caller_sensitive = 1 << 0,
87 _force_inline = 1 << 1,
88 _dont_inline = 1 << 2,
89 _hidden = 1 << 3,
90 _has_injected_profile = 1 << 4,
91 _intrinsic_candidate = 1 << 5,
92 _reserved_stack_access = 1 << 6,
93 _scoped = 1 << 7
94 };
95 mutable u2 _flags;
96
97 JFR_ONLY(DEFINE_TRACE_FLAG;)mutable JfrTraceFlag _trace_flags;
98
99#ifndef PRODUCT
100 int64_t _compiled_invocation_count;
101#endif
102 // Entry point for calling both from and to the interpreter.
103 address _i2i_entry; // All-args-on-stack calling convention
104 // Entry point for calling from compiled code, to compiled code if it exists
105 // or else the interpreter.
106 volatile address _from_compiled_entry; // Cache of: _code ? _code->entry_point() : _adapter->c2i_entry()
107 // The entry point for calling both from and to compiled code is
108 // "_code->entry_point()". Because of tiered compilation and de-opt, this
109 // field can come and go. It can transition from NULL to not-null at any
110 // time (whenever a compile completes). It can transition from not-null to
111 // NULL only at safepoints (because of a de-opt).
112 CompiledMethod* volatile _code; // Points to the corresponding piece of native code
113 volatile address _from_interpreted_entry; // Cache of _code ? _adapter->i2c_entry() : _i2i_entry
114
115 // Constructor
116 Method(ConstMethod* xconst, AccessFlags access_flags);
117 public:
118
119 static Method* allocate(ClassLoaderData* loader_data,
120 int byte_code_size,
121 AccessFlags access_flags,
122 InlineTableSizes* sizes,
123 ConstMethod::MethodType method_type,
124 TRAPSJavaThread* __the_thread__);
125
126 // CDS and vtbl checking can create an empty Method to get vtbl pointer.
127 Method(){}
128
129 virtual bool is_method() const { return true; }
130
131 void restore_unshareable_info(TRAPSJavaThread* __the_thread__);
132
133 // accessors for instance variables
134
135 ConstMethod* constMethod() const { return _constMethod; }
136 void set_constMethod(ConstMethod* xconst) { _constMethod = xconst; }
137
138
139 static address make_adapters(const methodHandle& mh, TRAPSJavaThread* __the_thread__);
140 address from_compiled_entry() const;
141 address from_compiled_entry_no_trampoline() const;
142 address from_interpreted_entry() const;
143
144 // access flag
145 AccessFlags access_flags() const { return _access_flags; }
146 void set_access_flags(AccessFlags flags) { _access_flags = flags; }
147
148 // name
149 Symbol* name() const { return constants()->symbol_at(name_index()); }
150 int name_index() const { return constMethod()->name_index(); }
151 void set_name_index(int index) { constMethod()->set_name_index(index); }
152
153 // signature
154 Symbol* signature() const { return constants()->symbol_at(signature_index()); }
155 int signature_index() const { return constMethod()->signature_index(); }
156 void set_signature_index(int index) { constMethod()->set_signature_index(index); }
157
158 // generics support
159 Symbol* generic_signature() const { int idx = generic_signature_index(); return ((idx != 0) ? constants()->symbol_at(idx) : (Symbol*)NULL__null); }
160 int generic_signature_index() const { return constMethod()->generic_signature_index(); }
161 void set_generic_signature_index(int index) { constMethod()->set_generic_signature_index(index); }
162
163 // annotations support
164 AnnotationArray* annotations() const {
165 return constMethod()->method_annotations();
166 }
167 AnnotationArray* parameter_annotations() const {
168 return constMethod()->parameter_annotations();
169 }
170 AnnotationArray* annotation_default() const {
171 return constMethod()->default_annotations();
172 }
173 AnnotationArray* type_annotations() const {
174 return constMethod()->type_annotations();
175 }
176
177 // Helper routine: get klass name + "." + method name + signature as
178 // C string, for the purpose of providing more useful
179 // fatal error handling. The string is allocated in resource
180 // area if a buffer is not provided by the caller.
181 char* name_and_sig_as_C_string() const;
182 char* name_and_sig_as_C_string(char* buf, int size) const;
183
184 // Static routine in the situations we don't have a Method*
185 static char* name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature);
186 static char* name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature, char* buf, int size);
187
188 // Get return type + klass name + "." + method name + ( parameters types )
189 // as a C string or print it to an outputStream.
190 // This is to be used to assemble strings passed to Java, so that
191 // the text more resembles Java code. Used in exception messages.
192 // Memory is allocated in the resource area; the caller needs
193 // a ResourceMark.
194 const char* external_name() const;
195 void print_external_name(outputStream *os) const;
196
197 static const char* external_name( Klass* klass, Symbol* method_name, Symbol* signature);
198 static void print_external_name(outputStream *os, Klass* klass, Symbol* method_name, Symbol* signature);
199
200 Bytecodes::Code java_code_at(int bci) const {
201 return Bytecodes::java_code_at(this, bcp_from(bci));
202 }
203 Bytecodes::Code code_at(int bci) const {
204 return Bytecodes::code_at(this, bcp_from(bci));
205 }
206
207 // JVMTI breakpoints
208#if !INCLUDE_JVMTI1
209 Bytecodes::Code orig_bytecode_at(int bci) const {
210 ShouldNotReachHere()do { (*g_assert_poison) = 'X';; report_should_not_reach_here(
"/home/daniel/Projects/java/jdk/src/hotspot/share/oops/method.hpp"
, 210); ::breakpoint(); } while (0)
;
211 return Bytecodes::_shouldnotreachhere;
212 }
213 void set_orig_bytecode_at(int bci, Bytecodes::Code code) {
214 ShouldNotReachHere()do { (*g_assert_poison) = 'X';; report_should_not_reach_here(
"/home/daniel/Projects/java/jdk/src/hotspot/share/oops/method.hpp"
, 214); ::breakpoint(); } while (0)
;
215 };
216 u2 number_of_breakpoints() const {return 0;}
217#else // !INCLUDE_JVMTI
218 Bytecodes::Code orig_bytecode_at(int bci) const;
219 void set_orig_bytecode_at(int bci, Bytecodes::Code code);
220 void set_breakpoint(int bci);
221 void clear_breakpoint(int bci);
222 void clear_all_breakpoints();
223 // Tracking number of breakpoints, for fullspeed debugging.
224 // Only mutated by VM thread.
225 u2 number_of_breakpoints() const {
226 MethodCounters* mcs = method_counters();
227 if (mcs == NULL__null) {
228 return 0;
229 } else {
230 return mcs->number_of_breakpoints();
231 }
232 }
233 void incr_number_of_breakpoints(Thread* current) {
234 MethodCounters* mcs = get_method_counters(current);
235 if (mcs != NULL__null) {
236 mcs->incr_number_of_breakpoints();
237 }
238 }
239 void decr_number_of_breakpoints(Thread* current) {
240 MethodCounters* mcs = get_method_counters(current);
241 if (mcs != NULL__null) {
242 mcs->decr_number_of_breakpoints();
243 }
244 }
245 // Initialization only
246 void clear_number_of_breakpoints() {
247 MethodCounters* mcs = method_counters();
248 if (mcs != NULL__null) {
249 mcs->clear_number_of_breakpoints();
250 }
251 }
252#endif // !INCLUDE_JVMTI
253
254 // index into InstanceKlass methods() array
255 // note: also used by jfr
256 u2 method_idnum() const { return constMethod()->method_idnum(); }
257 void set_method_idnum(u2 idnum) { constMethod()->set_method_idnum(idnum); }
258
259 u2 orig_method_idnum() const { return constMethod()->orig_method_idnum(); }
260 void set_orig_method_idnum(u2 idnum) { constMethod()->set_orig_method_idnum(idnum); }
261
262 // code size
263 int code_size() const { return constMethod()->code_size(); }
264
265 // method size in words
266 int method_size() const { return sizeof(Method)/wordSize + ( is_native() ? 2 : 0 ); }
267
268 // constant pool for Klass* holding this method
269 ConstantPool* constants() const { return constMethod()->constants(); }
270 void set_constants(ConstantPool* c) { constMethod()->set_constants(c); }
271
272 // max stack
273 // return original max stack size for method verification
274 int verifier_max_stack() const { return constMethod()->max_stack(); }
275 int max_stack() const { return constMethod()->max_stack() + extra_stack_entries(); }
276 void set_max_stack(int size) { constMethod()->set_max_stack(size); }
277
278 // max locals
279 int max_locals() const { return constMethod()->max_locals(); }
280 void set_max_locals(int size) { constMethod()->set_max_locals(size); }
281
282 int highest_comp_level() const;
283 void set_highest_comp_level(int level);
284 int highest_osr_comp_level() const;
285 void set_highest_osr_comp_level(int level);
286
287#if COMPILER2_OR_JVMCI1
288 // Count of times method was exited via exception while interpreting
289 void interpreter_throwout_increment(Thread* current) {
290 MethodCounters* mcs = get_method_counters(current);
291 if (mcs != NULL__null) {
292 mcs->interpreter_throwout_increment();
293 }
294 }
295#endif
296
297 int interpreter_throwout_count() const {
298 MethodCounters* mcs = method_counters();
299 if (mcs == NULL__null) {
300 return 0;
301 } else {
302 return mcs->interpreter_throwout_count();
303 }
304 }
305
306 // Derive stuff from the signature at load time.
307 void compute_from_signature(Symbol* sig);
308
309 // size of parameters (receiver if any + arguments)
310 int size_of_parameters() const { return constMethod()->size_of_parameters(); }
311 void set_size_of_parameters(int size) { constMethod()->set_size_of_parameters(size); }
312
313 bool has_stackmap_table() const {
314 return constMethod()->has_stackmap_table();
315 }
316
317 Array<u1>* stackmap_data() const {
318 return constMethod()->stackmap_data();
319 }
320
321 void set_stackmap_data(Array<u1>* sd) {
322 constMethod()->set_stackmap_data(sd);
323 }
324
325 // exception handler table
326 bool has_exception_handler() const
327 { return constMethod()->has_exception_handler(); }
328 int exception_table_length() const
329 { return constMethod()->exception_table_length(); }
330 ExceptionTableElement* exception_table_start() const
331 { return constMethod()->exception_table_start(); }
332
333 // Finds the first entry point bci of an exception handler for an
334 // exception of klass ex_klass thrown at throw_bci. A value of NULL
335 // for ex_klass indicates that the exception klass is not known; in
336 // this case it matches any constraint class. Returns -1 if the
337 // exception cannot be handled in this method. The handler
338 // constraint classes are loaded if necessary. Note that this may
339 // throw an exception if loading of the constraint classes causes
340 // an IllegalAccessError (bugid 4307310) or an OutOfMemoryError.
341 // If an exception is thrown, returns the bci of the
342 // exception handler which caused the exception to be thrown, which
343 // is needed for proper retries. See, for example,
344 // InterpreterRuntime::exception_handler_for_exception.
345 static int fast_exception_handler_bci_for(const methodHandle& mh, Klass* ex_klass, int throw_bci, TRAPSJavaThread* __the_thread__);
346
347 static bool register_native(Klass* k,
348 Symbol* name,
349 Symbol* signature,
350 address entry,
351 TRAPSJavaThread* __the_thread__);
352
353 // method data access
354 MethodData* method_data() const {
355 return _method_data;
356 }
357
358 void set_method_data(MethodData* data);
359
360 MethodCounters* method_counters() const {
361 return _method_counters;
362 }
363
364 void clear_method_counters() {
365 _method_counters = NULL__null;
366 }
367
368 bool init_method_counters(MethodCounters* counters);
369
370 int prev_event_count() const {
371 MethodCounters* mcs = method_counters();
372 return mcs == NULL__null ? 0 : mcs->prev_event_count();
373 }
374 void set_prev_event_count(int count) {
375 MethodCounters* mcs = method_counters();
376 if (mcs != NULL__null) {
377 mcs->set_prev_event_count(count);
378 }
379 }
380 jlong prev_time() const {
381 MethodCounters* mcs = method_counters();
382 return mcs == NULL__null ? 0 : mcs->prev_time();
383 }
384 void set_prev_time(jlong time) {
385 MethodCounters* mcs = method_counters();
386 if (mcs != NULL__null) {
387 mcs->set_prev_time(time);
388 }
389 }
390 float rate() const {
391 MethodCounters* mcs = method_counters();
392 return mcs == NULL__null ? 0 : mcs->rate();
393 }
394 void set_rate(float rate) {
395 MethodCounters* mcs = method_counters();
396 if (mcs != NULL__null) {
397 mcs->set_rate(rate);
398 }
399 }
400
401 int nmethod_age() const {
402 if (method_counters() == NULL__null) {
403 return INT_MAX2147483647;
404 } else {
405 return method_counters()->nmethod_age();
406 }
407 }
408
409 int invocation_count() const;
410 int backedge_count() const;
411
412 bool was_executed_more_than(int n);
413 bool was_never_executed() { return !was_executed_more_than(0); }
414
415 static void build_interpreter_method_data(const methodHandle& method, TRAPSJavaThread* __the_thread__);
416
417 static MethodCounters* build_method_counters(Thread* current, Method* m);
418
419 int interpreter_invocation_count() { return invocation_count(); }
420
421#ifndef PRODUCT
422 int64_t compiled_invocation_count() const { return _compiled_invocation_count;}
423 void set_compiled_invocation_count(int count) { _compiled_invocation_count = (int64_t)count; }
424#else
425 // for PrintMethodData in a product build
426 int64_t compiled_invocation_count() const { return 0; }
427#endif // not PRODUCT
428
429 // Clear (non-shared space) pointers which could not be relevant
430 // if this (shared) method were mapped into another JVM.
431 void remove_unshareable_info();
432
433 // nmethod/verified compiler entry
434 address verified_code_entry();
435 bool check_code() const; // Not inline to avoid circular ref
436 CompiledMethod* volatile code() const;
437
438 // Locks CompiledMethod_lock if not held.
439 void unlink_code(CompiledMethod *compare);
440 // Locks CompiledMethod_lock if not held.
441 void unlink_code();
442
443private:
444 // Either called with CompiledMethod_lock held or from constructor.
445 void clear_code();
446
447public:
448 static void set_code(const methodHandle& mh, CompiledMethod* code);
449 void set_adapter_entry(AdapterHandlerEntry* adapter) {
450 _adapter = adapter;
451 }
452 void set_from_compiled_entry(address entry) {
453 _from_compiled_entry = entry;
454 }
455
456 address get_i2c_entry();
457 address get_c2i_entry();
458 address get_c2i_unverified_entry();
459 address get_c2i_no_clinit_check_entry();
460 AdapterHandlerEntry* adapter() const {
461 return _adapter;
462 }
463 // setup entry points
464 void link_method(const methodHandle& method, TRAPSJavaThread* __the_thread__);
465 // clear entry points. Used by sharing code during dump time
466 void unlink_method() NOT_CDS_RETURN;
467
468 virtual void metaspace_pointers_do(MetaspaceClosure* iter);
469 virtual MetaspaceObj::Type type() const { return MethodType; }
470
471 // vtable index
472 enum VtableIndexFlag {
473 // Valid vtable indexes are non-negative (>= 0).
474 // These few negative values are used as sentinels.
475 itable_index_max = -10, // first itable index, growing downward
476 pending_itable_index = -9, // itable index will be assigned
477 invalid_vtable_index = -4, // distinct from any valid vtable index
478 garbage_vtable_index = -3, // not yet linked; no vtable layout yet
479 nonvirtual_vtable_index = -2 // there is no need for vtable dispatch
480 // 6330203 Note: Do not use -1, which was overloaded with many meanings.
481 };
482 DEBUG_ONLY(bool valid_vtable_index() const { return _vtable_index >= nonvirtual_vtable_index; })bool valid_vtable_index() const { return _vtable_index >= nonvirtual_vtable_index
; }
483 bool has_vtable_index() const { return _vtable_index >= 0; }
484 int vtable_index() const { return _vtable_index; }
485 void set_vtable_index(int index);
486 DEBUG_ONLY(bool valid_itable_index() const { return _vtable_index <= pending_itable_index; })bool valid_itable_index() const { return _vtable_index <= pending_itable_index
; }
487 bool has_itable_index() const { return _vtable_index <= itable_index_max; }
488 int itable_index() const { assert(valid_itable_index(), "")do { if (!(valid_itable_index())) { (*g_assert_poison) = 'X';
; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/method.hpp"
, 488, "assert(" "valid_itable_index()" ") failed", ""); ::breakpoint
(); } } while (0)
;
489 return itable_index_max - _vtable_index; }
490 void set_itable_index(int index);
491
492 // interpreter entry
493 address interpreter_entry() const { return _i2i_entry; }
494 // Only used when first initialize so we can set _i2i_entry and _from_interpreted_entry
495 void set_interpreter_entry(address entry) {
496 if (_i2i_entry != entry) {
497 _i2i_entry = entry;
498 }
499 if (_from_interpreted_entry != entry) {
500 _from_interpreted_entry = entry;
501 }
502 }
503
504 // native function (used for native methods only)
505 enum {
506 native_bind_event_is_interesting = true
507 };
508 address native_function() const { return *(native_function_addr()); }
509
510 // Must specify a real function (not NULL).
511 // Use clear_native_function() to unregister.
512 void set_native_function(address function, bool post_event_flag);
513 bool has_native_function() const;
514 void clear_native_function();
515
516 // signature handler (used for native methods only)
517 address signature_handler() const { return *(signature_handler_addr()); }
518 void set_signature_handler(address handler);
519
520 // Interpreter oopmap support
521 void mask_for(int bci, InterpreterOopMap* mask);
522
523 // operations on invocation counter
524 void print_invocation_count();
525
526 // byte codes
527 void set_code(address code) { return constMethod()->set_code(code); }
528 address code_base() const { return constMethod()->code_base(); }
529 bool contains(address bcp) const { return constMethod()->contains(bcp); }
530
531 // prints byte codes
532 void print_codes() const { print_codes_on(tty); }
533 void print_codes_on(outputStream* st) const;
534 void print_codes_on(int from, int to, outputStream* st) const;
535
536 // method parameters
537 bool has_method_parameters() const
538 { return constMethod()->has_method_parameters(); }
539 int method_parameters_length() const
540 { return constMethod()->method_parameters_length(); }
541 MethodParametersElement* method_parameters_start() const
542 { return constMethod()->method_parameters_start(); }
543
544 // checked exceptions
545 int checked_exceptions_length() const
546 { return constMethod()->checked_exceptions_length(); }
547 CheckedExceptionElement* checked_exceptions_start() const
548 { return constMethod()->checked_exceptions_start(); }
549
550 // localvariable table
551 bool has_localvariable_table() const
552 { return constMethod()->has_localvariable_table(); }
553 int localvariable_table_length() const
554 { return constMethod()->localvariable_table_length(); }
555 LocalVariableTableElement* localvariable_table_start() const
556 { return constMethod()->localvariable_table_start(); }
557
558 bool has_linenumber_table() const
559 { return constMethod()->has_linenumber_table(); }
560 u_char* compressed_linenumber_table() const
561 { return constMethod()->compressed_linenumber_table(); }
562
563 // method holder (the Klass* holding this method)
564 InstanceKlass* method_holder() const { return constants()->pool_holder(); }
565
566 Symbol* klass_name() const; // returns the name of the method holder
567 BasicType result_type() const { return constMethod()->result_type(); }
568 bool is_returning_oop() const { BasicType r = result_type(); return is_reference_type(r); }
569 bool is_returning_fp() const { BasicType r = result_type(); return (r == T_FLOAT || r == T_DOUBLE); }
570
571 // Checked exceptions thrown by this method (resolved to mirrors)
572 objArrayHandle resolved_checked_exceptions(TRAPSJavaThread* __the_thread__) { return resolved_checked_exceptions_impl(this, THREAD__the_thread__); }
573
574 // Access flags
575 bool is_public() const { return access_flags().is_public(); }
576 bool is_private() const { return access_flags().is_private(); }
577 bool is_protected() const { return access_flags().is_protected(); }
578 bool is_package_private() const { return !is_public() && !is_private() && !is_protected(); }
579 bool is_static() const { return access_flags().is_static(); }
580 bool is_final() const { return access_flags().is_final(); }
581 bool is_synchronized() const { return access_flags().is_synchronized();}
582 bool is_native() const { return access_flags().is_native(); }
583 bool is_abstract() const { return access_flags().is_abstract(); }
584 bool is_synthetic() const { return access_flags().is_synthetic(); }
585
586 // returns true if contains only return operation
587 bool is_empty_method() const;
588
589 // returns true if this is a vanilla constructor
590 bool is_vanilla_constructor() const;
591
592 // checks method and its method holder
593 bool is_final_method() const;
594 bool is_final_method(AccessFlags class_access_flags) const;
595 // interface method declared with 'default' - excludes private interface methods
596 bool is_default_method() const;
597
598 // true if method needs no dynamic dispatch (final and/or no vtable entry)
599 bool can_be_statically_bound() const;
600 bool can_be_statically_bound(InstanceKlass* context) const;
601 bool can_be_statically_bound(AccessFlags class_access_flags) const;
602
603 // true if method can omit stack trace in throw in compiled code.
604 bool can_omit_stack_trace();
605
606 // returns true if the method has any backward branches.
607 bool has_loops() {
608 return access_flags().loops_flag_init() ? access_flags().has_loops() : compute_has_loops_flag();
609 };
610
611 bool compute_has_loops_flag();
612
613 bool has_jsrs() {
614 return access_flags().has_jsrs();
615 };
616 void set_has_jsrs() {
617 _access_flags.set_has_jsrs();
618 }
619
620 // returns true if the method has any monitors.
621 bool has_monitors() const { return is_synchronized() || access_flags().has_monitor_bytecodes(); }
622 bool has_monitor_bytecodes() const { return access_flags().has_monitor_bytecodes(); }
623
624 void set_has_monitor_bytecodes() { _access_flags.set_has_monitor_bytecodes(); }
625
626 // monitor matching. This returns a conservative estimate of whether the monitorenter/monitorexit bytecodes
627 // propererly nest in the method. It might return false, even though they actually nest properly, since the info.
628 // has not been computed yet.
629 bool guaranteed_monitor_matching() const { return access_flags().is_monitor_matching(); }
630 void set_guaranteed_monitor_matching() { _access_flags.set_monitor_matching(); }
631
632 // returns true if the method is an accessor function (setter/getter).
633 bool is_accessor() const;
634
635 // returns true if the method is a getter
636 bool is_getter() const;
637
638 // returns true if the method is a setter
639 bool is_setter() const;
640
641 // returns true if the method does nothing but return a constant of primitive type
642 bool is_constant_getter() const;
643
644 // returns true if the method is an initializer (<init> or <clinit>).
645 bool is_initializer() const;
646
647 // returns true if the method is static OR if the classfile version < 51
648 bool has_valid_initializer_flags() const;
649
650 // returns true if the method name is <clinit> and the method has
651 // valid static initializer flags.
652 bool is_static_initializer() const;
653
654 // returns true if the method name is <init>
655 bool is_object_initializer() const;
656
657 // compiled code support
658 // NOTE: code() is inherently racy as deopt can be clearing code
659 // simultaneously. Use with caution.
660 bool has_compiled_code() const;
661
662 bool needs_clinit_barrier() const;
663
664 // sizing
665 static int header_size() {
666 return align_up((int)sizeof(Method), wordSize) / wordSize;
667 }
668 static int size(bool is_native);
669 int size() const { return method_size(); }
670 void log_touched(Thread* current);
671 static void print_touched_methods(outputStream* out);
672
673 // interpreter support
674 static ByteSize const_offset() { return byte_offset_of(Method, _constMethod )in_ByteSize((int)(size_t)((intx)&(((Method*)16)->_constMethod
) - 16))
; }
675 static ByteSize access_flags_offset() { return byte_offset_of(Method, _access_flags )in_ByteSize((int)(size_t)((intx)&(((Method*)16)->_access_flags
) - 16))
; }
676 static ByteSize from_compiled_offset() { return byte_offset_of(Method, _from_compiled_entry)in_ByteSize((int)(size_t)((intx)&(((Method*)16)->_from_compiled_entry
) - 16))
; }
677 static ByteSize code_offset() { return byte_offset_of(Method, _code)in_ByteSize((int)(size_t)((intx)&(((Method*)16)->_code
) - 16))
; }
678 static ByteSize method_data_offset() {
679 return byte_offset_of(Method, _method_data)in_ByteSize((int)(size_t)((intx)&(((Method*)16)->_method_data
) - 16))
;
680 }
681 static ByteSize method_counters_offset() {
682 return byte_offset_of(Method, _method_counters)in_ByteSize((int)(size_t)((intx)&(((Method*)16)->_method_counters
) - 16))
;
683 }
684#ifndef PRODUCT
685 static ByteSize compiled_invocation_counter_offset() { return byte_offset_of(Method, _compiled_invocation_count)in_ByteSize((int)(size_t)((intx)&(((Method*)16)->_compiled_invocation_count
) - 16))
; }
686#endif // not PRODUCT
687 static ByteSize native_function_offset() { return in_ByteSize(sizeof(Method)); }
688 static ByteSize from_interpreted_offset() { return byte_offset_of(Method, _from_interpreted_entry )in_ByteSize((int)(size_t)((intx)&(((Method*)16)->_from_interpreted_entry
) - 16))
; }
689 static ByteSize interpreter_entry_offset() { return byte_offset_of(Method, _i2i_entry )in_ByteSize((int)(size_t)((intx)&(((Method*)16)->_i2i_entry
) - 16))
; }
690 static ByteSize signature_handler_offset() { return in_ByteSize(sizeof(Method) + wordSize); }
691 static ByteSize itable_index_offset() { return byte_offset_of(Method, _vtable_index )in_ByteSize((int)(size_t)((intx)&(((Method*)16)->_vtable_index
) - 16))
; }
692
693 // for code generation
694 static int method_data_offset_in_bytes() { return offset_of(Method, _method_data)(size_t)((intx)&(((Method*)16)->_method_data) - 16); }
695 static int intrinsic_id_offset_in_bytes() { return offset_of(Method, _intrinsic_id)(size_t)((intx)&(((Method*)16)->_intrinsic_id) - 16); }
696 static int intrinsic_id_size_in_bytes() { return sizeof(u2); }
697
698 // Static methods that are used to implement member methods where an exposed this pointer
699 // is needed due to possible GCs
700 static objArrayHandle resolved_checked_exceptions_impl(Method* method, TRAPSJavaThread* __the_thread__);
701
702 // Returns the byte code index from the byte code pointer
703 int bci_from(address bcp) const;
704 address bcp_from(int bci) const;
705 address bcp_from(address bcp) const;
706 int validate_bci_from_bcp(address bcp) const;
707 int validate_bci(int bci) const;
708
709 // Returns the line number for a bci if debugging information for the method is prowided,
710 // -1 is returned otherwise.
711 int line_number_from_bci(int bci) const;
712
713 // Reflection support
714 bool is_overridden_in(Klass* k) const;
715
716 // Stack walking support
717 bool is_ignored_by_security_stack_walk() const;
718
719 // JSR 292 support
720 bool is_method_handle_intrinsic() const; // MethodHandles::is_signature_polymorphic_intrinsic(intrinsic_id)
721 bool is_compiled_lambda_form() const; // intrinsic_id() == vmIntrinsics::_compiledLambdaForm
722 bool has_member_arg() const; // intrinsic_id() == vmIntrinsics::_linkToSpecial, etc.
723 static methodHandle make_method_handle_intrinsic(vmIntrinsicID iid, // _invokeBasic, _linkToVirtual
724 Symbol* signature, //anything at all
725 TRAPSJavaThread* __the_thread__);
726 static Klass* check_non_bcp_klass(Klass* klass);
727
728 enum {
729 // How many extra stack entries for invokedynamic
730 extra_stack_entries_for_jsr292 = 1
731 };
732
733 // this operates only on invoke methods:
734 // presize interpreter frames for extra interpreter stack entries, if needed
735 // Account for the extra appendix argument for invokehandle/invokedynamic
736 static int extra_stack_entries() { return extra_stack_entries_for_jsr292; }
737 static int extra_stack_words(); // = extra_stack_entries() * Interpreter::stackElementSize
738
739 // RedefineClasses() support:
740 bool is_old() const { return access_flags().is_old(); }
9
Calling 'AccessFlags::is_old'
12
Returning from 'AccessFlags::is_old'
13
Returning the value 1, which participates in a condition later
741 void set_is_old() { _access_flags.set_is_old(); }
742 bool is_obsolete() const { return access_flags().is_obsolete(); }
16
Calling 'AccessFlags::is_obsolete'
19
Returning from 'AccessFlags::is_obsolete'
20
Returning zero, which participates in a condition later
743 void set_is_obsolete() { _access_flags.set_is_obsolete(); }
744 bool is_deleted() const { return access_flags().is_deleted(); }
745 void set_is_deleted() { _access_flags.set_is_deleted(); }
746
747 bool on_stack() const { return access_flags().on_stack(); }
748 void set_on_stack(const bool value);
749
750 // see the definition in Method*.cpp for the gory details
751 bool should_not_be_cached() const;
752
753 // JVMTI Native method prefixing support:
754 bool is_prefixed_native() const { return access_flags().is_prefixed_native(); }
755 void set_is_prefixed_native() { _access_flags.set_is_prefixed_native(); }
756
757 // Rewriting support
758 static methodHandle clone_with_new_data(const methodHandle& m, u_char* new_code, int new_code_length,
759 u_char* new_compressed_linenumber_table, int new_compressed_linenumber_size, TRAPSJavaThread* __the_thread__);
760
761 // jmethodID handling
762 // Because the useful life-span of a jmethodID cannot be determined,
763 // once created they are never reclaimed. The methods to which they refer,
764 // however, can be GC'ed away if the class is unloaded or if the method is
765 // made obsolete or deleted -- in these cases, the jmethodID
766 // refers to NULL (as is the case for any weak reference).
767 static jmethodID make_jmethod_id(ClassLoaderData* loader_data, Method* mh);
768 static void destroy_jmethod_id(ClassLoaderData* loader_data, jmethodID mid);
769
770 // Ensure there is enough capacity in the internal tracking data
771 // structures to hold the number of jmethodIDs you plan to generate.
772 // This saves substantial time doing allocations.
773 static void ensure_jmethod_ids(ClassLoaderData* loader_data, int capacity);
774
775 // Use resolve_jmethod_id() in situations where the caller is expected
776 // to provide a valid jmethodID; the only sanity checks are in asserts;
777 // result guaranteed not to be NULL.
778 inline static Method* resolve_jmethod_id(jmethodID mid) {
779 assert(mid != NULL, "JNI method id should not be null")do { if (!(mid != __null)) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/method.hpp"
, 779, "assert(" "mid != __null" ") failed", "JNI method id should not be null"
); ::breakpoint(); } } while (0)
;
780 return *((Method**)mid);
781 }
782
783 // Use checked_resolve_jmethod_id() in situations where the caller
784 // should provide a valid jmethodID, but might not. NULL is returned
785 // when the jmethodID does not refer to a valid method.
786 static Method* checked_resolve_jmethod_id(jmethodID mid);
787
788 static void change_method_associated_with_jmethod_id(jmethodID old_jmid_ptr, Method* new_method);
789 static bool is_method_id(jmethodID mid);
790
791 // Clear methods
792 static void clear_jmethod_ids(ClassLoaderData* loader_data);
793 static void print_jmethod_ids_count(const ClassLoaderData* loader_data, outputStream* out) PRODUCT_RETURN;
794
795 // Get this method's jmethodID -- allocate if it doesn't exist
796 jmethodID jmethod_id();
797
798 // Lookup the jmethodID for this method. Return NULL if not found.
799 // NOTE that this function can be called from a signal handler
800 // (see AsyncGetCallTrace support for Forte Analyzer) and this
801 // needs to be async-safe. No allocation should be done and
802 // so handles are not used to avoid deadlock.
803 jmethodID find_jmethod_id_or_null() { return method_holder()->jmethod_id_or_null(this); }
804
805 // Support for inlining of intrinsic methods
806 vmIntrinsicID intrinsic_id() const { return (vmIntrinsicID) _intrinsic_id; }
807 void set_intrinsic_id(vmIntrinsicID id) { _intrinsic_id = (u2) id; }
808
809 // Helper routines for intrinsic_id() and vmIntrinsics::method().
810 void init_intrinsic_id(vmSymbolID klass_id); // updates from _none if a match
811 static vmSymbolID klass_id_for_intrinsics(const Klass* holder);
812
813 bool caller_sensitive() {
814 return (_flags & _caller_sensitive) != 0;
815 }
816 void set_caller_sensitive(bool x) {
817 _flags = x ? (_flags | _caller_sensitive) : (_flags & ~_caller_sensitive);
818 }
819
820 bool force_inline() {
821 return (_flags & _force_inline) != 0;
822 }
823 void set_force_inline(bool x) {
824 _flags = x ? (_flags | _force_inline) : (_flags & ~_force_inline);
825 }
826
827 bool dont_inline() {
828 return (_flags & _dont_inline) != 0;
829 }
830 void set_dont_inline(bool x) {
831 _flags = x ? (_flags | _dont_inline) : (_flags & ~_dont_inline);
832 }
833
834 bool is_hidden() const {
835 return (_flags & _hidden) != 0;
836 }
837
838 void set_hidden(bool x) {
839 _flags = x ? (_flags | _hidden) : (_flags & ~_hidden);
840 }
841
842 bool is_scoped() const {
843 return (_flags & _scoped) != 0;
844 }
845
846 void set_scoped(bool x) {
847 _flags = x ? (_flags | _scoped) : (_flags & ~_scoped);
848 }
849
850 bool intrinsic_candidate() {
851 return (_flags & _intrinsic_candidate) != 0;
852 }
853 void set_intrinsic_candidate(bool x) {
854 _flags = x ? (_flags | _intrinsic_candidate) : (_flags & ~_intrinsic_candidate);
855 }
856
857 bool has_injected_profile() {
858 return (_flags & _has_injected_profile) != 0;
859 }
860 void set_has_injected_profile(bool x) {
861 _flags = x ? (_flags | _has_injected_profile) : (_flags & ~_has_injected_profile);
862 }
863
864 bool has_reserved_stack_access() {
865 return (_flags & _reserved_stack_access) != 0;
866 }
867
868 void set_has_reserved_stack_access(bool x) {
869 _flags = x ? (_flags | _reserved_stack_access) : (_flags & ~_reserved_stack_access);
870 }
871
872 JFR_ONLY(DEFINE_TRACE_FLAG_ACCESSOR;)bool is_trace_flag_set(jshort flag) const { return _trace_flags
.is_set(flag); } jshort trace_flags() const { return _trace_flags
.flags(); } void set_trace_flags(jshort flags) const { _trace_flags
.set_flags(flags); } jbyte* trace_flags_addr() const { return
_trace_flags.flags_addr(); } jbyte* trace_meta_addr() const {
return _trace_flags.meta_addr(); };
873
874 ConstMethod::MethodType method_type() const {
875 return _constMethod->method_type();
876 }
877 bool is_overpass() const { return method_type() == ConstMethod::OVERPASS; }
878
879 // On-stack replacement support
880 bool has_osr_nmethod(int level, bool match_level) {
881 return method_holder()->lookup_osr_nmethod(this, InvocationEntryBci, level, match_level) != NULL__null;
882 }
883
884 int mark_osr_nmethods() {
885 return method_holder()->mark_osr_nmethods(this);
886 }
887
888 nmethod* lookup_osr_nmethod_for(int bci, int level, bool match_level) {
889 return method_holder()->lookup_osr_nmethod(this, bci, level, match_level);
890 }
891
892 // Find if klass for method is loaded
893 bool is_klass_loaded_by_klass_index(int klass_index) const;
894 bool is_klass_loaded(int refinfo_index, bool must_be_resolved = false) const;
895
896 // Indicates whether compilation failed earlier for this method, or
897 // whether it is not compilable for another reason like having a
898 // breakpoint set in it.
899 bool is_not_compilable(int comp_level = CompLevel_any) const;
900 void set_not_compilable(const char* reason, int comp_level = CompLevel_all, bool report = true);
901 void set_not_compilable_quietly(const char* reason, int comp_level = CompLevel_all) {
902 set_not_compilable(reason, comp_level, false);
903 }
904 bool is_not_osr_compilable(int comp_level = CompLevel_any) const;
905 void set_not_osr_compilable(const char* reason, int comp_level = CompLevel_all, bool report = true);
906 void set_not_osr_compilable_quietly(const char* reason, int comp_level = CompLevel_all) {
907 set_not_osr_compilable(reason, comp_level, false);
908 }
909 bool is_always_compilable() const;
910
911 private:
912 void print_made_not_compilable(int comp_level, bool is_osr, bool report, const char* reason);
913
914 public:
915 MethodCounters* get_method_counters(Thread* current) {
916 if (_method_counters == NULL__null) {
917 build_method_counters(current, this);
918 }
919 return _method_counters;
920 }
921
922 bool is_not_c1_compilable() const { return access_flags().is_not_c1_compilable(); }
923 void set_not_c1_compilable() { _access_flags.set_not_c1_compilable(); }
924 void clear_not_c1_compilable() { _access_flags.clear_not_c1_compilable(); }
925 bool is_not_c2_compilable() const { return access_flags().is_not_c2_compilable(); }
926 void set_not_c2_compilable() { _access_flags.set_not_c2_compilable(); }
927 void clear_not_c2_compilable() { _access_flags.clear_not_c2_compilable(); }
928
929 bool is_not_c1_osr_compilable() const { return is_not_c1_compilable(); } // don't waste an accessFlags bit
930 void set_not_c1_osr_compilable() { set_not_c1_compilable(); } // don't waste an accessFlags bit
931 void clear_not_c1_osr_compilable() { clear_not_c1_compilable(); } // don't waste an accessFlags bit
932 bool is_not_c2_osr_compilable() const { return access_flags().is_not_c2_osr_compilable(); }
933 void set_not_c2_osr_compilable() { _access_flags.set_not_c2_osr_compilable(); }
934 void clear_not_c2_osr_compilable() { _access_flags.clear_not_c2_osr_compilable(); }
935
936 // Background compilation support
937 bool queued_for_compilation() const { return access_flags().queued_for_compilation(); }
938 void set_queued_for_compilation() { _access_flags.set_queued_for_compilation(); }
939 void clear_queued_for_compilation() { _access_flags.clear_queued_for_compilation(); }
940
941 // Resolve all classes in signature, return 'true' if successful
942 static bool load_signature_classes(const methodHandle& m, TRAPSJavaThread* __the_thread__);
943
944 // Return if true if not all classes references in signature, including return type, has been loaded
945 static bool has_unloaded_classes_in_signature(const methodHandle& m, TRAPSJavaThread* __the_thread__);
946
947 // Printing
948 void print_short_name(outputStream* st = tty); // prints as klassname::methodname; Exposed so field engineers can debug VM
949#if INCLUDE_JVMTI1
950 void print_name(outputStream* st = tty); // prints as "virtual void foo(int)"; exposed for -Xlog:redefine+class
951#else
952 void print_name(outputStream* st = tty) PRODUCT_RETURN; // prints as "virtual void foo(int)"
953#endif
954
955 typedef int (*method_comparator_func)(Method* a, Method* b);
956
957 // Helper routine used for method sorting
958 static void sort_methods(Array<Method*>* methods, bool set_idnums = true, method_comparator_func func = NULL__null);
959
960 // Deallocation function for redefine classes or if an error occurs
961 void deallocate_contents(ClassLoaderData* loader_data);
962
963 void release_C_heap_structures();
964
965 Method* get_new_method() const {
966 InstanceKlass* holder = method_holder();
967 Method* new_method = holder->method_with_idnum(orig_method_idnum());
968
969 assert(new_method != NULL, "method_with_idnum() should not be NULL")do { if (!(new_method != __null)) { (*g_assert_poison) = 'X';
; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/method.hpp"
, 969, "assert(" "new_method != __null" ") failed", "method_with_idnum() should not be NULL"
); ::breakpoint(); } } while (0)
;
970 assert(this != new_method, "sanity check")do { if (!(this != new_method)) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/method.hpp"
, 970, "assert(" "this != new_method" ") failed", "sanity check"
); ::breakpoint(); } } while (0)
;
971 return new_method;
972 }
973
974 // Printing
975#ifndef PRODUCT
976 void print_on(outputStream* st) const;
977#endif
978 void print_value_on(outputStream* st) const;
979 void print_linkage_flags(outputStream* st) PRODUCT_RETURN;
980
981 const char* internal_name() const { return "{method}"; }
982
983 // Check for valid method pointer
984 static bool has_method_vptr(const void* ptr);
985 static bool is_valid_method(const Method* m);
986
987 // Verify
988 void verify() { verify_on(tty); }
989 void verify_on(outputStream* st);
990
991 private:
992
993 // Inlined elements
994 address* native_function_addr() const { assert(is_native(), "must be native")do { if (!(is_native())) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/method.hpp"
, 994, "assert(" "is_native()" ") failed", "must be native");
::breakpoint(); } } while (0)
; return (address*) (this+1); }
995 address* signature_handler_addr() const { return native_function_addr() + 1; }
996};
997
998
999// Utility class for compressing line number tables
1000
1001class CompressedLineNumberWriteStream: public CompressedWriteStream {
1002 private:
1003 int _bci;
1004 int _line;
1005 public:
1006 // Constructor
1007 CompressedLineNumberWriteStream(int initial_size) : CompressedWriteStream(initial_size), _bci(0), _line(0) {}
1008 CompressedLineNumberWriteStream(u_char* buffer, int initial_size) : CompressedWriteStream(buffer, initial_size), _bci(0), _line(0) {}
1009
1010 // Write (bci, line number) pair to stream
1011 void write_pair_regular(int bci_delta, int line_delta);
1012
1013 // If (bci delta, line delta) fits in (5-bit unsigned, 3-bit unsigned)
1014 // we save it as one byte, otherwise we write a 0xFF escape character
1015 // and use regular compression. 0x0 is used as end-of-stream terminator.
1016 void write_pair_inline(int bci, int line);
1017
1018 void write_pair(int bci, int line);
1019
1020 // Write end-of-stream marker
1021 void write_terminator() { write_byte(0); }
1022};
1023
1024
1025// Utility class for decompressing line number tables
1026
1027class CompressedLineNumberReadStream: public CompressedReadStream {
1028 private:
1029 int _bci;
1030 int _line;
1031 public:
1032 // Constructor
1033 CompressedLineNumberReadStream(u_char* buffer);
1034 // Read (bci, line number) pair from stream. Returns false at end-of-stream.
1035 bool read_pair();
1036 // Accessing bci and line number (after calling read_pair)
1037 int bci() const { return _bci; }
1038 int line() const { return _line; }
1039};
1040
1041
1042#if INCLUDE_JVMTI1
1043
1044/// Fast Breakpoints.
1045
1046// If this structure gets more complicated (because bpts get numerous),
1047// move it into its own header.
1048
1049// There is presently no provision for concurrent access
1050// to breakpoint lists, which is only OK for JVMTI because
1051// breakpoints are written only at safepoints, and are read
1052// concurrently only outside of safepoints.
1053
1054class BreakpointInfo : public CHeapObj<mtClass> {
1055 friend class VMStructs;
1056 private:
1057 Bytecodes::Code _orig_bytecode;
1058 int _bci;
1059 u2 _name_index; // of method
1060 u2 _signature_index; // of method
1061 BreakpointInfo* _next; // simple storage allocation
1062
1063 public:
1064 BreakpointInfo(Method* m, int bci);
1065
1066 // accessors
1067 Bytecodes::Code orig_bytecode() { return _orig_bytecode; }
1068 void set_orig_bytecode(Bytecodes::Code code) { _orig_bytecode = code; }
1069 int bci() { return _bci; }
1070
1071 BreakpointInfo* next() const { return _next; }
1072 void set_next(BreakpointInfo* n) { _next = n; }
1073
1074 // helps for searchers
1075 bool match(const Method* m, int bci) {
1076 return bci == _bci && match(m);
1077 }
1078
1079 bool match(const Method* m) {
1080 return _name_index == m->name_index() &&
1081 _signature_index == m->signature_index();
1082 }
1083
1084 void set(Method* method);
1085 void clear(Method* method);
1086};
1087
1088#endif // INCLUDE_JVMTI
1089
1090// Utility class for access exception handlers
1091class ExceptionTable : public StackObj {
1092 private:
1093 ExceptionTableElement* _table;
1094 u2 _length;
1095
1096 public:
1097 ExceptionTable(const Method* m) {
1098 if (m->has_exception_handler()) {
1099 _table = m->exception_table_start();
1100 _length = m->exception_table_length();
1101 } else {
1102 _table = NULL__null;
1103 _length = 0;
1104 }
1105 }
1106
1107 int length() const {
1108 return _length;
1109 }
1110
1111 u2 start_pc(int idx) const {
1112 assert(idx < _length, "out of bounds")do { if (!(idx < _length)) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/method.hpp"
, 1112, "assert(" "idx < _length" ") failed", "out of bounds"
); ::breakpoint(); } } while (0)
;
1113 return _table[idx].start_pc;
1114 }
1115
1116 void set_start_pc(int idx, u2 value) {
1117 assert(idx < _length, "out of bounds")do { if (!(idx < _length)) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/method.hpp"
, 1117, "assert(" "idx < _length" ") failed", "out of bounds"
); ::breakpoint(); } } while (0)
;
1118 _table[idx].start_pc = value;
1119 }
1120
1121 u2 end_pc(int idx) const {
1122 assert(idx < _length, "out of bounds")do { if (!(idx < _length)) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/method.hpp"
, 1122, "assert(" "idx < _length" ") failed", "out of bounds"
); ::breakpoint(); } } while (0)
;
1123 return _table[idx].end_pc;
1124 }
1125
1126 void set_end_pc(int idx, u2 value) {
1127 assert(idx < _length, "out of bounds")do { if (!(idx < _length)) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/method.hpp"
, 1127, "assert(" "idx < _length" ") failed", "out of bounds"
); ::breakpoint(); } } while (0)
;
1128 _table[idx].end_pc = value;
1129 }
1130
1131 u2 handler_pc(int idx) const {
1132 assert(idx < _length, "out of bounds")do { if (!(idx < _length)) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/method.hpp"
, 1132, "assert(" "idx < _length" ") failed", "out of bounds"
); ::breakpoint(); } } while (0)
;
1133 return _table[idx].handler_pc;
1134 }
1135
1136 void set_handler_pc(int idx, u2 value) {
1137 assert(idx < _length, "out of bounds")do { if (!(idx < _length)) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/method.hpp"
, 1137, "assert(" "idx < _length" ") failed", "out of bounds"
); ::breakpoint(); } } while (0)
;
1138 _table[idx].handler_pc = value;
1139 }
1140
1141 u2 catch_type_index(int idx) const {
1142 assert(idx < _length, "out of bounds")do { if (!(idx < _length)) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/method.hpp"
, 1142, "assert(" "idx < _length" ") failed", "out of bounds"
); ::breakpoint(); } } while (0)
;
1143 return _table[idx].catch_type_index;
1144 }
1145
1146 void set_catch_type_index(int idx, u2 value) {
1147 assert(idx < _length, "out of bounds")do { if (!(idx < _length)) { (*g_assert_poison) = 'X';; report_vm_error
("/home/daniel/Projects/java/jdk/src/hotspot/share/oops/method.hpp"
, 1147, "assert(" "idx < _length" ") failed", "out of bounds"
); ::breakpoint(); } } while (0)
;
1148 _table[idx].catch_type_index = value;
1149 }
1150};
1151
1152#endif // SHARE_OOPS_METHOD_HPP

/home/daniel/Projects/java/jdk/src/hotspot/share/utilities/accessFlags.hpp

1/*
2 * Copyright (c) 1997, 2021, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25#ifndef SHARE_UTILITIES_ACCESSFLAGS_HPP
26#define SHARE_UTILITIES_ACCESSFLAGS_HPP
27
28#include "jvm_constants.h"
29#include "utilities/debug.hpp"
30#include "utilities/globalDefinitions.hpp"
31#include "utilities/macros.hpp"
32
33// AccessFlags is an abstraction over Java access flags.
34
35class outputStream;
36
37enum {
38 // See jvm.h for shared JVM_ACC_XXX access flags
39
40 // HotSpot-specific access flags
41
42 // flags actually put in .class file
43 JVM_ACC_WRITTEN_FLAGS = 0x00007FFF,
44
45 // Method* flags
46 JVM_ACC_MONITOR_MATCH = 0x10000000, // True if we know that monitorenter/monitorexit bytecodes match
47 JVM_ACC_HAS_MONITOR_BYTECODES = 0x20000000, // Method contains monitorenter/monitorexit bytecodes
48 JVM_ACC_HAS_LOOPS = 0x40000000, // Method has loops
49 JVM_ACC_LOOPS_FLAG_INIT = (int)0x80000000,// The loop flag has been initialized
50 JVM_ACC_QUEUED = 0x01000000, // Queued for compilation
51 JVM_ACC_NOT_C2_COMPILABLE = 0x02000000,
52 JVM_ACC_NOT_C1_COMPILABLE = 0x04000000,
53 JVM_ACC_NOT_C2_OSR_COMPILABLE = 0x08000000,
54 JVM_ACC_HAS_LINE_NUMBER_TABLE = 0x00100000,
55 JVM_ACC_HAS_CHECKED_EXCEPTIONS = 0x00400000,
56 JVM_ACC_HAS_JSRS = 0x00800000,
57 JVM_ACC_IS_OLD = 0x00010000, // RedefineClasses() has replaced this method
58 JVM_ACC_IS_OBSOLETE = 0x00020000, // RedefineClasses() has made method obsolete
59 JVM_ACC_IS_PREFIXED_NATIVE = 0x00040000, // JVMTI has prefixed this native method
60 JVM_ACC_ON_STACK = 0x00080000, // RedefineClasses() was used on the stack
61 JVM_ACC_IS_DELETED = 0x00008000, // RedefineClasses() has deleted this method
62
63 // Klass* flags
64 JVM_ACC_HAS_MIRANDA_METHODS = 0x10000000, // True if this class has miranda methods in it's vtable
65 JVM_ACC_HAS_VANILLA_CONSTRUCTOR = 0x20000000, // True if klass has a vanilla default constructor
66 JVM_ACC_HAS_FINALIZER = 0x40000000, // True if klass has a non-empty finalize() method
67 JVM_ACC_IS_CLONEABLE_FAST = (int)0x80000000,// True if klass implements the Cloneable interface and can be optimized in generated code
68 JVM_ACC_HAS_FINAL_METHOD = 0x01000000, // True if klass has final method
69 JVM_ACC_IS_SHARED_CLASS = 0x02000000, // True if klass is shared
70 JVM_ACC_IS_HIDDEN_CLASS = 0x04000000, // True if klass is hidden
71 JVM_ACC_IS_VALUE_BASED_CLASS = 0x08000000, // True if klass is marked as a ValueBased class
72 JVM_ACC_IS_BEING_REDEFINED = 0x00100000, // True if the klass is being redefined.
73 JVM_ACC_HAS_RESOLVED_METHODS = 0x00200000, // True if the klass has resolved methods
74
75 // Klass* and Method* flags
76 JVM_ACC_HAS_LOCAL_VARIABLE_TABLE= 0x00400000,
77
78 JVM_ACC_PROMOTED_FLAGS = 0x00400000, // flags promoted from methods to the holding klass
79
80 // field flags
81 // Note: these flags must be defined in the low order 16 bits because
82 // InstanceKlass only stores a ushort worth of information from the
83 // AccessFlags value.
84 // These bits must not conflict with any other field-related access flags
85 // (e.g., ACC_ENUM).
86 // Note that the class-related ACC_ANNOTATION bit conflicts with these flags.
87 JVM_ACC_FIELD_ACCESS_WATCHED = 0x00002000, // field access is watched by JVMTI
88 JVM_ACC_FIELD_MODIFICATION_WATCHED = 0x00008000, // field modification is watched by JVMTI
89 JVM_ACC_FIELD_INTERNAL = 0x00000400, // internal field, same as JVM_ACC_ABSTRACT
90 JVM_ACC_FIELD_STABLE = 0x00000020, // @Stable field, same as JVM_ACC_SYNCHRONIZED and JVM_ACC_SUPER
91 JVM_ACC_FIELD_INITIALIZED_FINAL_UPDATE = 0x00000100, // (static) final field updated outside (class) initializer, same as JVM_ACC_NATIVE
92 JVM_ACC_FIELD_HAS_GENERIC_SIGNATURE = 0x00000800, // field has generic signature
93
94 JVM_ACC_FIELD_INTERNAL_FLAGS = JVM_ACC_FIELD_ACCESS_WATCHED |
95 JVM_ACC_FIELD_MODIFICATION_WATCHED |
96 JVM_ACC_FIELD_INTERNAL |
97 JVM_ACC_FIELD_STABLE |
98 JVM_ACC_FIELD_HAS_GENERIC_SIGNATURE,
99
100 // flags accepted by set_field_flags()
101 JVM_ACC_FIELD_FLAGS = JVM_RECOGNIZED_FIELD_MODIFIERS(JVM_ACC_PUBLIC | JVM_ACC_PRIVATE | JVM_ACC_PROTECTED | JVM_ACC_STATIC
| JVM_ACC_FINAL | JVM_ACC_VOLATILE | JVM_ACC_TRANSIENT | JVM_ACC_ENUM
| JVM_ACC_SYNTHETIC)
| JVM_ACC_FIELD_INTERNAL_FLAGS
102
103};
104
105
106class AccessFlags {
107 friend class VMStructs;
108 private:
109 jint _flags;
110
111 public:
112 AccessFlags() : _flags(0) {}
113 explicit AccessFlags(jint flags) : _flags(flags) {}
114
115 // Java access flags
116 bool is_public () const { return (_flags & JVM_ACC_PUBLIC ) != 0; }
117 bool is_private () const { return (_flags & JVM_ACC_PRIVATE ) != 0; }
118 bool is_protected () const { return (_flags & JVM_ACC_PROTECTED ) != 0; }
119 bool is_static () const { return (_flags & JVM_ACC_STATIC ) != 0; }
120 bool is_final () const { return (_flags & JVM_ACC_FINAL ) != 0; }
121 bool is_synchronized() const { return (_flags & JVM_ACC_SYNCHRONIZED) != 0; }
122 bool is_super () const { return (_flags & JVM_ACC_SUPER ) != 0; }
123 bool is_volatile () const { return (_flags & JVM_ACC_VOLATILE ) != 0; }
124 bool is_transient () const { return (_flags & JVM_ACC_TRANSIENT ) != 0; }
125 bool is_native () const { return (_flags & JVM_ACC_NATIVE ) != 0; }
126 bool is_interface () const { return (_flags & JVM_ACC_INTERFACE ) != 0; }
127 bool is_abstract () const { return (_flags & JVM_ACC_ABSTRACT ) != 0; }
128
129 // Attribute flags
130 bool is_synthetic () const { return (_flags & JVM_ACC_SYNTHETIC ) != 0; }
131
132 // Method* flags
133 bool is_monitor_matching () const { return (_flags & JVM_ACC_MONITOR_MATCH ) != 0; }
134 bool has_monitor_bytecodes () const { return (_flags & JVM_ACC_HAS_MONITOR_BYTECODES ) != 0; }
135 bool has_loops () const { return (_flags & JVM_ACC_HAS_LOOPS ) != 0; }
136 bool loops_flag_init () const { return (_flags & JVM_ACC_LOOPS_FLAG_INIT ) != 0; }
137 bool queued_for_compilation () const { return (_flags & JVM_ACC_QUEUED ) != 0; }
138 bool is_not_c1_compilable () const { return (_flags & JVM_ACC_NOT_C1_COMPILABLE ) != 0; }
139 bool is_not_c2_compilable () const { return (_flags & JVM_ACC_NOT_C2_COMPILABLE ) != 0; }
140 bool is_not_c2_osr_compilable() const { return (_flags & JVM_ACC_NOT_C2_OSR_COMPILABLE ) != 0; }
141 bool has_linenumber_table () const { return (_flags & JVM_ACC_HAS_LINE_NUMBER_TABLE ) != 0; }
142 bool has_checked_exceptions () const { return (_flags & JVM_ACC_HAS_CHECKED_EXCEPTIONS ) != 0; }
143 bool has_jsrs () const { return (_flags & JVM_ACC_HAS_JSRS ) != 0; }
144 bool is_old () const { return (_flags & JVM_ACC_IS_OLD ) != 0; }
10
Assuming the condition is true
11
Returning the value 1, which participates in a condition later
145 bool is_obsolete () const { return (_flags & JVM_ACC_IS_OBSOLETE ) != 0; }
17
Assuming the condition is false
18
Returning zero, which participates in a condition later
146 bool is_deleted () const { return (_flags & JVM_ACC_IS_DELETED ) != 0; }
147 bool is_prefixed_native () const { return (_flags & JVM_ACC_IS_PREFIXED_NATIVE ) != 0; }
148
149 // Klass* flags
150 bool has_miranda_methods () const { return (_flags & JVM_ACC_HAS_MIRANDA_METHODS ) != 0; }
151 bool has_vanilla_constructor () const { return (_flags & JVM_ACC_HAS_VANILLA_CONSTRUCTOR) != 0; }
152 bool has_finalizer () const { return (_flags & JVM_ACC_HAS_FINALIZER ) != 0; }
153 bool has_final_method () const { return (_flags & JVM_ACC_HAS_FINAL_METHOD ) != 0; }
154 bool is_cloneable_fast () const { return (_flags & JVM_ACC_IS_CLONEABLE_FAST ) != 0; }
155 bool is_shared_class () const { return (_flags & JVM_ACC_IS_SHARED_CLASS ) != 0; }
156 bool is_hidden_class () const { return (_flags & JVM_ACC_IS_HIDDEN_CLASS ) != 0; }
157 bool is_value_based_class () const { return (_flags & JVM_ACC_IS_VALUE_BASED_CLASS ) != 0; }
158
159 // Klass* and Method* flags
160 bool has_localvariable_table () const { return (_flags & JVM_ACC_HAS_LOCAL_VARIABLE_TABLE) != 0; }
161 void set_has_localvariable_table() { atomic_set_bits(JVM_ACC_HAS_LOCAL_VARIABLE_TABLE); }
162 void clear_has_localvariable_table() { atomic_clear_bits(JVM_ACC_HAS_LOCAL_VARIABLE_TABLE); }
163
164 bool is_being_redefined() const { return (_flags & JVM_ACC_IS_BEING_REDEFINED) != 0; }
165 void set_is_being_redefined() { atomic_set_bits(JVM_ACC_IS_BEING_REDEFINED); }
166 void clear_is_being_redefined() { atomic_clear_bits(JVM_ACC_IS_BEING_REDEFINED); }
167
168 bool has_resolved_methods() const { return (_flags & JVM_ACC_HAS_RESOLVED_METHODS) != 0; }
169 void set_has_resolved_methods() { atomic_set_bits(JVM_ACC_HAS_RESOLVED_METHODS); }
170
171 // field flags
172 bool is_field_access_watched() const { return (_flags & JVM_ACC_FIELD_ACCESS_WATCHED) != 0; }
173 bool is_field_modification_watched() const
174 { return (_flags & JVM_ACC_FIELD_MODIFICATION_WATCHED) != 0; }
175 bool has_field_initialized_final_update() const
176 { return (_flags & JVM_ACC_FIELD_INITIALIZED_FINAL_UPDATE) != 0; }
177 bool on_stack() const { return (_flags & JVM_ACC_ON_STACK) != 0; }
178 bool is_internal() const { return (_flags & JVM_ACC_FIELD_INTERNAL) != 0; }
179 bool is_stable() const { return (_flags & JVM_ACC_FIELD_STABLE) != 0; }
180 bool field_has_generic_signature() const
181 { return (_flags & JVM_ACC_FIELD_HAS_GENERIC_SIGNATURE) != 0; }
182
183 // get .class file flags
184 jint get_flags () const { return (_flags & JVM_ACC_WRITTEN_FLAGS); }
185
186 // Initialization
187 void add_promoted_flags(jint flags) { _flags |= (flags & JVM_ACC_PROMOTED_FLAGS); }
188 void set_field_flags(jint flags) {
189 assert((flags & JVM_ACC_FIELD_FLAGS) == flags, "only recognized flags")do { if (!((flags & JVM_ACC_FIELD_FLAGS) == flags)) { (*g_assert_poison
) = 'X';; report_vm_error("/home/daniel/Projects/java/jdk/src/hotspot/share/utilities/accessFlags.hpp"
, 189, "assert(" "(flags & JVM_ACC_FIELD_FLAGS) == flags"
") failed", "only recognized flags"); ::breakpoint(); } } while
(0)
;
190 _flags = (flags & JVM_ACC_FIELD_FLAGS);
191 }
192 void set_flags(jint flags) { _flags = (flags & JVM_ACC_WRITTEN_FLAGS); }
193
194 void set_queued_for_compilation() { atomic_set_bits(JVM_ACC_QUEUED); }
195 void clear_queued_for_compilation() { atomic_clear_bits(JVM_ACC_QUEUED); }
196
197 // Atomic update of flags
198 void atomic_set_bits(jint bits);
199 void atomic_clear_bits(jint bits);
200
201 private:
202 friend class Method;
203 friend class Klass;
204 friend class ClassFileParser;
205 // the functions below should only be called on the _access_flags inst var directly,
206 // otherwise they are just changing a copy of the flags
207
208 // attribute flags
209 void set_is_synthetic() { atomic_set_bits(JVM_ACC_SYNTHETIC); }
210
211 // Method* flags
212 void set_monitor_matching() { atomic_set_bits(JVM_ACC_MONITOR_MATCH); }
213 void set_has_monitor_bytecodes() { atomic_set_bits(JVM_ACC_HAS_MONITOR_BYTECODES); }
214 void set_has_loops() { atomic_set_bits(JVM_ACC_HAS_LOOPS); }
215 void set_loops_flag_init() { atomic_set_bits(JVM_ACC_LOOPS_FLAG_INIT); }
216 void set_not_c1_compilable() { atomic_set_bits(JVM_ACC_NOT_C1_COMPILABLE); }
217 void set_not_c2_compilable() { atomic_set_bits(JVM_ACC_NOT_C2_COMPILABLE); }
218 void set_not_c2_osr_compilable() { atomic_set_bits(JVM_ACC_NOT_C2_OSR_COMPILABLE); }
219 void set_has_linenumber_table() { atomic_set_bits(JVM_ACC_HAS_LINE_NUMBER_TABLE); }
220 void set_has_checked_exceptions() { atomic_set_bits(JVM_ACC_HAS_CHECKED_EXCEPTIONS); }
221 void set_has_jsrs() { atomic_set_bits(JVM_ACC_HAS_JSRS); }
222 void set_is_old() { atomic_set_bits(JVM_ACC_IS_OLD); }
223 void set_is_obsolete() { atomic_set_bits(JVM_ACC_IS_OBSOLETE); }
224 void set_is_deleted() { atomic_set_bits(JVM_ACC_IS_DELETED); }
225 void set_is_prefixed_native() { atomic_set_bits(JVM_ACC_IS_PREFIXED_NATIVE); }
226
227 void clear_not_c1_compilable() { atomic_clear_bits(JVM_ACC_NOT_C1_COMPILABLE); }
228 void clear_not_c2_compilable() { atomic_clear_bits(JVM_ACC_NOT_C2_COMPILABLE); }
229 void clear_not_c2_osr_compilable() { atomic_clear_bits(JVM_ACC_NOT_C2_OSR_COMPILABLE); }
230 // Klass* flags
231 void set_has_vanilla_constructor() { atomic_set_bits(JVM_ACC_HAS_VANILLA_CONSTRUCTOR); }
232 void set_has_finalizer() { atomic_set_bits(JVM_ACC_HAS_FINALIZER); }
233 void set_has_final_method() { atomic_set_bits(JVM_ACC_HAS_FINAL_METHOD); }
234 void set_is_cloneable_fast() { atomic_set_bits(JVM_ACC_IS_CLONEABLE_FAST); }
235 void set_has_miranda_methods() { atomic_set_bits(JVM_ACC_HAS_MIRANDA_METHODS); }
236 void set_is_shared_class() { atomic_set_bits(JVM_ACC_IS_SHARED_CLASS); }
237 void set_is_hidden_class() { atomic_set_bits(JVM_ACC_IS_HIDDEN_CLASS); }
238 void set_is_value_based_class() { atomic_set_bits(JVM_ACC_IS_VALUE_BASED_CLASS); }
239
240 public:
241 // field flags
242 void set_is_field_access_watched(const bool value)
243 {
244 if (value) {
245 atomic_set_bits(JVM_ACC_FIELD_ACCESS_WATCHED);
246 } else {
247 atomic_clear_bits(JVM_ACC_FIELD_ACCESS_WATCHED);
248 }
249 }
250 void set_is_field_modification_watched(const bool value)
251 {
252 if (value) {
253 atomic_set_bits(JVM_ACC_FIELD_MODIFICATION_WATCHED);
254 } else {
255 atomic_clear_bits(JVM_ACC_FIELD_MODIFICATION_WATCHED);
256 }
257 }
258
259 void set_has_field_initialized_final_update(const bool value) {
260 if (value) {
261 atomic_set_bits(JVM_ACC_FIELD_INITIALIZED_FINAL_UPDATE);
262 } else {
263 atomic_clear_bits(JVM_ACC_FIELD_INITIALIZED_FINAL_UPDATE);
264 }
265 }
266
267 void set_field_has_generic_signature()
268 {
269 atomic_set_bits(JVM_ACC_FIELD_HAS_GENERIC_SIGNATURE);
270 }
271
272 void set_on_stack(const bool value)
273 {
274 if (value) {
275 atomic_set_bits(JVM_ACC_ON_STACK);
276 } else {
277 atomic_clear_bits(JVM_ACC_ON_STACK);
278 }
279 }
280 // Conversion
281 jshort as_short() const { return (jshort)_flags; }
282 jint as_int() const { return _flags; }
283
284 inline friend AccessFlags accessFlags_from(jint flags);
285
286 // Printing/debugging
287#if INCLUDE_JVMTI1
288 void print_on(outputStream* st) const;
289#else
290 void print_on(outputStream* st) const PRODUCT_RETURN;
291#endif
292};
293
294inline AccessFlags accessFlags_from(jint flags) {
295 AccessFlags af;
296 af._flags = flags;
297 return af;
298}
299
300#endif // SHARE_UTILITIES_ACCESSFLAGS_HPP

/home/daniel/Projects/java/jdk/src/hotspot/share/runtime/atomic.hpp

1/*
2 * Copyright (c) 1999, 2019, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25#ifndef SHARE_RUNTIME_ATOMIC_HPP
26#define SHARE_RUNTIME_ATOMIC_HPP
27
28#include "memory/allocation.hpp"
29#include "metaprogramming/conditional.hpp"
30#include "metaprogramming/enableIf.hpp"
31#include "metaprogramming/isIntegral.hpp"
32#include "metaprogramming/isPointer.hpp"
33#include "metaprogramming/isSame.hpp"
34#include "metaprogramming/primitiveConversions.hpp"
35#include "metaprogramming/removeCV.hpp"
36#include "metaprogramming/removePointer.hpp"
37#include "runtime/orderAccess.hpp"
38#include "utilities/align.hpp"
39#include "utilities/bytes.hpp"
40#include "utilities/macros.hpp"
41#include <type_traits>
42
// Memory ordering constraints accepted by the Atomic operations below.
// The modes that share a name with C++11 orderings are intended to carry
// the same semantics as their C++11 counterparts.
enum atomic_memory_order {
  memory_order_relaxed = 0,
  memory_order_acquire = 2,
  memory_order_release = 3,
  memory_order_acq_rel = 4,
  memory_order_seq_cst = 5,
  // HotSpot-specific: a strong two-way memory barrier.
  memory_order_conservative = 8
};
54
// Fence placement variants used by ScopedFence/ScopedFenceGeneral:
// acquire after the access, release before it, or release before plus
// a full fence after.
enum ScopedFenceType {
    X_ACQUIRE
  , RELEASE_X
  , RELEASE_X_FENCE
};
60
61class Atomic : AllStatic {
62public:
63 // Atomic operations on int64 types are not available on all 32-bit
64 // platforms. If atomic ops on int64 are defined here they must only
65 // be used from code that verifies they are available at runtime and
66 // can provide an alternative action if not - see supports_cx8() for
67 // a means to test availability.
68
69 // The memory operations that are mentioned with each of the atomic
70 // function families come from src/share/vm/runtime/orderAccess.hpp,
71 // e.g., <fence> is described in that file and is implemented by the
72 // OrderAccess::fence() function. See that file for the gory details
73 // on the Memory Access Ordering Model.
74
75 // All of the atomic operations that imply a read-modify-write action
76 // guarantee a two-way memory barrier across that operation. Historically
77 // these semantics reflect the strength of atomic operations that are
78 // provided on SPARC/X86. We assume that strength is necessary unless
79 // we can prove that a weaker form is sufficiently safe.
80
81 // Atomically store to a location
82 // The type T must be either a pointer type convertible to or equal
83 // to D, an integral/enum type equal to D, or a type equal to D that
84 // is primitive convertible using PrimitiveConversions.
85 template<typename D, typename T>
86 inline static void store(volatile D* dest, T store_value);
87
88 template <typename D, typename T>
89 inline static void release_store(volatile D* dest, T store_value);
90
91 template <typename D, typename T>
92 inline static void release_store_fence(volatile D* dest, T store_value);
93
94 // Atomically load from a location
95 // The type T must be either a pointer type, an integral/enum type,
96 // or a type that is primitive convertible using PrimitiveConversions.
97 template<typename T>
98 inline static T load(const volatile T* dest);
99
100 template <typename T>
101 inline static T load_acquire(const volatile T* dest);
102
103 // Atomically add to a location. *add*() provide:
104 // <fence> add-value-to-dest <membar StoreLoad|StoreStore>
105
106 // Returns updated value.
107 template<typename D, typename I>
108 inline static D add(D volatile* dest, I add_value,
109 atomic_memory_order order = memory_order_conservative);
110
111 // Returns previous value.
112 template<typename D, typename I>
113 inline static D fetch_and_add(D volatile* dest, I add_value,
114 atomic_memory_order order = memory_order_conservative);
115
116 template<typename D, typename I>
117 inline static D sub(D volatile* dest, I sub_value,
118 atomic_memory_order order = memory_order_conservative);
119
120 // Atomically increment location. inc() provide:
121 // <fence> increment-dest <membar StoreLoad|StoreStore>
122 // The type D may be either a pointer type, or an integral
123 // type. If it is a pointer type, then the increment is
124 // scaled to the size of the type pointed to by the pointer.
125 template<typename D>
126 inline static void inc(D volatile* dest,
127 atomic_memory_order order = memory_order_conservative);
128
129 // Atomically decrement a location. dec() provide:
130 // <fence> decrement-dest <membar StoreLoad|StoreStore>
131 // The type D may be either a pointer type, or an integral
132 // type. If it is a pointer type, then the decrement is
133 // scaled to the size of the type pointed to by the pointer.
134 template<typename D>
135 inline static void dec(D volatile* dest,
136 atomic_memory_order order = memory_order_conservative);
137
138 // Performs atomic exchange of *dest with exchange_value. Returns old
139 // prior value of *dest. xchg*() provide:
140 // <fence> exchange-value-with-dest <membar StoreLoad|StoreStore>
141 // The type T must be either a pointer type convertible to or equal
142 // to D, an integral/enum type equal to D, or a type equal to D that
143 // is primitive convertible using PrimitiveConversions.
144 template<typename D, typename T>
145 inline static D xchg(volatile D* dest, T exchange_value,
146 atomic_memory_order order = memory_order_conservative);
147
148 // Performs atomic compare of *dest and compare_value, and exchanges
149 // *dest with exchange_value if the comparison succeeded. Returns prior
150 // value of *dest. cmpxchg*() provide:
151 // <fence> compare-and-exchange <membar StoreLoad|StoreStore>
152
153 template<typename D, typename U, typename T>
154 inline static D cmpxchg(D volatile* dest,
155 U compare_value,
156 T exchange_value,
157 atomic_memory_order order = memory_order_conservative);
158
159 // Performs atomic compare of *dest and NULL, and replaces *dest
160 // with exchange_value if the comparison succeeded. Returns true if
161 // the comparison succeeded and the exchange occurred. This is
162 // often used as part of lazy initialization, as a lock-free
163 // alternative to the Double-Checked Locking Pattern.
164 template<typename D, typename T>
165 inline static bool replace_if_null(D* volatile* dest, T* value,
166 atomic_memory_order order = memory_order_conservative);
167
168private:
169WINDOWS_ONLY(public:) // VS2017 warns (C2027) use of undefined type if IsPointerConvertible is declared private
170 // Test whether From is implicitly convertible to To.
171 // From and To must be pointer types.
172 // Note: Provides the limited subset of C++11 std::is_convertible
173 // that is needed here.
174 template<typename From, typename To> struct IsPointerConvertible;
175
176protected:
177 // Dispatch handler for store. Provides type-based validity
178 // checking and limited conversions around calls to the platform-
179 // specific implementation layer provided by PlatformOp.
180 template<typename D, typename T, typename PlatformOp, typename Enable = void>
181 struct StoreImpl;
182
183 // Platform-specific implementation of store. Support for sizes
184 // of 1, 2, 4, and (if different) pointer size bytes are required.
185 // The class is a function object that must be default constructable,
186 // with these requirements:
187 //
188 // either:
189 // - dest is of type D*, an integral, enum or pointer type.
190 // - new_value are of type T, an integral, enum or pointer type D or
191 // pointer type convertible to D.
192 // or:
193 // - T and D are the same and are primitive convertible using PrimitiveConversions
194 // and either way:
195 // - platform_store is an object of type PlatformStore<sizeof(T)>.
196 //
197 // Then
198 // platform_store(new_value, dest)
199 // must be a valid expression.
200 //
201 // The default implementation is a volatile store. If a platform
202 // requires more for e.g. 64 bit stores, a specialization is required
203 template<size_t byte_size> struct PlatformStore;
204
205 // Dispatch handler for load. Provides type-based validity
206 // checking and limited conversions around calls to the platform-
207 // specific implementation layer provided by PlatformOp.
208 template<typename T, typename PlatformOp, typename Enable = void>
209 struct LoadImpl;
210
211 // Platform-specific implementation of load. Support for sizes of
212 // 1, 2, 4 bytes and (if different) pointer size bytes are required.
213 // The class is a function object that must be default
214 // constructable, with these requirements:
215 //
216 // - dest is of type T*, an integral, enum or pointer type, or
217 // T is convertible to a primitive type using PrimitiveConversions
218 // - platform_load is an object of type PlatformLoad<sizeof(T)>.
219 //
220 // Then
221 // platform_load(src)
222 // must be a valid expression, returning a result convertible to T.
223 //
224 // The default implementation is a volatile load. If a platform
225 // requires more for e.g. 64 bit loads, a specialization is required
226 template<size_t byte_size> struct PlatformLoad;
227
228 // Give platforms a variation point to specialize.
229 template<size_t byte_size, ScopedFenceType type> struct PlatformOrderedStore;
230 template<size_t byte_size, ScopedFenceType type> struct PlatformOrderedLoad;
231
232private:
233 // Dispatch handler for add. Provides type-based validity checking
234 // and limited conversions around calls to the platform-specific
235 // implementation layer provided by PlatformAdd.
236 template<typename D, typename I, typename Enable = void>
237 struct AddImpl;
238
239 // Platform-specific implementation of add. Support for sizes of 4
240 // bytes and (if different) pointer size bytes are required. The
241 // class must be default constructable, with these requirements:
242 //
243 // - dest is of type D*, an integral or pointer type.
244 // - add_value is of type I, an integral type.
245 // - sizeof(I) == sizeof(D).
246 // - if D is an integral type, I == D.
247 // - order is of type atomic_memory_order.
248 // - platform_add is an object of type PlatformAdd<sizeof(D)>.
249 //
250 // Then both
251 // platform_add.add_and_fetch(dest, add_value, order)
252 // platform_add.fetch_and_add(dest, add_value, order)
253 // must be valid expressions returning a result convertible to D.
254 //
255 // add_and_fetch atomically adds add_value to the value of dest,
256 // returning the new value.
257 //
258 // fetch_and_add atomically adds add_value to the value of dest,
259 // returning the old value.
260 //
261 // When D is a pointer type P*, both add_and_fetch and fetch_and_add
262 // treat it as if it were an uintptr_t; they do not perform any
263 // scaling of add_value, as that has already been done by the caller.
264 //
265 // No definition is provided; all platforms must explicitly define
266 // this class and any needed specializations.
267 template<size_t byte_size> struct PlatformAdd;
268
269 // Support for platforms that implement some variants of add using a
270 // (typically out of line) non-template helper function. The
271 // generic arguments passed to PlatformAdd need to be translated to
272 // the appropriate type for the helper function, the helper function
273 // invoked on the translated arguments, and the result translated
274 // back. Type is the parameter / return type of the helper
275 // function. No scaling of add_value is performed when D is a pointer
276 // type, so this function can be used to implement the support function
277 // required by AddAndFetch.
278 template<typename Type, typename Fn, typename D, typename I>
279 static D add_using_helper(Fn fn, D volatile* dest, I add_value);
280
281 // Dispatch handler for cmpxchg. Provides type-based validity
282 // checking and limited conversions around calls to the
283 // platform-specific implementation layer provided by
284 // PlatformCmpxchg.
285 template<typename D, typename U, typename T, typename Enable = void>
286 struct CmpxchgImpl;
287
288 // Platform-specific implementation of cmpxchg. Support for sizes
289 // of 1, 4, and 8 are required. The class is a function object that
290 // must be default constructable, with these requirements:
291 //
292 // - dest is of type T*.
293 // - exchange_value and compare_value are of type T.
294 // - order is of type atomic_memory_order.
295 // - platform_cmpxchg is an object of type PlatformCmpxchg<sizeof(T)>.
296 //
297 // Then
298 // platform_cmpxchg(dest, compare_value, exchange_value, order)
299 // must be a valid expression, returning a result convertible to T.
300 //
301 // A default definition is provided, which declares a function template
302 // T operator()(T volatile*, T, T, atomic_memory_order) const
303 //
304 // For each required size, a platform must either provide an
305 // appropriate definition of that function, or must entirely
306 // specialize the class template for that size.
307 template<size_t byte_size> struct PlatformCmpxchg;
308
309 // Support for platforms that implement some variants of cmpxchg
310 // using a (typically out of line) non-template helper function.
311 // The generic arguments passed to PlatformCmpxchg need to be
312 // translated to the appropriate type for the helper function, the
313 // helper invoked on the translated arguments, and the result
314 // translated back. Type is the parameter / return type of the
315 // helper function.
316 template<typename Type, typename Fn, typename T>
317 static T cmpxchg_using_helper(Fn fn,
318 T volatile* dest,
319 T compare_value,
320 T exchange_value);
321
322 // Support platforms that do not provide Read-Modify-Write
323 // byte-level atomic access. To use, derive PlatformCmpxchg<1> from
324 // this class.
325public: // Temporary, can't be private: C++03 11.4/2. Fixed by C++11.
326 struct CmpxchgByteUsingInt;
327private:
328
329 // Dispatch handler for xchg. Provides type-based validity
330 // checking and limited conversions around calls to the
331 // platform-specific implementation layer provided by
332 // PlatformXchg.
333 template<typename D, typename T, typename Enable = void>
334 struct XchgImpl;
335
336 // Platform-specific implementation of xchg. Support for sizes
337 // of 4, and sizeof(intptr_t) are required. The class is a function
338 // object that must be default constructable, with these requirements:
339 //
340 // - dest is of type T*.
341 // - exchange_value is of type T.
342 // - platform_xchg is an object of type PlatformXchg<sizeof(T)>.
343 //
344 // Then
345 // platform_xchg(dest, exchange_value)
346 // must be a valid expression, returning a result convertible to T.
347 //
348 // A default definition is provided, which declares a function template
349 // T operator()(T volatile*, T, atomic_memory_order) const
350 //
351 // For each required size, a platform must either provide an
352 // appropriate definition of that function, or must entirely
353 // specialize the class template for that size.
354 template<size_t byte_size> struct PlatformXchg;
355
356 // Support for platforms that implement some variants of xchg
357 // using a (typically out of line) non-template helper function.
358 // The generic arguments passed to PlatformXchg need to be
359 // translated to the appropriate type for the helper function, the
360 // helper invoked on the translated arguments, and the result
361 // translated back. Type is the parameter / return type of the
362 // helper function.
363 template<typename Type, typename Fn, typename T>
364 static T xchg_using_helper(Fn fn,
365 T volatile* dest,
366 T exchange_value);
367};
368
369template<typename From, typename To>
370struct Atomic::IsPointerConvertible<From*, To*> : AllStatic {
371 // Determine whether From* is implicitly convertible to To*, using
372 // the "sizeof trick".
373 typedef char yes;
374 typedef char (&no)[2];
375
376 static yes test(To*);
377 static no test(...);
378 static From* test_value;
379
380 static const bool value = (sizeof(yes) == sizeof(test(test_value)));
381};
382
383// Handle load for pointer, integral and enum types.
384template<typename T, typename PlatformOp>
385struct Atomic::LoadImpl<
386 T,
387 PlatformOp,
388 typename EnableIf<IsIntegral<T>::value || std::is_enum<T>::value || IsPointer<T>::value>::type>
389{
390 T operator()(T const volatile* dest) const {
391 // Forward to the platform handler for the size of T.
392 return PlatformOp()(dest);
393 }
394};
395
396// Handle load for types that have a translator.
397//
398// All the involved types must be identical.
399//
400// This translates the original call into a call on the decayed
401// arguments, and returns the recovered result of that translated
402// call.
403template<typename T, typename PlatformOp>
404struct Atomic::LoadImpl<
405 T,
406 PlatformOp,
407 typename EnableIf<PrimitiveConversions::Translate<T>::value>::type>
408{
409 T operator()(T const volatile* dest) const {
410 typedef PrimitiveConversions::Translate<T> Translator;
411 typedef typename Translator::Decayed Decayed;
412 STATIC_ASSERT(sizeof(T) == sizeof(Decayed))static_assert((sizeof(T) == sizeof(Decayed)), "sizeof(T) == sizeof(Decayed)"
)
;
413 Decayed result = PlatformOp()(reinterpret_cast<Decayed const volatile*>(dest));
414 return Translator::recover(result);
415 }
416};
417
418// Default implementation of atomic load if a specific platform
419// does not provide a specialization for a certain size class.
420// For increased safety, the default implementation only allows
421// load types that are pointer sized or smaller. If a platform still
422// supports wide atomics, then it has to use specialization
423// of Atomic::PlatformLoad for that wider size class.
424template<size_t byte_size>
425struct Atomic::PlatformLoad {
426 template<typename T>
427 T operator()(T const volatile* dest) const {
428 STATIC_ASSERT(sizeof(T) <= sizeof(void*))static_assert((sizeof(T) <= sizeof(void*)), "sizeof(T) <= sizeof(void*)"
)
; // wide atomics need specialization
429 return *dest;
430 }
431};
432
433// Handle store for integral and enum types.
434//
435// All the involved types must be identical.
436template<typename T, typename PlatformOp>
437struct Atomic::StoreImpl<
438 T, T,
439 PlatformOp,
440 typename EnableIf<IsIntegral<T>::value || std::is_enum<T>::value>::type>
441{
442 void operator()(T volatile* dest, T new_value) const {
443 // Forward to the platform handler for the size of T.
444 PlatformOp()(dest, new_value);
445 }
446};
447
448// Handle store for pointer types.
449//
450// The new_value must be implicitly convertible to the
451// destination's type; it must be type-correct to store the
452// new_value in the destination.
453template<typename D, typename T, typename PlatformOp>
454struct Atomic::StoreImpl<
455 D*, T*,
456 PlatformOp,
457 typename EnableIf<Atomic::IsPointerConvertible<T*, D*>::value>::type>
458{
459 void operator()(D* volatile* dest, T* new_value) const {
460 // Allow derived to base conversion, and adding cv-qualifiers.
461 D* value = new_value;
462 PlatformOp()(dest, value);
46
Passing null pointer value via 1st parameter 'p'
47
Calling 'PlatformOrderedStore::operator()'
52
Passing null pointer value via 1st parameter 'dest'
53
Calling 'PlatformStore::operator()'
463 }
464};
465
466// Handle store for types that have a translator.
467//
468// All the involved types must be identical.
469//
470// This translates the original call into a call on the decayed
471// arguments.
472template<typename T, typename PlatformOp>
473struct Atomic::StoreImpl<
474 T, T,
475 PlatformOp,
476 typename EnableIf<PrimitiveConversions::Translate<T>::value>::type>
477{
478 void operator()(T volatile* dest, T new_value) const {
479 typedef PrimitiveConversions::Translate<T> Translator;
480 typedef typename Translator::Decayed Decayed;
481 STATIC_ASSERT(sizeof(T) == sizeof(Decayed))static_assert((sizeof(T) == sizeof(Decayed)), "sizeof(T) == sizeof(Decayed)"
)
;
482 PlatformOp()(reinterpret_cast<Decayed volatile*>(dest),
483 Translator::decay(new_value));
484 }
485};
486
487// Default implementation of atomic store if a specific platform
488// does not provide a specialization for a certain size class.
489// For increased safety, the default implementation only allows
490// storing types that are pointer sized or smaller. If a platform still
491// supports wide atomics, then it has to use specialization
492// of Atomic::PlatformStore for that wider size class.
493template<size_t byte_size>
494struct Atomic::PlatformStore {
495 template<typename T>
496 void operator()(T volatile* dest,
497 T new_value) const {
498 STATIC_ASSERT(sizeof(T) <= sizeof(void*))static_assert((sizeof(T) <= sizeof(void*)), "sizeof(T) <= sizeof(void*)"
)
; // wide atomics need specialization
499 (void)const_cast<T&>(*dest = new_value);
54
Dereference of null pointer (loaded from variable 'dest')
500 }
501};
502
503template<typename D>
504inline void Atomic::inc(D volatile* dest, atomic_memory_order order) {
505 STATIC_ASSERT(IsPointer<D>::value || IsIntegral<D>::value)static_assert((IsPointer<D>::value || IsIntegral<D>
::value), "IsPointer<D>::value || IsIntegral<D>::value"
)
;
506 typedef typename Conditional<IsPointer<D>::value, ptrdiff_t, D>::type I;
507 Atomic::add(dest, I(1), order);
508}
509
510template<typename D>
511inline void Atomic::dec(D volatile* dest, atomic_memory_order order) {
512 STATIC_ASSERT(IsPointer<D>::value || IsIntegral<D>::value)static_assert((IsPointer<D>::value || IsIntegral<D>
::value), "IsPointer<D>::value || IsIntegral<D>::value"
)
;
513 typedef typename Conditional<IsPointer<D>::value, ptrdiff_t, D>::type I;
514 // Assumes two's complement integer representation.
515 #pragma warning(suppress: 4146)
516 Atomic::add(dest, I(-1), order);
517}
518
519template<typename D, typename I>
520inline D Atomic::sub(D volatile* dest, I sub_value, atomic_memory_order order) {
521 STATIC_ASSERT(IsPointer<D>::value || IsIntegral<D>::value)static_assert((IsPointer<D>::value || IsIntegral<D>
::value), "IsPointer<D>::value || IsIntegral<D>::value"
)
;
522 STATIC_ASSERT(IsIntegral<I>::value)static_assert((IsIntegral<I>::value), "IsIntegral<I>::value"
)
;
523 // If D is a pointer type, use [u]intptr_t as the addend type,
524 // matching signedness of I. Otherwise, use D as the addend type.
525 typedef typename Conditional<IsSigned<I>::value, intptr_t, uintptr_t>::type PI;
526 typedef typename Conditional<IsPointer<D>::value, PI, D>::type AddendType;
527 // Only allow conversions that can't change the value.
528 STATIC_ASSERT(IsSigned<I>::value == IsSigned<AddendType>::value)static_assert((IsSigned<I>::value == IsSigned<AddendType
>::value), "IsSigned<I>::value == IsSigned<AddendType>::value"
)
;
529 STATIC_ASSERT(sizeof(I) <= sizeof(AddendType))static_assert((sizeof(I) <= sizeof(AddendType)), "sizeof(I) <= sizeof(AddendType)"
)
;
530 AddendType addend = sub_value;
531 // Assumes two's complement integer representation.
532 #pragma warning(suppress: 4146) // In case AddendType is not signed.
533 return Atomic::add(dest, -addend, order);
534}
535
536// Define the class before including platform file, which may specialize
537// the operator definition. No generic definition of specializations
538// of the operator template are provided, nor are there any generic
539// specializations of the class. The platform file is responsible for
540// providing those.
541template<size_t byte_size>
542struct Atomic::PlatformCmpxchg {
543 template<typename T>
544 T operator()(T volatile* dest,
545 T compare_value,
546 T exchange_value,
547 atomic_memory_order order) const;
548};
549
550// Define the class before including platform file, which may use this
551// as a base class, requiring it be complete. The definition is later
552// in this file, near the other definitions related to cmpxchg.
553struct Atomic::CmpxchgByteUsingInt {
554 static uint8_t get_byte_in_int(uint32_t n, uint32_t idx);
555 static uint32_t set_byte_in_int(uint32_t n, uint8_t b, uint32_t idx);
556 template<typename T>
557 T operator()(T volatile* dest,
558 T compare_value,
559 T exchange_value,
560 atomic_memory_order order) const;
561};
562
563// Define the class before including platform file, which may specialize
564// the operator definition. No generic definition of specializations
565// of the operator template are provided, nor are there any generic
566// specializations of the class. The platform file is responsible for
567// providing those.
568template<size_t byte_size>
569struct Atomic::PlatformXchg {
570 template<typename T>
571 T operator()(T volatile* dest,
572 T exchange_value,
573 atomic_memory_order order) const;
574};
575
576template <ScopedFenceType T>
577class ScopedFenceGeneral: public StackObj {
578 public:
579 void prefix() {}
580 void postfix() {}
581};
582
583// The following methods can be specialized using simple template specialization
584// in the platform specific files for optimization purposes. Otherwise the
585// generalized variant is used.
586
587template<> inline void ScopedFenceGeneral<X_ACQUIRE>::postfix() { OrderAccess::acquire(); }
588template<> inline void ScopedFenceGeneral<RELEASE_X>::prefix() { OrderAccess::release(); }
589template<> inline void ScopedFenceGeneral<RELEASE_X_FENCE>::prefix() { OrderAccess::release(); }
590template<> inline void ScopedFenceGeneral<RELEASE_X_FENCE>::postfix() { OrderAccess::fence(); }
591
592template <ScopedFenceType T>
593class ScopedFence : public ScopedFenceGeneral<T> {
594 void *const _field;
595 public:
596 ScopedFence(void *const field) : _field(field) { prefix(); }
597 ~ScopedFence() { postfix(); }
598 void prefix() { ScopedFenceGeneral<T>::prefix(); }
599 void postfix() { ScopedFenceGeneral<T>::postfix(); }
600};
601
602// platform specific in-line definitions - must come before shared definitions
603
#include OS_CPU_HEADER(atomic)
605
606// shared in-line definitions
607
608// size_t casts...
#if (SIZE_MAX != UINTPTR_MAX)
610#error size_t is not WORD_SIZE, interesting platform, but missing implementation here
611#endif
612
613template<typename T>
614inline T Atomic::load(const volatile T* dest) {
615 return LoadImpl<T, PlatformLoad<sizeof(T)> >()(dest);
616}
617
618template<size_t byte_size, ScopedFenceType type>
619struct Atomic::PlatformOrderedLoad {
620 template <typename T>
621 T operator()(const volatile T* p) const {
622 ScopedFence<type> f((void*)p);
623 return Atomic::load(p);
624 }
625};
626
627template <typename T>
628inline T Atomic::load_acquire(const volatile T* p) {
629 return LoadImpl<T, PlatformOrderedLoad<sizeof(T), X_ACQUIRE> >()(p);
630}
631
632template<typename D, typename T>
633inline void Atomic::store(volatile D* dest, T store_value) {
634 StoreImpl<D, T, PlatformStore<sizeof(D)> >()(dest, store_value);
50
Passing null pointer value via 1st parameter 'dest'
51
Calling 'StoreImpl::operator()'
635}
636
637template<size_t byte_size, ScopedFenceType type>
638struct Atomic::PlatformOrderedStore {
639 template <typename T>
640 void operator()(volatile T* p, T v) const {
641 ScopedFence<type> f((void*)p);
642 Atomic::store(p, v);
48
Passing null pointer value via 1st parameter 'dest'
49
Calling 'Atomic::store'
643 }
644};
645
646template <typename D, typename T>
647inline void Atomic::release_store(volatile D* p, T v) {
648 StoreImpl<D, T, PlatformOrderedStore<sizeof(D), RELEASE_X> >()(p, v);
44
Passing null pointer value via 1st parameter 'dest'
45
Calling 'StoreImpl::operator()'
649}
650
651template <typename D, typename T>
652inline void Atomic::release_store_fence(volatile D* p, T v) {
653 StoreImpl<D, T, PlatformOrderedStore<sizeof(D), RELEASE_X_FENCE> >()(p, v);
654}
655
656template<typename D, typename I>
657inline D Atomic::add(D volatile* dest, I add_value,
658 atomic_memory_order order) {
659 return AddImpl<D, I>::add_and_fetch(dest, add_value, order);
660}
661
662template<typename D, typename I>
663inline D Atomic::fetch_and_add(D volatile* dest, I add_value,
664 atomic_memory_order order) {
665 return AddImpl<D, I>::fetch_and_add(dest, add_value, order);
666}
667
668template<typename D, typename I>
669struct Atomic::AddImpl<
670 D, I,
671 typename EnableIf<IsIntegral<I>::value &&
672 IsIntegral<D>::value &&
673 (sizeof(I) <= sizeof(D)) &&
674 (IsSigned<I>::value == IsSigned<D>::value)>::type>
675{
676 static D add_and_fetch(D volatile* dest, I add_value, atomic_memory_order order) {
677 D addend = add_value;
678 return PlatformAdd<sizeof(D)>().add_and_fetch(dest, addend, order);
679 }
680 static D fetch_and_add(D volatile* dest, I add_value, atomic_memory_order order) {
681 D addend = add_value;
682 return PlatformAdd<sizeof(D)>().fetch_and_add(dest, addend, order);
683 }
684};
685
686template<typename P, typename I>
687struct Atomic::AddImpl<
688 P*, I,
689 typename EnableIf<IsIntegral<I>::value && (sizeof(I) <= sizeof(P*))>::type>
690{
691 STATIC_ASSERT(sizeof(intptr_t) == sizeof(P*))static_assert((sizeof(intptr_t) == sizeof(P*)), "sizeof(intptr_t) == sizeof(P*)"
)
;
692 STATIC_ASSERT(sizeof(uintptr_t) == sizeof(P*))static_assert((sizeof(uintptr_t) == sizeof(P*)), "sizeof(uintptr_t) == sizeof(P*)"
)
;
693 typedef typename Conditional<IsSigned<I>::value,
694 intptr_t,
695 uintptr_t>::type CI;
696
697 static CI scale_addend(CI add_value) {
698 return add_value * sizeof(P);
699 }
700
701 static P* add_and_fetch(P* volatile* dest, I add_value, atomic_memory_order order) {
702 CI addend = add_value;
703 return PlatformAdd<sizeof(P*)>().add_and_fetch(dest, scale_addend(addend), order);
704 }
705 static P* fetch_and_add(P* volatile* dest, I add_value, atomic_memory_order order) {
706 CI addend = add_value;
707 return PlatformAdd<sizeof(P*)>().fetch_and_add(dest, scale_addend(addend), order);
708 }
709};
710
711template<typename Type, typename Fn, typename D, typename I>
712inline D Atomic::add_using_helper(Fn fn, D volatile* dest, I add_value) {
713 return PrimitiveConversions::cast<D>(
714 fn(PrimitiveConversions::cast<Type>(add_value),
715 reinterpret_cast<Type volatile*>(dest)));
716}
717
718template<typename D, typename U, typename T>
719inline D Atomic::cmpxchg(D volatile* dest,
720 U compare_value,
721 T exchange_value,
722 atomic_memory_order order) {
723 return CmpxchgImpl<D, U, T>()(dest, compare_value, exchange_value, order);
724}
725
726template<typename D, typename T>
727inline bool Atomic::replace_if_null(D* volatile* dest, T* value,
728 atomic_memory_order order) {
729 // Presently using a trivial implementation in terms of cmpxchg.
730 // Consider adding platform support, to permit the use of compiler
731 // intrinsics like gcc's __sync_bool_compare_and_swap.
732 D* expected_null = NULL__null;
733 return expected_null == cmpxchg(dest, expected_null, value, order);
734}
735
736// Handle cmpxchg for integral and enum types.
737//
738// All the involved types must be identical.
739template<typename T>
740struct Atomic::CmpxchgImpl<
741 T, T, T,
742 typename EnableIf<IsIntegral<T>::value || std::is_enum<T>::value>::type>
743{
744 T operator()(T volatile* dest, T compare_value, T exchange_value,
745 atomic_memory_order order) const {
746 // Forward to the platform handler for the size of T.
747 return PlatformCmpxchg<sizeof(T)>()(dest,
748 compare_value,
749 exchange_value,
750 order);
751 }
752};
753
754// Handle cmpxchg for pointer types.
755//
756// The destination's type and the compare_value type must be the same,
757// ignoring cv-qualifiers; we don't care about the cv-qualifiers of
758// the compare_value.
759//
760// The exchange_value must be implicitly convertible to the
761// destination's type; it must be type-correct to store the
762// exchange_value in the destination.
763template<typename D, typename U, typename T>
764struct Atomic::CmpxchgImpl<
765 D*, U*, T*,
766 typename EnableIf<Atomic::IsPointerConvertible<T*, D*>::value &&
767 IsSame<typename RemoveCV<D>::type,
768 typename RemoveCV<U>::type>::value>::type>
769{
770 D* operator()(D* volatile* dest, U* compare_value, T* exchange_value,
771 atomic_memory_order order) const {
772 // Allow derived to base conversion, and adding cv-qualifiers.
773 D* new_value = exchange_value;
774 // Don't care what the CV qualifiers for compare_value are,
775 // but we need to match D* when calling platform support.
776 D* old_value = const_cast<D*>(compare_value);
777 return PlatformCmpxchg<sizeof(D*)>()(dest, old_value, new_value, order);
778 }
779};
780
781// Handle cmpxchg for types that have a translator.
782//
783// All the involved types must be identical.
784//
785// This translates the original call into a call on the decayed
786// arguments, and returns the recovered result of that translated
787// call.
788template<typename T>
789struct Atomic::CmpxchgImpl<
790 T, T, T,
791 typename EnableIf<PrimitiveConversions::Translate<T>::value>::type>
792{
793 T operator()(T volatile* dest, T compare_value, T exchange_value,
794 atomic_memory_order order) const {
795 typedef PrimitiveConversions::Translate<T> Translator;
796 typedef typename Translator::Decayed Decayed;
797 STATIC_ASSERT(sizeof(T) == sizeof(Decayed))static_assert((sizeof(T) == sizeof(Decayed)), "sizeof(T) == sizeof(Decayed)"
)
;
798 return Translator::recover(
799 cmpxchg(reinterpret_cast<Decayed volatile*>(dest),
800 Translator::decay(compare_value),
801 Translator::decay(exchange_value),
802 order));
803 }
804};
805
806template<typename Type, typename Fn, typename T>
807inline T Atomic::cmpxchg_using_helper(Fn fn,
808 T volatile* dest,
809 T compare_value,
810 T exchange_value) {
811 STATIC_ASSERT(sizeof(Type) == sizeof(T))static_assert((sizeof(Type) == sizeof(T)), "sizeof(Type) == sizeof(T)"
)
;
812 return PrimitiveConversions::cast<T>(
813 fn(PrimitiveConversions::cast<Type>(exchange_value),
814 reinterpret_cast<Type volatile*>(dest),
815 PrimitiveConversions::cast<Type>(compare_value)));
816}
817
818inline uint32_t Atomic::CmpxchgByteUsingInt::set_byte_in_int(uint32_t n,
819 uint8_t b,
820 uint32_t idx) {
821 int bitsIdx = BitsPerByte * idx;
822 return (n & ~(static_cast<uint32_t>(0xff) << bitsIdx))
823 | (static_cast<uint32_t>(b) << bitsIdx);
824}
825
826inline uint8_t Atomic::CmpxchgByteUsingInt::get_byte_in_int(uint32_t n,
827 uint32_t idx) {
828 int bitsIdx = BitsPerByte * idx;
829 return (uint8_t)(n >> bitsIdx);
830}
831
832template<typename T>
833inline T Atomic::CmpxchgByteUsingInt::operator()(T volatile* dest,
834 T compare_value,
835 T exchange_value,
836 atomic_memory_order order) const {
837 STATIC_ASSERT(sizeof(T) == sizeof(uint8_t))static_assert((sizeof(T) == sizeof(uint8_t)), "sizeof(T) == sizeof(uint8_t)"
)
;
838 uint8_t canon_exchange_value = exchange_value;
839 uint8_t canon_compare_value = compare_value;
840 volatile uint32_t* aligned_dest
841 = reinterpret_cast<volatile uint32_t*>(align_down(dest, sizeof(uint32_t)));
842 size_t offset = pointer_delta(dest, aligned_dest, 1);
843
844 uint32_t idx = (Endian::NATIVE == Endian::BIG)
845 ? (sizeof(uint32_t) - 1 - offset)
846 : offset;
847
848 // current value may not be what we are looking for, so force it
849 // to that value so the initial cmpxchg will fail if it is different
850 uint32_t cur = set_byte_in_int(Atomic::load(aligned_dest), canon_compare_value, idx);
851
852 // always execute a real cmpxchg so that we get the required memory
853 // barriers even on initial failure
854 do {
855 // value to swap in matches current value
856 // except for the one byte we want to update
857 uint32_t new_value = set_byte_in_int(cur, canon_exchange_value, idx);
858
859 uint32_t res = cmpxchg(aligned_dest, cur, new_value, order);
860 if (res == cur) break; // success
861
862 // at least one byte in the int changed value, so update
863 // our view of the current int
864 cur = res;
865 // if our byte is still as cur we loop and try again
866 } while (get_byte_in_int(cur, idx) == canon_compare_value);
867
868 return PrimitiveConversions::cast<T>(get_byte_in_int(cur, idx));
869}
870
871// Handle xchg for integral and enum types.
872//
873// All the involved types must be identical.
874template<typename T>
875struct Atomic::XchgImpl<
876 T, T,
877 typename EnableIf<IsIntegral<T>::value || std::is_enum<T>::value>::type>
878{
879 T operator()(T volatile* dest, T exchange_value, atomic_memory_order order) const {
880 // Forward to the platform handler for the size of T.
881 return PlatformXchg<sizeof(T)>()(dest, exchange_value, order);
882 }
883};
884
885// Handle xchg for pointer types.
886//
887// The exchange_value must be implicitly convertible to the
888// destination's type; it must be type-correct to store the
889// exchange_value in the destination.
890template<typename D, typename T>
891struct Atomic::XchgImpl<
892 D*, T*,
893 typename EnableIf<Atomic::IsPointerConvertible<T*, D*>::value>::type>
894{
895 D* operator()(D* volatile* dest, T* exchange_value, atomic_memory_order order) const {
896 // Allow derived to base conversion, and adding cv-qualifiers.
897 D* new_value = exchange_value;
898 return PlatformXchg<sizeof(D*)>()(dest, new_value, order);
899 }
900};
901
902// Handle xchg for types that have a translator.
903//
904// All the involved types must be identical.
905//
906// This translates the original call into a call on the decayed
907// arguments, and returns the recovered result of that translated
908// call.
909template<typename T>
910struct Atomic::XchgImpl<
911 T, T,
912 typename EnableIf<PrimitiveConversions::Translate<T>::value>::type>
913{
914 T operator()(T volatile* dest, T exchange_value, atomic_memory_order order) const {
915 typedef PrimitiveConversions::Translate<T> Translator;
916 typedef typename Translator::Decayed Decayed;
917 STATIC_ASSERT(sizeof(T) == sizeof(Decayed))static_assert((sizeof(T) == sizeof(Decayed)), "sizeof(T) == sizeof(Decayed)"
)
;
918 return Translator::recover(
919 xchg(reinterpret_cast<Decayed volatile*>(dest),
920 Translator::decay(exchange_value),
921 order));
922 }
923};
924
925template<typename Type, typename Fn, typename T>
926inline T Atomic::xchg_using_helper(Fn fn,
927 T volatile* dest,
928 T exchange_value) {
929 STATIC_ASSERT(sizeof(Type) == sizeof(T))static_assert((sizeof(Type) == sizeof(T)), "sizeof(Type) == sizeof(T)"
)
;
930 // Notice the swapped order of arguments. Change when/if stubs are rewritten.
931 return PrimitiveConversions::cast<T>(
932 fn(PrimitiveConversions::cast<Type>(exchange_value),
933 reinterpret_cast<Type volatile*>(dest)));
934}
935
936template<typename D, typename T>
937inline D Atomic::xchg(volatile D* dest, T exchange_value, atomic_memory_order order) {
938 return XchgImpl<D, T>()(dest, exchange_value, order);
939}
940
941#endif // SHARE_RUNTIME_ATOMIC_HPP