File: jdk/src/hotspot/share/asm/assembler.cpp
Warning: line 176, column 7: Value stored to 'target_sect' during its initialization is never read
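Note: this is the Clang Static Analyzer's dead-store check (deadcode.DeadStores). It flags a local variable whose initializer is computed but whose value is never read afterwards, so the computation has no observable effect. The offending line is annotated inline in the listing below, and a hedged sketch of possible remediations follows Label::patch_instructions(). The pattern, in a generic form (names below are made up, not taken from this file):

  int target_sect = compute_section();   // value is stored here ...
  do_something_else();                   // ... but never read before it goes out of scope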
1 | /* |
2 | * Copyright (c) 1997, 2021, Oracle and/or its affiliates. All rights reserved. |
3 | * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. |
4 | * |
5 | * This code is free software; you can redistribute it and/or modify it |
6 | * under the terms of the GNU General Public License version 2 only, as |
7 | * published by the Free Software Foundation. |
8 | * |
9 | * This code is distributed in the hope that it will be useful, but WITHOUT |
10 | * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or |
11 | * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
12 | * version 2 for more details (a copy is included in the LICENSE file that |
13 | * accompanied this code). |
14 | * |
15 | * You should have received a copy of the GNU General Public License version |
16 | * 2 along with this work; if not, write to the Free Software Foundation, |
17 | * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. |
18 | * |
19 | * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA |
20 | * or visit www.oracle.com if you need additional information or have any |
21 | * questions. |
22 | * |
23 | */ |
24 | |
25 | #include "precompiled.hpp" |
26 | #include "asm/codeBuffer.hpp" |
27 | #include "asm/macroAssembler.hpp" |
28 | #include "asm/macroAssembler.inline.hpp" |
29 | #include "gc/shared/collectedHeap.hpp" |
30 | #include "memory/universe.hpp" |
31 | #include "oops/compressedOops.hpp" |
32 | #include "runtime/icache.hpp" |
33 | #include "runtime/os.hpp" |
34 | #include "runtime/thread.hpp" |
35 | |
36 | |
37 | // Implementation of AbstractAssembler |
38 | // |
39 | // The AbstractAssembler generates code into a CodeBuffer. To make code generation faster, |
40 | // the assembler keeps a copy of the code buffer's boundaries & modifies them when |
41 | // emitting bytes rather than using the code buffer's accessor functions all the time. |
42 | // The code buffer is updated via set_code_end(...) after emitting a whole instruction. |
43 | |
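// [Editorial note, illustrative only -- not part of assembler.cpp.] The fast
// path described above amounts to emitting through a locally cached end
// pointer and publishing it back to the CodeSection once per instruction.
// A minimal sketch, assuming a two-byte instruction (byte0/byte1 are made-up
// placeholders; end()/set_end() are the CodeSection accessors used later in
// this file):
//
//   address end = code_section()->end();   // cached copy of the section boundary
//   *end++ = byte0;                        // emit bytes without going through
//   *end++ = byte1;                        //   CodeBuffer accessors each time
//   code_section()->set_end(end);          // publish the new end ("set_code_end")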
44 | AbstractAssembler::AbstractAssembler(CodeBuffer* code) { |
45 | if (code == NULL) return; |
46 | CodeSection* cs = code->insts(); |
47 | cs->clear_mark(); // new assembler kills old mark |
48 | if (cs->start() == NULL) { |
49 | vm_exit_out_of_memory(0, OOM_MMAP_ERROR, "CodeCache: no room for %s", code->name()); |
50 | } |
51 | _code_section = cs; |
52 | _oop_recorder = code->oop_recorder(); |
53 | DEBUG_ONLY( _short_branch_delta = 0; ) |
54 | } |
55 | |
56 | void AbstractAssembler::set_code_section(CodeSection* cs) { |
57 | assert(cs->outer() == code_section()->outer(), "sanity"); |
58 | assert(cs->is_allocated(), "need to pre-allocate this section"); |
59 | cs->clear_mark(); // new assembly into this section kills old mark |
60 | _code_section = cs; |
61 | } |
62 | |
63 | // Inform CodeBuffer that incoming code and relocation will be for stubs |
64 | address AbstractAssembler::start_a_stub(int required_space) { |
65 | CodeBuffer* cb = code(); |
66 | CodeSection* cs = cb->stubs(); |
67 | assert(_code_section == cb->insts(), "not in insts?"); |
68 | if (cs->maybe_expand_to_ensure_remaining(required_space) |
69 | && cb->blob() == NULL) { |
70 | return NULL; |
71 | } |
72 | set_code_section(cs); |
73 | return pc(); |
74 | } |
75 | |
76 | // Inform CodeBuffer that incoming code and relocation will be code |
77 | // Should not be called if start_a_stub() returned NULL |
78 | void AbstractAssembler::end_a_stub() { |
79 | assert(_code_section == code()->stubs(), "not in stubs?"); |
80 | set_code_section(code()->insts()); |
81 | } |
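// [Editorial note, illustrative usage -- not part of assembler.cpp.] Callers
// bracket stub emission with start_a_stub()/end_a_stub() and must bail out on
// a NULL result, which indicates the CodeBuffer could not grow the stubs
// section. Sketch using HotSpot's usual '__' shorthand for the assembler;
// 'stub_size' is a made-up placeholder:
//
//   address stub = __ start_a_stub(stub_size);
//   if (stub == NULL) {
//     return NULL;              // stubs section is full; caller must bail out
//   }
//   // ... emit the stub's instructions and relocations ...
//   __ end_a_stub();            // switch emission back to the insts section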
82 | |
83 | // Inform CodeBuffer that incoming code and relocation will be for constants |
84 | address AbstractAssembler::start_a_const(int required_space, int required_align) { |
85 | CodeBuffer* cb = code(); |
86 | CodeSection* cs = cb->consts(); |
87 | assert(_code_section == cb->insts() || _code_section == cb->stubs(), "not in insts/stubs?"); |
88 | address end = cs->end(); |
89 | int pad = -(intptr_t)end & (required_align-1); |
90 | if (cs->maybe_expand_to_ensure_remaining(pad + required_space)) { |
91 | if (cb->blob() == NULL) return NULL; |
92 | end = cs->end(); // refresh pointer |
93 | } |
94 | if (pad > 0) { |
95 | while (--pad >= 0) { *end++ = 0; } |
96 | cs->set_end(end); |
97 | } |
98 | set_code_section(cs); |
99 | return end; |
100 | } |
101 | |
102 | // Inform CodeBuffer that incoming code and relocation will be code |
103 | // in section cs (insts or stubs). |
104 | void AbstractAssembler::end_a_const(CodeSection* cs) { |
105 | assert(_code_section == code()->consts(), "not in consts?"); |
106 | set_code_section(cs); |
107 | } |
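// [Editorial note, illustrative usage -- not part of assembler.cpp.] Judging
// from the two routines above, a caller that plants a pointer-sized constant
// remembers its current section, writes the value at the aligned address
// returned by start_a_const(), advances the section end, and switches back.
// A plausible sketch ('value' is a made-up placeholder):
//
//   CodeSection* prev = __ code_section();
//   address c = __ start_a_const(sizeof(address), sizeof(address));
//   if (c != NULL) {
//     *(address*)c = value;                            // write the constant
//     __ code_section()->set_end(c + sizeof(address)); // claim the space
//     __ end_a_const(prev);                            // return to 'prev'
//   }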
108 | |
109 | void AbstractAssembler::flush() { |
110 | ICache::invalidate_range(addr_at(0), offset()); |
111 | } |
112 | |
113 | void AbstractAssembler::bind(Label& L) { |
114 | if (L.is_bound()) { |
115 | // Assembler can bind a label more than once to the same place. |
116 | guarantee(L.loc() == locator(), "attempt to redefine label"); |
117 | return; |
118 | } |
119 | L.bind_loc(locator()); |
120 | L.patch_instructions((MacroAssembler*)this); |
121 | } |
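// [Editorial note, illustrative usage -- not part of assembler.cpp.] bind()
// supports the usual forward-branch idiom in platform MacroAssembler code:
// branching to a not-yet-bound Label records a patch location (see
// Label::add_patch_at below), and bind() later back-patches it. Roughly,
// with an x86 mnemonic shown purely for illustration:
//
//   Label done;
//   __ jcc(Assembler::zero, done);   // forward branch; target not yet known
//   // ... code for the not-taken path ...
//   __ bind(done);                   // binds the label and patches the jcc above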
122 | |
123 | void AbstractAssembler::generate_stack_overflow_check(int frame_size_in_bytes) { |
124 | // Each code entry causes one stack bang n pages down the stack, where n |
125 | // is configurable via StackShadowPages. The setting depends on the maximum |
126 | // depth of the VM or native call stack before control returns to Java code, |
127 | // since only Java code can raise a stack overflow exception using the |
128 | // stack banging mechanism. The VM and native code do not detect stack |
129 | // overflow. |
130 | // The code in JavaCalls::call() checks that there are at least n pages |
131 | // available, so all the entry code needs to do is bang once for the end of |
132 | // this shadow zone. |
133 | // The entry code may need to bang additional pages if the frame size |
134 | // is greater than a page. |
135 | |
136 | const int page_size = os::vm_page_size(); |
137 | int bang_end = (int)StackOverflow::stack_shadow_zone_size(); |
138 | |
139 | // This is how far the previous frame's stack banging extended. |
140 | const int bang_end_safe = bang_end; |
141 | |
142 | if (frame_size_in_bytes > page_size) { |
143 | bang_end += frame_size_in_bytes; |
144 | } |
145 | |
146 | int bang_offset = bang_end_safe; |
147 | while (bang_offset <= bang_end) { |
148 | // Need at least one stack bang at end of shadow zone. |
149 | bang_stack_with_offset(bang_offset); |
150 | bang_offset += page_size; |
151 | } |
152 | } |
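// [Editorial note, worked example under assumed numbers.] With a 4 KB page
// and an 8-page shadow zone (stack_shadow_zone_size() == 32768), a 20 KB
// frame gives bang_end = 32768 + 20480 = 53248, so the loop above bangs at
// offsets 32768, 36864, 40960, 45056, 49152 and 53248: one mandatory probe
// at the end of the shadow zone plus one per page of the frame.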
153 | |
154 | void Label::add_patch_at(CodeBuffer* cb, int branch_loc, const char* file, int line) { |
155 | assert(_loc == -1, "Label is unbound"); |
156 | // Don't add patch locations during scratch emit. |
157 | if (cb->insts()->scratch_emit()) { return; } |
158 | if (_patch_index < PatchCacheSize) { |
159 | _patches[_patch_index] = branch_loc; |
160 | #ifdef ASSERT |
161 | _lines[_patch_index] = line; |
162 | _files[_patch_index] = file; |
163 | #endif |
164 | } else { |
165 | if (_patch_overflow == NULL) { |
166 | _patch_overflow = cb->create_patch_overflow(); |
167 | } |
168 | _patch_overflow->push(branch_loc); |
169 | } |
170 | ++_patch_index; |
171 | } |
172 | |
173 | void Label::patch_instructions(MacroAssembler* masm) { |
174 | assert(is_bound(), "Label is bound"); |
175 | CodeBuffer* cb = masm->code(); |
176 | int target_sect = CodeBuffer::locator_sect(loc()); |
Value stored to 'target_sect' during its initialization is never read | |
177 | address target = cb->locator_address(loc()); |
178 | while (_patch_index > 0) { |
179 | --_patch_index; |
180 | int branch_loc; |
181 | int line = 0; |
182 | const char* file = NULL; |
183 | if (_patch_index >= PatchCacheSize) { |
184 | branch_loc = _patch_overflow->pop(); |
185 | } else { |
186 | branch_loc = _patches[_patch_index]; |
187 | #ifdef ASSERT |
188 | line = _lines[_patch_index]; |
189 | file = _files[_patch_index]; |
190 | #endif |
191 | } |
192 | int branch_sect = CodeBuffer::locator_sect(branch_loc); |
193 | address branch = cb->locator_address(branch_loc); |
194 | if (branch_sect == CodeBuffer::SECT_CONSTS) { |
195 | // The thing to patch is a constant word. |
196 | *(address*)branch = target; |
197 | continue; |
198 | } |
199 | |
200 | // Push the target offset into the branch instruction. |
201 | masm->pd_patch_instruction(branch, target, file, line); |
202 | } |
203 | } |
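// [Editorial note on the dead-store warning at line 176 -- a sketch, not a
// committed fix.] 'target_sect' is initialized but never read in this
// function; only 'target' and the per-branch 'branch_sect' are used.
// Typical remediations would be:
//
//   // option 1: drop the unused local entirely
//   address target = cb->locator_address(loc());
//
//   // option 2: keep the computation but mark it as deliberately unused
//   int target_sect = CodeBuffer::locator_sect(loc());
//   (void)target_sect;   // or wrap the declaration in DEBUG_ONLY(...) if it
//                        // only feeds debug-build assertions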
204 | |
205 | void AbstractAssembler::block_comment(const char* comment) { |
206 | if (sect() == CodeBuffer::SECT_INSTS) { |
207 | code_section()->outer()->block_comment(offset(), comment); |
208 | } |
209 | } |
210 | |
211 | const char* AbstractAssembler::code_string(const char* str) { |
212 | if (sect() == CodeBuffer::SECT_INSTS || sect() == CodeBuffer::SECT_STUBS) { |
213 | return code_section()->outer()->code_string(str); |
214 | } |
215 | return NULL; |
216 | } |
217 | |
218 | bool MacroAssembler::uses_implicit_null_check(void* address) { |
219 | // Exception handler checks the nmethod's implicit null checks table |
220 | // only when this method returns false. |
221 | uintptr_t addr = reinterpret_cast<uintptr_t>(address); |
222 | uintptr_t page_size = (uintptr_t)os::vm_page_size(); |
223 | #ifdef _LP64 |
224 | if (UseCompressedOops && CompressedOops::base() != NULL) { |
225 | // A SEGV can legitimately happen in C2 code at address |
226 | // (heap_base + offset) if Matcher::narrow_oop_use_complex_address |
227 | // is configured to allow narrow oops field loads to be implicitly |
228 | // null checked |
229 | uintptr_t start = (uintptr_t)CompressedOops::base(); |
230 | uintptr_t end = start + page_size; |
231 | if (addr >= start && addr < end) { |
232 | return true; |
233 | } |
234 | } |
235 | #endif |
236 | return addr < page_size; |
237 | } |
238 | |
239 | bool MacroAssembler::needs_explicit_null_check(intptr_t offset) { |
240 | // The offset -1 is used (hardcoded) in a number of places in C1 and MacroAssembler |
241 | // to indicate an unknown offset. For example, TemplateTable::pop_and_check_object(Register r) |
242 | // calls MacroAssembler::null_check(Register reg, int offset = -1) which gets here |
243 | // with -1. Another example is GraphBuilder::access_field(...) which uses -1 as a placeholder |
244 | // for offsets to be patched in later. The -1 there means the offset is not yet known |
245 | // and may lie outside of the zero-trapping page, and thus we need to ensure we're forcing |
246 | // an explicit null check for -1. |
247 | |
248 | // Check if offset is outside of [0, os::vm_page_size()) |
249 | return offset < 0 || offset >= os::vm_page_size(); |
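// [Editorial note, worked example assuming os::vm_page_size() == 4096.]
//   needs_explicit_null_check(-1)   -> true   (offset unknown, be conservative)
//   needs_explicit_null_check(16)   -> false  (a null base faults in the protected page 0)
//   needs_explicit_null_check(8192) -> true   (the access could miss the protected page)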
250 | } |