File: out/../deps/v8/src/execution/frames.cc
Warning: line 412, column 41: Dereference of null pointer (loaded from variable 'tos_location')
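Analysis note: in the constructor of SafeStackFrameIterator shown below, `tos_location` is initialized to nullptr (line 404) and only assigned when either `top_link_register_` is non-zero (line 405) or `IsValidStackAddress(sp)` holds (line 407). If both checks fail, the unconditional dereference `*tos_location` at line 412 reads through a null pointer, which is what the analyzer flags. The following sketch reuses only the names that appear in the listing and adds a single `tos_location != nullptr` guard; it is an illustrative mitigation under that assumption, not necessarily the change the V8 authors made.

    Address* tos_location = nullptr;
    if (top_link_register_) {
      tos_location = &top_link_register_;
    } else if (IsValidStackAddress(sp)) {
      MSAN_MEMORY_IS_INITIALIZED(sp, kSystemPointerSize);
      tos_location = reinterpret_cast<Address*>(sp);
    }
    // Skip the interpreter-frame probe entirely when no usable top-of-stack
    // location was found, so the pointer is never dereferenced while null.
    if (tos_location != nullptr &&
        IsInterpreterFramePc(isolate, *tos_location, &state)) {
      state.pc_address = tos_location;
      is_no_frame_bytecode_handler = true;
      advance_frame = false;
    }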
1 | // Copyright 2012 the V8 project authors. All rights reserved. | |||
2 | // Use of this source code is governed by a BSD-style license that can be | |||
3 | // found in the LICENSE file. | |||
4 | ||||
5 | #include "src/execution/frames.h" | |||
6 | ||||
7 | #include <memory> | |||
8 | #include <sstream> | |||
9 | ||||
10 | #include "src/base/bits.h" | |||
11 | #include "src/base/platform/wrappers.h" | |||
12 | #include "src/codegen/interface-descriptors.h" | |||
13 | #include "src/codegen/macro-assembler.h" | |||
14 | #include "src/codegen/register-configuration.h" | |||
15 | #include "src/codegen/safepoint-table.h" | |||
16 | #include "src/common/globals.h" | |||
17 | #include "src/deoptimizer/deoptimizer.h" | |||
18 | #include "src/execution/frames-inl.h" | |||
19 | #include "src/execution/vm-state-inl.h" | |||
20 | #include "src/ic/ic-stats.h" | |||
21 | #include "src/logging/counters.h" | |||
22 | #include "src/objects/code.h" | |||
23 | #include "src/objects/slots.h" | |||
24 | #include "src/objects/smi.h" | |||
25 | #include "src/objects/visitors.h" | |||
26 | #include "src/snapshot/embedded/embedded-data-inl.h" | |||
27 | #include "src/strings/string-stream.h" | |||
28 | #include "src/zone/zone-containers.h" | |||
29 | ||||
30 | #if V8_ENABLE_WEBASSEMBLY | |||
31 | #include "src/debug/debug-wasm-objects.h" | |||
32 | #include "src/wasm/wasm-code-manager.h" | |||
33 | #include "src/wasm/wasm-engine.h" | |||
34 | #include "src/wasm/wasm-objects-inl.h" | |||
35 | #endif // V8_ENABLE_WEBASSEMBLY | |||
36 | ||||
37 | namespace v8 { | |||
38 | namespace internal { | |||
39 | ||||
40 | ReturnAddressLocationResolver StackFrame::return_address_location_resolver_ = | |||
41 | nullptr; | |||
42 | ||||
43 | namespace { | |||
44 | ||||
45 | Address AddressOf(const StackHandler* handler) { | |||
46 | Address raw = handler->address(); | |||
47 | #ifdef V8_USE_ADDRESS_SANITIZER | |||
48 | // ASan puts C++-allocated StackHandler markers onto its fake stack. | |||
49 | // We work around that by storing the real stack address in the "padding" | |||
50 | // field. StackHandlers allocated from generated code have 0 as padding. | |||
51 | Address padding = | |||
52 | base::Memory<Address>(raw + StackHandlerConstants::kPaddingOffset); | |||
53 | if (padding != 0) return padding; | |||
54 | #endif | |||
55 | return raw; | |||
56 | } | |||
57 | ||||
58 | } // namespace | |||
59 | ||||
60 | // Iterator that supports traversing the stack handlers of a | |||
61 | // particular frame. Needs to know the top of the handler chain. | |||
62 | class StackHandlerIterator { | |||
63 | public: | |||
64 | StackHandlerIterator(const StackFrame* frame, StackHandler* handler) | |||
65 | : limit_(frame->fp()), handler_(handler) { | |||
66 | #if V8_ENABLE_WEBASSEMBLY | |||
67 | // Make sure the handler has already been unwound to this frame. With stack | |||
68 | // switching this is not equivalent to the inequality below, because the | |||
69 | // frame and the handler could be in different stacks. | |||
70 | DCHECK_IMPLIES(!FLAG_experimental_wasm_stack_switching, | |||
71 | frame->sp() <= AddressOf(handler)); | |||
72 | // For CWasmEntry frames, the handler was registered by the last C++ | |||
73 | // frame (Execution::CallWasm), so even though its address is already | |||
74 | // beyond the limit, we know we always want to unwind one handler. | |||
75 | if (frame->is_c_wasm_entry()) handler_ = handler_->next(); | |||
76 | #else | |||
77 | // Make sure the handler has already been unwound to this frame. | |||
78 | DCHECK_LE(frame->sp(), AddressOf(handler)); | |||
79 | #endif // V8_ENABLE_WEBASSEMBLY | |||
80 | } | |||
81 | ||||
82 | StackHandler* handler() const { return handler_; } | |||
83 | ||||
84 | bool done() { return handler_ == nullptr || AddressOf(handler_) > limit_; } | |||
85 | void Advance() { | |||
86 | DCHECK(!done()); | |||
87 | handler_ = handler_->next(); | |||
88 | } | |||
89 | ||||
90 | private: | |||
91 | const Address limit_; | |||
92 | StackHandler* handler_; | |||
93 | }; | |||
94 | ||||
95 | // ------------------------------------------------------------------------- | |||
96 | ||||
97 | #define INITIALIZE_SINGLETON(type, field) field##_(this), | |||
98 | StackFrameIteratorBase::StackFrameIteratorBase(Isolate* isolate, | |||
99 | bool can_access_heap_objects) | |||
100 | : isolate_(isolate), | |||
101 | STACK_FRAME_TYPE_LIST(INITIALIZE_SINGLETON) frame_(nullptr), | |||
102 | handler_(nullptr), | |||
103 | can_access_heap_objects_(can_access_heap_objects) {} | |||
104 | #undef INITIALIZE_SINGLETON | |||
105 | ||||
106 | StackFrameIterator::StackFrameIterator(Isolate* isolate) | |||
107 | : StackFrameIterator(isolate, isolate->thread_local_top()) {} | |||
108 | ||||
109 | StackFrameIterator::StackFrameIterator(Isolate* isolate, ThreadLocalTop* t) | |||
110 | : StackFrameIteratorBase(isolate, true) { | |||
111 | Reset(t); | |||
112 | } | |||
113 | #if V8_ENABLE_WEBASSEMBLY | |||
114 | StackFrameIterator::StackFrameIterator(Isolate* isolate, | |||
115 | wasm::StackMemory* stack) | |||
116 | : StackFrameIteratorBase(isolate, true) { | |||
117 | Reset(isolate->thread_local_top(), stack); | |||
118 | } | |||
119 | #endif | |||
120 | ||||
121 | void StackFrameIterator::Advance() { | |||
122 | DCHECK(!done()); | |||
123 | // Compute the state of the calling frame before restoring | |||
124 | // callee-saved registers and unwinding handlers. This allows the | |||
125 | // frame code that computes the caller state to access the top | |||
126 | // handler and the value of any callee-saved register if needed. | |||
127 | StackFrame::State state; | |||
128 | StackFrame::Type type = frame_->GetCallerState(&state); | |||
129 | ||||
130 | // Unwind handlers corresponding to the current frame. | |||
131 | StackHandlerIterator it(frame_, handler_); | |||
132 | while (!it.done()) it.Advance(); | |||
133 | handler_ = it.handler(); | |||
134 | ||||
135 | // Advance to the calling frame. | |||
136 | frame_ = SingletonFor(type, &state); | |||
137 | ||||
138 | // When we're done iterating over the stack frames, the handler | |||
139 | // chain must have been completely unwound. Except for wasm stack-switching: | |||
140 | // we stop at the end of the current segment. | |||
141 | #if V8_ENABLE_WEBASSEMBLY | |||
142 | DCHECK_IMPLIES(done() && !FLAG_experimental_wasm_stack_switching, | |||
143 | handler_ == nullptr); | |||
144 | #else | |||
145 | DCHECK_IMPLIES(done(), handler_ == nullptr); | |||
146 | #endif | |||
147 | } | |||
148 | ||||
149 | StackFrame* StackFrameIterator::Reframe() { | |||
150 | StackFrame::Type type = frame_->ComputeType(this, &frame_->state_); | |||
151 | frame_ = SingletonFor(type, &frame_->state_); | |||
152 | return frame(); | |||
153 | } | |||
154 | ||||
155 | void StackFrameIterator::Reset(ThreadLocalTop* top) { | |||
156 | StackFrame::State state; | |||
157 | StackFrame::Type type = | |||
158 | ExitFrame::GetStateForFramePointer(Isolate::c_entry_fp(top), &state); | |||
159 | handler_ = StackHandler::FromAddress(Isolate::handler(top)); | |||
160 | frame_ = SingletonFor(type, &state); | |||
161 | } | |||
162 | ||||
163 | #if V8_ENABLE_WEBASSEMBLY | |||
164 | void StackFrameIterator::Reset(ThreadLocalTop* top, wasm::StackMemory* stack) { | |||
165 | if (stack->jmpbuf()->sp == kNullAddress) { | |||
166 | // A null SP indicates that the computation associated with this stack has | |||
167 | // returned, leaving the stack segment empty. | |||
168 | return; | |||
169 | } | |||
170 | StackFrame::State state; | |||
171 | StackSwitchFrame::GetStateForJumpBuffer(stack->jmpbuf(), &state); | |||
172 | handler_ = StackHandler::FromAddress(Isolate::handler(top)); | |||
173 | frame_ = SingletonFor(StackFrame::STACK_SWITCH, &state); | |||
174 | } | |||
175 | #endif | |||
176 | ||||
177 | StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type, | |||
178 | StackFrame::State* state) { | |||
179 | StackFrame* result = SingletonFor(type); | |||
180 | DCHECK((!result) == (type == StackFrame::NO_FRAME_TYPE)); | |||
181 | if (result) result->state_ = *state; | |||
182 | return result; | |||
183 | } | |||
184 | ||||
185 | StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type) { | |||
186 | #define FRAME_TYPE_CASE(type, field) \ | |||
187 | case StackFrame::type: \ | |||
188 | return &field##_; | |||
189 | ||||
190 | switch (type) { | |||
191 | case StackFrame::NO_FRAME_TYPE: | |||
192 | return nullptr; | |||
193 | STACK_FRAME_TYPE_LIST(FRAME_TYPE_CASE) | |||
194 | default: | |||
195 | break; | |||
196 | } | |||
197 | return nullptr; | |||
198 | ||||
199 | #undef FRAME_TYPE_CASE | |||
200 | } | |||
201 | ||||
202 | // ------------------------------------------------------------------------- | |||
203 | ||||
204 | void TypedFrameWithJSLinkage::Iterate(RootVisitor* v) const { | |||
205 | IterateExpressions(v); | |||
206 | IteratePc(v, pc_address(), constant_pool_address(), LookupCode()); | |||
207 | } | |||
208 | ||||
209 | // ------------------------------------------------------------------------- | |||
210 | ||||
211 | void JavaScriptFrameIterator::Advance() { | |||
212 | do { | |||
213 | iterator_.Advance(); | |||
214 | } while (!iterator_.done() && !iterator_.frame()->is_java_script()); | |||
215 | } | |||
216 | ||||
217 | // ------------------------------------------------------------------------- | |||
218 | ||||
219 | StackTraceFrameIterator::StackTraceFrameIterator(Isolate* isolate) | |||
220 | : iterator_(isolate) { | |||
221 | if (!done() && !IsValidFrame(iterator_.frame())) Advance(); | |||
222 | } | |||
223 | ||||
224 | StackTraceFrameIterator::StackTraceFrameIterator(Isolate* isolate, | |||
225 | StackFrameId id) | |||
226 | : StackTraceFrameIterator(isolate) { | |||
227 | while (!done() && frame()->id() != id) Advance(); | |||
228 | } | |||
229 | ||||
230 | void StackTraceFrameIterator::Advance() { | |||
231 | do { | |||
232 | iterator_.Advance(); | |||
233 | } while (!done() && !IsValidFrame(iterator_.frame())); | |||
234 | } | |||
235 | ||||
236 | int StackTraceFrameIterator::FrameFunctionCount() const { | |||
237 | DCHECK(!done()); | |||
238 | if (!iterator_.frame()->is_optimized()) return 1; | |||
239 | std::vector<SharedFunctionInfo> infos; | |||
240 | OptimizedFrame::cast(iterator_.frame())->GetFunctions(&infos); | |||
241 | return static_cast<int>(infos.size()); | |||
242 | } | |||
243 | ||||
244 | FrameSummary StackTraceFrameIterator::GetTopValidFrame() const { | |||
245 | DCHECK(!done()); | |||
246 | // Like FrameSummary::GetTop, but additionally observes | |||
247 | // StackTraceFrameIterator filtering semantics. | |||
248 | std::vector<FrameSummary> frames; | |||
249 | frame()->Summarize(&frames); | |||
250 | if (is_javascript()) { | |||
251 | for (int i = static_cast<int>(frames.size()) - 1; i >= 0; i--) { | |||
252 | if (!IsValidJSFunction(*frames[i].AsJavaScript().function())) continue; | |||
253 | return frames[i]; | |||
254 | } | |||
255 | UNREACHABLE(); | |||
256 | } | |||
257 | #if V8_ENABLE_WEBASSEMBLY | |||
258 | if (is_wasm()) return frames.back(); | |||
259 | #endif // V8_ENABLE_WEBASSEMBLY | |||
260 | UNREACHABLE(); | |||
261 | } | |||
262 | ||||
263 | // static | |||
264 | bool StackTraceFrameIterator::IsValidFrame(StackFrame* frame) { | |||
265 | if (frame->is_java_script()) { | |||
266 | return IsValidJSFunction(static_cast<JavaScriptFrame*>(frame)->function()); | |||
267 | } | |||
268 | #if V8_ENABLE_WEBASSEMBLY | |||
269 | if (frame->is_wasm()) return true; | |||
270 | #endif // V8_ENABLE_WEBASSEMBLY | |||
271 | return false; | |||
272 | } | |||
273 | ||||
274 | // static | |||
275 | bool StackTraceFrameIterator::IsValidJSFunction(JSFunction f) { | |||
276 | if (!f.IsJSFunction()) return false; | |||
277 | return f.shared().IsSubjectToDebugging(); | |||
278 | } | |||
279 | ||||
280 | // ------------------------------------------------------------------------- | |||
281 | ||||
282 | namespace { | |||
283 | ||||
284 | bool IsInterpreterFramePc(Isolate* isolate, Address pc, | |||
285 | StackFrame::State* state) { | |||
286 | Builtin builtin = OffHeapInstructionStream::TryLookupCode(isolate, pc); | |||
287 | if (builtin != Builtin::kNoBuiltinId && | |||
288 | (builtin == Builtin::kInterpreterEntryTrampoline || | |||
289 | builtin == Builtin::kInterpreterEnterAtBytecode || | |||
290 | builtin == Builtin::kInterpreterEnterAtNextBytecode || | |||
291 | builtin == Builtin::kBaselineOrInterpreterEnterAtBytecode || | |||
292 | builtin == Builtin::kBaselineOrInterpreterEnterAtNextBytecode)) { | |||
293 | return true; | |||
294 | } else if (FLAG_interpreted_frames_native_stack) { | |||
295 | intptr_t marker = Memory<intptr_t>( | |||
296 | state->fp + CommonFrameConstants::kContextOrFrameTypeOffset); | |||
297 | MSAN_MEMORY_IS_INITIALIZED( | |||
298 | state->fp + StandardFrameConstants::kFunctionOffset, | |||
299 | kSystemPointerSize); | |||
300 | Object maybe_function = Object( | |||
301 | Memory<Address>(state->fp + StandardFrameConstants::kFunctionOffset)); | |||
302 | // There's no need to run a full ContainsSlow if we know the frame can't be | |||
303 | // an InterpretedFrame, so we do these fast checks first | |||
304 | if (StackFrame::IsTypeMarker(marker) || maybe_function.IsSmi()) { | |||
305 | return false; | |||
306 | } else if (!isolate->heap()->InSpaceSlow(pc, CODE_SPACE)) { | |||
307 | return false; | |||
308 | } | |||
309 | Code interpreter_entry_trampoline = | |||
310 | isolate->heap()->GcSafeFindCodeForInnerPointer(pc); | |||
311 | return interpreter_entry_trampoline.is_interpreter_trampoline_builtin(); | |||
312 | } else { | |||
313 | return false; | |||
314 | } | |||
315 | } | |||
316 | ||||
317 | } // namespace | |||
318 | ||||
319 | bool SafeStackFrameIterator::IsNoFrameBytecodeHandlerPc(Isolate* isolate, | |||
320 | Address pc, | |||
321 | Address fp) const { | |||
322 | // Return false for builds with non-embedded bytecode handlers. | |||
323 | if (Isolate::CurrentEmbeddedBlobCode() == nullptr) return false; | |||
324 | ||||
325 | EmbeddedData d = EmbeddedData::FromBlob(isolate); | |||
326 | if (pc < d.InstructionStartOfBytecodeHandlers() || | |||
327 | pc >= d.InstructionEndOfBytecodeHandlers()) { | |||
328 | // Not a bytecode handler pc address. | |||
329 | return false; | |||
330 | } | |||
331 | ||||
332 | if (!IsValidStackAddress(fp + | |||
333 | CommonFrameConstants::kContextOrFrameTypeOffset)) { | |||
334 | return false; | |||
335 | } | |||
336 | ||||
337 | // Check if top stack frame is a bytecode handler stub frame. | |||
338 | MSAN_MEMORY_IS_INITIALIZED( | |||
339 | fp + CommonFrameConstants::kContextOrFrameTypeOffset, kSystemPointerSize); | |||
340 | intptr_t marker = | |||
341 | Memory<intptr_t>(fp + CommonFrameConstants::kContextOrFrameTypeOffset); | |||
342 | if (StackFrame::IsTypeMarker(marker) && | |||
343 | StackFrame::MarkerToType(marker) == StackFrame::STUB) { | |||
344 | // Bytecode handler built a frame. | |||
345 | return false; | |||
346 | } | |||
347 | return true; | |||
348 | } | |||
349 | ||||
350 | SafeStackFrameIterator::SafeStackFrameIterator(Isolate* isolate, Address pc, | |||
351 | Address fp, Address sp, | |||
352 | Address lr, Address js_entry_sp) | |||
353 | : StackFrameIteratorBase(isolate, false), | |||
354 | low_bound_(sp), | |||
355 | high_bound_(js_entry_sp), | |||
356 | top_frame_type_(StackFrame::NO_FRAME_TYPE), | |||
357 | top_context_address_(kNullAddress), | |||
358 | external_callback_scope_(isolate->external_callback_scope()), | |||
359 | top_link_register_(lr) { | |||
360 | StackFrame::State state; | |||
361 | StackFrame::Type type; | |||
362 | ThreadLocalTop* top = isolate->thread_local_top(); | |||
363 | bool advance_frame = true; | |||
364 | ||||
365 | Address fast_c_fp = isolate->isolate_data()->fast_c_call_caller_fp(); | |||
366 | uint8_t stack_is_iterable = isolate->isolate_data()->stack_is_iterable(); | |||
367 | if (!stack_is_iterable) { | |||
368 | frame_ = nullptr; | |||
369 | return; | |||
370 | } | |||
371 | // 'Fast C calls' are a special type of C call where we call directly from | |||
372 | // JS to C without an exit frame in between. The CEntryStub is responsible | |||
373 | // for setting Isolate::c_entry_fp, meaning that it won't be set for fast C | |||
374 | // calls. To keep the stack iterable, we store the FP and PC of the caller | |||
375 | // of the fast C call on the isolate. This is guaranteed to be the topmost | |||
376 | // JS frame, because fast C calls cannot call back into JS. We start | |||
377 | // iterating the stack from this topmost JS frame. | |||
378 | if (fast_c_fp) { | |||
379 | DCHECK_NE(kNullAddress, isolate->isolate_data()->fast_c_call_caller_pc()); | |||
380 | type = StackFrame::Type::OPTIMIZED; | |||
381 | top_frame_type_ = type; | |||
382 | state.fp = fast_c_fp; | |||
383 | state.sp = sp; | |||
384 | state.pc_address = reinterpret_cast<Address*>( | |||
385 | isolate->isolate_data()->fast_c_call_caller_pc_address()); | |||
386 | advance_frame = false; | |||
387 | } else if (IsValidTop(top)) { | |||
388 | type = ExitFrame::GetStateForFramePointer(Isolate::c_entry_fp(top), &state); | |||
389 | top_frame_type_ = type; | |||
390 | } else if (IsValidStackAddress(fp)) { | |||
391 | DCHECK_NE(fp, kNullAddress); | |||
392 | state.fp = fp; | |||
393 | state.sp = sp; | |||
394 | state.pc_address = StackFrame::ResolveReturnAddressLocation( | |||
395 | reinterpret_cast<Address*>(CommonFrame::ComputePCAddress(fp))); | |||
396 | ||||
397 | // If the current PC is in a bytecode handler, the top stack frame isn't | |||
398 | // the bytecode handler's frame and the top of stack or link register is a | |||
399 | // return address into the interpreter entry trampoline, then we are likely | |||
400 | // in a bytecode handler with elided frame. In that case, set the PC | |||
401 | // properly and make sure we do not drop the frame. | |||
402 | bool is_no_frame_bytecode_handler = false; | |||
403 | if (IsNoFrameBytecodeHandlerPc(isolate, pc, fp)) { | |||
404 | Address* tos_location = nullptr; | |||
405 | if (top_link_register_) { | |||
406 | tos_location = &top_link_register_; | |||
407 | } else if (IsValidStackAddress(sp)) { | |||
408 | MSAN_MEMORY_IS_INITIALIZED(sp, kSystemPointerSize); | |||
409 | tos_location = reinterpret_cast<Address*>(sp); | |||
410 | } | |||
411 | ||||
412 | if (IsInterpreterFramePc(isolate, *tos_location, &state)) { | |||
413 | state.pc_address = tos_location; | |||
414 | is_no_frame_bytecode_handler = true; | |||
415 | advance_frame = false; | |||
416 | } | |||
417 | } | |||
418 | ||||
419 | // StackFrame::ComputeType will read both kContextOffset and kMarkerOffset, | |||
420 | // we check only that kMarkerOffset is within the stack bounds and do | |||
421 | // compile time check that kContextOffset slot is pushed on the stack before | |||
422 | // kMarkerOffset. | |||
423 | STATIC_ASSERT(StandardFrameConstants::kFunctionOffset < | |||
424 | StandardFrameConstants::kContextOffset); | |||
425 | Address frame_marker = fp + StandardFrameConstants::kFunctionOffset; | |||
426 | if (IsValidStackAddress(frame_marker)) { | |||
427 | if (is_no_frame_bytecode_handler) { | |||
428 | type = StackFrame::INTERPRETED; | |||
429 | } else { | |||
430 | type = StackFrame::ComputeType(this, &state); | |||
431 | } | |||
432 | top_frame_type_ = type; | |||
433 | MSAN_MEMORY_IS_INITIALIZED( | |||
434 | fp + CommonFrameConstants::kContextOrFrameTypeOffset, | |||
435 | kSystemPointerSize); | |||
436 | Address type_or_context_address = | |||
437 | Memory<Address>(fp + CommonFrameConstants::kContextOrFrameTypeOffset); | |||
438 | if (!StackFrame::IsTypeMarker(type_or_context_address)) | |||
439 | top_context_address_ = type_or_context_address; | |||
440 | } else { | |||
441 | // Mark the frame as OPTIMIZED if we cannot determine its type. | |||
442 | // We chose OPTIMIZED rather than INTERPRETED because it's closer to | |||
443 | // the original value of StackFrame::JAVA_SCRIPT here, in that JAVA_SCRIPT | |||
444 | // referred to full-codegen frames (now removed from the tree), and | |||
445 | // OPTIMIZED refers to turbofan frames, both of which are generated | |||
446 | // code. INTERPRETED frames refer to bytecode. | |||
447 | // The frame will be skipped anyway. | |||
448 | type = StackFrame::OPTIMIZED; | |||
449 | // Top frame is incomplete so we cannot reliably determine its type. | |||
450 | top_frame_type_ = StackFrame::NO_FRAME_TYPE; | |||
451 | } | |||
452 | } else { | |||
453 | return; | |||
454 | } | |||
455 | frame_ = SingletonFor(type, &state); | |||
456 | if (advance_frame && frame_) Advance(); | |||
457 | } | |||
458 | ||||
459 | bool SafeStackFrameIterator::IsValidTop(ThreadLocalTop* top) const { | |||
460 | Address c_entry_fp = Isolate::c_entry_fp(top); | |||
461 | if (!IsValidExitFrame(c_entry_fp)) return false; | |||
462 | // There should be at least one JS_ENTRY stack handler. | |||
463 | Address handler = Isolate::handler(top); | |||
464 | if (handler == kNullAddress) return false; | |||
465 | // Check that there are no js frames on top of the native frames. | |||
466 | return c_entry_fp < handler; | |||
467 | } | |||
468 | ||||
469 | void SafeStackFrameIterator::AdvanceOneFrame() { | |||
470 | DCHECK(!done()); | |||
471 | StackFrame* last_frame = frame_; | |||
472 | Address last_sp = last_frame->sp(), last_fp = last_frame->fp(); | |||
473 | ||||
474 | // Before advancing to the next stack frame, perform pointer validity tests. | |||
475 | if (!IsValidFrame(last_frame) || !IsValidCaller(last_frame)) { | |||
476 | frame_ = nullptr; | |||
477 | return; | |||
478 | } | |||
479 | ||||
480 | // Advance to the previous frame. | |||
481 | StackFrame::State state; | |||
482 | StackFrame::Type type = frame_->GetCallerState(&state); | |||
483 | frame_ = SingletonFor(type, &state); | |||
484 | if (!frame_) return; | |||
485 | ||||
486 | // Check that we have actually moved to the previous frame in the stack. | |||
487 | if (frame_->sp() <= last_sp || frame_->fp() <= last_fp) { | |||
488 | frame_ = nullptr; | |||
489 | } | |||
490 | } | |||
491 | ||||
492 | bool SafeStackFrameIterator::IsValidFrame(StackFrame* frame) const { | |||
493 | return IsValidStackAddress(frame->sp()) && IsValidStackAddress(frame->fp()); | |||
494 | } | |||
495 | ||||
496 | bool SafeStackFrameIterator::IsValidCaller(StackFrame* frame) { | |||
497 | StackFrame::State state; | |||
498 | if (frame->is_entry() || frame->is_construct_entry()) { | |||
499 | // See EntryFrame::GetCallerState. It computes the caller FP address | |||
500 | // and calls ExitFrame::GetStateForFramePointer on it. We need to be | |||
501 | // sure that caller FP address is valid. | |||
502 | Address caller_fp = | |||
503 | Memory<Address>(frame->fp() + EntryFrameConstants::kCallerFPOffset); | |||
504 | if (!IsValidExitFrame(caller_fp)) return false; | |||
505 | } | |||
506 | frame->ComputeCallerState(&state); | |||
507 | return IsValidStackAddress(state.sp) && IsValidStackAddress(state.fp) && | |||
508 | SingletonFor(frame->GetCallerState(&state)) != nullptr; | |||
509 | } | |||
510 | ||||
511 | bool SafeStackFrameIterator::IsValidExitFrame(Address fp) const { | |||
512 | if (!IsValidStackAddress(fp)) return false; | |||
513 | Address sp = ExitFrame::ComputeStackPointer(fp); | |||
514 | if (!IsValidStackAddress(sp)) return false; | |||
515 | StackFrame::State state; | |||
516 | ExitFrame::FillState(fp, sp, &state); | |||
517 | MSAN_MEMORY_IS_INITIALIZED(state.pc_address, sizeof(state.pc_address)); | |||
518 | return *state.pc_address != kNullAddress; | |||
519 | } | |||
520 | ||||
521 | void SafeStackFrameIterator::Advance() { | |||
522 | while (true) { | |||
523 | AdvanceOneFrame(); | |||
524 | if (done()) break; | |||
525 | ExternalCallbackScope* last_callback_scope = nullptr; | |||
526 | while (external_callback_scope_ != nullptr && | |||
527 | external_callback_scope_->scope_address() < frame_->fp()) { | |||
528 | // As long as the setup of a frame is not atomic, we may happen to be | |||
529 | // in an interval where an ExternalCallbackScope is already created, | |||
530 | // but the frame is not yet entered. So we are actually observing | |||
531 | // the previous frame. | |||
532 | // Skip all the ExternalCallbackScope's that are below the current fp. | |||
533 | last_callback_scope = external_callback_scope_; | |||
534 | external_callback_scope_ = external_callback_scope_->previous(); | |||
535 | } | |||
536 | if (frame_->is_java_script()) break; | |||
537 | #if V8_ENABLE_WEBASSEMBLY | |||
538 | if (frame_->is_wasm() || frame_->is_wasm_to_js() || | |||
539 | frame_->is_js_to_wasm()) { | |||
540 | break; | |||
541 | } | |||
542 | #endif // V8_ENABLE_WEBASSEMBLY | |||
543 | if (frame_->is_exit() || frame_->is_builtin_exit()) { | |||
544 | // Some of the EXIT frames may have ExternalCallbackScope allocated on | |||
545 | // top of them. In that case the scope corresponds to the first EXIT | |||
546 | // frame beneath it. There may be other EXIT frames on top of the | |||
547 | // ExternalCallbackScope, just skip them as we cannot collect any useful | |||
548 | // information about them. | |||
549 | if (last_callback_scope) { | |||
550 | frame_->state_.pc_address = | |||
551 | last_callback_scope->callback_entrypoint_address(); | |||
552 | } | |||
553 | break; | |||
554 | } | |||
555 | } | |||
556 | } | |||
557 | ||||
558 | // ------------------------------------------------------------------------- | |||
559 | ||||
560 | namespace { | |||
561 | Code GetContainingCode(Isolate* isolate, Address pc) { | |||
562 | return isolate->inner_pointer_to_code_cache()->GetCacheEntry(pc)->code; | |||
563 | } | |||
564 | } // namespace | |||
565 | ||||
566 | Code StackFrame::LookupCode() const { | |||
567 | Code result = GetContainingCode(isolate(), pc()); | |||
568 | DCHECK_GE(pc(), result.InstructionStart(isolate(), pc())); | |||
569 | DCHECK_LT(pc(), result.InstructionEnd(isolate(), pc())); | |||
570 | return result; | |||
571 | } | |||
572 | ||||
573 | void StackFrame::IteratePc(RootVisitor* v, Address* pc_address, | |||
574 | Address* constant_pool_address, Code holder) const { | |||
575 | Address old_pc = ReadPC(pc_address); | |||
576 | DCHECK(ReadOnlyHeap::Contains(holder) || | |||
577 | holder.GetHeap()->GcSafeCodeContains(holder, old_pc)); | |||
578 | unsigned pc_offset = holder.GetOffsetFromInstructionStart(isolate_, old_pc); | |||
579 | Object code = holder; | |||
580 | v->VisitRunningCode(FullObjectSlot(&code)); | |||
581 | if (code == holder) return; | |||
582 | holder = Code::unchecked_cast(code); | |||
583 | Address pc = holder.InstructionStart(isolate_, old_pc) + pc_offset; | |||
584 | // TODO(v8:10026): avoid replacing a signed pointer. | |||
585 | PointerAuthentication::ReplacePC(pc_address, pc, kSystemPointerSize); | |||
586 | if (FLAG_enable_embedded_constant_pool && constant_pool_address) { | |||
587 | *constant_pool_address = holder.constant_pool(); | |||
588 | } | |||
589 | } | |||
590 | ||||
591 | void StackFrame::SetReturnAddressLocationResolver( | |||
592 | ReturnAddressLocationResolver resolver) { | |||
593 | DCHECK_NULL(return_address_location_resolver_); | |||
594 | return_address_location_resolver_ = resolver; | |||
595 | } | |||
596 | ||||
597 | StackFrame::Type StackFrame::ComputeType(const StackFrameIteratorBase* iterator, | |||
598 | State* state) { | |||
599 | #if V8_ENABLE_WEBASSEMBLY | |||
600 | if (state->fp == kNullAddress) { | |||
601 | DCHECK(FLAG_experimental_wasm_stack_switching); | |||
602 | return NO_FRAME_TYPE; | |||
603 | } | |||
604 | #endif | |||
605 | ||||
606 | MSAN_MEMORY_IS_INITIALIZED( | |||
607 | state->fp + CommonFrameConstants::kContextOrFrameTypeOffset, | |||
608 | kSystemPointerSize); | |||
609 | intptr_t marker = Memory<intptr_t>( | |||
610 | state->fp + CommonFrameConstants::kContextOrFrameTypeOffset); | |||
611 | Address pc = StackFrame::ReadPC(state->pc_address); | |||
612 | if (!iterator->can_access_heap_objects_) { | |||
613 | // TODO(titzer): "can_access_heap_objects" is kind of bogus. It really | |||
614 | // means that we are being called from the profiler, which can interrupt | |||
615 | // the VM with a signal at any arbitrary instruction, with essentially | |||
616 | // anything on the stack. So basically none of these checks are 100% | |||
617 | // reliable. | |||
618 | MSAN_MEMORY_IS_INITIALIZED( | |||
619 | state->fp + StandardFrameConstants::kFunctionOffset, | |||
620 | kSystemPointerSize); | |||
621 | Object maybe_function = Object( | |||
622 | Memory<Address>(state->fp + StandardFrameConstants::kFunctionOffset)); | |||
623 | if (!StackFrame::IsTypeMarker(marker)) { | |||
624 | if (maybe_function.IsSmi()) { | |||
625 | return NATIVE; | |||
626 | } else if (IsInterpreterFramePc(iterator->isolate(), pc, state)) { | |||
627 | return INTERPRETED; | |||
628 | } else { | |||
629 | return OPTIMIZED; | |||
630 | } | |||
631 | } | |||
632 | } else { | |||
633 | #if V8_ENABLE_WEBASSEMBLY | |||
634 | // If the {pc} does not point into WebAssembly code we can rely on the | |||
635 | // returned {wasm_code} to be null and fall back to {GetContainingCode}. | |||
636 | wasm::WasmCodeRefScope code_ref_scope; | |||
637 | if (wasm::WasmCode* wasm_code = | |||
638 | wasm::GetWasmCodeManager()->LookupCode(pc)) { | |||
639 | switch (wasm_code->kind()) { | |||
640 | case wasm::WasmCode::kWasmFunction: | |||
641 | return WASM; | |||
642 | case wasm::WasmCode::kWasmToCapiWrapper: | |||
643 | return WASM_EXIT; | |||
644 | case wasm::WasmCode::kWasmToJsWrapper: | |||
645 | return WASM_TO_JS; | |||
646 | default: | |||
647 | UNREACHABLE(); | |||
648 | } | |||
649 | } | |||
650 | #endif // V8_ENABLE_WEBASSEMBLY | |||
651 | ||||
652 | // Look up the code object to figure out the type of the stack frame. | |||
653 | Code code_obj = GetContainingCode(iterator->isolate(), pc); | |||
654 | if (!code_obj.is_null()) { | |||
655 | switch (code_obj.kind()) { | |||
656 | case CodeKind::BUILTIN: | |||
657 | if (StackFrame::IsTypeMarker(marker)) break; | |||
658 | if (code_obj.is_interpreter_trampoline_builtin() || | |||
659 | // Frames for baseline entry trampolines on the stack are still | |||
660 | // interpreted frames. | |||
661 | code_obj.is_baseline_trampoline_builtin()) { | |||
662 | return INTERPRETED; | |||
663 | } | |||
664 | if (code_obj.is_baseline_leave_frame_builtin()) { | |||
665 | return BASELINE; | |||
666 | } | |||
667 | if (code_obj.is_turbofanned()) { | |||
668 | // TODO(bmeurer): We treat frames for BUILTIN Code objects as | |||
669 | // OptimizedFrame for now (all the builtins with JavaScript | |||
670 | // linkage are actually generated with TurboFan currently, so | |||
671 | // this is sound). | |||
672 | return OPTIMIZED; | |||
673 | } | |||
674 | return BUILTIN; | |||
675 | case CodeKind::TURBOFAN: | |||
676 | case CodeKind::MAGLEV: | |||
677 | return OPTIMIZED; | |||
678 | case CodeKind::BASELINE: | |||
679 | return Type::BASELINE; | |||
680 | #if V8_ENABLE_WEBASSEMBLY | |||
681 | case CodeKind::JS_TO_WASM_FUNCTION: | |||
682 | return JS_TO_WASM; | |||
683 | case CodeKind::JS_TO_JS_FUNCTION: | |||
684 | return STUB; | |||
685 | case CodeKind::C_WASM_ENTRY: | |||
686 | return C_WASM_ENTRY; | |||
687 | case CodeKind::WASM_TO_JS_FUNCTION: | |||
688 | return WASM_TO_JS; | |||
689 | case CodeKind::WASM_FUNCTION: | |||
690 | case CodeKind::WASM_TO_CAPI_FUNCTION: | |||
691 | // Never appear as on-heap {Code} objects. | |||
692 | UNREACHABLE(); | |||
693 | #endif // V8_ENABLE_WEBASSEMBLY | |||
694 | default: | |||
695 | // All other types should have an explicit marker | |||
696 | break; | |||
697 | } | |||
698 | } else { | |||
699 | return NATIVE; | |||
700 | } | |||
701 | } | |||
702 | DCHECK(StackFrame::IsTypeMarker(marker)); | |||
703 | StackFrame::Type candidate = StackFrame::MarkerToType(marker); | |||
704 | switch (candidate) { | |||
705 | case ENTRY: | |||
706 | case CONSTRUCT_ENTRY: | |||
707 | case EXIT: | |||
708 | case BUILTIN_CONTINUATION: | |||
709 | case JAVA_SCRIPT_BUILTIN_CONTINUATION: | |||
710 | case JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH: | |||
711 | case BUILTIN_EXIT: | |||
712 | case STUB: | |||
713 | case INTERNAL: | |||
714 | case CONSTRUCT: | |||
715 | #if V8_ENABLE_WEBASSEMBLY | |||
716 | case WASM_TO_JS: | |||
717 | case WASM: | |||
718 | case WASM_COMPILE_LAZY: | |||
719 | case WASM_EXIT: | |||
720 | case WASM_DEBUG_BREAK: | |||
721 | case JS_TO_WASM: | |||
722 | case STACK_SWITCH: | |||
723 | #endif // V8_ENABLE_WEBASSEMBLY | |||
724 | return candidate; | |||
725 | case OPTIMIZED: | |||
726 | case INTERPRETED: | |||
727 | default: | |||
728 | // Unoptimized and optimized JavaScript frames, including | |||
729 | // interpreted frames, should never have a StackFrame::Type | |||
730 | // marker. If we find one, we're likely being called from the | |||
731 | // profiler in a bogus stack frame. | |||
732 | return NATIVE; | |||
733 | } | |||
734 | } | |||
735 | ||||
736 | #ifdef DEBUG | |||
737 | bool StackFrame::can_access_heap_objects() const { | |||
738 | return iterator_->can_access_heap_objects_; | |||
739 | } | |||
740 | #endif | |||
741 | ||||
742 | StackFrame::Type StackFrame::GetCallerState(State* state) const { | |||
743 | ComputeCallerState(state); | |||
744 | return ComputeType(iterator_, state); | |||
745 | } | |||
746 | ||||
747 | Address CommonFrame::GetCallerStackPointer() const { | |||
748 | return fp() + CommonFrameConstants::kCallerSPOffset; | |||
749 | } | |||
750 | ||||
751 | void NativeFrame::ComputeCallerState(State* state) const { | |||
752 | state->sp = caller_sp(); | |||
753 | state->fp = Memory<Address>(fp() + CommonFrameConstants::kCallerFPOffset); | |||
754 | state->pc_address = ResolveReturnAddressLocation( | |||
755 | reinterpret_cast<Address*>(fp() + CommonFrameConstants::kCallerPCOffset)); | |||
756 | state->callee_pc_address = nullptr; | |||
757 | state->constant_pool_address = nullptr; | |||
758 | } | |||
759 | ||||
760 | Code EntryFrame::unchecked_code() const { | |||
761 | return FromCodeT(isolate()->builtins()->code(Builtin::kJSEntry)); | |||
762 | } | |||
763 | ||||
764 | void EntryFrame::ComputeCallerState(State* state) const { | |||
765 | GetCallerState(state); | |||
766 | } | |||
767 | ||||
768 | StackFrame::Type EntryFrame::GetCallerState(State* state) const { | |||
769 | const int offset = EntryFrameConstants::kCallerFPOffset; | |||
770 | Address fp = Memory<Address>(this->fp() + offset); | |||
771 | return ExitFrame::GetStateForFramePointer(fp, state); | |||
772 | } | |||
773 | ||||
774 | #if V8_ENABLE_WEBASSEMBLY | |||
775 | StackFrame::Type CWasmEntryFrame::GetCallerState(State* state) const { | |||
776 | const int offset = CWasmEntryFrameConstants::kCEntryFPOffset; | |||
777 | Address fp = Memory<Address>(this->fp() + offset); | |||
778 | return ExitFrame::GetStateForFramePointer(fp, state); | |||
779 | } | |||
780 | #endif // V8_ENABLE_WEBASSEMBLY | |||
781 | ||||
782 | Code ConstructEntryFrame::unchecked_code() const { | |||
783 | return FromCodeT(isolate()->builtins()->code(Builtin::kJSConstructEntry)); | |||
784 | } | |||
785 | ||||
786 | void ExitFrame::ComputeCallerState(State* state) const { | |||
787 | // Set up the caller state. | |||
788 | state->sp = caller_sp(); | |||
789 | state->fp = Memory<Address>(fp() + ExitFrameConstants::kCallerFPOffset); | |||
790 | state->pc_address = ResolveReturnAddressLocation( | |||
791 | reinterpret_cast<Address*>(fp() + ExitFrameConstants::kCallerPCOffset)); | |||
792 | state->callee_pc_address = nullptr; | |||
793 | if (FLAG_enable_embedded_constant_pool) { | |||
794 | state->constant_pool_address = reinterpret_cast<Address*>( | |||
795 | fp() + ExitFrameConstants::kConstantPoolOffset); | |||
796 | } | |||
797 | } | |||
798 | ||||
799 | void ExitFrame::Iterate(RootVisitor* v) const { | |||
800 | // The arguments are traversed as part of the expression stack of | |||
801 | // the calling frame. | |||
802 | IteratePc(v, pc_address(), constant_pool_address(), LookupCode()); | |||
803 | } | |||
804 | ||||
805 | StackFrame::Type ExitFrame::GetStateForFramePointer(Address fp, State* state) { | |||
806 | if (fp == 0) return NO_FRAME_TYPE; | |||
807 | StackFrame::Type type = ComputeFrameType(fp); | |||
808 | #if V8_ENABLE_WEBASSEMBLY | |||
809 | Address sp = type == WASM_EXIT ? WasmExitFrame::ComputeStackPointer(fp) | |||
810 | : ExitFrame::ComputeStackPointer(fp); | |||
811 | #else | |||
812 | Address sp = ExitFrame::ComputeStackPointer(fp); | |||
813 | #endif // V8_ENABLE_WEBASSEMBLY | |||
814 | FillState(fp, sp, state); | |||
815 | DCHECK_NE(*state->pc_address, kNullAddress); | |||
816 | return type; | |||
817 | } | |||
818 | ||||
819 | StackFrame::Type ExitFrame::ComputeFrameType(Address fp) { | |||
820 | // Distinguish between regular and builtin exit frames. | |||
821 | // Default to EXIT in all hairy cases (e.g., when called from profiler). | |||
822 | const int offset = ExitFrameConstants::kFrameTypeOffset; | |||
823 | Object marker(Memory<Address>(fp + offset)); | |||
824 | ||||
825 | if (!marker.IsSmi()) { | |||
826 | return EXIT; | |||
827 | } | |||
828 | ||||
829 | intptr_t marker_int = bit_cast<intptr_t>(marker); | |||
830 | ||||
831 | StackFrame::Type frame_type = static_cast<StackFrame::Type>(marker_int >> 1); | |||
832 | switch (frame_type) { | |||
833 | case BUILTIN_EXIT: | |||
834 | #if V8_ENABLE_WEBASSEMBLY | |||
835 | case WASM_EXIT: | |||
836 | case STACK_SWITCH: | |||
837 | #endif // V8_ENABLE_WEBASSEMBLY | |||
838 | return frame_type; | |||
839 | default: | |||
840 | return EXIT; | |||
841 | } | |||
842 | } | |||
843 | ||||
844 | Address ExitFrame::ComputeStackPointer(Address fp) { | |||
845 | MSAN_MEMORY_IS_INITIALIZED(fp + ExitFrameConstants::kSPOffset, | |||
846 | kSystemPointerSize); | |||
847 | return Memory<Address>(fp + ExitFrameConstants::kSPOffset); | |||
848 | } | |||
849 | ||||
850 | #if V8_ENABLE_WEBASSEMBLY | |||
851 | Address WasmExitFrame::ComputeStackPointer(Address fp) { | |||
852 | // For WASM_EXIT frames, {sp} is only needed for finding the PC slot, | |||
853 | // everything else is handled via safepoint information. | |||
854 | Address sp = fp + WasmExitFrameConstants::kWasmInstanceOffset; | |||
855 | DCHECK_EQ(sp - 1 * kPCOnStackSize, | |||
856 | fp + WasmExitFrameConstants::kCallingPCOffset); | |||
857 | return sp; | |||
858 | } | |||
859 | #endif // V8_ENABLE_WEBASSEMBLY | |||
860 | ||||
861 | void ExitFrame::FillState(Address fp, Address sp, State* state) { | |||
862 | state->sp = sp; | |||
863 | state->fp = fp; | |||
864 | state->pc_address = ResolveReturnAddressLocation( | |||
865 | reinterpret_cast<Address*>(sp - 1 * kPCOnStackSize)); | |||
866 | state->callee_pc_address = nullptr; | |||
867 | // The constant pool recorded in the exit frame is not associated | |||
868 | // with the pc in this state (the return address into a C entry | |||
869 | // stub). ComputeCallerState will retrieve the constant pool | |||
870 | // together with the associated caller pc. | |||
871 | state->constant_pool_address = nullptr; | |||
872 | } | |||
873 | ||||
874 | void BuiltinExitFrame::Summarize(std::vector<FrameSummary>* frames) const { | |||
875 | DCHECK(frames->empty()); | |||
876 | Handle<FixedArray> parameters = GetParameters(); | |||
877 | DisallowGarbageCollection no_gc; | |||
878 | Code code = LookupCode(); | |||
879 | int code_offset = code.GetOffsetFromInstructionStart(isolate(), pc()); | |||
880 | FrameSummary::JavaScriptFrameSummary summary( | |||
881 | isolate(), receiver(), function(), AbstractCode::cast(code), code_offset, | |||
882 | IsConstructor(), *parameters); | |||
883 | frames->push_back(summary); | |||
884 | } | |||
885 | ||||
886 | JSFunction BuiltinExitFrame::function() const { | |||
887 | return JSFunction::cast(target_slot_object()); | |||
888 | } | |||
889 | ||||
890 | Object BuiltinExitFrame::receiver() const { return receiver_slot_object(); } | |||
891 | ||||
892 | Object BuiltinExitFrame::GetParameter(int i) const { | |||
893 | DCHECK(i >= 0 && i < ComputeParametersCount()); | |||
894 | int offset = | |||
895 | BuiltinExitFrameConstants::kFirstArgumentOffset + i * kSystemPointerSize; | |||
896 | return Object(Memory<Address>(fp() + offset)); | |||
897 | } | |||
898 | ||||
899 | int BuiltinExitFrame::ComputeParametersCount() const { | |||
900 | Object argc_slot = argc_slot_object(); | |||
901 | DCHECK(argc_slot.IsSmi()); | |||
902 | // Argc also counts the receiver, target, new target, and argc itself as args, | |||
903 | // therefore the real argument count is argc - 4. | |||
904 | int argc = Smi::ToInt(argc_slot) - 4; | |||
905 | DCHECK_GE(argc, 0); | |||
906 | return argc; | |||
907 | } | |||
908 | ||||
909 | Handle<FixedArray> BuiltinExitFrame::GetParameters() const { | |||
910 | if (V8_LIKELY(!FLAG_detailed_error_stack_trace)) { | |||
911 | return isolate()->factory()->empty_fixed_array(); | |||
912 | } | |||
913 | int param_count = ComputeParametersCount(); | |||
914 | auto parameters = isolate()->factory()->NewFixedArray(param_count); | |||
915 | for (int i = 0; i < param_count; i++) { | |||
916 | parameters->set(i, GetParameter(i)); | |||
917 | } | |||
918 | return parameters; | |||
919 | } | |||
920 | ||||
921 | bool BuiltinExitFrame::IsConstructor() const { | |||
922 | return !new_target_slot_object().IsUndefined(isolate()); | |||
923 | } | |||
924 | ||||
925 | namespace { | |||
926 | void PrintIndex(StringStream* accumulator, StackFrame::PrintMode mode, | |||
927 | int index) { | |||
928 | accumulator->Add((mode == StackFrame::OVERVIEW) ? "%5d: " : "[%d]: ", index); | |||
929 | } | |||
930 | ||||
931 | const char* StringForStackFrameType(StackFrame::Type type) { | |||
932 | switch (type) { | |||
933 | #define CASE(value, name) \ | |||
934 | case StackFrame::value: \ | |||
935 | return #name; | |||
936 | STACK_FRAME_TYPE_LIST(CASE) | |||
937 | #undef CASE | |||
938 | default: | |||
939 | UNREACHABLE(); | |||
940 | } | |||
941 | } | |||
942 | } // namespace | |||
943 | ||||
944 | void StackFrame::Print(StringStream* accumulator, PrintMode mode, | |||
945 | int index) const { | |||
946 | DisallowGarbageCollection no_gc; | |||
947 | PrintIndex(accumulator, mode, index); | |||
948 | accumulator->Add(StringForStackFrameType(type())); | |||
949 | accumulator->Add(" [pc: %p]\n", reinterpret_cast<void*>(pc())); | |||
950 | } | |||
951 | ||||
952 | void BuiltinExitFrame::Print(StringStream* accumulator, PrintMode mode, | |||
953 | int index) const { | |||
954 | DisallowGarbageCollection no_gc; | |||
955 | Object receiver = this->receiver(); | |||
956 | JSFunction function = this->function(); | |||
957 | ||||
958 | accumulator->PrintSecurityTokenIfChanged(function); | |||
959 | PrintIndex(accumulator, mode, index); | |||
960 | accumulator->Add("builtin exit frame: "); | |||
961 | Code code; | |||
962 | if (IsConstructor()) accumulator->Add("new "); | |||
963 | accumulator->PrintFunction(function, receiver, &code); | |||
964 | ||||
965 | accumulator->Add("(this=%o", receiver); | |||
966 | ||||
967 | // Print the parameters. | |||
968 | int parameters_count = ComputeParametersCount(); | |||
969 | for (int i = 0; i < parameters_count; i++) { | |||
970 | accumulator->Add(",%o", GetParameter(i)); | |||
971 | } | |||
972 | ||||
973 | accumulator->Add(")\n\n"); | |||
974 | } | |||
975 | ||||
976 | Address CommonFrame::GetExpressionAddress(int n) const { | |||
977 | const int offset = StandardFrameConstants::kExpressionsOffset; | |||
978 | return fp() + offset - n * kSystemPointerSize; | |||
979 | } | |||
980 | ||||
981 | Address UnoptimizedFrame::GetExpressionAddress(int n) const { | |||
982 | const int offset = UnoptimizedFrameConstants::kExpressionsOffset; | |||
983 | return fp() + offset - n * kSystemPointerSize; | |||
984 | } | |||
985 | ||||
986 | Object CommonFrame::context() const { | |||
987 | return ReadOnlyRoots(isolate()).undefined_value(); | |||
988 | } | |||
989 | ||||
990 | int CommonFrame::position() const { | |||
991 | Code code = LookupCode(); | |||
992 | int code_offset = code.GetOffsetFromInstructionStart(isolate(), pc()); | |||
993 | return AbstractCode::cast(code).SourcePosition(code_offset); | |||
994 | } | |||
995 | ||||
996 | int CommonFrame::ComputeExpressionsCount() const { | |||
997 | Address base = GetExpressionAddress(0); | |||
998 | Address limit = sp() - kSystemPointerSize; | |||
999 | DCHECK(base >= limit); // stack grows downwards | |||
1000 | // Include register-allocated locals in number of expressions. | |||
1001 | return static_cast<int>((base - limit) / kSystemPointerSize); | |||
1002 | } | |||
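| // Worked example (illustrative numbers, assuming kSystemPointerSize == 8): | |||
| // with base == 0x1040 and sp == 0x1000, limit is 0xff8 and the frame holds | |||
| // (0x1040 - 0xff8) / 8 == 9 expressions; subtracting one pointer from sp | |||
| // makes the count inclusive of both endpoints. | |||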
1003 | ||||
1004 | void CommonFrame::ComputeCallerState(State* state) const { | |||
1005 | state->fp = caller_fp(); | |||
1006 | #if V8_ENABLE_WEBASSEMBLY | |||
1007 | if (state->fp == kNullAddress) { | |||
1008 | // An empty FP signals the first frame of a stack segment. The caller is | |||
1009 | // on a different stack, or is unbound (suspended stack). | |||
1010 | DCHECK(FLAG_experimental_wasm_stack_switching); | |||
1011 | return; | |||
1012 | } | |||
1013 | #endif | |||
1014 | state->sp = caller_sp(); | |||
1015 | state->pc_address = ResolveReturnAddressLocation( | |||
1016 | reinterpret_cast<Address*>(ComputePCAddress(fp()))); | |||
1017 | state->callee_fp = fp(); | |||
1018 | state->callee_pc_address = pc_address(); | |||
1019 | state->constant_pool_address = | |||
1020 | reinterpret_cast<Address*>(ComputeConstantPoolAddress(fp())); | |||
1021 | } | |||
1022 | ||||
1023 | void CommonFrame::Summarize(std::vector<FrameSummary>* functions) const { | |||
1024 | // This should only be called on frames which override this method. | |||
1025 | UNREACHABLE(); | |||
1026 | } | |||
1027 | ||||
1028 | void CommonFrame::IterateCompiledFrame(RootVisitor* v) const { | |||
1029 | // Make sure that we're not doing "safe" stack frame iteration. We cannot | |||
1030 | // possibly find pointers in optimized frames in that state. | |||
1031 | DCHECK(can_access_heap_objects()); | |||
1032 | ||||
1033 | // Find the code and compute the safepoint information. | |||
1034 | Address inner_pointer = pc(); | |||
1035 | SafepointEntry safepoint_entry; | |||
1036 | uint32_t stack_slots = 0; | |||
1037 | Code code; | |||
1038 | bool has_tagged_outgoing_params = false; | |||
1039 | uint16_t first_tagged_parameter_slot = 0; | |||
1040 | uint16_t num_tagged_parameter_slots = 0; | |||
1041 | bool is_wasm = false; | |||
1042 | ||||
1043 | #if V8_ENABLE_WEBASSEMBLY | |||
1044 | bool has_wasm_feedback_slot = false; | |||
1045 | if (auto* wasm_code = wasm::GetWasmCodeManager()->LookupCode(inner_pointer)) { | |||
1046 | is_wasm = true; | |||
1047 | SafepointTable table(wasm_code); | |||
1048 | safepoint_entry = table.FindEntry(inner_pointer); | |||
1049 | stack_slots = wasm_code->stack_slots(); | |||
1050 | has_tagged_outgoing_params = | |||
1051 | wasm_code->kind() != wasm::WasmCode::kWasmFunction && | |||
1052 | wasm_code->kind() != wasm::WasmCode::kWasmToCapiWrapper; | |||
1053 | first_tagged_parameter_slot = wasm_code->first_tagged_parameter_slot(); | |||
1054 | num_tagged_parameter_slots = wasm_code->num_tagged_parameter_slots(); | |||
1055 | if (wasm_code->is_liftoff() && FLAG_wasm_speculative_inlining) { | |||
1056 | has_wasm_feedback_slot = true; | |||
1057 | } | |||
1058 | } | |||
1059 | #endif // V8_ENABLE_WEBASSEMBLY | |||
1060 | ||||
1061 | if (!is_wasm) { | |||
1062 | InnerPointerToCodeCache::InnerPointerToCodeCacheEntry* entry = | |||
1063 | isolate()->inner_pointer_to_code_cache()->GetCacheEntry(inner_pointer); | |||
1064 | if (!entry->safepoint_entry.is_initialized()) { | |||
1065 | entry->safepoint_entry = | |||
1066 | entry->code.GetSafepointEntry(isolate(), inner_pointer); | |||
1067 | DCHECK(entry->safepoint_entry.is_initialized()); | |||
1068 | } else { | |||
1069 | DCHECK_EQ(entry->safepoint_entry, | |||
1070 | entry->code.GetSafepointEntry(isolate(), inner_pointer)); | |||
1071 | } | |||
1072 | ||||
1073 | code = entry->code; | |||
1074 | safepoint_entry = entry->safepoint_entry; | |||
1075 | stack_slots = code.stack_slots(); | |||
1076 | ||||
1077 | has_tagged_outgoing_params = code.has_tagged_outgoing_params(); | |||
1078 | ||||
1079 | #if V8_ENABLE_WEBASSEMBLY | |||
1080 | // With inlined JS-to-Wasm calls, we can be in an OptimizedFrame and | |||
1081 | // directly call a Wasm function from JavaScript. In this case the | |||
1082 | // parameters we pass to the callee are not tagged. | |||
1083 | wasm::WasmCode* wasm_callee = | |||
1084 | wasm::GetWasmCodeManager()->LookupCode(callee_pc()); | |||
1085 | bool is_wasm_call = (wasm_callee != nullptr); | |||
1086 | if (is_wasm_call) has_tagged_outgoing_params = false; | |||
1087 | #endif // V8_ENABLE_WEBASSEMBLY | |||
1088 | } | |||
1089 | ||||
1090 | // Determine the fixed header and spill slot area size. | |||
1091 | int frame_header_size = StandardFrameConstants::kFixedFrameSizeFromFp; | |||
1092 | intptr_t marker = | |||
1093 | Memory<intptr_t>(fp() + CommonFrameConstants::kContextOrFrameTypeOffset); | |||
1094 | bool typed_frame = StackFrame::IsTypeMarker(marker); | |||
1095 | if (typed_frame) { | |||
1096 | StackFrame::Type candidate = StackFrame::MarkerToType(marker); | |||
1097 | switch (candidate) { | |||
1098 | case ENTRY: | |||
1099 | case CONSTRUCT_ENTRY: | |||
1100 | case EXIT: | |||
1101 | case BUILTIN_CONTINUATION: | |||
1102 | case JAVA_SCRIPT_BUILTIN_CONTINUATION: | |||
1103 | case JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH: | |||
1104 | case BUILTIN_EXIT: | |||
1105 | case STUB: | |||
1106 | case INTERNAL: | |||
1107 | case CONSTRUCT: | |||
1108 | #if V8_ENABLE_WEBASSEMBLY | |||
1109 | case JS_TO_WASM: | |||
1110 | case STACK_SWITCH: | |||
1111 | case C_WASM_ENTRY: | |||
1112 | case WASM_DEBUG_BREAK: | |||
1113 | #endif // V8_ENABLE_WEBASSEMBLY | |||
1114 | frame_header_size = TypedFrameConstants::kFixedFrameSizeFromFp; | |||
1115 | break; | |||
1116 | #if V8_ENABLE_WEBASSEMBLY | |||
1117 | case WASM_TO_JS: | |||
1118 | case WASM: | |||
1119 | case WASM_COMPILE_LAZY: | |||
1120 | frame_header_size = WasmFrameConstants::kFixedFrameSizeFromFp; | |||
1121 | if (has_wasm_feedback_slot) frame_header_size += kSystemPointerSize; | |||
1122 | break; | |||
1123 | case WASM_EXIT: | |||
1124 | // The last value in the frame header is the calling PC, which should | |||
1125 | // not be visited. | |||
1126 | static_assert(WasmExitFrameConstants::kFixedSlotCountFromFp == | |||
1127 | WasmFrameConstants::kFixedSlotCountFromFp + 1, | |||
1128 | "WasmExitFrame has one slot more than WasmFrame"); | |||
1129 | frame_header_size = WasmFrameConstants::kFixedFrameSizeFromFp; | |||
1130 | break; | |||
1131 | #endif // V8_ENABLE_WEBASSEMBLY | |||
1132 | case OPTIMIZED: | |||
1133 | case INTERPRETED: | |||
1134 | case BASELINE: | |||
1135 | case BUILTIN: | |||
1136 | // These frame types have a context, but it is actually stored in the | |||
1137 | // place on the stack where one finds the frame type. | |||
1138 | UNREACHABLE(); | |||
1139 | case NATIVE: | |||
1140 | case NO_FRAME_TYPE: | |||
1141 | case NUMBER_OF_TYPES: | |||
1142 | case MANUAL: | |||
1143 | UNREACHABLE(); | |||
1144 | } | |||
1145 | } | |||
1146 | ||||
1147 | // slot_space holds the size of the spill slot area in bytes, excluding the | |||
1148 | // fixed frame slots. | |||
1149 | const uint32_t slot_space = | |||
1150 | stack_slots * kSystemPointerSize - | |||
1151 | (frame_header_size + StandardFrameConstants::kFixedFrameSizeAboveFp); | |||
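| // For example (purely illustrative numbers, assuming kSystemPointerSize == | |||
| // 8): with stack_slots == 10 and a combined fixed-frame size of 32 bytes, | |||
| // slot_space == 10 * 8 - 32 == 48 bytes, i.e. six spill slots sit between | |||
| // parameters_limit and frame_header_base. | |||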
1152 | ||||
1153 | // base <= limit. | |||
1154 | // Fixed frame slots. | |||
1155 | FullObjectSlot frame_header_base(&Memory<Address>(fp() - frame_header_size)); | |||
1156 | FullObjectSlot frame_header_limit( | |||
1157 | &Memory<Address>(fp() - StandardFrameConstants::kCPSlotSize)); | |||
1158 | // Parameters passed to the callee. | |||
1159 | FullObjectSlot parameters_base(&Memory<Address>(sp())); | |||
1160 | FullObjectSlot parameters_limit(frame_header_base.address() - slot_space); | |||
1161 | // Spill slots are in the region ]frame_header_base, parameters_limit]; | |||
1162 | ||||
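| // Rough sketch of the regions set up above (lower addresses first; for | |||
| // orientation only, under the standard layout assumed by this function): | |||
| //   sp                 == parameters_base   : outgoing parameters, visited | |||
| //                                             below only if they are tagged | |||
| //   parameters_limit   == frame_header_base - slot_space | |||
| //                                           : spill slots, visited via the | |||
| //                                             safepoint tagged-slot bitmap | |||
| //   frame_header_base  == fp - frame_header_size | |||
| //   frame_header_limit == fp - kCPSlotSize  : fixed header slots, visited | |||
| //                                             at the end of this function | |||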
1163 | // Visit the rest of the parameters if they are tagged. | |||
1164 | if (has_tagged_outgoing_params) { | |||
1165 | v->VisitRootPointers(Root::kStackRoots, nullptr, parameters_base, | |||
1166 | parameters_limit); | |||
1167 | } | |||
1168 | ||||
1169 | // Visit pointer spill slots and locals. | |||
1170 | DCHECK_GE((stack_slots + kBitsPerByte) / kBitsPerByte, | |||
1171 | safepoint_entry.tagged_slots().size()); | |||
1172 | int slot_offset = 0; | |||
1173 | PtrComprCageBase cage_base(isolate()); | |||
1174 | for (uint8_t bits : safepoint_entry.tagged_slots()) { | |||
1175 | while (bits) { | |||
1176 | const int bit = base::bits::CountTrailingZeros(bits); | |||
1177 | bits &= ~(1 << bit); | |||
1178 | FullObjectSlot spill_slot = parameters_limit + slot_offset + bit; | |||
1179 | #ifdef V8_COMPRESS_POINTERS | |||
1180 | // Spill slots may contain compressed values, in which case the upper | |||
1181 | // 32 bits will contain zeros. In order to simplify handling of such | |||
1182 | // slots in the GC we ensure that the slot always contains the full value. | |||
1183 | ||||
1184 | // The spill slot may actually contain weak references so we load/store | |||
1185 | // values using spill_slot.location() in order to avoid dealing with | |||
1186 | // FullMaybeObjectSlots here. | |||
1187 | if (V8_EXTERNAL_CODE_SPACE_BOOL) { | |||
1188 | // When external code space is enabled the spill slot could contain both | |||
1189 | // Code and non-Code references, which have different cage bases. So | |||
1190 | // unconditional decompression of the value might corrupt Code pointers. | |||
1191 | // However, given that | |||
1192 | // 1) the Code pointers are never compressed by design (because | |||
1193 | // otherwise we wouldn't know which cage base to apply for | |||
1194 | // decompression, see respective DCHECKs in | |||
1195 | // RelocInfo::target_object()), | |||
1196 | // 2) there's no need to update the upper part of the full pointer | |||
1197 | // because if it was there then it'll stay the same, | |||
1198 | // we can avoid updating upper part of the spill slot if it already | |||
1199 | // contains full value. | |||
1200 | // TODO(v8:11880): Remove this special handling by enforcing builtins | |||
1201 | // to use CodeTs instead of Code objects. | |||
1202 | Address value = *spill_slot.location(); | |||
1203 | if (!HAS_SMI_TAG(value) && value <= 0xffffffff) { | |||
1204 | // We don't need to update smi values or full pointers. | |||
1205 | *spill_slot.location() = | |||
1206 | DecompressTaggedPointer(cage_base, static_cast<Tagged_t>(value)); | |||
1207 | if (DEBUG_BOOL) { | |||
1208 | // Ensure that the spill slot contains correct heap object. | |||
1209 | HeapObject raw = HeapObject::cast(Object(*spill_slot.location())); | |||
1210 | MapWord map_word = raw.map_word(cage_base, kRelaxedLoad); | |||
1211 | HeapObject forwarded = map_word.IsForwardingAddress() | |||
1212 | ? map_word.ToForwardingAddress() | |||
1213 | : raw; | |||
1214 | bool is_self_forwarded = | |||
1215 | forwarded.map_word(cage_base, kRelaxedLoad).ptr() == | |||
1216 | forwarded.address(); | |||
1217 | if (is_self_forwarded) { | |||
1218 | // The object might be in a self-forwarding state if it's located | |||
1219 | // in new large object space. GC will fix this at a later stage. | |||
1220 | CHECK(BasicMemoryChunk::FromHeapObject(forwarded) | |||
1221 | ->InNewLargeObjectSpace()); | |||
1222 | } else { | |||
1223 | CHECK(forwarded.map(cage_base).IsMap(cage_base)); | |||
1224 | } | |||
1225 | } | |||
1226 | } | |||
1227 | } else { | |||
1228 | Tagged_t compressed_value = | |||
1229 | static_cast<Tagged_t>(*spill_slot.location()); | |||
1230 | if (!HAS_SMI_TAG(compressed_value)) { | |||
1231 | // We don't need to update smi values. | |||
1232 | *spill_slot.location() = | |||
1233 | DecompressTaggedPointer(cage_base, compressed_value); | |||
1234 | } | |||
1235 | } | |||
1236 | #endif | |||
1237 | v->VisitRootPointer(Root::kStackRoots, nullptr, spill_slot); | |||
1238 | } | |||
1239 | slot_offset += kBitsPerByte; | |||
1240 | } | |||
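| // To illustrate the decoding above: a tagged_slots() byte of 0b00100110 in | |||
| // the first iteration (slot_offset == 0) has set bits at positions 1, 2 and | |||
| // 5, so the visitor is invoked for parameters_limit + 1, + 2 and + 5; the | |||
| // next byte then covers slots 8..15, and so on. | |||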
1241 | ||||
1242 | // Visit tagged parameters that have been passed to the function of this | |||
1243 | // frame. Conceptually these parameters belong to the parent frame. However, | |||
1244 | // the exact count is only known by this frame (in the presence of tail calls, | |||
1245 | // this information cannot be derived from the call site). | |||
1246 | if (num_tagged_parameter_slots > 0) { | |||
1247 | FullObjectSlot tagged_parameter_base(&Memory<Address>(caller_sp())); | |||
1248 | tagged_parameter_base += first_tagged_parameter_slot; | |||
1249 | FullObjectSlot tagged_parameter_limit = | |||
1250 | tagged_parameter_base + num_tagged_parameter_slots; | |||
1251 | ||||
1252 | v->VisitRootPointers(Root::kStackRoots, nullptr, tagged_parameter_base, | |||
1253 | tagged_parameter_limit); | |||
1254 | } | |||
1255 | ||||
1256 | // For the off-heap code cases, we can skip this. | |||
1257 | if (!code.is_null()) { | |||
1258 | // Visit the return address in the callee and incoming arguments. | |||
1259 | IteratePc(v, pc_address(), constant_pool_address(), code); | |||
1260 | } | |||
1261 | ||||
1262 | // If this frame has JavaScript ABI, visit the context (in stub and JS | |||
1263 | // frames) and the function (in JS frames). If it has WebAssembly ABI, visit | |||
1264 | // the instance object. | |||
1265 | if (!typed_frame) { | |||
1266 | // JavaScript ABI frames also contain arguments count value which is stored | |||
1267 | // untagged, we don't need to visit it. | |||
1268 | frame_header_base += 1; | |||
1269 | } | |||
1270 | v->VisitRootPointers(Root::kStackRoots, nullptr, frame_header_base, | |||
1271 | frame_header_limit); | |||
1272 | } | |||
1273 | ||||
1274 | Code StubFrame::unchecked_code() const { | |||
1275 | return isolate()->FindCodeObject(pc()); | |||
1276 | } | |||
1277 | ||||
1278 | int StubFrame::LookupExceptionHandlerInTable() { | |||
1279 | Code code = LookupCode(); | |||
1280 | DCHECK(code.is_turbofanned()); | |||
1281 | DCHECK_EQ(code.kind(), CodeKind::BUILTIN); | |||
1282 | HandlerTable table(code); | |||
1283 | int pc_offset = code.GetOffsetFromInstructionStart(isolate(), pc()); | |||
1284 | return table.LookupReturn(pc_offset); | |||
1285 | } | |||
1286 | ||||
1287 | void OptimizedFrame::Iterate(RootVisitor* v) const { IterateCompiledFrame(v); } | |||
1288 | ||||
1289 | void JavaScriptFrame::SetParameterValue(int index, Object value) const { | |||
1290 | Memory<Address>(GetParameterSlot(index)) = value.ptr(); | |||
1291 | } | |||
1292 | ||||
1293 | bool JavaScriptFrame::IsConstructor() const { | |||
1294 | return IsConstructFrame(caller_fp()); | |||
1295 | } | |||
1296 | ||||
1297 | bool JavaScriptFrame::HasInlinedFrames() const { | |||
1298 | std::vector<SharedFunctionInfo> functions; | |||
1299 | GetFunctions(&functions); | |||
1300 | return functions.size() > 1; | |||
1301 | } | |||
1302 | ||||
1303 | Code CommonFrameWithJSLinkage::unchecked_code() const { | |||
1304 | return FromCodeT(function().code()); | |||
1305 | } | |||
1306 | ||||
1307 | int OptimizedFrame::ComputeParametersCount() const { | |||
1308 | Code code = LookupCode(); | |||
1309 | if (code.kind() == CodeKind::BUILTIN) { | |||
1310 | return static_cast<int>( | |||
1311 | Memory<intptr_t>(fp() + StandardFrameConstants::kArgCOffset)) - | |||
1312 | kJSArgcReceiverSlots; | |||
1313 | } else { | |||
1314 | return JavaScriptFrame::ComputeParametersCount(); | |||
1315 | } | |||
1316 | } | |||
1317 | ||||
1318 | Address JavaScriptFrame::GetCallerStackPointer() const { | |||
1319 | return fp() + StandardFrameConstants::kCallerSPOffset; | |||
1320 | } | |||
1321 | ||||
1322 | void JavaScriptFrame::GetFunctions( | |||
1323 | std::vector<SharedFunctionInfo>* functions) const { | |||
1324 | DCHECK(functions->empty()); | |||
1325 | functions->push_back(function().shared()); | |||
1326 | } | |||
1327 | ||||
1328 | void JavaScriptFrame::GetFunctions( | |||
1329 | std::vector<Handle<SharedFunctionInfo>>* functions) const { | |||
1330 | DCHECK(functions->empty()); | |||
1331 | std::vector<SharedFunctionInfo> raw_functions; | |||
1332 | GetFunctions(&raw_functions); | |||
1333 | for (const auto& raw_function : raw_functions) { | |||
1334 | functions->push_back( | |||
1335 | Handle<SharedFunctionInfo>(raw_function, function().GetIsolate())); | |||
1336 | } | |||
1337 | } | |||
1338 | ||||
1339 | bool CommonFrameWithJSLinkage::IsConstructor() const { | |||
1340 | return IsConstructFrame(caller_fp()); | |||
1341 | } | |||
1342 | ||||
1343 | void CommonFrameWithJSLinkage::Summarize( | |||
1344 | std::vector<FrameSummary>* functions) const { | |||
1345 | DCHECK(functions->empty()); | |||
1346 | Code code = LookupCode(); | |||
1347 | int offset = code.GetOffsetFromInstructionStart(isolate(), pc()); | |||
1348 | Handle<AbstractCode> abstract_code(AbstractCode::cast(code), isolate()); | |||
1349 | Handle<FixedArray> params = GetParameters(); | |||
1350 | FrameSummary::JavaScriptFrameSummary summary( | |||
1351 | isolate(), receiver(), function(), *abstract_code, offset, | |||
1352 | IsConstructor(), *params); | |||
1353 | functions->push_back(summary); | |||
1354 | } | |||
1355 | ||||
1356 | JSFunction JavaScriptFrame::function() const { | |||
1357 | return JSFunction::cast(function_slot_object()); | |||
1358 | } | |||
1359 | ||||
1360 | Object JavaScriptFrame::unchecked_function() const { | |||
1361 | // During deoptimization of an optimized function, we may have yet to | |||
1362 | // materialize some closures on the stack. The arguments marker object | |||
1363 | // marks this case. | |||
1364 | DCHECK(function_slot_object().IsJSFunction() || | |||
1365 | ReadOnlyRoots(isolate()).arguments_marker() == function_slot_object()); | |||
1366 | return function_slot_object(); | |||
1367 | } | |||
1368 | ||||
1369 | Object CommonFrameWithJSLinkage::receiver() const { return GetParameter(-1); } | |||
1370 | ||||
1371 | Object JavaScriptFrame::context() const { | |||
1372 | const int offset = StandardFrameConstants::kContextOffset; | |||
1373 | Object maybe_result(Memory<Address>(fp() + offset)); | |||
1374 | DCHECK(!maybe_result.IsSmi()); | |||
1375 | return maybe_result; | |||
1376 | } | |||
1377 | ||||
1378 | Script JavaScriptFrame::script() const { | |||
1379 | return Script::cast(function().shared().script()); | |||
1380 | } | |||
1381 | ||||
1382 | int CommonFrameWithJSLinkage::LookupExceptionHandlerInTable( | |||
1383 | int* stack_depth, HandlerTable::CatchPrediction* prediction) { | |||
1384 | DCHECK(!LookupCode().has_handler_table()); | |||
1385 | DCHECK(!LookupCode().is_optimized_code() || | |||
1386 | LookupCode().kind() == CodeKind::BASELINE); | |||
1387 | return -1; | |||
1388 | } | |||
1389 | ||||
1390 | void JavaScriptFrame::PrintFunctionAndOffset(JSFunction function, | |||
1391 | AbstractCode code, int code_offset, | |||
1392 | FILE* file, | |||
1393 | bool print_line_number) { | |||
1394 | PrintF(file, "%s", CodeKindToMarker(code.kind())); | |||
1395 | function.PrintName(file); | |||
1396 | PrintF(file, "+%d", code_offset); | |||
1397 | if (print_line_number) { | |||
1398 | SharedFunctionInfo shared = function.shared(); | |||
1399 | int source_pos = code.SourcePosition(code_offset); | |||
1400 | Object maybe_script = shared.script(); | |||
1401 | if (maybe_script.IsScript()) { | |||
1402 | Script script = Script::cast(maybe_script); | |||
1403 | int line = script.GetLineNumber(source_pos) + 1; | |||
1404 | Object script_name_raw = script.name(); | |||
1405 | if (script_name_raw.IsString()) { | |||
1406 | String script_name = String::cast(script.name()); | |||
1407 | std::unique_ptr<char[]> c_script_name = | |||
1408 | script_name.ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL); | |||
1409 | PrintF(file, " at %s:%d", c_script_name.get(), line); | |||
1410 | } else { | |||
1411 | PrintF(file, " at <unknown>:%d", line); | |||
1412 | } | |||
1413 | } else { | |||
1414 | PrintF(file, " at <unknown>:<unknown>"); | |||
1415 | } | |||
1416 | } | |||
1417 | } | |||
1418 | ||||
1419 | void JavaScriptFrame::PrintTop(Isolate* isolate, FILE* file, bool print_args, | |||
1420 | bool print_line_number) { | |||
1421 | // constructor calls | |||
1422 | DisallowGarbageCollection no_gc; | |||
1423 | JavaScriptFrameIterator it(isolate); | |||
1424 | while (!it.done()) { | |||
1425 | if (it.frame()->is_java_script()) { | |||
1426 | JavaScriptFrame* frame = it.frame(); | |||
1427 | if (frame->IsConstructor()) PrintF(file, "new "); | |||
1428 | JSFunction function = frame->function(); | |||
1429 | int code_offset = 0; | |||
1430 | AbstractCode abstract_code = function.abstract_code(isolate); | |||
1431 | if (frame->is_interpreted()) { | |||
1432 | InterpretedFrame* iframe = reinterpret_cast<InterpretedFrame*>(frame); | |||
1433 | code_offset = iframe->GetBytecodeOffset(); | |||
1434 | } else if (frame->is_baseline()) { | |||
1435 | // TODO(pthier): AbstractCode should fully support Baseline code. | |||
1436 | BaselineFrame* baseline_frame = BaselineFrame::cast(frame); | |||
1437 | code_offset = baseline_frame->GetBytecodeOffset(); | |||
1438 | abstract_code = AbstractCode::cast(baseline_frame->GetBytecodeArray()); | |||
1439 | } else { | |||
1440 | Code code = frame->unchecked_code(); | |||
1441 | code_offset = code.GetOffsetFromInstructionStart(isolate, frame->pc()); | |||
1442 | } | |||
1443 | PrintFunctionAndOffset(function, abstract_code, code_offset, file, | |||
1444 | print_line_number); | |||
1445 | if (print_args) { | |||
1446 | // function arguments | |||
1447 | // (we are intentionally only printing the actually | |||
1448 | // supplied parameters, not all parameters required) | |||
1449 | PrintF(file, "(this="); | |||
1450 | frame->receiver().ShortPrint(file); | |||
1451 | const int length = frame->ComputeParametersCount(); | |||
1452 | for (int i = 0; i < length; i++) { | |||
1453 | PrintF(file, ", "); | |||
1454 | frame->GetParameter(i).ShortPrint(file); | |||
1455 | } | |||
1456 | PrintF(file, ")"); | |||
1457 | } | |||
1458 | break; | |||
1459 | } | |||
1460 | it.Advance(); | |||
1461 | } | |||
1462 | } | |||
1463 | ||||
1464 | void JavaScriptFrame::CollectFunctionAndOffsetForICStats(JSFunction function, | |||
1465 | AbstractCode code, | |||
1466 | int code_offset) { | |||
1467 | auto ic_stats = ICStats::instance(); | |||
1468 | ICInfo& ic_info = ic_stats->Current(); | |||
1469 | SharedFunctionInfo shared = function.shared(); | |||
1470 | ||||
1471 | ic_info.function_name = ic_stats->GetOrCacheFunctionName(function); | |||
1472 | ic_info.script_offset = code_offset; | |||
1473 | ||||
1474 | int source_pos = code.SourcePosition(code_offset); | |||
1475 | Object maybe_script = shared.script(); | |||
1476 | if (maybe_script.IsScript()) { | |||
1477 | Script script = Script::cast(maybe_script); | |||
1478 | ic_info.line_num = script.GetLineNumber(source_pos) + 1; | |||
1479 | ic_info.column_num = script.GetColumnNumber(source_pos); | |||
1480 | ic_info.script_name = ic_stats->GetOrCacheScriptName(script); | |||
1481 | } | |||
1482 | } | |||
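| // Note: Script::GetLineNumber() is zero-based, hence the + 1 above, so that | |||
| // ic_info.line_num uses the same one-based numbering as | |||
| // PrintFunctionAndOffset(). | |||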
1483 | ||||
1484 | Object CommonFrameWithJSLinkage::GetParameter(int index) const { | |||
1485 | return Object(Memory<Address>(GetParameterSlot(index))); | |||
1486 | } | |||
1487 | ||||
1488 | int CommonFrameWithJSLinkage::ComputeParametersCount() const { | |||
1489 | DCHECK(can_access_heap_objects() && | |||
1490 | isolate()->heap()->gc_state() == Heap::NOT_IN_GC); | |||
1491 | return function().shared().internal_formal_parameter_count_without_receiver(); | |||
1492 | } | |||
1493 | ||||
1494 | int JavaScriptFrame::GetActualArgumentCount() const { | |||
1495 | return static_cast<int>( | |||
1496 | Memory<intptr_t>(fp() + StandardFrameConstants::kArgCOffset)) - | |||
1497 | kJSArgcReceiverSlots; | |||
1498 | } | |||
1499 | ||||
1500 | Handle<FixedArray> CommonFrameWithJSLinkage::GetParameters() const { | |||
1501 | if (V8_LIKELY(!FLAG_detailed_error_stack_trace)) { | |||
1502 | return isolate()->factory()->empty_fixed_array(); | |||
1503 | } | |||
1504 | int param_count = ComputeParametersCount(); | |||
1505 | Handle<FixedArray> parameters = | |||
1506 | isolate()->factory()->NewFixedArray(param_count); | |||
1507 | for (int i = 0; i < param_count; i++) { | |||
1508 | parameters->set(i, GetParameter(i)); | |||
1509 | } | |||
1510 | ||||
1511 | return parameters; | |||
1512 | } | |||
1513 | ||||
1514 | JSFunction JavaScriptBuiltinContinuationFrame::function() const { | |||
1515 | const int offset = BuiltinContinuationFrameConstants::kFunctionOffset; | |||
1516 | return JSFunction::cast(Object(base::Memory<Address>(fp() + offset))); | |||
1517 | } | |||
1518 | ||||
1519 | int JavaScriptBuiltinContinuationFrame::ComputeParametersCount() const { | |||
1520 | // Assert that the first allocatable register is also the argument count | |||
1521 | // register. | |||
1522 | DCHECK_EQ(RegisterConfiguration::Default()->GetAllocatableGeneralCode(0), | |||
1523 | kJavaScriptCallArgCountRegister.code()); | |||
1524 | Object argc_object( | |||
1525 | Memory<Address>(fp() + BuiltinContinuationFrameConstants::kArgCOffset)); | |||
1526 | return Smi::ToInt(argc_object) - kJSArgcReceiverSlots; | |||
1527 | } | |||
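| // For illustration (assuming kJSArgcReceiverSlots == 1, i.e. the stored | |||
| // count includes the receiver): an argc Smi of 3 in the frame corresponds | |||
| // to two actual JavaScript arguments plus the receiver. | |||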
1528 | ||||
1529 | intptr_t JavaScriptBuiltinContinuationFrame::GetSPToFPDelta() const { | |||
1530 | Address height_slot = | |||
1531 | fp() + BuiltinContinuationFrameConstants::kFrameSPtoFPDeltaAtDeoptimize; | |||
1532 | intptr_t height = Smi::ToInt(Smi(Memory<Address>(height_slot))); | |||
1533 | return height; | |||
1534 | } | |||
1535 | ||||
1536 | Object JavaScriptBuiltinContinuationFrame::context() const { | |||
1537 | return Object(Memory<Address>( | |||
1538 | fp() + BuiltinContinuationFrameConstants::kBuiltinContextOffset)); | |||
1539 | } | |||
1540 | ||||
1541 | void JavaScriptBuiltinContinuationWithCatchFrame::SetException( | |||
1542 | Object exception) { | |||
1543 | int argc = ComputeParametersCount(); | |||
1544 | Address exception_argument_slot = | |||
1545 | fp() + BuiltinContinuationFrameConstants::kFixedFrameSizeAboveFp + | |||
1546 | (argc - 1) * kSystemPointerSize; | |||
1547 | ||||
1548 | // Only allow setting exception if previous value was the hole. | |||
1549 | CHECK_EQ(ReadOnlyRoots(isolate()).the_hole_value(), | |||
1550 | Object(Memory<Address>(exception_argument_slot))); | |||
1551 | Memory<Address>(exception_argument_slot) = exception.ptr(); | |||
1552 | } | |||
1553 | ||||
1554 | FrameSummary::JavaScriptFrameSummary::JavaScriptFrameSummary( | |||
1555 | Isolate* isolate, Object receiver, JSFunction function, | |||
1556 | AbstractCode abstract_code, int code_offset, bool is_constructor, | |||
1557 | FixedArray parameters) | |||
1558 | : FrameSummaryBase(isolate, FrameSummary::JAVA_SCRIPT), | |||
1559 | receiver_(receiver, isolate), | |||
1560 | function_(function, isolate), | |||
1561 | abstract_code_(abstract_code, isolate), | |||
1562 | code_offset_(code_offset), | |||
1563 | is_constructor_(is_constructor), | |||
1564 | parameters_(parameters, isolate) { | |||
1565 | DCHECK(abstract_code.IsBytecodeArray() || | |||
1566 | !CodeKindIsOptimizedJSFunction(Code::cast(abstract_code).kind())); | |||
1567 | } | |||
1568 | ||||
1569 | void FrameSummary::EnsureSourcePositionsAvailable() { | |||
1570 | if (IsJavaScript()) { | |||
1571 | java_script_summary_.EnsureSourcePositionsAvailable(); | |||
1572 | } | |||
1573 | } | |||
1574 | ||||
1575 | bool FrameSummary::AreSourcePositionsAvailable() const { | |||
1576 | if (IsJavaScript()) { | |||
1577 | return java_script_summary_.AreSourcePositionsAvailable(); | |||
1578 | } | |||
1579 | return true; | |||
1580 | } | |||
1581 | ||||
1582 | void FrameSummary::JavaScriptFrameSummary::EnsureSourcePositionsAvailable() { | |||
1583 | Handle<SharedFunctionInfo> shared(function()->shared(), isolate()); | |||
1584 | SharedFunctionInfo::EnsureSourcePositionsAvailable(isolate(), shared); | |||
1585 | } | |||
1586 | ||||
1587 | bool FrameSummary::JavaScriptFrameSummary::AreSourcePositionsAvailable() const { | |||
1588 | return !FLAG_enable_lazy_source_positions || function() | |||
1589 | ->shared() | |||
1590 | .GetBytecodeArray(isolate()) | |||
1591 | .HasSourcePositionTable(); | |||
1592 | } | |||
1593 | ||||
1594 | bool FrameSummary::JavaScriptFrameSummary::is_subject_to_debugging() const { | |||
1595 | return function()->shared().IsSubjectToDebugging(); | |||
1596 | } | |||
1597 | ||||
1598 | int FrameSummary::JavaScriptFrameSummary::SourcePosition() const { | |||
1599 | return abstract_code()->SourcePosition(code_offset()); | |||
1600 | } | |||
1601 | ||||
1602 | int FrameSummary::JavaScriptFrameSummary::SourceStatementPosition() const { | |||
1603 | return abstract_code()->SourceStatementPosition(code_offset()); | |||
1604 | } | |||
1605 | ||||
1606 | Handle<Object> FrameSummary::JavaScriptFrameSummary::script() const { | |||
1607 | return handle(function_->shared().script(), isolate()); | |||
1608 | } | |||
1609 | ||||
1610 | Handle<Context> FrameSummary::JavaScriptFrameSummary::native_context() const { | |||
1611 | return handle(function_->native_context(), isolate()); | |||
1612 | } | |||
1613 | ||||
1614 | Handle<StackFrameInfo> | |||
1615 | FrameSummary::JavaScriptFrameSummary::CreateStackFrameInfo() const { | |||
1616 | Handle<SharedFunctionInfo> shared(function_->shared(), isolate()); | |||
1617 | Handle<Script> script(Script::cast(shared->script()), isolate()); | |||
1618 | Handle<String> function_name = JSFunction::GetDebugName(function_); | |||
1619 | if (function_name->length() == 0 && | |||
1620 | script->compilation_type() == Script::COMPILATION_TYPE_EVAL) { | |||
1621 | function_name = isolate()->factory()->eval_string(); | |||
1622 | } | |||
1623 | int bytecode_offset = code_offset(); | |||
1624 | if (bytecode_offset == kFunctionEntryBytecodeOffset) { | |||
1625 | // For the special function entry bytecode offset (-1), which signals | |||
1626 | // that the stack trace was captured while the function entry was | |||
1627 | // executing (i.e. during the interrupt check), we cannot store this | |||
1628 | // sentinel in the bit field, so we just eagerly lookup the source | |||
1629 | // position within the script. | |||
1630 | SharedFunctionInfo::EnsureSourcePositionsAvailable(isolate(), shared); | |||
1631 | int source_position = abstract_code()->SourcePosition(bytecode_offset); | |||
1632 | return isolate()->factory()->NewStackFrameInfo( | |||
1633 | script, source_position, function_name, is_constructor()); | |||
1634 | } | |||
1635 | return isolate()->factory()->NewStackFrameInfo( | |||
1636 | shared, bytecode_offset, function_name, is_constructor()); | |||
1637 | } | |||
1638 | ||||
1639 | #if V8_ENABLE_WEBASSEMBLY | |||
1640 | FrameSummary::WasmFrameSummary::WasmFrameSummary( | |||
1641 | Isolate* isolate, Handle<WasmInstanceObject> instance, wasm::WasmCode* code, | |||
1642 | int code_offset, bool at_to_number_conversion) | |||
1643 | : FrameSummaryBase(isolate, WASM), | |||
1644 | wasm_instance_(instance), | |||
1645 | at_to_number_conversion_(at_to_number_conversion), | |||
1646 | code_(code), | |||
1647 | code_offset_(code_offset) {} | |||
1648 | ||||
1649 | Handle<Object> FrameSummary::WasmFrameSummary::receiver() const { | |||
1650 | return wasm_instance_->GetIsolate()->global_proxy(); | |||
1651 | } | |||
1652 | ||||
1653 | uint32_t FrameSummary::WasmFrameSummary::function_index() const { | |||
1654 | return code()->index(); | |||
1655 | } | |||
1656 | ||||
1657 | int FrameSummary::WasmFrameSummary::byte_offset() const { | |||
1658 | return code_->GetSourcePositionBefore(code_offset()); | |||
1659 | } | |||
1660 | ||||
1661 | int FrameSummary::WasmFrameSummary::SourcePosition() const { | |||
1662 | const wasm::WasmModule* module = wasm_instance()->module_object().module(); | |||
1663 | return GetSourcePosition(module, function_index(), byte_offset(), | |||
1664 | at_to_number_conversion()); | |||
1665 | } | |||
1666 | ||||
1667 | Handle<Script> FrameSummary::WasmFrameSummary::script() const { | |||
1668 | return handle(wasm_instance()->module_object().script(), | |||
1669 | wasm_instance()->GetIsolate()); | |||
1670 | } | |||
1671 | ||||
1672 | Handle<Context> FrameSummary::WasmFrameSummary::native_context() const { | |||
1673 | return handle(wasm_instance()->native_context(), isolate()); | |||
1674 | } | |||
1675 | ||||
1676 | Handle<StackFrameInfo> FrameSummary::WasmFrameSummary::CreateStackFrameInfo() | |||
1677 | const { | |||
1678 | Handle<String> function_name = | |||
1679 | GetWasmFunctionDebugName(isolate(), wasm_instance(), function_index()); | |||
1680 | return isolate()->factory()->NewStackFrameInfo(script(), SourcePosition(), | |||
1681 | function_name, false); | |||
1682 | } | |||
1683 | #endif // V8_ENABLE_WEBASSEMBLY | |||
1684 | ||||
1685 | FrameSummary::~FrameSummary() { | |||
1686 | #define FRAME_SUMMARY_DESTR(kind, type, field, desc) \ | |||
1687 | case kind: \ | |||
1688 | field.~type(); \ | |||
1689 | break; | |||
1690 | switch (base_.kind()) { | |||
1691 | FRAME_SUMMARY_VARIANTS(FRAME_SUMMARY_DESTR) | |||
1692 | default: | |||
1693 | UNREACHABLE(); | |||
1694 | } | |||
1695 | #undef FRAME_SUMMARY_DESTR | |||
1696 | } | |||
1697 | ||||
1698 | FrameSummary FrameSummary::GetTop(const CommonFrame* frame) { | |||
1699 | std::vector<FrameSummary> frames; | |||
1700 | frame->Summarize(&frames); | |||
1701 | DCHECK_LT(0, frames.size()); | |||
1702 | return frames.back(); | |||
1703 | } | |||
1704 | ||||
1705 | FrameSummary FrameSummary::GetBottom(const CommonFrame* frame) { | |||
1706 | return Get(frame, 0); | |||
1707 | } | |||
1708 | ||||
1709 | FrameSummary FrameSummary::GetSingle(const CommonFrame* frame) { | |||
1710 | std::vector<FrameSummary> frames; | |||
1711 | frame->Summarize(&frames); | |||
1712 | DCHECK_EQ(1, frames.size()); | |||
1713 | return frames.front(); | |||
1714 | } | |||
1715 | ||||
1716 | FrameSummary FrameSummary::Get(const CommonFrame* frame, int index) { | |||
1717 | DCHECK_LE(0, index); | |||
1718 | std::vector<FrameSummary> frames; | |||
1719 | frame->Summarize(&frames); | |||
1720 | DCHECK_GT(frames.size(), index); | |||
1721 | return frames[index]; | |||
1722 | } | |||
1723 | ||||
1724 | #if V8_ENABLE_WEBASSEMBLY | |||
1725 | #define FRAME_SUMMARY_DISPATCH(ret, name) \ | |||
1726 | ret FrameSummary::name() const { \ | |||
1727 | switch (base_.kind()) { \ | |||
1728 | case JAVA_SCRIPT: \ | |||
1729 | return java_script_summary_.name(); \ | |||
1730 | case WASM: \ | |||
1731 | return wasm_summary_.name(); \ | |||
1732 | default: \ | |||
1733 | UNREACHABLE(); \ | |||
1734 | } \ | |||
1735 | } | |||
1736 | #else | |||
1737 | #define FRAME_SUMMARY_DISPATCH(ret, name) \ | |||
1738 | ret FrameSummary::name() const { \ | |||
1739 | DCHECK_EQ(JAVA_SCRIPT, base_.kind()); \ | |||
1740 | return java_script_summary_.name(); \ | |||
1741 | } | |||
1742 | #endif // V8_ENABLE_WEBASSEMBLY | |||
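| // For example, with WebAssembly enabled FRAME_SUMMARY_DISPATCH(int, | |||
| // code_offset) expands to roughly: | |||
| //   int FrameSummary::code_offset() const { | |||
| //     switch (base_.kind()) { | |||
| //       case JAVA_SCRIPT: return java_script_summary_.code_offset(); | |||
| //       case WASM: return wasm_summary_.code_offset(); | |||
| //       default: UNREACHABLE(); | |||
| //     } | |||
| //   } | |||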
1743 | ||||
1744 | FRAME_SUMMARY_DISPATCH(Handle<Object>, receiver) | |||
1745 | FRAME_SUMMARY_DISPATCH(int, code_offset) | |||
1746 | FRAME_SUMMARY_DISPATCH(bool, is_constructor) | |||
1747 | FRAME_SUMMARY_DISPATCH(bool, is_subject_to_debugging) | |||
1748 | FRAME_SUMMARY_DISPATCH(Handle<Object>, script) | |||
1749 | FRAME_SUMMARY_DISPATCH(int, SourcePosition) | |||
1750 | FRAME_SUMMARY_DISPATCH(int, SourceStatementPosition) | |||
1751 | FRAME_SUMMARY_DISPATCH(Handle<Context>, native_context) | |||
1752 | FRAME_SUMMARY_DISPATCH(Handle<StackFrameInfo>, CreateStackFrameInfo) | |||
1753 | ||||
1754 | #undef FRAME_SUMMARY_DISPATCH | |||
1755 | ||||
1756 | void OptimizedFrame::Summarize(std::vector<FrameSummary>* frames) const { | |||
1757 | DCHECK(frames->empty()); | |||
1758 | DCHECK(is_optimized()); | |||
1759 | ||||
1760 | // Delegate to JS frame in absence of turbofan deoptimization. | |||
1761 | // TODO(turbofan): Revisit once we support deoptimization across the board. | |||
1762 | Code code = LookupCode(); | |||
1763 | if (code.kind() == CodeKind::BUILTIN) { | |||
1764 | return JavaScriptFrame::Summarize(frames); | |||
1765 | } | |||
1766 | ||||
1767 | int deopt_index = SafepointEntry::kNoDeoptIndex; | |||
1768 | DeoptimizationData const data = GetDeoptimizationData(&deopt_index); | |||
1769 | if (deopt_index == SafepointEntry::kNoDeoptIndex) { | |||
1770 | CHECK(data.is_null()); | |||
1771 | FATAL("Missing deoptimization information for OptimizedFrame::Summarize."); | |||
1772 | } | |||
1773 | ||||
1774 | // Prepare iteration over translation. Note that the below iteration might | |||
1775 | // materialize objects without storing them back to the Isolate, this will | |||
1776 | // lead to objects being re-materialized again for each summary. | |||
1777 | TranslatedState translated(this); | |||
1778 | translated.Prepare(fp()); | |||
1779 | ||||
1780 | // We create the summary in reverse order because the frames | |||
1781 | // in the deoptimization translation are ordered bottom-to-top. | |||
1782 | bool is_constructor = IsConstructor(); | |||
1783 | for (auto it = translated.begin(); it != translated.end(); it++) { | |||
1784 | if (it->kind() == TranslatedFrame::kUnoptimizedFunction || | |||
1785 | it->kind() == TranslatedFrame::kJavaScriptBuiltinContinuation || | |||
1786 | it->kind() == | |||
1787 | TranslatedFrame::kJavaScriptBuiltinContinuationWithCatch) { | |||
1788 | Handle<SharedFunctionInfo> shared_info = it->shared_info(); | |||
1789 | ||||
1790 | // The translation commands are ordered and the function is always | |||
1791 | // at the first position, and the receiver is next. | |||
1792 | TranslatedFrame::iterator translated_values = it->begin(); | |||
1793 | ||||
1794 | // Get or materialize the correct function in the optimized frame. | |||
1795 | Handle<JSFunction> function = | |||
1796 | Handle<JSFunction>::cast(translated_values->GetValue()); | |||
1797 | translated_values++; | |||
1798 | ||||
1799 | // Get or materialize the correct receiver in the optimized frame. | |||
1800 | Handle<Object> receiver = translated_values->GetValue(); | |||
1801 | translated_values++; | |||
1802 | ||||
1803 | // Determine the underlying code object and the position within it from | |||
1804 | // the translation corresponding to the frame type in question. | |||
1805 | Handle<AbstractCode> abstract_code; | |||
1806 | unsigned code_offset; | |||
1807 | if (it->kind() == TranslatedFrame::kJavaScriptBuiltinContinuation || | |||
1808 | it->kind() == | |||
1809 | TranslatedFrame::kJavaScriptBuiltinContinuationWithCatch) { | |||
1810 | code_offset = 0; | |||
1811 | abstract_code = ToAbstractCode( | |||
1812 | isolate()->builtins()->code_handle( | |||
1813 | Builtins::GetBuiltinFromBytecodeOffset(it->bytecode_offset())), | |||
1814 | isolate()); | |||
1815 | } else { | |||
1816 | DCHECK_EQ(it->kind(), TranslatedFrame::kUnoptimizedFunction); | |||
1817 | code_offset = it->bytecode_offset().ToInt(); | |||
1818 | abstract_code = | |||
1819 | handle(shared_info->abstract_code(isolate()), isolate()); | |||
1820 | } | |||
1821 | ||||
1822 | // Append full summary of the encountered JS frame. | |||
1823 | Handle<FixedArray> params = GetParameters(); | |||
1824 | FrameSummary::JavaScriptFrameSummary summary( | |||
1825 | isolate(), *receiver, *function, *abstract_code, code_offset, | |||
1826 | is_constructor, *params); | |||
1827 | frames->push_back(summary); | |||
1828 | is_constructor = false; | |||
1829 | } else if (it->kind() == TranslatedFrame::kConstructStub) { | |||
1830 | // The next encountered JS frame will be marked as a constructor call. | |||
1831 | DCHECK(!is_constructor); | |||
1832 | is_constructor = true; | |||
1833 | } | |||
1834 | } | |||
1835 | } | |||
1836 | ||||
1837 | int OptimizedFrame::LookupExceptionHandlerInTable( | |||
1838 | int* data, HandlerTable::CatchPrediction* prediction) { | |||
1839 | // We cannot perform exception prediction on optimized code. Instead, we need | |||
1840 | // to use FrameSummary to find the corresponding code offset in unoptimized | |||
1841 | // code to perform prediction there. | |||
1842 | DCHECK_NULL(prediction); | |||
1843 | Code code = LookupCode(); | |||
1844 | HandlerTable table(code); | |||
1845 | int pc_offset = code.GetOffsetFromInstructionStart(isolate(), pc()); | |||
1846 | DCHECK_NULL(data); // Data is not used and will not return a value. | |||
1847 | ||||
1848 | // When the return pc has been replaced by a trampoline there won't be | |||
1849 | // a handler for this trampoline. Thus we need to use the return pc that | |||
1850 | // _used to be_ on the stack to get the right ExceptionHandler. | |||
1851 | if (CodeKindCanDeoptimize(code.kind()) && code.marked_for_deoptimization()) { | |||
1852 | SafepointTable safepoints(isolate(), pc(), code); | |||
1853 | pc_offset = safepoints.find_return_pc(pc_offset); | |||
1854 | } | |||
1855 | return table.LookupReturn(pc_offset); | |||
1856 | } | |||
1857 | ||||
1858 | DeoptimizationData OptimizedFrame::GetDeoptimizationData( | |||
1859 | int* deopt_index) const { | |||
1860 | DCHECK(is_optimized()); | |||
1861 | ||||
1862 | JSFunction opt_function = function(); | |||
1863 | Code code = FromCodeT(opt_function.code()); | |||
1864 | ||||
1865 | // The code object may have been replaced by lazy deoptimization. Fall | |||
1866 | // back to a slow search in this case to find the original optimized | |||
1867 | // code object. | |||
1868 | if (!code.contains(isolate(), pc())) { | |||
1869 | code = isolate()->heap()->GcSafeFindCodeForInnerPointer(pc()); | |||
1870 | } | |||
1871 | DCHECK(!code.is_null()); | |||
1872 | DCHECK(CodeKindCanDeoptimize(code.kind())); | |||
1873 | ||||
1874 | SafepointEntry safepoint_entry = code.GetSafepointEntry(isolate(), pc()); | |||
1875 | if (safepoint_entry.has_deoptimization_index()) { | |||
1876 | *deopt_index = safepoint_entry.deoptimization_index(); | |||
1877 | return DeoptimizationData::cast(code.deoptimization_data()); | |||
1878 | } | |||
1879 | *deopt_index = SafepointEntry::kNoDeoptIndex; | |||
1880 | return DeoptimizationData(); | |||
1881 | } | |||
1882 | ||||
1883 | void OptimizedFrame::GetFunctions( | |||
1884 | std::vector<SharedFunctionInfo>* functions) const { | |||
1885 | DCHECK(functions->empty()); | |||
1886 | DCHECK(is_optimized()); | |||
1887 | ||||
1888 | // Delegate to JS frame in absence of turbofan deoptimization. | |||
1889 | // TODO(turbofan): Revisit once we support deoptimization across the board. | |||
1890 | Code code = LookupCode(); | |||
1891 | if (code.kind() == CodeKind::BUILTIN) { | |||
1892 | return JavaScriptFrame::GetFunctions(functions); | |||
1893 | } | |||
1894 | ||||
1895 | DisallowGarbageCollection no_gc; | |||
1896 | int deopt_index = SafepointEntry::kNoDeoptIndex; | |||
1897 | DeoptimizationData const data = GetDeoptimizationData(&deopt_index); | |||
1898 | DCHECK(!data.is_null()); | |||
1899 | DCHECK_NE(SafepointEntry::kNoDeoptIndex, deopt_index); | |||
1900 | DeoptimizationLiteralArray const literal_array = data.LiteralArray(); | |||
1901 | ||||
1902 | TranslationArrayIterator it(data.TranslationByteArray(), | |||
1903 | data.TranslationIndex(deopt_index).value()); | |||
1904 | TranslationOpcode opcode = TranslationOpcodeFromInt(it.Next()); | |||
1905 | DCHECK_EQ(TranslationOpcode::BEGIN, opcode); | |||
1906 | it.Next(); // Skip frame count. | |||
1907 | int jsframe_count = it.Next(); | |||
1908 | it.Next(); // Skip update feedback count. | |||
1909 | ||||
1910 | // We insert the frames in reverse order because the frames | |||
1911 | // in the deoptimization translation are ordered bottom-to-top. | |||
1912 | while (jsframe_count != 0) { | |||
1913 | opcode = TranslationOpcodeFromInt(it.Next()); | |||
1914 | if (opcode == TranslationOpcode::INTERPRETED_FRAME || | |||
1915 | opcode == TranslationOpcode::JAVA_SCRIPT_BUILTIN_CONTINUATION_FRAME || | |||
1916 | opcode == TranslationOpcode:: | |||
1917 | JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH_FRAME) { | |||
1918 | it.Next(); // Skip bailout id. | |||
1919 | jsframe_count--; | |||
1920 | ||||
1921 | // The second operand of the frame points to the function. | |||
1922 | Object shared = literal_array.get(it.Next()); | |||
1923 | functions->push_back(SharedFunctionInfo::cast(shared)); | |||
1924 | ||||
1925 | // Skip over remaining operands to advance to the next opcode. | |||
1926 | it.Skip(TranslationOpcodeOperandCount(opcode) - 2); | |||
1927 | } else { | |||
1928 | // Skip over operands to advance to the next opcode. | |||
1929 | it.Skip(TranslationOpcodeOperandCount(opcode)); | |||
1930 | } | |||
1931 | } | |||
1932 | } | |||
1933 | ||||
1934 | int OptimizedFrame::StackSlotOffsetRelativeToFp(int slot_index) { | |||
1935 | return StandardFrameConstants::kCallerSPOffset - | |||
1936 | ((slot_index + 1) * kSystemPointerSize); | |||
1937 | } | |||
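| // Sanity-check example (illustrative, assuming a 64-bit target where | |||
| // kCallerSPOffset == 2 * kSystemPointerSize == 16): slot_index 0 maps to | |||
| // fp + 8, slot_index 1 to fp + 0 and slot_index 2 to fp - 8, i.e. | |||
| // successive slots move towards lower addresses. | |||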
1938 | ||||
1939 | Object OptimizedFrame::StackSlotAt(int index) const { | |||
1940 | return Object(Memory<Address>(fp() + StackSlotOffsetRelativeToFp(index))); | |||
1941 | } | |||
1942 | ||||
1943 | int UnoptimizedFrame::position() const { | |||
1944 | AbstractCode code = AbstractCode::cast(GetBytecodeArray()); | |||
1945 | int code_offset = GetBytecodeOffset(); | |||
1946 | return code.SourcePosition(code_offset); | |||
1947 | } | |||
1948 | ||||
1949 | int UnoptimizedFrame::LookupExceptionHandlerInTable( | |||
1950 | int* context_register, HandlerTable::CatchPrediction* prediction) { | |||
1951 | HandlerTable table(GetBytecodeArray()); | |||
1952 | return table.LookupRange(GetBytecodeOffset(), context_register, prediction); | |||
1953 | } | |||
1954 | ||||
1955 | BytecodeArray UnoptimizedFrame::GetBytecodeArray() const { | |||
1956 | const int index = UnoptimizedFrameConstants::kBytecodeArrayExpressionIndex; | |||
1957 | DCHECK_EQ(UnoptimizedFrameConstants::kBytecodeArrayFromFp, | |||
1958 | UnoptimizedFrameConstants::kExpressionsOffset - | |||
1959 | index * kSystemPointerSize); | |||
1960 | return BytecodeArray::cast(GetExpression(index)); | |||
1961 | } | |||
1962 | ||||
1963 | Object UnoptimizedFrame::ReadInterpreterRegister(int register_index) const { | |||
1964 | const int index = UnoptimizedFrameConstants::kRegisterFileExpressionIndex; | |||
1965 | DCHECK_EQ(UnoptimizedFrameConstants::kRegisterFileFromFp, | |||
1966 | UnoptimizedFrameConstants::kExpressionsOffset - | |||
1967 | index * kSystemPointerSize); | |||
1968 | return GetExpression(index + register_index); | |||
1969 | } | |||
1970 | ||||
1971 | void UnoptimizedFrame::Summarize(std::vector<FrameSummary>* functions) const { | |||
1972 | DCHECK(functions->empty()); | |||
1973 | Handle<AbstractCode> abstract_code(AbstractCode::cast(GetBytecodeArray()), | |||
1974 | isolate()); | |||
1975 | Handle<FixedArray> params = GetParameters(); | |||
1976 | FrameSummary::JavaScriptFrameSummary summary( | |||
1977 | isolate(), receiver(), function(), *abstract_code, GetBytecodeOffset(), | |||
1978 | IsConstructor(), *params); | |||
1979 | functions->push_back(summary); | |||
1980 | } | |||
1981 | ||||
1982 | int InterpretedFrame::GetBytecodeOffset() const { | |||
1983 | const int index = InterpreterFrameConstants::kBytecodeOffsetExpressionIndex; | |||
1984 | DCHECK_EQ(InterpreterFrameConstants::kBytecodeOffsetFromFp, | |||
1985 | InterpreterFrameConstants::kExpressionsOffset - | |||
1986 | index * kSystemPointerSize); | |||
1987 | int raw_offset = Smi::ToInt(GetExpression(index)); | |||
1988 | return raw_offset - BytecodeArray::kHeaderSize + kHeapObjectTag; | |||
1989 | } | |||
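| // The Smi read above is effectively an offset relative to the tagged | |||
| // BytecodeArray pointer rather than to its first bytecode, so the | |||
| // conversion subtracts the header: a stored raw_offset equal to | |||
| // BytecodeArray::kHeaderSize - kHeapObjectTag corresponds to bytecode | |||
| // offset 0, and PatchBytecodeOffset() below applies the inverse mapping. | |||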
1990 | ||||
1991 | // static | |||
1992 | int InterpretedFrame::GetBytecodeOffset(Address fp) { | |||
1993 | const int offset = InterpreterFrameConstants::kExpressionsOffset; | |||
1994 | const int index = InterpreterFrameConstants::kBytecodeOffsetExpressionIndex; | |||
1995 | DCHECK_EQ(InterpreterFrameConstants::kBytecodeOffsetFromFp, | |||
1996 | InterpreterFrameConstants::kExpressionsOffset - | |||
1997 | index * kSystemPointerSize); | |||
1998 | Address expression_offset = fp + offset - index * kSystemPointerSize; | |||
1999 | int raw_offset = Smi::ToInt(Object(Memory<Address>(expression_offset))); | |||
2000 | return raw_offset - BytecodeArray::kHeaderSize + kHeapObjectTag; | |||
2001 | } | |||
2002 | ||||
2003 | void InterpretedFrame::PatchBytecodeOffset(int new_offset) { | |||
2004 | const int index = InterpreterFrameConstants::kBytecodeOffsetExpressionIndex; | |||
2005 | DCHECK_EQ(InterpreterFrameConstants::kBytecodeOffsetFromFp, | |||
2006 | InterpreterFrameConstants::kExpressionsOffset - | |||
2007 | index * kSystemPointerSize); | |||
2008 | int raw_offset = BytecodeArray::kHeaderSize - kHeapObjectTag + new_offset; | |||
2009 | SetExpression(index, Smi::FromInt(raw_offset)); | |||
2010 | } | |||
2011 | ||||
2012 | void InterpretedFrame::PatchBytecodeArray(BytecodeArray bytecode_array) { | |||
2013 | const int index = InterpreterFrameConstants::kBytecodeArrayExpressionIndex; | |||
2014 | DCHECK_EQ(InterpreterFrameConstants::kBytecodeArrayFromFp, | |||
2015 | InterpreterFrameConstants::kExpressionsOffset - | |||
2016 | index * kSystemPointerSize); | |||
2017 | SetExpression(index, bytecode_array); | |||
2018 | } | |||
2019 | ||||
2020 | int BaselineFrame::GetBytecodeOffset() const { | |||
2021 | return LookupCode().GetBytecodeOffsetForBaselinePC(this->pc(), | |||
2022 | GetBytecodeArray()); | |||
2023 | } | |||
2024 | ||||
2025 | intptr_t BaselineFrame::GetPCForBytecodeOffset(int bytecode_offset) const { | |||
2026 | return LookupCode().GetBaselineStartPCForBytecodeOffset(bytecode_offset, | |||
2027 | GetBytecodeArray()); | |||
2028 | } | |||
2029 | ||||
2030 | void BaselineFrame::PatchContext(Context value) { | |||
2031 | base::Memory<Address>(fp() + BaselineFrameConstants::kContextOffset) = | |||
2032 | value.ptr(); | |||
2033 | } | |||
2034 | ||||
2035 | JSFunction BuiltinFrame::function() const { | |||
2036 | const int offset = BuiltinFrameConstants::kFunctionOffset; | |||
2037 | return JSFunction::cast(Object(base::Memory<Address>(fp() + offset))); | |||
2038 | } | |||
2039 | ||||
2040 | int BuiltinFrame::ComputeParametersCount() const { | |||
2041 | const int offset = BuiltinFrameConstants::kLengthOffset; | |||
2042 | return Smi::ToInt(Object(base::Memory<Address>(fp() + offset))) - | |||
2043 | kJSArgcReceiverSlots; | |||
2044 | } | |||
2045 | ||||
2046 | #if V8_ENABLE_WEBASSEMBLY | |||
2047 | void WasmFrame::Print(StringStream* accumulator, PrintMode mode, | |||
2048 | int index) const { | |||
2049 | PrintIndex(accumulator, mode, index); | |||
2050 | if (function_index() == wasm::kAnonymousFuncIndex) { | |||
2051 | accumulator->Add("Anonymous wasm wrapper [pc: %p]\n", | |||
2052 | reinterpret_cast<void*>(pc())); | |||
2053 | return; | |||
2054 | } | |||
2055 | wasm::WasmCodeRefScope code_ref_scope; | |||
2056 | accumulator->Add("Wasm ["); | |||
2057 | accumulator->PrintName(script().name()); | |||
2058 | Address instruction_start = wasm_code()->instruction_start(); | |||
2059 | base::Vector<const uint8_t> raw_func_name = | |||
2060 | module_object().GetRawFunctionName(function_index()); | |||
2061 | const int kMaxPrintedFunctionName = 64; | |||
2062 | char func_name[kMaxPrintedFunctionName + 1]; | |||
2063 | int func_name_len = std::min(kMaxPrintedFunctionName, raw_func_name.length()); | |||
2064 | memcpy(func_name, raw_func_name.begin(), func_name_len); | |||
2065 | func_name[func_name_len] = '\0'; | |||
2066 | int pos = position(); | |||
2067 | const wasm::WasmModule* module = wasm_instance().module_object().module(); | |||
2068 | int func_index = function_index(); | |||
2069 | int func_code_offset = module->functions[func_index].code.offset(); | |||
2070 | accumulator->Add("], function #%u ('%s'), pc=%p (+0x%x), pos=%d (+%d)\n", | |||
2071 | func_index, func_name, reinterpret_cast<void*>(pc()), | |||
2072 | static_cast<int>(pc() - instruction_start), pos, | |||
2073 | pos - func_code_offset); | |||
2074 | if (mode != OVERVIEW) accumulator->Add("\n"); | |||
2075 | } | |||
2076 | ||||
2077 | wasm::WasmCode* WasmFrame::wasm_code() const { | |||
2078 | return wasm::GetWasmCodeManager()->LookupCode(pc()); | |||
2079 | } | |||
2080 | ||||
2081 | WasmInstanceObject WasmFrame::wasm_instance() const { | |||
2082 | const int offset = WasmFrameConstants::kWasmInstanceOffset; | |||
2083 | Object instance(Memory<Address>(fp() + offset)); | |||
2084 | return WasmInstanceObject::cast(instance); | |||
2085 | } | |||
2086 | ||||
2087 | wasm::NativeModule* WasmFrame::native_module() const { | |||
2088 | return module_object().native_module(); | |||
2089 | } | |||
2090 | ||||
2091 | WasmModuleObject WasmFrame::module_object() const { | |||
2092 | return wasm_instance().module_object(); | |||
2093 | } | |||
2094 | ||||
2095 | int WasmFrame::function_index() const { | |||
2096 | wasm::WasmCodeRefScope code_ref_scope; | |||
2097 | return wasm_code()->index(); | |||
2098 | } | |||
2099 | ||||
2100 | Script WasmFrame::script() const { return module_object().script(); } | |||
2101 | ||||
2102 | int WasmFrame::position() const { | |||
2103 | wasm::WasmCodeRefScope code_ref_scope; | |||
2104 | const wasm::WasmModule* module = wasm_instance().module_object().module(); | |||
2105 | return GetSourcePosition(module, function_index(), byte_offset(), | |||
2106 | at_to_number_conversion()); | |||
2107 | } | |||
2108 | ||||
2109 | int WasmFrame::byte_offset() const { | |||
2110 | wasm::WasmCode* code = wasm_code(); | |||
2111 | int offset = static_cast<int>(pc() - code->instruction_start()); | |||
2112 | return code->GetSourcePositionBefore(offset); | |||
2113 | } | |||
2114 | ||||
2115 | bool WasmFrame::is_inspectable() const { | |||
2116 | wasm::WasmCodeRefScope code_ref_scope; | |||
2117 | return wasm_code()->is_inspectable(); | |||
2118 | } | |||
2119 | ||||
2120 | Object WasmFrame::context() const { return wasm_instance().native_context(); } | |||
2121 | ||||
2122 | void WasmFrame::Summarize(std::vector<FrameSummary>* functions) const { | |||
2123 | DCHECK(functions->empty()); | |||
2124 | // The {WasmCode*} escapes this scope via the {FrameSummary}, which is fine, | |||
2125 | // since this code object is part of our stack. | |||
2126 | wasm::WasmCodeRefScope code_ref_scope; | |||
2127 | wasm::WasmCode* code = wasm_code(); | |||
2128 | int offset = static_cast<int>(pc() - code->instruction_start()); | |||
2129 | Handle<WasmInstanceObject> instance(wasm_instance(), isolate()); | |||
2130 | FrameSummary::WasmFrameSummary summary(isolate(), instance, code, offset, | |||
2131 | at_to_number_conversion()); | |||
2132 | functions->push_back(summary); | |||
2133 | } | |||
2134 | ||||
2135 | bool WasmFrame::at_to_number_conversion() const { | |||
2136 | // Check whether our callee is a WASM_TO_JS frame, and this frame is at the | |||
2137 | // ToNumber conversion call. | |||
2138 | wasm::WasmCode* code = | |||
2139 | callee_pc() != kNullAddress | |||
2140 | ? wasm::GetWasmCodeManager()->LookupCode(callee_pc()) | |||
2141 | : nullptr; | |||
2142 | if (!code || code->kind() != wasm::WasmCode::kWasmToJsWrapper) return false; | |||
2143 | int offset = static_cast<int>(callee_pc() - code->instruction_start()); | |||
2144 | int pos = code->GetSourcePositionBefore(offset); | |||
2145 | // The imported call has position 0, ToNumber has position 1. | |||
2146 | // If there is no source position available, this is also not a ToNumber call. | |||
2147 | DCHECK(pos == wasm::kNoCodePosition || pos == 0 || pos == 1); | |||
2148 | return pos == 1; | |||
2149 | } | |||
2150 | ||||
2151 | int WasmFrame::LookupExceptionHandlerInTable() { | |||
2152 | wasm::WasmCode* code = wasm::GetWasmCodeManager()->LookupCode(pc()); | |||
2153 | if (!code->IsAnonymous() && code->handler_table_size() > 0) { | |||
2154 | HandlerTable table(code); | |||
2155 | int pc_offset = static_cast<int>(pc() - code->instruction_start()); | |||
2156 | return table.LookupReturn(pc_offset); | |||
2157 | } | |||
2158 | return -1; | |||
2159 | } | |||
2160 | ||||
2161 | void WasmDebugBreakFrame::Iterate(RootVisitor* v) const { | |||
2162 | DCHECK(caller_pc()); | |||
2163 | wasm::WasmCode* code = wasm::GetWasmCodeManager()->LookupCode(caller_pc()); | |||
2164 | DCHECK(code); | |||
2165 | SafepointTable table(code); | |||
2166 | SafepointEntry safepoint_entry = table.FindEntry(caller_pc()); | |||
2167 | uint32_t tagged_register_indexes = safepoint_entry.tagged_register_indexes(); | |||
2168 | ||||
2169 | while (tagged_register_indexes != 0) { | |||
2170 | int reg_code = base::bits::CountTrailingZeros(tagged_register_indexes); | |||
2171 | tagged_register_indexes &= ~(1 << reg_code); | |||
2172 | FullObjectSlot spill_slot(&Memory<Address>( | |||
2173 | fp() + | |||
2174 | WasmDebugBreakFrameConstants::GetPushedGpRegisterOffset(reg_code))); | |||
2175 | ||||
2176 | v->VisitRootPointer(Root::kStackRoots, nullptr, spill_slot); | |||
2177 | } | |||
2178 | } | |||
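// A minimal, self-contained sketch of the bit-iteration pattern used in
// WasmDebugBreakFrame::Iterate above: peel off the lowest set bit of the
// tagged-register mask until none remain. It uses C++20 <bit> instead of
// V8's base::bits helpers, and the visitation is stubbed out with a callback;
// this is purely illustrative and not part of this file.
#include <bit>
#include <cstdint>

template <typename Visit>
void ForEachTaggedRegister(uint32_t tagged_register_indexes, Visit visit) {
  while (tagged_register_indexes != 0) {
    int reg_code = std::countr_zero(tagged_register_indexes);  // lowest set bit
    tagged_register_indexes &= tagged_register_indexes - 1;    // clear that bit
    visit(reg_code);  // e.g. visit the spill slot pushed for this register
  }
}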
2179 | ||||
2180 | void WasmDebugBreakFrame::Print(StringStream* accumulator, PrintMode mode, | |||
2181 | int index) const { | |||
2182 | PrintIndex(accumulator, mode, index); | |||
2183 | accumulator->Add("WasmDebugBreak"); | |||
2184 | if (mode != OVERVIEW) accumulator->Add("\n"); | |||
2185 | } | |||
2186 | ||||
2187 | void JsToWasmFrame::Iterate(RootVisitor* v) const { | |||
2188 | Code code = GetContainingCode(isolate(), pc()); | |||
2189 | // GenericJSToWasmWrapper stack layout | |||
2190 | // ------+-----------------+---------------------- | |||
2191 | // | return addr | | |||
2192 | // fp |- - - - - - - - -| -------------------| | |||
2193 | // | fp | | | |||
2194 | // fp-p |- - - - - - - - -| | | |||
2195 | // | frame marker | | no GC scan | |||
2196 | // fp-2p |- - - - - - - - -| | | |||
2197 | // | scan_count | | | |||
2198 | // fp-3p |- - - - - - - - -| -------------------| | |||
2199 | // | .... | <- spill_slot_limit | | |||
2200 | // | spill slots | | GC scan scan_count slots | |||
2201 | // | .... | <- spill_slot_base--| | |||
2202 | // |- - - - - - - - -| | | |||
2203 | if (code.is_null() || !code.is_builtin() || | |||
2204 | code.builtin_id() != Builtin::kGenericJSToWasmWrapper) { | |||
2205 | // If it's not the GenericJSToWasmWrapper, then it's a TurboFan-compiled | |||
2206 | // specific wrapper, so we have to call IterateCompiledFrame. | |||
2207 | IterateCompiledFrame(v); | |||
2208 | return; | |||
2209 | } | |||
2210 | // The slot at [fp + BuiltinWasmWrapperConstants::kGCScanSlotCountOffset] is a value | |||
2211 | // indicating how many values should be scanned from the top. | |||
2212 | intptr_t scan_count = *reinterpret_cast<intptr_t*>( | |||
2213 | fp() + BuiltinWasmWrapperConstants::kGCScanSlotCountOffset); | |||
2214 | ||||
2215 | FullObjectSlot spill_slot_base(&Memory<Address>(sp())); | |||
2216 | FullObjectSlot spill_slot_limit( | |||
2217 | &Memory<Address>(sp() + scan_count * kSystemPointerSize)); | |||
2218 | v->VisitRootPointers(Root::kStackRoots, nullptr, spill_slot_base, | |||
2219 | spill_slot_limit); | |||
2220 | } | |||
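// A minimal sketch (not from the V8 tree) of the spill-slot range computed
// above: scan_count slots are visited starting at sp, giving the half-open
// range [sp, sp + scan_count * kSystemPointerSize). The 8-byte pointer size
// below is an assumption for a 64-bit target.
#include <cstdint>

constexpr uintptr_t SpillScanLimit(uintptr_t sp, intptr_t scan_count,
                                   int pointer_size = 8) {
  return sp + static_cast<uintptr_t>(scan_count) * pointer_size;
}
static_assert(SpillScanLimit(0x1000, 3) == 0x1018,
              "three 8-byte slots above sp are scanned");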
2221 | ||||
2222 | void StackSwitchFrame::Iterate(RootVisitor* v) const { | |||
2223 | // See JsToWasmFrame layout. | |||
2224 | // We cannot DCHECK that the pc matches the expected builtin code here, | |||
2225 | // because the return address is on a different stack. | |||
2226 | // The slot at [fp + BuiltinWasmWrapperConstants::kGCScanSlotCountOffset] is a | |||
2227 | // value indicating how many values should be scanned from the top. | |||
2228 | intptr_t scan_count = *reinterpret_cast<intptr_t*>( | |||
2229 | fp() + BuiltinWasmWrapperConstants::kGCScanSlotCountOffset); | |||
2230 | ||||
2231 | FullObjectSlot spill_slot_base(&Memory<Address>(sp())); | |||
2232 | FullObjectSlot spill_slot_limit( | |||
2233 | &Memory<Address>(sp() + scan_count * kSystemPointerSize)); | |||
2234 | v->VisitRootPointers(Root::kStackRoots, nullptr, spill_slot_base, | |||
2235 | spill_slot_limit); | |||
2236 | } | |||
2237 | ||||
2238 | // static | |||
2239 | void StackSwitchFrame::GetStateForJumpBuffer(wasm::JumpBuffer* jmpbuf, | |||
2240 | State* state) { | |||
2241 | DCHECK_NE(jmpbuf->fp, kNullAddress); | |||
2242 | DCHECK_EQ(ComputeFrameType(jmpbuf->fp), STACK_SWITCH); | |||
2243 | FillState(jmpbuf->fp, jmpbuf->sp, state); | |||
2244 | DCHECK_NE(*state->pc_address, kNullAddress); | |||
2245 | } | |||
2246 | ||||
2247 | WasmInstanceObject WasmCompileLazyFrame::wasm_instance() const { | |||
2248 | return WasmInstanceObject::cast(*wasm_instance_slot()); | |||
2249 | } | |||
2250 | ||||
2251 | FullObjectSlot WasmCompileLazyFrame::wasm_instance_slot() const { | |||
2252 | const int offset = WasmCompileLazyFrameConstants::kWasmInstanceOffset; | |||
2253 | return FullObjectSlot(&Memory<Address>(fp() + offset)); | |||
2254 | } | |||
2255 | ||||
2256 | void WasmCompileLazyFrame::Iterate(RootVisitor* v) const { | |||
2257 | const int header_size = WasmCompileLazyFrameConstants::kFixedFrameSizeFromFp; | |||
2258 | FullObjectSlot base(&Memory<Address>(sp())); | |||
2259 | FullObjectSlot limit(&Memory<Address>(fp() - header_size)); | |||
2260 | v->VisitRootPointers(Root::kStackRoots, nullptr, base, limit); | |||
2261 | v->VisitRootPointer(Root::kStackRoots, nullptr, wasm_instance_slot()); | |||
2262 | } | |||
2263 | #endif // V8_ENABLE_WEBASSEMBLY | |||
2264 | ||||
2265 | namespace { | |||
2266 | ||||
2267 | void PrintFunctionSource(StringStream* accumulator, SharedFunctionInfo shared, | |||
2268 | Code code) { | |||
2269 | if (FLAG_max_stack_trace_source_length != 0 && !code.is_null()) { | |||
2270 | std::ostringstream os; | |||
2271 | os << "--------- s o u r c e c o d e ---------\n" | |||
2272 | << SourceCodeOf(shared, FLAG_max_stack_trace_source_length) | |||
2273 | << "\n-----------------------------------------\n"; | |||
2274 | accumulator->Add(os.str().c_str()); | |||
2275 | } | |||
2276 | } | |||
2277 | ||||
2278 | } // namespace | |||
2279 | ||||
2280 | void JavaScriptFrame::Print(StringStream* accumulator, PrintMode mode, | |||
2281 | int index) const { | |||
2282 | Handle<SharedFunctionInfo> shared = handle(function().shared(), isolate()); | |||
2283 | SharedFunctionInfo::EnsureSourcePositionsAvailable(isolate(), shared); | |||
2284 | ||||
2285 | DisallowGarbageCollection no_gc; | |||
2286 | Object receiver = this->receiver(); | |||
2287 | JSFunction function = this->function(); | |||
2288 | ||||
2289 | accumulator->PrintSecurityTokenIfChanged(function); | |||
2290 | PrintIndex(accumulator, mode, index); | |||
2291 | PrintFrameKind(accumulator); | |||
2292 | Code code; | |||
2293 | if (IsConstructor()) accumulator->Add("new "); | |||
2294 | accumulator->PrintFunction(function, receiver, &code); | |||
2295 | accumulator->Add(" [%p]", function); | |||
2296 | ||||
2297 | // Get scope information for nicer output, if possible. If code is nullptr, or | |||
2298 | // doesn't contain scope info, scope_info will return 0 for the number of | |||
2299 | // parameters, stack local variables, context local variables, stack slots, | |||
2300 | // or context slots. | |||
2301 | ScopeInfo scope_info = shared->scope_info(); | |||
2302 | Object script_obj = shared->script(); | |||
2303 | if (script_obj.IsScript()) { | |||
2304 | Script script = Script::cast(script_obj); | |||
2305 | accumulator->Add(" ["); | |||
2306 | accumulator->PrintName(script.name()); | |||
2307 | ||||
2308 | if (is_interpreted()) { | |||
2309 | const InterpretedFrame* iframe = InterpretedFrame::cast(this); | |||
2310 | BytecodeArray bytecodes = iframe->GetBytecodeArray(); | |||
2311 | int offset = iframe->GetBytecodeOffset(); | |||
2312 | int source_pos = AbstractCode::cast(bytecodes).SourcePosition(offset); | |||
2313 | int line = script.GetLineNumber(source_pos) + 1; | |||
2314 | accumulator->Add(":%d] [bytecode=%p offset=%d]", line, | |||
2315 | reinterpret_cast<void*>(bytecodes.ptr()), offset); | |||
2316 | } else { | |||
2317 | int function_start_pos = shared->StartPosition(); | |||
2318 | int line = script.GetLineNumber(function_start_pos) + 1; | |||
2319 | accumulator->Add(":~%d] [pc=%p]", line, reinterpret_cast<void*>(pc())); | |||
2320 | } | |||
2321 | } | |||
2322 | ||||
2323 | accumulator->Add("(this=%o", receiver); | |||
2324 | ||||
2325 | // Print the parameters. | |||
2326 | int parameters_count = ComputeParametersCount(); | |||
2327 | for (int i = 0; i < parameters_count; i++) { | |||
2328 | accumulator->Add(","); | |||
2329 | accumulator->Add("%o", GetParameter(i)); | |||
2330 | } | |||
2331 | ||||
2332 | accumulator->Add(")"); | |||
2333 | if (mode == OVERVIEW) { | |||
2334 | accumulator->Add("\n"); | |||
2335 | return; | |||
2336 | } | |||
2337 | if (is_optimized()) { | |||
2338 | accumulator->Add(" {\n// optimized frame\n"); | |||
2339 | PrintFunctionSource(accumulator, *shared, code); | |||
2340 | accumulator->Add("}\n"); | |||
2341 | return; | |||
2342 | } | |||
2343 | accumulator->Add(" {\n"); | |||
2344 | ||||
2345 | // Compute the number of locals and expression stack elements. | |||
2346 | int heap_locals_count = scope_info.ContextLocalCount(); | |||
2347 | int expressions_count = ComputeExpressionsCount(); | |||
2348 | ||||
2349 | // Try to get hold of the context of this frame. | |||
2350 | Context context; | |||
2351 | if (this->context().IsContext()) { | |||
2352 | context = Context::cast(this->context()); | |||
2353 | while (context.IsWithContext()) { | |||
2354 | context = context.previous(); | |||
2355 | DCHECK(!context.is_null()); | |||
2356 | } | |||
2357 | } | |||
2358 | ||||
2359 | // Print heap-allocated local variables. | |||
2360 | if (heap_locals_count > 0) { | |||
2361 | accumulator->Add(" // heap-allocated locals\n"); | |||
2362 | } | |||
2363 | for (auto it : ScopeInfo::IterateLocalNames(&scope_info, no_gc)) { | |||
2364 | accumulator->Add(" var "); | |||
2365 | accumulator->PrintName(it->name()); | |||
2366 | accumulator->Add(" = "); | |||
2367 | if (!context.is_null()) { | |||
2368 | int slot_index = Context::MIN_CONTEXT_SLOTS + it->index(); | |||
2369 | if (slot_index < context.length()) { | |||
2370 | accumulator->Add("%o", context.get(slot_index)); | |||
2371 | } else { | |||
2372 | accumulator->Add( | |||
2373 | "// warning: missing context slot - inconsistent frame?"); | |||
2374 | } | |||
2375 | } else { | |||
2376 | accumulator->Add("// warning: no context found - inconsistent frame?"); | |||
2377 | } | |||
2378 | accumulator->Add("\n"); | |||
2379 | } | |||
2380 | ||||
2381 | // Print the expression stack. | |||
2382 | if (0 < expressions_count) { | |||
2383 | accumulator->Add(" // expression stack (top to bottom)\n"); | |||
2384 | } | |||
2385 | for (int i = expressions_count - 1; i >= 0; i--) { | |||
2386 | accumulator->Add(" [%02d] : %o\n", i, GetExpression(i)); | |||
2387 | } | |||
2388 | ||||
2389 | PrintFunctionSource(accumulator, *shared, code); | |||
2390 | ||||
2391 | accumulator->Add("}\n\n"); | |||
2392 | } | |||
2393 | ||||
2394 | void EntryFrame::Iterate(RootVisitor* v) const { | |||
2395 | IteratePc(v, pc_address(), constant_pool_address(), LookupCode()); | |||
2396 | } | |||
2397 | ||||
2398 | void CommonFrame::IterateExpressions(RootVisitor* v) const { | |||
2399 | const int last_object_offset = StandardFrameConstants::kLastObjectOffset; | |||
2400 | intptr_t marker = | |||
2401 | Memory<intptr_t>(fp() + CommonFrameConstants::kContextOrFrameTypeOffset); | |||
2402 | FullObjectSlot base(&Memory<Address>(sp())); | |||
2403 | FullObjectSlot limit(&Memory<Address>(fp() + last_object_offset) + 1); | |||
2404 | if (StackFrame::IsTypeMarker(marker)) { | |||
2405 | v->VisitRootPointers(Root::kStackRoots, nullptr, base, limit); | |||
2406 | } else { | |||
2407 | // The frame contains the actual argument count (intptr) that should not be | |||
2408 | // visited. | |||
2409 | FullObjectSlot argc( | |||
2410 | &Memory<Address>(fp() + StandardFrameConstants::kArgCOffset)); | |||
2411 | v->VisitRootPointers(Root::kStackRoots, nullptr, base, argc); | |||
2412 | v->VisitRootPointers(Root::kStackRoots, nullptr, argc + 1, limit); | |||
2413 | } | |||
2414 | } | |||
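// A minimal sketch (illustrative only) of the visitation split above: when
// the frame carries a raw argument count, the tagged slots are visited as two
// sub-ranges, [base, argc) and [argc + 1, limit), so the untagged argc slot
// itself is never treated as an object pointer.
template <typename Slot, typename VisitRange>
void VisitSkippingRawSlot(Slot base, Slot raw_slot, Slot limit,
                          VisitRange visit_range) {
  visit_range(base, raw_slot);       // everything below the raw-value slot
  visit_range(raw_slot + 1, limit);  // everything above it
}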
2415 | ||||
2416 | void JavaScriptFrame::Iterate(RootVisitor* v) const { | |||
2417 | IterateExpressions(v); | |||
2418 | IteratePc(v, pc_address(), constant_pool_address(), LookupCode()); | |||
2419 | } | |||
2420 | ||||
2421 | void InternalFrame::Iterate(RootVisitor* v) const { | |||
2422 | Code code = LookupCode(); | |||
2423 | IteratePc(v, pc_address(), constant_pool_address(), code); | |||
2424 | // Internal frames typically do not receive any arguments, hence their stack | |||
2425 | // only contains tagged pointers. | |||
2426 | // We are misusing the has_tagged_outgoing_params flag here to tell us whether | |||
2427 | // the full stack frame contains only tagged pointers or only raw values. | |||
2428 | // This is used for the WasmCompileLazy builtin, where we actually pass | |||
2429 | // untagged arguments and also store untagged values on the stack. | |||
2430 | if (code.has_tagged_outgoing_params()) IterateExpressions(v); | |||
2431 | } | |||
2432 | ||||
2433 | // ------------------------------------------------------------------------- | |||
2434 | ||||
2435 | namespace { | |||
2436 | ||||
2437 | // Predictably converts a PC to uint32 by calculating its offset from the | |||
2438 | // embedded builtins start or from the respective MemoryChunk. | |||
2439 | uint32_t PcAddressForHashing(Isolate* isolate, Address address) { | |||
2440 | uint32_t hashable_address; | |||
2441 | if (OffHeapInstructionStream::TryGetAddressForHashing(isolate, address, | |||
2442 | &hashable_address)) { | |||
2443 | return hashable_address; | |||
2444 | } | |||
2445 | return ObjectAddressForHashing(address); | |||
2446 | } | |||
2447 | ||||
2448 | } // namespace | |||
2449 | ||||
2450 | InnerPointerToCodeCache::InnerPointerToCodeCacheEntry* | |||
2451 | InnerPointerToCodeCache::GetCacheEntry(Address inner_pointer) { | |||
2452 | isolate_->counters()->pc_to_code()->Increment(); | |||
2453 | DCHECK(base::bits::IsPowerOfTwo(kInnerPointerToCodeCacheSize)); | |||
2454 | uint32_t hash = | |||
2455 | ComputeUnseededHash(PcAddressForHashing(isolate_, inner_pointer)); | |||
2456 | uint32_t index = hash & (kInnerPointerToCodeCacheSize - 1); | |||
2457 | InnerPointerToCodeCacheEntry* entry = cache(index); | |||
2458 | if (entry->inner_pointer == inner_pointer) { | |||
2459 | isolate_->counters()->pc_to_code_cached()->Increment(); | |||
2460 | DCHECK(entry->code == | |||
2461 | isolate_->heap()->GcSafeFindCodeForInnerPointer(inner_pointer)); | |||
2462 | } else { | |||
2463 | // Because this code may be interrupted by a profiling signal that | |||
2464 | // also queries the cache, we cannot update inner_pointer before the code | |||
2465 | // has been set. Otherwise, we risk trying to use a cache entry before | |||
2466 | // the code has been computed. | |||
2467 | entry->code = | |||
2468 | isolate_->heap()->GcSafeFindCodeForInnerPointer(inner_pointer); | |||
2469 | entry->safepoint_entry.Reset(); | |||
2470 | entry->inner_pointer = inner_pointer; | |||
2471 | } | |||
2472 | return entry; | |||
2473 | } | |||
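// A minimal sketch (not V8's implementation) of the direct-mapped cache used
// by GetCacheEntry above: the table size is a power of two, so the slot is
// chosen with `hash & (size - 1)`, and on a miss that slot is simply
// overwritten, filling the payload before the key for the same
// signal-interruption reason noted in the comment above.
#include <cstdint>

struct SketchCacheEntry {
  uintptr_t inner_pointer = 0;  // key
  uintptr_t code = 0;           // payload (stands in for the Code object)
};

constexpr uint32_t kSketchCacheSize = 1u << 10;  // power of two

template <typename Compute>
SketchCacheEntry* LookupSketch(SketchCacheEntry (&cache)[kSketchCacheSize],
                               uintptr_t inner_pointer, uint32_t hash,
                               Compute compute) {
  SketchCacheEntry* entry = &cache[hash & (kSketchCacheSize - 1)];
  if (entry->inner_pointer != inner_pointer) {
    entry->code = compute(inner_pointer);  // set the payload first
    entry->inner_pointer = inner_pointer;  // then publish the key
  }
  return entry;
}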
2474 | ||||
2475 | // Frame layout helper class implementation. | |||
2476 | // ------------------------------------------------------------------------- | |||
2477 | ||||
2478 | namespace { | |||
2479 | ||||
2480 | // Some architectures need to push padding together with the TOS register | |||
2481 | // in order to maintain stack alignment. | |||
2482 | constexpr int TopOfStackRegisterPaddingSlots() { | |||
2483 | return ArgumentPaddingSlots(1); | |||
2484 | } | |||
2485 | ||||
2486 | bool BuiltinContinuationModeIsWithCatch(BuiltinContinuationMode mode) { | |||
2487 | switch (mode) { | |||
2488 | case BuiltinContinuationMode::STUB: | |||
2489 | case BuiltinContinuationMode::JAVASCRIPT: | |||
2490 | return false; | |||
2491 | case BuiltinContinuationMode::JAVASCRIPT_WITH_CATCH: | |||
2492 | case BuiltinContinuationMode::JAVASCRIPT_HANDLE_EXCEPTION: | |||
2493 | return true; | |||
2494 | } | |||
2495 | UNREACHABLE(); | |||
2496 | } | |||
2497 | ||||
2498 | } // namespace | |||
2499 | ||||
2500 | UnoptimizedFrameInfo::UnoptimizedFrameInfo(int parameters_count_with_receiver, | |||
2501 | int translation_height, | |||
2502 | bool is_topmost, bool pad_arguments, | |||
2503 | FrameInfoKind frame_info_kind) { | |||
2504 | const int locals_count = translation_height; | |||
2505 | ||||
2506 | register_stack_slot_count_ = | |||
2507 | UnoptimizedFrameConstants::RegisterStackSlotCount(locals_count); | |||
2508 | ||||
2509 | static constexpr int kTheAccumulator = 1; | |||
2510 | static constexpr int kTopOfStackPadding = TopOfStackRegisterPaddingSlots(); | |||
2511 | int maybe_additional_slots = | |||
2512 | (is_topmost || frame_info_kind == FrameInfoKind::kConservative) | |||
2513 | ? (kTheAccumulator + kTopOfStackPadding) | |||
2514 | : 0; | |||
2515 | frame_size_in_bytes_without_fixed_ = | |||
2516 | (register_stack_slot_count_ + maybe_additional_slots) * | |||
2517 | kSystemPointerSize; | |||
2518 | ||||
2519 | // The 'fixed' part of the frame consists of the incoming parameters and | |||
2520 | // the part described by InterpreterFrameConstants. This will include | |||
2521 | // argument padding, when needed. | |||
2522 | const int parameter_padding_slots = | |||
2523 | pad_arguments ? ArgumentPaddingSlots(parameters_count_with_receiver) : 0; | |||
2524 | const int fixed_frame_size = | |||
2525 | InterpreterFrameConstants::kFixedFrameSize + | |||
2526 | (parameters_count_with_receiver + parameter_padding_slots) * | |||
2527 | kSystemPointerSize; | |||
2528 | frame_size_in_bytes_ = frame_size_in_bytes_without_fixed_ + fixed_frame_size; | |||
2529 | } | |||
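// Worked example for the formula above (illustrative; the concrete constants
// are architecture-dependent): on a 64-bit target with no argument padding,
// a topmost frame with R = RegisterStackSlotCount(locals) register slots and
// P parameters including the receiver has
//   frame_size_without_fixed = (R + 1 /* accumulator */) * 8
//   frame_size = frame_size_without_fixed
//                + InterpreterFrameConstants::kFixedFrameSize + P * 8
// A non-topmost frame built for an exact (non-conservative) layout drops the
// accumulator slot.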
2530 | ||||
2531 | // static | |||
2532 | uint32_t UnoptimizedFrameInfo::GetStackSizeForAdditionalArguments( | |||
2533 | int parameters_count) { | |||
2534 | return (parameters_count + ArgumentPaddingSlots(parameters_count)) * | |||
2535 | kSystemPointerSize; | |||
2536 | } | |||
2537 | ||||
2538 | ConstructStubFrameInfo::ConstructStubFrameInfo(int translation_height, | |||
2539 | bool is_topmost, | |||
2540 | FrameInfoKind frame_info_kind) { | |||
2541 | // Note: This is according to the Translation's notion of 'parameters' which | |||
2542 | // differs from that of the SharedFunctionInfo, e.g. by including the receiver. | |||
2543 | const int parameters_count = translation_height; | |||
2544 | ||||
2545 | // If the construct frame appears to be topmost we should ensure that the | |||
2546 | // value of the result register is preserved during continuation execution. | |||
2547 | // We do this here by "pushing" the result of the constructor function to | |||
2548 | // the top of the reconstructed stack and popping it in | |||
2549 | // {Builtin::kNotifyDeoptimized}. | |||
2550 | ||||
2551 | static constexpr int kTopOfStackPadding = TopOfStackRegisterPaddingSlots(); | |||
2552 | static constexpr int kTheResult = 1; | |||
2553 | const int argument_padding = ArgumentPaddingSlots(parameters_count); | |||
2554 | ||||
2555 | const int adjusted_height = | |||
2556 | (is_topmost || frame_info_kind == FrameInfoKind::kConservative) | |||
2557 | ? parameters_count + argument_padding + kTheResult + | |||
2558 | kTopOfStackPadding | |||
2559 | : parameters_count + argument_padding; | |||
2560 | frame_size_in_bytes_without_fixed_ = adjusted_height * kSystemPointerSize; | |||
2561 | frame_size_in_bytes_ = frame_size_in_bytes_without_fixed_ + | |||
2562 | ConstructFrameConstants::kFixedFrameSize; | |||
2563 | } | |||
2564 | ||||
2565 | BuiltinContinuationFrameInfo::BuiltinContinuationFrameInfo( | |||
2566 | int translation_height, | |||
2567 | const CallInterfaceDescriptor& continuation_descriptor, | |||
2568 | const RegisterConfiguration* register_config, bool is_topmost, | |||
2569 | DeoptimizeKind deopt_kind, BuiltinContinuationMode continuation_mode, | |||
2570 | FrameInfoKind frame_info_kind) { | |||
2571 | const bool is_conservative = frame_info_kind == FrameInfoKind::kConservative; | |||
2572 | ||||
2573 | // Note: This is according to the Translation's notion of 'parameters' which | |||
2574 | // differs from that of the SharedFunctionInfo, e.g. by including the receiver. | |||
2575 | const int parameters_count = translation_height; | |||
2576 | frame_has_result_stack_slot_ = | |||
2577 | !is_topmost || deopt_kind == DeoptimizeKind::kLazy; | |||
2578 | const int result_slot_count = | |||
2579 | (frame_has_result_stack_slot_ || is_conservative) ? 1 : 0; | |||
2580 | ||||
2581 | const int exception_slot_count = | |||
2582 | (BuiltinContinuationModeIsWithCatch(continuation_mode) || is_conservative) | |||
2583 | ? 1 | |||
2584 | : 0; | |||
2585 | ||||
2586 | const int allocatable_register_count = | |||
2587 | register_config->num_allocatable_general_registers(); | |||
2588 | const int padding_slot_count = | |||
2589 | BuiltinContinuationFrameConstants::PaddingSlotCount( | |||
2590 | allocatable_register_count); | |||
2591 | ||||
2592 | const int register_parameter_count = | |||
2593 | continuation_descriptor.GetRegisterParameterCount(); | |||
2594 | translated_stack_parameter_count_ = | |||
2595 | parameters_count - register_parameter_count; | |||
2596 | stack_parameter_count_ = translated_stack_parameter_count_ + | |||
2597 | result_slot_count + exception_slot_count; | |||
2598 | const int stack_param_pad_count = | |||
2599 | ArgumentPaddingSlots(stack_parameter_count_); | |||
2600 | ||||
2601 | // If the builtins frame appears to be topmost we should ensure that the | |||
2602 | // value of the result register is preserved during continuation execution. | |||
2603 | // We do this here by "pushing" the result of the callback function to the | |||
2604 | // top of the reconstructed stack and popping it in | |||
2605 | // {Builtin::kNotifyDeoptimized}. | |||
2606 | static constexpr int kTopOfStackPadding = TopOfStackRegisterPaddingSlots(); | |||
2607 | static constexpr int kTheResult = 1; | |||
2608 | const int push_result_count = | |||
2609 | (is_topmost || is_conservative) ? kTheResult + kTopOfStackPadding : 0; | |||
2610 | ||||
2611 | frame_size_in_bytes_ = | |||
2612 | kSystemPointerSize * (stack_parameter_count_ + stack_param_pad_count + | |||
2613 | allocatable_register_count + padding_slot_count + | |||
2614 | push_result_count) + | |||
2615 | BuiltinContinuationFrameConstants::kFixedFrameSize; | |||
2616 | ||||
2617 | frame_size_in_bytes_above_fp_ = | |||
2618 | kSystemPointerSize * (allocatable_register_count + padding_slot_count + | |||
2619 | push_result_count) + | |||
2620 | (BuiltinContinuationFrameConstants::kFixedFrameSize - | |||
2621 | BuiltinContinuationFrameConstants::kFixedFrameSizeAboveFp); | |||
2622 | } | |||
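// Restating the two sizes computed above (illustrative; a 64-bit target is
// assumed, so each slot is 8 bytes):
//   frame_size_in_bytes = 8 * (stack_parameter_count + stack_param_padding
//                              + allocatable_register_count + padding_slots
//                              + pushed_result_slots)
//                         + kFixedFrameSize
//   frame_size_in_bytes_above_fp = 8 * (allocatable_register_count
//                                       + padding_slots + pushed_result_slots)
//                                  + (kFixedFrameSize - kFixedFrameSizeAboveFp)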
2623 | ||||
2624 | } // namespace internal | |||
2625 | } // namespace v8 |