// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/frames.h"

#include <memory>
#include <sstream>

#include "src/base/bits.h"
#include "src/deoptimizer.h"
#include "src/frames-inl.h"
#include "src/ic/ic-stats.h"
#include "src/register-configuration.h"
#include "src/safepoint-table.h"
#include "src/string-stream.h"
#include "src/visitors.h"
#include "src/vm-state-inl.h"
#include "src/wasm/wasm-code-manager.h"
#include "src/wasm/wasm-engine.h"
#include "src/wasm/wasm-objects-inl.h"
#include "src/zone/zone-containers.h"

namespace v8 {
namespace internal {

ReturnAddressLocationResolver StackFrame::return_address_location_resolver_ =
    nullptr;

// Iterator that supports traversing the stack handlers of a
// particular frame. Needs to know the top of the handler chain.
class StackHandlerIterator BASE_EMBEDDED {
 public:
  StackHandlerIterator(const StackFrame* frame, StackHandler* handler)
      : limit_(frame->fp()), handler_(handler) {
    // Make sure the handler has already been unwound to this frame.
    DCHECK(frame->sp() <= handler->address());
  }

  StackHandler* handler() const { return handler_; }

  bool done() { return handler_ == nullptr || handler_->address() > limit_; }
  void Advance() {
    DCHECK(!done());
    handler_ = handler_->next();
  }

 private:
  const Address limit_;
  StackHandler* handler_;
};


// -------------------------------------------------------------------------


#define INITIALIZE_SINGLETON(type, field) field##_(this),
StackFrameIteratorBase::StackFrameIteratorBase(Isolate* isolate,
                                               bool can_access_heap_objects)
    : isolate_(isolate),
      STACK_FRAME_TYPE_LIST(INITIALIZE_SINGLETON) frame_(nullptr),
      handler_(nullptr),
      can_access_heap_objects_(can_access_heap_objects) {}
#undef INITIALIZE_SINGLETON

StackFrameIterator::StackFrameIterator(Isolate* isolate)
    : StackFrameIterator(isolate, isolate->thread_local_top()) {}

StackFrameIterator::StackFrameIterator(Isolate* isolate, ThreadLocalTop* t)
    : StackFrameIteratorBase(isolate, true) {
  Reset(t);
}

void StackFrameIterator::Advance() {
  DCHECK(!done());
  // Compute the state of the calling frame before restoring
  // callee-saved registers and unwinding handlers. This allows the
  // frame code that computes the caller state to access the top
  // handler and the value of any callee-saved register if needed.
  StackFrame::State state;
  StackFrame::Type type = frame_->GetCallerState(&state);

  // Unwind handlers corresponding to the current frame.
  StackHandlerIterator it(frame_, handler_);
  while (!it.done()) it.Advance();
  handler_ = it.handler();

  // Advance to the calling frame.
  frame_ = SingletonFor(type, &state);

  // When we're done iterating over the stack frames, the handler
  // chain must have been completely unwound.
  DCHECK(!done() || handler_ == nullptr);
}


void StackFrameIterator::Reset(ThreadLocalTop* top) {
  StackFrame::State state;
  StackFrame::Type type = ExitFrame::GetStateForFramePointer(
      Isolate::c_entry_fp(top), &state);
  handler_ = StackHandler::FromAddress(Isolate::handler(top));
  frame_ = SingletonFor(type, &state);
}


StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type,
                                             StackFrame::State* state) {
  StackFrame* result = SingletonFor(type);
  DCHECK((!result) == (type == StackFrame::NONE));
  if (result) result->state_ = *state;
  return result;
}


StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type) {
#define FRAME_TYPE_CASE(type, field) \
  case StackFrame::type:             \
    return &field##_;

  switch (type) {
    case StackFrame::NONE:
      return nullptr;
      STACK_FRAME_TYPE_LIST(FRAME_TYPE_CASE)
    default: break;
  }
  return nullptr;

#undef FRAME_TYPE_CASE
}

// -------------------------------------------------------------------------

void JavaScriptFrameIterator::Advance() {
  do {
    iterator_.Advance();
  } while (!iterator_.done() && !iterator_.frame()->is_java_script());
}

// -------------------------------------------------------------------------

StackTraceFrameIterator::StackTraceFrameIterator(Isolate* isolate)
    : iterator_(isolate) {
  if (!done() && !IsValidFrame(iterator_.frame())) Advance();
}

StackTraceFrameIterator::StackTraceFrameIterator(Isolate* isolate,
                                                 StackFrame::Id id)
    : StackTraceFrameIterator(isolate) {
  while (!done() && frame()->id() != id) Advance();
}

void StackTraceFrameIterator::Advance() {
  do {
    iterator_.Advance();
  } while (!done() && !IsValidFrame(iterator_.frame()));
}

bool StackTraceFrameIterator::IsValidFrame(StackFrame* frame) const {
  if (frame->is_java_script()) {
    JavaScriptFrame* jsFrame = static_cast<JavaScriptFrame*>(frame);
    if (!jsFrame->function()->IsJSFunction()) return false;
    return jsFrame->function()->shared()->IsSubjectToDebugging();
  }
  // Apart from JavaScript frames, only Wasm frames are valid.
  return frame->is_wasm();
}

// -------------------------------------------------------------------------

namespace {

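// Heuristic used by frame-type detection below: returns true if {pc} lies in
// one of the interpreter's entry/dispatch builtins, i.e. the frame described
// by {state} is (or is about to become) an interpreted frame. With
// FLAG_interpreted_frames_native_stack the code containing {pc} may not be
// one of the builtins checked first, so a GC-safe code lookup is used as a
// fallback.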
bool IsInterpreterFramePc(Isolate* isolate, Address pc,
                          StackFrame::State* state) {
  Code* interpreter_entry_trampoline =
      isolate->builtins()->builtin(Builtins::kInterpreterEntryTrampoline);
  Code* interpreter_bytecode_advance =
      isolate->builtins()->builtin(Builtins::kInterpreterEnterBytecodeAdvance);
  Code* interpreter_bytecode_dispatch =
      isolate->builtins()->builtin(Builtins::kInterpreterEnterBytecodeDispatch);

  if (interpreter_entry_trampoline->contains(pc) ||
      interpreter_bytecode_advance->contains(pc) ||
      interpreter_bytecode_dispatch->contains(pc)) {
    return true;
  } else if (FLAG_interpreted_frames_native_stack) {
    intptr_t marker = Memory::intptr_at(
        state->fp + CommonFrameConstants::kContextOrFrameTypeOffset);
    MSAN_MEMORY_IS_INITIALIZED(
        state->fp + StandardFrameConstants::kFunctionOffset, kPointerSize);
    Object* maybe_function =
        Memory::Object_at(state->fp + StandardFrameConstants::kFunctionOffset);
    // There's no need to run a full ContainsSlow if we know the frame can't be
    // an InterpretedFrame, so we do these fast checks first.
    if (StackFrame::IsTypeMarker(marker) || maybe_function->IsSmi()) {
      return false;
    } else if (!isolate->heap()->code_space()->ContainsSlow(pc)) {
      return false;
    }
    interpreter_entry_trampoline =
        isolate->heap()->GcSafeFindCodeForInnerPointer(pc);
    return interpreter_entry_trampoline->is_interpreter_trampoline_builtin();
  } else {
    return false;
  }
}

DISABLE_ASAN Address ReadMemoryAt(Address address) {
  return Memory::Address_at(address);
}

}  // namespace

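// The "safe" iterator is used when the stack was interrupted at an arbitrary
// point (e.g. by the CPU profiler). It never dereferences heap objects and
// validates the frame and every stack address before trusting it.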
SafeStackFrameIterator::SafeStackFrameIterator(
    Isolate* isolate,
    Address fp, Address sp, Address js_entry_sp)
    : StackFrameIteratorBase(isolate, false),
      low_bound_(sp),
      high_bound_(js_entry_sp),
      top_frame_type_(StackFrame::NONE),
      external_callback_scope_(isolate->external_callback_scope()) {
  StackFrame::State state;
  StackFrame::Type type;
  ThreadLocalTop* top = isolate->thread_local_top();
  bool advance_frame = true;
  if (IsValidTop(top)) {
    type = ExitFrame::GetStateForFramePointer(Isolate::c_entry_fp(top), &state);
    top_frame_type_ = type;
  } else if (IsValidStackAddress(fp)) {
    DCHECK_NE(fp, kNullAddress);
    state.fp = fp;
    state.sp = sp;
    state.pc_address = StackFrame::ResolveReturnAddressLocation(
        reinterpret_cast<Address*>(StandardFrame::ComputePCAddress(fp)));

    // If the top of stack is a return address to the interpreter trampoline,
    // then we are likely in a bytecode handler with an elided frame. In that
    // case, set the PC properly and make sure we do not drop the frame.
    if (IsValidStackAddress(sp)) {
      MSAN_MEMORY_IS_INITIALIZED(sp, kPointerSize);
      Address tos = ReadMemoryAt(sp);
      if (IsInterpreterFramePc(isolate, tos, &state)) {
        state.pc_address = reinterpret_cast<Address*>(sp);
        advance_frame = false;
      }
    }

    // StackFrame::ComputeType will read both kContextOffset and kMarkerOffset;
    // we check only that kMarkerOffset is within the stack bounds and
    // statically assert that the kContextOffset slot is pushed on the stack
    // before kMarkerOffset.
    STATIC_ASSERT(StandardFrameConstants::kFunctionOffset <
                  StandardFrameConstants::kContextOffset);
    Address frame_marker = fp + StandardFrameConstants::kFunctionOffset;
    if (IsValidStackAddress(frame_marker)) {
      type = StackFrame::ComputeType(this, &state);
      top_frame_type_ = type;
      // We only keep the top frame if we believe it to be an interpreted frame.
      if (type != StackFrame::INTERPRETED) {
        advance_frame = true;
      }
    } else {
      // Mark the frame as OPTIMIZED if we cannot determine its type.
      // We chose OPTIMIZED rather than INTERPRETED because it's closer to
      // the original value of StackFrame::JAVA_SCRIPT here, in that JAVA_SCRIPT
      // referred to full-codegen frames (now removed from the tree), and
      // OPTIMIZED refers to turbofan frames, both of which are generated
      // code. INTERPRETED frames refer to bytecode.
      // The frame will be skipped anyway.
      type = StackFrame::OPTIMIZED;
      // Top frame is incomplete so we cannot reliably determine its type.
      top_frame_type_ = StackFrame::NONE;
    }
  } else {
    return;
  }
  frame_ = SingletonFor(type, &state);
  if (advance_frame && frame_) Advance();
}


bool SafeStackFrameIterator::IsValidTop(ThreadLocalTop* top) const {
  Address c_entry_fp = Isolate::c_entry_fp(top);
  if (!IsValidExitFrame(c_entry_fp)) return false;
  // There should be at least one JS_ENTRY stack handler.
  Address handler = Isolate::handler(top);
  if (handler == kNullAddress) return false;
  // Check that there are no js frames on top of the native frames.
  return c_entry_fp < handler;
}


void SafeStackFrameIterator::AdvanceOneFrame() {
  DCHECK(!done());
  StackFrame* last_frame = frame_;
  Address last_sp = last_frame->sp(), last_fp = last_frame->fp();
  // Before advancing to the next stack frame, perform pointer validity tests.
  if (!IsValidFrame(last_frame) || !IsValidCaller(last_frame)) {
    frame_ = nullptr;
    return;
  }

  // Advance to the previous frame.
  StackFrame::State state;
  StackFrame::Type type = frame_->GetCallerState(&state);
  frame_ = SingletonFor(type, &state);
  if (!frame_) return;

  // Check that we have actually moved to the previous frame in the stack.
  if (frame_->sp() <= last_sp || frame_->fp() <= last_fp) {
    frame_ = nullptr;
  }
}


bool SafeStackFrameIterator::IsValidFrame(StackFrame* frame) const {
  return IsValidStackAddress(frame->sp()) && IsValidStackAddress(frame->fp());
}


bool SafeStackFrameIterator::IsValidCaller(StackFrame* frame) {
  StackFrame::State state;
  if (frame->is_entry() || frame->is_construct_entry()) {
    // See EntryFrame::GetCallerState. It computes the caller FP address
    // and calls ExitFrame::GetStateForFramePointer on it. We need to be
    // sure that the caller FP address is valid.
    Address caller_fp = Memory::Address_at(
        frame->fp() + EntryFrameConstants::kCallerFPOffset);
    if (!IsValidExitFrame(caller_fp)) return false;
  } else if (frame->is_arguments_adaptor()) {
    // See ArgumentsAdaptorFrame::GetCallerStackPointer. It assumes that
    // the number of arguments is stored on the stack as a Smi. We need to
    // check that it really is a Smi.
    Object* number_of_args = reinterpret_cast<ArgumentsAdaptorFrame*>(frame)->
        GetExpression(0);
    if (!number_of_args->IsSmi()) {
      return false;
    }
  }
  frame->ComputeCallerState(&state);
  return IsValidStackAddress(state.sp) && IsValidStackAddress(state.fp) &&
         SingletonFor(frame->GetCallerState(&state)) != nullptr;
}


bool SafeStackFrameIterator::IsValidExitFrame(Address fp) const {
  if (!IsValidStackAddress(fp)) return false;
  Address sp = ExitFrame::ComputeStackPointer(fp);
  if (!IsValidStackAddress(sp)) return false;
  StackFrame::State state;
  ExitFrame::FillState(fp, sp, &state);
  MSAN_MEMORY_IS_INITIALIZED(state.pc_address, sizeof(state.pc_address));
  return *state.pc_address != kNullAddress;
}


void SafeStackFrameIterator::Advance() {
  while (true) {
    AdvanceOneFrame();
    if (done()) break;
    ExternalCallbackScope* last_callback_scope = nullptr;
    while (external_callback_scope_ != nullptr &&
           external_callback_scope_->scope_address() < frame_->fp()) {
      // As long as the setup of a frame is not atomic, we may happen to be
      // in an interval where an ExternalCallbackScope is already created,
      // but the frame is not yet entered. So we are actually observing
      // the previous frame.
      // Skip all the ExternalCallbackScopes that are below the current fp.
      last_callback_scope = external_callback_scope_;
      external_callback_scope_ = external_callback_scope_->previous();
    }
    if (frame_->is_java_script() || frame_->is_wasm()) break;
    if (frame_->is_exit() || frame_->is_builtin_exit()) {
      // Some of the EXIT frames may have ExternalCallbackScope allocated on
      // top of them. In that case the scope corresponds to the first EXIT
      // frame beneath it. There may be other EXIT frames on top of the
      // ExternalCallbackScope; just skip them as we cannot collect any useful
      // information about them.
      if (last_callback_scope) {
        frame_->state_.pc_address =
            last_callback_scope->callback_entrypoint_address();
      }
      break;
    }
  }
}


// -------------------------------------------------------------------------

namespace {
Code* GetContainingCode(Isolate* isolate, Address pc) {
  return isolate->inner_pointer_to_code_cache()->GetCacheEntry(pc)->code;
}
}  // namespace

Code* StackFrame::LookupCode() const {
  Code* result = GetContainingCode(isolate(), pc());
  DCHECK_GE(pc(), result->InstructionStart());
  DCHECK_LT(pc(), result->InstructionEnd());
  return result;
}

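// Visits the code object that the frame's pc points into. If the visitor
// moves the Code object, the stored pc (and, when embedded constant pools are
// enabled, the constant pool pointer) is rewritten to point into the moved
// code.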
void StackFrame::IteratePc(RootVisitor* v, Address* pc_address,
                           Address* constant_pool_address, Code* holder) {
  Address pc = *pc_address;
  DCHECK(holder->GetHeap()->GcSafeCodeContains(holder, pc));
  unsigned pc_offset = static_cast<unsigned>(pc - holder->InstructionStart());
  Object* code = holder;
  v->VisitRootPointer(Root::kTop, nullptr, &code);
  if (code == holder) return;
  holder = reinterpret_cast<Code*>(code);
  pc = holder->InstructionStart() + pc_offset;
  *pc_address = pc;
  if (FLAG_enable_embedded_constant_pool && constant_pool_address) {
    *constant_pool_address = holder->constant_pool();
  }
}


void StackFrame::SetReturnAddressLocationResolver(
    ReturnAddressLocationResolver resolver) {
  DCHECK_NULL(return_address_location_resolver_);
  return_address_location_resolver_ = resolver;
}

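// Classifies a frame from its raw {state}. Frames that carry an explicit type
// marker are classified from the marker; frames without one are classified by
// inspecting the function slot (on the profiler path, where the heap may not
// be touched) or by looking up the Code / wasm code object that contains the
// current pc.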
StackFrame::Type StackFrame::ComputeType(const StackFrameIteratorBase* iterator,
                                         State* state) {
  DCHECK_NE(state->fp, kNullAddress);

  MSAN_MEMORY_IS_INITIALIZED(
      state->fp + CommonFrameConstants::kContextOrFrameTypeOffset,
      kPointerSize);
  intptr_t marker = Memory::intptr_at(
      state->fp + CommonFrameConstants::kContextOrFrameTypeOffset);
  if (!iterator->can_access_heap_objects_) {
    // TODO(titzer): "can_access_heap_objects" is kind of bogus. It really
    // means that we are being called from the profiler, which can interrupt
    // the VM with a signal at any arbitrary instruction, with essentially
    // anything on the stack. So basically none of these checks are 100%
    // reliable.
    MSAN_MEMORY_IS_INITIALIZED(
        state->fp + StandardFrameConstants::kFunctionOffset, kPointerSize);
    Object* maybe_function =
        Memory::Object_at(state->fp + StandardFrameConstants::kFunctionOffset);
    if (!StackFrame::IsTypeMarker(marker)) {
      if (maybe_function->IsSmi()) {
        return NATIVE;
      } else if (IsInterpreterFramePc(iterator->isolate(), *(state->pc_address),
                                      state)) {
        return INTERPRETED;
      } else {
        return OPTIMIZED;
      }
    }
  } else {
    Address pc = *(state->pc_address);
    // If the {pc} does not point into WebAssembly code we can rely on the
    // returned {wasm_code} to be null and fall back to {GetContainingCode}.
    wasm::WasmCode* wasm_code =
        iterator->isolate()->wasm_engine()->code_manager()->LookupCode(pc);
    if (wasm_code != nullptr) {
      switch (wasm_code->kind()) {
        case wasm::WasmCode::kInterpreterStub:
          return WASM_INTERPRETER_ENTRY;
        case wasm::WasmCode::kFunction:
          return WASM_COMPILED;
        case wasm::WasmCode::kLazyStub:
          if (StackFrame::IsTypeMarker(marker)) break;
          return BUILTIN;
        case wasm::WasmCode::kWasmToJsWrapper:
          return WASM_TO_JS;
        default:
          UNREACHABLE();
      }
    } else {
      // Look up the code object to figure out the type of the stack frame.
      Code* code_obj = GetContainingCode(iterator->isolate(), pc);
      if (code_obj != nullptr) {
        switch (code_obj->kind()) {
          case Code::BUILTIN:
            if (StackFrame::IsTypeMarker(marker)) break;
            if (code_obj->is_interpreter_trampoline_builtin()) {
              return INTERPRETED;
            }
            if (code_obj->is_turbofanned()) {
              // TODO(bmeurer): We treat frames for BUILTIN Code objects as
              // OptimizedFrame for now (all the builtins with JavaScript
              // linkage are actually generated with TurboFan currently, so
              // this is sound).
              return OPTIMIZED;
            }
            return BUILTIN;
          case Code::OPTIMIZED_FUNCTION:
            return OPTIMIZED;
          case Code::WASM_FUNCTION:
            return WASM_COMPILED;
          case Code::WASM_TO_JS_FUNCTION:
            return WASM_TO_JS;
          case Code::JS_TO_WASM_FUNCTION:
            return JS_TO_WASM;
          case Code::WASM_INTERPRETER_ENTRY:
            return WASM_INTERPRETER_ENTRY;
          case Code::C_WASM_ENTRY:
            return C_WASM_ENTRY;
          default:
            // All other types should have an explicit marker
            break;
        }
      } else {
        return NATIVE;
      }
    }
  }
  DCHECK(StackFrame::IsTypeMarker(marker));
  StackFrame::Type candidate = StackFrame::MarkerToType(marker);
  switch (candidate) {
    case ENTRY:
    case CONSTRUCT_ENTRY:
    case EXIT:
    case BUILTIN_CONTINUATION:
    case JAVA_SCRIPT_BUILTIN_CONTINUATION:
    case JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH:
    case BUILTIN_EXIT:
    case STUB:
    case INTERNAL:
    case CONSTRUCT:
    case ARGUMENTS_ADAPTOR:
    case WASM_TO_JS:
    case WASM_COMPILED:
      return candidate;
    case JS_TO_WASM:
    case OPTIMIZED:
    case INTERPRETED:
    default:
      // Unoptimized and optimized JavaScript frames, including
      // interpreted frames, should never have a StackFrame::Type
      // marker. If we find one, we're likely being called from the
      // profiler in a bogus stack frame.
      return NATIVE;
  }
}


#ifdef DEBUG
bool StackFrame::can_access_heap_objects() const {
  return iterator_->can_access_heap_objects_;
}
#endif


StackFrame::Type StackFrame::GetCallerState(State* state) const {
  ComputeCallerState(state);
  return ComputeType(iterator_, state);
}


Address StackFrame::UnpaddedFP() const {
  return fp();
}

void NativeFrame::ComputeCallerState(State* state) const {
  state->sp = caller_sp();
  state->fp = Memory::Address_at(fp() + CommonFrameConstants::kCallerFPOffset);
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(fp() + CommonFrameConstants::kCallerPCOffset));
  state->callee_pc_address = nullptr;
  state->constant_pool_address = nullptr;
}

Code* EntryFrame::unchecked_code() const {
  return isolate()->heap()->js_entry_code();
}


void EntryFrame::ComputeCallerState(State* state) const {
  GetCallerState(state);
}


StackFrame::Type EntryFrame::GetCallerState(State* state) const {
  const int offset = EntryFrameConstants::kCallerFPOffset;
  Address fp = Memory::Address_at(this->fp() + offset);
  return ExitFrame::GetStateForFramePointer(fp, state);
}

Code* ConstructEntryFrame::unchecked_code() const {
  return isolate()->heap()->js_construct_entry_code();
}


Object*& ExitFrame::code_slot() const {
  const int offset = ExitFrameConstants::kCodeOffset;
  return Memory::Object_at(fp() + offset);
}

Code* ExitFrame::unchecked_code() const {
  return reinterpret_cast<Code*>(code_slot());
}


void ExitFrame::ComputeCallerState(State* state) const {
  // Set up the caller state.
  state->sp = caller_sp();
  state->fp = Memory::Address_at(fp() + ExitFrameConstants::kCallerFPOffset);
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(fp() + ExitFrameConstants::kCallerPCOffset));
  state->callee_pc_address = nullptr;
  if (FLAG_enable_embedded_constant_pool) {
    state->constant_pool_address = reinterpret_cast<Address*>(
        fp() + ExitFrameConstants::kConstantPoolOffset);
  }
}


void ExitFrame::Iterate(RootVisitor* v) const {
  // The arguments are traversed as part of the expression stack of
  // the calling frame.
  IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
  v->VisitRootPointer(Root::kTop, nullptr, &code_slot());
}


Address ExitFrame::GetCallerStackPointer() const {
  return fp() + ExitFrameConstants::kCallerSPOffset;
}


StackFrame::Type ExitFrame::GetStateForFramePointer(Address fp, State* state) {
  if (fp == 0) return NONE;
  Address sp = ComputeStackPointer(fp);
  FillState(fp, sp, state);
  DCHECK_NE(*state->pc_address, kNullAddress);

  return ComputeFrameType(fp);
}

StackFrame::Type ExitFrame::ComputeFrameType(Address fp) {
  // Distinguish between regular and builtin exit frames.
  // Default to EXIT in all hairy cases (e.g., when called from the profiler).
  const int offset = ExitFrameConstants::kFrameTypeOffset;
  Object* marker = Memory::Object_at(fp + offset);

  if (!marker->IsSmi()) {
    return EXIT;
  }

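  // The frame type marker is stored as a Smi-tagged small integer (the same
  // encoding StackFrame::MarkerToType expects), so shifting out the low tag
  // bit recovers the frame type.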
  intptr_t marker_int = bit_cast<intptr_t>(marker);

  StackFrame::Type frame_type = static_cast<StackFrame::Type>(marker_int >> 1);
  if (frame_type == EXIT || frame_type == BUILTIN_EXIT) {
    return frame_type;
  }

  return EXIT;
}

Address ExitFrame::ComputeStackPointer(Address fp) {
  MSAN_MEMORY_IS_INITIALIZED(fp + ExitFrameConstants::kSPOffset, kPointerSize);
  return Memory::Address_at(fp + ExitFrameConstants::kSPOffset);
}

void ExitFrame::FillState(Address fp, Address sp, State* state) {
  state->sp = sp;
  state->fp = fp;
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(sp - 1 * kPCOnStackSize));
  state->callee_pc_address = nullptr;
  // The constant pool recorded in the exit frame is not associated
  // with the pc in this state (the return address into a C entry
  // stub).  ComputeCallerState will retrieve the constant pool
  // together with the associated caller pc.
  state->constant_pool_address = nullptr;
}

JSFunction* BuiltinExitFrame::function() const {
  return JSFunction::cast(target_slot_object());
}

Object* BuiltinExitFrame::receiver() const { return receiver_slot_object(); }

bool BuiltinExitFrame::IsConstructor() const {
  return !new_target_slot_object()->IsUndefined(isolate());
}

Object* BuiltinExitFrame::GetParameter(int i) const {
  DCHECK(i >= 0 && i < ComputeParametersCount());
  int offset =
      BuiltinExitFrameConstants::kFirstArgumentOffset + i * kPointerSize;
  return Memory::Object_at(fp() + offset);
}

int BuiltinExitFrame::ComputeParametersCount() const {
  Object* argc_slot = argc_slot_object();
  DCHECK(argc_slot->IsSmi());
  // Argc also counts the receiver, target, new target, and argc itself as
  // args; therefore the real argument count is argc - 4.
  int argc = Smi::ToInt(argc_slot) - 4;
  DCHECK_GE(argc, 0);
  return argc;
}

namespace {
void PrintIndex(StringStream* accumulator, StackFrame::PrintMode mode,
                int index) {
  accumulator->Add((mode == StackFrame::OVERVIEW) ? "%5d: " : "[%d]: ", index);
}

const char* StringForStackFrameType(StackFrame::Type type) {
  switch (type) {
#define CASE(value, name) \
  case StackFrame::value: \
    return #name;
    STACK_FRAME_TYPE_LIST(CASE)
#undef CASE
    default:
      UNREACHABLE();
  }
}
}  // namespace

void StackFrame::Print(StringStream* accumulator, PrintMode mode,
                       int index) const {
  DisallowHeapAllocation no_gc;
  PrintIndex(accumulator, mode, index);
  accumulator->Add(StringForStackFrameType(type()));
  accumulator->Add(" [pc: %p]\n", reinterpret_cast<void*>(pc()));
}

void BuiltinExitFrame::Print(StringStream* accumulator, PrintMode mode,
                             int index) const {
  DisallowHeapAllocation no_gc;
  Object* receiver = this->receiver();
  JSFunction* function = this->function();

  accumulator->PrintSecurityTokenIfChanged(function);
  PrintIndex(accumulator, mode, index);
  accumulator->Add("builtin exit frame: ");
  Code* code = nullptr;
  if (IsConstructor()) accumulator->Add("new ");
  accumulator->PrintFunction(function, receiver, &code);

  accumulator->Add("(this=%o", receiver);

  // Print the parameters.
  int parameters_count = ComputeParametersCount();
  for (int i = 0; i < parameters_count; i++) {
    accumulator->Add(",%o", GetParameter(i));
  }

  accumulator->Add(")\n\n");
}

Address StandardFrame::GetExpressionAddress(int n) const {
  const int offset = StandardFrameConstants::kExpressionsOffset;
  return fp() + offset - n * kPointerSize;
}

Address InterpretedFrame::GetExpressionAddress(int n) const {
  const int offset = InterpreterFrameConstants::kExpressionsOffset;
  return fp() + offset - n * kPointerSize;
}

Script* StandardFrame::script() const {
  // This should only be called on frames which override this method.
  DCHECK(false);
  return nullptr;
}

Object* StandardFrame::receiver() const {
  return isolate()->heap()->undefined_value();
}

Object* StandardFrame::context() const {
  return isolate()->heap()->undefined_value();
}

int StandardFrame::position() const {
  AbstractCode* code = AbstractCode::cast(LookupCode());
  int code_offset = static_cast<int>(pc() - code->InstructionStart());
  return code->SourcePosition(code_offset);
}

int StandardFrame::ComputeExpressionsCount() const {
  Address base = GetExpressionAddress(0);
  Address limit = sp() - kPointerSize;
  DCHECK(base >= limit);  // stack grows downwards
  // Include register-allocated locals in number of expressions.
  return static_cast<int>((base - limit) / kPointerSize);
}

Object* StandardFrame::GetParameter(int index) const {
  // StandardFrame does not define any parameters.
  UNREACHABLE();
}

int StandardFrame::ComputeParametersCount() const { return 0; }

void StandardFrame::ComputeCallerState(State* state) const {
  state->sp = caller_sp();
  state->fp = caller_fp();
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(ComputePCAddress(fp())));
  state->callee_pc_address = pc_address();
  state->constant_pool_address =
      reinterpret_cast<Address*>(ComputeConstantPoolAddress(fp()));
}


bool StandardFrame::IsConstructor() const { return false; }

void StandardFrame::Summarize(std::vector<FrameSummary>* functions) const {
  // This should only be called on frames which override this method.
  UNREACHABLE();
}

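// Visits all GC roots stored in a compiled (TurboFan or WebAssembly) frame:
// outgoing arguments above any saved registers, saved registers that hold
// pointers, tagged spill slots described by the safepoint table, the return
// address (via IteratePc), and the fixed frame header (context and function
// for JS frames, the instance object for WebAssembly frames).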
void StandardFrame::IterateCompiledFrame(RootVisitor* v) const {
  // Make sure that we're not doing "safe" stack frame iteration. We cannot
  // possibly find pointers in optimized frames in that state.
  DCHECK(can_access_heap_objects());

  // Find the code and compute the safepoint information.
  Address inner_pointer = pc();
  const wasm::WasmCode* wasm_code =
      isolate()->wasm_engine()->code_manager()->LookupCode(inner_pointer);
  SafepointEntry safepoint_entry;
  uint32_t stack_slots;
  Code* code = nullptr;
  bool has_tagged_params = false;
  if (wasm_code != nullptr) {
    SafepointTable table(wasm_code->instruction_start(),
                         wasm_code->safepoint_table_offset(),
                         wasm_code->stack_slots());
    safepoint_entry = table.FindEntry(inner_pointer);
    stack_slots = wasm_code->stack_slots();
    has_tagged_params = wasm_code->kind() != wasm::WasmCode::kFunction;
  } else {
    InnerPointerToCodeCache::InnerPointerToCodeCacheEntry* entry =
        isolate()->inner_pointer_to_code_cache()->GetCacheEntry(inner_pointer);
    if (!entry->safepoint_entry.is_valid()) {
      entry->safepoint_entry = entry->code->GetSafepointEntry(inner_pointer);
      DCHECK(entry->safepoint_entry.is_valid());
    } else {
      DCHECK(entry->safepoint_entry.Equals(
          entry->code->GetSafepointEntry(inner_pointer)));
    }

    code = entry->code;
    safepoint_entry = entry->safepoint_entry;
    stack_slots = code->stack_slots();
    has_tagged_params = code->has_tagged_params();
  }
  uint32_t slot_space = stack_slots * kPointerSize;

  // Determine the fixed header and spill slot area size.
  int frame_header_size = StandardFrameConstants::kFixedFrameSizeFromFp;
  intptr_t marker =
      Memory::intptr_at(fp() + CommonFrameConstants::kContextOrFrameTypeOffset);
  if (StackFrame::IsTypeMarker(marker)) {
    StackFrame::Type candidate = StackFrame::MarkerToType(marker);
    switch (candidate) {
      case ENTRY:
      case CONSTRUCT_ENTRY:
      case EXIT:
      case BUILTIN_CONTINUATION:
      case JAVA_SCRIPT_BUILTIN_CONTINUATION:
      case JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH:
      case BUILTIN_EXIT:
      case ARGUMENTS_ADAPTOR:
      case STUB:
      case INTERNAL:
      case CONSTRUCT:
      case JS_TO_WASM:
      case C_WASM_ENTRY:
        frame_header_size = TypedFrameConstants::kFixedFrameSizeFromFp;
        break;
      case WASM_TO_JS:
      case WASM_COMPILED:
      case WASM_INTERPRETER_ENTRY:
        frame_header_size = WasmCompiledFrameConstants::kFixedFrameSizeFromFp;
        break;
      case OPTIMIZED:
      case INTERPRETED:
      case BUILTIN:
        // These frame types have a context, but it is stored in the stack
        // slot where one would otherwise find the frame type marker, so they
        // should never carry a marker.
        UNREACHABLE();
        break;
      case NATIVE:
      case NONE:
      case NUMBER_OF_TYPES:
      case MANUAL:
        UNREACHABLE();
        break;
    }
  }
  slot_space -=
      (frame_header_size + StandardFrameConstants::kFixedFrameSizeAboveFp);

  Object** frame_header_base = &Memory::Object_at(fp() - frame_header_size);
  Object** frame_header_limit =
      &Memory::Object_at(fp() - StandardFrameConstants::kCPSlotSize);
  Object** parameters_base = &Memory::Object_at(sp());
  Object** parameters_limit = frame_header_base - slot_space / kPointerSize;

  // Visit the parameters that may be on top of the saved registers.
  if (safepoint_entry.argument_count() > 0) {
    v->VisitRootPointers(Root::kTop, nullptr, parameters_base,
                         parameters_base + safepoint_entry.argument_count());
    parameters_base += safepoint_entry.argument_count();
  }

  // Skip saved double registers.
  if (safepoint_entry.has_doubles()) {
    // Number of doubles not known at snapshot time.
    DCHECK(!isolate()->serializer_enabled());
    parameters_base +=
        RegisterConfiguration::Default()->num_allocatable_double_registers() *
        kDoubleSize / kPointerSize;
  }

  // Visit the registers that contain pointers if any.
  if (safepoint_entry.HasRegisters()) {
    for (int i = kNumSafepointRegisters - 1; i >= 0; i--) {
      if (safepoint_entry.HasRegisterAt(i)) {
        int reg_stack_index = MacroAssembler::SafepointRegisterStackIndex(i);
        v->VisitRootPointer(Root::kTop, nullptr,
                            parameters_base + reg_stack_index);
      }
    }
    // Skip the words containing the register values.
    parameters_base += kNumSafepointRegisters;
  }

  // We're done dealing with the register bits.
  uint8_t* safepoint_bits = safepoint_entry.bits();
  safepoint_bits += kNumSafepointRegisters >> kBitsPerByteLog2;

  // Visit the rest of the parameters if they are tagged.
  if (has_tagged_params) {
    v->VisitRootPointers(Root::kTop, nullptr, parameters_base,
                         parameters_limit);
  }

  // Visit pointer spill slots and locals.
  for (unsigned index = 0; index < stack_slots; index++) {
    int byte_index = index >> kBitsPerByteLog2;
    int bit_index = index & (kBitsPerByte - 1);
    if ((safepoint_bits[byte_index] & (1U << bit_index)) != 0) {
      v->VisitRootPointer(Root::kTop, nullptr, parameters_limit + index);
    }
  }

  // For the off-heap code cases, we can skip this.
  if (code != nullptr) {
    // Visit the return address in the callee and incoming arguments.
    IteratePc(v, pc_address(), constant_pool_address(), code);
  }

  // If this frame has JavaScript ABI, visit the context (in stub and JS
  // frames) and the function (in JS frames). If it has WebAssembly ABI, visit
  // the instance object.
  v->VisitRootPointers(Root::kTop, nullptr, frame_header_base,
                       frame_header_limit);
}

void StubFrame::Iterate(RootVisitor* v) const { IterateCompiledFrame(v); }

Code* StubFrame::unchecked_code() const {
  return isolate()->FindCodeObject(pc());
}


Address StubFrame::GetCallerStackPointer() const {
  return fp() + ExitFrameConstants::kCallerSPOffset;
}


int StubFrame::GetNumberOfIncomingArguments() const {
  return 0;
}

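// Stub (builtin) frames use a return-address keyed handler table: the lookup
// is done on the pc offset of the call site rather than on a range of
// bytecode offsets.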
int StubFrame::LookupExceptionHandlerInTable(int* stack_slots) {
  Code* code = LookupCode();
  DCHECK(code->is_turbofanned());
  DCHECK_EQ(code->kind(), Code::BUILTIN);
  HandlerTable table(code);
  int pc_offset = static_cast<int>(pc() - code->InstructionStart());
  *stack_slots = code->stack_slots();
  return table.LookupReturn(pc_offset);
}

void OptimizedFrame::Iterate(RootVisitor* v) const { IterateCompiledFrame(v); }

void JavaScriptFrame::SetParameterValue(int index, Object* value) const {
  Memory::Object_at(GetParameterSlot(index)) = value;
}


bool JavaScriptFrame::IsConstructor() const {
  Address fp = caller_fp();
  if (has_adapted_arguments()) {
    // Skip the arguments adaptor frame and look at the real caller.
    fp = Memory::Address_at(fp + StandardFrameConstants::kCallerFPOffset);
  }
  return IsConstructFrame(fp);
}


bool JavaScriptFrame::HasInlinedFrames() const {
  std::vector<SharedFunctionInfo*> functions;
  GetFunctions(&functions);
  return functions.size() > 1;
}


Code* JavaScriptFrame::unchecked_code() const {
  return function()->code();
}


int JavaScriptFrame::GetNumberOfIncomingArguments() const {
  DCHECK(can_access_heap_objects() &&
         isolate()->heap()->gc_state() == Heap::NOT_IN_GC);
  return function()->shared()->internal_formal_parameter_count();
}

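// Builtins with JavaScript linkage have no static formal parameter count, so
// the argument count is read directly out of the frame; other optimized code
// falls back to the shared function info via the JavaScriptFrame path.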
int OptimizedFrame::GetNumberOfIncomingArguments() const {
  Code* code = LookupCode();
  if (code->kind() == Code::BUILTIN) {
    return static_cast<int>(
        Memory::intptr_at(fp() + OptimizedBuiltinFrameConstants::kArgCOffset));
  } else {
    return JavaScriptFrame::GetNumberOfIncomingArguments();
  }
}

Address JavaScriptFrame::GetCallerStackPointer() const {
  return fp() + StandardFrameConstants::kCallerSPOffset;
}

void JavaScriptFrame::GetFunctions(
    std::vector<SharedFunctionInfo*>* functions) const {
  DCHECK(functions->empty());
  functions->push_back(function()->shared());
}

void JavaScriptFrame::GetFunctions(
    std::vector<Handle<SharedFunctionInfo>>* functions) const {
  DCHECK(functions->empty());
  std::vector<SharedFunctionInfo*> raw_functions;
  GetFunctions(&raw_functions);
  for (const auto& raw_function : raw_functions) {
    functions->push_back(Handle<SharedFunctionInfo>(raw_function));
  }
}

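// An unoptimized JavaScript frame corresponds to exactly one JS function, so a
// single summary is produced here; OptimizedFrame::Summarize instead
// reconstructs one summary per inlined frame from the deoptimization data.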
void JavaScriptFrame::Summarize(std::vector<FrameSummary>* functions) const {
  DCHECK(functions->empty());
  Code* code = LookupCode();
  int offset = static_cast<int>(pc() - code->InstructionStart());
  AbstractCode* abstract_code = AbstractCode::cast(code);
  FrameSummary::JavaScriptFrameSummary summary(isolate(), receiver(),
                                               function(), abstract_code,
                                               offset, IsConstructor());
  functions->push_back(summary);
}

JSFunction* JavaScriptFrame::function() const {
  return JSFunction::cast(function_slot_object());
}

Object* JavaScriptFrame::unchecked_function() const {
  // During deoptimization of an optimized function, we may have yet to
  // materialize some closures on the stack. The arguments marker object
  // marks this case.
  DCHECK(function_slot_object()->IsJSFunction() ||
         isolate()->heap()->arguments_marker() == function_slot_object());
  return function_slot_object();
}

Object* JavaScriptFrame::receiver() const { return GetParameter(-1); }

Object* JavaScriptFrame::context() const {
  const int offset = StandardFrameConstants::kContextOffset;
  Object* maybe_result = Memory::Object_at(fp() + offset);
  DCHECK(!maybe_result->IsSmi());
  return maybe_result;
}

Script* JavaScriptFrame::script() const {
  return Script::cast(function()->shared()->script());
}

int JavaScriptFrame::LookupExceptionHandlerInTable(
    int* stack_depth, HandlerTable::CatchPrediction* prediction) {
  DCHECK_EQ(0, LookupCode()->handler_table_offset());
  DCHECK(!LookupCode()->is_optimized_code());
  return -1;
}

void JavaScriptFrame::PrintFunctionAndOffset(JSFunction* function,
                                             AbstractCode* code,
                                             int code_offset, FILE* file,
                                             bool print_line_number) {
  PrintF(file, "%s", function->IsOptimized() ? "*" : "~");
  function->PrintName(file);
  PrintF(file, "+%d", code_offset);
  if (print_line_number) {
    SharedFunctionInfo* shared = function->shared();
    int source_pos = code->SourcePosition(code_offset);
    Object* maybe_script = shared->script();
    if (maybe_script->IsScript()) {
      Script* script = Script::cast(maybe_script);
      int line = script->GetLineNumber(source_pos) + 1;
      Object* script_name_raw = script->name();
      if (script_name_raw->IsString()) {
        String* script_name = String::cast(script->name());
        std::unique_ptr<char[]> c_script_name =
            script_name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
        PrintF(file, " at %s:%d", c_script_name.get(), line);
      } else {
        PrintF(file, " at <unknown>:%d", line);
      }
    } else {
      PrintF(file, " at <unknown>:<unknown>");
    }
  }
}

void JavaScriptFrame::PrintTop(Isolate* isolate, FILE* file, bool print_args,
                               bool print_line_number) {
  // constructor calls
  DisallowHeapAllocation no_allocation;
  JavaScriptFrameIterator it(isolate);
  while (!it.done()) {
    if (it.frame()->is_java_script()) {
      JavaScriptFrame* frame = it.frame();
      if (frame->IsConstructor()) PrintF(file, "new ");
      JSFunction* function = frame->function();
      int code_offset = 0;
      if (frame->is_interpreted()) {
        InterpretedFrame* iframe = reinterpret_cast<InterpretedFrame*>(frame);
        code_offset = iframe->GetBytecodeOffset();
      } else {
        Code* code = frame->unchecked_code();
        code_offset = static_cast<int>(frame->pc() - code->InstructionStart());
      }
      PrintFunctionAndOffset(function, function->abstract_code(), code_offset,
                             file, print_line_number);
      if (print_args) {
        // function arguments
        // (we are intentionally only printing the actually
        // supplied parameters, not all parameters required)
        PrintF(file, "(this=");
        frame->receiver()->ShortPrint(file);
        const int length = frame->ComputeParametersCount();
        for (int i = 0; i < length; i++) {
          PrintF(file, ", ");
          frame->GetParameter(i)->ShortPrint(file);
        }
        PrintF(file, ")");
      }
      break;
    }
    it.Advance();
  }
}

void JavaScriptFrame::CollectFunctionAndOffsetForICStats(JSFunction* function,
                                                         AbstractCode* code,
                                                         int code_offset) {
  auto ic_stats = ICStats::instance();
  ICInfo& ic_info = ic_stats->Current();
  SharedFunctionInfo* shared = function->shared();

  ic_info.function_name = ic_stats->GetOrCacheFunctionName(function);
  ic_info.script_offset = code_offset;

  int source_pos = code->SourcePosition(code_offset);
  Object* maybe_script = shared->script();
  if (maybe_script->IsScript()) {
    Script* script = Script::cast(maybe_script);
    ic_info.line_num = script->GetLineNumber(source_pos) + 1;
    ic_info.script_name = ic_stats->GetOrCacheScriptName(script);
  }
}

void JavaScriptFrame::CollectTopFrameForICStats(Isolate* isolate) {
  // constructor calls
  DisallowHeapAllocation no_allocation;
  JavaScriptFrameIterator it(isolate);
  ICInfo& ic_info = ICStats::instance()->Current();
  while (!it.done()) {
    if (it.frame()->is_java_script()) {
      JavaScriptFrame* frame = it.frame();
      if (frame->IsConstructor()) ic_info.is_constructor = true;
      JSFunction* function = frame->function();
      int code_offset = 0;
      if (frame->is_interpreted()) {
        InterpretedFrame* iframe = reinterpret_cast<InterpretedFrame*>(frame);
        code_offset = iframe->GetBytecodeOffset();
      } else {
        Code* code = frame->unchecked_code();
        code_offset = static_cast<int>(frame->pc() - code->InstructionStart());
      }
      CollectFunctionAndOffsetForICStats(function, function->abstract_code(),
                                         code_offset);
      return;
    }
    it.Advance();
  }
}

Object* JavaScriptFrame::GetParameter(int index) const {
  return Memory::Object_at(GetParameterSlot(index));
}

int JavaScriptFrame::ComputeParametersCount() const {
  return GetNumberOfIncomingArguments();
}

int JavaScriptBuiltinContinuationFrame::ComputeParametersCount() const {
  // Assert that the first allocatable register is also the argument count
  // register.
  DCHECK_EQ(RegisterConfiguration::Default()->GetAllocatableGeneralCode(0),
            kJavaScriptCallArgCountRegister.code());
  Object* argc_object =
      Memory::Object_at(fp() + BuiltinContinuationFrameConstants::kArgCOffset);
  return Smi::ToInt(argc_object);
}

intptr_t JavaScriptBuiltinContinuationFrame::GetSPToFPDelta() const {
  Address height_slot =
      fp() + BuiltinContinuationFrameConstants::kFrameSPtoFPDeltaAtDeoptimize;
  intptr_t height = Smi::ToInt(*reinterpret_cast<Smi**>(height_slot));
  return height;
}

Object* JavaScriptBuiltinContinuationFrame::context() const {
  return Memory::Object_at(
      fp() + BuiltinContinuationFrameConstants::kBuiltinContextOffset);
}

void JavaScriptBuiltinContinuationWithCatchFrame::SetException(
    Object* exception) {
  Address exception_argument_slot =
      fp() + JavaScriptFrameConstants::kLastParameterOffset +
      kPointerSize;  // Skip over return value slot.

  // Only allow setting exception if previous value was the hole.
  CHECK_EQ(isolate()->heap()->the_hole_value(),
           Memory::Object_at(exception_argument_slot));
  Memory::Object_at(exception_argument_slot) = exception;
}

FrameSummary::JavaScriptFrameSummary::JavaScriptFrameSummary(
    Isolate* isolate, Object* receiver, JSFunction* function,
    AbstractCode* abstract_code, int code_offset, bool is_constructor)
    : FrameSummaryBase(isolate, FrameSummary::JAVA_SCRIPT),
      receiver_(receiver, isolate),
      function_(function, isolate),
      abstract_code_(abstract_code, isolate),
      code_offset_(code_offset),
      is_constructor_(is_constructor) {
  DCHECK(abstract_code->IsBytecodeArray() ||
         Code::cast(abstract_code)->kind() != Code::OPTIMIZED_FUNCTION);
}

bool FrameSummary::JavaScriptFrameSummary::is_subject_to_debugging() const {
  return function()->shared()->IsSubjectToDebugging();
}

int FrameSummary::JavaScriptFrameSummary::SourcePosition() const {
  return abstract_code()->SourcePosition(code_offset());
}

int FrameSummary::JavaScriptFrameSummary::SourceStatementPosition() const {
  return abstract_code()->SourceStatementPosition(code_offset());
}

Handle<Object> FrameSummary::JavaScriptFrameSummary::script() const {
  return handle(function_->shared()->script(), isolate());
}

Handle<String> FrameSummary::JavaScriptFrameSummary::FunctionName() const {
  return JSFunction::GetDebugName(function_);
}

Handle<Context> FrameSummary::JavaScriptFrameSummary::native_context() const {
  return handle(function_->context()->native_context(), isolate());
}

FrameSummary::WasmFrameSummary::WasmFrameSummary(
    Isolate* isolate, FrameSummary::Kind kind,
    Handle<WasmInstanceObject> instance, bool at_to_number_conversion)
    : FrameSummaryBase(isolate, kind),
      wasm_instance_(instance),
      at_to_number_conversion_(at_to_number_conversion) {}

Handle<Object> FrameSummary::WasmFrameSummary::receiver() const {
  return wasm_instance_->GetIsolate()->global_proxy();
}

#define WASM_SUMMARY_DISPATCH(type, name)                                      \
  type FrameSummary::WasmFrameSummary::name() const {                          \
    DCHECK(kind() == Kind::WASM_COMPILED || kind() == Kind::WASM_INTERPRETED); \
    return kind() == Kind::WASM_COMPILED                                       \
               ? static_cast<const WasmCompiledFrameSummary*>(this)->name()    \
               : static_cast<const WasmInterpretedFrameSummary*>(this)         \
                     ->name();                                                 \
  }

WASM_SUMMARY_DISPATCH(uint32_t, function_index)
WASM_SUMMARY_DISPATCH(int, byte_offset)

#undef WASM_SUMMARY_DISPATCH

int FrameSummary::WasmFrameSummary::SourcePosition() const {
  Handle<WasmSharedModuleData> shared(
      wasm_instance()->module_object()->shared(), isolate());
  return WasmSharedModuleData::GetSourcePosition(
      shared, function_index(), byte_offset(), at_to_number_conversion());
}

Handle<Script> FrameSummary::WasmFrameSummary::script() const {
  return handle(wasm_instance()->module_object()->shared()->script());
}

Handle<String> FrameSummary::WasmFrameSummary::FunctionName() const {
  Handle<WasmSharedModuleData> shared(
      wasm_instance()->module_object()->shared(), isolate());
  return WasmSharedModuleData::GetFunctionName(isolate(), shared,
                                               function_index());
}

Handle<Context> FrameSummary::WasmFrameSummary::native_context() const {
  return handle(wasm_instance()->native_context(), isolate());
}

FrameSummary::WasmCompiledFrameSummary::WasmCompiledFrameSummary(
    Isolate* isolate, Handle<WasmInstanceObject> instance, wasm::WasmCode* code,
    int code_offset, bool at_to_number_conversion)
    : WasmFrameSummary(isolate, WASM_COMPILED, instance,
                       at_to_number_conversion),
      code_(code),
      code_offset_(code_offset) {}

uint32_t FrameSummary::WasmCompiledFrameSummary::function_index() const {
  return code()->index();
}

int FrameSummary::WasmCompiledFrameSummary::GetWasmSourcePosition(
    const wasm::WasmCode* code, int offset) {
  int position = 0;
  // Subtract one because the current PC is one instruction after the call site.
  offset--;
  for (SourcePositionTableIterator iterator(code->source_positions());
       !iterator.done() && iterator.code_offset() <= offset;
       iterator.Advance()) {
    position = iterator.source_position().ScriptOffset();
  }
  return position;
}

int FrameSummary::WasmCompiledFrameSummary::byte_offset() const {
  return GetWasmSourcePosition(code_, code_offset());
}

FrameSummary::WasmInterpretedFrameSummary::WasmInterpretedFrameSummary(
    Isolate* isolate, Handle<WasmInstanceObject> instance,
    uint32_t function_index, int byte_offset)
    : WasmFrameSummary(isolate, WASM_INTERPRETED, instance, false),
      function_index_(function_index),
      byte_offset_(byte_offset) {}

FrameSummary::~FrameSummary() {
#define FRAME_SUMMARY_DESTR(kind, type, field, desc) \
  case kind:                                         \
    field.~type();                                   \
    break;
  switch (base_.kind()) {
    FRAME_SUMMARY_VARIANTS(FRAME_SUMMARY_DESTR)
    default:
      UNREACHABLE();
  }
#undef FRAME_SUMMARY_DESTR
}

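// Frame summaries are ordered from the bottom-most (outermost) inlined frame
// to the top-most one, so the physically topmost frame is the last element.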
FrameSummary FrameSummary::GetTop(const StandardFrame* frame) {
1390
  std::vector<FrameSummary> frames;
1391
  frame->Summarize(&frames);
1392 1393
  DCHECK_LT(0, frames.size());
  return frames.back();
1394 1395 1396 1397
}

FrameSummary FrameSummary::GetBottom(const StandardFrame* frame) {
  return Get(frame, 0);
1398 1399 1400
}

FrameSummary FrameSummary::GetSingle(const StandardFrame* frame) {
1401
  std::vector<FrameSummary> frames;
1402
  frame->Summarize(&frames);
1403 1404
  DCHECK_EQ(1, frames.size());
  return frames.front();
1405 1406
}

1407 1408
FrameSummary FrameSummary::Get(const StandardFrame* frame, int index) {
  DCHECK_LE(0, index);
1409
  std::vector<FrameSummary> frames;
1410
  frame->Summarize(&frames);
1411
  DCHECK_GT(frames.size(), index);
1412 1413 1414
  return frames[index];
}

1415 1416 1417 1418 1419 1420 1421 1422 1423 1424 1425 1426 1427
#define FRAME_SUMMARY_DISPATCH(ret, name)        \
  ret FrameSummary::name() const {               \
    switch (base_.kind()) {                      \
      case JAVA_SCRIPT:                          \
        return java_script_summary_.name();      \
      case WASM_COMPILED:                        \
        return wasm_compiled_summary_.name();    \
      case WASM_INTERPRETED:                     \
        return wasm_interpreted_summary_.name(); \
      default:                                   \
        UNREACHABLE();                           \
        return ret{};                            \
    }                                            \
1428
  }
1429 1430 1431 1432 1433 1434 1435 1436 1437 1438 1439 1440

FRAME_SUMMARY_DISPATCH(Handle<Object>, receiver)
FRAME_SUMMARY_DISPATCH(int, code_offset)
FRAME_SUMMARY_DISPATCH(bool, is_constructor)
FRAME_SUMMARY_DISPATCH(bool, is_subject_to_debugging)
FRAME_SUMMARY_DISPATCH(Handle<Object>, script)
FRAME_SUMMARY_DISPATCH(int, SourcePosition)
FRAME_SUMMARY_DISPATCH(int, SourceStatementPosition)
FRAME_SUMMARY_DISPATCH(Handle<String>, FunctionName)
FRAME_SUMMARY_DISPATCH(Handle<Context>, native_context)

#undef FRAME_SUMMARY_DISPATCH

void OptimizedFrame::Summarize(std::vector<FrameSummary>* frames) const {
  DCHECK(frames->empty());
  DCHECK(is_optimized());

  // Delegate to JS frame in absence of turbofan deoptimization.
  // TODO(turbofan): Revisit once we support deoptimization across the board.
  Code* code = LookupCode();
  if (code->kind() == Code::BUILTIN) {
    return JavaScriptFrame::Summarize(frames);
  }

  int deopt_index = Safepoint::kNoDeoptimizationIndex;
  DeoptimizationData* const data = GetDeoptimizationData(&deopt_index);
  if (deopt_index == Safepoint::kNoDeoptimizationIndex) {
    CHECK_NULL(data);
    FATAL("Missing deoptimization information for OptimizedFrame::Summarize.");
  }

  // Prepare iteration over translation. Note that the below iteration might
  // materialize objects without storing them back to the Isolate; this will
  // lead to objects being re-materialized again for each summary.
  TranslatedState translated(this);
  translated.Prepare(fp());

  // We create the summary in reverse order because the frames
  // in the deoptimization translation are ordered bottom-to-top.
  bool is_constructor = IsConstructor();
  for (auto it = translated.begin(); it != translated.end(); it++) {
    if (it->kind() == TranslatedFrame::kInterpretedFunction ||
        it->kind() == TranslatedFrame::kJavaScriptBuiltinContinuation ||
        it->kind() ==
            TranslatedFrame::kJavaScriptBuiltinContinuationWithCatch) {
      Handle<SharedFunctionInfo> shared_info = it->shared_info();

      // The translation commands are ordered and the function is always
      // at the first position, and the receiver is next.
      TranslatedFrame::iterator translated_values = it->begin();

      // Get or materialize the correct function in the optimized frame.
      Handle<JSFunction> function =
          Handle<JSFunction>::cast(translated_values->GetValue());
      translated_values++;

      // Get or materialize the correct receiver in the optimized frame.
      Handle<Object> receiver = translated_values->GetValue();
      translated_values++;

      // Determine the underlying code object and the position within it from
      // the translation corresponding to the frame type in question.
      Handle<AbstractCode> abstract_code;
      unsigned code_offset;
      if (it->kind() == TranslatedFrame::kJavaScriptBuiltinContinuation ||
          it->kind() ==
              TranslatedFrame::kJavaScriptBuiltinContinuationWithCatch) {
        code_offset = 0;
        abstract_code =
            handle(AbstractCode::cast(isolate()->builtins()->builtin(
                       Builtins::GetBuiltinFromBailoutId(it->node_id()))),
                   isolate());
      } else {
        DCHECK_EQ(it->kind(), TranslatedFrame::kInterpretedFunction);
        code_offset = it->node_id().ToInt();  // Points to current bytecode.
        abstract_code = handle(shared_info->abstract_code(), isolate());
      }

      // Append full summary of the encountered JS frame.
      FrameSummary::JavaScriptFrameSummary summary(isolate(), *receiver,
                                                   *function, *abstract_code,
                                                   code_offset, is_constructor);
      frames->push_back(summary);
      is_constructor = false;
    } else if (it->kind() == TranslatedFrame::kConstructStub) {
      // The next encountered JS frame will be marked as a constructor call.
      DCHECK(!is_constructor);
      is_constructor = true;
    }
  }
}


int OptimizedFrame::LookupExceptionHandlerInTable(
    int* stack_slots, HandlerTable::CatchPrediction* prediction) {
  // We cannot perform exception prediction on optimized code. Instead, we need
  // to use FrameSummary to find the corresponding code offset in unoptimized
  // code to perform prediction there.
  DCHECK_NULL(prediction);
  Code* code = LookupCode();
  HandlerTable table(code);
  int pc_offset = static_cast<int>(pc() - code->InstructionStart());
  if (stack_slots) *stack_slots = code->stack_slots();

  // When the return pc has been replaced by a trampoline there won't be
  // a handler for this trampoline. Thus we need to use the return pc that
  // _used to be_ on the stack to get the right ExceptionHandler.
  if (code->kind() == Code::OPTIMIZED_FUNCTION &&
      code->marked_for_deoptimization()) {
    SafepointTable safepoints(code);
    pc_offset = safepoints.find_return_pc(pc_offset);
  }
  return table.LookupReturn(pc_offset);
}

DeoptimizationData* OptimizedFrame::GetDeoptimizationData(
    int* deopt_index) const {
  DCHECK(is_optimized());

  JSFunction* opt_function = function();
  Code* code = opt_function->code();

  // The code object may have been replaced by lazy deoptimization. Fall
  // back to a slow search in this case to find the original optimized
  // code object.
  if (!code->contains(pc())) {
    code = isolate()->heap()->GcSafeFindCodeForInnerPointer(pc());
  }
  DCHECK_NOT_NULL(code);
  DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION);

  SafepointEntry safepoint_entry = code->GetSafepointEntry(pc());
  *deopt_index = safepoint_entry.deoptimization_index();
  if (*deopt_index != Safepoint::kNoDeoptimizationIndex) {
    return DeoptimizationData::cast(code->deoptimization_data());
  }
  return nullptr;
}

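// For frames of BUILTIN code the receiver is not in the usual parameter slot;
// it is read from above the fixed frame slots, at an offset computed from the
// argument count that the builtin stored in the frame at kArgCOffset.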
Object* OptimizedFrame::receiver() const {
  Code* code = LookupCode();
  if (code->kind() == Code::BUILTIN) {
    Address argc_ptr = fp() + OptimizedBuiltinFrameConstants::kArgCOffset;
    intptr_t argc = *reinterpret_cast<intptr_t*>(argc_ptr);
    intptr_t args_size =
        (StandardFrameConstants::kFixedSlotCountAboveFp + argc) * kPointerSize;
    Address receiver_ptr = fp() + args_size;
    return *reinterpret_cast<Object**>(receiver_ptr);
  } else {
    return JavaScriptFrame::receiver();
  }
}

void OptimizedFrame::GetFunctions(
    std::vector<SharedFunctionInfo*>* functions) const {
  DCHECK(functions->empty());
  DCHECK(is_optimized());

  // Delegate to JS frame in absence of turbofan deoptimization.
  // TODO(turbofan): Revisit once we support deoptimization across the board.
  Code* code = LookupCode();
  if (code->kind() == Code::BUILTIN) {
    return JavaScriptFrame::GetFunctions(functions);
  }

  DisallowHeapAllocation no_gc;
  int deopt_index = Safepoint::kNoDeoptimizationIndex;
  DeoptimizationData* const data = GetDeoptimizationData(&deopt_index);
  DCHECK_NOT_NULL(data);
  DCHECK_NE(Safepoint::kNoDeoptimizationIndex, deopt_index);
  FixedArray* const literal_array = data->LiteralArray();

  TranslationIterator it(data->TranslationByteArray(),
                         data->TranslationIndex(deopt_index)->value());
  Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
  DCHECK_EQ(Translation::BEGIN, opcode);
  it.Next();  // Skip frame count.
  int jsframe_count = it.Next();
  it.Next();  // Skip update feedback count.

  // We insert the frames in reverse order because the frames
  // in the deoptimization translation are ordered bottom-to-top.
  while (jsframe_count != 0) {
    opcode = static_cast<Translation::Opcode>(it.Next());
    if (opcode == Translation::INTERPRETED_FRAME ||
        opcode == Translation::JAVA_SCRIPT_BUILTIN_CONTINUATION_FRAME ||
        opcode ==
            Translation::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH_FRAME) {
      it.Next();  // Skip bailout id.
      jsframe_count--;

      // The second operand of the frame points to the function's shared info.
      Object* shared = literal_array->get(it.Next());
      functions->push_back(SharedFunctionInfo::cast(shared));

      // Skip over remaining operands to advance to the next opcode.
      it.Skip(Translation::NumberOfOperandsFor(opcode) - 2);
    } else {
      // Skip over operands to advance to the next opcode.
      it.Skip(Translation::NumberOfOperandsFor(opcode));
    }
  }
}


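// Maps a stack slot index to an fp-relative offset: index 0 corresponds to the
// word just below the caller's stack pointer, and each higher index moves one
// pointer size further down the stack.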
int OptimizedFrame::StackSlotOffsetRelativeToFp(int slot_index) {
  return StandardFrameConstants::kCallerSPOffset -
         ((slot_index + 1) * kPointerSize);
}


Object* OptimizedFrame::StackSlotAt(int index) const {
  return Memory::Object_at(fp() + StackSlotOffsetRelativeToFp(index));
}

int InterpretedFrame::position() const {
  AbstractCode* code = AbstractCode::cast(GetBytecodeArray());
  int code_offset = GetBytecodeOffset();
  return code->SourcePosition(code_offset);
}

int InterpretedFrame::LookupExceptionHandlerInTable(
    int* context_register, HandlerTable::CatchPrediction* prediction) {
  HandlerTable table(function()->shared()->GetBytecodeArray());
  return table.LookupRange(GetBytecodeOffset(), context_register, prediction);
}

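// The interpreter stores the current bytecode offset on the expression stack
// as a Smi whose value is relative to the start of the BytecodeArray object,
// i.e. it includes BytecodeArray::kHeaderSize - kHeapObjectTag. The accessors
// below convert between that raw value and the plain offset into the bytecode
// stream.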
int InterpretedFrame::GetBytecodeOffset() const {
  const int index = InterpreterFrameConstants::kBytecodeOffsetExpressionIndex;
  DCHECK_EQ(
      InterpreterFrameConstants::kBytecodeOffsetFromFp,
      InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
  int raw_offset = Smi::ToInt(GetExpression(index));
  return raw_offset - BytecodeArray::kHeaderSize + kHeapObjectTag;
}

int InterpretedFrame::GetBytecodeOffset(Address fp) {
  const int offset = InterpreterFrameConstants::kExpressionsOffset;
  const int index = InterpreterFrameConstants::kBytecodeOffsetExpressionIndex;
  DCHECK_EQ(
      InterpreterFrameConstants::kBytecodeOffsetFromFp,
      InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
  Address expression_offset = fp + offset - index * kPointerSize;
  int raw_offset = Smi::ToInt(Memory::Object_at(expression_offset));
  return raw_offset - BytecodeArray::kHeaderSize + kHeapObjectTag;
}

void InterpretedFrame::PatchBytecodeOffset(int new_offset) {
  const int index = InterpreterFrameConstants::kBytecodeOffsetExpressionIndex;
  DCHECK_EQ(
      InterpreterFrameConstants::kBytecodeOffsetFromFp,
      InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
  int raw_offset = new_offset + BytecodeArray::kHeaderSize - kHeapObjectTag;
  SetExpression(index, Smi::FromInt(raw_offset));
}

BytecodeArray* InterpretedFrame::GetBytecodeArray() const {
  const int index = InterpreterFrameConstants::kBytecodeArrayExpressionIndex;
  DCHECK_EQ(
      InterpreterFrameConstants::kBytecodeArrayFromFp,
      InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
  return BytecodeArray::cast(GetExpression(index));
}

void InterpretedFrame::PatchBytecodeArray(BytecodeArray* bytecode_array) {
  const int index = InterpreterFrameConstants::kBytecodeArrayExpressionIndex;
  DCHECK_EQ(
      InterpreterFrameConstants::kBytecodeArrayFromFp,
      InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
  SetExpression(index, bytecode_array);
}

Object* InterpretedFrame::ReadInterpreterRegister(int register_index) const {
  const int index = InterpreterFrameConstants::kRegisterFileExpressionIndex;
  DCHECK_EQ(
      InterpreterFrameConstants::kRegisterFileFromFp,
      InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
  return GetExpression(index + register_index);
}

void InterpretedFrame::WriteInterpreterRegister(int register_index,
                                                Object* value) {
  const int index = InterpreterFrameConstants::kRegisterFileExpressionIndex;
  DCHECK_EQ(
      InterpreterFrameConstants::kRegisterFileFromFp,
      InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
  return SetExpression(index + register_index, value);
}

void InterpretedFrame::Summarize(std::vector<FrameSummary>* functions) const {
  DCHECK(functions->empty());
  AbstractCode* abstract_code =
      AbstractCode::cast(function()->shared()->GetBytecodeArray());
  FrameSummary::JavaScriptFrameSummary summary(
      isolate(), receiver(), function(), abstract_code, GetBytecodeOffset(),
      IsConstructor());
  functions->push_back(summary);
}

int ArgumentsAdaptorFrame::GetNumberOfIncomingArguments() const {
  return Smi::ToInt(GetExpression(0));
}

Code* ArgumentsAdaptorFrame::unchecked_code() const {
  return isolate()->builtins()->builtin(
      Builtins::kArgumentsAdaptorTrampoline);
}

int BuiltinFrame::GetNumberOfIncomingArguments() const {
  return Smi::ToInt(GetExpression(0));
}

void BuiltinFrame::PrintFrameKind(StringStream* accumulator) const {
  accumulator->Add("builtin frame: ");
}

Address InternalFrame::GetCallerStackPointer() const {
  // Internal frames have no arguments. The stack pointer of the
  // caller is at a fixed offset from the frame pointer.
  return fp() + StandardFrameConstants::kCallerSPOffset;
}

Code* InternalFrame::unchecked_code() const {
  const int offset = InternalFrameConstants::kCodeOffset;
  Object* code = Memory::Object_at(fp() + offset);
  DCHECK_NOT_NULL(code);
  return reinterpret_cast<Code*>(code);
}


void WasmCompiledFrame::Print(StringStream* accumulator, PrintMode mode,
                              int index) const {
  PrintIndex(accumulator, mode, index);
  accumulator->Add("WASM [");
  Script* script = this->script();
  accumulator->PrintName(script->name());
  Address instruction_start = isolate()
                                  ->wasm_engine()
                                  ->code_manager()
                                  ->LookupCode(pc())
                                  ->instruction_start();
  int pc = static_cast<int>(this->pc() - instruction_start);
  Vector<const uint8_t> raw_func_name =
      shared()->GetRawFunctionName(this->function_index());
  const int kMaxPrintedFunctionName = 64;
  char func_name[kMaxPrintedFunctionName + 1];
  int func_name_len = std::min(kMaxPrintedFunctionName, raw_func_name.length());
  memcpy(func_name, raw_func_name.start(), func_name_len);
  func_name[func_name_len] = '\0';
  accumulator->Add("], function #%u ('%s'), pc=%p, pos=%d\n",
                   this->function_index(), func_name, pc, this->position());
  if (mode != OVERVIEW) accumulator->Add("\n");
}

Code* WasmCompiledFrame::unchecked_code() const {
  return isolate()->FindCodeObject(pc());
}

void WasmCompiledFrame::Iterate(RootVisitor* v) const {
  IterateCompiledFrame(v);
}

Address WasmCompiledFrame::GetCallerStackPointer() const {
  return fp() + ExitFrameConstants::kCallerSPOffset;
}

wasm::WasmCode* WasmCompiledFrame::wasm_code() const {
  return isolate()->wasm_engine()->code_manager()->LookupCode(pc());
}

WasmInstanceObject* WasmCompiledFrame::wasm_instance() const {
  const int offset = WasmCompiledFrameConstants::kWasmInstanceOffset;
  Object* instance = Memory::Object_at(fp() + offset);
  return WasmInstanceObject::cast(instance);
}

WasmSharedModuleData* WasmCompiledFrame::shared() const {
  return wasm_instance()->module_object()->shared();
}

WasmCompiledModule* WasmCompiledFrame::compiled_module() const {
  return wasm_instance()->compiled_module();
}

uint32_t WasmCompiledFrame::function_index() const {
  return FrameSummary::GetSingle(this).AsWasmCompiled().function_index();
}

Script* WasmCompiledFrame::script() const { return shared()->script(); }

int WasmCompiledFrame::position() const {
  return FrameSummary::GetSingle(this).SourcePosition();
}

void WasmCompiledFrame::Summarize(std::vector<FrameSummary>* functions) const {
  DCHECK(functions->empty());
  wasm::WasmCode* code = wasm_code();
  int offset = static_cast<int>(pc() - code->instruction_start());
  Handle<WasmInstanceObject> instance(wasm_instance(), isolate());
  FrameSummary::WasmCompiledFrameSummary summary(
      isolate(), instance, code, offset, at_to_number_conversion());
  functions->push_back(summary);
}

bool WasmCompiledFrame::at_to_number_conversion() const {
  // Check whether our callee is a WASM_TO_JS frame, and this frame is at the
  // ToNumber conversion call.
  wasm::WasmCode* code =
      callee_pc() != kNullAddress
          ? isolate()->wasm_engine()->code_manager()->LookupCode(callee_pc())
          : nullptr;
  if (!code || code->kind() != wasm::WasmCode::kWasmToJsWrapper) return false;
  int offset = static_cast<int>(callee_pc() - code->instruction_start());
  int pos = FrameSummary::WasmCompiledFrameSummary::GetWasmSourcePosition(
      code, offset);
  DCHECK(pos == 0 || pos == 1);
  // The imported call has position 0, ToNumber has position 1.
  return !!pos;
}

int WasmCompiledFrame::LookupExceptionHandlerInTable(int* stack_slots) {
  DCHECK_NOT_NULL(stack_slots);
  wasm::WasmCode* code =
      isolate()->wasm_engine()->code_manager()->LookupCode(pc());
  if (!code->IsAnonymous() && code->handler_table_offset() > 0) {
    HandlerTable table(code->instruction_start(), code->handler_table_offset());
    int pc_offset = static_cast<int>(pc() - code->instruction_start());
    *stack_slots = static_cast<int>(code->stack_slots());
    return table.LookupReturn(pc_offset);
  }
  return -1;
}

void WasmInterpreterEntryFrame::Iterate(RootVisitor* v) const {
  IterateCompiledFrame(v);
}

void WasmInterpreterEntryFrame::Print(StringStream* accumulator, PrintMode mode,
                                      int index) const {
  PrintIndex(accumulator, mode, index);
  accumulator->Add("WASM INTERPRETER ENTRY [");
  Script* script = this->script();
  accumulator->PrintName(script->name());
  accumulator->Add("]");
  if (mode != OVERVIEW) accumulator->Add("\n");
}

void WasmInterpreterEntryFrame::Summarize(
    std::vector<FrameSummary>* functions) const {
  Handle<WasmInstanceObject> instance(wasm_instance(), isolate());
  std::vector<std::pair<uint32_t, int>> interpreted_stack =
      instance->debug_info()->GetInterpretedStack(fp());

  for (auto& e : interpreted_stack) {
    FrameSummary::WasmInterpretedFrameSummary summary(isolate(), instance,
                                                      e.first, e.second);
    functions->push_back(summary);
  }
}

Code* WasmInterpreterEntryFrame::unchecked_code() const { UNREACHABLE(); }

WasmInstanceObject* WasmInterpreterEntryFrame::wasm_instance() const {
  const int offset = WasmCompiledFrameConstants::kWasmInstanceOffset;
  Object* instance = Memory::Object_at(fp() + offset);
  return WasmInstanceObject::cast(instance);
}

WasmDebugInfo* WasmInterpreterEntryFrame::debug_info() const {
  return wasm_instance()->debug_info();
}

WasmSharedModuleData* WasmInterpreterEntryFrame::shared() const {
  return wasm_instance()->module_object()->shared();
}

WasmCompiledModule* WasmInterpreterEntryFrame::compiled_module() const {
  return wasm_instance()->compiled_module();
}

Script* WasmInterpreterEntryFrame::script() const { return shared()->script(); }

int WasmInterpreterEntryFrame::position() const {
  return FrameSummary::GetBottom(this).AsWasmInterpreted().SourcePosition();
}

Object* WasmInterpreterEntryFrame::context() const {
  return wasm_instance()->native_context();
}

Address WasmInterpreterEntryFrame::GetCallerStackPointer() const {
  return fp() + ExitFrameConstants::kCallerSPOffset;
}

namespace {


void PrintFunctionSource(StringStream* accumulator, SharedFunctionInfo* shared,
                         Code* code) {
  if (FLAG_max_stack_trace_source_length != 0 && code != nullptr) {
    std::ostringstream os;
    os << "--------- s o u r c e   c o d e ---------\n"
       << SourceCodeOf(shared, FLAG_max_stack_trace_source_length)
       << "\n-----------------------------------------\n";
    accumulator->Add(os.str().c_str());
  }
}


}  // namespace


void JavaScriptFrame::Print(StringStream* accumulator,
                            PrintMode mode,
                            int index) const {
  DisallowHeapAllocation no_gc;
  Object* receiver = this->receiver();
  JSFunction* function = this->function();

  accumulator->PrintSecurityTokenIfChanged(function);
  PrintIndex(accumulator, mode, index);
  PrintFrameKind(accumulator);
  Code* code = nullptr;
  if (IsConstructor()) accumulator->Add("new ");
  accumulator->PrintFunction(function, receiver, &code);
  accumulator->Add(" [%p]", function);

  // Get scope information for nicer output, if possible. If code is nullptr, or
  // doesn't contain scope info, scope_info will return 0 for the number of
  // parameters, stack local variables, context local variables, stack slots,
  // or context slots.
  SharedFunctionInfo* shared = function->shared();
  ScopeInfo* scope_info = shared->scope_info();
  Object* script_obj = shared->script();
  if (script_obj->IsScript()) {
    Script* script = Script::cast(script_obj);
    accumulator->Add(" [");
    accumulator->PrintName(script->name());

    if (is_interpreted()) {
      const InterpretedFrame* iframe =
          reinterpret_cast<const InterpretedFrame*>(this);
      BytecodeArray* bytecodes = iframe->GetBytecodeArray();
      int offset = iframe->GetBytecodeOffset();
      int source_pos = AbstractCode::cast(bytecodes)->SourcePosition(offset);
      int line = script->GetLineNumber(source_pos) + 1;
      accumulator->Add(":%d] [bytecode=%p offset=%d]", line, bytecodes, offset);
    } else {
      int function_start_pos = shared->StartPosition();
      int line = script->GetLineNumber(function_start_pos) + 1;
      accumulator->Add(":~%d] [pc=%p]", line, reinterpret_cast<void*>(pc()));
    }
  }

  accumulator->Add("(this=%o", receiver);

  // Print the parameters.
  int parameters_count = ComputeParametersCount();
  for (int i = 0; i < parameters_count; i++) {
    accumulator->Add(",");
    // If we have a name for the parameter we print it. Nameless
    // parameters are either because we have more actual parameters
    // than formal parameters or because we have no scope information.
    if (i < scope_info->ParameterCount()) {
      accumulator->PrintName(scope_info->ParameterName(i));
      accumulator->Add("=");
    }
    accumulator->Add("%o", GetParameter(i));
  }

  accumulator->Add(")");
  if (mode == OVERVIEW) {
    accumulator->Add("\n");
    return;
  }
  if (is_optimized()) {
    accumulator->Add(" {\n// optimized frame\n");
    PrintFunctionSource(accumulator, shared, code);
    accumulator->Add("}\n");
    return;
  }
  accumulator->Add(" {\n");

  // Compute the number of locals and expression stack elements.
  int stack_locals_count = scope_info->StackLocalCount();
  int heap_locals_count = scope_info->ContextLocalCount();
  int expressions_count = ComputeExpressionsCount();

  // Print stack-allocated local variables.
  if (stack_locals_count > 0) {
    accumulator->Add("  // stack-allocated locals\n");
  }
  for (int i = 0; i < stack_locals_count; i++) {
    accumulator->Add("  var ");
    accumulator->PrintName(scope_info->StackLocalName(i));
    accumulator->Add(" = ");
    if (i < expressions_count) {
      accumulator->Add("%o", GetExpression(i));
    } else {
      accumulator->Add("// no expression found - inconsistent frame?");
    }
    accumulator->Add("\n");
  }

  // Try to get hold of the context of this frame.
  Context* context = nullptr;
  if (this->context() != nullptr && this->context()->IsContext()) {
    context = Context::cast(this->context());
    while (context->IsWithContext()) {
      context = context->previous();
      DCHECK_NOT_NULL(context);
    }
  }

  // Print heap-allocated local variables.
  if (heap_locals_count > 0) {
    accumulator->Add("  // heap-allocated locals\n");
  }
  for (int i = 0; i < heap_locals_count; i++) {
    accumulator->Add("  var ");
    accumulator->PrintName(scope_info->ContextLocalName(i));
    accumulator->Add(" = ");
    if (context != nullptr) {
      int index = Context::MIN_CONTEXT_SLOTS + i;
      if (index < context->length()) {
        accumulator->Add("%o", context->get(index));
      } else {
        accumulator->Add(
            "// warning: missing context slot - inconsistent frame?");
      }
    } else {
      accumulator->Add("// warning: no context found - inconsistent frame?");
    }
    accumulator->Add("\n");
  }

  // Print the expression stack.
  int expressions_start = stack_locals_count;
  if (expressions_start < expressions_count) {
    accumulator->Add("  // expression stack (top to bottom)\n");
  }
  for (int i = expressions_count - 1; i >= expressions_start; i--) {
    accumulator->Add("  [%02d] : %o\n", i, GetExpression(i));
  }

  PrintFunctionSource(accumulator, shared, code);

  accumulator->Add("}\n\n");
}


void ArgumentsAdaptorFrame::Print(StringStream* accumulator,
                                  PrintMode mode,
                                  int index) const {
  int actual = ComputeParametersCount();
  int expected = -1;
  JSFunction* function = this->function();
  expected = function->shared()->internal_formal_parameter_count();

  PrintIndex(accumulator, mode, index);
  accumulator->Add("arguments adaptor frame: %d->%d", actual, expected);
  if (mode == OVERVIEW) {
    accumulator->Add("\n");
    return;
  }
  accumulator->Add(" {\n");

  // Print actual arguments.
  if (actual > 0) accumulator->Add("  // actual arguments\n");
  for (int i = 0; i < actual; i++) {
    accumulator->Add("  [%02d] : %o", i, GetParameter(i));
    if (expected != -1 && i >= expected) {
      accumulator->Add("  // not passed to callee");
    }
    accumulator->Add("\n");
  }

  accumulator->Add("}\n\n");
}

void EntryFrame::Iterate(RootVisitor* v) const {
  IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
}

void StandardFrame::IterateExpressions(RootVisitor* v) const {
  const int offset = StandardFrameConstants::kLastObjectOffset;
  Object** base = &Memory::Object_at(sp());
  Object** limit = &Memory::Object_at(fp() + offset) + 1;
  v->VisitRootPointers(Root::kTop, nullptr, base, limit);
}

void JavaScriptFrame::Iterate(RootVisitor* v) const {
  IterateExpressions(v);
  IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
}

void InternalFrame::Iterate(RootVisitor* v) const {
  wasm::WasmCode* wasm_code =
      isolate()->wasm_engine()->code_manager()->LookupCode(pc());
  if (wasm_code != nullptr) {
    DCHECK(wasm_code->kind() == wasm::WasmCode::kLazyStub);
  } else {
    Code* code = LookupCode();
    IteratePc(v, pc_address(), constant_pool_address(), code);
    // Internal frames typically do not receive any arguments, hence their stack
    // only contains tagged pointers.
    // We are misusing the has_tagged_params flag here to tell us whether
    // the full stack frame contains only tagged pointers or only raw values.
    // This is used for the WasmCompileLazy builtin, where we actually pass
    // untagged arguments and also store untagged values on the stack.
    if (code->has_tagged_params()) IterateExpressions(v);
  }
}

// -------------------------------------------------------------------------

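// The inner-pointer-to-code cache is a small direct-mapped table: the inner
// pointer is hashed, the hash picks one of kInnerPointerToCodeCacheSize
// entries, and on a miss the (slow) GC-safe heap search is performed before
// that entry is overwritten.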
InnerPointerToCodeCache::InnerPointerToCodeCacheEntry*
    InnerPointerToCodeCache::GetCacheEntry(Address inner_pointer) {
  isolate_->counters()->pc_to_code()->Increment();
  DCHECK(base::bits::IsPowerOfTwo(kInnerPointerToCodeCacheSize));
  uint32_t hash = ComputeIntegerHash(
      ObjectAddressForHashing(reinterpret_cast<void*>(inner_pointer)));
  uint32_t index = hash & (kInnerPointerToCodeCacheSize - 1);
  InnerPointerToCodeCacheEntry* entry = cache(index);
  if (entry->inner_pointer == inner_pointer) {
    isolate_->counters()->pc_to_code_cached()->Increment();
    DCHECK(entry->code ==
           isolate_->heap()->GcSafeFindCodeForInnerPointer(inner_pointer));
  } else {
    // Because this code may be interrupted by a profiling signal that
    // also queries the cache, we cannot update inner_pointer before the code
    // has been set. Otherwise, we risk trying to use a cache entry before
    // the code has been computed.
    entry->code =
        isolate_->heap()->GcSafeFindCodeForInnerPointer(inner_pointer);
    entry->safepoint_entry.Reset();
    entry->inner_pointer = inner_pointer;
  }
  return entry;
}


// -------------------------------------------------------------------------


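// For every stack frame type this defines a zone-allocated wrapper class that
// holds a copy of the frame by value; e.g. the (ENTRY, EntryFrame) entry in
// STACK_FRAME_TYPE_LIST yields an EntryFrame_Wrapper. AllocateFrameCopy below
// uses these wrappers to snapshot the live frames for CreateStackMap.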
#define DEFINE_WRAPPER(type, field)                              \
class field##_Wrapper : public ZoneObject {                      \
 public:  /* NOLINT */                                           \
  field##_Wrapper(const field& original) : frame_(original) {    \
  }                                                              \
  field frame_;                                                  \
};
STACK_FRAME_TYPE_LIST(DEFINE_WRAPPER)
#undef DEFINE_WRAPPER

static StackFrame* AllocateFrameCopy(StackFrame* frame, Zone* zone) {
#define FRAME_TYPE_CASE(type, field) \
  case StackFrame::type: { \
    field##_Wrapper* wrapper = \
        new(zone) field##_Wrapper(*(reinterpret_cast<field*>(frame))); \
    return &wrapper->frame_; \
  }

  switch (frame->type()) {
    STACK_FRAME_TYPE_LIST(FRAME_TYPE_CASE)
    default: UNREACHABLE();
  }
#undef FRAME_TYPE_CASE
  return nullptr;
}


Vector<StackFrame*> CreateStackMap(Isolate* isolate, Zone* zone) {
  ZoneVector<StackFrame*> frames(zone);
  for (StackFrameIterator it(isolate); !it.done(); it.Advance()) {
    StackFrame* frame = AllocateFrameCopy(it.frame(), zone);
    frames.push_back(frame);
  }
  return Vector<StackFrame*>(frames.data(), frames.size());
}


}  // namespace internal
}  // namespace v8