// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/frames.h"

#include <memory>
#include <sstream>

#include "src/base/bits.h"
#include "src/deoptimizer.h"
#include "src/frames-inl.h"
#include "src/full-codegen/full-codegen.h"
#include "src/register-configuration.h"
#include "src/safepoint-table.h"
#include "src/string-stream.h"
#include "src/vm-state-inl.h"
#include "src/wasm/wasm-debug.h"
#include "src/wasm/wasm-module.h"

namespace v8 {
namespace internal {

ReturnAddressLocationResolver
    StackFrame::return_address_location_resolver_ = NULL;


// Iterator that supports traversing the stack handlers of a
// particular frame. Needs to know the top of the handler chain.
class StackHandlerIterator BASE_EMBEDDED {
 public:
  StackHandlerIterator(const StackFrame* frame, StackHandler* handler)
      : limit_(frame->fp()), handler_(handler) {
    // Make sure the handler has already been unwound to this frame.
    DCHECK(frame->sp() <= handler->address());
  }

  StackHandler* handler() const { return handler_; }

  bool done() {
    return handler_ == NULL || handler_->address() > limit_;
  }
  void Advance() {
    DCHECK(!done());
    handler_ = handler_->next();
  }

 private:
  const Address limit_;
  StackHandler* handler_;
};


// -------------------------------------------------------------------------


#define INITIALIZE_SINGLETON(type, field) field##_(this),
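// The iterator keeps one statically allocated frame object per frame type
// (initialized via the INITIALIZE_SINGLETON macro); SingletonFor() hands out
// pointers into this set instead of allocating frames during iteration.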
StackFrameIteratorBase::StackFrameIteratorBase(Isolate* isolate,
                                               bool can_access_heap_objects)
    : isolate_(isolate),
      STACK_FRAME_TYPE_LIST(INITIALIZE_SINGLETON)
      frame_(NULL), handler_(NULL),
      can_access_heap_objects_(can_access_heap_objects) {
}
#undef INITIALIZE_SINGLETON

StackFrameIterator::StackFrameIterator(Isolate* isolate)
    : StackFrameIterator(isolate, isolate->thread_local_top()) {}

StackFrameIterator::StackFrameIterator(Isolate* isolate, ThreadLocalTop* t)
    : StackFrameIteratorBase(isolate, true) {
  Reset(t);
}

void StackFrameIterator::Advance() {
  DCHECK(!done());
  // Compute the state of the calling frame before restoring
  // callee-saved registers and unwinding handlers. This allows the
  // frame code that computes the caller state to access the top
  // handler and the value of any callee-saved register if needed.
  StackFrame::State state;
  StackFrame::Type type = frame_->GetCallerState(&state);

  // Unwind handlers corresponding to the current frame.
  StackHandlerIterator it(frame_, handler_);
  while (!it.done()) it.Advance();
  handler_ = it.handler();

  // Advance to the calling frame.
  frame_ = SingletonFor(type, &state);

  // When we're done iterating over the stack frames, the handler
  // chain must have been completely unwound.
  DCHECK(!done() || handler_ == NULL);
}


void StackFrameIterator::Reset(ThreadLocalTop* top) {
  StackFrame::State state;
  StackFrame::Type type = ExitFrame::GetStateForFramePointer(
      Isolate::c_entry_fp(top), &state);
  handler_ = StackHandler::FromAddress(Isolate::handler(top));
  frame_ = SingletonFor(type, &state);
}


StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type,
                                             StackFrame::State* state) {
  StackFrame* result = SingletonFor(type);
  DCHECK((!result) == (type == StackFrame::NONE));
  if (result) result->state_ = *state;
  return result;
}


StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type) {
#define FRAME_TYPE_CASE(type, field) \
  case StackFrame::type:             \
    return &field##_;

  switch (type) {
    case StackFrame::NONE: return NULL;
    STACK_FRAME_TYPE_LIST(FRAME_TYPE_CASE)
    default: break;
  }
  return NULL;

#undef FRAME_TYPE_CASE
}

// -------------------------------------------------------------------------

JavaScriptFrameIterator::JavaScriptFrameIterator(Isolate* isolate,
                                                 StackFrame::Id id)
    : iterator_(isolate) {
  while (!done()) {
    Advance();
    if (frame()->id() == id) return;
  }
}


void JavaScriptFrameIterator::Advance() {
  do {
    iterator_.Advance();
  } while (!iterator_.done() && !iterator_.frame()->is_java_script());
}


void JavaScriptFrameIterator::AdvanceToArgumentsFrame() {
  if (!frame()->has_adapted_arguments()) return;
  iterator_.Advance();
  DCHECK(iterator_.frame()->is_arguments_adaptor());
}


// -------------------------------------------------------------------------

StackTraceFrameIterator::StackTraceFrameIterator(Isolate* isolate)
    : iterator_(isolate) {
  if (!done() && !IsValidFrame(iterator_.frame())) Advance();
}

StackTraceFrameIterator::StackTraceFrameIterator(Isolate* isolate,
                                                 StackFrame::Id id)
    : StackTraceFrameIterator(isolate) {
  while (!done() && frame()->id() != id) Advance();
}

void StackTraceFrameIterator::Advance() {
  do {
    iterator_.Advance();
  } while (!done() && !IsValidFrame(iterator_.frame()));
}

bool StackTraceFrameIterator::IsValidFrame(StackFrame* frame) const {
  if (frame->is_java_script()) {
    JavaScriptFrame* jsFrame = static_cast<JavaScriptFrame*>(frame);
    if (!jsFrame->function()->IsJSFunction()) return false;
    Object* script = jsFrame->function()->shared()->script();
    // Don't show functions from native scripts to the user.
    return (script->IsScript() &&
            Script::TYPE_NATIVE != Script::cast(script)->type());
  }
  // Apart from JavaScript, only wasm frames are valid.
  return frame->is_wasm();
}

void StackTraceFrameIterator::AdvanceToArgumentsFrame() {
  if (!is_javascript() || !javascript_frame()->has_adapted_arguments()) return;
  iterator_.Advance();
  DCHECK(iterator_.frame()->is_arguments_adaptor());
}

// -------------------------------------------------------------------------


SafeStackFrameIterator::SafeStackFrameIterator(
    Isolate* isolate,
    Address fp, Address sp, Address js_entry_sp)
    : StackFrameIteratorBase(isolate, false),
      low_bound_(sp),
      high_bound_(js_entry_sp),
      top_frame_type_(StackFrame::NONE),
      external_callback_scope_(isolate->external_callback_scope()) {
  StackFrame::State state;
  StackFrame::Type type;
  ThreadLocalTop* top = isolate->thread_local_top();
  if (IsValidTop(top)) {
    type = ExitFrame::GetStateForFramePointer(Isolate::c_entry_fp(top), &state);
    top_frame_type_ = type;
  } else if (IsValidStackAddress(fp)) {
    DCHECK(fp != NULL);
    state.fp = fp;
    state.sp = sp;
    state.pc_address = StackFrame::ResolveReturnAddressLocation(
        reinterpret_cast<Address*>(StandardFrame::ComputePCAddress(fp)));
    // StackFrame::ComputeType will read both kContextOffset and kMarkerOffset,
    // we check only that kMarkerOffset is within the stack bounds and do
    // compile time check that kContextOffset slot is pushed on the stack before
    // kMarkerOffset.
    STATIC_ASSERT(StandardFrameConstants::kFunctionOffset <
                  StandardFrameConstants::kContextOffset);
    Address frame_marker = fp + StandardFrameConstants::kFunctionOffset;
    if (IsValidStackAddress(frame_marker)) {
      type = StackFrame::ComputeType(this, &state);
      top_frame_type_ = type;
    } else {
      // Mark the frame as JAVA_SCRIPT if we cannot determine its type.
      // The frame will be skipped anyway.
      type = StackFrame::JAVA_SCRIPT;
      // Top frame is incomplete so we cannot reliably determine its type.
      top_frame_type_ = StackFrame::NONE;
    }
  } else {
    return;
  }
  frame_ = SingletonFor(type, &state);
  if (frame_) Advance();
}


bool SafeStackFrameIterator::IsValidTop(ThreadLocalTop* top) const {
  Address c_entry_fp = Isolate::c_entry_fp(top);
  if (!IsValidExitFrame(c_entry_fp)) return false;
  // There should be at least one JS_ENTRY stack handler.
  Address handler = Isolate::handler(top);
  if (handler == NULL) return false;
  // Check that there are no js frames on top of the native frames.
  return c_entry_fp < handler;
}


void SafeStackFrameIterator::AdvanceOneFrame() {
  DCHECK(!done());
  StackFrame* last_frame = frame_;
  Address last_sp = last_frame->sp(), last_fp = last_frame->fp();
  // Before advancing to the next stack frame, perform pointer validity tests.
  if (!IsValidFrame(last_frame) || !IsValidCaller(last_frame)) {
    frame_ = NULL;
    return;
  }

  // Advance to the previous frame.
  StackFrame::State state;
  StackFrame::Type type = frame_->GetCallerState(&state);
  frame_ = SingletonFor(type, &state);
  if (!frame_) return;

  // Check that we have actually moved to the previous frame in the stack.
  if (frame_->sp() < last_sp || frame_->fp() < last_fp) {
    frame_ = NULL;
  }
}


bool SafeStackFrameIterator::IsValidFrame(StackFrame* frame) const {
  return IsValidStackAddress(frame->sp()) && IsValidStackAddress(frame->fp());
}


bool SafeStackFrameIterator::IsValidCaller(StackFrame* frame) {
  StackFrame::State state;
  if (frame->is_entry() || frame->is_entry_construct()) {
    // See EntryFrame::GetCallerState. It computes the caller FP address
    // and calls ExitFrame::GetStateForFramePointer on it. We need to be
    // sure that caller FP address is valid.
    Address caller_fp = Memory::Address_at(
        frame->fp() + EntryFrameConstants::kCallerFPOffset);
    if (!IsValidExitFrame(caller_fp)) return false;
  } else if (frame->is_arguments_adaptor()) {
    // See ArgumentsAdaptorFrame::GetCallerStackPointer. It assumes that
    // the number of arguments is stored on the stack as a Smi. We need to
    // check that it really is a Smi.
    Object* number_of_args = reinterpret_cast<ArgumentsAdaptorFrame*>(frame)->
        GetExpression(0);
    if (!number_of_args->IsSmi()) {
      return false;
    }
  }
  frame->ComputeCallerState(&state);
  return IsValidStackAddress(state.sp) && IsValidStackAddress(state.fp) &&
      SingletonFor(frame->GetCallerState(&state)) != NULL;
}


bool SafeStackFrameIterator::IsValidExitFrame(Address fp) const {
  if (!IsValidStackAddress(fp)) return false;
  Address sp = ExitFrame::ComputeStackPointer(fp);
  if (!IsValidStackAddress(sp)) return false;
  StackFrame::State state;
  ExitFrame::FillState(fp, sp, &state);
  MSAN_MEMORY_IS_INITIALIZED(state.pc_address, sizeof(state.pc_address));
  return *state.pc_address != nullptr;
}


void SafeStackFrameIterator::Advance() {
  while (true) {
    AdvanceOneFrame();
    if (done()) break;
    ExternalCallbackScope* last_callback_scope = NULL;
    while (external_callback_scope_ != NULL &&
           external_callback_scope_->scope_address() < frame_->fp()) {
      // As long as the setup of a frame is not atomic, we may happen to be
      // in an interval where an ExternalCallbackScope is already created,
      // but the frame is not yet entered. So we are actually observing
      // the previous frame.
      // Skip all the ExternalCallbackScope's that are below the current fp.
      last_callback_scope = external_callback_scope_;
      external_callback_scope_ = external_callback_scope_->previous();
    }
    if (frame_->is_java_script()) break;
    if (frame_->is_exit() || frame_->is_builtin_exit()) {
      // Some of the EXIT frames may have ExternalCallbackScope allocated on
      // top of them. In that case the scope corresponds to the first EXIT
      // frame beneath it. There may be other EXIT frames on top of the
      // ExternalCallbackScope, just skip them as we cannot collect any useful
      // information about them.
      if (last_callback_scope) {
        frame_->state_.pc_address =
            last_callback_scope->callback_entrypoint_address();
      }
      break;
    }
  }
}


// -------------------------------------------------------------------------


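// Looks up the code object and safepoint entry for the given inner pointer,
// using (and filling) the isolate's inner-pointer-to-code cache.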
Code* StackFrame::GetSafepointData(Isolate* isolate,
                                   Address inner_pointer,
                                   SafepointEntry* safepoint_entry,
                                   unsigned* stack_slots) {
  InnerPointerToCodeCache::InnerPointerToCodeCacheEntry* entry =
      isolate->inner_pointer_to_code_cache()->GetCacheEntry(inner_pointer);
  if (!entry->safepoint_entry.is_valid()) {
    entry->safepoint_entry = entry->code->GetSafepointEntry(inner_pointer);
    DCHECK(entry->safepoint_entry.is_valid());
  } else {
    DCHECK(entry->safepoint_entry.Equals(
        entry->code->GetSafepointEntry(inner_pointer)));
  }

  // Fill in the results and return the code.
  Code* code = entry->code;
  *safepoint_entry = entry->safepoint_entry;
  *stack_slots = code->stack_slots();
  return code;
}


#ifdef DEBUG
static bool GcSafeCodeContains(HeapObject* object, Address addr);
#endif


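// Visits the code object that holds the current pc. If the GC moved the code,
// the pc (and, when embedded constant pools are enabled, the constant pool
// pointer) is rewritten to point into the relocated code object.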
void StackFrame::IteratePc(ObjectVisitor* v, Address* pc_address,
                           Address* constant_pool_address, Code* holder) {
  Address pc = *pc_address;
  DCHECK(GcSafeCodeContains(holder, pc));
  unsigned pc_offset = static_cast<unsigned>(pc - holder->instruction_start());
  Object* code = holder;
  v->VisitPointer(&code);
  if (code != holder) {
    holder = reinterpret_cast<Code*>(code);
    pc = holder->instruction_start() + pc_offset;
    *pc_address = pc;
    if (FLAG_enable_embedded_constant_pool && constant_pool_address) {
      *constant_pool_address = holder->constant_pool();
    }
  }
}


void StackFrame::SetReturnAddressLocationResolver(
    ReturnAddressLocationResolver resolver) {
  DCHECK(return_address_location_resolver_ == NULL);
  return_address_location_resolver_ = resolver;
}

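// Returns true if the pc lies inside one of the interpreter's entry
// trampoline, bytecode dispatch, or mark-baseline-on-return builtins, i.e.
// the frame at this pc belongs to interpreted code.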
static bool IsInterpreterFramePc(Isolate* isolate, Address pc) {
  Code* interpreter_entry_trampoline =
      isolate->builtins()->builtin(Builtins::kInterpreterEntryTrampoline);
  Code* interpreter_bytecode_dispatch =
      isolate->builtins()->builtin(Builtins::kInterpreterEnterBytecodeDispatch);
  Code* interpreter_baseline_on_return =
      isolate->builtins()->builtin(Builtins::kInterpreterMarkBaselineOnReturn);

  return (pc >= interpreter_entry_trampoline->instruction_start() &&
          pc < interpreter_entry_trampoline->instruction_end()) ||
         (pc >= interpreter_bytecode_dispatch->instruction_start() &&
          pc < interpreter_bytecode_dispatch->instruction_end()) ||
         (pc >= interpreter_baseline_on_return->instruction_start() &&
          pc < interpreter_baseline_on_return->instruction_end());
}

StackFrame::Type StackFrame::ComputeType(const StackFrameIteratorBase* iterator,
                                         State* state) {
  DCHECK(state->fp != NULL);

  MSAN_MEMORY_IS_INITIALIZED(
      state->fp + CommonFrameConstants::kContextOrFrameTypeOffset,
      kPointerSize);
  Object* marker = Memory::Object_at(
      state->fp + CommonFrameConstants::kContextOrFrameTypeOffset);
  if (!iterator->can_access_heap_objects_) {
    // TODO(titzer): "can_access_heap_objects" is kind of bogus. It really
    // means that we are being called from the profiler, which can interrupt
    // the VM with a signal at any arbitrary instruction, with essentially
    // anything on the stack. So basically none of these checks are 100%
    // reliable.
    MSAN_MEMORY_IS_INITIALIZED(
        state->fp + StandardFrameConstants::kFunctionOffset, kPointerSize);
    Object* maybe_function =
        Memory::Object_at(state->fp + StandardFrameConstants::kFunctionOffset);
    if (!marker->IsSmi()) {
      if (maybe_function->IsSmi()) {
        return NONE;
      } else if (FLAG_ignition && IsInterpreterFramePc(iterator->isolate(),
                                                       *(state->pc_address))) {
        return INTERPRETED;
      } else {
        return JAVA_SCRIPT;
      }
    }
  } else {
    // Look up the code object to figure out the type of the stack frame.
    Code* code_obj =
        GetContainingCode(iterator->isolate(), *(state->pc_address));
    if (code_obj != nullptr) {
      switch (code_obj->kind()) {
        case Code::BUILTIN:
          if (marker->IsSmi()) break;
          if (code_obj->is_interpreter_trampoline_builtin()) {
            return INTERPRETED;
          }
          if (code_obj->is_turbofanned()) {
            // TODO(bmeurer): We treat frames for BUILTIN Code objects as
            // OptimizedFrame for now (all the builtins with JavaScript
            // linkage are actually generated with TurboFan currently, so
            // this is sound).
            return OPTIMIZED;
          }
          return BUILTIN;
        case Code::FUNCTION:
          return JAVA_SCRIPT;
        case Code::OPTIMIZED_FUNCTION:
          return OPTIMIZED;
        case Code::WASM_FUNCTION:
          return WASM;
        case Code::WASM_TO_JS_FUNCTION:
          return WASM_TO_JS;
        case Code::JS_TO_WASM_FUNCTION:
          return JS_TO_WASM;
        default:
          // All other types should have an explicit marker
          break;
      }
    } else {
      return NONE;
    }
  }

  DCHECK(marker->IsSmi());
  StackFrame::Type candidate =
      static_cast<StackFrame::Type>(Smi::cast(marker)->value());
  switch (candidate) {
    case ENTRY:
    case ENTRY_CONSTRUCT:
    case EXIT:
    case BUILTIN_EXIT:
    case STUB:
    case STUB_FAILURE_TRAMPOLINE:
    case INTERNAL:
    case CONSTRUCT:
    case ARGUMENTS_ADAPTOR:
    case WASM_TO_JS:
    case WASM:
      return candidate;
    case JS_TO_WASM:
    case JAVA_SCRIPT:
    case OPTIMIZED:
    case INTERPRETED:
    default:
      // Unoptimized and optimized JavaScript frames, including
      // interpreted frames, should never have a StackFrame::Type
      // marker. If we find one, we're likely being called from the
      // profiler in a bogus stack frame.
      return NONE;
  }
}


#ifdef DEBUG
bool StackFrame::can_access_heap_objects() const {
  return iterator_->can_access_heap_objects_;
}
#endif


StackFrame::Type StackFrame::GetCallerState(State* state) const {
  ComputeCallerState(state);
  return ComputeType(iterator_, state);
}


Address StackFrame::UnpaddedFP() const {
  return fp();
}


Code* EntryFrame::unchecked_code() const {
  return isolate()->heap()->js_entry_code();
}


void EntryFrame::ComputeCallerState(State* state) const {
  GetCallerState(state);
}


void EntryFrame::SetCallerFp(Address caller_fp) {
  const int offset = EntryFrameConstants::kCallerFPOffset;
  Memory::Address_at(this->fp() + offset) = caller_fp;
}


StackFrame::Type EntryFrame::GetCallerState(State* state) const {
  const int offset = EntryFrameConstants::kCallerFPOffset;
  Address fp = Memory::Address_at(this->fp() + offset);
  return ExitFrame::GetStateForFramePointer(fp, state);
}


Code* EntryConstructFrame::unchecked_code() const {
  return isolate()->heap()->js_construct_entry_code();
}


Object*& ExitFrame::code_slot() const {
  const int offset = ExitFrameConstants::kCodeOffset;
  return Memory::Object_at(fp() + offset);
}

Code* ExitFrame::unchecked_code() const {
  return reinterpret_cast<Code*>(code_slot());
}


void ExitFrame::ComputeCallerState(State* state) const {
  // Set up the caller state.
  state->sp = caller_sp();
  state->fp = Memory::Address_at(fp() + ExitFrameConstants::kCallerFPOffset);
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(fp() + ExitFrameConstants::kCallerPCOffset));
  if (FLAG_enable_embedded_constant_pool) {
    state->constant_pool_address = reinterpret_cast<Address*>(
        fp() + ExitFrameConstants::kConstantPoolOffset);
  }
}


void ExitFrame::SetCallerFp(Address caller_fp) {
  Memory::Address_at(fp() + ExitFrameConstants::kCallerFPOffset) = caller_fp;
}


void ExitFrame::Iterate(ObjectVisitor* v) const {
  // The arguments are traversed as part of the expression stack of
  // the calling frame.
  IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
  v->VisitPointer(&code_slot());
}


Address ExitFrame::GetCallerStackPointer() const {
  return fp() + ExitFrameConstants::kCallerSPOffset;
}


StackFrame::Type ExitFrame::GetStateForFramePointer(Address fp, State* state) {
  if (fp == 0) return NONE;
  Address sp = ComputeStackPointer(fp);
  FillState(fp, sp, state);
  DCHECK(*state->pc_address != NULL);

  return ComputeFrameType(fp);
}

StackFrame::Type ExitFrame::ComputeFrameType(Address fp) {
  // Distinguish between regular and builtin exit frames.
  // Default to EXIT in all hairy cases (e.g., when called from profiler).
  const int offset = ExitFrameConstants::kFrameTypeOffset;
  Object* marker = Memory::Object_at(fp + offset);

  if (!marker->IsSmi()) {
    return EXIT;
  }

  StackFrame::Type frame_type =
      static_cast<StackFrame::Type>(Smi::cast(marker)->value());
  if (frame_type == EXIT || frame_type == BUILTIN_EXIT) {
    return frame_type;
  }

  return EXIT;
}

Address ExitFrame::ComputeStackPointer(Address fp) {
  MSAN_MEMORY_IS_INITIALIZED(fp + ExitFrameConstants::kSPOffset, kPointerSize);
  return Memory::Address_at(fp + ExitFrameConstants::kSPOffset);
}

void ExitFrame::FillState(Address fp, Address sp, State* state) {
  state->sp = sp;
  state->fp = fp;
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(sp - 1 * kPCOnStackSize));
  // The constant pool recorded in the exit frame is not associated
  // with the pc in this state (the return address into a C entry
  // stub).  ComputeCallerState will retrieve the constant pool
  // together with the associated caller pc.
  state->constant_pool_address = NULL;
}

JSFunction* BuiltinExitFrame::function() const {
  return JSFunction::cast(target_slot_object());
}

Object* BuiltinExitFrame::receiver() const { return receiver_slot_object(); }

bool BuiltinExitFrame::IsConstructor() const {
  return !new_target_slot_object()->IsUndefined(isolate());
}

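// Arguments of a builtin exit frame are laid out contiguously above the argc
// slot: parameter i lives (i + 1) words above it.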
Object* BuiltinExitFrame::GetParameter(int i) const {
  DCHECK(i >= 0 && i < ComputeParametersCount());
  int offset = BuiltinExitFrameConstants::kArgcOffset + (i + 1) * kPointerSize;
  return Memory::Object_at(fp() + offset);
}

int BuiltinExitFrame::ComputeParametersCount() const {
  Object* argc_slot = argc_slot_object();
  DCHECK(argc_slot->IsSmi());
  // Argc also counts the receiver, target, new target, and argc itself as
  // args, therefore the real argument count is argc - 4.
  int argc = Smi::cast(argc_slot)->value() - 4;
  DCHECK(argc >= 0);
  return argc;
}

void BuiltinExitFrame::Print(StringStream* accumulator, PrintMode mode,
                             int index) const {
  DisallowHeapAllocation no_gc;
  Object* receiver = this->receiver();
  JSFunction* function = this->function();

  accumulator->PrintSecurityTokenIfChanged(function);
  PrintIndex(accumulator, mode, index);
  accumulator->Add("builtin exit frame: ");
  Code* code = NULL;
  if (IsConstructor()) accumulator->Add("new ");
  accumulator->PrintFunction(function, receiver, &code);

  accumulator->Add("(this=%o", receiver);

  // Print the parameters.
  int parameters_count = ComputeParametersCount();
  for (int i = 0; i < parameters_count; i++) {
    accumulator->Add(",%o", GetParameter(i));
  }

  accumulator->Add(")\n\n");
}

Address StandardFrame::GetExpressionAddress(int n) const {
  const int offset = StandardFrameConstants::kExpressionsOffset;
  return fp() + offset - n * kPointerSize;
}

Address InterpretedFrame::GetExpressionAddress(int n) const {
  const int offset = InterpreterFrameConstants::kExpressionsOffset;
  return fp() + offset - n * kPointerSize;
}

Script* StandardFrame::script() const {
  // This should only be called on frames which override this method.
  DCHECK(false);
  return nullptr;
}

Object* StandardFrame::receiver() const {
  return isolate()->heap()->undefined_value();
}

Object* StandardFrame::context() const {
  return isolate()->heap()->undefined_value();
}

int StandardFrame::ComputeExpressionsCount() const {
  Address base = GetExpressionAddress(0);
  Address limit = sp() - kPointerSize;
  DCHECK(base >= limit);  // stack grows downwards
  // Include register-allocated locals in number of expressions.
  return static_cast<int>((base - limit) / kPointerSize);
}

Object* StandardFrame::GetParameter(int index) const {
  // StandardFrame does not define any parameters.
  UNREACHABLE();
  return nullptr;
}

int StandardFrame::ComputeParametersCount() const { return 0; }

void StandardFrame::ComputeCallerState(State* state) const {
  state->sp = caller_sp();
  state->fp = caller_fp();
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(ComputePCAddress(fp())));
  state->constant_pool_address =
      reinterpret_cast<Address*>(ComputeConstantPoolAddress(fp()));
}


void StandardFrame::SetCallerFp(Address caller_fp) {
  Memory::Address_at(fp() + StandardFrameConstants::kCallerFPOffset) =
      caller_fp;
}

bool StandardFrame::IsConstructor() const { return false; }

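// Visits all tagged values in a compiled frame: outgoing arguments above the
// saved registers, registers and spill slots described by the safepoint
// entry, the code object referenced by the pc, and (for non-wasm frames) the
// fixed frame header.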
void StandardFrame::IterateCompiledFrame(ObjectVisitor* v) const {
  // Make sure that we're not doing "safe" stack frame iteration. We cannot
  // possibly find pointers in optimized frames in that state.
  DCHECK(can_access_heap_objects());

  // Compute the safepoint information.
  unsigned stack_slots = 0;
  SafepointEntry safepoint_entry;
  Code* code = StackFrame::GetSafepointData(
      isolate(), pc(), &safepoint_entry, &stack_slots);
  unsigned slot_space = stack_slots * kPointerSize;

  // Determine the fixed header and spill slot area size.
  int frame_header_size = StandardFrameConstants::kFixedFrameSizeFromFp;
  Object* marker =
      Memory::Object_at(fp() + CommonFrameConstants::kContextOrFrameTypeOffset);
  if (marker->IsSmi()) {
    StackFrame::Type candidate =
        static_cast<StackFrame::Type>(Smi::cast(marker)->value());
    switch (candidate) {
      case ENTRY:
      case ENTRY_CONSTRUCT:
      case EXIT:
      case BUILTIN_EXIT:
      case STUB_FAILURE_TRAMPOLINE:
      case ARGUMENTS_ADAPTOR:
      case STUB:
      case INTERNAL:
      case CONSTRUCT:
      case JS_TO_WASM:
      case WASM_TO_JS:
      case WASM:
        frame_header_size = TypedFrameConstants::kFixedFrameSizeFromFp;
        break;
      case JAVA_SCRIPT:
      case OPTIMIZED:
      case INTERPRETED:
      case BUILTIN:
        // These frame types have a context, but it is actually stored
        // in the place on the stack where one finds the frame type.
        UNREACHABLE();
        break;
      case NONE:
      case NUMBER_OF_TYPES:
      case MANUAL:
        UNREACHABLE();
        break;
    }
  }
  slot_space -=
      (frame_header_size + StandardFrameConstants::kFixedFrameSizeAboveFp);

  Object** frame_header_base = &Memory::Object_at(fp() - frame_header_size);
  Object** frame_header_limit =
      &Memory::Object_at(fp() - StandardFrameConstants::kCPSlotSize);
  Object** parameters_base = &Memory::Object_at(sp());
  Object** parameters_limit = frame_header_base - slot_space / kPointerSize;

  // Visit the parameters that may be on top of the saved registers.
  if (safepoint_entry.argument_count() > 0) {
    v->VisitPointers(parameters_base,
                     parameters_base + safepoint_entry.argument_count());
    parameters_base += safepoint_entry.argument_count();
  }

  // Skip saved double registers.
  if (safepoint_entry.has_doubles()) {
    // Number of doubles not known at snapshot time.
    DCHECK(!isolate()->serializer_enabled());
    parameters_base += RegisterConfiguration::Crankshaft()
                           ->num_allocatable_double_registers() *
                       kDoubleSize / kPointerSize;
  }

  // Visit the registers that contain pointers if any.
  if (safepoint_entry.HasRegisters()) {
    for (int i = kNumSafepointRegisters - 1; i >= 0; i--) {
      if (safepoint_entry.HasRegisterAt(i)) {
        int reg_stack_index = MacroAssembler::SafepointRegisterStackIndex(i);
        v->VisitPointer(parameters_base + reg_stack_index);
      }
    }
    // Skip the words containing the register values.
    parameters_base += kNumSafepointRegisters;
  }

  // We're done dealing with the register bits.
  uint8_t* safepoint_bits = safepoint_entry.bits();
  safepoint_bits += kNumSafepointRegisters >> kBitsPerByteLog2;

  // Visit the rest of the parameters.
  if (!is_js_to_wasm() && !is_wasm()) {
    // Non-WASM frames have tagged values as parameters.
    v->VisitPointers(parameters_base, parameters_limit);
  }

  // Visit pointer spill slots and locals.
  for (unsigned index = 0; index < stack_slots; index++) {
    int byte_index = index >> kBitsPerByteLog2;
    int bit_index = index & (kBitsPerByte - 1);
    if ((safepoint_bits[byte_index] & (1U << bit_index)) != 0) {
      v->VisitPointer(parameters_limit + index);
    }
  }

  // Visit the return address in the callee and incoming arguments.
  IteratePc(v, pc_address(), constant_pool_address(), code);

  if (!is_wasm() && !is_wasm_to_js()) {
    // Visit the context in stub frame and JavaScript frame.
    // Visit the function in JavaScript frame.
    v->VisitPointers(frame_header_base, frame_header_limit);
  }
}


void StubFrame::Iterate(ObjectVisitor* v) const {
  IterateCompiledFrame(v);
}


Code* StubFrame::unchecked_code() const {
  return static_cast<Code*>(isolate()->FindCodeObject(pc()));
}


Address StubFrame::GetCallerStackPointer() const {
  return fp() + ExitFrameConstants::kCallerSPOffset;
}


int StubFrame::GetNumberOfIncomingArguments() const {
  return 0;
}


void OptimizedFrame::Iterate(ObjectVisitor* v) const {
  IterateCompiledFrame(v);
}


void JavaScriptFrame::SetParameterValue(int index, Object* value) const {
  Memory::Object_at(GetParameterSlot(index)) = value;
}


bool JavaScriptFrame::IsConstructor() const {
  Address fp = caller_fp();
  if (has_adapted_arguments()) {
    // Skip the arguments adaptor frame and look at the real caller.
    fp = Memory::Address_at(fp + StandardFrameConstants::kCallerFPOffset);
  }
  return IsConstructFrame(fp);
}


bool JavaScriptFrame::HasInlinedFrames() const {
  List<JSFunction*> functions(1);
  GetFunctions(&functions);
  return functions.length() > 1;
}


int JavaScriptFrame::GetArgumentsLength() const {
  // If there is an arguments adaptor frame get the arguments length from it.
  if (has_adapted_arguments()) {
    return ArgumentsAdaptorFrame::GetLength(caller_fp());
  } else {
    return GetNumberOfIncomingArguments();
  }
}


Code* JavaScriptFrame::unchecked_code() const {
  return function()->code();
}


int JavaScriptFrame::GetNumberOfIncomingArguments() const {
  DCHECK(can_access_heap_objects() &&
         isolate()->heap()->gc_state() == Heap::NOT_IN_GC);

  return function()->shared()->internal_formal_parameter_count();
}


Address JavaScriptFrame::GetCallerStackPointer() const {
  return fp() + StandardFrameConstants::kCallerSPOffset;
}


void JavaScriptFrame::GetFunctions(List<JSFunction*>* functions) const {
  DCHECK(functions->length() == 0);
  functions->Add(function());
}

void JavaScriptFrame::Summarize(List<FrameSummary>* functions,
                                FrameSummary::Mode mode) const {
  DCHECK(functions->length() == 0);
  Code* code = LookupCode();
  int offset = static_cast<int>(pc() - code->instruction_start());
  AbstractCode* abstract_code = AbstractCode::cast(code);
  FrameSummary summary(receiver(), function(), abstract_code, offset,
                       IsConstructor(), mode);
  functions->Add(summary);
}

JSFunction* JavaScriptFrame::function() const {
  return JSFunction::cast(function_slot_object());
}

Object* JavaScriptFrame::receiver() const { return GetParameter(-1); }

Script* JavaScriptFrame::script() const {
  return Script::cast(function()->shared()->script());
}

Object* JavaScriptFrame::context() const {
  const int offset = StandardFrameConstants::kContextOffset;
  Object* maybe_result = Memory::Object_at(fp() + offset);
  DCHECK(!maybe_result->IsSmi());
  return maybe_result;
}

int JavaScriptFrame::LookupExceptionHandlerInTable(
    int* stack_depth, HandlerTable::CatchPrediction* prediction) {
  Code* code = LookupCode();
  DCHECK(!code->is_optimized_code());
  int pc_offset = static_cast<int>(pc() - code->entry());
  return code->LookupRangeInHandlerTable(pc_offset, stack_depth, prediction);
}

void JavaScriptFrame::PrintFunctionAndOffset(JSFunction* function, Code* code,
                                             Address pc, FILE* file,
                                             bool print_line_number) {
  PrintF(file, "%s", function->IsOptimized() ? "*" : "~");
  function->PrintName(file);
  int code_offset = static_cast<int>(pc - code->instruction_start());
  PrintF(file, "+%d", code_offset);
  if (print_line_number) {
    SharedFunctionInfo* shared = function->shared();
    int source_pos = AbstractCode::cast(code)->SourcePosition(code_offset);
    Object* maybe_script = shared->script();
    if (maybe_script->IsScript()) {
      Script* script = Script::cast(maybe_script);
      int line = script->GetLineNumber(source_pos) + 1;
      Object* script_name_raw = script->name();
      if (script_name_raw->IsString()) {
        String* script_name = String::cast(script->name());
        std::unique_ptr<char[]> c_script_name =
            script_name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
        PrintF(file, " at %s:%d", c_script_name.get(), line);
      } else {
        PrintF(file, " at <unknown>:%d", line);
      }
    } else {
      PrintF(file, " at <unknown>:<unknown>");
    }
  }
}


void JavaScriptFrame::PrintTop(Isolate* isolate, FILE* file, bool print_args,
                               bool print_line_number) {
  // constructor calls
  DisallowHeapAllocation no_allocation;
  JavaScriptFrameIterator it(isolate);
  while (!it.done()) {
    if (it.frame()->is_java_script()) {
      JavaScriptFrame* frame = it.frame();
      if (frame->IsConstructor()) PrintF(file, "new ");
      PrintFunctionAndOffset(frame->function(), frame->unchecked_code(),
                             frame->pc(), file, print_line_number);
      if (print_args) {
        // function arguments
        // (we are intentionally only printing the actually
        // supplied parameters, not all parameters required)
        PrintF(file, "(this=");
        frame->receiver()->ShortPrint(file);
        const int length = frame->ComputeParametersCount();
        for (int i = 0; i < length; i++) {
          PrintF(file, ", ");
          frame->GetParameter(i)->ShortPrint(file);
        }
        PrintF(file, ")");
      }
      break;
    }
    it.Advance();
  }
}


void JavaScriptFrame::SaveOperandStack(FixedArray* store) const {
  int operands_count = store->length();
  DCHECK_LE(operands_count, ComputeOperandsCount());
  for (int i = 0; i < operands_count; i++) {
    store->set(i, GetOperand(i));
  }
}

Object* JavaScriptFrame::GetParameter(int index) const {
  return Memory::Object_at(GetParameterSlot(index));
}

int JavaScriptFrame::ComputeParametersCount() const {
  return GetNumberOfIncomingArguments();
}

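// Helper used below: TurboFan code compiled from asm.js functions cannot
// deoptimize unless --turbo-asm-deoptimization is enabled, so such frames are
// treated like regular JavaScript frames.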
namespace {

bool CannotDeoptFromAsmCode(Code* code, JSFunction* function) {
  return code->is_turbofanned() && function->shared()->asm_function() &&
         !FLAG_turbo_asm_deoptimization;
}

}  // namespace

FrameSummary::FrameSummary(Object* receiver, JSFunction* function,
                           AbstractCode* abstract_code, int code_offset,
                           bool is_constructor, Mode mode)
    : receiver_(receiver, function->GetIsolate()),
      function_(function),
      abstract_code_(abstract_code),
      code_offset_(code_offset),
      is_constructor_(is_constructor) {
  DCHECK(abstract_code->IsBytecodeArray() ||
         Code::cast(abstract_code)->kind() != Code::OPTIMIZED_FUNCTION ||
         CannotDeoptFromAsmCode(Code::cast(abstract_code), function) ||
         mode == kApproximateSummary);
}

FrameSummary FrameSummary::GetFirst(JavaScriptFrame* frame) {
  List<FrameSummary> frames(FLAG_max_inlining_levels + 1);
  frame->Summarize(&frames);
  return frames.first();
}

void FrameSummary::Print() {
  PrintF("receiver: ");
  receiver_->ShortPrint();
  PrintF("\nfunction: ");
  function_->shared()->DebugName()->ShortPrint();
  PrintF("\ncode: ");
  abstract_code_->ShortPrint();
  if (abstract_code_->IsCode()) {
    Code* code = abstract_code_->GetCode();
    if (code->kind() == Code::FUNCTION) PrintF(" UNOPT ");
    if (code->kind() == Code::OPTIMIZED_FUNCTION) {
      if (function()->shared()->asm_function()) {
        DCHECK(CannotDeoptFromAsmCode(code, *function()));
        PrintF(" ASM ");
      } else {
        PrintF(" OPT (approximate)");
      }
    }
  } else {
    PrintF(" BYTECODE ");
  }
  PrintF("\npc: %d\n", code_offset_);
}

void OptimizedFrame::Summarize(List<FrameSummary>* frames,
                               FrameSummary::Mode mode) const {
  DCHECK(frames->length() == 0);
  DCHECK(is_optimized());

  // Delegate to JS frame in absence of turbofan deoptimization.
  // TODO(turbofan): Revisit once we support deoptimization across the board.
  Code* code = LookupCode();
  if (code->kind() == Code::BUILTIN ||
      CannotDeoptFromAsmCode(code, function())) {
    return JavaScriptFrame::Summarize(frames);
  }

  DisallowHeapAllocation no_gc;
  int deopt_index = Safepoint::kNoDeoptimizationIndex;
  DeoptimizationInputData* const data = GetDeoptimizationData(&deopt_index);
  if (deopt_index == Safepoint::kNoDeoptimizationIndex) {
    DCHECK(data == nullptr);
    if (mode == FrameSummary::kApproximateSummary) {
      return JavaScriptFrame::Summarize(frames, mode);
    }
    FATAL("Missing deoptimization information for OptimizedFrame::Summarize.");
  }
  FixedArray* const literal_array = data->LiteralArray();

  TranslationIterator it(data->TranslationByteArray(),
                         data->TranslationIndex(deopt_index)->value());
  Translation::Opcode frame_opcode =
      static_cast<Translation::Opcode>(it.Next());
  DCHECK_EQ(Translation::BEGIN, frame_opcode);
  it.Next();  // Drop frame count.
  int jsframe_count = it.Next();

  // We create the summary in reverse order because the frames
  // in the deoptimization translation are ordered bottom-to-top.
  bool is_constructor = IsConstructor();
  while (jsframe_count != 0) {
    frame_opcode = static_cast<Translation::Opcode>(it.Next());
    if (frame_opcode == Translation::JS_FRAME ||
        frame_opcode == Translation::INTERPRETED_FRAME) {
      jsframe_count--;
      BailoutId const bailout_id = BailoutId(it.Next());
      SharedFunctionInfo* const shared_info =
          SharedFunctionInfo::cast(literal_array->get(it.Next()));
      it.Next();  // Skip height.

      // The translation commands are ordered and the function is always
      // at the first position, and the receiver is next.
      Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());

      // Get the correct function in the optimized frame.
      JSFunction* function;
      if (opcode == Translation::LITERAL) {
        function = JSFunction::cast(literal_array->get(it.Next()));
      } else {
        CHECK_EQ(opcode, Translation::STACK_SLOT);
        function = JSFunction::cast(StackSlotAt(it.Next()));
      }
      DCHECK_EQ(shared_info, function->shared());

      // If we are at a call, the receiver is always in a stack slot.
      // Otherwise we are not guaranteed to get the receiver value.
      opcode = static_cast<Translation::Opcode>(it.Next());

      // Get the correct receiver in the optimized frame.
      Object* receiver;
      if (opcode == Translation::LITERAL) {
        receiver = literal_array->get(it.Next());
      } else if (opcode == Translation::STACK_SLOT) {
        receiver = StackSlotAt(it.Next());
      } else {
        // The receiver is not in a stack slot nor in a literal.  We give up.
        it.Skip(Translation::NumberOfOperandsFor(opcode));
        // TODO(3029): Materializing a captured object (or duplicated
        // object) is hard, we return undefined for now. This breaks the
        // produced stack trace, as constructor frames aren't marked as
        // such anymore.
        receiver = isolate()->heap()->undefined_value();
      }

      AbstractCode* abstract_code;

      unsigned code_offset;
      if (frame_opcode == Translation::JS_FRAME) {
        Code* code = shared_info->code();
        DeoptimizationOutputData* const output_data =
            DeoptimizationOutputData::cast(code->deoptimization_data());
        unsigned const entry =
            Deoptimizer::GetOutputInfo(output_data, bailout_id, shared_info);
        code_offset = FullCodeGenerator::PcField::decode(entry);
        abstract_code = AbstractCode::cast(code);
      } else {
        DCHECK_EQ(frame_opcode, Translation::INTERPRETED_FRAME);
        // BailoutId points to the next bytecode in the bytecode array.
        // Subtract 1 to get the end of the current bytecode.
        code_offset = bailout_id.ToInt() - 1;
        abstract_code = AbstractCode::cast(shared_info->bytecode_array());
      }
      FrameSummary summary(receiver, function, abstract_code, code_offset,
                           is_constructor);
      frames->Add(summary);
      is_constructor = false;
    } else if (frame_opcode == Translation::CONSTRUCT_STUB_FRAME) {
      // The next encountered JS_FRAME will be marked as a constructor call.
      it.Skip(Translation::NumberOfOperandsFor(frame_opcode));
      DCHECK(!is_constructor);
      is_constructor = true;
    } else {
      // Skip over operands to advance to the next opcode.
      it.Skip(Translation::NumberOfOperandsFor(frame_opcode));
    }
  }
  DCHECK(!is_constructor);
}


int OptimizedFrame::LookupExceptionHandlerInTable(
    int* stack_slots, HandlerTable::CatchPrediction* prediction) {
  // We cannot perform exception prediction on optimized code. Instead, we need
  // to use FrameSummary to find the corresponding code offset in unoptimized
  // code to perform prediction there.
  DCHECK_NULL(prediction);
  Code* code = LookupCode();
  HandlerTable* table = HandlerTable::cast(code->handler_table());
  int pc_offset = static_cast<int>(pc() - code->entry());
  if (stack_slots) *stack_slots = code->stack_slots();
  return table->LookupReturn(pc_offset);
}


DeoptimizationInputData* OptimizedFrame::GetDeoptimizationData(
    int* deopt_index) const {
  DCHECK(is_optimized());

  JSFunction* opt_function = function();
  Code* code = opt_function->code();

  // The code object may have been replaced by lazy deoptimization. Fall
  // back to a slow search in this case to find the original optimized
  // code object.
  if (!code->contains(pc())) {
    code = isolate()->inner_pointer_to_code_cache()->
        GcSafeFindCodeForInnerPointer(pc());
  }
  DCHECK(code != NULL);
  DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION);

  SafepointEntry safepoint_entry = code->GetSafepointEntry(pc());
  *deopt_index = safepoint_entry.deoptimization_index();
  if (*deopt_index != Safepoint::kNoDeoptimizationIndex) {
    return DeoptimizationInputData::cast(code->deoptimization_data());
  }
  return nullptr;
}


void OptimizedFrame::GetFunctions(List<JSFunction*>* functions) const {
  DCHECK(functions->length() == 0);
  DCHECK(is_optimized());

  // Delegate to JS frame in absence of turbofan deoptimization.
  // TODO(turbofan): Revisit once we support deoptimization across the board.
  Code* code = LookupCode();
  if (code->kind() == Code::BUILTIN ||
      CannotDeoptFromAsmCode(code, function())) {
    return JavaScriptFrame::GetFunctions(functions);
  }

  DisallowHeapAllocation no_gc;
  int deopt_index = Safepoint::kNoDeoptimizationIndex;
  DeoptimizationInputData* const data = GetDeoptimizationData(&deopt_index);
  DCHECK_NOT_NULL(data);
  DCHECK_NE(Safepoint::kNoDeoptimizationIndex, deopt_index);
  FixedArray* const literal_array = data->LiteralArray();

  TranslationIterator it(data->TranslationByteArray(),
                         data->TranslationIndex(deopt_index)->value());
  Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
  DCHECK_EQ(Translation::BEGIN, opcode);
  it.Next();  // Skip frame count.
  int jsframe_count = it.Next();

  // We insert the frames in reverse order because the frames
  // in the deoptimization translation are ordered bottom-to-top.
  while (jsframe_count != 0) {
    opcode = static_cast<Translation::Opcode>(it.Next());
    // Skip over operands to advance to the next opcode.
    it.Skip(Translation::NumberOfOperandsFor(opcode));
    if (opcode == Translation::JS_FRAME ||
        opcode == Translation::INTERPRETED_FRAME) {
      jsframe_count--;

      // The translation commands are ordered and the function is always at the
      // first position.
      opcode = static_cast<Translation::Opcode>(it.Next());

      // Get the correct function in the optimized frame.
      Object* function;
      if (opcode == Translation::LITERAL) {
        function = literal_array->get(it.Next());
      } else {
        CHECK_EQ(Translation::STACK_SLOT, opcode);
        function = StackSlotAt(it.Next());
      }
      functions->Add(JSFunction::cast(function));
    }
  }
}


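// Converts a deoptimization translation stack slot index into an fp-relative
// offset; slot i lives (i + 1) words below the caller's stack pointer.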
int OptimizedFrame::StackSlotOffsetRelativeToFp(int slot_index) {
  return StandardFrameConstants::kCallerSPOffset -
         ((slot_index + 1) * kPointerSize);
}


Object* OptimizedFrame::StackSlotAt(int index) const {
  return Memory::Object_at(fp() + StackSlotOffsetRelativeToFp(index));
}

int InterpretedFrame::LookupExceptionHandlerInTable(
    int* context_register, HandlerTable::CatchPrediction* prediction) {
  BytecodeArray* bytecode = function()->shared()->bytecode_array();
  return bytecode->LookupRangeInHandlerTable(GetBytecodeOffset(),
                                             context_register, prediction);
}

int InterpretedFrame::GetBytecodeOffset() const {
  const int index = InterpreterFrameConstants::kBytecodeOffsetExpressionIndex;
  DCHECK_EQ(
      InterpreterFrameConstants::kBytecodeOffsetFromFp,
      InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
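  // The offset is stored relative to the tagged BytecodeArray pointer; strip
  // the array header to obtain the offset into the bytecode stream.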
  int raw_offset = Smi::cast(GetExpression(index))->value();
  return raw_offset - BytecodeArray::kHeaderSize + kHeapObjectTag;
}

void InterpretedFrame::PatchBytecodeOffset(int new_offset) {
  const int index = InterpreterFrameConstants::kBytecodeOffsetExpressionIndex;
  DCHECK_EQ(
      InterpreterFrameConstants::kBytecodeOffsetFromFp,
      InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
  int raw_offset = new_offset + BytecodeArray::kHeaderSize - kHeapObjectTag;
  SetExpression(index, Smi::FromInt(raw_offset));
}

BytecodeArray* InterpretedFrame::GetBytecodeArray() const {
  const int index = InterpreterFrameConstants::kBytecodeArrayExpressionIndex;
  DCHECK_EQ(
      InterpreterFrameConstants::kBytecodeArrayFromFp,
      InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
  return BytecodeArray::cast(GetExpression(index));
}

void InterpretedFrame::PatchBytecodeArray(BytecodeArray* bytecode_array) {
  const int index = InterpreterFrameConstants::kBytecodeArrayExpressionIndex;
  DCHECK_EQ(
      InterpreterFrameConstants::kBytecodeArrayFromFp,
      InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
  SetExpression(index, bytecode_array);
}

Object* InterpretedFrame::ReadInterpreterRegister(int register_index) const {
  const int index = InterpreterFrameConstants::kRegisterFileExpressionIndex;
  DCHECK_EQ(
      InterpreterFrameConstants::kRegisterFileFromFp,
      InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
  return GetExpression(index + register_index);
}

void InterpretedFrame::WriteInterpreterRegister(int register_index,
                                                Object* value) {
  const int index = InterpreterFrameConstants::kRegisterFileExpressionIndex;
  DCHECK_EQ(
      InterpreterFrameConstants::kRegisterFileFromFp,
      InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
  return SetExpression(index + register_index, value);
}

void InterpretedFrame::Summarize(List<FrameSummary>* functions,
                                 FrameSummary::Mode mode) const {
  DCHECK(functions->length() == 0);
  AbstractCode* abstract_code =
      AbstractCode::cast(function()->shared()->bytecode_array());
  FrameSummary summary(receiver(), function(), abstract_code,
                       GetBytecodeOffset(), IsConstructor());
  functions->Add(summary);
}

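// The arguments adaptor frame stores the actual argument count as a Smi in
// its first expression slot.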
int ArgumentsAdaptorFrame::GetNumberOfIncomingArguments() const {
  return Smi::cast(GetExpression(0))->value();
}

int ArgumentsAdaptorFrame::GetLength(Address fp) {
  const int offset = ArgumentsAdaptorFrameConstants::kLengthOffset;
  return Smi::cast(Memory::Object_at(fp + offset))->value();
}

Code* ArgumentsAdaptorFrame::unchecked_code() const {
  return isolate()->builtins()->builtin(
      Builtins::kArgumentsAdaptorTrampoline);
}

int BuiltinFrame::GetNumberOfIncomingArguments() const {
  return Smi::cast(GetExpression(0))->value();
}

void BuiltinFrame::PrintFrameKind(StringStream* accumulator) const {
  accumulator->Add("builtin frame: ");
}

Address InternalFrame::GetCallerStackPointer() const {
  // Internal frames have no arguments. The stack pointer of the
  // caller is at a fixed offset from the frame pointer.
  return fp() + StandardFrameConstants::kCallerSPOffset;
}

Code* InternalFrame::unchecked_code() const {
  const int offset = InternalFrameConstants::kCodeOffset;
  Object* code = Memory::Object_at(fp() + offset);
  DCHECK(code != NULL);
  return reinterpret_cast<Code*>(code);
}


void StackFrame::PrintIndex(StringStream* accumulator,
                            PrintMode mode,
                            int index) {
  accumulator->Add((mode == OVERVIEW) ? "%5d: " : "[%d]: ", index);
}

void WasmFrame::Print(StringStream* accumulator, PrintMode mode,
                      int index) const {
  accumulator->Add("wasm frame");
}

Code* WasmFrame::unchecked_code() const {
  return static_cast<Code*>(isolate()->FindCodeObject(pc()));
}

void WasmFrame::Iterate(ObjectVisitor* v) const { IterateCompiledFrame(v); }

Address WasmFrame::GetCallerStackPointer() const {
  return fp() + ExitFrameConstants::kCallerSPOffset;
}

Object* WasmFrame::wasm_obj() const {
  return wasm::GetOwningWasmInstance(*isolate()->factory()->undefined_value(),
                                     LookupCode());
}

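// The wasm function index is recorded in the second slot of the code object's
// two-element deoptimization data array.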
uint32_t WasmFrame::function_index() const {
  FixedArray* deopt_data = LookupCode()->deoptimization_data();
  DCHECK(deopt_data->length() == 2);
  return Smi::cast(deopt_data->get(1))->value();
}

Script* WasmFrame::script() const {
  Handle<JSObject> wasm(JSObject::cast(wasm_obj()), isolate());
  Handle<wasm::WasmDebugInfo> debug_info = wasm::GetDebugInfo(wasm);
  return wasm::WasmDebugInfo::GetFunctionScript(debug_info, function_index());
}

namespace {


void PrintFunctionSource(StringStream* accumulator, SharedFunctionInfo* shared,
                         Code* code) {
  if (FLAG_max_stack_trace_source_length != 0 && code != NULL) {
    std::ostringstream os;
    os << "--------- s o u r c e   c o d e ---------\n"
       << SourceCodeOf(shared, FLAG_max_stack_trace_source_length)
       << "\n-----------------------------------------\n";
    accumulator->Add(os.str().c_str());
  }
}


}  // namespace


void JavaScriptFrame::Print(StringStream* accumulator,
                            PrintMode mode,
                            int index) const {
  DisallowHeapAllocation no_gc;
  Object* receiver = this->receiver();
  JSFunction* function = this->function();

  accumulator->PrintSecurityTokenIfChanged(function);
  PrintIndex(accumulator, mode, index);
  PrintFrameKind(accumulator);
  Code* code = NULL;
  if (IsConstructor()) accumulator->Add("new ");
  accumulator->PrintFunction(function, receiver, &code);

  // Get scope information for nicer output, if possible. If code is NULL, or
  // doesn't contain scope info, scope_info will return 0 for the number of
  // parameters, stack local variables, context local variables, stack slots,
  // or context slots.
  SharedFunctionInfo* shared = function->shared();
  ScopeInfo* scope_info = shared->scope_info();
  Object* script_obj = shared->script();
  if (script_obj->IsScript()) {
    Script* script = Script::cast(script_obj);
    accumulator->Add(" [");
    accumulator->PrintName(script->name());

    Address pc = this->pc();
    if (code != NULL && code->kind() == Code::FUNCTION &&
        pc >= code->instruction_start() && pc < code->instruction_end()) {
      int offset = static_cast<int>(pc - code->instruction_start());
      int source_pos = AbstractCode::cast(code)->SourcePosition(offset);
      int line = script->GetLineNumber(source_pos) + 1;
      accumulator->Add(":%d] [pc=%p]", line, pc);
    } else if (is_interpreted()) {
      const InterpretedFrame* iframe =
          reinterpret_cast<const InterpretedFrame*>(this);
      BytecodeArray* bytecodes = iframe->GetBytecodeArray();
      int offset = iframe->GetBytecodeOffset();
      int source_pos = AbstractCode::cast(bytecodes)->SourcePosition(offset);
      int line = script->GetLineNumber(source_pos) + 1;
      accumulator->Add(":%d] [bytecode=%p offset=%d]", line, bytecodes, offset);
    } else {
      int function_start_pos = shared->start_position();
      int line = script->GetLineNumber(function_start_pos) + 1;
      accumulator->Add(":~%d] [pc=%p]", line, pc);
    }
  }

  accumulator->Add("(this=%o", receiver);

  // Print the parameters.
  int parameters_count = ComputeParametersCount();
  for (int i = 0; i < parameters_count; i++) {
    accumulator->Add(",");
    // If we have a name for the parameter we print it. Parameters are
    // nameless either because there are more actual than formal parameters
    // or because no scope information is available.
    if (i < scope_info->ParameterCount()) {
      accumulator->PrintName(scope_info->ParameterName(i));
      accumulator->Add("=");
    }
    accumulator->Add("%o", GetParameter(i));
  }

  accumulator->Add(")");
  if (mode == OVERVIEW) {
    accumulator->Add("\n");
    return;
  }
  if (is_optimized()) {
    accumulator->Add(" {\n// optimized frame\n");
    PrintFunctionSource(accumulator, shared, code);
    accumulator->Add("}\n");
    return;
  }
  accumulator->Add(" {\n");

  // Compute the number of locals and expression stack elements.
  int stack_locals_count = scope_info->StackLocalCount();
  int heap_locals_count = scope_info->ContextLocalCount();
  int expressions_count = ComputeExpressionsCount();

  // Print stack-allocated local variables.
  if (stack_locals_count > 0) {
    accumulator->Add("  // stack-allocated locals\n");
  }
  for (int i = 0; i < stack_locals_count; i++) {
    accumulator->Add("  var ");
    accumulator->PrintName(scope_info->StackLocalName(i));
    accumulator->Add(" = ");
    if (i < expressions_count) {
      accumulator->Add("%o", GetExpression(i));
    } else {
      accumulator->Add("// no expression found - inconsistent frame?");
    }
    accumulator->Add("\n");
  }

  // Try to get hold of the context of this frame.
  Context* context = NULL;
  if (this->context() != NULL && this->context()->IsContext()) {
    context = Context::cast(this->context());
  }
  while (context != NULL && context->IsWithContext()) {
    context = context->previous();
    DCHECK(context != NULL);
  }

  // Print heap-allocated local variables.
  if (heap_locals_count > 0) {
    accumulator->Add("  // heap-allocated locals\n");
  }
  for (int i = 0; i < heap_locals_count; i++) {
    accumulator->Add("  var ");
    accumulator->PrintName(scope_info->ContextLocalName(i));
    accumulator->Add(" = ");
    if (context != NULL) {
      int index = Context::MIN_CONTEXT_SLOTS + i;
      if (index < context->length()) {
        accumulator->Add("%o", context->get(index));
      } else {
        accumulator->Add(
            "// warning: missing context slot - inconsistent frame?");
      }
    } else {
      accumulator->Add("// warning: no context found - inconsistent frame?");
    }
    accumulator->Add("\n");
  }

  // Print the expression stack.
  int expressions_start = stack_locals_count;
  if (expressions_start < expressions_count) {
    accumulator->Add("  // expression stack (top to bottom)\n");
  }
  for (int i = expressions_count - 1; i >= expressions_start; i--) {
    accumulator->Add("  [%02d] : %o\n", i, GetExpression(i));
  }

  PrintFunctionSource(accumulator, shared, code);

  accumulator->Add("}\n\n");
}


void ArgumentsAdaptorFrame::Print(StringStream* accumulator,
                                  PrintMode mode,
                                  int index) const {
  int actual = ComputeParametersCount();
  int expected = -1;
  JSFunction* function = this->function();
  expected = function->shared()->internal_formal_parameter_count();

  PrintIndex(accumulator, mode, index);
  accumulator->Add("arguments adaptor frame: %d->%d", actual, expected);
  if (mode == OVERVIEW) {
    accumulator->Add("\n");
    return;
  }
  accumulator->Add(" {\n");

  // Print actual arguments.
  if (actual > 0) accumulator->Add("  // actual arguments\n");
  for (int i = 0; i < actual; i++) {
    accumulator->Add("  [%02d] : %o", i, GetParameter(i));
    if (expected != -1 && i >= expected) {
      accumulator->Add("  // not passed to callee");
    }
    accumulator->Add("\n");
  }

  accumulator->Add("}\n\n");
}


void EntryFrame::Iterate(ObjectVisitor* v) const {
  IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
}


void StandardFrame::IterateExpressions(ObjectVisitor* v) const {
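  // Visit every tagged slot from the stack pointer up to and including the
  // last object slot of the fixed frame header.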
  const int offset = StandardFrameConstants::kLastObjectOffset;
  Object** base = &Memory::Object_at(sp());
  Object** limit = &Memory::Object_at(fp() + offset) + 1;
  v->VisitPointers(base, limit);
}


void JavaScriptFrame::Iterate(ObjectVisitor* v) const {
  IterateExpressions(v);
  IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
}

void InternalFrame::Iterate(ObjectVisitor* v) const {
  // Internal frames only have object pointers on the expression stack
  // as they never have any arguments.
  IterateExpressions(v);
  IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
}


void StubFailureTrampolineFrame::Iterate(ObjectVisitor* v) const {
  Object** base = &Memory::Object_at(sp());
  Object** limit = &Memory::Object_at(
      fp() + StubFailureTrampolineFrameConstants::kFixedHeaderBottomOffset);
  v->VisitPointers(base, limit);
  base = &Memory::Object_at(fp() + StandardFrameConstants::kFunctionOffset);
  const int offset = StandardFrameConstants::kLastObjectOffset;
  limit = &Memory::Object_at(fp() + offset) + 1;
  v->VisitPointers(base, limit);
  IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
}


Address StubFailureTrampolineFrame::GetCallerStackPointer() const {
  return fp() + StandardFrameConstants::kCallerSPOffset;
}


Code* StubFailureTrampolineFrame::unchecked_code() const {
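  // The trampoline stub exists in two variants; probe the code cache for both
  // and return whichever one contains the current pc.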
  Code* trampoline;
  StubFailureTrampolineStub(isolate(), NOT_JS_FUNCTION_STUB_MODE).
      FindCodeInCache(&trampoline);
  if (trampoline->contains(pc())) {
    return trampoline;
  }

  StubFailureTrampolineStub(isolate(), JS_FUNCTION_STUB_MODE).
      FindCodeInCache(&trampoline);
  if (trampoline->contains(pc())) {
    return trampoline;
  }

  UNREACHABLE();
  return NULL;
}


// -------------------------------------------------------------------------


JavaScriptFrame* StackFrameLocator::FindJavaScriptFrame(int n) {
  DCHECK(n >= 0);
  for (int i = 0; i <= n; i++) {
    while (!iterator_.frame()->is_java_script()) iterator_.Advance();
    if (i == n) return JavaScriptFrame::cast(iterator_.frame());
    iterator_.Advance();
  }
  UNREACHABLE();
  return NULL;
}


// -------------------------------------------------------------------------


static Map* GcSafeMapOfCodeSpaceObject(HeapObject* object) {
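  // During GC the map word may hold a forwarding address; follow it so that
  // the map (and hence the object size) can still be read safely.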
  MapWord map_word = object->map_word();
  return map_word.IsForwardingAddress() ?
      map_word.ToForwardingAddress()->map() : map_word.ToMap();
}


static int GcSafeSizeOfCodeSpaceObject(HeapObject* object) {
  return object->SizeFromMap(GcSafeMapOfCodeSpaceObject(object));
}


#ifdef DEBUG
static bool GcSafeCodeContains(HeapObject* code, Address addr) {
  Map* map = GcSafeMapOfCodeSpaceObject(code);
  DCHECK(map == code->GetHeap()->code_map());
  Address start = code->address();
  Address end = code->address() + code->SizeFromMap(map);
  return start <= addr && addr < end;
}
#endif


Code* InnerPointerToCodeCache::GcSafeCastToCode(HeapObject* object,
                                                Address inner_pointer) {
  Code* code = reinterpret_cast<Code*>(object);
  DCHECK(code != NULL && GcSafeCodeContains(code, inner_pointer));
  return code;
}


Code* InnerPointerToCodeCache::GcSafeFindCodeForInnerPointer(
    Address inner_pointer) {
  Heap* heap = isolate_->heap();

  // Check if the inner pointer points into a large object chunk.
  LargePage* large_page = heap->lo_space()->FindPage(inner_pointer);
  if (large_page != NULL) {
    return GcSafeCastToCode(large_page->GetObject(), inner_pointer);
  }

  if (!heap->code_space()->Contains(inner_pointer)) {
    return nullptr;
  }

  // Iterate through the page until we reach the end or find an object starting
  // after the inner pointer.
  Page* page = Page::FromAddress(inner_pointer);

  DCHECK_EQ(page->owner(), heap->code_space());
  heap->mark_compact_collector()->sweeper().SweepOrWaitUntilSweepingCompleted(
      page);

  Address addr = page->skip_list()->StartFor(inner_pointer);

  Address top = heap->code_space()->top();
  Address limit = heap->code_space()->limit();

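  // Walk objects forward from the skip-list start address; the first object
  // whose extent covers inner_pointer is the Code object we want. If the
  // walk reaches the current allocation top, skip over the unused linear
  // allocation area by jumping to the limit.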
  while (true) {
    if (addr == top && addr != limit) {
      addr = limit;
      continue;
    }

    HeapObject* obj = HeapObject::FromAddress(addr);
    int obj_size = GcSafeSizeOfCodeSpaceObject(obj);
    Address next_addr = addr + obj_size;
    if (next_addr > inner_pointer) return GcSafeCastToCode(obj, inner_pointer);
    addr = next_addr;
  }
}


InnerPointerToCodeCache::InnerPointerToCodeCacheEntry*
    InnerPointerToCodeCache::GetCacheEntry(Address inner_pointer) {
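  // The cache is a direct-mapped, power-of-two sized table indexed by a hash
  // of the inner pointer.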
  isolate_->counters()->pc_to_code()->Increment();
  DCHECK(base::bits::IsPowerOfTwo32(kInnerPointerToCodeCacheSize));
  uint32_t hash = ComputeIntegerHash(ObjectAddressForHashing(inner_pointer),
                                     v8::internal::kZeroHashSeed);
  uint32_t index = hash & (kInnerPointerToCodeCacheSize - 1);
  InnerPointerToCodeCacheEntry* entry = cache(index);
  if (entry->inner_pointer == inner_pointer) {
    isolate_->counters()->pc_to_code_cached()->Increment();
    DCHECK(entry->code == GcSafeFindCodeForInnerPointer(inner_pointer));
  } else {
    // Because this code may be interrupted by a profiling signal that
    // also queries the cache, we cannot update inner_pointer before the code
    // has been set. Otherwise, we risk trying to use a cache entry before
    // the code has been computed.
    entry->code = GcSafeFindCodeForInnerPointer(inner_pointer);
    entry->safepoint_entry.Reset();
    entry->inner_pointer = inner_pointer;
  }
  return entry;
}


// -------------------------------------------------------------------------


int NumRegs(RegList reglist) { return base::bits::CountPopulation(reglist); }


struct JSCallerSavedCodeData {
  int reg_code[kNumJSCallerSaved];
};

JSCallerSavedCodeData caller_saved_code_data;

void SetUpJSCallerSavedCodeData() {
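  // Build a dense table mapping the n-th JS caller-saved register to its
  // machine register code, in ascending register order.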
  int i = 0;
  for (int r = 0; r < kNumRegs; r++)
    if ((kJSCallerSaved & (1 << r)) != 0)
      caller_saved_code_data.reg_code[i++] = r;

  DCHECK(i == kNumJSCallerSaved);
}


int JSCallerSavedCode(int n) {
  DCHECK(0 <= n && n < kNumJSCallerSaved);
  return caller_saved_code_data.reg_code[n];
}


#define DEFINE_WRAPPER(type, field)                              \
class field##_Wrapper : public ZoneObject {                      \
 public:  /* NOLINT */                                           \
  field##_Wrapper(const field& original) : frame_(original) {    \
  }                                                              \
  field frame_;                                                  \
};
STACK_FRAME_TYPE_LIST(DEFINE_WRAPPER)
#undef DEFINE_WRAPPER

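// Makes a zone-allocated copy of the given frame, wrapped in the matching
// wrapper type, so the copy is independent of the stack frame iterator.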
static StackFrame* AllocateFrameCopy(StackFrame* frame, Zone* zone) {
#define FRAME_TYPE_CASE(type, field) \
  case StackFrame::type: { \
    field##_Wrapper* wrapper = \
        new(zone) field##_Wrapper(*(reinterpret_cast<field*>(frame))); \
    return &wrapper->frame_; \
  }

  switch (frame->type()) {
    STACK_FRAME_TYPE_LIST(FRAME_TYPE_CASE)
    default: UNREACHABLE();
  }
#undef FRAME_TYPE_CASE
  return NULL;
}


Vector<StackFrame*> CreateStackMap(Isolate* isolate, Zone* zone) {
  ZoneList<StackFrame*> list(10, zone);
  for (StackFrameIterator it(isolate); !it.done(); it.Advance()) {
    StackFrame* frame = AllocateFrameCopy(it.frame(), zone);
    list.Add(frame, zone);
  }
  return list.ToVector();
}


}  // namespace internal
}  // namespace v8