// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/deoptimizer.h"

#include <memory>

#include "src/accessors.h"
#include "src/assembler-inl.h"
#include "src/ast/prettyprinter.h"
#include "src/callable.h"
#include "src/disasm.h"
#include "src/frames-inl.h"
#include "src/global-handles.h"
#include "src/interpreter/interpreter.h"
#include "src/macro-assembler.h"
#include "src/objects/debug-objects-inl.h"
#include "src/tracing/trace-event.h"
#include "src/v8.h"

// Has to be the last include (doesn't have include guards)
#include "src/objects/object-macros.h"

namespace v8 {
namespace internal {
DeoptimizerData::DeoptimizerData(Heap* heap) : heap_(heap), current_(nullptr) {
  for (int i = 0; i <= Deoptimizer::kLastBailoutType; ++i) {
    deopt_entry_code_[i] = nullptr;
  }
  Code** start = &deopt_entry_code_[0];
  Code** end = &deopt_entry_code_[Deoptimizer::kLastBailoutType + 1];
  heap_->RegisterStrongRoots(reinterpret_cast<Object**>(start),
                             reinterpret_cast<Object**>(end));
}


DeoptimizerData::~DeoptimizerData() {
  for (int i = 0; i <= Deoptimizer::kLastBailoutType; ++i) {
    deopt_entry_code_[i] = nullptr;
  }
  Code** start = &deopt_entry_code_[0];
  heap_->UnregisterStrongRoots(reinterpret_cast<Object**>(start));
}


Code* Deoptimizer::FindDeoptimizingCode(Address addr) {
  if (function_->IsHeapObject()) {
    // Search all deoptimizing code in the native context of the function.
    Isolate* isolate = function_->GetIsolate();
    Context* native_context = function_->context()->native_context();
    Object* element = native_context->DeoptimizedCodeListHead();
    while (!element->IsUndefined(isolate)) {
      Code* code = Code::cast(element);
      CHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
      if (code->contains(addr)) return code;
      element = code->next_code_link();
    }
  }
  return nullptr;
}


// We rely on this function not causing a GC.  It is called from generated code
// without having a real stack frame in place.
Deoptimizer* Deoptimizer::New(JSFunction* function,
                              BailoutType type,
                              unsigned bailout_id,
                              Address from,
                              int fp_to_sp_delta,
                              Isolate* isolate) {
  Deoptimizer* deoptimizer = new Deoptimizer(isolate, function, type,
                                             bailout_id, from, fp_to_sp_delta);
  CHECK_NULL(isolate->deoptimizer_data()->current_);
  isolate->deoptimizer_data()->current_ = deoptimizer;
  return deoptimizer;
}


Deoptimizer* Deoptimizer::Grab(Isolate* isolate) {
  Deoptimizer* result = isolate->deoptimizer_data()->current_;
  CHECK_NOT_NULL(result);
  result->DeleteFrameDescriptions();
  isolate->deoptimizer_data()->current_ = nullptr;
  return result;
}

DeoptimizedFrameInfo* Deoptimizer::DebuggerInspectableFrame(
    JavaScriptFrame* frame,
    int jsframe_index,
    Isolate* isolate) {
  CHECK(frame->is_optimized());

  TranslatedState translated_values(frame);
  translated_values.Prepare(frame->fp());

  TranslatedState::iterator frame_it = translated_values.end();
  int counter = jsframe_index;
  for (auto it = translated_values.begin(); it != translated_values.end();
       it++) {
    if (it->kind() == TranslatedFrame::kInterpretedFunction ||
        it->kind() == TranslatedFrame::kJavaScriptBuiltinContinuation) {
      if (counter == 0) {
        frame_it = it;
        break;
      }
      counter--;
    }
  }
  CHECK(frame_it != translated_values.end());
  // We only include kJavaScriptBuiltinContinuation frames above to get the
  // counting right.
  CHECK_EQ(frame_it->kind(), TranslatedFrame::kInterpretedFunction);

  DeoptimizedFrameInfo* info =
      new DeoptimizedFrameInfo(&translated_values, frame_it, isolate);

  return info;
}

void Deoptimizer::GenerateDeoptimizationEntries(MacroAssembler* masm,
                                                int count,
                                                BailoutType type) {
  TableEntryGenerator generator(masm, type, count);
  generator.Generate();
}

namespace {
class ActivationsFinder : public ThreadVisitor {
 public:
  explicit ActivationsFinder(std::set<Code*>* codes,
                             Code* topmost_optimized_code,
                             bool safe_to_deopt_topmost_optimized_code)
      : codes_(codes) {
#ifdef DEBUG
    topmost_ = topmost_optimized_code;
    safe_to_deopt_ = safe_to_deopt_topmost_optimized_code;
#endif
  }

  // Find the frames with activations of codes marked for deoptimization,
  // search for the trampoline to the deoptimizer call corresponding to each
  // code, and use it to replace the current pc on the stack.
  void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
    for (StackFrameIterator it(isolate, top); !it.done(); it.Advance()) {
      if (it.frame()->type() == StackFrame::OPTIMIZED) {
        Code* code = it.frame()->LookupCode();
        if (code->kind() == Code::OPTIMIZED_FUNCTION &&
            code->marked_for_deoptimization()) {
          codes_->erase(code);
          // Obtain the trampoline to the deoptimizer call.
          SafepointEntry safepoint = code->GetSafepointEntry(it.frame()->pc());
          int trampoline_pc = safepoint.trampoline_pc();
          DCHECK_IMPLIES(code == topmost_, safe_to_deopt_);
          // Replace the current pc on the stack with the trampoline.
          it.frame()->set_pc(code->instruction_start() + trampoline_pc);
        }
      }
    }
  }

 private:
  std::set<Code*>* codes_;

#ifdef DEBUG
  Code* topmost_;
  bool safe_to_deopt_;
#endif
};
}  // namespace

// Move marked code from the optimized code list to the deoptimized code list,
// and replace pc on the stack for codes marked for deoptimization.
void Deoptimizer::DeoptimizeMarkedCodeForContext(Context* context) {
  DisallowHeapAllocation no_allocation;

  Isolate* isolate = context->GetHeap()->isolate();
  Code* topmost_optimized_code = nullptr;
  bool safe_to_deopt_topmost_optimized_code = false;
#ifdef DEBUG
  // Make sure all activations of optimized code can deopt at their current PC.
  // The topmost optimized code has special handling because it cannot be
  // deoptimized due to weak object dependency.
  for (StackFrameIterator it(isolate, isolate->thread_local_top());
       !it.done(); it.Advance()) {
    StackFrame::Type type = it.frame()->type();
    if (type == StackFrame::OPTIMIZED) {
      Code* code = it.frame()->LookupCode();
      JSFunction* function =
          static_cast<OptimizedFrame*>(it.frame())->function();
      if (FLAG_trace_deopt) {
        CodeTracer::Scope scope(isolate->GetCodeTracer());
        PrintF(scope.file(), "[deoptimizer found activation of function: ");
        function->PrintName(scope.file());
        PrintF(scope.file(),
               " / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function));
      }
      SafepointEntry safepoint = code->GetSafepointEntry(it.frame()->pc());
      int deopt_index = safepoint.deoptimization_index();

      // Turbofan deopt is checked when we are patching addresses on stack.
      bool safe_if_deopt_triggered =
          deopt_index != Safepoint::kNoDeoptimizationIndex;
      bool is_builtin_code = code->kind() == Code::BUILTIN;
      DCHECK(topmost_optimized_code == nullptr || safe_if_deopt_triggered ||
             is_builtin_code);
      if (topmost_optimized_code == nullptr) {
        topmost_optimized_code = code;
        safe_to_deopt_topmost_optimized_code = safe_if_deopt_triggered;
      }
    }
  }
#endif

  // We will use this set to mark those Code objects that are marked for
  // deoptimization and have not been found in stack frames.
  std::set<Code*> codes;

  // Move marked code from the optimized code list to the deoptimized code
  // list. Walk over all optimized code objects in this native context.
  Code* prev = nullptr;
  Object* element = context->OptimizedCodeListHead();
  while (!element->IsUndefined(isolate)) {
    Code* code = Code::cast(element);
    CHECK_EQ(code->kind(), Code::OPTIMIZED_FUNCTION);
    Object* next = code->next_code_link();

    if (code->marked_for_deoptimization()) {
      // Make sure that this object does not point to any garbage.
      isolate->heap()->InvalidateCodeEmbeddedObjects(code);
      codes.insert(code);

      if (prev != nullptr) {
        // Skip this code in the optimized code list.
        prev->set_next_code_link(next);
      } else {
        // There was no previous node, the next node is the new head.
        context->SetOptimizedCodeListHead(next);
      }

      // Move the code to the _deoptimized_ code list.
      code->set_next_code_link(context->DeoptimizedCodeListHead());
      context->SetDeoptimizedCodeListHead(code);
    } else {
      // Not marked; preserve this element.
      prev = code;
    }
    element = next;
  }

  ActivationsFinder visitor(&codes, topmost_optimized_code,
                            safe_to_deopt_topmost_optimized_code);
  // Iterate over the stack of this thread.
  visitor.VisitThread(isolate, isolate->thread_local_top());
  // In addition to iterating over the stack of this thread, we also need to
  // consider all the other threads, as they may also use the code that is
  // currently being deoptimized.
  isolate->thread_manager()->IterateArchivedThreads(&visitor);

  // If there's no activation of a code in any stack then we can remove its
  // deoptimization data. We do this to ensure that code objects that are
  // unlinked don't transitively keep objects alive unnecessarily.
  for (Code* code : codes) {
    isolate->heap()->InvalidateCodeDeoptimizationData(code);
  }
}


void Deoptimizer::DeoptimizeAll(Isolate* isolate) {
  RuntimeCallTimerScope runtimeTimer(isolate,
                                     RuntimeCallCounterId::kDeoptimizeCode);
  TimerEventScope<TimerEventDeoptimizeCode> timer(isolate);
  TRACE_EVENT0("v8", "V8.DeoptimizeCode");
  if (FLAG_trace_deopt) {
    CodeTracer::Scope scope(isolate->GetCodeTracer());
    PrintF(scope.file(), "[deoptimize all code in all contexts]\n");
  }
  isolate->AbortConcurrentOptimization(BlockingBehavior::kBlock);
  DisallowHeapAllocation no_allocation;
  // For all contexts, mark all code, then deoptimize.
  Object* context = isolate->heap()->native_contexts_list();
  while (!context->IsUndefined(isolate)) {
    Context* native_context = Context::cast(context);
    MarkAllCodeForContext(native_context);
    DeoptimizeMarkedCodeForContext(native_context);
    context = native_context->next_context_link();
  }
}


void Deoptimizer::DeoptimizeMarkedCode(Isolate* isolate) {
  RuntimeCallTimerScope runtimeTimer(isolate,
                                     RuntimeCallCounterId::kDeoptimizeCode);
  TimerEventScope<TimerEventDeoptimizeCode> timer(isolate);
  TRACE_EVENT0("v8", "V8.DeoptimizeCode");
  if (FLAG_trace_deopt) {
    CodeTracer::Scope scope(isolate->GetCodeTracer());
    PrintF(scope.file(), "[deoptimize marked code in all contexts]\n");
  }
  DisallowHeapAllocation no_allocation;
  // For all contexts, deoptimize code already marked.
  Object* context = isolate->heap()->native_contexts_list();
  while (!context->IsUndefined(isolate)) {
    Context* native_context = Context::cast(context);
    DeoptimizeMarkedCodeForContext(native_context);
    context = native_context->next_context_link();
  }
}

void Deoptimizer::MarkAllCodeForContext(Context* context) {
  Object* element = context->OptimizedCodeListHead();
  Isolate* isolate = context->GetIsolate();
  while (!element->IsUndefined(isolate)) {
    Code* code = Code::cast(element);
    CHECK_EQ(code->kind(), Code::OPTIMIZED_FUNCTION);
    code->set_marked_for_deoptimization(true);
    element = code->next_code_link();
  }
}

void Deoptimizer::DeoptimizeFunction(JSFunction* function, Code* code) {
  Isolate* isolate = function->GetIsolate();
  RuntimeCallTimerScope runtimeTimer(isolate,
                                     RuntimeCallCounterId::kDeoptimizeCode);
  TimerEventScope<TimerEventDeoptimizeCode> timer(isolate);
  TRACE_EVENT0("v8", "V8.DeoptimizeCode");
  if (code == nullptr) code = function->code();

  if (code->kind() == Code::OPTIMIZED_FUNCTION) {
    // Mark the code for deoptimization and unlink any functions that also
    // refer to that code. The code cannot be shared across native contexts,
    // so we only need to search one.
    code->set_marked_for_deoptimization(true);
    // The code in the function's optimized code feedback vector slot might
    // be different from the code on the function - evict it if necessary.
    function->feedback_vector()->EvictOptimizedCodeMarkedForDeoptimization(
        function->shared(), "unlinking code marked for deopt");
    if (!code->deopt_already_counted()) {
      function->feedback_vector()->increment_deopt_count();
      code->set_deopt_already_counted(true);
    }
    DeoptimizeMarkedCodeForContext(function->context()->native_context());
  }
}


void Deoptimizer::ComputeOutputFrames(Deoptimizer* deoptimizer) {
  deoptimizer->DoComputeOutputFrames();
}


const char* Deoptimizer::MessageFor(BailoutType type) {
  switch (type) {
    case EAGER: return "eager";
    case SOFT: return "soft";
    case LAZY: return "lazy";
  }
  FATAL("Unsupported deopt type");
  return nullptr;
}

namespace {

CodeEventListener::DeoptKind DeoptKindOfBailoutType(
    Deoptimizer::BailoutType bailout_type) {
  switch (bailout_type) {
    case Deoptimizer::EAGER:
      return CodeEventListener::kEager;
    case Deoptimizer::SOFT:
      return CodeEventListener::kSoft;
    case Deoptimizer::LAZY:
      return CodeEventListener::kLazy;
  }
  UNREACHABLE();
}

}  // namespace

Deoptimizer::Deoptimizer(Isolate* isolate, JSFunction* function,
                         BailoutType type, unsigned bailout_id, Address from,
                         int fp_to_sp_delta)
    : isolate_(isolate),
      function_(function),
      bailout_id_(bailout_id),
      bailout_type_(type),
      from_(from),
      fp_to_sp_delta_(fp_to_sp_delta),
      deoptimizing_throw_(false),
      catch_handler_data_(-1),
      catch_handler_pc_offset_(-1),
      input_(nullptr),
      output_count_(0),
      jsframe_count_(0),
      output_(nullptr),
      caller_frame_top_(0),
      caller_fp_(0),
      caller_pc_(0),
      caller_constant_pool_(0),
      input_frame_context_(0),
      stack_fp_(0),
      trace_scope_(nullptr) {
  if (isolate->deoptimizer_lazy_throw()) {
    isolate->set_deoptimizer_lazy_throw(false);
    deoptimizing_throw_ = true;
  }

  DCHECK_NOT_NULL(from);
  compiled_code_ = FindOptimizedCode();
  DCHECK_NOT_NULL(compiled_code_);

  DCHECK(function->IsJSFunction());
  trace_scope_ = FLAG_trace_deopt
                     ? new CodeTracer::Scope(isolate->GetCodeTracer())
                     : nullptr;
#ifdef DEBUG
  DCHECK(AllowHeapAllocation::IsAllowed());
  disallow_heap_allocation_ = new DisallowHeapAllocation();
#endif  // DEBUG
  if (compiled_code_->kind() != Code::OPTIMIZED_FUNCTION ||
      !compiled_code_->deopt_already_counted()) {
    // If the function is optimized, and we haven't counted that deopt yet, then
    // increment the function's deopt count so that we can avoid optimising
    // functions that deopt too often.

    if (bailout_type_ == Deoptimizer::SOFT) {
      // Soft deopts shouldn't count against the overall deoptimization count
      // that can eventually lead to disabling optimization for a function.
      isolate->counters()->soft_deopts_executed()->Increment();
    } else if (function != nullptr) {
      function->feedback_vector()->increment_deopt_count();
    }
  }
  if (compiled_code_->kind() == Code::OPTIMIZED_FUNCTION) {
    compiled_code_->set_deopt_already_counted(true);
    PROFILE(isolate_,
            CodeDeoptEvent(compiled_code_, DeoptKindOfBailoutType(type), from_,
                           fp_to_sp_delta_));
  }
  unsigned size = ComputeInputFrameSize();
  int parameter_count =
      function->shared()->internal_formal_parameter_count() + 1;
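  // Note: the placement-new argument below passes the frame size to
  // FrameDescription's operator new, which sizes the allocation so that the
  // frame's slot contents can be stored inline with the descriptor itself.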
  input_ = new (size) FrameDescription(size, parameter_count);
}

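// Returns the code object this deopt originated from: an entry on the native
// context's deoptimized-code list containing the from_ address if there is
// one, and otherwise whatever code object the isolate finds at that address.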
Code* Deoptimizer::FindOptimizedCode() {
  Code* compiled_code = FindDeoptimizingCode(from_);
  return (compiled_code == nullptr)
             ? static_cast<Code*>(isolate_->FindCodeObject(from_))
             : compiled_code;
}


void Deoptimizer::PrintFunctionName() {
  if (function_->IsHeapObject() && function_->IsJSFunction()) {
    function_->ShortPrint(trace_scope_->file());
  } else {
    PrintF(trace_scope_->file(),
           "%s", Code::Kind2String(compiled_code_->kind()));
  }
}

Handle<JSFunction> Deoptimizer::function() const {
  return Handle<JSFunction>(function_);
}
Handle<Code> Deoptimizer::compiled_code() const {
  return Handle<Code>(compiled_code_);
}

Deoptimizer::~Deoptimizer() {
  DCHECK(input_ == nullptr && output_ == nullptr);
  DCHECK_NULL(disallow_heap_allocation_);
  delete trace_scope_;
}


void Deoptimizer::DeleteFrameDescriptions() {
  delete input_;
  for (int i = 0; i < output_count_; ++i) {
    if (output_[i] != input_) delete output_[i];
  }
  delete[] output_;
  input_ = nullptr;
  output_ = nullptr;
#ifdef DEBUG
  DCHECK(!AllowHeapAllocation::IsAllowed());
  DCHECK_NOT_NULL(disallow_heap_allocation_);
  delete disallow_heap_allocation_;
  disallow_heap_allocation_ = nullptr;
#endif  // DEBUG
}

Address Deoptimizer::GetDeoptimizationEntry(Isolate* isolate, int id,
                                            BailoutType type) {
  CHECK_GE(id, 0);
  if (id >= kMaxNumberOfEntries) return nullptr;
  DeoptimizerData* data = isolate->deoptimizer_data();
  CHECK_LE(type, kLastBailoutType);
  CHECK_NOT_NULL(data->deopt_entry_code_[type]);
  Code* code = data->deopt_entry_code_[type];
  return code->instruction_start() + (id * table_entry_size_);
}


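// Maps an address inside the deoptimization entry table back to its bailout
// id. The entries form a contiguous table of identically sized stubs
// (table_entry_size_ bytes each), so the id is simply the offset from the
// table start divided by the entry size.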
int Deoptimizer::GetDeoptimizationId(Isolate* isolate,
                                     Address addr,
                                     BailoutType type) {
  DeoptimizerData* data = isolate->deoptimizer_data();
  CHECK_LE(type, kLastBailoutType);
  Code* code = data->deopt_entry_code_[type];
  if (code == nullptr) return kNotDeoptimizationEntry;
  Address start = code->instruction_start();
  if (addr < start ||
      addr >= start + (kMaxNumberOfEntries * table_entry_size_)) {
    return kNotDeoptimizationEntry;
  }
  DCHECK_EQ(0,
            static_cast<int>(addr - start) % table_entry_size_);
  return static_cast<int>(addr - start) / table_entry_size_;
}


int Deoptimizer::GetDeoptimizedCodeCount(Isolate* isolate) {
  int length = 0;
  // Count all entries in the deoptimizing code list of every context.
  Object* context = isolate->heap()->native_contexts_list();
  while (!context->IsUndefined(isolate)) {
    Context* native_context = Context::cast(context);
    Object* element = native_context->DeoptimizedCodeListHead();
    while (!element->IsUndefined(isolate)) {
      Code* code = Code::cast(element);
      DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
      if (!code->marked_for_deoptimization()) {
        length++;
      }
      element = code->next_code_link();
    }
    context = Context::cast(context)->next_context_link();
  }
  return length;
}

namespace {

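// Looks up an exception handler covering the given frame's current bytecode
// offset. Returns the handler's target offset, or -1 if there is none, and
// stores the handler's extra data in *data_out (used by the caller as the
// index of the register that holds the context for the catch block).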
int LookupCatchHandler(TranslatedFrame* translated_frame, int* data_out) {
  switch (translated_frame->kind()) {
    case TranslatedFrame::kInterpretedFunction: {
      int bytecode_offset = translated_frame->node_id().ToInt();
      HandlerTable table(translated_frame->raw_shared_info()->bytecode_array());
      return table.LookupRange(bytecode_offset, data_out, nullptr);
    }
    default:
      break;
  }
  return -1;
}

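// On targets where kPadArguments is set, the argument area of a frame must
// stay aligned to two stack slots, so an odd argument count gets one extra
// padding slot.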
bool ShouldPadArguments(int arg_count) {
  return kPadArguments && (arg_count % 2 != 0);
}

}  // namespace

// We rely on this function not causing a GC.  It is called from generated code
// without having a real stack frame in place.
void Deoptimizer::DoComputeOutputFrames() {
  base::ElapsedTimer timer;

  // Determine basic deoptimization information.  The optimized frame is
  // described by the input data.
  DeoptimizationData* input_data =
      DeoptimizationData::cast(compiled_code_->deoptimization_data());

  {
    // Read caller's PC, caller's FP and caller's constant pool values
    // from input frame. Compute caller's frame top address.

    Register fp_reg = JavaScriptFrame::fp_register();
    stack_fp_ = input_->GetRegister(fp_reg.code());

    caller_frame_top_ = stack_fp_ + ComputeInputFrameAboveFpFixedSize();

    Address fp_address = input_->GetFramePointerAddress();
    caller_fp_ = Memory::intptr_at(fp_address);
    caller_pc_ =
        Memory::intptr_at(fp_address + CommonFrameConstants::kCallerPCOffset);
    input_frame_context_ = Memory::intptr_at(
        fp_address + CommonFrameConstants::kContextOrFrameTypeOffset);

    if (FLAG_enable_embedded_constant_pool) {
      caller_constant_pool_ = Memory::intptr_at(
          fp_address + CommonFrameConstants::kConstantPoolOffset);
    }
  }

  if (trace_scope_ != nullptr) {
    timer.Start();
    PrintF(trace_scope_->file(), "[deoptimizing (DEOPT %s): begin ",
           MessageFor(bailout_type_));
    PrintFunctionName();
    PrintF(trace_scope_->file(),
           " (opt #%d) @%d, FP to SP delta: %d, caller sp: 0x%08" V8PRIxPTR
           "]\n",
           input_data->OptimizationId()->value(), bailout_id_, fp_to_sp_delta_,
           caller_frame_top_);
    if (bailout_type_ == EAGER || bailout_type_ == SOFT) {
      compiled_code_->PrintDeoptLocation(
          trace_scope_->file(), "            ;;; deoptimize at ", from_);
    }
  }

  BailoutId node_id = input_data->BytecodeOffset(bailout_id_);
  ByteArray* translations = input_data->TranslationByteArray();
  unsigned translation_index =
      input_data->TranslationIndex(bailout_id_)->value();

  TranslationIterator state_iterator(translations, translation_index);
  translated_state_.Init(
      input_->GetFramePointerAddress(), &state_iterator,
      input_data->LiteralArray(), input_->GetRegisterValues(),
      trace_scope_ == nullptr ? nullptr : trace_scope_->file(),
      function_->IsHeapObject()
          ? function_->shared()->internal_formal_parameter_count()
          : 0);

  // Do the input frame to output frame(s) translation.
  size_t count = translated_state_.frames().size();
  // If we are supposed to go to the catch handler, find the catching frame
  // for the catch and make sure we only deoptimize up to that frame.
  if (deoptimizing_throw_) {
    size_t catch_handler_frame_index = count;
    for (size_t i = count; i-- > 0;) {
      catch_handler_pc_offset_ = LookupCatchHandler(
          &(translated_state_.frames()[i]), &catch_handler_data_);
      if (catch_handler_pc_offset_ >= 0) {
        catch_handler_frame_index = i;
        break;
      }
    }
    CHECK_LT(catch_handler_frame_index, count);
    count = catch_handler_frame_index + 1;
  }

  DCHECK_NULL(output_);
  output_ = new FrameDescription*[count];
  for (size_t i = 0; i < count; ++i) {
    output_[i] = nullptr;
  }
  output_count_ = static_cast<int>(count);

  // Translate each output frame.
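  // Frames are translated bottommost (outermost caller) first, so that each
  // subsequent frame can compute its top address from the frame created just
  // before it.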
  int frame_index = 0;  // output_frame_index
  for (size_t i = 0; i < count; ++i, ++frame_index) {
    // Read the ast node id, function, and frame height for this output frame.
    TranslatedFrame* translated_frame = &(translated_state_.frames()[i]);
    switch (translated_frame->kind()) {
      case TranslatedFrame::kInterpretedFunction:
        DoComputeInterpretedFrame(translated_frame, frame_index,
                                  deoptimizing_throw_ && i == count - 1);
        jsframe_count_++;
        break;
      case TranslatedFrame::kArgumentsAdaptor:
        DoComputeArgumentsAdaptorFrame(translated_frame, frame_index);
        break;
      case TranslatedFrame::kConstructStub:
        DoComputeConstructStubFrame(translated_frame, frame_index);
        break;
      case TranslatedFrame::kBuiltinContinuation:
        DoComputeBuiltinContinuation(translated_frame, frame_index, false);
        break;
      case TranslatedFrame::kJavaScriptBuiltinContinuation:
        DoComputeBuiltinContinuation(translated_frame, frame_index, true);
        break;
      case TranslatedFrame::kInvalid:
        FATAL("invalid frame");
        break;
    }
  }

  // Print some helpful diagnostic information.
  if (trace_scope_ != nullptr) {
    double ms = timer.Elapsed().InMillisecondsF();
    int index = output_count_ - 1;  // Index of the topmost frame.
    PrintF(trace_scope_->file(), "[deoptimizing (%s): end ",
           MessageFor(bailout_type_));
    PrintFunctionName();
    PrintF(trace_scope_->file(),
           " @%d => node=%d, pc=0x%08" V8PRIxPTR ", caller sp=0x%08" V8PRIxPTR
           ", took %0.3f ms]\n",
           bailout_id_, node_id.ToInt(), output_[index]->GetPc(),
           caller_frame_top_, ms);
  }
}

void Deoptimizer::DoComputeInterpretedFrame(TranslatedFrame* translated_frame,
                                            int frame_index,
                                            bool goto_catch_handler) {
  SharedFunctionInfo* shared = translated_frame->raw_shared_info();

  TranslatedFrame::iterator value_iterator = translated_frame->begin();
  bool is_bottommost = (0 == frame_index);
  bool is_topmost = (output_count_ - 1 == frame_index);
  int input_index = 0;

  int bytecode_offset = translated_frame->node_id().ToInt();
  int height = translated_frame->height();
  int register_count = height - 1;  // Exclude accumulator.
  int register_stack_slot_count =
      InterpreterFrameConstants::RegisterStackSlotCount(register_count);
  int height_in_bytes = register_stack_slot_count * kPointerSize;

  // The topmost frame will contain the accumulator.
  if (is_topmost) {
    height_in_bytes += kPointerSize;
    if (PadTopOfStackRegister()) height_in_bytes += kPointerSize;
  }

  TranslatedFrame::iterator function_iterator = value_iterator;
  Object* function = value_iterator->GetRawValue();
  value_iterator++;
  input_index++;
  if (trace_scope_ != nullptr) {
    PrintF(trace_scope_->file(), "  translating interpreted frame ");
    std::unique_ptr<char[]> name = shared->DebugName()->ToCString();
    PrintF(trace_scope_->file(), "%s", name.get());
    PrintF(trace_scope_->file(), " => bytecode_offset=%d, height=%d%s\n",
           bytecode_offset, height_in_bytes,
           goto_catch_handler ? " (throw)" : "");
  }
  if (goto_catch_handler) {
    bytecode_offset = catch_handler_pc_offset_;
  }

  // The 'fixed' part of the frame consists of the incoming parameters and
  // the part described by InterpreterFrameConstants. This will include
  // argument padding, when needed.
  unsigned fixed_frame_size = ComputeInterpretedFixedSize(shared);
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;

  // Allocate and store the output frame description.
  int parameter_count = shared->internal_formal_parameter_count() + 1;
  FrameDescription* output_frame = new (output_frame_size)
      FrameDescription(output_frame_size, parameter_count);

  CHECK(frame_index >= 0 && frame_index < output_count_);
  CHECK_NULL(output_[frame_index]);
  output_[frame_index] = output_frame;

  // The top address of the frame is computed from the previous frame's top and
  // this frame's size.
  intptr_t top_address;
  if (is_bottommost) {
    top_address = caller_frame_top_ - output_frame_size;
  } else {
    top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
  }
  output_frame->SetTop(top_address);

  // Compute the incoming parameter translation.
  unsigned output_offset = output_frame_size;

  if (ShouldPadArguments(parameter_count)) {
    output_offset -= kPointerSize;
    WriteValueToOutput(isolate()->heap()->the_hole_value(), 0, frame_index,
                       output_offset, "padding ");
  }

  for (int i = 0; i < parameter_count; ++i) {
    output_offset -= kPointerSize;
    WriteTranslatedValueToOutput(&value_iterator, &input_index, frame_index,
                                 output_offset);
  }

  DCHECK_EQ(output_offset, output_frame->GetLastArgumentSlotOffset());
  if (trace_scope_ != nullptr) {
    PrintF(trace_scope_->file(), "    -------------------------\n");
  }

  // There are no translation commands for the caller's pc and fp, the
  // context, the function and the bytecode offset.  Synthesize their values
  // and set them up explicitly.
  //
  // The caller's pc for the bottommost output frame is the same as in the
  // input frame.  For all subsequent output frames, it can be read from the
  // previous one.  This frame's pc can be computed from the non-optimized
  // function code and AST id of the bailout.
  output_offset -= kPCOnStackSize;
  intptr_t value;
  if (is_bottommost) {
    value = caller_pc_;
  } else {
    value = output_[frame_index - 1]->GetPc();
  }
  output_frame->SetCallerPc(output_offset, value);
  DebugPrintOutputSlot(value, frame_index, output_offset, "caller's pc\n");

  // The caller's frame pointer for the bottommost output frame is the same
  // as in the input frame.  For all subsequent output frames, it can be
  // read from the previous one.  Also compute and set this frame's frame
  // pointer.
  output_offset -= kFPOnStackSize;
  if (is_bottommost) {
    value = caller_fp_;
  } else {
    value = output_[frame_index - 1]->GetFp();
  }
  output_frame->SetCallerFp(output_offset, value);
  intptr_t fp_value = top_address + output_offset;
  output_frame->SetFp(fp_value);
  if (is_topmost) {
    Register fp_reg = InterpretedFrame::fp_register();
    output_frame->SetRegister(fp_reg.code(), fp_value);
  }
  DebugPrintOutputSlot(value, frame_index, output_offset, "caller's fp\n");

  if (FLAG_enable_embedded_constant_pool) {
    // For the bottommost output frame the constant pool pointer can be gotten
    // from the input frame. For subsequent output frames, it can be read from
    // the previous frame.
    output_offset -= kPointerSize;
    if (is_bottommost) {
      value = caller_constant_pool_;
    } else {
      value = output_[frame_index - 1]->GetConstantPool();
    }
    output_frame->SetCallerConstantPool(output_offset, value);
    DebugPrintOutputSlot(value, frame_index, output_offset,
                         "caller's constant_pool\n");
  }

  // For the bottommost output frame the context can be gotten from the input
  // frame. For all subsequent output frames it can be gotten from the function
  // so long as we don't inline functions that need local contexts.
  output_offset -= kPointerSize;

  // When deoptimizing into a catch block, we need to take the context
  // from a register that was specified in the handler table.
  TranslatedFrame::iterator context_pos = value_iterator;
  int context_input_index = input_index;
  if (goto_catch_handler) {
    // Skip to the translated value of the register specified
    // in the handler table.
    for (int i = 0; i < catch_handler_data_ + 1; ++i) {
      context_pos++;
      context_input_index++;
    }
  }
  // Read the context from the translations.
  Object* context = context_pos->GetRawValue();
  value = reinterpret_cast<intptr_t>(context);
  output_frame->SetContext(value);
  WriteValueToOutput(context, context_input_index, frame_index, output_offset,
                     "context    ");
  if (context == isolate_->heap()->arguments_marker()) {
    Address output_address =
        reinterpret_cast<Address>(output_[frame_index]->GetTop()) +
        output_offset;
    values_to_materialize_.push_back({output_address, context_pos});
  }
  value_iterator++;
  input_index++;

  // The function was mentioned explicitly in the BEGIN_FRAME.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(function);
  WriteValueToOutput(function, 0, frame_index, output_offset, "function    ");
  if (function == isolate_->heap()->arguments_marker()) {
    Address output_address =
        reinterpret_cast<Address>(output_[frame_index]->GetTop()) +
        output_offset;
    values_to_materialize_.push_back({output_address, function_iterator});
  }

  // Set the bytecode array pointer.
  output_offset -= kPointerSize;
  Object* bytecode_array = shared->HasBreakInfo()
                               ? shared->GetDebugInfo()->DebugBytecodeArray()
                               : shared->bytecode_array();
  WriteValueToOutput(bytecode_array, 0, frame_index, output_offset,
                     "bytecode array ");

  // The bytecode offset was mentioned explicitly in the BEGIN_FRAME.
  output_offset -= kPointerSize;

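  // The slot holds the raw offset from the start of the BytecodeArray object
  // (header included, heap-object tag removed), encoded as a Smi.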
  int raw_bytecode_offset =
      BytecodeArray::kHeaderSize - kHeapObjectTag + bytecode_offset;
  Smi* smi_bytecode_offset = Smi::FromInt(raw_bytecode_offset);
  output_[frame_index]->SetFrameSlot(
      output_offset, reinterpret_cast<intptr_t>(smi_bytecode_offset));

  if (trace_scope_ != nullptr) {
    DebugPrintOutputSlot(reinterpret_cast<intptr_t>(smi_bytecode_offset),
                         frame_index, output_offset, "bytecode offset @ ");
    PrintF(trace_scope_->file(), "%d\n", bytecode_offset);
    PrintF(trace_scope_->file(), "  (input #0)\n");
    PrintF(trace_scope_->file(), "    -------------------------\n");
  }

  // Translate the rest of the interpreter registers in the frame.
  for (int i = 0; i < register_count; ++i) {
    output_offset -= kPointerSize;
    WriteTranslatedValueToOutput(&value_iterator, &input_index, frame_index,
                                 output_offset);
  }

  int register_slots_written = register_count;
  DCHECK_LE(register_slots_written, register_stack_slot_count);
  // Some architectures must pad the stack frame with extra stack slots
  // to ensure the stack frame is aligned. Do this now.
  while (register_slots_written < register_stack_slot_count) {
    register_slots_written++;
    output_offset -= kPointerSize;
    WriteValueToOutput(isolate()->heap()->the_hole_value(), 0, frame_index,
                       output_offset, "padding ");
  }

  // Translate the accumulator register (depending on frame position).
  if (is_topmost) {
    if (PadTopOfStackRegister()) {
      output_offset -= kPointerSize;
      WriteValueToOutput(isolate()->heap()->the_hole_value(), 0, frame_index,
                         output_offset, "padding ");
    }
    // For topmost frame, put the accumulator on the stack. The
    // {NotifyDeoptimized} builtin pops it off the topmost frame (possibly
    // after materialization).
    output_offset -= kPointerSize;
    if (goto_catch_handler) {
      // If we are lazy deopting to a catch handler, we set the accumulator to
      // the exception (which lives in the result register).
      intptr_t accumulator_value =
          input_->GetRegister(kInterpreterAccumulatorRegister.code());
      WriteValueToOutput(reinterpret_cast<Object*>(accumulator_value), 0,
                         frame_index, output_offset, "accumulator ");
      value_iterator++;
    } else {
      WriteTranslatedValueToOutput(&value_iterator, &input_index, frame_index,
                                   output_offset, "accumulator ");
    }
  } else {
    // For non-topmost frames, skip the accumulator translation. For those
    // frames, the return value from the callee will become the accumulator.
    value_iterator++;
    input_index++;
  }
  CHECK_EQ(0u, output_offset);

  // Compute this frame's PC and state. The PC will be a special builtin that
  // continues the bytecode dispatch. Note that non-topmost and lazy-style
  // bailout handlers also advance the bytecode offset before dispatch, hence
  // simulating what normal handlers do upon completion of the operation.
  Builtins* builtins = isolate_->builtins();
  Code* dispatch_builtin =
      (!is_topmost || (bailout_type_ == LAZY)) && !goto_catch_handler
          ? builtins->builtin(Builtins::kInterpreterEnterBytecodeAdvance)
          : builtins->builtin(Builtins::kInterpreterEnterBytecodeDispatch);
  output_frame->SetPc(
      reinterpret_cast<intptr_t>(dispatch_builtin->InstructionStart()));

  // Update constant pool.
  if (FLAG_enable_embedded_constant_pool) {
    intptr_t constant_pool_value =
        reinterpret_cast<intptr_t>(dispatch_builtin->constant_pool());
    output_frame->SetConstantPool(constant_pool_value);
    if (is_topmost) {
      Register constant_pool_reg =
          InterpretedFrame::constant_pool_pointer_register();
      output_frame->SetRegister(constant_pool_reg.code(), constant_pool_value);
    }
  }

  // Clear the context register. The context might be a de-materialized object
  // and will be materialized by {Runtime_NotifyDeoptimized}. For additional
  // safety we use Smi(0) instead of the potential {arguments_marker} here.
  if (is_topmost) {
    intptr_t context_value = reinterpret_cast<intptr_t>(Smi::kZero);
    Register context_reg = JavaScriptFrame::context_register();
    output_frame->SetRegister(context_reg.code(), context_value);
    // Set the continuation for the topmost frame.
    Code* continuation = builtins->builtin(Builtins::kNotifyDeoptimized);
    output_frame->SetContinuation(
        reinterpret_cast<intptr_t>(continuation->InstructionStart()));
  }
}

void Deoptimizer::DoComputeArgumentsAdaptorFrame(
    TranslatedFrame* translated_frame, int frame_index) {
  TranslatedFrame::iterator value_iterator = translated_frame->begin();
  bool is_bottommost = (0 == frame_index);
  int input_index = 0;

  unsigned height = translated_frame->height();
  unsigned height_in_bytes = height * kPointerSize;
  int parameter_count = height;
  if (ShouldPadArguments(parameter_count)) height_in_bytes += kPointerSize;

  TranslatedFrame::iterator function_iterator = value_iterator;
  Object* function = value_iterator->GetRawValue();
  value_iterator++;
  input_index++;
  if (trace_scope_ != nullptr) {
    PrintF(trace_scope_->file(),
           "  translating arguments adaptor => height=%d\n", height_in_bytes);
  }

  unsigned fixed_frame_size = ArgumentsAdaptorFrameConstants::kFixedFrameSize;
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;

  // Allocate and store the output frame description.
  FrameDescription* output_frame = new (output_frame_size)
      FrameDescription(output_frame_size, parameter_count);

  // Arguments adaptor can not be topmost.
  CHECK(frame_index < output_count_ - 1);
  CHECK_NULL(output_[frame_index]);
  output_[frame_index] = output_frame;

  // The top address of the frame is computed from the previous frame's top and
  // this frame's size.
  intptr_t top_address;
  if (is_bottommost) {
    top_address = caller_frame_top_ - output_frame_size;
  } else {
    top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
  }
  output_frame->SetTop(top_address);

  unsigned output_offset = output_frame_size;
  if (ShouldPadArguments(parameter_count)) {
    output_offset -= kPointerSize;
    WriteValueToOutput(isolate()->heap()->the_hole_value(), 0, frame_index,
                       output_offset, "padding ");
  }

  // Compute the incoming parameter translation.
  for (int i = 0; i < parameter_count; ++i) {
    output_offset -= kPointerSize;
    WriteTranslatedValueToOutput(&value_iterator, &input_index, frame_index,
                                 output_offset);
  }

  DCHECK_EQ(output_offset, output_frame->GetLastArgumentSlotOffset());
  // Read caller's PC from the previous frame.
  output_offset -= kPCOnStackSize;
  intptr_t value;
  if (is_bottommost) {
    value = caller_pc_;
  } else {
    value = output_[frame_index - 1]->GetPc();
  }
  output_frame->SetCallerPc(output_offset, value);
  DebugPrintOutputSlot(value, frame_index, output_offset, "caller's pc\n");

  // Read caller's FP from the previous frame, and set this frame's FP.
  output_offset -= kFPOnStackSize;
  if (is_bottommost) {
    value = caller_fp_;
  } else {
    value = output_[frame_index - 1]->GetFp();
  }
  output_frame->SetCallerFp(output_offset, value);
  intptr_t fp_value = top_address + output_offset;
  output_frame->SetFp(fp_value);
  DebugPrintOutputSlot(value, frame_index, output_offset, "caller's fp\n");

  if (FLAG_enable_embedded_constant_pool) {
    // Read the caller's constant pool from the previous frame.
    output_offset -= kPointerSize;
    if (is_bottommost) {
      value = caller_constant_pool_;
    } else {
      value = output_[frame_index - 1]->GetConstantPool();
    }
    output_frame->SetCallerConstantPool(output_offset, value);
    DebugPrintOutputSlot(value, frame_index, output_offset,
                         "caller's constant_pool\n");
  }

  // A marker value is used in place of the context.
  output_offset -= kPointerSize;
  intptr_t context = StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR);
  output_frame->SetFrameSlot(output_offset, context);
  DebugPrintOutputSlot(context, frame_index, output_offset,
                       "context (adaptor sentinel)\n");

  // The function was mentioned explicitly in the ARGUMENTS_ADAPTOR_FRAME.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(function);
  WriteValueToOutput(function, 0, frame_index, output_offset, "function    ");
  if (function == isolate_->heap()->arguments_marker()) {
    Address output_address =
        reinterpret_cast<Address>(output_[frame_index]->GetTop()) +
        output_offset;
    values_to_materialize_.push_back({output_address, function_iterator});
  }

  // Number of incoming arguments.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(Smi::FromInt(height - 1));
  output_frame->SetFrameSlot(output_offset, value);
  DebugPrintOutputSlot(value, frame_index, output_offset, "argc ");
  if (trace_scope_ != nullptr) {
    PrintF(trace_scope_->file(), "(%d)\n", height - 1);
  }

  output_offset -= kPointerSize;
  WriteValueToOutput(isolate()->heap()->the_hole_value(), 0, frame_index,
                     output_offset, "padding ");

  DCHECK_EQ(0, output_offset);

  Builtins* builtins = isolate_->builtins();
  Code* adaptor_trampoline =
      builtins->builtin(Builtins::kArgumentsAdaptorTrampoline);
  intptr_t pc_value = reinterpret_cast<intptr_t>(
      adaptor_trampoline->InstructionStart() +
      isolate_->heap()->arguments_adaptor_deopt_pc_offset()->value());
  output_frame->SetPc(pc_value);
  if (FLAG_enable_embedded_constant_pool) {
    intptr_t constant_pool_value =
        reinterpret_cast<intptr_t>(adaptor_trampoline->constant_pool());
    output_frame->SetConstantPool(constant_pool_value);
  }
}

void Deoptimizer::DoComputeConstructStubFrame(TranslatedFrame* translated_frame,
                                              int frame_index) {
  TranslatedFrame::iterator value_iterator = translated_frame->begin();
  bool is_topmost = (output_count_ - 1 == frame_index);
  // The construct frame could become topmost only if we inlined a constructor
  // call which does a tail call (otherwise the tail callee's frame would be
  // the topmost one). So it could only be the LAZY case.
  CHECK(!is_topmost || bailout_type_ == LAZY);
  int input_index = 0;

  Builtins* builtins = isolate_->builtins();
  Code* construct_stub = builtins->builtin(
      FLAG_harmony_restrict_constructor_return
          ? Builtins::kJSConstructStubGenericRestrictedReturn
          : Builtins::kJSConstructStubGenericUnrestrictedReturn);
  BailoutId bailout_id = translated_frame->node_id();
  unsigned height = translated_frame->height();
  unsigned height_in_bytes = height * kPointerSize;

  // If the construct frame appears to be topmost we should ensure that the
  // value of result register is preserved during continuation execution.
  // We do this here by "pushing" the result of the constructor function to the
  // top of the reconstructed stack and popping it in
  // {Builtins::kNotifyDeoptimized}.
  if (is_topmost) {
    height_in_bytes += kPointerSize;
    if (PadTopOfStackRegister()) height_in_bytes += kPointerSize;
  }

  int parameter_count = height;
  if (ShouldPadArguments(parameter_count)) height_in_bytes += kPointerSize;

  JSFunction* function = JSFunction::cast(value_iterator->GetRawValue());
  value_iterator++;
  input_index++;
  if (trace_scope_ != nullptr) {
    PrintF(trace_scope_->file(),
           "  translating construct stub => bailout_id=%d (%s), height=%d\n",
           bailout_id.ToInt(),
           bailout_id == BailoutId::ConstructStubCreate() ? "create" : "invoke",
           height_in_bytes);
  }

  unsigned fixed_frame_size = ConstructFrameConstants::kFixedFrameSize;
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;

  // Allocate and store the output frame description.
  FrameDescription* output_frame = new (output_frame_size)
      FrameDescription(output_frame_size, parameter_count);

  // Construct stub can not be topmost.
  DCHECK(frame_index > 0 && frame_index < output_count_);
  DCHECK_NULL(output_[frame_index]);
  output_[frame_index] = output_frame;

  // The top address of the frame is computed from the previous frame's top and
  // this frame's size.
  intptr_t top_address;
  top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
  output_frame->SetTop(top_address);

  unsigned output_offset = output_frame_size;

  if (ShouldPadArguments(parameter_count)) {
    output_offset -= kPointerSize;
    WriteValueToOutput(isolate()->heap()->the_hole_value(), 0, frame_index,
                       output_offset, "padding ");
  }

  // Compute the incoming parameter translation.
  for (int i = 0; i < parameter_count; ++i) {
    output_offset -= kPointerSize;
    // The allocated receiver of a construct stub frame is passed as the
    // receiver parameter through the translation. It might be encoding
    // a captured object, override the slot address for a captured object.
    WriteTranslatedValueToOutput(
        &value_iterator, &input_index, frame_index, output_offset, nullptr,
        (i == 0) ? reinterpret_cast<Address>(top_address) : nullptr);
  }

  DCHECK_EQ(output_offset, output_frame->GetLastArgumentSlotOffset());
  // Read caller's PC from the previous frame.
  output_offset -= kPCOnStackSize;
  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
  output_frame->SetCallerPc(output_offset, callers_pc);
  DebugPrintOutputSlot(callers_pc, frame_index, output_offset, "caller's pc\n");

  // Read caller's FP from the previous frame, and set this frame's FP.
  output_offset -= kFPOnStackSize;
  intptr_t value = output_[frame_index - 1]->GetFp();
  output_frame->SetCallerFp(output_offset, value);
  intptr_t fp_value = top_address + output_offset;
  output_frame->SetFp(fp_value);
  if (is_topmost) {
    Register fp_reg = JavaScriptFrame::fp_register();
    output_frame->SetRegister(fp_reg.code(), fp_value);
  }
  DebugPrintOutputSlot(value, frame_index, output_offset, "caller's fp\n");

  if (FLAG_enable_embedded_constant_pool) {
    // Read the caller's constant pool from the previous frame.
    output_offset -= kPointerSize;
    value = output_[frame_index - 1]->GetConstantPool();
    output_frame->SetCallerConstantPool(output_offset, value);
    DebugPrintOutputSlot(value, frame_index, output_offset,
                         "caller's constant_pool\n");
  }

  // A marker value is used to mark the frame.
  output_offset -= kPointerSize;
  value = StackFrame::TypeToMarker(StackFrame::CONSTRUCT);
  output_frame->SetFrameSlot(output_offset, value);
  DebugPrintOutputSlot(value, frame_index, output_offset,
                       "typed frame marker\n");

  // The context can be gotten from the previous frame.
  output_offset -= kPointerSize;
  value = output_[frame_index - 1]->GetContext();
  output_frame->SetFrameSlot(output_offset, value);
  DebugPrintOutputSlot(value, frame_index, output_offset, "context\n");

  // Number of incoming arguments.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(Smi::FromInt(height - 1));
  output_frame->SetFrameSlot(output_offset, value);
  DebugPrintOutputSlot(value, frame_index, output_offset, "argc ");
  if (trace_scope_ != nullptr) {
    PrintF(trace_scope_->file(), "(%d)\n", height - 1);
  }

  // The constructor function was mentioned explicitly in the
  // CONSTRUCT_STUB_FRAME.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(function);
  WriteValueToOutput(function, 0, frame_index, output_offset,
                     "constructor function ");

  // The deopt info contains the implicit receiver or the new target at the
  // position of the receiver. Copy it to the top of stack, with the hole value
  // as padding to maintain alignment.
  output_offset -= kPointerSize;
  WriteValueToOutput(isolate()->heap()->the_hole_value(), 0, frame_index,
                     output_offset, "padding");

  output_offset -= kPointerSize;

  if (ShouldPadArguments(parameter_count)) {
    value = output_frame->GetFrameSlot(output_frame_size - 2 * kPointerSize);
  } else {
    value = output_frame->GetFrameSlot(output_frame_size - kPointerSize);
  }
  output_frame->SetFrameSlot(output_offset, value);

  if (bailout_id == BailoutId::ConstructStubCreate()) {
    DebugPrintOutputSlot(value, frame_index, output_offset, "new target\n");
  } else {
    CHECK(bailout_id == BailoutId::ConstructStubInvoke());
    DebugPrintOutputSlot(value, frame_index, output_offset,
                         "allocated receiver\n");
  }

  if (is_topmost) {
    if (PadTopOfStackRegister()) {
      output_offset -= kPointerSize;
      WriteValueToOutput(isolate()->heap()->the_hole_value(), 0, frame_index,
                         output_offset, "padding ");
    }
    // Ensure the result is restored back when we return to the stub.
    output_offset -= kPointerSize;
    Register result_reg = kReturnRegister0;
    value = input_->GetRegister(result_reg.code());
    output_frame->SetFrameSlot(output_offset, value);
    DebugPrintOutputSlot(value, frame_index, output_offset, "subcall result\n");
  }

  CHECK_EQ(0u, output_offset);

  // Compute this frame's PC.
  DCHECK(bailout_id.IsValidForConstructStub());
  Address start = construct_stub->InstructionStart();
  int pc_offset =
      bailout_id == BailoutId::ConstructStubCreate()
          ? isolate_->heap()->construct_stub_create_deopt_pc_offset()->value()
          : isolate_->heap()->construct_stub_invoke_deopt_pc_offset()->value();
  intptr_t pc_value = reinterpret_cast<intptr_t>(start + pc_offset);
  output_frame->SetPc(pc_value);

  // Update constant pool.
  if (FLAG_enable_embedded_constant_pool) {
    intptr_t constant_pool_value =
        reinterpret_cast<intptr_t>(construct_stub->constant_pool());
    output_frame->SetConstantPool(constant_pool_value);
    if (is_topmost) {
      Register constant_pool_reg =
          JavaScriptFrame::constant_pool_pointer_register();
      output_frame->SetRegister(constant_pool_reg.code(), constant_pool_value);
    }
  }

  // Clear the context register. The context might be a de-materialized object
  // and will be materialized by {Runtime_NotifyDeoptimized}. For additional
  // safety we use Smi(0) instead of the potential {arguments_marker} here.
  if (is_topmost) {
    intptr_t context_value = reinterpret_cast<intptr_t>(Smi::kZero);
    Register context_reg = JavaScriptFrame::context_register();
    output_frame->SetRegister(context_reg.code(), context_value);
  }

  // Set the continuation for the topmost frame.
  if (is_topmost) {
    Builtins* builtins = isolate_->builtins();
    DCHECK_EQ(LAZY, bailout_type_);
    Code* continuation = builtins->builtin(Builtins::kNotifyDeoptimized);
    output_frame->SetContinuation(
        reinterpret_cast<intptr_t>(continuation->InstructionStart()));
  }
}

// BuiltinContinuationFrames capture the machine state that is expected as input
// to a builtin, including both input register values and stack parameters. When
// the frame is reactivated (i.e. the frame below it returns), a
// ContinueToBuiltin stub restores the register state from the frame and tail
// calls to the actual target builtin, making it appear that the stub had been
// directly called by the frame above it. The input values to populate the frame
// are taken from the deopt's FrameState.
//
// Frame translation happens in two modes, EAGER and LAZY. In EAGER mode, all of
// the parameters to the Builtin are explicitly specified in the TurboFan
// FrameState node. In LAZY mode, there is always one fewer parameter specified
// in the FrameState than expected by the Builtin. In that case, construction of
// BuiltinContinuationFrame adds the final missing parameter during
// deoptimization, and that parameter is always on the stack and contains the
// value returned from the callee of the call site triggering the LAZY deopt
// (e.g. rax on x64). This requires that continuation Builtins for LAZY deopts
// must have at least one stack parameter.
//
//                TO
//    |          ....           |
//    +-------------------------+
//    |     builtin param 0     |<- FrameState input value n becomes
//    +-------------------------+
//    |           ...           |
//    +-------------------------+
//    |     builtin param m     |<- FrameState input value n+m-1, or in
//    +-------------------------+   the LAZY case, return LAZY result value
//    | ContinueToBuiltin entry |
//    +-------------------------+
// |  |    saved frame (FP)     |
// |  +=========================+<- fpreg
// |  |constant pool (if ool_cp)|
// v  +-------------------------+
//    |BUILTIN_CONTINUATION mark|
//    +-------------------------+
//    |  JS Builtin code object |
//    +-------------------------+
//    | builtin input GPR reg0  |<- populated from deopt FrameState using
//    +-------------------------+   the builtin's CallInterfaceDescriptor
//    |          ...            |   to map a FrameState's 0..n-1 inputs to
//    +-------------------------+   the builtin's n input register params.
//    | builtin input GPR regn  |
//    |-------------------------|<- spreg
//
void Deoptimizer::DoComputeBuiltinContinuation(
    TranslatedFrame* translated_frame, int frame_index,
    bool java_script_builtin) {
  TranslatedFrame::iterator value_iterator = translated_frame->begin();
  int input_index = 0;

  // The output frame must have room for all of the parameters that need to be
  // passed to the builtin continuation.
  int height_in_words = translated_frame->height();

  BailoutId bailout_id = translated_frame->node_id();
  Builtins::Name builtin_name = Builtins::GetBuiltinFromBailoutId(bailout_id);
  CHECK(!Builtins::IsLazy(builtin_name));
  Code* builtin = isolate()->builtins()->builtin(builtin_name);
  Callable continuation_callable =
      Builtins::CallableFor(isolate(), builtin_name);
  CallInterfaceDescriptor continuation_descriptor =
      continuation_callable.descriptor();

  bool is_bottommost = (0 == frame_index);
  bool is_topmost = (output_count_ - 1 == frame_index);
  bool must_handle_result = !is_topmost || bailout_type_ == LAZY;

  const RegisterConfiguration* config(RegisterConfiguration::Default());
  int allocatable_register_count = config->num_allocatable_general_registers();
  int padding_slot_count = BuiltinContinuationFrameConstants::PaddingSlotCount(
      allocatable_register_count);

  int register_parameter_count =
      continuation_descriptor.GetRegisterParameterCount();
  // Make sure to account for the context by removing it from the register
  // parameter count.
  int stack_param_count = height_in_words - register_parameter_count - 1;
  if (must_handle_result) stack_param_count++;
  unsigned output_frame_size =
      kPointerSize * (stack_param_count + allocatable_register_count +
                      padding_slot_count) +
      BuiltinContinuationFrameConstants::kFixedFrameSize;

  // If the builtins frame appears to be topmost we should ensure that the
  // value of the result register is preserved during continuation execution.
  // We do this here by "pushing" the result of the callback function to the
  // top of the reconstructed stack and popping it in
  // {Builtins::kNotifyDeoptimized}.
  if (is_topmost) {
    output_frame_size += kPointerSize;
    if (PadTopOfStackRegister()) output_frame_size += kPointerSize;
  }

  // Validate types of parameters. They must all be tagged except for argc for
  // JS builtins.
  bool has_argc = false;
  for (int i = 0; i < register_parameter_count; ++i) {
    MachineType type = continuation_descriptor.GetParameterType(i);
    int code = continuation_descriptor.GetRegisterParameter(i).code();
    // Only tagged and int32 arguments are supported, and int32 only for the
    // arguments count on JavaScript builtins.
    if (type == MachineType::Int32()) {
      CHECK_EQ(code, kJavaScriptCallArgCountRegister.code());
      has_argc = true;
    } else {
      // Any other argument must be a tagged value.
      CHECK(IsAnyTagged(type.representation()));
    }
  }
  CHECK_EQ(java_script_builtin, has_argc);

  if (trace_scope_ != nullptr) {
    PrintF(trace_scope_->file(),
           "  translating BuiltinContinuation to %s,"
           " register param count %d,"
           " stack param count %d\n",
           Builtins::name(builtin_name), register_parameter_count,
           stack_param_count);
  }

  int translated_stack_parameters =
      must_handle_result ? stack_param_count - 1 : stack_param_count;

  if (ShouldPadArguments(stack_param_count)) output_frame_size += kPointerSize;
  FrameDescription* output_frame = new (output_frame_size)
      FrameDescription(output_frame_size, stack_param_count);
  output_[frame_index] = output_frame;

  // The top address of the frame is computed from the previous frame's top and
  // this frame's size.
  intptr_t top_address;
  if (is_bottommost) {
    top_address = caller_frame_top_ - output_frame_size;
  } else {
    top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
  }
  output_frame->SetTop(top_address);

  // Get the possible JSFunction for the case that this is a
  // JavaScriptBuiltinContinuationFrame, which needs the JSFunction pointer
  // like a normal JavaScriptFrame.
  intptr_t maybe_function =
      reinterpret_cast<intptr_t>(value_iterator->GetRawValue());
  ++input_index;
  ++value_iterator;

  struct RegisterValue {
    Object* raw_value_;
    TranslatedFrame::iterator iterator_;
  };
  std::vector<RegisterValue> register_values;
  int total_registers = config->num_general_registers();
  register_values.resize(total_registers, {Smi::kZero, value_iterator});

  intptr_t value;

  unsigned output_frame_offset = output_frame_size;
  if (ShouldPadArguments(stack_param_count)) {
    output_frame_offset -= kPointerSize;
    WriteValueToOutput(isolate()->heap()->the_hole_value(), 0, frame_index,
                       output_frame_offset, "padding ");
  }

  for (int i = 0; i < translated_stack_parameters; ++i) {
    output_frame_offset -= kPointerSize;
    WriteTranslatedValueToOutput(&value_iterator, &input_index, frame_index,
                                 output_frame_offset);
  }

  if (must_handle_result) {
    output_frame_offset -= kPointerSize;
    WriteValueToOutput(isolate()->heap()->the_hole_value(), input_index,
                       frame_index, output_frame_offset,
                       "placeholder for return result on lazy deopt ");
  }

  DCHECK_EQ(output_frame_offset, output_frame->GetLastArgumentSlotOffset());

  for (int i = 0; i < register_parameter_count; ++i) {
    Object* object = value_iterator->GetRawValue();
    int code = continuation_descriptor.GetRegisterParameter(i).code();
    register_values[code] = {object, value_iterator};
    ++input_index;
    ++value_iterator;
  }

  // The context register is always implicit in the CallInterfaceDescriptor but
  // its register must be explicitly set when continuing to the builtin. Make
  // sure that it's harvested from the translation and copied into the register
  // set (it was automatically added at the end of the FrameState by the
  // instruction selector).
  Object* context = value_iterator->GetRawValue();
  value = reinterpret_cast<intptr_t>(context);
  register_values[kContextRegister.code()] = {context, value_iterator};
  output_frame->SetContext(value);
  output_frame->SetRegister(kContextRegister.code(), value);
  ++input_index;
  ++value_iterator;

  // Set caller's PC (JSFunction continuation).
  output_frame_offset -= kPCOnStackSize;
  if (is_bottommost) {
    value = caller_pc_;
  } else {
    value = output_[frame_index - 1]->GetPc();
  }
  output_frame->SetCallerPc(output_frame_offset, value);
  DebugPrintOutputSlot(value, frame_index, output_frame_offset,
                       "caller's pc\n");

  // Read caller's FP from the previous frame, and set this frame's FP.
  output_frame_offset -= kFPOnStackSize;
  if (is_bottommost) {
    value = caller_fp_;
  } else {
    value = output_[frame_index - 1]->GetFp();
  }
  output_frame->SetCallerFp(output_frame_offset, value);
  intptr_t fp_value = top_address + output_frame_offset;
  output_frame->SetFp(fp_value);
  DebugPrintOutputSlot(value, frame_index, output_frame_offset,
                       "caller's fp\n");

  if (FLAG_enable_embedded_constant_pool) {
    // Read the caller's constant pool from the previous frame.
    output_frame_offset -= kPointerSize;
    if (is_bottommost) {
      value = caller_constant_pool_;
    } else {
      value = output_[frame_index - 1]->GetConstantPool();
    }
    output_frame->SetCallerConstantPool(output_frame_offset, value);
    DebugPrintOutputSlot(value, frame_index, output_frame_offset,
                         "caller's constant_pool\n");
  }

  // A marker value is used in place of the context.
  output_frame_offset -= kPointerSize;
  intptr_t marker =
      java_script_builtin
          ? StackFrame::TypeToMarker(
                StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION)
          : StackFrame::TypeToMarker(StackFrame::BUILTIN_CONTINUATION);
  output_frame->SetFrameSlot(output_frame_offset, marker);
  DebugPrintOutputSlot(marker, frame_index, output_frame_offset,
                       "context (builtin continuation sentinel)\n");

  output_frame_offset -= kPointerSize;
  value = java_script_builtin ? maybe_function : 0;
  output_frame->SetFrameSlot(output_frame_offset, value);
  DebugPrintOutputSlot(value, frame_index, output_frame_offset,
                       java_script_builtin ? "JSFunction\n" : "unused\n");

  // The builtin to continue to
  output_frame_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(builtin);
  output_frame->SetFrameSlot(output_frame_offset, value);
  DebugPrintOutputSlot(value, frame_index, output_frame_offset,
                       "builtin address\n");

  for (int i = 0; i < allocatable_register_count; ++i) {
    output_frame_offset -= kPointerSize;
    int code = config->GetAllocatableGeneralCode(i);
    Object* object = register_values[code].raw_value_;
    value = reinterpret_cast<intptr_t>(object);
    output_frame->SetFrameSlot(output_frame_offset, value);
    if (trace_scope_ != nullptr) {
      ScopedVector<char> str(128);
      if (java_script_builtin &&
          code == kJavaScriptCallArgCountRegister.code()) {
        SNPrintF(
            str,
            "tagged argument count %s (will be untagged by continuation)\n",
            config->GetGeneralRegisterName(code));
      } else {
        SNPrintF(str, "builtin register argument %s\n",
                 config->GetGeneralRegisterName(code));
      }
      DebugPrintOutputSlot(value, frame_index, output_frame_offset,
                           str.start());
    }
    if (object == isolate_->heap()->arguments_marker()) {
      Address output_address =
          reinterpret_cast<Address>(output_[frame_index]->GetTop()) +
          output_frame_offset;
      values_to_materialize_.push_back(
          {output_address, register_values[code].iterator_});
    }
  }

  // Some architectures must pad the stack frame with extra stack slots
  // to ensure the stack frame is aligned.
  for (int i = 0; i < padding_slot_count; ++i) {
    output_frame_offset -= kPointerSize;
    WriteValueToOutput(isolate()->heap()->the_hole_value(), 0, frame_index,
                       output_frame_offset, "padding ");
  }

  if (is_topmost) {
    if (PadTopOfStackRegister()) {
      output_frame_offset -= kPointerSize;
      WriteValueToOutput(isolate()->heap()->the_hole_value(), 0, frame_index,
                         output_frame_offset, "padding ");
    }
    // Ensure the result is restored back when we return to the stub.
    output_frame_offset -= kPointerSize;
    Register result_reg = kReturnRegister0;
    if (must_handle_result) {
      value = input_->GetRegister(result_reg.code());
    } else {
      value = reinterpret_cast<intptr_t>(isolate()->heap()->undefined_value());
    }
    output_frame->SetFrameSlot(output_frame_offset, value);
    DebugPrintOutputSlot(value, frame_index, output_frame_offset,
                         "callback result\n");
  }

  CHECK_EQ(0u, output_frame_offset);

  // Clear the context register. The context might be a de-materialized object
  // and will be materialized by {Runtime_NotifyDeoptimized}. For additional
  // safety we use Smi(0) instead of the potential {arguments_marker} here.
  if (is_topmost) {
    intptr_t context_value = reinterpret_cast<intptr_t>(Smi::kZero);
    Register context_reg = JavaScriptFrame::context_register();
    output_frame->SetRegister(context_reg.code(), context_value);
  }

  // Ensure the frame pointer register points to the callee's frame. The builtin
  // will build its own frame once we continue to it.
  Register fp_reg = JavaScriptFrame::fp_register();
  output_frame->SetRegister(fp_reg.code(), output_[frame_index - 1]->GetFp());

  Code* continue_to_builtin =
      java_script_builtin
          ? (must_handle_result
                 ? isolate()->builtins()->builtin(
                       Builtins::kContinueToJavaScriptBuiltinWithResult)
                 : isolate()->builtins()->builtin(
                       Builtins::kContinueToJavaScriptBuiltin))
          : (must_handle_result
                 ? isolate()->builtins()->builtin(
                       Builtins::kContinueToCodeStubBuiltinWithResult)
                 : isolate()->builtins()->builtin(
                       Builtins::kContinueToCodeStubBuiltin));
  output_frame->SetPc(
      reinterpret_cast<intptr_t>(continue_to_builtin->InstructionStart()));

  Code* continuation =
      isolate()->builtins()->builtin(Builtins::kNotifyDeoptimized);
  output_frame->SetContinuation(
      reinterpret_cast<intptr_t>(continuation->InstructionStart()));
}

void Deoptimizer::MaterializeHeapObjects() {
  translated_state_.Prepare(reinterpret_cast<Address>(stack_fp_));

  for (auto& materialization : values_to_materialize_) {
    Handle<Object> value = materialization.value_->GetValue();

    if (trace_scope_ != nullptr) {
      PrintF("Materialization [0x%08" V8PRIxPTR "] <- 0x%08" V8PRIxPTR " ;  ",
             reinterpret_cast<intptr_t>(materialization.output_slot_address_),
             reinterpret_cast<intptr_t>(*value));
      value->ShortPrint(trace_scope_->file());
      PrintF(trace_scope_->file(), "\n");
    }

    *(reinterpret_cast<intptr_t*>(materialization.output_slot_address_)) =
        reinterpret_cast<intptr_t>(*value);
  }

  translated_state_.VerifyMaterializedObjects();

  bool feedback_updated = translated_state_.DoUpdateFeedback();
  if (trace_scope_ != nullptr && feedback_updated) {
    PrintF(trace_scope_->file(), "Feedback updated");
    compiled_code_->PrintDeoptLocation(trace_scope_->file(),
                                       " from deoptimization at ", from_);
  }

  isolate_->materialized_object_store()->Remove(
      reinterpret_cast<Address>(stack_fp_));
}


void Deoptimizer::WriteTranslatedValueToOutput(
    TranslatedFrame::iterator* iterator, int* input_index, int frame_index,
    unsigned output_offset, const char* debug_hint_string,
    Address output_address_for_materialization) {
  Object* value = (*iterator)->GetRawValue();

  WriteValueToOutput(value, *input_index, frame_index, output_offset,
                     debug_hint_string);

  if (value == isolate_->heap()->arguments_marker()) {
    Address output_address =
        reinterpret_cast<Address>(output_[frame_index]->GetTop()) +
        output_offset;
    if (output_address_for_materialization == nullptr) {
      output_address_for_materialization = output_address;
    }
    values_to_materialize_.push_back(
        {output_address_for_materialization, *iterator});
  }

  (*iterator)++;
  (*input_index)++;
}


void Deoptimizer::WriteValueToOutput(Object* value, int input_index,
                                     int frame_index, unsigned output_offset,
                                     const char* debug_hint_string) {
  output_[frame_index]->SetFrameSlot(output_offset,
                                     reinterpret_cast<intptr_t>(value));

  if (trace_scope_ != nullptr) {
    DebugPrintOutputSlot(reinterpret_cast<intptr_t>(value), frame_index,
                         output_offset, debug_hint_string);
    value->ShortPrint(trace_scope_->file());
    PrintF(trace_scope_->file(), "  (input #%d)\n", input_index);
  }
}


void Deoptimizer::DebugPrintOutputSlot(intptr_t value, int frame_index,
                                       unsigned output_offset,
                                       const char* debug_hint_string) {
  if (trace_scope_ != nullptr) {
    Address output_address =
        reinterpret_cast<Address>(output_[frame_index]->GetTop()) +
        output_offset;
    PrintF(trace_scope_->file(),
           "    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" V8PRIxPTR " ;  %s",
           reinterpret_cast<intptr_t>(output_address), output_offset, value,
           debug_hint_string == nullptr ? "" : debug_hint_string);
  }
}

unsigned Deoptimizer::ComputeInputFrameAboveFpFixedSize() const {
  unsigned fixed_size = CommonFrameConstants::kFixedFrameSizeAboveFp;
  if (!function_->IsSmi()) {
    fixed_size += ComputeIncomingArgumentSize(function_->shared());
  }
  return fixed_size;
}

unsigned Deoptimizer::ComputeInputFrameSize() const {
  // The fp-to-sp delta already takes the context, constant pool pointer and the
  // function into account so we have to avoid double counting them.
  unsigned fixed_size_above_fp = ComputeInputFrameAboveFpFixedSize();
  unsigned result = fixed_size_above_fp + fp_to_sp_delta_;
  if (compiled_code_->kind() == Code::OPTIMIZED_FUNCTION) {
    unsigned stack_slots = compiled_code_->stack_slots();
    unsigned outgoing_size = 0;
    //        ComputeOutgoingArgumentSize(compiled_code_, bailout_id_);
    CHECK_EQ(fixed_size_above_fp + (stack_slots * kPointerSize) -
                 CommonFrameConstants::kFixedFrameSizeAboveFp + outgoing_size,
             result);
  }
  return result;
}

// static
unsigned Deoptimizer::ComputeInterpretedFixedSize(SharedFunctionInfo* shared) {
  // The fixed part of the frame consists of the return address, frame
  // pointer, function, context, bytecode offset and all the incoming arguments.
  return ComputeIncomingArgumentSize(shared) +
         InterpreterFrameConstants::kFixedFrameSize;
}

// static
unsigned Deoptimizer::ComputeIncomingArgumentSize(SharedFunctionInfo* shared) {
  int parameter_slots = shared->internal_formal_parameter_count() + 1;
  if (kPadArguments) parameter_slots = RoundUp(parameter_slots, 2);
  return parameter_slots * kPointerSize;
}
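
// Illustrative arithmetic for ComputeIncomingArgumentSize above (hypothetical
// function): 2 declared parameters plus the receiver give 3 slots, rounded up
// to 4 when kPadArguments is set, i.e. 4 * kPointerSize bytes.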

void Deoptimizer::EnsureCodeForDeoptimizationEntry(Isolate* isolate,
                                                   BailoutType type) {
  CHECK(type == EAGER || type == SOFT || type == LAZY);
  DeoptimizerData* data = isolate->deoptimizer_data();
  if (data->deopt_entry_code_[type] != nullptr) return;

  MacroAssembler masm(isolate, nullptr, 16 * KB, CodeObjectRequired::kYes);
  masm.set_emit_debug_code(false);
  GenerateDeoptimizationEntries(&masm, kMaxNumberOfEntries, type);
  CodeDesc desc;
  masm.GetCode(isolate, &desc);
  DCHECK(!RelocInfo::RequiresRelocation(desc));

  // Allocate the code as immovable since the entry addresses will be used
  // directly and there is no support for relocating them.
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::STUB, Handle<Object>(), Builtins::kNoBuiltinId,
      MaybeHandle<ByteArray>(), MaybeHandle<DeoptimizationData>(), kImmovable);
  CHECK(Heap::IsImmovable(*code));

  CHECK_NULL(data->deopt_entry_code_[type]);
  data->deopt_entry_code_[type] = *code;
}

void Deoptimizer::EnsureCodeForMaxDeoptimizationEntries(Isolate* isolate) {
  EnsureCodeForDeoptimizationEntry(isolate, EAGER);
  EnsureCodeForDeoptimizationEntry(isolate, LAZY);
  EnsureCodeForDeoptimizationEntry(isolate, SOFT);
}

FrameDescription::FrameDescription(uint32_t frame_size, int parameter_count)
    : frame_size_(frame_size),
      parameter_count_(parameter_count),
      top_(kZapUint32),
      pc_(kZapUint32),
      fp_(kZapUint32),
      context_(kZapUint32),
      constant_pool_(kZapUint32) {
  // Zap all the registers.
  for (int r = 0; r < Register::kNumRegisters; r++) {
    // TODO(jbramley): It isn't safe to use kZapUint32 here. If the register
    // isn't used before the next safepoint, the GC will try to scan it as a
    // tagged value. kZapUint32 looks like a valid tagged pointer, but it isn't.
    SetRegister(r, kZapUint32);
  }

  // Zap all the slots.
  for (unsigned o = 0; o < frame_size; o += kPointerSize) {
    SetFrameSlot(o, kZapUint32);
  }
}

void TranslationBuffer::Add(int32_t value) {
  // This wouldn't handle kMinInt correctly if it ever encountered it.
  DCHECK_NE(value, kMinInt);
  // Encode the sign bit in the least significant bit.
  bool is_negative = (value < 0);
  uint32_t bits = ((is_negative ? -value : value) << 1) |
      static_cast<int32_t>(is_negative);
  // Encode the individual bytes using the least significant bit of
  // each byte to indicate whether or not more bytes follow.
  do {
    uint32_t next = bits >> 7;
    contents_.push_back(((bits << 1) & 0xFF) | (next != 0));
    bits = next;
  } while (bits != 0);
}
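
// Worked example of the encoding above (illustrative values only): Add(-3)
// computes bits = (3 << 1) | 1 = 7 and emits the single byte 0x0E; Add(200)
// computes bits = 400 and emits 0x21 (low seven bits, continuation bit set)
// followed by 0x06 (remaining bits, continuation bit clear).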

TranslationIterator::TranslationIterator(ByteArray* buffer, int index)
    : buffer_(buffer), index_(index) {
  DCHECK(index >= 0 && index < buffer->length());
}

int32_t TranslationIterator::Next() {
  // Run through the bytes until we reach one with a least significant
  // bit of zero (marks the end).
  uint32_t bits = 0;
  for (int i = 0; true; i += 7) {
    DCHECK(HasNext());
    uint8_t next = buffer_->get(index_++);
    bits |= (next >> 1) << i;
    if ((next & 1) == 0) break;
  }
  // The bits encode the sign in the least significant bit.
  bool is_negative = (bits & 1) == 1;
  int32_t result = bits >> 1;
  return is_negative ? -result : result;
}

bool TranslationIterator::HasNext() const { return index_ < buffer_->length(); }

Handle<ByteArray> TranslationBuffer::CreateByteArray(Factory* factory) {
  Handle<ByteArray> result = factory->NewByteArray(CurrentIndex(), TENURED);
  contents_.CopyTo(result->GetDataStartAddress());
  return result;
}

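// Sketch of how the emitters below are typically combined when building a
// translation for a single frame (illustrative only; the operands of BEGIN,
// emitted by the Translation constructor, are omitted here):
//
//   Translation translation(&buffer, frame_count, jsframe_count, ...);
//   translation.BeginInterpretedFrame(bytecode_offset, literal_id, height);
//   translation.StoreRegister(reg);      // one Store* opcode per input value
//   translation.StoreStackSlot(index);
//   translation.StoreLiteral(literal_id);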
void Translation::BeginBuiltinContinuationFrame(BailoutId bailout_id,
                                                int literal_id,
                                                unsigned height) {
  buffer_->Add(BUILTIN_CONTINUATION_FRAME);
  buffer_->Add(bailout_id.ToInt());
  buffer_->Add(literal_id);
  buffer_->Add(height);
}

void Translation::BeginJavaScriptBuiltinContinuationFrame(BailoutId bailout_id,
                                                          int literal_id,
                                                          unsigned height) {
  buffer_->Add(JAVA_SCRIPT_BUILTIN_CONTINUATION_FRAME);
  buffer_->Add(bailout_id.ToInt());
  buffer_->Add(literal_id);
  buffer_->Add(height);
}

void Translation::BeginConstructStubFrame(BailoutId bailout_id, int literal_id,
                                          unsigned height) {
  buffer_->Add(CONSTRUCT_STUB_FRAME);
  buffer_->Add(bailout_id.ToInt());
  buffer_->Add(literal_id);
  buffer_->Add(height);
}


void Translation::BeginArgumentsAdaptorFrame(int literal_id, unsigned height) {
  buffer_->Add(ARGUMENTS_ADAPTOR_FRAME);
  buffer_->Add(literal_id);
  buffer_->Add(height);
}

void Translation::BeginInterpretedFrame(BailoutId bytecode_offset,
                                        int literal_id, unsigned height) {
  buffer_->Add(INTERPRETED_FRAME);
  buffer_->Add(bytecode_offset.ToInt());
  buffer_->Add(literal_id);
  buffer_->Add(height);
}

void Translation::ArgumentsElements(CreateArgumentsType type) {
  buffer_->Add(ARGUMENTS_ELEMENTS);
  buffer_->Add(static_cast<uint8_t>(type));
}

void Translation::ArgumentsLength(CreateArgumentsType type) {
  buffer_->Add(ARGUMENTS_LENGTH);
  buffer_->Add(static_cast<uint8_t>(type));
}

void Translation::BeginCapturedObject(int length) {
  buffer_->Add(CAPTURED_OBJECT);
  buffer_->Add(length);
}


void Translation::DuplicateObject(int object_index) {
  buffer_->Add(DUPLICATED_OBJECT);
  buffer_->Add(object_index);
}


void Translation::StoreRegister(Register reg) {
  buffer_->Add(REGISTER);
  buffer_->Add(reg.code());
}


void Translation::StoreInt32Register(Register reg) {
  buffer_->Add(INT32_REGISTER);
  buffer_->Add(reg.code());
}


void Translation::StoreUint32Register(Register reg) {
  buffer_->Add(UINT32_REGISTER);
  buffer_->Add(reg.code());
}


void Translation::StoreBoolRegister(Register reg) {
  buffer_->Add(BOOL_REGISTER);
  buffer_->Add(reg.code());
}

void Translation::StoreFloatRegister(FloatRegister reg) {
  buffer_->Add(FLOAT_REGISTER);
  buffer_->Add(reg.code());
}

void Translation::StoreDoubleRegister(DoubleRegister reg) {
  buffer_->Add(DOUBLE_REGISTER);
  buffer_->Add(reg.code());
}


void Translation::StoreStackSlot(int index) {
  buffer_->Add(STACK_SLOT);
  buffer_->Add(index);
}


void Translation::StoreInt32StackSlot(int index) {
  buffer_->Add(INT32_STACK_SLOT);
  buffer_->Add(index);
}


void Translation::StoreUint32StackSlot(int index) {
  buffer_->Add(UINT32_STACK_SLOT);
  buffer_->Add(index);
}


void Translation::StoreBoolStackSlot(int index) {
  buffer_->Add(BOOL_STACK_SLOT);
  buffer_->Add(index);
}

void Translation::StoreFloatStackSlot(int index) {
  buffer_->Add(FLOAT_STACK_SLOT);
  buffer_->Add(index);
}

void Translation::StoreDoubleStackSlot(int index) {
  buffer_->Add(DOUBLE_STACK_SLOT);
  buffer_->Add(index);
}


void Translation::StoreLiteral(int literal_id) {
  buffer_->Add(LITERAL);
  buffer_->Add(literal_id);
}

void Translation::AddUpdateFeedback(int vector_literal, int slot) {
  buffer_->Add(UPDATE_FEEDBACK);
  buffer_->Add(vector_literal);
  buffer_->Add(slot);
}

void Translation::StoreJSFrameFunction() {
  StoreStackSlot((StandardFrameConstants::kCallerPCOffset -
                  StandardFrameConstants::kFunctionOffset) /
                 kPointerSize);
}

int Translation::NumberOfOperandsFor(Opcode opcode) {
  switch (opcode) {
    case DUPLICATED_OBJECT:
    case ARGUMENTS_ELEMENTS:
    case ARGUMENTS_LENGTH:
    case CAPTURED_OBJECT:
    case REGISTER:
    case INT32_REGISTER:
    case UINT32_REGISTER:
    case BOOL_REGISTER:
    case FLOAT_REGISTER:
    case DOUBLE_REGISTER:
    case STACK_SLOT:
    case INT32_STACK_SLOT:
    case UINT32_STACK_SLOT:
    case BOOL_STACK_SLOT:
    case FLOAT_STACK_SLOT:
    case DOUBLE_STACK_SLOT:
    case LITERAL:
      return 1;
    case ARGUMENTS_ADAPTOR_FRAME:
    case UPDATE_FEEDBACK:
      return 2;
    case BEGIN:
    case INTERPRETED_FRAME:
    case CONSTRUCT_STUB_FRAME:
    case BUILTIN_CONTINUATION_FRAME:
    case JAVA_SCRIPT_BUILTIN_CONTINUATION_FRAME:
      return 3;
  }
  FATAL("Unexpected translation type");
  return -1;
}


#if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)

const char* Translation::StringFor(Opcode opcode) {
#define TRANSLATION_OPCODE_CASE(item)   case item: return #item;
  switch (opcode) {
    TRANSLATION_OPCODE_LIST(TRANSLATION_OPCODE_CASE)
  }
#undef TRANSLATION_OPCODE_CASE
  UNREACHABLE();
}

#endif


Handle<FixedArray> MaterializedObjectStore::Get(Address fp) {
  int index = StackIdToIndex(fp);
  if (index == -1) {
    return Handle<FixedArray>::null();
  }
  Handle<FixedArray> array = GetStackEntries();
  CHECK_GT(array->length(), index);
  return Handle<FixedArray>::cast(Handle<Object>(array->get(index), isolate()));
}


void MaterializedObjectStore::Set(Address fp,
                                  Handle<FixedArray> materialized_objects) {
  int index = StackIdToIndex(fp);
  if (index == -1) {
    index = static_cast<int>(frame_fps_.size());
    frame_fps_.push_back(fp);
  }

  Handle<FixedArray> array = EnsureStackEntries(index + 1);
  array->set(index, *materialized_objects);
}


bool MaterializedObjectStore::Remove(Address fp) {
  auto it = std::find(frame_fps_.begin(), frame_fps_.end(), fp);
  if (it == frame_fps_.end()) return false;
  int index = static_cast<int>(std::distance(frame_fps_.begin(), it));

  frame_fps_.erase(it);
  FixedArray* array = isolate()->heap()->materialized_objects();

  CHECK_LT(index, array->length());
  int fps_size = static_cast<int>(frame_fps_.size());
  for (int i = index; i < fps_size; i++) {
    array->set(i, array->get(i + 1));
  }
  array->set(fps_size, isolate()->heap()->undefined_value());
  return true;
}


int MaterializedObjectStore::StackIdToIndex(Address fp) {
  auto it = std::find(frame_fps_.begin(), frame_fps_.end(), fp);
  return it == frame_fps_.end()
             ? -1
             : static_cast<int>(std::distance(frame_fps_.begin(), it));
}


Handle<FixedArray> MaterializedObjectStore::GetStackEntries() {
  return Handle<FixedArray>(isolate()->heap()->materialized_objects());
}


Handle<FixedArray> MaterializedObjectStore::EnsureStackEntries(int length) {
  Handle<FixedArray> array = GetStackEntries();
  if (array->length() >= length) {
    return array;
  }

  int new_length = length > 10 ? length : 10;
  if (new_length < 2 * array->length()) {
    new_length = 2 * array->length();
  }

  Handle<FixedArray> new_array =
      isolate()->factory()->NewFixedArray(new_length, TENURED);
  for (int i = 0; i < array->length(); i++) {
    new_array->set(i, array->get(i));
  }
  for (int i = array->length(); i < length; i++) {
    new_array->set(i, isolate()->heap()->undefined_value());
  }
  isolate()->heap()->SetRootMaterializedObjects(*new_array);
  return new_array;
}

namespace {

Handle<Object> GetValueForDebugger(TranslatedFrame::iterator it,
                                   Isolate* isolate) {
  if (it->GetRawValue() == isolate->heap()->arguments_marker()) {
    if (!it->IsMaterializableByDebugger()) {
      return isolate->factory()->optimized_out();
    }
  }
  return it->GetValue();
}

}  // namespace

DeoptimizedFrameInfo::DeoptimizedFrameInfo(TranslatedState* state,
                                           TranslatedState::iterator frame_it,
                                           Isolate* isolate) {
  // If the previous frame is an adaptor frame, we will take the parameters
  // from there.
  TranslatedState::iterator parameter_frame = frame_it;
  if (parameter_frame != state->begin()) {
    parameter_frame--;
  }
  int parameter_count;
  if (parameter_frame->kind() == TranslatedFrame::kArgumentsAdaptor) {
    parameter_count = parameter_frame->height() - 1;  // Ignore the receiver.
  } else {
    parameter_frame = frame_it;
    parameter_count =
        frame_it->shared_info()->internal_formal_parameter_count();
  }
  TranslatedFrame::iterator parameter_it = parameter_frame->begin();
  parameter_it++;  // Skip the function.
  parameter_it++;  // Skip the receiver.

  // Figure out whether there is a construct stub frame on top of
  // the parameter frame.
  has_construct_stub_ =
      parameter_frame != state->begin() &&
      (parameter_frame - 1)->kind() == TranslatedFrame::kConstructStub;

  DCHECK_EQ(TranslatedFrame::kInterpretedFunction, frame_it->kind());
  source_position_ = Deoptimizer::ComputeSourcePositionFromBytecodeArray(
      *frame_it->shared_info(), frame_it->node_id());

  TranslatedFrame::iterator value_it = frame_it->begin();
  // Get the function. Note that this might materialize the function.
  // In case the debugger mutates this value, we should deoptimize
  // the function and remember the value in the materialized value store.
  function_ = Handle<JSFunction>::cast(value_it->GetValue());

  parameters_.resize(static_cast<size_t>(parameter_count));
  for (int i = 0; i < parameter_count; i++) {
    Handle<Object> parameter = GetValueForDebugger(parameter_it, isolate);
    SetParameter(i, parameter);
    parameter_it++;
  }

  // Skip the function, the receiver and the arguments.
  int skip_count =
      frame_it->shared_info()->internal_formal_parameter_count() + 2;
  TranslatedFrame::iterator stack_it = frame_it->begin();
  for (int i = 0; i < skip_count; i++) {
    stack_it++;
  }

  // Get the context.
  context_ = GetValueForDebugger(stack_it, isolate);
  stack_it++;

  // Get the expression stack.
  int stack_height = frame_it->height();
  if (frame_it->kind() == TranslatedFrame::kInterpretedFunction) {
    // For interpreter frames, we should not count the accumulator.
    // TODO(jarin): Clean up the indexing in translated frames.
    stack_height--;
  }
  expression_stack_.resize(static_cast<size_t>(stack_height));
  for (int i = 0; i < stack_height; i++) {
    Handle<Object> expression = GetValueForDebugger(stack_it, isolate);
    SetExpression(i, expression);
    stack_it++;
  }

  // For interpreter frames, skip the accumulator.
  if (frame_it->kind() == TranslatedFrame::kInterpretedFunction) {
    stack_it++;
  }
  CHECK(stack_it == frame_it->end());
}


Deoptimizer::DeoptInfo Deoptimizer::GetDeoptInfo(Code* code, Address pc) {
  CHECK(code->InstructionStart() <= pc && pc <= code->InstructionEnd());
  SourcePosition last_position = SourcePosition::Unknown();
  DeoptimizeReason last_reason = DeoptimizeReason::kUnknown;
  int last_deopt_id = kNoDeoptimizationId;
  int mask = RelocInfo::ModeMask(RelocInfo::DEOPT_REASON) |
             RelocInfo::ModeMask(RelocInfo::DEOPT_ID) |
             RelocInfo::ModeMask(RelocInfo::DEOPT_SCRIPT_OFFSET) |
             RelocInfo::ModeMask(RelocInfo::DEOPT_INLINING_ID);
  for (RelocIterator it(code, mask); !it.done(); it.next()) {
    RelocInfo* info = it.rinfo();
    if (info->pc() >= pc) break;
    if (info->rmode() == RelocInfo::DEOPT_SCRIPT_OFFSET) {
      int script_offset = static_cast<int>(info->data());
      it.next();
      DCHECK(it.rinfo()->rmode() == RelocInfo::DEOPT_INLINING_ID);
      int inlining_id = static_cast<int>(it.rinfo()->data());
      last_position = SourcePosition(script_offset, inlining_id);
    } else if (info->rmode() == RelocInfo::DEOPT_ID) {
      last_deopt_id = static_cast<int>(info->data());
    } else if (info->rmode() == RelocInfo::DEOPT_REASON) {
      last_reason = static_cast<DeoptimizeReason>(info->data());
    }
  }
  return DeoptInfo(last_position, last_reason, last_deopt_id);
}


// static
int Deoptimizer::ComputeSourcePositionFromBytecodeArray(
    SharedFunctionInfo* shared, BailoutId node_id) {
  DCHECK(shared->HasBytecodeArray());
  return AbstractCode::cast(shared->bytecode_array())
      ->SourcePosition(node_id.ToInt());
}

// static
TranslatedValue TranslatedValue::NewDeferredObject(TranslatedState* container,
                                                   int length,
                                                   int object_index) {
  TranslatedValue slot(container, kCapturedObject);
  slot.materialization_info_ = {object_index, length};
  return slot;
}


// static
TranslatedValue TranslatedValue::NewDuplicateObject(TranslatedState* container,
                                                    int id) {
  TranslatedValue slot(container, kDuplicatedObject);
  slot.materialization_info_ = {id, -1};
  return slot;
}


// static
TranslatedValue TranslatedValue::NewFloat(TranslatedState* container,
                                          Float32 value) {
  TranslatedValue slot(container, kFloat);
  slot.float_value_ = value;
  return slot;
}

// static
TranslatedValue TranslatedValue::NewDouble(TranslatedState* container,
                                           Float64 value) {
  TranslatedValue slot(container, kDouble);
  slot.double_value_ = value;
  return slot;
}


// static
TranslatedValue TranslatedValue::NewInt32(TranslatedState* container,
                                          int32_t value) {
  TranslatedValue slot(container, kInt32);
  slot.int32_value_ = value;
  return slot;
}


// static
TranslatedValue TranslatedValue::NewUInt32(TranslatedState* container,
                                           uint32_t value) {
  TranslatedValue slot(container, kUInt32);
  slot.uint32_value_ = value;
  return slot;
}


// static
TranslatedValue TranslatedValue::NewBool(TranslatedState* container,
                                         uint32_t value) {
  TranslatedValue slot(container, kBoolBit);
  slot.uint32_value_ = value;
  return slot;
}


// static
TranslatedValue TranslatedValue::NewTagged(TranslatedState* container,
                                           Object* literal) {
  TranslatedValue slot(container, kTagged);
  slot.raw_literal_ = literal;
  return slot;
}


// static
TranslatedValue TranslatedValue::NewInvalid(TranslatedState* container) {
  return TranslatedValue(container, kInvalid);
}


Isolate* TranslatedValue::isolate() const { return container_->isolate(); }


Object* TranslatedValue::raw_literal() const {
  DCHECK_EQ(kTagged, kind());
  return raw_literal_;
}


int32_t TranslatedValue::int32_value() const {
  DCHECK_EQ(kInt32, kind());
  return int32_value_;
}


uint32_t TranslatedValue::uint32_value() const {
  DCHECK(kind() == kUInt32 || kind() == kBoolBit);
  return uint32_value_;
}

Float32 TranslatedValue::float_value() const {
  DCHECK_EQ(kFloat, kind());
  return float_value_;
}

Float64 TranslatedValue::double_value() const {
  DCHECK_EQ(kDouble, kind());
  return double_value_;
}


int TranslatedValue::object_length() const {
  DCHECK_EQ(kind(), kCapturedObject);
  return materialization_info_.length_;
}


int TranslatedValue::object_index() const {
  DCHECK(kind() == kCapturedObject || kind() == kDuplicatedObject);
  return materialization_info_.id_;
}


Object* TranslatedValue::GetRawValue() const {
  // If we have a value, return it.
  if (materialization_state() == kFinished) {
    return *storage_;
  }

  // Otherwise, do a best effort to get the value without allocation.
  switch (kind()) {
    case kTagged:
      return raw_literal();

    case kInt32: {
      bool is_smi = Smi::IsValid(int32_value());
      if (is_smi) {
        return Smi::FromInt(int32_value());
      }
      break;
    }

    case kUInt32: {
      bool is_smi = (uint32_value() <= static_cast<uintptr_t>(Smi::kMaxValue));
      if (is_smi) {
        return Smi::FromInt(static_cast<int32_t>(uint32_value()));
      }
      break;
    }

    case kBoolBit: {
      if (uint32_value() == 0) {
        return isolate()->heap()->false_value();
      } else {
        CHECK_EQ(1U, uint32_value());
        return isolate()->heap()->true_value();
      }
    }

    default:
      break;
  }

  // If we could not get the value without allocation, return the arguments
  // marker.
  return isolate()->heap()->arguments_marker();
}
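
// For illustration (hypothetical values): GetRawValue() on a kInt32 slot
// holding 42 returns Smi::FromInt(42) without allocating, whereas an int32
// outside the Smi range, or any kFloat/kDouble slot, falls through to the
// arguments_marker sentinel and must be materialized via GetValue().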

void TranslatedValue::set_initialized_storage(Handle<Object> storage) {
  DCHECK_EQ(kUninitialized, materialization_state());
  storage_ = storage;
  materialization_state_ = kFinished;
}

Handle<Object> TranslatedValue::GetValue() {
  // If we already have a value, then get it.
  if (materialization_state() == kFinished) return storage_;

  // Otherwise we have to materialize.
  switch (kind()) {
    case TranslatedValue::kTagged:
    case TranslatedValue::kInt32:
    case TranslatedValue::kUInt32:
    case TranslatedValue::kBoolBit:
    case TranslatedValue::kFloat:
    case TranslatedValue::kDouble: {
      MaterializeSimple();
      return storage_;
    }

    case TranslatedValue::kCapturedObject:
    case TranslatedValue::kDuplicatedObject: {
      // We need to materialize the object (or possibly even object graphs).
      // To make the object verifier happy, we materialize in two steps.

      // 1. Allocate storage for reachable objects. This makes sure that for
      //    each object we have allocated space on heap. The space will be
      //    a byte array that will be later initialized, or a fully
      //    initialized object if it is safe to allocate one that will
      //    pass the verifier.
      container_->EnsureObjectAllocatedAt(this);

      // 2. Initialize the objects. If we have allocated only byte arrays
      //    for some objects, we now overwrite the byte arrays with the
      //    correct object fields. Note that this phase does not allocate
      //    any new objects, so it does not trigger the object verifier.
      return container_->InitializeObjectAt(this);
    }

    case TranslatedValue::kInvalid:
      FATAL("unexpected case");
      return Handle<Object>::null();
  }

  FATAL("internal error: value missing");
  return Handle<Object>::null();
}

void TranslatedValue::MaterializeSimple() {
  // If we already have materialized, return.
  if (materialization_state() == kFinished) return;

  Object* raw_value = GetRawValue();
  if (raw_value != isolate()->heap()->arguments_marker()) {
    // We can get the value without allocation, just return it here.
    set_initialized_storage(Handle<Object>(raw_value, isolate()));
    return;
  }

  switch (kind()) {
    case kInt32:
      set_initialized_storage(
          Handle<Object>(isolate()->factory()->NewNumber(int32_value())));
      return;

    case kUInt32:
      set_initialized_storage(
          Handle<Object>(isolate()->factory()->NewNumber(uint32_value())));
      return;

    case kFloat: {
      double scalar_value = float_value().get_scalar();
      set_initialized_storage(
          Handle<Object>(isolate()->factory()->NewNumber(scalar_value)));
      return;
    }

    case kDouble: {
      double scalar_value = double_value().get_scalar();
      set_initialized_storage(
          Handle<Object>(isolate()->factory()->NewNumber(scalar_value)));
      return;
    }

    case kCapturedObject:
    case kDuplicatedObject:
    case kInvalid:
    case kTagged:
    case kBoolBit:
      FATAL("internal error: unexpected materialization.");
      break;
  }
}


bool TranslatedValue::IsMaterializedObject() const {
  switch (kind()) {
    case kCapturedObject:
    case kDuplicatedObject:
      return true;
    default:
      return false;
  }
}

bool TranslatedValue::IsMaterializableByDebugger() const {
  // At the moment, we only allow materialization of doubles.
  return (kind() == kDouble);
}

int TranslatedValue::GetChildrenCount() const {
  if (kind() == kCapturedObject) {
    return object_length();
  } else {
    return 0;
  }
}


uint32_t TranslatedState::GetUInt32Slot(Address fp, int slot_offset) {
  Address address = fp + slot_offset;
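  // The slot is pointer-sized, with the 32-bit payload in its low-order bits;
  // on big-endian 64-bit targets those low-order bytes sit at the higher
  // address, hence the kIntSize adjustment below.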
#if V8_TARGET_BIG_ENDIAN && V8_HOST_ARCH_64_BIT
  return Memory::uint32_at(address + kIntSize);
#else
  return Memory::uint32_at(address);
#endif
}

Float32 TranslatedState::GetFloatSlot(Address fp, int slot_offset) {
#if !V8_TARGET_ARCH_S390X && !V8_TARGET_ARCH_PPC64
  return Float32::FromBits(GetUInt32Slot(fp, slot_offset));
#else
  return Float32::FromBits(Memory::uint32_at(fp + slot_offset));
#endif
}

Float64 TranslatedState::GetDoubleSlot(Address fp, int slot_offset) {
  return Float64::FromBits(Memory::uint64_at(fp + slot_offset));
}

void TranslatedValue::Handlify() {
  if (kind() == kTagged) {
    set_initialized_storage(Handle<Object>(raw_literal(), isolate()));
    raw_literal_ = nullptr;
  }
}


TranslatedFrame TranslatedFrame::InterpretedFrame(
    BailoutId bytecode_offset, SharedFunctionInfo* shared_info, int height) {
  TranslatedFrame frame(kInterpretedFunction, shared_info, height);
  frame.node_id_ = bytecode_offset;
  return frame;
}


TranslatedFrame TranslatedFrame::ArgumentsAdaptorFrame(
    SharedFunctionInfo* shared_info, int height) {
  return TranslatedFrame(kArgumentsAdaptor, shared_info, height);
}

TranslatedFrame TranslatedFrame::ConstructStubFrame(
    BailoutId bailout_id, SharedFunctionInfo* shared_info, int height) {
  TranslatedFrame frame(kConstructStub, shared_info, height);
  frame.node_id_ = bailout_id;
  return frame;
}

TranslatedFrame TranslatedFrame::BuiltinContinuationFrame(
    BailoutId bailout_id, SharedFunctionInfo* shared_info, int height) {
  TranslatedFrame frame(kBuiltinContinuation, shared_info, height);
  frame.node_id_ = bailout_id;
  return frame;
}

TranslatedFrame TranslatedFrame::JavaScriptBuiltinContinuationFrame(
    BailoutId bailout_id, SharedFunctionInfo* shared_info, int height) {
  TranslatedFrame frame(kJavaScriptBuiltinContinuation, shared_info, height);
  frame.node_id_ = bailout_id;
  return frame;
}

int TranslatedFrame::GetValueCount() {
  switch (kind()) {
    case kInterpretedFunction: {
      int parameter_count =
          raw_shared_info_->internal_formal_parameter_count() + 1;
      // + 2 for function and context.
      return height_ + parameter_count + 2;
    }

    case kArgumentsAdaptor:
    case kConstructStub:
    case kBuiltinContinuation:
    case kJavaScriptBuiltinContinuation:
      return 1 + height_;

    case kInvalid:
      UNREACHABLE();
      break;
  }
  UNREACHABLE();
}


void TranslatedFrame::Handlify() {
  if (raw_shared_info_ != nullptr) {
    shared_info_ = Handle<SharedFunctionInfo>(raw_shared_info_);
    raw_shared_info_ = nullptr;
  }
  for (auto& value : values_) {
    value.Handlify();
  }
}


TranslatedFrame TranslatedState::CreateNextTranslatedFrame(
    TranslationIterator* iterator, FixedArray* literal_array, Address fp,
    FILE* trace_file) {
  Translation::Opcode opcode =
      static_cast<Translation::Opcode>(iterator->Next());
  switch (opcode) {
    case Translation::INTERPRETED_FRAME: {
      BailoutId bytecode_offset = BailoutId(iterator->Next());
      SharedFunctionInfo* shared_info =
          SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
      int height = iterator->Next();
      if (trace_file != nullptr) {
        std::unique_ptr<char[]> name = shared_info->DebugName()->ToCString();
        PrintF(trace_file, "  reading input frame %s", name.get());
        int arg_count = shared_info->internal_formal_parameter_count() + 1;
        PrintF(trace_file,
               " => bytecode_offset=%d, args=%d, height=%d; inputs:\n",
               bytecode_offset.ToInt(), arg_count, height);
      }
      return TranslatedFrame::InterpretedFrame(bytecode_offset, shared_info,
                                               height);
    }

    case Translation::ARGUMENTS_ADAPTOR_FRAME: {
      SharedFunctionInfo* shared_info =
          SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
      int height = iterator->Next();
      if (trace_file != nullptr) {
        std::unique_ptr<char[]> name = shared_info->DebugName()->ToCString();
        PrintF(trace_file, "  reading arguments adaptor frame %s", name.get());
        PrintF(trace_file, " => height=%d; inputs:\n", height);
      }
      return TranslatedFrame::ArgumentsAdaptorFrame(shared_info, height);
    }

    case Translation::CONSTRUCT_STUB_FRAME: {
      BailoutId bailout_id = BailoutId(iterator->Next());
      SharedFunctionInfo* shared_info =
          SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
      int height = iterator->Next();
      if (trace_file != nullptr) {
        std::unique_ptr<char[]> name = shared_info->DebugName()->ToCString();
        PrintF(trace_file, "  reading construct stub frame %s", name.get());
        PrintF(trace_file, " => bailout_id=%d, height=%d; inputs:\n",
               bailout_id.ToInt(), height);
      }
      return TranslatedFrame::ConstructStubFrame(bailout_id, shared_info,
                                                 height);
    }

    case Translation::BUILTIN_CONTINUATION_FRAME: {
      BailoutId bailout_id = BailoutId(iterator->Next());
      SharedFunctionInfo* shared_info =
          SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
      int height = iterator->Next();
      if (trace_file != nullptr) {
        std::unique_ptr<char[]> name = shared_info->DebugName()->ToCString();
        PrintF(trace_file, "  reading builtin continuation frame %s",
               name.get());
        PrintF(trace_file, " => bailout_id=%d, height=%d; inputs:\n",
               bailout_id.ToInt(), height);
      }
      // Add one to the height to account for the context which was implicitly
      // added to the translation during code generation.
      int height_with_context = height + 1;
      return TranslatedFrame::BuiltinContinuationFrame(bailout_id, shared_info,
                                                       height_with_context);
    }

    case Translation::JAVA_SCRIPT_BUILTIN_CONTINUATION_FRAME: {
      BailoutId bailout_id = BailoutId(iterator->Next());
      SharedFunctionInfo* shared_info =
          SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
      int height = iterator->Next();
      if (trace_file != nullptr) {
        std::unique_ptr<char[]> name = shared_info->DebugName()->ToCString();
        PrintF(trace_file, "  reading JavaScript builtin continuation frame %s",
               name.get());
        PrintF(trace_file, " => bailout_id=%d, height=%d; inputs:\n",
               bailout_id.ToInt(), height);
      }
      // Add one to the height to account for the context which was implicitly
      // added to the translation during code generation.
      int height_with_context = height + 1;
      return TranslatedFrame::JavaScriptBuiltinContinuationFrame(
          bailout_id, shared_info, height_with_context);
    }
    case Translation::UPDATE_FEEDBACK:
    case Translation::BEGIN:
    case Translation::DUPLICATED_OBJECT:
    case Translation::ARGUMENTS_ELEMENTS:
    case Translation::ARGUMENTS_LENGTH:
    case Translation::CAPTURED_OBJECT:
    case Translation::REGISTER:
    case Translation::INT32_REGISTER:
    case Translation::UINT32_REGISTER:
    case Translation::BOOL_REGISTER:
    case Translation::FLOAT_REGISTER:
    case Translation::DOUBLE_REGISTER:
    case Translation::STACK_SLOT:
    case Translation::INT32_STACK_SLOT:
    case Translation::UINT32_STACK_SLOT:
    case Translation::BOOL_STACK_SLOT:
    case Translation::FLOAT_STACK_SLOT:
    case Translation::DOUBLE_STACK_SLOT:
    case Translation::LITERAL:
      break;
  }
  FATAL("We should never get here - unexpected deopt info.");
  return TranslatedFrame::InvalidFrame();
}


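// Advances the given iterator past one value, including all of its
// transitively nested children (captured objects announce the number of
// fields they own via GetChildrenCount()).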
// static
void TranslatedFrame::AdvanceIterator(
    std::deque<TranslatedValue>::iterator* iter) {
  int values_to_skip = 1;
  while (values_to_skip > 0) {
    // Consume the current element.
    values_to_skip--;
    // Add all the children.
    values_to_skip += (*iter)->GetChildrenCount();

    (*iter)++;
  }
}

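// Determines the frame that holds the actual arguments (either the arguments
// adaptor frame, if present, or the function's own frame) and, if {length} is
// non-null, stores the argument count there. For rest parameters the count is
// clamped to the arguments remaining after the formal parameters.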
Address TranslatedState::ComputeArgumentsPosition(Address input_frame_pointer,
                                                  CreateArgumentsType type,
                                                  int* length) {
  Address parent_frame_pointer = *reinterpret_cast<Address*>(
      input_frame_pointer + StandardFrameConstants::kCallerFPOffset);
  intptr_t parent_frame_type = Memory::intptr_at(
      parent_frame_pointer + CommonFrameConstants::kContextOrFrameTypeOffset);

  Address arguments_frame;
  if (parent_frame_type ==
      StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)) {
    if (length)
      *length = Smi::cast(*reinterpret_cast<Object**>(
                              parent_frame_pointer +
                              ArgumentsAdaptorFrameConstants::kLengthOffset))
                    ->value();
    arguments_frame = parent_frame_pointer;
  } else {
    if (length) *length = formal_parameter_count_;
    arguments_frame = input_frame_pointer;
  }

  if (type == CreateArgumentsType::kRestParameter) {
    // If the actual number of arguments is less than the number of formal
    // parameters, we have zero rest parameters.
    if (length) *length = std::max(0, *length - formal_parameter_count_);
  }

  return arguments_frame;
}

// Creates translated values for an arguments backing store, or the backing
// store for rest parameters depending on the given {type}. The TranslatedValue
// objects for the fields are not read from the TranslationIterator, but instead
// created on-the-fly based on dynamic information in the optimized frame.
void TranslatedState::CreateArgumentsElementsTranslatedValues(
    int frame_index, Address input_frame_pointer, CreateArgumentsType type,
    FILE* trace_file) {
  TranslatedFrame& frame = frames_[frame_index];

  int length;
  Address arguments_frame =
      ComputeArgumentsPosition(input_frame_pointer, type, &length);

  int object_index = static_cast<int>(object_positions_.size());
  int value_index = static_cast<int>(frame.values_.size());
  if (trace_file != nullptr) {
    PrintF(trace_file, "arguments elements object #%d (type = %d, length = %d)",
           object_index, static_cast<uint8_t>(type), length);
  }

  object_positions_.push_back({frame_index, value_index});
  frame.Add(TranslatedValue::NewDeferredObject(
      this, length + FixedArray::kHeaderSize / kPointerSize, object_index));

  frame.Add(
      TranslatedValue::NewTagged(this, isolate_->heap()->fixed_array_map()));
  frame.Add(TranslatedValue::NewInt32(this, length));

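  // Emit the element values: for mapped arguments the formal-parameter slots
  // are emitted as the-hole markers, and the remaining actual arguments are
  // read directly from the stack frame.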
  int number_of_holes = 0;
  if (type == CreateArgumentsType::kMappedArguments) {
    // If the actual number of arguments is less than the number of formal
    // parameters, we have fewer holes to fill to not overshoot the length.
    number_of_holes = Min(formal_parameter_count_, length);
  }
  for (int i = 0; i < number_of_holes; ++i) {
    frame.Add(
        TranslatedValue::NewTagged(this, isolate_->heap()->the_hole_value()));
  }
  for (int i = length - number_of_holes - 1; i >= 0; --i) {
    Address argument_slot = arguments_frame +
                            CommonFrameConstants::kFixedFrameSizeAboveFp +
                            i * kPointerSize;
    frame.Add(TranslatedValue::NewTagged(
        this, *reinterpret_cast<Object**>(argument_slot)));
  }
}

// We can't intermix stack decoding and allocations because the deoptimization
// infrastructure is not GC safe.
// Thus we build a temporary structure in malloced space.
// The TranslatedValue objects created correspond to the static translation
// instructions from the TranslationIterator, except for
// Translation::ARGUMENTS_ELEMENTS, where the number and values of the
// FixedArray elements depend on dynamic information from the optimized frame.
// Returns the number of expected nested translations from the
// TranslationIterator.
int TranslatedState::CreateNextTranslatedValue(
    int frame_index, TranslationIterator* iterator, FixedArray* literal_array,
    Address fp, RegisterValues* registers, FILE* trace_file) {
  disasm::NameConverter converter;

  TranslatedFrame& frame = frames_[frame_index];
  int value_index = static_cast<int>(frame.values_.size());

  Translation::Opcode opcode =
      static_cast<Translation::Opcode>(iterator->Next());
  switch (opcode) {
    case Translation::BEGIN:
    case Translation::INTERPRETED_FRAME:
    case Translation::ARGUMENTS_ADAPTOR_FRAME:
    case Translation::CONSTRUCT_STUB_FRAME:
    case Translation::JAVA_SCRIPT_BUILTIN_CONTINUATION_FRAME:
    case Translation::BUILTIN_CONTINUATION_FRAME:
    case Translation::UPDATE_FEEDBACK:
      // Peeled off before getting here.
      break;

    case Translation::DUPLICATED_OBJECT: {
      int object_id = iterator->Next();
      if (trace_file != nullptr) {
        PrintF(trace_file, "duplicated object #%d", object_id);
      }
      object_positions_.push_back(object_positions_[object_id]);
      TranslatedValue translated_value =
          TranslatedValue::NewDuplicateObject(this, object_id);
      frame.Add(translated_value);
      return translated_value.GetChildrenCount();
    }

    case Translation::ARGUMENTS_ELEMENTS: {
      CreateArgumentsType arguments_type =
          static_cast<CreateArgumentsType>(iterator->Next());
      CreateArgumentsElementsTranslatedValues(frame_index, fp, arguments_type,
                                              trace_file);
      return 0;
    }

    case Translation::ARGUMENTS_LENGTH: {
      CreateArgumentsType arguments_type =
          static_cast<CreateArgumentsType>(iterator->Next());
      int length;
      ComputeArgumentsPosition(fp, arguments_type, &length);
      if (trace_file != nullptr) {
        PrintF(trace_file, "arguments length field (type = %d, length = %d)",
               static_cast<uint8_t>(arguments_type), length);
      }
      frame.Add(TranslatedValue::NewInt32(this, length));
      return 0;
    }

    case Translation::CAPTURED_OBJECT: {
      int field_count = iterator->Next();
      int object_index = static_cast<int>(object_positions_.size());
      if (trace_file != nullptr) {
        PrintF(trace_file, "captured object #%d (length = %d)", object_index,
               field_count);
      }
      object_positions_.push_back({frame_index, value_index});
      TranslatedValue translated_value =
          TranslatedValue::NewDeferredObject(this, field_count, object_index);
      frame.Add(translated_value);
      return translated_value.GetChildrenCount();
    }

    case Translation::REGISTER: {
      int input_reg = iterator->Next();
      if (registers == nullptr) {
        TranslatedValue translated_value = TranslatedValue::NewInvalid(this);
        frame.Add(translated_value);
        return translated_value.GetChildrenCount();
      }
      intptr_t value = registers->GetRegister(input_reg);
      if (trace_file != nullptr) {
        PrintF(trace_file, "0x%08" V8PRIxPTR " ; %s ", value,
               converter.NameOfCPURegister(input_reg));
        reinterpret_cast<Object*>(value)->ShortPrint(trace_file);
      }
      TranslatedValue translated_value =
          TranslatedValue::NewTagged(this, reinterpret_cast<Object*>(value));
      frame.Add(translated_value);
      return translated_value.GetChildrenCount();
    }

    case Translation::INT32_REGISTER: {
      int input_reg = iterator->Next();
      if (registers == nullptr) {
        TranslatedValue translated_value = TranslatedValue::NewInvalid(this);
        frame.Add(translated_value);
        return translated_value.GetChildrenCount();
      }
      intptr_t value = registers->GetRegister(input_reg);
      if (trace_file != nullptr) {
        PrintF(trace_file, "%" V8PRIdPTR " ; %s ", value,
               converter.NameOfCPURegister(input_reg));
      }
      TranslatedValue translated_value =
          TranslatedValue::NewInt32(this, static_cast<int32_t>(value));
      frame.Add(translated_value);
      return translated_value.GetChildrenCount();
    }

    case Translation::UINT32_REGISTER: {
      int input_reg = iterator->Next();
      if (registers == nullptr) {
        TranslatedValue translated_value = TranslatedValue::NewInvalid(this);
        frame.Add(translated_value);
        return translated_value.GetChildrenCount();
      }
      intptr_t value = registers->GetRegister(input_reg);
      if (trace_file != nullptr) {
        PrintF(trace_file, "%" V8PRIuPTR " ; %s (uint)", value,
               converter.NameOfCPURegister(input_reg));
      }
      TranslatedValue translated_value =
          TranslatedValue::NewUInt32(this, static_cast<uint32_t>(value));
      frame.Add(translated_value);
      return translated_value.GetChildrenCount();
    }

    case Translation::BOOL_REGISTER: {
      int input_reg = iterator->Next();
      if (registers == nullptr) {
        TranslatedValue translated_value = TranslatedValue::NewInvalid(this);
        frame.Add(translated_value);
        return translated_value.GetChildrenCount();
      }
      intptr_t value = registers->GetRegister(input_reg);
      if (trace_file != nullptr) {
        PrintF(trace_file, "%" V8PRIdPTR " ; %s (bool)", value,
               converter.NameOfCPURegister(input_reg));
      }
      TranslatedValue translated_value =
          TranslatedValue::NewBool(this, static_cast<uint32_t>(value));
      frame.Add(translated_value);
      return translated_value.GetChildrenCount();
    }

    case Translation::FLOAT_REGISTER: {
      int input_reg = iterator->Next();
      if (registers == nullptr) {
        TranslatedValue translated_value = TranslatedValue::NewInvalid(this);
        frame.Add(translated_value);
        return translated_value.GetChildrenCount();
      }
      Float32 value = registers->GetFloatRegister(input_reg);
      if (trace_file != nullptr) {
        PrintF(
            trace_file, "%e ; %s (float)", value.get_scalar(),
            RegisterConfiguration::Default()->GetFloatRegisterName(input_reg));
      }
      TranslatedValue translated_value = TranslatedValue::NewFloat(this, value);
      frame.Add(translated_value);
      return translated_value.GetChildrenCount();
    }

    case Translation::DOUBLE_REGISTER: {
      int input_reg = iterator->Next();
      if (registers == nullptr) {
        TranslatedValue translated_value = TranslatedValue::NewInvalid(this);
        frame.Add(translated_value);
        return translated_value.GetChildrenCount();
      }
      Float64 value = registers->GetDoubleRegister(input_reg);
      if (trace_file != nullptr) {
        PrintF(
            trace_file, "%e ; %s (double)", value.get_scalar(),
            RegisterConfiguration::Default()->GetDoubleRegisterName(input_reg));
      }
      TranslatedValue translated_value =
          TranslatedValue::NewDouble(this, value);
      frame.Add(translated_value);
      return translated_value.GetChildrenCount();
    }

    case Translation::STACK_SLOT: {
      int slot_offset =
          OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next());
      intptr_t value = *(reinterpret_cast<intptr_t*>(fp + slot_offset));
      if (trace_file != nullptr) {
        PrintF(trace_file, "0x%08" V8PRIxPTR " ; [fp %c %d] ", value,
               slot_offset < 0 ? '-' : '+', std::abs(slot_offset));
        reinterpret_cast<Object*>(value)->ShortPrint(trace_file);
      }
      TranslatedValue translated_value =
          TranslatedValue::NewTagged(this, reinterpret_cast<Object*>(value));
      frame.Add(translated_value);
      return translated_value.GetChildrenCount();
    }

    case Translation::INT32_STACK_SLOT: {
      int slot_offset =
          OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next());
      uint32_t value = GetUInt32Slot(fp, slot_offset);
      if (trace_file != nullptr) {
        PrintF(trace_file, "%d ; (int) [fp %c %d] ",
               static_cast<int32_t>(value), slot_offset < 0 ? '-' : '+',
               std::abs(slot_offset));
      }
      TranslatedValue translated_value = TranslatedValue::NewInt32(this, value);
      frame.Add(translated_value);
      return translated_value.GetChildrenCount();
    }

    case Translation::UINT32_STACK_SLOT: {
      int slot_offset =
          OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next());
      uint32_t value = GetUInt32Slot(fp, slot_offset);
      if (trace_file != nullptr) {
        PrintF(trace_file, "%u ; (uint) [fp %c %d] ", value,
               slot_offset < 0 ? '-' : '+', std::abs(slot_offset));
      }
      TranslatedValue translated_value =
          TranslatedValue::NewUInt32(this, value);
      frame.Add(translated_value);
      return translated_value.GetChildrenCount();
    }

    case Translation::BOOL_STACK_SLOT: {
      int slot_offset =
          OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next());
      uint32_t value = GetUInt32Slot(fp, slot_offset);
      if (trace_file != nullptr) {
        PrintF(trace_file, "%u ; (bool) [fp %c %d] ", value,
               slot_offset < 0 ? '-' : '+', std::abs(slot_offset));
      }
      TranslatedValue translated_value = TranslatedValue::NewBool(this, value);
      frame.Add(translated_value);
      return translated_value.GetChildrenCount();
    }

    case Translation::FLOAT_STACK_SLOT: {
      int slot_offset =
          OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next());
      Float32 value = GetFloatSlot(fp, slot_offset);
      if (trace_file != nullptr) {
        PrintF(trace_file, "%e ; (float) [fp %c %d] ", value.get_scalar(),
               slot_offset < 0 ? '-' : '+', std::abs(slot_offset));
      }
      TranslatedValue translated_value = TranslatedValue::NewFloat(this, value);
      frame.Add(translated_value);
      return translated_value.GetChildrenCount();
    }

    case Translation::DOUBLE_STACK_SLOT: {
      int slot_offset =
          OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next());
      Float64 value = GetDoubleSlot(fp, slot_offset);
      if (trace_file != nullptr) {
        PrintF(trace_file, "%e ; (double) [fp %c %d] ", value.get_scalar(),
               slot_offset < 0 ? '-' : '+', std::abs(slot_offset));
      }
      TranslatedValue translated_value =
          TranslatedValue::NewDouble(this, value);
      frame.Add(translated_value);
      return translated_value.GetChildrenCount();
    }

    case Translation::LITERAL: {
      int literal_index = iterator->Next();
      Object* value = literal_array->get(literal_index);
      if (trace_file != nullptr) {
        PrintF(trace_file, "0x%08" V8PRIxPTR " ; (literal %d) ",
               reinterpret_cast<intptr_t>(value), literal_index);
        reinterpret_cast<Object*>(value)->ShortPrint(trace_file);
      }

      TranslatedValue translated_value =
          TranslatedValue::NewTagged(this, value);
      frame.Add(translated_value);
      return translated_value.GetChildrenCount();
    }
  }

  FATAL("We should never get here - unexpected deopt info.");
}

TranslatedState::TranslatedState(const JavaScriptFrame* frame) {
  int deopt_index = Safepoint::kNoDeoptimizationIndex;
  DeoptimizationData* data =
      static_cast<const OptimizedFrame*>(frame)->GetDeoptimizationData(
          &deopt_index);
  DCHECK(data != nullptr && deopt_index != Safepoint::kNoDeoptimizationIndex);
  TranslationIterator it(data->TranslationByteArray(),
                         data->TranslationIndex(deopt_index)->value());
  Init(frame->fp(), &it, data->LiteralArray(), nullptr /* registers */,
       nullptr /* trace file */,
       frame->function()->shared()->internal_formal_parameter_count());
}

void TranslatedState::Init(Address input_frame_pointer,
                           TranslationIterator* iterator,
                           FixedArray* literal_array, RegisterValues* registers,
                           FILE* trace_file, int formal_parameter_count) {
  DCHECK(frames_.empty());

  formal_parameter_count_ = formal_parameter_count;

  isolate_ = literal_array->GetIsolate();
  // Read out the 'header' translation.
  Translation::Opcode opcode =
      static_cast<Translation::Opcode>(iterator->Next());
  CHECK(opcode == Translation::BEGIN);

  int count = iterator->Next();
  frames_.reserve(count);
  iterator->Next();  // Drop JS frames count.
  int update_feedback_count = iterator->Next();
  CHECK_GE(update_feedback_count, 0);
  CHECK_LE(update_feedback_count, 1);

  if (update_feedback_count == 1) {
    ReadUpdateFeedback(iterator, literal_array, trace_file);
  }

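  // {nested_counts} tracks how many values are still outstanding at each
  // enclosing object level while the fields of captured objects are consumed.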
  std::stack<int> nested_counts;

  // Read the frames
  for (int frame_index = 0; frame_index < count; frame_index++) {
    // Read the frame descriptor.
    frames_.push_back(CreateNextTranslatedFrame(
        iterator, literal_array, input_frame_pointer, trace_file));
    TranslatedFrame& frame = frames_.back();

    // Read the values.
    int values_to_process = frame.GetValueCount();
    while (values_to_process > 0 || !nested_counts.empty()) {
      if (trace_file != nullptr) {
        if (nested_counts.empty()) {
          // For top level values, print the value number.
          PrintF(trace_file, "    %3i: ",
                 frame.GetValueCount() - values_to_process);
        } else {
          // Take care of indenting for nested values.
          PrintF(trace_file, "         ");
          for (size_t j = 0; j < nested_counts.size(); j++) {
            PrintF(trace_file, "  ");
          }
        }
      }

      int nested_count =
          CreateNextTranslatedValue(frame_index, iterator, literal_array,
                                    input_frame_pointer, registers, trace_file);

      if (trace_file != nullptr) {
        PrintF(trace_file, "\n");
      }

      // Update the value count and resolve the nesting.
      values_to_process--;
      if (nested_count > 0) {
        nested_counts.push(values_to_process);
        values_to_process = nested_count;
      } else {
        while (values_to_process == 0 && !nested_counts.empty()) {
          values_to_process = nested_counts.top();
          nested_counts.pop();
        }
      }
    }
  }

  CHECK(!iterator->HasNext() ||
        static_cast<Translation::Opcode>(iterator->Next()) ==
            Translation::BEGIN);
}

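// Converts all values read so far into handles, remembers the frame pointer
// and merges in any objects that were already materialized for this frame on
// an earlier deoptimization.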
void TranslatedState::Prepare(Address stack_frame_pointer) {
  for (auto& frame : frames_) frame.Handlify();

  if (feedback_vector_ != nullptr) {
    feedback_vector_handle_ =
        Handle<FeedbackVector>(feedback_vector_, isolate());
    feedback_vector_ = nullptr;
  }
  stack_frame_pointer_ = stack_frame_pointer;

  UpdateFromPreviouslyMaterializedObjects();
}

TranslatedValue* TranslatedState::GetValueByObjectIndex(int object_index) {
  CHECK_LT(static_cast<size_t>(object_index), object_positions_.size());
  TranslatedState::ObjectPosition pos = object_positions_[object_index];
  return &(frames_[pos.frame_index_].values_[pos.value_index_]);
}

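// Initializes the given captured object and, via a worklist, every object
// reachable from it, then returns its storage. This must not allocate, so the
// backing storage is expected to exist already (see EnsureObjectAllocatedAt).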
Handle<Object> TranslatedState::InitializeObjectAt(TranslatedValue* slot) {
  slot = ResolveCapturedObject(slot);

  DisallowHeapAllocation no_allocation;
  if (slot->materialization_state() != TranslatedValue::kFinished) {
    std::stack<int> worklist;
    worklist.push(slot->object_index());
    slot->mark_finished();

    while (!worklist.empty()) {
      int index = worklist.top();
      worklist.pop();
      InitializeCapturedObjectAt(index, &worklist, no_allocation);
    }
  }
  return slot->GetStorage();
}

void TranslatedState::InitializeCapturedObjectAt(
    int object_index, std::stack<int>* worklist,
    const DisallowHeapAllocation& no_allocation) {
  CHECK_LT(static_cast<size_t>(object_index), object_positions_.size());
  TranslatedState::ObjectPosition pos = object_positions_[object_index];
  int value_index = pos.value_index_;

  TranslatedFrame* frame = &(frames_[pos.frame_index_]);
  TranslatedValue* slot = &(frame->values_[value_index]);
  value_index++;

  CHECK_EQ(TranslatedValue::kFinished, slot->materialization_state());
  CHECK_EQ(TranslatedValue::kCapturedObject, slot->kind());

  // Ensure all fields are initialized.
  int children_init_index = value_index;
  for (int i = 0; i < slot->GetChildrenCount(); i++) {
    // If the field is an object that has not been initialized yet, queue it
    // for initialization (and mark it as such).
    TranslatedValue* child_slot = frame->ValueAt(children_init_index);
    if (child_slot->kind() == TranslatedValue::kCapturedObject ||
        child_slot->kind() == TranslatedValue::kDuplicatedObject) {
      child_slot = ResolveCapturedObject(child_slot);
      if (child_slot->materialization_state() != TranslatedValue::kFinished) {
        DCHECK_EQ(TranslatedValue::kAllocated,
                  child_slot->materialization_state());
        worklist->push(child_slot->object_index());
        child_slot->mark_finished();
      }
    }
    SkipSlots(1, frame, &children_init_index);
  }

  // Read the map.
  // The map should never be materialized, so let us check we already have
  // an existing object here.
  CHECK_EQ(frame->values_[value_index].kind(), TranslatedValue::kTagged);
  Handle<Map> map = Handle<Map>::cast(frame->values_[value_index].GetValue());
  CHECK(map->IsMap());
  value_index++;

  // Handle the special cases.
  switch (map->instance_type()) {
    case MUTABLE_HEAP_NUMBER_TYPE:
    case FIXED_DOUBLE_ARRAY_TYPE:
      return;

    case FIXED_ARRAY_TYPE:
    case BOILERPLATE_DESCRIPTION_TYPE:
    case HASH_TABLE_TYPE:
    case PROPERTY_ARRAY_TYPE:
    case CONTEXT_EXTENSION_TYPE:
      InitializeObjectWithTaggedFieldsAt(frame, &value_index, slot, map,
                                         no_allocation);
      break;

    default:
      CHECK(map->IsJSObjectMap());
      InitializeJSObjectAt(frame, &value_index, slot, map, no_allocation);
      break;
  }
  CHECK_EQ(value_index, children_init_index);
}

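// Allocation phase of object materialization: walks the captured object and
// everything reachable from it and makes sure backing storage exists for each
// object, without writing any field values yet (that is done later by
// InitializeObjectAt).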
void TranslatedState::EnsureObjectAllocatedAt(TranslatedValue* slot) {
  slot = ResolveCapturedObject(slot);

  if (slot->materialization_state() == TranslatedValue::kUninitialized) {
    std::stack<int> worklist;
    worklist.push(slot->object_index());
    slot->mark_allocated();

    while (!worklist.empty()) {
      int index = worklist.top();
      worklist.pop();
      EnsureCapturedObjectAllocatedAt(index, &worklist);
    }
  }
}

void TranslatedState::MaterializeFixedDoubleArray(TranslatedFrame* frame,
                                                  int* value_index,
                                                  TranslatedValue* slot,
                                                  Handle<Map> map) {
  int length = Smi::cast(frame->values_[*value_index].GetRawValue())->value();
  (*value_index)++;
  Handle<FixedDoubleArray> array = Handle<FixedDoubleArray>::cast(
      isolate()->factory()->NewFixedDoubleArray(length));
  CHECK_GT(length, 0);
  for (int i = 0; i < length; i++) {
    CHECK_NE(TranslatedValue::kCapturedObject,
             frame->values_[*value_index].kind());
    Handle<Object> value = frame->values_[*value_index].GetValue();
    if (value->IsNumber()) {
      array->set(i, value->Number());
    } else {
      CHECK(value.is_identical_to(isolate()->factory()->the_hole_value()));
      array->set_the_hole(isolate(), i);
    }
    (*value_index)++;
  }
  slot->set_storage(array);
}

void TranslatedState::MaterializeMutableHeapNumber(TranslatedFrame* frame,
                                                   int* value_index,
                                                   TranslatedValue* slot) {
  CHECK_NE(TranslatedValue::kCapturedObject,
           frame->values_[*value_index].kind());
  Handle<Object> value = frame->values_[*value_index].GetValue();
  Handle<HeapNumber> box;
  CHECK(value->IsNumber());
  box = isolate()->factory()->NewHeapNumber(value->Number(), MUTABLE);
  (*value_index)++;
  slot->set_storage(box);
}

namespace {

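// Markers stored in the temporary ByteArray backing store (allocated by
// AllocateStorageFor) that record, per field slot, how the field has to be
// written back: as a tagged value, as a raw (unboxed) double, or as a mutable
// heap number.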
enum DoubleStorageKind : uint8_t {
  kStoreTagged,
  kStoreUnboxedDouble,
  kStoreMutableHeapNumber,
};

}  // namespace

void TranslatedState::SkipSlots(int slots_to_skip, TranslatedFrame* frame,
                                int* value_index) {
  while (slots_to_skip > 0) {
    TranslatedValue* slot = &(frame->values_[*value_index]);
    (*value_index)++;
    slots_to_skip--;

    if (slot->kind() == TranslatedValue::kCapturedObject) {
      slots_to_skip += slot->GetChildrenCount();
    }
  }
}

void TranslatedState::EnsureCapturedObjectAllocatedAt(
    int object_index, std::stack<int>* worklist) {
  CHECK_LT(static_cast<size_t>(object_index), object_positions_.size());
  TranslatedState::ObjectPosition pos = object_positions_[object_index];
  int value_index = pos.value_index_;

  TranslatedFrame* frame = &(frames_[pos.frame_index_]);
  TranslatedValue* slot = &(frame->values_[value_index]);
  value_index++;

  CHECK_EQ(TranslatedValue::kAllocated, slot->materialization_state());
  CHECK_EQ(TranslatedValue::kCapturedObject, slot->kind());

  // Read the map.
  // The map should never be materialized, so let us check we already have
  // an existing object here.
  CHECK_EQ(frame->values_[value_index].kind(), TranslatedValue::kTagged);
  Handle<Map> map = Handle<Map>::cast(frame->values_[value_index].GetValue());
  CHECK(map->IsMap());
  value_index++;

  // Handle the special cases.
  switch (map->instance_type()) {
    case FIXED_DOUBLE_ARRAY_TYPE:
      // Materialize (i.e. allocate&initialize) the array and return since
      // there is no need to process the children.
      return MaterializeFixedDoubleArray(frame, &value_index, slot, map);

    case MUTABLE_HEAP_NUMBER_TYPE:
      // Materialize (i.e. allocate&initialize) the heap number and return.
      // There is no need to process the children.
      return MaterializeMutableHeapNumber(frame, &value_index, slot);

    case FIXED_ARRAY_TYPE:
    case HASH_TABLE_TYPE: {
      // Check we have the right size.
      int array_length =
          Smi::cast(frame->values_[value_index].GetRawValue())->value();

      int instance_size = FixedArray::SizeFor(array_length);
      CHECK_EQ(instance_size, slot->GetChildrenCount() * kPointerSize);

      // Canonicalize empty fixed array.
      if (*map == isolate()->heap()->empty_fixed_array()->map() &&
          array_length == 0) {
        slot->set_storage(isolate()->factory()->empty_fixed_array());
      } else {
        slot->set_storage(AllocateStorageFor(slot));
      }

      // Make sure all the remaining children (after the map) are allocated.
      return EnsureChildrenAllocated(slot->GetChildrenCount() - 1, frame,
                                     &value_index, worklist);
    }

    case PROPERTY_ARRAY_TYPE: {
      // Check we have the right size.
      int length_or_hash =
          Smi::cast(frame->values_[value_index].GetRawValue())->value();
      int array_length = PropertyArray::LengthField::decode(length_or_hash);
      int instance_size = PropertyArray::SizeFor(array_length);
      CHECK_EQ(instance_size, slot->GetChildrenCount() * kPointerSize);

      slot->set_storage(AllocateStorageFor(slot));
      // Make sure all the remaining children (after the map) are allocated.
      return EnsureChildrenAllocated(slot->GetChildrenCount() - 1, frame,
                                     &value_index, worklist);
    }

    case CONTEXT_EXTENSION_TYPE: {
      CHECK_EQ(map->instance_size(), slot->GetChildrenCount() * kPointerSize);
      slot->set_storage(AllocateStorageFor(slot));
      // Make sure all the remaining children (after the map) are allocated.
      return EnsureChildrenAllocated(slot->GetChildrenCount() - 1, frame,
                                     &value_index, worklist);
    }

    default:
      CHECK(map->IsJSObjectMap());
      EnsureJSObjectAllocated(slot, map);
      TranslatedValue* properties_slot = &(frame->values_[value_index]);
      value_index++;
      if (properties_slot->kind() == TranslatedValue::kCapturedObject) {
        // If we are materializing the property array, make sure we put
        // the mutable heap numbers at the right places.
        EnsurePropertiesAllocatedAndMarked(properties_slot, map);
        EnsureChildrenAllocated(properties_slot->GetChildrenCount(), frame,
                                &value_index, worklist);
      }
      // Make sure all the remaining children (after the map and properties) are
      // allocated.
      return EnsureChildrenAllocated(slot->GetChildrenCount() - 2, frame,
                                     &value_index, worklist);
  }
  UNREACHABLE();
}

void TranslatedState::EnsureChildrenAllocated(int count, TranslatedFrame* frame,
                                              int* value_index,
                                              std::stack<int>* worklist) {
  // Ensure all children are allocated.
  for (int i = 0; i < count; i++) {
    // If the field is an object that has not been allocated yet, queue it
    // for allocation (and mark it as such).
    TranslatedValue* child_slot = frame->ValueAt(*value_index);
    if (child_slot->kind() == TranslatedValue::kCapturedObject ||
        child_slot->kind() == TranslatedValue::kDuplicatedObject) {
      child_slot = ResolveCapturedObject(child_slot);
      if (child_slot->materialization_state() ==
          TranslatedValue::kUninitialized) {
        worklist->push(child_slot->object_index());
        child_slot->mark_allocated();
      }
    } else {
      // Make sure the simple values (heap numbers, etc.) are properly
      // initialized.
      child_slot->MaterializeSimple();
    }
    SkipSlots(1, frame, value_index);
  }
}

void TranslatedState::EnsurePropertiesAllocatedAndMarked(
    TranslatedValue* properties_slot, Handle<Map> map) {
  CHECK_EQ(TranslatedValue::kUninitialized,
           properties_slot->materialization_state());

  Handle<ByteArray> object_storage = AllocateStorageFor(properties_slot);
  properties_slot->mark_allocated();
  properties_slot->set_storage(object_storage);

  // Set markers for the double properties.
  Handle<DescriptorArray> descriptors(map->instance_descriptors());
  int field_count = map->NumberOfOwnDescriptors();
  for (int i = 0; i < field_count; i++) {
    FieldIndex index = FieldIndex::ForDescriptor(*map, i);
    if (descriptors->GetDetails(i).representation().IsDouble() &&
        !index.is_inobject()) {
      CHECK(!map->IsUnboxedDoubleField(index));
      int outobject_index = index.outobject_array_index();
      int array_index = outobject_index * kPointerSize;
      object_storage->set(array_index, kStoreMutableHeapNumber);
    }
  }
}

Handle<ByteArray> TranslatedState::AllocateStorageFor(TranslatedValue* slot) {
  int allocate_size =
      ByteArray::LengthFor(slot->GetChildrenCount() * kPointerSize);
  // It is important to allocate all the objects tenured so that the marker
  // does not visit them.
  Handle<ByteArray> object_storage =
      isolate()->factory()->NewByteArray(allocate_size, TENURED);
  for (int i = 0; i < object_storage->length(); i++) {
    object_storage->set(i, kStoreTagged);
  }
  return object_storage;
}

void TranslatedState::EnsureJSObjectAllocated(TranslatedValue* slot,
                                              Handle<Map> map) {
  CHECK_EQ(map->instance_size(), slot->GetChildrenCount() * kPointerSize);

  Handle<ByteArray> object_storage = AllocateStorageFor(slot);
  // Now we handle the interesting (JSObject) case.
  Handle<DescriptorArray> descriptors(map->instance_descriptors());
  int field_count = map->NumberOfOwnDescriptors();

  // Set markers for the double properties.
  for (int i = 0; i < field_count; i++) {
    FieldIndex index = FieldIndex::ForDescriptor(*map, i);
    if (descriptors->GetDetails(i).representation().IsDouble() &&
        index.is_inobject()) {
      CHECK_GE(index.index(), FixedArray::kHeaderSize / kPointerSize);
      int array_index = index.index() * kPointerSize - FixedArray::kHeaderSize;
      uint8_t marker = map->IsUnboxedDoubleField(index)
                           ? kStoreUnboxedDouble
                           : kStoreMutableHeapNumber;
      object_storage->set(array_index, marker);
    }
  }
  slot->set_storage(object_storage);
}

Handle<Object> TranslatedState::GetValueAndAdvance(TranslatedFrame* frame,
                                                   int* value_index) {
  TranslatedValue* slot = frame->ValueAt(*value_index);
  SkipSlots(1, frame, value_index);
  if (slot->kind() == TranslatedValue::kDuplicatedObject) {
    slot = ResolveCapturedObject(slot);
  }
  CHECK_NE(TranslatedValue::kUninitialized, slot->materialization_state());
  return slot->GetStorage();
}

void TranslatedState::InitializeJSObjectAt(
    TranslatedFrame* frame, int* value_index, TranslatedValue* slot,
    Handle<Map> map, const DisallowHeapAllocation& no_allocation) {
  Handle<HeapObject> object_storage = Handle<HeapObject>::cast(slot->storage_);
  DCHECK_EQ(TranslatedValue::kCapturedObject, slot->kind());

  // The object should have at least a map and some payload.
  CHECK_GE(slot->GetChildrenCount(), 2);

  // Notify the concurrent marker about the layout change.
  isolate()->heap()->NotifyObjectLayoutChange(
      *object_storage, slot->GetChildrenCount() * kPointerSize, no_allocation);

  // Fill the property array field.
  {
    Handle<Object> properties = GetValueAndAdvance(frame, value_index);
    WRITE_FIELD(*object_storage, JSObject::kPropertiesOrHashOffset,
                *properties);
    WRITE_BARRIER(isolate()->heap(), *object_storage,
                  JSObject::kPropertiesOrHashOffset, *properties);
  }

  // For all the other fields we first look at the fixed array and check the
  // marker to see if we store an unboxed double.
  DCHECK_EQ(kPointerSize, JSObject::kPropertiesOrHashOffset);
  for (int i = 2; i < slot->GetChildrenCount(); i++) {
    // Initialize and extract the value from its slot.
    Handle<Object> field_value = GetValueAndAdvance(frame, value_index);

    // Read out the marker and ensure the field is consistent with
    // what the markers in the storage say (note that all heap numbers
    // should be fully initialized by now).
    int offset = i * kPointerSize;
    uint8_t marker = READ_UINT8_FIELD(*object_storage, offset);
    if (marker == kStoreUnboxedDouble) {
      double double_field_value;
      if (field_value->IsSmi()) {
        double_field_value = Smi::cast(*field_value)->value();
      } else {
        CHECK(field_value->IsHeapNumber());
        double_field_value = HeapNumber::cast(*field_value)->value();
      }
      WRITE_DOUBLE_FIELD(*object_storage, offset, double_field_value);
    } else if (marker == kStoreMutableHeapNumber) {
      CHECK(field_value->IsMutableHeapNumber());
      WRITE_FIELD(*object_storage, offset, *field_value);
      WRITE_BARRIER(isolate()->heap(), *object_storage, offset, *field_value);
    } else {
      CHECK_EQ(kStoreTagged, marker);
      WRITE_FIELD(*object_storage, offset, *field_value);
      WRITE_BARRIER(isolate()->heap(), *object_storage, offset, *field_value);
    }
  }
  object_storage->synchronized_set_map(*map);
}

void TranslatedState::InitializeObjectWithTaggedFieldsAt(
    TranslatedFrame* frame, int* value_index, TranslatedValue* slot,
    Handle<Map> map, const DisallowHeapAllocation& no_allocation) {
  Handle<HeapObject> object_storage = Handle<HeapObject>::cast(slot->storage_);

  // Skip the writes if we already have the canonical empty fixed array.
  if (*object_storage == isolate()->heap()->empty_fixed_array()) {
    CHECK_EQ(2, slot->GetChildrenCount());
    Handle<Object> length_value = GetValueAndAdvance(frame, value_index);
    CHECK_EQ(*length_value, Smi::FromInt(0));
    return;
  }

  // Notify the concurrent marker about the layout change.
  isolate()->heap()->NotifyObjectLayoutChange(
      *object_storage, slot->GetChildrenCount() * kPointerSize, no_allocation);

  // Write the fields to the object.
  for (int i = 1; i < slot->GetChildrenCount(); i++) {
    Handle<Object> field_value = GetValueAndAdvance(frame, value_index);
    int offset = i * kPointerSize;
    uint8_t marker = READ_UINT8_FIELD(*object_storage, offset);
    if (i > 1 && marker == kStoreMutableHeapNumber) {
      CHECK(field_value->IsMutableHeapNumber());
    } else {
      CHECK(marker == kStoreTagged || i == 1);
      CHECK(!field_value->IsMutableHeapNumber());
    }

    WRITE_FIELD(*object_storage, offset, *field_value);
    WRITE_BARRIER(isolate()->heap(), *object_storage, offset, *field_value);
  }

  object_storage->synchronized_set_map(*map);
}

TranslatedValue* TranslatedState::ResolveCapturedObject(TranslatedValue* slot) {
  while (slot->kind() == TranslatedValue::kDuplicatedObject) {
    slot = GetValueByObjectIndex(slot->object_index());
  }
  CHECK_EQ(TranslatedValue::kCapturedObject, slot->kind());
  return slot;
}

TranslatedFrame* TranslatedState::GetFrameFromJSFrameIndex(int jsframe_index) {
  for (size_t i = 0; i < frames_.size(); i++) {
    if (frames_[i].kind() == TranslatedFrame::kInterpretedFunction ||
        frames_[i].kind() == TranslatedFrame::kJavaScriptBuiltinContinuation) {
      if (jsframe_index > 0) {
        jsframe_index--;
      } else {
        return &(frames_[i]);
      }
    }
  }
  return nullptr;
}

TranslatedFrame* TranslatedState::GetArgumentsInfoFromJSFrameIndex(
    int jsframe_index, int* args_count) {
  for (size_t i = 0; i < frames_.size(); i++) {
    if (frames_[i].kind() == TranslatedFrame::kInterpretedFunction ||
        frames_[i].kind() == TranslatedFrame::kJavaScriptBuiltinContinuation) {
      if (jsframe_index > 0) {
        jsframe_index--;
      } else {
        // We have the JS function frame, now check if it has an arguments
        // adaptor.
        if (i > 0 &&
            frames_[i - 1].kind() == TranslatedFrame::kArgumentsAdaptor) {
          *args_count = frames_[i - 1].height();
          return &(frames_[i - 1]);
        }
        *args_count =
            frames_[i].shared_info()->internal_formal_parameter_count() + 1;
        return &(frames_[i]);
      }
    }
  }
  return nullptr;
}

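// Records the values of the materialized objects for this frame in the
// MaterializedObjectStore (keyed by the frame pointer). If this is the first
// store for the frame and a value actually changed, the function is
// deoptimized so the stored objects are used when the frame is rebuilt.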
void TranslatedState::StoreMaterializedValuesAndDeopt(JavaScriptFrame* frame) {
  MaterializedObjectStore* materialized_store =
      isolate_->materialized_object_store();
  Handle<FixedArray> previously_materialized_objects =
      materialized_store->Get(stack_frame_pointer_);

  Handle<Object> marker = isolate_->factory()->arguments_marker();

  int length = static_cast<int>(object_positions_.size());
  bool new_store = false;
  if (previously_materialized_objects.is_null()) {
    previously_materialized_objects =
        isolate_->factory()->NewFixedArray(length, TENURED);
    for (int i = 0; i < length; i++) {
      previously_materialized_objects->set(i, *marker);
    }
    new_store = true;
  }

  CHECK_EQ(length, previously_materialized_objects->length());

  bool value_changed = false;
  for (int i = 0; i < length; i++) {
    TranslatedState::ObjectPosition pos = object_positions_[i];
    TranslatedValue* value_info =
        &(frames_[pos.frame_index_].values_[pos.value_index_]);

    CHECK(value_info->IsMaterializedObject());

    // Skip duplicate objects (i.e., those that point to some
    // other object id).
    if (value_info->object_index() != i) continue;

    Handle<Object> value(value_info->GetRawValue(), isolate_);

    if (!value.is_identical_to(marker)) {
      if (previously_materialized_objects->get(i) == *marker) {
        previously_materialized_objects->set(i, *value);
        value_changed = true;
      } else {
        CHECK(previously_materialized_objects->get(i) == *value);
      }
    }
  }
  if (new_store && value_changed) {
    materialized_store->Set(stack_frame_pointer_,
                            previously_materialized_objects);
    CHECK_EQ(frames_[0].kind(), TranslatedFrame::kInterpretedFunction);
    CHECK_EQ(frame->function(), frames_[0].front().GetRawValue());
    Deoptimizer::DeoptimizeFunction(frame->function(), frame->LookupCode());
  }
}

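// Injects objects that were materialized for this frame earlier back into the
// translated state, so that captured objects resolve to the already existing
// heap objects rather than to fresh copies.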
void TranslatedState::UpdateFromPreviouslyMaterializedObjects() {
  MaterializedObjectStore* materialized_store =
      isolate_->materialized_object_store();
  Handle<FixedArray> previously_materialized_objects =
      materialized_store->Get(stack_frame_pointer_);

  // If we have no previously materialized objects, there is nothing to do.
  if (previously_materialized_objects.is_null()) return;

  Handle<Object> marker = isolate_->factory()->arguments_marker();

  int length = static_cast<int>(object_positions_.size());
  CHECK_EQ(length, previously_materialized_objects->length());

  for (int i = 0; i < length; i++) {
    // For previously materialized objects, inject their values into the
    // translated values.
    if (previously_materialized_objects->get(i) != *marker) {
      TranslatedState::ObjectPosition pos = object_positions_[i];
      TranslatedValue* value_info =
          &(frames_[pos.frame_index_].values_[pos.value_index_]);
      CHECK(value_info->IsMaterializedObject());

      if (value_info->kind() == TranslatedValue::kCapturedObject) {
        value_info->set_initialized_storage(
            Handle<Object>(previously_materialized_objects->get(i), isolate_));
      }
    }
  }
}

void TranslatedState::VerifyMaterializedObjects() {
#if VERIFY_HEAP
  int length = static_cast<int>(object_positions_.size());
  for (int i = 0; i < length; i++) {
    TranslatedValue* slot = GetValueByObjectIndex(i);
    if (slot->kind() == TranslatedValue::kCapturedObject) {
      CHECK_EQ(slot, GetValueByObjectIndex(slot->object_index()));
      if (slot->materialization_state() == TranslatedValue::kFinished) {
        slot->GetStorage()->ObjectVerify();
      } else {
        CHECK_EQ(slot->materialization_state(),
                 TranslatedValue::kUninitialized);
      }
    }
  }
#endif
}

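// If the translation carried an UPDATE_FEEDBACK entry, marks the recorded
// feedback slot so that further speculation on it is disallowed, and returns
// whether such an update took place.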
bool TranslatedState::DoUpdateFeedback() {
  if (!feedback_vector_handle_.is_null()) {
    CHECK(!feedback_slot_.IsInvalid());
    isolate()->CountUsage(v8::Isolate::kDeoptimizerDisableSpeculation);
    FeedbackNexus nexus(feedback_vector_handle_, feedback_slot_);
    nexus.SetSpeculationMode(SpeculationMode::kDisallowSpeculation);
    return true;
  }
  return false;
}

void TranslatedState::ReadUpdateFeedback(TranslationIterator* iterator,
                                         FixedArray* literal_array,
                                         FILE* trace_file) {
  CHECK_EQ(Translation::UPDATE_FEEDBACK, iterator->Next());
  feedback_vector_ = FeedbackVector::cast(literal_array->get(iterator->Next()));
  feedback_slot_ = FeedbackSlot(iterator->Next());
  if (trace_file != nullptr) {
    PrintF(trace_file, "  reading FeedbackVector (slot %d)\n",
           feedback_slot_.ToInt());
  }
}

}  // namespace internal
}  // namespace v8

// Undefine the heap manipulation macros.
#include "src/objects/object-macros-undef.h"