assembler.cc 59.8 KB
Newer Older
1
// Copyright (c) 1994-2006 Sun Microsystems Inc.
2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// - Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// - Redistribution in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// - Neither the name of Sun Microsystems or the names of contributors may
// be used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
// IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// The original source code covered by the above license above has been
// modified significantly by Google Inc.
33
// Copyright 2012 the V8 project authors. All rights reserved.
34

35
#include "src/assembler.h"
36

37
#include <cmath>
38
#include "src/api.h"
39
#include "src/base/cpu.h"
40
#include "src/base/functional.h"
41
#include "src/base/lazy-instance.h"
42
#include "src/base/platform/platform.h"
43
#include "src/base/utils/random-number-generator.h"
44
#include "src/builtins.h"
45
#include "src/codegen.h"
46
#include "src/counters.h"
47
#include "src/debug/debug.h"
48 49
#include "src/deoptimizer.h"
#include "src/execution.h"
50
#include "src/ic/ic.h"
51
#include "src/ic/stub-cache.h"
52
#include "src/profiler/cpu-profiler.h"
53 54 55
#include "src/regexp/jsregexp.h"
#include "src/regexp/regexp-macro-assembler.h"
#include "src/regexp/regexp-stack.h"
56
#include "src/register-configuration.h"
57
#include "src/runtime/runtime.h"
58
#include "src/simulator.h"  // For flushing instruction cache.
59
#include "src/snapshot/serialize.h"
60
#include "src/token.h"
61 62

#if V8_TARGET_ARCH_IA32
63
#include "src/ia32/assembler-ia32-inl.h"  // NOLINT
64
#elif V8_TARGET_ARCH_X64
65
#include "src/x64/assembler-x64-inl.h"  // NOLINT
66
#elif V8_TARGET_ARCH_ARM64
67
#include "src/arm64/assembler-arm64-inl.h"  // NOLINT
68
#elif V8_TARGET_ARCH_ARM
69
#include "src/arm/assembler-arm-inl.h"  // NOLINT
70 71
#elif V8_TARGET_ARCH_PPC
#include "src/ppc/assembler-ppc-inl.h"  // NOLINT
72
#elif V8_TARGET_ARCH_MIPS
73
#include "src/mips/assembler-mips-inl.h"  // NOLINT
74 75
#elif V8_TARGET_ARCH_MIPS64
#include "src/mips64/assembler-mips64-inl.h"  // NOLINT
danno@chromium.org's avatar
danno@chromium.org committed
76
#elif V8_TARGET_ARCH_X87
77
#include "src/x87/assembler-x87-inl.h"  // NOLINT
78 79 80 81
#else
#error "Unknown architecture."
#endif

lrn@chromium.org's avatar
lrn@chromium.org committed
82
// Include native regexp-macro-assembler.
83
#ifndef V8_INTERPRETED_REGEXP
lrn@chromium.org's avatar
lrn@chromium.org committed
84
#if V8_TARGET_ARCH_IA32
85
#include "src/regexp/ia32/regexp-macro-assembler-ia32.h"  // NOLINT
lrn@chromium.org's avatar
lrn@chromium.org committed
86
#elif V8_TARGET_ARCH_X64
87
#include "src/regexp/x64/regexp-macro-assembler-x64.h"  // NOLINT
88
#elif V8_TARGET_ARCH_ARM64
89
#include "src/regexp/arm64/regexp-macro-assembler-arm64.h"  // NOLINT
lrn@chromium.org's avatar
lrn@chromium.org committed
90
#elif V8_TARGET_ARCH_ARM
91
#include "src/regexp/arm/regexp-macro-assembler-arm.h"  // NOLINT
92
#elif V8_TARGET_ARCH_PPC
93
#include "src/regexp/ppc/regexp-macro-assembler-ppc.h"  // NOLINT
94
#elif V8_TARGET_ARCH_MIPS
95
#include "src/regexp/mips/regexp-macro-assembler-mips.h"  // NOLINT
96
#elif V8_TARGET_ARCH_MIPS64
97
#include "src/regexp/mips64/regexp-macro-assembler-mips64.h"  // NOLINT
danno@chromium.org's avatar
danno@chromium.org committed
98
#elif V8_TARGET_ARCH_X87
99
#include "src/regexp/x87/regexp-macro-assembler-x87.h"  // NOLINT
lrn@chromium.org's avatar
lrn@chromium.org committed
100 101 102
#else  // Unknown architecture.
#error "Unknown architecture."
#endif  // Target architecture.
103
#endif  // V8_INTERPRETED_REGEXP
104

105 106
namespace v8 {
namespace internal {
107

108 109 110 111 112 113
// -----------------------------------------------------------------------------
// Common register code.

const char* Register::ToString() {
  // This is the mapping of allocation indices to registers.
  DCHECK(reg_code >= 0 && reg_code < kNumRegisters);
114 115
  return RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT)
      ->GetGeneralRegisterName(reg_code);
116 117 118 119 120
}


bool Register::IsAllocatable() const {
  return ((1 << reg_code) &
121
          RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT)
122 123 124 125 126 127 128
              ->allocatable_general_codes_mask()) != 0;
}


const char* DoubleRegister::ToString() {
  // This is the mapping of allocation indices to registers.
  DCHECK(reg_code >= 0 && reg_code < kMaxNumRegisters);
129 130
  return RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT)
      ->GetDoubleRegisterName(reg_code);
131 132 133 134 135
}


bool DoubleRegister::IsAllocatable() const {
  return ((1 << reg_code) &
136
          RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT)
137 138 139 140
              ->allocatable_double_codes_mask()) != 0;
}


// -----------------------------------------------------------------------------
// Common double constants.

// Pre-computed double values referenced by generated code.  NOTE(review):
// the code that fills this struct in is outside this chunk — presumably an
// engine-setup routine; confirm before relying on initialization order.
struct DoubleConstant BASE_EMBEDDED {
double min_int;
double one_half;
double minus_one_half;
double negative_infinity;
double the_hole_nan;
double uint32_bias;
};

static DoubleConstant double_constants;

const char* const RelocInfo::kFillerCommentString = "DEOPTIMIZATION PADDING";

// Lazily-built tables for the fast math exp implementation; the flag and
// mutex guard one-time initialization (done elsewhere in this file).
static bool math_exp_data_initialized = false;
static base::Mutex* math_exp_data_mutex = NULL;
static double* math_exp_constants_array = NULL;
static double* math_exp_log_table_array = NULL;

162 163 164
// -----------------------------------------------------------------------------
// Implementation of AssemblerBase

AssemblerBase::AssemblerBase(Isolate* isolate, void* buffer, int buffer_size)
    : isolate_(isolate),
      jit_cookie_(0),
      enabled_cpu_features_(0),
      emit_debug_code_(FLAG_debug_code),
      predictable_code_size_(false),
      // We may use the assembler without an isolate.
      serializer_enabled_(isolate && isolate->serializer_enabled()),
      constant_pool_available_(false) {
  // Optionally randomize the cookie used to mask emitted constants.
  if (FLAG_mask_constants_with_cookie && isolate != NULL)  {
    jit_cookie_ = isolate->random_number_generator()->NextInt();
  }
  // Use the caller-supplied buffer if given; otherwise allocate our own
  // (and remember that we own it, so the destructor frees it).
  own_buffer_ = buffer == NULL;
  if (buffer_size == 0) buffer_size = kMinimalBufferSize;
  DCHECK(buffer_size > 0);
  if (own_buffer_) buffer = NewArray<byte>(buffer_size);
  buffer_ = static_cast<byte*>(buffer);
  buffer_size_ = buffer_size;

  // Code emission starts at the beginning of the buffer.
  pc_ = buffer_;
}


AssemblerBase::~AssemblerBase() {
  // Only release the buffer if we allocated it ourselves.
  if (own_buffer_) DeleteArray(buffer_);
}


// Flush the instruction cache for [start, start + size) so freshly written
// code is visible to instruction fetch.  No-op for zero size or on CPUs
// with coherent instruction caches.
void AssemblerBase::FlushICache(Isolate* isolate, void* start, size_t size) {
  if (size == 0) return;
  if (CpuFeatures::IsSupported(COHERENT_CACHE)) return;

#if defined(USE_SIMULATOR)
  Simulator::FlushICache(isolate->simulator_i_cache(), start, size);
#else
  CpuFeatures::FlushICache(start, size);
#endif  // USE_SIMULATOR
}


void AssemblerBase::FlushICacheWithoutIsolate(void* start, size_t size) {
  // Ideally we would just call Isolate::Current() here. However, this flushes
  // out issues because we usually only need the isolate when in the simulator.
  Isolate* isolate;
#if defined(USE_SIMULATOR)
  isolate = Isolate::Current();
#else
  isolate = nullptr;
#endif  // USE_SIMULATOR
  FlushICache(isolate, start, size);
}


218 219 220
// -----------------------------------------------------------------------------
// Implementation of PredictableCodeSizeScope

// Convenience constructor: enter predictable-code-size mode without
// asserting any particular emitted size on exit (-1 disables the check).
PredictableCodeSizeScope::PredictableCodeSizeScope(AssemblerBase* assembler)
    : PredictableCodeSizeScope(assembler, -1) {}


PredictableCodeSizeScope::PredictableCodeSizeScope(AssemblerBase* assembler,
                                                   int expected_size)
    : assembler_(assembler),
      expected_size_(expected_size),
      start_offset_(assembler->pc_offset()),
      old_value_(assembler->predictable_code_size()) {
  assembler_->set_predictable_code_size(true);
}


PredictableCodeSizeScope::~PredictableCodeSizeScope() {
  // TODO(svenpanne) Remove the 'if' when everything works.
  if (expected_size_ >= 0) {
    // Verify that exactly the expected number of bytes was emitted while
    // the scope was active.
    CHECK_EQ(expected_size_, assembler_->pc_offset() - start_offset_);
  }
  // Restore the previous flag value so nested scopes compose correctly.
  assembler_->set_predictable_code_size(old_value_);
}


244 245 246 247 248 249
// -----------------------------------------------------------------------------
// Implementation of CpuFeatureScope

#ifdef DEBUG
CpuFeatureScope::CpuFeatureScope(AssemblerBase* assembler, CpuFeature f)
    : assembler_(assembler) {
250
  DCHECK(CpuFeatures::IsSupported(f));
251 252 253 254
  old_enabled_ = assembler_->enabled_cpu_features();
  uint64_t mask = static_cast<uint64_t>(1) << f;
  // TODO(svenpanne) This special case below doesn't belong here!
#if V8_TARGET_ARCH_ARM
255
  // ARMv7 is implied by VFP3.
256
  if (f == VFP3) {
257
    mask |= static_cast<uint64_t>(1) << ARMv7;
258 259 260 261 262 263 264 265 266 267 268 269
  }
#endif
  assembler_->set_enabled_cpu_features(old_enabled_ | mask);
}


CpuFeatureScope::~CpuFeatureScope() {
  assembler_->set_enabled_cpu_features(old_enabled_);
}
#endif


270 271 272
// Static storage for the CPU feature probing results.
bool CpuFeatures::initialized_ = false;
unsigned CpuFeatures::supported_ = 0;
unsigned CpuFeatures::cache_line_size_ = 0;
273 274


275 276 277 278 279 280 281 282 283 284 285 286 287 288
// -----------------------------------------------------------------------------
// Implementation of Label

int Label::pos() const {
  // pos_ stores the position biased by one; the sign distinguishes the
  // label's state (see the Label declaration — not visible in this file).
  // A zero pos_ means the label carries no position and is an error here.
  if (pos_ < 0) return -pos_ - 1;
  if (pos_ > 0) return  pos_ - 1;
  UNREACHABLE();
  return 0;
}


// -----------------------------------------------------------------------------
// Implementation of RelocInfoWriter and RelocIterator
//
289 290 291 292 293
// Relocation information is written backwards in memory, from high addresses
// towards low addresses, byte by byte.  Therefore, in the encodings listed
// below, the first byte listed it at the highest address, and successive
// bytes in the record are at progressively lower addresses.
//
294 295 296 297 298 299
// Encoding
//
// The most common modes are given single-byte encodings.  Also, it is
// easy to identify the type of reloc info and skip unwanted modes in
// an iteration.
//
300
// The encoding relies on the fact that there are fewer than 14
301
// different relocation modes using standard non-compact encoding.
302
//
303 304 305
// The first byte of a relocation record has a tag in its low 2 bits:
// Here are the record schemes, depending on the low tag and optional higher
// tags.
306
//
307 308
// Low tag:
//   00: embedded_object:      [6-bit pc delta] 00
309
//
310
//   01: code_target:          [6-bit pc delta] 01
311
//
312 313
//   10: short_data_record:    [6-bit pc delta] 10 followed by
//                             [6-bit data delta] [2-bit data type tag]
314
//
315 316 317
//   11: long_record           [6 bit reloc mode] 11
//                             followed by pc delta
//                             followed by optional data depending on type.
318
//
319 320 321 322
//  2-bit data type tags, used in short_data_record and data_jump long_record:
//   code_target_with_id: 00
//   position:            01
//   statement_position:  10
323
//   deopt_reason:        11
324
//
325 326 327 328 329 330
//  If a pc delta exceeds 6 bits, it is split into a remainder that fits into
//  6 bits and a part that does not. The latter is encoded as a long record
//  with PC_JUMP as pseudo reloc info mode. The former is encoded as part of
//  the following record in the usual way. The long pc jump record has variable
//  length:
//               pc-jump:        [PC_JUMP] 11
331 332 333 334 335 336
//                               [7 bits data] 0
//                                  ...
//                               [7 bits data] 1
//               (Bits 6..31 of pc delta, with leading zeroes
//                dropped, and last non-zero chunk tagged with 1.)

337 338
// Reloc-stream encoding constants; see the format description above.
const int kTagBits = 2;
const int kTagMask = (1 << kTagBits) - 1;
const int kLongTagBits = 6;
const int kShortDataTypeTagBits = 2;
const int kShortDataBits = kBitsPerByte - kShortDataTypeTagBits;

// Low-tag values (the low two bits of the first record byte).
const int kEmbeddedObjectTag = 0;
const int kCodeTargetTag = 1;
const int kLocatableTag = 2;
const int kDefaultTag = 3;

// A pc delta that fits in kSmallPCDeltaBits is encoded inline in the tag
// byte; larger deltas need a preceding long pc-jump record.
const int kSmallPCDeltaBits = kBitsPerByte - kTagBits;
const int kSmallPCDeltaMask = (1 << kSmallPCDeltaBits) - 1;
const int RelocInfo::kMaxSmallPCDelta = kSmallPCDeltaMask;

// Variable-length pc-jump chunks: 7 payload bits plus a last-chunk tag bit.
const int kChunkBits = 7;
const int kChunkMask = (1 << kChunkBits) - 1;
const int kLastChunkTagBits = 1;
const int kLastChunkTagMask = 1;
const int kLastChunkTag = 1;

// 2-bit data type tags used in short data records.
const int kCodeWithIdTag = 0;
const int kNonstatementPositionTag = 1;
const int kStatementPositionTag = 2;
const int kDeoptReasonTag = 3;

363

364
uint32_t RelocInfoWriter::WriteLongPCJump(uint32_t pc_delta) {
  // Return if the pc_delta can fit in kSmallPCDeltaBits bits.
  // Otherwise write a variable length PC jump for the bits that do
  // not fit in the kSmallPCDeltaBits bits.
  if (is_uintn(pc_delta, kSmallPCDeltaBits)) return pc_delta;
  WriteMode(RelocInfo::PC_JUMP);
  uint32_t pc_jump = pc_delta >> kSmallPCDeltaBits;
  DCHECK(pc_jump > 0);
  // Write kChunkBits size chunks of the pc_jump, least significant chunk
  // first (the stream grows towards lower addresses).
  for (; pc_jump > 0; pc_jump = pc_jump >> kChunkBits) {
    byte b = pc_jump & kChunkMask;
    *--pos_ = b << kLastChunkTagBits;
  }
  // Tag the last chunk so it can be identified.
  *pos_ = *pos_ | kLastChunkTag;
  // Return the remaining kSmallPCDeltaBits of the pc_delta.
  return pc_delta & kSmallPCDeltaMask;
}


384 385 386
void RelocInfoWriter::WriteShortTaggedPC(uint32_t pc_delta, int tag) {
  // Write a byte of tagged pc-delta, possibly preceded by an explicit pc-jump.
  pc_delta = WriteLongPCJump(pc_delta);
  *--pos_ = pc_delta << kTagBits | tag;
}


void RelocInfoWriter::WriteShortTaggedData(intptr_t data_delta, int tag) {
  // Pack a small data delta and its 2-bit type tag into a single byte.
  *--pos_ = static_cast<byte>(data_delta << kShortDataTypeTagBits | tag);
}


void RelocInfoWriter::WriteMode(RelocInfo::Mode rmode) {
  // Long record: the full reloc mode is stored above the kDefaultTag bits.
  STATIC_ASSERT(RelocInfo::NUMBER_OF_MODES <= (1 << kLongTagBits));
  *--pos_ = static_cast<int>((rmode << kTagBits) | kDefaultTag);
}


402
void RelocInfoWriter::WriteModeAndPC(uint32_t pc_delta, RelocInfo::Mode rmode) {
403
  // Write two-byte tagged pc-delta, possibly preceded by var. length pc-jump.
404 405
  pc_delta = WriteLongPCJump(pc_delta);
  WriteMode(rmode);
406 407 408 409
  *--pos_ = pc_delta;
}


410
void RelocInfoWriter::WriteIntData(int number) {
411
  for (int i = 0; i < kIntSize; i++) {
412
    *--pos_ = static_cast<byte>(number);
413
    // Signed right shift is arithmetic shift.  Tested in test-utils.cc.
414
    number = number >> kBitsPerByte;
415 416 417
  }
}

418

419
void RelocInfoWriter::WriteData(intptr_t data_delta) {
420
  for (int i = 0; i < kIntptrSize; i++) {
421
    *--pos_ = static_cast<byte>(data_delta);
422
    // Signed right shift is arithmetic shift.  Tested in test-utils.cc.
423
    data_delta = data_delta >> kBitsPerByte;
424 425 426 427
  }
}


428 429 430 431 432
void RelocInfoWriter::WritePosition(int pc_delta, int pos_delta,
                                    RelocInfo::Mode rmode) {
  int pos_type_tag = (rmode == RelocInfo::POSITION) ? kNonstatementPositionTag
                                                    : kStatementPositionTag;
  // Check if delta is small enough to fit in a tagged byte.
  if (is_intn(pos_delta, kShortDataBits)) {
    WriteShortTaggedPC(pc_delta, kLocatableTag);
    WriteShortTaggedData(pos_delta, pos_type_tag);
  } else {
    // Otherwise, use costly encoding.
    WriteModeAndPC(pc_delta, rmode);
    WriteIntData(pos_delta);
  }
}


// Emit the buffered POSITION record, if any.  Consecutive POSITION entries
// at the same pc are accumulated into one candidate (see Write) and only
// written out here.
void RelocInfoWriter::FlushPosition() {
  if (!next_position_candidate_flushed_) {
    WritePosition(next_position_candidate_pc_delta_,
                  next_position_candidate_pos_delta_, RelocInfo::POSITION);
    next_position_candidate_pos_delta_ = 0;
    next_position_candidate_pc_delta_ = 0;
    next_position_candidate_flushed_ = true;
  }
}


455
// Encode one relocation entry into the backwards-growing reloc stream.
// See the encoding description above for the record formats.
void RelocInfoWriter::Write(const RelocInfo* rinfo) {
  RelocInfo::Mode rmode = rinfo->rmode();
  // Any pending merged POSITION candidate must be emitted before a record
  // of a different mode.
  if (rmode != RelocInfo::POSITION) {
    FlushPosition();
  }
#ifdef DEBUG
  byte* begin_pos = pos_;
#endif
  DCHECK(rinfo->rmode() < RelocInfo::NUMBER_OF_MODES);
  DCHECK(rinfo->pc() - last_pc_ >= 0);
  // Use unsigned delta-encoding for pc.
  uint32_t pc_delta = static_cast<uint32_t>(rinfo->pc() - last_pc_);

  // The two most common modes are given small tags, and usually fit in a byte.
  if (rmode == RelocInfo::EMBEDDED_OBJECT) {
    WriteShortTaggedPC(pc_delta, kEmbeddedObjectTag);
  } else if (rmode == RelocInfo::CODE_TARGET) {
    WriteShortTaggedPC(pc_delta, kCodeTargetTag);
    DCHECK(begin_pos - pos_ <= RelocInfo::kMaxCallSize);
  } else if (rmode == RelocInfo::CODE_TARGET_WITH_ID) {
    // Use signed delta-encoding for id.
    DCHECK_EQ(static_cast<int>(rinfo->data()), rinfo->data());
    int id_delta = static_cast<int>(rinfo->data()) - last_id_;
    // Check if delta is small enough to fit in a tagged byte.
    if (is_intn(id_delta, kShortDataBits)) {
      WriteShortTaggedPC(pc_delta, kLocatableTag);
      WriteShortTaggedData(id_delta, kCodeWithIdTag);
    } else {
      // Otherwise, use costly encoding.
      WriteModeAndPC(pc_delta, rmode);
      WriteIntData(id_delta);
    }
    last_id_ = static_cast<int>(rinfo->data());
  } else if (rmode == RelocInfo::DEOPT_REASON) {
    // Deopt reasons are small unsigned values that always fit in the
    // short data byte.
    DCHECK(rinfo->data() < (1 << kShortDataBits));
    WriteShortTaggedPC(pc_delta, kLocatableTag);
    WriteShortTaggedData(rinfo->data(), kDeoptReasonTag);
  } else if (RelocInfo::IsPosition(rmode)) {
    // Use signed delta-encoding for position.
    DCHECK_EQ(static_cast<int>(rinfo->data()), rinfo->data());
    int pos_delta = static_cast<int>(rinfo->data()) - last_position_;
    if (rmode == RelocInfo::STATEMENT_POSITION) {
      WritePosition(pc_delta, pos_delta, rmode);
    } else {
      DCHECK_EQ(rmode, RelocInfo::POSITION);
      // Buffer non-statement positions: consecutive POSITION entries at the
      // same pc are merged into a single candidate, flushed lazily.
      if (pc_delta != 0 || last_mode_ != RelocInfo::POSITION) {
        FlushPosition();
        next_position_candidate_pc_delta_ = pc_delta;
        next_position_candidate_pos_delta_ = pos_delta;
      } else {
        next_position_candidate_pos_delta_ += pos_delta;
      }
      next_position_candidate_flushed_ = false;
    }
    last_position_ = static_cast<int>(rinfo->data());
  } else {
    // All remaining modes use the long record format.
    WriteModeAndPC(pc_delta, rmode);
    if (RelocInfo::IsComment(rmode)) {
      WriteData(rinfo->data());
    } else if (RelocInfo::IsConstPool(rmode) ||
               RelocInfo::IsVeneerPool(rmode) ||
               RelocInfo::IsDebugBreakSlotAtCall(rmode)) {
      WriteIntData(static_cast<int>(rinfo->data()));
    }
  }
  last_pc_ = rinfo->pc();
  last_mode_ = rmode;
#ifdef DEBUG
  DCHECK(begin_pos - pos_ <= kMaxSize);
#endif
}


inline int RelocIterator::AdvanceGetTag() {
  // Pop the next record byte and extract its low 2-bit tag.
  return *--pos_ & kTagMask;
}


inline RelocInfo::Mode RelocIterator::GetMode() {
  // Extract the 6-bit reloc mode from a long-record byte.
  return static_cast<RelocInfo::Mode>((*pos_ >> kTagBits) &
                                      ((1 << kLongTagBits) - 1));
}


inline void RelocIterator::ReadShortTaggedPC() {
  // The pc delta occupies the bits above the tag in the current byte.
  rinfo_.pc_ += *pos_ >> kTagBits;
}


inline void RelocIterator::AdvanceReadPC() {
  // Long-record pc delta: a full byte of its own.
  rinfo_.pc_ += *--pos_;
}


549 550 551 552 553 554 555 556 557 558
void RelocIterator::AdvanceReadId() {
  int x = 0;
  for (int i = 0; i < kIntSize; i++) {
    x |= static_cast<int>(*--pos_) << i * kBitsPerByte;
  }
  last_id_ += x;
  rinfo_.data_ = last_id_;
}


559
void RelocIterator::AdvanceReadInt() {
560 561 562 563 564 565 566 567
  int x = 0;
  for (int i = 0; i < kIntSize; i++) {
    x |= static_cast<int>(*--pos_) << i * kBitsPerByte;
  }
  rinfo_.data_ = x;
}


568 569 570 571 572 573 574 575 576 577
void RelocIterator::AdvanceReadPosition() {
  int x = 0;
  for (int i = 0; i < kIntSize; i++) {
    x |= static_cast<int>(*--pos_) << i * kBitsPerByte;
  }
  last_position_ += x;
  rinfo_.data_ = last_position_;
}


578
void RelocIterator::AdvanceReadData() {
579 580 581
  intptr_t x = 0;
  for (int i = 0; i < kIntptrSize; i++) {
    x |= static_cast<intptr_t>(*--pos_) << i * kBitsPerByte;
582
  }
583
  rinfo_.data_ = x;
584 585 586
}


587
void RelocIterator::AdvanceReadLongPCJump() {
  // Read the 32-kSmallPCDeltaBits most significant bits of the
  // pc jump in kChunkBits bit chunks and shift them into place.
  // Stop when the last chunk is encountered.
  uint32_t pc_jump = 0;
  for (int i = 0; i < kIntSize; i++) {
    byte pc_jump_part = *--pos_;
    pc_jump |= (pc_jump_part >> kLastChunkTagBits) << i * kChunkBits;
    if ((pc_jump_part & kLastChunkTagMask) == 1) break;
  }
  // The least significant kSmallPCDeltaBits bits will be added
  // later.
  rinfo_.pc_ += pc_jump << kSmallPCDeltaBits;
}


603 604
inline int RelocIterator::GetShortDataTypeTag() {
  return *pos_ & ((1 << kShortDataTypeTagBits) - 1);
605 606 607
}


608
inline void RelocIterator::ReadShortTaggedId() {
609 610
  int8_t signed_b = *pos_;
  // Signed right shift is arithmetic shift.  Tested in test-utils.cc.
611
  last_id_ += signed_b >> kShortDataTypeTagBits;
612
  rinfo_.data_ = last_id_;
613 614 615
}


616
inline void RelocIterator::ReadShortTaggedPosition() {
617 618
  int8_t signed_b = *pos_;
  // Signed right shift is arithmetic shift.  Tested in test-utils.cc.
619
  last_position_ += signed_b >> kShortDataTypeTagBits;
620 621 622 623
  rinfo_.data_ = last_position_;
}


624
inline void RelocIterator::ReadShortTaggedData() {
625 626 627 628 629
  uint8_t unsigned_b = *pos_;
  rinfo_.data_ = unsigned_b >> kTagBits;
}


630
// Translate a short-data position tag back into its relocation mode.
static inline RelocInfo::Mode GetPositionModeFromTag(int tag) {
  DCHECK(tag == kNonstatementPositionTag ||
         tag == kStatementPositionTag);
  if (tag == kNonstatementPositionTag) {
    return RelocInfo::POSITION;
  }
  return RelocInfo::STATEMENT_POSITION;
}


// Advance to the next relocation entry whose mode is selected by
// mode_mask_, decoding records in the reverse order they were written.
void RelocIterator::next() {
  DCHECK(!done());
  // Basically, do the opposite of RelocInfoWriter::Write.
  // Reading of data is as far as possible avoided for unwanted modes,
  // but we must always update the pc.
  //
  // We exit this loop by returning when we find a mode we want.
  while (pos_ > end_) {
    int tag = AdvanceGetTag();
    if (tag == kEmbeddedObjectTag) {
      ReadShortTaggedPC();
      if (SetMode(RelocInfo::EMBEDDED_OBJECT)) return;
    } else if (tag == kCodeTargetTag) {
      ReadShortTaggedPC();
      if (SetMode(RelocInfo::CODE_TARGET)) return;
    } else if (tag == kLocatableTag) {
      // Short data record: pc byte followed by a data byte whose low bits
      // identify the payload type.
      ReadShortTaggedPC();
      Advance();
      int data_type_tag = GetShortDataTypeTag();
      if (data_type_tag == kCodeWithIdTag) {
        if (SetMode(RelocInfo::CODE_TARGET_WITH_ID)) {
          ReadShortTaggedId();
          return;
        }
      } else if (data_type_tag == kDeoptReasonTag) {
        if (SetMode(RelocInfo::DEOPT_REASON)) {
          ReadShortTaggedData();
          return;
        }
      } else {
        DCHECK(data_type_tag == kNonstatementPositionTag ||
               data_type_tag == kStatementPositionTag);
        if (mode_mask_ & RelocInfo::kPositionMask) {
          // Always update the position if we are interested in either
          // statement positions or non-statement positions.
          ReadShortTaggedPosition();
          if (SetMode(GetPositionModeFromTag(data_type_tag))) return;
        }
      }
    } else {
      // Long record: mode byte, pc delta byte, optional payload.
      DCHECK(tag == kDefaultTag);
      RelocInfo::Mode rmode = GetMode();
      if (rmode == RelocInfo::PC_JUMP) {
        AdvanceReadLongPCJump();
      } else {
        AdvanceReadPC();
        if (rmode == RelocInfo::CODE_TARGET_WITH_ID) {
          if (SetMode(rmode)) {
            AdvanceReadId();
            return;
          }
          // Skip the unread id payload.
          Advance(kIntSize);
        } else if (RelocInfo::IsComment(rmode)) {
          if (SetMode(rmode)) {
            AdvanceReadData();
            return;
          }
          // Skip the unread comment payload.
          Advance(kIntptrSize);
        } else if (RelocInfo::IsPosition(rmode)) {
          if (mode_mask_ & RelocInfo::kPositionMask) {
            // Always update the position if we are interested in either
            // statement positions or non-statement positions.
            AdvanceReadPosition();
            if (SetMode(rmode)) return;
          } else {
            Advance(kIntSize);
          }
        } else if (RelocInfo::IsConstPool(rmode) ||
                   RelocInfo::IsVeneerPool(rmode) ||
                   RelocInfo::IsDebugBreakSlotAtCall(rmode)) {
          if (SetMode(rmode)) {
            AdvanceReadInt();
            return;
          }
          Advance(kIntSize);
        } else if (SetMode(static_cast<RelocInfo::Mode>(rmode))) {
          return;
        }
      }
    }
  }
  // After the stream proper is exhausted, synthesize a CODE_AGE_SEQUENCE
  // entry if one was recorded for this code object.
  if (code_age_sequence_ != NULL) {
    byte* old_code_age_sequence = code_age_sequence_;
    code_age_sequence_ = NULL;
    if (SetMode(RelocInfo::CODE_AGE_SEQUENCE)) {
      rinfo_.data_ = 0;
      rinfo_.pc_ = old_code_age_sequence;
      return;
    }
  }
  done_ = true;
}


// Iterate the relocation info attached to a live Code object.
RelocIterator::RelocIterator(Code* code, int mode_mask) {
  rinfo_.host_ = code;
  rinfo_.pc_ = code->instruction_start();
  rinfo_.data_ = 0;
  // Relocation info is read backwards.
  pos_ = code->relocation_start() + code->relocation_size();
  end_ = code->relocation_start();
  done_ = false;
  mode_mask_ = mode_mask;
  last_id_ = 0;
  last_position_ = 0;
  byte* sequence = code->FindCodeAgeSequence();
  // We get the isolate from the map, because at serialization time
  // the code pointer has been cloned and isn't really in heap space.
  Isolate* isolate = code->map()->GetIsolate();
  if (sequence != NULL && !Code::IsYoungSequence(isolate, sequence)) {
    code_age_sequence_ = sequence;
  } else {
    code_age_sequence_ = NULL;
  }
  // An empty mask selects nothing: skip straight past the stream.
  if (mode_mask_ == 0) pos_ = end_;
  next();
}


// Iterate the relocation info of a not-yet-committed code buffer; there is
// no host Code object and thus no code age sequence.
RelocIterator::RelocIterator(const CodeDesc& desc, int mode_mask) {
  rinfo_.pc_ = desc.buffer;
  rinfo_.data_ = 0;
  // Relocation info is read backwards.
  pos_ = desc.buffer + desc.buffer_size;
  end_ = pos_ - desc.reloc_size;
  done_ = false;
  mode_mask_ = mode_mask;
  last_id_ = 0;
  last_position_ = 0;
  code_age_sequence_ = NULL;
  // An empty mask selects nothing: skip straight past the stream.
  if (mode_mask_ == 0) pos_ = end_;
  next();
}


// -----------------------------------------------------------------------------
// Implementation of RelocInfo


#ifdef DEBUG
// Returns true if the code described by |desc| contains any relocation
// entry that would need fixing up if the code were moved.
bool RelocInfo::RequiresRelocation(const CodeDesc& desc) {
  // Ensure there are no code targets or embedded objects present in the
  // deoptimization entries, they would require relocation after code
  // generation.
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::kApplyMask;
  RelocIterator it(desc, mode_mask);
  return !it.done();
}
#endif


793
#ifdef ENABLE_DISASSEMBLER
794
// Returns a human-readable name for a relocation mode, for disassembly.
const char* RelocInfo::RelocModeName(RelocInfo::Mode rmode) {
  switch (rmode) {
    case NONE32:
      return "no reloc 32";
    case NONE64:
      return "no reloc 64";
    case EMBEDDED_OBJECT:
      return "embedded object";
    case CONSTRUCT_CALL:
      return "code target (js construct call)";
    case DEBUGGER_STATEMENT:
      return "debugger statement";
    case CODE_TARGET:
      return "code target";
    case CODE_TARGET_WITH_ID:
      return "code target with id";
    case CELL:
      return "property cell";
    case RUNTIME_ENTRY:
      return "runtime entry";
    case COMMENT:
      return "comment";
    case POSITION:
      return "position";
    case STATEMENT_POSITION:
      return "statement position";
    case EXTERNAL_REFERENCE:
      return "external reference";
    case INTERNAL_REFERENCE:
      return "internal reference";
    case INTERNAL_REFERENCE_ENCODED:
      return "encoded internal reference";
    case DEOPT_REASON:
      return "deopt reason";
    case CONST_POOL:
      return "constant pool";
    case VENEER_POOL:
      return "veneer pool";
    case DEBUG_BREAK_SLOT_AT_POSITION:
      return "debug break slot at position";
    case DEBUG_BREAK_SLOT_AT_RETURN:
      return "debug break slot at return";
    case DEBUG_BREAK_SLOT_AT_CALL:
      return "debug break slot at call";
    case DEBUG_BREAK_SLOT_AT_CONSTRUCT_CALL:
      return "debug break slot at construct call";
    case CODE_AGE_SEQUENCE:
      return "code age sequence";
    case GENERATOR_CONTINUATION:
      return "generator continuation";
    case NUMBER_OF_MODES:
    case PC_JUMP:
      // These are encoding artifacts, never stored as a real entry's mode.
      UNREACHABLE();
      return "number_of_modes";
  }
  return "unknown relocation type";
}


853 854
void RelocInfo::Print(Isolate* isolate, std::ostream& os) {  // NOLINT
  os << static_cast<const void*>(pc_) << "  " << RelocModeName(rmode_);
855
  if (IsComment(rmode_)) {
856
    os << "  (" << reinterpret_cast<char*>(data_) << ")";
857 858 859
  } else if (rmode_ == DEOPT_REASON) {
    os << "  (" << Deoptimizer::GetDeoptReason(
                       static_cast<Deoptimizer::DeoptReason>(data_)) << ")";
860
  } else if (rmode_ == EMBEDDED_OBJECT) {
861
    os << "  (" << Brief(target_object()) << ")";
862
  } else if (rmode_ == EXTERNAL_REFERENCE) {
863
    ExternalReferenceEncoder ref_encoder(isolate);
864 865
    os << " ("
       << ref_encoder.NameOfAddress(isolate, target_external_reference())
866 867
       << ")  (" << static_cast<const void*>(target_external_reference())
       << ")";
868
  } else if (IsCodeTarget(rmode_)) {
869
    Code* code = Code::GetCodeFromTargetAddress(target_address());
870 871
    os << " (" << Code::Kind2String(code->kind()) << ")  ("
       << static_cast<const void*>(target_address()) << ")";
872
    if (rmode_ == CODE_TARGET_WITH_ID) {
873
      os << " (id=" << static_cast<int>(data_) << ")";
874
    }
875
  } else if (IsPosition(rmode_)) {
876
    os << "  (" << data() << ")";
877
  } else if (IsRuntimeEntry(rmode_) &&
878
             isolate->deoptimizer_data() != NULL) {
879 880
    // Depotimization bailouts are stored as runtime entries.
    int id = Deoptimizer::GetDeoptimizationId(
881
        isolate, target_address(), Deoptimizer::EAGER);
882
    if (id != Deoptimizer::kNotDeoptimizationEntry) {
883
      os << "  (deoptimization bailout " << id << ")";
884
    }
885 886
  } else if (IsConstPool(rmode_)) {
    os << " (size " << static_cast<int>(data_) << ")";
887 888
  }

889
  os << "\n";
890
}
891
#endif  // ENABLE_DISASSEMBLER
892 893


#ifdef VERIFY_HEAP
// Heap verification: checks that the pointers/addresses recorded for this
// relocation entry are internally consistent with the heap's view.
void RelocInfo::Verify(Isolate* isolate) {
  switch (rmode_) {
    case EMBEDDED_OBJECT:
      Object::VerifyPointer(target_object());
      break;
    case CELL:
      Object::VerifyPointer(target_cell());
      break;
    case DEBUGGER_STATEMENT:
    case CONSTRUCT_CALL:
    case CODE_TARGET_WITH_ID:
    case CODE_TARGET: {
      // convert inline target address to code object
      Address addr = target_address();
      CHECK(addr != NULL);
      // Check that we can find the right code object.
      Code* code = Code::GetCodeFromTargetAddress(addr);
      Object* found = isolate->FindCodeObject(addr);
      CHECK(found->IsCode());
      CHECK(code->address() == HeapObject::cast(found)->address());
      break;
    }
    case INTERNAL_REFERENCE:
    case INTERNAL_REFERENCE_ENCODED: {
      // Internal references must point inside the code object that holds them.
      Address target = target_internal_reference();
      Address pc = target_internal_reference_address();
      Code* code = Code::cast(isolate->FindCodeObject(pc));
      CHECK(target >= code->instruction_start());
      CHECK(target <= code->instruction_end());
      break;
    }
    case RUNTIME_ENTRY:
    case COMMENT:
    case POSITION:
    case STATEMENT_POSITION:
    case EXTERNAL_REFERENCE:
    case DEOPT_REASON:
    case CONST_POOL:
    case VENEER_POOL:
    case DEBUG_BREAK_SLOT_AT_POSITION:
    case DEBUG_BREAK_SLOT_AT_RETURN:
    case DEBUG_BREAK_SLOT_AT_CALL:
    case DEBUG_BREAK_SLOT_AT_CONSTRUCT_CALL:
    case GENERATOR_CONTINUATION:
    case NONE32:
    case NONE64:
      // Nothing to verify for these modes.
      break;
    case NUMBER_OF_MODES:
    case PC_JUMP:
      UNREACHABLE();
      break;
    case CODE_AGE_SEQUENCE:
      DCHECK(Code::IsYoungSequence(isolate, pc_) || code_age_stub()->IsCode());
      break;
  }
}
#endif  // VERIFY_HEAP
952 953


954 955 956 957 958
int RelocInfo::DebugBreakCallArgumentsCount(intptr_t data) {
  return static_cast<int>(data);
}


959 960 961
// -----------------------------------------------------------------------------
// Implementation of ExternalReference

962 963 964
void ExternalReference::SetUp() {
  double_constants.min_int = kMinInt;
  double_constants.one_half = 0.5;
965
  double_constants.minus_one_half = -0.5;
966
  double_constants.the_hole_nan = bit_cast<double>(kHoleNanInt64);
967
  double_constants.negative_infinity = -V8_INFINITY;
968 969
  double_constants.uint32_bias =
    static_cast<double>(static_cast<uint32_t>(0xFFFFFFFF)) + 1;
970

971
  math_exp_data_mutex = new base::Mutex();
972 973 974 975 976 977 978
}


void ExternalReference::InitializeMathExpData() {
  // Early return?
  if (math_exp_data_initialized) return;

979
  base::LockGuard<base::Mutex> lock_guard(math_exp_data_mutex);
980 981 982 983 984 985 986 987 988 989 990
  if (!math_exp_data_initialized) {
    // If this is changed, generated code must be adapted too.
    const int kTableSizeBits = 11;
    const int kTableSize = 1 << kTableSizeBits;
    const double kTableSizeDouble = static_cast<double>(kTableSize);

    math_exp_constants_array = new double[9];
    // Input values smaller than this always return 0.
    math_exp_constants_array[0] = -708.39641853226408;
    // Input values larger than this always return +Infinity.
    math_exp_constants_array[1] = 709.78271289338397;
991
    math_exp_constants_array[2] = V8_INFINITY;
992 993 994
    // The rest is black magic. Do not attempt to understand it. It is
    // loosely based on the "expd" function published at:
    // http://herumi.blogspot.com/2011/08/fast-double-precision-exponential.html
995
    const double constant3 = (1 << kTableSizeBits) / std::log(2.0);
996 997 998 999 1000 1001 1002 1003 1004 1005
    math_exp_constants_array[3] = constant3;
    math_exp_constants_array[4] =
        static_cast<double>(static_cast<int64_t>(3) << 51);
    math_exp_constants_array[5] = 1 / constant3;
    math_exp_constants_array[6] = 3.0000000027955394;
    math_exp_constants_array[7] = 0.16666666685227835;
    math_exp_constants_array[8] = 1;

    math_exp_log_table_array = new double[kTableSize];
    for (int i = 0; i < kTableSize; i++) {
1006
      double value = std::pow(2, i / kTableSizeDouble);
1007
      uint64_t bits = bit_cast<uint64_t, double>(value);
1008
      bits &= (static_cast<uint64_t>(1) << 52) - 1;
1009
      double mantissa = bit_cast<double, uint64_t>(bits);
1010 1011 1012 1013 1014 1015 1016 1017 1018 1019
      math_exp_log_table_array[i] = mantissa;
    }

    math_exp_data_initialized = true;
  }
}


void ExternalReference::TearDownMathExpData() {
  delete[] math_exp_constants_array;
1020
  math_exp_constants_array = NULL;
1021
  delete[] math_exp_log_table_array;
1022
  math_exp_log_table_array = NULL;
1023
  delete math_exp_data_mutex;
1024
  math_exp_data_mutex = NULL;
1025 1026 1027
}


1028 1029
ExternalReference::ExternalReference(Builtins::CFunctionId id, Isolate* isolate)
  : address_(Redirect(isolate, Builtins::c_function_address(id))) {}
1030 1031


1032
ExternalReference::ExternalReference(
1033 1034 1035 1036
    ApiFunction* fun,
    Type type = ExternalReference::BUILTIN_CALL,
    Isolate* isolate = NULL)
  : address_(Redirect(isolate, fun->address(), type)) {}
1037 1038


1039 1040
ExternalReference::ExternalReference(Builtins::Name name, Isolate* isolate)
  : address_(isolate->builtins()->builtin_address(name)) {}
1041 1042


1043 1044
ExternalReference::ExternalReference(Runtime::FunctionId id, Isolate* isolate)
    : address_(Redirect(isolate, Runtime::FunctionForId(id)->entry)) {}
1045 1046


1047 1048
ExternalReference::ExternalReference(const Runtime::Function* f,
                                     Isolate* isolate)
1049
    : address_(Redirect(isolate, f->entry)) {}
1050 1051


1052 1053
ExternalReference ExternalReference::isolate_address(Isolate* isolate) {
  return ExternalReference(isolate);
1054 1055 1056
}


1057 1058 1059 1060
ExternalReference::ExternalReference(StatsCounter* counter)
  : address_(reinterpret_cast<Address>(counter->GetInternalPointer())) {}


1061 1062
ExternalReference::ExternalReference(Isolate::AddressId id, Isolate* isolate)
  : address_(isolate->get_address_from_id(id)) {}
1063 1064 1065 1066 1067 1068


ExternalReference::ExternalReference(const SCTableReference& table_ref)
  : address_(table_ref.address()) {}


// References to C++ helper functions called from generated code.  All go
// through Redirect() so the simulator can intercept them.

ExternalReference ExternalReference::
    incremental_marking_record_write_function(Isolate* isolate) {
  return ExternalReference(Redirect(
      isolate,
      FUNCTION_ADDR(IncrementalMarking::RecordWriteFromCode)));
}


ExternalReference ExternalReference::
    store_buffer_overflow_function(Isolate* isolate) {
  return ExternalReference(Redirect(
      isolate,
      FUNCTION_ADDR(StoreBuffer::StoreBufferOverflow)));
}


ExternalReference ExternalReference::delete_handle_scope_extensions(
    Isolate* isolate) {
  return ExternalReference(Redirect(
      isolate,
      FUNCTION_ADDR(HandleScope::DeleteExtensions)));
}


ExternalReference ExternalReference::get_date_field_function(
    Isolate* isolate) {
  return ExternalReference(Redirect(isolate, FUNCTION_ADDR(JSDate::GetField)));
}


ExternalReference ExternalReference::get_make_code_young_function(
    Isolate* isolate) {
  return ExternalReference(Redirect(
      isolate, FUNCTION_ADDR(Code::MakeCodeAgeSequenceYoung)));
}


ExternalReference ExternalReference::get_mark_code_as_executed_function(
    Isolate* isolate) {
  return ExternalReference(Redirect(
      isolate, FUNCTION_ADDR(Code::MarkCodeAsExecuted)));
}


ExternalReference ExternalReference::date_cache_stamp(Isolate* isolate) {
  return ExternalReference(isolate->date_cache()->stamp_address());
}


ExternalReference ExternalReference::stress_deopt_count(Isolate* isolate) {
  return ExternalReference(isolate->stress_deopt_count_address());
}


ExternalReference ExternalReference::new_deoptimizer_function(
    Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(Deoptimizer::New)));
}


ExternalReference ExternalReference::compute_output_frames_function(
    Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(Deoptimizer::ComputeOutputFrames)));
}


ExternalReference ExternalReference::log_enter_external_function(
    Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(Logger::EnterExternal)));
}


ExternalReference ExternalReference::log_leave_external_function(
    Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(Logger::LeaveExternal)));
}


1151 1152
ExternalReference ExternalReference::keyed_lookup_cache_keys(Isolate* isolate) {
  return ExternalReference(isolate->keyed_lookup_cache()->keys_address());
1153 1154 1155
}


1156 1157 1158 1159
ExternalReference ExternalReference::keyed_lookup_cache_field_offsets(
    Isolate* isolate) {
  return ExternalReference(
      isolate->keyed_lookup_cache()->field_offsets_address());
1160 1161 1162
}


1163 1164
ExternalReference ExternalReference::roots_array_start(Isolate* isolate) {
  return ExternalReference(isolate->heap()->roots_array_start());
1165 1166 1167
}


1168 1169 1170 1171 1172 1173
ExternalReference ExternalReference::allocation_sites_list_address(
    Isolate* isolate) {
  return ExternalReference(isolate->heap()->allocation_sites_list_address());
}


1174 1175
ExternalReference ExternalReference::address_of_stack_limit(Isolate* isolate) {
  return ExternalReference(isolate->stack_guard()->address_of_jslimit());
1176 1177 1178
}


1179 1180 1181
ExternalReference ExternalReference::address_of_real_stack_limit(
    Isolate* isolate) {
  return ExternalReference(isolate->stack_guard()->address_of_real_jslimit());
1182 1183 1184
}


1185 1186 1187
ExternalReference ExternalReference::address_of_regexp_stack_limit(
    Isolate* isolate) {
  return ExternalReference(isolate->regexp_stack()->limit_address());
1188 1189 1190
}


1191 1192
ExternalReference ExternalReference::new_space_start(Isolate* isolate) {
  return ExternalReference(isolate->heap()->NewSpaceStart());
1193 1194
}

1195

1196
ExternalReference ExternalReference::store_buffer_top(Isolate* isolate) {
1197
  return ExternalReference(isolate->heap()->store_buffer_top_address());
1198 1199 1200
}


1201
ExternalReference ExternalReference::new_space_mask(Isolate* isolate) {
1202 1203
  return ExternalReference(reinterpret_cast<Address>(
      isolate->heap()->NewSpaceMask()));
1204 1205 1206
}


1207 1208 1209
ExternalReference ExternalReference::new_space_allocation_top_address(
    Isolate* isolate) {
  return ExternalReference(isolate->heap()->NewSpaceAllocationTopAddress());
1210 1211
}

1212

1213 1214 1215
ExternalReference ExternalReference::new_space_allocation_limit_address(
    Isolate* isolate) {
  return ExternalReference(isolate->heap()->NewSpaceAllocationLimitAddress());
1216 1217
}

1218

1219
ExternalReference ExternalReference::old_space_allocation_top_address(
1220
    Isolate* isolate) {
1221
  return ExternalReference(isolate->heap()->OldSpaceAllocationTopAddress());
1222 1223 1224
}


1225
ExternalReference ExternalReference::old_space_allocation_limit_address(
1226
    Isolate* isolate) {
1227
  return ExternalReference(isolate->heap()->OldSpaceAllocationLimitAddress());
1228 1229 1230
}


// References to HandleScope bookkeeping, pending-exception slots, and the
// double constants initialized in ExternalReference::SetUp().

ExternalReference ExternalReference::handle_scope_level_address(
    Isolate* isolate) {
  return ExternalReference(HandleScope::current_level_address(isolate));
}


ExternalReference ExternalReference::handle_scope_next_address(
    Isolate* isolate) {
  return ExternalReference(HandleScope::current_next_address(isolate));
}


ExternalReference ExternalReference::handle_scope_limit_address(
    Isolate* isolate) {
  return ExternalReference(HandleScope::current_limit_address(isolate));
}


ExternalReference ExternalReference::scheduled_exception_address(
    Isolate* isolate) {
  return ExternalReference(isolate->scheduled_exception_address());
}


ExternalReference ExternalReference::address_of_pending_message_obj(
    Isolate* isolate) {
  return ExternalReference(isolate->pending_message_obj_address());
}


ExternalReference ExternalReference::address_of_min_int() {
  return ExternalReference(reinterpret_cast<void*>(&double_constants.min_int));
}


ExternalReference ExternalReference::address_of_one_half() {
  return ExternalReference(reinterpret_cast<void*>(&double_constants.one_half));
}


ExternalReference ExternalReference::address_of_minus_one_half() {
  return ExternalReference(
      reinterpret_cast<void*>(&double_constants.minus_one_half));
}


ExternalReference ExternalReference::address_of_negative_infinity() {
  return ExternalReference(
      reinterpret_cast<void*>(&double_constants.negative_infinity));
}


ExternalReference ExternalReference::address_of_the_hole_nan() {
  return ExternalReference(
      reinterpret_cast<void*>(&double_constants.the_hole_nan));
}


ExternalReference ExternalReference::address_of_uint32_bias() {
  return ExternalReference(
      reinterpret_cast<void*>(&double_constants.uint32_bias));
}


// Profiler support: the is-profiling flag address plus the thunks that wrap
// API callbacks so the CPU profiler can attribute time to them.

ExternalReference ExternalReference::is_profiling_address(Isolate* isolate) {
  return ExternalReference(isolate->cpu_profiler()->is_profiling_address());
}


ExternalReference ExternalReference::invoke_function_callback(
    Isolate* isolate) {
  Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback);
  ExternalReference::Type thunk_type = ExternalReference::PROFILING_API_CALL;
  ApiFunction thunk_fun(thunk_address);
  return ExternalReference(&thunk_fun, thunk_type, isolate);
}


ExternalReference ExternalReference::invoke_accessor_getter_callback(
    Isolate* isolate) {
  Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback);
  ExternalReference::Type thunk_type =
      ExternalReference::PROFILING_GETTER_CALL;
  ApiFunction thunk_fun(thunk_address);
  return ExternalReference(&thunk_fun, thunk_type, isolate);
}


1319
#ifndef V8_INTERPRETED_REGEXP
lrn@chromium.org's avatar
lrn@chromium.org committed
1320

1321 1322
ExternalReference ExternalReference::re_check_stack_guard_state(
    Isolate* isolate) {
lrn@chromium.org's avatar
lrn@chromium.org committed
1323
  Address function;
1324
#if V8_TARGET_ARCH_X64
lrn@chromium.org's avatar
lrn@chromium.org committed
1325 1326 1327
  function = FUNCTION_ADDR(RegExpMacroAssemblerX64::CheckStackGuardState);
#elif V8_TARGET_ARCH_IA32
  function = FUNCTION_ADDR(RegExpMacroAssemblerIA32::CheckStackGuardState);
1328 1329
#elif V8_TARGET_ARCH_ARM64
  function = FUNCTION_ADDR(RegExpMacroAssemblerARM64::CheckStackGuardState);
lrn@chromium.org's avatar
lrn@chromium.org committed
1330 1331
#elif V8_TARGET_ARCH_ARM
  function = FUNCTION_ADDR(RegExpMacroAssemblerARM::CheckStackGuardState);
1332 1333
#elif V8_TARGET_ARCH_PPC
  function = FUNCTION_ADDR(RegExpMacroAssemblerPPC::CheckStackGuardState);
1334 1335
#elif V8_TARGET_ARCH_MIPS
  function = FUNCTION_ADDR(RegExpMacroAssemblerMIPS::CheckStackGuardState);
1336 1337
#elif V8_TARGET_ARCH_MIPS64
  function = FUNCTION_ADDR(RegExpMacroAssemblerMIPS::CheckStackGuardState);
danno@chromium.org's avatar
danno@chromium.org committed
1338 1339
#elif V8_TARGET_ARCH_X87
  function = FUNCTION_ADDR(RegExpMacroAssemblerX87::CheckStackGuardState);
lrn@chromium.org's avatar
lrn@chromium.org committed
1340
#else
1341
  UNREACHABLE();
lrn@chromium.org's avatar
lrn@chromium.org committed
1342
#endif
1343
  return ExternalReference(Redirect(isolate, function));
lrn@chromium.org's avatar
lrn@chromium.org committed
1344 1345
}

1346

1347
ExternalReference ExternalReference::re_grow_stack(Isolate* isolate) {
lrn@chromium.org's avatar
lrn@chromium.org committed
1348
  return ExternalReference(
1349
      Redirect(isolate, FUNCTION_ADDR(NativeRegExpMacroAssembler::GrowStack)));
lrn@chromium.org's avatar
lrn@chromium.org committed
1350 1351
}

1352 1353
ExternalReference ExternalReference::re_case_insensitive_compare_uc16(
    Isolate* isolate) {
lrn@chromium.org's avatar
lrn@chromium.org committed
1354
  return ExternalReference(Redirect(
1355
      isolate,
lrn@chromium.org's avatar
lrn@chromium.org committed
1356 1357 1358
      FUNCTION_ADDR(NativeRegExpMacroAssembler::CaseInsensitiveCompareUC16)));
}

1359

1360 1361 1362 1363
ExternalReference ExternalReference::re_word_character_map() {
  return ExternalReference(
      NativeRegExpMacroAssembler::word_character_map_address());
}
1364

1365 1366 1367
ExternalReference ExternalReference::address_of_static_offsets_vector(
    Isolate* isolate) {
  return ExternalReference(
1368
      reinterpret_cast<Address>(isolate->jsregexp_static_offsets_vector()));
1369 1370
}

1371 1372
ExternalReference ExternalReference::address_of_regexp_stack_memory_address(
    Isolate* isolate) {
1373
  return ExternalReference(
1374
      isolate->regexp_stack()->memory_address());
1375 1376
}

1377 1378 1379
ExternalReference ExternalReference::address_of_regexp_stack_memory_size(
    Isolate* isolate) {
  return ExternalReference(isolate->regexp_stack()->memory_size_address());
1380 1381
}

1382
#endif  // V8_INTERPRETED_REGEXP
lrn@chromium.org's avatar
lrn@chromium.org committed
1383

1384

1385 1386
ExternalReference ExternalReference::math_log_double_function(
    Isolate* isolate) {
1387
  typedef double (*d2d)(double x);
1388
  return ExternalReference(Redirect(isolate,
1389
                                    FUNCTION_ADDR(static_cast<d2d>(std::log)),
1390
                                    BUILTIN_FP_CALL));
1391 1392 1393
}


// Addresses into the Math.exp tables built by InitializeMathExpData();
// callers must have run that initialization first (checked below).

ExternalReference ExternalReference::math_exp_constants(int constant_index) {
  DCHECK(math_exp_data_initialized);
  return ExternalReference(
      reinterpret_cast<void*>(math_exp_constants_array + constant_index));
}


ExternalReference ExternalReference::math_exp_log_table() {
  DCHECK(math_exp_data_initialized);
  return ExternalReference(reinterpret_cast<void*>(math_exp_log_table_array));
}


1407 1408 1409 1410 1411 1412
ExternalReference ExternalReference::page_flags(Page* page) {
  return ExternalReference(reinterpret_cast<Address>(page) +
                           MemoryChunk::kFlagsOffset);
}


1413 1414 1415 1416 1417
ExternalReference ExternalReference::ForDeoptEntry(Address entry) {
  return ExternalReference(entry);
}


1418
ExternalReference ExternalReference::cpu_features() {
1419
  DCHECK(CpuFeatures::initialized_);
1420 1421 1422 1423
  return ExternalReference(&CpuFeatures::supported_);
}


// Debugger and interpreter support addresses.

ExternalReference ExternalReference::debug_is_active_address(
    Isolate* isolate) {
  return ExternalReference(isolate->debug()->is_active_address());
}


ExternalReference ExternalReference::debug_after_break_target_address(
    Isolate* isolate) {
  return ExternalReference(isolate->debug()->after_break_target_address());
}


ExternalReference
    ExternalReference::debug_restarter_frame_function_pointer_address(
        Isolate* isolate) {
  return ExternalReference(
      isolate->debug()->restarter_frame_function_pointer_address());
}


ExternalReference ExternalReference::virtual_handler_register(
    Isolate* isolate) {
  return ExternalReference(isolate->virtual_handler_register_address());
}


ExternalReference ExternalReference::virtual_slot_register(Isolate* isolate) {
  return ExternalReference(isolate->virtual_slot_register_address());
}


ExternalReference ExternalReference::runtime_function_table_address(
    Isolate* isolate) {
  return ExternalReference(
      const_cast<Runtime::Function*>(Runtime::RuntimeFunctionTable(isolate)));
}


1462 1463 1464 1465 1466 1467
double power_helper(double x, double y) {
  int y_int = static_cast<int>(y);
  if (y == y_int) {
    return power_double_int(x, y_int);  // Returns 1 if exponent is 0.
  }
  if (y == 0.5) {
1468
    return (std::isinf(x)) ? V8_INFINITY
1469
                           : fast_sqrt(x + 0.0);  // Convert -0 to +0.
1470 1471
  }
  if (y == -0.5) {
1472
    return (std::isinf(x)) ? 0 : 1.0 / fast_sqrt(x + 0.0);  // Convert -0 to +0.
1473 1474 1475 1476 1477
  }
  return power_double_double(x, y);
}


// Helper function to compute x^y, where y is known to be an
// integer. Uses binary decomposition to limit the number of
// multiplications; see the discussion in "Hacker's Delight" by Henry
// S. Warren, Jr., figure 11-6, page 213.
// Processes two exponent bits per iteration; returns 1 when y == 0.
double power_double_int(double x, int y) {
  double m = (y < 0) ? 1 / x : x;
  unsigned n = (y < 0) ? -y : y;
  double p = 1;
  while (n != 0) {
    if ((n & 1) != 0) p *= m;
    m *= m;
    if ((n & 2) != 0) p *= m;
    m *= m;
    n >>= 2;
  }
  return p;
}


// Computes x^y via std::pow, with explicit handling of the special cases
// where platform pow implementations (MinGW64, AIX) disagree with the spec,
// and of the NaN cases ia32 generated code bails out on.
double power_double_double(double x, double y) {
#if (defined(__MINGW64_VERSION_MAJOR) &&                              \
     (!defined(__MINGW64_VERSION_RC) || __MINGW64_VERSION_RC < 1)) || \
    defined(V8_OS_AIX)
  // MinGW64 and AIX have a custom implementation for pow.  This handles certain
  // special cases that are different.
  if ((x == 0.0 || std::isinf(x)) && y != 0.0 && std::isfinite(y)) {
    double f;
    double result = ((x == 0.0) ^ (y > 0)) ? V8_INFINITY : 0;
    /* retain sign if odd integer exponent */
    return ((std::modf(y, &f) == 0.0) && (static_cast<int64_t>(y) & 1))
               ? copysign(result, x)
               : result;
  }

  if (x == 2.0) {
    int y_int = static_cast<int>(y);
    if (y == y_int) {
      return std::ldexp(1.0, y_int);
    }
  }
#endif

  // The checks for special cases can be dropped in ia32 because it has already
  // been done in generated code before bailing out here.
  if (std::isnan(y) || ((x == 1 || x == -1) && std::isinf(y))) {
    return std::numeric_limits<double>::quiet_NaN();
  }
  return std::pow(x, y);
}


1529 1530 1531 1532
ExternalReference ExternalReference::power_double_double_function(
    Isolate* isolate) {
  return ExternalReference(Redirect(isolate,
                                    FUNCTION_ADDR(power_double_double),
1533
                                    BUILTIN_FP_FP_CALL));
1534 1535 1536
}


1537 1538 1539 1540
ExternalReference ExternalReference::power_double_int_function(
    Isolate* isolate) {
  return ExternalReference(Redirect(isolate,
                                    FUNCTION_ADDR(power_double_int),
1541
                                    BUILTIN_FP_INT_CALL));
1542 1543 1544
}


1545
bool EvalComparison(Token::Value op, double op1, double op2) {
1546
  DCHECK(Token::IsCompareOp(op));
1547 1548 1549 1550 1551 1552 1553 1554 1555 1556 1557 1558
  switch (op) {
    case Token::EQ:
    case Token::EQ_STRICT: return (op1 == op2);
    case Token::NE: return (op1 != op2);
    case Token::LT: return (op1 < op2);
    case Token::GT: return (op1 > op2);
    case Token::LTE: return (op1 <= op2);
    case Token::GTE: return (op1 >= op2);
    default:
      UNREACHABLE();
      return false;
  }
1559 1560 1561
}


1562 1563
ExternalReference ExternalReference::mod_two_doubles_operation(
    Isolate* isolate) {
1564
  return ExternalReference(Redirect(isolate,
1565
                                    FUNCTION_ADDR(modulo),
1566
                                    BUILTIN_FP_FP_CALL));
1567 1568
}

1569

1570 1571 1572
ExternalReference ExternalReference::debug_step_in_fp_address(
    Isolate* isolate) {
  return ExternalReference(isolate->debug()->step_in_fp_addr());
1573 1574
}

1575

1576 1577 1578 1579 1580 1581
ExternalReference ExternalReference::fixed_typed_array_base_data_offset() {
  return ExternalReference(reinterpret_cast<void*>(
      FixedTypedArrayBase::kDataOffset - kHeapObjectTag));
}


1582 1583 1584 1585 1586 1587 1588 1589 1590 1591 1592 1593 1594 1595 1596 1597 1598 1599 1600 1601 1602 1603 1604
bool operator==(ExternalReference lhs, ExternalReference rhs) {
  return lhs.address() == rhs.address();
}


bool operator!=(ExternalReference lhs, ExternalReference rhs) {
  return !(lhs == rhs);
}


size_t hash_value(ExternalReference reference) {
  return base::hash<Address>()(reference.address());
}


std::ostream& operator<<(std::ostream& os, ExternalReference reference) {
  os << static_cast<const void*>(reference.address());
  const Runtime::Function* fn = Runtime::FunctionForEntry(reference.address());
  if (fn) os << "<" << fn->name << ".entry>";
  return os;
}


1605
void PositionsRecorder::RecordPosition(int pos) {
1606 1607
  DCHECK(pos != RelocInfo::kNoPosition);
  DCHECK(pos >= 0);
1608
  state_.current_position = pos;
1609 1610 1611 1612
  LOG_CODE_EVENT(assembler_->isolate(),
                 CodeLinePosInfoAddPositionEvent(jit_handler_data_,
                                                 assembler_->pc_offset(),
                                                 pos));
1613 1614 1615 1616
}


void PositionsRecorder::RecordStatementPosition(int pos) {
1617 1618
  DCHECK(pos != RelocInfo::kNoPosition);
  DCHECK(pos >= 0);
1619
  state_.current_statement_position = pos;
1620 1621 1622 1623 1624
  LOG_CODE_EVENT(assembler_->isolate(),
                 CodeLinePosInfoAddStatementPositionEvent(
                     jit_handler_data_,
                     assembler_->pc_offset(),
                     pos));
1625 1626 1627 1628 1629 1630 1631 1632
}


bool PositionsRecorder::WriteRecordedPositions() {
  bool written = false;

  // Write the statement position if it is different from what was written last
  // time.
1633
  if (state_.current_statement_position != state_.written_statement_position) {
1634 1635
    EnsureSpace ensure_space(assembler_);
    assembler_->RecordRelocInfo(RelocInfo::STATEMENT_POSITION,
1636
                                state_.current_statement_position);
1637 1638
    state_.written_position = state_.current_statement_position;
    state_.written_statement_position = state_.current_statement_position;
1639 1640 1641 1642
    written = true;
  }

  // Write the position if it is different from what was written last time and
1643
  // also different from the statement position that was just written.
1644
  if (state_.current_position != state_.written_position) {
1645
    EnsureSpace ensure_space(assembler_);
1646
    assembler_->RecordRelocInfo(RelocInfo::POSITION, state_.current_position);
1647
    state_.written_position = state_.current_position;
1648 1649 1650 1651 1652 1653 1654
    written = true;
  }

  // Return whether something was written.
  return written;
}

1655

1656 1657 1658 1659 1660 1661 1662 1663 1664 1665 1666 1667 1668 1669 1670 1671 1672 1673 1674 1675 1676 1677 1678 1679 1680 1681 1682 1683 1684 1685 1686 1687 1688 1689 1690 1691 1692 1693 1694 1695 1696 1697 1698 1699 1700 1701 1702 1703 1704 1705 1706 1707 1708 1709 1710 1711 1712 1713 1714 1715 1716 1717 1718 1719 1720 1721 1722 1723 1724 1725 1726 1727 1728 1729 1730 1731 1732 1733 1734 1735 1736 1737 1738 1739 1740 1741 1742 1743 1744 1745 1746 1747 1748 1749 1750 1751 1752 1753 1754 1755 1756 1757 1758 1759 1760 1761 1762 1763 1764 1765 1766 1767 1768 1769 1770 1771 1772 1773 1774 1775 1776 1777 1778 1779 1780 1781 1782 1783 1784 1785 1786 1787 1788 1789 1790 1791 1792 1793 1794 1795 1796 1797 1798 1799 1800 1801 1802 1803 1804 1805 1806 1807 1808 1809 1810 1811 1812 1813 1814 1815 1816 1817 1818 1819 1820 1821 1822 1823 1824 1825 1826 1827 1828 1829 1830 1831 1832 1833 1834
ConstantPoolBuilder::ConstantPoolBuilder(int ptr_reach_bits,
                                         int double_reach_bits) {
  info_[ConstantPoolEntry::INTPTR].entries.reserve(64);
  info_[ConstantPoolEntry::INTPTR].regular_reach_bits = ptr_reach_bits;
  info_[ConstantPoolEntry::DOUBLE].regular_reach_bits = double_reach_bits;
}


// Determines whether the next entry of |type| would still fit in the regular
// (directly reachable) section of the pool, or must go to the overflow
// section.  The pool layout places all doubles first, then all pointers.
ConstantPoolEntry::Access ConstantPoolBuilder::NextAccess(
    ConstantPoolEntry::Type type) const {
  const PerTypeEntryInfo& info = info_[type];

  // Once a type has overflowed, every later entry of that type overflows too.
  if (info.overflow()) return ConstantPoolEntry::OVERFLOWED;

  int dbl_count = info_[ConstantPoolEntry::DOUBLE].regular_count;
  int dbl_offset = dbl_count * kDoubleSize;
  int ptr_count = info_[ConstantPoolEntry::INTPTR].regular_count;
  // Pointer entries start after the double section.
  int ptr_offset = ptr_count * kPointerSize + dbl_offset;

  if (type == ConstantPoolEntry::DOUBLE) {
    // Double overflow detection must take into account the reach for both types
    int ptr_reach_bits = info_[ConstantPoolEntry::INTPTR].regular_reach_bits;
    // Adding a double shifts every pointer entry by kDoubleSize; the last
    // pointer slot must stay within pointer reach as well.
    if (!is_uintn(dbl_offset, info.regular_reach_bits) ||
        (ptr_count > 0 &&
         !is_uintn(ptr_offset + kDoubleSize - kPointerSize, ptr_reach_bits))) {
      return ConstantPoolEntry::OVERFLOWED;
    }
  } else {
    DCHECK(type == ConstantPoolEntry::INTPTR);
    if (!is_uintn(ptr_offset, info.regular_reach_bits)) {
      return ConstantPoolEntry::OVERFLOWED;
    }
  }

  return ConstantPoolEntry::REGULAR;
}


// Adds |entry| to the pool, deduplicating against previously added sharable
// entries of the same value.  Returns whether the entry will be emitted in
// the regular or the overflow section.  Must be called before the pool
// position label is bound (i.e. before emission).
ConstantPoolEntry::Access ConstantPoolBuilder::AddEntry(
    ConstantPoolEntry& entry, ConstantPoolEntry::Type type) {
  DCHECK(!emitted_label_.is_bound());
  PerTypeEntryInfo& info = info_[type];
  const int entry_size = ConstantPoolEntry::size(type);
  bool merged = false;

  if (entry.sharing_ok()) {
    // Try to merge entries
    std::vector<ConstantPoolEntry>::iterator it = info.shared_entries.begin();
    int end = static_cast<int>(info.shared_entries.size());
    for (int i = 0; i < end; i++, it++) {
      // Compare by the value width matching the entry type.
      if ((entry_size == kPointerSize) ? entry.value() == it->value()
                                       : entry.value64() == it->value64()) {
        // Merge with found entry.
        entry.set_merged_index(i);
        merged = true;
        break;
      }
    }
  }

  // By definition, merged entries have regular access.
  DCHECK(!merged || entry.merged_index() < info.regular_count);
  ConstantPoolEntry::Access access =
      (merged ? ConstantPoolEntry::REGULAR : NextAccess(type));

  // Enforce an upper bound on search time by limiting the search to
  // unique sharable entries which fit in the regular section.
  if (entry.sharing_ok() && !merged && access == ConstantPoolEntry::REGULAR) {
    info.shared_entries.push_back(entry);
  } else {
    info.entries.push_back(entry);
  }

  // We're done if we found a match or have already triggered the
  // overflow state.
  if (merged || info.overflow()) return access;

  if (access == ConstantPoolEntry::REGULAR) {
    info.regular_count++;
  } else {
    // First overflowed entry of this type: remember where overflow begins.
    info.overflow_start = static_cast<int>(info.entries.size()) - 1;
  }

  return access;
}


void ConstantPoolBuilder::EmitSharedEntries(Assembler* assm,
                                            ConstantPoolEntry::Type type) {
  // Emit every unique sharable entry of |type|, recording each entry's
  // offset (for later reuse by merged entries) and patching its load.
  PerTypeEntryInfo& info = info_[type];
  std::vector<ConstantPoolEntry>& shared_entries = info.shared_entries;
  const int entry_size = ConstantPoolEntry::size(type);
  int base = emitted_label_.pos();
  DCHECK(base > 0);
  const int count = static_cast<int>(shared_entries.size());
  for (int index = 0; index < count; index++) {
    ConstantPoolEntry& shared = shared_entries[index];
    int offset = assm->pc_offset() - base;
    shared.set_offset(offset);  // Save offset for merged entries.
    if (entry_size == kPointerSize) {
      assm->dp(shared.value());
    } else {
      assm->dq(shared.value64());
    }
    DCHECK(is_uintn(offset, info.regular_reach_bits));

    // Patch load sequence with correct offset.
    assm->PatchConstantPoolAccessInstruction(shared.position(), offset,
                                             ConstantPoolEntry::REGULAR, type);
  }
}


void ConstantPoolBuilder::EmitGroup(Assembler* assm,
                                    ConstantPoolEntry::Access access,
                                    ConstantPoolEntry::Type type) {
  // Emit the subset of |type| entries reachable via |access|, patching
  // each corresponding load sequence with its resolved pool offset.
  PerTypeEntryInfo& info = info_[type];
  const bool overflow = info.overflow();
  std::vector<ConstantPoolEntry>& entries = info.entries;
  std::vector<ConstantPoolEntry>& shared_entries = info.shared_entries;
  const int entry_size = ConstantPoolEntry::size(type);
  int base = emitted_label_.pos();
  DCHECK(base > 0);
  int begin;
  int end;

  if (access == ConstantPoolEntry::REGULAR) {
    // Shared entries are always regular; emit them first.
    EmitSharedEntries(assm, type);
    begin = 0;
    end = overflow ? info.overflow_start : static_cast<int>(entries.size());
  } else {
    DCHECK(access == ConstantPoolEntry::OVERFLOWED);
    if (!overflow) return;
    begin = info.overflow_start;
    end = static_cast<int>(entries.size());
  }

  for (int index = begin; index < end; index++) {
    ConstantPoolEntry& entry = entries[index];
    // Update constant pool if necessary and get the entry's offset.
    int offset;
    ConstantPoolEntry::Access entry_access;
    if (entry.is_merged()) {
      // Value already emitted as a shared entry; reuse its offset.
      offset = shared_entries[entry.merged_index()].offset();
      entry_access = ConstantPoolEntry::REGULAR;
    } else {
      // Emit a new pool slot for this entry.
      offset = assm->pc_offset() - base;
      entry_access = access;
      if (entry_size == kPointerSize) {
        assm->dp(entry.value());
      } else {
        assm->dq(entry.value64());
      }
    }

    DCHECK(entry_access == ConstantPoolEntry::OVERFLOWED ||
           is_uintn(offset, info.regular_reach_bits));

    // Patch load sequence with correct offset.
    assm->PatchConstantPoolAccessInstruction(entry.position(), offset,
                                             entry_access, type);
  }
}


// Emit and return position of pool.  Zero implies no constant pool.
int ConstantPoolBuilder::Emit(Assembler* assm) {
  // Defect fixed: stray line-number artifacts ("1835", "1836 1837 ...")
  // embedded in the body made this function uncompilable; they are removed.
  bool emitted = emitted_label_.is_bound();
  bool empty = IsEmpty();

  if (!emitted) {
    // Mark start of constant pool.  Align if necessary.
    if (!empty) assm->DataAlign(kDoubleSize);
    assm->bind(&emitted_label_);
    if (!empty) {
      // Emit in groups based on access and type.
      // Emit doubles first for alignment purposes.
      EmitGroup(assm, ConstantPoolEntry::REGULAR, ConstantPoolEntry::DOUBLE);
      EmitGroup(assm, ConstantPoolEntry::REGULAR, ConstantPoolEntry::INTPTR);
      if (info_[ConstantPoolEntry::DOUBLE].overflow()) {
        // Regular intptr entries may have left the pc misaligned for
        // doubles; realign before the overflowed double group.
        assm->DataAlign(kDoubleSize);
        EmitGroup(assm, ConstantPoolEntry::OVERFLOWED,
                  ConstantPoolEntry::DOUBLE);
      }
      if (info_[ConstantPoolEntry::INTPTR].overflow()) {
        EmitGroup(assm, ConstantPoolEntry::OVERFLOWED,
                  ConstantPoolEntry::INTPTR);
      }
    }
  }

  return !empty ? emitted_label_.pos() : 0;
}


// Platform specific but identical code for all the platforms.


1861 1862
void Assembler::RecordDeoptReason(const int reason,
                                  const SourcePosition position) {
1863 1864
  if (FLAG_trace_deopt || isolate()->cpu_profiler()->is_profiling()) {
    EnsureSpace ensure_space(this);
1865
    int raw_position = position.IsUnknown() ? 0 : position.raw();
1866 1867 1868 1869 1870 1871 1872 1873 1874 1875 1876 1877 1878 1879
    RecordRelocInfo(RelocInfo::POSITION, raw_position);
    RecordRelocInfo(RelocInfo::DEOPT_REASON, reason);
  }
}


void Assembler::RecordComment(const char* msg) {
  // Emit a COMMENT reloc entry pointing at |msg|; only when the
  // --code-comments flag is on.
  if (!FLAG_code_comments) return;
  EnsureSpace ensure_space(this);
  RecordRelocInfo(RelocInfo::COMMENT, reinterpret_cast<intptr_t>(msg));
}


1880 1881 1882 1883 1884 1885
void Assembler::RecordGeneratorContinuation() {
  EnsureSpace ensure_space(this);
  RecordRelocInfo(RelocInfo::GENERATOR_CONTINUATION);
}


1886
void Assembler::RecordDebugBreakSlot(RelocInfo::Mode mode, int call_argc) {
1887
  EnsureSpace ensure_space(this);
1888 1889 1890
  DCHECK(RelocInfo::IsDebugBreakSlot(mode));
  intptr_t data = static_cast<intptr_t>(call_argc);
  RecordRelocInfo(mode, data);
1891
}
1892 1893 1894 1895 1896 1897 1898 1899


void Assembler::DataAlign(int m) {
  // Pad the instruction stream with zero bytes until the pc offset is a
  // multiple of |m| (which must be a power of two, at least 2).
  DCHECK(m >= 2 && base::bits::IsPowerOfTwo32(m));
  const int mask = m - 1;
  while ((pc_offset() & mask) != 0) {
    db(0);
  }
}
}  // namespace internal
}  // namespace v8