// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_HEAP_INL_H_
#define V8_HEAP_HEAP_INL_H_

#include <cmath>

#include "src/base/platform/platform.h"
#include "src/counters.h"
#include "src/heap/heap.h"
#include "src/heap/incremental-marking-inl.h"
#include "src/heap/spaces-inl.h"
#include "src/heap/store-buffer.h"
#include "src/heap/store-buffer-inl.h"
#include "src/heap-profiler.h"
#include "src/isolate.h"
#include "src/list-inl.h"
#include "src/log.h"
#include "src/msan.h"
#include "src/objects.h"

namespace v8 {
namespace internal {

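// Add an entry to the promotion queue. Entries normally go into the in-place
// queue via rear_; once rear_ would cross limit_, RelocateQueueHead() is
// expected to set up emergency_stack_, and this and all further entries are
// pushed onto it instead.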
void PromotionQueue::insert(HeapObject* target, int size) {
  if (emergency_stack_ != NULL) {
    emergency_stack_->Add(Entry(target, size));
    return;
  }

  if ((rear_ - 2) < limit_) {
    RelocateQueueHead();
    emergency_stack_->Add(Entry(target, size));
    return;
  }

  *(--rear_) = reinterpret_cast<intptr_t>(target);
  *(--rear_) = size;
// Assert no overflow into live objects.
#ifdef DEBUG
  SemiSpace::AssertValidRange(target->GetIsolate()->heap()->new_space()->top(),
                              reinterpret_cast<Address>(rear_));
#endif
}


#define ROOT_ACCESSOR(type, name, camel_name) \
  type* Heap::name() { return type::cast(roots_[k##camel_name##RootIndex]); }
ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR

#define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
  Map* Heap::name##_map() { return Map::cast(roots_[k##Name##MapRootIndex]); }
STRUCT_LIST(STRUCT_MAP_ACCESSOR)
#undef STRUCT_MAP_ACCESSOR

#define STRING_ACCESSOR(name, str) \
  String* Heap::name() { return String::cast(roots_[k##name##RootIndex]); }
INTERNALIZED_STRING_LIST(STRING_ACCESSOR)
#undef STRING_ACCESSOR

#define SYMBOL_ACCESSOR(name) \
  Symbol* Heap::name() { return Symbol::cast(roots_[k##name##RootIndex]); }
PRIVATE_SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR

#define SYMBOL_ACCESSOR(name, varname, description) \
  Symbol* Heap::name() { return Symbol::cast(roots_[k##name##RootIndex]); }
PUBLIC_SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR

#define ROOT_ACCESSOR(type, name, camel_name)                                 \
  void Heap::set_##name(type* value) {                                        \
    /* The deserializer makes use of the fact that these common roots are */  \
    /* never in new space and never on a page that is being compacted.    */  \
    DCHECK(!deserialization_complete() ||                                     \
           RootCanBeWrittenAfterInitialization(k##camel_name##RootIndex));    \
    DCHECK(k##camel_name##RootIndex >= kOldSpaceRoots || !InNewSpace(value)); \
    roots_[k##camel_name##RootIndex] = value;                                 \
  }
ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR


template <>
inline bool Heap::IsOneByte(Vector<const char> str, int chars) {
  // TODO(dcarney): incorporate Latin-1 check when Latin-1 is supported?
  return chars == str.length();
}


template <>
inline bool Heap::IsOneByte(String* str, int chars) {
  return str->IsOneByteRepresentation();
}


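// UTF-8 data that decodes to exactly str.length() characters is plain ASCII
// and can be stored directly as a one-byte string; everything else goes
// through the generic (two-byte) internalization path.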
AllocationResult Heap::AllocateInternalizedStringFromUtf8(
    Vector<const char> str, int chars, uint32_t hash_field) {
  if (IsOneByte(str, chars)) {
    return AllocateOneByteInternalizedString(Vector<const uint8_t>::cast(str),
                                             hash_field);
  }
  return AllocateInternalizedStringImpl<false>(str, chars, hash_field);
}


template <typename T>
AllocationResult Heap::AllocateInternalizedStringImpl(T t, int chars,
                                                      uint32_t hash_field) {
  if (IsOneByte(t, chars)) {
    return AllocateInternalizedStringImpl<true>(t, chars, hash_field);
  }
  return AllocateInternalizedStringImpl<false>(t, chars, hash_field);
}


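// Allocate a sequential one-byte internalized string in the old generation
// (TENURED) and initialize its map, length, hash field and character data.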
AllocationResult Heap::AllocateOneByteInternalizedString(
    Vector<const uint8_t> str, uint32_t hash_field) {
  CHECK_GE(String::kMaxLength, str.length());
  // Compute map and object size.
  Map* map = one_byte_internalized_string_map();
  int size = SeqOneByteString::SizeFor(str.length());
  AllocationSpace space = SelectSpace(size, TENURED);

  // Allocate string.
  HeapObject* result;
  {
    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
    if (!allocation.To(&result)) return allocation;
  }

  // String maps are all immortal immovable objects.
  result->set_map_no_write_barrier(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);

  DCHECK_EQ(size, answer->Size());

  // Fill in the characters.
  MemCopy(answer->address() + SeqOneByteString::kHeaderSize, str.start(),
          str.length());

  return answer;
}


AllocationResult Heap::AllocateTwoByteInternalizedString(Vector<const uc16> str,
                                                         uint32_t hash_field) {
  CHECK_GE(String::kMaxLength, str.length());
  // Compute map and object size.
  Map* map = internalized_string_map();
  int size = SeqTwoByteString::SizeFor(str.length());
  AllocationSpace space = SelectSpace(size, TENURED);

  // Allocate string.
  HeapObject* result;
  {
    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
    if (!allocation.To(&result)) return allocation;
  }

  result->set_map(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);

  DCHECK_EQ(size, answer->Size());

  // Fill in the characters.
  MemCopy(answer->address() + SeqTwoByteString::kHeaderSize, str.start(),
          str.length() * kUC16Size);

  return answer;
}

AllocationResult Heap::CopyFixedArray(FixedArray* src) {
  if (src->length() == 0) return src;
  return CopyFixedArrayWithMap(src, src->map());
}


AllocationResult Heap::CopyFixedDoubleArray(FixedDoubleArray* src) {
  if (src->length() == 0) return src;
  return CopyFixedDoubleArrayWithMap(src, src->map());
}


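// Central raw allocation routine. New-space allocations that fail while an
// AlwaysAllocateScope is active are retried in |retry_space|; oversized code
// objects go to the large object space, and a failed old-generation
// allocation sets old_gen_exhausted_.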
AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
                                   AllocationSpace retry_space,
                                   AllocationAlignment alignment) {
  DCHECK(AllowHandleAllocation::IsAllowed());
  DCHECK(AllowHeapAllocation::IsAllowed());
  DCHECK(gc_state_ == NOT_IN_GC);
#ifdef DEBUG
  if (FLAG_gc_interval >= 0 && AllowAllocationFailure::IsAllowed(isolate_) &&
      Heap::allocation_timeout_-- <= 0) {
    return AllocationResult::Retry(space);
  }
  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();
#endif

  HeapObject* object;
  AllocationResult allocation;
  if (NEW_SPACE == space) {
    allocation = new_space_.AllocateRaw(size_in_bytes, alignment);
    if (always_allocate() && allocation.IsRetry() && retry_space != NEW_SPACE) {
      space = retry_space;
    } else {
      if (allocation.To(&object)) {
        OnAllocationEvent(object, size_in_bytes);
      }
      return allocation;
    }
  }

  if (OLD_SPACE == space) {
    allocation = old_space_->AllocateRaw(size_in_bytes, alignment);
  } else if (CODE_SPACE == space) {
    if (size_in_bytes <= code_space()->AreaSize()) {
      allocation = code_space_->AllocateRawUnaligned(size_in_bytes);
    } else {
      // Large code objects are allocated in large object space.
      allocation = lo_space_->AllocateRaw(size_in_bytes, EXECUTABLE);
    }
  } else if (LO_SPACE == space) {
    allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
  } else {
    DCHECK(MAP_SPACE == space);
    allocation = map_space_->AllocateRawUnaligned(size_in_bytes);
  }
  if (allocation.To(&object)) {
    OnAllocationEvent(object, size_in_bytes);
  } else {
    old_gen_exhausted_ = true;
  }
  return allocation;
}


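// Called for every successful allocation: informs the heap profiler and,
// under FLAG_verify_predictable / FLAG_trace_allocation_stack_interval,
// updates the allocations hash or dumps the current stack.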
void Heap::OnAllocationEvent(HeapObject* object, int size_in_bytes) {
  HeapProfiler* profiler = isolate_->heap_profiler();
  if (profiler->is_tracking_allocations()) {
    profiler->AllocationEvent(object->address(), size_in_bytes);
  }

  ++allocations_count_;

  if (FLAG_verify_predictable) {
    UpdateAllocationsHash(object);
    UpdateAllocationsHash(size_in_bytes);

    if ((FLAG_dump_allocations_digest_at_alloc > 0) &&
        (--dump_allocations_hash_countdown_ == 0)) {
      dump_allocations_hash_countdown_ = FLAG_dump_allocations_digest_at_alloc;
      PrintAlloctionsHash();
    }
  }

  if (FLAG_trace_allocation_stack_interval > 0) {
    if (allocations_count_ % FLAG_trace_allocation_stack_interval == 0) {
      isolate()->PrintStack(stdout, Isolate::kPrintStackConcise);
    }
  }
}


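// Called when an object is moved during GC: informs the heap profiler, logs
// moves of SharedFunctionInfos for the code-event log, and feeds the
// predictable-allocation hash when FLAG_verify_predictable is on.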
void Heap::OnMoveEvent(HeapObject* target, HeapObject* source,
                       int size_in_bytes) {
  HeapProfiler* heap_profiler = isolate_->heap_profiler();
  if (heap_profiler->is_tracking_object_moves()) {
    heap_profiler->ObjectMoveEvent(source->address(), target->address(),
                                   size_in_bytes);
  }
  if (target->IsSharedFunctionInfo()) {
    LOG_CODE_EVENT(isolate_, SharedFunctionInfoMoveEvent(source->address(),
                                                         target->address()));
  }

  if (FLAG_verify_predictable) {
    ++allocations_count_;

    UpdateAllocationsHash(source);
    UpdateAllocationsHash(target);
    UpdateAllocationsHash(size_in_bytes);

    if ((FLAG_dump_allocations_digest_at_alloc > 0) &&
        (--dump_allocations_hash_countdown_ == 0)) {
      dump_allocations_hash_countdown_ = FLAG_dump_allocations_digest_at_alloc;
      PrintAlloctionsHash();
    }
  }
}


void Heap::UpdateAllocationsHash(HeapObject* object) {
  Address object_address = object->address();
  MemoryChunk* memory_chunk = MemoryChunk::FromAddress(object_address);
  AllocationSpace allocation_space = memory_chunk->owner()->identity();

  STATIC_ASSERT(kSpaceTagSize + kPageSizeBits <= 32);
  uint32_t value =
      static_cast<uint32_t>(object_address - memory_chunk->address()) |
      (static_cast<uint32_t>(allocation_space) << kPageSizeBits);

  UpdateAllocationsHash(value);
}


void Heap::UpdateAllocationsHash(uint32_t value) {
  uint16_t c1 = static_cast<uint16_t>(value);
  uint16_t c2 = static_cast<uint16_t>(value >> 16);
  raw_allocations_hash_ =
      StringHasher::AddCharacterCore(raw_allocations_hash_, c1);
  raw_allocations_hash_ =
      StringHasher::AddCharacterCore(raw_allocations_hash_, c2);
}


void Heap::PrintAlloctionsHash() {
  uint32_t hash = StringHasher::GetHashCore(raw_allocations_hash_);
  PrintF("\n### Allocations = %u, hash = 0x%08x\n", allocations_count_, hash);
}


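// Dispose of the external resource backing an external string. The resource
// pointer is read directly out of the object at kResourceOffset.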
void Heap::FinalizeExternalString(String* string) {
  DCHECK(string->IsExternalString());
  v8::String::ExternalStringResourceBase** resource_addr =
      reinterpret_cast<v8::String::ExternalStringResourceBase**>(
          reinterpret_cast<byte*>(string) + ExternalString::kResourceOffset -
          kHeapObjectTag);

  // Dispose of the C++ object if it has not already been disposed.
  if (*resource_addr != NULL) {
    (*resource_addr)->Dispose();
    *resource_addr = NULL;
  }
}


bool Heap::InNewSpace(Object* object) {
  bool result = new_space_.Contains(object);
  DCHECK(!result ||                 // Either not in new space
         gc_state_ != NOT_IN_GC ||  // ... or in the middle of GC
         InToSpace(object));        // ... or in to-space (where we allocate).
  return result;
}


bool Heap::InNewSpace(Address address) { return new_space_.Contains(address); }


bool Heap::InFromSpace(Object* object) {
  return new_space_.FromSpaceContains(object);
}


bool Heap::InToSpace(Object* object) {
  return new_space_.ToSpaceContains(object);
}


bool Heap::InOldSpace(Address address) { return old_space_->Contains(address); }


bool Heap::InOldSpace(Object* object) {
  return InOldSpace(reinterpret_cast<Address>(object));
}


bool Heap::OldGenerationAllocationLimitReached() {
  if (!incremental_marking()->IsStopped()) return false;
  return OldGenerationSpaceAvailable() < 0;
}


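// An object is promoted out of new space once it has survived a scavenge,
// i.e. when it lies below the new-space age mark.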
bool Heap::ShouldBePromoted(Address old_address, int object_size) {
  NewSpacePage* page = NewSpacePage::FromAddress(old_address);
  Address age_mark = new_space_.age_mark();
  return page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK) &&
         (!page->ContainsLimit(age_mark) || old_address < age_mark);
}


void Heap::RecordWrite(Address address, int offset) {
  if (!InNewSpace(address)) store_buffer_.Mark(address + offset);
}


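// Record |len| consecutive pointer-sized slots starting at |address| + |start|
// in the store buffer; writes into new-space objects need not be recorded.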
void Heap::RecordWrites(Address address, int start, int len) {
  if (!InNewSpace(address)) {
    for (int i = 0; i < len; i++) {
      store_buffer_.Mark(address + start + i * kPointerSize);
    }
  }
}


bool Heap::AllowedToBeMigrated(HeapObject* obj, AllocationSpace dst) {
  // Object migration is governed by the following rules:
  //
  // 1) Objects in new-space can be migrated to the old space
  //    that matches their target space or they stay in new-space.
  // 2) Objects in old-space stay in the same space when migrating.
  // 3) Fillers (two or more words) can migrate due to left-trimming of
  //    fixed arrays in new-space or old space.
  // 4) Fillers (one word) can never migrate, they are skipped by
  //    incremental marking explicitly to prevent invalid pattern.
  //
  // Since this function is used for debugging only, we do not place
  // asserts here, but check everything explicitly.
  if (obj->map() == one_pointer_filler_map()) return false;
  InstanceType type = obj->map()->instance_type();
  MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
  AllocationSpace src = chunk->owner()->identity();
  switch (src) {
    case NEW_SPACE:
      return dst == src || dst == OLD_SPACE;
    case OLD_SPACE:
      return dst == src &&
             (dst == OLD_SPACE || obj->IsFiller() || obj->IsExternalString());
    case CODE_SPACE:
      return dst == src && type == CODE_TYPE;
    case MAP_SPACE:
    case LO_SPACE:
      return false;
  }
  UNREACHABLE();
  return false;
}


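// Word-wise copy of |byte_size| bytes from |src| to |dst|; the regions are
// assumed not to overlap (see MoveBlock() below for the overlapping case).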
void Heap::CopyBlock(Address dst, Address src, int byte_size) {
  CopyWords(reinterpret_cast<Object**>(dst), reinterpret_cast<Object**>(src),
            static_cast<size_t>(byte_size / kPointerSize));
}


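// Like CopyBlock(), but safe for overlapping regions: a forward word copy is
// used when it cannot clobber unread source words, otherwise MemMove() does
// the work.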
void Heap::MoveBlock(Address dst, Address src, int byte_size) {
  DCHECK(IsAligned(byte_size, kPointerSize));

  int size_in_words = byte_size / kPointerSize;

  if ((dst < src) || (dst >= (src + byte_size))) {
    Object** src_slot = reinterpret_cast<Object**>(src);
    Object** dst_slot = reinterpret_cast<Object**>(dst);
    Object** end_slot = src_slot + size_in_words;

    while (src_slot != end_slot) {
      *dst_slot++ = *src_slot++;
    }
  } else {
    MemMove(dst, src, static_cast<size_t>(byte_size));
  }
}


void Heap::ScavengePointer(HeapObject** p) { ScavengeObject(p, *p); }


AllocationMemento* Heap::FindAllocationMemento(HeapObject* object) {
  // Check if there is potentially a memento behind the object. If
  // the last word of the memento is on another page we return
  // immediately.
  Address object_address = object->address();
  Address memento_address = object_address + object->Size();
  Address last_memento_word_address = memento_address + kPointerSize;
  if (!NewSpacePage::OnSamePage(object_address, last_memento_word_address)) {
    return NULL;
  }

  HeapObject* candidate = HeapObject::FromAddress(memento_address);
  Map* candidate_map = candidate->map();
  // This fast check may peek at an uninitialized word. However, the slow check
  // below (memento_address == top) ensures that this is safe. Mark the word as
  // initialized to silence MemorySanitizer warnings.
  MSAN_MEMORY_IS_INITIALIZED(&candidate_map, sizeof(candidate_map));
  if (candidate_map != allocation_memento_map()) return NULL;

  // Either the object is the last object in the new space, or there is another
  // object of at least word size (the header map word) following it, so
  // suffices to compare ptr and top here. Note that technically we do not have
  // to compare with the current top pointer of the from space page during GC,
  // since we always install filler objects above the top pointer of a from
  // space page when performing a garbage collection. However, always performing
  // the test makes it possible to have a single, unified version of
  // FindAllocationMemento that is used both by the GC and the mutator.
  Address top = NewSpaceTop();
  DCHECK(memento_address == top ||
         memento_address + HeapObject::kHeaderSize <= top ||
         !NewSpacePage::OnSamePage(memento_address, top - 1));
  if (memento_address == top) return NULL;

  AllocationMemento* memento = AllocationMemento::cast(candidate);
  if (!memento->IsValid()) return NULL;
  return memento;
}


void Heap::UpdateAllocationSiteFeedback(HeapObject* object,
                                        ScratchpadSlotMode mode) {
  Heap* heap = object->GetHeap();
  DCHECK(heap->InFromSpace(object));

  if (!FLAG_allocation_site_pretenuring ||
      !AllocationSite::CanTrack(object->map()->instance_type()))
    return;

  AllocationMemento* memento = heap->FindAllocationMemento(object);
  if (memento == NULL) return;

  if (memento->GetAllocationSite()->IncrementMementoFoundCount()) {
    heap->AddAllocationSiteToScratchpad(memento->GetAllocationSite(), mode);
  }
}


void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
  DCHECK(object->GetIsolate()->heap()->InFromSpace(object));

  // We use the first word (where the map pointer usually is) of a heap
  // object to record the forwarding pointer.  A forwarding pointer can
  // point to an old space, the code space, or the to space of the new
  // generation.
  MapWord first_word = object->map_word();

  // If the first word is a forwarding address, the object has already been
  // copied.
  if (first_word.IsForwardingAddress()) {
    HeapObject* dest = first_word.ToForwardingAddress();
    DCHECK(object->GetIsolate()->heap()->InFromSpace(*p));
    *p = dest;
    return;
  }

  UpdateAllocationSiteFeedback(object, IGNORE_SCRATCHPAD_SLOT);

  // AllocationMementos are unrooted and shouldn't survive a scavenge
  DCHECK(object->map() != object->GetHeap()->allocation_memento_map());
  // Call the slow part of scavenge object.
  return ScavengeObjectSlow(p, object);
}


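// Select the appropriate collector for |space| (scavenge or full
// mark-compact) and run it, passing along the reason strings and callback
// flags.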
bool Heap::CollectGarbage(AllocationSpace space, const char* gc_reason,
                          const v8::GCCallbackFlags callbackFlags) {
  const char* collector_reason = NULL;
  GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
  return CollectGarbage(collector, gc_reason, collector_reason, callbackFlags);
}


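// Recover the owning Isolate from this Heap. The heap is embedded in the
// Isolate, so subtracting the offset of the heap field (computed below with a
// dummy Isolate pointer of 16) yields the Isolate's address.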
Isolate* Heap::isolate() {
  return reinterpret_cast<Isolate*>(
      reinterpret_cast<intptr_t>(this) -
      reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(16)->heap()) + 16);
}


// Calls the FUNCTION_CALL function and retries it up to three times
// to guarantee that any allocations performed during the call will
// succeed if there's enough memory.

// Warning: Do not use the identifiers __object__, __maybe_object__ or
// __scope__ in a call to this macro.

#define RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE) \
  if (__allocation__.To(&__object__)) {                   \
    DCHECK(__object__ != (ISOLATE)->heap()->exception()); \
    RETURN_VALUE;                                         \
  }

#define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)    \
  do {                                                                        \
    AllocationResult __allocation__ = FUNCTION_CALL;                          \
    Object* __object__ = NULL;                                                \
    RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)                         \
    /* Two GCs before panicking.  In newspace will almost always succeed. */  \
    for (int __i__ = 0; __i__ < 2; __i__++) {                                 \
      (ISOLATE)->heap()->CollectGarbage(__allocation__.RetrySpace(),          \
                                        "allocation failure");                \
      __allocation__ = FUNCTION_CALL;                                         \
      RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)                       \
    }                                                                         \
    (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment();        \
    (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc");          \
    {                                                                         \
      AlwaysAllocateScope __scope__(ISOLATE);                                 \
      __allocation__ = FUNCTION_CALL;                                         \
    }                                                                         \
    RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)                         \
    /* TODO(1181417): Fix this. */                                            \
    v8::internal::Heap::FatalProcessOutOfMemory("CALL_AND_RETRY_LAST", true); \
    RETURN_EMPTY;                                                             \
  } while (false)

#define CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, RETURN_VALUE, \
                              RETURN_EMPTY)                         \
  CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)

#define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE)                      \
  CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL,                               \
                        return Handle<TYPE>(TYPE::cast(__object__), ISOLATE), \
                        return Handle<TYPE>())


#define CALL_HEAP_FUNCTION_VOID(ISOLATE, FUNCTION_CALL) \
  CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, return, return)


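// Register an external string; strings are kept in separate lists depending
// on whether they currently live in new or old space.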
void ExternalStringTable::AddString(String* string) {
  DCHECK(string->IsExternalString());
  if (heap_->InNewSpace(string)) {
    new_space_strings_.Add(string);
  } else {
    old_space_strings_.Add(string);
  }
}


void ExternalStringTable::Iterate(ObjectVisitor* v) {
  if (!new_space_strings_.is_empty()) {
    Object** start = &new_space_strings_[0];
    v->VisitPointers(start, start + new_space_strings_.length());
  }
  if (!old_space_strings_.is_empty()) {
    Object** start = &old_space_strings_[0];
    v->VisitPointers(start, start + old_space_strings_.length());
  }
}


// Verify() is inline to avoid ifdef-s around its calls in release
// mode.
void ExternalStringTable::Verify() {
#ifdef DEBUG
  for (int i = 0; i < new_space_strings_.length(); ++i) {
    Object* obj = Object::cast(new_space_strings_[i]);
    DCHECK(heap_->InNewSpace(obj));
    DCHECK(obj != heap_->the_hole_value());
  }
  for (int i = 0; i < old_space_strings_.length(); ++i) {
    Object* obj = Object::cast(old_space_strings_[i]);
    DCHECK(!heap_->InNewSpace(obj));
    DCHECK(obj != heap_->the_hole_value());
  }
#endif
}


void ExternalStringTable::AddOldString(String* string) {
  DCHECK(string->IsExternalString());
  DCHECK(!heap_->InNewSpace(string));
  old_space_strings_.Add(string);
}


void ExternalStringTable::ShrinkNewStrings(int position) {
  new_space_strings_.Rewind(position);
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    Verify();
  }
#endif
}


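// DescriptorLookupCache caches the result of descriptor lookups keyed on
// (map, unique name); Lookup() returns kAbsent on a miss and Update() fills
// in an entry.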
int DescriptorLookupCache::Lookup(Map* source, Name* name) {
  if (!name->IsUniqueName()) return kAbsent;
  int index = Hash(source, name);
  Key& key = keys_[index];
  if ((key.source == source) && (key.name == name)) return results_[index];
  return kAbsent;
}


void DescriptorLookupCache::Update(Map* source, Name* name, int result) {
  DCHECK(result != kAbsent);
  if (name->IsUniqueName()) {
    int index = Hash(source, name);
    Key& key = keys_[index];
    key.source = source;
    key.name = name;
    results_[index] = result;
  }
}


void Heap::ClearInstanceofCache() {
  set_instanceof_cache_function(Smi::FromInt(0));
}


Object* Heap::ToBoolean(bool condition) {
  return condition ? true_value() : false_value();
}


void Heap::CompletelyClearInstanceofCache() {
  set_instanceof_cache_map(Smi::FromInt(0));
  set_instanceof_cache_function(Smi::FromInt(0));
}


uint32_t Heap::HashSeed() {
  uint32_t seed = static_cast<uint32_t>(hash_seed()->value());
  DCHECK(FLAG_randomize_hashes || seed == 0);
  return seed;
}


Smi* Heap::NextScriptId() {
  int next_id = last_script_id()->value() + 1;
  if (!Smi::IsValid(next_id) || next_id < 0) next_id = 1;
  Smi* next_id_smi = Smi::FromInt(next_id);
  set_last_script_id(next_id_smi);
  return next_id_smi;
}


void Heap::SetArgumentsAdaptorDeoptPCOffset(int pc_offset) {
  DCHECK(arguments_adaptor_deopt_pc_offset() == Smi::FromInt(0));
  set_arguments_adaptor_deopt_pc_offset(Smi::FromInt(pc_offset));
}


void Heap::SetConstructStubDeoptPCOffset(int pc_offset) {
  DCHECK(construct_stub_deopt_pc_offset() == Smi::FromInt(0));
  set_construct_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
}


void Heap::SetGetterStubDeoptPCOffset(int pc_offset) {
  DCHECK(getter_stub_deopt_pc_offset() == Smi::FromInt(0));
  set_getter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
}


void Heap::SetSetterStubDeoptPCOffset(int pc_offset) {
  DCHECK(setter_stub_deopt_pc_offset() == Smi::FromInt(0));
  set_setter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
}


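// While at least one AlwaysAllocateScope is active, Heap::always_allocate()
// is true and AllocateRaw() retries failed new-space allocations in the old
// generation.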
AlwaysAllocateScope::AlwaysAllocateScope(Isolate* isolate)
    : heap_(isolate->heap()), daf_(isolate) {
  heap_->always_allocate_scope_depth_++;
}


AlwaysAllocateScope::~AlwaysAllocateScope() {
  heap_->always_allocate_scope_depth_--;
}


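// GCCallbacksScope tracks the nesting depth of GC callback invocations;
// CheckReenter() is true only for the outermost scope.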
GCCallbacksScope::GCCallbacksScope(Heap* heap) : heap_(heap) {
  heap_->gc_callbacks_depth_++;
}


GCCallbacksScope::~GCCallbacksScope() { heap_->gc_callbacks_depth_--; }


bool GCCallbacksScope::CheckReenter() {
  return heap_->gc_callbacks_depth_ == 1;
}


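// Verification visitor: every heap object reachable through the visited
// slots must be contained in the heap and have a map that is itself a Map.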
void VerifyPointersVisitor::VisitPointers(Object** start, Object** end) {
  for (Object** current = start; current < end; current++) {
    if ((*current)->IsHeapObject()) {
      HeapObject* object = HeapObject::cast(*current);
      CHECK(object->GetIsolate()->heap()->Contains(object));
      CHECK(object->map()->IsMap());
    }
  }
}


void VerifySmisVisitor::VisitPointers(Object** start, Object** end) {
  for (Object** current = start; current < end; current++) {
    CHECK((*current)->IsSmi());
  }
}
}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_HEAP_INL_H_