Commit 47e86b88 authored by dcarney@chromium.org's avatar dcarney@chromium.org

Get rid of most uses of 'Temporary macro' HEAP

R=svenpanne@chromium.org
BUG=

Review URL: https://codereview.chromium.org/23708030

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@16622 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent ce687e4b
......@@ -400,7 +400,7 @@ MaybeObject* Accessors::ScriptGetEvalFromScript(Isolate* isolate,
return *GetScriptWrapper(eval_from_script);
}
}
return HEAP->undefined_value();
return isolate->heap()->undefined_value();
}
......
......@@ -599,7 +599,7 @@ bool Call::ComputeGlobalTarget(Handle<GlobalObject> global,
Handle<JSFunction> candidate(JSFunction::cast(cell_->value()));
// If the function is in new space we assume it's more likely to
// change and thus prefer the general IC code.
if (!HEAP->InNewSpace(*candidate)) {
if (!lookup->isolate()->heap()->InNewSpace(*candidate)) {
target_ = candidate;
return true;
}
......
......@@ -45,7 +45,8 @@ class SourceCodeCache BASE_EMBEDDED {
explicit SourceCodeCache(Script::Type type): type_(type), cache_(NULL) { }
void Initialize(bool create_heap_objects) {
  // Seed the cache with the canonical empty fixed array once heap objects
  // exist; otherwise leave it unset (NULL) until a later initialization.
  // NOTE: the stale pre-refactor `HEAP->empty_fixed_array()` assignment has
  // been dropped — the HEAP macro is being removed by this change.
  cache_ = create_heap_objects ?
      Isolate::Current()->heap()->empty_fixed_array() : NULL;
}
void Iterate(ObjectVisitor* v) {
......
......@@ -303,11 +303,11 @@ static FixedArrayBase* LeftTrimFixedArray(Heap* heap,
} else {
entry_size = kDoubleSize;
}
ASSERT(elms->map() != HEAP->fixed_cow_array_map());
ASSERT(elms->map() != heap->fixed_cow_array_map());
// For now this trick is only applied to fixed arrays in new and paged space.
// In large object space the object's start must coincide with chunk
// and thus the trick is just not applicable.
ASSERT(!HEAP->lo_space()->Contains(elms));
ASSERT(!heap->lo_space()->Contains(elms));
STATIC_ASSERT(FixedArrayBase::kMapOffset == 0);
STATIC_ASSERT(FixedArrayBase::kLengthOffset == kPointerSize);
......
......@@ -725,7 +725,7 @@ Handle<SharedFunctionInfo> Compiler::Compile(Handle<String> source,
}
script->set_is_shared_cross_origin(is_shared_cross_origin);
script->set_data(script_data.is_null() ? HEAP->undefined_value()
script->set_data(script_data.is_null() ? isolate->heap()->undefined_value()
: *script_data);
// Compile the function and add it to the cache.
......@@ -742,8 +742,8 @@ Handle<SharedFunctionInfo> Compiler::Compile(Handle<String> source,
compilation_cache->PutScript(source, context, result);
}
} else {
if (result->ic_age() != HEAP->global_ic_age()) {
result->ResetForNewContext(HEAP->global_ic_age());
if (result->ic_age() != isolate->heap()->global_ic_age()) {
result->ResetForNewContext(isolate->heap()->global_ic_age());
}
}
......@@ -805,8 +805,8 @@ Handle<SharedFunctionInfo> Compiler::CompileEval(Handle<String> source,
}
}
} else {
if (result->ic_age() != HEAP->global_ic_age()) {
result->ResetForNewContext(HEAP->global_ic_age());
if (result->ic_age() != isolate->heap()->global_ic_age()) {
result->ResetForNewContext(isolate->heap()->global_ic_age());
}
}
......
......@@ -772,7 +772,8 @@ void Shell::InstallUtilityScript(Isolate* isolate) {
= i::Handle<i::JSObject>(debug->debug_context()->global_object());
utility_context->Global()->Set(String::New("$debug"),
Utils::ToLocal(js_debug));
debug->debug_context()->set_security_token(HEAP->undefined_value());
debug->debug_context()->set_security_token(
reinterpret_cast<i::Isolate*>(isolate)->heap()->undefined_value());
#endif // ENABLE_DEBUGGER_SUPPORT
// Run the d8 shell utility script in the utility context
......
......@@ -154,7 +154,8 @@ static void CopyObjectToObjectElements(FixedArrayBase* from_base,
ElementsKind to_kind,
uint32_t to_start,
int raw_copy_size) {
ASSERT(to_base->map() != HEAP->fixed_cow_array_map());
ASSERT(to_base->map() !=
from_base->GetIsolate()->heap()->fixed_cow_array_map());
DisallowHeapAllocation no_allocation;
int copy_size = raw_copy_size;
if (raw_copy_size < 0) {
......
......@@ -103,7 +103,8 @@ void ExternalizeStringExtension::Externalize(
reinterpret_cast<char*>(data), string->length());
result = string->MakeExternal(resource);
if (result && !string->IsInternalizedString()) {
HEAP->external_string_table()->AddString(*string);
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(args.GetIsolate());
isolate->heap()->external_string_table()->AddString(*string);
}
if (!result) delete resource;
} else {
......@@ -113,7 +114,8 @@ void ExternalizeStringExtension::Externalize(
data, string->length());
result = string->MakeExternal(resource);
if (result && !string->IsInternalizedString()) {
HEAP->external_string_table()->AddString(*string);
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(args.GetIsolate());
isolate->heap()->external_string_table()->AddString(*string);
}
if (!result) delete resource;
}
......
......@@ -39,10 +39,11 @@ v8::Handle<v8::FunctionTemplate> GCExtension::GetNativeFunction(
// JS-callable gc() hook installed by the GC extension.
// args[0] == true requests a new-space-only collection (scavenge);
// any other value forces a full collection of all spaces.
void GCExtension::GC(const v8::FunctionCallbackInfo<v8::Value>& args) {
  i::Isolate* isolate = reinterpret_cast<i::Isolate*>(args.GetIsolate());
  if (args[0]->BooleanValue()) {
    isolate->heap()->CollectGarbage(NEW_SPACE, "gc extension");
  } else {
    isolate->heap()->CollectAllGarbage(Heap::kNoGCFlags, "gc extension");
  }
}
......
......@@ -489,7 +489,7 @@ Address StackFrame::UnpaddedFP() const {
Code* EntryFrame::unchecked_code() const {
  // An entry frame always runs the JS entry stub; fetch it from the
  // frame's own isolate rather than the removed HEAP macro.
  return isolate()->heap()->js_entry_code();
}
......@@ -512,7 +512,7 @@ StackFrame::Type EntryFrame::GetCallerState(State* state) const {
Code* EntryConstructFrame::unchecked_code() const {
  // A construct entry frame always runs the JS construct-entry stub;
  // fetch it from the frame's own isolate rather than the removed HEAP macro.
  return isolate()->heap()->js_construct_entry_code();
}
......
......@@ -69,7 +69,7 @@ void PromotionQueue::insert(HeapObject* target, int size) {
*(--rear_) = size;
// Assert no overflow into live objects.
#ifdef DEBUG
SemiSpace::AssertValidRange(HEAP->new_space()->top(),
SemiSpace::AssertValidRange(target->GetIsolate()->heap()->new_space()->top(),
reinterpret_cast<Address>(rear_));
#endif
}
......@@ -508,7 +508,7 @@ void Heap::ScavengePointer(HeapObject** p) {
void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
ASSERT(HEAP->InFromSpace(object));
ASSERT(object->GetIsolate()->heap()->InFromSpace(object));
// We use the first word (where the map pointer usually is) of a heap
// object to record the forwarding pointer. A forwarding pointer can
......@@ -520,7 +520,7 @@ void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
// copied.
if (first_word.IsForwardingAddress()) {
HeapObject* dest = first_word.ToForwardingAddress();
ASSERT(HEAP->InFromSpace(*p));
ASSERT(object->GetIsolate()->heap()->InFromSpace(*p));
*p = dest;
return;
}
......@@ -613,10 +613,10 @@ Isolate* Heap::isolate() {
#ifdef DEBUG
// With --gc-greedy, force a garbage-collection opportunity at every
// allocation retry site. The isolate is now passed explicitly since the
// HEAP macro is being removed; the stale zero-argument definition (which
// would have been an illegal redefinition) has been dropped.
#define GC_GREEDY_CHECK(ISOLATE) \
  if (FLAG_gc_greedy) (ISOLATE)->heap()->GarbageCollectionGreedyCheck()
#else
#define GC_GREEDY_CHECK(ISOLATE) { }
#endif
// Calls the FUNCTION_CALL function and retries it up to three times
......@@ -628,7 +628,7 @@ Isolate* Heap::isolate() {
#define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY, OOM)\
do { \
GC_GREEDY_CHECK(); \
GC_GREEDY_CHECK(ISOLATE); \
MaybeObject* __maybe_object__ = FUNCTION_CALL; \
Object* __object__ = NULL; \
if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \
......@@ -636,7 +636,7 @@ Isolate* Heap::isolate() {
OOM; \
} \
if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY; \
ISOLATE->heap()->CollectGarbage(Failure::cast(__maybe_object__)-> \
(ISOLATE)->heap()->CollectGarbage(Failure::cast(__maybe_object__)-> \
allocation_space(), \
"allocation failure"); \
__maybe_object__ = FUNCTION_CALL; \
......@@ -645,8 +645,8 @@ Isolate* Heap::isolate() {
OOM; \
} \
if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY; \
ISOLATE->counters()->gc_last_resort_from_handles()->Increment(); \
ISOLATE->heap()->CollectAllAvailableGarbage("last resort gc"); \
(ISOLATE)->counters()->gc_last_resort_from_handles()->Increment(); \
(ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc"); \
{ \
AlwaysAllocateScope __scope__; \
__maybe_object__ = FUNCTION_CALL; \
......@@ -719,12 +719,12 @@ void ExternalStringTable::Verify() {
for (int i = 0; i < new_space_strings_.length(); ++i) {
Object* obj = Object::cast(new_space_strings_[i]);
ASSERT(heap_->InNewSpace(obj));
ASSERT(obj != HEAP->the_hole_value());
ASSERT(obj != heap_->the_hole_value());
}
for (int i = 0; i < old_space_strings_.length(); ++i) {
Object* obj = Object::cast(old_space_strings_[i]);
ASSERT(!heap_->InNewSpace(obj));
ASSERT(obj != HEAP->the_hole_value());
ASSERT(obj != heap_->the_hole_value());
}
#endif
}
......@@ -831,25 +831,29 @@ AlwaysAllocateScope::AlwaysAllocateScope() {
// non-handle code to call handle code. The code still works but
// performance will degrade, so we want to catch this situation
// in debug mode.
ASSERT(HEAP->always_allocate_scope_depth_ == 0);
HEAP->always_allocate_scope_depth_++;
Isolate* isolate = Isolate::Current();
ASSERT(isolate->heap()->always_allocate_scope_depth_ == 0);
isolate->heap()->always_allocate_scope_depth_++;
}
AlwaysAllocateScope::~AlwaysAllocateScope() {
  // Balance the increment done in the constructor. The assert documents
  // that these scopes are not nested: depth must return to zero on exit.
  // (The duplicated pre-refactor HEAP-based decrement is removed — leaving
  // it in would have decremented the depth twice.)
  Isolate* isolate = Isolate::Current();
  isolate->heap()->always_allocate_scope_depth_--;
  ASSERT(isolate->heap()->always_allocate_scope_depth_ == 0);
}
#ifdef VERIFY_HEAP
NoWeakEmbeddedMapsVerificationScope::NoWeakEmbeddedMapsVerificationScope() {
  // Entering the scope bumps the depth counter that suppresses
  // weak-embedded-map verification for this isolate's heap.
  Isolate* isolate = Isolate::Current();
  isolate->heap()->no_weak_embedded_maps_verification_scope_depth_++;
}
NoWeakEmbeddedMapsVerificationScope::~NoWeakEmbeddedMapsVerificationScope() {
  // Balance the increment from the constructor (scopes may nest).
  Isolate* isolate = Isolate::Current();
  isolate->heap()->no_weak_embedded_maps_verification_scope_depth_--;
}
#endif
......@@ -858,7 +862,7 @@ void VerifyPointersVisitor::VisitPointers(Object** start, Object** end) {
for (Object** current = start; current < end; current++) {
if ((*current)->IsHeapObject()) {
HeapObject* object = HeapObject::cast(*current);
CHECK(HEAP->Contains(object));
CHECK(object->GetIsolate()->heap()->Contains(object));
CHECK(object->map()->IsMap());
}
}
......@@ -866,21 +870,23 @@ void VerifyPointersVisitor::VisitPointers(Object** start, Object** end) {
double GCTracer::SizeOfHeapObjects() {
  // Size of live objects in the tracer's heap, in megabytes. Uses the
  // tracer's own heap_ member instead of the removed HEAP macro.
  return (static_cast<double>(heap_->SizeOfObjects())) / MB;
}
DisallowAllocationFailure::DisallowAllocationFailure() {
#ifdef DEBUG
  // Save the previous flag so scopes nest correctly, then forbid
  // simulated allocation failures on the current isolate's heap.
  // (Stale HEAP-based lines removed; they would have run twice.)
  Isolate* isolate = Isolate::Current();
  old_state_ = isolate->heap()->disallow_allocation_failure_;
  isolate->heap()->disallow_allocation_failure_ = true;
#endif
}
DisallowAllocationFailure::~DisallowAllocationFailure() {
#ifdef DEBUG
  // Restore the flag captured by the constructor (supports nesting).
  Isolate* isolate = Isolate::Current();
  isolate->heap()->disallow_allocation_failure_ = old_state_;
#endif
}
......
......@@ -472,7 +472,7 @@ void HeapObjectsMap::StopHeapObjectsTracking() {
void HeapObjectsMap::UpdateHeapObjectsMap() {
HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
"HeapSnapshotsCollection::UpdateHeapObjectsMap");
HeapIterator iterator(heap_);
for (HeapObject* obj = iterator.next();
......@@ -560,12 +560,13 @@ void HeapObjectsMap::RemoveDeadEntries() {
}
SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
SnapshotObjectId HeapObjectsMap::GenerateId(Heap* heap,
v8::RetainedObjectInfo* info) {
SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash());
const char* label = info->GetLabel();
id ^= StringHasher::HashSequentialString(label,
static_cast<int>(strlen(label)),
HEAP->HashSeed());
heap->HashSeed());
intptr_t element_count = info->GetElementCount();
if (element_count != -1)
id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count),
......@@ -623,7 +624,7 @@ void HeapSnapshotsCollection::RemoveSnapshot(HeapSnapshot* snapshot) {
Handle<HeapObject> HeapSnapshotsCollection::FindHeapObjectById(
SnapshotObjectId id) {
// First perform a full GC in order to avoid dead objects.
HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask,
"HeapSnapshotsCollection::FindHeapObjectById");
DisallowHeapAllocation no_allocation;
HeapObject* object = NULL;
......@@ -1932,7 +1933,7 @@ HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(HeapThing ptr) {
return snapshot_->AddEntry(
entries_type_,
name,
HeapObjectsMap::GenerateId(info),
HeapObjectsMap::GenerateId(collection_->heap(), info),
size != -1 ? static_cast<int>(size) : 0);
}
......@@ -2108,7 +2109,7 @@ NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
uint32_t hash = StringHasher::HashSequentialString(
label_copy,
static_cast<int>(strlen(label_copy)),
HEAP->HashSeed());
isolate_->heap()->HashSeed());
HashMap::Entry* entry = native_groups_.Lookup(const_cast<char*>(label_copy),
hash, true);
if (entry->value == NULL) {
......
......@@ -237,7 +237,7 @@ class HeapObjectsMap {
SnapshotObjectId PushHeapObjectsStats(OutputStream* stream);
size_t GetUsedMemorySize() const;
static SnapshotObjectId GenerateId(v8::RetainedObjectInfo* info);
static SnapshotObjectId GenerateId(Heap* heap, v8::RetainedObjectInfo* info);
static inline SnapshotObjectId GetNthGcSubrootId(int delta);
static const int kObjectIdStep = 2;
......
......@@ -731,7 +731,7 @@ void Heap::MoveElements(FixedArray* array,
int len) {
if (len == 0) return;
ASSERT(array->map() != HEAP->fixed_cow_array_map());
ASSERT(array->map() != fixed_cow_array_map());
Object** dst_objects = array->data_start() + dst_index;
OS::MemMove(dst_objects,
array->data_start() + src_index,
......@@ -765,9 +765,9 @@ class StringTableVerifier : public ObjectVisitor {
};
// Run the StringTableVerifier over every element of the given heap's
// string table. Takes the heap explicitly now that the HEAP macro is gone;
// the stale zero-argument signature (which made this span two stacked,
// syntactically invalid declarations) has been removed.
static void VerifyStringTable(Heap* heap) {
  StringTableVerifier verifier;
  heap->string_table()->IterateElements(&verifier);
}
#endif // VERIFY_HEAP
......@@ -922,7 +922,7 @@ bool Heap::PerformGarbageCollection(GarbageCollector collector,
#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
VerifyStringTable();
VerifyStringTable(this);
}
#endif
......@@ -1046,7 +1046,7 @@ bool Heap::PerformGarbageCollection(GarbageCollector collector,
#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
VerifyStringTable();
VerifyStringTable(this);
}
#endif
......@@ -1154,29 +1154,33 @@ class ScavengeVisitor: public ObjectVisitor {
// new space.
class VerifyNonPointerSpacePointersVisitor: public ObjectVisitor {
 public:
  // Visitor that checks none of the visited slots point into new space.
  // Carries the heap explicitly (the HEAP macro is being removed).
  explicit VerifyNonPointerSpacePointersVisitor(Heap* heap) : heap_(heap) {}

  void VisitPointers(Object** start, Object** end) {
    for (Object** current = start; current < end; current++) {
      if ((*current)->IsHeapObject()) {
        // Only the heap_-based check is kept; the duplicated stale
        // HEAP-based CHECK from the diff is removed.
        CHECK(!heap_->InNewSpace(HeapObject::cast(*current)));
      }
    }
  }

 private:
  Heap* heap_;
};
static void VerifyNonPointerSpacePointers() {
static void VerifyNonPointerSpacePointers(Heap* heap) {
// Verify that there are no pointers to new space in spaces where we
// do not expect them.
VerifyNonPointerSpacePointersVisitor v;
HeapObjectIterator code_it(HEAP->code_space());
VerifyNonPointerSpacePointersVisitor v(heap);
HeapObjectIterator code_it(heap->code_space());
for (HeapObject* object = code_it.Next();
object != NULL; object = code_it.Next())
object->Iterate(&v);
// The old data space was normally swept conservatively so that the iterator
// doesn't work, so we normally skip the next bit.
if (!HEAP->old_data_space()->was_swept_conservatively()) {
HeapObjectIterator data_it(HEAP->old_data_space());
if (!heap->old_data_space()->was_swept_conservatively()) {
HeapObjectIterator data_it(heap->old_data_space());
for (HeapObject* object = data_it.Next();
object != NULL; object = data_it.Next())
object->Iterate(&v);
......@@ -1323,7 +1327,7 @@ void Heap::Scavenge() {
RelocationLock relocation_lock(this);
#ifdef VERIFY_HEAP
if (FLAG_verify_heap) VerifyNonPointerSpacePointers();
if (FLAG_verify_heap) VerifyNonPointerSpacePointers(this);
#endif
gc_state_ = SCAVENGE;
......@@ -2377,7 +2381,7 @@ void Heap::SelectScavengingVisitorsTable() {
void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
SLOW_ASSERT(HEAP->InFromSpace(object));
SLOW_ASSERT(object->GetIsolate()->heap()->InFromSpace(object));
MapWord first_word = object->map_word();
SLOW_ASSERT(!first_word.IsForwardingAddress());
Map* map = first_word.ToMap();
......@@ -7841,7 +7845,7 @@ int KeyedLookupCache::Lookup(Map* map, Name* name) {
void KeyedLookupCache::Update(Map* map, Name* name, int field_offset) {
if (!name->IsUniqueName()) {
String* internalized_string;
if (!HEAP->InternalizeStringIfExists(
if (!map->GetIsolate()->heap()->InternalizeStringIfExists(
String::cast(name), &internalized_string)) {
return;
}
......@@ -7849,7 +7853,7 @@ void KeyedLookupCache::Update(Map* map, Name* name, int field_offset) {
}
// This cache is cleared only between mark compact passes, so we expect the
// cache to only contain old space names.
ASSERT(!HEAP->InNewSpace(name));
ASSERT(!map->GetIsolate()->heap()->InNewSpace(name));
int index = (Hash(map, name) & kHashMask);
// After a GC there will be free slots, so we use them in order (this may
......
......@@ -1532,7 +1532,7 @@ inline bool Context::has_out_of_memory() {
// Mark the native context with out of memory.
inline void Context::mark_out_of_memory() {
native_context()->set_out_of_memory(HEAP->true_value());
native_context()->set_out_of_memory(GetIsolate()->heap()->true_value());
}
......
......@@ -1617,7 +1617,7 @@ class RegExpEngine: public AllStatic {
struct CompilationResult {
// Error-path constructor: records the message and leaves no generated
// code, using the hole value as the "no code" sentinel. The duplicated
// initializer for `code` (the stale HEAP-based one, which made the member
// illegally initialized twice) has been removed.
explicit CompilationResult(const char* error_message)
    : error_message(error_message),
      code(Isolate::Current()->heap()->the_hole_value()),
      num_registers(0) {}
CompilationResult(Object* code, int registers)
: error_message(NULL),
......
......@@ -1233,7 +1233,9 @@ static bool IsInlined(JSFunction* function, SharedFunctionInfo* candidate) {
DeoptimizationInputData* data =
DeoptimizationInputData::cast(function->code()->deoptimization_data());
if (data == HEAP->empty_fixed_array()) return false;
if (data == function->GetIsolate()->heap()->empty_fixed_array()) {
return false;
}
FixedArray* literals = data->LiteralArray();
......@@ -1549,7 +1551,7 @@ MaybeObject* LiveEdit::PatchFunctionPositions(
info->set_end_position(new_function_end);
info->set_function_token_position(new_function_token_pos);
HEAP->EnsureHeapIsIterable();
info->GetIsolate()->heap()->EnsureHeapIsIterable();
if (IsJSFunctionCode(info->code())) {
// Patch relocation info section of the code.
......@@ -1565,7 +1567,7 @@ MaybeObject* LiveEdit::PatchFunctionPositions(
}
}
return HEAP->undefined_value();
return info->GetIsolate()->heap()->undefined_value();
}
......@@ -1611,7 +1613,7 @@ Object* LiveEdit::ChangeScriptSource(Handle<Script> original_script,
original_script->set_source(*new_source);
// Drop line ends so that they will be recalculated.
original_script->set_line_ends(HEAP->undefined_value());
original_script->set_line_ends(isolate->heap()->undefined_value());
return *old_script_object;
}
......
......@@ -58,7 +58,7 @@ void MarkCompactCollector::MarkObject(HeapObject* obj, MarkBit mark_bit) {
mark_bit.Set();
MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
ASSERT(IsMarked(obj));
ASSERT(HEAP->Contains(obj));
ASSERT(obj->GetIsolate()->heap()->Contains(obj));
marking_deque_.PushBlack(obj);
}
}
......
......@@ -396,7 +396,7 @@ void Assembler::GrowBuffer() {
// Some internal data structures overflow for very large buffers,
// they must ensure that kMaximalBufferSize is not too large.
if ((desc.buffer_size > kMaximalBufferSize) ||
(desc.buffer_size > HEAP->MaxOldGenerationSize())) {
(desc.buffer_size > isolate()->heap()->MaxOldGenerationSize())) {
V8::FatalProcessOutOfMemory("Assembler::GrowBuffer");
}
......@@ -1533,7 +1533,7 @@ void Assembler::movq(Register dst, Handle<Object> value, RelocInfo::Mode mode) {
} else {
EnsureSpace ensure_space(this);
ASSERT(value->IsHeapObject());
ASSERT(!HEAP->InNewSpace(*value));
ASSERT(!isolate()->heap()->InNewSpace(*value));
emit_rex_64(dst);
emit(0xB8 | dst.low_bits());
emitp(value.location(), mode);
......
......@@ -285,16 +285,17 @@ void MacroAssembler::InNewSpace(Register object,
cmpq(scratch, kScratchRegister);
j(cc, branch, distance);
} else {
ASSERT(is_int32(static_cast<int64_t>(HEAP->NewSpaceMask())));
ASSERT(is_int32(static_cast<int64_t>(isolate()->heap()->NewSpaceMask())));
intptr_t new_space_start =
reinterpret_cast<intptr_t>(HEAP->NewSpaceStart());
reinterpret_cast<intptr_t>(isolate()->heap()->NewSpaceStart());
movq(kScratchRegister, -new_space_start, RelocInfo::NONE64);
if (scratch.is(object)) {
addq(scratch, kScratchRegister);
} else {
lea(scratch, Operand(object, kScratchRegister, times_1, 0));
}
and_(scratch, Immediate(static_cast<int32_t>(HEAP->NewSpaceMask())));
and_(scratch,
Immediate(static_cast<int32_t>(isolate()->heap()->NewSpaceMask())));
j(cc, branch, distance);
}
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment