Commit 8f73ae03 authored by erik.corry@gmail.com's avatar erik.corry@gmail.com

* Move almost all roots into an array ready for use by a constant-pool

register on ARM.
* Make some compile-time loops into run-time loops for compactness.
Review URL: http://codereview.chromium.org/149324

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@2398 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent bf656d2a
......@@ -85,7 +85,7 @@ Handle<Code> CodeStub::GetCode() {
Handle<NumberDictionary>(Heap::code_stubs()),
key,
code);
Heap::set_code_stubs(*dict);
Heap::public_set_code_stubs(*dict);
index = Heap::code_stubs()->FindEntry(key);
}
ASSERT(index != NumberDictionary::kNotFound);
......
......@@ -28,6 +28,7 @@
#ifndef V8_FACTORY_H_
#define V8_FACTORY_H_
#include "globals.h"
#include "heap.h"
#include "zone-inl.h"
......@@ -299,13 +300,19 @@ class Factory : public AllStatic {
Handle<JSObject> instance,
bool* pending_exception);
#define ROOT_ACCESSOR(type, name) \
static Handle<type> name() { return Handle<type>(&Heap::name##_); }
#define ROOT_ACCESSOR(type, name, camel_name) \
static inline Handle<type> name() { \
return Handle<type>(bit_cast<type**, Object**>( \
&Heap::roots_[Heap::k##camel_name##RootIndex])); \
}
ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR_ACCESSOR
#define SYMBOL_ACCESSOR(name, str) \
static Handle<String> name() { return Handle<String>(&Heap::name##_); }
static inline Handle<String> name() { \
return Handle<String>(bit_cast<String**, Object**>( \
&Heap::roots_[Heap::k##name##RootIndex])); \
}
SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR
......
......@@ -216,7 +216,7 @@ void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
void Heap::SetLastScriptId(Object* last_script_id) {
last_script_id_ = last_script_id;
roots_[kLastScriptIdRootIndex] = last_script_id;
}
......
This diff is collapsed.
This diff is collapsed.
......@@ -224,7 +224,9 @@ static inline HeapObject* ShortCircuitConsString(Object** p) {
if ((type & kShortcutTypeMask) != kShortcutTypeTag) return object;
Object* second = reinterpret_cast<ConsString*>(object)->unchecked_second();
if (reinterpret_cast<String*>(second) != Heap::empty_string()) return object;
if (second != Heap::raw_unchecked_empty_string()) {
return object;
}
// Since we don't have the object's start, it is impossible to update the
// remembered set. Therefore, we only replace the string with its left
......@@ -421,7 +423,7 @@ class SymbolTableCleaner : public ObjectVisitor {
}
}
// Set the entry to null_value (as deleted).
*p = Heap::null_value();
*p = Heap::raw_unchecked_null_value();
pointers_removed_++;
}
}
......@@ -475,7 +477,7 @@ void MarkCompactCollector::MarkDescriptorArray(
DescriptorArray* descriptors) {
if (descriptors->IsMarked()) return;
// Empty descriptor array is marked as a root before any maps are marked.
ASSERT(descriptors != Heap::empty_descriptor_array());
ASSERT(descriptors != Heap::raw_unchecked_empty_descriptor_array());
SetMark(descriptors);
FixedArray* contents = reinterpret_cast<FixedArray*>(
......@@ -590,7 +592,7 @@ void MarkCompactCollector::MarkSymbolTable() {
// and if it is a sliced string or a cons string backed by an
// external string (even indirectly), then the external string does
// not receive a weak reference callback.
SymbolTable* symbol_table = SymbolTable::cast(Heap::symbol_table());
SymbolTable* symbol_table = Heap::raw_unchecked_symbol_table();
// Mark the symbol table itself.
SetMark(symbol_table);
// Explicitly mark the prefix.
......@@ -780,10 +782,9 @@ void MarkCompactCollector::MarkLiveObjects() {
ProcessObjectGroups(root_visitor.stack_visitor());
// Prune the symbol table removing all symbols only pointed to by the
// symbol table. Cannot use SymbolTable::cast here because the symbol
// symbol table. Cannot use symbol_table() here because the symbol
// table is marked.
SymbolTable* symbol_table =
reinterpret_cast<SymbolTable*>(Heap::symbol_table());
SymbolTable* symbol_table = Heap::raw_unchecked_symbol_table();
SymbolTableCleaner v;
symbol_table->IterateElements(&v);
symbol_table->ElementsRemoved(v.PointersRemoved());
......@@ -1142,11 +1143,11 @@ static void SweepSpace(NewSpace* space) {
// since their existing map might not be live after the collection.
int size = object->Size();
if (size >= ByteArray::kHeaderSize) {
object->set_map(Heap::byte_array_map());
object->set_map(Heap::raw_unchecked_byte_array_map());
ByteArray::cast(object)->set_length(ByteArray::LengthFor(size));
} else {
ASSERT(size == kPointerSize);
object->set_map(Heap::one_word_filler_map());
object->set_map(Heap::raw_unchecked_one_word_filler_map());
}
ASSERT(object->Size() == size);
}
......
......@@ -481,7 +481,7 @@ bool Object::IsDictionary() {
bool Object::IsSymbolTable() {
return IsHashTable() && this == Heap::symbol_table();
return IsHashTable() && this == Heap::raw_unchecked_symbol_table();
}
......@@ -2655,8 +2655,8 @@ void Map::ClearCodeCache() {
// No write barrier is needed since empty_fixed_array is not in new space.
// Please note this function is used during marking:
// - MarkCompactCollector::MarkUnmarkedObject
ASSERT(!Heap::InNewSpace(Heap::empty_fixed_array()));
WRITE_FIELD(this, kCodeCacheOffset, Heap::empty_fixed_array());
ASSERT(!Heap::InNewSpace(Heap::raw_unchecked_empty_fixed_array()));
WRITE_FIELD(this, kCodeCacheOffset, Heap::raw_unchecked_empty_fixed_array());
}
......
......@@ -4615,7 +4615,7 @@ void Map::ClearNonLiveTransitions(Object* real_prototype) {
// low-level accessors to get and modify their data.
DescriptorArray* d = reinterpret_cast<DescriptorArray*>(
*RawField(this, Map::kInstanceDescriptorsOffset));
if (d == Heap::empty_descriptor_array()) return;
if (d == Heap::raw_unchecked_empty_descriptor_array()) return;
Smi* NullDescriptorDetails =
PropertyDetails(NONE, NULL_DESCRIPTOR).AsSmi();
FixedArray* contents = reinterpret_cast<FixedArray*>(
......
This diff is collapsed.
......@@ -1141,7 +1141,7 @@ static void ReportHistogram(bool print_spill) {
// Summarize string types.
int string_number = 0;
int string_bytes = 0;
#define INCREMENT(type, size, name) \
#define INCREMENT(type, size, name, camel_name) \
string_number += heap_histograms[type].number(); \
string_bytes += heap_histograms[type].bytes();
STRING_TYPE_LIST(INCREMENT)
......@@ -1185,8 +1185,8 @@ static void DoReportStatistics(HistogramInfo* info, const char* description) {
// Lump all the string types together.
int string_number = 0;
int string_bytes = 0;
#define INCREMENT(type, size, name) \
string_number += info[type].number(); \
#define INCREMENT(type, size, name, camel_name) \
string_number += info[type].number(); \
string_bytes += info[type].bytes();
STRING_TYPE_LIST(INCREMENT)
#undef INCREMENT
......@@ -1266,12 +1266,12 @@ void FreeListNode::set_size(int size_in_bytes) {
// field and a next pointer, we give it a filler map that gives it the
// correct size.
if (size_in_bytes > ByteArray::kHeaderSize) {
set_map(Heap::byte_array_map());
set_map(Heap::raw_unchecked_byte_array_map());
ByteArray::cast(this)->set_length(ByteArray::LengthFor(size_in_bytes));
} else if (size_in_bytes == kPointerSize) {
set_map(Heap::one_word_filler_map());
set_map(Heap::raw_unchecked_one_word_filler_map());
} else if (size_in_bytes == 2 * kPointerSize) {
set_map(Heap::two_word_filler_map());
set_map(Heap::raw_unchecked_two_word_filler_map());
} else {
UNREACHABLE();
}
......@@ -1280,14 +1280,14 @@ void FreeListNode::set_size(int size_in_bytes) {
Address FreeListNode::next() {
ASSERT(map() == Heap::byte_array_map());
ASSERT(map() == Heap::raw_unchecked_byte_array_map());
ASSERT(Size() >= kNextOffset + kPointerSize);
return Memory::Address_at(address() + kNextOffset);
}
void FreeListNode::set_next(Address next) {
ASSERT(map() == Heap::byte_array_map());
ASSERT(map() == Heap::raw_unchecked_byte_array_map());
ASSERT(Size() >= kNextOffset + kPointerSize);
Memory::Address_at(address() + kNextOffset) = next;
}
......@@ -1856,7 +1856,7 @@ void OldSpace::ReportStatistics() {
int bitpos = intoff*kBitsPerByte + bitoff;
Address slot = p->OffsetToAddress(bitpos << kObjectAlignmentBits);
Object** obj = reinterpret_cast<Object**>(slot);
if (*obj == Heap::fixed_array_map()) {
if (*obj == Heap::raw_unchecked_fixed_array_map()) {
rset_marked_arrays++;
FixedArray* fa = FixedArray::cast(HeapObject::FromAddress(slot));
......
......@@ -562,10 +562,11 @@ Object* StubCache::ComputeCallGlobal(int argc,
static Object* GetProbeValue(Code::Flags flags) {
NumberDictionary* dictionary = Heap::non_monomorphic_cache();
// Use raw_unchecked... so we don't get assert failures during GC.
NumberDictionary* dictionary = Heap::raw_unchecked_non_monomorphic_cache();
int entry = dictionary->FindEntry(flags);
if (entry != -1) return dictionary->ValueAt(entry);
return Heap::undefined_value();
return Heap::raw_unchecked_undefined_value();
}
......@@ -579,7 +580,7 @@ static Object* ProbeCache(Code::Flags flags) {
Heap::non_monomorphic_cache()->AtNumberPut(flags,
Heap::undefined_value());
if (result->IsFailure()) return result;
Heap::set_non_monomorphic_cache(NumberDictionary::cast(result));
Heap::public_set_non_monomorphic_cache(NumberDictionary::cast(result));
return probe;
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment