Commit 1dcb6e33 authored by vitalyr@chromium.org

Minimize malloc heap allocation on process startup.

R=vegorov@chromium.org
BUG=http://b/issue?id=5095592

Review URL: http://codereview.chromium.org/7572018

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@8833 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent d63014d6
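
Note (editor): the patch replaces eager allocation in Isolate::PreInit() and the Isolate constructor with on-demand initialization (InitializeLoggingAndCounters(), InitializeDebugger(), a lazy stats_table()). A minimal, self-contained sketch of that shape follows; names are invented, standard <atomic>/<mutex> stand in for V8's platform primitives, and it makes no claim to match the real classes:

    // Illustrative sketch only -- not the V8 sources.
    #include <atomic>
    #include <mutex>

    struct Logger {};
    struct Counters {};
    struct Debugger {};

    class LazyIsolate {
     public:
      // Cheap constructor: nothing is heap-allocated at process startup.
      LazyIsolate() = default;

      ~LazyIsolate() {
        delete debugger_;
        delete counters_;
        delete logger_;
      }

      // Safe to call more than once; also called from full initialization.
      void InitializeLoggingAndCounters() {
        if (logger_ == nullptr) logger_ = new Logger;
        if (counters_ == nullptr) counters_ = new Counters;
      }

      // Double-checked initialization, analogous to the NoBarrier_Load /
      // ScopedLock / Release_Store sequence used in the patch.
      Debugger* debugger() {
        if (!debugger_initialized_.load(std::memory_order_acquire)) {
          std::lock_guard<std::mutex> lock(debugger_mutex_);
          if (!debugger_initialized_.load(std::memory_order_relaxed)) {
            InitializeLoggingAndCounters();
            debugger_ = new Debugger;
            debugger_initialized_.store(true, std::memory_order_release);
          }
        }
        return debugger_;
      }

     private:
      Logger* logger_ = nullptr;
      Counters* counters_ = nullptr;
      Debugger* debugger_ = nullptr;
      std::atomic<bool> debugger_initialized_{false};
      std::mutex debugger_mutex_;
    };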
@@ -84,7 +84,7 @@ namespace v8 {
   if (has_pending_exception) { \
     if (handle_scope_implementer->CallDepthIsZero() && \
         (isolate)->is_out_of_memory()) { \
-      if (!handle_scope_implementer->ignore_out_of_memory()) \
+      if (!(isolate)->ignore_out_of_memory()) \
         i::V8::FatalProcessOutOfMemory(NULL); \
     } \
     bool call_depth_is_zero = handle_scope_implementer->CallDepthIsZero(); \
@@ -4259,8 +4259,8 @@ static void* ExternalValueImpl(i::Handle<i::Object> obj) {
 Local<Value> v8::External::Wrap(void* data) {
   i::Isolate* isolate = i::Isolate::Current();
   STATIC_ASSERT(sizeof(data) == sizeof(i::Address));
-  LOG_API(isolate, "External::Wrap");
   EnsureInitializedForIsolate(isolate, "v8::External::Wrap()");
+  LOG_API(isolate, "External::Wrap");
   ENTER_V8(isolate);
   v8::Local<v8::Value> result = CanBeEncodedAsSmi(data)
@@ -4304,8 +4304,8 @@ void* v8::External::FullUnwrap(v8::Handle<v8::Value> wrapper) {
 Local<External> v8::External::New(void* data) {
   STATIC_ASSERT(sizeof(data) == sizeof(i::Address));
   i::Isolate* isolate = i::Isolate::Current();
-  LOG_API(isolate, "External::New");
   EnsureInitializedForIsolate(isolate, "v8::External::New()");
+  LOG_API(isolate, "External::New");
   ENTER_V8(isolate);
   return ExternalNewImpl(data);
 }
@@ -4797,8 +4797,7 @@ Local<Integer> Integer::NewFromUnsigned(uint32_t value) {
 void V8::IgnoreOutOfMemoryException() {
-  EnterIsolateIfNeeded()->handle_scope_implementer()->set_ignore_out_of_memory(
-      true);
+  EnterIsolateIfNeeded()->set_ignore_out_of_memory(true);
 }
...
@@ -404,7 +404,6 @@ class HandleScopeImplementer {
         entered_contexts_(0),
         saved_contexts_(0),
         spare_(NULL),
-        ignore_out_of_memory_(false),
         call_depth_(0) { }

   // Threading support for handle data.
@@ -437,10 +436,6 @@ class HandleScopeImplementer {
   inline bool HasSavedContexts();

   inline List<internal::Object**>* blocks() { return &blocks_; }
-  inline bool ignore_out_of_memory() { return ignore_out_of_memory_; }
-  inline void set_ignore_out_of_memory(bool value) {
-    ignore_out_of_memory_ = value;
-  }

  private:
   void ResetAfterArchive() {
@@ -448,7 +443,6 @@ class HandleScopeImplementer {
     entered_contexts_.Initialize(0);
     saved_contexts_.Initialize(0);
     spare_ = NULL;
-    ignore_out_of_memory_ = false;
     call_depth_ = 0;
   }
@@ -473,7 +467,6 @@ class HandleScopeImplementer {
   // Used as a stack to keep track of saved contexts.
   List<Context*> saved_contexts_;
   Object** spare_;
-  bool ignore_out_of_memory_;
   int call_depth_;
   // This is only used for threading support.
   v8::ImplementationUtilities::HandleScopeData handle_scope_data_;
...
@@ -1965,7 +1965,7 @@ void Debug::AfterGarbageCollection() {
 Debugger::Debugger(Isolate* isolate)
-    : debugger_access_(OS::CreateMutex()),
+    : debugger_access_(isolate->debugger_access()),
       event_listener_(Handle<Object>()),
       event_listener_data_(Handle<Object>()),
       compiling_natives_(false),

@@ -1987,8 +1987,6 @@ Debugger::Debugger(Isolate* isolate)
 Debugger::~Debugger() {
-  delete debugger_access_;
-  debugger_access_ = 0;
   delete dispatch_handler_access_;
   dispatch_handler_access_ = 0;
   delete command_received_;
...
@@ -132,7 +132,7 @@ static Handle<Object> Invoke(bool construct,
   if (*has_pending_exception) {
     isolate->ReportPendingMessages();
     if (isolate->pending_exception() == Failure::OutOfMemoryException()) {
-      if (!isolate->handle_scope_implementer()->ignore_out_of_memory()) {
+      if (!isolate->ignore_out_of_memory()) {
         V8::FatalProcessOutOfMemory("JS", true);
       }
     }
...
@@ -76,6 +76,10 @@ int ThreadId::GetCurrentThreadId() {
 ThreadLocalTop::ThreadLocalTop() {
   InitializeInternal();
+  // This flag may be set using v8::V8::IgnoreOutOfMemoryException()
+  // before an isolate is initialized. The initialize methods below do
+  // not touch it to preserve its value.
+  ignore_out_of_memory_ = false;
 }
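
Note (editor): the hunk above is why the flag lives in ThreadLocalTop and is only cleared in its constructor: an embedder may ask for out-of-memory tolerance before V8 is initialized, and initialization must not reset it. A hedged illustration of that call order (hypothetical embedder code, not part of this patch):

    // Hypothetical embedder startup; shows the ordering the comment describes.
    #include <v8.h>

    int main() {
      v8::V8::IgnoreOutOfMemoryException();  // may legally run first
      v8::V8::Initialize();                  // must not clobber the flag
      // ... create contexts and run scripts as usual ...
      return 0;
    }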
@@ -382,7 +386,6 @@ void Isolate::EnsureDefaultIsolate() {
   if (Thread::GetThreadLocal(isolate_key_) == NULL) {
     Thread::SetThreadLocal(isolate_key_, default_isolate_);
   }
-  CHECK(default_isolate_->PreInit());
 }
@@ -654,6 +657,7 @@ void Isolate::PrintStack() {
   incomplete_message_ = &accumulator;
   PrintStack(&accumulator);
   accumulator.OutputToStdOut();
+  InitializeLoggingAndCounters();
   accumulator.Log();
   incomplete_message_ = NULL;
   stack_trace_nesting_level_ = 0;
@@ -1375,11 +1379,15 @@ Isolate::Isolate()
       bootstrapper_(NULL),
       runtime_profiler_(NULL),
       compilation_cache_(NULL),
-      counters_(new Counters()),
+      counters_(NULL),
       code_range_(NULL),
+      // Must be initialized early to allow v8::SetResourceConstraints calls.
       break_access_(OS::CreateMutex()),
-      logger_(new Logger()),
-      stats_table_(new StatsTable()),
+      debugger_initialized_(false),
+      // Must be initialized early to allow v8::Debug calls.
+      debugger_access_(OS::CreateMutex()),
+      logger_(NULL),
+      stats_table_(NULL),
       stub_cache_(NULL),
       deoptimizer_data_(NULL),
       capture_stack_trace_for_uncaught_exceptions_(false),

@@ -1510,7 +1518,7 @@ void Isolate::Deinit() {
     logger_->TearDown();
     // The default isolate is re-initializable due to legacy API.
-    state_ = PREINITIALIZED;
+    state_ = UNINITIALIZED;
   }
 }
@@ -1592,58 +1600,6 @@ Isolate::~Isolate() {
 }

-bool Isolate::PreInit() {
-  if (state_ != UNINITIALIZED) return true;
-  TRACE_ISOLATE(preinit);
-  ASSERT(Isolate::Current() == this);
-#ifdef ENABLE_DEBUGGER_SUPPORT
-  debug_ = new Debug(this);
-  debugger_ = new Debugger(this);
-#endif
-  memory_allocator_ = new MemoryAllocator();
-  memory_allocator_->isolate_ = this;
-  code_range_ = new CodeRange();
-  code_range_->isolate_ = this;
-  // Safe after setting Heap::isolate_, initializing StackGuard and
-  // ensuring that Isolate::Current() == this.
-  heap_.SetStackLimits();
-#ifdef DEBUG
-  DisallowAllocationFailure disallow_allocation_failure;
-#endif
-#define C(name) isolate_addresses_[Isolate::k_##name] = \
-    reinterpret_cast<Address>(name());
-  ISOLATE_ADDRESS_LIST(C)
-#undef C
-  string_tracker_ = new StringTracker();
-  string_tracker_->isolate_ = this;
-  compilation_cache_ = new CompilationCache(this);
-  transcendental_cache_ = new TranscendentalCache();
-  keyed_lookup_cache_ = new KeyedLookupCache();
-  context_slot_cache_ = new ContextSlotCache();
-  descriptor_lookup_cache_ = new DescriptorLookupCache();
-  unicode_cache_ = new UnicodeCache();
-  pc_to_code_cache_ = new PcToCodeCache(this);
-  write_input_buffer_ = new StringInputBuffer();
-  global_handles_ = new GlobalHandles(this);
-  bootstrapper_ = new Bootstrapper();
-  handle_scope_implementer_ = new HandleScopeImplementer(this);
-  stub_cache_ = new StubCache(this);
-  ast_sentinels_ = new AstSentinels();
-  regexp_stack_ = new RegExpStack();
-  regexp_stack_->isolate_ = this;
-  state_ = PREINITIALIZED;
-  return true;
-}

 void Isolate::InitializeThreadLocal() {
   thread_local_top_.isolate_ = this;
   thread_local_top_.Initialize();
@@ -1680,19 +1636,71 @@ void Isolate::PropagatePendingExceptionToExternalTryCatch() {
 }

+void Isolate::InitializeLoggingAndCounters() {
+  if (logger_ == NULL) {
+    logger_ = new Logger;
+  }
+  if (counters_ == NULL) {
+    counters_ = new Counters;
+  }
+}

+void Isolate::InitializeDebugger() {
+#ifdef ENABLE_DEBUGGER_SUPPORT
+  ScopedLock lock(debugger_access_);
+  if (NoBarrier_Load(&debugger_initialized_)) return;
+  InitializeLoggingAndCounters();
+  debug_ = new Debug(this);
+  debugger_ = new Debugger(this);
+  Release_Store(&debugger_initialized_, true);
+#endif
+}

 bool Isolate::Init(Deserializer* des) {
   ASSERT(state_ != INITIALIZED);
+  ASSERT(Isolate::Current() == this);
   TRACE_ISOLATE(init);
-  bool create_heap_objects = des == NULL;
 #ifdef DEBUG
   // The initialization process does not handle memory exhaustion.
   DisallowAllocationFailure disallow_allocation_failure;
 #endif
-  if (state_ == UNINITIALIZED && !PreInit()) return false;
+  InitializeLoggingAndCounters();
+  InitializeDebugger();
+  memory_allocator_ = new MemoryAllocator(this);
+  code_range_ = new CodeRange(this);
+  // Safe after setting Heap::isolate_, initializing StackGuard and
+  // ensuring that Isolate::Current() == this.
+  heap_.SetStackLimits();
+#define C(name) isolate_addresses_[Isolate::k_##name] = \
+    reinterpret_cast<Address>(name());
+  ISOLATE_ADDRESS_LIST(C)
+#undef C
+  string_tracker_ = new StringTracker();
+  string_tracker_->isolate_ = this;
+  compilation_cache_ = new CompilationCache(this);
+  transcendental_cache_ = new TranscendentalCache();
+  keyed_lookup_cache_ = new KeyedLookupCache();
+  context_slot_cache_ = new ContextSlotCache();
+  descriptor_lookup_cache_ = new DescriptorLookupCache();
+  unicode_cache_ = new UnicodeCache();
+  pc_to_code_cache_ = new PcToCodeCache(this);
+  write_input_buffer_ = new StringInputBuffer();
+  global_handles_ = new GlobalHandles(this);
+  bootstrapper_ = new Bootstrapper();
+  handle_scope_implementer_ = new HandleScopeImplementer(this);
+  stub_cache_ = new StubCache(this);
+  ast_sentinels_ = new AstSentinels();
+  regexp_stack_ = new RegExpStack();
+  regexp_stack_->isolate_ = this;

   // Enable logging before setting up the heap
   logger_->Setup();
@@ -1715,7 +1723,8 @@ bool Isolate::Init(Deserializer* des) {
     stack_guard_.InitThread(lock);
   }

-  // Setup the object heap
+  // Setup the object heap.
+  const bool create_heap_objects = (des == NULL);
   ASSERT(!heap_.HasBeenSetup());
   if (!heap_.Setup(create_heap_objects)) {
     V8::SetFatalError();
@@ -1775,6 +1784,16 @@ bool Isolate::Init(Deserializer* des) {
 }

+// Initialized lazily to allow early
+// v8::V8::SetAddHistogramSampleFunction calls.
+StatsTable* Isolate::stats_table() {
+  if (stats_table_ == NULL) {
+    stats_table_ = new StatsTable;
+  }
+  return stats_table_;
+}

 void Isolate::Enter() {
   Isolate* current_isolate = NULL;
   PerIsolateThreadData* current_data = CurrentPerIsolateThreadData();

@@ -1814,8 +1833,6 @@ void Isolate::Enter() {
   SetIsolateThreadLocals(this, data);

-  CHECK(PreInit());
-
   // In case it's the first time some thread enters the isolate.
   set_thread_id(data->thread_id());
 }
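
Note (editor): with the hunks above, stats_table() (like logger_ and counters_) is created on first use rather than in the Isolate constructor, which is where the startup malloc savings come from. A small self-contained sketch of the lazy-accessor shape, with illustrative names only:

    class StatsTable {};

    class IsolateLike {
     public:
      ~IsolateLike() { delete stats_table_; }
      // Allocate the table the first time it is requested.
      StatsTable* stats_table() {
        if (stats_table_ == nullptr) stats_table_ = new StatsTable;
        return stats_table_;
      }
     private:
      StatsTable* stats_table_ = nullptr;
    };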
...
@@ -256,6 +256,9 @@ class ThreadLocalTop BASE_EMBEDDED {
   // Call back function to report unsafe JS accesses.
   v8::FailedAccessCheckCallback failed_access_check_callback_;

+  // Whether out of memory exceptions should be ignored.
+  bool ignore_out_of_memory_;

  private:
   void InitializeInternal();
@@ -446,6 +449,13 @@ class Isolate {
     return reinterpret_cast<Isolate*>(Thread::GetThreadLocal(isolate_key_));
   }

+  // Usually called by Init(), but can be called early e.g. to allow
+  // testing components that require logging but not the whole
+  // isolate.
+  //
+  // Safe to call more than once.
+  void InitializeLoggingAndCounters();

   bool Init(Deserializer* des);
   bool IsInitialized() { return state_ == INITIALIZED; }
@@ -498,10 +508,12 @@ class Isolate {
   // switched to non-legacy behavior).
   static void EnterDefaultIsolate();

+  // Debug.
   // Mutex for serializing access to break control structures.
   Mutex* break_access() { return break_access_; }
+  // Mutex for serializing access to debugger.
+  Mutex* debugger_access() { return debugger_access_; }

   Address get_address_from_id(AddressId id);

   // Access to top context (where the current function object was created).
@@ -661,6 +673,12 @@ class Isolate {
   // Tells whether the current context has experienced an out of memory
   // exception.
   bool is_out_of_memory();
+  bool ignore_out_of_memory() {
+    return thread_local_top_.ignore_out_of_memory_;
+  }
+  void set_ignore_out_of_memory(bool value) {
+    thread_local_top_.ignore_out_of_memory_ = value;
+  }

   void PrintCurrentStackTrace(FILE* out);
   void PrintStackTrace(FILE* out, char* thread_data);
@@ -769,14 +787,24 @@ class Isolate {
 #undef GLOBAL_CONTEXT_FIELD_ACCESSOR

   Bootstrapper* bootstrapper() { return bootstrapper_; }
-  Counters* counters() { return counters_; }
+  Counters* counters() {
+    // Call InitializeLoggingAndCounters() if logging is needed before
+    // the isolate is fully initialized.
+    ASSERT(counters_ != NULL);
+    return counters_;
+  }
   CodeRange* code_range() { return code_range_; }
   RuntimeProfiler* runtime_profiler() { return runtime_profiler_; }
   CompilationCache* compilation_cache() { return compilation_cache_; }
-  Logger* logger() { return logger_; }
+  Logger* logger() {
+    // Call InitializeLoggingAndCounters() if logging is needed before
+    // the isolate is fully initialized.
+    ASSERT(logger_ != NULL);
+    return logger_;
+  }
   StackGuard* stack_guard() { return &stack_guard_; }
   Heap* heap() { return &heap_; }
-  StatsTable* stats_table() { return stats_table_; }
+  StatsTable* stats_table();
   StubCache* stub_cache() { return stub_cache_; }
   DeoptimizerData* deoptimizer_data() { return deoptimizer_data_; }
   ThreadLocalTop* thread_local_top() { return &thread_local_top_; }
@@ -877,8 +905,14 @@ class Isolate {
   void PreallocatedStorageInit(size_t size);

 #ifdef ENABLE_DEBUGGER_SUPPORT
-  Debugger* debugger() { return debugger_; }
-  Debug* debug() { return debug_; }
+  Debugger* debugger() {
+    if (!NoBarrier_Load(&debugger_initialized_)) InitializeDebugger();
+    return debugger_;
+  }
+  Debug* debug() {
+    if (!NoBarrier_Load(&debugger_initialized_)) InitializeDebugger();
+    return debug_;
+  }
 #endif

   inline bool DebuggerHasBreakPoints();
@@ -1010,8 +1044,6 @@ class Isolate {
   static Isolate* default_isolate_;
   static ThreadDataTable* thread_data_table_;

-  bool PreInit();
-
   void Deinit();

   static void SetIsolateThreadLocals(Isolate* isolate,

@@ -1019,7 +1051,6 @@ class Isolate {
   enum State {
     UNINITIALIZED,    // Some components may not have been allocated.
-    PREINITIALIZED,   // Components have been allocated but not initialized.
     INITIALIZED       // All components are fully initialized.
   };
@@ -1063,6 +1094,8 @@ class Isolate {
   void PropagatePendingExceptionToExternalTryCatch();

+  void InitializeDebugger();

   int stack_trace_nesting_level_;
   StringStream* incomplete_message_;
   // The preallocated memory thread singleton.

@@ -1076,6 +1109,8 @@ class Isolate {
   Counters* counters_;
   CodeRange* code_range_;
   Mutex* break_access_;
+  Atomic32 debugger_initialized_;
+  Mutex* debugger_access_;
   Heap heap_;
   Logger* logger_;
   StackGuard stack_guard_;
@@ -1165,6 +1200,7 @@ class Isolate {
   friend class Simulator;
   friend class StackGuard;
   friend class ThreadId;
+  friend class TestMemoryAllocatorScope;
   friend class v8::Isolate;
   friend class v8::Locker;
   friend class v8::Unlocker;
...
@@ -148,12 +148,12 @@ PageIterator::PageIterator(PagedSpace* space, Mode mode) : space_(space) {
 // CodeRange

-CodeRange::CodeRange()
-    : code_range_(NULL),
+CodeRange::CodeRange(Isolate* isolate)
+    : isolate_(isolate),
+      code_range_(NULL),
       free_list_(0),
       allocation_list_(0),
-      current_allocation_block_index_(0),
-      isolate_(NULL) {
+      current_allocation_block_index_(0) {
 }
@@ -279,8 +279,9 @@ void CodeRange::TearDown() {
 const int kEstimatedNumberOfChunks = 270;

-MemoryAllocator::MemoryAllocator()
-    : capacity_(0),
+MemoryAllocator::MemoryAllocator(Isolate* isolate)
+    : isolate_(isolate),
+      capacity_(0),
       capacity_executable_(0),
       size_(0),
       size_executable_(0),

@@ -288,8 +289,7 @@ MemoryAllocator::MemoryAllocator()
       chunks_(kEstimatedNumberOfChunks),
       free_chunk_ids_(kEstimatedNumberOfChunks),
       max_nof_chunks_(0),
-      top_(0),
-      isolate_(NULL) {
+      top_(0) {
 }
...
@@ -408,6 +408,8 @@ class Space : public Malloced {
 // manages a range of virtual memory.
 class CodeRange {
  public:
+  explicit CodeRange(Isolate* isolate);
+
   // Reserves a range of virtual memory, but does not commit any of it.
   // Can only be called once, at heap initialization time.
   // Returns false on failure.
@@ -417,9 +419,9 @@ class CodeRange {
   // manage it.
   void TearDown();

-  bool exists() { return code_range_ != NULL; }
+  bool exists() { return this != NULL && code_range_ != NULL; }
   bool contains(Address address) {
-    if (code_range_ == NULL) return false;
+    if (this == NULL || code_range_ == NULL) return false;
     Address start = static_cast<Address>(code_range_->address());
     return start <= address && address < start + code_range_->size();
   }
@@ -432,7 +434,7 @@ class CodeRange {
   void FreeRawMemory(void* buf, size_t length);

  private:
-  CodeRange();
+  Isolate* isolate_;

   // The reserved range of virtual memory that all code objects are put in.
   VirtualMemory* code_range_;

@@ -466,10 +468,6 @@ class CodeRange {
   static int CompareFreeBlockAddress(const FreeBlock* left,
                                      const FreeBlock* right);

-  friend class Isolate;
-
-  Isolate* isolate_;
-
   DISALLOW_COPY_AND_ASSIGN(CodeRange);
 };
@@ -500,6 +498,8 @@ class CodeRange {
 class MemoryAllocator {
  public:
+  explicit MemoryAllocator(Isolate* isolate);
+
   // Initializes its internal bookkeeping structures.
   // Max capacity of the total space and executable memory limit.
   bool Setup(intptr_t max_capacity, intptr_t capacity_executable);
@@ -657,10 +657,10 @@ class MemoryAllocator {
 #endif

  private:
-  MemoryAllocator();
-
   static const int kChunkSize = kPagesPerChunk * Page::kPageSize;

+  Isolate* isolate_;
+
   // Maximum space size in bytes.
   intptr_t capacity_;
   // Maximum subset of capacity_ that can be executable
@@ -753,10 +753,6 @@ class MemoryAllocator {
                             Page* prev,
                             Page** last_page_in_use);

-  friend class Isolate;
-
-  Isolate* isolate_;
-
   DISALLOW_COPY_AND_ASSIGN(MemoryAllocator);
 };
...
@@ -186,7 +186,9 @@ class Block {
 TEST(CodeRange) {
   const int code_range_size = 16*MB;
   OS::Setup();
-  Isolate::Current()->code_range()->Setup(code_range_size);
+  Isolate::Current()->InitializeLoggingAndCounters();
+  CodeRange* code_range = new CodeRange(Isolate::Current());
+  code_range->Setup(code_range_size);
   int current_allocated = 0;
   int total_allocated = 0;
   List<Block> blocks(1000);
@@ -198,8 +200,7 @@ TEST(CodeRange) {
       size_t requested = (Page::kPageSize << (Pseudorandom() % 6)) +
           Pseudorandom() % 5000 + 1;
       size_t allocated = 0;
-      void* base = Isolate::Current()->code_range()->
-          AllocateRawMemory(requested, &allocated);
+      void* base = code_range->AllocateRawMemory(requested, &allocated);
       CHECK(base != NULL);
       blocks.Add(Block(base, static_cast<int>(allocated)));
       current_allocated += static_cast<int>(allocated);
@@ -207,8 +208,7 @@ TEST(CodeRange) {
     } else {
       // Free a block.
       int index = Pseudorandom() % blocks.length();
-      Isolate::Current()->code_range()->FreeRawMemory(
-          blocks[index].base, blocks[index].size);
+      code_range->FreeRawMemory(blocks[index].base, blocks[index].size);
       current_allocated -= blocks[index].size;
       if (index < blocks.length() - 1) {
         blocks[index] = blocks.RemoveLast();
@@ -218,5 +218,6 @@ TEST(CodeRange) {
     }
   }

-  Isolate::Current()->code_range()->TearDown();
+  code_range->TearDown();
+  delete code_range;
 }
@@ -5844,6 +5844,7 @@ TEST(DebuggerDebugMessageDispatch) {
 TEST(DebuggerAgent) {
+  v8::V8::Initialize();
   i::Debugger* debugger = i::Isolate::Current()->debugger();
   // Make sure these ports is not used by other tests to allow tests to run in
   // parallel.
...
@@ -291,8 +291,8 @@ TEST(LocalHandles) {
 TEST(GlobalHandles) {
-  GlobalHandles* global_handles = Isolate::Current()->global_handles();
   InitializeVM();
+  GlobalHandles* global_handles = Isolate::Current()->global_handles();

   Handle<Object> h1;
   Handle<Object> h2;

@@ -339,8 +339,8 @@ static void TestWeakGlobalHandleCallback(v8::Persistent<v8::Value> handle,
 TEST(WeakGlobalHandlesScavenge) {
-  GlobalHandles* global_handles = Isolate::Current()->global_handles();
   InitializeVM();
+  GlobalHandles* global_handles = Isolate::Current()->global_handles();

   WeakPointerCleared = false;

@@ -377,8 +377,8 @@ TEST(WeakGlobalHandlesScavenge) {
 TEST(WeakGlobalHandlesMark) {
-  GlobalHandles* global_handles = Isolate::Current()->global_handles();
   InitializeVM();
+  GlobalHandles* global_handles = Isolate::Current()->global_handles();

   WeakPointerCleared = false;

@@ -416,8 +416,8 @@ TEST(WeakGlobalHandlesMark) {
 }

 TEST(DeleteWeakGlobalHandle) {
-  GlobalHandles* global_handles = Isolate::Current()->global_handles();
   InitializeVM();
+  GlobalHandles* global_handles = Isolate::Current()->global_handles();

   WeakPointerCleared = false;
...
@@ -134,6 +134,8 @@ TEST(KeywordMatcher) {
 TEST(ScanHTMLEndComments) {
+  v8::V8::Initialize();
+
   // Regression test. See:
   // http://code.google.com/p/chromium/issues/detail?id=53548
   // Tests that --> is correctly interpreted as comment-to-end-of-line if there

@@ -263,6 +265,8 @@ TEST(Preparsing) {
 TEST(StandAlonePreParser) {
+  v8::V8::Initialize();
+
   int marker;
   i::Isolate::Current()->stack_guard()->SetStackLimit(
       reinterpret_cast<uintptr_t>(&marker) - 128 * 1024);

@@ -299,6 +303,8 @@ TEST(StandAlonePreParser) {
 TEST(RegressChromium62639) {
+  v8::V8::Initialize();
+
   int marker;
   i::Isolate::Current()->stack_guard()->SetStackLimit(
       reinterpret_cast<uintptr_t>(&marker) - 128 * 1024);

@@ -320,6 +326,8 @@ TEST(RegressChromium62639) {
 TEST(Regress928) {
+  v8::V8::Initialize();
+
   // Preparsing didn't consider the catch clause of a try statement
   // as with-content, which made it assume that a function inside
   // the block could be lazily compiled, and an extra, unexpected,

@@ -360,6 +368,8 @@ TEST(Regress928) {
 TEST(PreParseOverflow) {
+  v8::V8::Initialize();
+
   int marker;
   i::Isolate::Current()->stack_guard()->SetStackLimit(
       reinterpret_cast<uintptr_t>(&marker) - 128 * 1024);

@@ -610,6 +620,8 @@ void TestStreamScanner(i::UC16CharacterStream* stream,
 }

 TEST(StreamScanner) {
+  v8::V8::Initialize();
+
   const char* str1 = "{ foo get for : */ <- \n\n /*foo*/ bib";
   i::Utf8ToUC16CharacterStream stream1(reinterpret_cast<const i::byte*>(str1),
                                        static_cast<unsigned>(strlen(str1)));

@@ -690,6 +702,8 @@ void TestScanRegExp(const char* re_source, const char* expected) {
 TEST(RegExpScanning) {
+  v8::V8::Initialize();
+
   // RegExp token with added garbage at the end. The scanner should only
   // scan the RegExp until the terminating slash just before "flipperwald".
   TestScanRegExp("/b/flipperwald", "b");
...
@@ -99,10 +99,10 @@ static int make_code(TypeCode type, int id) {
 TEST(ExternalReferenceEncoder) {
-  OS::Setup();
   Isolate* isolate = i::Isolate::Current();
   isolate->stats_table()->SetCounterFunction(counter_function);
-  HEAP->Setup(false);
+  v8::V8::Initialize();
   ExternalReferenceEncoder encoder;
   CHECK_EQ(make_code(BUILTIN, Builtins::kArrayCode),
            Encode(encoder, Builtins::kArrayCode));

@@ -139,10 +139,10 @@ TEST(ExternalReferenceEncoder) {
 TEST(ExternalReferenceDecoder) {
-  OS::Setup();
   Isolate* isolate = i::Isolate::Current();
   isolate->stats_table()->SetCounterFunction(counter_function);
-  HEAP->Setup(false);
+  v8::V8::Initialize();
   ExternalReferenceDecoder decoder;
   CHECK_EQ(AddressOf(Builtins::kArrayCode),
            decoder.Decode(make_code(BUILTIN, Builtins::kArrayCode)));
...
@@ -91,46 +91,74 @@ TEST(Page) {
 }

+namespace v8 {
+namespace internal {

+// Temporarily sets a given allocator in an isolate.
+class TestMemoryAllocatorScope {
+ public:
+  TestMemoryAllocatorScope(Isolate* isolate, MemoryAllocator* allocator)
+      : isolate_(isolate),
+        old_allocator_(isolate->memory_allocator_) {
+    isolate->memory_allocator_ = allocator;
+  }
+
+  ~TestMemoryAllocatorScope() {
+    isolate_->memory_allocator_ = old_allocator_;
+  }
+
+ private:
+  Isolate* isolate_;
+  MemoryAllocator* old_allocator_;
+
+  DISALLOW_COPY_AND_ASSIGN(TestMemoryAllocatorScope);
+};

+} }  // namespace v8::internal

 TEST(MemoryAllocator) {
   OS::Setup();
   Isolate* isolate = Isolate::Current();
-  CHECK(HEAP->ConfigureHeapDefault());
-  CHECK(isolate->memory_allocator()->Setup(HEAP->MaxReserved(),
-                                           HEAP->MaxExecutableSize()));
-
-  OldSpace faked_space(HEAP,
-                       HEAP->MaxReserved(),
+  isolate->InitializeLoggingAndCounters();
+  Heap* heap = isolate->heap();
+  CHECK(heap->ConfigureHeapDefault());
+  MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
+  CHECK(memory_allocator->Setup(heap->MaxReserved(),
+                                heap->MaxExecutableSize()));
+  TestMemoryAllocatorScope test_scope(isolate, memory_allocator);
+
+  OldSpace faked_space(heap,
+                       heap->MaxReserved(),
                        OLD_POINTER_SPACE,
                        NOT_EXECUTABLE);
   int total_pages = 0;
   int requested = MemoryAllocator::kPagesPerChunk;
   int allocated;
   // If we request n pages, we should get n or n - 1.
-  Page* first_page =
-      isolate->memory_allocator()->AllocatePages(
-          requested, &allocated, &faked_space);
+  Page* first_page = memory_allocator->AllocatePages(
+      requested, &allocated, &faked_space);
   CHECK(first_page->is_valid());
   CHECK(allocated == requested || allocated == requested - 1);
   total_pages += allocated;
   Page* last_page = first_page;
   for (Page* p = first_page; p->is_valid(); p = p->next_page()) {
-    CHECK(isolate->memory_allocator()->IsPageInSpace(p, &faked_space));
+    CHECK(memory_allocator->IsPageInSpace(p, &faked_space));
     last_page = p;
   }

   // Again, we should get n or n - 1 pages.
-  Page* others =
-      isolate->memory_allocator()->AllocatePages(
-          requested, &allocated, &faked_space);
+  Page* others = memory_allocator->AllocatePages(
+      requested, &allocated, &faked_space);
   CHECK(others->is_valid());
   CHECK(allocated == requested || allocated == requested - 1);
   total_pages += allocated;
-  isolate->memory_allocator()->SetNextPage(last_page, others);
+  memory_allocator->SetNextPage(last_page, others);
   int page_count = 0;
   for (Page* p = first_page; p->is_valid(); p = p->next_page()) {
-    CHECK(isolate->memory_allocator()->IsPageInSpace(p, &faked_space));
+    CHECK(memory_allocator->IsPageInSpace(p, &faked_space));
     page_count++;
   }
   CHECK(total_pages == page_count);
@@ -141,34 +169,39 @@ TEST(MemoryAllocator) {
   // Freeing pages at the first chunk starting at or after the second page
   // should free the entire second chunk. It will return the page it was passed
   // (since the second page was in the first chunk).
-  Page* free_return = isolate->memory_allocator()->FreePages(second_page);
+  Page* free_return = memory_allocator->FreePages(second_page);
   CHECK(free_return == second_page);
-  isolate->memory_allocator()->SetNextPage(first_page, free_return);
+  memory_allocator->SetNextPage(first_page, free_return);

   // Freeing pages in the first chunk starting at the first page should free
   // the first chunk and return an invalid page.
-  Page* invalid_page = isolate->memory_allocator()->FreePages(first_page);
+  Page* invalid_page = memory_allocator->FreePages(first_page);
   CHECK(!invalid_page->is_valid());

-  isolate->memory_allocator()->TearDown();
+  memory_allocator->TearDown();
+  delete memory_allocator;
 }


 TEST(NewSpace) {
   OS::Setup();
-  CHECK(HEAP->ConfigureHeapDefault());
-  CHECK(Isolate::Current()->memory_allocator()->Setup(
-      HEAP->MaxReserved(), HEAP->MaxExecutableSize()));
+  Isolate* isolate = Isolate::Current();
+  isolate->InitializeLoggingAndCounters();
+  Heap* heap = isolate->heap();
+  CHECK(heap->ConfigureHeapDefault());
+  MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
+  CHECK(memory_allocator->Setup(heap->MaxReserved(),
+                                heap->MaxExecutableSize()));
+  TestMemoryAllocatorScope test_scope(isolate, memory_allocator);

-  NewSpace new_space(HEAP);
+  NewSpace new_space(heap);

   void* chunk =
-      Isolate::Current()->memory_allocator()->ReserveInitialChunk(
-          4 * HEAP->ReservedSemiSpaceSize());
+      memory_allocator->ReserveInitialChunk(4 * heap->ReservedSemiSpaceSize());
   CHECK(chunk != NULL);
   Address start = RoundUp(static_cast<Address>(chunk),
-                          2 * HEAP->ReservedSemiSpaceSize());
-  CHECK(new_space.Setup(start, 2 * HEAP->ReservedSemiSpaceSize()));
+                          2 * heap->ReservedSemiSpaceSize());
+  CHECK(new_space.Setup(start, 2 * heap->ReservedSemiSpaceSize()));
   CHECK(new_space.HasBeenSetup());

   while (new_space.Available() >= Page::kMaxHeapObjectSize) {

@@ -178,28 +211,33 @@ TEST(NewSpace) {
   }

   new_space.TearDown();
-  Isolate::Current()->memory_allocator()->TearDown();
+  memory_allocator->TearDown();
+  delete memory_allocator;
 }


 TEST(OldSpace) {
   OS::Setup();
-  CHECK(HEAP->ConfigureHeapDefault());
-  CHECK(Isolate::Current()->memory_allocator()->Setup(
-      HEAP->MaxReserved(), HEAP->MaxExecutableSize()));
-
-  OldSpace* s = new OldSpace(HEAP,
-                             HEAP->MaxOldGenerationSize(),
+  Isolate* isolate = Isolate::Current();
+  isolate->InitializeLoggingAndCounters();
+  Heap* heap = isolate->heap();
+  CHECK(heap->ConfigureHeapDefault());
+  MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
+  CHECK(memory_allocator->Setup(heap->MaxReserved(),
+                                heap->MaxExecutableSize()));
+  TestMemoryAllocatorScope test_scope(isolate, memory_allocator);
+
+  OldSpace* s = new OldSpace(heap,
+                             heap->MaxOldGenerationSize(),
                              OLD_POINTER_SPACE,
                              NOT_EXECUTABLE);
   CHECK(s != NULL);

-  void* chunk =
-      Isolate::Current()->memory_allocator()->ReserveInitialChunk(
-          4 * HEAP->ReservedSemiSpaceSize());
+  void* chunk = memory_allocator->ReserveInitialChunk(
+      4 * heap->ReservedSemiSpaceSize());
   CHECK(chunk != NULL);
   Address start = static_cast<Address>(chunk);
-  size_t size = RoundUp(start, 2 * HEAP->ReservedSemiSpaceSize()) - start;
+  size_t size = RoundUp(start, 2 * heap->ReservedSemiSpaceSize()) - start;

   CHECK(s->Setup(start, size));

@@ -209,13 +247,13 @@ TEST(OldSpace) {

   s->TearDown();
   delete s;
-  Isolate::Current()->memory_allocator()->TearDown();
+  memory_allocator->TearDown();
+  delete memory_allocator;
 }


 TEST(LargeObjectSpace) {
-  OS::Setup();
-  CHECK(HEAP->Setup(false));
+  v8::V8::Initialize();

   LargeObjectSpace* lo = HEAP->lo_space();
   CHECK(lo != NULL);

@@ -247,9 +285,4 @@ TEST(LargeObjectSpace) {
   CHECK(!lo->IsEmpty());

   CHECK(lo->AllocateRaw(lo_size)->IsFailure());
-
-  lo->TearDown();
-  delete lo;
-
-  Isolate::Current()->memory_allocator()->TearDown();
 }
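
Note (editor): TestMemoryAllocatorScope in the test changes above is a small RAII guard that swaps the isolate's allocator field for the duration of a test and restores it afterwards. A self-contained sketch of the same pattern with generic names (not the V8 test helper itself):

    #include <cassert>

    struct Allocator {};

    struct Host {
      Allocator* allocator = nullptr;
    };

    // Installs a replacement on construction and restores the previous value
    // on destruction, even if the code in between exits early.
    class AllocatorSwapScope {
     public:
      AllocatorSwapScope(Host* host, Allocator* replacement)
          : host_(host), old_(host->allocator) {
        host_->allocator = replacement;
      }
      ~AllocatorSwapScope() { host_->allocator = old_; }

      AllocatorSwapScope(const AllocatorSwapScope&) = delete;
      AllocatorSwapScope& operator=(const AllocatorSwapScope&) = delete;

     private:
      Host* host_;
      Allocator* old_;
    };

    int main() {
      Host host;
      Allocator test_allocator;
      {
        AllocatorSwapScope scope(&host, &test_allocator);
        assert(host.allocator == &test_allocator);
      }
      assert(host.allocator == nullptr);  // previous value restored
      return 0;
    }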