Commit a713b274 authored by antonm@chromium.org

Introduce fast native caches and use them in String.search.

Review URL: http://codereview.chromium.org/1563005

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@4418 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 518481d5
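At a high level, the patch adds a per-context list of JSFunction result caches: each cache is a FixedArray holding a factory function, a "finger" index remembering the most recent hit, the current size, and then key/value pairs. Codegen emits an inline check of the finger slot and falls back to the new Runtime_GetFromCache on a miss. The standalone C++ sketch below models that discipline with hypothetical names (a std::function stands in for the JSFunction factory); it is illustrative only, not V8 code.

  #include <functional>
  #include <string>
  #include <utility>
  #include <vector>

  // Illustrative model of a JSFunction result cache: key/value pairs plus a
  // "finger" remembering the last hit, so repeating a key costs one compare.
  struct ResultCacheModel {
    std::function<std::string(const std::string&)> factory;
    std::vector<std::pair<std::string, std::string>> entries;
    size_t finger = 0;     // index of the most recently hit pair
    size_t capacity = 16;  // fixed number of pairs, as in the bootstrapper list

    const std::string& Get(const std::string& key) {
      if (!entries.empty() && entries[finger].first == key) {
        return entries[finger].second;               // fastest case: same place again
      }
      for (size_t i = 0; i < entries.size(); ++i) {  // slow path: scan the pairs
        if (entries[i].first == key) {
          finger = i;                                // move the finger onto the hit
          return entries[i].second;
        }
      }
      // Miss: grow while there is spare room, otherwise overwrite the pair just
      // past the finger (approximately the least recently used one).
      size_t index = entries.size() < capacity
                         ? entries.size()
                         : (finger + 1) % entries.size();
      if (index == entries.size()) {
        entries.emplace_back(key, factory(key));
      } else {
        entries[index] = std::make_pair(key, factory(key));
      }
      finger = index;
      return entries[index].second;
    }
  };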
......@@ -4118,6 +4118,72 @@ void CodeGenerator::GenerateRegExpConstructResult(ZoneList<Expression*>* args) {
}
class DeferredSearchCache: public DeferredCode {
public:
DeferredSearchCache(Register dst, Register cache, Register key)
: dst_(dst), cache_(cache), key_(key) {
set_comment("[ DeferredSearchCache");
}
virtual void Generate();
private:
Register dst_, cache_, key_;
};
void DeferredSearchCache::Generate() {
__ push(cache_);
__ push(key_);
__ CallRuntime(Runtime::kGetFromCache, 2);
if (!dst_.is(r0)) {
__ mov(dst_, r0);
}
}
void CodeGenerator::GenerateGetFromCache(ZoneList<Expression*>* args) {
ASSERT_EQ(2, args->length());
ASSERT_NE(NULL, args->at(0)->AsLiteral());
int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
Handle<FixedArray> jsfunction_result_caches(
Top::global_context()->jsfunction_result_caches());
if (jsfunction_result_caches->length() <= cache_id) {
__ Abort("Attempt to use undefined cache.");
__ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
frame_->EmitPush(r0);
return;
}
Handle<FixedArray> cache_obj(
FixedArray::cast(jsfunction_result_caches->get(cache_id)));
Load(args->at(1));
frame_->EmitPop(r2);
DeferredSearchCache* deferred = new DeferredSearchCache(r0, r1, r2);
const int kFingerOffset =
FixedArray::OffsetOfElementAt(JSFunctionResultCache::kFingerIndex);
ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
__ mov(r1, Operand(cache_obj));
__ ldr(r0, FieldMemOperand(r1, kFingerOffset));
// r0 now holds finger offset as a smi.
__ add(r3, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
// r3 now points to the start of fixed array elements.
__ ldr(r0, MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize, PreIndex));
// Note side effect of PreIndex: r3 now points to the key of the pair.
__ cmp(r2, r0);
deferred->Branch(ne);
__ ldr(r0, MemOperand(r3, kPointerSize));
deferred->BindExit();
frame_->EmitPush(r0);
}
void CodeGenerator::GenerateNumberToString(ZoneList<Expression*>* args) {
ASSERT_EQ(args->length(), 1);
......
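Expressed as C++-style pseudocode, the inline fast path that GenerateGetFromCache emits on the ARM port above (and on the ia32 and x64 ports further down) amounts to the following sketch, where cache is the backing FixedArray and the finger is stored as a smi:

  // Sketch of the emitted fast path; the deferred code is the slow-path runtime call.
  int finger = Smi::cast(cache->get(JSFunctionResultCache::kFingerIndex))->value();
  Object* result;
  if (cache->get(finger) == key) {
    result = cache->get(finger + 1);   // the value is stored right after its key
  } else {
    result = /* deferred call to Runtime::kGetFromCache(cache, key) */;
  }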
......@@ -409,6 +409,9 @@ class CodeGenerator: public AstVisitor {
void GenerateRegExpConstructResult(ZoneList<Expression*>* args);
// Support for fast native caches.
void GenerateGetFromCache(ZoneList<Expression*>* args);
// Fast support for number to string.
void GenerateNumberToString(ZoneList<Expression*>* args);
......
......@@ -228,6 +228,7 @@ class Genesis BASE_EMBEDDED {
// Used for creating a context from scratch.
void InstallNativeFunctions();
bool InstallNatives();
void InstallJSFunctionResultCaches();
// Used both for deserialized and from-scratch contexts to add the extensions
// provided.
static bool InstallExtensions(Handle<Context> global_context,
......@@ -1301,6 +1302,44 @@ bool Genesis::InstallNatives() {
}
// Do not forget to update macros.py with the named constant
// for the cache id.
#define JSFUNCTION_RESULT_CACHE_LIST(F) \
F(16, global_context()->regexp_function())
static FixedArray* CreateCache(int size, JSFunction* factory) {
// Caches are supposed to live for a long time, allocate in old space.
int array_size = JSFunctionResultCache::kEntriesIndex + 2 * size;
Handle<FixedArray> cache =
Factory::NewFixedArrayWithHoles(array_size, TENURED);
cache->set(JSFunctionResultCache::kFactoryIndex, factory);
cache->set(JSFunctionResultCache::kFingerIndex,
Smi::FromInt(JSFunctionResultCache::kEntriesIndex));
cache->set(JSFunctionResultCache::kCacheSizeIndex,
Smi::FromInt(JSFunctionResultCache::kEntriesIndex));
return *cache;
}
void Genesis::InstallJSFunctionResultCaches() {
const int kNumberOfCaches = 0 +
#define F(size, func) + 1
JSFUNCTION_RESULT_CACHE_LIST(F)
#undef F
;
Handle<FixedArray> caches = Factory::NewFixedArray(kNumberOfCaches, TENURED);
int index = 0;
#define F(size, func) caches->set(index++, CreateCache(size, func));
JSFUNCTION_RESULT_CACHE_LIST(F)
#undef F
global_context()->set_jsfunction_result_caches(*caches);
}
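The kNumberOfCaches expression above relies on a small preprocessor counting idiom: with F(size, func) defined as "+ 1", JSFUNCTION_RESULT_CACHE_LIST(F) expands to one "+ 1" per list entry, so with the single current entry the initializer becomes 0 + + 1, i.e. 1; adding another F(...) line to the list macro would make it 0 + + 1 + + 1, i.e. 2, with no separate count to keep in sync by hand.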
int BootstrapperActive::nesting_ = 0;
......@@ -1664,6 +1703,7 @@ Genesis::Genesis(Handle<Object> global_object,
HookUpGlobalProxy(inner_global, global_proxy);
InitializeGlobal(inner_global, empty_function);
if (!InstallNatives()) return;
InstallJSFunctionResultCaches();
MakeFunctionInstancePrototypeWritable();
......
......@@ -124,6 +124,7 @@ namespace internal {
F(StringCompare, 2, 1) \
F(RegExpExec, 4, 1) \
F(RegExpConstructResult, 3, 1) \
F(GetFromCache, 2, 1) \
F(NumberToString, 1, 1) \
F(MathPow, 2, 1) \
F(MathSin, 1, 1) \
......
......@@ -83,6 +83,7 @@ enum ContextLookupFlags {
V(GET_STACK_TRACE_LINE_INDEX, JSFunction, get_stack_trace_line_fun) \
V(CONFIGURE_GLOBAL_INDEX, JSFunction, configure_global_fun) \
V(FUNCTION_CACHE_INDEX, JSObject, function_cache) \
V(JSFUNCTION_RESULT_CACHES_INDEX, FixedArray, jsfunction_result_caches) \
V(RUNTIME_CONTEXT_INDEX, Context, runtime_context) \
V(CALL_AS_FUNCTION_DELEGATE_INDEX, JSFunction, call_as_function_delegate) \
V(CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, JSFunction, \
......@@ -205,6 +206,7 @@ class Context: public FixedArray {
GET_STACK_TRACE_LINE_INDEX,
CONFIGURE_GLOBAL_INDEX,
FUNCTION_CACHE_INDEX,
JSFUNCTION_RESULT_CACHES_INDEX,
RUNTIME_CONTEXT_INDEX,
CALL_AS_FUNCTION_DELEGATE_INDEX,
CALL_AS_CONSTRUCTOR_DELEGATE_INDEX,
......
......@@ -43,9 +43,11 @@ Handle<FixedArray> Factory::NewFixedArray(int size, PretenureFlag pretenure) {
}
Handle<FixedArray> Factory::NewFixedArrayWithHoles(int size) {
Handle<FixedArray> Factory::NewFixedArrayWithHoles(int size,
PretenureFlag pretenure) {
ASSERT(0 <= size);
CALL_HEAP_FUNCTION(Heap::AllocateFixedArrayWithHoles(size), FixedArray);
CALL_HEAP_FUNCTION(Heap::AllocateFixedArrayWithHoles(size, pretenure),
FixedArray);
}
......
......@@ -47,7 +47,9 @@ class Factory : public AllStatic {
PretenureFlag pretenure = NOT_TENURED);
// Allocate a new fixed array with non-existing entries (the hole).
static Handle<FixedArray> NewFixedArrayWithHoles(int size);
static Handle<FixedArray> NewFixedArrayWithHoles(
int size,
PretenureFlag pretenure = NOT_TENURED);
static Handle<NumberDictionary> NewNumberDictionary(int at_least_space_for);
......
......@@ -1670,8 +1670,8 @@ bool Heap::CreateInitialObjects() {
if (InitializeNumberStringCache()->IsFailure()) return false;
// Allocate cache for single character strings.
obj = AllocateFixedArray(String::kMaxAsciiCharCode+1, TENURED);
// Allocate cache for single character ASCII strings.
obj = AllocateFixedArray(String::kMaxAsciiCharCode + 1, TENURED);
if (obj->IsFailure()) return false;
set_single_character_string_cache(FixedArray::cast(obj));
......@@ -3013,13 +3013,10 @@ Object* Heap::AllocateFixedArray(int length) {
}
Object* Heap::AllocateFixedArray(int length, PretenureFlag pretenure) {
ASSERT(length >= 0);
ASSERT(empty_fixed_array()->IsFixedArray());
Object* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) {
if (length < 0 || length > FixedArray::kMaxLength) {
return Failure::OutOfMemoryException();
}
if (length == 0) return empty_fixed_array();
AllocationSpace space =
(pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
......@@ -3053,18 +3050,39 @@ Object* Heap::AllocateFixedArray(int length, PretenureFlag pretenure) {
ASSERT(space == LO_SPACE);
result = lo_space_->AllocateRawFixedArray(size);
}
return result;
}
static Object* AllocateFixedArrayWithFiller(int length,
PretenureFlag pretenure,
Object* filler) {
ASSERT(length >= 0);
ASSERT(Heap::empty_fixed_array()->IsFixedArray());
if (length == 0) return Heap::empty_fixed_array();
ASSERT(!Heap::InNewSpace(filler));
Object* result = Heap::AllocateRawFixedArray(length, pretenure);
if (result->IsFailure()) return result;
// Initialize the object.
reinterpret_cast<Array*>(result)->set_map(fixed_array_map());
HeapObject::cast(result)->set_map(Heap::fixed_array_map());
FixedArray* array = FixedArray::cast(result);
array->set_length(length);
ASSERT(!Heap::InNewSpace(undefined_value()));
MemsetPointer(array->data_start(), undefined_value(), length);
MemsetPointer(array->data_start(), filler, length);
return array;
}
Object* Heap::AllocateFixedArray(int length, PretenureFlag pretenure) {
return AllocateFixedArrayWithFiller(length, pretenure, undefined_value());
}
Object* Heap::AllocateFixedArrayWithHoles(int length, PretenureFlag pretenure) {
return AllocateFixedArrayWithFiller(length, pretenure, the_hole_value());
}
Object* Heap::AllocateUninitializedFixedArray(int length) {
if (length == 0) return empty_fixed_array();
......@@ -3077,22 +3095,6 @@ Object* Heap::AllocateUninitializedFixedArray(int length) {
}
Object* Heap::AllocateFixedArrayWithHoles(int length) {
if (length == 0) return empty_fixed_array();
Object* result = AllocateRawFixedArray(length);
if (!result->IsFailure()) {
// Initialize header.
reinterpret_cast<Array*>(result)->set_map(fixed_array_map());
FixedArray* array = FixedArray::cast(result);
array->set_length(length);
// Initialize body.
ASSERT(!Heap::InNewSpace(the_hole_value()));
MemsetPointer(array->data_start(), the_hole_value(), length);
}
return result;
}
Object* Heap::AllocateHashTable(int length, PretenureFlag pretenure) {
Object* result = Heap::AllocateFixedArray(length, pretenure);
if (result->IsFailure()) return result;
......
......@@ -484,7 +484,9 @@ class Heap : public AllStatic {
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
// failed.
// Please note this does not perform a garbage collection.
static Object* AllocateFixedArrayWithHoles(int length);
static Object* AllocateFixedArrayWithHoles(
int length,
PretenureFlag pretenure = NOT_TENURED);
// AllocateHashTable is identical to AllocateFixedArray except
// that the resulting object has hash_table_map as map.
......@@ -896,8 +898,10 @@ class Heap : public AllStatic {
// Returns the adjusted value.
static inline int AdjustAmountOfExternalAllocatedMemory(int change_in_bytes);
// Allocate unitialized fixed array (pretenure == NON_TENURE).
// Allocate uninitialized fixed array.
static Object* AllocateRawFixedArray(int length);
static Object* AllocateRawFixedArray(int length,
PretenureFlag pretenure);
// True if we have reached the allocation limit in the old generation that
// should force the next GC (caused normally) to be a full one.
......
......@@ -6628,6 +6628,80 @@ void CodeGenerator::GenerateRegExpConstructResult(ZoneList<Expression*>* args) {
}
class DeferredSearchCache: public DeferredCode {
public:
DeferredSearchCache(Register dst, Register cache, Register key)
: dst_(dst), cache_(cache), key_(key) {
set_comment("[ DeferredSearchCache");
}
virtual void Generate();
private:
Register dst_, cache_, key_;
};
void DeferredSearchCache::Generate() {
__ push(cache_);
__ push(key_);
__ CallRuntime(Runtime::kGetFromCache, 2);
if (!dst_.is(eax)) {
__ mov(dst_, eax);
}
}
void CodeGenerator::GenerateGetFromCache(ZoneList<Expression*>* args) {
ASSERT_EQ(2, args->length());
ASSERT_NE(NULL, args->at(0)->AsLiteral());
int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
Handle<FixedArray> jsfunction_result_caches(
Top::global_context()->jsfunction_result_caches());
if (jsfunction_result_caches->length() <= cache_id) {
__ Abort("Attempt to use undefined cache.");
frame_->Push(Factory::undefined_value());
return;
}
Handle<FixedArray> cache_obj(
FixedArray::cast(jsfunction_result_caches->get(cache_id)));
Load(args->at(1));
Result key = frame_->Pop();
key.ToRegister();
Result cache = allocator()->Allocate();
__ mov(cache.reg(), cache_obj);
Result tmp = allocator()->Allocate();
DeferredSearchCache* deferred = new DeferredSearchCache(tmp.reg(),
cache.reg(),
key.reg());
const int kFingerOffset =
FixedArray::OffsetOfElementAt(JSFunctionResultCache::kFingerIndex);
// tmp.reg() now holds finger offset as a smi.
ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
__ mov(tmp.reg(), FieldOperand(cache.reg(), kFingerOffset));
__ cmp(key.reg(), FieldOperand(cache.reg(),
tmp.reg(), // as smi
times_half_pointer_size,
FixedArray::kHeaderSize));
deferred->Branch(not_equal);
__ mov(tmp.reg(), FieldOperand(cache.reg(),
tmp.reg(), // as smi
times_half_pointer_size,
kPointerSize + FixedArray::kHeaderSize));
deferred->BindExit();
frame_->Push(&tmp);
}
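The times_half_pointer_size scaling above works because an ia32 smi stores its value shifted left by one (kSmiTagSize == 1): scaling the tagged word by 2 yields value * 4, i.e. value * kPointerSize, so the smi finger can be used directly as an index without untagging it first. The x64 port below achieves the same effect via SmiToIndex.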
void CodeGenerator::GenerateNumberToString(ZoneList<Expression*>* args) {
ASSERT_EQ(args->length(), 1);
......
......@@ -630,6 +630,9 @@ class CodeGenerator: public AstVisitor {
void GenerateRegExpConstructResult(ZoneList<Expression*>* args);
// Support for fast native caches.
void GenerateGetFromCache(ZoneList<Expression*>* args);
// Fast support for number to string.
void GenerateNumberToString(ZoneList<Expression*>* args);
......
......@@ -83,6 +83,9 @@ const kMaxMonth = 10000000;
const kMinDate = -100000000;
const kMaxDate = 100000000;
# Native cache ids.
const STRING_TO_REGEXP_CACHE_ID = 0;
# Type query macros.
#
# Note: We have special support for typeof(foo) === 'bar' in the compiler.
......
......@@ -76,6 +76,7 @@
// - MapCache
// - Context
// - GlobalContext
// - JSFunctionResultCache
// - String
// - SeqString
// - SeqAsciiString
......@@ -2307,6 +2308,23 @@ class NumberDictionary: public Dictionary<NumberDictionaryShape, uint32_t> {
};
// JSFunctionResultCache caches results of some JSFunction invocation.
// It is a fixed array with fixed structure:
// [0]: factory function
// [1]: finger index
// [2]: current cache size
// [3]: dummy field.
// The rest of the array holds key/value pairs.
class JSFunctionResultCache: public FixedArray {
public:
static const int kFactoryIndex = 0;
static const int kFingerIndex = kFactoryIndex + 1;
static const int kCacheSizeIndex = kFingerIndex + 1;
static const int kDummyIndex = kCacheSizeIndex + 1;
static const int kEntriesIndex = kDummyIndex + 1;
};
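Given this layout, pair i of the cache occupies elements kEntriesIndex + 2 * i (key) and kEntriesIndex + 2 * i + 1 (value). A hypothetical pair of index helpers, not part of this patch, would read:

  // Sketch only: helpers this patch does not define, shown to make the layout explicit.
  static int KeyIndexOf(int pair) { return kEntriesIndex + 2 * pair; }
  static int ValueIndexOf(int pair) { return KeyIndexOf(pair) + 1; }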
// ByteArray represents fixed sized byte arrays. Used by the outside world,
// such as PCRE, and also by the memory allocator and garbage collector to
// fill in free blocks in the heap.
......
......@@ -10007,6 +10007,89 @@ static Object* Runtime_DeleteHandleScopeExtensions(Arguments args) {
}
static Object* CacheMiss(FixedArray* cache_obj, int index, Object* key_obj) {
ASSERT(index % 2 == 0); // index of the key
ASSERT(index >= JSFunctionResultCache::kEntriesIndex);
ASSERT(index < cache_obj->length());
HandleScope scope;
Handle<FixedArray> cache(cache_obj);
Handle<Object> key(key_obj);
Handle<JSFunction> factory(JSFunction::cast(
cache->get(JSFunctionResultCache::kFactoryIndex)));
// TODO(antonm): consider passing a receiver when constructing a cache.
Handle<Object> receiver(Top::global_context()->global());
Handle<Object> value;
{
// This handle is neither shared nor used later, so it's safe.
Object** argv[] = { key.location() };
bool pending_exception = false;
value = Execution::Call(factory,
receiver,
1,
argv,
&pending_exception);
if (pending_exception) return Failure::Exception();
}
cache->set(index, *key);
cache->set(index + 1, *value);
cache->set(JSFunctionResultCache::kFingerIndex, Smi::FromInt(index));
return *value;
}
static Object* Runtime_GetFromCache(Arguments args) {
// This is only called from codegen, so checks might be more lax.
CONVERT_CHECKED(FixedArray, cache, args[0]);
Object* key = args[1];
const int finger_index =
Smi::cast(cache->get(JSFunctionResultCache::kFingerIndex))->value();
Object* o = cache->get(finger_index);
if (o == key) {
// The fastest case: hit the same place again.
return cache->get(finger_index + 1);
}
for (int i = finger_index - 2;
i >= JSFunctionResultCache::kEntriesIndex;
i -= 2) {
o = cache->get(i);
if (o == key) {
cache->set(JSFunctionResultCache::kFingerIndex, Smi::FromInt(i));
return cache->get(i + 1);
}
}
const int size =
Smi::cast(cache->get(JSFunctionResultCache::kCacheSizeIndex))->value();
ASSERT(size <= cache->length());
for (int i = size - 2; i > finger_index; i -= 2) {
o = cache->get(i);
if (o == key) {
cache->set(JSFunctionResultCache::kFingerIndex, Smi::FromInt(i));
return cache->get(i + 1);
}
}
// Cache miss. If we have spare room, put the new data there; otherwise
// evict the entry just past the finger, which must be the least recently used.
if (size < cache->length()) {
cache->set(JSFunctionResultCache::kCacheSizeIndex, Smi::FromInt(size + 2));
return CacheMiss(cache, size, key);
} else {
int target_index = (finger_index + 2 < cache->length()) ?
finger_index + 2 : JSFunctionResultCache::kEntriesIndex;
return CacheMiss(cache, target_index, key);
}
}
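Note the eviction choice on a full cache: hits always move the finger onto the matching key, so the pair just past the finger is, heuristically, the least recently touched one; Runtime_GetFromCache overwrites that pair, wrapping back to kEntriesIndex when the finger sits at the end, rather than maintaining exact LRU bookkeeping.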
#ifdef DEBUG
// ListNatives is ONLY used by the fuzz-natives.js in debug mode
// Exclude the code in release mode.
......
......@@ -288,6 +288,8 @@ namespace internal {
F(LocalKeys, 1, 1) \
/* Handle scopes */ \
F(DeleteHandleScopeExtensions, 0, 1) \
/* Cache support */ \
F(GetFromCache, 2, 1) \
\
/* Pseudo functions - handled as macros by parser */ \
F(IS_VAR, 1, 1)
......
......@@ -529,10 +529,16 @@ function ApplyReplacementFunction(replace, matchInfo, subject) {
return replace.apply(null, parameters);
}
// ECMA-262 section 15.5.4.12
function StringSearch(re) {
var regexp = new $RegExp(re);
var regexp;
if (IS_STRING(re)) {
regexp = %_GetFromCache(STRING_TO_REGEXP_CACHE_ID, re);
} else if (IS_REGEXP(re)) {
regexp = re;
} else {
regexp = new $RegExp(re);
}
var s = TO_STRING_INLINE(this);
var match = DoRegExpExec(regexp, s, 0);
if (match) {
......
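With STRING_TO_REGEXP_CACHE_ID wired to the RegExp constructor in the bootstrapper, repeated String.prototype.search calls with the same string pattern now fetch one cached RegExp from %_GetFromCache instead of constructing a fresh one on every call; RegExp arguments are used directly, and anything else still goes through new $RegExp.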
......@@ -4259,6 +4259,82 @@ void CodeGenerator::GenerateRegExpConstructResult(ZoneList<Expression*>* args) {
}
class DeferredSearchCache: public DeferredCode {
public:
DeferredSearchCache(Register dst, Register cache, Register key)
: dst_(dst), cache_(cache), key_(key) {
set_comment("[ DeferredSearchCache");
}
virtual void Generate();
private:
Register dst_, cache_, key_;
};
void DeferredSearchCache::Generate() {
__ push(cache_);
__ push(key_);
__ CallRuntime(Runtime::kGetFromCache, 2);
if (!dst_.is(rax)) {
__ movq(dst_, rax);
}
}
void CodeGenerator::GenerateGetFromCache(ZoneList<Expression*>* args) {
ASSERT_EQ(2, args->length());
ASSERT_NE(NULL, args->at(0)->AsLiteral());
int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
Handle<FixedArray> jsfunction_result_caches(
Top::global_context()->jsfunction_result_caches());
if (jsfunction_result_caches->length() <= cache_id) {
__ Abort("Attempt to use undefined cache.");
frame_->Push(Factory::undefined_value());
return;
}
Handle<FixedArray> cache_obj(
FixedArray::cast(jsfunction_result_caches->get(cache_id)));
Load(args->at(1));
Result key = frame_->Pop();
key.ToRegister();
Result cache = allocator()->Allocate();
__ movq(cache.reg(), cache_obj, RelocInfo::EMBEDDED_OBJECT);
Result tmp = allocator()->Allocate();
DeferredSearchCache* deferred = new DeferredSearchCache(tmp.reg(),
cache.reg(),
key.reg());
const int kFingerOffset =
FixedArray::OffsetOfElementAt(JSFunctionResultCache::kFingerIndex);
// tmp.reg() now holds finger offset as a smi.
ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
__ movq(tmp.reg(), FieldOperand(cache.reg(), kFingerOffset));
SmiIndex index =
masm()->SmiToIndex(kScratchRegister, tmp.reg(), kPointerSizeLog2);
__ cmpq(key.reg(), FieldOperand(cache.reg(),
index.reg,
index.scale,
FixedArray::kHeaderSize));
deferred->Branch(not_equal);
__ movq(tmp.reg(), FieldOperand(cache.reg(),
index.reg,
index.scale,
kPointerSize + FixedArray::kHeaderSize));
deferred->BindExit();
frame_->Push(&tmp);
}
void CodeGenerator::GenerateNumberToString(ZoneList<Expression*>* args) {
ASSERT_EQ(args->length(), 1);
......
......@@ -584,6 +584,9 @@ class CodeGenerator: public AstVisitor {
void GenerateRegExpConstructResult(ZoneList<Expression*>* args);
// Support for fast native caches.
void GenerateGetFromCache(ZoneList<Expression*>* args);
// Fast support for number to string.
void GenerateNumberToString(ZoneList<Expression*>* args);
......
......@@ -169,6 +169,10 @@ var knownProblems = {
"RegExpConstructResult": true,
"_RegExpConstructResult": true,
// This function performs some checks at compile time (it requires its first
// argument to be a compile-time smi).
"_GetFromCache": true,
// LiveEdit feature is under development currently and has fragile input.
"LiveEditFindSharedFunctionInfosForScript": true,
"LiveEditGatherCompileInfo": true,
......