Count ICs that have type information.

Hidden behind the --type-info-threshold=X flag; only meaningful in combination with the experimental profiler (--experimental-profiler).

Review URL: https://chromiumcodereview.appspot.com/9403009

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@10753 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 2740d306
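
In outline, distilled from the hunks below: the full code generator counts every IC call site it emits into a new per-Code TypeFeedbackInfo struct (ic_total_count), IC::PostPatching() adjusts a second counter (ic_with_typeinfo_count) by +1 or -1 whenever an IC leaves or re-enters the uninitialized state, and the runtime profiler only optimizes a hot function once the percentage of type-informed ICs reaches the threshold. Below is a minimal standalone sketch of that gating arithmetic; the counter and flag names mirror the patch, but the plain struct and main() harness are illustrative assumptions, not V8 code:

    // Sketch of the optimization gate introduced by this patch; not V8 source.
    #include <cstdio>

    struct TypeFeedbackInfo {
      int ic_total_count;          // one per IC call site emitted by full codegen
      int ic_with_typeinfo_count;  // ICs that have left UNINITIALIZED/PREMONOMORPHIC
    };

    static int FLAG_type_info_threshold = 25;  // percentage; 0 disables the gate

    // Mirrors GetICCounts(): a function with no ICs reports 100%, so it can
    // never be held back by the threshold.
    static int TypeInfoPercentage(const TypeFeedbackInfo& info) {
      return info.ic_total_count > 0
                 ? 100 * info.ic_with_typeinfo_count / info.ic_total_count
                 : 100;
    }

    int main() {
      TypeFeedbackInfo info = {10, 2};  // 10 IC sites, 2 with type feedback
      int percentage = TypeInfoPercentage(info);
      std::printf("ICs with typeinfo: %d/%d (%d%%)\n",
                  info.ic_with_typeinfo_count, info.ic_total_count, percentage);
      if (percentage >= FLAG_type_info_threshold) {
        std::printf("hot and stable -> optimize\n");
      } else {
        std::printf("not enough type info -> keep collecting feedback\n");  // 20% < 25%
      }
      return 0;
    }

With a build containing this patch, an invocation along the lines of d8 --experimental-profiler --type-info-threshold=25 --trace-opt-verbose test.js should exercise the new paths; --trace-opt then prints the "ICs with typeinfo: X/Y (Z%)" diagnostic added below.
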
......@@ -3850,7 +3850,7 @@ class Internals {
static const int kFullStringRepresentationMask = 0x07;
static const int kExternalTwoByteRepresentationTag = 0x02;
-static const int kJSObjectType = 0xa7;
+static const int kJSObjectType = 0xa8;
static const int kFirstNonstringType = 0x80;
static const int kForeignType = 0x85;
......
......@@ -148,6 +148,13 @@ Handle<AccessorPair> Factory::NewAccessorPair() {
}
Handle<TypeFeedbackInfo> Factory::NewTypeFeedbackInfo() {
CALL_HEAP_FUNCTION(isolate(),
isolate()->heap()->AllocateTypeFeedbackInfo(),
TypeFeedbackInfo);
}
// Symbols are created in the old generation (data space).
Handle<String> Factory::LookupSymbol(Vector<const char> string) {
CALL_HEAP_FUNCTION(isolate(),
......
......@@ -76,6 +76,8 @@ class Factory {
// Allocates a pre-tenured empty AccessorPair.
Handle<AccessorPair> NewAccessorPair();
Handle<TypeFeedbackInfo> NewTypeFeedbackInfo();
Handle<String> LookupSymbol(Vector<const char> str);
Handle<String> LookupSymbol(Handle<String> str);
Handle<String> LookupAsciiSymbol(Vector<const char> str);
......
......@@ -177,12 +177,17 @@ DEFINE_bool(weighted_back_edges, false,
"weight back edges by jump distance for interrupt triggering")
DEFINE_int(interrupt_budget, 10000,
"execution budget before interrupt is triggered")
DEFINE_int(type_info_threshold, 0,
"percentage of ICs that must have type info to allow optimization")
DEFINE_implication(experimental_profiler, watch_ic_patching)
DEFINE_implication(experimental_profiler, self_optimization)
DEFINE_implication(experimental_profiler, count_based_interrupts)
DEFINE_implication(experimental_profiler, weighted_back_edges)
DEFINE_bool(trace_opt_verbose, false, "extra verbose compilation tracing")
DEFINE_implication(trace_opt_verbose, trace_opt)
// assembler-ia32.cc / assembler-arm.cc / assembler-x64.cc
DEFINE_bool(debug_code, false,
"generate extra code (assertions) for debugging")
......
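(Note on the flag hunk above: each DEFINE_implication(a, b) turns flag b on whenever flag a is set, so --experimental-profiler by itself enables watch_ic_patching, self_optimization, count_based_interrupts and weighted_back_edges; --type-info-threshold defaults to 0, i.e. the gate is off unless set explicitly.)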
......@@ -303,6 +303,7 @@ bool FullCodeGenerator::MakeCode(CompilationInfo* info) {
Handle<Code> code = CodeGenerator::MakeCodeEpilogue(&masm, flags, info);
code->set_optimizable(info->IsOptimizable());
cgen.PopulateDeoptimizationData(code);
cgen.PopulateTypeFeedbackInfo(code);
cgen.PopulateTypeFeedbackCells(code);
code->set_has_deoptimization_support(info->HasDeoptimizationSupport());
code->set_handler_table(*cgen.handler_table());
......@@ -361,6 +362,13 @@ void FullCodeGenerator::PopulateDeoptimizationData(Handle<Code> code) {
}
void FullCodeGenerator::PopulateTypeFeedbackInfo(Handle<Code> code) {
Handle<TypeFeedbackInfo> info = isolate()->factory()->NewTypeFeedbackInfo();
info->set_ic_total_count(ic_total_count_);
code->set_type_feedback_info(*info);
}
void FullCodeGenerator::PopulateTypeFeedbackCells(Handle<Code> code) {
if (type_feedback_cells_.is_empty()) return;
int length = type_feedback_cells_.length();
......@@ -371,7 +379,8 @@ void FullCodeGenerator::PopulateTypeFeedbackCells(Handle<Code> code) {
cache->SetAstId(i, Smi::FromInt(type_feedback_cells_[i].ast_id));
cache->SetCell(i, *type_feedback_cells_[i].cell);
}
-code->set_type_feedback_cells(*cache);
+TypeFeedbackInfo::cast(code->type_feedback_info())->set_type_feedback_cells(
+*cache);
}
......
......@@ -89,16 +89,11 @@ class FullCodeGenerator: public AstVisitor {
? info->function()->ast_node_count() : 0),
stack_checks_(2), // There's always at least one.
type_feedback_cells_(info->HasDeoptimizationSupport()
-? info->function()->ast_node_count() : 0) { }
+? info->function()->ast_node_count() : 0),
+ic_total_count_(0) { }
static bool MakeCode(CompilationInfo* info);
-void Generate();
-void PopulateDeoptimizationData(Handle<Code> code);
-void PopulateTypeFeedbackCells(Handle<Code> code);
-Handle<FixedArray> handler_table() { return handler_table_; }
// Encode state and pc-offset as a BitField<type, start, size>.
// Only use 30 bits because we encode the result as a smi.
class StateField : public BitField<State, 0, 1> { };
......@@ -516,6 +511,10 @@ class FullCodeGenerator: public AstVisitor {
// accumulator.
void EmitKeyedPropertyAssignment(Assignment* expr);
void CallIC(Handle<Code> code,
RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
unsigned ast_id = kNoASTId);
void SetFunctionPosition(FunctionLiteral* fun);
void SetReturnPosition(FunctionLiteral* fun);
void SetStatementPosition(Statement* stmt);
......@@ -581,6 +580,13 @@ class FullCodeGenerator: public AstVisitor {
void VisitForTypeofValue(Expression* expr);
+void Generate();
+void PopulateDeoptimizationData(Handle<Code> code);
+void PopulateTypeFeedbackInfo(Handle<Code> code);
+void PopulateTypeFeedbackCells(Handle<Code> code);
+Handle<FixedArray> handler_table() { return handler_table_; }
struct BailoutEntry {
unsigned id;
unsigned pc_and_state;
......@@ -779,6 +785,7 @@ class FullCodeGenerator: public AstVisitor {
ZoneList<BailoutEntry> bailout_entries_;
ZoneList<BailoutEntry> stack_checks_;
ZoneList<TypeFeedbackCellEntry> type_feedback_cells_;
int ic_total_count_;
Handle<FixedArray> handler_table_;
Handle<JSGlobalPropertyCell> profiling_counter_;
......
......@@ -1938,6 +1938,19 @@ MaybeObject* Heap::AllocateAccessorPair() {
}
MaybeObject* Heap::AllocateTypeFeedbackInfo() {
TypeFeedbackInfo* info;
{ MaybeObject* maybe_result = AllocateStruct(TYPE_FEEDBACK_INFO_TYPE);
if (!maybe_result->To(&info)) return maybe_result;
}
info->set_ic_total_count(0);
info->set_ic_with_typeinfo_count(0);
info->set_type_feedback_cells(TypeFeedbackCells::cast(empty_fixed_array()),
SKIP_WRITE_BARRIER);
return info;
}
const Heap::StringTypeTable Heap::string_type_table[] = {
#define STRING_TYPE_ELEMENT(type, size, name, camel_name) \
{type, size, k##camel_name##MapRootIndex},
......@@ -3361,8 +3374,7 @@ MaybeObject* Heap::CreateCode(const CodeDesc& desc,
code->set_check_type(RECEIVER_MAP_CHECK);
}
code->set_deoptimization_data(empty_fixed_array(), SKIP_WRITE_BARRIER);
-code->set_type_feedback_cells(TypeFeedbackCells::cast(empty_fixed_array()),
-SKIP_WRITE_BARRIER);
+code->set_type_feedback_info(undefined_value(), SKIP_WRITE_BARRIER);
code->set_handler_table(empty_fixed_array(), SKIP_WRITE_BARRIER);
code->set_gc_metadata(Smi::FromInt(0));
// Allow self references to created code object by patching the handle to
......
......@@ -640,6 +640,9 @@ class Heap {
// Allocates a pre-tenured empty AccessorPair.
MUST_USE_RESULT MaybeObject* AllocateAccessorPair();
// Allocates an empty TypeFeedbackInfo.
MUST_USE_RESULT MaybeObject* AllocateTypeFeedbackInfo();
// Clear the Instanceof cache (used when a prototype changes).
inline void ClearInstanceofCache();
......
......@@ -876,7 +876,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
// Record position before stub call for type feedback.
SetSourcePosition(clause->position());
Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
-__ call(ic, RelocInfo::CODE_TARGET, clause->CompareId());
+CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
patch_site.EmitPatchInfo();
__ test(eax, eax);
__ j(not_equal, &next_test);
......@@ -1189,7 +1189,7 @@ void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
? RelocInfo::CODE_TARGET
: RelocInfo::CODE_TARGET_CONTEXT;
-__ call(ic, mode);
+CallIC(ic, mode);
}
......@@ -1270,7 +1270,7 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
__ mov(eax, GlobalObjectOperand());
__ mov(ecx, var->name());
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
-__ call(ic, RelocInfo::CODE_TARGET_CONTEXT);
+CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
context()->Plug(eax);
break;
}
......@@ -1470,7 +1470,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->StoreIC_Initialize()
: isolate()->builtins()->StoreIC_Initialize_Strict();
-__ call(ic, RelocInfo::CODE_TARGET, key->id());
+CallIC(ic, RelocInfo::CODE_TARGET, key->id());
PrepareForBailoutForId(key->id(), NO_REGISTERS);
} else {
VisitForEffect(value);
......@@ -1734,14 +1734,14 @@ void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
ASSERT(!key->handle()->IsSmi());
__ mov(ecx, Immediate(key->handle()));
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
-__ call(ic, RelocInfo::CODE_TARGET, prop->id());
+CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
}
void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
SetSourcePosition(prop->position());
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
-__ call(ic, RelocInfo::CODE_TARGET, prop->id());
+CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
}
......@@ -1762,7 +1762,7 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
__ bind(&stub_call);
__ mov(eax, ecx);
BinaryOpStub stub(op, mode);
-__ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
patch_site.EmitPatchInfo();
__ jmp(&done, Label::kNear);
......@@ -1847,7 +1847,7 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
__ pop(edx);
BinaryOpStub stub(op, mode);
JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
-__ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
patch_site.EmitPatchInfo();
context()->Plug(eax);
}
......@@ -1888,7 +1888,7 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->StoreIC_Initialize()
: isolate()->builtins()->StoreIC_Initialize_Strict();
-__ call(ic);
+CallIC(ic);
break;
}
case KEYED_PROPERTY: {
......@@ -1901,7 +1901,7 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize()
: isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
-__ call(ic);
+CallIC(ic);
break;
}
}
......@@ -1919,7 +1919,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->StoreIC_Initialize()
: isolate()->builtins()->StoreIC_Initialize_Strict();
-__ call(ic, RelocInfo::CODE_TARGET_CONTEXT);
+CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
} else if (op == Token::INIT_CONST) {
// Const initializers need a write barrier.
......@@ -2028,7 +2028,7 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->StoreIC_Initialize()
: isolate()->builtins()->StoreIC_Initialize_Strict();
-__ call(ic, RelocInfo::CODE_TARGET, expr->id());
+CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
// If the assignment ends an initialization block, revert to fast case.
if (expr->ends_initialization_block()) {
......@@ -2068,7 +2068,7 @@ void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize()
: isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
-__ call(ic, RelocInfo::CODE_TARGET, expr->id());
+CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
// If the assignment ends an initialization block, revert to fast case.
if (expr->ends_initialization_block()) {
......@@ -2102,6 +2102,16 @@ void FullCodeGenerator::VisitProperty(Property* expr) {
}
void FullCodeGenerator::CallIC(Handle<Code> code,
RelocInfo::Mode rmode,
unsigned ast_id) {
ic_total_count_++;
__ call(code, rmode, ast_id);
}
void FullCodeGenerator::EmitCallWithIC(Call* expr,
Handle<Object> name,
RelocInfo::Mode mode) {
......@@ -2118,7 +2128,7 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr,
SetSourcePosition(expr->position());
Handle<Code> ic =
isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
-__ call(ic, mode, expr->id());
+CallIC(ic, mode, expr->id());
RecordJSReturnSite(expr);
// Restore context register.
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
......@@ -2150,7 +2160,7 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
Handle<Code> ic =
isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
__ mov(ecx, Operand(esp, (arg_count + 1) * kPointerSize)); // Key.
-__ call(ic, RelocInfo::CODE_TARGET, expr->id());
+CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
RecordJSReturnSite(expr);
// Restore context register.
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
......@@ -3737,7 +3747,7 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
Handle<Code> ic =
isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
-__ call(ic, mode, expr->id());
+CallIC(ic, mode, expr->id());
// Restore context register.
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
} else {
......@@ -3895,7 +3905,7 @@ void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
// accumulator register eax.
VisitForAccumulatorValue(expr->expression());
SetSourcePosition(expr->position());
-__ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
context()->Plug(eax);
}
......@@ -4015,7 +4025,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
__ mov(edx, eax);
__ mov(eax, Immediate(Smi::FromInt(1)));
BinaryOpStub stub(expr->binary_op(), NO_OVERWRITE);
-__ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
+CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
patch_site.EmitPatchInfo();
__ bind(&done);
......@@ -4049,7 +4059,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->StoreIC_Initialize()
: isolate()->builtins()->StoreIC_Initialize_Strict();
-__ call(ic, RelocInfo::CODE_TARGET, expr->id());
+CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
......@@ -4066,7 +4076,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize()
: isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
-__ call(ic, RelocInfo::CODE_TARGET, expr->id());
+CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
// Result is on the stack
......@@ -4094,7 +4104,7 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
// Use a regular load, not a contextual load, to avoid a reference
// error.
-__ call(ic);
+CallIC(ic);
PrepareForBailout(expr, TOS_REG);
context()->Plug(eax);
} else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
......@@ -4274,7 +4284,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
// Record position and call the compare IC.
SetSourcePosition(expr->position());
Handle<Code> ic = CompareIC::GetUninitialized(op);
-__ call(ic, RelocInfo::CODE_TARGET, expr->id());
+CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
patch_site.EmitPatchInfo();
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
......
......@@ -79,10 +79,10 @@ Code* IC::GetTargetAtAddress(Address address) {
void IC::SetTargetAtAddress(Address address, Code* target) {
ASSERT(target->is_inline_cache_stub() || target->is_compare_ic_stub());
+Code* old_target = GetTargetAtAddress(address);
#ifdef DEBUG
// STORE_IC and KEYED_STORE_IC use Code::extra_ic_state() to mark
// ICs as strict mode. The strict-ness of the IC must be preserved.
-Code* old_target = GetTargetAtAddress(address);
if (old_target->kind() == Code::STORE_IC ||
old_target->kind() == Code::KEYED_STORE_IC) {
ASSERT(Code::GetStrictMode(old_target->extra_ic_state()) ==
......@@ -92,7 +92,7 @@ void IC::SetTargetAtAddress(Address address, Code* target) {
Assembler::set_target_address_at(address, target->instruction_start());
target->GetHeap()->incremental_marking()->RecordCodeTargetPatch(address,
target);
-PostPatching();
+PostPatching(address, target, old_target);
}
......
......@@ -296,7 +296,32 @@ Failure* IC::ReferenceError(const char* type, Handle<String> name) {
}
-void IC::PostPatching() {
+void IC::PostPatching(Address address, Code* target, Code* old_target) {
if (FLAG_type_info_threshold > 0) {
if (old_target->is_inline_cache_stub() &&
target->is_inline_cache_stub()) {
State old_state = old_target->ic_state();
State new_state = target->ic_state();
bool was_uninitialized =
old_state == UNINITIALIZED || old_state == PREMONOMORPHIC;
bool is_uninitialized =
new_state == UNINITIALIZED || new_state == PREMONOMORPHIC;
int delta = 0;
if (was_uninitialized && !is_uninitialized) {
delta = 1;
} else if (!was_uninitialized && is_uninitialized) {
delta = -1;
}
if (delta != 0) {
Code* host = target->GetHeap()->isolate()->
inner_pointer_to_code_cache()->GetCacheEntry(address)->code;
TypeFeedbackInfo* info =
TypeFeedbackInfo::cast(host->type_feedback_info());
info->set_ic_with_typeinfo_count(
info->ic_with_typeinfo_count() + delta);
}
}
}
if (FLAG_watch_ic_patching) {
Isolate::Current()->runtime_profiler()->NotifyICChanged();
// We do not want to optimize until the ICs have settled down,
......@@ -313,7 +338,9 @@ void IC::PostPatching() {
if (raw_frame->is_java_script()) {
JSFunction* function =
JSFunction::cast(JavaScriptFrame::cast(raw_frame)->function());
-function->shared()->set_profiler_ticks(0);
+if (function->IsOptimized()) continue;
+SharedFunctionInfo* shared = function->shared();
+shared->set_profiler_ticks(0);
}
it.Advance();
}
......
......@@ -165,7 +165,7 @@ class IC {
// Access the target code for the given IC address.
static inline Code* GetTargetAtAddress(Address address);
static inline void SetTargetAtAddress(Address address, Code* target);
-static void PostPatching();
+static void PostPatching(Address address, Code* target, Code* old_target);
private:
// Frame pointer for the frame that uses (calls) the IC.
......
......@@ -1176,11 +1176,15 @@ class StaticMarkingVisitor : public StaticVisitorBase {
Heap* heap = map->GetHeap();
Code* code = reinterpret_cast<Code*>(object);
if (FLAG_cleanup_code_caches_at_gc) {
-TypeFeedbackCells* type_feedback_cells = code->type_feedback_cells();
-for (int i = 0; i < type_feedback_cells->CellCount(); i++) {
-ASSERT(type_feedback_cells->AstId(i)->IsSmi());
-JSGlobalPropertyCell* cell = type_feedback_cells->Cell(i);
-cell->set_value(TypeFeedbackCells::RawUninitializedSentinel(heap));
+Object* raw_info = code->type_feedback_info();
+if (raw_info->IsTypeFeedbackInfo()) {
+TypeFeedbackCells* type_feedback_cells =
+TypeFeedbackInfo::cast(raw_info)->type_feedback_cells();
+for (int i = 0; i < type_feedback_cells->CellCount(); i++) {
+ASSERT(type_feedback_cells->AstId(i)->IsSmi());
+JSGlobalPropertyCell* cell = type_feedback_cells->Cell(i);
+cell->set_value(TypeFeedbackCells::RawUninitializedSentinel(heap));
+}
}
}
code->CodeIterateBody<StaticMarkingVisitor>(heap);
......
......@@ -326,6 +326,13 @@ void PolymorphicCodeCache::PolymorphicCodeCacheVerify() {
}
void TypeFeedbackInfo::TypeFeedbackInfoVerify() {
VerifyObjectField(kIcTotalCountOffset);
VerifyObjectField(kIcWithTypeinfoCountOffset);
VerifyHeapPointer(type_feedback_cells());
}
void FixedArray::FixedArrayVerify() {
for (int i = 0; i < length(); i++) {
Object* e = get(i);
......
......@@ -4123,8 +4123,7 @@ INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
-ACCESSORS(Code, type_feedback_cells, TypeFeedbackCells,
-kTypeFeedbackCellsOffset)
+ACCESSORS(Code, type_feedback_info, Object, kTypeFeedbackInfoOffset)
ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
......@@ -4800,6 +4799,13 @@ Object* TypeFeedbackCells::RawUninitializedSentinel(Heap* heap) {
}
SMI_ACCESSORS(TypeFeedbackInfo, ic_total_count, kIcTotalCountOffset)
SMI_ACCESSORS(TypeFeedbackInfo, ic_with_typeinfo_count,
kIcWithTypeinfoCountOffset)
ACCESSORS(TypeFeedbackInfo, type_feedback_cells, TypeFeedbackCells,
kTypeFeedbackCellsOffset)
Relocatable::Relocatable(Isolate* isolate) {
ASSERT(isolate == Isolate::Current());
isolate_ = isolate;
......
......@@ -554,6 +554,15 @@ void PolymorphicCodeCache::PolymorphicCodeCachePrint(FILE* out) {
}
void TypeFeedbackInfo::TypeFeedbackInfoPrint(FILE* out) {
HeapObject::PrintHeader(out, "TypeFeedbackInfo");
PrintF(out, "\n - ic_total_count: %d, ic_with_typeinfo_count: %d",
ic_total_count(), ic_with_typeinfo_count());
PrintF(out, "\n - type_feedback_cells: ");
type_feedback_cells()->FixedArrayPrint(out);
}
void FixedArray::FixedArrayPrint(FILE* out) {
HeapObject::PrintHeader(out, "FixedArray");
PrintF(out, " - length: %d", length());
......
......@@ -109,7 +109,7 @@ void Code::CodeIterateBody(ObjectVisitor* v) {
IteratePointer(v, kRelocationInfoOffset);
IteratePointer(v, kHandlerTableOffset);
IteratePointer(v, kDeoptimizationDataOffset);
-IteratePointer(v, kTypeFeedbackCellsOffset);
+IteratePointer(v, kTypeFeedbackInfoOffset);
RelocIterator it(this, mode_mask);
for (; !it.done(); it.next()) {
......@@ -141,7 +141,7 @@ void Code::CodeIterateBody(Heap* heap) {
reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset));
StaticVisitor::VisitPointer(
heap,
-reinterpret_cast<Object**>(this->address() + kTypeFeedbackCellsOffset));
+reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset));
RelocIterator it(this, mode_mask);
for (; !it.done(); it.next()) {
......
......@@ -439,7 +439,8 @@ const int kVariableSizeSentinel = 0;
V(TYPE_SWITCH_INFO, TypeSwitchInfo, type_switch_info) \
V(SCRIPT, Script, script) \
V(CODE_CACHE, CodeCache, code_cache) \
-V(POLYMORPHIC_CODE_CACHE, PolymorphicCodeCache, polymorphic_code_cache)
+V(POLYMORPHIC_CODE_CACHE, PolymorphicCodeCache, polymorphic_code_cache) \
+V(TYPE_FEEDBACK_INFO, TypeFeedbackInfo, type_feedback_info)
#ifdef ENABLE_DEBUGGER_SUPPORT
#define STRUCT_LIST_DEBUGGER(V) \
......@@ -594,6 +595,7 @@ enum InstanceType {
SCRIPT_TYPE,
CODE_CACHE_TYPE,
POLYMORPHIC_CODE_CACHE_TYPE,
TYPE_FEEDBACK_INFO_TYPE,
// The following two instance types are only used when ENABLE_DEBUGGER_SUPPORT
// is defined. However as include/v8.h contain some of the instance type
// constants always having them avoids them getting different numbers
......@@ -4032,6 +4034,7 @@ class TypeFeedbackCells: public FixedArray {
// Forward declaration.
class SafepointEntry;
class TypeFeedbackInfo;
// Code describes objects with on-the-fly generated machine code.
class Code: public HeapObject {
......@@ -4103,8 +4106,9 @@ class Code: public HeapObject {
// [deoptimization_data]: Array containing data for deopt.
DECL_ACCESSORS(deoptimization_data, FixedArray)
-// [type_feedback_cells]: Array containing cache cells used for type feedback.
-DECL_ACCESSORS(type_feedback_cells, TypeFeedbackCells)
+// [type_feedback_info]: Struct containing type feedback information.
+// Will contain either a TypeFeedbackInfo object, or undefined.
+DECL_ACCESSORS(type_feedback_info, Object)
// [gc_metadata]: Field used to hold GC related metadata. The contents of this
// field does not have to be traced during garbage collection since
......@@ -4355,9 +4359,9 @@ class Code: public HeapObject {
static const int kHandlerTableOffset = kRelocationInfoOffset + kPointerSize;
static const int kDeoptimizationDataOffset =
kHandlerTableOffset + kPointerSize;
-static const int kTypeFeedbackCellsOffset =
+static const int kTypeFeedbackInfoOffset =
kDeoptimizationDataOffset + kPointerSize;
-static const int kGCMetadataOffset = kTypeFeedbackCellsOffset + kPointerSize;
+static const int kGCMetadataOffset = kTypeFeedbackInfoOffset + kPointerSize;
static const int kFlagsOffset = kGCMetadataOffset + kPointerSize;
static const int kKindSpecificFlagsOffset = kFlagsOffset + kIntSize;
......@@ -6365,6 +6369,40 @@ class PolymorphicCodeCacheHashTable
};
class TypeFeedbackInfo: public Struct {
public:
inline int ic_total_count();
inline void set_ic_total_count(int count);
inline int ic_with_typeinfo_count();
inline void set_ic_with_typeinfo_count(int count);
DECL_ACCESSORS(type_feedback_cells, TypeFeedbackCells)
static inline TypeFeedbackInfo* cast(Object* obj);
#ifdef OBJECT_PRINT
inline void TypeFeedbackInfoPrint() {
TypeFeedbackInfoPrint(stdout);
}
void TypeFeedbackInfoPrint(FILE* out);
#endif
#ifdef DEBUG
void TypeFeedbackInfoVerify();
#endif
static const int kIcTotalCountOffset = HeapObject::kHeaderSize;
static const int kIcWithTypeinfoCountOffset =
kIcTotalCountOffset + kPointerSize;
static const int kTypeFeedbackCellsOffset =
kIcWithTypeinfoCountOffset + kPointerSize;
static const int kSize = kTypeFeedbackCellsOffset + kPointerSize;
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(TypeFeedbackInfo);
};
enum AllowNullsFlag {ALLOW_NULLS, DISALLOW_NULLS};
enum RobustnessFlag {ROBUST_STRING_TRAVERSAL, FAST_STRING_TRAVERSAL};
......
......@@ -102,6 +102,25 @@ void RuntimeProfiler::GlobalSetup() {
}
static void GetICCounts(JSFunction* function,
int* ic_with_typeinfo_count,
int* ic_total_count,
int* percentage) {
*ic_total_count = 0;
*ic_with_typeinfo_count = 0;
Object* raw_info =
function->shared()->code()->type_feedback_info();
if (raw_info->IsTypeFeedbackInfo()) {
TypeFeedbackInfo* info = TypeFeedbackInfo::cast(raw_info);
*ic_with_typeinfo_count = info->ic_with_typeinfo_count();
*ic_total_count = info->ic_total_count();
}
*percentage = *ic_total_count > 0
? 100 * *ic_with_typeinfo_count / *ic_total_count
: 100;
}
void RuntimeProfiler::Optimize(JSFunction* function, const char* reason) {
ASSERT(function->IsOptimizable());
if (FLAG_trace_opt) {
......@@ -109,6 +128,11 @@ void RuntimeProfiler::Optimize(JSFunction* function, const char* reason) {
function->PrintName();
PrintF(" 0x%" V8PRIxPTR, reinterpret_cast<intptr_t>(function->address()));
PrintF(" for recompilation, reason: %s", reason);
if (FLAG_type_info_threshold > 0) {
int typeinfo, total, percentage;
GetICCounts(function, &typeinfo, &total, &percentage);
PrintF(", ICs with typeinfo: %d/%d (%d%%)", typeinfo, total, percentage);
}
PrintF("]\n");
}
......@@ -258,9 +282,20 @@ void RuntimeProfiler::OptimizeNow() {
int ticks = function->shared()->profiler_ticks();
if (ticks >= kProfilerTicksBeforeOptimization) {
-// If this particular function hasn't had any ICs patched for enough
-// ticks, optimize it now.
-Optimize(function, "hot and stable");
+int typeinfo, total, percentage;
+GetICCounts(function, &typeinfo, &total, &percentage);
+if (percentage >= FLAG_type_info_threshold) {
+// If this particular function hasn't had any ICs patched for enough
+// ticks, optimize it now.
+Optimize(function, "hot and stable");
+} else {
+if (FLAG_trace_opt_verbose) {
+PrintF("[not yet optimizing ");
+function->PrintName();
+PrintF(", not enough type info: %d/%d (%d%%)]\n",
+typeinfo, total, percentage);
+}
+}
} else if (!any_ic_changed_ &&
function->shared()->code()->instruction_size() < kMaxSizeEarlyOpt) {
// If no IC was patched since the last tick and this function is very
......
......@@ -573,7 +573,11 @@ void TypeFeedbackOracle::GetRelocInfos(Handle<Code> code,
void TypeFeedbackOracle::CreateDictionary(Handle<Code> code,
ZoneList<RelocInfo>* infos) {
DisableAssertNoAllocation allocation_allowed;
-int length = infos->length() + code->type_feedback_cells()->CellCount();
+int cell_count = code->type_feedback_info()->IsTypeFeedbackInfo()
+? TypeFeedbackInfo::cast(code->type_feedback_info())->
+type_feedback_cells()->CellCount()
+: 0;
+int length = infos->length() + cell_count;
byte* old_start = code->instruction_start();
dictionary_ = FACTORY->NewUnseededNumberDictionary(length);
byte* new_start = code->instruction_start();
......@@ -643,7 +647,10 @@ void TypeFeedbackOracle::ProcessRelocInfos(ZoneList<RelocInfo>* infos) {
void TypeFeedbackOracle::ProcessTypeFeedbackCells(Handle<Code> code) {
-Handle<TypeFeedbackCells> cache(code->type_feedback_cells());
+Object* raw_info = code->type_feedback_info();
+if (!raw_info->IsTypeFeedbackInfo()) return;
+Handle<TypeFeedbackCells> cache(
+TypeFeedbackInfo::cast(raw_info)->type_feedback_cells());
for (int i = 0; i < cache->CellCount(); i++) {
unsigned ast_id = cache->AstId(i)->value();
Object* value = cache->Cell(i)->value();
......