Commit f612a29f authored by titzer@chromium.org's avatar titzer@chromium.org

Factor out common code from platform-specific deoptimization. Fix Deoptimizer...

Factor out common code from platform-specific deoptimization. Fix Deoptimizer not to need to partition functions, but revoke their code before patching, allowing deoptimizing_code_list to be removed from Code; Add DeoptimizeCodeList API to deoptimizer, which works on a ZoneList<Code*>.

BUG=
R=mstarzinger@chromium.org, ulan@chromium.org

Review URL: https://codereview.chromium.org/19638014

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@15854 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 82a54b00
......@@ -44,22 +44,8 @@ int Deoptimizer::patch_size() {
}
void Deoptimizer::DeoptimizeFunctionWithPreparedFunctionList(
JSFunction* function) {
Isolate* isolate = function->GetIsolate();
HandleScope scope(isolate);
DisallowHeapAllocation no_allocation;
ASSERT(function->IsOptimized());
ASSERT(function->FunctionsInFunctionListShareSameCode());
// Get the optimized code.
Code* code = function->code();
void Deoptimizer::PatchCodeForDeoptimization(Isolate* isolate, Code* code) {
Address code_start_address = code->instruction_start();
// The optimized code is going to be patched, so we cannot use it any more.
function->shared()->EvictFromOptimizedCodeMap(code, "deoptimized function");
// Invalidate the relocation information, as it will become invalid by the
// code patching below, and is not needed any more.
code->InvalidateRelocation();
......@@ -92,25 +78,6 @@ void Deoptimizer::DeoptimizeFunctionWithPreparedFunctionList(
prev_call_address = call_address;
#endif
}
// Add the deoptimizing code to the list.
DeoptimizingCodeListNode* node = new DeoptimizingCodeListNode(code);
DeoptimizerData* data = isolate->deoptimizer_data();
node->set_next(data->deoptimizing_code_list_);
data->deoptimizing_code_list_ = node;
// We might be in the middle of incremental marking with compaction.
// Tell collector to treat this code object in a special way and
// ignore all slots that might have been recorded on it.
isolate->heap()->mark_compact_collector()->InvalidateCode(code);
ReplaceCodeForRelatedFunctions(function, code);
if (FLAG_trace_deopt) {
PrintF("[forced deoptimization: ");
function->PrintName();
PrintF(" / %x]\n", reinterpret_cast<uint32_t>(function));
}
}
......
......@@ -331,34 +331,47 @@ void Deoptimizer::VisitAllOptimizedFunctions(
// Removes the functions selected by the given filter from the optimized
// function list of the given context and partitions the removed functions
// into one or more lists such that all functions in a list share the same
// code. The head of each list is written in the deoptimizing_functions field
// of the corresponding code object.
// The found code objects are returned in the given zone list.
static void PartitionOptimizedFunctions(Context* context,
OptimizedFunctionFilter* filter,
ZoneList<Code*>* partitions,
Zone* zone,
Object* undefined) {
// function list of the given context and adds their code to the list of
// code objects to be deoptimized.
static void SelectCodeToDeoptimize(Context* context,
OptimizedFunctionFilter* filter,
ZoneList<Code*>* codes,
Zone* zone,
Object* undefined) {
DisallowHeapAllocation no_allocation;
Object* current = context->get(Context::OPTIMIZED_FUNCTIONS_LIST);
Object* remainder_head = undefined;
Object* remainder_tail = undefined;
ASSERT_EQ(0, partitions->length());
// TODO(titzer): rewrite to not modify unselected functions.
while (current != undefined) {
JSFunction* function = JSFunction::cast(current);
current = function->next_function_link();
if (filter->TakeFunction(function)) {
// Extract this function from the context's list and remember the code.
Code* code = function->code();
if (code->deoptimizing_functions() == undefined) {
partitions->Add(code, zone);
ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION);
if (code->marked_for_deoptimization()) {
ASSERT(codes->Contains(code));
} else {
ASSERT(partitions->Contains(code));
code->set_marked_for_deoptimization(true);
codes->Add(code, zone);
}
SharedFunctionInfo* shared = function->shared();
// Replace the function's code with the shared code.
function->set_code(shared->code());
// Evict the code from the optimized code map.
shared->EvictFromOptimizedCodeMap(code, "deoptimized function");
// Remove the function from the optimized functions list.
function->set_next_function_link(undefined);
if (FLAG_trace_deopt) {
PrintF("[forced deoptimization: ");
function->PrintName();
PrintF(" / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function));
}
function->set_next_function_link(code->deoptimizing_functions());
code->set_deoptimizing_functions(function);
} else {
// Don't select this function; link it back into the list.
if (remainder_head == undefined) {
remainder_head = function;
} else {
......@@ -393,6 +406,14 @@ class DeoptimizeWithMatchingCodeFilter : public OptimizedFunctionFilter {
};
class DeoptimizeMarkedCodeFilter : public OptimizedFunctionFilter {
public:
virtual bool TakeFunction(JSFunction* function) {
return function->code()->marked_for_deoptimization();
}
};
void Deoptimizer::DeoptimizeAll(Isolate* isolate) {
DisallowHeapAllocation no_allocation;
......@@ -421,19 +442,11 @@ void Deoptimizer::DeoptimizeGlobalObject(JSObject* object) {
void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
if (!function->IsOptimized()) return;
Code* code = function->code();
Context* context = function->context()->native_context();
Isolate* isolate = context->GetIsolate();
Object* undefined = isolate->heap()->undefined_value();
Zone zone(isolate);
ZoneList<Code*> codes(1, &zone);
if (code->kind() != Code::OPTIMIZED_FUNCTION) return;
DeoptimizeWithMatchingCodeFilter filter(code);
PartitionOptimizedFunctions(context, &filter, &codes, &zone, undefined);
ASSERT_EQ(1, codes.length());
DeoptimizeFunctionWithPreparedFunctionList(
JSFunction::cast(codes.at(0)->deoptimizing_functions()));
codes.at(0)->set_deoptimizing_functions(undefined);
DeoptimizeAllFunctionsForContext(
function->context()->native_context(), &filter);
}
......@@ -443,12 +456,10 @@ void Deoptimizer::DeoptimizeAllFunctionsForContext(
Isolate* isolate = context->GetIsolate();
Object* undefined = isolate->heap()->undefined_value();
Zone zone(isolate);
ZoneList<Code*> codes(1, &zone);
PartitionOptimizedFunctions(context, filter, &codes, &zone, undefined);
for (int i = 0; i < codes.length(); ++i) {
DeoptimizeFunctionWithPreparedFunctionList(
JSFunction::cast(codes.at(i)->deoptimizing_functions()));
codes.at(i)->set_deoptimizing_functions(undefined);
ZoneList<Code*> codes(4, &zone);
SelectCodeToDeoptimize(context, filter, &codes, &zone, undefined);
for (int i = 0; i < codes.length(); i++) {
DeoptimizeCode(isolate, codes.at(i));
}
}
......@@ -466,6 +477,55 @@ void Deoptimizer::DeoptimizeAllFunctionsWith(Isolate* isolate,
}
void Deoptimizer::DeoptimizeCodeList(Isolate* isolate, ZoneList<Code*>* codes) {
if (codes->length() == 0) return; // Nothing to do.
// Mark the code; any functions referring to this code will be selected.
for (int i = 0; i < codes->length(); i++) {
ASSERT(!codes->at(i)->marked_for_deoptimization());
codes->at(i)->set_marked_for_deoptimization(true);
}
// For all contexts, remove optimized functions that refer to the selected
// code from the optimized function lists.
Object* undefined = isolate->heap()->undefined_value();
Zone zone(isolate);
Object* list = isolate->heap()->native_contexts_list();
DeoptimizeMarkedCodeFilter filter;
while (!list->IsUndefined()) {
Context* context = Context::cast(list);
// Note that selecting code unlinks the functions that refer to it.
SelectCodeToDeoptimize(context, &filter, codes, &zone, undefined);
list = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
}
// Now deoptimize all the code.
for (int i = 0; i < codes->length(); i++) {
DeoptimizeCode(isolate, codes->at(i));
}
}
void Deoptimizer::DeoptimizeCode(Isolate* isolate, Code* code) {
HandleScope scope(isolate);
DisallowHeapAllocation nha;
// Do platform-specific patching of the optimized code.
PatchCodeForDeoptimization(isolate, code);
// Add the deoptimizing code to the list.
DeoptimizingCodeListNode* node = new DeoptimizingCodeListNode(code);
DeoptimizerData* data = isolate->deoptimizer_data();
node->set_next(data->deoptimizing_code_list_);
data->deoptimizing_code_list_ = node;
// We might be in the middle of incremental marking with compaction.
// Tell collector to treat this code object in a special way and
// ignore all slots that might have been recorded on it.
isolate->heap()->mark_compact_collector()->InvalidateCode(code);
}
void Deoptimizer::HandleWeakDeoptimizedCode(v8::Isolate* isolate,
v8::Persistent<v8::Value>* obj,
void* parameter) {
......@@ -2571,21 +2631,6 @@ void Deoptimizer::EnsureCodeForDeoptimizationEntry(Isolate* isolate,
}
void Deoptimizer::ReplaceCodeForRelatedFunctions(JSFunction* function,
Code* code) {
SharedFunctionInfo* shared = function->shared();
Object* undefined = function->GetHeap()->undefined_value();
Object* current = function;
while (current != undefined) {
JSFunction* func = JSFunction::cast(current);
current = func->next_function_link();
func->set_code(shared->code());
func->set_next_function_link(undefined);
}
}
FrameDescription::FrameDescription(uint32_t frame_size,
JSFunction* function)
: frame_size_(frame_size),
......
......@@ -197,6 +197,8 @@ class Deoptimizer : public Malloced {
static void DeoptimizeAllFunctionsWith(Isolate* isolate,
OptimizedFunctionFilter* filter);
static void DeoptimizeCodeList(Isolate* isolate, ZoneList<Code*>* codes);
static void DeoptimizeAllFunctionsForContext(
Context* context, OptimizedFunctionFilter* filter);
......@@ -411,9 +413,11 @@ class Deoptimizer : public Malloced {
v8::Persistent<v8::Value>* obj,
void* data);
// Deoptimize function assuming that function->next_function_link() points
// to a list that contains all functions that share the same optimized code.
static void DeoptimizeFunctionWithPreparedFunctionList(JSFunction* function);
// Deoptimize the given code and add to appropriate deoptimization lists.
static void DeoptimizeCode(Isolate* isolate, Code* code);
// Patch the given code so that it will deoptimize itself.
static void PatchCodeForDeoptimization(Isolate* isolate, Code* code);
// Fill the input from from a JavaScript frame. This is used when
// the debugger needs to inspect an optimized frame. For normal
......
......@@ -114,22 +114,8 @@ void Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code) {
}
void Deoptimizer::DeoptimizeFunctionWithPreparedFunctionList(
JSFunction* function) {
Isolate* isolate = function->GetIsolate();
HandleScope scope(isolate);
DisallowHeapAllocation nha;
ASSERT(function->IsOptimized());
ASSERT(function->FunctionsInFunctionListShareSameCode());
// Get the optimized code.
Code* code = function->code();
void Deoptimizer::PatchCodeForDeoptimization(Isolate* isolate, Code* code) {
Address code_start_address = code->instruction_start();
// The optimized code is going to be patched, so we cannot use it any more.
function->shared()->EvictFromOptimizedCodeMap(code, "deoptimized function");
// We will overwrite the code's relocation info in-place. Relocation info
// is written backward. The relocation info is the payload of a byte
// array. Later on we will slide this to the start of the byte array and
......@@ -188,25 +174,6 @@ void Deoptimizer::DeoptimizeFunctionWithPreparedFunctionList(
ASSERT(junk_address <= reloc_end_address);
isolate->heap()->CreateFillerObjectAt(junk_address,
reloc_end_address - junk_address);
// Add the deoptimizing code to the list.
DeoptimizingCodeListNode* node = new DeoptimizingCodeListNode(code);
DeoptimizerData* data = isolate->deoptimizer_data();
node->set_next(data->deoptimizing_code_list_);
data->deoptimizing_code_list_ = node;
// We might be in the middle of incremental marking with compaction.
// Tell collector to treat this code object in a special way and
// ignore all slots that might have been recorded on it.
isolate->heap()->mark_compact_collector()->InvalidateCode(code);
ReplaceCodeForRelatedFunctions(function, code);
if (FLAG_trace_deopt) {
PrintF("[forced deoptimization: ");
function->PrintName();
PrintF(" / %x]\n", reinterpret_cast<uint32_t>(function));
}
}
......
......@@ -73,8 +73,8 @@ MarkCompactCollector::MarkCompactCollector() : // NOLINT
migration_slots_buffer_(NULL),
heap_(NULL),
code_flusher_(NULL),
encountered_weak_collections_(NULL) { }
encountered_weak_collections_(NULL),
code_to_deoptimize_(NULL) { }
#ifdef VERIFY_HEAP
class VerifyMarkingVisitor: public ObjectVisitor {
......@@ -492,7 +492,7 @@ void MarkCompactCollector::VerifyWeakEmbeddedMapsInOptimizedCode() {
obj = code_iterator.Next()) {
Code* code = Code::cast(obj);
if (code->kind() != Code::OPTIMIZED_FUNCTION) continue;
if (code->marked_for_deoptimization()) continue;
if (WillBeDeoptimized(code)) continue;
code->VerifyEmbeddedMapsDependency();
}
}
......@@ -945,14 +945,6 @@ void MarkCompactCollector::Prepare(GCTracer* tracer) {
}
class DeoptimizeMarkedCodeFilter : public OptimizedFunctionFilter {
public:
virtual bool TakeFunction(JSFunction* function) {
return function->code()->marked_for_deoptimization();
}
};
void MarkCompactCollector::Finish() {
#ifdef DEBUG
ASSERT(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS);
......@@ -964,8 +956,23 @@ void MarkCompactCollector::Finish() {
// objects (empty string, illegal builtin).
isolate()->stub_cache()->Clear();
DeoptimizeMarkedCodeFilter filter;
Deoptimizer::DeoptimizeAllFunctionsWith(isolate(), &filter);
if (code_to_deoptimize_ != Smi::FromInt(0)) {
// Convert the linked list of Code objects into a ZoneList.
Zone zone(isolate());
ZoneList<Code*> codes(4, &zone);
Object *list = code_to_deoptimize_;
while (list->IsCode()) {
Code *code = Code::cast(list);
list = code->code_to_deoptimize_link();
codes.Add(code, &zone);
// Destroy the link and don't ever try to deoptimize this code again.
code->set_code_to_deoptimize_link(Smi::FromInt(0));
}
code_to_deoptimize_ = Smi::FromInt(0);
Deoptimizer::DeoptimizeCodeList(isolate(), &codes);
}
}
......@@ -2610,8 +2617,17 @@ void MarkCompactCollector::ClearAndDeoptimizeDependentCode(Map* map) {
// and ClearAndDeoptimizeDependentCode shouldn't be called.
ASSERT(entries->is_code_at(i));
Code* code = entries->code_at(i);
if (IsMarked(code) && !code->marked_for_deoptimization()) {
code->set_marked_for_deoptimization(true);
if (IsMarked(code) && !WillBeDeoptimized(code)) {
// Insert the code into the code_to_deoptimize linked list.
Object* next = code_to_deoptimize_;
if (next != Smi::FromInt(0)) {
// Record the slot so that it is updated.
Object** slot = code->code_to_deoptimize_link_slot();
RecordSlot(slot, slot, next);
}
code->set_code_to_deoptimize_link(next);
code_to_deoptimize_ = code;
}
entries->clear_at(i);
}
......@@ -2632,7 +2648,7 @@ void MarkCompactCollector::ClearNonLiveDependentCode(DependentCode* entries) {
Object* obj = entries->object_at(i);
ASSERT(obj->IsCode() || IsMarked(obj));
if (IsMarked(obj) &&
(!obj->IsCode() || !Code::cast(obj)->marked_for_deoptimization())) {
(!obj->IsCode() || !WillBeDeoptimized(Code::cast(obj)))) {
if (new_number_of_entries + group_number_of_entries != i) {
entries->set_object_at(
new_number_of_entries + group_number_of_entries, obj);
......@@ -3273,6 +3289,16 @@ void MarkCompactCollector::InvalidateCode(Code* code) {
}
// Return true if the given code is deoptimized or will be deoptimized.
bool MarkCompactCollector::WillBeDeoptimized(Code* code) {
// We assume the code_to_deoptimize_link is initialized to undefined.
// If it is 0, or refers to another Code object, then this code
// is already linked, or was already linked into the list.
return code->code_to_deoptimize_link() != heap()->undefined_value()
|| code->marked_for_deoptimization();
}
bool MarkCompactCollector::MarkInvalidatedCode() {
bool code_marked = false;
......@@ -3456,8 +3482,9 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
}
}
// Update pointer from the native contexts list.
// Update the heads of the native contexts list and the code to deoptimize list.
updating_visitor.VisitPointer(heap_->native_contexts_list_address());
updating_visitor.VisitPointer(&code_to_deoptimize_);
heap_->string_table()->Iterate(&updating_visitor);
......
......@@ -743,6 +743,7 @@ class MarkCompactCollector {
~MarkCompactCollector();
bool MarkInvalidatedCode();
bool WillBeDeoptimized(Code* code);
void RemoveDeadInvalidatedCode();
void ProcessInvalidatedCode(ObjectVisitor* visitor);
......@@ -946,6 +947,7 @@ class MarkCompactCollector {
MarkingDeque marking_deque_;
CodeFlusher* code_flusher_;
Object* encountered_weak_collections_;
Object* code_to_deoptimize_;
List<Page*> evacuation_candidates_;
List<Code*> invalidated_code_;
......
......@@ -43,22 +43,8 @@ int Deoptimizer::patch_size() {
}
void Deoptimizer::DeoptimizeFunctionWithPreparedFunctionList(
JSFunction* function) {
Isolate* isolate = function->GetIsolate();
HandleScope scope(isolate);
DisallowHeapAllocation nha;
ASSERT(function->IsOptimized());
ASSERT(function->FunctionsInFunctionListShareSameCode());
// Get the optimized code.
Code* code = function->code();
void Deoptimizer::PatchCodeForDeoptimization(Isolate* isolate, Code* code) {
Address code_start_address = code->instruction_start();
// The optimized code is going to be patched, so we cannot use it any more.
function->shared()->EvictFromOptimizedCodeMap(code, "deoptimized function");
// Invalidate the relocation information, as it will become invalid by the
// code patching below, and is not needed any more.
code->InvalidateRelocation();
......@@ -87,30 +73,6 @@ void Deoptimizer::DeoptimizeFunctionWithPreparedFunctionList(
#ifdef DEBUG
prev_call_address = call_address;
#endif
}
// Add the deoptimizing code to the list.
DeoptimizingCodeListNode* node = new DeoptimizingCodeListNode(code);
DeoptimizerData* data = isolate->deoptimizer_data();
node->set_next(data->deoptimizing_code_list_);
data->deoptimizing_code_list_ = node;
// We might be in the middle of incremental marking with compaction.
// Tell collector to treat this code object in a special way and
// ignore all slots that might have been recorded on it.
isolate->heap()->mark_compact_collector()->InvalidateCode(code);
ReplaceCodeForRelatedFunctions(function, code);
if (FLAG_trace_deopt) {
PrintF("[forced deoptimization: ");
function->PrintName();
PrintF(" / %x]\n", reinterpret_cast<uint32_t>(function));
#ifdef DEBUG
if (FLAG_print_code) {
code->PrintLn();
}
#endif
}
}
......
......@@ -5239,15 +5239,22 @@ void Code::set_stub_info(int value) {
}
void Code::set_deoptimizing_functions(Object* value) {
Object* Code::code_to_deoptimize_link() {
// Optimized code should not have type feedback.
ASSERT(kind() == OPTIMIZED_FUNCTION);
return READ_FIELD(this, kTypeFeedbackInfoOffset);
}
void Code::set_code_to_deoptimize_link(Object* value) {
ASSERT(kind() == OPTIMIZED_FUNCTION);
WRITE_FIELD(this, kTypeFeedbackInfoOffset, value);
}
Object* Code::deoptimizing_functions() {
Object** Code::code_to_deoptimize_link_slot() {
ASSERT(kind() == OPTIMIZED_FUNCTION);
return Object::cast(READ_FIELD(this, kTypeFeedbackInfoOffset));
return HeapObject::RawField(this, kTypeFeedbackInfoOffset);
}
......
......@@ -11348,14 +11348,6 @@ bool DependentCode::Contains(DependencyGroup group, Code* code) {
}
class DeoptimizeDependentCodeFilter : public OptimizedFunctionFilter {
public:
virtual bool TakeFunction(JSFunction* function) {
return function->code()->marked_for_deoptimization();
}
};
void DependentCode::DeoptimizeDependentCodeGroup(
Isolate* isolate,
DependentCode::DependencyGroup group) {
......@@ -11365,10 +11357,14 @@ void DependentCode::DeoptimizeDependentCodeGroup(
int end = starts.at(group + 1);
int code_entries = starts.number_of_entries();
if (start == end) return;
// Collect all the code to deoptimize.
Zone zone(isolate);
ZoneList<Code*> codes(end - start, &zone);
for (int i = start; i < end; i++) {
if (is_code_at(i)) {
Code* code = code_at(i);
code->set_marked_for_deoptimization(true);
if (!code->marked_for_deoptimization()) codes.Add(code, &zone);
} else {
CompilationInfo* info = compilation_info_at(i);
info->AbortDueToDependencyChange();
......@@ -11384,8 +11380,7 @@ void DependentCode::DeoptimizeDependentCodeGroup(
clear_at(i);
}
set_number_of_entries(group, 0);
DeoptimizeDependentCodeFilter filter;
Deoptimizer::DeoptimizeAllFunctionsWith(isolate, &filter);
Deoptimizer::DeoptimizeCodeList(isolate, &codes);
}
......
......@@ -4561,7 +4561,7 @@ class Code: public HeapObject {
// [type_feedback_info]: Struct containing type feedback information for
// unoptimized code. Optimized code can temporarily store the head of
// the list of the dependent optimized functions during deoptimization.
// the list of code to be deoptimized during mark-compact GC.
// STUBs can use this slot to store arbitrary information as a Smi.
// Will contain either a TypeFeedbackInfo object, or JSFunction object,
// or undefined, or a Smi.
......@@ -4569,8 +4569,11 @@ class Code: public HeapObject {
inline void InitializeTypeFeedbackInfoNoWriteBarrier(Object* value);
inline int stub_info();
inline void set_stub_info(int info);
inline Object* deoptimizing_functions();
inline void set_deoptimizing_functions(Object* value);
// Used during GC to link a list of code objects to deoptimize.
inline Object* code_to_deoptimize_link();
inline void set_code_to_deoptimize_link(Object* value);
inline Object** code_to_deoptimize_link_slot();
// [gc_metadata]: Field used to hold GC related metadata. The contents of this
// field does not have to be traced during garbage collection since
......@@ -6762,18 +6765,6 @@ class JSFunction: public JSObject {
// Retrieve the native context from a function's literal array.
static Context* NativeContextFromLiterals(FixedArray* literals);
#ifdef DEBUG
bool FunctionsInFunctionListShareSameCode() {
Object* current = this;
while (!current->IsUndefined()) {
JSFunction* function = JSFunction::cast(current);
current = function->next_function_link();
if (function->code() != this->code()) return false;
}
return true;
}
#endif
bool PassesHydrogenFilter();
// Layout descriptors. The last property (from kNonWeakFieldsEndOffset to
......
......@@ -46,21 +46,7 @@ int Deoptimizer::patch_size() {
}
void Deoptimizer::DeoptimizeFunctionWithPreparedFunctionList(
JSFunction* function) {
Isolate* isolate = function->GetIsolate();
HandleScope scope(isolate);
DisallowHeapAllocation nha;
ASSERT(function->IsOptimized());
ASSERT(function->FunctionsInFunctionListShareSameCode());
// Get the optimized code.
Code* code = function->code();
// The optimized code is going to be patched, so we cannot use it any more.
function->shared()->EvictFromOptimizedCodeMap(code, "deoptimized function");
void Deoptimizer::PatchCodeForDeoptimization(Isolate* isolate, Code* code) {
// Invalidate the relocation information, as it will become invalid by the
// code patching below, and is not needed any more.
code->InvalidateRelocation();
......@@ -71,7 +57,7 @@ void Deoptimizer::DeoptimizeFunctionWithPreparedFunctionList(
// before the safepoint table (space was allocated there when the Code
// object was created, if necessary).
Address instruction_start = function->code()->instruction_start();
Address instruction_start = code->instruction_start();
#ifdef DEBUG
Address prev_call_address = NULL;
#endif
......@@ -93,25 +79,6 @@ void Deoptimizer::DeoptimizeFunctionWithPreparedFunctionList(
prev_call_address = call_address;
#endif
}
// Add the deoptimizing code to the list.
DeoptimizingCodeListNode* node = new DeoptimizingCodeListNode(code);
DeoptimizerData* data = isolate->deoptimizer_data();
node->set_next(data->deoptimizing_code_list_);
data->deoptimizing_code_list_ = node;
// We might be in the middle of incremental marking with compaction.
// Tell collector to treat this code object in a special way and
// ignore all slots that might have been recorded on it.
isolate->heap()->mark_compact_collector()->InvalidateCode(code);
ReplaceCodeForRelatedFunctions(function, code);
if (FLAG_trace_deopt) {
PrintF("[forced deoptimization: ");
function->PrintName();
PrintF(" / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function));
}
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment