Commit 90ebb7b4 authored by zhengxing.li, committed by Commit bot

X87: Reland "[turbofan] Discard the shared code entry in the optimized code map.".

  port ec132e05 (r40086)

  original commit message:
  (GcStress failure was unrelated.)

  At one time, we hoped to generate the same code for different
  native contexts. But in truth, much performance comes from optimizing
  on the native context. Now we abandon this pathway.

BUG=

Review-Url: https://codereview.chromium.org/2404843002
Cr-Commit-Position: refs/heads/master@{#40147}
parent ecc7d969
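
For orientation before the hunks below: Generate_CompileLazy first searches the SharedFunctionInfo's optimized code map for an entry keyed on the closure's native context; this CL deletes the extra maybe_call_runtime path, which consulted the context-free slot at SharedFunctionInfo::kSharedCodeIndex. The following is a rough C++ sketch of the lookup that survives, not code from the CL; helper and constant names (WeakCellValue, kEntriesStart, kEntryLength, kContextOffset, kCachedCodeOffset) are illustrative assumptions, not V8's actual declarations.

  // Sketch only: the per-native-context lookup that remains after this CL.
  Code* LookupCachedOptimizedCode(JSFunction* closure) {
    FixedArray* map = closure->shared()->optimized_code_map();
    Context* native_context = closure->native_context();
    // Walk the map from the back; each entry weakly holds
    // (context, literals, code) for one native context.
    for (int i = map->length() - kEntryLength; i >= kEntriesStart;
         i -= kEntryLength) {
      if (WeakCellValue(map->get(i + kContextOffset)) != native_context) {
        continue;  // entry belongs to a different native context
      }
      Object* code = WeakCellValue(map->get(i + kCachedCodeOffset));
      // A cleared WeakCell reads back as a Smi; this is what the
      // __ JumpIfSmi(entry, &try_shared) in the assembly tests for.
      if (!code->IsSmi()) return Code::cast(code);
      break;  // literals found but no code: fall through to try_shared
    }
    return nullptr;  // try_shared / gotta_call_runtime handle the fallback
  }
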
@@ -1056,7 +1056,6 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
   // -----------------------------------
   // First lookup code, maybe we don't need to compile!
   Label gotta_call_runtime, gotta_call_runtime_no_stack;
-  Label maybe_call_runtime;
   Label try_shared;
   Label loop_top, loop_bottom;
 
@@ -1119,15 +1118,12 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
   __ mov(entry, FieldOperand(map, index, times_half_pointer_size,
                              SharedFunctionInfo::kOffsetToPreviousCachedCode));
   __ mov(entry, FieldOperand(entry, WeakCell::kValueOffset));
-  __ JumpIfSmi(entry, &maybe_call_runtime);
+  __ JumpIfSmi(entry, &try_shared);
 
   // Found literals and code. Get them into the closure and return.
   __ pop(closure);
   // Store code entry in the closure.
   __ lea(entry, FieldOperand(entry, Code::kHeaderSize));
-
-  Label install_optimized_code_and_tailcall;
-  __ bind(&install_optimized_code_and_tailcall);
   __ mov(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry);
   __ RecordWriteCodeEntryField(closure, entry, eax);
 
@@ -1161,20 +1157,8 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
   // We found neither literals nor code.
   __ jmp(&gotta_call_runtime);
 
-  __ bind(&maybe_call_runtime);
-  __ pop(closure);
-
-  // Last possibility. Check the context free optimized code map entry.
-  __ mov(entry, FieldOperand(map, FixedArray::kHeaderSize +
-                                      SharedFunctionInfo::kSharedCodeIndex));
-  __ mov(entry, FieldOperand(entry, WeakCell::kValueOffset));
-  __ JumpIfSmi(entry, &try_shared);
-
-  // Store code entry in the closure.
-  __ lea(entry, FieldOperand(entry, Code::kHeaderSize));
-  __ jmp(&install_optimized_code_and_tailcall);
-
   __ bind(&try_shared);
+  __ pop(closure);
   __ pop(new_target);
   __ pop(argument_count);
   // Is the full code valid?
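
One consequence is visible in the last hunk: try_shared is now reached directly from the JumpIfSmi on the cached code entry, before the closure has been popped off the stack, so the label must pop closure itself (the added __ pop(closure)). Previously that pop happened on the deleted maybe_call_runtime path.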