Commit c053419e authored by Leszek Swirski, committed by Commit Bot

[sparkplug] Upstream Sparkplug

Sparkplug is a new baseline, non-optimising second-tier compiler,
designed to fit in the compiler trade-off space between Ignition and
TurboProp/TurboFan.

Design doc:
https://docs.google.com/document/d/13c-xXmFOMcpUQNqo66XWQt3u46TsBjXrHrh4c045l-A/edit?usp=sharing
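
The new tier is reached through the CompileWithBaseline entry point added in
src/baseline/baseline.h (see the new files below). As a hedged sketch only
(the helper name and the DCHECK guard are illustrative and not part of this
CL), a caller inside the V8 tree could use it roughly like this:

    #include "src/baseline/baseline.h"
    #include "src/objects/shared-function-info-inl.h"

    namespace v8 {
    namespace internal {

    // Illustrative helper (hypothetical name): compile a function with
    // Sparkplug once Ignition has produced bytecode for it.
    // CompileWithBaseline caches the generated Code on the
    // SharedFunctionInfo's BaselineData, so a second call returns the same
    // code object. Per this CL it is only implemented on x64 and arm64;
    // on other architectures it hits UNREACHABLE().
    Handle<Code> CompileWithSparkplugForIllustration(
        Isolate* isolate, Handle<SharedFunctionInfo> shared) {
      DCHECK(shared->HasBytecodeArray());  // Sparkplug compiles from bytecode.
      return CompileWithBaseline(isolate, shared);
    }

    }  // namespace internal
    }  // namespace v8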

Bug: v8:11420
Change-Id: Ideb7270db3d6548eedd8337a3f596eb6f8fea6b1
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2667514
Commit-Queue: Leszek Swirski <leszeks@chromium.org>
Reviewed-by: Michael Stanton <mvstanton@chromium.org>
Reviewed-by: Ross McIlroy <rmcilroy@chromium.org>
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Reviewed-by: Michael Achenbach <machenbach@chromium.org>
Reviewed-by: Hannes Payer <hpayer@chromium.org>
Cr-Commit-Position: refs/heads/master@{#72686}
parent 27b8ad20
......@@ -2520,6 +2520,10 @@ v8_source_set("v8_base_without_compiler") {
"src/ast/source-range-ast-visitor.h",
"src/ast/variables.cc",
"src/ast/variables.h",
"src/baseline/baseline-compiler.cc",
"src/baseline/baseline-compiler.h",
"src/baseline/baseline.cc",
"src/baseline/baseline.h",
"src/builtins/accessors.cc",
"src/builtins/accessors.h",
"src/builtins/builtins-api.cc",
......@@ -3683,6 +3687,7 @@ v8_source_set("v8_base_without_compiler") {
if (v8_current_cpu == "x86") {
sources += [ ### gcmole(arch:ia32) ###
"src/baseline/ia32/baseline-compiler-ia32-inl.h",
"src/codegen/ia32/assembler-ia32-inl.h",
"src/codegen/ia32/assembler-ia32.cc",
"src/codegen/ia32/assembler-ia32.h",
......@@ -3709,6 +3714,7 @@ v8_source_set("v8_base_without_compiler") {
]
} else if (v8_current_cpu == "x64") {
sources += [ ### gcmole(arch:x64) ###
"src/baseline/x64/baseline-compiler-x64-inl.h",
"src/codegen/x64/assembler-x64-inl.h",
"src/codegen/x64/assembler-x64.cc",
"src/codegen/x64/assembler-x64.h",
......@@ -3759,6 +3765,7 @@ v8_source_set("v8_base_without_compiler") {
}
} else if (v8_current_cpu == "arm") {
sources += [ ### gcmole(arch:arm) ###
"src/baseline/arm/baseline-compiler-arm-inl.h",
"src/codegen/arm/assembler-arm-inl.h",
"src/codegen/arm/assembler-arm.cc",
"src/codegen/arm/assembler-arm.h",
......@@ -3790,6 +3797,7 @@ v8_source_set("v8_base_without_compiler") {
]
} else if (v8_current_cpu == "arm64") {
sources += [ ### gcmole(arch:arm64) ###
"src/baseline/arm64/baseline-compiler-arm64-inl.h",
"src/codegen/arm64/assembler-arm64-inl.h",
"src/codegen/arm64/assembler-arm64.cc",
"src/codegen/arm64/assembler-arm64.h",
......@@ -3849,6 +3857,7 @@ v8_source_set("v8_base_without_compiler") {
}
} else if (v8_current_cpu == "mips" || v8_current_cpu == "mipsel") {
sources += [ ### gcmole(arch:mipsel) ###
"src/baseline/mips/baseline-compiler-mips-inl.h",
"src/codegen/mips/assembler-mips-inl.h",
"src/codegen/mips/assembler-mips.cc",
"src/codegen/mips/assembler-mips.h",
......@@ -3877,6 +3886,7 @@ v8_source_set("v8_base_without_compiler") {
]
} else if (v8_current_cpu == "mips64" || v8_current_cpu == "mips64el") {
sources += [ ### gcmole(arch:mips64el) ###
"src/baseline/mips64/baseline-compiler-mips64-inl.h",
"src/codegen/mips64/assembler-mips64-inl.h",
"src/codegen/mips64/assembler-mips64.cc",
"src/codegen/mips64/assembler-mips64.h",
......@@ -3905,6 +3915,7 @@ v8_source_set("v8_base_without_compiler") {
]
} else if (v8_current_cpu == "ppc") {
sources += [ ### gcmole(arch:ppc) ###
"src/baseline/ppc/baseline-compiler-ppc-inl.h",
"src/codegen/ppc/assembler-ppc-inl.h",
"src/codegen/ppc/assembler-ppc.cc",
"src/codegen/ppc/assembler-ppc.h",
......@@ -3936,6 +3947,7 @@ v8_source_set("v8_base_without_compiler") {
]
} else if (v8_current_cpu == "ppc64") {
sources += [ ### gcmole(arch:ppc64) ###
"src/baseline/ppc/baseline-compiler-ppc-inl.h",
"src/codegen/ppc/assembler-ppc-inl.h",
"src/codegen/ppc/assembler-ppc.cc",
"src/codegen/ppc/assembler-ppc.h",
......@@ -3967,6 +3979,7 @@ v8_source_set("v8_base_without_compiler") {
]
} else if (v8_current_cpu == "s390" || v8_current_cpu == "s390x") {
sources += [ ### gcmole(arch:s390) ###
"src/baseline/s390/baseline-compiler-s390-inl.h",
"src/codegen/s390/assembler-s390-inl.h",
"src/codegen/s390/assembler-s390.cc",
"src/codegen/s390/assembler-s390.h",
......
specific_include_rules = {
"baseline-compiler\.h": [
"+src/interpreter/interpreter-intrinsics.h",
],
}
cbruni@chromium.org
leszeks@chromium.org
marja@chromium.org
pthier@chromium.org
verwaest@chromium.org
victorgomes@chromium.org
\ No newline at end of file
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/baseline/baseline.h"
// TODO(v8:11421): Remove #if once baseline compiler is ported to other
// architectures.
#if V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_ARM64
#include "src/baseline/baseline-compiler.h"
#include "src/heap/factory-inl.h"
#include "src/logging/counters.h"
#include "src/objects/script-inl.h"
#include "src/objects/shared-function-info-inl.h"
namespace v8 {
namespace internal {
Handle<Code> CompileWithBaseline(
Isolate* isolate, Handle<SharedFunctionInfo> shared_function_info,
Handle<BytecodeArray> bytecode) {
RuntimeCallTimerScope runtimeTimer(isolate,
RuntimeCallCounterId::kCompileBaseline);
baseline::BaselineCompiler compiler(isolate, shared_function_info, bytecode);
compiler.GenerateCode();
return compiler.Build(isolate);
}
// TODO(v8:11429): This can be the basis of Compiler::CompileBaseline
Handle<Code> CompileWithBaseline(Isolate* isolate,
Handle<SharedFunctionInfo> shared) {
if (shared->HasBaselineData()) {
return handle(shared->baseline_data().baseline_code(), isolate);
}
if (FLAG_trace_opt) {
PrintF("[compiling method ");
shared->ShortPrint();
PrintF(" using Sparkplug]\n");
}
base::ElapsedTimer timer;
timer.Start();
Handle<Code> code = CompileWithBaseline(
isolate, shared, handle(shared->GetBytecodeArray(isolate), isolate));
// TODO(v8:11429): Extract to Factory::NewBaselineData
Handle<BaselineData> baseline_data = Handle<BaselineData>::cast(
isolate->factory()->NewStruct(BASELINE_DATA_TYPE, AllocationType::kOld));
baseline_data->set_baseline_code(*code);
baseline_data->set_data(
HeapObject::cast(shared->function_data(kAcquireLoad)));
shared->set_baseline_data(*baseline_data);
if (FLAG_print_code) {
code->Print();
}
if (shared->script().IsScript()) {
Compiler::LogFunctionCompilation(
isolate, CodeEventListener::FUNCTION_TAG, shared,
handle(Script::cast(shared->script()), isolate),
Handle<AbstractCode>::cast(code), CodeKind::SPARKPLUG,
timer.Elapsed().InMillisecondsF());
}
if (FLAG_trace_opt) {
// TODO(v8:11429): Move to Compiler.
PrintF("[completed compiling ");
shared->ShortPrint();
PrintF(" using Sparkplug - took %0.3f ms]\n",
timer.Elapsed().InMillisecondsF());
}
return code;
}
} // namespace internal
} // namespace v8
#else
namespace v8 {
namespace internal {
Handle<Code> CompileWithBaseline(Isolate* isolate,
Handle<SharedFunctionInfo> shared) {
UNREACHABLE();
}
} // namespace internal
} // namespace v8
#endif
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_BASELINE_BASELINE_H_
#define V8_BASELINE_BASELINE_H_
#include "src/handles/handles.h"
namespace v8 {
namespace internal {
class Code;
class SharedFunctionInfo;
class BytecodeArray;
// TODO(v8:11429): Restrict header visibility to just this file.
Handle<Code> CompileWithBaseline(Isolate* isolate,
Handle<SharedFunctionInfo> shared);
} // namespace internal
} // namespace v8
#endif // V8_BASELINE_BASELINE_H_
......@@ -64,15 +64,51 @@ void Builtins::Generate_CallFunctionForwardVarargs(MacroAssembler* masm) {
masm->isolate()->builtins()->CallFunction());
}
TF_BUILTIN(Call_ReceiverIsNullOrUndefined_Baseline,
CallOrConstructBuiltinsAssembler) {
auto target = Parameter<Object>(Descriptor::kFunction);
auto argc = UncheckedParameter<Int32T>(Descriptor::kActualArgumentsCount);
auto context = LoadContextFromBaseline();
auto feedback_vector = LoadFeedbackVectorFromBaseline();
auto slot = UncheckedParameter<Int32T>(Descriptor::kSlot);
CollectCallFeedback(target, context, feedback_vector,
Unsigned(ChangeInt32ToIntPtr(slot)));
TailCallBuiltin(Builtins::kCall_ReceiverIsNullOrUndefined, context, target,
argc);
}
TF_BUILTIN(Call_ReceiverIsNotNullOrUndefined_Baseline,
CallOrConstructBuiltinsAssembler) {
auto target = Parameter<Object>(Descriptor::kFunction);
auto argc = UncheckedParameter<Int32T>(Descriptor::kActualArgumentsCount);
auto context = LoadContextFromBaseline();
auto feedback_vector = LoadFeedbackVectorFromBaseline();
auto slot = UncheckedParameter<Int32T>(Descriptor::kSlot);
CollectCallFeedback(target, context, feedback_vector,
Unsigned(ChangeInt32ToIntPtr(slot)));
TailCallBuiltin(Builtins::kCall_ReceiverIsNotNullOrUndefined, context, target,
argc);
}
TF_BUILTIN(Call_ReceiverIsAny_Baseline, CallOrConstructBuiltinsAssembler) {
auto target = Parameter<Object>(Descriptor::kFunction);
auto argc = UncheckedParameter<Int32T>(Descriptor::kActualArgumentsCount);
auto context = LoadContextFromBaseline();
auto feedback_vector = LoadFeedbackVectorFromBaseline();
auto slot = UncheckedParameter<Int32T>(Descriptor::kSlot);
CollectCallFeedback(target, context, feedback_vector,
Unsigned(ChangeInt32ToIntPtr(slot)));
TailCallBuiltin(Builtins::kCall_ReceiverIsAny, context, target, argc);
}
TF_BUILTIN(Call_ReceiverIsNullOrUndefined_WithFeedback,
CallOrConstructBuiltinsAssembler) {
auto target = Parameter<Object>(Descriptor::kFunction);
auto argc = UncheckedParameter<Int32T>(Descriptor::kActualArgumentsCount);
auto context = Parameter<Context>(Descriptor::kContext);
auto maybe_feedback_vector =
Parameter<HeapObject>(Descriptor::kMaybeFeedbackVector);
auto feedback_vector = Parameter<FeedbackVector>(Descriptor::kFeedbackVector);
auto slot = UncheckedParameter<Int32T>(Descriptor::kSlot);
CollectCallFeedback(target, context, maybe_feedback_vector,
CollectCallFeedback(target, context, feedback_vector,
Unsigned(ChangeInt32ToIntPtr(slot)));
TailCallBuiltin(Builtins::kCall_ReceiverIsNullOrUndefined, context, target,
argc);
......@@ -83,10 +119,9 @@ TF_BUILTIN(Call_ReceiverIsNotNullOrUndefined_WithFeedback,
auto target = Parameter<Object>(Descriptor::kFunction);
auto argc = UncheckedParameter<Int32T>(Descriptor::kActualArgumentsCount);
auto context = Parameter<Context>(Descriptor::kContext);
auto maybe_feedback_vector =
Parameter<HeapObject>(Descriptor::kMaybeFeedbackVector);
auto feedback_vector = Parameter<FeedbackVector>(Descriptor::kFeedbackVector);
auto slot = UncheckedParameter<Int32T>(Descriptor::kSlot);
CollectCallFeedback(target, context, maybe_feedback_vector,
CollectCallFeedback(target, context, feedback_vector,
Unsigned(ChangeInt32ToIntPtr(slot)));
TailCallBuiltin(Builtins::kCall_ReceiverIsNotNullOrUndefined, context, target,
argc);
......@@ -96,10 +131,9 @@ TF_BUILTIN(Call_ReceiverIsAny_WithFeedback, CallOrConstructBuiltinsAssembler) {
auto target = Parameter<Object>(Descriptor::kFunction);
auto argc = UncheckedParameter<Int32T>(Descriptor::kActualArgumentsCount);
auto context = Parameter<Context>(Descriptor::kContext);
auto maybe_feedback_vector =
Parameter<HeapObject>(Descriptor::kMaybeFeedbackVector);
auto feedback_vector = Parameter<FeedbackVector>(Descriptor::kFeedbackVector);
auto slot = UncheckedParameter<Int32T>(Descriptor::kSlot);
CollectCallFeedback(target, context, maybe_feedback_vector,
CollectCallFeedback(target, context, feedback_vector,
Unsigned(ChangeInt32ToIntPtr(slot)));
TailCallBuiltin(Builtins::kCall_ReceiverIsAny, context, target, argc);
}
......@@ -434,10 +468,9 @@ TF_BUILTIN(CallWithArrayLike_WithFeedback, CallOrConstructBuiltinsAssembler) {
base::Optional<TNode<Object>> new_target = base::nullopt;
auto arguments_list = Parameter<Object>(Descriptor::kArgumentsList);
auto context = Parameter<Context>(Descriptor::kContext);
auto maybe_feedback_vector =
Parameter<HeapObject>(Descriptor::kMaybeFeedbackVector);
auto feedback_vector = Parameter<FeedbackVector>(Descriptor::kFeedbackVector);
auto slot = UncheckedParameter<Int32T>(Descriptor::kSlot);
CollectCallFeedback(target, context, maybe_feedback_vector,
CollectCallFeedback(target, context, feedback_vector,
Unsigned(ChangeInt32ToIntPtr(slot)));
CallOrConstructWithArrayLike(target, new_target, arguments_list, context);
}
......@@ -451,16 +484,28 @@ TF_BUILTIN(CallWithSpread, CallOrConstructBuiltinsAssembler) {
CallOrConstructWithSpread(target, new_target, spread, args_count, context);
}
TF_BUILTIN(CallWithSpread_Baseline, CallOrConstructBuiltinsAssembler) {
auto target = Parameter<Object>(Descriptor::kTarget);
base::Optional<TNode<Object>> new_target = base::nullopt;
auto spread = Parameter<Object>(Descriptor::kSpread);
auto args_count = UncheckedParameter<Int32T>(Descriptor::kArgumentsCount);
auto context = LoadContextFromBaseline();
auto feedback_vector = LoadFeedbackVectorFromBaseline();
auto slot = UncheckedParameter<Int32T>(Descriptor::kSlot);
CollectCallFeedback(target, context, feedback_vector,
Unsigned(ChangeInt32ToIntPtr(slot)));
CallOrConstructWithSpread(target, new_target, spread, args_count, context);
}
TF_BUILTIN(CallWithSpread_WithFeedback, CallOrConstructBuiltinsAssembler) {
auto target = Parameter<Object>(Descriptor::kTarget);
base::Optional<TNode<Object>> new_target = base::nullopt;
auto spread = Parameter<Object>(Descriptor::kSpread);
auto args_count = UncheckedParameter<Int32T>(Descriptor::kArgumentsCount);
auto context = Parameter<Context>(Descriptor::kContext);
auto maybe_feedback_vector =
Parameter<HeapObject>(Descriptor::kMaybeFeedbackVector);
auto feedback_vector = Parameter<FeedbackVector>(Descriptor::kFeedbackVector);
auto slot = UncheckedParameter<Int32T>(Descriptor::kSlot);
CollectCallFeedback(target, context, maybe_feedback_vector,
CollectCallFeedback(target, context, feedback_vector,
Unsigned(ChangeInt32ToIntPtr(slot)));
CallOrConstructWithSpread(target, new_target, spread, args_count, context);
}
......
......@@ -37,18 +37,46 @@ void Builtins::Generate_ConstructFunctionForwardVarargs(MacroAssembler* masm) {
BUILTIN_CODE(masm->isolate(), ConstructFunction));
}
// TODO(v8:11429): Here and below, consider sharing code with Foo_WithFeedback,
// or removing the latter entirely.
TF_BUILTIN(Construct_Baseline, CallOrConstructBuiltinsAssembler) {
auto target = Parameter<Object>(Descriptor::kTarget);
auto new_target = Parameter<Object>(Descriptor::kNewTarget);
auto argc = UncheckedParameter<Int32T>(Descriptor::kActualArgumentsCount);
auto slot = UncheckedParameter<Int32T>(Descriptor::kSlot);
// TODO(v8:11429,verwaest): Only emit context loads where necessary
auto context = LoadContextFromBaseline();
// TODO(v8:11429,verwaest): Make sure CollectConstructFeedback knows we have a
// feedback vector.
auto feedback_vector = LoadFeedbackVectorFromBaseline();
TVARIABLE(AllocationSite, allocation_site);
Label if_construct_generic(this), if_construct_array(this);
CollectConstructFeedback(context, target, new_target, feedback_vector,
Unsigned(ChangeInt32ToIntPtr(slot)),
&if_construct_generic, &if_construct_array,
&allocation_site);
BIND(&if_construct_generic);
TailCallBuiltin(Builtins::kConstruct, context, target, new_target, argc);
BIND(&if_construct_array);
TailCallBuiltin(Builtins::kArrayConstructorImpl, context, target, new_target,
argc, allocation_site.value());
}
TF_BUILTIN(Construct_WithFeedback, CallOrConstructBuiltinsAssembler) {
auto target = Parameter<Object>(Descriptor::kTarget);
auto new_target = Parameter<Object>(Descriptor::kNewTarget);
auto argc = UncheckedParameter<Int32T>(Descriptor::kActualArgumentsCount);
auto context = Parameter<Context>(Descriptor::kContext);
auto maybe_feedback_vector =
Parameter<HeapObject>(Descriptor::kMaybeFeedbackVector);
auto feedback_vector = Parameter<FeedbackVector>(Descriptor::kFeedbackVector);
auto slot = UncheckedParameter<Int32T>(Descriptor::kSlot);
TVARIABLE(AllocationSite, allocation_site);
Label if_construct_generic(this), if_construct_array(this);
CollectConstructFeedback(context, target, new_target, maybe_feedback_vector,
CollectConstructFeedback(context, target, new_target, feedback_vector,
Unsigned(ChangeInt32ToIntPtr(slot)),
&if_construct_generic, &if_construct_array,
&allocation_site);
......@@ -75,13 +103,12 @@ TF_BUILTIN(ConstructWithArrayLike_WithFeedback,
auto new_target = Parameter<Object>(Descriptor::kNewTarget);
auto arguments_list = Parameter<Object>(Descriptor::kArgumentsList);
auto context = Parameter<Context>(Descriptor::kContext);
auto maybe_feedback_vector =
Parameter<HeapObject>(Descriptor::kMaybeFeedbackVector);
auto feedback_vector = Parameter<FeedbackVector>(Descriptor::kFeedbackVector);
auto slot = UncheckedParameter<Int32T>(Descriptor::kSlot);
TVARIABLE(AllocationSite, allocation_site);
Label if_construct_generic(this), if_construct_array(this);
CollectConstructFeedback(context, target, new_target, maybe_feedback_vector,
CollectConstructFeedback(context, target, new_target, feedback_vector,
Unsigned(ChangeInt32ToIntPtr(slot)),
&if_construct_generic, &if_construct_array,
&allocation_site);
......@@ -103,6 +130,34 @@ TF_BUILTIN(ConstructWithSpread, CallOrConstructBuiltinsAssembler) {
CallOrConstructWithSpread(target, new_target, spread, args_count, context);
}
TF_BUILTIN(ConstructWithSpread_Baseline, CallOrConstructBuiltinsAssembler) {
auto target = Parameter<Object>(Descriptor::kTarget);
auto new_target = Parameter<Object>(Descriptor::kNewTarget);
auto spread = Parameter<Object>(Descriptor::kSpread);
auto args_count =
UncheckedParameter<Int32T>(Descriptor::kActualArgumentsCount);
auto slot = UncheckedParameter<Int32T>(Descriptor::kSlot);
// TODO(v8:11429,verwaest): Only emit context loads where necessary
auto context = LoadContextFromBaseline();
// TODO(v8:11429,verwaest): Make sure CollectConstructFeedback knows we have a
// feedback vector.
auto feedback_vector = LoadFeedbackVectorFromBaseline();
TVARIABLE(AllocationSite, allocation_site);
Label if_construct_generic(this), if_construct_array(this);
CollectConstructFeedback(context, target, new_target, feedback_vector,
Unsigned(ChangeInt32ToIntPtr(slot)),
&if_construct_generic, &if_construct_array,
&allocation_site);
BIND(&if_construct_array);
Goto(&if_construct_generic); // Not implemented.
BIND(&if_construct_generic);
CallOrConstructWithSpread(target, new_target, spread, args_count, context);
}
TF_BUILTIN(ConstructWithSpread_WithFeedback, CallOrConstructBuiltinsAssembler) {
auto target = Parameter<Object>(Descriptor::kTarget);
auto new_target = Parameter<Object>(Descriptor::kNewTarget);
......@@ -110,13 +165,12 @@ TF_BUILTIN(ConstructWithSpread_WithFeedback, CallOrConstructBuiltinsAssembler) {
auto args_count =
UncheckedParameter<Int32T>(Descriptor::kActualArgumentsCount);
auto context = Parameter<Context>(Descriptor::kContext);
auto maybe_feedback_vector =
Parameter<HeapObject>(Descriptor::kMaybeFeedbackVector);
auto feedback_vector = Parameter<HeapObject>(Descriptor::kFeedbackVector);
auto slot = UncheckedParameter<Int32T>(Descriptor::kSlot);
TVARIABLE(AllocationSite, allocation_site);
Label if_construct_generic(this), if_construct_array(this);
CollectConstructFeedback(context, target, new_target, maybe_feedback_vector,
CollectConstructFeedback(context, target, new_target, feedback_vector,
Unsigned(ChangeInt32ToIntPtr(slot)),
&if_construct_generic, &if_construct_array,
&allocation_site);
......
......@@ -27,24 +27,33 @@ IC_BUILTIN(LoadIC_Megamorphic)
IC_BUILTIN(LoadIC_Noninlined)
IC_BUILTIN(LoadIC_NoFeedback)
IC_BUILTIN(LoadICTrampoline)
IC_BUILTIN(LoadICBaseline)
IC_BUILTIN(LoadICTrampoline_Megamorphic)
IC_BUILTIN(LoadSuperIC)
IC_BUILTIN(LoadSuperICBaseline)
IC_BUILTIN(KeyedLoadIC)
IC_BUILTIN(KeyedLoadIC_Megamorphic)
IC_BUILTIN(KeyedLoadIC_PolymorphicName)
IC_BUILTIN(KeyedLoadICTrampoline)
IC_BUILTIN(KeyedLoadICBaseline)
IC_BUILTIN(KeyedLoadICTrampoline_Megamorphic)
IC_BUILTIN(LoadGlobalIC_NoFeedback)
IC_BUILTIN(StoreGlobalIC)
IC_BUILTIN(StoreGlobalICTrampoline)
IC_BUILTIN(StoreGlobalICBaseline)
IC_BUILTIN(StoreIC)
IC_BUILTIN(StoreICTrampoline)
IC_BUILTIN(StoreICBaseline)
IC_BUILTIN(KeyedStoreIC)
IC_BUILTIN(KeyedStoreICTrampoline)
IC_BUILTIN(KeyedStoreICBaseline)
IC_BUILTIN(StoreInArrayLiteralIC)
IC_BUILTIN(StoreInArrayLiteralICBaseline)
IC_BUILTIN(CloneObjectIC)
IC_BUILTIN(CloneObjectICBaseline)
IC_BUILTIN(CloneObjectIC_Slow)
IC_BUILTIN(KeyedHasIC)
IC_BUILTIN(KeyedHasICBaseline)
IC_BUILTIN(KeyedHasIC_Megamorphic)
IC_BUILTIN(KeyedHasIC_PolymorphicName)
......@@ -54,6 +63,17 @@ IC_BUILTIN_PARAM(LoadGlobalICTrampoline, LoadGlobalICTrampoline,
NOT_INSIDE_TYPEOF)
IC_BUILTIN_PARAM(LoadGlobalICInsideTypeofTrampoline, LoadGlobalICTrampoline,
INSIDE_TYPEOF)
IC_BUILTIN_PARAM(LoadGlobalICBaseline, LoadGlobalICBaseline, NOT_INSIDE_TYPEOF)
IC_BUILTIN_PARAM(LoadGlobalICInsideTypeofBaseline, LoadGlobalICBaseline,
INSIDE_TYPEOF)
IC_BUILTIN_PARAM(LookupGlobalICBaseline, LookupGlobalICBaseline,
NOT_INSIDE_TYPEOF)
IC_BUILTIN_PARAM(LookupGlobalICInsideTypeofBaseline, LookupGlobalICBaseline,
INSIDE_TYPEOF)
IC_BUILTIN_PARAM(LookupContextBaseline, LookupContextBaseline,
NOT_INSIDE_TYPEOF)
IC_BUILTIN_PARAM(LookupContextInsideTypeofBaseline, LookupContextBaseline,
INSIDE_TYPEOF)
TF_BUILTIN(DynamicCheckMaps, CodeStubAssembler) {
auto map = Parameter<Map>(Descriptor::kMap);
......
......@@ -3,6 +3,7 @@
// found in the LICENSE file.
#include "src/api/api.h"
#include "src/baseline/baseline-compiler.h"
#include "src/builtins/builtins-utils-gen.h"
#include "src/builtins/builtins.h"
#include "src/codegen/code-stub-assembler.h"
......@@ -687,6 +688,20 @@ TF_BUILTIN(ForInEnumerate, CodeStubAssembler) {
TailCallRuntime(Runtime::kForInEnumerate, context, receiver);
}
TF_BUILTIN(ForInPrepare, CodeStubAssembler) {
// The {enumerator} is either a Map or a FixedArray.
auto enumerator = Parameter<HeapObject>(Descriptor::kEnumerator);
auto index = Parameter<TaggedIndex>(Descriptor::kVectorIndex);
auto feedback_vector = Parameter<FeedbackVector>(Descriptor::kFeedbackVector);
TNode<UintPtrT> vector_index = Unsigned(TaggedIndexToIntPtr(index));
TNode<FixedArray> cache_array;
TNode<Smi> cache_length;
ForInPrepare(enumerator, vector_index, feedback_vector, &cache_array,
&cache_length, true);
Return(cache_array, cache_length);
}
TF_BUILTIN(ForInFilter, CodeStubAssembler) {
auto key = Parameter<String>(Descriptor::kKey);
auto object = Parameter<HeapObject>(Descriptor::kObject);
......@@ -909,6 +924,28 @@ void Builtins::Generate_MemMove(MacroAssembler* masm) {
}
#endif // V8_TARGET_ARCH_IA32
// TODO(v8:11421): Remove #if once baseline compiler is ported to other
// architectures.
#if V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_ARM64
void Builtins::Generate_BaselineLeaveFrame(MacroAssembler* masm) {
baseline::BaselineAssembler::EmitReturn(masm);
}
#else
// Stub out implementations of arch-specific baseline builtins.
void Builtins::Generate_BaselineOutOfLinePrologue(MacroAssembler* masm) {
masm->Trap();
}
void Builtins::Generate_BaselineLeaveFrame(MacroAssembler* masm) {
masm->Trap();
}
void Builtins::Generate_BaselineOnStackReplacement(MacroAssembler* masm) {
masm->Trap();
}
void Builtins::Generate_TailCallOptimizedCodeSlot(MacroAssembler* masm) {
masm->Trap();
}
#endif
// ES6 [[Get]] operation.
TF_BUILTIN(GetProperty, CodeStubAssembler) {
auto object = Parameter<Object>(Descriptor::kObject);
......
......@@ -143,13 +143,28 @@ void LazyBuiltinsAssembler::CompileLazy(TNode<JSFunction> function) {
isolate(), CompileLazy))));
StoreObjectField(function, JSFunction::kCodeOffset, sfi_code);
Label tailcall_code(this);
Label baseline(this);
TVARIABLE(Code, code);
// Check if we have baseline code.
// TODO(v8:11429): We already know if we have baseline code in
// GetSharedFunctionInfoCode, make that jump to here.
TNode<Uint32T> code_flags =
LoadObjectField<Uint32T>(sfi_code, Code::kFlagsOffset);
TNode<Uint32T> code_kind = DecodeWord32<Code::KindField>(code_flags);
TNode<BoolT> is_baseline =
IsEqualInWord32<Code::KindField>(code_kind, CodeKind::SPARKPLUG);
GotoIf(is_baseline, &baseline);
// Finally, check for presence of an NCI cached Code object - if an entry
// possibly exists, call into runtime to query the cache.
TNode<Uint8T> flags2 =
LoadObjectField<Uint8T>(shared, SharedFunctionInfo::kFlags2Offset);
TNode<BoolT> may_have_cached_code =
IsSetWord32<SharedFunctionInfo::MayHaveCachedCodeBit>(flags2);
TNode<Code> code = Select<Code>(
code = Select<Code>(
may_have_cached_code,
[=]() {
return CAST(CallRuntime(Runtime::kTryInstallNCICode,
......@@ -157,9 +172,21 @@ void LazyBuiltinsAssembler::CompileLazy(TNode<JSFunction> function) {
function));
},
[=]() { return sfi_code; });
Goto(&tailcall_code);
BIND(&baseline);
// Ensure we have a feedback vector.
code = Select<Code>(
IsFeedbackVector(feedback_cell_value), [=]() { return sfi_code; },
[=]() {
return CAST(CallRuntime(Runtime::kPrepareForBaseline,
Parameter<Context>(Descriptor::kContext),
function));
});
Goto(&tailcall_code);
BIND(&tailcall_code);
// Jump to the selected code entry.
GenerateTailCallToJSCode(code, function);
GenerateTailCallToJSCode(code.value(), function);
BIND(&compile_function);
GenerateTailCallToReturnedCode(Runtime::kCompileLazy, function);
......
......@@ -1171,11 +1171,21 @@ TF_BUILTIN(InstanceOf_WithFeedback, ObjectBuiltinsAssembler) {
auto object = Parameter<Object>(Descriptor::kLeft);
auto callable = Parameter<Object>(Descriptor::kRight);
auto context = Parameter<Context>(Descriptor::kContext);
auto maybe_feedback_vector =
Parameter<HeapObject>(Descriptor::kMaybeFeedbackVector);
auto feedback_vector = Parameter<HeapObject>(Descriptor::kFeedbackVector);
auto slot = UncheckedParameter<UintPtrT>(Descriptor::kSlot);
CollectInstanceOfFeedback(callable, context, maybe_feedback_vector, slot);
CollectInstanceOfFeedback(callable, context, feedback_vector, slot);
Return(InstanceOf(object, callable, context));
}
TF_BUILTIN(InstanceOf_Baseline, ObjectBuiltinsAssembler) {
auto object = Parameter<Object>(Descriptor::kLeft);
auto callable = Parameter<Object>(Descriptor::kRight);
auto context = LoadContextFromBaseline();
auto feedback_vector = LoadFeedbackVectorFromBaseline();
auto slot = UncheckedParameter<UintPtrT>(Descriptor::kSlot);
CollectInstanceOfFeedback(callable, context, feedback_vector, slot);
Return(InstanceOf(object, callable, context));
}
......
......@@ -6,11 +6,11 @@
namespace runtime {
extern runtime CreateArrayLiteral(
Context, FeedbackVector, TaggedIndex, ArrayBoilerplateDescription,
Smi): HeapObject;
Context, Undefined | FeedbackVector, TaggedIndex,
ArrayBoilerplateDescription, Smi): HeapObject;
extern runtime CreateObjectLiteral(
Context, FeedbackVector, TaggedIndex, ObjectBoilerplateDescription,
Smi): HeapObject;
Context, Undefined | FeedbackVector, TaggedIndex,
ObjectBoilerplateDescription, Smi): HeapObject;
}
namespace constructor {
......@@ -22,8 +22,8 @@ extern enum AllocationSiteMode {
TRACK_ALLOCATION_SITE
}
const kIsShallowAndDisableMementos: constexpr int31
generates 'AggregateLiteral::Flags::kIsShallowAndDisableMementos';
const kIsShallow: constexpr int31
generates 'AggregateLiteral::Flags::kIsShallow';
const kEvalScope: constexpr ScopeType generates 'ScopeType::EVAL_SCOPE';
const kFunctionScope:
constexpr ScopeType generates 'ScopeType::FUNCTION_SCOPE';
......@@ -60,17 +60,18 @@ builtin CreateRegExpLiteral(implicit context: Context)(
}
builtin CreateShallowArrayLiteral(implicit context: Context)(
feedbackVector: FeedbackVector, slot: TaggedIndex,
maybeFeedbackVector: Undefined|FeedbackVector, slot: TaggedIndex,
constantElements: ArrayBoilerplateDescription): HeapObject {
try {
const vector = Cast<FeedbackVector>(maybeFeedbackVector)
otherwise CallRuntime;
return CreateShallowArrayLiteral(
feedbackVector, slot, context,
AllocationSiteMode::DONT_TRACK_ALLOCATION_SITE)
vector, slot, context, AllocationSiteMode::TRACK_ALLOCATION_SITE)
otherwise CallRuntime;
} label CallRuntime deferred {
tail runtime::CreateArrayLiteral(
context, feedbackVector, slot, constantElements,
SmiConstant(kIsShallowAndDisableMementos));
context, maybeFeedbackVector, slot, constantElements,
SmiConstant(kIsShallow));
}
}
......@@ -80,14 +81,16 @@ builtin CreateEmptyArrayLiteral(implicit context: Context)(
}
builtin CreateShallowObjectLiteral(implicit context: Context)(
feedbackVector: FeedbackVector, slot: TaggedIndex,
maybeFeedbackVector: Undefined|FeedbackVector, slot: TaggedIndex,
desc: ObjectBoilerplateDescription, flags: Smi): HeapObject {
try {
const feedbackVector = Cast<FeedbackVector>(maybeFeedbackVector)
otherwise CallRuntime;
return CreateShallowObjectLiteral(feedbackVector, slot)
otherwise CallRuntime;
} label CallRuntime deferred {
tail runtime::CreateObjectLiteral(
context, feedbackVector, slot, desc, flags);
context, maybeFeedbackVector, slot, desc, flags);
}
}
......
......@@ -47,28 +47,23 @@ builtin BytecodeBudgetInterruptFromCode(implicit context: Context)(
tail runtime::BytecodeBudgetInterruptFromCode(feedbackCell);
}
extern transitioning macro ForInPrepareForTorque(
Map | FixedArray, uintptr, Undefined | FeedbackVector): FixedArray;
transitioning builtin ForInPrepare(implicit _context: Context)(
enumerator: Map|FixedArray, slot: uintptr,
maybeFeedbackVector: Undefined|FeedbackVector): FixedArray {
return ForInPrepareForTorque(enumerator, slot, maybeFeedbackVector);
}
extern transitioning builtin ForInFilter(implicit context: Context)(
JSAny, HeapObject): JSAny;
extern enum ForInFeedback extends uint31 { kAny, ...}
extern macro UpdateFeedback(
SmiTagged<ForInFeedback>, Undefined | FeedbackVector, uintptr);
extern macro UpdateFeedback(SmiTagged<ForInFeedback>, FeedbackVector, uintptr);
extern macro MaybeUpdateFeedback(
SmiTagged<ForInFeedback>, Undefined | FeedbackVector, uintptr,
constexpr bool);
@export
transitioning macro ForInNextSlow(
context: Context, slot: uintptr, receiver: JSAnyNotSmi, key: JSAny,
cacheType: Object, maybeFeedbackVector: Undefined|FeedbackVector): JSAny {
cacheType: Object, maybeFeedbackVector: Undefined|FeedbackVector,
guaranteedFeedback: constexpr bool): JSAny {
assert(receiver.map != cacheType); // Handled on the fast path.
UpdateFeedback(
SmiTag<ForInFeedback>(ForInFeedback::kAny), maybeFeedbackVector, slot);
MaybeUpdateFeedback(
SmiTag<ForInFeedback>(ForInFeedback::kAny), maybeFeedbackVector, slot,
guaranteedFeedback);
return ForInFilter(key, receiver);
}
......@@ -77,7 +72,7 @@ transitioning macro ForInNextSlow(
transitioning builtin ForInNext(
context: Context, slot: uintptr, receiver: JSAnyNotSmi,
cacheArray: FixedArray, cacheType: Object, cacheIndex: Smi,
maybeFeedbackVector: Undefined|FeedbackVector): JSAny {
feedbackVector: FeedbackVector): JSAny {
// Load the next key from the enumeration array.
const key = UnsafeCast<JSAny>(cacheArray.objects[cacheIndex]);
......@@ -87,7 +82,7 @@ transitioning builtin ForInNext(
}
return ForInNextSlow(
context, slot, receiver, key, cacheType, maybeFeedbackVector);
context, slot, receiver, key, cacheType, feedbackVector, true);
}
} // namespace internal
......@@ -74,6 +74,20 @@ transitioning builtin GetIteratorWithFeedback(
context, receiver, iteratorMethod, callSlotSmi, maybeFeedbackVector);
}
extern macro LoadFeedbackVectorFromBaseline(): FeedbackVector;
transitioning builtin GetIteratorBaseline(
context: Context, receiver: JSAny, loadSlot: TaggedIndex,
callSlot: TaggedIndex): JSAny {
const feedback: FeedbackVector = LoadFeedbackVectorFromBaseline();
const iteratorMethod: JSAny =
LoadIC(context, receiver, IteratorSymbolConstant(), loadSlot, feedback);
// TODO(v8:10047): Use TaggedIndex here once TurboFan supports it.
const callSlotSmi: Smi = TaggedIndexToSmi(callSlot);
return CallIteratorWithFeedback(
context, receiver, iteratorMethod, callSlotSmi, feedback);
}
transitioning builtin CallIteratorWithFeedback(
context: Context, receiver: JSAny, iteratorMethod: JSAny, callSlot: Smi,
feedback: Undefined|FeedbackVector): JSAny {
......
......@@ -21,6 +21,7 @@ namespace internal {
V(kExpectedOptimizationSentinel, \
"Expected optimized code cell or optimization sentinel") \
V(kExpectedUndefinedOrCell, "Expected undefined or cell in register") \
V(kExpectedFeedbackVector, "Expected feedback vector") \
V(kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, \
"The function_data field should be a BytecodeArray on interpreter entry") \
V(kInputStringTooLong, "Input string too long") \
......