Commit 01452bed authored by Marja Hölttä, committed by Commit Bot

[objects.h splitting] Move Allocationsite & AllocationMemento

BUG=v8:5402,v8:8015

Cq-Include-Trybots: luci.v8.try:v8_linux_noi18n_rel_ng
Change-Id: I14613a05f9b71308858afdd8d2b212ae9786abb3
Reviewed-on: https://chromium-review.googlesource.com/1215169
Commit-Queue: Marja Hölttä <marja@chromium.org>
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Cr-Commit-Position: refs/heads/master@{#55822}
parent dd3ac3c9
......@@ -843,6 +843,8 @@ action("postmortem-metadata") {
sources = [
"src/objects.h",
"src/objects-inl.h",
"src/objects/allocation-site-inl.h",
"src/objects/allocation-site.h",
"src/objects/code-inl.h",
"src/objects/code.h",
"src/objects/data-handler.h",
......@@ -1535,6 +1537,7 @@ v8_source_set("v8_base") {
"src/accessors.h",
"src/address-map.cc",
"src/address-map.h",
"src/allocation-site-scopes-inl.h",
"src/allocation-site-scopes.h",
"src/allocation.cc",
"src/allocation.h",
......
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_ALLOCATION_SITE_SCOPES_INL_H_
#define V8_ALLOCATION_SITE_SCOPES_INL_H_
#include "src/allocation-site-scopes.h"
#include "src/objects/allocation-site-inl.h"
namespace v8 {
namespace internal {
// Returns the AllocationSite for the next literal scope. The first call
// starts the traversal at top_site_; each subsequent call advances along
// the current site's nested_site list.
Handle<AllocationSite> AllocationSiteUsageContext::EnterNewScope() {
  if (top().is_null()) {
    // First scope entered: begin the walk at the top-level site.
    InitializeTraversal(top_site_);
  } else {
    // Advance current site
    Object* nested_site = current()->nested_site();
    // Something is wrong if we advance to the end of the list here.
    update_current_site(AllocationSite::cast(nested_site));
  }
  return Handle<AllocationSite>(*current(), isolate());
}
// Leaves the scope previously entered for |scope_site|. No state is
// mutated; in debug builds this only verifies the traversal is in sync.
void AllocationSiteUsageContext::ExitScope(Handle<AllocationSite> scope_site,
                                           Handle<JSObject> object) {
  // This assert ensures that we are pointing at the right sub-object in a
  // recursive walk of a nested literal.
  DCHECK(object.is_null() || *object == scope_site->boilerplate());
}
// Decides whether an AllocationMemento should be created for |object|:
// the context must be activated, the object's instance type trackable,
// and either allocation-site pretenuring must be on or the object's
// elements kind must still be worth tracking.
bool AllocationSiteUsageContext::ShouldCreateMemento(Handle<JSObject> object) {
  if (activated_ && AllocationSite::CanTrack(object->map()->instance_type())) {
    if (FLAG_allocation_site_pretenuring ||
        AllocationSite::ShouldTrack(object->GetElementsKind())) {
      if (FLAG_trace_creation_allocation_sites) {
        // Optional tracing to help debug allocation-site creation.
        PrintF("*** Creating Memento for %s %p\n",
               object->IsJSArray() ? "JSArray" : "JSObject",
               static_cast<void*>(*object));
      }
      return true;
    }
  }
  return false;
}
} // namespace internal
} // namespace v8
#endif // V8_ALLOCATION_SITE_SCOPES_INL_H_
......@@ -56,40 +56,12 @@ class AllocationSiteUsageContext : public AllocationSiteContext {
top_site_(site),
activated_(activated) { }
inline Handle<AllocationSite> EnterNewScope() {
if (top().is_null()) {
InitializeTraversal(top_site_);
} else {
// Advance current site
Object* nested_site = current()->nested_site();
// Something is wrong if we advance to the end of the list here.
update_current_site(AllocationSite::cast(nested_site));
}
return Handle<AllocationSite>(*current(), isolate());
}
inline Handle<AllocationSite> EnterNewScope();
inline void ExitScope(Handle<AllocationSite> scope_site,
Handle<JSObject> object) {
// This assert ensures that we are pointing at the right sub-object in a
// recursive walk of a nested literal.
DCHECK(object.is_null() || *object == scope_site->boilerplate());
}
Handle<JSObject> object);
bool ShouldCreateMemento(Handle<JSObject> object) {
if (activated_ &&
AllocationSite::CanTrack(object->map()->instance_type())) {
if (FLAG_allocation_site_pretenuring ||
AllocationSite::ShouldTrack(object->GetElementsKind())) {
if (FLAG_trace_creation_allocation_sites) {
PrintF("*** Creating Memento for %s %p\n",
object->IsJSArray() ? "JSArray" : "JSObject",
static_cast<void*>(*object));
}
return true;
}
}
return false;
}
inline bool ShouldCreateMemento(Handle<JSObject> object);
static const bool kCopying = true;
......
......@@ -402,8 +402,10 @@ MaybeHandle<JSObject> InstantiateObject(Isolate* isolate,
}
Handle<JSObject> object;
ASSIGN_RETURN_ON_EXCEPTION(isolate, object,
JSObject::New(constructor, new_target), JSObject);
ASSIGN_RETURN_ON_EXCEPTION(
isolate, object,
JSObject::New(constructor, new_target, Handle<AllocationSite>::null()),
JSObject);
if (is_prototype) JSObject::OptimizeAsPrototype(object);
......
......@@ -31,8 +31,9 @@ Object* ConstructBuffer(Isolate* isolate, Handle<JSFunction> target,
Handle<JSReceiver> new_target, Handle<Object> length,
bool initialize) {
Handle<JSObject> result;
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, result,
JSObject::New(target, new_target));
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
isolate, result,
JSObject::New(target, new_target, Handle<AllocationSite>::null()));
size_t byte_length;
if (!TryNumberToSize(*length, &byte_length)) {
THROW_NEW_ERROR_RETURN_FAILURE(
......
......@@ -26,8 +26,9 @@ BUILTIN(BooleanConstructor) {
Handle<JSReceiver> new_target = Handle<JSReceiver>::cast(args.new_target());
DCHECK(*target == target->native_context()->boolean_function());
Handle<JSObject> result;
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, result,
JSObject::New(target, new_target));
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
isolate, result,
JSObject::New(target, new_target, Handle<AllocationSite>::null()));
Handle<JSValue>::cast(result)->set_value(
isolate->heap()->ToBoolean(value->BooleanValue(isolate)));
return *result;
......
......@@ -86,8 +86,9 @@ BUILTIN(DataViewConstructor) {
// "%DataViewPrototype%", «[[DataView]], [[ViewedArrayBuffer]],
// [[ByteLength]], [[ByteOffset]]»).
Handle<JSObject> result;
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, result,
JSObject::New(target, new_target));
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
isolate, result,
JSObject::New(target, new_target, Handle<AllocationSite>::null()));
for (int i = 0; i < ArrayBufferView::kEmbedderFieldCount; ++i) {
Handle<JSDataView>::cast(result)->SetEmbedderField(i, Smi::kZero);
}
......
......@@ -277,8 +277,9 @@ Object* FormatConstructor(BuiltinArguments args, Isolate* isolate,
// "%<T>Prototype%", ...).
Handle<JSObject> format_obj;
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, format_obj,
JSObject::New(target, new_target));
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
isolate, format_obj,
JSObject::New(target, new_target, Handle<AllocationSite>::null()));
Handle<T> format = Handle<T>::cast(format_obj);
// 3. Perform ? Initialize<T>(Format, locales, options).
......@@ -493,8 +494,9 @@ BUILTIN(ListFormatConstructor) {
Handle<JSObject> result;
// 2. Let listFormat be OrdinaryCreateFromConstructor(NewTarget,
// "%ListFormatPrototype%").
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, result,
JSObject::New(target, new_target));
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
isolate, result,
JSObject::New(target, new_target, Handle<AllocationSite>::null()));
Handle<JSListFormat> format = Handle<JSListFormat>::cast(result);
format->set_flags(0);
......@@ -526,8 +528,10 @@ MaybeHandle<JSLocale> CreateLocale(Isolate* isolate,
Handle<JSReceiver> new_target,
Handle<Object> tag, Handle<Object> options) {
Handle<JSObject> result;
ASSIGN_RETURN_ON_EXCEPTION(isolate, result,
JSObject::New(constructor, new_target), JSLocale);
ASSIGN_RETURN_ON_EXCEPTION(
isolate, result,
JSObject::New(constructor, new_target, Handle<AllocationSite>::null()),
JSLocale);
// First parameter is a locale, as a string/object. Can't be empty.
if (!tag->IsString() && !tag->IsJSReceiver()) {
......@@ -737,8 +741,9 @@ BUILTIN(RelativeTimeFormatConstructor) {
// 2. Let relativeTimeFormat be
// ! OrdinaryCreateFromConstructor(NewTarget,
// "%RelativeTimeFormatPrototype%").
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, result,
JSObject::New(target, new_target));
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
isolate, result,
JSObject::New(target, new_target, Handle<AllocationSite>::null()));
Handle<JSRelativeTimeFormat> format =
Handle<JSRelativeTimeFormat>::cast(result);
format->set_flags(0);
......@@ -800,8 +805,9 @@ BUILTIN(PluralRulesConstructor) {
// [[MinimumFractionDigits]], [[MaximumFractionDigits]],
// [[MinimumSignificantDigits]], [[MaximumSignificantDigits]] »).
Handle<JSObject> plural_rules_obj;
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, plural_rules_obj,
JSObject::New(target, new_target));
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
isolate, plural_rules_obj,
JSObject::New(target, new_target, Handle<AllocationSite>::null()));
Handle<JSPluralRules> plural_rules =
Handle<JSPluralRules>::cast(plural_rules_obj);
......@@ -844,8 +850,9 @@ BUILTIN(CollatorConstructor) {
// 5. Let collator be ? OrdinaryCreateFromConstructor(newTarget,
// "%CollatorPrototype%", internalSlotsList).
Handle<JSObject> collator_obj;
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, collator_obj,
JSObject::New(target, new_target));
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
isolate, collator_obj,
JSObject::New(target, new_target, Handle<AllocationSite>::null()));
Handle<JSCollator> collator = Handle<JSCollator>::cast(collator_obj);
// 6. Return ? InitializeCollator(collator, locales, options).
......@@ -941,8 +948,9 @@ BUILTIN(V8BreakIteratorConstructor) {
Handle<Object> options = args.atOrUndefined(isolate, 2);
Handle<JSObject> break_iterator_obj;
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, break_iterator_obj,
JSObject::New(target, new_target));
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
isolate, break_iterator_obj,
JSObject::New(target, new_target, Handle<AllocationSite>::null()));
Handle<JSV8BreakIterator> break_iterator =
Handle<JSV8BreakIterator>::cast(break_iterator_obj);
......
......@@ -951,6 +951,8 @@ enum AllocationSiteMode {
LAST_ALLOCATION_SITE_MODE = TRACK_ALLOCATION_SITE
};
enum class AllocationSiteUpdateMode { kUpdate, kCheckOnly };
// The mips architecture prior to revision 5 has inverted encoding for sNaN.
#if (V8_TARGET_ARCH_MIPS && !defined(_MIPS_ARCH_MIPS32R6) && \
(!defined(USE_SIMULATOR) || !defined(_MIPS_TARGET_SIMULATOR))) || \
......
......@@ -24,6 +24,7 @@
#include "src/log.h"
#include "src/msan.h"
#include "src/objects-inl.h"
#include "src/objects/allocation-site-inl.h"
#include "src/objects/api-callbacks-inl.h"
#include "src/objects/descriptor-array.h"
#include "src/objects/literal-objects.h"
......
......@@ -41,6 +41,7 @@ class HeapTester;
class TestMemoryAllocatorScope;
} // namespace heap
class AllocationMemento;
class ObjectBoilerplateDescription;
class BytecodeArray;
class CodeDataContainer;
......
......@@ -858,8 +858,9 @@ MaybeHandle<Object> ConstructCallSite(Isolate* isolate,
handle(isolate->native_context()->callsite_function(), isolate);
Handle<JSObject> obj;
ASSIGN_RETURN_ON_EXCEPTION(isolate, obj, JSObject::New(target, target),
Object);
ASSIGN_RETURN_ON_EXCEPTION(
isolate, obj,
JSObject::New(target, target, Handle<AllocationSite>::null()), Object);
Handle<Symbol> key = isolate->factory()->call_site_frame_array_symbol();
RETURN_ON_EXCEPTION(isolate, JSObject::SetOwnPropertyIgnoreAttributes(
......@@ -1114,8 +1115,10 @@ MaybeHandle<Object> ErrorUtils::Construct(
// 2. Let O be ? OrdinaryCreateFromConstructor(newTarget, "%ErrorPrototype%",
// « [[ErrorData]] »).
Handle<JSObject> err;
ASSIGN_RETURN_ON_EXCEPTION(isolate, err,
JSObject::New(target, new_target_recv), Object);
ASSIGN_RETURN_ON_EXCEPTION(
isolate, err,
JSObject::New(target, new_target_recv, Handle<AllocationSite>::null()),
Object);
// 3. If message is not undefined, then
// a. Let msg be ? ToString(message).
......
......@@ -473,8 +473,6 @@ bool Object::IsMinusZero() const {
// Cast operations
CAST_ACCESSOR(AccessorPair)
CAST_ACCESSOR(AllocationMemento)
CAST_ACCESSOR(AllocationSite)
CAST_ACCESSOR(AsyncGeneratorRequest)
CAST_ACCESSOR(BigInt)
CAST_ACCESSOR(ObjectBoilerplateDescription)
......@@ -1003,151 +1001,6 @@ FixedArrayBase* JSObject::elements() const {
return static_cast<FixedArrayBase*>(array);
}
bool AllocationSite::HasWeakNext() const {
return map() == GetReadOnlyRoots().allocation_site_map();
}
void AllocationSite::Initialize() {
set_transition_info_or_boilerplate(Smi::kZero);
SetElementsKind(GetInitialFastElementsKind());
set_nested_site(Smi::kZero);
set_pretenure_data(0);
set_pretenure_create_count(0);
set_dependent_code(
DependentCode::cast(GetReadOnlyRoots().empty_weak_fixed_array()),
SKIP_WRITE_BARRIER);
}
bool AllocationSite::IsZombie() const {
return pretenure_decision() == kZombie;
}
bool AllocationSite::IsMaybeTenure() const {
return pretenure_decision() == kMaybeTenure;
}
bool AllocationSite::PretenuringDecisionMade() const {
return pretenure_decision() != kUndecided;
}
void AllocationSite::MarkZombie() {
DCHECK(!IsZombie());
Initialize();
set_pretenure_decision(kZombie);
}
ElementsKind AllocationSite::GetElementsKind() const {
return ElementsKindBits::decode(transition_info());
}
void AllocationSite::SetElementsKind(ElementsKind kind) {
set_transition_info(ElementsKindBits::update(transition_info(), kind));
}
bool AllocationSite::CanInlineCall() const {
return DoNotInlineBit::decode(transition_info()) == 0;
}
void AllocationSite::SetDoNotInlineCall() {
set_transition_info(DoNotInlineBit::update(transition_info(), true));
}
bool AllocationSite::PointsToLiteral() const {
Object* raw_value = transition_info_or_boilerplate();
DCHECK_EQ(!raw_value->IsSmi(),
raw_value->IsJSArray() || raw_value->IsJSObject());
return !raw_value->IsSmi();
}
// Heuristic: We only need to create allocation site info if the boilerplate
// elements kind is the initial elements kind.
bool AllocationSite::ShouldTrack(ElementsKind boilerplate_elements_kind) {
return IsSmiElementsKind(boilerplate_elements_kind);
}
inline bool AllocationSite::CanTrack(InstanceType type) {
if (FLAG_allocation_site_pretenuring) {
// TurboFan doesn't care at all about String pretenuring feedback,
// so don't bother even trying to track that.
return type == JS_ARRAY_TYPE || type == JS_OBJECT_TYPE;
}
return type == JS_ARRAY_TYPE;
}
AllocationSite::PretenureDecision AllocationSite::pretenure_decision() const {
return PretenureDecisionBits::decode(pretenure_data());
}
void AllocationSite::set_pretenure_decision(PretenureDecision decision) {
int32_t value = pretenure_data();
set_pretenure_data(PretenureDecisionBits::update(value, decision));
}
bool AllocationSite::deopt_dependent_code() const {
return DeoptDependentCodeBit::decode(pretenure_data());
}
void AllocationSite::set_deopt_dependent_code(bool deopt) {
int32_t value = pretenure_data();
set_pretenure_data(DeoptDependentCodeBit::update(value, deopt));
}
int AllocationSite::memento_found_count() const {
return MementoFoundCountBits::decode(pretenure_data());
}
inline void AllocationSite::set_memento_found_count(int count) {
int32_t value = pretenure_data();
// Verify that we can count more mementos than we can possibly find in one
// new space collection.
DCHECK((GetHeap()->MaxSemiSpaceSize() /
(Heap::kMinObjectSizeInWords * kPointerSize +
AllocationMemento::kSize)) < MementoFoundCountBits::kMax);
DCHECK_LT(count, MementoFoundCountBits::kMax);
set_pretenure_data(MementoFoundCountBits::update(value, count));
}
int AllocationSite::memento_create_count() const {
return pretenure_create_count();
}
void AllocationSite::set_memento_create_count(int count) {
set_pretenure_create_count(count);
}
bool AllocationSite::IncrementMementoFoundCount(int increment) {
if (IsZombie()) return false;
int value = memento_found_count();
set_memento_found_count(value + increment);
return memento_found_count() >= kPretenureMinimumCreated;
}
inline void AllocationSite::IncrementMementoCreateCount() {
DCHECK(FLAG_allocation_site_pretenuring);
int value = memento_create_count();
set_memento_create_count(value + 1);
}
bool AllocationMemento::IsValid() const {
return allocation_site()->IsAllocationSite() &&
!AllocationSite::cast(allocation_site())->IsZombie();
}
AllocationSite* AllocationMemento::GetAllocationSite() const {
DCHECK(IsValid());
return AllocationSite::cast(allocation_site());
}
Address AllocationMemento::GetAllocationSiteUnchecked() const {
return reinterpret_cast<Address>(allocation_site());
}
void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
JSObject::ValidateElements(*object);
ElementsKind elements_kind = object->map()->elements_kind();
......@@ -2223,38 +2076,6 @@ ACCESSORS(TemplateObjectDescription, cooked_strings, FixedArray,
ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
ACCESSORS(AllocationSite, transition_info_or_boilerplate, Object,
kTransitionInfoOrBoilerplateOffset)
JSObject* AllocationSite::boilerplate() const {
DCHECK(PointsToLiteral());
return JSObject::cast(transition_info_or_boilerplate());
}
void AllocationSite::set_boilerplate(JSObject* object, WriteBarrierMode mode) {
set_transition_info_or_boilerplate(object, mode);
}
int AllocationSite::transition_info() const {
DCHECK(!PointsToLiteral());
return Smi::cast(transition_info_or_boilerplate())->value();
}
void AllocationSite::set_transition_info(int value) {
DCHECK(!PointsToLiteral());
set_transition_info_or_boilerplate(Smi::FromInt(value), SKIP_WRITE_BARRIER);
}
ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
INT32_ACCESSORS(AllocationSite, pretenure_data, kPretenureDataOffset)
INT32_ACCESSORS(AllocationSite, pretenure_create_count,
kPretenureCreateCountOffset)
ACCESSORS(AllocationSite, dependent_code, DependentCode,
kDependentCodeOffset)
ACCESSORS_CHECKED(AllocationSite, weak_next, Object, kWeakNextOffset,
HasWeakNext())
ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)
SMI_ACCESSORS(StackFrameInfo, line_number, kLineNumberIndex)
SMI_ACCESSORS(StackFrameInfo, column_number, kColumnNumberIndex)
SMI_ACCESSORS(StackFrameInfo, script_id, kScriptIdIndex)
......
......@@ -18482,8 +18482,10 @@ MaybeHandle<JSDate> JSDate::New(Handle<JSFunction> constructor,
Handle<JSReceiver> new_target, double tv) {
Isolate* const isolate = constructor->GetIsolate();
Handle<JSObject> result;
ASSIGN_RETURN_ON_EXCEPTION(isolate, result,
JSObject::New(constructor, new_target), JSDate);
ASSIGN_RETURN_ON_EXCEPTION(
isolate, result,
JSObject::New(constructor, new_target, Handle<AllocationSite>::null()),
JSDate);
if (-DateCache::kMaxTimeInMs <= tv && tv <= DateCache::kMaxTimeInMs) {
tv = DoubleToInteger(tv) + 0.0;
} else {
......
......@@ -1939,8 +1939,6 @@ enum class KeyCollectionMode {
static_cast<int>(v8::KeyCollectionMode::kIncludePrototypes)
};
enum class AllocationSiteUpdateMode { kUpdate, kCheckOnly };
class PropertyArray : public HeapObject {
public:
// [length]: length of the array.
......@@ -2259,7 +2257,7 @@ class JSObject: public JSReceiver {
static V8_WARN_UNUSED_RESULT MaybeHandle<JSObject> New(
Handle<JSFunction> constructor, Handle<JSReceiver> new_target,
Handle<AllocationSite> site = Handle<AllocationSite>::null());
Handle<AllocationSite> site);
static MaybeHandle<Context> GetFunctionRealm(Handle<JSObject> object);
......@@ -3662,188 +3660,6 @@ class JSMessageObject: public JSObject {
typedef BodyDescriptor BodyDescriptorWeak;
};
class AllocationSite : public Struct, public NeverReadOnlySpaceObject {
public:
static const uint32_t kMaximumArrayBytesToPretransition = 8 * 1024;
static const double kPretenureRatio;
static const int kPretenureMinimumCreated = 100;
// Values for pretenure decision field.
enum PretenureDecision {
kUndecided = 0,
kDontTenure = 1,
kMaybeTenure = 2,
kTenure = 3,
kZombie = 4,
kLastPretenureDecisionValue = kZombie
};
// Use the mixin methods over the HeapObject methods.
// TODO(v8:7786) Remove once the HeapObject methods are gone.
using NeverReadOnlySpaceObject::GetHeap;
using NeverReadOnlySpaceObject::GetIsolate;
const char* PretenureDecisionName(PretenureDecision decision);
// Contains either a Smi-encoded bitfield or a boilerplate. If it's a Smi the
// AllocationSite is for a constructed Array.
DECL_ACCESSORS(transition_info_or_boilerplate, Object)
DECL_ACCESSORS(boilerplate, JSObject)
DECL_INT_ACCESSORS(transition_info)
// nested_site threads a list of sites that represent nested literals
// walked in a particular order. So [[1, 2], 1, 2] will have one
// nested_site, but [[1, 2], 3, [4]] will have a list of two.
DECL_ACCESSORS(nested_site, Object)
// Bitfield containing pretenuring information.
DECL_INT32_ACCESSORS(pretenure_data)
DECL_INT32_ACCESSORS(pretenure_create_count)
DECL_ACCESSORS(dependent_code, DependentCode)
// heap->allocation_site_list() points to the last AllocationSite, which forms
// a linked list through the weak_next property. The GC might remove elements
// from the list by updating weak_next.
DECL_ACCESSORS(weak_next, Object)
inline void Initialize();
// Checks if the allocation site contains a weak_next field.
inline bool HasWeakNext() const;
// This method is expensive, it should only be called for reporting.
bool IsNested();
// transition_info bitfields, for constructed array transition info.
class ElementsKindBits: public BitField<ElementsKind, 0, 15> {};
class UnusedBits: public BitField<int, 15, 14> {};
class DoNotInlineBit: public BitField<bool, 29, 1> {};
// Bitfields for pretenure_data
class MementoFoundCountBits: public BitField<int, 0, 26> {};
class PretenureDecisionBits: public BitField<PretenureDecision, 26, 3> {};
class DeoptDependentCodeBit: public BitField<bool, 29, 1> {};
STATIC_ASSERT(PretenureDecisionBits::kMax >= kLastPretenureDecisionValue);
// Increments the mementos found counter and returns true when the first
// memento was found for a given allocation site.
inline bool IncrementMementoFoundCount(int increment = 1);
inline void IncrementMementoCreateCount();
PretenureFlag GetPretenureMode() const;
void ResetPretenureDecision();
inline PretenureDecision pretenure_decision() const;
inline void set_pretenure_decision(PretenureDecision decision);
inline bool deopt_dependent_code() const;
inline void set_deopt_dependent_code(bool deopt);
inline int memento_found_count() const;
inline void set_memento_found_count(int count);
inline int memento_create_count() const;
inline void set_memento_create_count(int count);
// The pretenuring decision is made during gc, and the zombie state allows
// us to recognize when an allocation site is just being kept alive because
// a later traversal of new space may discover AllocationMementos that point
// to this AllocationSite.
inline bool IsZombie() const;
inline bool IsMaybeTenure() const;
inline void MarkZombie();
inline bool MakePretenureDecision(PretenureDecision current_decision,
double ratio,
bool maximum_size_scavenge);
inline bool DigestPretenuringFeedback(bool maximum_size_scavenge);
inline ElementsKind GetElementsKind() const;
inline void SetElementsKind(ElementsKind kind);
inline bool CanInlineCall() const;
inline void SetDoNotInlineCall();
inline bool PointsToLiteral() const;
template <AllocationSiteUpdateMode update_or_check =
AllocationSiteUpdateMode::kUpdate>
static bool DigestTransitionFeedback(Handle<AllocationSite> site,
ElementsKind to_kind);
DECL_PRINTER(AllocationSite)
DECL_VERIFIER(AllocationSite)
DECL_CAST(AllocationSite)
static inline bool ShouldTrack(ElementsKind boilerplate_elements_kind);
static bool ShouldTrack(ElementsKind from, ElementsKind to);
static inline bool CanTrack(InstanceType type);
// Layout description.
// AllocationSite has to start with kTransitionInfoOrBoilerplateOffset
// and end with WeakNext field.
#define ALLOCATION_SITE_FIELDS(V) \
V(kTransitionInfoOrBoilerplateOffset, kPointerSize) \
V(kNestedSiteOffset, kPointerSize) \
V(kDependentCodeOffset, kPointerSize) \
V(kCommonPointerFieldEndOffset, 0) \
V(kPretenureDataOffset, kInt32Size) \
V(kPretenureCreateCountOffset, kInt32Size) \
/* Size of AllocationSite without WeakNext field */ \
V(kSizeWithoutWeakNext, 0) \
V(kWeakNextOffset, kPointerSize) \
/* Size of AllocationSite with WeakNext field */ \
V(kSizeWithWeakNext, 0)
DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, ALLOCATION_SITE_FIELDS)
static const int kStartOffset = HeapObject::kHeaderSize;
template <bool includeWeakNext>
class BodyDescriptorImpl;
// BodyDescriptor is used to traverse all the pointer fields including
// weak_next
typedef BodyDescriptorImpl<true> BodyDescriptor;
// BodyDescriptorWeak is used to traverse all the pointer fields
// except for weak_next
typedef BodyDescriptorImpl<false> BodyDescriptorWeak;
private:
inline bool PretenuringDecisionMade() const;
DISALLOW_IMPLICIT_CONSTRUCTORS(AllocationSite);
};
class AllocationMemento: public Struct {
public:
static const int kAllocationSiteOffset = HeapObject::kHeaderSize;
static const int kSize = kAllocationSiteOffset + kPointerSize;
DECL_ACCESSORS(allocation_site, Object)
inline bool IsValid() const;
inline AllocationSite* GetAllocationSite() const;
inline Address GetAllocationSiteUnchecked() const;
DECL_PRINTER(AllocationMemento)
DECL_VERIFIER(AllocationMemento)
DECL_CAST(AllocationMemento)
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(AllocationMemento);
};
// Utility superclass for stack-allocated objects that must be updated
// on gc. It provides two ways for the gc to update instances, either
// iterating or updating after gc.
......
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_OBJECTS_ALLOCATION_SITE_INL_H_
#define V8_OBJECTS_ALLOCATION_SITE_INL_H_
#include "src/objects/allocation-site.h"
#include "src/heap/heap-inl.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
namespace v8 {
namespace internal {
CAST_ACCESSOR(AllocationMemento)
CAST_ACCESSOR(AllocationSite)
ACCESSORS(AllocationSite, transition_info_or_boilerplate, Object,
kTransitionInfoOrBoilerplateOffset)
ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
INT32_ACCESSORS(AllocationSite, pretenure_data, kPretenureDataOffset)
INT32_ACCESSORS(AllocationSite, pretenure_create_count,
kPretenureCreateCountOffset)
ACCESSORS(AllocationSite, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS_CHECKED(AllocationSite, weak_next, Object, kWeakNextOffset,
HasWeakNext())
ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)
// Typed views over the transition_info_or_boilerplate field: it holds a
// boilerplate JSObject for literal sites, or a Smi-encoded bitfield
// (transition_info) for constructed-Array sites.
JSObject* AllocationSite::boilerplate() const {
  DCHECK(PointsToLiteral());
  return JSObject::cast(transition_info_or_boilerplate());
}

void AllocationSite::set_boilerplate(JSObject* object, WriteBarrierMode mode) {
  set_transition_info_or_boilerplate(object, mode);
}

int AllocationSite::transition_info() const {
  DCHECK(!PointsToLiteral());
  return Smi::cast(transition_info_or_boilerplate())->value();
}

void AllocationSite::set_transition_info(int value) {
  DCHECK(!PointsToLiteral());
  // Stores a Smi (no heap pointer), so the write barrier can be skipped.
  set_transition_info_or_boilerplate(Smi::FromInt(value), SKIP_WRITE_BARRIER);
}
bool AllocationSite::HasWeakNext() const {
  // Only sites using the regular allocation_site_map have the trailing
  // weak_next field; other maps presumably denote the weak_next-less
  // layout -- TODO confirm against the map setup in the heap.
  return map() == GetReadOnlyRoots().allocation_site_map();
}

// Resets the site to a pristine state: no boilerplate/transition info,
// initial fast elements kind, no nested sites, cleared pretenuring
// counters, and empty dependent code.
void AllocationSite::Initialize() {
  set_transition_info_or_boilerplate(Smi::kZero);
  SetElementsKind(GetInitialFastElementsKind());
  set_nested_site(Smi::kZero);
  set_pretenure_data(0);
  set_pretenure_create_count(0);
  set_dependent_code(
      DependentCode::cast(GetReadOnlyRoots().empty_weak_fixed_array()),
      SKIP_WRITE_BARRIER);
}
// Predicates over the pretenuring decision stored in pretenure_data.
bool AllocationSite::IsZombie() const {
  return pretenure_decision() == kZombie;
}

bool AllocationSite::IsMaybeTenure() const {
  return pretenure_decision() == kMaybeTenure;
}

bool AllocationSite::PretenuringDecisionMade() const {
  return pretenure_decision() != kUndecided;
}

// Marks the site as a zombie (kept alive only because later traversals of
// new space may still find mementos pointing at it); all other state is
// reset first.
void AllocationSite::MarkZombie() {
  DCHECK(!IsZombie());
  Initialize();
  set_pretenure_decision(kZombie);
}
// Accessors for individual bits of the Smi-encoded transition_info
// bitfield (constructed-Array sites only; see the transition_info()
// DCHECKs above).
ElementsKind AllocationSite::GetElementsKind() const {
  return ElementsKindBits::decode(transition_info());
}

void AllocationSite::SetElementsKind(ElementsKind kind) {
  set_transition_info(ElementsKindBits::update(transition_info(), kind));
}

bool AllocationSite::CanInlineCall() const {
  return DoNotInlineBit::decode(transition_info()) == 0;
}

void AllocationSite::SetDoNotInlineCall() {
  set_transition_info(DoNotInlineBit::update(transition_info(), true));
}
// True if transition_info_or_boilerplate holds a boilerplate object
// rather than a Smi-encoded transition_info bitfield.
bool AllocationSite::PointsToLiteral() const {
  Object* raw_value = transition_info_or_boilerplate();
  // A non-Smi value must be a boilerplate (JSArray or JSObject).
  DCHECK_EQ(!raw_value->IsSmi(),
            raw_value->IsJSArray() || raw_value->IsJSObject());
  return !raw_value->IsSmi();
}

// Heuristic: We only need to create allocation site info if the boilerplate
// elements kind is the initial elements kind.
bool AllocationSite::ShouldTrack(ElementsKind boilerplate_elements_kind) {
  return IsSmiElementsKind(boilerplate_elements_kind);
}

// Whether objects of |type| can carry allocation-site feedback at all.
inline bool AllocationSite::CanTrack(InstanceType type) {
  if (FLAG_allocation_site_pretenuring) {
    // TurboFan doesn't care at all about String pretenuring feedback,
    // so don't bother even trying to track that.
    return type == JS_ARRAY_TYPE || type == JS_OBJECT_TYPE;
  }
  return type == JS_ARRAY_TYPE;
}
// Bitfield accessors over pretenure_data (PretenureDecisionBits and
// DeoptDependentCodeBit are declared in allocation-site.h).
AllocationSite::PretenureDecision AllocationSite::pretenure_decision() const {
  return PretenureDecisionBits::decode(pretenure_data());
}

void AllocationSite::set_pretenure_decision(PretenureDecision decision) {
  int32_t value = pretenure_data();
  set_pretenure_data(PretenureDecisionBits::update(value, decision));
}

// Flag indicating that code depending on this site should be deoptimized.
bool AllocationSite::deopt_dependent_code() const {
  return DeoptDependentCodeBit::decode(pretenure_data());
}

void AllocationSite::set_deopt_dependent_code(bool deopt) {
  int32_t value = pretenure_data();
  set_pretenure_data(DeoptDependentCodeBit::update(value, deopt));
}
// Memento counters feeding the pretenuring heuristics: found_count lives
// inside the pretenure_data bitfield; create_count has its own int32
// field (pretenure_create_count).
int AllocationSite::memento_found_count() const {
  return MementoFoundCountBits::decode(pretenure_data());
}

inline void AllocationSite::set_memento_found_count(int count) {
  int32_t value = pretenure_data();
  // Verify that we can count more mementos than we can possibly find in one
  // new space collection.
  DCHECK((GetHeap()->MaxSemiSpaceSize() /
          (Heap::kMinObjectSizeInWords * kPointerSize +
           AllocationMemento::kSize)) < MementoFoundCountBits::kMax);
  DCHECK_LT(count, MementoFoundCountBits::kMax);
  set_pretenure_data(MementoFoundCountBits::update(value, count));
}

int AllocationSite::memento_create_count() const {
  return pretenure_create_count();
}

void AllocationSite::set_memento_create_count(int count) {
  set_pretenure_create_count(count);
}

// Bumps the found counter by |increment| and reports whether the total has
// reached kPretenureMinimumCreated. Zombie sites are never counted.
bool AllocationSite::IncrementMementoFoundCount(int increment) {
  if (IsZombie()) return false;
  int value = memento_found_count();
  set_memento_found_count(value + increment);
  return memento_found_count() >= kPretenureMinimumCreated;
}

inline void AllocationSite::IncrementMementoCreateCount() {
  // Creation counting is only meaningful with pretenuring enabled.
  DCHECK(FLAG_allocation_site_pretenuring);
  int value = memento_create_count();
  set_memento_create_count(value + 1);
}
// A memento is valid while its allocation_site slot actually holds a
// live (non-zombie) AllocationSite.
bool AllocationMemento::IsValid() const {
  return allocation_site()->IsAllocationSite() &&
         !AllocationSite::cast(allocation_site())->IsZombie();
}

AllocationSite* AllocationMemento::GetAllocationSite() const {
  DCHECK(IsValid());
  return AllocationSite::cast(allocation_site());
}

// Raw field value with no validity check; presumably for callers (e.g.
// heap iteration/debugging) that must not assume a live site -- TODO
// confirm against callers.
Address AllocationMemento::GetAllocationSiteUnchecked() const {
  return reinterpret_cast<Address>(allocation_site());
}
} // namespace internal
} // namespace v8
#include "src/objects/object-macros-undef.h"
#endif // V8_OBJECTS_ALLOCATION_SITE_INL_H_
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_OBJECTS_ALLOCATION_SITE_H_
#define V8_OBJECTS_ALLOCATION_SITE_H_
#include "src/objects.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
namespace v8 {
namespace internal {
// An AllocationSite records feedback about objects created at a particular
// allocation point: elements-kind transition info (or a boilerplate object)
// for literals/constructed arrays, memento-based pretenuring statistics
// gathered by the GC, and dependent code that reacts when the pretenuring
// decision changes.
class AllocationSite : public Struct, public NeverReadOnlySpaceObject {
 public:
  static const uint32_t kMaximumArrayBytesToPretransition = 8 * 1024;
  static const double kPretenureRatio;
  static const int kPretenureMinimumCreated = 100;

  // Values for pretenure decision field.
  enum PretenureDecision {
    kUndecided = 0,
    kDontTenure = 1,
    kMaybeTenure = 2,
    kTenure = 3,
    kZombie = 4,
    kLastPretenureDecisionValue = kZombie
  };

  // Use the mixin methods over the HeapObject methods.
  // TODO(v8:7786) Remove once the HeapObject methods are gone.
  using NeverReadOnlySpaceObject::GetHeap;
  using NeverReadOnlySpaceObject::GetIsolate;

  const char* PretenureDecisionName(PretenureDecision decision);

  // Contains either a Smi-encoded bitfield or a boilerplate. If it's a Smi the
  // AllocationSite is for a constructed Array.
  DECL_ACCESSORS(transition_info_or_boilerplate, Object)
  DECL_ACCESSORS(boilerplate, JSObject)
  DECL_INT_ACCESSORS(transition_info)

  // nested_site threads a list of sites that represent nested literals
  // walked in a particular order. So [[1, 2], 1, 2] will have one
  // nested_site, but [[1, 2], 3, [4]] will have a list of two.
  DECL_ACCESSORS(nested_site, Object)

  // Bitfield containing pretenuring information.
  DECL_INT32_ACCESSORS(pretenure_data)

  DECL_INT32_ACCESSORS(pretenure_create_count)
  DECL_ACCESSORS(dependent_code, DependentCode)

  // heap->allocation_site_list() points to the last AllocationSite which forms
  // a linked list through the weak_next property. The GC might remove elements
  // from the list by updating weak_next.
  DECL_ACCESSORS(weak_next, Object)

  inline void Initialize();

  // Checks whether the allocation site contains the weak_next field.
  inline bool HasWeakNext() const;

  // This method is expensive, it should only be called for reporting.
  bool IsNested();

  // transition_info bitfields, for constructed array transition info.
  class ElementsKindBits : public BitField<ElementsKind, 0, 15> {};
  class UnusedBits : public BitField<int, 15, 14> {};
  class DoNotInlineBit : public BitField<bool, 29, 1> {};

  // Bitfields for pretenure_data.
  class MementoFoundCountBits : public BitField<int, 0, 26> {};
  class PretenureDecisionBits : public BitField<PretenureDecision, 26, 3> {};
  class DeoptDependentCodeBit : public BitField<bool, 29, 1> {};
  STATIC_ASSERT(PretenureDecisionBits::kMax >= kLastPretenureDecisionValue);

  // Increments the mementos found counter and returns true when the first
  // memento was found for a given allocation site.
  inline bool IncrementMementoFoundCount(int increment = 1);

  inline void IncrementMementoCreateCount();

  PretenureFlag GetPretenureMode() const;

  void ResetPretenureDecision();

  inline PretenureDecision pretenure_decision() const;
  inline void set_pretenure_decision(PretenureDecision decision);

  inline bool deopt_dependent_code() const;
  inline void set_deopt_dependent_code(bool deopt);

  inline int memento_found_count() const;
  inline void set_memento_found_count(int count);

  inline int memento_create_count() const;
  inline void set_memento_create_count(int count);

  // The pretenuring decision is made during gc, and the zombie state allows
  // us to recognize when an allocation site is just being kept alive because
  // a later traversal of new space may discover AllocationMementos that point
  // to this AllocationSite.
  inline bool IsZombie() const;
  inline bool IsMaybeTenure() const;
  inline void MarkZombie();

  inline bool MakePretenureDecision(PretenureDecision current_decision,
                                    double ratio, bool maximum_size_scavenge);

  inline bool DigestPretenuringFeedback(bool maximum_size_scavenge);

  inline ElementsKind GetElementsKind() const;
  inline void SetElementsKind(ElementsKind kind);

  inline bool CanInlineCall() const;
  inline void SetDoNotInlineCall();

  inline bool PointsToLiteral() const;

  template <AllocationSiteUpdateMode update_or_check =
                AllocationSiteUpdateMode::kUpdate>
  static bool DigestTransitionFeedback(Handle<AllocationSite> site,
                                       ElementsKind to_kind);

  DECL_PRINTER(AllocationSite)
  DECL_VERIFIER(AllocationSite)

  DECL_CAST(AllocationSite)

  static inline bool ShouldTrack(ElementsKind boilerplate_elements_kind);
  static bool ShouldTrack(ElementsKind from, ElementsKind to);
  static inline bool CanTrack(InstanceType type);

  // Layout description.
  // AllocationSite has to start with kTransitionInfoOrBoilerplateOffset
  // and end with the WeakNext field.
#define ALLOCATION_SITE_FIELDS(V)                     \
  V(kTransitionInfoOrBoilerplateOffset, kPointerSize) \
  V(kNestedSiteOffset, kPointerSize)                  \
  V(kDependentCodeOffset, kPointerSize)               \
  V(kCommonPointerFieldEndOffset, 0)                  \
  V(kPretenureDataOffset, kInt32Size)                 \
  V(kPretenureCreateCountOffset, kInt32Size)          \
  /* Size of AllocationSite without WeakNext field */ \
  V(kSizeWithoutWeakNext, 0)                          \
  V(kWeakNextOffset, kPointerSize)                    \
  /* Size of AllocationSite with WeakNext field */    \
  V(kSizeWithWeakNext, 0)

  DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, ALLOCATION_SITE_FIELDS)

  static const int kStartOffset = HeapObject::kHeaderSize;

  template <bool includeWeakNext>
  class BodyDescriptorImpl;

  // BodyDescriptor is used to traverse all the pointer fields including
  // weak_next
  typedef BodyDescriptorImpl<true> BodyDescriptor;

  // BodyDescriptorWeak is used to traverse all the pointer fields
  // except for weak_next
  typedef BodyDescriptorImpl<false> BodyDescriptorWeak;

 private:
  inline bool PretenuringDecisionMade() const;

  DISALLOW_IMPLICIT_CONSTRUCTORS(AllocationSite);
};
// A small Struct holding a single back-reference to an AllocationSite.
// The GC discovers mementos while traversing new space and uses them to
// feed pretenuring statistics back into the referenced site (see
// AllocationSite::IncrementMementoFoundCount).
class AllocationMemento : public Struct {
 public:
  // Layout: a single tagged pointer field after the standard header.
  static const int kAllocationSiteOffset = HeapObject::kHeaderSize;
  static const int kSize = kAllocationSiteOffset + kPointerSize;

  // The raw site slot. Typed as Object because the slot may no longer
  // hold a live AllocationSite (see IsValid()).
  DECL_ACCESSORS(allocation_site, Object)

  // True iff the slot holds an AllocationSite that is not a zombie.
  inline bool IsValid() const;
  // Checked accessor; requires IsValid().
  inline AllocationSite* GetAllocationSite() const;
  // Unchecked raw address of whatever the site slot holds.
  inline Address GetAllocationSiteUnchecked() const;

  DECL_PRINTER(AllocationMemento)
  DECL_VERIFIER(AllocationMemento)

  DECL_CAST(AllocationMemento)

 private:
  DISALLOW_IMPLICIT_CONSTRUCTORS(AllocationMemento);
};
} // namespace internal
} // namespace v8
#include "src/objects/object-macros-undef.h"
#endif // V8_OBJECTS_ALLOCATION_SITE_H_
......@@ -6,6 +6,7 @@
#define V8_OBJECTS_JS_ARRAY_H_
#include "src/objects.h"
#include "src/objects/allocation-site.h"
#include "src/objects/fixed-array.h"
// Has to be the last include (doesn't have include guards):
......
......@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/allocation-site-scopes.h"
#include "src/allocation-site-scopes-inl.h"
#include "src/arguments-inl.h"
#include "src/ast/ast.h"
#include "src/isolate-inl.h"
......
......@@ -765,7 +765,9 @@ RUNTIME_FUNCTION(Runtime_NewObject) {
DCHECK_EQ(2, args.length());
CONVERT_ARG_HANDLE_CHECKED(JSFunction, target, 0);
CONVERT_ARG_HANDLE_CHECKED(JSReceiver, new_target, 1);
RETURN_RESULT_OR_FAILURE(isolate, JSObject::New(target, new_target));
RETURN_RESULT_OR_FAILURE(
isolate,
JSObject::New(target, new_target, Handle<AllocationSite>::null()));
}
RUNTIME_FUNCTION(Runtime_CompleteInobjectSlackTrackingForMap) {
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment