Commit 24ff68e8 authored by Jakob Gruber, committed by V8 LUCI CQ

Reland "[compiler] Consider IsPendingAllocation in Ref construction"

This is the second reland of 4683d6fe

Initial CL:   crrev.com/c/2874663
First reland: crrev.com/c/2886861

The first reland fixes Ref construction failures in:
- MapRef::instance_descriptors
- NativeContext reads (see also crrev.com/c/2891575)

The second reland (this CL):
- Adds required infrastructure (e.g. kAssumeMemoryFence) but
  without enabling the IsPendingAllocation check. Enabling the check
  will be done separately to avoid further revert chains.
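
A condensed sketch of that gating, paraphrased from the js-heap-broker.cc
hunk further down (not verbatim; see the diff for the exact code):

  // Inside JSHeapBroker::TryGetOrCreateData, after the Smi fast path.
  // The new check stays dormant unless
  // --turbo-concurrent-inlining-check-ispendingallocation is passed
  // (the flag defaults to false in this CL).
  if (FLAG_turbo_concurrent_inlining_check_ispendingallocation) {
    // Callers that read the object through an acquire load pass
    // kAssumeMemoryFence and skip the IsPendingAllocation-based check.
    if ((flags & kAssumeMemoryFence) == 0 &&
        ObjectMayBeUninitialized(HeapObject::cast(*object))) {
      TRACE_BROKER_MISSING(this, "Object may be uninitialized " << *object);
      CHECK_WITH_MSG(!crash_on_error, "Ref construction failed");
      return nullptr;  // Fail softly unless kCrashOnError was requested.
    }
  }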

Original change's description:
> [compiler] Consider IsPendingAllocation in Ref construction
>
> The logic in JSHeapBroker::TryGetOrCreateData assumes that parts
> of the object are safe to read. In particular, the instance type
> must be readable for the chain of `Is##Name()` type checks.
>
> This is guaranteed if
>
>  - a global memory fence happened after object initialization and
>    prior to the read by the compiler; or
>  - the object was published through a release store and read through
>    an acquire read.
>
> The former is protected by the new call to ObjectMayBeUninitialized
> (which internally calls IsPendingAllocation) in TryGetOrCreateData.
>
> The latter must be marked explicitly by calling the new
> MakeRefAssumeMemoryFence variant.
>
> Note that support in this CL is expected to be incomplete and will
> have to be extended in the future as more cases show up in which
> MakeRef calls must be converted to MakeRefAssumeMemoryFence or to
> TryMakeRef.
>
> Bug: v8:7790,v8:11711
> Change-Id: Ic2f7d9fc46e4bfc3f6bbe42816f73fc5ec174337
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2874663
> Commit-Queue: Jakob Gruber <jgruber@chromium.org>
> Reviewed-by: Georg Neis <neis@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#74474}
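
To make the ordering argument above concrete, the release/acquire case can be
illustrated with a small stand-alone C++ sketch (plain std::atomic rather than
V8's heap and tagged pointers; Thing, slot, Publisher and Reader are
illustrative names, not V8 code). Seeing the pointer via an acquire load
guarantees that the initialized fields are also visible, which is the
situation MakeRefAssumeMemoryFence asserts; without that pairing, the broker
must either rely on a global fence or treat the object as possibly
uninitialized, which is what the IsPendingAllocation check approximates.

  #include <atomic>
  #include <cstdio>
  #include <thread>

  struct Thing {
    int instance_type;  // Stands in for the header word the broker must read.
  };

  std::atomic<Thing*> slot{nullptr};

  void Publisher() {
    Thing* t = new Thing{42};                  // Initialize all fields first,
    slot.store(t, std::memory_order_release);  // then publish with release.
  }

  void Reader() {
    // Acquire pairs with the release store: if the pointer is visible, so are
    // the initialized fields (the guarantee kAssumeMemoryFence encodes).
    Thing* t = slot.load(std::memory_order_acquire);
    if (t != nullptr) std::printf("%d\n", t->instance_type);
    // A relaxed load here could observe the pointer before the field write,
    // which is the hazard the IsPendingAllocation check guards against.
  }

  int main() {
    std::thread writer(Publisher);
    std::thread reader(Reader);
    writer.join();
    reader.join();
    delete slot.load(std::memory_order_relaxed);
  }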

Bug: v8:7790,v8:11711,chromium:1207680,chromium:1207679
Change-Id: I123b2962df724a13dd2c7334ae949234bc3bf27a
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2902738
Reviewed-by: Georg Neis <neis@chromium.org>
Commit-Queue: Jakob Gruber <jgruber@chromium.org>
Cr-Commit-Position: refs/heads/master@{#74638}
parent 98ba4acc
@@ -70,8 +70,11 @@ enum ObjectDataKind {
 
 namespace {
 
-bool IsReadOnlyHeapObject(Object object) {
+bool IsReadOnlyHeapObjectForCompiler(HeapObject object) {
   DisallowGarbageCollection no_gc;
+  // TODO(jgruber): Remove this compiler-specific predicate and use the plain
+  // heap predicate instead. This would involve removing the special cases for
+  // builtins.
   return (object.IsCode() && Code::cast(object).is_builtin()) ||
          (object.IsHeapObject() &&
           ReadOnlyHeap::Contains(HeapObject::cast(object)));
@@ -113,7 +116,8 @@ class ObjectData : public ZoneObject {
           kind == kNeverSerializedHeapObject ||
           kind == kBackgroundSerializedHeapObject);
     CHECK_IMPLIES(kind == kUnserializedReadOnlyHeapObject,
-                  IsReadOnlyHeapObject(*object));
+                  object->IsHeapObject() && IsReadOnlyHeapObjectForCompiler(
+                                                HeapObject::cast(*object)));
   }
 
 #define DECLARE_IS(Name, ...) bool Is##Name() const;
@@ -334,7 +338,7 @@ bool PropertyCellData::Serialize(JSHeapBroker* broker) {
     }
   }
 
-  ObjectData* value_data = broker->TryGetOrCreateData(value, false);
+  ObjectData* value_data = broker->TryGetOrCreateData(value);
   if (value_data == nullptr) {
     DCHECK(!broker->IsMainThread());
     return false;
@@ -2141,7 +2145,7 @@ base::Optional<PropertyCellRef> GetPropertyCellFromHeap(JSHeapBroker* broker,
   it.TryLookupCachedProperty();
   if (it.state() == LookupIterator::DATA &&
       it.GetHolder<JSObject>()->IsJSGlobalObject()) {
-    return MakeRef(broker, it.GetPropertyCell());
+    return TryMakeRef(broker, it.GetPropertyCell());
   }
   return base::nullopt;
 }
@@ -2775,13 +2779,13 @@ void JSHeapBroker::ClearReconstructibleData() {
 }
 
 ObjectData* JSHeapBroker::TryGetOrCreateData(Handle<Object> object,
-                                             bool crash_on_error) {
+                                             GetOrCreateDataFlags flags) {
   RefsMap::Entry* entry = refs_->Lookup(object.address());
   if (entry != nullptr) return entry->value;
 
   if (mode() == JSHeapBroker::kDisabled) {
     entry = refs_->LookupOrInsert(object.address());
-    ObjectData** storage = &(entry->value);
+    ObjectData** storage = &entry->value;
     if (*storage == nullptr) {
       entry->value = zone()->New<ObjectData>(
           this, storage, object,
@@ -2796,23 +2800,43 @@ ObjectData* JSHeapBroker::TryGetOrCreateData(Handle<Object> object,
   ObjectData* object_data;
   if (object->IsSmi()) {
     entry = refs_->LookupOrInsert(object.address());
-    object_data = zone()->New<ObjectData>(this, &(entry->value), object, kSmi);
-  } else if (IsReadOnlyHeapObject(*object)) {
+    return zone()->New<ObjectData>(this, &entry->value, object, kSmi);
+  }
+
+  DCHECK(!object->IsSmi());
+
+  const bool crash_on_error = (flags & kCrashOnError) != 0;
+
+  // TODO(jgruber): Remove this flag check (and the flag) once TSAN failures
+  // are fixed.
+  // See also: crbug.com/v8/11779
+  if (FLAG_turbo_concurrent_inlining_check_ispendingallocation) {
+    if ((flags & kAssumeMemoryFence) == 0 &&
+        ObjectMayBeUninitialized(HeapObject::cast(*object))) {
+      TRACE_BROKER_MISSING(this, "Object may be uninitialized " << *object);
+      CHECK_WITH_MSG(!crash_on_error, "Ref construction failed");
+      return nullptr;
+    }
+  }
+
+  if (IsReadOnlyHeapObjectForCompiler(HeapObject::cast(*object))) {
     entry = refs_->LookupOrInsert(object.address());
-    object_data = zone()->New<ObjectData>(this, &(entry->value), object,
-                                          kUnserializedReadOnlyHeapObject);
+    return zone()->New<ObjectData>(this, &entry->value, object,
+                                   kUnserializedReadOnlyHeapObject);
+  }
+
 #define CREATE_DATA(Name, Kind)                                    \
-  }                                                                \
-  /* NOLINTNEXTLINE(readability/braces) */                         \
-  else if (object->Is##Name()) {                                   \
+  if (object->Is##Name()) {                                        \
     CreateDataFunctor<Kind, Name##Data, Name> f;                   \
     if (!f(this, refs_, object, &entry, &object_data)) {           \
-      CHECK(!crash_on_error);                                      \
+      CHECK_WITH_MSG(!crash_on_error, "Ref construction failed");  \
       return nullptr;                                              \
-    }
+    }                                                              \
+    /* NOLINTNEXTLINE(readability/braces) */                       \
+  } else
   HEAP_BROKER_OBJECT_LIST(CREATE_DATA)
 #undef CREATE_DATA
-  } else {
+  {
     UNREACHABLE();
   }
 
   // At this point the entry pointer is not guaranteed to be valid as
@@ -3350,7 +3374,7 @@ base::Optional<CallHandlerInfoRef> FunctionTemplateInfoRef::call_code() const {
   if (data_->should_access_heap()) {
     HeapObject call_code = object()->call_code(kAcquireLoad);
     if (call_code.IsUndefined()) return base::nullopt;
-    return MakeRef(broker(), CallHandlerInfo::cast(call_code));
+    return TryMakeRef(broker(), CallHandlerInfo::cast(call_code));
   }
   ObjectData* call_code = data()->AsFunctionTemplateInfo()->call_code();
   if (!call_code) return base::nullopt;
@@ -3522,8 +3546,9 @@ base::Optional<ObjectRef> MapRef::GetStrongValue(
 
 DescriptorArrayRef MapRef::instance_descriptors() const {
   if (data_->should_access_heap() || broker()->is_concurrent_inlining()) {
-    return MakeRef(broker(), object()->instance_descriptors(broker()->isolate(),
-                                                            kRelaxedLoad));
+    return MakeRefAssumeMemoryFence(
+        broker(),
+        object()->instance_descriptors(broker()->isolate(), kAcquireLoad));
   }
   return DescriptorArrayRef(broker(), data()->AsMap()->instance_descriptors());
@@ -3693,7 +3718,8 @@ bool NativeContextRef::is_unserialized_heap_object() const {
 
 ScopeInfoRef NativeContextRef::scope_info() const {
   if (data_->should_access_heap()) {
-    return MakeRef(broker(), object()->scope_info());
+    // The scope_info is immutable after initialization.
+    return MakeRefAssumeMemoryFence(broker(), object()->scope_info());
   }
   return ScopeInfoRef(broker(), data()->AsNativeContext()->scope_info());
 }
@@ -3711,7 +3737,10 @@ MapRef NativeContextRef::GetFunctionMapFromIndex(int index) const {
   DCHECK_GE(index, Context::FIRST_FUNCTION_MAP_INDEX);
   DCHECK_LE(index, Context::LAST_FUNCTION_MAP_INDEX);
   if (data_->should_access_heap()) {
-    return get(index).value().AsMap();
+    CHECK_LT(index, object()->length());
+    return MakeRefAssumeMemoryFence(broker(),
+                                    object()->get(index, kAcquireLoad))
+        .AsMap();
   }
   return MapRef(broker(), data()->AsNativeContext()->function_maps().at(
                               index - Context::FIRST_FUNCTION_MAP_INDEX));
@@ -3951,7 +3980,7 @@ base::Optional<ObjectRef> JSArrayRef::GetOwnCowElement(
 
 base::Optional<CellRef> SourceTextModuleRef::GetCell(int cell_index) const {
   if (data_->should_access_heap()) {
-    return MakeRef(broker(), object()->GetCell(cell_index));
+    return TryMakeRef(broker(), object()->GetCell(cell_index));
   }
   ObjectData* cell =
       data()->AsSourceTextModule()->GetCell(broker(), cell_index);
@@ -3971,12 +4000,7 @@ ObjectRef::ObjectRef(JSHeapBroker* broker, Handle<Object> object,
                      bool check_type)
     : broker_(broker) {
   CHECK_NE(broker->mode(), JSHeapBroker::kRetired);
   data_ = broker->GetOrCreateData(object);
-  if (!data_) {  // TODO(mslekova): Remove once we're on the background thread.
-    object->Print();
-  }
-  CHECK_WITH_MSG(data_ != nullptr, "Object is not known to the heap broker");
 }
 
 namespace {
@@ -4023,9 +4047,10 @@ HeapObjectType HeapObjectRef::GetHeapObjectType() const {
   if (map().is_callable()) flags |= HeapObjectType::kCallable;
   return HeapObjectType(map().instance_type(), flags, map().oddball_type());
 }
 
 base::Optional<JSObjectRef> AllocationSiteRef::boilerplate() const {
   if (data_->should_access_heap()) {
-    return MakeRef(broker(), object()->boilerplate(kAcquireLoad));
+    return TryMakeRef(broker(), object()->boilerplate(kAcquireLoad));
   }
   ObjectData* boilerplate = data()->AsAllocationSite()->boilerplate();
   if (boilerplate) {
@@ -4041,7 +4066,7 @@ ElementsKind JSObjectRef::GetElementsKind() const {
 
 base::Optional<FixedArrayBaseRef> JSObjectRef::elements() const {
   if (data_->should_access_heap()) {
-    return MakeRef(broker(), object()->elements());
+    return TryMakeRef(broker(), object()->elements());
  }
   const JSObjectData* d = data()->AsJSObject();
   if (!d->serialized_elements()) {
@@ -4246,7 +4271,8 @@ void NativeContextData::SerializeOnBackground(JSHeapBroker* broker) {
 #define SERIALIZE_MEMBER(type, name)                                 \
   DCHECK_NULL(name##_);                                              \
-  name##_ = broker->GetOrCreateData(context->name(kAcquireLoad));    \
+  name##_ = broker->GetOrCreateData(context->name(kAcquireLoad),     \
+                                    kAssumeMemoryFence);             \
   if (!name##_->should_access_heap()) {                              \
     DCHECK(!name##_->IsJSFunction());                                \
   }
@@ -4261,8 +4287,8 @@ void NativeContextData::SerializeOnBackground(JSHeapBroker* broker) {
   int const last = Context::LAST_FUNCTION_MAP_INDEX;
   function_maps_.reserve(last + 1 - first);
   for (int i = first; i <= last; ++i) {
-    function_maps_.push_back(
-        broker->GetOrCreateData(context->get(i, kAcquireLoad)));
+    function_maps_.push_back(broker->GetOrCreateData(
+        context->get(i, kAcquireLoad), kAssumeMemoryFence));
   }
 }
@@ -4299,7 +4325,7 @@ bool JSFunctionRef::serialized_code_and_feedback() const {
 
 CodeRef JSFunctionRef::code() const {
   if (data_->should_access_heap() || broker()->is_concurrent_inlining()) {
-    return MakeRef(broker(), object()->code(kAcquireLoad));
+    return MakeRefAssumeMemoryFence(broker(), object()->code(kAcquireLoad));
   }
   return CodeRef(broker(), ObjectRef::data()->AsJSFunction()->code());
......
@@ -224,18 +224,20 @@ bool JSHeapBroker::IsArrayOrObjectPrototype(Handle<JSObject> object) const {
 }
 
 ObjectData* JSHeapBroker::TryGetOrCreateData(Object object,
-                                             bool crash_on_error) {
-  return TryGetOrCreateData(CanonicalPersistentHandle(object), crash_on_error);
+                                             GetOrCreateDataFlags flags) {
+  return TryGetOrCreateData(CanonicalPersistentHandle(object), flags);
 }
 
-ObjectData* JSHeapBroker::GetOrCreateData(Handle<Object> object) {
-  ObjectData* return_value = TryGetOrCreateData(object, true);
+ObjectData* JSHeapBroker::GetOrCreateData(Handle<Object> object,
+                                          GetOrCreateDataFlags flags) {
+  ObjectData* return_value = TryGetOrCreateData(object, flags | kCrashOnError);
   DCHECK_NOT_NULL(return_value);
   return return_value;
 }
 
-ObjectData* JSHeapBroker::GetOrCreateData(Object object) {
-  return GetOrCreateData(CanonicalPersistentHandle(object));
+ObjectData* JSHeapBroker::GetOrCreateData(Object object,
+                                          GetOrCreateDataFlags flags) {
+  return GetOrCreateData(CanonicalPersistentHandle(object), flags);
 }
 
 bool JSHeapBroker::StackHasOverflowed() const {
@@ -247,8 +249,12 @@ bool JSHeapBroker::StackHasOverflowed() const {
 }
 
 bool JSHeapBroker::ObjectMayBeUninitialized(Handle<Object> object) const {
-  return !IsMainThread() && object->IsHeapObject() &&
-         isolate()->heap()->IsPendingAllocation(HeapObject::cast(*object));
+  if (!object->IsHeapObject()) return false;
+  return ObjectMayBeUninitialized(HeapObject::cast(*object));
+}
+
+bool JSHeapBroker::ObjectMayBeUninitialized(HeapObject object) const {
+  return !IsMainThread() && isolate()->heap()->IsPendingAllocation(object);
 }
 
 bool CanInlineElementAccess(MapRef const& map) {
......
@@ -80,6 +80,18 @@ struct PropertyAccessTarget {
   };
 };
 
+enum GetOrCreateDataFlag {
+  // If set, a failure to create the data object results in a crash.
+  kCrashOnError = 1 << 0,
+  // If set, data construction assumes that the given object is protected by
+  // a memory fence (e.g. acquire-release) and thus fields required for
+  // construction (like Object::map) are safe to read. The protection can
+  // extend to some other situations as well.
+  kAssumeMemoryFence = 1 << 1,
+};
+using GetOrCreateDataFlags = base::Flags<GetOrCreateDataFlag>;
+DEFINE_OPERATORS_FOR_FLAGS(GetOrCreateDataFlags)
+
 class V8_EXPORT_PRIVATE JSHeapBroker {
  public:
   JSHeapBroker(Isolate* isolate, Zone* broker_zone, bool tracing_enabled,
@@ -152,14 +164,16 @@ class V8_EXPORT_PRIVATE JSHeapBroker {
   Handle<Object> GetRootHandle(Object object);
 
   // Never returns nullptr.
-  ObjectData* GetOrCreateData(Handle<Object>);
-  // Like the previous but wraps argument in handle first (for convenience).
-  ObjectData* GetOrCreateData(Object);
+  ObjectData* GetOrCreateData(Handle<Object> object,
+                              GetOrCreateDataFlags flags = {});
+  ObjectData* GetOrCreateData(Object object, GetOrCreateDataFlags flags = {});
 
   // Gets data only if we have it. However, thin wrappers will be created for
   // smis, read-only objects and never-serialized objects.
-  ObjectData* TryGetOrCreateData(Handle<Object>, bool crash_on_error = false);
-  ObjectData* TryGetOrCreateData(Object object, bool crash_on_error = false);
+  ObjectData* TryGetOrCreateData(Handle<Object> object,
+                                 GetOrCreateDataFlags flags = {});
+  ObjectData* TryGetOrCreateData(Object object,
+                                 GetOrCreateDataFlags flags = {});
 
   // Check if {object} is any native context's %ArrayPrototype% or
   // %ObjectPrototype%.
@@ -376,6 +390,7 @@ class V8_EXPORT_PRIVATE JSHeapBroker {
   // thus safe to read from a memory safety perspective. The converse does not
   // necessarily hold.
   bool ObjectMayBeUninitialized(Handle<Object> object) const;
+  bool ObjectMayBeUninitialized(HeapObject object) const;
 
   bool CanUseFeedback(const FeedbackNexus& nexus) const;
   const ProcessedFeedback& NewInsufficientFeedback(FeedbackSlotKind kind) const;
@@ -564,8 +579,8 @@ class V8_NODISCARD UnparkedScopeIfNeeded {
 template <class T,
           typename = std::enable_if_t<std::is_convertible<T*, Object*>::value>>
 base::Optional<typename ref_traits<T>::ref_type> TryMakeRef(
-    JSHeapBroker* broker, T object) {
-  ObjectData* data = broker->TryGetOrCreateData(object);
+    JSHeapBroker* broker, T object, GetOrCreateDataFlags flags = {}) {
+  ObjectData* data = broker->TryGetOrCreateData(object, flags);
   if (data == nullptr) {
     TRACE_BROKER_MISSING(broker, "ObjectData for " << Brief(object));
     return {};
@@ -576,8 +591,8 @@ base::Optional<typename ref_traits<T>::ref_type> TryMakeRef(
 template <class T,
           typename = std::enable_if_t<std::is_convertible<T*, Object*>::value>>
 base::Optional<typename ref_traits<T>::ref_type> TryMakeRef(
-    JSHeapBroker* broker, Handle<T> object) {
-  ObjectData* data = broker->TryGetOrCreateData(object);
+    JSHeapBroker* broker, Handle<T> object, GetOrCreateDataFlags flags = {}) {
+  ObjectData* data = broker->TryGetOrCreateData(object, flags);
   if (data == nullptr) {
     TRACE_BROKER_MISSING(broker, "ObjectData for " << Brief(*object));
     return {};
@@ -598,6 +613,20 @@ typename ref_traits<T>::ref_type MakeRef(JSHeapBroker* broker,
   return TryMakeRef(broker, object).value();
 }
 
+template <class T,
+          typename = std::enable_if_t<std::is_convertible<T*, Object*>::value>>
+typename ref_traits<T>::ref_type MakeRefAssumeMemoryFence(JSHeapBroker* broker,
+                                                          T object) {
+  return TryMakeRef(broker, object, kAssumeMemoryFence).value();
+}
+
+template <class T,
+          typename = std::enable_if_t<std::is_convertible<T*, Object*>::value>>
+typename ref_traits<T>::ref_type MakeRefAssumeMemoryFence(JSHeapBroker* broker,
+                                                          Handle<T> object) {
+  return TryMakeRef(broker, object, kAssumeMemoryFence).value();
+}
+
 }  // namespace compiler
 }  // namespace internal
 }  // namespace v8
......
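
The GetOrCreateDataFlags type added above is a base::Flags wrapper, so call
sites combine flags with "|" and pass "{}" for none, as in
TryGetOrCreateData(object, flags | kCrashOnError) earlier in this CL. A
minimal stand-alone sketch of the same pattern using a plain enum and bitmask
(illustrative only; this is not the real base::Flags template or broker API):

  #include <cstdint>
  #include <cstdio>

  // Hypothetical mirror of the flag enum above, backed by a plain bitmask.
  enum GetOrCreateDataFlag : uint32_t {
    kCrashOnError = 1 << 0,
    kAssumeMemoryFence = 1 << 1,
  };
  using GetOrCreateDataFlags = uint32_t;

  // Stand-in for TryGetOrCreateData: reports which behaviors were requested.
  void TryGetOrCreateData(GetOrCreateDataFlags flags = {}) {
    const bool crash_on_error = (flags & kCrashOnError) != 0;
    const bool assume_fence = (flags & kAssumeMemoryFence) != 0;
    std::printf("crash_on_error=%d assume_fence=%d\n", crash_on_error,
                assume_fence);
  }

  int main() {
    TryGetOrCreateData();                                    // no flags
    TryGetOrCreateData(kAssumeMemoryFence);                  // fence assumed
    TryGetOrCreateData(kAssumeMemoryFence | kCrashOnError);  // GetOrCreateData
  }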
@@ -683,6 +683,9 @@ DEFINE_WEAK_VALUE_IMPLICATION(stress_concurrent_inlining, interrupt_budget,
 DEFINE_BOOL(
     turbo_concurrent_get_property_access_info, false,
     "concurrently call GetPropertyAccessInfo (only with --concurrent-inlining)")
+DEFINE_BOOL(turbo_concurrent_inlining_check_ispendingallocation, false,
+            "when --concurrent-inlining is enabled, check IsPendingAllocation "
+            "in Ref construction")
 DEFINE_INT(max_serializer_nesting, 25,
            "maximum levels for nesting child serializers")
 DEFINE_WEAK_IMPLICATION(future, concurrent_inlining)
......
@@ -728,6 +728,7 @@
   'test-heap/LeakNativeContextVia*': [PASS, FAIL],
   'test-heap/NewSpaceObjectsInOptimizedCode': [PASS, FAIL],
   'test-heap/ObjectsInEagerlyDeoptimizedCodeAreWeak': [PASS, FAIL],
+  'test-heap/ObjectsInOptimizedCodeAreWeak': [PASS, FAIL],
 }],  # variant == stress_concurrent_inlining
 
 ################################################################################
......
// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// Flags: --expose-gc --interrupt-budget=1000 --no-lazy-feedback-allocation
var __v_5;
function __v_1() {
var PI = {
get() {}
};
function __v_5() {
Object.defineProperty(PI, 'func', {
});
'𝌆'.match();
}
__v_5(...[__v_5]);
try {
__v_1();
} catch (PI) {}
}
__v_1();
gc();
__v_1();