Commit ebd9dcda authored by Santiago Aboy Solanes, committed by Commit Bot

[compiler] Perform MapRef's SupportsFastArray methods concurrently

It is safe to walk native_contexts_list_ concurrently because we only
do so via IsAnyInitialArrayPrototype, which disallows GC. Furthermore,
we read the list with an acquire load, which guarantees that its
fields have been initialized.
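For illustration, a minimal, self-contained C++ sketch of the release/acquire
publication pattern this reasoning relies on (this is not V8 code; Context,
context_list_head, Publisher, and Reader are made-up names): a writer fully
initializes an object and then publishes it with a release store, and a
concurrent reader that observes the pointer via an acquire load is guaranteed
to also observe the initialized fields.

// publication_sketch.cc -- illustrative only, not part of this change.
#include <atomic>
#include <cassert>
#include <thread>

struct Context {
  int field_a = 0;
  int field_b = 0;
};

// Stand-in for a list head such as native_contexts_list_.
std::atomic<Context*> context_list_head{nullptr};

void Publisher() {
  Context* ctx = new Context();
  ctx->field_a = 1;  // Initialize the fields first...
  ctx->field_b = 2;
  // ...then publish with release semantics (compare set_native_contexts_list).
  context_list_head.store(ctx, std::memory_order_release);
}

void Reader() {
  // The acquire load pairs with the release store: if the reader sees a
  // non-null pointer, the field writes that happened before the store are
  // visible as well.
  Context* ctx = context_list_head.load(std::memory_order_acquire);
  if (ctx != nullptr) {
    assert(ctx->field_a == 1 && ctx->field_b == 2);
  }
}

int main() {
  std::thread t1(Publisher);
  std::thread t2(Reader);
  t1.join();
  t2.join();
  delete context_list_head.load(std::memory_order_relaxed);
}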

Bug: v8:7790
Change-Id: I778d51f4ead44e472f842693a7e9ff577d6acfe3
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2826541
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Georg Neis <neis@chromium.org>
Commit-Queue: Santiago Aboy Solanes <solanes@chromium.org>
Cr-Commit-Position: refs/heads/master@{#74086}
parent 8003bbbe
@@ -2880,14 +2880,14 @@ bool MapRef::HasOnlyStablePrototypesWithFastElements(
 }

 bool MapRef::supports_fast_array_iteration() const {
-  if (data_->should_access_heap()) {
+  if (data_->should_access_heap() || FLAG_turbo_direct_heap_access) {
     return SupportsFastArrayIteration(broker()->isolate(), object());
   }
   return data()->AsMap()->supports_fast_array_iteration();
 }

 bool MapRef::supports_fast_array_resize() const {
-  if (data_->should_access_heap()) {
+  if (data_->should_access_heap() || FLAG_turbo_direct_heap_access) {
     return SupportsFastArrayResize(broker()->isolate(), object());
   }
   return data()->AsMap()->supports_fast_array_resize();
...
@@ -569,9 +569,12 @@ class Heap {
   V8_EXPORT_PRIVATE int NotifyContextDisposed(bool dependant_context);

   void set_native_contexts_list(Object object) {
-    native_contexts_list_ = object;
+    native_contexts_list_.store(object, std::memory_order_release);
+  }
+
+  Object native_contexts_list() const {
+    return native_contexts_list_.load(std::memory_order_acquire);
   }
-  Object native_contexts_list() const { return native_contexts_list_; }

   void set_allocation_sites_list(Object object) {
     allocation_sites_list_ = object;
@@ -2200,7 +2203,7 @@ class Heap {
   // Weak list heads, threaded through the objects.
   // List heads are initialized lazily and contain the undefined_value at start.
-  Object native_contexts_list_;
+  std::atomic<Object> native_contexts_list_;
   Object allocation_sites_list_;
   Object dirty_js_finalization_registries_list_;
   // Weak list tails.
...