Commit 6cbcf68d authored by Anton Bikineev, committed by V8 LUCI CQ

cppgc: Avoid IsMarking() calls in the write-barrier

Now that we have all useful flags on the API side, use them.

Bug: chromium:1056170
Change-Id: Ia849b0925a2b2c10ace30b6c2b6871bd3572da31
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3899306
Commit-Queue: Anton Bikineev <bikineev@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#83238}
parent 59c070a2
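
For context, the change hinges on the marking state being readable from header-only code: instead of calling subtle::HeapState::IsMarking(), which reaches into internal heap classes, the barrier now reads a flag cached on the public HeapHandle. Below is a minimal sketch of that mechanism, not the real cppgc headers; the accessor name and the friended policy match the diff that follows, the rest is illustrative.

namespace cppgc {
namespace internal {
class HeapBase;                               // assumed to flip the flag when marking starts/stops
class WriteBarrierTypeForNonCagedHeapPolicy;  // reads the flag in the write barrier
}  // namespace internal

class HeapHandle {
 private:
  // Single inlined load; no call into internal heap state required.
  bool is_incremental_marking_in_progress() const {
    return is_incremental_marking_in_progress_;
  }

  bool is_incremental_marking_in_progress_ = false;

  friend class internal::HeapBase;
  friend class internal::WriteBarrierTypeForNonCagedHeapPolicy;
};

}  // namespace cppgc
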
@@ -12,6 +12,7 @@ namespace cppgc {
 namespace internal {
 class HeapBase;
 class WriteBarrierTypeForCagedHeapPolicy;
+class WriteBarrierTypeForNonCagedHeapPolicy;
 }  // namespace internal

 /**
@@ -34,6 +35,7 @@ class HeapHandle {
   friend class internal::HeapBase;
   friend class internal::WriteBarrierTypeForCagedHeapPolicy;
+  friend class internal::WriteBarrierTypeForNonCagedHeapPolicy;
 };

 }  // namespace cppgc
......
@@ -12,6 +12,7 @@
 #include "cppgc/heap-state.h"
 #include "cppgc/internal/api-constants.h"
 #include "cppgc/internal/atomic-entry-flag.h"
+#include "cppgc/internal/base-page-handle.h"
 #include "cppgc/internal/member-storage.h"
 #include "cppgc/platform.h"
 #include "cppgc/sentinel-pointer.h"
@@ -283,7 +284,7 @@ struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
       return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
     }
 #else  // !defined(CPPGC_YOUNG_GENERATION)
-    if (V8_UNLIKELY(!subtle::HeapState::IsMarking(handle))) {
+    if (V8_UNLIKELY(!handle.is_incremental_marking_in_progress())) {
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
     }
 #endif  // !defined(CPPGC_YOUNG_GENERATION)
@@ -326,11 +327,6 @@ class V8_EXPORT WriteBarrierTypeForNonCagedHeapPolicy final {
   template <WriteBarrier::ValueMode value_mode>
   struct ValueModeDispatch;

-  // TODO(chromium:1056170): Create fast path on API.
-  static bool IsMarking(const void*, HeapHandle**);
-  // TODO(chromium:1056170): Create fast path on API.
-  static bool IsMarking(HeapHandle&);

   WriteBarrierTypeForNonCagedHeapPolicy() = delete;
 };
@@ -348,7 +344,13 @@ struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
     if (V8_LIKELY(!WriteBarrier::IsEnabled())) {
       return SetAndReturnType<WriteBarrier::Type::kNone>(params);
     }
-    if (IsMarking(object, &params.heap)) {
+    // We know that |object| is within the normal page or in the beginning of a
+    // large page, so extract the page header by bitmasking.
+    BasePageHandle* page =
+        BasePageHandle::FromPayload(const_cast<void*>(object));
+
+    HeapHandle& heap_handle = page->heap_handle();
+    if (V8_LIKELY(heap_handle.is_incremental_marking_in_progress())) {
       return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
     }
     return SetAndReturnType<WriteBarrier::Type::kNone>(params);
@@ -364,7 +366,7 @@ struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
                                           HeapHandleCallback callback) {
     if (V8_UNLIKELY(WriteBarrier::IsEnabled())) {
       HeapHandle& handle = callback();
-      if (IsMarking(handle)) {
+      if (V8_LIKELY(handle.is_incremental_marking_in_progress())) {
         params.heap = &handle;
         return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
       }
......
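
The new non-caged fast path above leans on the invariant spelled out in the added comment: an object pointer always lies within a normal page or at the start of a large page, so the page header (and from it the HeapHandle) can be recovered by masking off the low bits of the address. A standalone sketch of that bitmasking technique follows; kPageSize, PageHeader, and PageFromPayload are assumed names for illustration, not the real BasePageHandle API.

#include <cstdint>

namespace example {

// Assumed page granularity; cppgc uses a fixed power-of-two page size.
constexpr uintptr_t kPageSize = uintptr_t{1} << 17;

struct HeapHandle {
  bool is_incremental_marking_in_progress = false;
};

// Every page starts with a header that knows its heap; payload objects follow.
struct PageHeader {
  HeapHandle* heap_handle = nullptr;
};

// Mask the payload address down to the page boundary to find the header.
inline PageHeader* PageFromPayload(const void* payload) {
  const uintptr_t address = reinterpret_cast<uintptr_t>(payload);
  return reinterpret_cast<PageHeader*>(address & ~(kPageSize - 1));
}

}  // namespace example

Because this is a pure address computation plus one flag load, the whole barrier-type decision can stay inline in the header, which is what allows the out-of-line IsMarking() helpers to be deleted from write-barrier.cc below.
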
@@ -187,24 +187,6 @@ void WriteBarrier::CheckParams(Type expected_type, const Params& params) {
 }
 #endif  // V8_ENABLE_CHECKS

-// static
-bool WriteBarrierTypeForNonCagedHeapPolicy::IsMarking(const void* object,
-                                                      HeapHandle** handle) {
-  // Large objects cannot have mixins, so we are guaranteed to always have
-  // a pointer on the same page.
-  const auto* page = BasePage::FromPayload(object);
-  *handle = &page->heap();
-  const MarkerBase* marker = page->heap().marker();
-  return marker && marker->IsMarking();
-}
-
-// static
-bool WriteBarrierTypeForNonCagedHeapPolicy::IsMarking(HeapHandle& heap_handle) {
-  const auto& heap_base = internal::HeapBase::From(heap_handle);
-  const MarkerBase* marker = heap_base.marker();
-  return marker && marker->IsMarking();
-}
-
 #if defined(CPPGC_YOUNG_GENERATION)
 // static
......