Commit 076687ab authored by Leszek Swirski, committed by Commit Bot

[asserts] Add combination assert scopes

Add a "combination" assert scope class, which combines multiple existing
assert scopes.  This will allow scopes with functional overlap, e.g.
DisallowGarbageCollection and DisallowHeapAllocation, to share an assert
type rather than requiring users to remember to set both. To
demonstrate this, this redefines DisallowGarbageCollection to a
combination of DisallowHeapAllocation and a new DisallowSafepoints, and
some of the DCHECKs checking both are simplified to only check one or
the other, as appropriate.

The combination classes become subclasses of the existing assert scopes,
so that they can be used in their place as e.g. a function parameter,
e.g. DisallowGarbageCollection can be passed to a function expecting
const DisallowHeapAllocation&.

As a drive-by, this also changes the per-thread assert scopes to use a
bitmask, rather than a bool array, to store their per-thread data. The
per-isolate scopes already used a bitmask, so this unifies the
behaviour between the two.

Change-Id: I209e0a56f45e124c0ccadbd9fb77f39e070612fe
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2534814
Commit-Queue: Leszek Swirski <leszeks@chromium.org>
Reviewed-by: Igor Sheludko <ishell@chromium.org>
Reviewed-by: Georg Neis <neis@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#71231}
parent 42ae248f
......@@ -1670,7 +1670,6 @@ bool Compiler::CollectSourcePositions(Isolate* isolate,
// Collecting source positions requires allocating a new source position
// table.
DCHECK(AllowHeapAllocation::IsAllowed());
DCHECK(AllowGarbageCollection::IsAllowed());
Handle<BytecodeArray> bytecode =
handle(shared_info->GetBytecodeArray(), isolate);
......
......@@ -4,6 +4,7 @@
#include "src/common/assert-scope.h"
#include "src/base/bit-field.h"
#include "src/base/lazy-instance.h"
#include "src/base/platform/platform.h"
#include "src/execution/isolate.h"
......@@ -14,96 +15,47 @@ namespace internal {
namespace {
// Lazily-created TLS key under which the heap-allocated per-thread assert
// data is stored (used by the removed, pointer-based implementation below).
DEFINE_LAZY_LEAKY_OBJECT_GETTER(base::Thread::LocalStorageKey,
GetPerThreadAssertKey,
base::Thread::CreateThreadLocalKey())
// One bit of the per-thread assert bitmask, indexed by assert type.
template <PerThreadAssertType kType>
using PerThreadDataBit = base::BitField<bool, kType, 1>;
// One bit of the per-isolate assert bitmask, indexed by assert type.
template <PerIsolateAssertType kType>
using PerIsolateDataBit = base::BitField<bool, kType, 1>;
} // namespace
// Thread-local storage for assert data. Default all asserts to "allow".
thread_local uint32_t current_per_thread_assert_data(~0);
// Heap-allocated per-thread assert state (removed by this commit in favor of
// a thread_local bitmask): one bool per assert type, plus a nesting level
// counting the live scopes that reference this object.
class PerThreadAssertData final {
public:
// All assert types default to "allowed" (true).
PerThreadAssertData() : nesting_level_(0) {
for (int i = 0; i < LAST_PER_THREAD_ASSERT_TYPE; i++) {
assert_states_[i] = true;
}
}
// By destruction time, every scope must have restored its type back to
// "allowed"; anything else indicates an unbalanced scope.
~PerThreadAssertData() {
for (int i = 0; i < LAST_PER_THREAD_ASSERT_TYPE; ++i) {
DCHECK(assert_states_[i]);
}
}
bool Get(PerThreadAssertType type) const { return assert_states_[type]; }
void Set(PerThreadAssertType type, bool x) { assert_states_[type] = x; }
// Nesting bookkeeping: DecrementLevel() returns true when the last live
// scope goes away, signalling the caller to delete this object.
void IncrementLevel() { ++nesting_level_; }
bool DecrementLevel() { return --nesting_level_ == 0; }
// Reads/writes the current thread's data via the lazily-created TLS key.
static PerThreadAssertData* GetCurrent() {
return reinterpret_cast<PerThreadAssertData*>(
base::Thread::GetThreadLocal(*GetPerThreadAssertKey()));
}
static void SetCurrent(PerThreadAssertData* data) {
base::Thread::SetThreadLocal(*GetPerThreadAssertKey(), data);
}
private:
bool assert_states_[LAST_PER_THREAD_ASSERT_TYPE];
int nesting_level_;
DISALLOW_COPY_AND_ASSIGN(PerThreadAssertData);
};
} // namespace
// NOTE(review): this span interleaves the removed (heap-allocated, TLS
// pointer) and added (thread_local bitmask) variants of the constructor from
// a diff whose +/- markers were stripped; it is not valid C++ as written.
template <PerThreadAssertType kType, bool kAllow>
PerThreadAssertScope<kType, kAllow>::PerThreadAssertScope() {
// Removed variant: lazily allocate per-thread data, remember the previous
// state for kType, bump the nesting level, then install the new state.
PerThreadAssertData* current_data = PerThreadAssertData::GetCurrent();
if (current_data == nullptr) {
current_data = new PerThreadAssertData();
PerThreadAssertData::SetCurrent(current_data);
}
data_and_old_state_.update(current_data, current_data->Get(kType));
current_data->IncrementLevel();
current_data->Set(kType, kAllow);
// Added variant: save the whole bitmask, then overwrite just this type's
// bit with kAllow.
PerThreadAssertScope<kType, kAllow>::PerThreadAssertScope()
: old_data_(current_per_thread_assert_data) {
current_per_thread_assert_data =
PerThreadDataBit<kType>::update(old_data_.value(), kAllow);
}
template <PerThreadAssertType kType, bool kAllow>
PerThreadAssertScope<kType, kAllow>::~PerThreadAssertScope() {
// Skip restoration if the scope was already explicitly Release()d. Both
// guards of the diff appear here: the removed pointer-based check and the
// added base::Optional-based check.
if (data() == nullptr) return;
if (!old_data_.has_value()) return;
Release();
}
template <PerThreadAssertType kType, bool kAllow>
void PerThreadAssertScope<kType, kAllow>::Release() {
// Removed implementation: restore the saved per-type state, drop the
// nesting level, and free the heap-allocated data when the last scope on
// this thread goes away; null the pointer to mark the scope released.
auto* current_data = data();
DCHECK_NOT_NULL(current_data);
current_data->Set(kType, old_state());
if (current_data->DecrementLevel()) {
PerThreadAssertData::SetCurrent(nullptr);
delete current_data;
}
set_data(nullptr);
// Added implementation: restore the saved bitmask wholesale and clear the
// Optional to mark the scope released (checked by the destructor).
current_per_thread_assert_data = old_data_.value();
old_data_.reset();
}
// static
template <PerThreadAssertType kType, bool kAllow>
bool PerThreadAssertScope<kType, kAllow>::IsAllowed() {
// Removed implementation: no per-thread data yet means "allowed" (default).
PerThreadAssertData* current_data = PerThreadAssertData::GetCurrent();
return current_data == nullptr || current_data->Get(kType);
// Added implementation: decode this type's bit from the thread-local
// bitmask, which is initialized to all-ones (everything allowed).
return PerThreadDataBit<kType>::decode(current_per_thread_assert_data);
}
namespace {
// Removed helper alias; superseded by the file-wide PerIsolateDataBit alias.
template <PerIsolateAssertType kType>
using DataBit = base::BitField<bool, kType, 1>;
} // namespace
template <PerIsolateAssertType kType, bool kAllow>
PerIsolateAssertScope<kType, kAllow>::PerIsolateAssertScope(Isolate* isolate)
: isolate_(isolate), old_data_(isolate->per_isolate_assert_data()) {
DCHECK_NOT_NULL(isolate);
// The per-isolate data is a uint32_t bitmask, so only 32 types fit.
STATIC_ASSERT(kType < 32);
// Overwrite this type's bit while keeping the saved bitmask intact. Both
// the removed (DataBit) and added (PerIsolateDataBit) lines of the diff
// appear below.
isolate_->set_per_isolate_assert_data(
DataBit<kType>::update(old_data_, kAllow));
PerIsolateDataBit<kType>::update(old_data_, kAllow));
}
template <PerIsolateAssertType kType, bool kAllow>
......@@ -114,16 +66,16 @@ PerIsolateAssertScope<kType, kAllow>::~PerIsolateAssertScope() {
// static
template <PerIsolateAssertType kType, bool kAllow>
bool PerIsolateAssertScope<kType, kAllow>::IsAllowed(Isolate* isolate) {
// Decode this type's bit from the isolate's bitmask. Both the removed
// (DataBit) and added (PerIsolateDataBit) return lines of the diff appear.
return DataBit<kType>::decode(isolate->per_isolate_assert_data());
return PerIsolateDataBit<kType>::decode(isolate->per_isolate_assert_data());
}
// -----------------------------------------------------------------------------
// Instantiations.
template class PerThreadAssertScope<GARBAGE_COLLECTION_ASSERT, false>;
template class PerThreadAssertScope<GARBAGE_COLLECTION_ASSERT, true>;
template class PerThreadAssertScope<HEAP_ALLOCATION_ASSERT, false>;
template class PerThreadAssertScope<HEAP_ALLOCATION_ASSERT, true>;
template class PerThreadAssertScope<SAFEPOINTS_ASSERT, false>;
template class PerThreadAssertScope<SAFEPOINTS_ASSERT, true>;
template class PerThreadAssertScope<HANDLE_ALLOCATION_ASSERT, false>;
template class PerThreadAssertScope<HANDLE_ALLOCATION_ASSERT, true>;
template class PerThreadAssertScope<HANDLE_DEREFERENCE_ASSERT, false>;
......
......@@ -13,28 +13,20 @@
#include "src/base/optional.h"
#include "src/base/platform/mutex.h"
#include "src/common/globals.h"
#include "src/utils/pointer-with-payload.h"
namespace v8 {
namespace internal {
// Forward declarations.
class Isolate;
class PerThreadAssertData;
template <>
struct PointerWithPayloadTraits<PerThreadAssertData> {
static constexpr int value = 1;
};
// Assert types tracked per thread; each occupies one bit of the per-thread
// bitmask. This diff span shows both the removed enumerators
// (GARBAGE_COLLECTION_ASSERT and the LAST_PER_THREAD_ASSERT_TYPE count
// sentinel) and the added one (SAFEPOINTS_ASSERT) interleaved.
enum PerThreadAssertType {
GARBAGE_COLLECTION_ASSERT,
SAFEPOINTS_ASSERT,
HEAP_ALLOCATION_ASSERT,
HANDLE_ALLOCATION_ASSERT,
HANDLE_DEREFERENCE_ASSERT,
CODE_DEPENDENCY_CHANGE_ASSERT,
CODE_ALLOCATION_ASSERT,
LAST_PER_THREAD_ASSERT_TYPE
CODE_ALLOCATION_ASSERT
};
enum PerIsolateAssertType {
......@@ -52,31 +44,18 @@ class PerThreadAssertScope {
V8_EXPORT_PRIVATE PerThreadAssertScope();
V8_EXPORT_PRIVATE ~PerThreadAssertScope();
PerThreadAssertScope(const PerThreadAssertScope&) = delete;
PerThreadAssertScope& operator=(const PerThreadAssertScope&) = delete;
V8_EXPORT_PRIVATE static bool IsAllowed();
void Release();
private:
PointerWithPayload<PerThreadAssertData, bool, 1> data_and_old_state_;
V8_INLINE void set_data(PerThreadAssertData* data) {
data_and_old_state_.SetPointer(data);
}
V8_INLINE PerThreadAssertData* data() const {
return data_and_old_state_.GetPointer();
}
V8_INLINE void set_old_state(bool old_state) {
return data_and_old_state_.SetPayload(old_state);
}
V8_INLINE bool old_state() const { return data_and_old_state_.GetPayload(); }
DISALLOW_COPY_AND_ASSIGN(PerThreadAssertScope);
base::Optional<uint32_t> old_data_;
};
template <PerIsolateAssertType type, bool allow>
template <PerIsolateAssertType kType, bool kAllow>
class PerIsolateAssertScope {
public:
V8_EXPORT_PRIVATE explicit PerIsolateAssertScope(Isolate* isolate);
......@@ -91,9 +70,50 @@ class PerIsolateAssertScope {
DISALLOW_COPY_AND_ASSIGN(PerIsolateAssertScope);
};
template <PerThreadAssertType type, bool allow>
// Combines multiple assert scopes into one. Only declared here; defined by
// the partial specializations below (single-scope base case and recursive
// inductive case).
template <typename... Scopes>
class CombinationAssertScope;
// Base case for CombinationAssertScope (equivalent to Scope).
template <typename Scope>
class CombinationAssertScope<Scope> : public Scope {
public:
V8_EXPORT_PRIVATE static bool IsAllowed() {
// Define IsAllowed() explicitly rather than with using Scope::IsAllowed, to
// allow SFINAE removal of IsAllowed() when it's not defined (under debug).
return Scope::IsAllowed();
}
// Forward Release() and all constructors of the wrapped scope unchanged.
using Scope::Release;
using Scope::Scope;
};
// Inductive case for CombinationAssertScope.
// Derives from the first scope and, recursively, from the combination of the
// remaining ones — so a combination is-a each of its component scopes and can
// be passed wherever any one of them is expected.
template <typename Scope, typename... Scopes>
class CombinationAssertScope<Scope, Scopes...>
: public Scope, public CombinationAssertScope<Scopes...> {
using NextScopes = CombinationAssertScope<Scopes...>;
public:
// Constructor for per-thread scopes.
V8_EXPORT_PRIVATE CombinationAssertScope() : Scope(), NextScopes() {}
// Constructor for per-isolate scopes.
V8_EXPORT_PRIVATE explicit CombinationAssertScope(Isolate* isolate)
: Scope(isolate), NextScopes(isolate) {}
// Allowed only if every component scope reports allowed.
V8_EXPORT_PRIVATE static bool IsAllowed() {
return Scope::IsAllowed() && NextScopes::IsAllowed();
}
void Release() {
// Release in reverse order.
NextScopes::Release();
Scope::Release();
}
};
template <PerThreadAssertType kType, bool kAllow>
#ifdef DEBUG
class PerThreadAssertScopeDebugOnly : public PerThreadAssertScope<type, allow> {
class PerThreadAssertScopeDebugOnly
: public PerThreadAssertScope<kType, kAllow> {
#else
class PerThreadAssertScopeDebugOnly {
public:
......@@ -104,13 +124,13 @@ class PerThreadAssertScopeDebugOnly {
#endif
};
template <PerIsolateAssertType type, bool allow>
template <PerIsolateAssertType kType, bool kAllow>
#ifdef DEBUG
class PerIsolateAssertScopeDebugOnly
: public PerIsolateAssertScope<type, allow> {
: public PerIsolateAssertScope<kType, kAllow> {
public:
explicit PerIsolateAssertScopeDebugOnly(Isolate* isolate)
: PerIsolateAssertScope<type, allow>(isolate) {}
: PerIsolateAssertScope<kType, kAllow>(isolate) {}
#else
class PerIsolateAssertScopeDebugOnly {
public:
......@@ -128,22 +148,12 @@ using DisallowHandleAllocation =
using AllowHandleAllocation =
PerThreadAssertScopeDebugOnly<HANDLE_ALLOCATION_ASSERT, true>;
// Scope to document where we do not expect garbage collections. It differs from
// DisallowHeapAllocation by also forbidding safepoints.
using DisallowGarbageCollection =
PerThreadAssertScopeDebugOnly<GARBAGE_COLLECTION_ASSERT, false>;
// The DISALLOW_GARBAGE_COLLECTION macro can be used to define a
// DisallowGarbageCollection field in classes that isn't present in release
// builds.
#ifdef DEBUG
#define DISALLOW_GARBAGE_COLLECTION(name) DisallowGarbageCollection name;
#else
#define DISALLOW_GARBAGE_COLLECTION(name)
#endif
// Scope to document where we do not expect safepoints to be entered.
using DisallowSafepoints =
PerThreadAssertScopeDebugOnly<SAFEPOINTS_ASSERT, false>;
// Scope to introduce an exception to DisallowGarbageCollection.
using AllowGarbageCollection =
PerThreadAssertScopeDebugOnly<GARBAGE_COLLECTION_ASSERT, true>;
// Scope to introduce an exception to DisallowSafepoints.
using AllowSafepoints = PerThreadAssertScopeDebugOnly<SAFEPOINTS_ASSERT, true>;
// Scope to document where we do not expect any allocation and GC. Deprecated
// and will eventually be removed, use DisallowGarbageCollection instead.
......@@ -186,12 +196,33 @@ using DisallowCodeAllocation =
using AllowCodeAllocation =
PerThreadAssertScopeDebugOnly<CODE_ALLOCATION_ASSERT, true>;
// Removed aggregate form of DisallowHeapAccess: bundled the four disallow
// scopes as member fields. This commit replaces it with a
// CombinationAssertScope alias, so the scopes become base classes instead.
class DisallowHeapAccess {
DisallowCodeDependencyChange no_dependency_change_;
DisallowHandleAllocation no_handle_allocation_;
DisallowHandleDereference no_handle_dereference_;
DisallowHeapAllocation no_heap_allocation_;
};
// Scope to document where we do not expect garbage collections. It differs from
// DisallowHeapAllocation by also forbidding safepoints.
using DisallowGarbageCollection =
CombinationAssertScope<DisallowSafepoints, DisallowHeapAllocation>;
// The DISALLOW_GARBAGE_COLLECTION macro can be used to define a
// DisallowGarbageCollection field in classes that isn't present in release
// builds.
#ifdef DEBUG
#define DISALLOW_GARBAGE_COLLECTION(name) DisallowGarbageCollection name;
#else
#define DISALLOW_GARBAGE_COLLECTION(name)
#endif
// Scope to introduce an exception to DisallowGarbageCollection.
using AllowGarbageCollection =
CombinationAssertScope<AllowSafepoints, AllowHeapAllocation>;
// Scope to document where we do not expect any access to the heap.
using DisallowHeapAccess =
CombinationAssertScope<DisallowCodeDependencyChange,
DisallowHandleDereference, DisallowHandleAllocation,
DisallowHeapAllocation>;
// Scope to introduce an exception to DisallowHeapAccess.
using AllowHeapAccess =
CombinationAssertScope<AllowCodeDependencyChange, AllowHandleDereference,
AllowHandleAllocation, AllowHeapAllocation>;
class DisallowHeapAccessIf {
public:
......@@ -286,6 +317,8 @@ using AllowExceptions =
// Explicit instantiation declarations.
extern template class PerThreadAssertScope<HEAP_ALLOCATION_ASSERT, false>;
extern template class PerThreadAssertScope<HEAP_ALLOCATION_ASSERT, true>;
extern template class PerThreadAssertScope<SAFEPOINTS_ASSERT, false>;
extern template class PerThreadAssertScope<SAFEPOINTS_ASSERT, true>;
extern template class PerThreadAssertScope<HANDLE_ALLOCATION_ASSERT, false>;
extern template class PerThreadAssertScope<HANDLE_ALLOCATION_ASSERT, true>;
extern template class PerThreadAssertScope<HANDLE_DEREFERENCE_ASSERT, false>;
......
......@@ -534,7 +534,6 @@ Deoptimizer::Deoptimizer(Isolate* isolate, JSFunction function,
DCHECK(function.IsJSFunction());
#ifdef DEBUG
DCHECK(AllowHeapAllocation::IsAllowed());
DCHECK(AllowGarbageCollection::IsAllowed());
disallow_garbage_collection_ = new DisallowGarbageCollection();
#endif // DEBUG
......
......@@ -1613,8 +1613,7 @@ Object Isolate::ThrowInternal(Object raw_exception, MessageLocation* location) {
// Script::GetLineNumber and Script::GetColumnNumber can allocate on the heap to
// initialize the line_ends array, so be careful when calling them.
#ifdef DEBUG
if (AllowHeapAllocation::IsAllowed() &&
AllowGarbageCollection::IsAllowed()) {
if (AllowGarbageCollection::IsAllowed()) {
#else
if ((false)) {
#endif
......
......@@ -170,7 +170,6 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationType type,
AllocationAlignment alignment) {
DCHECK(AllowHandleAllocation::IsAllowed());
DCHECK(AllowHeapAllocation::IsAllowed());
DCHECK(AllowGarbageCollection::IsAllowed());
DCHECK_IMPLIES(type == AllocationType::kCode || type == AllocationType::kMap,
alignment == AllocationAlignment::kWordAligned);
DCHECK_EQ(gc_state(), NOT_IN_GC);
......@@ -271,7 +270,6 @@ HeapObject Heap::AllocateRawWith(int size, AllocationType allocation,
AllocationAlignment alignment) {
DCHECK(AllowHandleAllocation::IsAllowed());
DCHECK(AllowHeapAllocation::IsAllowed());
DCHECK(AllowGarbageCollection::IsAllowed());
DCHECK_EQ(gc_state(), NOT_IN_GC);
Heap* heap = isolate()->heap();
if (!V8_ENABLE_THIRD_PARTY_HEAP_BOOL &&
......
......@@ -838,7 +838,8 @@ void Heap::GarbageCollectionPrologue() {
UpdateMaximumCommitted();
#ifdef DEBUG
DCHECK(!AllowHeapAllocation::IsAllowed() && gc_state() == NOT_IN_GC);
DCHECK(!AllowHeapAllocation::IsAllowed());
DCHECK_EQ(gc_state(), NOT_IN_GC);
if (FLAG_gc_verbose) Print();
#endif // DEBUG
......@@ -1572,9 +1573,7 @@ bool Heap::CollectGarbage(AllocationSpace space,
{
tracer()->Start(collector, gc_reason, collector_reason);
DCHECK(AllowHeapAllocation::IsAllowed());
DCHECK(AllowGarbageCollection::IsAllowed());
DisallowHeapAllocation no_allocation_during_gc;
DisallowGarbageCollection no_gc_during_gc;
GarbageCollectionPrologue();
......
......@@ -19,7 +19,6 @@ AllocationResult LocalHeap::AllocateRaw(int size_in_bytes, AllocationType type,
DCHECK_EQ(LocalHeap::Current(), this);
DCHECK(AllowHandleAllocation::IsAllowed());
DCHECK(AllowHeapAllocation::IsAllowed());
DCHECK(AllowGarbageCollection::IsAllowed());
DCHECK_IMPLIES(type == AllocationType::kCode || type == AllocationType::kMap,
alignment == AllocationAlignment::kWordAligned);
Heap::HeapState state = heap()->gc_state();
......
......@@ -45,9 +45,7 @@ class V8_EXPORT_PRIVATE LocalHeap {
// Frequently invoked by local thread to check whether safepoint was requested
// from the main thread.
void Safepoint() {
// In case garbage collection is disabled, the thread isn't even allowed to
// invoke Safepoint(). Otherwise a GC might happen here.
DCHECK(AllowGarbageCollection::IsAllowed());
DCHECK(AllowSafepoints::IsAllowed());
if (IsSafepointRequested()) {
ClearSafepointRequested();
......
......@@ -44,7 +44,6 @@ Handle<String> String::SlowFlatten(Isolate* isolate, Handle<ConsString> cons,
}
DCHECK(AllowHeapAllocation::IsAllowed());
DCHECK(AllowGarbageCollection::IsAllowed());
int length = cons->length();
allocation =
ObjectInYoungGeneration(*cons) ? allocation : AllocationType::kOld;
......
......@@ -205,7 +205,6 @@ RUNTIME_FUNCTION(Runtime_NotifyDeoptimized) {
Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
DCHECK(CodeKindCanDeoptimize(deoptimizer->compiled_code()->kind()));
DCHECK(deoptimizer->compiled_code()->is_turbofanned());
DCHECK(AllowHeapAllocation::IsAllowed());
DCHECK(AllowGarbageCollection::IsAllowed());
DCHECK(isolate->context().is_null());
......
......@@ -94,7 +94,8 @@ if [ ! -e "${BUILD_DIR}" ]; then
fi
cd "${BUILD_DIR}"
cmake -GNinja -DCMAKE_CXX_FLAGS="-static-libstdc++" -DLLVM_ENABLE_TERMINFO=OFF \
-DCMAKE_BUILD_TYPE=Release -DLLVM_ENABLE_PROJECTS=clang "${LLVM_PROJECT_DIR}/llvm"
-DCMAKE_BUILD_TYPE=Release -DLLVM_ENABLE_PROJECTS=clang \
-DLLVM_ENABLE_Z3_SOLVER=OFF "${LLVM_PROJECT_DIR}/llvm"
MACOSX_DEPLOYMENT_TARGET=10.5 ninja -j"${NUM_JOBS}"
# Strip the clang binary.
......
......@@ -164,10 +164,10 @@ void TestDeadVarAnalysis(Isolate* isolate) {
void TestGuardedDeadVarAnalysis(Isolate* isolate) {
JSObject raw_obj = *isolate->factory()->NewJSObjectWithNullProto();
// Note: having DisallowHeapAllocation with the same function as CauseGC
// Note: having DisallowGarbageCollection with the same function as CauseGC
// normally doesn't make sense, but we want to test whether the guards
// are recognized by GCMole.
DisallowHeapAllocation no_gc;
DisallowGarbageCollection no_gc;
CauseGCRaw(raw_obj, isolate);
// Shouldn't cause warning.
......@@ -177,8 +177,8 @@ void TestGuardedDeadVarAnalysis(Isolate* isolate) {
void TestGuardedDeadVarAnalysisNotOnStack(Isolate* isolate) {
JSObject raw_obj = *isolate->factory()->NewJSObjectWithNullProto();
// {DisallowHeapAccess} has a {DisallowHeapAllocation} embedded as a member
// field, so both are treated equally by gcmole.
// {DisallowHeapAccess} has {DisallowHeapAllocation} as a superclass, so both
// are treated equally by gcmole.
DisallowHeapAccess no_gc;
CauseGCRaw(raw_obj, isolate);
......
7e31d257a711b1a77823633e4f19152c3e0718f4
e5a13c333747e111a1a22c57f9acd3056eefff16
......@@ -1590,7 +1590,7 @@ class ProblemsFinder : public clang::ASTConsumer,
clang::CXXRecordDecl* no_heap_access_decl =
r.ResolveNamespace("v8")
.ResolveNamespace("internal")
.Resolve<clang::CXXRecordDecl>("DisallowHeapAccess");
.ResolveTemplate("DisallowHeapAccess");
clang::CXXRecordDecl* object_decl =
r.ResolveNamespace("v8").ResolveNamespace("internal").
......@@ -1612,9 +1612,6 @@ class ProblemsFinder : public clang::ASTConsumer,
if (smi_decl != NULL) smi_decl = smi_decl->getDefinition();
if (no_heap_access_decl != NULL)
no_heap_access_decl = no_heap_access_decl->getDefinition();
if (object_decl != NULL && smi_decl != NULL && maybe_object_decl != NULL) {
function_analyzer_ = new FunctionAnalyzer(
clang::ItaniumMangleContext::create(ctx, d_), object_decl,
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment