Commit 9526c293 authored by Camillo Bruni, committed by V8 LUCI CQ

Dehandlify more code


Bug: v8:11263
Change-Id: Ia53518d52f906c96afafc084af99679bd64b46fa
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3866177
Commit-Queue: Camillo Bruni <cbruni@chromium.org>
Reviewed-by: Leszek Swirski <leszeks@chromium.org>
Cr-Commit-Position: refs/heads/main@{#82888}
parent b7c71a6d
......@@ -135,15 +135,18 @@ Handle<ModuleRequest> SourceTextModuleDescriptor::AstModuleRequest::Serialize(
static_cast<int>(import_assertions()->size() *
ModuleRequest::kAssertionEntrySize),
AllocationType::kOld);
int i = 0;
for (auto iter = import_assertions()->cbegin();
iter != import_assertions()->cend();
++iter, i += ModuleRequest::kAssertionEntrySize) {
import_assertions_array->set(i, *iter->first->string());
import_assertions_array->set(i + 1, *iter->second.first->string());
import_assertions_array->set(i + 2,
Smi::FromInt(iter->second.second.beg_pos));
{
DisallowGarbageCollection no_gc;
auto raw_import_assertions = *import_assertions_array;
int i = 0;
for (auto iter = import_assertions()->cbegin();
iter != import_assertions()->cend();
++iter, i += ModuleRequest::kAssertionEntrySize) {
raw_import_assertions.set(i, *iter->first->string());
raw_import_assertions.set(i + 1, *iter->second.first->string());
raw_import_assertions.set(i + 2,
Smi::FromInt(iter->second.second.beg_pos));
}
}
return v8::internal::ModuleRequest::New(isolate, specifier()->string(),
import_assertions_array, position());
......
......@@ -755,9 +755,13 @@ class ArrayConcatVisitor {
isolate_->factory()->NewNumber(static_cast<double>(index_offset_));
Handle<Map> map = JSObject::GetElementsTransitionMap(
array, fast_elements() ? HOLEY_ELEMENTS : DICTIONARY_ELEMENTS);
array->set_length(*length);
array->set_elements(*storage_fixed_array());
array->set_map(*map, kReleaseStore);
{
DisallowGarbageCollection no_gc;
auto raw = *array;
raw.set_length(*length);
raw.set_elements(*storage_fixed_array());
raw.set_map(*map, kReleaseStore);
}
return array;
}
......
......@@ -850,70 +850,81 @@ class MaglevCodeGeneratorImpl final {
code_gen_state_.compilation_info()
->translation_array_builder()
.ToTranslationArray(isolate()->factory());
{
DisallowGarbageCollection no_gc;
auto raw_data = *data;
data->SetTranslationByteArray(*translation_array);
// TODO(leszeks): Fix with the real inlined function count.
data->SetInlinedFunctionCount(Smi::zero());
// TODO(leszeks): Support optimization IDs
data->SetOptimizationId(Smi::zero());
raw_data.SetTranslationByteArray(*translation_array);
// TODO(leszeks): Fix with the real inlined function count.
raw_data.SetInlinedFunctionCount(Smi::zero());
// TODO(leszeks): Support optimization IDs
raw_data.SetOptimizationId(Smi::zero());
DCHECK_NE(deopt_exit_start_offset_, -1);
data->SetDeoptExitStart(Smi::FromInt(deopt_exit_start_offset_));
data->SetEagerDeoptCount(Smi::FromInt(eager_deopt_count));
data->SetLazyDeoptCount(Smi::FromInt(lazy_deopt_count));
DCHECK_NE(deopt_exit_start_offset_, -1);
raw_data.SetDeoptExitStart(Smi::FromInt(deopt_exit_start_offset_));
raw_data.SetEagerDeoptCount(Smi::FromInt(eager_deopt_count));
raw_data.SetLazyDeoptCount(Smi::FromInt(lazy_deopt_count));
data->SetSharedFunctionInfo(*code_gen_state_.compilation_info()
->toplevel_compilation_unit()
->shared_function_info()
.object());
raw_data.SetSharedFunctionInfo(*code_gen_state_.compilation_info()
->toplevel_compilation_unit()
->shared_function_info()
.object());
}
IdentityMap<int, base::DefaultAllocationPolicy>& deopt_literals =
code_gen_state_.compilation_info()->deopt_literals();
Handle<DeoptimizationLiteralArray> literals =
isolate()->factory()->NewDeoptimizationLiteralArray(
deopt_literals.size() + 1);
// TODO(leszeks): Fix with the real inlining positions.
Handle<PodArray<InliningPosition>> inlining_positions =
PodArray<InliningPosition>::New(isolate(), 0);
DisallowGarbageCollection no_gc;
auto raw_literals = *literals;
auto raw_data = *data;
IdentityMap<int, base::DefaultAllocationPolicy>::IteratableScope iterate(
&deopt_literals);
for (auto it = iterate.begin(); it != iterate.end(); ++it) {
literals->set(*it.entry(), it.key());
raw_literals.set(*it.entry(), it.key());
}
// Add the bytecode to the deopt literals to make sure it's held strongly.
// TODO(leszeks): Do this for inlined functions too.
literals->set(deopt_literals.size(), *code_gen_state_.compilation_info()
->toplevel_compilation_unit()
->bytecode()
.object());
data->SetLiteralArray(*literals);
raw_literals.set(deopt_literals.size(), *code_gen_state_.compilation_info()
->toplevel_compilation_unit()
->bytecode()
.object());
raw_data.SetLiteralArray(raw_literals);
// TODO(leszeks): Fix with the real inlining positions.
Handle<PodArray<InliningPosition>> inlining_positions =
PodArray<InliningPosition>::New(isolate(), 0);
data->SetInliningPositions(*inlining_positions);
raw_data.SetInliningPositions(*inlining_positions);
// TODO(leszeks): Fix once we have OSR.
BytecodeOffset osr_offset = BytecodeOffset::None();
data->SetOsrBytecodeOffset(Smi::FromInt(osr_offset.ToInt()));
data->SetOsrPcOffset(Smi::FromInt(-1));
raw_data.SetOsrBytecodeOffset(Smi::FromInt(osr_offset.ToInt()));
raw_data.SetOsrPcOffset(Smi::FromInt(-1));
// Populate deoptimization entries.
int i = 0;
for (EagerDeoptInfo* deopt_info : code_gen_state_.eager_deopts()) {
DCHECK_NE(deopt_info->translation_index, -1);
data->SetBytecodeOffset(i, deopt_info->state.bytecode_position);
data->SetTranslationIndex(i, Smi::FromInt(deopt_info->translation_index));
data->SetPc(i, Smi::FromInt(deopt_info->deopt_entry_label.pos()));
raw_data.SetBytecodeOffset(i, deopt_info->state.bytecode_position);
raw_data.SetTranslationIndex(i,
Smi::FromInt(deopt_info->translation_index));
raw_data.SetPc(i, Smi::FromInt(deopt_info->deopt_entry_label.pos()));
#ifdef DEBUG
data->SetNodeId(i, Smi::FromInt(i));
raw_data.SetNodeId(i, Smi::FromInt(i));
#endif // DEBUG
i++;
}
for (LazyDeoptInfo* deopt_info : code_gen_state_.lazy_deopts()) {
DCHECK_NE(deopt_info->translation_index, -1);
data->SetBytecodeOffset(i, deopt_info->state.bytecode_position);
data->SetTranslationIndex(i, Smi::FromInt(deopt_info->translation_index));
data->SetPc(i, Smi::FromInt(deopt_info->deopt_entry_label.pos()));
raw_data.SetBytecodeOffset(i, deopt_info->state.bytecode_position);
raw_data.SetTranslationIndex(i,
Smi::FromInt(deopt_info->translation_index));
raw_data.SetPc(i, Smi::FromInt(deopt_info->deopt_entry_label.pos()));
#ifdef DEBUG
data->SetNodeId(i, Smi::FromInt(i));
raw_data.SetNodeId(i, Smi::FromInt(i));
#endif // DEBUG
i++;
}
......
......@@ -923,11 +923,13 @@ void FeedbackNexus::ConfigureCloneObject(Handle<Map> source_map,
// Transition to POLYMORPHIC.
Handle<WeakFixedArray> array =
CreateArrayOfSize(2 * kCloneObjectPolymorphicEntrySize);
array->Set(0, HeapObjectReference::Weak(*feedback));
array->Set(1, GetFeedbackExtra());
array->Set(2, HeapObjectReference::Weak(*source_map));
array->Set(3, MaybeObject::FromObject(*result_map));
SetFeedback(*array, UPDATE_WRITE_BARRIER,
DisallowGarbageCollection no_gc;
auto raw_array = *array;
raw_array.Set(0, HeapObjectReference::Weak(*feedback));
raw_array.Set(1, GetFeedbackExtra());
raw_array.Set(2, HeapObjectReference::Weak(*source_map));
raw_array.Set(3, MaybeObject::FromObject(*result_map));
SetFeedback(raw_array, UPDATE_WRITE_BARRIER,
HeapObjectReference::ClearedValue(isolate));
}
break;
......
......@@ -327,12 +327,13 @@ Handle<NameToIndexHashTable> NameToIndexHashTable::Add(
SLOW_DCHECK(table->FindEntry(isolate, key).is_not_found());
// Check whether the dictionary should be extended.
table = EnsureCapacity(isolate, table);
DisallowGarbageCollection no_gc;
auto raw_table = *table;
// Compute the key object.
InternalIndex entry = table->FindInsertionEntry(isolate, key->hash());
table->set(EntryToIndex(entry), *key);
table->set(EntryToValueIndex(entry), Smi::FromInt(index));
table->ElementAdded();
InternalIndex entry = raw_table.FindInsertionEntry(isolate, key->hash());
raw_table.set(EntryToIndex(entry), *key);
raw_table.set(EntryToValueIndex(entry), Smi::FromInt(index));
raw_table.ElementAdded();
return table;
}
......
......@@ -3981,18 +3981,25 @@ bool DescriptorArray::IsEqualUpTo(DescriptorArray desc, int nof_descriptors) {
// Stores {value} at {index} in {array}, growing the backing store first if
// {index} is out of bounds. Returns {array} itself when it was large enough,
// otherwise a freshly allocated, hole-filled copy with the value set.
// NOTE(review): this span was a rendered diff that interleaved pre- and
// post-change lines; it is reconstructed here as the post-change version.
Handle<FixedArray> FixedArray::SetAndGrow(Isolate* isolate,
                                          Handle<FixedArray> array, int index,
                                          Handle<Object> value) {
  // Cache the length before any allocation; it cannot change underneath us.
  int src_length = array->length();
  if (index < src_length) {
    // Fast path: index fits, write in place.
    array->set(index, *value);
    return array;
  }
  // Grow geometrically until the requested index fits.
  int capacity = src_length;
  do {
    capacity = JSObject::NewElementsCapacity(capacity);
  } while (capacity <= index);
  Handle<FixedArray> new_array = isolate->factory()->NewFixedArray(capacity);
  // All allocation is done; work on raw (dehandlified) objects from here on.
  DisallowGarbageCollection no_gc;
  auto raw_src = *array;
  auto raw_dst = *new_array;
  raw_src.CopyTo(0, raw_dst, 0, src_length);
  DCHECK_EQ(raw_dst.length(), capacity);
  // Fill the tail beyond the copied prefix with holes before the final write.
  raw_dst.FillWithHoles(src_length, capacity);
  raw_dst.set(index, *value);
  return new_array;
}
......@@ -4559,8 +4566,11 @@ int16_t DescriptorArray::UpdateNumberOfMarkedDescriptors(
// Allocates a new AccessorPair and copies {pair}'s getter and setter into it.
// NOTE(review): this span was a rendered diff that interleaved pre- and
// post-change lines; it is reconstructed here as the post-change version.
Handle<AccessorPair> AccessorPair::Copy(Isolate* isolate,
                                        Handle<AccessorPair> pair) {
  Handle<AccessorPair> copy = isolate->factory()->NewAccessorPair();
  // No further allocation after this point: operate on raw objects so the
  // field reads/writes avoid handle dereferences.
  DisallowGarbageCollection no_gc;
  auto raw_src = *pair;
  auto raw_copy = *copy;
  raw_copy.set_getter(raw_src.getter());
  raw_copy.set_setter(raw_src.setter());
  return copy;
}
......@@ -5788,10 +5798,11 @@ Handle<Derived> HashTable<Derived, Shape>::NewInternal(
Handle<FixedArray> array = factory->NewFixedArrayWithMap(
Derived::GetMap(ReadOnlyRoots(isolate)), length, allocation);
Handle<Derived> table = Handle<Derived>::cast(array);
table->SetNumberOfElements(0);
table->SetNumberOfDeletedElements(0);
table->SetCapacity(capacity);
DisallowGarbageCollection no_gc;
auto raw_table = *table;
raw_table.SetNumberOfElements(0);
raw_table.SetNumberOfDeletedElements(0);
raw_table.SetCapacity(capacity);
return table;
}
......
......@@ -35,12 +35,14 @@ MaybeHandle<Derived> OrderedHashTable<Derived, entrysize>::Allocate(
HashTableStartIndex() + num_buckets + (capacity * kEntrySize),
allocation);
Handle<Derived> table = Handle<Derived>::cast(backing_store);
DisallowGarbageCollection no_gc;
auto raw_table = *table;
for (int i = 0; i < num_buckets; ++i) {
table->set(HashTableStartIndex() + i, Smi::FromInt(kNotFound));
raw_table.set(HashTableStartIndex() + i, Smi::FromInt(kNotFound));
}
table->SetNumberOfBuckets(num_buckets);
table->SetNumberOfElements(0);
table->SetNumberOfDeletedElements(0);
raw_table.SetNumberOfBuckets(num_buckets);
raw_table.SetNumberOfElements(0);
raw_table.SetNumberOfDeletedElements(0);
return table;
}
......@@ -56,9 +58,11 @@ MaybeHandle<Derived> OrderedHashTable<Derived, entrysize>::AllocateEmpty(
Derived::GetMap(ReadOnlyRoots(isolate)), HashTableStartIndex(),
allocation);
Handle<Derived> table = Handle<Derived>::cast(backing_store);
table->SetNumberOfBuckets(0);
table->SetNumberOfElements(0);
table->SetNumberOfDeletedElements(0);
DisallowHandleAllocation no_gc;
auto raw_table = *table;
raw_table.SetNumberOfBuckets(0);
raw_table.SetNumberOfElements(0);
raw_table.SetNumberOfDeletedElements(0);
return table;
}
......@@ -167,15 +171,21 @@ InternalIndex OrderedHashTable<Derived, entrysize>::FindEntry(Isolate* isolate,
MaybeHandle<OrderedHashSet> OrderedHashSet::Add(Isolate* isolate,
Handle<OrderedHashSet> table,
Handle<Object> key) {
int hash = key->GetOrCreateHash(isolate).value();
if (table->NumberOfElements() > 0) {
int raw_entry = table->HashToEntryRaw(hash);
// Walk the chain of the bucket and try finding the key.
while (raw_entry != kNotFound) {
Object candidate_key = table->KeyAt(InternalIndex(raw_entry));
// Do not add if we have the key already
if (candidate_key.SameValueZero(*key)) return table;
raw_entry = table->NextChainEntryRaw(raw_entry);
int hash;
{
DisallowGarbageCollection no_gc;
auto raw_key = *key;
auto raw_table = *table;
hash = raw_key.GetOrCreateHash(isolate).value();
if (raw_table.NumberOfElements() > 0) {
int raw_entry = raw_table.HashToEntryRaw(hash);
// Walk the chain of the bucket and try finding the key.
while (raw_entry != kNotFound) {
Object candidate_key = raw_table.KeyAt(InternalIndex(raw_entry));
// Do not add if we have the key already
if (candidate_key.SameValueZero(raw_key)) return table;
raw_entry = raw_table.NextChainEntryRaw(raw_entry);
}
}
}
......@@ -184,18 +194,20 @@ MaybeHandle<OrderedHashSet> OrderedHashSet::Add(Isolate* isolate,
if (!table_candidate.ToHandle(&table)) {
return table_candidate;
}
DisallowGarbageCollection no_gc;
auto raw_table = *table;
// Read the existing bucket values.
int bucket = table->HashToBucket(hash);
int previous_entry = table->HashToEntryRaw(hash);
int nof = table->NumberOfElements();
int bucket = raw_table.HashToBucket(hash);
int previous_entry = raw_table.HashToEntryRaw(hash);
int nof = raw_table.NumberOfElements();
// Insert a new entry at the end,
int new_entry = nof + table->NumberOfDeletedElements();
int new_index = table->EntryToIndexRaw(new_entry);
table->set(new_index, *key);
table->set(new_index + kChainOffset, Smi::FromInt(previous_entry));
int new_entry = nof + raw_table.NumberOfDeletedElements();
int new_index = raw_table.EntryToIndexRaw(new_entry);
raw_table.set(new_index, *key);
raw_table.set(new_index + kChainOffset, Smi::FromInt(previous_entry));
// and point the bucket to the new entry.
table->set(HashTableStartIndex() + bucket, Smi::FromInt(new_entry));
table->SetNumberOfElements(nof + 1);
raw_table.set(HashTableStartIndex() + bucket, Smi::FromInt(new_entry));
raw_table.SetNumberOfElements(nof + 1);
return table;
}
......@@ -390,19 +402,21 @@ MaybeHandle<OrderedHashMap> OrderedHashMap::Add(Isolate* isolate,
if (!table_candidate.ToHandle(&table)) {
return table_candidate;
}
DisallowGarbageCollection no_gc;
auto raw_table = *table;
// Read the existing bucket values.
int bucket = table->HashToBucket(hash);
int previous_entry = table->HashToEntryRaw(hash);
int nof = table->NumberOfElements();
int bucket = raw_table.HashToBucket(hash);
int previous_entry = raw_table.HashToEntryRaw(hash);
int nof = raw_table.NumberOfElements();
// Insert a new entry at the end,
int new_entry = nof + table->NumberOfDeletedElements();
int new_index = table->EntryToIndexRaw(new_entry);
table->set(new_index, *key);
table->set(new_index + kValueOffset, *value);
table->set(new_index + kChainOffset, Smi::FromInt(previous_entry));
int new_entry = nof + raw_table.NumberOfDeletedElements();
int new_index = raw_table.EntryToIndexRaw(new_entry);
raw_table.set(new_index, *key);
raw_table.set(new_index + kValueOffset, *value);
raw_table.set(new_index + kChainOffset, Smi::FromInt(previous_entry));
// and point the bucket to the new entry.
table->set(HashTableStartIndex() + bucket, Smi::FromInt(new_entry));
table->SetNumberOfElements(nof + 1);
raw_table.set(HashTableStartIndex() + bucket, Smi::FromInt(new_entry));
raw_table.SetNumberOfElements(nof + 1);
return table;
}
......@@ -455,26 +469,28 @@ MaybeHandle<OrderedNameDictionary> OrderedNameDictionary::Add(
if (!table_candidate.ToHandle(&table)) {
return table_candidate;
}
DisallowGarbageCollection no_gc;
auto raw_table = *table;
// Read the existing bucket values.
int hash = key->hash();
int bucket = table->HashToBucket(hash);
int previous_entry = table->HashToEntryRaw(hash);
int nof = table->NumberOfElements();
int bucket = raw_table.HashToBucket(hash);
int previous_entry = raw_table.HashToEntryRaw(hash);
int nof = raw_table.NumberOfElements();
// Insert a new entry at the end,
int new_entry = nof + table->NumberOfDeletedElements();
int new_index = table->EntryToIndexRaw(new_entry);
table->set(new_index, *key);
table->set(new_index + kValueOffset, *value);
int new_entry = nof + raw_table.NumberOfDeletedElements();
int new_index = raw_table.EntryToIndexRaw(new_entry);
raw_table.set(new_index, *key);
raw_table.set(new_index + kValueOffset, *value);
// TODO(gsathya): Optimize how PropertyDetails are stored in this
// dictionary to save memory (by reusing padding?) and performance
// (by not doing the Smi conversion).
table->set(new_index + kPropertyDetailsOffset, details.AsSmi());
raw_table.set(new_index + kPropertyDetailsOffset, details.AsSmi());
table->set(new_index + kChainOffset, Smi::FromInt(previous_entry));
raw_table.set(new_index + kChainOffset, Smi::FromInt(previous_entry));
// and point the bucket to the new entry.
table->set(HashTableStartIndex() + bucket, Smi::FromInt(new_entry));
table->SetNumberOfElements(nof + 1);
raw_table.set(HashTableStartIndex() + bucket, Smi::FromInt(new_entry));
raw_table.SetNumberOfElements(nof + 1);
return table;
}
......@@ -718,22 +734,24 @@ MaybeHandle<SmallOrderedHashSet> SmallOrderedHashSet::Add(
}
}
DisallowGarbageCollection no_gc;
auto raw_table = *table;
int hash = key->GetOrCreateHash(isolate).value();
int nof = table->NumberOfElements();
int nof = raw_table.NumberOfElements();
// Read the existing bucket values.
int bucket = table->HashToBucket(hash);
int previous_entry = table->HashToFirstEntry(hash);
int bucket = raw_table.HashToBucket(hash);
int previous_entry = raw_table.HashToFirstEntry(hash);
// Insert a new entry at the end,
int new_entry = nof + table->NumberOfDeletedElements();
int new_entry = nof + raw_table.NumberOfDeletedElements();
table->SetDataEntry(new_entry, SmallOrderedHashSet::kKeyIndex, *key);
table->SetFirstEntry(bucket, new_entry);
table->SetNextEntry(new_entry, previous_entry);
raw_table.SetDataEntry(new_entry, SmallOrderedHashSet::kKeyIndex, *key);
raw_table.SetFirstEntry(bucket, new_entry);
raw_table.SetNextEntry(new_entry, previous_entry);
// and update book keeping.
table->SetNumberOfElements(nof + 1);
raw_table.SetNumberOfElements(nof + 1);
return table;
}
......@@ -760,24 +778,25 @@ MaybeHandle<SmallOrderedHashMap> SmallOrderedHashMap::Add(
return MaybeHandle<SmallOrderedHashMap>();
}
}
DisallowGarbageCollection no_gc;
auto raw_table = *table;
int hash = key->GetOrCreateHash(isolate).value();
int nof = table->NumberOfElements();
int nof = raw_table.NumberOfElements();
// Read the existing bucket values.
int bucket = table->HashToBucket(hash);
int previous_entry = table->HashToFirstEntry(hash);
int bucket = raw_table.HashToBucket(hash);
int previous_entry = raw_table.HashToFirstEntry(hash);
// Insert a new entry at the end,
int new_entry = nof + table->NumberOfDeletedElements();
int new_entry = nof + raw_table.NumberOfDeletedElements();
table->SetDataEntry(new_entry, SmallOrderedHashMap::kValueIndex, *value);
table->SetDataEntry(new_entry, SmallOrderedHashMap::kKeyIndex, *key);
table->SetFirstEntry(bucket, new_entry);
table->SetNextEntry(new_entry, previous_entry);
raw_table.SetDataEntry(new_entry, SmallOrderedHashMap::kValueIndex, *value);
raw_table.SetDataEntry(new_entry, SmallOrderedHashMap::kKeyIndex, *key);
raw_table.SetFirstEntry(bucket, new_entry);
raw_table.SetNextEntry(new_entry, previous_entry);
// and update book keeping.
table->SetNumberOfElements(nof + 1);
raw_table.SetNumberOfElements(nof + 1);
return table;
}
......
......@@ -501,7 +501,7 @@ Handle<ScopeInfo> ScopeInfo::CreateForBootstrapping(Isolate* isolate,
Factory* factory = isolate->factory();
Handle<ScopeInfo> scope_info =
factory->NewScopeInfo(length, AllocationType::kReadOnly);
DisallowGarbageCollection _nogc;
// Encode the flags.
int flags =
ScopeTypeBits::encode(is_empty_function ? FUNCTION_SCOPE : SCRIPT_SCOPE) |
......@@ -524,18 +524,20 @@ Handle<ScopeInfo> ScopeInfo::CreateForBootstrapping(Isolate* isolate,
PrivateNameLookupSkipsOuterClassBit::encode(false) |
HasContextExtensionSlotBit::encode(is_native_context) |
IsReplModeScopeBit::encode(false) | HasLocalsBlockListBit::encode(false);
scope_info->set_flags(flags);
scope_info->set_parameter_count(parameter_count);
scope_info->set_context_local_count(context_local_count);
auto raw_scope_info = *scope_info;
raw_scope_info.set_flags(flags);
raw_scope_info.set_parameter_count(parameter_count);
raw_scope_info.set_context_local_count(context_local_count);
int index = kVariablePartIndex;
// Here we add info for context-allocated "this".
DCHECK_EQ(index, scope_info->ContextLocalNamesIndex());
DCHECK_EQ(index, raw_scope_info.ContextLocalNamesIndex());
ReadOnlyRoots roots(isolate);
if (context_local_count) {
scope_info->set(index++, ReadOnlyRoots(isolate).this_string());
raw_scope_info.set(index++, roots.this_string());
}
DCHECK_EQ(index, scope_info->ContextLocalInfosIndex());
DCHECK_EQ(index, raw_scope_info.ContextLocalInfosIndex());
if (context_local_count > 0) {
const uint32_t value =
VariableModeBits::encode(VariableMode::kConst) |
......@@ -543,30 +545,30 @@ Handle<ScopeInfo> ScopeInfo::CreateForBootstrapping(Isolate* isolate,
MaybeAssignedFlagBit::encode(kNotAssigned) |
ParameterNumberBits::encode(ParameterNumberBits::kMax) |
IsStaticFlagBit::encode(IsStaticFlag::kNotStatic);
scope_info->set(index++, Smi::FromInt(value));
raw_scope_info.set(index++, Smi::FromInt(value));
}
DCHECK_EQ(index, scope_info->FunctionVariableInfoIndex());
DCHECK_EQ(index, raw_scope_info.FunctionVariableInfoIndex());
if (is_empty_function) {
scope_info->set(index++, *isolate->factory()->empty_string());
scope_info->set(index++, Smi::zero());
raw_scope_info.set(index++, roots.empty_string());
raw_scope_info.set(index++, Smi::zero());
}
DCHECK_EQ(index, scope_info->InferredFunctionNameIndex());
DCHECK_EQ(index, raw_scope_info.InferredFunctionNameIndex());
if (has_inferred_function_name) {
scope_info->set(index++, *isolate->factory()->empty_string());
raw_scope_info.set(index++, roots.empty_string());
}
DCHECK_EQ(index, scope_info->PositionInfoIndex());
DCHECK_EQ(index, raw_scope_info.PositionInfoIndex());
// Store dummy position to be in sync with the {scope_type}.
scope_info->set(index++, Smi::zero());
scope_info->set(index++, Smi::zero());
DCHECK_EQ(index, scope_info->OuterScopeInfoIndex());
DCHECK_EQ(index, scope_info->length());
DCHECK_EQ(scope_info->ParameterCount(), parameter_count);
raw_scope_info.set(index++, Smi::zero());
raw_scope_info.set(index++, Smi::zero());
DCHECK_EQ(index, raw_scope_info.OuterScopeInfoIndex());
DCHECK_EQ(index, raw_scope_info.length());
DCHECK_EQ(raw_scope_info.ParameterCount(), parameter_count);
if (is_empty_function || is_native_context) {
DCHECK_EQ(scope_info->ContextLength(), 0);
DCHECK_EQ(raw_scope_info.ContextLength(), 0);
} else {
DCHECK_EQ(scope_info->ContextLength(),
scope_info->ContextHeaderLength() + 1);
DCHECK_EQ(raw_scope_info.ContextLength(),
raw_scope_info.ContextHeaderLength() + 1);
}
return scope_info;
......@@ -1119,9 +1121,11 @@ Handle<ModuleRequest> ModuleRequest::New(IsolateT* isolate,
int position) {
Handle<ModuleRequest> result = Handle<ModuleRequest>::cast(
isolate->factory()->NewStruct(MODULE_REQUEST_TYPE, AllocationType::kOld));
result->set_specifier(*specifier);
result->set_import_assertions(*import_assertions);
result->set_position(position);
DisallowGarbageCollection no_gc;
auto raw = *result;
raw.set_specifier(*specifier);
raw.set_import_assertions(*import_assertions);
raw.set_position(position);
return result;
}
......@@ -1141,13 +1145,15 @@ Handle<SourceTextModuleInfoEntry> SourceTextModuleInfoEntry::New(
Handle<SourceTextModuleInfoEntry> result =
Handle<SourceTextModuleInfoEntry>::cast(isolate->factory()->NewStruct(
SOURCE_TEXT_MODULE_INFO_ENTRY_TYPE, AllocationType::kOld));
result->set_export_name(*export_name);
result->set_local_name(*local_name);
result->set_import_name(*import_name);
result->set_module_request(module_request);
result->set_cell_index(cell_index);
result->set_beg_pos(beg_pos);
result->set_end_pos(end_pos);
DisallowGarbageCollection no_gc;
auto raw = *result;
raw.set_export_name(*export_name);
raw.set_local_name(*local_name);
raw.set_import_name(*import_name);
raw.set_module_request(module_request);
raw.set_cell_index(cell_index);
raw.set_beg_pos(beg_pos);
raw.set_end_pos(end_pos);
return result;
}
......
......@@ -503,61 +503,65 @@ void SharedFunctionInfo::InitFromFunctionLiteral(
IsolateT* isolate, Handle<SharedFunctionInfo> shared_info,
FunctionLiteral* lit, bool is_toplevel) {
DCHECK(!shared_info->name_or_scope_info(kAcquireLoad).IsScopeInfo());
// When adding fields here, make sure DeclarationScope::AnalyzePartially is
// updated accordingly.
shared_info->set_internal_formal_parameter_count(
JSParameterCount(lit->parameter_count()));
shared_info->SetFunctionTokenPosition(lit->function_token_position(),
lit->start_position());
shared_info->set_syntax_kind(lit->syntax_kind());
shared_info->set_allows_lazy_compilation(lit->AllowsLazyCompilation());
shared_info->set_language_mode(lit->language_mode());
shared_info->set_function_literal_id(lit->function_literal_id());
// FunctionKind must have already been set.
DCHECK(lit->kind() == shared_info->kind());
DCHECK_IMPLIES(lit->requires_instance_members_initializer(),
IsClassConstructor(lit->kind()));
shared_info->set_requires_instance_members_initializer(
lit->requires_instance_members_initializer());
DCHECK_IMPLIES(lit->class_scope_has_private_brand(),
IsClassConstructor(lit->kind()));
shared_info->set_class_scope_has_private_brand(
lit->class_scope_has_private_brand());
DCHECK_IMPLIES(lit->has_static_private_methods_or_accessors(),
IsClassConstructor(lit->kind()));
shared_info->set_has_static_private_methods_or_accessors(
lit->has_static_private_methods_or_accessors());
shared_info->set_is_toplevel(is_toplevel);
DCHECK(shared_info->outer_scope_info().IsTheHole());
if (!is_toplevel) {
Scope* outer_scope = lit->scope()->GetOuterScopeWithContext();
if (outer_scope) {
shared_info->set_outer_scope_info(*outer_scope->scope_info());
shared_info->set_private_name_lookup_skips_outer_class(
lit->scope()->private_name_lookup_skips_outer_class());
{
DisallowGarbageCollection no_gc;
auto raw_sfi = *shared_info;
// When adding fields here, make sure DeclarationScope::AnalyzePartially is
// updated accordingly.
raw_sfi.set_internal_formal_parameter_count(
JSParameterCount(lit->parameter_count()));
raw_sfi.SetFunctionTokenPosition(lit->function_token_position(),
lit->start_position());
raw_sfi.set_syntax_kind(lit->syntax_kind());
raw_sfi.set_allows_lazy_compilation(lit->AllowsLazyCompilation());
raw_sfi.set_language_mode(lit->language_mode());
raw_sfi.set_function_literal_id(lit->function_literal_id());
// FunctionKind must have already been set.
DCHECK(lit->kind() == raw_sfi.kind());
DCHECK_IMPLIES(lit->requires_instance_members_initializer(),
IsClassConstructor(lit->kind()));
raw_sfi.set_requires_instance_members_initializer(
lit->requires_instance_members_initializer());
DCHECK_IMPLIES(lit->class_scope_has_private_brand(),
IsClassConstructor(lit->kind()));
raw_sfi.set_class_scope_has_private_brand(
lit->class_scope_has_private_brand());
DCHECK_IMPLIES(lit->has_static_private_methods_or_accessors(),
IsClassConstructor(lit->kind()));
raw_sfi.set_has_static_private_methods_or_accessors(
lit->has_static_private_methods_or_accessors());
raw_sfi.set_is_toplevel(is_toplevel);
DCHECK(raw_sfi.outer_scope_info().IsTheHole());
if (!is_toplevel) {
Scope* outer_scope = lit->scope()->GetOuterScopeWithContext();
if (outer_scope) {
raw_sfi.set_outer_scope_info(*outer_scope->scope_info());
raw_sfi.set_private_name_lookup_skips_outer_class(
lit->scope()->private_name_lookup_skips_outer_class());
}
}
}
shared_info->set_length(lit->function_length());
// For lazy parsed functions, the following flags will be inaccurate since we
// don't have the information yet. They're set later in
// UpdateSharedFunctionFlagsAfterCompilation (compiler.cc), when the function
// is really parsed and compiled.
if (lit->ShouldEagerCompile()) {
shared_info->set_has_duplicate_parameters(lit->has_duplicate_parameters());
shared_info->UpdateAndFinalizeExpectedNofPropertiesFromEstimate(lit);
DCHECK_NULL(lit->produced_preparse_data());
raw_sfi.set_length(lit->function_length());
// For lazy parsed functions, the following flags will be inaccurate since
// we don't have the information yet. They're set later in
// UpdateSharedFunctionFlagsAfterCompilation (compiler.cc), when the
// function is really parsed and compiled.
if (lit->ShouldEagerCompile()) {
raw_sfi.set_has_duplicate_parameters(lit->has_duplicate_parameters());
raw_sfi.UpdateAndFinalizeExpectedNofPropertiesFromEstimate(lit);
DCHECK_NULL(lit->produced_preparse_data());
// If we're about to eager compile, we'll have the function literal
// available, so there's no need to wastefully allocate an uncompiled
// data.
return;
}
// If we're about to eager compile, we'll have the function literal
// available, so there's no need to wastefully allocate an uncompiled data.
return;
raw_sfi.UpdateExpectedNofPropertiesFromEstimate(lit);
}
shared_info->UpdateExpectedNofPropertiesFromEstimate(lit);
Handle<UncompiledData> data;
ProducedPreparseData* scope_data = lit->produced_preparse_data();
......
......@@ -1210,14 +1210,16 @@ void SourceTextModule::Reset(Isolate* isolate,
Handle<FixedArray> requested_modules =
factory->NewFixedArray(module->requested_modules().length());
if (module->status() == kLinking) {
module->set_code(JSFunction::cast(module->code()).shared());
DisallowGarbageCollection no_gc;
auto raw_module = *module;
if (raw_module.status() == kLinking) {
raw_module.set_code(JSFunction::cast(raw_module.code()).shared());
}
module->set_regular_exports(*regular_exports);
module->set_regular_imports(*regular_imports);
module->set_requested_modules(*requested_modules);
module->set_dfs_index(-1);
module->set_dfs_ancestor_index(-1);
raw_module.set_regular_exports(*regular_exports);
raw_module.set_regular_imports(*regular_imports);
raw_module.set_requested_modules(*requested_modules);
raw_module.set_dfs_index(-1);
raw_module.set_dfs_ancestor_index(-1);
}
std::vector<std::tuple<Handle<SourceTextModule>, Handle<JSMessageObject>>>
......
......@@ -89,8 +89,12 @@ Handle<String> String::SlowFlatten(Isolate* isolate, Handle<ConsString> cons,
WriteToFlat(*cons, flat->GetChars(no_gc), 0, length);
result = flat;
}
cons->set_first(*result);
cons->set_second(ReadOnlyRoots(isolate).empty_string());
{
DisallowGarbageCollection no_gc;
auto raw_cons = *cons;
raw_cons.set_first(*result);
raw_cons.set_second(ReadOnlyRoots(isolate).empty_string());
}
DCHECK(result->IsFlat());
return result;
}
......
......@@ -512,15 +512,16 @@ Handle<SwissNameDictionary> SwissNameDictionary::Add(
DCHECK(original_table->FindEntry(isolate, *key).is_not_found());
Handle<SwissNameDictionary> table = EnsureGrowable(isolate, original_table);
int nof = table->NumberOfElements();
int nod = table->NumberOfDeletedElements();
DisallowGarbageCollection no_gc;
auto raw_table = *table;
int nof = raw_table.NumberOfElements();
int nod = raw_table.NumberOfDeletedElements();
int new_enum_index = nof + nod;
int new_entry = table->AddInternal(*key, *value, details);
int new_entry = raw_table.AddInternal(*key, *value, details);
table->SetNumberOfElements(nof + 1);
table->SetEntryForEnumerationIndex(new_enum_index, new_entry);
raw_table.SetNumberOfElements(nof + 1);
raw_table.SetEntryForEnumerationIndex(new_enum_index, new_entry);
if (entry_out) {
*entry_out = InternalIndex(new_entry);
......
......@@ -44,16 +44,19 @@ Handle<SharedFunctionInfo> FunctionTemplateInfo::GetOrCreateSharedFunctionInfo(
} else {
function_kind = FunctionKind::kNormalFunction;
}
Handle<SharedFunctionInfo> result =
Handle<SharedFunctionInfo> sfi =
isolate->factory()->NewSharedFunctionInfoForApiFunction(name_string, info,
function_kind);
result->set_length(info->length());
result->DontAdaptArguments();
DCHECK(result->IsApiFunction());
info->set_shared_function_info(*result);
return result;
{
DisallowGarbageCollection no_gc;
auto raw_sfi = *sfi;
auto raw_template = *info;
raw_sfi.set_length(raw_template.length());
raw_sfi.DontAdaptArguments();
DCHECK(raw_sfi.IsApiFunction());
raw_template.set_shared_function_info(raw_sfi);
}
return sfi;
}
bool FunctionTemplateInfo::IsTemplateFor(Map map) const {
......
......@@ -256,28 +256,31 @@ Handle<Script> ParseInfo::CreateScript(
flags().script_id() == Script::kTemporaryScriptId);
Handle<Script> script =
isolate->factory()->NewScriptWithId(source, flags().script_id());
DisallowGarbageCollection no_gc;
auto raw_script = *script;
switch (natives) {
case EXTENSION_CODE:
script->set_type(Script::TYPE_EXTENSION);
raw_script.set_type(Script::TYPE_EXTENSION);
break;
case INSPECTOR_CODE:
script->set_type(Script::TYPE_INSPECTOR);
raw_script.set_type(Script::TYPE_INSPECTOR);
break;
case NOT_NATIVES_CODE:
break;
}
script->set_origin_options(origin_options);
script->set_is_repl_mode(flags().is_repl_mode());
raw_script.set_origin_options(origin_options);
raw_script.set_is_repl_mode(flags().is_repl_mode());
DCHECK_EQ(is_wrapped_as_function(), !maybe_wrapped_arguments.is_null());
if (is_wrapped_as_function()) {
script->set_wrapped_arguments(*maybe_wrapped_arguments.ToHandleChecked());
raw_script.set_wrapped_arguments(
*maybe_wrapped_arguments.ToHandleChecked());
} else if (flags().is_eval()) {
script->set_compilation_type(Script::COMPILATION_TYPE_EVAL);
raw_script.set_compilation_type(Script::COMPILATION_TYPE_EVAL);
}
CheckFlagsForToplevelCompileFromScript(*script,
CheckFlagsForToplevelCompileFromScript(raw_script,
isolate->is_collecting_type_profile());
return script;
}
......
......@@ -58,16 +58,18 @@ RUNTIME_FUNCTION(Runtime_CreateJSGeneratorObject) {
Handle<JSGeneratorObject> generator =
isolate->factory()->NewJSGeneratorObject(function);
generator->set_function(*function);
generator->set_context(isolate->context());
generator->set_receiver(*receiver);
generator->set_parameters_and_registers(*parameters_and_registers);
generator->set_resume_mode(JSGeneratorObject::ResumeMode::kNext);
generator->set_continuation(JSGeneratorObject::kGeneratorExecuting);
if (generator->IsJSAsyncGeneratorObject()) {
Handle<JSAsyncGeneratorObject>::cast(generator)->set_is_awaiting(0);
DisallowGarbageCollection no_gc;
auto raw_generator = *generator;
raw_generator.set_function(*function);
raw_generator.set_context(isolate->context());
raw_generator.set_receiver(*receiver);
raw_generator.set_parameters_and_registers(*parameters_and_registers);
raw_generator.set_resume_mode(JSGeneratorObject::ResumeMode::kNext);
raw_generator.set_continuation(JSGeneratorObject::kGeneratorExecuting);
if (raw_generator.IsJSAsyncGeneratorObject()) {
JSAsyncGeneratorObject::cast(raw_generator).set_is_awaiting(0);
}
return *generator;
return raw_generator;
}
RUNTIME_FUNCTION(Runtime_GeneratorClose) {
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment