Commit 8f31a046 authored by Nikolaos Papaspyrou, committed by V8 LUCI CQ

cleanup: Fix some typos, mostly

Mostly in comments, again; not much to be said.
Also one case of UNREACHABLE with a redundant return.

Bug: v8:12425
Change-Id: I295db355c4794e4205b9b70ebbf51e019ec14060
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3695265
Reviewed-by: Marja Hölttä <marja@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Nikolaos Papaspyrou <nikolaos@chromium.org>
Cr-Commit-Position: refs/heads/main@{#81240}
parent 096e6cf9
@@ -255,7 +255,7 @@ Scope::Scope(Zone* zone, ScopeType scope_type,
must_use_preparsed_scope_data_ = true;
if (scope_type == BLOCK_SCOPE) {
- // Set is_block_scope_for_object_literal_ based on the existince of the home
+ // Set is_block_scope_for_object_literal_ based on the existence of the home
// object variable (we don't store it explicitly).
DCHECK_NOT_NULL(ast_value_factory);
int home_object_index = scope_info->ContextSlotIndex(
@@ -2297,7 +2297,7 @@ void UpdateNeedsHoleCheck(Variable* var, VariableProxy* proxy, Scope* scope) {
// Dynamically introduced variables never need a hole check (since they're
// VariableMode::kVar bindings, either from var or function declarations),
// but the variable they shadow might need a hole check, which we want to do
- // if we decide that no shadowing variable was dynamically introoduced.
+ // if we decide that no shadowing variable was dynamically introduced.
DCHECK_EQ(kCreatedInitialized, var->initialization_flag());
return UpdateNeedsHoleCheck(var->local_if_not_shadowed(), proxy, scope);
}
@@ -2798,7 +2798,7 @@ bool IsComplementaryAccessorPair(VariableMode a, VariableMode b) {
void ClassScope::FinalizeReparsedClassScope(
Isolate* isolate, MaybeHandle<ScopeInfo> maybe_scope_info,
AstValueFactory* ast_value_factory, bool needs_allocation_fixup) {
- // Set this bit so that DelcarationScope::Analyze recognizes
+ // Set this bit so that DeclarationScope::Analyze recognizes
// the reparsed instance member initializer scope.
#ifdef DEBUG
is_reparsed_class_scope_ = true;
@@ -2949,7 +2949,7 @@ Variable* ClassScope::LookupPrivateName(VariableProxy* proxy) {
scope_iter.Next()) {
ClassScope* scope = scope_iter.GetScope();
// Try finding it in the private name map first, if it can't be found,
- // try the deseralized scope info.
+ // try the deserialized scope info.
Variable* var = scope->LookupLocalPrivateName(proxy->raw_name());
if (var == nullptr && !scope->scope_info_.is_null()) {
var = scope->LookupPrivateNameInScopeInfo(proxy->raw_name());
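The lookup order this hunk's comment describes — in-memory private name map first, then the scope info deserialized from the snapshot — is a plain two-level lookup. A minimal sketch of the idea, with placeholder types rather than V8's actual classes:

    #include <map>
    #include <string>

    struct Variable {};  // placeholder for v8::internal::Variable

    // Sketch of the two-step lookup described above: consult the in-memory
    // private-name map first; only on a miss, fall back to the names that
    // were deserialized from ScopeInfo.
    struct ClassScopeSketch {
      std::map<std::string, Variable*> private_name_map;
      std::map<std::string, Variable*> deserialized_names;  // from ScopeInfo

      Variable* LookupPrivateName(const std::string& name) {
        auto it = private_name_map.find(name);
        if (it != private_name_map.end()) return it->second;
        auto fallback = deserialized_names.find(name);
        return fallback != deserialized_names.end() ? fallback->second : nullptr;
      }
    };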
@@ -3041,7 +3041,7 @@ VariableProxy* ClassScope::ResolvePrivateNamesPartially() {
}
// The private name may be found later in the outer private name scope, so
- // push it to the outer sopce.
+ // push it to the outer scope.
private_name_scope_iter.AddUnresolvedPrivateName(proxy);
}
@@ -76,8 +76,8 @@ void ReadOnlyHeap::SetUp(Isolate* isolate,
artifacts = InitializeSharedReadOnlyArtifacts();
artifacts->InitializeChecksum(read_only_snapshot_data);
ro_heap = CreateInitalHeapForBootstrapping(isolate, artifacts);
- ro_heap->DeseralizeIntoIsolate(isolate, read_only_snapshot_data,
-                                can_rehash);
+ ro_heap->DeserializeIntoIsolate(isolate, read_only_snapshot_data,
+                                 can_rehash);
read_only_heap_created = true;
} else {
// With pointer compression, there is one ReadOnlyHeap per Isolate.
@@ -104,15 +104,15 @@ void ReadOnlyHeap::SetUp(Isolate* isolate,
auto* ro_heap = new ReadOnlyHeap(new ReadOnlySpace(isolate->heap()));
isolate->SetUpFromReadOnlyArtifacts(nullptr, ro_heap);
if (read_only_snapshot_data != nullptr) {
- ro_heap->DeseralizeIntoIsolate(isolate, read_only_snapshot_data,
-                                can_rehash);
+ ro_heap->DeserializeIntoIsolate(isolate, read_only_snapshot_data,
+                                 can_rehash);
}
}
}
- void ReadOnlyHeap::DeseralizeIntoIsolate(Isolate* isolate,
-                                          SnapshotData* read_only_snapshot_data,
-                                          bool can_rehash) {
+ void ReadOnlyHeap::DeserializeIntoIsolate(Isolate* isolate,
+                                           SnapshotData* read_only_snapshot_data,
+                                           bool can_rehash) {
DCHECK_NOT_NULL(read_only_snapshot_data);
ReadOnlyDeserializer des(isolate, read_only_snapshot_data, can_rehash);
des.DeserializeIntoIsolate();
@@ -106,9 +106,9 @@ class ReadOnlyHeap {
Isolate* isolate, std::shared_ptr<ReadOnlyArtifacts> artifacts);
// Runs the read-only deserializer and calls InitFromIsolate to complete
// read-only heap initialization.
- void DeseralizeIntoIsolate(Isolate* isolate,
-                            SnapshotData* read_only_snapshot_data,
-                            bool can_rehash);
+ void DeserializeIntoIsolate(Isolate* isolate,
+                             SnapshotData* read_only_snapshot_data,
+                             bool can_rehash);
// Initializes read-only heap from an already set-up isolate, copying
// read-only roots from the isolate. This then seals the space off from
// further writes, marks it as read-only and detaches it from the heap
@@ -64,7 +64,7 @@ void ReadOnlyArtifacts::VerifyChecksum(SnapshotData* read_only_snapshot_data,
// supported. As a result, it's possible that it will create a new
// read-only snapshot that is not compatible with the original one (for
// instance due to the string table being re-ordered). Since we won't
- // acutally use that new Isoalte, we're ok with any potential corruption.
+ // actually use that new Isolate, we're ok with any potential corruption.
// See crbug.com/1043058.
CHECK_EQ(read_only_blob_checksum_, snapshot_checksum);
}
@@ -140,7 +140,7 @@ size_t Page::ShrinkToHighWaterMark() {
// Ensure that no objects will be allocated on this page.
DCHECK_EQ(0u, AvailableInFreeList());
- // Ensure that slot sets are empty. Otherwise the buckets for the shrinked
+ // Ensure that slot sets are empty. Otherwise the buckets for the shrunk
// area would not be freed when deallocating this page.
DCHECK_NULL(slot_set<OLD_TO_NEW>());
DCHECK_NULL(slot_set<OLD_TO_OLD>());
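For context, "shrink to high water mark" releases the never-used tail of a page back to the allocator, which is why any slot-set buckets covering that tail must already be empty. A rough sketch of the idea, ignoring the alignment and rounding the real implementation needs (placeholder names, not V8's types):

    #include <cstddef>
    #include <cstdint>

    // Sketch: the unused tail of a page lies between the highest address
    // ever allocated on it (the high water mark) and the page's end; the
    // shrink operation gives that tail back and reports its size.
    struct PageSketch {
      uintptr_t high_water_mark;  // highest allocated address on this page
      uintptr_t area_end;         // current end of the usable area

      size_t ShrinkToHighWaterMark() {
        size_t unused = area_end - high_water_mark;
        area_end = high_water_mark;  // tail can now be unmapped/freed
        return unused;
      }
    };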
@@ -63,10 +63,10 @@ class SemiSpace;
// collection. The large object space is paged. Pages in large object space
// may be larger than the page size.
//
- // A remembered set is used to keep track of intergenerational references.
+ // A remembered set is used to keep track of inter-generational references.
//
// During scavenges and mark-sweep collections we sometimes (after a store
- // buffer overflow) iterate intergenerational pointers without decoding heap
+ // buffer overflow) iterate inter-generational pointers without decoding heap
// object maps so if the page belongs to old space or large object space
// it is essential to guarantee that the page does not contain any
// garbage pointers to new space: every pointer aligned word which satisfies
@@ -81,7 +81,7 @@ class SemiSpace;
// sections are skipped when scanning the page, even if we are otherwise
// scanning without regard for object boundaries. Garbage sections are chained
// together to form a free list after a GC. Garbage sections created outside
- // of GCs by object trunctation etc. may not be in the free list chain. Very
+ // of GCs by object truncation etc. may not be in the free list chain. Very
// small free spaces are ignored, they need only be cleaned of bogus pointers
// into new space.
//
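The remembered set mentioned in these comments is the standard generational-GC device: stores that create old-to-new pointers record the slot, so a scavenge can visit just those slots instead of scanning all of old space. A minimal sketch of that bookkeeping, with illustrative names rather than V8's actual types:

    #include <cstdint>
    #include <set>

    // Sketch of a remembered set: a write barrier records the address of
    // every old-space slot that points into new space; the scavenger then
    // iterates exactly those slots as additional roots.
    struct RememberedSetSketch {
      std::set<uintptr_t> slots;  // old-space slot addresses

      void RecordSlot(uintptr_t slot_address) { slots.insert(slot_address); }

      template <typename Callback>
      void IterateSlots(Callback&& visit) {
        for (uintptr_t slot : slots) visit(slot);
      }
    };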
@@ -184,10 +184,7 @@ class V8_EXPORT_PRIVATE Space : public BaseSpace {
heap::List<MemoryChunk>& memory_chunk_list() { return memory_chunk_list_; }
- virtual Page* InitializePage(MemoryChunk* chunk) {
-   UNREACHABLE();
-   return nullptr;
- }
+ virtual Page* InitializePage(MemoryChunk* chunk) { UNREACHABLE(); }
FreeList* free_list() { return free_list_.get(); }
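This hunk is the "one case of UNREACHABLE with return" from the commit message: V8's UNREACHABLE() terminates the process and never returns, so the trailing return nullptr; was dead code. A minimal sketch of the pattern, using a stand-in rather than the real macro:

    #include <cstdlib>

    // Stand-in for V8's UNREACHABLE(): since the function is [[noreturn]],
    // the compiler knows control never continues past the call, so a
    // non-void function may end with it and needs no dummy return value.
    [[noreturn]] inline void Unreachable() { std::abort(); }

    struct MemoryChunk;
    struct Page;

    struct SpaceSketch {
      virtual ~SpaceSketch() = default;
      // Base-class stub; subclasses that actually manage pages override it.
      virtual Page* InitializePage(MemoryChunk*) { Unreachable(); }
    };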
@@ -602,7 +599,7 @@ class SpaceWithLinearArea : public Space {
V8_EXPORT_PRIVATE void UpdateAllocationOrigins(AllocationOrigin origin);
// Allocates an object from the linear allocation area. Assumes that the
- // linear allocation area is large enought to fit the object.
+ // linear allocation area is large enough to fit the object.
V8_WARN_UNUSED_RESULT V8_INLINE AllocationResult
AllocateFastUnaligned(int size_in_bytes, AllocationOrigin origin);
// Tries to allocate an aligned object from the linear allocation area.
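The fast path this comment documents is classic bump-pointer allocation from a linear area. A rough sketch under the same precondition — the caller has already verified the object fits — with illustrative names rather than V8's actual ones:

    #include <cstddef>
    #include <cstdint>

    // Sketch of allocation from a linear allocation area: carve the object
    // off at `top` and bump the pointer. Assumes, as the comment above does,
    // that the caller checked top + size_in_bytes <= limit beforehand.
    struct LinearAreaSketch {
      uintptr_t top;    // next free address
      uintptr_t limit;  // end of the current linear allocation area

      void* AllocateFastUnaligned(size_t size_in_bytes) {
        void* result = reinterpret_cast<void*>(top);
        top += size_in_bytes;
        return result;
      }
    };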
@@ -20,7 +20,7 @@ class StressScavengeObserver : public AllocationObserver {
void RequestedGCDone();
// The maximum percent of the newspace capacity reached. This is tracked when
- // specyfing --fuzzer-gc-analysis.
+ // specifying --fuzzer-gc-analysis.
double MaxNewSpaceSizeReached() const;
private: