// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/factory.h"

#include <algorithm>  // For copy
#include <memory>     // For shared_ptr<>
#include <string>
#include <utility>  // For move

#include "src/ast/ast-source-ranges.h"
#include "src/base/bits.h"
#include "src/builtins/accessors.h"
#include "src/builtins/constants-table-builder.h"
#include "src/codegen/compilation-cache.h"
#include "src/codegen/compiler.h"
#include "src/common/assert-scope.h"
#include "src/common/globals.h"
#include "src/diagnostics/basic-block-profiler.h"
#include "src/execution/isolate-inl.h"
#include "src/execution/protectors-inl.h"
#include "src/heap/basic-memory-chunk.h"
#include "src/heap/heap-inl.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/mark-compact-inl.h"
#include "src/heap/memory-chunk.h"
#include "src/heap/read-only-heap.h"
#include "src/ic/handler-configuration-inl.h"
#include "src/init/bootstrapper.h"
#include "src/interpreter/interpreter.h"
#include "src/logging/counters.h"
#include "src/logging/log.h"
#include "src/numbers/conversions.h"
#include "src/numbers/hash-seed-inl.h"
#include "src/objects/allocation-site-inl.h"
#include "src/objects/allocation-site-scopes.h"
#include "src/objects/api-callbacks.h"
#include "src/objects/arguments-inl.h"
#include "src/objects/bigint.h"
#include "src/objects/cell-inl.h"
#include "src/objects/debug-objects-inl.h"
#include "src/objects/embedder-data-array-inl.h"
#include "src/objects/feedback-cell-inl.h"
#include "src/objects/fixed-array-inl.h"
#include "src/objects/foreign-inl.h"
#include "src/objects/instance-type-inl.h"
#include "src/objects/js-array-buffer-inl.h"
#include "src/objects/js-array-inl.h"
#include "src/objects/js-collection-inl.h"
#include "src/objects/js-generator-inl.h"
#include "src/objects/js-objects.h"
#include "src/objects/js-regexp-inl.h"
#include "src/objects/js-weak-refs-inl.h"
#include "src/objects/literal-objects-inl.h"
#include "src/objects/megadom-handler-inl.h"
#include "src/objects/microtask-inl.h"
#include "src/objects/module-inl.h"
#include "src/objects/promise-inl.h"
#include "src/objects/property-descriptor-object-inl.h"
#include "src/objects/scope-info.h"
#include "src/objects/stack-frame-info-inl.h"
#include "src/objects/string-set-inl.h"
#include "src/objects/struct-inl.h"
#include "src/objects/synthetic-module-inl.h"
#include "src/objects/template-objects-inl.h"
#include "src/objects/transitions-inl.h"
#include "src/roots/roots.h"
#include "src/strings/unicode-inl.h"
#if V8_ENABLE_WEBASSEMBLY
#include "src/wasm/wasm-value.h"
#endif

namespace v8 {
namespace internal {

Factory::CodeBuilder::CodeBuilder(Isolate* isolate, const CodeDesc& desc,
                                  CodeKind kind)
    : isolate_(isolate),
      code_desc_(desc),
      kind_(kind),
      position_table_(isolate_->factory()->empty_byte_array()) {}

MaybeHandle<Code> Factory::CodeBuilder::BuildInternal(
    bool retry_allocation_or_fail) {
  const auto factory = isolate_->factory();
  // Allocate objects needed for code initialization.
  Handle<ByteArray> reloc_info =
      factory->NewByteArray(code_desc_.reloc_size, AllocationType::kOld);
  Handle<CodeDataContainer> data_container;

  // Use a canonical off-heap trampoline CodeDataContainer if possible.
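  // Sketch of the idea: builtins whose kind-specific flags are either empty
  // or exactly the promise-rejection bit can share one of two canonical,
  // read-only CodeDataContainer roots, so no per-builtin container has to be
  // allocated for them; everything else gets its own mutable container below.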
  const int32_t promise_rejection_flag =
      Code::IsPromiseRejectionField::encode(true);
  if (read_only_data_container_ &&
      (kind_specific_flags_ == 0 ||
       kind_specific_flags_ == promise_rejection_flag)) {
    const ReadOnlyRoots roots(isolate_);
    const auto canonical_code_data_container = Handle<CodeDataContainer>::cast(
        kind_specific_flags_ == 0
            ? roots.trampoline_trivial_code_data_container_handle()
            : roots.trampoline_promise_rejection_code_data_container_handle());
    DCHECK_EQ(canonical_code_data_container->kind_specific_flags(),
              kind_specific_flags_);
    data_container = canonical_code_data_container;
  } else {
    data_container = factory->NewCodeDataContainer(
        0, read_only_data_container_ ? AllocationType::kReadOnly
                                     : AllocationType::kOld);
    data_container->set_kind_specific_flags(kind_specific_flags_);
  }

  // Basic block profiling data for builtins is stored in the JS heap rather
  // than in separately-allocated C++ objects. Allocate that data now if
  // appropriate.
  Handle<OnHeapBasicBlockProfilerData> on_heap_profiler_data;
  if (profiler_data_ && isolate_->IsGeneratingEmbeddedBuiltins()) {
    on_heap_profiler_data = profiler_data_->CopyToJSHeap(isolate_);

    // Add the on-heap data to a global list, which keeps it alive and allows
    // iteration.
    Handle<ArrayList> list(isolate_->heap()->basic_block_profiling_data(),
                           isolate_);
    Handle<ArrayList> new_list =
        ArrayList::Add(isolate_, list, on_heap_profiler_data);
    isolate_->heap()->SetBasicBlockProfilingData(new_list);
  }

  STATIC_ASSERT(Code::kOnHeapBodyIsContiguous);

  Heap* heap = isolate_->heap();
  CodePageCollectionMemoryModificationScope code_allocation(heap);

  Handle<Code> code;
  bool code_is_on_heap = code_desc_.origin && code_desc_.origin->IsOnHeap();
  if (code_is_on_heap) {
    DCHECK(FLAG_sparkplug_on_heap);
    DCHECK_EQ(kind_, CodeKind::BASELINE);
    code = code_desc_.origin->code().ToHandleChecked();
  } else {
    if (!AllocateCode(retry_allocation_or_fail).ToHandle(&code)) {
      return MaybeHandle<Code>();
    }
  }

  {
    Code raw_code = *code;
    constexpr bool kIsNotOffHeapTrampoline = false;
    DisallowGarbageCollection no_gc;

    if (code_is_on_heap) {
      heap->NotifyCodeObjectChangeStart(raw_code, no_gc);
    }

    raw_code.set_raw_instruction_size(code_desc_.instruction_size());
    raw_code.set_raw_metadata_size(code_desc_.metadata_size());
    raw_code.initialize_flags(kind_, is_turbofanned_, stack_slots_,
                              kIsNotOffHeapTrampoline);
    raw_code.set_builtin_id(builtin_);
    // This might impact direct concurrent reads from TF if we are resetting
    // this field. We currently assume it's immutable thus a relaxed read (after
    // passing IsPendingAllocation).
    raw_code.set_inlined_bytecode_size(inlined_bytecode_size_);
    raw_code.set_code_data_container(*data_container, kReleaseStore);
    raw_code.set_deoptimization_data(*deoptimization_data_);
    if (kind_ == CodeKind::BASELINE) {
      raw_code.set_bytecode_offset_table(*position_table_);
    } else {
      raw_code.set_source_position_table(*position_table_);
    }
    raw_code.set_handler_table_offset(
        code_desc_.handler_table_offset_relative());
    raw_code.set_constant_pool_offset(
        code_desc_.constant_pool_offset_relative());
    raw_code.set_code_comments_offset(
        code_desc_.code_comments_offset_relative());
    raw_code.set_unwinding_info_offset(
        code_desc_.unwinding_info_offset_relative());

    // Allow self references to created code object by patching the handle to
    // point to the newly allocated Code object.
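    // Illustrative caller pattern (assumed, not shown in this file): code
    // generators emit loads of a self-reference marker oddball and pass it in
    // via something like
    //   CodeBuilder(isolate, desc, kind)
    //       .set_self_reference(masm.CodeObject())
    //       .Build();
    // so the embedded slots can be patched here to the finished Code object.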
    Handle<Object> self_reference;
    if (self_reference_.ToHandle(&self_reference)) {
      DCHECK(self_reference->IsOddball());
      DCHECK_EQ(Oddball::cast(*self_reference).kind(),
                Oddball::kSelfReferenceMarker);
      DCHECK_NE(kind_, CodeKind::BASELINE);
      if (isolate_->IsGeneratingEmbeddedBuiltins()) {
        isolate_->builtins_constants_table_builder()->PatchSelfReference(
            self_reference, code);
      }
      self_reference.PatchValue(*code);
    }

    // Likewise, any references to the basic block counters marker need to be
    // updated to point to the newly-allocated counters array.
    if (!on_heap_profiler_data.is_null()) {
      isolate_->builtins_constants_table_builder()
          ->PatchBasicBlockCountersReference(
              handle(on_heap_profiler_data->counts(), isolate_));
    }

    if (code_is_on_heap) {
      FinalizeOnHeapCode(code, *reloc_info);
    } else {
      // Migrate generated code.
      // The generated code can contain embedded objects (typically from
      // handles) in a pointer-to-tagged-value format (i.e. with indirection
      // like a handle) that are dereferenced during the copy to point directly
      // to the actual heap objects. These pointers can include references to
      // the code object itself, through the self_reference parameter.
      raw_code.CopyFromNoFlush(*reloc_info, heap, code_desc_);
    }

    raw_code.clear_padding();

    if (code_is_on_heap) {
      raw_code.set_relocation_info(*reloc_info, kReleaseStore);
      // Now that object is properly initialized, the GC needs to revisit this
      // object if marking is on.
      heap->NotifyCodeObjectChangeEnd(raw_code, no_gc);
    } else {
      raw_code.set_relocation_info(*reloc_info);
    }

    if (V8_EXTERNAL_CODE_SPACE_BOOL) {
      data_container->SetCodeAndEntryPoint(isolate_, raw_code);
    }

#ifdef VERIFY_HEAP
    if (FLAG_verify_heap) raw_code.ObjectVerify(isolate_);
#endif

    // Flush the instruction cache before changing the permissions.
    // Note: we do this before setting permissions to ReadExecute because on
    // some older ARM kernels there is a bug which causes an access error on
    // cache flush instructions to trigger access error on non-writable memory.
    // See https://bugs.chromium.org/p/v8/issues/detail?id=8157
    raw_code.FlushICache();
  }

  if (profiler_data_ && FLAG_turbo_profiling_verbose) {
#ifdef ENABLE_DISASSEMBLER
    std::ostringstream os;
    code->Disassemble(nullptr, os, isolate_);
    if (!on_heap_profiler_data.is_null()) {
      Handle<String> disassembly =
          isolate_->factory()->NewStringFromAsciiChecked(
              os.str().c_str(), AllocationType::kOld);
      on_heap_profiler_data->set_code(*disassembly);
    } else {
      profiler_data_->SetCode(os);
    }
#endif  // ENABLE_DISASSEMBLER
  }

  return code;
}

MaybeHandle<Code> Factory::CodeBuilder::AllocateCode(
    bool retry_allocation_or_fail) {
  Heap* heap = isolate_->heap();
  HeapObject result;
  AllocationType allocation_type =
      V8_EXTERNAL_CODE_SPACE_BOOL || is_executable_ ? AllocationType::kCode
                                                    : AllocationType::kReadOnly;
  const int object_size = Code::SizeFor(code_desc_.body_size());
  if (retry_allocation_or_fail) {
    result = heap->AllocateRawWith<Heap::kRetryOrFail>(
        object_size, allocation_type, AllocationOrigin::kRuntime);
  } else {
    result = heap->AllocateRawWith<Heap::kLightRetry>(
        object_size, allocation_type, AllocationOrigin::kRuntime);
    // Return an empty handle if we cannot allocate the code object.
    if (result.is_null()) return MaybeHandle<Code>();
  }

  // The code object has not been fully initialized yet. We rely on the
  // fact that no allocation will happen from this point on.
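  // Installing the map with SKIP_WRITE_BARRIER below is safe because the code
  // map is a read-only root; the DisallowGarbageCollection scope covers the
  // window in which the object is only partially initialized.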
  DisallowGarbageCollection no_gc;
  result.set_map_after_allocation(*isolate_->factory()->code_map(),
                                  SKIP_WRITE_BARRIER);
  Handle<Code> code = handle(Code::cast(result), isolate_);
  if (is_executable_) {
    DCHECK(IsAligned(code->address(), kCodeAlignment));
    DCHECK_IMPLIES(
        !V8_ENABLE_THIRD_PARTY_HEAP_BOOL && !heap->code_region().is_empty(),
        heap->code_region().contains(code->address()));
  }
  return code;
}

void Factory::CodeBuilder::FinalizeOnHeapCode(Handle<Code> code,
                                              ByteArray reloc_info) {
  Heap* heap = isolate_->heap();

  // We cannot trim the Code object in CODE_LO_SPACE.
  DCHECK(!heap->code_lo_space()->Contains(*code));

  code->CopyRelocInfoToByteArray(reloc_info, code_desc_);

#ifdef VERIFY_HEAP
  code->VerifyRelocInfo(isolate_, reloc_info);
#endif

  int old_object_size = Code::SizeFor(code_desc_.origin->buffer_size());
  int new_object_size = Code::SizeFor(code_desc_.instruction_size() +
                                      code_desc_.metadata_size());
  int size_to_trim = old_object_size - new_object_size;
  DCHECK_GE(size_to_trim, 0);
  heap->UndoLastAllocationAt(code->address() + new_object_size, size_to_trim);
}

MaybeHandle<Code> Factory::NewEmptyCode(CodeKind kind, int buffer_size) {
  STATIC_ASSERT(Code::kOnHeapBodyIsContiguous);
  const int object_size = Code::SizeFor(buffer_size);
  Heap* heap = isolate()->heap();

  HeapObject result = heap->AllocateRawWith<Heap::kLightRetry>(
      object_size, AllocationType::kCode, AllocationOrigin::kRuntime);
  if (result.is_null()) return MaybeHandle<Code>();

  DisallowGarbageCollection no_gc;
  result.set_map_after_allocation(*code_map(), SKIP_WRITE_BARRIER);

  Code raw_code = Code::cast(result);
  constexpr bool kIsNotOffHeapTrampoline = false;
  raw_code.set_raw_instruction_size(0);
  raw_code.set_raw_metadata_size(buffer_size);
  raw_code.set_relocation_info_or_undefined(*undefined_value());
  raw_code.initialize_flags(kind, false, 0, kIsNotOffHeapTrampoline);
  raw_code.set_builtin_id(Builtin::kNoBuiltinId);
  auto code_data_container =
      Handle<CodeDataContainer>::cast(trampoline_trivial_code_data_container());
  raw_code.set_code_data_container(*code_data_container, kReleaseStore);
  raw_code.set_deoptimization_data(*DeoptimizationData::Empty(isolate()));
  raw_code.set_bytecode_offset_table(*empty_byte_array());
  raw_code.set_handler_table_offset(0);
  raw_code.set_constant_pool_offset(0);
  raw_code.set_code_comments_offset(0);
  raw_code.set_unwinding_info_offset(0);

  Handle<Code> code = handle(raw_code, isolate());
  DCHECK(IsAligned(code->address(), kCodeAlignment));
  DCHECK_IMPLIES(
      !V8_ENABLE_THIRD_PARTY_HEAP_BOOL && !heap->code_region().is_empty(),
      heap->code_region().contains(code->address()));
  DCHECK(heap->code_space()->Contains(raw_code));
  return code;
}

MaybeHandle<Code> Factory::CodeBuilder::TryBuild() {
  return BuildInternal(false);
}

Handle<Code> Factory::CodeBuilder::Build() {
  return BuildInternal(true).ToHandleChecked();
}

HeapObject Factory::AllocateRaw(int size, AllocationType allocation,
                                AllocationAlignment alignment) {
  return isolate()->heap()->AllocateRawWith<Heap::kRetryOrFail>(
      size, allocation, AllocationOrigin::kRuntime, alignment);
}

HeapObject Factory::AllocateRawWithAllocationSite(
    Handle<Map> map, AllocationType allocation,
    Handle<AllocationSite> allocation_site) {
  DCHECK(map->instance_type() != MAP_TYPE);
  int size = map->instance_size();
  if (!allocation_site.is_null()) {
    DCHECK(V8_ALLOCATION_SITE_TRACKING_BOOL);
    size += AllocationMemento::kSize;
  }
  HeapObject result =
      isolate()->heap()->AllocateRawWith<Heap::kRetryOrFail>(size, allocation);
  WriteBarrierMode write_barrier_mode = allocation == AllocationType::kYoung
                                            ? SKIP_WRITE_BARRIER
                                            : UPDATE_WRITE_BARRIER;
  result.set_map_after_allocation(*map, write_barrier_mode);
  if (!allocation_site.is_null()) {
    AllocationMemento alloc_memento = AllocationMemento::unchecked_cast(
        Object(result.ptr() + map->instance_size()));
    InitializeAllocationMemento(alloc_memento, *allocation_site);
  }
  return result;
}

void Factory::InitializeAllocationMemento(AllocationMemento memento,
                                          AllocationSite allocation_site) {
  DCHECK(V8_ALLOCATION_SITE_TRACKING_BOOL);
  memento.set_map_after_allocation(*allocation_memento_map(),
                                   SKIP_WRITE_BARRIER);
  memento.set_allocation_site(allocation_site, SKIP_WRITE_BARRIER);
  if (FLAG_allocation_site_pretenuring) {
    allocation_site.IncrementMementoCreateCount();
  }
}

HeapObject Factory::New(Handle<Map> map, AllocationType allocation) {
  DCHECK(map->instance_type() != MAP_TYPE);
  int size = map->instance_size();
  HeapObject result =
      isolate()->heap()->AllocateRawWith<Heap::kRetryOrFail>(size, allocation);
  // New space objects are allocated white.
  WriteBarrierMode write_barrier_mode = allocation == AllocationType::kYoung
                                            ? SKIP_WRITE_BARRIER
                                            : UPDATE_WRITE_BARRIER;
  result.set_map_after_allocation(*map, write_barrier_mode);
  return result;
}

Handle<HeapObject> Factory::NewFillerObject(int size, bool double_align,
                                            AllocationType allocation,
                                            AllocationOrigin origin) {
  AllocationAlignment alignment = double_align ? kDoubleAligned : kWordAligned;
  Heap* heap = isolate()->heap();
  HeapObject result = heap->AllocateRawWith<Heap::kRetryOrFail>(
      size, allocation, origin, alignment);
  heap->CreateFillerObjectAt(result.address(), size, ClearRecordedSlots::kNo);
  return Handle<HeapObject>(result, isolate());
}

Handle<PrototypeInfo> Factory::NewPrototypeInfo() {
  auto result = NewStructInternal<PrototypeInfo>(PROTOTYPE_INFO_TYPE,
                                                 AllocationType::kOld);
  DisallowGarbageCollection no_gc;
  result.set_prototype_users(Smi::zero());
  result.set_registry_slot(PrototypeInfo::UNREGISTERED);
  result.set_bit_field(0);
  result.set_module_namespace(*undefined_value(), SKIP_WRITE_BARRIER);
  return handle(result, isolate());
}

Handle<EnumCache> Factory::NewEnumCache(Handle<FixedArray> keys,
                                        Handle<FixedArray> indices) {
  auto result =
      NewStructInternal<EnumCache>(ENUM_CACHE_TYPE, AllocationType::kOld);
  DisallowGarbageCollection no_gc;
  result.set_keys(*keys);
  result.set_indices(*indices);
  return handle(result, isolate());
}

Handle<Tuple2> Factory::NewTuple2(Handle<Object> value1, Handle<Object> value2,
                                  AllocationType allocation) {
  auto result = NewStructInternal<Tuple2>(TUPLE2_TYPE, allocation);
  DisallowGarbageCollection no_gc;
  result.set_value1(*value1);
  result.set_value2(*value2);
  return handle(result, isolate());
}

Handle<BaselineData> Factory::NewBaselineData(
    Handle<Code> code, Handle<HeapObject> function_data) {
  auto baseline_data =
      NewStructInternal<BaselineData>(BASELINE_DATA_TYPE, AllocationType::kOld);
  DisallowGarbageCollection no_gc;
  baseline_data.set_baseline_code(*code);
  baseline_data.set_data(*function_data);
  return handle(baseline_data, isolate());
}

Handle<Oddball> Factory::NewOddball(Handle<Map> map, const char* to_string,
                                    Handle<Object> to_number,
                                    const char* type_of, byte kind) {
  Handle<Oddball> oddball(Oddball::cast(New(map, AllocationType::kReadOnly)),
                          isolate());
  Oddball::Initialize(isolate(), oddball, to_string, to_number, type_of, kind);
  return oddball;
}

Handle<Oddball> Factory::NewSelfReferenceMarker() {
  return NewOddball(self_reference_marker_map(), "self_reference_marker",
                    handle(Smi::FromInt(-1), isolate()), "undefined",
                    Oddball::kSelfReferenceMarker);
}

Handle<Oddball> Factory::NewBasicBlockCountersMarker() {
  return NewOddball(basic_block_counters_marker_map(),
                    "basic_block_counters_marker",
                    handle(Smi::FromInt(-1), isolate()), "undefined",
                    Oddball::kBasicBlockCountersMarker);
}

Handle<PropertyArray> Factory::NewPropertyArray(int length) {
  DCHECK_LE(0, length);
  if (length == 0) return empty_property_array();
  HeapObject result = AllocateRawFixedArray(length, AllocationType::kYoung);
  DisallowGarbageCollection no_gc;
  result.set_map_after_allocation(*property_array_map(), SKIP_WRITE_BARRIER);
  PropertyArray array = PropertyArray::cast(result);
  array.initialize_length(length);
  MemsetTagged(array.data_start(), read_only_roots().undefined_value(), length);
  return handle(array, isolate());
}

MaybeHandle<FixedArray> Factory::TryNewFixedArray(
    int length, AllocationType allocation_type) {
  DCHECK_LE(0, length);
  if (length == 0) return empty_fixed_array();

  int size = FixedArray::SizeFor(length);
  Heap* heap = isolate()->heap();
  AllocationResult allocation = heap->AllocateRaw(size, allocation_type);
  HeapObject result;
  if (!allocation.To(&result)) return MaybeHandle<FixedArray>();
  if ((size > heap->MaxRegularHeapObjectSize(allocation_type)) &&
      FLAG_use_marking_progress_bar) {
    BasicMemoryChunk* chunk = BasicMemoryChunk::FromHeapObject(result);
    chunk->SetFlag(MemoryChunk::HAS_PROGRESS_BAR);
  }
  DisallowGarbageCollection no_gc;
  result.set_map_after_allocation(*fixed_array_map(), SKIP_WRITE_BARRIER);
  FixedArray array = FixedArray::cast(result);
  array.set_length(length);
  MemsetTagged(array.data_start(), *undefined_value(), length);
  return handle(array, isolate());
}

Handle<ClosureFeedbackCellArray> Factory::NewClosureFeedbackCellArray(
    int length) {
  if (length == 0) return empty_closure_feedback_cell_array();

  Handle<ClosureFeedbackCellArray> feedback_cell_array =
      Handle<ClosureFeedbackCellArray>::cast(NewFixedArrayWithMap(
          read_only_roots().closure_feedback_cell_array_map_handle(), length,
          AllocationType::kOld));

  return feedback_cell_array;
}

Handle<FeedbackVector> Factory::NewFeedbackVector(
    Handle<SharedFunctionInfo> shared,
    Handle<ClosureFeedbackCellArray> closure_feedback_cell_array) {
  int length = shared->feedback_metadata().slot_count();
  DCHECK_LE(0, length);
  int size = FeedbackVector::SizeFor(length);

  FeedbackVector vector = FeedbackVector::cast(AllocateRawWithImmortalMap(
      size, AllocationType::kOld, *feedback_vector_map()));
  DisallowGarbageCollection no_gc;
  vector.set_shared_function_info(*shared);
  vector.set_maybe_optimized_code(HeapObjectReference::ClearedValue(isolate()),
                                  kReleaseStore);
  vector.set_length(length);
  vector.set_invocation_count(0);
  vector.set_profiler_ticks(0);
  vector.InitializeOptimizationState();
  vector.set_closure_feedback_cell_array(*closure_feedback_cell_array);

  // TODO(leszeks): Initialize based on the feedback metadata.
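  // For now every feedback slot is simply filled with undefined; the caller
  // (presumably FeedbackVector::New) is expected to overwrite the slots with
  // their per-kind initial values.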
  MemsetTagged(ObjectSlot(vector.slots_start()), *undefined_value(), length);
  return handle(vector, isolate());
}

Handle<EmbedderDataArray> Factory::NewEmbedderDataArray(int length) {
  DCHECK_LE(0, length);
  int size = EmbedderDataArray::SizeFor(length);
  EmbedderDataArray array = EmbedderDataArray::cast(AllocateRawWithImmortalMap(
      size, AllocationType::kYoung, *embedder_data_array_map()));
  DisallowGarbageCollection no_gc;
  array.set_length(length);

  if (length > 0) {
    ObjectSlot start(array.slots_start());
    ObjectSlot end(array.slots_end());
    size_t slot_count = end - start;
    MemsetTagged(start, *undefined_value(), slot_count);
    for (int i = 0; i < length; i++) {
      // TODO(v8:10391, saelo): Handle external pointers in EmbedderDataSlot
      EmbedderDataSlot(array, i).AllocateExternalPointerEntry(isolate());
    }
  }
  return handle(array, isolate());
}

Handle<FixedArrayBase> Factory::NewFixedDoubleArrayWithHoles(int length) {
  DCHECK_LE(0, length);
  Handle<FixedArrayBase> array = NewFixedDoubleArray(length);
  if (length > 0) {
    Handle<FixedDoubleArray>::cast(array)->FillWithHoles(0, length);
  }
  return array;
}

template <typename T>
Handle<T> Factory::AllocateSmallOrderedHashTable(Handle<Map> map, int capacity,
                                                 AllocationType allocation) {
  // Capacity must be a power of two, since we depend on being able
  // to divide and multiple by 2 (kLoadFactor) to derive capacity
  // from number of buckets. If we decide to change kLoadFactor
  // to something other than 2, capacity should be stored as another
  // field of this object.
  DCHECK_EQ(T::kLoadFactor, 2);
  capacity =
      base::bits::RoundUpToPowerOfTwo32(std::max({T::kMinCapacity, capacity}));
  capacity = std::min({capacity, T::kMaxCapacity});

  DCHECK_LT(0, capacity);
  DCHECK_EQ(0, capacity % T::kLoadFactor);

  int size = T::SizeFor(capacity);
  HeapObject result = AllocateRawWithImmortalMap(size, allocation, *map);
  Handle<T> table(T::cast(result), isolate());
  table->Initialize(isolate(), capacity);
  return table;
}

Handle<SmallOrderedHashSet> Factory::NewSmallOrderedHashSet(
    int capacity, AllocationType allocation) {
  return AllocateSmallOrderedHashTable<SmallOrderedHashSet>(
      small_ordered_hash_set_map(), capacity, allocation);
}

Handle<SmallOrderedHashMap> Factory::NewSmallOrderedHashMap(
    int capacity, AllocationType allocation) {
  return AllocateSmallOrderedHashTable<SmallOrderedHashMap>(
      small_ordered_hash_map_map(), capacity, allocation);
}

Handle<SmallOrderedNameDictionary> Factory::NewSmallOrderedNameDictionary(
    int capacity, AllocationType allocation) {
  Handle<SmallOrderedNameDictionary> dict =
      AllocateSmallOrderedHashTable<SmallOrderedNameDictionary>(
          small_ordered_name_dictionary_map(), capacity, allocation);
  dict->SetHash(PropertyArray::kNoHashSentinel);
  return dict;
}

Handle<OrderedHashSet> Factory::NewOrderedHashSet() {
  return OrderedHashSet::Allocate(isolate(), OrderedHashSet::kInitialCapacity,
                                  AllocationType::kYoung)
      .ToHandleChecked();
}

Handle<OrderedHashMap> Factory::NewOrderedHashMap() {
  return OrderedHashMap::Allocate(isolate(), OrderedHashMap::kInitialCapacity,
                                  AllocationType::kYoung)
      .ToHandleChecked();
}

Handle<OrderedNameDictionary> Factory::NewOrderedNameDictionary(int capacity) {
  return OrderedNameDictionary::Allocate(isolate(), capacity,
                                         AllocationType::kYoung)
      .ToHandleChecked();
}

Handle<NameDictionary> Factory::NewNameDictionary(int at_least_space_for) {
  return NameDictionary::New(isolate(), at_least_space_for);
}

Handle<PropertyDescriptorObject> Factory::NewPropertyDescriptorObject() {
  auto object = NewStructInternal<PropertyDescriptorObject>(
      PROPERTY_DESCRIPTOR_OBJECT_TYPE, AllocationType::kYoung);
  DisallowGarbageCollection no_gc;
  object.set_flags(0);
  Oddball the_hole = read_only_roots().the_hole_value();
  object.set_value(the_hole, SKIP_WRITE_BARRIER);
  object.set_get(the_hole, SKIP_WRITE_BARRIER);
  object.set_set(the_hole, SKIP_WRITE_BARRIER);
  return handle(object, isolate());
}

Handle<SwissNameDictionary>
Factory::CreateCanonicalEmptySwissNameDictionary() {
  // This function is only supposed to be used to create the canonical empty
  // version and should not be used afterwards.
  DCHECK_EQ(kNullAddress, ReadOnlyRoots(isolate()).at(
                              RootIndex::kEmptySwissPropertyDictionary));

  ReadOnlyRoots roots(isolate());

  Handle<ByteArray> empty_meta_table =
      NewByteArray(SwissNameDictionary::kMetaTableEnumerationDataStartIndex,
                   AllocationType::kReadOnly);

  Map map = roots.swiss_name_dictionary_map();
  int size = SwissNameDictionary::SizeFor(0);
  HeapObject obj =
      AllocateRawWithImmortalMap(size, AllocationType::kReadOnly, map);
  SwissNameDictionary result = SwissNameDictionary::cast(obj);
  result.Initialize(isolate(), *empty_meta_table, 0);
  return handle(result, isolate());
}

// Internalized strings are created in the old generation (data space).
Handle<String> Factory::InternalizeUtf8String(
    const base::Vector<const char>& string) {
  base::Vector<const uint8_t> utf8_data =
      base::Vector<const uint8_t>::cast(string);
  Utf8Decoder decoder(utf8_data);
  if (decoder.is_ascii()) return InternalizeString(utf8_data);
  if (decoder.is_one_byte()) {
    std::unique_ptr<uint8_t[]> buffer(new uint8_t[decoder.utf16_length()]);
    decoder.Decode(buffer.get(), utf8_data);
    return InternalizeString(
        base::Vector<const uint8_t>(buffer.get(), decoder.utf16_length()));
  }
  std::unique_ptr<uint16_t[]> buffer(new uint16_t[decoder.utf16_length()]);
  decoder.Decode(buffer.get(), utf8_data);
  return InternalizeString(
      base::Vector<const base::uc16>(buffer.get(), decoder.utf16_length()));
}

template <typename SeqString>
Handle<String> Factory::InternalizeString(Handle<SeqString> string, int from,
                                          int length, bool convert_encoding) {
  SeqSubStringKey<SeqString> key(isolate(), string, from, length,
                                 convert_encoding);
  return InternalizeStringWithKey(&key);
}

template Handle<String> Factory::InternalizeString(
    Handle<SeqOneByteString> string, int from, int length,
    bool convert_encoding);
template Handle<String> Factory::InternalizeString(
    Handle<SeqTwoByteString> string, int from, int length,
    bool convert_encoding);

MaybeHandle<String> Factory::NewStringFromOneByte(
    const base::Vector<const uint8_t>& string, AllocationType allocation) {
  DCHECK_NE(allocation, AllocationType::kReadOnly);
  int length = string.length();
  if (length == 0) return empty_string();
  if (length == 1) return LookupSingleCharacterStringFromCode(string[0]);
  Handle<SeqOneByteString> result;
  ASSIGN_RETURN_ON_EXCEPTION(isolate(), result,
                             NewRawOneByteString(string.length(), allocation),
                             String);

  DisallowGarbageCollection no_gc;
  // Copy the characters into the new object.
  CopyChars(SeqOneByteString::cast(*result).GetChars(no_gc), string.begin(),
            length);
  return result;
}

MaybeHandle<String> Factory::NewStringFromUtf8(
    const base::Vector<const char>& string, AllocationType allocation) {
  base::Vector<const uint8_t> utf8_data =
      base::Vector<const uint8_t>::cast(string);
  Utf8Decoder decoder(utf8_data);

  if (decoder.utf16_length() == 0) return empty_string();

  if (decoder.is_one_byte()) {
    // Allocate string.
    Handle<SeqOneByteString> result;
    ASSIGN_RETURN_ON_EXCEPTION(
        isolate(), result,
        NewRawOneByteString(decoder.utf16_length(), allocation), String);

    DisallowGarbageCollection no_gc;
    decoder.Decode(result->GetChars(no_gc), utf8_data);
    return result;
  }

  // Allocate string.
  Handle<SeqTwoByteString> result;
  ASSIGN_RETURN_ON_EXCEPTION(
      isolate(), result,
      NewRawTwoByteString(decoder.utf16_length(), allocation), String);

  DisallowGarbageCollection no_gc;
  decoder.Decode(result->GetChars(no_gc), utf8_data);
  return result;
}

MaybeHandle<String> Factory::NewStringFromUtf8SubString(
    Handle<SeqOneByteString> str, int begin, int length,
    AllocationType allocation) {
  base::Vector<const uint8_t> utf8_data;
  {
    DisallowGarbageCollection no_gc;
    utf8_data =
        base::Vector<const uint8_t>(str->GetChars(no_gc) + begin, length);
  }
  Utf8Decoder decoder(utf8_data);

  if (length == 1) {
    uint16_t t;
    // Decode even in the case of length 1 since it can be a bad character.
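    // (A malformed sequence is assumed to decode to the Unicode replacement
    // character rather than being copied through verbatim.)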
    decoder.Decode(&t, utf8_data);
    return LookupSingleCharacterStringFromCode(t);
  }

  if (decoder.is_ascii()) {
    // If the string is ASCII, we can just make a substring.
    // TODO(v8): the allocation flag is ignored in this case.
    return NewSubString(str, begin, begin + length);
  }

  DCHECK_GT(decoder.utf16_length(), 0);

  if (decoder.is_one_byte()) {
    // Allocate string.
    Handle<SeqOneByteString> result;
    ASSIGN_RETURN_ON_EXCEPTION(
        isolate(), result,
        NewRawOneByteString(decoder.utf16_length(), allocation), String);
    DisallowGarbageCollection no_gc;
    // Update pointer references, since the original string may have moved
    // after allocation.
    utf8_data =
        base::Vector<const uint8_t>(str->GetChars(no_gc) + begin, length);
    decoder.Decode(result->GetChars(no_gc), utf8_data);
    return result;
  }

  // Allocate string.
  Handle<SeqTwoByteString> result;
  ASSIGN_RETURN_ON_EXCEPTION(
      isolate(), result,
      NewRawTwoByteString(decoder.utf16_length(), allocation), String);

  DisallowGarbageCollection no_gc;
  // Update pointer references, since the original string may have moved after
  // allocation.
  utf8_data = base::Vector<const uint8_t>(str->GetChars(no_gc) + begin, length);
  decoder.Decode(result->GetChars(no_gc), utf8_data);
  return result;
}

MaybeHandle<String> Factory::NewStringFromTwoByte(const base::uc16* string,
                                                  int length,
                                                  AllocationType allocation) {
  DCHECK_NE(allocation, AllocationType::kReadOnly);
  if (length == 0) return empty_string();
  if (String::IsOneByte(string, length)) {
    if (length == 1) return LookupSingleCharacterStringFromCode(string[0]);
    Handle<SeqOneByteString> result;
    ASSIGN_RETURN_ON_EXCEPTION(isolate(), result,
                               NewRawOneByteString(length, allocation), String);
    DisallowGarbageCollection no_gc;
    CopyChars(result->GetChars(no_gc), string, length);
    return result;
  } else {
    Handle<SeqTwoByteString> result;
    ASSIGN_RETURN_ON_EXCEPTION(isolate(), result,
                               NewRawTwoByteString(length, allocation), String);
    DisallowGarbageCollection no_gc;
    CopyChars(result->GetChars(no_gc), string, length);
    return result;
  }
}

MaybeHandle<String> Factory::NewStringFromTwoByte(
    const base::Vector<const base::uc16>& string, AllocationType allocation) {
  return NewStringFromTwoByte(string.begin(), string.length(), allocation);
}

MaybeHandle<String> Factory::NewStringFromTwoByte(
    const ZoneVector<base::uc16>* string, AllocationType allocation) {
  return NewStringFromTwoByte(string->data(), static_cast<int>(string->size()),
                              allocation);
}

namespace {

inline void WriteOneByteData(Handle<String> s, uint8_t* chars, int len) {
  DCHECK(s->length() == len);
  String::WriteToFlat(*s, chars, 0, len);
}

inline void WriteTwoByteData(Handle<String> s, uint16_t* chars, int len) {
  DCHECK(s->length() == len);
  String::WriteToFlat(*s, chars, 0, len);
}

}  // namespace

template <bool is_one_byte, typename T>
Handle<String> Factory::AllocateInternalizedStringImpl(T t, int chars,
                                                       uint32_t hash_field) {
  DCHECK_LE(0, chars);
  DCHECK_GE(String::kMaxLength, chars);

  // Compute map and object size.
  int size;
  Map map;
  if (is_one_byte) {
    map = *one_byte_internalized_string_map();
    size = SeqOneByteString::SizeFor(chars);
  } else {
    map = *internalized_string_map();
    size = SeqTwoByteString::SizeFor(chars);
  }

  String result = String::cast(AllocateRawWithImmortalMap(
      size,
      isolate()->heap()->CanAllocateInReadOnlySpace()
          ? AllocationType::kReadOnly
          : AllocationType::kOld,
      map));

  DisallowGarbageCollection no_gc;
  result.set_length(chars);
  result.set_raw_hash_field(hash_field);
  DCHECK_EQ(size, result.Size());

  if (is_one_byte) {
    WriteOneByteData(t, SeqOneByteString::cast(result).GetChars(no_gc), chars);
  } else {
    WriteTwoByteData(t, SeqTwoByteString::cast(result).GetChars(no_gc), chars);
  }
  return handle(result, isolate());
}

Handle<String> Factory::NewInternalizedStringImpl(Handle<String> string,
                                                  int chars,
                                                  uint32_t hash_field) {
  if (string->IsOneByteRepresentation()) {
    return AllocateInternalizedStringImpl<true>(string, chars, hash_field);
  }
  return AllocateInternalizedStringImpl<false>(string, chars, hash_field);
}

namespace {

MaybeHandle<Map> GetInternalizedStringMap(Factory* f, Handle<String> string) {
  switch (string->map().instance_type()) {
    case STRING_TYPE:
      return f->internalized_string_map();
    case ONE_BYTE_STRING_TYPE:
      return f->one_byte_internalized_string_map();
    case EXTERNAL_STRING_TYPE:
      return f->external_internalized_string_map();
    case EXTERNAL_ONE_BYTE_STRING_TYPE:
      return f->external_one_byte_internalized_string_map();
    default:
      return MaybeHandle<Map>();  // No match found.
  }
}

}  // namespace

MaybeHandle<Map> Factory::InternalizedStringMapForString(
    Handle<String> string) {
  // Do not internalize young strings: This allows us to ignore both string
  // table and stub cache on scavenges.
  if (Heap::InYoungGeneration(*string)) return MaybeHandle<Map>();

  return GetInternalizedStringMap(this, string);
}

template <class StringClass>
Handle<StringClass> Factory::InternalizeExternalString(Handle<String> string) {
  Handle<Map> map = GetInternalizedStringMap(this, string).ToHandleChecked();
  StringClass external_string =
      StringClass::cast(New(map, AllocationType::kOld));
  DisallowGarbageCollection no_gc;
  external_string.AllocateExternalPointerEntries(isolate());
  StringClass cast_string = StringClass::cast(*string);
  external_string.set_length(cast_string.length());
  external_string.set_raw_hash_field(cast_string.raw_hash_field());
  external_string.SetResource(isolate(), nullptr);
  isolate()->heap()->RegisterExternalString(external_string);
  return handle(external_string, isolate());
}

template Handle<ExternalOneByteString>
    Factory::InternalizeExternalString<ExternalOneByteString>(Handle<String>);
template Handle<ExternalTwoByteString>
    Factory::InternalizeExternalString<ExternalTwoByteString>(Handle<String>);

Handle<String> Factory::LookupSingleCharacterStringFromCode(uint16_t code) {
  if (code <= unibrow::Latin1::kMaxChar) {
    {
      DisallowGarbageCollection no_gc;
      Object value = single_character_string_cache()->get(code);
      if (value != *undefined_value()) {
        return handle(String::cast(value), isolate());
      }
    }
    uint8_t buffer[] = {static_cast<uint8_t>(code)};
    Handle<String> result =
        InternalizeString(base::Vector<const uint8_t>(buffer, 1));
    single_character_string_cache()->set(code, *result);
    return result;
  }
  uint16_t buffer[] = {code};
  return InternalizeString(base::Vector<const uint16_t>(buffer, 1));
}

Handle<String> Factory::NewSurrogatePairString(uint16_t lead, uint16_t trail) {
  DCHECK_GE(lead, 0xD800);
  DCHECK_LE(lead, 0xDBFF);
  DCHECK_GE(trail, 0xDC00);
  DCHECK_LE(trail, 0xDFFF);

  Handle<SeqTwoByteString> str =
      isolate()->factory()->NewRawTwoByteString(2).ToHandleChecked();
  DisallowGarbageCollection no_gc;
  base::uc16* dest = str->GetChars(no_gc);
  dest[0] = lead;
  dest[1] = trail;
  return str;
}

Handle<String> Factory::NewProperSubString(Handle<String> str, int begin,
                                           int end) {
#if VERIFY_HEAP
  if (FLAG_verify_heap) str->StringVerify(isolate());
#endif
  DCHECK(begin > 0 || end < str->length());

  str = String::Flatten(isolate(), str);

  int length = end - begin;
  if (length <= 0) return empty_string();
  if (length == 1) {
    return LookupSingleCharacterStringFromCode(str->Get(begin));
  }
  if (length == 2) {
    // Optimization for 2-byte strings often used as keys in a decompression
    // dictionary. Check whether we already have the string in the string
    // table to prevent creation of many unnecessary strings.
    uint16_t c1 = str->Get(begin);
    uint16_t c2 = str->Get(begin + 1);
    return MakeOrFindTwoCharacterString(c1, c2);
  }

  if (!FLAG_string_slices || length < SlicedString::kMinLength) {
    if (str->IsOneByteRepresentation()) {
      Handle<SeqOneByteString> result =
          NewRawOneByteString(length).ToHandleChecked();
      DisallowGarbageCollection no_gc;
      uint8_t* dest = result->GetChars(no_gc);
      String::WriteToFlat(*str, dest, begin, end);
      return result;
    } else {
      Handle<SeqTwoByteString> result =
          NewRawTwoByteString(length).ToHandleChecked();
      DisallowGarbageCollection no_gc;
      base::uc16* dest = result->GetChars(no_gc);
      String::WriteToFlat(*str, dest, begin, end);
      return result;
    }
  }

  int offset = begin;

  if (str->IsSlicedString()) {
    Handle<SlicedString> slice = Handle<SlicedString>::cast(str);
    str = Handle<String>(slice->parent(), isolate());
    offset += slice->offset();
  }
  if (str->IsThinString()) {
    Handle<ThinString> thin = Handle<ThinString>::cast(str);
    str = handle(thin->actual(), isolate());
  }

  DCHECK(str->IsSeqString() || str->IsExternalString());
  Handle<Map> map = str->IsOneByteRepresentation()
                        ? sliced_one_byte_string_map()
                        : sliced_string_map();
  SlicedString slice = SlicedString::cast(New(map, AllocationType::kYoung));
  DisallowGarbageCollection no_gc;
  slice.set_raw_hash_field(String::kEmptyHashField);
  slice.set_length(length);
  slice.set_parent(*str);
  slice.set_offset(offset);
  return handle(slice, isolate());
}

MaybeHandle<String> Factory::NewExternalStringFromOneByte(
    const ExternalOneByteString::Resource* resource) {
  size_t length = resource->length();
  if (length > static_cast<size_t>(String::kMaxLength)) {
    THROW_NEW_ERROR(isolate(), NewInvalidStringLengthError(), String);
  }
  if (length == 0) return empty_string();

  Handle<Map> map = resource->IsCacheable()
                        ? external_one_byte_string_map()
                        : uncached_external_one_byte_string_map();
  ExternalOneByteString external_string =
      ExternalOneByteString::cast(New(map, AllocationType::kOld));
  DisallowGarbageCollection no_gc;
  external_string.AllocateExternalPointerEntries(isolate());
  external_string.set_length(static_cast<int>(length));
  external_string.set_raw_hash_field(String::kEmptyHashField);
  external_string.SetResource(isolate(), resource);
  isolate()->heap()->RegisterExternalString(external_string);

  return Handle<String>(external_string, isolate());
}

MaybeHandle<String> Factory::NewExternalStringFromTwoByte(
    const ExternalTwoByteString::Resource* resource) {
  size_t length = resource->length();
  if (length > static_cast<size_t>(String::kMaxLength)) {
    THROW_NEW_ERROR(isolate(), NewInvalidStringLengthError(), String);
  }
  if (length == 0) return empty_string();

  Handle<Map> map = resource->IsCacheable()
                        ? external_string_map()
                        : uncached_external_string_map();
  ExternalTwoByteString string =
      ExternalTwoByteString::cast(New(map, AllocationType::kOld));
  DisallowGarbageCollection no_gc;
  string.AllocateExternalPointerEntries(isolate());
  string.set_length(static_cast<int>(length));
  string.set_raw_hash_field(String::kEmptyHashField);
  string.SetResource(isolate(), resource);
  isolate()->heap()->RegisterExternalString(string);

  return Handle<String>(string, isolate());
}

Handle<JSStringIterator> Factory::NewJSStringIterator(Handle<String> string) {
  Handle<Map> map(isolate()->native_context()->initial_string_iterator_map(),
                  isolate());
  Handle<String> flat_string = String::Flatten(isolate(), string);
  Handle<JSStringIterator> iterator =
      Handle<JSStringIterator>::cast(NewJSObjectFromMap(map));

  DisallowGarbageCollection no_gc;
  JSStringIterator raw = *iterator;
  raw.set_string(*flat_string);
  raw.set_index(0);
  return iterator;
}

Symbol Factory::NewSymbolInternal(AllocationType allocation) {
  DCHECK(allocation != AllocationType::kYoung);
  // Statically ensure that it is safe to allocate symbols in paged spaces.
  STATIC_ASSERT(Symbol::kSize <= kMaxRegularHeapObjectSize);

  Symbol symbol = Symbol::cast(AllocateRawWithImmortalMap(
      Symbol::kSize, allocation, read_only_roots().symbol_map()));
  DisallowGarbageCollection no_gc;
  // Generate a random hash value.
  int hash = isolate()->GenerateIdentityHash(Name::kHashBitMask);
  symbol.set_raw_hash_field(Name::kIsNotIntegerIndexMask |
                            (hash << Name::kHashShift));
  symbol.set_description(read_only_roots().undefined_value(),
                         SKIP_WRITE_BARRIER);
  symbol.set_flags(0);
  DCHECK(!symbol.is_private());
  return symbol;
}

Handle<Symbol> Factory::NewSymbol(AllocationType allocation) {
  return handle(NewSymbolInternal(allocation), isolate());
}

Handle<Symbol> Factory::NewPrivateSymbol(AllocationType allocation) {
  DCHECK(allocation != AllocationType::kYoung);
  Symbol symbol = NewSymbolInternal(allocation);
  DisallowGarbageCollection no_gc;
  symbol.set_is_private(true);
  return handle(symbol, isolate());
}

Handle<Symbol> Factory::NewPrivateNameSymbol(Handle<String> name) {
  Symbol symbol = NewSymbolInternal();
  DisallowGarbageCollection no_gc;
  symbol.set_is_private_name();
  symbol.set_description(*name);
  return handle(symbol, isolate());
}

Context Factory::NewContextInternal(Handle<Map> map, int size,
                                    int variadic_part_length,
                                    AllocationType allocation) {
  DCHECK_LE(Context::kTodoHeaderSize, size);
  DCHECK(IsAligned(size, kTaggedSize));
  DCHECK_LE(Context::MIN_CONTEXT_SLOTS, variadic_part_length);
  DCHECK_LE(Context::SizeFor(variadic_part_length), size);

  HeapObject result =
      isolate()->heap()->AllocateRawWith<Heap::kRetryOrFail>(size, allocation);
  result.set_map_after_allocation(*map);
  DisallowGarbageCollection no_gc;
  Context context = Context::cast(result);
  context.set_length(variadic_part_length);
  DCHECK_EQ(context.SizeFromMap(*map), size);
  if (size > Context::kTodoHeaderSize) {
    ObjectSlot start = context.RawField(Context::kTodoHeaderSize);
    ObjectSlot end = context.RawField(size);
    size_t slot_count = end - start;
    MemsetTagged(start, *undefined_value(), slot_count);
  }
  return context;
}

Handle<NativeContext> Factory::NewNativeContext() {
  Handle<Map> map = NewMap(NATIVE_CONTEXT_TYPE, kVariableSizeSentinel);
  NativeContext context = NativeContext::cast(NewContextInternal(
      map, NativeContext::kSize, NativeContext::NATIVE_CONTEXT_SLOTS,
      AllocationType::kOld));
  DisallowGarbageCollection no_gc;
  context.set_native_context_map(*map);
  map->set_native_context(context);
  // The ExternalPointerTable is a C++ object.
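  // The microtask_queue field stores a raw C++ pointer, so the external
  // pointer table entries are (presumably) needed before set_microtask_queue()
  // is called below.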
  context.AllocateExternalPointerEntries(isolate());
  context.set_scope_info(*native_scope_info());
  context.set_previous(Context());
  context.set_extension(*undefined_value());
  context.set_errors_thrown(Smi::zero());
  context.set_math_random_index(Smi::zero());
  context.set_serialized_objects(*empty_fixed_array());
  context.set_microtask_queue(isolate(), nullptr);
  context.set_osr_code_cache(*empty_weak_fixed_array());
  context.set_retained_maps(*empty_weak_array_list());
  return handle(context, isolate());
}

Handle<Context> Factory::NewScriptContext(Handle<NativeContext> outer,
                                          Handle<ScopeInfo> scope_info) {
  DCHECK_EQ(scope_info->scope_type(), SCRIPT_SCOPE);
  int variadic_part_length = scope_info->ContextLength();
  Context context =
      NewContextInternal(handle(outer->script_context_map(), isolate()),
                         Context::SizeFor(variadic_part_length),
                         variadic_part_length, AllocationType::kOld);
  DisallowGarbageCollection no_gc;
  context.set_scope_info(*scope_info);
  context.set_previous(*outer);
  DCHECK(context.IsScriptContext());
  return handle(context, isolate());
}

Handle<ScriptContextTable> Factory::NewScriptContextTable() {
  Handle<ScriptContextTable> context_table =
      Handle<ScriptContextTable>::cast(NewFixedArrayWithMap(
          read_only_roots().script_context_table_map_handle(),
          ScriptContextTable::kMinLength));
  context_table->set_used(0, kReleaseStore);
  return context_table;
}

Handle<Context> Factory::NewModuleContext(Handle<SourceTextModule> module,
                                          Handle<NativeContext> outer,
                                          Handle<ScopeInfo> scope_info) {
  DCHECK_EQ(scope_info->scope_type(), MODULE_SCOPE);
  int variadic_part_length = scope_info->ContextLength();
  Context context = NewContextInternal(
      isolate()->module_context_map(), Context::SizeFor(variadic_part_length),
      variadic_part_length, AllocationType::kOld);
  DisallowGarbageCollection no_gc;
  context.set_scope_info(*scope_info);
  context.set_previous(*outer);
  context.set_extension(*module);
  DCHECK(context.IsModuleContext());
  return handle(context, isolate());
}

Handle<Context> Factory::NewFunctionContext(Handle<Context> outer,
                                            Handle<ScopeInfo> scope_info) {
  Handle<Map> map;
  switch (scope_info->scope_type()) {
    case EVAL_SCOPE:
      map = isolate()->eval_context_map();
      break;
    case FUNCTION_SCOPE:
      map = isolate()->function_context_map();
      break;
    default:
      UNREACHABLE();
  }
  int variadic_part_length = scope_info->ContextLength();
  Context context =
      NewContextInternal(map, Context::SizeFor(variadic_part_length),
                         variadic_part_length, AllocationType::kYoung);
  DisallowGarbageCollection no_gc;
  context.set_scope_info(*scope_info);
  context.set_previous(*outer);
  return handle(context, isolate());
}

Handle<Context> Factory::NewCatchContext(Handle<Context> previous,
                                         Handle<ScopeInfo> scope_info,
                                         Handle<Object> thrown_object) {
  DCHECK_EQ(scope_info->scope_type(), CATCH_SCOPE);
  STATIC_ASSERT(Context::MIN_CONTEXT_SLOTS == Context::THROWN_OBJECT_INDEX);
  // TODO(ishell): Take the details from CatchContext class.
  int variadic_part_length = Context::MIN_CONTEXT_SLOTS + 1;
  Context context = NewContextInternal(
      isolate()->catch_context_map(), Context::SizeFor(variadic_part_length),
      variadic_part_length, AllocationType::kYoung);
  DisallowGarbageCollection no_gc;
  DCHECK_IMPLIES(!FLAG_single_generation, Heap::InYoungGeneration(context));
  context.set_scope_info(*scope_info, SKIP_WRITE_BARRIER);
  context.set_previous(*previous, SKIP_WRITE_BARRIER);
  context.set(Context::THROWN_OBJECT_INDEX, *thrown_object, SKIP_WRITE_BARRIER);
  return handle(context, isolate());
}

Handle<Context> Factory::NewDebugEvaluateContext(Handle<Context> previous,
                                                 Handle<ScopeInfo> scope_info,
                                                 Handle<JSReceiver> extension,
                                                 Handle<Context> wrapped) {
  DCHECK(scope_info->IsDebugEvaluateScope());
  Handle<HeapObject> ext = extension.is_null()
                               ? Handle<HeapObject>::cast(undefined_value())
                               : Handle<HeapObject>::cast(extension);
  // TODO(ishell): Take the details from DebugEvaluateContextContext class.
  int variadic_part_length = Context::MIN_CONTEXT_EXTENDED_SLOTS + 1;
  Context context =
      NewContextInternal(isolate()->debug_evaluate_context_map(),
                         Context::SizeFor(variadic_part_length),
                         variadic_part_length, AllocationType::kYoung);
  DisallowGarbageCollection no_gc;
  DCHECK_IMPLIES(!FLAG_single_generation, Heap::InYoungGeneration(context));
  context.set_scope_info(*scope_info, SKIP_WRITE_BARRIER);
  context.set_previous(*previous, SKIP_WRITE_BARRIER);
  context.set_extension(*ext, SKIP_WRITE_BARRIER);
  if (!wrapped.is_null()) {
    context.set(Context::WRAPPED_CONTEXT_INDEX, *wrapped, SKIP_WRITE_BARRIER);
  }
  return handle(context, isolate());
}

Handle<Context> Factory::NewWithContext(Handle<Context> previous,
                                        Handle<ScopeInfo> scope_info,
                                        Handle<JSReceiver> extension) {
  DCHECK_EQ(scope_info->scope_type(), WITH_SCOPE);
  // TODO(ishell): Take the details from WithContext class.
  int variadic_part_length = Context::MIN_CONTEXT_EXTENDED_SLOTS;
  Context context = NewContextInternal(
      isolate()->with_context_map(), Context::SizeFor(variadic_part_length),
      variadic_part_length, AllocationType::kYoung);
  DisallowGarbageCollection no_gc;
  DCHECK_IMPLIES(!FLAG_single_generation, Heap::InYoungGeneration(context));
  context.set_scope_info(*scope_info, SKIP_WRITE_BARRIER);
  context.set_previous(*previous, SKIP_WRITE_BARRIER);
  context.set_extension(*extension, SKIP_WRITE_BARRIER);
  return handle(context, isolate());
}

Handle<Context> Factory::NewBlockContext(Handle<Context> previous,
                                         Handle<ScopeInfo> scope_info) {
  DCHECK_IMPLIES(scope_info->scope_type() != BLOCK_SCOPE,
                 scope_info->scope_type() == CLASS_SCOPE);
  int variadic_part_length = scope_info->ContextLength();
  Context context = NewContextInternal(
      isolate()->block_context_map(), Context::SizeFor(variadic_part_length),
      variadic_part_length, AllocationType::kYoung);
  DisallowGarbageCollection no_gc;
  DCHECK_IMPLIES(!FLAG_single_generation, Heap::InYoungGeneration(context));
  context.set_scope_info(*scope_info, SKIP_WRITE_BARRIER);
  context.set_previous(*previous, SKIP_WRITE_BARRIER);
  return handle(context, isolate());
}

Handle<Context> Factory::NewBuiltinContext(
    Handle<NativeContext> native_context, int variadic_part_length) {
  DCHECK_LE(Context::MIN_CONTEXT_SLOTS, variadic_part_length);
  Context context = NewContextInternal(
      isolate()->function_context_map(), Context::SizeFor(variadic_part_length),
      variadic_part_length, AllocationType::kYoung);
  DisallowGarbageCollection no_gc;
  DCHECK_IMPLIES(!FLAG_single_generation, Heap::InYoungGeneration(context));
  context.set_scope_info(read_only_roots().empty_scope_info(),
                         SKIP_WRITE_BARRIER);
  context.set_previous(*native_context, SKIP_WRITE_BARRIER);
  return handle(context, isolate());
}

Handle<AliasedArgumentsEntry> Factory::NewAliasedArgumentsEntry(
    int aliased_context_slot) {
  auto entry = NewStructInternal<AliasedArgumentsEntry>(
      ALIASED_ARGUMENTS_ENTRY_TYPE, AllocationType::kYoung);
  entry.set_aliased_context_slot(aliased_context_slot);
  return handle(entry, isolate());
}

Handle<AccessorInfo> Factory::NewAccessorInfo() {
  auto info =
      NewStructInternal<AccessorInfo>(ACCESSOR_INFO_TYPE, AllocationType::kOld);
  DisallowGarbageCollection no_gc;
  info.set_name(*empty_string(), SKIP_WRITE_BARRIER);
  info.set_flags(0);  // Must clear the flags, it was initialized as undefined.
  info.set_is_sloppy(true);
  info.set_initial_property_attributes(NONE);

  // Clear some other fields that should not be undefined.
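  // Smi::zero() acts as the "no callback" sentinel here; the actual callback
  // addresses are presumably installed later by whoever creates a concrete
  // accessor.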
  info.set_getter(Smi::zero(), SKIP_WRITE_BARRIER);
  info.set_setter(Smi::zero(), SKIP_WRITE_BARRIER);
  info.set_js_getter(Smi::zero(), SKIP_WRITE_BARRIER);

  return handle(info, isolate());
}

void Factory::AddToScriptList(Handle