// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/factory.h"

#include <algorithm>  // For copy
#include <memory>     // For shared_ptr<>
#include <string>
#include <utility>  // For move

#include "src/ast/ast-source-ranges.h"
#include "src/base/bits.h"
#include "src/builtins/accessors.h"
#include "src/builtins/constants-table-builder.h"
#include "src/codegen/compilation-cache.h"
#include "src/codegen/compiler.h"
#include "src/common/assert-scope.h"
#include "src/common/globals.h"
#include "src/diagnostics/basic-block-profiler.h"
#include "src/execution/isolate-inl.h"
#include "src/execution/protectors-inl.h"
#include "src/heap/basic-memory-chunk.h"
#include "src/heap/heap-inl.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/mark-compact-inl.h"
#include "src/heap/memory-chunk.h"
#include "src/heap/read-only-heap.h"
#include "src/ic/handler-configuration-inl.h"
#include "src/init/bootstrapper.h"
#include "src/interpreter/interpreter.h"
#include "src/logging/counters.h"
#include "src/logging/log.h"
#include "src/numbers/conversions.h"
#include "src/numbers/hash-seed-inl.h"
#include "src/objects/allocation-site-inl.h"
#include "src/objects/allocation-site-scopes.h"
#include "src/objects/api-callbacks.h"
#include "src/objects/arguments-inl.h"
#include "src/objects/bigint.h"
#include "src/objects/call-site-info-inl.h"
#include "src/objects/cell-inl.h"
#include "src/objects/debug-objects-inl.h"
#include "src/objects/embedder-data-array-inl.h"
#include "src/objects/feedback-cell-inl.h"
#include "src/objects/fixed-array-inl.h"
#include "src/objects/foreign-inl.h"
#include "src/objects/instance-type-inl.h"
#include "src/objects/js-array-buffer-inl.h"
#include "src/objects/js-array-inl.h"
#include "src/objects/js-collection-inl.h"
#include "src/objects/js-generator-inl.h"
#include "src/objects/js-objects.h"
#include "src/objects/js-regexp-inl.h"
#include "src/objects/js-weak-refs-inl.h"
#include "src/objects/literal-objects-inl.h"
#include "src/objects/megadom-handler-inl.h"
#include "src/objects/microtask-inl.h"
#include "src/objects/module-inl.h"
#include "src/objects/promise-inl.h"
#include "src/objects/property-descriptor-object-inl.h"
#include "src/objects/scope-info.h"
#include "src/objects/string-set-inl.h"
#include "src/objects/struct-inl.h"
#include "src/objects/synthetic-module-inl.h"
#include "src/objects/template-objects-inl.h"
#include "src/objects/transitions-inl.h"
#include "src/roots/roots.h"
#include "src/strings/unicode-inl.h"
#if V8_ENABLE_WEBASSEMBLY
#include "src/wasm/wasm-value.h"
#endif

#include "src/heap/local-factory-inl.h"
#include "src/heap/local-heap-inl.h"
namespace v8 {
namespace internal {
79

80
// Main-thread CodeBuilder: binds to |isolate| and its main-thread
// LocalIsolate; the position table defaults to the canonical empty
// byte array until a caller installs one.
// (Removed interleaved blame-UI line-number artifacts that broke the build.)
Factory::CodeBuilder::CodeBuilder(Isolate* isolate, const CodeDesc& desc,
                                  CodeKind kind)
    : isolate_(isolate),
      local_isolate_(isolate_->main_thread_local_isolate()),
      code_desc_(desc),
      kind_(kind),
      position_table_(isolate_->factory()->empty_byte_array()) {}

Factory::CodeBuilder::CodeBuilder(LocalIsolate* local_isolate,
                                  const CodeDesc& desc, CodeKind kind)
    : isolate_(local_isolate->GetMainThreadIsolateUnsafe()),
      local_isolate_(local_isolate),
92 93
      code_desc_(desc),
      kind_(kind),
94
      position_table_(isolate_->factory()->empty_byte_array()) {}
95

96 97
// Allocates and fully initializes a Code object from the builder's CodeDesc.
// |retry_allocation_or_fail|: when true, allocation retries until success
// (and the process aborts on ultimate failure); when false, an empty
// MaybeHandle is returned if the code object cannot be allocated.
// (Removed interleaved blame-UI line-number artifacts that broke the build.)
MaybeHandle<Code> Factory::CodeBuilder::BuildInternal(
    bool retry_allocation_or_fail) {
  const auto factory = isolate_->factory();
  // Allocate objects needed for code initialization.
  Handle<ByteArray> reloc_info =
      CompiledWithConcurrentBaseline()
          ? local_isolate_->factory()->NewByteArray(code_desc_.reloc_size,
                                                    AllocationType::kOld)
          : factory->NewByteArray(code_desc_.reloc_size, AllocationType::kOld);
  Handle<CodeDataContainer> data_container;

  // Use a canonical off-heap trampoline CodeDataContainer if possible.
  const int32_t promise_rejection_flag =
      Code::IsPromiseRejectionField::encode(true);
  if (read_only_data_container_ &&
      (kind_specific_flags_ == 0 ||
       kind_specific_flags_ == promise_rejection_flag)) {
    const ReadOnlyRoots roots(isolate_);
    const auto canonical_code_data_container = Handle<CodeDataContainer>::cast(
        kind_specific_flags_ == 0
            ? roots.trampoline_trivial_code_data_container_handle()
            : roots.trampoline_promise_rejection_code_data_container_handle());
    DCHECK_EQ(canonical_code_data_container->kind_specific_flags(kRelaxedLoad),
              kind_specific_flags_);
    data_container = canonical_code_data_container;
  } else {
    if (CompiledWithConcurrentBaseline()) {
      data_container = local_isolate_->factory()->NewCodeDataContainer(
          0, AllocationType::kOld);
    } else {
      data_container = factory->NewCodeDataContainer(
          0, read_only_data_container_ ? AllocationType::kReadOnly
                                       : AllocationType::kOld);
    }
    if (V8_EXTERNAL_CODE_SPACE_BOOL) {
      data_container->initialize_flags(kind_, builtin_);
    }
    data_container->set_kind_specific_flags(kind_specific_flags_,
                                            kRelaxedStore);
  }

  // Basic block profiling data for builtins is stored in the JS heap rather
  // than in separately-allocated C++ objects. Allocate that data now if
  // appropriate.
  Handle<OnHeapBasicBlockProfilerData> on_heap_profiler_data;
  if (profiler_data_ && isolate_->IsGeneratingEmbeddedBuiltins()) {
    on_heap_profiler_data = profiler_data_->CopyToJSHeap(isolate_);

    // Add the on-heap data to a global list, which keeps it alive and allows
    // iteration.
    Handle<ArrayList> list(isolate_->heap()->basic_block_profiling_data(),
                           isolate_);
    Handle<ArrayList> new_list =
        ArrayList::Add(isolate_, list, on_heap_profiler_data);
    isolate_->heap()->SetBasicBlockProfilingData(new_list);
  }

  STATIC_ASSERT(Code::kOnHeapBodyIsContiguous);
  Heap* heap = isolate_->heap();
  CodePageCollectionMemoryModificationScope code_allocation(heap);

  Handle<Code> code;
  if (CompiledWithConcurrentBaseline()) {
    if (!AllocateConcurrentSparkplugCode(retry_allocation_or_fail)
             .ToHandle(&code)) {
      return MaybeHandle<Code>();
    }
  } else if (!AllocateCode(retry_allocation_or_fail).ToHandle(&code)) {
    return MaybeHandle<Code>();
  }

  {
    // Initialize the freshly allocated (still uninitialized) Code object.
    // No allocation may happen inside this scope.
    Code raw_code = *code;
    constexpr bool kIsNotOffHeapTrampoline = false;
    DisallowGarbageCollection no_gc;

    raw_code.set_raw_instruction_size(code_desc_.instruction_size());
    raw_code.set_raw_metadata_size(code_desc_.metadata_size());
    raw_code.set_relocation_info(*reloc_info);
    raw_code.initialize_flags(kind_, is_turbofanned_, stack_slots_,
                              kIsNotOffHeapTrampoline);
    raw_code.set_builtin_id(builtin_);
    // This might impact direct concurrent reads from TF if we are resetting
    // this field. We currently assume it's immutable thus a relaxed read (after
    // passing IsPendingAllocation).
    raw_code.set_inlined_bytecode_size(inlined_bytecode_size_);
    raw_code.set_code_data_container(*data_container, kReleaseStore);
    if (kind_ == CodeKind::BASELINE) {
      raw_code.set_bytecode_or_interpreter_data(*interpreter_data_);
      raw_code.set_bytecode_offset_table(*position_table_);
    } else {
      raw_code.set_deoptimization_data(*deoptimization_data_);
      raw_code.set_source_position_table(*position_table_);
    }
    raw_code.set_handler_table_offset(
        code_desc_.handler_table_offset_relative());
    raw_code.set_constant_pool_offset(
        code_desc_.constant_pool_offset_relative());
    raw_code.set_code_comments_offset(
        code_desc_.code_comments_offset_relative());
    raw_code.set_unwinding_info_offset(
        code_desc_.unwinding_info_offset_relative());

    // Allow self references to created code object by patching the handle to
    // point to the newly allocated Code object.
    Handle<Object> self_reference;
    if (self_reference_.ToHandle(&self_reference)) {
      DCHECK(self_reference->IsOddball());
      DCHECK_EQ(Oddball::cast(*self_reference).kind(),
                Oddball::kSelfReferenceMarker);
      DCHECK_NE(kind_, CodeKind::BASELINE);
      if (isolate_->IsGeneratingEmbeddedBuiltins()) {
        isolate_->builtins_constants_table_builder()->PatchSelfReference(
            self_reference, code);
      }
      self_reference.PatchValue(*code);
    }

    // Likewise, any references to the basic block counters marker need to be
    // updated to point to the newly-allocated counters array.
    if (!on_heap_profiler_data.is_null()) {
      isolate_->builtins_constants_table_builder()
          ->PatchBasicBlockCountersReference(
              handle(on_heap_profiler_data->counts(), isolate_));
    }

    // Migrate generated code.
    // The generated code can contain embedded objects (typically from
    // handles) in a pointer-to-tagged-value format (i.e. with indirection
    // like a handle) that are dereferenced during the copy to point directly
    // to the actual heap objects. These pointers can include references to
    // the code object itself, through the self_reference parameter.
    raw_code.CopyFromNoFlush(*reloc_info, heap, code_desc_);

    raw_code.clear_padding();

    if (V8_EXTERNAL_CODE_SPACE_BOOL) {
      raw_code.set_main_cage_base(isolate_->cage_base());
      data_container->SetCodeAndEntryPoint(isolate_, raw_code);
    }
#ifdef VERIFY_HEAP
    if (FLAG_verify_heap) HeapObject::VerifyCodePointer(isolate_, raw_code);
#endif

    // Flush the instruction cache before changing the permissions.
    // Note: we do this before setting permissions to ReadExecute because on
    // some older ARM kernels there is a bug which causes an access error on
    // cache flush instructions to trigger access error on non-writable memory.
    // See https://bugs.chromium.org/p/v8/issues/detail?id=8157
    raw_code.FlushICache();
  }

  if (profiler_data_ && FLAG_turbo_profiling_verbose) {
#ifdef ENABLE_DISASSEMBLER
    std::ostringstream os;
    code->Disassemble(nullptr, os, isolate_);
    if (!on_heap_profiler_data.is_null()) {
      Handle<String> disassembly =
          isolate_->factory()->NewStringFromAsciiChecked(os.str().c_str(),
                                                         AllocationType::kOld);
      on_heap_profiler_data->set_code(*disassembly);
    } else {
      profiler_data_->SetCode(os);
    }
#endif  // ENABLE_DISASSEMBLER
  }

  return code;
}

266
// TODO(victorgomes): Unify the two AllocateCodes
267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299
// Allocates (but does not initialize) a Code object on the main-thread heap.
// With |retry_allocation_or_fail| set, allocation retries until success;
// otherwise a light retry is attempted and an empty handle is returned on
// failure. The caller must initialize the object before the next GC.
MaybeHandle<Code> Factory::CodeBuilder::AllocateCode(
    bool retry_allocation_or_fail) {
  Heap* heap = isolate_->heap();
  HeapObject result;
  // With an external code space, all Code objects live in code space even if
  // not executable; otherwise non-executable code goes to read-only space.
  AllocationType allocation_type = V8_EXTERNAL_CODE_SPACE_BOOL || is_executable_
                                       ? AllocationType::kCode
                                       : AllocationType::kReadOnly;
  const int object_size = Code::SizeFor(code_desc_.body_size());
  if (retry_allocation_or_fail) {
    result = heap->AllocateRawWith<Heap::kRetryOrFail>(
        object_size, allocation_type, AllocationOrigin::kRuntime);
  } else {
    result = heap->AllocateRawWith<Heap::kLightRetry>(
        object_size, allocation_type, AllocationOrigin::kRuntime);
    // Return an empty handle if we cannot allocate the code object.
    if (result.is_null()) return MaybeHandle<Code>();
  }

  // The code object has not been fully initialized yet.  We rely on the
  // fact that no allocation will happen from this point on.
  DisallowGarbageCollection no_gc;
  result.set_map_after_allocation(*isolate_->factory()->code_map(),
                                  SKIP_WRITE_BARRIER);
  Handle<Code> code = handle(Code::cast(result), isolate_);
  if (is_executable_) {
    DCHECK(IsAligned(code->address(), kCodeAlignment));
    DCHECK_IMPLIES(
        !V8_ENABLE_THIRD_PARTY_HEAP_BOOL && !heap->code_region().is_empty(),
        heap->code_region().contains(code->address()));
  }
  return code;
}

300 301 302 303 304 305 306
// Allocates an uninitialized Code object from the background thread's
// LocalHeap, for concurrent Sparkplug (baseline) compilation.
// |retry_allocation_or_fail| is accepted for interface symmetry with
// AllocateCode but unused here: LocalHeap::AllocateRaw reports failure
// through the returned AllocationResult.
// (Removed interleaved blame-UI line-number artifacts that broke the build.)
MaybeHandle<Code> Factory::CodeBuilder::AllocateConcurrentSparkplugCode(
    bool retry_allocation_or_fail) {
  LocalHeap* heap = local_isolate_->heap();
  AllocationType allocation_type = V8_EXTERNAL_CODE_SPACE_BOOL || is_executable_
                                       ? AllocationType::kCode
                                       : AllocationType::kReadOnly;
  const int object_size = Code::SizeFor(code_desc_.body_size());
  HeapObject result;
  if (!heap->AllocateRaw(object_size, allocation_type).To(&result)) {
    return MaybeHandle<Code>();
  }
  CHECK(!result.is_null());

  // The code object has not been fully initialized yet.  We rely on the
  // fact that no allocation will happen from this point on.
  DisallowGarbageCollection no_gc;
  result.set_map_after_allocation(*local_isolate_->factory()->code_map(),
                                  SKIP_WRITE_BARRIER);
  Handle<Code> code = handle(Code::cast(result), local_isolate_);
  DCHECK_IMPLIES(is_executable_, IsAligned(code->address(), kCodeAlignment));
  return code;
}

323 324 325
// Like Build(), but returns an empty MaybeHandle instead of aborting when
// the code object cannot be allocated.
MaybeHandle<Code> Factory::CodeBuilder::TryBuild() {
  constexpr bool kRetryAllocationOrFail = false;
  return BuildInternal(kRetryAllocationOrFail);
}
326

327
// Builds the Code object, retrying allocation until it succeeds; the
// process aborts if allocation ultimately fails (ToHandleChecked).
// (Removed interleaved blame-UI line-number artifact that broke the build.)
Handle<Code> Factory::CodeBuilder::Build() {
  return BuildInternal(true).ToHandleChecked();
}
330

331 332 333
// Allocates |size| bytes on the heap with the given allocation type and
// alignment, retrying (and ultimately aborting) on failure. Returns
// uninitialized memory; the caller must install a map immediately.
// (Removed interleaved blame-UI line-number artifacts that broke the build.)
HeapObject Factory::AllocateRaw(int size, AllocationType allocation,
                                AllocationAlignment alignment) {
  return isolate()->heap()->AllocateRawWith<Heap::kRetryOrFail>(
      size, allocation, AllocationOrigin::kRuntime, alignment);
}
336

337
// Allocates an object of |map|'s instance size, optionally followed in
// memory by an AllocationMemento linking the object to |allocation_site|.
// (Removed interleaved blame-UI line-number artifacts that broke the build.)
HeapObject Factory::AllocateRawWithAllocationSite(
    Handle<Map> map, AllocationType allocation,
    Handle<AllocationSite> allocation_site) {
  DCHECK(map->instance_type() != MAP_TYPE);
  int size = map->instance_size();
  if (!allocation_site.is_null()) {
    DCHECK(V8_ALLOCATION_SITE_TRACKING_BOOL);
    size += AllocationMemento::kSize;
  }
  HeapObject result =
      isolate()->heap()->AllocateRawWith<Heap::kRetryOrFail>(size, allocation);
  // Young-generation stores need no write barrier for the map word.
  WriteBarrierMode write_barrier_mode = allocation == AllocationType::kYoung
                                            ? SKIP_WRITE_BARRIER
                                            : UPDATE_WRITE_BARRIER;
  result.set_map_after_allocation(*map, write_barrier_mode);
  if (!allocation_site.is_null()) {
    // The memento occupies the extra space directly after the object proper.
    AllocationMemento alloc_memento = AllocationMemento::unchecked_cast(
        Object(result.ptr() + map->instance_size()));
    InitializeAllocationMemento(alloc_memento, *allocation_site);
  }
  return result;
}

360 361
// Initializes a freshly allocated AllocationMemento, pointing it at
// |allocation_site|, and bumps the site's memento-creation counter when
// allocation-site pretenuring is enabled.
// (Removed interleaved blame-UI line-number artifacts that broke the build.)
void Factory::InitializeAllocationMemento(AllocationMemento memento,
                                          AllocationSite allocation_site) {
  DCHECK(V8_ALLOCATION_SITE_TRACKING_BOOL);
  memento.set_map_after_allocation(*allocation_memento_map(),
                                   SKIP_WRITE_BARRIER);
  memento.set_allocation_site(allocation_site, SKIP_WRITE_BARRIER);
  if (FLAG_allocation_site_pretenuring) {
    allocation_site.IncrementMementoCreateCount();
  }
}

371
// Allocates an object of |map|'s instance size and installs the map.
// Fields other than the map word remain uninitialized.
// (Removed interleaved blame-UI line-number artifacts that broke the build.)
HeapObject Factory::New(Handle<Map> map, AllocationType allocation) {
  DCHECK(map->instance_type() != MAP_TYPE);
  int size = map->instance_size();
  HeapObject result =
      isolate()->heap()->AllocateRawWith<Heap::kRetryOrFail>(size, allocation);
  // New space objects are allocated white.
  WriteBarrierMode write_barrier_mode = allocation == AllocationType::kYoung
                                            ? SKIP_WRITE_BARRIER
                                            : UPDATE_WRITE_BARRIER;
  result.set_map_after_allocation(*map, write_barrier_mode);
  return result;
}
383

384 385
// Allocates |size| bytes and formats the region as a filler object so the
// heap stays iterable.
// (Removed interleaved blame-UI line-number artifacts that broke the build.)
Handle<HeapObject> Factory::NewFillerObject(int size,
                                            AllocationAlignment alignment,
                                            AllocationType allocation,
                                            AllocationOrigin origin) {
  Heap* heap = isolate()->heap();
  HeapObject result = heap->AllocateRawWith<Heap::kRetryOrFail>(
      size, allocation, origin, alignment);
  heap->CreateFillerObjectAt(result.address(), size, ClearRecordedSlots::kNo);
  return Handle<HeapObject>(result, isolate());
}
394

395
// Creates a PrototypeInfo struct in old space with default field values.
// (Removed interleaved blame-UI line-number artifacts that broke the build.)
Handle<PrototypeInfo> Factory::NewPrototypeInfo() {
  auto result = NewStructInternal<PrototypeInfo>(PROTOTYPE_INFO_TYPE,
                                                 AllocationType::kOld);
  DisallowGarbageCollection no_gc;
  result.set_prototype_users(Smi::zero());
  result.set_registry_slot(PrototypeInfo::UNREGISTERED);
  result.set_bit_field(0);
  result.set_module_namespace(*undefined_value(), SKIP_WRITE_BARRIER);
  return handle(result, isolate());
}

406 407
// Creates an EnumCache struct in old space holding |keys| and |indices|.
// (Removed interleaved blame-UI line-number artifacts that broke the build.)
Handle<EnumCache> Factory::NewEnumCache(Handle<FixedArray> keys,
                                        Handle<FixedArray> indices) {
  auto result =
      NewStructInternal<EnumCache>(ENUM_CACHE_TYPE, AllocationType::kOld);
  DisallowGarbageCollection no_gc;
  result.set_keys(*keys);
  result.set_indices(*indices);
  return handle(result, isolate());
}

416
// Creates a Tuple2 struct holding |value1| and |value2|.
// (Removed interleaved blame-UI line-number artifacts that broke the build.)
Handle<Tuple2> Factory::NewTuple2(Handle<Object> value1, Handle<Object> value2,
                                  AllocationType allocation) {
  auto result = NewStructInternal<Tuple2>(TUPLE2_TYPE, allocation);
  DisallowGarbageCollection no_gc;
  result.set_value1(*value1);
  result.set_value2(*value2);
  return handle(result, isolate());
}

425
// Creates an Oddball of the given |kind| in read-only space and initializes
// its to_string/to_number/type_of fields.
// (Removed interleaved blame-UI line-number artifacts that broke the build.)
Handle<Oddball> Factory::NewOddball(Handle<Map> map, const char* to_string,
                                    Handle<Object> to_number,
                                    const char* type_of, byte kind) {
  Handle<Oddball> oddball(Oddball::cast(New(map, AllocationType::kReadOnly)),
                          isolate());
  Oddball::Initialize(isolate(), oddball, to_string, to_number, type_of, kind);
  return oddball;
}

434
// Creates the oddball used as a placeholder for a code object's reference
// to itself during code generation.
// (Removed interleaved blame-UI line-number artifacts that broke the build.)
Handle<Oddball> Factory::NewSelfReferenceMarker() {
  return NewOddball(self_reference_marker_map(), "self_reference_marker",
                    handle(Smi::FromInt(-1), isolate()), "undefined",
                    Oddball::kSelfReferenceMarker);
}

440 441 442 443 444 445 446
// Creates the oddball used as a placeholder for basic-block counter arrays
// while generating embedded builtins.
Handle<Oddball> Factory::NewBasicBlockCountersMarker() {
  Handle<Object> to_number = handle(Smi::FromInt(-1), isolate());
  return NewOddball(basic_block_counters_marker_map(),
                    "basic_block_counters_marker", to_number, "undefined",
                    Oddball::kBasicBlockCountersMarker);
}

447
// Creates a PropertyArray of |length| slots, filled with undefined.
// Returns the canonical empty array for length 0.
// (Removed interleaved blame-UI line-number artifacts that broke the build.)
Handle<PropertyArray> Factory::NewPropertyArray(int length) {
  DCHECK_LE(0, length);
  if (length == 0) return empty_property_array();
  HeapObject result = AllocateRawFixedArray(length, AllocationType::kYoung);
  DisallowGarbageCollection no_gc;
  result.set_map_after_allocation(*property_array_map(), SKIP_WRITE_BARRIER);
  PropertyArray array = PropertyArray::cast(result);
  array.initialize_length(length);
  MemsetTagged(array.data_start(), read_only_roots().undefined_value(), length);
  return handle(array, isolate());
}

459 460
// Attempts to create a FixedArray of |length| elements (filled with
// undefined); returns an empty MaybeHandle if allocation fails rather than
// triggering a GC-retry-or-abort.
// (Removed interleaved blame-UI line-number artifacts that broke the build.)
MaybeHandle<FixedArray> Factory::TryNewFixedArray(
    int length, AllocationType allocation_type) {
  DCHECK_LE(0, length);
  if (length == 0) return empty_fixed_array();

  int size = FixedArray::SizeFor(length);
  Heap* heap = isolate()->heap();
  AllocationResult allocation = heap->AllocateRaw(size, allocation_type);
  HeapObject result;
  if (!allocation.To(&result)) return MaybeHandle<FixedArray>();
  // Large arrays get an incremental-marking progress bar on their page.
  if ((size > heap->MaxRegularHeapObjectSize(allocation_type)) &&
      FLAG_use_marking_progress_bar) {
    LargePage::FromHeapObject(result)->ProgressBar().Enable();
  }
  DisallowGarbageCollection no_gc;
  result.set_map_after_allocation(*fixed_array_map(), SKIP_WRITE_BARRIER);
  FixedArray array = FixedArray::cast(result);
  array.set_length(length);
  MemsetTagged(array.data_start(), *undefined_value(), length);
  return handle(array, isolate());
}
480

481
// Creates a ClosureFeedbackCellArray of |length| cells in old space.
// Returns the canonical empty array for length 0.
// (Removed interleaved blame-UI line-number artifacts that broke the build.)
Handle<ClosureFeedbackCellArray> Factory::NewClosureFeedbackCellArray(
    int length) {
  if (length == 0) return empty_closure_feedback_cell_array();

  Handle<ClosureFeedbackCellArray> feedback_cell_array =
      Handle<ClosureFeedbackCellArray>::cast(NewFixedArrayWithMap(
          read_only_roots().closure_feedback_cell_array_map_handle(), length,
          AllocationType::kOld));

  return feedback_cell_array;
}

493
// Creates a FeedbackVector sized to |shared|'s feedback metadata, with all
// feedback slots initialized to undefined and counters zeroed.
// (Removed interleaved blame-UI line-number artifacts that broke the build.)
Handle<FeedbackVector> Factory::NewFeedbackVector(
    Handle<SharedFunctionInfo> shared,
    Handle<ClosureFeedbackCellArray> closure_feedback_cell_array) {
  int length = shared->feedback_metadata().slot_count();
  DCHECK_LE(0, length);
  int size = FeedbackVector::SizeFor(length);

  FeedbackVector vector = FeedbackVector::cast(AllocateRawWithImmortalMap(
      size, AllocationType::kOld, *feedback_vector_map()));
  DisallowGarbageCollection no_gc;
  vector.set_shared_function_info(*shared);
  vector.set_maybe_optimized_code(HeapObjectReference::ClearedValue(isolate()),
                                  kReleaseStore);
  vector.set_length(length);
  vector.set_invocation_count(0);
  vector.set_profiler_ticks(0);
  vector.InitializeOptimizationState();
  vector.set_closure_feedback_cell_array(*closure_feedback_cell_array);

  // TODO(leszeks): Initialize based on the feedback metadata.
  MemsetTagged(ObjectSlot(vector.slots_start()), *undefined_value(), length);
  return handle(vector, isolate());
}

517
// Creates an EmbedderDataArray of |length| slots in young space; slots are
// cleared to undefined and each gets an external pointer table entry.
// (Removed interleaved blame-UI line-number artifacts that broke the build.)
Handle<EmbedderDataArray> Factory::NewEmbedderDataArray(int length) {
  DCHECK_LE(0, length);
  int size = EmbedderDataArray::SizeFor(length);
  EmbedderDataArray array = EmbedderDataArray::cast(AllocateRawWithImmortalMap(
      size, AllocationType::kYoung, *embedder_data_array_map()));
  DisallowGarbageCollection no_gc;
  array.set_length(length);

  if (length > 0) {
    ObjectSlot start(array.slots_start());
    ObjectSlot end(array.slots_end());
    size_t slot_count = end - start;
    MemsetTagged(start, *undefined_value(), slot_count);
    for (int i = 0; i < length; i++) {
      // TODO(v8:10391, saelo): Handle external pointers in EmbedderDataSlot
      EmbedderDataSlot(array, i).AllocateExternalPointerEntry(isolate());
    }
  }
  return handle(array, isolate());
}

538
// Creates a FixedDoubleArray of |length| elements, all set to the hole.
// (Removed interleaved blame-UI line-number artifacts that broke the build.)
Handle<FixedArrayBase> Factory::NewFixedDoubleArrayWithHoles(int length) {
  DCHECK_LE(0, length);
  Handle<FixedArrayBase> array = NewFixedDoubleArray(length);
  if (length > 0) {
    Handle<FixedDoubleArray>::cast(array)->FillWithHoles(0, length);
  }
  return array;
}

547 548
// Allocates and initializes a small ordered hash table of type T with at
// least |capacity| entries (rounded up to a power of two, clamped to
// T::kMaxCapacity).
// (Removed interleaved blame-UI line-number artifacts that broke the build.)
template <typename T>
Handle<T> Factory::AllocateSmallOrderedHashTable(Handle<Map> map, int capacity,
                                                 AllocationType allocation) {
  // Capacity must be a power of two, since we depend on being able
  // to divide and multiple by 2 (kLoadFactor) to derive capacity
  // from number of buckets. If we decide to change kLoadFactor
  // to something other than 2, capacity should be stored as another
  // field of this object.
  DCHECK_EQ(T::kLoadFactor, 2);
  capacity =
      base::bits::RoundUpToPowerOfTwo32(std::max({T::kMinCapacity, capacity}));
  capacity = std::min({capacity, T::kMaxCapacity});

  DCHECK_LT(0, capacity);
  DCHECK_EQ(0, capacity % T::kLoadFactor);

  int size = T::SizeFor(capacity);
  HeapObject result = AllocateRawWithImmortalMap(size, allocation, *map);
  Handle<T> table(T::cast(result), isolate());
  table->Initialize(isolate(), capacity);
  return table;
}

570
// Creates a SmallOrderedHashSet with at least |capacity| entries.
// (Removed interleaved blame-UI line-number artifacts that broke the build.)
Handle<SmallOrderedHashSet> Factory::NewSmallOrderedHashSet(
    int capacity, AllocationType allocation) {
  return AllocateSmallOrderedHashTable<SmallOrderedHashSet>(
      small_ordered_hash_set_map(), capacity, allocation);
}

576
// Creates a SmallOrderedHashMap with at least |capacity| entries.
// (Removed interleaved blame-UI line-number artifacts that broke the build.)
Handle<SmallOrderedHashMap> Factory::NewSmallOrderedHashMap(
    int capacity, AllocationType allocation) {
  return AllocateSmallOrderedHashTable<SmallOrderedHashMap>(
      small_ordered_hash_map_map(), capacity, allocation);
}

// Creates a SmallOrderedNameDictionary with at least |capacity| entries and
// no identity hash yet.
// (Removed interleaved blame-UI line-number artifacts that broke the build.)
Handle<SmallOrderedNameDictionary> Factory::NewSmallOrderedNameDictionary(
    int capacity, AllocationType allocation) {
  Handle<SmallOrderedNameDictionary> dict =
      AllocateSmallOrderedHashTable<SmallOrderedNameDictionary>(
          small_ordered_name_dictionary_map(), capacity, allocation);
  dict->SetHash(PropertyArray::kNoHashSentinel);
  return dict;
}

591
// Creates an OrderedHashSet with the default initial capacity.
// (Removed interleaved blame-UI line-number artifacts that broke the build.)
Handle<OrderedHashSet> Factory::NewOrderedHashSet() {
  return OrderedHashSet::Allocate(isolate(), OrderedHashSet::kInitialCapacity,
                                  AllocationType::kYoung)
      .ToHandleChecked();
}

// Creates an OrderedHashMap with the default initial capacity.
// (Removed interleaved blame-UI line-number artifacts that broke the build.)
Handle<OrderedHashMap> Factory::NewOrderedHashMap() {
  return OrderedHashMap::Allocate(isolate(), OrderedHashMap::kInitialCapacity,
                                  AllocationType::kYoung)
      .ToHandleChecked();
}

603 604 605
// Creates an OrderedNameDictionary with the given capacity in young space.
// (Removed interleaved blame-UI line-number artifacts that broke the build.)
Handle<OrderedNameDictionary> Factory::NewOrderedNameDictionary(int capacity) {
  return OrderedNameDictionary::Allocate(isolate(), capacity,
                                         AllocationType::kYoung)
      .ToHandleChecked();
}

609 610 611 612
// Convenience wrapper: creates a NameDictionary sized for at least
// |at_least_space_for| entries.
Handle<NameDictionary> Factory::NewNameDictionary(int at_least_space_for) {
  Handle<NameDictionary> dictionary =
      NameDictionary::New(isolate(), at_least_space_for);
  return dictionary;
}

613
// Creates a PropertyDescriptorObject with cleared flags and value/get/set
// initialized to the-hole.
// (Removed interleaved blame-UI line-number artifacts that broke the build.)
Handle<PropertyDescriptorObject> Factory::NewPropertyDescriptorObject() {
  auto object = NewStructInternal<PropertyDescriptorObject>(
      PROPERTY_DESCRIPTOR_OBJECT_TYPE, AllocationType::kYoung);
  DisallowGarbageCollection no_gc;
  object.set_flags(0);
  Oddball the_hole = read_only_roots().the_hole_value();
  object.set_value(the_hole, SKIP_WRITE_BARRIER);
  object.set_get(the_hole, SKIP_WRITE_BARRIER);
  object.set_set(the_hole, SKIP_WRITE_BARRIER);
  return handle(object, isolate());
}

625 626 627 628 629 630 631 632 633
// (Removed interleaved blame-UI line-number artifacts that broke the build.)
Handle<SwissNameDictionary> Factory::CreateCanonicalEmptySwissNameDictionary() {
  // This function is only supposed to be used to create the canonical empty
  // version and should not be used afterwards.
  DCHECK_EQ(kNullAddress, ReadOnlyRoots(isolate()).at(
                              RootIndex::kEmptySwissPropertyDictionary));

  ReadOnlyRoots roots(isolate());

  Handle<ByteArray> empty_meta_table =
      NewByteArray(SwissNameDictionary::kMetaTableEnumerationDataStartIndex,
                   AllocationType::kReadOnly);

  Map map = roots.swiss_name_dictionary_map();
  int size = SwissNameDictionary::SizeFor(0);
  HeapObject obj =
      AllocateRawWithImmortalMap(size, AllocationType::kReadOnly, map);
  SwissNameDictionary result = SwissNameDictionary::cast(obj);
  result.Initialize(isolate(), *empty_meta_table, 0);
  return handle(result, isolate());
}

646
// Internalized strings are created in the old generation (data space).
647
Handle<String> Factory::InternalizeUtf8String(
648 649 650
    const base::Vector<const char>& string) {
  base::Vector<const uint8_t> utf8_data =
      base::Vector<const uint8_t>::cast(string);
651
  Utf8Decoder decoder(utf8_data);
652
  if (decoder.is_ascii()) return InternalizeString(utf8_data);
653 654 655
  if (decoder.is_one_byte()) {
    std::unique_ptr<uint8_t[]> buffer(new uint8_t[decoder.utf16_length()]);
    decoder.Decode(buffer.get(), utf8_data);
656
    return InternalizeString(
657
        base::Vector<const uint8_t>(buffer.get(), decoder.utf16_length()));
658 659 660
  }
  std::unique_ptr<uint16_t[]> buffer(new uint16_t[decoder.utf16_length()]);
  decoder.Decode(buffer.get(), utf8_data);
661
  return InternalizeString(
662
      base::Vector<const base::uc16>(buffer.get(), decoder.utf16_length()));
663 664
}

665 666 667 668 669
// Internalizes the substring [from, from + length) of |string|, optionally
// converting the encoding (two-byte to one-byte) when possible.
// (Removed interleaved blame-UI line-number artifact that broke the build.)
template <typename SeqString>
Handle<String> Factory::InternalizeString(Handle<SeqString> string, int from,
                                          int length, bool convert_encoding) {
  SeqSubStringKey<SeqString> key(isolate(), string, from, length,
                                 convert_encoding);
  return InternalizeStringWithKey(&key);
}

673 674 675 676 677 678
// Explicit instantiations for the two sequential string representations.
template Handle<String> Factory::InternalizeString(
    Handle<SeqOneByteString> string, int from, int length,
    bool convert_encoding);
template Handle<String> Factory::InternalizeString(
    Handle<SeqTwoByteString> string, int from, int length,
    bool convert_encoding);
679

680
// Creates a (non-internalized) string from Latin-1 bytes. Lengths 0 and 1
// are served from canonical/cached strings; allocation failure propagates
// through the MaybeHandle.
MaybeHandle<String> Factory::NewStringFromOneByte(
    const base::Vector<const uint8_t>& string, AllocationType allocation) {
  DCHECK_NE(allocation, AllocationType::kReadOnly);
  int length = string.length();
  if (length == 0) return empty_string();
  if (length == 1) return LookupSingleCharacterStringFromCode(string[0]);
  Handle<SeqOneByteString> result;
  ASSIGN_RETURN_ON_EXCEPTION(isolate(), result,
                             NewRawOneByteString(string.length(), allocation),
                             String);

  DisallowGarbageCollection no_gc;
  // Copy the characters into the new object.
  CopyChars(SeqOneByteString::cast(*result).GetChars(no_gc), string.begin(),
            length);
  return result;
}

698 699 700 701
// Creates a (non-internalized) string from UTF-8 data, choosing a one-byte
// or two-byte sequential representation based on the decoded contents.
MaybeHandle<String> Factory::NewStringFromUtf8(
    const base::Vector<const char>& string, AllocationType allocation) {
  base::Vector<const uint8_t> utf8_data =
      base::Vector<const uint8_t>::cast(string);
  Utf8Decoder decoder(utf8_data);

  if (decoder.utf16_length() == 0) return empty_string();

  if (decoder.is_one_byte()) {
    // Allocate string.
    Handle<SeqOneByteString> result;
    ASSIGN_RETURN_ON_EXCEPTION(
        isolate(), result,
        NewRawOneByteString(decoder.utf16_length(), allocation), String);

    DisallowGarbageCollection no_gc;
    decoder.Decode(result->GetChars(no_gc), utf8_data);
    return result;
  }

  // Allocate string.
  Handle<SeqTwoByteString> result;
  ASSIGN_RETURN_ON_EXCEPTION(
      isolate(), result,
      NewRawTwoByteString(decoder.utf16_length(), allocation), String);

  DisallowGarbageCollection no_gc;
  decoder.Decode(result->GetChars(no_gc), utf8_data);
  return result;
}

// Creates a string from the UTF-8 bytes [begin, begin + length) of |str|.
// The byte range is re-read after each allocation because |str| may move
// during GC; the statement order here is deliberate.
MaybeHandle<String> Factory::NewStringFromUtf8SubString(
    Handle<SeqOneByteString> str, int begin, int length,
    AllocationType allocation) {
  base::Vector<const uint8_t> utf8_data;
  {
    DisallowGarbageCollection no_gc;
    utf8_data =
        base::Vector<const uint8_t>(str->GetChars(no_gc) + begin, length);
  }
  Utf8Decoder decoder(utf8_data);

  if (length == 1) {
    uint16_t t;
    // Decode even in the case of length 1 since it can be a bad character.
    decoder.Decode(&t, utf8_data);
    return LookupSingleCharacterStringFromCode(t);
  }

  if (decoder.is_ascii()) {
    // If the string is ASCII, we can just make a substring.
    // TODO(v8): the allocation flag is ignored in this case.
    return NewSubString(str, begin, begin + length);
  }

  DCHECK_GT(decoder.utf16_length(), 0);

  if (decoder.is_one_byte()) {
    // Allocate string.
    Handle<SeqOneByteString> result;
    ASSIGN_RETURN_ON_EXCEPTION(
        isolate(), result,
        NewRawOneByteString(decoder.utf16_length(), allocation), String);
    DisallowGarbageCollection no_gc;
    // Update pointer references, since the original string may have moved after
    // allocation.
    utf8_data =
        base::Vector<const uint8_t>(str->GetChars(no_gc) + begin, length);
    decoder.Decode(result->GetChars(no_gc), utf8_data);
    return result;
  }

  // Allocate string.
  Handle<SeqTwoByteString> result;
  ASSIGN_RETURN_ON_EXCEPTION(
      isolate(), result,
      NewRawTwoByteString(decoder.utf16_length(), allocation), String);

  DisallowGarbageCollection no_gc;
  // Update pointer references, since the original string may have moved after
  // allocation.
  utf8_data = base::Vector<const uint8_t>(str->GetChars(no_gc) + begin, length);
  decoder.Decode(result->GetChars(no_gc), utf8_data);
  return result;
}

784
// Creates a (non-internalized) string from |length| UTF-16 code units.
// Contents that fit in Latin-1 are stored in a one-byte string.
MaybeHandle<String> Factory::NewStringFromTwoByte(const base::uc16* string,
                                                  int length,
                                                  AllocationType allocation) {
  DCHECK_NE(allocation, AllocationType::kReadOnly);
  if (length == 0) return empty_string();
  if (String::IsOneByte(string, length)) {
    if (length == 1) return LookupSingleCharacterStringFromCode(string[0]);
    Handle<SeqOneByteString> result;
    ASSIGN_RETURN_ON_EXCEPTION(isolate(), result,
                               NewRawOneByteString(length, allocation), String);
    DisallowGarbageCollection no_gc;
    CopyChars(result->GetChars(no_gc), string, length);
    return result;
  } else {
    Handle<SeqTwoByteString> result;
    ASSIGN_RETURN_ON_EXCEPTION(isolate(), result,
                               NewRawTwoByteString(length, allocation), String);
    DisallowGarbageCollection no_gc;
    CopyChars(result->GetChars(no_gc), string, length);
    return result;
  }
}

807
// Vector overload: forwards to the pointer/length implementation.
MaybeHandle<String> Factory::NewStringFromTwoByte(
    const base::Vector<const base::uc16>& string, AllocationType allocation) {
  const base::uc16* data = string.begin();
  const int length = string.length();
  return NewStringFromTwoByte(data, length, allocation);
}

// ZoneVector overload: forwards to the pointer/length implementation.
MaybeHandle<String> Factory::NewStringFromTwoByte(
    const ZoneVector<base::uc16>* string, AllocationType allocation) {
  const int length = static_cast<int>(string->size());
  return NewStringFromTwoByte(string->data(), length, allocation);
}
817

818 819 820 821 822 823 824 825 826 827 828 829 830 831 832 833 834 835 836 837 838 839
namespace {

// Writes the contents of |s| into |chars| as Latin-1. |len| must equal the
// string's length.
inline void WriteOneByteData(Handle<String> s, uint8_t* chars, int len) {
  DCHECK(s->length() == len);
  String::WriteToFlat(*s, chars, 0, len);
}

// Writes the contents of |s| into |chars| as UTF-16. |len| must equal the
// string's length.
inline void WriteTwoByteData(Handle<String> s, uint16_t* chars, int len) {
  DCHECK(s->length() == len);
  String::WriteToFlat(*s, chars, 0, len);
}

}  // namespace

// Allocates a sequential internalized string of |chars| characters with the
// precomputed |hash_field|, then fills its payload from |t| via
// Write{One,Two}ByteData.
template <bool is_one_byte, typename T>
Handle<String> Factory::AllocateInternalizedStringImpl(T t, int chars,
                                                       uint32_t hash_field) {
  DCHECK_LE(0, chars);
  DCHECK_GE(String::kMaxLength, chars);

  // Compute map and object size.
  int size;
  Map map;
  if (is_one_byte) {
    map = *one_byte_internalized_string_map();
    size = SeqOneByteString::SizeFor(chars);
  } else {
    map = *internalized_string_map();
    size = SeqTwoByteString::SizeFor(chars);
  }

  // Prefer read-only space when the heap still allows it, otherwise old
  // space; the choice may be further refined per string map.
  String result = String::cast(AllocateRawWithImmortalMap(
      size,
      RefineAllocationTypeForInPlaceInternalizableString(
          isolate()->heap()->CanAllocateInReadOnlySpace()
              ? AllocationType::kReadOnly
              : AllocationType::kOld,
          map),
      map));
  DisallowGarbageCollection no_gc;
  result.set_length(chars);
  result.set_raw_hash_field(hash_field);
  DCHECK_EQ(size, result.Size());

  if (is_one_byte) {
    WriteOneByteData(t, SeqOneByteString::cast(result).GetChars(no_gc), chars);
  } else {
    WriteTwoByteData(t, SeqTwoByteString::cast(result).GetChars(no_gc), chars);
  }
  return handle(result, isolate());
}

// Copies |string| into a freshly allocated internalized string, dispatching
// on the source string's representation.
Handle<String> Factory::NewInternalizedStringImpl(Handle<String> string,
                                                  int chars,
                                                  uint32_t hash_field) {
  const bool one_byte = string->IsOneByteRepresentation();
  return one_byte
             ? AllocateInternalizedStringImpl<true>(string, chars, hash_field)
             : AllocateInternalizedStringImpl<false>(string, chars, hash_field);
}

879
// Decides how |string| should be internalized: copied into a new
// internalized string (kCopy), transitioned in place to the map stored in
// |internalized_map| (kInPlace), or left alone because it already is
// internalized (kAlreadyTransitioned).
StringTransitionStrategy Factory::ComputeInternalizationStrategyForString(
    Handle<String> string, MaybeHandle<Map>* internalized_map) {
  // Do not internalize young strings in-place: This allows us to ignore both
  // string table and stub cache on scavenges.
  if (Heap::InYoungGeneration(*string)) {
    return StringTransitionStrategy::kCopy;
  }
  DCHECK_NOT_NULL(internalized_map);
  DisallowGarbageCollection no_gc;
  // This method may be called concurrently, so snapshot the map from the input
  // string instead of the calling IsType methods on HeapObject, which would
  // reload the map each time.
  Map map = string->map();
  *internalized_map = GetInPlaceInternalizedStringMap(map);
  if (!internalized_map->is_null()) {
    return StringTransitionStrategy::kInPlace;
  }
  if (InstanceTypeChecker::IsInternalizedString(map.instance_type())) {
    return StringTransitionStrategy::kAlreadyTransitioned;
  }
  return StringTransitionStrategy::kCopy;
}

// Creates an old-space external internalized string carrying over |string|'s
// length and hash. The resource pointer is initialized to null here —
// presumably the caller transfers the actual resource afterwards; TODO
// confirm against callers.
template <class StringClass>
Handle<StringClass> Factory::InternalizeExternalString(Handle<String> string) {
  Handle<Map> map =
      GetInPlaceInternalizedStringMap(string->map()).ToHandleChecked();
  StringClass external_string =
      StringClass::cast(New(map, AllocationType::kOld));
  DisallowGarbageCollection no_gc;
  external_string.AllocateExternalPointerEntries(isolate());
  StringClass cast_string = StringClass::cast(*string);
  external_string.set_length(cast_string.length());
  external_string.set_raw_hash_field(cast_string.raw_hash_field());
  external_string.SetResource(isolate(), nullptr);
  isolate()->heap()->RegisterExternalString(external_string);
  return handle(external_string, isolate());
}

template Handle<ExternalOneByteString>
    Factory::InternalizeExternalString<ExternalOneByteString>(Handle<String>);
template Handle<ExternalTwoByteString>
    Factory::InternalizeExternalString<ExternalTwoByteString>(Handle<String>);
922

923 924 925 926 927 928 929 930 931 932 933 934 935 936 937 938 939 940 941 942 943 944 945 946 947
// Decides how |string| should become a shared string (shared-string-table
// mode only): copied (kCopy), transitioned in place to the shared map stored
// in |shared_map| (kInPlace), or already shared (kAlreadyTransitioned).
StringTransitionStrategy Factory::ComputeSharingStrategyForString(
    Handle<String> string, MaybeHandle<Map>* shared_map) {
  DCHECK(FLAG_shared_string_table);
  // Do not share young strings in-place: there is no shared young space.
  if (Heap::InYoungGeneration(*string)) {
    return StringTransitionStrategy::kCopy;
  }
  DCHECK_NOT_NULL(shared_map);
  DisallowGarbageCollection no_gc;
  InstanceType instance_type = string->map().instance_type();
  if (StringShape(instance_type).IsShared()) {
    return StringTransitionStrategy::kAlreadyTransitioned;
  }
  // Only plain sequential strings can be shared in place; all other shapes
  // (cons, sliced, external, ...) fall through to the copy path.
  switch (instance_type) {
    case STRING_TYPE:
      *shared_map = read_only_roots().shared_string_map_handle();
      return StringTransitionStrategy::kInPlace;
    case ONE_BYTE_STRING_TYPE:
      *shared_map = read_only_roots().shared_one_byte_string_map_handle();
      return StringTransitionStrategy::kInPlace;
    default:
      return StringTransitionStrategy::kCopy;
  }
}

948 949
// Returns the canonical single-character internalized string for |code|.
// Latin-1 characters are additionally memoized in
// single_character_string_cache; other characters are not cached.
Handle<String> Factory::LookupSingleCharacterStringFromCode(uint16_t code) {
  if (code <= unibrow::Latin1::kMaxChar) {
    {
      DisallowGarbageCollection no_gc;
      Object value = single_character_string_cache()->get(code);
      // undefined marks an empty cache slot.
      if (value != *undefined_value()) {
        return handle(String::cast(value), isolate());
      }
    }
    uint8_t buffer[] = {static_cast<uint8_t>(code)};
    Handle<String> result =
        InternalizeString(base::Vector<const uint8_t>(buffer, 1));
    single_character_string_cache()->set(code, *result);
    return result;
  }
  uint16_t buffer[] = {code};
  return InternalizeString(base::Vector<const uint16_t>(buffer, 1));
}

967 968 969 970 971 972 973 974
// Creates a fresh two-character string holding the UTF-16 surrogate pair
// <lead, trail>. The arguments must form a valid surrogate pair.
Handle<String> Factory::NewSurrogatePairString(uint16_t lead, uint16_t trail) {
  DCHECK_GE(lead, 0xD800);
  DCHECK_LE(lead, 0xDBFF);
  DCHECK_GE(trail, 0xDC00);
  DCHECK_LE(trail, 0xDFFF);

  Handle<SeqTwoByteString> pair =
      isolate()->factory()->NewRawTwoByteString(2).ToHandleChecked();
  DisallowGarbageCollection no_gc;
  base::uc16* chars = pair->GetChars(no_gc);
  chars[0] = lead;
  chars[1] = trail;
  return pair;
}
981

982
// Creates a substring of |str| covering [begin, end). "Proper" means the
// result must be a strict sub-range (begin > 0 or end < length).
//
// Lengths 0/1/2 are served from canonical or cached strings. Short results,
// or all results when string slices are disabled, are copied into a fresh
// sequential string; otherwise a SlicedString pointing into the flattened
// parent is created.
Handle<String> Factory::NewProperSubString(Handle<String> str, int begin,
                                           int end) {
#ifdef VERIFY_HEAP
  // Fix: was `#if VERIFY_HEAP`. VERIFY_HEAP is a definition-only macro, so
  // `#if` fails to preprocess in builds that define it; `#ifdef` is correct.
  if (FLAG_verify_heap) str->StringVerify(isolate());
#endif
  DCHECK(begin > 0 || end < str->length());

  str = String::Flatten(isolate(), str);

  int length = end - begin;
  if (length <= 0) return empty_string();
  if (length == 1) {
    return LookupSingleCharacterStringFromCode(str->Get(begin));
  }
  if (length == 2) {
    // Optimization for 2-byte strings often used as keys in a decompression
    // dictionary.  Check whether we already have the string in the string
    // table to prevent creation of many unnecessary strings.
    uint16_t c1 = str->Get(begin);
    uint16_t c2 = str->Get(begin + 1);
    return MakeOrFindTwoCharacterString(c1, c2);
  }

  if (!FLAG_string_slices || length < SlicedString::kMinLength) {
    // Too short for a slice (or slices disabled): copy the characters out.
    if (str->IsOneByteRepresentation()) {
      Handle<SeqOneByteString> result =
          NewRawOneByteString(length).ToHandleChecked();
      DisallowGarbageCollection no_gc;
      uint8_t* dest = result->GetChars(no_gc);
      String::WriteToFlat(*str, dest, begin, length);
      return result;
    } else {
      Handle<SeqTwoByteString> result =
          NewRawTwoByteString(length).ToHandleChecked();
      DisallowGarbageCollection no_gc;
      base::uc16* dest = result->GetChars(no_gc);
      String::WriteToFlat(*str, dest, begin, length);
      return result;
    }
  }

  int offset = begin;

  // A slice of a slice points directly at the root parent, with the offsets
  // combined.
  if (str->IsSlicedString()) {
    Handle<SlicedString> slice = Handle<SlicedString>::cast(str);
    str = Handle<String>(slice->parent(), isolate());
    offset += slice->offset();
  }
  // Never use a ThinString as a slice parent; use the string it points to.
  if (str->IsThinString()) {
    Handle<ThinString> thin = Handle<ThinString>::cast(str);
    str = handle(thin->actual(), isolate());
  }

  DCHECK(str->IsSeqString() || str->IsExternalString());
  Handle<Map> map = str->IsOneByteRepresentation()
                        ? sliced_one_byte_string_map()
                        : sliced_string_map();
  SlicedString slice = SlicedString::cast(New(map, AllocationType::kYoung));
  DisallowGarbageCollection no_gc;
  slice.set_raw_hash_field(String::kEmptyHashField);
  slice.set_length(length);
  slice.set_parent(*str);
  slice.set_offset(offset);
  return handle(slice, isolate());
}

1048 1049
// Creates an old-space external one-byte string backed by |resource|.
// Throws RangeError for over-long resources; registers the result with the
// heap so the resource can be released when the string dies.
MaybeHandle<String> Factory::NewExternalStringFromOneByte(
    const ExternalOneByteString::Resource* resource) {
  size_t length = resource->length();
  if (length > static_cast<size_t>(String::kMaxLength)) {
    THROW_NEW_ERROR(isolate(), NewInvalidStringLengthError(), String);
  }
  if (length == 0) return empty_string();

  // Uncacheable resources get the uncached map variant.
  Handle<Map> map = resource->IsCacheable()
                        ? external_one_byte_string_map()
                        : uncached_external_one_byte_string_map();
  ExternalOneByteString external_string =
      ExternalOneByteString::cast(New(map, AllocationType::kOld));
  DisallowGarbageCollection no_gc;
  external_string.AllocateExternalPointerEntries(isolate());
  external_string.set_length(static_cast<int>(length));
  external_string.set_raw_hash_field(String::kEmptyHashField);
  external_string.SetResource(isolate(), resource);
  isolate()->heap()->RegisterExternalString(external_string);

  return Handle<String>(external_string, isolate());
}

1071
// Creates an old-space external two-byte string backed by |resource|.
// Mirrors NewExternalStringFromOneByte for the two-byte representation.
MaybeHandle<String> Factory::NewExternalStringFromTwoByte(
    const ExternalTwoByteString::Resource* resource) {
  size_t length = resource->length();
  if (length > static_cast<size_t>(String::kMaxLength)) {
    THROW_NEW_ERROR(isolate(), NewInvalidStringLengthError(), String);
  }
  if (length == 0) return empty_string();

  // Uncacheable resources get the uncached map variant.
  Handle<Map> map = resource->IsCacheable() ? external_string_map()
                                            : uncached_external_string_map();
  ExternalTwoByteString string =
      ExternalTwoByteString::cast(New(map, AllocationType::kOld));
  DisallowGarbageCollection no_gc;
  string.AllocateExternalPointerEntries(isolate());
  string.set_length(static_cast<int>(length));
  string.set_raw_hash_field(String::kEmptyHashField);
  string.SetResource(isolate(), resource);
  isolate()->heap()->RegisterExternalString(string);
  return Handle<ExternalTwoByteString>(string, isolate());
}

1092
// Creates a JSStringIterator over |string|, positioned at index 0.
// The iterated string is flattened first.
Handle<JSStringIterator> Factory::NewJSStringIterator(Handle<String> string) {
  Handle<Map> map(isolate()->native_context()->initial_string_iterator_map(),
                  isolate());
  Handle<String> flat_string = String::Flatten(isolate(), string);
  Handle<JSStringIterator> iterator =
      Handle<JSStringIterator>::cast(NewJSObjectFromMap(map));

  DisallowGarbageCollection no_gc;
  JSStringIterator raw = *iterator;
  raw.set_string(*flat_string);
  raw.set_index(0);
  return iterator;
}
1105

1106
// Allocates and initializes a Symbol: random identity hash, undefined
// description, cleared flags. Returns a raw (unhandlified) object — callers
// must not trigger GC before wrapping it in a handle.
Symbol Factory::NewSymbolInternal(AllocationType allocation) {
  DCHECK(allocation != AllocationType::kYoung);
  // Statically ensure that it is safe to allocate symbols in paged spaces.
  STATIC_ASSERT(Symbol::kSize <= kMaxRegularHeapObjectSize);

  Symbol symbol = Symbol::cast(AllocateRawWithImmortalMap(
      Symbol::kSize, allocation, read_only_roots().symbol_map()));
  DisallowGarbageCollection no_gc;
  // Generate a random hash value.
  int hash = isolate()->GenerateIdentityHash(Name::kHashBitMask);
  symbol.set_raw_hash_field(Name::kIsNotIntegerIndexMask |
                            (hash << Name::kHashShift));
  symbol.set_description(read_only_roots().undefined_value(),
                         SKIP_WRITE_BARRIER);
  symbol.set_flags(0);
  DCHECK(!symbol.is_private());
  return symbol;
}
1124

1125 1126 1127 1128
// Handle-returning wrapper around NewSymbolInternal().
Handle<Symbol> Factory::NewSymbol(AllocationType allocation) {
  Symbol raw = NewSymbolInternal(allocation);
  return handle(raw, isolate());
}

1129 1130
// Creates a new Symbol with the private bit set.
Handle<Symbol> Factory::NewPrivateSymbol(AllocationType allocation) {
  DCHECK(allocation != AllocationType::kYoung);
  Symbol symbol = NewSymbolInternal(allocation);
  DisallowGarbageCollection no_gc;
  symbol.set_is_private(true);
  return handle(symbol, isolate());
}

1137
// Creates a private-name Symbol described by |name|.
Handle<Symbol> Factory::NewPrivateNameSymbol(Handle<String> name) {
  Symbol symbol = NewSymbolInternal();
  DisallowGarbageCollection no_gc;
  symbol.set_is_private_name();
  symbol.set_description(*name);
  return handle(symbol, isolate());
}

1145
// Allocates a Context of |size| bytes with the given |map| and fills the
// variadic part (|variadic_part_length| slots, which must agree with |size|)
// with undefined. Returns a raw object; callers must handlify before any
// further allocation.
Context Factory::NewContextInternal(Handle<Map> map, int size,
                                    int variadic_part_length,
                                    AllocationType allocation) {
  DCHECK_LE(Context::kTodoHeaderSize, size);
  DCHECK(IsAligned(size, kTaggedSize));
  DCHECK_LE(Context::MIN_CONTEXT_SLOTS, variadic_part_length);
  DCHECK_LE(Context::SizeFor(variadic_part_length), size);

  HeapObject result =
      isolate()->heap()->AllocateRawWith<Heap::kRetryOrFail>(size, allocation);
  result.set_map_after_allocation(*map);
  DisallowGarbageCollection no_gc;
  Context context = Context::cast(result);
  context.set_length(variadic_part_length);
  DCHECK_EQ(context.SizeFromMap(*map), size);
  if (size > Context::kTodoHeaderSize) {
    ObjectSlot start = context.RawField(Context::kTodoHeaderSize);
    ObjectSlot end = context.RawField(size);
    size_t slot_count = end - start;
    // Clear every slot past the header so the GC never observes garbage.
    MemsetTagged(start, *undefined_value(), slot_count);
  }
  return context;
}

1169
// Creates a fresh, mostly-empty NativeContext in old space, wired to a new
// native-context map. The context and its map reference each other.
Handle<NativeContext> Factory::NewNativeContext() {
  Handle<Map> map = NewMap(NATIVE_CONTEXT_TYPE, kVariableSizeSentinel);
  NativeContext context = NativeContext::cast(NewContextInternal(
      map, NativeContext::kSize, NativeContext::NATIVE_CONTEXT_SLOTS,
      AllocationType::kOld));
  DisallowGarbageCollection no_gc;
  context.set_native_context_map(*map);
  map->set_native_context(context);
  // The ExternalPointerTable is a C++ object.
  context.AllocateExternalPointerEntries(isolate());
  context.set_scope_info(*native_scope_info());
  // A native context has no outer context.
  context.set_previous(Context());
  context.set_extension(*undefined_value());
  context.set_errors_thrown(Smi::zero());
  context.set_math_random_index(Smi::zero());
  context.set_serialized_objects(*empty_fixed_array());
  context.set_microtask_queue(isolate(), nullptr);
  context.set_osr_code_cache(*empty_weak_fixed_array());
  context.set_retained_maps(*empty_weak_array_list());
  return handle(context, isolate());
}

1191
// Creates an old-space script context for |scope_info|, chained to the
// native context |outer|.
Handle<Context> Factory::NewScriptContext(Handle<NativeContext> outer,
                                          Handle<ScopeInfo> scope_info) {
  DCHECK_EQ(scope_info->scope_type(), SCRIPT_SCOPE);
  int variadic_part_length = scope_info->ContextLength();
  Context context =
      NewContextInternal(handle(outer->script_context_map(), isolate()),
                         Context::SizeFor(variadic_part_length),
                         variadic_part_length, AllocationType::kOld);
  DisallowGarbageCollection no_gc;
  context.set_scope_info(*scope_info);
  context.set_previous(*outer);
  DCHECK(context.IsScriptContext());
  return handle(context, isolate());
}

1206
// Creates an empty ScriptContextTable with the minimal backing-store length
// and zero used entries.
Handle<ScriptContextTable> Factory::NewScriptContextTable() {
  Handle<ScriptContextTable> context_table = Handle<ScriptContextTable>::cast(
      NewFixedArrayWithMap(read_only_roots().script_context_table_map_handle(),
                           ScriptContextTable::kMinLength));
  context_table->set_used(0, kReleaseStore);
  return context_table;
}

1214
// Creates an old-space module context for |scope_info|, chained to |outer|,
// with |module| stored in the extension slot.
Handle<Context> Factory::NewModuleContext(Handle<SourceTextModule> module,
                                          Handle<NativeContext> outer,
                                          Handle<ScopeInfo> scope_info) {
  DCHECK_EQ(scope_info->scope_type(), MODULE_SCOPE);
  int variadic_part_length = scope_info->ContextLength();
  Context context = NewContextInternal(
      isolate()->module_context_map(), Context::SizeFor(variadic_part_length),
      variadic_part_length, AllocationType::kOld);
  DisallowGarbageCollection no_gc;
  context.set_scope_info(*scope_info);
  context.set_previous(*outer);
  context.set_extension(*module);
  DCHECK(context.IsModuleContext());
  return handle(context, isolate());
}

1230 1231
// Creates a young-space function or eval context for |scope_info|, chained
// to |outer|. Only EVAL_SCOPE and FUNCTION_SCOPE are valid here.
Handle<Context> Factory::NewFunctionContext(Handle<Context> outer,
                                            Handle<ScopeInfo> scope_info) {
  Handle<Map> map;
  switch (scope_info->scope_type()) {
    case EVAL_SCOPE:
      map = isolate()->eval_context_map();
      break;
    case FUNCTION_SCOPE:
      map = isolate()->function_context_map();
      break;
    default:
      UNREACHABLE();
  }
  int variadic_part_length = scope_info->ContextLength();
  Context context =
      NewContextInternal(map, Context::SizeFor(variadic_part_length),
                         variadic_part_length, AllocationType::kYoung);
  DisallowGarbageCollection no_gc;
  context.set_scope_info(*scope_info);
  context.set_previous(*outer);
  return handle(context, isolate());
}

1253
// Creates a young-space catch context holding |thrown_object| in the slot
// right after the minimal header.
Handle<Context> Factory::NewCatchContext(Handle<Context> previous,
                                         Handle<ScopeInfo> scope_info,
                                         Handle<Object> thrown_object) {
  DCHECK_EQ(scope_info->scope_type(), CATCH_SCOPE);
  STATIC_ASSERT(Context::MIN_CONTEXT_SLOTS == Context::THROWN_OBJECT_INDEX);
  // TODO(ishell): Take the details from CatchContext class.
  int variadic_part_length = Context::MIN_CONTEXT_SLOTS + 1;
  Context context = NewContextInternal(
      isolate()->catch_context_map(), Context::SizeFor(variadic_part_length),
      variadic_part_length, AllocationType::kYoung);
  DisallowGarbageCollection no_gc;
  DCHECK_IMPLIES(!FLAG_single_generation, Heap::InYoungGeneration(context));
  // Write barriers are skipped on the just-allocated context.
  context.set_scope_info(*scope_info, SKIP_WRITE_BARRIER);
  context.set_previous(*previous, SKIP_WRITE_BARRIER);
  context.set(Context::THROWN_OBJECT_INDEX, *thrown_object, SKIP_WRITE_BARRIER);
  return handle(context, isolate());
}

1271
// Creates a young-space debug-evaluate context. |extension| (optional) goes
// into the extension slot; |wrapped| (optional) into the wrapped-context
// slot.
Handle<Context> Factory::NewDebugEvaluateContext(Handle<Context> previous,
                                                 Handle<ScopeInfo> scope_info,
                                                 Handle<JSReceiver> extension,
                                                 Handle<Context> wrapped) {
  DCHECK(scope_info->IsDebugEvaluateScope());
  // A null extension is stored as undefined.
  Handle<HeapObject> ext = extension.is_null()
                               ? Handle<HeapObject>::cast(undefined_value())
                               : Handle<HeapObject>::cast(extension);
  // TODO(ishell): Take the details from DebugEvaluateContextContext class.
  int variadic_part_length = Context::MIN_CONTEXT_EXTENDED_SLOTS + 1;
  Context context =
      NewContextInternal(isolate()->debug_evaluate_context_map(),
                         Context::SizeFor(variadic_part_length),
                         variadic_part_length, AllocationType::kYoung);
  DisallowGarbageCollection no_gc;
  DCHECK_IMPLIES(!FLAG_single_generation, Heap::InYoungGeneration(context));
  // Write barriers are skipped on the just-allocated context.
  context.set_scope_info(*scope_info, SKIP_WRITE_BARRIER);
  context.set_previous(*previous, SKIP_WRITE_BARRIER);
  context.set_extension(*ext, SKIP_WRITE_BARRIER);
  if (!wrapped.is_null()) {
    context.set(Context::WRAPPED_CONTEXT_INDEX, *wrapped, SKIP_WRITE_BARRIER);
  }
  return handle(context, isolate());
}
1295

1296
// Creates a young-space `with` context whose extension slot holds the
// receiver being "with"-ed over.
Handle<Context> Factory::NewWithContext(Handle<Context> previous,
                                        Handle<ScopeInfo> scope_info,
                                        Handle<JSReceiver> extension) {
  DCHECK_EQ(scope_info->scope_type(), WITH_SCOPE);
  // TODO(ishell): Take the details from WithContext class.
  int variadic_part_length = Context::MIN_CONTEXT_EXTENDED_SLOTS;
  Context context = NewContextInternal(
      isolate()->with_context_map(), Context::SizeFor(variadic_part_length),
      variadic_part_length, AllocationType::kYoung);
  DisallowGarbageCollection no_gc;
  DCHECK_IMPLIES(!FLAG_single_generation, Heap::InYoungGeneration(context));
  // Write barriers are skipped on the just-allocated context.
  context.set_scope_info(*scope_info, SKIP_WRITE_BARRIER);
  context.set_previous(*previous, SKIP_WRITE_BARRIER);
  context.set_extension(*extension, SKIP_WRITE_BARRIER);
  return handle(context, isolate());
}

1313
// Creates a young-space block (or class) context for |scope_info|, chained
// to |previous|.
Handle<Context> Factory::NewBlockContext(Handle<Context> previous,
                                         Handle<ScopeInfo> scope_info) {
  DCHECK_IMPLIES(scope_info->scope_type() != BLOCK_SCOPE,
                 scope_info->scope_type() == CLASS_SCOPE);
  int variadic_part_length = scope_info->ContextLength();
  Context context = NewContextInternal(
      isolate()->block_context_map(), Context::SizeFor(variadic_part_length),
      variadic_part_length, AllocationType::kYoung);
  DisallowGarbageCollection no_gc;
  DCHECK_IMPLIES(!FLAG_single_generation, Heap::InYoungGeneration(context));
  // Write barriers are skipped on the just-allocated context.
  context.set_scope_info(*scope_info, SKIP_WRITE_BARRIER);
  context.set_previous(*previous, SKIP_WRITE_BARRIER);
  return handle(context, isolate());
}

1328
// Creates a young-space context for builtins with |variadic_part_length|
// slots, using the function-context map and an empty scope info, chained
// directly to |native_context|.
Handle<Context> Factory::NewBuiltinContext(Handle<NativeContext> native_context,
                                           int variadic_part_length) {
  DCHECK_LE(Context::MIN_CONTEXT_SLOTS, variadic_part_length);
  Context context = NewContextInternal(
      isolate()->function_context_map(), Context::SizeFor(variadic_part_length),
      variadic_part_length, AllocationType::kYoung);
  DisallowGarbageCollection no_gc;
  DCHECK_IMPLIES(!FLAG_single_generation, Heap::InYoungGeneration(context));
  // Write barriers are skipped on the just-allocated context.
  context.set_scope_info(read_only_roots().empty_scope_info(),
                         SKIP_WRITE_BARRIER);
  context.set_previous(*native_context, SKIP_WRITE_BARRIER);
  return handle(context, isolate());
}

1342 1343
// Creates an AliasedArgumentsEntry struct recording the context slot that an
// aliased argument refers to.
Handle<AliasedArgumentsEntry> Factory::NewAliasedArgumentsEntry(
    int aliased_context_slot) {
  auto entry = NewStructInternal<AliasedArgumentsEntry>(
      ALIASED_ARGUMENTS_ENTRY_TYPE, AllocationType::kYoung);
  entry.set_aliased_context_slot(aliased_context_slot);
  return handle(entry, isolate());
}

1350
// Creates an old-space AccessorInfo with empty name, sloppy mode, NONE
// attributes, and zeroed getter/setter fields.
Handle<AccessorInfo> Factory::NewAccessorInfo() {
  auto info =
      NewStructInternal<AccessorInfo>(ACCESSOR_INFO_TYPE, AllocationType::kOld);
  DisallowGarbageCollection no_gc;
  info.set_name(*empty_string(), SKIP_WRITE_BARRIER);
  info.set_flags(0);  // Must clear the flags, it was initialized as undefined.
  info.set_is_sloppy(true);
  info.set_initial_property_attributes(NONE);

  // Clear some other fields that should not be undefined.
  info.set_getter(Smi::zero(), SKIP_WRITE_BARRIER);
  info.set_setter(Smi::zero(), SKIP_WRITE_BARRIER);
  info.set_js_getter(Smi::zero(), SKIP_WRITE_BARRIER);
  return handle(info, isolate());
}

1366 1367 1368 1369 1370 1371 1372 1373 1374 1375 1376 1377 1378
// Creates an ErrorStackData struct holding the two stack-related payloads of
// an Error object.
Handle<ErrorStackData> Factory::NewErrorStackData(
    Handle<Object> call_site_infos_or_formatted_stack,
    Handle<Object> limit_or_stack_frame_infos) {
  ErrorStackData error_stack_data = NewStructInternal<ErrorStackData>(
      ERROR_STACK_DATA_TYPE, AllocationType::kYoung);
  DisallowGarbageCollection no_gc;
  // Write barriers are skipped on the just-allocated struct.
  error_stack_data.set_call_site_infos_or_formatted_stack(
      *call_site_infos_or_formatted_stack, SKIP_WRITE_BARRIER);
  error_stack_data.set_limit_or_stack_frame_infos(*limit_or_stack_frame_infos,
                                                  SKIP_WRITE_BARRIER);
  return handle(error_stack_data, isolate());
}

1379
// Appends a weak reference to |script| to the heap's global script list.
void Factory::AddToScriptList(Handle<Script> script) {
  Handle<WeakArrayList> new_list = WeakArrayList::Append(
      isolate(), script_list(), MaybeObjectHandle::Weak(script));
  isolate()->heap()->set_script_list(*new_list);
}

1386 1387
// Creates a copy of |script| with a fresh script id, cleared line ends, and
// an empty shared-function-info list, registers it in the script list, and
// logs a script-create event.
Handle<Script> Factory::CloneScript(Handle<Script> script) {
  Heap* heap = isolate()->heap();
  int script_id = isolate()->GetNextScriptId();
#ifdef V8_SCRIPTORMODULE_LEGACY_LIFETIME
  Handle<ArrayList> list = ArrayList::New(isolate(), 0);
#endif
  Handle<Script> new_script_handle =
      Handle<Script>::cast(NewStruct(SCRIPT_TYPE, AllocationType::kOld));
  {
    DisallowGarbageCollection no_gc;
    Script new_script = *new_script_handle;
    const Script old_script = *script;
    new_script.set_source(old_script.source());
    new_script.set_name(old_script.name());
    new_script.set_id(script_id);
    new_script.set_line_offset(old_script.line_offset());
    new_script.set_column_offset(old_script.column_offset());
    new_script.set_context_data(old_script.context_data());
    new_script.set_type(old_script.type());
    // Line ends are not carried over to the clone.
    new_script.set_line_ends(*undefined_value(), SKIP_WRITE_BARRIER);
    new_script.set_eval_from_shared_or_wrapped_arguments_or_sfi_table(
        script->eval_from_shared_or_wrapped_arguments_or_sfi_table());
    // The clone starts with no shared function infos of its own.
    new_script.set_shared_function_infos(*empty_weak_fixed_array(),
                                         SKIP_WRITE_BARRIER);
    new_script.set_eval_from_position(old_script.eval_from_position());
    new_script.set_flags(old_script.flags());
    new_script.set_host_defined_options(old_script.host_defined_options());
#ifdef V8_SCRIPTORMODULE_LEGACY_LIFETIME
    new_script.set_script_or_modules(*list);
#endif
  }

  Handle<WeakArrayList> scripts = script_list();
  scripts = WeakArrayList::AddToEnd(isolate(), scripts,
                                    MaybeObjectHandle::Weak(new_script_handle));
  heap->set_script_list(*scripts);
  LOG(isolate(), ScriptEvent(Logger::ScriptEventType::kCreate, script_id));
  return new_script_handle;
}

1426 1427 1428
Handle<CallableTask> Factory::NewCallableTask(Handle<JSReceiver> callable,
                                              Handle<Context> context) {
  DCHECK(callable->IsCallable());
1429 1430
  auto microtask = NewStructInternal<CallableTask>(CALLABLE_TASK_TYPE,
                                                   AllocationType::kYoung);
1431 1432 1433
  DisallowGarbageCollection no_gc;
  microtask.set_callable(*callable, SKIP_WRITE_BARRIER);
  microtask.set_context(*context, SKIP_WRITE_BARRIER);
1434
  return handle(microtask, isolate());
1435 1436 1437 1438
}

// Creates a microtask that will invoke the embedder-provided |callback| with
// the given opaque |data|.
Handle<CallbackTask> Factory::NewCallbackTask(Handle<Foreign> callback,
                                              Handle<Foreign> data) {
  auto task = NewStructInternal<CallbackTask>(CALLBACK_TASK_TYPE,
                                              AllocationType::kYoung);
  DisallowGarbageCollection no_gc;
  task.set_callback(*callback, SKIP_WRITE_BARRIER);
  task.set_data(*data, SKIP_WRITE_BARRIER);
  return handle(task, isolate());
}
1446

1447
Handle<PromiseResolveThenableJobTask> Factory::NewPromiseResolveThenableJobTask(
1448 1449
    Handle<JSPromise> promise_to_resolve, Handle<JSReceiver> thenable,
    Handle<JSReceiver> then, Handle<Context> context) {
1450
  DCHECK(then->IsCallable());
1451 1452
  auto microtask = NewStructInternal<PromiseResolveThenableJobTask>(
      PROMISE_RESOLVE_THENABLE_JOB_TASK_TYPE, AllocationType::kYoung);
1453 1454 1455 1456 1457
  DisallowGarbageCollection no_gc;
  microtask.set_promise_to_resolve(*promise_to_resolve, SKIP_WRITE_BARRIER);
  microtask.set_thenable(*thenable, SKIP_WRITE_BARRIER);
  microtask.set_then(*then, SKIP_WRITE_BARRIER);
  microtask.set_context(*context, SKIP_WRITE_BARRIER);
1458
  return handle(microtask, isolate());
1459 1460
}

1461
Handle<Foreign> Factory::NewForeign(Address addr) {
1462 1463
  // Statically ensure that it is safe to allocate foreigns in paged spaces.
  STATIC_ASSERT(Foreign::kSize <= kMaxRegularHeapObjectSize);
1464
  Map map = *foreign_map();
1465 1466 1467 1468 1469 1470
  Foreign foreign = Foreign::cast(AllocateRawWithImmortalMap(
      map.instance_size(), AllocationType::kYoung, map));
  DisallowGarbageCollection no_gc;
  foreign.AllocateExternalPointerEntries(isolate());
  foreign.set_foreign_address(isolate(), addr);
  return handle(foreign, isolate());
1471 1472
}

#if V8_ENABLE_WEBASSEMBLY
// Allocates a WasmTypeInfo describing the type at |type_address|.
// |opt_parent| (if non-null) provides the supertype chain to extend.
Handle<WasmTypeInfo> Factory::NewWasmTypeInfo(
    Address type_address, Handle<Map> opt_parent, int instance_size_bytes,
    Handle<WasmInstanceObject> instance) {
  // We pretenure WasmTypeInfo objects for two reasons:
  // (1) They are referenced by Maps, which are assumed to be long-lived,
  //     so pretenuring the WTI is a bit more efficient.
  // (2) The object visitors need to read the WasmTypeInfo to find tagged
  //     fields in Wasm structs; in the middle of a GC cycle that's only
  //     safe to do if the WTI is in old space.
  // The supertypes list is constant after initialization, so we pretenure
  // that too. The subtypes list, however, is expected to grow (and hence be
  // replaced), so we don't pretenure it.
  Handle<ArrayList> subtypes = ArrayList::New(isolate(), 0);
  Handle<FixedArray> supertypes;
  if (opt_parent.is_null()) {
    // Root of a hierarchy: start with a minimum-size list of undefined slots.
    supertypes = NewFixedArray(wasm::kMinimumSupertypeArraySize);
    for (int slot = 0; slot < supertypes->length(); slot++) {
      supertypes->set(slot, *undefined_value());
    }
  } else {
    // Extend the parent's supertype list by appending the parent itself.
    // Unused tail slots are undefined; grow the array only when it is full.
    Handle<FixedArray> parent_supertypes =
        handle(opt_parent->wasm_type_info().supertypes(), isolate());
    int last_defined = parent_supertypes->length() - 1;
    while (last_defined >= 0 &&
           parent_supertypes->get(last_defined).IsUndefined()) {
      last_defined--;
    }
    if (last_defined == parent_supertypes->length() - 1) {
      supertypes = CopyArrayAndGrow(parent_supertypes, 1, AllocationType::kOld);
    } else {
      supertypes = CopyFixedArray(parent_supertypes);
    }
    supertypes->set(last_defined + 1, *opt_parent);
  }
  Map raw_map = *wasm_type_info_map();
  WasmTypeInfo info = WasmTypeInfo::cast(AllocateRawWithImmortalMap(
      raw_map.instance_size(), AllocationType::kOld, raw_map));
  DisallowGarbageCollection no_gc;
  info.AllocateExternalPointerEntries(isolate());
  info.set_foreign_address(isolate(), type_address);
  info.set_supertypes(*supertypes);
  info.set_subtypes(*subtypes);
  info.set_instance_size(instance_size_bytes);
  info.set_instance(*instance);
  return handle(info, isolate());
}

1521
Handle<WasmApiFunctionRef> Factory::NewWasmApiFunctionRef(
1522
    Handle<JSReceiver> callable, Handle<HeapObject> suspender) {
1523 1524 1525 1526
  Map map = *wasm_api_function_ref_map();
  auto result = WasmApiFunctionRef::cast(AllocateRawWithImmortalMap(
      map.instance_size(), AllocationType::kOld, map));
  DisallowGarbageCollection no_gc;
1527
  result.set_isolate_root(isolate()->isolate_root());
1528 1529 1530 1531 1532 1533
  result.set_native_context(*isolate()->native_context());
  if (!callable.is_null()) {
    result.set_callable(*callable);
  } else {
    result.set_callable(*undefined_value());
  }
1534 1535 1536 1537 1538
  if (!suspender.is_null()) {
    result.set_suspender(*suspender);
  } else {
    result.set_suspender(*undefined_value());
  }
1539 1540 1541
  return handle(result, isolate());
}

1542 1543 1544 1545 1546 1547 1548 1549 1550 1551
Handle<WasmInternalFunction> Factory::NewWasmInternalFunction(
    Address opt_call_target, Handle<HeapObject> ref, Handle<Map> rtt) {
  HeapObject raw = AllocateRaw(rtt->instance_size(), AllocationType::kOld);
  raw.set_map_after_allocation(*rtt);
  WasmInternalFunction result = WasmInternalFunction::cast(raw);
  DisallowGarbageCollection no_gc;
  result.AllocateExternalPointerEntries(isolate());
  result.set_foreign_address(isolate(), opt_call_target);
  result.set_ref(*ref);
  // Default values, will be overwritten by the caller.
1552
  result.set_code(*BUILTIN_CODE(isolate(), Abort));
1553 1554 1555 1556
  result.set_external(*undefined_value());
  return handle(result, isolate());
}

1557 1558 1559
Handle<WasmJSFunctionData> Factory::NewWasmJSFunctionData(
    Address opt_call_target, Handle<JSReceiver> callable, int return_count,
    int parameter_count, Handle<PodArray<wasm::ValueType>> serialized_sig,
1560 1561
    Handle<CodeT> wrapper_code, Handle<Map> rtt, Handle<HeapObject> suspender) {
  Handle<WasmApiFunctionRef> ref = NewWasmApiFunctionRef(callable, suspender);
1562 1563
  Handle<WasmInternalFunction> internal =
      NewWasmInternalFunction(opt_call_target, ref, rtt);
1564 1565 1566 1567 1568
  Map map = *wasm_js_function_data_map();
  WasmJSFunctionData result =
      WasmJSFunctionData::cast(AllocateRawWithImmortalMap(
          map.instance_size(), AllocationType::kOld, map));
  DisallowGarbageCollection no_gc;
1569
  result.set_internal(*internal);
1570
  result.set_wrapper_code(*wrapper_code);
1571 1572 1573 1574 1575 1576 1577
  result.set_serialized_return_count(return_count);
  result.set_serialized_parameter_count(parameter_count);
  result.set_serialized_signature(*serialized_sig);
  return handle(result, isolate());
}

// Builds the function-data object backing an exported wasm function.
Handle<WasmExportedFunctionData> Factory::NewWasmExportedFunctionData(
    Handle<CodeT> export_wrapper, Handle<WasmInstanceObject> instance,
    Address call_target, Handle<Object> ref, int func_index,
    Address sig_address, int wrapper_budget, Handle<Map> rtt) {
  Handle<Foreign> sig_foreign = NewForeign(sig_address);
  Handle<WasmInternalFunction> internal_fn =
      NewWasmInternalFunction(call_target, Handle<HeapObject>::cast(ref), rtt);
  Map raw_map = *wasm_exported_function_data_map();
  WasmExportedFunctionData data =
      WasmExportedFunctionData::cast(AllocateRawWithImmortalMap(
          raw_map.instance_size(), AllocationType::kOld, raw_map));
  DisallowGarbageCollection no_gc;
  DCHECK(ref->IsWasmInstanceObject() || ref->IsWasmApiFunctionRef());
  data.set_internal(*internal_fn);
  data.set_wrapper_code(*export_wrapper);
  data.set_instance(*instance);
  data.set_function_index(func_index);
  data.set_signature(*sig_foreign);
  data.set_wrapper_budget(wrapper_budget);
  // The C wrapper is installed lazily; start with the Illegal builtin.
  data.set_c_wrapper_code(*BUILTIN_CODE(isolate(), Illegal),
                          SKIP_WRITE_BARRIER);
  data.set_packed_args_size(0);
  data.set_suspender(*undefined_value());
  return handle(data, isolate());
}

1603 1604
Handle<WasmCapiFunctionData> Factory::NewWasmCapiFunctionData(
    Address call_target, Handle<Foreign> embedder_data,
1605
    Handle<CodeT> wrapper_code, Handle<Map> rtt,
1606
    Handle<PodArray<wasm::ValueType>> serialized_sig) {
1607 1608
  Handle<WasmApiFunctionRef> ref =
      NewWasmApiFunctionRef(Handle<JSReceiver>(), Handle<HeapObject>());
1609 1610
  Handle<WasmInternalFunction> internal =
      NewWasmInternalFunction(call_target, ref, rtt);
1611 1612 1613 1614 1615
  Map map = *wasm_capi_function_data_map();
  WasmCapiFunctionData result =
      WasmCapiFunctionData::cast(AllocateRawWithImmortalMap(
          map.instance_size(), AllocationType::kOld, map));
  DisallowGarbageCollection no_gc;
1616
  result.set_internal(*internal);
1617 1618 1619 1620 1621 1622
  result.set_wrapper_code(*wrapper_code);
  result.set_embedder_data(*embedder_data);
  result.set_serialized_signature(*serialized_sig);
  return handle(result, isolate());
}

1623 1624 1625 1626 1627 1628 1629 1630
Handle<WasmArray> Factory::NewWasmArray(
    const wasm::ArrayType* type, const std::vector<wasm::WasmValue>& elements,
    Handle<Map> map) {
  uint32_t length = static_cast<uint32_t>(elements.size());
  HeapObject raw =
      AllocateRaw(WasmArray::SizeFor(*map, length), AllocationType::kYoung);
  raw.set_map_after_allocation(*map);
  WasmArray result = WasmArray::cast(raw);
1631
  result.set_raw_properties_or_hash(*empty_fixed_array(), kRelaxedStore);
1632
  result.set_length(length);
1633 1634 1635
  if (type->element_type().is_numeric()) {
    for (uint32_t i = 0; i < length; i++) {
      Address address = result.ElementAddress(i);
1636 1637 1638
      elements[i]
          .Packed(type->element_type())
          .CopyTo(reinterpret_cast<byte*>(address));
1639 1640 1641 1642 1643
    }
  } else {
    for (uint32_t i = 0; i < length; i++) {
      int offset = result.element_offset(i);
      TaggedField<Object>::store(result, offset, *elements[i].to_ref());
1644 1645 1646 1647 1648
    }
  }
  return handle(result, isolate());
}

1649 1650 1651 1652 1653 1654 1655
Handle<WasmStruct> Factory::NewWasmStruct(const wasm::StructType* type,
                                          wasm::WasmValue* args,
                                          Handle<Map> map) {
  DCHECK_EQ(WasmStruct::Size(type), map->wasm_type_info().instance_size());
  HeapObject raw = AllocateRaw(WasmStruct::Size(type), AllocationType::kYoung);
  raw.set_map_after_allocation(*map);
  WasmStruct result = WasmStruct::cast(raw);
1656
  result.set_raw_properties_or_hash(*empty_fixed_array(), kRelaxedStore);
1657
  for (uint32_t i = 0; i < type->field_count(); i++) {
1658
    int offset = type->field_offset(i);
1659
    if (type->field(i).is_numeric()) {
1660
      Address address = result.RawFieldAddress(offset);
1661
      args[i].Packed(type->field(i)).CopyTo(reinterpret_cast<byte*>(address));
1662
    } else {
1663 1664
      offset += WasmStruct::kHeaderSize;
      TaggedField<Object>::store(result, offset, *args[i].to_ref());
1665 1666 1667 1668 1669
    }
  }
  return handle(result, isolate());
}

1670 1671 1672
Handle<SharedFunctionInfo>
Factory::NewSharedFunctionInfoForWasmExportedFunction(
    Handle<String> name, Handle<WasmExportedFunctionData> data) {
1673
  return NewSharedFunctionInfo(name, data, Builtin::kNoBuiltinId);
1674 1675 1676 1677
}

// Creates a SharedFunctionInfo for a WebAssembly.Function wrapping a JS
// callable; it carries no builtin id.
Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfoForWasmJSFunction(
    Handle<String> name, Handle<WasmJSFunctionData> data) {
  return NewSharedFunctionInfo(name, data, Builtin::kNoBuiltinId);
}

// Creates an anonymous SharedFunctionInfo for a C-API function.
Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfoForWasmCapiFunction(
    Handle<WasmCapiFunctionData> data) {
  return NewSharedFunctionInfo(MaybeHandle<String>(), data,
                               Builtin::kNoBuiltinId,
                               FunctionKind::kConciseMethod);
}
#endif  // V8_ENABLE_WEBASSEMBLY

1689
Handle<Cell> Factory::NewCell(Handle<Object> value) {
1690
  STATIC_ASSERT(Cell::kSize <= kMaxRegularHeapObjectSize);
1691 1692 1693 1694 1695
  Cell result = Cell::cast(AllocateRawWithImmortalMap(
      Cell::kSize, AllocationType::kOld, *cell_map()));
  DisallowGarbageCollection no_gc;
  result.set_value(*value);
  return handle(result, isolate());
1696 1697
}

1698
Handle<FeedbackCell> Factory::NewNoClosuresCell(Handle<HeapObject> value) {
1699 1700 1701 1702 1703 1704 1705 1706
  FeedbackCell result = FeedbackCell::cast(AllocateRawWithImmortalMap(
      FeedbackCell::kAlignedSize, AllocationType::kOld,
      *no_closures_cell_map()));
  DisallowGarbageCollection no_gc;
  result.set_value(*value);
  result.SetInitialInterruptBudget();
  result.clear_padding();
  return handle(result, isolate());
1707 1708
}

1709
Handle<FeedbackCell> Factory::NewOneClosureCell(Handle<HeapObject> value) {
1710 1711 1712 1713 1714 1715 1716 1717
  FeedbackCell result = FeedbackCell::cast(AllocateRawWithImmortalMap(
      FeedbackCell::kAlignedSize, AllocationType::kOld,
      *one_closure_cell_map()));
  DisallowGarbageCollection no_gc;
  result.set_value(*value);
  result.SetInitialInterruptBudget();
  result.clear_padding();
  return handle(result, isolate());
1718 1719
}

1720
Handle<FeedbackCell> Factory::NewManyClosuresCell(Handle<HeapObject> value) {
1721 1722 1723 1724 1725 1726 1727 1728
  FeedbackCell result = FeedbackCell::cast(AllocateRawWithImmortalMap(
      FeedbackCell::kAlignedSize, AllocationType::kOld,
      *many_closures_cell_map()));
  DisallowGarbageCollection no_gc;
  result.set_value(*value);
  result.SetInitialInterruptBudget();
  result.clear_padding();
  return handle(result, isolate());
1729 1730
}

1731
Handle<PropertyCell> Factory::NewPropertyCell(Handle<Name> name,
1732 1733
                                              PropertyDetails details,
                                              Handle<Object> value,
1734
                                              AllocationType allocation) {
1735 1736
  DCHECK(name->IsUniqueName());
  STATIC_ASSERT(PropertyCell::kSize <= kMaxRegularHeapObjectSize);
1737 1738
  PropertyCell cell = PropertyCell::cast(AllocateRawWithImmortalMap(
      PropertyCell::kSize, allocation, *global_property_cell_map()));
1739
  DisallowGarbageCollection no_gc;
1740 1741 1742
  cell.set_dependent_code(
      DependentCode::empty_dependent_code(ReadOnlyRoots(isolate())),
      SKIP_WRITE_BARRIER);
1743 1744 1745 1746 1747
  WriteBarrierMode mode = allocation == AllocationType::kYoung
                              ? SKIP_WRITE_BARRIER
                              : UPDATE_WRITE_BARRIER;
  cell.set_name(*name, mode);
  cell.set_value(*value, mode);
1748 1749
  cell.set_property_details_raw(details.AsSmi(), SKIP_WRITE_BARRIER);
  return handle(cell, isolate());
1750 1751
}

1752 1753 1754 1755 1756 1757
Handle<PropertyCell> Factory::NewProtector() {
  return NewPropertyCell(
      empty_string(), PropertyDetails::Empty(PropertyCellType::kConstantType),
      handle(Smi::FromInt(Protectors::kProtectorValid), isolate()));
}

1758 1759 1760
Handle<TransitionArray> Factory::NewTransitionArray(int number_of_transitions,
                                                    int slack) {
  int capacity = TransitionArray::LengthFor(number_of_transitions + slack);
1761 1762 1763
  Handle<TransitionArray> array = Handle<TransitionArray>::cast(
      NewWeakFixedArrayWithMap(read_only_roots().transition_array_map(),
                               capacity, AllocationType::kOld));
1764 1765 1766
  // Transition arrays are AllocationType::kOld. When black allocation is on we
  // have to add the transition array to the list of
  // encountered_transition_arrays.
1767 1768 1769 1770
  Heap* heap = isolate()->heap();
  if (heap->incremental_marking()->black_allocation()) {
    heap->mark_compact_collector()->AddTransitionArray(*array);
  }
1771
  array->WeakFixedArray::Set(TransitionArray::kPrototypeTransitionsIndex,
1772
                             MaybeObject::FromObject(Smi::zero()));
1773 1774 1775
  array->WeakFixedArray::Set(
      TransitionArray::kTransitionLengthIndex,
      MaybeObject::FromObject(Smi::FromInt(number_of_transitions)));
1776
  return array;
1777 1778
}

1779 1780 1781
Handle<AllocationSite> Factory::NewAllocationSite(bool with_weak_next) {
  Handle<Map> map = with_weak_next ? allocation_site_map()
                                   : allocation_site_without_weaknext_map();
1782 1783
  Handle<AllocationSite> site(
      AllocationSite::cast(New(map, AllocationType::kOld)), isolate());
1784 1785
  site->Initialize();

1786 1787 1788 1789 1790
  if (with_weak_next) {
    // Link the site
    site->set_weak_next(isolate()->heap()->allocation_sites_list());
    isolate()->heap()->set_allocation_sites_list(*site);
  }
1791
  return site;
1792 1793
}

1794
Handle<Map> Factory::NewMap(InstanceType type, int instance_size,
1795 1796
                            ElementsKind elements_kind, int inobject_properties,
                            AllocationType allocation_type) {
1797
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
1798
  DCHECK_IMPLIES(InstanceTypeChecker::IsJSObject(type) &&
1799 1800 1801
                     !Map::CanHaveFastTransitionableElementsKind(type),
                 IsDictionaryElementsKind(elements_kind) ||
                     IsTerminalElementsKind(elements_kind));
1802 1803
  DCHECK(allocation_type == AllocationType::kMap ||
         allocation_type == AllocationType::kSharedMap);
1804
  HeapObject result = isolate()->heap()->AllocateRawWith<Heap::kRetryOrFail>(
1805
      Map::kSize, allocation_type);
1806
  DisallowGarbageCollection no_gc;
1807
  result.set_map_after_allocation(*meta_map(), SKIP_WRITE_BARRIER);
1808 1809 1810 1811 1812
  return handle(InitializeMap(Map::cast(result), type, instance_size,
                              elements_kind, inobject_properties),
                isolate());
}

1813 1814 1815
Map Factory::InitializeMap(Map map, InstanceType type, int instance_size,
                           ElementsKind elements_kind,
                           int inobject_properties) {
1816
  DisallowGarbageCollection no_gc;
1817
  map.set_instance_type(type);
1818 1819 1820
  HeapObject raw_null_value = *null_value();
  map.set_prototype(raw_null_value, SKIP_WRITE_BARRIER);
  map.set_constructor_or_back_pointer(raw_null_value, SKIP_WRITE_BARRIER);
1821 1822
  map.set_instance_size(instance_size);
  if (map.IsJSObjectMap()) {
1823
    DCHECK(!ReadOnlyHeap::Contains(map));
1824 1825 1826 1827
    map.SetInObjectPropertiesStartInWords(instance_size / kTaggedSize -
                                          inobject_properties);
    DCHECK_EQ(map.GetInObjectProperties(), inobject_properties);
    map.set_prototype_validity_cell(*invalid_prototype_validity_cell());
1828 1829
  } else {
    DCHECK_EQ(inobject_properties, 0);
1830
    map.set_inobject_properties_start_or_constructor_function_index(0);
1831 1832
    map.set_prototype_validity_cell(Smi::FromInt(Map::kPrototypeChainValid),
                                    SKIP_WRITE_BARRIER);
1833
  }
1834 1835 1836
  map.set_dependent_code(
      DependentCode::empty_dependent_code(ReadOnlyRoots(isolate())),
      SKIP_WRITE_BARRIER);
1837 1838
  map.set_raw_transitions(MaybeObject::FromSmi(Smi::zero()),
                          SKIP_WRITE_BARRIER);
1839 1840
  map.SetInObjectUnusedPropertyFields(inobject_properties);
  map.SetInstanceDescriptors(isolate(), *empty_descriptor_array(), 0);
1841
  // Must be called only after |instance_type| and |instance_size| are set.
1842
  map.set_visitor_id(Map::GetVisitorId(map));
1843
  map.set_bit_field(0);
1844 1845 1846 1847 1848 1849
  map.set_bit_field2(Map::Bits2::NewTargetIsBaseBit::encode(true));
  int bit_field3 =
      Map::Bits3::EnumLengthBits::encode(kInvalidEnumCacheSentinel) |
      Map::Bits3::OwnsDescriptorsBit::encode(true) |
      Map::Bits3::ConstructionCounterBits::encode(Map::kNoSlackTracking) |
      Map::Bits3::IsExtensibleBit::encode(true);
1850 1851 1852 1853
  map.set_bit_field3(bit_field3);
  DCHECK(!map.is_in_retained_map_list());
  map.clear_padding();
  map.set_elements_kind(elements_kind);
1854
  isolate()->counters()->maps_created()->Increment();
1855
  if (FLAG_log_maps) LOG(isolate(), MapCreate(map));
1856 1857 1858 1859 1860 1861 1862 1863 1864 1865 1866 1867 1868
  return map;
}

// Clones |source| without attaching an allocation memento.
Handle<JSObject> Factory::CopyJSObject(Handle<JSObject> source) {
  return CopyJSObjectWithAllocationSite(source, Handle<AllocationSite>());
}

// Clones |source|. If |site| is non-null, an AllocationMemento pointing at
// it is placed directly behind the clone. Element and property backing
// stores are copied as well, so the clone shares no mutable state with the
// source (copy-on-write element stores remain shared by design).
Handle<JSObject> Factory::CopyJSObjectWithAllocationSite(
    Handle<JSObject> source, Handle<AllocationSite> site) {
  Handle<Map> source_map(source->map(), isolate());

  // We can only clone regexps, normal objects, api objects, errors or arrays.
  // Copying anything else will break invariants.
  InstanceType type = source_map->instance_type();
  bool is_clonable_js_type =
      type == JS_REG_EXP_TYPE || type == JS_OBJECT_TYPE ||
      type == JS_ERROR_TYPE || type == JS_ARRAY_TYPE ||
      type == JS_SPECIAL_API_OBJECT_TYPE ||
      InstanceTypeChecker::IsJSApiObject(type);
  bool is_clonable_wasm_type = false;
#if V8_ENABLE_WEBASSEMBLY
  is_clonable_wasm_type = type == WASM_GLOBAL_OBJECT_TYPE ||
                          type == WASM_INSTANCE_OBJECT_TYPE ||
                          type == WASM_MEMORY_OBJECT_TYPE ||
                          type == WASM_MODULE_OBJECT_TYPE ||
                          type == WASM_TABLE_OBJECT_TYPE;
#endif  // V8_ENABLE_WEBASSEMBLY
  CHECK(is_clonable_js_type || is_clonable_wasm_type);

  DCHECK(site.is_null() || AllocationSite::CanTrack(type));

  int object_size = source_map->instance_size();
  int allocation_size = object_size;
  if (!site.is_null()) {
    DCHECK(V8_ALLOCATION_SITE_TRACKING_BOOL);
    // Reserve room for the trailing memento.
    allocation_size += AllocationMemento::kSize;
  }
  HeapObject raw = isolate()->heap()->AllocateRawWith<Heap::kRetryOrFail>(
      allocation_size, AllocationType::kYoung);

  DCHECK(Heap::InYoungGeneration(raw) || FLAG_single_generation);

  Heap::CopyBlock(raw.address(), source->address(), object_size);
  Handle<JSObject> clone(JSObject::cast(raw), isolate());

  if (FLAG_enable_unconditional_write_barriers) {
    // By default, we shouldn't need to update the write barrier here, as the
    // clone will be allocated in new space.
    const ObjectSlot start(raw.address());
    const ObjectSlot end(raw.address() + object_size);
    isolate()->heap()->WriteBarrierForRange(raw, start, end);
  }
  if (!site.is_null()) {
    AllocationMemento memento =
        AllocationMemento::unchecked_cast(Object(raw.ptr() + object_size));
    InitializeAllocationMemento(memento, *site);
  }

  SLOW_DCHECK(clone->GetElementsKind() == source->GetElementsKind());
  FixedArrayBase src_elements = source->elements();
  // Update elements if necessary.
  if (src_elements.length() > 0) {
    FixedArrayBase new_elements;
    if (src_elements.map() == *fixed_cow_array_map()) {
      // Copy-on-write stores may be shared between clone and source.
      new_elements = src_elements;
    } else if (source->HasDoubleElements()) {
      new_elements = *CopyFixedDoubleArray(
          handle(FixedDoubleArray::cast(src_elements), isolate()));
    } else {
      new_elements =
          *CopyFixedArray(handle(FixedArray::cast(src_elements), isolate()));
    }
    clone->set_elements(new_elements);
  }

  // Update properties if necessary.
  if (source->HasFastProperties()) {
    PropertyArray props = source->property_array();
    if (props.length() > 0) {
      // TODO(gsathya): Do not copy hash code.
      Handle<PropertyArray> props_copy = CopyArrayWithMap(
          handle(props, isolate()), handle(props.map(), isolate()));
      clone->set_raw_properties_or_hash(*props_copy, kRelaxedStore);
    }
  } else {
    Handle<Object> dict_copy;
    if (V8_ENABLE_SWISS_NAME_DICTIONARY_BOOL) {
      dict_copy = SwissNameDictionary::ShallowCopy(
          isolate(), handle(source->property_dictionary_swiss(), isolate()));
    } else {
      dict_copy =
          CopyFixedArray(handle(source->property_dictionary(), isolate()));
    }
    clone->set_raw_properties_or_hash(*dict_copy, kRelaxedStore);
  }
  return clone;
}

1953 1954
namespace {
template <typename T>
1955 1956
void initialize_length(T array, int length) {
  array.set_length(length);
1957
}
1958

1959
template <>
1960 1961
void initialize_length<PropertyArray>(PropertyArray array, int length) {
  array.initialize_length(length);
1962 1963
}

1964 1965
inline void ZeroEmbedderFields(i::JSObject obj) {
  int count = obj.GetEmbedderFieldCount();
1966
  for (int i = 0; i < count; i++) {
1967
    obj.SetEmbedderField(i, Smi::zero());
1968 1969 1970
  }
}

1971
}  // namespace
1972

1973
template <typename T>
1974
Handle<T> Factory::CopyArrayWithMap(Handle<T> src, Handle<Map> map) {
1975
  int len = src->length();
1976
  HeapObject new_object = AllocateRawFixedArray(len, AllocationType::kYoung);
1977
  DisallowGarbageCollection no_gc;
1978 1979 1980 1981 1982 1983 1984
  new_object.set_map_after_allocation(*map, SKIP_WRITE_BARRIER);
  T result = T::cast(new_object);
  initialize_length(result, len);
  // Copy the content.
  WriteBarrierMode mode = result.GetWriteBarrierMode(no_gc);
  result.CopyElements(isolate(), 0, *src, 0, len, mode);
  return handle(result, isolate());
1985 1986 1987 1988
}

// Copies |src| into a new array that is |grow_by| elements longer, keeping
// src's map. The grown tail slots are initialized with undefined.
template <typename T>
Handle<T> Factory::CopyArrayAndGrow(Handle<T> src, int grow_by,
                                    AllocationType allocation) {
  DCHECK_LT(0, grow_by);
  // Guards against int overflow in old_len + grow_by below.
  DCHECK_LE(grow_by, kMaxInt - src->length());
  int old_len = src->length();
  int new_len = old_len + grow_by;
  HeapObject new_object = AllocateRawFixedArray(new_len, allocation);
  DisallowGarbageCollection no_gc;
  new_object.set_map_after_allocation(src->map(), SKIP_WRITE_BARRIER);
  T result = T::cast(new_object);
  initialize_length(result, new_len);
  // Copy the content.
  WriteBarrierMode mode = result.GetWriteBarrierMode(no_gc);
  result.CopyElements(isolate(), 0, *src, 0, old_len, mode);
  // Initialize the grown tail so the whole array is valid.
  MemsetTagged(ObjectSlot(result.data_start() + old_len),
               read_only_roots().undefined_value(), grow_by);
  return handle(result, isolate());
}

2007 2008
// Returns a young-generation copy of |array| whose map is set to |map|.
Handle<FixedArray> Factory::CopyFixedArrayWithMap(Handle<FixedArray> array,
                                                  Handle<Map> map) {
  return CopyArrayWithMap<FixedArray>(array, map);
}

2012
Handle<FixedArray> Factory::CopyFixedArrayAndGrow(Handle<FixedArray> array,
2013 2014
                                                  int grow_by) {
  return CopyArrayAndGrow(array, grow_by, AllocationType::kYoung);
2015 2016
}

2017 2018 2019 2020 2021
// Allocates a WeakArrayList of |capacity| with length 0 whose storage slots
// are left uninitialized; callers must fill them before the next allocation
// can observe the object.
Handle<WeakArrayList> Factory::NewUninitializedWeakArrayList(
    int capacity, AllocationType allocation) {
  DCHECK_LE(0, capacity);
  // Capacity 0 is served by the canonical empty list.
  if (capacity == 0) return empty_weak_array_list();

  HeapObject heap_object = AllocateRawWeakArrayList(capacity, allocation);
  DisallowGarbageCollection no_gc;
  heap_object.set_map_after_allocation(*weak_array_list_map(),
                                       SKIP_WRITE_BARRIER);
  WeakArrayList result = WeakArrayList::cast(heap_object);
  result.set_length(0);
  result.set_capacity(capacity);
  return handle(result, isolate());
}

// Allocates a WeakArrayList of |capacity| with length 0 whose storage slots
// are all pre-filled with undefined.
Handle<WeakArrayList> Factory::NewWeakArrayList(int capacity,
                                                AllocationType allocation) {
  Handle<WeakArrayList> array =
      NewUninitializedWeakArrayList(capacity, allocation);
  ObjectSlot first_slot(array->data_start());
  MemsetTagged(first_slot, read_only_roots().undefined_value(), capacity);
  return array;
}

2041
// Returns an old-space copy of |src| grown by |grow_by| undefined-filled
// slots.
Handle<WeakFixedArray> Factory::CopyWeakFixedArrayAndGrow(
    Handle<WeakFixedArray> src, int grow_by) {
  // TransitionArrays are compacted by the GC; this copy path does not
  // support them.
  DCHECK(!src->IsTransitionArray());
  return CopyArrayAndGrow<WeakFixedArray>(src, grow_by, AllocationType::kOld);
}

2047
// Copies |src| into a WeakArrayList whose capacity is |grow_by| larger. The
// length is preserved; slots between the length and the new capacity are
// filled with undefined.
Handle<WeakArrayList> Factory::CopyWeakArrayListAndGrow(
    Handle<WeakArrayList> src, int grow_by, AllocationType allocation) {
  int old_capacity = src->capacity();
  int new_capacity = old_capacity + grow_by;
  DCHECK_GE(new_capacity, old_capacity);  // Guards against int overflow.
  Handle<WeakArrayList> result =
      NewUninitializedWeakArrayList(new_capacity, allocation);
  DisallowGarbageCollection no_gc;
  WeakArrayList raw = *result;
  int old_len = src->length();
  raw.set_length(old_len);
  // Copy the content.
  WriteBarrierMode mode = raw.GetWriteBarrierMode(no_gc);
  raw.CopyElements(isolate(), 0, *src, 0, old_len, mode);
  // Initialize the rest of the storage so the list is valid up to its
  // capacity.
  MemsetTagged(ObjectSlot(raw.data_start() + old_len),
               read_only_roots().undefined_value(), new_capacity - old_len);
  return result;
}

// Copies |src| into a new WeakArrayList of |new_capacity|, dropping cleared
// weak references. The result's length is the number of surviving entries;
// the unused tail is filled with undefined.
Handle<WeakArrayList> Factory::CompactWeakArrayList(Handle<WeakArrayList> src,
                                                    int new_capacity,
                                                    AllocationType allocation) {
  Handle<WeakArrayList> result =
      NewUninitializedWeakArrayList(new_capacity, allocation);

  // Copy the content.
  DisallowGarbageCollection no_gc;
  WeakArrayList raw_src = *src;
  WeakArrayList raw_result = *result;
  WriteBarrierMode mode = raw_result.GetWriteBarrierMode(no_gc);
  int copy_to = 0, length = raw_src.length();
  for (int i = 0; i < length; i++) {
    MaybeObject element = raw_src.Get(i);
    // Skip entries whose weak referent has been collected.
    if (element->IsCleared()) continue;
    raw_result.Set(copy_to++, element, mode);
  }
  raw_result.set_length(copy_to);

  // Initialize the unused tail so all of the storage is valid.
  MemsetTagged(ObjectSlot(raw_result.data_start() + copy_to),
               read_only_roots().undefined_value(), new_capacity - copy_to);
  return result;
}

2090
// Returns a young-generation copy of |array| with |grow_by| extra slots,
// each initialized with undefined.
Handle<PropertyArray> Factory::CopyPropertyArrayAndGrow(
    Handle<PropertyArray> array, int grow_by) {
  return CopyArrayAndGrow<PropertyArray>(array, grow_by,
                                         AllocationType::kYoung);
}

2095 2096
// Copies the first |new_len| elements of |array| into a new FixedArray.
// Returns the canonical empty array when |new_len| is 0.
Handle<FixedArray> Factory::CopyFixedArrayUpTo(Handle<FixedArray> array,
                                               int new_len,
                                               AllocationType allocation) {
  DCHECK_LE(0, new_len);
  DCHECK_LE(new_len, array->length());
  if (new_len == 0) return empty_fixed_array();
  HeapObject heap_object = AllocateRawFixedArray(new_len, allocation);
  DisallowGarbageCollection no_gc;
  // Freshly allocated objects do not need a write barrier for the map word.
  heap_object.set_map_after_allocation(*fixed_array_map(), SKIP_WRITE_BARRIER);
  FixedArray result = FixedArray::cast(heap_object);
  result.set_length(new_len);
  // Copy the content.
  WriteBarrierMode mode = result.GetWriteBarrierMode(no_gc);
  result.CopyElements(isolate(), 0, *array, 0, new_len, mode);
  return handle(result, isolate());
}
2111

2112
// Returns a copy of |array| with the same map; the canonical empty array is
// returned unchanged.
Handle<FixedArray> Factory::CopyFixedArray(Handle<FixedArray> array) {
  if (array->length() == 0) return array;
  Handle<Map> source_map(array->map(), isolate());
  return CopyArrayWithMap(array, source_map);
}

2117 2118
// Copies |array| into a new FixedDoubleArray of the same length. The
// canonical empty array is returned unchanged.
Handle<FixedDoubleArray> Factory::CopyFixedDoubleArray(
    Handle<FixedDoubleArray> array) {
  int len = array->length();
  if (len == 0) return array;
  Handle<FixedDoubleArray> result =
      Handle<FixedDoubleArray>::cast(NewFixedDoubleArray(len));
  // Bulk-copy from the length field onward (length plus the raw double
  // payload); the values are unboxed doubles, so no write barriers apply.
  Heap::CopyBlock(
      result->address() + FixedDoubleArray::kLengthOffset,
      array->address() + FixedDoubleArray::kLengthOffset,
      FixedDoubleArray::SizeFor(len) - FixedDoubleArray::kLengthOffset);
  return result;
}

2130
// Allocates a HeapNumber for embedding in code-assembler output: read-only
// space when the heap supports it, old space otherwise.
Handle<HeapNumber> Factory::NewHeapNumberForCodeAssembler(double value) {
  if (isolate()->heap()->CanAllocateInReadOnlySpace()) {
    return NewHeapNumber<AllocationType::kReadOnly>(value);
  }
  return NewHeapNumber<AllocationType::kOld>(value);
}

2136 2137 2138 2139
// Creates an error object from |constructor| with a message built from
// |template_index| and up to three arguments. Null argument handles default
// to undefined.
Handle<JSObject> Factory::NewError(Handle<JSFunction> constructor,
                                   MessageTemplate template_index,
                                   Handle<Object> arg0, Handle<Object> arg1,
                                   Handle<Object> arg2) {
  HandleScope scope(isolate());

  if (arg0.is_null()) arg0 = undefined_value();
  if (arg1.is_null()) arg1 = undefined_value();
  if (arg2.is_null()) arg2 = undefined_value();

  Handle<JSObject> error = ErrorUtils::MakeGenericError(
      isolate(), constructor, template_index, arg0, arg1, arg2, SKIP_NONE);
  return scope.CloseAndEscape(error);
}

2150 2151
// Creates an error object from |constructor| carrying the given |message|,
// with stack trace collection enabled.
Handle<JSObject> Factory::NewError(Handle<JSFunction> constructor,
                                   Handle<String> message) {
  // Construct a new error object. If an exception is thrown, use the exception
  // as the result.
  Handle<Object> no_caller;
  auto maybe_error = ErrorUtils::Construct(
      isolate(), constructor, constructor, message, undefined_value(),
      SKIP_NONE, no_caller, ErrorUtils::StackTraceCollection::kEnabled);
  return maybe_error.ToHandleChecked();
}

2162
// Returns the RangeError reported for an invalid (too large) string length,
// after invalidating the string-length-overflow protector.
Handle<Object> Factory::NewInvalidStringLengthError() {
  if (FLAG_correctness_fuzzer_suppressions) {
    // Under correctness fuzzing this is treated as a hard failure.
    FATAL("Aborting on invalid string length");
  }
  // Invalidate the "string length" protector.
  if (Protectors::IsStringLengthOverflowLookupChainIntact(isolate())) {
    Protectors::InvalidateStringLengthOverflowLookupChain(isolate());
  }
  return NewRangeError(MessageTemplate::kInvalidStringLength);
}
2172

2173
// Defines Factory::New<NAME>(template_index, arg0, arg1, arg2) helpers that
// construct the corresponding error via the native-context |name|
// constructor function.
#define DEFINE_ERROR(NAME, name)                                              \
  Handle<JSObject> Factory::New##NAME(                                        \
      MessageTemplate template_index, Handle<Object> arg0,                    \
      Handle<Object> arg1, Handle<Object> arg2) {                             \
    return NewError(isolate()->name##_function(), template_index, arg0, arg1, \
                    arg2);                                                    \
  }
DEFINE_ERROR(Error, error)
DEFINE_ERROR(EvalError, eval_error)
DEFINE_ERROR(RangeError, range_error)
DEFINE_ERROR(ReferenceError, reference_error)
DEFINE_ERROR(SyntaxError, syntax_error)
DEFINE_ERROR(TypeError, type_error)
DEFINE_ERROR(WasmCompileError, wasm_compile_error)
DEFINE_ERROR(WasmLinkError, wasm_link_error)
DEFINE_ERROR(WasmRuntimeError, wasm_runtime_error)
DEFINE_ERROR(WasmExceptionError, wasm_exception_error)
#undef DEFINE_ERROR

2192 2193 2194
// Creates the object installed as |function|'s "prototype" property.
// Resumable functions reuse shared prototype maps from the native context;
// ordinary functions get Object's initial map and a "constructor" back-link.
Handle<JSObject> Factory::NewFunctionPrototype(Handle<JSFunction> function) {
  // Make sure to use globals from the function's context, since the function
  // can be from a different context.
  Handle<NativeContext> native_context(function->context().native_context(),
                                       isolate());
  Handle<Map> new_map;
  if (V8_UNLIKELY(IsAsyncGeneratorFunction(function->shared().kind()))) {
    new_map = handle(native_context->async_generator_object_prototype_map(),
                     isolate());
  } else if (IsResumableFunction(function->shared().kind())) {
    // Generator and async function prototypes can share maps since they
    // don't have "constructor" properties.
    new_map =
        handle(native_context->generator_object_prototype_map(), isolate());
  } else {
    // Each function prototype gets a fresh map to avoid unwanted sharing of
    // maps between prototypes of different constructors.
    Handle<JSFunction> object_function(native_context->object_function(),
                                       isolate());
    DCHECK(object_function->has_initial_map());
    new_map = handle(object_function->initial_map(), isolate());
  }

  DCHECK(!new_map->is_prototype_map());
  Handle<JSObject> prototype = NewJSObjectFromMap(new_map);

  // Resumable function prototypes deliberately lack a "constructor"
  // property (see above), so only add it for ordinary functions.
  if (!IsResumableFunction(function->shared().kind())) {
    JSObject::AddProperty(isolate(), prototype, constructor_string(), function,
                          DONT_ENUM);
  }

  return prototype;
}

2226
// Wraps the raw |value| pointer in a Foreign stored in embedder field 0 of
// a fresh JSObject with the external map.
Handle<JSObject> Factory::NewExternal(void* value) {
  Address address = reinterpret_cast<Address>(value);
  Handle<Foreign> wrapper = NewForeign(address);
  Handle<JSObject> external = NewJSObjectFromMap(external_map());
  external->SetEmbedderField(0, *wrapper);
  return external;
}

2233 2234 2235 2236 2237 2238
// A DeoptimizationLiteralArray is represented as an old-space
// WeakFixedArray of |length| slots.
Handle<DeoptimizationLiteralArray> Factory::NewDeoptimizationLiteralArray(
    int length) {
  Handle<WeakFixedArray> literals =
      NewWeakFixedArray(length, AllocationType::kOld);
  return Handle<DeoptimizationLiteralArray>::cast(literals);
}

2239 2240
// Creates an on-heap trampoline Code object that forwards to the embedded
// (off-heap) builtin at |off_heap_entry|, copying over the flags and
// metadata offsets the trampoline must share with the original builtin.
Handle<Code> Factory::NewOffHeapTrampolineFor(Handle<Code> code,
                                              Address off_heap_entry) {
  CHECK_NOT_NULL(isolate()->embedded_blob_code());
  CHECK_NE(0, isolate()->embedded_blob_code_size());
  CHECK(Builtins::IsIsolateIndependentBuiltin(*code));

  // Non-executable builtins still get a trampoline object but without the
  // jump sequence into the instruction stream.
  bool generate_jump_to_instruction_stream =
      Builtins::CodeObjectIsExecutable(code->builtin_id());
  Handle<Code> result = Builtins::GenerateOffHeapTrampolineFor(
      isolate(), off_heap_entry,
      code->code_data_container(kAcquireLoad).kind_specific_flags(kRelaxedLoad),
      generate_jump_to_instruction_stream);

  // Trampolines may not contain any metadata since all metadata offsets,
  // stored on the Code object, refer to the off-heap metadata area.
  CHECK_EQ(result->raw_metadata_size(), 0);

  // The CodeDataContainer should not be modified beyond this point since it's
  // now possibly canonicalized.

  // The trampoline code object must inherit specific flags from the original
  // builtin (e.g. the safepoint-table offset). We set them manually here.
  {
    DisallowGarbageCollection no_gc;
    // Code pages are write-protected; open them for the stores below.
    CodePageMemoryModificationScope code_allocation(*result);
    Code raw_code = *code;
    Code raw_result = *result;

    const bool set_is_off_heap_trampoline = true;
    const int stack_slots =
        raw_code.has_safepoint_info() ? raw_code.stack_slots() : 0;
    raw_result.initialize_flags(raw_code.kind(), raw_code.is_turbofanned(),
                                stack_slots, set_is_off_heap_trampoline);
    raw_result.set_builtin_id(raw_code.builtin_id());
    raw_result.set_handler_table_offset(raw_code.handler_table_offset());
    raw_result.set_constant_pool_offset(raw_code.constant_pool_offset());
    raw_result.set_code_comments_offset(raw_code.code_comments_offset());
    raw_result.set_unwinding_info_offset(raw_code.unwinding_info_offset());

    // Replace the newly generated trampoline's RelocInfo ByteArray with the
    // canonical one stored in the roots to avoid duplicating it for every
    // single builtin.
    ByteArray canonical_reloc_info =
        generate_jump_to_instruction_stream
            ? read_only_roots().off_heap_trampoline_relocation_info()
            : read_only_roots().empty_byte_array();
#ifdef DEBUG
    // Verify that the contents are the same.
    ByteArray reloc_info = raw_result.relocation_info();
    DCHECK_EQ(reloc_info.length(), canonical_reloc_info.length());
    for (int i = 0; i < reloc_info.length(); ++i) {
      DCHECK_EQ(reloc_info.get(i), canonical_reloc_info.get(i));
    }
#endif
    raw_result.set_relocation_info(canonical_reloc_info);
    if (V8_EXTERNAL_CODE_SPACE_BOOL) {
      CodeDataContainer code_data_container =
          raw_result.code_data_container(kAcquireLoad);
      // Updating flags (in particular is_off_heap_trampoline one) might change
      // the value of the instruction start, so update it here.
      code_data_container.UpdateCodeEntryPoint(isolate(), raw_result);
      // Also update flag values cached on the code data container.
      code_data_container.initialize_flags(raw_code.kind(),
                                           raw_code.builtin_id());
    }
  }

  return result;
}

2309
// Creates a byte-for-byte copy of |code| in code space with its own, freshly
// allocated CodeDataContainer (containers cannot be shared between Code
// objects), then relocates and write-barriers the copy.
Handle<Code> Factory::CopyCode(Handle<Code> code) {
  Handle<CodeDataContainer> data_container = NewCodeDataContainer(
      code->code_data_container(kAcquireLoad).kind_specific_flags(kRelaxedLoad),
      AllocationType::kOld);

  Heap* heap = isolate()->heap();
  Handle<Code> new_code;
  {
    int obj_size = code->Size();
    // Allows writing to the (otherwise write-protected) code pages touched
    // by this allocation.
    CodePageCollectionMemoryModificationScope code_allocation(heap);
    HeapObject result = heap->AllocateRawWith<Heap::kRetryOrFail>(
        obj_size, AllocationType::kCode, AllocationOrigin::kRuntime);

    // Copy code object.
    Address old_addr = code->address();
    Address new_addr = result.address();
    Heap::CopyBlock(new_addr, old_addr, obj_size);
    new_code = handle(Code::cast(result), isolate());

    // Set the {CodeDataContainer}, it cannot be shared.
    new_code->set_code_data_container(*data_container, kReleaseStore);

    // Fix up address-dependent contents for the new location.
    new_code->Relocate(new_addr - old_addr);
    // We have to iterate over the object and process its pointers when black
    // allocation is on.
    heap->incremental_marking()->ProcessBlackAllocatedObject(*new_code);
    // Record all references to embedded objects in the new code object.
#ifndef V8_DISABLE_WRITE_BARRIERS
    WriteBarrierForCode(*new_code);
#endif
  }
  if (V8_EXTERNAL_CODE_SPACE_BOOL) {
    data_container->initialize_flags(code->kind(), code->builtin_id());
    data_container->SetCodeAndEntryPoint(isolate(), *new_code);
  }

#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) new_code->ObjectVerify(isolate());
#endif
  DCHECK(IsAligned(new_code->address(), kCodeAlignment));
  DCHECK_IMPLIES(
      !V8_ENABLE_THIRD_PARTY_HEAP_BOOL && !heap->code_region().is_empty(),
      heap->code_region().contains(new_code->address()));
  return new_code;
}
2354

2355 2356 2357 2358 2359 2360 2361 2362 2363 2364 2365 2366 2367 2368 2369 2370 2371 2372 2373
// Creates an old-space duplicate of |source|, copying the metadata fields
// (frame size, parameter count, constant pool, handler table, source
// positions, OSR/age counters) followed by the bytecodes themselves.
Handle<BytecodeArray> Factory::CopyBytecodeArray(Handle<BytecodeArray> source) {
  int size = BytecodeArray::SizeFor(source->length());
  BytecodeArray copy = BytecodeArray::cast(AllocateRawWithImmortalMap(
      size, AllocationType::kOld, *bytecode_array_map()));
  DisallowGarbageCollection no_gc;
  BytecodeArray raw_source = *source;
  copy.set_length(raw_source.length());
  copy.set_frame_size(raw_source.frame_size());
  copy.set_parameter_count(raw_source.parameter_count());
  copy.set_incoming_new_target_or_generator_register(
      raw_source.incoming_new_target_or_generator_register());
  copy.set_constant_pool(raw_source.constant_pool());
  copy.set_handler_table(raw_source.handler_table());
  // The source position table is published with acquire/release semantics.
  copy.set_source_position_table(raw_source.source_position_table(kAcquireLoad),
                                 kReleaseStore);
  copy.set_osr_loop_nesting_level(raw_source.osr_loop_nesting_level());
  copy.set_bytecode_age(raw_source.bytecode_age());
  raw_source.CopyBytecodesTo(copy);
  return handle(copy, isolate());
}
2375

2376
// Allocates a fresh JSObject using |constructor|'s initial map, creating
// that initial map first if the function does not have one yet.
Handle<JSObject> Factory::NewJSObject(Handle<JSFunction> constructor,
                                      AllocationType allocation) {
  JSFunction::EnsureHasInitialMap(constructor);
  Handle<Map> initial_map(constructor->initial_map(), isolate());
  return NewJSObjectFromMap(initial_map, allocation);
}

2383
// Creates a dictionary-mode object whose prototype is null.
Handle<JSObject> Factory::NewSlowJSObjectWithNullProto() {
  return NewSlowJSObjectFromMap(
      isolate()->slow_object_with_null_prototype_map());
}
2388

2389
// Creates a fast-mode object whose prototype is null, via a prototype
// transition from Object's initial map.
Handle<JSObject> Factory::NewJSObjectWithNullProto() {
  Handle<Map> initial_map(isolate()->object_function()->initial_map(),
                          isolate());
  Handle<Map> null_proto_map =
      Map::TransitionToPrototype(isolate(), initial_map, null_value());
  return NewJSObjectFromMap(null_proto_map);
}

2396 2397
// Creates the JSGlobalObject for |constructor|: pre-fills a GlobalDictionary
// with any accessors from the initial map's descriptors, allocates the
// object in old space, and normalizes it into dictionary mode.
Handle<JSGlobalObject> Factory::NewJSGlobalObject(
    Handle<JSFunction> constructor) {
  DCHECK(constructor->has_initial_map());
  Handle<Map> map(constructor->initial_map(), isolate());
  DCHECK(map->is_dictionary_map());

  // Make sure no field properties are described in the initial map.
  // This guarantees us that normalizing the properties does not
  // require us to change property values to PropertyCells.
  DCHECK_EQ(map->NextFreePropertyIndex(), 0);

  // Make sure we don't have a ton of pre-allocated slots in the
  // global objects. They will be unused once we normalize the object.
  DCHECK_EQ(map->UnusedPropertyFields(), 0);
  DCHECK_EQ(map->GetInObjectProperties(), 0);

  // Initial size of the backing store to avoid resize of the storage during
  // bootstrapping. The size differs between the JS global object and the
  // builtins object.
  int initial_size = 64;

  // Allocate a dictionary object for backing storage.
  int at_least_space_for = map->NumberOfOwnDescriptors() * 2 + initial_size;
  Handle<GlobalDictionary> dictionary =
      GlobalDictionary::New(isolate(), at_least_space_for);

  // The global object might be created from an object template with accessors.
  // Fill these accessors into the dictionary.
  Handle<DescriptorArray> descs(map->instance_descriptors(isolate()),
                                isolate());
  for (InternalIndex i : map->IterateOwnDescriptors()) {
    PropertyDetails details = descs->GetDetails(i);
    // Only accessors are expected.
    DCHECK_EQ(PropertyKind::kAccessor, details.kind());
    PropertyDetails d(PropertyKind::kAccessor, details.attributes(),
                      PropertyCellType::kMutable);
    Handle<Name> name(descs->GetKey(i), isolate());
    Handle<Object> value(descs->GetStrongValue(i), isolate());
    Handle<PropertyCell> cell = NewPropertyCell(name, d, value);
    // |dictionary| already contains enough space for all properties.
    USE(GlobalDictionary::Add(isolate(), dictionary, name, cell, d));
  }

  // Allocate the global object and initialize it with the backing store.
  Handle<JSGlobalObject> global(
      JSGlobalObject::cast(New(map, AllocationType::kOld)), isolate());
  InitializeJSObjectFromMap(*global, *dictionary, *map);

  // Create a new map for the global object.
  Handle<Map> new_map = Map::CopyDropDescriptors(isolate(), map);
  Map raw_map = *new_map;
  raw_map.set_may_have_interesting_symbols(true);
  raw_map.set_is_dictionary_map(true);
  LOG(isolate(), MapDetails(raw_map));

  // Set up the global object as a normalized object.
  global->set_global_dictionary(*dictionary, kReleaseStore);
  global->set_map(raw_map, kReleaseStore);

  // Make sure result is a global object with properties in dictionary.
  DCHECK(global->IsJSGlobalObject() && !global->HasFastProperties());
  return global;
}

2460 2461 2462
// Installs |properties| as the backing store and empty elements on the
// freshly allocated |obj|, then initializes the rest of its body per |map|.
void Factory::InitializeJSObjectFromMap(JSObject obj, Object properties,
                                        Map map) {
  DisallowGarbageCollection no_gc;
  obj.set_raw_properties_or_hash(properties, kRelaxedStore);
  obj.initialize_elements();
  // TODO(1240798): Initialize the object's body using valid initial values
  // according to the object's initial map.  For example, if the map's
  // instance type is JS_ARRAY_TYPE, the length field should be initialized
  // to a number (e.g. Smi::zero()) and the elements initialized to a
  // fixed array (e.g. Heap::empty_fixed_array()).  Currently, the object
  // verification code has to cope with (temporarily) invalid objects.  See
  // for example, JSArray::JSArrayVerify).
  InitializeJSObjectBody(obj, map, JSObject::kHeaderSize);
}

2475 2476 2477 2478
// Initializes the in-object fields of |obj| from |start_offset| up to the
// instance size, and advances in-object slack tracking if it is active for
// the map's root map.
void Factory::InitializeJSObjectBody(JSObject obj, Map map, int start_offset) {
  DisallowGarbageCollection no_gc;
  if (start_offset == map.instance_size()) return;
  DCHECK_LT(start_offset, map.instance_size());

  // We cannot always fill with one_pointer_filler_map because objects
  // created from API functions expect their embedder fields to be initialized
  // with undefined_value.
  // Pre-allocated fields need to be initialized with undefined_value as well
  // so that object accesses before the constructor completes (e.g. in the
  // debugger) will not cause a crash.

  // In case of Array subclassing the |map| could already be transitioned
  // to different elements kind from the initial map on which we track slack.
  bool in_progress = map.IsInobjectSlackTrackingInProgress();
  obj.InitializeBody(map, start_offset, in_progress,
                     ReadOnlyRoots(isolate()).one_pointer_filler_map_word(),
                     *undefined_value());
  if (in_progress) {
    // Count this allocation towards the root map's slack-tracking budget.
    map.FindRootMap(isolate()).InobjectSlackTrackingStep(isolate());
  }
}
2497

2498
// Allocates and initializes a JSObject from |map|. Must not be used for
// JSFunctions or global objects, which have dedicated allocation paths.
Handle<JSObject> Factory::NewJSObjectFromMap(
    Handle<Map> map, AllocationType allocation,
    Handle<AllocationSite> allocation_site) {
  // JSFunctions should be allocated using AllocateFunction to be
  // properly initialized.
  DCHECK(!InstanceTypeChecker::IsJSFunction((map->instance_type())));

  // Both types of global objects should be allocated using
  // AllocateGlobalObject to be properly initialized.
  DCHECK(map->instance_type() != JS_GLOBAL_OBJECT_TYPE);

  JSObject js_obj = JSObject::cast(
      AllocateRawWithAllocationSite(map, allocation, allocation_site));

  InitializeJSObjectFromMap(js_obj, *empty_fixed_array(), *map);

  DCHECK(js_obj.HasFastElements() || js_obj.HasTypedArrayElements() ||
         js_obj.HasFastStringWrapperElements() ||
         js_obj.HasFastArgumentsElements() || js_obj.HasDictionaryElements());
  return handle(js_obj, isolate());
}

2520 2521 2522
// Creates a dictionary-mode (slow) object from |map| with a property
// backing store pre-sized for |capacity| entries.
Handle<JSObject> Factory::NewSlowJSObjectFromMap(
    Handle<Map> map, int capacity, AllocationType allocation,
    Handle<AllocationSite> allocation_site) {
  DCHECK(map->is_dictionary_map());
  Handle<HeapObject> object_properties;
  // The dictionary representation is chosen at build time.
  if (V8_ENABLE_SWISS_NAME_DICTIONARY_BOOL) {
    object_properties = NewSwissNameDictionary(capacity, allocation);
  } else {
    object_properties = NameDictionary::New(isolate(), capacity);
  }
  Handle<JSObject> js_object =
      NewJSObjectFromMap(map, allocation, allocation_site);
  js_object->set_raw_properties_or_hash(*object_properties, kRelaxedStore);
  return js_object;
}
2535

2536
// Creates a dictionary-mode object with the given |prototype| that adopts
// |properties| as its name-dictionary backing store and, when non-empty,
// |elements| (a NumberDictionary) as its elements backing store.
Handle<JSObject> Factory::NewSlowJSObjectWithPropertiesAndElements(
    Handle<HeapObject> prototype, Handle<HeapObject> properties,
    Handle<FixedArrayBase> elements) {
  // The expected dictionary flavor depends on the build configuration.
  DCHECK_IMPLIES(V8_ENABLE_SWISS_NAME_DICTIONARY_BOOL,
                 properties->IsSwissNameDictionary());
  DCHECK_IMPLIES(!V8_ENABLE_SWISS_NAME_DICTIONARY_BOOL,
                 properties->IsNameDictionary());

  Handle<Map> object_map = isolate()->slow_object_with_object_prototype_map();
  if (object_map->prototype() != *prototype) {
    object_map = Map::TransitionToPrototype(isolate(), object_map, prototype);
  }
  DCHECK(object_map->is_dictionary_map());
  Handle<JSObject> object =
      NewJSObjectFromMap(object_map, AllocationType::kYoung);
  // Use a relaxed store for consistency with the other Factory paths that
  // install a properties backing store (e.g. NewSlowJSObjectFromMap).
  object->set_raw_properties_or_hash(*properties, kRelaxedStore);
  if (*elements != read_only_roots().empty_fixed_array()) {
    DCHECK(elements->IsNumberDictionary());
    // Adopting dictionary elements requires the matching elements-kind map.
    object_map =
        JSObject::GetElementsTransitionMap(object, DICTIONARY_ELEMENTS);
    JSObject::MigrateToMap(isolate(), object, object_map);
    object->set_elements(*elements);
  }
  return object;
}

2562
// Creates a JSArray of |length| backed by fresh storage of |capacity| slots
// in the given elements kind. Capacity 0 uses the canonical empty array.
Handle<JSArray> Factory::NewJSArray(ElementsKind elements_kind, int length,
                                    int capacity,
                                    ArrayStorageAllocationMode mode,
                                    AllocationType allocation) {
  DCHECK(capacity >= length);
  if (capacity == 0) {
    return NewJSArrayWithElements(empty_fixed_array(), elements_kind, length,
                                  allocation);
  }

  HandleScope scope(isolate());
  Handle<FixedArrayBase> storage =
      NewJSArrayStorage(elements_kind, capacity, mode);
  Handle<JSArray> array = NewJSArrayWithUnverifiedElements(
      storage, elements_kind, length, allocation);
  return scope.CloseAndEscape(array);
}

2579
// Creates a JSArray wrapping |elements| and validates that the backing
// store matches the requested elements kind.
Handle<JSArray> Factory::NewJSArrayWithElements(Handle<FixedArrayBase> elements,
                                                ElementsKind elements_kind,
                                                int length,
                                                AllocationType allocation) {
  Handle<JSArray> result = NewJSArrayWithUnverifiedElements(
      elements, elements_kind, length, allocation);
  JSObject::ValidateElements(*result);
  return result;
}
2588

2589 2590 2591 2592 2593 2594 2595 2596 2597 2598 2599 2600
// Creates a JSArray wrapping |elements| without validating that the backing
// store matches |elements_kind|; callers must validate or guarantee it.
Handle<JSArray> Factory::NewJSArrayWithUnverifiedElements(
    Handle<FixedArrayBase> elements, ElementsKind elements_kind, int length,
    AllocationType allocation) {
  DCHECK(length <= elements->length());
  NativeContext native_context = isolate()->raw_native_context();
  Map map = native_context.GetInitialJSArrayMap(elements_kind);
  if (map.is_null()) {
    // Fall back to the Array function's initial map when the native context
    // has no per-kind initial map installed.
    JSFunction array_function = native_context.array_function();
    map = array_function.initial_map();
  }
  Handle<JSArray> array = Handle<JSArray>::cast(
      NewJSObjectFromMap(handle(map, isolate()), allocation));
  DisallowGarbageCollection no_gc;
  JSArray raw = *array;
  raw.set_elements(*elements);
  raw.set_length(Smi::FromInt(length));
  return array;
}

2608
void Factory::NewJSArrayStorage(Handle<JSArray> array, int length, int capacity,
2609
                                ArrayStorageAllocationMode mode) {
2610
  DCHECK(capacity >= length);
2611 2612

  if (capacity == 0) {
2613
    JSArray raw = *array;
2614
    DisallowGarbageCollection no_gc;
2615 2616
    raw.set_length(Smi::zero());
    raw.set_elements(*empty_fixed_array());
2617 2618 2619
    return;
  }

2620
  HandleScope inner_scope(isolate());
2621 2622
  Handle<FixedArrayBase> elms =
      NewJSArrayStorage(array->GetElementsKind(), capacity, mode);
2623
  DisallowGarbageCollection no_gc;
2624 2625 2626
  JSArray raw = *array;
  raw.set_elements(*elms);
  raw.set_length(Smi::FromInt(length));
2627 2628 2629 2630 2631
}

// Allocates backing storage of |capacity| elements appropriate for
// |elements_kind|, either left uninitialized or pre-filled with holes
// depending on |mode|.
Handle<FixedArrayBase> Factory::NewJSArrayStorage(
    ElementsKind elements_kind, int capacity, ArrayStorageAllocationMode mode) {
  DCHECK_GT(capacity, 0);
  const bool uninitialized = mode == DONT_INITIALIZE_ARRAY_ELEMENTS;
  if (IsDoubleElementsKind(elements_kind)) {
    if (uninitialized) {
      return NewFixedDoubleArray(capacity);
    }
    DCHECK(mode == INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);
    return NewFixedDoubleArrayWithHoles(capacity);
  }
  DCHECK(IsSmiOrObjectElementsKind(elements_kind));
  if (uninitialized) {
    return NewFixedArray(capacity);
  }
  DCHECK(mode == INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);
  return NewFixedArrayWithHoles(capacity);
}

2652
// Allocates a fresh, empty JSWeakMap using the native context's initial map.
Handle<JSWeakMap> Factory::NewJSWeakMap() {
  NativeContext context = isolate()->raw_native_context();
  Handle<Map> weak_map_map(context.js_weak_map_fun().initial_map(), isolate());
  Handle<JSWeakMap> result(JSWeakMap::cast(*NewJSObjectFromMap(weak_map_map)),
                           isolate());
  {
    // Initialize inside a nested scope so no handle to the backing hash table
    // escapes; a leaked handle would make the weak entries strongly reachable.
    HandleScope scope(isolate());
    JSWeakCollection::Initialize(result, isolate());
  }
  return result;
}

2665 2666
// Creates a module namespace object with its @@toStringTag field preset to
// the "Module" string.
Handle<JSModuleNamespace> Factory::NewJSModuleNamespace() {
  Handle<Map> namespace_map = isolate()->js_module_namespace_map();
  Handle<JSModuleNamespace> result(
      Handle<JSModuleNamespace>::cast(NewJSObjectFromMap(namespace_map)));
  // The @@toStringTag slot is an in-object field; write it directly. The
  // stored value is a read-only string, so no write barrier is needed.
  FieldIndex tag_index = FieldIndex::ForDescriptor(
      *namespace_map, InternalIndex(JSModuleNamespace::kToStringTagFieldIndex));
  result->FastPropertyAtPut(tag_index, read_only_roots().Module_string(),
                            SKIP_WRITE_BARRIER);
  return result;
}
2675

2676 2677
// Allocates a (possibly async) generator object from |function|'s initial
// map. |function| must be a resumable function.
Handle<JSGeneratorObject> Factory::NewJSGeneratorObject(
    Handle<JSFunction> function) {
  DCHECK(IsResumableFunction(function->shared().kind()));
  JSFunction::EnsureHasInitialMap(function);
  Handle<Map> initial_map(function->initial_map(), isolate());

  DCHECK(initial_map->instance_type() == JS_GENERATOR_OBJECT_TYPE ||
         initial_map->instance_type() == JS_ASYNC_GENERATOR_OBJECT_TYPE);

  return Handle<JSGeneratorObject>::cast(NewJSObjectFromMap(initial_map));
}

2688
// Allocates and fully initializes a SourceTextModule for the module script
// described by |sfi|. The module starts out in Module::kUnlinked state with
// export/import tables sized from the module descriptor in the scope info.
Handle<SourceTextModule> Factory::NewSourceTextModule(
    Handle<SharedFunctionInfo> sfi) {
  Handle<SourceTextModuleInfo> module_info(
      sfi->scope_info().ModuleDescriptorInfo(), isolate());
  Handle<ObjectHashTable> exports =
      ObjectHashTable::New(isolate(), module_info->RegularExportCount());
  Handle<FixedArray> regular_exports =
      NewFixedArray(module_info->RegularExportCount());
  Handle<FixedArray> regular_imports =
      NewFixedArray(module_info->regular_imports().length());
  int requested_modules_length = module_info->module_requests().length();
  // Reuse the canonical empty array when there are no module requests.
  Handle<FixedArray> requested_modules =
      requested_modules_length > 0 ? NewFixedArray(requested_modules_length)
                                   : empty_fixed_array();
  Handle<ArrayList> async_parent_modules = ArrayList::New(isolate(), 0);

  ReadOnlyRoots roots(isolate());
  // All allocation happens above this point: from here on the module is held
  // as a raw (unhandlified) object, so no GC may be triggered.
  SourceTextModule module = SourceTextModule::cast(
      New(source_text_module_map(), AllocationType::kOld));
  DisallowGarbageCollection no_gc;
  module.set_code(*sfi);
  module.set_exports(*exports);
  module.set_regular_exports(*regular_exports);
  module.set_regular_imports(*regular_imports);
  module.set_hash(isolate()->GenerateIdentityHash(Smi::kMaxValue));
  module.set_module_namespace(roots.undefined_value(), SKIP_WRITE_BARRIER);
  module.set_requested_modules(*requested_modules);
  module.set_status(Module::kUnlinked);
  module.set_exception(roots.the_hole_value(), SKIP_WRITE_BARRIER);
  module.set_top_level_capability(roots.undefined_value(), SKIP_WRITE_BARRIER);
  module.set_import_meta(roots.the_hole_value(), kReleaseStore,
                         SKIP_WRITE_BARRIER);
  // DFS bookkeeping used by the link/evaluate algorithms; -1 means the
  // module has not been visited yet.
  module.set_dfs_index(-1);
  module.set_dfs_ancestor_index(-1);
  module.set_flags(0);
  module.set_async(IsAsyncModule(sfi->kind()));
  module.set_async_evaluating_ordinal(SourceTextModule::kNotAsyncEvaluated);
  module.set_cycle_root(roots.the_hole_value(), SKIP_WRITE_BARRIER);
  module.set_async_parent_modules(*async_parent_modules);
  module.set_pending_async_dependencies(0);
  return handle(module, isolate());
}
2730

2731 2732 2733 2734
// Allocates a SyntheticModule — a module whose exports are supplied by the
// embedder through |evaluation_steps| rather than by script source. The
// module starts out in Module::kUnlinked state.
Handle<SyntheticModule> Factory::NewSyntheticModule(
    Handle<String> module_name, Handle<FixedArray> export_names,
    v8::Module::SyntheticModuleEvaluationSteps evaluation_steps) {
  ReadOnlyRoots roots(isolate());

  Handle<ObjectHashTable> exports =
      ObjectHashTable::New(isolate(), static_cast<int>(export_names->length()));
  // The evaluation-steps callback is stored as a Foreign wrapping the raw
  // function address.
  Handle<Foreign> evaluation_steps_foreign =
      NewForeign(reinterpret_cast<i::Address>(evaluation_steps));

  // All allocation happens above this point: from here on the module is held
  // as a raw (unhandlified) object, so no GC may be triggered.
  SyntheticModule module =
      SyntheticModule::cast(New(synthetic_module_map(), AllocationType::kOld));
  DisallowGarbageCollection no_gc;
  module.set_hash(isolate()->GenerateIdentityHash(Smi::kMaxValue));
  module.set_module_namespace(roots.undefined_value(), SKIP_WRITE_BARRIER);
  module.set_status(Module::kUnlinked);
  module.set_exception(roots.the_hole_value(), SKIP_WRITE_BARRIER);
  module.set_top_level_capability(roots.undefined_value(), SKIP_WRITE_BARRIER);
  module.set_name(*module_name);
  module.set_export_names(*export_names);
  module.set_exports(*exports);
  module.set_evaluation_steps(*evaluation_steps_foreign);
  return handle(module, isolate());
}

2756 2757
// Creates a non-shared, non-resizable JSArrayBuffer that takes ownership of
// |backing_store|.
Handle<JSArrayBuffer> Factory::NewJSArrayBuffer(
    std::shared_ptr<BackingStore> backing_store, AllocationType allocation) {
  Handle<Map> buffer_map(
      isolate()->native_context()->array_buffer_fun().initial_map(),
      isolate());
  auto buffer =
      Handle<JSArrayBuffer>::cast(NewJSObjectFromMap(buffer_map, allocation));
  buffer->Setup(SharedFlag::kNotShared, ResizableFlag::kNotResizable,
                std::move(backing_store));
  return buffer;
}

// Allocates a backing store of |byte_length| bytes plus a JSArrayBuffer that
// owns it. Returns an empty MaybeHandle when the backing-store allocation
// fails; a zero byte length produces a buffer with no backing store.
MaybeHandle<JSArrayBuffer> Factory::NewJSArrayBufferAndBackingStore(
    size_t byte_length, InitializedFlag initialized,
    AllocationType allocation) {
  std::unique_ptr<BackingStore> store = nullptr;
  if (byte_length > 0) {
    store = BackingStore::Allocate(isolate(), byte_length,
                                   SharedFlag::kNotShared, initialized);
    // Allocation failure is reported to the caller, not thrown.
    if (!store) return MaybeHandle<JSArrayBuffer>();
  }
  Handle<Map> buffer_map(
      isolate()->native_context()->array_buffer_fun().initial_map(),
      isolate());
  auto buffer =
      Handle<JSArrayBuffer>::cast(NewJSObjectFromMap(buffer_map, allocation));
  buffer->Setup(SharedFlag::kNotShared, ResizableFlag::kNotResizable,
                std::move(store));
  return buffer;
}

2786 2787
// Creates a SharedArrayBuffer around |backing_store|; growability follows the
// backing store's resizability (which requires the harmony_rab_gsab flag).
Handle<JSArrayBuffer> Factory::NewJSSharedArrayBuffer(
    std::shared_ptr<BackingStore> backing_store) {
  DCHECK_IMPLIES(backing_store->is_resizable(), FLAG_harmony_rab_gsab);
  Handle<Map> shared_buffer_map(
      isolate()->native_context()->shared_array_buffer_fun().initial_map(),
      isolate());
  auto shared_buffer = Handle<JSArrayBuffer>::cast(
      NewJSObjectFromMap(shared_buffer_map, AllocationType::kYoung));
  ResizableFlag resizable = ResizableFlag::kNotResizable;
  if (backing_store->is_resizable()) resizable = ResizableFlag::kResizable;
  shared_buffer->Setup(SharedFlag::kShared, resizable,
                       std::move(backing_store));
  return shared_buffer;
}

2801 2802
// Creates an iterator-result object ({value, done}) in new space.
Handle<JSIteratorResult> Factory::NewJSIteratorResult(Handle<Object> value,
                                                      bool done) {
  Handle<Map> result_map(isolate()->native_context()->iterator_result_map(),
                         isolate());
  Handle<JSIteratorResult> result = Handle<JSIteratorResult>::cast(
      NewJSObjectFromMap(result_map, AllocationType::kYoung));
  DisallowGarbageCollection no_gc;
  JSIteratorResult raw_result = *result;
  raw_result.set_done(*ToBoolean(done), SKIP_WRITE_BARRIER);
  raw_result.set_value(*value, SKIP_WRITE_BARRIER);
  return result;
}
2813

2814
// Wraps a synchronous iterator (plus its cached |next| method) in an
// async-from-sync iterator object.
Handle<JSAsyncFromSyncIterator> Factory::NewJSAsyncFromSyncIterator(
    Handle<JSReceiver> sync_iterator, Handle<Object> next) {
  Handle<Map> iterator_map(
      isolate()->native_context()->async_from_sync_iterator_map(), isolate());
  auto result = Handle<JSAsyncFromSyncIterator>::cast(
      NewJSObjectFromMap(iterator_map, AllocationType::kYoung));
  DisallowGarbageCollection no_gc;
  JSAsyncFromSyncIterator raw_iterator = *result;
  raw_iterator.set_sync_iterator(*sync_iterator, SKIP_WRITE_BARRIER);
  raw_iterator.set_next(*next, SKIP_WRITE_BARRIER);
  return result;
}

2828
// Allocates an empty, fully initialized JSMap.
Handle<JSMap> Factory::NewJSMap() {
  Handle<Map> js_map_map(isolate()->native_context()->js_map_map(), isolate());
  Handle<JSMap> result = Handle<JSMap>::cast(NewJSObjectFromMap(js_map_map));
  JSMap::Initialize(result, isolate());
  return result;
}

// Allocates an empty, fully initialized JSSet.
Handle<JSSet> Factory::NewJSSet() {
  Handle<Map> js_set_map(isolate()->native_context()->js_set_map(), isolate());
  Handle<JSSet> result = Handle<JSSet>::cast(NewJSObjectFromMap(js_set_map));
  JSSet::Initialize(result, isolate());
  return result;
}

2842 2843 2844
// Translates a typed-array ElementsKind into its ExternalArrayType and
// per-element size in bytes. Aborts (UNREACHABLE) for any non-typed-array
// kind. Covers both the regular and the RAB/GSAB typed-array kinds.
void Factory::TypeAndSizeForElementsKind(ElementsKind kind,
                                         ExternalArrayType* array_type,
                                         size_t* element_size) {
  switch (kind) {
// One case per typed-array kind, generated from the master type list.
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) \
  case TYPE##_ELEMENTS:                           \
    *array_type = kExternal##Type##Array;         \
    *element_size = sizeof(ctype);                \
    break;
    TYPED_ARRAYS(TYPED_ARRAY_CASE)
    RAB_GSAB_TYPED_ARRAYS_WITH_TYPED_ARRAY_TYPE(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE

    default:
      UNREACHABLE();
  }
}

2860
namespace {

// Inverse of Factory::TypeAndSizeForElementsKind: maps an ExternalArrayType
// to its per-element size and ElementsKind. Note that only the regular
// typed-array types are handled here (no RAB/GSAB variants); any other
// value aborts.
void ForFixedTypedArray(ExternalArrayType array_type, size_t* element_size,
                        ElementsKind* element_kind) {
  switch (array_type) {
// One case per typed-array type, generated from the master type list.
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) \
  case kExternal##Type##Array:                    \
    *element_size = sizeof(ctype);                \
    *element_kind = TYPE##_ELEMENTS;              \
    return;

    TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
  }
  UNREACHABLE();
}

}  // namespace
2878

2879 2880
// Common allocation path for typed arrays and DataViews: creates the view
// object, checks that [byte_offset, byte_offset + byte_length) lies within
// |buffer|, and wires up the shared JSArrayBufferView fields.
Handle<JSArrayBufferView> Factory::NewJSArrayBufferView(
    Handle<Map> map, Handle<FixedArrayBase> elements,
    Handle<JSArrayBuffer> buffer, size_t byte_offset, size_t byte_length) {
  // Each bound is checked separately so that an overflowing sum cannot
  // sneak past a single combined check.
  CHECK_LE(byte_length, buffer->byte_length());
  CHECK_LE(byte_offset, buffer->byte_length());
  CHECK_LE(byte_offset + byte_length, buffer->byte_length());
  Handle<JSArrayBufferView> view = Handle<JSArrayBufferView>::cast(
      NewJSObjectFromMap(map, AllocationType::kYoung));
  DisallowGarbageCollection no_gc;
  JSArrayBufferView raw_view = *view;
  raw_view.set_elements(*elements, SKIP_WRITE_BARRIER);
  raw_view.set_buffer(*buffer, SKIP_WRITE_BARRIER);
  raw_view.set_byte_offset(byte_offset);
  raw_view.set_byte_length(byte_length);
  raw_view.set_bit_field(0);
  ZeroEmbedderFields(raw_view);
  DCHECK_EQ(raw_view.GetEmbedderFieldCount(),
            v8::ArrayBufferView::kEmbedderFieldCount);
  return view;
}

2900 2901
// Creates a JSTypedArray of |type| viewing |buffer| starting at |byte_offset|
// with |length| elements. Byte-range validation against the buffer happens in
// NewJSArrayBufferView.
Handle<JSTypedArray> Factory::NewJSTypedArray(ExternalArrayType type,
                                              Handle<JSArrayBuffer> buffer,
                                              size_t byte_offset,
                                              size_t length) {
  size_t element_size;
  ElementsKind elements_kind;
  ForFixedTypedArray(type, &element_size, &elements_kind);
  size_t byte_length = length * element_size;

  CHECK_LE(length, JSTypedArray::kMaxLength);
  // Detects overflow in the byte_length multiplication above.
  CHECK_EQ(length, byte_length / element_size);
  // The view must start at an element-aligned offset.
  CHECK_EQ(0, byte_offset % ElementsKindToByteSize(elements_kind));

  // Select the per-type initial map from the native context.
  Handle<Map> map;
  switch (elements_kind) {
#define TYPED_ARRAY_FUN(Type, type, TYPE, ctype)                              \
  case TYPE##_ELEMENTS:                                                       \
    map =                                                                     \
        handle(isolate()->native_context()->type##_array_fun().initial_map(), \
               isolate());                                                    \
    break;

    TYPED_ARRAYS(TYPED_ARRAY_FUN)
#undef TYPED_ARRAY_FUN

    default:
      UNREACHABLE();
  }
  Handle<JSTypedArray> typed_array =
      Handle<JSTypedArray>::cast(NewJSArrayBufferView(
          map, empty_byte_array(), buffer, byte_offset, byte_length));
  JSTypedArray raw = *typed_array;
  DisallowGarbageCollection no_gc;
  raw.set_length(length);
  // Point the data pointer at the buffer's store, offset by |byte_offset|.
  raw.SetOffHeapDataPtr(isolate(), buffer->backing_store(), byte_offset);
  raw.set_is_length_tracking(false);
  raw.set_is_backed_by_rab(!buffer->is_shared() && buffer->is_resizable());
  return typed_array;
}

2940 2941
// Creates a JSDataView over [byte_offset, byte_offset + byte_length) of
// |buffer|; the range is validated in NewJSArrayBufferView.
Handle<JSDataView> Factory::NewJSDataView(Handle<JSArrayBuffer> buffer,
                                          size_t byte_offset,
                                          size_t byte_length) {
  Handle<Map> view_map(
      isolate()->native_context()->data_view_fun().initial_map(), isolate());
  Handle<JSDataView> data_view =
      Handle<JSDataView>::cast(NewJSArrayBufferView(
          view_map, empty_fixed_array(), buffer, byte_offset, byte_length));
  data_view->set_data_pointer(
      isolate(), static_cast<uint8_t*>(buffer->backing_store()) + byte_offset);
  // TODO(v8:11111): Support creating length tracking DataViews via the API.
  data_view->set_is_length_tracking(false);
  data_view->set_is_backed_by_rab(!buffer->is_shared() &&
                                  buffer->is_resizable());
  return data_view;
}

2955 2956
// Creates a bound function per ES BoundFunctionCreate: binds |bound_this| and
// |bound_args| to |target_function|. Throws a RangeError when too many bound
// arguments are supplied.
MaybeHandle<JSBoundFunction> Factory::NewJSBoundFunction(
    Handle<JSReceiver> target_function, Handle<Object> bound_this,
    base::Vector<Handle<Object>> bound_args) {
  DCHECK(target_function->IsCallable());
  STATIC_ASSERT(Code::kMaxArguments <= FixedArray::kMaxLength);
  if (bound_args.length() >= Code::kMaxArguments) {
    THROW_NEW_ERROR(isolate(),
                    NewRangeError(MessageTemplate::kTooManyArguments),
                    JSBoundFunction);
  }

  // Determine the prototype of the {target_function}.
  Handle<HeapObject> prototype;
  ASSIGN_RETURN_ON_EXCEPTION(
      isolate(), prototype,
      JSReceiver::GetPrototype(isolate(), target_function), JSBoundFunction);

  // Allocate in the target's creation context so the bound function lives in
  // the same native context as its target.
  SaveAndSwitchContext save(
      isolate(), *target_function->GetCreationContext().ToHandleChecked());

  // Create the [[BoundArguments]] for the result.
  Handle<FixedArray> bound_arguments;
  if (bound_args.length() == 0) {
    bound_arguments = empty_fixed_array();
  } else {
    bound_arguments = NewFixedArray(bound_args.length());
    for (int i = 0; i < bound_args.length(); ++i) {
      bound_arguments->set(i, *bound_args[i]);
    }
  }

  // Setup the map for the JSBoundFunction instance. Whether the bound
  // function is a constructor mirrors the target.
  Handle<Map> map = target_function->IsConstructor()
                        ? isolate()->bound_function_with_constructor_map()
                        : isolate()->bound_function_without_constructor_map();
  if (map->prototype() != *prototype) {
    map = Map::TransitionToPrototype(isolate(), map, prototype);
  }
  DCHECK_EQ(target_function->IsConstructor(), map->is_constructor());

  // Setup the JSBoundFunction instance.
  Handle<JSBoundFunction> result = Handle<JSBoundFunction>::cast(
      NewJSObjectFromMap(map, AllocationType::kYoung));
  DisallowGarbageCollection no_gc;
  JSBoundFunction raw = *result;
  raw.set_bound_target_function(*target_function, SKIP_WRITE_BARRIER);
  raw.set_bound_this(*bound_this, SKIP_WRITE_BARRIER);
  raw.set_bound_arguments(*bound_arguments, SKIP_WRITE_BARRIER);
  return result;
}

3006
// ES6 section 9.5.15 ProxyCreate (target, handler)
3007
Handle<JSProxy> Factory::NewJSProxy(Handle<JSReceiver> target,
                                    Handle<JSReceiver> handler) {
  // Pick the proxy map matching the target's callable/constructor nature so
  // the proxy reports the same.
  Handle<Map> map;
  if (!target->IsCallable()) {
    map = Handle<Map>(isolate()->proxy_map());
  } else if (target->IsConstructor()) {
    map = Handle<Map>(isolate()->proxy_constructor_map());
  } else {
    map = Handle<Map>(isolate()->proxy_callable_map());
  }
  DCHECK(map->prototype().IsNull(isolate()));
  JSProxy raw_proxy = JSProxy::cast(New(map, AllocationType::kYoung));
  DisallowGarbageCollection no_gc;
  raw_proxy.initialize_properties(isolate());
  raw_proxy.set_target(*target, SKIP_WRITE_BARRIER);
  raw_proxy.set_handler(*handler, SKIP_WRITE_BARRIER);
  return handle(raw_proxy, isolate());
}

3029
Handle<JSGlobalProxy> Factory::NewUninitializedJSGlobalProxy(int size) {
  // Create an empty shell of a JSGlobalProxy that needs to be reinitialized
  // via ReinitializeJSGlobalProxy later.
  Handle<Map> map = NewMap(JS_GLOBAL_PROXY_TYPE, size);
  {
    // Uphold the invariants any JSGlobalProxy map must satisfy.
    DisallowGarbageCollection no_gc;
    Map raw_map = *map;
    raw_map.set_is_access_check_needed(true);
    raw_map.set_may_have_interesting_symbols(true);
    LOG(isolate(), MapDetails(raw_map));
  }
  Handle<JSGlobalProxy> global_proxy = Handle<JSGlobalProxy>::cast(
      NewJSObjectFromMap(map, AllocationType::kOld));
  // Create identity hash early in case there is any JS collection containing
  // a global proxy key and needs to be rehashed after deserialization.
  global_proxy->GetOrCreateIdentityHash(isolate());
  return global_proxy;
}

3049 3050
// Re-points an existing JSGlobalProxy at |constructor|'s initial map (e.g.
// when a context is reattached), preserving the proxy's identity hash.
void Factory::ReinitializeJSGlobalProxy(Handle<JSGlobalProxy> object,
                                        Handle<JSFunction> constructor) {
  DCHECK(constructor->has_initial_map());
  Handle<Map> map(constructor->initial_map(), isolate());
  Handle<Map> old_map(object->map(), isolate());

  // The proxy's hash should be retained across reinitialization.
  Handle<Object> raw_properties_or_hash(object->raw_properties_or_hash(),
                                        isolate());

  if (old_map->is_prototype_map()) {
    // Keep the prototype-map status; work on a copy so the constructor's
    // initial map is not mutated.
    map = Map::Copy(isolate(), map, "CopyAsPrototypeForJSGlobalProxy");
    map->set_is_prototype_map(true);
  }
  JSObject::NotifyMapChange(old_map, map, isolate());
  old_map->NotifyLeafMapLayoutChange(isolate());

  // Check that the already allocated object has the same size and type as
  // objects allocated using the constructor.
  DCHECK(map->instance_size() == old_map->instance_size());
  DCHECK(map->instance_type() == old_map->instance_type());

  // In order to keep heap in consistent state there must be no allocations
  // before object re-initialization is finished.
  DisallowGarbageCollection no_gc;

  // Reset the map for the object.
  JSGlobalProxy raw = *object;
  raw.set_map(*map, kReleaseStore);

  // Reinitialize the object from the constructor map.
  InitializeJSObjectFromMap(raw, *raw_properties_or_hash, *map);
}

3083
// Creates a JSMessageObject describing a diagnostic message (error, warning)
// attached to |script|. When |start_position| is negative, the position is
// not yet known and the SharedFunctionInfo plus bytecode offset are kept so
// the position can be computed lazily later.
Handle<JSMessageObject> Factory::NewJSMessageObject(
    MessageTemplate message, Handle<Object> argument, int start_position,
    int end_position, Handle<SharedFunctionInfo> shared_info,
    int bytecode_offset, Handle<Script> script, Handle<Object> stack_frames) {
  Handle<Map> map = message_object_map();
  JSMessageObject message_obj =
      JSMessageObject::cast(New(map, AllocationType::kYoung));
  DisallowGarbageCollection no_gc;
  message_obj.set_raw_properties_or_hash(*empty_fixed_array(),
                                         SKIP_WRITE_BARRIER);
  message_obj.initialize_elements();
  message_obj.set_elements(*empty_fixed_array(), SKIP_WRITE_BARRIER);
  message_obj.set_type(message);
  message_obj.set_argument(*argument, SKIP_WRITE_BARRIER);
  message_obj.set_start_position(start_position);
  message_obj.set_end_position(end_position);
  message_obj.set_script(*script, SKIP_WRITE_BARRIER);
  if (start_position >= 0) {
    // If there's a start_position, then there's no need to store the
    // SharedFunctionInfo as it will never be necessary to regenerate the
    // position.
    message_obj.set_shared_info(*undefined_value(), SKIP_WRITE_BARRIER);
    message_obj.set_bytecode_offset(Smi::FromInt(0));
  } else {
    message_obj.set_bytecode_offset(Smi::FromInt(bytecode_offset));
    if (shared_info.is_null()) {
      message_obj.set_shared_info(*undefined_value(), SKIP_WRITE_BARRIER);
      // No shared info means no bytecode to resolve a position from.
      DCHECK_EQ(bytecode_offset, -1);
    } else {
      message_obj.set_shared_info(*shared_info, SKIP_WRITE_BARRIER);
      DCHECK_GE(bytecode_offset, kFunctionEntryBytecodeOffset);
    }
  }

  message_obj.set_stack_frames(*stack_frames, SKIP_WRITE_BARRIER);
  message_obj.set_error_level(v8::Isolate::kMessageError);
  return handle(message_obj, isolate());
}

3122 3123
// Creates a SharedFunctionInfo for an API (template-backed) function. The
// FunctionTemplateInfo takes the place of script code; no builtin is used.
Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfoForApiFunction(
    MaybeHandle<String> maybe_name,
    Handle<FunctionTemplateInfo> function_template_info, FunctionKind kind) {
  return NewSharedFunctionInfo(maybe_name, function_template_info,
                               Builtin::kNoBuiltinId, kind);
}

// Creates a SharedFunctionInfo whose implementation is the given builtin;
// there is no attached Code object.
Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfoForBuiltin(
    MaybeHandle<String> maybe_name, Builtin builtin, FunctionKind kind) {
  return NewSharedFunctionInfo(maybe_name, MaybeHandle<Code>(), builtin, kind);
}

3137 3138 3139 3140 3141 3142
// Creates a placeholder SharedFunctionInfo for web-snapshot deserialization:
// an unnamed normal function with neither code nor a builtin.
Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfoForWebSnapshot() {
  return NewSharedFunctionInfo(empty_string(), MaybeHandle<Code>(),
                               Builtin::kNoBuiltinId,
                               FunctionKind::kNormalFunction);
}

3143
namespace {
3144
V8_INLINE int NumberToStringCacheHash(Handle<FixedArray> cache, Smi number) {
3145
  int mask = (cache->length() >> 1) - 1;
3146
  return number.value() & mask;
3147
}
3148 3149

V8_INLINE int NumberToStringCacheHash(Handle<FixedArray> cache, double number) {
3150 3151 3152 3153 3154
  int mask = (cache->length() >> 1) - 1;
  int64_t bits = bit_cast<int64_t>(number);
  return (static_cast<int>(bits) ^ static_cast<int>(bits >> 32)) & mask;
}

3155 3156
V8_INLINE Handle<String> CharToString(Factory* factory, const char* string,
                                      NumberCacheMode mode) {
3157 3158
  // We tenure the allocated string since it is referenced from the
  // number-string cache which lives in the old space.
3159 3160 3161 3162 3163
  AllocationType type = mode == NumberCacheMode::kIgnore
                            ? AllocationType::kYoung
                            : AllocationType::kOld;
  return factory->NewStringFromAsciiChecked(string, type);
}
3164

3165 3166 3167 3168
}  // namespace

// Stores the (number, string) pair in the number-string cache bucket |hash|.
// On a bucket collision the cache is grown once to its maximum size instead
// of overwriting (and the entry is dropped for this request), unless we are
// optimizing for size or the cache is already full-sized.
void Factory::NumberToStringCacheSet(Handle<Object> number, int hash,
                                     Handle<String> js_string) {
  bool bucket_occupied =
      !number_string_cache()->get(hash * 2).IsUndefined(isolate());
  if (bucket_occupied && !FLAG_optimize_for_size) {
    int full_size = isolate()->heap()->MaxNumberToStringCacheSize();
    if (number_string_cache()->length() != full_size) {
      Handle<FixedArray> grown_cache =
          NewFixedArray(full_size, AllocationType::kOld);
      isolate()->heap()->set_number_string_cache(*grown_cache);
      return;
    }
  }
  DisallowGarbageCollection no_gc;
  FixedArray cache = *number_string_cache();
  cache.set(hash * 2, *number);
  cache.set(hash * 2 + 1, *js_string);
}

3185
// Looks up |number| in cache bucket |hash|. Returns the cached string on a
// hit, or undefined on a miss. HeapNumber keys compare by numeric value.
Handle<Object> Factory::NumberToStringCacheGet(Object number, int hash) {
  DisallowGarbageCollection no_gc;
  FixedArray cache = *number_string_cache();
  Object key = cache.get(hash * 2);
  bool hit = key == number || (key.IsHeapNumber() && number.IsHeapNumber() &&
                               key.Number() == number.Number());
  if (!hit) return undefined_value();
  return Handle<String>(String::cast(cache.get(hash * 2 + 1)), isolate());
}

3196
// Converts a Number (Smi or HeapNumber) to its string representation,
// consulting/populating the number-string cache per |mode|.
Handle<String> Factory::NumberToString(Handle<Object> number,
                                       NumberCacheMode mode) {
  if (number->IsSmi()) return SmiToString(Smi::cast(*number), mode);

  double double_value = Handle<HeapNumber>::cast(number)->value();
  // Canonicalize: doubles with an exact Smi representation take the Smi path
  // so both forms share a single cached string.
  int smi_value;
  if (DoubleToSmiInteger(double_value, &smi_value)) {
    return SmiToString(Smi::FromInt(smi_value), mode);
  }
  return HeapNumberToString(Handle<HeapNumber>::cast(number), double_value,
                            mode);
}

// Size of the on-stack scratch buffer used by the number-to-string
// conversions below. Must be large enough to fit any double, int, or size_t.
static const int kNumberToStringBufferSize = 32;
3212

3213
// Stringifies a HeapNumber {number} whose value is {value} (passed separately
// so callers that already loaded it avoid a re-read). Special-cases 0 and NaN
// to canonical root strings; otherwise formats via DoubleToCString. The cache
// is consulted/updated per {mode}.
Handle<String> Factory::HeapNumberToString(Handle<HeapNumber> number,
                                           double value, NumberCacheMode mode) {
  int hash = 0;
  if (mode != NumberCacheMode::kIgnore) {
    hash = NumberToStringCacheHash(number_string_cache(), value);
  }
  if (mode == NumberCacheMode::kBoth) {
    Handle<Object> cached = NumberToStringCacheGet(*number, hash);
    if (!cached->IsUndefined(isolate())) return Handle<String>::cast(cached);
  }

  Handle<String> result;
  if (value == 0) {
    result = zero_string();
  } else if (std::isnan(value)) {
    result = NaN_string();
  } else {
    char arr[kNumberToStringBufferSize];
    base::Vector<char> buffer(arr, arraysize(arr));
    const char* string = DoubleToCString(value, buffer);
    result = CharToString(this, string, mode);
  }
  if (mode != NumberCacheMode::kIgnore) {
    NumberToStringCacheSet(number, hash, result);
  }
  return result;
}

3241 3242 3243
inline Handle<String> Factory::SmiToString(Smi number, NumberCacheMode mode) {
  int hash = NumberToStringCacheHash(number_string_cache(), number);
  if (mode == NumberCacheMode::kBoth) {
3244 3245
    Handle<Object> cached = NumberToStringCacheGet(number, hash);
    if (!cached->IsUndefined(isolate())) return Handle<String>::cast(cached);
3246
  }
3247

3248 3249 3250 3251 3252 3253 3254 3255 3256
  Handle<String> result;
  if (number == Smi::zero()) {
    result = zero_string();
  } else {
    char arr[kNumberToStringBufferSize];
    base::Vector<char> buffer(arr, arraysize(arr));
    const char* string = IntToCString(number.value(), buffer);
    result = CharToString(this, string, mode);
  }
3257 3258 3259
  if (mode != NumberCacheMode::kIgnore) {
    NumberToStringCacheSet(handle(number, isolate()), hash, result);
  }
3260

3261 3262 3263
  // Compute the hash here (rather than letting the caller take care of it) so
  // that the "cache hit" case above doesn't have to bother with it.
  STATIC_ASSERT(Smi::kMaxValue <= std::numeric_limits<uint32_t>::max());
3264 3265 3266 3267 3268 3269 3270 3271 3272
  {
    DisallowGarbageCollection no_gc;
    String raw = *result;
    if (raw.raw_hash_field() == String::kEmptyHashField &&
        number.value() >= 0) {
      uint32_t raw_hash_field = StringHasher::MakeArrayIndexHash(
          static_cast<uint32_t>(number.value()), raw.length());
      raw.set_raw_hash_field(raw_hash_field);
    }
3273 3274
  }
  return result;
3275 3276
}

3277 3278
Handle<String> Factory::SizeToString(size_t value, bool check_cache) {
  Handle<String> result;
3279 3280
  NumberCacheMode cache_mode =
      check_cache ? NumberCacheMode::kBoth : NumberCacheMode::kIgnore;
3281 3282
  if (value <= Smi::kMaxValue) {
    int32_t int32v = static_cast<int32_t>(static_cast<uint32_t>(value));
3283
    // SmiToString sets the hash when needed, we can return immediately.
3284
    return SmiToString(Smi::FromInt(int32v), cache_mode);
3285 3286 3287
  } else if (value <= kMaxSafeInteger) {
    // TODO(jkummerow): Refactor the cache to not require Objects as keys.
    double double_value = static_cast<double>(value);
3288
    result = HeapNumberToString(NewHeapNumber(double_value), value, cache_mode);
3289 3290
  } else {
    char arr[kNumberToStringBufferSize];
3291
    base::Vector<char> buffer(arr, arraysize(arr));
3292 3293 3294 3295 3296 3297 3298 3299 3300 3301 3302 3303
    // Build the string backwards from the least significant digit.
    int i = buffer.length();
    size_t value_copy = value;
    buffer[--i] = '\0';
    do {
      buffer[--i] = '0' + (value_copy % 10);
      value_copy /= 10;
    } while (value_copy > 0);
    char* string = buffer.begin() + i;
    // No way to cache this; we'd need an {Object} to use as key.
    result = NewStringFromAsciiChecked(string);
  }
3304 3305 3306 3307 3308 3309 3310 3311 3312
  {
    DisallowGarbageCollection no_gc;
    String raw = *result;
    if (value <= JSArray::kMaxArrayIndex &&
        raw.raw_hash_field() == String::kEmptyHashField) {
      uint32_t raw_hash_field = StringHasher::MakeArrayIndexHash(
          static_cast<uint32_t>(value), raw.length());
      raw.set_raw_hash_field(raw_hash_field);
    }
3313 3314 3315 3316
  }
  return result;
}

3317
Handle<DebugInfo> Factory::NewDebugInfo(Handle<SharedFunctionInfo> shared) {
3318
  DCHECK(!shared->HasDebugInfo());
3319

3320 3321
  auto debug_info =
      NewStructInternal<DebugInfo>(DEBUG_INFO_TYPE, AllocationType::kOld);
3322
  DisallowGarbageCollection no_gc;
3323
  SharedFunctionInfo raw_shared = *shared;
3324
  debug_info.set_flags(DebugInfo::kNone, kRelaxedStore);
3325 3326 3327 3328 3329 3330 3331 3332 3333 3334
  debug_info.set_shared(raw_shared);
  debug_info.set_debugger_hints(0);
  DCHECK_EQ(DebugInfo::kNoDebuggingId, debug_info.debugging_id());
  debug_info.set_script(raw_shared.script_or_debug_info(kAcquireLoad));
  HeapObject undefined = *undefined_value();
  debug_info.set_original_bytecode_array(undefined, kReleaseStore,
                                         SKIP_WRITE_BARRIER);
  debug_info.set_debug_bytecode_array(undefined, kReleaseStore,
                                      SKIP_WRITE_BARRIER);
  debug_info.set_break_points(*empty_fixed_array(), SKIP_WRITE_BARRIER);
3335

3336 3337
  // Link debug info to function.
  raw_shared.SetDebugInfo(debug_info);
3338

3339
  return handle(debug_info, isolate());
3340 3341
}

3342
Handle<BreakPointInfo> Factory::NewBreakPointInfo(int source_position) {
3343 3344
  auto new_break_point_info = NewStructInternal<BreakPointInfo>(
      BREAK_POINT_INFO_TYPE, AllocationType::kOld);
3345
  DisallowGarbageCollection no_gc;
3346 3347 3348
  new_break_point_info.set_source_position(source_position);
  new_break_point_info.set_break_points(*undefined_value(), SKIP_WRITE_BARRIER);
  return handle(new_break_point_info, isolate());
3349
}
3350

3351
// Allocates a BreakPoint struct with the given id and condition string.
Handle<BreakPoint> Factory::NewBreakPoint(int id, Handle<String> condition) {
  auto new_break_point =
      NewStructInternal<BreakPoint>(BREAK_POINT_TYPE, AllocationType::kOld);
  DisallowGarbageCollection no_gc;
  new_break_point.set_id(id);
  new_break_point.set_condition(*condition);
  return handle(new_break_point, isolate());
}

3360
Handle<CallSiteInfo> Factory::NewCallSiteInfo(
3361
    Handle<Object> receiver_or_instance, Handle<Object> function,
3362 3363
    Handle<HeapObject> code_object, int code_offset_or_source_position,
    int flags, Handle<FixedArray> parameters) {
3364 3365
  auto info = NewStructInternal<CallSiteInfo>(CALL_SITE_INFO_TYPE,
                                              AllocationType::kYoung);
3366
  DisallowGarbageCollection no_gc;
3367 3368
  info.set_receiver_or_instance(*receiver_or_instance, SKIP_WRITE_BARRIER);
  info.set_function(*function, SKIP_WRITE_BARRIER);
3369 3370
  info.set_code_object(*code_object, SKIP_WRITE_BARRIER);
  info.set_code_offset_or_source_position(code_offset_or_source_position);
3371
  info.set_flags(flags);
3372
  info.set_parameters(*parameters, SKIP_WRITE_BARRIER);
3373
  return handle(info, isolate());
3374 3375
}

3376
Handle<StackFrameInfo> Factory::NewStackFrameInfo(
3377 3378 3379
    Handle<HeapObject> shared_or_script, int bytecode_offset_or_source_position,
    Handle<String> function_name, bool is_constructor) {
  DCHECK_GE(bytecode_offset_or_source_position, 0);
3380 3381 3382 3383
  StackFrameInfo info = NewStructInternal<StackFrameInfo>(
      STACK_FRAME_INFO_TYPE, AllocationType::kYoung);
  DisallowGarbageCollection no_gc;
  info.set_flags(0);
3384 3385 3386
  info.set_shared_or_script(*shared_or_script, SKIP_WRITE_BARRIER);
  info.set_bytecode_offset_or_source_position(
      bytecode_offset_or_source_position);
3387 3388 3389 3390 3391
  info.set_function_name(*function_name, SKIP_WRITE_BARRIER);
  info.set_is_constructor(is_constructor);
  return handle(info, isolate());
}

3392
Handle<JSObject> Factory::NewArgumentsObject(Handle<JSFunction> callee,
3393
                                             int length) {
3394 3395
  bool strict_mode_callee = is_strict(callee->shared().language_mode()) ||
                            !callee->shared().has_simple_parameters();
3396 3397 3398 3399
  Handle<Map> map = strict_mode_callee ? isolate()->strict_arguments_map()
                                       : isolate()->sloppy_arguments_map();
  AllocationSiteUsageContext context(isolate(), Handle<AllocationSite>(),
                                     false);
3400
  DCHECK(!isolate()->has_pending_exception());
3401 3402
  Handle<JSObject> result = NewJSObjectFromMap(map);
  Handle<Smi> value(Smi::FromInt(length), isolate());
3403
  Object::SetProperty(isolate(), result, length_string(), value,
3404 3405
                      StoreOrigin::kMaybeKeyed,
                      Just(ShouldThrow::kThrowOnError))
3406
      .Assert();
3407
  if (!strict_mode_callee) {
3408
    Object::SetProperty(isolate(), result, callee_string(), callee,
3409 3410
                        StoreOrigin::kMaybeKeyed,
                        Just(ShouldThrow::kThrowOnError))
3411
        .Assert();
3412 3413
  }
  return result;
3414 3415
}

3416
Handle<Map> Factory::ObjectLiteralMapFromCache(Handle<NativeContext> context,
3417 3418
                                               int number_of_properties) {
  // Use initial slow object proto map for too many properties.
3419
  if (number_of_properties >= JSObject::kMapCacheSize) {
3420
    return handle(context->slow_object_with_object_prototype_map(), isolate());
3421
  }
3422

3423 3424 3425 3426 3427 3428 3429 3430 3431 3432
  Handle<WeakFixedArray> cache(WeakFixedArray::cast(context->map_cache()),
                               isolate());

  // Check to see whether there is a matching element in the cache.
  MaybeObject result = cache->Get(number_of_properties);
  HeapObject heap_object;
  if (result->GetHeapObjectIfWeak(&heap_object)) {
    Map map = Map::cast(heap_object);
    DCHECK(!map.is_dictionary_map());
    return handle(map, isolate());
3433
  }
3434

3435 3436
  // Create a new map and add it to the cache.
  Handle<Map> map = Map::Create(isolate(), number_of_properties);
3437
  DCHECK(!map->is_dictionary_map());
3438
  cache->Set(number_of_properties, HeapObjectReference::Weak(*map));
3439
  return map;
3440 3441
}

3442 3443 3444 3445 3446 3447 3448 3449 3450 3451
Handle<MegaDomHandler> Factory::NewMegaDomHandler(MaybeObjectHandle accessor,
                                                  MaybeObjectHandle context) {
  Handle<Map> map = read_only_roots().mega_dom_handler_map_handle();
  MegaDomHandler handler = MegaDomHandler::cast(New(map, AllocationType::kOld));
  DisallowGarbageCollection no_gc;
  handler.set_accessor(*accessor);
  handler.set_context(*context);
  return handle(handler, isolate());
}

3452 3453
Handle<LoadHandler> Factory::NewLoadHandler(int data_count,
                                            AllocationType allocation) {
3454
  Handle<Map> map;
3455 3456 3457 3458 3459 3460 3461 3462 3463 3464 3465 3466
  switch (data_count) {
    case 1:
      map = load_handler1_map();
      break;
    case 2:
      map = load_handler2_map();
      break;
    case 3:
      map = load_handler3_map();
      break;
    default:
      UNREACHABLE();
3467
  }
3468
  return handle(LoadHandler::cast(New(map, allocation)), isolate());
3469 3470 3471 3472
}

// Allocates a StoreHandler (old space) with room for {data_count} data
// fields (0-3); the size is encoded in the map chosen here.
Handle<StoreHandler> Factory::NewStoreHandler(int data_count) {
  Handle<Map> map;
  switch (data_count) {
    case 0:
      map = store_handler0_map();
      break;
    case 1:
      map = store_handler1_map();
      break;
    case 2:
      map = store_handler2_map();
      break;
    case 3:
      map = store_handler3_map();
      break;
    default:
      UNREACHABLE();
  }
  return handle(StoreHandler::cast(New(map, AllocationType::kOld)), isolate());
}

// Installs a fresh ATOM-type data array on {regexp}, recording source, flags,
// and the atom pattern {data}.
void Factory::SetRegExpAtomData(Handle<JSRegExp> regexp, Handle<String> source,
                                JSRegExp::Flags flags, Handle<Object> data) {
  FixedArray store =
      *NewFixedArray(JSRegExp::kAtomDataSize, AllocationType::kYoung);
  DisallowGarbageCollection no_gc;
  store.set(JSRegExp::kTagIndex, Smi::FromInt(JSRegExp::ATOM));
  store.set(JSRegExp::kSourceIndex, *source, SKIP_WRITE_BARRIER);
  store.set(JSRegExp::kFlagsIndex, Smi::FromInt(flags));
  store.set(JSRegExp::kAtomPatternIndex, *data, SKIP_WRITE_BARRIER);
  regexp->set_data(store);
}

3504
void Factory::SetRegExpIrregexpData(Handle<JSRegExp> regexp,
3505
                                    Handle<String> source,
3506 3507 3508
                                    JSRegExp::Flags flags, int capture_count,
                                    uint32_t backtrack_limit) {
  DCHECK(Smi::IsValid(backtrack_limit));
3509 3510
  FixedArray store =
      *NewFixedArray(JSRegExp::kIrregexpDataSize, AllocationType::kYoung);
3511
  DisallowGarbageCollection no_gc;
3512
  Smi uninitialized = Smi::FromInt(JSRegExp::kUninitializedValue);
3513 3514 3515
  Smi ticks_until_tier_up = FLAG_regexp_tier_up
                                ? Smi::FromInt(FLAG_regexp_tier_up_ticks)
                                : uninitialized;
3516
  store.set(JSRegExp::kTagIndex, Smi::FromInt(JSRegExp::IRREGEXP));
3517
  store.set(JSRegExp::kSourceIndex, *source, SKIP_WRITE_BARRIER);
3518 3519 3520 3521 3522 3523 3524 3525 3526 3527 3528
  store.set(JSRegExp::kFlagsIndex, Smi::FromInt(flags));
  store.set(JSRegExp::kIrregexpLatin1CodeIndex, uninitialized);
  store.set(JSRegExp::kIrregexpUC16CodeIndex, uninitialized);
  store.set(JSRegExp::kIrregexpLatin1BytecodeIndex, uninitialized);
  store.set(JSRegExp::kIrregexpUC16BytecodeIndex, uninitialized);
  store.set(JSRegExp::kIrregexpMaxRegisterCountIndex, Smi::zero());
  store.set(JSRegExp::kIrregexpCaptureCountIndex, Smi::FromInt(capture_count));
  store.set(JSRegExp::kIrregexpCaptureNameMapIndex, uninitialized);
  store.set(JSRegExp::kIrregexpTicksUntilTierUpIndex, ticks_until_tier_up);
  store.set(JSRegExp::kIrregexpBacktrackLimit, Smi::FromInt(backtrack_limit));
  regexp->set_data(store);
3529 3530
}

3531 3532 3533 3534
void Factory::SetRegExpExperimentalData(Handle<JSRegExp> regexp,
                                        Handle<String> source,
                                        JSRegExp::Flags flags,
                                        int capture_count) {
3535 3536
  FixedArray store =
      *NewFixedArray(JSRegExp::kExperimentalDataSize, AllocationType::kYoung);
3537
  DisallowGarbageCollection no_gc;
3538 3539
  Smi uninitialized = Smi::FromInt(JSRegExp::kUninitializedValue);

3540
  store.set(JSRegExp::kTagIndex, Smi::FromInt(JSRegExp::EXPERIMENTAL));
3541
  store.set(JSRegExp::kSourceIndex, *source, SKIP_WRITE_BARRIER);
3542 3543 3544 3545 3546 3547 3548 3549 3550 3551 3552
  store.set(JSRegExp::kFlagsIndex, Smi::FromInt(flags));
  store.set(JSRegExp::kIrregexpLatin1CodeIndex, uninitialized);
  store.set(JSRegExp::kIrregexpUC16CodeIndex, uninitialized);
  store.set(JSRegExp::kIrregexpLatin1BytecodeIndex, uninitialized);
  store.set(JSRegExp::kIrregexpUC16BytecodeIndex, uninitialized);
  store.set(JSRegExp::kIrregexpMaxRegisterCountIndex, uninitialized);
  store.set(JSRegExp::kIrregexpCaptureCountIndex, Smi::FromInt(capture_count));
  store.set(JSRegExp::kIrregexpCaptureNameMapIndex, uninitialized);
  store.set(JSRegExp::kIrregexpTicksUntilTierUpIndex, uninitialized);
  store.set(JSRegExp::kIrregexpBacktrackLimit, uninitialized);
  regexp->set_data(store);
3553 3554
}

3555 3556 3557 3558 3559 3560
Handle<RegExpMatchInfo> Factory::NewRegExpMatchInfo() {
  // Initially, the last match info consists of all fixed fields plus space for
  // the match itself (i.e., 2 capture indices).
  static const int kInitialSize = RegExpMatchInfo::kFirstCaptureIndex +
                                  RegExpMatchInfo::kInitialCaptureIndices;

3561 3562
  Handle<FixedArray> elems =
      NewFixedArray(kInitialSize, AllocationType::kYoung);
3563
  Handle<RegExpMatchInfo> result = Handle<RegExpMatchInfo>::cast(elems);
3564 3565 3566 3567
  {
    DisallowGarbageCollection no_gc;
    RegExpMatchInfo raw = *result;
    raw.SetNumberOfCaptureRegisters(RegExpMatchInfo::kInitialCaptureIndices);
3568 3569
    raw.SetLastSubject(*empty_string(), SKIP_WRITE_BARRIER);
    raw.SetLastInput(*undefined_value(), SKIP_WRITE_BARRIER);
3570 3571 3572
    raw.SetCapture(0, 0);
    raw.SetCapture(1, 0);
  }
3573 3574
  return result;
}
3575

3576
// Maps the global constant names "undefined", "NaN", and "Infinity" to their
// values; returns a null handle for any other name.
Handle<Object> Factory::GlobalConstantFor(Handle<Name> name) {
  if (Name::Equals(isolate(), name, undefined_string())) {
    return undefined_value();
  }
  if (Name::Equals(isolate(), name, NaN_string())) return nan_value();
  if (Name::Equals(isolate(), name, Infinity_string())) return infinity_value();
  return Handle<Object>::null();
}

3585 3586 3587 3588 3589 3590 3591 3592 3593 3594 3595
Handle<String> Factory::ToPrimitiveHintString(ToPrimitiveHint hint) {
  switch (hint) {
    case ToPrimitiveHint::kDefault:
      return default_string();
    case ToPrimitiveHint::kNumber:
      return number_string();
    case ToPrimitiveHint::kString:
      return string_string();
  }
  UNREACHABLE();
}
3596

3597 3598
// Creates the map for sloppy-mode functions: accessors for length, name (or
// an in-object name field, depending on {function_mode}), arguments, caller,
// and - for modes with a prototype - a prototype accessor.
Handle<Map> Factory::CreateSloppyFunctionMap(
    FunctionMode function_mode, MaybeHandle<JSFunction> maybe_empty_function) {
  bool has_prototype = IsFunctionModeWithPrototype(function_mode);
  int header_size = has_prototype ? JSFunction::kSizeWithPrototype
                                  : JSFunction::kSizeWithoutPrototype;
  // length, name, arguments, caller (+ prototype).
  int descriptors_count = has_prototype ? 5 : 4;
  int inobject_properties_count = 0;
  if (IsFunctionModeWithName(function_mode)) ++inobject_properties_count;

  Handle<Map> map = NewMap(
      JS_FUNCTION_TYPE, header_size + inobject_properties_count * kTaggedSize,
      TERMINAL_FAST_ELEMENTS_KIND, inobject_properties_count);
  {
    DisallowGarbageCollection no_gc;
    Map raw_map = *map;
    raw_map.set_has_prototype_slot(has_prototype);
    raw_map.set_is_constructor(has_prototype);
    raw_map.set_is_callable(true);
  }
  Handle<JSFunction> empty_function;
  if (maybe_empty_function.ToHandle(&empty_function)) {
    Map::SetPrototype(isolate(), map, empty_function);
  }

  //
  // Setup descriptors array.
  //
  Map::EnsureDescriptorSlack(isolate(), map, descriptors_count);

  PropertyAttributes ro_attribs =
      static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY);
  PropertyAttributes rw_attribs =
      static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE);
  PropertyAttributes roc_attribs =
      static_cast<PropertyAttributes>(DONT_ENUM | READ_ONLY);

  int field_index = 0;
  STATIC_ASSERT(JSFunctionOrBoundFunction::kLengthDescriptorIndex == 0);
  {  // Add length accessor.
    Descriptor d = Descriptor::AccessorConstant(
        length_string(), function_length_accessor(), roc_attribs);
    map->AppendDescriptor(isolate(), &d);
  }

  STATIC_ASSERT(JSFunctionOrBoundFunction::kNameDescriptorIndex == 1);
  if (IsFunctionModeWithName(function_mode)) {
    // Add name field.
    Handle<Name> name = isolate()->factory()->name_string();
    Descriptor d = Descriptor::DataField(isolate(), name, field_index++,
                                         roc_attribs, Representation::Tagged());
    map->AppendDescriptor(isolate(), &d);

  } else {
    // Add name accessor.
    Descriptor d = Descriptor::AccessorConstant(
        name_string(), function_name_accessor(), roc_attribs);
    map->AppendDescriptor(isolate(), &d);
  }
  {  // Add arguments accessor.
    Descriptor d = Descriptor::AccessorConstant(
        arguments_string(), function_arguments_accessor(), ro_attribs);
    map->AppendDescriptor(isolate(), &d);
  }
  {  // Add caller accessor.
    Descriptor d = Descriptor::AccessorConstant(
        caller_string(), function_caller_accessor(), ro_attribs);
    map->AppendDescriptor(isolate(), &d);
  }
  if (IsFunctionModeWithPrototype(function_mode)) {
    // Add prototype accessor.
    PropertyAttributes attribs =
        IsFunctionModeWithWritablePrototype(function_mode) ? rw_attribs
                                                           : ro_attribs;
    Descriptor d = Descriptor::AccessorConstant(
        prototype_string(), function_prototype_accessor(), attribs);
    map->AppendDescriptor(isolate(), &d);
  }
  DCHECK_EQ(inobject_properties_count, field_index);
  DCHECK_EQ(0,
            map->instance_descriptors(isolate()).number_of_slack_descriptors());
  LOG(isolate(), MapDetails(*map));
  return map;
}

// Creates the map for strict-mode functions: length and name descriptors
// (name may be an in-object field depending on {function_mode}), plus a
// prototype accessor for modes with a prototype. Unlike the sloppy map, there
// are no arguments/caller accessors.
Handle<Map> Factory::CreateStrictFunctionMap(
    FunctionMode function_mode, Handle<JSFunction> empty_function) {
  bool has_prototype = IsFunctionModeWithPrototype(function_mode);
  int header_size = has_prototype ? JSFunction::kSizeWithPrototype
                                  : JSFunction::kSizeWithoutPrototype;
  int inobject_properties_count = 0;
  // length and prototype accessors or just length accessor.
  int descriptors_count = IsFunctionModeWithPrototype(function_mode) ? 2 : 1;
  if (IsFunctionModeWithName(function_mode)) {
    ++inobject_properties_count;  // name property.
  } else {
    ++descriptors_count;  // name accessor.
  }
  descriptors_count += inobject_properties_count;

  Handle<Map> map = NewMap(
      JS_FUNCTION_TYPE, header_size + inobject_properties_count * kTaggedSize,
      TERMINAL_FAST_ELEMENTS_KIND, inobject_properties_count);
  {
    DisallowGarbageCollection no_gc;
    Map raw_map = *map;
    raw_map.set_has_prototype_slot(has_prototype);
    raw_map.set_is_constructor(has_prototype);
    raw_map.set_is_callable(true);
  }
  Map::SetPrototype(isolate(), map, empty_function);

  //
  // Setup descriptors array.
  //
  Map::EnsureDescriptorSlack(isolate(), map, descriptors_count);

  PropertyAttributes rw_attribs =
      static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE);
  PropertyAttributes ro_attribs =
      static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY);
  PropertyAttributes roc_attribs =
      static_cast<PropertyAttributes>(DONT_ENUM | READ_ONLY);

  int field_index = 0;
  STATIC_ASSERT(JSFunction::kLengthDescriptorIndex == 0);
  {  // Add length accessor.
    Descriptor d = Descriptor::AccessorConstant(
        length_string(), function_length_accessor(), roc_attribs);
    map->AppendDescriptor(isolate(), &d);
  }

  STATIC_ASSERT(JSFunction::kNameDescriptorIndex == 1);
  if (IsFunctionModeWithName(function_mode)) {
    // Add name field.
    Handle<Name> name = isolate()->factory()->name_string();
    Descriptor d = Descriptor::DataField(isolate(), name, field_index++,
                                         roc_attribs, Representation::Tagged());
    map->AppendDescriptor(isolate(), &d);

  } else {
    // Add name accessor.
    Descriptor d = Descriptor::AccessorConstant(
        name_string(), function_name_accessor(), roc_attribs);
    map->AppendDescriptor(isolate(), &d);
  }

  if (IsFunctionModeWithPrototype(function_mode)) {
    // Add prototype accessor.
    PropertyAttributes attribs =
        IsFunctionModeWithWritablePrototype(function_mode) ? rw_attribs
                                                           : ro_attribs;
    Descriptor d = Descriptor::AccessorConstant(
        prototype_string(), function_prototype_accessor(), attribs);
    map->AppendDescriptor(isolate(), &d);
  }
  DCHECK_EQ(inobject_properties_count, field_index);
  DCHECK_EQ(0,
            map->instance_descriptors(isolate()).number_of_slack_descriptors());
  LOG(isolate(), MapDetails(*map));
  return map;
}

3759
Handle<Map> Factory::CreateClassFunctionMap(Handle<JSFunction> empty_function) {
3760 3761
  Handle<Map> map =
      NewMap(JS_CLASS_CONSTRUCTOR_TYPE, JSFunction::kSizeWithPrototype);
3762 3763 3764 3765 3766 3767 3768 3769
  {
    DisallowGarbageCollection no_gc;
    Map raw_map = *map;
    raw_map.set_has_prototype_slot(true);
    raw_map.set_is_constructor(true);
    raw_map.set_is_prototype_map(true);
    raw_map.set_is_callable(true);
  }
3770
  Map::SetPrototype(isolate(), map, empty_function);
3771

3772 3773 3774
  //
  // Setup descriptors array.
  //
3775
  Map::EnsureDescriptorSlack(isolate(), map, 2);
3776

3777 3778
  PropertyAttributes ro_attribs =
      static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY);
3779 3780 3781 3782
  PropertyAttributes roc_attribs =
      static_cast<PropertyAttributes>(DONT_ENUM | READ_ONLY);

  STATIC_ASSERT(JSFunction::kLengthDescriptorIndex == 0);
3783
  {  // Add length accessor.
3784 3785
    Descriptor d = Descriptor::AccessorConstant(
        length_string(), function_length_accessor(), roc_attribs);
3786
    map->AppendDescriptor(isolate(), &d);
3787 3788 3789
  }

  {
3790
    // Add prototype accessor.
3791
    Descriptor d = Descriptor::AccessorConstant(
3792
        prototype_string(), function_prototype_accessor(), ro_attribs);
3793
    map->AppendDescriptor(isolate(), &d);
3794
  }
3795
  LOG(isolate(), MapDetails(*map));
3796
  return map;
3797 3798
}

3799 3800 3801
Handle<JSPromise> Factory::NewJSPromiseWithoutHook() {
  Handle<JSPromise> promise =
      Handle<JSPromise>::cast(NewJSObject(isolate()->promise_function()));
3802 3803
  DisallowGarbageCollection no_gc;
  JSPromise raw = *promise;
3804
  raw.set_reactions_or_result(Smi::zero(), SKIP_WRITE_BARRIER);
3805 3806 3807
  raw.set_flags(0);
  ZeroEmbedderFields(*promise);
  DCHECK_EQ(raw.GetEmbedderFieldCount(), v8::Promise::kEmbedderFieldCount);
3808
  return promise;
3809 3810
}

3811 3812
Handle<JSPromise> Factory::NewJSPromise() {
  Handle<JSPromise> promise = NewJSPromiseWithoutHook();
3813 3814
  isolate()->RunAllPromiseHooks(PromiseHookType::kInit, promise,
                                undefined_value());
3815 3816 3817
  return promise;
}

3818 3819 3820 3821
Handle<CallHandlerInfo> Factory::NewCallHandlerInfo(bool has_no_side_effect) {
  Handle<Map> map = has_no_side_effect
                        ? side_effect_free_call_handler_info_map()
                        : side_effect_call_handler_info_map();
3822
  CallHandlerInfo info = CallHandlerInfo::cast(New(map, AllocationType::kOld));
3823
  DisallowGarbageCollection no_gc;
3824 3825 3826 3827 3828
  Object undefined_value = read_only_roots().undefined_value();
  info.set_callback(undefined_value, SKIP_WRITE_BARRIER);
  info.set_js_callback(undefined_value, SKIP_WRITE_BARRIER);
  info.set_data(undefined_value, SKIP_WRITE_BARRIER);
  return handle(info, isolate());
3829 3830
}

3831 3832 3833 3834 3835 3836 3837 3838
bool Factory::CanAllocateInReadOnlySpace() {
  return isolate()->heap()->CanAllocateInReadOnlySpace();
}

bool Factory::EmptyStringRootIsInitialized() {
  return isolate()->roots_table()[RootIndex::kempty_string] != kNullAddress;
}

3839 3840 3841 3842 3843 3844
AllocationType Factory::AllocationTypeForInPlaceInternalizableString() {
  return isolate()
      ->heap()
      ->allocation_type_for_in_place_internalizable_strings();
}

3845
Handle<JSFunction> Factory::NewFunctionForTesting(Handle<String> name) {
3846
  Handle<SharedFunctionInfo> info =
3847
      NewSharedFunctionInfoForBuiltin(name, Builtin::kIllegal);
3848 3849 3850
  info->set_language_mode(LanguageMode::kSloppy);
  return JSFunctionBuilder{isolate(), info, isolate()->native_context()}
      .Build();
3851 3852 3853 3854 3855 3856 3857 3858 3859 3860 3861
}

// Builder for JSFunction instances. {sfi} and {context} are mandatory; the
// map and feedback cell are optional and default-resolved in Build() (see
// PrepareMap()/PrepareFeedbackCell()).
Factory::JSFunctionBuilder::JSFunctionBuilder(Isolate* isolate,
                                              Handle<SharedFunctionInfo> sfi,
                                              Handle<Context> context)
    : isolate_(isolate), sfi_(sfi), context_(context) {}

// Resolves defaults (map, feedback cell), allocates the JSFunction, ensures a
// feedback vector for baseline code, and runs post-instantiation compiler
// hooks.
Handle<JSFunction> Factory::JSFunctionBuilder::Build() {
  PrepareMap();
  PrepareFeedbackCell();

  Handle<Code> code = handle(FromCodeT(sfi_->GetCode()), isolate_);
  Handle<JSFunction> result = BuildRaw(code);

  if (code->kind() == CodeKind::BASELINE) {
    // Baseline code requires an installed feedback vector.
    IsCompiledScope is_compiled_scope(sfi_->is_compiled_scope(isolate_));
    JSFunction::EnsureFeedbackVector(result, &is_compiled_scope);
  }

  Compiler::PostInstantiation(result);
  return result;
}

// Allocates and field-initializes the JSFunction object itself. Requires map
// and feedback cell to be resolved already (PrepareMap/PrepareFeedbackCell).
Handle<JSFunction> Factory::JSFunctionBuilder::BuildRaw(Handle<Code> code) {
  Isolate* isolate = isolate_;
  Factory* factory = isolate_->factory();

  Handle<Map> map = maybe_map_.ToHandleChecked();
  Handle<FeedbackCell> feedback_cell = maybe_feedback_cell_.ToHandleChecked();

  DCHECK(InstanceTypeChecker::IsJSFunction(map->instance_type()));

  // Allocation.
  JSFunction function = JSFunction::cast(factory->New(map, allocation_type_));
  DisallowGarbageCollection no_gc;

  // Young-space allocations need no write barrier for stores into the new
  // object.
  WriteBarrierMode mode = allocation_type_ == AllocationType::kYoung
                              ? SKIP_WRITE_BARRIER
                              : UPDATE_WRITE_BARRIER;
  // Header initialization.
  function.initialize_properties(isolate);
  function.initialize_elements();
  function.set_shared(*sfi_, mode);
  function.set_context(*context_, mode);
  function.set_raw_feedback_cell(*feedback_cell, mode);
  function.set_code(*code, kReleaseStore, mode);
  if (function.has_prototype_slot()) {
    function.set_prototype_or_initial_map(
        ReadOnlyRoots(isolate).the_hole_value(), kReleaseStore,
        SKIP_WRITE_BARRIER);
  }

  // Potentially body initialization.
  factory->InitializeJSObjectBody(
      function, *map, JSFunction::GetHeaderSize(map->has_prototype_slot()));

  return handle(function, isolate_);
}

void Factory::JSFunctionBuilder::PrepareMap() {
  if (maybe_map_.is_null()) {
    // No specific map requested, use the default.
    maybe_map_ = handle(
        Map::cast(context_->native_context().get(sfi_->function_map_index())),
        isolate_);
  }
}

// Resolves the feedback cell: bump the closure count on a caller-provided
// cell, otherwise use the shared many_closures_cell.
void Factory::JSFunctionBuilder::PrepareFeedbackCell() {
  Handle<FeedbackCell> feedback_cell;
  if (maybe_feedback_cell_.ToHandle(&feedback_cell)) {
    // Track the newly-created closure.
    feedback_cell->IncrementClosureCount(isolate_);
  } else {
    // Fall back to the many_closures_cell.
    maybe_feedback_cell_ = isolate_->factory()->many_closures_cell();
  }
}

}  // namespace internal
}  // namespace v8