runtime-test.cc 54.4 KB
Newer Older
1 2 3 4
// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

5 6 7
#include <fstream>
#include <memory>

8
#include "include/v8-function.h"
9
#include "include/v8-profiler.h"
10
#include "src/api/api-inl.h"
11
#include "src/base/numbers/double.h"
12
#include "src/base/platform/mutex.h"
13 14
#include "src/codegen/assembler-inl.h"
#include "src/codegen/compiler.h"
15
#include "src/codegen/pending-optimization-table.h"
16
#include "src/compiler-dispatcher/lazy-compile-dispatcher.h"
17
#include "src/compiler-dispatcher/optimizing-compile-dispatcher.h"
Simon Zünd's avatar
Simon Zünd committed
18
#include "src/debug/debug-evaluate.h"
19
#include "src/deoptimizer/deoptimizer.h"
20 21 22
#include "src/execution/arguments-inl.h"
#include "src/execution/frames-inl.h"
#include "src/execution/isolate-inl.h"
23
#include "src/execution/protectors-inl.h"
24
#include "src/execution/tiering-manager.h"
25 26
#include "src/heap/heap-inl.h"  // For ToBoolean. TODO(jkummerow): Drop.
#include "src/heap/heap-write-barrier-inl.h"
27
#include "src/ic/stub-cache.h"
28
#include "src/logging/counters.h"
29
#include "src/objects/heap-object-inl.h"
30
#include "src/objects/js-array-inl.h"
31
#include "src/objects/js-function-inl.h"
32
#include "src/objects/js-regexp-inl.h"
33
#include "src/objects/managed-inl.h"
34
#include "src/objects/smi.h"
35
#include "src/profiler/heap-snapshot-generator.h"
36 37
#include "src/regexp/regexp.h"
#include "src/runtime/runtime-utils.h"
38
#include "src/snapshot/snapshot.h"
39
#include "src/web-snapshot/web-snapshot.h"
40

41 42 43 44
#ifdef V8_ENABLE_MAGLEV
#include "src/maglev/maglev.h"
#endif  // V8_ENABLE_MAGLEV

45
#if V8_ENABLE_WEBASSEMBLY
46
#include "src/wasm/wasm-engine.h"
47
#endif  // V8_ENABLE_WEBASSEMBLY
48

49 50 51
namespace v8 {
namespace internal {

52
namespace {
53 54 55 56 57
// Crashes (CHECK failure) unless --fuzzing is enabled; under fuzzing it
// returns undefined so fuzzers survive malformed intrinsic arguments.
V8_WARN_UNUSED_RESULT Object CrashUnlessFuzzing(Isolate* isolate) {
  CHECK(FLAG_fuzzing);
  return ReadOnlyRoots(isolate).undefined_value();
}

58 59 60 61 62
// Boolean-returning variant of CrashUnlessFuzzing: crashes unless --fuzzing
// is enabled, otherwise reports false to the caller.
V8_WARN_UNUSED_RESULT bool CrashUnlessFuzzingReturnFalse(Isolate* isolate) {
  CHECK(FLAG_fuzzing);
  return false;
}

63 64
// Returns |value| unless correctness-fuzzer-supressions is enabled,
// otherwise returns undefined_value.
65
V8_WARN_UNUSED_RESULT Object ReturnFuzzSafe(Object value, Isolate* isolate) {
66 67 68
  return FLAG_correctness_fuzzer_suppressions
             ? ReadOnlyRoots(isolate).undefined_value()
             : value;
69 70
}

71 72 73 74 75 76 77 78 79 80 81 82 83 84 85
// Assert that the given argument is a number within the Int32 range
// and convert it to int32_t.  If the argument is not an Int32 we crash if not
// in fuzzing mode.
// NOTE: expands |args|/|isolate| from the enclosing scope and evaluates
// |index| more than once — pass a plain constant.
#define CONVERT_INT32_ARG_FUZZ_SAFE(name, index)                   \
  if (!args[index].IsNumber()) return CrashUnlessFuzzing(isolate); \
  int32_t name = 0;                                                \
  if (!args[index].ToInt32(&name)) return CrashUnlessFuzzing(isolate);

// Cast the given object to a boolean and store it in a variable with
// the given name.  If the object is not a boolean we crash if not in
// fuzzing mode.
// NOTE: like the macro above, |index| is evaluated at the expansion site.
#define CONVERT_BOOLEAN_ARG_FUZZ_SAFE(name, index)                  \
  if (!args[index].IsBoolean()) return CrashUnlessFuzzing(isolate); \
  bool name = args[index].IsTrue(isolate);

86 87 88 89 90 91 92 93 94 95 96 97
// Returns whether |function| is an asm.js function: either its
// SharedFunctionInfo already carries asm-wasm data, or its code is still the
// InstantiateAsmJs builtin. Always false without WebAssembly support.
bool IsAsmWasmFunction(Isolate* isolate, JSFunction function) {
  DisallowGarbageCollection no_gc;
#if V8_ENABLE_WEBASSEMBLY
  // For simplicity we include invalid asm.js functions whose code hasn't yet
  // been updated to CompileLazy but is still the InstantiateAsmJs builtin.
  return function.shared().HasAsmWasmData() ||
         function.code().builtin_id() == Builtin::kInstantiateAsmJs;
#else
  return false;
#endif  // V8_ENABLE_WEBASSEMBLY
}

98 99
}  // namespace

100 101 102 103 104 105 106 107
RUNTIME_FUNCTION(Runtime_ClearMegamorphicStubCache) {
  HandleScope scope(isolate);
  DCHECK_EQ(0, args.length());
  // Drop all cached handlers from both megamorphic stub caches.
  isolate->store_stub_cache()->Clear();
  isolate->load_stub_cache()->Clear();
  return ReadOnlyRoots(isolate).undefined_value();
}

108 109
RUNTIME_FUNCTION(Runtime_ConstructDouble) {
  HandleScope scope(isolate);
  DCHECK_EQ(2, args.length());
  // Assemble a double from its raw high and low 32-bit halves.
  const uint64_t high_bits = static_cast<uint64_t>(NumberToUint32(args[0]));
  const uint64_t low_bits = NumberToUint32(args[1]);
  const uint64_t bits = (high_bits << 32) | low_bits;
  return *isolate->factory()->NewNumber(base::uint64_to_double(bits));
}

117 118 119
RUNTIME_FUNCTION(Runtime_ConstructConsString) {
  HandleScope scope(isolate);
  DCHECK_EQ(2, args.length());
  Handle<String> lhs = args.at<String>(0);
  Handle<String> rhs = args.at<String>(1);

  // This test helper only supports one-byte inputs.
  CHECK(lhs->IsOneByteRepresentation());
  CHECK(rhs->IsOneByteRepresentation());

  const bool kIsOneByte = true;
  const int combined_length = lhs->length() + rhs->length();
  return *isolate->factory()->NewConsString(lhs, rhs, combined_length,
                                            kIsOneByte);
}

131 132 133
RUNTIME_FUNCTION(Runtime_ConstructSlicedString) {
  HandleScope scope(isolate);
  DCHECK_EQ(2, args.length());
  Handle<String> parent = args.at<String>(0);
  const int start = args.smi_value_at(1);

  CHECK(parent->IsOneByteRepresentation());
  CHECK_LT(start, parent->length());

  // Slice from |start| to the end; the CHECK below verifies the factory
  // actually produced a SlicedString (and not, say, a flat copy).
  Handle<String> slice =
      isolate->factory()->NewSubString(parent, start, parent->length());
  CHECK(slice->IsSlicedString());
  return *slice;
}

146 147
RUNTIME_FUNCTION(Runtime_DeoptimizeFunction) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());

  Handle<Object> maybe_function = args.at(0);
  if (!maybe_function->IsJSFunction()) return CrashUnlessFuzzing(isolate);
  Handle<JSFunction> function = Handle<JSFunction>::cast(maybe_function);

  // Only functions that currently have optimized code attached need
  // deoptimizing; everything else is a no-op.
  if (function->HasAttachedOptimizedCode()) {
    Deoptimizer::DeoptimizeFunction(*function);
  }

  return ReadOnlyRoots(isolate).undefined_value();
}

161 162
RUNTIME_FUNCTION(Runtime_DeoptimizeNow) {
  HandleScope scope(isolate);
  DCHECK_EQ(0, args.length());

  // Deoptimize the JavaScript function on top of the stack, if any.
  Handle<JSFunction> function;
  JavaScriptFrameIterator frame_walker(isolate);
  if (!frame_walker.done()) {
    function = handle(frame_walker.frame()->function(), isolate);
  }
  if (function.is_null()) return CrashUnlessFuzzing(isolate);

  if (function->HasAttachedOptimizedCode()) {
    Deoptimizer::DeoptimizeFunction(*function);
  }

  return ReadOnlyRoots(isolate).undefined_value();
}

179 180
RUNTIME_FUNCTION(Runtime_RunningInSimulator) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  // True only in builds that run generated code under the CPU simulator.
#if defined(USE_SIMULATOR)
  return ReadOnlyRoots(isolate).true_value();
#else
  return ReadOnlyRoots(isolate).false_value();
#endif
}

Simon Zünd's avatar
Simon Zünd committed
189 190 191
RUNTIME_FUNCTION(Runtime_RuntimeEvaluateREPL) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  Handle<String> source = args.at<String>(0);
  // Evaluate |source| as a global script in REPL mode and return its value;
  // a pending exception propagates as a runtime failure.
  Handle<Object> result;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
      isolate, result,
      DebugEvaluate::Global(isolate, source,
                            debug::EvaluateGlobalMode::kDefault,
                            REPLMode::kYes));
  return *result;
}

203 204 205 206 207
// Test intrinsic: reports whether inline caches are enabled (--use-ic).
RUNTIME_FUNCTION(Runtime_ICsAreEnabled) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  return isolate->heap()->ToBoolean(FLAG_use_ic);
}
208 209 210

RUNTIME_FUNCTION(Runtime_IsConcurrentRecompilationSupported) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  // Reports whether this isolate can recompile on background threads.
  const bool supported = isolate->concurrent_recompilation_enabled();
  return isolate->heap()->ToBoolean(supported);
}

216 217 218 219 220 221
// Test intrinsic: reports whether Atomics.wait is allowed to block on this
// isolate.
RUNTIME_FUNCTION(Runtime_IsAtomicsWaitAllowed) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  return isolate->heap()->ToBoolean(isolate->allow_atomics_wait());
}

222
namespace {
223

224
template <CodeKind code_kind>
225
bool CanOptimizeFunction(Handle<JSFunction> function, Isolate* isolate,
226 227 228 229 230 231
                         IsCompiledScope* is_compiled_scope);

template <>
bool CanOptimizeFunction<CodeKind::TURBOFAN>(
    Handle<JSFunction> function, Isolate* isolate,
    IsCompiledScope* is_compiled_scope) {
232
  // The following conditions were lifted (in part) from the DCHECK inside
233
  // JSFunction::MarkForOptimization().
234

235
  if (!function->shared().allows_lazy_compilation()) {
236
    return CrashUnlessFuzzingReturnFalse(isolate);
237
  }
238

239
  // If function isn't compiled, compile it now.
240
  if (!is_compiled_scope->is_compiled() &&
241
      !Compiler::Compile(isolate, function, Compiler::CLEAR_EXCEPTION,
242 243
                         is_compiled_scope)) {
    return CrashUnlessFuzzingReturnFalse(isolate);
244 245
  }

246
  if (!FLAG_turbofan) return false;
247

248
  if (function->shared().optimization_disabled() &&
249
      function->shared().disabled_optimization_reason() ==
250
          BailoutReason::kNeverOptimize) {
251
    return CrashUnlessFuzzingReturnFalse(isolate);
252
  }
253

254
  if (IsAsmWasmFunction(isolate, *function)) {
255 256
    return CrashUnlessFuzzingReturnFalse(isolate);
  }
257

258 259 260 261
  if (FLAG_testing_d8_test_runner) {
    PendingOptimizationTable::MarkedForOptimization(isolate, function);
  }

262
  CodeKind kind = CodeKindForTopTier();
263
  if (function->HasAvailableOptimizedCode() ||
264
      function->HasAvailableCodeKind(kind)) {
265
    DCHECK(function->HasAttachedOptimizedCode() ||
266
           function->ChecksTieringState());
267 268 269
    if (FLAG_testing_d8_test_runner) {
      PendingOptimizationTable::FunctionWasOptimized(isolate, function);
    }
270 271 272 273 274 275
    return false;
  }

  return true;
}

276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292
#ifdef V8_ENABLE_MAGLEV
// Maglev specialization: only checks the flag and the active tier; unlike
// the Turbofan specialization above it does not compile the function here.
template <>
bool CanOptimizeFunction<CodeKind::MAGLEV>(Handle<JSFunction> function,
                                           Isolate* isolate,
                                           IsCompiledScope* is_compiled_scope) {
  if (!FLAG_maglev) return false;

  CHECK(!IsAsmWasmFunction(isolate, *function));

  // TODO(v8:7700): Disabled optimization due to deopts?
  // TODO(v8:7700): Already cached?

  return function->GetActiveTier() < CodeKind::MAGLEV;
}
#endif  // V8_ENABLE_MAGLEV

// Shared implementation of %OptimizeFunctionOnNextCall: validates arguments,
// makes sure the function is compiled, and marks it for Turbofan
// optimization. The optional second argument "concurrent" requests a
// concurrent compile when concurrent recompilation is available.
Object OptimizeFunctionOnNextCall(RuntimeArguments& args, Isolate* isolate) {
  if (args.length() != 1 && args.length() != 2) {
    return CrashUnlessFuzzing(isolate);
  }

  Handle<Object> function_object = args.at(0);
  if (!function_object->IsJSFunction()) return CrashUnlessFuzzing(isolate);
  Handle<JSFunction> function = Handle<JSFunction>::cast(function_object);

  static constexpr CodeKind kCodeKind = CodeKind::TURBOFAN;

  IsCompiledScope is_compiled_scope(
      function->shared().is_compiled_scope(isolate));
  if (!CanOptimizeFunction<kCodeKind>(function, isolate, &is_compiled_scope)) {
    return ReadOnlyRoots(isolate).undefined_value();
  }

  ConcurrencyMode concurrency_mode = ConcurrencyMode::kSynchronous;
  if (args.length() == 2) {
    Handle<Object> type = args.at(1);
    if (!type->IsString()) return CrashUnlessFuzzing(isolate);
    if (Handle<String>::cast(type)->IsOneByteEqualTo(
            base::StaticCharVector("concurrent")) &&
        isolate->concurrent_recompilation_enabled()) {
      concurrency_mode = ConcurrencyMode::kConcurrent;
    }
  }

  // This function may not have been lazily compiled yet, even though its shared
  // function has.
  if (!function->is_compiled()) {
    DCHECK(function->shared().HasBytecodeArray());
    CodeT codet = *BUILTIN_CODE(isolate, InterpreterEntryTrampoline);
    if (function->shared().HasBaselineCode()) {
      codet = function->shared().baseline_code(kAcquireLoad);
    }
    function->set_code(codet);
  }

  TraceManualRecompile(*function, kCodeKind, concurrency_mode);
  JSFunction::EnsureFeedbackVector(isolate, function, &is_compiled_scope);
  // Consistency fix: use kCodeKind (== CodeKind::TURBOFAN) here, matching the
  // kind passed to CanOptimizeFunction and TraceManualRecompile above.
  function->MarkForOptimization(isolate, kCodeKind, concurrency_mode);

  return ReadOnlyRoots(isolate).undefined_value();
}

338
// Makes sure |function| carries a feedback vector, compiling the function
// first when necessary. Returns false if the function disallows lazy
// compilation or compilation fails.
bool EnsureFeedbackVector(Isolate* isolate, Handle<JSFunction> function) {
  if (!function->shared().allows_lazy_compilation()) return false;

  if (function->has_feedback_vector()) return true;

  IsCompiledScope is_compiled_scope(
      function->shared().is_compiled_scope(function->GetIsolate()));
  // An uncompiled JSFunction with an initialized feedback cell does not need
  // compiling here; the CompileLazy builtin handles that case by installing
  // the code from the SFI. Compiling here anyway may even trigger an
  // optimization if FLAG_always_turbofan is set.
  const bool must_compile =
      !function->is_compiled() && !function->has_closure_feedback_cell_array();
  if (must_compile &&
      !Compiler::Compile(isolate, function, Compiler::CLEAR_EXCEPTION,
                         &is_compiled_scope)) {
    return false;
  }

  // Attach a feedback vector to hold type feedback for optimization.
  JSFunction::EnsureFeedbackVector(isolate, function, &is_compiled_scope);
  return true;
}

}  // namespace

367 368
RUNTIME_FUNCTION(Runtime_CompileBaseline) {
  HandleScope scope(isolate);
  if (args.length() != 1) {
    return CrashUnlessFuzzing(isolate);
  }
  Handle<Object> maybe_function = args.at(0);
  if (!maybe_function->IsJSFunction()) return CrashUnlessFuzzing(isolate);
  Handle<JSFunction> function = Handle<JSFunction>::cast(maybe_function);

  IsCompiledScope is_compiled_scope =
      function->shared(isolate).is_compiled_scope(isolate);

  // Only user JavaScript functions are eligible here.
  if (!function->shared(isolate).IsUserJavaScript()) {
    return CrashUnlessFuzzing(isolate);
  }

  // Bytecode must exist before baseline compilation can run.
  if (!is_compiled_scope.is_compiled() &&
      !Compiler::Compile(isolate, function, Compiler::CLEAR_EXCEPTION,
                         &is_compiled_scope)) {
    return CrashUnlessFuzzing(isolate);
  }

  if (!Compiler::CompileBaseline(isolate, function, Compiler::CLEAR_EXCEPTION,
                                 &is_compiled_scope)) {
    return CrashUnlessFuzzing(isolate);
  }

  return *function;
}

398 399 400 401 402 403 404
// TODO(v8:7700): Remove this function once we no longer need it to measure
// maglev compile times. For normal tierup, OptimizeMaglevOnNextCall should be
// used instead.
#ifdef V8_ENABLE_MAGLEV
RUNTIME_FUNCTION(Runtime_BenchMaglev) {
  HandleScope scope(isolate);
  DCHECK_EQ(args.length(), 2);
  Handle<JSFunction> function = args.at<JSFunction>(0);
  const int iterations = args.smi_value_at(1);

  // Compile |iterations| times and print the mean wall-clock time. Only the
  // first compile's code object is kept and installed on the function.
  base::ElapsedTimer timer;
  timer.Start();
  Handle<CodeT> code = Maglev::Compile(isolate, function).ToHandleChecked();
  for (int i = 1; i < iterations; ++i) {
    HandleScope inner_scope(isolate);
    Maglev::Compile(isolate, function);
  }
  PrintF("Maglev compile time: %g ms!\n",
         timer.Elapsed().InMillisecondsF() / iterations);

  function->set_code(*code);

  return ReadOnlyRoots(isolate).undefined_value();
}
#else
RUNTIME_FUNCTION(Runtime_BenchMaglev) {
  PrintF("Maglev is not enabled.\n");
  return ReadOnlyRoots(isolate).undefined_value();
}
#endif  // V8_ENABLE_MAGLEV

430 431 432
RUNTIME_FUNCTION(Runtime_ActiveTierIsMaglev) {
  HandleScope scope(isolate);
  DCHECK_EQ(args.length(), 1);
  Handle<JSFunction> function = args.at<JSFunction>(0);
  // Reports whether the function currently executes Maglev code.
  const bool is_maglev = function->ActiveTierIsMaglev();
  return isolate->heap()->ToBoolean(is_maglev);
}

437 438 439 440
#ifdef V8_ENABLE_MAGLEV
RUNTIME_FUNCTION(Runtime_OptimizeMaglevOnNextCall) {
  HandleScope scope(isolate);
  DCHECK_EQ(args.length(), 1);
  Handle<JSFunction> function = args.at<JSFunction>(0);

  static constexpr CodeKind kCodeKind = CodeKind::MAGLEV;

  IsCompiledScope is_compiled_scope(
      function->shared().is_compiled_scope(isolate));
  if (!CanOptimizeFunction<kCodeKind>(function, isolate, &is_compiled_scope)) {
    return ReadOnlyRoots(isolate).undefined_value();
  }
  DCHECK(is_compiled_scope.is_compiled());
  DCHECK(function->is_compiled());

  // TODO(v8:7700): Support concurrent compiles.
  const ConcurrencyMode concurrency_mode = ConcurrencyMode::kSynchronous;

  TraceManualRecompile(*function, kCodeKind, concurrency_mode);
  JSFunction::EnsureFeedbackVector(isolate, function, &is_compiled_scope);
  function->MarkForOptimization(isolate, kCodeKind, concurrency_mode);

  return ReadOnlyRoots(isolate).undefined_value();
}
#else
RUNTIME_FUNCTION(Runtime_OptimizeMaglevOnNextCall) {
  PrintF("Maglev is not enabled.\n");
  return ReadOnlyRoots(isolate).undefined_value();
}
#endif  // V8_ENABLE_MAGLEV

// TODO(jgruber): Rename to OptimizeTurbofanOnNextCall.
470 471
RUNTIME_FUNCTION(Runtime_OptimizeFunctionOnNextCall) {
  HandleScope scope(isolate);
472
  return OptimizeFunctionOnNextCall(args, isolate);
473 474
}

475 476 477
RUNTIME_FUNCTION(Runtime_EnsureFeedbackVectorForFunction) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  Handle<JSFunction> function = args.at<JSFunction>(0);
  // Best effort: the boolean result of EnsureFeedbackVector is deliberately
  // ignored here.
  EnsureFeedbackVector(isolate, function);
  return ReadOnlyRoots(isolate).undefined_value();
}

RUNTIME_FUNCTION(Runtime_PrepareFunctionForOptimization) {
  HandleScope scope(isolate);
  if ((args.length() != 1 && args.length() != 2) || !args[0].IsJSFunction()) {
    return CrashUnlessFuzzing(isolate);
  }
  Handle<JSFunction> function = args.at<JSFunction>(0);

  // The optional second argument can opt the function into heuristic tier-up.
  bool allow_heuristic_optimization = false;
  if (args.length() == 2) {
    Handle<Object> sync_object = args.at(1);
    if (!sync_object->IsString()) return CrashUnlessFuzzing(isolate);
    Handle<String> sync = Handle<String>::cast(sync_object);
    if (sync->IsOneByteEqualTo(
            base::StaticCharVector("allow heuristic optimization"))) {
      allow_heuristic_optimization = true;
    }
  }

  if (!EnsureFeedbackVector(isolate, function)) {
    return CrashUnlessFuzzing(isolate);
  }

  // If optimization is disabled for the function, return without making it
  // pending optimize for test.
  if (function->shared().optimization_disabled() &&
      function->shared().disabled_optimization_reason() ==
          BailoutReason::kNeverOptimize) {
    return CrashUnlessFuzzing(isolate);
  }

  if (IsAsmWasmFunction(isolate, *function)) return CrashUnlessFuzzing(isolate);

  // Hold onto the bytecode array between marking and optimization to ensure
  // it's not flushed.
  if (FLAG_testing_d8_test_runner) {
    PendingOptimizationTable::PreparedForOptimization(
        isolate, function, allow_heuristic_optimization);
  }

  return ReadOnlyRoots(isolate).undefined_value();
}

525 526 527 528 529 530 531 532 533 534 535 536 537 538 539 540 541 542 543 544 545 546 547 548 549 550 551 552 553 554 555 556 557 558 559 560 561 562 563 564 565
namespace {

// Blocks until all queued concurrent compile tasks have run, installs their
// results, and re-enables finalization on the dispatcher.
void FinalizeOptimization(Isolate* isolate) {
  DCHECK(isolate->concurrent_recompilation_enabled());
  isolate->optimizing_compile_dispatcher()->AwaitCompileTasks();
  isolate->optimizing_compile_dispatcher()->InstallOptimizedFunctions();
  isolate->optimizing_compile_dispatcher()->set_finalize(true);
}

// Returns the offset of the JumpLoop bytecode an OSR attempt from |frame|
// should target: preferably a loop enclosing the current offset, otherwise
// the first JumpLoop after it, or BytecodeOffset::None() if there is none.
BytecodeOffset OffsetOfNextJumpLoop(Isolate* isolate, UnoptimizedFrame* frame) {
  Handle<BytecodeArray> bytecode_array(frame->GetBytecodeArray(), isolate);
  const int current_offset = frame->GetBytecodeOffset();

  interpreter::BytecodeArrayIterator it(bytecode_array, current_offset);

  // First, look for a loop that contains the current bytecode offset.
  for (; !it.done(); it.Advance()) {
    if (it.current_bytecode() != interpreter::Bytecode::kJumpLoop) {
      continue;
    }
    if (!base::IsInRange(current_offset, it.GetJumpTargetOffset(),
                         it.current_offset())) {
      continue;
    }

    return BytecodeOffset(it.current_offset());
  }

  // Fall back to any loop after the current offset.
  it.SetOffset(current_offset);
  for (; !it.done(); it.Advance()) {
    if (it.current_bytecode() == interpreter::Bytecode::kJumpLoop) {
      return BytecodeOffset(it.current_offset());
    }
  }

  return BytecodeOffset::None();
}

}  // namespace

566
RUNTIME_FUNCTION(Runtime_OptimizeOsr) {
  HandleScope handle_scope(isolate);
  DCHECK(args.length() == 0 || args.length() == 1);

  // The optional parameter determines the frame being targeted.
  int stack_depth = 0;
  if (args.length() == 1) {
    if (!args[0].IsSmi()) return CrashUnlessFuzzing(isolate);
    stack_depth = args.smi_value_at(0);
  }

  // Find the JavaScript function at the requested depth on the stack.
  Handle<JSFunction> function;
  JavaScriptFrameIterator it(isolate);
  while (!it.done() && stack_depth--) it.Advance();
  if (!it.done()) function = handle(it.frame()->function(), isolate);
  if (function.is_null()) return CrashUnlessFuzzing(isolate);

  if (V8_UNLIKELY(!FLAG_turbofan) || V8_UNLIKELY(!FLAG_use_osr)) {
    return ReadOnlyRoots(isolate).undefined_value();
  }

  if (!function->shared().allows_lazy_compilation()) {
    return CrashUnlessFuzzing(isolate);
  }

  if (function->shared().optimization_disabled() &&
      function->shared().disabled_optimization_reason() ==
          BailoutReason::kNeverOptimize) {
    return CrashUnlessFuzzing(isolate);
  }

  if (FLAG_testing_d8_test_runner) {
    PendingOptimizationTable::MarkedForOptimization(isolate, function);
  }

  if (function->HasAvailableOptimizedCode()) {
    DCHECK(function->HasAttachedOptimizedCode() ||
           function->ChecksTieringState());
    // If function is already optimized, remove the bytecode array from the
    // pending optimize for test table and return.
    if (FLAG_testing_d8_test_runner) {
      PendingOptimizationTable::FunctionWasOptimized(isolate, function);
    }
    return ReadOnlyRoots(isolate).undefined_value();
  }

  if (!it.frame()->is_unoptimized()) {
    // Nothing to be done.
    return ReadOnlyRoots(isolate).undefined_value();
  }

  IsCompiledScope is_compiled_scope(
      function->shared().is_compiled_scope(isolate));
  JSFunction::EnsureFeedbackVector(isolate, function, &is_compiled_scope);
  isolate->tiering_manager()->RequestOsrAtNextOpportunity(*function);

  // If concurrent OSR is enabled, the testing workflow is a bit tricky. We
  // must guarantee that the next JumpLoop installs the finished OSR'd code
  // object, but we still want to exercise concurrent code paths. To do so,
  // we attempt to find the next JumpLoop, start an OSR job for it now, and
  // immediately force finalization.
  // If this succeeds and we correctly match up the next JumpLoop, once we
  // reach the JumpLoop we'll hit the OSR cache and install the generated code.
  // If not (e.g. because we enter a nested loop first), the next JumpLoop will
  // see the cached OSR code with a mismatched offset, and trigger
  // non-concurrent OSR compilation and installation.
  if (isolate->concurrent_recompilation_enabled() && FLAG_concurrent_osr) {
    const BytecodeOffset osr_offset =
        OffsetOfNextJumpLoop(isolate, UnoptimizedFrame::cast(it.frame()));
    if (osr_offset.IsNone()) {
      // The loop may have been elided by bytecode generation (e.g. for
      // patterns such as `do { ... } while (false);`.
      return ReadOnlyRoots(isolate).undefined_value();
    }

    // Finalize first to ensure all pending tasks are done (since we can't
    // queue more than one OSR job for each function).
    FinalizeOptimization(isolate);

    // Queue the job.
    auto unused_result = Compiler::CompileOptimizedOSR(
        isolate, function, osr_offset, UnoptimizedFrame::cast(it.frame()),
        ConcurrencyMode::kConcurrent);
    USE(unused_result);

    // Finalize again to finish the queued job. The next call into
    // Runtime::kCompileOptimizedOSR will pick up the cached Code object.
    FinalizeOptimization(isolate);
  }

  return ReadOnlyRoots(isolate).undefined_value();
}

661 662
RUNTIME_FUNCTION(Runtime_BaselineOsr) {
  HandleScope scope(isolate);
  DCHECK_EQ(0, args.length());

  // Find the JavaScript function on the top of the stack.
  JavaScriptFrameIterator it(isolate);
  // Fix: guard against an empty JS stack before dereferencing it.frame(),
  // matching the pattern used by Runtime_DeoptimizeNow/Runtime_OptimizeOsr
  // (the is_null() check alone cannot catch a done iterator).
  if (it.done()) return CrashUnlessFuzzing(isolate);
  Handle<JSFunction> function = handle(it.frame()->function(), isolate);
  if (function.is_null()) return CrashUnlessFuzzing(isolate);
  if (!FLAG_sparkplug || !FLAG_use_osr) {
    return ReadOnlyRoots(isolate).undefined_value();
  }
  if (!it.frame()->is_unoptimized()) {
    return ReadOnlyRoots(isolate).undefined_value();
  }

  IsCompiledScope is_compiled_scope(
      function->shared().is_compiled_scope(isolate));
  Compiler::CompileBaseline(isolate, function, Compiler::CLEAR_EXCEPTION,
                            &is_compiled_scope);

  return ReadOnlyRoots(isolate).undefined_value();
}

684 685
RUNTIME_FUNCTION(Runtime_NeverOptimizeFunction) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  Handle<Object> maybe_function = args.at(0);
  if (!maybe_function->IsJSFunction()) return CrashUnlessFuzzing(isolate);
  Handle<JSFunction> function = Handle<JSFunction>::cast(maybe_function);
  Handle<SharedFunctionInfo> sfi(function->shared(), isolate);
  // Only interpreted or builtin-backed functions can have optimization
  // disabled here.
  const CodeKind code_kind = sfi->abstract_code(isolate).kind();
  if (code_kind != CodeKind::INTERPRETED_FUNCTION &&
      code_kind != CodeKind::BUILTIN) {
    return CrashUnlessFuzzing(isolate);
  }
  // Make sure to finish compilation if there is a parallel lazy compilation in
  // progress, to make sure that the compilation finalization doesn't clobber
  // the SharedFunctionInfo's disable_optimization field.
  if (isolate->lazy_compile_dispatcher() &&
      isolate->lazy_compile_dispatcher()->IsEnqueued(sfi)) {
    isolate->lazy_compile_dispatcher()->FinishNow(sfi);
  }

  sfi->DisableOptimization(BailoutReason::kNeverOptimize);
  return ReadOnlyRoots(isolate).undefined_value();
}

RUNTIME_FUNCTION(Runtime_GetOptimizationStatus) {
  HandleScope scope(isolate);
  DCHECK_EQ(args.length(), 1);

  // Accumulate OptimizationStatus bits describing global flags first.
  int status = 0;
  if (FLAG_lite_mode || FLAG_jitless) {
    // Both jitless and lite modes cannot optimize. Unit tests should handle
    // these the same way. In the future, the two flags may become synonyms.
    status |= static_cast<int>(OptimizationStatus::kLiteMode);
  }
  if (!isolate->use_optimizer()) {
    status |= static_cast<int>(OptimizationStatus::kNeverOptimize);
  }
  if (FLAG_always_turbofan || FLAG_prepare_always_turbofan) {
    status |= static_cast<int>(OptimizationStatus::kAlwaysOptimize);
  }
  if (FLAG_deopt_every_n_times) {
    status |= static_cast<int>(OptimizationStatus::kMaybeDeopted);
  }

  // Then add bits describing the given function, if one was passed.
  Handle<Object> function_object = args.at(0);
  if (function_object->IsUndefined()) return Smi::FromInt(status);
  if (!function_object->IsJSFunction()) return CrashUnlessFuzzing(isolate);

  Handle<JSFunction> function = Handle<JSFunction>::cast(function_object);
  status |= static_cast<int>(OptimizationStatus::kIsFunction);

  switch (function->tiering_state()) {
    case TieringState::kRequestTurbofan_Synchronous:
      status |= static_cast<int>(OptimizationStatus::kMarkedForOptimization);
      break;
    case TieringState::kRequestTurbofan_Concurrent:
      status |= static_cast<int>(
          OptimizationStatus::kMarkedForConcurrentOptimization);
      break;
    case TieringState::kInProgress:
      status |= static_cast<int>(OptimizationStatus::kOptimizingConcurrently);
      break;
    case TieringState::kNone:
    case TieringState::kRequestMaglev_Synchronous:
    case TieringState::kRequestMaglev_Concurrent:
      // TODO(v8:7700): Maglev support.
      break;
  }

  if (function->HasAttachedOptimizedCode()) {
    CodeT code = function->code();
    if (code.marked_for_deoptimization()) {
      status |= static_cast<int>(OptimizationStatus::kMarkedForDeoptimization);
    } else {
      status |= static_cast<int>(OptimizationStatus::kOptimized);
    }
    if (code.is_maglevved()) {
      status |= static_cast<int>(OptimizationStatus::kMaglevved);
    } else if (code.is_turbofanned()) {
      status |= static_cast<int>(OptimizationStatus::kTurboFanned);
    }
  }
  if (function->HasAttachedCodeKind(CodeKind::BASELINE)) {
    status |= static_cast<int>(OptimizationStatus::kBaseline);
  }
  if (function->ActiveTierIsIgnition()) {
    status |= static_cast<int>(OptimizationStatus::kInterpreted);
  }

  // Additionally, detect activations of this frame on the stack, and report
  // the status of the topmost frame.
  JavaScriptFrame* frame = nullptr;
  JavaScriptFrameIterator it(isolate);
  while (!it.done()) {
    if (it.frame()->function() == *function) {
      frame = it.frame();
      break;
    }
    it.Advance();
  }
  if (frame != nullptr) {
    status |= static_cast<int>(OptimizationStatus::kIsExecuting);
    if (frame->is_optimized()) {
      status |=
          static_cast<int>(OptimizationStatus::kTopmostFrameIsTurboFanned);
    } else if (frame->is_interpreted()) {
      status |=
          static_cast<int>(OptimizationStatus::kTopmostFrameIsInterpreted);
    } else if (frame->is_baseline()) {
      status |= static_cast<int>(OptimizationStatus::kTopmostFrameIsBaseline);
    }
  }

  return Smi::FromInt(status);
}

799 800
RUNTIME_FUNCTION(Runtime_DisableOptimizationFinalization) {
  DCHECK_EQ(0, args.length());
  if (isolate->concurrent_recompilation_enabled()) {
    // Drain everything currently in flight, then stop finalizing new jobs.
    isolate->optimizing_compile_dispatcher()->AwaitCompileTasks();
    isolate->optimizing_compile_dispatcher()->InstallOptimizedFunctions();
    isolate->stack_guard()->ClearInstallCode();
    isolate->optimizing_compile_dispatcher()->set_finalize(false);
  }
  return ReadOnlyRoots(isolate).undefined_value();
}

RUNTIME_FUNCTION(Runtime_WaitForBackgroundOptimization) {
  DCHECK_EQ(0, args.length());
  // Block until background compile tasks finish (without installing results).
  if (isolate->concurrent_recompilation_enabled()) {
    isolate->optimizing_compile_dispatcher()->AwaitCompileTasks();
  }
  return ReadOnlyRoots(isolate).undefined_value();
}

RUNTIME_FUNCTION(Runtime_FinalizeOptimization) {
  DCHECK_EQ(0, args.length());
  // Finish and install all queued concurrent compile jobs, if any.
  if (isolate->concurrent_recompilation_enabled()) {
    FinalizeOptimization(isolate);
  }
  return ReadOnlyRoots(isolate).undefined_value();
}

826 827
static void ReturnNull(const v8::FunctionCallbackInfo<v8::Value>& args) {
  args.GetReturnValue().SetNull();
828
}
829

830 831
RUNTIME_FUNCTION(Runtime_GetUndetectable) {
  HandleScope scope(isolate);
832
  DCHECK_EQ(0, args.length());
833 834 835
  v8::Isolate* v8_isolate = reinterpret_cast<v8::Isolate*>(isolate);
  Local<v8::ObjectTemplate> desc = v8::ObjectTemplate::New(v8_isolate);
  desc->MarkAsUndetectable();
836
  desc->SetCallAsFunctionHandler(ReturnNull);
837 838
  Local<v8::Object> obj =
      desc->NewInstance(v8_isolate->GetCurrentContext()).ToLocalChecked();
839 840 841
  return *Utils::OpenHandle(*obj);
}

842
static void call_as_function(const v8::FunctionCallbackInfo<v8::Value>& args) {
843 844 845 846 847
  double v1 =
      args[0]->NumberValue(args.GetIsolate()->GetCurrentContext()).ToChecked();
  double v2 =
      args[1]->NumberValue(args.GetIsolate()->GetCurrentContext()).ToChecked();
  args.GetReturnValue().Set(v8::Number::New(args.GetIsolate(), v1 - v2));
848 849 850 851 852 853
}

// Returns a callable object. The object returns the difference of its two
// parameters when it is called.
RUNTIME_FUNCTION(Runtime_GetCallable) {
  HandleScope scope(isolate);
  DCHECK_EQ(0, args.length());
  v8::Isolate* v8_isolate = reinterpret_cast<v8::Isolate*>(isolate);
  Local<v8::FunctionTemplate> t = v8::FunctionTemplate::New(v8_isolate);
  Local<ObjectTemplate> instance_template = t->InstanceTemplate();
  instance_template->SetCallAsFunctionHandler(call_as_function);
  Local<v8::Object> instance =
      t->GetFunction(v8_isolate->GetCurrentContext())
          .ToLocalChecked()
          ->NewInstance(v8_isolate->GetCurrentContext())
          .ToLocalChecked();
  return *Utils::OpenHandle(*instance);
}

868
RUNTIME_FUNCTION(Runtime_ClearFunctionFeedback) {
869
  HandleScope scope(isolate);
870
  DCHECK_EQ(1, args.length());
871
  Handle<JSFunction> function = args.at<JSFunction>(0);
872
  function->ClearTypeFeedbackInfo();
873
  return ReadOnlyRoots(isolate).undefined_value();
874 875 876 877
}

// Test-only: notifies the heap that a context was disposed (as if by the
// embedder), passing `true` for "dependant context".
RUNTIME_FUNCTION(Runtime_NotifyContextDisposed) {
  HandleScope scope(isolate);
  DCHECK_EQ(0, args.length());
  isolate->heap()->NotifyContextDisposed(true);
  return ReadOnlyRoots(isolate).undefined_value();
}

// Test-only: configures allocation-failure injection.
// args[0]: GC interval (DEBUG builds only, stored in FLAG_gc_interval).
// args[1]: allocation timeout (V8_ENABLE_ALLOCATION_TIMEOUT builds only).
// args[2]: optional boolean toggling inline allocation (DEBUG only).
RUNTIME_FUNCTION(Runtime_SetAllocationTimeout) {
  SealHandleScope shs(isolate);
  DCHECK(args.length() == 2 || args.length() == 3);
#ifdef V8_ENABLE_ALLOCATION_TIMEOUT
  CONVERT_INT32_ARG_FUZZ_SAFE(timeout, 1);
  isolate->heap()->set_allocation_timeout(timeout);
#endif
#ifdef DEBUG
  CONVERT_INT32_ARG_FUZZ_SAFE(interval, 0);
  FLAG_gc_interval = interval;
  if (args.length() == 3) {
    // Enable/disable inline allocation if requested.
    CONVERT_BOOLEAN_ARG_FUZZ_SAFE(inline_allocation, 2);
    if (inline_allocation) {
      isolate->heap()->EnableInlineAllocation();
    } else {
      isolate->heap()->DisableInlineAllocation();
    }
  }
#endif
  return ReadOnlyRoots(isolate).undefined_value();
}

906 907 908
namespace {

int FixedArrayLenFromSize(int size) {
909 910
  return std::min({(size - FixedArray::kHeaderSize) / kTaggedSize,
                   FixedArray::kMaxRegularLength});
911 912 913
}

void FillUpOneNewSpacePage(Isolate* isolate, Heap* heap) {
914
  DCHECK(!FLAG_single_generation);
915
  PauseAllocationObserversScope pause_observers(heap);
916
  NewSpace* space = heap->new_space();
917 918 919
  // We cannot rely on `space->limit()` to point to the end of the current page
  // in the case where inline allocations are disabled, it actually points to
  // the current allocation pointer.
920
  DCHECK_IMPLIES(!space->IsInlineAllocationEnabled(),
921
                 space->limit() == space->top());
922 923
  int space_remaining =
      static_cast<int>(space->to_space().page_high() - space->top());
924 925 926 927 928 929 930 931 932 933
  while (space_remaining > 0) {
    int length = FixedArrayLenFromSize(space_remaining);
    if (length > 0) {
      Handle<FixedArray> padding =
          isolate->factory()->NewFixedArray(length, AllocationType::kYoung);
      DCHECK(heap->new_space()->Contains(*padding));
      space_remaining -= padding->Size();
    } else {
      // Not enough room to create another fixed array. Create a filler.
      heap->CreateFillerObjectAt(*heap->new_space()->allocation_top_address(),
934
                                 space_remaining);
935 936 937 938 939 940 941 942 943 944 945
      break;
    }
  }
}

}  // namespace

// Test-only: fills new space completely, page by page, until no fresh page
// can be added.
RUNTIME_FUNCTION(Runtime_SimulateNewspaceFull) {
  HandleScope scope(isolate);
  Heap* heap = isolate->heap();
  NewSpace* space = heap->new_space();
  AlwaysAllocateScopeForTesting always_allocate(heap);
  do {
    FillUpOneNewSpacePage(isolate, heap);
  } while (space->AddFreshPage());

  return ReadOnlyRoots(isolate).undefined_value();
}
953

954 955 956 957 958 959 960 961 962 963 964 965
RUNTIME_FUNCTION(Runtime_ScheduleGCInStackCheck) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  isolate->RequestInterrupt(
      [](v8::Isolate* isolate, void*) {
        isolate->RequestGarbageCollectionForTesting(
            v8::Isolate::kFullGarbageCollection);
      },
      nullptr);
  return ReadOnlyRoots(isolate).undefined_value();
}

966 967 968 969 970 971 972 973 974 975 976 977 978 979 980 981 982 983 984 985 986 987 988 989 990 991
class FileOutputStream : public v8::OutputStream {
 public:
  explicit FileOutputStream(const char* filename) : os_(filename) {}
  ~FileOutputStream() override { os_.close(); }

  WriteResult WriteAsciiChunk(char* data, int size) override {
    os_.write(data, size);
    return kContinue;
  }

  void EndOfStream() override { os_.close(); }

 private:
  std::ofstream os_;
};

// Test-only: serializes a heap snapshot to disk (default file name
// "heap.heapsnapshot"; args[0] may override it). Disabled under fuzzing.
RUNTIME_FUNCTION(Runtime_TakeHeapSnapshot) {
  if (FLAG_fuzzing) {
    // We don't want to create snapshots in fuzzers.
    return ReadOnlyRoots(isolate).undefined_value();
  }

  std::string filename = "heap.heapsnapshot";

  if (args.length() >= 1) {
    HandleScope hs(isolate);
    Handle<String> filename_as_js_string = args.at<String>(0);
    std::unique_ptr<char[]> buffer = filename_as_js_string->ToCString();
    filename = std::string(buffer.get());
  }

  HeapProfiler* heap_profiler = isolate->heap_profiler();
  // Since this API is intended for V8 devs, we do not treat globals as roots
  // here on purpose.
  v8::HeapProfiler::HeapSnapshotOptions options;
  options.numerics_mode = v8::HeapProfiler::NumericsMode::kExposeNumericValues;
  options.snapshot_mode = v8::HeapProfiler::HeapSnapshotMode::kExposeInternals;
  HeapSnapshot* snapshot = heap_profiler->TakeSnapshot(options);
  FileOutputStream stream(filename.c_str());
  HeapSnapshotJSONSerializer serializer(snapshot);
  serializer.Serialize(&stream);
  return ReadOnlyRoots(isolate).undefined_value();
}

1010
static void DebugPrintImpl(MaybeObject maybe_object) {
1011
  StdoutStream os;
1012
  if (maybe_object->IsCleared()) {
1013
    os << "[weak cleared]";
1014
  } else {
1015
    Object object = maybe_object.GetHeapObjectOrSmi();
1016
    bool weak = maybe_object.IsWeak();
1017

1018
#ifdef OBJECT_PRINT
1019 1020 1021
    os << "DebugPrint: ";
    if (weak) os << "[weak] ";
    object.Print(os);
1022 1023
    if (object.IsHeapObject()) {
      HeapObject::cast(object).map().Print(os);
1024
    }
1025
#else
1026
    if (weak) os << "[weak] ";
1027 1028
    // ShortPrint is available in release mode. Print is not.
    os << Brief(object);
1029
#endif
1030
  }
1031
  os << std::endl;
1032 1033 1034 1035 1036
}

// Test-only: prints args[0] to stdout and returns it unchanged.
RUNTIME_FUNCTION(Runtime_DebugPrint) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(1, args.length());

  MaybeObject maybe_object(*args.address_of_arg_at(0));
  DebugPrintImpl(maybe_object);
  return args[0];
}

// Test-only: interprets args[0] as a raw address and prints the object at
// that address. Returns the original argument, not the converted pointer.
RUNTIME_FUNCTION(Runtime_DebugPrintPtr) {
  SealHandleScope shs(isolate);
  StdoutStream os;
  DCHECK_EQ(1, args.length());

  MaybeObject maybe_object(*args.address_of_arg_at(0));
  if (!maybe_object.IsCleared()) {
    Object object = maybe_object.GetHeapObjectOrSmi();
    size_t pointer;
    if (object.ToIntegerIndex(&pointer)) {
      MaybeObject from_pointer(static_cast<Address>(pointer));
      DebugPrintImpl(from_pointer);
    }
  }
  // We don't allow the converted pointer to leak out to JavaScript.
  return args[0];
}

1061 1062 1063 1064
RUNTIME_FUNCTION(Runtime_PrintWithNameForAssert) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(2, args.length());

1065
  auto name = String::cast(args[0]);
1066 1067 1068 1069 1070 1071 1072 1073

  PrintF(" * ");
  StringCharacterStream stream(name);
  while (stream.HasMore()) {
    uint16_t character = stream.GetNext();
    PrintF("%c", character);
  }
  PrintF(": ");
1074
  args[1].ShortPrint();
1075 1076
  PrintF("\n");

1077
  return ReadOnlyRoots(isolate).undefined_value();
1078
}
1079 1080 1081

RUNTIME_FUNCTION(Runtime_DebugTrace) {
  SealHandleScope shs(isolate);
1082
  DCHECK_EQ(0, args.length());
1083
  isolate->PrintStack(stdout);
1084
  return ReadOnlyRoots(isolate).undefined_value();
1085 1086
}

1087 1088
RUNTIME_FUNCTION(Runtime_DebugTrackRetainingPath) {
  HandleScope scope(isolate);
1089 1090
  DCHECK_LE(1, args.length());
  DCHECK_GE(2, args.length());
1091
  CHECK(FLAG_track_retaining_path);
1092
  Handle<HeapObject> object = args.at<HeapObject>(0);
1093 1094
  RetainingPathOption option = RetainingPathOption::kDefault;
  if (args.length() == 2) {
1095
    Handle<String> str = args.at<String>(1);
1096
    const char track_ephemeron_path[] = "track-ephemeron-path";
1097
    if (str->IsOneByteEqualTo(base::StaticCharVector(track_ephemeron_path))) {
1098 1099 1100
      option = RetainingPathOption::kTrackEphemeronPath;
    } else {
      CHECK_EQ(str->length(), 0);
1101
    }
1102
  }
1103
  isolate->heap()->AddRetainingPathTarget(object, option);
1104
  return ReadOnlyRoots(isolate).undefined_value();
1105
}
1106 1107 1108 1109 1110

// This will not allocate (flatten the string), but it may run
// very slowly for very deeply nested ConsStrings.  For debugging use only.
RUNTIME_FUNCTION(Runtime_GlobalPrint) {
  SealHandleScope shs(isolate);
1111
  DCHECK_EQ(1, args.length());
1112

1113
  auto string = String::cast(args[0]);
1114
  StringCharacterStream stream(string);
1115 1116 1117 1118 1119 1120 1121 1122
  while (stream.HasMore()) {
    uint16_t character = stream.GetNext();
    PrintF("%c", character);
  }
  return string;
}

// Test-only: triggers a debugger breakpoint (e.g. int3).
RUNTIME_FUNCTION(Runtime_SystemBreak) {
  // The code below doesn't create handles, but when breaking here in GDB
  // having a handle scope might be useful.
  HandleScope scope(isolate);
  DCHECK_EQ(0, args.length());
  base::OS::DebugBreak();
  return ReadOnlyRoots(isolate).undefined_value();
}

1131 1132 1133
RUNTIME_FUNCTION(Runtime_SetForceSlowPath) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(1, args.length());
1134
  Object arg = args[0];
1135
  if (arg.IsTrue(isolate)) {
1136 1137
    isolate->set_force_slow_path(true);
  } else {
1138
    DCHECK(arg.IsFalse(isolate));
1139 1140
    isolate->set_force_slow_path(false);
  }
1141
  return ReadOnlyRoots(isolate).undefined_value();
1142
}
1143 1144 1145

RUNTIME_FUNCTION(Runtime_Abort) {
  SealHandleScope shs(isolate);
1146
  DCHECK_EQ(1, args.length());
1147
  int message_id = args.smi_value_at(0);
1148
  const char* message = GetAbortReason(static_cast<AbortReason>(message_id));
1149 1150 1151 1152 1153 1154 1155 1156
  base::OS::PrintError("abort: %s\n", message);
  isolate->PrintStack(stderr);
  base::OS::Abort();
  UNREACHABLE();
}

// Aborts the process with the message string args[0]. With
// --disable-abortjs, only logs the message and returns.
RUNTIME_FUNCTION(Runtime_AbortJS) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  Handle<String> message = args.at<String>(0);
  if (FLAG_disable_abortjs) {
    base::OS::PrintError("[disabled] abort: %s\n", message->ToCString().get());
    return Object();
  }
  base::OS::PrintError("abort: %s\n", message->ToCString().get());
  isolate->PrintStack(stderr);
  base::OS::Abort();
  UNREACHABLE();
}

1169
RUNTIME_FUNCTION(Runtime_AbortCSADcheck) {
1170 1171
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
1172
  Handle<String> message = args.at<String>(0);
1173
  base::OS::PrintError("abort: CSA_DCHECK failed: %s\n",
1174 1175 1176 1177 1178
                       message->ToCString().get());
  isolate->PrintStack(stderr);
  base::OS::Abort();
  UNREACHABLE();
}
1179

1180 1181 1182
RUNTIME_FUNCTION(Runtime_DisassembleFunction) {
  HandleScope scope(isolate);
#ifdef DEBUG
1183
  DCHECK_EQ(1, args.length());
1184
  // Get the function and make sure it is compiled.
1185
  Handle<JSFunction> func = args.at<JSFunction>(0);
1186
  IsCompiledScope is_compiled_scope;
1187 1188 1189
  if (!func->is_compiled() && func->HasAvailableOptimizedCode()) {
    func->set_code(func->feedback_vector().optimized_code());
  }
1190
  CHECK(func->is_compiled() ||
1191 1192
        Compiler::Compile(isolate, func, Compiler::KEEP_EXCEPTION,
                          &is_compiled_scope));
1193
  StdoutStream os;
1194
  func->code().Print(os);
1195 1196
  os << std::endl;
#endif  // DEBUG
1197
  return ReadOnlyRoots(isolate).undefined_value();
1198 1199
}

1200
namespace {
1201

1202
int StackSize(Isolate* isolate) {
1203 1204 1205 1206 1207
  int n = 0;
  for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) n++;
  return n;
}

1208 1209 1210 1211
void PrintIndentation(int stack_size) {
  const int max_display = 80;
  if (stack_size <= max_display) {
    PrintF("%4d:%*s", stack_size, stack_size, "");
1212
  } else {
1213
    PrintF("%4d:%*s", stack_size, max_display, "...");
1214 1215 1216
  }
}

1217
}  // namespace
1218 1219 1220

// Test-only: prints an indented "<top frame> {" trace line on function entry.
RUNTIME_FUNCTION(Runtime_TraceEnter) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  PrintIndentation(StackSize(isolate));
  JavaScriptFrame::PrintTop(isolate, stdout, true, false);
  PrintF(" {\n");
  return ReadOnlyRoots(isolate).undefined_value();
}

// Test-only: prints an indented "} -> <value>" trace line on function exit
// and returns args[0] (the value on top of the stack) unchanged.
RUNTIME_FUNCTION(Runtime_TraceExit) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(1, args.length());
  Object obj = args[0];
  PrintIndentation(StackSize(isolate));
  PrintF("} -> ");
  obj.ShortPrint();
  PrintF("\n");
  return obj;  // return TOS
}

1239 1240
RUNTIME_FUNCTION(Runtime_HaveSameMap) {
  SealHandleScope shs(isolate);
1241
  DCHECK_EQ(2, args.length());
1242 1243
  auto obj1 = JSObject::cast(args[0]);
  auto obj2 = JSObject::cast(args[1]);
1244
  return isolate->heap()->ToBoolean(obj1.map() == obj2.map());
1245 1246
}

1247 1248 1249
RUNTIME_FUNCTION(Runtime_InLargeObjectSpace) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(1, args.length());
1250
  auto obj = HeapObject::cast(args[0]);
1251 1252 1253 1254 1255 1256
  return isolate->heap()->ToBoolean(
      isolate->heap()->new_lo_space()->Contains(obj) ||
      isolate->heap()->code_lo_space()->Contains(obj) ||
      isolate->heap()->lo_space()->Contains(obj));
}

1257 1258 1259
RUNTIME_FUNCTION(Runtime_HasElementsInALargeObjectSpace) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(1, args.length());
1260
  auto array = JSArray::cast(args[0]);
1261 1262 1263 1264 1265
  FixedArrayBase elements = array.elements();
  return isolate->heap()->ToBoolean(
      isolate->heap()->new_lo_space()->Contains(elements) ||
      isolate->heap()->lo_space()->Contains(elements));
}
1266

1267
RUNTIME_FUNCTION(Runtime_InYoungGeneration) {
ben's avatar
ben committed
1268
  SealHandleScope shs(isolate);
1269
  DCHECK_EQ(1, args.length());
1270
  Object obj = args[0];
1271
  return isolate->heap()->ToBoolean(ObjectInYoungGeneration(obj));
ben's avatar
ben committed
1272 1273
}

1274 1275 1276 1277
// Force pretenuring for the allocation site the passed object belongs to.
RUNTIME_FUNCTION(Runtime_PretenureAllocationSite) {
  DisallowGarbageCollection no_gc;

1278
  if (args.length() != 1) return CrashUnlessFuzzing(isolate);
1279
  Object arg = args[0];
1280 1281 1282
  if (!arg.IsJSObject()) return CrashUnlessFuzzing(isolate);
  JSObject object = JSObject::cast(arg);

1283
  Heap* heap = object.GetHeap();
1284 1285 1286 1287 1288
  if (!heap->InYoungGeneration(object)) {
    // Object is not in new space, thus there is no memento and nothing to do.
    return ReturnFuzzSafe(ReadOnlyRoots(isolate).false_value(), isolate);
  }

1289 1290
  AllocationMemento memento =
      heap->FindAllocationMemento<Heap::kForRuntime>(object.map(), object);
1291 1292
  if (memento.is_null())
    return ReturnFuzzSafe(ReadOnlyRoots(isolate).false_value(), isolate);
1293 1294
  AllocationSite site = memento.GetAllocationSite();
  heap->PretenureAllocationSiteOnNextCollection(site);
1295
  return ReturnFuzzSafe(ReadOnlyRoots(isolate).true_value(), isolate);
1296 1297
}

1298
namespace {
1299 1300

v8::ModifyCodeGenerationFromStringsResult DisallowCodegenFromStringsCallback(
1301 1302
    v8::Local<v8::Context> context, v8::Local<v8::Value> source,
    bool is_code_kind) {
1303
  return {false, {}};
1304
}
1305 1306 1307

}  // namespace

1308 1309
RUNTIME_FUNCTION(Runtime_DisallowCodegenFromStrings) {
  SealHandleScope shs(isolate);
1310
  DCHECK_EQ(1, args.length());
1311
  bool flag = Oddball::cast(args[0]).ToBool(isolate);
1312
  v8::Isolate* v8_isolate = reinterpret_cast<v8::Isolate*>(isolate);
1313
  v8_isolate->SetModifyCodeGenerationFromStringsCallback(
1314
      flag ? DisallowCodegenFromStringsCallback : nullptr);
1315
  return ReadOnlyRoots(isolate).undefined_value();
1316 1317
}

1318 1319 1320
RUNTIME_FUNCTION(Runtime_RegexpHasBytecode) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(2, args.length());
1321 1322
  auto regexp = JSRegExp::cast(args[0]);
  bool is_latin1 = Oddball::cast(args[1]).ToBool(isolate);
1323
  bool result;
1324 1325
  if (regexp.type_tag() == JSRegExp::IRREGEXP) {
    result = regexp.bytecode(is_latin1).IsByteArray();
1326 1327 1328 1329
  } else {
    result = false;
  }
  return isolate->heap()->ToBoolean(result);
1330 1331 1332 1333 1334
}

// Test-only: true iff the IRREGEXP regexp args[0] has native code for the
// subject encoding selected by args[1] (latin1 vs two-byte).
RUNTIME_FUNCTION(Runtime_RegexpHasNativeCode) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(2, args.length());
  auto regexp = JSRegExp::cast(args[0]);
  bool is_latin1 = Oddball::cast(args[1]).ToBool(isolate);
  bool result;
  if (regexp.type_tag() == JSRegExp::IRREGEXP) {
    result = regexp.code(is_latin1).IsCodeT();
  } else {
    result = false;
  }
  return isolate->heap()->ToBoolean(result);
}

1346 1347 1348
RUNTIME_FUNCTION(Runtime_RegexpTypeTag) {
  HandleScope shs(isolate);
  DCHECK_EQ(1, args.length());
1349
  auto regexp = JSRegExp::cast(args[0]);
1350
  const char* type_str;
1351
  switch (regexp.type_tag()) {
1352 1353 1354 1355 1356 1357 1358 1359 1360 1361 1362 1363 1364 1365 1366 1367
    case JSRegExp::NOT_COMPILED:
      type_str = "NOT_COMPILED";
      break;
    case JSRegExp::ATOM:
      type_str = "ATOM";
      break;
    case JSRegExp::IRREGEXP:
      type_str = "IRREGEXP";
      break;
    case JSRegExp::EXPERIMENTAL:
      type_str = "EXPERIMENTAL";
      break;
  }
  return *isolate->factory()->NewStringFromAsciiChecked(type_str);
}

1368 1369 1370
RUNTIME_FUNCTION(Runtime_RegexpIsUnmodified) {
  HandleScope shs(isolate);
  DCHECK_EQ(1, args.length());
1371
  Handle<JSRegExp> regexp = args.at<JSRegExp>(0);
1372 1373 1374 1375
  return isolate->heap()->ToBoolean(
      RegExp::IsUnmodifiedRegExp(isolate, regexp));
}

// Generates Runtime_<Name> predicates that forward to JSObject::<Name>().
#define ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(Name) \
  RUNTIME_FUNCTION(Runtime_##Name) {               \
    auto obj = JSObject::cast(args[0]);            \
    return isolate->heap()->ToBoolean(obj.Name()); \
  }

ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(HasFastElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(HasSmiElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(HasObjectElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(HasSmiOrObjectElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(HasDoubleElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(HasHoleyElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(HasDictionaryElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(HasPackedElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(HasSloppyArgumentsElements)
// Properties test sitting with elements tests - not fooling anyone.
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(HasFastProperties)

#undef ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION

// Generates Runtime_HasFixed<Type>Elements predicates for every typed-array
// element kind.
#define FIXED_TYPED_ARRAYS_CHECK_RUNTIME_FUNCTION(Type, type, TYPE, ctype) \
  RUNTIME_FUNCTION(Runtime_HasFixed##Type##Elements) {                     \
    auto obj = JSObject::cast(args[0]);                                    \
    return isolate->heap()->ToBoolean(obj.HasFixed##Type##Elements());     \
  }

TYPED_ARRAYS(FIXED_TYPED_ARRAYS_CHECK_RUNTIME_FUNCTION)

#undef FIXED_TYPED_ARRAYS_CHECK_RUNTIME_FUNCTION

1406 1407 1408 1409 1410 1411 1412 1413 1414 1415 1416 1417 1418 1419 1420 1421 1422 1423 1424 1425 1426 1427 1428 1429 1430 1431 1432 1433
RUNTIME_FUNCTION(Runtime_IsConcatSpreadableProtector) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  return isolate->heap()->ToBoolean(
      Protectors::IsIsConcatSpreadableLookupChainIntact(isolate));
}

// Test-only: reports whether the TypedArray species protector is intact.
RUNTIME_FUNCTION(Runtime_TypedArraySpeciesProtector) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  return isolate->heap()->ToBoolean(
      Protectors::IsTypedArraySpeciesLookupChainIntact(isolate));
}

// Test-only: reports whether the RegExp species protector is intact.
RUNTIME_FUNCTION(Runtime_RegExpSpeciesProtector) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  return isolate->heap()->ToBoolean(
      Protectors::IsRegExpSpeciesLookupChainIntact(isolate));
}

// Test-only: reports whether the Promise species protector is intact.
RUNTIME_FUNCTION(Runtime_PromiseSpeciesProtector) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  return isolate->heap()->ToBoolean(
      Protectors::IsPromiseSpeciesLookupChainIntact(isolate));
}

1434 1435 1436
RUNTIME_FUNCTION(Runtime_ArraySpeciesProtector) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
1437 1438
  return isolate->heap()->ToBoolean(
      Protectors::IsArraySpeciesLookupChainIntact(isolate));
1439
}
1440

1441 1442 1443
RUNTIME_FUNCTION(Runtime_MapIteratorProtector) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
1444 1445
  return isolate->heap()->ToBoolean(
      Protectors::IsMapIteratorLookupChainIntact(isolate));
1446 1447 1448 1449 1450
}

// Test-only: reports whether the Set iterator protector is intact.
RUNTIME_FUNCTION(Runtime_SetIteratorProtector) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  return isolate->heap()->ToBoolean(
      Protectors::IsSetIteratorLookupChainIntact(isolate));
}

1455 1456 1457 1458
RUNTIME_FUNCTION(Runtime_StringIteratorProtector) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  return isolate->heap()->ToBoolean(
1459
      Protectors::IsStringIteratorLookupChainIntact(isolate));
1460 1461
}

1462 1463 1464 1465 1466 1467
RUNTIME_FUNCTION(Runtime_ArrayIteratorProtector) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  return isolate->heap()->ToBoolean(
      Protectors::IsArrayIteratorLookupChainIntact(isolate));
}
1468 1469 1470 1471 1472 1473 1474 1475 1476 1477 1478 1479 1480 1481 1482 1483
// For use by tests and fuzzers. It
//
// 1. serializes a snapshot of the current isolate,
// 2. deserializes the snapshot,
// 3. and runs VerifyHeap on the resulting isolate.
//
// The current isolate should not be modified by this call and can keep running
// once it completes.
RUNTIME_FUNCTION(Runtime_SerializeDeserializeNow) {
  HandleScope scope(isolate);
  DCHECK_EQ(0, args.length());
  Snapshot::SerializeDeserializeAndVerifyForTesting(isolate,
                                                    isolate->native_context());
  return ReadOnlyRoots(isolate).undefined_value();
}

1484
RUNTIME_FUNCTION(Runtime_HeapObjectVerify) {
1485 1486
  HandleScope shs(isolate);
  DCHECK_EQ(1, args.length());
1487
  Handle<Object> object = args.at(0);
1488
#ifdef VERIFY_HEAP
1489
  object->ObjectVerify(isolate);
1490 1491 1492
#else
  CHECK(object->IsObject());
  if (object->IsHeapObject()) {
1493
    CHECK(HeapObject::cast(*object).map().IsMap());
1494 1495 1496 1497 1498 1499 1500
  } else {
    CHECK(object->IsSmi());
  }
#endif
  return isolate->heap()->ToBoolean(true);
}

1501 1502 1503 1504 1505 1506 1507 1508 1509 1510 1511 1512
RUNTIME_FUNCTION(Runtime_ArrayBufferMaxByteLength) {
  HandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  return *isolate->factory()->NewNumber(JSArrayBuffer::kMaxByteLength);
}

// Test-only: returns JSTypedArray::kMaxLength as a JS number.
RUNTIME_FUNCTION(Runtime_TypedArrayMaxLength) {
  HandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  return *isolate->factory()->NewNumber(JSTypedArray::kMaxLength);
}

1513 1514 1515 1516
RUNTIME_FUNCTION(Runtime_CompleteInobjectSlackTracking) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());

1517
  Handle<JSObject> object = args.at<JSObject>(0);
1518
  MapUpdater::CompleteInobjectSlackTracking(isolate, object->map());
1519

1520
  return ReadOnlyRoots(isolate).undefined_value();
1521 1522
}

1523
RUNTIME_FUNCTION(Runtime_TurbofanStaticAssert) {
1524
  SealHandleScope shs(isolate);
1525 1526
  // Always lowered to StaticAssert node in Turbofan, so we never get here in
  // compiled code.
1527 1528 1529
  return ReadOnlyRoots(isolate).undefined_value();
}

1530 1531 1532 1533 1534 1535
RUNTIME_FUNCTION(Runtime_IsBeingInterpreted) {
  SealHandleScope shs(isolate);
  // Always lowered to false in Turbofan, so we never get here in compiled code.
  return ReadOnlyRoots(isolate).true_value();
}

1536 1537 1538 1539
RUNTIME_FUNCTION(Runtime_EnableCodeLoggingForTesting) {
  // The {NoopListener} currently does nothing on any callback, but reports
  // {true} on {is_listening_to_code_events()}. Feel free to add assertions to
  // any method to further test the code logging callbacks.
1540
  class NoopListener final : public LogEventListener {
1541 1542 1543 1544 1545 1546 1547 1548 1549 1550 1551
    void CodeCreateEvent(LogEventsAndTags tag, Handle<AbstractCode> code,
                         const char* name) final {}
    void CodeCreateEvent(LogEventsAndTags tag, Handle<AbstractCode> code,
                         Handle<Name> name) final {}
    void CodeCreateEvent(LogEventsAndTags tag, Handle<AbstractCode> code,
                         Handle<SharedFunctionInfo> shared,
                         Handle<Name> script_name) final {}
    void CodeCreateEvent(LogEventsAndTags tag, Handle<AbstractCode> code,
                         Handle<SharedFunctionInfo> shared,
                         Handle<Name> script_name, int line, int column) final {
    }
1552
#if V8_ENABLE_WEBASSEMBLY
1553
    void CodeCreateEvent(LogEventsAndTags tag, const wasm::WasmCode* code,
1554 1555
                         wasm::WasmName name, const char* source_url,
                         int code_offset, int script_id) final {}
1556
#endif  // V8_ENABLE_WEBASSEMBLY
1557 1558 1559 1560 1561 1562

    void CallbackEvent(Handle<Name> name, Address entry_point) final {}
    void GetterCallbackEvent(Handle<Name> name, Address entry_point) final {}
    void SetterCallbackEvent(Handle<Name> name, Address entry_point) final {}
    void RegExpCodeCreateEvent(Handle<AbstractCode> code,
                               Handle<String> source) final {}
1563 1564
    void CodeMoveEvent(AbstractCode from, AbstractCode to) final {}
    void SharedFunctionInfoMoveEvent(Address from, Address to) final {}
1565
    void NativeContextMoveEvent(Address from, Address to) final {}
1566
    void CodeMovingGCEvent() final {}
1567 1568 1569
    void CodeDisableOptEvent(Handle<AbstractCode> code,
                             Handle<SharedFunctionInfo> shared) final {}
    void CodeDeoptEvent(Handle<Code> code, DeoptimizeKind kind, Address pc,
1570
                        int fp_to_sp_delta) final {}
1571 1572 1573
    void CodeDependencyChangeEvent(Handle<Code> code,
                                   Handle<SharedFunctionInfo> shared,
                                   const char* reason) final {}
1574
    void WeakCodeClearEvent() final {}
1575 1576 1577 1578

    bool is_listening_to_code_events() final { return true; }
  };
  static base::LeakyObject<NoopListener> noop_listener;
1579
#if V8_ENABLE_WEBASSEMBLY
1580
  wasm::GetWasmEngine()->EnableCodeLogging(isolate);
1581
#endif  // V8_ENABLE_WEBASSEMBLY
1582
  isolate->log_event_dispatcher()->AddListener(noop_listener.get());
1583 1584 1585
  return ReadOnlyRoots(isolate).undefined_value();
}

1586 1587 1588 1589
RUNTIME_FUNCTION(Runtime_NewRegExpWithBacktrackLimit) {
  HandleScope scope(isolate);
  DCHECK_EQ(3, args.length());

1590 1591
  Handle<String> pattern = args.at<String>(0);
  Handle<String> flags_string = args.at<String>(1);
1592
  uint32_t backtrack_limit = args.positive_smi_value_at(2);
1593 1594

  JSRegExp::Flags flags =
1595
      JSRegExp::FlagsFromString(isolate, flags_string).value();
1596 1597 1598 1599 1600

  RETURN_RESULT_OR_FAILURE(
      isolate, JSRegExp::New(isolate, pattern, flags, backtrack_limit));
}

1601 1602 1603 1604 1605 1606
RUNTIME_FUNCTION(Runtime_Is64Bit) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  return isolate->heap()->ToBoolean(kSystemPointerSize == 8);
}

1607 1608 1609 1610 1611 1612
RUNTIME_FUNCTION(Runtime_BigIntMaxLengthBits) {
  HandleScope scope(isolate);
  DCHECK_EQ(0, args.length());
  return *isolate->factory()->NewNumber(BigInt::kMaxLengthBits);
}

1613 1614 1615
RUNTIME_FUNCTION(Runtime_IsSameHeapObject) {
  HandleScope scope(isolate);
  DCHECK_EQ(2, args.length());
1616 1617
  Handle<HeapObject> obj1 = args.at<HeapObject>(0);
  Handle<HeapObject> obj2 = args.at<HeapObject>(1);
1618 1619 1620 1621 1622 1623
  return isolate->heap()->ToBoolean(obj1->address() == obj2->address());
}

// Test-only: true iff args[0] is a string in the shared heap.
RUNTIME_FUNCTION(Runtime_IsSharedString) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  Handle<HeapObject> obj = args.at<HeapObject>(0);
  return isolate->heap()->ToBoolean(obj->IsString() &&
                                    Handle<String>::cast(obj)->IsShared());
}

1629 1630 1631 1632 1633 1634
RUNTIME_FUNCTION(Runtime_SharedGC) {
  SealHandleScope scope(isolate);
  isolate->heap()->CollectSharedGarbage(GarbageCollectionReason::kTesting);
  return ReadOnlyRoots(isolate).undefined_value();
}

1635 1636
}  // namespace internal
}  // namespace v8