// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/runtime/runtime-utils.h"

#include "src/arguments.h"
#include "src/asmjs/asm-js.h"
#include "src/compiler-dispatcher/optimizing-compile-dispatcher.h"
#include "src/compiler.h"
#include "src/deoptimizer.h"
#include "src/frames-inl.h"
#include "src/full-codegen/full-codegen.h"
#include "src/isolate-inl.h"
#include "src/messages.h"
#include "src/v8threads.h"
#include "src/vm-state-inl.h"

namespace v8 {
namespace internal {

RUNTIME_FUNCTION(Runtime_CompileLazy) {
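  // Compiles the unoptimized code for {function} if it is not compiled yet
  // (typically reached via the CompileLazy builtin on first invocation) and
  // returns the resulting code object, or the exception sentinel if
  // compilation failed with a pending exception.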
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);

#ifdef DEBUG
  if (FLAG_trace_lazy && !function->shared()->is_compiled()) {
    PrintF("[unoptimized: ");
    function->PrintName();
    PrintF("]\n");
  }
#endif

  StackLimitCheck check(isolate);
  if (check.JsHasOverflowed(1 * KB)) return isolate->StackOverflow();
  if (!Compiler::Compile(function, Compiler::KEEP_EXCEPTION)) {
    return isolate->heap()->exception();
  }
  DCHECK(function->is_compiled());
  return function->code();
}

RUNTIME_FUNCTION(Runtime_CompileBaseline) {
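  // Tiers {function} up to baseline code and returns it, or the exception
  // sentinel if baseline compilation failed.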
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
  StackLimitCheck check(isolate);
  if (check.JsHasOverflowed(1 * KB)) return isolate->StackOverflow();
  if (!Compiler::CompileBaseline(function)) {
    return isolate->heap()->exception();
  }
  DCHECK(function->is_compiled());
  return function->code();
}

RUNTIME_FUNCTION(Runtime_CompileOptimized_Concurrent) {
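  // Starts an optimizing compilation job for {function} with
  // Compiler::CONCURRENT and returns the code currently installed on the
  // function, so execution can continue while the job runs in the background.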
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
  StackLimitCheck check(isolate);
  if (check.JsHasOverflowed(1 * KB)) return isolate->StackOverflow();
  if (!Compiler::CompileOptimized(function, Compiler::CONCURRENT)) {
    return isolate->heap()->exception();
  }
  DCHECK(function->is_compiled());
  return function->code();
}


RUNTIME_FUNCTION(Runtime_CompileOptimized_NotConcurrent) {
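  // Optimizes {function} synchronously with Compiler::NOT_CONCURRENT and
  // returns the code installed on the function afterwards, or the exception
  // sentinel on failure.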
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
  StackLimitCheck check(isolate);
  if (check.JsHasOverflowed(1 * KB)) return isolate->StackOverflow();
  if (!Compiler::CompileOptimized(function, Compiler::NOT_CONCURRENT)) {
    return isolate->heap()->exception();
  }
  DCHECK(function->is_compiled());
  return function->code();
}

RUNTIME_FUNCTION(Runtime_InstantiateAsmJs) {
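  // Attempts to instantiate the asm.js module previously validated for
  // {function}, given the stdlib, foreign and heap (ArrayBuffer) arguments.
  // On success the instantiation result is returned; otherwise the asm->wasm
  // data is thrown away, the function is reset to CompileLazy and Smi 0 is
  // returned to signal failure.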
  HandleScope scope(isolate);
  DCHECK_EQ(args.length(), 4);
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);

  Handle<JSReceiver> stdlib;
  if (args[1]->IsJSReceiver()) {
    stdlib = args.at<JSReceiver>(1);
  }
  Handle<JSObject> foreign;
  if (args[2]->IsJSObject()) {
    foreign = args.at<JSObject>(2);
  }
  Handle<JSArrayBuffer> memory;
  if (args[3]->IsJSArrayBuffer()) {
    memory = args.at<JSArrayBuffer>(3);
  }
  if (function->shared()->HasAsmWasmData() &&
      AsmJs::IsStdlibValid(isolate, handle(function->shared()->asm_wasm_data()),
                           stdlib)) {
    MaybeHandle<Object> result;
    result = AsmJs::InstantiateAsmWasm(
        isolate, handle(function->shared()->asm_wasm_data()), memory, foreign);
    if (!result.is_null()) {
      return *result.ToHandleChecked();
    }
  }
  // Remove wasm data, mark as broken for asm->wasm,
  // replace code with CompileLazy, and return a smi 0 to indicate failure.
  if (function->shared()->HasAsmWasmData()) {
    function->shared()->ClearAsmWasmData();
  }
  function->shared()->set_is_asm_wasm_broken(true);
  DCHECK(function->code() ==
         isolate->builtins()->builtin(Builtins::kInstantiateAsmJs));
  function->ReplaceCode(isolate->builtins()->builtin(Builtins::kCompileLazy));
  if (function->shared()->code() ==
      isolate->builtins()->builtin(Builtins::kInstantiateAsmJs)) {
    function->shared()->ReplaceCode(
        isolate->builtins()->builtin(Builtins::kCompileLazy));
  }
  return Smi::kZero;
}

RUNTIME_FUNCTION(Runtime_NotifyStubFailure) {
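  // Called after a stub failure has triggered a deoptimization: the
  // Deoptimizer instance created for it is no longer needed here, so it is
  // simply deleted.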
  HandleScope scope(isolate);
  DCHECK_EQ(0, args.length());
  Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
  DCHECK(AllowHeapAllocation::IsAllowed());
  delete deoptimizer;
  return isolate->heap()->undefined_value();
}

class ActivationsFinder : public ThreadVisitor {
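  // Visits JavaScript frames on the current and archived threads and records
  // whether any of them is still executing code_.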
 public:
  Code* code_;
  bool has_code_activations_;

  explicit ActivationsFinder(Code* code)
      : code_(code), has_code_activations_(false) {}

  void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
    JavaScriptFrameIterator it(isolate, top);
    VisitFrames(&it);
  }

  void VisitFrames(JavaScriptFrameIterator* it) {
    for (; !it->done(); it->Advance()) {
      JavaScriptFrame* frame = it->frame();
      if (code_->contains(frame->pc())) has_code_activations_ = true;
    }
  }
};


RUNTIME_FUNCTION(Runtime_NotifyDeoptimized) {
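  // Finishes a deoptimization: materializes heap objects for the deoptimized
  // frames, restores the context register and, for non-lazy bailouts, throws
  // away the optimized code if no activation is still executing it.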
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  CONVERT_SMI_ARG_CHECKED(type_arg, 0);
  Deoptimizer::BailoutType type =
      static_cast<Deoptimizer::BailoutType>(type_arg);
  Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
  DCHECK(AllowHeapAllocation::IsAllowed());
  TimerEventScope<TimerEventDeoptimizeCode> timer(isolate);
  TRACE_EVENT0("v8", "V8.DeoptimizeCode");

  Handle<JSFunction> function = deoptimizer->function();
  Handle<Code> optimized_code = deoptimizer->compiled_code();

  DCHECK(optimized_code->kind() == Code::OPTIMIZED_FUNCTION);
  DCHECK(type == deoptimizer->bailout_type());
  DCHECK_NULL(isolate->context());

  // TODO(turbofan): For Crankshaft we restore the context before objects are
  // being materialized, because it never de-materializes the context but it
  // requires a context to materialize arguments objects. This is specific to
  // Crankshaft and can be removed once only TurboFan goes through here.
  if (!optimized_code->is_turbofanned()) {
    JavaScriptFrameIterator top_it(isolate);
    JavaScriptFrame* top_frame = top_it.frame();
    isolate->set_context(Context::cast(top_frame->context()));
  } else {
    // TODO(turbofan): We currently need the native context to materialize
    // the arguments object, but only to get to its map.
    isolate->set_context(function->native_context());
  }

  // Make sure to materialize objects before causing any allocation.
  JavaScriptFrameIterator it(isolate);
  deoptimizer->MaterializeHeapObjects(&it);
  delete deoptimizer;

  // Ensure the context register is updated for materialized objects.
  if (optimized_code->is_turbofanned()) {
    JavaScriptFrameIterator top_it(isolate);
    JavaScriptFrame* top_frame = top_it.frame();
    isolate->set_context(Context::cast(top_frame->context()));
  }

  if (type == Deoptimizer::LAZY) {
    return isolate->heap()->undefined_value();
  }

  // Search for other activations of the same optimized code.
  // At this point {it} is at the topmost frame of all the frames materialized
  // by the deoptimizer. Note that this frame does not necessarily represent
  // an activation of {function} because of potential inlined tail-calls.
  ActivationsFinder activations_finder(*optimized_code);
  activations_finder.VisitFrames(&it);
  isolate->thread_manager()->IterateArchivedThreads(&activations_finder);

  if (!activations_finder.has_code_activations_) {
    if (function->code() == *optimized_code) {
      if (FLAG_trace_deopt) {
        PrintF("[removing optimized code for: ");
        function->PrintName();
        PrintF("]\n");
      }
      function->ReplaceCode(function->shared()->code());
    }
    // Evict optimized code for this function from the cache so that it
    // doesn't get used for new closures.
    function->shared()->EvictFromOptimizedCodeMap(*optimized_code,
                                                  "notify deoptimized");
  } else {
    // TODO(titzer): we should probably do DeoptimizeCodeList(code)
    // unconditionally if the code is not already marked for deoptimization.
    // If there is an index by shared function info, all the better.
    Deoptimizer::DeoptimizeFunction(*function);
  }

  return isolate->heap()->undefined_value();
}


static bool IsSuitableForOnStackReplacement(Isolate* isolate,
                                            Handle<JSFunction> function) {
  // Don't OSR if optimization has been disabled for this function.
  if (function->shared()->optimization_disabled()) return false;
  // If we are trying to do OSR when there are already optimized
  // activations of the function, it means (a) the function is directly or
  // indirectly recursive and (b) an optimized invocation has been
  // deoptimized so that we are currently in an unoptimized activation.
  // Check for optimized activations of this function.
  for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) {
    JavaScriptFrame* frame = it.frame();
    if (frame->is_optimized() && frame->function() == *function) return false;
  }

  return true;
}

namespace {

BailoutId DetermineEntryAndDisarmOSRForBaseline(JavaScriptFrame* frame) {
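  // Baseline (full-codegen) case: revert the patched back edges in the
  // caller's code and translate the current pc into the AST id of the loop
  // that triggered the OSR request.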
  Handle<Code> caller_code(frame->function()->shared()->code());

  // Passing the PC in the JavaScript frame from the caller directly is
  // not GC safe, so we walk the stack to get it.
  if (!caller_code->contains(frame->pc())) {
    // Code on the stack may not be the code object referenced by the shared
    // function info.  It may have been replaced to include deoptimization data.
    caller_code = Handle<Code>(frame->LookupCode());
  }

  DCHECK_EQ(frame->LookupCode(), *caller_code);
  DCHECK_EQ(Code::FUNCTION, caller_code->kind());
  DCHECK(caller_code->contains(frame->pc()));

  // Revert the patched back edge table, regardless of whether OSR succeeds.
  BackEdgeTable::Revert(frame->isolate(), *caller_code);

  // Return a BailoutId representing an AST id of the {IterationStatement}.
  uint32_t pc_offset =
      static_cast<uint32_t>(frame->pc() - caller_code->instruction_start());
  return caller_code->TranslatePcOffsetToAstId(pc_offset);
}

BailoutId DetermineEntryAndDisarmOSRForInterpreter(JavaScriptFrame* frame) {
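  // Interpreted case: reset the bytecode's OSR loop nesting level to disarm
  // back edges and use the current bytecode offset as the entry point.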
  InterpretedFrame* iframe = reinterpret_cast<InterpretedFrame*>(frame);

  // Note that the bytecode array active on the stack might be different from
  // the one installed on the function (e.g. patched by debugger). This however
  // is fine because we guarantee the layout to be in sync, hence any BailoutId
  // representing the entry point will be valid for any copy of the bytecode.
  Handle<BytecodeArray> bytecode(iframe->GetBytecodeArray());

  DCHECK(frame->LookupCode()->is_interpreter_trampoline_builtin());
  DCHECK(frame->function()->shared()->HasBytecodeArray());
  DCHECK(frame->is_interpreted());
  DCHECK(FLAG_ignition_osr);

  // Reset the OSR loop nesting depth to disarm back edges.
  bytecode->set_osr_loop_nesting_level(0);

  // Return a BailoutId representing the bytecode offset of the back branch.
  return BailoutId(iframe->GetBytecodeOffset());
}

}  // namespace

RUNTIME_FUNCTION(Runtime_CompileForOnStackReplacement) {
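  // Called from an armed back edge in unoptimized code: compiles optimized
  // code that can be entered at the current loop (on-stack replacement) and
  // returns it, or NULL if no usable OSR code was produced.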
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);

  // We're not prepared to handle a function with an arguments object.
  DCHECK(!function->shared()->uses_arguments());

  // Only reachable when OSR is enabled.
  CHECK(FLAG_use_osr);

  // Determine frame triggering OSR request.
  JavaScriptFrameIterator it(isolate);
  JavaScriptFrame* frame = it.frame();
  DCHECK_EQ(frame->function(), *function);

  // Determine the entry point for which this OSR request has been fired and
  // also disarm all back edges in the calling code to stop new requests.
  BailoutId ast_id = frame->is_interpreted()
                         ? DetermineEntryAndDisarmOSRForInterpreter(frame)
                         : DetermineEntryAndDisarmOSRForBaseline(frame);
  DCHECK(!ast_id.IsNone());

  MaybeHandle<Code> maybe_result;
  if (IsSuitableForOnStackReplacement(isolate, function)) {
    if (FLAG_trace_osr) {
      PrintF("[OSR - Compiling: ");
      function->PrintName();
      PrintF(" at AST id %d]\n", ast_id.ToInt());
    }
    maybe_result = Compiler::GetOptimizedCodeForOSR(function, ast_id, frame);
  }

  // Check whether we ended up with usable optimized code.
  Handle<Code> result;
  if (maybe_result.ToHandle(&result) &&
      result->kind() == Code::OPTIMIZED_FUNCTION) {
    DeoptimizationInputData* data =
        DeoptimizationInputData::cast(result->deoptimization_data());

    if (data->OsrPcOffset()->value() >= 0) {
      DCHECK(BailoutId(data->OsrAstId()->value()) == ast_id);
      if (FLAG_trace_osr) {
        PrintF("[OSR - Entry at AST id %d, offset %d in optimized code]\n",
               ast_id.ToInt(), data->OsrPcOffset()->value());
      }
      // TODO(titzer): this is a massive hack to make the deopt counts
      // match. Fix heuristics for reenabling optimizations!
      function->shared()->increment_deopt_count();

      if (result->is_turbofanned()) {
        // When we're waiting for concurrent optimization, set to compile on
        // the next call - otherwise we'd run unoptimized once more
        // and potentially compile for OSR another time as well.
        if (function->IsMarkedForConcurrentOptimization()) {
          if (FLAG_trace_osr) {
            PrintF("[OSR - Re-marking ");
            function->PrintName();
            PrintF(" for non-concurrent optimization]\n");
          }
          function->ReplaceCode(
              isolate->builtins()->builtin(Builtins::kCompileOptimized));
        }
      } else {
        // Crankshafted OSR code can be installed into the function.
        function->ReplaceCode(*result);
      }
      return *result;
    }
  }

  // Failed.
  if (FLAG_trace_osr) {
    PrintF("[OSR - Failed: ");
    function->PrintName();
    PrintF(" at AST id %d]\n", ast_id.ToInt());
  }

  if (!function->IsOptimized()) {
    function->ReplaceCode(function->shared()->code());
  }
  return NULL;
}


RUNTIME_FUNCTION(Runtime_TryInstallOptimizedCode) {
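  // Installs any functions the concurrent optimizing compile dispatcher has
  // finished and returns the code {function} should continue with: its
  // optimized code if it is now optimized, its unoptimized code otherwise.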
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);

  // First check if this is a real stack overflow.
  StackLimitCheck check(isolate);
  if (check.JsHasOverflowed()) {
    SealHandleScope shs(isolate);
    return isolate->StackOverflow();
  }

  isolate->optimizing_compile_dispatcher()->InstallOptimizedFunctions();
  return (function->IsOptimized()) ? function->code()
                                   : function->shared()->code();
}


bool CodeGenerationFromStringsAllowed(Isolate* isolate,
                                      Handle<Context> context) {
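  // Only called when the context disallows code generation from strings; the
  // embedder-provided callback, if any, gets the final say.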
  DCHECK(context->allow_code_gen_from_strings()->IsFalse(isolate));
  // Check with callback if set.
  AllowCodeGenerationFromStringsCallback callback =
      isolate->allow_code_gen_callback();
  if (callback == NULL) {
    // No callback set and code generation disallowed.
    return false;
  } else {
    // Callback set. Let it decide if code generation is allowed.
    VMState<EXTERNAL> state(isolate);
    return callback(v8::Utils::ToLocal(context));
  }
}

static Object* CompileGlobalEval(Isolate* isolate, Handle<String> source,
                                 Handle<SharedFunctionInfo> outer_info,
                                 LanguageMode language_mode,
                                 int eval_scope_position, int eval_position) {
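  // Compiles an eval() source string in the current context, after checking
  // the embedder's policy on code generation from strings.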
  Handle<Context> context = Handle<Context>(isolate->context());
  Handle<Context> native_context = Handle<Context>(context->native_context());

  // Check if native context allows code generation from
  // strings. Throw an exception if it doesn't.
  if (native_context->allow_code_gen_from_strings()->IsFalse(isolate) &&
      !CodeGenerationFromStringsAllowed(isolate, native_context)) {
    Handle<Object> error_message =
        native_context->ErrorMessageForCodeGenerationFromStrings();
    Handle<Object> error;
    MaybeHandle<Object> maybe_error = isolate->factory()->NewEvalError(
        MessageTemplate::kCodeGenFromStrings, error_message);
    if (maybe_error.ToHandle(&error)) isolate->Throw(*error);
    return isolate->heap()->exception();
  }

  // Deal with a normal eval call with a string argument. Compile it
  // and return the compiled function bound in the local context.
  static const ParseRestriction restriction = NO_PARSE_RESTRICTION;
  Handle<JSFunction> compiled;
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(
      isolate, compiled, Compiler::GetFunctionFromEval(
                             source, outer_info, context, language_mode,
                             restriction, eval_scope_position, eval_position),
      isolate->heap()->exception());
  return *compiled;
}


RUNTIME_FUNCTION(Runtime_ResolvePossiblyDirectEval) {
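  // Runtime part of a potentially direct eval call: if the callee is not the
  // original GlobalEval function, or the first argument is not a string, the
  // callee is returned so an indirect call is performed; otherwise the source
  // is compiled in the calling context via CompileGlobalEval.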
  HandleScope scope(isolate);
  DCHECK_EQ(6, args.length());

  Handle<Object> callee = args.at(0);

  // If "eval" didn't refer to the original GlobalEval, it's not a
  // direct call to eval.
  // (And even if it is, but the first argument isn't a string, just let
  // execution default to an indirect call to eval, which will also return
  // the first argument without doing anything).
  if (*callee != isolate->native_context()->global_eval_fun() ||
      !args[1]->IsString()) {
    return *callee;
  }

  DCHECK(args[3]->IsSmi());
  DCHECK(is_valid_language_mode(args.smi_at(3)));
  LanguageMode language_mode = static_cast<LanguageMode>(args.smi_at(3));
  DCHECK(args[4]->IsSmi());
  Handle<SharedFunctionInfo> outer_info(args.at<JSFunction>(2)->shared(),
                                        isolate);
  return CompileGlobalEval(isolate, args.at<String>(1), outer_info,
                           language_mode, args.smi_at(4), args.smi_at(5));
}
}  // namespace internal
}  // namespace v8