// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/builtins/builtins-lazy-gen.h"

#include "src/builtins/builtins-utils-gen.h"
#include "src/builtins/builtins.h"
#include "src/common/globals.h"
#include "src/objects/code-inl.h"
#include "src/objects/feedback-vector.h"
#include "src/objects/shared-function-info.h"

namespace v8 {
namespace internal {

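// Tail-calls |code| on behalf of |function|, forwarding the actual argument
// count, context and new.target from this builtin's descriptor.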
void LazyBuiltinsAssembler::GenerateTailCallToJSCode(
    TNode<CodeT> code, TNode<JSFunction> function) {
  auto argc = UncheckedParameter<Int32T>(Descriptor::kActualArgumentsCount);
  auto context = Parameter<Context>(Descriptor::kContext);
  auto new_target = Parameter<Object>(Descriptor::kNewTarget);
  TailCallJSCode(code, context, function, new_target, argc);
}

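// Calls |function_id| in the runtime, which is expected to return a code
// object, and then tail-calls that code on behalf of |function|.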
void LazyBuiltinsAssembler::GenerateTailCallToReturnedCode(
    Runtime::FunctionId function_id, TNode<JSFunction> function) {
  auto context = Parameter<Context>(Descriptor::kContext);
  TNode<CodeT> code = CAST(CallRuntime(function_id, context, function));
  GenerateTailCallToJSCode(code, function);
}

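// If |marker| equals |expected_marker|, tail-calls the code returned by
// |function_id|; otherwise control falls through to the caller's next check.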
void LazyBuiltinsAssembler::TailCallRuntimeIfMarkerEquals(
    TNode<Uint32T> marker, OptimizationMarker expected_marker,
    Runtime::FunctionId function_id, TNode<JSFunction> function) {
  Label no_match(this);
  GotoIfNot(Word32Equal(marker,
                        Uint32Constant(static_cast<uint32_t>(expected_marker))),
            &no_match);
  GenerateTailCallToReturnedCode(function_id, function);
  BIND(&no_match);
}

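// Inspects the feedback vector's optimization state: if a compile-optimized
// marker is set, dispatches to the matching compilation runtime function; if
// the slot holds valid optimized code, installs it on |function| and
// tail-calls it. Falls through when neither applies.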
void LazyBuiltinsAssembler::MaybeTailCallOptimizedCodeSlot(
    TNode<JSFunction> function, TNode<FeedbackVector> feedback_vector) {
  Label fallthrough(this), may_have_optimized_code(this);

  TNode<Uint32T> optimization_state =
      LoadObjectField<Uint32T>(feedback_vector, FeedbackVector::kFlagsOffset);

  // Fall through if no optimization trigger or optimized code.
  GotoIfNot(IsSetWord32(
                optimization_state,
                FeedbackVector::kHasOptimizedCodeOrCompileOptimizedMarkerMask),
            &fallthrough);

  GotoIfNot(IsSetWord32(optimization_state,
                        FeedbackVector::kHasCompileOptimizedMarker),
            &may_have_optimized_code);

  // TODO(ishell): introduce Runtime::kHandleOptimizationMarker and check
  // all these marker values there.
  TNode<Uint32T> marker =
      DecodeWord32<FeedbackVector::OptimizationMarkerBits>(optimization_state);
  TailCallRuntimeIfMarkerEquals(
      marker, OptimizationMarker::kCompileTurbofan_NotConcurrent,
      Runtime::kCompileTurbofan_NotConcurrent, function);
  TailCallRuntimeIfMarkerEquals(marker,
                                OptimizationMarker::kCompileTurbofan_Concurrent,
                                Runtime::kCompileTurbofan_Concurrent, function);

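  // The compile-optimized marker must have matched one of the cases above.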
  Unreachable();
  BIND(&may_have_optimized_code);
  {
    Label heal_optimized_code_slot(this);
    TNode<MaybeObject> maybe_optimized_code_entry = LoadMaybeWeakObjectField(
        feedback_vector, FeedbackVector::kMaybeOptimizedCodeOffset);

    // The optimized code slot is a weak reference to a CodeT object.
    TNode<CodeT> optimized_code = CAST(GetHeapObjectAssumeWeak(
        maybe_optimized_code_entry, &heal_optimized_code_slot));

    // Check if the optimized code is marked for deopt. If it is, call the
    // runtime to clear it.
    TNode<CodeDataContainer> code_data_container =
        CodeDataContainerFromCodeT(optimized_code);
    TNode<Int32T> code_kind_specific_flags = LoadObjectField<Int32T>(
        code_data_container, CodeDataContainer::kKindSpecificFlagsOffset);
    GotoIf(IsSetWord32<Code::MarkedForDeoptimizationField>(
               code_kind_specific_flags),
           &heal_optimized_code_slot);

    // The optimized code is good; install it on the closure and tail-call it.
    StoreObjectField(function, JSFunction::kCodeOffset, optimized_code);
    Comment("MaybeTailCallOptimizedCodeSlot:: GenerateTailCallToJSCode");
    GenerateTailCallToJSCode(optimized_code, function);

    // The optimized code slot contains deoptimized code, or the code is
    // cleared and the optimization marker hasn't been updated. Evict the
    // code, update the marker, and re-enter the closure's code.
    BIND(&heal_optimized_code_slot);
    GenerateTailCallToReturnedCode(Runtime::kHealOptimizedCodeSlot, function);
  }

  // Fall-through if the optimized code cell is clear and there is no
  // optimization marker.
  BIND(&fallthrough);
}

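// Lazy-compilation dispatch: reuses code already attached to the
// SharedFunctionInfo (and any optimized or baseline code reachable through
// the feedback cell) where possible, and falls back to Runtime::kCompileLazy
// otherwise.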
void LazyBuiltinsAssembler::CompileLazy(TNode<JSFunction> function) {
  // First lookup code, maybe we don't need to compile!
  Label compile_function(this, Label::kDeferred);

  // Check the code object for the SFI. If SFI's code entry points to
  // CompileLazy, then we need to lazy compile regardless of the function or
  // feedback vector marker.
  TNode<SharedFunctionInfo> shared =
      CAST(LoadObjectField(function, JSFunction::kSharedFunctionInfoOffset));
  TVARIABLE(Uint16T, sfi_data_type);
  TNode<CodeT> sfi_code =
      GetSharedFunctionInfoCode(shared, &sfi_data_type, &compile_function);

  TNode<HeapObject> feedback_cell_value = LoadFeedbackCellValue(function);

  // If the feedback cell isn't initialized, compile the function.
  GotoIf(IsUndefined(feedback_cell_value), &compile_function);

  Label maybe_use_sfi_code(this);
  // If there is no feedback, don't check for optimized code.
  GotoIf(HasInstanceType(feedback_cell_value, CLOSURE_FEEDBACK_CELL_ARRAY_TYPE),
         &maybe_use_sfi_code);

  // If it isn't undefined or a fixed array, it must be a feedback vector.
  CSA_DCHECK(this, IsFeedbackVector(feedback_cell_value));

  // Is there an optimization marker or optimized code in the feedback vector?
  MaybeTailCallOptimizedCodeSlot(function, CAST(feedback_cell_value));
  Goto(&maybe_use_sfi_code);

  // At this point we have a candidate Code object. It's *not* a cached
  // optimized Code object (we'd have tail-called it above). A usual case would
  // be the InterpreterEntryTrampoline to start executing existing bytecode.
  BIND(&maybe_use_sfi_code);
  CSA_DCHECK(this, TaggedNotEqual(sfi_code, HeapConstant(BUILTIN_CODE(
                                                isolate(), CompileLazy))));
  StoreObjectField(function, JSFunction::kCodeOffset, sfi_code);

  Label tailcall_code(this);
  Label baseline(this);

  TVARIABLE(CodeT, code);

  // Check if we have baseline code.
  GotoIf(InstanceTypeEqual(sfi_data_type.value(), CODET_TYPE), &baseline);

  code = sfi_code;
  Goto(&tailcall_code);

  BIND(&baseline);
  // Ensure we have a feedback vector.
  code = Select<CodeT>(
      IsFeedbackVector(feedback_cell_value), [=]() { return sfi_code; },
      [=]() {
        return CAST(CallRuntime(Runtime::kInstallBaselineCode,
                                Parameter<Context>(Descriptor::kContext),
                                function));
      });
  Goto(&tailcall_code);
  BIND(&tailcall_code);
  // Jump to the selected code entry.
  GenerateTailCallToJSCode(code.value(), function);

  BIND(&compile_function);
  GenerateTailCallToReturnedCode(Runtime::kCompileLazy, function);
}

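// Builtin entry point: runs the CompileLazy dispatch above on the call
// target.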
TF_BUILTIN(CompileLazy, LazyBuiltinsAssembler) {
  auto function = Parameter<JSFunction>(Descriptor::kTarget);

  CompileLazy(function);
}

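// Resets the function's code slot back to the CompileLazy builtin and
// tail-calls it, so the next invocation re-enters the lazy-compilation path.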
TF_BUILTIN(CompileLazyDeoptimizedCode, LazyBuiltinsAssembler) {
  auto function = Parameter<JSFunction>(Descriptor::kTarget);

  TNode<CodeT> code = HeapConstant(BUILTIN_CODE(isolate(), CompileLazy));
  // Set the code slot inside the JSFunction to CompileLazy.
  StoreObjectField(function, JSFunction::kCodeOffset, code);
  GenerateTailCallToJSCode(code, function);
}

}  // namespace internal
}  // namespace v8