// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_JS_FUNCTION_INL_H_
#define V8_OBJECTS_JS_FUNCTION_INL_H_

#include "src/objects/js-function.h"

// Include other inline headers *after* including js-function.h, such that e.g.
// the definition of JSFunction is available (and this comment prevents
// clang-format from merging that include into the following ones).
#include "src/diagnostics/code-tracer.h"
#include "src/ic/ic.h"
#include "src/init/bootstrapper.h"
#include "src/objects/feedback-cell-inl.h"
#include "src/objects/map-updater.h"
#include "src/objects/shared-function-info-inl.h"

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"

namespace v8 {
namespace internal {

#include "torque-generated/src/objects/js-function-tq-inl.inc"

28
TQ_OBJECT_CONSTRUCTORS_IMPL(JSFunctionOrBoundFunctionOrWrappedFunction)
29
TQ_OBJECT_CONSTRUCTORS_IMPL(JSBoundFunction)
30
TQ_OBJECT_CONSTRUCTORS_IMPL(JSWrappedFunction)
31
TQ_OBJECT_CONSTRUCTORS_IMPL(JSFunction)
32 33

ACCESSORS(JSFunction, raw_feedback_cell, FeedbackCell, kFeedbackCellOffset)
34 35
RELEASE_ACQUIRE_ACCESSORS(JSFunction, raw_feedback_cell, FeedbackCell,
                          kFeedbackCellOffset)
36 37 38 39 40 41 42 43 44 45 46

FeedbackVector JSFunction::feedback_vector() const {
  // Only valid once the feedback cell holds a FeedbackVector; callers must
  // check has_feedback_vector() first.
  DCHECK(has_feedback_vector());
  auto cell_value = raw_feedback_cell().value();
  return FeedbackVector::cast(cell_value);
}

ClosureFeedbackCellArray JSFunction::closure_feedback_cell_array() const {
  // Only valid while the feedback cell still holds a
  // ClosureFeedbackCellArray; see has_closure_feedback_cell_array().
  DCHECK(has_closure_feedback_cell_array());
  auto cell_value = raw_feedback_cell().value();
  return ClosureFeedbackCellArray::cast(cell_value);
}

47
void JSFunction::reset_tiering_state() {
48
  DCHECK(has_feedback_vector());
49
  feedback_vector().reset_tiering_state();
50 51
}

52
bool JSFunction::ChecksTieringState() { return code().checks_tiering_state(); }
53 54 55 56

void JSFunction::CompleteInobjectSlackTrackingIfActive() {
  if (!has_prototype_slot()) return;
  if (has_initial_map() && initial_map().IsInobjectSlackTrackingInProgress()) {
57
    MapUpdater::CompleteInobjectSlackTracking(GetIsolate(), initial_map());
58 59 60
  }
}

61 62
template <typename IsolateT>
AbstractCode JSFunction::abstract_code(IsolateT* isolate) {
63
  if (ActiveTierIsIgnition()) {
64
    return AbstractCode::cast(shared().GetBytecodeArray(isolate));
65
  } else {
66
    return AbstractCode::cast(FromCodeT(code(kAcquireLoad)));
67 68 69 70 71
  }
}

int JSFunction::length() { return shared().length(); }

72 73
ACCESSORS_RELAXED(JSFunction, code, CodeT, kCodeOffset)
RELEASE_ACQUIRE_ACCESSORS(JSFunction, code, CodeT, kCodeOffset)
74

#ifdef V8_EXTERNAL_CODE_SPACE
// Convenience overload that converts Code to CodeT before the release store.
void JSFunction::set_code(Code code, ReleaseStoreTag, WriteBarrierMode mode) {
  set_code(ToCodeT(code), kReleaseStore, mode);
}
#endif

81 82
Address JSFunction::code_entry_point() const {
  if (V8_EXTERNAL_CODE_SPACE_BOOL) {
83
    return CodeDataContainer::cast(code()).code_entry_point();
84 85 86 87
  } else {
    return code().InstructionStart();
  }
}
88 89 90

// TODO(ishell): Why relaxed read but release store?
DEF_GETTER(JSFunction, shared, SharedFunctionInfo) {
  // Delegates to the relaxed getter below.
  return shared(cage_base, kRelaxedLoad);
}

DEF_RELAXED_GETTER(JSFunction, shared, SharedFunctionInfo) {
  return TaggedField<SharedFunctionInfo,
                     kSharedFunctionInfoOffset>::Relaxed_Load(cage_base, *this);
}

// Stores the SharedFunctionInfo field. The release store pairs with the
// acquire load (ACQUIRE_READ_FIELD of kSharedFunctionInfoOffset) in
// NeedsResetDueToFlushedBytecode(), which may run on a concurrent thread.
void JSFunction::set_shared(SharedFunctionInfo value, WriteBarrierMode mode) {
  // Release semantics to support acquire read in NeedsResetDueToFlushedBytecode
  RELEASE_WRITE_FIELD(*this, kSharedFunctionInfoOffset, value);
  CONDITIONAL_WRITE_BARRIER(*this, kSharedFunctionInfoOffset, value, mode);
}

105 106 107 108 109 110
TieringState JSFunction::tiering_state() const {
  if (!has_feedback_vector()) return TieringState::kNone;
  return feedback_vector().tiering_state();
}

// Records a tiering request on the feedback vector. Setting a non-None state
// requires that the installed code checks the tiering state.
void JSFunction::set_tiering_state(TieringState state) {
  DCHECK(has_feedback_vector());
  DCHECK(IsNone(state) || ChecksTieringState());
  feedback_vector().set_tiering_state(state);
}

116 117 118 119 120 121 122 123 124 125
TieringState JSFunction::osr_tiering_state() {
  DCHECK(has_feedback_vector());
  return feedback_vector().osr_tiering_state();
}

void JSFunction::set_osr_tiering_state(TieringState marker) {
  // OSR tiering state lives on the feedback vector, which must exist.
  DCHECK(has_feedback_vector());
  FeedbackVector vector = feedback_vector();
  vector.set_osr_tiering_state(marker);
}

126 127 128 129 130 131 132 133 134 135 136 137 138 139
bool JSFunction::has_feedback_vector() const {
  return shared().is_compiled() &&
         raw_feedback_cell().value().IsFeedbackVector();
}

bool JSFunction::has_closure_feedback_cell_array() const {
  // A compiled function that has not yet allocated a full FeedbackVector
  // holds a ClosureFeedbackCellArray in its feedback cell instead.
  if (!shared().is_compiled()) return false;
  return raw_feedback_cell().value().IsClosureFeedbackCellArray();
}

Context JSFunction::context() {
  // Ordinary (non-relaxed) load of the context slot.
  Context ctx = TaggedField<Context, kContextOffset>::load(*this);
  return ctx;
}

140 141 142 143
DEF_RELAXED_GETTER(JSFunction, context, Context) {
  return TaggedField<Context, kContextOffset>::Relaxed_Load(cage_base, *this);
}

144 145 146 147 148 149 150 151 152 153
bool JSFunction::has_context() const {
  return TaggedField<HeapObject, kContextOffset>::load(*this).IsContext();
}

JSGlobalProxy JSFunction::global_proxy() { return context().global_proxy(); }

NativeContext JSFunction::native_context() {
  // The native context is reached through the function's context.
  Context ctx = context();
  return ctx.native_context();
}

154 155 156
RELEASE_ACQUIRE_ACCESSORS_CHECKED(JSFunction, prototype_or_initial_map,
                                  HeapObject, kPrototypeOrInitialMapOffset,
                                  map().has_prototype_slot())
157 158

// Whether this function's map reserves a prototype slot.
DEF_GETTER(JSFunction, has_prototype_slot, bool) {
  return map(cage_base).has_prototype_slot();
}

DEF_GETTER(JSFunction, initial_map, Map) {
  // Only valid when has_initial_map() holds; otherwise the slot may contain
  // the prototype object instead of a Map.
  return Map::cast(prototype_or_initial_map(cage_base, kAcquireLoad));
}

DEF_GETTER(JSFunction, has_initial_map, bool) {
  DCHECK(has_prototype_slot(cage_base));
  // The slot holds a Map only once the initial map has been installed.
  return prototype_or_initial_map(cage_base, kAcquireLoad).IsMap(cage_base);
}

DEF_GETTER(JSFunction, has_instance_prototype, bool) {
  DCHECK(has_prototype_slot(cage_base));
  // Either an initial map exists (whose prototype applies), or the slot
  // holds the prototype directly (anything but the hole).
  return has_initial_map(cage_base) ||
         !prototype_or_initial_map(cage_base, kAcquireLoad)
              .IsTheHole(GetReadOnlyRoots(cage_base));
}

DEF_GETTER(JSFunction, has_prototype, bool) {
  DCHECK(has_prototype_slot(cage_base));
  return map(cage_base).has_non_instance_prototype() ||
         has_instance_prototype(cage_base);
}

DEF_GETTER(JSFunction, has_prototype_property, bool) {
  // Constructors with a prototype slot, as well as generator functions,
  // expose a 'prototype' property.
  return (has_prototype_slot(cage_base) && IsConstructor(cage_base)) ||
         IsGeneratorFunction(shared(cage_base).kind());
}

DEF_GETTER(JSFunction, PrototypeRequiresRuntimeLookup, bool) {
  return !has_prototype_property(cage_base) ||
         map(cage_base).has_non_instance_prototype();
}

DEF_GETTER(JSFunction, instance_prototype, HeapObject) {
  DCHECK(has_instance_prototype(cage_base));
  if (has_initial_map(cage_base)) {
    return initial_map(cage_base).prototype(cage_base);
  }
  // When there is no initial map and the prototype is a JSReceiver, the
  // initial map field is used for the prototype field.
  return HeapObject::cast(prototype_or_initial_map(cage_base, kAcquireLoad));
}

DEF_GETTER(JSFunction, prototype, Object) {
  DCHECK(has_prototype(cage_base));
  // If the function's prototype property has been set to a non-JSReceiver
  // value, that value is stored in the constructor field of the map.
  if (map(cage_base).has_non_instance_prototype()) {
    Object prototype = map(cage_base).GetConstructor(cage_base);
    // The map must have a prototype in that field, not a back pointer.
    DCHECK(!prototype.IsMap(cage_base));
    DCHECK(!prototype.IsFunctionTemplateInfo(cage_base));
    return prototype;
  }
  return instance_prototype(cage_base);
}

bool JSFunction::is_compiled() const {
219
  return code(kAcquireLoad).builtin_id() != Builtin::kCompileLazy &&
220 221 222
         shared().is_compiled();
}

223 224 225
bool JSFunction::ShouldFlushBaselineCode(
    base::EnumSet<CodeFlushMode> code_flush_mode) {
  if (!IsBaselineCodeFlushingEnabled(code_flush_mode)) return false;
226 227 228 229 230 231 232 233 234 235 236 237
  // Do a raw read for shared and code fields here since this function may be
  // called on a concurrent thread. JSFunction itself should be fully
  // initialized here but the SharedFunctionInfo, Code objects may not be
  // initialized. We read using acquire loads to defend against that.
  Object maybe_shared = ACQUIRE_READ_FIELD(*this, kSharedFunctionInfoOffset);
  if (!maybe_shared.IsSharedFunctionInfo()) return false;

  // See crbug.com/v8/11972 for more details on acquire / release semantics for
  // code field. We don't use release stores when copying code pointers from
  // SFI / FV to JSFunction but it is safe in practice.
  Object maybe_code = ACQUIRE_READ_FIELD(*this, kCodeOffset);
  if (!maybe_code.IsCodeT()) return false;
238
  CodeT code = CodeT::cast(maybe_code);
239 240 241
  if (code.kind() != CodeKind::BASELINE) return false;

  SharedFunctionInfo shared = SharedFunctionInfo::cast(maybe_shared);
242
  return shared.ShouldFlushCode(code_flush_mode);
243 244
}

245 246
bool JSFunction::NeedsResetDueToFlushedBytecode() {
  // Do a raw read for shared and code fields here since this function may be
247 248 249
  // called on a concurrent thread. JSFunction itself should be fully
  // initialized here but the SharedFunctionInfo, Code objects may not be
  // initialized. We read using acquire loads to defend against that.
250
  Object maybe_shared = ACQUIRE_READ_FIELD(*this, kSharedFunctionInfoOffset);
251
  if (!maybe_shared.IsSharedFunctionInfo()) return false;
252

253
  Object maybe_code = ACQUIRE_READ_FIELD(*this, kCodeOffset);
254
  if (!maybe_code.IsCodeT()) return false;
255
  CodeT code = CodeT::cast(maybe_code);
256 257

  SharedFunctionInfo shared = SharedFunctionInfo::cast(maybe_shared);
258
  return !shared.is_compiled() && code.builtin_id() != Builtin::kCompileLazy;
259 260
}

261
bool JSFunction::NeedsResetDueToFlushedBaselineCode() {
262
  return code().kind() == CodeKind::BASELINE && !shared().HasBaselineCode();
263 264 265
}

void JSFunction::ResetIfCodeFlushed(
266 267 268
    base::Optional<std::function<void(HeapObject object, ObjectSlot slot,
                                      HeapObject target)>>
        gc_notify_updated_slot) {
269 270 271 272
  const bool kBytecodeCanFlush = FLAG_flush_bytecode || FLAG_stress_snapshot;
  const bool kBaselineCodeCanFlush =
      FLAG_flush_baseline_code || FLAG_stress_snapshot;
  if (!kBytecodeCanFlush && !kBaselineCodeCanFlush) return;
273

274 275
  DCHECK_IMPLIES(NeedsResetDueToFlushedBytecode(), kBytecodeCanFlush);
  if (kBytecodeCanFlush && NeedsResetDueToFlushedBytecode()) {
276 277
    // Bytecode was flushed and function is now uncompiled, reset JSFunction
    // by setting code to CompileLazy and clearing the feedback vector.
278
    set_code(*BUILTIN_CODE(GetIsolate(), CompileLazy));
279
    raw_feedback_cell().reset_feedback_vector(gc_notify_updated_slot);
280 281 282
    return;
  }

283 284
  DCHECK_IMPLIES(NeedsResetDueToFlushedBaselineCode(), kBaselineCodeCanFlush);
  if (kBaselineCodeCanFlush && NeedsResetDueToFlushedBaselineCode()) {
285
    // Flush baseline code from the closure if required
286
    set_code(*BUILTIN_CODE(GetIsolate(), InterpreterEntryTrampoline));
287 288 289 290 291 292 293 294 295
  }
}

}  // namespace internal
}  // namespace v8

#include "src/objects/object-macros-undef.h"

#endif  // V8_OBJECTS_JS_FUNCTION_INL_H_