// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_SHARED_FUNCTION_INFO_INL_H_
#define V8_OBJECTS_SHARED_FUNCTION_INFO_INL_H_

#include "src/base/macros.h"
#include "src/base/platform/mutex.h"
#include "src/common/globals.h"
#include "src/handles/handles-inl.h"
#include "src/heap/heap-write-barrier-inl.h"
#include "src/objects/debug-objects-inl.h"
#include "src/objects/feedback-vector-inl.h"
#include "src/objects/scope-info-inl.h"
#include "src/objects/script-inl.h"
#include "src/objects/shared-function-info.h"
#include "src/objects/templates-inl.h"

#if V8_ENABLE_WEBASSEMBLY
#include "src/wasm/wasm-module.h"
#include "src/wasm/wasm-objects.h"
#endif  // V8_ENABLE_WEBASSEMBLY

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"

namespace v8 {
namespace internal {

#include "torque-generated/src/objects/shared-function-info-tq-inl.inc"

33
TQ_OBJECT_CONSTRUCTORS_IMPL(PreparseData)
34

35 36
int PreparseData::inner_start_offset() const {
  return InnerOffset(data_length());
37 38
}

39 40
ObjectSlot PreparseData::inner_data_start() const {
  return RawField(inner_start_offset());
41 42
}

43 44 45 46 47 48
void PreparseData::clear_padding() {
  int data_end_offset = kDataStartOffset + data_length();
  int padding_size = inner_start_offset() - data_end_offset;
  DCHECK_LE(0, padding_size);
  if (padding_size == 0) return;
  memset(reinterpret_cast<void*>(address() + data_end_offset), 0, padding_size);
49 50
}

51 52 53 54
byte PreparseData::get(int index) const {
  DCHECK_LE(0, index);
  DCHECK_LT(index, data_length());
  int offset = kDataStartOffset + index * kByteSize;
55
  return ReadField<byte>(offset);
56 57 58 59 60 61
}

void PreparseData::set(int index, byte value) {
  DCHECK_LE(0, index);
  DCHECK_LT(index, data_length());
  int offset = kDataStartOffset + index * kByteSize;
62
  WriteField<byte>(offset, value);
63 64 65 66 67
}

void PreparseData::copy_in(int index, const byte* buffer, int length) {
  DCHECK(index >= 0 && length >= 0 && length <= kMaxInt - index &&
         index + length <= this->data_length());
68
  Address dst_addr = field_address(kDataStartOffset + index * kByteSize);
69
  memcpy(reinterpret_cast<void*>(dst_addr), buffer, length);
70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89
}

PreparseData PreparseData::get_child(int index) const {
  return PreparseData::cast(get_child_raw(index));
}

Object PreparseData::get_child_raw(int index) const {
  DCHECK_LE(0, index);
  DCHECK_LT(index, this->children_length());
  int offset = inner_start_offset() + index * kTaggedSize;
  return RELAXED_READ_FIELD(*this, offset);
}

void PreparseData::set_child(int index, PreparseData value,
                             WriteBarrierMode mode) {
  DCHECK_LE(0, index);
  DCHECK_LT(index, this->children_length());
  int offset = inner_start_offset() + index * kTaggedSize;
  RELAXED_WRITE_FIELD(*this, offset, value);
  CONDITIONAL_WRITE_BARRIER(*this, offset, value, mode);
90
}
91

92 93 94
TQ_OBJECT_CONSTRUCTORS_IMPL(UncompiledData)
TQ_OBJECT_CONSTRUCTORS_IMPL(UncompiledDataWithoutPreparseData)
TQ_OBJECT_CONSTRUCTORS_IMPL(UncompiledDataWithPreparseData)
95

96 97
TQ_OBJECT_CONSTRUCTORS_IMPL(BaselineData)

98
TQ_OBJECT_CONSTRUCTORS_IMPL(InterpreterData)
99

100
ACCESSORS(InterpreterData, raw_interpreter_trampoline, CodeT,
101
          kInterpreterTrampolineOffset)
102

103 104 105 106 107 108 109 110 111
DEF_GETTER(InterpreterData, interpreter_trampoline, Code) {
  return FromCodeT(raw_interpreter_trampoline(cage_base));
}

void InterpreterData::set_interpreter_trampoline(Code code,
                                                 WriteBarrierMode mode) {
  set_raw_interpreter_trampoline(ToCodeT(code), mode);
}

112
TQ_OBJECT_CONSTRUCTORS_IMPL(SharedFunctionInfo)
113
NEVER_READ_ONLY_SPACE_IMPL(SharedFunctionInfo)
114 115
DEFINE_DEOPT_ELEMENT_ACCESSORS(SharedFunctionInfo, Object)

116 117
RELEASE_ACQUIRE_ACCESSORS(SharedFunctionInfo, function_data, Object,
                          kFunctionDataOffset)
118 119
RELEASE_ACQUIRE_ACCESSORS(SharedFunctionInfo, name_or_scope_info, Object,
                          kNameOrScopeInfoOffset)
120 121
RELEASE_ACQUIRE_ACCESSORS(SharedFunctionInfo, script_or_debug_info, HeapObject,
                          kScriptOrDebugInfoOffset)
122

123 124 125
RENAME_TORQUE_ACCESSORS(SharedFunctionInfo,
                        raw_outer_scope_info_or_feedback_metadata,
                        outer_scope_info_or_feedback_metadata, HeapObject)
126 127 128 129 130 131 132 133
DEF_ACQUIRE_GETTER(SharedFunctionInfo,
                   raw_outer_scope_info_or_feedback_metadata, HeapObject) {
  HeapObject value =
      TaggedField<HeapObject, kOuterScopeInfoOrFeedbackMetadataOffset>::
          Acquire_Load(cage_base, *this);
  return value;
}

134 135 136 137 138 139
RENAME_UINT16_TORQUE_ACCESSORS(SharedFunctionInfo,
                               internal_formal_parameter_count,
                               formal_parameter_count)
RENAME_UINT16_TORQUE_ACCESSORS(SharedFunctionInfo, raw_function_token_offset,
                               function_token_offset)

140 141 142 143 144 145 146 147
RELAXED_INT32_ACCESSORS(SharedFunctionInfo, flags, kFlagsOffset)
int32_t SharedFunctionInfo::relaxed_flags() const {
  return flags(kRelaxedLoad);
}
void SharedFunctionInfo::set_relaxed_flags(int32_t flags) {
  return set_flags(flags, kRelaxedStore);
}

148
UINT8_ACCESSORS(SharedFunctionInfo, flags2, kFlags2Offset)
149

150
bool SharedFunctionInfo::HasSharedName() const {
151
  Object value = name_or_scope_info(kAcquireLoad);
152 153
  if (value.IsScopeInfo()) {
    return ScopeInfo::cast(value).HasSharedFunctionName();
154 155
  }
  return value != kNoSharedNameSentinel;
156 157
}

158
String SharedFunctionInfo::Name() const {
159
  if (!HasSharedName()) return GetReadOnlyRoots().empty_string();
160
  Object value = name_or_scope_info(kAcquireLoad);
161 162 163
  if (value.IsScopeInfo()) {
    if (ScopeInfo::cast(value).HasFunctionName()) {
      return String::cast(ScopeInfo::cast(value).FunctionName());
164
    }
165
    return GetReadOnlyRoots().empty_string();
166 167
  }
  return String::cast(value);
168 169
}

170
void SharedFunctionInfo::SetName(String name) {
171
  Object maybe_scope_info = name_or_scope_info(kAcquireLoad);
172 173
  if (maybe_scope_info.IsScopeInfo()) {
    ScopeInfo::cast(maybe_scope_info).SetFunctionName(name);
174
  } else {
175
    DCHECK(maybe_scope_info.IsString() ||
176
           maybe_scope_info == kNoSharedNameSentinel);
177
    set_name_or_scope_info(name, kReleaseStore);
178
  }
179 180 181
  UpdateFunctionMapIndex();
}

182
bool SharedFunctionInfo::is_script() const {
183
  return scope_info(kAcquireLoad).is_script_scope() &&
184 185 186 187 188
         Script::cast(script()).compilation_type() ==
             Script::COMPILATION_TYPE_HOST;
}

bool SharedFunctionInfo::needs_script_context() const {
189
  return is_script() && scope_info(kAcquireLoad).ContextLocalCount() > 0;
190 191
}

192 193
template <typename IsolateT>
AbstractCode SharedFunctionInfo::abstract_code(IsolateT* isolate) {
194 195
  // TODO(v8:11429): Decide if this return bytecode or baseline code, when the
  // latter is present.
196
  if (HasBytecodeArray()) {
197
    return AbstractCode::cast(GetBytecodeArray(isolate));
198
  } else {
199
    return AbstractCode::cast(GetCode());
200 201 202
  }
}

203 204 205 206 207 208 209 210 211
int SharedFunctionInfo::function_token_position() const {
  int offset = raw_function_token_offset();
  if (offset == kFunctionTokenOutOfRange) {
    return kNoSourcePosition;
  } else {
    return StartPosition() - offset;
  }
}

212 213
template <typename IsolateT>
bool SharedFunctionInfo::AreSourcePositionsAvailable(IsolateT* isolate) const {
214 215 216 217 218 219 220
  if (FLAG_enable_lazy_source_positions) {
    return !HasBytecodeArray() ||
           GetBytecodeArray(isolate).HasSourcePositionTable();
  }
  return true;
}

221
template <typename IsolateT>
222
SharedFunctionInfo::Inlineability SharedFunctionInfo::GetInlineability(
223
    IsolateT* isolate, bool is_turboprop) const {
224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243
  if (!script().IsScript()) return kHasNoScript;

  if (GetIsolate()->is_precise_binary_code_coverage() &&
      !has_reported_binary_coverage()) {
    // We may miss invocations if this function is inlined.
    return kNeedsBinaryCoverage;
  }

  if (optimization_disabled()) return kHasOptimizationDisabled;

  // Built-in functions are handled by the JSCallReducer.
  if (HasBuiltinId()) return kIsBuiltin;

  if (!IsUserJavaScript()) return kIsNotUserCode;

  // If there is no bytecode array, it is either not compiled or it is compiled
  // with WebAssembly for the asm.js pipeline. In either case we don't want to
  // inline.
  if (!HasBytecodeArray()) return kHasNoBytecode;

244 245 246 247 248
  int max_inlined_size = FLAG_max_inlined_bytecode_size;
  if (is_turboprop) {
    max_inlined_size = max_inlined_size / FLAG_turboprop_inline_scaling_factor;
  }
  if (GetBytecodeArray(isolate).length() > max_inlined_size) {
249 250 251 252 253 254 255 256
    return kExceedsBytecodeLimit;
  }

  if (HasBreakInfo()) return kMayContainBreakPoints;

  return kIsInlineable;
}

257 258 259
BIT_FIELD_ACCESSORS(SharedFunctionInfo, flags2, class_scope_has_private_brand,
                    SharedFunctionInfo::ClassScopeHasPrivateBrandBit)

260 261 262 263
BIT_FIELD_ACCESSORS(SharedFunctionInfo, flags2,
                    has_static_private_methods_or_accessors,
                    SharedFunctionInfo::HasStaticPrivateMethodsOrAccessorsBit)

264
BIT_FIELD_ACCESSORS(SharedFunctionInfo, relaxed_flags, syntax_kind,
265 266
                    SharedFunctionInfo::FunctionSyntaxKindBits)

267
BIT_FIELD_ACCESSORS(SharedFunctionInfo, relaxed_flags, allows_lazy_compilation,
268
                    SharedFunctionInfo::AllowLazyCompilationBit)
269
BIT_FIELD_ACCESSORS(SharedFunctionInfo, relaxed_flags, has_duplicate_parameters,
270 271
                    SharedFunctionInfo::HasDuplicateParametersBit)

272
BIT_FIELD_ACCESSORS(SharedFunctionInfo, relaxed_flags, native,
273
                    SharedFunctionInfo::IsNativeBit)
274
#if V8_ENABLE_WEBASSEMBLY
275
BIT_FIELD_ACCESSORS(SharedFunctionInfo, relaxed_flags, is_asm_wasm_broken,
276
                    SharedFunctionInfo::IsAsmWasmBrokenBit)
277
#endif  // V8_ENABLE_WEBASSEMBLY
278
BIT_FIELD_ACCESSORS(SharedFunctionInfo, relaxed_flags,
279
                    requires_instance_members_initializer,
280
                    SharedFunctionInfo::RequiresInstanceMembersInitializerBit)
281

282 283
BIT_FIELD_ACCESSORS(SharedFunctionInfo, relaxed_flags,
                    name_should_print_as_anonymous,
284
                    SharedFunctionInfo::NameShouldPrintAsAnonymousBit)
285 286
BIT_FIELD_ACCESSORS(SharedFunctionInfo, relaxed_flags,
                    has_reported_binary_coverage,
287 288
                    SharedFunctionInfo::HasReportedBinaryCoverageBit)

289
BIT_FIELD_ACCESSORS(SharedFunctionInfo, relaxed_flags, is_toplevel,
290
                    SharedFunctionInfo::IsTopLevelBit)
291
BIT_FIELD_ACCESSORS(SharedFunctionInfo, relaxed_flags, properties_are_final,
292
                    SharedFunctionInfo::PropertiesAreFinalBit)
293
BIT_FIELD_ACCESSORS(SharedFunctionInfo, relaxed_flags,
294 295
                    private_name_lookup_skips_outer_class,
                    SharedFunctionInfo::PrivateNameLookupSkipsOuterClassBit)
296

297 298 299 300 301
bool SharedFunctionInfo::optimization_disabled() const {
  return disable_optimization_reason() != BailoutReason::kNoReason;
}

BailoutReason SharedFunctionInfo::disable_optimization_reason() const {
302
  return DisabledOptimizationReasonBits::decode(flags(kRelaxedLoad));
303
}
304

305
LanguageMode SharedFunctionInfo::language_mode() const {
306
  STATIC_ASSERT(LanguageModeSize == 2);
307
  return construct_language_mode(IsStrictBit::decode(flags(kRelaxedLoad)));
308 309 310
}

void SharedFunctionInfo::set_language_mode(LanguageMode language_mode) {
311
  STATIC_ASSERT(LanguageModeSize == 2);
312 313 314
  // We only allow language mode transitions that set the same language mode
  // again or go up in the chain:
  DCHECK(is_sloppy(this->language_mode()) || is_strict(language_mode));
315
  int hints = flags(kRelaxedLoad);
316
  hints = IsStrictBit::update(hints, is_strict(language_mode));
317
  set_flags(hints, kRelaxedStore);
318
  UpdateFunctionMapIndex();
319 320 321
}

FunctionKind SharedFunctionInfo::kind() const {
322
  STATIC_ASSERT(FunctionKindBits::kSize == kFunctionKindBitSize);
323
  return FunctionKindBits::decode(flags(kRelaxedLoad));
324 325 326
}

void SharedFunctionInfo::set_kind(FunctionKind kind) {
327
  int hints = flags(kRelaxedLoad);
328
  hints = FunctionKindBits::update(hints, kind);
329
  hints = IsClassConstructorBit::update(hints, IsClassConstructor(kind));
330
  set_flags(hints, kRelaxedStore);
331 332 333
  UpdateFunctionMapIndex();
}

334 335 336 337
bool SharedFunctionInfo::is_wrapped() const {
  return syntax_kind() == FunctionSyntaxKind::kWrapped;
}

338
bool SharedFunctionInfo::construct_as_builtin() const {
339
  return ConstructAsBuiltinBit::decode(flags(kRelaxedLoad));
340 341 342 343 344
}

void SharedFunctionInfo::CalculateConstructAsBuiltin() {
  bool uses_builtins_construct_stub = false;
  if (HasBuiltinId()) {
345
    Builtin id = builtin_id();
346
    if (id != Builtin::kCompileLazy && id != Builtin::kEmptyFunction) {
347 348 349 350 351 352
      uses_builtins_construct_stub = true;
    }
  } else if (IsApiFunction()) {
    uses_builtins_construct_stub = true;
  }

353
  int f = flags(kRelaxedLoad);
354
  f = ConstructAsBuiltinBit::update(f, uses_builtins_construct_stub);
355
  set_flags(f, kRelaxedStore);
356 357
}

358
int SharedFunctionInfo::function_map_index() const {
359
  // Note: Must be kept in sync with the FastNewClosure builtin.
360 361
  int index = Context::FIRST_FUNCTION_MAP_INDEX +
              FunctionMapIndexBits::decode(flags(kRelaxedLoad));
362 363 364 365 366 367 368 369 370 371
  DCHECK_LE(index, Context::LAST_FUNCTION_MAP_INDEX);
  return index;
}

void SharedFunctionInfo::set_function_map_index(int index) {
  STATIC_ASSERT(Context::LAST_FUNCTION_MAP_INDEX <=
                Context::FIRST_FUNCTION_MAP_INDEX + FunctionMapIndexBits::kMax);
  DCHECK_LE(Context::FIRST_FUNCTION_MAP_INDEX, index);
  DCHECK_LE(index, Context::LAST_FUNCTION_MAP_INDEX);
  index -= Context::FIRST_FUNCTION_MAP_INDEX;
372 373
  set_flags(FunctionMapIndexBits::update(flags(kRelaxedLoad), index),
            kRelaxedStore);
374 375
}

376
void SharedFunctionInfo::clear_padding() {
377 378
  memset(reinterpret_cast<void*>(this->address() + kSize), 0,
         kAlignedSize - kSize);
379 380
}

381
void SharedFunctionInfo::UpdateFunctionMapIndex() {
382 383
  int map_index =
      Context::FunctionMapIndex(language_mode(), kind(), HasSharedName());
384
  set_function_map_index(map_index);
385 386 387
}

void SharedFunctionInfo::DontAdaptArguments() {
388
#if V8_ENABLE_WEBASSEMBLY
389
  // TODO(leszeks): Revise this DCHECK now that the code field is gone.
390
  DCHECK(!HasWasmExportedFunctionData());
391
#endif  // V8_ENABLE_WEBASSEMBLY
392 393 394
  set_internal_formal_parameter_count(kDontAdaptArgumentsSentinel);
}

395
bool SharedFunctionInfo::IsInterpreted() const { return HasBytecodeArray(); }
396

397 398
ScopeInfo SharedFunctionInfo::scope_info(AcquireLoadTag tag) const {
  Object maybe_scope_info = name_or_scope_info(tag);
399
  if (maybe_scope_info.IsScopeInfo()) {
400 401
    return ScopeInfo::cast(maybe_scope_info);
  }
402
  return GetReadOnlyRoots().empty_scope_info();
403 404
}

405 406 407 408
ScopeInfo SharedFunctionInfo::scope_info() const {
  return scope_info(kAcquireLoad);
}

409 410
void SharedFunctionInfo::SetScopeInfo(ScopeInfo scope_info,
                                      WriteBarrierMode mode) {
411
  // Move the existing name onto the ScopeInfo.
412
  Object name = name_or_scope_info(kAcquireLoad);
413 414
  if (name.IsScopeInfo()) {
    name = ScopeInfo::cast(name).FunctionName();
415
  }
416
  DCHECK(name.IsString() || name == kNoSharedNameSentinel);
417
  // Only set the function name for function scopes.
418 419 420
  scope_info.SetFunctionName(name);
  if (HasInferredName() && inferred_name().length() != 0) {
    scope_info.SetInferredFunctionName(inferred_name());
421
  }
422
  set_name_or_scope_info(scope_info, kReleaseStore, mode);
423 424 425 426
}

void SharedFunctionInfo::set_raw_scope_info(ScopeInfo scope_info,
                                            WriteBarrierMode mode) {
427 428
  WRITE_FIELD(*this, kNameOrScopeInfoOffset, scope_info);
  CONDITIONAL_WRITE_BARRIER(*this, kNameOrScopeInfoOffset, scope_info, mode);
429 430
}

431
HeapObject SharedFunctionInfo::outer_scope_info() const {
432 433 434 435 436 437
  DCHECK(!is_compiled());
  DCHECK(!HasFeedbackMetadata());
  return raw_outer_scope_info_or_feedback_metadata();
}

bool SharedFunctionInfo::HasOuterScopeInfo() const {
438
  ScopeInfo outer_info;
439
  if (!is_compiled()) {
440
    if (!outer_scope_info().IsScopeInfo()) return false;
441 442
    outer_info = ScopeInfo::cast(outer_scope_info());
  } else {
443 444 445
    ScopeInfo info = scope_info(kAcquireLoad);
    if (!info.HasOuterScopeInfo()) return false;
    outer_info = info.OuterScopeInfo();
446
  }
447
  return !outer_info.IsEmpty();
448 449
}

450
ScopeInfo SharedFunctionInfo::GetOuterScopeInfo() const {
451 452
  DCHECK(HasOuterScopeInfo());
  if (!is_compiled()) return ScopeInfo::cast(outer_scope_info());
453
  return scope_info(kAcquireLoad).OuterScopeInfo();
454 455
}

456
void SharedFunctionInfo::set_outer_scope_info(HeapObject value,
457 458
                                              WriteBarrierMode mode) {
  DCHECK(!is_compiled());
459 460
  DCHECK(raw_outer_scope_info_or_feedback_metadata().IsTheHole());
  DCHECK(value.IsScopeInfo() || value.IsTheHole());
461
  set_raw_outer_scope_info_or_feedback_metadata(value, mode);
462 463 464
}

bool SharedFunctionInfo::HasFeedbackMetadata() const {
465
  return raw_outer_scope_info_or_feedback_metadata().IsFeedbackMetadata();
466 467
}

468 469 470 471
bool SharedFunctionInfo::HasFeedbackMetadata(AcquireLoadTag tag) const {
  return raw_outer_scope_info_or_feedback_metadata(tag).IsFeedbackMetadata();
}

472
FeedbackMetadata SharedFunctionInfo::feedback_metadata() const {
473 474 475 476
  DCHECK(HasFeedbackMetadata());
  return FeedbackMetadata::cast(raw_outer_scope_info_or_feedback_metadata());
}

477 478 479 480 481 482
RELEASE_ACQUIRE_ACCESSORS_CHECKED2(SharedFunctionInfo, feedback_metadata,
                                   FeedbackMetadata,
                                   kOuterScopeInfoOrFeedbackMetadataOffset,
                                   HasFeedbackMetadata(kAcquireLoad),
                                   !HasFeedbackMetadata(kAcquireLoad) &&
                                       value.IsFeedbackMetadata())
483 484

bool SharedFunctionInfo::is_compiled() const {
485
  Object data = function_data(kAcquireLoad);
486
  return data != Smi::FromEnum(Builtin::kCompileLazy) &&
487
         !data.IsUncompiledData();
488 489
}

490 491
template <typename IsolateT>
IsCompiledScope SharedFunctionInfo::is_compiled_scope(IsolateT* isolate) const {
492
  return IsCompiledScope(*this, isolate);
493 494 495
}

IsCompiledScope::IsCompiledScope(const SharedFunctionInfo shared,
496
                                 Isolate* isolate)
497
    : is_compiled_(shared.is_compiled()) {
498
  if (shared.HasBaselineData()) {
499
    retain_code_ = handle(shared.baseline_data(), isolate);
500 501
  } else if (shared.HasBytecodeArray()) {
    retain_code_ = handle(shared.GetBytecodeArray(isolate), isolate);
502 503 504 505 506
  } else {
    retain_code_ = MaybeHandle<HeapObject>();
  }

  DCHECK_IMPLIES(!retain_code_.is_null(), is_compiled());
507 508 509 510
}

IsCompiledScope::IsCompiledScope(const SharedFunctionInfo shared,
                                 LocalIsolate* isolate)
511
    : is_compiled_(shared.is_compiled()) {
512 513 514
  if (shared.HasBaselineData()) {
    retain_code_ = isolate->heap()->NewPersistentHandle(shared.baseline_data());
  } else if (shared.HasBytecodeArray()) {
515 516 517 518 519 520 521
    retain_code_ =
        isolate->heap()->NewPersistentHandle(shared.GetBytecodeArray(isolate));
  } else {
    retain_code_ = MaybeHandle<HeapObject>();
  }

  DCHECK_IMPLIES(!retain_code_.is_null(), is_compiled());
522 523
}

524
bool SharedFunctionInfo::has_simple_parameters() {
525
  return scope_info(kAcquireLoad).HasSimpleParameters();
526 527
}

528 529 530 531 532
bool SharedFunctionInfo::CanCollectSourcePosition(Isolate* isolate) {
  return FLAG_enable_lazy_source_positions && HasBytecodeArray() &&
         !GetBytecodeArray(isolate).HasSourcePositionTable();
}

533
bool SharedFunctionInfo::IsApiFunction() const {
534
  return function_data(kAcquireLoad).IsFunctionTemplateInfo();
535 536
}

537
FunctionTemplateInfo SharedFunctionInfo::get_api_func_data() const {
538
  DCHECK(IsApiFunction());
539
  return FunctionTemplateInfo::cast(function_data(kAcquireLoad));
540 541 542
}

bool SharedFunctionInfo::HasBytecodeArray() const {
543
  Object data = function_data(kAcquireLoad);
544 545
  return data.IsBytecodeArray() || data.IsInterpreterData() ||
         data.IsBaselineData();
546 547
}

548 549 550
template <typename IsolateT>
BytecodeArray SharedFunctionInfo::GetBytecodeArray(IsolateT* isolate) const {
  SharedMutexGuardIfOffThread<IsolateT, base::kShared> mutex_guard(
551 552
      GetIsolate()->shared_function_info_access(), isolate);

553
  DCHECK(HasBytecodeArray());
554 555
  if (HasDebugInfo() && GetDebugInfo().HasInstrumentedBytecodeArray()) {
    return GetDebugInfo().OriginalBytecodeArray();
556 557
  }

558 559 560
  return GetActiveBytecodeArray();
}

561 562 563 564 565 566 567 568
DEF_GETTER(BaselineData, baseline_code, Code) {
  return FromCodeT(TorqueGeneratedClass::baseline_code(cage_base));
}

void BaselineData::set_baseline_code(Code code, WriteBarrierMode mode) {
  return TorqueGeneratedClass::set_baseline_code(ToCodeT(code), mode);
}

569 570
BytecodeArray BaselineData::GetActiveBytecodeArray() const {
  Object data = this->data();
571 572
  if (data.IsBytecodeArray()) {
    return BytecodeArray::cast(data);
573
  } else {
574 575
    DCHECK(data.IsInterpreterData());
    return InterpreterData::cast(data).bytecode_array();
576 577 578
  }
}

579 580 581 582 583 584 585 586 587 588
void BaselineData::SetActiveBytecodeArray(BytecodeArray bytecode) {
  Object data = this->data();
  if (data.IsBytecodeArray()) {
    set_data(bytecode);
  } else {
    DCHECK(data.IsInterpreterData());
    InterpreterData::cast(data).set_bytecode_array(bytecode);
  }
}

589
BytecodeArray SharedFunctionInfo::GetActiveBytecodeArray() const {
590 591 592
  Object data = function_data(kAcquireLoad);
  if (data.IsBytecodeArray()) {
    return BytecodeArray::cast(data);
593 594
  } else if (data.IsBaselineData()) {
    return baseline_data().GetActiveBytecodeArray();
595
  } else {
596 597
    DCHECK(data.IsInterpreterData());
    return InterpreterData::cast(data).bytecode_array();
598
  }
599 600
}

601
void SharedFunctionInfo::SetActiveBytecodeArray(BytecodeArray bytecode) {
602 603 604
  Object data = function_data(kAcquireLoad);
  if (data.IsBytecodeArray()) {
    set_function_data(bytecode, kReleaseStore);
605 606
  } else if (data.IsBaselineData()) {
    baseline_data().SetActiveBytecodeArray(bytecode);
607
  } else {
608
    DCHECK(data.IsInterpreterData());
609
    interpreter_data().set_bytecode_array(bytecode);
610 611 612
  }
}

613
void SharedFunctionInfo::set_bytecode_array(BytecodeArray bytecode) {
614
  DCHECK(function_data(kAcquireLoad) == Smi::FromEnum(Builtin::kCompileLazy) ||
615
         HasUncompiledData());
616
  set_function_data(bytecode, kReleaseStore);
617 618
}

619
bool SharedFunctionInfo::ShouldFlushCode(
620 621
    base::EnumSet<CodeFlushMode> code_flush_mode) {
  if (IsFlushingDisabled(code_flush_mode)) return false;
622

623 624
  // TODO(rmcilroy): Enable bytecode flushing for resumable functions.
  if (IsResumableFunction(kind()) || !allows_lazy_compilation()) {
625 626 627 628 629 630
    return false;
  }

  // Get a snapshot of the function data field, and if it is a bytecode array,
  // check if it is old. Note, this is done this way since this function can be
  // called by the concurrent marker.
631
  Object data = function_data(kAcquireLoad);
632
  if (data.IsBaselineData()) {
633 634
    // If baseline code flushing isn't enabled and we have baseline data on SFI
    // we cannot flush baseline / bytecode.
635
    if (!IsBaselineCodeFlushingEnabled(code_flush_mode)) return false;
636 637
    data =
        ACQUIRE_READ_FIELD(BaselineData::cast(data), BaselineData::kDataOffset);
638 639 640 641
  } else if (!IsByteCodeFlushingEnabled(code_flush_mode)) {
    // If bytecode flushing isn't enabled and there is no baseline code there is
    // nothing to flush.
    return false;
642
  }
643
  if (!data.IsBytecodeArray()) return false;
644

645
  if (IsStressFlushingEnabled(code_flush_mode)) return true;
646 647 648

  BytecodeArray bytecode = BytecodeArray::cast(data);

649
  return bytecode.IsOld();
650 651
}

652
Code SharedFunctionInfo::InterpreterTrampoline() const {
653
  DCHECK(HasInterpreterData());
654
  return interpreter_data().interpreter_trampoline();
655 656 657
}

bool SharedFunctionInfo::HasInterpreterData() const {
658 659 660
  Object data = function_data(kAcquireLoad);
  if (data.IsBaselineData()) data = BaselineData::cast(data).data();
  return data.IsInterpreterData();
661 662
}

663
InterpreterData SharedFunctionInfo::interpreter_data() const {
664
  DCHECK(HasInterpreterData());
665 666 667
  Object data = function_data(kAcquireLoad);
  if (data.IsBaselineData()) data = BaselineData::cast(data).data();
  return InterpreterData::cast(data);
668 669 670
}

void SharedFunctionInfo::set_interpreter_data(
671
    InterpreterData interpreter_data) {
672
  DCHECK(FLAG_interpreted_frames_native_stack);
673
  DCHECK(!HasBaselineData());
674
  set_function_data(interpreter_data, kReleaseStore);
675 676
}

677 678 679 680 681 682 683 684 685 686 687 688 689 690 691 692 693 694
bool SharedFunctionInfo::HasBaselineData() const {
  return function_data(kAcquireLoad).IsBaselineData();
}

BaselineData SharedFunctionInfo::baseline_data() const {
  DCHECK(HasBaselineData());
  return BaselineData::cast(function_data(kAcquireLoad));
}

void SharedFunctionInfo::set_baseline_data(BaselineData baseline_data) {
  set_function_data(baseline_data, kReleaseStore);
}

void SharedFunctionInfo::flush_baseline_data() {
  DCHECK(HasBaselineData());
  set_function_data(baseline_data().data(), kReleaseStore);
}

#if V8_ENABLE_WEBASSEMBLY
bool SharedFunctionInfo::HasAsmWasmData() const {
  return function_data(kAcquireLoad).IsAsmWasmData();
}

bool SharedFunctionInfo::HasWasmExportedFunctionData() const {
  return function_data(kAcquireLoad).IsWasmExportedFunctionData();
}

bool SharedFunctionInfo::HasWasmJSFunctionData() const {
  return function_data(kAcquireLoad).IsWasmJSFunctionData();
}

bool SharedFunctionInfo::HasWasmCapiFunctionData() const {
  return function_data(kAcquireLoad).IsWasmCapiFunctionData();
}

AsmWasmData SharedFunctionInfo::asm_wasm_data() const {
  DCHECK(HasAsmWasmData());
  return AsmWasmData::cast(function_data(kAcquireLoad));
}

void SharedFunctionInfo::set_asm_wasm_data(AsmWasmData data) {
  DCHECK(function_data(kAcquireLoad) == Smi::FromEnum(Builtin::kCompileLazy) ||
         HasUncompiledData() || HasAsmWasmData());
  set_function_data(data, kReleaseStore);
}

// Returns the wasm module for exported wasm functions, nullptr otherwise.
const wasm::WasmModule* SharedFunctionInfo::wasm_module() const {
  if (!HasWasmExportedFunctionData()) return nullptr;
  const WasmExportedFunctionData& function_data = wasm_exported_function_data();
  const WasmInstanceObject& wasm_instance = function_data.instance();
  const WasmModuleObject& wasm_module_object = wasm_instance.module_object();
  return wasm_module_object.module();
}

const wasm::FunctionSig* SharedFunctionInfo::wasm_function_signature() const {
  const wasm::WasmModule* module = wasm_module();
  if (!module) return nullptr;
  const WasmExportedFunctionData& function_data = wasm_exported_function_data();
  DCHECK_LT(function_data.function_index(), module->functions.size());
  return module->functions[function_data.function_index()].sig;
}
#endif  // V8_ENABLE_WEBASSEMBLY

740
bool SharedFunctionInfo::HasBuiltinId() const {
741
  return function_data(kAcquireLoad).IsSmi();
742 743
}

744
Builtin SharedFunctionInfo::builtin_id() const {
745
  DCHECK(HasBuiltinId());
746
  int id = Smi::ToInt(function_data(kAcquireLoad));
747
  DCHECK(Builtins::IsBuiltinId(id));
748
  return Builtins::FromInt(id);
749 750
}

751 752 753
void SharedFunctionInfo::set_builtin_id(Builtin builtin) {
  DCHECK(Builtins::IsBuiltinId(builtin));
  set_function_data(Smi::FromInt(static_cast<int>(builtin)), kReleaseStore,
754
                    SKIP_WRITE_BARRIER);
755 756
}

757
bool SharedFunctionInfo::HasUncompiledData() const {
758
  return function_data(kAcquireLoad).IsUncompiledData();
759 760
}

761
UncompiledData SharedFunctionInfo::uncompiled_data() const {
762
  DCHECK(HasUncompiledData());
763
  return UncompiledData::cast(function_data(kAcquireLoad));
764 765
}

766
void SharedFunctionInfo::set_uncompiled_data(UncompiledData uncompiled_data) {
767
  DCHECK(function_data(kAcquireLoad) == Smi::FromEnum(Builtin::kCompileLazy) ||
768
         HasUncompiledData());
769
  DCHECK(uncompiled_data.IsUncompiledData());
770
  set_function_data(uncompiled_data, kReleaseStore);
771 772
}

773
bool SharedFunctionInfo::HasUncompiledDataWithPreparseData() const {
774
  return function_data(kAcquireLoad).IsUncompiledDataWithPreparseData();
775 776
}

777 778 779
UncompiledDataWithPreparseData
SharedFunctionInfo::uncompiled_data_with_preparse_data() const {
  DCHECK(HasUncompiledDataWithPreparseData());
780
  return UncompiledDataWithPreparseData::cast(function_data(kAcquireLoad));
781 782
}

783 784
void SharedFunctionInfo::set_uncompiled_data_with_preparse_data(
    UncompiledDataWithPreparseData uncompiled_data_with_preparse_data) {
785
  DCHECK(function_data(kAcquireLoad) == Smi::FromEnum(Builtin::kCompileLazy));
786
  DCHECK(uncompiled_data_with_preparse_data.IsUncompiledDataWithPreparseData());
787
  set_function_data(uncompiled_data_with_preparse_data, kReleaseStore);
788 789
}

790
bool SharedFunctionInfo::HasUncompiledDataWithoutPreparseData() const {
791
  return function_data(kAcquireLoad).IsUncompiledDataWithoutPreparseData();
792 793
}

794 795 796
void SharedFunctionInfo::ClearPreparseData() {
  DCHECK(HasUncompiledDataWithPreparseData());
  UncompiledDataWithPreparseData data = uncompiled_data_with_preparse_data();
797 798 799

  // Trim off the pre-parsed scope data from the uncompiled data by swapping the
  // map, leaving only an uncompiled data without pre-parsed scope.
800
  DisallowGarbageCollection no_gc;
801
  Heap* heap = GetHeapFromWritableObject(data);
802 803

  // Swap the map.
804
  heap->NotifyObjectLayoutChange(data, no_gc);
805 806 807
  STATIC_ASSERT(UncompiledDataWithoutPreparseData::kSize <
                UncompiledDataWithPreparseData::kSize);
  STATIC_ASSERT(UncompiledDataWithoutPreparseData::kSize ==
808
                UncompiledData::kHeaderSize);
809 810
  data.set_map(GetReadOnlyRoots().uncompiled_data_without_preparse_data_map(),
               kReleaseStore);
811 812 813

  // Fill the remaining space with filler.
  heap->CreateFillerObjectAt(
814
      data.address() + UncompiledDataWithoutPreparseData::kSize,
815
      UncompiledDataWithPreparseData::kSize -
816
          UncompiledDataWithoutPreparseData::kSize,
817
      ClearRecordedSlots::kYes);
818 819

  // Ensure that the clear was successful.
820
  DCHECK(HasUncompiledDataWithoutPreparseData());
821 822
}

823
void UncompiledData::InitAfterBytecodeFlush(
824
    String inferred_name, int start_position, int end_position,
825
    std::function<void(HeapObject object, ObjectSlot slot, HeapObject target)>
826
        gc_notify_updated_slot) {
827 828 829 830 831
  set_inferred_name(inferred_name);
  gc_notify_updated_slot(*this, RawField(UncompiledData::kInferredNameOffset),
                         inferred_name);
  set_start_position(start_position);
  set_end_position(end_position);
832 833
}

834
HeapObject SharedFunctionInfo::script() const {
835
  HeapObject maybe_script = script_or_debug_info(kAcquireLoad);
836 837
  if (maybe_script.IsDebugInfo()) {
    return DebugInfo::cast(maybe_script).script();
838 839 840 841
  }
  return maybe_script;
}

842
void SharedFunctionInfo::set_script(HeapObject script) {
843
  HeapObject maybe_debug_info = script_or_debug_info(kAcquireLoad);
844 845
  if (maybe_debug_info.IsDebugInfo()) {
    DebugInfo::cast(maybe_debug_info).set_script(script);
846
  } else {
847
    set_script_or_debug_info(script, kReleaseStore);
848 849 850
  }
}

Simon Zünd's avatar
Simon Zünd committed
851 852 853 854
bool SharedFunctionInfo::is_repl_mode() const {
  return script().IsScript() && Script::cast(script()).is_repl_mode();
}

855
bool SharedFunctionInfo::HasDebugInfo() const {
856
  return script_or_debug_info(kAcquireLoad).IsDebugInfo();
857 858
}

859
DebugInfo SharedFunctionInfo::GetDebugInfo() const {
860 861 862
  auto debug_info = script_or_debug_info(kAcquireLoad);
  DCHECK(debug_info.IsDebugInfo());
  return DebugInfo::cast(debug_info);
863 864
}

865
void SharedFunctionInfo::SetDebugInfo(DebugInfo debug_info) {
866
  DCHECK(!HasDebugInfo());
867 868
  DCHECK_EQ(debug_info.script(), script_or_debug_info(kAcquireLoad));
  set_script_or_debug_info(debug_info, kReleaseStore);
869 870
}

871
bool SharedFunctionInfo::HasInferredName() {
872
  Object scope_info = name_or_scope_info(kAcquireLoad);
873 874
  if (scope_info.IsScopeInfo()) {
    return ScopeInfo::cast(scope_info).HasInferredFunctionName();
875 876
  }
  return HasUncompiledData();
877 878
}

879
String SharedFunctionInfo::inferred_name() {
880
  Object maybe_scope_info = name_or_scope_info(kAcquireLoad);
881
  if (maybe_scope_info.IsScopeInfo()) {
882
    ScopeInfo scope_info = ScopeInfo::cast(maybe_scope_info);
883 884 885
    if (scope_info.HasInferredFunctionName()) {
      Object name = scope_info.InferredFunctionName();
      if (name.IsString()) return String::cast(name);
886 887
    }
  } else if (HasUncompiledData()) {
888
    return uncompiled_data().inferred_name();
889
  }
890
  return GetReadOnlyRoots().empty_string();
891 892
}

893
bool SharedFunctionInfo::IsUserJavaScript() const {
894
  Object script_obj = script();
895
  if (script_obj.IsUndefined()) return false;
896
  Script script = Script::cast(script_obj);
897
  return script.IsUserJavaScript();
898 899
}

900
bool SharedFunctionInfo::IsSubjectToDebugging() const {
901 902 903 904
#if V8_ENABLE_WEBASSEMBLY
  if (HasAsmWasmData()) return false;
#endif  // V8_ENABLE_WEBASSEMBLY
  return IsUserJavaScript();
905 906
}

907
bool SharedFunctionInfo::CanDiscardCompiled() const {
908 909 910 911 912
#if V8_ENABLE_WEBASSEMBLY
  if (HasAsmWasmData()) return true;
#endif  // V8_ENABLE_WEBASSEMBLY
  return HasBytecodeArray() || HasUncompiledDataWithPreparseData() ||
         HasBaselineData();
913 914
}

915
bool SharedFunctionInfo::is_class_constructor() const {
916
  return IsClassConstructorBit::decode(flags(kRelaxedLoad));
917 918 919 920
}

void SharedFunctionInfo::set_are_properties_final(bool value) {
  if (is_class_constructor()) {
921
    set_properties_are_final(value);
922 923 924 925
  }
}

bool SharedFunctionInfo::are_properties_final() const {
926
  bool bit = properties_are_final();
927 928 929
  return bit && is_class_constructor();
}

}  // namespace internal
}  // namespace v8

#include "src/base/platform/wrappers.h"
#include "src/objects/object-macros-undef.h"

#endif  // V8_OBJECTS_SHARED_FUNCTION_INFO_INL_H_