// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_TORQUE_IMPLEMENTATION_VISITOR_H_
#define V8_TORQUE_IMPLEMENTATION_VISITOR_H_

#include <memory>
#include <string>

#include "src/base/macros.h"
#include "src/torque/ast.h"
#include "src/torque/cfg.h"
#include "src/torque/cpp-builder.h"
#include "src/torque/declarations.h"
#include "src/torque/global-context.h"
#include "src/torque/type-oracle.h"
#include "src/torque/types.h"
#include "src/torque/utils.h"

namespace v8 {
namespace internal {
namespace torque {

template <typename T>
class Binding;
class LocalValue;
class ImplementationVisitor;
// LocationReference is the representation of an l-value, so a value that might
// allow for assignment. For uniformity, this class can also represent
// unassignable temporaries. Assignable values fall in two categories:
//   - stack ranges that represent mutable variables, including structs.
//   - field or element access expressions that generate operator calls.
class LocationReference {
 public:
  // An assignable stack range. {binding} is optional so that intermediate
  // results can also be represented as variable accesses; when present it
  // enables the never-assigned-to lint check (see Binding<LocalValue>).
  static LocationReference VariableAccess(
      VisitResult variable,
      base::Optional<Binding<LocalValue>*> binding = base::nullopt) {
    DCHECK(variable.IsOnStack());
    LocationReference result;
    result.variable_ = std::move(variable);
    result.binding_ = binding;
    return result;
  }
  // An unassignable value. {description} is only used for error messages.
  static LocationReference Temporary(VisitResult temporary,
                                     std::string description) {
    LocationReference result;
    result.temporary_ = std::move(temporary);
    result.temporary_description_ = std::move(description);
    return result;
  }
  // A heap reference, that is, a tagged value and an offset to encode an inner
  // pointer. {heap_reference} must have a Reference<T> type.
  static LocationReference HeapReference(VisitResult heap_reference) {
    LocationReference result;
    DCHECK(TypeOracle::MatchReferenceGeneric(heap_reference.type()));
    result.heap_reference_ = std::move(heap_reference);
    return result;
  }
  // A reference to an array on the heap. That is, a tagged value, an offset to
  // encode an inner pointer, and the number of elements. {heap_slice} must
  // have a ConstSlice<T> or MutableSlice<T> type.
  static LocationReference HeapSlice(VisitResult heap_slice) {
    LocationReference result;
    DCHECK(Type::MatchUnaryGeneric(heap_slice.type(),
                                   TypeOracle::GetConstSliceGeneric()) ||
           Type::MatchUnaryGeneric(heap_slice.type(),
                                   TypeOracle::GetMutableSliceGeneric()));
    result.heap_slice_ = std::move(heap_slice);
    return result;
  }
  // An element access that is lowered to calls of the "[]" / "[]=" operators.
  static LocationReference ArrayAccess(VisitResult base, VisitResult offset) {
    LocationReference result;
    result.eval_function_ = std::string{"[]"};
    result.assign_function_ = std::string{"[]="};
    result.call_arguments_ = {base, offset};
    return result;
  }
  // A field access that is lowered to calls of the ".field" / ".field="
  // operators.
  static LocationReference FieldAccess(VisitResult object,
                                       std::string fieldname) {
    LocationReference result;
    result.eval_function_ = "." + fieldname;
    result.assign_function_ = "." + fieldname + "=";
    result.call_arguments_ = {object};
    return result;
  }
  // Access to a single bitfield inside the bitfield struct located at
  // {object}.
  static LocationReference BitFieldAccess(const LocationReference& object,
                                          BitField field) {
    LocationReference result;
    result.bit_field_struct_ = std::make_shared<LocationReference>(object);
    result.bit_field_ = std::move(field);
    return result;
  }

  // A location is const if writing to it is not allowed: either a
  // Reference<T> whose generic is marked const, or an unassignable temporary.
  bool IsConst() const {
    if (IsHeapReference()) {
      bool is_const;
      bool success =
          TypeOracle::MatchReferenceGeneric(heap_reference().type(), &is_const)
              .has_value();
      CHECK(success);
      return is_const;
    }
    return IsTemporary();
  }

  bool IsVariableAccess() const { return variable_.has_value(); }
  const VisitResult& variable() const {
    DCHECK(IsVariableAccess());
    return *variable_;
  }
  bool IsTemporary() const { return temporary_.has_value(); }
  const VisitResult& temporary() const {
    DCHECK(IsTemporary());
    return *temporary_;
  }
  bool IsHeapReference() const { return heap_reference_.has_value(); }
  const VisitResult& heap_reference() const {
    DCHECK(IsHeapReference());
    return *heap_reference_;
  }
  bool IsHeapSlice() const { return heap_slice_.has_value(); }
  const VisitResult& heap_slice() const {
    DCHECK(IsHeapSlice());
    return *heap_slice_;
  }
  bool IsBitFieldAccess() const {
    bool is_bitfield_access = bit_field_struct_ != nullptr;
    DCHECK_EQ(is_bitfield_access, bit_field_.has_value());
    return is_bitfield_access;
  }
  const LocationReference& bit_field_struct_location() const {
    DCHECK(IsBitFieldAccess());
    return *bit_field_struct_;
  }
  const BitField& bit_field() const {
    DCHECK(IsBitFieldAccess());
    return *bit_field_;
  }

  // The type of the value this location holds, or base::nullopt for call
  // accesses, whose type is only known after overload resolution.
  base::Optional<const Type*> ReferencedType() const {
    if (IsHeapReference()) {
      return *TypeOracle::MatchReferenceGeneric(heap_reference().type());
    }
    if (IsHeapSlice()) {
      // Try the mutable slice generic first, then fall back to const.
      if (auto type = Type::MatchUnaryGeneric(
              heap_slice().type(), TypeOracle::GetMutableSliceGeneric())) {
        return *type;
      }
      return Type::MatchUnaryGeneric(heap_slice().type(),
                                     TypeOracle::GetConstSliceGeneric());
    }
    if (IsBitFieldAccess()) {
      return bit_field_->name_and_type.type;
    }
    // Note: the IsHeapSlice() case was already handled above, so only
    // variable accesses and temporaries reach GetVisitResult() here.
    if (IsVariableAccess() || IsHeapSlice() || IsTemporary()) {
      return GetVisitResult().type();
    }
    return base::nullopt;
  }

  // The underlying VisitResult; only valid for variable accesses, heap
  // slices, and temporaries.
  const VisitResult& GetVisitResult() const {
    if (IsVariableAccess()) return variable();
    if (IsHeapSlice()) return heap_slice();
    DCHECK(IsTemporary());
    return temporary();
  }

  // For error reporting.
  const std::string& temporary_description() const {
    DCHECK(IsTemporary());
    return *temporary_description_;
  }

  // True for locations that are read/written via operator calls
  // (ArrayAccess and FieldAccess).
  bool IsCallAccess() const {
    bool is_call_access = eval_function_.has_value();
    DCHECK_EQ(is_call_access, assign_function_.has_value());
    return is_call_access;
  }
  const VisitResultVector& call_arguments() const {
    DCHECK(IsCallAccess());
    return call_arguments_;
  }
  const std::string& eval_function() const {
    DCHECK(IsCallAccess());
    return *eval_function_;
  }
  const std::string& assign_function() const {
    DCHECK(IsCallAccess());
    return *assign_function_;
  }
  base::Optional<Binding<LocalValue>*> binding() const {
    DCHECK(IsVariableAccess());
    return binding_;
  }

 private:
  // Exactly one "kind" is populated per instance; the Is*() predicates test
  // which one.
  base::Optional<VisitResult> variable_;
  base::Optional<VisitResult> temporary_;
  base::Optional<std::string> temporary_description_;
  base::Optional<VisitResult> heap_reference_;
  base::Optional<VisitResult> heap_slice_;
  base::Optional<std::string> eval_function_;
  base::Optional<std::string> assign_function_;
  VisitResultVector call_arguments_;
  base::Optional<Binding<LocalValue>*> binding_;

  // The location of the bitfield struct that contains this bitfield, if this
  // reference is a bitfield access. Uses a shared_ptr so that LocationReference
  // is copyable, allowing us to set this field equal to a copy of a
  // stack-allocated LocationReference.
  std::shared_ptr<const LocationReference> bit_field_struct_;
  base::Optional<BitField> bit_field_;

  // Instances are only created through the static factory methods above.
  LocationReference() = default;
};

// The result of evaluating the initializer expressions of a class or struct
// instantiation: the initializer names in source order, plus the evaluated
// value for each named field.
struct InitializerResults {
  std::vector<Identifier*> names;
  std::map<std::string, VisitResult> field_value_map;
};

// Precomputed layout used when initializing an object: the length of each
// indexed (array) field, the offset of each field, and the total size of the
// object, each keyed by field name where applicable.
struct LayoutForInitialization {
  std::map<std::string, VisitResult> array_lengths;
  std::map<std::string, VisitResult> offsets;
  VisitResult size;
};

// Global counter used to give every Binding<T> a unique index.
extern uint64_t next_unique_binding_index;

template <class T>
class Binding;

// Maps names to their currently visible Binding<T>. Bindings register
// themselves on construction and restore the previous binding on destruction
// (see Binding<T>), so shadowing works like a per-name stack.
template <class T>
class BindingsManager {
 public:
  // Returns the current binding for {name}, if any, and marks it as used.
  // Referencing a name declared with a single leading underscore (i.e.
  // explicitly marked unused) is an error.
  base::Optional<Binding<T>*> TryLookup(const std::string& name) {
    if (StartsWithSingleUnderscore(name)) {
      Error("Trying to reference '", name, "' which is marked as unused.")
          .Throw();
    }
    auto binding = current_bindings_[name];
    if (binding) {
      (*binding)->SetUsed();
    }
    return binding;
  }

 private:
  friend class Binding<T>;
  std::unordered_map<std::string, base::Optional<Binding<T>*>>
      current_bindings_;
};

// A scoped binding of a name to a value of type T (e.g. LocalValue or
// LocalLabel). On construction it shadows any previous binding of the same
// name in {manager}; on destruction it restores the previous binding and
// emits lint warnings for bindings that were never used or never written.
template <class T>
class Binding : public T {
 public:
  template <class... Args>
  Binding(BindingsManager<T>* manager, const std::string& name, Args&&... args)
      : T(std::forward<Args>(args)...),
        manager_(manager),
        name_(name),
        previous_binding_(this),
        used_(false),
        written_(false),
        unique_index_(next_unique_binding_index++) {
    // Install this binding as the current one; remember the shadowed binding
    // (if any) so the destructor can restore it.
    std::swap(previous_binding_, manager_->current_bindings_[name]);
  }
  // Same as above, but also records the identifier's source position for
  // diagnostics.
  template <class... Args>
  Binding(BindingsManager<T>* manager, const Identifier* name, Args&&... args)
      : Binding(manager, name->value, std::forward<Args>(args)...) {
    declaration_position_ = name->pos;
  }
  ~Binding() {
    if (!used_ && !SkipLintCheck()) {
      Lint(BindingTypeString(), "'", name_,
           "' is never used. Prefix with '_' if this is intentional.")
          .Position(declaration_position_);
    }

    if (CheckWritten() && !written_ && !SkipLintCheck()) {
      Lint(BindingTypeString(), "'", name_,
           "' is never assigned to. Use 'const' instead of 'let'.")
          .Position(declaration_position_);
    }

    // Un-shadow: restore whatever binding {name_} had before this one.
    manager_->current_bindings_[name_] = previous_binding_;
  }
  Binding(const Binding&) = delete;
  Binding& operator=(const Binding&) = delete;

  // Specialized per T below: how this binding kind is described in lint
  // messages, and whether the never-assigned-to check applies.
  std::string BindingTypeString() const;
  bool CheckWritten() const;

  const std::string& name() const { return name_; }
  SourcePosition declaration_position() const { return declaration_position_; }

  bool Used() const { return used_; }
  void SetUsed() { used_ = true; }

  bool Written() const { return written_; }
  void SetWritten() { written_ = true; }

  uint64_t unique_index() const { return unique_index_; }

 private:
  // Names prefixed with '_' are deliberately unused and skip lint checks.
  bool SkipLintCheck() const { return name_.length() > 0 && name_[0] == '_'; }

  BindingsManager<T>* manager_;
  const std::string name_;
  base::Optional<Binding*> previous_binding_;
  SourcePosition declaration_position_ = CurrentSourcePosition::Get();
  bool used_;
  bool written_;
  uint64_t unique_index_;
};

// Owns the Binding<T> objects created for one Torque block and reports
// redeclarations of the same name within that block. The bindings are
// destroyed (and thus un-shadowed) when the BlockBindings goes out of scope.
template <class T>
class BlockBindings {
 public:
  explicit BlockBindings(BindingsManager<T>* manager) : manager_(manager) {}
  // Adds a binding for {name}. {mark_as_used} suppresses the unused-binding
  // lint warning for bindings introduced implicitly.
  Binding<T>* Add(std::string name, T value, bool mark_as_used = false) {
    ReportErrorIfAlreadyBound(name);
    auto binding =
        std::make_unique<Binding<T>>(manager_, name, std::move(value));
    Binding<T>* result = binding.get();
    if (mark_as_used) binding->SetUsed();
    bindings_.push_back(std::move(binding));
    return result;
  }

  // Same as above, but records {name}'s source position for diagnostics.
  Binding<T>* Add(const Identifier* name, T value, bool mark_as_used = false) {
    ReportErrorIfAlreadyBound(name->value);
    auto binding =
        std::make_unique<Binding<T>>(manager_, name, std::move(value));
    Binding<T>* result = binding.get();
    if (mark_as_used) binding->SetUsed();
    bindings_.push_back(std::move(binding));
    return result;
  }

  // Non-owning view of all bindings added so far, in insertion order.
  std::vector<Binding<T>*> bindings() const {
    std::vector<Binding<T>*> result;
    result.reserve(bindings_.size());
    for (auto& b : bindings_) {
      result.push_back(b.get());
    }
    return result;
  }

 private:
  void ReportErrorIfAlreadyBound(const std::string& name) {
    for (const auto& binding : bindings_) {
      if (binding->name() == name) {
        ReportError(
            "redeclaration of name \"", name,
            "\" in the same block is illegal, previous declaration at: ",
            binding->declaration_position());
      }
    }
  }

  BindingsManager<T>* manager_;
  std::vector<std::unique_ptr<Binding<T>>> bindings_;
};

// A named value in scope. Exactly one of three states applies:
//  - an eagerly evaluated LocationReference ({value}),
//  - a lazily evaluated one ({lazy}), or
//  - inaccessible, in which case {inaccessible_explanation} says why and
//    GetLocationReference reports an error.
class LocalValue {
 public:
  explicit LocalValue(LocationReference reference)
      : value(std::move(reference)) {}
  explicit LocalValue(std::string inaccessible_explanation)
      : inaccessible_explanation(std::move(inaccessible_explanation)) {}
  explicit LocalValue(std::function<LocationReference()> lazy)
      : lazy(std::move(lazy)) {}

  // Resolves this value to a LocationReference, throwing an error (via
  // Error(...).Throw()) if the value is inaccessible.
  LocationReference GetLocationReference(Binding<LocalValue>* binding) {
    if (value) {
      const LocationReference& ref = *value;
      if (ref.IsVariableAccess()) {
        // Attach the binding to enable the never-assigned-to lint check.
        return LocationReference::VariableAccess(ref.GetVisitResult(), binding);
      }
      return ref;
    } else if (lazy) {
      return (*lazy)();
    } else {
      Error("Cannot access ", binding->name(), ": ", inaccessible_explanation)
          .Throw();
    }
  }

  // True only for the eagerly-evaluated state; lazy values return false.
  bool IsAccessibleNonLazy() const { return value.has_value(); }

 private:
  base::Optional<LocationReference> value;
  base::Optional<std::function<LocationReference()>> lazy;
  std::string inaccessible_explanation;
};

// A Torque label in scope: the CFG block to jump to and the types of the
// label's parameters.
struct LocalLabel {
  Block* block;
  std::vector<const Type*> parameter_types;

  explicit LocalLabel(Block* block,
                      std::vector<const Type*> parameter_types = {})
      : block(block), parameter_types(std::move(parameter_types)) {}
};

// Lint-support specializations: how each binding kind is described in
// messages, and whether the never-assigned-to check applies to it.
template <>
inline std::string Binding<LocalValue>::BindingTypeString() const {
  return "Variable ";
}
template <>
inline bool Binding<LocalValue>::CheckWritten() const {
  // Do the check only for non-const variables and non struct types.
  auto binding = *manager_->current_bindings_[name_];
  if (!binding->IsAccessibleNonLazy()) return false;
  const LocationReference& ref = binding->GetLocationReference(binding);
  if (!ref.IsVariableAccess()) return false;
  return !ref.GetVisitResult().type()->StructSupertype();
}
template <>
inline std::string Binding<LocalLabel>::BindingTypeString() const {
  return "Label ";
}
template <>
inline bool Binding<LocalLabel>::CheckWritten() const {
  // Labels are jump targets; they are never assigned to.
  return false;
}
432

// Bundles the value arguments and label arguments of one Torque call site.
struct Arguments {
  VisitResultVector parameters;
  std::vector<Binding<LocalLabel>*> labels;
};

// Determine if a callable should be considered as an overload.
bool IsCompatibleSignature(const Signature& sig, const TypeVector& types,
                           size_t label_count);

// Walks the Torque AST (expressions, statements, declarables) and generates
// the output artifacts: CSA code, builtin definitions, class definitions,
// verifiers, debug readers, etc., written into an output directory.
class ImplementationVisitor {
 public:
  // Generators for the various output files; each writes into
  // {output_directory} (suppressed in dry-run mode, see WriteFile).
  void GenerateBuiltinDefinitionsAndInterfaceDescriptors(
      const std::string& output_directory);
  void GenerateVisitorLists(const std::string& output_directory);
  void GenerateBitFields(const std::string& output_directory);
  void GeneratePrintDefinitions(const std::string& output_directory);
  void GenerateClassDefinitions(const std::string& output_directory);
  void GenerateBodyDescriptors(const std::string& output_directory);
  void GenerateInstanceTypes(const std::string& output_directory);
  void GenerateClassVerifiers(const std::string& output_directory);
  void GenerateEnumVerifiers(const std::string& output_directory);
  void GenerateClassDebugReaders(const std::string& output_directory);
  void GenerateExportedMacrosAssembler(const std::string& output_directory);
  void GenerateCSATypes(const std::string& output_directory);

  VisitResult Visit(Expression* expr);
  const Type* Visit(Statement* stmt);

  // Checks that {initializers} name the fields of {aggregate_fields} in
  // declaration order and that the counts match; reports an error otherwise.
  // {ignore_first_field} skips the first field (e.g. an inherited one).
  template <typename T>
  void CheckInitializersWellformed(
      const std::string& aggregate_name, const std::vector<T>& aggregate_fields,
      const std::vector<NameAndExpression>& initializers,
      bool ignore_first_field = false) {
    size_t fields_offset = ignore_first_field ? 1 : 0;
    size_t fields_size = aggregate_fields.size() - fields_offset;
    for (size_t i = 0; i < std::min(fields_size, initializers.size()); i++) {
      const std::string& field_name =
          aggregate_fields[i + fields_offset].name_and_type.name;
      Identifier* found_name = initializers[i].name;
      if (field_name != found_name->value) {
        Error("Expected field name \"", field_name, "\" instead of \"",
              found_name->value, "\"")
            .Position(found_name->pos)
            .Throw();
      }
    }
    if (fields_size != initializers.size()) {
      ReportError("expected ", fields_size, " initializers for ",
                  aggregate_name, " found ", initializers.size());
    }
  }

  InitializerResults VisitInitializerResults(
      const ClassType* class_type,
      const std::vector<NameAndExpression>& expressions);
  LocationReference GenerateFieldReference(
      VisitResult object, const Field& field, const ClassType* class_type,
      bool treat_optional_as_indexed = false);
  LocationReference GenerateFieldReferenceForInit(
      VisitResult object, const Field& field,
      const LayoutForInitialization& layout);
  VisitResult GenerateArrayLength(
      Expression* array_length, Namespace* nspace,
      const std::map<std::string, LocalValue>& bindings);
  VisitResult GenerateArrayLength(VisitResult object, const Field& field);
  VisitResult GenerateArrayLength(const ClassType* class_type,
                                  const InitializerResults& initializer_results,
                                  const Field& field);
  LayoutForInitialization GenerateLayoutForInitialization(
      const ClassType* class_type,
      const InitializerResults& initializer_results);

  void InitializeClass(const ClassType* class_type, VisitResult allocate_result,
                       const InitializerResults& initializer_results,
                       const LayoutForInitialization& layout);

  VisitResult Visit(StructExpression* decl);

  LocationReference GetLocationReference(Expression* location);
  LocationReference LookupLocalValue(const std::string& name);
  LocationReference GetLocationReference(IdentifierExpression* expr);
  LocationReference GetLocationReference(DereferenceExpression* expr);
  LocationReference GetLocationReference(FieldAccessExpression* expr);
  // NOTE(review): parameter name has a typo ("stuct"); kept as-is because the
  // header must agree with the existing definition — rename in a follow-up.
  LocationReference GenerateFieldAccess(
      LocationReference reference, const std::string& fieldname,
      bool ignore_stuct_field_constness = false,
      base::Optional<SourcePosition> pos = {});
  LocationReference GetLocationReference(ElementAccessExpression* expr);
  LocationReference GenerateReferenceToItemInHeapSlice(LocationReference slice,
                                                       VisitResult index);

  VisitResult GenerateFetchFromLocation(const LocationReference& reference);

  VisitResult GetBuiltinCode(Builtin* builtin);

  VisitResult Visit(LocationExpression* expr);
  VisitResult Visit(FieldAccessExpression* expr);

  void VisitAllDeclarables();
  // NOTE(review): "delarable" looks like a typo for "declarable"; parameter
  // names in declarations don't bind, so it is left unchanged here.
  void Visit(Declarable* delarable, base::Optional<SourceId> file = {});
  void Visit(TypeAlias* decl);
  VisitResult InlineMacro(Macro* macro,
                          base::Optional<LocationReference> this_reference,
                          const std::vector<VisitResult>& arguments,
                          const std::vector<Block*> label_blocks);
  void VisitMacroCommon(Macro* macro);
  // Extern macros are implemented outside Torque; nothing to generate.
  void Visit(ExternMacro* macro) {}
  void Visit(TorqueMacro* macro);
  void Visit(Method* macro);
  void Visit(Builtin* builtin);
  void Visit(NamespaceConstant* decl);

  VisitResult Visit(CallExpression* expr, bool is_tail = false);
  VisitResult Visit(CallMethodExpression* expr);
  VisitResult Visit(IntrinsicCallExpression* intrinsic);
  const Type* Visit(TailCallStatement* stmt);

  VisitResult Visit(ConditionalExpression* expr);

  VisitResult Visit(LogicalOrExpression* expr);
  VisitResult Visit(LogicalAndExpression* expr);

  VisitResult Visit(IncrementDecrementExpression* expr);
  VisitResult Visit(AssignmentExpression* expr);
  VisitResult Visit(StringLiteralExpression* expr);
  VisitResult Visit(FloatingPointLiteralExpression* expr);
  VisitResult Visit(IntegerLiteralExpression* expr);
  VisitResult Visit(AssumeTypeImpossibleExpression* expr);
  VisitResult Visit(TryLabelExpression* expr);
  VisitResult Visit(StatementExpression* expr);
  VisitResult Visit(NewExpression* expr);
  VisitResult Visit(SpreadExpression* expr);

  const Type* Visit(ReturnStatement* stmt);
  const Type* Visit(GotoStatement* stmt);
  const Type* Visit(IfStatement* stmt);
  const Type* Visit(WhileStatement* stmt);
  const Type* Visit(BreakStatement* stmt);
  const Type* Visit(ContinueStatement* stmt);
  const Type* Visit(ForLoopStatement* stmt);
  const Type* Visit(VarDeclarationStatement* stmt);
  const Type* Visit(VarDeclarationStatement* stmt,
                    BlockBindings<LocalValue>* block_bindings);
  const Type* Visit(BlockStatement* block);
  const Type* Visit(ExpressionStatement* stmt);
  const Type* Visit(DebugStatement* stmt);
  const Type* Visit(AssertStatement* stmt);

  void BeginGeneratedFiles();
  void EndGeneratedFiles();
  void BeginDebugMacrosFile();
  void EndDebugMacrosFile();

  void GenerateImplementation(const std::string& dir);

  DECLARE_CONTEXTUAL_VARIABLE(ValueBindingsManager,
                              BindingsManager<LocalValue>);
  DECLARE_CONTEXTUAL_VARIABLE(LabelBindingsManager,
                              BindingsManager<LocalLabel>);
  DECLARE_CONTEXTUAL_VARIABLE(CurrentCallable, Callable*);
  DECLARE_CONTEXTUAL_VARIABLE(CurrentFileStreams,
                              GlobalContext::PerFileStreams*);
  DECLARE_CONTEXTUAL_VARIABLE(CurrentReturnValue, base::Optional<VisitResult>);

  // A BindingsManagersScope has to be active for local bindings to be created.
  // Shadowing an existing BindingsManagersScope by creating a new one hides all
  // existing bindings while the additional BindingsManagersScope is active.
  struct BindingsManagersScope {
    ValueBindingsManager::Scope value_bindings_manager;
    LabelBindingsManager::Scope label_bindings_manager;
  };

  // In dry-run mode WriteFile() is a no-op; generation still runs.
  void SetDryRun(bool is_dry_run) { is_dry_run_ = is_dry_run; }

 private:
  base::Optional<Block*> GetCatchBlock();
  void GenerateCatchBlock(base::Optional<Block*> catch_block);

  // {StackScope} records the stack height at creation time and reconstructs it
  // when being destructed by emitting a {DeleteRangeInstruction}, except for
  // the slots protected by {StackScope::Yield}. Calling {Yield(v)} deletes all
  // slots above the initial stack height except for the slots of {v}, which are
  // moved to form the only slots above the initial height and marks them to
  // survive destruction of the {StackScope}. A typical pattern is the
  // following:
  //
  // VisitResult result;
  // {
  //   StackScope stack_scope(this);
  //   // ... create temporary slots ...
  //   result = stack_scope.Yield(surviving_slots);
  // }
  class V8_NODISCARD StackScope {
   public:
    explicit StackScope(ImplementationVisitor* visitor) : visitor_(visitor) {
      base_ = visitor_->assembler().CurrentStack().AboveTop();
    }
    VisitResult Yield(VisitResult result) {
      DCHECK(!closed_);
      closed_ = true;
      if (!result.IsOnStack()) {
        // Nothing on the stack to preserve; just drop the temporaries.
        if (!visitor_->assembler().CurrentBlockIsComplete()) {
          visitor_->assembler().DropTo(base_);
        }
        return result;
      }
      DCHECK_LE(base_, result.stack_range().begin());
      DCHECK_LE(result.stack_range().end(),
                visitor_->assembler().CurrentStack().AboveTop());
      // Delete everything above the scope's base except {result}'s slots,
      // which are moved down to sit directly above the base.
      visitor_->assembler().DropTo(result.stack_range().end());
      visitor_->assembler().DeleteRange(
          StackRange{base_, result.stack_range().begin()});
      base_ = visitor_->assembler().CurrentStack().AboveTop();
      return VisitResult(result.type(), visitor_->assembler().TopRange(
                                            result.stack_range().Size()));
    }

    // Closes the scope without yielding anything: drops all slots created
    // since construction.
    void Close() {
      DCHECK(!closed_);
      closed_ = true;
      if (!visitor_->assembler().CurrentBlockIsComplete()) {
        visitor_->assembler().DropTo(base_);
      }
    }

    ~StackScope() {
      if (closed_) {
        // Verify the stack height matches the expectation of Yield/Close.
        DCHECK_IMPLIES(
            !visitor_->assembler().CurrentBlockIsComplete(),
            base_ == visitor_->assembler().CurrentStack().AboveTop());
      } else {
        Close();
      }
    }

   private:
    ImplementationVisitor* visitor_;
    BottomOffset base_;
    bool closed_ = false;
  };

  // Binds the implicit "break"/"continue" labels of a loop to the given
  // blocks for the duration of this activator's lifetime.
  class BreakContinueActivator {
   public:
    BreakContinueActivator(Block* break_block, Block* continue_block)
        : break_binding_{&LabelBindingsManager::Get(), kBreakLabelName,
                         LocalLabel{break_block}},
          continue_binding_{&LabelBindingsManager::Get(), kContinueLabelName,
                            LocalLabel{continue_block}} {}

   private:
    Binding<LocalLabel> break_binding_;
    Binding<LocalLabel> continue_binding_;
  };

  base::Optional<Binding<LocalValue>*> TryLookupLocalValue(
      const std::string& name);
  base::Optional<Binding<LocalLabel>*> TryLookupLabel(const std::string& name);
  Binding<LocalLabel>* LookupLabel(const std::string& name);
  Block* LookupSimpleLabel(const std::string& name);
  template <class Container>
  Callable* LookupCallable(const QualifiedName& name,
                           const Container& declaration_container,
                           const TypeVector& types,
                           const std::vector<Binding<LocalLabel>*>& labels,
                           const TypeVector& specialization_types,
                           bool silence_errors = false);
  bool TestLookupCallable(const QualifiedName& name,
                          const TypeVector& parameter_types);

  template <class Container>
  Callable* LookupCallable(const QualifiedName& name,
                           const Container& declaration_container,
                           const Arguments& arguments,
                           const TypeVector& specialization_types);

  Method* LookupMethod(const std::string& name,
                       const AggregateType* receiver_type,
                       const Arguments& arguments,
                       const TypeVector& specialization_types);

  TypeArgumentInference InferSpecializationTypes(
      GenericCallable* generic, const TypeVector& explicit_specialization_types,
      const TypeVector& explicit_arguments);

  const Type* GetCommonType(const Type* left, const Type* right);

  VisitResult GenerateCopy(const VisitResult& to_copy);

  void GenerateAssignToLocation(const LocationReference& reference,
                                const VisitResult& assignment_value);

  void AddCallParameter(Callable* callable, VisitResult parameter,
                        const Type* parameter_type,
                        std::vector<VisitResult>* converted_arguments,
                        StackRange* argument_range,
                        std::vector<std::string>* constexpr_arguments,
                        bool inline_macro);

  VisitResult GenerateCall(Callable* callable,
                           base::Optional<LocationReference> this_parameter,
                           Arguments parameters,
                           const TypeVector& specialization_types = {},
                           bool tail_call = false);
  VisitResult GenerateCall(const QualifiedName& callable_name,
                           Arguments parameters,
                           const TypeVector& specialization_types = {},
                           bool tail_call = false);
  // Convenience overload: wraps {callable_name} in a QualifiedName.
  VisitResult GenerateCall(std::string callable_name, Arguments parameters,
                           const TypeVector& specialization_types = {},
                           bool tail_call = false) {
    return GenerateCall(QualifiedName(std::move(callable_name)),
                        std::move(parameters), specialization_types, tail_call);
  }
  VisitResult GeneratePointerCall(Expression* callee,
                                  const Arguments& parameters, bool tail_call);

  void GenerateBranch(const VisitResult& condition, Block* true_block,
                      Block* false_block);

  VisitResult GenerateBoolConstant(bool constant);

  void GenerateExpressionBranch(Expression* expression, Block* true_block,
                                Block* false_block);

  cpp::Function GenerateMacroFunctionDeclaration(Macro* macro);

  cpp::Function GenerateFunction(
      cpp::Class* owner, const std::string& name, const Signature& signature,
      const NameVector& parameter_names, bool pass_code_assembler_state = true,
      std::vector<std::string>* generated_parameter_names = nullptr);

  VisitResult GenerateImplicitConvert(const Type* destination_type,
                                      VisitResult source);

  StackRange GenerateLabelGoto(LocalLabel* label,
                               base::Optional<StackRange> arguments = {});

  VisitResult GenerateSetBitField(const Type* bitfield_struct_type,
                                  const BitField& bitfield,
                                  VisitResult bitfield_struct,
                                  VisitResult value,
                                  bool starts_as_zero = false);

  std::vector<Binding<LocalLabel>*> LabelsFromIdentifiers(
      const std::vector<Identifier*>& names);

  StackRange LowerParameter(const Type* type, const std::string& parameter_name,
                            Stack<std::string>* lowered_parameters);

  void LowerLabelParameter(const Type* type, const std::string& parameter_name,
                           std::vector<std::string>* lowered_parameters);

  std::string ExternalLabelName(const std::string& label_name);
  std::string ExternalLabelParameterName(const std::string& label_name,
                                         size_t i);
  std::string ExternalParameterName(const std::string& name);

  // Selects the .cc output stream for the current file and output mode;
  // falls back to a null stream when no per-file streams are active.
  std::ostream& csa_ccfile() {
    if (auto* streams = CurrentFileStreams::Get()) {
      switch (output_type_) {
        case OutputType::kCSA:
          return streams->csa_ccfile;
        case OutputType::kCC:
          return streams->class_definition_inline_headerfile_macro_definitions;
        case OutputType::kCCDebug:
          return debug_macros_cc_;
        default:
          UNREACHABLE();
      }
    }
    return null_stream_;
  }
  // Same as csa_ccfile(), but for the header output stream.
  std::ostream& csa_headerfile() {
    if (auto* streams = CurrentFileStreams::Get()) {
      switch (output_type_) {
        case OutputType::kCSA:
          return streams->csa_headerfile;
        case OutputType::kCC:
          return streams->class_definition_inline_headerfile_macro_declarations;
        case OutputType::kCCDebug:
          return debug_macros_h_;
        default:
          UNREACHABLE();
      }
    }
    return null_stream_;
  }

  CfgAssembler& assembler() { return *assembler_; }

  // Records the return value of the current callable; all returns of one
  // callable must agree (checked via DCHECK).
  void SetReturnValue(VisitResult return_value) {
    base::Optional<VisitResult>& current_return_value =
        CurrentReturnValue::Get();
    DCHECK_IMPLIES(current_return_value, *current_return_value == return_value);
    current_return_value = std::move(return_value);
  }

  VisitResult GetAndClearReturnValue() {
    VisitResult return_value = *CurrentReturnValue::Get();
    CurrentReturnValue::Get() = base::nullopt;
    return return_value;
  }

  // Writes {content} to {file} unless dry-run mode is active; only rewrites
  // the file when the content actually changed.
  void WriteFile(const std::string& file, const std::string& content) {
    if (is_dry_run_) return;
    ReplaceFileContentsIfDifferent(file, content);
  }

  const Identifier* TryGetSourceForBitfieldExpression(
      const Expression* expr) const {
    auto it = bitfield_expressions_.find(expr);
    if (it == bitfield_expressions_.end()) return nullptr;
    return it->second;
  }

  // If {original} is marked as a bitfield expression, propagate that mark to
  // {derived} (e.g. a negation or comparison wrapping it).
  void PropagateBitfieldMark(const Expression* original,
                             const Expression* derived) {
    if (const Identifier* source =
            TryGetSourceForBitfieldExpression(original)) {
      bitfield_expressions_[derived] = source;
    }
  }

  class MacroInliningScope;

  base::Optional<CfgAssembler> assembler_;
  NullOStream null_stream_;
  bool is_dry_run_;

  // Just for allowing us to emit warnings. After visiting an Expression, if
  // that Expression is a bitfield load, plus an optional inversion or an
  // equality check with a constant, then that Expression will be present in
  // this map. The Identifier associated is the bitfield struct that contains
  // the value to load.
  std::unordered_map<const Expression*, const Identifier*>
      bitfield_expressions_;

  // For emitting warnings. Contains the current set of macros being inlined in
  // calls to InlineMacro.
  std::unordered_set<const Macro*> inlining_macros_;

  // The contents of the debug macros output files. These contain all Torque
  // macros that have been generated using the C++ backend with debug purpose.
  std::stringstream debug_macros_cc_;
  std::stringstream debug_macros_h_;

  OutputType output_type_ = OutputType::kCSA;
};

// Emits a lint warning for every Torque macro that was declared but never
// used (see Binding/Declarable usage tracking).
void ReportAllUnusedMacros();

}  // namespace torque
}  // namespace internal
}  // namespace v8

#endif  // V8_TORQUE_IMPLEMENTATION_VISITOR_H_