// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_CODE_GENERATOR_H_
#define V8_COMPILER_CODE_GENERATOR_H_

#include "src/base/optional.h"
#include "src/compiler/gap-resolver.h"
#include "src/compiler/instruction.h"
#include "src/compiler/osr.h"
#include "src/compiler/unwinding-info-writer.h"
#include "src/deoptimizer.h"
#include "src/macro-assembler.h"
#include "src/safepoint-table.h"
#include "src/source-position-table.h"
#include "src/trap-handler/trap-handler.h"

namespace v8 {
namespace internal {

class CompilationInfo;

namespace compiler {

// Forward declarations.
class DeoptimizationExit;
class FrameAccessState;
class Linkage;
class OutOfLineCode;

struct BranchInfo {
  FlagsCondition condition;
  Label* true_label;
  Label* false_label;
  bool fallthru;
};


class InstructionOperandIterator {
 public:
  InstructionOperandIterator(Instruction* instr, size_t pos)
      : instr_(instr), pos_(pos) {}

  Instruction* instruction() const { return instr_; }
  InstructionOperand* Advance() { return instr_->InputAt(pos_++); }

 private:
  Instruction* instr_;
  size_t pos_;
};

// Either a non-null Handle<Object> or a double.
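//
// A rough sketch of the two flavors (illustrative only; the isolate/factory
// setup is assumed to exist elsewhere):
//
//   DeoptimizationLiteral boxed(isolate->factory()->undefined_value());
//   DeoptimizationLiteral unboxed(0.5);  // boxed on Reify()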
class DeoptimizationLiteral {
 public:
  DeoptimizationLiteral() : object_(), number_(0) {}
  explicit DeoptimizationLiteral(Handle<Object> object)
      : object_(object), number_(0) {
    DCHECK(!object_.is_null());
  }
  explicit DeoptimizationLiteral(double number) : object_(), number_(number) {}

  Handle<Object> object() const { return object_; }

  bool operator==(const DeoptimizationLiteral& other) const {
    return object_.equals(other.object_) &&
           bit_cast<uint64_t>(number_) == bit_cast<uint64_t>(other.number_);
  }

  Handle<Object> Reify(Isolate* isolate) const;

 private:
  Handle<Object> object_;
  double number_;
};

// Generates native code for a sequence of instructions.
class CodeGenerator final : public GapResolver::Assembler {
 public:
  explicit CodeGenerator(Zone* codegen_zone, Frame* frame, Linkage* linkage,
                         InstructionSequence* code, CompilationInfo* info,
                         base::Optional<OsrHelper> osr_helper,
                         int start_source_position,
                         JumpOptimizationInfo* jump_opt);

  // Generate native code. After calling AssembleCode, call FinalizeCode to
  // produce the actual code object. If an error occurs during either phase,
  // FinalizeCode returns a null handle.
  void AssembleCode();  // Does not need to run on main thread.
  Handle<Code> FinalizeCode();
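
  // A minimal usage sketch (not authoritative; it assumes the zone, frame,
  // linkage, instruction sequence and CompilationInfo have been set up
  // elsewhere, and the variable names are purely illustrative):
  //
  //   CodeGenerator generator(zone, frame, linkage, instructions, info,
  //                           osr_helper, start_position, jump_opt);
  //   generator.AssembleCode();  // may run off the main thread
  //   Handle<Code> result = generator.FinalizeCode();
  //   if (result.is_null()) {
  //     // Code generation failed (e.g. too many deopt bailouts).
  //   }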

  InstructionSequence* code() const { return code_; }
  FrameAccessState* frame_access_state() const { return frame_access_state_; }
  const Frame* frame() const { return frame_access_state_->frame(); }
  Isolate* isolate() const;
  Linkage* linkage() const { return linkage_; }

  Label* GetLabel(RpoNumber rpo) { return &labels_[rpo.ToSize()]; }

  SourcePosition start_source_position() const {
    return start_source_position_;
  }

  void AssembleSourcePosition(Instruction* instr);
  void AssembleSourcePosition(SourcePosition source_position);

  // Record a safepoint with the given pointer map.
  void RecordSafepoint(ReferenceMap* references, Safepoint::Kind kind,
                       int arguments, Safepoint::DeoptMode deopt_mode);

  Zone* zone() const { return zone_; }

 private:
  TurboAssembler* tasm() { return &tasm_; }
  GapResolver* resolver() { return &resolver_; }
  SafepointTableBuilder* safepoints() { return &safepoints_; }
  CompilationInfo* info() const { return info_; }
  OsrHelper* osr_helper() { return &(*osr_helper_); }

  // Create the FrameAccessState object. The Frame is immutable from here on.
  void CreateFrameAccessState(Frame* frame);

  // Architecture-specific frame finalization.
  void FinishFrame(Frame* frame);

  // Checks if {block} will appear directly after {current_block_} when
  // assembling code, in which case, a fall-through can be used.
  bool IsNextInAssemblyOrder(RpoNumber block) const;

  // Check if a heap object can be materialized by loading from a heap root,
  // which is cheaper on some platforms than materializing the actual heap
  // object constant.
  bool IsMaterializableFromRoot(Handle<HeapObject> object,
                                Heap::RootListIndex* index_return);
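
  // Illustrative use (a sketch, not the definitive call site; |dst| is a
  // hypothetical destination register):
  //
  //   Heap::RootListIndex index;
  //   if (IsMaterializableFromRoot(object, &index)) {
  //     tasm()->LoadRoot(dst, index);  // cheap load from the roots array
  //   }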

  enum CodeGenResult { kSuccess, kTooManyDeoptimizationBailouts };

  // Assemble instructions for the specified block.
  CodeGenResult AssembleBlock(const InstructionBlock* block);

  // Assemble code for the specified instruction.
  CodeGenResult AssembleInstruction(Instruction* instr,
                                    const InstructionBlock* block);
  void AssembleGaps(Instruction* instr);

  // Returns true if an instruction is a tail call that needs to adjust the
  // stack pointer before execution. The stack slot index of the empty slot
  // above the adjusted stack pointer is returned in |slot|.
  bool GetSlotAboveSPBeforeTailCall(Instruction* instr, int* slot);

  CodeGenResult AssembleDeoptimizerCall(int deoptimization_id,
                                        SourcePosition pos);

  // ===========================================================================
  // ============= Architecture-specific code generation methods. ==============
  // ===========================================================================

  CodeGenResult AssembleArchInstruction(Instruction* instr);
  void AssembleArchJump(RpoNumber target);
  void AssembleArchBranch(Instruction* instr, BranchInfo* branch);

  // Generates special branch for deoptimization condition.
  void AssembleArchDeoptBranch(Instruction* instr, BranchInfo* branch);

  void AssembleArchBoolean(Instruction* instr, FlagsCondition condition);
  void AssembleArchTrap(Instruction* instr, FlagsCondition condition);
  void AssembleArchLookupSwitch(Instruction* instr);
  void AssembleArchTableSwitch(Instruction* instr);

  // When entering code that is marked for deoptimization, rather than
  // continuing with its execution, we jump to lazily compiled code. We need to
  // do this because this code has already been deoptimized and needs to be
  // unlinked from the JS functions referring to it.
  void BailoutIfDeoptimized();

  // Generates an architecture-specific, descriptor-specific prologue
  // to set up a stack frame.
  void AssembleConstructFrame();

  // Generates an architecture-specific, descriptor-specific return sequence
  // to tear down a stack frame.
  void AssembleReturn(InstructionOperand* pop);

  void AssembleDeconstructFrame();

  // Generates code to manipulate the stack in preparation for a tail call.
  void AssemblePrepareTailCall();

  // Generates code to pop the current frame if it is an arguments adaptor
  // frame.
  void AssemblePopArgumentsAdaptorFrame(Register args_reg, Register scratch1,
                                        Register scratch2, Register scratch3);

  enum PushTypeFlag {
    kImmediatePush = 0x1,
    kRegisterPush = 0x2,
    kStackSlotPush = 0x4,
    kScalarPush = kRegisterPush | kStackSlotPush
  };

  typedef base::Flags<PushTypeFlag> PushTypeFlags;

  static bool IsValidPush(InstructionOperand source, PushTypeFlags push_type);

  // Generate a list of moves from an instruction that are candidates to be
  // turned into push instructions on platforms that support them. In general,
  // the list of push candidates consists of moves to a set of contiguous
  // destination InstructionOperand locations on the stack that don't clobber
  // values that are needed to resolve the gap or use values generated by the
  // gap, i.e. moves that can be hoisted together before the actual gap and
  // assembled together.
  static void GetPushCompatibleMoves(Instruction* instr,
                                     PushTypeFlags push_type,
                                     ZoneVector<MoveOperands*>* pushes);
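
  // Sketch of the intended call pattern (illustrative; the concrete call
  // sites live in the architecture-specific backends):
  //
  //   ZoneVector<MoveOperands*> pushes(zone());
  //   GetPushCompatibleMoves(instr, kScalarPush, &pushes);
  //   // Entries in |pushes| can now be emitted as pushes ahead of the gap
  //   // instead of as generic gap moves.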

  // Called before a tail call |instr|'s gap moves are assembled and allows
  // gap-specific pre-processing, e.g. adjustment of the sp for tail calls that
  // need it before gap moves or conversion of certain gap moves into pushes.
  void AssembleTailCallBeforeGap(Instruction* instr,
                                 int first_unused_stack_slot);
  // Called after a tail call |instr|'s gap moves are assembled and allows
  // gap-specific post-processing, e.g. adjustment of the sp for tail calls that
  // need it after gap moves.
  void AssembleTailCallAfterGap(Instruction* instr,
                                int first_unused_stack_slot);

  void FinishCode();

  // ===========================================================================
  // ============== Architecture-specific gap resolver methods. ================
  // ===========================================================================

  // Interface used by the gap resolver to emit moves and swaps.
  void AssembleMove(InstructionOperand* source,
                    InstructionOperand* destination) final;
  void AssembleSwap(InstructionOperand* source,
                    InstructionOperand* destination) final;

  // ===========================================================================
  // =================== Jump table construction methods. ======================
  // ===========================================================================

  class JumpTable;
  // Adds a jump table that is emitted after the actual code.  Returns a label
  // pointing to the beginning of the table.  {targets} is assumed to be static
  // or zone allocated.
  Label* AddJumpTable(Label** targets, size_t target_count);
  // Emits a jump table.
  void AssembleJumpTable(Label** targets, size_t target_count);
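
  // Rough sketch of how a table switch might use these helpers (illustrative;
  // the target lookup and the indexed jump are architecture-specific):
  //
  //   Label** cases = zone()->NewArray<Label*>(case_count);
  //   for (size_t i = 0; i < case_count; ++i) {
  //     cases[i] = GetLabel(target_rpo_for(i));  // hypothetical lookup
  //   }
  //   Label* table = AddJumpTable(cases, case_count);
  //   // ...emit an indexed jump through |table|...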

  // ===========================================================================
  // ================== Deoptimization table construction. =====================
  // ===========================================================================

  void RecordCallPosition(Instruction* instr);
  void PopulateDeoptimizationData(Handle<Code> code);
  int DefineDeoptimizationLiteral(DeoptimizationLiteral literal);
  DeoptimizationEntry const& GetDeoptimizationEntry(Instruction* instr,
                                                    size_t frame_state_offset);
  DeoptimizeKind GetDeoptimizationKind(int deoptimization_id) const;
  DeoptimizeReason GetDeoptimizationReason(int deoptimization_id) const;
  int BuildTranslation(Instruction* instr, int pc_offset,
                       size_t frame_state_offset,
                       OutputFrameStateCombine state_combine);
  void BuildTranslationForFrameStateDescriptor(
      FrameStateDescriptor* descriptor, InstructionOperandIterator* iter,
      Translation* translation, OutputFrameStateCombine state_combine);
  void TranslateStateValueDescriptor(StateValueDescriptor* desc,
                                     StateValueList* nested,
                                     Translation* translation,
                                     InstructionOperandIterator* iter);
  void TranslateFrameStateDescriptorOperands(FrameStateDescriptor* desc,
                                             InstructionOperandIterator* iter,
                                             OutputFrameStateCombine combine,
                                             Translation* translation);
  void AddTranslationForOperand(Translation* translation, Instruction* instr,
                                InstructionOperand* op, MachineType type);
  void MarkLazyDeoptSite();

  DeoptimizationExit* AddDeoptimizationExit(Instruction* instr,
                                            size_t frame_state_offset);

  // ===========================================================================

  class DeoptimizationState final : public ZoneObject {
   public:
    DeoptimizationState(BailoutId bailout_id, int translation_id, int pc_offset,
                        DeoptimizeKind kind, DeoptimizeReason reason)
        : bailout_id_(bailout_id),
          translation_id_(translation_id),
          pc_offset_(pc_offset),
          kind_(kind),
          reason_(reason) {}

    BailoutId bailout_id() const { return bailout_id_; }
    int translation_id() const { return translation_id_; }
    int pc_offset() const { return pc_offset_; }
    DeoptimizeKind kind() const { return kind_; }
    DeoptimizeReason reason() const { return reason_; }

   private:
    BailoutId bailout_id_;
    int translation_id_;
    int pc_offset_;
    DeoptimizeKind kind_;
    DeoptimizeReason reason_;
  };

  struct HandlerInfo {
    Label* handler;
    int pc_offset;
  };

  friend class OutOfLineCode;
  friend class CodeGeneratorTester;

  Zone* zone_;
  FrameAccessState* frame_access_state_;
  Linkage* const linkage_;
  InstructionSequence* const code_;
  UnwindingInfoWriter unwinding_info_writer_;
  CompilationInfo* const info_;
  Label* const labels_;
  Label return_label_;
  RpoNumber current_block_;
  SourcePosition start_source_position_;
  SourcePosition current_source_position_;
  TurboAssembler tasm_;
  GapResolver resolver_;
  SafepointTableBuilder safepoints_;
  ZoneVector<HandlerInfo> handlers_;
  ZoneDeque<DeoptimizationExit*> deoptimization_exits_;
  ZoneDeque<DeoptimizationState*> deoptimization_states_;
  ZoneDeque<DeoptimizationLiteral> deoptimization_literals_;
  size_t inlined_function_count_;
  TranslationBuffer translations_;
  int last_lazy_deopt_pc_;

  // kArchCallCFunction could be reached either:
  //   kArchCallCFunction;
  // or:
  //   kArchSaveCallerRegisters;
  //   kArchCallCFunction;
  //   kArchRestoreCallerRegisters;
  // The boolean is used to distinguish the two cases. In the latter case, we
  // also need to decide if FP registers need to be saved, which is controlled
  // by fp_mode_.
  bool caller_registers_saved_;
  SaveFPRegsMode fp_mode_;

  JumpTable* jump_tables_;
  OutOfLineCode* ools_;
  base::Optional<OsrHelper> osr_helper_;
  int osr_pc_offset_;
  int optimized_out_literal_id_;
  SourcePositionTableBuilder source_position_table_builder_;
  CodeGenResult result_;
};

}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_CODE_GENERATOR_H_