// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_CRANKSHAFT_ARM64_LITHIUM_CODEGEN_ARM64_H_
#define V8_CRANKSHAFT_ARM64_LITHIUM_CODEGEN_ARM64_H_

#include "src/crankshaft/arm64/lithium-arm64.h"

#include "src/ast/scopes.h"
#include "src/crankshaft/arm64/lithium-gap-resolver-arm64.h"
#include "src/crankshaft/lithium-codegen.h"
#include "src/deoptimizer.h"
#include "src/safepoint-table.h"
#include "src/utils.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class SafepointGenerator;
class BranchGenerator;

class LCodeGen: public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
29
        jump_table_(4, info->zone()),
30 31 32 33 34
        scope_(info->scope()),
        deferred_(8, info->zone()),
        frame_is_built_(false),
        safepoints_(info->zone()),
        resolver_(this),
35 36
        expected_safepoint_kind_(Safepoint::kSimple),
        pushed_arguments_(0) {
37 38 39 40 41 42 43 44 45 46 47 48 49 50 51
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }

  // Simple accessors.
  Scope* scope() const { return scope_; }

  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

  bool NeedsEagerFrame() const {
52 53
    return HasAllocatedStackSlots() || info()->is_non_deferred_calling() ||
           !info()->IsStub() || info()->requires_frame();
54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71
  }
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  LinkRegisterStatus GetLinkRegisterState() const {
    return frame_is_built_ ? kLRHasBeenSaved : kLRHasNotBeenSaved;
  }

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);

72
  enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
73 74 75 76
  // Support for converting LOperands to assembler types.
  Register ToRegister(LOperand* op) const;
  Register ToRegister32(LOperand* op) const;
  Operand ToOperand(LOperand* op);
77
  Operand ToOperand32(LOperand* op);
78 79 80
  enum StackMode { kMustUseFramePointer, kCanUseStackPointer };
  MemOperand ToMemOperand(LOperand* op,
                          StackMode stack_mode = kCanUseStackPointer) const;
81 82
  Handle<Object> ToHandle(LConstantOperand* op) const;

83 84
  template <class LI>
  Operand ToShiftedRightOperand32(LOperand* right, LI* shift_info);
85 86 87 88 89

  int JSShiftAmountFromLConstant(LOperand* constant) {
    return ToInteger32(LConstantOperand::cast(constant)) & 0x1f;
  }

90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112
  // TODO(jbramley): Examine these helpers and check that they make sense.
  // IsInteger32Constant returns true for smi constants, for example.
  bool IsInteger32Constant(LConstantOperand* op) const;
  bool IsSmi(LConstantOperand* op) const;

  int32_t ToInteger32(LConstantOperand* op) const;
  Smi* ToSmi(LConstantOperand* op) const;
  double ToDouble(LConstantOperand* op) const;
  DoubleRegister ToDoubleRegister(LOperand* op) const;

  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO

 private:
  // Return a double scratch register which can be used locally
  // when generating code for a lithium instruction.
  DoubleRegister double_scratch() { return crankshaft_fp_scratch; }

  // Deferred code support.
  void DoDeferredNumberTagD(LNumberTagD* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
113
  void DoDeferredMaybeGrowElements(LMaybeGrowElements* instr);
114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredMathAbsTagged(LMathAbsTagged* instr,
                               Label* exit,
                               Label* allocation_entry);

  void DoDeferredNumberTagU(LInstruction* instr,
                            LOperand* value,
                            LOperand* temp1,
                            LOperand* temp2);
  void DoDeferredTaggedToI(LTaggedToI* instr,
                           LOperand* value,
                           LOperand* temp1,
                           LOperand* temp2);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
130 131 132 133
  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
                                   Register result,
                                   Register object,
                                   Register index);
134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
  void EmitGoto(int block);
  void DoGap(LGap* instr);

  // Generic version of EmitBranch. It contains some code to avoid emitting a
  // branch on the next emitted basic block where we could just fall-through.
  // You shouldn't use that directly but rather consider one of the helper like
  // LCodeGen::EmitBranch, LCodeGen::EmitCompareAndBranch...
  template<class InstrType>
  void EmitBranchGeneric(InstrType instr,
                         const BranchGenerator& branch);

  template<class InstrType>
  void EmitBranch(InstrType instr, Condition condition);

  template<class InstrType>
  void EmitCompareAndBranch(InstrType instr,
                            Condition condition,
                            const Register& lhs,
                            const Operand& rhs);

  template<class InstrType>
  void EmitTestAndBranch(InstrType instr,
                         Condition condition,
                         const Register& value,
                         uint64_t mask);

  template<class InstrType>
  void EmitBranchIfNonZeroNumber(InstrType instr,
                                 const FPRegister& value,
                                 const FPRegister& scratch);

  template<class InstrType>
  void EmitBranchIfHeapNumber(InstrType instr,
                              const Register& value);

  template<class InstrType>
  void EmitBranchIfRoot(InstrType instr,
                        const Register& value,
                        Heap::RootListIndex index);

  // Emits optimized code to deep-copy the contents of statically known object
  // graphs (e.g. object literal boilerplate). Expects a pointer to the
  // allocated destination object in the result register, and a pointer to the
  // source object in the source register.
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    Register scratch,
                    int* offset,
                    AllocationSiteMode mode);

187 188
  template <class T>
  void EmitVectorLoadICRegisters(T* instr);
189 190
  template <class T>
  void EmitVectorStoreICRegisters(T* instr);
191

192 193 194 195 196 197 198 199 200 201
  // Emits optimized code for %_IsString(x).  Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsString(Register input, Register temp1, Label* is_not_string,
                         SmiCheck check_needed);

  MemOperand BuildSeqStringOperand(Register string,
                                   Register temp,
                                   LOperand* index,
                                   String::Encoding encoding);
202
  void DeoptimizeBranch(LInstruction* instr, DeoptimizeReason deopt_reason,
203 204
                        BranchType branch_type, Register reg = NoReg,
                        int bit = -1,
205
                        Deoptimizer::BailoutType* override_bailout_type = NULL);
206
  void Deoptimize(LInstruction* instr, DeoptimizeReason deopt_reason,
207
                  Deoptimizer::BailoutType* override_bailout_type = NULL);
208
  void DeoptimizeIf(Condition cond, LInstruction* instr,
209
                    DeoptimizeReason deopt_reason);
210
  void DeoptimizeIfZero(Register rt, LInstruction* instr,
211
                        DeoptimizeReason deopt_reason);
212
  void DeoptimizeIfNotZero(Register rt, LInstruction* instr,
213
                           DeoptimizeReason deopt_reason);
214
  void DeoptimizeIfNegative(Register rt, LInstruction* instr,
215
                            DeoptimizeReason deopt_reason);
216
  void DeoptimizeIfSmi(Register rt, LInstruction* instr,
217
                       DeoptimizeReason deopt_reason);
218
  void DeoptimizeIfNotSmi(Register rt, LInstruction* instr,
219
                          DeoptimizeReason deopt_reason);
220
  void DeoptimizeIfRoot(Register rt, Heap::RootListIndex index,
221
                        LInstruction* instr, DeoptimizeReason deopt_reason);
222
  void DeoptimizeIfNotRoot(Register rt, Heap::RootListIndex index,
223
                           LInstruction* instr, DeoptimizeReason deopt_reason);
224
  void DeoptimizeIfNotHeapNumber(Register object, LInstruction* instr);
225
  void DeoptimizeIfMinusZero(DoubleRegister input, LInstruction* instr,
226
                             DeoptimizeReason deopt_reason);
227
  void DeoptimizeIfBitSet(Register rt, int bit, LInstruction* instr,
228
                          DeoptimizeReason deopt_reason);
229
  void DeoptimizeIfBitClear(Register rt, int bit, LInstruction* instr,
230
                            DeoptimizeReason deopt_reason);
231 232 233 234 235 236 237 238

  MemOperand PrepareKeyedExternalArrayOperand(Register key,
                                              Register base,
                                              Register scratch,
                                              bool key_is_smi,
                                              bool key_is_constant,
                                              int constant_key,
                                              ElementsKind elements_kind,
239
                                              int base_offset);
240 241 242 243 244 245
  MemOperand PrepareKeyedArrayOperand(Register base,
                                      Register elements,
                                      Register key,
                                      bool key_is_tagged,
                                      ElementsKind elements_kind,
                                      Representation representation,
246
                                      int base_offset);
247 248 249 250

  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);

251 252 253 254 255 256 257
  bool HasAllocatedStackSlots() const {
    return chunk()->HasAllocatedStackSlots();
  }
  int GetStackSlotCount() const { return chunk()->GetSpillSlotCount(); }
  int GetTotalFrameSlotCount() const {
    return chunk()->GetTotalFrameSlotCount();
  }
258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);

  void SaveCallerDoubles();
  void RestoreCallerDoubles();

  // Code generation steps.  Returns true if code generation should continue.
276
  void GenerateBodyInstructionPre(LInstruction* instr) override;
277 278
  bool GeneratePrologue();
  bool GenerateDeferredCode();
279
  bool GenerateJumpTable();
280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300 301 302 303 304 305 306 307 308 309 310
  bool GenerateSafepointTable();

  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
  void GenerateOsrPrologue();

  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
  };

  void CallCode(Handle<Code> code,
                RelocInfo::Mode mode,
                LInstruction* instr);

  void CallCodeGeneric(Handle<Code> code,
                       RelocInfo::Mode mode,
                       LInstruction* instr,
                       SafepointMode safepoint_mode);

  void CallRuntime(const Runtime::Function* function,
                   int num_arguments,
                   LInstruction* instr,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);

  void CallRuntime(Runtime::FunctionId id,
                   int num_arguments,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, num_arguments, instr);
  }

311 312 313 314 315
  void CallRuntime(Runtime::FunctionId id, LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, function->nargs, instr);
  }

316 317 318 319 320 321
  void LoadContextFromDeferred(LOperand* context);
  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr,
                               LOperand* context);

322 323 324
  void PrepareForTailCall(const ParameterCount& actual, Register scratch1,
                          Register scratch2, Register scratch3);

325 326
  // Generate a direct call to a known function.  Expects the function
  // to be in x1.
327
  void CallKnownFunction(Handle<JSFunction> function,
328
                         int formal_parameter_count, int arity,
329
                         bool is_tail_call, LInstruction* instr);
330

331
  // Support for recording safepoint information.
332 333 334 335 336 337 338 339 340 341 342 343
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);
  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode);

344
  void EnsureSpaceForLazyDeopt(int space_needed) override;
345

346
  ZoneList<Deoptimizer::JumpTableEntry*> jump_table_;
347 348 349 350 351 352 353 354 355 356 357 358 359
  Scope* const scope_;
  ZoneList<LDeferredCode*> deferred_;
  bool frame_is_built_;

  // Builder that keeps track of safepoints in the code. The table itself is
  // emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiler from a set of parallel moves to a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

360 361 362 363 364 365 366 367 368
  // The number of arguments pushed onto the stack, either by this block or by a
  // predecessor.
  int pushed_arguments_;

  void RecordPushedArgumentsDelta(int delta) {
    pushed_arguments_ += delta;
    DCHECK(pushed_arguments_ >= 0);
  }

369 370 371 372
  int old_position_;

  class PushSafepointRegistersScope BASE_EMBEDDED {
   public:
373
    explicit PushSafepointRegistersScope(LCodeGen* codegen)
374
        : codegen_(codegen) {
375 376
      DCHECK(codegen_->info()->is_calling());
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
377
      codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;
378

379 380 381 382 383 384
      UseScratchRegisterScope temps(codegen_->masm_);
      // Preserve the value of lr which must be saved on the stack (the call to
      // the stub will clobber it).
      Register to_be_pushed_lr =
          temps.UnsafeAcquire(StoreRegistersStateStub::to_be_pushed_lr());
      codegen_->masm_->Mov(to_be_pushed_lr, lr);
385 386
      StoreRegistersStateStub stub(codegen_->isolate());
      codegen_->masm_->CallStub(&stub);
387 388 389
    }

    ~PushSafepointRegistersScope() {
390
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
391 392
      RestoreRegistersStateStub stub(codegen_->isolate());
      codegen_->masm_->CallStub(&stub);
393 394 395 396 397 398 399 400 401 402 403 404 405 406 407 408 409 410 411 412 413 414 415 416 417 418 419 420 421 422 423 424 425 426 427 428 429 430 431 432 433 434 435 436 437 438 439 440 441 442
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };

  friend class LDeferredCode;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};


// Base class for code that is emitted out-of-line, after the main
// instruction stream. Instances register themselves with the owning
// LCodeGen on construction; subclasses implement Generate() to emit
// the actual deferred code.
class LDeferredCode: public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() { }

  // Emits the out-of-line code for this deferred block.
  virtual void Generate() = 0;
  // The lithium instruction this deferred code belongs to.
  virtual LInstruction* instr() = 0;

  // Installs an externally-owned exit label, overriding the default one.
  void SetExit(Label* exit) { external_exit_ = exit; }
  // Label bound at the start of the deferred code.
  Label* entry() { return &entry_; }
  // Label to jump to when the deferred code is done; an installed external
  // exit takes precedence over the internal one.
  Label* exit() {
    return (external_exit_ == NULL) ? &exit_ : external_exit_;
  }
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label* external_exit_;
  int instruction_index_;
};


// This is the abstract class used by EmitBranchGeneric.
// It is used to emit code for conditional branching. The Emit() function
// emits code to branch when the condition holds and EmitInverted() emits
// the branch when the inverted condition is verified.
//
// For actual examples of conditions see the concrete implementations in
// lithium-codegen-arm64.cc (e.g. BranchOnCondition, CompareAndBranch).
class BranchGenerator BASE_EMBEDDED {
 public:
  explicit BranchGenerator(LCodeGen* codegen) : codegen_(codegen) {}

  virtual ~BranchGenerator() {}

  // Branch to |label| when the condition holds.
  virtual void Emit(Label* label) const = 0;
  // Branch to |label| when the inverted condition holds.
  virtual void EmitInverted(Label* label) const = 0;

 protected:
  MacroAssembler* masm() const { return codegen_->masm(); }

  LCodeGen* codegen_;
};

}  // namespace internal
}  // namespace v8

#endif  // V8_CRANKSHAFT_ARM64_LITHIUM_CODEGEN_ARM64_H_