// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/backend/jump-threading.h"
#include "src/compiler/backend/code-generator-impl.h"

namespace v8 {
namespace internal {
namespace compiler {

12 13 14 15
#define TRACE(...)                                \
  do {                                            \
    if (FLAG_trace_turbo_jt) PrintF(__VA_ARGS__); \
  } while (false)
16

17 18
namespace {

19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35
struct JumpThreadingState {
  bool forwarded;
  ZoneVector<RpoNumber>& result;
  ZoneStack<RpoNumber>& stack;

  void Clear(size_t count) { result.assign(count, unvisited()); }
  void PushIfUnvisited(RpoNumber num) {
    if (result[num.ToInt()] == unvisited()) {
      stack.push(num);
      result[num.ToInt()] = onstack();
    }
  }
  void Forward(RpoNumber to) {
    RpoNumber from = stack.top();
    RpoNumber to_to = result[to.ToInt()];
    bool pop = true;
    if (to == from) {
36
      TRACE("  xx %d\n", from.ToInt());
37 38
      result[from.ToInt()] = from;
    } else if (to_to == unvisited()) {
39
      TRACE("  fw %d -> %d (recurse)\n", from.ToInt(), to.ToInt());
40 41 42 43
      stack.push(to);
      result[to.ToInt()] = onstack();
      pop = false;  // recurse.
    } else if (to_to == onstack()) {
44
      TRACE("  fw %d -> %d (cycle)\n", from.ToInt(), to.ToInt());
45 46 47
      result[from.ToInt()] = to;  // break the cycle.
      forwarded = true;
    } else {
48
      TRACE("  fw %d -> %d (forward)\n", from.ToInt(), to.ToInt());
49 50 51 52 53 54 55 56 57
      result[from.ToInt()] = to_to;  // forward the block.
      forwarded = true;
    }
    if (pop) stack.pop();
  }
  RpoNumber unvisited() { return RpoNumber::FromInt(-1); }
  RpoNumber onstack() { return RpoNumber::FromInt(-2); }
};

58 59 60 61 62 63 64 65 66 67 68 69 70
bool IsBlockWithBranchPoisoning(InstructionSequence* code,
                                InstructionBlock* block) {
  if (block->PredecessorCount() != 1) return false;
  RpoNumber pred_rpo = (block->predecessors())[0];
  const InstructionBlock* pred = code->InstructionBlockAt(pred_rpo);
  if (pred->code_start() == pred->code_end()) return false;
  Instruction* instr = code->InstructionAt(pred->code_end() - 1);
  FlagsMode mode = FlagsModeField::decode(instr->opcode());
  return mode == kFlags_branch_and_poison;
}

}  // namespace

71
bool JumpThreading::ComputeForwarding(Zone* local_zone,
72
                                      ZoneVector<RpoNumber>* result,
73 74
                                      InstructionSequence* code,
                                      bool frame_at_start) {
75
  ZoneStack<RpoNumber> stack(local_zone);
76
  JumpThreadingState state = {false, *result, stack};
77
  state.Clear(code->InstructionBlockCount());
78 79 80 81
  RpoNumber empty_deconstruct_frame_return_block = RpoNumber::Invalid();
  int32_t empty_deconstruct_frame_return_size;
  RpoNumber empty_no_deconstruct_frame_return_block = RpoNumber::Invalid();
  int32_t empty_no_deconstruct_frame_return_size;
82 83 84 85 86 87 88 89 90 91

  // Iterate over the blocks forward, pushing the blocks onto the stack.
  for (auto const block : code->instruction_blocks()) {
    RpoNumber current = block->rpo_number();
    state.PushIfUnvisited(current);

    // Process the stack, which implements DFS through empty blocks.
    while (!state.stack.empty()) {
      InstructionBlock* block = code->InstructionBlockAt(state.stack.top());
      // Process the instructions in a block up to a non-empty instruction.
92 93
      TRACE("jt [%d] B%d\n", static_cast<int>(stack.size()),
            block->rpo_number().ToInt());
94
      RpoNumber fw = block->rpo_number();
95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123
      if (!IsBlockWithBranchPoisoning(code, block)) {
        bool fallthru = true;
        for (int i = block->code_start(); i < block->code_end(); ++i) {
          Instruction* instr = code->InstructionAt(i);
          if (!instr->AreMovesRedundant()) {
            // can't skip instructions with non redundant moves.
            TRACE("  parallel move\n");
            fallthru = false;
          } else if (FlagsModeField::decode(instr->opcode()) != kFlags_none) {
            // can't skip instructions with flags continuations.
            TRACE("  flags\n");
            fallthru = false;
          } else if (instr->IsNop()) {
            // skip nops.
            TRACE("  nop\n");
            continue;
          } else if (instr->arch_opcode() == kArchJmp) {
            // try to forward the jump instruction.
            TRACE("  jmp\n");
            // if this block deconstructs the frame, we can't forward it.
            // TODO(mtrofin): we can still forward if we end up building
            // the frame at start. So we should move the decision of whether
            // to build a frame or not in the register allocator, and trickle it
            // here and to the code generator.
            if (frame_at_start || !(block->must_deconstruct_frame() ||
                                    block->must_construct_frame())) {
              fw = code->InputRpo(instr, 0);
            }
            fallthru = false;
124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161
          } else if (instr->IsRet()) {
            TRACE("  ret\n");
            if (fallthru) {
              CHECK_IMPLIES(block->must_construct_frame(),
                            block->must_deconstruct_frame());
              // Only handle returns with immediate/constant operands, since
              // they must always be the same for all returns in a function.
              // Dynamic return values might use different registers at
              // different return sites and therefore cannot be shared.
              if (instr->InputAt(0)->IsImmediate()) {
                int32_t return_size =
                    ImmediateOperand::cast(instr->InputAt(0))->inline_value();
                // Instructions can be shared only for blocks that share
                // the same |must_deconstruct_frame| attribute.
                if (block->must_deconstruct_frame()) {
                  if (empty_deconstruct_frame_return_block ==
                      RpoNumber::Invalid()) {
                    empty_deconstruct_frame_return_block = block->rpo_number();
                    empty_deconstruct_frame_return_size = return_size;
                  } else if (empty_deconstruct_frame_return_size ==
                             return_size) {
                    fw = empty_deconstruct_frame_return_block;
                    block->clear_must_deconstruct_frame();
                  }
                } else {
                  if (empty_no_deconstruct_frame_return_block ==
                      RpoNumber::Invalid()) {
                    empty_no_deconstruct_frame_return_block =
                        block->rpo_number();
                    empty_no_deconstruct_frame_return_size = return_size;
                  } else if (empty_no_deconstruct_frame_return_size ==
                             return_size) {
                    fw = empty_no_deconstruct_frame_return_block;
                  }
                }
              }
            }
            fallthru = false;
162 163 164 165
          } else {
            // can't skip other instructions.
            TRACE("  other\n");
            fallthru = false;
166
          }
167
          break;
168 169 170 171 172 173
        }
        if (fallthru) {
          int next = 1 + block->rpo_number().ToInt();
          if (next < code->InstructionBlockCount())
            fw = RpoNumber::FromInt(next);
        }
174
      }
175 176 177 178
      state.Forward(fw);
    }
  }

Ben L. Titzer's avatar
Ben L. Titzer committed
179
#ifdef DEBUG
180
  for (RpoNumber num : *result) {
181
    DCHECK(num.IsValid());
Ben L. Titzer's avatar
Ben L. Titzer committed
182
  }
183 184
#endif

Ben L. Titzer's avatar
Ben L. Titzer committed
185
  if (FLAG_trace_turbo_jt) {
186
    for (int i = 0; i < static_cast<int>(result->size()); i++) {
187
      TRACE("B%d ", i);
188
      int to = (*result)[i].ToInt();
Ben L. Titzer's avatar
Ben L. Titzer committed
189
      if (i != to) {
190
        TRACE("-> B%d\n", to);
Ben L. Titzer's avatar
Ben L. Titzer committed
191
      } else {
192
        TRACE("\n");
Ben L. Titzer's avatar
Ben L. Titzer committed
193
      }
194 195 196 197 198 199
    }
  }

  return state.forwarded;
}

200
void JumpThreading::ApplyForwarding(Zone* local_zone,
201
                                    ZoneVector<RpoNumber> const& result,
202 203 204
                                    InstructionSequence* code) {
  if (!FLAG_turbo_jt) return;

205
  ZoneVector<bool> skip(static_cast<int>(result.size()), false, local_zone);
206 207 208 209

  // Skip empty blocks when the previous block doesn't fall through.
  bool prev_fallthru = true;
  for (auto const block : code->instruction_blocks()) {
210 211 212 213 214 215 216 217 218 219 220 221 222
    RpoNumber block_rpo = block->rpo_number();
    int block_num = block_rpo.ToInt();
    RpoNumber result_rpo = result[block_num];
    skip[block_num] = !prev_fallthru && result_rpo != block_rpo;

    if (result_rpo != block_rpo) {
      // We need the handler information to be propagated, so that branch
      // targets are annotated as necessary for control flow integrity
      // checks (when enabled).
      if (code->InstructionBlockAt(block_rpo)->IsHandler()) {
        code->InstructionBlockAt(result_rpo)->MarkHandler();
      }
    }
223 224 225 226

    bool fallthru = true;
    for (int i = block->code_start(); i < block->code_end(); ++i) {
      Instruction* instr = code->InstructionAt(i);
227 228
      FlagsMode mode = FlagsModeField::decode(instr->opcode());
      if (mode == kFlags_branch || mode == kFlags_branch_and_poison) {
229
        fallthru = false;  // branches don't fall through to the next block.
230 231
      } else if (instr->arch_opcode() == kArchJmp ||
                 instr->arch_opcode() == kArchRet) {
232 233
        if (skip[block_num]) {
          // Overwrite a redundant jump with a nop.
234
          TRACE("jt-fw nop @%d\n", i);
235
          instr->OverwriteWithNop();
236 237
          // If this block was marked as a handler, it can be unmarked now.
          code->InstructionBlockAt(block_rpo)->UnmarkHandler();
238 239 240 241 242 243 244 245 246 247 248 249 250 251
        }
        fallthru = false;  // jumps don't fall through to the next block.
      }
    }
    prev_fallthru = fallthru;
  }

  // Patch RPO immediates.
  InstructionSequence::Immediates& immediates = code->immediates();
  for (size_t i = 0; i < immediates.size(); i++) {
    Constant constant = immediates[i];
    if (constant.type() == Constant::kRpoNumber) {
      RpoNumber rpo = constant.ToRpoNumber();
      RpoNumber fw = result[rpo.ToInt()];
252
      if (!(fw == rpo)) immediates[i] = Constant(fw);
253 254 255
    }
  }

256 257
  // Renumber the blocks so that IsNextInAssemblyOrder() will return true,
  // even if there are skipped blocks in-between.
258
  int ao = 0;
259 260 261
  for (auto const block : code->ao_blocks()) {
    block->set_ao_number(RpoNumber::FromInt(ao));
    if (!skip[block->rpo_number().ToInt()]) ao++;
262 263 264
  }
}

#undef TRACE

}  // namespace compiler
}  // namespace internal
}  // namespace v8