// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/backend/jump-threading.h"
#include "src/compiler/backend/code-generator-impl.h"

namespace v8 {
namespace internal {
namespace compiler {

#define TRACE(...)                                \
  do {                                            \
    if (FLAG_trace_turbo_jt) PrintF(__VA_ARGS__); \
  } while (false)

namespace {

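// Bookkeeping for the DFS over instruction blocks: |result| maps each block's
// RPO number to its forwarding target (or a sentinel while the block is
// unvisited or on the stack), |stack| holds the blocks currently being
// visited, and |forwarded| records whether any block was actually forwarded.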
struct JumpThreadingState {
  bool forwarded;
  ZoneVector<RpoNumber>& result;
  ZoneStack<RpoNumber>& stack;

  void Clear(size_t count) { result.assign(count, unvisited()); }
  void PushIfUnvisited(RpoNumber num) {
    if (result[num.ToInt()] == unvisited()) {
      stack.push(num);
      result[num.ToInt()] = onstack();
    }
  }
  void Forward(RpoNumber to) {
    RpoNumber from = stack.top();
    RpoNumber to_to = result[to.ToInt()];
    bool pop = true;
    if (to == from) {
      TRACE("  xx %d\n", from.ToInt());
      result[from.ToInt()] = from;
    } else if (to_to == unvisited()) {
      TRACE("  fw %d -> %d (recurse)\n", from.ToInt(), to.ToInt());
      stack.push(to);
      result[to.ToInt()] = onstack();
      pop = false;  // recurse.
    } else if (to_to == onstack()) {
      TRACE("  fw %d -> %d (cycle)\n", from.ToInt(), to.ToInt());
      result[from.ToInt()] = to;  // break the cycle.
      forwarded = true;
    } else {
      TRACE("  fw %d -> %d (forward)\n", from.ToInt(), to.ToInt());
      result[from.ToInt()] = to_to;  // forward the block.
      forwarded = true;
    }
    if (pop) stack.pop();
  }
  RpoNumber unvisited() { return RpoNumber::FromInt(-1); }
  RpoNumber onstack() { return RpoNumber::FromInt(-2); }
};

}  // namespace

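// Computes, for every instruction block, the block that control can be
// forwarded to by skipping over blocks that contain nothing but redundant
// moves, nops, a forwardable jump, or a shareable return. The mapping is
// written into |result|; returns true if any block was forwarded.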
bool JumpThreading::ComputeForwarding(Zone* local_zone,
                                      ZoneVector<RpoNumber>* result,
                                      InstructionSequence* code,
                                      bool frame_at_start) {
  ZoneStack<RpoNumber> stack(local_zone);
  JumpThreadingState state = {false, *result, stack};
  state.Clear(code->InstructionBlockCount());
  RpoNumber empty_deconstruct_frame_return_block = RpoNumber::Invalid();
  int32_t empty_deconstruct_frame_return_size;
  RpoNumber empty_no_deconstruct_frame_return_block = RpoNumber::Invalid();
  int32_t empty_no_deconstruct_frame_return_size;

  // Iterate over the blocks forward, pushing the blocks onto the stack.
  for (auto const instruction_block : code->instruction_blocks()) {
    RpoNumber current = instruction_block->rpo_number();
    state.PushIfUnvisited(current);

    // Process the stack, which implements DFS through empty blocks.
    while (!state.stack.empty()) {
      InstructionBlock* block = code->InstructionBlockAt(state.stack.top());
      // Process the instructions in a block up to a non-empty instruction.
      TRACE("jt [%d] B%d\n", static_cast<int>(stack.size()),
            block->rpo_number().ToInt());
      RpoNumber fw = block->rpo_number();
      bool fallthru = true;
      for (int i = block->code_start(); i < block->code_end(); ++i) {
        Instruction* instr = code->InstructionAt(i);
        if (!instr->AreMovesRedundant()) {
          // can't skip instructions with non redundant moves.
          TRACE("  parallel move\n");
          fallthru = false;
        } else if (FlagsModeField::decode(instr->opcode()) != kFlags_none) {
          // can't skip instructions with flags continuations.
          TRACE("  flags\n");
          fallthru = false;
        } else if (instr->IsNop()) {
          // skip nops.
          TRACE("  nop\n");
          continue;
        } else if (instr->arch_opcode() == kArchJmp) {
          // try to forward the jump instruction.
          TRACE("  jmp\n");
          // if this block deconstructs the frame, we can't forward it.
          // TODO(mtrofin): we can still forward if we end up building
          // the frame at start. So we should move the decision of whether
          // to build a frame or not in the register allocator, and trickle it
          // here and to the code generator.
          if (frame_at_start || !(block->must_deconstruct_frame() ||
                                  block->must_construct_frame())) {
            fw = code->InputRpo(instr, 0);
          }
          fallthru = false;
        } else if (instr->IsRet()) {
          TRACE("  ret\n");
          if (fallthru) {
            CHECK_IMPLIES(block->must_construct_frame(),
                          block->must_deconstruct_frame());
            // Only handle returns with immediate/constant operands, since
            // they must always be the same for all returns in a function.
            // Dynamic return values might use different registers at
            // different return sites and therefore cannot be shared.
            if (instr->InputAt(0)->IsImmediate()) {
              int32_t return_size = ImmediateOperand::cast(instr->InputAt(0))
                                        ->inline_int32_value();
              // Instructions can be shared only for blocks that share
              // the same |must_deconstruct_frame| attribute.
              if (block->must_deconstruct_frame()) {
                if (empty_deconstruct_frame_return_block ==
                    RpoNumber::Invalid()) {
                  empty_deconstruct_frame_return_block = block->rpo_number();
                  empty_deconstruct_frame_return_size = return_size;
                } else if (empty_deconstruct_frame_return_size == return_size) {
                  fw = empty_deconstruct_frame_return_block;
                  block->clear_must_deconstruct_frame();
                }
              } else {
                if (empty_no_deconstruct_frame_return_block ==
                    RpoNumber::Invalid()) {
                  empty_no_deconstruct_frame_return_block = block->rpo_number();
                  empty_no_deconstruct_frame_return_size = return_size;
                } else if (empty_no_deconstruct_frame_return_size ==
                           return_size) {
                  fw = empty_no_deconstruct_frame_return_block;
                }
              }
            }
          }
          fallthru = false;
        } else {
          // can't skip other instructions.
          TRACE("  other\n");
          fallthru = false;
        }
        break;
      }
      if (fallthru) {
        int next = 1 + block->rpo_number().ToInt();
        if (next < code->InstructionBlockCount()) fw = RpoNumber::FromInt(next);
      }
      state.Forward(fw);
    }
  }

#ifdef DEBUG
  for (RpoNumber num : *result) {
    DCHECK(num.IsValid());
  }
#endif

  if (FLAG_trace_turbo_jt) {
    for (int i = 0; i < static_cast<int>(result->size()); i++) {
      TRACE("B%d ", i);
      int to = (*result)[i].ToInt();
      if (i != to) {
        TRACE("-> B%d\n", to);
      } else {
        TRACE("\n");
      }
    }
  }

  return state.forwarded;
}

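// Rewrites the instruction sequence according to |result|: redundant jumps
// and returns in skipped blocks are overwritten with nops, RPO immediates are
// redirected to their forwarding targets, and assembly-order numbers are
// reassigned so fall-through detection still works across skipped blocks.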
void JumpThreading::ApplyForwarding(Zone* local_zone,
                                    ZoneVector<RpoNumber> const& result,
                                    InstructionSequence* code) {
  if (!FLAG_turbo_jt) return;

  ZoneVector<bool> skip(static_cast<int>(result.size()), false, local_zone);

  // Skip empty blocks when the previous block doesn't fall through.
  bool prev_fallthru = true;
  for (auto const block : code->ao_blocks()) {
    RpoNumber block_rpo = block->rpo_number();
    int block_num = block_rpo.ToInt();
    RpoNumber result_rpo = result[block_num];
    skip[block_num] = !prev_fallthru && result_rpo != block_rpo;

    if (result_rpo != block_rpo) {
      // We need the handler information to be propagated, so that branch
      // targets are annotated as necessary for control flow integrity
      // checks (when enabled).
      if (code->InstructionBlockAt(block_rpo)->IsHandler()) {
        code->InstructionBlockAt(result_rpo)->MarkHandler();
      }
    }

    bool fallthru = true;
    for (int i = block->code_start(); i < block->code_end(); ++i) {
      Instruction* instr = code->InstructionAt(i);
      FlagsMode mode = FlagsModeField::decode(instr->opcode());
      if (mode == kFlags_branch) {
        fallthru = false;  // branches don't fall through to the next block.
      } else if (instr->arch_opcode() == kArchJmp ||
                 instr->arch_opcode() == kArchRet) {
        if (skip[block_num]) {
          // Overwrite a redundant jump with a nop.
          TRACE("jt-fw nop @%d\n", i);
          instr->OverwriteWithNop();
          // If this block was marked as a handler, it can be unmarked now.
          code->InstructionBlockAt(block_rpo)->UnmarkHandler();
        }
        fallthru = false;  // jumps don't fall through to the next block.
      }
    }
    prev_fallthru = fallthru;
  }

  // Patch RPO immediates.
  InstructionSequence::RpoImmediates& rpo_immediates = code->rpo_immediates();
  for (size_t i = 0; i < rpo_immediates.size(); i++) {
    RpoNumber rpo = rpo_immediates[i];
    if (rpo.IsValid()) {
      RpoNumber fw = result[rpo.ToInt()];
      if (fw != rpo) rpo_immediates[i] = fw;
    }
  }

  // Renumber the blocks so that IsNextInAssemblyOrder() will return true,
  // even if there are skipped blocks in-between.
  int ao = 0;
  for (auto const block : code->ao_blocks()) {
    block->set_ao_number(RpoNumber::FromInt(ao));
    if (!skip[block->rpo_number().ToInt()]) ao++;
  }
}

#undef TRACE

}  // namespace compiler
}  // namespace internal
}  // namespace v8