// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/state-values-utils.h"

#include "src/bit-vector.h"

namespace v8 {
namespace internal {
namespace compiler {

// Builds a cache of canonicalized StateValues nodes for the given graph.
// hash_map_ deduplicates value lists via AreKeysEqual; working_space_ holds
// per-tree-level scratch buffers; the empty StateValues node is lazy-created.
StateValuesCache::StateValuesCache(JSGraph* js_graph)
    : js_graph_(js_graph),
      hash_map_(AreKeysEqual, ZoneHashMap::kDefaultHashMapCapacity,
                ZoneAllocationPolicy(zone())),
      working_space_(zone()),
      empty_state_values_(nullptr) {}


// static
// Equality predicate for the hash map. Keys are either StateValuesKeys
// (node == nullptr), used while looking up a value list that has not been
// turned into a node yet, or NodeKeys wrapping an existing StateValues node.
// Dispatches on which combination of the two is being compared.
bool StateValuesCache::AreKeysEqual(void* key1, void* key2) {
  NodeKey* node_key1 = reinterpret_cast<NodeKey*>(key1);
  NodeKey* node_key2 = reinterpret_cast<NodeKey*>(key2);

  if (node_key1->node == nullptr) {
    if (node_key2->node == nullptr) {
      // Both keys are raw value lists; compare them element-wise.
      return AreValueKeysEqual(reinterpret_cast<StateValuesKey*>(key1),
                               reinterpret_cast<StateValuesKey*>(key2));
    }
    // Compare the raw value list against the existing node's inputs.
    return IsKeysEqualToNode(reinterpret_cast<StateValuesKey*>(key1),
                             node_key2->node);
  }
  if (node_key2->node == nullptr) {
    return IsKeysEqualToNode(reinterpret_cast<StateValuesKey*>(key2),
                             node_key1->node);
  }
  // If the nodes are already processed, they must be the same.
  return node_key1->node == node_key2->node;
  // NOTE: the original had an unreachable UNREACHABLE() here; every branch
  // above returns, so it was dead code and has been removed.
}


// static
// Returns true iff the raw value list in |key| would canonicalize to exactly
// the existing StateValues |node|: same input count, same sparse input mask,
// and identical real inputs.
bool StateValuesCache::IsKeysEqualToNode(StateValuesKey* key, Node* node) {
  if (key->count != static_cast<size_t>(node->InputCount())) return false;

  DCHECK_EQ(IrOpcode::kStateValues, node->opcode());
  SparseInputMask node_mask = SparseInputMaskOf(node->op());
  if (node_mask != key->mask) return false;

  // Comparing real inputs rather than sparse inputs, since we already know
  // the sparse input masks are the same.
  for (size_t i = 0; i < key->count; ++i) {
    if (node->InputAt(static_cast<int>(i)) != key->values[i]) return false;
  }
  return true;
}


// static
// Two raw value-list keys are equal iff their counts, sparse input masks and
// every stored value pointer coincide.
bool StateValuesCache::AreValueKeysEqual(StateValuesKey* key1,
                                         StateValuesKey* key2) {
  if (key1->count != key2->count) return false;
  if (key1->mask != key2->mask) return false;
  for (size_t i = 0; i < key1->count; ++i) {
    if (key2->values[i] != key1->values[i]) return false;
  }
  return true;
}


// Returns the canonical zero-input StateValues node, creating it on first
// use and caching it for all later calls.
Node* StateValuesCache::GetEmptyStateValues() {
  if (empty_state_values_ != nullptr) return empty_state_values_;
  empty_state_values_ =
      graph()->NewNode(common()->StateValues(0, SparseInputMask::Dense()));
  return empty_state_values_;
}

97 98 99 100
StateValuesCache::WorkingBuffer* StateValuesCache::GetWorkingSpace(
    size_t level) {
  if (working_space_.size() <= level) {
    working_space_.resize(level + 1);
101
  }
102
  return &working_space_[level];
103 104 105 106 107 108 109
}

namespace {

// Hashes an array of (possibly nullptr) node pointers by node id, clamped to
// a non-negative int as required by the hash map.
int StateValuesHashKey(Node** nodes, size_t count) {
  size_t hash = count;
  for (size_t i = 0; i < count; ++i) {
    Node* node = nodes[i];
    hash = hash * 23 + (node == nullptr ? 0 : node->id());
  }
  return static_cast<int>(hash & 0x7FFFFFFF);
}

}  // namespace

117 118 119
Node* StateValuesCache::GetValuesNodeFromCache(Node** nodes, size_t count,
                                               SparseInputMask mask) {
  StateValuesKey key(count, mask, nodes);
120
  int hash = StateValuesHashKey(nodes, count);
121 122
  ZoneHashMap::Entry* lookup =
      hash_map_.LookupOrInsert(&key, hash, ZoneAllocationPolicy(zone()));
123 124 125
  DCHECK_NOT_NULL(lookup);
  Node* node;
  if (lookup->value == nullptr) {
126 127
    int node_count = static_cast<int>(count);
    node = graph()->NewNode(common()->StateValues(node_count, mask), node_count,
128 129 130 131 132 133 134 135 136 137
                            nodes);
    NodeKey* new_key = new (zone()->New(sizeof(NodeKey))) NodeKey(node);
    lookup->key = new_key;
    lookup->value = node;
  } else {
    node = reinterpret_cast<Node*>(lookup->value);
  }
  return node;
}

138 139
SparseInputMask::BitMaskType StateValuesCache::FillBufferWithValues(
    WorkingBuffer* node_buffer, size_t* node_count, size_t* values_idx,
140 141
    Node** values, size_t count, const BitVector* liveness,
    int liveness_offset) {
142
  SparseInputMask::BitMaskType input_mask = 0;
143

144 145 146
  // Virtual nodes are the live nodes plus the implicit optimized out nodes,
  // which are implied by the liveness mask.
  size_t virtual_node_count = *node_count;
147

148 149 150
  while (*values_idx < count && *node_count < kMaxInputCount &&
         virtual_node_count < SparseInputMask::kMaxSparseInputs) {
    DCHECK_LE(*values_idx, static_cast<size_t>(INT_MAX));
151

152
    if (liveness == nullptr ||
153
        liveness->Contains(liveness_offset + static_cast<int>(*values_idx))) {
154 155 156 157
      input_mask |= 1 << (virtual_node_count);
      (*node_buffer)[(*node_count)++] = values[*values_idx];
    }
    virtual_node_count++;
158

159
    (*values_idx)++;
160 161
  }

162 163
  DCHECK_GE(StateValuesCache::kMaxInputCount, *node_count);
  DCHECK_GE(SparseInputMask::kMaxSparseInputs, virtual_node_count);
164

165 166
  // Add the end marker at the end of the mask.
  input_mask |= SparseInputMask::kEndMarker << virtual_node_count;
167

168 169
  return input_mask;
}
170

171 172
Node* StateValuesCache::BuildTree(size_t* values_idx, Node** values,
                                  size_t count, const BitVector* liveness,
173
                                  int liveness_offset, size_t level) {
174 175 176 177 178 179
  WorkingBuffer* node_buffer = GetWorkingSpace(level);
  size_t node_count = 0;
  SparseInputMask::BitMaskType input_mask = SparseInputMask::kDenseBitMask;

  if (level == 0) {
    input_mask = FillBufferWithValues(node_buffer, &node_count, values_idx,
180
                                      values, count, liveness, liveness_offset);
181 182 183 184 185 186 187 188 189 190 191
    // Make sure we returned a sparse input mask.
    DCHECK_NE(input_mask, SparseInputMask::kDenseBitMask);
  } else {
    while (*values_idx < count && node_count < kMaxInputCount) {
      if (count - *values_idx < kMaxInputCount - node_count) {
        // If we have fewer values remaining than inputs remaining, dump the
        // remaining values into this node.
        // TODO(leszeks): We could optimise this further by only counting
        // remaining live nodes.

        size_t previous_input_count = node_count;
192 193 194
        input_mask =
            FillBufferWithValues(node_buffer, &node_count, values_idx, values,
                                 count, liveness, liveness_offset);
195 196 197 198 199 200 201 202 203 204 205 206 207 208 209
        // Make sure we have exhausted our values.
        DCHECK_EQ(*values_idx, count);
        // Make sure we returned a sparse input mask.
        DCHECK_NE(input_mask, SparseInputMask::kDenseBitMask);

        // Make sure we haven't touched inputs below previous_input_count in the
        // mask.
        DCHECK_EQ(input_mask & ((1 << previous_input_count) - 1), 0u);
        // Mark all previous inputs as live.
        input_mask |= ((1 << previous_input_count) - 1);

        break;

      } else {
        // Otherwise, add the values to a subtree and add that as an input.
210 211
        Node* subtree = BuildTree(values_idx, values, count, liveness,
                                  liveness_offset, level - 1);
212 213 214 215
        (*node_buffer)[node_count++] = subtree;
        // Don't touch the bitmask, so that it stays dense.
      }
    }
216
  }
217 218 219 220 221 222 223

  if (node_count == 1 && input_mask == SparseInputMask::kDenseBitMask) {
    // Elide the StateValue node if there is only one, dense input. This will
    // only happen if we built a single subtree (as nodes with values are always
    // sparse), and so we can replace ourselves with it.
    DCHECK_EQ((*node_buffer)[0]->opcode(), IrOpcode::kStateValues);
    return (*node_buffer)[0];
224
  } else {
225 226 227 228 229 230 231 232 233
    return GetValuesNodeFromCache(node_buffer->data(), node_count,
                                  SparseInputMask(input_mask));
  }
}

#if DEBUG
namespace {

// Debug-only check that iterating |tree| yields exactly |values|, with dead
// slots (per |liveness|) reported as nullptr.
void CheckTreeContainsValues(Node* tree, Node** values, size_t count,
                             const BitVector* liveness, int liveness_offset) {
  DCHECK_EQ(count, StateValuesAccess(tree).size());

  int i;
  auto access = StateValuesAccess(tree);
  auto iter = access.begin();
  auto end = access.end();
  for (i = 0; iter != end; ++iter, ++i) {
    if (liveness == nullptr || liveness->Contains(liveness_offset + i)) {
      DCHECK_EQ((*iter).node, values[i]);
    } else {
      DCHECK_NULL((*iter).node);
    }
  }
  // Every slot must have been visited.
  DCHECK_EQ(static_cast<size_t>(i), count);
}

}  // namespace
#endif
253

254
Node* StateValuesCache::GetNodeForValues(Node** values, size_t count,
255 256
                                         const BitVector* liveness,
                                         int liveness_offset) {
257
#if DEBUG
258
  // Check that the values represent actual values, and not a tree of values.
259
  for (size_t i = 0; i < count; i++) {
260 261 262 263 264 265
    if (values[i] != nullptr) {
      DCHECK_NE(values[i]->opcode(), IrOpcode::kStateValues);
      DCHECK_NE(values[i]->opcode(), IrOpcode::kTypedStateValues);
    }
  }
  if (liveness != nullptr) {
266
    DCHECK_LE(liveness_offset + count, static_cast<size_t>(liveness->length()));
267 268

    for (size_t i = 0; i < count; i++) {
269
      if (liveness->Contains(liveness_offset + static_cast<int>(i))) {
270 271 272
        DCHECK_NOT_NULL(values[i]);
      }
    }
273 274
  }
#endif
275

276 277 278
  if (count == 0) {
    return GetEmptyStateValues();
  }
279 280 281 282 283

  // This is a worst-case tree height estimate, assuming that all values are
  // live. We could get a better estimate by counting zeroes in the liveness
  // vector, but there's no point -- any excess height in the tree will be
  // collapsed by the single-input elision at the end of BuildTree.
284
  size_t height = 0;
285 286
  size_t max_inputs = kMaxInputCount;
  while (count > max_inputs) {
287
    height++;
288
    max_inputs *= kMaxInputCount;
289 290
  }

291
  size_t values_idx = 0;
292 293
  Node* tree =
      BuildTree(&values_idx, values, count, liveness, liveness_offset, height);
294 295
  // The values should be exhausted by the end of BuildTree.
  DCHECK_EQ(values_idx, count);
296

297 298
  // The 'tree' must be rooted with a state value node.
  DCHECK_EQ(tree->opcode(), IrOpcode::kStateValues);
299

300
#if DEBUG
301
  CheckTreeContainsValues(tree, values, count, liveness, liveness_offset);
302
#endif
303 304 305 306 307

  return tree;
}

// Starts iterating over the root node's sparse inputs and immediately skips
// forward to the first valid position.
StateValuesAccess::iterator::iterator(Node* node) : current_depth_(0) {
  stack_[current_depth_] =
      SparseInputMaskOf(node->op()).IterateOverInputs(node);
  EnsureValid();
}

313
SparseInputMask::InputIterator* StateValuesAccess::iterator::Top() {
314 315
  DCHECK_LE(0, current_depth_);
  DCHECK_GT(kMaxInlineDepth, current_depth_);
316 317 318 319 320
  return &(stack_[current_depth_]);
}

// Descends into a nested StateValues node, pushing its input iterator onto
// the fixed-depth stack.
void StateValuesAccess::iterator::Push(Node* node) {
  current_depth_++;
  CHECK_GT(kMaxInlineDepth, current_depth_);
  stack_[current_depth_] =
      SparseInputMaskOf(node->op()).IterateOverInputs(node);
}


void StateValuesAccess::iterator::Pop() {
328
  DCHECK_LE(0, current_depth_);
329 330 331 332 333 334 335 336
  current_depth_--;
}


// The iteration is over once the stack has been fully popped.
bool StateValuesAccess::iterator::done() { return current_depth_ < 0; }


void StateValuesAccess::iterator::Advance() {
337 338 339
  Top()->Advance();
  EnsureValid();
}
340

341
void StateValuesAccess::iterator::EnsureValid() {
342
  while (true) {
343 344 345 346 347 348
    SparseInputMask::InputIterator* top = Top();

    if (top->IsEmpty()) {
      // We are on a valid (albeit optimized out) node.
      return;
    }
349

350 351 352
    if (top->IsEnd()) {
      // We have hit the end of this iterator. Pop the stack and move to the
      // next sibling iterator.
353 354 355 356 357
      Pop();
      if (done()) {
        // Stack is exhausted, we have reached the end.
        return;
      }
358 359
      Top()->Advance();
      continue;
360 361
    }

362 363 364 365 366 367 368 369 370
    // At this point the value is known to be live and within our input nodes.
    Node* value_node = top->GetReal();

    if (value_node->opcode() == IrOpcode::kStateValues ||
        value_node->opcode() == IrOpcode::kTypedStateValues) {
      // Nested state, we need to push to the stack.
      Push(value_node);
      continue;
    }
371

372 373 374
    // We are on a valid node, we can stop the iteration.
    return;
  }
375 376
}

377
Node* StateValuesAccess::iterator::node() { return Top()->Get(nullptr); }
378

379
MachineType StateValuesAccess::iterator::type() {
380 381
  Node* parent = Top()->parent();
  if (parent->opcode() == IrOpcode::kStateValues) {
382
    return MachineType::AnyTagged();
383
  } else {
384 385 386 387 388 389 390 391
    DCHECK_EQ(IrOpcode::kTypedStateValues, parent->opcode());

    if (Top()->IsEmpty()) {
      return MachineType::None();
    } else {
      ZoneVector<MachineType> const* types = MachineTypesOf(parent->op());
      return (*types)[Top()->real_index()];
    }
392 393 394 395
  }
}


396 397 398 399 400 401 402 403 404 405 406 407 408
bool StateValuesAccess::iterator::operator!=(iterator& other) {
  // We only allow comparison with end().
  CHECK(other.done());
  return !done();
}


// Pre-increment: advance to the next value slot.
StateValuesAccess::iterator& StateValuesAccess::iterator::operator++() {
  Advance();
  return *this;
}


409 410 411
StateValuesAccess::TypedNode StateValuesAccess::iterator::operator*() {
  return TypedNode(node(), type());
}
412 413 414 415


// Counts value slots (live and optimized-out alike) by recursively walking
// the StateValues tree rooted at node_.
size_t StateValuesAccess::size() {
  size_t count = 0;
  SparseInputMask mask = SparseInputMaskOf(node_->op());

  for (SparseInputMask::InputIterator it = mask.IterateOverInputs(node_);
       !it.IsEnd(); it.Advance()) {
    if (it.IsEmpty()) {
      // Optimized-out slot still occupies one position.
      count++;
      continue;
    }
    Node* value = it.GetReal();
    if (value->opcode() == IrOpcode::kStateValues ||
        value->opcode() == IrOpcode::kTypedStateValues) {
      // Nested tree: count its leaves.
      count += StateValuesAccess(value).size();
    } else {
      count++;
    }
  }

  return count;
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8