// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/interpreter/interpreter.h"

#include <fstream>
#include <memory>

#include "builtins-generated/bytecodes-builtins-list.h"
#include "src/ast/prettyprinter.h"
#include "src/bootstrapper.h"
#include "src/compiler.h"
#include "src/counters-inl.h"
#include "src/interpreter/bytecode-generator.h"
#include "src/interpreter/bytecodes.h"
#include "src/objects-inl.h"
#include "src/objects/shared-function-info.h"
#include "src/objects/slots.h"
#include "src/ostreams.h"
#include "src/parsing/parse-info.h"
#include "src/setup-isolate.h"
#include "src/snapshot/snapshot.h"
#include "src/unoptimized-compilation-info.h"
#include "src/visitors.h"

namespace v8 {
namespace internal {
namespace interpreter {

31
class InterpreterCompilationJob final : public UnoptimizedCompilationJob {
32
 public:
33 34 35 36
  InterpreterCompilationJob(
      ParseInfo* parse_info, FunctionLiteral* literal,
      AccountingAllocator* allocator,
      std::vector<FunctionLiteral*>* eager_inner_literals);
37 38 39

 protected:
  Status ExecuteJobImpl() final;
40 41
  Status FinalizeJobImpl(Handle<SharedFunctionInfo> shared_info,
                         Isolate* isolate) final;
42 43 44 45

 private:
  BytecodeGenerator* generator() { return &generator_; }

46
  Zone zone_;
47
  UnoptimizedCompilationInfo compilation_info_;
48 49 50 51 52
  BytecodeGenerator generator_;

  DISALLOW_COPY_AND_ASSIGN(InterpreterCompilationJob);
};

53 54 55
Interpreter::Interpreter(Isolate* isolate)
    : isolate_(isolate),
      interpreter_entry_trampoline_instruction_start_(kNullAddress) {
56
  memset(dispatch_table_, 0, sizeof(dispatch_table_));
57

58 59
  if (FLAG_trace_ignition_dispatches) {
    static const int kBytecodeCount = static_cast<int>(Bytecode::kLast) + 1;
60
    bytecode_dispatch_counters_table_.reset(
61
        new uintptr_t[kBytecodeCount * kBytecodeCount]);
62
    memset(bytecode_dispatch_counters_table_.get(), 0,
63 64
           sizeof(uintptr_t) * kBytecodeCount * kBytecodeCount);
  }
65 66
}

67 68 69 70 71 72 73 74 75 76 77 78 79
namespace {

int BuiltinIndexFromBytecode(Bytecode bytecode, OperandScale operand_scale) {
  int index = BytecodeOperands::OperandScaleAsIndex(operand_scale) *
                  kNumberOfBytecodeHandlers +
              static_cast<int>(bytecode);
  int offset = kBytecodeToBuiltinsMapping[index];
  return offset >= 0 ? Builtins::kFirstBytecodeHandler + offset
                     : Builtins::kIllegalHandler;
}

}  // namespace

80 81
Code Interpreter::GetBytecodeHandler(Bytecode bytecode,
                                     OperandScale operand_scale) {
82 83
  int builtin_index = BuiltinIndexFromBytecode(bytecode, operand_scale);
  Builtins* builtins = isolate_->builtins();
84
  return builtins->builtin(builtin_index);
85 86
}

87
void Interpreter::SetBytecodeHandler(Bytecode bytecode,
88
                                     OperandScale operand_scale, Code handler) {
89 90
  DCHECK(handler->kind() == Code::BYTECODE_HANDLER);
  size_t index = GetDispatchTableIndex(bytecode, operand_scale);
91
  dispatch_table_[index] = handler->InstructionStart();
92 93
}

94 95 96 97 98
// static
size_t Interpreter::GetDispatchTableIndex(Bytecode bytecode,
                                          OperandScale operand_scale) {
  static const size_t kEntriesPerOperandScale = 1u << kBitsPerByte;
  size_t index = static_cast<size_t>(bytecode);
99 100
  return index + BytecodeOperands::OperandScaleAsIndex(operand_scale) *
                     kEntriesPerOperandScale;
101 102
}

103
void Interpreter::IterateDispatchTable(RootVisitor* v) {
104 105 106 107 108 109 110 111 112 113 114 115 116 117 118
  if (FLAG_embedded_builtins && !isolate_->serializer_enabled() &&
      isolate_->embedded_blob() != nullptr) {
// If builtins are embedded (and we're not generating a snapshot), then
// every bytecode handler will be off-heap, so there's no point iterating
// over them.
#ifdef DEBUG
    for (int i = 0; i < kDispatchTableSize; i++) {
      Address code_entry = dispatch_table_[i];
      CHECK(code_entry == kNullAddress ||
            InstructionStream::PcIsOffHeap(isolate_, code_entry));
    }
#endif  // ENABLE_SLOW_DCHECKS
    return;
  }

119 120
  for (int i = 0; i < kDispatchTableSize; i++) {
    Address code_entry = dispatch_table_[i];
121 122 123
    // Skip over off-heap bytecode handlers since they will never move.
    if (InstructionStream::PcIsOffHeap(isolate_, code_entry)) continue;

124 125 126 127 128 129 130
    // TODO(jkummerow): Would it hurt to simply do:
    // if (code_entry == kNullAddress) continue;
    Code code;
    if (code_entry != kNullAddress) {
      code = Code::GetCodeFromTargetAddress(code_entry);
    }
    Code old_code = code;
131
    v->VisitRootPointer(Root::kDispatchTable, nullptr, FullObjectSlot(&code));
132
    if (code != old_code) {
133
      dispatch_table_[i] = code->entry();
134 135
    }
  }
136 137
}

138 139 140 141
int Interpreter::InterruptBudget() {
  return FLAG_interrupt_budget;
}

142 143
namespace {

144 145
void MaybePrintAst(ParseInfo* parse_info,
                   UnoptimizedCompilationInfo* compilation_info) {
146
  if (!FLAG_print_ast) return;
147

148
  StdoutStream os;
149 150
  std::unique_ptr<char[]> name = compilation_info->literal()->GetDebugName();
  os << "[generating bytecode for function: " << name.get() << "]" << std::endl;
151 152
#ifdef DEBUG
  os << "--- AST ---" << std::endl
153 154
     << AstPrinter(parse_info->stack_limit())
            .PrintProgram(compilation_info->literal())
155
     << std::endl;
156 157 158
#endif  // DEBUG
}

159 160 161 162 163 164 165 166 167 168 169 170 171 172
bool ShouldPrintBytecode(Handle<SharedFunctionInfo> shared) {
  if (!FLAG_print_bytecode) return false;

  // Checks whether function passed the filter.
  if (shared->is_toplevel()) {
    Vector<const char> filter = CStrVector(FLAG_print_bytecode_filter);
    return (filter.length() == 0) || (filter.length() == 1 && filter[0] == '*');
  } else {
    return shared->PassesFilter(FLAG_print_bytecode_filter);
  }
}

}  // namespace

173 174
InterpreterCompilationJob::InterpreterCompilationJob(
    ParseInfo* parse_info, FunctionLiteral* literal,
175
    AccountingAllocator* allocator,
176
    std::vector<FunctionLiteral*>* eager_inner_literals)
177 178
    : UnoptimizedCompilationJob(parse_info->stack_limit(), parse_info,
                                &compilation_info_),
179 180
      zone_(allocator, ZONE_NAME),
      compilation_info_(&zone_, parse_info, literal),
181 182
      generator_(&compilation_info_, parse_info->ast_string_constants(),
                 eager_inner_literals) {}
183 184

InterpreterCompilationJob::Status InterpreterCompilationJob::ExecuteJobImpl() {
185
  RuntimeCallTimerScope runtimeTimerScope(
186 187
      parse_info()->runtime_call_stats(),
      parse_info()->on_background_thread()
188 189
          ? RuntimeCallCounterId::kCompileBackgroundIgnition
          : RuntimeCallCounterId::kCompileIgnition);
190 191
  // TODO(lpy): add support for background compilation RCS trace.
  TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), "V8.CompileIgnition");
192 193 194 195

  // Print AST if flag is enabled. Note, if compiling on a background thread
  // then ASTs from different functions may be intersperse when printed.
  MaybePrintAst(parse_info(), compilation_info());
196

197
  generator()->GenerateBytecode(stack_limit());
198 199 200 201 202 203 204

  if (generator()->HasStackOverflow()) {
    return FAILED;
  }
  return SUCCEEDED;
}

205
InterpreterCompilationJob::Status InterpreterCompilationJob::FinalizeJobImpl(
206
    Handle<SharedFunctionInfo> shared_info, Isolate* isolate) {
207
  RuntimeCallTimerScope runtimeTimerScope(
208
      parse_info()->runtime_call_stats(),
209
      RuntimeCallCounterId::kCompileIgnitionFinalization);
210 211
  TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
               "V8.CompileIgnitionFinalization");
212

213 214
  Handle<BytecodeArray> bytecodes =
      generator()->FinalizeBytecode(isolate, parse_info()->script());
215 216 217 218
  if (generator()->HasStackOverflow()) {
    return FAILED;
  }

219
  if (ShouldPrintBytecode(shared_info)) {
220
    StdoutStream os;
221 222 223 224
    std::unique_ptr<char[]> name =
        compilation_info()->literal()->GetDebugName();
    os << "[generated bytecode for function: " << name.get() << "]"
       << std::endl;
225
    bytecodes->Disassemble(os);
226 227 228
    os << std::flush;
  }

229
  compilation_info()->SetBytecodeArray(bytecodes);
230 231 232
  return SUCCEEDED;
}

233
UnoptimizedCompilationJob* Interpreter::NewCompilationJob(
234 235
    ParseInfo* parse_info, FunctionLiteral* literal,
    AccountingAllocator* allocator,
236
    std::vector<FunctionLiteral*>* eager_inner_literals) {
237 238
  return new InterpreterCompilationJob(parse_info, literal, allocator,
                                       eager_inner_literals);
239 240
}

241 242 243 244 245 246 247 248 249 250 251 252 253 254 255
void Interpreter::ForEachBytecode(
    const std::function<void(Bytecode, OperandScale)>& f) {
  constexpr OperandScale kOperandScales[] = {
#define VALUE(Name, _) OperandScale::k##Name,
      OPERAND_SCALE_LIST(VALUE)
#undef VALUE
  };

  for (OperandScale operand_scale : kOperandScales) {
    for (int i = 0; i < Bytecodes::kBytecodeCount; i++) {
      f(Bytecodes::FromByte(i), operand_scale);
    }
  }
}

256
void Interpreter::Initialize() {
257
  Builtins* builtins = isolate_->builtins();
258 259 260 261 262

  // Set the interpreter entry trampoline entry point now that builtins are
  // initialized.
  Handle<Code> code = BUILTIN_CODE(isolate_, InterpreterEntryTrampoline);
  DCHECK(builtins->is_initialized());
263 264
  DCHECK(code->is_off_heap_trampoline() ||
         isolate_->heap()->IsImmovable(*code));
265 266 267
  interpreter_entry_trampoline_instruction_start_ = code->InstructionStart();

  // Initialize the dispatch table.
268
  Code illegal = builtins->builtin(Builtins::kIllegalHandler);
269
  int builtin_id = Builtins::kFirstBytecodeHandler;
270 271
  ForEachBytecode([=, &builtin_id](Bytecode bytecode,
                                   OperandScale operand_scale) {
272
    Code handler = illegal;
273 274 275 276 277 278 279 280 281
    if (Bytecodes::BytecodeHasHandler(bytecode, operand_scale)) {
#ifdef DEBUG
      std::string builtin_name(Builtins::name(builtin_id));
      std::string expected_name =
          Bytecodes::ToString(bytecode, operand_scale, "") + "Handler";
      DCHECK_EQ(expected_name, builtin_name);
#endif
      handler = builtins->builtin(builtin_id++);
    }
282 283
    SetBytecodeHandler(bytecode, operand_scale, handler);
  });
284
  DCHECK(builtin_id == Builtins::builtin_count);
285
  DCHECK(IsDispatchTableInitialized());
286 287
}

288
bool Interpreter::IsDispatchTableInitialized() const {
289
  return dispatch_table_[0] != kNullAddress;
290 291
}

292
const char* Interpreter::LookupNameOfBytecodeHandler(const Code code) {
293
#ifdef ENABLE_DISASSEMBLER
294 295 296 297
#define RETURN_NAME(Name, ...)                                 \
  if (dispatch_table_[Bytecodes::ToByte(Bytecode::k##Name)] == \
      code->entry()) {                                         \
    return #Name;                                              \
298 299 300 301 302 303 304
  }
  BYTECODE_LIST(RETURN_NAME)
#undef RETURN_NAME
#endif  // ENABLE_DISASSEMBLER
  return nullptr;
}

305 306 307 308 309 310 311 312 313 314 315 316
uintptr_t Interpreter::GetDispatchCounter(Bytecode from, Bytecode to) const {
  int from_index = Bytecodes::ToByte(from);
  int to_index = Bytecodes::ToByte(to);
  return bytecode_dispatch_counters_table_[from_index * kNumberOfBytecodes +
                                           to_index];
}

Local<v8::Object> Interpreter::GetDispatchCountersObject() {
  v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(isolate_);
  Local<v8::Context> context = isolate->GetCurrentContext();

  Local<v8::Object> counters_map = v8::Object::New(isolate);
317 318 319 320 321 322 323 324 325 326 327 328

  // Output is a JSON-encoded object of objects.
  //
  // The keys on the top level object are source bytecodes,
  // and corresponding value are objects. Keys on these last are the
  // destinations of the dispatch and the value associated is a counter for
  // the correspondent source-destination dispatch chain.
  //
  // Only non-zero counters are written to file, but an entry in the top-level
  // object is always present, even if the value is empty because all counters
  // for that source are zero.

329
  for (int from_index = 0; from_index < kNumberOfBytecodes; ++from_index) {
330
    Bytecode from_bytecode = Bytecodes::FromByte(from_index);
331 332 333 334 335
    Local<v8::Object> counters_row = v8::Object::New(isolate);

    for (int to_index = 0; to_index < kNumberOfBytecodes; ++to_index) {
      Bytecode to_bytecode = Bytecodes::FromByte(to_index);
      uintptr_t counter = GetDispatchCounter(from_bytecode, to_bytecode);
336 337

      if (counter > 0) {
338 339 340 341 342 343
        std::string to_name = Bytecodes::ToString(to_bytecode);
        Local<v8::String> to_name_object =
            v8::String::NewFromUtf8(isolate, to_name.c_str(),
                                    NewStringType::kNormal)
                .ToLocalChecked();
        Local<v8::Number> counter_object = v8::Number::New(isolate, counter);
344 345
        CHECK(counters_row
                  ->DefineOwnProperty(context, to_name_object, counter_object)
346
                  .IsJust());
347 348 349
      }
    }

350 351 352 353 354 355
    std::string from_name = Bytecodes::ToString(from_bytecode);
    Local<v8::String> from_name_object =
        v8::String::NewFromUtf8(isolate, from_name.c_str(),
                                NewStringType::kNormal)
            .ToLocalChecked();

356 357 358
    CHECK(
        counters_map->DefineOwnProperty(context, from_name_object, counters_row)
            .IsJust());
359 360
  }

361
  return counters_map;
362 363
}

}  // namespace interpreter
}  // namespace internal
}  // namespace v8