Commit 7777aa42 authored by Clemens Backes, committed by Commit Bot

Change all TODOs from clemensh to clemensb

R=adamk@chromium.org

No-Try: true
Change-Id: I71824f52802c125dbee51216054575f44d08d534
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1825243
Commit-Queue: Clemens Backes [né Hammacher] <clemensb@chromium.org>
Auto-Submit: Clemens Backes [né Hammacher] <clemensb@chromium.org>
Reviewed-by: Adam Klein <adamk@chromium.org>
Cr-Commit-Position: refs/heads/master@{#63983}
parent a65cf262
@@ -399,7 +399,7 @@ def _CheckMacroUndefs(input_api, output_api):
 """
 Checks that each #define in a .cc file is eventually followed by an #undef.

-TODO(clemensh): This check should eventually be enabled for all cc files via
+TODO(clemensb): This check should eventually be enabled for all cc files via
 tools/presubmit.py (https://crbug.com/v8/6811).
 """
 def FilterFile(affected_file):
@@ -467,7 +467,7 @@ def _CheckNoexceptAnnotations(input_api, output_api):
 Omitting it at some places can result in weird compiler errors if this is
 mixed with other classes that have the annotation.

-TODO(clemensh): This check should eventually be enabled for all files via
+TODO(clemensb): This check should eventually be enabled for all files via
 tools/presubmit.py (https://crbug.com/v8/8616).
 """
......
@@ -293,7 +293,7 @@ class V8_EXPORT_PRIVATE Operand {
 // Only valid if len_ > 4.
 RelocInfo::Mode rmode_ = RelocInfo::NONE;
-// TODO(clemensh): Get rid of this friendship, or make Operand immutable.
+// TODO(clemensb): Get rid of this friendship, or make Operand immutable.
 friend class Assembler;
 };
 ASSERT_TRIVIALLY_COPYABLE(Operand);
......
@@ -28,7 +28,7 @@ constexpr int NumRegs(RegList list) {
 // Combine two RegLists by building the union of the contained registers.
 // Implemented as a Functor to pass it to base::fold even on gcc < 5 (see
 // https://gcc.gnu.org/bugzilla/show_bug.cgi?id=52892).
-// TODO(clemensh): Remove this once we require gcc >= 5.0.
+// TODO(clemensb): Remove this once we require gcc >= 5.0.
 struct CombineRegListsFunctor {
 constexpr RegList operator()(RegList list1, RegList list2) const {
 return list1 | list2;
......
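(Aside for context on the hunk above: gcc 4.7-4.9 drops the constexpr qualifier when a plain function is passed around, so wrapping the operation in a functor type keeps a compile-time fold usable. Below is a minimal self-contained sketch of the pattern; the fold helper and names are illustrative stand-ins, not V8's actual base::fold.)

#include <cstdint>

using RegList = uint64_t;

// gcc < 5 loses the constexpr qualifier when a plain function is passed
// around (https://gcc.gnu.org/bugzilla/show_bug.cgi?id=52892); a functor
// type with a constexpr operator() does not have that problem.
struct CombineRegLists {
  constexpr RegList operator()(RegList a, RegList b) const { return a | b; }
};

// A tiny constexpr fold, standing in for base::fold.
template <typename Op, typename T>
constexpr T fold(Op, T value) {
  return value;
}
template <typename Op, typename T, typename... Ts>
constexpr T fold(Op op, T first, Ts... rest) {
  return op(first, fold(op, rest...));
}

// The whole fold stays usable in constant expressions:
static_assert(fold(CombineRegLists{}, RegList{1}, RegList{2}, RegList{4}) == 7,
              "union of the three single-register lists");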
@@ -70,7 +70,7 @@ int FrameInspector::GetParametersCount() {
 Handle<Object> FrameInspector::GetParameter(int index) {
 if (is_optimized_) return deoptimized_frame_->GetParameter(index);
-// TODO(clemensh): Handle wasm_interpreted_frame_.
+// TODO(clemensb): Handle wasm_interpreted_frame_.
 return handle(frame_->GetParameter(index), isolate_);
 }
......
@@ -1039,7 +1039,7 @@ void Debug::PrepareStep(StepAction step_action) {
 // and deoptimize every frame along the way.
 bool in_current_frame = true;
 for (; !frames_it.done(); frames_it.Advance()) {
-// TODO(clemensh): Implement stepping out from JS to wasm.
+// TODO(clemensb): Implement stepping out from JS to wasm.
 if (frames_it.frame()->is_wasm()) continue;
 JavaScriptFrame* frame = JavaScriptFrame::cast(frames_it.frame());
 if (last_step_action() == StepIn) {
@@ -1069,7 +1069,7 @@ void Debug::PrepareStep(StepAction step_action) {
 thread_local_.target_frame_count_ = current_frame_count;
 V8_FALLTHROUGH;
 case StepIn:
-// TODO(clemensh): Implement stepping from JS into wasm.
+// TODO(clemensb): Implement stepping from JS into wasm.
 FloodWithOneShot(shared);
 break;
 }
......
@@ -631,7 +631,7 @@ Handle<Object> AsmJsWasmStackFrame::GetReceiver() const {
 }

 Handle<Object> AsmJsWasmStackFrame::GetFunction() const {
-// TODO(clemensh): Return lazily created JSFunction.
+// TODO(clemensb): Return lazily created JSFunction.
 return Null();
 }
......
@@ -1087,7 +1087,7 @@ std::shared_ptr<StackFrame> V8Debugger::symbolize(
 return std::shared_ptr<StackFrame>(it->second);
 }
 std::shared_ptr<StackFrame> frame(new StackFrame(isolate(), v8Frame));
-// TODO(clemensh): Figure out a way to do this translation only right before
+// TODO(clemensb): Figure out a way to do this translation only right before
 // sending the stack trace over wire.
 if (v8Frame->IsWasm()) frame->translate(&m_wasmTranslation);
 if (m_maxAsyncCallStackDepth) {
......
@@ -41,7 +41,7 @@ inline MemOperand GetHalfStackSlot(uint32_t index, RegPairHalf half) {
 return Operand(ebp, -kFirstStackSlotOffset - offset);
 }
-// TODO(clemensh): Make this a constexpr variable once Operand is constexpr.
+// TODO(clemensb): Make this a constexpr variable once Operand is constexpr.
 inline Operand GetInstanceOperand() { return Operand(ebp, -8); }
 static constexpr LiftoffRegList kByteRegs =
......
@@ -297,7 +297,7 @@ class StackTransferRecipe {
 // process all remaining moves in that cycle. Repeat for all cycles.
 uint32_t next_spill_slot = asm_->cache_state()->stack_height();
 while (!move_dst_regs_.is_empty()) {
-// TODO(clemensh): Use an unused register if available.
+// TODO(clemensb): Use an unused register if available.
 LiftoffRegister dst = move_dst_regs_.GetFirstRegSet();
 RegisterMove* move = register_move(dst);
 LiftoffRegister spill_reg = move->src;
@@ -412,7 +412,7 @@ void InitMergeRegion(LiftoffAssembler::CacheState* state,
 } // namespace
-// TODO(clemensh): Don't copy the full parent state (this makes us N^2).
+// TODO(clemensb): Don't copy the full parent state (this makes us N^2).
 void LiftoffAssembler::CacheState::InitMerge(const CacheState& source,
 uint32_t num_locals,
 uint32_t arity,
@@ -484,7 +484,7 @@ constexpr AssemblerOptions DefaultLiftoffOptions() {
 } // namespace
-// TODO(clemensh): Provide a reasonably sized buffer, based on wasm function
+// TODO(clemensb): Provide a reasonably sized buffer, based on wasm function
 // size.
 LiftoffAssembler::LiftoffAssembler(std::unique_ptr<AssemblerBuffer> buffer)
 : TurboAssembler(nullptr, DefaultLiftoffOptions(), CodeObjectRequired::kNo,
@@ -526,7 +526,7 @@ LiftoffRegister LiftoffAssembler::PopToRegister(LiftoffRegList pinned) {
 void LiftoffAssembler::MergeFullStackWith(const CacheState& target,
 const CacheState& source) {
 DCHECK_EQ(source.stack_height(), target.stack_height());
-// TODO(clemensh): Reuse the same StackTransferRecipe object to save some
+// TODO(clemensb): Reuse the same StackTransferRecipe object to save some
 // allocations.
 StackTransferRecipe transfers(this);
 for (uint32_t i = 0, e = source.stack_height(); i < e; ++i) {
......
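(Context for the StackTransferRecipe hunk above: the moves of a stack merge are conceptually parallel, so cyclic dependencies cannot be executed as plain sequential moves; the code breaks each cycle by spilling one source, and the TODO is about preferring a free register over a stack slot. A minimal sketch of the idea, with plain ints standing in for registers and the spill slot:)

#include <cassert>

// The two moves {r0 <- r1, r1 <- r0} are conceptually simultaneous, so
// executing them naively in order would clobber r0 before r1 reads it.
// Spilling one source first turns the cycle into an acyclic chain.
int main() {
  int r0 = 10, r1 = 20;

  int spill_slot = r0;  // break the cycle: save one value of the cycle
  r0 = r1;              // the remaining moves now form a plain chain
  r1 = spill_slot;      // finish the cycle from the spill slot

  assert(r0 == 20 && r1 == 10);
  return 0;
}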
@@ -228,7 +228,7 @@ class LiftoffAssembler : public TurboAssembler {
 return reg;
 }
-// TODO(clemensh): Don't copy the full parent state (this makes us N^2).
+// TODO(clemensb): Don't copy the full parent state (this makes us N^2).
 void InitMerge(const CacheState& source, uint32_t num_locals,
 uint32_t arity, uint32_t stack_depth);
......
@@ -6,7 +6,7 @@
 #include "src/base/optional.h"
 #include "src/codegen/assembler-inl.h"
-// TODO(clemensh): Remove dependences on compiler stuff.
+// TODO(clemensb): Remove dependences on compiler stuff.
 #include "src/codegen/interface-descriptors.h"
 #include "src/codegen/macro-assembler-inl.h"
 #include "src/compiler/linkage.h"
@@ -121,7 +121,7 @@ constexpr Vector<const ValueType> kSupportedTypes =
 class LiftoffCompiler {
 public:
-// TODO(clemensh): Make this a template parameter.
+// TODO(clemensb): Make this a template parameter.
 static constexpr Decoder::ValidateFlag validate = Decoder::kValidate;
 using Value = ValueBase;
@@ -488,7 +488,7 @@ class LiftoffCompiler {
 // Before entering a loop, spill all locals to the stack, in order to free
 // the cache registers, and to avoid unnecessarily reloading stack values
 // into registers at branches.
-// TODO(clemensh): Come up with a better strategy here, involving
+// TODO(clemensb): Come up with a better strategy here, involving
 // pre-analysis of the function.
 __ SpillLocals();
......
@@ -47,7 +47,7 @@ inline Operand GetStackSlot(uint32_t index) {
 return Operand(rbp, -kFirstStackSlotOffset - offset);
 }
-// TODO(clemensh): Make this a constexpr variable once Operand is constexpr.
+// TODO(clemensb): Make this a constexpr variable once Operand is constexpr.
 inline Operand GetInstanceOperand() { return Operand(rbp, -16); }
 inline Operand GetMemOp(LiftoffAssembler* assm, Register addr, Register offset,
......
@@ -1501,7 +1501,7 @@ void AsyncCompileJob::CreateNativeModule(
 // Create the module object and populate with compiled functions and
 // information needed at instantiation time.
-// TODO(clemensh): For the same module (same bytes / same hash), we should
+// TODO(clemensb): For the same module (same bytes / same hash), we should
 // only have one {WasmModuleObject}. Otherwise, we might only set
 // breakpoints on a (potentially empty) subset of the instances.
 // Create the module object.
......
@@ -547,7 +547,7 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() {
 Handle<Code> wrapper_code =
 JSToWasmWrapperCompilationUnit::CompileJSToWasmWrapper(
 isolate_, function.sig, function.imported);
-// TODO(clemensh): Don't generate an exported function for the start
+// TODO(clemensb): Don't generate an exported function for the start
 // function. Use CWasmEntry instead.
 start_function_ = WasmExportedFunction::New(
 isolate_, instance, start_index,
......
@@ -44,7 +44,7 @@ using FunctionSig = Signature<ValueType>;
 inline size_t hash_value(ValueType type) { return static_cast<size_t>(type); }
-// TODO(clemensh): Compute memtype and size from ValueType once we have c++14
+// TODO(clemensb): Compute memtype and size from ValueType once we have c++14
 // constexpr support.
 #define FOREACH_LOAD_TYPE(V) \
 V(I32, , Int32, 2) \
......
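(Context for the value-type hunk above: in C++11 a constexpr function body is restricted to essentially a single return statement, so a switch mapping a ValueType to its memory type and size is not allowed and the FOREACH_LOAD_TYPE macro table is used instead. A hypothetical sketch of what the C++14 version could look like; the enum values and sizes here are illustrative, not V8's actual definitions:)

enum class ValueType { kI32, kI64, kF32, kF64 };

constexpr int ValueTypeSize(ValueType type) {
  switch (type) {  // a switch in a constexpr function is valid only since C++14
    case ValueType::kI32:
    case ValueType::kF32:
      return 4;
    case ValueType::kI64:
    case ValueType::kF64:
      return 8;
  }
  return 0;  // unreachable for valid enum values
}

static_assert(ValueTypeSize(ValueType::kI64) == 8, "i64 is 8 bytes");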
@@ -1226,7 +1226,7 @@ void NativeModule::AddCodeSpace(base::AddressRegion region) {
 const uint32_t num_wasm_functions = module_->num_declared_functions;
 const bool has_functions = num_wasm_functions > 0;
 const bool is_first_code_space = code_space_data_.empty();
-// TODO(clemensh): Avoid additional jump table if the code space is close
+// TODO(clemensb): Avoid additional jump table if the code space is close
 // enough to another existing code space.
 const bool needs_jump_table =
 has_functions && (kNeedsFarJumpsBetweenCodeSpaces || is_first_code_space);
......
@@ -321,7 +321,7 @@ class WasmCodeAllocator {
 // The engine-wide wasm code manager.
 WasmCodeManager* const code_manager_;
-// TODO(clemensh): Try to make this non-recursive again. It's recursive
+// TODO(clemensb): Try to make this non-recursive again. It's recursive
 // currently because {AllocateForCodeInRegion} might create a new code space,
 // which recursively calls {AllocateForCodeInRegion} for the jump table.
 mutable base::RecursiveMutex mutex_;
@@ -348,7 +348,7 @@ class WasmCodeAllocator {
 bool is_executable_ = false;
-// TODO(clemensh): Remove this field once multiple code spaces are supported
+// TODO(clemensb): Remove this field once multiple code spaces are supported
 // everywhere.
 const bool can_request_more_memory_;
......
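(Context for the WasmCodeAllocator hunk above: the mutex is recursive because an allocation may create a new code space, which immediately needs a jump table, and the jump table is itself a code allocation performed while the lock is already held. A simplified sketch of that reentrancy, with std::recursive_mutex standing in for V8's base::RecursiveMutex; the class and constants are illustrative, not V8's implementation:)

#include <mutex>

// With std::mutex the nested Allocate() call below would deadlock;
// std::recursive_mutex lets the owning thread re-enter.
class CodeAllocator {
 public:
  int Allocate(int size, bool allow_new_space = true) {
    std::lock_guard<std::recursive_mutex> guard(mutex_);
    if (used_ + size > capacity_ && allow_new_space) {
      capacity_ += kCodeSpaceSize;
      // Re-enters Allocate() with the lock held: the recursive case.
      jump_table_offset_ = Allocate(kJumpTableSize, /*allow_new_space=*/false);
    }
    int offset = used_;
    used_ += size;
    return offset;  // offset of the new allocation
  }

 private:
  static constexpr int kCodeSpaceSize = 4096;
  static constexpr int kJumpTableSize = 64;
  std::recursive_mutex mutex_;  // recursive only because of the nested call
  int used_ = 0;
  int capacity_ = 0;
  int jump_table_offset_ = -1;
};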
@@ -184,7 +184,7 @@ class InterpreterHandle {
 argument_values.begin());
 bool finished = false;
 while (!finished) {
-// TODO(clemensh): Add occasional StackChecks.
+// TODO(clemensb): Add occasional StackChecks.
 WasmInterpreter::State state = ContinueExecution(thread);
 switch (state) {
 case WasmInterpreter::State::PAUSED:
......
@@ -306,7 +306,7 @@ MaybeHandle<WasmModuleObject> WasmEngine::SyncCompile(
 CreateWasmScript(isolate, bytes, native_module->module()->source_map_url);
 // Create the module object.
-// TODO(clemensh): For the same module (same bytes / same hash), we should
+// TODO(clemensb): For the same module (same bytes / same hash), we should
 // only have one WasmModuleObject. Otherwise, we might only set
 // breakpoints on a (potentially empty) subset of the instances.
@@ -336,7 +336,7 @@ void WasmEngine::AsyncInstantiate(
 ErrorThrower thrower(isolate, "WebAssembly.instantiate()");
 // Instantiate a TryCatch so that caught exceptions won't propagate out.
 // They will still be set as pending exceptions on the isolate.
-// TODO(clemensh): Avoid TryCatch, use Execution::TryCall internally to invoke
+// TODO(clemensb): Avoid TryCatch, use Execution::TryCall internally to invoke
 // start function and report thrown exception explicitly via out argument.
 v8::TryCatch catcher(reinterpret_cast<v8::Isolate*>(isolate));
 catcher.SetVerbose(false);
......
@@ -133,7 +133,7 @@ class V8_EXPORT_PRIVATE WasmInterpreter {
 // Stack inspection and modification.
 pc_t GetBreakpointPc();
-// TODO(clemensh): Make this uint32_t.
+// TODO(clemensb): Make this uint32_t.
 int GetFrameCount();
 // The InterpretedFrame is only valid as long as the Thread is paused.
 FramePtr GetFrame(int index);
......
@@ -102,7 +102,7 @@ WasmModuleObject::shared_native_module() const {
 return managed_native_module().get();
 }

 const wasm::WasmModule* WasmModuleObject::module() const {
-// TODO(clemensh): Remove this helper (inline in callers).
+// TODO(clemensb): Remove this helper (inline in callers).
 return native_module()->module();
 }
 void WasmModuleObject::reset_breakpoint_infos() {
......
@@ -495,7 +495,7 @@ constexpr const FunctionSig* kCachedSigs[] = {
 // gcc 4.7 - 4.9 has a bug which causes the constexpr attribute to get lost when
 // passing functions (https://gcc.gnu.org/bugzilla/show_bug.cgi?id=52892). Hence
 // encapsulate these constexpr functions in functors.
-// TODO(clemensh): Remove this once we require gcc >= 5.0.
+// TODO(clemensb): Remove this once we require gcc >= 5.0.
 struct GetShortOpcodeSigIndex {
 constexpr WasmOpcodeSig operator()(byte opcode) const {
......
@@ -465,7 +465,7 @@ class WasmGenerator {
 template <ValueType T1, ValueType T2, ValueType... Ts>
 void Generate(DataRange* data) {
-// TODO(clemensh): Implement a more even split.
+// TODO(clemensb): Implement a more even split.
 auto first_data = data->split();
 Generate<T1>(&first_data);
 Generate<T2, Ts...>(data);
......
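(Context for the fuzzer hunk above: Generate peels one element off the template pack, gives it a split-off share of the fuzzer's input bytes, and recurses on the rest; the TODO is about choosing a fairer split than "half of the budget to the first element". A simplified sketch of the recursion, using type parameters instead of V8's ValueType non-type parameters and a toy DataRange:)

#include <cstddef>
#include <cstdio>

// Toy stand-in for the fuzzer's DataRange: a budget of input bytes that can
// be split so each generated value consumes its own share.
class DataRange {
 public:
  explicit DataRange(size_t size) : size_(size) {}
  size_t size() const { return size_; }
  // Hands the first half of the budget to the caller; an even split across
  // all pack elements is what the TODO asks for.
  DataRange split() {
    size_t first_size = size_ / 2;
    size_ -= first_size;
    return DataRange(first_size);
  }

 private:
  size_t size_;
};

// Base case: one type left, consume the remaining budget.
template <typename T>
void Generate(DataRange* data) {
  std::printf("generating a value from %zu bytes of entropy\n", data->size());
}

// Recursive case: peel T1 off the pack, give it a split-off budget, and
// recurse on the rest -- the same shape as the hunk above.
template <typename T1, typename T2, typename... Ts>
void Generate(DataRange* data) {
  DataRange first_data = data->split();
  Generate<T1>(&first_data);
  Generate<T2, Ts...>(data);
}

int main() {
  DataRange data(64);
  Generate<int, float, double>(&data);  // budgets: 32, 16, 16 bytes
}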
@@ -38,7 +38,7 @@
 }], # ALWAYS
 # Liftoff is currently only sufficiently implemented on x64 and ia32.
-# TODO(clemensh): Implement on all other platforms (crbug.com/v8/6600).
+# TODO(clemensb): Implement on all other platforms (crbug.com/v8/6600).
 ['arch != x64 and arch != ia32', {
 'wasm-trace-memory-liftoff': [SKIP],
 }], # arch != x64 and arch != ia32
......
@@ -1071,7 +1071,7 @@
 ##############################################################################
 # Liftoff is currently only sufficiently implemented on x64, ia32, arm64 and
 # arm.
-# TODO(clemensh): Implement on all other platforms (crbug.com/v8/6600).
+# TODO(clemensb): Implement on all other platforms (crbug.com/v8/6600).
 ['arch != x64 and arch != ia32 and arch != arm64 and arch != arm', {
 'wasm/liftoff': [SKIP],
 'wasm/tier-up-testing-flag': [SKIP],
......
@@ -22,7 +22,7 @@ import os.path
 import re
 import sys
-# TODO(clemensh): Extend to tests.
+# TODO(clemensb): Extend to tests.
 DEFAULT_INPUT = ['base', 'src']
 DEFAULT_GN_FILE = 'BUILD.gn'
 MY_DIR = os.path.dirname(os.path.realpath(__file__))
......