Commit e1742a67 authored by yangguo@chromium.org

Remove relocation lock.

Freeze HValue hash codes that are based on object addresses.

R=svenpanne@chromium.org
BUG=

Review URL: https://chromiumcodereview.appspot.com/14040006

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@14273 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 9585ead1
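
Why this is safe: GVN previously identified heap objects by their raw addresses, which is why the optimizer had to hold Heap::RelocationLock against a moving GC. This commit instead snapshots each address into a UniqueValueId on the main thread, before the graph is handed to the optimizing compiler thread, so later comparisons never dereference a handle. A minimal, self-contained sketch of that idea (an illustration only, not the exact class the commit adds; the header diff is collapsed below):

// Sketch only: capture the object's address once, on a thread where the
// object cannot move, then compare and hash nothing but the frozen word.
#include <cassert>
#include <cstdint>

class UniqueValueId {
 public:
  UniqueValueId() : raw_address_(0) {}                // uninitialized id
  explicit UniqueValueId(const void* object)          // freeze the address
      : raw_address_(reinterpret_cast<std::uintptr_t>(object)) {
    assert(IsInitialized());
  }
  bool IsInitialized() const { return raw_address_ != 0; }
  // Safe off the main thread: no dereference, only the frozen word is read.
  bool operator==(const UniqueValueId& other) const {
    assert(IsInitialized() && other.IsInitialized());
    return raw_address_ == other.raw_address_;
  }
  bool operator!=(const UniqueValueId& other) const {
    return !(*this == other);
  }
  std::intptr_t Hashcode() const {
    assert(IsInitialized());
    return static_cast<std::intptr_t>(raw_address_);
  }
 private:
  std::uintptr_t raw_address_;
};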
......
@@ -59,7 +59,6 @@ inline bool Handle<T>::is_identical_to(const Handle<T> other) const {
   if (FLAG_enable_slow_asserts) {
     Isolate* isolate = Isolate::Current();
     CHECK(isolate->AllowHandleDereference() ||
-          Heap::RelocationLock::IsLocked(isolate->heap()) ||
           !isolate->optimizing_compiler_thread()->IsOptimizerThread());
   }
 #endif  // DEBUG
......
......
@@ -162,8 +162,7 @@ Heap::Heap()
 #endif
       promotion_queue_(this),
       configured_(false),
-      chunks_queued_for_free_(NULL),
-      relocation_mutex_(NULL) {
+      chunks_queued_for_free_(NULL) {
   // Allow build-time customization of the max semispace size. Building
   // V8 with snapshots and a non-default max semispace size is much
   // easier if you can define it as part of the build environment.
......
@@ -1294,8 +1293,6 @@ class ScavengeWeakObjectRetainer : public WeakObjectRetainer {
 void Heap::Scavenge() {
-  RelocationLock relocation_lock(this);
-
 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap) VerifyNonPointerSpacePointers();
 #endif
......
@@ -6628,11 +6625,6 @@ bool Heap::SetUp() {
   store_buffer()->SetUp();
 
-  if (FLAG_parallel_recompilation) relocation_mutex_ = OS::CreateMutex();
-
-#ifdef DEBUG
-  relocation_mutex_locked_ = false;
-#endif  // DEBUG
 
   return true;
 }
......
@@ -6735,8 +6727,6 @@ void Heap::TearDown() {
   incremental_marking()->TearDown();
 
   isolate_->memory_allocator()->TearDown();
-
-  delete relocation_mutex_;
 }
......
......
@@ -1846,38 +1846,6 @@ class Heap {
   void CheckpointObjectStats();
 
-  // We don't use a ScopedLock here since we want to lock the heap
-  // only when FLAG_parallel_recompilation is true.
-  class RelocationLock {
-   public:
-    explicit RelocationLock(Heap* heap) : heap_(heap) {
-      if (FLAG_parallel_recompilation) {
-        heap_->relocation_mutex_->Lock();
-#ifdef DEBUG
-        heap_->relocation_mutex_locked_ = true;
-#endif  // DEBUG
-      }
-    }
-
-    ~RelocationLock() {
-      if (FLAG_parallel_recompilation) {
-#ifdef DEBUG
-        heap_->relocation_mutex_locked_ = false;
-#endif  // DEBUG
-        heap_->relocation_mutex_->Unlock();
-      }
-    }
-
-#ifdef DEBUG
-    static bool IsLocked(Heap* heap) {
-      return heap->relocation_mutex_locked_;
-    }
-#endif  // DEBUG
-
-   private:
-    Heap* heap_;
-  };
-
  private:
   Heap();
......
@@ -2347,11 +2315,6 @@ class Heap {
   MemoryChunk* chunks_queued_for_free_;
 
-  Mutex* relocation_mutex_;
-#ifdef DEBUG
-  bool relocation_mutex_locked_;
-#endif  // DEBUG;
-
   friend class Factory;
   friend class GCTracer;
   friend class DisallowAllocationFailure;
......
......
@@ -1584,10 +1584,10 @@ void HCheckMaps::SetSideEffectDominator(GVNFlag side_effect,
   // for which the map is known.
   if (HasNoUses() && dominator->IsStoreNamedField()) {
     HStoreNamedField* store = HStoreNamedField::cast(dominator);
-    Handle<Map> map = store->transition();
-    if (map.is_null() || store->object() != value()) return;
+    UniqueValueId map_unique_id = store->transition_unique_id();
+    if (!map_unique_id.IsInitialized() || store->object() != value()) return;
     for (int i = 0; i < map_set()->length(); i++) {
-      if (map.is_identical_to(map_set()->at(i))) {
+      if (map_unique_id == map_unique_ids_.at(i)) {
         DeleteAndReplaceWith(NULL);
         return;
       }
......
@@ -2047,6 +2047,7 @@ static bool IsInteger32(double value) {
 HConstant::HConstant(Handle<Object> handle, Representation r)
   : handle_(handle),
+    unique_id_(),
     has_int32_value_(false),
     has_double_value_(false),
     is_internalized_string_(false),
......
@@ -2075,11 +2076,13 @@ HConstant::HConstant(Handle<Object> handle, Representation r)
 
 HConstant::HConstant(Handle<Object> handle,
+                     UniqueValueId unique_id,
                      Representation r,
                      HType type,
                      bool is_internalize_string,
                      bool boolean_value)
     : handle_(handle),
+      unique_id_(unique_id),
       has_int32_value_(false),
       has_double_value_(false),
       is_internalized_string_(is_internalize_string),
......
@@ -2095,7 +2098,9 @@ HConstant::HConstant(Handle<Object> handle,
 HConstant::HConstant(int32_t integer_value,
                      Representation r,
                      Handle<Object> optional_handle)
-    : has_int32_value_(true),
+    : handle_(optional_handle),
+      unique_id_(),
+      has_int32_value_(true),
       has_double_value_(true),
       is_internalized_string_(false),
       boolean_value_(integer_value != 0),
......
@@ -2108,7 +2113,9 @@ HConstant::HConstant(int32_t integer_value,
 HConstant::HConstant(double double_value,
                      Representation r,
                      Handle<Object> optional_handle)
-    : has_int32_value_(IsInteger32(double_value)),
+    : handle_(optional_handle),
+      unique_id_(),
+      has_int32_value_(IsInteger32(double_value)),
       has_double_value_(true),
       is_internalized_string_(false),
       boolean_value_(double_value != 0 && !isnan(double_value)),
......
@@ -2133,8 +2140,12 @@ HConstant* HConstant::CopyToRepresentation(Representation r, Zone* zone) const {
   if (has_int32_value_) return new(zone) HConstant(int32_value_, r, handle_);
   if (has_double_value_) return new(zone) HConstant(double_value_, r, handle_);
   ASSERT(!handle_.is_null());
-  return new(zone) HConstant(
-      handle_, r, type_from_value_, is_internalized_string_, boolean_value_);
+  return new(zone) HConstant(handle_,
+                             unique_id_,
+                             r,
+                             type_from_value_,
+                             is_internalized_string_,
+                             boolean_value_);
 }
......
@@ -2459,6 +2470,8 @@ HLoadNamedFieldPolymorphic::HLoadNamedFieldPolymorphic(HValue* context,
                                                        Zone* zone)
     : types_(Min(types->length(), kMaxLoadPolymorphism), zone),
       name_(name),
+      types_unique_ids_(0, zone),
+      name_unique_id_(),
       need_generic_(false) {
   SetOperandAt(0, context);
   SetOperandAt(1, object);
......
@@ -2525,15 +2538,39 @@ HLoadNamedFieldPolymorphic::HLoadNamedFieldPolymorphic(HValue* context,
 }
 
 
+void HCheckMaps::FinalizeUniqueValueId() {
+  if (!map_unique_ids_.is_empty()) return;
+  Zone* zone = block()->zone();
+  map_unique_ids_.Initialize(map_set_.length(), zone);
+  for (int i = 0; i < map_set_.length(); i++) {
+    map_unique_ids_.Add(UniqueValueId(map_set_.at(i)), zone);
+  }
+}
+
+
+void HLoadNamedFieldPolymorphic::FinalizeUniqueValueId() {
+  if (!types_unique_ids_.is_empty()) return;
+  Zone* zone = block()->zone();
+  types_unique_ids_.Initialize(types_.length(), zone);
+  for (int i = 0; i < types_.length(); i++) {
+    types_unique_ids_.Add(UniqueValueId(types_.at(i)), zone);
+  }
+  name_unique_id_ = UniqueValueId(name_);
+}
+
+
 bool HLoadNamedFieldPolymorphic::DataEquals(HValue* value) {
+  ASSERT_EQ(types_.length(), types_unique_ids_.length());
   HLoadNamedFieldPolymorphic* other = HLoadNamedFieldPolymorphic::cast(value);
-  if (types_.length() != other->types()->length()) return false;
-  if (!name_.is_identical_to(other->name())) return false;
+  if (name_unique_id_ != other->name_unique_id_) return false;
+  if (types_unique_ids_.length() != other->types_unique_ids_.length()) {
+    return false;
+  }
   if (need_generic_ != other->need_generic_) return false;
-  for (int i = 0; i < types_.length(); i++) {
+  for (int i = 0; i < types_unique_ids_.length(); i++) {
     bool found = false;
-    for (int j = 0; j < types_.length(); j++) {
-      if (types_.at(j).is_identical_to(other->types()->at(i))) {
+    for (int j = 0; j < types_unique_ids_.length(); j++) {
+      if (types_unique_ids_.at(j) == other->types_unique_ids_.at(i)) {
         found = true;
         break;
       }
......
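
The .cc changes above all follow one two-phase protocol: each handle-carrying instruction caches its UniqueValueIds once in FinalizeUniqueValueId() on the main thread, and DataEquals()/GVN afterwards touch only the cached ids. A hedged sketch of that protocol with a hypothetical instruction type (HFooSketch is invented for illustration and reuses the UniqueValueId sketch above):

#include <cstddef>
#include <vector>

struct HFooSketch {
  std::vector<const void*> handles;       // stand-ins for Handle<Map> etc.
  std::vector<UniqueValueId> unique_ids;  // empty until finalized

  // Main thread, no GC in between: freeze every handle-derived identity.
  void FinalizeUniqueValueId() {
    if (!unique_ids.empty()) return;      // idempotent, like the real code
    for (const void* h : handles) unique_ids.push_back(UniqueValueId(h));
  }

  // Optimizer thread: equality reads only the frozen ids. (The real
  // HLoadNamedFieldPolymorphic::DataEquals is order-insensitive; this
  // ordered comparison is a simplification.)
  bool DataEquals(const HFooSketch& other) const {
    if (unique_ids.size() != other.unique_ids.size()) return false;
    for (std::size_t i = 0; i < unique_ids.size(); ++i) {
      if (unique_ids[i] != other.unique_ids[i]) return false;
    }
    return true;
  }
};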
This diff is collapsed.
......
@@ -517,7 +517,6 @@ class ReachabilityAnalyzer BASE_EMBEDDED {
 void HGraph::Verify(bool do_full_verify) const {
   // Allow dereferencing for debug mode verification.
-  Heap::RelocationLock(isolate()->heap());
   HandleDereferenceGuard allow_handle_deref(isolate(),
                                             HandleDereferenceGuard::ALLOW);
   for (int i = 0; i < blocks_.length(); i++) {
......
@@ -619,6 +618,7 @@ HConstant* HGraph::GetConstant##Name() {                     \
   if (!constant_##name##_.is_set()) {                                       \
     HConstant* constant = new(zone()) HConstant(                            \
         isolate()->factory()->name##_value(),                               \
+        UniqueValueId(isolate()->heap()->name##_value()),                   \
         Representation::Tagged(),                                           \
         htype,                                                              \
         false,                                                              \
......
@@ -880,6 +880,7 @@ HGraph* HGraphBuilder::CreateGraph() {
   HPhase phase("H_Block building", isolate());
   set_current_block(graph()->entry_block());
   if (!BuildGraph()) return NULL;
+  graph()->FinalizeUniqueValueIds();
   return graph_;
 }
......
@@ -1696,6 +1697,19 @@ HBasicBlock* HGraph::CreateBasicBlock() {
 }
 
 
+void HGraph::FinalizeUniqueValueIds() {
+  AssertNoAllocation no_gc;
+  ASSERT(!isolate()->optimizing_compiler_thread()->IsOptimizerThread());
+  for (int i = 0; i < blocks()->length(); ++i) {
+    for (HInstruction* instr = blocks()->at(i)->first();
+         instr != NULL;
+         instr = instr->next()) {
+      instr->FinalizeUniqueValueId();
+    }
+  }
+}
+
+
 void HGraph::Canonicalize() {
   if (!FLAG_use_canonicalizing) return;
   HPhase phase("H_Canonicalize", this);
......
@@ -4327,8 +4341,6 @@ bool HOptimizedGraphBuilder::BuildGraph() {
 void HGraph::GlobalValueNumbering() {
   // Perform common subexpression elimination and loop-invariant code motion.
   if (FLAG_use_gvn) {
-    // We use objects' raw addresses for identification, so they must not move.
-    Heap::RelocationLock relocation_lock(isolate()->heap());
     HPhase phase("H_Global value numbering", this);
     HGlobalValueNumberer gvn(this, info());
     bool removed_side_effects = gvn.Analyze();
......
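
The ordering in CreateGraph is what makes the lock removable: BuildGraph and FinalizeUniqueValueIds both run on the main thread (the AssertNoAllocation scope rules out a GC between freezing and handoff), so everything the optimizer thread runs later, GVN included, compares frozen ids only. A usage sketch of the two phases, assuming the hypothetical UniqueValueId and HFooSketch classes from the sketches above:

#include <cassert>

int main() {
  int object_a = 1, object_b = 2;  // stand-ins for movable heap objects
  HFooSketch x{{&object_a, &object_b}, {}};
  HFooSketch y{{&object_a, &object_b}, {}};
  // Phase 1, main thread (mirrors HGraph::FinalizeUniqueValueIds()):
  // freeze identities while nothing can move.
  x.FinalizeUniqueValueId();
  y.FinalizeUniqueValueId();
  // Phase 2, optimizer thread (mirrors GVN): compare without dereferencing.
  assert(x.DataEquals(y));
  return 0;
}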
......
@@ -260,6 +260,7 @@ class HGraph: public ZoneObject {
   HBasicBlock* entry_block() const { return entry_block_; }
   HEnvironment* start_environment() const { return start_environment_; }
 
+  void FinalizeUniqueValueIds();
   void InitializeInferredTypes();
   void InsertTypeConversions();
   void MergeRemovableSimulates();
......
......
@@ -3125,8 +3125,6 @@ void MarkCompactCollector::ProcessInvalidatedCode(ObjectVisitor* visitor) {
 void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
-  Heap::RelocationLock relocation_lock(heap());
-
   bool code_slots_filtering_required;
   { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP_NEWSPACE);
     code_slots_filtering_required = MarkInvalidatedCode();
......