Commit 3b53f7dc authored by yurys@chromium.org

Fix data race in v8::internal::UnboundQueue

The shared divider_ and last_ pointers are now accessed with acquire loads and release stores so that the producer and consumer threads observe each other's updates in the correct order; Dequeue() and Peek() now report an empty queue instead of asserting that it is non-empty.

BUG=249750
R=dvyukov@google.com, jkummerow@chromium.org

Review URL: https://codereview.chromium.org/17294004

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@15219 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 07ae09c1
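For context: the race being fixed is the classic single-producer/single-consumer hand-off on a singly linked list. The producer publishes new nodes through last_ and reclaims consumed nodes behind divider_, while the consumer advances divider_; without paired acquire/release operations, one thread can observe the other's pointer update before the node contents that pointer guards. The following is a minimal, self-contained sketch of the same scheme written with C++11 std::atomic rather than V8's atomicops primitives; the SpscQueue name and layout are illustrative only and are not the code in this change.

// Illustrative sketch only -- not part of this CL. Same divider_/last_
// scheme as v8::internal::UnboundQueue, expressed with C++11 atomics.
// The class name SpscQueue is hypothetical.
#include <atomic>

template <typename Record>
class SpscQueue {
 public:
  SpscQueue() {
    // Dummy node: divider_ always points at the last consumed node.
    first_ = new Node(Record());
    divider_.store(first_, std::memory_order_relaxed);
    last_.store(first_, std::memory_order_relaxed);
  }

  ~SpscQueue() {
    while (first_ != nullptr) {
      Node* tmp = first_;
      first_ = first_->next;
      delete tmp;
    }
  }

  // Producer thread only.
  void Enqueue(const Record& rec) {
    Node* node = new Node(rec);
    last_.load(std::memory_order_relaxed)->next = node;
    // Release: the node's contents must be visible before last_ advances.
    last_.store(node, std::memory_order_release);
    // Reclaim nodes the consumer has already moved past.
    while (first_ != divider_.load(std::memory_order_acquire)) {
      Node* tmp = first_;
      first_ = first_->next;
      delete tmp;
    }
  }

  // Consumer thread only; returns false if the queue is empty.
  bool Dequeue(Record* rec) {
    // Acquire: pairs with the release store to last_ in Enqueue.
    if (divider_.load(std::memory_order_relaxed) ==
        last_.load(std::memory_order_acquire)) {
      return false;
    }
    Node* next = divider_.load(std::memory_order_relaxed)->next;
    *rec = next->value;
    // Release: hand the consumed node back to the producer for deletion.
    divider_.store(next, std::memory_order_release);
    return true;
  }

 private:
  struct Node {
    explicit Node(const Record& v) : value(v), next(nullptr) {}
    Record value;
    Node* next;
  };

  Node* first_;                 // Touched by the producer only.
  std::atomic<Node*> divider_;  // Advanced by the consumer.
  std::atomic<Node*> last_;     // Advanced by the producer.
};

The dummy node created in the constructor lets divider_ always point at the last consumed node, so the producer only ever deletes nodes the consumer has already moved past.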
@@ -191,9 +191,8 @@ void ProfilerEventsProcessor::AddCurrentStack() {
 bool ProfilerEventsProcessor::ProcessCodeEvent(unsigned* dequeue_order) {
-  if (!events_buffer_.IsEmpty()) {
-    CodeEventsContainer record;
-    events_buffer_.Dequeue(&record);
+  CodeEventsContainer record;
+  if (events_buffer_.Dequeue(&record)) {
     switch (record.generic.type) {
 #define PROFILER_TYPE_CASE(type, clss) \
       case CodeEventRecord::type: \
@@ -128,9 +128,8 @@ void OptimizingCompilerThread::InstallOptimizedFunctions() {
   ASSERT(!IsOptimizerThread());
   HandleScope handle_scope(isolate_);
   int functions_installed = 0;
-  while (!output_queue_.IsEmpty()) {
-    OptimizingCompiler* compiler;
-    output_queue_.Dequeue(&compiler);
+  OptimizingCompiler* compiler;
+  while (output_queue_.Dequeue(&compiler)) {
     Compiler::InstallOptimizedCode(compiler);
     functions_installed++;
   }
@@ -68,11 +68,12 @@ void UnboundQueue<Record>::DeleteFirst() {
 template<typename Record>
-void UnboundQueue<Record>::Dequeue(Record* rec) {
-  ASSERT(divider_ != last_);
+bool UnboundQueue<Record>::Dequeue(Record* rec) {
+  if (divider_ == Acquire_Load(&last_)) return false;
   Node* next = reinterpret_cast<Node*>(divider_)->next;
   *rec = next->value;
   Release_Store(&divider_, reinterpret_cast<AtomicWord>(next));
+  return true;
 }
@@ -81,13 +82,22 @@ void UnboundQueue<Record>::Enqueue(const Record& rec) {
   Node*& next = reinterpret_cast<Node*>(last_)->next;
   next = new Node(rec);
   Release_Store(&last_, reinterpret_cast<AtomicWord>(next));
-  while (first_ != reinterpret_cast<Node*>(divider_)) DeleteFirst();
+
+  while (first_ != reinterpret_cast<Node*>(Acquire_Load(&divider_))) {
+    DeleteFirst();
+  }
 }
 
 
+template<typename Record>
+bool UnboundQueue<Record>::IsEmpty() const {
+  return NoBarrier_Load(&divider_) == NoBarrier_Load(&last_);
+}
+
+
 template<typename Record>
-Record* UnboundQueue<Record>::Peek() {
-  ASSERT(divider_ != last_);
+Record* UnboundQueue<Record>::Peek() const {
+  if (divider_ == Acquire_Load(&last_)) return NULL;
   Node* next = reinterpret_cast<Node*>(divider_)->next;
   return &next->value;
 }
@@ -46,10 +46,10 @@ class UnboundQueue BASE_EMBEDDED {
   inline UnboundQueue();
   inline ~UnboundQueue();
 
-  INLINE(void Dequeue(Record* rec));
+  INLINE(bool Dequeue(Record* rec));
   INLINE(void Enqueue(const Record& rec));
-  INLINE(bool IsEmpty()) { return divider_ == last_; }
-  INLINE(Record* Peek());
+  INLINE(bool IsEmpty() const);
+  INLINE(Record* Peek() const);
 
  private:
   INLINE(void DeleteFirst());