Deprecate several unchecked accessors.

R=hpayer@google.com
BUG=v8:1490

Review URL: https://codereview.chromium.org/16663009

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@15123 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 4ac1aa53
......@@ -1495,15 +1495,13 @@ class MarkCompactMarkingVisitor
FIXED_ARRAY_TYPE) return;
// Make sure this is a RegExp that actually contains code.
if (re->TypeTagUnchecked() != JSRegExp::IRREGEXP) return;
if (re->TypeTag() != JSRegExp::IRREGEXP) return;
Object* code = re->DataAtUnchecked(JSRegExp::code_index(is_ascii));
Object* code = re->DataAt(JSRegExp::code_index(is_ascii));
if (!code->IsSmi() &&
HeapObject::cast(code)->map()->instance_type() == CODE_TYPE) {
// Save a copy that can be reinstated if we need the code again.
re->SetDataAtUnchecked(JSRegExp::saved_code_index(is_ascii),
code,
heap);
re->SetDataAt(JSRegExp::saved_code_index(is_ascii), code);
// Saving a copy might create a pointer into compaction candidate
// that was not observed by marker. This might happen if JSRegExp data
......@@ -1515,9 +1513,8 @@ class MarkCompactMarkingVisitor
RecordSlot(slot, slot, code);
// Set a number in the 0-255 range to guarantee no smi overflow.
re->SetDataAtUnchecked(JSRegExp::code_index(is_ascii),
Smi::FromInt(heap->sweep_generation() & 0xff),
heap);
re->SetDataAt(JSRegExp::code_index(is_ascii),
Smi::FromInt(heap->sweep_generation() & 0xff));
} else if (code->IsSmi()) {
int value = Smi::cast(code)->value();
// The regexp has not been compiled yet or there was a compilation error.
......@@ -1528,12 +1525,10 @@ class MarkCompactMarkingVisitor
// Check if we should flush now.
if (value == ((heap->sweep_generation() - kRegExpCodeThreshold) & 0xff)) {
re->SetDataAtUnchecked(JSRegExp::code_index(is_ascii),
Smi::FromInt(JSRegExp::kUninitializedValue),
heap);
re->SetDataAtUnchecked(JSRegExp::saved_code_index(is_ascii),
Smi::FromInt(JSRegExp::kUninitializedValue),
heap);
re->SetDataAt(JSRegExp::code_index(is_ascii),
Smi::FromInt(JSRegExp::kUninitializedValue));
re->SetDataAt(JSRegExp::saved_code_index(is_ascii),
Smi::FromInt(JSRegExp::kUninitializedValue));
}
}
}
......@@ -2448,7 +2443,7 @@ void MarkCompactCollector::ClearNonLiveReferences() {
// This map is used for inobject slack tracking and has been detached
// from SharedFunctionInfo during the mark phase.
// Since it survived the GC, reattach it now.
map->unchecked_constructor()->shared()->AttachInitialMap(map);
JSFunction::cast(map->constructor())->shared()->AttachInitialMap(map);
}
ClearNonLivePrototypeTransitions(map);
......@@ -2479,13 +2474,11 @@ void MarkCompactCollector::ClearNonLivePrototypeTransitions(Map* map) {
int proto_index = proto_offset + new_number_of_transitions * step;
int map_index = map_offset + new_number_of_transitions * step;
if (new_number_of_transitions != i) {
prototype_transitions->set_unchecked(
heap_,
prototype_transitions->set(
proto_index,
prototype,
UPDATE_WRITE_BARRIER);
prototype_transitions->set_unchecked(
heap_,
prototype_transitions->set(
map_index,
cached_map,
SKIP_WRITE_BARRIER);
......
......@@ -2091,30 +2091,6 @@ void FixedArray::set_the_hole(int index) {
}
// GC-only Smi setter ("less debug checks" variant): a Smi is never a heap
// pointer, so no write barrier is needed and no length check is done.
void FixedArray::set_unchecked(int index, Smi* value) {
  ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
  const int field_offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, field_offset, value);
}
// GC-only setter with an explicit write-barrier mode; unlike set(), it
// performs no bounds checking on |index|.
void FixedArray::set_unchecked(Heap* heap,
                               int index,
                               Object* value,
                               WriteBarrierMode mode) {
  const int field_offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, field_offset, value);
  CONDITIONAL_WRITE_BARRIER(heap, this, field_offset, value, mode);
}
// GC-only helper: stores the null sentinel at |index|. The second ASSERT
// checks that null lives outside new space, so no write barrier is needed.
void FixedArray::set_null_unchecked(Heap* heap, int index) {
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!heap->InNewSpace(heap->null_value()));
  const int field_offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, field_offset, heap->null_value());
}
// Returns a pointer to the first double of this array's inline storage,
// i.e. the payload that starts immediately after the object header.
double* FixedDoubleArray::data_start() {
  return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize));
}
......@@ -3577,11 +3553,6 @@ bool Map::is_dictionary_map() {
}
// Unchecked accessor for the map's [constructor] field: reads the raw word
// and reinterprets it as a JSFunction without any type check (GC use only).
JSFunction* Map::unchecked_constructor() {
  Object* raw = READ_FIELD(this, kConstructorOffset);
  return reinterpret_cast<JSFunction*>(raw);
}
// Decodes this Code object's flags from the raw integer field at
// kFlagsOffset.
Code::Flags Code::flags() {
  return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
}
......@@ -4744,11 +4715,6 @@ Code* SharedFunctionInfo::code() {
}
// Unchecked accessor for the [code] field: returns the raw field contents
// reinterpreted as a Code object, with no type verification (GC use only).
Code* SharedFunctionInfo::unchecked_code() {
  Object* raw = READ_FIELD(this, kCodeOffset);
  return reinterpret_cast<Code*>(raw);
}
void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
WRITE_FIELD(this, kCodeOffset, value);
CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
......@@ -5285,12 +5251,6 @@ int Code::body_size() {
}
// Unchecked accessor used during GC: reads the deoptimization-data field
// and reinterprets it as a FixedArray without type checks.
FixedArray* Code::unchecked_deoptimization_data() {
  Object* raw = READ_FIELD(this, kDeoptimizationDataOffset);
  return reinterpret_cast<FixedArray*>(raw);
}
// Unchecked accessor used during GC: reads the relocation-info field and
// reinterprets it as a ByteArray without type checks.
ByteArray* Code::unchecked_relocation_info() {
  Object* raw = READ_FIELD(this, kRelocationInfoOffset);
  return reinterpret_cast<ByteArray*>(raw);
}
......@@ -5366,12 +5326,6 @@ JSRegExp::Type JSRegExp::TypeTag() {
}
// GC-only variant of TypeTag(): reads the tag slot via the unchecked data
// accessor and converts the stored Smi to the Type enum.
JSRegExp::Type JSRegExp::TypeTagUnchecked() {
  Object* tag = DataAtUnchecked(kTagIndex);
  return static_cast<JSRegExp::Type>(Smi::cast(tag)->value());
}
int JSRegExp::CaptureCount() {
switch (TypeTag()) {
case ATOM:
......@@ -5407,13 +5361,6 @@ Object* JSRegExp::DataAt(int index) {
}
// GC-only variant of DataAt(): treats data() as a FixedArray without a
// checked cast and reads slot |index| directly from its fields.
Object* JSRegExp::DataAtUnchecked(int index) {
  FixedArray* elements = reinterpret_cast<FixedArray*>(data());
  const int field_offset = FixedArray::kHeaderSize + index * kPointerSize;
  return READ_FIELD(elements, field_offset);
}
void JSRegExp::SetDataAt(int index, Object* value) {
ASSERT(TypeTag() != NOT_COMPILED);
ASSERT(index >= kDataIndex); // Only implementation data can be set this way.
......@@ -5421,18 +5368,6 @@ void JSRegExp::SetDataAt(int index, Object* value) {
}
void JSRegExp::SetDataAtUnchecked(int index, Object* value, Heap* heap) {
ASSERT(index >= kDataIndex); // Only implementation data can be set this way.
FixedArray* fa = reinterpret_cast<FixedArray*>(data());
if (value->IsSmi()) {
fa->set_unchecked(index, Smi::cast(value));
} else {
// We only do this during GC, so we don't need to notify the write barrier.
fa->set_unchecked(heap, index, value, SKIP_WRITE_BARRIER);
}
}
ElementsKind JSObject::GetElementsKind() {
ElementsKind kind = map()->elements_kind();
#if DEBUG
......
......@@ -2571,7 +2571,7 @@ class FixedArray: public FixedArrayBase {
inline void set(int index, Object* value);
inline bool is_the_hole(int index);
// Setter that doesn't need write barrier).
// Setter that doesn't need write barrier.
inline void set(int index, Smi* value);
// Setter with explicit barrier mode.
inline void set(int index, Object* value, WriteBarrierMode mode);
......@@ -2585,12 +2585,6 @@ class FixedArray: public FixedArrayBase {
inline void set_null(Heap* heap, int index);
inline void set_the_hole(int index);
// Setters with less debug checks for the GC to use.
inline void set_unchecked(int index, Smi* value);
inline void set_null_unchecked(Heap* heap, int index);
inline void set_unchecked(Heap* heap, int index, Object* value,
WriteBarrierMode mode);
inline Object** GetFirstElementAddress();
inline bool ContainsOnlySmisOrHoles();
......@@ -4553,7 +4547,6 @@ class Code: public HeapObject {
// Unchecked accessors to be used during GC.
inline ByteArray* unchecked_relocation_info();
inline FixedArray* unchecked_deoptimization_data();
inline int relocation_size();
......@@ -5320,8 +5313,6 @@ class Map: public HeapObject {
// [constructor]: points back to the function responsible for this map.
DECL_ACCESSORS(constructor, Object)
inline JSFunction* unchecked_constructor();
// [instance descriptors]: describes the object.
DECL_ACCESSORS(instance_descriptors, DescriptorArray)
inline void InitializeDescriptors(DescriptorArray* descriptors);
......@@ -5373,8 +5364,7 @@ class Map: public HeapObject {
inline void SetNumberOfProtoTransitions(int value) {
FixedArray* cache = GetPrototypeTransitions();
ASSERT(cache->length() != 0);
cache->set_unchecked(kProtoTransitionNumberOfEntriesOffset,
Smi::FromInt(value));
cache->set(kProtoTransitionNumberOfEntriesOffset, Smi::FromInt(value));
}
// Lookup in the map's instance descriptors and fill out the result
......@@ -5940,8 +5930,6 @@ class SharedFunctionInfo: public HeapObject {
// [construct stub]: Code stub for constructing instances of this function.
DECL_ACCESSORS(construct_stub, Code)
inline Code* unchecked_code();
// Returns if this function has been compiled to native code yet.
inline bool is_compiled();
......@@ -7132,11 +7120,6 @@ class JSRegExp: public JSObject {
// Set implementation data after the object has been prepared.
inline void SetDataAt(int index, Object* value);
// Used during GC when flushing code or setting age.
inline Object* DataAtUnchecked(int index);
inline void SetDataAtUnchecked(int index, Object* value, Heap* heap);
inline Type TypeTagUnchecked();
static int code_index(bool is_ascii) {
if (is_ascii) {
return kIrregexpASCIICodeIndex;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment