Commit de018fbd authored by ishell, committed by Commit bot

Revert of Reland "Filter invalid slots out from the SlotsBuffer after...

Revert of Reland "Filter invalid slots out from the SlotsBuffer after marking." (patchset #2 id:2 of https://codereview.chromium.org/1032833002/)

Reason for revert:
Reverting risky GC changes that block the V8 roll.

Original issue's description:
> Reland "Filter invalid slots out from the SlotsBuffer after marking."
>
> > There are two reasons invalid slots can appear in the SlotsBuffer:
> > 1) The GC trims the "tail" of an array for which it has already recorded slots and then migrates another object into that "tail".
> > 2) A tagged slot can become a double slot after an object migrates to another map with "shifted" fields (for example, as a result of generalizing an immutable data property to a data field).
>
> > This CL also adds useful machinery that helps trigger incremental write barriers.
>
> > BUG=chromium:454297
> > LOG=Y
>
> NOTRY=true
>
> Committed: https://crrev.com/f86aadd1d45c756467dff8e08a055b462d7a060b
> Cr-Commit-Position: refs/heads/master@{#27433}
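
To make the second failure mode above concrete, here is a minimal, self-contained sketch (toy types only, not V8 code) of what the reverted filter guards against: after an object migrates to a map with shifted fields, a previously recorded tagged slot may now cover raw double bits, so a pointer-updating pass must not treat it as a pointer. The Layout helper and FilterSlots below are illustrative names; the real check in the diff below goes through LayoutDescriptorHelper::IsTagged inside IsSlotInLiveObject.

// Toy model of a slots buffer and a layout-aware filter (illustrative only).
#include <cstddef>
#include <vector>

struct Object;                   // opaque stand-in for a tagged heap object
using ObjectSlot = Object**;     // recorded address of a tagged field

// Hypothetical layout query: is the field at byte offset `off` still a
// tagged pointer under the object's current (post-migration) map?
struct Layout {
  std::vector<bool> tagged;      // one entry per pointer-sized field
  bool IsTagged(size_t off) const {
    size_t i = off / sizeof(void*);
    return i < tagged.size() && tagged[i];
  }
};

// Drop recorded slots that now alias unboxed (raw double) fields, so a later
// pointer-updating pass never reinterprets double bits as a heap pointer.
void FilterSlots(std::vector<ObjectSlot>* slots, const char* object_base,
                 const Layout& new_layout) {
  std::vector<ObjectSlot> kept;
  for (ObjectSlot slot : *slots) {
    size_t off = static_cast<size_t>(
        reinterpret_cast<const char*>(slot) - object_base);
    if (new_layout.IsTagged(off)) kept.push_back(slot);
  }
  slots->swap(kept);
}
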

TBR=machenbach@chromium.org,ulan@chromium.org
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true

Review URL: https://codereview.chromium.org/1041593002

Cr-Commit-Position: refs/heads/master@{#27491}
parent d23a9f7a
@@ -767,11 +767,6 @@ DEFINE_BOOL(stress_compaction, false,
"stress the GC compactor to flush out bugs (implies "
"--force_marking_deque_overflows)")
DEFINE_BOOL(manual_evacuation_candidates_selection, false,
"Test mode only flag. It allows an unit test to select evacuation "
"candidates pages (requires --stress_compaction).")
//
// Dev shell flags
//
......
@@ -287,60 +287,6 @@ bool MarkCompactCollector::StartCompaction(CompactionMode mode) {
}
void MarkCompactCollector::ClearInvalidSlotsBufferEntries(PagedSpace* space) {
PageIterator it(space);
while (it.has_next()) {
Page* p = it.next();
SlotsBuffer::RemoveInvalidSlots(heap_, p->slots_buffer());
}
}
void MarkCompactCollector::ClearInvalidStoreAndSlotsBufferEntries() {
heap_->store_buffer()->ClearInvalidStoreBufferEntries();
ClearInvalidSlotsBufferEntries(heap_->old_pointer_space());
ClearInvalidSlotsBufferEntries(heap_->old_data_space());
ClearInvalidSlotsBufferEntries(heap_->code_space());
ClearInvalidSlotsBufferEntries(heap_->cell_space());
ClearInvalidSlotsBufferEntries(heap_->map_space());
LargeObjectIterator it(heap_->lo_space());
for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) {
MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
SlotsBuffer::RemoveInvalidSlots(heap_, chunk->slots_buffer());
}
}
#ifdef VERIFY_HEAP
static void VerifyValidSlotsBufferEntries(Heap* heap, PagedSpace* space) {
PageIterator it(space);
while (it.has_next()) {
Page* p = it.next();
SlotsBuffer::VerifySlots(heap, p->slots_buffer());
}
}
static void VerifyValidStoreAndSlotsBufferEntries(Heap* heap) {
heap->store_buffer()->VerifyValidStoreBufferEntries();
VerifyValidSlotsBufferEntries(heap, heap->old_pointer_space());
VerifyValidSlotsBufferEntries(heap, heap->old_data_space());
VerifyValidSlotsBufferEntries(heap, heap->code_space());
VerifyValidSlotsBufferEntries(heap, heap->cell_space());
VerifyValidSlotsBufferEntries(heap, heap->map_space());
LargeObjectIterator it(heap->lo_space());
for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) {
MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
SlotsBuffer::VerifySlots(heap, chunk->slots_buffer());
}
}
#endif
void MarkCompactCollector::CollectGarbage() {
// Make sure that Prepare() has been called. The individual steps below will
// update the state as they proceed.
@@ -366,11 +312,11 @@ void MarkCompactCollector::CollectGarbage() {
}
#endif
ClearInvalidStoreAndSlotsBufferEntries();
heap_->store_buffer()->ClearInvalidStoreBufferEntries();
#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
VerifyValidStoreAndSlotsBufferEntries(heap_);
heap_->store_buffer()->VerifyValidStoreBufferEntries();
}
#endif
@@ -777,7 +723,6 @@ void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
int count = 0;
int fragmentation = 0;
int page_number = 0;
Candidate* least = NULL;
PageIterator it(space);
@@ -792,16 +737,9 @@
CHECK(p->slots_buffer() == NULL);
if (FLAG_stress_compaction) {
if (FLAG_manual_evacuation_candidates_selection) {
if (p->IsFlagSet(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING)) {
p->ClearFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
fragmentation = 1;
}
} else {
unsigned int counter = space->heap()->ms_count();
uintptr_t page_number = reinterpret_cast<uintptr_t>(p) >> kPageSizeBits;
if ((counter & 1) == (page_number & 1)) fragmentation = 1;
page_number++;
}
} else if (mode == REDUCE_MEMORY_FOOTPRINT && !FLAG_always_compact) {
// Don't try to release too many pages.
if (estimated_release >= over_reserved) {
@@ -3109,14 +3047,10 @@ bool MarkCompactCollector::TryPromoteObject(HeapObject* object,
}
bool MarkCompactCollector::IsSlotInBlackObject(Page* p, Address slot,
HeapObject** out_object) {
bool MarkCompactCollector::IsSlotInBlackObject(Page* p, Address slot) {
// This function does not support large objects right now.
Space* owner = p->owner();
if (owner == heap_->lo_space() || owner == NULL) {
*out_object = NULL;
return true;
}
if (owner == heap_->lo_space() || owner == NULL) return true;
uint32_t mark_bit_index = p->AddressToMarkbitIndex(slot);
unsigned int start_index = mark_bit_index >> Bitmap::kBitsPerCellLog2;
@@ -3170,7 +3104,6 @@ bool MarkCompactCollector::IsSlotInBlackObject(Page* p, Address slot,
(object->address() + object->Size()) > slot) {
// If the slot is within the last found object in the cell, the slot is
// in a live object.
*out_object = object;
return true;
}
return false;
@@ -3212,38 +3145,28 @@ bool MarkCompactCollector::IsSlotInBlackObjectSlow(Page* p, Address slot) {
}
bool MarkCompactCollector::IsSlotInLiveObject(Address slot) {
HeapObject* object = NULL;
// The target object is black but we don't know if the source slot is black.
// The source object could have died and the slot could be part of a free
// space. Find out based on mark bits if the slot is part of a live object.
if (!IsSlotInBlackObject(Page::FromAddress(slot), slot, &object)) {
bool MarkCompactCollector::IsSlotInLiveObject(HeapObject** address,
HeapObject* object) {
// If the target object is not black, the source slot must be part
// of a non-black (dead) object.
if (!Marking::IsBlack(Marking::MarkBitFrom(object))) {
return false;
}
#if V8_DOUBLE_FIELDS_UNBOXING
// |object| is NULL only when the slot belongs to large object space.
DCHECK(object != NULL ||
Page::FromAnyPointerAddress(heap_, slot)->owner() ==
heap_->lo_space());
// We don't need to check large objects' layout descriptor since it can't
// contain in-object fields anyway.
if (object != NULL) {
// Filter out slots that happens to point to unboxed double fields.
LayoutDescriptorHelper helper(object->map());
bool has_only_tagged_fields = helper.all_fields_tagged();
if (!has_only_tagged_fields &&
!helper.IsTagged(static_cast<int>(slot - object->address()))) {
// The target object is black but we don't know if the source slot is black.
// The source object could have died and the slot could be part of a free
// space. Find out based on mark bits if the slot is part of a live object.
if (!IsSlotInBlackObject(
Page::FromAddress(reinterpret_cast<Address>(address)),
reinterpret_cast<Address>(address))) {
return false;
}
}
#endif
return true;
}
void MarkCompactCollector::VerifyIsSlotInLiveObject(Address slot,
void MarkCompactCollector::VerifyIsSlotInLiveObject(HeapObject** address,
HeapObject* object) {
// The target object has to be black.
CHECK(Marking::IsBlack(Marking::MarkBitFrom(object)));
@@ -3251,7 +3174,9 @@ void MarkCompactCollector::VerifyIsSlotInLiveObject(Address slot,
// The target object is black but we don't know if the source slot is black.
// The source object could have died and the slot could be part of a free
// space. Use the mark bit iterator to find out about liveness of the slot.
CHECK(IsSlotInBlackObjectSlow(Page::FromAddress(slot), slot));
CHECK(IsSlotInBlackObjectSlow(
Page::FromAddress(reinterpret_cast<Address>(address)),
reinterpret_cast<Address>(address)));
}
@@ -4570,66 +4495,6 @@ bool SlotsBuffer::AddTo(SlotsBufferAllocator* allocator,
}
static Object* g_smi_slot = NULL;
void SlotsBuffer::RemoveInvalidSlots(Heap* heap, SlotsBuffer* buffer) {
DCHECK_EQ(Smi::FromInt(0), g_smi_slot);
// Remove entries by replacing them with a dummy slot containing a smi.
const ObjectSlot kRemovedEntry = &g_smi_slot;
while (buffer != NULL) {
SlotsBuffer::ObjectSlot* slots = buffer->slots_;
intptr_t slots_count = buffer->idx_;
for (int slot_idx = 0; slot_idx < slots_count; ++slot_idx) {
ObjectSlot slot = slots[slot_idx];
if (!IsTypedSlot(slot)) {
Object* object = *slot;
if (object->IsHeapObject()) {
if (heap->InNewSpace(object) ||
!heap->mark_compact_collector()->IsSlotInLiveObject(
reinterpret_cast<Address>(slot))) {
slots[slot_idx] = kRemovedEntry;
}
}
} else {
++slot_idx;
DCHECK(slot_idx < slots_count);
}
}
buffer = buffer->next();
}
}
void SlotsBuffer::VerifySlots(Heap* heap, SlotsBuffer* buffer) {
DCHECK_EQ(Smi::FromInt(0), g_smi_slot);
while (buffer != NULL) {
SlotsBuffer::ObjectSlot* slots = buffer->slots_;
intptr_t slots_count = buffer->idx_;
for (int slot_idx = 0; slot_idx < slots_count; ++slot_idx) {
ObjectSlot slot = slots[slot_idx];
if (!IsTypedSlot(slot)) {
Object* object = *slot;
if (object->IsHeapObject()) {
CHECK(!heap->InNewSpace(object));
CHECK(heap->mark_compact_collector()->IsSlotInLiveObject(
reinterpret_cast<Address>(slot)));
}
} else {
++slot_idx;
DCHECK(slot_idx < slots_count);
}
}
buffer = buffer->next();
}
}
static inline SlotsBuffer::SlotType SlotTypeForRMode(RelocInfo::Mode rmode) {
if (RelocInfo::IsCodeTarget(rmode)) {
return SlotsBuffer::CODE_TARGET_SLOT;
......
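
A note on the removal strategy in SlotsBuffer::RemoveInvalidSlots above: stale entries are not compacted out of the buffer; each one is overwritten in place with the address of a static cell holding a smi (g_smi_slot), which later pointer-updating passes can visit harmlessly. Keeping the buffer length stable matters because typed slots occupy two consecutive entries (a type marker followed by an address), which is why the loop skips an extra index when it hits a typed slot. A minimal sketch of the same pattern with toy types (not V8 code; RemoveStaleEntries and g_dummy_cell are illustrative names):

#include <functional>
#include <vector>

using Object = void;
using ObjectSlot = Object**;

static Object* g_dummy_cell = nullptr;  // plays the role of g_smi_slot

// Overwrite stale entries with a dummy slot instead of compacting the buffer,
// so the relative positions of the remaining entries are preserved.
void RemoveStaleEntries(std::vector<ObjectSlot>* buffer,
                        const std::function<bool(ObjectSlot)>& still_valid) {
  const ObjectSlot kRemovedEntry = &g_dummy_cell;
  for (ObjectSlot& slot : *buffer) {
    if (!still_valid(slot)) slot = kRemovedEntry;
  }
}
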
@@ -363,15 +363,6 @@ class SlotsBuffer {
SlotsBuffer** buffer_address, SlotType type, Address addr,
AdditionMode mode);
// Eliminates all stale entries from the slots buffer, i.e., slots that
// are not part of live objects anymore. This method must be called after
// marking, when the whole transitive closure is known and must be called
// before sweeping when mark bits are still intact.
static void RemoveInvalidSlots(Heap* heap, SlotsBuffer* buffer);
// Ensures that there are no invalid slots in the chain of slots buffers.
static void VerifySlots(Heap* heap, SlotsBuffer* buffer);
static const int kNumberOfElements = 1021;
private:
@@ -688,10 +679,10 @@ class MarkCompactCollector {
// The following four methods can just be called after marking, when the
// whole transitive closure is known. They must be called before sweeping
// when mark bits are still intact.
bool IsSlotInBlackObject(Page* p, Address slot, HeapObject** out_object);
bool IsSlotInBlackObject(Page* p, Address slot);
bool IsSlotInBlackObjectSlow(Page* p, Address slot);
bool IsSlotInLiveObject(Address slot);
void VerifyIsSlotInLiveObject(Address slot, HeapObject* object);
bool IsSlotInLiveObject(HeapObject** address, HeapObject* object);
void VerifyIsSlotInLiveObject(HeapObject** address, HeapObject* object);
private:
class SweeperTask;
@@ -703,8 +694,6 @@
bool WillBeDeoptimized(Code* code);
void RemoveDeadInvalidatedCode();
void ProcessInvalidatedCode(ObjectVisitor* visitor);
void ClearInvalidSlotsBufferEntries(PagedSpace* space);
void ClearInvalidStoreAndSlotsBufferEntries();
void StartSweeperThreads();
......
@@ -385,12 +385,6 @@ class MemoryChunk {
// to grey transition is performed in the value.
HAS_PROGRESS_BAR,
// This flag is intended to be used for testing. Works only when both
// FLAG_stress_compaction and FLAG_manual_evacuation_candidates_selection
// are set. It forces the page to become an evacuation candidate at next
// candidates selection cycle.
FORCE_EVACUATION_CANDIDATE_FOR_TESTING,
// Last flag, keep at bottom.
NUM_MEMORY_CHUNK_FLAGS
};
......
@@ -361,14 +361,12 @@ void StoreBuffer::ClearInvalidStoreBufferEntries() {
Address* new_top = old_start_;
for (Address* current = old_start_; current < old_top_; current++) {
Address addr = *current;
Object** slot = reinterpret_cast<Object**>(addr);
Object** slot = reinterpret_cast<Object**>(*current);
Object* object = *slot;
if (heap_->InNewSpace(object) && object->IsHeapObject()) {
// If the target object is not black, the source slot must be part
// of a non-black (dead) object.
HeapObject* heap_object = HeapObject::cast(object);
if (Marking::IsBlack(Marking::MarkBitFrom(heap_object)) &&
heap_->mark_compact_collector()->IsSlotInLiveObject(addr)) {
if (heap_->InNewSpace(object)) {
if (heap_->mark_compact_collector()->IsSlotInLiveObject(
reinterpret_cast<HeapObject**>(slot),
reinterpret_cast<HeapObject*>(object))) {
*new_top++ = addr;
}
}
@@ -391,10 +389,10 @@ void StoreBuffer::VerifyValidStoreBufferEntries() {
for (Address* current = old_start_; current < old_top_; current++) {
Object** slot = reinterpret_cast<Object**>(*current);
Object* object = *slot;
CHECK(object->IsHeapObject());
CHECK(heap_->InNewSpace(object));
heap_->mark_compact_collector()->VerifyIsSlotInLiveObject(
reinterpret_cast<Address>(slot), HeapObject::cast(object));
reinterpret_cast<HeapObject**>(slot),
reinterpret_cast<HeapObject*>(object));
}
}
......
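
The store-buffer filter above uses a different removal strategy: surviving entries are copied down toward new_top and the rest are simply dropped, which works because store-buffer entries are independent addresses with no pairing constraints. A condensed, generic sketch of that compact-in-place idiom (not V8 code):

#include <cstddef>

// Keep only entries that satisfy keep(), compacting them toward the start.
// Returns the new top; entries in [new_top, top) are dead.
template <typename T, typename Pred>
T* CompactInPlace(T* start, T* top, Pred keep) {
  T* new_top = start;
  for (T* current = start; current < top; ++current) {
    if (keep(*current)) *new_top++ = *current;
  }
  return new_top;
}

In the code above the predicate is "the slot still points into new space and still lies inside a live object", and the store buffer then treats the returned position as its new top.
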
@@ -48,12 +48,6 @@ static Handle<String> MakeName(const char* str, int suffix) {
}
Handle<JSObject> GetObject(const char* name) {
return v8::Utils::OpenHandle(
*v8::Handle<v8::Object>::Cast(CcTest::global()->Get(v8_str(name))));
}
static double GetDoubleFieldValue(JSObject* obj, FieldIndex field_index) {
if (obj->IsUnboxedDoubleField(field_index)) {
return obj->RawFastDoublePropertyAt(field_index);
@@ -1311,211 +1305,4 @@ TEST(WriteBarriersInCopyJSObject) {
CHECK_EQ(boom_value, clone->RawFastDoublePropertyAt(index));
}
static void TestWriteBarrier(Handle<Map> map, Handle<Map> new_map,
int tagged_descriptor, int double_descriptor,
bool check_tagged_value = true) {
FLAG_stress_compaction = true;
FLAG_manual_evacuation_candidates_selection = true;
Isolate* isolate = CcTest::i_isolate();
Factory* factory = isolate->factory();
Heap* heap = CcTest::heap();
PagedSpace* old_pointer_space = heap->old_pointer_space();
// The plan: create |obj| by |map| in old space, create |obj_value| in
// new space and ensure that write barrier is triggered when |obj_value| is
// written to property |tagged_descriptor| of |obj|.
// Then migrate object to |new_map| and set proper value for property
// |double_descriptor|. Call GC and ensure that it did not crash during
// store buffer entries updating.
Handle<JSObject> obj;
Handle<HeapObject> obj_value;
{
AlwaysAllocateScope always_allocate(isolate);
obj = factory->NewJSObjectFromMap(map, TENURED, false);
CHECK(old_pointer_space->Contains(*obj));
obj_value = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS);
}
CHECK(heap->InNewSpace(*obj_value));
{
FieldIndex index = FieldIndex::ForDescriptor(*map, tagged_descriptor);
const int n = 153;
for (int i = 0; i < n; i++) {
obj->FastPropertyAtPut(index, *obj_value);
}
}
// Migrate |obj| to |new_map| which should shift fields and put the
// |boom_value| to the slot that was earlier recorded by write barrier.
JSObject::MigrateToMap(obj, new_map);
Address fake_object = reinterpret_cast<Address>(*obj_value) + kPointerSize;
double boom_value = bit_cast<double>(fake_object);
FieldIndex double_field_index =
FieldIndex::ForDescriptor(*new_map, double_descriptor);
CHECK(obj->IsUnboxedDoubleField(double_field_index));
obj->RawFastDoublePropertyAtPut(double_field_index, boom_value);
// Trigger GC to evacuate all candidates.
CcTest::heap()->CollectGarbage(NEW_SPACE, "boom");
if (check_tagged_value) {
FieldIndex tagged_field_index =
FieldIndex::ForDescriptor(*new_map, tagged_descriptor);
CHECK_EQ(*obj_value, obj->RawFastPropertyAt(tagged_field_index));
}
CHECK_EQ(boom_value, obj->RawFastDoublePropertyAt(double_field_index));
}
static void TestIncrementalWriteBarrier(Handle<Map> map, Handle<Map> new_map,
int tagged_descriptor,
int double_descriptor,
bool check_tagged_value = true) {
if (FLAG_never_compact || !FLAG_incremental_marking) return;
FLAG_stress_compaction = true;
FLAG_manual_evacuation_candidates_selection = true;
Isolate* isolate = CcTest::i_isolate();
Factory* factory = isolate->factory();
Heap* heap = CcTest::heap();
PagedSpace* old_pointer_space = heap->old_pointer_space();
// The plan: create |obj| by |map| in old space, create |obj_value| in
// old space and ensure it ends up on an evacuation candidate page. Start
// incremental marking and ensure that incremental write barrier is triggered
// when |obj_value| is written to property |tagged_descriptor| of |obj|.
// Then migrate object to |new_map| and set proper value for property
// |double_descriptor|. Call GC and ensure that it did not crash during
// slots buffer entries updating.
Handle<JSObject> obj;
Handle<HeapObject> obj_value;
Page* ec_page;
{
AlwaysAllocateScope always_allocate(isolate);
obj = factory->NewJSObjectFromMap(map, TENURED, false);
CHECK(old_pointer_space->Contains(*obj));
// Make sure |obj_value| is placed on an old-space evacuation candidate.
SimulateFullSpace(old_pointer_space);
obj_value = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS, TENURED);
ec_page = Page::FromAddress(obj_value->address());
CHECK_NE(ec_page, Page::FromAddress(obj->address()));
}
// Heap is ready, force |ec_page| to become an evacuation candidate and
// simulate incremental marking.
ec_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
SimulateIncrementalMarking(heap);
// Check that everything is ready for triggering incremental write barrier
// (i.e. that both |obj| and |obj_value| are black and the marking phase is
// still active and |obj_value|'s page is indeed an evacuation candidate).
IncrementalMarking* marking = heap->incremental_marking();
CHECK(marking->IsMarking());
CHECK(Marking::IsBlack(Marking::MarkBitFrom(*obj)));
CHECK(Marking::IsBlack(Marking::MarkBitFrom(*obj_value)));
CHECK(MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));
// Trigger incremental write barrier, which should add a slot to |ec_page|'s
// slots buffer.
{
int slots_buffer_len = SlotsBuffer::SizeOfChain(ec_page->slots_buffer());
FieldIndex index = FieldIndex::ForDescriptor(*map, tagged_descriptor);
const int n = SlotsBuffer::kNumberOfElements + 10;
for (int i = 0; i < n; i++) {
obj->FastPropertyAtPut(index, *obj_value);
}
// Ensure that the slot was actually added to the |ec_page|'s slots buffer.
CHECK_EQ(slots_buffer_len + n,
SlotsBuffer::SizeOfChain(ec_page->slots_buffer()));
}
// Migrate |obj| to |new_map| which should shift fields and put the
// |boom_value| to the slot that was earlier recorded by incremental write
// barrier.
JSObject::MigrateToMap(obj, new_map);
double boom_value = bit_cast<double>(UINT64_C(0xbaad0176a37c28e1));
FieldIndex double_field_index =
FieldIndex::ForDescriptor(*new_map, double_descriptor);
CHECK(obj->IsUnboxedDoubleField(double_field_index));
obj->RawFastDoublePropertyAtPut(double_field_index, boom_value);
// Trigger GC to evacuate all candidates.
CcTest::heap()->CollectGarbage(OLD_POINTER_SPACE, "boom");
// Ensure that the values are still there and correct.
CHECK(!MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));
if (check_tagged_value) {
FieldIndex tagged_field_index =
FieldIndex::ForDescriptor(*new_map, tagged_descriptor);
CHECK_EQ(*obj_value, obj->RawFastPropertyAt(tagged_field_index));
}
CHECK_EQ(boom_value, obj->RawFastDoublePropertyAt(double_field_index));
}
enum WriteBarrierKind { OLD_TO_OLD_WRITE_BARRIER, OLD_TO_NEW_WRITE_BARRIER };
static void TestWriteBarrierObjectShiftFieldsRight(
WriteBarrierKind write_barrier_kind) {
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
v8::HandleScope scope(CcTest::isolate());
Handle<HeapType> any_type = HeapType::Any(isolate);
CompileRun("function func() { return 1; }");
Handle<JSObject> func = GetObject("func");
Handle<Map> map = Map::Create(isolate, 10);
map = Map::CopyWithConstant(map, MakeName("prop", 0), func, NONE,
INSERT_TRANSITION).ToHandleChecked();
map = Map::CopyWithField(map, MakeName("prop", 1), any_type, NONE,
Representation::Double(),
INSERT_TRANSITION).ToHandleChecked();
map = Map::CopyWithField(map, MakeName("prop", 2), any_type, NONE,
Representation::Tagged(),
INSERT_TRANSITION).ToHandleChecked();
// Shift fields right by turning constant property to a field.
Handle<Map> new_map = Map::ReconfigureProperty(
map, 0, kData, NONE, Representation::Tagged(), any_type, FORCE_FIELD);
if (write_barrier_kind == OLD_TO_NEW_WRITE_BARRIER) {
TestWriteBarrier(map, new_map, 2, 1);
} else {
CHECK_EQ(OLD_TO_OLD_WRITE_BARRIER, write_barrier_kind);
TestIncrementalWriteBarrier(map, new_map, 2, 1);
}
}
// TODO(ishell): enable when this issue is fixed.
DISABLED_TEST(WriteBarrierObjectShiftFieldsRight) {
TestWriteBarrierObjectShiftFieldsRight(OLD_TO_NEW_WRITE_BARRIER);
}
TEST(IncrementalWriteBarrierObjectShiftFieldsRight) {
TestWriteBarrierObjectShiftFieldsRight(OLD_TO_OLD_WRITE_BARRIER);
}
// TODO(ishell): add respective tests for property kind reconfiguring from
// accessor field to double, once accessor fields are supported by
// Map::ReconfigureProperty().
// TODO(ishell): add respective tests for fast property removal case once
// Map::ReconfigureProperty() supports that.
#endif