// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include <stdlib.h>

#ifdef __linux__
#include <errno.h>
#include <fcntl.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <unistd.h>
#endif

#include <utility>

#include "src/v8.h"

#include "src/full-codegen/full-codegen.h"
#include "src/global-handles.h"
#include "test/cctest/cctest.h"
#include "test/cctest/heap-tester.h"

using namespace v8::internal;
using v8::Just;


51
TEST(MarkingDeque) {
52
  CcTest::InitializeVM();
53 54 55 56
  int mem_size = 20 * kPointerSize;
  byte* mem = NewArray<byte>(20*kPointerSize);
  Address low = reinterpret_cast<Address>(mem);
  Address high = low + mem_size;
57
  MarkingDeque s;
58 59
  s.Initialize(low, high);

60 61
  Address original_address = reinterpret_cast<Address>(&s);
  Address current_address = original_address;
62
  while (!s.IsFull()) {
63
    s.Push(HeapObject::FromAddress(current_address));
64
    current_address += kPointerSize;
65 66
  }

67
  while (!s.IsEmpty()) {
68
    Address value = s.Pop()->address();
69 70
    current_address -= kPointerSize;
    CHECK_EQ(current_address, value);
71 72
  }

73
  CHECK_EQ(original_address, current_address);
74 75 76 77
  DeleteArray(mem);
}


78
HEAP_TEST(Promotion) {
79
  CcTest::InitializeVM();
80
  Heap* heap = CcTest::heap();
81
  heap->ConfigureHeap(1, 1, 1, 0);
82

83
  v8::HandleScope sc(CcTest::isolate());
84 85

  // Allocate a fixed array in the new space.
86
  int array_length =
87
      (Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) /
88
      (4 * kPointerSize);
89
  Object* obj = heap->AllocateFixedArray(array_length).ToObjectChecked();
90 91 92
  Handle<FixedArray> array(FixedArray::cast(obj));

  // Array should be in the new space.
93
  CHECK(heap->InSpace(*array, NEW_SPACE));
94

95
  // Call mark compact GC, so array becomes an old object.
96 97
  heap->CollectAllGarbage();
  heap->CollectAllGarbage();
98 99

  // Array now sits in the old space
100
  CHECK(heap->InSpace(*array, OLD_SPACE));
101 102 103
}


104
HEAP_TEST(NoPromotion) {
105
  CcTest::InitializeVM();
106
  Heap* heap = CcTest::heap();
107
  heap->ConfigureHeap(1, 1, 1, 0);
108

109
  v8::HandleScope sc(CcTest::isolate());
110

111 112
  // Allocate a big fixed array in the new space.
  int array_length =
113
      (Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) /
114
      (2 * kPointerSize);
115
  Object* obj = heap->AllocateFixedArray(array_length).ToObjectChecked();
116 117
  Handle<FixedArray> array(FixedArray::cast(obj));

118 119
  // Array should be in the new space.
  CHECK(heap->InSpace(*array, NEW_SPACE));
120

121
  // Simulate a full old space to make promotion fail.
122
  SimulateFullSpace(heap->old_space());
123 124

  // Call mark compact GC, and it should pass.
125
  heap->CollectGarbage(OLD_SPACE);
126 127 128
}


129
HEAP_TEST(MarkCompactCollector) {
130
  FLAG_incremental_marking = false;
131
  FLAG_retain_maps_for_n_gc = 0;
132
  CcTest::InitializeVM();
133
  Isolate* isolate = CcTest::i_isolate();
134
  Heap* heap = CcTest::heap();
135
  Factory* factory = isolate->factory();
136

137
  v8::HandleScope sc(CcTest::isolate());
138
  Handle<JSGlobalObject> global(isolate->context()->global_object());
139

140
  // call mark-compact when heap is empty
141
  heap->CollectGarbage(OLD_SPACE, "trigger 1");
142 143

  // keep allocating garbage in new space until it fails
144
  const int arraysize = 100;
145
  AllocationResult allocation;
146
  do {
147
    allocation = heap->AllocateFixedArray(arraysize);
148
  } while (!allocation.IsRetry());
149
  heap->CollectGarbage(NEW_SPACE, "trigger 2");
150
  heap->AllocateFixedArray(arraysize).ToObjectChecked();
151 152 153

  // keep allocating maps until it fails
  do {
154 155
    allocation = heap->AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
  } while (!allocation.IsRetry());
156
  heap->CollectGarbage(MAP_SPACE, "trigger 3");
157
  heap->AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize).ToObjectChecked();
158 159 160 161

  { HandleScope scope(isolate);
    // allocate a garbage
    Handle<String> func_name = factory->InternalizeUtf8String("theFunction");
162
    Handle<JSFunction> function = factory->NewFunction(func_name);
163
    JSReceiver::SetProperty(global, func_name, function, SLOPPY).Check();
164 165 166 167

    factory->NewJSObject(function);
  }

168
  heap->CollectGarbage(OLD_SPACE, "trigger 4");
169

170 171
  { HandleScope scope(isolate);
    Handle<String> func_name = factory->InternalizeUtf8String("theFunction");
172
    CHECK(Just(true) == JSReceiver::HasOwnProperty(global, func_name));
173 174
    Handle<Object> func_value =
        Object::GetProperty(global, func_name).ToHandleChecked();
175 176 177 178 179
    CHECK(func_value->IsJSFunction());
    Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
    Handle<JSObject> obj = factory->NewJSObject(function);

    Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
180
    JSReceiver::SetProperty(global, obj_name, obj, SLOPPY).Check();
181 182
    Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
    Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
183
    JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
184
  }
185

186
  heap->CollectGarbage(OLD_SPACE, "trigger 5");
187

188 189
  { HandleScope scope(isolate);
    Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
190
    CHECK(Just(true) == JSReceiver::HasOwnProperty(global, obj_name));
191 192
    Handle<Object> object =
        Object::GetProperty(global, obj_name).ToHandleChecked();
193 194
    CHECK(object->IsJSObject());
    Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
195 196
    CHECK_EQ(*Object::GetProperty(object, prop_name).ToHandleChecked(),
             Smi::FromInt(23));
197
  }
198 199 200
}


201 202
// TODO(1600): compaction of map space is temporary removed from GC.
#if 0
203 204
static Handle<Map> CreateMap(Isolate* isolate) {
  return isolate->factory()->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
205 206 207 208 209
}


TEST(MapCompact) {
  FLAG_max_map_space_pages = 16;
210
  CcTest::InitializeVM();
211
  Isolate* isolate = CcTest::i_isolate();
212
  Factory* factory = isolate->factory();
213 214 215 216 217

  {
    v8::HandleScope sc;
    // keep allocating maps while pointers are still encodable and thus
    // mark compact is permitted.
218
    Handle<JSObject> root = factory->NewJSObjectFromMap(CreateMap());
219 220 221
    do {
      Handle<Map> map = CreateMap();
      map->set_prototype(*root);
222
      root = factory->NewJSObjectFromMap(map);
223
    } while (CcTest::heap()->map_space()->MapPointersEncodable());
224 225 226 227 228
  }
  // Now, as we don't have any handles to just allocated maps, we should
  // be able to trigger map compaction.
  // To give an additional chance to fail, try to force compaction which
  // should be impossible right now.
229
  CcTest::heap()->CollectAllGarbage(Heap::kForceCompactionMask);
230
  // And now map pointers should be encodable again.
231
  CHECK(CcTest::heap()->map_space()->MapPointersEncodable());
232
}
233
#endif
234

235 236

static int NumberOfWeakCalls = 0;
237 238 239 240 241
static void WeakPointerCallback(
    const v8::WeakCallbackData<v8::Value, void>& data) {
  std::pair<v8::Persistent<v8::Value>*, int>* p =
      reinterpret_cast<std::pair<v8::Persistent<v8::Value>*, int>*>(
          data.GetParameter());
242
  DCHECK_EQ(1234, p->second);
243
  NumberOfWeakCalls++;
244
  p->first->Reset();
245 246
}

247

248
HEAP_TEST(ObjectGroups) {
249
  FLAG_incremental_marking = false;
250
  CcTest::InitializeVM();
251
  GlobalHandles* global_handles = CcTest::i_isolate()->global_handles();
252
  Heap* heap = CcTest::heap();
253
  NumberOfWeakCalls = 0;
254
  v8::HandleScope handle_scope(CcTest::isolate());
255 256

  Handle<Object> g1s1 =
257
      global_handles->Create(heap->AllocateFixedArray(1).ToObjectChecked());
258
  Handle<Object> g1s2 =
259
      global_handles->Create(heap->AllocateFixedArray(1).ToObjectChecked());
260
  Handle<Object> g1c1 =
261
      global_handles->Create(heap->AllocateFixedArray(1).ToObjectChecked());
262 263 264 265 266 267 268 269 270 271 272 273
  std::pair<Handle<Object>*, int> g1s1_and_id(&g1s1, 1234);
  GlobalHandles::MakeWeak(g1s1.location(),
                          reinterpret_cast<void*>(&g1s1_and_id),
                          &WeakPointerCallback);
  std::pair<Handle<Object>*, int> g1s2_and_id(&g1s2, 1234);
  GlobalHandles::MakeWeak(g1s2.location(),
                          reinterpret_cast<void*>(&g1s2_and_id),
                          &WeakPointerCallback);
  std::pair<Handle<Object>*, int> g1c1_and_id(&g1c1, 1234);
  GlobalHandles::MakeWeak(g1c1.location(),
                          reinterpret_cast<void*>(&g1c1_and_id),
                          &WeakPointerCallback);
274 275

  Handle<Object> g2s1 =
276
      global_handles->Create(heap->AllocateFixedArray(1).ToObjectChecked());
277
  Handle<Object> g2s2 =
278
    global_handles->Create(heap->AllocateFixedArray(1).ToObjectChecked());
279
  Handle<Object> g2c1 =
280
    global_handles->Create(heap->AllocateFixedArray(1).ToObjectChecked());
281 282 283 284 285 286 287 288 289 290 291 292
  std::pair<Handle<Object>*, int> g2s1_and_id(&g2s1, 1234);
  GlobalHandles::MakeWeak(g2s1.location(),
                          reinterpret_cast<void*>(&g2s1_and_id),
                          &WeakPointerCallback);
  std::pair<Handle<Object>*, int> g2s2_and_id(&g2s2, 1234);
  GlobalHandles::MakeWeak(g2s2.location(),
                          reinterpret_cast<void*>(&g2s2_and_id),
                          &WeakPointerCallback);
  std::pair<Handle<Object>*, int> g2c1_and_id(&g2c1, 1234);
  GlobalHandles::MakeWeak(g2c1.location(),
                          reinterpret_cast<void*>(&g2c1_and_id),
                          &WeakPointerCallback);
293 294

  Handle<Object> root = global_handles->Create(*g1s1);  // make a root.
295 296 297 298 299

  // Connect group 1 and 2, make a cycle.
  Handle<FixedArray>::cast(g1s2)->set(0, *g2s2);
  Handle<FixedArray>::cast(g2s1)->set(0, *g1s1);

300 301 302
  {
    Object** g1_objects[] = { g1s1.location(), g1s2.location() };
    Object** g2_objects[] = { g2s1.location(), g2s2.location() };
303
    global_handles->AddObjectGroup(g1_objects, 2, NULL);
304 305
    global_handles->SetReference(Handle<HeapObject>::cast(g1s1).location(),
                                 g1c1.location());
306
    global_handles->AddObjectGroup(g2_objects, 2, NULL);
307 308
    global_handles->SetReference(Handle<HeapObject>::cast(g2s1).location(),
                                 g2c1.location());
309
  }
310
  // Do a full GC
311
  heap->CollectGarbage(OLD_SPACE);
312 313 314 315 316

  // All object should be alive.
  CHECK_EQ(0, NumberOfWeakCalls);

  // Weaken the root.
317 318 319 320
  std::pair<Handle<Object>*, int> root_and_id(&root, 1234);
  GlobalHandles::MakeWeak(root.location(),
                          reinterpret_cast<void*>(&root_and_id),
                          &WeakPointerCallback);
321 322
  // But make children strong roots---all the objects (except for children)
  // should be collectable now.
323 324
  global_handles->ClearWeakness(g1c1.location());
  global_handles->ClearWeakness(g2c1.location());
325 326

  // Groups are deleted, rebuild groups.
327 328 329
  {
    Object** g1_objects[] = { g1s1.location(), g1s2.location() };
    Object** g2_objects[] = { g2s1.location(), g2s2.location() };
330
    global_handles->AddObjectGroup(g1_objects, 2, NULL);
331 332
    global_handles->SetReference(Handle<HeapObject>::cast(g1s1).location(),
                                 g1c1.location());
333
    global_handles->AddObjectGroup(g2_objects, 2, NULL);
334 335
    global_handles->SetReference(Handle<HeapObject>::cast(g2s1).location(),
                                 g2c1.location());
336
  }
337

338
  heap->CollectGarbage(OLD_SPACE);
339 340 341

  // All objects should be gone. 5 global handles in total.
  CHECK_EQ(5, NumberOfWeakCalls);
342 343

  // And now make children weak again and collect them.
344 345 346 347 348 349
  GlobalHandles::MakeWeak(g1c1.location(),
                          reinterpret_cast<void*>(&g1c1_and_id),
                          &WeakPointerCallback);
  GlobalHandles::MakeWeak(g2c1.location(),
                          reinterpret_cast<void*>(&g2c1_and_id),
                          &WeakPointerCallback);
350

351
  heap->CollectGarbage(OLD_SPACE);
352
  CHECK_EQ(7, NumberOfWeakCalls);
353
}
354 355 356 357 358 359 360 361 362


class TestRetainedObjectInfo : public v8::RetainedObjectInfo {
 public:
  TestRetainedObjectInfo() : has_been_disposed_(false) {}

  bool has_been_disposed() { return has_been_disposed_; }

  virtual void Dispose() {
363
    DCHECK(!has_been_disposed_);
364 365 366 367 368 369 370 371 372 373 374 375 376 377 378 379
    has_been_disposed_ = true;
  }

  virtual bool IsEquivalent(v8::RetainedObjectInfo* other) {
    return other == this;
  }

  virtual intptr_t GetHash() { return 0; }

  virtual const char* GetLabel() { return "whatever"; }

 private:
  bool has_been_disposed_;
};


380
TEST(EmptyObjectGroups) {
381
  CcTest::InitializeVM();
382
  GlobalHandles* global_handles = CcTest::i_isolate()->global_handles();
383

384
  v8::HandleScope handle_scope(CcTest::isolate());
385 386 387

  TestRetainedObjectInfo info;
  global_handles->AddObjectGroup(NULL, 0, &info);
388
  DCHECK(info.has_been_disposed());
389
}
390 391


392 393 394 395 396 397 398
#if defined(__has_feature)
#if __has_feature(address_sanitizer)
#define V8_WITH_ASAN 1
#endif
#endif


// Here is a memory use test that uses /proc, and is therefore Linux-only.  We
// do not care how much memory the simulator uses, since it is only there for
// debugging purposes. Testing with ASAN doesn't make sense, either.
#if defined(__linux__) && !defined(USE_SIMULATOR) && !defined(V8_WITH_ASAN)


static uintptr_t ReadLong(char* buffer, intptr_t* position, int base) {
  char* end_address = buffer + *position;
  uintptr_t result = strtoul(buffer + *position, &end_address, base);
  CHECK(result != ULONG_MAX || errno != ERANGE);
  CHECK(end_address > buffer + *position);
  *position = end_address - buffer;
  return result;
}


415 416 417 418
// The memory use computed this way is not entirely accurate and depends on
// the way malloc allocates memory.  That's why the memory use may seem to
// increase even though the sum of the allocated object sizes decreases.  It
// also means that the memory use depends on the kernel and stdlib.
419 420 421 422 423 424 425 426
static intptr_t MemoryInUse() {
  intptr_t memory_use = 0;

  int fd = open("/proc/self/maps", O_RDONLY);
  if (fd < 0) return -1;

  const int kBufSize = 10000;
  char buffer[kBufSize];
427
  ssize_t length = read(fd, buffer, kBufSize);
428 429 430 431 432 433 434 435 436 437 438 439 440 441 442 443 444 445 446
  intptr_t line_start = 0;
  CHECK_LT(length, kBufSize);  // Make the buffer bigger.
  CHECK_GT(length, 0);  // We have to find some data in the file.
  while (line_start < length) {
    if (buffer[line_start] == '\n') {
      line_start++;
      continue;
    }
    intptr_t position = line_start;
    uintptr_t start = ReadLong(buffer, &position, 16);
    CHECK_EQ(buffer[position++], '-');
    uintptr_t end = ReadLong(buffer, &position, 16);
    CHECK_EQ(buffer[position++], ' ');
    CHECK(buffer[position] == '-' || buffer[position] == 'r');
    bool read_permission = (buffer[position++] == 'r');
    CHECK(buffer[position] == '-' || buffer[position] == 'w');
    bool write_permission = (buffer[position++] == 'w');
    CHECK(buffer[position] == '-' || buffer[position] == 'x');
    bool execute_permission = (buffer[position++] == 'x');
447
    CHECK(buffer[position] == 's' || buffer[position] == 'p');
448 449 450 451 452 453 454 455 456 457 458 459 460 461 462 463 464 465 466 467 468 469 470 471 472
    bool private_mapping = (buffer[position++] == 'p');
    CHECK_EQ(buffer[position++], ' ');
    uintptr_t offset = ReadLong(buffer, &position, 16);
    USE(offset);
    CHECK_EQ(buffer[position++], ' ');
    uintptr_t major = ReadLong(buffer, &position, 16);
    USE(major);
    CHECK_EQ(buffer[position++], ':');
    uintptr_t minor = ReadLong(buffer, &position, 16);
    USE(minor);
    CHECK_EQ(buffer[position++], ' ');
    uintptr_t inode = ReadLong(buffer, &position, 10);
    while (position < length && buffer[position] != '\n') position++;
    if ((read_permission || write_permission || execute_permission) &&
        private_mapping && inode == 0) {
      memory_use += (end - start);
    }

    line_start = position;
  }
  close(fd);
  return memory_use;
}


473
intptr_t ShortLivingIsolate() {
474 475 476
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
477 478
  { v8::Isolate::Scope isolate_scope(isolate);
    v8::Locker lock(isolate);
479
    v8::HandleScope handle_scope(isolate);
480 481 482 483 484 485 486 487 488
    v8::Local<v8::Context> context = v8::Context::New(isolate);
    CHECK(!context.IsEmpty());
  }
  isolate->Dispose();
  return MemoryInUse();
}


TEST(RegressJoinThreadsOnIsolateDeinit) {
489
  intptr_t size_limit = ShortLivingIsolate() * 2;
490
  for (int i = 0; i < 10; i++) {
491
    CHECK_GT(size_limit, ShortLivingIsolate());
492 493 494
  }
}

#endif  // __linux__ and !USE_SIMULATOR