// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include <stdlib.h>

#include <utility>

#include "src/v8.h"

#include "src/compilation-cache.h"
#include "src/execution.h"
#include "src/factory.h"
#include "src/global-handles.h"
#include "src/macro-assembler.h"
#include "src/stub-cache.h"
#include "test/cctest/cctest.h"

using namespace v8::internal;

43 44
// Go through all incremental marking steps in one swoop.
static void SimulateIncrementalMarking() {
45 46
  MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
47 48
  if (collector->IsConcurrentSweepingInProgress()) {
    collector->WaitUntilSweepingCompleted();
49
  }
50 51 52 53
  CHECK(marking->IsMarking() || marking->IsStopped());
  if (marking->IsStopped()) {
    marking->Start();
  }
54 55 56 57 58 59 60 61
  CHECK(marking->IsMarking());
  while (!marking->IsComplete()) {
    marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
  }
  CHECK(marking->IsComplete());
}


62 63 64
static void CheckMap(Map* map, int type, int instance_size) {
  CHECK(map->IsHeapObject());
#ifdef DEBUG
65
  CHECK(CcTest::heap()->Contains(map));
66
#endif
67
  CHECK_EQ(CcTest::heap()->meta_map(), map->map());
68 69 70 71 72 73
  CHECK_EQ(type, map->instance_type());
  CHECK_EQ(instance_size, map->instance_size());
}


// Spot-checks a few well-known root maps against their expected instance
// types and sizes.
TEST(HeapMaps) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  CheckMap(heap->meta_map(), MAP_TYPE, Map::kSize);
  CheckMap(heap->heap_number_map(), HEAP_NUMBER_TYPE, HeapNumber::kSize);
  CheckMap(heap->fixed_array_map(), FIXED_ARRAY_TYPE, kVariableSizeSentinel);
  CheckMap(heap->string_map(), STRING_TYPE, kVariableSizeSentinel);
}


83
static void CheckOddball(Isolate* isolate, Object* obj, const char* string) {
84
  CHECK(obj->IsOddball());
85
  Handle<Object> handle(obj, isolate);
86
  Object* print_string =
87
      *Execution::ToString(isolate, handle).ToHandleChecked();
88
  CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
89 90 91
}


92
static void CheckSmi(Isolate* isolate, int value, const char* string) {
93
  Handle<Object> handle(Smi::FromInt(value), isolate);
94
  Object* print_string =
95
      *Execution::ToString(isolate, handle).ToHandleChecked();
96
  CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
97 98 99
}


100
static void CheckNumber(Isolate* isolate, double value, const char* string) {
101 102 103 104 105
  Handle<Object> number = isolate->factory()->NewNumber(value);
  CHECK(number->IsNumber());
  Handle<Object> print_string =
      Execution::ToString(isolate, number).ToHandleChecked();
  CHECK(String::cast(*print_string)->IsUtf8EqualTo(CStrVector(string)));
106 107 108
}


109
static void CheckFindCodeObject(Isolate* isolate) {
110 111 112
  // Test FindCodeObject
#define __ assm.

113
  Assembler assm(isolate, NULL, 0);
114 115 116 117 118

  __ nop();  // supported on all architectures

  CodeDesc desc;
  assm.GetCode(&desc);
119 120
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
121 122
  CHECK(code->IsCode());

123
  HeapObject* obj = HeapObject::cast(*code);
124 125 126
  Address obj_addr = obj->address();

  for (int i = 0; i < obj->Size(); i += kPointerSize) {
127
    Object* found = isolate->FindCodeObject(obj_addr + i);
128
    CHECK_EQ(*code, found);
129 130
  }

131 132 133
  Handle<Code> copy = isolate->factory()->NewCode(
      desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
  HeapObject* obj_copy = HeapObject::cast(*copy);
134 135
  Object* not_right = isolate->FindCodeObject(obj_copy->address() +
                                              obj_copy->Size() / 2);
136
  CHECK(not_right != *code);
137 138 139
}


140 141 142 143 144 145 146 147 148 149
TEST(HandleNull) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  HandleScope outer_scope(isolate);
  LocalContext context;
  Handle<Object> n(reinterpret_cast<Object*>(NULL), isolate);
  CHECK(!n.is_null());
}


150
TEST(HeapObjects) {
151
  CcTest::InitializeVM();
152
  Isolate* isolate = CcTest::i_isolate();
153
  Factory* factory = isolate->factory();
154
  Heap* heap = isolate->heap();
155

156
  HandleScope sc(isolate);
157
  Handle<Object> value = factory->NewNumber(1.000123);
158 159 160 161
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(1.000123, value->Number());

162
  value = factory->NewNumber(1.0);
163 164 165 166
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(1.0, value->Number());

167
  value = factory->NewNumberFromInt(1024);
168 169 170 171
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(1024.0, value->Number());

172
  value = factory->NewNumberFromInt(Smi::kMinValue);
173 174
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
175
  CHECK_EQ(Smi::kMinValue, Handle<Smi>::cast(value)->value());
176

177
  value = factory->NewNumberFromInt(Smi::kMaxValue);
178 179
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
180
  CHECK_EQ(Smi::kMaxValue, Handle<Smi>::cast(value)->value());
181

182 183
#if !defined(V8_TARGET_ARCH_X64) && !defined(V8_TARGET_ARCH_ARM64) && \
    !defined(V8_TARGET_ARCH_MIPS64)
184
  // TODO(lrn): We need a NumberFromIntptr function in order to test this.
185
  value = factory->NewNumberFromInt(Smi::kMinValue - 1);
186 187 188
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(Smi::kMinValue - 1), value->Number());
189
#endif
190

191
  value = factory->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
192 193
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
194 195
  CHECK_EQ(static_cast<double>(static_cast<uint32_t>(Smi::kMaxValue) + 1),
           value->Number());
196

197
  value = factory->NewNumberFromUint(static_cast<uint32_t>(1) << 31);
198 199 200 201 202
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(static_cast<uint32_t>(1) << 31),
           value->Number());

203
  // nan oddball checks
204 205
  CHECK(factory->nan_value()->IsNumber());
  CHECK(std::isnan(factory->nan_value()->Number()));
206

207
  Handle<String> s = factory->NewStringFromStaticAscii("fisk hest ");
208 209
  CHECK(s->IsString());
  CHECK_EQ(10, s->length());
210

211
  Handle<String> object_string = Handle<String>::cast(factory->Object_string());
212
  Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object());
213
  CHECK(JSReceiver::HasOwnProperty(global, object_string));
214 215

  // Check ToString for oddballs
216 217 218 219
  CheckOddball(isolate, heap->true_value(), "true");
  CheckOddball(isolate, heap->false_value(), "false");
  CheckOddball(isolate, heap->null_value(), "null");
  CheckOddball(isolate, heap->undefined_value(), "undefined");
220 221

  // Check ToString for Smis
222 223 224
  CheckSmi(isolate, 0, "0");
  CheckSmi(isolate, 42, "42");
  CheckSmi(isolate, -42, "-42");
225 226

  // Check ToString for Numbers
227
  CheckNumber(isolate, 1.1, "1.1");
228

229
  CheckFindCodeObject(isolate);
230 231 232 233
}


// Checks pointer-tagging invariants: object-pointer alignment is a no-op on
// an already-aligned size, and Smi encoding round-trips at the range limits.
TEST(Tagging) {
  CcTest::InitializeVM();
  int request = 24;
  CHECK_EQ(request, static_cast<int>(OBJECT_POINTER_ALIGN(request)));
  CHECK(Smi::FromInt(42)->IsSmi());
  CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi());
  CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi());
}


// Verifies that objects reachable from the global object survive a scavenge
// while handle-scope-local garbage is collectable, and that properties set
// on surviving objects are retained across GCs.
TEST(GarbageCollection) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();

  HandleScope sc(isolate);
  // Check GC.
  heap->CollectGarbage(NEW_SPACE);

  Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object());
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx");
  Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  Handle<Smi> twenty_four(Smi::FromInt(24), isolate);

  {
    HandleScope inner_scope(isolate);
    // Allocate a function and keep it in global object's property.
    Handle<JSFunction> function = factory->NewFunction(name);
    JSReceiver::SetProperty(global, name, function, NONE, SLOPPY).Check();
    // Allocate an object.  Unrooted after leaving the scope.
    Handle<JSObject> obj = factory->NewJSObject(function);
    JSReceiver::SetProperty(
        obj, prop_name, twenty_three, NONE, SLOPPY).Check();
    JSReceiver::SetProperty(
        obj, prop_namex, twenty_four, NONE, SLOPPY).Check();

    CHECK_EQ(Smi::FromInt(23),
             *Object::GetProperty(obj, prop_name).ToHandleChecked());
    CHECK_EQ(Smi::FromInt(24),
             *Object::GetProperty(obj, prop_namex).ToHandleChecked());
  }

  heap->CollectGarbage(NEW_SPACE);

  // Function should be alive.
  CHECK(JSReceiver::HasOwnProperty(global, name));
  // Check function is retained.
  Handle<Object> func_value =
      Object::GetProperty(global, name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);

  {
    HandleScope inner_scope(isolate);
    // Allocate another object, make it reachable from global.
    Handle<JSObject> obj = factory->NewJSObject(function);
    JSReceiver::SetProperty(global, obj_name, obj, NONE, SLOPPY).Check();
    JSReceiver::SetProperty(
        obj, prop_name, twenty_three, NONE, SLOPPY).Check();
  }

  // After gc, it should survive.
  heap->CollectGarbage(NEW_SPACE);

  CHECK(JSReceiver::HasOwnProperty(global, obj_name));
  Handle<Object> obj =
      Object::GetProperty(global, obj_name).ToHandleChecked();
  CHECK(obj->IsJSObject());
  CHECK_EQ(Smi::FromInt(23),
           *Object::GetProperty(obj, prop_name).ToHandleChecked());
}


310 311
static void VerifyStringAllocation(Isolate* isolate, const char* string) {
  HandleScope scope(isolate);
312 313
  Handle<String> s = isolate->factory()->NewStringFromUtf8(
      CStrVector(string)).ToHandleChecked();
314
  CHECK_EQ(StrLength(string), s->length());
315
  for (int index = 0; index < s->length(); index++) {
316 317
    CHECK_EQ(static_cast<uint16_t>(string[index]), s->Get(index));
  }
318 319 320 321
}


// Verifies string allocation round-trips for a range of short lengths.
TEST(String) {
  CcTest::InitializeVM();
  Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());

  VerifyStringAllocation(isolate, "a");
  VerifyStringAllocation(isolate, "ab");
  VerifyStringAllocation(isolate, "abc");
  VerifyStringAllocation(isolate, "abcd");
  VerifyStringAllocation(isolate, "fiskerdrengen er paa havet");
}


// Verifies that a string created inside a local handle scope has the
// expected length.
TEST(LocalHandles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope scope(CcTest::isolate());
  const char* name = "Kasper the spunky";
  Handle<String> string = factory->NewStringFromAsciiChecked(name);
  CHECK_EQ(StrLength(name), string->length());
}


// Verifies that global handles keep their targets alive across a scavenge
// (even after the creating handle scope is gone) and that handles created
// from the same object remain equal.
TEST(GlobalHandles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  Handle<Object> h1;
  Handle<Object> h2;
  Handle<Object> h3;
  Handle<Object> h4;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticAscii("fisk");
    Handle<Object> u = factory->NewNumber(1.12344);

    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
    h3 = global_handles->Create(*i);
    h4 = global_handles->Create(*u);
  }

  // after gc, it should survive
  heap->CollectGarbage(NEW_SPACE);

  CHECK((*h1)->IsString());
  CHECK((*h2)->IsHeapNumber());
  CHECK((*h3)->IsString());
  CHECK((*h4)->IsHeapNumber());

  CHECK_EQ(*h3, *h1);
  GlobalHandles::Destroy(h1.location());
  GlobalHandles::Destroy(h3.location());

  CHECK_EQ(*h4, *h2);
  GlobalHandles::Destroy(h2.location());
  GlobalHandles::Destroy(h4.location());
}


static bool WeakPointerCleared = false;

389 390 391 392 393 394 395
static void TestWeakGlobalHandleCallback(
    const v8::WeakCallbackData<v8::Value, void>& data) {
  std::pair<v8::Persistent<v8::Value>*, int>* p =
      reinterpret_cast<std::pair<v8::Persistent<v8::Value>*, int>*>(
          data.GetParameter());
  if (p->second == 1234) WeakPointerCleared = true;
  p->first->Reset();
396 397 398 399
}


// A scavenge must treat weak global handles as strong roots: neither handle
// is cleared or near death after a new-space GC.
TEST(WeakGlobalHandlesScavenge) {
  i::FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h1;
  Handle<Object> h2;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticAscii("fisk");
    Handle<Object> u = factory->NewNumber(1.12344);

    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
  }

  std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
  GlobalHandles::MakeWeak(h2.location(),
                          reinterpret_cast<void*>(&handle_and_id),
                          &TestWeakGlobalHandleCallback);

  // Scavenge treats weak pointers as normal roots.
  heap->CollectGarbage(NEW_SPACE);

  CHECK((*h1)->IsString());
  CHECK((*h2)->IsHeapNumber());

  CHECK(!WeakPointerCleared);
  CHECK(!global_handles->IsNearDeath(h2.location()));
  CHECK(!global_handles->IsNearDeath(h1.location()));

  GlobalHandles::Destroy(h1.location());
  GlobalHandles::Destroy(h2.location());
}


// A full mark-compact must clear an unreachable weak global handle (firing
// its callback) while leaving the still-held strong handle alive, even if
// incremental marking visited the handle before it was made weak.
TEST(WeakGlobalHandlesMark) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h1;
  Handle<Object> h2;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticAscii("fisk");
    Handle<Object> u = factory->NewNumber(1.12344);

    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
  }

  // Make sure the objects are promoted.
  heap->CollectGarbage(OLD_POINTER_SPACE);
  heap->CollectGarbage(NEW_SPACE);
  CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2));

  std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
  GlobalHandles::MakeWeak(h2.location(),
                          reinterpret_cast<void*>(&handle_and_id),
                          &TestWeakGlobalHandleCallback);
  CHECK(!GlobalHandles::IsNearDeath(h1.location()));
  CHECK(!GlobalHandles::IsNearDeath(h2.location()));

  // Incremental marking potentially marked handles before they turned weak.
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);

  CHECK((*h1)->IsString());

  CHECK(WeakPointerCleared);
  CHECK(!GlobalHandles::IsNearDeath(h1.location()));

  GlobalHandles::Destroy(h1.location());
}

487

488
TEST(DeleteWeakGlobalHandle) {
489
  i::FLAG_stress_compaction = false;
490
  CcTest::InitializeVM();
491
  Isolate* isolate = CcTest::i_isolate();
492 493 494
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();
495 496 497

  WeakPointerCleared = false;

498 499 500
  Handle<Object> h;

  {
501
    HandleScope scope(isolate);
502

503
    Handle<Object> i = factory->NewStringFromStaticAscii("fisk");
504
    h = global_handles->Create(*i);
505
  }
506

507 508 509 510
  std::pair<Handle<Object>*, int> handle_and_id(&h, 1234);
  GlobalHandles::MakeWeak(h.location(),
                          reinterpret_cast<void*>(&handle_and_id),
                          &TestWeakGlobalHandleCallback);
511 512

  // Scanvenge does not recognize weak reference.
513
  heap->CollectGarbage(NEW_SPACE);
514 515 516 517

  CHECK(!WeakPointerCleared);

  // Mark-compact treats weak reference properly.
518
  heap->CollectGarbage(OLD_POINTER_SPACE);
519 520 521 522

  CHECK(WeakPointerCleared);
}

// NULL-terminated table of JS/Java keyword-like strings used to exercise the
// internalized string table with realistic identifiers.
static const char* not_so_random_string_table[] = {
  "abstract",
  "boolean",
  "break",
  "byte",
  "case",
  "catch",
  "char",
  "class",
  "const",
  "continue",
  "debugger",
  "default",
  "delete",
  "do",
  "double",
  "else",
  "enum",
  "export",
  "extends",
  "false",
  "final",
  "finally",
  "float",
  "for",
  "function",
  "goto",
  "if",
  "implements",
  "import",
  "in",
  "instanceof",
  "int",
  "interface",
  "long",
  "native",
  "new",
  "null",
  "package",
  "private",
  "protected",
  "public",
  "return",
  "short",
  "static",
  "super",
  "switch",
  "synchronized",
  "this",
  "throw",
  "throws",
  "transient",
  "true",
  "try",
  "typeof",
  "var",
  "void",
  "volatile",
  "while",
  "with",
  0
};


588
static void CheckInternalizedStrings(const char** strings) {
589 590
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
591
  for (const char* string = *strings; *strings != 0; string = *strings++) {
592 593 594 595
    HandleScope scope(isolate);
    Handle<String> a =
        isolate->factory()->InternalizeUtf8String(CStrVector(string));
    // InternalizeUtf8String may return a failure if a GC is needed.
596
    CHECK(a->IsInternalizedString());
597 598
    Handle<String> b = factory->InternalizeUtf8String(string);
    CHECK_EQ(*b, *a);
599 600 601 602
    CHECK(b->IsUtf8EqualTo(CStrVector(string)));
    b = isolate->factory()->InternalizeUtf8String(CStrVector(string));
    CHECK_EQ(*b, *a);
    CHECK(b->IsUtf8EqualTo(CStrVector(string)));
603 604 605 606
  }
}


607
TEST(StringTable) {
608
  CcTest::InitializeVM();
609

610
  v8::HandleScope sc(CcTest::isolate());
611 612
  CheckInternalizedStrings(not_so_random_string_table);
  CheckInternalizedStrings(not_so_random_string_table);
613 614 615 616
}


// Verifies that properties can be set both on instances of a freshly
// allocated function and on the function object itself.
TEST(FunctionAllocation) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<JSFunction> function = factory->NewFunction(name);

  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  Handle<Smi> twenty_four(Smi::FromInt(24), isolate);

  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<JSObject> obj = factory->NewJSObject(function);
  JSReceiver::SetProperty(obj, prop_name, twenty_three, NONE, SLOPPY).Check();
  CHECK_EQ(Smi::FromInt(23),
           *Object::GetProperty(obj, prop_name).ToHandleChecked());
  // Check that we can add properties to function objects.
  JSReceiver::SetProperty(
      function, prop_name, twenty_four, NONE, SLOPPY).Check();
  CHECK_EQ(Smi::FromInt(24),
           *Object::GetProperty(function, prop_name).ToHandleChecked());
}


// Exercises add/delete property sequences in both orders on a plain JS
// object, plus equivalence of plain strings and internalized strings as
// property keys.
TEST(ObjectProperties) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> object_string(String::cast(CcTest::heap()->Object_string()));
  Handle<Object> object = Object::GetProperty(
      CcTest::i_isolate()->global_object(), object_string).ToHandleChecked();
  Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
  Handle<JSObject> obj = factory->NewJSObject(constructor);
  Handle<String> first = factory->InternalizeUtf8String("first");
  Handle<String> second = factory->InternalizeUtf8String("second");

  Handle<Smi> one(Smi::FromInt(1), isolate);
  Handle<Smi> two(Smi::FromInt(2), isolate);

  // check for empty
  CHECK(!JSReceiver::HasOwnProperty(obj, first));

  // add first
  JSReceiver::SetProperty(obj, first, one, NONE, SLOPPY).Check();
  CHECK(JSReceiver::HasOwnProperty(obj, first));

  // delete first
  JSReceiver::DeleteProperty(obj, first, JSReceiver::NORMAL_DELETION).Check();
  CHECK(!JSReceiver::HasOwnProperty(obj, first));

  // add first and then second
  JSReceiver::SetProperty(obj, first, one, NONE, SLOPPY).Check();
  JSReceiver::SetProperty(obj, second, two, NONE, SLOPPY).Check();
  CHECK(JSReceiver::HasOwnProperty(obj, first));
  CHECK(JSReceiver::HasOwnProperty(obj, second));

  // delete first and then second
  JSReceiver::DeleteProperty(obj, first, JSReceiver::NORMAL_DELETION).Check();
  CHECK(JSReceiver::HasOwnProperty(obj, second));
  JSReceiver::DeleteProperty(obj, second, JSReceiver::NORMAL_DELETION).Check();
  CHECK(!JSReceiver::HasOwnProperty(obj, first));
  CHECK(!JSReceiver::HasOwnProperty(obj, second));

  // add first and then second
  JSReceiver::SetProperty(obj, first, one, NONE, SLOPPY).Check();
  JSReceiver::SetProperty(obj, second, two, NONE, SLOPPY).Check();
  CHECK(JSReceiver::HasOwnProperty(obj, first));
  CHECK(JSReceiver::HasOwnProperty(obj, second));

  // delete second and then first
  JSReceiver::DeleteProperty(obj, second, JSReceiver::NORMAL_DELETION).Check();
  CHECK(JSReceiver::HasOwnProperty(obj, first));
  JSReceiver::DeleteProperty(obj, first, JSReceiver::NORMAL_DELETION).Check();
  CHECK(!JSReceiver::HasOwnProperty(obj, first));
  CHECK(!JSReceiver::HasOwnProperty(obj, second));

  // check string and internalized string match
  const char* string1 = "fisk";
  Handle<String> s1 = factory->NewStringFromAsciiChecked(string1);
  JSReceiver::SetProperty(obj, s1, one, NONE, SLOPPY).Check();
  Handle<String> s1_string = factory->InternalizeUtf8String(string1);
  CHECK(JSReceiver::HasOwnProperty(obj, s1_string));

  // check internalized string and string match
  const char* string2 = "fugl";
  Handle<String> s2_string = factory->InternalizeUtf8String(string2);
  JSReceiver::SetProperty(obj, s2_string, one, NONE, SLOPPY).Check();
  Handle<String> s2 = factory->NewStringFromAsciiChecked(string2);
  CHECK(JSReceiver::HasOwnProperty(obj, s2));
}


// Verifies that adding a property to a fresh object transitions it away
// from the function's initial map.
TEST(JSObjectMaps) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<JSFunction> function = factory->NewFunction(name);

  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<JSObject> obj = factory->NewJSObject(function);
  Handle<Map> initial_map(function->initial_map());

  // Set a property
  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  JSReceiver::SetProperty(obj, prop_name, twenty_three, NONE, SLOPPY).Check();
  CHECK_EQ(Smi::FromInt(23),
           *Object::GetProperty(obj, prop_name).ToHandleChecked());

  // Check the map has changed
  CHECK(*initial_map != obj->map());
}


// Exercises JSArray length handling: fast elements for small lengths, the
// transition to dictionary (slow) elements when the length exceeds the Smi
// range, and element stores at both ends.
TEST(JSArray) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("Array");
  Handle<Object> fun_obj = Object::GetProperty(
      CcTest::i_isolate()->global_object(), name).ToHandleChecked();
  Handle<JSFunction> function = Handle<JSFunction>::cast(fun_obj);

  // Allocate the object.
  Handle<Object> element;
  Handle<JSObject> object = factory->NewJSObject(function);
  Handle<JSArray> array = Handle<JSArray>::cast(object);
  // We just initialized the VM, no heap allocation failure yet.
  JSArray::Initialize(array, 0);

  // Set array length to 0.
  JSArray::SetElementsLength(array, handle(Smi::FromInt(0), isolate)).Check();
  CHECK_EQ(Smi::FromInt(0), array->length());
  // Must be in fast mode.
  CHECK(array->HasFastSmiOrObjectElements());

  // array[length] = name.
  JSReceiver::SetElement(array, 0, name, NONE, SLOPPY).Check();
  CHECK_EQ(Smi::FromInt(1), array->length());
  element = i::Object::GetElement(isolate, array, 0).ToHandleChecked();
  CHECK_EQ(*element, *name);

  // Set array length with larger than smi value.
  Handle<Object> length =
      factory->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
  JSArray::SetElementsLength(array, length).Check();

  uint32_t int_length = 0;
  CHECK(length->ToArrayIndex(&int_length));
  CHECK_EQ(*length, array->length());
  CHECK(array->HasDictionaryElements());  // Must be in slow mode.

  // array[length] = name.
  JSReceiver::SetElement(array, int_length, name, NONE, SLOPPY).Check();
  uint32_t new_int_length = 0;
  CHECK(array->length()->ToArrayIndex(&new_int_length));
  CHECK_EQ(static_cast<double>(int_length), new_int_length - 1);
  element = Object::GetElement(isolate, array, int_length).ToHandleChecked();
  CHECK_EQ(*element, *name);
  element = Object::GetElement(isolate, array, 0).ToHandleChecked();
  CHECK_EQ(*element, *name);
}


// Verifies that CopyJSObject produces an independent clone: properties and
// elements initially match, and mutating the clone does not affect the
// original.
TEST(JSObjectCopy) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> object_string(String::cast(CcTest::heap()->Object_string()));
  Handle<Object> object = Object::GetProperty(
      CcTest::i_isolate()->global_object(), object_string).ToHandleChecked();
  Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
  Handle<JSObject> obj = factory->NewJSObject(constructor);
  Handle<String> first = factory->InternalizeUtf8String("first");
  Handle<String> second = factory->InternalizeUtf8String("second");

  Handle<Smi> one(Smi::FromInt(1), isolate);
  Handle<Smi> two(Smi::FromInt(2), isolate);

  JSReceiver::SetProperty(obj, first, one, NONE, SLOPPY).Check();
  JSReceiver::SetProperty(obj, second, two, NONE, SLOPPY).Check();

  JSReceiver::SetElement(obj, 0, first, NONE, SLOPPY).Check();
  JSReceiver::SetElement(obj, 1, second, NONE, SLOPPY).Check();

  // Make the clone.
  Handle<Object> value1, value2;
  Handle<JSObject> clone = factory->CopyJSObject(obj);
  CHECK(!clone.is_identical_to(obj));

  value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
  CHECK_EQ(*value1, *value2);

  value1 = Object::GetProperty(obj, first).ToHandleChecked();
  value2 = Object::GetProperty(clone, first).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetProperty(obj, second).ToHandleChecked();
  value2 = Object::GetProperty(clone, second).ToHandleChecked();
  CHECK_EQ(*value1, *value2);

  // Flip the values.
  JSReceiver::SetProperty(clone, first, two, NONE, SLOPPY).Check();
  JSReceiver::SetProperty(clone, second, one, NONE, SLOPPY).Check();

  JSReceiver::SetElement(clone, 0, second, NONE, SLOPPY).Check();
  JSReceiver::SetElement(clone, 1, first, NONE, SLOPPY).Check();

  // The original keeps its values; the clone has the flipped ones.
  value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
  CHECK_EQ(*value1, *value2);

  value1 = Object::GetProperty(obj, second).ToHandleChecked();
  value2 = Object::GetProperty(clone, first).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetProperty(obj, first).ToHandleChecked();
  value2 = Object::GetProperty(clone, second).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
}


// Allocates internalized and non-internalized strings of increasing length,
// both one-byte ASCII and multi-byte UTF-8 (each logical character encoded
// as 3 bytes), and checks the reported character lengths.
TEST(StringAllocation) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  // UTF-8 encoding of a single 3-byte character.
  const unsigned char chars[] = { 0xe5, 0xa4, 0xa7 };
  for (int length = 0; length < 100; length++) {
    v8::HandleScope scope(CcTest::isolate());
    char* non_ascii = NewArray<char>(3 * length + 1);
    char* ascii = NewArray<char>(length + 1);
    non_ascii[3 * length] = 0;
    ascii[length] = 0;
    for (int i = 0; i < length; i++) {
      ascii[i] = 'a';
      non_ascii[3 * i] = chars[0];
      non_ascii[3 * i + 1] = chars[1];
      non_ascii[3 * i + 2] = chars[2];
    }
    Handle<String> non_ascii_sym =
        factory->InternalizeUtf8String(
            Vector<const char>(non_ascii, 3 * length));
    CHECK_EQ(length, non_ascii_sym->length());
    Handle<String> ascii_sym =
        factory->InternalizeOneByteString(OneByteVector(ascii, length));
    CHECK_EQ(length, ascii_sym->length());
    Handle<String> non_ascii_str = factory->NewStringFromUtf8(
        Vector<const char>(non_ascii, 3 * length)).ToHandleChecked();
    non_ascii_str->Hash();
    CHECK_EQ(length, non_ascii_str->length());
    Handle<String> ascii_str = factory->NewStringFromUtf8(
        Vector<const char>(ascii, length)).ToHandleChecked();
    ascii_str->Hash();
    CHECK_EQ(length, ascii_str->length());
    DeleteArray(non_ascii);
    DeleteArray(ascii);
  }
}


891
static int ObjectsFoundInHeap(Heap* heap, Handle<Object> objs[], int size) {
892 893
  // Count the number of objects found in the heap.
  int found_count = 0;
894
  HeapIterator iterator(heap);
895
  for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
896 897 898 899 900 901 902 903 904 905 906
    for (int i = 0; i < size; i++) {
      if (*objs[i] == obj) {
        found_count++;
      }
    }
  }
  return found_count;
}


// Checks that heap iteration finds objects allocated in every space:
// new space, old pointer/data space, and large object space.
TEST(Iteration) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());

  // Array of objects to scan heap for.
  const int objs_count = 6;
  Handle<Object> objs[objs_count];
  int next_objs_index = 0;

  // Allocate a JS array to OLD_POINTER_SPACE and NEW_SPACE
  objs[next_objs_index++] = factory->NewJSArray(10);
  objs[next_objs_index++] = factory->NewJSArray(10,
                                                FAST_HOLEY_ELEMENTS,
                                                TENURED);

  // Allocate a small string to OLD_DATA_SPACE and NEW_SPACE
  objs[next_objs_index++] =
      factory->NewStringFromStaticAscii("abcdefghij");
  objs[next_objs_index++] =
      factory->NewStringFromStaticAscii("abcdefghij", TENURED);

  // Allocate a large string (for large object space).
  int large_size = Page::kMaxRegularHeapObjectSize + 1;
  char* str = new char[large_size];
  for (int i = 0; i < large_size - 1; ++i) str[i] = 'a';
  str[large_size - 1] = '\0';
  objs[next_objs_index++] = factory->NewStringFromAsciiChecked(str, TENURED);
  delete[] str;

  // Add a Map object to look for.
  objs[next_objs_index++] = Handle<Map>(HeapObject::cast(*objs[0])->map());

  CHECK_EQ(objs_count, next_objs_index);
  CHECK_EQ(objs_count, ObjectsFoundInHeap(CcTest::heap(), objs, objs_count));
}


945
TEST(EmptyHandleEscapeFrom) {
946
  CcTest::InitializeVM();
947

948
  v8::HandleScope scope(CcTest::isolate());
949 950 951
  Handle<JSObject> runaway;

  {
952
      v8::EscapableHandleScope nested(CcTest::isolate());
953 954 955 956 957 958
      Handle<JSObject> empty;
      runaway = empty.EscapeFrom(&nested);
  }

  CHECK(runaway.is_null());
}
959 960 961 962 963 964 965 966 967


// Number of FixedArray elements that fit in an allocation of |size| bytes;
// the inverse of FixedArray::SizeFor.
static int LenFromSize(int size) {
  const int payload_bytes = size - FixedArray::kHeaderSize;
  return payload_bytes / kPointerSize;
}


// Regression test: cloning a JSObject that holds a new-space reference into
// old pointer space must update region dirty marks correctly.
TEST(Regression39128) {
  // Test case for crbug.com/39128.
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  TestHeap* heap = CcTest::test_heap();

  // Increase the chance of 'bump-the-pointer' allocation in old space.
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);

  v8::HandleScope scope(CcTest::isolate());

  // The plan: create JSObject which references objects in new space.
  // Then clone this object (forcing it to go into old space) and check
  // that region dirty marks are updated correctly.

  // Step 1: prepare a map for the object.  We add 1 inobject property to it.
  Handle<JSFunction> object_ctor(
      CcTest::i_isolate()->native_context()->object_function());
  CHECK(object_ctor->has_initial_map());
  // Create a map with single inobject property.
  Handle<Map> my_map = Map::Create(object_ctor, 1);
  int n_properties = my_map->inobject_properties();
  CHECK_GT(n_properties, 0);

  int object_size = my_map->instance_size();

  // Step 2: allocate a lot of objects so to almost fill new space: we need
  // just enough room to allocate JSObject and thus fill the newspace.

  int allocation_amount = Min(FixedArray::kMaxSize,
                              Page::kMaxRegularHeapObjectSize + kPointerSize);
  int allocation_len = LenFromSize(allocation_amount);
  NewSpace* new_space = heap->new_space();
  Address* top_addr = new_space->allocation_top_address();
  Address* limit_addr = new_space->allocation_limit_address();
  while ((*limit_addr - *top_addr) > allocation_amount) {
    CHECK(!heap->always_allocate());
    Object* array = heap->AllocateFixedArray(allocation_len).ToObjectChecked();
    CHECK(new_space->Contains(array));
  }

  // Step 3: now allocate fixed array and JSObject to fill the whole new space.
  int to_fill = static_cast<int>(*limit_addr - *top_addr - object_size);
  int fixed_array_len = LenFromSize(to_fill);
  CHECK(fixed_array_len < FixedArray::kMaxLength);

  CHECK(!heap->always_allocate());
  Object* array = heap->AllocateFixedArray(fixed_array_len).ToObjectChecked();
  CHECK(new_space->Contains(array));

  Object* object = heap->AllocateJSObjectFromMap(*my_map).ToObjectChecked();
  CHECK(new_space->Contains(object));
  JSObject* jsobject = JSObject::cast(object);
  CHECK_EQ(0, FixedArray::cast(jsobject->elements())->length());
  CHECK_EQ(0, jsobject->properties()->length());
  // Create a reference to object in new space in jsobject.
  FieldIndex index = FieldIndex::ForInObjectOffset(
      JSObject::kHeaderSize - kPointerSize);
  jsobject->FastPropertyAtPut(index, array);

  CHECK_EQ(0, static_cast<int>(*limit_addr - *top_addr));

  // Step 4: clone jsobject, but force always allocate first to create a clone
  // in old pointer space.
  Address old_pointer_space_top = heap->old_pointer_space()->top();
  AlwaysAllocateScope aa_scope(isolate);
  Object* clone_obj = heap->CopyJSObject(jsobject).ToObjectChecked();
  JSObject* clone = JSObject::cast(clone_obj);
  if (clone->address() != old_pointer_space_top) {
    // Alas, got allocated from free list, we cannot do checks.
    return;
  }
  CHECK(heap->old_pointer_space()->Contains(clone->address()));
}


1042
TEST(TestCodeFlushing) {
1043 1044
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
1045
  i::FLAG_allow_natives_syntax = true;
1046
  i::FLAG_optimize_for_size = false;
1047
  CcTest::InitializeVM();
1048
  Isolate* isolate = CcTest::i_isolate();
1049
  Factory* factory = isolate->factory();
1050
  v8::HandleScope scope(CcTest::isolate());
1051 1052 1053 1054 1055 1056
  const char* source = "function foo() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo()";
1057
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1058 1059

  // This compile will add the code to the compilation cache.
1060
  { v8::HandleScope scope(CcTest::isolate());
1061 1062
    CompileRun(source);
  }
1063 1064

  // Check function is compiled.
1065 1066
  Handle<Object> func_value = Object::GetProperty(
      CcTest::i_isolate()->global_object(), foo_name).ToHandleChecked();
1067
  CHECK(func_value->IsJSFunction());
1068
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1069 1070
  CHECK(function->shared()->is_compiled());

1071
  // The code will survive at least two GCs.
1072 1073
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1074
  CHECK(function->shared()->is_compiled());
1075

1076 1077 1078
  // Simulate several GCs that use full marking.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
1079
    CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1080
  }
1081 1082

  // foo should no longer be in the compilation cache
1083 1084
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());
1085 1086 1087
  // Call foo to get it recompiled.
  CompileRun("foo()");
  CHECK(function->shared()->is_compiled());
1088
  CHECK(function->is_compiled());
1089
}
1090 1091


1092 1093 1094 1095 1096 1097
TEST(TestCodeFlushingPreAged) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = true;
  CcTest::InitializeVM();
1098
  Isolate* isolate = CcTest::i_isolate();
1099 1100 1101 1102 1103 1104 1105 1106 1107 1108 1109 1110 1111 1112 1113 1114
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "function foo() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // Compile foo, but don't run it.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
1115
  Handle<Object> func_value =
1116
      Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1117
  CHECK(func_value->IsJSFunction());
1118
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1119 1120 1121 1122 1123 1124 1125 1126 1127 1128 1129 1130 1131 1132 1133 1134 1135 1136 1137 1138 1139 1140 1141 1142 1143 1144 1145 1146 1147 1148 1149 1150 1151 1152 1153 1154 1155 1156
  CHECK(function->shared()->is_compiled());

  // The code has been run so will survive at least one GC.
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CHECK(function->shared()->is_compiled());

  // The code was only run once, so it should be pre-aged and collected on the
  // next GC.
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());

  // Execute the function again twice, and ensure it is reset to the young age.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("foo();"
               "foo();");
  }

  // The code will survive at least two GC now that it is young again.
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CHECK(function->shared()->is_compiled());

  // Simulate several GCs that use full marking.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  }

  // foo should no longer be in the compilation cache
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());
  // Call foo to get it recompiled.
  CompileRun("foo()");
  CHECK(function->shared()->is_compiled());
  CHECK(function->is_compiled());
}


1157 1158
TEST(TestCodeFlushingIncremental) {
  // If we do not flush code this test is invalid.
1159
  if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
1160
  i::FLAG_allow_natives_syntax = true;
1161
  i::FLAG_optimize_for_size = false;
1162
  CcTest::InitializeVM();
1163
  Isolate* isolate = CcTest::i_isolate();
1164
  Factory* factory = isolate->factory();
1165
  v8::HandleScope scope(CcTest::isolate());
1166 1167 1168 1169 1170 1171
  const char* source = "function foo() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo()";
1172
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1173 1174

  // This compile will add the code to the compilation cache.
1175
  { v8::HandleScope scope(CcTest::isolate());
1176 1177 1178 1179
    CompileRun(source);
  }

  // Check function is compiled.
1180
  Handle<Object> func_value =
1181
      Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1182
  CHECK(func_value->IsJSFunction());
1183
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1184 1185 1186
  CHECK(function->shared()->is_compiled());

  // The code will survive at least two GCs.
1187 1188
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1189 1190 1191 1192 1193 1194
  CHECK(function->shared()->is_compiled());

  // Simulate several GCs that use incremental marking.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    SimulateIncrementalMarking();
1195
    CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1196 1197 1198 1199 1200
  }
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());

  // This compile will compile the function again.
1201
  { v8::HandleScope scope(CcTest::isolate());
1202 1203 1204 1205 1206 1207 1208 1209
    CompileRun("foo();");
  }

  // Simulate several GCs that use incremental marking but make sure
  // the loop breaks once the function is enqueued as a candidate.
  for (int i = 0; i < kAgingThreshold; i++) {
    SimulateIncrementalMarking();
    if (!function->next_function_link()->IsUndefined()) break;
1210
    CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1211 1212 1213 1214
  }

  // Force optimization while incremental marking is active and while
  // the function is enqueued as a candidate.
1215
  { v8::HandleScope scope(CcTest::isolate());
1216 1217 1218 1219
    CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
  }

  // Simulate one final GC to make sure the candidate queue is sane.
1220
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1221 1222 1223 1224 1225
  CHECK(function->shared()->is_compiled() || !function->IsOptimized());
  CHECK(function->is_compiled() || !function->IsOptimized());
}


1226 1227
TEST(TestCodeFlushingIncrementalScavenge) {
  // If we do not flush code this test is invalid.
1228
  if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
1229
  i::FLAG_allow_natives_syntax = true;
1230
  i::FLAG_optimize_for_size = false;
1231
  CcTest::InitializeVM();
1232
  Isolate* isolate = CcTest::i_isolate();
1233
  Factory* factory = isolate->factory();
1234
  v8::HandleScope scope(CcTest::isolate());
1235 1236 1237 1238 1239 1240 1241 1242 1243 1244
  const char* source = "var foo = function() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo();"
                       "var bar = function() {"
                       "  var x = 23;"
                       "};"
                       "bar();";
1245 1246
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");
  Handle<String> bar_name = factory->InternalizeUtf8String("bar");
1247 1248

  // Perfrom one initial GC to enable code flushing.
1249
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1250 1251

  // This compile will add the code to the compilation cache.
1252
  { v8::HandleScope scope(CcTest::isolate());
1253 1254 1255 1256
    CompileRun(source);
  }

  // Check functions are compiled.
1257
  Handle<Object> func_value =
1258
      Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1259
  CHECK(func_value->IsJSFunction());
1260
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1261
  CHECK(function->shared()->is_compiled());
1262
  Handle<Object> func_value2 =
1263
      Object::GetProperty(isolate->global_object(), bar_name).ToHandleChecked();
1264
  CHECK(func_value2->IsJSFunction());
1265
  Handle<JSFunction> function2 = Handle<JSFunction>::cast(func_value2);
1266 1267 1268
  CHECK(function2->shared()->is_compiled());

  // Clear references to functions so that one of them can die.
1269
  { v8::HandleScope scope(CcTest::isolate());
1270 1271 1272 1273 1274 1275
    CompileRun("foo = 0; bar = 0;");
  }

  // Bump the code age so that flushing is triggered while the function
  // object is still located in new-space.
  const int kAgingThreshold = 6;
1276 1277 1278 1279
  for (int i = 0; i < kAgingThreshold; i++) {
    function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
  }
1280 1281 1282 1283 1284 1285

  // Simulate incremental marking so that the functions are enqueued as
  // code flushing candidates. Then kill one of the functions. Finally
  // perform a scavenge while incremental marking is still running.
  SimulateIncrementalMarking();
  *function2.location() = NULL;
1286
  CcTest::heap()->CollectGarbage(NEW_SPACE, "test scavenge while marking");
1287 1288

  // Simulate one final GC to make sure the candidate queue is sane.
1289
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1290 1291 1292 1293 1294
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());
}


1295 1296 1297 1298
TEST(TestCodeFlushingIncrementalAbort) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
  i::FLAG_allow_natives_syntax = true;
1299
  i::FLAG_optimize_for_size = false;
1300
  CcTest::InitializeVM();
1301
  Isolate* isolate = CcTest::i_isolate();
1302
  Factory* factory = isolate->factory();
1303
  Heap* heap = isolate->heap();
1304
  v8::HandleScope scope(CcTest::isolate());
1305 1306 1307 1308 1309 1310
  const char* source = "function foo() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo()";
1311
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1312 1313

  // This compile will add the code to the compilation cache.
1314
  { v8::HandleScope scope(CcTest::isolate());
1315 1316 1317 1318
    CompileRun(source);
  }

  // Check function is compiled.
1319
  Handle<Object> func_value =
1320
      Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1321
  CHECK(func_value->IsJSFunction());
1322
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1323 1324 1325
  CHECK(function->shared()->is_compiled());

  // The code will survive at least two GCs.
1326 1327
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1328 1329 1330 1331
  CHECK(function->shared()->is_compiled());

  // Bump the code age so that flushing is triggered.
  const int kAgingThreshold = 6;
1332 1333 1334
  for (int i = 0; i < kAgingThreshold; i++) {
    function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
  }
1335 1336 1337 1338 1339 1340 1341 1342 1343

  // Simulate incremental marking so that the function is enqueued as
  // code flushing candidate.
  SimulateIncrementalMarking();

  // Enable the debugger and add a breakpoint while incremental marking
  // is running so that incremental marking aborts and code flushing is
  // disabled.
  int position = 0;
1344
  Handle<Object> breakpoint_object(Smi::FromInt(0), isolate);
1345 1346
  isolate->debug()->SetBreakPoint(function, breakpoint_object, &position);
  isolate->debug()->ClearAllBreakPoints();
1347 1348

  // Force optimization now that code flushing is disabled.
1349
  { v8::HandleScope scope(CcTest::isolate());
1350 1351 1352 1353
    CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
  }

  // Simulate one final GC to make sure the candidate queue is sane.
1354
  heap->CollectAllGarbage(Heap::kNoGCFlags);
1355 1356 1357 1358 1359
  CHECK(function->shared()->is_compiled() || !function->IsOptimized());
  CHECK(function->is_compiled() || !function->IsOptimized());
}


1360 1361
// Count the number of native contexts in the weak list of native contexts.
int CountNativeContexts() {
1362
  int count = 0;
1363
  Object* object = CcTest::heap()->native_contexts_list();
1364 1365 1366 1367 1368 1369 1370 1371
  while (!object->IsUndefined()) {
    count++;
    object = Context::cast(object)->get(Context::NEXT_CONTEXT_LINK);
  }
  return count;
}


1372
// Count the number of user functions in the weak list of optimized
1373
// functions attached to a native context.
1374 1375 1376 1377 1378 1379 1380 1381 1382 1383 1384 1385
static int CountOptimizedUserFunctions(v8::Handle<v8::Context> context) {
  int count = 0;
  Handle<Context> icontext = v8::Utils::OpenHandle(*context);
  Object* object = icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST);
  while (object->IsJSFunction() && !JSFunction::cast(object)->IsBuiltin()) {
    count++;
    object = JSFunction::cast(object)->next_function_link();
  }
  return count;
}


1386
TEST(TestInternalWeakLists) {
1387 1388
  v8::V8::Initialize();

1389 1390 1391 1392
  // Some flags turn Scavenge collections into Mark-sweep collections
  // and hence are incompatible with this test case.
  if (FLAG_gc_global || FLAG_stress_compaction) return;

1393 1394
  static const int kNumTestContexts = 10;

1395
  Isolate* isolate = CcTest::i_isolate();
1396
  Heap* heap = isolate->heap();
1397
  HandleScope scope(isolate);
1398
  v8::Handle<v8::Context> ctx[kNumTestContexts];
1399

1400
  CHECK_EQ(0, CountNativeContexts());
1401 1402 1403

  // Create a number of global contests which gets linked together.
  for (int i = 0; i < kNumTestContexts; i++) {
1404
    ctx[i] = v8::Context::New(CcTest::isolate());
1405

1406 1407 1408 1409 1410
    // Collect garbage that might have been created by one of the
    // installed extensions.
    isolate->compilation_cache()->Clear();
    heap->CollectAllGarbage(Heap::kNoGCFlags);

1411
    bool opt = (FLAG_always_opt && isolate->use_crankshaft());
1412

1413
    CHECK_EQ(i + 1, CountNativeContexts());
1414 1415

    ctx[i]->Enter();
1416 1417 1418

    // Create a handle scope so no function objects get stuch in the outer
    // handle scope
1419
    HandleScope scope(isolate);
1420 1421 1422 1423 1424 1425 1426 1427 1428 1429 1430 1431 1432 1433 1434 1435 1436 1437 1438 1439 1440 1441 1442
    const char* source = "function f1() { };"
                         "function f2() { };"
                         "function f3() { };"
                         "function f4() { };"
                         "function f5() { };";
    CompileRun(source);
    CHECK_EQ(0, CountOptimizedUserFunctions(ctx[i]));
    CompileRun("f1()");
    CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctions(ctx[i]));
    CompileRun("f2()");
    CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[i]));
    CompileRun("f3()");
    CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
    CompileRun("f4()");
    CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
    CompileRun("f5()");
    CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[i]));

    // Remove function f1, and
    CompileRun("f1=null");

    // Scavenge treats these references as strong.
    for (int j = 0; j < 10; j++) {
1443
      CcTest::heap()->CollectGarbage(NEW_SPACE);
1444 1445 1446 1447
      CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[i]));
    }

    // Mark compact handles the weak references.
1448 1449
    isolate->compilation_cache()->Clear();
    heap->CollectAllGarbage(Heap::kNoGCFlags);
1450 1451 1452 1453 1454
    CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));

    // Get rid of f3 and f5 in the same way.
    CompileRun("f3=null");
    for (int j = 0; j < 10; j++) {
1455
      CcTest::heap()->CollectGarbage(NEW_SPACE);
1456 1457
      CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
    }
1458
    CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1459 1460 1461
    CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
    CompileRun("f5=null");
    for (int j = 0; j < 10; j++) {
1462
      CcTest::heap()->CollectGarbage(NEW_SPACE);
1463 1464
      CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
    }
1465
    CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1466 1467
    CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[i]));

1468 1469 1470
    ctx[i]->Exit();
  }

1471
  // Force compilation cache cleanup.
1472 1473
  CcTest::heap()->NotifyContextDisposed();
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1474

1475
  // Dispose the native contexts one by one.
1476
  for (int i = 0; i < kNumTestContexts; i++) {
1477 1478
    // TODO(dcarney): is there a better way to do this?
    i::Object** unsafe = reinterpret_cast<i::Object**>(*ctx[i]);
1479
    *unsafe = CcTest::heap()->undefined_value();
1480 1481 1482 1483
    ctx[i].Clear();

    // Scavenge treats these references as strong.
    for (int j = 0; j < 10; j++) {
1484
      CcTest::heap()->CollectGarbage(i::NEW_SPACE);
1485
      CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
1486 1487 1488
    }

    // Mark compact handles the weak references.
1489
    CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1490
    CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts());
1491 1492
  }

1493
  CHECK_EQ(0, CountNativeContexts());
1494 1495 1496
}


1497
// Count the number of native contexts in the weak list of native contexts
1498
// causing a GC after the specified number of elements.
1499 1500
static int CountNativeContextsWithGC(Isolate* isolate, int n) {
  Heap* heap = isolate->heap();
1501
  int count = 0;
1502
  Handle<Object> object(heap->native_contexts_list(), isolate);
1503 1504
  while (!object->IsUndefined()) {
    count++;
1505
    if (count == n) heap->CollectAllGarbage(Heap::kNoGCFlags);
1506
    object =
1507 1508
        Handle<Object>(Context::cast(*object)->get(Context::NEXT_CONTEXT_LINK),
                       isolate);
1509 1510 1511 1512 1513
  }
  return count;
}


1514
// Count the number of user functions in the weak list of optimized
1515
// functions attached to a native context causing a GC after the
1516 1517 1518 1519 1520
// specified number of elements.
static int CountOptimizedUserFunctionsWithGC(v8::Handle<v8::Context> context,
                                             int n) {
  int count = 0;
  Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1521 1522 1523
  Isolate* isolate = icontext->GetIsolate();
  Handle<Object> object(icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST),
                        isolate);
1524 1525 1526
  while (object->IsJSFunction() &&
         !Handle<JSFunction>::cast(object)->IsBuiltin()) {
    count++;
1527
    if (count == n) isolate->heap()->CollectAllGarbage(Heap::kNoGCFlags);
1528
    object = Handle<Object>(
1529 1530
        Object::cast(JSFunction::cast(*object)->next_function_link()),
        isolate);
1531 1532 1533 1534 1535
  }
  return count;
}


1536
TEST(TestInternalWeakListsTraverseWithGC) {
1537
  v8::V8::Initialize();
1538
  Isolate* isolate = CcTest::i_isolate();
1539

1540 1541
  static const int kNumTestContexts = 10;

1542
  HandleScope scope(isolate);
1543
  v8::Handle<v8::Context> ctx[kNumTestContexts];
1544

1545
  CHECK_EQ(0, CountNativeContexts());
1546 1547 1548 1549

  // Create an number of contexts and check the length of the weak list both
  // with and without GCs while iterating the list.
  for (int i = 0; i < kNumTestContexts; i++) {
1550
    ctx[i] = v8::Context::New(CcTest::isolate());
1551
    CHECK_EQ(i + 1, CountNativeContexts());
1552
    CHECK_EQ(i + 1, CountNativeContextsWithGC(isolate, i / 2 + 1));
1553
  }
1554

1555
  bool opt = (FLAG_always_opt && isolate->use_crankshaft());
1556 1557 1558 1559 1560 1561 1562 1563 1564 1565 1566 1567 1568 1569 1570 1571 1572 1573 1574 1575 1576 1577 1578 1579 1580 1581 1582 1583

  // Compile a number of functions the length of the weak list of optimized
  // functions both with and without GCs while iterating the list.
  ctx[0]->Enter();
  const char* source = "function f1() { };"
                       "function f2() { };"
                       "function f3() { };"
                       "function f4() { };"
                       "function f5() { };";
  CompileRun(source);
  CHECK_EQ(0, CountOptimizedUserFunctions(ctx[0]));
  CompileRun("f1()");
  CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
  CompileRun("f2()");
  CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
  CompileRun("f3()");
  CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
  CompileRun("f4()");
  CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 2));
  CompileRun("f5()");
  CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 4));

  ctx[0]->Exit();
1584
}
1585 1586


1587 1588 1589 1590 1591
TEST(TestSizeOfObjects) {
  v8::V8::Initialize();

  // Get initial heap size after several full GCs, which will stabilize
  // the heap size and return with sweeping finished completely.
1592 1593 1594 1595 1596
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1597
  MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
1598 1599
  if (collector->IsConcurrentSweepingInProgress()) {
    collector->WaitUntilSweepingCompleted();
1600
  }
1601
  int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());
1602 1603 1604

  {
    // Allocate objects on several different old-space pages so that
1605 1606
    // concurrent sweeper threads will be busy sweeping the old space on
    // subsequent GC runs.
1607
    AlwaysAllocateScope always_allocate(CcTest::i_isolate());
1608
    int filler_size = static_cast<int>(FixedArray::SizeFor(8192));
1609
    for (int i = 1; i <= 100; i++) {
1610
      CcTest::test_heap()->AllocateFixedArray(8192, TENURED).ToObjectChecked();
1611
      CHECK_EQ(initial_size + i * filler_size,
1612
               static_cast<int>(CcTest::heap()->SizeOfObjects()));
1613 1614 1615 1616 1617
    }
  }

  // The heap size should go back to initial size after a full GC, even
  // though sweeping didn't finish yet.
1618
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1619 1620 1621

  // Normally sweeping would not be complete here, but no guarantees.

1622
  CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));
1623

1624
  // Waiting for sweeper threads should not change heap size.
1625 1626
  if (collector->IsConcurrentSweepingInProgress()) {
    collector->WaitUntilSweepingCompleted();
1627
  }
1628
  CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));
1629 1630 1631
}


1632
TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
1633
  CcTest::InitializeVM();
1634
  HeapIterator iterator(CcTest::heap());
1635
  intptr_t size_of_objects_1 = CcTest::heap()->SizeOfObjects();
1636 1637 1638 1639
  intptr_t size_of_objects_2 = 0;
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next()) {
1640 1641 1642
    if (!obj->IsFreeSpace()) {
      size_of_objects_2 += obj->Size();
    }
1643
  }
1644 1645 1646 1647
  // Delta must be within 5% of the larger result.
  // TODO(gc): Tighten this up by distinguishing between byte
  // arrays that are real and those that merely mark free space
  // on the heap.
1648 1649 1650 1651 1652 1653
  if (size_of_objects_1 > size_of_objects_2) {
    intptr_t delta = size_of_objects_1 - size_of_objects_2;
    PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
           "Iterator: %" V8_PTR_PREFIX "d, "
           "delta: %" V8_PTR_PREFIX "d\n",
           size_of_objects_1, size_of_objects_2, delta);
1654
    CHECK_GT(size_of_objects_1 / 20, delta);
1655 1656 1657 1658 1659 1660
  } else {
    intptr_t delta = size_of_objects_2 - size_of_objects_1;
    PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
           "Iterator: %" V8_PTR_PREFIX "d, "
           "delta: %" V8_PTR_PREFIX "d\n",
           size_of_objects_1, size_of_objects_2, delta);
1661
    CHECK_GT(size_of_objects_2 / 20, delta);
1662 1663
  }
}
1664 1665


1666 1667 1668
static void FillUpNewSpace(NewSpace* new_space) {
  // Fill up new space to the point that it is completely full. Make sure
  // that the scavenger does not undo the filling.
1669 1670 1671 1672
  Heap* heap = new_space->heap();
  Isolate* isolate = heap->isolate();
  Factory* factory = isolate->factory();
  HandleScope scope(isolate);
1673
  AlwaysAllocateScope always_allocate(isolate);
1674
  intptr_t available = new_space->EffectiveCapacity() - new_space->Size();
1675
  intptr_t number_of_fillers = (available / FixedArray::SizeFor(32)) - 1;
1676
  for (intptr_t i = 0; i < number_of_fillers; i++) {
1677
    CHECK(heap->InNewSpace(*factory->NewFixedArray(32, NOT_TENURED)));
1678 1679 1680 1681
  }
}


1682
TEST(GrowAndShrinkNewSpace) {
1683
  CcTest::InitializeVM();
1684 1685
  Heap* heap = CcTest::heap();
  NewSpace* new_space = heap->new_space();
1686

1687 1688
  if (heap->ReservedSemiSpaceSize() == heap->InitialSemiSpaceSize() ||
      heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
1689 1690 1691 1692 1693 1694
    // The max size cannot exceed the reserved size, since semispaces must be
    // always within the reserved space.  We can't test new space growing and
    // shrinking if the reserved size is the same as the minimum (initial) size.
    return;
  }

1695
  // Explicitly growing should double the space capacity.
1696
  intptr_t old_capacity, new_capacity;
1697 1698 1699
  old_capacity = new_space->Capacity();
  new_space->Grow();
  new_capacity = new_space->Capacity();
1700
  CHECK(2 * old_capacity == new_capacity);
1701

1702
  old_capacity = new_space->Capacity();
1703
  FillUpNewSpace(new_space);
1704
  new_capacity = new_space->Capacity();
1705
  CHECK(old_capacity == new_capacity);
1706 1707 1708 1709 1710

  // Explicitly shrinking should not affect space capacity.
  old_capacity = new_space->Capacity();
  new_space->Shrink();
  new_capacity = new_space->Capacity();
1711
  CHECK(old_capacity == new_capacity);
1712

1713
  // Let the scavenger empty the new space.
1714
  heap->CollectGarbage(NEW_SPACE);
1715
  CHECK_LE(new_space->Size(), old_capacity);
1716 1717 1718 1719 1720

  // Explicitly shrinking should halve the space capacity.
  old_capacity = new_space->Capacity();
  new_space->Shrink();
  new_capacity = new_space->Capacity();
1721
  CHECK(old_capacity == 2 * new_capacity);
1722 1723 1724 1725 1726 1727 1728

  // Consecutive shrinking should not affect space capacity.
  old_capacity = new_space->Capacity();
  new_space->Shrink();
  new_space->Shrink();
  new_space->Shrink();
  new_capacity = new_space->Capacity();
1729
  CHECK(old_capacity == new_capacity);
1730
}
1731 1732 1733


TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
1734
  CcTest::InitializeVM();
1735 1736 1737
  Heap* heap = CcTest::heap();
  if (heap->ReservedSemiSpaceSize() == heap->InitialSemiSpaceSize() ||
      heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
1738 1739 1740 1741 1742 1743
    // The max size cannot exceed the reserved size, since semispaces must be
    // always within the reserved space.  We can't test new space growing and
    // shrinking if the reserved size is the same as the minimum (initial) size.
    return;
  }

1744
  v8::HandleScope scope(CcTest::isolate());
1745
  NewSpace* new_space = heap->new_space();
1746 1747 1748 1749 1750 1751
  intptr_t old_capacity, new_capacity;
  old_capacity = new_space->Capacity();
  new_space->Grow();
  new_capacity = new_space->Capacity();
  CHECK(2 * old_capacity == new_capacity);
  FillUpNewSpace(new_space);
1752
  heap->CollectAllAvailableGarbage();
1753 1754 1755
  new_capacity = new_space->Capacity();
  CHECK(old_capacity == new_capacity);
}
1756

1757 1758 1759

static int NumberOfGlobalObjects() {
  int count = 0;
1760
  HeapIterator iterator(CcTest::heap());
1761 1762 1763 1764 1765 1766 1767 1768 1769
  for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
    if (obj->IsGlobalObject()) count++;
  }
  return count;
}


// Test that we don't embed maps from foreign contexts into
// optimized code.
1770
TEST(LeakNativeContextViaMap) {
1771
  i::FLAG_allow_natives_syntax = true;
1772
  v8::Isolate* isolate = CcTest::isolate();
1773
  v8::HandleScope outer_scope(isolate);
1774 1775
  v8::Persistent<v8::Context> ctx1p;
  v8::Persistent<v8::Context> ctx2p;
1776 1777
  {
    v8::HandleScope scope(isolate);
1778 1779 1780
    ctx1p.Reset(isolate, v8::Context::New(isolate));
    ctx2p.Reset(isolate, v8::Context::New(isolate));
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
1781
  }
1782

1783
  CcTest::heap()->CollectAllAvailableGarbage();
1784 1785 1786
  CHECK_EQ(4, NumberOfGlobalObjects());

  {
1787
    v8::HandleScope inner_scope(isolate);
1788
    CompileRun("var v = {x: 42}");
1789 1790
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
1791 1792 1793 1794 1795
    v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
    ctx2->Enter();
    ctx2->Global()->Set(v8_str("o"), v);
    v8::Local<v8::Value> res = CompileRun(
        "function f() { return o.x; }"
1796 1797
        "for (var i = 0; i < 10; ++i) f();"
        "%OptimizeFunctionOnNextCall(f);"
1798 1799
        "f();");
    CHECK_EQ(42, res->Int32Value());
1800
    ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
1801
    ctx2->Exit();
1802
    v8::Local<v8::Context>::New(isolate, ctx1)->Exit();
1803
    ctx1p.Reset();
1804
    v8::V8::ContextDisposedNotification();
1805
  }
1806
  CcTest::heap()->CollectAllAvailableGarbage();
1807
  CHECK_EQ(2, NumberOfGlobalObjects());
1808
  ctx2p.Reset();
1809
  CcTest::heap()->CollectAllAvailableGarbage();
1810 1811 1812 1813 1814 1815
  CHECK_EQ(0, NumberOfGlobalObjects());
}


// Test that we don't embed functions from foreign contexts into
// optimized code.
1816
TEST(LeakNativeContextViaFunction) {
1817
  i::FLAG_allow_natives_syntax = true;
1818
  v8::Isolate* isolate = CcTest::isolate();
1819
  v8::HandleScope outer_scope(isolate);
1820 1821
  v8::Persistent<v8::Context> ctx1p;
  v8::Persistent<v8::Context> ctx2p;
1822 1823
  {
    v8::HandleScope scope(isolate);
1824 1825 1826
    ctx1p.Reset(isolate, v8::Context::New(isolate));
    ctx2p.Reset(isolate, v8::Context::New(isolate));
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
1827
  }
1828

1829
  CcTest::heap()->CollectAllAvailableGarbage();
1830 1831 1832
  CHECK_EQ(4, NumberOfGlobalObjects());

  {
1833
    v8::HandleScope inner_scope(isolate);
1834
    CompileRun("var v = function() { return 42; }");
1835 1836
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
1837 1838 1839 1840 1841
    v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
    ctx2->Enter();
    ctx2->Global()->Set(v8_str("o"), v);
    v8::Local<v8::Value> res = CompileRun(
        "function f(x) { return x(); }"
1842 1843
        "for (var i = 0; i < 10; ++i) f(o);"
        "%OptimizeFunctionOnNextCall(f);"
1844 1845
        "f(o);");
    CHECK_EQ(42, res->Int32Value());
1846
    ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
1847 1848
    ctx2->Exit();
    ctx1->Exit();
1849
    ctx1p.Reset();
1850
    v8::V8::ContextDisposedNotification();
1851
  }
1852
  CcTest::heap()->CollectAllAvailableGarbage();
1853
  CHECK_EQ(2, NumberOfGlobalObjects());
1854
  ctx2p.Reset();
1855
  CcTest::heap()->CollectAllAvailableGarbage();
1856 1857
  CHECK_EQ(0, NumberOfGlobalObjects());
}
1858 1859


1860
TEST(LeakNativeContextViaMapKeyed) {
1861
  i::FLAG_allow_natives_syntax = true;
1862
  v8::Isolate* isolate = CcTest::isolate();
1863
  v8::HandleScope outer_scope(isolate);
1864 1865
  v8::Persistent<v8::Context> ctx1p;
  v8::Persistent<v8::Context> ctx2p;
1866 1867
  {
    v8::HandleScope scope(isolate);
1868 1869 1870
    ctx1p.Reset(isolate, v8::Context::New(isolate));
    ctx2p.Reset(isolate, v8::Context::New(isolate));
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
1871
  }
1872

1873
  CcTest::heap()->CollectAllAvailableGarbage();
1874 1875 1876
  CHECK_EQ(4, NumberOfGlobalObjects());

  {
1877
    v8::HandleScope inner_scope(isolate);
1878
    CompileRun("var v = [42, 43]");
1879 1880
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
1881 1882 1883 1884 1885 1886 1887 1888 1889
    v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
    ctx2->Enter();
    ctx2->Global()->Set(v8_str("o"), v);
    v8::Local<v8::Value> res = CompileRun(
        "function f() { return o[0]; }"
        "for (var i = 0; i < 10; ++i) f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
    CHECK_EQ(42, res->Int32Value());
1890
    ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
1891 1892
    ctx2->Exit();
    ctx1->Exit();
1893
    ctx1p.Reset();
1894
    v8::V8::ContextDisposedNotification();
1895
  }
1896
  CcTest::heap()->CollectAllAvailableGarbage();
1897
  CHECK_EQ(2, NumberOfGlobalObjects());
1898
  ctx2p.Reset();
1899
  CcTest::heap()->CollectAllAvailableGarbage();
1900 1901 1902 1903
  CHECK_EQ(0, NumberOfGlobalObjects());
}


1904
TEST(LeakNativeContextViaMapProto) {
1905
  i::FLAG_allow_natives_syntax = true;
1906
  v8::Isolate* isolate = CcTest::isolate();
1907
  v8::HandleScope outer_scope(isolate);
1908 1909
  v8::Persistent<v8::Context> ctx1p;
  v8::Persistent<v8::Context> ctx2p;
1910 1911
  {
    v8::HandleScope scope(isolate);
1912 1913 1914
    ctx1p.Reset(isolate, v8::Context::New(isolate));
    ctx2p.Reset(isolate, v8::Context::New(isolate));
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
1915
  }
1916

1917
  CcTest::heap()->CollectAllAvailableGarbage();
1918 1919 1920
  CHECK_EQ(4, NumberOfGlobalObjects());

  {
1921
    v8::HandleScope inner_scope(isolate);
1922
    CompileRun("var v = { y: 42}");
1923 1924
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
1925 1926 1927 1928 1929 1930 1931 1932 1933 1934 1935 1936 1937
    v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
    ctx2->Enter();
    ctx2->Global()->Set(v8_str("o"), v);
    v8::Local<v8::Value> res = CompileRun(
        "function f() {"
        "  var p = {x: 42};"
        "  p.__proto__ = o;"
        "  return p.x;"
        "}"
        "for (var i = 0; i < 10; ++i) f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
    CHECK_EQ(42, res->Int32Value());
1938
    ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
1939 1940
    ctx2->Exit();
    ctx1->Exit();
1941
    ctx1p.Reset();
1942
    v8::V8::ContextDisposedNotification();
1943
  }
1944
  CcTest::heap()->CollectAllAvailableGarbage();
1945
  CHECK_EQ(2, NumberOfGlobalObjects());
1946
  ctx2p.Reset();
1947
  CcTest::heap()->CollectAllAvailableGarbage();
1948 1949
  CHECK_EQ(0, NumberOfGlobalObjects());
}
1950 1951 1952 1953


TEST(InstanceOfStubWriteBarrier) {
  i::FLAG_allow_natives_syntax = true;
1954
#ifdef VERIFY_HEAP
1955
  i::FLAG_verify_heap = true;
1956
#endif
1957

1958
  CcTest::InitializeVM();
1959
  if (!CcTest::i_isolate()->use_crankshaft()) return;
1960
  if (i::FLAG_force_marking_deque_overflows) return;
1961
  v8::HandleScope outer_scope(CcTest::isolate());
1962 1963

  {
1964
    v8::HandleScope scope(CcTest::isolate());
1965 1966 1967 1968 1969 1970 1971 1972 1973 1974
    CompileRun(
        "function foo () { }"
        "function mkbar () { return new (new Function(\"\")) (); }"
        "function f (x) { return (x instanceof foo); }"
        "function g () { f(mkbar()); }"
        "f(new foo()); f(new foo());"
        "%OptimizeFunctionOnNextCall(f);"
        "f(new foo()); g();");
  }

1975
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
1976 1977 1978 1979 1980 1981
  marking->Abort();
  marking->Start();

  Handle<JSFunction> f =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Function>::Cast(
1982
              CcTest::global()->Get(v8_str("f"))));
1983 1984 1985 1986 1987

  CHECK(f->IsOptimized());

  while (!Marking::IsBlack(Marking::MarkBitFrom(f->code())) &&
         !marking->IsStopped()) {
1988 1989 1990
    // Discard any pending GC requests otherwise we will get GC when we enter
    // code below.
    marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
1991 1992 1993 1994 1995
  }

  CHECK(marking->IsMarking());

  {
1996
    v8::HandleScope scope(CcTest::isolate());
1997
    v8::Handle<v8::Object> global = CcTest::global();
1998 1999 2000 2001 2002
    v8::Handle<v8::Function> g =
        v8::Handle<v8::Function>::Cast(global->Get(v8_str("g")));
    g->Call(global, 0, NULL);
  }

2003 2004
  CcTest::heap()->incremental_marking()->set_should_hurry(true);
  CcTest::heap()->CollectGarbage(OLD_POINTER_SPACE);
2005
}
2006 2007 2008


TEST(PrototypeTransitionClearing) {
2009
  if (FLAG_never_compact) return;
2010
  CcTest::InitializeVM();
2011
  Isolate* isolate = CcTest::i_isolate();
2012
  Factory* factory = isolate->factory();
2013
  v8::HandleScope scope(CcTest::isolate());
2014

2015 2016 2017 2018 2019 2020 2021
  CompileRun("var base = {};");
  Handle<JSObject> baseObject =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Object>::Cast(
              CcTest::global()->Get(v8_str("base"))));
  int initialTransitions = baseObject->map()->NumberOfProtoTransitions();

2022 2023 2024 2025 2026 2027 2028 2029 2030
  CompileRun(
      "var live = [];"
      "for (var i = 0; i < 10; i++) {"
      "  var object = {};"
      "  var prototype = {};"
      "  object.__proto__ = prototype;"
      "  if (i >= 3) live.push(object, prototype);"
      "}");

2031
  // Verify that only dead prototype transitions are cleared.
2032 2033
  CHECK_EQ(initialTransitions + 10,
      baseObject->map()->NumberOfProtoTransitions());
2034
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
2035
  const int transitions = 10 - 3;
2036 2037
  CHECK_EQ(initialTransitions + transitions,
      baseObject->map()->NumberOfProtoTransitions());
2038 2039

  // Verify that prototype transitions array was compacted.
2040
  FixedArray* trans = baseObject->map()->GetPrototypeTransitions();
2041
  for (int i = initialTransitions; i < initialTransitions + transitions; i++) {
2042 2043 2044
    int j = Map::kProtoTransitionHeaderSize +
        i * Map::kProtoTransitionElementsPerEntry;
    CHECK(trans->get(j + Map::kProtoTransitionMapOffset)->IsMap());
2045
    Object* proto = trans->get(j + Map::kProtoTransitionPrototypeOffset);
2046
    CHECK(proto->IsJSObject());
2047
  }
2048 2049 2050

  // Make sure next prototype is placed on an old-space evacuation candidate.
  Handle<JSObject> prototype;
2051
  PagedSpace* space = CcTest::heap()->old_pointer_space();
2052
  {
2053
    AlwaysAllocateScope always_allocate(isolate);
2054
    SimulateFullSpace(space);
2055
    prototype = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS, TENURED);
2056
  }
2057 2058 2059 2060 2061

  // Add a prototype on an evacuation candidate and verify that transition
  // clearing correctly records slots in prototype transition array.
  i::FLAG_always_compact = true;
  Handle<Map> map(baseObject->map());
2062 2063
  CHECK(!space->LastPage()->Contains(
      map->GetPrototypeTransitions()->address()));
2064
  CHECK(space->LastPage()->Contains(prototype->address()));
2065
}
2066 2067 2068


TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
2069
  i::FLAG_stress_compaction = false;
2070
  i::FLAG_allow_natives_syntax = true;
2071
#ifdef VERIFY_HEAP
2072 2073
  i::FLAG_verify_heap = true;
#endif
2074

2075
  CcTest::InitializeVM();
2076
  if (!CcTest::i_isolate()->use_crankshaft()) return;
2077
  v8::HandleScope outer_scope(CcTest::isolate());
2078 2079

  {
2080
    v8::HandleScope scope(CcTest::isolate());
2081 2082 2083 2084 2085 2086 2087 2088 2089 2090 2091 2092 2093
    CompileRun(
        "function f () {"
        "  var s = 0;"
        "  for (var i = 0; i < 100; i++)  s += i;"
        "  return s;"
        "}"
        "f(); f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
  }
  Handle<JSFunction> f =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Function>::Cast(
2094
              CcTest::global()->Get(v8_str("f"))));
2095 2096
  CHECK(f->IsOptimized());

2097
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2098 2099 2100
  marking->Abort();
  marking->Start();

2101
  // The following two calls will increment CcTest::heap()->global_ic_age().
2102 2103 2104 2105 2106 2107 2108
  const int kLongIdlePauseInMs = 1000;
  v8::V8::ContextDisposedNotification();
  v8::V8::IdleNotification(kLongIdlePauseInMs);

  while (!marking->IsStopped() && !marking->IsComplete()) {
    marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
  }
2109 2110 2111 2112 2113 2114
  if (!marking->IsStopped() || marking->should_hurry()) {
    // We don't normally finish a GC via Step(), we normally finish by
    // setting the stack guard and then do the final steps in the stack
    // guard interrupt.  But here we didn't ask for that, and there is no
    // JS code running to trigger the interrupt, so we explicitly finalize
    // here.
2115
    CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags,
2116 2117
                            "Test finalizing incremental mark-sweep");
  }
2118

2119
  CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
2120 2121 2122 2123 2124 2125
  CHECK_EQ(0, f->shared()->opt_count());
  CHECK_EQ(0, f->shared()->code()->profiler_ticks());
}


// As the incremental-marking variant above, but with incremental marking
// aborted so the IdleNotification triggers a full mark-sweep instead.
TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
#ifdef VERIFY_HEAP
  i::FLAG_verify_heap = true;
#endif

  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft()) return;
  v8::HandleScope outer_scope(CcTest::isolate());

  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(
        "function f () {"
        "  var s = 0;"
        "  for (var i = 0; i < 100; i++)  s += i;"
        "  return s;"
        "}"
        "f(); f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
  }
  Handle<JSFunction> f =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Function>::Cast(
              CcTest::global()->Get(v8_str("f"))));
  CHECK(f->IsOptimized());

  CcTest::heap()->incremental_marking()->Abort();

  // The following two calls will increment CcTest::heap()->global_ic_age().
  // Since incremental marking is off, IdleNotification will do full GC.
  const int kLongIdlePauseInMs = 1000;
  v8::V8::ContextDisposedNotification();
  v8::V8::IdleNotification(kLongIdlePauseInMs);

  CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
  CHECK_EQ(0, f->shared()->opt_count());
  CHECK_EQ(0, f->shared()->code()->profiler_ticks());
}
2166 2167 2168 2169 2170


// Test that HAllocateObject will always return an object in new-space.
TEST(OptimizedAllocationAlwaysInNewSpace) {
  i::FLAG_allow_natives_syntax = true;
2171
  CcTest::InitializeVM();
2172
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2173
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2174
  v8::HandleScope scope(CcTest::isolate());
2175

2176
  SimulateFullSpace(CcTest::heap()->new_space());
2177
  AlwaysAllocateScope always_allocate(CcTest::i_isolate());
2178 2179 2180 2181 2182 2183 2184 2185 2186 2187 2188
  v8::Local<v8::Value> res = CompileRun(
      "function c(x) {"
      "  this.x = x;"
      "  for (var i = 0; i < 32; i++) {"
      "    this['x' + i] = x;"
      "  }"
      "}"
      "function f(x) { return new c(x); };"
      "f(1); f(2); f(3);"
      "%OptimizeFunctionOnNextCall(f);"
      "f(4);");
2189
  CHECK_EQ(4, res->ToObject()->GetRealNamedProperty(v8_str("x"))->Int32Value());
2190

2191 2192 2193
  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));

2194
  CHECK(CcTest::heap()->InNewSpace(*o));
2195 2196 2197
}


2198 2199
TEST(OptimizedPretenuringAllocationFolding) {
  i::FLAG_allow_natives_syntax = true;
2200
  i::FLAG_expose_gc = true;
2201
  CcTest::InitializeVM();
2202
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2203 2204 2205
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());

2206 2207 2208 2209 2210
  // Grow new space unitl maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

2211
  i::ScopedVector<char> source(1024);
2212
  i::SNPrintF(
2213 2214
      source,
      "var number_elements = %d;"
2215
      "var elements = new Array();"
2216
      "function f() {"
2217
      "  for (var i = 0; i < number_elements; i++) {"
2218
      "    elements[i] = [[{}], [1.1]];"
2219
      "  }"
2220
      "  return elements[number_elements-1]"
2221
      "};"
2222 2223
      "f(); gc();"
      "f(); f();"
2224
      "%%OptimizeFunctionOnNextCall(f);"
2225 2226
      "f();",
      AllocationSite::kPretenureMinimumCreated);
2227

2228
  v8::Local<v8::Value> res = CompileRun(source.start());
2229

2230 2231 2232 2233 2234 2235
  v8::Local<v8::Value> int_array = v8::Object::Cast(*res)->Get(v8_str("0"));
  Handle<JSObject> int_array_handle =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array));
  v8::Local<v8::Value> double_array = v8::Object::Cast(*res)->Get(v8_str("1"));
  Handle<JSObject> double_array_handle =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array));
2236 2237 2238

  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2239 2240 2241 2242 2243
  CHECK(CcTest::heap()->InOldPointerSpace(*o));
  CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle));
  CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle->elements()));
  CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle));
  CHECK(CcTest::heap()->InOldDataSpace(double_array_handle->elements()));
2244 2245 2246
}


2247
TEST(OptimizedPretenuringObjectArrayLiterals) {
2248
  i::FLAG_allow_natives_syntax = true;
2249
  i::FLAG_expose_gc = true;
2250
  CcTest::InitializeVM();
2251
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2252
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2253
  v8::HandleScope scope(CcTest::isolate());
2254

2255 2256 2257 2258 2259
  // Grow new space unitl maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

2260
  i::ScopedVector<char> source(1024);
2261
  i::SNPrintF(
2262 2263
      source,
      "var number_elements = %d;"
2264
      "var elements = new Array(number_elements);"
2265
      "function f() {"
2266 2267 2268 2269
      "  for (var i = 0; i < number_elements; i++) {"
      "    elements[i] = [{}, {}, {}];"
      "  }"
      "  return elements[number_elements - 1];"
2270
      "};"
2271 2272
      "f(); gc();"
      "f(); f();"
2273
      "%%OptimizeFunctionOnNextCall(f);"
2274 2275 2276 2277
      "f();",
      AllocationSite::kPretenureMinimumCreated);

  v8::Local<v8::Value> res = CompileRun(source.start());
2278 2279 2280 2281

  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));

2282 2283
  CHECK(CcTest::heap()->InOldPointerSpace(o->elements()));
  CHECK(CcTest::heap()->InOldPointerSpace(*o));
2284 2285 2286
}


2287
TEST(OptimizedPretenuringMixedInObjectProperties) {
2288
  i::FLAG_allow_natives_syntax = true;
2289
  i::FLAG_expose_gc = true;
2290
  CcTest::InitializeVM();
2291
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2292 2293 2294
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());

2295 2296 2297 2298 2299 2300
  // Grow new space unitl maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }


2301
  i::ScopedVector<char> source(1024);
2302
  i::SNPrintF(
2303 2304
      source,
      "var number_elements = %d;"
2305
      "var elements = new Array(number_elements);"
2306
      "function f() {"
2307 2308 2309 2310
      "  for (var i = 0; i < number_elements; i++) {"
      "    elements[i] = {a: {c: 2.2, d: {}}, b: 1.1};"
      "  }"
      "  return elements[number_elements - 1];"
2311
      "};"
2312 2313
      "f(); gc();"
      "f(); f();"
2314
      "%%OptimizeFunctionOnNextCall(f);"
2315 2316 2317 2318
      "f();",
      AllocationSite::kPretenureMinimumCreated);

  v8::Local<v8::Value> res = CompileRun(source.start());
2319 2320 2321 2322

  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));

2323
  CHECK(CcTest::heap()->InOldPointerSpace(*o));
2324 2325 2326 2327
  FieldIndex idx1 = FieldIndex::ForPropertyIndex(o->map(), 0);
  FieldIndex idx2 = FieldIndex::ForPropertyIndex(o->map(), 1);
  CHECK(CcTest::heap()->InOldPointerSpace(o->RawFastPropertyAt(idx1)));
  CHECK(CcTest::heap()->InOldDataSpace(o->RawFastPropertyAt(idx2)));
2328

2329 2330
  JSObject* inner_object =
      reinterpret_cast<JSObject*>(o->RawFastPropertyAt(idx1));
2331
  CHECK(CcTest::heap()->InOldPointerSpace(inner_object));
2332 2333 2334
  CHECK(CcTest::heap()->InOldDataSpace(inner_object->RawFastPropertyAt(idx1)));
  CHECK(CcTest::heap()->InOldPointerSpace(
      inner_object->RawFastPropertyAt(idx2)));
2335 2336 2337 2338 2339
}


// Checks pretenuring of an object whose properties are all doubles: the
// object itself lands in old pointer space, its properties backing store
// in old data space.
TEST(OptimizedPretenuringDoubleArrayProperties) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());

  // Grow new space until maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  i::ScopedVector<char> source(1024);
  i::SNPrintF(
      source,
      "var number_elements = %d;"
      "var elements = new Array(number_elements);"
      "function f() {"
      "  for (var i = 0; i < number_elements; i++) {"
      "    elements[i] = {a: 1.1, b: 2.2};"
      "  }"
      "  return elements[i - 1];"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();",
      AllocationSite::kPretenureMinimumCreated);

  v8::Local<v8::Value> res = CompileRun(source.start());

  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));

  CHECK(CcTest::heap()->InOldPointerSpace(*o));
  CHECK(CcTest::heap()->InOldDataSpace(o->properties()));
}


// Checks pretenuring of double-array literals: the array lands in old
// pointer space and its double elements backing store in old data space.
TEST(OptimizedPretenuringdoubleArrayLiterals) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());

  // Grow new space until maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  i::ScopedVector<char> source(1024);
  i::SNPrintF(
      source,
      "var number_elements = %d;"
      "var elements = new Array(number_elements);"
      "function f() {"
      "  for (var i = 0; i < number_elements; i++) {"
      "    elements[i] = [1.1, 2.2, 3.3];"
      "  }"
      "  return elements[number_elements - 1];"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();",
      AllocationSite::kPretenureMinimumCreated);

  v8::Local<v8::Value> res = CompileRun(source.start());

  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));

  CHECK(CcTest::heap()->InOldDataSpace(o->elements()));
  CHECK(CcTest::heap()->InOldPointerSpace(*o));
}


// Checks pretenuring of a nested literal mixing an object array and a
// double array: both nested arrays and their backing stores are tenured.
TEST(OptimizedPretenuringNestedMixedArrayLiterals) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());

  // Grow new space until maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  i::ScopedVector<char> source(1024);
  i::SNPrintF(
      source,
      "var number_elements = 100;"
      "var elements = new Array(number_elements);"
      "function f() {"
      "  for (var i = 0; i < number_elements; i++) {"
      "    elements[i] = [[{}, {}, {}], [1.1, 2.2, 3.3]];"
      "  }"
      "  return elements[number_elements - 1];"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();");

  v8::Local<v8::Value> res = CompileRun(source.start());

  v8::Local<v8::Value> int_array = v8::Object::Cast(*res)->Get(v8_str("0"));
  Handle<JSObject> int_array_handle =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array));
  v8::Local<v8::Value> double_array = v8::Object::Cast(*res)->Get(v8_str("1"));
  Handle<JSObject> double_array_handle =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array));

  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
  CHECK(CcTest::heap()->InOldPointerSpace(*o));
  CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle));
  CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle->elements()));
  CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle));
  CHECK(CcTest::heap()->InOldDataSpace(double_array_handle->elements()));
}


// Checks pretenuring of nested object-array literals: both inner arrays
// and their elements backing stores are tenured into old pointer space.
TEST(OptimizedPretenuringNestedObjectLiterals) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());

  // Grow new space until maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  i::ScopedVector<char> source(1024);
  i::SNPrintF(
      source,
      "var number_elements = %d;"
      "var elements = new Array(number_elements);"
      "function f() {"
      "  for (var i = 0; i < number_elements; i++) {"
      "    elements[i] = [[{}, {}, {}],[{}, {}, {}]];"
      "  }"
      "  return elements[number_elements - 1];"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();",
      AllocationSite::kPretenureMinimumCreated);

  v8::Local<v8::Value> res = CompileRun(source.start());

  v8::Local<v8::Value> int_array_1 = v8::Object::Cast(*res)->Get(v8_str("0"));
  Handle<JSObject> int_array_handle_1 =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array_1));
  v8::Local<v8::Value> int_array_2 = v8::Object::Cast(*res)->Get(v8_str("1"));
  Handle<JSObject> int_array_handle_2 =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array_2));

  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
  CHECK(CcTest::heap()->InOldPointerSpace(*o));
  CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle_1));
  CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle_1->elements()));
  CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle_2));
  CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle_2->elements()));
}


// Checks pretenuring of nested double-array literals: both inner arrays
// are tenured, with their double backing stores in old data space.
TEST(OptimizedPretenuringNestedDoubleLiterals) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());

  // Grow new space until maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  i::ScopedVector<char> source(1024);
  i::SNPrintF(
      source,
      "var number_elements = %d;"
      "var elements = new Array(number_elements);"
      "function f() {"
      "  for (var i = 0; i < number_elements; i++) {"
      "    elements[i] = [[1.1, 1.2, 1.3],[2.1, 2.2, 2.3]];"
      "  }"
      "  return elements[number_elements - 1];"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();",
      AllocationSite::kPretenureMinimumCreated);

  v8::Local<v8::Value> res = CompileRun(source.start());

  v8::Local<v8::Value> double_array_1 =
      v8::Object::Cast(*res)->Get(v8_str("0"));
  Handle<JSObject> double_array_handle_1 =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array_1));
  v8::Local<v8::Value> double_array_2 =
      v8::Object::Cast(*res)->Get(v8_str("1"));
  Handle<JSObject> double_array_handle_2 =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array_2));

  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
  CHECK(CcTest::heap()->InOldPointerSpace(*o));
  CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle_1));
  CHECK(CcTest::heap()->InOldDataSpace(double_array_handle_1->elements()));
  CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle_2));
  CHECK(CcTest::heap()->InOldDataSpace(double_array_handle_2->elements()));
}


2566 2567 2568
// Make sure pretenuring feedback is gathered for constructed objects as well
// as for literals.
TEST(OptimizedPretenuringConstructorCalls) {
  if (!i::FLAG_pretenuring_call_new) {
    // FLAG_pretenuring_call_new needs to be synced with the snapshot.
    return;
  }
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  // Pretenuring decisions are only made by Crankshaft; skip configurations
  // where the optimizer or GC settings would perturb the expectations.
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());

  // Grow new space until maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  i::ScopedVector<char> source(1024);
  // Call new is doing slack tracking for the first
  // JSFunction::kGenerousAllocationCount allocations, and we can't find
  // mementos during that time.
  i::SNPrintF(
      source,
      "var number_elements = %d;"
      "var elements = new Array(number_elements);"
      "function foo() {"
      "  this.a = 3;"
      "  this.b = {};"
      "}"
      "function f() {"
      "  for (var i = 0; i < number_elements; i++) {"
      "    elements[i] = new foo();"
      "  }"
      "  return elements[number_elements - 1];"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();",
      AllocationSite::kPretenureMinimumCreated +
      JSFunction::kGenerousAllocationCount);

  v8::Local<v8::Value> res = CompileRun(source.start());

  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));

  // After enough allocations the constructed objects should be pretenured
  // straight into old pointer space.
  CHECK(CcTest::heap()->InOldPointerSpace(*o));
}


2619 2620 2621 2622 2623
// Make sure pretenuring feedback gathered through 'new' call sites leads to
// old-space allocation once the function is optimized.
TEST(OptimizedPretenuringCallNew) {
  if (!i::FLAG_pretenuring_call_new) {
    // FLAG_pretenuring_call_new needs to be synced with the snapshot.
    return;
  }
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  // Pretenuring is a Crankshaft feature; bail out on configurations that
  // would perturb the allocation-site feedback.
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());

  // Grow new space until maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  i::ScopedVector<char> source(1024);
  // Call new is doing slack tracking for the first
  // JSFunction::kGenerousAllocationCount allocations, and we can't find
  // mementos during that time.
  i::SNPrintF(
      source,
      "var number_elements = %d;"
      "var elements = new Array(number_elements);"
      "function g() { this.a = 0; }"
      "function f() {"
      "  for (var i = 0; i < number_elements; i++) {"
      "    elements[i] = new g();"
      "  }"
      "  return elements[number_elements - 1];"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();",
      AllocationSite::kPretenureMinimumCreated +
      JSFunction::kGenerousAllocationCount);

  v8::Local<v8::Value> res = CompileRun(source.start());

  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
  // The last instance constructed by optimized code must be pretenured.
  CHECK(CcTest::heap()->InOldPointerSpace(*o));
}
2664 2665


2666 2667
// Test regular array literals allocation.
TEST(OptimizedAllocationArrayLiterals) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());

  // f() allocates a small array literal; without pretenuring feedback the
  // optimized code should keep allocating its backing store in new space.
  v8::Local<v8::Value> res = CompileRun(
      "function f() {"
      "  var numbers = new Array(1, 2, 3);"
      "  numbers[0] = 3.14;"
      "  return numbers;"
      "};"
      "f(); f(); f();"
      "%OptimizeFunctionOnNextCall(f);"
      "f();");
  // Sanity check that the double store actually happened (3.14 truncates to 3).
  CHECK_EQ(static_cast<int>(3.14),
           v8::Object::Cast(*res)->Get(v8_str("0"))->Int32Value());

  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));

  CHECK(CcTest::heap()->InNewSpace(o->elements()));
}


2693
// Returns how many transitions are recorded in |map|'s transition array.
static int CountMapTransitions(Map* map) {
  TransitionArray* transitions = map->transitions();
  return transitions->number_of_transitions();
}


// Test that map transitions are cleared and maps are collected with
// incremental marking as well.
TEST(Regress1465) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_trace_incremental_marking = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  static const int transitions_count = 256;

  CompileRun("function F() {}");
  {
    // Keep allocation going so every transition really materializes.
    AlwaysAllocateScope always_allocate(CcTest::i_isolate());
    for (int i = 0; i < transitions_count; i++) {
      EmbeddedVector<char, 64> buffer;
      SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
      CompileRun(buffer.start());
    }
    CompileRun("var root = new F;");
  }

  Handle<JSObject> root =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Object>::Cast(
              CcTest::global()->Get(v8_str("root"))));

  // Count number of live transitions before marking.
  int transitions_before = CountMapTransitions(root->map());
  CompileRun("%DebugPrint(root);");
  CHECK_EQ(transitions_count, transitions_before);

  // Drive an incremental marking cycle to completion, then finish with a
  // full GC so dead transition targets are actually dropped.
  SimulateIncrementalMarking();
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);

  // Count number of live transitions after marking.  Note that one transition
  // is left, because 'o' still holds an instance of one transition target.
  int transitions_after = CountMapTransitions(root->map());
  CompileRun("%DebugPrint(root);");
  CHECK_EQ(1, transitions_after);
}
2738 2739


2740 2741 2742 2743 2744
#ifdef DEBUG
static void AddTransitions(int transitions_count) {
  AlwaysAllocateScope always_allocate(CcTest::i_isolate());
  for (int i = 0; i < transitions_count; i++) {
    EmbeddedVector<char, 64> buffer;
2745
    SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
2746 2747 2748 2749 2750 2751 2752 2753 2754 2755 2756 2757 2758 2759 2760 2761 2762 2763 2764 2765 2766 2767 2768 2769 2770 2771 2772 2773 2774 2775 2776 2777
    CompileRun(buffer.start());
  }
}


// Fetches the global property |name| and returns it as an internal handle.
static Handle<JSObject> GetByName(const char* name) {
  v8::Handle<v8::Value> value = CcTest::global()->Get(v8_str(name));
  v8::Handle<v8::Object> object = v8::Handle<v8::Object>::Cast(value);
  return v8::Utils::OpenHandle(*object);
}


static void AddPropertyTo(
    int gc_count, Handle<JSObject> object, const char* property_name) {
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Handle<String> prop_name = factory->InternalizeUtf8String(property_name);
  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  i::FLAG_gc_interval = gc_count;
  i::FLAG_gc_global = true;
  CcTest::heap()->set_allocation_timeout(gc_count);
  JSReceiver::SetProperty(
      object, prop_name, twenty_three, NONE, SLOPPY).Check();
}


// Checks transition-array shrinking when a GC (forced via AddPropertyTo's
// allocation timeout) strikes while a property is being added.  DEBUG-only:
// relies on Heap::set_allocation_timeout.
TEST(TransitionArrayShrinksDuringAllocToZero) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  static const int transitions_count = 10;
  CompileRun("function F() { }");
  AddTransitions(transitions_count);
  CompileRun("var root = new F;");
  Handle<JSObject> root = GetByName("root");

  // Count number of live transitions before marking.
  int transitions_before = CountMapTransitions(root->map());
  CHECK_EQ(transitions_count, transitions_before);

  // Get rid of o
  CompileRun("o = new F;"
             "root = new F");
  root = GetByName("root");
  AddPropertyTo(2, root, "funny");

  // Count number of live transitions after marking.  Note that one transition
  // is left, because 'o' still holds an instance of one transition target.
  int transitions_after = CountMapTransitions(
      Map::cast(root->map()->GetBackPointer()));
  CHECK_EQ(1, transitions_after);
}


// Like TransitionArrayShrinksDuringAllocToZero, but 'root' keeps its original
// map alive, so two transitions survive on the back-pointer map.  DEBUG-only.
TEST(TransitionArrayShrinksDuringAllocToOne) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  static const int transitions_count = 10;
  CompileRun("function F() {}");
  AddTransitions(transitions_count);
  CompileRun("var root = new F;");
  Handle<JSObject> root = GetByName("root");

  // Count number of live transitions before marking.
  int transitions_before = CountMapTransitions(root->map());
  CHECK_EQ(transitions_count, transitions_before);

  root = GetByName("root");
  // gc_count == 2: the GC fires mid-property-addition.
  AddPropertyTo(2, root, "funny");

  // Count number of live transitions after marking.  Note that one transition
  // is left, because 'o' still holds an instance of one transition target.
  int transitions_after = CountMapTransitions(
      Map::cast(root->map()->GetBackPointer()));
  CHECK_EQ(2, transitions_after);
}


// Variant where the added property ("prop9") already has a transition from
// AddTransitions, and the GC fires immediately (gc_count == 0).  DEBUG-only.
TEST(TransitionArrayShrinksDuringAllocToOnePropertyFound) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  static const int transitions_count = 10;
  CompileRun("function F() {}");
  AddTransitions(transitions_count);
  CompileRun("var root = new F;");
  Handle<JSObject> root = GetByName("root");

  // Count number of live transitions before marking.
  int transitions_before = CountMapTransitions(root->map());
  CHECK_EQ(transitions_count, transitions_before);

  root = GetByName("root");
  AddPropertyTo(0, root, "prop9");

  // Count number of live transitions after marking.  Note that one transition
  // is left, because 'o' still holds an instance of one transition target.
  int transitions_after = CountMapTransitions(
      Map::cast(root->map()->GetBackPointer()));
  CHECK_EQ(1, transitions_after);
}


// Checks that a simple (single-entry) transition is upgraded to a full
// transition array when another property is added under GC stress.
// DEBUG-only.
TEST(TransitionArraySimpleToFull) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  static const int transitions_count = 1;
  CompileRun("function F() {}");
  AddTransitions(transitions_count);
  CompileRun("var root = new F;");
  Handle<JSObject> root = GetByName("root");

  // Count number of live transitions before marking.
  int transitions_before = CountMapTransitions(root->map());
  CHECK_EQ(transitions_count, transitions_before);

  CompileRun("o = new F;"
             "root = new F");
  root = GetByName("root");
  // With only one transition the map must still use the compact
  // simple-transition encoding before we add the next property.
  ASSERT(root->map()->transitions()->IsSimpleTransition());
  AddPropertyTo(2, root, "happy");

  // Count number of live transitions after marking.  Note that one transition
  // is left, because 'o' still holds an instance of one transition target.
  int transitions_after = CountMapTransitions(
      Map::cast(root->map()->GetBackPointer()));
  CHECK_EQ(1, transitions_after);
}
#endif  // DEBUG


2883 2884 2885
// Regression test: a StoreIC-compiled map transition happening while
// incremental marking is in progress must re-grey the transitioned object so
// its map is not collected out from under it.
TEST(Regress2143a) {
  i::FLAG_collect_maps = true;
  i::FLAG_incremental_marking = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  // Prepare a map transition from the root object together with a yet
  // untransitioned root object.
  CompileRun("var root = new Object;"
             "root.foo = 0;"
             "root = new Object;");

  SimulateIncrementalMarking();

  // Compile a StoreIC that performs the prepared map transition. This
  // will restart incremental marking and should make sure the root is
  // marked grey again.
  CompileRun("function f(o) {"
             "  o.foo = 0;"
             "}"
             "f(new Object);"
             "f(root);");

  // This bug only triggers with aggressive IC clearing.
  CcTest::heap()->AgeInlineCaches();

  // Explicitly request GC to perform final marking step and sweeping.
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);

  Handle<JSObject> root =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Object>::Cast(
              CcTest::global()->Get(v8_str("root"))));

  // The root object should be in a sane state.
  CHECK(root->IsJSObject());
  CHECK(root->map()->IsMap());
}


// Same scenario as Regress2143a, but the transition is performed by an
// optimized LStoreNamedField instead of a StoreIC.
TEST(Regress2143b) {
  i::FLAG_collect_maps = true;
  i::FLAG_incremental_marking = true;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  // Prepare a map transition from the root object together with a yet
  // untransitioned root object.
  CompileRun("var root = new Object;"
             "root.foo = 0;"
             "root = new Object;");

  SimulateIncrementalMarking();

  // Compile an optimized LStoreNamedField that performs the prepared
  // map transition. This will restart incremental marking and should
  // make sure the root is marked grey again.
  CompileRun("function f(o) {"
             "  o.foo = 0;"
             "}"
             "f(new Object);"
             "f(new Object);"
             "%OptimizeFunctionOnNextCall(f);"
             "f(root);"
             "%DeoptimizeFunction(f);");

  // This bug only triggers with aggressive IC clearing.
  CcTest::heap()->AgeInlineCaches();

  // Explicitly request GC to perform final marking step and sweeping.
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);

  Handle<JSObject> root =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Object>::Cast(
              CcTest::global()->Get(v8_str("root"))));

  // The root object should be in a sane state.
  CHECK(root->IsJSObject());
  CHECK(root->map()->IsMap());
}
2965 2966 2967


// Checks that mostly-empty old-space pages are progressively released back to
// the OS by successive full GCs.
TEST(ReleaseOverReservedPages) {
  if (FLAG_never_compact) return;
  i::FLAG_trace_gc = true;
  // The optimizer can allocate stuff, messing up the test.
  i::FLAG_crankshaft = false;
  i::FLAG_always_opt = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(CcTest::isolate());
  static const int number_of_test_pages = 20;

  // Prepare many pages with low live-bytes count.
  PagedSpace* old_pointer_space = heap->old_pointer_space();
  CHECK_EQ(1, old_pointer_space->CountTotalPages());
  for (int i = 0; i < number_of_test_pages; i++) {
    AlwaysAllocateScope always_allocate(isolate);
    // Fill the current page so the next tiny array lands on a fresh one.
    SimulateFullSpace(old_pointer_space);
    factory->NewFixedArray(1, TENURED);
  }
  CHECK_EQ(number_of_test_pages + 1, old_pointer_space->CountTotalPages());

  // Triggering one GC will cause a lot of garbage to be discovered but
  // even spread across all allocated pages.
  heap->CollectAllGarbage(Heap::kNoGCFlags, "triggered for preparation");
  CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages());

  // Triggering subsequent GCs should cause at least half of the pages
  // to be released to the OS after at most two cycles.
  heap->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 1");
  CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
  heap->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 2");
  CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages() * 2);

  // Triggering a last-resort GC should cause all pages to be released to the
  // OS so that other processes can seize the memory.  If we get a failure here
  // where there are 2 pages left instead of 1, then we should increase the
  // size of the first page a little in SizeOfFirstPage in spaces.cc.  The
  // first page should be small in order to reduce memory used when the VM
  // boots, but if the 20 small arrays don't fit on the first page then that's
  // an indication that it is too small.
  heap->CollectAllAvailableGarbage("triggered really hard");
  CHECK_EQ(1, old_pointer_space->CountTotalPages());
}
3012 3013 3014


// Regression test: a sliced string in old space must keep its new-space
// parent alive (and unflattened) across a full GC.
TEST(Regress2237) {
  i::FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  // Raw slot that will be made to point at the sliced string below; it
  // deliberately outlives the inner handle scope.
  Handle<String> slice(CcTest::heap()->empty_string());

  {
    // Generate a parent that lives in new-space.
    v8::HandleScope inner_scope(CcTest::isolate());
    const char* c = "This text is long enough to trigger sliced strings.";
    Handle<String> s = factory->NewStringFromAsciiChecked(c);
    CHECK(s->IsSeqOneByteString());
    CHECK(CcTest::heap()->InNewSpace(*s));

    // Generate a sliced string that is based on the above parent and
    // lives in old-space.
    SimulateFullSpace(CcTest::heap()->new_space());
    AlwaysAllocateScope always_allocate(isolate);
    Handle<String> t = factory->NewProperSubString(s, 5, 35);
    CHECK(t->IsSlicedString());
    CHECK(!CcTest::heap()->InNewSpace(*t));
    // Smuggle the slice out of the inner scope by writing its location
    // directly into the outer handle.
    *slice.location() = *t.location();
  }

  CHECK(SlicedString::cast(*slice)->parent()->IsSeqOneByteString());
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
  // The parent must survive the GC and still be a sequential string.
  CHECK(SlicedString::cast(*slice)->parent()->IsSeqOneByteString());
}
3044 3045 3046 3047


#ifdef OBJECT_PRINT
// Smoke test: printing a SharedFunctionInfo must not crash.
// Only compiled when OBJECT_PRINT is enabled.
TEST(PrintSharedFunctionInfo) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "f = function() { return 987654321; }\n"
                       "g = function() { return 123456789; }\n";
  CompileRun(source);
  Handle<JSFunction> g =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Function>::Cast(
              CcTest::global()->Get(v8_str("g"))));

  OFStream os(stdout);
  g->shared()->Print(os);
  os << endl;
}
#endif  // OBJECT_PRINT
3063 3064 3065


// Regression test: the identity hash and hidden properties must coexist
// regardless of which one is installed first, and the backing hash table
// must not grow beyond its initial size for them.
TEST(Regress2211) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  v8::Handle<v8::String> value = v8_str("val string");
  Smi* hash = Smi::FromInt(321);
  Factory* factory = CcTest::i_isolate()->factory();

  for (int i = 0; i < 2; i++) {
    // Store identity hash first and common hidden property second.
    v8::Handle<v8::Object> obj = v8::Object::New(CcTest::isolate());
    Handle<JSObject> internal_obj = v8::Utils::OpenHandle(*obj);
    CHECK(internal_obj->HasFastProperties());

    // In the first iteration, set hidden value first and identity hash second.
    // In the second iteration, reverse the order.
    if (i == 0) obj->SetHiddenValue(v8_str("key string"), value);
    JSObject::SetIdentityHash(internal_obj, handle(hash, CcTest::i_isolate()));
    if (i == 1) obj->SetHiddenValue(v8_str("key string"), value);

    // Check values.
    CHECK_EQ(hash,
             internal_obj->GetHiddenProperty(factory->identity_hash_string()));
    CHECK(value->Equals(obj->GetHiddenValue(v8_str("key string"))));

    // Check size.
    FieldIndex index = FieldIndex::ForDescriptor(internal_obj->map(), 0);
    ObjectHashTable* hashtable = ObjectHashTable::cast(
        internal_obj->RawFastPropertyAt(index));
    // HashTable header (5) and 4 initial entries (8).
    CHECK_LE(hashtable->SizeFor(hashtable->length()), 13 * kPointerSize);
  }
}
3098 3099


3100
// Checks that a full GC preceded by incremental marking resets type feedback
// entries that reference closures from dead native contexts back to the
// uninitialized sentinel.
TEST(IncrementalMarkingClearsTypeFeedbackInfo) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> fun1, fun2;

  {
    LocalContext env;
    CompileRun("function fun() {};");
    fun1 = env->Global()->Get(v8_str("fun"));
  }

  {
    LocalContext env;
    CompileRun("function fun() {};");
    fun2 = env->Global()->Get(v8_str("fun"));
  }

  // Prepare function f that contains type feedback for closures
  // originating from two different native contexts.
  CcTest::global()->Set(v8_str("fun1"), fun1);
  CcTest::global()->Set(v8_str("fun2"), fun2);
  CompileRun("function f(a, b) { a(); b(); } f(fun1, fun2);");

  Handle<JSFunction> f =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Function>::Cast(
              CcTest::global()->Get(v8_str("f"))));

  Handle<FixedArray> feedback_vector(f->shared()->feedback_vector());

  // Both call sites recorded their target closures.
  CHECK_EQ(2, feedback_vector->length());
  CHECK(feedback_vector->get(0)->IsJSFunction());
  CHECK(feedback_vector->get(1)->IsJSFunction());

  SimulateIncrementalMarking();
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);

  // The vector keeps its shape but the stale entries are cleared.
  CHECK_EQ(2, feedback_vector->length());
  CHECK_EQ(feedback_vector->get(0),
           *TypeFeedbackInfo::UninitializedSentinel(CcTest::i_isolate()));
  CHECK_EQ(feedback_vector->get(1),
           *TypeFeedbackInfo::UninitializedSentinel(CcTest::i_isolate()));
}
3144 3145 3146 3147 3148


// Scans |code|'s relocation entries and returns the first inline cache stub
// of the requested |kind|, or NULL when no such IC is embedded.
static Code* FindFirstIC(Code* code, Code::Kind kind) {
  int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
             RelocInfo::ModeMask(RelocInfo::CONSTRUCT_CALL) |
             RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID);
  RelocIterator it(code, mask);
  while (!it.done()) {
    Code* candidate =
        Code::GetCodeFromTargetAddress(it.rinfo()->target_address());
    if (candidate->kind() == kind && candidate->is_inline_cache_stub()) {
      return candidate;
    }
    it.next();
  }
  return NULL;
}


3161
// A monomorphic IC whose receiver map comes from the *same* native context
// must survive incremental marking plus a full GC untouched.
TEST(IncrementalMarkingPreservesMonomorphicIC) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  // Prepare function f that contains a monomorphic IC for object
  // originating from the same native context.
  CompileRun("function fun() { this.x = 1; }; var obj = new fun();"
             "function f(o) { return o.x; } f(obj); f(obj);");
  Handle<JSFunction> f =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Function>::Cast(
              CcTest::global()->Get(v8_str("f"))));

  Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
  CHECK(ic_before->ic_state() == MONOMORPHIC);

  SimulateIncrementalMarking();
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);

  // The IC must still be monomorphic after the GC.
  Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
  CHECK(ic_after->ic_state() == MONOMORPHIC);
}


3186
// A monomorphic IC whose receiver map comes from a *disposed* native context
// must be cleared by incremental marking plus a full GC.
TEST(IncrementalMarkingClearsMonomorphicIC) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> obj1;

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
    obj1 = env->Global()->Get(v8_str("obj"));
  }

  // Prepare function f that contains a monomorphic IC for object
  // originating from a different native context.
  CcTest::global()->Set(v8_str("obj1"), obj1);
  CompileRun("function f(o) { return o.x; } f(obj1); f(obj1);");
  Handle<JSFunction> f =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Function>::Cast(
              CcTest::global()->Get(v8_str("f"))));

  Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
  CHECK(ic_before->ic_state() == MONOMORPHIC);

  // Fire context dispose notification.
  v8::V8::ContextDisposedNotification();
  SimulateIncrementalMarking();
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);

  Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
  CHECK(IC::IsCleared(ic_after));
}


3220
// A polymorphic IC fed by receivers from two disposed native contexts must
// likewise be cleared by incremental marking plus a full GC.
TEST(IncrementalMarkingClearsPolymorphicIC) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> obj1, obj2;

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
    obj1 = env->Global()->Get(v8_str("obj"));
  }

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
    obj2 = env->Global()->Get(v8_str("obj"));
  }

  // Prepare function f that contains a polymorphic IC for objects
  // originating from two different native contexts.
  CcTest::global()->Set(v8_str("obj1"), obj1);
  CcTest::global()->Set(v8_str("obj2"), obj2);
  CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
  Handle<JSFunction> f =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Function>::Cast(
              CcTest::global()->Get(v8_str("f"))));

  Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
  CHECK(ic_before->ic_state() == POLYMORPHIC);

  // Fire context dispose notification.
  v8::V8::ContextDisposedNotification();
  SimulateIncrementalMarking();
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);

  Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
  CHECK(IC::IsCleared(ic_after));
}
3259 3260 3261 3262 3263 3264 3265 3266 3267 3268 3269 3270 3271 3272 3273 3274 3275 3276 3277 3278 3279 3280 3281 3282


// External string resource over a caller-allocated buffer (made with
// i::StrDup by the tests below).  The buffer is freed in Dispose(), which the
// external-string machinery invokes when the string is GC'ed, so IsDisposed()
// tells the tests whether the GC has dropped the string.
class SourceResource: public v8::String::ExternalAsciiStringResource {
 public:
  explicit SourceResource(const char* data)
    : data_(data), length_(strlen(data)) { }

  // Frees the owned buffer; afterwards IsDisposed() returns true.
  virtual void Dispose() {
    i::DeleteArray(data_);
    data_ = NULL;
  }

  const char* data() const { return data_; }

  size_t length() const { return length_; }

  bool IsDisposed() { return data_ == NULL; }

 private:
  const char* data_;    // owned buffer; NULL once disposed
  size_t length_;       // original length; not reset by Dispose()
};


3283
void ReleaseStackTraceDataTest(const char* source, const char* accessor) {
3284 3285 3286 3287
  // Test that the data retained by the Error.stack accessor is released
  // after the first time the accessor is fired.  We use external string
  // to check whether the data is being released since the external string
  // resource's callback is fired when the external string is GC'ed.
3288
  v8::HandleScope scope(CcTest::isolate());
3289 3290
  SourceResource* resource = new SourceResource(i::StrDup(source));
  {
3291
    v8::HandleScope scope(CcTest::isolate());
3292 3293
    v8::Handle<v8::String> source_string =
        v8::String::NewExternal(CcTest::isolate(), resource);
3294
    CcTest::heap()->CollectAllAvailableGarbage();
3295 3296 3297
    v8::Script::Compile(source_string)->Run();
    CHECK(!resource->IsDisposed());
  }
3298
  // CcTest::heap()->CollectAllAvailableGarbage();
3299 3300 3301
  CHECK(!resource->IsDisposed());

  CompileRun(accessor);
3302
  CcTest::heap()->CollectAllAvailableGarbage();
3303 3304 3305 3306 3307

  // External source has been released.
  CHECK(resource->IsDisposed());
  delete resource;
}
3308 3309


3310
// Exercises ReleaseStackTraceDataTest with normal errors, stack-overflow
// errors, and errors reachable only through a prototype chain.
TEST(ReleaseStackTraceData) {
  if (i::FLAG_always_opt) {
    // TODO(ulan): Remove this once the memory leak via code_next_link is fixed.
    // See: https://codereview.chromium.org/181833004/
    return;
  }
  FLAG_use_ic = false;  // ICs retain objects.
  FLAG_concurrent_recompilation = false;
  CcTest::InitializeVM();
  static const char* source1 = "var error = null;            "
  /* Normal Error */           "try {                        "
                               "  throw new Error();         "
                               "} catch (e) {                "
                               "  error = e;                 "
                               "}                            ";
  static const char* source2 = "var error = null;            "
  /* Stack overflow */         "try {                        "
                               "  (function f() { f(); })(); "
                               "} catch (e) {                "
                               "  error = e;                 "
                               "}                            ";
  static const char* source3 = "var error = null;            "
  /* Normal Error */           "try {                        "
  /* as prototype */           "  throw new Error();         "
                               "} catch (e) {                "
                               "  error = {};                "
                               "  error.__proto__ = e;       "
                               "}                            ";
  static const char* source4 = "var error = null;            "
  /* Stack overflow */         "try {                        "
  /* as prototype   */         "  (function f() { f(); })(); "
                               "} catch (e) {                "
                               "  error = {};                "
                               "  error.__proto__ = e;       "
                               "}                            ";
  static const char* getter = "error.stack";
  static const char* setter = "error.stack = 0";

  ReleaseStackTraceDataTest(source1, setter);
  ReleaseStackTraceDataTest(source2, setter);
  // We do not test source3 and source4 with setter, since the setter is
  // supposed to (untypically) write to the receiver, not the holder.  This is
  // to emulate the behavior of a data property.

  ReleaseStackTraceDataTest(source1, getter);
  ReleaseStackTraceDataTest(source2, getter);
  ReleaseStackTraceDataTest(source3, getter);
  ReleaseStackTraceDataTest(source4, getter);
}


3361 3362 3363
// Regression test: optimizing a code-flushing candidate during incremental
// marking must not leave the function without unoptimized code.
TEST(Regress159140) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_flush_code_incrementally = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);

  // Prepare several closures that are all eligible for code flushing
  // because all reachable ones are not optimized. Make sure that the
  // optimized code object is directly reachable through a handle so
  // that it is marked black during incremental marking.
  Handle<Code> code;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function h(x) {}"
               "function mkClosure() {"
               "  return function(x) { return x + 1; };"
               "}"
               "var f = mkClosure();"
               "var g = mkClosure();"
               "f(1); f(2);"
               "g(1); g(2);"
               "h(1); h(2);"
               "%OptimizeFunctionOnNextCall(f); f(3);"
               "%OptimizeFunctionOnNextCall(h); h(3);");

    Handle<JSFunction> f =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                CcTest::global()->Get(v8_str("f"))));
    CHECK(f->is_compiled());
    CompileRun("f = null;");

    Handle<JSFunction> g =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                CcTest::global()->Get(v8_str("g"))));
    CHECK(g->is_compiled());
    // Age g's code past the flushing threshold so it becomes a candidate.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      g->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    code = inner_scope.CloseAndEscape(Handle<Code>(f->code()));
  }

  // Simulate incremental marking so that the functions are enqueued as
  // code flushing candidates. Then optimize one function. Finally
  // finish the GC to complete code flushing.
  SimulateIncrementalMarking();
  CompileRun("%OptimizeFunctionOnNextCall(g); g(3);");
  heap->CollectAllGarbage(Heap::kNoGCFlags);

  // Unoptimized code is missing and the deoptimizer will go ballistic.
  CompileRun("g('bozo');");
}
3421 3422 3423 3424 3425


// Regression test: flushing unoptimized code that is still cached in the
// optimized code map must not crash when a new closure reuses the map entry.
TEST(Regress165495) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_flush_code_incrementally = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);

  // Prepare an optimized closure that the optimized code map will get
  // populated. Then age the unoptimized code to trigger code flushing
  // but make sure the optimized code is unreachable.
  {
    HandleScope inner_scope(isolate);
    CompileRun("function mkClosure() {"
               "  return function(x) { return x + 1; };"
               "}"
               "var f = mkClosure();"
               "f(1); f(2);"
               "%OptimizeFunctionOnNextCall(f); f(3);");

    Handle<JSFunction> f =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                CcTest::global()->Get(v8_str("f"))));
    CHECK(f->is_compiled());
    // Age the code past the flushing threshold.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    CompileRun("f = null;");
  }

  // Simulate incremental marking so that unoptimized code is flushed
  // even though it still is cached in the optimized code map.
  SimulateIncrementalMarking();
  heap->CollectAllGarbage(Heap::kNoGCFlags);

  // Make a new closure that will get code installed from the code map.
  // Unoptimized code is missing and the deoptimizer will go ballistic.
  CompileRun("var g = mkClosure(); g('bozo');");
}
3468 3469 3470


// Regression test: replacing a flushing candidate's unoptimized code during
// optimization must correctly unlink it from the candidate list (checked via
// gc_metadata before and after the GC cycle completes).
TEST(Regress169209) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_flush_code_incrementally = true;

  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);

  // Prepare a shared function info eligible for code flushing for which
  // the unoptimized code will be replaced during optimization.
  Handle<SharedFunctionInfo> shared1;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function f() { return 'foobar'; }"
               "function g(x) { if (x) f(); }"
               "f();"
               "g(false);"
               "g(false);");

    Handle<JSFunction> f =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                CcTest::global()->Get(v8_str("f"))));
    CHECK(f->is_compiled());
    // Age the code so it becomes a flushing candidate.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    shared1 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
  }

  // Prepare a shared function info eligible for code flushing that will
  // represent the dangling tail of the candidate list.
  Handle<SharedFunctionInfo> shared2;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function flushMe() { return 0; }"
               "flushMe(1);");

    Handle<JSFunction> f =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                CcTest::global()->Get(v8_str("flushMe"))));
    CHECK(f->is_compiled());
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    shared2 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
  }

  // Simulate incremental marking and collect code flushing candidates.
  SimulateIncrementalMarking();
  CHECK(shared1->code()->gc_metadata() != NULL);

  // Optimize function and make sure the unoptimized code is replaced.
#ifdef DEBUG
  FLAG_stop_at = "f";
#endif
  CompileRun("%OptimizeFunctionOnNextCall(g);"
             "g(false);");

  // Finish garbage collection cycle.
  heap->CollectAllGarbage(Heap::kNoGCFlags);
  CHECK(shared1->code()->gc_metadata() == NULL);
}
3543 3544 3545 3546 3547 3548 3549 3550 3551


// Helper function that simulates a fill new-space in the heap: consumes the
// linear allocation area of |space| until only |extra_bytes| remain, so the
// next allocation of that size lands exactly at the space's limit.
static inline void AllocateAllButNBytes(v8::internal::NewSpace* space,
                                        int extra_bytes) {
  int space_remaining = static_cast<int>(
      *space->allocation_limit_address() - *space->allocation_top_address());
  CHECK(space_remaining >= extra_bytes);
  int new_linear_size = space_remaining - extra_bytes;
  v8::internal::AllocationResult allocation =
      space->AllocateRaw(new_linear_size);
  // Turn the raw allocation into a free-list filler so the heap stays
  // iterable.
  v8::internal::FreeListNode* node =
      v8::internal::FreeListNode::cast(allocation.ToObjectChecked());
  node->set_size(space->heap(), new_linear_size);
}


// Regression test: array literal allocation right at the new-space boundary
// must not misplace the AllocationMemento (would crash with a protection
// violation in buggy builds).
TEST(Regress169928) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_crankshaft = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());

  // Some flags turn Scavenge collections into Mark-sweep collections
  // and hence are incompatible with this test case.
  if (FLAG_gc_global || FLAG_stress_compaction) return;

  // Prepare the environment
  CompileRun("function fastliteralcase(literal, value) {"
             "    literal[0] = value;"
             "    return literal;"
             "}"
             "function get_standard_literal() {"
             "    var literal = [1, 2, 3];"
             "    return literal;"
             "}"
             "obj = fastliteralcase(get_standard_literal(), 1);"
             "obj = fastliteralcase(get_standard_literal(), 1.5);"
             "obj = fastliteralcase(get_standard_literal(), 2);");

  // prepare the heap
  v8::Local<v8::String> mote_code_string =
      v8_str("fastliteralcase(mote, 2.5);");

  v8::Local<v8::String> array_name = v8_str("mote");
  CcTest::global()->Set(array_name, v8::Int32::New(CcTest::isolate(), 0));

  // First make sure we flip spaces
  CcTest::heap()->CollectGarbage(NEW_SPACE);

  // Allocate the object.
  Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED);
  array_data->set(0, Smi::FromInt(1));
  array_data->set(1, Smi::FromInt(2));

  // Leave exactly enough room in new space for the array, its memento, and
  // one extra word.
  AllocateAllButNBytes(CcTest::heap()->new_space(),
                       JSArray::kSize + AllocationMemento::kSize +
                       kPointerSize);

  Handle<JSArray> array = factory->NewJSArrayWithElements(array_data,
                                                          FAST_SMI_ELEMENTS,
                                                          NOT_TENURED);

  CHECK_EQ(Smi::FromInt(2), array->length());
  CHECK(array->HasFastSmiOrObjectElements());

  // We need filler the size of AllocationMemento object, plus an extra
  // fill pointer value.
  HeapObject* obj = NULL;
  AllocationResult allocation = CcTest::heap()->new_space()->AllocateRaw(
      AllocationMemento::kSize + kPointerSize);
  CHECK(allocation.To(&obj));
  Address addr_obj = obj->address();
  CcTest::heap()->CreateFillerObjectAt(
      addr_obj, AllocationMemento::kSize + kPointerSize);

  // Give the array a name, making sure not to allocate strings.
  v8::Handle<v8::Object> array_obj = v8::Utils::ToLocal(array);
  CcTest::global()->Set(array_name, array_obj);

  // This should crash with a protection violation if we are running a build
  // with the bug.
  AlwaysAllocateScope aa_scope(isolate);
  v8::Script::Compile(mote_code_string)->Run();
}
3630 3631 3632


// Regression test: optimizing a function removes it from the code-flushing
// candidate list; combined with compaction of the code space this must not
// corrupt the heap across subsequent GC cycles.
TEST(Regress168801) {
  if (i::FLAG_never_compact) return;
  i::FLAG_always_compact = true;
  i::FLAG_cache_optimized_code = false;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_flush_code_incrementally = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);

  // Ensure the code ends up on an evacuation candidate.
  SimulateFullSpace(heap->code_space());

  // Prepare an unoptimized function that is eligible for code flushing.
  Handle<JSFunction> function;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function mkClosure() {"
               "  return function(x) { return x + 1; };"
               "}"
               "var f = mkClosure();"
               "f(1); f(2);");

    Handle<JSFunction> f =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                CcTest::global()->Get(v8_str("f"))));
    CHECK(f->is_compiled());
    // Age the code so it becomes a flushing candidate.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    function = inner_scope.CloseAndEscape(handle(*f, isolate));
  }

  // Simulate incremental marking so that unoptimized function is enqueued as a
  // candidate for code flushing. The shared function info however will not be
  // explicitly enqueued.
  SimulateIncrementalMarking();

  // Now optimize the function so that it is taken off the candidate list.
  {
    HandleScope inner_scope(isolate);
    CompileRun("%OptimizeFunctionOnNextCall(f); f(3);");
  }

  // This cycle will bust the heap and subsequent cycles will go ballistic.
  heap->CollectAllGarbage(Heap::kNoGCFlags);
  heap->CollectAllGarbage(Heap::kNoGCFlags);
}
3687 3688 3689


// Regression test: loading the debugger (which disables code flushing) while
// a flushing candidate is enqueued must not corrupt the heap across GCs.
TEST(Regress173458) {
  if (i::FLAG_never_compact) return;
  i::FLAG_always_compact = true;
  i::FLAG_cache_optimized_code = false;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_flush_code_incrementally = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);

  // Ensure the code ends up on an evacuation candidate.
  SimulateFullSpace(heap->code_space());

  // Prepare an unoptimized function that is eligible for code flushing.
  Handle<JSFunction> function;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function mkClosure() {"
               "  return function(x) { return x + 1; };"
               "}"
               "var f = mkClosure();"
               "f(1); f(2);");

    Handle<JSFunction> f =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                CcTest::global()->Get(v8_str("f"))));
    CHECK(f->is_compiled());
    // Age the code so it becomes a flushing candidate.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    function = inner_scope.CloseAndEscape(handle(*f, isolate));
  }

  // Simulate incremental marking so that unoptimized function is enqueued as a
  // candidate for code flushing. The shared function info however will not be
  // explicitly enqueued.
  SimulateIncrementalMarking();

  // Now enable the debugger which in turn will disable code flushing.
  CHECK(isolate->debug()->Load());

  // This cycle will bust the heap and subsequent cycles will go ballistic.
  heap->CollectAllGarbage(Heap::kNoGCFlags);
  heap->CollectAllGarbage(Heap::kNoGCFlags);
}
3741 3742 3743 3744 3745 3746 3747 3748 3749


// Object visitor that deliberately ignores every pointer range it is
// handed; used solely to drive iteration over deferred handle blocks.
class DummyVisitor : public ObjectVisitor {
 public:
  void VisitPointers(Object** first, Object** last) { }
};


// Checks that iterating the handle scope implementer is safe when the
// current handle block is completely full and a DeferredHandleScope is open.
TEST(DeferredHandles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate));
  HandleScopeData* data = isolate->handle_scope_data();
  Handle<Object> init(heap->empty_string(), isolate);
  // Fill the current handle block to its limit.
  while (data->next < data->limit) {
    Handle<Object> obj(heap->empty_string(), isolate);
  }
  // An entire block of handles has been filled.
  // Next handle would require a new block.
  ASSERT(data->next == data->limit);

  DeferredHandleScope deferred(isolate);
  DummyVisitor visitor;
  isolate->handle_scope_implementer()->Iterate(&visitor);
  delete deferred.Detach();
}
3768 3769 3770 3771 3772 3773 3774 3775 3776 3777


// A single large incremental marking step should be able to finish marking
// a heap dominated by one huge array.
TEST(IncrementalMarkingStepMakesBigProgressWithLargeObjects) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  CompileRun("function f(n) {"
             "    var a = new Array(n);"
             "    for (var i = 0; i < n; i += 100) a[i] = i;"
             "};"
             "f(10 * 1024 * 1024);");
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  if (marking->IsStopped()) marking->Start();
  // This big step should be sufficient to mark the whole array.
  marking->Step(100 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
  ASSERT(marking->IsComplete());
}
3784 3785 3786 3787 3788 3789 3790 3791 3792 3793 3794 3795 3796 3797 3798 3799 3800 3801 3802 3803 3804 3805 3806 3807 3808 3809 3810 3811 3812


// Runs the same allocation-heavy optimized code with inline allocation
// enabled, disabled, and re-enabled; the test passes if no run crashes.
TEST(DisableInlineAllocation) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  CompileRun("function test() {"
             "  var x = [];"
             "  for (var i = 0; i < 10; i++) {"
             "    x[i] = [ {}, [1,2,3], [1,x,3] ];"
             "  }"
             "}"
             "function run() {"
             "  %OptimizeFunctionOnNextCall(test);"
             "  test();"
             "  %DeoptimizeFunction(test);"
             "}");

  // Warm-up with inline allocation enabled.
  CompileRun("test(); test(); run();");

  // Run test with inline allocation disabled.
  CcTest::heap()->DisableInlineAllocation();
  CompileRun("run()");

  // Run test with inline allocation re-enabled.
  CcTest::heap()->EnableInlineAllocation();
  CompileRun("run()");
}
3813 3814 3815 3816 3817 3818 3819 3820 3821 3822 3823 3824 3825 3826 3827 3828 3829 3830 3831 3832 3833


// Walks the heap's weak list of AllocationSites (terminated by undefined)
// and returns its length.
static int AllocationSitesCount(Heap* heap) {
  int count = 0;
  Object* current = heap->allocation_sites_list();
  while (!current->IsUndefined()) {
    count++;
    current = AllocationSite::cast(current)->weak_next();
  }
  return count;
}


// Checks that code registered in an AllocationSite's dependent_code() is
// held weakly: after the function dies, the dependent-code slot is cleared
// even though the site itself is kept alive via a global handle.
TEST(EnsureAllocationSiteDependentCodesProcessed) {
  if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();
  GlobalHandles* global_handles = isolate->global_handles();

  if (!isolate->use_crankshaft()) return;

  // The allocation site at the head of the list is ours.
  Handle<AllocationSite> site;
  {
    LocalContext context;
    v8::HandleScope scope(context->GetIsolate());

    int count = AllocationSitesCount(heap);
    CompileRun("var bar = function() { return (new Array()); };"
               "var a = bar();"
               "bar();"
               "bar();");

    // One allocation site should have been created.
    int new_count = AllocationSitesCount(heap);
    CHECK_EQ(new_count, (count + 1));
    site = Handle<AllocationSite>::cast(
        global_handles->Create(
            AllocationSite::cast(heap->allocation_sites_list())));

    CompileRun("%OptimizeFunctionOnNextCall(bar); bar();");

    // Optimizing bar should have registered it as dependent code on the
    // site's transition-changed group.
    DependentCode::GroupStartIndexes starts(site->dependent_code());
    CHECK_GE(starts.number_of_entries(), 1);
    int index = starts.at(DependentCode::kAllocationSiteTransitionChangedGroup);
    CHECK(site->dependent_code()->is_code_at(index));
    Code* function_bar = site->dependent_code()->code_at(index);
    Handle<JSFunction> bar_handle =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                CcTest::global()->Get(v8_str("bar"))));
    CHECK_EQ(bar_handle->code(), function_bar);
  }

  // Now make sure that a gc should get rid of the function, even though we
  // still have the allocation site alive.
  for (int i = 0; i < 4; i++) {
    heap->CollectAllGarbage(Heap::kNoGCFlags);
  }

  // The site still exists because of our global handle, but the code is no
  // longer referred to by dependent_code().
  DependentCode::GroupStartIndexes starts(site->dependent_code());
  int index = starts.at(DependentCode::kAllocationSiteTransitionChangedGroup);
  CHECK(!(site->dependent_code()->is_code_at(index)));
}
3881 3882 3883 3884 3885 3886 3887 3888 3889 3890 3891 3892 3893 3894 3895 3896 3897 3898 3899 3900 3901 3902 3903 3904 3905 3906 3907 3908 3909 3910 3911 3912 3913 3914 3915 3916 3917 3918 3919 3920 3921 3922 3923 3924 3925 3926 3927 3928 3929 3930 3931 3932 3933 3934 3935 3936 3937 3938 3939 3940 3941 3942 3943 3944 3945 3946 3947 3948 3949 3950 3951 3952 3953 3954 3955 3956 3957 3958 3959 3960 3961 3962 3963 3964


// Checks that property cells embedded in optimized code are treated as weak:
// once only the Code handle keeps the closure alive, repeated full GCs must
// mark the code for deoptimization.
TEST(CellsInOptimizedCodeAreWeak) {
  if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
  i::FLAG_weak_embedded_objects_in_optimized_code = true;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_crankshaft()) return;
  HandleScope outer_scope(heap->isolate());
  Handle<Code> code;
  {
    LocalContext context;
    HandleScope scope(heap->isolate());

    CompileRun("bar = (function() {"
               "  function bar() {"
               "    return foo(1);"
               "  };"
               "  var foo = function(x) { with (x) { return 1 + x; } };"
               "  bar(foo);"
               "  bar(foo);"
               "  bar(foo);"
               "  %OptimizeFunctionOnNextCall(bar);"
               "  bar(foo);"
               "  return bar;})();");

    Handle<JSFunction> bar =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                CcTest::global()->Get(v8_str("bar"))));
    // Keep only the optimized code alive; the function itself may die.
    code = scope.CloseAndEscape(Handle<Code>(bar->code()));
  }

  // Now make sure that a gc should get rid of the function
  for (int i = 0; i < 4; i++) {
    heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  }

  ASSERT(code->marked_for_deoptimization());
}


// Checks that heap objects embedded in optimized code are treated as weak:
// once only the Code handle keeps the closure alive, repeated full GCs must
// mark the code for deoptimization.
TEST(ObjectsInOptimizedCodeAreWeak) {
  if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
  i::FLAG_weak_embedded_objects_in_optimized_code = true;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_crankshaft()) return;
  HandleScope outer_scope(heap->isolate());
  Handle<Code> code;
  {
    LocalContext context;
    HandleScope scope(heap->isolate());

    CompileRun("function bar() {"
               "  return foo(1);"
               "};"
               "function foo(x) { with (x) { return 1 + x; } };"
               "bar();"
               "bar();"
               "bar();"
               "%OptimizeFunctionOnNextCall(bar);"
               "bar();");

    Handle<JSFunction> bar =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                CcTest::global()->Get(v8_str("bar"))));
    // Keep only the optimized code alive; the function itself may die.
    code = scope.CloseAndEscape(Handle<Code>(bar->code()));
  }

  // Now make sure that a gc should get rid of the function
  for (int i = 0; i < 4; i++) {
    heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  }

  ASSERT(code->marked_for_deoptimization());
}
3965 3966


3967 3968 3969 3970 3971 3972 3973 3974 3975 3976 3977 3978 3979 3980 3981 3982 3983 3984
// Checks that the weak object-to-code hash table does not accumulate stale
// entries when contexts are repeatedly created and collected under
// incremental marking.
TEST(NoWeakHashTableLeakWithIncrementalMarking) {
  if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
  if (!i::FLAG_incremental_marking) return;
  i::FLAG_weak_embedded_objects_in_optimized_code = true;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_compilation_cache = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_crankshaft()) return;
  HandleScope outer_scope(heap->isolate());
  for (int i = 0; i < 3; i++) {
    SimulateIncrementalMarking();
    {
      LocalContext context;
      HandleScope scope(heap->isolate());
      EmbeddedVector<char, 256> source;
      // Each iteration compiles distinct function names so nothing is
      // shared between the short-lived contexts.
      SNPrintF(source,
               "function bar%d() {"
               "  return foo%d(1);"
               "};"
               "function foo%d(x) { with (x) { return 1 + x; } };"
               "bar%d();"
               "bar%d();"
               "bar%d();"
               "%%OptimizeFunctionOnNextCall(bar%d);"
               "bar%d();", i, i, i, i, i, i, i, i);
      CompileRun(source.start());
    }
    heap->CollectAllGarbage(i::Heap::kNoGCFlags);
  }
  // After the contexts died, the weak table must contain no entries.
  int elements = 0;
  if (heap->weak_object_to_code_table()->IsHashTable()) {
    WeakHashTable* t = WeakHashTable::cast(heap->weak_object_to_code_table());
    elements = t->NumberOfElements();
  }
  CHECK_EQ(0, elements);
}

4007 4008 4009

// Compiles, warms up, and optimizes a trivial global function called |name|,
// then returns a handle to the resulting JSFunction.
static Handle<JSFunction> OptimizeDummyFunction(const char* name) {
  EmbeddedVector<char, 256> source;
  SNPrintF(source,
          "function %s() { return 0; }"
          "%s(); %s();"
          "%%OptimizeFunctionOnNextCall(%s);"
          "%s();", name, name, name, name, name);
  CompileRun(source.start());
  Handle<JSFunction> fun =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Function>::Cast(
              CcTest::global()->Get(v8_str(name))));
  return fun;
}


// Counts how many Code objects follow |code| on its next_code_link chain
// (the chain ends at the first non-Code link).
static int GetCodeChainLength(Code* code) {
  int length = 0;
  for (Object* next = code->next_code_link(); next->IsCode();
       next = Code::cast(next)->next_code_link()) {
    length++;
  }
  return length;
}


// Checks that next_code_link references between optimized Code objects are
// weak: code reachable only through the link of a dead function must be
// collected, shortening the chain by one.
TEST(NextCodeLinkIsWeak) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_crankshaft()) return;
  HandleScope outer_scope(heap->isolate());
  Handle<Code> code;
  heap->CollectAllAvailableGarbage();
  int code_chain_length_before, code_chain_length_after;
  {
    HandleScope scope(heap->isolate());
    Handle<JSFunction> mortal = OptimizeDummyFunction("mortal");
    Handle<JSFunction> immortal = OptimizeDummyFunction("immortal");
    CHECK_EQ(immortal->code()->next_code_link(), mortal->code());
    code_chain_length_before = GetCodeChainLength(immortal->code());
    // Keep the immortal code and let the mortal code die.
    code = scope.CloseAndEscape(Handle<Code>(immortal->code()));
    CompileRun("mortal = null; immortal = null;");
  }
  heap->CollectAllAvailableGarbage();
  // Now mortal code should be dead.
  code_chain_length_after = GetCodeChainLength(*code);
  CHECK_EQ(code_chain_length_before - 1, code_chain_length_after);
}


// Assembles a minimal Code object flagged OPTIMIZED_FUNCTION (a push/drop
// pair), for tests that need a standalone optimized-code placeholder.
static Handle<Code> DummyOptimizedCode(Isolate* isolate) {
  i::byte buffer[i::Assembler::kMinimalBufferSize];
  MacroAssembler masm(isolate, buffer, sizeof(buffer));
  CodeDesc desc;
  masm.Push(isolate->factory()->undefined_value());
  masm.Drop(1);
  masm.GetCode(&desc);
  Handle<Object> undefined(isolate->heap()->undefined_value(), isolate);
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::ComputeFlags(Code::OPTIMIZED_FUNCTION), undefined);
  CHECK(code->IsCode());
  return code;
}


// Same weak-link property as NextCodeLinkIsWeak, but exercised on the
// context's OPTIMIZED_CODE_LIST with hand-built dummy Code objects: the
// mortal middle element must be unlinked by the GC.
TEST(NextCodeLinkIsWeak2) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_crankshaft()) return;
  HandleScope outer_scope(heap->isolate());
  heap->CollectAllAvailableGarbage();
  Handle<Context> context(Context::cast(heap->native_contexts_list()), isolate);
  Handle<Code> new_head;
  Handle<Object> old_head(context->get(Context::OPTIMIZED_CODE_LIST), isolate);
  {
    HandleScope scope(heap->isolate());
    Handle<Code> immortal = DummyOptimizedCode(isolate);
    Handle<Code> mortal = DummyOptimizedCode(isolate);
    // Build the chain: immortal -> mortal -> old_head, with only the
    // immortal head escaping this scope.
    mortal->set_next_code_link(*old_head);
    immortal->set_next_code_link(*mortal);
    context->set(Context::OPTIMIZED_CODE_LIST, *immortal);
    new_head = scope.CloseAndEscape(immortal);
  }
  heap->CollectAllAvailableGarbage();
  // Now mortal code should be dead.
  CHECK_EQ(*old_head, new_head->next_code_link());
}


4104 4105 4106 4107 4108 4109 4110 4111 4112 4113 4114 4115 4116 4117 4118 4119 4120 4121 4122 4123 4124 4125 4126 4127 4128 4129 4130 4131 4132 4133 4134 4135 4136 4137 4138 4139 4140 4141 4142 4143 4144 4145 4146 4147 4148 4149 4150 4151 4152 4153 4154 4155 4156 4157 4158 4159 4160 4161 4162 4163 4164 4165 4166 4167 4168 4169 4170 4171 4172 4173 4174 4175 4176 4177 4178 4179 4180 4181 4182 4183 4184 4185 4186 4187 4188 4189 4190 4191 4192 4193 4194 4195 4196 4197 4198 4199 4200 4201 4202 4203 4204 4205 4206 4207 4208 4209 4210 4211
// Set by ClearWeakIC so tests can observe that the weak callback ran.
static bool weak_ic_cleared = false;


// Weak callback: records that it fired, verifies the handle is about to die,
// and disposes the persistent handle passed in as the callback parameter.
static void ClearWeakIC(const v8::WeakCallbackData<v8::Object, void>& data) {
  printf("clear weak is called\n");
  weak_ic_cleared = true;
  v8::Persistent<v8::Value>* handle =
      reinterpret_cast<v8::Persistent<v8::Value>*>(data.GetParameter());
  CHECK(handle->IsNearDeath());
  handle->Reset();
}


// Checks that the value returned by execution of the source is weak.
void CheckWeakness(const char* source) {
  i::FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope scope(isolate);
  v8::Persistent<v8::Object> garbage;
  {
    v8::HandleScope scope(isolate);
    garbage.Reset(isolate, CompileRun(source)->ToObject());
  }
  weak_ic_cleared = false;
  garbage.SetWeak(static_cast<void*>(&garbage), &ClearWeakIC);
  Heap* heap = CcTest::i_isolate()->heap();
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CHECK(weak_ic_cleared);
}


// Each of the following "weak IC" tests creates an IC that embeds a map with
// the prototype pointing to _proto_ and checks that the _proto_ dies on GC.
// A monomorphic LoadIC embeds the map of |obj|; that map's reference to the
// prototype |proto| must be weak, so |proto| dies once the closure result is
// only weakly held (verified by CheckWeakness).
TEST(WeakMapInMonomorphicLoadIC) {
  CheckWeakness("function loadIC(obj) {"
                "  return obj.name;"
                "}"
                " (function() {"
                "   var proto = {'name' : 'weak'};"
                "   var obj = Object.create(proto);"
                // Repeated calls let the IC pick up obj's map.
                "   loadIC(obj);"
                "   loadIC(obj);"
                "   loadIC(obj);"
                "   return proto;"
                " })();");
}


// Same check for a monomorphic KeyedLoadIC: the embedded map must hold the
// prototype |proto| weakly (verified by CheckWeakness).
TEST(WeakMapInMonomorphicKeyedLoadIC) {
  CheckWeakness("function keyedLoadIC(obj, field) {"
                "  return obj[field];"
                "}"
                " (function() {"
                "   var proto = {'name' : 'weak'};"
                "   var obj = Object.create(proto);"
                // Repeated calls let the IC pick up obj's map.
                "   keyedLoadIC(obj, 'name');"
                "   keyedLoadIC(obj, 'name');"
                "   keyedLoadIC(obj, 'name');"
                "   return proto;"
                " })();");
}


// Same check for a monomorphic StoreIC: the embedded map must hold the
// prototype |proto| weakly (verified by CheckWeakness).
TEST(WeakMapInMonomorphicStoreIC) {
  CheckWeakness("function storeIC(obj, value) {"
                "  obj.name = value;"
                "}"
                " (function() {"
                "   var proto = {'name' : 'weak'};"
                "   var obj = Object.create(proto);"
                // Repeated calls let the IC pick up obj's map.
                "   storeIC(obj, 'x');"
                "   storeIC(obj, 'x');"
                "   storeIC(obj, 'x');"
                "   return proto;"
                " })();");
}


// Same check for a monomorphic KeyedStoreIC: the embedded map must hold the
// prototype |proto| weakly (verified by CheckWeakness).
// NOTE(review): keyedStoreIC is declared with three parameters but invoked
// with only two, so it stores undefined into obj['x']. A keyed store IC is
// still exercised, but confirm the missing value argument is intentional.
TEST(WeakMapInMonomorphicKeyedStoreIC) {
  CheckWeakness("function keyedStoreIC(obj, field, value) {"
                "  obj[field] = value;"
                "}"
                " (function() {"
                "   var proto = {'name' : 'weak'};"
                "   var obj = Object.create(proto);"
                "   keyedStoreIC(obj, 'x');"
                "   keyedStoreIC(obj, 'x');"
                "   keyedStoreIC(obj, 'x');"
                "   return proto;"
                " })();");
}


// Same check for a monomorphic CompareNilIC: the embedded map must hold the
// prototype |proto| weakly (verified by CheckWeakness).
TEST(WeakMapInMonomorphicCompareNilIC) {
  CheckWeakness("function compareNilIC(obj) {"
                "  return obj == null;"
                "}"
                " (function() {"
                "   var proto = {'name' : 'weak'};"
                "   var obj = Object.create(proto);"
                // Repeated calls let the IC pick up obj's map.
                "   compareNilIC(obj);"
                "   compareNilIC(obj);"
                "   compareNilIC(obj);"
                "   return proto;"
                " })();");
}


#ifdef DEBUG
// Regression-style test: calling an optimized function while allocation is
// forced to fail quickly (allocation_timeout = 1, inline allocation off)
// must not crash, and a final full GC must leave the heap consistent.
TEST(AddInstructionChangesNewSpacePromotion) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  i::FLAG_stress_compaction = true;
  i::FLAG_gc_interval = 1000;
  CcTest::InitializeVM();
  if (!i::FLAG_allocation_site_pretenuring) return;
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();

  // Warm up and then optimize crash(), which stores a freshly boxed double
  // into an object that escapes to old space.
  CompileRun(
      "function add(a, b) {"
      "  return a + b;"
      "}"
      "add(1, 2);"
      "add(\"a\", \"b\");"
      "var oldSpaceObject;"
      "gc();"
      "function crash(x) {"
      "  var object = {a: null, b: null};"
      "  var result = add(1.5, x | 0);"
      "  object.a = result;"
      "  oldSpaceObject = object;"
      "  return object;"
      "}"
      "crash(1);"
      "crash(1);"
      "%OptimizeFunctionOnNextCall(crash);"
      "crash(1);");

  v8::Handle<v8::Object> global = CcTest::global();
  v8::Handle<v8::Function> crash_function =
      v8::Handle<v8::Function>::Cast(global->Get(v8_str("crash")));
  v8::Handle<v8::Value> call_args[] = { v8_num(1) };
  heap->DisableInlineAllocation();
  heap->set_allocation_timeout(1);
  crash_function->Call(global, 1, call_args);
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
}


// Fatal-error handler for TEST(CEntryStubOOM): terminates the process with
// status 0 exactly when the fatal error happened at the expected location.
void OnFatalErrorExpectOOM(const char* location, const char* message) {
  // strcmp() returns 0 on a match, so a matching location exits cleanly and
  // anything else exits with a non-zero status.
  const int location_mismatch = strcmp(location, "CALL_AND_RETRY_LAST");
  exit(location_mismatch);
}


// Forces GCs on every allocation while running array code through the
// C entry stub; the fatal-error handler installed below converts the
// expected out-of-memory location into a clean exit.
TEST(CEntryStubOOM) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::V8::SetFatalErrorHandler(OnFatalErrorExpectOOM);

  v8::Handle<v8::Value> value = CompileRun(
      "%SetFlags('--gc-interval=1');"
      "var a = [];"
      "a.__proto__ = [];"
      "a.unshift(1)");

  // unshift returns the new length, so a number is expected on success.
  CHECK(value->IsNumber());
}

#endif  // DEBUG


static void InterruptCallback357137(v8::Isolate* isolate, void* data) { }


// JS-callable "interrupt" function: asks the isolate to invoke
// InterruptCallback357137 at its next interrupt check.
static void RequestInterrupt(const v8::FunctionCallbackInfo<v8::Value>&) {
  CcTest::isolate()->RequestInterrupt(&InterruptCallback357137, NULL);
}


// Regression test for issue 357137: an interrupt requested while compiling a
// function with many locals (which the comment below says fakes a stack
// overflow in f) must not break the resulting closure.
TEST(Regress357137) {
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope hscope(isolate);
  v8::Handle<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
  global->Set(v8::String::NewFromUtf8(isolate, "interrupt"),
              v8::FunctionTemplate::New(isolate, RequestInterrupt));
  v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
  // CHECK (not ASSERT): matches the rest of this file and stays active in
  // release builds, so a failed context creation cannot go unnoticed.
  CHECK(!context.IsEmpty());
  v8::Context::Scope cscope(context);

  v8::Local<v8::Value> result = CompileRun(
      "var locals = '';"
      "for (var i = 0; i < 512; i++) locals += 'var v' + i + '= 42;';"
      "eval('function f() {' + locals + 'return function() { return v0; }; }');"
      "interrupt();"  // This triggers a fake stack overflow in f.
      "f()()");
  CHECK_EQ(42.0, result->ToNumber()->Value());
}


// After Array.prototype.shift() trims the elements backing store in old
// space, the elements must either be on a page whose parallel sweeping has
// finished (so the trimmed gap is properly swept) or still be marked black.
// (This block also had interleaved line-number artifacts removed so it
// compiles again.)
TEST(ArrayShiftSweeping) {
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();

  // Two GCs promote |array| (and its elements) to old space before shift().
  v8::Local<v8::Value> result = CompileRun(
      "var array = new Array(40000);"
      "var tmp = new Array(100000);"
      "array[0] = 10;"
      "gc();"
      "gc();"
      "array.shift();"
      "array;");

  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(result));
  CHECK(heap->InOldPointerSpace(o->elements()));
  CHECK(heap->InOldPointerSpace(*o));
  Page* page = Page::FromAddress(o->elements()->address());
  CHECK(page->parallel_sweeping() <= MemoryChunk::PARALLEL_SWEEPING_FINALIZE ||
        Marking::IsBlack(Marking::MarkBitFrom(o->elements())));
}


// Verifies that the promotion queue (stored at the end of to-space) is
// evacuated rather than overwritten when a semi-space copy grows into it.
// The allocation sizes below are carefully tuned; see the step-by-step
// comment inside.
TEST(PromotionQueue) {
  i::FLAG_expose_gc = true;
  i::FLAG_max_semi_space_size = 2;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  NewSpace* new_space = heap->new_space();

  // In this test we will try to overwrite the promotion queue which is at the
  // end of to-space. To actually make that possible, we need at least two
  // semi-space pages and take advantage of fragmentation.
  // (1) Grow semi-space to two pages.
  // (2) Create a few small long living objects and call the scavenger to
  // move them to the other semi-space.
  // (3) Create a huge object, i.e., remainder of first semi-space page and
  // create another huge object which should be of maximum allocatable memory
  // size of the second semi-space page.
  // (4) Call the scavenger again.
  // What will happen is: the scavenger will promote the objects created in (2)
  // and will create promotion queue entries at the end of the second
  // semi-space page during the next scavenge when it promotes the objects to
  // the old generation. The first allocation of (3) will fill up the first
  // semi-space page. The second allocation in (3) will not fit into the first
  // semi-space page, but it will overwrite the promotion queue which are in
  // the second semi-space page. If the right guards are in place, the promotion
  // queue will be evacuated in that case.

  // Grow the semi-space to two pages to make semi-space copy overwrite the
  // promotion queue, which will be at the end of the second page.
  intptr_t old_capacity = new_space->Capacity();
  new_space->Grow();
  CHECK(new_space->IsAtMaximumCapacity());
  CHECK(2 * old_capacity == new_space->Capacity());

  // Call the scavenger two times to get an empty new space
  heap->CollectGarbage(NEW_SPACE);
  heap->CollectGarbage(NEW_SPACE);

  // First create a few objects which will survive a scavenge, and will get
  // promoted to the old generation later on. These objects will create
  // promotion queue entries at the end of the second semi-space page.
  const int number_handles = 12;
  Handle<FixedArray> handles[number_handles];
  for (int i = 0; i < number_handles; i++) {
    handles[i] = isolate->factory()->NewFixedArray(1, NOT_TENURED);
  }
  heap->CollectGarbage(NEW_SPACE);

  // Create the first huge object which will exactly fit the first semi-space
  // page.
  // NOTE(review): FixedArray::kHeaderSize (a byte count) is subtracted from a
  // count of pointer-sized words here and below, so the array is slightly
  // smaller than the remaining linear area -- confirm the slack is intended.
  int new_linear_size = static_cast<int>(
      *heap->new_space()->allocation_limit_address() -
          *heap->new_space()->allocation_top_address());
  int length = new_linear_size / kPointerSize - FixedArray::kHeaderSize;
  Handle<FixedArray> first =
    isolate->factory()->NewFixedArray(length, NOT_TENURED);
  CHECK(heap->InNewSpace(*first));

  // Create the second huge object of maximum allocatable second semi-space
  // page size.
  // NOTE(review): the recomputed new_linear_size is not used by the length
  // calculation below -- presumably informational; verify.
  new_linear_size = static_cast<int>(
      *heap->new_space()->allocation_limit_address() -
          *heap->new_space()->allocation_top_address());
  length = Page::kMaxRegularHeapObjectSize / kPointerSize -
      FixedArray::kHeaderSize;
  Handle<FixedArray> second =
      isolate->factory()->NewFixedArray(length, NOT_TENURED);
  CHECK(heap->InNewSpace(*second));

  // This scavenge will corrupt memory if the promotion queue is not evacuated.
  heap->CollectGarbage(NEW_SPACE);
}


#ifdef DEBUG
// Smoke test for the debug-only path tracer: tracing the retention path to a
// freshly created string must complete without crashing.
TEST(PathTracer) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  v8::Local<v8::Value> compile_result = CompileRun("'abc'");
  Handle<Object> target = v8::Utils::OpenHandle(*compile_result);
  CcTest::i_isolate()->heap()->TracePathToObject(*target);
}
#endif  // DEBUG