// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_HEAP_H_
#define V8_HEAP_HEAP_H_

#include <cmath>
#include <map>

// Clients of this interface shouldn't depend on lots of heap internals.
// Do not include anything from src/heap here!
#include "src/allocation.h"
#include "src/assert-scope.h"
#include "src/atomic-utils.h"
#include "src/globals.h"
// TODO(mstarzinger): Two more includes to kill!
#include "src/heap/spaces.h"
#include "src/heap/store-buffer.h"
#include "src/list.h"

namespace v8 {
namespace internal {

// Defines all the roots in Heap.
#define STRONG_ROOT_LIST(V)                                                    \
  V(Map, byte_array_map, ByteArrayMap)                                         \
  V(Map, free_space_map, FreeSpaceMap)                                         \
  V(Map, one_pointer_filler_map, OnePointerFillerMap)                          \
  V(Map, two_pointer_filler_map, TwoPointerFillerMap)                          \
  /* Cluster the most popular ones in a few cache lines here at the top.    */ \
  V(Smi, store_buffer_top, StoreBufferTop)                                     \
  V(Oddball, undefined_value, UndefinedValue)                                  \
  V(Oddball, the_hole_value, TheHoleValue)                                     \
  V(Oddball, null_value, NullValue)                                            \
  V(Oddball, true_value, TrueValue)                                            \
  V(Oddball, false_value, FalseValue)                                          \
  V(String, empty_string, empty_string)                                        \
  V(String, hidden_string, hidden_string)                                      \
  V(Oddball, uninitialized_value, UninitializedValue)                          \
  V(Map, cell_map, CellMap)                                                    \
  V(Map, global_property_cell_map, GlobalPropertyCellMap)                      \
  V(Map, shared_function_info_map, SharedFunctionInfoMap)                      \
  V(Map, meta_map, MetaMap)                                                    \
  V(Map, heap_number_map, HeapNumberMap)                                       \
  V(Map, mutable_heap_number_map, MutableHeapNumberMap)                        \
  V(Map, float32x4_map, Float32x4Map)                                          \
  V(Map, int32x4_map, Int32x4Map)                                              \
  V(Map, uint32x4_map, Uint32x4Map)                                            \
  V(Map, bool32x4_map, Bool32x4Map)                                            \
  V(Map, int16x8_map, Int16x8Map)                                              \
  V(Map, uint16x8_map, Uint16x8Map)                                            \
  V(Map, bool16x8_map, Bool16x8Map)                                            \
  V(Map, int8x16_map, Int8x16Map)                                              \
  V(Map, uint8x16_map, Uint8x16Map)                                            \
  V(Map, bool8x16_map, Bool8x16Map)                                            \
  V(Map, native_context_map, NativeContextMap)                                 \
  V(Map, fixed_array_map, FixedArrayMap)                                       \
  V(Map, code_map, CodeMap)                                                    \
  V(Map, scope_info_map, ScopeInfoMap)                                         \
  V(Map, fixed_cow_array_map, FixedCOWArrayMap)                                \
  V(Map, fixed_double_array_map, FixedDoubleArrayMap)                          \
  V(Map, weak_cell_map, WeakCellMap)                                           \
  V(Map, one_byte_string_map, OneByteStringMap)                                \
  V(Map, one_byte_internalized_string_map, OneByteInternalizedStringMap)       \
  V(Map, function_context_map, FunctionContextMap)                             \
  V(FixedArray, empty_fixed_array, EmptyFixedArray)                            \
  V(ByteArray, empty_byte_array, EmptyByteArray)                               \
  V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray)             \
  /* The roots above this line should be boring from a GC point of view.    */ \
  /* This means they are never in new space and never on a page that is     */ \
  /* being compacted.                                                       */ \
  V(Oddball, no_interceptor_result_sentinel, NoInterceptorResultSentinel)      \
  V(Oddball, arguments_marker, ArgumentsMarker)                                \
  V(Oddball, exception, Exception)                                             \
  V(Oddball, termination_exception, TerminationException)                      \
  V(FixedArray, number_string_cache, NumberStringCache)                        \
  V(Object, instanceof_cache_function, InstanceofCacheFunction)                \
  V(Object, instanceof_cache_map, InstanceofCacheMap)                          \
  V(Object, instanceof_cache_answer, InstanceofCacheAnswer)                    \
  V(FixedArray, single_character_string_cache, SingleCharacterStringCache)     \
  V(FixedArray, string_split_cache, StringSplitCache)                          \
  V(FixedArray, regexp_multiple_cache, RegExpMultipleCache)                    \
  V(Smi, hash_seed, HashSeed)                                                  \
  V(Map, hash_table_map, HashTableMap)                                         \
  V(Map, ordered_hash_table_map, OrderedHashTableMap)                          \
  V(Map, symbol_map, SymbolMap)                                                \
  V(Map, string_map, StringMap)                                                \
  V(Map, cons_one_byte_string_map, ConsOneByteStringMap)                       \
  V(Map, cons_string_map, ConsStringMap)                                       \
  V(Map, sliced_string_map, SlicedStringMap)                                   \
  V(Map, sliced_one_byte_string_map, SlicedOneByteStringMap)                   \
  V(Map, external_string_map, ExternalStringMap)                               \
  V(Map, external_string_with_one_byte_data_map,                               \
    ExternalStringWithOneByteDataMap)                                          \
  V(Map, external_one_byte_string_map, ExternalOneByteStringMap)               \
  V(Map, native_source_string_map, NativeSourceStringMap)                      \
  V(Map, short_external_string_map, ShortExternalStringMap)                    \
  V(Map, short_external_string_with_one_byte_data_map,                         \
    ShortExternalStringWithOneByteDataMap)                                     \
  V(Map, internalized_string_map, InternalizedStringMap)                       \
  V(Map, external_internalized_string_map, ExternalInternalizedStringMap)      \
  V(Map, external_internalized_string_with_one_byte_data_map,                  \
    ExternalInternalizedStringWithOneByteDataMap)                              \
  V(Map, external_one_byte_internalized_string_map,                            \
    ExternalOneByteInternalizedStringMap)                                      \
  V(Map, short_external_internalized_string_map,                               \
    ShortExternalInternalizedStringMap)                                        \
  V(Map, short_external_internalized_string_with_one_byte_data_map,            \
    ShortExternalInternalizedStringWithOneByteDataMap)                         \
  V(Map, short_external_one_byte_internalized_string_map,                      \
    ShortExternalOneByteInternalizedStringMap)                                 \
  V(Map, short_external_one_byte_string_map, ShortExternalOneByteStringMap)    \
  V(Map, fixed_uint8_array_map, FixedUint8ArrayMap)                            \
  V(Map, fixed_int8_array_map, FixedInt8ArrayMap)                              \
  V(Map, fixed_uint16_array_map, FixedUint16ArrayMap)                          \
  V(Map, fixed_int16_array_map, FixedInt16ArrayMap)                            \
  V(Map, fixed_uint32_array_map, FixedUint32ArrayMap)                          \
  V(Map, fixed_int32_array_map, FixedInt32ArrayMap)                            \
  V(Map, fixed_float32_array_map, FixedFloat32ArrayMap)                        \
  V(Map, fixed_float64_array_map, FixedFloat64ArrayMap)                        \
  V(Map, fixed_uint8_clamped_array_map, FixedUint8ClampedArrayMap)             \
  V(FixedTypedArrayBase, empty_fixed_uint8_array, EmptyFixedUint8Array)        \
  V(FixedTypedArrayBase, empty_fixed_int8_array, EmptyFixedInt8Array)          \
  V(FixedTypedArrayBase, empty_fixed_uint16_array, EmptyFixedUint16Array)      \
  V(FixedTypedArrayBase, empty_fixed_int16_array, EmptyFixedInt16Array)        \
  V(FixedTypedArrayBase, empty_fixed_uint32_array, EmptyFixedUint32Array)      \
  V(FixedTypedArrayBase, empty_fixed_int32_array, EmptyFixedInt32Array)        \
  V(FixedTypedArrayBase, empty_fixed_float32_array, EmptyFixedFloat32Array)    \
  V(FixedTypedArrayBase, empty_fixed_float64_array, EmptyFixedFloat64Array)    \
  V(FixedTypedArrayBase, empty_fixed_uint8_clamped_array,                      \
    EmptyFixedUint8ClampedArray)                                               \
  V(Map, sloppy_arguments_elements_map, SloppyArgumentsElementsMap)            \
  V(Map, catch_context_map, CatchContextMap)                                   \
  V(Map, with_context_map, WithContextMap)                                     \
  V(Map, block_context_map, BlockContextMap)                                   \
  V(Map, module_context_map, ModuleContextMap)                                 \
  V(Map, script_context_map, ScriptContextMap)                                 \
  V(Map, script_context_table_map, ScriptContextTableMap)                      \
  V(Map, undefined_map, UndefinedMap)                                          \
  V(Map, the_hole_map, TheHoleMap)                                             \
  V(Map, null_map, NullMap)                                                    \
  V(Map, boolean_map, BooleanMap)                                              \
  V(Map, uninitialized_map, UninitializedMap)                                  \
  V(Map, arguments_marker_map, ArgumentsMarkerMap)                             \
  V(Map, no_interceptor_result_sentinel_map, NoInterceptorResultSentinelMap)   \
  V(Map, exception_map, ExceptionMap)                                          \
  V(Map, termination_exception_map, TerminationExceptionMap)                   \
  V(Map, message_object_map, JSMessageObjectMap)                               \
  V(Map, foreign_map, ForeignMap)                                              \
  V(Map, neander_map, NeanderMap)                                              \
  V(Map, external_map, ExternalMap)                                            \
  V(HeapNumber, nan_value, NanValue)                                           \
  V(HeapNumber, infinity_value, InfinityValue)                                 \
  V(HeapNumber, minus_zero_value, MinusZeroValue)                              \
  V(HeapNumber, minus_infinity_value, MinusInfinityValue)                      \
  V(JSObject, message_listeners, MessageListeners)                             \
  V(UnseededNumberDictionary, code_stubs, CodeStubs)                           \
  V(UnseededNumberDictionary, non_monomorphic_cache, NonMonomorphicCache)      \
  V(PolymorphicCodeCache, polymorphic_code_cache, PolymorphicCodeCache)        \
  V(Code, js_entry_code, JsEntryCode)                                          \
  V(Code, js_construct_entry_code, JsConstructEntryCode)                       \
  V(FixedArray, natives_source_cache, NativesSourceCache)                      \
  V(FixedArray, experimental_natives_source_cache,                             \
    ExperimentalNativesSourceCache)                                            \
  V(FixedArray, extra_natives_source_cache, ExtraNativesSourceCache)           \
  V(FixedArray, experimental_extra_natives_source_cache,                       \
    ExperimentalExtraNativesSourceCache)                                       \
  V(FixedArray, code_stub_natives_source_cache, CodeStubNativesSourceCache)    \
  V(Script, empty_script, EmptyScript)                                         \
  V(NameDictionary, intrinsic_function_names, IntrinsicFunctionNames)          \
  V(Cell, undefined_cell, UndefinedCell)                                       \
  V(JSObject, observation_state, ObservationState)                             \
  V(Object, symbol_registry, SymbolRegistry)                                   \
  V(Object, script_list, ScriptList)                                           \
  V(SeededNumberDictionary, empty_slow_element_dictionary,                     \
    EmptySlowElementDictionary)                                                \
  V(FixedArray, materialized_objects, MaterializedObjects)                     \
  V(FixedArray, allocation_sites_scratchpad, AllocationSitesScratchpad)        \
  V(FixedArray, microtask_queue, MicrotaskQueue)                               \
  V(TypeFeedbackVector, dummy_vector, DummyVector)                             \
  V(FixedArray, detached_contexts, DetachedContexts)                           \
  V(ArrayList, retained_maps, RetainedMaps)                                    \
  V(WeakHashTable, weak_object_to_code_table, WeakObjectToCodeTable)           \
  V(PropertyCell, array_protector, ArrayProtector)                             \
  V(PropertyCell, empty_property_cell, EmptyPropertyCell)                      \
  V(Object, weak_stack_trace_list, WeakStackTraceList)                         \
  V(Object, code_stub_context, CodeStubContext)                                \
  V(JSObject, code_stub_exports_object, CodeStubExportsObject)                 \
  V(FixedArray, interpreter_table, InterpreterTable)                           \
  V(Map, bytecode_array_map, BytecodeArrayMap)                                 \
  V(BytecodeArray, empty_bytecode_array, EmptyBytecodeArray)


// Entries in this list are limited to Smis and are not visited during GC.
#define SMI_ROOT_LIST(V)                                                   \
  V(Smi, stack_limit, StackLimit)                                          \
  V(Smi, real_stack_limit, RealStackLimit)                                 \
  V(Smi, last_script_id, LastScriptId)                                     \
  V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset) \
  V(Smi, construct_stub_deopt_pc_offset, ConstructStubDeoptPCOffset)       \
  V(Smi, getter_stub_deopt_pc_offset, GetterStubDeoptPCOffset)             \
  V(Smi, setter_stub_deopt_pc_offset, SetterStubDeoptPCOffset)


#define ROOT_LIST(V)  \
  STRONG_ROOT_LIST(V) \
  SMI_ROOT_LIST(V)    \
  V(StringTable, string_table, StringTable)
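
// For illustration: each list above is an X-macro. A caller supplies its own
// macro as V, and the list stamps out one entry per root. With
// ROOT_INDEX_DECLARATION (defined in class Heap below), the entry
// V(Map, byte_array_map, ByteArrayMap) expands to
//
//   kByteArrayMapRootIndex,
//
// and with the ROOT_ACCESSOR pattern it expands to
//
//   inline Map* byte_array_map();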

#define INTERNALIZED_STRING_LIST(V)                        \
  V(Object_string, "Object")                               \
  V(proto_string, "__proto__")                             \
  V(arguments_string, "arguments")                         \
  V(Arguments_string, "Arguments")                         \
  V(caller_string, "caller")                               \
  V(boolean_string, "boolean")                             \
  V(Boolean_string, "Boolean")                             \
  V(callee_string, "callee")                               \
  V(configurable_string, "configurable")                   \
  V(constructor_string, "constructor")                     \
  V(default_string, "default")                             \
  V(dot_result_string, ".result")                          \
  V(enumerable_string, "enumerable")                       \
  V(eval_string, "eval")                                   \
  V(float32x4_string, "float32x4")                         \
  V(Float32x4_string, "Float32x4")                         \
  V(int32x4_string, "int32x4")                             \
  V(Int32x4_string, "Int32x4")                             \
  V(uint32x4_string, "uint32x4")                           \
  V(Uint32x4_string, "Uint32x4")                           \
  V(bool32x4_string, "bool32x4")                           \
  V(Bool32x4_string, "Bool32x4")                           \
  V(int16x8_string, "int16x8")                             \
  V(Int16x8_string, "Int16x8")                             \
  V(uint16x8_string, "uint16x8")                           \
  V(Uint16x8_string, "Uint16x8")                           \
  V(bool16x8_string, "bool16x8")                           \
  V(Bool16x8_string, "Bool16x8")                           \
  V(int8x16_string, "int8x16")                             \
  V(Int8x16_string, "Int8x16")                             \
  V(uint8x16_string, "uint8x16")                           \
  V(Uint8x16_string, "Uint8x16")                           \
  V(bool8x16_string, "bool8x16")                           \
  V(Bool8x16_string, "Bool8x16")                           \
  V(function_string, "function")                           \
  V(Function_string, "Function")                           \
  V(get_string, "get")                                     \
  V(length_string, "length")                               \
  V(name_string, "name")                                   \
  V(null_string, "null")                                   \
  V(number_string, "number")                               \
  V(Number_string, "Number")                               \
  V(nan_string, "NaN")                                     \
  V(set_string, "set")                                     \
  V(source_string, "source")                               \
  V(source_url_string, "source_url")                       \
  V(source_mapping_url_string, "source_mapping_url")       \
  V(this_string, "this")                                   \
  V(writable_string, "writable")                           \
  V(global_string, "global")                               \
  V(ignore_case_string, "ignoreCase")                      \
  V(multiline_string, "multiline")                         \
  V(sticky_string, "sticky")                               \
  V(unicode_string, "unicode")                             \
  V(harmony_tolength_string, "harmony_tolength")           \
  V(input_string, "input")                                 \
  V(index_string, "index")                                 \
  V(last_index_string, "lastIndex")                        \
  V(object_string, "object")                               \
  V(prototype_string, "prototype")                         \
  V(string_string, "string")                               \
  V(String_string, "String")                               \
  V(symbol_string, "symbol")                               \
  V(Symbol_string, "Symbol")                               \
  V(Map_string, "Map")                                     \
  V(Set_string, "Set")                                     \
  V(WeakMap_string, "WeakMap")                             \
  V(WeakSet_string, "WeakSet")                             \
  V(for_string, "for")                                     \
  V(for_api_string, "for_api")                             \
  V(Date_string, "Date")                                   \
  V(char_at_string, "CharAt")                              \
  V(undefined_string, "undefined")                         \
  V(valueOf_string, "valueOf")                             \
  V(stack_string, "stack")                                 \
  V(toString_string, "toString")                           \
  V(toJSON_string, "toJSON")                               \
  V(KeyedLoadMonomorphic_string, "KeyedLoadMonomorphic")   \
  V(KeyedStoreMonomorphic_string, "KeyedStoreMonomorphic") \
  V(illegal_access_string, "illegal access")               \
  V(cell_value_string, "%cell_value")                      \
  V(illegal_argument_string, "illegal argument")           \
  V(closure_string, "(closure)")                           \
  V(dot_string, ".")                                       \
  V(compare_ic_string, "==")                               \
  V(strict_compare_ic_string, "===")                       \
  V(infinity_string, "Infinity")                           \
  V(minus_infinity_string, "-Infinity")                    \
  V(query_colon_string, "(?:)")                            \
  V(Generator_string, "Generator")                         \
  V(throw_string, "throw")                                 \
  V(done_string, "done")                                   \
  V(value_string, "value")                                 \
  V(next_string, "next")                                   \
  V(byte_length_string, "byteLength")                      \
  V(byte_offset_string, "byteOffset")                      \
  V(minus_zero_string, "-0")                               \
  V(Array_string, "Array")                                 \
  V(Error_string, "Error")                                 \
  V(RegExp_string, "RegExp")                               \
  V(anonymous_string, "anonymous")

#define PRIVATE_SYMBOL_LIST(V)              \
  V(array_iteration_kind_symbol)            \
  V(array_iterator_next_symbol)             \
  V(array_iterator_object_symbol)           \
  V(call_site_function_symbol)              \
  V(call_site_position_symbol)              \
  V(call_site_receiver_symbol)              \
  V(call_site_strict_symbol)                \
  V(class_end_position_symbol)              \
  V(class_start_position_symbol)            \
  V(detailed_stack_trace_symbol)            \
  V(elements_transition_symbol)             \
  V(error_end_pos_symbol)                   \
  V(error_script_symbol)                    \
  V(error_start_pos_symbol)                 \
  V(formatted_stack_trace_symbol)           \
  V(frozen_symbol)                          \
  V(hash_code_symbol)                       \
  V(home_object_symbol)                     \
  V(internal_error_symbol)                  \
  V(intl_impl_object_symbol)                \
  V(intl_initialized_marker_symbol)         \
  V(megamorphic_symbol)                     \
  V(nonexistent_symbol)                     \
  V(nonextensible_symbol)                   \
  V(normal_ic_symbol)                       \
  V(observed_symbol)                        \
  V(premonomorphic_symbol)                  \
  V(promise_debug_marker_symbol)            \
  V(promise_has_handler_symbol)             \
  V(promise_on_resolve_symbol)              \
  V(promise_on_reject_symbol)               \
  V(promise_raw_symbol)                     \
  V(promise_status_symbol)                  \
  V(promise_value_symbol)                   \
  V(sealed_symbol)                          \
  V(stack_trace_symbol)                     \
  V(string_iterator_iterated_string_symbol) \
  V(string_iterator_next_index_symbol)      \
  V(uninitialized_symbol)

#define PUBLIC_SYMBOL_LIST(V)                               \
  V(has_instance_symbol, Symbol.hasInstance)                \
  V(is_regexp_symbol, Symbol.isRegExp)                      \
  V(iterator_symbol, Symbol.iterator)                       \
  V(to_primitive_symbol, Symbol.toPrimitive)                \
  V(to_string_tag_symbol, Symbol.toStringTag)               \
  V(unscopables_symbol, Symbol.unscopables)

// Well-Known Symbols are "Public" symbols, which have a bit set which causes
// them to produce an undefined value when a load results in a failed access
// check. Because this behaviour is not specified properly as of yet, it only
// applies to a subset of spec-defined Well-Known Symbols.
#define WELL_KNOWN_SYMBOL_LIST(V) \
  V(is_concat_spreadable_symbol, Symbol.isConcatSpreadable)

// Heap roots that are known to be immortal immovable, for which we can safely
// skip write barriers. This list is not complete and has omissions.
#define IMMORTAL_IMMOVABLE_ROOT_LIST(V) \
  V(ByteArrayMap)                       \
  V(BytecodeArrayMap)                   \
  V(FreeSpaceMap)                       \
  V(OnePointerFillerMap)                \
  V(TwoPointerFillerMap)                \
  V(UndefinedValue)                     \
  V(TheHoleValue)                       \
  V(NullValue)                          \
  V(TrueValue)                          \
  V(FalseValue)                         \
  V(UninitializedValue)                 \
  V(CellMap)                            \
  V(GlobalPropertyCellMap)              \
  V(SharedFunctionInfoMap)              \
  V(MetaMap)                            \
  V(HeapNumberMap)                      \
  V(MutableHeapNumberMap)               \
  V(Float32x4Map)                       \
  V(Int32x4Map)                         \
  V(Uint32x4Map)                        \
  V(Bool32x4Map)                        \
  V(Int16x8Map)                         \
  V(Uint16x8Map)                        \
  V(Bool16x8Map)                        \
  V(Int8x16Map)                         \
  V(Uint8x16Map)                        \
  V(Bool8x16Map)                        \
  V(NativeContextMap)                   \
  V(FixedArrayMap)                      \
  V(CodeMap)                            \
  V(ScopeInfoMap)                       \
  V(FixedCOWArrayMap)                   \
  V(FixedDoubleArrayMap)                \
  V(WeakCellMap)                        \
  V(NoInterceptorResultSentinel)        \
  V(HashTableMap)                       \
  V(OrderedHashTableMap)                \
  V(EmptyFixedArray)                    \
  V(EmptyByteArray)                     \
  V(EmptyBytecodeArray)                 \
  V(EmptyDescriptorArray)               \
  V(ArgumentsMarker)                    \
  V(SymbolMap)                          \
  V(SloppyArgumentsElementsMap)         \
  V(FunctionContextMap)                 \
  V(CatchContextMap)                    \
  V(WithContextMap)                     \
  V(BlockContextMap)                    \
  V(ModuleContextMap)                   \
  V(ScriptContextMap)                   \
  V(UndefinedMap)                       \
  V(TheHoleMap)                         \
  V(NullMap)                            \
  V(BooleanMap)                         \
  V(UninitializedMap)                   \
  V(ArgumentsMarkerMap)                 \
  V(JSMessageObjectMap)                 \
  V(ForeignMap)                         \
  V(NeanderMap)                         \
  V(empty_string)                       \
  PRIVATE_SYMBOL_LIST(V)

// Forward declarations.
class ArrayBufferTracker;
class GCIdleTimeAction;
class GCIdleTimeHandler;
class GCIdleTimeHeapState;
class GCTracer;
class HeapObjectsFilter;
class HeapStats;
class HistogramTimer;
class Isolate;
class MemoryReducer;
class ObjectStats;
class Scavenger;
class ScavengeJob;
class WeakObjectRetainer;


// A queue of objects promoted during scavenge. Each object is accompanied
// by its size to avoid dereferencing a map pointer for scanning.
// The last page in to-space is used for the promotion queue. On conflict
// during scavenge, the promotion queue is allocated externally and all
// entries are copied to the external queue.
class PromotionQueue {
 public:
  explicit PromotionQueue(Heap* heap)
      : front_(NULL),
        rear_(NULL),
        limit_(NULL),
        emergency_stack_(0),
        heap_(heap) {}

  void Initialize();

  void Destroy() {
    DCHECK(is_empty());
    delete emergency_stack_;
    emergency_stack_ = NULL;
  }

  Page* GetHeadPage() {
    return Page::FromAllocationTop(reinterpret_cast<Address>(rear_));
  }

  void SetNewLimit(Address limit) {
    // If we are already using an emergency stack, we can ignore it.
    if (emergency_stack_) return;

    // If the limit is not on the same page, we can ignore it.
    if (Page::FromAllocationTop(limit) != GetHeadPage()) return;

    limit_ = reinterpret_cast<intptr_t*>(limit);

    if (limit_ <= rear_) {
      return;
    }

    RelocateQueueHead();
  }

  bool IsBelowPromotionQueue(Address to_space_top) {
    // If an emergency stack is used, the to-space address cannot interfere
    // with the promotion queue.
    if (emergency_stack_) return true;

    // If the given to-space top pointer and the head of the promotion queue
    // are not on the same page, then the to-space objects are below the
    // promotion queue.
    if (GetHeadPage() != Page::FromAddress(to_space_top)) {
      return true;
    }
    // If the to-space top pointer is less than or equal to the promotion
    // queue head, then the to-space objects are below the promotion queue.
    return reinterpret_cast<intptr_t*>(to_space_top) <= rear_;
  }

  bool is_empty() {
    return (front_ == rear_) &&
           (emergency_stack_ == NULL || emergency_stack_->length() == 0);
  }

  inline void insert(HeapObject* target, int size);

  void remove(HeapObject** target, int* size) {
    DCHECK(!is_empty());
    if (front_ == rear_) {
      Entry e = emergency_stack_->RemoveLast();
      *target = e.obj_;
      *size = e.size_;
      return;
    }

    *target = reinterpret_cast<HeapObject*>(*(--front_));
    *size = static_cast<int>(*(--front_));
    // Assert no underflow.
    SemiSpace::AssertValidRange(reinterpret_cast<Address>(rear_),
                                reinterpret_cast<Address>(front_));
  }

 private:
  // The front of the queue is higher in the memory page chain than the rear.
  intptr_t* front_;
  intptr_t* rear_;
  intptr_t* limit_;

  static const int kEntrySizeInWords = 2;

  struct Entry {
    Entry(HeapObject* obj, int size) : obj_(obj), size_(size) {}

    HeapObject* obj_;
    int size_;
  };
  List<Entry>* emergency_stack_;

  Heap* heap_;

  void RelocateQueueHead();

  DISALLOW_COPY_AND_ASSIGN(PromotionQueue);
};
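
// Usage sketch (hypothetical caller, for illustration only): the scavenger
// enqueues promoted objects with insert() and later drains the queue, e.g.
//
//   HeapObject* target;
//   int size;
//   while (!queue->is_empty()) {
//     queue->remove(&target, &size);
//     // ... scan the promoted object of `size` bytes ...
//   }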


enum ArrayStorageAllocationMode {
  DONT_INITIALIZE_ARRAY_ELEMENTS,
  INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE
};


class Heap {
 public:
  // Declare all the root indices.  This defines the root list order.
  enum RootListIndex {
#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
    STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
#undef ROOT_INDEX_DECLARATION

#define STRING_INDEX_DECLARATION(name, str) k##name##RootIndex,
        INTERNALIZED_STRING_LIST(STRING_INDEX_DECLARATION)
#undef STRING_INDEX_DECLARATION

#define SYMBOL_INDEX_DECLARATION(name) k##name##RootIndex,
            PRIVATE_SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
#undef SYMBOL_INDEX_DECLARATION

#define SYMBOL_INDEX_DECLARATION(name, description) k##name##RootIndex,
                PUBLIC_SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
                    WELL_KNOWN_SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
#undef SYMBOL_INDEX_DECLARATION

// Utility type maps
#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
                        STRUCT_LIST(DECLARE_STRUCT_MAP)
#undef DECLARE_STRUCT_MAP
                            kStringTableRootIndex,

#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
    SMI_ROOT_LIST(ROOT_INDEX_DECLARATION)
#undef ROOT_INDEX_DECLARATION
        kRootListLength,
    kStrongRootListLength = kStringTableRootIndex,
    kSmiRootsStart = kStringTableRootIndex + 1
  };
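
  // Layout implied by the enum above: the strong roots occupy indices
  // [0, kStrongRootListLength), the string table sits at
  // kStringTableRootIndex (== kStrongRootListLength), and the Smi roots,
  // which the GC does not visit, occupy [kSmiRootsStart, kRootListLength).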

  // Indicates whether live bytes adjustment is triggered
  // - from within the GC code before sweeping started (SEQUENTIAL_TO_SWEEPER),
  // - or from within GC (CONCURRENT_TO_SWEEPER),
  // - or from mutator code (CONCURRENT_TO_SWEEPER).
  enum InvocationMode { SEQUENTIAL_TO_SWEEPER, CONCURRENT_TO_SWEEPER };

  enum ScratchpadSlotMode { IGNORE_SCRATCHPAD_SLOT, RECORD_SCRATCHPAD_SLOT };

  enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };

  // Taking this lock prevents the GC from entering a phase that relocates
  // object references.
  class RelocationLock {
   public:
    explicit RelocationLock(Heap* heap) : heap_(heap) {
      heap_->relocation_mutex_.Lock();
    }

    ~RelocationLock() { heap_->relocation_mutex_.Unlock(); }

   private:
    Heap* heap_;
  };
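
  // Usage sketch (for illustration): a mutator-side caller that must observe
  // stable object addresses holds the lock for its critical section:
  //
  //   {
  //     Heap::RelocationLock lock(heap);
  //     // ... no relocating GC phase can start while the lock is held ...
  //   }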

  // An optional version of the above lock that can be used for some critical
  // sections on the mutator thread; only safe since the GC currently does not
  // do concurrent compaction.
  class OptionalRelocationLock {
   public:
    OptionalRelocationLock(Heap* heap, bool concurrent)
        : heap_(heap), concurrent_(concurrent) {
      if (concurrent_) heap_->relocation_mutex_.Lock();
    }

    ~OptionalRelocationLock() {
      if (concurrent_) heap_->relocation_mutex_.Unlock();
    }

   private:
    Heap* heap_;
    bool concurrent_;
  };

  // Support for partial snapshots.  After calling this we have a linear
  // space to write objects in each space.
  struct Chunk {
    uint32_t size;
    Address start;
    Address end;
  };
  typedef List<Chunk> Reservation;

  static const intptr_t kMinimumOldGenerationAllocationLimit =
      8 * (Page::kPageSize > MB ? Page::kPageSize : MB);

  static const int kInitalOldGenerationLimitFactor = 2;

#if V8_OS_ANDROID
  // Don't apply pointer multiplier on Android since it has no swap space and
  // should instead adapt its heap size based on available physical memory.
  static const int kPointerMultiplier = 1;
#else
  static const int kPointerMultiplier = i::kPointerSize / 4;
#endif

  // The new space size has to be a power of 2. Sizes are in MB.
  static const int kMaxSemiSpaceSizeLowMemoryDevice = 1 * kPointerMultiplier;
  static const int kMaxSemiSpaceSizeMediumMemoryDevice = 4 * kPointerMultiplier;
  static const int kMaxSemiSpaceSizeHighMemoryDevice = 8 * kPointerMultiplier;
  static const int kMaxSemiSpaceSizeHugeMemoryDevice = 8 * kPointerMultiplier;
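  // Illustrative arithmetic: on a 64-bit, non-Android build kPointerSize is 8,
  // so kPointerMultiplier is 2 and kMaxSemiSpaceSizeHighMemoryDevice comes out
  // to 16 MB per semispace.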

  // The old space size has to be a multiple of Page::kPageSize.
  // Sizes are in MB.
  static const int kMaxOldSpaceSizeLowMemoryDevice = 128 * kPointerMultiplier;
  static const int kMaxOldSpaceSizeMediumMemoryDevice =
      256 * kPointerMultiplier;
  static const int kMaxOldSpaceSizeHighMemoryDevice = 512 * kPointerMultiplier;
  static const int kMaxOldSpaceSizeHugeMemoryDevice = 700 * kPointerMultiplier;

  // The executable size has to be a multiple of Page::kPageSize.
  // Sizes are in MB.
  static const int kMaxExecutableSizeLowMemoryDevice = 96 * kPointerMultiplier;
  static const int kMaxExecutableSizeMediumMemoryDevice =
      192 * kPointerMultiplier;
  static const int kMaxExecutableSizeHighMemoryDevice =
      256 * kPointerMultiplier;
  static const int kMaxExecutableSizeHugeMemoryDevice =
      256 * kPointerMultiplier;

  static const int kTraceRingBufferSize = 512;
  static const int kStacktraceBufferSize = 512;

  static const double kMinHeapGrowingFactor;
  static const double kMaxHeapGrowingFactor;
  static const double kMaxHeapGrowingFactorMemoryConstrained;
  static const double kMaxHeapGrowingFactorIdle;
  static const double kTargetMutatorUtilization;

  // Sloppy mode arguments object size.
  static const int kSloppyArgumentsObjectSize =
      JSObject::kHeaderSize + 2 * kPointerSize;

  // A strict mode arguments object has no callee, so it is smaller.
  static const int kStrictArgumentsObjectSize =
      JSObject::kHeaderSize + 1 * kPointerSize;

  // Indices for direct access into argument objects.
  static const int kArgumentsLengthIndex = 0;

  // callee is only valid in sloppy mode.
  static const int kArgumentsCalleeIndex = 1;
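
  // Illustrative arithmetic: a sloppy arguments object carries two in-object
  // slots (length at kArgumentsLengthIndex, callee at kArgumentsCalleeIndex),
  // hence kSloppyArgumentsObjectSize adds 2 * kPointerSize to the header; the
  // strict variant drops callee and needs only one extra slot.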

  static const int kNoGCFlags = 0;
  static const int kReduceMemoryFootprintMask = 1;
  static const int kAbortIncrementalMarkingMask = 2;
  static const int kFinalizeIncrementalMarkingMask = 4;

  // Making the heap iterable requires us to abort incremental marking.
  static const int kMakeHeapIterableMask = kAbortIncrementalMarkingMask;

  // The roots that have an index less than this are always in old space.
  static const int kOldSpaceRoots = 0x20;

  // The minimum size of a HeapObject on the heap.
  static const int kMinObjectSizeInWords = 2;

  STATIC_ASSERT(kUndefinedValueRootIndex ==
                Internals::kUndefinedValueRootIndex);
  STATIC_ASSERT(kNullValueRootIndex == Internals::kNullValueRootIndex);
  STATIC_ASSERT(kTrueValueRootIndex == Internals::kTrueValueRootIndex);
  STATIC_ASSERT(kFalseValueRootIndex == Internals::kFalseValueRootIndex);
  STATIC_ASSERT(kempty_stringRootIndex == Internals::kEmptyStringRootIndex);

  // Calculates the maximum amount of filler that could be required by the
  // given alignment.
  static int GetMaximumFillToAlign(AllocationAlignment alignment);
  // Calculates the actual amount of filler required for a given address at the
  // given alignment.
  static int GetFillToAlign(Address address, AllocationAlignment alignment);

  template <typename T>
  static inline bool IsOneByte(T t, int chars);

  static void FatalProcessOutOfMemory(const char* location,
                                      bool take_snapshot = false);

  static bool RootIsImmortalImmovable(int root_index);

  // Checks whether the space is valid.
  static bool IsValidAllocationSpace(AllocationSpace space);

  // An object may have an AllocationSite associated with it through a trailing
  // AllocationMemento. Its feedback should be updated when objects are found
  // in the heap.
  static inline void UpdateAllocationSiteFeedback(HeapObject* object,
                                                  ScratchpadSlotMode mode);

  // Generated code can embed direct references to non-writable roots if
  // they are in new space.
  static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index);

  // Zapping is needed for verify heap, and always done in debug builds.
  static inline bool ShouldZapGarbage() {
#ifdef DEBUG
    return true;
#else
#ifdef VERIFY_HEAP
    return FLAG_verify_heap;
#else
    return false;
#endif
#endif
  }

  static double HeapGrowingFactor(double gc_speed, double mutator_speed);

  // Copy block of memory from src to dst. Size of block should be aligned
  // by pointer size.
  static inline void CopyBlock(Address dst, Address src, int byte_size);

  // Optimized version of memmove for blocks with pointer size aligned sizes and
  // pointer size aligned addresses.
  static inline void MoveBlock(Address dst, Address src, int byte_size);

  // Determines a static visitor id based on the given {map} that can then be
  // stored on the map to facilitate fast dispatch for {StaticVisitorBase}.
  static int GetStaticVisitorIdForMap(Map* map);

  // Notifies the heap that it is ok to start marking or other activities that
  // should not happen during deserialization.
  void NotifyDeserializationComplete();

  intptr_t old_generation_allocation_limit() const {
    return old_generation_allocation_limit_;
  }

  bool always_allocate() { return always_allocate_scope_count_.Value() != 0; }

  Address* NewSpaceAllocationTopAddress() {
    return new_space_.allocation_top_address();
  }
  Address* NewSpaceAllocationLimitAddress() {
    return new_space_.allocation_limit_address();
  }

  Address* OldSpaceAllocationTopAddress() {
    return old_space_->allocation_top_address();
  }
  Address* OldSpaceAllocationLimitAddress() {
    return old_space_->allocation_limit_address();
  }

  // TODO(hpayer): There is still a mismatch between capacity and actual
  // committed memory size.
  bool CanExpandOldGeneration(int size) {
    return (CommittedOldGenerationMemory() + size) < MaxOldGenerationSize();
  }

  // Clear the Instanceof cache (used when a prototype changes).
  inline void ClearInstanceofCache();

  // Iterates the whole code space to clear all keyed store ICs.
  void ClearAllKeyedStoreICs();

  // FreeSpace objects have a null map after deserialization. Update the map.
  void RepairFreeListsAfterDeserialization();

  // Move len elements within a given array from src_index to dst_index.
  void MoveElements(FixedArray* array, int dst_index, int src_index, int len);

  // Initialize a filler object to keep the ability to iterate over the heap
  // when introducing gaps within pages.
  void CreateFillerObjectAt(Address addr, int size);

  bool CanMoveObjectStart(HeapObject* object);

  // Maintain consistency of live bytes during incremental marking.
  void AdjustLiveBytes(HeapObject* object, int by, InvocationMode mode);

  // Trim the given array from the left. Note that this relocates the object
  // start and hence is only valid if there is only a single reference to it.
  FixedArrayBase* LeftTrimFixedArray(FixedArrayBase* obj, int elements_to_trim);

  // Trim the given array from the right.
  template<Heap::InvocationMode mode>
  void RightTrimFixedArray(FixedArrayBase* obj, int elements_to_trim);

  // Converts the given boolean condition to JavaScript boolean value.
  inline Object* ToBoolean(bool condition);

  // Check whether the heap is currently iterable.
  bool IsHeapIterable();

  // Notify the heap that a context has been disposed.
  int NotifyContextDisposed(bool dependant_context);

  inline void increment_scan_on_scavenge_pages() {
    scan_on_scavenge_pages_++;
    if (FLAG_gc_verbose) {
      PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
    }
  }

  inline void decrement_scan_on_scavenge_pages() {
    scan_on_scavenge_pages_--;
    if (FLAG_gc_verbose) {
      PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
    }
  }

  void set_native_contexts_list(Object* object) {
    native_contexts_list_ = object;
  }
  Object* native_contexts_list() const { return native_contexts_list_; }

  void set_allocation_sites_list(Object* object) {
    allocation_sites_list_ = object;
  }
  Object* allocation_sites_list() { return allocation_sites_list_; }

  // Used in CreateAllocationSiteStub and the (de)serializer.
  Object** allocation_sites_list_address() { return &allocation_sites_list_; }

  void set_encountered_weak_collections(Object* weak_collection) {
    encountered_weak_collections_ = weak_collection;
  }
  Object* encountered_weak_collections() const {
    return encountered_weak_collections_;
  }

  void set_encountered_weak_cells(Object* weak_cell) {
    encountered_weak_cells_ = weak_cell;
  }
  Object* encountered_weak_cells() const { return encountered_weak_cells_; }

  // Number of mark-sweeps.
  int ms_count() const { return ms_count_; }

  // Checks whether the given object is allowed to be migrated from its
  // current space into the given destination space. Used for debugging.
  inline bool AllowedToBeMigrated(HeapObject* object, AllocationSpace dest);

  void CheckHandleCount();

  // Number of "runtime allocations" done so far.
  uint32_t allocations_count() { return allocations_count_; }

  // Returns deterministic "time" value in ms. Works only with
  // FLAG_verify_predictable.
  double synthetic_time() { return allocations_count() / 2.0; }

  // Print short heap statistics.
  void PrintShortHeapStatistics();

  inline HeapState gc_state() { return gc_state_; }

  inline bool IsInGCPostProcessing() { return gc_post_processing_depth_ > 0; }

  // If an object has an AllocationMemento trailing it, return it, otherwise
  // return NULL;
  inline AllocationMemento* FindAllocationMemento(HeapObject* object);

  // Returns false if not able to reserve.
  bool ReserveSpace(Reservation* reservations);

  //
  // Support for the API.
  //

  void CreateApiObjects();

  // Implements the corresponding V8 API function.
  bool IdleNotification(double deadline_in_seconds);
  bool IdleNotification(int idle_time_in_ms);

  double MonotonicallyIncreasingTimeInMs();

  void RecordStats(HeapStats* stats, bool take_snapshot = false);

  // Check new space expansion criteria and expand semispaces if they were hit.
  void CheckNewSpaceExpansionCriteria();

  inline bool HeapIsFullEnoughToStartIncrementalMarking(intptr_t limit) {
    if (FLAG_stress_compaction && (gc_count_ & 1) != 0) return true;

    intptr_t adjusted_allocation_limit = limit - new_space_.Capacity();

    if (PromotedTotalSize() >= adjusted_allocation_limit) return true;

    return false;
  }

  void VisitExternalResources(v8::ExternalResourceVisitor* visitor);

  // An object should be promoted if the object has survived a
  // scavenge operation.
  inline bool ShouldBePromoted(Address old_address, int object_size);

  void ClearNormalizedMapCaches();

  void IncrementDeferredCount(v8::Isolate::UseCounterFeature feature);

  bool concurrent_sweeping_enabled() { return concurrent_sweeping_enabled_; }

  inline bool OldGenerationAllocationLimitReached();

  void QueueMemoryChunkForFree(MemoryChunk* chunk);
  void FilterStoreBufferEntriesOnAboutToBeFreedPages();
  void FreeQueuedChunks(MemoryChunk* list_head);
  void FreeQueuedChunks();
  void WaitUntilUnmappingOfFreeChunksCompleted();

  // Completely clear the Instanceof cache (to stop it keeping objects alive
  // around a GC).
  inline void CompletelyClearInstanceofCache();

  inline uint32_t HashSeed();

  inline int NextScriptId();

  inline void SetArgumentsAdaptorDeoptPCOffset(int pc_offset);
  inline void SetConstructStubDeoptPCOffset(int pc_offset);
  inline void SetGetterStubDeoptPCOffset(int pc_offset);
  inline void SetSetterStubDeoptPCOffset(int pc_offset);

  // For post mortem debugging.
  void RememberUnmappedPage(Address page, bool compacted);

  // Global inline caching age: it is incremented on some GCs after context
  // disposal. We use it to flush inline caches.
  int global_ic_age() { return global_ic_age_; }

  void AgeInlineCaches() {
    global_ic_age_ = (global_ic_age_ + 1) & SharedFunctionInfo::ICAgeBits::kMax;
  }

  int64_t amount_of_external_allocated_memory() {
    return amount_of_external_allocated_memory_;
  }

  void update_amount_of_external_allocated_memory(int64_t delta) {
    amount_of_external_allocated_memory_ += delta;
  }

  void DeoptMarkedAllocationSites();

  bool DeoptMaybeTenuredAllocationSites() {
    return new_space_.IsAtMaximumCapacity() && maximum_size_scavenges_ == 0;
  }

  void AddWeakObjectToCodeDependency(Handle<HeapObject> obj,
                                     Handle<DependentCode> dep);

  DependentCode* LookupWeakObjectToCodeDependency(Handle<HeapObject> obj);

  void AddRetainedMap(Handle<Map> map);

  // This event is triggered after successful allocation of a new object made
  // by runtime. Allocations of target space for object evacuation do not
  // trigger the event. In order to track ALL allocations one must turn off
  // FLAG_inline_new and FLAG_use_allocation_folding.
  inline void OnAllocationEvent(HeapObject* object, int size_in_bytes);

  // This event is triggered after object is moved to a new place.
  inline void OnMoveEvent(HeapObject* target, HeapObject* source,
                          int size_in_bytes);

  bool deserialization_complete() const { return deserialization_complete_; }

  bool HasLowAllocationRate();
  bool HasHighFragmentation();
  bool HasHighFragmentation(intptr_t used, intptr_t committed);

  bool ShouldOptimizeForMemoryUsage() { return optimize_for_memory_usage_; }

  // ===========================================================================
  // Initialization. ===========================================================
  // ===========================================================================

  // Configure heap size in MB before setup. Return false if the heap has been
  // set up already.
  bool ConfigureHeap(int max_semi_space_size, int max_old_space_size,
                     int max_executable_size, size_t code_range_size);
  bool ConfigureHeapDefault();

  // Prepares the heap, setting up memory areas that are needed in the isolate
  // without actually creating any objects.
  bool SetUp();

  // Bootstraps the object heap with the core set of objects required to run.
  // Returns whether it succeeded.
  bool CreateHeapObjects();

  // Destroys all memory allocated by the heap.
  void TearDown();

  // Returns whether SetUp has been called.
  bool HasBeenSetUp();

  // ===========================================================================
  // Getters for spaces. =======================================================
  // ===========================================================================

  // Return the starting address and a mask for the new space.  And-masking an
  // address with the mask will result in the start address of the new space
  // for all addresses in either semispace.
  Address NewSpaceStart() { return new_space_.start(); }
  uintptr_t NewSpaceMask() { return new_space_.mask(); }
  Address NewSpaceTop() { return new_space_.top(); }

  NewSpace* new_space() { return &new_space_; }
  OldSpace* old_space() { return old_space_; }
  OldSpace* code_space() { return code_space_; }
  MapSpace* map_space() { return map_space_; }
  LargeObjectSpace* lo_space() { return lo_space_; }

  PagedSpace* paged_space(int idx) {
    switch (idx) {
      case OLD_SPACE:
        return old_space();
      case MAP_SPACE:
        return map_space();
      case CODE_SPACE:
        return code_space();
      case NEW_SPACE:
      case LO_SPACE:
        UNREACHABLE();
    }
    return NULL;
  }

  Space* space(int idx) {
    switch (idx) {
      case NEW_SPACE:
        return new_space();
      case LO_SPACE:
        return lo_space();
      default:
        return paged_space(idx);
    }
  }

  // Returns name of the space.
  const char* GetSpaceName(int idx);

  // ===========================================================================
  // Getters to other components. ==============================================
  // ===========================================================================

  GCTracer* tracer() { return tracer_; }

  PromotionQueue* promotion_queue() { return &promotion_queue_; }

  inline Isolate* isolate();

  MarkCompactCollector* mark_compact_collector() {
    return mark_compact_collector_;
  }

  // ===========================================================================
  // Root set access. ==========================================================
  // ===========================================================================

  // Heap root getters.
#define ROOT_ACCESSOR(type, name, camel_name) inline type* name();
  ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR

  // Utility type maps.
#define STRUCT_MAP_ACCESSOR(NAME, Name, name) inline Map* name##_map();
  STRUCT_LIST(STRUCT_MAP_ACCESSOR)
#undef STRUCT_MAP_ACCESSOR

#define STRING_ACCESSOR(name, str) inline String* name();
  INTERNALIZED_STRING_LIST(STRING_ACCESSOR)
#undef STRING_ACCESSOR

#define SYMBOL_ACCESSOR(name) inline Symbol* name();
  PRIVATE_SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR

#define SYMBOL_ACCESSOR(name, description) inline Symbol* name();
  PUBLIC_SYMBOL_LIST(SYMBOL_ACCESSOR)
  WELL_KNOWN_SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR

  Object* root(RootListIndex index) { return roots_[index]; }
  Handle<Object> root_handle(RootListIndex index) {
    return Handle<Object>(&roots_[index]);
  }

  // Generated code can embed this address to get access to the roots.
  Object** roots_array_start() { return roots_; }

  // Sets the stub_cache_ (only used when expanding the dictionary).
  void SetRootCodeStubs(UnseededNumberDictionary* value) {
    roots_[kCodeStubsRootIndex] = value;
  }

  // Sets the non_monomorphic_cache_ (only used when expanding the dictionary).
  void SetRootNonMonomorphicCache(UnseededNumberDictionary* value) {
    roots_[kNonMonomorphicCacheRootIndex] = value;
  }

  void SetRootMaterializedObjects(FixedArray* objects) {
    roots_[kMaterializedObjectsRootIndex] = objects;
  }

  void SetRootCodeStubContext(Object* value) {
    roots_[kCodeStubContextRootIndex] = value;
  }

  void SetRootCodeStubExportsObject(JSObject* value) {
    roots_[kCodeStubExportsObjectRootIndex] = value;
  }

  void SetRootScriptList(Object* value) {
    roots_[kScriptListRootIndex] = value;
  }

  void SetRootStringTable(StringTable* value) {
    roots_[kStringTableRootIndex] = value;
  }

  // Set the stack limit in the roots_ array.  Some architectures generate
  // code that looks here, because it is faster than loading from the static
  // jslimit_/real_jslimit_ variable in the StackGuard.
  void SetStackLimits();

  // Generated code can treat direct references to this root as constant.
  bool RootCanBeTreatedAsConstant(RootListIndex root_index);

  Map* MapForFixedTypedArray(ExternalArrayType array_type);
  RootListIndex RootIndexForFixedTypedArray(ExternalArrayType array_type);

  RootListIndex RootIndexForEmptyFixedTypedArray(ElementsKind kind);
  FixedTypedArrayBase* EmptyFixedTypedArrayForMap(Map* map);

  void RegisterStrongRoots(Object** start, Object** end);
  void UnregisterStrongRoots(Object** start);

  // ===========================================================================
  // Inline allocation. ========================================================
  // ===========================================================================

  // Indicates whether inline bump-pointer allocation has been disabled.
  bool inline_allocation_disabled() { return inline_allocation_disabled_; }

  // Switch whether inline bump-pointer allocation should be used.
  void EnableInlineAllocation();
  void DisableInlineAllocation();

  // ===========================================================================
  // Methods triggering GCs. ===================================================
  // ===========================================================================

  // Performs a garbage collection operation.
  // Returns whether there is a chance that another major GC could
  // collect more garbage.
  inline bool CollectGarbage(
      AllocationSpace space, const char* gc_reason = NULL,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

  // Performs a full garbage collection.  If (flags & kMakeHeapIterableMask) is
  // non-zero, then the slower precise sweeper is used, which leaves the heap
  // in a state where we can iterate over the heap visiting all objects.
  void CollectAllGarbage(
      int flags = kFinalizeIncrementalMarkingMask, const char* gc_reason = NULL,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
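
  // Illustrative sketch (the reason string is arbitrary, and heap is assumed
  // to come from the owning isolate): a full GC that also finalizes any
  // incremental marking in progress could be requested as
  //
  //   heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
  //                           "example: embedder memory pressure");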

  // Last-resort GC; should try to squeeze out as much garbage as possible.
  void CollectAllAvailableGarbage(const char* gc_reason = NULL);

  // Reports an external memory pressure event; either performs a major GC or
  // completes incremental marking in order to free external resources.
  void ReportExternalMemoryPressure(const char* gc_reason = NULL);

  // Invoked when GC was requested via the stack guard.
  void HandleGCRequest();

  // ===========================================================================
  // Iterators. ================================================================
  // ===========================================================================

  // Iterates over all roots in the heap.
  void IterateRoots(ObjectVisitor* v, VisitMode mode);
  // Iterates over all strong roots in the heap.
  void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
  // Iterates over entries in the smi roots list.  Only interesting to the
  // serializer/deserializer, since GC does not care about smis.
  void IterateSmiRoots(ObjectVisitor* v);
  // Iterates over all the other roots in the heap.
  void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);

  // Iterates over pointers to the from-space of new space found in the memory
  // interval from start to end within |object|.
  void IterateAndMarkPointersToFromSpace(HeapObject* object, Address start,
                                         Address end, bool record_slots,
                                         ObjectSlotCallback callback);

  // ===========================================================================
  // Store buffer API. =========================================================
  // ===========================================================================

  // Write barrier support for address[offset] = o.
  INLINE(void RecordWrite(Address address, int offset));

  // Write barrier support for address[start : start + len[ = o.
  INLINE(void RecordWrites(Address address, int start, int len));

  Address* store_buffer_top_address() {
    return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]);
  }
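
  // Illustrative sketch (hypothetical object layout; kFieldOffset is assumed):
  // after storing a heap object into a field, the write barrier is invoked
  // roughly as
  //
  //   heap->RecordWrite(object->address(), kFieldOffset);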

  // ===========================================================================
  // Incremental marking API. ==================================================
  // ===========================================================================

  // Start incremental marking and ensure that idle time handler can perform
  // incremental steps.
  void StartIdleIncrementalMarking();

  // Starts incremental marking assuming incremental marking is currently
  // stopped.
  void StartIncrementalMarking(int gc_flags = kNoGCFlags,
                               const GCCallbackFlags gc_callback_flags =
                                   GCCallbackFlags::kNoGCCallbackFlags,
                               const char* reason = nullptr);

  void FinalizeIncrementalMarkingIfComplete(const char* comment);

  bool TryFinalizeIdleIncrementalMarking(double idle_time_in_ms);

  IncrementalMarking* incremental_marking() { return incremental_marking_; }
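
  // Illustrative sketch (assuming a Heap* heap): incremental marking is only
  // started when it is currently stopped, e.g.
  //
  //   if (heap->incremental_marking()->IsStopped()) {
  //     heap->StartIncrementalMarking(Heap::kNoGCFlags, kNoGCCallbackFlags,
  //                                   "example: idle task");
  //   }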

  // ===========================================================================
  // External string table API. ================================================
  // ===========================================================================

  // Registers an external string.
  inline void RegisterExternalString(String* string);

  // Finalizes an external string by deleting the associated external
  // data and clearing the resource pointer.
  inline void FinalizeExternalString(String* string);

  // ===========================================================================
  // Methods checking/returning the space of a given object/address. ===========
  // ===========================================================================

  // Returns whether the object resides in new space.
  inline bool InNewSpace(Object* object);
  inline bool InNewSpace(Address address);
  inline bool InNewSpacePage(Address address);
  inline bool InFromSpace(Object* object);
  inline bool InToSpace(Object* object);

  // Returns whether the object resides in old space.
  inline bool InOldSpace(Address address);
  inline bool InOldSpace(Object* object);

  // Checks whether an address/object is in the heap (including auxiliary
  // area and unused area).
  bool Contains(Address addr);
  bool Contains(HeapObject* value);

  // Checks whether an address/object is in a space.
  // Currently used by tests, serialization and heap verification only.
  bool InSpace(Address addr, AllocationSpace space);
  bool InSpace(HeapObject* value, AllocationSpace space);
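
  // Illustrative sketch: a typical generation check during a promotion
  // decision (heap and object are assumed to be in scope).
  //
  //   if (heap->InNewSpace(object)) {
  //     // The object may still be moved by the next scavenge.
  //   } else {
  //     DCHECK(heap->Contains(HeapObject::cast(object)));
  //   }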

  // ===========================================================================
  // Object statistics tracking. ===============================================
  // ===========================================================================

  // Returns the number of buckets used by object statistics tracking during a
  // major GC. Note that the methods below fail gracefully when these bounds
  // are exceeded.
  size_t NumberOfTrackedHeapObjectTypes();

  // Returns object statistics about count and size at the last major GC.
  // Objects are being grouped into buckets that roughly resemble existing
  // instance types.
  size_t ObjectCountAtLastGC(size_t index);
  size_t ObjectSizeAtLastGC(size_t index);

  // Retrieves names of buckets used by object statistics tracking.
  bool GetObjectTypeName(size_t index, const char** object_type,
                         const char** object_sub_type);
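
  // Illustrative sketch: dumping the statistics collected during the last
  // major GC (assumes a Heap* heap; out-of-range buckets simply report no
  // name).
  //
  //   for (size_t i = 0; i < heap->NumberOfTrackedHeapObjectTypes(); i++) {
  //     const char* type;
  //     const char* sub_type;
  //     if (heap->GetObjectTypeName(i, &type, &sub_type)) {
  //       PrintF("%s %s: %zu objects, %zu bytes\n", type, sub_type,
  //              heap->ObjectCountAtLastGC(i), heap->ObjectSizeAtLastGC(i));
  //     }
  //   }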

  // ===========================================================================
  // GC statistics. ============================================================
  // ===========================================================================

  // Returns the maximum amount of memory reserved for the heap.  For
  // the young generation, we reserve 4 times the amount needed for a
  // semi space.  The young generation consists of two semi spaces and
  // we reserve twice the amount needed for those in order to ensure
  // that new space can be aligned to its size.
  intptr_t MaxReserved() {
    return 4 * reserved_semispace_size_ + max_old_generation_size_;
  }
  int MaxSemiSpaceSize() { return max_semi_space_size_; }
  int ReservedSemiSpaceSize() { return reserved_semispace_size_; }
  int InitialSemiSpaceSize() { return initial_semispace_size_; }
  int TargetSemiSpaceSize() { return target_semispace_size_; }
  intptr_t MaxOldGenerationSize() { return max_old_generation_size_; }
  intptr_t MaxExecutableSize() { return max_executable_size_; }

  // Returns the capacity of the heap in bytes w/o growing. Heap grows when
  // more spaces are needed until it reaches the limit.
  intptr_t Capacity();

  // Returns the amount of memory currently committed for the heap.
  intptr_t CommittedMemory();

  // Returns the amount of memory currently committed for the old space.
  intptr_t CommittedOldGenerationMemory();

  // Returns the amount of executable memory currently committed for the heap.
  intptr_t CommittedMemoryExecutable();

  // Returns the amount of physical memory currently committed for the heap.
  size_t CommittedPhysicalMemory();

  // Returns the maximum amount of memory ever committed for the heap.
  intptr_t MaximumCommittedMemory() { return maximum_committed_; }

  // Updates the maximum committed memory for the heap. Should be called
  // whenever a space grows.
  void UpdateMaximumCommitted();

  // Returns the available bytes in space w/o growing.
  // Heap doesn't guarantee that it can allocate an object that requires
  // all available bytes. Check MaxHeapObjectSize() instead.
  intptr_t Available();

  // Returns the size of all objects residing in the heap.
  intptr_t SizeOfObjects();

  void UpdateSurvivalStatistics(int start_new_space_size);

  inline void IncrementPromotedObjectsSize(int object_size) {
    DCHECK(object_size > 0);
    promoted_objects_size_ += object_size;
  }
  inline intptr_t promoted_objects_size() { return promoted_objects_size_; }

  inline void IncrementSemiSpaceCopiedObjectSize(int object_size) {
    DCHECK(object_size > 0);
    semi_space_copied_object_size_ += object_size;
  }
  inline intptr_t semi_space_copied_object_size() {
    return semi_space_copied_object_size_;
  }

  inline intptr_t SurvivedNewSpaceObjectSize() {
    return promoted_objects_size_ + semi_space_copied_object_size_;
  }

  inline void IncrementNodesDiedInNewSpace() { nodes_died_in_new_space_++; }

  inline void IncrementNodesCopiedInNewSpace() { nodes_copied_in_new_space_++; }

  inline void IncrementNodesPromoted() { nodes_promoted_++; }

  inline void IncrementYoungSurvivorsCounter(int survived) {
    DCHECK(survived >= 0);
    survived_last_scavenge_ = survived;
    survived_since_last_expansion_ += survived;
  }

  inline intptr_t PromotedTotalSize() {
    int64_t total = PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize();
    if (total > std::numeric_limits<intptr_t>::max()) {
      // TODO(erikcorry): Use uintptr_t everywhere we do heap size calculations.
      return std::numeric_limits<intptr_t>::max();
    }
    if (total < 0) return 0;
    return static_cast<intptr_t>(total);
  }

  void UpdateNewSpaceAllocationCounter() {
    new_space_allocation_counter_ = NewSpaceAllocationCounter();
  }

  size_t NewSpaceAllocationCounter() {
    return new_space_allocation_counter_ + new_space()->AllocatedSinceLastGC();
  }
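
  // Illustrative sketch: measuring new-space bytes allocated across a region
  // of code (the counter only ever grows, so a simple delta suffices).
  //
  //   size_t before = heap->NewSpaceAllocationCounter();
  //   RunWorkload();  // hypothetical workload
  //   size_t allocated = heap->NewSpaceAllocationCounter() - before;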

  // This should be used only for testing.
  void set_new_space_allocation_counter(size_t new_value) {
    new_space_allocation_counter_ = new_value;
  }

  void UpdateOldGenerationAllocationCounter() {
    old_generation_allocation_counter_ = OldGenerationAllocationCounter();
  }

  size_t OldGenerationAllocationCounter() {
    return old_generation_allocation_counter_ + PromotedSinceLastGC();
  }

  // This should be used only for testing.
  void set_old_generation_allocation_counter(size_t new_value) {
    old_generation_allocation_counter_ = new_value;
  }

  size_t PromotedSinceLastGC() {
    return PromotedSpaceSizeOfObjects() - old_generation_size_at_last_gc_;
  }

  int gc_count() const { return gc_count_; }

  // Returns the size of objects residing in spaces other than new space.
  intptr_t PromotedSpaceSizeOfObjects();

  double total_regexp_code_generated() { return total_regexp_code_generated_; }
  void IncreaseTotalRegexpCodeGenerated(int size) {
    total_regexp_code_generated_ += size;
  }

  void IncrementCodeGeneratedBytes(bool is_crankshafted, int size) {
    if (is_crankshafted) {
      crankshaft_codegen_bytes_generated_ += size;
    } else {
      full_codegen_bytes_generated_ += size;
    }
  }

  // ===========================================================================
  // Prologue/epilogue callback methods. =======================================
  // ===========================================================================

  void AddGCPrologueCallback(v8::Isolate::GCCallback callback,
                             GCType gc_type_filter, bool pass_isolate = true);
  void RemoveGCPrologueCallback(v8::Isolate::GCCallback callback);

  void AddGCEpilogueCallback(v8::Isolate::GCCallback callback,
                             GCType gc_type_filter, bool pass_isolate = true);
  void RemoveGCEpilogueCallback(v8::Isolate::GCCallback callback);

  void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags);
  void CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags);
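
  // Illustrative sketch (the callback body is hypothetical): an embedder-style
  // prologue callback that only fires for mark-compact collections.
  //
  //   static void OnMarkCompactStart(v8::Isolate* isolate, v8::GCType type,
  //                                  v8::GCCallbackFlags flags) {
  //     // e.g. record a timestamp before the collection starts.
  //   }
  //   ...
  //   heap->AddGCPrologueCallback(OnMarkCompactStart,
  //                               v8::kGCTypeMarkSweepCompact);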

  // ===========================================================================
  // Allocation methods. =======================================================
  // ===========================================================================

  // Creates a filler object and returns a heap object immediately after it.
  MUST_USE_RESULT HeapObject* PrecedeWithFiller(HeapObject* object,
                                                int filler_size);

  // Creates a filler object if needed for alignment and returns a heap object
  // immediately after it. If any space is left after the returned object,
  // another filler object is created so the over allocated memory is iterable.
  MUST_USE_RESULT HeapObject* AlignWithFiller(HeapObject* object,
                                              int object_size,
                                              int allocation_size,
                                              AllocationAlignment alignment);

  // ===========================================================================
  // ArrayBuffer tracking. =====================================================
  // ===========================================================================

  void RegisterNewArrayBuffer(JSArrayBuffer* buffer);
  void UnregisterArrayBuffer(JSArrayBuffer* buffer);

  inline ArrayBufferTracker* array_buffer_tracker() {
    return array_buffer_tracker_;
  }

// =============================================================================

#ifdef VERIFY_HEAP
  // Verify the heap is in its normal state before or after a GC.
  void Verify();
#endif

#ifdef DEBUG
  void set_allocation_timeout(int timeout) { allocation_timeout_ = timeout; }

  void TracePathToObjectFrom(Object* target, Object* root);
  void TracePathToObject(Object* target);
  void TracePathToGlobal();

  void Print();
  void PrintHandles();

  // Report heap statistics.
  void ReportHeapStatistics(const char* title);
  void ReportCodeStatistics(const char* title);
#endif

 private:
  class UnmapFreeMemoryTask;

  // External strings table is a place where all external strings are
  // registered.  We need to keep track of such strings to properly
  // finalize them.
  class ExternalStringTable {
   public:
    // Registers an external string.
    inline void AddString(String* string);

    inline void Iterate(ObjectVisitor* v);

    // Restores internal invariant and gets rid of collected strings.
    // Must be called after each Iterate() that modified the strings.
    void CleanUp();

    // Destroys all allocated memory.
    void TearDown();

   private:
    explicit ExternalStringTable(Heap* heap) : heap_(heap) {}

    inline void Verify();

    inline void AddOldString(String* string);

    // Notifies the table that only a prefix of the new list is valid.
    inline void ShrinkNewStrings(int position);

    // To speed up scavenge collections, new space strings are kept
    // separate from old space strings.
    List<Object*> new_space_strings_;
    List<Object*> old_space_strings_;

    Heap* heap_;

    friend class Heap;

    DISALLOW_COPY_AND_ASSIGN(ExternalStringTable);
  };

  struct StrongRootsList;

  struct StringTypeTable {
    InstanceType type;
    int size;
    RootListIndex index;
  };

  struct ConstantStringTable {
    const char* contents;
    RootListIndex index;
  };

  struct StructTable {
    InstanceType type;
    int size;
    RootListIndex index;
  };

  struct GCCallbackPair {
    GCCallbackPair(v8::Isolate::GCCallback callback, GCType gc_type,
                   bool pass_isolate)
        : callback(callback), gc_type(gc_type), pass_isolate(pass_isolate) {}

    bool operator==(const GCCallbackPair& other) const {
      return other.callback == callback;
    }

    v8::Isolate::GCCallback callback;
    GCType gc_type;
    bool pass_isolate;
  };

  typedef String* (*ExternalStringTableUpdaterCallback)(Heap* heap,
                                                        Object** pointer);

  static const int kInitialStringTableSize = 2048;
  static const int kInitialEvalCacheSize = 64;
  static const int kInitialNumberStringCacheSize = 256;

  static const int kRememberedUnmappedPages = 128;

  static const StringTypeTable string_type_table[];
  static const ConstantStringTable constant_string_table[];
  static const StructTable struct_table[];

  static const int kYoungSurvivalRateHighThreshold = 90;
  static const int kYoungSurvivalRateAllowedDeviation = 15;
  static const int kOldSurvivalRateLowThreshold = 10;

  static const int kMaxMarkCompactsInIdleRound = 7;
  static const int kIdleScavengeThreshold = 5;

  static const int kAllocationSiteScratchpadSize = 256;

  Heap();

  static String* UpdateNewSpaceReferenceInExternalStringTableEntry(
      Heap* heap, Object** pointer);

  static void ScavengeStoreBufferCallback(Heap* heap, MemoryChunk* page,
                                          StoreBufferEvent event);

  // Selects the proper allocation space based on the pretenuring decision.
  static AllocationSpace SelectSpace(PretenureFlag pretenure) {
    return (pretenure == TENURED) ? OLD_SPACE : NEW_SPACE;
  }

#define ROOT_ACCESSOR(type, name, camel_name) \
  inline void set_##name(type* value);
  ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR

  StoreBuffer* store_buffer() { return &store_buffer_; }

  void set_current_gc_flags(int flags) {
    current_gc_flags_ = flags;
    DCHECK(!ShouldFinalizeIncrementalMarking() ||
           !ShouldAbortIncrementalMarking());
  }

  inline bool ShouldReduceMemory() const {
    return current_gc_flags_ & kReduceMemoryFootprintMask;
  }

  inline bool ShouldAbortIncrementalMarking() const {
    return current_gc_flags_ & kAbortIncrementalMarkingMask;
  }

  inline bool ShouldFinalizeIncrementalMarking() const {
    return current_gc_flags_ & kFinalizeIncrementalMarkingMask;
  }
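
  // Illustrative sketch: the flags form a bitmask, so a memory-reducing GC
  // that also aborts incremental marking would be configured as
  //
  //   set_current_gc_flags(kReduceMemoryFootprintMask |
  //                        kAbortIncrementalMarkingMask);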

  void PreprocessStackTraces();

  // Pretenuring decisions are made based on feedback collected during new
  // space evacuation. Note that between feedback collection and calling this
  // method, objects in old space must not move.
  // Right now we only process pretenuring feedback in high promotion mode.
  bool ProcessPretenuringFeedback();

  // Checks whether a global GC is necessary.
  GarbageCollector SelectGarbageCollector(AllocationSpace space,
                                          const char** reason);

  // Make sure there is a filler value behind the top of the new space
  // so that the GC does not confuse some uninitialized/stale memory
  // with the allocation memento of the object at the top.
  void EnsureFillerObjectAtTop();

  // Ensure that we have swept all spaces in such a way that we can iterate
  // over all objects.  May cause a GC.
  void MakeHeapIterable();

  // Performs garbage collection operation.
  // Returns whether there is a chance that another major GC could
  // collect more garbage.
  bool CollectGarbage(
      GarbageCollector collector, const char* gc_reason,
      const char* collector_reason,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

  // Performs a garbage collection.
  // Returns whether there is a chance another major GC could
  // collect more garbage.
  bool PerformGarbageCollection(
      GarbageCollector collector,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

  inline void UpdateOldSpaceLimits();

  // Initializes a JSObject based on its map.
  void InitializeJSObjectFromMap(JSObject* obj, FixedArray* properties,
                                 Map* map);
  void InitializeAllocationMemento(AllocationMemento* memento,
                                   AllocationSite* allocation_site);

  bool CreateInitialMaps();
  void CreateInitialObjects();

  // These Create*EntryStub functions are here and forced to not be inlined
  // because of a gcc-4.4 bug that assigns wrong vtable entries.
  NO_INLINE(void CreateJSEntryStub());
  NO_INLINE(void CreateJSConstructEntryStub());

  void CreateFixedStubs();

  HeapObject* DoubleAlignForDeserialization(HeapObject* object, int size);

  // Commits from space if it is uncommitted.
  void EnsureFromSpaceIsCommitted();

  // Uncommit unused semi space.
  bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }

  // Fill in bogus values in from space
  void ZapFromSpace();

  // Deopts all code that contains allocation instructions which are tenured or
  // not tenured. Moreover, it clears the pretenuring allocation site statistics.
  void ResetAllAllocationSitesDependentCode(PretenureFlag flag);

  // Evaluates local pretenuring for the old space and calls
  // ResetAllTenuredAllocationSitesDependentCode if too many objects died in
  // the old space.
  void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc);

  // Record statistics before and after garbage collection.
  void ReportStatisticsBeforeGC();
  void ReportStatisticsAfterGC();

  // Creates and installs the full-sized number string cache.
  int FullSizeNumberStringCacheLength();
  // Flush the number to string cache.
  void FlushNumberStringCache();

  // Sets used allocation sites entries to undefined.
  void FlushAllocationSitesScratchpad();

  // Initializes the allocation sites scratchpad with undefined values.
  void InitializeAllocationSitesScratchpad();

  // Adds an allocation site to the scratchpad if there is space left.
  void AddAllocationSiteToScratchpad(AllocationSite* site,
                                     ScratchpadSlotMode mode);

  // TODO(hpayer): Allocation site pretenuring may make this method obsolete.
  // Re-visit incremental marking heuristics.
  bool IsHighSurvivalRate() { return high_survival_rate_period_length_ > 0; }

  void ConfigureInitialOldGenerationSize();

  bool HasLowYoungGenerationAllocationRate();
  bool HasLowOldGenerationAllocationRate();
  double YoungGenerationMutatorUtilization();
  double OldGenerationMutatorUtilization();

  void ReduceNewSpaceSize();

  bool TryFinalizeIdleIncrementalMarking(
      double idle_time_in_ms, size_t size_of_objects,
      size_t mark_compact_speed_in_bytes_per_ms);

  GCIdleTimeHeapState ComputeHeapState();

  bool PerformIdleTimeAction(GCIdleTimeAction action,
                             GCIdleTimeHeapState heap_state,
                             double deadline_in_ms);

  void IdleNotificationEpilogue(GCIdleTimeAction action,
                                GCIdleTimeHeapState heap_state, double start_ms,
                                double deadline_in_ms);
  void CheckAndNotifyBackgroundIdleNotification(double idle_time_in_ms,
                                                double now_ms);

  inline void UpdateAllocationsHash(HeapObject* object);
  inline void UpdateAllocationsHash(uint32_t value);
  void PrintAlloctionsHash();

  void AddToRingBuffer(const char* string);
  void GetFromRingBuffer(char* buffer);

  // Attempt to over-approximate the weak closure by marking object groups and
  // implicit references from global handles, but don't atomically complete
  // marking. If we continue to mark incrementally, we might have marked
  // objects that die later.
  void OverApproximateWeakClosure(const char* gc_reason);

  // Returns the timer used for a given GC type.
  // - GCScavenger: young generation GC
  // - GCCompactor: full GC
  // - GCFinalizeMC: finalization of incremental full GC
  // - GCFinalizeMCReduceMemory: finalization of incremental full GC with
  // memory reduction
  HistogramTimer* GCTypeTimer(GarbageCollector collector);

  // ===========================================================================
  // Actual GC. ================================================================
  // ===========================================================================

  // Code that should be run before and after each GC.  Includes some
  // reporting/verification activities when compiled with DEBUG set.
  void GarbageCollectionPrologue();
  void GarbageCollectionEpilogue();

  // Performs a major collection in the whole heap.
  void MarkCompact();

  // Code to be run before and after mark-compact.
  void MarkCompactPrologue();
  void MarkCompactEpilogue();

  // Performs a minor collection in new generation.
  void Scavenge();

  Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front);

  void UpdateNewSpaceReferencesInExternalStringTable(
      ExternalStringTableUpdaterCallback updater_func);

  void UpdateReferencesInExternalStringTable(
      ExternalStringTableUpdaterCallback updater_func);

  void ProcessAllWeakReferences(WeakObjectRetainer* retainer);
  void ProcessYoungWeakReferences(WeakObjectRetainer* retainer);
  void ProcessNativeContexts(WeakObjectRetainer* retainer);
  void ProcessAllocationSites(WeakObjectRetainer* retainer);

  // ===========================================================================
  // GC statistics. ============================================================
  // ===========================================================================

  inline intptr_t OldGenerationSpaceAvailable() {
    return old_generation_allocation_limit_ - PromotedTotalSize();
  }

  // Returns maximum GC pause.
  double get_max_gc_pause() { return max_gc_pause_; }

  // Returns maximum size of objects alive after GC.
  intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }

  // Returns minimal interval between two subsequent collections.
  double get_min_in_mutator() { return min_in_mutator_; }

  // Update GC statistics that are tracked on the Heap.
  void UpdateCumulativeGCStatistics(double duration, double spent_in_mutator,
                                    double marking_time);

  bool MaximumSizeScavenge() { return maximum_size_scavenges_ > 0; }

  // ===========================================================================
  // Growing strategy. =========================================================
  // ===========================================================================

  // Decrease the allocation limit if the new limit based on the given
  // parameters is lower than the current limit.
  void DampenOldGenerationAllocationLimit(intptr_t old_gen_size,
                                          double gc_speed,
                                          double mutator_speed);


  // Calculates the allocation limit based on a given growing factor and a
  // given old generation size.
  intptr_t CalculateOldGenerationAllocationLimit(double factor,
                                                 intptr_t old_gen_size);

  // Sets the allocation limit to trigger the next full garbage collection.
  void SetOldGenerationAllocationLimit(intptr_t old_gen_size, double gc_speed,
                                       double mutator_speed);

  // ===========================================================================
  // Inline allocation. ========================================================
  // ===========================================================================

  void LowerInlineAllocationLimit(intptr_t step);
  void ResetInlineAllocationLimit();

  // ===========================================================================
  // Idle notification. ========================================================
  // ===========================================================================

  bool RecentIdleNotificationHappened();

  void ScheduleIdleScavengeIfNeeded(int bytes_allocated);

  // ===========================================================================
  // Allocation methods. =======================================================
  // ===========================================================================

  // Returns a deep copy of the JavaScript object.
  // Properties and elements are copied too.
  // Optionally takes an AllocationSite to be appended in an AllocationMemento.
  MUST_USE_RESULT AllocationResult CopyJSObject(JSObject* source,
                                                AllocationSite* site = NULL);

  // Allocates a JS Map in the heap.
  MUST_USE_RESULT AllocationResult
  AllocateMap(InstanceType instance_type, int instance_size,
              ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND);

  // Allocates and initializes a new JavaScript object based on a
  // constructor.
  // If allocation_site is non-null, then a memento is emitted after the object
  // that points to the site.
  MUST_USE_RESULT AllocationResult AllocateJSObject(
      JSFunction* constructor, PretenureFlag pretenure = NOT_TENURED,
      AllocationSite* allocation_site = NULL);

  // Allocates and initializes a new JavaScript object based on a map.
  // Passing an allocation site means that a memento will be created that
  // points to the site.
  MUST_USE_RESULT AllocationResult
  AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure = NOT_TENURED,
                          AllocationSite* allocation_site = NULL);

  // Allocates a HeapNumber from value.
  MUST_USE_RESULT AllocationResult
  AllocateHeapNumber(double value, MutableMode mode = IMMUTABLE,
                     PretenureFlag pretenure = NOT_TENURED);

// Allocates SIMD values from the given lane values.
#define SIMD_ALLOCATE_DECLARATION(TYPE, Type, type, lane_count, lane_type) \
  AllocationResult Allocate##Type(lane_type lanes[lane_count],             \
                                  PretenureFlag pretenure = NOT_TENURED);
  SIMD128_TYPES(SIMD_ALLOCATE_DECLARATION)
#undef SIMD_ALLOCATE_DECLARATION

  // Allocates a byte array of the specified length
  MUST_USE_RESULT AllocationResult
  AllocateByteArray(int length, PretenureFlag pretenure = NOT_TENURED);

  // Allocates a bytecode array with given contents.
  MUST_USE_RESULT AllocationResult
  AllocateBytecodeArray(int length, const byte* raw_bytecodes, int frame_size,
                        int parameter_count, FixedArray* constant_pool);

  // Copy the code and scope info part of the code object, but insert
  // the provided data as the relocation information.
  MUST_USE_RESULT AllocationResult CopyCode(Code* code,
                                            Vector<byte> reloc_info);

  MUST_USE_RESULT AllocationResult CopyCode(Code* code);

  // Allocates a fixed array initialized with undefined values
  MUST_USE_RESULT AllocationResult
  AllocateFixedArray(int length, PretenureFlag pretenure = NOT_TENURED);

  // Allocate an uninitialized object.  The memory is non-executable if the
  // hardware and OS allow.  This is the single choke-point for allocations
  // performed by the runtime and should not be bypassed (to extend this to
  // inlined allocations, use the Heap::DisableInlineAllocation() support).
  MUST_USE_RESULT inline AllocationResult AllocateRaw(
      int size_in_bytes, AllocationSpace space,
      AllocationAlignment alignment = kWordAligned);

  // Allocates a heap object based on the map.
  MUST_USE_RESULT AllocationResult
      Allocate(Map* map, AllocationSpace space,
               AllocationSite* allocation_site = NULL);

  // Allocates a partial map for bootstrapping.
  MUST_USE_RESULT AllocationResult
      AllocatePartialMap(InstanceType instance_type, int instance_size);

  // Allocate a block of memory in the given space (filled with a filler).
  // Used as a fall-back for generated code when the space is full.
  MUST_USE_RESULT AllocationResult
      AllocateFillerObject(int size, bool double_align, AllocationSpace space);

  // Allocate an uninitialized fixed array.
  MUST_USE_RESULT AllocationResult
      AllocateRawFixedArray(int length, PretenureFlag pretenure);

  // Allocate an uninitialized fixed double array.
  MUST_USE_RESULT AllocationResult
      AllocateRawFixedDoubleArray(int length, PretenureFlag pretenure);

  // Allocate an initialized fixed array with the given filler value.
  MUST_USE_RESULT AllocationResult
      AllocateFixedArrayWithFiller(int length, PretenureFlag pretenure,
                                   Object* filler);

  // Allocates and partially initializes a String.  There are two String
  // encodings: one-byte and two-byte.  These functions allocate a string of
  // the given length and set its map and length fields.  The characters of
  // the string are uninitialized.
  MUST_USE_RESULT AllocationResult
      AllocateRawOneByteString(int length, PretenureFlag pretenure);
  MUST_USE_RESULT AllocationResult
      AllocateRawTwoByteString(int length, PretenureFlag pretenure);

  // Allocates an internalized string in old space based on the character
  // stream.
  MUST_USE_RESULT inline AllocationResult AllocateInternalizedStringFromUtf8(
      Vector<const char> str, int chars, uint32_t hash_field);

  MUST_USE_RESULT inline AllocationResult AllocateOneByteInternalizedString(
      Vector<const uint8_t> str, uint32_t hash_field);

  MUST_USE_RESULT inline AllocationResult AllocateTwoByteInternalizedString(
      Vector<const uc16> str, uint32_t hash_field);

  template <bool is_one_byte, typename T>
  MUST_USE_RESULT AllocationResult
      AllocateInternalizedStringImpl(T t, int chars, uint32_t hash_field);

  template <typename T>
  MUST_USE_RESULT inline AllocationResult AllocateInternalizedStringImpl(
      T t, int chars, uint32_t hash_field);

  // Allocates an uninitialized fixed array. It must be filled by the caller.
  MUST_USE_RESULT AllocationResult AllocateUninitializedFixedArray(int length);

  // Make a copy of src and return it.
  MUST_USE_RESULT inline AllocationResult CopyFixedArray(FixedArray* src);

  // Make a copy of src, also grow the copy, and return the copy.
  MUST_USE_RESULT AllocationResult
  CopyFixedArrayAndGrow(FixedArray* src, int grow_by, PretenureFlag pretenure);

  // Make a copy of src, set the map, and return the copy.
  MUST_USE_RESULT AllocationResult
      CopyFixedArrayWithMap(FixedArray* src, Map* map);

  // Make a copy of src and return it.
  MUST_USE_RESULT inline AllocationResult CopyFixedDoubleArray(
      FixedDoubleArray* src);

  // Computes a single character string where the character has code.
  // A cache is used for one-byte (Latin1) codes.
  MUST_USE_RESULT AllocationResult
      LookupSingleCharacterStringFromCode(uint16_t code);

  // Allocate a symbol in old space.
  MUST_USE_RESULT AllocationResult AllocateSymbol();

  // Allocates an external array of the specified length and type.
  MUST_USE_RESULT AllocationResult AllocateFixedTypedArrayWithExternalPointer(
      int length, ExternalArrayType array_type, void* external_pointer,
      PretenureFlag pretenure);

  // Allocates a fixed typed array of the specified length and type.
  MUST_USE_RESULT AllocationResult
  AllocateFixedTypedArray(int length, ExternalArrayType array_type,
                          bool initialize, PretenureFlag pretenure);

  // Make a copy of src and return it.
  MUST_USE_RESULT AllocationResult CopyAndTenureFixedCOWArray(FixedArray* src);

  // Make a copy of src, set the map, and return the copy.
  MUST_USE_RESULT AllocationResult
      CopyFixedDoubleArrayWithMap(FixedDoubleArray* src, Map* map);

  // Allocates a fixed double array with uninitialized values.
  MUST_USE_RESULT AllocationResult AllocateUninitializedFixedDoubleArray(
      int length, PretenureFlag pretenure = NOT_TENURED);

  // Allocate empty fixed array.
  MUST_USE_RESULT AllocationResult AllocateEmptyFixedArray();

  // Allocate empty fixed typed array of given type.
  MUST_USE_RESULT AllocationResult
      AllocateEmptyFixedTypedArray(ExternalArrayType array_type);

  // Allocate a tenured simple cell.
  MUST_USE_RESULT AllocationResult AllocateCell(Object* value);

  // Allocate a tenured JS global property cell initialized with the hole.
  MUST_USE_RESULT AllocationResult AllocatePropertyCell();

  MUST_USE_RESULT AllocationResult AllocateWeakCell(HeapObject* value);

  // Allocates a new utility object in the old generation.
  MUST_USE_RESULT AllocationResult AllocateStruct(InstanceType type);

  // Allocates a new foreign object.
  MUST_USE_RESULT AllocationResult
      AllocateForeign(Address address, PretenureFlag pretenure = NOT_TENURED);

  MUST_USE_RESULT AllocationResult
      AllocateCode(int object_size, bool immovable);

  MUST_USE_RESULT AllocationResult InternalizeStringWithKey(HashTableKey* key);

  MUST_USE_RESULT AllocationResult InternalizeString(String* str);

  // The amount of external memory registered through the API kept alive
  // by global handles
  int64_t amount_of_external_allocated_memory_;

  // Caches the amount of external memory registered at the last global gc.
  int64_t amount_of_external_allocated_memory_at_last_global_gc_;

  // This can be calculated directly from a pointer to the heap; however, it is
  // more expedient to get at the isolate directly from within Heap methods.
  Isolate* isolate_;

  Object* roots_[kRootListLength];

  size_t code_range_size_;
  int reserved_semispace_size_;
  int max_semi_space_size_;
  int initial_semispace_size_;
  int target_semispace_size_;
  intptr_t max_old_generation_size_;
  intptr_t initial_old_generation_size_;
  bool old_generation_size_configured_;
  intptr_t max_executable_size_;
  intptr_t maximum_committed_;

  // For keeping track of how much data has survived
  // scavenge since last new space expansion.
  int survived_since_last_expansion_;

  // ... and since the last scavenge.
  int survived_last_scavenge_;

  // This is not the depth of nested AlwaysAllocateScope's but rather a single
  // count, as scopes can be acquired from multiple tasks (read: threads).
  AtomicNumber<size_t> always_allocate_scope_count_;

  // For keeping track of context disposals.
  int contexts_disposed_;

  int global_ic_age_;

  int scan_on_scavenge_pages_;

  NewSpace new_space_;
  OldSpace* old_space_;
  OldSpace* code_space_;
  MapSpace* map_space_;
  LargeObjectSpace* lo_space_;
  HeapState gc_state_;
  int gc_post_processing_depth_;
  Address new_space_top_after_last_gc_;

  // Returns the amount of external memory registered since last global gc.
  int64_t PromotedExternalMemorySize();

  // How many "runtime allocations" happened.
  uint32_t allocations_count_;

  // Running hash over allocations performed.
  uint32_t raw_allocations_hash_;

  // Countdown counter, dumps allocation hash when 0.
  uint32_t dump_allocations_hash_countdown_;

  // How many mark-sweep collections happened.
  unsigned int ms_count_;

  // How many GCs have happened.
  unsigned int gc_count_;

  // For post mortem debugging.
  int remembered_unmapped_pages_index_;
  Address remembered_unmapped_pages_[kRememberedUnmappedPages];

#ifdef DEBUG
  // If the --gc-interval flag is set to a positive value, this
  // variable holds the value indicating the number of allocations
  // remaining until the next failure and garbage collection.
  int allocation_timeout_;
#endif  // DEBUG

  // Limit that triggers a global GC on the next (normally caused) GC.  This
  // is checked when we have already decided to do a GC to help determine
  // which collector to invoke, before expanding a paged space in the old
  // generation and on every allocation in large object space.
  intptr_t old_generation_allocation_limit_;

  // Indicates that an allocation has failed in the old generation since the
  // last GC.
  bool old_gen_exhausted_;

  // Indicates that memory usage is more important than latency.
  // TODO(ulan): Merge it with memory reducer once chromium:490559 is fixed.
  bool optimize_for_memory_usage_;

  // Indicates that inline bump-pointer allocation has been globally disabled
  // for all spaces. This is used to disable allocations in generated code.
  bool inline_allocation_disabled_;

  // Weak list heads, threaded through the objects.
  // List heads are initialized lazily and contain the undefined_value at start.
  Object* native_contexts_list_;
  Object* allocation_sites_list_;

  // List of encountered weak collections (JSWeakMap and JSWeakSet) during
  // marking. It is initialized during marking, destroyed after marking and
  // contains Smi(0) while marking is not active.
  Object* encountered_weak_collections_;

  Object* encountered_weak_cells_;

  StoreBufferRebuilder store_buffer_rebuilder_;

  List<GCCallbackPair> gc_epilogue_callbacks_;
  List<GCCallbackPair> gc_prologue_callbacks_;

  // Total RegExp code ever generated
  double total_regexp_code_generated_;

  int deferred_counters_[v8::Isolate::kUseCounterFeatureCount];

  GCTracer* tracer_;

  int high_survival_rate_period_length_;
  intptr_t promoted_objects_size_;
  double promotion_ratio_;
  double promotion_rate_;
  intptr_t semi_space_copied_object_size_;
  intptr_t previous_semi_space_copied_object_size_;
  double semi_space_copied_rate_;
  int nodes_died_in_new_space_;
  int nodes_copied_in_new_space_;
  int nodes_promoted_;

  // This is the pretenuring trigger for allocation sites that are in maybe
  // tenure state. When we switch to the maximum new space size, we deoptimize
  // the code that belongs to the allocation site and derive the lifetime
  // of the allocation site.
  unsigned int maximum_size_scavenges_;

  // Maximum GC pause.
  double max_gc_pause_;

  // Total time spent in GC.
  double total_gc_time_ms_;

  // Maximum size of objects alive after GC.
  intptr_t max_alive_after_gc_;

  // Minimal interval between two subsequent collections.
  double min_in_mutator_;

  // Cumulative GC time spent in marking.
  double marking_time_;

  // Cumulative GC time spent in sweeping.
  double sweeping_time_;

  // Last time an idle notification happened.
  double last_idle_notification_time_;

  // Last time a garbage collection happened.
  double last_gc_time_;

  Scavenger* scavenge_collector_;

  MarkCompactCollector* mark_compact_collector_;

  StoreBuffer store_buffer_;

  IncrementalMarking* incremental_marking_;

  GCIdleTimeHandler* gc_idle_time_handler_;

  MemoryReducer* memory_reducer_;
  ObjectStats* object_stats_;

  ScavengeJob* scavenge_job_;

  // These two counters are monotonically increasing and never reset.
  size_t full_codegen_bytes_generated_;
  size_t crankshaft_codegen_bytes_generated_;

  // This counter is increased before each GC and never reset.
  // To account for the bytes allocated since the last GC, use the
  // NewSpaceAllocationCounter() function.
  size_t new_space_allocation_counter_;

  // This counter is increased before each GC and never reset. To
  // account for the bytes allocated since the last GC, use the
  // OldGenerationAllocationCounter() function.
  size_t old_generation_allocation_counter_;

  // The size of objects in old generation after the last MarkCompact GC.
  size_t old_generation_size_at_last_gc_;

  // If the --deopt_every_n_garbage_collections flag is set to a positive value,
  // this variable holds the number of garbage collections since the last
  // deoptimization triggered by garbage collection.
  int gcs_since_last_deopt_;

  int allocation_sites_scratchpad_length_;

  char trace_ring_buffer_[kTraceRingBufferSize];
  // If it's not full then the data is from 0 to ring_buffer_end_.  If it's
  // full then the data is from ring_buffer_end_ to the end of the buffer and
  // from 0 to ring_buffer_end_.
  bool ring_buffer_full_;
  size_t ring_buffer_end_;

  // Shared state read by the scavenge collector and set by ScavengeObject.
  PromotionQueue promotion_queue_;

  // Flag is set when the heap has been configured.  The heap can be repeatedly
  // configured through the API until it is set up.
  bool configured_;

  // Currently set GC flags that are respected by all GC components.
  int current_gc_flags_;

  // Currently set GC callback flags that are used to pass information between
  // the embedder and V8's GC.
  GCCallbackFlags current_gc_callback_flags_;

  ExternalStringTable external_string_table_;

  MemoryChunk* chunks_queued_for_free_;

  size_t concurrent_unmapping_tasks_active_;

  base::Semaphore pending_unmapping_tasks_semaphore_;

  base::Mutex relocation_mutex_;

  int gc_callbacks_depth_;

  bool deserialization_complete_;

  bool concurrent_sweeping_enabled_;

  StrongRootsList* strong_roots_list_;

  ArrayBufferTracker* array_buffer_tracker_;

  // Classes in "heap" can be friends.
  friend class AlwaysAllocateScope;
  friend class GCCallbacksScope;
  friend class GCTracer;
  friend class HeapIterator;
  friend class IncrementalMarking;
  friend class MarkCompactCollector;
  friend class MarkCompactMarkingVisitor;
  friend class NewSpace;
  friend class ObjectStatsVisitor;
  friend class Page;
  friend class Scavenger;
  friend class StoreBuffer;

  // The allocator interface.
  friend class Factory;

  // The Isolate constructs us.
  friend class Isolate;

  // Used in cctest.
  friend class HeapTester;

  DISALLOW_COPY_AND_ASSIGN(Heap);
};


class HeapStats {
 public:
  static const int kStartMarker = 0xDECADE00;
  static const int kEndMarker = 0xDECADE01;

  int* start_marker;                       //  0
  int* new_space_size;                     //  1
  int* new_space_capacity;                 //  2
  intptr_t* old_space_size;                //  3
  intptr_t* old_space_capacity;            //  4
  intptr_t* code_space_size;               //  5
  intptr_t* code_space_capacity;           //  6
  intptr_t* map_space_size;                //  7
  intptr_t* map_space_capacity;            //  8
  intptr_t* lo_space_size;                 //  9
  int* global_handle_count;                // 10
  int* weak_global_handle_count;           // 11
  int* pending_global_handle_count;        // 12
  int* near_death_global_handle_count;     // 13
  int* free_global_handle_count;           // 14
  intptr_t* memory_allocator_size;         // 15
  intptr_t* memory_allocator_capacity;     // 16
  int* objects_per_type;                   // 17
  int* size_per_type;                      // 18
  int* os_error;                           // 19
  char* last_few_messages;                 // 20
  char* js_stacktrace;                     // 21
  int* end_marker;                         // 22
};


class AlwaysAllocateScope {
 public:
  explicit inline AlwaysAllocateScope(Isolate* isolate);
  inline ~AlwaysAllocateScope();

 private:
  Heap* heap_;
};


// Visitor class to verify interior pointers in spaces that do not contain
// or care about intergenerational references. All heap object pointers have to
// point into the heap to a location that has a map pointer at its first word.
// Caveat: Heap::Contains is an approximation because it can return true for
// objects in a heap space but above the allocation pointer.
class VerifyPointersVisitor : public ObjectVisitor {
 public:
  inline void VisitPointers(Object** start, Object** end);
};


// Verify that all objects are Smis.
class VerifySmisVisitor : public ObjectVisitor {
 public:
  inline void VisitPointers(Object** start, Object** end);
};


// Space iterator for iterating over all spaces of the heap.  Returns each space
// in turn, and null when it is done.
class AllSpaces BASE_EMBEDDED {
 public:
  explicit AllSpaces(Heap* heap) : heap_(heap), counter_(FIRST_SPACE) {}
  Space* next();

 private:
  Heap* heap_;
  int counter_;
};
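
// An illustrative usage sketch (assuming a Heap* heap): the iterator yields
// each space once and then null, so the conventional loop is
//
//   intptr_t committed = 0;
//   for (AllSpaces spaces(heap); Space* space = spaces.next();) {
//     committed += space->CommittedMemory();
//   }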


// Space iterator for iterating over all old spaces of the heap: Old space
// and code space.  Returns each space in turn, and null when it is done.
class OldSpaces BASE_EMBEDDED {
 public:
  explicit OldSpaces(Heap* heap) : heap_(heap), counter_(OLD_SPACE) {}
  OldSpace* next();

 private:
  Heap* heap_;
  int counter_;
};


// Space iterator for iterating over all the paged spaces of the heap: old
// space, code space and map space.  Returns each space in turn, and null when
// it is done.
class PagedSpaces BASE_EMBEDDED {
 public:
  explicit PagedSpaces(Heap* heap) : heap_(heap), counter_(OLD_SPACE) {}
  PagedSpace* next();

 private:
  Heap* heap_;
  int counter_;
};


// Space iterator for iterating over all spaces of the heap.
// For each space an object iterator is provided. The deallocation of the
// returned object iterators is handled by the space iterator.
class SpaceIterator : public Malloced {
 public:
  explicit SpaceIterator(Heap* heap);
  virtual ~SpaceIterator();

  bool has_next();
  ObjectIterator* next();

 private:
  ObjectIterator* CreateIterator();

2505
  Heap* heap_;
2506
  int current_space_;         // from enum AllocationSpace.
2507 2508 2509 2510
  ObjectIterator* iterator_;  // object iterator for the current space.
};
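
// Example (sketch): the has_next()/next() protocol. Each next() call creates
// an object iterator for the next space; those iterators are owned and
// deallocated by the SpaceIterator itself, so callers must not delete them.
//
//   SpaceIterator it(heap);
//   while (it.has_next()) {
//     ObjectIterator* objects = it.next();
//     // Walk |objects| to visit every object in the current space.
//   }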


// A HeapIterator provides iteration over the whole heap. It aggregates the
// specific iterators for the different spaces, as each of those can iterate
// over only one space.
//
// HeapIterator ensures there is no allocation during its lifetime (using an
// embedded DisallowHeapAllocation instance).
//
// HeapIterator can skip free list nodes (that is, de-allocated heap objects
// that still remain in the heap). Because the filtering of free nodes relies
// on GC marks, it can't be used during mark-sweep/mark-compact GC phases.
// Also, it is forbidden to interrupt iteration in this mode, as this would
// leave heap objects marked (and thus, unusable).
class HeapIterator BASE_EMBEDDED {
 public:
  enum HeapObjectsFiltering { kNoFiltering, kFilterUnreachable };

  explicit HeapIterator(Heap* heap,
                        HeapObjectsFiltering filtering = kNoFiltering);
  ~HeapIterator();

  HeapObject* next();

 private:
  struct MakeHeapIterableHelper {
    explicit MakeHeapIterableHelper(Heap* heap) { heap->MakeHeapIterable(); }
  };

  HeapObject* NextObject();

  // The following two fields need to be declared in this order. Initialization
  // order guarantees that we first make the heap iterable (which may involve
  // allocations) and only then lock it down by not allowing further
  // allocations.
  MakeHeapIterableHelper make_heap_iterable_helper_;
  DisallowHeapAllocation no_heap_allocation_;

  Heap* heap_;
  HeapObjectsFiltering filtering_;
  HeapObjectsFilter* filter_;
  // Space iterator for iterating all the spaces.
  SpaceIterator* space_iterator_;
  // Object iterator for the space currently being iterated.
  ObjectIterator* object_iterator_;
};
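
// Example (sketch): walking every object in the heap. With
// kFilterUnreachable the loop must be allowed to run to completion (see the
// class comment above); the counting body is illustrative only.
//
//   HeapIterator iterator(heap, HeapIterator::kFilterUnreachable);
//   int object_count = 0;
//   for (HeapObject* obj = iterator.next(); obj != NULL;
//        obj = iterator.next()) {
//     object_count++;
//   }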


// Cache for mapping (map, property name) into field offset.
// Cleared at startup and prior to mark sweep collection.
class KeyedLookupCache {
 public:
  // Lookup field offset for (map, name). If absent, -1 is returned.
  int Lookup(Handle<Map> map, Handle<Name> name);

  // Update an element in the cache.
  void Update(Handle<Map> map, Handle<Name> name, int field_offset);

  // Clear the cache.
  void Clear();

  static const int kLength = 256;
  static const int kCapacityMask = kLength - 1;
  static const int kMapHashShift = 5;
  static const int kHashMask = -4;  // Zero the last two bits.
  static const int kEntriesPerBucket = 4;
  static const int kEntryLength = 2;
  static const int kMapIndex = 0;
  static const int kKeyIndex = 1;
  static const int kNotFound = -1;

  // kEntriesPerBucket should be a power of 2.
  STATIC_ASSERT((kEntriesPerBucket & (kEntriesPerBucket - 1)) == 0);
  STATIC_ASSERT(kEntriesPerBucket == -kHashMask);

 private:
  KeyedLookupCache() {
    for (int i = 0; i < kLength; ++i) {
      keys_[i].map = NULL;
      keys_[i].name = NULL;
      field_offsets_[i] = kNotFound;
    }
  }

  static inline int Hash(Handle<Map> map, Handle<Name> name);

  // Get the address of the keys and field_offsets arrays.  Used in
  // generated code to perform cache lookups.
  Address keys_address() { return reinterpret_cast<Address>(&keys_); }

  Address field_offsets_address() {
    return reinterpret_cast<Address>(&field_offsets_);
  }

  struct Key {
    Map* map;
    Name* name;
  };

  Key keys_[kLength];
  int field_offsets_[kLength];

  friend class ExternalReference;
  friend class Isolate;
  DISALLOW_COPY_AND_ASSIGN(KeyedLookupCache);
};
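
// Example (sketch): the intended Lookup()/Update() protocol. The cache is
// constructed by the Isolate (the constructor is private), so reaching it
// through an isolate accessor is an assumption here, and the slow-path
// lookup that computes |offset| is elided.
//
//   KeyedLookupCache* cache = isolate->keyed_lookup_cache();
//   int offset = cache->Lookup(map, name);
//   if (offset == KeyedLookupCache::kNotFound) {
//     offset = ...;  // Full lookup on the receiver's map.
//     cache->Update(map, name, offset);
//   }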


// Cache for mapping (map, property name) into descriptor index.
// The cache contains both positive and negative results.
// A cached descriptor index equal to kNotFound means the property is absent;
// kAbsent, in contrast, means there is no cache entry for the pair.
// Cleared at startup and prior to any gc.
class DescriptorLookupCache {
 public:
  // Lookup descriptor index for (map, name).
  // If absent, kAbsent is returned.
  inline int Lookup(Map* source, Name* name);

  // Update an element in the cache.
  inline void Update(Map* source, Name* name, int result);

  // Clear the cache.
  void Clear();

  static const int kAbsent = -2;

 private:
  DescriptorLookupCache() {
    for (int i = 0; i < kLength; ++i) {
      keys_[i].source = NULL;
      keys_[i].name = NULL;
      results_[i] = kAbsent;
    }
  }

  static int Hash(Object* source, Name* name) {
    // Uses only lower 32 bits if pointers are larger.
    uint32_t source_hash =
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(source)) >>
        kPointerSizeLog2;
    uint32_t name_hash =
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name)) >>
        kPointerSizeLog2;
    return (source_hash ^ name_hash) % kLength;
  }

  static const int kLength = 64;
  struct Key {
    Map* source;
    Name* name;
  };

  Key keys_[kLength];
  int results_[kLength];

  friend class Isolate;
  DISALLOW_COPY_AND_ASSIGN(DescriptorLookupCache);
};
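
// Example (sketch): same protocol as KeyedLookupCache, but for descriptor
// indices. kAbsent signals a cache miss; a cached kNotFound result records
// that the property does not exist. The isolate accessor is an assumption,
// and the descriptor search itself is elided.
//
//   DescriptorLookupCache* cache = isolate->descriptor_lookup_cache();
//   int index = cache->Lookup(map, name);
//   if (index == DescriptorLookupCache::kAbsent) {
//     index = ...;  // Search the map's descriptors; may yield "not found".
//     cache->Update(map, name, index);
//   }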


// Abstract base class for checking whether a weak object should be retained.
class WeakObjectRetainer {
 public:
  virtual ~WeakObjectRetainer() {}

  // Return whether this object should be retained. If NULL is returned the
  // object has no references. Otherwise the address of the retained object
  // should be returned as in some GC situations the object has been moved.
  virtual Object* RetainAs(Object* object) = 0;
};
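
// Example (sketch): a minimal retainer that keeps every weak object alive by
// echoing back the (possibly moved) address it is given. A real retainer
// returns NULL for objects whose weak references should be cleared. The
// class name is illustrative.
//
//   class KeepAllRetainer : public WeakObjectRetainer {
//    public:
//     virtual Object* RetainAs(Object* object) { return object; }
//   };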


#ifdef DEBUG
// Helper class for tracing paths to a search target Object from all roots.
// The TracePathFrom() method can be used to trace paths from a specific
// object to the search target object.
class PathTracer : public ObjectVisitor {
 public:
  enum WhatToFind {
    FIND_ALL,   // Will find all matches.
    FIND_FIRST  // Will stop the search after first match.
  };

  // Tags 0, 1, and 3 are used. Use 2 for marking visited HeapObject.
  static const int kMarkTag = 2;

  // For the WhatToFind arg, if FIND_FIRST is specified, tracing will stop
  // after the first match.  If FIND_ALL is specified, then tracing will be
  // done for all matches.
  PathTracer(Object* search_target, WhatToFind what_to_find,
             VisitMode visit_mode)
      : search_target_(search_target),
        found_target_(false),
        found_target_in_trace_(false),
        what_to_find_(what_to_find),
        visit_mode_(visit_mode),
        object_stack_(20),
        no_allocation() {}

  virtual void VisitPointers(Object** start, Object** end);

  void Reset();
  void TracePathFrom(Object** root);

  bool found() const { return found_target_; }

  static Object* const kAnyGlobalObject;

 protected:
  class MarkVisitor;
  class UnmarkVisitor;

  void MarkRecursively(Object** p, MarkVisitor* mark_visitor);
  void UnmarkRecursively(Object** p, UnmarkVisitor* unmark_visitor);
  virtual void ProcessResults();

  Object* search_target_;
  bool found_target_;
  bool found_target_in_trace_;
  WhatToFind what_to_find_;
  VisitMode visit_mode_;
  List<Object*> object_stack_;

  DisallowHeapAllocation no_allocation;  // i.e. no gc allowed.

 private:
  DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer);
};
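
// Example (sketch): checking whether |target| is reachable from a particular
// root slot. |root| and |target| are illustrative; VISIT_ALL is one of the
// VisitMode values defined in globals.h.
//
//   PathTracer tracer(target, PathTracer::FIND_FIRST, VISIT_ALL);
//   tracer.TracePathFrom(&root);
//   if (tracer.found()) {
//     // At least one path from |root| to |target| was traced.
//   }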
#endif  // DEBUG
}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_HEAP_H_