@@ -43,27 +43,17 @@
 namespace v8 {
 namespace internal {
 
-#define ROOT_ALLOCATION(type, name) type* Heap::name##_;
-  ROOT_LIST(ROOT_ALLOCATION)
-#undef ROOT_ALLOCATION
-
-
-#define STRUCT_ALLOCATION(NAME, Name, name) Map* Heap::name##_map_;
-  STRUCT_LIST(STRUCT_ALLOCATION)
-#undef STRUCT_ALLOCATION
-
-
-#define SYMBOL_ALLOCATION(name, string) String* Heap::name##_;
-  SYMBOL_LIST(SYMBOL_ALLOCATION)
-#undef SYMBOL_ALLOCATION
 
 String* Heap::hidden_symbol_;
+Object* Heap::roots_[Heap::kRootListLength];
 
 NewSpace Heap::new_space_;
 OldSpace* Heap::old_pointer_space_ = NULL;
 OldSpace* Heap::old_data_space_ = NULL;
 OldSpace* Heap::code_space_ = NULL;
 MapSpace* Heap::map_space_ = NULL;
+CellSpace* Heap::cell_space_ = NULL;
 LargeObjectSpace* Heap::lo_space_ = NULL;
 
 static const int kMinimumPromotionLimit = 2*MB;
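
This first hunk is the heart of the change: previously every root was its own static field, stamped out by expanding ROOT_LIST, STRUCT_LIST, and SYMBOL_LIST (the X-macro pattern), and the commit replaces them with a single roots_ array indexed by per-root constants so the collector can treat the root set as one contiguous range. A minimal, self-contained sketch of the X-macro technique being retired here; the list entries and types are invented for illustration, not V8's actual roots:

    #include <cstdio>

    struct Map {};
    struct String {};

    // One list definition drives every expansion site.
    #define DEMO_ROOT_LIST(V) \
      V(Map, meta_map)        \
      V(String, empty_string)

    // Expansion 1: one pointer field per list entry (e.g. meta_map_).
    #define DECLARE_ROOT(type, name) type* name##_ = nullptr;
    DEMO_ROOT_LIST(DECLARE_ROOT)
    #undef DECLARE_ROOT

    int main() {
      // Expansion 2: visit every root by name without repeating the list.
    #define PRINT_ROOT(type, name) std::printf(#name " -> %p\n", (void*)name##_);
      DEMO_ROOT_LIST(PRINT_ROOT)
    #undef PRINT_ROOT
      return 0;
    }

The array representation keeps the list-driven naming (each entry gets a root index constant) but lets IterateStrongRoots, later in this patch, visit the whole array in a single call.
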
@@ -79,7 +69,7 @@ int Heap::amount_of_external_allocated_memory_at_last_global_gc_ = 0;
 
 // semispace_size_ should be a power of 2 and old_generation_size_ should be
 // a multiple of Page::kPageSize.
-#if V8_HOST_ARCH_ARM
+#if V8_TARGET_ARCH_ARM
 int Heap::semispace_size_ = 512*KB;
 int Heap::old_generation_size_ = 128*MB;
 int Heap::initial_semispace_size_ = 128*KB;
@@ -121,7 +111,8 @@ int Heap::Capacity() {
       old_pointer_space_->Capacity() +
       old_data_space_->Capacity() +
       code_space_->Capacity() +
-      map_space_->Capacity();
+      map_space_->Capacity() +
+      cell_space_->Capacity();
 }
@@ -132,7 +123,8 @@ int Heap::Available() {
       old_pointer_space_->Available() +
       old_data_space_->Available() +
       code_space_->Available() +
-      map_space_->Available();
+      map_space_->Available() +
+      cell_space_->Available();
 }
@@ -141,6 +133,7 @@ bool Heap::HasBeenSetup() {
          old_data_space_ != NULL &&
          code_space_ != NULL &&
          map_space_ != NULL &&
+         cell_space_ != NULL &&
          lo_space_ != NULL;
 }
@@ -221,6 +214,7 @@ void Heap::ReportStatisticsAfterGC() {
   // NewSpace statistics are logged exactly once when --log-gc is turned on.
 #if defined(DEBUG) && defined(ENABLE_LOGGING_AND_PROFILING)
   if (FLAG_heap_stats) {
+    new_space_.CollectStatistics();
     ReportHeapStatistics("After GC");
   } else if (FLAG_log_gc) {
     new_space_.ReportStatistics();
@@ -283,9 +277,8 @@ void Heap::GarbageCollectionEpilogue() {
 
   Counters::alive_after_last_gc.Set(SizeOfObjects());
 
-  SymbolTable* symbol_table = SymbolTable::cast(Heap::symbol_table_);
-  Counters::symbol_table_capacity.Set(symbol_table->Capacity());
-  Counters::number_of_symbols.Set(symbol_table->NumberOfElements());
+  Counters::symbol_table_capacity.Set(symbol_table()->Capacity());
+  Counters::number_of_symbols.Set(symbol_table()->NumberOfElements());
 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
   ReportStatisticsAfterGC();
 #endif
@@ -370,6 +363,8 @@ bool Heap::CollectGarbage(int requested_size, AllocationSpace space) {
       return code_space_->Available() >= requested_size;
     case MAP_SPACE:
       return map_space_->Available() >= requested_size;
+    case CELL_SPACE:
+      return cell_space_->Available() >= requested_size;
     case LO_SPACE:
       return lo_space_->Available() >= requested_size;
   }
@@ -404,8 +399,7 @@ class SymbolTableVerifier : public ObjectVisitor {
 
 static void VerifySymbolTable() {
 #ifdef DEBUG
   SymbolTableVerifier verifier;
-  SymbolTable* symbol_table = SymbolTable::cast(Heap::symbol_table());
-  symbol_table->IterateElements(&verifier);
+  Heap::symbol_table()->IterateElements(&verifier);
 #endif  // DEBUG
 }
@@ -428,22 +422,8 @@ void Heap::PerformGarbageCollection(AllocationSpace space,
     old_gen_allocation_limit_ =
         old_gen_size + Max(kMinimumAllocationLimit, old_gen_size / 2);
     old_gen_exhausted_ = false;
-
-    // If we have used the mark-compact collector to collect the new
-    // space, and it has not compacted the new space, we force a
-    // separate scavenge collection.  This is a hack.  It covers the
-    // case where (1) a new space collection was requested, (2) the
-    // collector selection policy selected the mark-compact collector,
-    // and (3) the mark-compact collector policy selected not to
-    // compact the new space.  In that case, there is no more (usable)
-    // free space in the new space after the collection compared to
-    // before.
-    if (space == NEW_SPACE && !MarkCompactCollector::HasCompacted()) {
-      Scavenge();
-    }
   } else {
     Scavenge();
   }
   Counters::objs_since_last_young.Set(0);
 
   PostGarbageCollectionProcessing();
@@ -621,6 +601,7 @@ static void VerifyNonPointerSpacePointers() {
   }
 }
 #endif
 
+
 void Heap::Scavenge() {
 #ifdef DEBUG
   if (FLAG_enable_slow_asserts) VerifyNonPointerSpacePointers();
@@ -679,7 +660,7 @@
   // Copy objects reachable from weak pointers.
   GlobalHandles::IterateWeakRoots(&scavenge_visitor);
 
-#if V8_HOST_ARCH_64_BIT
+#ifdef V8_HOST_ARCH_64_BIT
   // TODO(X64): Make this go away again. We currently disable RSets for
   // 64-bit-mode.
   HeapObjectIterator old_pointer_iterator(old_pointer_space_);
@@ -699,13 +680,25 @@
       heap_object->Iterate(&scavenge_visitor);
     }
   }
-#else  // V8_HOST_ARCH_64_BIT
+#else  // !defined(V8_HOST_ARCH_64_BIT)
   // Copy objects reachable from the old generation.  By definition,
   // there are no intergenerational pointers in code or data spaces.
   IterateRSet(old_pointer_space_, &ScavengePointer);
   IterateRSet(map_space_, &ScavengePointer);
   lo_space_->IterateRSet(&ScavengePointer);
-#endif  // V8_HOST_ARCH_64_BIT
+#endif
+
+  // Copy objects reachable from cells by scavenging cell values directly.
+  HeapObjectIterator cell_iterator(cell_space_);
+  while (cell_iterator.has_next()) {
+    HeapObject* cell = cell_iterator.next();
+    if (cell->IsJSGlobalPropertyCell()) {
+      Address value_address =
+          reinterpret_cast<Address>(cell) +
+          (JSGlobalPropertyCell::kValueOffset - kHeapObjectTag);
+      scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address));
+    }
+  }
 
   do {
     ASSERT(new_space_front <= new_space_.top());
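
The cell-scavenging loop added above computes the value slot's address by hand: V8 heap pointers carry a low tag bit (kHeapObjectTag), so the raw address of a field is the tagged pointer plus the field offset minus the tag. A runnable sketch of that arithmetic, with an invented Cell layout standing in for JSGlobalPropertyCell:

    #include <cstdint>
    #include <cstdio>

    // Hedged sketch of tagged-pointer arithmetic; kHeapObjectTag = 1 matches
    // V8's convention, but this struct layout is invented for illustration.
    const intptr_t kHeapObjectTag = 1;

    struct Cell {
      void* map;
      void* value;  // the slot the scavenger wants to visit
    };
    const int kValueOffset = sizeof(void*);  // offsetof(Cell, value)

    int main() {
      Cell cell = {nullptr, &cell};
      // A "heap pointer" to the cell carries the tag in its low bit.
      uintptr_t tagged = reinterpret_cast<uintptr_t>(&cell) + kHeapObjectTag;

      // Untag by subtracting kHeapObjectTag while adding the field offset,
      // exactly as the cell-scavenging loop above does.
      void** value_slot =
          reinterpret_cast<void**>(tagged + kValueOffset - kHeapObjectTag);
      std::printf("slot %p holds %p\n", (void*)value_slot, *value_slot);
      return 0;
    }
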
@@ -845,8 +838,8 @@ int Heap::UpdateRSet(HeapObject* obj) {
 
 
 void Heap::RebuildRSets() {
-  // By definition, we do not care about remembered set bits in code or data
-  // spaces.
+  // By definition, we do not care about remembered set bits in code,
+  // data, or cell spaces.
   map_space_->ClearRSet();
   RebuildRSets(map_space_);
@@ -1021,11 +1014,11 @@ void Heap::ScavengePointer(HeapObject** p) {
 
 Object* Heap::AllocatePartialMap(InstanceType instance_type,
                                  int instance_size) {
-  Object* result = AllocateRawMap(Map::kSize);
+  Object* result = AllocateRawMap();
   if (result->IsFailure()) return result;
 
   // Map::cast cannot be used due to uninitialized map field.
-  reinterpret_cast<Map*>(result)->set_map(meta_map());
+  reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map());
   reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
   reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
   reinterpret_cast<Map*>(result)->set_inobject_properties(0);
@@ -1035,7 +1028,7 @@ Object* Heap::AllocatePartialMap(InstanceType instance_type,
 
 
 Object* Heap::AllocateMap(InstanceType instance_type, int instance_size) {
-  Object* result = AllocateRawMap(Map::kSize);
+  Object* result = AllocateRawMap();
   if (result->IsFailure()) return result;
 
   Map* map = reinterpret_cast<Map*>(result);
@@ -1054,36 +1047,59 @@ Object* Heap::AllocateMap(InstanceType instance_type, int instance_size) {
 }
 
 
+const Heap::StringTypeTable Heap::string_type_table[] = {
+#define STRING_TYPE_ELEMENT(type, size, name, camel_name) \
+  {type, size, k##camel_name##MapRootIndex},
+  STRING_TYPE_LIST(STRING_TYPE_ELEMENT)
+#undef STRING_TYPE_ELEMENT
+};
+
+
+const Heap::ConstantSymbolTable Heap::constant_symbol_table[] = {
+#define CONSTANT_SYMBOL_ELEMENT(name, contents) \
+  {contents, k##name##RootIndex},
+  SYMBOL_LIST(CONSTANT_SYMBOL_ELEMENT)
+#undef CONSTANT_SYMBOL_ELEMENT
+};
+
+
+const Heap::StructTable Heap::struct_table[] = {
+#define STRUCT_TABLE_ELEMENT(NAME, Name, name) \
+  { NAME##_TYPE, Name::kSize, k##Name##MapRootIndex },
+  STRUCT_LIST(STRUCT_TABLE_ELEMENT)
+#undef STRUCT_TABLE_ELEMENT
+};
+
+
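
The three tables added above reuse the existing lists to build constant arrays of {instance type, size, root index} rows, so the allocation code below can loop over data instead of macro-expanding an allocation statement per entry. A self-contained sketch of the table-driven idea, with hypothetical types and sizes standing in for V8's:

    #include <cstdio>

    // Hypothetical stand-ins for V8's instance types, sizes, and root indices.
    enum InstanceType { SHORT_STRING_TYPE, MEDIUM_STRING_TYPE };
    enum RootIndex { kShortStringMapRootIndex, kMediumStringMapRootIndex, kRootCount };

    #define DEMO_STRING_TYPE_LIST(V)                       \
      V(SHORT_STRING_TYPE, 16, short_string, ShortString)  \
      V(MEDIUM_STRING_TYPE, 16, medium_string, MediumString)

    struct StringTypeTable {
      InstanceType type;
      int size;
      RootIndex index;
    };

    // The macro stamps out one constant table row per list entry.
    const StringTypeTable string_type_table[] = {
    #define STRING_TYPE_ELEMENT(type, size, name, camel_name) \
      {type, size, k##camel_name##MapRootIndex},
      DEMO_STRING_TYPE_LIST(STRING_TYPE_ELEMENT)
    #undef STRING_TYPE_ELEMENT
    };

    void* roots[kRootCount];

    void* AllocateMap(InstanceType type, int size) {
      return new char[size];  // stand-in for the real map allocator
    }

    // One data-driven loop replaces a macro that expanded allocation code inline.
    bool CreateStringMaps() {
      for (unsigned i = 0; i < sizeof(string_type_table) / sizeof(*string_type_table); i++) {
        const StringTypeTable& entry = string_type_table[i];
        void* map = AllocateMap(entry.type, entry.size);
        if (map == nullptr) return false;
        roots[entry.index] = map;
      }
      return true;
    }

    int main() {
      std::printf("%s\n", CreateStringMaps() ? "maps created" : "failure");
      return 0;
    }
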
 bool Heap::CreateInitialMaps() {
   Object* obj = AllocatePartialMap(MAP_TYPE, Map::kSize);
   if (obj->IsFailure()) return false;
 
   // Map::cast cannot be used due to uninitialized map field.
-  meta_map_ = reinterpret_cast<Map*>(obj);
-  meta_map()->set_map(meta_map());
+  Map* new_meta_map = reinterpret_cast<Map*>(obj);
+  set_meta_map(new_meta_map);
+  new_meta_map->set_map(new_meta_map);
 
   obj = AllocatePartialMap(FIXED_ARRAY_TYPE, FixedArray::kHeaderSize);
   if (obj->IsFailure()) return false;
-  fixed_array_map_ = Map::cast(obj);
+  set_fixed_array_map(Map::cast(obj));
 
   obj = AllocatePartialMap(ODDBALL_TYPE, Oddball::kSize);
   if (obj->IsFailure()) return false;
-  oddball_map_ = Map::cast(obj);
+  set_oddball_map(Map::cast(obj));
 
   // Allocate the empty array
   obj = AllocateEmptyFixedArray();
   if (obj->IsFailure()) return false;
-  empty_fixed_array_ = FixedArray::cast(obj);
+  set_empty_fixed_array(FixedArray::cast(obj));
 
   obj = Allocate(oddball_map(), OLD_DATA_SPACE);
   if (obj->IsFailure()) return false;
-  null_value_ = obj;
+  set_null_value(obj);
 
-  // Allocate the empty descriptor array.  AllocateMap can now be used.
+  // Allocate the empty descriptor array.
   obj = AllocateEmptyFixedArray();
   if (obj->IsFailure()) return false;
-  // There is a check against empty_descriptor_array() in cast().
-  empty_descriptor_array_ = reinterpret_cast<DescriptorArray*>(obj);
+  set_empty_descriptor_array(DescriptorArray::cast(obj));
 
   // Fix the instance_descriptors for the existing maps.
   meta_map()->set_instance_descriptors(empty_descriptor_array());
@@ -1101,100 +1117,106 @@ bool Heap::CreateInitialMaps() {
 
   fixed_array_map()->set_prototype(null_value());
   fixed_array_map()->set_constructor(null_value());
 
   oddball_map()->set_prototype(null_value());
   oddball_map()->set_constructor(null_value());
 
   obj = AllocateMap(HEAP_NUMBER_TYPE, HeapNumber::kSize);
   if (obj->IsFailure()) return false;
-  heap_number_map_ = Map::cast(obj);
+  set_heap_number_map(Map::cast(obj));
 
   obj = AllocateMap(PROXY_TYPE, Proxy::kSize);
   if (obj->IsFailure()) return false;
-  proxy_map_ = Map::cast(obj);
+  set_proxy_map(Map::cast(obj));
 
-#define ALLOCATE_STRING_MAP(type, size, name) \
-  obj = AllocateMap(type, size);              \
-  if (obj->IsFailure()) return false;         \
-  name##_map_ = Map::cast(obj);
-  STRING_TYPE_LIST(ALLOCATE_STRING_MAP);
-#undef ALLOCATE_STRING_MAP
+  for (unsigned i = 0; i < ARRAY_SIZE(string_type_table); i++) {
+    const StringTypeTable& entry = string_type_table[i];
+    obj = AllocateMap(entry.type, entry.size);
+    if (obj->IsFailure()) return false;
+    roots_[entry.index] = Map::cast(obj);
+  }
 
   obj = AllocateMap(SHORT_STRING_TYPE, SeqTwoByteString::kAlignedSize);
   if (obj->IsFailure()) return false;
-  undetectable_short_string_map_ = Map::cast(obj);
-  undetectable_short_string_map_->set_is_undetectable();
+  set_undetectable_short_string_map(Map::cast(obj));
+  Map::cast(obj)->set_is_undetectable();
 
   obj = AllocateMap(MEDIUM_STRING_TYPE, SeqTwoByteString::kAlignedSize);
   if (obj->IsFailure()) return false;
-  undetectable_medium_string_map_ = Map::cast(obj);
-  undetectable_medium_string_map_->set_is_undetectable();
+  set_undetectable_medium_string_map(Map::cast(obj));
+  Map::cast(obj)->set_is_undetectable();
 
   obj = AllocateMap(LONG_STRING_TYPE, SeqTwoByteString::kAlignedSize);
   if (obj->IsFailure()) return false;
-  undetectable_long_string_map_ = Map::cast(obj);
-  undetectable_long_string_map_->set_is_undetectable();
+  set_undetectable_long_string_map(Map::cast(obj));
+  Map::cast(obj)->set_is_undetectable();
 
   obj = AllocateMap(SHORT_ASCII_STRING_TYPE, SeqAsciiString::kAlignedSize);
   if (obj->IsFailure()) return false;
-  undetectable_short_ascii_string_map_ = Map::cast(obj);
-  undetectable_short_ascii_string_map_->set_is_undetectable();
+  set_undetectable_short_ascii_string_map(Map::cast(obj));
+  Map::cast(obj)->set_is_undetectable();
 
   obj = AllocateMap(MEDIUM_ASCII_STRING_TYPE, SeqAsciiString::kAlignedSize);
   if (obj->IsFailure()) return false;
-  undetectable_medium_ascii_string_map_ = Map::cast(obj);
-  undetectable_medium_ascii_string_map_->set_is_undetectable();
+  set_undetectable_medium_ascii_string_map(Map::cast(obj));
+  Map::cast(obj)->set_is_undetectable();
 
   obj = AllocateMap(LONG_ASCII_STRING_TYPE, SeqAsciiString::kAlignedSize);
   if (obj->IsFailure()) return false;
-  undetectable_long_ascii_string_map_ = Map::cast(obj);
-  undetectable_long_ascii_string_map_->set_is_undetectable();
+  set_undetectable_long_ascii_string_map(Map::cast(obj));
+  Map::cast(obj)->set_is_undetectable();
 
   obj = AllocateMap(BYTE_ARRAY_TYPE, Array::kAlignedSize);
   if (obj->IsFailure()) return false;
-  byte_array_map_ = Map::cast(obj);
+  set_byte_array_map(Map::cast(obj));
 
   obj = AllocateMap(CODE_TYPE, Code::kHeaderSize);
   if (obj->IsFailure()) return false;
-  code_map_ = Map::cast(obj);
+  set_code_map(Map::cast(obj));
+
+  obj = AllocateMap(JS_GLOBAL_PROPERTY_CELL_TYPE,
+                    JSGlobalPropertyCell::kSize);
+  if (obj->IsFailure()) return false;
+  set_global_property_cell_map(Map::cast(obj));
 
   obj = AllocateMap(FILLER_TYPE, kPointerSize);
   if (obj->IsFailure()) return false;
-  one_word_filler_map_ = Map::cast(obj);
+  set_one_pointer_filler_map(Map::cast(obj));
 
   obj = AllocateMap(FILLER_TYPE, 2 * kPointerSize);
   if (obj->IsFailure()) return false;
-  two_word_filler_map_ = Map::cast(obj);
+  set_two_pointer_filler_map(Map::cast(obj));
 
-#define ALLOCATE_STRUCT_MAP(NAME, Name, name) \
-  obj = AllocateMap(NAME##_TYPE, Name::kSize); \
-  if (obj->IsFailure()) return false;          \
-  name##_map_ = Map::cast(obj);
-  STRUCT_LIST(ALLOCATE_STRUCT_MAP)
-#undef ALLOCATE_STRUCT_MAP
+  for (unsigned i = 0; i < ARRAY_SIZE(struct_table); i++) {
+    const StructTable& entry = struct_table[i];
+    obj = AllocateMap(entry.type, entry.size);
+    if (obj->IsFailure()) return false;
+    roots_[entry.index] = Map::cast(obj);
+  }
 
   obj = AllocateMap(FIXED_ARRAY_TYPE, HeapObject::kHeaderSize);
   if (obj->IsFailure()) return false;
-  hash_table_map_ = Map::cast(obj);
+  set_hash_table_map(Map::cast(obj));
 
   obj = AllocateMap(FIXED_ARRAY_TYPE, HeapObject::kHeaderSize);
   if (obj->IsFailure()) return false;
-  context_map_ = Map::cast(obj);
+  set_context_map(Map::cast(obj));
 
   obj = AllocateMap(FIXED_ARRAY_TYPE, HeapObject::kHeaderSize);
   if (obj->IsFailure()) return false;
-  catch_context_map_ = Map::cast(obj);
+  set_catch_context_map(Map::cast(obj));
 
   obj = AllocateMap(FIXED_ARRAY_TYPE, HeapObject::kHeaderSize);
   if (obj->IsFailure()) return false;
-  global_context_map_ = Map::cast(obj);
+  set_global_context_map(Map::cast(obj));
 
   obj = AllocateMap(JS_FUNCTION_TYPE, JSFunction::kSize);
   if (obj->IsFailure()) return false;
-  boilerplate_function_map_ = Map::cast(obj);
+  set_boilerplate_function_map(Map::cast(obj));
 
   obj = AllocateMap(SHARED_FUNCTION_INFO_TYPE, SharedFunctionInfo::kSize);
   if (obj->IsFailure()) return false;
-  shared_function_info_map_ = Map::cast(obj);
+  set_shared_function_info_map(Map::cast(obj));
 
   ASSERT(!Heap::InNewSpace(Heap::empty_fixed_array()));
   return true;
@@ -1230,6 +1252,15 @@ Object* Heap::AllocateHeapNumber(double value) {
 }
 
 
+Object* Heap::AllocateJSGlobalPropertyCell(Object* value) {
+  Object* result = AllocateRawCell();
+  if (result->IsFailure()) return result;
+  HeapObject::cast(result)->set_map(global_property_cell_map());
+  JSGlobalPropertyCell::cast(result)->set_value(value);
+  return result;
+}
+
+
 Object* Heap::CreateOddball(Map* map,
                             const char* to_string,
                             Object* to_number) {
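
AllocateJSGlobalPropertyCell above is new in this patch: each global property's value gets boxed in a one-slot cell object. The payoff is indirection: a consumer can cache a pointer to the cell rather than the value, and later stores to the global remain visible through the cached pointer. A small sketch of the idea, with a plain struct standing in for the heap-allocated cell:

    #include <cstdio>

    // Hedged sketch of why the cell indirection is useful; PropertyCell here
    // is a plain struct, not V8's tagged heap object.
    struct PropertyCell {
      int value;
    };

    int main() {
      PropertyCell cell = {42};

      // Compiled code can embed &cell once; it never needs the value itself.
      PropertyCell* cached = &cell;

      cell.value = 7;                      // a later store to the global
      std::printf("%d\n", cached->value);  // the cached cell still sees 7
      return 0;
    }
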
@@ -1244,41 +1275,62 @@ bool Heap::CreateApiObjects() {
 
   obj = AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
   if (obj->IsFailure()) return false;
-  neander_map_ = Map::cast(obj);
+  set_neander_map(Map::cast(obj));
 
-  obj = Heap::AllocateJSObjectFromMap(neander_map_);
+  obj = Heap::AllocateJSObjectFromMap(neander_map());
   if (obj->IsFailure()) return false;
   Object* elements = AllocateFixedArray(2);
   if (elements->IsFailure()) return false;
   FixedArray::cast(elements)->set(0, Smi::FromInt(0));
   JSObject::cast(obj)->set_elements(FixedArray::cast(elements));
-  message_listeners_ = JSObject::cast(obj);
+  set_message_listeners(JSObject::cast(obj));
 
   return true;
 }
 
 
+void Heap::CreateCEntryStub() {
+  CEntryStub stub;
+  set_c_entry_code(*stub.GetCode());
+}
+
+
+void Heap::CreateCEntryDebugBreakStub() {
+  CEntryDebugBreakStub stub;
+  set_c_entry_debug_break_code(*stub.GetCode());
+}
+
+
+void Heap::CreateJSEntryStub() {
+  JSEntryStub stub;
+  set_js_entry_code(*stub.GetCode());
+}
+
+
+void Heap::CreateJSConstructEntryStub() {
+  JSConstructEntryStub stub;
+  set_js_construct_entry_code(*stub.GetCode());
+}
+
+
 void Heap::CreateFixedStubs() {
   // Here we create roots for fixed stubs. They are needed at GC
   // for cooking and uncooking (check out frames.cc).
   // This eliminates the need for doing dictionary lookup in the
   // stub cache for these stubs.
   HandleScope scope;
-  {
-    CEntryStub stub;
-    c_entry_code_ = *stub.GetCode();
-  }
-  {
-    CEntryDebugBreakStub stub;
-    c_entry_debug_break_code_ = *stub.GetCode();
-  }
-  {
-    JSEntryStub stub;
-    js_entry_code_ = *stub.GetCode();
-  }
-  {
-    JSConstructEntryStub stub;
-    js_construct_entry_code_ = *stub.GetCode();
-  }
+  // gcc-4.4 has a problem generating correct code for the following snippet:
+  // { CEntryStub stub;
+  //   c_entry_code_ = *stub.GetCode();
+  // }
+  // { CEntryDebugBreakStub stub;
+  //   c_entry_debug_break_code_ = *stub.GetCode();
+  // }
+  // To work around the problem, make separate functions without inlining.
+  Heap::CreateCEntryStub();
+  Heap::CreateCEntryDebugBreakStub();
+  Heap::CreateJSEntryStub();
+  Heap::CreateJSConstructEntryStub();
 }
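
The CreateFixedStubs rewrite above is a compiler workaround, not a behavior change: each brace-scoped block moves into its own named function so gcc 4.4 compiles the stub temporaries separately. A sketch of the pattern with invented stub and root names:

    #include <cstdio>

    // DemoStub and these root variables are invented; this only sketches the
    // workaround shape. The split helps only while the compiler does not
    // inline the helpers back (V8 relies on them being separate functions; a
    // compiler-specific noinline attribute would make that explicit).
    struct DemoStub {
      int GetCode() { return 42; }
    };

    static int c_entry_code_ = 0;
    static int js_entry_code_ = 0;

    // One small function per former brace-scoped block.
    void CreateCEntryStub() {
      DemoStub stub;
      c_entry_code_ = stub.GetCode();
    }

    void CreateJSEntryStub() {
      DemoStub stub;
      js_entry_code_ = stub.GetCode();
    }

    void CreateFixedStubs() {
      CreateCEntryStub();
      CreateJSEntryStub();
    }

    int main() {
      CreateFixedStubs();
      std::printf("%d %d\n", c_entry_code_, js_entry_code_);
      return 0;
    }
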
@@ -1288,34 +1340,35 @@ bool Heap::CreateInitialObjects() {
   // The -0 value must be set before NumberFromDouble works.
   obj = AllocateHeapNumber(-0.0, TENURED);
   if (obj->IsFailure()) return false;
-  minus_zero_value_ = obj;
-  ASSERT(signbit(minus_zero_value_->Number()) != 0);
+  set_minus_zero_value(obj);
+  ASSERT(signbit(minus_zero_value()->Number()) != 0);
 
   obj = AllocateHeapNumber(OS::nan_value(), TENURED);
   if (obj->IsFailure()) return false;
-  nan_value_ = obj;
+  set_nan_value(obj);
 
   obj = Allocate(oddball_map(), OLD_DATA_SPACE);
   if (obj->IsFailure()) return false;
-  undefined_value_ = obj;
+  set_undefined_value(obj);
   ASSERT(!InNewSpace(undefined_value()));
 
   // Allocate initial symbol table.
   obj = SymbolTable::Allocate(kInitialSymbolTableSize);
   if (obj->IsFailure()) return false;
-  symbol_table_ = obj;
+  // Don't use set_symbol_table() due to asserts.
+  roots_[kSymbolTableRootIndex] = obj;
 
   // Assign the print strings for oddballs after creating the symbol table.
   Object* symbol = LookupAsciiSymbol("undefined");
   if (symbol->IsFailure()) return false;
-  Oddball::cast(undefined_value_)->set_to_string(String::cast(symbol));
-  Oddball::cast(undefined_value_)->set_to_number(nan_value_);
+  Oddball::cast(undefined_value())->set_to_string(String::cast(symbol));
+  Oddball::cast(undefined_value())->set_to_number(nan_value());
 
   // Assign the print strings for oddballs after creating the symbol table.
   symbol = LookupAsciiSymbol("null");
   if (symbol->IsFailure()) return false;
-  Oddball::cast(null_value_)->set_to_string(String::cast(symbol));
-  Oddball::cast(null_value_)->set_to_number(Smi::FromInt(0));
+  Oddball::cast(null_value())->set_to_string(String::cast(symbol));
+  Oddball::cast(null_value())->set_to_number(Smi::FromInt(0));
 
   // Allocate the null_value
   obj = Oddball::cast(null_value())->Initialize("null", Smi::FromInt(0));
@@ -1323,32 +1376,31 @@
 
   obj = CreateOddball(oddball_map(), "true", Smi::FromInt(1));
   if (obj->IsFailure()) return false;
-  true_value_ = obj;
+  set_true_value(obj);
 
   obj = CreateOddball(oddball_map(), "false", Smi::FromInt(0));
   if (obj->IsFailure()) return false;
-  false_value_ = obj;
+  set_false_value(obj);
 
   obj = CreateOddball(oddball_map(), "hole", Smi::FromInt(-1));
   if (obj->IsFailure()) return false;
-  the_hole_value_ = obj;
+  set_the_hole_value(obj);
 
   // Allocate the empty string.
   obj = AllocateRawAsciiString(0, TENURED);
   if (obj->IsFailure()) return false;
-  empty_string_ = String::cast(obj);
+  set_empty_string(String::cast(obj));
 
-#define SYMBOL_INITIALIZE(name, string) \
-  obj = LookupAsciiSymbol(string);      \
-  if (obj->IsFailure()) return false;   \
-  (name##_) = String::cast(obj);
-  SYMBOL_LIST(SYMBOL_INITIALIZE)
-#undef SYMBOL_INITIALIZE
+  for (unsigned i = 0; i < ARRAY_SIZE(constant_symbol_table); i++) {
+    obj = LookupAsciiSymbol(constant_symbol_table[i].contents);
+    if (obj->IsFailure()) return false;
+    roots_[constant_symbol_table[i].index] = String::cast(obj);
+  }
 
   // Allocate the hidden symbol which is used to identify the hidden properties
   // in JSObjects. The hash code has a special value so that it will not match
-  // the empty string when searching for the property. It cannot be part of the
-  // SYMBOL_LIST because it needs to be allocated manually with the special
+  // the empty string when searching for the property. It cannot be part of the
+  // loop above because it needs to be allocated manually with the special
   // hash code in place. The hash code for the hidden_symbol is zero to ensure
   // that it will always be at the first entry in property descriptors.
   obj = AllocateSymbol(CStrVector(""), 0, String::kHashComputedMask);
@@ -1358,37 +1410,37 @@
   // Allocate the proxy for __proto__.
   obj = AllocateProxy((Address) &Accessors::ObjectPrototype);
   if (obj->IsFailure()) return false;
-  prototype_accessors_ = Proxy::cast(obj);
+  set_prototype_accessors(Proxy::cast(obj));
 
   // Allocate the code_stubs dictionary.
-  obj = Dictionary::Allocate(4);
+  obj = NumberDictionary::Allocate(4);
   if (obj->IsFailure()) return false;
-  code_stubs_ = Dictionary::cast(obj);
+  set_code_stubs(NumberDictionary::cast(obj));
 
   // Allocate the non_monomorphic_cache used in stub-cache.cc
-  obj = Dictionary::Allocate(4);
+  obj = NumberDictionary::Allocate(4);
   if (obj->IsFailure()) return false;
-  non_monomorphic_cache_ = Dictionary::cast(obj);
+  set_non_monomorphic_cache(NumberDictionary::cast(obj));
 
   CreateFixedStubs();
 
   // Allocate the number->string conversion cache
   obj = AllocateFixedArray(kNumberStringCacheSize * 2);
   if (obj->IsFailure()) return false;
-  number_string_cache_ = FixedArray::cast(obj);
+  set_number_string_cache(FixedArray::cast(obj));
 
   // Allocate cache for single character strings.
   obj = AllocateFixedArray(String::kMaxAsciiCharCode+1);
   if (obj->IsFailure()) return false;
-  single_character_string_cache_ = FixedArray::cast(obj);
+  set_single_character_string_cache(FixedArray::cast(obj));
 
   // Allocate cache for external strings pointing to native source code.
   obj = AllocateFixedArray(Natives::GetBuiltinsCount());
   if (obj->IsFailure()) return false;
-  natives_source_cache_ = FixedArray::cast(obj);
+  set_natives_source_cache(FixedArray::cast(obj));
 
   // Handling of script id generation is in Factory::NewScript.
-  last_script_id_ = undefined_value();
+  set_last_script_id(undefined_value());
 
   // Initialize keyed lookup cache.
   KeyedLookupCache::Clear();
@@ -1426,13 +1478,13 @@ Object* Heap::GetNumberStringCache(Object* number) {
   } else {
     hash = double_get_hash(number->Number());
   }
-  Object* key = number_string_cache_->get(hash * 2);
+  Object* key = number_string_cache()->get(hash * 2);
   if (key == number) {
-    return String::cast(number_string_cache_->get(hash * 2 + 1));
+    return String::cast(number_string_cache()->get(hash * 2 + 1));
   } else if (key->IsHeapNumber() &&
              number->IsHeapNumber() &&
              key->Number() == number->Number()) {
-    return String::cast(number_string_cache_->get(hash * 2 + 1));
+    return String::cast(number_string_cache()->get(hash * 2 + 1));
   }
   return undefined_value();
 }
@@ -1442,12 +1494,12 @@ void Heap::SetNumberStringCache(Object* number, String* string) {
   int hash;
   if (number->IsSmi()) {
     hash = smi_get_hash(Smi::cast(number));
-    number_string_cache_->set(hash * 2, number, SKIP_WRITE_BARRIER);
+    number_string_cache()->set(hash * 2, number, SKIP_WRITE_BARRIER);
   } else {
     hash = double_get_hash(number->Number());
-    number_string_cache_->set(hash * 2, number);
+    number_string_cache()->set(hash * 2, number);
   }
-  number_string_cache_->set(hash * 2 + 1, string);
+  number_string_cache()->set(hash * 2 + 1, string);
 }
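
Get/SetNumberStringCache above implement a direct-mapped cache in one flat array: the key for hash h sits at index h * 2, the cached string at h * 2 + 1, and a colliding insert simply overwrites the slot. The same layout in a self-contained sketch (the element type, size, and hash function are simplified stand-ins for V8's):

    #include <cstdio>
    #include <cstring>

    const int kCacheSize = 8;
    const char* cache[kCacheSize * 2];  // even slots: keys, odd slots: values

    int Hash(const char* key) {
      unsigned h = 0;
      for (const char* p = key; *p; p++) h = h * 31 + *p;
      return h & (kCacheSize - 1);  // mask works: kCacheSize is a power of 2
    }

    void Set(const char* key, const char* value) {
      int hash = Hash(key);
      cache[hash * 2] = key;          // overwrite on collision: it is a cache
      cache[hash * 2 + 1] = value;
    }

    const char* Get(const char* key) {
      int hash = Hash(key);
      const char* cached_key = cache[hash * 2];
      if (cached_key != nullptr && std::strcmp(cached_key, key) == 0) {
        return cache[hash * 2 + 1];   // hit
      }
      return nullptr;                 // miss
    }

    int main() {
      Set("1.5", "one point five");
      std::printf("%s\n", Get("1.5"));
      return 0;
    }
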
@@ -1460,19 +1512,19 @@ Object* Heap::SmiOrNumberFromDouble(double value,
   static const DoubleRepresentation plus_zero(0.0);
   static const DoubleRepresentation minus_zero(-0.0);
   static const DoubleRepresentation nan(OS::nan_value());
-  ASSERT(minus_zero_value_ != NULL);
+  ASSERT(minus_zero_value() != NULL);
   ASSERT(sizeof(plus_zero.value) == sizeof(plus_zero.bits));
 
   DoubleRepresentation rep(value);
   if (rep.bits == plus_zero.bits) return Smi::FromInt(0);  // not uncommon
   if (rep.bits == minus_zero.bits) {
     return new_object ? AllocateHeapNumber(-0.0, pretenure)
-                      : minus_zero_value_;
+                      : minus_zero_value();
   }
   if (rep.bits == nan.bits) {
     return new_object
         ? AllocateHeapNumber(OS::nan_value(), pretenure)
-        : nan_value_;
+        : nan_value();
   }
 
   // Try to represent the value as a tagged small integer.
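
SmiOrNumberFromDouble compares doubles by bit pattern (DoubleRepresentation) rather than with ==, which is the only way to distinguish -0.0 from +0.0 and to match NaN against the canonical nan value. A sketch of the union trick; note it reads the inactive union member, which mainstream compilers support for this idiom, while memcpy is the strictly portable spelling:

    #include <cstdint>
    #include <cstdio>
    #include <cmath>

    union DoubleRepresentation {
      double value;
      uint64_t bits;
      explicit DoubleRepresentation(double v) : value(v) {}
    };

    int main() {
      DoubleRepresentation plus_zero(0.0);
      DoubleRepresentation minus_zero(-0.0);
      DoubleRepresentation nan(std::nan(""));

      std::printf("0.0 == -0.0    : %d\n", 0.0 == -0.0);                      // 1
      std::printf("same bits?     : %d\n", plus_zero.bits == minus_zero.bits);  // 0
      std::printf("nan == nan     : %d\n", nan.value == nan.value);           // 0
      std::printf("nan bits match : %d\n",
                  nan.bits == DoubleRepresentation(std::nan("")).bits);       // 1
      return 0;
    }
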
@@ -1514,7 +1566,7 @@ Object* Heap::AllocateProxy(Address proxy, PretenureFlag pretenure) {
 
 
 Object* Heap::AllocateSharedFunctionInfo(Object* name) {
-  Object* result = Allocate(shared_function_info_map(), NEW_SPACE);
+  Object* result = Allocate(shared_function_info_map(), OLD_POINTER_SPACE);
   if (result->IsFailure()) return result;
 
   SharedFunctionInfo* share = SharedFunctionInfo::cast(result);
@@ -1778,7 +1830,7 @@ void Heap::CreateFillerObjectAt(Address addr, int size) {
   if (size == 0) return;
   HeapObject* filler = HeapObject::FromAddress(addr);
   if (size == kPointerSize) {
-    filler->set_map(Heap::one_word_filler_map());
+    filler->set_map(Heap::one_pointer_filler_map());
   } else {
     filler->set_map(Heap::byte_array_map());
     ByteArray::cast(filler)->set_length(ByteArray::LengthFor(size));
@@ -2006,7 +2058,7 @@ Object* Heap::AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure) {
 
   // Allocate the backing storage for the properties.
   int prop_size = map->unused_property_fields() - map->inobject_properties();
-  Object* properties = AllocateFixedArray(prop_size);
+  Object* properties = AllocateFixedArray(prop_size, pretenure);
   if (properties->IsFailure()) return properties;
 
   // Allocate the JSObject.
@@ -2034,7 +2086,39 @@ Object* Heap::AllocateJSObject(JSFunction* constructor,
     Map::cast(initial_map)->set_constructor(constructor);
   }
   // Allocate the object based on the constructor's initial map.
-  return AllocateJSObjectFromMap(constructor->initial_map(), pretenure);
+  Object* result =
+      AllocateJSObjectFromMap(constructor->initial_map(), pretenure);
+  // Make sure result is NOT a global object if valid.
+  ASSERT(result->IsFailure() || !result->IsGlobalObject());
+  return result;
+}
+
+
+Object* Heap::AllocateGlobalObject(JSFunction* constructor) {
+  ASSERT(constructor->has_initial_map());
+  // Make sure no field properties are described in the initial map.
+  // This guarantees us that normalizing the properties does not
+  // require us to change property values to JSGlobalPropertyCells.
+  ASSERT(constructor->initial_map()->NextFreePropertyIndex() == 0);
+
+  // Make sure we don't have a ton of pre-allocated slots in the
+  // global objects. They will be unused once we normalize the object.
+  ASSERT(constructor->initial_map()->unused_property_fields() == 0);
+  ASSERT(constructor->initial_map()->inobject_properties() == 0);
+
+  // Allocate the object based on the constructor's initial map.
+  Object* result = AllocateJSObjectFromMap(constructor->initial_map(), TENURED);
+  if (result->IsFailure()) return result;
+
+  // Normalize the result.
+  JSObject* global = JSObject::cast(result);
+  result = global->NormalizeProperties(CLEAR_INOBJECT_PROPERTIES);
+  if (result->IsFailure()) return result;
+
+  // Make sure result is a global object with properties in dictionary.
+  ASSERT(global->IsGlobalObject());
+  ASSERT(!global->HasFastProperties());
+  return global;
 }
@@ -2111,7 +2195,7 @@ Object* Heap::ReinitializeJSGlobalProxy(JSFunction* constructor,
 
   // Allocate the backing storage for the properties.
   int prop_size = map->unused_property_fields() - map->inobject_properties();
-  Object* properties = AllocateFixedArray(prop_size);
+  Object* properties = AllocateFixedArray(prop_size, TENURED);
   if (properties->IsFailure()) return properties;
 
   // Reset the map for the object.
@@ -2500,7 +2584,7 @@ Object* Heap::AllocateHashTable(int length) {
   Object* result = Heap::AllocateFixedArray(length);
   if (result->IsFailure()) return result;
   reinterpret_cast<Array*>(result)->set_map(hash_table_map());
-  ASSERT(result->IsDictionary());
+  ASSERT(result->IsHashTable());
   return result;
 }
@@ -2622,6 +2706,8 @@ void Heap::ReportHeapStatistics(const char* title) {
   code_space_->ReportStatistics();
   PrintF("Map space : ");
   map_space_->ReportStatistics();
+  PrintF("Cell space : ");
+  cell_space_->ReportStatistics();
   PrintF("Large object space : ");
   lo_space_->ReportStatistics();
   PrintF(">>>>>> ========================================= >>>>>>\n");
@@ -2642,6 +2728,7 @@ bool Heap::Contains(Address addr) {
      old_data_space_->Contains(addr) ||
      code_space_->Contains(addr) ||
      map_space_->Contains(addr) ||
+     cell_space_->Contains(addr) ||
      lo_space_->SlowContains(addr));
 }
@@ -2666,6 +2753,8 @@ bool Heap::InSpace(Address addr, AllocationSpace space) {
       return code_space_->Contains(addr);
     case MAP_SPACE:
       return map_space_->Contains(addr);
+    case CELL_SPACE:
+      return cell_space_->Contains(addr);
     case LO_SPACE:
       return lo_space_->SlowContains(addr);
   }
@@ -2679,22 +2768,31 @@ void Heap::Verify() {
   ASSERT(HasBeenSetup());
 
   VerifyPointersVisitor visitor;
-  Heap::IterateRoots(&visitor);
+  IterateRoots(&visitor);
 
-  AllSpaces spaces;
-  while (Space* space = spaces.next()) {
-    space->Verify();
-  }
+  new_space_.Verify();
+
+  VerifyPointersAndRSetVisitor rset_visitor;
+  old_pointer_space_->Verify(&rset_visitor);
+  map_space_->Verify(&rset_visitor);
+
+  VerifyPointersVisitor no_rset_visitor;
+  old_data_space_->Verify(&no_rset_visitor);
+  code_space_->Verify(&no_rset_visitor);
+  cell_space_->Verify(&no_rset_visitor);
+
+  lo_space_->Verify();
 }
 #endif  // DEBUG
 
 
 Object* Heap::LookupSymbol(Vector<const char> string) {
   Object* symbol = NULL;
-  Object* new_table =
-      SymbolTable::cast(symbol_table_)->LookupSymbol(string, &symbol);
+  Object* new_table = symbol_table()->LookupSymbol(string, &symbol);
   if (new_table->IsFailure()) return new_table;
-  symbol_table_ = new_table;
+  // Can't use set_symbol_table because SymbolTable::cast knows that
+  // SymbolTable is a singleton and checks for identity.
+  roots_[kSymbolTableRootIndex] = new_table;
   ASSERT(symbol != NULL);
   return symbol;
 }
@@ -2703,10 +2801,11 @@ Object* Heap::LookupSymbol(Vector<const char> string) {
 
 Object* Heap::LookupSymbol(String* string) {
   if (string->IsSymbol()) return string;
   Object* symbol = NULL;
-  Object* new_table =
-      SymbolTable::cast(symbol_table_)->LookupString(string, &symbol);
+  Object* new_table = symbol_table()->LookupString(string, &symbol);
   if (new_table->IsFailure()) return new_table;
-  symbol_table_ = new_table;
+  // Can't use set_symbol_table because SymbolTable::cast knows that
+  // SymbolTable is a singleton and checks for identity.
+  roots_[kSymbolTableRootIndex] = new_table;
   ASSERT(symbol != NULL);
   return symbol;
 }
@@ -2717,8 +2816,7 @@ bool Heap::LookupSymbolIfExists(String* string, String** symbol) {
     *symbol = string;
     return true;
   }
-  SymbolTable* table = SymbolTable::cast(symbol_table_);
-  return table->LookupSymbolIfExists(string, symbol);
+  return symbol_table()->LookupSymbolIfExists(string, symbol);
 }
@@ -2805,28 +2903,15 @@ void Heap::IterateRSet(PagedSpace* space, ObjectSlotCallback copy_object_func) {
 
 void Heap::IterateRoots(ObjectVisitor* v) {
   IterateStrongRoots(v);
-  v->VisitPointer(reinterpret_cast<Object**>(&symbol_table_));
+  v->VisitPointer(reinterpret_cast<Object**>(&roots_[kSymbolTableRootIndex]));
   SYNCHRONIZE_TAG("symbol_table");
 }
 
 
 void Heap::IterateStrongRoots(ObjectVisitor* v) {
-#define ROOT_ITERATE(type, name) \
-  v->VisitPointer(bit_cast<Object**, type**>(&name##_));
-  STRONG_ROOT_LIST(ROOT_ITERATE);
-#undef ROOT_ITERATE
+  v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]);
   SYNCHRONIZE_TAG("strong_root_list");
 
-#define STRUCT_MAP_ITERATE(NAME, Name, name) \
-  v->VisitPointer(bit_cast<Object**, Map**>(&name##_map_));
-  STRUCT_LIST(STRUCT_MAP_ITERATE);
-#undef STRUCT_MAP_ITERATE
-  SYNCHRONIZE_TAG("struct_map");
-
-#define SYMBOL_ITERATE(name, string) \
-  v->VisitPointer(bit_cast<Object**, String**>(&name##_));
-  SYMBOL_LIST(SYMBOL_ITERATE)
-#undef SYMBOL_ITERATE
   v->VisitPointer(bit_cast<Object**, String**>(&hidden_symbol_));
   SYNCHRONIZE_TAG("symbol");
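
This hunk shows the payoff of the roots array: IterateStrongRoots shrinks from three macro expansions, one VisitPointer call per named field, to a single VisitPointers over a contiguous range (only hidden_symbol_ keeps its own field and its own call). A sketch of range-based root visitation with invented types:

    #include <cstdio>

    struct Object {};

    struct ObjectVisitor {
      virtual ~ObjectVisitor() {}
      virtual void VisitPointers(Object** start, Object** end) = 0;
    };

    const int kRootCount = 4;
    Object* roots[kRootCount];

    struct CountingVisitor : ObjectVisitor {
      int seen = 0;
      void VisitPointers(Object** start, Object** end) override {
        for (Object** p = start; p < end; p++) {
          if (*p != nullptr) seen++;  // a real GC would mark or update *p here
        }
      }
    };

    void IterateStrongRoots(ObjectVisitor* v) {
      v->VisitPointers(&roots[0], &roots[kRootCount]);  // one call, whole array
    }

    int main() {
      Object heap_object;
      roots[2] = &heap_object;
      CountingVisitor visitor;
      IterateStrongRoots(&visitor);
      std::printf("%d root(s) set\n", visitor.seen);
      return 0;
    }
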
@@ -2901,6 +2986,7 @@ int Heap::PromotedSpaceSize() {
       + old_data_space_->Size()
       + code_space_->Size()
       + map_space_->Size()
+      + cell_space_->Size()
       + lo_space_->Size();
 }
@@ -2978,6 +3064,13 @@ bool Heap::Setup(bool create_heap_objects) {
   // enough to hold at least a page will cause it to allocate.
   if (!map_space_->Setup(NULL, 0)) return false;
 
+  // Initialize global property cell space.
+  cell_space_ = new CellSpace(old_generation_size_, CELL_SPACE);
+  if (cell_space_ == NULL) return false;
+  // Setting up a paged space without giving it a virtual memory range big
+  // enough to hold at least a page will cause it to allocate.
+  if (!cell_space_->Setup(NULL, 0)) return false;
+
   // The large object code space may contain code or data. We set the memory
   // to be non-executable here for safety, but this means we need to enable it
   // explicitly when allocating large code objects.
@@ -3030,6 +3123,12 @@ void Heap::TearDown() {
     map_space_ = NULL;
   }
 
+  if (cell_space_ != NULL) {
+    cell_space_->TearDown();
+    delete cell_space_;
+    cell_space_ = NULL;
+  }
+
   if (lo_space_ != NULL) {
     lo_space_->TearDown();
     delete lo_space_;
@@ -3041,11 +3140,9 @@
 
 
 void Heap::Shrink() {
-  // Try to shrink map, old, and code spaces.
-  map_space_->Shrink();
-  old_pointer_space_->Shrink();
-  old_data_space_->Shrink();
-  code_space_->Shrink();
+  // Try to shrink all paged spaces.
+  PagedSpaces spaces;
+  while (PagedSpace* space = spaces.next()) space->Shrink();
 }
@@ -3053,24 +3150,16 @@
 
 void Heap::Protect() {
   if (HasBeenSetup()) {
-    new_space_.Protect();
-    map_space_->Protect();
-    old_pointer_space_->Protect();
-    old_data_space_->Protect();
-    code_space_->Protect();
-    lo_space_->Protect();
+    AllSpaces spaces;
+    while (Space* space = spaces.next()) space->Protect();
   }
 }
 
 
 void Heap::Unprotect() {
   if (HasBeenSetup()) {
-    new_space_.Unprotect();
-    map_space_->Unprotect();
-    old_pointer_space_->Unprotect();
-    old_data_space_->Unprotect();
-    code_space_->Unprotect();
-    lo_space_->Unprotect();
+    AllSpaces spaces;
+    while (Space* space = spaces.next()) space->Unprotect();
  }
 }
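
Shrink, Protect, and Unprotect above switch from naming every space to the iterator idiom: next() yields each space once and NULL at the end, so the while condition both declares the loop variable and terminates the loop, and a newly added space (such as cell space) cannot be forgotten at any call site. A sketch of the idiom with invented types:

    #include <cstdio>

    struct Space {
      const char* name;
      void Protect() { std::printf("protect %s\n", name); }
    };

    struct AllSpaces {
      int counter = 0;
      // Returns each space once, then nullptr as the end sentinel.
      Space* next() {
        static Space spaces[] = {{"new"}, {"old"}, {"code"}, {"map"}, {"cell"}};
        const int n = sizeof(spaces) / sizeof(*spaces);
        return counter < n ? &spaces[counter++] : nullptr;
      }
    };

    int main() {
      AllSpaces spaces;
      // The declaration in the condition scopes `space` to the loop body and
      // lets the nullptr sentinel end the iteration.
      while (Space* space = spaces.next()) space->Protect();
      return 0;
    }
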
@@ -3108,6 +3197,8 @@ Space* AllSpaces::next() {
       return Heap::code_space();
     case MAP_SPACE:
       return Heap::map_space();
+    case CELL_SPACE:
+      return Heap::cell_space();
     case LO_SPACE:
       return Heap::lo_space();
     default:
@@ -3126,6 +3217,8 @@ PagedSpace* PagedSpaces::next() {
       return Heap::code_space();
     case MAP_SPACE:
       return Heap::map_space();
+    case CELL_SPACE:
+      return Heap::cell_space();
     default:
       return NULL;
   }
@@ -3199,6 +3292,9 @@ ObjectIterator* SpaceIterator::CreateIterator() {
     case MAP_SPACE:
       iterator_ = new HeapObjectIterator(Heap::map_space());
       break;
+    case CELL_SPACE:
+      iterator_ = new HeapObjectIterator(Heap::cell_space());
+      break;
     case LO_SPACE:
       iterator_ = new LargeObjectIterator(Heap::lo_space());
       break;
@@ -3303,7 +3399,7 @@ void HeapProfiler::WriteSample() {
   // Lump all the string types together.
   int string_number = 0;
   int string_bytes = 0;
-#define INCREMENT_SIZE(type, size, name) \
+#define INCREMENT_SIZE(type, size, name, camel_name) \
   string_number += info[type].number(); \
   string_bytes += info[type].bytes();
   STRING_TYPE_LIST(INCREMENT_SIZE)