|
@@ -516,10 +516,18 @@ void Deserializer::DecodeReservation(

 void Deserializer::FlushICacheForNewCodeObjects() {
-  PageIterator it(isolate_->heap()->code_space());
-  while (it.has_next()) {
-    Page* p = it.next();
-    CpuFeatures::FlushICache(p->area_start(), p->area_end() - p->area_start());
+  if (!deserializing_user_code_) {
+    // The entire isolate is newly deserialized. Simply flush all code pages.
+    PageIterator it(isolate_->heap()->code_space());
+    while (it.has_next()) {
+      Page* p = it.next();
+      CpuFeatures::FlushICache(p->area_start(),
+                               p->area_end() - p->area_start());
+    }
+  }
+  for (Code* code : new_code_objects_) {
+    CpuFeatures::FlushICache(code->instruction_start(),
+                             code->instruction_size());
   }
 }

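A note for readers: the hunk above splits instruction-cache flushing into a coarse path (all code pages, used when the whole isolate is freshly deserialized) and a fine-grained path (individual code objects, used for deserialized user code). A minimal standalone sketch of that split follows; Page, CodeObject, and FlushICache here are invented stand-ins, not the V8 types.

#include <cstddef>
#include <cstdio>
#include <vector>

struct Page { const void* start; std::size_t size; };        // one code page
struct CodeObject { const void* start; std::size_t size; };  // one code object

// Hypothetical stand-in for CpuFeatures::FlushICache.
static void FlushICache(const void* start, std::size_t size) {
  std::printf("flush %p (+%zu bytes)\n", start, size);
}

// Startup snapshot: every code page is new, so flushing whole page ranges is
// simplest. For user code, only the just-deserialized objects are flushed.
static void FlushForNewCode(bool deserializing_user_code,
                            const std::vector<Page>& code_pages,
                            const std::vector<CodeObject>& new_code) {
  if (!deserializing_user_code) {
    for (const Page& p : code_pages) FlushICache(p.start, p.size);
  }
  for (const CodeObject& c : new_code) FlushICache(c.start, c.size);
}
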
@@ -556,10 +564,15 @@ void Deserializer::Deserialize(Isolate* isolate) {
   DCHECK_NULL(isolate_->thread_manager()->FirstThreadStateInUse());
   // No active handles.
   DCHECK(isolate_->handle_scope_implementer()->blocks()->is_empty());
-  isolate_->heap()->IterateSmiRoots(this);
-  isolate_->heap()->IterateStrongRoots(this, VISIT_ONLY_STRONG);
-  isolate_->heap()->RepairFreeListsAfterDeserialization();
-  isolate_->heap()->IterateWeakRoots(this, VISIT_ALL);
+
+  {
+    DisallowHeapAllocation no_gc;
+    isolate_->heap()->IterateSmiRoots(this);
+    isolate_->heap()->IterateStrongRoots(this, VISIT_ONLY_STRONG);
+    isolate_->heap()->RepairFreeListsAfterDeserialization();
+    isolate_->heap()->IterateWeakRoots(this, VISIT_ALL);
+    DeserializeDeferredObjects();
+  }

   isolate_->heap()->set_native_contexts_list(
       isolate_->heap()->undefined_value());
@@ -608,11 +621,12 @@ MaybeHandle<Object> Deserializer::DeserializePartial(
   Object* root;
   Object* outdated_contexts;
   VisitPointer(&root);
+  DeserializeDeferredObjects();
   VisitPointer(&outdated_contexts);

-  // There's no code deserialized here. If this assert fires
-  // then that's changed and logging should be added to notify
-  // the profiler et al of the new code.
+  // There's no code deserialized here. If this assert fires then that's
+  // changed and logging should be added to notify the profiler et al of the
+  // new code, which also has to be flushed from instruction cache.
   CHECK_EQ(start_address, code_space->top());
   CHECK(outdated_contexts->IsFixedArray());
   *outdated_contexts_out =
@@ -628,10 +642,17 @@ MaybeHandle<SharedFunctionInfo> Deserializer::DeserializeCode(
     return Handle<SharedFunctionInfo>();
   } else {
     deserializing_user_code_ = true;
-    DisallowHeapAllocation no_gc;
-    Object* root;
-    VisitPointer(&root);
-    return Handle<SharedFunctionInfo>(SharedFunctionInfo::cast(root));
+    HandleScope scope(isolate);
+    Handle<SharedFunctionInfo> result;
+    {
+      DisallowHeapAllocation no_gc;
+      Object* root;
+      VisitPointer(&root);
+      DeserializeDeferredObjects();
+      result = Handle<SharedFunctionInfo>(SharedFunctionInfo::cast(root));
+    }
+    CommitNewInternalizedStrings(isolate);
+    return scope.CloseAndEscape(result);
   }
 }

@@ -652,13 +673,21 @@ void Deserializer::VisitPointers(Object** start, Object** end) {
 }


-void Deserializer::RelinkAllocationSite(AllocationSite* site) {
-  if (isolate_->heap()->allocation_sites_list() == Smi::FromInt(0)) {
-    site->set_weak_next(isolate_->heap()->undefined_value());
-  } else {
-    site->set_weak_next(isolate_->heap()->allocation_sites_list());
-  }
-  isolate_->heap()->set_allocation_sites_list(site);
-}
+void Deserializer::DeserializeDeferredObjects() {
+  for (int code = source_.Get(); code != kSynchronize; code = source_.Get()) {
+    int space = code & kSpaceMask;
+    DCHECK(space <= kNumberOfSpaces);
+    DCHECK(code - space == kNewObject);
+    HeapObject* object = GetBackReferencedObject(space);
+    int size = source_.GetInt() << kPointerSizeLog2;
+    Address obj_address = object->address();
+    Object** start = reinterpret_cast<Object**>(obj_address + kPointerSize);
+    Object** end = reinterpret_cast<Object**>(obj_address + size);
+    bool filled = ReadData(start, end, space, obj_address);
+    CHECK(filled);
+    DCHECK(CanBeDeferred(object));
+    PostProcessNewObject(object, space);
+  }
+}

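The loop in DeserializeDeferredObjects() above reads records of the form (kNewObject | space, back reference, size in words, raw body) until it hits kSynchronize. The toy round-trip below sketches that wire format under stated assumptions: the opcodes, Stream, and word-vector "heap" are all invented for illustration and share nothing with V8's actual encoding.

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <map>
#include <vector>

// Invented opcodes; only their relationships matter for the sketch.
enum : uint32_t { kNewObject = 0x10, kSynchronize = 0xFF, kSpaceMask = 0x0F };

struct Stream {
  std::vector<uint32_t> data;
  std::size_t pos = 0;
  void Put(uint32_t v) { data.push_back(v); }
  uint32_t Get() { return data[pos++]; }
};

// Serializer side: one record per deferred object, then a terminator.
void WriteDeferred(Stream* sink, uint32_t space, uint32_t back_ref,
                   const std::vector<uint32_t>& body) {
  sink->Put(kNewObject + space);  // tag carries the space
  sink->Put(back_ref);            // which already-allocated object to fill
  sink->Put(static_cast<uint32_t>(body.size()));  // body size in words
  for (uint32_t w : body) sink->Put(w);
}

// Deserializer side: the same loop shape as DeserializeDeferredObjects().
void ReadDeferred(Stream* source,
                  std::map<uint32_t, std::vector<uint32_t>>* heap) {
  for (uint32_t code = source->Get(); code != kSynchronize;
       code = source->Get()) {
    uint32_t space = code & kSpaceMask;
    assert(code - space == kNewObject);
    uint32_t back_ref = source->Get();
    uint32_t size = source->Get();
    std::vector<uint32_t>* object = &(*heap)[back_ref];
    for (uint32_t i = 0; i < size; i++) object->push_back(source->Get());
  }
}

int main() {
  Stream s;
  WriteDeferred(&s, 2, 7, {1, 2, 3});  // space 2, back reference 7
  s.Put(kSynchronize);
  std::map<uint32_t, std::vector<uint32_t>> heap;
  ReadDeferred(&s, &heap);
  assert(heap[7] == (std::vector<uint32_t>{1, 2, 3}));
  return 0;
}
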
@@ -688,31 +717,76 @@ class StringTableInsertionKey : public HashTableKey {
     return handle(string_, isolate);
   }

  private:
   String* string_;
   uint32_t hash_;
+  DisallowHeapAllocation no_gc;
 };


-HeapObject* Deserializer::ProcessNewObjectFromSerializedCode(HeapObject* obj) {
-  if (obj->IsString()) {
-    String* string = String::cast(obj);
-    // Uninitialize hash field as the hash seed may have changed.
-    string->set_hash_field(String::kEmptyHashField);
-    if (string->IsInternalizedString()) {
-      DisallowHeapAllocation no_gc;
-      HandleScope scope(isolate_);
-      StringTableInsertionKey key(string);
-      String* canonical = *StringTable::LookupKey(isolate_, &key);
-      string->SetForwardedInternalizedString(canonical);
-      return canonical;
+HeapObject* Deserializer::PostProcessNewObject(HeapObject* obj, int space) {
+  if (deserializing_user_code()) {
+    if (obj->IsString()) {
+      String* string = String::cast(obj);
+      // Uninitialize hash field as the hash seed may have changed.
+      string->set_hash_field(String::kEmptyHashField);
+      if (string->IsInternalizedString()) {
+        // Canonicalize the internalized string. If it already exists in the
+        // string table, set it to forward to the existing one.
+        StringTableInsertionKey key(string);
+        String* canonical = StringTable::LookupKeyIfExists(isolate_, &key);
+        if (canonical == NULL) {
+          new_internalized_strings_.Add(handle(string));
+          return string;
+        } else {
+          string->SetForwardedInternalizedString(canonical);
+          return canonical;
+        }
+      }
+    } else if (obj->IsScript()) {
+      // Assign a new script id to avoid collision.
+      Script::cast(obj)->set_id(isolate_->heap()->NextScriptId());
+    } else {
+      DCHECK(CanBeDeferred(obj));
+    }
+  }
+  if (obj->IsAllocationSite()) {
+    DCHECK(obj->IsAllocationSite());
+    // Allocation sites are present in the snapshot, and must be linked into
+    // a list at deserialization time.
+    AllocationSite* site = AllocationSite::cast(obj);
+    // TODO(mvstanton): consider treating the heap()->allocation_sites_list()
+    // as a (weak) root. If this root is relocated correctly, this becomes
+    // unnecessary.
+    if (isolate_->heap()->allocation_sites_list() == Smi::FromInt(0)) {
+      site->set_weak_next(isolate_->heap()->undefined_value());
+    } else {
+      site->set_weak_next(isolate_->heap()->allocation_sites_list());
+    }
+    isolate_->heap()->set_allocation_sites_list(site);
+  } else if (obj->IsCode()) {
+    // We flush all code pages after deserializing the startup snapshot. In that
+    // case, we only need to remember code objects in the large object space.
+    // When deserializing user code, remember each individual code object.
+    if (deserializing_user_code() || space == LO_SPACE) {
+      new_code_objects_.Add(Code::cast(obj));
     }
-  } else if (obj->IsScript()) {
-    Script::cast(obj)->set_id(isolate_->heap()->NextScriptId());
   }
   return obj;
 }


+void Deserializer::CommitNewInternalizedStrings(Isolate* isolate) {
+  StringTable::EnsureCapacityForDeserialization(
+      isolate, new_internalized_strings_.length());
+  for (Handle<String> string : new_internalized_strings_) {
+    StringTableInsertionKey key(*string);
+    DCHECK_NULL(StringTable::LookupKeyIfExists(isolate, &key));
+    StringTable::LookupKey(isolate, &key);
+  }
+}
+
+
 HeapObject* Deserializer::GetBackReferencedObject(int space) {
   HeapObject* obj;
   BackReference back_reference(source_.GetInt());

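The string handling in PostProcessNewObject() reduces to: look the string up without inserting; forward to an existing entry if found; otherwise queue it and commit the whole batch later, after growing the table in one step. A plain-C++ sketch of that shape, with a hypothetical StringTable standing in for V8's:

#include <string>
#include <unordered_set>
#include <vector>

// Hypothetical stand-in for V8's StringTable.
struct StringTable {
  std::unordered_set<std::string> entries;
  // Like LookupKeyIfExists: find, but never insert (so no allocation).
  const std::string* LookupIfExists(const std::string& s) const {
    auto it = entries.find(s);
    return it == entries.end() ? nullptr : &*it;
  }
};

struct Canonicalizer {
  StringTable* table;
  std::vector<std::string> new_internalized_strings;

  // Returns the canonical spelling for a freshly deserialized string:
  // forward to an existing entry, or queue the string for a later commit.
  std::string PostProcessString(const std::string& s) {
    if (const std::string* canonical = table->LookupIfExists(s)) {
      return *canonical;
    }
    new_internalized_strings.push_back(s);
    return s;
  }

  // Like CommitNewInternalizedStrings: grow the table once, insert in batch.
  void Commit() {
    table->entries.reserve(table->entries.size() +
                           new_internalized_strings.size());
    for (const std::string& s : new_internalized_strings) {
      table->entries.insert(s);
    }
    new_internalized_strings.clear();
  }
};
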
@@ -746,21 +820,10 @@ void Deserializer::ReadObject(int space_number, Object** write_back) {
   HeapObject* obj;
   int next_int = source_.GetInt();

-  bool double_align = false;
-#ifndef V8_HOST_ARCH_64_BIT
-  double_align = next_int == kDoubleAlignmentSentinel;
-  if (double_align) next_int = source_.GetInt();
-#endif
-
   DCHECK_NE(kDoubleAlignmentSentinel, next_int);
   int size = next_int << kObjectAlignmentBits;
-  int reserved_size = size + (double_align ? kPointerSize : 0);
-  address = Allocate(space_number, reserved_size);
+  address = Allocate(space_number, size);
   obj = HeapObject::FromAddress(address);
-  if (double_align) {
-    obj = isolate_->heap()->DoubleAlignForDeserialization(obj, reserved_size);
-    address = obj->address();
-  }

   isolate_->heap()->OnAllocationEvent(obj, size);
   Object** current = reinterpret_cast<Object**>(address);
@@ -768,24 +831,17 @@ void Deserializer::ReadObject(int space_number, Object** write_back) {
   if (FLAG_log_snapshot_positions) {
     LOG(isolate_, SnapshotPositionEvent(address, source_.position()));
   }
-  ReadData(current, limit, space_number, address);
-
-  // TODO(mvstanton): consider treating the heap()->allocation_sites_list()
-  // as a (weak) root. If this root is relocated correctly,
-  // RelinkAllocationSite() isn't necessary.
-  if (obj->IsAllocationSite()) RelinkAllocationSite(AllocationSite::cast(obj));
-
-  // Fix up strings from serialized user code.
-  if (deserializing_user_code()) obj = ProcessNewObjectFromSerializedCode(obj);
+
+  if (ReadData(current, limit, space_number, address)) {
+    // Only post process if object content has not been deferred.
+    obj = PostProcessNewObject(obj, space_number);
+  }

   Object* write_back_obj = obj;
   UnalignedCopy(write_back, &write_back_obj);
 #ifdef DEBUG
   if (obj->IsCode()) {
     DCHECK(space_number == CODE_SPACE || space_number == LO_SPACE);
+#ifdef VERIFY_HEAP
+    obj->ObjectVerify();
+#endif  // VERIFY_HEAP
   } else {
     DCHECK(space_number != CODE_SPACE);
   }
@@ -829,7 +885,7 @@ Address Deserializer::Allocate(int space_index, int size) {
 }


-void Deserializer::ReadData(Object** current, Object** limit, int source_space,
+bool Deserializer::ReadData(Object** current, Object** limit, int source_space,
                             Address current_object_address) {
   Isolate* const isolate = isolate_;
   // Write barrier support costs around 1% in startup time. In fact there
@@ -1086,6 +1142,18 @@ void Deserializer::ReadData(Object** current, Object** limit, int source_space,
         break;
       }

+      case kDeferred: {
+        // Deferred can only occur right after the heap object header.
+        DCHECK(current == reinterpret_cast<Object**>(current_object_address +
+                                                     kPointerSize));
+        HeapObject* obj = HeapObject::FromAddress(current_object_address);
+        // If the deferred object is a map, its instance type may be used
+        // during deserialization. Initialize it with a temporary value.
+        if (obj->IsMap()) Map::cast(obj)->set_instance_type(FILLER_TYPE);
+        current = limit;
+        return false;
+      }
+
       case kSynchronize:
         // If we get here then that indicates that you have a mismatch between
         // the number of GC roots when serializing and deserializing.
@@ -1192,6 +1260,7 @@ void Deserializer::ReadData(Object** current, Object** limit, int source_space,
     }
   }
   CHECK_EQ(limit, current);
+  return true;
 }

@@ -1200,6 +1269,7 @@ Serializer::Serializer(Isolate* isolate, SnapshotByteSink* sink)
       sink_(sink),
       external_reference_encoder_(isolate),
       root_index_map_(isolate),
+      recursion_depth_(0),
       code_address_map_(NULL),
       large_objects_total_size_(0),
       seen_large_objects_index_(0) {
@@ -1275,6 +1345,16 @@ void Serializer::OutputStatistics(const char* name) {
 }


+void Serializer::SerializeDeferredObjects() {
+  while (deferred_objects_.length() > 0) {
+    HeapObject* obj = deferred_objects_.RemoveLast();
+    ObjectSerializer obj_serializer(this, obj, sink_, kPlain, kStartOfObject);
+    obj_serializer.SerializeDeferred();
+  }
+  sink_->Put(kSynchronize, "Finished with deferred objects");
+}
+
+
 void StartupSerializer::SerializeStrongReferences() {
   Isolate* isolate = this->isolate();
   // No active threads.
@@ -1318,6 +1398,7 @@ void PartialSerializer::Serialize(Object** o) {
     back_reference_map()->AddGlobalProxy(context->global_proxy());
   }
   VisitPointer(o);
+  SerializeDeferredObjects();
   SerializeOutdatedContextsAsFixedArray();
   Pad();
 }
@@ -1342,10 +1423,10 @@ void PartialSerializer::SerializeOutdatedContextsAsFixedArray() {
     sink_->Put(reinterpret_cast<byte*>(&length_smi)[i], "Byte");
   }
   for (int i = 0; i < length; i++) {
-    BackReference back_ref = outdated_contexts_[i];
-    DCHECK(BackReferenceIsAlreadyAllocated(back_ref));
-    sink_->Put(kBackref + back_ref.space(), "BackRef");
-    sink_->PutInt(back_ref.reference(), "BackRefValue");
+    Context* context = outdated_contexts_[i];
+    BackReference back_reference = back_reference_map_.Lookup(context);
+    sink_->Put(kBackref + back_reference.space(), "BackRef");
+    PutBackReference(context, back_reference);
   }
 }
@@ -1508,10 +1589,7 @@ bool Serializer::SerializeKnownObject(HeapObject* obj, HowToCode how_to_code,
                  "BackRefWithSkip");
       sink_->PutInt(skip, "BackRefSkipDistance");
     }
-    DCHECK(BackReferenceIsAlreadyAllocated(back_reference));
-    sink_->PutInt(back_reference.reference(), "BackRefValue");
-
-    hot_objects_.Add(obj);
+    PutBackReference(obj, back_reference);
   }
   return true;
 }
@@ -1547,7 +1625,7 @@ void StartupSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code,
 }


-void StartupSerializer::SerializeWeakReferences() {
+void StartupSerializer::SerializeWeakReferencesAndDeferred() {
   // This phase comes right after the serialization (of the snapshot).
   // After we have done the partial serialization the partial snapshot cache
   // will contain some references needed to decode the partial snapshot. We
@@ -1556,6 +1634,7 @@ void StartupSerializer::SerializeWeakReferences() {
   Object* undefined = isolate()->heap()->undefined_value();
   VisitPointer(&undefined);
   isolate()->heap()->IterateWeakRoots(this, VISIT_ALL);
+  SerializeDeferredObjects();
   Pad();
 }

@@ -1588,6 +1667,13 @@ void Serializer::PutRoot(int root_index,
 }


+void Serializer::PutBackReference(HeapObject* object, BackReference reference) {
+  DCHECK(BackReferenceIsAlreadyAllocated(reference));
+  sink_->PutInt(reference.reference(), "BackRefValue");
+  hot_objects_.Add(object);
+}
+
+
 void PartialSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code,
                                         WhereToPoint where_to_point, int skip) {
   if (obj->IsMap()) {
@@ -1641,9 +1727,7 @@ void PartialSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code,
       Context::cast(obj)->global_object() == global_object_) {
     // Context refers to the current global object. This reference will
     // become outdated after deserialization.
-    BackReference back_reference = back_reference_map_.Lookup(obj);
-    DCHECK(back_reference.is_valid());
-    outdated_contexts_.Add(back_reference);
+    outdated_contexts_.Add(Context::cast(obj));
   }
 }
@@ -1671,17 +1755,8 @@ void Serializer::ObjectSerializer::SerializePrologue(AllocationSpace space,
     }
     back_reference = serializer_->AllocateLargeObject(size);
   } else {
-    bool needs_double_align = false;
-    if (object_->NeedsToEnsureDoubleAlignment()) {
-      // Add wriggle room for double alignment padding.
-      back_reference = serializer_->Allocate(space, size + kPointerSize);
-      needs_double_align = true;
-    } else {
-      back_reference = serializer_->Allocate(space, size);
-    }
+    back_reference = serializer_->Allocate(space, size);
     sink_->Put(kNewObject + reference_representation_ + space, "NewObject");
-    if (needs_double_align)
-      sink_->PutInt(kDoubleAlignmentSentinel, "DoubleAlignSentinel");
     int encoded_size = size >> kObjectAlignmentBits;
     DCHECK_NE(kDoubleAlignmentSentinel, encoded_size);
     sink_->PutInt(encoded_size, "ObjectSizeInWords");
@@ -1773,6 +1848,9 @@ void Serializer::ObjectSerializer::Serialize() {
   // We cannot serialize typed array objects correctly.
   DCHECK(!object_->IsJSTypedArray());

+  // We don't expect fillers.
+  DCHECK(!object_->IsFiller());
+
   if (object_->IsPrototypeInfo()) {
     Object* prototype_users = PrototypeInfo::cast(object_)->prototype_users();
     if (prototype_users->IsWeakFixedArray()) {
@@ -1810,6 +1888,39 @@ void Serializer::ObjectSerializer::Serialize() {
   CHECK_EQ(0, bytes_processed_so_far_);
   bytes_processed_so_far_ = kPointerSize;

+  RecursionScope recursion(serializer_);
+  // Objects that are immediately post processed during deserialization
+  // cannot be deferred, since post processing requires the object content.
+  if (recursion.ExceedsMaximum() && CanBeDeferred(object_)) {
+    serializer_->QueueDeferredObject(object_);
+    sink_->Put(kDeferred, "Deferring object content");
+    return;
+  }
+
+  object_->IterateBody(map->instance_type(), size, this);
+  OutputRawData(object_->address() + size);
+}
+
+
+void Serializer::ObjectSerializer::SerializeDeferred() {
+  if (FLAG_trace_serializer) {
+    PrintF(" Encoding deferred heap object: ");
+    object_->ShortPrint();
+    PrintF("\n");
+  }
+
+  int size = object_->Size();
+  Map* map = object_->map();
+  BackReference reference = serializer_->back_reference_map()->Lookup(object_);
+
+  // Serialize the rest of the object.
+  CHECK_EQ(0, bytes_processed_so_far_);
+  bytes_processed_so_far_ = kPointerSize;
+
+  sink_->Put(kNewObject + reference.space(), "deferred object");
+  serializer_->PutBackReference(object_, reference);
+  sink_->PutInt(size >> kPointerSizeLog2, "deferred object size");
+
   object_->IterateBody(map->instance_type(), size, this);
   OutputRawData(object_->address() + size);
 }

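The RecursionScope check above is the serializer-side half of the deferral mechanism: past a depth limit, an object's body is queued and written out iteratively later, which bounds stack usage on deeply nested object graphs. A self-contained sketch of just that control flow — Node, kMaxRecursionDepth, and the flags are invented for illustration:

#include <vector>

constexpr int kMaxRecursionDepth = 4;

struct Node {
  std::vector<Node*> children;
  bool header_written = false;
  bool body_written = false;
};

struct SketchSerializer {
  int recursion_depth = 0;
  std::vector<Node*> deferred;

  void Serialize(Node* n) {
    n->header_written = true;  // the header always goes out immediately
    recursion_depth++;
    if (recursion_depth > kMaxRecursionDepth) {
      deferred.push_back(n);   // emit kDeferred; the body comes later
    } else {
      WriteBody(n);
    }
    recursion_depth--;
  }

  void WriteBody(Node* n) {
    n->body_written = true;
    for (Node* child : n->children) Serialize(child);
  }

  // Counterpart of SerializeDeferredObjects(): drain the queue at depth 0,
  // so each pass recurses at most kMaxRecursionDepth levels deep.
  void SerializeDeferredObjects() {
    while (!deferred.empty()) {
      Node* n = deferred.back();
      deferred.pop_back();
      WriteBody(n);  // may defer grandchildren again; the loop picks them up
    }
  }
};
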
@@ -2134,6 +2245,7 @@ ScriptData* CodeSerializer::Serialize(Isolate* isolate,
   DisallowHeapAllocation no_gc;
   Object** location = Handle<Object>::cast(info).location();
   cs.VisitPointer(location);
+  cs.SerializeDeferredObjects();
   cs.Pad();

   SerializedCodeData data(sink.data(), cs);
@@ -2212,8 +2324,6 @@ void CodeSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code,
 void CodeSerializer::SerializeGeneric(HeapObject* heap_object,
                                       HowToCode how_to_code,
                                       WhereToPoint where_to_point) {
-  if (heap_object->IsInternalizedString()) num_internalized_strings_++;
-
   // Object has not yet been serialized. Serialize it here.
   ObjectSerializer serializer(this, heap_object, sink_, how_to_code,
                               where_to_point);
@@ -2325,10 +2435,6 @@ MaybeHandle<SharedFunctionInfo> CodeSerializer::Deserialize(
     return MaybeHandle<SharedFunctionInfo>();
   }

-  // Eagerly expand string table to avoid allocations during deserialization.
-  StringTable::EnsureCapacityForDeserialization(isolate,
-                                                scd->NumInternalizedStrings());
-
   // Prepare and register list of attached objects.
   Vector<const uint32_t> code_stub_keys = scd->CodeStubKeys();
   Vector<Handle<Object> > attached_objects = Vector<Handle<Object> >::New(
@@ -2492,7 +2598,6 @@ SerializedCodeData::SerializedCodeData(const List<byte>& payload,
   SetHeaderValue(kCpuFeaturesOffset,
                  static_cast<uint32_t>(CpuFeatures::SupportedFeatures()));
   SetHeaderValue(kFlagHashOffset, FlagList::Hash());
-  SetHeaderValue(kNumInternalizedStringsOffset, cs.num_internalized_strings());
   SetHeaderValue(kNumReservationsOffset, reservations.length());
   SetHeaderValue(kNumCodeStubKeysOffset, num_stub_keys);
   SetHeaderValue(kPayloadLengthOffset, payload.length());
@@ -2570,10 +2675,6 @@ Vector<const byte> SerializedCodeData::Payload() const {
 }


-int SerializedCodeData::NumInternalizedStrings() const {
-  return GetHeaderValue(kNumInternalizedStringsOffset);
-}
-
-
 Vector<const uint32_t> SerializedCodeData::CodeStubKeys() const {
   int reservations_size = GetHeaderValue(kNumReservationsOffset) * kInt32Size;
   const byte* start = data_ + kHeaderSize + reservations_size;
|
|