Browse Source

deps: backport a715957 from V8 upstream

This commit does not include the changes to `src/heap/scavenger.cc`.

These changes would revert the changes that should have come in
086bd5aede, meaning that there is no issue with that change missing
in the previous commit.

Original commit message:
  Iterate handles with special left-trim visitor

  BUG=chromium:620553
  LOG=N
  R=hpayer@chromium.org

  Review-Url: https://codereview.chromium.org/2102243002
  Cr-Commit-Position: refs/heads/master@{#37366}

PR-URL: https://github.com/nodejs/node/pull/10668
Reviewed-By: James M Snell <jasnell@gmail.com>
Reviewed-By: Fedor Indutny <fedor.indutny@gmail.com>
Reviewed-By: Ali Ijaz Sheikh <ofrobots@google.com>
v4.x
Myles Borins 9 years ago
committed by Myles Borins
parent
commit
a234d445c4
No known key found for this signature in database GPG Key ID: 933B01F40B5CA946
  1. 25
      deps/v8/src/heap/heap-inl.h
  2. 45
      deps/v8/src/heap/heap.cc
  3. 6
      deps/v8/src/heap/heap.h
  4. 2
      deps/v8/src/heap/mark-compact.cc

25
deps/v8/src/heap/heap-inl.h

@@ -398,31 +398,6 @@ void Heap::CopyBlock(Address dst, Address src, int byte_size) {
static_cast<size_t>(byte_size / kPointerSize));
} }
// Removed by this commit (replaced by FixStaleLeftTrimmedHandlesVisitor in
// heap.cc): detects a handle slot that still points at the filler left
// behind when an array was left-trimmed, and clears the slot.
// Returns true and sets *object to nullptr when the slot was stale;
// returns false (slot untouched) otherwise.
bool Heap::PurgeLeftTrimmedObject(Object** object) {
HeapObject* current = reinterpret_cast<HeapObject*>(*object);
const MapWord map_word = current->map_word();
// A filler map with no forwarding address is presumably the trimmed-off
// start of a left-trimmed object (not an object relocated by the GC).
if (current->IsFiller() && !map_word.IsForwardingAddress()) {
#ifdef DEBUG
// We need to find a FixedArrayBase map after walking the fillers.
while (current->IsFiller()) {
Address next = reinterpret_cast<Address>(current);
// Step over the filler by its size: one-pointer and two-pointer
// fillers have fixed sizes; anything else reports its own Size().
if (current->map() == one_pointer_filler_map()) {
next += kPointerSize;
} else if (current->map() == two_pointer_filler_map()) {
next += 2 * kPointerSize;
} else {
next += current->Size();
}
current = reinterpret_cast<HeapObject*>(next);
}
DCHECK(current->IsFixedArrayBase());
#endif // DEBUG
// Clear the slot so later handle post-processing never follows it.
*object = nullptr;
return true;
}
return false;
}
void Heap::MoveBlock(Address dst, Address src, int byte_size) {
DCHECK(IsAligned(byte_size, kPointerSize));

45
deps/v8/src/heap/heap.cc

@@ -5316,6 +5316,49 @@ void Heap::IterateSmiRoots(ObjectVisitor* v) {
v->Synchronize(VisitorSynchronization::kSmiRootList);
}
// We cannot avoid stale handles to left-trimmed objects, but can only make
// sure all handles still needed are updated. Filter out a stale pointer
// and clear the slot to allow post processing of handles (needed because
// the sweeper might actually free the underlying page).
// Visitor run over handle-scope slots before the regular root visitor (see
// IterateStrongRoots below): replaces any handle that still points at the
// filler left by left-trimming with nullptr, so later processing of the
// handles never dereferences memory the sweeper may have freed.
class FixStaleLeftTrimmedHandlesVisitor : public ObjectVisitor {
public:
explicit FixStaleLeftTrimmedHandlesVisitor(Heap* heap) : heap_(heap) {
// heap_ is only read inside the DEBUG-only walk in FixHandle; USE()
// silences the unused-member warning in release builds.
USE(heap_);
}
void VisitPointer(Object** p) override { FixHandle(p); }
void VisitPointers(Object** start, Object** end) override {
for (Object** p = start; p < end; p++) FixHandle(p);
}
private:
// Clears *p when it references a stale left-trimmed (filler) object;
// leaves it untouched otherwise.
inline void FixHandle(Object** p) {
HeapObject* current = reinterpret_cast<HeapObject*>(*p);
// Non-heap-object values (Smis) cannot be fillers; skip them.
if (!current->IsHeapObject()) return;
const MapWord map_word = current->map_word();
// A filler map with no forwarding address is presumably the trimmed-off
// start of a left-trimmed object (not an object relocated by the GC).
if (!map_word.IsForwardingAddress() && current->IsFiller()) {
#ifdef DEBUG
// We need to find a FixedArrayBase map after walking the fillers.
while (current->IsFiller()) {
Address next = reinterpret_cast<Address>(current);
// Step over the filler by its size: one-pointer and two-pointer
// fillers have fixed sizes; anything else reports its own Size().
if (current->map() == heap_->one_pointer_filler_map()) {
next += kPointerSize;
} else if (current->map() == heap_->two_pointer_filler_map()) {
next += 2 * kPointerSize;
} else {
next += current->Size();
}
current = reinterpret_cast<HeapObject*>(next);
}
DCHECK(current->IsFixedArrayBase());
#endif // DEBUG
// Clear the slot so later handle post-processing never follows it.
*p = nullptr;
}
}
Heap* heap_;
};
void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]);
@@ -5339,6 +5382,8 @@ void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
v->Synchronize(VisitorSynchronization::kCompilationCache);
// Iterate over local handles in handle scopes.
FixStaleLeftTrimmedHandlesVisitor left_trim_visitor(this);
isolate_->handle_scope_implementer()->Iterate(&left_trim_visitor);
isolate_->handle_scope_implementer()->Iterate(v);
isolate_->IterateDeferredHandles(v);
v->Synchronize(VisitorSynchronization::kHandleScope);

6
deps/v8/src/heap/heap.h

@@ -590,12 +590,6 @@ class Heap {
// jslimit_/real_jslimit_ variable in the StackGuard.
void SetStackLimits();
// We cannot avoid stale handles to left-trimmed objects, but can only make
// sure all handles still needed are updated. Filter out a stale pointer
// and clear the slot to allow post processing of handles (needed because
// the sweeper might actually free the underlying page).
inline bool PurgeLeftTrimmedObject(Object** object);
// Notifies the heap that is ok to start marking or other activities that
// should not happen during deserialization.
void NotifyDeserializationComplete();

2
deps/v8/src/heap/mark-compact.cc

@@ -1650,8 +1650,6 @@ class RootMarkingVisitor : public ObjectVisitor {
HeapObject* object = ShortCircuitConsString(p);
if (collector_->heap()->PurgeLeftTrimmedObject(p)) return;
MarkBit mark_bit = Marking::MarkBitFrom(object);
if (Marking::IsBlackOrGrey(mark_bit)) return;

Loading…
Cancel
Save