Browse Source

deps: improve ArrayBuffer performance in v8

This is a backport of the following commits from V8's upstream:

* 1a8c38c50513f9af07ada479629a653e1cf36ff3
* 206f12abee3f1e7eda8fc6521d48f3c319460ee1
* 9e3676da9ab1aaf7de3e8582cb3fdefcc3dbaf33

Original commit message:

    heap: make array buffer maps disjoint

    Remove intersection from the `std::map`s representing current live
    ArrayBuffers. While simpler to understand, the intersection posed a
    significant performance issue for active ArrayBuffer users (like
    node.js).

    Store buffers separately, and process them together during
    mark-sweep phase.

    The results of benchmarks are:

    $ ./node-slow bench && ./node-fast bench
    4997.4 ns/op
    4685.7 ns/op

    NOTE: `fast` is the patched node.js; `slow` is the unpatched node.js
    with vanilla V8.

Ref: https://github.com/nodejs/node/pull/2732
PR-URL: https://github.com/nodejs/node/pull/3351
Reviewed-By: indutny - Fedor Indutny <fedor.indutny@gmail.com>
Reviewed-By: bnoordhuis - Ben Noordhuis <info@bnoordhuis.nl>
v5.x
Fedor Indutny 9 years ago
committed by Ali Ijaz Sheikh
parent
commit
1fdec65203
  1. 135
      deps/v8/src/heap/heap.cc
  2. 15
      deps/v8/src/heap/heap.h
  3. 3
      deps/v8/src/heap/mark-compact.cc

135
deps/v8/src/heap/heap.cc

@ -1791,61 +1791,13 @@ void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) {
} }
void Heap::RegisterNewArrayBufferHelper(std::map<void*, size_t>& live_buffers,
void* data, size_t length) {
live_buffers[data] = length;
}
void Heap::UnregisterArrayBufferHelper(
std::map<void*, size_t>& live_buffers,
std::map<void*, size_t>& not_yet_discovered_buffers, void* data) {
DCHECK(live_buffers.count(data) > 0);
live_buffers.erase(data);
not_yet_discovered_buffers.erase(data);
}
void Heap::RegisterLiveArrayBufferHelper(
std::map<void*, size_t>& not_yet_discovered_buffers, void* data) {
not_yet_discovered_buffers.erase(data);
}
size_t Heap::FreeDeadArrayBuffersHelper(
Isolate* isolate, std::map<void*, size_t>& live_buffers,
std::map<void*, size_t>& not_yet_discovered_buffers) {
size_t freed_memory = 0;
for (auto buffer = not_yet_discovered_buffers.begin();
buffer != not_yet_discovered_buffers.end(); ++buffer) {
isolate->array_buffer_allocator()->Free(buffer->first, buffer->second);
freed_memory += buffer->second;
live_buffers.erase(buffer->first);
}
not_yet_discovered_buffers = live_buffers;
return freed_memory;
}
void Heap::TearDownArrayBuffersHelper(
Isolate* isolate, std::map<void*, size_t>& live_buffers,
std::map<void*, size_t>& not_yet_discovered_buffers) {
for (auto buffer = live_buffers.begin(); buffer != live_buffers.end();
++buffer) {
isolate->array_buffer_allocator()->Free(buffer->first, buffer->second);
}
live_buffers.clear();
not_yet_discovered_buffers.clear();
}
void Heap::RegisterNewArrayBuffer(bool in_new_space, void* data, void Heap::RegisterNewArrayBuffer(bool in_new_space, void* data,
size_t length) { size_t length) {
if (!data) return; if (!data) return;
RegisterNewArrayBufferHelper(live_array_buffers_, data, length);
if (in_new_space) { if (in_new_space) {
RegisterNewArrayBufferHelper(live_array_buffers_for_scavenge_, data, live_array_buffers_for_scavenge_[data] = length;
length); } else {
live_array_buffers_[data] = length;
} }
// We may go over the limit of externally allocated memory here. We call the // We may go over the limit of externally allocated memory here. We call the
@ -1857,54 +1809,75 @@ void Heap::RegisterNewArrayBuffer(bool in_new_space, void* data,
void Heap::UnregisterArrayBuffer(bool in_new_space, void* data) { void Heap::UnregisterArrayBuffer(bool in_new_space, void* data) {
if (!data) return; if (!data) return;
UnregisterArrayBufferHelper(live_array_buffers_,
not_yet_discovered_array_buffers_, data); std::map<void*, size_t>* live_buffers =
if (in_new_space) { in_new_space ? &live_array_buffers_for_scavenge_ : &live_array_buffers_;
UnregisterArrayBufferHelper(live_array_buffers_for_scavenge_, std::map<void*, size_t>* not_yet_discovered_buffers =
not_yet_discovered_array_buffers_for_scavenge_, in_new_space ? &not_yet_discovered_array_buffers_for_scavenge_
data); : &not_yet_discovered_array_buffers_;
}
DCHECK(live_buffers->count(data) > 0);
live_buffers->erase(data);
not_yet_discovered_buffers->erase(data);
} }
void Heap::RegisterLiveArrayBuffer(bool from_scavenge, void* data) { void Heap::RegisterLiveArrayBuffer(bool from_scavenge, void* data) {
// ArrayBuffer might be in the middle of being constructed. // ArrayBuffer might be in the middle of being constructed.
if (data == undefined_value()) return; if (data == undefined_value()) return;
RegisterLiveArrayBufferHelper( if (from_scavenge) {
from_scavenge ? not_yet_discovered_array_buffers_for_scavenge_ not_yet_discovered_array_buffers_for_scavenge_.erase(data);
: not_yet_discovered_array_buffers_, } else if (!not_yet_discovered_array_buffers_.erase(data)) {
data); not_yet_discovered_array_buffers_for_scavenge_.erase(data);
}
} }
void Heap::FreeDeadArrayBuffers(bool from_scavenge) { void Heap::FreeDeadArrayBuffers(bool from_scavenge) {
if (from_scavenge) { size_t freed_memory = 0;
for (auto& buffer : not_yet_discovered_array_buffers_for_scavenge_) { for (auto& buffer : not_yet_discovered_array_buffers_for_scavenge_) {
not_yet_discovered_array_buffers_.erase(buffer.first); isolate()->array_buffer_allocator()->Free(buffer.first, buffer.second);
live_array_buffers_.erase(buffer.first); freed_memory += buffer.second;
} live_array_buffers_for_scavenge_.erase(buffer.first);
} else { }
if (!from_scavenge) {
for (auto& buffer : not_yet_discovered_array_buffers_) { for (auto& buffer : not_yet_discovered_array_buffers_) {
// Scavenge can't happen during evacuation, so we only need to update isolate()->array_buffer_allocator()->Free(buffer.first, buffer.second);
// live_array_buffers_for_scavenge_. freed_memory += buffer.second;
// not_yet_discovered_array_buffers_for_scavenge_ will be reset before live_array_buffers_.erase(buffer.first);
// the next scavenge run in PrepareArrayBufferDiscoveryInNewSpace.
live_array_buffers_for_scavenge_.erase(buffer.first);
} }
} }
not_yet_discovered_array_buffers_for_scavenge_ =
live_array_buffers_for_scavenge_;
if (!from_scavenge) not_yet_discovered_array_buffers_ = live_array_buffers_;
// Do not call through the api as this code is triggered while doing a GC. // Do not call through the api as this code is triggered while doing a GC.
amount_of_external_allocated_memory_ -= FreeDeadArrayBuffersHelper( amount_of_external_allocated_memory_ -= freed_memory;
isolate_,
from_scavenge ? live_array_buffers_for_scavenge_ : live_array_buffers_,
from_scavenge ? not_yet_discovered_array_buffers_for_scavenge_
: not_yet_discovered_array_buffers_);
} }
void Heap::TearDownArrayBuffers() { void Heap::TearDownArrayBuffers() {
TearDownArrayBuffersHelper(isolate_, live_array_buffers_, size_t freed_memory = 0;
not_yet_discovered_array_buffers_); for (auto& buffer : live_array_buffers_) {
isolate()->array_buffer_allocator()->Free(buffer.first, buffer.second);
freed_memory += buffer.second;
}
for (auto& buffer : live_array_buffers_for_scavenge_) {
isolate()->array_buffer_allocator()->Free(buffer.first, buffer.second);
freed_memory += buffer.second;
}
live_array_buffers_.clear();
live_array_buffers_for_scavenge_.clear();
not_yet_discovered_array_buffers_.clear();
not_yet_discovered_array_buffers_for_scavenge_.clear();
if (freed_memory > 0) {
reinterpret_cast<v8::Isolate*>(isolate_)
->AdjustAmountOfExternalAllocatedMemory(
-static_cast<int64_t>(freed_memory));
}
} }
@ -1922,7 +1895,7 @@ void Heap::PromoteArrayBuffer(Object* obj) {
// ArrayBuffer might be in the middle of being constructed. // ArrayBuffer might be in the middle of being constructed.
if (data == undefined_value()) return; if (data == undefined_value()) return;
DCHECK(live_array_buffers_for_scavenge_.count(data) > 0); DCHECK(live_array_buffers_for_scavenge_.count(data) > 0);
DCHECK(live_array_buffers_.count(data) > 0); live_array_buffers_[data] = live_array_buffers_for_scavenge_[data];
live_array_buffers_for_scavenge_.erase(data); live_array_buffers_for_scavenge_.erase(data);
not_yet_discovered_array_buffers_for_scavenge_.erase(data); not_yet_discovered_array_buffers_for_scavenge_.erase(data);
} }

15
deps/v8/src/heap/heap.h

@ -2095,21 +2095,6 @@ class Heap {
// Called on heap tear-down. Frees all remaining ArrayBuffer backing stores. // Called on heap tear-down. Frees all remaining ArrayBuffer backing stores.
void TearDownArrayBuffers(); void TearDownArrayBuffers();
// These correspond to the non-Helper versions.
void RegisterNewArrayBufferHelper(std::map<void*, size_t>& live_buffers,
void* data, size_t length);
void UnregisterArrayBufferHelper(
std::map<void*, size_t>& live_buffers,
std::map<void*, size_t>& not_yet_discovered_buffers, void* data);
void RegisterLiveArrayBufferHelper(
std::map<void*, size_t>& not_yet_discovered_buffers, void* data);
size_t FreeDeadArrayBuffersHelper(
Isolate* isolate, std::map<void*, size_t>& live_buffers,
std::map<void*, size_t>& not_yet_discovered_buffers);
void TearDownArrayBuffersHelper(
Isolate* isolate, std::map<void*, size_t>& live_buffers,
std::map<void*, size_t>& not_yet_discovered_buffers);
// Record statistics before and after garbage collection. // Record statistics before and after garbage collection.
void ReportStatisticsBeforeGC(); void ReportStatisticsBeforeGC();
void ReportStatisticsAfterGC(); void ReportStatisticsAfterGC();

3
deps/v8/src/heap/mark-compact.cc

@ -4357,6 +4357,9 @@ void MarkCompactCollector::SweepSpaces() {
EvacuateNewSpaceAndCandidates(); EvacuateNewSpaceAndCandidates();
// NOTE: ArrayBuffers must be evacuated first, before freeing them. Otherwise
// not yet discovered buffers for scavenge will have all of them, and they
// will be erroneously freed.
heap()->FreeDeadArrayBuffers(false); heap()->FreeDeadArrayBuffers(false);
// Deallocate unmarked objects and clear marked bits for marked objects. // Deallocate unmarked objects and clear marked bits for marked objects.

Loading…
Cancel
Save