heap: make array buffer maps disjoint

Remove the intersection between the `std::map`s representing the currently
live ArrayBuffers. While the overlapping maps were simpler to understand,
they posed a significant performance issue for heavy ArrayBuffer users
(such as node.js).

Store buffers separately, and process them together during mark-sweep phase.

The results of benchmarks are:

$ ./node-slow bench && ./node-fast bench
4997.4 ns/op
4685.7 ns/op

NOTE: `fast` is the patched node.js; `slow` is the unpatched node.js running vanilla V8.

BUG=

Review URL: https://codereview.chromium.org/1316873004

Cr-Commit-Position: refs/heads/master@{#30495}
This commit is contained in:
fedor 2015-08-31 23:51:51 -07:00 committed by Commit bot
parent 1cd96c5b92
commit 9e3676da9a
3 changed files with 59 additions and 98 deletions

View File

@ -1744,61 +1744,13 @@ void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) {
} }
// Records |data| (with its backing-store size |length|) in the given
// live-buffer map, overwriting any existing entry for the same pointer.
void Heap::RegisterNewArrayBufferHelper(std::map<void*, size_t>& live_buffers,
                                        void* data, size_t length) {
  auto inserted = live_buffers.insert(std::make_pair(data, length));
  // Pointer already tracked: update the recorded length instead.
  if (!inserted.second) inserted.first->second = length;
}
// Drops |data| from both tracking maps. The buffer must currently be
// registered in |live_buffers|; it may or may not still be present in
// |not_yet_discovered_buffers| (erase() tolerates its absence).
void Heap::UnregisterArrayBufferHelper(
    std::map<void*, size_t>& live_buffers,
    std::map<void*, size_t>& not_yet_discovered_buffers, void* data) {
  auto entry = live_buffers.find(data);
  DCHECK(entry != live_buffers.end());
  live_buffers.erase(entry);
  not_yet_discovered_buffers.erase(data);
}
// Marks the buffer at |data| as discovered (still reachable) by removing it
// from the not-yet-discovered set. erase() is a no-op when the buffer was
// already discovered or was never registered in this set.
void Heap::RegisterLiveArrayBufferHelper(
std::map<void*, size_t>& not_yet_discovered_buffers, void* data) {
not_yet_discovered_buffers.erase(data);
}
// Frees the backing store of every buffer that was not discovered during
// marking, removes those buffers from |live_buffers|, and then resets
// |not_yet_discovered_buffers| to the surviving set so the next GC cycle
// starts with all live buffers "undiscovered" again.
// Returns the total number of bytes released.
size_t Heap::FreeDeadArrayBuffersHelper(
    Isolate* isolate, std::map<void*, size_t>& live_buffers,
    std::map<void*, size_t>& not_yet_discovered_buffers) {
  size_t freed_memory = 0;
  for (const auto& dead : not_yet_discovered_buffers) {
    isolate->array_buffer_allocator()->Free(dead.first, dead.second);
    freed_memory += dead.second;
    live_buffers.erase(dead.first);
  }
  not_yet_discovered_buffers = live_buffers;
  return freed_memory;
}
// Releases the backing store of every remaining live buffer and empties both
// tracking maps. Intended for heap tear-down, when no buffer can survive.
void Heap::TearDownArrayBuffersHelper(
    Isolate* isolate, std::map<void*, size_t>& live_buffers,
    std::map<void*, size_t>& not_yet_discovered_buffers) {
  for (const auto& entry : live_buffers) {
    isolate->array_buffer_allocator()->Free(entry.first, entry.second);
  }
  live_buffers.clear();
  not_yet_discovered_buffers.clear();
}
// NOTE(review): the lines below are a side-by-side diff rendering whose two
// columns were fused during extraction (left = old code, right = new code);
// they are not valid C++ as-is. Visible change: the new version registers a
// buffer in exactly one map (live_array_buffers_for_scavenge_ for new-space
// buffers, otherwise live_array_buffers_) instead of always inserting into
// live_array_buffers_ — confirm against the original review diff.
void Heap::RegisterNewArrayBuffer(bool in_new_space, void* data, void Heap::RegisterNewArrayBuffer(bool in_new_space, void* data,
size_t length) { size_t length) {
if (!data) return; if (!data) return;
RegisterNewArrayBufferHelper(live_array_buffers_, data, length);
if (in_new_space) { if (in_new_space) {
RegisterNewArrayBufferHelper(live_array_buffers_for_scavenge_, data, live_array_buffers_for_scavenge_[data] = length;
length); } else {
live_array_buffers_[data] = length;
} }
// We may go over the limit of externally allocated memory here. We call the // We may go over the limit of externally allocated memory here. We call the
// NOTE(review): fused side-by-side diff hunk (left = old, right = new).
// Visible change: instead of calling UnregisterArrayBufferHelper on both
// maps, the new code selects the single live/not-yet-discovered map pair
// by |in_new_space| and erases |data| from it directly.
@ -1810,54 +1762,75 @@ void Heap::RegisterNewArrayBuffer(bool in_new_space, void* data,
void Heap::UnregisterArrayBuffer(bool in_new_space, void* data) { void Heap::UnregisterArrayBuffer(bool in_new_space, void* data) {
if (!data) return; if (!data) return;
UnregisterArrayBufferHelper(live_array_buffers_,
not_yet_discovered_array_buffers_, data); std::map<void*, size_t>* live_buffers =
if (in_new_space) { in_new_space ? &live_array_buffers_for_scavenge_ : &live_array_buffers_;
UnregisterArrayBufferHelper(live_array_buffers_for_scavenge_, std::map<void*, size_t>* not_yet_discovered_buffers =
not_yet_discovered_array_buffers_for_scavenge_, in_new_space ? &not_yet_discovered_array_buffers_for_scavenge_
data); : &not_yet_discovered_array_buffers_;
}
DCHECK(live_buffers->count(data) > 0);
live_buffers->erase(data);
not_yet_discovered_buffers->erase(data);
} }
// NOTE(review): fused side-by-side diff hunk (left = old, right = new).
// Visible change: the new code erases from the scavenge set when
// |from_scavenge| is true; otherwise it tries the mark-sweep set first and
// falls back to the scavenge set when nothing was erased there.
void Heap::RegisterLiveArrayBuffer(bool from_scavenge, void* data) { void Heap::RegisterLiveArrayBuffer(bool from_scavenge, void* data) {
// ArrayBuffer might be in the middle of being constructed. // ArrayBuffer might be in the middle of being constructed.
if (data == undefined_value()) return; if (data == undefined_value()) return;
RegisterLiveArrayBufferHelper( if (from_scavenge) {
from_scavenge ? not_yet_discovered_array_buffers_for_scavenge_ not_yet_discovered_array_buffers_for_scavenge_.erase(data);
: not_yet_discovered_array_buffers_, } else if (!not_yet_discovered_array_buffers_.erase(data)) {
data); not_yet_discovered_array_buffers_for_scavenge_.erase(data);
}
} }
// NOTE(review): fused side-by-side diff hunk (left = old, right = new).
// Visible change: the new code frees undiscovered scavenge buffers always,
// frees undiscovered mark-sweep buffers only when !from_scavenge, resets the
// not-yet-discovered sets from the live sets, and subtracts the freed bytes
// from amount_of_external_allocated_memory_ inline instead of delegating to
// FreeDeadArrayBuffersHelper.
void Heap::FreeDeadArrayBuffers(bool from_scavenge) { void Heap::FreeDeadArrayBuffers(bool from_scavenge) {
if (from_scavenge) { size_t freed_memory = 0;
for (auto& buffer : not_yet_discovered_array_buffers_for_scavenge_) { for (auto& buffer : not_yet_discovered_array_buffers_for_scavenge_) {
not_yet_discovered_array_buffers_.erase(buffer.first); isolate()->array_buffer_allocator()->Free(buffer.first, buffer.second);
live_array_buffers_.erase(buffer.first); freed_memory += buffer.second;
} live_array_buffers_for_scavenge_.erase(buffer.first);
} else { }
if (!from_scavenge) {
for (auto& buffer : not_yet_discovered_array_buffers_) { for (auto& buffer : not_yet_discovered_array_buffers_) {
// Scavenge can't happen during evacuation, so we only need to update isolate()->array_buffer_allocator()->Free(buffer.first, buffer.second);
// live_array_buffers_for_scavenge_. freed_memory += buffer.second;
// not_yet_discovered_array_buffers_for_scavenge_ will be reset before live_array_buffers_.erase(buffer.first);
// the next scavenge run in PrepareArrayBufferDiscoveryInNewSpace.
live_array_buffers_for_scavenge_.erase(buffer.first);
} }
} }
not_yet_discovered_array_buffers_for_scavenge_ =
live_array_buffers_for_scavenge_;
if (!from_scavenge) not_yet_discovered_array_buffers_ = live_array_buffers_;
// Do not call through the api as this code is triggered while doing a GC. // Do not call through the api as this code is triggered while doing a GC.
amount_of_external_allocated_memory_ -= FreeDeadArrayBuffersHelper( amount_of_external_allocated_memory_ -= freed_memory;
isolate_,
from_scavenge ? live_array_buffers_for_scavenge_ : live_array_buffers_,
from_scavenge ? not_yet_discovered_array_buffers_for_scavenge_
: not_yet_discovered_array_buffers_);
} }
// NOTE(review): fused side-by-side diff hunk (left = old, right = new).
// Visible change: the new code frees the backing stores of both live maps
// itself, clears all four tracking maps, and reports the freed bytes via
// v8::Isolate::AdjustAmountOfExternalAllocatedMemory instead of calling
// TearDownArrayBuffersHelper.
void Heap::TearDownArrayBuffers() { void Heap::TearDownArrayBuffers() {
TearDownArrayBuffersHelper(isolate_, live_array_buffers_, size_t freed_memory = 0;
not_yet_discovered_array_buffers_); for (auto& buffer : live_array_buffers_) {
isolate()->array_buffer_allocator()->Free(buffer.first, buffer.second);
freed_memory += buffer.second;
}
for (auto& buffer : live_array_buffers_for_scavenge_) {
isolate()->array_buffer_allocator()->Free(buffer.first, buffer.second);
freed_memory += buffer.second;
}
live_array_buffers_.clear();
live_array_buffers_for_scavenge_.clear();
not_yet_discovered_array_buffers_.clear();
not_yet_discovered_array_buffers_for_scavenge_.clear();
if (freed_memory > 0) {
reinterpret_cast<v8::Isolate*>(isolate_)
->AdjustAmountOfExternalAllocatedMemory(
-static_cast<int64_t>(freed_memory));
}
} }
// NOTE(review): fused side-by-side diff hunk (left = old, right = new).
// Visible change: on promotion the buffer's entry is now moved from the
// scavenge map into live_array_buffers_ (which no longer duplicates it),
// replacing the old DCHECK that it was already present there.
@ -1875,7 +1848,7 @@ void Heap::PromoteArrayBuffer(Object* obj) {
// ArrayBuffer might be in the middle of being constructed. // ArrayBuffer might be in the middle of being constructed.
if (data == undefined_value()) return; if (data == undefined_value()) return;
DCHECK(live_array_buffers_for_scavenge_.count(data) > 0); DCHECK(live_array_buffers_for_scavenge_.count(data) > 0);
DCHECK(live_array_buffers_.count(data) > 0); live_array_buffers_[data] = live_array_buffers_for_scavenge_[data];
live_array_buffers_for_scavenge_.erase(data); live_array_buffers_for_scavenge_.erase(data);
not_yet_discovered_array_buffers_for_scavenge_.erase(data); not_yet_discovered_array_buffers_for_scavenge_.erase(data);
} }

View File

// NOTE(review): fused side-by-side diff hunk from heap.h (left = old,
// right = new). Visible change: the five *Helper member declarations are
// deleted; their bodies were inlined into the non-Helper methods.
@ -1790,21 +1790,6 @@ class Heap {
// Called on heap tear-down. Frees all remaining ArrayBuffer backing stores. // Called on heap tear-down. Frees all remaining ArrayBuffer backing stores.
void TearDownArrayBuffers(); void TearDownArrayBuffers();
// These correspond to the non-Helper versions.
void RegisterNewArrayBufferHelper(std::map<void*, size_t>& live_buffers,
void* data, size_t length);
void UnregisterArrayBufferHelper(
std::map<void*, size_t>& live_buffers,
std::map<void*, size_t>& not_yet_discovered_buffers, void* data);
void RegisterLiveArrayBufferHelper(
std::map<void*, size_t>& not_yet_discovered_buffers, void* data);
size_t FreeDeadArrayBuffersHelper(
Isolate* isolate, std::map<void*, size_t>& live_buffers,
std::map<void*, size_t>& not_yet_discovered_buffers);
void TearDownArrayBuffersHelper(
Isolate* isolate, std::map<void*, size_t>& live_buffers,
std::map<void*, size_t>& not_yet_discovered_buffers);
// Record statistics before and after garbage collection. // Record statistics before and after garbage collection.
void ReportStatisticsBeforeGC(); void ReportStatisticsBeforeGC();
void ReportStatisticsAfterGC(); void ReportStatisticsAfterGC();

View File

// NOTE(review): fused side-by-side diff hunk from mark-compact.cc (left =
// old, right = new). Visible change: FreeDeadArrayBuffers(false) is moved
// to after EvacuateNewSpaceAndCandidates(), per the added NOTE, so that
// still-live scavenge buffers are not erroneously freed.
@ -4431,10 +4431,13 @@ void MarkCompactCollector::SweepSpaces() {
// buffer entries are already filtered out. We can just release the memory. // buffer entries are already filtered out. We can just release the memory.
heap()->FreeQueuedChunks(); heap()->FreeQueuedChunks();
heap()->FreeDeadArrayBuffers(false);
EvacuateNewSpaceAndCandidates(); EvacuateNewSpaceAndCandidates();
// NOTE: ArrayBuffers must be evacuated first, before freeing them. Otherwise
// not yet discovered buffers for scavenge will have all of them, and they
// will be erroneously freed.
heap()->FreeDeadArrayBuffers(false);
// Clear the marking state of live large objects. // Clear the marking state of live large objects.
heap_->lo_space()->ClearMarkingStateOfLiveObjects(); heap_->lo_space()->ClearMarkingStateOfLiveObjects();