Keep track of array buffers in new space separately
BUG=v8:3996
R=hpayer@chromium.org
LOG=n

Review URL: https://codereview.chromium.org/1133773002

Cr-Commit-Position: refs/heads/master@{#28978}
commit 506397d0a4
parent 065b2374aa
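The change replaces Heap's single pair of ArrayBuffer-tracking maps with one pair per space and routes all updates through shared helpers. The underlying scheme is a simple discover-and-sweep over raw backing-store pointers: every backing store starts each GC phase as "not yet discovered", visitors erase the ones they reach, and whatever is left at the end of the phase is freed. The following self-contained sketch shows that life cycle; the names are simplified and illustrative (std::malloc/std::free stand in for the embedder's v8::ArrayBuffer::Allocator), not V8's API:

#include <cassert>
#include <cstdlib>
#include <map>

// Illustrative stand-ins for the helpers this commit adds to Heap.
using BufferMap = std::map<void*, std::size_t>;

void RegisterNewHelper(BufferMap& live, void* data, std::size_t length) {
  live[data] = length;  // Track the raw backing-store pointer and its size.
}

void RegisterLiveHelper(BufferMap& not_yet_discovered, void* data) {
  not_yet_discovered.erase(data);  // The GC visited this buffer: provably live.
}

std::size_t FreeDeadHelper(BufferMap& live, BufferMap& not_yet_discovered) {
  std::size_t freed = 0;
  for (auto& buffer : not_yet_discovered) {
    std::free(buffer.first);  // Never rediscovered during the last GC phase.
    freed += buffer.second;
    live.erase(buffer.first);
  }
  not_yet_discovered = live;  // Reset the discovery state for the next cycle.
  return freed;
}

int main() {
  BufferMap live_new, not_yet_discovered_new;
  void* a = std::malloc(16);
  void* b = std::malloc(32);
  RegisterNewHelper(live_new, a, 16);
  RegisterNewHelper(live_new, b, 32);
  // PrepareArrayBufferDiscoveryInNewSpace(): nothing is discovered yet.
  not_yet_discovered_new = live_new;
  // The scavenge visits the buffer backed by |a| but never reaches |b|.
  RegisterLiveHelper(not_yet_discovered_new, a);
  // FreeDeadArrayBuffers(true): |b| was not discovered, so it is freed.
  assert(FreeDeadHelper(live_new, not_yet_discovered_new) == 32);
  assert(live_new.count(a) == 1 && live_new.count(b) == 0);
  std::free(a);  // TearDownArrayBuffers() equivalent.
  return 0;
}

A buffer is freed exactly when a whole GC phase completes without any visitor reporting it live; keeping a separate map pair for new space is what lets the scavenger run this cycle over new-space buffers only, without touching old-space buffers that only a full mark-compact can prove dead.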
@@ -6531,7 +6531,8 @@ v8::ArrayBuffer::Contents v8::ArrayBuffer::Externalize() {
   i::Isolate* isolate = self->GetIsolate();
   Utils::ApiCheck(!self->is_external(), "v8::ArrayBuffer::Externalize",
                   "ArrayBuffer already externalized");
   self->set_is_external(true);
-  isolate->heap()->UnregisterArrayBuffer(self->backing_store());
+  isolate->heap()->UnregisterArrayBuffer(isolate->heap()->InNewSpace(*self),
+                                         self->backing_store());
   return GetContents();
 }
@@ -6738,7 +6739,8 @@ v8::SharedArrayBuffer::Contents v8::SharedArrayBuffer::Externalize() {
   i::Isolate* isolate = self->GetIsolate();
   Utils::ApiCheck(!self->is_external(), "v8::SharedArrayBuffer::Externalize",
                   "SharedArrayBuffer already externalized");
   self->set_is_external(true);
-  isolate->heap()->UnregisterArrayBuffer(self->backing_store());
+  isolate->heap()->UnregisterArrayBuffer(isolate->heap()->InNewSpace(*self),
+                                         self->backing_store());
   return GetContents();
 }
src/heap/heap.cc (134 changed lines)
@@ -1623,6 +1623,8 @@ void Heap::Scavenge() {

   SelectScavengingVisitorsTable();

+  PrepareArrayBufferDiscoveryInNewSpace();
+
   // Flip the semispaces. After flipping, to space is empty, from space has
   // live objects.
   new_space_.Flip();
@@ -1704,6 +1706,8 @@ void Heap::Scavenge() {
   new_space_.LowerInlineAllocationLimit(
       new_space_.inline_allocation_limit_step());

+  FreeDeadArrayBuffers(true);
+
   // Update how much has survived scavenge.
   IncrementYoungSurvivorsCounter(static_cast<int>(
       (PromotedSpaceSizeOfObjects() - survived_watermark) + new_space_.Size()));
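The two calls added to Heap::Scavenge() above bracket the copy phase: PrepareArrayBufferDiscoveryInNewSpace() snapshots every tracked new-space backing store as not yet discovered before the semispaces flip, the scavenging visitors then erase each buffer they actually reach, and FreeDeadArrayBuffers(true) releases whatever remains. The main() in the illustrative sketch near the top follows exactly this order.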
@@ -1797,46 +1801,118 @@ void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) {
 }


-void Heap::RegisterNewArrayBuffer(void* data, size_t length) {
+void Heap::RegisterNewArrayBufferHelper(std::map<void*, size_t>& live_buffers,
+                                        void* data, size_t length) {
+  live_buffers[data] = length;
+}
+
+
+void Heap::UnregisterArrayBufferHelper(
+    std::map<void*, size_t>& live_buffers,
+    std::map<void*, size_t>& not_yet_discovered_buffers, void* data) {
+  DCHECK(live_buffers.count(data) > 0);
+  live_buffers.erase(data);
+  not_yet_discovered_buffers.erase(data);
+}
+
+
+void Heap::RegisterLiveArrayBufferHelper(
+    std::map<void*, size_t>& not_yet_discovered_buffers, void* data) {
+  not_yet_discovered_buffers.erase(data);
+}
+
+
+size_t Heap::FreeDeadArrayBuffersHelper(
+    Isolate* isolate, std::map<void*, size_t>& live_buffers,
+    std::map<void*, size_t>& not_yet_discovered_buffers) {
+  size_t freed_memory = 0;
+  for (auto buffer = not_yet_discovered_buffers.begin();
+       buffer != not_yet_discovered_buffers.end(); ++buffer) {
+    isolate->array_buffer_allocator()->Free(buffer->first, buffer->second);
+    freed_memory += buffer->second;
+    live_buffers.erase(buffer->first);
+  }
+  not_yet_discovered_buffers = live_buffers;
+  return freed_memory;
+}
+
+
+void Heap::TearDownArrayBuffersHelper(
+    Isolate* isolate, std::map<void*, size_t>& live_buffers,
+    std::map<void*, size_t>& not_yet_discovered_buffers) {
+  for (auto buffer = live_buffers.begin(); buffer != live_buffers.end();
+       ++buffer) {
+    isolate->array_buffer_allocator()->Free(buffer->first, buffer->second);
+  }
+  live_buffers.clear();
+  not_yet_discovered_buffers.clear();
+}
+
+
+void Heap::RegisterNewArrayBuffer(bool in_new_space, void* data,
+                                  size_t length) {
   if (!data) return;
-  live_array_buffers_[data] = length;
+  RegisterNewArrayBufferHelper(
+      in_new_space ? live_new_array_buffers_ : live_array_buffers_, data,
+      length);
   reinterpret_cast<v8::Isolate*>(isolate_)
       ->AdjustAmountOfExternalAllocatedMemory(length);
 }


-void Heap::UnregisterArrayBuffer(void* data) {
+void Heap::UnregisterArrayBuffer(bool in_new_space, void* data) {
   if (!data) return;
-  DCHECK(live_array_buffers_.count(data) > 0);
-  live_array_buffers_.erase(data);
-  not_yet_discovered_array_buffers_.erase(data);
+  UnregisterArrayBufferHelper(
+      in_new_space ? live_new_array_buffers_ : live_array_buffers_,
+      in_new_space ? not_yet_discovered_new_array_buffers_
+                   : not_yet_discovered_array_buffers_,
+      data);
 }


-void Heap::RegisterLiveArrayBuffer(void* data) {
-  not_yet_discovered_array_buffers_.erase(data);
+void Heap::RegisterLiveArrayBuffer(bool in_new_space, void* data) {
+  RegisterLiveArrayBufferHelper(in_new_space
+                                    ? not_yet_discovered_new_array_buffers_
+                                    : not_yet_discovered_array_buffers_,
+                                data);
 }


-void Heap::FreeDeadArrayBuffers() {
-  for (auto buffer = not_yet_discovered_array_buffers_.begin();
-       buffer != not_yet_discovered_array_buffers_.end(); ++buffer) {
-    isolate_->array_buffer_allocator()->Free(buffer->first, buffer->second);
-    // Don't use the API method here since this could trigger another GC.
-    amount_of_external_allocated_memory_ -= buffer->second;
-    live_array_buffers_.erase(buffer->first);
+void Heap::FreeDeadArrayBuffers(bool in_new_space) {
+  size_t freed_memory = FreeDeadArrayBuffersHelper(
+      isolate_, in_new_space ? live_new_array_buffers_ : live_array_buffers_,
+      in_new_space ? not_yet_discovered_new_array_buffers_
+                   : not_yet_discovered_array_buffers_);
+  if (freed_memory) {
+    reinterpret_cast<v8::Isolate*>(isolate_)
+        ->AdjustAmountOfExternalAllocatedMemory(
+            -static_cast<int64_t>(freed_memory));
   }
-  not_yet_discovered_array_buffers_ = live_array_buffers_;
 }


 void Heap::TearDownArrayBuffers() {
-  for (auto buffer = live_array_buffers_.begin();
-       buffer != live_array_buffers_.end(); ++buffer) {
-    isolate_->array_buffer_allocator()->Free(buffer->first, buffer->second);
-  }
-  live_array_buffers_.clear();
-  not_yet_discovered_array_buffers_.clear();
+  TearDownArrayBuffersHelper(isolate_, live_array_buffers_,
+                             not_yet_discovered_array_buffers_);
+  TearDownArrayBuffersHelper(isolate_, live_new_array_buffers_,
+                             not_yet_discovered_new_array_buffers_);
+}
+
+
+void Heap::PrepareArrayBufferDiscoveryInNewSpace() {
+  not_yet_discovered_new_array_buffers_ = live_new_array_buffers_;
+}
+
+
+void Heap::PromoteArrayBuffer(Object* obj) {
+  JSArrayBuffer* buffer = JSArrayBuffer::cast(obj);
+  if (buffer->is_external()) return;
+  void* data = buffer->backing_store();
+  if (!data) return;
+  DCHECK(live_new_array_buffers_.count(data) > 0);
+  live_array_buffers_[data] = live_new_array_buffers_[data];
+  live_new_array_buffers_.erase(data);
+  not_yet_discovered_new_array_buffers_.erase(data);
 }
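Promotion is what makes the per-space split subtle: an ArrayBuffer that survives a scavenge into old space must have its backing store moved to the old-space map, or the next scavenge would treat it as undiscovered and free memory that is still in use. A minimal sketch of that move in the same style as the sketch above, reusing its BufferMap alias and includes (illustrative, not the V8 API):

// Mirrors Heap::PromoteArrayBuffer() over plain maps (illustrative).
void PromoteHelper(BufferMap& live_new, BufferMap& not_yet_discovered_new,
                   BufferMap& live_old, void* data) {
  assert(live_new.count(data) > 0);    // Must be tracked as a new-space buffer.
  live_old[data] = live_new[data];     // The old-space map now owns the entry.
  live_new.erase(data);
  not_yet_discovered_new.erase(data);  // Exclude it from the scavenge sweep.
}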
@@ -2089,6 +2165,7 @@ class ScavengingVisitor : public StaticVisitorBase {
     table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray);
     table_.Register(kVisitFixedTypedArray, &EvacuateFixedTypedArray);
     table_.Register(kVisitFixedFloat64Array, &EvacuateFixedFloat64Array);
+    table_.Register(kVisitJSArrayBuffer, &EvacuateJSArrayBuffer);

     table_.Register(
         kVisitNativeContext,
@@ -2118,9 +2195,6 @@ class ScavengingVisitor : public StaticVisitorBase {
     table_.Register(kVisitJSWeakCollection,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

-    table_.Register(kVisitJSArrayBuffer,
-                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);
-
     table_.Register(kVisitJSTypedArray,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

@@ -2348,6 +2422,18 @@ class ScavengingVisitor : public StaticVisitorBase {
   }


+  static inline void EvacuateJSArrayBuffer(Map* map, HeapObject** slot,
+                                           HeapObject* object) {
+    ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object);
+
+    Heap* heap = map->GetHeap();
+    MapWord map_word = object->map_word();
+    DCHECK(map_word.IsForwardingAddress());
+    HeapObject* target = map_word.ToForwardingAddress();
+    if (!heap->InNewSpace(target)) heap->PromoteArrayBuffer(target);
+  }
+
+
   static inline void EvacuateByteArray(Map* map, HeapObject** slot,
                                        HeapObject* object) {
     int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
@@ -1567,10 +1567,28 @@ class Heap {

   bool deserialization_complete() const { return deserialization_complete_; }

-  void RegisterNewArrayBuffer(void* data, size_t length);
-  void UnregisterArrayBuffer(void* data);
-  void RegisterLiveArrayBuffer(void* data);
-  void FreeDeadArrayBuffers();
+  // The following methods are used to track raw C++ pointers to externally
+  // allocated memory used as backing store in live array buffers.
+
+  // A new ArrayBuffer was created with |data| as backing store.
+  void RegisterNewArrayBuffer(bool in_new_space, void* data, size_t length);
+
+  // The backing store |data| is no longer owned by V8.
+  void UnregisterArrayBuffer(bool in_new_space, void* data);
+
+  // A live ArrayBuffer was discovered during marking/scavenge.
+  void RegisterLiveArrayBuffer(bool in_new_space, void* data);
+
+  // Frees all backing store pointers that weren't discovered in the previous
+  // marking or scavenge phase.
+  void FreeDeadArrayBuffers(bool in_new_space);
+
+  // Prepare for a new scavenge phase. A new marking phase is implicitly
+  // prepared by finishing the previous one.
+  void PrepareArrayBufferDiscoveryInNewSpace();
+
+  // An ArrayBuffer moved from new space to old space.
+  void PromoteArrayBuffer(Object* buffer);

  protected:
   // Methods made available to tests.
@@ -2074,9 +2092,24 @@ class Heap {
   // the old space.
   void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc);

-  // Called on heap tear-down.
+  // Called on heap tear-down. Frees all remaining ArrayBuffer backing stores.
   void TearDownArrayBuffers();

+  // These correspond to the non-Helper versions.
+  void RegisterNewArrayBufferHelper(std::map<void*, size_t>& live_buffers,
+                                    void* data, size_t length);
+  void UnregisterArrayBufferHelper(
+      std::map<void*, size_t>& live_buffers,
+      std::map<void*, size_t>& not_yet_discovered_buffers, void* data);
+  void RegisterLiveArrayBufferHelper(
+      std::map<void*, size_t>& not_yet_discovered_buffers, void* data);
+  size_t FreeDeadArrayBuffersHelper(
+      Isolate* isolate, std::map<void*, size_t>& live_buffers,
+      std::map<void*, size_t>& not_yet_discovered_buffers);
+  void TearDownArrayBuffersHelper(
+      Isolate* isolate, std::map<void*, size_t>& live_buffers,
+      std::map<void*, size_t>& not_yet_discovered_buffers);
+
   // Record statistics before and after garbage collection.
   void ReportStatisticsBeforeGC();
   void ReportStatisticsAfterGC();
@@ -2319,7 +2352,9 @@ class Heap {
   bool concurrent_sweeping_enabled_;

   std::map<void*, size_t> live_array_buffers_;
+  std::map<void*, size_t> live_new_array_buffers_;
   std::map<void*, size_t> not_yet_discovered_array_buffers_;
+  std::map<void*, size_t> not_yet_discovered_new_array_buffers_;

   struct StrongRootsList;
   StrongRootsList* strong_roots_list_;
@@ -3041,6 +3041,10 @@ bool MarkCompactCollector::TryPromoteObject(HeapObject* object,
   AllocationResult allocation = old_space->AllocateRaw(object_size, alignment);
   if (allocation.To(&target)) {
     MigrateObject(target, object, object_size, old_space->identity());
+    // If we end up needing more special cases, we should factor this out.
+    if (V8_UNLIKELY(target->IsJSArrayBuffer())) {
+      heap()->PromoteArrayBuffer(target);
+    }
     heap()->IncrementPromotedObjectsSize(object_size);
     return true;
   }
@@ -4367,7 +4371,6 @@ void MarkCompactCollector::SweepSpaces() {
 #ifdef DEBUG
   state_ = SWEEP_SPACES;
 #endif
-  heap()->FreeDeadArrayBuffers();

   MoveEvacuationCandidatesToEndOfPagesList();

@@ -4395,6 +4398,8 @@ void MarkCompactCollector::SweepSpaces() {

   EvacuateNewSpaceAndCandidates();

+  heap()->FreeDeadArrayBuffers(false);
+
   // ClearNonLiveReferences depends on precise sweeping of map space to
   // detect whether unmarked map became dead in this collection or in one
   // of the previous ones.
@@ -85,6 +85,10 @@ int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
       heap,
       HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
       HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
+  if (!JSArrayBuffer::cast(object)->is_external()) {
+    heap->RegisterLiveArrayBuffer(true,
+                                  JSArrayBuffer::cast(object)->backing_store());
+  }
   return JSArrayBuffer::kSizeWithInternalFields;
 }

@@ -504,7 +508,8 @@ void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
       HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
       HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
   if (!JSArrayBuffer::cast(object)->is_external()) {
-    heap->RegisterLiveArrayBuffer(JSArrayBuffer::cast(object)->backing_store());
+    heap->RegisterLiveArrayBuffer(heap->InNewSpace(object),
+                                  JSArrayBuffer::cast(object)->backing_store());
   }
 }

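Note the asymmetry between the two visitor changes above: StaticNewSpaceVisitor only ever runs over new-space objects during a scavenge, so it can pass true unconditionally, whereas StaticMarkingVisitor runs during full-GC marking over objects in any space and must query heap->InNewSpace(object) per object.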
@@ -16481,7 +16481,8 @@ Handle<JSArrayBuffer> JSTypedArray::MaterializeArrayBuffer(
   void* backing_store =
       isolate->array_buffer_allocator()->AllocateUninitialized(
           fixed_typed_array->DataSize());
-  isolate->heap()->RegisterNewArrayBuffer(backing_store,
+  isolate->heap()->RegisterNewArrayBuffer(isolate->heap()->InNewSpace(*buffer),
+                                          backing_store,
                                           fixed_typed_array->DataSize());
   buffer->set_backing_store(backing_store);
   buffer->set_is_external(false);
@@ -34,7 +34,8 @@ void Runtime::SetupArrayBuffer(Isolate* isolate,
   array_buffer->set_byte_length(*byte_length);

   if (data && !is_external) {
-    isolate->heap()->RegisterNewArrayBuffer(data, allocated_length);
+    isolate->heap()->RegisterNewArrayBuffer(
+        isolate->heap()->InNewSpace(*array_buffer), data, allocated_length);
   }
 }

@@ -150,7 +151,8 @@ RUNTIME_FUNCTION(Runtime_ArrayBufferNeuter) {
   size_t byte_length = NumberToSize(isolate, array_buffer->byte_length());
   array_buffer->set_is_external(true);
   Runtime::NeuterArrayBuffer(array_buffer);
-  isolate->heap()->UnregisterArrayBuffer(backing_store);
+  isolate->heap()->UnregisterArrayBuffer(
+      isolate->heap()->InNewSpace(*array_buffer), backing_store);
   isolate->array_buffer_allocator()->Free(backing_store, byte_length);
   return isolate->heap()->undefined_value();
 }
Loading…
Reference in New Issue
Block a user