Revert of Remove the weak list of array buffers (patchset #8 id:140001 of https://codereview.chromium.org/1114563002/)

Reason for revert:
[Sheriff] Crashes in layout tests:
https://chromegw.corp.google.com/i/client.v8/builders/V8-Blink%20Linux%2064%20%28dbg%29/builds/2668

Original issue's description:
> Remove the weak list of array buffers
>
> Instead, collect live array buffers during marking and free pointers we
> no longer found.
>
> BUG=v8:3996
> R=hpayer@chromium.org
> LOG=n
>
> Committed: https://crrev.com/2d39709cf5ee17637f6f2d75380a9e61ae0b342b
> Cr-Commit-Position: refs/heads/master@{#28132}

TBR=dslomov@chromium.org,hpayer@chromium.org,jochen@chromium.org
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=v8:3996

Review URL: https://codereview.chromium.org/1115043005

Cr-Commit-Position: refs/heads/master@{#28148}
This commit is contained in:
machenbach 2015-04-30 01:38:18 -07:00 committed by Commit bot
parent cf420ec337
commit 3e25666c79
15 changed files with 274 additions and 63 deletions

View File

@ -6254,12 +6254,9 @@ bool v8::ArrayBuffer::IsNeuterable() const {
v8::ArrayBuffer::Contents v8::ArrayBuffer::Externalize() {
i::Handle<i::JSArrayBuffer> self = Utils::OpenHandle(this);
i::Isolate* isolate = self->GetIsolate();
Utils::ApiCheck(!self->is_external(), "v8::ArrayBuffer::Externalize",
"ArrayBuffer already externalized");
self->set_is_external(true);
isolate->heap()->UnregisterArrayBuffer(self->backing_store());
return GetContents();
}

View File

@ -1566,6 +1566,8 @@ class JSArrayBufferDataEntryAllocator : public HeapEntriesAllocator {
void V8HeapExplorer::ExtractJSArrayBufferReferences(
int entry, JSArrayBuffer* buffer) {
SetWeakReference(buffer, entry, "weak_next", buffer->weak_next(),
JSArrayBuffer::kWeakNextOffset);
// Setup a reference to a native memory backing_store object.
if (!buffer->backing_store())
return;

View File

@ -154,6 +154,8 @@ Heap::Heap()
memset(roots_, 0, sizeof(roots_[0]) * kRootListLength);
set_native_contexts_list(NULL);
set_array_buffers_list(Smi::FromInt(0));
set_last_array_buffer_in_list(Smi::FromInt(0));
set_allocation_sites_list(Smi::FromInt(0));
set_encountered_weak_collections(Smi::FromInt(0));
set_encountered_weak_cells(Smi::FromInt(0));
@ -1705,67 +1707,64 @@ void Heap::UpdateReferencesInExternalStringTable(
void Heap::ProcessAllWeakReferences(WeakObjectRetainer* retainer) {
ProcessArrayBuffers(retainer, false);
ProcessNativeContexts(retainer);
ProcessAllocationSites(retainer);
}
void Heap::ProcessYoungWeakReferences(WeakObjectRetainer* retainer) {
ProcessArrayBuffers(retainer, true);
ProcessNativeContexts(retainer);
}
void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) {
Object* head = VisitWeakList<Context>(this, native_contexts_list(), retainer);
Object* head = VisitWeakList<Context>(this, native_contexts_list(), retainer,
false, NULL);
// Update the head of the list of contexts.
set_native_contexts_list(head);
}
void Heap::RegisterNewArrayBuffer(void* data, size_t length) {
live_array_buffers_[data] = length;
reinterpret_cast<v8::Isolate*>(isolate_)
->AdjustAmountOfExternalAllocatedMemory(length);
}
void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer,
bool stop_after_young) {
Object* last_array_buffer = undefined_value();
Object* array_buffer_obj =
VisitWeakList<JSArrayBuffer>(this, array_buffers_list(), retainer,
stop_after_young, &last_array_buffer);
set_array_buffers_list(array_buffer_obj);
set_last_array_buffer_in_list(last_array_buffer);
void Heap::UnregisterArrayBuffer(void* data) {
DCHECK(live_array_buffers_.count(data) > 0);
live_array_buffers_.erase(data);
not_yet_discovered_array_buffers_.erase(data);
}
void Heap::RegisterLiveArrayBuffer(void* data) {
not_yet_discovered_array_buffers_.erase(data);
}
void Heap::FreeDeadArrayBuffers() {
for (auto buffer = not_yet_discovered_array_buffers_.begin();
buffer != not_yet_discovered_array_buffers_.end(); ++buffer) {
isolate_->array_buffer_allocator()->Free(buffer->first, buffer->second);
// Don't use the API method here since this could trigger another GC.
amount_of_external_allocated_memory_ -= buffer->second;
live_array_buffers_.erase(buffer->first);
// Verify invariant that young array buffers come before old array buffers
// in array buffers list if there was no promotion failure.
Object* undefined = undefined_value();
Object* next = array_buffers_list();
bool old_objects_recorded = false;
while (next != undefined) {
if (!old_objects_recorded) {
old_objects_recorded = !InNewSpace(next);
}
CHECK((InNewSpace(next) && !old_objects_recorded) || !InNewSpace(next));
next = JSArrayBuffer::cast(next)->weak_next();
}
not_yet_discovered_array_buffers_ = live_array_buffers_;
}
void Heap::TearDownArrayBuffers() {
for (auto buffer = live_array_buffers_.begin();
buffer != live_array_buffers_.end(); ++buffer) {
isolate_->array_buffer_allocator()->Free(buffer->first, buffer->second);
Object* undefined = undefined_value();
for (Object* o = array_buffers_list(); o != undefined;) {
JSArrayBuffer* buffer = JSArrayBuffer::cast(o);
Runtime::FreeArrayBuffer(isolate(), buffer);
o = buffer->weak_next();
}
live_array_buffers_.clear();
not_yet_discovered_array_buffers_.clear();
set_array_buffers_list(undefined);
}
void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer) {
Object* allocation_site_obj =
VisitWeakList<AllocationSite>(this, allocation_sites_list(), retainer);
Object* allocation_site_obj = VisitWeakList<AllocationSite>(
this, allocation_sites_list(), retainer, false, NULL);
set_allocation_sites_list(allocation_site_obj);
}
@ -5411,6 +5410,8 @@ bool Heap::CreateHeapObjects() {
CHECK_EQ(0u, gc_count_);
set_native_contexts_list(undefined_value());
set_array_buffers_list(undefined_value());
set_last_array_buffer_in_list(undefined_value());
set_allocation_sites_list(undefined_value());
return true;
}

View File

@ -6,7 +6,6 @@
#define V8_HEAP_HEAP_H_
#include <cmath>
#include <map>
#include "src/allocation.h"
#include "src/assert-scope.h"
@ -875,6 +874,16 @@ class Heap {
}
Object* native_contexts_list() const { return native_contexts_list_; }
void set_array_buffers_list(Object* object) { array_buffers_list_ = object; }
Object* array_buffers_list() const { return array_buffers_list_; }
void set_last_array_buffer_in_list(Object* object) {
last_array_buffer_in_list_ = object;
}
Object* last_array_buffer_in_list() const {
return last_array_buffer_in_list_;
}
void set_allocation_sites_list(Object* object) {
allocation_sites_list_ = object;
}
@ -1481,11 +1490,6 @@ class Heap {
bool deserialization_complete() const { return deserialization_complete_; }
void RegisterNewArrayBuffer(void* data, size_t length);
void UnregisterArrayBuffer(void* data);
void RegisterLiveArrayBuffer(void* data);
void FreeDeadArrayBuffers();
protected:
// Methods made available to tests.
@ -1656,6 +1660,8 @@ class Heap {
// Weak list heads, threaded through the objects.
// List heads are initialized lazily and contain the undefined_value at start.
Object* native_contexts_list_;
Object* array_buffers_list_;
Object* last_array_buffer_in_list_;
Object* allocation_sites_list_;
// List of encountered weak collections (JSWeakMap and JSWeakSet) during
@ -1989,6 +1995,7 @@ class Heap {
void MarkCompactEpilogue();
void ProcessNativeContexts(WeakObjectRetainer* retainer);
void ProcessArrayBuffers(WeakObjectRetainer* retainer, bool stop_after_young);
void ProcessAllocationSites(WeakObjectRetainer* retainer);
// Deopts all code that contains allocation instruction which are tenured or
@ -2149,9 +2156,6 @@ class Heap {
bool concurrent_sweeping_enabled_;
std::map<void*, size_t> live_array_buffers_;
std::map<void*, size_t> not_yet_discovered_array_buffers_;
friend class AlwaysAllocateScope;
friend class Deserializer;
friend class Factory;

View File

@ -4386,8 +4386,6 @@ void MarkCompactCollector::SweepSpaces() {
#ifdef DEBUG
state_ = SWEEP_SPACES;
#endif
heap()->FreeDeadArrayBuffers();
MoveEvacuationCandidatesToEndOfPagesList();
// Noncompacting collections simply sweep the spaces to clear the mark

View File

@ -80,9 +80,12 @@ int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
Map* map, HeapObject* object) {
Heap* heap = map->GetHeap();
VisitPointers(heap, HeapObject::RawField(
object, JSArrayBuffer::BodyDescriptor::kStartOffset),
HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
VisitPointers(
heap,
HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
heap, HeapObject::RawField(object,
JSArrayBuffer::kWeakNextOffset + kPointerSize),
HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
return JSArrayBuffer::kSizeWithInternalFields;
}
@ -530,10 +533,11 @@ void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
StaticVisitor::VisitPointers(
heap,
HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
StaticVisitor::VisitPointers(
heap, HeapObject::RawField(object,
JSArrayBuffer::kWeakNextOffset + kPointerSize),
HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
if (!JSArrayBuffer::cast(object)->is_external()) {
heap->RegisterLiveArrayBuffer(JSArrayBuffer::cast(object)->backing_store());
}
}

View File

@ -191,7 +191,8 @@ struct WeakListVisitor;
template <class T>
Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer,
bool stop_after_young, Object** list_tail) {
Object* undefined = heap->undefined_value();
Object* head = undefined;
T* tail = NULL;
@ -234,7 +235,10 @@ Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
}
// Terminate the list if there is one or more elements.
if (tail != NULL) WeakListVisitor<T>::SetWeakNext(tail, undefined);
if (tail != NULL) {
WeakListVisitor<T>::SetWeakNext(tail, undefined);
if (list_tail) *list_tail = tail;
}
return head;
}
@ -316,7 +320,8 @@ struct WeakListVisitor<Context> {
static void DoWeakList(Heap* heap, Context* context,
WeakObjectRetainer* retainer, int index) {
// Visit the weak list, removing dead intermediate elements.
Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer);
Object* list_head =
VisitWeakList<T>(heap, context->get(index), retainer, false, NULL);
// Update the list head.
context->set(index, list_head, UPDATE_WRITE_BARRIER);
@ -339,6 +344,26 @@ struct WeakListVisitor<Context> {
};
// Weak-list traits for JSArrayBuffer: the heap threads every array buffer
// through its [weak_next] field, and VisitWeakList uses this specialization
// to walk the chain and dispose of buffers that died during GC.
template <>
struct WeakListVisitor<JSArrayBuffer> {
static void SetWeakNext(JSArrayBuffer* obj, Object* next) {
obj->set_weak_next(next);
}
static Object* WeakNext(JSArrayBuffer* obj) { return obj->weak_next(); }
static int WeakNextOffset() { return JSArrayBuffer::kWeakNextOffset; }
// Live buffers need no extra work while the list is being visited.
static void VisitLiveObject(Heap* heap, JSArrayBuffer* array_buffer,
WeakObjectRetainer* retainer) {
}
// A dead (phantom) buffer releases its external backing store.
static void VisitPhantomObject(Heap* heap, JSArrayBuffer* phantom) {
Runtime::FreeArrayBuffer(heap->isolate(), phantom);
}
};
template <>
struct WeakListVisitor<AllocationSite> {
static void SetWeakNext(AllocationSite* obj, Object* next) {
@ -356,9 +381,19 @@ struct WeakListVisitor<AllocationSite> {
template Object* VisitWeakList<Context>(Heap* heap, Object* list,
WeakObjectRetainer* retainer);
WeakObjectRetainer* retainer,
bool stop_after_young,
Object** list_tail);
template Object* VisitWeakList<JSArrayBuffer>(Heap* heap, Object* list,
WeakObjectRetainer* retainer,
bool stop_after_young,
Object** list_tail);
template Object* VisitWeakList<AllocationSite>(Heap* heap, Object* list,
WeakObjectRetainer* retainer);
WeakObjectRetainer* retainer,
bool stop_after_young,
Object** list_tail);
}
} // namespace v8::internal

View File

@ -490,7 +490,10 @@ class WeakObjectRetainer;
// pointers. The template parameter T is a WeakListVisitor that defines how to
// access the next-element pointers.
template <class T>
Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer);
Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer,
bool stop_after_young, Object** list_tail);
Object* VisitNewArrayBufferViewsWeakList(Heap* heap, Object* list,
WeakObjectRetainer* retainer);
}
} // namespace v8::internal

View File

@ -6470,6 +6470,9 @@ void JSArrayBuffer::set_was_neutered(bool value) {
}
ACCESSORS(JSArrayBuffer, weak_next, Object, kWeakNextOffset)
Object* JSArrayBufferView::byte_offset() const {
if (WasNeutered()) return Smi::FromInt(0);
return Object::cast(READ_FIELD(this, kByteOffsetOffset));

View File

@ -10258,6 +10258,9 @@ class JSArrayBuffer: public JSObject {
inline bool was_neutered();
inline void set_was_neutered(bool value);
// [weak_next]: linked list of array buffers.
DECL_ACCESSORS(weak_next, Object)
DECLARE_CAST(JSArrayBuffer)
void Neuter();
@ -10274,7 +10277,8 @@ class JSArrayBuffer: public JSObject {
#else
static const int kBitFieldOffset = kBitFieldSlot + kIntSize;
#endif
static const int kSize = kBitFieldSlot + kPointerSize;
static const int kWeakNextOffset = kBitFieldSlot + kPointerSize;
static const int kSize = kWeakNextOffset + kPointerSize;
static const int kSizeWithInternalFields =
kSize + v8::ArrayBuffer::kInternalFieldCount * kPointerSize;

View File

@ -12,6 +12,22 @@
namespace v8 {
namespace internal {
// Frees the externally allocated backing store of an array buffer that has
// become unreachable (a "phantom" object found while visiting the weak list)
// and reverses the external-memory accounting for it.
void Runtime::FreeArrayBuffer(Isolate* isolate,
JSArrayBuffer* phantom_array_buffer) {
// Externalized buffers are owned by the embedder; never free those here.
if (phantom_array_buffer->is_external()) return;
size_t allocated_length =
NumberToSize(isolate, phantom_array_buffer->byte_length());
// Undo the AdjustAmountOfExternalAllocatedMemory done when the buffer's
// backing store was allocated.
reinterpret_cast<v8::Isolate*>(isolate)
->AdjustAmountOfExternalAllocatedMemory(
-static_cast<int64_t>(allocated_length));
CHECK(isolate->array_buffer_allocator() != NULL);
isolate->array_buffer_allocator()->Free(phantom_array_buffer->backing_store(),
allocated_length);
}
void Runtime::SetupArrayBuffer(Isolate* isolate,
Handle<JSArrayBuffer> array_buffer,
bool is_external, void* data,
@ -31,8 +47,17 @@ void Runtime::SetupArrayBuffer(Isolate* isolate,
CHECK(byte_length->IsSmi() || byte_length->IsHeapNumber());
array_buffer->set_byte_length(*byte_length);
if (data && !is_external) {
isolate->heap()->RegisterNewArrayBuffer(data, allocated_length);
if (isolate->heap()->InNewSpace(*array_buffer) ||
isolate->heap()->array_buffers_list()->IsUndefined()) {
array_buffer->set_weak_next(isolate->heap()->array_buffers_list());
isolate->heap()->set_array_buffers_list(*array_buffer);
if (isolate->heap()->last_array_buffer_in_list()->IsUndefined()) {
isolate->heap()->set_last_array_buffer_in_list(*array_buffer);
}
} else {
JSArrayBuffer::cast(isolate->heap()->last_array_buffer_in_list())
->set_weak_next(*array_buffer);
isolate->heap()->set_last_array_buffer_in_list(*array_buffer);
}
}
@ -58,6 +83,10 @@ bool Runtime::SetupArrayBufferAllocatingData(Isolate* isolate,
}
SetupArrayBuffer(isolate, array_buffer, false, data, allocated_length);
reinterpret_cast<v8::Isolate*>(isolate)
->AdjustAmountOfExternalAllocatedMemory(allocated_length);
return true;
}
@ -144,7 +173,6 @@ RUNTIME_FUNCTION(Runtime_ArrayBufferNeuter) {
size_t byte_length = NumberToSize(isolate, array_buffer->byte_length());
array_buffer->set_is_external(true);
Runtime::NeuterArrayBuffer(array_buffer);
isolate->heap()->UnregisterArrayBuffer(backing_store);
isolate->array_buffer_allocator()->Free(backing_store, byte_length);
return isolate->heap()->undefined_value();
}

View File

@ -817,6 +817,9 @@ class Runtime : public AllStatic {
static void NeuterArrayBuffer(Handle<JSArrayBuffer> array_buffer);
static void FreeArrayBuffer(Isolate* isolate,
JSArrayBuffer* phantom_array_buffer);
static int FindIndexedNonNativeFrame(JavaScriptFrameIterator* it, int index);
enum TypedArrayId {

View File

@ -563,6 +563,10 @@ void Deserializer::Deserialize(Isolate* isolate) {
isolate_->heap()->set_native_contexts_list(
isolate_->heap()->undefined_value());
isolate_->heap()->set_array_buffers_list(
isolate_->heap()->undefined_value());
isolate_->heap()->set_last_array_buffer_in_list(
isolate_->heap()->undefined_value());
// The allocation site list is build during root iteration, but if no sites
// were encountered then it needs to be initialized to undefined.

View File

@ -164,6 +164,7 @@
'test-version.cc',
'test-weakmaps.cc',
'test-weaksets.cc',
'test-weaktypedarrays.cc',
'trace-extension.cc',
'../../src/startup-data-util.h',
'../../src/startup-data-util.cc'

View File

@ -0,0 +1,124 @@
// Copyright 2013 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include <stdlib.h>
#include "src/v8.h"
#include "test/cctest/cctest.h"
#include "src/api.h"
#include "src/heap/heap.h"
#include "src/objects.h"
using namespace v8::internal;
// Unwraps the internal Isolate behind a test LocalContext's v8::Isolate.
static Isolate* GetIsolateFrom(LocalContext* context) {
return reinterpret_cast<Isolate*>((*context)->GetIsolate());
}
// Returns how many array buffers are currently threaded on the heap's weak
// list of array buffers (terminated by the undefined value).
static int CountArrayBuffersInWeakList(Heap* heap) {
  int total = 0;
  Object* node = heap->array_buffers_list();
  while (!node->IsUndefined()) {
    total++;
    node = JSArrayBuffer::cast(node)->weak_next();
  }
  return total;
}
// Returns true if |ab| appears on the heap's weak list of array buffers.
static bool HasArrayBufferInWeakList(Heap* heap, JSArrayBuffer* ab) {
  Object* node = heap->array_buffers_list();
  while (!node->IsUndefined()) {
    if (node == ab) return true;
    node = JSArrayBuffer::cast(node)->weak_next();
  }
  return false;
}
// End-to-end check of the array buffer weak list against script-created
// buffers: create three buffers, drop exactly one, and verify that GC
// unlinks the dead buffer while the two survivors stay on the list.
TEST(WeakArrayBuffersFromScript) {
v8::V8::Initialize();
LocalContext context;
Isolate* isolate = GetIsolateFrom(&context);
// Other machinery may already have buffers on the list; count relative to
// this baseline rather than assuming the list starts empty.
int start = CountArrayBuffersInWeakList(isolate->heap());
for (int i = 1; i <= 3; i++) {
// Create 3 array buffers, make i-th of them garbage,
// validate correct state of array buffer weak list.
CHECK_EQ(start, CountArrayBuffersInWeakList(isolate->heap()));
{
v8::HandleScope scope(context->GetIsolate());
{
// Inner scope so these local handles do not keep the buffers alive
// past the block.
v8::HandleScope s1(context->GetIsolate());
CompileRun("var ab1 = new ArrayBuffer(256);"
"var ab2 = new ArrayBuffer(256);"
"var ab3 = new ArrayBuffer(256);");
v8::Handle<v8::ArrayBuffer> ab1 =
v8::Handle<v8::ArrayBuffer>::Cast(CompileRun("ab1"));
v8::Handle<v8::ArrayBuffer> ab2 =
v8::Handle<v8::ArrayBuffer>::Cast(CompileRun("ab2"));
v8::Handle<v8::ArrayBuffer> ab3 =
v8::Handle<v8::ArrayBuffer>::Cast(CompileRun("ab3"));
// All three buffers must have been linked onto the weak list.
CHECK_EQ(3, CountArrayBuffersInWeakList(isolate->heap()) - start);
CHECK(HasArrayBufferInWeakList(isolate->heap(),
*v8::Utils::OpenHandle(*ab1)));
CHECK(HasArrayBufferInWeakList(isolate->heap(),
*v8::Utils::OpenHandle(*ab2)));
CHECK(HasArrayBufferInWeakList(isolate->heap(),
*v8::Utils::OpenHandle(*ab3)));
}
// Drop the script's only reference to the i-th buffer, then collect:
// the dead buffer should be unlinked from the list.
i::ScopedVector<char> source(1024);
i::SNPrintF(source, "ab%d = null;", i);
CompileRun(source.start());
isolate->heap()->CollectAllGarbage();
CHECK_EQ(2, CountArrayBuffersInWeakList(isolate->heap()) - start);
{
// The two surviving buffers must still be present on the weak list.
v8::HandleScope s2(context->GetIsolate());
for (int j = 1; j <= 3; j++) {
if (j == i) continue;
i::SNPrintF(source, "ab%d", j);
v8::Handle<v8::ArrayBuffer> ab =
v8::Handle<v8::ArrayBuffer>::Cast(CompileRun(source.start()));
CHECK(HasArrayBufferInWeakList(isolate->heap(),
*v8::Utils::OpenHandle(*ab)));
}
}
CompileRun("ab1 = null; ab2 = null; ab3 = null;");
}
// With every reference dropped, GC should shrink the list back to its
// initial size.
isolate->heap()->CollectAllGarbage();
CHECK_EQ(start, CountArrayBuffersInWeakList(isolate->heap()));
}
}