Just visit young array buffers during scavenge. Additionally keep the views in new space in a separate global list and move them to the corresponding array buffers when they get promoted.
BUG=
Review URL: https://codereview.chromium.org/990423004
Cr-Commit-Position: refs/heads/master@{#27128}
commit aab76eff43 (parent f71e262683)
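The registration change in the hunks below follows one pattern throughout: a view created in new space is pushed onto a global list on the heap, while a view that is already in old space is threaded onto its buffer's weak view list as before. As a rough standalone illustration of that scheme (simplified stand-in types, not V8's actual classes):

    // Minimal model of the two-list registration; names are invented.
    #include <cassert>

    struct Buffer;

    struct View {
      Buffer* buffer = nullptr;
      View* weak_next = nullptr;   // threads either the per-buffer or the global list
      bool in_new_space = true;    // stand-in for Heap::InNewSpace()
    };

    struct Buffer {
      View* weak_first_view = nullptr;  // views known to live in old space
    };

    struct Heap {
      View* new_array_buffer_views_list = nullptr;  // global list of new-space views

      // Mirrors the registration sites changed in this patch: a fresh new-space
      // view goes on the global list, an old-space view goes on its buffer.
      void RegisterView(View* v, Buffer* b) {
        v->buffer = b;
        if (v->in_new_space) {
          v->weak_next = new_array_buffer_views_list;
          new_array_buffer_views_list = v;
        } else {
          v->weak_next = b->weak_first_view;
          b->weak_first_view = v;
        }
      }
    };

    int main() {
      Heap heap;
      Buffer buf;
      View young, old;
      old.in_new_space = false;
      heap.RegisterView(&young, &buf);
      heap.RegisterView(&old, &buf);
      assert(heap.new_array_buffer_views_list == &young);
      assert(buf.weak_first_view == &old);
    }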
@@ -1790,8 +1790,14 @@ void SetupArrayBufferView(i::Isolate* isolate,
   obj->set_buffer(*buffer);
-  obj->set_weak_next(buffer->weak_first_view());
-  buffer->set_weak_first_view(*obj);
+  Heap* heap = isolate->heap();
+  if (heap->InNewSpace(*obj)) {
+    obj->set_weak_next(heap->new_array_buffer_views_list());
+    heap->set_new_array_buffer_views_list(*obj);
+  } else {
+    obj->set_weak_next(buffer->weak_first_view());
+    buffer->set_weak_first_view(*obj);
+  }

   i::Handle<i::Object> byte_offset_object =
       isolate->factory()->NewNumberFromSize(byte_offset);
@@ -143,7 +143,9 @@ Heap::Heap()
       chunks_queued_for_free_(NULL),
       gc_callbacks_depth_(0),
       deserialization_complete_(false),
-      concurrent_sweeping_enabled_(false) {
+      concurrent_sweeping_enabled_(false),
+      migration_failure_(false),
+      previous_migration_failure_(false) {
   // Allow build-time customization of the max semispace size. Building
   // V8 with snapshots and a non-default max semispace size is much
   // easier if you can define it as part of the build environment.
@@ -737,6 +739,13 @@ void Heap::GarbageCollectionEpilogue() {
   // Remember the last top pointer so that we can later find out
   // whether we allocated in new space since the last GC.
   new_space_top_after_last_gc_ = new_space()->top();
+
+  if (migration_failure_) {
+    set_previous_migration_failure(true);
+  } else {
+    set_previous_migration_failure(false);
+  }
+  set_migration_failure(false);
 }
@@ -1738,29 +1747,63 @@ void Heap::UpdateReferencesInExternalStringTable(


 void Heap::ProcessAllWeakReferences(WeakObjectRetainer* retainer) {
-  ProcessArrayBuffers(retainer);
+  ProcessArrayBuffers(retainer, false);
+  ProcessNewArrayBufferViews(retainer);
   ProcessNativeContexts(retainer);
   ProcessAllocationSites(retainer);
 }


 void Heap::ProcessYoungWeakReferences(WeakObjectRetainer* retainer) {
-  ProcessArrayBuffers(retainer);
+  ProcessArrayBuffers(retainer, true);
+  ProcessNewArrayBufferViews(retainer);
   ProcessNativeContexts(retainer);
 }


 void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) {
-  Object* head = VisitWeakList<Context>(this, native_contexts_list(), retainer);
+  Object* head =
+      VisitWeakList<Context>(this, native_contexts_list(), retainer, false);
   // Update the head of the list of contexts.
   set_native_contexts_list(head);
 }


-void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer) {
-  Object* array_buffer_obj =
-      VisitWeakList<JSArrayBuffer>(this, array_buffers_list(), retainer);
+void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer,
+                               bool stop_after_young) {
+  Object* array_buffer_obj = VisitWeakList<JSArrayBuffer>(
+      this, array_buffers_list(), retainer, stop_after_young);
   set_array_buffers_list(array_buffer_obj);
+
+#ifdef DEBUG
+  // Verify invariant that young array buffers come before old array buffers
+  // in array buffers list if there was no promotion failure.
+  Object* undefined = undefined_value();
+  Object* next = array_buffers_list();
+  bool old_objects_recorded = false;
+  if (migration_failure()) return;
+  while (next != undefined) {
+    if (!old_objects_recorded) {
+      old_objects_recorded = !InNewSpace(next);
+    }
+    DCHECK((InNewSpace(next) && !old_objects_recorded) || !InNewSpace(next));
+    next = JSArrayBuffer::cast(next)->weak_next();
+  }
+#endif
 }
+
+
+void Heap::ProcessNewArrayBufferViews(WeakObjectRetainer* retainer) {
+  // Retain the list of new space views.
+  Object* typed_array_obj = VisitWeakList<JSArrayBufferView>(
+      this, new_array_buffer_views_list_, retainer, false);
+  set_new_array_buffer_views_list(typed_array_obj);
+
+  // Some objects in the list may be in old space now. Find them
+  // and move them to the corresponding array buffer.
+  Object* view = VisitNewArrayBufferViewsWeakList(
+      this, new_array_buffer_views_list_, retainer);
+  set_new_array_buffer_views_list(view);
+}
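ProcessNewArrayBufferViews first retains the global list and then moves any promoted views onto their buffers. A simplified, self-contained model of that second step (invented names, not V8 code) might look like this:

    // Model of the "move promoted views" pass done after a scavenge.
    #include <cassert>

    struct Buffer;
    struct View {
      Buffer* buffer = nullptr;
      View* weak_next = nullptr;
      bool in_new_space = true;
    };
    struct Buffer {
      View* weak_first_view = nullptr;
    };

    // Returns the new head of the global new-space view list.
    View* MovePromotedViews(View* global_list) {
      View* head = nullptr;
      View* previous = nullptr;
      for (View* v = global_list; v != nullptr;) {
        View* next = v->weak_next;
        if (!v->in_new_space) {
          // Unlink from the global list and splice onto the buffer's list.
          if (previous != nullptr) previous->weak_next = next;
          v->weak_next = v->buffer->weak_first_view;
          v->buffer->weak_first_view = v;
        } else {
          previous = v;
          if (head == nullptr) head = v;
        }
        v = next;
      }
      return head;
    }

    int main() {
      Buffer buf;
      View a, b;                 // a stays young, b was promoted
      a.buffer = b.buffer = &buf;
      b.in_new_space = false;
      a.weak_next = &b;          // global list: a -> b
      View* head = MovePromotedViews(&a);
      assert(head == &a && a.weak_next == nullptr);
      assert(buf.weak_first_view == &b);
    }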
@@ -1776,8 +1819,8 @@ void Heap::TearDownArrayBuffers() {


 void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer) {
-  Object* allocation_site_obj =
-      VisitWeakList<AllocationSite>(this, allocation_sites_list(), retainer);
+  Object* allocation_site_obj = VisitWeakList<AllocationSite>(
+      this, allocation_sites_list(), retainer, false);
   set_allocation_sites_list(allocation_site_obj);
 }
@@ -2189,6 +2232,7 @@ class ScavengingVisitor : public StaticVisitorBase {
       if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) {
         return;
       }
+      heap->set_migration_failure(true);
     }

     if (PromoteObject<object_contents, alignment>(map, slot, object,
@@ -5542,6 +5586,7 @@ bool Heap::CreateHeapObjects() {

   set_native_contexts_list(undefined_value());
   set_array_buffers_list(undefined_value());
+  set_new_array_buffer_views_list(undefined_value());
   set_allocation_sites_list(undefined_value());
   return true;
 }
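The migration-failure bookkeeping added above works as a two-stage flag: a failed semi-space copy during the current scavenge sets migration_failure_, and the GC epilogue rotates that value into previous_migration_failure_ before clearing it, so a failure also disables the young-only fast path for the following scavenge. A minimal sketch of just that rotation (illustrative type, not V8 code):

    // Hypothetical stand-in for the two flags on Heap and their epilogue update.
    struct MigrationFlags {
      bool migration_failure = false;
      bool previous_migration_failure = false;

      void GcEpilogue() {
        previous_migration_failure = migration_failure;
        migration_failure = false;
      }
    };

    int main() {
      MigrationFlags flags;
      flags.migration_failure = true;   // a semi-space copy failed this scavenge
      flags.GcEpilogue();               // the failure is remembered for the next GC
      return flags.previous_migration_failure && !flags.migration_failure ? 0 : 1;
    }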
@@ -870,6 +870,13 @@ class Heap {
   void set_array_buffers_list(Object* object) { array_buffers_list_ = object; }
   Object* array_buffers_list() const { return array_buffers_list_; }

+  void set_new_array_buffer_views_list(Object* object) {
+    new_array_buffer_views_list_ = object;
+  }
+  Object* new_array_buffer_views_list() const {
+    return new_array_buffer_views_list_;
+  }
+
   void set_allocation_sites_list(Object* object) {
     allocation_sites_list_ = object;
   }
@@ -1469,6 +1476,18 @@ class Heap {

   bool deserialization_complete() const { return deserialization_complete_; }

+  bool migration_failure() const { return migration_failure_; }
+  void set_migration_failure(bool migration_failure) {
+    migration_failure_ = migration_failure;
+  }
+
+  bool previous_migration_failure() const {
+    return previous_migration_failure_;
+  }
+  void set_previous_migration_failure(bool previous_migration_failure) {
+    previous_migration_failure_ = previous_migration_failure;
+  }
+
  protected:
   // Methods made available to tests.
@@ -1636,11 +1655,16 @@ class Heap {
   bool inline_allocation_disabled_;

   // Weak list heads, threaded through the objects.
-  // List heads are initilized lazily and contain the undefined_value at start.
+  // List heads are initialized lazily and contain the undefined_value at start.
   Object* native_contexts_list_;
   Object* array_buffers_list_;
   Object* allocation_sites_list_;

+  // This is a global list of array buffer views in new space. When the views
+  // get promoted, they are removed from the list and added to the
+  // corresponding array buffer.
+  Object* new_array_buffer_views_list_;
+
   // List of encountered weak collections (JSWeakMap and JSWeakSet) during
   // marking. It is initialized during marking, destroyed after marking and
   // contains Smi(0) while marking is not active.
@@ -1973,7 +1997,8 @@ class Heap {
   void MarkCompactEpilogue();

   void ProcessNativeContexts(WeakObjectRetainer* retainer);
-  void ProcessArrayBuffers(WeakObjectRetainer* retainer);
+  void ProcessArrayBuffers(WeakObjectRetainer* retainer, bool stop_after_young);
+  void ProcessNewArrayBufferViews(WeakObjectRetainer* retainer);
   void ProcessAllocationSites(WeakObjectRetainer* retainer);

   // Deopts all code that contains allocation instruction which are tenured or
@@ -2135,6 +2160,13 @@ class Heap {

   bool concurrent_sweeping_enabled_;

+  // A migration failure indicates that a semi-space copy of an object during
+  // a scavenge failed and the object got promoted instead.
+  bool migration_failure_;
+
+  // A migration failure happened in the previous scavenge.
+  bool previous_migration_failure_;
+
   friend class AlwaysAllocateScope;
   friend class Deserializer;
   friend class Factory;
@@ -191,15 +191,19 @@ struct WeakListVisitor;


 template <class T>
-Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
+Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer,
+                      bool stop_after_young) {
   Object* undefined = heap->undefined_value();
   Object* head = undefined;
   T* tail = NULL;
   MarkCompactCollector* collector = heap->mark_compact_collector();
   bool record_slots = MustRecordSlots(heap);

   while (list != undefined) {
     // Check whether to keep the candidate in the list.
     T* candidate = reinterpret_cast<T*>(list);
+    T* original_candidate = candidate;
+
     Object* retained = retainer->RetainAs(list);
     if (retained != NULL) {
       if (head == undefined) {
@@ -220,9 +224,21 @@ Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
       candidate = reinterpret_cast<T*>(retained);
       tail = candidate;

       // tail is a live object, visit it.
       WeakListVisitor<T>::VisitLiveObject(heap, tail, retainer);

+      // The list of weak objects is usually ordered. It starts with objects
+      // recently allocated in the young generation followed by objects
+      // allocated in the old generation. When a migration failure happened,
+      // the list is not ordered until the next GC that has no migration
+      // failure.
+      // For young generation collections we just have to visit until the last
+      // young generation object.
+      if (stop_after_young && !heap->migration_failure() &&
+          !heap->previous_migration_failure() &&
+          !heap->InNewSpace(original_candidate)) {
+        return head;
+      }
     } else {
       WeakListVisitor<T>::VisitPhantomObject(heap, candidate);
     }
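The early return added above relies on list order: because entries are always pushed at the head, young-generation objects precede old-generation ones unless a migration failure promoted something out of order. A small standalone model of that stopping rule (hypothetical names, not the real VisitWeakList):

    // The young-only pass visits entries until it hits the first old-space one.
    #include <vector>

    struct Entry {
      bool in_new_space;
      Entry* weak_next;
    };

    // Collects the entries a young-generation pass would visit.
    std::vector<Entry*> VisitYoung(Entry* head, bool migration_failure,
                                   bool previous_migration_failure) {
      std::vector<Entry*> visited;
      for (Entry* e = head; e != nullptr; e = e->weak_next) {
        visited.push_back(e);
        // Mirrors the stop_after_young check added to VisitWeakList.
        if (!migration_failure && !previous_migration_failure && !e->in_new_space) {
          break;  // with a clean ordering, everything after this entry is old too
        }
      }
      return visited;
    }

    int main() {
      Entry old_entry{false, nullptr};
      Entry young_entry{true, &old_entry};
      // The pass visits the young entry and the first old entry, then stops.
      return VisitYoung(&young_entry, false, false).size() == 2 ? 0 : 1;
    }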
@@ -239,6 +255,56 @@ Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
 }


+Object* VisitNewArrayBufferViewsWeakList(Heap* heap, Object* list,
+                                         WeakObjectRetainer* retainer) {
+  Object* undefined = heap->undefined_value();
+  Object* previous = undefined;
+  Object* head = undefined;
+  Object* next;
+  MarkCompactCollector* collector = heap->mark_compact_collector();
+  bool record_slots = MustRecordSlots(heap);
+
+  for (Object* o = list; o != undefined;) {
+    JSArrayBufferView* view = JSArrayBufferView::cast(o);
+    next = view->weak_next();
+    if (!heap->InNewSpace(view)) {
+      if (previous != undefined) {
+        // We are in the middle of the list, skip the old space element.
+        JSArrayBufferView* previous_view = JSArrayBufferView::cast(previous);
+        previous_view->set_weak_next(next);
+        if (record_slots) {
+          Object** next_slot = HeapObject::RawField(
+              previous_view, JSArrayBufferView::kWeakNextOffset);
+          collector->RecordSlot(next_slot, next_slot, next);
+        }
+      }
+      JSArrayBuffer* buffer = JSArrayBuffer::cast(view->buffer());
+      view->set_weak_next(buffer->weak_first_view());
+      if (record_slots) {
+        Object** next_slot =
+            HeapObject::RawField(view, JSArrayBufferView::kWeakNextOffset);
+        collector->RecordSlot(next_slot, next_slot, buffer->weak_first_view());
+      }
+      buffer->set_weak_first_view(view);
+      if (record_slots) {
+        Object** slot =
+            HeapObject::RawField(buffer, JSArrayBuffer::kWeakFirstViewOffset);
+        heap->mark_compact_collector()->RecordSlot(slot, slot, view);
+      }
+    } else {
+      // We found a valid new space view, remember it.
+      previous = view;
+      if (head == undefined) {
+        // We are at the list head.
+        head = view;
+      }
+    }
+    o = next;
+  }
+  return head;
+}
+
+
 template <class T>
 static void ClearWeakList(Heap* heap, Object* list) {
   Object* undefined = heap->undefined_value();
@@ -316,7 +382,8 @@ struct WeakListVisitor<Context> {
   static void DoWeakList(Heap* heap, Context* context,
                          WeakObjectRetainer* retainer, int index) {
     // Visit the weak list, removing dead intermediate elements.
-    Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer);
+    Object* list_head =
+        VisitWeakList<T>(heap, context->get(index), retainer, false);

     // Update the list head.
     context->set(index, list_head, UPDATE_WRITE_BARRIER);
@@ -368,7 +435,7 @@ struct WeakListVisitor<JSArrayBuffer> {
   static void VisitLiveObject(Heap* heap, JSArrayBuffer* array_buffer,
                               WeakObjectRetainer* retainer) {
     Object* typed_array_obj = VisitWeakList<JSArrayBufferView>(
-        heap, array_buffer->weak_first_view(), retainer);
+        heap, array_buffer->weak_first_view(), retainer, false);
     array_buffer->set_weak_first_view(typed_array_obj);
     if (typed_array_obj != heap->undefined_value() && MustRecordSlots(heap)) {
       Object** slot = HeapObject::RawField(array_buffer,
@@ -399,23 +466,21 @@ struct WeakListVisitor<AllocationSite> {
 };


-template Object* VisitWeakList<Code>(Heap* heap, Object* list,
-                                     WeakObjectRetainer* retainer);
-
-
-template Object* VisitWeakList<JSFunction>(Heap* heap, Object* list,
-                                           WeakObjectRetainer* retainer);
-
-
 template Object* VisitWeakList<Context>(Heap* heap, Object* list,
-                                        WeakObjectRetainer* retainer);
+                                        WeakObjectRetainer* retainer,
+                                        bool stop_after_young);


 template Object* VisitWeakList<JSArrayBuffer>(Heap* heap, Object* list,
-                                              WeakObjectRetainer* retainer);
+                                              WeakObjectRetainer* retainer,
+                                              bool stop_after_young);


+template Object* VisitWeakList<JSArrayBufferView>(Heap* heap, Object* list,
+                                                  WeakObjectRetainer* retainer,
+                                                  bool stop_after_young);
+
+
 template Object* VisitWeakList<AllocationSite>(Heap* heap, Object* list,
-                                               WeakObjectRetainer* retainer);
+                                               WeakObjectRetainer* retainer,
+                                               bool stop_after_young);
 }
 }  // namespace v8::internal
@@ -489,7 +489,10 @@ class WeakObjectRetainer;
 // pointers. The template parameter T is a WeakListVisitor that defines how to
 // access the next-element pointers.
 template <class T>
-Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer);
+Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer,
+                      bool stop_after_young);
+Object* VisitNewArrayBufferViewsWeakList(Heap* heap, Object* list,
+                                         WeakObjectRetainer* retainer);
 }
 }  // namespace v8::internal
@@ -17008,8 +17008,15 @@ Handle<JSArrayBuffer> JSTypedArray::MaterializeArrayBuffer(
       fixed_typed_array->length(), typed_array->type(),
       static_cast<uint8_t*>(buffer->backing_store()));

-  buffer->set_weak_first_view(*typed_array);
-  DCHECK(typed_array->weak_next() == isolate->heap()->undefined_value());
+  Heap* heap = isolate->heap();
+  if (heap->InNewSpace(*typed_array)) {
+    DCHECK(typed_array->weak_next() == isolate->heap()->undefined_value());
+    typed_array->set_weak_next(heap->new_array_buffer_views_list());
+    heap->set_new_array_buffer_views_list(*typed_array);
+  } else {
+    buffer->set_weak_first_view(*typed_array);
+    DCHECK(typed_array->weak_next() == isolate->heap()->undefined_value());
+  }
   typed_array->set_buffer(*buffer);
   JSObject::SetMapAndElements(typed_array, new_map, new_elements);
@@ -88,6 +88,8 @@ bool Runtime::SetupArrayBufferAllocatingData(Isolate* isolate,

 void Runtime::NeuterArrayBuffer(Handle<JSArrayBuffer> array_buffer) {
   Isolate* isolate = array_buffer->GetIsolate();
+  // Firstly, iterate over the views which are referenced directly by the array
+  // buffer.
   for (Handle<Object> view_obj(array_buffer->weak_first_view(), isolate);
        !view_obj->IsUndefined();) {
     Handle<JSArrayBufferView> view(JSArrayBufferView::cast(*view_obj));
@@ -100,6 +102,24 @@ void Runtime::NeuterArrayBuffer(Handle<JSArrayBuffer> array_buffer) {
     }
     view_obj = handle(view->weak_next(), isolate);
   }
+
+  // Secondly, iterate over the global list of new space views to find views
+  // that belong to the neutered array buffer.
+  Heap* heap = isolate->heap();
+  for (Handle<Object> view_obj(heap->new_array_buffer_views_list(), isolate);
+       !view_obj->IsUndefined();) {
+    Handle<JSArrayBufferView> view(JSArrayBufferView::cast(*view_obj));
+    if (view->buffer() == *array_buffer) {
+      if (view->IsJSTypedArray()) {
+        JSTypedArray::cast(*view)->Neuter();
+      } else if (view->IsJSDataView()) {
+        JSDataView::cast(*view)->Neuter();
+      } else {
+        UNREACHABLE();
+      }
+    }
+    view_obj = handle(view->weak_next(), isolate);
+  }
   array_buffer->Neuter();
 }
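Because a new-space view is threaded on the global list rather than on its buffer, neutering now has to walk both lists, as the two loops above do. A compact standalone model (invented names, not the runtime code):

    // Neutering consults the buffer's own list and then filters the global list.
    #include <cassert>

    struct Buffer;
    struct View {
      Buffer* buffer = nullptr;
      View* weak_next = nullptr;
      bool neutered = false;
    };
    struct Buffer {
      View* weak_first_view = nullptr;
    };

    void NeuterBuffer(Buffer* buffer, View* global_new_space_views) {
      // Firstly, views reachable directly from the buffer.
      for (View* v = buffer->weak_first_view; v != nullptr; v = v->weak_next) {
        v->neutered = true;
      }
      // Secondly, new-space views on the global list that belong to this buffer.
      for (View* v = global_new_space_views; v != nullptr; v = v->weak_next) {
        if (v->buffer == buffer) v->neutered = true;
      }
    }

    int main() {
      Buffer buf;
      View promoted, young;
      promoted.buffer = young.buffer = &buf;
      buf.weak_first_view = &promoted;   // already moved to the buffer's list
      NeuterBuffer(&buf, &young);        // global list holds only the young view
      assert(promoted.neutered && young.neutered);
    }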
@@ -265,11 +285,18 @@ RUNTIME_FUNCTION(Runtime_TypedArrayInitialize) {
   holder->set_byte_offset(*byte_offset_object);
   holder->set_byte_length(*byte_length_object);

+  Heap* heap = isolate->heap();
   if (!maybe_buffer->IsNull()) {
     Handle<JSArrayBuffer> buffer = Handle<JSArrayBuffer>::cast(maybe_buffer);
     holder->set_buffer(*buffer);
-    holder->set_weak_next(buffer->weak_first_view());
-    buffer->set_weak_first_view(*holder);
+
+    if (heap->InNewSpace(*holder)) {
+      holder->set_weak_next(heap->new_array_buffer_views_list());
+      heap->set_new_array_buffer_views_list(*holder);
+    } else {
+      holder->set_weak_next(buffer->weak_first_view());
+      buffer->set_weak_first_view(*holder);
+    }

     Handle<ExternalArray> elements = isolate->factory()->NewExternalArray(
         static_cast<int>(length), array_type,
@@ -367,8 +394,15 @@ RUNTIME_FUNCTION(Runtime_TypedArrayInitializeFromArrayLike) {
       isolate->factory()->NewNumberFromSize(byte_length));
   holder->set_byte_length(*byte_length_obj);
   holder->set_length(*length_obj);
-  holder->set_weak_next(buffer->weak_first_view());
-  buffer->set_weak_first_view(*holder);
+
+  Heap* heap = isolate->heap();
+  if (heap->InNewSpace(*holder)) {
+    holder->set_weak_next(heap->new_array_buffer_views_list());
+    heap->set_new_array_buffer_views_list(*holder);
+  } else {
+    holder->set_weak_next(buffer->weak_first_view());
+    buffer->set_weak_first_view(*holder);
+  }

   Handle<ExternalArray> elements = isolate->factory()->NewExternalArray(
       static_cast<int>(length), array_type,
@@ -542,8 +576,14 @@ RUNTIME_FUNCTION(Runtime_DataViewInitialize) {
   holder->set_byte_offset(*byte_offset);
   holder->set_byte_length(*byte_length);

-  holder->set_weak_next(buffer->weak_first_view());
-  buffer->set_weak_first_view(*holder);
+  Heap* heap = isolate->heap();
+  if (heap->InNewSpace(*holder)) {
+    holder->set_weak_next(heap->new_array_buffer_views_list());
+    heap->set_new_array_buffer_views_list(*holder);
+  } else {
+    holder->set_weak_next(buffer->weak_first_view());
+    buffer->set_weak_first_view(*holder);
+  }

   return isolate->heap()->undefined_value();
 }
@@ -572,6 +572,8 @@ void Deserializer::Deserialize(Isolate* isolate) {
                                isolate_->heap()->undefined_value());
   isolate_->heap()->set_array_buffers_list(
       isolate_->heap()->undefined_value());
+  isolate->heap()->set_new_array_buffer_views_list(
+      isolate_->heap()->undefined_value());

   // The allocation site list is build during root iteration, but if no sites
   // were encountered then it needs to be initialized to undefined.
@@ -62,7 +62,20 @@ static bool HasArrayBufferInWeakList(Heap* heap, JSArrayBuffer* ab) {
 }


-static int CountViews(JSArrayBuffer* array_buffer) {
+static int CountViewsInNewSpaceList(Heap* heap, JSArrayBuffer* array_buffer) {
+  int count = 0;
+  for (Object* o = heap->new_array_buffer_views_list(); !o->IsUndefined();) {
+    JSArrayBufferView* view = JSArrayBufferView::cast(o);
+    if (array_buffer == view->buffer()) {
+      count++;
+    }
+    o = view->weak_next();
+  }
+  return count;
+}
+
+
+static int CountViews(Heap* heap, JSArrayBuffer* array_buffer) {
   int count = 0;
   for (Object* o = array_buffer->weak_first_view();
        !o->IsUndefined();
@@ -70,17 +83,27 @@ static int CountViews(JSArrayBuffer* array_buffer) {
     count++;
   }

-  return count;
+  return count + CountViewsInNewSpaceList(heap, array_buffer);
 }

-static bool HasViewInWeakList(JSArrayBuffer* array_buffer,
+
+static bool HasViewInNewSpaceList(Heap* heap, JSArrayBufferView* ta) {
+  for (Object* o = heap->new_array_buffer_views_list(); !o->IsUndefined();
+       o = JSArrayBufferView::cast(o)->weak_next()) {
+    if (ta == o) return true;
+  }
+  return false;
+}
+
+
+static bool HasViewInWeakList(Heap* heap, JSArrayBuffer* array_buffer,
                               JSArrayBufferView* ta) {
   for (Object* o = array_buffer->weak_first_view();
        !o->IsUndefined();
        o = JSArrayBufferView::cast(o)->weak_next()) {
     if (ta == o) return true;
   }
-  return false;
+  return HasViewInNewSpaceList(heap, ta);
 }
@@ -200,18 +223,18 @@ void TestViewFromApi() {
     Handle<JSArrayBufferView> ita1 = v8::Utils::OpenHandle(*ta1);
     Handle<JSArrayBufferView> ita2 = v8::Utils::OpenHandle(*ta2);
-    CHECK_EQ(2, CountViews(*iab));
-    CHECK(HasViewInWeakList(*iab, *ita1));
-    CHECK(HasViewInWeakList(*iab, *ita2));
+    CHECK_EQ(2, CountViews(isolate->heap(), *iab));
+    CHECK(HasViewInWeakList(isolate->heap(), *iab, *ita1));
+    CHECK(HasViewInWeakList(isolate->heap(), *iab, *ita2));
   }
   isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
-  CHECK_EQ(1, CountViews(*iab));
+  CHECK_EQ(1, CountViews(isolate->heap(), *iab));
     Handle<JSArrayBufferView> ita1 = v8::Utils::OpenHandle(*ta1);
-    CHECK(HasViewInWeakList(*iab, *ita1));
+    CHECK(HasViewInWeakList(isolate->heap(), *iab, *ita1));
   }
   isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);

-  CHECK_EQ(0, CountViews(*iab));
+  CHECK_EQ(0, CountViews(isolate->heap(), *iab));
 }
@@ -299,10 +322,13 @@ static void TestTypedArrayFromScript(const char* constructor) {
         v8::Handle<TypedArray>::Cast(CompileRun("ta3"));
     CHECK_EQ(1, CountArrayBuffersInWeakList(isolate->heap()) - start);
     Handle<JSArrayBuffer> iab = v8::Utils::OpenHandle(*ab);
-    CHECK_EQ(3, CountViews(*iab));
-    CHECK(HasViewInWeakList(*iab, *v8::Utils::OpenHandle(*ta1)));
-    CHECK(HasViewInWeakList(*iab, *v8::Utils::OpenHandle(*ta2)));
-    CHECK(HasViewInWeakList(*iab, *v8::Utils::OpenHandle(*ta3)));
+    CHECK_EQ(3, CountViews(isolate->heap(), *iab));
+    CHECK(HasViewInWeakList(isolate->heap(), *iab,
+                            *v8::Utils::OpenHandle(*ta1)));
+    CHECK(HasViewInWeakList(isolate->heap(), *iab,
+                            *v8::Utils::OpenHandle(*ta2)));
+    CHECK(HasViewInWeakList(isolate->heap(), *iab,
+                            *v8::Utils::OpenHandle(*ta3)));
   }

   i::SNPrintF(source, "ta%d = null;", i);
@@ -316,13 +342,14 @@ static void TestTypedArrayFromScript(const char* constructor) {
       v8::Handle<v8::ArrayBuffer> ab =
           v8::Handle<v8::ArrayBuffer>::Cast(CompileRun("ab"));
       Handle<JSArrayBuffer> iab = v8::Utils::OpenHandle(*ab);
-      CHECK_EQ(2, CountViews(*iab));
+      CHECK_EQ(2, CountViews(isolate->heap(), *iab));
       for (int j = 1; j <= 3; j++) {
         if (j == i) continue;
         i::SNPrintF(source, "ta%d", j);
         v8::Handle<TypedArray> ta =
             v8::Handle<TypedArray>::Cast(CompileRun(source.start()));
-        CHECK(HasViewInWeakList(*iab, *v8::Utils::OpenHandle(*ta)));
+        CHECK(HasViewInWeakList(isolate->heap(), *iab,
+                                *v8::Utils::OpenHandle(*ta)));
       }
     }
@@ -336,7 +363,7 @@ static void TestTypedArrayFromScript(const char* constructor) {
     v8::Handle<v8::ArrayBuffer> ab =
         v8::Handle<v8::ArrayBuffer>::Cast(CompileRun("ab"));
     Handle<JSArrayBuffer> iab = v8::Utils::OpenHandle(*ab);
-    CHECK_EQ(0, CountViews(*iab));
+    CHECK_EQ(0, CountViews(isolate->heap(), *iab));
   }
 }
 }