[heap] fix crash during the scavenge of ArrayBuffer

The scavenger should not attempt to visit an ArrayBuffer's backing store: it is a user-supplied pointer that may have any alignment, and visiting it may result in a crash.

BUG=
R=jochen

Review URL: https://codereview.chromium.org/1406133003

Cr-Commit-Position: refs/heads/master@{#31611}
parent 8adb1c4705
commit 8d6a228819

src/heap/heap.cc (102 changed lines)
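To illustrate the crash described in the commit message, here is a minimal standalone sketch, not V8 code: a toy tagged-pointer heap whose visitor dereferences any word that looks like a heap object pointer. The tag value and object layout are assumptions made purely for illustration.

    #include <cstdint>

    // Toy model of a tagged-pointer heap; real V8 constants and layouts differ.
    constexpr uintptr_t kHeapObjectTag = 1;        // assumed tag, illustration only
    struct ToyHeapObject { ToyHeapObject* map; };  // assumed: first word is the map

    // What a naive visitor does with every tagged-looking word it encounters.
    ToyHeapObject* VisitWord(uintptr_t word) {
      if ((word & kHeapObjectTag) == 0) return nullptr;  // small integer, skip
      auto* object = reinterpret_cast<ToyHeapObject*>(word - kHeapObjectTag);
      // Dereference to inspect the object: fine for a real heap object, a wild
      // read for an arbitrary embedder-supplied backing-store pointer.
      return object->map;
    }

    int main() {
      ToyHeapObject meta{nullptr};
      ToyHeapObject obj{&meta};
      // A genuine (toy) heap object survives the visit.
      VisitWord(reinterpret_cast<uintptr_t>(&obj) | kHeapObjectTag);
      // An ArrayBuffer backing store is whatever pointer the embedder handed to
      // v8::ArrayBuffer::New(); if it merely looks tagged, the same dereference
      // hits unmapped or misaligned memory and crashes, so the fix below never
      // visits that field during the scavenge.
      return 0;
    }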
@@ -1921,42 +1921,8 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
         // for pointers to from semispace instead of looking for pointers
         // to new space.
         DCHECK(!target->IsMap());
-        Address obj_address = target->address();
-
-        // We are not collecting slots on new space objects during mutation
-        // thus we have to scan for pointers to evacuation candidates when we
-        // promote objects. But we should not record any slots in non-black
-        // objects. Grey object's slots would be rescanned.
-        // White object might not survive until the end of collection
-        // it would be a violation of the invariant to record it's slots.
-        bool record_slots = false;
-        if (incremental_marking()->IsCompacting()) {
-          MarkBit mark_bit = Marking::MarkBitFrom(target);
-          record_slots = Marking::IsBlack(mark_bit);
-        }
-#if V8_DOUBLE_FIELDS_UNBOXING
-        LayoutDescriptorHelper helper(target->map());
-        bool has_only_tagged_fields = helper.all_fields_tagged();
-
-        if (!has_only_tagged_fields) {
-          for (int offset = 0; offset < size;) {
-            int end_of_region_offset;
-            if (helper.IsTagged(offset, size, &end_of_region_offset)) {
-              IterateAndMarkPointersToFromSpace(
-                  target, obj_address + offset,
-                  obj_address + end_of_region_offset, record_slots,
-                  &Scavenger::ScavengeObject);
-            }
-            offset = end_of_region_offset;
-          }
-        } else {
-#endif
-          IterateAndMarkPointersToFromSpace(target, obj_address,
-                                            obj_address + size, record_slots,
-                                            &Scavenger::ScavengeObject);
-#if V8_DOUBLE_FIELDS_UNBOXING
-        }
-#endif
+
+        IteratePointersToFromSpace(target, size, &Scavenger::ScavengeObject);
       }
     }
@@ -4506,6 +4472,72 @@ void Heap::IterateAndMarkPointersToFromSpace(HeapObject* object, Address start,
 }
 
 
+void Heap::IteratePointersToFromSpace(HeapObject* target, int size,
+                                      ObjectSlotCallback callback) {
+  Address obj_address = target->address();
+
+  // We are not collecting slots on new space objects during mutation
+  // thus we have to scan for pointers to evacuation candidates when we
+  // promote objects. But we should not record any slots in non-black
+  // objects. Grey object's slots would be rescanned.
+  // White object might not survive until the end of collection
+  // it would be a violation of the invariant to record it's slots.
+  bool record_slots = false;
+  if (incremental_marking()->IsCompacting()) {
+    MarkBit mark_bit = Marking::MarkBitFrom(target);
+    record_slots = Marking::IsBlack(mark_bit);
+  }
+
+  // Do not scavenge JSArrayBuffer's contents
+  switch (target->ContentType()) {
+    case HeapObjectContents::kTaggedValues: {
+      IterateAndMarkPointersToFromSpace(target, obj_address, obj_address + size,
+                                        record_slots, callback);
+      break;
+    }
+    case HeapObjectContents::kMixedValues: {
+      if (target->IsFixedTypedArrayBase()) {
+        IterateAndMarkPointersToFromSpace(
+            target, obj_address + FixedTypedArrayBase::kBasePointerOffset,
+            obj_address + FixedTypedArrayBase::kHeaderSize, record_slots,
+            callback);
+      } else if (target->IsBytecodeArray()) {
+        IterateAndMarkPointersToFromSpace(
+            target, obj_address + BytecodeArray::kConstantPoolOffset,
+            obj_address + BytecodeArray::kHeaderSize, record_slots, callback);
+      } else if (target->IsJSArrayBuffer()) {
+        IterateAndMarkPointersToFromSpace(
+            target, obj_address,
+            obj_address + JSArrayBuffer::kByteLengthOffset + kPointerSize,
+            record_slots, callback);
+        IterateAndMarkPointersToFromSpace(
+            target, obj_address + JSArrayBuffer::kSize, obj_address + size,
+            record_slots, callback);
+#if V8_DOUBLE_FIELDS_UNBOXING
+      } else if (FLAG_unbox_double_fields) {
+        LayoutDescriptorHelper helper(target->map());
+        DCHECK(!helper.all_fields_tagged());
+
+        for (int offset = 0; offset < size;) {
+          int end_of_region_offset;
+          if (helper.IsTagged(offset, size, &end_of_region_offset)) {
+            IterateAndMarkPointersToFromSpace(
+                target, obj_address + offset,
+                obj_address + end_of_region_offset, record_slots, callback);
+          }
+          offset = end_of_region_offset;
+        }
+#endif
+      }
+      break;
+    }
+    case HeapObjectContents::kRawValues: {
+      break;
+    }
+  }
+}
+
+
 void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) {
   IterateStrongRoots(v, mode);
   IterateWeakRoots(v, mode);
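The JSArrayBuffer branch above visits the object's fields in two ranges and skips the window in between, which is where the user-supplied backing-store pointer lives. A rough sketch of that range arithmetic follows; the layout struct and its field names are illustrative stand-ins, not the exact offsets of this V8 version.

    #include <cstddef>
    #include <utility>
    #include <vector>

    // Stand-ins for JSArrayBuffer::kByteLengthOffset, JSArrayBuffer::kSize and
    // kPointerSize; only the relationship between them mirrors the real code.
    struct BufferLayout {
      size_t byte_length_offset;  // last field visited before the raw window
      size_t fixed_size;          // end of the fixed-size part of the object
      size_t pointer_size;        // width of one field
    };

    // Returns the [begin, end) offset ranges the scavenger may treat as tagged
    // slots; the gap between them (the backing-store pointer and other raw
    // data) is deliberately never visited.
    std::vector<std::pair<size_t, size_t>> VisitedRanges(const BufferLayout& l,
                                                         size_t object_size) {
      return {{0, l.byte_length_offset + l.pointer_size},
              {l.fixed_size, object_size}};
    }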
src/heap/heap.h
@@ -1241,6 +1241,9 @@ class Heap {
 
   // Iterate pointers to from semispace of new space found in memory interval
   // from start to end within |object|.
+  void IteratePointersToFromSpace(HeapObject* target, int size,
+                                  ObjectSlotCallback callback);
+
   void IterateAndMarkPointersToFromSpace(HeapObject* object, Address start,
                                          Address end, bool record_slots,
                                          ObjectSlotCallback callback);
test/cctest/test-api.cc
@@ -14203,6 +14203,32 @@ THREADED_TEST(SkipArrayBufferBackingStoreDuringGC) {
 }
 
 
+THREADED_TEST(SkipArrayBufferDuringScavenge) {
+  LocalContext env;
+  v8::Isolate* isolate = env->GetIsolate();
+  v8::HandleScope handle_scope(isolate);
+
+  // Make sure the pointer looks like a heap object
+  Local<v8::Object> tmp = v8::Object::New(isolate);
+  uint8_t* store_ptr =
+      reinterpret_cast<uint8_t*>(*reinterpret_cast<uintptr_t*>(*tmp));
+
+  // Make `store_ptr` point to from space
+  CcTest::heap()->CollectGarbage(i::NEW_SPACE);
+
+  // Create ArrayBuffer with pointer-that-cannot-be-visited in the backing store
+  Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, store_ptr, 8);
+
+  // Should not crash,
+  // i.e. backing store pointer should not be treated as a heap object pointer
+  CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in survivor space now
+  CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in old gen now
+
+  // Use `ab` to silence compiler warning
+  CHECK_EQ(ab->GetContents().Data(), store_ptr);
+}
+
+
 THREADED_TEST(SharedUint8Array) {
   i::FLAG_harmony_sharedarraybuffer = true;
   TypedArrayTestHelper<uint8_t, v8::Uint8Array, i::FixedUint8Array,
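A short note on the pointer trick in the new test, written as an annotated sketch rather than a statement about Local<T> internals: dereferencing the handle is assumed to expose the slot holding the object's tagged address, which gives the test a value that looks exactly like a new-space heap object while being useless as ArrayBuffer storage.

    #include <cstdint>
    #include "v8.h"

    // Sketch of what the test's double cast is after (assumption about handle
    // internals; the real test relies on the same property).
    uint8_t* MakeHeapObjectLookingPointer(v8::Isolate* isolate) {
      v8::Local<v8::Object> tmp = v8::Object::New(isolate);
      // Reading the handle slot as a uintptr_t yields the tagged address of a
      // freshly allocated new-space object; after the first scavenge it refers
      // to from-space, so later scavenges must not visit it as a heap object
      // even though it is stored as an ArrayBuffer backing store.
      return reinterpret_cast<uint8_t*>(*reinterpret_cast<uintptr_t*>(*tmp));
    }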