Reland "Delay setting up deserialized JSArrayBuffer"
This is a reland of 83786cb49d
Original change's description:
> Delay setting up deserialized JSArrayBuffer
>
> Setting up JSArrayBuffer may trigger GC. Delay this until we
> are done with deserialization.
>
> R=ulan@chromium.org
>
> Bug: chromium:1033395
> Change-Id: I6c79bc47421bc2662dc1906534fc8e820c351ced
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1965580
> Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
> Commit-Queue: Yang Guo <yangguo@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#65441}
Tbr: yangguo@chromium.org
Bug: chromium:1033395, chromium:1034059
Change-Id: I89d05768f52a480400d9c6f5aaaa233c5d5ba126
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1969896
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#65484}
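
In other words, the deserializer now only records which JSArrayBuffers still need a backing store while GC is forbidden, and attaches the backing stores in a second pass once deserialization has finished. Below is a minimal, self-contained sketch of that two-phase pattern; the types and names (Buffer, BackingStore, Deserializer::PostProcess, SetupOffHeapBackingStores) are made-up stand-ins for illustration, not the actual V8 classes.

// Minimal sketch of the "record now, set up later" pattern described above.
// Buffer, BackingStore and Deserializer are illustrative stand-ins, not the
// real V8 classes.
#include <cstddef>
#include <memory>
#include <vector>

struct BackingStore {
  std::vector<unsigned char> data;
};

struct Buffer {
  // During deserialization this holds an index into the backing-store table;
  // only Setup() attaches real memory.
  size_t store_index = 0;
  std::shared_ptr<BackingStore> store;

  // In the real code this step can allocate and report external memory,
  // which is what may trigger a GC.
  void Setup(std::shared_ptr<BackingStore> bs) { store = std::move(bs); }
};

class Deserializer {
 public:
  // Phase 1: runs while GC must not happen; only record the buffer.
  void PostProcess(Buffer* buffer) { deferred_buffers_.push_back(buffer); }

  // Phase 2: runs after deserialization is done and GC is allowed again.
  void SetupOffHeapBackingStores() {
    for (Buffer* buffer : deferred_buffers_) {
      buffer->Setup(backing_stores_[buffer->store_index]);
    }
    deferred_buffers_.clear();
  }

  std::vector<std::shared_ptr<BackingStore>> backing_stores_;

 private:
  std::vector<Buffer*> deferred_buffers_;
};

int main() {
  Deserializer d;
  d.backing_stores_.push_back(std::make_shared<BackingStore>());

  Buffer buffer;
  buffer.store_index = 0;
  d.PostProcess(&buffer);         // safe inside the no-GC window
  d.SetupOffHeapBackingStores();  // GC-triggering work happens here instead
  return buffer.store != nullptr ? 0 : 1;
}

The read-only and startup deserializers never expect such buffers, so the patch adds CheckNoArrayBufferBackingStores() there as a sanity check instead of a second pass.
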
commit ff7acbd697
parent 31dde88e8b
@@ -8058,12 +8058,14 @@ void BigInt::ToWordsArray(int* sign_bit, int* word_count,
 void Isolate::ReportExternalAllocationLimitReached() {
   i::Heap* heap = reinterpret_cast<i::Isolate*>(this)->heap();
   if (heap->gc_state() != i::Heap::NOT_IN_GC) return;
+  DCHECK(i::AllowHeapAllocation::IsAllowed());
   heap->ReportExternalMemoryPressure();
 }
 
 void Isolate::CheckMemoryPressure() {
   i::Heap* heap = reinterpret_cast<i::Isolate*>(this)->heap();
   if (heap->gc_state() != i::Heap::NOT_IN_GC) return;
+  DCHECK(i::AllowHeapAllocation::IsAllowed());
   heap->CheckMemoryPressure();
 }
@@ -274,9 +274,16 @@ HeapObject Deserializer::PostProcessNewObject(HeapObject obj,
   } else if (obj.IsJSDataView()) {
     JSDataView data_view = JSDataView::cast(obj);
     JSArrayBuffer buffer = JSArrayBuffer::cast(data_view.buffer());
-    data_view.set_data_pointer(
-        reinterpret_cast<uint8_t*>(buffer.backing_store()) +
-        data_view.byte_offset());
+    void* backing_store = nullptr;
+    if (buffer.backing_store() != nullptr) {
+      // The backing store of the JSArrayBuffer has not been correctly restored
+      // yet, as that may trigger GC. The backing_store field currently contains
+      // a numbered reference to an already deserialized backing store.
+      size_t store_index = reinterpret_cast<size_t>(buffer.backing_store());
+      backing_store = backing_stores_[store_index]->buffer_start();
+    }
+    data_view.set_data_pointer(reinterpret_cast<uint8_t*>(backing_store) +
+                               data_view.byte_offset());
   } else if (obj.IsJSTypedArray()) {
     JSTypedArray typed_array = JSTypedArray::cast(obj);
     // Fixup typed array pointers.
@@ -294,15 +301,9 @@ HeapObject Deserializer::PostProcessNewObject(HeapObject obj,
     }
   } else if (obj.IsJSArrayBuffer()) {
     JSArrayBuffer buffer = JSArrayBuffer::cast(obj);
-    // Only fixup for the off-heap case.
+    // Only fixup for the off-heap case. This may trigger GC.
     if (buffer.backing_store() != nullptr) {
-      // Serializer writes backing store ref in |backing_store| field.
-      size_t store_index = reinterpret_cast<size_t>(buffer.backing_store());
-      auto backing_store = backing_stores_[store_index];
-      SharedFlag shared = backing_store && backing_store->is_shared()
-                              ? SharedFlag::kShared
-                              : SharedFlag::kNotShared;
-      buffer.Setup(shared, backing_store);
+      new_off_heap_array_buffers_.push_back(handle(buffer, isolate_));
     }
   } else if (obj.IsBytecodeArray()) {
     // TODO(mythria): Remove these once we store the default values for these
@@ -76,6 +76,10 @@ class V8_EXPORT_PRIVATE Deserializer : public SerializerDeserializer {
     attached_objects_.push_back(attached_object);
   }
 
+  void CheckNoArrayBufferBackingStores() {
+    CHECK_EQ(new_off_heap_array_buffers().size(), 0);
+  }
+
   Isolate* isolate() const { return isolate_; }
   SnapshotByteSource* source() { return &source_; }
   const std::vector<AllocationSite>& new_allocation_sites() const {
@@ -98,6 +102,14 @@ class V8_EXPORT_PRIVATE Deserializer : public SerializerDeserializer {
     return new_scripts_;
   }
 
+  const std::vector<Handle<JSArrayBuffer>>& new_off_heap_array_buffers() const {
+    return new_off_heap_array_buffers_;
+  }
+
+  std::shared_ptr<BackingStore> backing_store(size_t i) {
+    return backing_stores_[i];
+  }
+
   DeserializerAllocator* allocator() { return &allocator_; }
   bool deserializing_user_code() const { return deserializing_user_code_; }
   bool can_rehash() const { return can_rehash_; }
@@ -172,6 +184,7 @@ class V8_EXPORT_PRIVATE Deserializer : public SerializerDeserializer {
   std::vector<CallHandlerInfo> call_handler_infos_;
   std::vector<Handle<String>> new_internalized_strings_;
   std::vector<Handle<Script>> new_scripts_;
+  std::vector<Handle<JSArrayBuffer>> new_off_heap_array_buffers_;
   std::vector<std::shared_ptr<BackingStore>> backing_stores_;
 
   DeserializerAllocator allocator_;
@@ -90,6 +90,15 @@ void ObjectDeserializer::CommitPostProcessedObjects() {
                                    MaybeObjectHandle::Weak(script));
     heap->SetRootScriptList(*list);
   }
+
+  for (Handle<JSArrayBuffer> buffer : new_off_heap_array_buffers()) {
+    // Serializer writes backing store ref in |backing_store| field.
+    size_t store_index = reinterpret_cast<size_t>(buffer->backing_store());
+    auto bs = backing_store(store_index);
+    SharedFlag shared =
+        bs && bs->is_shared() ? SharedFlag::kShared : SharedFlag::kNotShared;
+    buffer->Setup(shared, bs);
+  }
 }
 
 void ObjectDeserializer::LinkAllocationSites() {
@@ -37,27 +37,46 @@ MaybeHandle<Object> PartialDeserializer::Deserialize(
 
   AddAttachedObject(global_proxy);
 
-  DisallowHeapAllocation no_gc;
-  // Keep track of the code space start and end pointers in case new
-  // code objects were unserialized
-  CodeSpace* code_space = isolate->heap()->code_space();
-  Address start_address = code_space->top();
-  Object root;
-  VisitRootPointer(Root::kPartialSnapshotCache, nullptr, FullObjectSlot(&root));
-  DeserializeDeferredObjects();
-  DeserializeEmbedderFields(embedder_fields_deserializer);
+  Handle<Object> result;
+  {
+    DisallowHeapAllocation no_gc;
+    // Keep track of the code space start and end pointers in case new
+    // code objects were unserialized
+    CodeSpace* code_space = isolate->heap()->code_space();
+    Address start_address = code_space->top();
+    Object root;
+    VisitRootPointer(Root::kPartialSnapshotCache, nullptr,
+                     FullObjectSlot(&root));
+    DeserializeDeferredObjects();
+    DeserializeEmbedderFields(embedder_fields_deserializer);
 
-  allocator()->RegisterDeserializedObjectsForBlackAllocation();
+    allocator()->RegisterDeserializedObjectsForBlackAllocation();
 
-  // There's no code deserialized here. If this assert fires then that's
-  // changed and logging should be added to notify the profiler et al of the
-  // new code, which also has to be flushed from instruction cache.
-  CHECK_EQ(start_address, code_space->top());
+    // There's no code deserialized here. If this assert fires then that's
+    // changed and logging should be added to notify the profiler et al of the
+    // new code, which also has to be flushed from instruction cache.
+    CHECK_EQ(start_address, code_space->top());
 
-  if (FLAG_rehash_snapshot && can_rehash()) Rehash();
-  LogNewMapEvents();
+    if (FLAG_rehash_snapshot && can_rehash()) Rehash();
+    LogNewMapEvents();
 
-  return Handle<Object>(root, isolate);
+    result = handle(root, isolate);
+  }
+
+  SetupOffHeapArrayBufferBackingStores();
+
+  return result;
 }
 
+void PartialDeserializer::SetupOffHeapArrayBufferBackingStores() {
+  for (Handle<JSArrayBuffer> buffer : new_off_heap_array_buffers()) {
+    // Serializer writes backing store ref in |backing_store| field.
+    size_t store_index = reinterpret_cast<size_t>(buffer->backing_store());
+    auto bs = backing_store(store_index);
+    SharedFlag shared =
+        bs && bs->is_shared() ? SharedFlag::kShared : SharedFlag::kNotShared;
+    buffer->Setup(shared, bs);
+  }
+}
+
 void PartialDeserializer::DeserializeEmbedderFields(
@@ -33,6 +33,8 @@ class V8_EXPORT_PRIVATE PartialDeserializer final : public Deserializer {
 
   void DeserializeEmbedderFields(
       v8::DeserializeEmbedderFieldsCallback embedder_fields_deserializer);
+
+  void SetupOffHeapArrayBufferBackingStores();
 };
 
 }  // namespace internal
@@ -51,6 +51,7 @@ void ReadOnlyDeserializer::DeserializeInto(Isolate* isolate) {
       if (object->IsUndefined(roots)) break;
     }
     DeserializeDeferredObjects();
+    CheckNoArrayBufferBackingStores();
   }
 
   if (FLAG_rehash_snapshot && can_rehash()) {
@@ -44,6 +44,8 @@ void StartupDeserializer::DeserializeInto(Isolate* isolate) {
     FlushICache();
   }
 
+  CheckNoArrayBufferBackingStores();
+
   isolate->heap()->set_native_contexts_list(
       ReadOnlyRoots(isolate).undefined_value());
   // The allocation site list is build during root iteration, but if no sites