[serializer] Enable TypedArrays and ArrayBuffers in the snapshot.

Previously we could not support these because of their unusual memory layout,
which can include off-heap backing store allocations. We now serialize those
allocations into the snapshot and then fix up references to them in the
PostProcess step of deserialization (a simplified sketch follows the change
metadata below).

Bug: v8:6691
Cq-Include-Trybots: master.tryserver.chromium.linux:linux_chromium_rel_ng
Change-Id: Ic215049c06e6ee655bd17c11dfab0d8630568a84
Reviewed-on: https://chromium-review.googlesource.com/597709
Reviewed-by: Yang Guo <yangguo@chromium.org>
Commit-Queue: Peter Marshall <petermarshall@chromium.org>
Cr-Commit-Position: refs/heads/master@{#47271}
Author: Peter Marshall
Committed: 2017-08-09 11:08:20 +02:00 by Commit Bot
Parent: 402f276011
Commit: a5f321cd9b
13 changed files with 445 additions and 51 deletions
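Below is a minimal standalone sketch of the scheme described above. All names
here (FakeArrayBuffer, FakeSnapshot, and so on) are invented for illustration
and this is not V8 code; in particular, the real serializer stashes the
Smi-encoded store index directly in the object's backing_store slot, whereas
this sketch keeps a side table of indices.

// Standalone illustration only: copy each off-heap backing store into the
// snapshot once, remember a small store index per object, and during
// deserialization allocate fresh memory and fix up the pointer.
#include <cassert>
#include <cstdint>
#include <cstring>
#include <vector>

struct FakeArrayBuffer {
  // nullptr means "on-heap" (no separate allocation), as for JSArrayBuffer.
  void* backing_store = nullptr;
  uint32_t byte_length = 0;
};

struct FakeSnapshot {
  // stores[0] stays empty: index 0 is reserved to mean "no off-heap store",
  // mirroring off_heap_backing_stores_ starting with a nullptr entry.
  std::vector<std::vector<uint8_t>> stores;
  // Per-object fix-up records (store index and length); index 0 = none.
  std::vector<uint32_t> store_index;
  std::vector<uint32_t> byte_length;
};

FakeSnapshot Serialize(const std::vector<FakeArrayBuffer>& objects) {
  FakeSnapshot out;
  out.stores.emplace_back();  // reserve index 0
  for (const FakeArrayBuffer& obj : objects) {
    uint32_t index = 0;
    if (obj.backing_store != nullptr) {
      const uint8_t* data = static_cast<const uint8_t*>(obj.backing_store);
      out.stores.emplace_back(data, data + obj.byte_length);  // raw copy
      index = static_cast<uint32_t>(out.stores.size() - 1);
    }
    out.store_index.push_back(index);
    out.byte_length.push_back(obj.byte_length);
  }
  return out;
}

std::vector<FakeArrayBuffer> Deserialize(
    const FakeSnapshot& in, std::vector<std::vector<uint8_t>>* allocations) {
  std::vector<FakeArrayBuffer> objects;
  for (size_t i = 0; i < in.store_index.size(); i++) {
    FakeArrayBuffer obj;
    obj.byte_length = in.byte_length[i];
    if (in.store_index[i] != 0) {
      // The "PostProcess" step: make a fresh allocation and point the
      // deserialized object at it. (Moving a std::vector keeps its buffer,
      // so these data() pointers stay valid as `allocations` grows.)
      allocations->push_back(in.stores[in.store_index[i]]);
      obj.backing_store = allocations->back().data();
    }
    objects.push_back(obj);
  }
  return objects;
}

int main() {
  uint8_t payload[4] = {1, 2, 3, 4};
  FakeArrayBuffer off_heap{payload, 4};
  FakeArrayBuffer on_heap;  // backing_store == nullptr, nothing to fix up
  FakeSnapshot blob = Serialize({off_heap, on_heap});

  std::vector<std::vector<uint8_t>> allocations;
  std::vector<FakeArrayBuffer> restored = Deserialize(blob, &allocations);
  assert(restored[0].backing_store != nullptr);
  assert(std::memcmp(restored[0].backing_store, payload, 4) == 0);
  assert(restored[1].backing_store == nullptr);
  return 0;
}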

View File

@ -8194,8 +8194,12 @@ class V8_EXPORT SnapshotCreator {
* Set the default context to be included in the snapshot blob.
* The snapshot will not contain the global proxy; we expect either one, or a
* global object template from which to create one, to be provided upon
* deserialization.
*
* \param callback optional callback to serialize internal fields.
*/
void SetDefaultContext(Local<Context> context);
void SetDefaultContext(Local<Context> context,
SerializeInternalFieldsCallback callback =
SerializeInternalFieldsCallback());
/**
* Add additional context to be included in the snapshot blob.
@ -8547,7 +8551,9 @@ class V8_EXPORT Context {
static Local<Context> New(
Isolate* isolate, ExtensionConfiguration* extensions = NULL,
MaybeLocal<ObjectTemplate> global_template = MaybeLocal<ObjectTemplate>(),
MaybeLocal<Value> global_object = MaybeLocal<Value>());
MaybeLocal<Value> global_object = MaybeLocal<Value>(),
DeserializeInternalFieldsCallback internal_fields_deserializer =
DeserializeInternalFieldsCallback());
/**
* Create a new context from a (non-default) context snapshot. There
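For context, a usage sketch of the two new parameters above, following the
pattern of the tests added later in this change. SerializeInternalFields,
DeserializeInternalFields and isolate_from_blob are placeholders for
embedder-defined callbacks and an isolate created from the blob; this is not a
complete program.

// Usage sketch only; mirrors TypedArrayTestHelper in the tests below.
v8::SnapshotCreator creator;
v8::Isolate* snapshot_isolate = creator.GetIsolate();
{
  v8::HandleScope handle_scope(snapshot_isolate);
  v8::Local<v8::Context> context = v8::Context::New(snapshot_isolate);
  // New: hand SetDefaultContext a serializer for embedder (internal) fields.
  creator.SetDefaultContext(
      context,
      v8::SerializeInternalFieldsCallback(SerializeInternalFields, nullptr));
}
v8::StartupData blob =
    creator.CreateBlob(v8::SnapshotCreator::FunctionCodeHandling::kClear);

// Later, when creating a context on an isolate built from `blob`, pass the
// matching deserializer through the new Context::New parameter.
v8::Local<v8::Context> restored = v8::Context::New(
    isolate_from_blob, nullptr, v8::MaybeLocal<v8::ObjectTemplate>(),
    v8::MaybeLocal<v8::Value>(),
    v8::DeserializeInternalFieldsCallback(DeserializeInternalFields, nullptr));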

View File

@ -84,6 +84,11 @@ class SerializerReference {
ValueIndexBits::encode(index));
}
static SerializerReference OffHeapBackingStoreReference(uint32_t index) {
return SerializerReference(SpaceBits::encode(kExternalSpace) |
ValueIndexBits::encode(index));
}
static SerializerReference LargeObjectReference(uint32_t index) {
return SerializerReference(SpaceBits::encode(LO_SPACE) |
ValueIndexBits::encode(index));
@ -119,6 +124,15 @@ class SerializerReference {
return ValueIndexBits::decode(bitfield_);
}
bool is_off_heap_backing_store_reference() const {
return SpaceBits::decode(bitfield_) == kExternalSpace;
}
uint32_t off_heap_backing_store_index() const {
DCHECK(is_off_heap_backing_store_reference());
return ValueIndexBits::decode(bitfield_);
}
uint32_t large_object_index() const {
DCHECK(is_back_reference());
return ValueIndexBits::decode(bitfield_);
@ -160,6 +174,8 @@ class SerializerReference {
// [ kSpecialValueSpace ] [ Special value index ]
// Attached reference
// [ kAttachedReferenceSpace ] [ Attached reference index ]
// External
// [ kExternalSpace ] [ External reference index ]
static const int kChunkOffsetSize = kPageSizeBits - kObjectAlignmentBits;
static const int kChunkIndexSize = 32 - kChunkOffsetSize - kSpaceTagSize;
@ -167,7 +183,8 @@ class SerializerReference {
static const int kSpecialValueSpace = LAST_SPACE + 1;
static const int kAttachedReferenceSpace = kSpecialValueSpace + 1;
STATIC_ASSERT(kAttachedReferenceSpace < (1 << kSpaceTagSize));
static const int kExternalSpace = kAttachedReferenceSpace + 1;
STATIC_ASSERT(kExternalSpace < (1 << kSpaceTagSize));
static const int kInvalidValue = 0;
static const int kDummyValue = 1;
@ -193,13 +210,13 @@ class SerializerReferenceMap {
SerializerReferenceMap()
: no_allocation_(), map_(), attached_reference_index_(0) {}
SerializerReference Lookup(HeapObject* obj) {
SerializerReference Lookup(void* obj) {
Maybe<uint32_t> maybe_index = map_.Get(obj);
return maybe_index.IsJust() ? SerializerReference(maybe_index.FromJust())
: SerializerReference();
}
void Add(HeapObject* obj, SerializerReference b) {
void Add(void* obj, SerializerReference b) {
DCHECK(b.is_valid());
DCHECK(map_.Get(obj).IsNothing());
map_.Set(obj, b.bitfield_);
@ -214,7 +231,7 @@ class SerializerReferenceMap {
private:
DisallowHeapAllocation no_allocation_;
HeapObjectToIndexHashMap map_;
PointerToIndexHashMap<void*> map_;
int attached_reference_index_;
DISALLOW_COPY_AND_ASSIGN(SerializerReferenceMap);
};
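For reference, the encoding these helpers manipulate is just a small bitfield
pairing a space tag with a value index, so OffHeapBackingStoreReference(7) tags
index 7 with kExternalSpace. A tiny standalone sketch of that packing follows;
the field width and tag value are illustrative assumptions, not the real
constants.

#include <cassert>
#include <cstdint>

// Illustrative packing only: space tag in the low bits, value index above it.
constexpr uint32_t kSpaceTagSize = 4;   // assumed width
constexpr uint32_t kExternalSpace = 8;  // assumed tag value

constexpr uint32_t Encode(uint32_t space, uint32_t index) {
  return space | (index << kSpaceTagSize);
}
constexpr uint32_t DecodeSpace(uint32_t bits) {
  return bits & ((1u << kSpaceTagSize) - 1);
}
constexpr uint32_t DecodeIndex(uint32_t bits) {
  return bits >> kSpaceTagSize;
}

int main() {
  uint32_t ref = Encode(kExternalSpace, 7);    // ~ OffHeapBackingStoreReference(7)
  assert(DecodeSpace(ref) == kExternalSpace);  // ~ is_off_heap_backing_store_reference()
  assert(DecodeIndex(ref) == 7);               // ~ off_heap_backing_store_index()
  return 0;
}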

View File

@ -554,6 +554,7 @@ struct SnapshotCreatorData {
ArrayBufferAllocator allocator_;
Isolate* isolate_;
Persistent<Context> default_context_;
SerializeInternalFieldsCallback default_embedder_fields_serializer_;
PersistentValueVector<Context> contexts_;
PersistentValueVector<Template> templates_;
std::vector<SerializeInternalFieldsCallback> embedder_fields_serializers_;
@ -596,7 +597,8 @@ Isolate* SnapshotCreator::GetIsolate() {
return SnapshotCreatorData::cast(data_)->isolate_;
}
void SnapshotCreator::SetDefaultContext(Local<Context> context) {
void SnapshotCreator::SetDefaultContext(
Local<Context> context, SerializeInternalFieldsCallback callback) {
DCHECK(!context.IsEmpty());
SnapshotCreatorData* data = SnapshotCreatorData::cast(data_);
DCHECK(!data->created_);
@ -604,6 +606,7 @@ void SnapshotCreator::SetDefaultContext(Local<Context> context) {
Isolate* isolate = data->isolate_;
CHECK_EQ(isolate, context->GetIsolate());
data->default_context_.Reset(isolate, context);
data->default_embedder_fields_serializer_ = callback;
}
size_t SnapshotCreator::AddContext(Local<Context> context,
@ -711,9 +714,11 @@ StartupData SnapshotCreator::CreateBlob(
bool can_be_rehashed = true;
{
// The default snapshot does not support embedder fields.
// The default context is created with a handler for embedder fields which
// determines how they are handled if encountered during serialization.
i::PartialSerializer partial_serializer(
isolate, &startup_serializer, v8::SerializeInternalFieldsCallback());
isolate, &startup_serializer,
data->default_embedder_fields_serializer_);
partial_serializer.Serialize(&default_context, false);
can_be_rehashed = can_be_rehashed && partial_serializer.can_be_rehashed();
context_snapshots.Add(new i::SnapshotData(&partial_serializer));
@ -6561,12 +6566,13 @@ Local<Context> NewContext(
return Utils::ToLocal(scope.CloseAndEscape(env));
}
Local<Context> v8::Context::New(v8::Isolate* external_isolate,
v8::ExtensionConfiguration* extensions,
v8::MaybeLocal<ObjectTemplate> global_template,
v8::MaybeLocal<Value> global_object) {
Local<Context> v8::Context::New(
v8::Isolate* external_isolate, v8::ExtensionConfiguration* extensions,
v8::MaybeLocal<ObjectTemplate> global_template,
v8::MaybeLocal<Value> global_object,
DeserializeInternalFieldsCallback internal_fields_deserializer) {
return NewContext(external_isolate, extensions, global_template,
global_object, 0, DeserializeInternalFieldsCallback());
global_object, 0, internal_fields_deserializer);
}
MaybeLocal<Context> v8::Context::FromSnapshot(

View File

@ -2954,6 +2954,11 @@ int FixedTypedArrayBase::DataSize() const {
return DataSize(map()->instance_type());
}
size_t FixedTypedArrayBase::ByteLength() const {
return static_cast<size_t>(length()) *
static_cast<size_t>(ElementSize(map()->instance_type()));
}
int FixedTypedArrayBase::size() const {
return OBJECT_POINTER_ALIGN(kDataOffset + DataSize());
}

View File

@ -19204,8 +19204,6 @@ bool JSArrayBuffer::SetupAllocatingData(Handle<JSArrayBuffer> array_buffer,
bool initialize, SharedFlag shared) {
void* data;
CHECK(isolate->array_buffer_allocator() != NULL);
// Prevent creating array buffers when serializing.
DCHECK(!isolate->serializer_enabled());
if (allocated_length != 0) {
if (allocated_length >= MB)
isolate->counters()->array_buffer_big_allocations()->AddSample(

View File

@ -3487,6 +3487,8 @@ class FixedTypedArrayBase: public FixedArrayBase {
inline int DataSize() const;
inline size_t ByteLength() const;
private:
static inline int ElementSize(InstanceType type);

View File

@ -210,6 +210,26 @@ HeapObject* Deserializer::PostProcessNewObject(HeapObject* obj, int space) {
NativesExternalStringResource::DecodeForDeserialization(
string->resource()));
isolate_->heap()->RegisterExternalString(string);
} else if (obj->IsJSArrayBuffer()) {
JSArrayBuffer* buffer = JSArrayBuffer::cast(obj);
// Only fixup for the off-heap case.
if (buffer->backing_store() != nullptr) {
Smi* store_index = reinterpret_cast<Smi*>(buffer->backing_store());
void* backing_store = off_heap_backing_stores_[store_index->value()];
buffer->set_backing_store(backing_store);
buffer->set_allocation_base(backing_store);
isolate_->heap()->RegisterNewArrayBuffer(buffer);
}
} else if (obj->IsFixedTypedArrayBase()) {
FixedTypedArrayBase* fta = FixedTypedArrayBase::cast(obj);
// Only fixup for the off-heap case.
if (fta->base_pointer() == nullptr) {
Smi* store_index = reinterpret_cast<Smi*>(fta->external_pointer());
void* backing_store = off_heap_backing_stores_[store_index->value()];
fta->set_external_pointer(backing_store);
}
}
if (FLAG_rehash_snapshot && can_rehash_ && !deserializing_user_code()) {
if (obj->IsString()) {
@ -647,6 +667,17 @@ bool Deserializer::ReadData(Object** current, Object** limit, int source_space,
break;
}
case kOffHeapBackingStore: {
int byte_length = source_.GetInt();
byte* backing_store = static_cast<byte*>(
isolate->array_buffer_allocator()->AllocateUninitialized(
byte_length));
CHECK_NOT_NULL(backing_store);
source_.CopyRaw(backing_store, byte_length);
off_heap_backing_stores_.push_back(backing_store);
break;
}
case kAlignmentPrefix:
case kAlignmentPrefix + 1:
case kAlignmentPrefix + 2:
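The record consumed by the kOffHeapBackingStore case above is simply the tag
byte followed by the byte length and the raw store contents. Below is a minimal
standalone sketch of writing and reading such a record; it uses a plain 4-byte
native-endian length for simplicity, whereas the real SnapshotByteSink/Source
use their own variable-length integer encoding.

#include <cassert>
#include <cstdint>
#include <cstring>
#include <vector>

constexpr uint8_t kOffHeapBackingStore = 0x35;

// Append [tag][4-byte length][raw bytes] to the sink.
void PutBackingStore(std::vector<uint8_t>* sink, const uint8_t* data,
                     uint32_t byte_length) {
  sink->push_back(kOffHeapBackingStore);
  uint8_t len[4];
  std::memcpy(len, &byte_length, sizeof(len));
  sink->insert(sink->end(), len, len + 4);
  sink->insert(sink->end(), data, data + byte_length);
}

// Read one record starting at `pos`; append the new allocation to `stores`,
// mirroring off_heap_backing_stores_.push_back(...) in the deserializer.
// Returns the position just past the record.
size_t GetBackingStore(const std::vector<uint8_t>& source, size_t pos,
                       std::vector<std::vector<uint8_t>>* stores) {
  assert(source[pos] == kOffHeapBackingStore);
  uint32_t byte_length;
  std::memcpy(&byte_length, source.data() + pos + 1, sizeof(byte_length));
  const uint8_t* begin = source.data() + pos + 5;
  stores->emplace_back(begin, begin + byte_length);
  return pos + 5 + byte_length;
}

int main() {
  uint8_t payload[3] = {7, 8, 9};
  std::vector<uint8_t> blob;
  PutBackingStore(&blob, payload, 3);

  std::vector<std::vector<uint8_t>> stores;
  stores.emplace_back();  // index 0 reserved for nullptr, as in the header
  size_t end = GetBackingStore(blob, 0, &stores);
  assert(end == blob.size());
  assert(stores[1].size() == 3 && stores[1][2] == 9);
  return 0;
}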

View File

@ -5,6 +5,8 @@
#ifndef V8_SNAPSHOT_DESERIALIZER_H_
#define V8_SNAPSHOT_DESERIALIZER_H_
#include <vector>
#include "src/heap/heap.h"
#include "src/objects.h"
#include "src/snapshot/serializer-common.h"
@ -53,6 +55,9 @@ class Deserializer : public SerializerDeserializer {
next_alignment_(kWordAligned),
can_rehash_(false) {
DecodeReservation(data->Reservations());
// We start the indices here at 1, so that we can distinguish between an
// actual index and a nullptr in a deserialized object requiring fix-up.
off_heap_backing_stores_.push_back(nullptr);
}
void Initialize(Isolate* isolate);
@ -139,6 +144,7 @@ class Deserializer : public SerializerDeserializer {
List<Handle<String>> new_internalized_strings_;
List<Handle<Script>> new_scripts_;
List<TransitionArray*> transition_arrays_;
std::vector<byte*> off_heap_backing_stores_;
const bool deserializing_user_code_;

View File

@ -59,9 +59,6 @@ void PartialSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code,
DCHECK(Map::cast(obj)->code_cache() == obj->GetHeap()->empty_fixed_array());
}
// Replace typed arrays by undefined.
if (obj->IsJSTypedArray()) obj = isolate_->heap()->undefined_value();
if (SerializeHotObject(obj, how_to_code, where_to_point, skip)) return;
int root_index = root_index_map_.Lookup(obj);
@ -144,6 +141,9 @@ void PartialSerializer::SerializeEmbedderFields() {
int embedder_fields_count = obj->GetEmbedderFieldCount();
for (int i = 0; i < embedder_fields_count; i++) {
if (obj->GetEmbedderField(i)->IsHeapObject()) continue;
// Do not attempt to serialize nullptr embedder fields.
if (obj->GetEmbedderField(i) == 0) continue;
StartupData data = serialize_embedder_fields_.callback(
v8::Utils::ToLocal(obj), i, serialize_embedder_fields_.data);
sink_.Put(kNewObject + reference.space(), "embedder field holder");

View File

@ -174,6 +174,9 @@ class SerializerDeserializer : public RootVisitor {
// Used for embedder-provided serialization data for embedder fields.
static const int kEmbedderFieldsData = 0x1f;
// Used for embedder-allocated backing stores for TypedArrays.
static const int kOffHeapBackingStore = 0x35;
// 8 hot (recently seen or back-referenced) objects with optional skip.
static const int kNumberOfHotObjects = 8;
STATIC_ASSERT(kNumberOfHotObjects == HotObjectsList::kSize);
@ -183,7 +186,7 @@ class SerializerDeserializer : public RootVisitor {
static const int kHotObjectWithSkip = 0x58;
static const int kHotObjectMask = 0x07;
// 0x35..0x37, 0x55..0x57, 0x75..0x7f unused.
// 0x36..0x37, 0x55..0x57, 0x75..0x7f unused.
// ---------- byte code range 0x80..0xff ----------
// First 32 root array items.

View File

@ -21,7 +21,8 @@ Serializer::Serializer(Isolate* isolate)
code_address_map_(NULL),
num_maps_(0),
large_objects_total_size_(0),
seen_large_objects_index_(0) {
seen_large_objects_index_(0),
seen_backing_stores_index_(1) {
// The serializer is meant to be used only to generate initial heap images
// from a context in which there is only one isolate.
for (int i = 0; i < kNumberOfPreallocatedSpaces; i++) {
@ -282,6 +283,12 @@ int Serializer::PutAlignmentPrefix(HeapObject* object) {
return 0;
}
SerializerReference Serializer::AllocateOffHeapBackingStore() {
DCHECK_NE(0, seen_backing_stores_index_);
return SerializerReference::OffHeapBackingStoreReference(
seen_backing_stores_index_++);
}
SerializerReference Serializer::AllocateLargeObject(int size) {
// Large objects are allocated one-by-one when deserializing. We do not
// have to keep track of multiple chunks.
@ -409,6 +416,74 @@ void Serializer::ObjectSerializer::SerializePrologue(AllocationSpace space,
serializer_->SerializeObject(map, kPlain, kStartOfObject, 0);
}
int32_t Serializer::ObjectSerializer::SerializeBackingStore(
void* backing_store, int32_t byte_length) {
SerializerReference reference =
serializer_->reference_map()->Lookup(backing_store);
// Serialize the off-heap backing store.
if (!reference.is_valid()) {
sink_->Put(kOffHeapBackingStore, "Off-heap backing store");
sink_->PutInt(byte_length, "length");
sink_->PutRaw(static_cast<byte*>(backing_store), byte_length,
"BackingStore");
reference = serializer_->AllocateOffHeapBackingStore();
// Mark this backing store as already serialized.
serializer_->reference_map()->Add(backing_store, reference);
}
return static_cast<int32_t>(reference.off_heap_backing_store_index());
}
// When a JSArrayBuffer is neutered, the FixedTypedArray that points to the
// same backing store does not know anything about it. This fixup step finds
// neutered TypedArrays and clears the values in the FixedTypedArray so that
// we don't try to serialize the now invalid backing store.
void Serializer::ObjectSerializer::FixupIfNeutered() {
JSTypedArray* array = JSTypedArray::cast(object_);
if (!array->WasNeutered()) return;
FixedTypedArrayBase* fta = FixedTypedArrayBase::cast(array->elements());
DCHECK(fta->base_pointer() == nullptr);
fta->set_external_pointer(Smi::kZero);
fta->set_length(0);
}
void Serializer::ObjectSerializer::SerializeJSArrayBuffer() {
JSArrayBuffer* buffer = JSArrayBuffer::cast(object_);
void* backing_store = buffer->backing_store();
// We cannot store byte_length larger than Smi range in the snapshot.
// Attempt to make sure that NumberToInt32 produces something sensible.
CHECK(buffer->byte_length()->IsSmi());
int32_t byte_length = NumberToInt32(buffer->byte_length());
// The embedder-allocated backing store only exists for the off-heap case.
if (backing_store != nullptr) {
int32_t ref = SerializeBackingStore(backing_store, byte_length);
buffer->set_backing_store(Smi::FromInt(ref));
}
SerializeContent();
}
void Serializer::ObjectSerializer::SerializeFixedTypedArray() {
FixedTypedArrayBase* fta = FixedTypedArrayBase::cast(object_);
void* backing_store = fta->DataPtr();
// We cannot store byte_length larger than Smi range in the snapshot.
CHECK(fta->ByteLength() < Smi::kMaxValue);
int32_t byte_length = static_cast<int32_t>(fta->ByteLength());
// The heap contains empty FixedTypedArrays for each type, with a byte_length
of 0 (e.g. empty_fixed_uint8_array). These look like they are 'on-heap'
// but have no data to copy, so we skip the backing store here.
// The embedder-allocated backing store only exists for the off-heap case.
if (byte_length > 0 && fta->base_pointer() == nullptr) {
int32_t ref = SerializeBackingStore(backing_store, byte_length);
fta->set_external_pointer(Smi::FromInt(ref));
}
SerializeContent();
}
void Serializer::ObjectSerializer::SerializeExternalString() {
Heap* heap = serializer_->isolate()->heap();
if (object_->map() != heap->native_source_string_map()) {
@ -535,9 +610,17 @@ void Serializer::ObjectSerializer::Serialize() {
} else if (object_->IsSeqTwoByteString()) {
SeqTwoByteString::cast(object_)->clear_padding();
}
// We cannot serialize typed array objects correctly.
DCHECK(!object_->IsJSTypedArray());
if (object_->IsJSTypedArray()) {
FixupIfNeutered();
}
if (object_->IsJSArrayBuffer()) {
SerializeJSArrayBuffer();
return;
}
if (object_->IsFixedTypedArrayBase()) {
SerializeFixedTypedArray();
return;
}
// We don't expect fillers.
DCHECK(!object_->IsFiller());

View File

@ -5,6 +5,8 @@
#ifndef V8_SNAPSHOT_SERIALIZER_H_
#define V8_SNAPSHOT_SERIALIZER_H_
#include <map>
#include "src/isolate.h"
#include "src/log.h"
#include "src/objects.h"
@ -189,6 +191,7 @@ class Serializer : public SerializerDeserializer {
}
// This will return the space for an object.
SerializerReference AllocateOffHeapBackingStore();
SerializerReference AllocateLargeObject(int size);
SerializerReference AllocateMap();
SerializerReference Allocate(AllocationSpace space, int size);
@ -261,6 +264,13 @@ class Serializer : public SerializerDeserializer {
uint32_t large_objects_total_size_;
uint32_t seen_large_objects_index_;
// Used to keep track of the off-heap backing stores used by TypedArrays/
// ArrayBuffers. Note that the index begins at 1 and not 0, because when a
// TypedArray has an on-heap backing store, the backing_store pointer in the
// corresponding ArrayBuffer will be null, which makes it indistinguishable
// from index 0.
uint32_t seen_backing_stores_index_;
List<byte> code_buffer_;
// To handle stack overflow.
@ -322,6 +332,10 @@ class Serializer::ObjectSerializer : public ObjectVisitor {
// bytes to skip instead of performing a skip instruction, in case the skip
// can be merged into the next instruction.
int OutputRawData(Address up_to, ReturnSkip return_skip = kIgnoringReturn);
int32_t SerializeBackingStore(void* backing_store, int32_t byte_length);
void FixupIfNeutered();
void SerializeJSArrayBuffer();
void SerializeFixedTypedArray();
void SerializeExternalString();
void SerializeExternalStringAsSequentialString();
@ -330,6 +344,7 @@ class Serializer::ObjectSerializer : public ObjectVisitor {
Serializer* serializer_;
HeapObject* object_;
SnapshotByteSink* sink_;
std::map<void*, Smi*> backing_stores;
int reference_representation_;
int bytes_processed_so_far_;
bool code_has_been_output_;

View File

@ -588,6 +588,255 @@ TEST(CustomSnapshotDataBlob1) {
isolate1->Dispose();
}
struct InternalFieldData {
uint32_t data;
};
v8::StartupData SerializeInternalFields(v8::Local<v8::Object> holder, int index,
void* data) {
CHECK_EQ(reinterpret_cast<void*>(2016), data);
InternalFieldData* embedder_field = static_cast<InternalFieldData*>(
holder->GetAlignedPointerFromInternalField(index));
int size = sizeof(*embedder_field);
char* payload = new char[size];
DCHECK(embedder_field != nullptr);
// We simply use memcpy to serialize the content.
memcpy(payload, embedder_field, size);
return {payload, size};
}
std::vector<InternalFieldData*> deserialized_data;
void DeserializeInternalFields(v8::Local<v8::Object> holder, int index,
v8::StartupData payload, void* data) {
CHECK_EQ(reinterpret_cast<void*>(2017), data);
InternalFieldData* embedder_field = new InternalFieldData{0};
memcpy(embedder_field, payload.data, payload.raw_size);
holder->SetAlignedPointerInInternalField(index, embedder_field);
deserialized_data.push_back(embedder_field);
}
typedef std::vector<std::tuple<const char*, int32_t>> Int32Expectations;
void TestInt32Expectations(const Int32Expectations& expectations) {
for (const auto& e : expectations) {
ExpectInt32(std::get<0>(e), std::get<1>(e));
}
}
void TypedArrayTestHelper(const char* code,
const Int32Expectations& expectations) {
DisableAlwaysOpt();
i::FLAG_allow_natives_syntax = true;
v8::StartupData blob;
{
v8::SnapshotCreator creator;
v8::Isolate* isolate = creator.GetIsolate();
{
v8::HandleScope handle_scope(isolate);
v8::Local<v8::Context> context = v8::Context::New(isolate);
v8::Context::Scope context_scope(context);
CompileRun(code);
TestInt32Expectations(expectations);
creator.SetDefaultContext(
context, v8::SerializeInternalFieldsCallback(
SerializeInternalFields, reinterpret_cast<void*>(2016)));
}
blob =
creator.CreateBlob(v8::SnapshotCreator::FunctionCodeHandling::kClear);
}
v8::Isolate::CreateParams create_params;
create_params.snapshot_blob = &blob;
create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
v8::Isolate* isolate = TestIsolate::New(create_params);
{
v8::Isolate::Scope i_scope(isolate);
v8::HandleScope h_scope(isolate);
v8::Local<v8::Context> context = v8::Context::New(
isolate, NULL, v8::MaybeLocal<v8::ObjectTemplate>(),
v8::MaybeLocal<v8::Value>(),
v8::DeserializeInternalFieldsCallback(DeserializeInternalFields,
reinterpret_cast<void*>(2017)));
delete[] blob.data; // We can dispose of the snapshot blob now.
v8::Context::Scope c_scope(context);
TestInt32Expectations(expectations);
}
isolate->Dispose();
}
TEST(CustomSnapshotDataBlobWithOffHeapTypedArray) {
const char* code =
"var x = new Uint8Array(128);"
"x[0] = 12;"
"var arr = new Array(17);"
"arr[1] = 24;"
"var y = new Uint32Array(arr);"
"var buffer = new ArrayBuffer(128);"
"var z = new Int16Array(buffer);"
"z[0] = 48;";
Int32Expectations expectations = {std::make_tuple("x[0]", 12),
std::make_tuple("y[1]", 24),
std::make_tuple("z[0]", 48)};
TypedArrayTestHelper(code, expectations);
}
TEST(CustomSnapshotDataBlobSharedArrayBuffer) {
const char* code =
"var x = new Int32Array([12, 24, 48, 96]);"
"var y = new Uint8Array(x.buffer)";
Int32Expectations expectations = {
std::make_tuple("x[0]", 12), std::make_tuple("x[1]", 24),
std::make_tuple("y[0]", 12), std::make_tuple("y[1]", 0),
std::make_tuple("y[2]", 0), std::make_tuple("y[3]", 0),
std::make_tuple("y[4]", 24)};
TypedArrayTestHelper(code, expectations);
}
TEST(CustomSnapshotDataBlobDataView) {
const char* code =
"var x = new Int8Array([1, 2, 3, 4]);"
"var v = new DataView(x.buffer)";
Int32Expectations expectations = {std::make_tuple("v.getInt8(0)", 1),
std::make_tuple("v.getInt8(1)", 2),
std::make_tuple("v.getInt16(0)", 258),
std::make_tuple("v.getInt16(1)", 515)};
TypedArrayTestHelper(code, expectations);
}
TEST(CustomSnapshotDataBlobNeuteredArrayBuffer) {
const char* code =
"var x = new Int16Array([12, 24, 48]);"
"%ArrayBufferNeuter(x.buffer);";
Int32Expectations expectations = {std::make_tuple("x.buffer.byteLength", 0),
std::make_tuple("x.length", 0)};
DisableAlwaysOpt();
i::FLAG_allow_natives_syntax = true;
v8::StartupData blob;
{
v8::SnapshotCreator creator;
v8::Isolate* isolate = creator.GetIsolate();
{
v8::HandleScope handle_scope(isolate);
v8::Local<v8::Context> context = v8::Context::New(isolate);
v8::Context::Scope context_scope(context);
CompileRun(code);
TestInt32Expectations(expectations);
creator.SetDefaultContext(
context, v8::SerializeInternalFieldsCallback(
SerializeInternalFields, reinterpret_cast<void*>(2016)));
}
blob =
creator.CreateBlob(v8::SnapshotCreator::FunctionCodeHandling::kClear);
}
v8::Isolate::CreateParams create_params;
create_params.snapshot_blob = &blob;
create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
v8::Isolate* isolate = TestIsolate::New(create_params);
{
v8::Isolate::Scope i_scope(isolate);
v8::HandleScope h_scope(isolate);
v8::Local<v8::Context> context = v8::Context::New(
isolate, NULL, v8::MaybeLocal<v8::ObjectTemplate>(),
v8::MaybeLocal<v8::Value>(),
v8::DeserializeInternalFieldsCallback(DeserializeInternalFields,
reinterpret_cast<void*>(2017)));
delete[] blob.data; // We can dispose of the snapshot blob now.
v8::Context::Scope c_scope(context);
TestInt32Expectations(expectations);
v8::Local<v8::Value> x = CompileRun("x");
CHECK(x->IsTypedArray());
i::Handle<i::JSTypedArray> array =
i::Handle<i::JSTypedArray>::cast(v8::Utils::OpenHandle(*x));
CHECK(array->WasNeutered());
CHECK_NULL(
FixedTypedArrayBase::cast(array->elements())->external_pointer());
}
isolate->Dispose();
}
i::Handle<i::JSArrayBuffer> GetBufferFromTypedArray(
v8::Local<v8::Value> typed_array) {
CHECK(typed_array->IsTypedArray());
i::Handle<i::JSArrayBufferView> view = i::Handle<i::JSArrayBufferView>::cast(
v8::Utils::OpenHandle(*typed_array));
return i::handle(i::JSArrayBuffer::cast(view->buffer()));
}
TEST(CustomSnapshotDataBlobOnOrOffHeapTypedArray) {
const char* code =
"var x = new Uint8Array(8);"
"x[0] = 12;"
"x[7] = 24;"
"var y = new Int16Array([12, 24, 48]);"
"var z = new Int32Array(64);"
"z[0] = 96;";
Int32Expectations expectations = {
std::make_tuple("x[0]", 12), std::make_tuple("x[7]", 24),
std::make_tuple("y[2]", 48), std::make_tuple("z[0]", 96)};
DisableAlwaysOpt();
i::FLAG_allow_natives_syntax = true;
v8::StartupData blob;
{
v8::SnapshotCreator creator;
v8::Isolate* isolate = creator.GetIsolate();
{
v8::HandleScope handle_scope(isolate);
v8::Local<v8::Context> context = v8::Context::New(isolate);
v8::Context::Scope context_scope(context);
CompileRun(code);
TestInt32Expectations(expectations);
creator.SetDefaultContext(
context, v8::SerializeInternalFieldsCallback(
SerializeInternalFields, reinterpret_cast<void*>(2016)));
}
blob =
creator.CreateBlob(v8::SnapshotCreator::FunctionCodeHandling::kClear);
}
v8::Isolate::CreateParams create_params;
create_params.snapshot_blob = &blob;
create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
v8::Isolate* isolate = TestIsolate::New(create_params);
{
v8::Isolate::Scope i_scope(isolate);
v8::HandleScope h_scope(isolate);
v8::Local<v8::Context> context = v8::Context::New(
isolate, NULL, v8::MaybeLocal<v8::ObjectTemplate>(),
v8::MaybeLocal<v8::Value>(),
v8::DeserializeInternalFieldsCallback(DeserializeInternalFields,
reinterpret_cast<void*>(2017)));
delete[] blob.data; // We can dispose of the snapshot blob now.
v8::Context::Scope c_scope(context);
TestInt32Expectations(expectations);
i::Handle<i::JSArrayBuffer> buffer =
GetBufferFromTypedArray(CompileRun("x"));
// The resulting buffer should be on-heap.
CHECK_NULL(buffer->backing_store());
buffer = GetBufferFromTypedArray(CompileRun("y"));
CHECK_NULL(buffer->backing_store());
buffer = GetBufferFromTypedArray(CompileRun("z"));
// The resulting buffer should be off-heap.
CHECK_NOT_NULL(buffer->backing_store());
}
isolate->Dispose();
}
TEST(CustomSnapshotDataBlob2) {
DisableAlwaysOpt();
const char* source2 =
@ -2061,33 +2310,6 @@ TEST(SnapshotCreatorUnknownExternalReferences) {
delete[] blob.data;
}
struct InternalFieldData {
uint32_t data;
};
v8::StartupData SerializeInternalFields(v8::Local<v8::Object> holder, int index,
void* data) {
CHECK_EQ(reinterpret_cast<void*>(2016), data);
InternalFieldData* embedder_field = static_cast<InternalFieldData*>(
holder->GetAlignedPointerFromInternalField(index));
int size = sizeof(*embedder_field);
char* payload = new char[size];
// We simply use memcpy to serialize the content.
memcpy(payload, embedder_field, size);
return {payload, size};
}
std::vector<InternalFieldData*> deserialized_data;
void DeserializeInternalFields(v8::Local<v8::Object> holder, int index,
v8::StartupData payload, void* data) {
CHECK_EQ(reinterpret_cast<void*>(2017), data);
InternalFieldData* embedder_field = new InternalFieldData{0};
memcpy(embedder_field, payload.data, payload.raw_size);
holder->SetAlignedPointerInInternalField(index, embedder_field);
deserialized_data.push_back(embedder_field);
}
TEST(SnapshotCreatorTemplates) {
DisableAlwaysOpt();
v8::StartupData blob;