[turbofan] Introduce snapshot for serialized builtins

This change adds infrastructure to "snapshot" data that needs to be
serialized only once. This data lives in its own per-isolate zone,
wrapped in the new PerIsolateCompilerCache class.

This change reduces the "serialize standard objects" phase on the
TypeScript benchmark from ~69ms to ~30ms (a more than 50% improvement).

Bug: v8:7790
Change-Id: I6ce4f6fb993334969662fdd993d681945a9f3727
Reviewed-on: https://chromium-review.googlesource.com/1238920
Commit-Queue: Maya Lekova <mslekova@chromium.org>
Reviewed-by: Jaroslav Sevcik <jarin@chromium.org>
Reviewed-by: Georg Neis <neis@chromium.org>
Cr-Commit-Position: refs/heads/master@{#56309}
Authored by Maya Lekova on 2018-10-01 10:55:07 +02:00; committed by Commit Bot
Parent: ac972c586e
Commit: 8724b8d4fd
8 changed files with 291 additions and 55 deletions
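In short, the first optimizing compilation now builds the builtins' ObjectData once, stores it in a per-isolate cache, and every later compilation adopts that snapshot instead of re-serializing. A condensed sketch of the flow, paraphrased from the js-heap-broker.cc hunks below (all names are the ones this commit introduces; this is not extra code from the patch):

// Condensed from JSHeapBroker::SerializeShareableObjects (see below).
void JSHeapBroker::SerializeShareableObjects() {
  PerIsolateCompilerCache::Setup(isolate());
  compiler_cache_ = isolate()->compiler_cache();

  if (compiler_cache_->HasSnapshot()) {
    // Fast path: copy the previously built snapshot into this broker's zone.
    refs_ = new (zone()) RefsMap(compiler_cache_->GetSnapshot(), zone());
    return;
  }

  // First compilation only: serialize the shareable builtins, allocating
  // their ObjectData in the long-lived compiler-cache zone, then publish
  // the resulting map as the shared snapshot.
  refs_ =
      new (zone()) RefsMap(kInitialRefsBucketCount, AddressMatcher(), zone());
  current_zone_ = compiler_cache_->zone();
  // ... GetOrCreateData(...) for the shareable builtins ...
  compiler_cache()->SetSnapshot(refs_);
  current_zone_ = broker_zone_;
}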

BUILD.gn

@@ -1837,6 +1837,7 @@ v8_source_set("v8_base") {
     "src/compiler/operator.h",
     "src/compiler/osr.cc",
     "src/compiler/osr.h",
+    "src/compiler/per-isolate-compiler-cache.h",
     "src/compiler/persistent-map.h",
     "src/compiler/pipeline-statistics.cc",
     "src/compiler/pipeline-statistics.h",
@@ -1848,6 +1849,8 @@ v8_source_set("v8_base") {
     "src/compiler/raw-machine-assembler.h",
     "src/compiler/redundancy-elimination.cc",
     "src/compiler/redundancy-elimination.h",
+    "src/compiler/refs-map.cc",
+    "src/compiler/refs-map.h",
     "src/compiler/register-allocator-verifier.cc",
     "src/compiler/register-allocator-verifier.h",
     "src/compiler/register-allocator.cc",

src/compiler/js-heap-broker.cc

@@ -8,6 +8,7 @@
 #include "src/boxed-float.h"
 #include "src/code-factory.h"
 #include "src/compiler/graph-reducer.h"
+#include "src/compiler/per-isolate-compiler-cache.h"
 #include "src/objects-inl.h"
 #include "src/objects/js-array-inl.h"
 #include "src/objects/js-regexp-inl.h"
@@ -22,9 +23,6 @@ namespace compiler {
 HEAP_BROKER_OBJECT_LIST(FORWARD_DECL)
 #undef FORWARD_DECL
 
-// TODO(neis): It would be nice to share the serialized data for read-only
-// objects.
-
 // There are three kinds of ObjectData values.
 //
 // kSmi: The underlying V8 object is a Smi and the data is an instance of the
@@ -47,6 +45,8 @@ class ObjectData : public ZoneObject {
   ObjectData(JSHeapBroker* broker, ObjectData** storage, Handle<Object> object,
              ObjectDataKind kind)
       : object_(object), kind_(kind) {
+    // This assignment ensures we don't end up inserting the same object
+    // in an endless recursion.
     *storage = this;
     broker->Trace("Creating data %p for handle %" V8PRIuPTR " (", this,
@@ -1332,8 +1332,17 @@ ObjectRef ContextRef::get(int index) const {
   return ObjectRef(broker(), value);
 }
 
-JSHeapBroker::JSHeapBroker(Isolate* isolate, Zone* zone)
-    : isolate_(isolate), zone_(zone), refs_(zone, kInitialRefsBucketCount) {
+JSHeapBroker::JSHeapBroker(Isolate* isolate, Zone* broker_zone)
+    : isolate_(isolate),
+      broker_zone_(broker_zone),
+      current_zone_(broker_zone),
+      refs_(new (zone())
+                RefsMap(kMinimalRefsBucketCount, AddressMatcher(), zone())) {
+  // Note that this initialization of the refs_ pointer with the minimal
+  // initial capacity is redundant in the normal use case (concurrent
+  // compilation enabled, standard objects to be serialized), as the map
+  // is going to be replaced immediately with a larger capacity one.
+  // It doesn't seem to affect the performance in a noticeable way though.
   Trace("Constructing heap broker.\n");
 }
@@ -1352,8 +1361,7 @@ void JSHeapBroker::StartSerializing() {
   CHECK_EQ(mode_, kDisabled);
   Trace("Starting serialization.\n");
   mode_ = kSerializing;
-  refs_.clear();
-  SetNativeContextRef();
+  refs_->Clear();
 }
 
 void JSHeapBroker::StopSerializing() {
@@ -1377,15 +1385,87 @@ void JSHeapBroker::SetNativeContextRef() {
   native_context_ = NativeContextRef(this, isolate()->native_context());
 }
 
+bool IsShareable(Handle<Object> object, Isolate* isolate) {
+  Builtins* const b = isolate->builtins();
+  int index;
+  RootIndex root_index;
+  return (object->IsHeapObject() &&
+          b->IsBuiltinHandle(Handle<HeapObject>::cast(object), &index)) ||
+         isolate->heap()->IsRootHandle(object, &root_index);
+}
+
+void JSHeapBroker::SerializeShareableObjects() {
+  PerIsolateCompilerCache::Setup(isolate());
+  compiler_cache_ = isolate()->compiler_cache();
+
+  if (compiler_cache_->HasSnapshot()) {
+    RefsMap* snapshot = compiler_cache_->GetSnapshot();
+    refs_ = new (zone()) RefsMap(snapshot, zone());
+    return;
+  }
+
+  TraceScope tracer(
+      this, "JSHeapBroker::SerializeShareableObjects (building snapshot)");
+
+  refs_ =
+      new (zone()) RefsMap(kInitialRefsBucketCount, AddressMatcher(), zone());
+
+  current_zone_ = compiler_cache_->zone();
+
+  Builtins* const b = isolate()->builtins();
+  {
+    Builtins::Name builtins[] = {
+        Builtins::kAllocateInNewSpace,
+        Builtins::kAllocateInOldSpace,
+        Builtins::kArgumentsAdaptorTrampoline,
+        Builtins::kArrayConstructorImpl,
+        Builtins::kCallFunctionForwardVarargs,
+        Builtins::kCallFunction_ReceiverIsAny,
+        Builtins::kCallFunction_ReceiverIsNotNullOrUndefined,
+        Builtins::kCallFunction_ReceiverIsNullOrUndefined,
+        Builtins::kConstructFunctionForwardVarargs,
+        Builtins::kForInFilter,
+        Builtins::kJSBuiltinsConstructStub,
+        Builtins::kJSConstructStubGeneric,
+        Builtins::kStringAdd_CheckNone,
+        Builtins::kStringAdd_ConvertLeft,
+        Builtins::kStringAdd_ConvertRight,
+        Builtins::kToNumber,
+        Builtins::kToObject,
+    };
+    for (auto id : builtins) {
+      GetOrCreateData(b->builtin_handle(id));
+    }
+  }
+
+  for (int32_t id = 0; id < Builtins::builtin_count; ++id) {
+    if (Builtins::KindOf(id) == Builtins::TFJ) {
+      GetOrCreateData(b->builtin_handle(id));
+    }
+  }
+
+  for (RefsMap::Entry* p = refs_->Start(); p != nullptr; p = refs_->Next(p)) {
+    CHECK(IsShareable(p->value->object(), isolate()));
+  }
+
+  // TODO(mslekova):
+  // Serialize root objects (from factory).
+  compiler_cache()->SetSnapshot(refs_);
+
+  current_zone_ = broker_zone_;
+}
+
 void JSHeapBroker::SerializeStandardObjects() {
   if (mode() == kDisabled) return;
   CHECK_EQ(mode(), kSerializing);
+
+  SerializeShareableObjects();
+
   TraceScope tracer(this, "JSHeapBroker::SerializeStandardObjects");
 
+  SetNativeContextRef();
   native_context().Serialize();
 
-  Builtins* const b = isolate()->builtins();
   Factory* const f = isolate()->factory();
 
   // Maps, strings, oddballs
@@ -1461,37 +1541,6 @@ void JSHeapBroker::SerializeStandardObjects() {
       ->AsPropertyCell()
       ->Serialize(this);
 
-  // Builtins
-  {
-    Builtins::Name builtins[] = {
-        Builtins::kAllocateInNewSpace,
-        Builtins::kAllocateInOldSpace,
-        Builtins::kArgumentsAdaptorTrampoline,
-        Builtins::kArrayConstructorImpl,
-        Builtins::kCallFunctionForwardVarargs,
-        Builtins::kCallFunction_ReceiverIsAny,
-        Builtins::kCallFunction_ReceiverIsNotNullOrUndefined,
-        Builtins::kCallFunction_ReceiverIsNullOrUndefined,
-        Builtins::kConstructFunctionForwardVarargs,
-        Builtins::kForInFilter,
-        Builtins::kJSBuiltinsConstructStub,
-        Builtins::kJSConstructStubGeneric,
-        Builtins::kStringAdd_CheckNone,
-        Builtins::kStringAdd_ConvertLeft,
-        Builtins::kStringAdd_ConvertRight,
-        Builtins::kToNumber,
-        Builtins::kToObject,
-    };
-    for (auto id : builtins) {
-      GetOrCreateData(b->builtin_handle(id));
-    }
-  }
-
-  for (int32_t id = 0; id < Builtins::builtin_count; ++id) {
-    if (Builtins::KindOf(id) == Builtins::TFJ) {
-      GetOrCreateData(b->builtin_handle(id));
-    }
-  }
-
   // CEntry stub
   GetOrCreateData(
       CodeFactory::CEntry(isolate(), 1, kDontSaveFPRegs, kArgvOnStack, true));
@@ -1500,16 +1549,16 @@ void JSHeapBroker::SerializeStandardObjects() {
 }
 
 ObjectData* JSHeapBroker::GetData(Handle<Object> object) const {
-  auto it = refs_.find(object.address());
-  return it != refs_.end() ? it->second : nullptr;
+  RefsMap::Entry* entry = refs_->Lookup(object.address());
+  return entry ? entry->value : nullptr;
 }
 
 // clang-format off
 ObjectData* JSHeapBroker::GetOrCreateData(Handle<Object> object) {
   CHECK(SerializingAllowed());
-  auto insertion_result = refs_.insert({object.address(), nullptr});
-  ObjectData** data_storage = &(insertion_result.first->second);
-  if (insertion_result.second) {
+  RefsMap::Entry* entry = refs_->LookupOrInsert(object.address(), zone());
+  ObjectData** data_storage = &(entry->value);
+  if (*data_storage == nullptr) {
     // TODO(neis): Remove these Allow* once we serialize everything upfront.
     AllowHandleAllocation handle_allocation;
     AllowHandleDereference handle_dereference;
@@ -2072,15 +2121,16 @@ ObjectRef::ObjectRef(JSHeapBroker* broker, Handle<Object> object)
       data_ = broker->GetOrCreateData(object);
       break;
     case JSHeapBroker::kDisabled: {
-      auto insertion_result = broker->refs_.insert({object.address(), nullptr});
-      ObjectData** data_storage = &(insertion_result.first->second);
-      if (insertion_result.second) {
+      RefsMap::Entry* entry =
+          broker->refs_->LookupOrInsert(object.address(), broker->zone());
+      ObjectData** storage = &(entry->value);
+      if (*storage == nullptr) {
        AllowHandleDereference handle_dereference;
-        new (broker->zone())
-            ObjectData(broker, data_storage, object,
-                       object->IsSmi() ? kSmi : kUnserializedHeapObject);
+        entry->value = new (broker->zone())
+            ObjectData(broker, storage, object,
+                       object->IsSmi() ? kSmi : kUnserializedHeapObject);
       }
-      data_ = *data_storage;
+      data_ = *storage;
       break;
     }
     case JSHeapBroker::kRetired:

src/compiler/js-heap-broker.h

@@ -7,6 +7,7 @@
 
 #include "src/base/compiler-specific.h"
 #include "src/base/optional.h"
+#include "src/compiler/refs-map.h"
 #include "src/globals.h"
 #include "src/objects.h"
 #include "src/objects/builtin-function-id.h"
@@ -473,15 +474,18 @@ class InternalizedStringRef : public StringRef {
   using StringRef::StringRef;
 };
 
+class PerIsolateCompilerCache;
+
 class V8_EXPORT_PRIVATE JSHeapBroker : public NON_EXPORTED_BASE(ZoneObject) {
  public:
-  JSHeapBroker(Isolate* isolate, Zone* zone);
+  JSHeapBroker(Isolate* isolate, Zone* broker_zone);
   void SetNativeContextRef();
   void SerializeStandardObjects();
 
   Isolate* isolate() const { return isolate_; }
-  Zone* zone() const { return zone_; }
+  Zone* zone() const { return current_zone_; }
   NativeContextRef native_context() const { return native_context_.value(); }
+  PerIsolateCompilerCache* compiler_cache() const { return compiler_cache_; }
 
   enum BrokerMode { kDisabled, kSerializing, kSerialized, kRetired };
   BrokerMode mode() const { return mode_; }
@@ -506,15 +510,20 @@ class V8_EXPORT_PRIVATE JSHeapBroker : public NON_EXPORTED_BASE(ZoneObject) {
   friend class ObjectRef;
   friend class ObjectData;
 
+  void SerializeShareableObjects();
+
   Isolate* const isolate_;
-  Zone* const zone_;
+  Zone* const broker_zone_;
+  Zone* current_zone_;
   base::Optional<NativeContextRef> native_context_;
-  ZoneUnorderedMap<Address, ObjectData*> refs_;
+  RefsMap* refs_;
   BrokerMode mode_ = kDisabled;
   unsigned tracing_indentation_ = 0;
+  PerIsolateCompilerCache* compiler_cache_;
 
-  static const size_t kInitialRefsBucketCount = 1000;
+  static const size_t kMinimalRefsBucketCount = 8;     // must be power of 2
+  static const size_t kInitialRefsBucketCount = 1024;  // must be power of 2
 };
 
 #define ASSIGN_RETURN_NO_CHANGE_IF_DATA_MISSING(something_var, \

src/compiler/per-isolate-compiler-cache.h (new file)

@@ -0,0 +1,64 @@
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_PER_ISOLATE_COMPILER_CACHE_H_
#define V8_COMPILER_PER_ISOLATE_COMPILER_CACHE_H_

#include "src/compiler/refs-map.h"
#include "src/isolate.h"
#include "src/zone/zone-containers.h"

namespace v8 {
namespace internal {

class Isolate;
class Zone;

namespace compiler {

class ObjectData;

// This class serves as a per-isolate container of data that should be
// persisted between compiler runs. For now it stores the code builtins
// so they are not serialized on each compiler run.
class PerIsolateCompilerCache : public ZoneObject {
 public:
  explicit PerIsolateCompilerCache(Zone* zone)
      : zone_(zone), refs_snapshot_(nullptr) {}

  RefsMap* GetSnapshot() { return refs_snapshot_; }
  void SetSnapshot(RefsMap* refs) {
    DCHECK_NULL(refs_snapshot_);
    DCHECK(!refs->IsEmpty());
    refs_snapshot_ = new (zone_) RefsMap(refs, zone_);
  }
  bool HasSnapshot() const { return refs_snapshot_; }

  Zone* zone() const { return zone_; }

  static void Setup(Isolate* isolate) {
    if (isolate->compiler_cache()) return;

    // The following zone is supposed to contain compiler-related objects
    // that should live through all compilations; it is not meant for the
    // per-compilation data that the broker zone holds.
    Zone* compiler_zone = new Zone(isolate->allocator(), "Compiler zone");
    PerIsolateCompilerCache* compiler_cache =
        new (compiler_zone) PerIsolateCompilerCache(compiler_zone);
    isolate->set_compiler_utils(compiler_cache, compiler_zone);
  }

 private:
  Zone* const zone_;
  RefsMap* refs_snapshot_;
};

}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_PER_ISOLATE_COMPILER_CACHE_H_
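
To make the ownership explicit: the cache and everything snapshotted into it live in a dedicated zone owned by the isolate, not in any single compilation's zone. A minimal, hypothetical usage sketch follows; it relies only on the interfaces added in this commit, and EnsureCompilerCache itself is not part of the patch.

// Hypothetical caller; the types and methods are the ones introduced above.
void EnsureCompilerCache(Isolate* isolate) {
  // Setup() is idempotent: on first use it creates the long-lived
  // "Compiler zone", allocates the cache inside it, and registers both with
  // the isolate via set_compiler_utils(). Isolate::Deinit() (see the
  // isolate.cc hunk below) deletes that zone, releasing the cache and any
  // snapshot stored in it.
  PerIsolateCompilerCache::Setup(isolate);
  PerIsolateCompilerCache* cache = isolate->compiler_cache();
  CHECK_NOT_NULL(cache);

  // Once a broker has published a snapshot via SetSnapshot() (which copies
  // the map into the cache's own zone), later compilations can test for it
  // and copy it out again.
  if (cache->HasSnapshot()) {
    RefsMap* snapshot = cache->GetSnapshot();
    CHECK(!snapshot->IsEmpty());
  }
}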

src/compiler/refs-map.cc (new file, 35 lines)

@@ -0,0 +1,35 @@
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/refs-map.h"

namespace v8 {
namespace internal {
namespace compiler {

using UnderlyingMap =
    base::TemplateHashMapImpl<Address, ObjectData*, AddressMatcher,
                              ZoneAllocationPolicy>;

RefsMap::RefsMap(uint32_t capacity, AddressMatcher match, Zone* zone)
    : UnderlyingMap(capacity, match, ZoneAllocationPolicy(zone)) {}

RefsMap::RefsMap(const RefsMap* other, Zone* zone)
    : UnderlyingMap(other, ZoneAllocationPolicy(zone)) {}

RefsMap::Entry* RefsMap::Lookup(const Address& key) const {
  return UnderlyingMap::Lookup(key, Hash(key));
}

RefsMap::Entry* RefsMap::LookupOrInsert(const Address& key, Zone* zone) {
  return UnderlyingMap::LookupOrInsert(key, RefsMap::Hash(key),
                                       []() { return nullptr; },
                                       ZoneAllocationPolicy(zone));
}

uint32_t RefsMap::Hash(Address addr) { return static_cast<uint32_t>(addr); }

}  // namespace compiler
}  // namespace internal
}  // namespace v8

src/compiler/refs-map.h (new file, 54 lines)

@@ -0,0 +1,54 @@
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_REFS_MAP_H_
#define V8_COMPILER_REFS_MAP_H_

#include "src/base/hashmap.h"
#include "src/globals.h"
#include "src/zone/zone.h"

namespace v8 {
namespace internal {
namespace compiler {

class ObjectData;

class AddressMatcher : public base::KeyEqualityMatcher<Address> {
 public:
  bool operator()(uint32_t hash1, uint32_t hash2, const Address& key1,
                  const Address& key2) const {
    return key1 == key2;
  }
};

// This class employs our own implementation of hash map for the purpose of
// storing the mapping between canonical Addresses and allocated ObjectData.
// It's used as the refs map in JSHeapBroker and as the snapshot in
// PerIsolateCompilerCache, as we need a cheap copy between the two and
// std::unordered_map doesn't satisfy this requirement, as it rehashes the
// whole map and copies all entries one by one.
class RefsMap
    : public base::TemplateHashMapImpl<Address, ObjectData*, AddressMatcher,
                                       ZoneAllocationPolicy>,
      public ZoneObject {
 public:
  RefsMap(uint32_t capacity, AddressMatcher match, Zone* zone);
  RefsMap(const RefsMap* other, Zone* zone);

  bool IsEmpty() const { return occupancy() == 0; }

  // Wrappers around methods from UnderlyingMap
  Entry* Lookup(const Address& key) const;
  Entry* LookupOrInsert(const Address& key, Zone* zone);

 private:
  static uint32_t Hash(Address addr);
};

}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_REFS_MAP_H_
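
The broker only ever touches this map through two access patterns, a read-only probe and an insert-if-absent, mirrored here in a hypothetical helper (the RefsMap API is as declared above; LookupOrCreate is illustrative only, not part of the patch):

// Hypothetical helper showing the two RefsMap access patterns used by the
// broker (compare GetData and GetOrCreateData in js-heap-broker.cc above).
ObjectData* LookupOrCreate(RefsMap* refs, Zone* zone, Address canonical) {
  // Read-only probe: Lookup() returns nullptr when the address is unknown.
  RefsMap::Entry* existing = refs->Lookup(canonical);
  if (existing != nullptr) return existing->value;

  // Insert-if-absent: a freshly inserted entry starts with a nullptr value,
  // which the caller is expected to fill in (the broker stores a newly
  // zone-allocated ObjectData there).
  RefsMap::Entry* entry = refs->LookupOrInsert(canonical, zone);
  if (entry->value == nullptr) {
    // entry->value = new (zone) ObjectData(...);  // as in GetOrCreateData
  }
  return entry->value;
}

The copy constructor RefsMap(const RefsMap*, Zone*) is what makes the snapshot cheap to adopt: the entry table is copied into the destination zone without the per-entry rehashing that std::unordered_map would do, and the ObjectData values, being plain pointers, stay shared with the compiler-cache zone.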

src/isolate.cc

@@ -2688,6 +2688,10 @@ void Isolate::Deinit() {
   delete root_index_map_;
   root_index_map_ = nullptr;
 
+  delete compiler_zone_;
+  compiler_zone_ = nullptr;
+  compiler_cache_ = nullptr;
+
   ClearSerializerData();
 }
 
@@ -2963,6 +2967,7 @@ bool Isolate::Init(StartupDeserializer* des) {
   date_cache_ = new DateCache();
   heap_profiler_ = new HeapProfiler(heap());
   interpreter_ = new interpreter::Interpreter(this);
+
   compiler_dispatcher_ =
       new CompilerDispatcher(this, V8::GetCurrentPlatform(), FLAG_stack_size);

src/isolate.h

@@ -107,6 +107,10 @@ namespace interpreter {
 class Interpreter;
 }
 
+namespace compiler {
+class PerIsolateCompilerCache;
+}
+
 namespace wasm {
 class WasmEngine;
 }
@@ -1451,6 +1455,15 @@ class Isolate : private HiddenFactory {
 
   interpreter::Interpreter* interpreter() const { return interpreter_; }
 
+  compiler::PerIsolateCompilerCache* compiler_cache() const {
+    return compiler_cache_;
+  }
+  void set_compiler_utils(compiler::PerIsolateCompilerCache* cache,
+                          Zone* zone) {
+    compiler_cache_ = cache;
+    compiler_zone_ = zone;
+  }
+
   AccountingAllocator* allocator() { return allocator_; }
 
   CompilerDispatcher* compiler_dispatcher() const {
@@ -1742,6 +1755,9 @@ class Isolate : private HiddenFactory {
 
   interpreter::Interpreter* interpreter_;
 
+  compiler::PerIsolateCompilerCache* compiler_cache_ = nullptr;
+  Zone* compiler_zone_ = nullptr;
+
   CompilerDispatcher* compiler_dispatcher_;
 
   typedef std::pair<InterruptCallback, void*> InterruptEntry;