[ptr-compr] Introduce IsolateData class

... containing RootsTable, ExternalReferenceTable, builtins array and
potentially some other data that can be accessed via the RootRegister.

This is a preliminary step before adding support for pointer-compression
friendly heap layout.

Bug: v8:8182
Cq-Include-Trybots: luci.chromium.try:linux_chromium_rel_ng
Change-Id: I2899f657aaff1351a5304afa0b1a4c5ae4cfc31d
Reviewed-on: https://chromium-review.googlesource.com/c/1245426
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Reviewed-by: Ben Titzer <titzer@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Benedikt Meurer <bmeurer@chromium.org>
Commit-Queue: Igor Sheludko <ishell@chromium.org>
Cr-Commit-Position: refs/heads/master@{#56551}
This commit is contained in:
Igor Sheludko 2018-10-10 17:25:33 +02:00 committed by Commit Bot
parent 812e768cbe
commit b929b52e36
30 changed files with 253 additions and 180 deletions

View File

@ -2132,6 +2132,7 @@ v8_source_set("v8_base") {
"src/interpreter/interpreter.h",
"src/intl.cc",
"src/intl.h",
"src/isolate-data.h",
"src/isolate-inl.h",
"src/isolate.cc",
"src/isolate.h",

View File

@ -29,7 +29,7 @@ class WasmBuiltinsAssembler : public CodeStubAssembler {
kHeapObjectTag)));
TNode<Code> target = UncheckedCast<Code>(Load(
MachineType::TaggedPointer(), roots,
IntPtrConstant(Heap::roots_to_builtins_offset() + id * kPointerSize)));
IntPtrConstant(IsolateData::kBuiltinsTableOffset + id * kPointerSize)));
return target;
}

View File

@ -26,7 +26,7 @@ uint32_t BuiltinsConstantsTableBuilder::AddObject(Handle<Object> object) {
// Roots must not be inserted into the constants table as they are already
// accessible from the root list.
RootIndex root_list_index;
DCHECK(!isolate_->heap()->IsRootHandle(object, &root_list_index));
DCHECK(!isolate_->roots_table().IsRootHandle(object, &root_list_index));
// Not yet finalized.
DCHECK_EQ(ReadOnlyRoots(isolate_).empty_fixed_array(),
@ -56,7 +56,7 @@ void BuiltinsConstantsTableBuilder::PatchSelfReference(
// Roots must not be inserted into the constants table as they are already
// accessible from the root list.
RootIndex root_list_index;
DCHECK(!isolate_->heap()->IsRootHandle(code_object, &root_list_index));
DCHECK(!isolate_->roots_table().IsRootHandle(code_object, &root_list_index));
// Not yet finalized.
DCHECK_EQ(ReadOnlyRoots(isolate_).empty_fixed_array(),

View File

@ -459,8 +459,7 @@ bool CodeGenerator::IsMaterializableFromRoot(Handle<HeapObject> object,
const CallDescriptor* incoming_descriptor =
linkage()->GetIncomingDescriptor();
if (incoming_descriptor->flags() & CallDescriptor::kCanUseRoots) {
Heap* heap = isolate()->heap();
return heap->IsRootHandle(object, index_return) &&
return isolate()->roots_table().IsRootHandle(object, index_return) &&
RootsTable::IsImmortalImmovable(*index_return);
}
return false;

View File

@ -472,7 +472,7 @@ InstructionOperand OperandForDeopt(Isolate* isolate, OperandGenerator* g,
Handle<HeapObject> constant = HeapConstantOf(input->op());
RootIndex root_index;
if (isolate->heap()->IsRootHandle(constant, &root_index) &&
if (isolate->roots_table().IsRootHandle(constant, &root_index) &&
root_index == RootIndex::kOptimizedOut) {
// For an optimized-out object we return an invalid instruction
// operand, so that we take the fast path for optimized-out values.

View File

@ -1393,7 +1393,7 @@ bool IsShareable(Handle<Object> object, Isolate* isolate) {
RootIndex root_index;
return (object->IsHeapObject() &&
b->IsBuiltinHandle(Handle<HeapObject>::cast(object), &index)) ||
isolate->heap()->IsRootHandle(object, &root_index);
isolate->roots_table().IsRootHandle(object, &root_index);
}
void JSHeapBroker::SerializeShareableObjects() {

View File

@ -1220,9 +1220,9 @@ class RepresentationSelector {
}
if (value_type.IsHeapConstant()) {
RootIndex root_index;
Heap* heap = jsgraph_->isolate()->heap();
if (heap->IsRootHandle(value_type.AsHeapConstant()->Value(),
&root_index)) {
const RootsTable& roots_table = jsgraph_->isolate()->roots_table();
if (roots_table.IsRootHandle(value_type.AsHeapConstant()->Value(),
&root_index)) {
if (RootsTable::IsImmortalImmovable(root_index)) {
// Write barriers are unnecessary for immortal immovable roots.
return kNoWriteBarrier;

View File

@ -2996,7 +2996,7 @@ Node* WasmGraphBuilder::BuildLoadBuiltinFromInstance(int builtin_index) {
Node* roots =
LOAD_INSTANCE_FIELD(RootsArrayAddress, MachineType::TaggedPointer());
return LOAD_TAGGED_POINTER(
roots, Heap::roots_to_builtins_offset() + builtin_index * kPointerSize);
roots, IsolateData::kBuiltinsTableOffset + builtin_index * kPointerSize);
}
// Only call this function for code which is not reused across instantiations,

View File

@ -1720,11 +1720,11 @@ void VisitWord64Compare(InstructionSelector* selector, Node* node,
FlagsContinuation* cont) {
X64OperandGenerator g(selector);
if (selector->CanUseRootsRegister()) {
Heap* const heap = selector->isolate()->heap();
const RootsTable& roots_table = selector->isolate()->roots_table();
RootIndex root_index;
HeapObjectBinopMatcher m(node);
if (m.right().HasValue() &&
heap->IsRootHandle(m.right().Value(), &root_index)) {
roots_table.IsRootHandle(m.right().Value(), &root_index)) {
if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
InstructionCode opcode =
kX64Cmp | AddressingModeField::encode(kMode_Root);
@ -1733,7 +1733,7 @@ void VisitWord64Compare(InstructionSelector* selector, Node* node,
g.TempImmediate(TurboAssemblerBase::RootRegisterOffset(root_index)),
g.UseRegister(m.left().node()), cont);
} else if (m.left().HasValue() &&
heap->IsRootHandle(m.left().Value(), &root_index)) {
roots_table.IsRootHandle(m.left().Value(), &root_index)) {
InstructionCode opcode =
kX64Cmp | AddressingModeField::encode(kMode_Root);
return VisitCompare(

View File

@ -16,6 +16,7 @@
#include "src/disasm.h"
#include "src/ic/ic.h"
#include "src/instruction-stream.h"
#include "src/isolate-data.h"
#include "src/macro-assembler.h"
#include "src/objects-inl.h"
#include "src/snapshot/serializer-common.h"
@ -55,13 +56,13 @@ class V8NameConverter: public disasm::NameConverter {
void V8NameConverter::InitExternalRefsCache() const {
ExternalReferenceTable* external_reference_table =
isolate_->heap()->external_reference_table();
isolate_->external_reference_table();
if (!external_reference_table->is_initialized()) return;
base::AddressRegion addressable_region =
isolate_->root_register_addressable_region();
Address roots_start =
reinterpret_cast<Address>(isolate_->heap()->roots_array_start());
reinterpret_cast<Address>(isolate_->roots_array_start());
for (uint32_t i = 0; i < external_reference_table->size(); i++) {
Address address = external_reference_table->address(i);
@ -116,13 +117,12 @@ const char* V8NameConverter::NameInCode(byte* addr) const {
const char* V8NameConverter::RootRelativeName(int offset) const {
if (isolate_ == nullptr) return nullptr;
const int kRootsStart = 0;
const int kRootsEnd = Heap::roots_to_external_reference_table_offset();
const int kExtRefsStart = kRootsEnd;
const int kExtRefsEnd = Heap::roots_to_builtins_offset();
const int kBuiltinsStart = kExtRefsEnd;
const int kBuiltinsEnd =
kBuiltinsStart + Builtins::builtin_count * kPointerSize;
const int kRootsStart = IsolateData::kRootsTableOffset;
const int kRootsEnd = IsolateData::kRootsTableEndOffset;
const int kExtRefsStart = IsolateData::kExternalReferenceTableOffset;
const int kExtRefsEnd = IsolateData::kExternalReferenceTableEndOffset;
const int kBuiltinsStart = IsolateData::kBuiltinsTableOffset;
const int kBuiltinsEnd = IsolateData::kBuiltinsTableEndOffset;
if (kRootsStart <= offset && offset < kRootsEnd) {
uint32_t offset_in_roots_table = offset - kRootsStart;
@ -150,12 +150,12 @@ const char* V8NameConverter::RootRelativeName(int offset) const {
}
// Likewise if the external reference table is uninitialized.
if (!isolate_->heap()->external_reference_table()->is_initialized()) {
if (!isolate_->external_reference_table()->is_initialized()) {
return nullptr;
}
SNPrintF(v8_buffer_, "external reference (%s)",
isolate_->heap()->external_reference_table()->NameFromOffset(
isolate_->external_reference_table()->NameFromOffset(
offset_in_extref_table));
return v8_buffer_.start();

View File

@ -384,7 +384,7 @@ ExternalReference ExternalReference::log_leave_external_function() {
}
ExternalReference ExternalReference::roots_array_start(Isolate* isolate) {
return ExternalReference(isolate->heap()->roots_array_start());
return ExternalReference(isolate->roots_array_start());
}
ExternalReference ExternalReference::allocation_sites_list_address(

View File

@ -29,9 +29,8 @@ bool HandleBase::IsDereferenceAllowed(DereferenceCheckMode mode) const {
HeapObject* heap_object = HeapObject::cast(object);
Isolate* isolate;
if (!Isolate::FromWritableHeapObject(heap_object, &isolate)) return true;
Heap* heap = isolate->heap();
RootIndex root_index;
if (heap->IsRootHandleLocation(location_, &root_index) &&
if (isolate->roots_table().IsRootHandleLocation(location_, &root_index) &&
RootsTable::IsImmortalImmovable(root_index)) {
return true;
}

View File

@ -16,10 +16,10 @@
namespace v8 {
namespace internal {
#define ROOT_ACCESSOR(type, name, CamelName) \
Handle<type> Factory::name() { \
return Handle<type>(bit_cast<type**>( \
&isolate()->heap()->roots_[RootIndex::k##CamelName])); \
#define ROOT_ACCESSOR(type, name, CamelName) \
Handle<type> Factory::name() { \
return Handle<type>( \
bit_cast<type**>(&isolate()->roots_table()[RootIndex::k##CamelName])); \
}
ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR

View File

@ -773,9 +773,8 @@ Handle<SeqOneByteString> Factory::AllocateRawOneByteInternalizedString(
int length, uint32_t hash_field) {
CHECK_GE(String::kMaxLength, length);
// The canonical empty_string is the only zero-length string we allow.
DCHECK_IMPLIES(
length == 0,
isolate()->heap()->roots_[RootIndex::kempty_string] == nullptr);
DCHECK_IMPLIES(length == 0,
isolate()->roots_table()[RootIndex::kempty_string] == nullptr);
Map* map = *one_byte_internalized_string_map();
int size = SeqOneByteString::SizeFor(length);

View File

@ -55,8 +55,10 @@ HeapObject* AllocationResult::ToObjectChecked() {
return HeapObject::cast(object_);
}
#define ROOT_ACCESSOR(type, name, CamelName) \
type* Heap::name() { return type::cast(roots_[RootIndex::k##CamelName]); }
#define ROOT_ACCESSOR(type, name, CamelName) \
type* Heap::name() { \
return type::cast(roots_table()[RootIndex::k##CamelName]); \
}
MUTABLE_ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR
@ -68,11 +70,35 @@ MUTABLE_ROOT_LIST(ROOT_ACCESSOR)
!RootsTable::IsImmortalImmovable(RootIndex::k##CamelName)); \
DCHECK_IMPLIES(RootsTable::IsImmortalImmovable(RootIndex::k##CamelName), \
IsImmovable(HeapObject::cast(value))); \
roots_[RootIndex::k##CamelName] = value; \
roots_table()[RootIndex::k##CamelName] = value; \
}
ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR
void Heap::SetRootCodeStubs(SimpleNumberDictionary* value) {
roots_table()[RootIndex::kCodeStubs] = value;
}
void Heap::SetRootMaterializedObjects(FixedArray* objects) {
roots_table()[RootIndex::kMaterializedObjects] = objects;
}
void Heap::SetRootScriptList(Object* value) {
roots_table()[RootIndex::kScriptList] = value;
}
void Heap::SetRootStringTable(StringTable* value) {
roots_table()[RootIndex::kStringTable] = value;
}
void Heap::SetRootNoScriptSharedFunctionInfos(Object* value) {
roots_table()[RootIndex::kNoScriptSharedFunctionInfos] = value;
}
void Heap::SetMessageListeners(TemplateList* value) {
roots_table()[RootIndex::kMessageListeners] = value;
}
PagedSpace* Heap::paged_space(int idx) {
DCHECK_NE(idx, LO_SPACE);
DCHECK_NE(idx, NEW_SPACE);

View File

@ -605,10 +605,6 @@ const char* Heap::GetSpaceName(int idx) {
return nullptr;
}
void Heap::SetRootCodeStubs(SimpleNumberDictionary* value) {
roots_[RootIndex::kCodeStubs] = value;
}
void Heap::RepairFreeListsAfterDeserialization() {
PagedSpaces spaces(this);
for (PagedSpace* space = spaces.next(); space != nullptr;
@ -3645,12 +3641,12 @@ Code* Heap::builtin(int index) {
DCHECK(Builtins::IsBuiltinId(index));
// Code::cast cannot be used here since we access builtins
// during the marking phase of mark sweep. See IC::Clear.
return reinterpret_cast<Code*>(builtins_[index]);
return reinterpret_cast<Code*>(builtins_table()[index]);
}
Address Heap::builtin_address(int index) {
DCHECK(Builtins::IsBuiltinId(index) || index == Builtins::builtin_count);
return reinterpret_cast<Address>(&builtins_[index]);
return reinterpret_cast<Address>(&builtins_table()[index]);
}
void Heap::set_builtin(int index, HeapObject* builtin) {
@ -3658,7 +3654,7 @@ void Heap::set_builtin(int index, HeapObject* builtin) {
DCHECK(Internals::HasHeapObjectTag(builtin));
// The given builtin may be completely uninitialized thus we cannot check its
// type here.
builtins_[index] = builtin;
builtins_table()[index] = builtin;
}
void Heap::IterateRoots(RootVisitor* v, VisitMode mode) {
@ -3671,7 +3667,7 @@ void Heap::IterateWeakRoots(RootVisitor* v, VisitMode mode) {
mode == VISIT_ALL_IN_MINOR_MC_MARK ||
mode == VISIT_ALL_IN_MINOR_MC_UPDATE;
v->VisitRootPointer(Root::kStringTable, nullptr,
&roots_[RootIndex::kStringTable]);
&roots_table()[RootIndex::kStringTable]);
v->Synchronize(VisitorSynchronization::kStringTable);
if (!isMinorGC && mode != VISIT_ALL_IN_SWEEP_NEWSPACE &&
mode != VISIT_FOR_SERIALIZATION) {
@ -3686,8 +3682,9 @@ void Heap::IterateWeakRoots(RootVisitor* v, VisitMode mode) {
void Heap::IterateSmiRoots(RootVisitor* v) {
// Acquire execution access since we are going to read stack limit values.
ExecutionAccess access(isolate());
v->VisitRootPointers(Root::kSmiRootList, nullptr, roots_.smi_roots_begin(),
roots_.smi_roots_end());
v->VisitRootPointers(Root::kSmiRootList, nullptr,
roots_table().smi_roots_begin(),
roots_table().smi_roots_end());
v->Synchronize(VisitorSynchronization::kSmiRootList);
}
@ -3747,10 +3744,10 @@ void Heap::IterateStrongRoots(RootVisitor* v, VisitMode mode) {
// Garbage collection can skip over the read-only roots.
const bool isGC = mode != VISIT_ALL && mode != VISIT_FOR_SERIALIZATION &&
mode != VISIT_ONLY_STRONG_FOR_SERIALIZATION;
Object** start =
isGC ? roots_.read_only_roots_end() : roots_.strong_roots_begin();
Object** start = isGC ? roots_table().read_only_roots_end()
: roots_table().strong_roots_begin();
v->VisitRootPointers(Root::kStrongRootList, nullptr, start,
roots_.strong_roots_end());
roots_table().strong_roots_end());
v->Synchronize(VisitorSynchronization::kStrongRootList);
isolate_->bootstrapper()->Iterate(v);
@ -3844,7 +3841,8 @@ void Heap::IterateWeakGlobalHandles(RootVisitor* v) {
void Heap::IterateBuiltins(RootVisitor* v) {
for (int i = 0; i < Builtins::builtin_count; i++) {
v->VisitRootPointer(Root::kBuiltins, Builtins::name(i), &builtins_[i]);
v->VisitRootPointer(Root::kBuiltins, Builtins::name(i),
&builtins_table()[i]);
}
}
@ -4393,7 +4391,7 @@ void Heap::SetUp() {
write_protect_code_memory_ = FLAG_write_protect_code_memory;
external_reference_table_.Init(isolate_);
isolate_data_.external_reference_table()->Init(isolate_);
}
void Heap::InitializeHashSeed() {
@ -4415,15 +4413,15 @@ void Heap::SetStackLimits() {
// Set up the special root array entries containing the stack limits.
// These are actually addresses, but the tag makes the GC ignore it.
roots_[RootIndex::kStackLimit] = reinterpret_cast<Object*>(
roots_table()[RootIndex::kStackLimit] = reinterpret_cast<Object*>(
(isolate_->stack_guard()->jslimit() & ~kSmiTagMask) | kSmiTag);
roots_[RootIndex::kRealStackLimit] = reinterpret_cast<Object*>(
roots_table()[RootIndex::kRealStackLimit] = reinterpret_cast<Object*>(
(isolate_->stack_guard()->real_jslimit() & ~kSmiTagMask) | kSmiTag);
}
void Heap::ClearStackLimits() {
roots_[RootIndex::kStackLimit] = Smi::kZero;
roots_[RootIndex::kRealStackLimit] = Smi::kZero;
roots_table()[RootIndex::kStackLimit] = Smi::kZero;
roots_table()[RootIndex::kRealStackLimit] = Smi::kZero;
}
int Heap::NextAllocationTimeout(int current_timeout) {

View File

@ -19,13 +19,12 @@
#include "src/allocation.h"
#include "src/assert-scope.h"
#include "src/base/atomic-utils.h"
#include "src/external-reference-table.h"
#include "src/globals.h"
#include "src/heap-symbols.h"
#include "src/isolate-data.h"
#include "src/objects.h"
#include "src/objects/fixed-array.h"
#include "src/objects/string-table.h"
#include "src/roots.h"
#include "src/visitors.h"
namespace v8 {
@ -638,82 +637,39 @@ class Heap {
return array_buffer_collector_;
}
const IsolateData* isolate_data() const { return &isolate_data_; }
IsolateData* isolate_data() { return &isolate_data_; }
// ===========================================================================
// Root set access. ==========================================================
// ===========================================================================
friend class ReadOnlyRoots;
public:
RootsTable& roots_table() { return roots_; }
// Shortcut to the roots table stored in |isolate_data_|.
V8_INLINE const RootsTable& roots_table() const {
return isolate_data_.roots();
}
V8_INLINE RootsTable& roots_table() { return isolate_data_.roots(); }
// Heap root getters.
#define ROOT_ACCESSOR(type, name, CamelName) inline type* name();
MUTABLE_ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR
Object* root(RootIndex index) { return roots_[index]; }
// TODO(ishell): move to Isolate
Object* root(RootIndex index) { return roots_table()[index]; }
Handle<Object> root_handle(RootIndex index) {
return Handle<Object>(&roots_[index]);
}
bool IsRootHandleLocation(Object** handle_location, RootIndex* index) const {
return roots_.IsRootHandleLocation(handle_location, index);
}
template <typename T>
bool IsRootHandle(Handle<T> handle, RootIndex* index) const {
return roots_.IsRootHandle(handle, index);
}
// Generated code can embed this address to get access to the roots.
Object** roots_array_start() { return roots_.roots_; }
ExternalReferenceTable* external_reference_table() {
DCHECK(external_reference_table_.is_initialized());
return &external_reference_table_;
}
static constexpr int roots_to_external_reference_table_offset() {
return kRootsExternalReferenceTableOffset;
}
static constexpr int roots_to_builtins_offset() {
return kRootsBuiltinsOffset;
}
static constexpr int root_register_addressable_end_offset() {
return kRootRegisterAddressableEndOffset;
}
Address root_register_addressable_end() {
return reinterpret_cast<Address>(roots_array_start()) +
kRootRegisterAddressableEndOffset;
return Handle<Object>(&roots_table()[index]);
}
// Sets the stub_cache_ (only used when expanding the dictionary).
void SetRootCodeStubs(SimpleNumberDictionary* value);
V8_INLINE void SetRootCodeStubs(SimpleNumberDictionary* value);
V8_INLINE void SetRootMaterializedObjects(FixedArray* objects);
V8_INLINE void SetRootScriptList(Object* value);
V8_INLINE void SetRootStringTable(StringTable* value);
V8_INLINE void SetRootNoScriptSharedFunctionInfos(Object* value);
V8_INLINE void SetMessageListeners(TemplateList* value);
void SetRootMaterializedObjects(FixedArray* objects) {
roots_[RootIndex::kMaterializedObjects] = objects;
}
void SetRootScriptList(Object* value) {
roots_[RootIndex::kScriptList] = value;
}
void SetRootStringTable(StringTable* value) {
roots_[RootIndex::kStringTable] = value;
}
void SetRootNoScriptSharedFunctionInfos(Object* value) {
roots_[RootIndex::kNoScriptSharedFunctionInfos] = value;
}
void SetMessageListeners(TemplateList* value) {
roots_[RootIndex::kMessageListeners] = value;
}
// Set the stack limit in the roots_ array. Some architectures generate
// Set the stack limit in the roots table. Some architectures generate
// code that looks here, because it is faster than loading from the static
// jslimit_/real_jslimit_ variable in the StackGuard.
void SetStackLimits();
@ -783,6 +739,9 @@ class Heap {
// Builtins. =================================================================
// ===========================================================================
// Shortcut to the builtins table stored in |isolate_data_|.
V8_INLINE Object** builtins_table() { return isolate_data_.builtins(); }
Code* builtin(int index);
Address builtin_address(int index);
void set_builtin(int index, HeapObject* builtin);
@ -1797,28 +1756,7 @@ class Heap {
// more expedient to get at the isolate directly from within Heap methods.
Isolate* isolate_ = nullptr;
RootsTable roots_;
// This table is accessed from builtin code compiled into the snapshot, and
// thus its offset from roots_ must remain static. This is verified in
// Isolate::Init() using runtime checks.
static constexpr int kRootsExternalReferenceTableOffset =
static_cast<int>(RootIndex::kRootListLength) * kPointerSize;
ExternalReferenceTable external_reference_table_;
// As external references above, builtins are accessed through an offset from
// the roots register. Its offset from roots_ must remain static. This is
// verified in Isolate::Init() using runtime checks.
static constexpr int kRootsBuiltinsOffset =
kRootsExternalReferenceTableOffset +
ExternalReferenceTable::SizeInBytes();
Object* builtins_[Builtins::builtin_count];
// kRootRegister may be used to address any location that starts at the
// Isolate and ends at this point. Fields past this point are not guaranteed
// to live at a static offset from kRootRegister.
static constexpr int kRootRegisterAddressableEndOffset =
kRootsBuiltinsOffset + Builtins::builtin_count * kPointerSize;
IsolateData isolate_data_;
size_t code_range_size_ = 0;
size_t max_semi_space_size_ = 8 * (kPointerSize / 4) * MB;
@ -2094,6 +2032,7 @@ class Heap {
friend class ObjectStatsCollector;
friend class Page;
friend class PagedSpace;
friend class ReadOnlyRoots;
friend class Scavenger;
friend class ScavengerCollector;
friend class Space;

View File

@ -292,7 +292,7 @@ bool Heap::CreateInitialMaps() {
const StructTable& entry = struct_table[i];
Map* map;
if (!AllocatePartialMap(entry.type, entry.size).To(&map)) return false;
roots_[entry.index] = map;
roots_table()[entry.index] = map;
}
// Allocate the empty enum cache.
@ -334,7 +334,7 @@ bool Heap::CreateInitialMaps() {
FinalizePartialMap(roots.the_hole_map());
for (unsigned i = 0; i < arraysize(struct_table); ++i) {
const StructTable& entry = struct_table[i];
FinalizePartialMap(Map::cast(roots_[entry.index]));
FinalizePartialMap(Map::cast(roots_table()[entry.index]));
}
{ // Map allocation
@ -389,7 +389,7 @@ bool Heap::CreateInitialMaps() {
// Mark cons string maps as unstable, because their objects can change
// maps during GC.
if (StringShape(entry.type).IsCons()) map->mark_unstable();
roots_[entry.index] = map;
roots_table()[entry.index] = map;
}
{ // Create a separate external one byte string map for native sources.
@ -635,7 +635,7 @@ void Heap::CreateInitialObjects() {
for (unsigned i = 0; i < arraysize(constant_string_table); i++) {
Handle<String> str =
factory->InternalizeUtf8String(constant_string_table[i].contents);
roots_[constant_string_table[i].index] = *str;
roots_table()[constant_string_table[i].index] = *str;
}
// Allocate
@ -705,7 +705,7 @@ void Heap::CreateInitialObjects() {
{ \
Handle<Symbol> symbol( \
isolate()->factory()->NewPrivateSymbol(TENURED_READ_ONLY)); \
roots_[RootIndex::k##name] = *symbol; \
roots_table()[RootIndex::k##name] = *symbol; \
}
PRIVATE_SYMBOL_LIST_GENERATOR(SYMBOL_INIT, /* not used */)
#undef SYMBOL_INIT
@ -718,7 +718,7 @@ void Heap::CreateInitialObjects() {
Handle<String> name##d = \
factory->NewStringFromStaticChars(#description, TENURED_READ_ONLY); \
name->set_name(*name##d); \
roots_[RootIndex::k##name] = *name;
roots_table()[RootIndex::k##name] = *name;
PUBLIC_SYMBOL_LIST_GENERATOR(SYMBOL_INIT, /* not used */)
#undef SYMBOL_INIT
@ -728,7 +728,7 @@ void Heap::CreateInitialObjects() {
factory->NewStringFromStaticChars(#description, TENURED_READ_ONLY); \
name->set_is_well_known_symbol(true); \
name->set_name(*name##d); \
roots_[RootIndex::k##name] = *name;
roots_table()[RootIndex::k##name] = *name;
WELL_KNOWN_SYMBOL_LIST_GENERATOR(SYMBOL_INIT, /* not used */)
#undef SYMBOL_INIT
@ -916,15 +916,15 @@ void Heap::CreateInternalAccessorInfoObjects() {
#define INIT_ACCESSOR_INFO(_, accessor_name, AccessorName, ...) \
acessor_info = Accessors::Make##AccessorName##Info(isolate); \
roots_[RootIndex::k##AccessorName##Accessor] = *acessor_info;
roots_table()[RootIndex::k##AccessorName##Accessor] = *acessor_info;
ACCESSOR_INFO_LIST_GENERATOR(INIT_ACCESSOR_INFO, /* not used */)
#undef INIT_ACCESSOR_INFO
#define INIT_SIDE_EFFECT_FLAG(_, accessor_name, AccessorName, GetterType, \
SetterType) \
AccessorInfo::cast(roots_[RootIndex::k##AccessorName##Accessor]) \
AccessorInfo::cast(roots_table()[RootIndex::k##AccessorName##Accessor]) \
->set_getter_side_effect_type(SideEffectType::GetterType); \
AccessorInfo::cast(roots_[RootIndex::k##AccessorName##Accessor]) \
AccessorInfo::cast(roots_table()[RootIndex::k##AccessorName##Accessor]) \
->set_setter_side_effect_type(SideEffectType::SetterType);
ACCESSOR_INFO_LIST_GENERATOR(INIT_SIDE_EFFECT_FLAG, /* not used */)
#undef INIT_SIDE_EFFECT_FLAG

95
src/isolate-data.h Normal file
View File

@ -0,0 +1,95 @@
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_ISOLATE_DATA_H_
#define V8_ISOLATE_DATA_H_
#include "src/builtins/builtins.h"
#include "src/external-reference-table.h"
#include "src/roots.h"
#include "src/utils.h"
namespace v8 {
namespace internal {
class Isolate;
// This class contains a collection of data accessible from both C++ runtime
// and compiled code (including assembly stubs, builtins, interpreter bytecode
// handlers and optimized code).
// In particular, it contains the V8 heap roots table, the external
// reference table and the builtins array.
// The compiled code accesses the isolate data fields indirectly via the root
// register.
class IsolateData final {
 public:
  IsolateData() = default;

  // Layout description.
  // These offsets are embedded into code compiled into the snapshot, so the
  // field order below must remain stable across platforms and compilers.
  // AssertPredictableLayout() statically verifies that the members really
  // live at exactly these offsets.
#define FIELDS(V)                                                         \
  /* roots_ */                                                            \
  V(kRootsTableOffset, RootsTable::kEntriesCount* kPointerSize)           \
  V(kRootsTableEndOffset, 0)                                              \
  /* external_reference_table_ */                                         \
  V(kExternalReferenceTableOffset, ExternalReferenceTable::SizeInBytes()) \
  V(kExternalReferenceTableEndOffset, 0)                                  \
  /* builtins_ */                                                         \
  V(kBuiltinsTableOffset, Builtins::builtin_count* kPointerSize)          \
  V(kBuiltinsTableEndOffset, 0)                                           \
  /* Total size. */                                                       \
  V(kSize, 0)

  DEFINE_FIELD_OFFSET_CONSTANTS(0, FIELDS)
#undef FIELDS

  // Returns true if this address points to data stored in this instance.
  // If it's the case then the value can be accessed indirectly through the
  // root register.
  bool contains(Address address) const {
    STATIC_ASSERT(std::is_unsigned<Address>::value);
    Address start = reinterpret_cast<Address>(this);
    // A single unsigned comparison covers both bounds: addresses below
    // |start| wrap around to a huge value and fail the check.
    return (address - start) < sizeof(*this);
  }

  // The V8 heap roots table, stored inline so that it sits at a fixed offset
  // from the root register.
  RootsTable& roots() { return roots_; }
  const RootsTable& roots() const { return roots_; }

  ExternalReferenceTable* external_reference_table() {
    return &external_reference_table_;
  }

  // Pointer to the first slot of the builtins code-object table.
  Object** builtins() { return &builtins_[0]; }

 private:
  RootsTable roots_;
  ExternalReferenceTable external_reference_table_;
  Object* builtins_[Builtins::builtin_count];

  // Compile-time check that the members are laid out at the offsets declared
  // in FIELDS above; see the comment at the definition.
  V8_INLINE static void AssertPredictableLayout();

  friend class Isolate;

  DISALLOW_COPY_AND_ASSIGN(IsolateData);
};
// IsolateData object must have "predictable" layout which does not change when
// cross-compiling to another platform. Otherwise there may be compatibility
// issues because of different compilers used for snapshot generator and
// actual V8 code.
void IsolateData::AssertPredictableLayout() {
  // NOTE(review): offsetof on a non-standard-layout type is only
  // conditionally supported; this presumably relies on the members being
  // plain enough for the supported compilers — confirm on new toolchains.
  STATIC_ASSERT(offsetof(IsolateData, roots_) ==
                IsolateData::kRootsTableOffset);
  STATIC_ASSERT(offsetof(IsolateData, external_reference_table_) ==
                IsolateData::kExternalReferenceTableOffset);
  STATIC_ASSERT(offsetof(IsolateData, builtins_) ==
                IsolateData::kBuiltinsTableOffset);
  // No padding at the end: the declared total size is the real object size.
  STATIC_ASSERT(sizeof(IsolateData) == IsolateData::kSize);
}
} // namespace internal
} // namespace v8
#endif // V8_ISOLATE_DATA_H_

View File

@ -12,8 +12,11 @@ namespace v8 {
namespace internal {
base::AddressRegion Isolate::root_register_addressable_region() {
// TODO(ishell): limit this region to the IsolateData object once all the
// data is moved there.
Address start = reinterpret_cast<Address>(this);
Address end = heap_.root_register_addressable_end();
Address end =
reinterpret_cast<Address>(heap_.isolate_data()) + sizeof(IsolateData);
return base::AddressRegion(start, end - start);
}

View File

@ -3281,7 +3281,7 @@ bool Isolate::Init(StartupDeserializer* des) {
CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, embedder_data_)),
Internals::kIsolateEmbedderDataOffset);
CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, heap_.roots_)),
CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, heap_.isolate_data_.roots_)),
Internals::kIsolateRootsOffset);
CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, heap_.external_memory_)),
Internals::kExternalMemoryOffset);
@ -3290,12 +3290,12 @@ bool Isolate::Init(StartupDeserializer* des) {
CHECK_EQ(static_cast<int>(
OFFSET_OF(Isolate, heap_.external_memory_at_last_mark_compact_)),
Internals::kExternalMemoryAtLastMarkCompactOffset);
CHECK_EQ(
static_cast<int>(OFFSET_OF(Isolate, heap_.external_reference_table_)),
Internals::kIsolateRootsOffset +
Heap::kRootsExternalReferenceTableOffset);
CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, heap_.builtins_)),
Internals::kIsolateRootsOffset + Heap::kRootsBuiltinsOffset);
CHECK_EQ(static_cast<int>(OFFSET_OF(
Isolate, heap_.isolate_data_.external_reference_table_)),
Internals::kIsolateRootsOffset +
IsolateData::kExternalReferenceTableOffset);
CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, heap_.isolate_data_.builtins_)),
Internals::kIsolateRootsOffset + IsolateData::kBuiltinsTableOffset);
{
HandleScope scope(this);

View File

@ -999,11 +999,24 @@ class Isolate : private HiddenFactory {
StackGuard* stack_guard() { return &stack_guard_; }
Heap* heap() { return &heap_; }
const IsolateData* isolate_data() const { return heap_.isolate_data(); }
IsolateData* isolate_data() { return heap_.isolate_data(); }
RootsTable& roots_table() { return isolate_data()->roots(); }
// Generated code can embed this address to get access to the roots.
Object** roots_array_start() { return roots_table().roots_; }
// kRootRegister may be used to address any location that falls into this
// region. Fields outside this region are not guaranteed to live at a static
// offset from kRootRegister.
inline base::AddressRegion root_register_addressable_region();
ExternalReferenceTable* external_reference_table() {
DCHECK(isolate_data()->external_reference_table()->is_initialized());
return isolate_data()->external_reference_table();
}
StubCache* load_stub_cache() { return load_stub_cache_; }
StubCache* store_stub_cache() { return store_stub_cache_; }
DeoptimizerData* deoptimizer_data() { return deoptimizer_data_; }

View File

@ -449,6 +449,7 @@ class RootsTable {
Object* roots_[kEntriesCount];
friend class Isolate;
friend class Heap;
friend class Factory;
friend class ReadOnlyRoots;

View File

@ -26,7 +26,7 @@ void Deserializer<AllocatorT>::Initialize(Isolate* isolate) {
DCHECK_NOT_NULL(isolate);
isolate_ = isolate;
DCHECK_NULL(external_reference_table_);
external_reference_table_ = isolate->heap()->external_reference_table();
external_reference_table_ = isolate->external_reference_table();
#ifdef DEBUG
// Count the number of external references registered through the API.
num_api_references_ = 0;

View File

@ -22,7 +22,7 @@ ExternalReferenceEncoder::ExternalReferenceEncoder(Isolate* isolate) {
map_ = new AddressToIndexHashMap();
isolate->set_external_reference_map(map_);
// Add V8's external references.
ExternalReferenceTable* table = isolate->heap()->external_reference_table();
ExternalReferenceTable* table = isolate->external_reference_table();
for (uint32_t i = 0; i < table->size(); ++i) {
Address addr = table->address(i);
// Ignore duplicate references.
@ -89,7 +89,7 @@ const char* ExternalReferenceEncoder::NameOfAddress(Isolate* isolate,
if (maybe_index.IsNothing()) return "<unknown>";
Value value(maybe_index.FromJust());
if (value.is_from_api()) return "<from api>";
return isolate->heap()->external_reference_table()->name(value.index());
return isolate->external_reference_table()->name(value.index());
}
void SerializedData::AllocateData(uint32_t size) {
@ -102,7 +102,7 @@ void SerializedData::AllocateData(uint32_t size) {
// static
uint32_t SerializedData::ComputeMagicNumber(Isolate* isolate) {
return ComputeMagicNumber(isolate->heap()->external_reference_table());
return ComputeMagicNumber(isolate->external_reference_table());
}
// The partial snapshot cache is terminated by undefined. We visit the

View File

@ -122,7 +122,7 @@ void StartupSerializer::SerializeStrongReferences() {
void StartupSerializer::VisitRootPointers(Root root, const char* description,
Object** start, Object** end) {
if (start == isolate()->heap()->roots_array_start()) {
if (start == isolate()->roots_array_start()) {
// Serializing the root list needs special handling:
// - Only root list elements that have been fully serialized can be
// referenced using kRootArray bytecodes.

View File

@ -6,7 +6,8 @@
#include "src/builtins/builtins.h"
#include "src/builtins/constants-table-builder.h"
#include "src/heap/heap-inl.h"
#include "src/isolate-data.h"
#include "src/isolate-inl.h"
#include "src/lsan.h"
#include "src/snapshot/serializer-common.h"
@ -33,7 +34,7 @@ void TurboAssemblerBase::IndirectLoadConstant(Register destination,
int builtin_index;
RootIndex root_index;
if (isolate()->heap()->IsRootHandle(object, &root_index)) {
if (isolate()->roots_table().IsRootHandle(object, &root_index)) {
// Roots are loaded relative to the root register.
LoadRoot(destination, root_index);
} else if (isolate()->builtins()->IsBuiltinHandle(object, &builtin_index)) {
@ -92,7 +93,7 @@ int32_t TurboAssemblerBase::RootRegisterOffset(RootIndex root_index) {
// static
int32_t TurboAssemblerBase::RootRegisterOffsetForExternalReferenceIndex(
int reference_index) {
return Heap::roots_to_external_reference_table_offset() - kRootRegisterBias +
return IsolateData::kExternalReferenceTableOffset - kRootRegisterBias +
ExternalReferenceTable::OffsetOfEntry(reference_index);
}
@ -100,7 +101,7 @@ int32_t TurboAssemblerBase::RootRegisterOffsetForExternalReferenceIndex(
intptr_t TurboAssemblerBase::RootRegisterOffsetForExternalReference(
Isolate* isolate, const ExternalReference& reference) {
return static_cast<intptr_t>(reference.address()) - kRootRegisterBias -
reinterpret_cast<intptr_t>(isolate->heap()->roots_array_start());
reinterpret_cast<intptr_t>(isolate->roots_array_start());
}
// static
@ -113,7 +114,7 @@ bool TurboAssemblerBase::IsAddressableThroughRootRegister(
// static
int32_t TurboAssemblerBase::RootRegisterOffsetForBuiltinIndex(
int builtin_index) {
return Heap::roots_to_builtins_offset() - kRootRegisterBias +
return IsolateData::kBuiltinsTableOffset - kRootRegisterBias +
builtin_index * kPointerSize;
}

View File

@ -1288,7 +1288,7 @@ Handle<WasmInstanceObject> WasmInstanceObject::New(
instance->SetRawMemory(nullptr, 0);
instance->set_roots_array_address(
reinterpret_cast<Address>(isolate->heap()->roots_array_start()));
reinterpret_cast<Address>(isolate->roots_array_start()));
instance->set_stack_limit_address(
isolate->stack_guard()->address_of_jslimit());
instance->set_real_stack_limit_address(

View File

@ -120,8 +120,8 @@ class Reader {
constexpr size_t kVersionSize = 4 * sizeof(uint32_t);
void WriteVersion(Isolate* isolate, Writer* writer) {
writer->Write(SerializedData::ComputeMagicNumber(
isolate->heap()->external_reference_table()));
writer->Write(
SerializedData::ComputeMagicNumber(isolate->external_reference_table()));
writer->Write(Version::Hash());
writer->Write(static_cast<uint32_t>(CpuFeatures::SupportedFeatures()));
writer->Write(FlagList::Hash());
@ -243,7 +243,7 @@ NativeModuleSerializer::NativeModuleSerializer(
->instruction_start();
wasm_stub_targets_lookup_.insert(std::make_pair(addr, i));
}
ExternalReferenceTable* table = isolate_->heap()->external_reference_table();
ExternalReferenceTable* table = isolate_->external_reference_table();
for (uint32_t i = 0; i < table->size(); ++i) {
Address addr = table->address(i);
reference_table_lookup_.insert(std::make_pair(addr, i));
@ -501,8 +501,7 @@ bool NativeModuleDeserializer::ReadCode(uint32_t fn_index, Reader* reader) {
}
case RelocInfo::EXTERNAL_REFERENCE: {
uint32_t tag = GetWasmCalleeTag(iter.rinfo());
Address address =
isolate_->heap()->external_reference_table()->address(tag);
Address address = isolate_->external_reference_table()->address(tag);
iter.rinfo()->set_target_external_reference(address, SKIP_ICACHE_FLUSH);
break;
}

View File

@ -4543,8 +4543,8 @@ TEST_F(InstructionSelectorTest, ExternalReferenceLoad1) {
TRACED_FOREACH(int64_t, offset, kOffsets) {
StreamBuilder m(this, MachineType::Int64());
ExternalReference reference = bit_cast<ExternalReference>(
reinterpret_cast<intptr_t>(isolate()->heap()->roots_array_start()) +
offset + kRootRegisterBias);
reinterpret_cast<intptr_t>(isolate()->roots_array_start()) + offset +
kRootRegisterBias);
Node* const value =
m.Load(MachineType::Int64(), m.ExternalConstant(reference));
m.Return(value);
@ -4565,8 +4565,8 @@ TEST_F(InstructionSelectorTest, ExternalReferenceLoad2) {
StreamBuilder m(this, MachineType::Int64());
int64_t offset = 0x100000000;
ExternalReference reference = bit_cast<ExternalReference>(
reinterpret_cast<intptr_t>(isolate()->heap()->roots_array_start()) +
offset + kRootRegisterBias);
reinterpret_cast<intptr_t>(isolate()->roots_array_start()) + offset +
kRootRegisterBias);
Node* const value =
m.Load(MachineType::Int64(), m.ExternalConstant(reference));
m.Return(value);