[ext-code-space] Add CodeDataContainer::code field and friends
... behind the v8_enable_external_code_space build flag.

This is the first CL in a series of CLs that will make CodeDataContainer the only type of object that can contain references to Code objects (besides the Code objects embedded into the generated code). Eventually these changes will allow us to move the Code space out of the V8 heap cage.

This CL adds a |code| field to ensure that the CodeDataContainer keeps the respective Code object alive, and a |code_entry_point| field that contains the cached value of code().InstructionStart().

Bug: v8:11880
Change-Id: Ie7ce75667d8da306797d203691b429671bc4530d
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2964093
Commit-Queue: Igor Sheludko <ishell@chromium.org>
Reviewed-by: Nico Hartmann <nicohartmann@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Cr-Commit-Position: refs/heads/master@{#75179}

parent 11891fd6a7
commit 227e90188b

BUILD.gn: 14 lines changed (additional files are included in the diff below).
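
The diff below is easier to follow with the intended contract in mind: a CodeDataContainer now holds a strong |code| reference that keeps its Code object alive, plus a |code_entry_point| cache of code().InstructionStart() that must be refreshed whenever the Code object moves. A minimal, standalone C++ sketch of that contract follows; the types and names (FakeCode, FakeCodeDataContainer) are invented stand-ins, not V8 code.

// Standalone illustration only; FakeCode / FakeCodeDataContainer are
// hypothetical stand-ins for the Code / CodeDataContainer relationship
// that this CL introduces.
#include <cstdint>
#include <iostream>
#include <memory>

using Address = std::uint64_t;

struct FakeCode {
  // Stands in for Code::InstructionStart(): where execution would begin.
  Address instruction_start = 0;
};

class FakeCodeDataContainer {
 public:
  // Mirrors SetCodeAndEntryPoint(): store a strong reference to the code
  // object and cache its entry point.
  void SetCodeAndEntryPoint(std::shared_ptr<FakeCode> code) {
    code_ = std::move(code);                       // keeps the code alive
    code_entry_point_ = code_->instruction_start;  // cached entry point
  }

  // Mirrors UpdateCodeEntryPoint(): refresh the cache when the code object
  // moves (e.g. after GC evacuation) without changing which code it is.
  void UpdateCodeEntryPoint() { code_entry_point_ = code_->instruction_start; }

  Address code_entry_point() const { return code_entry_point_; }

 private:
  std::shared_ptr<FakeCode> code_;  // the |code| back-reference
  Address code_entry_point_ = 0;    // the cached |code_entry_point|
};

int main() {
  auto code = std::make_shared<FakeCode>();
  code->instruction_start = 0x1000;

  FakeCodeDataContainer container;
  container.SetCodeAndEntryPoint(code);

  // Simulate the code object being relocated; the cached value has to be
  // refreshed explicitly, which is what the GC and deserializer hooks in
  // this CL do for the real objects.
  code->instruction_start = 0x2000;
  container.UpdateCodeEntryPoint();
  std::cout << std::hex << container.code_entry_point() << "\n";  // prints 2000
  return 0;
}
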
@@ -196,6 +196,11 @@ declare_args() {
   # Sets -dV8_SHORT_BUILTIN_CALLS
   v8_enable_short_builtin_calls = ""

+  # Enable support for external code range relative to the pointer compression
+  # cage.
+  # Sets -dV8_EXTERNAL_CODE_SPACE
+  v8_enable_external_code_space = ""
+
   # With post mortem support enabled, metadata is embedded into libv8 that
   # describes various parameters of the VM for use by debuggers. See
   # tools/gen-postmortem-metadata.py for details.

@@ -391,6 +396,9 @@ if (v8_enable_short_builtin_calls == "") {
   v8_enable_short_builtin_calls =
       v8_current_cpu == "x64" || (!is_android && v8_current_cpu == "arm64")
 }
+if (v8_enable_external_code_space == "") {
+  v8_enable_external_code_space = false
+}
 if (v8_enable_single_generation == "") {
   v8_enable_single_generation = v8_disable_write_barriers
 }

@@ -471,6 +479,9 @@ assert(!v8_use_multi_snapshots || !v8_control_flow_integrity,
 assert(!v8_enable_heap_sandbox || v8_enable_pointer_compression,
        "V8 Heap Sandbox requires pointer compression")

+assert(!v8_enable_heap_sandbox || v8_enable_external_code_space,
+       "V8 Heap Sandbox is not compatible with external code space YET")
+
 assert(
     !v8_enable_pointer_compression_shared_cage || v8_enable_pointer_compression,
     "Can't share a pointer compression cage if pointers aren't compressed")

@@ -879,6 +890,9 @@ config("features") {
   if (v8_enable_short_builtin_calls) {
     defines += [ "V8_SHORT_BUILTIN_CALLS" ]
   }
+  if (v8_enable_external_code_space) {
+    defines += [ "V8_EXTERNAL_CODE_SPACE" ]
+  }
   if (v8_enable_swiss_name_dictionary) {
     defines += [ "V8_ENABLE_SWISS_NAME_DICTIONARY" ]
   }

@@ -146,6 +146,7 @@ enum ExternalPointerTag : uint64_t {
   kForeignForeignAddressTag = 0x01f7000000000000,        // 0b000000111110111
   kNativeContextMicrotaskQueueTag = 0x01fb000000000000,  // 0b000000111111011
   kEmbedderDataSlotPayloadTag = 0x01fd000000000000,      // 0b000000111111101
+  kCodeEntryPointTag = 0x01fe000000000000,               // 0b000000111111110
 };

 constexpr uint64_t kExternalPointerTagMask = 0xffff000000000000;

@@ -396,7 +396,8 @@ Handle<Code> Builtins::GenerateOffHeapTrampolineFor(
                                      : TrampolineType::kAbort);

   return Factory::CodeBuilder(isolate, desc, CodeKind::BUILTIN)
-      .set_read_only_data_container(kind_specfic_flags)
+      .set_kind_specific_flags(kind_specfic_flags)
+      .set_read_only_data_container(!V8_EXTERNAL_CODE_SPACE_BOOL)
       .set_self_reference(generator.CodeObject())
       .set_is_executable(generate_jump_to_instruction_stream)
       .Build();

@@ -126,6 +126,12 @@ const size_t kShortBuiltinCallsOldSpaceSizeThreshold = size_t{2} * GB;
 #define V8_DICT_PROPERTY_CONST_TRACKING_BOOL false
 #endif

+#ifdef V8_EXTERNAL_CODE_SPACE
+#define V8_EXTERNAL_CODE_SPACE_BOOL true
+#else
+#define V8_EXTERNAL_CODE_SPACE_BOOL false
+#endif
+
 // Determine whether tagged pointers are 8 bytes (used in Torque layouts for
 // choosing where to insert padding).
 #if V8_TARGET_ARCH_64_BIT && !defined(V8_COMPRESS_POINTERS)

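The V8_EXTERNAL_CODE_SPACE_BOOL constant defined above is what lets the rest of this CL write checks such as if (V8_EXTERNAL_CODE_SPACE_BOOL) { ... } instead of #ifdef blocks: both branches are always compiled and type-checked in every configuration, and the optimizer can drop the disabled branch as dead code. A minimal sketch of the pattern, using a hypothetical MY_FEATURE flag rather than the real one:

// Constant-bool feature flag pattern (hypothetical MY_FEATURE flag).
#include <iostream>

#ifdef MY_FEATURE
#define MY_FEATURE_BOOL true
#else
#define MY_FEATURE_BOOL false
#endif

void DoWork() {
  if (MY_FEATURE_BOOL) {
    // This branch still compiles when the feature is off, which keeps it
    // from bit-rotting; the optimizer removes it as dead code.
    std::cout << "feature-specific path\n";
  } else {
    std::cout << "default path\n";
  }
}

int main() {
  DoWork();
  return 0;
}
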
@@ -969,6 +969,11 @@ void CodeDataContainer::CodeDataContainerVerify(Isolate* isolate) {
   CHECK(IsCodeDataContainer());
   VerifyObjectField(isolate, kNextCodeLinkOffset);
   CHECK(next_code_link().IsCode() || next_code_link().IsUndefined(isolate));
+  if (V8_EXTERNAL_CODE_SPACE_BOOL) {
+    if (raw_code() != Smi::zero()) {
+      CHECK_EQ(code().InstructionStart(), code_entry_point());
+    }
+  }
 }

 void Code::CodeVerify(Isolate* isolate) {

@@ -984,6 +989,9 @@ void Code::CodeVerify(Isolate* isolate) {
                 IsAligned(InstructionStart(), kCodeAlignment));
   CHECK_IMPLIES(!ReadOnlyHeap::Contains(*this),
                 IsAligned(raw_instruction_start(), kCodeAlignment));
+  if (V8_EXTERNAL_CODE_SPACE_BOOL) {
+    CHECK_EQ(*this, code_data_container(kAcquireLoad).code());
+  }
   // TODO(delphick): Refactor Factory::CodeBuilder::BuildInternal, so that the
   // following CHECK works builtin trampolines. It currently fails because
   // CodeVerify is called halfway through constructing the trampoline and so not

@@ -79,9 +79,11 @@ void PrintDictionaryContents(std::ostream& os, T dict) {
     return;
   }

+#ifdef V8_ENABLE_SWISS_NAME_DICTIONARY
   Isolate* isolate = GetIsolateFromWritableObject(dict);
   // IterateEntries for SwissNameDictionary needs to create a handle.
   HandleScope scope(isolate);
+#endif
   for (InternalIndex i : dict.IterateEntries()) {
     Object k;
     if (!dict.ToKey(roots, i, &k)) continue;

@@ -1639,6 +1641,11 @@ void PropertyCell::PropertyCellPrint(std::ostream& os) {

 void Code::CodePrint(std::ostream& os) {
   PrintHeader(os, "Code");
+  os << "\n - code_data_container: "
+     << Brief(code_data_container(kAcquireLoad));
+  if (is_builtin()) {
+    os << "\n - builtin_id: " << Builtins::name(builtin_id());
+  }
   os << "\n";
 #ifdef ENABLE_DISASSEMBLER
   Disassemble(nullptr, os, GetIsolate());

@@ -1648,6 +1655,11 @@ void Code::CodePrint(std::ostream& os) {
 void CodeDataContainer::CodeDataContainerPrint(std::ostream& os) {
   PrintHeader(os, "CodeDataContainer");
   os << "\n - kind_specific_flags: " << kind_specific_flags();
+  if (V8_EXTERNAL_CODE_SPACE_BOOL) {
+    os << "\n - code: " << Brief(code());
+    os << "\n - code_entry_point: "
+       << reinterpret_cast<void*>(code_entry_point());
+  }
   os << "\n";
 }

@@ -96,10 +96,10 @@ MaybeHandle<Code> Factory::CodeBuilder::BuildInternal(
       (kind_specific_flags_ == 0 ||
        kind_specific_flags_ == promise_rejection_flag)) {
     const ReadOnlyRoots roots(isolate_);
-    const auto canonical_code_data_container =
+    const auto canonical_code_data_container = Handle<CodeDataContainer>::cast(
         kind_specific_flags_ == 0
             ? roots.trampoline_trivial_code_data_container_handle()
-            : roots.trampoline_promise_rejection_code_data_container_handle();
+            : roots.trampoline_promise_rejection_code_data_container_handle());
     DCHECK_EQ(canonical_code_data_container->kind_specific_flags(),
               kind_specific_flags_);
     data_container = canonical_code_data_container;

@@ -136,7 +136,9 @@ MaybeHandle<Code> Factory::CodeBuilder::BuildInternal(
     CodePageCollectionMemoryModificationScope code_allocation(heap);
     HeapObject result;
     AllocationType allocation_type =
-        is_executable_ ? AllocationType::kCode : AllocationType::kReadOnly;
+        V8_EXTERNAL_CODE_SPACE_BOOL || is_executable_
+            ? AllocationType::kCode
+            : AllocationType::kReadOnly;
     if (retry_allocation_or_fail) {
       result = heap->AllocateRawWith<Heap::kRetryOrFail>(
           object_size, allocation_type, AllocationOrigin::kRuntime);

@@ -218,6 +220,9 @@ MaybeHandle<Code> Factory::CodeBuilder::BuildInternal(

     raw_code.clear_padding();

+    if (V8_EXTERNAL_CODE_SPACE_BOOL) {
+      data_container->SetCodeAndEntryPoint(isolate_, raw_code);
+    }
 #ifdef VERIFY_HEAP
     if (FLAG_verify_heap) raw_code.ObjectVerify(isolate_);
 #endif

@@ -2078,6 +2083,11 @@ Handle<CodeDataContainer> Factory::NewCodeDataContainer(
   DisallowGarbageCollection no_gc;
   data_container.set_next_code_link(*undefined_value(), SKIP_WRITE_BARRIER);
   data_container.set_kind_specific_flags(flags);
+  if (V8_EXTERNAL_CODE_SPACE_BOOL) {
+    data_container.AllocateExternalPointerEntries(isolate());
+    data_container.set_raw_code(Smi::zero(), SKIP_WRITE_BARRIER);
+    data_container.set_code_entry_point(isolate(), kNullAddress);
+  }
   data_container.clear_padding();
   return handle(data_container, isolate());
 }

@@ -2137,6 +2147,12 @@ Handle<Code> Factory::NewOffHeapTrampolineFor(Handle<Code> code,
     }
 #endif
     raw_result.set_relocation_info(canonical_reloc_info);
+    if (V8_EXTERNAL_CODE_SPACE_BOOL) {
+      // Updating flags (in particular is_off_heap_trampoline one) might change
+      // the value of the instruction start, so update it here.
+      raw_result.code_data_container(kAcquireLoad)
+          .UpdateCodeEntryPoint(isolate(), raw_result);
+    }
   }

   return result;

@@ -2173,6 +2189,9 @@ Handle<Code> Factory::CopyCode(Handle<Code> code) {
     WriteBarrierForCode(*new_code);
 #endif
   }
+  if (V8_EXTERNAL_CODE_SPACE_BOOL) {
+    data_container->SetCodeAndEntryPoint(isolate(), *new_code);
+  }

 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap) new_code->ObjectVerify(isolate());

@@ -901,8 +901,13 @@ class V8_EXPORT_PRIVATE Factory : public FactoryBase<Factory> {
   // Indicates the CodeDataContainer should be allocated in read-only space.
   // As an optimization, if the kind-specific flags match that of a canonical
   // container, it will be used instead.
-  CodeBuilder& set_read_only_data_container(int32_t flags) {
-    read_only_data_container_ = true;
+  CodeBuilder& set_read_only_data_container(bool read_only) {
+    CHECK_IMPLIES(V8_EXTERNAL_CODE_SPACE_BOOL, !read_only);
+    read_only_data_container_ = read_only;
+    return *this;
+  }
+
+  CodeBuilder& set_kind_specific_flags(int32_t flags) {
     kind_specific_flags_ = flags;
     return *this;
   }

@@ -49,6 +49,19 @@
 namespace v8 {
 namespace internal {

+template <typename T>
+T ForwardingAddress(T heap_obj) {
+  MapWord map_word = heap_obj.map_word(kRelaxedLoad);
+
+  if (map_word.IsForwardingAddress()) {
+    return T::cast(map_word.ToForwardingAddress());
+  } else if (Heap::InFromPage(heap_obj)) {
+    return T();
+  } else {
+    return heap_obj;
+  }
+}
+
 AllocationSpace AllocationResult::RetrySpace() {
   DCHECK(IsRetry());
   return static_cast<AllocationSpace>(Smi::ToInt(object_));

@@ -2735,17 +2735,7 @@ class HeapObjectAllocationTracker {
 };

 template <typename T>
-T ForwardingAddress(T heap_obj) {
-  MapWord map_word = heap_obj.map_word(kRelaxedLoad);
-
-  if (map_word.IsForwardingAddress()) {
-    return T::cast(map_word.ToForwardingAddress());
-  } else if (Heap::InFromPage(heap_obj)) {
-    return T();
-  } else {
-    return heap_obj;
-  }
-}
+inline T ForwardingAddress(T heap_obj);

 // Address block allocator compatible with standard containers which registers
 // its allocated range as strong roots.

@@ -1340,10 +1340,19 @@ class EvacuateVisitorBase : public HeapObjectVisitor {
     } else if (dest == CODE_SPACE) {
       DCHECK_CODEOBJECT_SIZE(size, base->heap_->code_space());
       base->heap_->CopyBlock(dst_addr, src_addr, size);
-      Code::cast(dst).Relocate(dst_addr - src_addr);
+      Code code = Code::cast(dst);
+      code.Relocate(dst_addr - src_addr);
       if (mode != MigrationMode::kFast)
         base->ExecuteMigrationObservers(dest, src, dst, size);
       dst.IterateBodyFast(dst.map(), size, base->record_visitor_);
+      if (V8_EXTERNAL_CODE_SPACE_BOOL) {
+        CodeDataContainer code_data_container =
+            code.GCSafeCodeDataContainer(kAcquireLoad);
+        Isolate* isolate_for_sandbox = base->heap_->isolate();
+        // Update the |code_entry_point| which is a raw interiour or off-heap
+        // pointer and thus not handled by the regular updating mechanism.
+        code_data_container.SetCodeAndEntryPoint(isolate_for_sandbox, code);
+      }
     } else {
       DCHECK_OBJECT_SIZE(size);
       DCHECK(dest == NEW_SPACE);

@@ -891,14 +891,22 @@ void Heap::CreateInitialObjects() {
   set_off_heap_trampoline_relocation_info(
       *Builtins::GenerateOffHeapTrampolineRelocInfo(isolate_));

-  set_trampoline_trivial_code_data_container(
-      *isolate()->factory()->NewCodeDataContainer(0,
-                                                  AllocationType::kReadOnly));
-
-  set_trampoline_promise_rejection_code_data_container(
-      *isolate()->factory()->NewCodeDataContainer(
-          Code::IsPromiseRejectionField::encode(true),
-          AllocationType::kReadOnly));
+  if (V8_EXTERNAL_CODE_SPACE_BOOL) {
+    // These roots will not be used.
+    HeapObject no_container = *isolate()->factory()->undefined_value();
+    set_trampoline_trivial_code_data_container(no_container);
+    set_trampoline_promise_rejection_code_data_container(no_container);
+
+  } else {
+    set_trampoline_trivial_code_data_container(
+        *isolate()->factory()->NewCodeDataContainer(0,
+                                                    AllocationType::kReadOnly));
+
+    set_trampoline_promise_rejection_code_data_container(
+        *isolate()->factory()->NewCodeDataContainer(
+            Code::IsPromiseRejectionField::encode(true),
+            AllocationType::kReadOnly));
+  }

   // Evaluate the hash values which will then be cached in the strings.
   isolate()->factory()->zero_string()->EnsureHash();

@@ -10,6 +10,7 @@
 #include "src/codegen/code-desc.h"
 #include "src/common/assert-scope.h"
 #include "src/execution/isolate.h"
+#include "src/heap/heap-inl.h"
 #include "src/interpreter/bytecode-register.h"
 #include "src/objects/code.h"
 #include "src/objects/dictionary.h"

@@ -203,6 +204,17 @@ RELEASE_ACQUIRE_CODE_ACCESSORS(code_data_container, CodeDataContainer,
 #undef CODE_ACCESSORS
 #undef RELEASE_ACQUIRE_CODE_ACCESSORS

+CodeDataContainer Code::GCSafeCodeDataContainer(AcquireLoadTag) const {
+  PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
+  HeapObject object =
+      TaggedField<HeapObject, kCodeDataContainerOffset>::Acquire_Load(cage_base,
+                                                                      *this);
+  DCHECK(!ObjectInYoungGeneration(object));
+  CodeDataContainer code_data_container =
+      ForwardingAddress(CodeDataContainer::unchecked_cast(object));
+  return code_data_container;
+}
+
 void Code::WipeOutHeader() {
   WRITE_FIELD(*this, kRelocationInfoOffset, Smi::FromInt(0));
   WRITE_FIELD(*this, kDeoptimizationDataOffset, Smi::FromInt(0));

@@ -733,8 +745,47 @@ STATIC_ASSERT(FIELD_SIZE(CodeDataContainer::kKindSpecificFlagsOffset) ==
               kInt32Size);
 RELAXED_INT32_ACCESSORS(CodeDataContainer, kind_specific_flags,
                         kKindSpecificFlagsOffset)
+ACCESSORS_CHECKED(CodeDataContainer, raw_code, Object, kCodeOffset,
+                  V8_EXTERNAL_CODE_SPACE_BOOL)
 ACCESSORS(CodeDataContainer, next_code_link, Object, kNextCodeLinkOffset)

+void CodeDataContainer::AllocateExternalPointerEntries(Isolate* isolate) {
+  CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
+  InitExternalPointerField(kCodeEntryPointOffset, isolate);
+}
+
+DEF_GETTER(CodeDataContainer, code, Code) {
+  CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
+  return Code::cast(raw_code(cage_base));
+}
+
+DEF_GETTER(CodeDataContainer, code_entry_point, Address) {
+  CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
+  Isolate* isolate = GetIsolateForHeapSandbox(*this);
+  return ReadExternalPointerField(kCodeEntryPointOffset, isolate,
+                                  kCodeEntryPointTag);
+}
+
+void CodeDataContainer::set_code_entry_point(Isolate* isolate, Address value) {
+  CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
+  WriteExternalPointerField(kCodeEntryPointOffset, isolate, value,
+                            kCodeEntryPointTag);
+}
+
+void CodeDataContainer::SetCodeAndEntryPoint(Isolate* isolate_for_sandbox,
+                                             Code code, WriteBarrierMode mode) {
+  CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
+  set_raw_code(code, mode);
+  set_code_entry_point(isolate_for_sandbox, code.InstructionStart());
+}
+
+void CodeDataContainer::UpdateCodeEntryPoint(Isolate* isolate_for_sandbox,
+                                             Code code) {
+  CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
+  DCHECK_EQ(raw_code(), code);
+  set_code_entry_point(isolate_for_sandbox, code.InstructionStart());
+}
+
 void CodeDataContainer::clear_padding() {
   memset(reinterpret_cast<void*>(address() + kUnalignedSize), 0,
          kSize - kUnalignedSize);

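The |code_entry_point| accessors above go through ReadExternalPointerField / WriteExternalPointerField with the new kCodeEntryPointTag, which is why NewCodeDataContainer and the deserializer call AllocateExternalPointerEntries() before the field is first written: with the heap sandbox enabled, the raw address is stored through an external pointer table rather than inline in the object. A rough, non-authoritative model of that indirection follows; the names and encoding are invented for illustration and differ from V8's real external pointer table.

// Simplified model of a tagged external pointer table (invented names).
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

using Address = std::uint64_t;
constexpr std::uint64_t kFakeTagMask = 0xffff000000000000;
constexpr std::uint64_t kFakeCodeEntryPointTag = 0x01fe000000000000;

class FakeExternalPointerTable {
 public:
  // Mirrors "allocate the entry first" (AllocateExternalPointerEntries):
  // the object field stores the returned handle, not the raw address.
  std::size_t Allocate() {
    entries_.push_back(0);
    return entries_.size() - 1;
  }

  void Write(std::size_t handle, Address value, std::uint64_t tag) {
    entries_[handle] = value | tag;  // store the address together with a tag
  }

  Address Read(std::size_t handle, std::uint64_t tag) const {
    std::uint64_t entry = entries_[handle];
    assert((entry & kFakeTagMask) == tag);  // type check on every access
    return entry & ~kFakeTagMask;
  }

 private:
  std::vector<std::uint64_t> entries_;
};

int main() {
  FakeExternalPointerTable table;
  std::size_t handle = table.Allocate();  // done once, before the first write
  table.Write(handle, 0x00007f0000401000, kFakeCodeEntryPointTag);
  Address entry_point = table.Read(handle, kFakeCodeEntryPointTag);
  return entry_point == 0x00007f0000401000 ? 0 : 1;
}
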
@@ -47,6 +47,23 @@ class CodeDataContainer : public HeapObject {
   // is deterministic.
   inline void clear_padding();

+  // Back-reference to the Code object.
+  // Available only when V8_EXTERNAL_CODE_SPACE is defined.
+  DECL_GETTER(code, Code)
+
+  // Cached value of code().InstructionStart().
+  // Available only when V8_EXTERNAL_CODE_SPACE is defined.
+  DECL_GETTER(code_entry_point, Address)
+
+  inline void SetCodeAndEntryPoint(
+      Isolate* isolate_for_sandbox, Code code,
+      WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
+  // Updates the value of the code entry point. The code must be equal to
+  // the code() value.
+  inline void UpdateCodeEntryPoint(Isolate* isolate_for_sandbox, Code code);
+
+  inline void AllocateExternalPointerEntries(Isolate* isolate);
+
   DECL_CAST(CodeDataContainer)

   // Dispatched behavior.

@@ -54,15 +71,19 @@ class CodeDataContainer : public HeapObject {
   DECL_VERIFIER(CodeDataContainer)

 // Layout description.
 #define CODE_DATA_FIELDS(V) \
-  /* Weak pointer fields. */ \
-  V(kPointerFieldsStrongEndOffset, 0) \
-  V(kNextCodeLinkOffset, kTaggedSize) \
-  V(kPointerFieldsWeakEndOffset, 0) \
-  /* Raw data fields. */ \
-  V(kKindSpecificFlagsOffset, kInt32Size) \
-  V(kUnalignedSize, OBJECT_POINTER_PADDING(kUnalignedSize)) \
-  /* Total size. */ \
+  /* Strong pointer fields. */ \
+  V(kCodeOffset, V8_EXTERNAL_CODE_SPACE_BOOL ? kTaggedSize : 0) \
+  V(kPointerFieldsStrongEndOffset, 0) \
+  /* Weak pointer fields. */ \
+  V(kNextCodeLinkOffset, kTaggedSize) \
+  V(kPointerFieldsWeakEndOffset, 0) \
+  /* Raw data fields. */ \
+  V(kCodeEntryPointOffset, \
+    V8_EXTERNAL_CODE_SPACE_BOOL ? kExternalPointerSize : 0) \
+  V(kKindSpecificFlagsOffset, kInt32Size) \
+  V(kUnalignedSize, OBJECT_POINTER_PADDING(kUnalignedSize)) \
+  /* Total size. */ \
   V(kSize, 0)

 DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, CODE_DATA_FIELDS)

@@ -70,6 +91,12 @@ class CodeDataContainer : public HeapObject {

   class BodyDescriptor;

+ private:
+  DECL_ACCESSORS(raw_code, Object)
+  inline void set_code_entry_point(Isolate* isolate, Address value);
+
+  friend Factory;
+
   OBJECT_CONSTRUCTORS(CodeDataContainer, HeapObject);
 };

@@ -562,6 +589,9 @@ class Code : public HeapObject {

  private:
   friend class RelocIterator;
+  friend class EvacuateVisitorBase;
+
+  inline CodeDataContainer GCSafeCodeDataContainer(AcquireLoadTag) const;

   bool is_promise_rejection() const;
   bool is_exception_caught() const;

@@ -873,7 +873,7 @@ class CodeDataContainer::BodyDescriptor final : public BodyDescriptorBase {
  public:
   static bool IsValidSlot(Map map, HeapObject obj, int offset) {
     return offset >= CodeDataContainer::kHeaderSize &&
-           offset < CodeDataContainer::kSize;
+           offset <= CodeDataContainer::kPointerFieldsWeakEndOffset;
   }

   template <typename ObjectVisitor>

@@ -884,6 +884,12 @@ class CodeDataContainer::BodyDescriptor final : public BodyDescriptorBase {
     IterateCustomWeakPointers(
         obj, CodeDataContainer::kPointerFieldsStrongEndOffset,
         CodeDataContainer::kPointerFieldsWeakEndOffset, v);
+
+    if (V8_EXTERNAL_CODE_SPACE_BOOL) {
+      // TODO(v8:11880): Currently, the |code| field is still compressed and
+      // the |code_entry_point| field doesn't require custom visitation, so
+      // nothing to do here yet.
+    }
   }

   static inline int SizeOf(Map map, HeapObject object) {

@@ -190,9 +190,9 @@ class Symbol;
   /* Canonical off-heap trampoline data */ \
   V(ByteArray, off_heap_trampoline_relocation_info, \
     OffHeapTrampolineRelocationInfo) \
-  V(CodeDataContainer, trampoline_trivial_code_data_container, \
+  V(HeapObject, trampoline_trivial_code_data_container, \
     TrampolineTrivialCodeDataContainer) \
-  V(CodeDataContainer, trampoline_promise_rejection_code_data_container, \
+  V(HeapObject, trampoline_promise_rejection_code_data_container, \
     TrampolinePromiseRejectionCodeDataContainer) \
   /* Canonical scope infos */ \
   V(ScopeInfo, global_this_binding_scope_info, GlobalThisBindingScopeInfo) \

@@ -386,6 +386,12 @@ void Deserializer::PostProcessNewObject(Handle<Map> map, Handle<HeapObject> obj,
     if (deserializing_user_code()) {
       new_code_objects_.push_back(Handle<Code>::cast(obj));
     }
+  } else if (V8_EXTERNAL_CODE_SPACE_BOOL &&
+             InstanceTypeChecker::IsCodeDataContainer(instance_type)) {
+    auto code_data_container = Handle<CodeDataContainer>::cast(obj);
+    code_data_container->AllocateExternalPointerEntries(isolate());
+    code_data_container->UpdateCodeEntryPoint(isolate(),
+                                              code_data_container->code());
   } else if (InstanceTypeChecker::IsMap(instance_type)) {
     if (FLAG_log_maps) {
       // Keep track of all seen Maps to log them later since they might be only

@@ -42,9 +42,7 @@ void ReadOnlySerializer::SerializeObjectImpl(Handle<HeapObject> obj) {
   // serialize twice.
   if (*obj != ReadOnlyRoots(isolate()).not_mapped_symbol()) {
     if (SerializeHotObject(obj)) return;
-    if (IsRootAndHasBeenSerialized(*obj) && SerializeRoot(obj)) {
-      return;
-    }
+    if (IsRootAndHasBeenSerialized(*obj) && SerializeRoot(obj)) return;
     if (SerializeBackReference(obj)) return;
   }

@@ -1079,13 +1079,19 @@ void Serializer::ObjectSerializer::OutputRawData(Address up_to) {
   } else if (object_->IsDescriptorArray()) {
     // The number of marked descriptors field can be changed by GC
     // concurrently.
-    byte field_value[2];
-    field_value[0] = 0;
-    field_value[1] = 0;
+    static byte field_value[2] = {0};
     OutputRawWithCustomField(
         sink_, object_start, base, bytes_to_output,
         DescriptorArray::kRawNumberOfMarkedDescriptorsOffset,
         sizeof(field_value), field_value);
+  } else if (V8_EXTERNAL_CODE_SPACE_BOOL && object_->IsCodeDataContainer()) {
+    // The CodeEntryPoint field is just a cached value which will be
+    // recomputed after deserialization, so write zeros to keep the snapshot
+    // deterministic.
+    static byte field_value[kExternalPointerSize] = {0};
+    OutputRawWithCustomField(sink_, object_start, base, bytes_to_output,
+                             CodeDataContainer::kCodeEntryPointOffset,
+                             sizeof(field_value), field_value);
   } else {
     sink_->PutRaw(reinterpret_cast<byte*>(object_start + base),
                   bytes_to_output, "Bytes");

@@ -46,6 +46,7 @@ class BuildFlags : public ContextualClass<BuildFlags> {
  public:
   BuildFlags() {
     build_flags_["V8_SFI_HAS_UNIQUE_ID"] = V8_SFI_HAS_UNIQUE_ID;
+    build_flags_["V8_EXTERNAL_CODE_SPACE"] = V8_EXTERNAL_CODE_SPACE_BOOL;
     build_flags_["TAGGED_SIZE_8_BYTES"] = TAGGED_SIZE_8_BYTES;
     build_flags_["TRUE_FOR_TESTING"] = true;
     build_flags_["FALSE_FOR_TESTING"] = false;