Revert "[code] Separate instruction and metadata areas"
This reverts commit b66993bcfb.
Reason for revert: Broke v8 win32 https://ci.chromium.org/p/v8/builders/ci/V8%20Win32/29454?
Original change's description:
> [code] Separate instruction and metadata areas
>
> In this CL, Code object layout changes s.t. the instruction
> area is distinct / non-overlapping from the metadata area.
>
> On-heap Code objects now have a variable-size `body` area,
> containing distinct-but-adjacent `instruction` and `metadata`
> areas.
>
> Off-heap code (= embedded builtins) currently have the same,
> but in the future the metadata area will move elsewhere and
> no longer be adjacent to instructions.
>
> To implement this, the main changes are:
>
> - The Code object header now contains instruction and metadata
> sizes, and no longer contains the safepoint table offset
> (it's implicitly the first table of the metadata section).
> - The embedded metadata table contains information about both
> instruction and metadata areas.
>
> I've also added assertions in spots that currently rely on a
> contiguous body area.
>
> Bug: v8:11036
> Change-Id: I940f0c70c07ad511dafd2d2c3e337de8c92cd4b9
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2491025
> Reviewed-by: Leszek Swirski <leszeks@chromium.org>
> Reviewed-by: Clemens Backes <clemensb@chromium.org>
> Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
> Commit-Queue: Jakob Gruber <jgruber@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#70743}
TBR=jgruber@chromium.org,leszeks@chromium.org,clemensb@chromium.org,dinfuehr@chromium.org
Change-Id: Ia52ac609a47b8a2038a2511f0af8526ebdfe4719
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: v8:11036
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2497381
Reviewed-by: Zhi An Ng <zhin@chromium.org>
Commit-Queue: Zhi An Ng <zhin@chromium.org>
Cr-Commit-Position: refs/heads/master@{#70744}
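For orientation, here is a minimal, hypothetical sketch of the size relationships described in the reverted CL; the struct and field names are simplified stand-ins, not the actual V8 Code object. Under the old layout (restored by this revert) a single instruction size covers the whole body and the metadata tables implicitly begin at the safepoint table offset; under the reverted CL's layout the header stores instruction and metadata sizes separately and the safepoint table offset field is gone.

// Illustrative sketch only -- simplified, assumed names, not V8's actual
// Code object layout.
#include <cstdint>

struct OldCodeLayout {            // state after this revert
  uintptr_t instruction_start;    // start of the whole body
  int instruction_size;           // covers instructions *and* metadata tables
  int safepoint_table_offset;     // metadata implicitly starts here

  uintptr_t metadata_start() const {
    return instruction_start + safepoint_table_offset;
  }
  int metadata_size() const {
    return instruction_size - safepoint_table_offset;
  }
};

struct NewCodeLayout {            // state introduced by the reverted CL
  uintptr_t instruction_start;    // instructions only
  int instruction_size;
  int metadata_size;              // stored explicitly; the safepoint table is
                                  // simply the first table of the metadata area

  uintptr_t metadata_start() const {
    return instruction_start + instruction_size;
  }
  int body_size() const { return instruction_size + metadata_size; }
};

With the explicit split, the embedded-blob writer can record per-builtin instruction and metadata offsets separately, which is what the EmbeddedData::Metadata changes in the diff below undo.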
parent b66993bcfb
commit 9a02964a36
@@ -257,8 +257,8 @@ void SetupIsolateDelegate::ReplacePlaceholders(Isolate* isolate) {
        flush_icache = true;
      }
      if (flush_icache) {
        FlushInstructionCache(code.raw_instruction_start(),
                              code.raw_instruction_size());
        FlushInstructionCache(code.raw_instruction_start_future(),
                              code.raw_instruction_size_future());
      }
    }
  }

@@ -62,12 +62,6 @@ class CodeDesc {
  int code_comments_offset = 0;
  int code_comments_size = 0;

  // TODO(jgruber,v8:11036): Remove these functions once CodeDesc fields have
  // been made consistent with Code layout.
  int body_size() const { return instr_size + unwinding_info_size; }
  int instruction_size() const { return safepoint_table_offset; }
  int metadata_size() const { return body_size() - instruction_size(); }

  // Relocation info is located at the end of the buffer and not part of the
  // instructions area.

@@ -386,12 +386,10 @@ size_t Isolate::HashIsolateForEmbeddedBlob() {
        reinterpret_cast<uint8_t*>(code.ptr() - kHeapObjectTag);

    // These static asserts ensure we don't miss relevant fields. We don't hash
    // instruction/metadata size and flags since they change when creating the
    // off-heap trampolines. Other data fields must remain the same.
    // instruction size and flags since they change when creating the off-heap
    // trampolines. Other data fields must remain the same.
    STATIC_ASSERT(Code::kInstructionSizeOffset == Code::kDataStart);
    STATIC_ASSERT(Code::kMetadataSizeOffset ==
                  Code::kInstructionSizeOffsetEnd + 1);
    STATIC_ASSERT(Code::kFlagsOffset == Code::kMetadataSizeOffsetEnd + 1);
    STATIC_ASSERT(Code::kFlagsOffset == Code::kInstructionSizeOffsetEnd + 1);
    STATIC_ASSERT(Code::kBuiltinIndexOffset == Code::kFlagsOffsetEnd + 1);
    static constexpr int kStartOffset = Code::kBuiltinIndexOffset;

@@ -212,7 +212,9 @@ void CodeStatistics::CollectCodeCommentStatistics(HeapObject obj,
    cit.Next();
  }

  STATIC_ASSERT(Code::kBodyIsContiguous);
  // TODO(jgruber,v8:11036): Revisit this when separating instruction- and
  // metadata areas. The logic will become a bit more complex since these areas
  // will no longer be adjacent in some cases.
  DCHECK(0 <= prev_pc_offset && prev_pc_offset <= code.raw_body_size());
  delta += static_cast<int>(code.raw_body_size() - prev_pc_offset);
  EnterComment(isolate, "NoComment", delta);

@@ -120,8 +120,8 @@ MaybeHandle<Code> Factory::CodeBuilder::BuildInternal(
  }

  // TODO(jgruber,v8:11036): Distinguish instruction and metadata areas.
  STATIC_ASSERT(Code::kOnHeapBodyIsContiguous);
  const int object_size = Code::SizeFor(code_desc_.body_size());
  const int body_size = code_desc_.instr_size + code_desc_.unwinding_info_size;
  const int object_size = Code::SizeFor(body_size);

  Handle<Code> code;
  {

@@ -157,8 +157,8 @@ MaybeHandle<Code> Factory::CodeBuilder::BuildInternal(

    constexpr bool kIsNotOffHeapTrampoline = false;

    code->set_raw_instruction_size(code_desc_.instruction_size());
    code->set_raw_metadata_size(code_desc_.metadata_size());
    // TODO(jgruber,v8:11036): Distinguish instruction and metadata areas.
    code->set_raw_instruction_size(body_size);
    code->set_relocation_info(*reloc_info);
    code->initialize_flags(kind_, is_turbofanned_, stack_slots_,
                           kIsNotOffHeapTrampoline);

@@ -167,6 +167,7 @@ MaybeHandle<Code> Factory::CodeBuilder::BuildInternal(
    code->set_code_data_container(*data_container, kReleaseStore);
    code->set_deoptimization_data(*deoptimization_data_);
    code->set_source_position_table(*source_position_table_);
    code->set_safepoint_table_offset(code_desc_.safepoint_table_offset);
    code->set_handler_table_offset(code_desc_.handler_table_offset);
    code->set_constant_pool_offset(code_desc_.constant_pool_offset);
    code->set_code_comments_offset(code_desc_.code_comments_offset);

@@ -2056,11 +2057,6 @@ Handle<Code> Factory::NewOffHeapTrampolineFor(Handle<Code> code,
      isolate(), off_heap_entry,
      code->code_data_container(kAcquireLoad).kind_specific_flags(),
      generate_jump_to_instruction_stream);

  // Trampolines may not contain any metadata since all metadata offsets,
  // stored on the Code object, refer to the off-heap metadata area.
  CHECK_EQ(result->raw_metadata_size(), 0);

  // The CodeDataContainer should not be modified beyond this point since it's
  // now possibly canonicalized.

@@ -2075,6 +2071,7 @@ Handle<Code> Factory::NewOffHeapTrampolineFor(Handle<Code> code,
    result->initialize_flags(code->kind(), code->is_turbofanned(), stack_slots,
                             set_is_off_heap_trampoline);
    result->set_builtin_index(code->builtin_index());
    result->set_safepoint_table_offset(code->safepoint_table_offset());
    result->set_handler_table_offset(code->handler_table_offset());
    result->set_constant_pool_offset(code->constant_pool_offset());
    result->set_code_comments_offset(code->code_comments_offset());

@@ -164,7 +164,7 @@ OBJECT_CONSTRUCTORS_IMPL(Code, HeapObject)
NEVER_READ_ONLY_SPACE_IMPL(Code)

INT_ACCESSORS(Code, raw_instruction_size, kInstructionSizeOffset)
INT_ACCESSORS(Code, raw_metadata_size, kMetadataSizeOffset)
INT_ACCESSORS(Code, safepoint_table_offset, kSafepointTableOffsetOffset)
INT_ACCESSORS(Code, handler_table_offset, kHandlerTableOffsetOffset)
INT_ACCESSORS(Code, code_comments_offset, kCodeCommentsOffsetOffset)
INT32_ACCESSORS(Code, unwinding_info_offset, kUnwindingInfoOffsetOffset)

@@ -222,30 +222,18 @@ void Code::set_next_code_link(Object value) {

Address Code::raw_body_start() const { return raw_instruction_start(); }

Address Code::BodyStart() const {
  STATIC_ASSERT(kBodyIsContiguous);
  return InstructionStart();
}

Address Code::raw_body_end() const {
  return raw_body_start() + raw_body_size();
}
Address Code::raw_body_end() const { return raw_instruction_end(); }

int Code::raw_body_size() const {
  return raw_instruction_size() + raw_metadata_size();
}

Address Code::BodyEnd() const {
  STATIC_ASSERT(kBodyIsContiguous);
  return MetadataEnd();
  // TODO(jgruber,v8:11036): Distinguish instruction and metadata areas.
  DCHECK_EQ(unwinding_info_offset() + unwinding_info_size(), InstructionSize());
  return raw_instruction_size();
}

int Code::BodySize() const {
  // TODO(jgruber,v8:11036): Update once embedded instructions and metadata are
  // separate.
  return V8_UNLIKELY(is_off_heap_trampoline())
             ? OffHeapInstructionSize() + OffHeapMetadataSize()
             : raw_body_size();
  // TODO(jgruber,v8:11036): Distinguish instruction and metadata areas.
  DCHECK_EQ(unwinding_info_offset() + unwinding_info_size(), InstructionSize());
  return InstructionSize();
}

int Code::InstructionSize() const {

@@ -271,37 +259,27 @@ Address Code::InstructionEnd() const {
             : raw_instruction_end();
}

Address Code::raw_instruction_start_future() const {
  return raw_instruction_start();
}

Address Code::raw_instruction_end_future() const {
  return raw_metadata_start();
}

int Code::raw_instruction_size_future() const {
  return raw_instruction_size() - raw_metadata_size();
}

Address Code::raw_metadata_start() const {
  return raw_instruction_start() + raw_instruction_size();
  return raw_instruction_start() + safepoint_table_offset();
}

Address Code::MetadataStart() const {
  STATIC_ASSERT(kBodyIsContiguous);
  return V8_UNLIKELY(is_off_heap_trampoline()) ? OffHeapMetadataStart()
                                               : raw_metadata_start();
}
Address Code::raw_metadata_end() const { return raw_instruction_end(); }

Address Code::raw_metadata_end() const {
  return raw_metadata_start() + raw_metadata_size();
}

Address Code::MetadataEnd() const {
  STATIC_ASSERT(kBodyIsContiguous);
  return V8_UNLIKELY(is_off_heap_trampoline()) ? OffHeapMetadataEnd()
                                               : raw_metadata_end();
}

int Code::MetadataSize() const {
  STATIC_ASSERT(kBodyIsContiguous);
  return V8_UNLIKELY(is_off_heap_trampoline()) ? OffHeapMetadataSize()
                                               : raw_metadata_size();
}

int Code::safepoint_table_offset() const {
  // TODO(jgruber,v8:11036): Update once embedded instructions and metadata are
  // separate.
  STATIC_ASSERT(kBodyIsContiguous);
  return InstructionSize();
int Code::raw_metadata_size() const {
  DCHECK_LE(raw_metadata_start(), raw_metadata_end());
  return static_cast<int>(raw_metadata_end() - raw_metadata_start());
}

int Code::SizeIncludingMetadata() const {

@@ -577,7 +555,7 @@ Address Code::unwinding_info_start() const {
  return InstructionStart() + unwinding_info_offset();
}

Address Code::unwinding_info_end() const { return MetadataEnd(); }
Address Code::unwinding_info_end() const { return InstructionEnd(); }

int Code::unwinding_info_size() const {
  DCHECK_GE(unwinding_info_end(), unwinding_info_start());

@@ -88,13 +88,14 @@ void Code::Relocate(intptr_t delta) {
}

void Code::FlushICache() const {
  // TODO(jgruber,v8:11036): This should likely flush only actual instructions,
  // not metadata.
  FlushInstructionCache(raw_instruction_start(), raw_instruction_size());
}

void Code::CopyFromNoFlush(Heap* heap, const CodeDesc& desc) {
  // Copy code.
  // TODO(jgruber,v8:11036): Distinguish instruction and metadata areas.
  STATIC_ASSERT(kOnHeapBodyIsContiguous);
  CopyBytes(reinterpret_cast<byte*>(raw_instruction_start()), desc.buffer,
            static_cast<size_t>(desc.instr_size));
  // TODO(jgruber,v8:11036): Merge with the above.

@@ -139,60 +140,29 @@ SafepointEntry Code::GetSafepointEntry(Address pc) {

int Code::OffHeapInstructionSize() const {
  DCHECK(is_off_heap_trampoline());
  if (Isolate::CurrentEmbeddedBlobCode() == nullptr) {
  if (Isolate::CurrentEmbeddedBlobCode() == nullptr)
    return raw_instruction_size();
  }
  EmbeddedData d = EmbeddedData::FromBlob();
  return d.InstructionSizeOfBuiltin(builtin_index());
}

Address Code::OffHeapInstructionStart() const {
  DCHECK(is_off_heap_trampoline());
  if (Isolate::CurrentEmbeddedBlobCode() == nullptr) {
    return raw_instruction_size();
  }
  if (Isolate::CurrentEmbeddedBlobCode() == nullptr)
    return raw_instruction_start();
  EmbeddedData d = EmbeddedData::FromBlob();
  return d.InstructionStartOfBuiltin(builtin_index());
}

Address Code::OffHeapInstructionEnd() const {
  DCHECK(is_off_heap_trampoline());
  if (Isolate::CurrentEmbeddedBlobCode() == nullptr) {
    return raw_instruction_size();
  }
  if (Isolate::CurrentEmbeddedBlobCode() == nullptr)
    return raw_instruction_end();
  EmbeddedData d = EmbeddedData::FromBlob();
  return d.InstructionStartOfBuiltin(builtin_index()) +
         d.InstructionSizeOfBuiltin(builtin_index());
}

int Code::OffHeapMetadataSize() const {
  DCHECK(is_off_heap_trampoline());
  if (Isolate::CurrentEmbeddedBlobCode() == nullptr) {
    return raw_instruction_size();
  }
  EmbeddedData d = EmbeddedData::FromBlob();
  return d.MetadataSizeOfBuiltin(builtin_index());
}

Address Code::OffHeapMetadataStart() const {
  DCHECK(is_off_heap_trampoline());
  if (Isolate::CurrentEmbeddedBlobCode() == nullptr) {
    return raw_instruction_size();
  }
  EmbeddedData d = EmbeddedData::FromBlob();
  return d.MetadataStartOfBuiltin(builtin_index());
}

Address Code::OffHeapMetadataEnd() const {
  DCHECK(is_off_heap_trampoline());
  if (Isolate::CurrentEmbeddedBlobCode() == nullptr) {
    return raw_instruction_size();
  }
  EmbeddedData d = EmbeddedData::FromBlob();
  return d.MetadataStartOfBuiltin(builtin_index()) +
         d.MetadataSizeOfBuiltin(builtin_index());
}

int AbstractCode::SourcePosition(int offset) {
  Object maybe_table = source_position_table();
  if (maybe_table.IsException()) return kNoSourcePosition;

@@ -83,45 +83,45 @@ class Code : public HeapObject {
  //  +--------------------------+
  //  |          header          |
  //  | padded to code alignment |
  //  +--------------------------+  <-- raw_body_start()
  //  |       instructions       |   == raw_instruction_start() (IS)
  //  +--------------------------+  <-- raw_instruction_start() (= IS)
  //  |       instructions       |
  //  |           ...            |
  //  +--------------------------+  <-- raw_instruction_end()
  //  |         metadata         |   == raw_metadata_start()
  //  +--------------------------+  <-- raw_metadata_start()
  //  |         metadata         |
  //  |           ...            |   == IS + safepoint_table_offset()
  //  |                          |
  //  |                          |  <-- IS + handler_table_offset()
  //  |                          |  <-- IS + constant_pool_offset()
  //  |                          |  <-- IS + code_comments_offset()
  //  |                          |  <-- IS + unwinding_info_offset()
  //  | padded to obj alignment  |
  //  +--------------------------+  <-- raw_metadata_end() == raw_body_end()
  //  +--------------------------+  <-- raw_metadata_end()
  //  |                          |   == raw_instruction_end()
  //  | padded to code alignment |
  //  +--------------------------+
  //
  // In other words, the variable-size 'body' consists of 'instructions' and
  // 'metadata'.
  // 'metadata'. Both are currently inside [InstructionStart,InstructionEnd[.
  //
  // Note the accessor functions below may be prefixed with 'raw'. In this case,
  // raw accessors (e.g. raw_instruction_start) always refer to the on-heap
  // Code object, while camel-case accessors (e.g. InstructionStart) may refer
  // to an off-heap area in the case of embedded builtins.

  // TODO(jgruber,v8:11036): Update once no longer true for embedded builtins.
  static constexpr bool kOnHeapBodyIsContiguous = true;
  static constexpr bool kOffHeapBodyIsContiguous = true;
  static constexpr bool kBodyIsContiguous =
      kOnHeapBodyIsContiguous && kOffHeapBodyIsContiguous;

  inline Address raw_body_start() const;
  inline Address BodyStart() const;
  inline Address raw_body_end() const;
  inline Address BodyEnd() const;
  inline int raw_body_size() const;

  // TODO(jgruber,v8:11036): Replace this once the off-heap instruction and
  // metadata areas are separate.
  inline int BodySize() const;

  // TODO(jgruber,v8:11036): Shrink the instructions area to contain only
  // instructions. Until that happens, the state of the world is:
  //
  //   raw_body_start == raw_instruction_start
  //   raw_metadata_start == raw_instruction_start + safepoint_table_offset
  //   raw_body_end == raw_instruction_end == raw_metadata_end

  inline Address raw_instruction_start() const;
  inline Address InstructionStart() const;
  V8_EXPORT_PRIVATE Address OffHeapInstructionStart() const;

@@ -135,23 +135,21 @@ class Code : public HeapObject {
  inline int InstructionSize() const;
  V8_EXPORT_PRIVATE int OffHeapInstructionSize() const;

  inline Address raw_metadata_start() const;
  inline Address MetadataStart() const;
  Address OffHeapMetadataStart() const;
  inline Address raw_metadata_end() const;
  inline Address MetadataEnd() const;
  V8_EXPORT_PRIVATE Address OffHeapMetadataEnd() const;
  inline int raw_metadata_size() const;
  inline void set_raw_metadata_size(int value);
  inline int MetadataSize() const;
  int OffHeapMetadataSize() const;
  // TODO(jgruber,v8:11036): Replace legacy accessors with these _future
  // accessors. The _future accessors only refer to the range of executable
  // instructions, *without* metadata tables.
  inline Address raw_instruction_start_future() const;
  inline Address raw_instruction_end_future() const;
  inline int raw_instruction_size_future() const;

  // TODO(jgruber,v8:11036): Change all these offsets to be relative to
  // MetadataStart instead of InstructionStart.
  inline Address raw_metadata_start() const;
  inline Address raw_metadata_end() const;
  inline int raw_metadata_size() const;

  // [safepoint_table_offset]: If {has_safepoint_info()}, the offset in the
  // instruction stream where the safepoint table starts.
  inline int safepoint_table_offset() const;
  inline void set_safepoint_table_offset(int offset);
  Address SafepointTableAddress() const;
  int safepoint_table_size() const;
  bool has_safepoint_table() const;

@@ -415,12 +413,11 @@ class Code : public HeapObject {
  /* Objects embedded into code is visited via reloc info. */ \
  V(kDataStart, 0) \
  V(kInstructionSizeOffset, kIntSize) \
  V(kMetadataSizeOffset, kIntSize) \
  V(kFlagsOffset, kInt32Size) \
  V(kBuiltinIndexOffset, kIntSize) \
  V(kInlinedBytecodeSizeOffset, kIntSize) \
  /* Offsets describing inline metadata tables, relative to */ \
  /* InstructionStart. */ \
  /* Offsets describing inline metadata tables. */ \
  V(kSafepointTableOffsetOffset, kIntSize) \
  V(kHandlerTableOffsetOffset, kIntSize) \
  V(kConstantPoolOffsetOffset, \
    FLAG_enable_embedded_constant_pool ? kIntSize : 0) \

@@ -172,7 +172,7 @@ int NativeRegExpMacroAssembler::CheckStackGuardState(
  DisallowHeapAllocation no_gc;
  Address old_pc = PointerAuthentication::AuthenticatePC(return_address, 0);
  DCHECK_LE(re_code.raw_instruction_start(), old_pc);
  DCHECK_LE(old_pc, re_code.raw_instruction_end());
  DCHECK_LE(old_pc, re_code.raw_instruction_end_future());

  StackLimitCheck check(isolate);
  bool js_has_overflowed = check.JsHasOverflowed();

@@ -224,16 +224,11 @@ EmbeddedData EmbeddedData::FromIsolate(Isolate* isolate) {
                  Builtins::name(i));
    }

    uint32_t instruction_size =
        static_cast<uint32_t>(code.raw_instruction_size());
    uint32_t metadata_size = static_cast<uint32_t>(code.raw_metadata_size());
    uint32_t length = instruction_size + metadata_size;
    uint32_t length = static_cast<uint32_t>(code.raw_body_size());

    DCHECK_EQ(0, raw_code_size % kCodeAlignment);
    metadata[i].instruction_offset = raw_code_size;
    metadata[i].instruction_length = instruction_size;
    metadata[i].metadata_offset = raw_code_size + instruction_size;
    metadata[i].metadata_length = metadata_size;
    metadata[i].instructions_offset = raw_code_size;
    metadata[i].instructions_length = length;

    // Align the start of each instruction stream.
    raw_code_size += PadAndAlign(length);

@@ -270,9 +265,8 @@ EmbeddedData EmbeddedData::FromIsolate(Isolate* isolate) {
  // Write the raw data section.
  STATIC_ASSERT(Builtins::kAllBuiltinsAreIsolateIndependent);
  for (int i = 0; i < Builtins::builtin_count; i++) {
    STATIC_ASSERT(Code::kBodyIsContiguous);
    Code code = builtins->builtin(i);
    uint32_t offset = metadata[i].instruction_offset;
    uint32_t offset = metadata[i].instructions_offset;
    uint8_t* dst = raw_code_start + offset;
    DCHECK_LE(RawCodeOffset() + offset + code.raw_body_size(), blob_code_size);
    std::memcpy(dst, reinterpret_cast<uint8_t*>(code.raw_body_start()),

@@ -303,30 +297,17 @@ EmbeddedData EmbeddedData::FromIsolate(Isolate* isolate) {
Address EmbeddedData::InstructionStartOfBuiltin(int i) const {
  DCHECK(Builtins::IsBuiltinId(i));
  const struct Metadata* metadata = Metadata();
  const uint8_t* result = RawCode() + metadata[i].instruction_offset;
  DCHECK_LT(result, code_ + code_size_);
  const uint8_t* result = RawCode() + metadata[i].instructions_offset;
  DCHECK_LE(result, code_ + code_size_);
  DCHECK_IMPLIES(result == code_ + code_size_,
                 InstructionSizeOfBuiltin(i) == 0);
  return reinterpret_cast<Address>(result);
}

uint32_t EmbeddedData::InstructionSizeOfBuiltin(int i) const {
  DCHECK(Builtins::IsBuiltinId(i));
  const struct Metadata* metadata = Metadata();
  return metadata[i].instruction_length;
}

Address EmbeddedData::MetadataStartOfBuiltin(int i) const {
  DCHECK(Builtins::IsBuiltinId(i));
  STATIC_ASSERT(Code::kOffHeapBodyIsContiguous);
  const struct Metadata* metadata = Metadata();
  const uint8_t* result = RawCode() + metadata[i].metadata_offset;
  DCHECK_LT(result, code_ + code_size_);
  return reinterpret_cast<Address>(result);
}

uint32_t EmbeddedData::MetadataSizeOfBuiltin(int i) const {
  DCHECK(Builtins::IsBuiltinId(i));
  const struct Metadata* metadata = Metadata();
  return metadata[i].metadata_length;
  return metadata[i].instructions_length;
}

Address EmbeddedData::InstructionStartOfBytecodeHandlers() const {

@@ -75,8 +75,7 @@ class EmbeddedData final {
  Address InstructionStartOfBytecodeHandlers() const;
  Address InstructionEndOfBytecodeHandlers() const;

  Address MetadataStartOfBuiltin(int i) const;
  uint32_t MetadataSizeOfBuiltin(int i) const;
  bool ContainsBuiltin(int i) const { return InstructionSizeOfBuiltin(i) > 0; }

  uint32_t AddressForHashing(Address addr) {
    Address start = reinterpret_cast<Address>(code_);

@@ -85,12 +84,9 @@ class EmbeddedData final {
  }

  // Padded with kCodeAlignment.
  // TODO(v8:11045): Consider removing code alignment.
  uint32_t PaddedInstructionSizeOfBuiltin(int i) const {
    STATIC_ASSERT(Code::kOffHeapBodyIsContiguous);
    uint32_t size = InstructionSizeOfBuiltin(i) + MetadataSizeOfBuiltin(i);
    CHECK_NE(size, 0);
    return PadAndAlign(size);
    uint32_t size = InstructionSizeOfBuiltin(i);
    return (size == 0) ? 0 : PadAndAlign(size);
  }

  size_t CreateEmbeddedBlobHash() const;

@@ -103,28 +99,14 @@ class EmbeddedData final {
    return *reinterpret_cast<const size_t*>(metadata_ + IsolateHashOffset());
  }

  // Blob layout information for a single instruction stream. Corresponds
  // roughly to Code object layout (see the instruction and metadata area).
  // TODO(jgruber): With the addition of metadata sections in Code objects,
  // naming here has become confusing. Metadata refers to both this struct
  // and the Code section, and the embedded instruction area currently
  // contains both Code's instruction and metadata areas. Fix it.
  struct Metadata {
    // The offset and (unpadded) length of this builtin's instruction area
    // from the start of the embedded code section.
    uint32_t instruction_offset;
    uint32_t instruction_length;
    // The offset and (unpadded) length of this builtin's metadata area
    // from the start of the embedded code section.
    // TODO(jgruber,v8:11036): Move this to the embedded metadata area.
    uint32_t metadata_offset;
    uint32_t metadata_length;
    // Blob layout information.
    uint32_t instructions_offset;
    uint32_t instructions_length;
  };
  STATIC_ASSERT(offsetof(Metadata, instruction_offset) == 0 * kUInt32Size);
  STATIC_ASSERT(offsetof(Metadata, instruction_length) == 1 * kUInt32Size);
  STATIC_ASSERT(offsetof(Metadata, metadata_offset) == 2 * kUInt32Size);
  STATIC_ASSERT(offsetof(Metadata, metadata_length) == 3 * kUInt32Size);
  STATIC_ASSERT(sizeof(Metadata) == 4 * kUInt32Size);
  STATIC_ASSERT(offsetof(Metadata, instructions_offset) == 0);
  STATIC_ASSERT(offsetof(Metadata, instructions_length) == kUInt32Size);
  STATIC_ASSERT(sizeof(Metadata) == kUInt32Size + kUInt32Size);

  // The layout of the blob is as follows:
  //

@@ -156,8 +156,8 @@ void EmbeddedFileWriter::WriteInstructionStreams(
  w->AlignToCodeAlignment();
  w->DeclareLabel(EmbeddedBlobCodeDataSymbol().c_str());

  STATIC_ASSERT(Builtins::kAllBuiltinsAreIsolateIndependent);
  for (int i = 0; i < i::Builtins::builtin_count; i++) {
    if (!blob->ContainsBuiltin(i)) continue;
    WriteBuiltin(w, blob, i);
  }
  w->Newline();

@@ -109,12 +109,12 @@ void EmitUnwindData(PlatformEmbeddedFileWriterWin* w,
  w->Comment(" UnwindInfoAddress");
  w->StartPdataSection();
  {
    STATIC_ASSERT(Builtins::kAllBuiltinsAreIsolateIndependent);
    Address prev_builtin_end_offset = 0;
    for (int i = 0; i < Builtins::builtin_count; i++) {
      // Some builtins are leaf functions from the point of view of Win64 stack
      // walking: they do not move the stack pointer and do not require a PDATA
      // entry because the return address can be retrieved from [rsp].
      if (!blob->ContainsBuiltin(i)) continue;
      if (unwind_infos[i].is_leaf_function()) continue;

      uint64_t builtin_start_offset = blob->InstructionStartOfBuiltin(i) -

@@ -193,8 +193,8 @@ void EmitUnwindData(PlatformEmbeddedFileWriterWin* w,
    std::vector<int> code_chunks;
    std::vector<win64_unwindinfo::FrameOffsets> fp_adjustments;

    STATIC_ASSERT(Builtins::kAllBuiltinsAreIsolateIndependent);
    for (int i = 0; i < Builtins::builtin_count; i++) {
      if (!blob->ContainsBuiltin(i)) continue;
      if (unwind_infos[i].is_leaf_function()) continue;

      uint64_t builtin_start_offset = blob->InstructionStartOfBuiltin(i) -

@@ -896,9 +896,9 @@ WasmCode* NativeModule::AddCodeForTesting(Handle<Code> code) {
    source_pos_table->copy_out(0, source_pos.start(),
                               source_pos_table->length());
  }
  STATIC_ASSERT(Code::kBodyIsContiguous);
  Vector<const byte> instructions(reinterpret_cast<byte*>(code->BodyStart()),
                                  static_cast<size_t>(code->BodySize()));
  Vector<const byte> instructions(
      reinterpret_cast<byte*>(code->InstructionStart()),
      static_cast<size_t>(code->InstructionSize()));
  const int stack_slots = code->has_safepoint_info() ? code->stack_slots() : 0;

  // TODO(jgruber,v8:8758): Remove this translation. It exists only because

@@ -1370,10 +1370,10 @@ void NativeModule::AddCodeSpace(
      WASM_RUNTIME_STUB_LIST(RUNTIME_STUB, RUNTIME_STUB_TRAP)};
#undef RUNTIME_STUB
#undef RUNTIME_STUB_TRAP
  STATIC_ASSERT(Builtins::kAllBuiltinsAreIsolateIndependent);
  Address builtin_addresses[WasmCode::kRuntimeStubCount];
  for (int i = 0; i < WasmCode::kRuntimeStubCount; ++i) {
    Builtins::Name builtin = stub_names[i];
    CHECK(embedded_data.ContainsBuiltin(builtin));
    builtin_addresses[i] = embedded_data.InstructionStartOfBuiltin(builtin);
  }
  JumpTableAssembler::GenerateFarJumpTable(

@@ -91,7 +91,7 @@ TEST(CodeLayoutWithUnwindingInfo) {
          .Build();

  CHECK(code->has_unwinding_info());
  CHECK_EQ(code->raw_body_size(), buffer_size + unwinding_info_size);
  CHECK_EQ(code->raw_instruction_size(), buffer_size + unwinding_info_size);
  CHECK_EQ(0, memcmp(reinterpret_cast<void*>(code->raw_instruction_start()),
                     buffer, buffer_size));
  CHECK_EQ(code->unwinding_info_size(), unwinding_info_size);