[ptr-compr][arm64] Use ldr_w to load on 32 bits for CompressedHeapConstants

Adds basic support for CompressedHeapConstants to Arm64 by moving to an ldr_w
instruction and passing COMPRESSED_EMBEDDED_OBJECT as the RelocInfo mode.
However, we still haven't made COMPRESSED_EMBEDDED_OBJECT constants actually
compressed in the code stream (they still take up a full 64 bits). Support for
this will be added next.

Adds a test on the macro assembler that checks that
RelocInfo::COMPRESSED_EMBEDDED_OBJECT flows through correctly.

Cq-Include-Trybots: luci.v8.try:v8_linux64_pointer_compression_rel_ng
Cq-Include-Trybots: luci.v8.try:v8_linux64_arm64_pointer_compression_rel_ng
Bug: v8:8977, v8:7703, v8:9298
Change-Id: Ibc64cdfdd85d5cdfa060ed6227b10bb47eae3a8a
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1635692
Reviewed-by: Ross McIlroy <rmcilroy@chromium.org>
Commit-Queue: Sigurd Schneider <sigurds@chromium.org>
Cr-Commit-Position: refs/heads/master@{#62306}
This commit is contained in:
Sigurd Schneider 2019-06-18 16:54:18 +02:00 committed by Commit Bot
parent e446f182f8
commit 7617d1a1e7
8 changed files with 184 additions and 28 deletions

View File

@ -200,6 +200,7 @@ struct ImmediateInitializer {
// Plain integral/enum immediates carry no relocation information.
static inline RelocInfo::Mode rmode_for(T) { return RelocInfo::NONE; }
// Widens an integral or enum value to the 64-bit immediate representation.
static inline int64_t immediate_for(T t) {
STATIC_ASSERT(sizeof(T) <= 8);
STATIC_ASSERT(std::is_integral<T>::value || std::is_enum<T>::value);
return t;
}
};
@ -223,9 +224,10 @@ struct ImmediateInitializer<ExternalReference> {
};
// Creates an Immediate from a heap object handle, tagging it with the given
// embedded-object relocation mode (the declaration defaults {mode} to
// FULL_EMBEDDED_OBJECT; COMPRESSED_EMBEDDED_OBJECT selects a 32-bit slot).
// The stale pre-change single-argument definition has been removed: it
// conflicted with this defaulted-parameter overload.
template <typename T>
Immediate::Immediate(Handle<T> handle, RelocInfo::Mode mode)
    : value_(static_cast<intptr_t>(handle.address())), rmode_(mode) {
  // Handle-based immediates only make sense with embedded-object modes.
  DCHECK(RelocInfo::IsEmbeddedObjectMode(mode));
}
template <typename T>
Immediate::Immediate(T t)
@ -476,7 +478,7 @@ void Assembler::Unreachable() {
// Returns the address of the constant-pool slot that the PC-relative literal
// load at {pc} reads from. Both 64-bit (ldr_x) and 32-bit (ldr_w) literal
// loads are supported; the latter is used for compressed embedded objects.
// The stale pre-change DCHECK(instr->IsLdrLiteralX()) has been removed: it
// would fire for every ldr_w literal.
Address Assembler::target_pointer_address_at(Address pc) {
  Instruction* instr = reinterpret_cast<Instruction*>(pc);
  DCHECK(instr->IsLdrLiteralX() || instr->IsLdrLiteralW());
  return reinterpret_cast<Address>(instr->ImmPCOffsetTarget());
}
@ -491,6 +493,13 @@ Address Assembler::target_address_at(Address pc, Address constant_pool) {
}
}
// Reads the 32-bit compressed tagged value out of the literal-pool slot that
// the ldr_w at {pc} loads from. {constant_pool} is unused on arm64 (the slot
// is located PC-relatively via target_pointer_address_at).
Tagged_t Assembler::target_compressed_address_at(Address pc,
                                                 Address constant_pool) {
  // Compressed targets are only ever loaded through 32-bit literals.
  CHECK(reinterpret_cast<Instruction*>(pc)->IsLdrLiteralW());
  return Memory<Tagged_t>(target_pointer_address_at(pc));
}
Handle<Code> Assembler::code_target_object_handle_at(Address pc) {
Instruction* instr = reinterpret_cast<Instruction*>(pc);
if (instr->IsLdrLiteralX()) {
@ -506,16 +515,26 @@ Handle<Code> Assembler::code_target_object_handle_at(Address pc) {
// Returns the embedded-object index stored in the constant-pool slot that the
// literal load at {pc} references. A 64-bit (ldr_x) literal holds a
// full-width index; a 32-bit (ldr_w) literal holds a 32-bit one, which is
// zero-extended on return. The stale pre-change CHECK(IsLdrLiteralX()) and
// unconditional read have been removed: they made the ldr_w branch dead and
// would crash on compressed literals.
AssemblerBase::EmbeddedObjectIndex
Assembler::embedded_object_index_referenced_from(Address pc) {
  Instruction* instr = reinterpret_cast<Instruction*>(pc);
  if (instr->IsLdrLiteralX()) {
    STATIC_ASSERT(sizeof(EmbeddedObjectIndex) == sizeof(intptr_t));
    return Memory<EmbeddedObjectIndex>(target_pointer_address_at(pc));
  } else {
    // Compressed embedded objects use a 32-bit literal slot.
    DCHECK(instr->IsLdrLiteralW());
    return Memory<uint32_t>(target_pointer_address_at(pc));
  }
}
// Stores {data} into the constant-pool slot referenced by the literal load at
// {pc}. For a 32-bit (ldr_w) literal the index must fit in 32 bits and only
// the low word is written. The stale pre-change CHECK(IsLdrLiteralX()) and
// unconditional 64-bit store have been removed: they made the ldr_w branch
// dead and would clobber 64 bits of a 32-bit slot.
void Assembler::set_embedded_object_index_referenced_from(
    Address pc, EmbeddedObjectIndex data) {
  Instruction* instr = reinterpret_cast<Instruction*>(pc);
  if (instr->IsLdrLiteralX()) {
    Memory<EmbeddedObjectIndex>(target_pointer_address_at(pc)) = data;
  } else {
    DCHECK(instr->IsLdrLiteralW());
    DCHECK(is_uint32(data));
    // Only the low 32 bits are stored; the slot need not be 64-bit aligned,
    // hence the unaligned write helper.
    WriteUnalignedValue<uint32_t>(target_pointer_address_at(pc),
                                  static_cast<uint32_t>(data));
  }
}
Handle<HeapObject> Assembler::target_object_handle_at(Address pc) {
@ -596,12 +615,21 @@ void Assembler::set_target_address_at(Address pc, Address constant_pool,
}
}
// Patches the 32-bit compressed tagged value in the literal-pool slot read by
// the ldr_w at {pc}. {constant_pool} is unused on arm64 (the slot is located
// PC-relatively). {icache_flush_mode} is not consulted here since only pool
// data, not an instruction, is rewritten — NOTE(review): confirm no icache
// flush is required for literal-pool stores.
void Assembler::set_target_compressed_address_at(
Address pc, Address constant_pool, Tagged_t target,
ICacheFlushMode icache_flush_mode) {
Instruction* instr = reinterpret_cast<Instruction*>(pc);
// Compressed targets are only ever stored behind 32-bit literal loads.
CHECK(instr->IsLdrLiteralW());
Memory<Tagged_t>(target_pointer_address_at(pc)) = target;
}
// Returns the size in bytes of the value this relocation patches: the special
// (branch) target size, kTaggedSize for a 32-bit compressed literal (ldr_w),
// or a full system pointer for an ldr_x literal. The stale pre-change DCHECK
// and unconditional `return kSystemPointerSize;` have been removed: they
// dead-coded the new logic and reported the wrong size for compressed slots.
int RelocInfo::target_address_size() {
  if (IsCodedSpecially()) {
    return Assembler::kSpecialTargetSize;
  } else {
    Instruction* instr = reinterpret_cast<Instruction*>(pc_);
    DCHECK(instr->IsLdrLiteralX() || instr->IsLdrLiteralW());
    return instr->IsLdrLiteralW() ? kTaggedSize : kSystemPointerSize;
  }
}
@ -640,17 +668,29 @@ Address RelocInfo::constant_pool_entry_address() {
}
// Returns the HeapObject this relocation refers to. Compressed embedded
// objects are decompressed relative to the host code object's address; full
// objects are read directly from the literal pool. The stale pre-change
// DCHECK(IsFullEmbeddedObject) and unconditional return have been removed:
// they failed for compressed objects and dead-coded the new branch.
HeapObject RelocInfo::target_object() {
  DCHECK(IsCodeTarget(rmode_) || IsEmbeddedObjectMode(rmode_));
  if (IsCompressedEmbeddedObject(rmode_)) {
    return HeapObject::cast(Object(DecompressTaggedAny(
        host_.address(),
        Assembler::target_compressed_address_at(pc_, constant_pool_))));
  } else {
    return HeapObject::cast(
        Object(Assembler::target_address_at(pc_, constant_pool_)));
  }
}
// Like target_object(), but decompresses relative to {isolate} rather than
// the host code object, for callers that have no host available. The stale
// pre-change unconditional `return target_object();` has been removed: it
// made the compressed-object path unreachable.
HeapObject RelocInfo::target_object_no_host(Isolate* isolate) {
  if (IsCompressedEmbeddedObject(rmode_)) {
    return HeapObject::cast(Object(DecompressTaggedAny(
        isolate,
        Assembler::target_compressed_address_at(pc_, constant_pool_))));
  } else {
    return target_object();
  }
}
Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) {
if (IsFullEmbeddedObject(rmode_)) {
if (IsEmbeddedObjectMode(rmode_)) {
return origin->target_object_handle_at(pc_);
} else {
DCHECK(IsCodeTarget(rmode_));
@ -661,9 +701,15 @@ Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) {
// Updates the relocation's target object: compressed slots receive the
// compressed pointer, full slots the raw address. Emits a code write barrier
// for the host object unless suppressed. The stale pre-change DCHECK and
// unconditional set_target_address_at have been removed: they would write a
// 64-bit pointer into a 32-bit compressed slot. The function's closing brace,
// dropped in the rendered hunk, is restored.
void RelocInfo::set_target_object(Heap* heap, HeapObject target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || IsEmbeddedObjectMode(rmode_));
  if (IsCompressedEmbeddedObject(rmode_)) {
    Assembler::set_target_compressed_address_at(
        pc_, constant_pool_, CompressTagged(target.ptr()), icache_flush_mode);
  } else {
    DCHECK(IsFullEmbeddedObject(rmode_));
    Assembler::set_target_address_at(pc_, constant_pool_, target.ptr(),
                                     icache_flush_mode);
  }
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && !host().is_null()) {
    WriteBarrierForCode(host(), this, target);
  }
}
@ -711,11 +757,14 @@ Address RelocInfo::target_off_heap_target() {
}
// Overwrites the relocated value with kNullAddress so it no longer holds a
// live reference. Internal references are patched in place at pc_; compressed
// embedded objects go through the 32-bit pool setter; everything else through
// the full-width setter. The stale, unterminated pre-change first DCHECK line
// (a syntax error in the rendered hunk) has been removed and the dropped
// closing brace restored.
void RelocInfo::WipeOut() {
  DCHECK(IsEmbeddedObjectMode(rmode_) || IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) ||
         IsInternalReference(rmode_) || IsOffHeapTarget(rmode_));
  if (IsInternalReference(rmode_)) {
    WriteUnalignedValue<Address>(pc_, kNullAddress);
  } else if (IsCompressedEmbeddedObject(rmode_)) {
    Assembler::set_target_compressed_address_at(pc_, constant_pool_,
                                                kNullAddress);
  } else {
    Assembler::set_target_address_at(pc_, constant_pool_, kNullAddress);
  }
}

View File

@ -1284,9 +1284,6 @@ void Assembler::ldr(const CPURegister& rt, const Operand& operand) {
}
void Assembler::ldr(const CPURegister& rt, const Immediate& imm) {
// Currently we only support 64-bit literals.
DCHECK(rt.Is64Bits());
BlockPoolsScope no_pool_before_ldr_pcrel_instr(this);
RecordRelocInfo(imm.rmode(), imm.value());
// The load will be patched when the constpool is emitted, patching code

View File

@ -35,7 +35,8 @@ class SafepointTableBuilder;
class Immediate {
public:
template <typename T>
inline explicit Immediate(Handle<T> handle);
inline explicit Immediate(
Handle<T> handle, RelocInfo::Mode mode = RelocInfo::FULL_EMBEDDED_OBJECT);
// This is allowed to be an implicit constructor because Immediate is
// a wrapper class that doesn't normally perform any type conversion.
@ -253,10 +254,18 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
// Read/Modify the code target address in the branch/call instruction at pc.
// The isolate argument is unused (and may be nullptr) when skipping flushing.
inline static Address target_address_at(Address pc, Address constant_pool);
// Read/Modify the code target address in the branch/call instruction at pc.
inline static Tagged_t target_compressed_address_at(Address pc,
Address constant_pool);
inline static void set_target_address_at(
Address pc, Address constant_pool, Address target,
ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED);
inline static void set_target_compressed_address_at(
Address pc, Address constant_pool, Tagged_t target,
ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED);
// Returns the handle for the code object called at 'pc'.
// This might need to be temporarily encoded as an offset into code_targets_.
inline Handle<Code> code_target_object_handle_at(Address pc);

View File

@ -203,6 +203,7 @@ class Instruction {
}
// Returns true if this is a 64-bit PC-relative literal load (ldr_x).
bool IsLdrLiteralX() const { return Mask(LoadLiteralMask) == LDR_x_lit; }
// Returns true if this is a 32-bit PC-relative literal load (ldr_w), used
// e.g. for compressed embedded objects.
bool IsLdrLiteralW() const { return Mask(LoadLiteralMask) == LDR_w_lit; }
bool IsPCRelAddressing() const {
return Mask(PCRelAddressingFMask) == PCRelAddressingFixed;

View File

@ -291,8 +291,7 @@ void TurboAssembler::Mov(const Register& rd, const Operand& operand,
ExternalReference reference = bit_cast<ExternalReference>(addr);
IndirectLoadExternalReference(rd, reference);
return;
} else if (operand.ImmediateRMode() ==
RelocInfo::FULL_EMBEDDED_OBJECT) {
} else if (RelocInfo::IsEmbeddedObjectMode(operand.ImmediateRMode())) {
Handle<HeapObject> x(
reinterpret_cast<Address*>(operand.ImmediateValue()));
IndirectLoadConstant(rd, x);

View File

@ -2678,8 +2678,11 @@ void CodeGenerator::AssembleMove(InstructionOperand* source,
if (IsMaterializableFromRoot(src_object, &index)) {
__ LoadRoot(dst, index);
} else {
// TODO(v8:8977): Add the needed RelocInfo and make this mov on 32 bits
__ Mov(dst, src_object);
// TODO(v8:8977): Even though this mov happens on 32 bits (Note the
// .W()) and we are passing along the RelocInfo, we still haven't made
// the address embedded in the code-stream actually be compressed.
__ Mov(dst.W(),
Immediate(src_object, RelocInfo::COMPRESSED_EMBEDDED_OBJECT));
}
} else {
__ Mov(dst, g.ToImmediate(source));

View File

@ -306,6 +306,7 @@ v8_source_set("cctest_sources") {
"test-fuzz-arm64.cc",
"test-javascript-arm64.cc",
"test-js-arm64-variables.cc",
"test-macro-assembler-arm64.cc",
"test-sync-primitives-arm64.cc",
"test-utils-arm64.cc",
"test-utils-arm64.h",

View File

@ -0,0 +1,97 @@
// Copyright 2019 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include <stdlib.h>
#include "src/init/v8.h"
#include "src/base/platform/platform.h"
#include "src/codegen/arm64/assembler-arm64-inl.h"
#include "src/codegen/macro-assembler.h"
#include "src/execution/simulator.h"
#include "src/heap/factory.h"
#include "src/objects/objects-inl.h"
#include "src/objects/smi.h"
#include "src/utils/ostreams.h"
#include "test/cctest/cctest.h"
#include "test/common/assembler-tester.h"
namespace v8 {
namespace internal {
namespace test_macro_assembler_arm64 {
using F0 = int();
#define __ masm.
// Checks that a Mov of a Handle tagged RelocInfo::COMPRESSED_EMBEDDED_OBJECT
// records a compressed reloc entry that survives GC and round-trips through
// target_object(), alongside an ordinary full embedded object.
TEST(EmbeddedObj) {
#ifdef V8_COMPRESS_POINTERS
Isolate* isolate = CcTest::i_isolate();
HandleScope handles(isolate);
auto buffer = AllocateAssemblerBuffer();
MacroAssembler masm(isolate, v8::internal::CodeObjectRequired::kYes,
buffer->CreateView());
// Two distinct heap objects: one referenced compressed, one full-width.
Handle<HeapObject> old_array = isolate->factory()->NewFixedArray(2000);
Handle<HeapObject> my_array = isolate->factory()->NewFixedArray(1000);
// Compressed (32-bit, note the w register) reference to my_array.
__ Mov(w4, Immediate(my_array, RelocInfo::COMPRESSED_EMBEDDED_OBJECT));
// Full 64-bit reference to old_array; also serves as the return value.
__ Mov(x5, old_array);
__ ret(x5);
CodeDesc desc;
masm.GetCode(isolate, &desc);
Handle<Code> code = Factory::CodeBuilder(isolate, desc, Code::STUB).Build();
#ifdef DEBUG
StdoutStream os;
code->Print(os);
#endif
// Collect garbage to ensure reloc info can be walked by the heap.
CcTest::CollectAllGarbage();
CcTest::CollectAllGarbage();
CcTest::CollectAllGarbage();
// Test the user-facing reloc interface.
const int mode_mask = RelocInfo::EmbeddedObjectModeMask();
for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) {
RelocInfo::Mode mode = it.rinfo()->rmode();
if (RelocInfo::IsCompressedEmbeddedObject(mode)) {
// The compressed entry must decompress back to my_array.
CHECK_EQ(*my_array, it.rinfo()->target_object());
} else {
// Any remaining embedded-object entry must be the full one.
CHECK(RelocInfo::IsFullEmbeddedObject(mode));
CHECK_EQ(*old_array, it.rinfo()->target_object());
}
}
#endif // V8_COMPRESS_POINTERS
}
#undef __
} // namespace test_macro_assembler_arm64
} // namespace internal
} // namespace v8