[arm] Remove embedded constant pool support.
This option doesn't work for ARM any more.

BUG=

Review-Url: https://codereview.chromium.org/2816703002
Cr-Commit-Position: refs/heads/master@{#44646}
commit c4b02905d8 (parent dc83caa6fb)
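A minimal sketch of the two constant-pool load shapes this change distinguishes, assuming the usual V8 ARM bit constants (B16 = 1 << 16, B20 = 1 << 20, B24 = 1 << 24, load bit L = 1 << 20) and the mask/pattern constants defined in assembler-arm.cc below; the free-function names are hypothetical and simply mirror Assembler::IsLdrPcImmediateOffset and Assembler::IsLdrPpImmediateOffset from the diff.

#include <cstdint>

using Instr = uint32_t;  // int32_t in V8 proper; unsigned here to keep the example literal simple

// Bit positions and register codes assumed from src/arm/constants-arm.h.
constexpr Instr B16 = 1u << 16, B20 = 1u << 20, B24 = 1u << 24;
constexpr Instr L = 1u << 20;  // load bit
constexpr Instr kCode_pc = 15, kCode_r8 = 8;

// Mask/pattern pairs as defined in assembler-arm.cc (see the hunk below).
constexpr Instr kLdrPCImmedMask = 15 * B24 | 7 * B20 | 15 * B16;
constexpr Instr kLdrPCImmedPattern = 5 * B24 | L | kCode_pc * B16;
constexpr Instr kLdrPpImmedMask = 15 * B24 | 7 * B20 | 15 * B16;
constexpr Instr kLdrPpImmedPattern = 5 * B24 | L | kCode_r8 * B16;

// ldr rd, [pc, #offset]: inline (pc-relative) constant pool load; the only
// pool load form left on ARM after this change.
bool IsPcRelativePoolLoad(Instr instr) {
  return (instr & kLdrPCImmedMask) == kLdrPCImmedPattern;
}

// ldr rd, [pp, #offset]: embedded constant pool load through pp (r8); this is
// the form whose support is removed here.
bool IsPpBasedPoolLoad(Instr instr) {
  return (instr & kLdrPpImmedMask) == kLdrPpImmedPattern;
}

// Example: 0xE59F0008 encodes "ldr r0, [pc, #8]" and matches the pc-relative
// pattern but not the pp-based one.
static_assert((0xE59F0008u & kLdrPCImmedMask) == kLdrPCImmedPattern,
              "ldr r0, [pc, #8] is a pc-relative pool load");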
@@ -75,11 +75,7 @@ Address RelocInfo::target_address_address() {
DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
|| rmode_ == EMBEDDED_OBJECT
|| rmode_ == EXTERNAL_REFERENCE);
if (FLAG_enable_embedded_constant_pool ||
Assembler::IsMovW(Memory::int32_at(pc_))) {
// We return the PC for embedded constant pool since this function is used
// by the serializer and expects the address to reside within the code
// object.
if (Assembler::IsMovW(Memory::int32_at(pc_))) {
return reinterpret_cast<Address>(pc_);
} else {
DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc_)));
@@ -344,32 +340,14 @@ Address Assembler::target_address_from_return_address(Address pc) {
// @ return address
// In cases that need frequent patching, the address is in the
// constant pool. It could be a small constant pool load:
// ldr ip, [pc / pp, #...] @ call address
// blx ip
// @ return address
// Or an extended constant pool load (ARMv7):
// movw ip, #...
// movt ip, #...
// ldr ip, [pc, ip] @ call address
// blx ip
// @ return address
// Or an extended constant pool load (ARMv6):
// mov ip, #...
// orr ip, ip, #...
// orr ip, ip, #...
// orr ip, ip, #...
// ldr ip, [pc, ip] @ call address
// ldr ip, [pc, #...] @ call address
// blx ip
// @ return address
Address candidate = pc - 2 * Assembler::kInstrSize;
Instr candidate_instr(Memory::int32_at(candidate));
if (IsLdrPcImmediateOffset(candidate_instr) ||
IsLdrPpImmediateOffset(candidate_instr)) {
if (IsLdrPcImmediateOffset(candidate_instr)) {
return candidate;
} else {
if (IsLdrPpRegOffset(candidate_instr)) {
candidate -= Assembler::kInstrSize;
}
if (CpuFeatures::IsSupported(ARMv7)) {
candidate -= 1 * Assembler::kInstrSize;
DCHECK(IsMovW(Memory::int32_at(candidate)) &&
@@ -388,46 +366,31 @@ Address Assembler::target_address_from_return_address(Address pc) {

Address Assembler::return_address_from_call_start(Address pc) {
if (IsLdrPcImmediateOffset(Memory::int32_at(pc)) ||
IsLdrPpImmediateOffset(Memory::int32_at(pc))) {
if (IsLdrPcImmediateOffset(Memory::int32_at(pc))) {
// Load from constant pool, small section.
return pc + kInstrSize * 2;
} else {
if (CpuFeatures::IsSupported(ARMv7)) {
DCHECK(IsMovW(Memory::int32_at(pc)));
DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)));
if (IsLdrPpRegOffset(Memory::int32_at(pc + 2 * kInstrSize))) {
// Load from constant pool, extended section.
return pc + kInstrSize * 4;
} else {
// A movw / movt load immediate.
return pc + kInstrSize * 3;
}
} else {
DCHECK(IsMovImmed(Memory::int32_at(pc)));
DCHECK(IsOrrImmed(Memory::int32_at(pc + kInstrSize)));
DCHECK(IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)));
DCHECK(IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
if (IsLdrPpRegOffset(Memory::int32_at(pc + 4 * kInstrSize))) {
// Load from constant pool, extended section.
return pc + kInstrSize * 6;
} else {
// A mov / orr load immediate.
return pc + kInstrSize * 5;
}
}
}
}
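As a side note on the arithmetic above (a sketch, assuming kInstrSize is 4 bytes, as on ARM): the returned offsets are simply the widths of the remaining call sequences, counted in instructions.

constexpr int kInstrSize = 4;  // every ARM instruction is 4 bytes wide

// Distance from the first instruction of each remaining call sequence to the
// return address (the instruction after "blx ip"), matching the returns above.
constexpr int kSmallPoolCall = 2 * kInstrSize;  // ldr ip, [pc, #...]; blx ip
constexpr int kMovwMovtCall = 3 * kInstrSize;   // movw; movt; blx ip (ARMv7)
constexpr int kMovOrrCall = 5 * kInstrSize;     // mov; orr; orr; orr; blx ip (ARMv6)

static_assert(kSmallPoolCall == 8, "small constant pool call");
static_assert(kMovwMovtCall == 12, "movw/movt immediate call");
static_assert(kMovOrrCall == 20, "mov/orr immediate call");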
void Assembler::deserialization_set_special_target_at(
Isolate* isolate, Address constant_pool_entry, Code* code, Address target) {
if (FLAG_enable_embedded_constant_pool) {
set_target_address_at(isolate, constant_pool_entry, code, target);
} else {
Memory::Address_at(constant_pool_entry) = target;
}
}

void Assembler::deserialization_set_target_internal_reference_at(
@@ -438,56 +401,19 @@ void Assembler::deserialization_set_target_internal_reference_at(

bool Assembler::is_constant_pool_load(Address pc) {
if (CpuFeatures::IsSupported(ARMv7)) {
return !Assembler::IsMovW(Memory::int32_at(pc)) ||
(FLAG_enable_embedded_constant_pool &&
Assembler::IsLdrPpRegOffset(
Memory::int32_at(pc + 2 * Assembler::kInstrSize)));
return !Assembler::IsMovW(Memory::int32_at(pc));
} else {
return !Assembler::IsMovImmed(Memory::int32_at(pc)) ||
(FLAG_enable_embedded_constant_pool &&
Assembler::IsLdrPpRegOffset(
Memory::int32_at(pc + 4 * Assembler::kInstrSize)));
return !Assembler::IsMovImmed(Memory::int32_at(pc));
}
}

Address Assembler::constant_pool_entry_address(Address pc,
Address constant_pool) {
if (FLAG_enable_embedded_constant_pool) {
DCHECK(constant_pool != NULL);
int cp_offset;
if (!CpuFeatures::IsSupported(ARMv7) && IsMovImmed(Memory::int32_at(pc))) {
DCHECK(IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)) &&
IsLdrPpRegOffset(Memory::int32_at(pc + 4 * kInstrSize)));
// This is an extended constant pool lookup (ARMv6).
Instr mov_instr = instr_at(pc);
Instr orr_instr_1 = instr_at(pc + kInstrSize);
Instr orr_instr_2 = instr_at(pc + 2 * kInstrSize);
Instr orr_instr_3 = instr_at(pc + 3 * kInstrSize);
cp_offset = DecodeShiftImm(mov_instr) | DecodeShiftImm(orr_instr_1) |
DecodeShiftImm(orr_instr_2) | DecodeShiftImm(orr_instr_3);
} else if (IsMovW(Memory::int32_at(pc))) {
DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)) &&
IsLdrPpRegOffset(Memory::int32_at(pc + 2 * kInstrSize)));
// This is an extended constant pool lookup (ARMv7).
Instruction* movw_instr = Instruction::At(pc);
Instruction* movt_instr = Instruction::At(pc + kInstrSize);
cp_offset = (movt_instr->ImmedMovwMovtValue() << 16) |
movw_instr->ImmedMovwMovtValue();
} else {
// This is a small constant pool lookup.
DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(pc)));
cp_offset = GetLdrRegisterImmediateOffset(Memory::int32_at(pc));
}
return constant_pool + cp_offset;
} else {
DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc)));
Instr instr = Memory::int32_at(pc);
return pc + GetLdrRegisterImmediateOffset(instr) + kPcLoadDelta;
}
}

Address Assembler::target_address_at(Address pc, Address constant_pool) {
@@ -327,10 +327,9 @@ const int RelocInfo::kApplyMask = 0;

bool RelocInfo::IsCodedSpecially() {
// The deserializer needs to know whether a pointer is specially coded. Being
// specially coded on ARM means that it is a movw/movt instruction, or is an
// embedded constant pool entry. These only occur if
// FLAG_enable_embedded_constant_pool is true.
return FLAG_enable_embedded_constant_pool;
// specially coded on ARM means that it is a movw/movt instruction. We don't
// generate those for relocatable pointers.
return false;
}
@@ -503,18 +502,9 @@ const Instr kPopRegPattern =
// ldr rd, [pc, #offset]
const Instr kLdrPCImmedMask = 15 * B24 | 7 * B20 | 15 * B16;
const Instr kLdrPCImmedPattern = 5 * B24 | L | Register::kCode_pc * B16;
// ldr rd, [pp, #offset]
const Instr kLdrPpImmedMask = 15 * B24 | 7 * B20 | 15 * B16;
const Instr kLdrPpImmedPattern = 5 * B24 | L | Register::kCode_r8 * B16;
// ldr rd, [pp, rn]
const Instr kLdrPpRegMask = 15 * B24 | 7 * B20 | 15 * B16;
const Instr kLdrPpRegPattern = 7 * B24 | L | Register::kCode_r8 * B16;
// vldr dd, [pc, #offset]
const Instr kVldrDPCMask = 15 * B24 | 3 * B20 | 15 * B16 | 15 * B8;
const Instr kVldrDPCPattern = 13 * B24 | L | Register::kCode_pc * B16 | 11 * B8;
// vldr dd, [pp, #offset]
const Instr kVldrDPpMask = 15 * B24 | 3 * B20 | 15 * B16 | 15 * B8;
const Instr kVldrDPpPattern = 13 * B24 | L | Register::kCode_r8 * B16 | 11 * B8;
// blxcc rm
const Instr kBlxRegMask =
15 * B24 | 15 * B20 | 15 * B16 | 15 * B12 | 15 * B8 | 15 * B4;
@@ -554,8 +544,7 @@ Assembler::Assembler(IsolateData isolate_data, void* buffer, int buffer_size)
: AssemblerBase(isolate_data, buffer, buffer_size),
recorded_ast_id_(TypeFeedbackId::None()),
pending_32_bit_constants_(),
pending_64_bit_constants_(),
constant_pool_builder_(kLdrMaxReachBits, kVldrMaxReachBits) {
pending_64_bit_constants_() {
pending_32_bit_constants_.reserve(kMinNumPendingConstants);
pending_64_bit_constants_.reserve(kMinNumPendingConstants);
reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_);
@@ -583,13 +572,9 @@ Assembler::~Assembler() {

void Assembler::GetCode(CodeDesc* desc) {
// Emit constant pool if necessary.
int constant_pool_offset = 0;
if (FLAG_enable_embedded_constant_pool) {
constant_pool_offset = EmitEmbeddedConstantPool();
} else {
CheckConstPool(true, false);
DCHECK(pending_32_bit_constants_.empty());
DCHECK(pending_64_bit_constants_.empty());
}
// Set up code descriptor.
desc->buffer = buffer_;
desc->buffer_size = buffer_size_;
@@ -740,24 +725,6 @@ Register Assembler::GetRm(Instr instr) {
}

Instr Assembler::GetConsantPoolLoadPattern() {
if (FLAG_enable_embedded_constant_pool) {
return kLdrPpImmedPattern;
} else {
return kLdrPCImmedPattern;
}
}

Instr Assembler::GetConsantPoolLoadMask() {
if (FLAG_enable_embedded_constant_pool) {
return kLdrPpImmedMask;
} else {
return kLdrPCImmedMask;
}
}

bool Assembler::IsPush(Instr instr) {
return ((instr & ~kRdMask) == kPushRegPattern);
}
@@ -795,23 +762,6 @@ bool Assembler::IsLdrPcImmediateOffset(Instr instr) {
}

bool Assembler::IsLdrPpImmediateOffset(Instr instr) {
// Check the instruction is indeed a
// ldr<cond> <Rd>, [pp +/- offset_12].
return (instr & kLdrPpImmedMask) == kLdrPpImmedPattern;
}

bool Assembler::IsLdrPpRegOffset(Instr instr) {
// Check the instruction is indeed a
// ldr<cond> <Rd>, [pp, +/- <Rm>].
return (instr & kLdrPpRegMask) == kLdrPpRegPattern;
}

Instr Assembler::GetLdrPpRegOffsetPattern() { return kLdrPpRegPattern; }

bool Assembler::IsVldrDPcImmediateOffset(Instr instr) {
// Check the instruction is indeed a
// vldr<cond> <Dd>, [pc +/- offset_10].
@@ -819,13 +769,6 @@ bool Assembler::IsVldrDPcImmediateOffset(Instr instr) {
}

bool Assembler::IsVldrDPpImmediateOffset(Instr instr) {
// Check the instruction is indeed a
// vldr<cond> <Dd>, [pp +/- offset_10].
return (instr & kVldrDPpMask) == kVldrDPpPattern;
}

bool Assembler::IsBlxReg(Instr instr) {
// Check the instruction is indeed a
// blxcc <Rm>
@@ -1169,10 +1112,7 @@ bool Operand::must_output_reloc_info(const Assembler* assembler) const {
static bool use_mov_immediate_load(const Operand& x,
const Assembler* assembler) {
DCHECK(assembler != nullptr);
if (FLAG_enable_embedded_constant_pool &&
!assembler->is_constant_pool_available()) {
return true;
} else if (x.must_output_reloc_info(assembler)) {
if (x.must_output_reloc_info(assembler)) {
// Prefer constant pool if data is likely to be patched.
return false;
} else {
@@ -1196,14 +1136,10 @@ int Operand::instructions_required(const Assembler* assembler,
if (use_mov_immediate_load(*this, assembler)) {
// A movw / movt or mov / orr immediate load.
instructions = CpuFeatures::IsSupported(ARMv7) ? 2 : 4;
} else if (assembler->ConstantPoolAccessIsInOverflow()) {
// An overflowed constant pool load.
instructions = CpuFeatures::IsSupported(ARMv7) ? 3 : 5;
} else {
// A small constant pool load.
instructions = 1;
}

if ((instr & ~kCondMask) != 13 * B21) { // mov, S not set
// For a mov or mvn instruction which doesn't set the condition
// code, the constant pool or immediate load is enough, otherwise we need
@@ -1228,51 +1164,25 @@ void Assembler::move_32_bit_immediate(Register rd,
}

if (use_mov_immediate_load(x, this)) {
// use_mov_immediate_load should return false when we need to output
// relocation info, since we prefer the constant pool for values that
// can be patched.
DCHECK(!x.must_output_reloc_info(this));
Register target = rd.code() == pc.code() ? ip : rd;
if (CpuFeatures::IsSupported(ARMv7)) {
CpuFeatureScope scope(this, ARMv7);
if (!FLAG_enable_embedded_constant_pool &&
x.must_output_reloc_info(this)) {
// Make sure the movw/movt doesn't get separated.
BlockConstPoolFor(2);
}
movw(target, imm32 & 0xffff, cond);
movt(target, imm32 >> 16, cond);
} else {
DCHECK(FLAG_enable_embedded_constant_pool);
mov(target, Operand(imm32 & kImm8Mask), LeaveCC, cond);
orr(target, target, Operand(imm32 & (kImm8Mask << 8)), LeaveCC, cond);
orr(target, target, Operand(imm32 & (kImm8Mask << 16)), LeaveCC, cond);
orr(target, target, Operand(imm32 & (kImm8Mask << 24)), LeaveCC, cond);
}
if (target.code() != rd.code()) {
mov(rd, target, LeaveCC, cond);
}
} else {
DCHECK(!FLAG_enable_embedded_constant_pool || is_constant_pool_available());
ConstantPoolEntry::Access access =
ConstantPoolAddEntry(pc_offset(), x.rmode_, x.imm32_);
if (access == ConstantPoolEntry::OVERFLOWED) {
DCHECK(FLAG_enable_embedded_constant_pool);
Register target = rd.code() == pc.code() ? ip : rd;
// Emit instructions to load constant pool offset.
if (CpuFeatures::IsSupported(ARMv7)) {
CpuFeatureScope scope(this, ARMv7);
movw(target, 0, cond);
movt(target, 0, cond);
} else {
mov(target, Operand(0), LeaveCC, cond);
orr(target, target, Operand(0), LeaveCC, cond);
orr(target, target, Operand(0), LeaveCC, cond);
orr(target, target, Operand(0), LeaveCC, cond);
}
// Load from constant pool at offset.
ldr(rd, MemOperand(pp, target), cond);
} else {
DCHECK(access == ConstantPoolEntry::REGULAR);
ldr(rd, MemOperand(FLAG_enable_embedded_constant_pool ? pp : pc, 0),
cond);
}
USE(access);
ldr(rd, MemOperand(pc, 0), cond);
}
}
@@ -2787,12 +2697,6 @@ void Assembler::vmov(const DwVfpRegister dst,
DCHECK(VfpRegisterIsAvailable(dst));
DCHECK(!scratch.is(ip));
uint32_t enc;
// If the embedded constant pool is disabled, we can use the normal, inline
// constant pool. If the embedded constant pool is enabled (via
// FLAG_enable_embedded_constant_pool), we can only use it where the pool
// pointer (pp) is valid.
bool can_use_pool =
!FLAG_enable_embedded_constant_pool || is_constant_pool_available();
if (CpuFeatures::IsSupported(VFPv3) && FitsVmovFPImmediate(imm, &enc)) {
CpuFeatureScope scope(this, VFPv3);
// The double can be encoded in the instruction.
@@ -2804,8 +2708,7 @@ void Assembler::vmov(const DwVfpRegister dst,
int vd, d;
dst.split_code(&vd, &d);
emit(al | 0x1D*B23 | d*B22 | 0x3*B20 | vd*B12 | 0x5*B9 | B8 | enc);
} else if (CpuFeatures::IsSupported(ARMv7) && FLAG_enable_vldr_imm &&
can_use_pool) {
} else if (CpuFeatures::IsSupported(ARMv7) && FLAG_enable_vldr_imm) {
CpuFeatureScope scope(this, ARMv7);
// TODO(jfb) Temporarily turned off until we have constant blinding or
// some equivalent mitigation: an attacker can otherwise control
@@ -2823,17 +2726,9 @@ void Assembler::vmov(const DwVfpRegister dst,
// that's tricky because vldr has a limited reach. Furthermore
// it breaks load locality.
ConstantPoolEntry::Access access = ConstantPoolAddEntry(pc_offset(), imm);
if (access == ConstantPoolEntry::OVERFLOWED) {
DCHECK(FLAG_enable_embedded_constant_pool);
// Emit instructions to load constant pool offset.
movw(ip, 0);
movt(ip, 0);
// Load from constant pool at offset.
vldr(dst, MemOperand(pp, ip));
} else {
DCHECK(access == ConstantPoolEntry::REGULAR);
vldr(dst, MemOperand(FLAG_enable_embedded_constant_pool ? pp : pc, 0));
}
USE(access);
vldr(dst, MemOperand(pc, 0));
} else {
// Synthesise the double from ARM immediates.
uint32_t lo, hi;
@@ -5046,9 +4941,6 @@ ConstantPoolEntry::Access Assembler::ConstantPoolAddEntry(int position,
rmode != RelocInfo::NONE64);
bool sharing_ok = RelocInfo::IsNone(rmode) ||
!(serializer_enabled() || rmode < RelocInfo::CELL);
if (FLAG_enable_embedded_constant_pool) {
return constant_pool_builder_.AddEntry(position, value, sharing_ok);
} else {
DCHECK(pending_32_bit_constants_.size() < kMaxNumPending32Constants);
if (pending_32_bit_constants_.empty()) {
first_const_pool_32_use_ = position;
@@ -5061,14 +4953,10 @@ ConstantPoolEntry::Access Assembler::ConstantPoolAddEntry(int position,
BlockConstPoolFor(1);
return ConstantPoolEntry::REGULAR;
}
}

ConstantPoolEntry::Access Assembler::ConstantPoolAddEntry(int position,
double value) {
if (FLAG_enable_embedded_constant_pool) {
return constant_pool_builder_.AddEntry(position, value);
} else {
DCHECK(pending_64_bit_constants_.size() < kMaxNumPending64Constants);
if (pending_64_bit_constants_.empty()) {
first_const_pool_64_use_ = position;
@@ -5081,17 +4969,9 @@ ConstantPoolEntry::Access Assembler::ConstantPoolAddEntry(int position,
BlockConstPoolFor(1);
return ConstantPoolEntry::REGULAR;
}
}

void Assembler::BlockConstPoolFor(int instructions) {
if (FLAG_enable_embedded_constant_pool) {
// Should be a no-op if using an embedded constant pool.
DCHECK(pending_32_bit_constants_.empty());
DCHECK(pending_64_bit_constants_.empty());
return;
}

int pc_limit = pc_offset() + instructions * kInstrSize;
if (no_const_pool_before_ < pc_limit) {
// Max pool start (if we need a jump and an alignment).
@@ -5114,13 +4994,6 @@ void Assembler::BlockConstPoolFor(int instructions) {

void Assembler::CheckConstPool(bool force_emit, bool require_jump) {
if (FLAG_enable_embedded_constant_pool) {
// Should be a no-op if using an embedded constant pool.
DCHECK(pending_32_bit_constants_.empty());
DCHECK(pending_64_bit_constants_.empty());
return;
}

// Some short sequence of instruction mustn't be broken up by constant pool
// emission, such sequences are protected by calls to BlockConstPoolFor and
// BlockConstPoolScope.
@@ -5333,61 +5206,6 @@ void Assembler::CheckConstPool(bool force_emit, bool require_jump) {
next_buffer_check_ = pc_offset() + kCheckPoolInterval;
}

void Assembler::PatchConstantPoolAccessInstruction(
int pc_offset, int offset, ConstantPoolEntry::Access access,
ConstantPoolEntry::Type type) {
DCHECK(FLAG_enable_embedded_constant_pool);
Address pc = buffer_ + pc_offset;

// Patch vldr/ldr instruction with correct offset.
Instr instr = instr_at(pc);
if (access == ConstantPoolEntry::OVERFLOWED) {
if (CpuFeatures::IsSupported(ARMv7)) {
CpuFeatureScope scope(this, ARMv7);
// Instructions to patch must be 'movw rd, [#0]' and 'movt rd, [#0].
Instr next_instr = instr_at(pc + kInstrSize);
DCHECK((IsMovW(instr) && Instruction::ImmedMovwMovtValue(instr) == 0));
DCHECK((IsMovT(next_instr) &&
Instruction::ImmedMovwMovtValue(next_instr) == 0));
instr_at_put(pc, PatchMovwImmediate(instr, offset & 0xffff));
instr_at_put(pc + kInstrSize,
PatchMovwImmediate(next_instr, offset >> 16));
} else {
// Instructions to patch must be 'mov rd, [#0]' and 'orr rd, rd, [#0].
Instr instr_2 = instr_at(pc + kInstrSize);
Instr instr_3 = instr_at(pc + 2 * kInstrSize);
Instr instr_4 = instr_at(pc + 3 * kInstrSize);
DCHECK((IsMovImmed(instr) && Instruction::Immed8Value(instr) == 0));
DCHECK((IsOrrImmed(instr_2) && Instruction::Immed8Value(instr_2) == 0) &&
GetRn(instr_2).is(GetRd(instr_2)));
DCHECK((IsOrrImmed(instr_3) && Instruction::Immed8Value(instr_3) == 0) &&
GetRn(instr_3).is(GetRd(instr_3)));
DCHECK((IsOrrImmed(instr_4) && Instruction::Immed8Value(instr_4) == 0) &&
GetRn(instr_4).is(GetRd(instr_4)));
instr_at_put(pc, PatchShiftImm(instr, (offset & kImm8Mask)));
instr_at_put(pc + kInstrSize,
PatchShiftImm(instr_2, (offset & (kImm8Mask << 8))));
instr_at_put(pc + 2 * kInstrSize,
PatchShiftImm(instr_3, (offset & (kImm8Mask << 16))));
instr_at_put(pc + 3 * kInstrSize,
PatchShiftImm(instr_4, (offset & (kImm8Mask << 24))));
}
} else if (type == ConstantPoolEntry::DOUBLE) {
// Instruction to patch must be 'vldr rd, [pp, #0]'.
DCHECK((IsVldrDPpImmediateOffset(instr) &&
GetVldrDRegisterImmediateOffset(instr) == 0));
DCHECK(is_uint10(offset));
instr_at_put(pc, SetVldrDRegisterImmediateOffset(instr, offset));
} else {
// Instruction to patch must be 'ldr rd, [pp, #0]'.
DCHECK((IsLdrPpImmediateOffset(instr) &&
GetLdrRegisterImmediateOffset(instr) == 0));
DCHECK(is_uint12(offset));
instr_at_put(pc, SetLdrRegisterImmediateOffset(instr, offset));
}
}

PatchingAssembler::PatchingAssembler(IsolateData isolate_data, byte* address,
int instructions)
: Assembler(isolate_data, address, instructions * kInstrSize + kGap) {
@@ -142,7 +142,6 @@ struct Register {
};

// r7: context register
// r8: constant pool pointer register if FLAG_enable_embedded_constant_pool.
// r9: lithium scratch
#define DECLARE_REGISTER(R) constexpr Register R = {Register::kCode_##R};
GENERAL_REGISTERS(DECLARE_REGISTER)
@@ -1558,12 +1557,6 @@ class Assembler : public AssemblerBase {
static int GetBranchOffset(Instr instr);
static bool IsLdrRegisterImmediate(Instr instr);
static bool IsVldrDRegisterImmediate(Instr instr);
static Instr GetConsantPoolLoadPattern();
static Instr GetConsantPoolLoadMask();
static bool IsLdrPpRegOffset(Instr instr);
static Instr GetLdrPpRegOffsetPattern();
static bool IsLdrPpImmediateOffset(Instr instr);
static bool IsVldrDPpImmediateOffset(Instr instr);
static int GetLdrRegisterImmediateOffset(Instr instr);
static int GetVldrDRegisterImmediateOffset(Instr instr);
static Instr SetLdrRegisterImmediateOffset(Instr instr, int offset);
@@ -1628,19 +1621,12 @@ class Assembler : public AssemblerBase {
}
}

int EmitEmbeddedConstantPool() {
DCHECK(FLAG_enable_embedded_constant_pool);
return constant_pool_builder_.Emit(this);
}

bool ConstantPoolAccessIsInOverflow() const {
return constant_pool_builder_.NextAccess(ConstantPoolEntry::INTPTR) ==
ConstantPoolEntry::OVERFLOWED;
}

void PatchConstantPoolAccessInstruction(int pc_offset, int offset,
ConstantPoolEntry::Access access,
ConstantPoolEntry::Type type);
ConstantPoolEntry::Type type) {
// No embedded constant pool support.
UNREACHABLE();
}

protected:
// Relocation for a type-recording IC has the AST id added to it. This
@@ -1763,8 +1749,6 @@ class Assembler : public AssemblerBase {
int first_const_pool_32_use_;
int first_const_pool_64_use_;

ConstantPoolBuilder constant_pool_builder_;

// The bound position, before this we cannot do instruction elimination.
int last_bound_pos_;
@@ -983,9 +983,6 @@ void CEntryStub::Generate(MacroAssembler* masm) {
__ mov(r2, Operand(pending_handler_offset_address));
__ ldr(r2, MemOperand(r2));
__ add(r1, r1, Operand(Code::kHeaderSize - kHeapObjectTag)); // Code start
if (FLAG_enable_embedded_constant_pool) {
__ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r1);
}
__ add(pc, r1, r2);
}
@@ -1029,9 +1026,6 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
// r3: argc
// r4: argv
StackFrame::Type marker = type();
if (FLAG_enable_embedded_constant_pool) {
__ mov(r8, Operand::Zero());
}
__ mov(r7, Operand(StackFrame::TypeToMarker(marker)));
__ mov(r6, Operand(StackFrame::TypeToMarker(marker)));
__ mov(r5,
@@ -1039,7 +1033,6 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
__ ldr(r5, MemOperand(r5));
__ mov(ip, Operand(-1)); // Push a bad frame pointer to fail if it is used.
__ stm(db_w, sp, r5.bit() | r6.bit() | r7.bit() |
(FLAG_enable_embedded_constant_pool ? r8.bit() : 0) |
ip.bit());

// Set up frame pointer for the frame to be pushed.
@@ -391,8 +391,8 @@ void FrameDescription::SetCallerFp(unsigned offset, intptr_t value) {

void FrameDescription::SetCallerConstantPool(unsigned offset, intptr_t value) {
DCHECK(FLAG_enable_embedded_constant_pool);
SetFrameSlot(offset, value);
// No embedded constant pool support.
UNREACHABLE();
}
@@ -20,16 +20,16 @@ namespace internal {
Register JavaScriptFrame::fp_register() { return v8::internal::fp; }
Register JavaScriptFrame::context_register() { return cp; }
Register JavaScriptFrame::constant_pool_pointer_register() {
DCHECK(FLAG_enable_embedded_constant_pool);
return pp;
UNREACHABLE();
return no_reg;
}

Register StubFailureTrampolineFrame::fp_register() { return v8::internal::fp; }
Register StubFailureTrampolineFrame::context_register() { return cp; }
Register StubFailureTrampolineFrame::constant_pool_pointer_register() {
DCHECK(FLAG_enable_embedded_constant_pool);
return pp;
UNREACHABLE();
return no_reg;
}
@@ -66,23 +66,11 @@ const int kNumDoubleCalleeSaved = 8;
// TODO(regis): Only 8 registers may actually be sufficient. Revisit.
const int kNumSafepointRegisters = 16;

// The embedded constant pool pointer (r8/pp) is not included in the safepoint
// since it is not tagged. This register is preserved in the stack frame where
// its value will be updated if GC code movement occurs. Including it in the
// safepoint (where it will not be relocated) would cause a stale value to be
// restored.
const RegList kConstantPointerRegMask =
FLAG_enable_embedded_constant_pool ? (1 << 8) : 0;
const int kNumConstantPoolPointerReg =
FLAG_enable_embedded_constant_pool ? 1 : 0;

// Define the list of registers actually saved at safepoints.
// Note that the number of saved registers may be smaller than the reserved
// space, i.e. kNumSafepointSavedRegisters <= kNumSafepointRegisters.
const RegList kSafepointSavedRegisters =
kJSCallerSaved | (kCalleeSaved & ~kConstantPointerRegMask);
const int kNumSafepointSavedRegisters =
kNumJSCallerSaved + kNumCalleeSaved - kNumConstantPoolPointerReg;
const RegList kSafepointSavedRegisters = kJSCallerSaved | kCalleeSaved;
const int kNumSafepointSavedRegisters = kNumJSCallerSaved + kNumCalleeSaved;

// ----------------------------------------------------
@@ -770,16 +770,6 @@ void MacroAssembler::RememberedSetHelper(Register object, // For debug tests.

void MacroAssembler::PushCommonFrame(Register marker_reg) {
if (marker_reg.is_valid()) {
if (FLAG_enable_embedded_constant_pool) {
if (marker_reg.code() > pp.code()) {
stm(db_w, sp, pp.bit() | fp.bit() | lr.bit());
add(fp, sp, Operand(kPointerSize));
Push(marker_reg);
} else {
stm(db_w, sp, marker_reg.bit() | pp.bit() | fp.bit() | lr.bit());
add(fp, sp, Operand(2 * kPointerSize));
}
} else {
if (marker_reg.code() > fp.code()) {
stm(db_w, sp, fp.bit() | lr.bit());
mov(fp, Operand(sp));
@@ -788,41 +778,28 @@ void MacroAssembler::PushCommonFrame(Register marker_reg) {
stm(db_w, sp, marker_reg.bit() | fp.bit() | lr.bit());
add(fp, sp, Operand(kPointerSize));
}
}
} else {
stm(db_w, sp, (FLAG_enable_embedded_constant_pool ? pp.bit() : 0) |
fp.bit() | lr.bit());
add(fp, sp, Operand(FLAG_enable_embedded_constant_pool ? kPointerSize : 0));
stm(db_w, sp, fp.bit() | lr.bit());
mov(fp, sp);
}
}

void MacroAssembler::PopCommonFrame(Register marker_reg) {
if (marker_reg.is_valid()) {
if (FLAG_enable_embedded_constant_pool) {
if (marker_reg.code() > pp.code()) {
pop(marker_reg);
ldm(ia_w, sp, pp.bit() | fp.bit() | lr.bit());
} else {
ldm(ia_w, sp, marker_reg.bit() | pp.bit() | fp.bit() | lr.bit());
}
} else {
if (marker_reg.code() > fp.code()) {
pop(marker_reg);
ldm(ia_w, sp, fp.bit() | lr.bit());
} else {
ldm(ia_w, sp, marker_reg.bit() | fp.bit() | lr.bit());
}
}
} else {
ldm(ia_w, sp, (FLAG_enable_embedded_constant_pool ? pp.bit() : 0) |
fp.bit() | lr.bit());
ldm(ia_w, sp, fp.bit() | lr.bit());
}
}

void MacroAssembler::PushStandardFrame(Register function_reg) {
DCHECK(!function_reg.is_valid() || function_reg.code() < cp.code());
stm(db_w, sp, (function_reg.is_valid() ? function_reg.bit() : 0) | cp.bit() |
(FLAG_enable_embedded_constant_pool ? pp.bit() : 0) |
fp.bit() | lr.bit());
int offset = -StandardFrameConstants::kContextOffset;
offset += function_reg.is_valid() ? kPointerSize : 0;
@@ -833,11 +810,7 @@ void MacroAssembler::PushStandardFrame(Register function_reg) {
// Push and pop all registers that can hold pointers.
void MacroAssembler::PushSafepointRegisters() {
// Safepoints expect a block of contiguous register values starting with r0.
// except when FLAG_enable_embedded_constant_pool, which omits pp.
DCHECK(kSafepointSavedRegisters ==
(FLAG_enable_embedded_constant_pool
? ((1 << (kNumSafepointSavedRegisters + 1)) - 1) & ~pp.bit()
: (1 << kNumSafepointSavedRegisters) - 1));
DCHECK(kSafepointSavedRegisters == (1 << kNumSafepointSavedRegisters) - 1);
// Safepoints expect a block of kNumSafepointRegisters values on the
// stack, so adjust the stack for unsaved registers.
const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
@@ -867,10 +840,6 @@ void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
// The registers are pushed starting with the highest encoding,
// which means that lowest encodings are closest to the stack pointer.
if (FLAG_enable_embedded_constant_pool && reg_code > pp.code()) {
// RegList omits pp.
reg_code -= 1;
}
DCHECK(reg_code >= 0 && reg_code < kNumSafepointRegisters);
return reg_code;
}
@@ -1399,29 +1368,9 @@ void MacroAssembler::AsrPair(Register dst_low, Register dst_high,
}
}

void MacroAssembler::LoadConstantPoolPointerRegisterFromCodeTargetAddress(
Register code_target_address) {
DCHECK(FLAG_enable_embedded_constant_pool);
ldr(pp, MemOperand(code_target_address,
Code::kConstantPoolOffset - Code::kHeaderSize));
add(pp, pp, code_target_address);
}

void MacroAssembler::LoadConstantPoolPointerRegister() {
DCHECK(FLAG_enable_embedded_constant_pool);
int entry_offset = pc_offset() + Instruction::kPCReadOffset;
sub(ip, pc, Operand(entry_offset));
LoadConstantPoolPointerRegisterFromCodeTargetAddress(ip);
}

void MacroAssembler::StubPrologue(StackFrame::Type type) {
mov(ip, Operand(StackFrame::TypeToMarker(type)));
PushCommonFrame(ip);
if (FLAG_enable_embedded_constant_pool) {
LoadConstantPoolPointerRegister();
set_constant_pool_available(true);
}
}

void MacroAssembler::Prologue(bool code_pre_aging) {
@@ -1440,10 +1389,6 @@ void MacroAssembler::Prologue(bool code_pre_aging) {
nop(ip.code());
}
}
if (FLAG_enable_embedded_constant_pool) {
LoadConstantPoolPointerRegister();
set_constant_pool_available(true);
}
}

void MacroAssembler::EmitLoadFeedbackVector(Register vector) {
@@ -1458,9 +1403,6 @@ void MacroAssembler::EnterFrame(StackFrame::Type type,
// r0-r3: preserved
mov(ip, Operand(StackFrame::TypeToMarker(type)));
PushCommonFrame(ip);
if (FLAG_enable_embedded_constant_pool && load_constant_pool_pointer_reg) {
LoadConstantPoolPointerRegister();
}
if (type == StackFrame::INTERNAL) {
mov(ip, Operand(CodeObject()));
push(ip);
@@ -1474,18 +1416,10 @@ int MacroAssembler::LeaveFrame(StackFrame::Type type) {
// r2: preserved

// Drop the execution stack down to the frame pointer and restore
// the caller frame pointer, return address and constant pool pointer
// (if FLAG_enable_embedded_constant_pool).
int frame_ends;
if (FLAG_enable_embedded_constant_pool) {
add(sp, fp, Operand(StandardFrameConstants::kConstantPoolOffset));
frame_ends = pc_offset();
ldm(ia_w, sp, pp.bit() | fp.bit() | lr.bit());
} else {
// the caller frame pointer and return address.
mov(sp, fp);
frame_ends = pc_offset();
int frame_ends = pc_offset();
ldm(ia_w, sp, fp.bit() | lr.bit());
}
return frame_ends;
}
@@ -1519,9 +1453,6 @@ void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space,
mov(ip, Operand::Zero());
str(ip, MemOperand(fp, ExitFrameConstants::kSPOffset));
}
if (FLAG_enable_embedded_constant_pool) {
str(pp, MemOperand(fp, ExitFrameConstants::kConstantPoolOffset));
}
mov(ip, Operand(CodeObject()));
str(ip, MemOperand(fp, ExitFrameConstants::kCodeOffset));
@@ -1537,8 +1468,7 @@ void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space,
// Note that d0 will be accessible at
// fp - ExitFrameConstants::kFrameSize -
// DwVfpRegister::kMaxNumRegisters * kDoubleSize,
// since the sp slot, code slot and constant pool slot (if
// FLAG_enable_embedded_constant_pool) were pushed after the fp.
// since the sp slot and code slot were pushed after the fp.
}

// Reserve place for the return address and stack space and align the frame
@@ -1603,9 +1533,6 @@ void MacroAssembler::LeaveExitFrame(bool save_doubles, Register argument_count,
#endif

// Tear down the exit frame, pop the arguments, and return.
if (FLAG_enable_embedded_constant_pool) {
ldr(pp, MemOperand(fp, ExitFrameConstants::kConstantPoolOffset));
}
mov(sp, Operand(fp));
ldm(ia_w, sp, fp.bit() | lr.bit());
if (argument_count.is_valid()) {
@@ -41,7 +41,6 @@ inline MemOperand FieldMemOperand(Register object, int offset) {

// Give alias names to registers
const Register cp = {Register::kCode_r7}; // JavaScript context pointer.
const Register pp = {Register::kCode_r8}; // Constant pool pointer.
const Register kRootRegister = {Register::kCode_r10}; // Roots array pointer.

// Flags used for AllocateHeapNumber
@@ -474,12 +473,10 @@ class MacroAssembler: public Assembler {
}
}

// Push a fixed frame, consisting of lr, fp, constant pool (if
// FLAG_enable_embedded_constant_pool)
// Push a fixed frame, consisting of lr, fp
void PushCommonFrame(Register marker_reg = no_reg);

// Push a standard frame, consisting of lr, fp, constant pool (if
// FLAG_enable_embedded_constant_pool), context and JS function
// Push a standard frame, consisting of lr, fp, context and JS function
void PushStandardFrame(Register function_reg);

void PopCommonFrame(Register marker_reg = no_reg);
@@ -1337,11 +1334,6 @@ class MacroAssembler: public Assembler {
Register scratch_reg,
Label* no_memento_found);

// Loads the constant pool pointer (pp) register.
void LoadConstantPoolPointerRegisterFromCodeTargetAddress(
Register code_target_address);
void LoadConstantPoolPointerRegister();

private:
void CallCFunctionHelper(Register function,
int num_reg_arguments,
@@ -851,7 +851,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
// r2: receiver
// r3: argc
// r4: argv
// r5-r6, r8 (if !FLAG_enable_embedded_constant_pool) and cp may be clobbered
// r5-r6, r8 and cp may be clobbered
ProfileEntryHookStub::MaybeCallEntryHook(masm);

// Enter an internal frame.
@@ -901,9 +901,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
__ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
__ mov(r5, Operand(r4));
__ mov(r6, Operand(r4));
if (!FLAG_enable_embedded_constant_pool) {
__ mov(r8, Operand(r4));
}
if (kR9Available == 1) {
__ mov(r9, Operand(r4));
}
@@ -1697,10 +1695,6 @@ static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
ConstantPoolUnavailableScope constant_pool_unavailable(masm);
__ add(r0, r0, Operand(Code::kHeaderSize - kHeapObjectTag)); // Code start

if (FLAG_enable_embedded_constant_pool) {
__ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r0);
}

// Load the OSR entrypoint offset from the deoptimization data.
// <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
__ ldr(r1, FieldMemOperand(
@@ -1958,7 +1952,6 @@ static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
__ SmiTag(r0);
__ mov(r4, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
__ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() |
(FLAG_enable_embedded_constant_pool ? pp.bit() : 0) |
fp.bit() | lr.bit());
__ add(fp, sp,
Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
@@ -503,9 +503,6 @@ void CodeGenerator::AssembleDeconstructFrame() {

void CodeGenerator::AssemblePrepareTailCall() {
if (frame_access_state()->has_frame()) {
if (FLAG_enable_embedded_constant_pool) {
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kConstantPoolOffset));
}
__ ldr(lr, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
__ ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
}
@@ -2508,9 +2505,7 @@ void CodeGenerator::FinishFrame(Frame* frame) {
frame->AllocateSavedCalleeRegisterSlots((last - first + 1) *
(kDoubleSize / kPointerSize));
}
const RegList saves = FLAG_enable_embedded_constant_pool
? (descriptor->CalleeSavedRegisters() & ~pp.bit())
: descriptor->CalleeSavedRegisters();
const RegList saves = descriptor->CalleeSavedRegisters();
if (saves != 0) {
// Save callee-saved registers.
frame->AllocateSavedCalleeRegisterSlots(
@@ -2522,14 +2517,8 @@ void CodeGenerator::AssembleConstructFrame() {
CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
if (frame_access_state()->has_frame()) {
if (descriptor->IsCFunctionCall()) {
if (FLAG_enable_embedded_constant_pool) {
__ Push(lr, fp, pp);
// Adjust FP to point to saved FP.
__ sub(fp, sp, Operand(StandardFrameConstants::kConstantPoolOffset));
} else {
__ Push(lr, fp);
__ mov(fp, sp);
}
} else if (descriptor->IsJSFunctionCall()) {
__ Prologue(this->info()->GeneratePreagedPrologue());
if (descriptor->PushArgumentCount()) {
@@ -2615,9 +2604,7 @@ void CodeGenerator::AssembleConstructFrame() {
__ vstm(db_w, sp, DwVfpRegister::from_code(first),
DwVfpRegister::from_code(last));
}
const RegList saves = FLAG_enable_embedded_constant_pool
? (descriptor->CalleeSavedRegisters() & ~pp.bit())
: descriptor->CalleeSavedRegisters();
const RegList saves = descriptor->CalleeSavedRegisters();
if (saves != 0) {
// Save callee-saved registers.
__ stm(db_w, sp, saves);
@@ -2629,9 +2616,7 @@ void CodeGenerator::AssembleReturn(InstructionOperand* pop) {
int pop_count = static_cast<int>(descriptor->StackParameterCount());

// Restore registers.
const RegList saves = FLAG_enable_embedded_constant_pool
? (descriptor->CalleeSavedRegisters() & ~pp.bit())
: descriptor->CalleeSavedRegisters();
const RegList saves = descriptor->CalleeSavedRegisters();
if (saves != 0) {
__ ldm(ia_w, sp, saves);
}
@@ -1319,7 +1319,7 @@ DEFINE_INT(dump_allocations_digest_at_alloc, -1,

// assembler.h
DEFINE_BOOL(enable_embedded_constant_pool, V8_EMBEDDED_CONSTANT_POOL,
"enable use of embedded constant pools (ARM/PPC only)")
"enable use of embedded constant pools (PPC only)")

DEFINE_BOOL(unbox_double_fields, V8_DOUBLE_FIELDS_UNBOXING,
"enable in-object double fields unboxing (64-bit only)")
@@ -2760,43 +2760,7 @@ void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {

static Address GetInterruptImmediateLoadAddress(Address pc) {
Address load_address = pc - 2 * Assembler::kInstrSize;
if (!FLAG_enable_embedded_constant_pool) {
DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address)));
} else if (Assembler::IsLdrPpRegOffset(Memory::int32_at(load_address))) {
// This is an extended constant pool lookup.
if (CpuFeatures::IsSupported(ARMv7)) {
load_address -= 2 * Assembler::kInstrSize;
DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
DCHECK(Assembler::IsMovT(
Memory::int32_at(load_address + Assembler::kInstrSize)));
} else {
load_address -= 4 * Assembler::kInstrSize;
DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
DCHECK(Assembler::IsOrrImmed(
Memory::int32_at(load_address + Assembler::kInstrSize)));
DCHECK(Assembler::IsOrrImmed(
Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
DCHECK(Assembler::IsOrrImmed(
Memory::int32_at(load_address + 3 * Assembler::kInstrSize)));
}
} else if (CpuFeatures::IsSupported(ARMv7) &&
Assembler::IsMovT(Memory::int32_at(load_address))) {
// This is a movw / movt immediate load.
load_address -= Assembler::kInstrSize;
DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
} else if (!CpuFeatures::IsSupported(ARMv7) &&
Assembler::IsOrrImmed(Memory::int32_at(load_address))) {
// This is a mov / orr immediate load.
load_address -= 3 * Assembler::kInstrSize;
DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
DCHECK(Assembler::IsOrrImmed(
Memory::int32_at(load_address + Assembler::kInstrSize)));
DCHECK(Assembler::IsOrrImmed(
Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
} else {
// This is a small constant pool lookup.
DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(load_address)));
}
return load_address;
}
@@ -77,9 +77,7 @@ class ArchDefaultRegisterConfiguration : public RegisterConfiguration {
kMaxAllocatableGeneralRegisterCount,
kMaxAllocatableDoubleRegisterCount,
#elif V8_TARGET_ARCH_ARM
FLAG_enable_embedded_constant_pool
? (kMaxAllocatableGeneralRegisterCount - 1)
: kMaxAllocatableGeneralRegisterCount,
kMaxAllocatableGeneralRegisterCount,
CpuFeatures::IsSupported(VFP32DREGS)
? kMaxAllocatableDoubleRegisterCount
: (ALLOCATABLE_NO_VFP32_DOUBLE_REGISTERS(REGISTER_COUNT) 0),