Add {lda,stl}x?r{,b,h} instructions to ARM64 assembler/disassembler

They are not currently implemented by the ARM64 simulator.

R=jarin@chromium.org, bmeurer@chromium.org

Review-Url: https://codereview.chromium.org/1990073002
Cr-Commit-Position: refs/heads/master@{#36385}
This commit is contained in:
binji 2016-05-19 13:08:15 -07:00 committed by Commit bot
parent 31ac67ee61
commit d208cdd7f1
11 changed files with 396 additions and 128 deletions

View File

@ -1716,6 +1716,83 @@ void Assembler::ldr(const CPURegister& rt, const Immediate& imm) {
ldr_pcrel(rt, 0); ldr_pcrel(rt, 0);
} }
void Assembler::ldar(const Register& rt, const Register& rn) {
DCHECK(rn.Is64Bits());
LoadStoreAcquireReleaseOp op = rt.Is32Bits() ? LDAR_w : LDAR_x;
Emit(op | Rs(x31) | Rt2(x31) | Rn(rn) | Rt(rt));
}
void Assembler::ldaxr(const Register& rt, const Register& rn) {
DCHECK(rn.Is64Bits());
LoadStoreAcquireReleaseOp op = rt.Is32Bits() ? LDAXR_w : LDAXR_x;
Emit(op | Rs(x31) | Rt2(x31) | Rn(rn) | Rt(rt));
}
void Assembler::stlr(const Register& rt, const Register& rn) {
DCHECK(rn.Is64Bits());
LoadStoreAcquireReleaseOp op = rt.Is32Bits() ? STLR_w : STLR_x;
Emit(op | Rs(x31) | Rt2(x31) | Rn(rn) | Rt(rt));
}
void Assembler::stlxr(const Register& rs, const Register& rt,
const Register& rn) {
DCHECK(rs.Is32Bits());
DCHECK(rn.Is64Bits());
LoadStoreAcquireReleaseOp op = rt.Is32Bits() ? STLXR_w : STLXR_x;
Emit(op | Rs(rs) | Rt2(x31) | Rn(rn) | Rt(rt));
}
void Assembler::ldarb(const Register& rt, const Register& rn) {
  // LDARB: load-acquire a byte into a W register.
  DCHECK(rn.Is64Bits());  // The base address is always an X register.
  DCHECK(rt.Is32Bits());  // Byte accesses always use a W register.
  // Rs and Rt2 are unused by this instruction and are encoded as x31.
  Emit(Rt(rt) | Rn(rn) | Rt2(x31) | Rs(x31) | LDAR_b);
}
void Assembler::ldaxrb(const Register& rt, const Register& rn) {
  // LDAXRB: load-acquire exclusive of a byte into a W register.
  DCHECK(rn.Is64Bits());  // The base address is always an X register.
  DCHECK(rt.Is32Bits());  // Byte accesses always use a W register.
  // Rs and Rt2 are unused by this instruction and are encoded as x31.
  Emit(Rt(rt) | Rn(rn) | Rt2(x31) | Rs(x31) | LDAXR_b);
}
void Assembler::stlrb(const Register& rt, const Register& rn) {
  // STLRB: store-release the low byte of a W register.
  DCHECK(rn.Is64Bits());  // The base address is always an X register.
  DCHECK(rt.Is32Bits());  // Byte accesses always use a W register.
  // Rs and Rt2 are unused by this instruction and are encoded as x31.
  Emit(Rt(rt) | Rn(rn) | Rt2(x31) | Rs(x31) | STLR_b);
}
void Assembler::stlxrb(const Register& rs, const Register& rt,
                       const Register& rn) {
  // STLXRB: store-release exclusive of the low byte of rt; the success
  // status is written to rs.
  DCHECK(rn.Is64Bits());  // The base address is always an X register.
  DCHECK(rt.Is32Bits());  // Byte accesses always use a W register.
  DCHECK(rs.Is32Bits());  // The status register is always a W register.
  // Rt2 is unused by this instruction and is encoded as x31.
  Emit(Rt(rt) | Rn(rn) | Rt2(x31) | Rs(rs) | STLXR_b);
}
void Assembler::ldarh(const Register& rt, const Register& rn) {
  // LDARH: load-acquire a half-word into a W register.
  DCHECK(rn.Is64Bits());  // The base address is always an X register.
  DCHECK(rt.Is32Bits());  // Half-word accesses always use a W register.
  // Rs and Rt2 are unused by this instruction and are encoded as x31.
  Emit(Rt(rt) | Rn(rn) | Rt2(x31) | Rs(x31) | LDAR_h);
}
void Assembler::ldaxrh(const Register& rt, const Register& rn) {
  // LDAXRH: load-acquire exclusive of a half-word into a W register.
  DCHECK(rn.Is64Bits());  // The base address is always an X register.
  DCHECK(rt.Is32Bits());  // Half-word accesses always use a W register.
  // Rs and Rt2 are unused by this instruction and are encoded as x31.
  Emit(Rt(rt) | Rn(rn) | Rt2(x31) | Rs(x31) | LDAXR_h);
}
void Assembler::stlrh(const Register& rt, const Register& rn) {
  // STLRH: store-release the low half-word of a W register.
  DCHECK(rn.Is64Bits());  // The base address is always an X register.
  DCHECK(rt.Is32Bits());  // Half-word accesses always use a W register.
  // Rs and Rt2 are unused by this instruction and are encoded as x31.
  Emit(Rt(rt) | Rn(rn) | Rt2(x31) | Rs(x31) | STLR_h);
}
void Assembler::stlxrh(const Register& rs, const Register& rt,
                       const Register& rn) {
  // STLXRH: store-release exclusive of the low half-word of rt; the success
  // status is written to rs.
  DCHECK(rn.Is64Bits());  // The base address is always an X register.
  DCHECK(rt.Is32Bits());  // Half-word accesses always use a W register.
  DCHECK(rs.Is32Bits());  // The status register is always a W register.
  // Rt2 is unused by this instruction and is encoded as x31.
  Emit(Rt(rt) | Rn(rn) | Rt2(x31) | Rs(rs) | STLXR_h);
}
void Assembler::mov(const Register& rd, const Register& rm) { void Assembler::mov(const Register& rd, const Register& rm) {
// Moves involving the stack pointer are encoded as add immediate with // Moves involving the stack pointer are encoded as add immediate with

View File

@ -1401,6 +1401,42 @@ class Assembler : public AssemblerBase {
// Load literal to register. // Load literal to register.
void ldr(const CPURegister& rt, const Immediate& imm); void ldr(const CPURegister& rt, const Immediate& imm);
// Load-acquire a word (W or X register, selected by rt).
void ldar(const Register& rt, const Register& rn);
// Load-acquire exclusive of a word (W or X register, selected by rt).
void ldaxr(const Register& rt, const Register& rn);
// Store-release a word (W or X register, selected by rt).
void stlr(const Register& rt, const Register& rn);
// Store-release exclusive of a word; the success status is written to the
// W register rs.
void stlxr(const Register& rs, const Register& rt, const Register& rn);
// Load-acquire a byte into the W register rt.
void ldarb(const Register& rt, const Register& rn);
// Load-acquire exclusive of a byte into the W register rt.
void ldaxrb(const Register& rt, const Register& rn);
// Store-release the low byte of the W register rt.
void stlrb(const Register& rt, const Register& rn);
// Store-release exclusive of a byte; the success status is written to the
// W register rs.
void stlxrb(const Register& rs, const Register& rt, const Register& rn);
// Load-acquire a half-word into the W register rt.
void ldarh(const Register& rt, const Register& rn);
// Load-acquire exclusive of a half-word into the W register rt.
void ldaxrh(const Register& rt, const Register& rn);
// Store-release the low half-word of the W register rt.
void stlrh(const Register& rt, const Register& rn);
// Store-release exclusive of a half-word; the success status is written to
// the W register rs.
void stlxrh(const Register& rs, const Register& rt, const Register& rn);
// Move instructions. The default shift of -1 indicates that the move // Move instructions. The default shift of -1 indicates that the move
// instruction will calculate an appropriate 16-bit immediate and left shift // instruction will calculate an appropriate 16-bit immediate and left shift
// that is equal to the 64-bit immediate argument. If an explicit left shift // that is equal to the 64-bit immediate argument. If an explicit left shift
@ -1695,6 +1731,11 @@ class Assembler : public AssemblerBase {
return rt2.code() << Rt2_offset; return rt2.code() << Rt2_offset;
} }
// Encode the store-exclusive status register rs into instruction bits 20:16.
// The stack pointer is not a valid status register.
static Instr Rs(CPURegister rs) {
DCHECK(rs.code() != kSPRegInternalCode);
return rs.code() << Rs_offset;
}
// These encoding functions allow the stack pointer to be encoded, and // These encoding functions allow the stack pointer to be encoded, and
// disallow the zero register. // disallow the zero register.
static Instr RdSP(Register rd) { static Instr RdSP(Register rd) {

View File

@ -118,88 +118,88 @@ const unsigned kFloatMantissaBits = 23;
const unsigned kFloatExponentBits = 8; const unsigned kFloatExponentBits = 8;
#define INSTRUCTION_FIELDS_LIST(V_) \ #define INSTRUCTION_FIELDS_LIST(V_) \
/* Register fields */ \ /* Register fields */ \
V_(Rd, 4, 0, Bits) /* Destination register. */ \ V_(Rd, 4, 0, Bits) /* Destination register. */ \
V_(Rn, 9, 5, Bits) /* First source register. */ \ V_(Rn, 9, 5, Bits) /* First source register. */ \
V_(Rm, 20, 16, Bits) /* Second source register. */ \ V_(Rm, 20, 16, Bits) /* Second source register. */ \
V_(Ra, 14, 10, Bits) /* Third source register. */ \ V_(Ra, 14, 10, Bits) /* Third source register. */ \
V_(Rt, 4, 0, Bits) /* Load dest / store source. */ \ V_(Rt, 4, 0, Bits) /* Load dest / store source. */ \
V_(Rt2, 14, 10, Bits) /* Load second dest / */ \ V_(Rt2, 14, 10, Bits) /* Load second dest / */ \
/* store second source. */ \ /* store second source. */ \
V_(PrefetchMode, 4, 0, Bits) \ V_(Rs, 20, 16, Bits) /* Store-exclusive status */ \
V_(PrefetchMode, 4, 0, Bits) \
\ \
/* Common bits */ \ /* Common bits */ \
V_(SixtyFourBits, 31, 31, Bits) \ V_(SixtyFourBits, 31, 31, Bits) \
V_(FlagsUpdate, 29, 29, Bits) \ V_(FlagsUpdate, 29, 29, Bits) \
\ \
/* PC relative addressing */ \ /* PC relative addressing */ \
V_(ImmPCRelHi, 23, 5, SignedBits) \ V_(ImmPCRelHi, 23, 5, SignedBits) \
V_(ImmPCRelLo, 30, 29, Bits) \ V_(ImmPCRelLo, 30, 29, Bits) \
\ \
/* Add/subtract/logical shift register */ \ /* Add/subtract/logical shift register */ \
V_(ShiftDP, 23, 22, Bits) \ V_(ShiftDP, 23, 22, Bits) \
V_(ImmDPShift, 15, 10, Bits) \ V_(ImmDPShift, 15, 10, Bits) \
\ \
/* Add/subtract immediate */ \ /* Add/subtract immediate */ \
V_(ImmAddSub, 21, 10, Bits) \ V_(ImmAddSub, 21, 10, Bits) \
V_(ShiftAddSub, 23, 22, Bits) \ V_(ShiftAddSub, 23, 22, Bits) \
\ \
/* Add/substract extend */ \ /* Add/substract extend */ \
V_(ImmExtendShift, 12, 10, Bits) \ V_(ImmExtendShift, 12, 10, Bits) \
V_(ExtendMode, 15, 13, Bits) \ V_(ExtendMode, 15, 13, Bits) \
\ \
/* Move wide */ \ /* Move wide */ \
V_(ImmMoveWide, 20, 5, Bits) \ V_(ImmMoveWide, 20, 5, Bits) \
V_(ShiftMoveWide, 22, 21, Bits) \ V_(ShiftMoveWide, 22, 21, Bits) \
\ \
/* Logical immediate, bitfield and extract */ \ /* Logical immediate, bitfield and extract */ \
V_(BitN, 22, 22, Bits) \ V_(BitN, 22, 22, Bits) \
V_(ImmRotate, 21, 16, Bits) \ V_(ImmRotate, 21, 16, Bits) \
V_(ImmSetBits, 15, 10, Bits) \ V_(ImmSetBits, 15, 10, Bits) \
V_(ImmR, 21, 16, Bits) \ V_(ImmR, 21, 16, Bits) \
V_(ImmS, 15, 10, Bits) \ V_(ImmS, 15, 10, Bits) \
\ \
/* Test and branch immediate */ \ /* Test and branch immediate */ \
V_(ImmTestBranch, 18, 5, SignedBits) \ V_(ImmTestBranch, 18, 5, SignedBits) \
V_(ImmTestBranchBit40, 23, 19, Bits) \ V_(ImmTestBranchBit40, 23, 19, Bits) \
V_(ImmTestBranchBit5, 31, 31, Bits) \ V_(ImmTestBranchBit5, 31, 31, Bits) \
\ \
/* Conditionals */ \ /* Conditionals */ \
V_(Condition, 15, 12, Bits) \ V_(Condition, 15, 12, Bits) \
V_(ConditionBranch, 3, 0, Bits) \ V_(ConditionBranch, 3, 0, Bits) \
V_(Nzcv, 3, 0, Bits) \ V_(Nzcv, 3, 0, Bits) \
V_(ImmCondCmp, 20, 16, Bits) \ V_(ImmCondCmp, 20, 16, Bits) \
V_(ImmCondBranch, 23, 5, SignedBits) \ V_(ImmCondBranch, 23, 5, SignedBits) \
\ \
/* Floating point */ \ /* Floating point */ \
V_(FPType, 23, 22, Bits) \ V_(FPType, 23, 22, Bits) \
V_(ImmFP, 20, 13, Bits) \ V_(ImmFP, 20, 13, Bits) \
V_(FPScale, 15, 10, Bits) \ V_(FPScale, 15, 10, Bits) \
\ \
/* Load Store */ \ /* Load Store */ \
V_(ImmLS, 20, 12, SignedBits) \ V_(ImmLS, 20, 12, SignedBits) \
V_(ImmLSUnsigned, 21, 10, Bits) \ V_(ImmLSUnsigned, 21, 10, Bits) \
V_(ImmLSPair, 21, 15, SignedBits) \ V_(ImmLSPair, 21, 15, SignedBits) \
V_(SizeLS, 31, 30, Bits) \ V_(SizeLS, 31, 30, Bits) \
V_(ImmShiftLS, 12, 12, Bits) \ V_(ImmShiftLS, 12, 12, Bits) \
\ \
/* Other immediates */ \ /* Other immediates */ \
V_(ImmUncondBranch, 25, 0, SignedBits) \ V_(ImmUncondBranch, 25, 0, SignedBits) \
V_(ImmCmpBranch, 23, 5, SignedBits) \ V_(ImmCmpBranch, 23, 5, SignedBits) \
V_(ImmLLiteral, 23, 5, SignedBits) \ V_(ImmLLiteral, 23, 5, SignedBits) \
V_(ImmException, 20, 5, Bits) \ V_(ImmException, 20, 5, Bits) \
V_(ImmHint, 11, 5, Bits) \ V_(ImmHint, 11, 5, Bits) \
V_(ImmBarrierDomain, 11, 10, Bits) \ V_(ImmBarrierDomain, 11, 10, Bits) \
V_(ImmBarrierType, 9, 8, Bits) \ V_(ImmBarrierType, 9, 8, Bits) \
\ \
/* System (MRS, MSR) */ \ /* System (MRS, MSR) */ \
V_(ImmSystemRegister, 19, 5, Bits) \ V_(ImmSystemRegister, 19, 5, Bits) \
V_(SysO0, 19, 19, Bits) \ V_(SysO0, 19, 19, Bits) \
V_(SysOp1, 18, 16, Bits) \ V_(SysOp1, 18, 16, Bits) \
V_(SysOp2, 7, 5, Bits) \ V_(SysOp2, 7, 5, Bits) \
V_(CRn, 15, 12, Bits) \ V_(CRn, 15, 12, Bits) \
V_(CRm, 11, 8, Bits) \ V_(CRm, 11, 8, Bits)
#define SYSTEM_REGISTER_FIELDS_LIST(V_, M_) \ #define SYSTEM_REGISTER_FIELDS_LIST(V_, M_) \
/* NZCV */ \ /* NZCV */ \
@ -857,6 +857,29 @@ enum LoadStoreRegisterOffset {
#undef LOAD_STORE_REGISTER_OFFSET #undef LOAD_STORE_REGISTER_OFFSET
}; };
// Load/store acquire/release.
// In these encodings, bits 31:30 select the access size (00 byte, 01
// half-word, 10 word, 11 extended word), bit 23 distinguishes the
// non-exclusive forms (LDAR/STLR) from the exclusives (LDAXR/STLXR), and
// bit 22 is set for loads.
enum LoadStoreAcquireReleaseOp {
LoadStoreAcquireReleaseFixed = 0x08000000,
LoadStoreAcquireReleaseFMask = 0x3F000000,
LoadStoreAcquireReleaseMask = 0xCFC08000,
STLXR_b = LoadStoreAcquireReleaseFixed | 0x00008000,
LDAXR_b = LoadStoreAcquireReleaseFixed | 0x00408000,
STLR_b = LoadStoreAcquireReleaseFixed | 0x00808000,
LDAR_b = LoadStoreAcquireReleaseFixed | 0x00C08000,
STLXR_h = LoadStoreAcquireReleaseFixed | 0x40008000,
LDAXR_h = LoadStoreAcquireReleaseFixed | 0x40408000,
STLR_h = LoadStoreAcquireReleaseFixed | 0x40808000,
LDAR_h = LoadStoreAcquireReleaseFixed | 0x40C08000,
STLXR_w = LoadStoreAcquireReleaseFixed | 0x80008000,
LDAXR_w = LoadStoreAcquireReleaseFixed | 0x80408000,
STLR_w = LoadStoreAcquireReleaseFixed | 0x80808000,
LDAR_w = LoadStoreAcquireReleaseFixed | 0x80C08000,
STLXR_x = LoadStoreAcquireReleaseFixed | 0xC0008000,
LDAXR_x = LoadStoreAcquireReleaseFixed | 0xC0408000,
STLR_x = LoadStoreAcquireReleaseFixed | 0xC0808000,
LDAR_x = LoadStoreAcquireReleaseFixed | 0xC0C08000,
};
// Conditional compare. // Conditional compare.
enum ConditionalCompareOp { enum ConditionalCompareOp {
ConditionalCompareMask = 0x60000000, ConditionalCompareMask = 0x60000000,

View File

@ -217,8 +217,15 @@ void Decoder<V>::DecodeLoadStore(Instruction* instr) {
if (instr->Bit(28) == 0) { if (instr->Bit(28) == 0) {
if (instr->Bit(29) == 0) { if (instr->Bit(29) == 0) {
if (instr->Bit(26) == 0) { if (instr->Bit(26) == 0) {
// TODO(all): VisitLoadStoreExclusive. if (instr->Mask(0xA08000) == 0x800000 ||
instr->Mask(0xA00000) == 0xA00000) {
V::VisitUnallocated(instr);
} else if (instr->Mask(0x808000) == 0) {
// Load/Store exclusive without acquire/release are unimplemented.
V::VisitUnimplemented(instr); V::VisitUnimplemented(instr);
} else {
V::VisitLoadStoreAcquireRelease(instr);
}
} else { } else {
DecodeAdvSIMDLoadStore(instr); DecodeAdvSIMDLoadStore(instr);
} }

View File

@ -39,6 +39,7 @@ namespace internal {
V(LoadStorePreIndex) \ V(LoadStorePreIndex) \
V(LoadStoreRegisterOffset) \ V(LoadStoreRegisterOffset) \
V(LoadStoreUnsignedOffset) \ V(LoadStoreUnsignedOffset) \
V(LoadStoreAcquireRelease) \
V(LogicalShifted) \ V(LogicalShifted) \
V(AddSubShifted) \ V(AddSubShifted) \
V(AddSubExtended) \ V(AddSubExtended) \

View File

@ -914,6 +914,34 @@ void DisassemblingDecoder::VisitLoadStorePairOffset(Instruction* instr) {
Format(instr, mnemonic, form); Format(instr, mnemonic, form);
} }
// Disassemble a load-acquire/store-release instruction. Loads and plain
// store-releases print as "<mnemonic> rt, [xn]"; the store-exclusive forms
// additionally print the W status register rs first. Only the 64-bit
// variants (LDAR_x, etc.) print rt as an X register.
void DisassemblingDecoder::VisitLoadStoreAcquireRelease(Instruction *instr) {
const char *mnemonic = "unimplemented";
const char *form = "'Wt, ['Xn]";
const char *form_x = "'Xt, ['Xn]";
const char *form_stlx = "'Ws, 'Wt, ['Xn]";
const char *form_stlx_x = "'Ws, 'Xt, ['Xn]";
switch (instr->Mask(LoadStoreAcquireReleaseMask)) {
case LDAXR_b: mnemonic = "ldaxrb"; break;
case STLR_b: mnemonic = "stlrb"; break;
case LDAR_b: mnemonic = "ldarb"; break;
case LDAXR_h: mnemonic = "ldaxrh"; break;
case STLR_h: mnemonic = "stlrh"; break;
case LDAR_h: mnemonic = "ldarh"; break;
case LDAXR_w: mnemonic = "ldaxr"; break;
case STLR_w: mnemonic = "stlr"; break;
case LDAR_w: mnemonic = "ldar"; break;
case LDAXR_x: mnemonic = "ldaxr"; form = form_x; break;
case STLR_x: mnemonic = "stlr"; form = form_x; break;
case LDAR_x: mnemonic = "ldar"; form = form_x; break;
case STLXR_h: mnemonic = "stlxrh"; form = form_stlx; break;
case STLXR_b: mnemonic = "stlxrb"; form = form_stlx; break;
case STLXR_w: mnemonic = "stlxr"; form = form_stlx; break;
case STLXR_x: mnemonic = "stlxr"; form = form_stlx_x; break;
// Unrecognized encodings fall back to printing the mask name.
default: form = "(LoadStoreAcquireReleaseMask)";
}
Format(instr, mnemonic, form);
}
void DisassemblingDecoder::VisitFPCompare(Instruction* instr) { void DisassemblingDecoder::VisitFPCompare(Instruction* instr) {
const char *mnemonic = "unimplemented"; const char *mnemonic = "unimplemented";
@ -1295,6 +1323,9 @@ int DisassemblingDecoder::SubstituteRegisterField(Instruction* instr,
} }
break; break;
} }
case 's':
reg_num = instr->Rs();
break;
default: UNREACHABLE(); default: UNREACHABLE();
} }

View File

@ -429,6 +429,31 @@ void Instrument::VisitLoadStoreUnsignedOffset(Instruction* instr) {
InstrumentLoadStore(instr); InstrumentLoadStore(instr);
} }
// Count acquire/release accesses: all load forms (LDAR*/LDAXR*) bump the
// "Load Acquire" counter, all store forms (STLR*/STLXR*) bump the
// "Store Release" counter.
void Instrument::VisitLoadStoreAcquireRelease(Instruction* instr) {
Update();
static Counter* load_counter = GetCounter("Load Acquire");
static Counter* store_counter = GetCounter("Store Release");
switch (instr->Mask(LoadStoreAcquireReleaseMask)) {
case LDAR_b: // Fall-through.
case LDAR_h: // Fall-through.
case LDAR_w: // Fall-through.
case LDAR_x: // Fall-through.
case LDAXR_b: // Fall-through.
case LDAXR_h: // Fall-through.
case LDAXR_w: // Fall-through.
case LDAXR_x: load_counter->Increment(); break;
case STLR_b: // Fall-through.
case STLR_h: // Fall-through.
case STLR_w: // Fall-through.
case STLR_x: // Fall-through.
case STLXR_b: // Fall-through.
case STLXR_h: // Fall-through.
case STLXR_w: // Fall-through.
case STLXR_x: store_counter->Increment(); break;
// The decoder only routes acquire/release encodings here, so any other
// value indicates a decoder bug.
default: UNREACHABLE();
}
}
void Instrument::VisitLogicalShifted(Instruction* instr) { void Instrument::VisitLogicalShifted(Instruction* instr) {
Update(); Update();

View File

@ -309,6 +309,22 @@ LS_MACRO_LIST(DEFINE_FUNCTION)
LSPAIR_MACRO_LIST(DEFINE_FUNCTION) LSPAIR_MACRO_LIST(DEFINE_FUNCTION)
#undef DEFINE_FUNCTION #undef DEFINE_FUNCTION
// Define the load-acquire/store-release macro-assembler wrappers declared in
// macro-assembler-arm64.h. Each checks that macro instructions are allowed,
// then forwards to the corresponding raw assembler instruction.
// Named DEFINE_FUNCTION (not DECLARE_FUNCTION) for consistency with the
// other definition lists in this file: these expand to definitions.
#define DEFINE_FUNCTION(FN, OP)                                     \
  void MacroAssembler::FN(const Register& rt, const Register& rn) { \
    DCHECK(allow_macro_instructions_);                              \
    OP(rt, rn);                                                     \
  }
LDA_STL_MACRO_LIST(DEFINE_FUNCTION)
#undef DEFINE_FUNCTION

// The store-release exclusive wrappers additionally take the status
// register rs, which receives the success/failure result.
#define DEFINE_FUNCTION(FN, OP)                                   \
  void MacroAssembler::FN(const Register& rs, const Register& rt, \
                          const Register& rn) {                   \
    DCHECK(allow_macro_instructions_);                            \
    OP(rs, rt, rn);                                               \
  }
STLX_MACRO_LIST(DEFINE_FUNCTION)
#undef DEFINE_FUNCTION
void MacroAssembler::Asr(const Register& rd, void MacroAssembler::Asr(const Register& rd,
const Register& rn, const Register& rn,

View File

@ -68,6 +68,21 @@ namespace internal {
V(Stp, CPURegister&, rt, rt2, StorePairOpFor(rt, rt2)) \ V(Stp, CPURegister&, rt, rt2, StorePairOpFor(rt, rt2)) \
V(Ldpsw, CPURegister&, rt, rt2, LDPSW_x) V(Ldpsw, CPURegister&, rt, rt2, LDPSW_x)
// Two-operand load-acquire/store-release macros: each entry maps a
// MacroAssembler name to the assembler instruction it forwards to.
#define LDA_STL_MACRO_LIST(V) \
V(Ldarb, ldarb) \
V(Ldarh, ldarh) \
V(Ldar, ldar) \
V(Ldaxrb, ldaxrb) \
V(Ldaxrh, ldaxrh) \
V(Ldaxr, ldaxr) \
V(Stlrb, stlrb) \
V(Stlrh, stlrh) \
V(Stlr, stlr)

// Three-operand store-release exclusive macros (these take an extra status
// register rs).
#define STLX_MACRO_LIST(V) \
V(Stlxrb, stlxrb) \
V(Stlxrh, stlxrh) \
V(Stlxr, stlxr)
// ---------------------------------------------------------------------------- // ----------------------------------------------------------------------------
// Static helper functions // Static helper functions
@ -295,6 +310,17 @@ class MacroAssembler : public Assembler {
void LoadStorePairMacro(const CPURegister& rt, const CPURegister& rt2, void LoadStorePairMacro(const CPURegister& rt, const CPURegister& rt2,
const MemOperand& addr, LoadStorePairOp op); const MemOperand& addr, LoadStorePairOp op);
// Load-acquire/store-release macros, declared from the macro lists above;
// the definitions live in macro-assembler-arm64.cc.
#define DECLARE_FUNCTION(FN, OP) \
inline void FN(const Register& rt, const Register& rn);
LDA_STL_MACRO_LIST(DECLARE_FUNCTION)
#undef DECLARE_FUNCTION

// Store-release exclusive macros take an additional W status register rs.
#define DECLARE_FUNCTION(FN, OP) \
inline void FN(const Register& rs, const Register& rt, const Register& rn);
STLX_MACRO_LIST(DECLARE_FUNCTION)
#undef DECLARE_FUNCTION
// V8-specific load/store helpers. // V8-specific load/store helpers.
void Load(const Register& rt, const MemOperand& addr, Representation r); void Load(const Register& rt, const MemOperand& addr, Representation r);
void Store(const Register& rt, const MemOperand& addr, Representation r); void Store(const Register& rt, const MemOperand& addr, Representation r);

View File

@ -1900,6 +1900,9 @@ void Simulator::LoadStoreWriteBack(unsigned addr_reg,
} }
} }
// Simulation of load-acquire/store-release instructions is not implemented
// yet; the assembler/disassembler support landed first.
void Simulator::VisitLoadStoreAcquireRelease(Instruction* instr) {
// TODO(binji): Implement simulation of ldar/stlr/ldaxr/stlxr and their
// byte/half-word variants.
}
void Simulator::CheckMemoryAccess(uintptr_t address, uintptr_t stack) { void Simulator::CheckMemoryAccess(uintptr_t address, uintptr_t stack) {
if ((address >= stack_limit_) && (address < stack)) { if ((address >= stack_limit_) && (address < stack)) {

View File

@ -1259,6 +1259,24 @@ TEST_(load_store_pair) {
CLEANUP(); CLEANUP();
} }
// Check the disassembly of every acquire/release load/store: the word forms,
// the byte (b) and half-word (h) forms, and the store-release exclusives
// that print their W status register first.
TEST_(load_store_acquire_release) {
SET_UP_MASM();

COMPARE(ldar(w0, x1), "ldar w0, [x1]");
COMPARE(ldarb(w2, x3), "ldarb w2, [x3]");
COMPARE(ldarh(w4, x5), "ldarh w4, [x5]");
COMPARE(ldaxr(w6, x7), "ldaxr w6, [x7]");
COMPARE(ldaxrb(w8, x9), "ldaxrb w8, [x9]");
COMPARE(ldaxrh(w10, x11), "ldaxrh w10, [x11]");
COMPARE(stlr(w12, x13), "stlr w12, [x13]");
COMPARE(stlrb(w14, x15), "stlrb w14, [x15]");
COMPARE(stlrh(w16, x17), "stlrh w16, [x17]");
COMPARE(stlxr(w18, w19, x20), "stlxr w18, w19, [x20]");
COMPARE(stlxrb(w21, w22, x23), "stlxrb w21, w22, [x23]");
COMPARE(stlxrh(w24, w25, x26), "stlxrh w24, w25, [x26]");

CLEANUP();
}
#if 0 // TODO(all): enable. #if 0 // TODO(all): enable.
TEST_(load_literal) { TEST_(load_literal) {