aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorAlexander Gilday <alexander.gilday@arm.com>2018-04-13 16:15:34 +0100
committerAlexander Gilday <alexander.gilday@arm.com>2018-04-19 14:16:32 +0100
commit2c3cebbd93cc0db27e1e55740bd89a8045148d47 (patch)
tree433c7bb4b51d24397ed00688962c2cf42bc01809
parent560332df277a0e143763e5f5038fbb539c57453b (diff)
Add support for Load/Store LORegion instructions.
Includes the stores STLLR, STLLRB, STLLRH and the loads LDLAR, LDLARB, LDLARH. Change-Id: I6691072c4aa218eb097414a8b559623e47707078
-rw-r--r--src/aarch64/assembler-aarch64.cc38
-rw-r--r--src/aarch64/assembler-aarch64.h18
-rw-r--r--src/aarch64/constants-aarch64.h10
-rw-r--r--src/aarch64/disasm-aarch64.cc8
-rw-r--r--src/aarch64/macro-assembler-aarch64.h30
-rw-r--r--src/aarch64/simulator-aarch64.cc10
-rw-r--r--test/aarch64/test-assembler-aarch64.cc57
-rw-r--r--test/aarch64/test-disasm-aarch64.cc174
8 files changed, 269 insertions, 76 deletions
diff --git a/src/aarch64/assembler-aarch64.cc b/src/aarch64/assembler-aarch64.cc
index 8525aa72..77a01d32 100644
--- a/src/aarch64/assembler-aarch64.cc
+++ b/src/aarch64/assembler-aarch64.cc
@@ -1388,6 +1388,44 @@ void Assembler::ldar(const Register& rt, const MemOperand& src) {
}
+void Assembler::stllrb(const Register& rt, const MemOperand& dst) {
+ VIXL_ASSERT(dst.IsImmediateOffset() && (dst.GetOffset() == 0));
+ Emit(STLLRB | Rs_mask | Rt(rt) | Rt2_mask | RnSP(dst.GetBaseRegister()));
+}
+
+
+void Assembler::stllrh(const Register& rt, const MemOperand& dst) {
+ VIXL_ASSERT(dst.IsImmediateOffset() && (dst.GetOffset() == 0));
+ Emit(STLLRH | Rs_mask | Rt(rt) | Rt2_mask | RnSP(dst.GetBaseRegister()));
+}
+
+
+void Assembler::stllr(const Register& rt, const MemOperand& dst) {
+ VIXL_ASSERT(dst.IsImmediateOffset() && (dst.GetOffset() == 0));
+ LoadStoreExclusive op = rt.Is64Bits() ? STLLR_x : STLLR_w;
+ Emit(op | Rs_mask | Rt(rt) | Rt2_mask | RnSP(dst.GetBaseRegister()));
+}
+
+
+void Assembler::ldlarb(const Register& rt, const MemOperand& src) {
+ VIXL_ASSERT(src.IsImmediateOffset() && (src.GetOffset() == 0));
+ Emit(LDLARB | Rs_mask | Rt(rt) | Rt2_mask | RnSP(src.GetBaseRegister()));
+}
+
+
+void Assembler::ldlarh(const Register& rt, const MemOperand& src) {
+ VIXL_ASSERT(src.IsImmediateOffset() && (src.GetOffset() == 0));
+ Emit(LDLARH | Rs_mask | Rt(rt) | Rt2_mask | RnSP(src.GetBaseRegister()));
+}
+
+
+void Assembler::ldlar(const Register& rt, const MemOperand& src) {
+ VIXL_ASSERT(src.IsImmediateOffset() && (src.GetOffset() == 0));
+ LoadStoreExclusive op = rt.Is64Bits() ? LDLAR_x : LDLAR_w;
+ Emit(op | Rs_mask | Rt(rt) | Rt2_mask | RnSP(src.GetBaseRegister()));
+}
+
+
// clang-format off
#define COMPARE_AND_SWAP_W_X_LIST(V) \
V(cas, CAS) \
diff --git a/src/aarch64/assembler-aarch64.h b/src/aarch64/assembler-aarch64.h
index 09613bb6..1740ff1c 100644
--- a/src/aarch64/assembler-aarch64.h
+++ b/src/aarch64/assembler-aarch64.h
@@ -1173,6 +1173,24 @@ class Assembler : public vixl::internal::AssemblerBase {
// Load-acquire register.
void ldar(const Register& rt, const MemOperand& src);
+ // Store LORelease byte [Armv8.1].
+ void stllrb(const Register& rt, const MemOperand& dst);
+
+ // Store LORelease half-word [Armv8.1].
+ void stllrh(const Register& rt, const MemOperand& dst);
+
+ // Store LORelease register [Armv8.1].
+ void stllr(const Register& rt, const MemOperand& dst);
+
+ // Load LOAcquire byte [Armv8.1].
+ void ldlarb(const Register& rt, const MemOperand& src);
+
+ // Load LOAcquire half-word [Armv8.1].
+ void ldlarh(const Register& rt, const MemOperand& src);
+
+ // Load LOAcquire register [Armv8.1].
+ void ldlar(const Register& rt, const MemOperand& src);
+
// Compare and Swap word or doubleword in memory [Armv8.1].
void cas(const Register& rs, const Register& rt, const MemOperand& src);
diff --git a/src/aarch64/constants-aarch64.h b/src/aarch64/constants-aarch64.h
index 24e1d88a..995ac2dc 100644
--- a/src/aarch64/constants-aarch64.h
+++ b/src/aarch64/constants-aarch64.h
@@ -979,6 +979,16 @@ enum LoadStoreExclusive {
LDAR_w = LoadStoreExclusiveFixed | 0x80C08000,
LDAR_x = LoadStoreExclusiveFixed | 0xC0C08000,
+ // v8.1 Load/store LORegion ops
+ STLLRB = LoadStoreExclusiveFixed | 0x00800000,
+ LDLARB = LoadStoreExclusiveFixed | 0x00C00000,
+ STLLRH = LoadStoreExclusiveFixed | 0x40800000,
+ LDLARH = LoadStoreExclusiveFixed | 0x40C00000,
+ STLLR_w = LoadStoreExclusiveFixed | 0x80800000,
+ LDLAR_w = LoadStoreExclusiveFixed | 0x80C00000,
+ STLLR_x = LoadStoreExclusiveFixed | 0xC0800000,
+ LDLAR_x = LoadStoreExclusiveFixed | 0xC0C00000,
+
// v8.1 Load/store exclusive ops
LSEBit_l = 0x00400000,
LSEBit_o0 = 0x00008000,
diff --git a/src/aarch64/disasm-aarch64.cc b/src/aarch64/disasm-aarch64.cc
index 98c7fb45..71f00753 100644
--- a/src/aarch64/disasm-aarch64.cc
+++ b/src/aarch64/disasm-aarch64.cc
@@ -1274,6 +1274,14 @@ void Disassembler::VisitLoadStorePairNonTemporal(const Instruction *instr) {
V(LDARH_w, "ldarh", "'Wt") \
V(LDAR_w, "ldar", "'Wt") \
V(LDAR_x, "ldar", "'Xt") \
+ V(STLLRB, "stllrb", "'Wt") \
+ V(STLLRH, "stllrh", "'Wt") \
+ V(STLLR_w, "stllr", "'Wt") \
+ V(STLLR_x, "stllr", "'Xt") \
+ V(LDLARB, "ldlarb", "'Wt") \
+ V(LDLARH, "ldlarh", "'Wt") \
+ V(LDLAR_w, "ldlar", "'Wt") \
+ V(LDLAR_x, "ldlar", "'Xt") \
V(CAS_w, "cas", "'Ws, 'Wt") \
V(CAS_x, "cas", "'Xs, 'Xt") \
V(CASA_w, "casa", "'Ws, 'Wt") \
diff --git a/src/aarch64/macro-assembler-aarch64.h b/src/aarch64/macro-assembler-aarch64.h
index 29a29556..959300cc 100644
--- a/src/aarch64/macro-assembler-aarch64.h
+++ b/src/aarch64/macro-assembler-aarch64.h
@@ -1474,6 +1474,21 @@ class MacroAssembler : public Assembler, public MacroAssemblerInterface {
SingleEmissionCheckScope guard(this);
ldarh(rt, src);
}
+ void Ldlar(const Register& rt, const MemOperand& src) {
+ VIXL_ASSERT(allow_macro_instructions_);
+ SingleEmissionCheckScope guard(this);
+ ldlar(rt, src);
+ }
+ void Ldlarb(const Register& rt, const MemOperand& src) {
+ VIXL_ASSERT(allow_macro_instructions_);
+ SingleEmissionCheckScope guard(this);
+ ldlarb(rt, src);
+ }
+ void Ldlarh(const Register& rt, const MemOperand& src) {
+ VIXL_ASSERT(allow_macro_instructions_);
+ SingleEmissionCheckScope guard(this);
+ ldlarh(rt, src);
+ }
void Ldaxp(const Register& rt, const Register& rt2, const MemOperand& src) {
VIXL_ASSERT(allow_macro_instructions_);
VIXL_ASSERT(!rt.Aliases(rt2));
@@ -1927,6 +1942,21 @@ class MacroAssembler : public Assembler, public MacroAssemblerInterface {
SingleEmissionCheckScope guard(this);
stlrh(rt, dst);
}
+ void Stllr(const Register& rt, const MemOperand& dst) {
+ VIXL_ASSERT(allow_macro_instructions_);
+ SingleEmissionCheckScope guard(this);
+ stllr(rt, dst);
+ }
+ void Stllrb(const Register& rt, const MemOperand& dst) {
+ VIXL_ASSERT(allow_macro_instructions_);
+ SingleEmissionCheckScope guard(this);
+ stllrb(rt, dst);
+ }
+ void Stllrh(const Register& rt, const MemOperand& dst) {
+ VIXL_ASSERT(allow_macro_instructions_);
+ SingleEmissionCheckScope guard(this);
+ stllrh(rt, dst);
+ }
void Stlxp(const Register& rs,
const Register& rt,
const Register& rt2,
diff --git a/src/aarch64/simulator-aarch64.cc b/src/aarch64/simulator-aarch64.cc
index 3abc7800..96c56699 100644
--- a/src/aarch64/simulator-aarch64.cc
+++ b/src/aarch64/simulator-aarch64.cc
@@ -1651,8 +1651,8 @@ void Simulator::VisitLoadStoreExclusive(const Instruction* instr) {
LoadStoreExclusive op =
static_cast<LoadStoreExclusive>(instr->Mask(LoadStoreExclusiveMask));
- bool is_acquire_release = instr->GetLdStXAcquireRelease();
bool is_exclusive = !instr->GetLdStXNotExclusive();
+ bool is_acquire_release = !is_exclusive || instr->GetLdStXAcquireRelease();
bool is_load = instr->GetLdStXLoad();
bool is_pair = instr->GetLdStXPair();
@@ -1728,21 +1728,25 @@ void Simulator::VisitLoadStoreExclusive(const Instruction* instr) {
case LDXRB_w:
case LDAXRB_w:
case LDARB_w:
+ case LDLARB:
WriteWRegister(rt, Memory::Read<uint8_t>(address), NoRegLog);
break;
case LDXRH_w:
case LDAXRH_w:
case LDARH_w:
+ case LDLARH:
WriteWRegister(rt, Memory::Read<uint16_t>(address), NoRegLog);
break;
case LDXR_w:
case LDAXR_w:
case LDAR_w:
+ case LDLAR_w:
WriteWRegister(rt, Memory::Read<uint32_t>(address), NoRegLog);
break;
case LDXR_x:
case LDAXR_x:
case LDAR_x:
+ case LDLAR_x:
WriteXRegister(rt, Memory::Read<uint64_t>(address), NoRegLog);
break;
case LDXP_w:
@@ -1799,21 +1803,25 @@ void Simulator::VisitLoadStoreExclusive(const Instruction* instr) {
case STXRB_w:
case STLXRB_w:
case STLRB_w:
+ case STLLRB:
Memory::Write<uint8_t>(address, ReadWRegister(rt));
break;
case STXRH_w:
case STLXRH_w:
case STLRH_w:
+ case STLLRH:
Memory::Write<uint16_t>(address, ReadWRegister(rt));
break;
case STXR_w:
case STLXR_w:
case STLR_w:
+ case STLLR_w:
Memory::Write<uint32_t>(address, ReadWRegister(rt));
break;
case STXR_x:
case STLXR_x:
case STLR_x:
+ case STLLR_x:
Memory::Write<uint64_t>(address, ReadXRegister(rt));
break;
case STXP_w:
diff --git a/test/aarch64/test-assembler-aarch64.cc b/test/aarch64/test-assembler-aarch64.cc
index 2f58d216..65dbed9b 100644
--- a/test/aarch64/test-assembler-aarch64.cc
+++ b/test/aarch64/test-assembler-aarch64.cc
@@ -15890,6 +15890,63 @@ TEST(ldar_stlr) {
}
+TEST(ldlar_stllr) {
+ // The middle value is read, modified, and written. The padding exists only to
+ // check for over-write.
+ uint8_t b[] = {0, 0x12, 0};
+ uint16_t h[] = {0, 0x1234, 0};
+ uint32_t w[] = {0, 0x12345678, 0};
+ uint64_t x[] = {0, 0x123456789abcdef0, 0};
+
+ SETUP();
+ START();
+
+ __ Mov(x10, reinterpret_cast<uintptr_t>(&b[1]));
+ __ Ldlarb(w0, MemOperand(x10));
+ __ Add(w0, w0, 1);
+ __ Stllrb(w0, MemOperand(x10));
+
+ __ Mov(x10, reinterpret_cast<uintptr_t>(&h[1]));
+ __ Ldlarh(w0, MemOperand(x10));
+ __ Add(w0, w0, 1);
+ __ Stllrh(w0, MemOperand(x10));
+
+ __ Mov(x10, reinterpret_cast<uintptr_t>(&w[1]));
+ __ Ldlar(w0, MemOperand(x10));
+ __ Add(w0, w0, 1);
+ __ Stllr(w0, MemOperand(x10));
+
+ __ Mov(x10, reinterpret_cast<uintptr_t>(&x[1]));
+ __ Ldlar(x0, MemOperand(x10));
+ __ Add(x0, x0, 1);
+ __ Stllr(x0, MemOperand(x10));
+
+ END();
+
+// TODO: test on real hardware when available
+#ifdef VIXL_INCLUDE_SIMULATOR_AARCH64
+ RUN();
+
+ ASSERT_EQUAL_32(0x13, b[1]);
+ ASSERT_EQUAL_32(0x1235, h[1]);
+ ASSERT_EQUAL_32(0x12345679, w[1]);
+ ASSERT_EQUAL_64(0x123456789abcdef1, x[1]);
+
+ // Check for over-write.
+ ASSERT_EQUAL_32(0, b[0]);
+ ASSERT_EQUAL_32(0, b[2]);
+ ASSERT_EQUAL_32(0, h[0]);
+ ASSERT_EQUAL_32(0, h[2]);
+ ASSERT_EQUAL_32(0, w[0]);
+ ASSERT_EQUAL_32(0, w[2]);
+ ASSERT_EQUAL_64(0, x[0]);
+ ASSERT_EQUAL_64(0, x[2]);
+#endif // VIXL_INCLUDE_SIMULATOR_AARCH64
+
+ TEARDOWN();
+}
+
+
TEST(ldxr_stxr) {
// The middle value is read, modified, and written. The padding exists only to
// check for over-write.
diff --git a/test/aarch64/test-disasm-aarch64.cc b/test/aarch64/test-disasm-aarch64.cc
index 3c900d6a..2cd0a60b 100644
--- a/test/aarch64/test-disasm-aarch64.cc
+++ b/test/aarch64/test-disasm-aarch64.cc
@@ -1848,81 +1848,105 @@ TEST(load_store_exclusive) {
COMPARE(stlr(w3, MemOperand(sp)), "stlr w3, [sp]");
COMPARE(stlr(x4, MemOperand(x5)), "stlr x4, [x5]");
COMPARE(stlr(x6, MemOperand(sp)), "stlr x6, [sp]");
- COMPARE(ldarb(w7, MemOperand(x8)), "ldarb w7, [x8]");
- COMPARE(ldarb(w9, MemOperand(sp)), "ldarb w9, [sp]");
- COMPARE(ldarb(x10, MemOperand(x11)), "ldarb w10, [x11]");
- COMPARE(ldarb(x12, MemOperand(sp)), "ldarb w12, [sp]");
- COMPARE(ldarh(w13, MemOperand(x14)), "ldarh w13, [x14]");
- COMPARE(ldarh(w15, MemOperand(sp)), "ldarh w15, [sp]");
- COMPARE(ldarh(x16, MemOperand(x17)), "ldarh w16, [x17]");
- COMPARE(ldarh(x18, MemOperand(sp)), "ldarh w18, [sp]");
- COMPARE(ldar(w19, MemOperand(x20)), "ldar w19, [x20]");
- COMPARE(ldar(w21, MemOperand(sp)), "ldar w21, [sp]");
- COMPARE(ldar(x22, MemOperand(x23)), "ldar x22, [x23]");
- COMPARE(ldar(x24, MemOperand(sp)), "ldar x24, [sp]");
-
- COMPARE(cas(w25, w26, MemOperand(x27)), "cas w25, w26, [x27]");
- COMPARE(cas(w28, w29, MemOperand(sp)), "cas w28, w29, [sp]");
- COMPARE(cas(x30, x0, MemOperand(x1)), "cas x30, x0, [x1]");
- COMPARE(cas(x2, x3, MemOperand(sp)), "cas x2, x3, [sp]");
- COMPARE(casa(w4, w5, MemOperand(x6)), "casa w4, w5, [x6]");
- COMPARE(casa(w7, w8, MemOperand(sp)), "casa w7, w8, [sp]");
- COMPARE(casa(x9, x10, MemOperand(x11)), "casa x9, x10, [x11]");
- COMPARE(casa(x12, x13, MemOperand(sp)), "casa x12, x13, [sp]");
- COMPARE(casl(w14, w15, MemOperand(x16)), "casl w14, w15, [x16]");
- COMPARE(casl(w17, w18, MemOperand(sp)), "casl w17, w18, [sp]");
- COMPARE(casl(x19, x20, MemOperand(x21)), "casl x19, x20, [x21]");
- COMPARE(casl(x22, x23, MemOperand(sp)), "casl x22, x23, [sp]");
- COMPARE(casal(w24, w25, MemOperand(x26)), "casal w24, w25, [x26]");
- COMPARE(casal(w27, w28, MemOperand(sp)), "casal w27, w28, [sp]");
- COMPARE(casal(x29, x30, MemOperand(x0)), "casal x29, x30, [x0]");
- COMPARE(casal(x1, x2, MemOperand(sp)), "casal x1, x2, [sp]");
- COMPARE(casb(w3, w4, MemOperand(x5)), "casb w3, w4, [x5]");
- COMPARE(casb(w6, w7, MemOperand(sp)), "casb w6, w7, [sp]");
- COMPARE(casab(w8, w9, MemOperand(x10)), "casab w8, w9, [x10]");
- COMPARE(casab(w11, w12, MemOperand(sp)), "casab w11, w12, [sp]");
- COMPARE(caslb(w13, w14, MemOperand(x15)), "caslb w13, w14, [x15]");
- COMPARE(caslb(w16, w17, MemOperand(sp)), "caslb w16, w17, [sp]");
- COMPARE(casalb(w18, w19, MemOperand(x20)), "casalb w18, w19, [x20]");
- COMPARE(casalb(w21, w22, MemOperand(sp)), "casalb w21, w22, [sp]");
- COMPARE(cash(w23, w24, MemOperand(x25)), "cash w23, w24, [x25]");
- COMPARE(cash(w26, w27, MemOperand(sp)), "cash w26, w27, [sp]");
- COMPARE(casah(w28, w29, MemOperand(x30)), "casah w28, w29, [x30]");
- COMPARE(casah(w0, w1, MemOperand(sp)), "casah w0, w1, [sp]");
- COMPARE(caslh(w2, w3, MemOperand(x4)), "caslh w2, w3, [x4]");
- COMPARE(caslh(w5, w6, MemOperand(sp)), "caslh w5, w6, [sp]");
- COMPARE(casalh(w7, w8, MemOperand(x9)), "casalh w7, w8, [x9]");
- COMPARE(casalh(w10, w11, MemOperand(sp)), "casalh w10, w11, [sp]");
- COMPARE(casp(w12, w13, w14, w15, MemOperand(x16)),
- "casp w12, w13, w14, w15, [x16]");
- COMPARE(casp(w18, w19, w20, w21, MemOperand(sp)),
- "casp w18, w19, w20, w21, [sp]");
- COMPARE(casp(x22, x23, x24, x25, MemOperand(x26)),
- "casp x22, x23, x24, x25, [x26]");
- COMPARE(casp(x28, x29, x0, x1, MemOperand(sp)),
- "casp x28, x29, x0, x1, [sp]");
- COMPARE(caspa(w2, w3, w4, w5, MemOperand(x6)), "caspa w2, w3, w4, w5, [x6]");
- COMPARE(caspa(w8, w9, w10, w11, MemOperand(sp)),
- "caspa w8, w9, w10, w11, [sp]");
- COMPARE(caspa(x12, x13, x14, x15, MemOperand(x16)),
- "caspa x12, x13, x14, x15, [x16]");
- COMPARE(caspa(x18, x19, x20, x21, MemOperand(sp)),
- "caspa x18, x19, x20, x21, [sp]");
- COMPARE(caspl(w22, w23, w24, w25, MemOperand(x26)),
- "caspl w22, w23, w24, w25, [x26]");
- COMPARE(caspl(w28, w29, w0, w1, MemOperand(sp)),
- "caspl w28, w29, w0, w1, [sp]");
- COMPARE(caspl(x2, x3, x4, x5, MemOperand(x6)), "caspl x2, x3, x4, x5, [x6]");
- COMPARE(caspl(x8, x9, x10, x11, MemOperand(sp)),
- "caspl x8, x9, x10, x11, [sp]");
- COMPARE(caspal(w12, w13, w14, w15, MemOperand(x16)),
- "caspal w12, w13, w14, w15, [x16]");
- COMPARE(caspal(w18, w19, w20, w21, MemOperand(sp)),
- "caspal w18, w19, w20, w21, [sp]");
- COMPARE(caspal(x22, x23, x24, x25, MemOperand(x26)),
- "caspal x22, x23, x24, x25, [x26]");
- COMPARE(caspal(x28, x29, x0, x1, MemOperand(sp)),
- "caspal x28, x29, x0, x1, [sp]");
+ COMPARE(stllrb(w7, MemOperand(x8)), "stllrb w7, [x8]");
+ COMPARE(stllrb(w9, MemOperand(sp)), "stllrb w9, [sp]");
+ COMPARE(stllrb(x10, MemOperand(x11)), "stllrb w10, [x11]");
+ COMPARE(stllrb(x12, MemOperand(sp)), "stllrb w12, [sp]");
+ COMPARE(stllrh(w13, MemOperand(x14)), "stllrh w13, [x14]");
+ COMPARE(stllrh(w15, MemOperand(sp)), "stllrh w15, [sp]");
+ COMPARE(stllrh(x16, MemOperand(x17)), "stllrh w16, [x17]");
+ COMPARE(stllrh(x18, MemOperand(sp)), "stllrh w18, [sp]");
+ COMPARE(stllr(w19, MemOperand(x20)), "stllr w19, [x20]");
+ COMPARE(stllr(w21, MemOperand(sp)), "stllr w21, [sp]");
+ COMPARE(stllr(x22, MemOperand(x23)), "stllr x22, [x23]");
+ COMPARE(stllr(x24, MemOperand(sp)), "stllr x24, [sp]");
+ COMPARE(ldarb(w25, MemOperand(x26)), "ldarb w25, [x26]");
+ COMPARE(ldarb(w27, MemOperand(sp)), "ldarb w27, [sp]");
+ COMPARE(ldarb(x28, MemOperand(x29)), "ldarb w28, [x29]");
+ COMPARE(ldarb(x30, MemOperand(sp)), "ldarb w30, [sp]");
+ COMPARE(ldarh(w0, MemOperand(x1)), "ldarh w0, [x1]");
+ COMPARE(ldarh(w2, MemOperand(sp)), "ldarh w2, [sp]");
+ COMPARE(ldarh(x3, MemOperand(x4)), "ldarh w3, [x4]");
+ COMPARE(ldarh(x5, MemOperand(sp)), "ldarh w5, [sp]");
+ COMPARE(ldar(w6, MemOperand(x7)), "ldar w6, [x7]");
+ COMPARE(ldar(w8, MemOperand(sp)), "ldar w8, [sp]");
+ COMPARE(ldar(x9, MemOperand(x10)), "ldar x9, [x10]");
+ COMPARE(ldar(x11, MemOperand(sp)), "ldar x11, [sp]");
+ COMPARE(ldlarb(w12, MemOperand(x13)), "ldlarb w12, [x13]");
+ COMPARE(ldlarb(w14, MemOperand(sp)), "ldlarb w14, [sp]");
+ COMPARE(ldlarb(x15, MemOperand(x16)), "ldlarb w15, [x16]");
+ COMPARE(ldlarb(x17, MemOperand(sp)), "ldlarb w17, [sp]");
+ COMPARE(ldlarh(w18, MemOperand(x19)), "ldlarh w18, [x19]");
+ COMPARE(ldlarh(w20, MemOperand(sp)), "ldlarh w20, [sp]");
+ COMPARE(ldlarh(x21, MemOperand(x22)), "ldlarh w21, [x22]");
+ COMPARE(ldlarh(x23, MemOperand(sp)), "ldlarh w23, [sp]");
+ COMPARE(ldlar(w24, MemOperand(x25)), "ldlar w24, [x25]");
+ COMPARE(ldlar(w26, MemOperand(sp)), "ldlar w26, [sp]");
+ COMPARE(ldlar(x27, MemOperand(x28)), "ldlar x27, [x28]");
+ COMPARE(ldlar(x29, MemOperand(sp)), "ldlar x29, [sp]");
+
+ COMPARE(cas(w30, w0, MemOperand(x1)), "cas w30, w0, [x1]");
+ COMPARE(cas(w2, w3, MemOperand(sp)), "cas w2, w3, [sp]");
+ COMPARE(cas(x4, x5, MemOperand(x6)), "cas x4, x5, [x6]");
+ COMPARE(cas(x7, x8, MemOperand(sp)), "cas x7, x8, [sp]");
+ COMPARE(casa(w9, w10, MemOperand(x11)), "casa w9, w10, [x11]");
+ COMPARE(casa(w12, w13, MemOperand(sp)), "casa w12, w13, [sp]");
+ COMPARE(casa(x14, x15, MemOperand(x16)), "casa x14, x15, [x16]");
+ COMPARE(casa(x17, x18, MemOperand(sp)), "casa x17, x18, [sp]");
+ COMPARE(casl(w19, w20, MemOperand(x21)), "casl w19, w20, [x21]");
+ COMPARE(casl(w22, w23, MemOperand(sp)), "casl w22, w23, [sp]");
+ COMPARE(casl(x24, x25, MemOperand(x26)), "casl x24, x25, [x26]");
+ COMPARE(casl(x27, x28, MemOperand(sp)), "casl x27, x28, [sp]");
+ COMPARE(casal(w29, w30, MemOperand(x0)), "casal w29, w30, [x0]");
+ COMPARE(casal(w1, w2, MemOperand(sp)), "casal w1, w2, [sp]");
+ COMPARE(casal(x3, x4, MemOperand(x5)), "casal x3, x4, [x5]");
+ COMPARE(casal(x6, x7, MemOperand(sp)), "casal x6, x7, [sp]");
+ COMPARE(casb(w8, w9, MemOperand(x10)), "casb w8, w9, [x10]");
+ COMPARE(casb(w11, w12, MemOperand(sp)), "casb w11, w12, [sp]");
+ COMPARE(casab(w13, w14, MemOperand(x15)), "casab w13, w14, [x15]");
+ COMPARE(casab(w16, w17, MemOperand(sp)), "casab w16, w17, [sp]");
+ COMPARE(caslb(w18, w19, MemOperand(x20)), "caslb w18, w19, [x20]");
+ COMPARE(caslb(w21, w22, MemOperand(sp)), "caslb w21, w22, [sp]");
+ COMPARE(casalb(w23, w24, MemOperand(x25)), "casalb w23, w24, [x25]");
+ COMPARE(casalb(w26, w27, MemOperand(sp)), "casalb w26, w27, [sp]");
+ COMPARE(cash(w28, w29, MemOperand(x30)), "cash w28, w29, [x30]");
+ COMPARE(cash(w0, w1, MemOperand(sp)), "cash w0, w1, [sp]");
+ COMPARE(casah(w2, w3, MemOperand(x4)), "casah w2, w3, [x4]");
+ COMPARE(casah(w5, w6, MemOperand(sp)), "casah w5, w6, [sp]");
+ COMPARE(caslh(w7, w8, MemOperand(x9)), "caslh w7, w8, [x9]");
+ COMPARE(caslh(w10, w11, MemOperand(sp)), "caslh w10, w11, [sp]");
+ COMPARE(casalh(w12, w13, MemOperand(x14)), "casalh w12, w13, [x14]");
+ COMPARE(casalh(w15, w16, MemOperand(sp)), "casalh w15, w16, [sp]");
+ COMPARE(casp(w18, w19, w20, w21, MemOperand(x22)),
+ "casp w18, w19, w20, w21, [x22]");
+ COMPARE(casp(w24, w25, w26, w27, MemOperand(sp)),
+ "casp w24, w25, w26, w27, [sp]");
+ COMPARE(casp(x28, x29, x0, x1, MemOperand(x2)),
+ "casp x28, x29, x0, x1, [x2]");
+ COMPARE(casp(x4, x5, x6, x7, MemOperand(sp)), "casp x4, x5, x6, x7, [sp]");
+ COMPARE(caspa(w8, w9, w10, w11, MemOperand(x12)),
+ "caspa w8, w9, w10, w11, [x12]");
+ COMPARE(caspa(w14, w15, w16, w17, MemOperand(sp)),
+ "caspa w14, w15, w16, w17, [sp]");
+ COMPARE(caspa(x18, x19, x20, x21, MemOperand(x22)),
+ "caspa x18, x19, x20, x21, [x22]");
+ COMPARE(caspa(x24, x25, x26, x27, MemOperand(sp)),
+ "caspa x24, x25, x26, x27, [sp]");
+ COMPARE(caspl(w28, w29, w0, w1, MemOperand(x2)),
+ "caspl w28, w29, w0, w1, [x2]");
+ COMPARE(caspl(w4, w5, w6, w7, MemOperand(sp)), "caspl w4, w5, w6, w7, [sp]");
+ COMPARE(caspl(x8, x9, x10, x11, MemOperand(x12)),
+ "caspl x8, x9, x10, x11, [x12]");
+ COMPARE(caspl(x14, x15, x16, x17, MemOperand(sp)),
+ "caspl x14, x15, x16, x17, [sp]");
+ COMPARE(caspal(w18, w19, w20, w21, MemOperand(x22)),
+ "caspal w18, w19, w20, w21, [x22]");
+ COMPARE(caspal(w24, w25, w26, w27, MemOperand(sp)),
+ "caspal w24, w25, w26, w27, [sp]");
+ COMPARE(caspal(x28, x29, x0, x1, MemOperand(x2)),
+ "caspal x28, x29, x0, x1, [x2]");
+ COMPARE(caspal(x4, x5, x6, x7, MemOperand(sp)),
+ "caspal x4, x5, x6, x7, [sp]");
CLEANUP();