diff options
author | Alexandre Rames <alexandre.rames@uop.re> | 2017-02-05 20:22:52 -0800 |
---|---|---|
committer | Jacob Bramley <jacob.bramley@arm.com> | 2017-03-29 07:48:11 +0000 |
commit | 62799611b9a3406de8c456052a0e4e8c7d728203 (patch) | |
tree | 0b3d446ebabed65aaf214cef28e347f2d1f4a451 | |
parent | cf4b3000286d141e369c86e647bdd7dcf06e5dc0 (diff) |
AArch64: Add support for `MacroAssembler::TailCallRuntime()`.
This is similar to the existing `MacroAssembler::CallRuntime()`, but
emits a branch instruction (or, when generating simulated code, behaves
as if it did) instead of a 'branch and link' instruction.
Change-Id: I95f5ed010590edff93c09ac4eabf4334a13109ea
-rw-r--r-- | src/aarch64/macro-assembler-aarch64.h | 29 | ||||
-rw-r--r-- | src/aarch64/simulator-aarch64.cc | 10 | ||||
-rw-r--r-- | src/aarch64/simulator-constants-aarch64.h | 6 | ||||
-rw-r--r-- | test/aarch64/test-assembler-aarch64.cc | 26 |
4 files changed, 62 insertions(+), 9 deletions(-)
diff --git a/src/aarch64/macro-assembler-aarch64.h b/src/aarch64/macro-assembler-aarch64.h index 825c16dd..414e962a 100644 --- a/src/aarch64/macro-assembler-aarch64.h +++ b/src/aarch64/macro-assembler-aarch64.h @@ -3034,7 +3034,17 @@ class MacroAssembler : public Assembler, public MacroAssemblerInterface { #ifdef VIXL_HAS_MACROASSEMBLER_RUNTIME_CALL_SUPPORT template <typename R, typename... P> - void CallRuntime(R (*function)(P...)); + void CallRuntimeHelper(R (*function)(P...), RuntimeCallType call_type); + + template <typename R, typename... P> + void CallRuntime(R (*function)(P...)) { + CallRuntimeHelper(function, kCallRuntime); + } + + template <typename R, typename... P> + void TailCallRuntime(R (*function)(P...)) { + CallRuntimeHelper(function, kTailCallRuntime); + } #endif // #ifdef VIXL_HAS_MACROASSEMBLER_RUNTIME_CALL_SUPPORT protected: @@ -3390,7 +3400,8 @@ class UseScratchRegisterScope { // `R` stands for 'return type', and `P` for 'parameter types'. template <typename R, typename... 
P> -void MacroAssembler::CallRuntime(R (*function)(P...)) { +void MacroAssembler::CallRuntimeHelper(R (*function)(P...), + RuntimeCallType call_type) { if (generate_simulator_code_) { #ifdef VIXL_HAS_SIMULATED_RUNTIME_CALL_SUPPORT uintptr_t runtime_call_wrapper_address = reinterpret_cast<uintptr_t>( @@ -3398,7 +3409,7 @@ void MacroAssembler::CallRuntime(R (*function)(P...)) { uintptr_t function_address = reinterpret_cast<uintptr_t>(function); EmissionCheckScope guard(this, - kInstructionSize + 2 * kRuntimeCallAddressSize, + kRuntimeCallLength, CodeBufferCheckScope::kExactSize); Label start; bind(&start); @@ -3412,8 +3423,9 @@ void MacroAssembler::CallRuntime(R (*function)(P...)) { VIXL_ASSERT(GetSizeOfCodeGeneratedSince(&start) == kRuntimeCallFunctionOffset); dc(function_address); - VIXL_ASSERT(GetSizeOfCodeGeneratedSince(&start) == - kRuntimeCallFunctionOffset + kRuntimeCallAddressSize); + VIXL_ASSERT(GetSizeOfCodeGeneratedSince(&start) == kRuntimeCallTypeOffset); + dc32(call_type); + VIXL_ASSERT(GetSizeOfCodeGeneratedSince(&start) == kRuntimeCallLength); #else VIXL_UNREACHABLE(); #endif // #ifdef VIXL_HAS_SIMULATED_RUNTIME_CALL_SUPPORT @@ -3421,7 +3433,12 @@ void MacroAssembler::CallRuntime(R (*function)(P...)) { UseScratchRegisterScope temps(this); Register temp = temps.AcquireX(); Mov(temp, reinterpret_cast<uint64_t>(function)); - Blr(temp); + if (call_type == kTailCallRuntime) { + Br(temp); + } else { + VIXL_ASSERT(call_type == kCallRuntime); + Blr(temp); + } } } diff --git a/src/aarch64/simulator-aarch64.cc b/src/aarch64/simulator-aarch64.cc index df8f4d98..f643dbd9 100644 --- a/src/aarch64/simulator-aarch64.cc +++ b/src/aarch64/simulator-aarch64.cc @@ -5377,10 +5377,18 @@ void Simulator::DoRuntimeCall(const Instruction* instr) { Memory::Read<uintptr_t>(instr + kRuntimeCallWrapperOffset); uintptr_t function_address = Memory::Read<uintptr_t>(instr + kRuntimeCallFunctionOffset); + RuntimeCallType call_type = static_cast<RuntimeCallType>( + 
Memory::Read<uint32_t>(instr + kRuntimeCallTypeOffset)); auto runtime_call_wrapper = reinterpret_cast<void (*)(Simulator*, uintptr_t)>(call_wrapper_address); + + if (call_type == kCallRuntime) { + WriteRegister(kLinkRegCode, + instr->GetInstructionAtOffset(kRuntimeCallLength)); + } runtime_call_wrapper(this, function_address); - WritePc(instr->GetInstructionAtOffset(kRuntimeCallLength)); + // Read the return address from `lr` and write it into `pc`. + WritePc(ReadRegister<Instruction*>(kLinkRegCode)); } #else void Simulator::DoRuntimeCall(const Instruction* instr) { diff --git a/src/aarch64/simulator-constants-aarch64.h b/src/aarch64/simulator-constants-aarch64.h index bcd3120c..b6a4241c 100644 --- a/src/aarch64/simulator-constants-aarch64.h +++ b/src/aarch64/simulator-constants-aarch64.h @@ -141,14 +141,16 @@ const unsigned kLogParamsOffset = 1 * kInstructionSize; const unsigned kLogLength = 2 * kInstructionSize; // Runtime call simulation - kRuntimeCall +enum RuntimeCallType { kCallRuntime, kTailCallRuntime }; + const unsigned kRuntimeCallWrapperOffset = 1 * kInstructionSize; // The size of a pointer on host. const unsigned kRuntimeCallAddressSize = sizeof(uintptr_t); const unsigned kRuntimeCallFunctionOffset = kRuntimeCallWrapperOffset + kRuntimeCallAddressSize; -const unsigned kRuntimeCallLength = +const unsigned kRuntimeCallTypeOffset = kRuntimeCallFunctionOffset + kRuntimeCallAddressSize; - +const unsigned kRuntimeCallLength = kRuntimeCallTypeOffset + sizeof(uint32_t); } // namespace aarch64 } // namespace vixl diff --git a/test/aarch64/test-assembler-aarch64.cc b/test/aarch64/test-assembler-aarch64.cc index 63a6278f..db54fd29 100644 --- a/test/aarch64/test-assembler-aarch64.cc +++ b/test/aarch64/test-assembler-aarch64.cc @@ -23258,6 +23258,9 @@ TEST(runtime_calls) { #endif START(); + + // Test `CallRuntime`. 
+ __ Mov(w0, 0); __ CallRuntime(runtime_call_add_one); __ Mov(w20, w0); @@ -23281,6 +23284,27 @@ TEST(runtime_calls) { __ Fmov(d21, d0); __ Pop(d1, d0); + // Test `TailCallRuntime`. + + Label function, after_function; + __ B(&after_function); + __ Bind(&function); + __ Mov(x22, 0); + __ Mov(w0, 123); + __ TailCallRuntime(runtime_call_add_one); + // Control should not fall through. + __ Mov(x22, 0xbad); + __ Ret(); + __ Bind(&after_function); + + // Call our dummy function, taking care to preserve the link register. + __ Push(ip0, lr); + __ Bl(&function); + __ Pop(lr, ip0); + // Save the result. + __ Mov(w23, w0); + + int64_t value = 0xbadbeef; __ Mov(x0, reinterpret_cast<uint64_t>(&value)); __ CallRuntime(runtime_call_store_at_address); @@ -23296,6 +23320,8 @@ TEST(runtime_calls) { ASSERT_EQUAL_64(0x123, x21); ASSERT_EQUAL_FP64(310.0, d21); VIXL_CHECK(value == 0xf00d); + ASSERT_EQUAL_64(0, x22); + ASSERT_EQUAL_32(124, w23); #endif // #if defined(VIXL_HAS_SIMULATED_RUNTIME_CALL_SUPPORT) || ... TEARDOWN(); |