// Copyright 2015, ARM Limited
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//   * Redistributions of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//   * Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
//   * Neither the name of ARM Limited nor the names of its contributors may
//     be used to endorse or promote products derived from this software
//     without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

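// Test helper: ExecutableMemory wraps an mmap'd buffer that generated code
// can be copied into and then executed directly.
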
#include "test-utils.h"

#include <string.h>  // For memcpy.
#include <sys/mman.h>

#include "globals-vixl.h"
#include "a64/cpu-a64.h"

namespace vixl {

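// Map a buffer that is readable, writable and executable, so that code
// generated at runtime can be copied in and branched to.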
ExecutableMemory::ExecutableMemory(size_t size)
    : size_(size),
      buffer_(mmap(NULL, size, PROT_READ | PROT_WRITE | PROT_EXEC,
                   MAP_SHARED | MAP_ANONYMOUS, -1, 0)) {
  // mmap returns MAP_FAILED ((void*)-1) on failure.
  VIXL_ASSERT(reinterpret_cast<intptr_t>(buffer_) != -1);
}

ExecutableMemory::~ExecutableMemory() {
  munmap(buffer_, size_);
}

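// Copy `code_size` bytes of generated code into the executable buffer.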
void ExecutableMemory::Write(const byte* code_start, size_t code_size) const {
  VIXL_CHECK(code_size <= size_);
  // Copy only the generated code; copying `size_` bytes would read past the
  // end of `code_start` when `code_size < size_`.
  memcpy(buffer_, code_start, code_size);
}

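// Call into the buffer at `offset` bytes from its start. The entry point is
// copied into a function pointer with memcpy because directly casting a data
// pointer to a function pointer is not portable C++.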
void ExecutableMemory::Execute(int offset) const {
  void (*test_function)(void);

  VIXL_ASSERT((offset >= 0) && (static_cast<size_t>(offset) < size_));
  VIXL_STATIC_ASSERT(sizeof(buffer_) == sizeof(test_function));
  VIXL_STATIC_ASSERT(sizeof(uintptr_t) == sizeof(test_function));
  uintptr_t entry_point = reinterpret_cast<uintptr_t>(buffer_);
  entry_point += offset;
  memcpy(&test_function, &entry_point, sizeof(test_function));

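  // The buffer was written through the data side; synchronise the instruction
  // and data caches before executing the freshly written code.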
#if defined(__aarch64__)
  aarch64::CPU::EnsureIAndDCacheCoherency(buffer_, size_);
#elif defined(__arm__)
  // TODO: Do not use __builtin___clear_cache and instead implement
  // `CPU::EnsureIAndDCacheCoherency` for aarch32.
  __builtin___clear_cache(buffer_, reinterpret_cast<char*>(buffer_) + size_);
#endif
  test_function();
}

}  // namespace vixl