Georgia Kouveli | 8b57c86 | 2017-03-02 15:18:58 +0000 | [diff] [blame] | 1 | // Copyright 2017, VIXL authors |
armvixl | c68cb64 | 2014-09-25 18:49:30 +0100 | [diff] [blame] | 2 | // All rights reserved. |
| 3 | // |
| 4 | // Redistribution and use in source and binary forms, with or without |
| 5 | // modification, are permitted provided that the following conditions are met: |
| 6 | // |
| 7 | // * Redistributions of source code must retain the above copyright notice, |
| 8 | // this list of conditions and the following disclaimer. |
| 9 | // * Redistributions in binary form must reproduce the above copyright notice, |
| 10 | // this list of conditions and the following disclaimer in the documentation |
| 11 | // and/or other materials provided with the distribution. |
| 12 | // * Neither the name of ARM Limited nor the names of its contributors may be |
| 13 | // used to endorse or promote products derived from this software without |
| 14 | // specific prior written permission. |
| 15 | // |
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
| 17 | // ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED |
| 18 | // WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE |
| 19 | // DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE |
| 20 | // FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL |
| 21 | // DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR |
| 22 | // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER |
| 23 | // CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, |
| 24 | // OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| 25 | // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 26 | |
Anton Kirilov | 088b01f | 2022-09-27 14:27:38 +0100 | [diff] [blame] | 27 | #ifdef VIXL_CODE_BUFFER_MMAP |
Alex Gilday | 31dd2ae | 2016-07-05 16:34:41 +0100 | [diff] [blame] | 28 | extern "C" { |
| 29 | #include <sys/mman.h> |
| 30 | } |
Anton Kirilov | 088b01f | 2022-09-27 14:27:38 +0100 | [diff] [blame] | 31 | #endif |
Alex Gilday | 31dd2ae | 2016-07-05 16:34:41 +0100 | [diff] [blame] | 32 | |
Alexandre Rames | 1f9074d | 2016-05-23 15:50:01 +0100 | [diff] [blame] | 33 | #include "code-buffer-vixl.h" |
| 34 | #include "utils-vixl.h" |
armvixl | c68cb64 | 2014-09-25 18:49:30 +0100 | [diff] [blame] | 35 | |
| 36 | namespace vixl { |
| 37 | |
| 38 | |
// Allocates a managed code buffer of `capacity` bytes using the allocator
// selected at build time (VIXL_CODE_BUFFER_MALLOC or VIXL_CODE_BUFFER_MMAP).
// A zero capacity leaves the buffer unallocated (NULL) until a later Grow().
CodeBuffer::CodeBuffer(size_t capacity)
    : buffer_(NULL),
      managed_(true),
      cursor_(NULL),
      dirty_(false),
      capacity_(capacity) {
  if (capacity_ == 0) {
    return;
  }
#ifdef VIXL_CODE_BUFFER_MALLOC
  buffer_ = reinterpret_cast<byte*>(malloc(capacity_));
#elif defined(VIXL_CODE_BUFFER_MMAP)
  // Anonymous private mapping, initially readable and writable but not
  // executable; SetExecutable() flips the protection once code is emitted.
  buffer_ = reinterpret_cast<byte*>(mmap(NULL,
                                         capacity,
                                         PROT_READ | PROT_WRITE,
                                         MAP_PRIVATE | MAP_ANONYMOUS,
                                         -1,
                                         0));
#else
#error Unknown code buffer allocator.
#endif
  VIXL_CHECK(buffer_ != NULL);
  // AArch64 instructions must be word aligned; we assert that the default
  // allocator always returns word-aligned memory.
  VIXL_ASSERT(IsWordAligned(buffer_));

  cursor_ = buffer_;
}
| 67 | |
| 68 | |
Alexandre Rames | 919e3fe | 2016-10-14 09:07:54 +0100 | [diff] [blame] | 69 | CodeBuffer::CodeBuffer(byte* buffer, size_t capacity) |
armvixl | c68cb64 | 2014-09-25 18:49:30 +0100 | [diff] [blame] | 70 | : buffer_(reinterpret_cast<byte*>(buffer)), |
| 71 | managed_(false), |
| 72 | cursor_(reinterpret_cast<byte*>(buffer)), |
| 73 | dirty_(false), |
| 74 | capacity_(capacity) { |
| 75 | VIXL_ASSERT(buffer_ != NULL); |
| 76 | } |
| 77 | |
| 78 | |
// Releases the underlying storage if this CodeBuffer owns (manages) it,
// using the deallocator matching the build-time allocator. Destroying a
// dirty buffer is a usage error: callers must finalize/flush first.
CodeBuffer::~CodeBuffer() VIXL_NEGATIVE_TESTING_ALLOW_EXCEPTION {
  VIXL_ASSERT(!IsDirty());
  if (managed_) {
#ifdef VIXL_CODE_BUFFER_MALLOC
    free(buffer_);
#elif defined(VIXL_CODE_BUFFER_MMAP)
    munmap(buffer_, capacity_);
#else
#error Unknown code buffer allocator.
#endif
  }
}
| 91 | |
| 92 | |
// Remaps the buffer read+execute (dropping write permission) so the emitted
// code can be run. Only available with the mmap allocator, since mprotect
// requires page-aligned memory.
void CodeBuffer::SetExecutable() {
#ifdef VIXL_CODE_BUFFER_MMAP
  int ret = mprotect(buffer_, capacity_, PROT_READ | PROT_EXEC);
  VIXL_CHECK(ret == 0);
#else
  // This requires page-aligned memory blocks, which we can only guarantee with
  // mmap.
  VIXL_UNIMPLEMENTED();
#endif
}
Alex Gilday | 31dd2ae | 2016-07-05 16:34:41 +0100 | [diff] [blame] | 103 | |
| 104 | |
// Remaps the buffer read+write (dropping execute permission) so more code
// can be emitted. Counterpart of SetExecutable(); mmap allocator only.
void CodeBuffer::SetWritable() {
#ifdef VIXL_CODE_BUFFER_MMAP
  int ret = mprotect(buffer_, capacity_, PROT_READ | PROT_WRITE);
  VIXL_CHECK(ret == 0);
#else
  // This requires page-aligned memory blocks, which we can only guarantee with
  // mmap.
  VIXL_UNIMPLEMENTED();
#endif
}
Alex Gilday | 31dd2ae | 2016-07-05 16:34:41 +0100 | [diff] [blame] | 115 | |
| 116 | |
armvixl | c68cb64 | 2014-09-25 18:49:30 +0100 | [diff] [blame] | 117 | void CodeBuffer::EmitString(const char* string) { |
Anton Kirilov | 088b01f | 2022-09-27 14:27:38 +0100 | [diff] [blame] | 118 | const auto len = strlen(string) + 1; |
| 119 | VIXL_ASSERT(HasSpaceFor(len)); |
armvixl | c68cb64 | 2014-09-25 18:49:30 +0100 | [diff] [blame] | 120 | char* dst = reinterpret_cast<char*>(cursor_); |
| 121 | dirty_ = true; |
Anton Kirilov | 088b01f | 2022-09-27 14:27:38 +0100 | [diff] [blame] | 122 | memcpy(dst, string, len); |
| 123 | cursor_ = reinterpret_cast<byte*>(dst + len); |
armvixl | c68cb64 | 2014-09-25 18:49:30 +0100 | [diff] [blame] | 124 | } |
| 125 | |
| 126 | |
Pierre Langlois | 88c46b8 | 2016-06-02 18:15:32 +0100 | [diff] [blame] | 127 | void CodeBuffer::EmitData(const void* data, size_t size) { |
| 128 | VIXL_ASSERT(HasSpaceFor(size)); |
| 129 | dirty_ = true; |
| 130 | memcpy(cursor_, data, size); |
| 131 | cursor_ = cursor_ + size; |
| 132 | } |
| 133 | |
| 134 | |
Vincent Belliard | 3e1b899 | 2016-07-13 16:02:19 -0700 | [diff] [blame] | 135 | void CodeBuffer::UpdateData(size_t offset, const void* data, size_t size) { |
| 136 | dirty_ = true; |
| 137 | byte* dst = buffer_ + offset; |
| 138 | VIXL_ASSERT(dst + size <= cursor_); |
| 139 | memcpy(dst, data, size); |
| 140 | } |
| 141 | |
| 142 | |
armvixl | c68cb64 | 2014-09-25 18:49:30 +0100 | [diff] [blame] | 143 | void CodeBuffer::Align() { |
| 144 | byte* end = AlignUp(cursor_, 4); |
armvixl | c68cb64 | 2014-09-25 18:49:30 +0100 | [diff] [blame] | 145 | const size_t padding_size = end - cursor_; |
armvixl | c68cb64 | 2014-09-25 18:49:30 +0100 | [diff] [blame] | 146 | VIXL_ASSERT(padding_size <= 4); |
Georgia Kouveli | 8b57c86 | 2017-03-02 15:18:58 +0000 | [diff] [blame] | 147 | EmitZeroedBytes(static_cast<int>(padding_size)); |
armvixl | c68cb64 | 2014-09-25 18:49:30 +0100 | [diff] [blame] | 148 | } |
| 149 | |
Georgia Kouveli | 8b57c86 | 2017-03-02 15:18:58 +0000 | [diff] [blame] | 150 | void CodeBuffer::EmitZeroedBytes(int n) { |
| 151 | EnsureSpaceFor(n); |
| 152 | dirty_ = true; |
| 153 | memset(cursor_, 0, n); |
| 154 | cursor_ += n; |
| 155 | } |
armvixl | c68cb64 | 2014-09-25 18:49:30 +0100 | [diff] [blame] | 156 | |
| 157 | void CodeBuffer::Reset() { |
armvixl | 330dc71 | 2014-11-25 10:38:32 +0000 | [diff] [blame] | 158 | #ifdef VIXL_DEBUG |
armvixl | c68cb64 | 2014-09-25 18:49:30 +0100 | [diff] [blame] | 159 | if (managed_) { |
Pierre Langlois | 88c46b8 | 2016-06-02 18:15:32 +0100 | [diff] [blame] | 160 | // Fill with zeros (there is no useful value common to A32 and T32). |
armvixl | c68cb64 | 2014-09-25 18:49:30 +0100 | [diff] [blame] | 161 | memset(buffer_, 0, capacity_); |
| 162 | } |
| 163 | #endif |
| 164 | cursor_ = buffer_; |
| 165 | SetClean(); |
| 166 | } |
| 167 | |
| 168 | |
// Grows a managed buffer to `new_capacity` bytes, preserving its contents
// and the cursor position. The underlying storage may move (realloc/mremap
// with MREMAP_MAYMOVE), so any raw pointers into the old buffer are
// invalidated.
void CodeBuffer::Grow(size_t new_capacity) {
  VIXL_ASSERT(managed_);
  VIXL_ASSERT(new_capacity > capacity_);
  // Save the cursor as an offset, because the base address may change below.
  ptrdiff_t cursor_offset = GetCursorOffset();
#ifdef VIXL_CODE_BUFFER_MALLOC
  buffer_ = static_cast<byte*>(realloc(buffer_, new_capacity));
  VIXL_CHECK(buffer_ != NULL);
#elif defined(VIXL_CODE_BUFFER_MMAP)
  buffer_ = static_cast<byte*>(
      mremap(buffer_, capacity_, new_capacity, MREMAP_MAYMOVE));
  VIXL_CHECK(buffer_ != MAP_FAILED);
#else
#error Unknown code buffer allocator.
#endif

  cursor_ = buffer_ + cursor_offset;
  capacity_ = new_capacity;
}
| 187 | |
| 188 | |
| 189 | } // namespace vixl |