Georgia Kouveli | 8b57c86 | 2017-03-02 15:18:58 +0000 | [diff] [blame] | 1 | // Copyright 2017, VIXL authors |
armvixl | c68cb64 | 2014-09-25 18:49:30 +0100 | [diff] [blame] | 2 | // All rights reserved. |
| 3 | // |
| 4 | // Redistribution and use in source and binary forms, with or without |
| 5 | // modification, are permitted provided that the following conditions are met: |
| 6 | // |
| 7 | // * Redistributions of source code must retain the above copyright notice, |
| 8 | // this list of conditions and the following disclaimer. |
| 9 | // * Redistributions in binary form must reproduce the above copyright notice, |
| 10 | // this list of conditions and the following disclaimer in the documentation |
| 11 | // and/or other materials provided with the distribution. |
| 12 | // * Neither the name of ARM Limited nor the names of its contributors may be |
| 13 | // used to endorse or promote products derived from this software without |
| 14 | // specific prior written permission. |
| 15 | // |
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
| 17 | // ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED |
| 18 | // WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE |
| 19 | // DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE |
| 20 | // FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL |
| 21 | // DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR |
| 22 | // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER |
| 23 | // CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, |
| 24 | // OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| 25 | // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 26 | |
Alex Gilday | 31dd2ae | 2016-07-05 16:34:41 +0100 | [diff] [blame] | 27 | extern "C" { |
| 28 | #include <sys/mman.h> |
| 29 | } |
| 30 | |
Alexandre Rames | 1f9074d | 2016-05-23 15:50:01 +0100 | [diff] [blame] | 31 | #include "code-buffer-vixl.h" |
| 32 | #include "utils-vixl.h" |
armvixl | c68cb64 | 2014-09-25 18:49:30 +0100 | [diff] [blame] | 33 | |
| 34 | namespace vixl { |
| 35 | |
| 36 | |
Alexandre Rames | 7cd99a8 | 2016-08-03 13:32:02 +0100 | [diff] [blame] | 37 | CodeBuffer::CodeBuffer(size_t capacity) |
| 38 | : buffer_(NULL), |
| 39 | managed_(true), |
| 40 | cursor_(NULL), |
| 41 | dirty_(false), |
| 42 | capacity_(capacity) { |
| 43 | if (capacity_ == 0) { |
| 44 | return; |
| 45 | } |
Jacob Bramley | 1fa6f06 | 2016-12-19 11:40:08 +0000 | [diff] [blame] | 46 | #ifdef VIXL_CODE_BUFFER_MALLOC |
Alexandre Rames | 0287a6c | 2016-11-16 10:37:05 +0000 | [diff] [blame] | 47 | buffer_ = reinterpret_cast<byte*>(malloc(capacity_)); |
Jacob Bramley | 1fa6f06 | 2016-12-19 11:40:08 +0000 | [diff] [blame] | 48 | #elif defined(VIXL_CODE_BUFFER_MMAP) |
Alex Gilday | 31dd2ae | 2016-07-05 16:34:41 +0100 | [diff] [blame] | 49 | buffer_ = reinterpret_cast<byte*>(mmap(NULL, |
| 50 | capacity, |
| 51 | PROT_READ | PROT_WRITE, |
| 52 | MAP_PRIVATE | MAP_ANONYMOUS, |
| 53 | -1, |
| 54 | 0)); |
Jacob Bramley | 1fa6f06 | 2016-12-19 11:40:08 +0000 | [diff] [blame] | 55 | #else |
| 56 | #error Unknown code buffer allocator. |
Alexandre Rames | 0287a6c | 2016-11-16 10:37:05 +0000 | [diff] [blame] | 57 | #endif |
armvixl | c68cb64 | 2014-09-25 18:49:30 +0100 | [diff] [blame] | 58 | VIXL_CHECK(buffer_ != NULL); |
Alexandre Rames | d383296 | 2016-07-04 15:03:43 +0100 | [diff] [blame] | 59 | // Aarch64 instructions must be word aligned, we assert the default allocator |
armvixl | c68cb64 | 2014-09-25 18:49:30 +0100 | [diff] [blame] | 60 | // always returns word align memory. |
| 61 | VIXL_ASSERT(IsWordAligned(buffer_)); |
| 62 | |
| 63 | cursor_ = buffer_; |
armvixl | c68cb64 | 2014-09-25 18:49:30 +0100 | [diff] [blame] | 64 | } |
| 65 | |
| 66 | |
Alexandre Rames | 919e3fe | 2016-10-14 09:07:54 +0100 | [diff] [blame] | 67 | CodeBuffer::CodeBuffer(byte* buffer, size_t capacity) |
armvixl | c68cb64 | 2014-09-25 18:49:30 +0100 | [diff] [blame] | 68 | : buffer_(reinterpret_cast<byte*>(buffer)), |
| 69 | managed_(false), |
| 70 | cursor_(reinterpret_cast<byte*>(buffer)), |
| 71 | dirty_(false), |
| 72 | capacity_(capacity) { |
| 73 | VIXL_ASSERT(buffer_ != NULL); |
| 74 | } |
| 75 | |
| 76 | |
// Release the underlying storage, but only when this CodeBuffer allocated it
// itself (managed_). Destroying a buffer that is still dirty is a programming
// error: the owner is expected to flush or reset it first.
CodeBuffer::~CodeBuffer() VIXL_NEGATIVE_TESTING_ALLOW_EXCEPTION {
  VIXL_ASSERT(!IsDirty());
  if (managed_) {
    // Deallocate with the same mechanism that was used for allocation.
#ifdef VIXL_CODE_BUFFER_MALLOC
    free(buffer_);
#elif defined(VIXL_CODE_BUFFER_MMAP)
    munmap(buffer_, capacity_);
#else
#error Unknown code buffer allocator.
#endif
  }
}
| 89 | |
| 90 | |
Jacob Bramley | 745a855 | 2016-12-20 09:52:30 +0000 | [diff] [blame] | 91 | void CodeBuffer::SetExecutable() { |
Jacob Bramley | cff5a2e | 2019-03-15 09:34:56 +0000 | [diff] [blame] | 92 | #ifdef VIXL_CODE_BUFFER_MMAP |
Alex Gilday | 31dd2ae | 2016-07-05 16:34:41 +0100 | [diff] [blame] | 93 | int ret = mprotect(buffer_, capacity_, PROT_READ | PROT_EXEC); |
| 94 | VIXL_CHECK(ret == 0); |
Jacob Bramley | cff5a2e | 2019-03-15 09:34:56 +0000 | [diff] [blame] | 95 | #else |
| 96 | // This requires page-aligned memory blocks, which we can only guarantee with |
| 97 | // mmap. |
| 98 | VIXL_UNIMPLEMENTED(); |
Jacob Bramley | 745a855 | 2016-12-20 09:52:30 +0000 | [diff] [blame] | 99 | #endif |
Jacob Bramley | cff5a2e | 2019-03-15 09:34:56 +0000 | [diff] [blame] | 100 | } |
Alex Gilday | 31dd2ae | 2016-07-05 16:34:41 +0100 | [diff] [blame] | 101 | |
| 102 | |
Jacob Bramley | 745a855 | 2016-12-20 09:52:30 +0000 | [diff] [blame] | 103 | void CodeBuffer::SetWritable() { |
Jacob Bramley | cff5a2e | 2019-03-15 09:34:56 +0000 | [diff] [blame] | 104 | #ifdef VIXL_CODE_BUFFER_MMAP |
Alex Gilday | 31dd2ae | 2016-07-05 16:34:41 +0100 | [diff] [blame] | 105 | int ret = mprotect(buffer_, capacity_, PROT_READ | PROT_WRITE); |
| 106 | VIXL_CHECK(ret == 0); |
Jacob Bramley | cff5a2e | 2019-03-15 09:34:56 +0000 | [diff] [blame] | 107 | #else |
| 108 | // This requires page-aligned memory blocks, which we can only guarantee with |
| 109 | // mmap. |
| 110 | VIXL_UNIMPLEMENTED(); |
Jacob Bramley | 745a855 | 2016-12-20 09:52:30 +0000 | [diff] [blame] | 111 | #endif |
Jacob Bramley | cff5a2e | 2019-03-15 09:34:56 +0000 | [diff] [blame] | 112 | } |
Alex Gilday | 31dd2ae | 2016-07-05 16:34:41 +0100 | [diff] [blame] | 113 | |
| 114 | |
armvixl | c68cb64 | 2014-09-25 18:49:30 +0100 | [diff] [blame] | 115 | void CodeBuffer::EmitString(const char* string) { |
Pierre Langlois | 88c46b8 | 2016-06-02 18:15:32 +0100 | [diff] [blame] | 116 | VIXL_ASSERT(HasSpaceFor(strlen(string) + 1)); |
armvixl | c68cb64 | 2014-09-25 18:49:30 +0100 | [diff] [blame] | 117 | char* dst = reinterpret_cast<char*>(cursor_); |
| 118 | dirty_ = true; |
| 119 | char* null_char = stpcpy(dst, string); |
| 120 | cursor_ = reinterpret_cast<byte*>(null_char) + 1; |
| 121 | } |
| 122 | |
| 123 | |
Pierre Langlois | 88c46b8 | 2016-06-02 18:15:32 +0100 | [diff] [blame] | 124 | void CodeBuffer::EmitData(const void* data, size_t size) { |
| 125 | VIXL_ASSERT(HasSpaceFor(size)); |
| 126 | dirty_ = true; |
| 127 | memcpy(cursor_, data, size); |
| 128 | cursor_ = cursor_ + size; |
| 129 | } |
| 130 | |
| 131 | |
Vincent Belliard | 3e1b899 | 2016-07-13 16:02:19 -0700 | [diff] [blame] | 132 | void CodeBuffer::UpdateData(size_t offset, const void* data, size_t size) { |
| 133 | dirty_ = true; |
| 134 | byte* dst = buffer_ + offset; |
| 135 | VIXL_ASSERT(dst + size <= cursor_); |
| 136 | memcpy(dst, data, size); |
| 137 | } |
| 138 | |
| 139 | |
armvixl | c68cb64 | 2014-09-25 18:49:30 +0100 | [diff] [blame] | 140 | void CodeBuffer::Align() { |
| 141 | byte* end = AlignUp(cursor_, 4); |
armvixl | c68cb64 | 2014-09-25 18:49:30 +0100 | [diff] [blame] | 142 | const size_t padding_size = end - cursor_; |
armvixl | c68cb64 | 2014-09-25 18:49:30 +0100 | [diff] [blame] | 143 | VIXL_ASSERT(padding_size <= 4); |
Georgia Kouveli | 8b57c86 | 2017-03-02 15:18:58 +0000 | [diff] [blame] | 144 | EmitZeroedBytes(static_cast<int>(padding_size)); |
armvixl | c68cb64 | 2014-09-25 18:49:30 +0100 | [diff] [blame] | 145 | } |
| 146 | |
Georgia Kouveli | 8b57c86 | 2017-03-02 15:18:58 +0000 | [diff] [blame] | 147 | void CodeBuffer::EmitZeroedBytes(int n) { |
| 148 | EnsureSpaceFor(n); |
| 149 | dirty_ = true; |
| 150 | memset(cursor_, 0, n); |
| 151 | cursor_ += n; |
| 152 | } |
armvixl | c68cb64 | 2014-09-25 18:49:30 +0100 | [diff] [blame] | 153 | |
// Rewind the cursor to the start of the buffer and mark it clean, so the
// buffer can be reused for a fresh emission pass. In debug builds, managed
// buffers are also scrubbed to catch reads of stale code.
void CodeBuffer::Reset() {
#ifdef VIXL_DEBUG
  if (managed_) {
    // Fill with zeros (there is no useful value common to A32 and T32).
    memset(buffer_, 0, capacity_);
  }
#endif
  cursor_ = buffer_;
  SetClean();
}
| 164 | |
| 165 | |
// Enlarge the buffer to `new_capacity` bytes, preserving its contents and the
// cursor position. Only valid for buffers this CodeBuffer allocated itself.
void CodeBuffer::Grow(size_t new_capacity) {
  VIXL_ASSERT(managed_);
  VIXL_ASSERT(new_capacity > capacity_);
  // The reallocation may move the buffer, so save the cursor as an offset and
  // rebase the pointer afterwards.
  ptrdiff_t cursor_offset = GetCursorOffset();
#ifdef VIXL_CODE_BUFFER_MALLOC
  buffer_ = static_cast<byte*>(realloc(buffer_, new_capacity));
  VIXL_CHECK(buffer_ != NULL);
#elif defined(VIXL_CODE_BUFFER_MMAP)
  // mremap signals failure with MAP_FAILED, not NULL.
  buffer_ = static_cast<byte*>(
      mremap(buffer_, capacity_, new_capacity, MREMAP_MAYMOVE));
  VIXL_CHECK(buffer_ != MAP_FAILED);
#else
#error Unknown code buffer allocator.
#endif

  cursor_ = buffer_ + cursor_offset;
  capacity_ = new_capacity;
}
| 184 | |
| 185 | |
| 186 | } // namespace vixl |