// Copyright 2017, VIXL authors
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//   * Redistributions of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//   * Redistributions in binary form must reproduce the above copyright notice,
//     this list of conditions and the following disclaimer in the documentation
//     and/or other materials provided with the distribution.
//   * Neither the name of ARM Limited nor the names of its contributors may be
//     used to endorse or promote products derived from this software without
//     specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

#ifdef VIXL_CODE_BUFFER_MMAP
extern "C" {
#include <sys/mman.h>
}
#endif

#include "code-buffer-vixl.h"
#include "utils-vixl.h"

namespace vixl {


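// Allocate a managed code buffer. Depending on the build configuration, the
// backing memory comes either from malloc (VIXL_CODE_BUFFER_MALLOC) or from an
// anonymous, read-write mmap mapping (VIXL_CODE_BUFFER_MMAP). The buffer is
// owned by this object and released in the destructor.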
CodeBuffer::CodeBuffer(size_t capacity)
    : buffer_(NULL),
      managed_(true),
      cursor_(NULL),
      dirty_(false),
      capacity_(capacity) {
  if (capacity_ == 0) {
    return;
  }
#ifdef VIXL_CODE_BUFFER_MALLOC
  buffer_ = reinterpret_cast<byte*>(malloc(capacity_));
#elif defined(VIXL_CODE_BUFFER_MMAP)
  buffer_ = reinterpret_cast<byte*>(mmap(NULL,
                                         capacity,
                                         PROT_READ | PROT_WRITE,
                                         MAP_PRIVATE | MAP_ANONYMOUS,
                                         -1,
                                         0));
#else
#error Unknown code buffer allocator.
#endif
  VIXL_CHECK(buffer_ != NULL);
  // AArch64 instructions must be word aligned; we assert that the default
  // allocator always returns word-aligned memory.
  VIXL_ASSERT(IsWordAligned(buffer_));

  cursor_ = buffer_;
}


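// Wrap a caller-provided buffer. The CodeBuffer does not take ownership
// (managed_ is false), so it will neither free nor grow this memory.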
CodeBuffer::CodeBuffer(byte* buffer, size_t capacity)
    : buffer_(reinterpret_cast<byte*>(buffer)),
      managed_(false),
      cursor_(reinterpret_cast<byte*>(buffer)),
      dirty_(false),
      capacity_(capacity) {
  VIXL_ASSERT(buffer_ != NULL);
}


CodeBuffer::~CodeBuffer() VIXL_NEGATIVE_TESTING_ALLOW_EXCEPTION {
  VIXL_ASSERT(!IsDirty());
  if (managed_) {
#ifdef VIXL_CODE_BUFFER_MALLOC
    free(buffer_);
#elif defined(VIXL_CODE_BUFFER_MMAP)
    munmap(buffer_, capacity_);
#else
#error Unknown code buffer allocator.
#endif
  }
}


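// SetExecutable() and SetWritable() switch the buffer's page protection
// between read-execute and read-write, so code can be emitted while the
// buffer is writable and then run once it has been made executable. This is
// only implemented for the mmap allocator, which guarantees page alignment.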
void CodeBuffer::SetExecutable() {
#ifdef VIXL_CODE_BUFFER_MMAP
  int ret = mprotect(buffer_, capacity_, PROT_READ | PROT_EXEC);
  VIXL_CHECK(ret == 0);
#else
  // This requires page-aligned memory blocks, which we can only guarantee with
  // mmap.
  VIXL_UNIMPLEMENTED();
#endif
}


void CodeBuffer::SetWritable() {
#ifdef VIXL_CODE_BUFFER_MMAP
  int ret = mprotect(buffer_, capacity_, PROT_READ | PROT_WRITE);
  VIXL_CHECK(ret == 0);
#else
  // This requires page-aligned memory blocks, which we can only guarantee with
  // mmap.
  VIXL_UNIMPLEMENTED();
#endif
}


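// Copy a NUL-terminated string into the buffer, including its terminating NUL
// byte, and advance the cursor past it.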
void CodeBuffer::EmitString(const char* string) {
  const auto len = strlen(string) + 1;
  VIXL_ASSERT(HasSpaceFor(len));
  char* dst = reinterpret_cast<char*>(cursor_);
  dirty_ = true;
  memcpy(dst, string, len);
  cursor_ = reinterpret_cast<byte*>(dst + len);
}


void CodeBuffer::EmitData(const void* data, size_t size) {
  VIXL_ASSERT(HasSpaceFor(size));
  dirty_ = true;
  memcpy(cursor_, data, size);
  cursor_ = cursor_ + size;
}


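// Patch previously emitted bytes at the given buffer offset. The cursor is not
// moved, and the patched range must lie entirely before it.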
void CodeBuffer::UpdateData(size_t offset, const void* data, size_t size) {
  dirty_ = true;
  byte* dst = buffer_ + offset;
  VIXL_ASSERT(dst + size <= cursor_);
  memcpy(dst, data, size);
}


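// Pad the buffer with zero bytes so that the cursor is aligned to a 4-byte
// (instruction word) boundary.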
void CodeBuffer::Align() {
  byte* end = AlignUp(cursor_, 4);
  const size_t padding_size = end - cursor_;
  VIXL_ASSERT(padding_size <= 4);
  EmitZeroedBytes(static_cast<int>(padding_size));
}

void CodeBuffer::EmitZeroedBytes(int n) {
  EnsureSpaceFor(n);
  dirty_ = true;
  memset(cursor_, 0, n);
  cursor_ += n;
}

void CodeBuffer::Reset() {
#ifdef VIXL_DEBUG
  if (managed_) {
    // Fill with zeros (there is no useful value common to A32 and T32).
    memset(buffer_, 0, capacity_);
  }
#endif
  cursor_ = buffer_;
  SetClean();
}


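// Enlarge a managed buffer to new_capacity. realloc/mremap may move the
// backing memory, so the cursor is recomputed from its offset relative to the
// new base address. Unmanaged (wrapped) buffers cannot be grown.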
void CodeBuffer::Grow(size_t new_capacity) {
  VIXL_ASSERT(managed_);
  VIXL_ASSERT(new_capacity > capacity_);
  ptrdiff_t cursor_offset = GetCursorOffset();
#ifdef VIXL_CODE_BUFFER_MALLOC
  buffer_ = static_cast<byte*>(realloc(buffer_, new_capacity));
  VIXL_CHECK(buffer_ != NULL);
#elif defined(VIXL_CODE_BUFFER_MMAP)
  buffer_ = static_cast<byte*>(
      mremap(buffer_, capacity_, new_capacity, MREMAP_MAYMOVE));
  VIXL_CHECK(buffer_ != MAP_FAILED);
#else
#error Unknown code buffer allocator.
#endif

  cursor_ = buffer_ + cursor_offset;
  capacity_ = new_capacity;
}


}  // namespace vixl