blob: 65abaea2334de01cd3599aad5381cd447a61373d [file] [log] [blame]
Alexandre Ramesb78f1392016-07-01 14:22:22 +01001// Copyright 2014, VIXL authors
armvixlc68cb642014-09-25 18:49:30 +01002// All rights reserved.
3//
4// Redistribution and use in source and binary forms, with or without
5// modification, are permitted provided that the following conditions are met:
6//
7// * Redistributions of source code must retain the above copyright notice,
8// this list of conditions and the following disclaimer.
9// * Redistributions in binary form must reproduce the above copyright notice,
10// this list of conditions and the following disclaimer in the documentation
11// and/or other materials provided with the distribution.
12// * Neither the name of ARM Limited nor the names of its contributors may be
13// used to endorse or promote products derived from this software without
14// specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
17// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
18// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
19// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
20// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
21// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
22// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
23// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
24// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
25// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26
Alex Gilday31dd2ae2016-07-05 16:34:41 +010027extern "C" {
28#include <sys/mman.h>
29}
30
Alexandre Rames1f9074d2016-05-23 15:50:01 +010031#include "code-buffer-vixl.h"
32#include "utils-vixl.h"
armvixlc68cb642014-09-25 18:49:30 +010033
34namespace vixl {
35
36
Alexandre Rames7cd99a82016-08-03 13:32:02 +010037CodeBuffer::CodeBuffer(size_t capacity)
38 : buffer_(NULL),
39 managed_(true),
40 cursor_(NULL),
41 dirty_(false),
42 capacity_(capacity) {
43 if (capacity_ == 0) {
44 return;
45 }
Alexandre Rames0287a6c2016-11-16 10:37:05 +000046#ifdef __APPLE__
47 buffer_ = reinterpret_cast<byte*>(malloc(capacity_));
48#else
Alex Gilday31dd2ae2016-07-05 16:34:41 +010049 buffer_ = reinterpret_cast<byte*>(mmap(NULL,
50 capacity,
51 PROT_READ | PROT_WRITE,
52 MAP_PRIVATE | MAP_ANONYMOUS,
53 -1,
54 0));
Alexandre Rames0287a6c2016-11-16 10:37:05 +000055#endif
armvixlc68cb642014-09-25 18:49:30 +010056 VIXL_CHECK(buffer_ != NULL);
Alexandre Ramesd3832962016-07-04 15:03:43 +010057 // Aarch64 instructions must be word aligned, we assert the default allocator
armvixlc68cb642014-09-25 18:49:30 +010058 // always returns word align memory.
59 VIXL_ASSERT(IsWordAligned(buffer_));
60
61 cursor_ = buffer_;
armvixlc68cb642014-09-25 18:49:30 +010062}
63
64
Alexandre Rames919e3fe2016-10-14 09:07:54 +010065CodeBuffer::CodeBuffer(byte* buffer, size_t capacity)
armvixlc68cb642014-09-25 18:49:30 +010066 : buffer_(reinterpret_cast<byte*>(buffer)),
67 managed_(false),
68 cursor_(reinterpret_cast<byte*>(buffer)),
69 dirty_(false),
70 capacity_(capacity) {
71 VIXL_ASSERT(buffer_ != NULL);
72}
73
74
75CodeBuffer::~CodeBuffer() {
76 VIXL_ASSERT(!IsDirty());
77 if (managed_) {
Alexandre Rames0287a6c2016-11-16 10:37:05 +000078#ifdef __APPLE__
79 free(buffer_);
80#else
Alex Gilday31dd2ae2016-07-05 16:34:41 +010081 munmap(buffer_, capacity_);
Alexandre Rames0287a6c2016-11-16 10:37:05 +000082#endif
armvixlc68cb642014-09-25 18:49:30 +010083 }
84}
85
86
Alex Gilday31dd2ae2016-07-05 16:34:41 +010087void CodeBuffer::SetExecutable() {
88 int ret = mprotect(buffer_, capacity_, PROT_READ | PROT_EXEC);
89 VIXL_CHECK(ret == 0);
90}
91
92
93void CodeBuffer::SetWritable() {
94 int ret = mprotect(buffer_, capacity_, PROT_READ | PROT_WRITE);
95 VIXL_CHECK(ret == 0);
96}
97
98
armvixlc68cb642014-09-25 18:49:30 +010099void CodeBuffer::EmitString(const char* string) {
Pierre Langlois88c46b82016-06-02 18:15:32 +0100100 VIXL_ASSERT(HasSpaceFor(strlen(string) + 1));
armvixlc68cb642014-09-25 18:49:30 +0100101 char* dst = reinterpret_cast<char*>(cursor_);
102 dirty_ = true;
103 char* null_char = stpcpy(dst, string);
104 cursor_ = reinterpret_cast<byte*>(null_char) + 1;
105}
106
107
Pierre Langlois88c46b82016-06-02 18:15:32 +0100108void CodeBuffer::EmitData(const void* data, size_t size) {
109 VIXL_ASSERT(HasSpaceFor(size));
110 dirty_ = true;
111 memcpy(cursor_, data, size);
112 cursor_ = cursor_ + size;
113}
114
115
Vincent Belliard3e1b8992016-07-13 16:02:19 -0700116void CodeBuffer::UpdateData(size_t offset, const void* data, size_t size) {
117 dirty_ = true;
118 byte* dst = buffer_ + offset;
119 VIXL_ASSERT(dst + size <= cursor_);
120 memcpy(dst, data, size);
121}
122
123
armvixlc68cb642014-09-25 18:49:30 +0100124void CodeBuffer::Align() {
125 byte* end = AlignUp(cursor_, 4);
armvixlc68cb642014-09-25 18:49:30 +0100126 const size_t padding_size = end - cursor_;
armvixlc68cb642014-09-25 18:49:30 +0100127 VIXL_ASSERT(padding_size <= 4);
Alexandre Rames47ed2652016-11-09 14:44:06 +0000128 EnsureSpaceFor(padding_size);
armvixlc68cb642014-09-25 18:49:30 +0100129 dirty_ = true;
Alexandre Rames47ed2652016-11-09 14:44:06 +0000130 memset(cursor_, 0, padding_size);
armvixlc68cb642014-09-25 18:49:30 +0100131 cursor_ = end;
132}
133
134
135void CodeBuffer::Reset() {
armvixl330dc712014-11-25 10:38:32 +0000136#ifdef VIXL_DEBUG
armvixlc68cb642014-09-25 18:49:30 +0100137 if (managed_) {
Pierre Langlois88c46b82016-06-02 18:15:32 +0100138 // Fill with zeros (there is no useful value common to A32 and T32).
armvixlc68cb642014-09-25 18:49:30 +0100139 memset(buffer_, 0, capacity_);
140 }
141#endif
142 cursor_ = buffer_;
143 SetClean();
144}
145
146
147void CodeBuffer::Grow(size_t new_capacity) {
148 VIXL_ASSERT(managed_);
149 VIXL_ASSERT(new_capacity > capacity_);
Pierre Langloisf5348ce2016-09-22 11:15:35 +0100150 ptrdiff_t cursor_offset = GetCursorOffset();
Alexandre Rames0287a6c2016-11-16 10:37:05 +0000151#ifdef __APPLE__
152 buffer_ = static_cast<byte*>(realloc(buffer_, new_capacity));
153 VIXL_CHECK(buffer_ != NULL);
154#else
Alex Gilday31dd2ae2016-07-05 16:34:41 +0100155 buffer_ = static_cast<byte*>(
156 mremap(buffer_, capacity_, new_capacity, MREMAP_MAYMOVE));
Alexandre Rames0287a6c2016-11-16 10:37:05 +0000157#endif
Alex Gilday31dd2ae2016-07-05 16:34:41 +0100158 VIXL_CHECK(buffer_ != MAP_FAILED);
armvixlc68cb642014-09-25 18:49:30 +0100159
Pierre Langloisf5348ce2016-09-22 11:15:35 +0100160 cursor_ = buffer_ + cursor_offset;
armvixlc68cb642014-09-25 18:49:30 +0100161 capacity_ = new_capacity;
162}
163
164
165} // namespace vixl