blob: 2dc331bee372c5e7e6c1ea5d19058195bfe9f9d5 [file] [log] [blame]
Alexandre Ramesb78f1392016-07-01 14:22:22 +01001// Copyright 2014, VIXL authors
armvixlc68cb642014-09-25 18:49:30 +01002// All rights reserved.
3//
4// Redistribution and use in source and binary forms, with or without
5// modification, are permitted provided that the following conditions are met:
6//
7// * Redistributions of source code must retain the above copyright notice,
8// this list of conditions and the following disclaimer.
9// * Redistributions in binary form must reproduce the above copyright notice,
10// this list of conditions and the following disclaimer in the documentation
11// and/or other materials provided with the distribution.
12// * Neither the name of ARM Limited nor the names of its contributors may be
13// used to endorse or promote products derived from this software without
14// specific prior written permission.
15//
 16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
17// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
18// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
19// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
20// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
21// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
22// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
23// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
24// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
25// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26
Alex Gilday31dd2ae2016-07-05 16:34:41 +010027extern "C" {
28#include <sys/mman.h>
29}
30
Alexandre Rames1f9074d2016-05-23 15:50:01 +010031#include "code-buffer-vixl.h"
32#include "utils-vixl.h"
armvixlc68cb642014-09-25 18:49:30 +010033
34namespace vixl {
35
36
Alexandre Rames7cd99a82016-08-03 13:32:02 +010037CodeBuffer::CodeBuffer(size_t capacity)
38 : buffer_(NULL),
39 managed_(true),
40 cursor_(NULL),
41 dirty_(false),
42 capacity_(capacity) {
43 if (capacity_ == 0) {
44 return;
45 }
Alex Gilday31dd2ae2016-07-05 16:34:41 +010046 buffer_ = reinterpret_cast<byte*>(mmap(NULL,
47 capacity,
48 PROT_READ | PROT_WRITE,
49 MAP_PRIVATE | MAP_ANONYMOUS,
50 -1,
51 0));
armvixlc68cb642014-09-25 18:49:30 +010052 VIXL_CHECK(buffer_ != NULL);
Alexandre Ramesd3832962016-07-04 15:03:43 +010053 // Aarch64 instructions must be word aligned, we assert the default allocator
armvixlc68cb642014-09-25 18:49:30 +010054 // always returns word align memory.
55 VIXL_ASSERT(IsWordAligned(buffer_));
56
57 cursor_ = buffer_;
armvixlc68cb642014-09-25 18:49:30 +010058}
59
60
61CodeBuffer::CodeBuffer(void* buffer, size_t capacity)
62 : buffer_(reinterpret_cast<byte*>(buffer)),
63 managed_(false),
64 cursor_(reinterpret_cast<byte*>(buffer)),
65 dirty_(false),
66 capacity_(capacity) {
67 VIXL_ASSERT(buffer_ != NULL);
68}
69
70
71CodeBuffer::~CodeBuffer() {
72 VIXL_ASSERT(!IsDirty());
73 if (managed_) {
Alex Gilday31dd2ae2016-07-05 16:34:41 +010074 munmap(buffer_, capacity_);
armvixlc68cb642014-09-25 18:49:30 +010075 }
76}
77
78
Alex Gilday31dd2ae2016-07-05 16:34:41 +010079void CodeBuffer::SetExecutable() {
80 int ret = mprotect(buffer_, capacity_, PROT_READ | PROT_EXEC);
81 VIXL_CHECK(ret == 0);
82}
83
84
85void CodeBuffer::SetWritable() {
86 int ret = mprotect(buffer_, capacity_, PROT_READ | PROT_WRITE);
87 VIXL_CHECK(ret == 0);
88}
89
90
armvixlc68cb642014-09-25 18:49:30 +010091void CodeBuffer::EmitString(const char* string) {
Pierre Langlois88c46b82016-06-02 18:15:32 +010092 VIXL_ASSERT(HasSpaceFor(strlen(string) + 1));
armvixlc68cb642014-09-25 18:49:30 +010093 char* dst = reinterpret_cast<char*>(cursor_);
94 dirty_ = true;
95 char* null_char = stpcpy(dst, string);
96 cursor_ = reinterpret_cast<byte*>(null_char) + 1;
97}
98
99
Pierre Langlois88c46b82016-06-02 18:15:32 +0100100void CodeBuffer::EmitData(const void* data, size_t size) {
101 VIXL_ASSERT(HasSpaceFor(size));
102 dirty_ = true;
103 memcpy(cursor_, data, size);
104 cursor_ = cursor_ + size;
105}
106
107
Vincent Belliard3e1b8992016-07-13 16:02:19 -0700108void CodeBuffer::UpdateData(size_t offset, const void* data, size_t size) {
109 dirty_ = true;
110 byte* dst = buffer_ + offset;
111 VIXL_ASSERT(dst + size <= cursor_);
112 memcpy(dst, data, size);
113}
114
115
armvixlc68cb642014-09-25 18:49:30 +0100116void CodeBuffer::Align() {
117 byte* end = AlignUp(cursor_, 4);
118 VIXL_ASSERT(end >= cursor_);
119 const size_t padding_size = end - cursor_;
Pierre Langlois88c46b82016-06-02 18:15:32 +0100120 VIXL_ASSERT(HasSpaceFor(padding_size));
armvixlc68cb642014-09-25 18:49:30 +0100121 VIXL_ASSERT(padding_size <= 4);
armvixl0f35e362016-05-10 13:57:58 +0100122 const byte padding[] = {0, 0, 0, 0};
armvixlc68cb642014-09-25 18:49:30 +0100123 dirty_ = true;
124 memcpy(cursor_, padding, padding_size);
125 cursor_ = end;
126}
127
128
129void CodeBuffer::Reset() {
armvixl330dc712014-11-25 10:38:32 +0000130#ifdef VIXL_DEBUG
armvixlc68cb642014-09-25 18:49:30 +0100131 if (managed_) {
Pierre Langlois88c46b82016-06-02 18:15:32 +0100132 // Fill with zeros (there is no useful value common to A32 and T32).
armvixlc68cb642014-09-25 18:49:30 +0100133 memset(buffer_, 0, capacity_);
134 }
135#endif
136 cursor_ = buffer_;
137 SetClean();
138}
139
140
141void CodeBuffer::Grow(size_t new_capacity) {
142 VIXL_ASSERT(managed_);
143 VIXL_ASSERT(new_capacity > capacity_);
Pierre Langlois88c46b82016-06-02 18:15:32 +0100144 size_t size = GetCursorOffset();
Alex Gilday31dd2ae2016-07-05 16:34:41 +0100145 buffer_ = static_cast<byte*>(
146 mremap(buffer_, capacity_, new_capacity, MREMAP_MAYMOVE));
147 VIXL_CHECK(buffer_ != MAP_FAILED);
armvixlc68cb642014-09-25 18:49:30 +0100148
149 cursor_ = buffer_ + size;
150 capacity_ = new_capacity;
151}
152
153
154} // namespace vixl