blob: d5861c7938e218681dd5d6e3073ac93fa18570e6 [file] [log] [blame]
Alexandre Ramesb78f1392016-07-01 14:22:22 +01001// Copyright 2014, VIXL authors
armvixlc68cb642014-09-25 18:49:30 +01002// All rights reserved.
3//
4// Redistribution and use in source and binary forms, with or without
5// modification, are permitted provided that the following conditions are met:
6//
7// * Redistributions of source code must retain the above copyright notice,
8// this list of conditions and the following disclaimer.
9// * Redistributions in binary form must reproduce the above copyright notice,
10// this list of conditions and the following disclaimer in the documentation
11// and/or other materials provided with the distribution.
12// * Neither the name of ARM Limited nor the names of its contributors may be
13// used to endorse or promote products derived from this software without
14// specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS CONTRIBUTORS "AS IS" AND
17// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
18// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
19// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
20// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
21// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
22// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
23// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
24// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
25// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26
#include <cstring>

#include "code-buffer-vixl.h"
#include "utils-vixl.h"
armvixlc68cb642014-09-25 18:49:30 +010029
30namespace vixl {
31
32
Alexandre Rames7cd99a82016-08-03 13:32:02 +010033CodeBuffer::CodeBuffer(size_t capacity)
34 : buffer_(NULL),
35 managed_(true),
36 cursor_(NULL),
37 dirty_(false),
38 capacity_(capacity) {
39 if (capacity_ == 0) {
40 return;
41 }
armvixlc68cb642014-09-25 18:49:30 +010042 buffer_ = reinterpret_cast<byte*>(malloc(capacity_));
43 VIXL_CHECK(buffer_ != NULL);
Alexandre Ramesd3832962016-07-04 15:03:43 +010044 // Aarch64 instructions must be word aligned, we assert the default allocator
armvixlc68cb642014-09-25 18:49:30 +010045 // always returns word align memory.
46 VIXL_ASSERT(IsWordAligned(buffer_));
47
48 cursor_ = buffer_;
armvixlc68cb642014-09-25 18:49:30 +010049}
50
51
52CodeBuffer::CodeBuffer(void* buffer, size_t capacity)
53 : buffer_(reinterpret_cast<byte*>(buffer)),
54 managed_(false),
55 cursor_(reinterpret_cast<byte*>(buffer)),
56 dirty_(false),
57 capacity_(capacity) {
58 VIXL_ASSERT(buffer_ != NULL);
59}
60
61
62CodeBuffer::~CodeBuffer() {
63 VIXL_ASSERT(!IsDirty());
64 if (managed_) {
65 free(buffer_);
66 }
67}
68
69
70void CodeBuffer::EmitString(const char* string) {
Pierre Langlois88c46b82016-06-02 18:15:32 +010071 VIXL_ASSERT(HasSpaceFor(strlen(string) + 1));
armvixlc68cb642014-09-25 18:49:30 +010072 char* dst = reinterpret_cast<char*>(cursor_);
73 dirty_ = true;
74 char* null_char = stpcpy(dst, string);
75 cursor_ = reinterpret_cast<byte*>(null_char) + 1;
76}
77
78
Pierre Langlois88c46b82016-06-02 18:15:32 +010079void CodeBuffer::EmitData(const void* data, size_t size) {
80 VIXL_ASSERT(HasSpaceFor(size));
81 dirty_ = true;
82 memcpy(cursor_, data, size);
83 cursor_ = cursor_ + size;
84}
85
86
Vincent Belliard3e1b8992016-07-13 16:02:19 -070087void CodeBuffer::UpdateData(size_t offset, const void* data, size_t size) {
88 dirty_ = true;
89 byte* dst = buffer_ + offset;
90 VIXL_ASSERT(dst + size <= cursor_);
91 memcpy(dst, data, size);
92}
93
94
armvixlc68cb642014-09-25 18:49:30 +010095void CodeBuffer::Align() {
96 byte* end = AlignUp(cursor_, 4);
97 VIXL_ASSERT(end >= cursor_);
98 const size_t padding_size = end - cursor_;
Pierre Langlois88c46b82016-06-02 18:15:32 +010099 VIXL_ASSERT(HasSpaceFor(padding_size));
armvixlc68cb642014-09-25 18:49:30 +0100100 VIXL_ASSERT(padding_size <= 4);
armvixl0f35e362016-05-10 13:57:58 +0100101 const byte padding[] = {0, 0, 0, 0};
armvixlc68cb642014-09-25 18:49:30 +0100102 dirty_ = true;
103 memcpy(cursor_, padding, padding_size);
104 cursor_ = end;
105}
106
107
108void CodeBuffer::Reset() {
armvixl330dc712014-11-25 10:38:32 +0000109#ifdef VIXL_DEBUG
armvixlc68cb642014-09-25 18:49:30 +0100110 if (managed_) {
Pierre Langlois88c46b82016-06-02 18:15:32 +0100111 // Fill with zeros (there is no useful value common to A32 and T32).
armvixlc68cb642014-09-25 18:49:30 +0100112 memset(buffer_, 0, capacity_);
113 }
114#endif
115 cursor_ = buffer_;
116 SetClean();
117}
118
119
120void CodeBuffer::Grow(size_t new_capacity) {
121 VIXL_ASSERT(managed_);
122 VIXL_ASSERT(new_capacity > capacity_);
Pierre Langlois88c46b82016-06-02 18:15:32 +0100123 size_t size = GetCursorOffset();
armvixlc68cb642014-09-25 18:49:30 +0100124 buffer_ = static_cast<byte*>(realloc(buffer_, new_capacity));
125 VIXL_CHECK(buffer_ != NULL);
126
127 cursor_ = buffer_ + size;
128 capacity_ = new_capacity;
129}
130
131
132} // namespace vixl