// Copyright 2014, VIXL authors
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//   * Redistributions of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//   * Redistributions in binary form must reproduce the above copyright notice,
//     this list of conditions and the following disclaimer in the documentation
//     and/or other materials provided with the distribution.
//   * Neither the name of ARM Limited nor the names of its contributors may be
//     used to endorse or promote products derived from this software without
//     specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "code-buffer-vixl.h"
#include "utils-vixl.h"

namespace vixl {


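// Allocate a managed buffer of `capacity` bytes on the heap. A capacity of
// zero leaves the buffer unallocated.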
CodeBuffer::CodeBuffer(size_t capacity)
    : buffer_(NULL),
      managed_(true),
      cursor_(NULL),
      dirty_(false),
      capacity_(capacity) {
  if (capacity_ == 0) {
    return;
  }
  buffer_ = reinterpret_cast<byte*>(malloc(capacity_));
  VIXL_CHECK(buffer_ != NULL);
  // AArch64 instructions must be word aligned; we assert that the default
  // allocator always returns word-aligned memory.
  VIXL_ASSERT(IsWordAligned(buffer_));

  cursor_ = buffer_;
}


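// Wrap an externally allocated buffer of `capacity` bytes. The CodeBuffer
// does not take ownership and will not free it on destruction.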
CodeBuffer::CodeBuffer(void* buffer, size_t capacity)
    : buffer_(reinterpret_cast<byte*>(buffer)),
      managed_(false),
      cursor_(reinterpret_cast<byte*>(buffer)),
      dirty_(false),
      capacity_(capacity) {
  VIXL_ASSERT(buffer_ != NULL);
}


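// The buffer is expected to be clean (see Reset() and SetClean()) when it is
// destroyed; memory is only released for managed buffers.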
CodeBuffer::~CodeBuffer() {
  VIXL_ASSERT(!IsDirty());
  if (managed_) {
    free(buffer_);
  }
}


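// Copy `string` into the buffer, including its terminating NUL character, and
// advance the cursor past it. The caller must have reserved enough space.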
void CodeBuffer::EmitString(const char* string) {
  VIXL_ASSERT(HasSpaceFor(strlen(string) + 1));
  char* dst = reinterpret_cast<char*>(cursor_);
  dirty_ = true;
  char* null_char = stpcpy(dst, string);
  cursor_ = reinterpret_cast<byte*>(null_char) + 1;
}


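// Copy `size` bytes of raw data into the buffer and advance the cursor. The
// caller must have reserved enough space.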
void CodeBuffer::EmitData(const void* data, size_t size) {
  VIXL_ASSERT(HasSpaceFor(size));
  dirty_ = true;
  memcpy(cursor_, data, size);
  cursor_ = cursor_ + size;
}


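// Pad the buffer with zero bytes until the cursor is 4-byte aligned.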
void CodeBuffer::Align() {
  byte* end = AlignUp(cursor_, 4);
  VIXL_ASSERT(end >= cursor_);
  const size_t padding_size = end - cursor_;
  VIXL_ASSERT(HasSpaceFor(padding_size));
  VIXL_ASSERT(padding_size <= 4);
  const byte padding[] = {0, 0, 0, 0};
  dirty_ = true;
  memcpy(cursor_, padding, padding_size);
  cursor_ = end;
}


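// Rewind the cursor to the start of the buffer and mark the buffer as clean.
// In debug builds, managed buffers are also zero-filled.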
void CodeBuffer::Reset() {
#ifdef VIXL_DEBUG
  if (managed_) {
    // Fill with zeros (there is no useful value common to A32 and T32).
    memset(buffer_, 0, capacity_);
  }
#endif
  cursor_ = buffer_;
  SetClean();
}


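// Enlarge a managed buffer to `new_capacity` bytes, preserving both its
// contents and the current cursor offset.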
void CodeBuffer::Grow(size_t new_capacity) {
  VIXL_ASSERT(managed_);
  VIXL_ASSERT(new_capacity > capacity_);
  size_t size = GetCursorOffset();
  buffer_ = static_cast<byte*>(realloc(buffer_, new_capacity));
  VIXL_CHECK(buffer_ != NULL);

  cursor_ = buffer_ + size;
  capacity_ = new_capacity;
}


}  // namespace vixl