blob: ce5ba65d5bc52bcf48322511a10b7a1f74ac2519 [file] [log] [blame]
Alexandre Rames9dd6fa32016-10-12 13:26:54 +01001// Copyright 2016, VIXL authors
2// All rights reserved.
3//
4// Redistribution and use in source and binary forms, with or without
5// modification, are permitted provided that the following conditions are met:
6//
7// * Redistributions of source code must retain the above copyright notice,
8// this list of conditions and the following disclaimer.
9// * Redistributions in binary form must reproduce the above copyright notice,
10// this list of conditions and the following disclaimer in the documentation
11// and/or other materials provided with the distribution.
12// * Neither the name of ARM Limited nor the names of its contributors may be
13// used to endorse or promote products derived from this software without
14// specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
17// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
18// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
19// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
20// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
21// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
22// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
23// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
24// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
25// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26
27#include "test-runner.h"
28
Alexandre Rames586c6b92016-10-24 11:59:33 +010029#ifdef VIXL_INCLUDE_TARGET_AARCH64
Alexandre Rames9dd6fa32016-10-12 13:26:54 +010030#include "aarch64/macro-assembler-aarch64.h"
Alexandre Rames586c6b92016-10-24 11:59:33 +010031#endif
Alexandre Rames9dd6fa32016-10-12 13:26:54 +010032
33#define TEST(name) TEST_(SCOPES_##name)
34#define __ masm.
35
36namespace vixl {
37
38// This file contains tests for code generation scopes.
39
Alexandre Rames586c6b92016-10-24 11:59:33 +010040#ifdef VIXL_INCLUDE_TARGET_AARCH64
Alexandre Rames9dd6fa32016-10-12 13:26:54 +010041TEST(CodeBufferCheckScope_basic) {
42 aarch64::MacroAssembler masm;
43
44 {
45 CodeBufferCheckScope scope(&masm, aarch64::kInstructionSize);
46 __ Mov(aarch64::x0, 0);
47 }
48
49 masm.FinalizeCode();
50}
51
52
53TEST(CodeBufferCheckScope_Open) {
54 aarch64::MacroAssembler masm;
55
56 {
57 CodeBufferCheckScope scope;
58 __ Mov(aarch64::x0, 0);
59 scope.Open(&masm, aarch64::kInstructionSize);
60 __ Mov(aarch64::x1, 1);
61 }
62
63 masm.FinalizeCode();
64}
65
66
67TEST(CodeBufferCheckScope_Close) {
68 aarch64::MacroAssembler masm;
69
70 {
71 CodeBufferCheckScope scope(&masm, aarch64::kInstructionSize);
72 __ Mov(aarch64::x0, 0);
73 scope.Close();
74 __ Mov(aarch64::x1, 1);
75 }
76
77 masm.FinalizeCode();
78}
79
80
81TEST(CodeBufferCheckScope_Open_Close) {
82 aarch64::MacroAssembler masm;
83
84 {
85 CodeBufferCheckScope scope;
86 __ Mov(aarch64::x0, 0);
87 scope.Open(&masm, aarch64::kInstructionSize);
88 __ Mov(aarch64::x1, 1);
89 scope.Close();
90 __ Mov(aarch64::x2, 2);
91 }
92
93 masm.FinalizeCode();
94}
95
96
Alexandre Ramesc0b25f22016-10-19 13:53:55 +010097TEST(EmissionCheckScope_basic) {
98 aarch64::MacroAssembler masm;
99
100 {
101 EmissionCheckScope scope(&masm, aarch64::kInstructionSize);
102 __ Mov(aarch64::x0, 0);
103 }
104
105 masm.FinalizeCode();
106}
107
108
109TEST(EmissionCheckScope_Open) {
110 aarch64::MacroAssembler masm;
111
112 {
113 EmissionCheckScope scope;
114 __ Mov(aarch64::x0, 0);
115 scope.Open(&masm, aarch64::kInstructionSize);
116 __ Mov(aarch64::x1, 1);
117 }
118
119 masm.FinalizeCode();
120}
121
122
123TEST(EmissionCheckScope_Close) {
124 aarch64::MacroAssembler masm;
125
126 {
127 EmissionCheckScope scope(&masm, aarch64::kInstructionSize);
128 __ Mov(aarch64::x0, 0);
129 scope.Close();
130 __ Mov(aarch64::x1, 1);
131 }
132
133 masm.FinalizeCode();
134}
135
136
137TEST(EmissionCheckScope_Open_Close) {
138 aarch64::MacroAssembler masm;
139
140 {
141 EmissionCheckScope scope;
142 __ Mov(aarch64::x0, 0);
143 scope.Open(&masm, aarch64::kInstructionSize);
144 __ Mov(aarch64::x1, 1);
145 scope.Close();
146 __ Mov(aarch64::x2, 2);
147 }
148
149 masm.FinalizeCode();
150}
151
152
// Check the current size of `masm`'s literal pool. `GetLiteralPoolSize()`
// appears to include `kInstructionSize` of overhead even for an empty pool
// (presumably a pool header/marker — confirm against the LiteralPool
// implementation), hence the adjustment on the expected value. The macro
// argument is parenthesised so that expression arguments (e.g. `a + b`)
// expand with the intended precedence.
#define ASSERT_LITERAL_POOL_SIZE(expected)               \
  VIXL_CHECK(                                            \
      ((expected) + aarch64::kInstructionSize) == (masm.GetLiteralPoolSize()))
156
157TEST(EmissionCheckScope_emit_pool) {
158 aarch64::MacroAssembler masm;
159
160 // Make sure the pool is empty;
161 masm.EmitLiteralPool(aarch64::LiteralPool::kBranchRequired);
162 ASSERT_LITERAL_POOL_SIZE(0);
163
164 __ Ldr(aarch64::x0, 0x1234567890abcdef);
165 ASSERT_LITERAL_POOL_SIZE(8);
166
167 {
168 // Check that opening the scope with a reserved space well below the limit
169 // at which can generate the literal pool does not force the emission of
170 // the pool.
171 EmissionCheckScope scope(&masm,
172 10 * aarch64::kInstructionSize,
173 EmissionCheckScope::kMaximumSize);
174 ASSERT_LITERAL_POOL_SIZE(8);
175 }
176
177 {
178 // Check that the scope forces emission of the pool if necessary.
179 EmissionCheckScope scope(&masm,
180 aarch64::kMaxLoadLiteralRange + 1,
181 EmissionCheckScope::kMaximumSize);
182 ASSERT_LITERAL_POOL_SIZE(0);
183 }
184
185 masm.FinalizeCode();
186}
187
188
189TEST(EmissionCheckScope_emit_pool_on_Open) {
190 aarch64::MacroAssembler masm;
191
192 // Make sure the pool is empty;
193 masm.EmitLiteralPool(aarch64::LiteralPool::kBranchRequired);
194 ASSERT_LITERAL_POOL_SIZE(0);
195
196 __ Ldr(aarch64::x0, 0x1234567890abcdef);
197 ASSERT_LITERAL_POOL_SIZE(8);
198
199 {
200 // Check that opening the scope with a reserved space well below the limit
201 // at which can generate the literal pool does not force the emission of
202 // the pool.
203 EmissionCheckScope scope;
204 scope.Open(&masm,
205 10 * aarch64::kInstructionSize,
206 EmissionCheckScope::kMaximumSize);
207 ASSERT_LITERAL_POOL_SIZE(8);
208 }
209
210 {
211 // Check that the scope forces emission of the pool if necessary.
212 EmissionCheckScope scope;
213 scope.Open(&masm,
214 aarch64::kMaxLoadLiteralRange + 1,
215 EmissionCheckScope::kMaximumSize);
216 ASSERT_LITERAL_POOL_SIZE(0);
217 }
218
219 masm.FinalizeCode();
220}
Alexandre Rames07d1aa52016-10-25 17:20:51 +0100221
222
223TEST(ExactAssemblyScope_basic) {
224 aarch64::MacroAssembler masm;
225
226 {
227 ExactAssemblyScope scope(&masm, aarch64::kInstructionSize);
228 __ nop();
229 }
230
231 masm.FinalizeCode();
232}
233
234
235TEST(ExactAssemblyScope_Open) {
236 aarch64::MacroAssembler masm;
237
238 {
239 ExactAssemblyScope scope;
240 __ Mov(aarch64::x0, 0);
241 scope.Open(&masm, aarch64::kInstructionSize);
242 __ movz(aarch64::x1, 1);
243 }
244
245 masm.FinalizeCode();
246}
247
248
249TEST(ExactAssemblyScope_Close) {
250 aarch64::MacroAssembler masm;
251
252 {
253 CodeBufferCheckScope scope(&masm, aarch64::kInstructionSize);
254 __ movz(aarch64::x0, 0);
255 scope.Close();
256 __ Mov(aarch64::x1, 1);
257 }
258
259 masm.FinalizeCode();
260}
261
262
263TEST(ExactAssemblyScope_Open_Close) {
264 aarch64::MacroAssembler masm;
265
266 {
267 ExactAssemblyScope scope;
268 __ Mov(aarch64::x0, 0);
269 scope.Open(&masm, aarch64::kInstructionSize);
270 __ movz(aarch64::x1, 1);
271 scope.Close();
272 __ Mov(aarch64::x2, 2);
273 }
274
275 masm.FinalizeCode();
276}
277
278
279TEST(ExactAssemblyScope) {
280 aarch64::MacroAssembler masm;
281
282 // By default macro instructions are allowed.
283 VIXL_ASSERT(masm.AllowMacroInstructions());
284 {
285 ExactAssemblyScope scope1(&masm, 2 * aarch64::kInstructionSize);
286 VIXL_ASSERT(!masm.AllowMacroInstructions());
287 __ nop();
288 {
289 ExactAssemblyScope scope2(&masm, 1 * aarch64::kInstructionSize);
290 VIXL_ASSERT(!masm.AllowMacroInstructions());
291 __ nop();
292 }
293 VIXL_ASSERT(!masm.AllowMacroInstructions());
294 }
295 VIXL_ASSERT(masm.AllowMacroInstructions());
296
297 {
298 ExactAssemblyScope scope(&masm, 2 * aarch64::kInstructionSize);
299 __ add(aarch64::x0, aarch64::x0, aarch64::x0);
300 __ sub(aarch64::x0, aarch64::x0, aarch64::x0);
301 }
302
303 masm.FinalizeCode();
304}
305
306
307TEST(ExactAssemblyScope_scope_with_pools) {
308 aarch64::MacroAssembler masm;
309
310 ASSERT_LITERAL_POOL_SIZE(0);
311
312 __ Ldr(aarch64::x10, 0x1234567890abcdef);
313
314 ASSERT_LITERAL_POOL_SIZE(8);
315
316 const int64_t n_nops =
317 aarch64::kMaxLoadLiteralRange / aarch64::kInstructionSize;
318 {
319 // The literal pool should be generated at this point, as otherwise the
320 // `Ldr` will run out of range when we generate the `nop` instructions
321 // below.
322 ExactAssemblyScope scope(&masm, n_nops * aarch64::kInstructionSize);
323
324 // Although it must be, we do not check that the literal pool size is zero
325 // here, because we want this regression test to fail while or after we
326 // generate the nops.
327
328 for (int64_t i = 0; i < n_nops; ++i) {
329 __ nop();
330 }
331 }
332
333 ASSERT_LITERAL_POOL_SIZE(0);
334
335 masm.FinalizeCode();
336}
337
338
Alexandre Rames586c6b92016-10-24 11:59:33 +0100339#endif // VIXL_INCLUDE_TARGET_AARCH64
Alexandre Ramesc0b25f22016-10-19 13:53:55 +0100340
341
Alexandre Rames9dd6fa32016-10-12 13:26:54 +0100342} // namespace vixl
343