// Copyright 2017, VIXL authors
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//   * Redistributions of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//   * Redistributions in binary form must reproduce the above copyright notice,
//     this list of conditions and the following disclaimer in the documentation
//     and/or other materials provided with the distribution.
//   * Neither the name of ARM Limited nor the names of its contributors may be
//     used to endorse or promote products derived from this software without
//     specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

#include "test-runner.h"

#ifdef VIXL_INCLUDE_TARGET_AARCH32
#include "aarch32/macro-assembler-aarch32.h"
#include "aarch32/test-utils-aarch32.h"
#endif

#ifdef VIXL_INCLUDE_TARGET_AARCH64
#include "aarch64/macro-assembler-aarch64.h"
#endif

#define TEST(name) TEST_(SCOPES_##name)

#ifdef VIXL_INCLUDE_TARGET_A32
#define TEST_A32(name) TEST(name)
#else
// Do not add this test to the harness.
#define TEST_A32(name) void Test##name()
#endif

#define __ masm.
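// The `__` shorthand expands to `masm.`, so both macro-instruction calls such
// as `__ Mov(...)` and raw assembler calls such as `__ mov(...)` go through
// the MacroAssembler declared at the top of each test.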

namespace vixl {

// This file contains tests for code generation scopes.
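//
// Broadly, the three scope types exercised below are nested refinements of
// one another:
//   - CodeBufferCheckScope reserves space in the code buffer and checks that
//     the code emitted inside the scope fits the size given when it is opened.
//   - EmissionCheckScope additionally prevents pools (such as literal pools)
//     from being emitted inside the scope, emitting them up front if required.
//   - ExactAssemblyScope additionally disallows macro instructions, so only
//     raw assembler instructions can be generated inside the scope.
// (This summary is inferred from the checks performed by the tests below.)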

#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(CodeBufferCheckScope_basic_32) {
  aarch32::MacroAssembler masm;

  {
    CodeBufferCheckScope scope(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r0, 0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(CodeBufferCheckScope_basic_64) {
  aarch64::MacroAssembler masm;

  {
    CodeBufferCheckScope scope(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x0, 0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


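// A CodeBufferCheckScope does not restrict which interface is used inside it:
// both macro instructions (`Mov`) and raw assembler instructions (`mov`,
// `movz`) may be emitted, as the `assembler_use` tests below exercise.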
#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(CodeBufferCheckScope_assembler_use_32) {
  aarch32::MacroAssembler masm;

  {
    CodeBufferCheckScope scope(&masm, 2 * aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r0, 0);
    __ mov(aarch32::r1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(CodeBufferCheckScope_assembler_use_64) {
  aarch64::MacroAssembler masm;

  {
    CodeBufferCheckScope scope(&masm, 2 * aarch64::kInstructionSize);
    __ Mov(aarch64::x0, 0);
    __ movz(aarch64::x1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(CodeBufferCheckScope_Open_32) {
  aarch32::MacroAssembler masm;

  {
    CodeBufferCheckScope scope;
    __ Mov(aarch32::r0, 0);
    scope.Open(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(CodeBufferCheckScope_Open_64) {
  aarch64::MacroAssembler masm;

  {
    CodeBufferCheckScope scope;
    __ Mov(aarch64::x0, 0);
    scope.Open(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(CodeBufferCheckScope_Close_32) {
  aarch32::MacroAssembler masm;

  {
    CodeBufferCheckScope scope(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r0, 0);
    scope.Close();
    __ Mov(aarch32::r1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(CodeBufferCheckScope_Close_64) {
  aarch64::MacroAssembler masm;

  {
    CodeBufferCheckScope scope(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x0, 0);
    scope.Close();
    __ Mov(aarch64::x1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(CodeBufferCheckScope_Open_Close_32) {
  aarch32::MacroAssembler masm;

  {
    CodeBufferCheckScope scope;
    __ Mov(aarch32::r0, 0);
    scope.Open(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r1, 1);
    scope.Close();
    __ Mov(aarch32::r2, 2);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(CodeBufferCheckScope_Open_Close_64) {
  aarch64::MacroAssembler masm;

  {
    CodeBufferCheckScope scope;
    __ Mov(aarch64::x0, 0);
    scope.Open(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x1, 1);
    scope.Close();
    __ Mov(aarch64::x2, 2);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(EmissionCheckScope_basic_32) {
  aarch32::MacroAssembler masm;

  {
    EmissionCheckScope scope(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r0, 0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(EmissionCheckScope_basic_64) {
  aarch64::MacroAssembler masm;

  {
    EmissionCheckScope scope(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x0, 0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(EmissionCheckScope_Open_32) {
  aarch32::MacroAssembler masm;

  {
    EmissionCheckScope scope;
    __ Mov(aarch32::r0, 0);
    scope.Open(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(EmissionCheckScope_Open_64) {
  aarch64::MacroAssembler masm;

  {
    EmissionCheckScope scope;
    __ Mov(aarch64::x0, 0);
    scope.Open(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(EmissionCheckScope_Close_32) {
  aarch32::MacroAssembler masm;

  {
    EmissionCheckScope scope(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r0, 0);
    scope.Close();
    __ Mov(aarch32::r1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(EmissionCheckScope_Close_64) {
  aarch64::MacroAssembler masm;

  {
    EmissionCheckScope scope(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x0, 0);
    scope.Close();
    __ Mov(aarch64::x1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(EmissionCheckScope_Open_Close_32) {
  aarch32::MacroAssembler masm;

  {
    EmissionCheckScope scope;
    __ Mov(aarch32::r0, 0);
    scope.Open(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r1, 1);
    scope.Close();
    __ Mov(aarch32::r2, 2);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(EmissionCheckScope_Open_Close_64) {
  aarch64::MacroAssembler masm;

  {
    EmissionCheckScope scope;
    __ Mov(aarch64::x0, 0);
    scope.Open(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x1, 1);
    scope.Close();
    __ Mov(aarch64::x2, 2);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32

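// Read the current literal pool size through the aarch32 TestMacroAssembler
// (from "aarch32/test-utils-aarch32.h"), which exposes the pool state for
// testing purposes.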
#define ASSERT_LITERAL_POOL_SIZE_32(expected)     \
  {                                               \
    aarch32::TestMacroAssembler test(&masm);      \
    VIXL_CHECK((expected) == test.GetPoolSize()); \
  }

TEST_A32(EmissionCheckScope_emit_pool_32) {
  aarch32::MacroAssembler masm;

  // Make sure the pool is empty.
  masm.EmitLiteralPool(PoolManager<int32_t>::kBranchRequired);
  ASSERT_LITERAL_POOL_SIZE_32(0);

  __ Ldrd(aarch32::r0, aarch32::r1, 0x1234567890abcdef);
  ASSERT_LITERAL_POOL_SIZE_32(8);

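  // The A32 `ldrd` literal form encodes only a small (8-bit) byte offset, so
  // a pool entry must stay within roughly 255 bytes of the instruction; the
  // constants below reflect that limit.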
  const int kLdrdRange = 255;
  const int kLessThanLdrdRange = 100;

  {
    // Check that opening the scope with a reserved space well below the limit
    // at which the literal pool would be generated does not force the
    // emission of the pool.
    EmissionCheckScope scope(&masm,
                             kLessThanLdrdRange,
                             EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_32(8);
  }

  {
    // Check that the scope forces emission of the pool if necessary.
    EmissionCheckScope scope(&masm,
                             kLdrdRange + 1,
                             EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_32(0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64

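// GetLiteralPoolSize() appears to also account for the branch emitted to jump
// over the pool, hence the extra kInstructionSize added to `expected` (an
// assumption based on how the checks below are written).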
#define ASSERT_LITERAL_POOL_SIZE_64(expected)          \
  VIXL_CHECK((expected + aarch64::kInstructionSize) == \
             masm.GetLiteralPoolSize())

TEST(EmissionCheckScope_emit_pool_64) {
  aarch64::MacroAssembler masm;

  // Make sure the pool is empty.
  masm.EmitLiteralPool(aarch64::LiteralPool::kBranchRequired);
  ASSERT_LITERAL_POOL_SIZE_64(0);

  __ Ldr(aarch64::x0, 0x1234567890abcdef);
  ASSERT_LITERAL_POOL_SIZE_64(8);

  {
    // Check that opening the scope with a reserved space well below the limit
    // at which the literal pool would be generated does not force the
    // emission of the pool.
    EmissionCheckScope scope(&masm,
                             10 * aarch64::kInstructionSize,
                             EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_64(8);
  }

  {
    // Check that the scope forces emission of the pool if necessary.
    EmissionCheckScope scope(&masm,
                             aarch64::kMaxLoadLiteralRange + 1,
                             EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_64(0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST_A32(EmissionCheckScope_emit_pool_on_Open_32) {
  aarch32::MacroAssembler masm;

  // Make sure the pool is empty.
  masm.EmitLiteralPool(PoolManager<int32_t>::kBranchRequired);
  ASSERT_LITERAL_POOL_SIZE_32(0);

  __ Ldrd(aarch32::r0, aarch32::r1, 0x1234567890abcdef);
  ASSERT_LITERAL_POOL_SIZE_32(8);

  const int kLdrdRange = 255;
  const int kLessThanLdrdRange = 100;

  {
    // Check that opening the scope with a reserved space well below the limit
    // at which the literal pool would be generated does not force the
    // emission of the pool.
    EmissionCheckScope scope;
    scope.Open(&masm, kLessThanLdrdRange, EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_32(8);
  }

  {
    // Check that the scope forces emission of the pool if necessary.
    EmissionCheckScope scope;
    scope.Open(&masm, kLdrdRange + 1, EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_32(0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(EmissionCheckScope_emit_pool_on_Open_64) {
  aarch64::MacroAssembler masm;

  // Make sure the pool is empty.
  masm.EmitLiteralPool(aarch64::LiteralPool::kBranchRequired);
  ASSERT_LITERAL_POOL_SIZE_64(0);

  __ Ldr(aarch64::x0, 0x1234567890abcdef);
  ASSERT_LITERAL_POOL_SIZE_64(8);

  {
    // Check that opening the scope with a reserved space well below the limit
    // at which the literal pool would be generated does not force the
    // emission of the pool.
    EmissionCheckScope scope;
    scope.Open(&masm,
               10 * aarch64::kInstructionSize,
               EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_64(8);
  }

  {
    // Check that the scope forces emission of the pool if necessary.
    EmissionCheckScope scope;
    scope.Open(&masm,
               aarch64::kMaxLoadLiteralRange + 1,
               EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_64(0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST_A32(ExactAssemblyScope_basic_32) {
  aarch32::MacroAssembler masm;

  {
    ExactAssemblyScope scope(&masm, aarch32::kA32InstructionSizeInBytes);
    __ nop();
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(ExactAssemblyScope_basic_64) {
  aarch64::MacroAssembler masm;

  {
    ExactAssemblyScope scope(&masm, aarch64::kInstructionSize);
    __ nop();
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST_A32(ExactAssemblyScope_Open_32) {
  aarch32::MacroAssembler masm;

  {
    ExactAssemblyScope scope;
    __ Mov(aarch32::r0, 0);
    scope.Open(&masm, aarch32::kA32InstructionSizeInBytes);
    __ mov(aarch32::r1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(ExactAssemblyScope_Open_64) {
  aarch64::MacroAssembler masm;

  {
    ExactAssemblyScope scope;
    __ Mov(aarch64::x0, 0);
    scope.Open(&masm, aarch64::kInstructionSize);
    __ movz(aarch64::x1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST_A32(ExactAssemblyScope_Close_32) {
  aarch32::MacroAssembler masm;

  {
    ExactAssemblyScope scope(&masm, aarch32::kA32InstructionSizeInBytes);
    __ mov(aarch32::r0, 0);
    scope.Close();
    __ Mov(aarch32::r1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(ExactAssemblyScope_Close_64) {
  aarch64::MacroAssembler masm;

  {
    ExactAssemblyScope scope(&masm, aarch64::kInstructionSize);
    __ movz(aarch64::x0, 0);
    scope.Close();
    __ Mov(aarch64::x1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST_A32(ExactAssemblyScope_Open_Close_32) {
  aarch32::MacroAssembler masm;

  {
    ExactAssemblyScope scope;
    __ Mov(aarch32::r0, 0);
    scope.Open(&masm, aarch32::kA32InstructionSizeInBytes);
    __ mov(aarch32::r1, 1);
    scope.Close();
    __ Mov(aarch32::r2, 2);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(ExactAssemblyScope_Open_Close_64) {
  aarch64::MacroAssembler masm;

  {
    ExactAssemblyScope scope;
    __ Mov(aarch64::x0, 0);
    scope.Open(&masm, aarch64::kInstructionSize);
    __ movz(aarch64::x1, 1);
    scope.Close();
    __ Mov(aarch64::x2, 2);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST_A32(ExactAssemblyScope_32) {
  aarch32::MacroAssembler masm;

  // By default macro instructions are allowed.
  VIXL_CHECK(!masm.ArePoolsBlocked());
  VIXL_ASSERT(!masm.AllowAssembler());
  VIXL_ASSERT(masm.AllowMacroInstructions());
  {
    ExactAssemblyScope scope1(&masm, 2 * aarch32::kA32InstructionSizeInBytes);
    VIXL_CHECK(masm.ArePoolsBlocked());
    VIXL_ASSERT(masm.AllowAssembler());
    VIXL_ASSERT(!masm.AllowMacroInstructions());
    __ nop();
    {
      ExactAssemblyScope scope2(&masm,
                                1 * aarch32::kA32InstructionSizeInBytes);
      VIXL_CHECK(masm.ArePoolsBlocked());
      VIXL_ASSERT(masm.AllowAssembler());
      VIXL_ASSERT(!masm.AllowMacroInstructions());
      __ nop();
    }
    VIXL_CHECK(masm.ArePoolsBlocked());
    VIXL_ASSERT(masm.AllowAssembler());
    VIXL_ASSERT(!masm.AllowMacroInstructions());
  }
  VIXL_CHECK(!masm.ArePoolsBlocked());
  VIXL_ASSERT(!masm.AllowAssembler());
  VIXL_ASSERT(masm.AllowMacroInstructions());

  {
    ExactAssemblyScope scope(&masm, 2 * aarch32::kA32InstructionSizeInBytes);
    __ add(aarch32::r0, aarch32::r0, aarch32::r0);
    __ sub(aarch32::r0, aarch32::r0, aarch32::r0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(ExactAssemblyScope_64) {
  aarch64::MacroAssembler masm;

  // By default macro instructions are allowed.
  VIXL_CHECK(!masm.ArePoolsBlocked());
  VIXL_ASSERT(!masm.AllowAssembler());
  VIXL_ASSERT(masm.AllowMacroInstructions());
  {
    ExactAssemblyScope scope1(&masm, 2 * aarch64::kInstructionSize);
    VIXL_CHECK(masm.ArePoolsBlocked());
    VIXL_ASSERT(masm.AllowAssembler());
    VIXL_ASSERT(!masm.AllowMacroInstructions());
    __ nop();
    {
      ExactAssemblyScope scope2(&masm, 1 * aarch64::kInstructionSize);
      VIXL_CHECK(masm.ArePoolsBlocked());
      VIXL_ASSERT(masm.AllowAssembler());
      VIXL_ASSERT(!masm.AllowMacroInstructions());
      __ nop();
    }
    VIXL_CHECK(masm.ArePoolsBlocked());
    VIXL_ASSERT(masm.AllowAssembler());
    VIXL_ASSERT(!masm.AllowMacroInstructions());
  }
  VIXL_CHECK(!masm.ArePoolsBlocked());
  VIXL_ASSERT(!masm.AllowAssembler());
  VIXL_ASSERT(masm.AllowMacroInstructions());

  {
    ExactAssemblyScope scope(&masm, 2 * aarch64::kInstructionSize);
    __ add(aarch64::x0, aarch64::x0, aarch64::x0);
    __ sub(aarch64::x0, aarch64::x0, aarch64::x0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST_A32(ExactAssemblyScope_scope_with_pools_32) {
  aarch32::MacroAssembler masm;

  ASSERT_LITERAL_POOL_SIZE_32(0);

  __ Ldrd(aarch32::r0, aarch32::r1, 0x1234567890abcdef);

  ASSERT_LITERAL_POOL_SIZE_32(8);

  const int32_t kLdrdRange = 255;
  const int32_t n_nops = (kLdrdRange / aarch32::kA32InstructionSizeInBytes) + 1;
  {
    // The literal pool should be generated when opening this scope, as
    // otherwise the `Ldrd` will run out of range when we generate the `nop`
    // instructions below.
    ExactAssemblyScope scope(&masm,
                             n_nops * aarch32::kA32InstructionSizeInBytes);

    // Although the pool must be empty at this point, we do not check its size
    // here, because we want this regression test to fail while or after we
    // generate the nops.

    for (int32_t i = 0; i < n_nops; ++i) {
      __ nop();
    }
  }

  ASSERT_LITERAL_POOL_SIZE_32(0);

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(ExactAssemblyScope_scope_with_pools_64) {
  aarch64::MacroAssembler masm;

  ASSERT_LITERAL_POOL_SIZE_64(0);

  __ Ldr(aarch64::x10, 0x1234567890abcdef);

  ASSERT_LITERAL_POOL_SIZE_64(8);

  const int64_t n_nops =
      aarch64::kMaxLoadLiteralRange / aarch64::kInstructionSize;
  {
    // The literal pool should be generated when opening this scope, as
    // otherwise the `Ldr` will run out of range when we generate the `nop`
    // instructions below.
    ExactAssemblyScope scope(&masm, n_nops * aarch64::kInstructionSize);

    // Although the pool must be empty at this point, we do not check its size
    // here, because we want this regression test to fail while or after we
    // generate the nops.

    for (int64_t i = 0; i < n_nops; ++i) {
      __ nop();
    }
  }

  ASSERT_LITERAL_POOL_SIZE_64(0);

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


}  // namespace vixl