// Copyright 2016, VIXL authors
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//   * Redistributions of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//   * Redistributions in binary form must reproduce the above copyright notice,
//     this list of conditions and the following disclaimer in the documentation
//     and/or other materials provided with the distribution.
//   * Neither the name of ARM Limited nor the names of its contributors may be
//     used to endorse or promote products derived from this software without
//     specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "test-runner.h"

#ifdef VIXL_INCLUDE_TARGET_AARCH32
#include "aarch32/macro-assembler-aarch32.h"
#endif

#ifdef VIXL_INCLUDE_TARGET_AARCH64
#include "aarch64/macro-assembler-aarch64.h"
#endif

#define TEST(name) TEST_(SCOPES_##name)
#define __ masm.

namespace vixl {

// This file contains tests for code generation scopes.
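//
// A rough summary of the scope classes exercised below (see the VIXL headers
// for the authoritative documentation):
//   - CodeBufferCheckScope reserves space in the code buffer and can check the
//     size of the code actually generated within the scope.
//   - EmissionCheckScope additionally prevents the literal and veneer pools
//     from being emitted while the scope is open (emitting them first if
//     needed).
//   - ExactAssemblyScope additionally disallows macro-instructions, so only
//     exact assembly may be generated inside it.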

#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(CodeBufferCheckScope_basic_32) {
  aarch32::MacroAssembler masm;

  {
    CodeBufferCheckScope scope(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r0, 0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(CodeBufferCheckScope_basic_64) {
  aarch64::MacroAssembler masm;

  {
    CodeBufferCheckScope scope(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x0, 0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(CodeBufferCheckScope_assembler_use_32) {
  aarch32::MacroAssembler masm;

  {
    CodeBufferCheckScope scope(&masm, 2 * aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r0, 0);
    __ mov(aarch32::r1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(CodeBufferCheckScope_assembler_use_64) {
  aarch64::MacroAssembler masm;

  {
    CodeBufferCheckScope scope(&masm, 2 * aarch64::kInstructionSize);
    __ Mov(aarch64::x0, 0);
    __ movz(aarch64::x1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(CodeBufferCheckScope_Open_32) {
  aarch32::MacroAssembler masm;

  {
    CodeBufferCheckScope scope;
    __ Mov(aarch32::r0, 0);
    scope.Open(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(CodeBufferCheckScope_Open_64) {
  aarch64::MacroAssembler masm;

  {
    CodeBufferCheckScope scope;
    __ Mov(aarch64::x0, 0);
    scope.Open(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(CodeBufferCheckScope_Close_32) {
  aarch32::MacroAssembler masm;

  {
    CodeBufferCheckScope scope(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r0, 0);
    scope.Close();
    __ Mov(aarch32::r1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(CodeBufferCheckScope_Close_64) {
  aarch64::MacroAssembler masm;

  {
    CodeBufferCheckScope scope(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x0, 0);
    scope.Close();
    __ Mov(aarch64::x1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(CodeBufferCheckScope_Open_Close_32) {
  aarch32::MacroAssembler masm;

  {
    CodeBufferCheckScope scope;
    __ Mov(aarch32::r0, 0);
    scope.Open(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r1, 1);
    scope.Close();
    __ Mov(aarch32::r2, 2);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(CodeBufferCheckScope_Open_Close_64) {
  aarch64::MacroAssembler masm;

  {
    CodeBufferCheckScope scope;
    __ Mov(aarch64::x0, 0);
    scope.Open(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x1, 1);
    scope.Close();
    __ Mov(aarch64::x2, 2);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(EmissionCheckScope_basic_32) {
  aarch32::MacroAssembler masm;

  {
    EmissionCheckScope scope(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r0, 0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(EmissionCheckScope_basic_64) {
  aarch64::MacroAssembler masm;

  {
    EmissionCheckScope scope(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x0, 0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(EmissionCheckScope_Open_32) {
  aarch32::MacroAssembler masm;

  {
    EmissionCheckScope scope;
    __ Mov(aarch32::r0, 0);
    scope.Open(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(EmissionCheckScope_Open_64) {
  aarch64::MacroAssembler masm;

  {
    EmissionCheckScope scope;
    __ Mov(aarch64::x0, 0);
    scope.Open(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(EmissionCheckScope_Close_32) {
  aarch32::MacroAssembler masm;

  {
    EmissionCheckScope scope(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r0, 0);
    scope.Close();
    __ Mov(aarch32::r1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(EmissionCheckScope_Close_64) {
  aarch64::MacroAssembler masm;

  {
    EmissionCheckScope scope(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x0, 0);
    scope.Close();
    __ Mov(aarch64::x1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(EmissionCheckScope_Open_Close_32) {
  aarch32::MacroAssembler masm;

  {
    EmissionCheckScope scope;
    __ Mov(aarch32::r0, 0);
    scope.Open(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r1, 1);
    scope.Close();
    __ Mov(aarch32::r2, 2);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(EmissionCheckScope_Open_Close_64) {
  aarch64::MacroAssembler masm;

  {
    EmissionCheckScope scope;
    __ Mov(aarch64::x0, 0);
    scope.Open(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x1, 1);
    scope.Close();
    __ Mov(aarch64::x2, 2);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64

#ifdef VIXL_INCLUDE_TARGET_AARCH32

#define ASSERT_LITERAL_POOL_SIZE_32(expected) \
  VIXL_CHECK((expected) == masm.GetLiteralPoolSize())

TEST(EmissionCheckScope_emit_pool_32) {
  aarch32::MacroAssembler masm;

  // Make sure the pool is empty.
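  // (With kBranchRequired, the emission also generates a branch over the pool
  // so that execution cannot fall through into the literal data.)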
  masm.EmitLiteralPool(aarch32::MacroAssembler::kBranchRequired);
  ASSERT_LITERAL_POOL_SIZE_32(0);

  __ Ldrd(aarch32::r0, aarch32::r1, 0x1234567890abcdef);
  ASSERT_LITERAL_POOL_SIZE_32(8);

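  // kLdrdRange is taken here to be the maximum pc-relative offset reachable by
  // an A32 literal-loading `ldrd` (an 8-bit immediate, so 255 bytes), and
  // kLessThanLdrdRange is simply a value comfortably inside that range.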
  const int kLdrdRange = 255;
  const int kLessThanLdrdRange = 100;

  {
    // Check that opening the scope with a reserved space well below the limit
    // at which the literal pool must be emitted does not force the emission
    // of the pool.
    EmissionCheckScope scope(&masm,
                             kLessThanLdrdRange,
                             EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_32(8);
  }

  {
    // Check that the scope forces emission of the pool if necessary.
    EmissionCheckScope scope(&masm,
                             kLdrdRange + 1,
                             EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_32(0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64

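// Note: on AArch64 the reported literal pool size includes one instruction's
// worth of overhead (presumably the branch over the pool), which is why
// kInstructionSize is added to the expected value below.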
#define ASSERT_LITERAL_POOL_SIZE_64(expected) \
  VIXL_CHECK( \
      (expected + aarch64::kInstructionSize) == masm.GetLiteralPoolSize())

TEST(EmissionCheckScope_emit_pool_64) {
  aarch64::MacroAssembler masm;

  // Make sure the pool is empty.
  masm.EmitLiteralPool(aarch64::LiteralPool::kBranchRequired);
  ASSERT_LITERAL_POOL_SIZE_64(0);

  __ Ldr(aarch64::x0, 0x1234567890abcdef);
  ASSERT_LITERAL_POOL_SIZE_64(8);

  {
    // Check that opening the scope with a reserved space well below the limit
    // at which the literal pool must be emitted does not force the emission
    // of the pool.
    EmissionCheckScope scope(&masm,
                             10 * aarch64::kInstructionSize,
                             EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_64(8);
  }

  {
    // Check that the scope forces emission of the pool if necessary.
    EmissionCheckScope scope(&masm,
                             aarch64::kMaxLoadLiteralRange + 1,
                             EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_64(0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(EmissionCheckScope_emit_pool_on_Open_32) {
  aarch32::MacroAssembler masm;

  // Make sure the pool is empty.
  masm.EmitLiteralPool(aarch32::MacroAssembler::kBranchRequired);
  ASSERT_LITERAL_POOL_SIZE_32(0);

  __ Ldrd(aarch32::r0, aarch32::r1, 0x1234567890abcdef);
  ASSERT_LITERAL_POOL_SIZE_32(8);

  const int kLdrdRange = 255;
  const int kLessThanLdrdRange = 100;

  {
    // Check that opening the scope with a reserved space well below the limit
    // at which the literal pool must be emitted does not force the emission
    // of the pool.
    EmissionCheckScope scope;
    scope.Open(&masm,
               kLessThanLdrdRange,
               EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_32(8);
  }

  {
    // Check that the scope forces emission of the pool if necessary.
    EmissionCheckScope scope;
    scope.Open(&masm,
               kLdrdRange + 1,
               EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_32(0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(EmissionCheckScope_emit_pool_on_Open_64) {
  aarch64::MacroAssembler masm;

  // Make sure the pool is empty.
  masm.EmitLiteralPool(aarch64::LiteralPool::kBranchRequired);
  ASSERT_LITERAL_POOL_SIZE_64(0);

  __ Ldr(aarch64::x0, 0x1234567890abcdef);
  ASSERT_LITERAL_POOL_SIZE_64(8);

  {
    // Check that opening the scope with a reserved space well below the limit
    // at which the literal pool must be emitted does not force the emission
    // of the pool.
    EmissionCheckScope scope;
    scope.Open(&masm,
               10 * aarch64::kInstructionSize,
               EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_64(8);
  }

  {
    // Check that the scope forces emission of the pool if necessary.
    EmissionCheckScope scope;
    scope.Open(&masm,
               aarch64::kMaxLoadLiteralRange + 1,
               EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_64(0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(ExactAssemblyScope_basic_32) {
  aarch32::MacroAssembler masm;

  {
    ExactAssemblyScope scope(&masm, aarch32::kA32InstructionSizeInBytes);
    __ nop();
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(ExactAssemblyScope_basic_64) {
  aarch64::MacroAssembler masm;

  {
    ExactAssemblyScope scope(&masm, aarch64::kInstructionSize);
    __ nop();
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(ExactAssemblyScope_Open_32) {
  aarch32::MacroAssembler masm;

  {
    ExactAssemblyScope scope;
    __ Mov(aarch32::r0, 0);
    scope.Open(&masm, aarch32::kA32InstructionSizeInBytes);
    __ mov(aarch32::r1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(ExactAssemblyScope_Open_64) {
  aarch64::MacroAssembler masm;

  {
    ExactAssemblyScope scope;
    __ Mov(aarch64::x0, 0);
    scope.Open(&masm, aarch64::kInstructionSize);
    __ movz(aarch64::x1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(ExactAssemblyScope_Close_32) {
  aarch32::MacroAssembler masm;

  {
    ExactAssemblyScope scope(&masm, aarch32::kA32InstructionSizeInBytes);
    __ mov(aarch32::r0, 0);
    scope.Close();
    __ Mov(aarch32::r1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(ExactAssemblyScope_Close_64) {
  aarch64::MacroAssembler masm;

  {
    ExactAssemblyScope scope(&masm, aarch64::kInstructionSize);
    __ movz(aarch64::x0, 0);
    scope.Close();
    __ Mov(aarch64::x1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(ExactAssemblyScope_Open_Close_32) {
  aarch32::MacroAssembler masm;

  {
    ExactAssemblyScope scope;
    __ Mov(aarch32::r0, 0);
    scope.Open(&masm, aarch32::kA32InstructionSizeInBytes);
    __ mov(aarch32::r1, 1);
    scope.Close();
    __ Mov(aarch32::r2, 2);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(ExactAssemblyScope_Open_Close_64) {
  aarch64::MacroAssembler masm;

  {
    ExactAssemblyScope scope;
    __ Mov(aarch64::x0, 0);
    scope.Open(&masm, aarch64::kInstructionSize);
    __ movz(aarch64::x1, 1);
    scope.Close();
    __ Mov(aarch64::x2, 2);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(ExactAssemblyScope_32) {
  aarch32::MacroAssembler masm;

  // By default macro instructions are allowed.
  VIXL_ASSERT(masm.AllowMacroInstructions());
  {
    ExactAssemblyScope scope1(&masm, 2 * aarch32::kA32InstructionSizeInBytes);
    VIXL_ASSERT(!masm.AllowMacroInstructions());
    __ nop();
    {
      ExactAssemblyScope scope2(&masm,
                                1 * aarch32::kA32InstructionSizeInBytes);
      VIXL_ASSERT(!masm.AllowMacroInstructions());
      __ nop();
    }
    VIXL_ASSERT(!masm.AllowMacroInstructions());
  }
  VIXL_ASSERT(masm.AllowMacroInstructions());

  {
    ExactAssemblyScope scope(&masm, 2 * aarch32::kA32InstructionSizeInBytes);
    __ add(aarch32::r0, aarch32::r0, aarch32::r0);
    __ sub(aarch32::r0, aarch32::r0, aarch32::r0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(ExactAssemblyScope_64) {
  aarch64::MacroAssembler masm;

  // By default macro instructions are allowed.
  VIXL_ASSERT(masm.AllowMacroInstructions());
  {
    ExactAssemblyScope scope1(&masm, 2 * aarch64::kInstructionSize);
    VIXL_ASSERT(!masm.AllowMacroInstructions());
    __ nop();
    {
      ExactAssemblyScope scope2(&masm, 1 * aarch64::kInstructionSize);
      VIXL_ASSERT(!masm.AllowMacroInstructions());
      __ nop();
    }
    VIXL_ASSERT(!masm.AllowMacroInstructions());
  }
  VIXL_ASSERT(masm.AllowMacroInstructions());

  {
    ExactAssemblyScope scope(&masm, 2 * aarch64::kInstructionSize);
    __ add(aarch64::x0, aarch64::x0, aarch64::x0);
    __ sub(aarch64::x0, aarch64::x0, aarch64::x0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(ExactAssemblyScope_scope_with_pools_32) {
  aarch32::MacroAssembler masm;

  ASSERT_LITERAL_POOL_SIZE_32(0);

  __ Ldrd(aarch32::r0, aarch32::r1, 0x1234567890abcdef);

  ASSERT_LITERAL_POOL_SIZE_32(8);

  const int32_t kLdrdRange = 255;
  const int32_t n_nops =
      (kLdrdRange / aarch32::kA32InstructionSizeInBytes) + 1;
  {
    // The literal pool should be generated when opening this scope, as
    // otherwise the `Ldrd` will run out of range when we generate the `nop`
    // instructions below.
    ExactAssemblyScope scope(&masm,
                             n_nops * aarch32::kA32InstructionSizeInBytes);

    // The pool must be empty at this point, but we deliberately do not check
    // that here: we want this regression test to fail while (or after) the
    // nops are generated.

    for (int32_t i = 0; i < n_nops; ++i) {
      __ nop();
    }
  }

  ASSERT_LITERAL_POOL_SIZE_32(0);

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(ExactAssemblyScope_scope_with_pools_64) {
  aarch64::MacroAssembler masm;

  ASSERT_LITERAL_POOL_SIZE_64(0);

  __ Ldr(aarch64::x10, 0x1234567890abcdef);

  ASSERT_LITERAL_POOL_SIZE_64(8);

  const int64_t n_nops =
      aarch64::kMaxLoadLiteralRange / aarch64::kInstructionSize;
  {
    // The literal pool should be generated when opening this scope, as
    // otherwise the `Ldr` will run out of range when we generate the `nop`
    // instructions below.
    ExactAssemblyScope scope(&masm, n_nops * aarch64::kInstructionSize);

    // The pool must be empty at this point, but we deliberately do not check
    // that here: we want this regression test to fail while (or after) the
    // nops are generated.

    for (int64_t i = 0; i < n_nops; ++i) {
      __ nop();
    }
  }

  ASSERT_LITERAL_POOL_SIZE_64(0);

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


}  // namespace vixl