// Copyright 2016, VIXL authors
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//   * Redistributions of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//   * Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
//   * Neither the name of ARM Limited nor the names of its contributors may
//     be used to endorse or promote products derived from this software
//     without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

#include "test-runner.h"

#ifdef VIXL_INCLUDE_TARGET_AARCH32
#include "aarch32/macro-assembler-aarch32.h"
#endif

#ifdef VIXL_INCLUDE_TARGET_AARCH64
#include "aarch64/macro-assembler-aarch64.h"
#endif

#define TEST(name) TEST_(SCOPES_##name)

#ifdef VIXL_INCLUDE_TARGET_A32
#define TEST_A32(name) TEST(name)
#else
// Do not add this test to the harness.
#define TEST_A32(name) void Test##name()
#endif

#define __ masm.

namespace vixl {

// This file contains tests for code generation scopes.

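// The tests below exercise three kinds of scope:
//  - CodeBufferCheckScope reserves space in the code buffer for the
//    instructions emitted while the scope is open.
//  - EmissionCheckScope additionally emits any pending literal pool when it
//    is opened, if the reserved space could push the pool out of range (see
//    the `emit_pool` tests).
//  - ExactAssemblyScope additionally forbids macro instructions while it is
//    open (see the `ExactAssemblyScope_32`/`_64` tests).
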
#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(CodeBufferCheckScope_basic_32) {
  aarch32::MacroAssembler masm;

  {
    CodeBufferCheckScope scope(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r0, 0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(CodeBufferCheckScope_basic_64) {
  aarch64::MacroAssembler masm;

  {
    CodeBufferCheckScope scope(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x0, 0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(CodeBufferCheckScope_assembler_use_32) {
  aarch32::MacroAssembler masm;

  {
    CodeBufferCheckScope scope(&masm, 2 * aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r0, 0);
    __ mov(aarch32::r1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(CodeBufferCheckScope_assembler_use_64) {
  aarch64::MacroAssembler masm;

  {
    CodeBufferCheckScope scope(&masm, 2 * aarch64::kInstructionSize);
    __ Mov(aarch64::x0, 0);
    __ movz(aarch64::x1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(CodeBufferCheckScope_Open_32) {
  aarch32::MacroAssembler masm;

  {
    CodeBufferCheckScope scope;
    __ Mov(aarch32::r0, 0);
    scope.Open(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(CodeBufferCheckScope_Open_64) {
  aarch64::MacroAssembler masm;

  {
    CodeBufferCheckScope scope;
    __ Mov(aarch64::x0, 0);
    scope.Open(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(CodeBufferCheckScope_Close_32) {
  aarch32::MacroAssembler masm;

  {
    CodeBufferCheckScope scope(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r0, 0);
    scope.Close();
    __ Mov(aarch32::r1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(CodeBufferCheckScope_Close_64) {
  aarch64::MacroAssembler masm;

  {
    CodeBufferCheckScope scope(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x0, 0);
    scope.Close();
    __ Mov(aarch64::x1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(CodeBufferCheckScope_Open_Close_32) {
  aarch32::MacroAssembler masm;

  {
    CodeBufferCheckScope scope;
    __ Mov(aarch32::r0, 0);
    scope.Open(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r1, 1);
    scope.Close();
    __ Mov(aarch32::r2, 2);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(CodeBufferCheckScope_Open_Close_64) {
  aarch64::MacroAssembler masm;

  {
    CodeBufferCheckScope scope;
    __ Mov(aarch64::x0, 0);
    scope.Open(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x1, 1);
    scope.Close();
    __ Mov(aarch64::x2, 2);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(EmissionCheckScope_basic_32) {
  aarch32::MacroAssembler masm;

  {
    EmissionCheckScope scope(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r0, 0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(EmissionCheckScope_basic_64) {
  aarch64::MacroAssembler masm;

  {
    EmissionCheckScope scope(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x0, 0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(EmissionCheckScope_Open_32) {
  aarch32::MacroAssembler masm;

  {
    EmissionCheckScope scope;
    __ Mov(aarch32::r0, 0);
    scope.Open(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(EmissionCheckScope_Open_64) {
  aarch64::MacroAssembler masm;

  {
    EmissionCheckScope scope;
    __ Mov(aarch64::x0, 0);
    scope.Open(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(EmissionCheckScope_Close_32) {
  aarch32::MacroAssembler masm;

  {
    EmissionCheckScope scope(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r0, 0);
    scope.Close();
    __ Mov(aarch32::r1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(EmissionCheckScope_Close_64) {
  aarch64::MacroAssembler masm;

  {
    EmissionCheckScope scope(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x0, 0);
    scope.Close();
    __ Mov(aarch64::x1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(EmissionCheckScope_Open_Close_32) {
  aarch32::MacroAssembler masm;

  {
    EmissionCheckScope scope;
    __ Mov(aarch32::r0, 0);
    scope.Open(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r1, 1);
    scope.Close();
    __ Mov(aarch32::r2, 2);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(EmissionCheckScope_Open_Close_64) {
  aarch64::MacroAssembler masm;

  {
    EmissionCheckScope scope;
    __ Mov(aarch64::x0, 0);
    scope.Open(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x1, 1);
    scope.Close();
    __ Mov(aarch64::x2, 2);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32

#define ASSERT_LITERAL_POOL_SIZE_32(expected) \
  VIXL_CHECK((expected) == masm.GetLiteralPoolSize())

TEST_A32(EmissionCheckScope_emit_pool_32) {
  aarch32::MacroAssembler masm;

  // Make sure the pool is empty.
  masm.EmitLiteralPool(aarch32::MacroAssembler::kBranchRequired);
  ASSERT_LITERAL_POOL_SIZE_32(0);

  __ Ldrd(aarch32::r0, aarch32::r1, 0x1234567890abcdef);
  ASSERT_LITERAL_POOL_SIZE_32(8);

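  // The `Ldrd` above is assembled as a literal load; the A32 `ldrd` (literal)
  // encoding only has an 8-bit byte offset, hence the 255-byte `kLdrdRange`
  // used below.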
  const int kLdrdRange = 255;
  const int kLessThanLdrdRange = 100;

  {
    // Check that opening the scope with a reserved space well below the limit
    // at which the literal pool must be emitted does not force the emission
    // of the pool.
    EmissionCheckScope scope(&masm,
                             kLessThanLdrdRange,
                             EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_32(8);
  }

  {
    // Check that the scope forces emission of the pool if necessary.
    EmissionCheckScope scope(&masm,
                             kLdrdRange + 1,
                             EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_32(0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64

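// Note that on AArch64 `GetLiteralPoolSize()` reports an extra
// `kInstructionSize` even when no literals are pending (presumably the pool
// marker or the branch over the pool), so the macro below adds it to the
// expected value.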
#define ASSERT_LITERAL_POOL_SIZE_64(expected) \
  VIXL_CHECK(                                 \
      (expected + aarch64::kInstructionSize) == masm.GetLiteralPoolSize())

TEST(EmissionCheckScope_emit_pool_64) {
  aarch64::MacroAssembler masm;

  // Make sure the pool is empty.
  masm.EmitLiteralPool(aarch64::LiteralPool::kBranchRequired);
  ASSERT_LITERAL_POOL_SIZE_64(0);

  __ Ldr(aarch64::x0, 0x1234567890abcdef);
  ASSERT_LITERAL_POOL_SIZE_64(8);

  {
    // Check that opening the scope with a reserved space well below the limit
    // at which the literal pool must be emitted does not force the emission
    // of the pool.
    EmissionCheckScope scope(&masm,
                             10 * aarch64::kInstructionSize,
                             EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_64(8);
  }

  {
    // Check that the scope forces emission of the pool if necessary.
    EmissionCheckScope scope(&masm,
                             aarch64::kMaxLoadLiteralRange + 1,
                             EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_64(0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST_A32(EmissionCheckScope_emit_pool_on_Open_32) {
  aarch32::MacroAssembler masm;

  // Make sure the pool is empty.
  masm.EmitLiteralPool(aarch32::MacroAssembler::kBranchRequired);
  ASSERT_LITERAL_POOL_SIZE_32(0);

  __ Ldrd(aarch32::r0, aarch32::r1, 0x1234567890abcdef);
  ASSERT_LITERAL_POOL_SIZE_32(8);

  const int kLdrdRange = 255;
  const int kLessThanLdrdRange = 100;

  {
    // Check that opening the scope with a reserved space well below the limit
    // at which the literal pool must be emitted does not force the emission
    // of the pool.
    EmissionCheckScope scope(&masm,
                             kLessThanLdrdRange,
                             EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_32(8);
  }

  {
    // Check that the scope forces emission of the pool if necessary.
    EmissionCheckScope scope(&masm,
                             kLdrdRange + 1,
                             EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_32(0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(EmissionCheckScope_emit_pool_on_Open_64) {
  aarch64::MacroAssembler masm;

  // Make sure the pool is empty.
  masm.EmitLiteralPool(aarch64::LiteralPool::kBranchRequired);
  ASSERT_LITERAL_POOL_SIZE_64(0);

  __ Ldr(aarch64::x0, 0x1234567890abcdef);
  ASSERT_LITERAL_POOL_SIZE_64(8);

  {
    // Check that opening the scope with a reserved space well below the limit
    // at which the literal pool must be emitted does not force the emission
    // of the pool.
    EmissionCheckScope scope;
    scope.Open(&masm,
               10 * aarch64::kInstructionSize,
               EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_64(8);
  }

  {
    // Check that the scope forces emission of the pool if necessary.
    EmissionCheckScope scope;
    scope.Open(&masm,
               aarch64::kMaxLoadLiteralRange + 1,
               EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_64(0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST_A32(ExactAssemblyScope_basic_32) {
  aarch32::MacroAssembler masm;

  {
    ExactAssemblyScope scope(&masm, aarch32::kA32InstructionSizeInBytes);
    __ nop();
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(ExactAssemblyScope_basic_64) {
  aarch64::MacroAssembler masm;

  {
    ExactAssemblyScope scope(&masm, aarch64::kInstructionSize);
    __ nop();
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST_A32(ExactAssemblyScope_Open_32) {
  aarch32::MacroAssembler masm;

  {
    ExactAssemblyScope scope;
    __ Mov(aarch32::r0, 0);
    scope.Open(&masm, aarch32::kA32InstructionSizeInBytes);
    __ mov(aarch32::r1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(ExactAssemblyScope_Open_64) {
  aarch64::MacroAssembler masm;

  {
    ExactAssemblyScope scope;
    __ Mov(aarch64::x0, 0);
    scope.Open(&masm, aarch64::kInstructionSize);
    __ movz(aarch64::x1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST_A32(ExactAssemblyScope_Close_32) {
  aarch32::MacroAssembler masm;

  {
    CodeBufferCheckScope scope(&masm, aarch32::kA32InstructionSizeInBytes);
    __ mov(aarch32::r0, 0);
    scope.Close();
    __ Mov(aarch32::r1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(ExactAssemblyScope_Close_64) {
  aarch64::MacroAssembler masm;

  {
    CodeBufferCheckScope scope(&masm, aarch64::kInstructionSize);
    __ movz(aarch64::x0, 0);
    scope.Close();
    __ Mov(aarch64::x1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST_A32(ExactAssemblyScope_Open_Close_32) {
  aarch32::MacroAssembler masm;

  {
    ExactAssemblyScope scope;
    __ Mov(aarch32::r0, 0);
    scope.Open(&masm, aarch32::kA32InstructionSizeInBytes);
    __ mov(aarch32::r1, 1);
    scope.Close();
    __ Mov(aarch32::r2, 2);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(ExactAssemblyScope_Open_Close_64) {
  aarch64::MacroAssembler masm;

  {
    ExactAssemblyScope scope;
    __ Mov(aarch64::x0, 0);
    scope.Open(&masm, aarch64::kInstructionSize);
    __ movz(aarch64::x1, 1);
    scope.Close();
    __ Mov(aarch64::x2, 2);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST_A32(ExactAssemblyScope_32) {
  aarch32::MacroAssembler masm;

  // By default macro instructions are allowed.
  VIXL_ASSERT(masm.AllowMacroInstructions());
  {
    ExactAssemblyScope scope1(&masm, 2 * aarch32::kA32InstructionSizeInBytes);
    VIXL_ASSERT(!masm.AllowMacroInstructions());
    __ nop();
    {
      ExactAssemblyScope scope2(&masm,
                                1 * aarch32::kA32InstructionSizeInBytes);
      VIXL_ASSERT(!masm.AllowMacroInstructions());
      __ nop();
    }
    VIXL_ASSERT(!masm.AllowMacroInstructions());
  }
  VIXL_ASSERT(masm.AllowMacroInstructions());

  {
    ExactAssemblyScope scope(&masm, 2 * aarch32::kA32InstructionSizeInBytes);
    __ add(aarch32::r0, aarch32::r0, aarch32::r0);
    __ sub(aarch32::r0, aarch32::r0, aarch32::r0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(ExactAssemblyScope_64) {
  aarch64::MacroAssembler masm;

  // By default macro instructions are allowed.
  VIXL_ASSERT(masm.AllowMacroInstructions());
  {
    ExactAssemblyScope scope1(&masm, 2 * aarch64::kInstructionSize);
    VIXL_ASSERT(!masm.AllowMacroInstructions());
    __ nop();
    {
      ExactAssemblyScope scope2(&masm, 1 * aarch64::kInstructionSize);
      VIXL_ASSERT(!masm.AllowMacroInstructions());
      __ nop();
    }
    VIXL_ASSERT(!masm.AllowMacroInstructions());
  }
  VIXL_ASSERT(masm.AllowMacroInstructions());

  {
    ExactAssemblyScope scope(&masm, 2 * aarch64::kInstructionSize);
    __ add(aarch64::x0, aarch64::x0, aarch64::x0);
    __ sub(aarch64::x0, aarch64::x0, aarch64::x0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST_A32(ExactAssemblyScope_scope_with_pools_32) {
  aarch32::MacroAssembler masm;

  ASSERT_LITERAL_POOL_SIZE_32(0);

  __ Ldrd(aarch32::r0, aarch32::r1, 0x1234567890abcdef);

  ASSERT_LITERAL_POOL_SIZE_32(8);

  const int32_t kLdrdRange = 255;
  const int32_t n_nops =
      (kLdrdRange / aarch32::kA32InstructionSizeInBytes) + 1;
  {
    // The literal pool should be generated when opening this scope, as
    // otherwise the `Ldrd` will run out of range when we generate the `nop`
    // instructions below.
    ExactAssemblyScope scope(&masm,
                             n_nops * aarch32::kA32InstructionSizeInBytes);

    // Although the literal pool size must be zero at this point, we do not
    // check it here, because we want this regression test to fail while or
    // after we generate the nops.

    for (int32_t i = 0; i < n_nops; ++i) {
      __ nop();
    }
  }

  ASSERT_LITERAL_POOL_SIZE_32(0);

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(ExactAssemblyScope_scope_with_pools_64) {
  aarch64::MacroAssembler masm;

  ASSERT_LITERAL_POOL_SIZE_64(0);

  __ Ldr(aarch64::x10, 0x1234567890abcdef);

  ASSERT_LITERAL_POOL_SIZE_64(8);

  const int64_t n_nops =
      aarch64::kMaxLoadLiteralRange / aarch64::kInstructionSize;
  {
    // The literal pool should be generated when opening this scope, as
    // otherwise the `Ldr` will run out of range when we generate the `nop`
    // instructions below.
    ExactAssemblyScope scope(&masm, n_nops * aarch64::kInstructionSize);

    // Although the literal pool size must be zero at this point, we do not
    // check it here, because we want this regression test to fail while or
    // after we generate the nops.

    for (int64_t i = 0; i < n_nops; ++i) {
      __ nop();
    }
  }

  ASSERT_LITERAL_POOL_SIZE_64(0);

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


}  // namespace vixl