/*
 * This file is part of the Micro Python project, http://micropython.org/
 *
 * The MIT License (MIT)
 *
 * Copyright (c) 2013, 2014 Damien P. George
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include <stdio.h>
#include <assert.h>
#include <string.h>

#include "py/mpconfig.h"

// wrapper around everything in this file
#if MICROPY_EMIT_THUMB || MICROPY_EMIT_INLINE_THUMB

#include "py/asmthumb.h"

#define UNSIGNED_FIT8(x) (((x) & 0xffffff00) == 0)
#define UNSIGNED_FIT16(x) (((x) & 0xffff0000) == 0)
#define SIGNED_FIT8(x) ((((x) & 0xffffff80) == 0) || (((x) & 0xffffff80) == 0xffffff80))
#define SIGNED_FIT9(x) ((((x) & 0xffffff00) == 0) || (((x) & 0xffffff00) == 0xffffff00))
#define SIGNED_FIT12(x) ((((x) & 0xfffff800) == 0) || (((x) & 0xfffff800) == 0xfffff800))
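// For reference, with 32-bit values these accept the following signed ranges:
//   SIGNED_FIT8: -128..127    SIGNED_FIT9: -256..255    SIGNED_FIT12: -2048..2047
// e.g. SIGNED_FIT9(255) and SIGNED_FIT9(-256) hold, but SIGNED_FIT9(256) does not.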

struct _asm_thumb_t {
    mp_uint_t pass;
    mp_uint_t code_offset;
    mp_uint_t code_size;
    byte *code_base;
    byte dummy_data[4];

    mp_uint_t max_num_labels;
    mp_uint_t *label_offsets;
    mp_uint_t push_reglist;
    mp_uint_t stack_adjust;
};

asm_thumb_t *asm_thumb_new(uint max_num_labels) {
    asm_thumb_t *as;

    as = m_new0(asm_thumb_t, 1);
    as->max_num_labels = max_num_labels;
    as->label_offsets = m_new(mp_uint_t, max_num_labels);

    return as;
}

void asm_thumb_free(asm_thumb_t *as, bool free_code) {
    if (free_code) {
        MP_PLAT_FREE_EXEC(as->code_base, as->code_size);
    }
    m_del(mp_uint_t, as->label_offsets, as->max_num_labels);
    m_del_obj(asm_thumb_t, as);
}

void asm_thumb_start_pass(asm_thumb_t *as, uint pass) {
    if (pass == ASM_THUMB_PASS_COMPUTE) {
        memset(as->label_offsets, -1, as->max_num_labels * sizeof(mp_uint_t));
    } else if (pass == ASM_THUMB_PASS_EMIT) {
        MP_PLAT_ALLOC_EXEC(as->code_offset, (void**)&as->code_base, &as->code_size);
        if (as->code_base == NULL) {
            assert(0);
        }
        //printf("code_size: %u\n", as->code_size);
    }
    as->pass = pass;
    as->code_offset = 0;
}

void asm_thumb_end_pass(asm_thumb_t *as) {
    // could check labels are resolved...
}

// all functions must go through this one to emit bytes
// if as->pass < ASM_THUMB_PASS_EMIT, this function only returns a 4-byte dummy buffer
STATIC byte *asm_thumb_get_cur_to_write_bytes(asm_thumb_t *as, int num_bytes_to_write) {
    //printf("emit %d\n", num_bytes_to_write);
    if (as->pass < ASM_THUMB_PASS_EMIT) {
        as->code_offset += num_bytes_to_write;
        return as->dummy_data;
    } else {
        assert(as->code_offset + num_bytes_to_write <= as->code_size);
        byte *c = as->code_base + as->code_offset;
        as->code_offset += num_bytes_to_write;
        return c;
    }
}

uint asm_thumb_get_code_size(asm_thumb_t *as) {
    return as->code_size;
}

void *asm_thumb_get_code(asm_thumb_t *as) {
    return as->code_base;
}

/*
STATIC void asm_thumb_write_byte_1(asm_thumb_t *as, byte b1) {
    byte *c = asm_thumb_get_cur_to_write_bytes(as, 1);
    c[0] = b1;
}
*/

/*
#define IMM32_L0(x) ((x) & 0xff)
#define IMM32_L1(x) (((x) >> 8) & 0xff)
#define IMM32_L2(x) (((x) >> 16) & 0xff)
#define IMM32_L3(x) (((x) >> 24) & 0xff)

STATIC void asm_thumb_write_word32(asm_thumb_t *as, int w32) {
    byte *c = asm_thumb_get_cur_to_write_bytes(as, 4);
    c[0] = IMM32_L0(w32);
    c[1] = IMM32_L1(w32);
    c[2] = IMM32_L2(w32);
    c[3] = IMM32_L3(w32);
}
*/

// rlolist is a bit map indicating desired lo-registers
#define OP_PUSH_RLIST(rlolist)      (0xb400 | (rlolist))
#define OP_PUSH_RLIST_LR(rlolist)   (0xb400 | 0x0100 | (rlolist))
#define OP_POP_RLIST(rlolist)       (0xbc00 | (rlolist))
#define OP_POP_RLIST_PC(rlolist)    (0xbc00 | 0x0100 | (rlolist))

#define OP_ADD_SP(num_words) (0xb000 | (num_words))
#define OP_SUB_SP(num_words) (0xb080 | (num_words))

// locals:
//  - stored on the stack in ascending order
//  - numbered 0 through num_locals-1
//  - SP points to first local
//
//  | SP
//  v
//  l0  l1  l2  ...  l(n-1)
//  ^                ^
//  | low address    | high address in RAM
//
void asm_thumb_entry(asm_thumb_t *as, int num_locals) {
    // work out what to push and how many extra words to reserve on the stack
    // so that we have enough room for all locals and SP stays aligned on an 8-byte boundary
    // we push extra regs (r1, r2, r3) to help do the stack adjustment
    // we probably should just always subtract from sp, since this would be more efficient
    // for push rlist, lowest numbered register at the lowest address
    uint reglist;
    uint stack_adjust;
    if (num_locals < 0) {
        num_locals = 0;
    }
    // don't pop r0 because it's used for return value
    switch (num_locals) {
        case 0:
        case 1:
            reglist = 0xf2;
            stack_adjust = 0;
            break;

        case 2:
        case 3:
            reglist = 0xfe;
            stack_adjust = 0;
            break;

        default:
            reglist = 0xfe;
            stack_adjust = ((num_locals - 3) + 1) & (~1);
            break;
    }
    asm_thumb_op16(as, OP_PUSH_RLIST_LR(reglist));
    if (stack_adjust > 0) {
        asm_thumb_op16(as, OP_SUB_SP(stack_adjust));
    }
    as->push_reglist = reglist;
    as->stack_adjust = stack_adjust;
}
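// Illustrative prologue for num_locals == 5: the code above selects
// reglist = 0xfe (r1-r7) and stack_adjust = ((5 - 3) + 1) & ~1 = 2, so it emits
//   push {r1-r7, lr}   ; OP_PUSH_RLIST_LR(0xfe) == 0xb5fe
//   sub  sp, #8        ; OP_SUB_SP(2) == 0xb082 (the immediate counts 4-byte words)
// leaving l0..l4 at [sp, #0]..[sp, #16]: the two freshly reserved words plus the
// slots of the saved scratch regs r1-r3 serve as the five local slots.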

void asm_thumb_exit(asm_thumb_t *as) {
    if (as->stack_adjust > 0) {
        asm_thumb_op16(as, OP_ADD_SP(as->stack_adjust));
    }
    asm_thumb_op16(as, OP_POP_RLIST_PC(as->push_reglist));
}

void asm_thumb_label_assign(asm_thumb_t *as, uint label) {
    assert(label < as->max_num_labels);
    if (as->pass < ASM_THUMB_PASS_EMIT) {
        // assign label offset
        assert(as->label_offsets[label] == -1);
        as->label_offsets[label] = as->code_offset;
    } else {
        // ensure label offset has not changed from PASS_COMPUTE to PASS_EMIT
        //printf("l%d: (at %d=%ld)\n", label, as->label_offsets[label], as->code_offset);
        assert(as->label_offsets[label] == as->code_offset);
    }
}

void asm_thumb_align(asm_thumb_t* as, uint align) {
    // TODO fill unused data with NOPs?
    as->code_offset = (as->code_offset + align - 1) & (~(align - 1));
}

void asm_thumb_data(asm_thumb_t* as, uint bytesize, uint val) {
    byte *c = asm_thumb_get_cur_to_write_bytes(as, bytesize);
    // only write to the buffer in the emit pass (otherwise we overflow dummy_data)
    if (as->pass == ASM_THUMB_PASS_EMIT) {
        // little endian
        for (uint i = 0; i < bytesize; i++) {
            *c++ = val;
            val >>= 8;
        }
    }
}
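// Example: asm_thumb_data(as, 4, 0x12345678) appends the bytes 78 56 34 12.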

STATIC mp_uint_t get_label_dest(asm_thumb_t *as, uint label) {
    assert(label < as->max_num_labels);
    return as->label_offsets[label];
}

void asm_thumb_op16(asm_thumb_t *as, uint op) {
    byte *c = asm_thumb_get_cur_to_write_bytes(as, 2);
    // little endian
    c[0] = op;
    c[1] = op >> 8;
}

void asm_thumb_op32(asm_thumb_t *as, uint op1, uint op2) {
    byte *c = asm_thumb_get_cur_to_write_bytes(as, 4);
    // little endian, op1 then op2
    c[0] = op1;
    c[1] = op1 >> 8;
    c[2] = op2;
    c[3] = op2 >> 8;
}
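// Byte-order example: asm_thumb_op16(as, 0xb500) emits the bytes 00 b5, and
// asm_thumb_op32(as, 0xf240, 0x0000) (movw r0, #0) emits 40 f2 00 00; a 32-bit
// Thumb-2 instruction is two little-endian halfwords, not one little-endian
// 32-bit word.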

#define OP_FORMAT_2(op, rlo_dest, rlo_src, src_b) ((op) | ((src_b) << 6) | ((rlo_src) << 3) | (rlo_dest))

void asm_thumb_format_2(asm_thumb_t *as, uint op, uint rlo_dest, uint rlo_src, int src_b) {
    assert(rlo_dest < ASM_THUMB_REG_R8);
    assert(rlo_src < ASM_THUMB_REG_R8);
    asm_thumb_op16(as, OP_FORMAT_2(op, rlo_dest, rlo_src, src_b));
}

#define OP_FORMAT_3(op, rlo, i8) ((op) | ((rlo) << 8) | (i8))

void asm_thumb_format_3(asm_thumb_t *as, uint op, uint rlo, int i8) {
    assert(rlo < ASM_THUMB_REG_R8);
    asm_thumb_op16(as, OP_FORMAT_3(op, rlo, i8));
}

#define OP_FORMAT_4(op, rlo_dest, rlo_src) ((op) | ((rlo_src) << 3) | (rlo_dest))

void asm_thumb_format_4(asm_thumb_t *as, uint op, uint rlo_dest, uint rlo_src) {
    assert(rlo_dest < ASM_THUMB_REG_R8);
    assert(rlo_src < ASM_THUMB_REG_R8);
    asm_thumb_op16(as, OP_FORMAT_4(op, rlo_dest, rlo_src));
}

#define OP_FORMAT_9_10(op, rlo_dest, rlo_base, offset) ((op) | (((offset) << 6) & 0x07c0) | ((rlo_base) << 3) | (rlo_dest))

void asm_thumb_format_9_10(asm_thumb_t *as, uint op, uint rlo_dest, uint rlo_base, uint offset) {
    asm_thumb_op16(as, OP_FORMAT_9_10(op, rlo_dest, rlo_base, offset));
}

void asm_thumb_mov_reg_reg(asm_thumb_t *as, uint reg_dest, uint reg_src) {
    uint op_lo;
    if (reg_src < 8) {
        op_lo = reg_src << 3;
    } else {
        op_lo = 0x40 | ((reg_src - 8) << 3);
    }
    if (reg_dest < 8) {
        op_lo |= reg_dest;
    } else {
        op_lo |= 0x80 | (reg_dest - 8);
    }
    // mov reg_dest, reg_src
    asm_thumb_op16(as, 0x4600 | op_lo);
}
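// Example: asm_thumb_mov_reg_reg(as, 10, 0) emits 0x4682 (mov r10, r0); the
// extra bits set for registers >= 8 are the high-register selector bits of the
// hi-register MOV encoding.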

#define OP_MOVW (0xf240)
#define OP_MOVT (0xf2c0)

// if loading lo half with movw, the i16 value will be zero extended into the r32 register!
STATIC void asm_thumb_mov_reg_i16(asm_thumb_t *as, uint mov_op, uint reg_dest, int i16_src) {
    assert(reg_dest < ASM_THUMB_REG_R15);
    // mov[wt] reg_dest, #i16_src
    asm_thumb_op32(as, mov_op | ((i16_src >> 1) & 0x0400) | ((i16_src >> 12) & 0xf), ((i16_src << 4) & 0x7000) | (reg_dest << 8) | (i16_src & 0xff));
}
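// Worked example: asm_thumb_mov_reg_i16(as, OP_MOVW, 0, 0x1234) encodes
// movw r0, #0x1234 as the halfwords 0xf241 0x2034; the 16-bit immediate is
// split across the imm4:i:imm3:imm8 fields of the 32-bit encoding.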

// the i16_src value will be zero extended into the r32 register!
void asm_thumb_movw_reg_i16(asm_thumb_t *as, uint reg_dest, int i16_src) {
    asm_thumb_mov_reg_i16(as, OP_MOVW, reg_dest, i16_src);
}

// the i16_src value is written to the top 16 bits of the register; the low 16 bits are left unchanged
void asm_thumb_movt_reg_i16(asm_thumb_t *as, uint reg_dest, int i16_src) {
    asm_thumb_mov_reg_i16(as, OP_MOVT, reg_dest, i16_src);
}

#define OP_B_N(byte_offset) (0xe000 | (((byte_offset) >> 1) & 0x07ff))

void asm_thumb_b_n(asm_thumb_t *as, uint label) {
    mp_uint_t dest = get_label_dest(as, label);
    mp_int_t rel = dest - as->code_offset;
    rel -= 4; // account for instruction prefetch, PC is 4 bytes ahead of this instruction
    if (SIGNED_FIT12(rel)) {
        asm_thumb_op16(as, OP_B_N(rel));
    } else {
        printf("asm_thumb_b_n: branch does not fit in 12 bits\n");
    }
}

#define OP_BCC_N(cond, byte_offset) (0xd000 | ((cond) << 8) | (((byte_offset) >> 1) & 0x00ff))

void asm_thumb_bcc_n(asm_thumb_t *as, int cond, uint label) {
    mp_uint_t dest = get_label_dest(as, label);
    mp_int_t rel = dest - as->code_offset;
    rel -= 4; // account for instruction prefetch, PC is 4 bytes ahead of this instruction
    if (SIGNED_FIT9(rel)) {
        asm_thumb_op16(as, OP_BCC_N(cond, rel));
    } else {
        printf("asm_thumb_bcc_n: branch does not fit in 9 bits\n");
    }
}

void asm_thumb_mov_reg_i32(asm_thumb_t *as, uint reg_dest, mp_uint_t i32) {
    // movw+movt does it in 8 bytes
    // ldr [pc, #offset] plus an inline data word does it in 6 bytes, but the data word might end up out of range of the pc-relative offset

    asm_thumb_mov_reg_i16(as, OP_MOVW, reg_dest, i32);
    asm_thumb_mov_reg_i16(as, OP_MOVT, reg_dest, i32 >> 16);
}

void asm_thumb_mov_reg_i32_optimised(asm_thumb_t *as, uint reg_dest, int i32) {
    if (reg_dest < 8 && UNSIGNED_FIT8(i32)) {
        asm_thumb_mov_rlo_i8(as, reg_dest, i32);
    } else if (UNSIGNED_FIT16(i32)) {
        asm_thumb_mov_reg_i16(as, OP_MOVW, reg_dest, i32);
    } else {
        asm_thumb_mov_reg_i32(as, reg_dest, i32);
    }
}
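// Selection example: i32 == 5 uses the 2-byte mov rlo, #imm8 form, i32 == 0x1234
// uses a 4-byte movw, and i32 == 0x12345678 falls back to the 8-byte movw+movt pair.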

// i32 is stored as a full word in the code, and aligned to machine-word boundary
// TODO this is very inefficient, improve it!
void asm_thumb_mov_reg_i32_aligned(asm_thumb_t *as, uint reg_dest, int i32) {
    // align on machine-word + 2
    if ((as->code_offset & 3) == 0) {
        asm_thumb_op16(as, ASM_THUMB_OP_NOP);
    }
    // jump over the 4-byte i32 value (branch target is PC + 2, and PC reads as this instruction's address + 4)
    asm_thumb_op16(as, OP_B_N(2));
    // store i32 on machine-word aligned boundary
    asm_thumb_data(as, 4, i32);
    // do the actual load of the i32 value
    asm_thumb_mov_reg_i32_optimised(as, reg_dest, i32);
}
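// The emitted sequence is therefore: an optional nop (for alignment), a 2-byte
// branch that skips the next 4 bytes, the raw i32 word, and then the normal
// movw/movt load, so the literal word always lands on a 4-byte boundary as the
// function name promises.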

#define OP_STR_TO_SP_OFFSET(rlo_dest, word_offset) (0x9000 | ((rlo_dest) << 8) | ((word_offset) & 0x00ff))
#define OP_LDR_FROM_SP_OFFSET(rlo_dest, word_offset) (0x9800 | ((rlo_dest) << 8) | ((word_offset) & 0x00ff))

void asm_thumb_mov_local_reg(asm_thumb_t *as, int local_num, uint rlo_src) {
    assert(rlo_src < ASM_THUMB_REG_R8);
    int word_offset = local_num;
    assert(as->pass < ASM_THUMB_PASS_EMIT || word_offset >= 0);
    asm_thumb_op16(as, OP_STR_TO_SP_OFFSET(rlo_src, word_offset));
}

void asm_thumb_mov_reg_local(asm_thumb_t *as, uint rlo_dest, int local_num) {
    assert(rlo_dest < ASM_THUMB_REG_R8);
    int word_offset = local_num;
    assert(as->pass < ASM_THUMB_PASS_EMIT || word_offset >= 0);
    asm_thumb_op16(as, OP_LDR_FROM_SP_OFFSET(rlo_dest, word_offset));
}
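// Example: asm_thumb_mov_local_reg(as, 3, 0) emits OP_STR_TO_SP_OFFSET(0, 3)
// == 0x9003, i.e. str r0, [sp, #12]; the word_offset counts 4-byte stack slots.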

#define OP_ADD_REG_SP_OFFSET(rlo_dest, word_offset) (0xa800 | ((rlo_dest) << 8) | ((word_offset) & 0x00ff))

void asm_thumb_mov_reg_local_addr(asm_thumb_t *as, uint rlo_dest, int local_num) {
    assert(rlo_dest < ASM_THUMB_REG_R8);
    int word_offset = local_num;
    assert(as->pass < ASM_THUMB_PASS_EMIT || word_offset >= 0);
    asm_thumb_op16(as, OP_ADD_REG_SP_OFFSET(rlo_dest, word_offset));
}

// this could be wrong, because it should have a range of +/- 16MiB...
#define OP_BW_HI(byte_offset) (0xf000 | (((byte_offset) >> 12) & 0x07ff))
#define OP_BW_LO(byte_offset) (0xb800 | (((byte_offset) >> 1) & 0x07ff))

void asm_thumb_b_label(asm_thumb_t *as, uint label) {
    mp_uint_t dest = get_label_dest(as, label);
    mp_int_t rel = dest - as->code_offset;
    rel -= 4; // account for instruction prefetch, PC is 4 bytes ahead of this instruction
    if (dest != -1 && rel <= -4) {
        // is a backwards jump, so we know the size of the jump on the first pass
        // calculate rel assuming 12 bit relative jump
        if (SIGNED_FIT12(rel)) {
            asm_thumb_op16(as, OP_B_N(rel));
        } else {
            goto large_jump;
        }
    } else {
        // is a forwards jump, so need to assume it's large
        large_jump:
        asm_thumb_op32(as, OP_BW_HI(rel), OP_BW_LO(rel));
    }
}

// all this bit arithmetic needs coverage testing!
#define OP_BCC_W_HI(cond, byte_offset) (0xf000 | ((cond) << 6) | (((byte_offset) >> 10) & 0x0400) | (((byte_offset) >> 14) & 0x003f))
#define OP_BCC_W_LO(byte_offset) (0x8000 | ((byte_offset) & 0x2000) | (((byte_offset) >> 1) & 0x0fff))

void asm_thumb_bcc_label(asm_thumb_t *as, int cond, uint label) {
    mp_uint_t dest = get_label_dest(as, label);
    mp_int_t rel = dest - as->code_offset;
    rel -= 4; // account for instruction prefetch, PC is 4 bytes ahead of this instruction
    if (dest != -1 && rel <= -4) {
        // is a backwards jump, so we know the size of the jump on the first pass
        // calculate rel assuming 9 bit relative jump
        if (SIGNED_FIT9(rel)) {
            asm_thumb_op16(as, OP_BCC_N(cond, rel));
        } else {
            goto large_jump;
        }
    } else {
        // is a forwards jump, so need to assume it's large
        large_jump:
        asm_thumb_op32(as, OP_BCC_W_HI(cond, rel), OP_BCC_W_LO(rel));
    }
}

#define OP_BLX(reg) (0x4780 | ((reg) << 3))
#define OP_SVC(arg) (0xdf00 | (arg))

void asm_thumb_bl_ind(asm_thumb_t *as, void *fun_ptr, uint fun_id, uint reg_temp) {
    /* TODO make this use less bytes
    uint rlo_base = ASM_THUMB_REG_R3;
    uint rlo_dest = ASM_THUMB_REG_R7;
    uint word_offset = 4;
    asm_thumb_op16(as, 0x0000);
    asm_thumb_op16(as, 0x6800 | (word_offset << 6) | (rlo_base << 3) | rlo_dest); // ldr rlo_dest, [rlo_base, #offset]
    asm_thumb_op16(as, 0x4780 | (ASM_THUMB_REG_R9 << 3)); // blx reg
    */

    if (fun_id < 32) {
        // load ptr to function from table, indexed by fun_id (must be in range 0-31); 4 bytes
        asm_thumb_op16(as, OP_FORMAT_9_10(ASM_THUMB_FORMAT_9_LDR | ASM_THUMB_FORMAT_9_WORD_TRANSFER, reg_temp, ASM_THUMB_REG_R7, fun_id));
        asm_thumb_op16(as, OP_BLX(reg_temp));
    } else {
        // load ptr to function into register using immediate (movw+movt), then blx; 10 bytes
        asm_thumb_mov_reg_i32(as, reg_temp, (mp_uint_t)fun_ptr);
        asm_thumb_op16(as, OP_BLX(reg_temp));
    }
}
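// In summary, this emits either
//   ldr reg_temp, [r7, #fun_id*4]; blx reg_temp    (4 bytes, fun_id < 32)
//   movw+movt reg_temp, #fun_ptr;  blx reg_temp    (10 bytes otherwise)
// so the caller is expected to keep the base of the function-pointer table in r7.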

#endif // MICROPY_EMIT_THUMB || MICROPY_EMIT_INLINE_THUMB