
Commit d970b04

[update] micropython/py folder
1 parent 32c1772 commit d970b04


104 files changed, 5227 insertions(+), 1683 deletions(-)

py/asmarm.c  (+16 -6)

@@ -40,7 +40,11 @@
 
 void asm_arm_end_pass(asm_arm_t *as) {
     if (as->base.pass == MP_ASM_PASS_EMIT) {
-        #ifdef __arm__
+        #if defined(__linux__) && defined(__GNUC__)
+        char *start = mp_asm_base_get_code(&as->base);
+        char *end = start + mp_asm_base_get_code_size(&as->base);
+        __builtin___clear_cache(start, end);
+        #elif defined(__arm__)
         // flush I- and D-cache
         asm volatile(
             "0:"

@@ -197,18 +201,24 @@ void asm_arm_mov_reg_reg(asm_arm_t *as, uint reg_dest, uint reg_src) {
     emit_al(as, asm_arm_op_mov_reg(reg_dest, reg_src));
 }
 
-void asm_arm_mov_reg_i32(asm_arm_t *as, uint rd, int imm) {
+size_t asm_arm_mov_reg_i32(asm_arm_t *as, uint rd, int imm) {
+    // Insert immediate into code and jump over it
+    emit_al(as, 0x59f0000 | (rd << 12)); // ldr rd, [pc]
+    emit_al(as, 0xa000000); // b pc
+    size_t loc = mp_asm_base_get_code_pos(&as->base);
+    emit(as, imm);
+    return loc;
+}
+
+void asm_arm_mov_reg_i32_optimised(asm_arm_t *as, uint rd, int imm) {
     // TODO: There are more variants of immediate values
     if ((imm & 0xFF) == imm) {
         emit_al(as, asm_arm_op_mov_imm(rd, imm));
     } else if (imm < 0 && imm >= -256) {
         // mvn is "move not", not "move negative"
         emit_al(as, asm_arm_op_mvn_imm(rd, ~imm));
     } else {
-        //Insert immediate into code and jump over it
-        emit_al(as, 0x59f0000 | (rd << 12)); // ldr rd, [pc]
-        emit_al(as, 0xa000000); // b pc
-        emit(as, imm);
+        asm_arm_mov_reg_i32(as, rd, imm);
     }
 }
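
Note: asm_arm_mov_reg_i32() now always emits a full in-line word for the immediate and returns that word's offset within the code buffer. How the returned offset is consumed is not shown in this section, so the following caller sketch is an assumption; the helper names and the placeholder value are made up.

#include <stdint.h>
#include "py/asmbase.h"
#include "py/asmarm.h"

// Hypothetical fix-up sketch (not part of this commit): emit a placeholder
// immediate, remember where its in-line word lives, then overwrite just that
// word once the real value is known.
size_t emit_placeholder(asm_arm_t *as) {
    return asm_arm_mov_reg_i32(as, ASM_ARM_REG_R0, 0); // returns offset of the emitted word
}

void patch_word(asm_arm_t *as, size_t loc, uint32_t value) {
    // Only meaningful on the emit pass, once the code buffer exists.
    uint8_t *code = (uint8_t *)mp_asm_base_get_code(&as->base);
    *(uint32_t *)(code + loc) = value;
}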

py/asmarm.h  (+5 -2)

@@ -81,7 +81,8 @@ void asm_arm_bkpt(asm_arm_t *as);
 
 // mov
 void asm_arm_mov_reg_reg(asm_arm_t *as, uint reg_dest, uint reg_src);
-void asm_arm_mov_reg_i32(asm_arm_t *as, uint rd, int imm);
+size_t asm_arm_mov_reg_i32(asm_arm_t *as, uint rd, int imm);
+void asm_arm_mov_reg_i32_optimised(asm_arm_t *as, uint rd, int imm);
 void asm_arm_mov_local_reg(asm_arm_t *as, int local_num, uint rd);
 void asm_arm_mov_reg_local(asm_arm_t *as, uint rd, int local_num);
 void asm_arm_setcc_reg(asm_arm_t *as, uint rd, uint cond);

@@ -177,7 +178,9 @@ void asm_arm_bx_reg(asm_arm_t *as, uint reg_src);
 #define ASM_CALL_IND(as, idx) asm_arm_bl_ind(as, idx, ASM_ARM_REG_R3)
 
 #define ASM_MOV_LOCAL_REG(as, local_num, reg_src) asm_arm_mov_local_reg((as), (local_num), (reg_src))
-#define ASM_MOV_REG_IMM(as, reg_dest, imm) asm_arm_mov_reg_i32((as), (reg_dest), (imm))
+#define ASM_MOV_REG_IMM(as, reg_dest, imm) asm_arm_mov_reg_i32_optimised((as), (reg_dest), (imm))
+#define ASM_MOV_REG_IMM_FIX_U16(as, reg_dest, imm) asm_arm_mov_reg_i32((as), (reg_dest), (imm))
+#define ASM_MOV_REG_IMM_FIX_WORD(as, reg_dest, imm) asm_arm_mov_reg_i32((as), (reg_dest), (imm))
 #define ASM_MOV_REG_LOCAL(as, reg_dest, local_num) asm_arm_mov_reg_local((as), (reg_dest), (local_num))
 #define ASM_MOV_REG_REG(as, reg_dest, reg_src) asm_arm_mov_reg_reg((as), (reg_dest), (reg_src))
 #define ASM_MOV_REG_LOCAL_ADDR(as, reg_dest, local_num) asm_arm_mov_reg_local_addr((as), (reg_dest), (local_num))
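
Note: the new ASM_MOV_REG_IMM_FIX_U16 / ASM_MOV_REG_IMM_FIX_WORD macros map to the un-optimised asm_arm_mov_reg_i32() on ARM, whose encoding (and returned offset) does not depend on the value, while ASM_MOV_REG_IMM keeps the size-optimised path. The intended use is not visible in this diff; a hypothetical helper sketch, assuming the fixed-size form exists so the value can be rewritten in place later:

#include "py/asmarm.h"

// Hypothetical helper (name and purpose assumed, not in this commit): emit a
// value whose encoding must keep a fixed size/position so it can be patched
// after emission, and return the offset of the patchable field.
size_t emit_patchable_u16(asm_arm_t *as, uint reg, uint16_t placeholder) {
    return ASM_MOV_REG_IMM_FIX_U16(as, reg, placeholder);
}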

py/asmbase.c  (+2 -2)

@@ -31,7 +31,7 @@
 #include "py/misc.h"
 #include "py/asmbase.h"
 
-#if MICROPY_EMIT_NATIVE || MICROPY_EMIT_INLINE_ASM
+#if MICROPY_EMIT_MACHINE_CODE
 
 void mp_asm_base_init(mp_asm_base_t *as, size_t max_num_labels) {
     as->max_num_labels = max_num_labels;

@@ -99,4 +99,4 @@ void mp_asm_base_data(mp_asm_base_t* as, unsigned int bytesize, uintptr_t val) {
     }
 }
 
-#endif // MICROPY_EMIT_NATIVE || MICROPY_EMIT_INLINE_ASM
+#endif // MICROPY_EMIT_MACHINE_CODE
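
Note: the guard now uses a single MICROPY_EMIT_MACHINE_CODE option. Its definition is not part of this section; presumably it is a convenience flag along these lines (sketch only, assumed to live in py/mpconfig.h):

// Assumed definition (not shown in this diff): one flag that is true whenever
// any machine-code emitter is enabled.
#ifndef MICROPY_EMIT_MACHINE_CODE
#define MICROPY_EMIT_MACHINE_CODE (MICROPY_EMIT_NATIVE || MICROPY_EMIT_INLINE_ASM)
#endif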

py/asmbase.h  (+1 -1)

@@ -60,7 +60,7 @@ static inline size_t mp_asm_base_get_code_size(mp_asm_base_t *as) {
 
 static inline void *mp_asm_base_get_code(mp_asm_base_t *as) {
     #if defined(MP_PLAT_COMMIT_EXEC)
-    return MP_PLAT_COMMIT_EXEC(as->code_base, as->code_size);
+    return MP_PLAT_COMMIT_EXEC(as->code_base, as->code_size, NULL);
     #else
     return as->code_base;
     #endif
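
Note: MP_PLAT_COMMIT_EXEC gains a third argument, passed as NULL at this call site; what it carries is not visible in this hunk. A port defining the hook would need to accept it, along the lines of this hypothetical stub (function and parameter names are placeholders):

// Hypothetical port-side definition: commit the RAM buffer to executable
// memory and return the address to execute from.  The third argument is
// simply forwarded here, since this diff does not show its meaning.
void *port_commit_exec(void *buf, size_t len, void *extra);
#define MP_PLAT_COMMIT_EXEC(buf, len, extra) port_commit_exec((buf), (len), (extra))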

py/asmthumb.c  (+25 -2)

@@ -33,6 +33,8 @@
 // wrapper around everything in this file
 #if MICROPY_EMIT_THUMB || MICROPY_EMIT_INLINE_THUMB
 
+#include "py/mpstate.h"
+#include "py/persistentcode.h"
 #include "py/mphal.h"
 #include "py/asmthumb.h"
 

@@ -118,6 +120,21 @@ STATIC void asm_thumb_write_word32(asm_thumb_t *as, int w32) {
 void asm_thumb_entry(asm_thumb_t *as, int num_locals) {
     assert(num_locals >= 0);
 
+    // If this Thumb machine code is run from ARM state then add a prelude
+    // to switch to Thumb state for the duration of the function.
+    #if MICROPY_DYNAMIC_COMPILER || MICROPY_EMIT_ARM || (defined(__arm__) && !defined(__thumb2__))
+    #if MICROPY_DYNAMIC_COMPILER
+    if (mp_dynamic_compiler.native_arch == MP_NATIVE_ARCH_ARMV6)
+    #endif
+    {
+        asm_thumb_op32(as, 0x4010, 0xe92d); // push {r4, lr}
+        asm_thumb_op32(as, 0xe009, 0xe28f); // add lr, pc, 8 + 1
+        asm_thumb_op32(as, 0xff3e, 0xe12f); // blx lr
+        asm_thumb_op32(as, 0x4010, 0xe8bd); // pop {r4, lr}
+        asm_thumb_op32(as, 0xff1e, 0xe12f); // bx lr
+    }
+    #endif
+
     // work out what to push and how many extra spaces to reserve on stack
     // so that we have enough for all locals and it's aligned an 8-byte boundary
     // we push extra regs (r1, r2, r3) to help do the stack adjustment

@@ -225,10 +242,12 @@ void asm_thumb_mov_reg_reg(asm_thumb_t *as, uint reg_dest, uint reg_src) {
 }
 
 // if loading lo half with movw, the i16 value will be zero extended into the r32 register!
-void asm_thumb_mov_reg_i16(asm_thumb_t *as, uint mov_op, uint reg_dest, int i16_src) {
+size_t asm_thumb_mov_reg_i16(asm_thumb_t *as, uint mov_op, uint reg_dest, int i16_src) {
     assert(reg_dest < ASM_THUMB_REG_R15);
+    size_t loc = mp_asm_base_get_code_pos(&as->base);
     // mov[wt] reg_dest, #i16_src
     asm_thumb_op32(as, mov_op | ((i16_src >> 1) & 0x0400) | ((i16_src >> 12) & 0xf), ((i16_src << 4) & 0x7000) | (reg_dest << 8) | (i16_src & 0xff));
+    return loc;
 }
 
 #define OP_B_N(byte_offset) (0xe000 | (((byte_offset) >> 1) & 0x07ff))

@@ -271,12 +290,16 @@ bool asm_thumb_bl_label(asm_thumb_t *as, uint label) {
     return as->base.pass != MP_ASM_PASS_EMIT || SIGNED_FIT23(rel);
 }
 
-void asm_thumb_mov_reg_i32(asm_thumb_t *as, uint reg_dest, mp_uint_t i32) {
+size_t asm_thumb_mov_reg_i32(asm_thumb_t *as, uint reg_dest, mp_uint_t i32) {
     // movw, movt does it in 8 bytes
     // ldr [pc, #], dw does it in 6 bytes, but we might not reach to end of code for dw
 
+    size_t loc = mp_asm_base_get_code_pos(&as->base);
+
     asm_thumb_mov_reg_i16(as, ASM_THUMB_OP_MOVW, reg_dest, i32);
     asm_thumb_mov_reg_i16(as, ASM_THUMB_OP_MOVT, reg_dest, i32 >> 16);
+
+    return loc;
 }
 
 void asm_thumb_mov_reg_i32_optimised(asm_thumb_t *as, uint reg_dest, int i32) {
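
Note: the new entry prelude is written with asm_thumb_op32(), which stores its two halfwords low half first; assuming an ARM-state fetch therefore sees each pair as one 32-bit ARM instruction, the prelude saves r4/lr, computes the Thumb body address (+1 to set the Thumb bit), enters it with blx, and finally returns to the ARM-state caller. A small stand-alone sketch that reassembles the pairs used above:

// Sketch: reassemble the (lo, hi) halfword pairs emitted above into the 32-bit
// ARM instructions an ARM-state caller would execute.
#include <stdint.h>
#include <stdio.h>

int main(void) {
    static const struct { uint16_t lo, hi; const char *text; } prelude[] = {
        {0x4010, 0xe92d, "push {r4, lr}"},
        {0xe009, 0xe28f, "add lr, pc, #9   ; Thumb body address, +1 sets the Thumb bit"},
        {0xff3e, 0xe12f, "blx lr           ; run the Thumb body"},
        {0x4010, 0xe8bd, "pop {r4, lr}"},
        {0xff1e, 0xe12f, "bx lr            ; return to the ARM-state caller"},
    };
    for (size_t i = 0; i < sizeof(prelude) / sizeof(prelude[0]); ++i) {
        uint32_t word = ((uint32_t)prelude[i].hi << 16) | prelude[i].lo;
        printf("0x%08x  %s\n", (unsigned)word, prelude[i].text);
    }
    return 0;
}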

py/asmthumb.h  (+4 -2)

@@ -241,14 +241,14 @@ static inline void asm_thumb_ldrh_rlo_rlo_i5(asm_thumb_t *as, uint rlo_dest, uin
 #define ASM_THUMB_OP_MOVT (0xf2c0)
 
 void asm_thumb_mov_reg_reg(asm_thumb_t *as, uint reg_dest, uint reg_src);
-void asm_thumb_mov_reg_i16(asm_thumb_t *as, uint mov_op, uint reg_dest, int i16_src);
+size_t asm_thumb_mov_reg_i16(asm_thumb_t *as, uint mov_op, uint reg_dest, int i16_src);
 
 // these return true if the destination is in range, false otherwise
 bool asm_thumb_b_n_label(asm_thumb_t *as, uint label);
 bool asm_thumb_bcc_nw_label(asm_thumb_t *as, int cond, uint label, bool wide);
 bool asm_thumb_bl_label(asm_thumb_t *as, uint label);
 
-void asm_thumb_mov_reg_i32(asm_thumb_t *as, uint reg_dest, mp_uint_t i32_src); // convenience
+size_t asm_thumb_mov_reg_i32(asm_thumb_t *as, uint reg_dest, mp_uint_t i32_src); // convenience
 void asm_thumb_mov_reg_i32_optimised(asm_thumb_t *as, uint reg_dest, int i32_src); // convenience
 void asm_thumb_mov_local_reg(asm_thumb_t *as, int local_num_dest, uint rlo_src); // convenience
 void asm_thumb_mov_reg_local(asm_thumb_t *as, uint rlo_dest, int local_num); // convenience

@@ -315,6 +315,8 @@ void asm_thumb_bl_ind(asm_thumb_t *as, uint fun_id, uint reg_temp); // convenien
 
 #define ASM_MOV_LOCAL_REG(as, local_num, reg) asm_thumb_mov_local_reg((as), (local_num), (reg))
 #define ASM_MOV_REG_IMM(as, reg_dest, imm) asm_thumb_mov_reg_i32_optimised((as), (reg_dest), (imm))
+#define ASM_MOV_REG_IMM_FIX_U16(as, reg_dest, imm) asm_thumb_mov_reg_i16((as), ASM_THUMB_OP_MOVW, (reg_dest), (imm))
+#define ASM_MOV_REG_IMM_FIX_WORD(as, reg_dest, imm) asm_thumb_mov_reg_i32((as), (reg_dest), (imm))
 #define ASM_MOV_REG_LOCAL(as, reg_dest, local_num) asm_thumb_mov_reg_local((as), (reg_dest), (local_num))
 #define ASM_MOV_REG_REG(as, reg_dest, reg_src) asm_thumb_mov_reg_reg((as), (reg_dest), (reg_src))
 #define ASM_MOV_REG_LOCAL_ADDR(as, reg_dest, local_num) asm_thumb_mov_reg_local_addr((as), (reg_dest), (local_num))
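
Note: on Thumb, ASM_MOV_REG_IMM_FIX_U16 maps to a single movw, so the 16-bit value is scattered across the encoding (the i, imm4, imm3 and imm8 fields used in asm_thumb_mov_reg_i16() above) rather than stored as one contiguous halfword. A hypothetical patch helper (not part of this commit) that rewrites such an immediate in place would therefore have to re-encode it:

#include <stdint.h>

// Hypothetical sketch: rewrite the 16-bit immediate of a movw previously
// emitted by asm_thumb_mov_reg_i16(); op points at its first halfword.
void patch_movw_imm16(uint16_t *op, uint16_t imm) {
    op[0] = (op[0] & 0xfbf0) | ((imm >> 1) & 0x0400) | ((imm >> 12) & 0xf); // i and imm4 fields
    op[1] = (op[1] & 0x8f00) | ((imm << 4) & 0x7000) | (imm & 0xff);        // imm3 and imm8 fields
}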

py/asmx64.c  (+3 -1)

@@ -334,14 +334,16 @@ void asm_x64_mov_i8_to_r8(asm_x64_t *as, int src_i8, int dest_r64) {
 }
 */
 
-STATIC void asm_x64_mov_i32_to_r64(asm_x64_t *as, int src_i32, int dest_r64) {
+size_t asm_x64_mov_i32_to_r64(asm_x64_t *as, int src_i32, int dest_r64) {
     // cpu defaults to i32 to r64, with zero extension
     if (dest_r64 < 8) {
         asm_x64_write_byte_1(as, OPCODE_MOV_I64_TO_R64 | dest_r64);
     } else {
         asm_x64_write_byte_2(as, REX_PREFIX | REX_B, OPCODE_MOV_I64_TO_R64 | (dest_r64 & 7));
     }
+    size_t loc = mp_asm_base_get_code_pos(&as->base);
     asm_x64_write_word32(as, src_i32);
+    return loc;
 }
 
 void asm_x64_mov_i64_to_r64(asm_x64_t *as, int64_t src_i64, int dest_r64) {
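
Note: asm_x64_mov_i32_to_r64() is no longer STATIC and now returns the offset of the 4-byte immediate it writes, which sits at an arbitrary (possibly unaligned) position in the instruction stream. How the offset is used is not shown here; a hypothetical fix-up sketch under that reading (the helper name is made up):

#include <stdint.h>
#include <string.h>
#include "py/asmbase.h"

// Hypothetical sketch: overwrite the little-endian i32 field whose offset was
// returned by asm_x64_mov_i32_to_r64(); memcpy avoids unaligned stores.
// Assumes a little-endian host, as is the case for x86/x64 targets.
void patch_i32_field(mp_asm_base_t *base, size_t loc, uint32_t value) {
    uint8_t *code = (uint8_t *)mp_asm_base_get_code(base);
    memcpy(code + loc, &value, sizeof(value));
}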

py/asmx64.h  (+3)

@@ -83,6 +83,7 @@ void asm_x64_nop(asm_x64_t* as);
 void asm_x64_push_r64(asm_x64_t* as, int src_r64);
 void asm_x64_pop_r64(asm_x64_t* as, int dest_r64);
 void asm_x64_mov_r64_r64(asm_x64_t* as, int dest_r64, int src_r64);
+size_t asm_x64_mov_i32_to_r64(asm_x64_t *as, int src_i32, int dest_r64);
 void asm_x64_mov_i64_to_r64(asm_x64_t* as, int64_t src_i64, int dest_r64);
 void asm_x64_mov_i64_to_r64_optimised(asm_x64_t *as, int64_t src_i64, int dest_r64);
 void asm_x64_mov_r8_to_mem8(asm_x64_t *as, int src_r64, int dest_r64, int dest_disp);

@@ -181,6 +182,8 @@ void asm_x64_call_ind(asm_x64_t* as, size_t fun_id, int temp_r32);
 
 #define ASM_MOV_LOCAL_REG(as, local_num, reg_src) asm_x64_mov_r64_to_local((as), (reg_src), (local_num))
 #define ASM_MOV_REG_IMM(as, reg_dest, imm) asm_x64_mov_i64_to_r64_optimised((as), (imm), (reg_dest))
+#define ASM_MOV_REG_IMM_FIX_U16(as, reg_dest, imm) asm_x64_mov_i32_to_r64((as), (imm), (reg_dest))
+#define ASM_MOV_REG_IMM_FIX_WORD(as, reg_dest, imm) asm_x64_mov_i32_to_r64((as), (imm), (reg_dest))
 #define ASM_MOV_REG_LOCAL(as, reg_dest, local_num) asm_x64_mov_local_to_r64((as), (local_num), (reg_dest))
 #define ASM_MOV_REG_REG(as, reg_dest, reg_src) asm_x64_mov_r64_r64((as), (reg_dest), (reg_src))
 #define ASM_MOV_REG_LOCAL_ADDR(as, reg_dest, local_num) asm_x64_mov_local_addr_to_r64((as), (local_num), (reg_dest))

py/asmx86.c  (+3 -1)

@@ -236,9 +236,11 @@ void asm_x86_mov_i8_to_r8(asm_x86_t *as, int src_i8, int dest_r32) {
 }
 #endif
 
-void asm_x86_mov_i32_to_r32(asm_x86_t *as, int32_t src_i32, int dest_r32) {
+size_t asm_x86_mov_i32_to_r32(asm_x86_t *as, int32_t src_i32, int dest_r32) {
     asm_x86_write_byte_1(as, OPCODE_MOV_I32_TO_R32 | dest_r32);
+    size_t loc = mp_asm_base_get_code_pos(&as->base);
     asm_x86_write_word32(as, src_i32);
+    return loc;
 }
 
 void asm_x86_and_r32_r32(asm_x86_t *as, int dest_r32, int src_r32) {

py/asmx86.h  (+3 -1)

@@ -83,7 +83,7 @@ static inline void asm_x86_end_pass(asm_x86_t *as) {
 }
 
 void asm_x86_mov_r32_r32(asm_x86_t* as, int dest_r32, int src_r32);
-void asm_x86_mov_i32_to_r32(asm_x86_t *as, int32_t src_i32, int dest_r32);
+size_t asm_x86_mov_i32_to_r32(asm_x86_t *as, int32_t src_i32, int dest_r32);
 void asm_x86_mov_r8_to_mem8(asm_x86_t *as, int src_r32, int dest_r32, int dest_disp);
 void asm_x86_mov_r16_to_mem16(asm_x86_t *as, int src_r32, int dest_r32, int dest_disp);
 void asm_x86_mov_r32_to_mem32(asm_x86_t *as, int src_r32, int dest_r32, int dest_disp);

@@ -179,6 +179,8 @@ void asm_x86_call_ind(asm_x86_t* as, size_t fun_id, mp_uint_t n_args, int temp_r
 
 #define ASM_MOV_LOCAL_REG(as, local_num, reg_src) asm_x86_mov_r32_to_local((as), (reg_src), (local_num))
 #define ASM_MOV_REG_IMM(as, reg_dest, imm) asm_x86_mov_i32_to_r32((as), (imm), (reg_dest))
+#define ASM_MOV_REG_IMM_FIX_U16(as, reg_dest, imm) asm_x86_mov_i32_to_r32((as), (imm), (reg_dest))
+#define ASM_MOV_REG_IMM_FIX_WORD(as, reg_dest, imm) asm_x86_mov_i32_to_r32((as), (imm), (reg_dest))
 #define ASM_MOV_REG_LOCAL(as, reg_dest, local_num) asm_x86_mov_local_to_r32((as), (local_num), (reg_dest))
 #define ASM_MOV_REG_REG(as, reg_dest, reg_src) asm_x86_mov_r32_r32((as), (reg_dest), (reg_src))
 #define ASM_MOV_REG_LOCAL_ADDR(as, reg_dest, local_num) asm_x86_mov_local_addr_to_r32((as), (local_num), (reg_dest))

py/asmxtensa.c  (+48 -18)

@@ -30,14 +30,13 @@
 #include "py/mpconfig.h"
 
 // wrapper around everything in this file
-#if MICROPY_EMIT_XTENSA || MICROPY_EMIT_INLINE_XTENSA
+#if MICROPY_EMIT_XTENSA || MICROPY_EMIT_INLINE_XTENSA || MICROPY_EMIT_XTENSAWIN
 
 #include "py/asmxtensa.h"
 
 #define WORD_SIZE (4)
 #define SIGNED_FIT8(x) ((((x) & 0xffffff80) == 0) || (((x) & 0xffffff80) == 0xffffff80))
 #define SIGNED_FIT12(x) ((((x) & 0xfffff800) == 0) || (((x) & 0xfffff800) == 0xfffff800))
-#define NUM_REGS_SAVED (5)
 
 void asm_xtensa_end_pass(asm_xtensa_t *as) {
     as->num_const = as->cur_const;

@@ -69,7 +68,7 @@ void asm_xtensa_entry(asm_xtensa_t *as, int num_locals) {
     as->const_table = (uint32_t*)mp_asm_base_get_cur_to_write_bytes(&as->base, as->num_const * 4);
 
     // adjust the stack-pointer to store a0, a12, a13, a14, a15 and locals, 16-byte aligned
-    as->stack_adjust = (((NUM_REGS_SAVED + num_locals) * WORD_SIZE) + 15) & ~15;
+    as->stack_adjust = (((ASM_XTENSA_NUM_REGS_SAVED + num_locals) * WORD_SIZE) + 15) & ~15;
     if (SIGNED_FIT8(-as->stack_adjust)) {
         asm_xtensa_op_addi(as, ASM_XTENSA_REG_A1, ASM_XTENSA_REG_A1, -as->stack_adjust);
     } else {

@@ -79,14 +78,14 @@ void asm_xtensa_entry(asm_xtensa_t *as, int num_locals) {
 
     // save return value (a0) and callee-save registers (a12, a13, a14, a15)
     asm_xtensa_op_s32i_n(as, ASM_XTENSA_REG_A0, ASM_XTENSA_REG_A1, 0);
-    for (int i = 1; i < NUM_REGS_SAVED; ++i) {
+    for (int i = 1; i < ASM_XTENSA_NUM_REGS_SAVED; ++i) {
         asm_xtensa_op_s32i_n(as, ASM_XTENSA_REG_A11 + i, ASM_XTENSA_REG_A1, i);
     }
 }
 
 void asm_xtensa_exit(asm_xtensa_t *as) {
     // restore registers
-    for (int i = NUM_REGS_SAVED - 1; i >= 1; --i) {
+    for (int i = ASM_XTENSA_NUM_REGS_SAVED - 1; i >= 1; --i) {
         asm_xtensa_op_l32i_n(as, ASM_XTENSA_REG_A11 + i, ASM_XTENSA_REG_A1, i);
     }
     asm_xtensa_op_l32i_n(as, ASM_XTENSA_REG_A0, ASM_XTENSA_REG_A1, 0);

@@ -102,6 +101,22 @@ void asm_xtensa_exit(asm_xtensa_t *as) {
     asm_xtensa_op_ret_n(as);
 }
 
+void asm_xtensa_entry_win(asm_xtensa_t *as, int num_locals) {
+    // jump over the constants
+    asm_xtensa_op_j(as, as->num_const * WORD_SIZE + 4 - 4);
+    mp_asm_base_get_cur_to_write_bytes(&as->base, 1); // padding/alignment byte
+    as->const_table = (uint32_t*)mp_asm_base_get_cur_to_write_bytes(&as->base, as->num_const * 4);
+
+    as->stack_adjust = 32 + ((((ASM_XTENSA_NUM_REGS_SAVED_WIN + num_locals) * WORD_SIZE) + 15) & ~15);
+    asm_xtensa_op_entry(as, ASM_XTENSA_REG_A1, as->stack_adjust);
+    asm_xtensa_op_s32i_n(as, ASM_XTENSA_REG_A0, ASM_XTENSA_REG_A1, 0);
+}
+
+void asm_xtensa_exit_win(asm_xtensa_t *as) {
+    asm_xtensa_op_l32i_n(as, ASM_XTENSA_REG_A0, ASM_XTENSA_REG_A1, 0);
+    asm_xtensa_op_retw_n(as);
+}
+
 STATIC uint32_t get_label_dest(asm_xtensa_t *as, uint label) {
     assert(label < as->base.max_num_labels);
     return as->base.label_offsets[label];

@@ -156,31 +171,37 @@ void asm_xtensa_setcc_reg_reg_reg(asm_xtensa_t *as, uint cond, uint reg_dest, ui
     asm_xtensa_op_movi_n(as, reg_dest, 0);
 }
 
-void asm_xtensa_mov_reg_i32(asm_xtensa_t *as, uint reg_dest, uint32_t i32) {
+size_t asm_xtensa_mov_reg_i32(asm_xtensa_t *as, uint reg_dest, uint32_t i32) {
+    // load the constant
+    uint32_t const_table_offset = (uint8_t*)as->const_table - as->base.code_base;
+    size_t loc = const_table_offset + as->cur_const * WORD_SIZE;
+    asm_xtensa_op_l32r(as, reg_dest, as->base.code_offset, loc);
+    // store the constant in the table
+    if (as->const_table != NULL) {
+        as->const_table[as->cur_const] = i32;
+    }
+    ++as->cur_const;
+    return loc;
+}
+
+void asm_xtensa_mov_reg_i32_optimised(asm_xtensa_t *as, uint reg_dest, uint32_t i32) {
     if (SIGNED_FIT12(i32)) {
         asm_xtensa_op_movi(as, reg_dest, i32);
     } else {
-        // load the constant
-        uint32_t const_table_offset = (uint8_t*)as->const_table - as->base.code_base;
-        asm_xtensa_op_l32r(as, reg_dest, as->base.code_offset, const_table_offset + as->cur_const * WORD_SIZE);
-        // store the constant in the table
-        if (as->const_table != NULL) {
-            as->const_table[as->cur_const] = i32;
-        }
-        ++as->cur_const;
+        asm_xtensa_mov_reg_i32(as, reg_dest, i32);
     }
 }
 
 void asm_xtensa_mov_local_reg(asm_xtensa_t *as, int local_num, uint reg_src) {
-    asm_xtensa_op_s32i(as, reg_src, ASM_XTENSA_REG_A1, NUM_REGS_SAVED + local_num);
+    asm_xtensa_op_s32i(as, reg_src, ASM_XTENSA_REG_A1, local_num);
 }
 
 void asm_xtensa_mov_reg_local(asm_xtensa_t *as, uint reg_dest, int local_num) {
-    asm_xtensa_op_l32i(as, reg_dest, ASM_XTENSA_REG_A1, NUM_REGS_SAVED + local_num);
+    asm_xtensa_op_l32i(as, reg_dest, ASM_XTENSA_REG_A1, local_num);
 }
 
 void asm_xtensa_mov_reg_local_addr(asm_xtensa_t *as, uint reg_dest, int local_num) {
-    uint off = (NUM_REGS_SAVED + local_num) * WORD_SIZE;
+    uint off = local_num * WORD_SIZE;
     if (SIGNED_FIT8(off)) {
         asm_xtensa_op_addi(as, reg_dest, ASM_XTENSA_REG_A1, off);
     } else {

@@ -220,4 +241,13 @@ void asm_xtensa_call_ind(asm_xtensa_t *as, uint idx) {
     asm_xtensa_op_callx0(as, ASM_XTENSA_REG_A0);
 }
 
-#endif // MICROPY_EMIT_XTENSA || MICROPY_EMIT_INLINE_XTENSA
+void asm_xtensa_call_ind_win(asm_xtensa_t *as, uint idx) {
+    if (idx < 16) {
+        asm_xtensa_op_l32i_n(as, ASM_XTENSA_REG_A8, ASM_XTENSA_REG_FUN_TABLE_WIN, idx);
+    } else {
+        asm_xtensa_op_l32i(as, ASM_XTENSA_REG_A8, ASM_XTENSA_REG_FUN_TABLE_WIN, idx);
+    }
+    asm_xtensa_op_callx8(as, ASM_XTENSA_REG_A8);
+}
+
+#endif // MICROPY_EMIT_XTENSA || MICROPY_EMIT_INLINE_XTENSA || MICROPY_EMIT_XTENSAWIN
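
Note: the new *_win helpers target the Xtensa windowed ABI (entry/retw.n on function entry and exit, callx8 through a function-table register) while the existing functions keep the call0 ABI. How the emitter selects between them is outside this diff; a hypothetical selection sketch (the EMIT_* macro names are made up):

// Hypothetical glue: pick the windowed-ABI helpers when building the
// MICROPY_EMIT_XTENSAWIN variant, otherwise keep the call0-ABI ones.
#if MICROPY_EMIT_XTENSAWIN
#define EMIT_ENTRY(as, num_locals) asm_xtensa_entry_win((as), (num_locals))
#define EMIT_EXIT(as) asm_xtensa_exit_win((as))
#define EMIT_CALL_IND(as, idx) asm_xtensa_call_ind_win((as), (idx))
#else
#define EMIT_ENTRY(as, num_locals) asm_xtensa_entry((as), (num_locals))
#define EMIT_EXIT(as) asm_xtensa_exit((as))
#define EMIT_CALL_IND(as, idx) asm_xtensa_call_ind((as), (idx))
#endif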
