/* Append one 32-bit ARM opcode word to the translation cache and advance
 * the write cursor (tcache_ptr is declared elsewhere in this file). */
1 #define EMIT(x) *tcache_ptr++ = x
/* Register-list bit for r14 (lr), for use in LDM/STM register lists. */
3 #define A_R14M (1 << 14)
7 /* addressing mode 1 */
/* Immediate shifter operand: bit 25 (0x02000000) selects the immediate
 * form; imm8 is rotated right by 2*ror2 bits, so callers pass the desired
 * rotation divided by two (e.g. 8/2 puts imm8 into bits 31..24). */
13 #define A_AM1_IMM(ror2,imm8) (((ror2)<<8) | (imm8) | 0x02000000)
/* Register shifter operand: rm shifted by immediate shift_imm, with the
 * shift type (LSL/LSR/ASR/ROR code) in bits 6..5. */
14 #define A_AM1_REG_XIMM(shift_imm,shift_op,rm) (((shift_imm)<<7) | ((shift_op)<<5) | (rm))
16 /* data processing op */
/* Generic data-processing encoder: condition in bits 31..28, opcode in
 * 24..21, S (flag-update) bit 20, first operand rn, destination rd, plus
 * a shifter operand pre-built by one of the A_AM1_* macros above. */
20 #define EOP_C_DOP_X(cond,op,s,rn,rd,shifter_op) \
21 EMIT(((cond)<<28) | ((op)<< 21) | ((s)<<20) | ((rn)<<16) | ((rd)<<12) | (shifter_op))
/* Data-processing op with a rotated-immediate operand. */
23 #define EOP_C_DOP_IMM(cond,op,s,rn,rd,ror2,imm8) EOP_C_DOP_X(cond,op,s,rn,rd,A_AM1_IMM(ror2,imm8))
/* Data-processing op with an immediate-shifted register operand. */
24 #define EOP_C_DOP_REG(cond,op,s,rn,rd,shift_imm,shift_op,rm) EOP_C_DOP_X(cond,op,s,rn,rd,A_AM1_REG_XIMM(shift_imm,shift_op,rm))
/* Unconditional (AL) MOV/ORR with rotated immediate; MOV ignores rn,
 * so 0 is passed for it. */
26 #define EOP_MOV_IMM(s, rd,ror2,imm8) EOP_C_DOP_IMM(A_COND_AL,A_OP_MOV,s, 0,rd,ror2,imm8)
27 #define EOP_ORR_IMM(s,rn,rd,ror2,imm8) EOP_C_DOP_IMM(A_COND_AL,A_OP_ORR,s,rn,rd,ror2,imm8)
/* MOV rd, rm <shift>; the _SIMPLE variant is a plain register-to-register
 * move (LSL #0, flags untouched). */
29 #define EOP_MOV_REG(s, rd,shift_imm,shift_op,rm) EOP_C_DOP_REG(A_COND_AL,A_OP_MOV,s, 0,rd,shift_imm,shift_op,rm)
31 #define EOP_MOV_REG_SIMPLE(rd,rm) EOP_MOV_REG(0,rd,0,A_AM1_LSL,rm)
/* Single data transfer (LDR/STR) with 12-bit immediate offset.
 * 0x05000000 selects the pre-indexed form (P=1) with no writeback;
 * u: 1=add offset / 0=subtract, b: 1=byte access, l: 1=load / 0=store. */
34 #define EOP_C_XXR_IMM(cond,u,b,l,rn,rd,offset_12) \
35 EMIT(((cond)<<28) | 0x05000000 | ((u)<<23) | ((b)<<22) | ((l)<<20) | ((rn)<<16) | ((rd)<<12) | (offset_12))
/* ldr rd,[rn,#offset_12] */
37 #define EOP_LDR_IMM( rd,rn,offset_12) EOP_C_XXR_IMM(A_COND_AL,1,0,1,rn,rd,offset_12)
/* ldr rd,[rn,#-offset_12] — subtracted offset (u=0), used for
 * pc-relative loads of literals placed BEFORE the current position. */
38 #define EOP_LDR_NEGIMM(rd,rn,offset_12) EOP_C_XXR_IMM(A_COND_AL,0,0,1,rn,rd,offset_12)
/* ldr rd,[rn] / str rd,[rn] with zero offset. */
39 #define EOP_LDR_SIMPLE(rd,rn) EOP_C_XXR_IMM(A_COND_AL,1,0,1,rn,rd,0)
40 #define EOP_STR_SIMPLE(rd,rn) EOP_C_XXR_IMM(A_COND_AL,1,0,0,rn,rd,0)
/* Block data transfer (LDM/STM), bit 27 set: p=pre/post index,
 * u=up/down, s=PSR & force-user bit, w=base writeback, l=load/store,
 * list=register bitmask (see A_R14M above). */
43 #define EOP_XXM(cond,p,u,s,w,l,rn,list) \
44 EMIT(((cond)<<28) | (1<<27) | ((p)<<24) | ((u)<<23) | ((s)<<22) | ((w)<<21) | ((l)<<20) | ((rn)<<16) | (list))
/* Full-descending stack on r13: STMFD = STMDB r13! (p=1,u=0,w=1),
 * LDMFD = LDMIA r13! (p=0,u=1,w=1) — the usual push/pop pair. */
46 #define EOP_STMFD_ST(list) EOP_XXM(A_COND_AL,1,0,0,1,0,13,list)
47 #define EOP_LDMFD_ST(list) EOP_XXM(A_COND_AL,0,1,0,1,1,13,list)
/* Branch-and-exchange: bx rm. 0x012fff10 is the fixed BX opcode pattern;
 * only the condition and the target register vary. */
50 #define EOP_C_BX(cond,rm) \
51 EMIT(((cond)<<28) | 0x012fff10 | (rm))
53 #define EOP_BX(rm) EOP_C_BX(A_COND_AL,rm)
/* Load the 32-bit constant val into register d, a byte at a time:
 * a MOV for the first nonzero byte, then ORRs for the rest, each using
 * a rotated immediate (ror2 = 8/2, 16/2, 24/2, 0 place imm8 into bits
 * 31..24, 23..16, 15..8 and 7..0 respectively).
 * NOTE(review): this listing has gaps (embedded line numbers skip
 * 57-58, 61-62, 65-66, 69-70); need_or is evidently declared there and
 * set to 1 after each emitted op — confirm against the full file. */
56 static void emit_mov_const(int d, unsigned int val)
59 if (val & 0xff000000) {
60 EOP_MOV_IMM(0, d, 8/2, (val>>24)&0xff);
63 if (val & 0x00ff0000) {
64 EOP_C_DOP_IMM(A_COND_AL,need_or ? A_OP_ORR : A_OP_MOV, 0, need_or ? d : 0, d, 16/2, (val>>16)&0xff);
67 if (val & 0x0000ff00) {
68 EOP_C_DOP_IMM(A_COND_AL,need_or ? A_OP_ORR : A_OP_MOV, 0, need_or ? d : 0, d, 24/2, (val>>8)&0xff);
/* Emit even when the low byte is zero if nothing was emitted yet, so
 * that d is always written (covers val == 0). */
71 if ((val &0x000000ff) || !need_or)
72 EOP_C_DOP_IMM(A_COND_AL,need_or ? A_OP_ORR : A_OP_MOV, 0, need_or ? d : 0, d, 0, val&0xff);
/* Debug guard: a load/store immediate offset must fit the 12-bit field.
 * Returns silently when it does; otherwise reports the overflowing
 * value.  NOTE(review): any abort/exit after the printf is on lines
 * elided from this listing (79-81) — confirm against the full file. */
75 static void check_offset_12(unsigned int val)
77 if (!(val & ~0xfff)) return;
78 printf("offset_12 overflow %04x\n", val);
/* Emitted at the start of every translated block: push the caller's
 * return address (r14/lr) so the block body is free to clobber it. */
82 static void emit_block_prologue(void)
85 EOP_STMFD_ST(A_R14M); // stmfd r13!, {r14}
/* Emitted at the end of a translated block: store the block's cycle
 * count (icount) through a pointer kept in a literal pool just before
 * block_start, then pop r14.
 * back = words from block_start to the ldr, +2 because pc reads 8 bytes
 * (2 words) ahead of the executing instruction, +3 more words to reach
 * the g_cycles literal slot.
 * NOTE(review): the final branch back to the caller appears to be on
 * lines elided from this listing (99-101) — confirm in the full file. */
88 static void emit_block_epilogue(unsigned int *block_start, int icount)
90 int back = (tcache_ptr - block_start) + 2;
91 back += 3; // g_cycles
92 check_offset_12(back<<2);
94 EOP_LDR_NEGIMM(2,15,back<<2); // ldr r2,[pc,#back]
95 emit_mov_const(3, icount);
96 EOP_STR_SIMPLE(3,2); // str r3,[r2]
98 EOP_LDMFD_ST(A_R14M); // ldmfd r13!, {r14}
/* Emit code that writes the updated emulated PC (shifted into the high
 * halfword: pc<<16) through the rPC pointer kept in the literal pool
 * 2 words before block_start.  Same pc-relative addressing scheme as
 * emit_block_epilogue: +2 compensates for the ARM pc read-ahead. */
102 static void emit_pc_inc(unsigned int *block_start, int pc)
104 int back = (tcache_ptr - block_start) + 2;
105 back += 2; // rPC ptr
106 check_offset_12(back<<2);
108 EOP_LDR_NEGIMM(2,15,back<<2); // ldr r2,[pc,#back]
109 emit_mov_const(3, pc<<16);
110 EOP_STR_SIMPLE(3,2); // str r3,[r2]
/* Emit an indirect call through a function table whose base address sits
 * in the literal pool 1 word before block_start:
 *   ldr r2,[pc,#-back]   ; r2 = function table base
 *   mov lr,pc            ; pc reads 8 bytes ahead, so lr ends up
 *                        ; pointing just past the next instruction --
 *                        ; the correct return address for the call
 *   ldr pc,[r2,#op1<<2]  ; jump to table[op1]
 */
113 static void emit_call(unsigned int *block_start, unsigned int op1)
115 int back = (tcache_ptr - block_start) + 2;
116 back += 1; // func table
117 check_offset_12(back<<2);
119 EOP_LDR_NEGIMM(2,15,back<<2); // ldr r2,[pc,#back]
120 EOP_MOV_REG_SIMPLE(14,15); // mov lr,pc
121 EOP_LDR_IMM(15,2,op1<<2); // ldr pc,[r2,#op1]
/* Make freshly generated code visible to the instruction stream by
 * flushing/invalidating the caches over [tcache, tcache_ptr) — required
 * on ARM, whose I- and D-caches are not coherent for self-modifying
 * code.  NOTE(review): the function's closing brace (and any #ifdef
 * guards around the flush) lie beyond this view; the local extern
 * declaration suggests flush_inval_caches is platform asm/libc glue. */
124 static void handle_caches()
127 extern void flush_inval_caches(const void *start_addr, const void *end_addr);
128 flush_inval_caches(tcache, tcache_ptr);