/*
 * Copyright (C) 2006 Exophase <exophase@gmail.com>
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
/* Re-enters the emulator core from translated code; returns the new PC. */
u32 mips_update_gba(u32 pc);

// Although these are defined as a function, don't call them as
// such (jump to it instead)
void mips_indirect_branch_arm(u32 address);
void mips_indirect_branch_thumb(u32 address);
void mips_indirect_branch_dual(u32 address);

/* CPSR/SPSR access and SWI entry points used by generated code. */
u32 execute_read_cpsr();
u32 execute_read_spsr();
void execute_swi(u32 pc);

u32 execute_spsr_restore(u32 address);
void execute_store_cpsr(u32 new_cpsr, u32 store_mask);
void execute_store_spsr(u32 new_spsr, u32 store_mask);

/* C bodies backing the assembly stubs above. */
u32 execute_spsr_restore_body(u32 address);
u32 execute_store_cpsr_body(u32 _cpsr, u32 store_mask, u32 address);

/* Register-specified shifts that also produce the ARM shifter carry-out
   (used when the S bit requires flag generation). */
u32 execute_lsl_flags_reg(u32 value, u32 shift);
u32 execute_lsr_flags_reg(u32 value, u32 shift);
u32 execute_asr_flags_reg(u32 value, u32 shift);
u32 execute_ror_flags_reg(u32 value, u32 shift);

/* 32-bit aligned memory accessors. */
void execute_aligned_store32(u32 address, u32 value);
u32 execute_aligned_load32(u32 address);

void step_debug_mips(u32 pc);
92 mips_special_sll = 0x00,
93 mips_special_srl = 0x02,
94 mips_special_sra = 0x03,
95 mips_special_sllv = 0x04,
96 mips_special_srlv = 0x06,
97 mips_special_srav = 0x07,
98 mips_special_jr = 0x08,
99 mips_special_jalr = 0x09,
100 mips_special_movz = 0x0A,
101 mips_special_movn = 0x0B,
102 mips_special_mfhi = 0x10,
103 mips_special_mthi = 0x11,
104 mips_special_mflo = 0x12,
105 mips_special_mtlo = 0x13,
106 mips_special_mult = 0x18,
107 mips_special_multu = 0x19,
108 mips_special_div = 0x1A,
109 mips_special_divu = 0x1B,
110 mips_special_madd = 0x1C,
111 mips_special_maddu = 0x1D,
112 mips_special_add = 0x20,
113 mips_special_addu = 0x21,
114 mips_special_sub = 0x22,
115 mips_special_subu = 0x23,
116 mips_special_and = 0x24,
117 mips_special_or = 0x25,
118 mips_special_xor = 0x26,
119 mips_special_nor = 0x27,
120 mips_special_slt = 0x2A,
121 mips_special_sltu = 0x2B
122 } mips_function_special;
126 mips_special3_ext = 0x00,
127 mips_special3_ins = 0x04,
128 mips_special3_bshfl = 0x20
129 } mips_function_special3;
133 mips_regimm_bltz = 0x00,
134 mips_regimm_bltzal = 0x10
135 } mips_function_regimm;
139 mips_opcode_special = 0x00,
140 mips_opcode_regimm = 0x01,
141 mips_opcode_j = 0x02,
142 mips_opcode_jal = 0x03,
143 mips_opcode_beq = 0x04,
144 mips_opcode_bne = 0x05,
145 mips_opcode_blez = 0x06,
146 mips_opcode_bgtz = 0x07,
147 mips_opcode_addi = 0x08,
148 mips_opcode_addiu = 0x09,
149 mips_opcode_slti = 0x0A,
150 mips_opcode_sltiu = 0x0B,
151 mips_opcode_andi = 0x0C,
152 mips_opcode_ori = 0x0D,
153 mips_opcode_xori = 0x0E,
154 mips_opcode_lui = 0x0F,
155 mips_opcode_llo = 0x18,
156 mips_opcode_lhi = 0x19,
157 mips_opcode_trap = 0x1A,
158 mips_opcode_special2 = 0x1C,
159 mips_opcode_special3 = 0x1F,
160 mips_opcode_lb = 0x20,
161 mips_opcode_lh = 0x21,
162 mips_opcode_lw = 0x23,
163 mips_opcode_lbu = 0x24,
164 mips_opcode_lhu = 0x25,
165 mips_opcode_sb = 0x28,
166 mips_opcode_sh = 0x29,
167 mips_opcode_sw = 0x2B,
/* Emit one raw R-type word (opcode | rs | rt | rd | shamt | function)
   at translation_ptr and advance it past the emitted instruction. */
#define mips_emit_reg(opcode, rs, rt, rd, shift, function)                    \
  *((u32 *)translation_ptr) = (mips_opcode_##opcode << 26) |                  \
   (rs << 21) | (rt << 16) | (rd << 11) | (shift << 6) | function;            \
  translation_ptr += 4                                                        \

/* Emit a SPECIAL-opcode (R-type) instruction selected by function field. */
#define mips_emit_special(function, rs, rt, rd, shift)                        \
  *((u32 *)translation_ptr) = (mips_opcode_special << 26) |                   \
   (rs << 21) | (rt << 16) | (rd << 11) | (shift << 6) |                      \
   mips_special_##function;                                                   \
  translation_ptr += 4                                                        \

/* Emit a SPECIAL3 instruction (EXT/INS/BSHFL); imm_a/imm_b fill the
   rd and shamt fields, whose meaning depends on the function. */
#define mips_emit_special3(function, rs, rt, imm_a, imm_b)                    \
  *((u32 *)translation_ptr) = (mips_opcode_special3 << 26) |                  \
   (rs << 21) | (rt << 16) | (imm_a << 11) | (imm_b << 6) |                   \
   mips_special3_##function;                                                  \
  translation_ptr += 4                                                        \

/* Emit an I-type instruction; the immediate is truncated to 16 bits. */
#define mips_emit_imm(opcode, rs, rt, immediate)                              \
  *((u32 *)translation_ptr) = (mips_opcode_##opcode << 26) |                  \
   (rs << 21) | (rt << 16) | (immediate & 0xFFFF);                            \
  translation_ptr += 4                                                        \

/* Emit a REGIMM instruction (the rt field selects the operation). */
#define mips_emit_regimm(function, rs, immediate)                             \
  *((u32 *)translation_ptr) = (mips_opcode_regimm << 26) |                    \
   (rs << 21) | (mips_regimm_##function << 16) | (immediate & 0xFFFF);        \
  translation_ptr += 4                                                        \

/* Emit a J-type instruction with a 26-bit word-address target. */
#define mips_emit_jump(opcode, offset)                                        \
  *((u32 *)translation_ptr) = (mips_opcode_##opcode << 26) |                  \
   (offset & 0x3FFFFFF);                                                      \
  translation_ptr += 4                                                        \

/* PC-relative branch displacement in words, measured from the delay slot
   (source + 4), as MIPS branches require. */
#define mips_relative_offset(source, offset)                                  \
  (((u32)offset - ((u32)source + 4)) / 4)                                     \

205 #define mips_absolute_offset(offset) \
/* Three-register ALU helpers: rd = rs OP rt (non-trapping add/sub). */
#define mips_emit_addu(rd, rs, rt)                                            \
  mips_emit_special(addu, rs, rt, rd, 0)                                      \

#define mips_emit_subu(rd, rs, rt)                                            \
  mips_emit_special(subu, rs, rt, rd, 0)                                      \

#define mips_emit_xor(rd, rs, rt)                                             \
  mips_emit_special(xor, rs, rt, rd, 0)                                       \

/* rd = rs + rt (trapping ADD, function field 0x20).
   BUG FIX: this previously expanded to mips_emit_special(and, ...), so
   every use of mips_emit_add emitted a bitwise AND (duplicating
   mips_emit_and below) instead of an addition. */
#define mips_emit_add(rd, rs, rt)                                             \
  mips_emit_special(add, rs, rt, rd, 0)
#define mips_emit_sub(rd, rs, rt)                                             \
  mips_emit_special(sub, rs, rt, rd, 0)                                       \

/* Bitwise / compare R-type ops: rd = rs OP rt. */
#define mips_emit_and(rd, rs, rt)                                             \
  mips_emit_special(and, rs, rt, rd, 0)                                       \

#define mips_emit_or(rd, rs, rt)                                              \
  mips_emit_special(or, rs, rt, rd, 0)                                        \

#define mips_emit_nor(rd, rs, rt)                                             \
  mips_emit_special(nor, rs, rt, rd, 0)                                       \

#define mips_emit_slt(rd, rs, rt)                                             \
  mips_emit_special(slt, rs, rt, rd, 0)                                       \

#define mips_emit_sltu(rd, rs, rt)                                            \
  mips_emit_special(sltu, rs, rt, rd, 0)                                      \

/* Variable shifts: rd = rt SHIFT rs (MIPS puts the amount in rs). */
#define mips_emit_sllv(rd, rt, rs)                                            \
  mips_emit_special(sllv, rs, rt, rd, 0)                                      \

#define mips_emit_srlv(rd, rt, rs)                                            \
  mips_emit_special(srlv, rs, rt, rd, 0)                                      \

#define mips_emit_srav(rd, rt, rs)                                            \
  mips_emit_special(srav, rs, rt, rd, 0)                                      \

/* ROTRV is encoded as SRLV with the shamt field set to 1 (MIPS32r2 /
   Allegrex rotate encoding). */
#define mips_emit_rotrv(rd, rt, rs)                                           \
  mips_emit_special(srlv, rs, rt, rd, 1)                                      \

/* Immediate shifts: rd = rt SHIFT #shift. */
#define mips_emit_sll(rd, rt, shift)                                          \
  mips_emit_special(sll, 0, rt, rd, shift)                                    \

#define mips_emit_srl(rd, rt, shift)                                          \
  mips_emit_special(srl, 0, rt, rd, shift)                                    \

#define mips_emit_sra(rd, rt, shift)                                          \
  mips_emit_special(sra, 0, rt, rd, shift)                                    \

/* ROTR is encoded as SRL with the rs field set to 1 (MIPS32r2). */
#define mips_emit_rotr(rd, rt, shift)                                         \
  mips_emit_special(srl, 1, rt, rd, shift)                                    \

/* HI/LO register moves and multiply/divide (results land in HI:LO). */
#define mips_emit_mfhi(rd)                                                    \
  mips_emit_special(mfhi, 0, 0, rd, 0)                                        \

#define mips_emit_mflo(rd)                                                    \
  mips_emit_special(mflo, 0, 0, rd, 0)                                        \

#define mips_emit_mthi(rs)                                                    \
  mips_emit_special(mthi, rs, 0, 0, 0)                                        \

#define mips_emit_mtlo(rs)                                                    \
  mips_emit_special(mtlo, rs, 0, 0, 0)                                        \

#define mips_emit_mult(rs, rt)                                                \
  mips_emit_special(mult, rs, rt, 0, 0)                                       \

#define mips_emit_multu(rs, rt)                                               \
  mips_emit_special(multu, rs, rt, 0, 0)                                      \

#define mips_emit_div(rs, rt)                                                 \
  mips_emit_special(div, rs, rt, 0, 0)                                        \

#define mips_emit_divu(rs, rt)                                                \
  mips_emit_special(divu, rs, rt, 0, 0)                                       \

/* Multiply-accumulate into HI:LO (MADD/MADDU). */
#define mips_emit_madd(rs, rt)                                                \
  mips_emit_special(madd, rs, rt, 0, 0)                                       \

#define mips_emit_maddu(rs, rt)                                               \
  mips_emit_special(maddu, rs, rt, 0, 0)                                      \

/* Conditional moves: movn moves when rt != 0, movz when rt == 0. */
#define mips_emit_movn(rd, rs, rt)                                            \
  mips_emit_special(movn, rs, rt, rd, 0)                                      \

#define mips_emit_movz(rd, rs, rt)                                            \
  mips_emit_special(movz, rs, rt, rd, 0)                                      \

/* Loads/stores: rt <-> memory at offset(rs). */
#define mips_emit_lb(rt, rs, offset)                                          \
  mips_emit_imm(lb, rs, rt, offset)                                           \

#define mips_emit_lbu(rt, rs, offset)                                         \
  mips_emit_imm(lbu, rs, rt, offset)                                          \

#define mips_emit_lh(rt, rs, offset)                                          \
  mips_emit_imm(lh, rs, rt, offset)                                           \

#define mips_emit_lhu(rt, rs, offset)                                         \
  mips_emit_imm(lhu, rs, rt, offset)                                          \

#define mips_emit_lw(rt, rs, offset)                                          \
  mips_emit_imm(lw, rs, rt, offset)                                           \

#define mips_emit_sb(rt, rs, offset)                                          \
  mips_emit_imm(sb, rs, rt, offset)                                           \

#define mips_emit_sh(rt, rs, offset)                                          \
  mips_emit_imm(sh, rs, rt, offset)                                           \

#define mips_emit_sw(rt, rs, offset)                                          \
  mips_emit_imm(sw, rs, rt, offset)                                           \

/* ALU-immediate forms: rt = rs OP #imm (imm truncated to 16 bits). */
#define mips_emit_lui(rt, imm)                                                \
  mips_emit_imm(lui, 0, rt, imm)                                              \

#define mips_emit_addiu(rt, rs, imm)                                          \
  mips_emit_imm(addiu, rs, rt, imm)                                           \

#define mips_emit_xori(rt, rs, imm)                                           \
  mips_emit_imm(xori, rs, rt, imm)                                            \

#define mips_emit_ori(rt, rs, imm)                                            \
  mips_emit_imm(ori, rs, rt, imm)                                             \

#define mips_emit_andi(rt, rs, imm)                                           \
  mips_emit_imm(andi, rs, rt, imm)                                            \

#define mips_emit_slti(rt, rs, imm)                                           \
  mips_emit_imm(slti, rs, rt, imm)                                            \

#define mips_emit_sltiu(rt, rs, imm)                                          \
  mips_emit_imm(sltiu, rs, rt, imm)                                           \

/* Bitfield extract/insert (SPECIAL3): EXT encodes size-1 in the rd field
   and the lsb position in the shamt field; INS encodes msb = pos+size-1. */
#define mips_emit_ext(rt, rs, pos, size)                                      \
  mips_emit_special3(ext, rs, rt, (size - 1), pos)                            \

#define mips_emit_ins(rt, rs, pos, size)                                      \
  mips_emit_special3(ins, rs, rt, (pos + size - 1), pos)                      \

// Breaks down if the backpatch offset is greater than 16bits, take care
// when using (should be okay if limited to conditional instructions)
#define mips_emit_b_filler(type, rs, rt, writeback_location)                  \
  (writeback_location) = translation_ptr;                                     \
  mips_emit_imm(type, rs, rt, 0)                                              \

// The backpatch code for this has to be handled differently than the above
#define mips_emit_j_filler(writeback_location)                                \
  (writeback_location) = translation_ptr;                                     \
  mips_emit_jump(j, 0)                                                        \

#define mips_emit_b(type, rs, rt, offset)                                     \
  mips_emit_imm(type, rs, rt, offset)                                         \

#define mips_emit_j(offset)                                                   \
  mips_emit_jump(j, offset)                                                   \

#define mips_emit_jal(offset)                                                 \
  mips_emit_jump(jal, offset)                                                 \

#define mips_emit_jr(rs)                                                      \
  mips_emit_special(jr, rs, 0, 0, 0)                                          \

#define mips_emit_bltzal(rs, offset)                                          \
  mips_emit_regimm(bltzal, rs, offset)                                        \

/* Canonical MIPS NOP: sll $zero, $zero, 0 (all-zero word). */
#define mips_emit_nop()                                                       \
  mips_emit_sll(reg_zero, reg_zero, 0)                                        \

/* Fixed MIPS register assignments used by the recompiler. */
#define reg_base    mips_reg_s0   /* base pointer (kept live across calls) */
#define reg_cycles  mips_reg_s1   /* remaining cycle counter (decremented) */
#define reg_a0      mips_reg_a0   /* scratch / argument 0 */
#define reg_a1      mips_reg_a1   /* scratch / argument 1 */
#define reg_a2      mips_reg_a2   /* scratch / argument 2 */
#define reg_rv      mips_reg_v0   /* function return value */
#define reg_pc      mips_reg_s3   /* holds stored_pc for cheap PC loads */
#define reg_temp    mips_reg_at   /* short-lived scratch */
#define reg_zero    mips_reg_zero /* hardwired zero */

/* Cached ARM flag values (0/1 each), tested by the condition macros. */
#define reg_n_cache mips_reg_s4
#define reg_z_cache mips_reg_s5
#define reg_c_cache mips_reg_s6
#define reg_v_cache mips_reg_s7

/* ARM r0-r14 statically mapped to MIPS registers. */
#define reg_r0      mips_reg_v1
#define reg_r1      mips_reg_a3
#define reg_r2      mips_reg_t0
#define reg_r3      mips_reg_t1
#define reg_r4      mips_reg_t2
#define reg_r5      mips_reg_t3
#define reg_r6      mips_reg_t4
#define reg_r7      mips_reg_t5
#define reg_r8      mips_reg_t6
#define reg_r9      mips_reg_t7
#define reg_r10     mips_reg_s2
#define reg_r11     mips_reg_t8
#define reg_r12     mips_reg_t9
#define reg_r13     mips_reg_gp
#define reg_r14     mips_reg_fp
411 // Writing to r15 goes straight to a0, to be chained with other ops
413 u32 arm_to_mips_reg[] =
/* Pseudo ARM register indices for the scratch registers, used as extra
   slots in arm_to_mips_reg[] beyond r0-r14. */
#define arm_reg_a0   15
#define arm_reg_a1   16
#define arm_reg_a2   17
#define arm_reg_temp 18
/* Copy an ARM-mapped register into ireg (ADDU with $zero = move). */
#define generate_load_reg(ireg, reg_index)                                    \
  mips_emit_addu(ireg, arm_to_mips_reg[reg_index], reg_zero)                  \

444 #define generate_load_imm(ireg, imm) \
445 if(((s32)imm >= -32768) && ((s32)imm <= 32767)) \
447 mips_emit_addiu(ireg, reg_zero, imm); \
451 if(((u32)imm >> 16) == 0x0000) \
453 mips_emit_ori(ireg, reg_zero, imm); \
457 mips_emit_lui(ireg, imm >> 16); \
459 if(((u32)imm & 0x0000FFFF) != 0x00000000) \
461 mips_emit_ori(ireg, ireg, imm & 0xFFFF); \
466 #define generate_load_pc(ireg, new_pc) \
468 s32 pc_delta = new_pc - stored_pc; \
469 if((pc_delta >= -32768) && (pc_delta <= 32767)) \
471 mips_emit_addiu(ireg, reg_pc, pc_delta); \
475 generate_load_imm(ireg, new_pc); \
/* Copy ireg back into an ARM-mapped register. */
#define generate_store_reg(ireg, reg_index)                                   \
  mips_emit_addu(arm_to_mips_reg[reg_index], ireg, reg_zero)                  \

/* In-place shifts/rotates of ireg by a constant amount. */
#define generate_shift_left(ireg, imm)                                        \
  mips_emit_sll(ireg, ireg, imm)                                              \

#define generate_shift_right(ireg, imm)                                       \
  mips_emit_srl(ireg, ireg, imm)                                              \

#define generate_shift_right_arithmetic(ireg, imm)                            \
  mips_emit_sra(ireg, ireg, imm)                                              \

#define generate_rotate_right(ireg, imm)                                      \
  mips_emit_rotr(ireg, ireg, imm)                                             \

/* In-place two-operand ALU ops: dest = dest OP src. */
#define generate_add(ireg_dest, ireg_src)                                     \
  mips_emit_addu(ireg_dest, ireg_dest, ireg_src)                              \

#define generate_sub(ireg_dest, ireg_src)                                     \
  mips_emit_subu(ireg_dest, ireg_dest, ireg_src)                              \

#define generate_or(ireg_dest, ireg_src)                                      \
  mips_emit_or(ireg_dest, ireg_dest, ireg_src)                                \

#define generate_xor(ireg_dest, ireg_src)                                     \
  mips_emit_xor(ireg_dest, ireg_dest, ireg_src)                               \

506 #define generate_alu_imm(imm_type, reg_type, ireg_dest, ireg_src, imm) \
507 if(((s32)imm >= -32768) && ((s32)imm <= 32767)) \
509 mips_emit_##imm_type(ireg_dest, ireg_src, imm); \
513 generate_load_imm(reg_temp, imm); \
514 mips_emit_##reg_type(ireg_dest, ireg_src, reg_temp); \
517 #define generate_alu_immu(imm_type, reg_type, ireg_dest, ireg_src, imm) \
518 if(((u32)imm >= 0) && ((u32)imm <= 65535)) \
520 mips_emit_##imm_type(ireg_dest, ireg_src, imm); \
524 generate_load_imm(reg_temp, imm); \
525 mips_emit_##reg_type(ireg_dest, ireg_src, reg_temp); \
/* Immediate ALU helpers: use the 16-bit-immediate instruction when the
   constant fits, otherwise fall back to a register op via reg_temp
   (see generate_alu_imm/generate_alu_immu above).
   FIXES:
   - The register fallback previously named the trapping `add`, which (a)
     routed through the broken mips_emit_add macro and (b) could raise an
     integer-overflow exception; use `addu`, consistent with
     generate_op_add_imm/generate_op_sub_imm below (ARM arithmetic wraps).
   - `-imm` is now fully parenthesized: with a compound macro argument
     such as `a + b`, the old expansion `-a + b` negated only the first
     term. */
#define generate_add_imm(ireg, imm)                                           \
  generate_alu_imm(addiu, addu, ireg, ireg, imm)                              \

#define generate_sub_imm(ireg, imm)                                           \
  generate_alu_imm(addiu, addu, ireg, ireg, (-(imm)))                         \

#define generate_xor_imm(ireg, imm)                                           \
  generate_alu_immu(xori, xor, ireg, ireg, imm)                               \

#define generate_add_reg_reg_imm(ireg_dest, ireg_src, imm)                    \
  generate_alu_imm(addiu, addu, ireg_dest, ireg_src, imm)                     \

#define generate_and_imm(ireg, imm)                                           \
  generate_alu_immu(andi, and, ireg, ireg, imm)
/* Register move: dest = src. */
#define generate_mov(ireg_dest, ireg_src)                                     \
  mips_emit_addu(ireg_dest, ireg_src, reg_zero)                               \

/* 64-bit multiplies into HI:LO.  Note: these use `rm` and `rs` from the
   enclosing translator scope, not macro parameters. */
#define generate_multiply_s64()                                               \
  mips_emit_mult(arm_to_mips_reg[rm], arm_to_mips_reg[rs])                    \

#define generate_multiply_u64()                                               \
  mips_emit_multu(arm_to_mips_reg[rm], arm_to_mips_reg[rs])                   \

#define generate_multiply_s64_add()                                           \
  mips_emit_madd(arm_to_mips_reg[rm], arm_to_mips_reg[rs])                    \

#define generate_multiply_u64_add()                                           \
  mips_emit_maddu(arm_to_mips_reg[rm], arm_to_mips_reg[rs])                   \

558 #define generate_function_call(function_location) \
559 mips_emit_jal(mips_absolute_offset(function_location)); \
562 #define generate_function_call_swap_delay(function_location) \
564 u32 delay_instruction = address32(translation_ptr, -4); \
565 translation_ptr -= 4; \
566 mips_emit_jal(mips_absolute_offset(function_location)); \
567 address32(translation_ptr, 0) = delay_instruction; \
568 translation_ptr += 4; \
571 #define generate_swap_delay() \
573 u32 delay_instruction = address32(translation_ptr, -8); \
574 u32 branch_instruction = address32(translation_ptr, -4); \
575 branch_instruction = (branch_instruction & 0xFFFF0000) | \
576 (((branch_instruction & 0x0000FFFF) + 1) & 0x0000FFFF); \
577 address32(translation_ptr, -8) = branch_instruction; \
578 address32(translation_ptr, -4) = delay_instruction; \
581 #define generate_cycle_update() \
582 if(cycle_count != 0) \
584 mips_emit_addiu(reg_cycles, reg_cycles, -cycle_count); \
/* Unconditionally deduct cycle_count (caller must know it is nonzero or
   accept a redundant addiu; also used as a delay-slot filler). */
#define generate_cycle_update_force()                                         \
  mips_emit_addiu(reg_cycles, reg_cycles, -cycle_count);                      \

/* Backpatch a conditional branch: write the 16-bit immediate field.
   NOTE(review): assumes little-endian layout so the low halfword at dest
   is the branch displacement — holds on the PSP. */
#define generate_branch_patch_conditional(dest, offset)                       \
  *((u16 *)(dest)) = mips_relative_offset(dest, offset)                       \

/* Backpatch an unconditional branch: rewrite the whole word as a J. */
#define generate_branch_patch_unconditional(dest, offset)                     \
  *((u32 *)(dest)) = (mips_opcode_j << 26) |                                  \
   ((mips_absolute_offset(offset)) & 0x3FFFFFF)                               \

599 #define generate_branch_no_cycle_update(writeback_location, new_pc) \
600 if(pc == idle_loop_target_pc) \
602 generate_load_pc(reg_a0, new_pc); \
603 generate_function_call_swap_delay(mips_update_gba); \
604 mips_emit_j_filler(writeback_location); \
609 generate_load_pc(reg_a0, new_pc); \
610 mips_emit_bltzal(reg_cycles, \
611 mips_relative_offset(translation_ptr, update_trampoline)); \
612 generate_swap_delay(); \
613 mips_emit_j_filler(writeback_location); \
/* Branch that first deducts the cycles accumulated for this block. */
#define generate_branch_cycle_update(writeback_location, new_pc)              \
  generate_cycle_update();                                                    \
  generate_branch_no_cycle_update(writeback_location, new_pc)                 \

/* Emit the (to-be-backpatched) branch for an ARM condition test. */
#define generate_conditional_branch(ireg_a, ireg_b, type, writeback_location) \
  generate_branch_filler_##type(ireg_a, ireg_b, writeback_location)           \

// a0 holds the destination

/* Jump to the indirect-branch handler; the cycle deduction emitted after
   the J lands in its delay slot and so executes before the target. */
#define generate_indirect_branch_cycle_update(type)                           \
  mips_emit_j(mips_absolute_offset(mips_indirect_branch_##type));             \
  generate_cycle_update_force()                                               \

630 #define generate_indirect_branch_no_cycle_update(type) \
631 mips_emit_j(mips_absolute_offset(mips_indirect_branch_##type)); \
634 #define generate_block_prologue() \
635 update_trampoline = translation_ptr; \
639 "cache 8, 0(%0)" : : "r"(translation_ptr) \
642 mips_emit_j(mips_absolute_offset(mips_update_gba)); \
644 generate_load_imm(reg_pc, stored_pc) \
/* Flush the data cache so freshly written code is visible to ifetch. */
#define translate_invalidate_dcache()                                         \
  sceKernelDcacheWritebackAll()                                               \

#define block_prologue_size 8

/* flag_status bits say which ARM flags this instruction must compute
   (dead-flag elimination): N=0x08, Z=0x04, C=0x02, V=0x01. */
#define check_generate_n_flag                                                 \
  (flag_status & 0x08)                                                        \

#define check_generate_z_flag                                                 \
  (flag_status & 0x04)                                                        \

#define check_generate_c_flag                                                 \
  (flag_status & 0x02)                                                        \

#define check_generate_v_flag                                                 \
  (flag_status & 0x01)                                                        \

663 #define generate_load_reg_pc(ireg, reg_index, pc_offset) \
664 if(reg_index == REG_PC) \
666 generate_load_pc(ireg, (pc + pc_offset)); \
670 generate_load_reg(ireg, reg_index); \
673 #define check_load_reg_pc(arm_reg, reg_index, pc_offset) \
674 if(reg_index == REG_PC) \
676 reg_index = arm_reg; \
677 generate_load_pc(arm_to_mips_reg[arm_reg], (pc + pc_offset)); \
680 #define check_store_reg_pc_no_flags(reg_index) \
681 if(reg_index == REG_PC) \
683 generate_indirect_branch_arm(); \
686 #define check_store_reg_pc_flags(reg_index) \
687 if(reg_index == REG_PC) \
689 generate_function_call(execute_spsr_restore); \
690 generate_indirect_branch_dual(); \
693 #define generate_shift_imm_lsl_no_flags(arm_reg, _rm, _shift) \
694 check_load_reg_pc(arm_reg, _rm, 8); \
697 mips_emit_sll(arm_to_mips_reg[arm_reg], arm_to_mips_reg[_rm], _shift); \
701 #define generate_shift_imm_lsr_no_flags(arm_reg, _rm, _shift) \
704 check_load_reg_pc(arm_reg, _rm, 8); \
705 mips_emit_srl(arm_to_mips_reg[arm_reg], arm_to_mips_reg[_rm], _shift); \
709 mips_emit_addu(arm_to_mips_reg[arm_reg], reg_zero, reg_zero); \
713 #define generate_shift_imm_asr_no_flags(arm_reg, _rm, _shift) \
714 check_load_reg_pc(arm_reg, _rm, 8); \
717 mips_emit_sra(arm_to_mips_reg[arm_reg], arm_to_mips_reg[_rm], _shift); \
721 mips_emit_sra(arm_to_mips_reg[arm_reg], arm_to_mips_reg[_rm], 31); \
725 #define generate_shift_imm_ror_no_flags(arm_reg, _rm, _shift) \
726 check_load_reg_pc(arm_reg, _rm, 8); \
729 mips_emit_rotr(arm_to_mips_reg[arm_reg], arm_to_mips_reg[_rm], _shift); \
733 mips_emit_srl(arm_to_mips_reg[arm_reg], arm_to_mips_reg[_rm], 1); \
734 mips_emit_ins(arm_to_mips_reg[arm_reg], reg_c_cache, 31, 1); \
738 #define generate_shift_imm_lsl_flags(arm_reg, _rm, _shift) \
739 check_load_reg_pc(arm_reg, _rm, 8); \
742 mips_emit_ext(reg_c_cache, arm_to_mips_reg[_rm], (32 - _shift), 1); \
743 mips_emit_sll(arm_to_mips_reg[arm_reg], arm_to_mips_reg[_rm], _shift); \
747 #define generate_shift_imm_lsr_flags(arm_reg, _rm, _shift) \
748 check_load_reg_pc(arm_reg, _rm, 8); \
751 mips_emit_ext(reg_c_cache, arm_to_mips_reg[_rm], (_shift - 1), 1); \
752 mips_emit_srl(arm_to_mips_reg[arm_reg], arm_to_mips_reg[_rm], _shift); \
756 mips_emit_srl(reg_c_cache, arm_to_mips_reg[_rm], 31); \
757 mips_emit_addu(arm_to_mips_reg[arm_reg], reg_zero, reg_zero); \
761 #define generate_shift_imm_asr_flags(arm_reg, _rm, _shift) \
762 check_load_reg_pc(arm_reg, _rm, 8); \
765 mips_emit_ext(reg_c_cache, arm_to_mips_reg[_rm], (_shift - 1), 1); \
766 mips_emit_sra(arm_to_mips_reg[arm_reg], arm_to_mips_reg[_rm], _shift); \
770 mips_emit_sra(arm_to_mips_reg[arm_reg], arm_to_mips_reg[_rm], 31); \
771 mips_emit_andi(reg_c_cache, arm_to_mips_reg[arm_reg], 1); \
775 #define generate_shift_imm_ror_flags(arm_reg, _rm, _shift) \
776 check_load_reg_pc(arm_reg, _rm, 8); \
779 mips_emit_ext(reg_c_cache, arm_to_mips_reg[_rm], (_shift - 1), 1); \
780 mips_emit_rotr(arm_to_mips_reg[arm_reg], arm_to_mips_reg[_rm], _shift); \
784 mips_emit_andi(reg_temp, arm_to_mips_reg[_rm], 1); \
785 mips_emit_srl(arm_to_mips_reg[arm_reg], arm_to_mips_reg[_rm], 1); \
786 mips_emit_ins(arm_to_mips_reg[arm_reg], reg_c_cache, 31, 1); \
787 mips_emit_addu(reg_c_cache, reg_temp, reg_zero); \
/* Register-amount shifts without flag generation; result goes to a0.
   MIPS variable shifts use only the low 5 bits of the amount, so ARM
   amounts >= 32 are patched up explicitly (result forced to 0 for
   LSL/LSR, sign-fill for ASR; ROR is modulo 32 anyway). */
#define generate_shift_reg_lsl_no_flags(_rm, _rs)                             \
  mips_emit_sltiu(reg_temp, arm_to_mips_reg[_rs], 32);                        \
  mips_emit_sllv(reg_a0, arm_to_mips_reg[_rm], arm_to_mips_reg[_rs]);         \
  mips_emit_movz(reg_a0, reg_zero, reg_temp)                                  \

#define generate_shift_reg_lsr_no_flags(_rm, _rs)                             \
  mips_emit_sltiu(reg_temp, arm_to_mips_reg[_rs], 32);                        \
  mips_emit_srlv(reg_a0, arm_to_mips_reg[_rm], arm_to_mips_reg[_rs]);         \
  mips_emit_movz(reg_a0, reg_zero, reg_temp)                                  \

/* For ASR >= 32 skip over srav's result and sign-fill with sra #31
   (the srav sits in the branch delay slot, so it always executes). */
#define generate_shift_reg_asr_no_flags(_rm, _rs)                             \
  mips_emit_sltiu(reg_temp, arm_to_mips_reg[_rs], 32);                        \
  mips_emit_b(bne, reg_temp, reg_zero, 2);                                    \
  mips_emit_srav(reg_a0, arm_to_mips_reg[_rm], arm_to_mips_reg[_rs]);         \
  mips_emit_sra(reg_a0, reg_a0, 31)                                           \

#define generate_shift_reg_ror_no_flags(_rm, _rs)                             \
  mips_emit_rotrv(reg_a0, arm_to_mips_reg[_rm], arm_to_mips_reg[_rs])         \

/* Flag-generating register shifts call out to C helpers (a0 = value,
   a1 = amount); PC offsets 12/8 follow ARM's operand-read semantics. */
#define generate_shift_reg_lsl_flags(_rm, _rs)                                \
  generate_load_reg_pc(reg_a0, _rm, 12);                                      \
  generate_load_reg_pc(reg_a1, _rs, 8);                                       \
  generate_function_call_swap_delay(execute_lsl_flags_reg)                    \

/* NOTE(review): the next two omit the ';' after the second load — this is
   harmless (generate_load_reg_pc expands to a complete if/else statement)
   but inconsistent with the lsl variant above. */
#define generate_shift_reg_lsr_flags(_rm, _rs)                                \
  generate_load_reg_pc(reg_a0, _rm, 12);                                      \
  generate_load_reg_pc(reg_a1, _rs, 8)                                        \
  generate_function_call_swap_delay(execute_lsr_flags_reg)                    \

#define generate_shift_reg_asr_flags(_rm, _rs)                                \
  generate_load_reg_pc(reg_a0, _rm, 12);                                      \
  generate_load_reg_pc(reg_a1, _rs, 8)                                        \
  generate_function_call_swap_delay(execute_asr_flags_reg)                    \

/* ROR by register with carry-out: when the amount is nonzero, C is bit
   (amount-1) of rm; when zero, the three middle instructions are skipped
   and C is left unchanged.  The rotrv executes in the delay slot. */
#define generate_shift_reg_ror_flags(_rm, _rs)                                \
  mips_emit_b(beq, arm_to_mips_reg[_rs], reg_zero, 3);                        \
  mips_emit_addiu(reg_temp, arm_to_mips_reg[_rs], -1);                        \
  mips_emit_srlv(reg_temp, arm_to_mips_reg[_rm], reg_temp);                   \
  mips_emit_andi(reg_c_cache, reg_temp, 1);                                   \
  mips_emit_rotrv(reg_a0, arm_to_mips_reg[_rm], arm_to_mips_reg[_rs])         \

/* Decode the 5-bit immediate shift / shift register from the ARM opcode
   and dispatch to the matching emitter above. */
#define generate_shift_imm(arm_reg, name, flags_op)                           \
  u32 shift = (opcode >> 7) & 0x1F;                                           \
  generate_shift_imm_##name##_##flags_op(arm_reg, rm, shift)                  \

#define generate_shift_reg(arm_reg, name, flags_op)                           \
  u32 rs = ((opcode >> 8) & 0x0F);                                            \
  generate_shift_reg_##name##_##flags_op(rm, rs);                             \

841 // Made functions due to the macro expansion getting too large.
842 // Returns a new rm if it redirects it (which will happen on most of these
845 #define generate_load_rm_sh_builder(flags_op) \
846 u32 generate_load_rm_sh_##flags_op(u32 rm) \
848 switch((opcode >> 4) & 0x07) \
853 generate_shift_imm(arm_reg_a0, lsl, flags_op); \
860 generate_shift_reg(arm_reg_a0, lsl, flags_op); \
867 generate_shift_imm(arm_reg_a0, lsr, flags_op); \
874 generate_shift_reg(arm_reg_a0, lsr, flags_op); \
881 generate_shift_imm(arm_reg_a0, asr, flags_op); \
888 generate_shift_reg(arm_reg_a0, asr, flags_op); \
895 generate_shift_imm(arm_reg_a0, ror, flags_op); \
902 generate_shift_reg(arm_reg_a0, ror, flags_op); \
/* Compile-time reads of guest memory, used to fold loads from ROM/BIOS
   into immediates. */
#define read_memory_constant_u8(address)                                      \
  read_memory8(address)                                                       \

#define read_memory_constant_u16(address)                                     \
  read_memory16(address)                                                      \

#define read_memory_constant_u32(address)                                     \
  read_memory32(address)                                                      \

#define read_memory_constant_s8(address)                                      \
  (s8)read_memory8(address)                                                   \

#define read_memory_constant_s16(address)                                     \
  (s16)read_memory16_signed(address)                                          \

/* Emit a typed load through ireg: ireg = *(type *)(ireg + offset). */
#define generate_load_memory_u8(ireg, offset)                                 \
  mips_emit_lbu(ireg, ireg, offset)                                           \

#define generate_load_memory_u16(ireg, offset)                                \
  mips_emit_lhu(ireg, ireg, offset)                                           \

#define generate_load_memory_u32(ireg, offset)                                \
  mips_emit_lw(ireg, ireg, offset)                                            \

#define generate_load_memory_s8(ireg, offset)                                 \
  mips_emit_lb(ireg, ireg, offset)                                            \

#define generate_load_memory_s16(ireg, offset)                                \
  mips_emit_lh(ireg, ireg, offset)                                            \

940 #define generate_load_memory(type, ireg, address) \
942 u32 _address = (u32)(address); \
943 u32 _address_hi = (_address + 0x8000) >> 16; \
944 generate_load_imm(ireg, address); \
945 mips_emit_lui(ireg, _address_hi >> 16) \
946 generate_load_memory_##type(ireg, _address - (_address_hi << 16)); \
949 #define generate_known_address_load_builder(type) \
950 u32 generate_known_address_load_##type(u32 rd, u32 address) \
952 switch(address >> 24) \
954 /* Read from the BIOS ROM, can be converted to an immediate load. \
955 Only really possible to do this from the BIOS but should be okay \
956 to allow it everywhere */ \
958 u32 imm = read_memory_constant_##type(address); \
959 generate_load_imm(arm_to_mips_reg[rd], imm); \
962 /* Read from RAM, can be converted to a load */ \
964 generate_load_memory(type, arm_to_mips_reg[rd], (u8 *)ewram + \
965 (address & 0x7FFF) + ((address & 0x38000) * 2) + 0x8000); \
969 generate_load_memory(type, arm_to_mips_reg[rd], (u8 *)iwram + \
970 (address & 0x7FFF) + 0x8000); \
973 /* Read from gamepak ROM, this has to be an immediate load because \
974 it might not actually be in memory anymore when we get to it. */ \
976 u32 imm = read_memory_constant_##type(address); \
977 generate_load_imm(arm_to_mips_reg[rd], imm); \
/* Per-block translator locals: stored_pc anchors reg_pc-relative loads,
   update_trampoline points at this block's cycle-update stub. */
#define generate_block_extra_vars()                                           \
  u32 stored_pc = pc;                                                         \
  u8 *update_trampoline                                                      \

989 #define generate_block_extra_vars_arm() \
990 generate_block_extra_vars(); \
991 generate_load_rm_sh_builder(flags); \
992 generate_load_rm_sh_builder(no_flags); \
994 /* generate_known_address_load_builder(u8); \
995 generate_known_address_load_builder(u16); \
996 generate_known_address_load_builder(u32); \
997 generate_known_address_load_builder(s8); \
998 generate_known_address_load_builder(s16); */ \
1000 u32 generate_load_offset_sh(u32 rm) \
1002 switch((opcode >> 5) & 0x03) \
1007 generate_shift_imm(arm_reg_a1, lsl, no_flags); \
1014 generate_shift_imm(arm_reg_a1, lsr, no_flags); \
1021 generate_shift_imm(arm_reg_a1, asr, no_flags); \
1028 generate_shift_imm(arm_reg_a1, ror, no_flags); \
1036 void generate_indirect_branch_arm() \
1038 if(condition == 0x0E) \
1040 generate_indirect_branch_cycle_update(arm); \
1044 generate_indirect_branch_no_cycle_update(arm); \
1048 void generate_indirect_branch_dual() \
1050 if(condition == 0x0E) \
1052 generate_indirect_branch_cycle_update(dual); \
1056 generate_indirect_branch_no_cycle_update(dual); \
/* Thumb blocks need no shifter helpers — just the shared locals. */
#define generate_block_extra_vars_thumb()                                     \
  generate_block_extra_vars()                                                 \

1063 // It should be okay to still generate result flags, spsr will overwrite them.
1064 // This is pretty infrequent (returning from interrupt handlers, et al) so
1065 // probably not worth optimizing for.
1067 u32 execute_spsr_restore_body(u32 address)
1069 set_cpu_mode(cpu_modes[reg[REG_CPSR] & 0x1F]);
1070 if((io_registers[REG_IE] & io_registers[REG_IF]) &&
1071 io_registers[REG_IME] && ((reg[REG_CPSR] & 0x80) == 0))
1073 reg_mode[MODE_IRQ][6] = address + 4;
1074 spsr[MODE_IRQ] = reg[REG_CPSR];
1075 reg[REG_CPSR] = 0xD2;
1076 address = 0x00000018;
1077 set_cpu_mode(MODE_IRQ);
1080 if(reg[REG_CPSR] & 0x20)
1092 } condition_check_type;
/* ARM condition guards.  Each emits a forward branch (destination filled
   in later through backpatch_address) that is TAKEN when the condition
   FAILS, skipping the conditionally executed code.  The cycle deduction
   emitted after the branch sits in its delay slot, so cycles are counted
   whether or not the instruction is skipped. */

/* EQ: skip when Z == 0. */
#define generate_condition_eq()                                               \
  mips_emit_b_filler(beq, reg_z_cache, reg_zero, backpatch_address);          \
  generate_cycle_update_force()                                               \

/* NE: skip when Z != 0. */
#define generate_condition_ne()                                               \
  mips_emit_b_filler(bne, reg_z_cache, reg_zero, backpatch_address);          \
  generate_cycle_update_force()                                               \

/* CS: skip when C == 0. */
#define generate_condition_cs()                                               \
  mips_emit_b_filler(beq, reg_c_cache, reg_zero, backpatch_address);          \
  generate_cycle_update_force()                                               \

/* CC: skip when C != 0. */
#define generate_condition_cc()                                               \
  mips_emit_b_filler(bne, reg_c_cache, reg_zero, backpatch_address);          \
  generate_cycle_update_force()                                               \

/* MI: skip when N == 0. */
#define generate_condition_mi()                                               \
  mips_emit_b_filler(beq, reg_n_cache, reg_zero, backpatch_address);          \
  generate_cycle_update_force()                                               \

/* PL: skip when N != 0. */
#define generate_condition_pl()                                               \
  mips_emit_b_filler(bne, reg_n_cache, reg_zero, backpatch_address);          \
  generate_cycle_update_force()                                               \

/* VS: skip when V == 0. */
#define generate_condition_vs()                                               \
  mips_emit_b_filler(beq, reg_v_cache, reg_zero, backpatch_address);          \
  generate_cycle_update_force()                                               \

/* VC: skip when V != 0. */
#define generate_condition_vc()                                               \
  mips_emit_b_filler(bne, reg_v_cache, reg_zero, backpatch_address);          \
  generate_cycle_update_force()                                               \

/* HI (C && !Z): temp = !C | Z, skip when temp != 0. */
#define generate_condition_hi()                                               \
  mips_emit_xori(reg_temp, reg_c_cache, 1);                                   \
  mips_emit_or(reg_temp, reg_temp, reg_z_cache);                              \
  mips_emit_b_filler(bne, reg_temp, reg_zero, backpatch_address);             \
  generate_cycle_update_force()                                               \

/* LS (!C || Z): skip when temp == 0, i.e. when C && !Z. */
#define generate_condition_ls()                                               \
  mips_emit_xori(reg_temp, reg_c_cache, 1);                                   \
  mips_emit_or(reg_temp, reg_temp, reg_z_cache);                              \
  mips_emit_b_filler(beq, reg_temp, reg_zero, backpatch_address);             \
  generate_cycle_update_force()                                               \

/* GE (N == V): skip when N != V. */
#define generate_condition_ge()                                               \
  mips_emit_b_filler(bne, reg_n_cache, reg_v_cache, backpatch_address);       \
  generate_cycle_update_force()                                               \

/* LT (N != V): skip when N == V. */
#define generate_condition_lt()                                               \
  mips_emit_b_filler(beq, reg_n_cache, reg_v_cache, backpatch_address);       \
  generate_cycle_update_force()                                               \

/* GT (!Z && N == V): temp = (N ^ V) | Z, skip when temp != 0. */
#define generate_condition_gt()                                               \
  mips_emit_xor(reg_temp, reg_n_cache, reg_v_cache);                          \
  mips_emit_or(reg_temp, reg_temp, reg_z_cache);                              \
  mips_emit_b_filler(bne, reg_temp, reg_zero, backpatch_address);             \
  generate_cycle_update_force()                                               \

/* LE (Z || N != V): skip when temp == 0. */
#define generate_condition_le()                                               \
  mips_emit_xor(reg_temp, reg_n_cache, reg_v_cache);                          \
  mips_emit_or(reg_temp, reg_temp, reg_z_cache);                              \
  mips_emit_b_filler(beq, reg_temp, reg_zero, backpatch_address);             \
  generate_cycle_update_force()                                               \

1159 #define generate_condition() \
1163 generate_condition_eq(); \
1167 generate_condition_ne(); \
1171 generate_condition_cs(); \
1175 generate_condition_cc(); \
1179 generate_condition_mi(); \
1183 generate_condition_pl(); \
1187 generate_condition_vs(); \
1191 generate_condition_vc(); \
1195 generate_condition_hi(); \
1199 generate_condition_ls(); \
1203 generate_condition_ge(); \
1207 generate_condition_lt(); \
1211 generate_condition_gt(); \
1215 generate_condition_le(); \
1225 #define generate_branch() \
1227 if(condition == 0x0E) \
1229 generate_branch_cycle_update( \
1230 block_exits[block_exit_position].branch_source, \
1231 block_exits[block_exit_position].branch_target); \
1235 generate_branch_no_cycle_update( \
1236 block_exits[block_exit_position].branch_source, \
1237 block_exits[block_exit_position].branch_target); \
1239 block_exit_position++; \
/* ARM data-processing ops, register form: _rd = _rn OP _rm.
   Flag generation (if needed) is handled separately by the caller. */
#define generate_op_and_reg(_rd, _rn, _rm)                                    \
  mips_emit_and(_rd, _rn, _rm)                                                \

#define generate_op_orr_reg(_rd, _rn, _rm)                                    \
  mips_emit_or(_rd, _rn, _rm)                                                 \

#define generate_op_eor_reg(_rd, _rn, _rm)                                    \
  mips_emit_xor(_rd, _rn, _rm)                                                \

/* BIC: _rd = _rn & ~_rm (NOR with $zero gives the complement). */
#define generate_op_bic_reg(_rd, _rn, _rm)                                    \
  mips_emit_nor(reg_temp, _rm, reg_zero);                                     \
  mips_emit_and(_rd, _rn, reg_temp)                                           \

#define generate_op_sub_reg(_rd, _rn, _rm)                                    \
  mips_emit_subu(_rd, _rn, _rm)                                               \

/* RSB: reversed operands, _rd = _rm - _rn. */
#define generate_op_rsb_reg(_rd, _rn, _rm)                                    \
  mips_emit_subu(_rd, _rm, _rn)                                               \

/* SBC: _rd = _rn - _rm - (1 - C), using the cached carry. */
#define generate_op_sbc_reg(_rd, _rn, _rm)                                    \
  mips_emit_subu(_rd, _rn, _rm);                                              \
  mips_emit_xori(reg_temp, reg_c_cache, 1);                                   \
  mips_emit_subu(_rd, _rd, reg_temp)                                          \

/* RSC: _rd = _rm - _rn - (1 - C) = (_rm + C - 1) - _rn. */
#define generate_op_rsc_reg(_rd, _rn, _rm)                                    \
  mips_emit_addu(reg_temp, _rm, reg_c_cache);                                 \
  mips_emit_addiu(reg_temp, reg_temp, -1);                                    \
  mips_emit_subu(_rd, reg_temp, _rn)                                          \

#define generate_op_add_reg(_rd, _rn, _rm)                                    \
  mips_emit_addu(_rd, _rn, _rm)                                               \

/* ADC: _rd = _rn + _rm + C. */
#define generate_op_adc_reg(_rd, _rn, _rm)                                    \
  mips_emit_addu(reg_temp, _rm, reg_c_cache);                                 \
  mips_emit_addu(_rd, _rn, reg_temp)                                          \

#define generate_op_mov_reg(_rd, _rn, _rm)                                    \
  mips_emit_addu(_rd, _rm, reg_zero)                                          \

/* MVN: _rd = ~_rm. */
#define generate_op_mvn_reg(_rd, _rn, _rm)                                    \
  mips_emit_nor(_rd, _rm, reg_zero)                                           \

1284 #define generate_op_imm_wrapper(name, _rd, _rn) \
1287 generate_load_imm(reg_a0, imm); \
1288 generate_op_##name##_reg(_rd, _rn, reg_a0); \
1292 generate_op_##name##_reg(_rd, _rn, reg_zero); \
1295 #define generate_op_and_imm(_rd, _rn) \
1296 generate_alu_immu(andi, and, _rd, _rn, imm) \
1298 #define generate_op_orr_imm(_rd, _rn) \
1299 generate_alu_immu(ori, or, _rd, _rn, imm) \
1301 #define generate_op_eor_imm(_rd, _rn) \
1302 generate_alu_immu(xori, xor, _rd, _rn, imm) \
1304 #define generate_op_bic_imm(_rd, _rn) \
1305 generate_alu_immu(andi, and, _rd, _rn, (~imm)) \
1307 #define generate_op_sub_imm(_rd, _rn) \
1308 generate_alu_imm(addiu, addu, _rd, _rn, (-imm)) \
1310 #define generate_op_rsb_imm(_rd, _rn) \
1313 generate_load_imm(reg_temp, imm); \
1314 mips_emit_subu(_rd, reg_temp, _rn); \
1318 mips_emit_subu(_rd, reg_zero, _rn); \
1321 #define generate_op_sbc_imm(_rd, _rn) \
1322 generate_op_imm_wrapper(sbc, _rd, _rn) \
1324 #define generate_op_rsc_imm(_rd, _rn) \
1325 generate_op_imm_wrapper(rsc, _rd, _rn) \
1327 #define generate_op_add_imm(_rd, _rn) \
1328 generate_alu_imm(addiu, addu, _rd, _rn, imm) \
1330 #define generate_op_adc_imm(_rd, _rn) \
1331 generate_op_imm_wrapper(adc, _rd, _rn) \
1333 #define generate_op_mov_imm(_rd, _rn) \
1334 generate_load_imm(_rd, imm) \
1336 #define generate_op_mvn_imm(_rd, _rn) \
1337 generate_load_imm(_rd, (~imm)) \
/* ---------------------------------------------------------------------
   Flag computation helpers.  The ARM N/Z/C/V flags are kept in cached
   MIPS registers (reg_n_cache etc.); check_generate_x_flag gates
   emission so flags dead at this point cost nothing.
   --------------------------------------------------------------------- */
/* N = bit 31 of the result; Z = (result == 0) via sltiu rd, 1. */
1339 #define generate_op_logic_flags(_rd) \
1340 if(check_generate_n_flag) \
1342 mips_emit_srl(reg_n_cache, _rd, 31); \
1344 if(check_generate_z_flag) \
1346 mips_emit_sltiu(reg_z_cache, _rd, 1); \
/* For rn - rm: ARM C is NOT-borrow, i.e. C = !(rn <u rm), hence the
   sltu followed by xori 1.  V prologue stages the signed compare; the
   epilogue below finishes the overflow computation against the result
   sign. */
1349 #define generate_op_sub_flags_prologue(_rn, _rm) \
1350 if(check_generate_c_flag) \
1352 mips_emit_sltu(reg_c_cache, _rn, _rm); \
1353 mips_emit_xori(reg_c_cache, reg_c_cache, 1); \
1355 if(check_generate_v_flag) \
1357 mips_emit_slt(reg_v_cache, _rn, _rm); \
/* Finish N/Z, then V = (rn <s rm) XOR result-sign.  If N wasn't
   requested, the result sign still has to be produced for V. */
1360 #define generate_op_sub_flags_epilogue(_rd) \
1361 generate_op_logic_flags(_rd); \
1362 if(check_generate_v_flag) \
1364 if(!check_generate_n_flag) \
1366 mips_emit_srl(reg_n_cache, _rd, 31); \
1368 mips_emit_xor(reg_v_cache, reg_v_cache, reg_n_cache); \
/* Addition: stash the original _rn in reg_c_cache (it is overwritten
   by the epilogue's carry test) and the sign of _rm in reg_v_cache. */
1371 #define generate_add_flags_prologue(_rn, _rm) \
1372 if(check_generate_c_flag | check_generate_v_flag) \
1374 mips_emit_addu(reg_c_cache, _rn, reg_zero); \
1376 if(check_generate_v_flag) \
1378 mips_emit_slt(reg_v_cache, _rm, reg_zero); \
/* After rd = rn + rm: unsigned carry = (rd <u original rn); overflow
   from the signed compares staged in the prologue. */
1381 #define generate_add_flags_epilogue(_rd) \
1382 if(check_generate_v_flag) \
1384 mips_emit_slt(reg_a0, _rd, reg_c_cache); \
1385 mips_emit_xor(reg_v_cache, reg_v_cache, reg_a0); \
1387 if(check_generate_c_flag) \
1389 mips_emit_sltu(reg_c_cache, _rd, reg_c_cache); \
1391 generate_op_logic_flags(_rd) \
/* ---------------------------------------------------------------------
   Flag-setting (S-bit) variants of the data-processing emitters.
   Logic ops only update N/Z; add/sub families also produce C/V via the
   prologue/epilogue helpers above.  Compare/test ops write reg_temp,
   which is then discarded -- only the flags survive.
   --------------------------------------------------------------------- */
1393 #define generate_op_ands_reg(_rd, _rn, _rm) \
1394 mips_emit_and(_rd, _rn, _rm); \
1395 generate_op_logic_flags(_rd) \
1397 #define generate_op_orrs_reg(_rd, _rn, _rm) \
1398 mips_emit_or(_rd, _rn, _rm); \
1399 generate_op_logic_flags(_rd) \
1401 #define generate_op_eors_reg(_rd, _rn, _rm) \
1402 mips_emit_xor(_rd, _rn, _rm); \
1403 generate_op_logic_flags(_rd) \
1405 #define generate_op_bics_reg(_rd, _rn, _rm) \
1406 mips_emit_nor(reg_temp, _rm, reg_zero); \
1407 mips_emit_and(_rd, _rn, reg_temp); \
1408 generate_op_logic_flags(_rd) \
1410 #define generate_op_subs_reg(_rd, _rn, _rm) \
1411 generate_op_sub_flags_prologue(_rn, _rm); \
1412 mips_emit_subu(_rd, _rn, _rm); \
1413 generate_op_sub_flags_epilogue(_rd) \
1415 #define generate_op_rsbs_reg(_rd, _rn, _rm) \
1416 generate_op_sub_flags_prologue(_rm, _rn); \
1417 mips_emit_subu(_rd, _rm, _rn); \
1418 generate_op_sub_flags_epilogue(_rd) \
/* SBCS: computes (rn - rm) first, then subtracts the inverted carry.
   NOTE(review): flags are derived from the intermediate (_rd, borrow)
   pair rather than the original operands -- this matches a two-step
   borrow decomposition but is worth verifying against ARM semantics. */
1420 #define generate_op_sbcs_reg(_rd, _rn, _rm) \
1421 mips_emit_subu(_rd, _rn, _rm); \
1422 mips_emit_xori(reg_temp, reg_c_cache, 1); \
1423 generate_op_sub_flags_prologue(_rd, reg_temp); \
1424 mips_emit_subu(_rd, _rd, reg_temp); \
1425 generate_op_sub_flags_epilogue(_rd) \
/* RSCS: (rm + C - 1) - rn, flags from that effective subtraction. */
1427 #define generate_op_rscs_reg(_rd, _rn, _rm) \
1428 mips_emit_addu(reg_temp, _rm, reg_c_cache); \
1429 mips_emit_addiu(reg_temp, reg_temp, -1); \
1430 generate_op_sub_flags_prologue(reg_temp, _rn); \
1431 mips_emit_subu(_rd, reg_temp, _rn); \
1432 generate_op_sub_flags_epilogue(_rd) \
1434 #define generate_op_adds_reg(_rd, _rn, _rm) \
1435 generate_add_flags_prologue(_rn, _rm); \
1436 mips_emit_addu(_rd, _rn, _rm); \
1437 generate_add_flags_epilogue(_rd) \
/* ADCS: adds rm + C into reg_temp first.  NOTE(review): the flags
   prologue sees _rm, not reg_temp -- carry-in handling for the C/V
   computation looks subtle here; verify against the full source. */
1439 #define generate_op_adcs_reg(_rd, _rn, _rm) \
1440 mips_emit_addu(reg_temp, _rm, reg_c_cache); \
1441 generate_add_flags_prologue(_rn, _rm); \
1442 mips_emit_addu(_rd, _rn, reg_temp); \
1443 generate_add_flags_epilogue(_rd) \
1445 #define generate_op_movs_reg(_rd, _rn, _rm) \
1446 mips_emit_addu(_rd, _rm, reg_zero); \
1447 generate_op_logic_flags(_rd) \
1449 #define generate_op_mvns_reg(_rd, _rn, _rm) \
1450 mips_emit_nor(_rd, _rm, reg_zero); \
1451 generate_op_logic_flags(_rd) \
/* Thumb NEG: 0 - rm with full subtraction flags. */
1453 #define generate_op_neg_reg(_rd, _rn, _rm) \
1454 generate_op_subs_reg(_rd, reg_zero, _rm) \
1456 #define generate_op_muls_reg(_rd, _rn, _rm) \
1457 mips_emit_multu(_rn, _rm); \
1458 mips_emit_mflo(_rd); \
1459 generate_op_logic_flags(_rd) \
/* Compare/test family: result goes to reg_temp and is discarded. */
1461 #define generate_op_cmp_reg(_rd, _rn, _rm) \
1462 generate_op_subs_reg(reg_temp, _rn, _rm) \
1464 #define generate_op_cmn_reg(_rd, _rn, _rm) \
1465 generate_op_adds_reg(reg_temp, _rn, _rm) \
1467 #define generate_op_tst_reg(_rd, _rn, _rm) \
1468 generate_op_ands_reg(reg_temp, _rn, _rm) \
1470 #define generate_op_teq_reg(_rd, _rn, _rm) \
1471 generate_op_eors_reg(reg_temp, _rn, _rm) \
/* Immediate S-bit forms; arithmetic ones reuse the imm wrapper. */
1473 #define generate_op_ands_imm(_rd, _rn) \
1474 generate_alu_immu(andi, and, _rd, _rn, imm); \
1475 generate_op_logic_flags(_rd) \
1477 #define generate_op_orrs_imm(_rd, _rn) \
1478 generate_alu_immu(ori, or, _rd, _rn, imm); \
1479 generate_op_logic_flags(_rd) \
1481 #define generate_op_eors_imm(_rd, _rn) \
1482 generate_alu_immu(xori, xor, _rd, _rn, imm); \
1483 generate_op_logic_flags(_rd) \
1485 #define generate_op_bics_imm(_rd, _rn) \
1486 generate_alu_immu(andi, and, _rd, _rn, (~imm)); \
1487 generate_op_logic_flags(_rd) \
1489 #define generate_op_subs_imm(_rd, _rn) \
1490 generate_op_imm_wrapper(subs, _rd, _rn) \
1492 #define generate_op_rsbs_imm(_rd, _rn) \
1493 generate_op_imm_wrapper(rsbs, _rd, _rn) \
1495 #define generate_op_sbcs_imm(_rd, _rn) \
1496 generate_op_imm_wrapper(sbcs, _rd, _rn) \
1498 #define generate_op_rscs_imm(_rd, _rn) \
1499 generate_op_imm_wrapper(rscs, _rd, _rn) \
1501 #define generate_op_adds_imm(_rd, _rn) \
1502 generate_op_imm_wrapper(adds, _rd, _rn) \
1504 #define generate_op_adcs_imm(_rd, _rn) \
1505 generate_op_imm_wrapper(adcs, _rd, _rn) \
1507 #define generate_op_movs_imm(_rd, _rn) \
1508 generate_load_imm(_rd, imm); \
1509 generate_op_logic_flags(_rd) \
1511 #define generate_op_mvns_imm(_rd, _rn) \
1512 generate_load_imm(_rd, (~imm)); \
1513 generate_op_logic_flags(_rd) \
1515 #define generate_op_cmp_imm(_rd, _rn) \
1516 generate_op_imm_wrapper(cmp, _rd, _rn) \
1518 #define generate_op_cmn_imm(_rd, _rn) \
1519 generate_op_imm_wrapper(cmn, _rd, _rn) \
1521 #define generate_op_tst_imm(_rd, _rn) \
1522 generate_op_ands_imm(reg_temp, _rn) \
1524 #define generate_op_teq_imm(_rd, _rn) \
1525 generate_op_eors_imm(reg_temp, _rn) \
/* ---------------------------------------------------------------------
   ARM instruction-level glue: decode the opcode fields (rd/rn/rm/rs via
   arm_decode_*), resolve PC-as-operand cases (ARM reads PC as pc+8),
   and dispatch to the generate_op_* emitters above.
   --------------------------------------------------------------------- */
1527 #define arm_generate_op_load_yes() \
1528 generate_load_reg_pc(reg_a1, rn, 8) \
1530 #define arm_generate_op_load_no() \
1532 #define arm_op_check_yes() \
1533 check_load_reg_pc(arm_reg_a1, rn, 8) \
1535 #define arm_op_check_no() \
/* Register-operand data processing where the shifter may also update
   the C flag (generate_load_rm_sh_flags); otherwise the cheaper
   no-flags shifter load is used.  NOTE(review): the if/else braces are
   elided in this listing. */
1537 #define arm_generate_op_reg_flags(name, load_op) \
1538 arm_decode_data_proc_reg(); \
1539 if(check_generate_c_flag) \
1541 rm = generate_load_rm_sh_flags(rm); \
1545 rm = generate_load_rm_sh_no_flags(rm); \
1548 arm_op_check_##load_op(); \
1549 generate_op_##name##_reg(arm_to_mips_reg[rd], arm_to_mips_reg[rn], \
1550 arm_to_mips_reg[rm]) \
1552 #define arm_generate_op_reg(name, load_op) \
1553 arm_decode_data_proc_reg(); \
1554 rm = generate_load_rm_sh_no_flags(rm); \
1555 arm_op_check_##load_op(); \
1556 generate_op_##name##_reg(arm_to_mips_reg[rd], arm_to_mips_reg[rn], \
1557 arm_to_mips_reg[rm]) \
1559 #define arm_generate_op_imm(name, load_op) \
1560 arm_decode_data_proc_imm(); \
1561 arm_op_check_##load_op(); \
1562 generate_op_##name##_imm(arm_to_mips_reg[rd], arm_to_mips_reg[rn]) \
/* Top-level data-processing emitters; check_store_reg_pc_* handles the
   special case where rd == PC (branch). */
1564 #define arm_data_proc(name, type, flags_op) \
1566 arm_generate_op_##type(name, yes); \
1567 check_store_reg_pc_##flags_op(rd); \
1570 #define arm_data_proc_test(name, type) \
1572 arm_generate_op_##type(name, yes); \
1575 #define arm_data_proc_unary(name, type, flags_op) \
1577 arm_generate_op_##type(name, no); \
1578 check_store_reg_pc_##flags_op(rd); \
/* MUL/MLA: 32x32 multiply through MIPS HI/LO; "add" variant folds in
   the accumulate register rn. */
1581 #define arm_multiply_flags_yes(_rd) \
1582 generate_op_logic_flags(_rd) \
1584 #define arm_multiply_flags_no(_rd) \
1586 #define arm_multiply_add_no() \
1587 mips_emit_mflo(arm_to_mips_reg[rd]) \
1589 #define arm_multiply_add_yes() \
1590 mips_emit_mflo(reg_temp); \
1591 mips_emit_addu(arm_to_mips_reg[rd], reg_temp, arm_to_mips_reg[rn]) \
1593 #define arm_multiply(add_op, flags) \
1595 arm_decode_multiply(); \
1596 mips_emit_multu(arm_to_mips_reg[rm], arm_to_mips_reg[rs]); \
1597 arm_multiply_add_##add_op(); \
1598 arm_multiply_flags_##flags(arm_to_mips_reg[rd]); \
/* 64-bit multiply flags: Z = (lo == 0 && hi == 0), N = hi bit 31. */
1601 #define arm_multiply_long_flags_yes(_rdlo, _rdhi) \
1602 mips_emit_sltiu(reg_z_cache, _rdlo, 1); \
1603 mips_emit_sltiu(reg_a0, _rdhi, 1); \
1604 mips_emit_and(reg_z_cache, reg_z_cache, reg_a0); \
1605 mips_emit_srl(reg_n_cache, _rdhi, 31); \
1607 #define arm_multiply_long_flags_no(_rdlo, _rdhi) \
/* Accumulate variants seed HI/LO with the existing rdlo/rdhi pair
   before the (presumably madd-style) generate_multiply_##name(). */
1609 #define arm_multiply_long_add_yes(name) \
1610 mips_emit_mtlo(arm_to_mips_reg[rdlo]); \
1611 mips_emit_mthi(arm_to_mips_reg[rdhi]); \
1612 generate_multiply_##name() \
1614 #define arm_multiply_long_add_no(name) \
1615 generate_multiply_##name() \
1617 #define arm_multiply_long(name, add_op, flags) \
1619 arm_decode_multiply_long(); \
1620 arm_multiply_long_add_##add_op(name); \
1621 mips_emit_mflo(arm_to_mips_reg[rdlo]); \
1622 mips_emit_mfhi(arm_to_mips_reg[rdhi]); \
1623 arm_multiply_long_flags_##flags(arm_to_mips_reg[rdlo], \
1624 arm_to_mips_reg[rdhi]); \
/* MRS: read CPSR/SPSR via helper call, store the return value to rd. */
1627 #define arm_psr_read(op_type, psr_reg) \
1628 generate_function_call(execute_read_##psr_reg); \
1629 generate_store_reg(reg_rv, rd) \
// Commit a new CPSR value (already masked by the caller) and handle the
// side effects of unmasking IRQs.  `address` is the PC of the MSR
// instruction.  If the low byte (mode/IRQ bits) is being written:
// switch the CPU register bank to the new mode, and if an enabled,
// pending interrupt exists (IE & IF, IME set) and the new CPSR has the
// I bit (0x80) clear, enter the IRQ exception immediately: LR_irq gets
// address + 4, SPSR_irq saves the written CPSR, and CPSR becomes 0xD2
// (IRQ mode, IRQs/FIQs masked).
// NOTE(review): the braces/return of this function are elided from this
// listing; the visible lines are the body statements only.
1631 u32 execute_store_cpsr_body(u32 _cpsr, u32 store_mask, u32 address)
1633 reg[REG_CPSR] = _cpsr;
1634 if(store_mask & 0xFF)
1636 set_cpu_mode(cpu_modes[_cpsr & 0x1F]);
1637 if((io_registers[REG_IE] & io_registers[REG_IF]) &&
1638 io_registers[REG_IME] && ((_cpsr & 0x80) == 0))
1640 reg_mode[MODE_IRQ][6] = address + 4;
1641 spsr[MODE_IRQ] = _cpsr;
1642 reg[REG_CPSR] = 0xD2;
1643 set_cpu_mode(MODE_IRQ);
/* MSR: a0 = new PSR value (from register or immediate), a1 = write mask
   selected by the instruction's field bits, a2 = pc + 4 (needed by
   execute_store_cpsr to resume after a triggered IRQ). */
1651 #define arm_psr_load_new_reg() \
1652 generate_load_reg(reg_a0, rm) \
1654 #define arm_psr_load_new_imm() \
1655 generate_load_imm(reg_a0, imm) \
1657 #define arm_psr_store(op_type, psr_reg) \
1658 arm_psr_load_new_##op_type(); \
1659 generate_load_imm(reg_a1, psr_masks[psr_field]); \
1660 generate_load_pc(reg_a2, (pc + 4)); \
1661 generate_function_call_swap_delay(execute_store_##psr_reg) \
1663 #define arm_psr(op_type, transfer_type, psr_reg) \
1665 arm_decode_psr_##op_type(); \
1666 arm_psr_##transfer_type(op_type, psr_reg); \
/* ---------------------------------------------------------------------
   ARM single data transfer (LDR/STR/LDRH/...).  Address goes in a0;
   loads pass pc + 8 in a1 (ARM pipeline PC, presumably used for open
   bus / fault reporting by the handler -- confirm), stores pass the
   value in a1 and pc in a2.  A load into PC becomes a branch.
   --------------------------------------------------------------------- */
1669 #define arm_access_memory_load(mem_type) \
1671 mips_emit_jal(mips_absolute_offset(execute_load_##mem_type)); \
1672 generate_load_pc(reg_a1, (pc + 8)); \
1673 generate_store_reg(reg_rv, rd); \
1674 check_store_reg_pc_no_flags(rd) \
/* STR of PC stores pc + 12 (store-side pipeline value). */
1676 #define arm_access_memory_store(mem_type) \
1678 generate_load_pc(reg_a2, (pc + 4)); \
1679 generate_load_reg_pc(reg_a1, rd, 12); \
1680 generate_function_call_swap_delay(execute_store_##mem_type) \
/* Pre-indexed register offset: a0 = rn +/- rm (rn read as pc+8 when
   rn == PC). */
1682 #define arm_access_memory_reg_pre_up() \
1683 mips_emit_addu(reg_a0, arm_to_mips_reg[rn], arm_to_mips_reg[rm]) \
1685 #define arm_access_memory_reg_pre_down() \
1686 mips_emit_subu(reg_a0, arm_to_mips_reg[rn], arm_to_mips_reg[rm]) \
1688 #define arm_access_memory_reg_pre(adjust_dir) \
1689 check_load_reg_pc(arm_reg_a0, rn, 8); \
1690 arm_access_memory_reg_pre_##adjust_dir() \
1692 #define arm_access_memory_reg_pre_wb(adjust_dir) \
1693 arm_access_memory_reg_pre(adjust_dir); \
1694 generate_store_reg(reg_a0, rn) \
/* Post-indexed: access at old rn (copied to a0 first), then rn is
   adjusted in place. */
1696 #define arm_access_memory_reg_post_up() \
1697 mips_emit_addu(arm_to_mips_reg[rn], arm_to_mips_reg[rn], \
1698 arm_to_mips_reg[rm]) \
1700 #define arm_access_memory_reg_post_down() \
1701 mips_emit_subu(arm_to_mips_reg[rn], arm_to_mips_reg[rn], \
1702 arm_to_mips_reg[rm]) \
1704 #define arm_access_memory_reg_post(adjust_dir) \
1705 generate_load_reg(reg_a0, rn); \
1706 arm_access_memory_reg_post_##adjust_dir() \
/* Same four addressing shapes with an immediate offset. */
1708 #define arm_access_memory_imm_pre_up() \
1709 mips_emit_addiu(reg_a0, arm_to_mips_reg[rn], offset) \
1711 #define arm_access_memory_imm_pre_down() \
1712 mips_emit_addiu(reg_a0, arm_to_mips_reg[rn], -offset) \
1714 #define arm_access_memory_imm_pre(adjust_dir) \
1715 check_load_reg_pc(arm_reg_a0, rn, 8); \
1716 arm_access_memory_imm_pre_##adjust_dir() \
1718 #define arm_access_memory_imm_pre_wb(adjust_dir) \
1719 arm_access_memory_imm_pre(adjust_dir); \
1720 generate_store_reg(reg_a0, rn) \
1722 #define arm_access_memory_imm_post_up() \
1723 mips_emit_addiu(arm_to_mips_reg[rn], arm_to_mips_reg[rn], offset) \
1725 #define arm_access_memory_imm_post_down() \
1726 mips_emit_addiu(arm_to_mips_reg[rn], arm_to_mips_reg[rn], -offset) \
1728 #define arm_access_memory_imm_post(adjust_dir) \
1729 generate_load_reg(reg_a0, rn); \
1730 arm_access_memory_imm_post_##adjust_dir() \
/* Decode + address generation for word/byte and halfword transfers.
   The register form runs the shifted offset through
   generate_load_offset_sh first. */
1732 #define arm_data_trans_reg(adjust_op, adjust_dir) \
1733 arm_decode_data_trans_reg(); \
1734 rm = generate_load_offset_sh(rm); \
1735 arm_access_memory_reg_##adjust_op(adjust_dir) \
1737 #define arm_data_trans_imm(adjust_op, adjust_dir) \
1738 arm_decode_data_trans_imm(); \
1739 arm_access_memory_imm_##adjust_op(adjust_dir) \
1741 #define arm_data_trans_half_reg(adjust_op, adjust_dir) \
1742 arm_decode_half_trans_r(); \
1743 arm_access_memory_reg_##adjust_op(adjust_dir) \
1745 #define arm_data_trans_half_imm(adjust_op, adjust_dir) \
1746 arm_decode_half_trans_of(); \
1747 arm_access_memory_imm_##adjust_op(adjust_dir) \
/* Top-level single-transfer emitter.  NOTE(review): the parameter list
   continues on an elided line (offset_type is the trailing parameter). */
1749 #define arm_access_memory(access_type, direction, adjust_op, mem_type, \
1752 arm_data_trans_##offset_type(adjust_op, direction); \
1753 arm_access_memory_##access_type(mem_type); \
/* Population count of a 16-bit register list via a 256-entry byte
   table. */
1756 #define word_bit_count(word) \
1757 (bit_count[word >> 8] + bit_count[word & 0xFF]) \
/* Debug trace for the (unimplemented) LDM/STM S-bit case. */
1759 #define sprint_no(access_type, pre_op, post_op, wb) \
1761 #define sprint_yes(access_type, pre_op, post_op, wb) \
1762 printf("sbit on %s %s %s %s\n", #access_type, #pre_op, #post_op, #wb) \
/* ---------------------------------------------------------------------
   ARM LDM/STM (block transfer).  Per-register body: a0 holds the slot
   address, i is the loop register index.  The "final" store variant
   goes through execute_store_u32 (full store path, presumably so the
   last access handles SMC/IO side effects -- confirm).
   --------------------------------------------------------------------- */
1764 #define arm_block_memory_load() \
1765 generate_function_call_swap_delay(execute_aligned_load32); \
1766 generate_store_reg(reg_rv, i) \
1768 #define arm_block_memory_store() \
1769 generate_load_reg_pc(reg_a1, i, 8); \
1770 generate_function_call_swap_delay(execute_aligned_store32) \
1772 #define arm_block_memory_final_load() \
1773 arm_block_memory_load() \
1775 #define arm_block_memory_final_store() \
1776 generate_load_pc(reg_a2, (pc + 4)); \
1777 mips_emit_jal(mips_absolute_offset(execute_store_u32)); \
1778 generate_load_reg(reg_a1, i) \
/* LDM with PC in the list (bit 15) ends the block with an indirect
   ARM-mode branch to the loaded value. */
1780 #define arm_block_memory_adjust_pc_store() \
1782 #define arm_block_memory_adjust_pc_load() \
1783 if(reg_list & 0x8000) \
1785 generate_mov(reg_a0, reg_rv); \
1786 generate_indirect_branch_arm(); \
/* Fast path bodies for the stack-in-IWRAM optimization: direct lw/sw
   against the host pointer staged in a1. */
1789 #define arm_block_memory_sp_load() \
1790 mips_emit_lw(arm_to_mips_reg[i], reg_a1, offset); \
1792 #define arm_block_memory_sp_store() \
1794 u32 store_reg = i; \
1795 check_load_reg_pc(arm_reg_a0, store_reg, 8); \
1796 mips_emit_sw(arm_to_mips_reg[store_reg], reg_a1, offset); \
1799 #define arm_block_memory_sp_adjust_pc_store() \
1801 #define arm_block_memory_sp_adjust_pc_load() \
1802 if(reg_list & 0x8000) \
1804 generate_indirect_branch_arm(); \
/* Base address selection: a2 = lowest address touched, per the four
   ARM addressing modes (DA/DB/IA/IB). */
1807 #define arm_block_memory_offset_down_a() \
1808 mips_emit_addiu(reg_a2, base_reg, (-((word_bit_count(reg_list) * 4) - 4))) \
1810 #define arm_block_memory_offset_down_b() \
1811 mips_emit_addiu(reg_a2, base_reg, (word_bit_count(reg_list) * -4)) \
1813 #define arm_block_memory_offset_no() \
1814 mips_emit_addu(reg_a2, base_reg, reg_zero) \
1816 #define arm_block_memory_offset_up() \
1817 mips_emit_addiu(reg_a2, base_reg, 4) \
1819 #define arm_block_memory_writeback_down() \
1820 mips_emit_addiu(base_reg, base_reg, (-(word_bit_count(reg_list) * 4))) \
1822 #define arm_block_memory_writeback_up() \
1823 mips_emit_addiu(base_reg, base_reg, (word_bit_count(reg_list) * 4)) \
1825 #define arm_block_memory_writeback_no()
1827 // Only emit writeback if the register is not in the list
1829 #define arm_block_memory_writeback_load(writeback_type) \
1830 if(!((reg_list >> rn) & 0x01)) \
1832 arm_block_memory_writeback_##writeback_type(); \
1835 #define arm_block_memory_writeback_store(writeback_type) \
1836 arm_block_memory_writeback_##writeback_type() \
/* Main LDM/STM emitter.  When the base is SP and iwram_stack_optimize
   is on, the address is masked to the 32K IWRAM mirror (andi 0x7FFC)
   and accessed directly through iwram + 0x8000 -- apparently assuming
   the stack stays inside IWRAM; otherwise each register goes through
   the aligned load/store helpers with the address word-aligned by
   clearing the low 2 bits (mips_emit_ins with $zero).
   NOTE(review): loop braces and the offset bookkeeping between the
   visible lines are elided in this listing. */
1838 #define arm_block_memory(access_type, offset_type, writeback_type, s_bit) \
1840 arm_decode_block_trans(); \
1843 u32 base_reg = arm_to_mips_reg[rn]; \
1845 arm_block_memory_offset_##offset_type(); \
1846 arm_block_memory_writeback_##access_type(writeback_type); \
1848 if((rn == REG_SP) && iwram_stack_optimize) \
1850 mips_emit_andi(reg_a1, reg_a2, 0x7FFC); \
1851 generate_load_imm(reg_a0, ((u32)(iwram + 0x8000))); \
1852 mips_emit_addu(reg_a1, reg_a1, reg_a0); \
1854 for(i = 0; i < 16; i++) \
1856 if((reg_list >> i) & 0x01) \
1859 arm_block_memory_sp_##access_type(); \
1864 arm_block_memory_sp_adjust_pc_##access_type(); \
1868 mips_emit_ins(reg_a2, reg_zero, 0, 2); \
1870 for(i = 0; i < 16; i++) \
1872 if((reg_list >> i) & 0x01) \
1875 mips_emit_addiu(reg_a0, reg_a2, offset); \
1876 if(reg_list & ~((2 << i) - 1)) \
1878 arm_block_memory_##access_type(); \
1883 arm_block_memory_final_##access_type(); \
1889 arm_block_memory_adjust_pc_##access_type(); \
/* Pre/post base-adjust helpers used by the old_arm_block_memory path
   below: a2 is the running transfer address, writeback copies it (or
   an adjusted value) back to rn. */
1893 #define arm_block_writeback_no()
1895 #define arm_block_writeback_yes() \
1896 mips_emit_addu(arm_to_mips_reg[rn], reg_a2, reg_zero) \
1898 #define arm_block_address_preadjust_up_full(wb) \
1899 mips_emit_addiu(reg_a2, arm_to_mips_reg[rn], \
1900 ((word_bit_count(reg_list)) * 4)); \
1901 arm_block_writeback_##wb() \
1903 #define arm_block_address_preadjust_up(wb) \
1904 mips_emit_addiu(reg_a2, arm_to_mips_reg[rn], 4); \
1905 arm_block_writeback_##wb() \
1907 #define arm_block_address_preadjust_down_full(wb) \
1908 mips_emit_addiu(reg_a2, arm_to_mips_reg[rn], \
1909 -((word_bit_count(reg_list)) * 4)); \
1910 arm_block_writeback_##wb() \
1912 #define arm_block_address_preadjust_down(wb) \
1913 mips_emit_addiu(reg_a2, arm_to_mips_reg[rn], \
1914 -(((word_bit_count(reg_list)) * 4) - 4)); \
1915 arm_block_writeback_##wb()
1917 #define arm_block_address_preadjust_no(wb) \
1918 mips_emit_addu(reg_a2, arm_to_mips_reg[rn], reg_zero) \
1920 #define arm_block_address_postadjust_no() \
1922 #define arm_block_address_postadjust_up() \
1923 mips_emit_addiu(arm_to_mips_reg[rn], reg_a2, \
1924 ((word_bit_count(reg_list)) * 4)) \
1926 #define arm_block_address_postadjust_down() \
1927 mips_emit_addiu(arm_to_mips_reg[rn], reg_a2, \
1928 -((word_bit_count(reg_list)) * 4)) \
/* ---------------------------------------------------------------------
   NOTE(review): this section repeats verbatim the sprint_* and
   arm_block_memory_* helper macros defined earlier in the file.  As
   shown, these are redefinitions; the full source may carry #undef
   lines or conditional compilation between the two copies -- verify
   before touching either copy.
   --------------------------------------------------------------------- */
1930 #define sprint_no(access_type, pre_op, post_op, wb) \
1932 #define sprint_yes(access_type, pre_op, post_op, wb) \
1933 printf("sbit on %s %s %s %s\n", #access_type, #pre_op, #post_op, #wb) \
1935 #define arm_block_memory_load() \
1936 generate_function_call_swap_delay(execute_aligned_load32); \
1937 generate_store_reg(reg_rv, i) \
1939 #define arm_block_memory_store() \
1940 generate_load_reg_pc(reg_a1, i, 8); \
1941 generate_function_call_swap_delay(execute_aligned_store32) \
1943 #define arm_block_memory_final_load() \
1944 arm_block_memory_load() \
1946 #define arm_block_memory_final_store() \
1947 generate_load_pc(reg_a2, (pc + 4)); \
1948 mips_emit_jal(mips_absolute_offset(execute_store_u32)); \
1949 generate_load_reg(reg_a1, i) \
1951 #define arm_block_memory_adjust_pc_store() \
1953 #define arm_block_memory_adjust_pc_load() \
1954 if(reg_list & 0x8000) \
1956 generate_mov(reg_a0, reg_rv); \
1957 generate_indirect_branch_arm(); \
1960 #define arm_block_memory_sp_load() \
1961 mips_emit_lw(arm_to_mips_reg[i], reg_a1, offset); \
1963 #define arm_block_memory_sp_store() \
1965 u32 store_reg = i; \
1966 check_load_reg_pc(arm_reg_a0, store_reg, 8); \
1967 mips_emit_sw(arm_to_mips_reg[store_reg], reg_a1, offset); \
1970 #define arm_block_memory_sp_adjust_pc_store() \
1972 #define arm_block_memory_sp_adjust_pc_load() \
1973 if(reg_list & 0x8000) \
1975 generate_indirect_branch_arm(); \
/* Legacy LDM/STM emitter using the preadjust/postadjust address model
   (pre_op positions reg_a2, post_op writes the adjusted base back).
   Same SP/IWRAM fast path as arm_block_memory above.
   NOTE(review): loop and brace structure elided in this listing. */
1978 #define old_arm_block_memory(access_type, pre_op, post_op, wb, s_bit) \
1980 arm_decode_block_trans(); \
1983 u32 base_reg = arm_to_mips_reg[rn]; \
1985 arm_block_address_preadjust_##pre_op(wb); \
1986 arm_block_address_postadjust_##post_op(); \
1988 sprint_##s_bit(access_type, pre_op, post_op, wb); \
1990 if((rn == REG_SP) && iwram_stack_optimize) \
1992 mips_emit_andi(reg_a1, reg_a2, 0x7FFC); \
1993 generate_load_imm(reg_a0, ((u32)(iwram + 0x8000))); \
1994 mips_emit_addu(reg_a1, reg_a1, reg_a0); \
1996 for(i = 0; i < 16; i++) \
1998 if((reg_list >> i) & 0x01) \
2001 arm_block_memory_sp_##access_type(); \
2006 arm_block_memory_sp_adjust_pc_##access_type(); \
2010 mips_emit_ins(reg_a2, reg_zero, 0, 2); \
2012 for(i = 0; i < 16; i++) \
2014 if((reg_list >> i) & 0x01) \
2017 mips_emit_addiu(reg_a0, reg_a2, offset); \
2018 if(reg_list & ~((2 << i) - 1)) \
2020 arm_block_memory_##access_type(); \
2025 arm_block_memory_final_##access_type(); \
2031 arm_block_memory_adjust_pc_##access_type(); \
2037 // This isn't really a correct implementation, may have to fix later.
/* SWP: load old value from [rn] into a2 (staged there so the store
   call can't clobber it), store rm to [rn], then write the old value
   to rd.  Not atomic with respect to the memory handlers -- see the
   author's note above. */
2039 #define arm_swap(type) \
2041 arm_decode_swap(); \
2043 mips_emit_jal(mips_absolute_offset(execute_load_##type)); \
2044 generate_load_reg(reg_a0, rn); \
2045 generate_mov(reg_a2, reg_rv); \
2046 generate_load_reg(reg_a0, rn); \
2047 mips_emit_jal(mips_absolute_offset(execute_store_##type)); \
2048 generate_load_reg(reg_a1, rm); \
2049 generate_store_reg(reg_a2, rd); \
/* ---------------------------------------------------------------------
   Thumb data processing: decode via thumb_decode_*, then reuse the
   shared generate_op_* emitters.  Thumb reads PC as pc + 4.
   --------------------------------------------------------------------- */
2052 #define thumb_generate_op_load_yes(_rs) \
2053 generate_load_reg(reg_a1, _rs) \
2055 #define thumb_generate_op_load_no(_rs) \
2057 #define thumb_generate_op_reg(name, _rd, _rs, _rn) \
2058 generate_op_##name##_reg(arm_to_mips_reg[_rd], \
2059 arm_to_mips_reg[_rs], arm_to_mips_reg[_rn]) \
2061 #define thumb_generate_op_imm(name, _rd, _rs, _rn) \
2062 generate_op_##name##_imm(arm_to_mips_reg[_rd], arm_to_mips_reg[_rs]) \
2064 // Types: add_sub, add_sub_imm, alu_op, imm
2065 // Affects N/Z/C/V flags
2067 #define thumb_data_proc(type, name, rn_type, _rd, _rs, _rn) \
2069 thumb_decode_##type(); \
2070 thumb_generate_op_##rn_type(name, _rd, _rs, _rn); \
2073 #define thumb_data_proc_test(type, name, rn_type, _rs, _rn) \
2075 thumb_decode_##type(); \
2076 thumb_generate_op_##rn_type(name, 0, _rs, _rn); \
2079 #define thumb_data_proc_unary(type, name, rn_type, _rd, _rn) \
2081 thumb_decode_##type(); \
2082 thumb_generate_op_##rn_type(name, _rd, 0, _rn); \
/* Writing PC from a Thumb hi-register op ends the block with an
   indirect Thumb-mode branch.  NOTE(review): the rd == REG_PC guard is
   elided from this listing. */
2085 #define check_store_reg_pc_thumb(_rd) \
2088 generate_indirect_branch_cycle_update(thumb); \
/* NOTE(review): two definitions of thumb_data_proc_hi appear below
   (lines 2091 and 2104); the first routes the result through dest_rd,
   the second writes rd directly.  The full source presumably disables
   one of them (e.g. #if 0) -- verify which is live. */
2091 #define thumb_data_proc_hi(name) \
2093 thumb_decode_hireg_op(); \
2095 check_load_reg_pc(arm_reg_a0, rs, 4); \
2096 check_load_reg_pc(arm_reg_a1, rd, 4); \
2097 generate_op_##name##_reg(arm_to_mips_reg[dest_rd], arm_to_mips_reg[rd], \
2098 arm_to_mips_reg[rs]); \
2099 check_store_reg_pc_thumb(dest_rd); \
2104 #define thumb_data_proc_hi(name) \
2106 thumb_decode_hireg_op(); \
2107 check_load_reg_pc(arm_reg_a0, rs, 4); \
2108 check_load_reg_pc(arm_reg_a1, rd, 4); \
2109 generate_op_##name##_reg(arm_to_mips_reg[rd], arm_to_mips_reg[rd], \
2110 arm_to_mips_reg[rs]); \
2111 check_store_reg_pc_thumb(rd); \
2116 #define thumb_data_proc_test_hi(name) \
2118 thumb_decode_hireg_op(); \
2119 check_load_reg_pc(arm_reg_a0, rs, 4); \
2120 check_load_reg_pc(arm_reg_a1, rd, 4); \
2121 generate_op_##name##_reg(reg_temp, arm_to_mips_reg[rd], \
2122 arm_to_mips_reg[rs]); \
2125 #define thumb_data_proc_mov_hi() \
2127 thumb_decode_hireg_op(); \
2128 check_load_reg_pc(arm_reg_a0, rs, 4); \
2129 mips_emit_addu(arm_to_mips_reg[rd], arm_to_mips_reg[rs], reg_zero); \
2130 check_store_reg_pc_thumb(rd); \
/* LDR rd, [PC, #imm]: PC is word-aligned ((pc & ~2) + 4) per the Thumb
   PC-relative load rule. */
2133 #define thumb_load_pc(_rd) \
2135 thumb_decode_imm(); \
2136 generate_load_pc(arm_to_mips_reg[_rd], (((pc & ~2) + 4) + (imm * 4))); \
2139 #define thumb_load_sp(_rd) \
2141 thumb_decode_imm(); \
2142 mips_emit_addiu(arm_to_mips_reg[_rd], reg_r13, (imm * 4)); \
2145 #define thumb_adjust_sp(value) \
2147 thumb_decode_add_sp(); \
2148 mips_emit_addiu(reg_r13, reg_r13, (value)); \
2151 // Decode types: shift, alu_op
2152 // Operation types: lsl, lsr, asr, ror
2153 // Affects N/Z/C flags
/* Immediate shifts: separate flag/no-flag emitters depending on
   whether C is live.  NOTE(review): if/else structure elided; line 2166
   appears to be an imm == 0 plain-move path. */
2155 #define thumb_generate_shift_imm(name) \
2156 if(check_generate_c_flag) \
2158 generate_shift_imm_##name##_flags(rd, rs, imm); \
2162 generate_shift_imm_##name##_no_flags(rd, rs, imm); \
2166 mips_emit_addu(arm_to_mips_reg[rd], arm_to_mips_reg[rs], reg_zero); \
/* Register shifts: the shift emitters apparently leave the result in
   reg_a0 (hence the final move into the saved original_rd). */
2169 #define thumb_generate_shift_reg(name) \
2171 u32 original_rd = rd; \
2172 if(check_generate_c_flag) \
2174 generate_shift_reg_##name##_flags(rd, rs); \
2178 generate_shift_reg_##name##_no_flags(rd, rs); \
2180 mips_emit_addu(arm_to_mips_reg[original_rd], reg_a0, reg_zero); \
2183 #define thumb_shift(decode_type, op_type, value_type) \
2185 thumb_decode_##decode_type(); \
2186 thumb_generate_shift_##value_type(op_type); \
2187 generate_op_logic_flags(arm_to_mips_reg[rd]); \
2190 // Operation types: imm, mem_reg, mem_imm
/* Thumb single transfers: address in a0, loads pass pc + 4 in a1,
   stores pass pc + 2 in a2 and the value in a1. */
2192 #define thumb_access_memory_load(mem_type, reg_rd) \
2194 mips_emit_jal(mips_absolute_offset(execute_load_##mem_type)); \
2195 generate_load_pc(reg_a1, (pc + 4)); \
2196 generate_store_reg(reg_rv, reg_rd) \
2198 #define thumb_access_memory_store(mem_type, reg_rd) \
2200 generate_load_pc(reg_a2, (pc + 2)); \
2201 mips_emit_jal(mips_absolute_offset(execute_store_##mem_type)); \
2202 generate_load_reg(reg_a1, reg_rd) \
2204 #define thumb_access_memory_generate_address_pc_relative(offset, reg_rb, \
2206 generate_load_pc(reg_a0, (offset)) \
2208 #define thumb_access_memory_generate_address_reg_imm(offset, reg_rb, reg_ro) \
2209 mips_emit_addiu(reg_a0, arm_to_mips_reg[reg_rb], (offset)) \
2211 #define thumb_access_memory_generate_address_reg_reg(offset, reg_rb, reg_ro) \
2212 mips_emit_addu(reg_a0, arm_to_mips_reg[reg_rb], arm_to_mips_reg[reg_ro]) \
2214 #define thumb_access_memory(access_type, op_type, reg_rd, reg_rb, reg_ro, \
2215 address_type, offset, mem_type) \
2217 thumb_decode_##op_type(); \
2218 thumb_access_memory_generate_address_##address_type(offset, reg_rb, \
2220 thumb_access_memory_##access_type(mem_type, reg_rd); \
/* ---------------------------------------------------------------------
   Thumb block transfers (LDMIA/STMIA/PUSH/POP).  reg_a2 holds the base
   transfer address; pre-adjust variants also write the new base back
   immediately.  PUSH {.., LR} reserves one extra slot; POP {.., PC}
   consumes one.
   --------------------------------------------------------------------- */
2224 #define thumb_block_address_preadjust_no(base_reg) \
2225 mips_emit_addu(reg_a2, arm_to_mips_reg[base_reg], reg_zero) \
2227 #define thumb_block_address_preadjust_up(base_reg) \
2228 mips_emit_addiu(reg_a2, arm_to_mips_reg[base_reg], \
2229 (bit_count[reg_list] * 4)); \
2230 mips_emit_addu(arm_to_mips_reg[base_reg], reg_a2, reg_zero) \
2232 #define thumb_block_address_preadjust_down(base_reg) \
2233 mips_emit_addiu(reg_a2, arm_to_mips_reg[base_reg], \
2234 -(bit_count[reg_list] * 4)); \
2235 mips_emit_addu(arm_to_mips_reg[base_reg], reg_a2, reg_zero) \
/* PUSH with LR: room for bit_count + 1 words below the old SP. */
2237 #define thumb_block_address_preadjust_push_lr(base_reg) \
2238 mips_emit_addiu(reg_a2, arm_to_mips_reg[base_reg], \
2239 -((bit_count[reg_list] + 1) * 4)); \
2240 mips_emit_addu(arm_to_mips_reg[base_reg], reg_a2, reg_zero) \
2242 #define thumb_block_address_postadjust_no(base_reg) \
2244 #define thumb_block_address_postadjust_up(base_reg) \
2245 mips_emit_addiu(arm_to_mips_reg[base_reg], reg_a2, \
2246 (bit_count[reg_list] * 4)) \
2248 #define thumb_block_address_postadjust_down(base_reg) \
2249 mips_emit_addiu(arm_to_mips_reg[base_reg], reg_a2, \
2250 -(bit_count[reg_list] * 4)) \
/* POP with PC: final SP skips the extra PC slot (+4). */
2252 #define thumb_block_address_postadjust_pop_pc(base_reg) \
2253 mips_emit_addiu(arm_to_mips_reg[base_reg], reg_a2, \
2254 ((bit_count[reg_list] * 4) + 4)) \
2256 #define thumb_block_address_postadjust_push_lr(base_reg) \
/* Per-register transfer bodies (slot address in a0, index i). */
2258 #define thumb_block_memory_load() \
2259 generate_function_call_swap_delay(execute_aligned_load32); \
2260 generate_store_reg(reg_rv, i) \
2262 #define thumb_block_memory_store() \
2263 mips_emit_jal(mips_absolute_offset(execute_aligned_store32)); \
2264 generate_load_reg(reg_a1, i) \
2266 #define thumb_block_memory_final_load() \
2267 thumb_block_memory_load() \
/* Last store goes through the full execute_store_u32 path with pc + 2
   in a2 (presumably for SMC/IO handling -- confirm). */
2269 #define thumb_block_memory_final_store() \
2270 generate_load_pc(reg_a2, (pc + 2)); \
2271 mips_emit_jal(mips_absolute_offset(execute_store_u32)); \
2272 generate_load_reg(reg_a1, i) \
2274 #define thumb_block_memory_final_no(access_type) \
2275 thumb_block_memory_final_##access_type() \
2277 #define thumb_block_memory_final_up(access_type) \
2278 thumb_block_memory_final_##access_type() \
2280 #define thumb_block_memory_final_down(access_type) \
2281 thumb_block_memory_final_##access_type() \
/* push_lr / pop_pc keep the "plain" body for the last listed register,
   since the extra LR/PC word is the true final access (handled by the
   extra_* macros below). */
2283 #define thumb_block_memory_final_push_lr(access_type) \
2284 thumb_block_memory_##access_type() \
2286 #define thumb_block_memory_final_pop_pc(access_type) \
2287 thumb_block_memory_##access_type() \
2289 #define thumb_block_memory_extra_no() \
2291 #define thumb_block_memory_extra_up() \
2293 #define thumb_block_memory_extra_down() \
/* PUSH: store LR at the top of the newly reserved frame. */
2295 #define thumb_block_memory_extra_push_lr() \
2296 mips_emit_addiu(reg_a0, reg_a2, (bit_count[reg_list] * 4)); \
2297 mips_emit_jal(mips_absolute_offset(execute_aligned_store32)); \
2298 generate_load_reg(reg_a1, REG_LR) \
/* POP: load the PC word and end the block with an indirect Thumb
   branch. */
2300 #define thumb_block_memory_extra_pop_pc() \
2301 mips_emit_jal(mips_absolute_offset(execute_aligned_load32)); \
2302 mips_emit_addiu(reg_a0, reg_a2, (bit_count[reg_list] * 4)); \
2303 generate_mov(reg_a0, reg_rv); \
2304 generate_indirect_branch_cycle_update(thumb) \
/* SP/IWRAM fast-path bodies (direct lw/sw through a1). */
2306 #define thumb_block_memory_sp_load() \
2307 mips_emit_lw(arm_to_mips_reg[i], reg_a1, offset) \
2309 #define thumb_block_memory_sp_store() \
2310 mips_emit_sw(arm_to_mips_reg[i], reg_a1, offset) \
2312 #define thumb_block_memory_sp_extra_no() \
2314 #define thumb_block_memory_sp_extra_up() \
2316 #define thumb_block_memory_sp_extra_down() \
2318 #define thumb_block_memory_sp_extra_pop_pc() \
2319 mips_emit_lw(reg_a0, reg_a1, (bit_count[reg_list] * 4)); \
2320 generate_indirect_branch_cycle_update(thumb) \
2322 #define thumb_block_memory_sp_extra_push_lr() \
2323 mips_emit_sw(reg_r14, reg_a1, (bit_count[reg_list] * 4)) \
/* Main Thumb block-transfer emitter; mirrors arm_block_memory but over
   the 8-bit Thumb register list.  Same SP-in-IWRAM optimization
   (mask to 0x7FFC, access through iwram + 0x8000) and low-2-bit
   alignment of the slow-path address.
   NOTE(review): loop braces and offset bookkeeping elided here. */
2325 #define thumb_block_memory(access_type, pre_op, post_op, base_reg) \
2327 thumb_decode_rlist(); \
2331 thumb_block_address_preadjust_##pre_op(base_reg); \
2332 thumb_block_address_postadjust_##post_op(base_reg); \
2334 if((base_reg == REG_SP) && iwram_stack_optimize) \
2336 mips_emit_andi(reg_a1, reg_a2, 0x7FFC); \
2337 generate_load_imm(reg_a0, ((u32)(iwram + 0x8000))); \
2338 generate_add(reg_a1, reg_a0); \
2340 for(i = 0; i < 8; i++) \
2342 if((reg_list >> i) & 0x01) \
2345 thumb_block_memory_sp_##access_type(); \
2350 thumb_block_memory_sp_extra_##post_op(); \
2354 mips_emit_ins(reg_a2, reg_zero, 0, 2); \
2356 for(i = 0; i < 8; i++) \
2358 if((reg_list >> i) & 0x01) \
2361 mips_emit_addiu(reg_a0, reg_a2, offset); \
2362 if(reg_list & ~((2 << i) - 1)) \
2364 thumb_block_memory_##access_type(); \
2369 thumb_block_memory_final_##post_op(access_type); \
2375 thumb_block_memory_extra_##post_op(); \
2381 #define thumb_conditional_branch(condition) \
2383 condition_check_type condition_check; \
2384 generate_condition_##condition(); \
2385 generate_branch_no_cycle_update( \
2386 block_exits[block_exit_position].branch_source, \
2387 block_exits[block_exit_position].branch_target); \
2388 generate_branch_patch_conditional(backpatch_address, translation_ptr); \
2389 block_exit_position++; \
2392 #define arm_conditional_block_header() \
2393 generate_condition(); \
2399 generate_load_pc(reg_r14, (pc + 4)); \
2403 arm_decode_branchx(); \
2404 generate_load_reg(reg_a0, rn); \
2405 /*generate_load_pc(reg_a2, pc);*/ \
2406 generate_indirect_branch_dual() \
/* NOTE(review): #define header missing from this listing -- by content this
   is the ARM SWI emitter: try the HLE handler for the SWI number encoded in
   opcode bits 23:16; otherwise call execute_swi with the return pc (pc + 4)
   and take the recorded block exit, charging cycles. */
2409 generate_swi_hle_handler((opcode >> 16) & 0xFF); \
2410 generate_load_pc(reg_a0, (pc + 4)); \
2411 generate_function_call_swap_delay(execute_swi); \
2415 generate_branch_cycle_update( \
2416 block_exits[block_exit_position].branch_source, \
2417 block_exits[block_exit_position].branch_target); \
2418 block_exit_position++ \
/* Thumb BL: set the Thumb-mode return address ((pc + 2) | 1, low bit marks
   Thumb) in r14, then branch to the recorded block-exit target with a
   cycle update and consume the exit record. */
2420 #define thumb_bl() \
2421 generate_load_pc(reg_r14, ((pc + 2) | 0x01)); \
2422 generate_branch_cycle_update( \
2423 block_exits[block_exit_position].branch_source, \
2424 block_exits[block_exit_position].branch_target); \
2425 block_exit_position++ \
/* Thumb BL second half: add offset * 2 to the partial target already held
   in r14 (set up by the first half), store the new Thumb return address
   ((pc + 2) | 1) in r14, and branch indirectly with dynamic mode. */
2427 #define thumb_blh() \
2429 thumb_decode_branch(); \
2430 generate_alu_imm(addiu, addu, reg_a0, reg_r14, (offset * 2)); \
2431 generate_load_pc(reg_r14, ((pc + 2) | 0x01)); \
2432 generate_indirect_branch_cycle_update(dual); \
/* Thumb BX: load the branch target from rs (may be a high register) and
   branch indirectly; ARM vs Thumb mode is selected at run time ("dual"). */
2436 #define thumb_bx() \
2438 thumb_decode_hireg_op(); \
2439 generate_load_reg_pc(reg_a0, rs, 4); \
2440 /*generate_load_pc(reg_a2, pc);*/ \
2441 generate_indirect_branch_cycle_update(dual); \
/* Thumb SWI: try the HLE handler for the SWI number in the low opcode byte;
   otherwise call execute_swi with the return pc (pc + 2) and take the
   recorded block exit, charging cycles. */
2444 #define thumb_swi() \
2445 generate_swi_hle_handler(opcode & 0xFF); \
2446 generate_load_pc(reg_a0, (pc + 2)); \
2447 generate_function_call_swap_delay(execute_swi); \
2448 generate_branch_cycle_update( \
2449 block_exits[block_exit_position].branch_source, \
2450 block_exits[block_exit_position].branch_target); \
2451 block_exit_position++ \
/* Per-SWI flag table consulted by generate_swi_hle_handler: a nonzero entry
   means that BIOS call is emulated natively (HLE) instead of entering the
   BIOS through execute_swi.  Every entry visible here is 0 (not handled).
   NOTE(review): the opening/closing braces and a few rows (e.g. SWI 2: Halt,
   SWI 6: Div, SWI 8: Sqrt) are missing from this listing -- the Div row is
   presumably nonzero, since generate_swi_hle_handler emits a Div body. */
2453 u8 swi_hle_handle[256] =
2455 0x0, // SWI 0: SoftReset
2456 0x0, // SWI 1: RegisterRAMReset
2458 0x0, // SWI 3: Stop/Sleep
2459 0x0, // SWI 4: IntrWait
2460 0x0, // SWI 5: VBlankIntrWait
2462 0x0, // SWI 7: DivArm
2464 0x0, // SWI 9: ArcTan
2465 0x0, // SWI A: ArcTan2
2466 0x0, // SWI B: CpuSet
2467 0x0, // SWI C: CpuFastSet
2468 0x0, // SWI D: GetBIOSCheckSum
2469 0x0, // SWI E: BgAffineSet
2470 0x0, // SWI F: ObjAffineSet
2471 0x0, // SWI 10: BitUnpack
2472 0x0, // SWI 11: LZ77UnCompWram
2473 0x0, // SWI 12: LZ77UnCompVram
2474 0x0, // SWI 13: HuffUnComp
2475 0x0, // SWI 14: RLUnCompWram
2476 0x0, // SWI 15: RLUnCompVram
2477 0x0, // SWI 16: Diff8bitUnFilterWram
2478 0x0, // SWI 17: Diff8bitUnFilterVram
2479 0x0, // SWI 18: Diff16bitUnFilter
2480 0x0, // SWI 19: SoundBias
2481 0x0, // SWI 1A: SoundDriverInit
2482 0x0, // SWI 1B: SoundDriverMode
2483 0x0, // SWI 1C: SoundDriverMain
2484 0x0, // SWI 1D: SoundDriverVSync
2485 0x0, // SWI 1E: SoundChannelClear
2486 0x0, // SWI 1F: MidiKey2Freq
2487 0x0, // SWI 20: SoundWhatever0
2488 0x0, // SWI 21: SoundWhatever1
2489 0x0, // SWI 22: SoundWhatever2
2490 0x0, // SWI 23: SoundWhatever3
2491 0x0, // SWI 24: SoundWhatever4
2492 0x0, // SWI 25: MultiBoot
2493 0x0, // SWI 26: HardReset
2494 0x0, // SWI 27: CustomHalt
2495 0x0, // SWI 28: SoundDriverVSyncOff
2496 0x0, // SWI 29: SoundDriverVSyncOn
2497 0x0 // SWI 2A: SoundGetJumpList
/* If the SWI is flagged in swi_hle_handle, emit its implementation inline
   instead of trapping into the BIOS.  The visible case is Div (SWI 6):
   divide r0 by r1, quotient -> r0 (mflo), remainder -> r1 (mfhi), and
   r3 = |quotient| computed branchlessly (sign mask via sra 31, then
   xor/subu) -- matching the GBA BIOS Div register contract.
   NOTE(review): the macro's braces and tail are missing from this listing. */
2500 #define generate_swi_hle_handler(_swi_number) \
2502 u32 swi_number = _swi_number; \
2503 if(swi_hle_handle[swi_number]) \
2506 if(swi_number == 0x06) \
2508 mips_emit_div(reg_r0, reg_r1); \
2509 mips_emit_mflo(reg_r0); \
2510 mips_emit_mfhi(reg_r1); \
2511 mips_emit_sra(reg_a0, reg_r0, 31); \
2512 mips_emit_xor(reg_r3, reg_r0, reg_a0); \
2513 mips_emit_subu(reg_r3, reg_r3, reg_a0); \
/* Translation gate: load the current pc into a0 and branch indirectly in
   the given instruction-set mode (type) without charging cycles -- used to
   re-enter the translator/dispatcher at this point. */
2519 #define generate_translation_gate(type) \
2520 generate_load_pc(reg_a0, pc); \
2521 generate_indirect_branch_no_cycle_update(type)
/* Debug hook: emit a call to step_debug_mips(pc) (declared above) so each
   translated instruction can be traced when stepping. */
2523 #define generate_step_debug() \
2524 generate_load_imm(reg_a0, pc); \
2525 generate_function_call(step_debug_mips)
/* Flush the emulated PC: load pc into a0 and store it into the REG_PC slot
   of the register file addressed by reg_base (word index REG_PC * 4). */
2527 #define generate_update_pc_reg() \
2528 generate_load_pc(reg_a0, pc); \
2529 mips_emit_sw(reg_a0, reg_base, (REG_PC * 4))