3 * Copyright (C) 2006 Exophase <exophase@gmail.com>
5 * This program is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU General Public License as
7 * published by the Free Software Foundation; either version 2 of
8 * the License, or (at your option) any later version.
10 * This program is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * General Public License for more details.
15 * You should have received a copy of the GNU General Public License
16 * along with this program; if not, write to the Free Software
17 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20 // Not-so-important todo:
21 // - stm reglist writeback when base is in the list needs adjustment
22 // - block memory needs psr swapping and user mode reg swapping
/* Dynarec translation caches.  Each *_translation_ptr is initialized to
   the start of its cache; code elsewhere advances it as translated code
   is emitted (presumably the next-free-byte pointer — confirm against
   the emitter). */
27 u8 rom_translation_cache[ROM_TRANSLATION_CACHE_SIZE];
28 u8 *rom_translation_ptr = rom_translation_cache;
30 u8 ram_translation_cache[RAM_TRANSLATION_CACHE_SIZE];
31 u8 *ram_translation_ptr = ram_translation_cache;
/* Address bounds of IWRAM/EWRAM regions that hold translated code.
   0xFFFFFFFF appears to be the "nothing translated yet" sentinel —
   NOTE(review): confirm against the cache-flush/SMC logic elsewhere in
   this file. */
32 u32 iwram_code_min = 0xFFFFFFFF;
33 u32 iwram_code_max = 0xFFFFFFFF;
34 u32 ewram_code_min = 0xFFFFFFFF;
35 u32 ewram_code_max = 0xFFFFFFFF;
37 u8 bios_translation_cache[BIOS_TRANSLATION_CACHE_SIZE];
38 u8 *bios_translation_ptr = bios_translation_cache;
/* Hash table mapping ROM PCs to translated-block entry points. */
40 u32 *rom_branch_hash[ROM_BRANCH_HASH_SIZE];
/* Idle-loop / forced-PC-update targets; 0xFFFFFFFF = unset sentinel. */
43 u32 idle_loop_target_pc = 0xFFFFFFFF;
44 u32 force_pc_update_target = 0xFFFFFFFF;
45 u32 translation_gate_target_pc[MAX_TRANSLATION_GATES];
46 u32 translation_gate_targets = 0;
/* Tunables: stack-in-IWRAM optimization and per-width self-modifying-code
   (SMC) RAM write checks; all enabled (1) by default. */
47 u32 iwram_stack_optimize = 1;
48 u32 allow_smc_ram_u8 = 1;
49 u32 allow_smc_ram_u16 = 1;
50 u32 allow_smc_ram_u32 = 1;
/* Popcount lookup table, defined in another translation unit. */
66 extern u8 bit_count[256];
/* ---- ARM opcode field decoders -------------------------------------- */
/* Each macro declares locals (rn, rd, rm, imm, ...) extracted from the  */
/* 32-bit `opcode` variable visible at the expansion site; the caller    */
/* supplies the trailing semicolon.                                      */
/* NOTE(review): this extraction dropped the blank separator lines, so   */
/* each macro's final trailing `\` ran into the next #define; the        */
/* interleaved comment lines below restore the breaks.                   */
/* Data processing, register operand: rn = first operand, rd = dest,
   rm = second operand register. */
68 #define arm_decode_data_proc_reg() \
69 u32 rn = (opcode >> 16) & 0x0F; \
70 u32 rd = (opcode >> 12) & 0x0F; \
71 u32 rm = opcode & 0x0F \
/* Data processing, immediate operand: 8-bit imm rotated right by
   imm_ror = 2 * bits[11:8]. */
73 #define arm_decode_data_proc_imm() \
74 u32 rn = (opcode >> 16) & 0x0F; \
75 u32 rd = (opcode >> 12) & 0x0F; \
76 u32 imm = opcode & 0xFF; \
77 u32 imm_ror = ((opcode >> 8) & 0x0F) * 2 \
/* MRS/MSR, register form: psr_field = PSR field mask bits. */
79 #define arm_decode_psr_reg() \
80 u32 psr_field = (opcode >> 16) & 0x0F; \
81 u32 rd = (opcode >> 12) & 0x0F; \
82 u32 rm = opcode & 0x0F \
/* MSR, rotated-immediate form. */
84 #define arm_decode_psr_imm() \
85 u32 psr_field = (opcode >> 16) & 0x0F; \
86 u32 rd = (opcode >> 12) & 0x0F; \
87 u32 imm = opcode & 0xFF; \
88 u32 imm_ror = ((opcode >> 8) & 0x0F) * 2 \
/* BX: rn = branch target register. */
90 #define arm_decode_branchx() \
91 u32 rn = opcode & 0x0F \
/* MUL/MLA: note rd is bits[19:16] and rn (the accumulator) bits[15:12] —
   reversed relative to the data-processing layout. */
93 #define arm_decode_multiply() \
94 u32 rd = (opcode >> 16) & 0x0F; \
95 u32 rn = (opcode >> 12) & 0x0F; \
96 u32 rs = (opcode >> 8) & 0x0F; \
97 u32 rm = opcode & 0x0F \
/* UMULL/UMLAL/SMULL/SMLAL: 64-bit result in rdhi:rdlo. */
99 #define arm_decode_multiply_long() \
100 u32 rdhi = (opcode >> 16) & 0x0F; \
101 u32 rdlo = (opcode >> 12) & 0x0F; \
102 u32 rs = (opcode >> 8) & 0x0F; \
103 u32 rm = opcode & 0x0F \
/* SWP/SWPB: rd <- [rn], [rn] <- rm. */
105 #define arm_decode_swap() \
106 u32 rn = (opcode >> 16) & 0x0F; \
107 u32 rd = (opcode >> 12) & 0x0F; \
108 u32 rm = opcode & 0x0F \
/* Halfword/signed transfer, register offset. */
110 #define arm_decode_half_trans_r() \
111 u32 rn = (opcode >> 16) & 0x0F; \
112 u32 rd = (opcode >> 12) & 0x0F; \
113 u32 rm = opcode & 0x0F \
/* Halfword/signed transfer, immediate offset: 8-bit value split across
   bits[11:8] (high nibble) and bits[3:0] (low nibble). */
115 #define arm_decode_half_trans_of() \
116 u32 rn = (opcode >> 16) & 0x0F; \
117 u32 rd = (opcode >> 12) & 0x0F; \
118 u32 offset = ((opcode >> 4) & 0xF0) | (opcode & 0x0F) \
/* LDR/STR, 12-bit immediate offset. */
120 #define arm_decode_data_trans_imm() \
121 u32 rn = (opcode >> 16) & 0x0F; \
122 u32 rd = (opcode >> 12) & 0x0F; \
123 u32 offset = opcode & 0x0FFF \
/* LDR/STR, (shifted) register offset. */
125 #define arm_decode_data_trans_reg() \
126 u32 rn = (opcode >> 16) & 0x0F; \
127 u32 rd = (opcode >> 12) & 0x0F; \
128 u32 rm = opcode & 0x0F \
/* LDM/STM: base register + 16-bit register list bitmap. */
130 #define arm_decode_block_trans() \
131 u32 rn = (opcode >> 16) & 0x0F; \
132 u32 reg_list = opcode & 0xFFFF \
/* B/BL: bits[23:0] hold a signed word offset.  Shift the field left as
   an unsigned value (the original cast to s32 *before* the left shift
   left-shifted a signed quantity, which is undefined behavior when
   bit 23 is set), then cast and arithmetic-shift right: <<8 then >>6
   sign-extends from bit 23 and scales words to bytes (x4).  The >> of a
   negative s32 is implementation-defined but is an arithmetic shift on
   every compiler this dynarec targets, same as the surrounding code
   already assumes. */
#define arm_decode_branch() \
  s32 offset = (s32)((opcode & 0xFFFFFF) << 8) >> 6
/* ---- Thumb opcode field decoders ------------------------------------ */
/* Same convention as the ARM decoders: locals are extracted from the    */
/* 16-bit `opcode` at the expansion site; caller supplies the semicolon. */
/* NOTE(review): interleaved comment lines also restore the macro breaks */
/* lost when this extraction dropped the blank separator lines.          */
/* LSL/LSR/ASR rd, rs, #imm: 5-bit shift amount. */
137 #define thumb_decode_shift() \
138 u32 imm = (opcode >> 6) & 0x1F; \
139 u32 rs = (opcode >> 3) & 0x07; \
140 u32 rd = opcode & 0x07 \
/* ADD/SUB rd, rs, rn (three-register form). */
142 #define thumb_decode_add_sub() \
143 u32 rn = (opcode >> 6) & 0x07; \
144 u32 rs = (opcode >> 3) & 0x07; \
145 u32 rd = opcode & 0x07 \
/* ADD/SUB rd, rs, #imm3. */
147 #define thumb_decode_add_sub_imm() \
148 u32 imm = (opcode >> 6) & 0x07; \
149 u32 rs = (opcode >> 3) & 0x07; \
150 u32 rd = opcode & 0x07 \
/* MOV/CMP/ADD/SUB rd, #imm8: 8-bit immediate only (rd is encoded in the
   dispatching switch case, not decoded here). */
152 #define thumb_decode_imm() \
153 u32 imm = opcode & 0xFF \
/* Two-register ALU operation (AND/EOR/.../MVN). */
155 #define thumb_decode_alu_op() \
156 u32 rs = (opcode >> 3) & 0x07; \
157 u32 rd = opcode & 0x07 \
/* Hi-register op / BX: rd's bit 3 comes from opcode bit 7. */
159 #define thumb_decode_hireg_op() \
160 u32 rs = (opcode >> 3) & 0x0F; \
161 u32 rd = ((opcode >> 4) & 0x08) | (opcode & 0x07) \
/* Load/store, register offset: rd <-> [rb + ro]. */
163 #define thumb_decode_mem_reg() \
164 u32 ro = (opcode >> 6) & 0x07; \
165 u32 rb = (opcode >> 3) & 0x07; \
166 u32 rd = opcode & 0x07 \
/* Load/store, 5-bit immediate offset (scaled at the use site). */
168 #define thumb_decode_mem_imm() \
169 u32 imm = (opcode >> 6) & 0x1F; \
170 u32 rb = (opcode >> 3) & 0x07; \
171 u32 rd = opcode & 0x07 \
/* ADD SP, #imm: 7-bit magnitude (sign bit handled by the caller). */
173 #define thumb_decode_add_sp() \
174 u32 imm = opcode & 0x7F \
/* PUSH/POP/LDMIA/STMIA: 8-bit register list (r0-r7). */
176 #define thumb_decode_rlist() \
177 u32 reg_list = opcode & 0xFF \
/* Conditional branch: signed 8-bit offset, sign-extended via s8 cast. */
179 #define thumb_decode_branch_cond() \
180 s32 offset = (s8)(opcode & 0xFF) \
/* SWI: 8-bit comment/service number field. */
182 #define thumb_decode_swi() \
183 u32 comment = opcode & 0xFF \
/* Unconditional branch / BL half: 11-bit offset field. */
185 #define thumb_decode_branch() \
186 u32 offset = opcode & 0x07FF \
191 #include "psp/mips_emit.h"
193 #elif defined(GP2X_BUILD)
195 #include "gp2x/arm_emit.h"
197 #elif defined(GIZ_BUILD)
199 #include "giz/arm_emit.h"
203 #include "x86/x86_emit.h"
/* Map `pc` to its 32KB page (pc >> 15) and, when it differs from the
   cached pc_region, refresh pc_region/pc_address_block from
   memory_map_read; a NULL map entry is demand-loaded from the gamepak
   (pc_region & 0x3FF: 1024-page ROM window).
   NOTE(review): the braces around the outer if-body appear to have been
   elided by this extraction (upstream line numbers jump 210->212 and
   216->219) — restore them from the upstream source before editing. */
208 #define check_pc_region(pc) \
209 new_pc_region = (pc >> 15); \
210 if(new_pc_region != pc_region) \
212 pc_region = new_pc_region; \
213 pc_address_block = memory_map_read[new_pc_region]; \
215 if(pc_address_block == NULL) \
216 pc_address_block = load_gamepak_page(pc_region & 0x3FF); \
219 #define translate_arm_instruction() \
220 check_pc_region(pc); \
221 opcode = address32(pc_address_block, (pc & 0x7FFF)); \
222 condition = block_data[block_data_position].condition; \
224 if((condition != last_condition) || (condition >= 0x20)) \
226 if((last_condition & 0x0F) != 0x0E) \
228 generate_branch_patch_conditional(backpatch_address, translation_ptr); \
231 last_condition = condition; \
235 if(condition != 0x0E) \
237 arm_conditional_block_header(); \
241 switch((opcode >> 20) & 0xFF) \
244 if((opcode & 0x90) == 0x90) \
248 /* STRH rd, [rn], -rm */ \
249 arm_access_memory(store, down, post, u16, half_reg); \
253 /* MUL rd, rm, rs */ \
254 arm_multiply(no, no); \
259 /* AND rd, rn, reg_op */ \
260 arm_data_proc(and, reg, no_flags); \
265 if((opcode & 0x90) == 0x90) \
267 switch((opcode >> 5) & 0x03) \
270 /* MULS rd, rm, rs */ \
271 arm_multiply(no, yes); \
275 /* LDRH rd, [rn], -rm */ \
276 arm_access_memory(load, down, post, u16, half_reg); \
280 /* LDRSB rd, [rn], -rm */ \
281 arm_access_memory(load, down, post, s8, half_reg); \
285 /* LDRSH rd, [rn], -rm */ \
286 arm_access_memory(load, down, post, s16, half_reg); \
292 /* ANDS rd, rn, reg_op */ \
293 arm_data_proc(ands, reg_flags, flags); \
298 if((opcode & 0x90) == 0x90) \
302 /* STRH rd, [rn], -rm */ \
303 arm_access_memory(store, down, post, u16, half_reg); \
307 /* MLA rd, rm, rs, rn */ \
308 arm_multiply(yes, no); \
313 /* EOR rd, rn, reg_op */ \
314 arm_data_proc(eor, reg, no_flags); \
319 if((opcode & 0x90) == 0x90) \
321 switch((opcode >> 5) & 0x03) \
324 /* MLAS rd, rm, rs, rn */ \
325 arm_multiply(yes, yes); \
329 /* LDRH rd, [rn], -rm */ \
330 arm_access_memory(load, down, post, u16, half_reg); \
334 /* LDRSB rd, [rn], -rm */ \
335 arm_access_memory(load, down, post, s8, half_reg); \
339 /* LDRSH rd, [rn], -rm */ \
340 arm_access_memory(load, down, post, s16, half_reg); \
346 /* EORS rd, rn, reg_op */ \
347 arm_data_proc(eors, reg_flags, flags); \
352 if((opcode & 0x90) == 0x90) \
354 /* STRH rd, [rn], -imm */ \
355 arm_access_memory(store, down, post, u16, half_imm); \
359 /* SUB rd, rn, reg_op */ \
360 arm_data_proc(sub, reg, no_flags); \
365 if((opcode & 0x90) == 0x90) \
367 switch((opcode >> 5) & 0x03) \
370 /* LDRH rd, [rn], -imm */ \
371 arm_access_memory(load, down, post, u16, half_imm); \
375 /* LDRSB rd, [rn], -imm */ \
376 arm_access_memory(load, down, post, s8, half_imm); \
380 /* LDRSH rd, [rn], -imm */ \
381 arm_access_memory(load, down, post, s16, half_imm); \
387 /* SUBS rd, rn, reg_op */ \
388 arm_data_proc(subs, reg, flags); \
393 if((opcode & 0x90) == 0x90) \
395 /* STRH rd, [rn], -imm */ \
396 arm_access_memory(store, down, post, u16, half_imm); \
400 /* RSB rd, rn, reg_op */ \
401 arm_data_proc(rsb, reg, no_flags); \
406 if((opcode & 0x90) == 0x90) \
408 switch((opcode >> 5) & 0x03) \
411 /* LDRH rd, [rn], -imm */ \
412 arm_access_memory(load, down, post, u16, half_imm); \
416 /* LDRSB rd, [rn], -imm */ \
417 arm_access_memory(load, down, post, s8, half_imm); \
421 /* LDRSH rd, [rn], -imm */ \
422 arm_access_memory(load, down, post, s16, half_imm); \
428 /* RSBS rd, rn, reg_op */ \
429 arm_data_proc(rsbs, reg, flags); \
434 if((opcode & 0x90) == 0x90) \
438 /* STRH rd, [rn], +rm */ \
439 arm_access_memory(store, up, post, u16, half_reg); \
443 /* UMULL rd, rm, rs */ \
444 arm_multiply_long(u64, no, no); \
449 /* ADD rd, rn, reg_op */ \
450 arm_data_proc(add, reg, no_flags); \
455 if((opcode & 0x90) == 0x90) \
457 switch((opcode >> 5) & 0x03) \
460 /* UMULLS rdlo, rdhi, rm, rs */ \
461 arm_multiply_long(u64, no, yes); \
465 /* LDRH rd, [rn], +rm */ \
466 arm_access_memory(load, up, post, u16, half_reg); \
470 /* LDRSB rd, [rn], +rm */ \
471 arm_access_memory(load, up, post, s8, half_reg); \
475 /* LDRSH rd, [rn], +rm */ \
476 arm_access_memory(load, up, post, s16, half_reg); \
482 /* ADDS rd, rn, reg_op */ \
483 arm_data_proc(adds, reg, flags); \
488 if((opcode & 0x90) == 0x90) \
492 /* STRH rd, [rn], +rm */ \
493 arm_access_memory(store, up, post, u16, half_reg); \
497 /* UMLAL rd, rm, rs */ \
498 arm_multiply_long(u64_add, yes, no); \
503 /* ADC rd, rn, reg_op */ \
504 arm_data_proc(adc, reg, no_flags); \
509 if((opcode & 0x90) == 0x90) \
511 switch((opcode >> 5) & 0x03) \
514 /* UMLALS rdlo, rdhi, rm, rs */ \
515 arm_multiply_long(u64_add, yes, yes); \
519 /* LDRH rd, [rn], +rm */ \
520 arm_access_memory(load, up, post, u16, half_reg); \
524 /* LDRSB rd, [rn], +rm */ \
525 arm_access_memory(load, up, post, s8, half_reg); \
529 /* LDRSH rd, [rn], +rm */ \
530 arm_access_memory(load, up, post, s16, half_reg); \
536 /* ADCS rd, rn, reg_op */ \
537 arm_data_proc(adcs, reg, flags); \
542 if((opcode & 0x90) == 0x90) \
546 /* STRH rd, [rn], +imm */ \
547 arm_access_memory(store, up, post, u16, half_imm); \
551 /* SMULL rd, rm, rs */ \
552 arm_multiply_long(s64, no, no); \
557 /* SBC rd, rn, reg_op */ \
558 arm_data_proc(sbc, reg, no_flags); \
563 if((opcode & 0x90) == 0x90) \
565 switch((opcode >> 5) & 0x03) \
568 /* SMULLS rdlo, rdhi, rm, rs */ \
569 arm_multiply_long(s64, no, yes); \
573 /* LDRH rd, [rn], +imm */ \
574 arm_access_memory(load, up, post, u16, half_imm); \
578 /* LDRSB rd, [rn], +imm */ \
579 arm_access_memory(load, up, post, s8, half_imm); \
583 /* LDRSH rd, [rn], +imm */ \
584 arm_access_memory(load, up, post, s16, half_imm); \
590 /* SBCS rd, rn, reg_op */ \
591 arm_data_proc(sbcs, reg, flags); \
596 if((opcode & 0x90) == 0x90) \
600 /* STRH rd, [rn], +imm */ \
601 arm_access_memory(store, up, post, u16, half_imm); \
605 /* SMLAL rd, rm, rs */ \
606 arm_multiply_long(s64_add, yes, no); \
611 /* RSC rd, rn, reg_op */ \
612 arm_data_proc(rsc, reg, no_flags); \
617 if((opcode & 0x90) == 0x90) \
619 switch((opcode >> 5) & 0x03) \
622 /* SMLALS rdlo, rdhi, rm, rs */ \
623 arm_multiply_long(s64_add, yes, yes); \
627 /* LDRH rd, [rn], +imm */ \
628 arm_access_memory(load, up, post, u16, half_imm); \
632 /* LDRSB rd, [rn], +imm */ \
633 arm_access_memory(load, up, post, s8, half_imm); \
637 /* LDRSH rd, [rn], +imm */ \
638 arm_access_memory(load, up, post, s16, half_imm); \
644 /* RSCS rd, rn, reg_op */ \
645 arm_data_proc(rscs, reg, flags); \
650 if((opcode & 0x90) == 0x90) \
654 /* STRH rd, [rn - rm] */ \
655 arm_access_memory(store, down, pre, u16, half_reg); \
659 /* SWP rd, rm, [rn] */ \
666 arm_psr(reg, read, cpsr); \
671 if((opcode & 0x90) == 0x90) \
673 switch((opcode >> 5) & 0x03) \
676 /* LDRH rd, [rn - rm] */ \
677 arm_access_memory(load, down, pre, u16, half_reg); \
681 /* LDRSB rd, [rn - rm] */ \
682 arm_access_memory(load, down, pre, s8, half_reg); \
686 /* LDRSH rd, [rn - rm] */ \
687 arm_access_memory(load, down, pre, s16, half_reg); \
693 /* TST rd, rn, reg_op */ \
694 arm_data_proc_test(tst, reg_flags); \
699 if((opcode & 0x90) == 0x90) \
701 /* STRH rd, [rn - rm]! */ \
702 arm_access_memory(store, down, pre_wb, u16, half_reg); \
714 arm_psr(reg, store, cpsr); \
720 if((opcode & 0x90) == 0x90) \
722 switch((opcode >> 5) & 0x03) \
725 /* LDRH rd, [rn - rm]! */ \
726 arm_access_memory(load, down, pre_wb, u16, half_reg); \
730 /* LDRSB rd, [rn - rm]! */ \
731 arm_access_memory(load, down, pre_wb, s8, half_reg); \
735 /* LDRSH rd, [rn - rm]! */ \
736 arm_access_memory(load, down, pre_wb, s16, half_reg); \
742 /* TEQ rd, rn, reg_op */ \
743 arm_data_proc_test(teq, reg_flags); \
748 if((opcode & 0x90) == 0x90) \
752 /* STRH rd, [rn - imm] */ \
753 arm_access_memory(store, down, pre, u16, half_imm); \
757 /* SWPB rd, rm, [rn] */ \
764 arm_psr(reg, read, spsr); \
769 if((opcode & 0x90) == 0x90) \
771 switch((opcode >> 5) & 0x03) \
774 /* LDRH rd, [rn - imm] */ \
775 arm_access_memory(load, down, pre, u16, half_imm); \
779 /* LDRSB rd, [rn - imm] */ \
780 arm_access_memory(load, down, pre, s8, half_imm); \
784 /* LDRSH rd, [rn - imm] */ \
785 arm_access_memory(load, down, pre, s16, half_imm); \
791 /* CMP rn, reg_op */ \
792 arm_data_proc_test(cmp, reg); \
797 if((opcode & 0x90) == 0x90) \
799 /* STRH rd, [rn - imm]! */ \
800 arm_access_memory(store, down, pre_wb, u16, half_imm); \
805 arm_psr(reg, store, spsr); \
810 if((opcode & 0x90) == 0x90) \
812 switch((opcode >> 5) & 0x03) \
815 /* LDRH rd, [rn - imm]! */ \
816 arm_access_memory(load, down, pre_wb, u16, half_imm); \
820 /* LDRSB rd, [rn - imm]! */ \
821 arm_access_memory(load, down, pre_wb, s8, half_imm); \
825 /* LDRSH rd, [rn - imm]! */ \
826 arm_access_memory(load, down, pre_wb, s16, half_imm); \
832 /* CMN rd, rn, reg_op */ \
833 arm_data_proc_test(cmn, reg); \
838 if((opcode & 0x90) == 0x90) \
840 /* STRH rd, [rn + rm] */ \
841 arm_access_memory(store, up, pre, u16, half_reg); \
845 /* ORR rd, rn, reg_op */ \
846 arm_data_proc(orr, reg, no_flags); \
851 if((opcode & 0x90) == 0x90) \
853 switch((opcode >> 5) & 0x03) \
856 /* LDRH rd, [rn + rm] */ \
857 arm_access_memory(load, up, pre, u16, half_reg); \
861 /* LDRSB rd, [rn + rm] */ \
862 arm_access_memory(load, up, pre, s8, half_reg); \
866 /* LDRSH rd, [rn + rm] */ \
867 arm_access_memory(load, up, pre, s16, half_reg); \
873 /* ORRS rd, rn, reg_op */ \
874 arm_data_proc(orrs, reg_flags, flags); \
879 if((opcode & 0x90) == 0x90) \
881 /* STRH rd, [rn + rm]! */ \
882 arm_access_memory(store, up, pre_wb, u16, half_reg); \
886 /* MOV rd, reg_op */ \
887 arm_data_proc_unary(mov, reg, no_flags); \
892 if((opcode & 0x90) == 0x90) \
894 switch((opcode >> 5) & 0x03) \
897 /* LDRH rd, [rn + rm]! */ \
898 arm_access_memory(load, up, pre_wb, u16, half_reg); \
902 /* LDRSB rd, [rn + rm]! */ \
903 arm_access_memory(load, up, pre_wb, s8, half_reg); \
907 /* LDRSH rd, [rn + rm]! */ \
908 arm_access_memory(load, up, pre_wb, s16, half_reg); \
914 /* MOVS rd, reg_op */ \
915 arm_data_proc_unary(movs, reg_flags, flags); \
920 if((opcode & 0x90) == 0x90) \
922 /* STRH rd, [rn + imm] */ \
923 arm_access_memory(store, up, pre, u16, half_imm); \
927 /* BIC rd, rn, reg_op */ \
928 arm_data_proc(bic, reg, no_flags); \
933 if((opcode & 0x90) == 0x90) \
935 switch((opcode >> 5) & 0x03) \
938 /* LDRH rd, [rn + imm] */ \
939 arm_access_memory(load, up, pre, u16, half_imm); \
943 /* LDRSB rd, [rn + imm] */ \
944 arm_access_memory(load, up, pre, s8, half_imm); \
948 /* LDRSH rd, [rn + imm] */ \
949 arm_access_memory(load, up, pre, s16, half_imm); \
955 /* BICS rd, rn, reg_op */ \
956 arm_data_proc(bics, reg_flags, flags); \
961 if((opcode & 0x90) == 0x90) \
963 /* STRH rd, [rn + imm]! */ \
964 arm_access_memory(store, up, pre_wb, u16, half_imm); \
968 /* MVN rd, reg_op */ \
969 arm_data_proc_unary(mvn, reg, no_flags); \
974 if((opcode & 0x90) == 0x90) \
976 switch((opcode >> 5) & 0x03) \
979 /* LDRH rd, [rn + imm]! */ \
980 arm_access_memory(load, up, pre_wb, u16, half_imm); \
984 /* LDRSB rd, [rn + imm]! */ \
985 arm_access_memory(load, up, pre_wb, s8, half_imm); \
989 /* LDRSH rd, [rn + imm]! */ \
990 arm_access_memory(load, up, pre_wb, s16, half_imm); \
996 /* MVNS rd, rn, reg_op */ \
997 arm_data_proc_unary(mvns, reg_flags, flags); \
1002 /* AND rd, rn, imm */ \
1003 arm_data_proc(and, imm, no_flags); \
1007 /* ANDS rd, rn, imm */ \
1008 arm_data_proc(ands, imm_flags, flags); \
1012 /* EOR rd, rn, imm */ \
1013 arm_data_proc(eor, imm, no_flags); \
1017 /* EORS rd, rn, imm */ \
1018 arm_data_proc(eors, imm_flags, flags); \
1022 /* SUB rd, rn, imm */ \
1023 arm_data_proc(sub, imm, no_flags); \
1027 /* SUBS rd, rn, imm */ \
1028 arm_data_proc(subs, imm, flags); \
1032 /* RSB rd, rn, imm */ \
1033 arm_data_proc(rsb, imm, no_flags); \
1037 /* RSBS rd, rn, imm */ \
1038 arm_data_proc(rsbs, imm, flags); \
1042 /* ADD rd, rn, imm */ \
1043 arm_data_proc(add, imm, no_flags); \
1047 /* ADDS rd, rn, imm */ \
1048 arm_data_proc(adds, imm, flags); \
1052 /* ADC rd, rn, imm */ \
1053 arm_data_proc(adc, imm, no_flags); \
1057 /* ADCS rd, rn, imm */ \
1058 arm_data_proc(adcs, imm, flags); \
1062 /* SBC rd, rn, imm */ \
1063 arm_data_proc(sbc, imm, no_flags); \
1067 /* SBCS rd, rn, imm */ \
1068 arm_data_proc(sbcs, imm, flags); \
1072 /* RSC rd, rn, imm */ \
1073 arm_data_proc(rsc, imm, no_flags); \
1077 /* RSCS rd, rn, imm */ \
1078 arm_data_proc(rscs, imm, flags); \
1081 case 0x30 ... 0x31: \
1083 arm_data_proc_test(tst, imm); \
1087 /* MSR cpsr, imm */ \
1088 arm_psr(imm, store, cpsr); \
1093 arm_data_proc_test(teq, imm); \
1096 case 0x34 ... 0x35: \
1098 arm_data_proc_test(cmp, imm); \
1102 /* MSR spsr, imm */ \
1103 arm_psr(imm, store, spsr); \
1108 arm_data_proc_test(cmn, imm); \
1112 /* ORR rd, rn, imm */ \
1113 arm_data_proc(orr, imm, no_flags); \
1117 /* ORRS rd, rn, imm */ \
1118 arm_data_proc(orrs, imm_flags, flags); \
1123 arm_data_proc_unary(mov, imm, no_flags); \
1127 /* MOVS rd, imm */ \
1128 arm_data_proc_unary(movs, imm_flags, flags); \
1132 /* BIC rd, rn, imm */ \
1133 arm_data_proc(bic, imm, no_flags); \
1137 /* BICS rd, rn, imm */ \
1138 arm_data_proc(bics, imm_flags, flags); \
1143 arm_data_proc_unary(mvn, imm, no_flags); \
1147 /* MVNS rd, imm */ \
1148 arm_data_proc_unary(mvns, imm_flags, flags); \
1152 /* STR rd, [rn], -imm */ \
1153 arm_access_memory(store, down, post, u32, imm); \
1157 /* LDR rd, [rn], -imm */ \
1158 arm_access_memory(load, down, post, u32, imm); \
1162 /* STRT rd, [rn], -imm */ \
1163 arm_access_memory(store, down, post, u32, imm); \
1167 /* LDRT rd, [rn], -imm */ \
1168 arm_access_memory(load, down, post, u32, imm); \
1172 /* STRB rd, [rn], -imm */ \
1173 arm_access_memory(store, down, post, u8, imm); \
1177 /* LDRB rd, [rn], -imm */ \
1178 arm_access_memory(load, down, post, u8, imm); \
1182 /* STRBT rd, [rn], -imm */ \
1183 arm_access_memory(store, down, post, u8, imm); \
1187 /* LDRBT rd, [rn], -imm */ \
1188 arm_access_memory(load, down, post, u8, imm); \
1192 /* STR rd, [rn], +imm */ \
1193 arm_access_memory(store, up, post, u32, imm); \
1197 /* LDR rd, [rn], +imm */ \
1198 arm_access_memory(load, up, post, u32, imm); \
1202 /* STRT rd, [rn], +imm */ \
1203 arm_access_memory(store, up, post, u32, imm); \
1207 /* LDRT rd, [rn], +imm */ \
1208 arm_access_memory(load, up, post, u32, imm); \
1212 /* STRB rd, [rn], +imm */ \
1213 arm_access_memory(store, up, post, u8, imm); \
1217 /* LDRB rd, [rn], +imm */ \
1218 arm_access_memory(load, up, post, u8, imm); \
1222 /* STRBT rd, [rn], +imm */ \
1223 arm_access_memory(store, up, post, u8, imm); \
1227 /* LDRBT rd, [rn], +imm */ \
1228 arm_access_memory(load, up, post, u8, imm); \
1232 /* STR rd, [rn - imm] */ \
1233 arm_access_memory(store, down, pre, u32, imm); \
1237 /* LDR rd, [rn - imm] */ \
1238 arm_access_memory(load, down, pre, u32, imm); \
1242 /* STR rd, [rn - imm]! */ \
1243 arm_access_memory(store, down, pre_wb, u32, imm); \
1247 /* LDR rd, [rn - imm]! */ \
1248 arm_access_memory(load, down, pre_wb, u32, imm); \
1252 /* STRB rd, [rn - imm] */ \
1253 arm_access_memory(store, down, pre, u8, imm); \
1257 /* LDRB rd, [rn - imm] */ \
1258 arm_access_memory(load, down, pre, u8, imm); \
1262 /* STRB rd, [rn - imm]! */ \
1263 arm_access_memory(store, down, pre_wb, u8, imm); \
1267 /* LDRB rd, [rn - imm]! */ \
1268 arm_access_memory(load, down, pre_wb, u8, imm); \
1272 /* STR rd, [rn + imm] */ \
1273 arm_access_memory(store, up, pre, u32, imm); \
1277 /* LDR rd, [rn + imm] */ \
1278 arm_access_memory(load, up, pre, u32, imm); \
1282 /* STR rd, [rn + imm]! */ \
1283 arm_access_memory(store, up, pre_wb, u32, imm); \
1287 /* LDR rd, [rn + imm]! */ \
1288 arm_access_memory(load, up, pre_wb, u32, imm); \
1292 /* STRB rd, [rn + imm] */ \
1293 arm_access_memory(store, up, pre, u8, imm); \
1297 /* LDRB rd, [rn + imm] */ \
1298 arm_access_memory(load, up, pre, u8, imm); \
1302 /* STRB rd, [rn + imm]! */ \
1303 arm_access_memory(store, up, pre_wb, u8, imm); \
1307 /* LDRBT rd, [rn + imm]! */ \
1308 arm_access_memory(load, up, pre_wb, u8, imm); \
1312 /* STR rd, [rn], -rm */ \
1313 arm_access_memory(store, down, post, u32, reg); \
1317 /* LDR rd, [rn], -rm */ \
1318 arm_access_memory(load, down, post, u32, reg); \
1322 /* STRT rd, [rn], -rm */ \
1323 arm_access_memory(store, down, post, u32, reg); \
1327 /* LDRT rd, [rn], -rm */ \
1328 arm_access_memory(load, down, post, u32, reg); \
1332 /* STRB rd, [rn], -rm */ \
1333 arm_access_memory(store, down, post, u8, reg); \
1337 /* LDRB rd, [rn], -rm */ \
1338 arm_access_memory(load, down, post, u8, reg); \
1342 /* STRBT rd, [rn], -rm */ \
1343 arm_access_memory(store, down, post, u8, reg); \
1347 /* LDRBT rd, [rn], -rm */ \
1348 arm_access_memory(load, down, post, u8, reg); \
1352 /* STR rd, [rn], +rm */ \
1353 arm_access_memory(store, up, post, u32, reg); \
1357 /* LDR rd, [rn], +rm */ \
1358 arm_access_memory(load, up, post, u32, reg); \
1362 /* STRT rd, [rn], +rm */ \
1363 arm_access_memory(store, up, post, u32, reg); \
1367 /* LDRT rd, [rn], +rm */ \
1368 arm_access_memory(load, up, post, u32, reg); \
1372 /* STRB rd, [rn], +rm */ \
1373 arm_access_memory(store, up, post, u8, reg); \
1377 /* LDRB rd, [rn], +rm */ \
1378 arm_access_memory(load, up, post, u8, reg); \
1382 /* STRBT rd, [rn], +rm */ \
1383 arm_access_memory(store, up, post, u8, reg); \
1387 /* LDRBT rd, [rn], +rm */ \
1388 arm_access_memory(load, up, post, u8, reg); \
1392 /* STR rd, [rn - rm] */ \
1393 arm_access_memory(store, down, pre, u32, reg); \
1397 /* LDR rd, [rn - rm] */ \
1398 arm_access_memory(load, down, pre, u32, reg); \
1402 /* STR rd, [rn - rm]! */ \
1403 arm_access_memory(store, down, pre_wb, u32, reg); \
1407 /* LDR rd, [rn - rm]! */ \
1408 arm_access_memory(load, down, pre_wb, u32, reg); \
1412 /* STRB rd, [rn - rm] */ \
1413 arm_access_memory(store, down, pre, u8, reg); \
1417 /* LDRB rd, [rn - rm] */ \
1418 arm_access_memory(load, down, pre, u8, reg); \
1422 /* STRB rd, [rn - rm]! */ \
1423 arm_access_memory(store, down, pre_wb, u8, reg); \
1427 /* LDRB rd, [rn - rm]! */ \
1428 arm_access_memory(load, down, pre_wb, u8, reg); \
1432 /* STR rd, [rn + rm] */ \
1433 arm_access_memory(store, up, pre, u32, reg); \
1437 /* LDR rd, [rn + rm] */ \
1438 arm_access_memory(load, up, pre, u32, reg); \
1442 /* STR rd, [rn + rm]! */ \
1443 arm_access_memory(store, up, pre_wb, u32, reg); \
1447 /* LDR rd, [rn + rm]! */ \
1448 arm_access_memory(load, up, pre_wb, u32, reg); \
1452 /* STRB rd, [rn + rm] */ \
1453 arm_access_memory(store, up, pre, u8, reg); \
1457 /* LDRB rd, [rn + rm] */ \
1458 arm_access_memory(load, up, pre, u8, reg); \
1462 /* STRB rd, [rn + rm]! */ \
1463 arm_access_memory(store, up, pre_wb, u8, reg); \
1467 /* LDRBT rd, [rn + rm]! */ \
1468 arm_access_memory(load, up, pre_wb, u8, reg); \
1472 /* STMDA rn, rlist */ \
1473 arm_block_memory(store, down_a, no, no); \
1477 /* LDMDA rn, rlist */ \
1478 arm_block_memory(load, down_a, no, no); \
1482 /* STMDA rn!, rlist */ \
1483 arm_block_memory(store, down_a, down, no); \
1487 /* LDMDA rn!, rlist */ \
1488 arm_block_memory(load, down_a, down, no); \
1492 /* STMDA rn, rlist^ */ \
1493 arm_block_memory(store, down_a, no, yes); \
1497 /* LDMDA rn, rlist^ */ \
1498 arm_block_memory(load, down_a, no, yes); \
1502 /* STMDA rn!, rlist^ */ \
1503 arm_block_memory(store, down_a, down, yes); \
1507 /* LDMDA rn!, rlist^ */ \
1508 arm_block_memory(load, down_a, down, yes); \
1512 /* STMIA rn, rlist */ \
1513 arm_block_memory(store, no, no, no); \
1517 /* LDMIA rn, rlist */ \
1518 arm_block_memory(load, no, no, no); \
1522 /* STMIA rn!, rlist */ \
1523 arm_block_memory(store, no, up, no); \
1527 /* LDMIA rn!, rlist */ \
1528 arm_block_memory(load, no, up, no); \
1532 /* STMIA rn, rlist^ */ \
1533 arm_block_memory(store, no, no, yes); \
1537 /* LDMIA rn, rlist^ */ \
1538 arm_block_memory(load, no, no, yes); \
1542 /* STMIA rn!, rlist^ */ \
1543 arm_block_memory(store, no, up, yes); \
1547 /* LDMIA rn!, rlist^ */ \
1548 arm_block_memory(load, no, up, yes); \
1552 /* STMDB rn, rlist */ \
1553 arm_block_memory(store, down_b, no, no); \
1557 /* LDMDB rn, rlist */ \
1558 arm_block_memory(load, down_b, no, no); \
1562 /* STMDB rn!, rlist */ \
1563 arm_block_memory(store, down_b, down, no); \
1567 /* LDMDB rn!, rlist */ \
1568 arm_block_memory(load, down_b, down, no); \
1572 /* STMDB rn, rlist^ */ \
1573 arm_block_memory(store, down_b, no, yes); \
1577 /* LDMDB rn, rlist^ */ \
1578 arm_block_memory(load, down_b, no, yes); \
1582 /* STMDB rn!, rlist^ */ \
1583 arm_block_memory(store, down_b, down, yes); \
1587 /* LDMDB rn!, rlist^ */ \
1588 arm_block_memory(load, down_b, down, yes); \
1592 /* STMIB rn, rlist */ \
1593 arm_block_memory(store, up, no, no); \
1597 /* LDMIB rn, rlist */ \
1598 arm_block_memory(load, up, no, no); \
1602 /* STMIB rn!, rlist */ \
1603 arm_block_memory(store, up, up, no); \
1607 /* LDMIB rn!, rlist */ \
1608 arm_block_memory(load, up, up, no); \
1612 /* STMIB rn, rlist^ */ \
1613 arm_block_memory(store, up, no, yes); \
1617 /* LDMIB rn, rlist^ */ \
1618 arm_block_memory(load, up, no, yes); \
1622 /* STMIB rn!, rlist^ */ \
1623 arm_block_memory(store, up, up, yes); \
1627 /* LDMIB rn!, rlist^ */ \
1628 arm_block_memory(load, up, up, yes); \
1631 case 0xA0 ... 0xAF: \
1638 case 0xB0 ... 0xBF: \
1645 case 0xC0 ... 0xEF: \
1646 /* coprocessor instructions, reserved on GBA */ \
1649 case 0xF0 ... 0xFF: \
1659 #define arm_flag_status() \
1661 #define translate_thumb_instruction() \
1662 flag_status = block_data[block_data_position].flag_data; \
1663 check_pc_region(pc); \
1664 last_opcode = opcode; \
1665 opcode = address16(pc_address_block, (pc & 0x7FFF)); \
1667 switch((opcode >> 8) & 0xFF) \
1669 case 0x00 ... 0x07: \
1670 /* LSL rd, rs, imm */ \
1671 thumb_shift(shift, lsl, imm); \
1674 case 0x08 ... 0x0F: \
1675 /* LSR rd, rs, imm */ \
1676 thumb_shift(shift, lsr, imm); \
1679 case 0x10 ... 0x17: \
1680 /* ASR rd, rs, imm */ \
1681 thumb_shift(shift, asr, imm); \
1684 case 0x18 ... 0x19: \
1685 /* ADD rd, rs, rn */ \
1686 thumb_data_proc(add_sub, adds, reg, rd, rs, rn); \
1689 case 0x1A ... 0x1B: \
1690 /* SUB rd, rs, rn */ \
1691 thumb_data_proc(add_sub, subs, reg, rd, rs, rn); \
1694 case 0x1C ... 0x1D: \
1695 /* ADD rd, rs, imm */ \
1696 thumb_data_proc(add_sub_imm, adds, imm, rd, rs, imm); \
1699 case 0x1E ... 0x1F: \
1700 /* SUB rd, rs, imm */ \
1701 thumb_data_proc(add_sub_imm, subs, imm, rd, rs, imm); \
1706 thumb_data_proc_unary(imm, movs, imm, 0, imm); \
1711 thumb_data_proc_unary(imm, movs, imm, 1, imm); \
1716 thumb_data_proc_unary(imm, movs, imm, 2, imm); \
1721 thumb_data_proc_unary(imm, movs, imm, 3, imm); \
1726 thumb_data_proc_unary(imm, movs, imm, 4, imm); \
1731 thumb_data_proc_unary(imm, movs, imm, 5, imm); \
1736 thumb_data_proc_unary(imm, movs, imm, 6, imm); \
1741 thumb_data_proc_unary(imm, movs, imm, 7, imm); \
1746 thumb_data_proc_test(imm, cmp, imm, 0, imm); \
1751 thumb_data_proc_test(imm, cmp, imm, 1, imm); \
1756 thumb_data_proc_test(imm, cmp, imm, 2, imm); \
1761 thumb_data_proc_test(imm, cmp, imm, 3, imm); \
1766 thumb_data_proc_test(imm, cmp, imm, 4, imm); \
1771 thumb_data_proc_test(imm, cmp, imm, 5, imm); \
1776 thumb_data_proc_test(imm, cmp, imm, 6, imm); \
1781 thumb_data_proc_test(imm, cmp, imm, 7, imm); \
1786 thumb_data_proc(imm, adds, imm, 0, 0, imm); \
1791 thumb_data_proc(imm, adds, imm, 1, 1, imm); \
1796 thumb_data_proc(imm, adds, imm, 2, 2, imm); \
1801 thumb_data_proc(imm, adds, imm, 3, 3, imm); \
1806 thumb_data_proc(imm, adds, imm, 4, 4, imm); \
1811 thumb_data_proc(imm, adds, imm, 5, 5, imm); \
1816 thumb_data_proc(imm, adds, imm, 6, 6, imm); \
1821 thumb_data_proc(imm, adds, imm, 7, 7, imm); \
1826 thumb_data_proc(imm, subs, imm, 0, 0, imm); \
1831 thumb_data_proc(imm, subs, imm, 1, 1, imm); \
1836 thumb_data_proc(imm, subs, imm, 2, 2, imm); \
1841 thumb_data_proc(imm, subs, imm, 3, 3, imm); \
1846 thumb_data_proc(imm, subs, imm, 4, 4, imm); \
1851 thumb_data_proc(imm, subs, imm, 5, 5, imm); \
1856 thumb_data_proc(imm, subs, imm, 6, 6, imm); \
1861 thumb_data_proc(imm, subs, imm, 7, 7, imm); \
1865 switch((opcode >> 6) & 0x03) \
1869 thumb_data_proc(alu_op, ands, reg, rd, rd, rs); \
1874 thumb_data_proc(alu_op, eors, reg, rd, rd, rs); \
1879 thumb_shift(alu_op, lsl, reg); \
1884 thumb_shift(alu_op, lsr, reg); \
1890 switch((opcode >> 6) & 0x03) \
1894 thumb_shift(alu_op, asr, reg); \
1899 thumb_data_proc(alu_op, adcs, reg, rd, rd, rs); \
1904 thumb_data_proc(alu_op, sbcs, reg, rd, rd, rs); \
1909 thumb_shift(alu_op, ror, reg); \
1915 switch((opcode >> 6) & 0x03) \
1919 thumb_data_proc_test(alu_op, tst, reg, rd, rs); \
1924 thumb_data_proc_unary(alu_op, neg, reg, rd, rs); \
1929 thumb_data_proc_test(alu_op, cmp, reg, rd, rs); \
1934 thumb_data_proc_test(alu_op, cmn, reg, rd, rs); \
1940 switch((opcode >> 6) & 0x03) \
1944 thumb_data_proc(alu_op, orrs, reg, rd, rd, rs); \
1949 thumb_data_proc(alu_op, muls, reg, rd, rd, rs); \
1954 thumb_data_proc(alu_op, bics, reg, rd, rd, rs); \
1959 thumb_data_proc_unary(alu_op, mvns, reg, rd, rs); \
1966 thumb_data_proc_hi(add); \
1971 thumb_data_proc_test_hi(cmp); \
1976 thumb_data_proc_mov_hi(); \
1985 /* LDR r0, [pc + imm] */ \
1986 thumb_access_memory(load, imm, 0, 0, 0, pc_relative, \
1987 (pc & ~2) + (imm * 4) + 4, u32); \
1991 /* LDR r1, [pc + imm] */ \
1992 thumb_access_memory(load, imm, 1, 0, 0, pc_relative, \
1993 (pc & ~2) + (imm * 4) + 4, u32); \
1997 /* LDR r2, [pc + imm] */ \
1998 thumb_access_memory(load, imm, 2, 0, 0, pc_relative, \
1999 (pc & ~2) + (imm * 4) + 4, u32); \
2003 /* LDR r3, [pc + imm] */ \
2004 thumb_access_memory(load, imm, 3, 0, 0, pc_relative, \
2005 (pc & ~2) + (imm * 4) + 4, u32); \
2009 /* LDR r4, [pc + imm] */ \
2010 thumb_access_memory(load, imm, 4, 0, 0, pc_relative, \
2011 (pc & ~2) + (imm * 4) + 4, u32); \
2015 /* LDR r5, [pc + imm] */ \
2016 thumb_access_memory(load, imm, 5, 0, 0, pc_relative, \
2017 (pc & ~2) + (imm * 4) + 4, u32); \
2021 /* LDR r6, [pc + imm] */ \
2022 thumb_access_memory(load, imm, 6, 0, 0, pc_relative, \
2023 (pc & ~2) + (imm * 4) + 4, u32); \
2027 /* LDR r7, [pc + imm] */ \
2028 thumb_access_memory(load, imm, 7, 0, 0, pc_relative, \
2029 (pc & ~2) + (imm * 4) + 4, u32); \
2032 case 0x50 ... 0x51: \
2033 /* STR rd, [rb + ro] */ \
2034 thumb_access_memory(store, mem_reg, rd, rb, ro, reg_reg, 0, u32); \
2037 case 0x52 ... 0x53: \
2038 /* STRH rd, [rb + ro] */ \
2039 thumb_access_memory(store, mem_reg, rd, rb, ro, reg_reg, 0, u16); \
2042 case 0x54 ... 0x55: \
2043 /* STRB rd, [rb + ro] */ \
2044 thumb_access_memory(store, mem_reg, rd, rb, ro, reg_reg, 0, u8); \
2047 case 0x56 ... 0x57: \
2048 /* LDSB rd, [rb + ro] */ \
2049 thumb_access_memory(load, mem_reg, rd, rb, ro, reg_reg, 0, s8); \
2052 case 0x58 ... 0x59: \
2053 /* LDR rd, [rb + ro] */ \
2054 thumb_access_memory(load, mem_reg, rd, rb, ro, reg_reg, 0, u32); \
2057 case 0x5A ... 0x5B: \
2058 /* LDRH rd, [rb + ro] */ \
2059 thumb_access_memory(load, mem_reg, rd, rb, ro, reg_reg, 0, u16); \
2062 case 0x5C ... 0x5D: \
2063 /* LDRB rd, [rb + ro] */ \
2064 thumb_access_memory(load, mem_reg, rd, rb, ro, reg_reg, 0, u8); \
2067 case 0x5E ... 0x5F: \
2068 /* LDSH rd, [rb + ro] */ \
2069 thumb_access_memory(load, mem_reg, rd, rb, ro, reg_reg, 0, s16); \
2072 case 0x60 ... 0x67: \
2073 /* STR rd, [rb + imm] */ \
2074 thumb_access_memory(store, mem_imm, rd, rb, 0, reg_imm, (imm * 4), \
2078 case 0x68 ... 0x6F: \
2079 /* LDR rd, [rb + imm] */ \
2080 thumb_access_memory(load, mem_imm, rd, rb, 0, reg_imm, (imm * 4), u32); \
2083 case 0x70 ... 0x77: \
2084 /* STRB rd, [rb + imm] */ \
2085 thumb_access_memory(store, mem_imm, rd, rb, 0, reg_imm, imm, u8); \
2088 case 0x78 ... 0x7F: \
2089 /* LDRB rd, [rb + imm] */ \
2090 thumb_access_memory(load, mem_imm, rd, rb, 0, reg_imm, imm, u8); \
2093 case 0x80 ... 0x87: \
2094 /* STRH rd, [rb + imm] */ \
2095 thumb_access_memory(store, mem_imm, rd, rb, 0, reg_imm, \
2099 case 0x88 ... 0x8F: \
2100 /* LDRH rd, [rb + imm] */ \
2101 thumb_access_memory(load, mem_imm, rd, rb, 0, reg_imm, (imm * 2), u16); \
2105 /* STR r0, [sp + imm] */ \
2106 thumb_access_memory(store, imm, 0, 13, 0, reg_imm_sp, imm, u32); \
2110 /* STR r1, [sp + imm] */ \
2111 thumb_access_memory(store, imm, 1, 13, 0, reg_imm_sp, imm, u32); \
2115 /* STR r2, [sp + imm] */ \
2116 thumb_access_memory(store, imm, 2, 13, 0, reg_imm_sp, imm, u32); \
2120 /* STR r3, [sp + imm] */ \
2121 thumb_access_memory(store, imm, 3, 13, 0, reg_imm_sp, imm, u32); \
2125 /* STR r4, [sp + imm] */ \
2126 thumb_access_memory(store, imm, 4, 13, 0, reg_imm_sp, imm, u32); \
2130 /* STR r5, [sp + imm] */ \
2131 thumb_access_memory(store, imm, 5, 13, 0, reg_imm_sp, imm, u32); \
2135 /* STR r6, [sp + imm] */ \
2136 thumb_access_memory(store, imm, 6, 13, 0, reg_imm_sp, imm, u32); \
2140 /* STR r7, [sp + imm] */ \
2141 thumb_access_memory(store, imm, 7, 13, 0, reg_imm_sp, imm, u32); \
2145 /* LDR r0, [sp + imm] */ \
2146 thumb_access_memory(load, imm, 0, 13, 0, reg_imm_sp, imm, u32); \
2150 /* LDR r1, [sp + imm] */ \
2151 thumb_access_memory(load, imm, 1, 13, 0, reg_imm_sp, imm, u32); \
2155 /* LDR r2, [sp + imm] */ \
2156 thumb_access_memory(load, imm, 2, 13, 0, reg_imm_sp, imm, u32); \
2160 /* LDR r3, [sp + imm] */ \
2161 thumb_access_memory(load, imm, 3, 13, 0, reg_imm_sp, imm, u32); \
2165 /* LDR r4, [sp + imm] */ \
2166 thumb_access_memory(load, imm, 4, 13, 0, reg_imm_sp, imm, u32); \
2170 /* LDR r5, [sp + imm] */ \
2171 thumb_access_memory(load, imm, 5, 13, 0, reg_imm_sp, imm, u32); \
2175 /* LDR r6, [sp + imm] */ \
2176 thumb_access_memory(load, imm, 6, 13, 0, reg_imm_sp, imm, u32); \
2180 /* LDR r7, [sp + imm] */ \
2181 thumb_access_memory(load, imm, 7, 13, 0, reg_imm_sp, imm, u32); \
2185 /* ADD r0, pc, +imm */ \
2190 /* ADD r1, pc, +imm */ \
2195 /* ADD r2, pc, +imm */ \
2200 /* ADD r3, pc, +imm */ \
2205 /* ADD r4, pc, +imm */ \
2210 /* ADD r5, pc, +imm */ \
2215 /* ADD r6, pc, +imm */ \
2220 /* ADD r7, pc, +imm */ \
2225 /* ADD r0, sp, +imm */ \
2230 /* ADD r1, sp, +imm */ \
2235 /* ADD r2, sp, +imm */ \
2240 /* ADD r3, sp, +imm */ \
2245 /* ADD r4, sp, +imm */ \
2250 /* ADD r5, sp, +imm */ \
2255 /* ADD r6, sp, +imm */ \
2260 /* ADD r7, sp, +imm */ \
2264 case 0xB0 ... 0xB3: \
2265 if((opcode >> 7) & 0x01) \
2267 /* ADD sp, -imm */ \
2268 thumb_adjust_sp(down); \
2272 /* ADD sp, +imm */ \
2273 thumb_adjust_sp(up); \
2279 thumb_block_memory(store, down, no, 13); \
2283 /* PUSH rlist, lr */ \
2284 thumb_block_memory(store, push_lr, push_lr, 13); \
2289 thumb_block_memory(load, no, up, 13); \
2293 /* POP rlist, pc */ \
2294 thumb_block_memory(load, no, pop_pc, 13); \
2298 /* STMIA r0!, rlist */ \
2299 thumb_block_memory(store, no, up, 0); \
2303 /* STMIA r1!, rlist */ \
2304 thumb_block_memory(store, no, up, 1); \
2308 /* STMIA r2!, rlist */ \
2309 thumb_block_memory(store, no, up, 2); \
2313 /* STMIA r3!, rlist */ \
2314 thumb_block_memory(store, no, up, 3); \
2318 /* STMIA r4!, rlist */ \
2319 thumb_block_memory(store, no, up, 4); \
2323 /* STMIA r5!, rlist */ \
2324 thumb_block_memory(store, no, up, 5); \
2328 /* STMIA r6!, rlist */ \
2329 thumb_block_memory(store, no, up, 6); \
2333 /* STMIA r7!, rlist */ \
2334 thumb_block_memory(store, no, up, 7); \
2338 /* LDMIA r0!, rlist */ \
2339 thumb_block_memory(load, no, up, 0); \
2343 /* LDMIA r1!, rlist */ \
2344 thumb_block_memory(load, no, up, 1); \
2348 /* LDMIA r2!, rlist */ \
2349 thumb_block_memory(load, no, up, 2); \
2353 /* LDMIA r3!, rlist */ \
2354 thumb_block_memory(load, no, up, 3); \
2358 /* LDMIA r4!, rlist */ \
2359 thumb_block_memory(load, no, up, 4); \
2363 /* LDMIA r5!, rlist */ \
2364 thumb_block_memory(load, no, up, 5); \
2368 /* LDMIA r6!, rlist */ \
2369 thumb_block_memory(load, no, up, 6); \
2373 /* LDMIA r7!, rlist */ \
2374 thumb_block_memory(load, no, up, 7); \
2379 thumb_conditional_branch(eq); \
2384 thumb_conditional_branch(ne); \
2389 thumb_conditional_branch(cs); \
2394 thumb_conditional_branch(cc); \
2399 thumb_conditional_branch(mi); \
2404 thumb_conditional_branch(pl); \
2409 thumb_conditional_branch(vs); \
2414 thumb_conditional_branch(vc); \
2419 thumb_conditional_branch(hi); \
2424 thumb_conditional_branch(ls); \
2429 thumb_conditional_branch(ge); \
2434 thumb_conditional_branch(lt); \
2439 thumb_conditional_branch(gt); \
2444 thumb_conditional_branch(le); \
2454 case 0xE0 ... 0xE7: \
2461 case 0xF0 ... 0xF7: \
2463 /* (low word) BL label */ \
2464 /* This should possibly generate code if not in conjunction with a BLH \
2465 next, but I don't think anyone will do that. */ \
2469 case 0xF8 ... 0xFF: \
2471 /* (high word) BL label */ \
2472     /* This might not be preceding a BL low word (Golden Sun 2), if so    \
2473 it must be handled like an indirect branch. */ \
2474 if((last_opcode >= 0xF000) && (last_opcode < 0xF800)) \
/* Helpers that build the per-instruction flag usage word (flag_status).
   Per the liveness-analysis description later in this file: bits 0:3 are
   the flags the instruction MAY modify, bits 7:4 the flags it MUST modify,
   bits 11:8 the flags it requires as input (the four N/Z/C/V flags). */
2488 #define thumb_flag_modifies_all() \
2489 flag_status |= 0xFF \
/* May/must modify Z and N only. */
2491 #define thumb_flag_modifies_zn() \
2492 flag_status |= 0xCC \
/* May/must modify Z, N and C (e.g. shifts by a nonzero immediate). */
2494 #define thumb_flag_modifies_znc() \
2495 flag_status |= 0xEE \
/* May modify Z/N/C but is only guaranteed to modify Z and N (used for
   register-specified shift forms, where C can be left untouched). */
2497 #define thumb_flag_modifies_zn_maybe_c() \
2498 flag_status |= 0xCE \
/* May/must modify C only. */
2500 #define thumb_flag_modifies_c() \
2501 flag_status |= 0x22 \
/* Instruction reads the C flag (e.g. ADC/SBC forms below). */
2503 #define thumb_flag_requires_c() \
2504 flag_status |= 0x200 \
/* Instruction needs all flags live afterwards (anything that may change
   the PC, since the destination's flag needs are unknown). */
2506 #define thumb_flag_requires_all() \
2507 flag_status |= 0xF00 \
/* Classify the current Thumb opcode's flag behavior and record the result
   in block_data[] for the dead-flag elimination pass. Dispatches on the
   opcode's top byte, mirroring the interpreter's decode switch. */
2509 #define thumb_flag_status() \
2511 u16 flag_status = 0; \
2512 switch((opcode >> 8) & 0xFF) \
2514 /* left shift by imm */ \
2515 case 0x00 ... 0x07: \
2516 thumb_flag_modifies_zn(); \
2517 if(((opcode >> 6) & 0x1F) != 0) \
2519 thumb_flag_modifies_c(); \
2523 /* right shift by imm */ \
2524 case 0x08 ... 0x17: \
2525 thumb_flag_modifies_znc(); \
2528 /* add, subtract */ \
2529 case 0x18 ... 0x1F: \
2530 thumb_flag_modifies_all(); \
2533 /* mov reg, imm */ \
2534 case 0x20 ... 0x27: \
2535 thumb_flag_modifies_zn(); \
2538 /* cmp reg, imm; add, subtract */ \
2539 case 0x28 ... 0x3F: \
2540 thumb_flag_modifies_all(); \
2544 switch((opcode >> 6) & 0x03) \
2548 thumb_flag_modifies_zn(); \
2553 thumb_flag_modifies_zn(); \
2558 thumb_flag_modifies_zn_maybe_c(); \
2563 thumb_flag_modifies_zn_maybe_c(); \
2569 switch((opcode >> 6) & 0x03) \
2573 thumb_flag_modifies_zn_maybe_c(); \
2578 thumb_flag_modifies_all(); \
2579 thumb_flag_requires_c(); \
2584 thumb_flag_modifies_all(); \
2585 thumb_flag_requires_c(); \
2590 thumb_flag_modifies_zn_maybe_c(); \
2595 /* TST, NEG, CMP, CMN */ \
2597 thumb_flag_modifies_all(); \
2600 /* ORR, MUL, BIC, MVN */ \
2602 thumb_flag_modifies_zn(); \
2607 thumb_flag_modifies_all(); \
2610 /* mov might change PC (fall through if so) */ \
2612 if((opcode & 0xFF87) != 0x4687) \
2615 /* branches (can change PC) */ \
2618 case 0xD0 ... 0xE7: \
2619 case 0xF0 ... 0xFF: \
2620 thumb_flag_requires_all(); \
2623 block_data[block_data_position].flag_data = flag_status; \
/* Tag -> translated-block-address tables for RAM and BIOS code. The 16-bit
   tag itself is stamped over the code in memory (see fill_tag_* below);
   0x0101 is the smallest valid tag. */
2626 u8 *ram_block_ptrs[1024 * 64];
2627 u32 ram_block_tag_top = 0x0101;
2629 u8 *bios_block_ptrs[1024 * 8];
2630 u32 bios_block_tag_top = 0x0101;
2632 // This function will return a pointer to a translated block of code. If it
2633 // doesn't exist it will translate it, if it does it will pass it back.
2635 // type should be "arm", "thumb", or "dual." For arm or thumb the PC should
2636 // be a real PC, for dual the least significant bit will determine if it's
2637 // ARM or Thumb mode.
/* Per-mode PC adjustment for block lookup. The "dual" variant decodes the
   execution mode from bit 0 of the PC and updates the CPSR Thumb bit (0x20)
   to match. NOTE(review): the arm/thumb variant bodies are not visible in
   this view. */
2639 #define block_lookup_address_pc_arm() \
2642 #define block_lookup_address_pc_thumb() \
2645 #define block_lookup_address_pc_dual() \
2646 u32 thumb = pc & 0x01; \
2651 reg[REG_CPSR] |= 0x20; \
2655 pc = (pc + 2) & ~0x03; \
2656 reg[REG_CPSR] &= ~0x20; \
/* Token-paste helpers mapping a mem_type (ram/rom/bios) to its region enum
   for the mem_type##_translation_region expansions below. */
2659 #define ram_translation_region TRANSLATION_REGION_RAM
2660 #define rom_translation_region TRANSLATION_REGION_ROM
2661 #define bios_translation_region TRANSLATION_REGION_BIOS
/* Invoke the translator for the given instruction set; "dual" selects ARM
   or Thumb using the mode decoded by block_lookup_address_pc_dual(). */
2663 #define block_lookup_translate_arm(mem_type, smc_enable) \
2664 translation_result = translate_block_arm(pc, mem_type##_translation_region, \
2667 #define block_lookup_translate_thumb(mem_type, smc_enable) \
2668 translation_result = translate_block_thumb(pc, \
2669 mem_type##_translation_region, smc_enable) \
2671 #define block_lookup_translate_dual(mem_type, smc_enable) \
2674 translation_result = translate_block_thumb(pc, \
2675 mem_type##_translation_region, smc_enable); \
2679 translation_result = translate_block_arm(pc, \
2680 mem_type##_translation_region, smc_enable); \
2683 // 0x0101 is the smallest tag that can be used. 0xFFFF is marked
2684 // in the middle of blocks and used for write guarding, it doesn't
2685 // indicate a valid block either (it's okay to compile a new block
2686 // that overlaps the earlier one, although this should be relatively
/* Stamp the current block tag over the code at "location"; the ARM form
   also marks the second halfword with the 0xFFFF write guard. */
2689 #define fill_tag_arm(mem_type) \
2690 location[0] = mem_type##_block_tag_top; \
2691 location[1] = 0xFFFF \
2693 #define fill_tag_thumb(mem_type) \
2694 *location = mem_type##_block_tag_top \
2696 #define fill_tag_dual(mem_type) \
2698 fill_tag_thumb(mem_type); \
2700 fill_tag_arm(mem_type) \
/* Core tag-based lookup: if *location already holds a valid tag, reuse the
   cached block; otherwise translate a new block, record it in the tag
   table, and on failure (-1, i.e. a cache flush) propagate if nested or
   retry at recursion depth 0. */
2702 #define block_lookup_translate(instruction_type, mem_type, smc_enable) \
2703 block_tag = *location; \
2704 if((block_tag < 0x0101) || (block_tag == 0xFFFF)) \
2707 s32 translation_result; \
2711 translation_recursion_level++; \
2712 block_address = mem_type##_translation_ptr + block_prologue_size; \
2713 mem_type##_block_ptrs[mem_type##_block_tag_top] = block_address; \
2714 fill_tag_##instruction_type(mem_type); \
2715 mem_type##_block_tag_top++; \
2717 block_lookup_translate_##instruction_type(mem_type, smc_enable); \
2718 translation_recursion_level--; \
2720 /* If the translation failed then pass that failure on if we're in \
2721 a recursive level, or try again if we've hit the bottom. */ \
2722 if(translation_result == -1) \
2724 if(translation_recursion_level) \
2730 if(translation_recursion_level == 0) \
2731 translate_invalidate_dcache(); \
2735 block_address = mem_type##_block_ptrs[block_tag]; \
/* Depth of nested block lookups; a translation-cache flush may only be
   retried at depth 0. translation_flush_count tracks flushes per lookup. */
2738 u32 translation_recursion_level = 0;
2739 u32 translation_flush_count = 0;
/* Generates block_lookup_address_{arm,thumb,dual}(pc): returns the host
   address of the translated block for a GBA pc, translating it first if
   necessary. Dispatches on the pc's memory region (BIOS / EWRAM / IWRAM
   use in-memory tags; ROM uses a pc-keyed hash chain, since ROM can't be
   written). Returns (u8 *)-1 for an untranslatable address when nested. */
2742 #define block_lookup_address_builder(type) \
2743 u8 function_cc *block_lookup_address_##type(u32 pc) \
2747 u8 *block_address; \
2749 /* Starting at the beginning, we allow for one translation cache flush. */ \
2750 if(translation_recursion_level == 0) \
2751 translation_flush_count = 0; \
2752 block_lookup_address_pc_##type(); \
2757 bios_region_read_allow(); \
2758 location = (u16 *)(bios_rom + pc + 0x4000); \
2759 block_lookup_translate(type, bios, 0); \
2760 if(translation_recursion_level == 0) \
2761 bios_region_read_allow(); \
2765 location = (u16 *)(ewram + (pc & 0x7FFF) + ((pc & 0x38000) * 2)); \
2766 block_lookup_translate(type, ram, 1); \
2767 if(translation_recursion_level == 0) \
2768 bios_region_read_protect(); \
2772 location = (u16 *)(iwram + (pc & 0x7FFF)); \
2773 block_lookup_translate(type, ram, 1); \
2774 if(translation_recursion_level == 0) \
2775 bios_region_read_protect(); \
2780 u32 hash_target = ((pc * 2654435761U) >> 16) & \
2781 (ROM_BRANCH_HASH_SIZE - 1); \
2782 u32 *block_ptr = rom_branch_hash[hash_target]; \
2783 u32 **block_ptr_address = rom_branch_hash + hash_target; \
2787 if(block_ptr[0] == pc) \
2789 block_address = (u8 *)(block_ptr + 2) + block_prologue_size; \
2793 block_ptr_address = (u32 **)(block_ptr + 1); \
2794 block_ptr = (u32 *)block_ptr[1]; \
2797 if(block_ptr == NULL) \
2800 s32 translation_result; \
2804 translation_recursion_level++; \
2805 ((u32 *)rom_translation_ptr)[0] = pc; \
2806 ((u32 **)rom_translation_ptr)[1] = NULL; \
2807 *block_ptr_address = (u32 *)rom_translation_ptr; \
2808 rom_translation_ptr += 8; \
2809 block_address = rom_translation_ptr + block_prologue_size; \
2810 block_lookup_translate_##type(rom, 0); \
2811 translation_recursion_level--; \
2813 /* If the translation failed then pass that failure on if we're in \
2814 a recursive level, or try again if we've hit the bottom. */ \
2815 if(translation_result == -1) \
2817 if(translation_recursion_level) \
2823 if(translation_recursion_level == 0) \
2824 translate_invalidate_dcache(); \
2826 if(translation_recursion_level == 0) \
2827 bios_region_read_protect(); \
2832 /* If we're at the bottom, it means we're actually trying to jump to an \
2833 address that we can't handle. Otherwise, it means that code scanned \
2834 has reached an address that can't be handled, which means that we \
2835 have most likely hit an area that doesn't contain code yet (for \
2836 instance, in RAM). If such a thing happens, return -1 and the \
2837 block translator will naively link it (it'll be okay, since it \
2838 should never be hit) */ \
2839 if(translation_recursion_level == 0) \
2842 sprintf(buffer, "bad jump %x (%x) (%x)\n", pc, reg[REG_PC], \
2843 last_instruction); \
2847 block_address = (u8 *)(-1); \
2851 return block_address; \
/* Instantiate the three lookup entry points. */
2854 block_lookup_address_builder(arm);
2855 block_lookup_address_builder(thumb);
2856 block_lookup_address_builder(dual);
2858 // Potential exit point: If the rd field is pc for instructions is 0x0F,
2859 // the instruction is b/bl/bx, or the instruction is ldm with PC in the
2861 // All instructions with upper 3 bits less than 100b have an rd field
2862 // except bx, where the bits must be 0xF there anyway, multiplies,
2863 // which cannot have 0xF in the corresponding fields, and msr, which
2864 // has 0x0F there but doesn't end things (therefore must be special
2865 // checked against). Because MSR and BX overlap both are checked for.
/* True if this ARM opcode can leave the block: PC as destination (rd field
   == 0xF, excluding msr), bx, ldm with PC, b/bl, or an swi that isn't HLE
   handled. The condition field has already been stripped from "opcode" by
   arm_load_opcode(). */
2867 #define arm_exit_point \
2868 (((opcode < 0x8000000) && ((opcode & 0x000F000) == 0x000F000) && \
2869 ((opcode & 0xDB0F000) != 0x120F000)) || \
2870 ((opcode & 0x12FFF10) == 0x12FFF10) || \
2871 ((opcode & 0x8108000) == 0x8108000) || \
2872 ((opcode >= 0xA000000) && (opcode < 0xF000000)) || \
2873 ((opcode > 0xF000000) && (!swi_hle_handle[((opcode >> 16) & 0xFF)]))) \
/* b / bl (direct branch). */
2875 #define arm_opcode_branch \
2876 ((opcode & 0xE000000) == 0xA000000) \
/* swi. */
2878 #define arm_opcode_swi \
2879 ((opcode & 0xF000000) == 0xF000000) \
/* Condition code AL: only unconditional instructions may end the block. */
2881 #define arm_opcode_unconditional_branch \
2882 (condition == 0x0E) \
/* Fetch the opcode at block_end_pc and split off the condition field. */
2884 #define arm_load_opcode() \
2885 opcode = address32(pc_address_block, (block_end_pc & 0x7FFF)); \
2886 condition = opcode >> 28; \
2888 opcode &= 0xFFFFFFF; \
/* Sign-extend the 24-bit branch offset ((x << 8) >> 6 also scales it by 4)
   and add it to block_end_pc + 4. */
2892 #define arm_branch_target() \
2893 branch_target = (block_end_pc + 4 + (((s32)(opcode & 0xFFFFFF) << 8) >> 6)) \
2895 // Contiguous conditional block flags modification - it will set 0x20 in the
2896 // condition's bits if this instruction modifies flags. Taken from the CPU
2897 // switch so it'd better be right this time.
2899 #define arm_set_condition(_condition) \
2900 block_data[block_data_position].condition = _condition; \
2901 switch((opcode >> 20) & 0xFF) \
2909 if((((opcode >> 5) & 0x03) == 0) || ((opcode & 0x90) != 0x90)) \
2910 block_data[block_data_position].condition |= 0x20; \
2917 case 0x15 ... 0x17: \
2922 if((opcode & 0x90) != 0x90) \
2923 block_data[block_data_position].condition |= 0x20; \
2927 if(((opcode & 0x90) != 0x90) && !(opcode & 0x10)) \
2928 block_data[block_data_position].condition |= 0x20; \
2938 case 0x2F ... 0x37: \
2943 block_data[block_data_position].condition |= 0x20; \
/* Resolve a direct ARM branch target to a translated block address. */
2947 #define arm_link_block() \
2948 translation_target = block_lookup_address_arm(branch_target) \
2950 #define arm_instruction_width 4
/* Accumulate sequential 32-bit fetch waitstates for this instruction. */
2952 #define arm_base_cycles() \
2953 cycle_count += waitstate_cycles_sequential[pc >> 24][2] \
2955 // For now this just sets a variable that says flags should always be
/* No dead-flag elimination pass for ARM yet (see comment above). */
2958 #define arm_dead_flag_eliminate() \
2961 // The following Thumb instructions can exit:
2962 // b, bl, bx, swi, pop {... pc}, and mov pc, ..., the latter being a hireg
2963 // op only. Rather simpler to identify than the ARM set.
/* True if this Thumb opcode can leave the block (see the list above);
   swi only exits when it is not HLE handled. */
2965 #define thumb_exit_point \
2966 (((opcode >= 0xD000) && (opcode < 0xDF00)) || \
2967 (((opcode & 0xFF00) == 0xDF00) && \
2968 (!swi_hle_handle[opcode & 0xFF])) || \
2969 ((opcode >= 0xE000) && (opcode < 0xE800)) || \
2970 ((opcode & 0xFF00) == 0x4700) || \
2971 ((opcode & 0xFF00) == 0xBD00) || \
2972 ((opcode & 0xFF87) == 0x4687) || \
2973 ((opcode >= 0xF800))) \
/* Conditional branch, unconditional branch, or a bl word. */
2975 #define thumb_opcode_branch \
2976 (((opcode >= 0xD000) && (opcode < 0xDF00)) || \
2977 ((opcode >= 0xE000) && (opcode < 0xE800)) || \
2978 (opcode >= 0xF800)) \
/* swi. */
2980 #define thumb_opcode_swi \
2981 ((opcode & 0xFF00) == 0xDF00) \
/* Everything outside the conditional-branch encodings counts as
   unconditional for block-ending purposes. */
2983 #define thumb_opcode_unconditional_branch \
2984 ((opcode < 0xD000) || (opcode >= 0xDF00)) \
/* Fetch the next Thumb opcode, remembering the previous one (needed to
   pair bl low/high words in thumb_branch_target()). */
2986 #define thumb_load_opcode() \
2987 last_opcode = opcode; \
2988 opcode = address16(pc_address_block, (block_end_pc & 0x7FFF)); \
/* Compute the direct branch target: conditional (signed 8-bit * 2),
   unconditional (signed 11-bit * 2), or a bl high word paired with its low
   word; bails to no_direct_branch when a bl high word has no matching low
   word before it (effectively an indirect branch). */
2992 #define thumb_branch_target() \
2993 if(opcode < 0xE000) \
2995 branch_target = block_end_pc + 2 + ((s8)(opcode & 0xFF) * 2); \
2999 if(opcode < 0xF800) \
3001 branch_target = block_end_pc + 2 + ((s32)((opcode & 0x7FF) << 21) >> 20); \
3005 if((last_opcode >= 0xF000) && (last_opcode < 0xF800)) \
3008 (block_end_pc + ((s32)((last_opcode & 0x07FF) << 21) >> 9) + \
3009 ((opcode & 0x07FF) * 2)); \
3013 goto no_direct_branch; \
/* Thumb does not track conditional runs the way ARM does; no-op. */
3017 #define thumb_set_condition(_condition) \
/* Resolve a Thumb branch target; target 0x00000008 is the swi vector,
   which executes in ARM mode. */
3019 #define thumb_link_block() \
3020 if(branch_target != 0x00000008) \
3021 translation_target = block_lookup_address_thumb(branch_target); \
3023 translation_target = block_lookup_address_arm(branch_target) \
3025 #define thumb_instruction_width 2
/* Accumulate sequential 16-bit fetch waitstates for this instruction. */
3027 #define thumb_base_cycles() \
3028 cycle_count += waitstate_cycles_sequential[pc >> 24][1] \
3030 // Here's how this works: each instruction has three different sets of flag
3031 // attributes, each consisting of a 4bit mask describing how that instruction
3032 // interacts with the 4 main flags (N/Z/C/V).
3033 // The first set, in bits 0:3, is the set of flags the instruction may
3034 // modify. After this pass this is changed to the set of flags the instruction
3035 // should modify - if the bit for the corresponding flag is not set then code
3036 // does not have to be generated to calculate the flag for that instruction.
3038 // The second set, in bits 7:4, is the set of flags that the instruction must
3039 // modify (ie, for shifts by the register values the instruction may not
3040 // always modify the C flag, and thus the C bit won't be set here).
3042 // The third set, in bits 11:8, is the set of flags that the instruction uses
3043 // in its computation, or the set of flags that will be needed after the
3044 // instruction is done. For any instructions that change the PC all of the
3045 // bits should be set because it is (for now) unknown what flags will be
3046 // needed after it arrives at its destination. Instructions that use the
3047 // carry flag as input will have it set as well.
3049 // The algorithm is a simple liveness analysis procedure: It starts at the
3050 // bottom of the instruction stream and sets a "currently needed" mask to
3051 // the flags needed mask of the current instruction. Then it moves down
3052 // an instruction, ANDs that instructions "should generate" mask by the
3053 // "currently needed" mask, then ANDs the "currently needed" mask by
3054 // the 1's complement of the instruction's "must generate" mask, and ORs
3055 // the "currently needed" mask by the instruction's "flags needed" mask.
/* Backwards liveness pass over block_data[]: starting from the last
   instruction's "flags needed" set, mask each earlier instruction's
   may-modify flags by what is actually needed downstream (see the
   algorithm description above). */
3057 #define thumb_dead_flag_eliminate() \
3060 needed_mask = block_data[block_data_position].flag_data >> 8; \
3062 block_data_position--; \
3063 while(block_data_position >= 0) \
3065 flag_status = block_data[block_data_position].flag_data; \
3066 block_data[block_data_position].flag_data = \
3067 (flag_status & needed_mask); \
3068 needed_mask &= ~((flag_status >> 4) & 0x0F); \
3069 needed_mask |= flag_status >> 8; \
3070 block_data_position--; \
/* Scan limits: instructions per block and recorded exits per block. */
3074 #define MAX_BLOCK_SIZE 8192
3075 #define MAX_EXITS 256
3077 block_data_type block_data[MAX_BLOCK_SIZE];
3078 block_exit_type block_exits[MAX_EXITS];
/* Self-modifying-code guard used while scanning writable (RAM) code:
   writes a guard value into the companion area 0x8000 bytes below the
   code's page offset. NOTE(review): presumably the mirrored/shadow copy
   used to detect stores into translated code -- confirm against the
   memory layout, which is not visible here. */
3080 #define smc_write_arm_yes() \
3081 if(address32(pc_address_block, (block_end_pc & 0x7FFF) - 0x8000) == 0x0000) \
3083 address32(pc_address_block, (block_end_pc & 0x7FFF) - 0x8000) = \
3087 #define smc_write_thumb_yes() \
3088 if(address16(pc_address_block, (block_end_pc & 0x7FFF) - 0x8000) == 0x0000) \
3090 address16(pc_address_block, (block_end_pc & 0x7FFF) - 0x8000) = 0xFFFF; \
/* No SMC tracking for read-only regions (ROM/BIOS). */
3093 #define smc_write_arm_no() \
3095 #define smc_write_thumb_no() \
/* First pass over a block: walk instructions from pc, recording flag usage
   (type##_flag_status) and branch exits, until an unconditional exit point
   ends the block -- or MAX_BLOCK_SIZE / a translation gate / a hard address
   limit is hit. smc_write_op selects whether SMC guard tags are written. */
3097 #define scan_block(type, smc_write_op) \
3099 __label__ block_end; \
3100 /* Find the end of the block */ \
3103 check_pc_region(block_end_pc); \
3104 smc_write_##type##_##smc_write_op(); \
3105 type##_load_opcode(); \
3106 type##_flag_status(); \
3108 if(type##_exit_point) \
3110 /* Branch/branch with link */ \
3111 if(type##_opcode_branch) \
3113 __label__ no_direct_branch; \
3114 type##_branch_target(); \
3115 block_exits[block_exit_position].branch_target = branch_target; \
3116 block_exit_position++; \
3118 /* Give the branch target macro somewhere to bail if it turns out to \
3119 be an indirect branch (ala malformed Thumb bl) */ \
3120 no_direct_branch:; \
3123 /* SWI branches to the BIOS, this will likely change when \
3124 some HLE BIOS is implemented. */ \
3125 if(type##_opcode_swi) \
3127 block_exits[block_exit_position].branch_target = 0x00000008; \
3128 block_exit_position++; \
3131 type##_set_condition(condition | 0x10); \
3133 /* Only unconditional branches can end the block. */ \
3134 if(type##_opcode_unconditional_branch) \
3136 /* Check to see if any prior block exits branch after here, \
3137 if so don't end the block. Starts from the top and works \
3138 down because the most recent branch is most likely to \
3139 join after the end (if/then form) */ \
3140 for(i = block_exit_position - 2; i >= 0; i--) \
3142 if(block_exits[i].branch_target == block_end_pc) \
3149 if(block_exit_position == MAX_EXITS) \
3154 type##_set_condition(condition); \
3157 for(i = 0; i < translation_gate_targets; i++) \
3159 if(block_end_pc == translation_gate_target_pc[i]) \
3163 block_data[block_data_position].update_cycles = 0; \
3164 block_data_position++; \
3165 if((block_data_position == MAX_BLOCK_SIZE) || \
3166 (block_end_pc == 0x3007FF0) || (block_end_pc == 0x203FFFF0)) \
/* Mode-specific PC fixups (bodies not visible in this view). */
3175 #define arm_fix_pc() \
3178 #define thumb_fix_pc() \
/* Second pass: generates translate_block_{arm,thumb}(). Scans the block
   (scan_block), marks entry points needing cycle updates, runs dead-flag
   elimination, emits host code per instruction, then patches internal
   branches and links external exits. Returns -1 if the translation cache
   filled up mid-block (the cache is flushed and the caller retries at
   recursion depth 0). */
3181 #define translate_block_builder(type) \
3182 s32 translate_block_##type(u32 pc, translation_region_type \
3183 translation_region, u32 smc_enable) \
3188 u32 last_condition; \
3189 u32 pc_region = (pc >> 15); \
3190 u32 new_pc_region; \
3191 u8 *pc_address_block = memory_map_read[pc_region]; \
3192 u32 block_start_pc = pc; \
3193 u32 block_end_pc = pc; \
3194 u32 block_exit_position = 0; \
3195 s32 block_data_position = 0; \
3196 u32 external_block_exit_position = 0; \
3197 u32 branch_target; \
3198 u32 cycle_count = 0; \
3199 u8 *translation_target; \
3200 u8 *backpatch_address; \
3201 u8 *translation_ptr; \
3202 u8 *translation_cache_limit; \
3205 block_exit_type external_block_exits[MAX_EXITS]; \
3206 generate_block_extra_vars_##type(); \
3209 if(pc_address_block == NULL) \
3210 pc_address_block = load_gamepak_page(pc_region & 0x3FF); \
3212 switch(translation_region) \
3214 case TRANSLATION_REGION_RAM: \
3215 if(pc >= 0x3000000) \
3217 if((pc < iwram_code_min) || (iwram_code_min == 0xFFFFFFFF)) \
3218 iwram_code_min = pc; \
3222 if(pc >= 0x2000000) \
3224 if((pc < ewram_code_min) || (ewram_code_min == 0xFFFFFFFF)) \
3225 ewram_code_min = pc; \
3228 translation_ptr = ram_translation_ptr; \
3229 translation_cache_limit = \
3230 ram_translation_cache + RAM_TRANSLATION_CACHE_SIZE - \
3231 TRANSLATION_CACHE_LIMIT_THRESHOLD; \
3234 case TRANSLATION_REGION_ROM: \
3235 translation_ptr = rom_translation_ptr; \
3236 translation_cache_limit = \
3237 rom_translation_cache + ROM_TRANSLATION_CACHE_SIZE - \
3238 TRANSLATION_CACHE_LIMIT_THRESHOLD; \
3241 case TRANSLATION_REGION_BIOS: \
3242 translation_ptr = bios_translation_ptr; \
3243 translation_cache_limit = bios_translation_cache + \
3244 BIOS_TRANSLATION_CACHE_SIZE; \
3248 generate_block_prologue(); \
3250 /* This is a function because it's used a lot more than it might seem (all \
3251 of the data processing functions can access it), and its expansion was \
3252 massacring the compiler. */ \
3256 scan_block(type, yes); \
3260 scan_block(type, no); \
3263 for(i = 0; i < block_exit_position; i++) \
3265 branch_target = block_exits[i].branch_target; \
3267 if((branch_target > block_start_pc) && \
3268 (branch_target < block_end_pc)) \
3270 block_data[(branch_target - block_start_pc) / \
3271 type##_instruction_width].update_cycles = 1; \
3275 type##_dead_flag_eliminate(); \
3277 block_exit_position = 0; \
3278 block_data_position = 0; \
3280 last_condition = 0x0E; \
3282 while(pc != block_end_pc) \
3284 block_data[block_data_position].block_offset = translation_ptr; \
3285 type##_base_cycles(); \
3286 /*generate_step_debug();*/ \
3288 translate_##type##_instruction(); \
3289 block_data_position++; \
3291 /* If it went too far the cache needs to be flushed and the process \
3292 restarted. Because we might already be nested several stages in \
3293 a simple recursive call here won't work, it has to pedal out to \
3296 if(translation_ptr > translation_cache_limit) \
3298 translation_flush_count++; \
3300 switch(translation_region) \
3302 case TRANSLATION_REGION_RAM: \
3303 flush_translation_cache_ram(); \
3306 case TRANSLATION_REGION_ROM: \
3307 flush_translation_cache_rom(); \
3310 case TRANSLATION_REGION_BIOS: \
3311 flush_translation_cache_bios(); \
3318 /* If the next instruction is a block entry point update the \
3319 cycle counter and update */ \
3320 if(block_data[block_data_position].update_cycles == 1) \
3322 generate_cycle_update(); \
3325 for(i = 0; i < translation_gate_targets; i++) \
3327 if(pc == translation_gate_target_pc[i]) \
3329 generate_translation_gate(type); \
3334 for(i = 0; i < block_exit_position; i++) \
3336 branch_target = block_exits[i].branch_target; \
3338 if((branch_target >= block_start_pc) && (branch_target < block_end_pc)) \
3340 /* Internal branch, patch to recorded address */ \
3341 translation_target = \
3342 block_data[(branch_target - block_start_pc) / \
3343 type##_instruction_width].block_offset; \
3345 generate_branch_patch_unconditional(block_exits[i].branch_source, \
3346 translation_target); \
3350 /* External branch, save for later */ \
3351 external_block_exits[external_block_exit_position].branch_target = \
3353 external_block_exits[external_block_exit_position].branch_source = \
3354 block_exits[i].branch_source; \
3355 external_block_exit_position++; \
3359 switch(translation_region) \
3361 case TRANSLATION_REGION_RAM: \
3362 if(pc >= 0x3000000) \
3364 if((pc > iwram_code_max) || (iwram_code_max == 0xFFFFFFFF)) \
3365 iwram_code_max = pc; \
3369 if(pc >= 0x2000000) \
3371 if((pc > ewram_code_max) || (ewram_code_max == 0xFFFFFFFF)) \
3372 ewram_code_max = pc; \
3375 ram_translation_ptr = translation_ptr; \
3378 case TRANSLATION_REGION_ROM: \
3379 rom_translation_ptr = translation_ptr; \
3382 case TRANSLATION_REGION_BIOS: \
3383 bios_translation_ptr = translation_ptr; \
3387 for(i = 0; i < external_block_exit_position; i++) \
3389 branch_target = external_block_exits[i].branch_target; \
3390 type##_link_block(); \
3391 if(translation_target == NULL) \
3393 generate_branch_patch_unconditional( \
3394 external_block_exits[i].branch_source, translation_target); \
/* Instantiate the ARM and Thumb translators. */
3400 translate_block_builder(arm);
3401 translate_block_builder(thumb);
/* Invalidate all translated RAM (IWRAM/EWRAM) code: reset the RAM cache
   pointer and tag counter, then clear the tag bytes covering the
   [code_min, code_max) range so stale tags can't match future lookups.
   NOTE(review): the memsets target the tag/shadow area of iwram/ewram
   (the same offsets fill_tag_* wrote through) -- the buffer layout that
   makes this safe is declared elsewhere. */
3403 void flush_translation_cache_ram()
3406 /* printf("ram flush %d (pc %x), %x to %x, %x to %x\n",
3407 flush_ram_count, reg[REG_PC], iwram_code_min, iwram_code_max,
3408 ewram_code_min, ewram_code_max); */
3411 invalidate_icache_region(ram_translation_cache,
3412 (ram_translation_ptr - ram_translation_cache) + 0x100);
3414 ram_translation_ptr = ram_translation_cache;
3415 last_ram_translation_ptr = ram_translation_cache;
3416 ram_block_tag_top = 0x0101;
3417 if(iwram_code_min != 0xFFFFFFFF)
3419 iwram_code_min &= 0x7FFF;
3420 iwram_code_max &= 0x7FFF;
3421 memset(iwram + iwram_code_min, 0, iwram_code_max - iwram_code_min);
3424 if(ewram_code_min != 0xFFFFFFFF)
3426 u32 ewram_code_min_page;
3427 u32 ewram_code_max_page;
3428 u32 ewram_code_min_offset;
3429 u32 ewram_code_max_offset;
3432 ewram_code_min &= 0x3FFFF;
3433 ewram_code_max &= 0x3FFFF;
3435 ewram_code_min_page = ewram_code_min >> 15;
3436 ewram_code_max_page = ewram_code_max >> 15;
3437 ewram_code_min_offset = ewram_code_min & 0x7FFF;
3438 ewram_code_max_offset = ewram_code_max & 0x7FFF;
/* EWRAM tags span multiple 32KB pages; clear partial first/last pages and
   whole pages in between. */
3440 if(ewram_code_min_page == ewram_code_max_page)
3442 memset(ewram + (ewram_code_min_page * 0x10000) +
3443 ewram_code_min_offset, 0,
3444 ewram_code_max_offset - ewram_code_min_offset);
3448 for(i = ewram_code_min_page + 1; i < ewram_code_max_page; i++)
3450 memset(ewram + (i * 0x10000), 0, 0x8000);
3453 memset(ewram, 0, ewram_code_max_offset);
/* Reset the tracked code extents to "empty". */
3457 iwram_code_min = 0xFFFFFFFF;
3458 iwram_code_max = 0xFFFFFFFF;
3459 ewram_code_min = 0xFFFFFFFF;
3460 ewram_code_max = 0xFFFFFFFF;
/* Invalidate all translated ROM code: reset the ROM cache pointer and
   clear the pc-keyed branch hash table. */
3463 void flush_translation_cache_rom()
3466 invalidate_icache_region(rom_translation_cache,
3467 rom_translation_ptr - rom_translation_cache + 0x100);
3470 rom_translation_ptr = rom_translation_cache;
3471 last_rom_translation_ptr = rom_translation_cache;
3472 memset(rom_branch_hash, 0, sizeof(rom_branch_hash));
/* Invalidate all translated BIOS code: reset the BIOS cache pointer and
   tag counter, and clear the BIOS tag area at bios_rom + 0x4000 (where
   block_lookup_address_builder stamped tags). */
3475 void flush_translation_cache_bios()
3478 invalidate_icache_region(bios_translation_cache,
3479 bios_translation_ptr - bios_translation_cache + 0x100);
3482 bios_block_tag_top = 0x0101;
3483 bios_translation_ptr = bios_translation_cache;
3484 last_bios_translation_ptr = bios_translation_cache;
3485 memset(bios_rom + 0x4000, 0, 0x4000);
3489 #define cache_dump_prefix "/mnt/nand/"
3491 #define cache_dump_prefix ""
3494 void dump_translation_cache()
3496 file_open(ram_cache, cache_dump_prefix "ram_cache.bin", write);
3497 file_write(ram_cache, ram_translation_cache,
3498 ram_translation_ptr - ram_translation_cache);
3499 file_close(ram_cache);
3501 file_open(rom_cache, cache_dump_prefix "rom_cache.bin", write);
3502 file_write(rom_cache, rom_translation_cache,
3503 rom_translation_ptr - rom_translation_cache);
3504 file_close(rom_cache);
3506 file_open(bios_cache, cache_dump_prefix "bios_cache.bin", write);
3507 file_write(bios_cache, bios_translation_cache,
3508 bios_translation_ptr - bios_translation_cache);
3509 file_close(bios_cache);