3 * Copyright (C) 2006 Exophase <exophase@gmail.com>
5 * This program is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU General Public License as
7 * published by the Free Software Foundation; either version 2 of
8 * the License, or (at your option) any later version.
10 * This program is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * General Public License for more details.
15 * You should have received a copy of the GNU General Public License
16 * along with this program; if not, write to the Free Software
17 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21 // - stm reglist writeback when base is in the list needs adjustment
22 // - block memory needs psr swapping and user mode reg swapping
27 u32 memory_region_access_read_u8[16];
28 u32 memory_region_access_read_s8[16];
29 u32 memory_region_access_read_u16[16];
30 u32 memory_region_access_read_s16[16];
31 u32 memory_region_access_read_u32[16];
32 u32 memory_region_access_write_u8[16];
33 u32 memory_region_access_write_u16[16];
34 u32 memory_region_access_write_u32[16];
41 u32 memory_writes_u16;
42 u32 memory_writes_u32;
44 const u8 bit_count[256] =
46 0, 1, 1, 2, 1, 2, 2, 3, 1, 2, 2, 3, 2, 3, 3, 4, 1, 2, 2, 3, 2, 3, 3,
47 4, 2, 3, 3, 4, 3, 4, 4, 5, 1, 2, 2, 3, 2, 3, 3, 4, 2, 3, 3, 4, 3, 4,
48 4, 5, 2, 3, 3, 4, 3, 4, 4, 5, 3, 4, 4, 5, 4, 5, 5, 6, 1, 2, 2, 3, 2,
49 3, 3, 4, 2, 3, 3, 4, 3, 4, 4, 5, 2, 3, 3, 4, 3, 4, 4, 5, 3, 4, 4, 5,
50 4, 5, 5, 6, 2, 3, 3, 4, 3, 4, 4, 5, 3, 4, 4, 5, 4, 5, 5, 6, 3, 4, 4,
51 5, 4, 5, 5, 6, 4, 5, 5, 6, 5, 6, 6, 7, 1, 2, 2, 3, 2, 3, 3, 4, 2, 3,
52 3, 4, 3, 4, 4, 5, 2, 3, 3, 4, 3, 4, 4, 5, 3, 4, 4, 5, 4, 5, 5, 6, 2,
53 3, 3, 4, 3, 4, 4, 5, 3, 4, 4, 5, 4, 5, 5, 6, 3, 4, 4, 5, 4, 5, 5, 6,
54 4, 5, 5, 6, 5, 6, 6, 7, 2, 3, 3, 4, 3, 4, 4, 5, 3, 4, 4, 5, 4, 5, 5,
55 6, 3, 4, 4, 5, 4, 5, 5, 6, 4, 5, 5, 6, 5, 6, 6, 7, 3, 4, 4, 5, 4, 5,
56 5, 6, 4, 5, 5, 6, 5, 6, 6, 7, 4, 5, 5, 6, 5, 6, 6, 7, 5, 6, 6, 7, 6,
61 #ifdef REGISTER_USAGE_ANALYZE
63 u64 instructions_total = 0;
66 u64 arm_reg_access_total = 0;
67 u64 arm_instructions_total = 0;
69 u64 thumb_reg_freq[16];
70 u64 thumb_reg_access_total = 0;
71 u64 thumb_instructions_total = 0;
73 // mla/long mla's addition operand are not counted yet.
75 #define using_register(instruction_set, register, type) \
76 instruction_set##_reg_freq[register]++; \
77 instruction_set##_reg_access_total++ \
79 #define using_register_list(instruction_set, rlist, count) \
82 for(i = 0; i < count; i++) \
84 if((reg_list >> i) & 0x01) \
86 using_register(instruction_set, i, memory_target); \
91 #define using_instruction(instruction_set) \
92 instruction_set##_instructions_total++; \
93 instructions_total++ \
95 int sort_tagged_element(const void *_a, const void *_b)
100 return (int)(b[1] - a[1]);
103 void print_register_usage()
106 u64 arm_reg_freq_tagged[32];
107 u64 thumb_reg_freq_tagged[32];
109 double percent_total = 0.0;
111 for(i = 0; i < 16; i++)
113 arm_reg_freq_tagged[i * 2] = i;
114 arm_reg_freq_tagged[(i * 2) + 1] = arm_reg_freq[i];
115 thumb_reg_freq_tagged[i * 2] = i;
116 thumb_reg_freq_tagged[(i * 2) + 1] = thumb_reg_freq[i];
119 qsort(arm_reg_freq_tagged, 16, sizeof(u64) * 2, sort_tagged_element);
120 qsort(thumb_reg_freq_tagged, 16, sizeof(u64) * 2, sort_tagged_element);
122 printf("ARM register usage (%lf%% ARM instructions):\n",
123 (arm_instructions_total * 100.0) / instructions_total);
124 for(i = 0; i < 16; i++)
126 percent = (arm_reg_freq_tagged[(i * 2) + 1] * 100.0) /
127 arm_reg_access_total;
128 percent_total += percent;
129 printf("r%02d: %lf%% (-- %lf%%)\n",
130 (u32)arm_reg_freq_tagged[(i * 2)], percent, percent_total);
135 printf("\nThumb register usage (%lf%% Thumb instructions):\n",
136 (thumb_instructions_total * 100.0) / instructions_total);
137 for(i = 0; i < 16; i++)
139 percent = (thumb_reg_freq_tagged[(i * 2) + 1] * 100.0) /
140 thumb_reg_access_total;
141 percent_total += percent;
142 printf("r%02d: %lf%% (-- %lf%%)\n",
143 (u32)thumb_reg_freq_tagged[(i * 2)], percent, percent_total);
146 memset(arm_reg_freq, 0, sizeof(u64) * 16);
147 memset(thumb_reg_freq, 0, sizeof(u64) * 16);
148 arm_reg_access_total = 0;
149 thumb_reg_access_total = 0;
154 #define using_register(instruction_set, register, type) \
156 #define using_register_list(instruction_set, rlist, count) \
158 #define using_instruction(instruction_set) \
// Operand decoders for the ARM instruction formats. Each macro extracts
// the register/immediate fields of `opcode` into locals and feeds the
// register-usage probes (no-ops unless REGISTER_USAGE_ANALYZE).

#define arm_decode_data_proc_reg() \
  u32 rn = (opcode >> 16) & 0x0F; \
  u32 rd = (opcode >> 12) & 0x0F; \
  u32 rm = opcode & 0x0F; \
  using_register(arm, rd, op_dest); \
  using_register(arm, rn, op_src); \
  using_register(arm, rm, op_src)

// Immediate form: 8-bit value rotated right by twice the 4-bit rotate
// field (the `u32 imm;` declarations were lost in extraction; ror()
// assigns into imm).
#define arm_decode_data_proc_imm() \
  u32 rn = (opcode >> 16) & 0x0F; \
  u32 rd = (opcode >> 12) & 0x0F; \
  u32 imm; \
  ror(imm, opcode & 0xFF, ((opcode >> 8) & 0x0F) * 2); \
  using_register(arm, rd, op_dest); \
  using_register(arm, rn, op_src)

#define arm_decode_psr_reg() \
  u32 psr_field = (opcode >> 16) & 0x0F; \
  u32 rd = (opcode >> 12) & 0x0F; \
  u32 rm = opcode & 0x0F; \
  using_register(arm, rd, op_dest); \
  using_register(arm, rm, op_src)

#define arm_decode_psr_imm() \
  u32 psr_field = (opcode >> 16) & 0x0F; \
  u32 rd = (opcode >> 12) & 0x0F; \
  u32 imm; \
  ror(imm, opcode & 0xFF, ((opcode >> 8) & 0x0F) * 2); \
  using_register(arm, rd, op_dest)

// BX: branch target register in the low nibble.
#define arm_decode_branchx() \
  u32 rn = opcode & 0x0F; \
  using_register(arm, rn, branch_target)

#define arm_decode_multiply() \
  u32 rd = (opcode >> 16) & 0x0F; \
  u32 rn = (opcode >> 12) & 0x0F; \
  u32 rs = (opcode >> 8) & 0x0F; \
  u32 rm = opcode & 0x0F; \
  using_register(arm, rd, op_dest); \
  using_register(arm, rn, op_src); \
  using_register(arm, rm, op_src)

#define arm_decode_multiply_long() \
  u32 rdhi = (opcode >> 16) & 0x0F; \
  u32 rdlo = (opcode >> 12) & 0x0F; \
  u32 rn = (opcode >> 8) & 0x0F; \
  u32 rm = opcode & 0x0F; \
  using_register(arm, rdhi, op_dest); \
  using_register(arm, rdlo, op_dest); \
  using_register(arm, rn, op_src); \
  using_register(arm, rm, op_src)

#define arm_decode_swap() \
  u32 rn = (opcode >> 16) & 0x0F; \
  u32 rd = (opcode >> 12) & 0x0F; \
  u32 rm = opcode & 0x0F; \
  using_register(arm, rd, memory_target); \
  using_register(arm, rn, memory_base); \
  using_register(arm, rm, memory_target)

#define arm_decode_half_trans_r() \
  u32 rn = (opcode >> 16) & 0x0F; \
  u32 rd = (opcode >> 12) & 0x0F; \
  u32 rm = opcode & 0x0F; \
  using_register(arm, rd, memory_target); \
  using_register(arm, rn, memory_base); \
  using_register(arm, rm, memory_offset)

// Halfword transfer, immediate offset: split 4+4 bit immediate.
#define arm_decode_half_trans_of() \
  u32 rn = (opcode >> 16) & 0x0F; \
  u32 rd = (opcode >> 12) & 0x0F; \
  u32 offset = ((opcode >> 4) & 0xF0) | (opcode & 0x0F); \
  using_register(arm, rd, memory_target); \
  using_register(arm, rn, memory_base)

#define arm_decode_data_trans_imm() \
  u32 rn = (opcode >> 16) & 0x0F; \
  u32 rd = (opcode >> 12) & 0x0F; \
  u32 offset = opcode & 0x0FFF; \
  using_register(arm, rd, memory_target); \
  using_register(arm, rn, memory_base)

#define arm_decode_data_trans_reg() \
  u32 rn = (opcode >> 16) & 0x0F; \
  u32 rd = (opcode >> 12) & 0x0F; \
  u32 rm = opcode & 0x0F; \
  using_register(arm, rd, memory_target); \
  using_register(arm, rn, memory_base); \
  using_register(arm, rm, memory_offset)

#define arm_decode_block_trans() \
  u32 rn = (opcode >> 16) & 0x0F; \
  u32 reg_list = opcode & 0xFFFF; \
  using_register(arm, rn, memory_base); \
  using_register_list(arm, reg_list, 16)

// B/BL: 24-bit signed word offset; << 8 then >> 6 sign-extends and
// multiplies by 4 in one step (relies on arithmetic right shift).
#define arm_decode_branch() \
  s32 offset = ((s32)(opcode & 0xFFFFFF) << 8) >> 6
// Operand decoders for the Thumb instruction formats.

#define thumb_decode_shift() \
  u32 imm = (opcode >> 6) & 0x1F; \
  u32 rs = (opcode >> 3) & 0x07; \
  u32 rd = opcode & 0x07; \
  using_register(thumb, rd, op_dest); \
  using_register(thumb, rs, op_shift)

// FIX: the second source probe counted rn twice and never counted rs.
#define thumb_decode_add_sub() \
  u32 rn = (opcode >> 6) & 0x07; \
  u32 rs = (opcode >> 3) & 0x07; \
  u32 rd = opcode & 0x07; \
  using_register(thumb, rd, op_dest); \
  using_register(thumb, rn, op_src); \
  using_register(thumb, rs, op_src)

#define thumb_decode_add_sub_imm() \
  u32 imm = (opcode >> 6) & 0x07; \
  u32 rs = (opcode >> 3) & 0x07; \
  u32 rd = opcode & 0x07; \
  using_register(thumb, rd, op_src_dest); \
  using_register(thumb, rs, op_src)

#define thumb_decode_imm() \
  u32 imm = opcode & 0xFF; \
  using_register(thumb, ((opcode >> 8) & 0x07), op_dest)

#define thumb_decode_alu_op() \
  u32 rs = (opcode >> 3) & 0x07; \
  u32 rd = opcode & 0x07; \
  using_register(thumb, rd, op_src_dest); \
  using_register(thumb, rs, op_src)

// Hi-register ops: rd's bit 3 comes from opcode bit 7.
#define thumb_decode_hireg_op() \
  u32 rs = (opcode >> 3) & 0x0F; \
  u32 rd = ((opcode >> 4) & 0x08) | (opcode & 0x07); \
  using_register(thumb, rd, op_src_dest); \
  using_register(thumb, rs, op_src)

#define thumb_decode_mem_reg() \
  u32 ro = (opcode >> 6) & 0x07; \
  u32 rb = (opcode >> 3) & 0x07; \
  u32 rd = opcode & 0x07; \
  using_register(thumb, rd, memory_target); \
  using_register(thumb, rb, memory_base); \
  using_register(thumb, ro, memory_offset)

#define thumb_decode_mem_imm() \
  u32 imm = (opcode >> 6) & 0x1F; \
  u32 rb = (opcode >> 3) & 0x07; \
  u32 rd = opcode & 0x07; \
  using_register(thumb, rd, memory_target); \
  using_register(thumb, rb, memory_base)

#define thumb_decode_add_sp() \
  u32 imm = opcode & 0x7F; \
  using_register(thumb, REG_SP, op_dest)

// NOTE(review): the probe is passed the token `rlist`, but
// using_register_list's body actually reads the `reg_list` local
// declared here -- see the note on using_register_list.
#define thumb_decode_rlist() \
  u32 reg_list = opcode & 0xFF; \
  using_register_list(thumb, rlist, 8)

// Conditional branch: signed 8-bit halfword offset.
#define thumb_decode_branch_cond() \
  s32 offset = (s8)(opcode & 0xFF)

#define thumb_decode_swi() \
  u32 comment = opcode & 0xFF

#define thumb_decode_branch() \
  u32 offset = opcode & 0x07FF
// Fetch a register-specified shift amount (opcode bits 8-11 select the
// register holding the shift count).
#define get_shift_register(dest) \
  u32 shift = reg[(opcode >> 8) & 0x0F]; \
  using_register(arm, ((opcode >> 8) & 0x0F), op_shift);

// Individual flag calculators. Arguments are now parenthesized in the
// expansions so compound expressions can be passed safely (macro
// hygiene fix; no behavior change for the existing call sites).

#define calculate_z_flag(dest) \
  z_flag = ((dest) == 0)

#define calculate_n_flag(dest) \
  n_flag = ((signed)(dest) < 0)

// Subtraction carry is "no borrow": src_b <= src_a.
#define calculate_c_flag_sub(dest, src_a, src_b) \
  c_flag = ((unsigned)(src_b) <= (unsigned)(src_a))

#define calculate_v_flag_sub(dest, src_a, src_b) \
  v_flag = (((signed)(src_b) > (signed)(src_a)) != ((signed)(dest) < 0))

// Addition carried iff the result wrapped below an operand.
#define calculate_c_flag_add(dest, src_a, src_b) \
  c_flag = ((unsigned)(dest) < (unsigned)(src_a))

#define calculate_v_flag_add(dest, src_a, src_b) \
  v_flag = (((signed)(dest) < (signed)(src_a)) != ((signed)(src_b) < 0))
365 #define calculate_reg_sh() \
367 switch((opcode >> 4) & 0x07) \
372 reg_sh = reg[rm] << ((opcode >> 7) & 0x1F); \
379 get_shift_register(reg_sh); \
381 reg_sh = reg_sh << shift; \
390 u32 imm = (opcode >> 7) & 0x1F; \
394 reg_sh = reg[rm] >> imm; \
401 get_shift_register(reg_sh); \
403 reg_sh = reg_sh >> shift; \
412 u32 imm = (opcode >> 7) & 0x1F; \
416 reg_sh = (s32)reg_sh >> 31; \
418 reg_sh = (s32)reg_sh >> imm; \
425 get_shift_register(reg_sh); \
427 reg_sh = (s32)reg_sh >> shift; \
429 reg_sh = (s32)reg_sh >> 31; \
436 u32 imm = (opcode >> 7) & 0x1F; \
439 reg_sh = (reg[rm] >> 1) | (c_flag << 31); \
441 ror(reg_sh, reg[rm], imm); \
448 get_shift_register(reg_sh); \
449 ror(reg_sh, reg_sh, shift); \
454 #define calculate_reg_sh_flags() \
456 switch((opcode >> 4) & 0x07) \
461 u32 imm = (opcode >> 7) & 0x1F; \
466 c_flag = (reg_sh >> (32 - imm)) & 0x01; \
476 get_shift_register(reg_sh); \
482 c_flag = reg_sh & 0x01; \
489 c_flag = (reg_sh >> (32 - shift)) & 0x01; \
499 u32 imm = (opcode >> 7) & 0x1F; \
503 c_flag = reg_sh >> 31; \
508 c_flag = (reg_sh >> (imm - 1)) & 0x01; \
517 get_shift_register(reg_sh); \
523 c_flag = (reg_sh >> 31) & 0x01; \
530 c_flag = (reg_sh >> (shift - 1)) & 0x01; \
540 u32 imm = (opcode >> 7) & 0x1F; \
544 reg_sh = (s32)reg_sh >> 31; \
545 c_flag = reg_sh & 0x01; \
549 c_flag = (reg_sh >> (imm - 1)) & 0x01; \
550 reg_sh = (s32)reg_sh >> imm; \
558 get_shift_register(reg_sh); \
563 reg_sh = (s32)reg_sh >> 31; \
564 c_flag = reg_sh & 0x01; \
568 c_flag = (reg_sh >> (shift - 1)) & 0x01; \
569 reg_sh = (s32)reg_sh >> shift; \
578 u32 imm = (opcode >> 7) & 0x1F; \
582 u32 old_c_flag = c_flag; \
583 c_flag = reg_sh & 0x01; \
584 reg_sh = (reg_sh >> 1) | (old_c_flag << 31); \
588 c_flag = (reg_sh >> (imm - 1)) & 0x01; \
589 ror(reg_sh, reg_sh, imm); \
597 get_shift_register(reg_sh); \
600 c_flag = (reg_sh >> (shift - 1)) & 0x01; \
601 ror(reg_sh, reg_sh, shift); \
607 #define calculate_reg_offset() \
609 switch((opcode >> 5) & 0x03) \
614 reg_offset = reg[rm] << ((opcode >> 7) & 0x1F); \
621 u32 imm = (opcode >> 7) & 0x1F; \
625 reg_offset = reg[rm] >> imm; \
632 u32 imm = (opcode >> 7) & 0x1F; \
634 reg_offset = (s32)reg[rm] >> 31; \
636 reg_offset = (s32)reg[rm] >> imm; \
643 u32 imm = (opcode >> 7) & 0x1F; \
645 reg_offset = (reg[rm] >> 1) | (c_flag << 31); \
647 ror(reg_offset, reg[rm], imm); \
// Compose the full NZCV update for the three ALU result classes.

#define calculate_flags_add(dest, src_a, src_b) \
  calculate_z_flag(dest); \
  calculate_n_flag(dest); \
  calculate_c_flag_add(dest, src_a, src_b); \
  calculate_v_flag_add(dest, src_a, src_b)

#define calculate_flags_sub(dest, src_a, src_b) \
  calculate_z_flag(dest); \
  calculate_n_flag(dest); \
  calculate_c_flag_sub(dest, src_a, src_b); \
  calculate_v_flag_sub(dest, src_a, src_b)

#define calculate_flags_logic(dest) \
  calculate_z_flag(dest); \
  calculate_n_flag(dest)

// Unpack the NZCV bits of CPSR into the cached flag variables.
#define extract_flags() \
  n_flag = reg[REG_CPSR] >> 31; \
  z_flag = (reg[REG_CPSR] >> 30) & 0x01; \
  c_flag = (reg[REG_CPSR] >> 29) & 0x01; \
  v_flag = (reg[REG_CPSR] >> 28) & 0x01;

// Write the cached flags back into the top of CPSR, preserving the low
// control byte (mode bits, IRQ/FIQ disable, Thumb bit).
#define collapse_flags() \
  reg[REG_CPSR] = (n_flag << 31) | (z_flag << 30) | (c_flag << 29) | \
   (v_flag << 28) | (reg[REG_CPSR] & 0xFF)
// Look up the backing pointer and size limit for the memory region
// selected by the top 8 address bits.
#define memory_region(r_dest, l_dest, address) \
  r_dest = memory_regions[address >> 24]; \
  l_dest = memory_limits[address >> 24]

// Refresh the cached region/limit for the current program counter.
// (pc_region here is the variable, not this function-like macro, so no
// recursive expansion occurs.)
#define pc_region() \
  memory_region(pc_region, pc_limit, pc)
// Re-resolve the 32KB page the PC lies in whenever it crosses a page
// boundary; unmapped gamepak ROM pages are demand-loaded.
#define check_pc_region() \
  new_pc_region = (pc >> 15); \
  if(new_pc_region != pc_region) \
  { \
    pc_region = new_pc_region; \
    pc_address_block = memory_map_read[new_pc_region]; \
    if(pc_address_block == NULL) \
      pc_address_block = load_gamepak_page(pc_region & 0x3FF); \
  }
697 u32 branch_targets = 0;
698 u32 high_frequency_branch_targets = 0;
700 #define BRANCH_ACTIVITY_THRESHOLD 50
702 #define arm_update_pc() \
705 #define arm_pc_offset(val) \
709 #define arm_pc_offset_update(val) \
713 #define arm_pc_offset_update_direct(val) \
718 // It should be okay to still generate result flags, spsr will overwrite them.
719 // This is pretty infrequent (returning from interrupt handlers, et al) so
720 // probably not worth optimizing for.
722 #define check_for_interrupts() \
723 if((io_registers[REG_IE] & io_registers[REG_IF]) && \
724 io_registers[REG_IME] && ((reg[REG_CPSR] & 0x80) == 0)) \
726 reg_mode[MODE_IRQ][6] = reg[REG_PC] + 4; \
727 spsr[MODE_IRQ] = reg[REG_CPSR]; \
728 reg[REG_CPSR] = 0xD2; \
729 reg[REG_PC] = 0x00000018; \
731 set_cpu_mode(MODE_IRQ); \
735 #define arm_spsr_restore() \
738 if(reg[CPU_MODE] != MODE_USER) \
740 reg[REG_CPSR] = spsr[reg[CPU_MODE]]; \
742 set_cpu_mode(cpu_modes[reg[REG_CPSR] & 0x1F]); \
743 check_for_interrupts(); \
747 if(reg[REG_CPSR] & 0x20) \
751 #define arm_data_proc_flags_reg() \
752 arm_decode_data_proc_reg(); \
753 calculate_reg_sh_flags() \
755 #define arm_data_proc_reg() \
756 arm_decode_data_proc_reg(); \
759 #define arm_data_proc_flags_imm() \
760 arm_decode_data_proc_imm() \
762 #define arm_data_proc_imm() \
763 arm_decode_data_proc_imm() \
765 #define arm_data_proc(expr, type) \
769 arm_data_proc_##type(); \
// Snapshot the flag-math operands before `dest` overwrites a register,
// so flag calculation sees the pre-instruction values exactly once.
#define flags_vars(src_a, src_b) \
  const u32 _sa = src_a; \
  const u32 _sb = src_b
785 #define arm_data_proc_logic_flags(expr, type) \
788 arm_data_proc_flags_##type(); \
790 calculate_flags_logic(dest); \
793 arm_spsr_restore(); \
796 #define arm_data_proc_add_flags(src_a, src_b, type) \
799 arm_data_proc_##type(); \
800 flags_vars(src_a, src_b); \
802 calculate_flags_add(dest, _sa, _sb); \
805 arm_spsr_restore(); \
808 #define arm_data_proc_sub_flags(src_a, src_b, type) \
811 arm_data_proc_##type(); \
812 flags_vars(src_a, src_b); \
814 calculate_flags_sub(dest, _sa, _sb); \
817 arm_spsr_restore(); \
820 #define arm_data_proc_test_logic(expr, type) \
823 arm_data_proc_flags_##type(); \
825 calculate_flags_logic(dest); \
829 #define arm_data_proc_test_add(src_a, src_b, type) \
832 arm_data_proc_##type(); \
833 flags_vars(src_a, src_b); \
835 calculate_flags_add(dest, _sa, _sb); \
839 #define arm_data_proc_test_sub(src_a, src_b, type) \
842 arm_data_proc_##type(); \
843 flags_vars(src_a, src_b); \
845 calculate_flags_sub(dest, _sa, _sb); \
// Optional N/Z updates for MUL/MLA ('S' bit set vs clear).
#define arm_multiply_flags_yes(_dest) \
  calculate_z_flag(_dest); \
  calculate_n_flag(_dest);

#define arm_multiply_flags_no(_dest)

// Long multiplies: Z from the whole 64-bit result, N from bit 63.
#define arm_multiply_long_flags_yes(_dest_lo, _dest_hi) \
  z_flag = (_dest_lo == 0) & (_dest_hi == 0); \
  calculate_n_flag(_dest_hi)

#define arm_multiply_long_flags_no(_dest_lo, _dest_hi)
// MUL/MLA core: 32x32->32 multiply, optional accumulate via `add_op`
// (e.g. "+ reg[rn]"), optional flag update via `flags` (yes/no).
// NOTE(review): the result write-back and PC advance lines were lost
// in extraction; reg[rd] = dest is required for the instruction to
// have any effect, and the pc offset follows the pattern of the other
// instruction macros -- confirm against upstream gpSP.
#define arm_multiply(add_op, flags) \
{ \
  u32 dest; \
  arm_decode_multiply(); \
  dest = (reg[rm] * reg[rs]) add_op; \
  arm_multiply_flags_##flags(dest); \
  reg[rd] = dest; \
  arm_pc_offset(4); \
}
// Accumulate operand for UMLAL/SMLAL: the current rdhi:rdlo pair.
#define arm_multiply_long_addop(type) \
  + ((type##64)((((type##64)reg[rdhi]) << 32) | reg[rdlo]));

// UMULL/SMULL/UMLAL/SMLAL core. `type` is u or s (token-pasted into
// u64/s64 and u32/s32); `add_op` optionally accumulates; `flags`
// selects the 'S' bit behavior.
// NOTE(review): the local declarations and the trailing pc advance were
// lost in extraction and are restored by pattern -- confirm upstream.
#define arm_multiply_long(add_op, flags, type) \
{ \
  type##64 dest; \
  u32 dest_lo; \
  u32 dest_hi; \
  arm_decode_multiply_long(); \
  dest = ((type##64)((type##32)reg[rm]) * \
   (type##64)((type##32)reg[rn])) add_op; \
  dest_lo = (u32)dest; \
  dest_hi = (u32)(dest >> 32); \
  arm_multiply_long_flags_##flags(dest_lo, dest_hi); \
  reg[rdlo] = dest_lo; \
  reg[rdhi] = dest_hi; \
  arm_pc_offset(4); \
}
890 const u32 psr_masks[16] =
892 0x00000000, 0x000000FF, 0x0000FF00, 0x0000FFFF, 0x00FF0000,
893 0x00FF00FF, 0x00FFFF00, 0x00FFFFFF, 0xFF000000, 0xFF0000FF,
894 0xFF00FF00, 0xFF00FFFF, 0xFFFF0000, 0xFFFF00FF, 0xFFFFFF00,
898 #define arm_psr_read(dummy, psr_reg) \
// Write `source` into CPSR under the MSR field mask. Touching the low
// byte can change CPU mode and unmask pending interrupts, so both are
// re-evaluated when store_mask covers it.
#define arm_psr_store_cpsr(source) \
  reg[REG_CPSR] = (source & store_mask) | (reg[REG_CPSR] & (~store_mask)); \
  if(store_mask & 0xFF) \
  { \
    set_cpu_mode(cpu_modes[reg[REG_CPSR] & 0x1F]); \
    check_for_interrupts(); \
  }

// Write `source` into the current mode's SPSR under the field mask.
#define arm_psr_store_spsr(source) \
  u32 _psr = spsr[reg[CPU_MODE]]; \
  spsr[reg[CPU_MODE]] = (source & store_mask) | (_psr & (~store_mask))

#define arm_psr_store(source, psr_reg) \
  const u32 store_mask = psr_masks[psr_field]; \
  arm_psr_store_##psr_reg(source)

// MSR source operand: register form or rotated-immediate form.
#define arm_psr_src_reg reg[rm]
#define arm_psr_src_imm imm

// MRS/MSR dispatcher. op_type: reg/imm decode; transfer_type:
// read/store; psr_reg: cpsr/spsr.
// NOTE(review): interior lines (original 926/928-929) were lost in
// extraction; the pc advance is restored by pattern -- confirm upstream.
#define arm_psr(op_type, transfer_type, psr_reg) \
{ \
  arm_decode_psr_##op_type(); \
  arm_psr_##transfer_type(arm_psr_src_##op_type, psr_reg); \
  arm_pc_offset(4); \
}
// Address-mode decoders for single data transfers; the register form
// also computes the shifted offset (calculate_reg_offset leaves it in
// reg_offset).
#define arm_data_trans_reg() \
  arm_decode_data_trans_reg(); \
  calculate_reg_offset()

#define arm_data_trans_imm() \
  arm_decode_data_trans_imm()

#define arm_data_trans_half_reg() \
  arm_decode_half_trans_r()

#define arm_data_trans_half_imm() \
  arm_decode_half_trans_of()
// An address takes the fast path when it is below 0x10000000 and its
// low size-alignment bits are clear.
#define aligned_address_mask8  0xF0000000
#define aligned_address_mask16 0xF0000001
#define aligned_address_mask32 0xF0000003

// Inline fast path for loads: count the access, emulate BIOS read
// protection (BIOS reads from outside the BIOS return protected data),
// then read straight out of the mapped 32KB page when aligned and
// mapped; otherwise fall back to the full read_memory handler.
// FIX: the page lookup now indexes with the `_address` snapshot; the
// original used the raw `address` argument here, re-evaluating the
// argument expression and diverging from fast_read_memory_s16.
#define fast_read_memory(size, type, address, dest) \
{ \
  u8 *map; \
  u32 _address = address; \
  if(_address < 0x10000000) \
  { \
    memory_region_access_read_##type[_address >> 24]++; \
    memory_reads_##type++; \
  } \
  if(((_address >> 24) == 0) && (pc >= 0x4000)) \
  { \
    dest = *((type *)((u8 *)&bios_read_protect + (_address & 0x03))); \
  } \
  else \
  if(((_address & aligned_address_mask##size) == 0) && \
   (map = memory_map_read[_address >> 15])) \
  { \
    dest = *((type *)((u8 *)map + (_address & 0x7FFF))); \
  } \
  else \
  { \
    dest = (type)read_memory##size(_address); \
  } \
}

// Sign-extending variant for LDRSH; kept separate because the
// misaligned fallback needs read_memory16_signed.
#define fast_read_memory_s16(address, dest) \
{ \
  u8 *map; \
  u32 _address = address; \
  if(_address < 0x10000000) \
  { \
    memory_region_access_read_s16[_address >> 24]++; \
    memory_reads_s16++; \
  } \
  if(((_address & aligned_address_mask16) == 0) && \
   (map = memory_map_read[_address >> 15])) \
  { \
    dest = *((s16 *)((u8 *)map + (_address & 0x7FFF))); \
  } \
  else \
  { \
    dest = (s16)read_memory16_signed(_address); \
  } \
}
// Inline fast path for stores; mirrors fast_read_memory. The low
// alignment bits are masked off up front (stores force alignment).
// The slow path goes through write_memory##size, whose return value
// may raise a cpu_alert (I/O register side effects).
#define fast_write_memory(size, type, address, value) \
{ \
  u8 *map; \
  u32 _address = (address) & ~(aligned_address_mask##size & 0x03); \
  if(_address < 0x10000000) \
  { \
    memory_region_access_write_##type[_address >> 24]++; \
    memory_writes_##type++; \
  } \
  if(((_address & aligned_address_mask##size) == 0) && \
   (map = memory_map_write[_address >> 15])) \
  { \
    *((type *)((u8 *)map + (_address & 0x7FFF))) = value; \
  } \
  else \
  { \
    cpu_alert = write_memory##size(_address, value); \
  } \
}
// 32-bit aligned load/store used by LDM/STM: the block-transfer address
// is pre-aligned, so only a page-mapping check is needed.
#define load_aligned32(address, dest) \
{ \
  u8 *map = memory_map_read[address >> 15]; \
  if(address < 0x10000000) \
  { \
    memory_region_access_read_u32[address >> 24]++; \
    memory_reads_u32++; \
  } \
  if(map) \
  { \
    dest = address32(map, address & 0x7FFF); \
  } \
  else \
  { \
    dest = read_memory32(address); \
  } \
}

#define store_aligned32(address, value) \
{ \
  u8 *map = memory_map_write[address >> 15]; \
  if(address < 0x10000000) \
  { \
    memory_region_access_write_u32[address >> 24]++; \
    memory_writes_u32++; \
  } \
  if(map) \
  { \
    address32(map, address & 0x7FFF) = value; \
  } \
  else \
  { \
    cpu_alert = write_memory32(address, value); \
  } \
}
// Size/signedness-dispatching wrappers used by the instruction macros.

#define load_memory_u8(address, dest) \
  fast_read_memory(8, u8, address, dest)

#define load_memory_u16(address, dest) \
  fast_read_memory(16, u16, address, dest)

#define load_memory_u32(address, dest) \
  fast_read_memory(32, u32, address, dest)

#define load_memory_s8(address, dest) \
  fast_read_memory(8, s8, address, dest)

#define load_memory_s16(address, dest) \
  fast_read_memory_s16(address, dest)

#define store_memory_u8(address, value) \
  fast_write_memory(8, u8, address, value)

#define store_memory_u16(address, value) \
  fast_write_memory(16, u16, address, value)

#define store_memory_u32(address, value) \
  fast_write_memory(32, u32, address, value)
1082 #define arm_access_memory_writeback_yes(off_op) \
1083 reg[rn] = address off_op \
1085 #define arm_access_memory_writeback_no(off_op) \
1087 #define arm_access_memory_pc_preadjust_load() \
1089 #define arm_access_memory_pc_preadjust_store() \
1090 u32 reg_op = reg[rd]; \
1094 #define arm_access_memory_pc_postadjust_load() \
1097 #define arm_access_memory_pc_postadjust_store() \
1099 #define load_reg_op reg[rd] \
1101 #define store_reg_op reg_op \
1103 #define arm_access_memory(access_type, off_op, off_type, mem_type, \
1107 arm_data_trans_##off_type(); \
1108 u32 address = reg[rn] off_op; \
1109 arm_access_memory_pc_preadjust_##access_type(); \
1111 arm_pc_offset(-4); \
1112 arm_access_memory_writeback_##wb(wb_off_op); \
1113 access_type##_memory_##mem_type(address, access_type##_reg_op); \
1114 arm_access_memory_pc_postadjust_##access_type(); \
// Set-bit count of a 16-bit register list via two byte-table lookups.
#define word_bit_count(word) \
  (bit_count[word >> 8] + bit_count[word & 0xFF])

// Debug hooks for the S-bit forms of LDM/STM (quiet vs. reporting).
#define sprint_no(access_type, offset_type, writeback_type)

#define sprint_yes(access_type, offset_type, writeback_type) \
  printf("sbit on %s %s %s\n", #access_type, #offset_type, #writeback_type)
1125 #define arm_block_writeback_load() \
1126 if(!((reg_list >> rn) & 0x01)) \
1128 reg[rn] = address; \
1131 #define arm_block_writeback_store() \
1134 #define arm_block_writeback_yes(access_type) \
1135 arm_block_writeback_##access_type() \
1137 #define arm_block_writeback_no(access_type) \
1139 #define load_block_memory(address, dest) \
1140 dest = address32(address_region, (address + offset) & 0x7FFF) \
1142 #define store_block_memory(address, dest) \
1143 address32(address_region, (address + offset) & 0x7FFF) = dest \
1145 #define arm_block_memory_offset_down_a() \
1146 (base - (word_bit_count(reg_list) * 4) + 4) \
1148 #define arm_block_memory_offset_down_b() \
1149 (base - (word_bit_count(reg_list) * 4)) \
1151 #define arm_block_memory_offset_no() \
1154 #define arm_block_memory_offset_up() \
1157 #define arm_block_memory_writeback_down() \
1158 reg[rn] = base - (word_bit_count(reg_list) * 4) \
1160 #define arm_block_memory_writeback_up() \
1161 reg[rn] = base + (word_bit_count(reg_list) * 4) \
1163 #define arm_block_memory_writeback_no() \
1165 #define arm_block_memory_load_pc() \
1166 load_aligned32(address, pc); \
1169 #define arm_block_memory_store_pc() \
1170 store_aligned32(address, pc + 4) \
1172 #define arm_block_memory(access_type, offset_type, writeback_type, s_bit) \
1174 arm_decode_block_trans(); \
1175 u32 base = reg[rn]; \
1176 u32 address = arm_block_memory_offset_##offset_type() & 0xFFFFFFFC; \
1179 arm_block_memory_writeback_##writeback_type(); \
1181 for(i = 0; i < 15; i++) \
1183 if((reg_list >> i) & 0x01) \
1185 access_type##_aligned32(address, reg[i]); \
1191 if(reg_list & 0x8000) \
1193 arm_block_memory_##access_type##_pc(); \
1197 #define arm_swap(type) \
1199 arm_decode_swap(); \
1201 load_memory_##type(reg[rn], temp); \
1202 store_memory_##type(reg[rn], reg[rm]); \
1207 #define arm_next_instruction() \
1210 goto skip_instruction; \
1213 #define thumb_update_pc() \
1216 #define thumb_pc_offset(val) \
1220 #define thumb_pc_offset_update(val) \
1224 #define thumb_pc_offset_update_direct(val) \
1228 // Types: add_sub, add_sub_imm, alu_op, imm
1229 // Affects N/Z/C/V flags
// Thumb add: compute, set NZCV, write back.
// FIX: the flags were computed from the raw src_a/src_b expressions,
// re-evaluating them even though _sa/_sb had already latched the
// operands; flags now use the latched copies (consistent with the ARM
// flags_vars pattern used elsewhere in this file).
#define thumb_add(type, dest_reg, src_a, src_b) \
{ \
  thumb_decode_##type(); \
  const u32 _sa = src_a; \
  const u32 _sb = src_b; \
  u32 dest = _sa + _sb; \
  calculate_flags_add(dest, _sa, _sb); \
  reg[dest_reg] = dest; \
  thumb_pc_offset(2); \
}

// Add without flag updates (ADD sp/pc forms, hireg ADD).
#define thumb_add_noflags(type, dest_reg, src_a, src_b) \
{ \
  thumb_decode_##type(); \
  u32 dest = src_a + src_b; \
  reg[dest_reg] = dest; \
  thumb_pc_offset(2); \
}

// Thumb subtract: compute, set NZCV, write back (same latching fix).
#define thumb_sub(type, dest_reg, src_a, src_b) \
{ \
  thumb_decode_##type(); \
  const u32 _sa = src_a; \
  const u32 _sb = src_b; \
  u32 dest = _sa - _sb; \
  calculate_flags_sub(dest, _sa, _sb); \
  reg[dest_reg] = dest; \
  thumb_pc_offset(2); \
}

// Thumb logic op: evaluate `expr`, set N/Z, write back.
#define thumb_logic(type, dest_reg, expr) \
{ \
  thumb_decode_##type(); \
  u32 dest = expr; \
  calculate_flags_logic(dest); \
  reg[dest_reg] = dest; \
  thumb_pc_offset(2); \
}
1272 // Decode types: shift, alu_op
1273 // Operation types: lsl, lsr, asr, ror
1274 // Affects N/Z/C flags
1276 #define thumb_shift_lsl_reg() \
1277 u32 shift = reg[rs]; \
1278 u32 dest = reg[rd]; \
1284 c_flag = dest & 0x01; \
1291 c_flag = (dest >> (32 - shift)) & 0x01; \
1296 #define thumb_shift_lsr_reg() \
1297 u32 shift = reg[rs]; \
1298 u32 dest = reg[rd]; \
1304 c_flag = dest >> 31; \
1311 c_flag = (dest >> (shift - 1)) & 0x01; \
1316 #define thumb_shift_asr_reg() \
1317 u32 shift = reg[rs]; \
1318 u32 dest = reg[rd]; \
1323 dest = (s32)dest >> 31; \
1324 c_flag = dest & 0x01; \
1328 c_flag = (dest >> (shift - 1)) & 0x01; \
1329 dest = (s32)dest >> shift; \
1333 #define thumb_shift_ror_reg() \
1334 u32 shift = reg[rs]; \
1335 u32 dest = reg[rd]; \
1338 c_flag = (dest >> (shift - 1)) & 0x01; \
1339 ror(dest, dest, shift); \
1342 #define thumb_shift_lsl_imm() \
1343 u32 dest = reg[rs]; \
1346 c_flag = (dest >> (32 - imm)) & 0x01; \
1350 #define thumb_shift_lsr_imm() \
1355 c_flag = reg[rs] >> 31; \
1360 c_flag = (dest >> (imm - 1)) & 0x01; \
1364 #define thumb_shift_asr_imm() \
1368 dest = (s32)reg[rs] >> 31; \
1369 c_flag = dest & 0x01; \
1374 c_flag = (dest >> (imm - 1)) & 0x01; \
1375 dest = (s32)dest >> imm; \
1378 #define thumb_shift_ror_imm() \
1379 u32 dest = reg[rs]; \
1382 u32 old_c_flag = c_flag; \
1383 c_flag = dest & 0x01; \
1384 dest = (dest >> 1) | (old_c_flag << 31); \
1388 c_flag = (dest >> (imm - 1)) & 0x01; \
1389 ror(dest, dest, imm); \
// Thumb shift instruction: decode, apply the shift op (which sets
// c_flag and dest), set N/Z, write back.
// NOTE(review): the reg[rd] = dest write-back line was lost in
// extraction and is restored here; without it the instruction would
// have no architectural effect.
#define thumb_shift(decode_type, op_type, value_type) \
{ \
  thumb_decode_##decode_type(); \
  thumb_shift_##op_type##_##value_type(); \
  calculate_flags_logic(dest); \
  reg[rd] = dest; \
  thumb_pc_offset(2); \
}

// CMN: add, set NZCV, discard the result (flags use latched operands;
// same double-evaluation fix as thumb_add).
#define thumb_test_add(type, src_a, src_b) \
{ \
  thumb_decode_##type(); \
  const u32 _sa = src_a; \
  const u32 _sb = src_b; \
  u32 dest = _sa + _sb; \
  calculate_flags_add(dest, _sa, _sb); \
  thumb_pc_offset(2); \
}

// CMP: subtract, set NZCV, discard the result.
#define thumb_test_sub(type, src_a, src_b) \
{ \
  thumb_decode_##type(); \
  const u32 _sa = src_a; \
  const u32 _sb = src_b; \
  u32 dest = _sa - _sb; \
  calculate_flags_sub(dest, _sa, _sb); \
  thumb_pc_offset(2); \
}

// TST: evaluate `expr`, set N/Z, discard the result.
#define thumb_test_logic(type, expr) \
{ \
  thumb_decode_##type(); \
  u32 dest = expr; \
  calculate_flags_logic(dest); \
  thumb_pc_offset(2); \
}
1429 #define thumb_hireg_op(expr) \
1431 thumb_pc_offset(4); \
1432 thumb_decode_hireg_op(); \
1434 thumb_pc_offset(-2); \
1437 reg[REG_PC] = dest & ~0x01; \
1438 thumb_update_pc(); \
1446 // Operation types: imm, mem_reg, mem_imm
1448 #define thumb_access_memory(access_type, op_type, address, reg_op, \
1451 thumb_decode_##op_type(); \
1452 access_type##_memory_##mem_type(address, reg_op); \
1453 thumb_pc_offset(2); \
1456 #define thumb_block_address_preadjust_no_op() \
1458 #define thumb_block_address_preadjust_up() \
1459 address += bit_count[reg_list] * 4 \
1461 #define thumb_block_address_preadjust_down() \
1462 address -= bit_count[reg_list] * 4 \
1464 #define thumb_block_address_preadjust_push_lr() \
1465 address -= (bit_count[reg_list] + 1) * 4 \
1467 #define thumb_block_address_postadjust_no_op() \
1469 #define thumb_block_address_postadjust_up() \
1472 #define thumb_block_address_postadjust_down() \
1475 #define thumb_block_address_postadjust_pop_pc() \
1476 load_memory_u32(address + offset, pc); \
1479 address += offset + 4 \
1481 #define thumb_block_address_postadjust_push_lr() \
1482 store_memory_u32(address + offset, reg[REG_LR]); \
1484 #define thumb_block_memory_wb_load(base_reg) \
1485 if(!((reg_list >> base_reg) & 0x01)) \
1487 reg[base_reg] = address; \
1490 #define thumb_block_memory_wb_store(base_reg) \
1491 reg[base_reg] = address \
1493 #define thumb_block_memory(access_type, pre_op, post_op, base_reg) \
1497 thumb_decode_rlist(); \
1498 using_register(thumb, base_reg, memory_base); \
1499 u32 address = reg[base_reg] & ~0x03; \
1500 thumb_block_address_preadjust_##pre_op(); \
1502 for(i = 0; i < 8; i++) \
1504 if((reg_list >> i) & 1) \
1506 access_type##_aligned32(address + offset, reg[i]); \
1511 thumb_pc_offset(2); \
1513 thumb_block_address_postadjust_##post_op(); \
1514 thumb_block_memory_wb_##access_type(base_reg); \
// Conditional Thumb branch: when taken, pc advances by offset*2 + 4
// (offset is a signed halfword count relative to pc+4); when not taken
// it falls through to the next halfword.
#define thumb_conditional_branch(condition) \
{ \
  thumb_decode_branch_cond(); \
  if(condition) \
  { \
    thumb_pc_offset((offset * 2) + 4); \
  } \
  else \
  { \
    thumb_pc_offset(2); \
  } \
}
1530 // When a mode change occurs from non-FIQ to non-FIQ retire the current
1531 // reg[13] and reg[14] into reg_mode[cpu_mode][5] and reg_mode[cpu_mode][6]
1532 // respectively and load into reg[13] and reg[14] reg_mode[new_mode][5] and
1533 // reg_mode[new_mode][6]. When swapping to/from FIQ retire/load reg[8]
1534 // through reg[14] to/from reg_mode[MODE_FIQ][0] through reg_mode[MODE_FIQ][6].
1540 MODE_INVALID, MODE_INVALID, MODE_INVALID, MODE_INVALID, MODE_INVALID,
1541 MODE_INVALID, MODE_INVALID, MODE_INVALID, MODE_INVALID, MODE_INVALID,
1542 MODE_INVALID, MODE_INVALID, MODE_INVALID, MODE_INVALID, MODE_INVALID,
1543 MODE_INVALID, MODE_USER, MODE_FIQ, MODE_IRQ, MODE_SUPERVISOR, MODE_INVALID,
1544 MODE_INVALID, MODE_INVALID, MODE_ABORT, MODE_INVALID, MODE_INVALID,
1545 MODE_INVALID, MODE_INVALID, MODE_UNDEFINED, MODE_INVALID, MODE_INVALID,
1549 u32 cpu_modes_cpsr[7] = { 0x10, 0x11, 0x12, 0x13, 0x17, 0x1B, 0x1F };
1551 // When switching modes set spsr[new_mode] to cpsr. Modifying PC as the
1552 // target of a data proc instruction will set cpsr to spsr[cpu_mode].
1554 u32 initial_reg[64];
1555 u32 *reg = initial_reg;
1558 // ARM/Thumb mode is stored in the flags directly, this is simpler than
1559 // shadowing it since it has a constant 1-bit representation.
/* Printable names for r0-r15, padded to a fixed 3-character width for
   aligned debugger/trace output; r11-r15 use their ABI aliases
   (fp, ip, sp, lr, pc). */
1561 char *reg_names[16] =
1563 " r0", " r1", " r2", " r3", " r4", " r5", " r6", " r7",
1564 " r8", " r9", "r10", " fp", " ip", " sp", " lr", " pc"
/* Printable CPU mode names.
   NOTE(review): the ordering here (user, irq, fiq, ...) differs from
   cpu_modes_cpsr above (user, fiq, irq, ...); verify both agree with the
   internal mode enum before relying on either ordering. */
1567 char *cpu_mode_names[] =
1569 "user", "irq", "fiq", "svsr", "abrt", "undf", "invd"
1573 #define execute_arm_instruction() \
1574 using_instruction(arm); \
1575 check_pc_region(); \
1577 opcode = address32(pc_address_block, (pc & 0x7FFF)); \
1578 condition = opcode >> 28; \
1585 arm_next_instruction(); \
1591 arm_next_instruction(); \
1597 arm_next_instruction(); \
1603 arm_next_instruction(); \
1609 arm_next_instruction(); \
1615 arm_next_instruction(); \
1621 arm_next_instruction(); \
1627 arm_next_instruction(); \
1632 if((c_flag == 0) | z_flag) \
1633 arm_next_instruction(); \
1638 if(c_flag & (z_flag ^ 1)) \
1639 arm_next_instruction(); \
1644 if(n_flag != v_flag) \
1645 arm_next_instruction(); \
1650 if(n_flag == v_flag) \
1651 arm_next_instruction(); \
1656 if(z_flag | (n_flag != v_flag)) \
1657 arm_next_instruction(); \
1662 if((z_flag == 0) & (n_flag == v_flag)) \
1663 arm_next_instruction(); \
1671 /* Reserved - treat as "never" */ \
1673 arm_next_instruction(); \
1677 switch((opcode >> 20) & 0xFF) \
1680 if((opcode & 0x90) == 0x90) \
1684 /* STRH rd, [rn], -rm */ \
1685 arm_access_memory(store, no_op, half_reg, u16, yes, - reg[rm]); \
1689 /* MUL rd, rm, rs */ \
1690 arm_multiply(no_op, no); \
1695 /* AND rd, rn, reg_op */ \
1696 arm_data_proc(reg[rn] & reg_sh, reg); \
1701 if((opcode & 0x90) == 0x90) \
1703 switch((opcode >> 5) & 0x03) \
1706 /* MULS rd, rm, rs */ \
1707 arm_multiply(no_op, yes); \
1711 /* LDRH rd, [rn], -rm */ \
1712 arm_access_memory(load, no_op, half_reg, u16, yes, - reg[rm]); \
1716 /* LDRSB rd, [rn], -rm */ \
1717 arm_access_memory(load, no_op, half_reg, s8, yes, - reg[rm]); \
1721 /* LDRSH rd, [rn], -rm */ \
1722 arm_access_memory(load, no_op, half_reg, s16, yes, - reg[rm]); \
1728 /* ANDS rd, rn, reg_op */ \
1729 arm_data_proc_logic_flags(reg[rn] & reg_sh, reg); \
1734 if((opcode & 0x90) == 0x90) \
1738 /* STRH rd, [rn], -rm */ \
1739 arm_access_memory(store, no_op, half_reg, u16, yes, - reg[rm]); \
1743 /* MLA rd, rm, rs, rn */ \
1744 arm_multiply(+ reg[rn], no); \
1749 /* EOR rd, rn, reg_op */ \
1750 arm_data_proc(reg[rn] ^ reg_sh, reg); \
1755 if((opcode & 0x90) == 0x90) \
1757 switch((opcode >> 5) & 0x03) \
1760 /* MLAS rd, rm, rs, rn */ \
1761 arm_multiply(+ reg[rn], yes); \
1765 /* LDRH rd, [rn], -rm */ \
1766 arm_access_memory(load, no_op, half_reg, u16, yes, - reg[rm]); \
1770 /* LDRSB rd, [rn], -rm */ \
1771 arm_access_memory(load, no_op, half_reg, s8, yes, - reg[rm]); \
1775 /* LDRSH rd, [rn], -rm */ \
1776 arm_access_memory(load, no_op, half_reg, s16, yes, - reg[rm]); \
1782 /* EORS rd, rn, reg_op */ \
1783 arm_data_proc_logic_flags(reg[rn] ^ reg_sh, reg); \
1788 if((opcode & 0x90) == 0x90) \
1790 /* STRH rd, [rn], -imm */ \
1791 arm_access_memory(store, no_op, half_imm, u16, yes, - offset); \
1795 /* SUB rd, rn, reg_op */ \
1796 arm_data_proc(reg[rn] - reg_sh, reg); \
1801 if((opcode & 0x90) == 0x90) \
1803 switch((opcode >> 5) & 0x03) \
1806 /* LDRH rd, [rn], -imm */ \
1807 arm_access_memory(load, no_op, half_imm, u16, yes, - offset); \
1811 /* LDRSB rd, [rn], -imm */ \
1812 arm_access_memory(load, no_op, half_imm, s8, yes, - offset); \
1816 /* LDRSH rd, [rn], -imm */ \
1817 arm_access_memory(load, no_op, half_imm, s16, yes, - offset); \
1823 /* SUBS rd, rn, reg_op */ \
1824 arm_data_proc_sub_flags(reg[rn], reg_sh, reg); \
1829 if((opcode & 0x90) == 0x90) \
1831 /* STRH rd, [rn], -imm */ \
1832 arm_access_memory(store, no_op, half_imm, u16, yes, - offset); \
1836 /* RSB rd, rn, reg_op */ \
1837 arm_data_proc(reg_sh - reg[rn], reg); \
1842 if((opcode & 0x90) == 0x90) \
1844 switch((opcode >> 5) & 0x03) \
1847 /* LDRH rd, [rn], -imm */ \
1848 arm_access_memory(load, no_op, half_imm, u16, yes, - offset); \
1852 /* LDRSB rd, [rn], -imm */ \
1853 arm_access_memory(load, no_op, half_imm, s8, yes, - offset); \
1857 /* LDRSH rd, [rn], -imm */ \
1858 arm_access_memory(load, no_op, half_imm, s16, yes, - offset); \
1864 /* RSBS rd, rn, reg_op */ \
1865 arm_data_proc_sub_flags(reg_sh, reg[rn], reg); \
1870 if((opcode & 0x90) == 0x90) \
1874 /* STRH rd, [rn], +rm */ \
1875 arm_access_memory(store, no_op, half_reg, u16, yes, + reg[rm]); \
1879 /* UMULL rd, rm, rs */ \
1880 arm_multiply_long(no_op, no, u); \
1885 /* ADD rd, rn, reg_op */ \
1886 arm_data_proc(reg[rn] + reg_sh, reg); \
1891 if((opcode & 0x90) == 0x90) \
1893 switch((opcode >> 5) & 0x03) \
1896 /* UMULLS rdlo, rdhi, rm, rs */ \
1897 arm_multiply_long(no_op, yes, u); \
1901 /* LDRH rd, [rn], +rm */ \
1902 arm_access_memory(load, no_op, half_reg, u16, yes, + reg[rm]); \
1906 /* LDRSB rd, [rn], +rm */ \
1907 arm_access_memory(load, no_op, half_reg, s8, yes, + reg[rm]); \
1911 /* LDRSH rd, [rn], +rm */ \
1912 arm_access_memory(load, no_op, half_reg, s16, yes, + reg[rm]); \
1918 /* ADDS rd, rn, reg_op */ \
1919 arm_data_proc_add_flags(reg[rn], reg_sh, reg); \
1924 if((opcode & 0x90) == 0x90) \
1928 /* STRH rd, [rn], +rm */ \
1929 arm_access_memory(store, no_op, half_reg, u16, yes, + reg[rm]); \
1933 /* UMLAL rd, rm, rs */ \
1934 arm_multiply_long(arm_multiply_long_addop(u), no, u); \
1939 /* ADC rd, rn, reg_op */ \
1940 arm_data_proc(reg[rn] + reg_sh + c_flag, reg); \
1945 if((opcode & 0x90) == 0x90) \
1947 switch((opcode >> 5) & 0x03) \
1950 /* UMLALS rdlo, rdhi, rm, rs */ \
1951 arm_multiply_long(arm_multiply_long_addop(u), yes, u); \
1955 /* LDRH rd, [rn], +rm */ \
1956 arm_access_memory(load, no_op, half_reg, u16, yes, + reg[rm]); \
1960 /* LDRSB rd, [rn], +rm */ \
1961 arm_access_memory(load, no_op, half_reg, s8, yes, + reg[rm]); \
1965 /* LDRSH rd, [rn], +rm */ \
1966 arm_access_memory(load, no_op, half_reg, s16, yes, + reg[rm]); \
1972 /* ADCS rd, rn, reg_op */ \
1973 arm_data_proc_add_flags(reg[rn], reg_sh + c_flag, reg); \
1978 if((opcode & 0x90) == 0x90) \
1982 /* STRH rd, [rn], +imm */ \
1983 arm_access_memory(store, no_op, half_imm, u16, yes, + offset); \
1987 /* SMULL rd, rm, rs */ \
1988 arm_multiply_long(no_op, no, s); \
1993 /* SBC rd, rn, reg_op */ \
1994 arm_data_proc(reg[rn] - (reg_sh + (c_flag ^ 1)), reg); \
1999 if((opcode & 0x90) == 0x90) \
2001 switch((opcode >> 5) & 0x03) \
2004 /* SMULLS rdlo, rdhi, rm, rs */ \
2005 arm_multiply_long(no_op, yes, s); \
2009 /* LDRH rd, [rn], +imm */ \
2010 arm_access_memory(load, no_op, half_imm, u16, yes, + offset); \
2014 /* LDRSB rd, [rn], +imm */ \
2015 arm_access_memory(load, no_op, half_imm, s8, yes, + offset); \
2019 /* LDRSH rd, [rn], +imm */ \
2020 arm_access_memory(load, no_op, half_imm, s16, yes, + offset); \
2026 /* SBCS rd, rn, reg_op */ \
2027 arm_data_proc_sub_flags(reg[rn], (reg_sh + (c_flag ^ 1)), reg); \
2032 if((opcode & 0x90) == 0x90) \
2036 /* STRH rd, [rn], +imm */ \
2037 arm_access_memory(store, no_op, half_imm, u16, yes, + offset); \
2041 /* SMLAL rd, rm, rs */ \
2042 arm_multiply_long(arm_multiply_long_addop(s), no, s); \
2047 /* RSC rd, rn, reg_op */ \
2048 arm_data_proc(reg_sh - reg[rn] + c_flag - 1, reg); \
2053 if((opcode & 0x90) == 0x90) \
2055 switch((opcode >> 5) & 0x03) \
2058 /* SMLALS rdlo, rdhi, rm, rs */ \
2059 arm_multiply_long(arm_multiply_long_addop(s), yes, s); \
2063 /* LDRH rd, [rn], +imm */ \
2064 arm_access_memory(load, no_op, half_imm, u16, yes, + offset); \
2068 /* LDRSB rd, [rn], +imm */ \
2069 arm_access_memory(load, no_op, half_imm, s8, yes, + offset); \
2073 /* LDRSH rd, [rn], +imm */ \
2074 arm_access_memory(load, no_op, half_imm, s16, yes, + offset); \
2080 /* RSCS rd, rn, reg_op */ \
2081 arm_data_proc_sub_flags((reg_sh + c_flag - 1), reg[rn], reg); \
2086 if((opcode & 0x90) == 0x90) \
2090 /* STRH rd, [rn - rm] */ \
2091 arm_access_memory(store, - reg[rm], half_reg, u16, no, no_op); \
2095 /* SWP rd, rm, [rn] */ \
2101 /* MRS rd, cpsr */ \
2102 arm_psr(reg, read, reg[REG_CPSR]); \
2107 if((opcode & 0x90) == 0x90) \
2109 switch((opcode >> 5) & 0x03) \
2112 /* LDRH rd, [rn - rm] */ \
2113 arm_access_memory(load, - reg[rm], half_reg, u16, no, no_op); \
2117 /* LDRSB rd, [rn - rm] */ \
2118 arm_access_memory(load, - reg[rm], half_reg, s8, no, no_op); \
2122 /* LDRSH rd, [rn - rm] */ \
2123 arm_access_memory(load, - reg[rm], half_reg, s16, no, no_op); \
2129 /* TST rd, rn, reg_op */ \
2130 arm_data_proc_test_logic(reg[rn] & reg_sh, reg); \
2135 if((opcode & 0x90) == 0x90) \
2137 /* STRH rd, [rn - rm]! */ \
2138 arm_access_memory(store, - reg[rm], half_reg, u16, yes, no_op); \
2145 arm_decode_branchx(); \
2146 u32 src = reg[rn]; \
2150 arm_pc_offset_update_direct(src); \
2151 reg[REG_CPSR] |= 0x20; \
2156 arm_pc_offset_update_direct(src); \
2161 /* MSR cpsr, rm */ \
2162 arm_psr(reg, store, cpsr); \
2168 if((opcode & 0x90) == 0x90) \
2170 switch((opcode >> 5) & 0x03) \
2173 /* LDRH rd, [rn - rm]! */ \
2174 arm_access_memory(load, - reg[rm], half_reg, u16, yes, no_op); \
2178 /* LDRSB rd, [rn - rm]! */ \
2179 arm_access_memory(load, - reg[rm], half_reg, s8, yes, no_op); \
2183 /* LDRSH rd, [rn - rm]! */ \
2184 arm_access_memory(load, - reg[rm], half_reg, s16, yes, no_op); \
2190 /* TEQ rd, rn, reg_op */ \
2191 arm_data_proc_test_logic(reg[rn] ^ reg_sh, reg); \
2196 if((opcode & 0x90) == 0x90) \
2200 /* STRH rd, [rn - imm] */ \
2201 arm_access_memory(store, - offset, half_imm, u16, no, no_op); \
2205 /* SWPB rd, rm, [rn] */ \
2211 /* MRS rd, spsr */ \
2212 arm_psr(reg, read, spsr[reg[CPU_MODE]]); \
2217 if((opcode & 0x90) == 0x90) \
2219 switch((opcode >> 5) & 0x03) \
2222 /* LDRH rd, [rn - imm] */ \
2223 arm_access_memory(load, - offset, half_imm, u16, no, no_op); \
2227 /* LDRSB rd, [rn - imm] */ \
2228 arm_access_memory(load, - offset, half_imm, s8, no, no_op); \
2232 /* LDRSH rd, [rn - imm] */ \
2233 arm_access_memory(load, - offset, half_imm, s16, no, no_op); \
2239 /* CMP rn, reg_op */ \
2240 arm_data_proc_test_sub(reg[rn], reg_sh, reg); \
2245 if((opcode & 0x90) == 0x90) \
2247 /* STRH rd, [rn - imm]! */ \
2248 arm_access_memory(store, - offset, half_imm, u16, yes, no_op); \
2252 /* MSR spsr, rm */ \
2253 arm_psr(reg, store, spsr); \
2258 if((opcode & 0x90) == 0x90) \
2260 switch((opcode >> 5) & 0x03) \
2263 /* LDRH rd, [rn - imm]! */ \
2264 arm_access_memory(load, - offset, half_imm, u16, yes, no_op); \
2268 /* LDRSB rd, [rn - imm]! */ \
2269 arm_access_memory(load, - offset, half_imm, s8, yes, no_op); \
2273 /* LDRSH rd, [rn - imm]! */ \
2274 arm_access_memory(load, - offset, half_imm, s16, yes, no_op); \
2280 /* CMN rd, rn, reg_op */ \
2281 arm_data_proc_test_add(reg[rn], reg_sh, reg); \
2286 if((opcode & 0x90) == 0x90) \
2288 /* STRH rd, [rn + rm] */ \
2289 arm_access_memory(store, + reg[rm], half_reg, u16, no, no_op); \
2293 /* ORR rd, rn, reg_op */ \
2294 arm_data_proc(reg[rn] | reg_sh, reg); \
2299 if((opcode & 0x90) == 0x90) \
2301 switch((opcode >> 5) & 0x03) \
2304 /* LDRH rd, [rn + rm] */ \
2305 arm_access_memory(load, + reg[rm], half_reg, u16, no, no_op); \
2309 /* LDRSB rd, [rn + rm] */ \
2310 arm_access_memory(load, + reg[rm], half_reg, s8, no, no_op); \
2314 /* LDRSH rd, [rn + rm] */ \
2315 arm_access_memory(load, + reg[rm], half_reg, s16, no, no_op); \
2321 /* ORRS rd, rn, reg_op */ \
2322 arm_data_proc_logic_flags(reg[rn] | reg_sh, reg); \
2327 if((opcode & 0x90) == 0x90) \
2329 /* STRH rd, [rn + rm]! */ \
2330 arm_access_memory(store, + reg[rm], half_reg, u16, yes, no_op); \
2334 /* MOV rd, reg_op */ \
2335 arm_data_proc(reg_sh, reg); \
2340 if((opcode & 0x90) == 0x90) \
2342 switch((opcode >> 5) & 0x03) \
2345 /* LDRH rd, [rn + rm]! */ \
2346 arm_access_memory(load, + reg[rm], half_reg, u16, yes, no_op); \
2350 /* LDRSB rd, [rn + rm]! */ \
2351 arm_access_memory(load, + reg[rm], half_reg, s8, yes, no_op); \
2355 /* LDRSH rd, [rn + rm]! */ \
2356 arm_access_memory(load, + reg[rm], half_reg, s16, yes, no_op); \
2362 /* MOVS rd, reg_op */ \
2363 arm_data_proc_logic_flags(reg_sh, reg); \
2368 if((opcode & 0x90) == 0x90) \
2370 /* STRH rd, [rn + imm] */ \
2371 arm_access_memory(store, + offset, half_imm, u16, no, no_op); \
2375 /* BIC rd, rn, reg_op */ \
2376 arm_data_proc(reg[rn] & (~reg_sh), reg); \
2381 if((opcode & 0x90) == 0x90) \
2383 switch((opcode >> 5) & 0x03) \
2386 /* LDRH rd, [rn + imm] */ \
2387 arm_access_memory(load, + offset, half_imm, u16, no, no_op); \
2391 /* LDRSB rd, [rn + imm] */ \
2392 arm_access_memory(load, + offset, half_imm, s8, no, no_op); \
2396 /* LDRSH rd, [rn + imm] */ \
2397 arm_access_memory(load, + offset, half_imm, s16, no, no_op); \
2403 /* BICS rd, rn, reg_op */ \
2404 arm_data_proc_logic_flags(reg[rn] & (~reg_sh), reg); \
2409 if((opcode & 0x90) == 0x90) \
2411 /* STRH rd, [rn + imm]! */ \
2412 arm_access_memory(store, + offset, half_imm, u16, yes, no_op); \
2416 /* MVN rd, reg_op */ \
2417 arm_data_proc(~reg_sh, reg); \
2422 if((opcode & 0x90) == 0x90) \
2424 switch((opcode >> 5) & 0x03) \
2427 /* LDRH rd, [rn + imm]! */ \
2428 arm_access_memory(load, + offset, half_imm, u16, yes, no_op); \
2432 /* LDRSB rd, [rn + imm]! */ \
2433 arm_access_memory(load, + offset, half_imm, s8, yes, no_op); \
2437 /* LDRSH rd, [rn + imm]! */ \
2438 arm_access_memory(load, + offset, half_imm, s16, yes, no_op); \
2444 /* MVNS rd, rn, reg_op */ \
2445 arm_data_proc_logic_flags(~reg_sh, reg); \
2450 /* AND rd, rn, imm */ \
2451 arm_data_proc(reg[rn] & imm, imm); \
2455 /* ANDS rd, rn, imm */ \
2456 arm_data_proc_logic_flags(reg[rn] & imm, imm); \
2460 /* EOR rd, rn, imm */ \
2461 arm_data_proc(reg[rn] ^ imm, imm); \
2465 /* EORS rd, rn, imm */ \
2466 arm_data_proc_logic_flags(reg[rn] ^ imm, imm); \
2470 /* SUB rd, rn, imm */ \
2471 arm_data_proc(reg[rn] - imm, imm); \
2475 /* SUBS rd, rn, imm */ \
2476 arm_data_proc_sub_flags(reg[rn], imm, imm); \
2480 /* RSB rd, rn, imm */ \
2481 arm_data_proc(imm - reg[rn], imm); \
2485 /* RSBS rd, rn, imm */ \
2486 arm_data_proc_sub_flags(imm, reg[rn], imm); \
2490 /* ADD rd, rn, imm */ \
2491 arm_data_proc(reg[rn] + imm, imm); \
2495 /* ADDS rd, rn, imm */ \
2496 arm_data_proc_add_flags(reg[rn], imm, imm); \
2500 /* ADC rd, rn, imm */ \
2501 arm_data_proc(reg[rn] + imm + c_flag, imm); \
2505 /* ADCS rd, rn, imm */ \
2506 arm_data_proc_add_flags(reg[rn] + imm, c_flag, imm); \
2510 /* SBC rd, rn, imm */ \
2511 arm_data_proc(reg[rn] - imm + c_flag - 1, imm); \
2515 /* SBCS rd, rn, imm */ \
2516 arm_data_proc_sub_flags(reg[rn], (imm + (c_flag ^ 1)), imm); \
2520 /* RSC rd, rn, imm */ \
2521 arm_data_proc(imm - reg[rn] + c_flag - 1, imm); \
2525 /* RSCS rd, rn, imm */ \
2526 arm_data_proc_sub_flags((imm + c_flag - 1), reg[rn], imm); \
2529 case 0x30 ... 0x31: \
2531 arm_data_proc_test_logic(reg[rn] & imm, imm); \
2535 /* MSR cpsr, imm */ \
2536 arm_psr(imm, store, cpsr); \
2541 arm_data_proc_test_logic(reg[rn] ^ imm, imm); \
2544 case 0x34 ... 0x35: \
2546 arm_data_proc_test_sub(reg[rn], imm, imm); \
2550 /* MSR spsr, imm */ \
2551 arm_psr(imm, store, spsr); \
2556 arm_data_proc_test_add(reg[rn], imm, imm); \
2560 /* ORR rd, rn, imm */ \
2561 arm_data_proc(reg[rn] | imm, imm); \
2565 /* ORRS rd, rn, imm */ \
2566 arm_data_proc_logic_flags(reg[rn] | imm, imm); \
2571 arm_data_proc(imm, imm); \
2575 /* MOVS rd, imm */ \
2576 arm_data_proc_logic_flags(imm, imm); \
2580 /* BIC rd, rn, imm */ \
2581 arm_data_proc(reg[rn] & (~imm), imm); \
2585 /* BICS rd, rn, imm */ \
2586 arm_data_proc_logic_flags(reg[rn] & (~imm), imm); \
2591 arm_data_proc(~imm, imm); \
2595 /* MVNS rd, imm */ \
2596 arm_data_proc_logic_flags(~imm, imm); \
2600 /* STR rd, [rn], -imm */ \
2601 arm_access_memory(store, no_op, imm, u32, yes, - offset); \
2605 /* LDR rd, [rn], -imm */ \
2606 arm_access_memory(load, no_op, imm, u32, yes, - offset); \
2610 /* STRT rd, [rn], -imm */ \
2611 arm_access_memory(store, no_op, imm, u32, yes, - offset); \
2615 /* LDRT rd, [rn], -imm */ \
2616 arm_access_memory(load, no_op, imm, u32, yes, - offset); \
2620 /* STRB rd, [rn], -imm */ \
2621 arm_access_memory(store, no_op, imm, u8, yes, - offset); \
2625 /* LDRB rd, [rn], -imm */ \
2626 arm_access_memory(load, no_op, imm, u8, yes, - offset); \
2630 /* STRBT rd, [rn], -imm */ \
2631 arm_access_memory(store, no_op, imm, u8, yes, - offset); \
2635 /* LDRBT rd, [rn], -imm */ \
2636 arm_access_memory(load, no_op, imm, u8, yes, - offset); \
2640 /* STR rd, [rn], +imm */ \
2641 arm_access_memory(store, no_op, imm, u32, yes, + offset); \
2645 /* LDR rd, [rn], +imm */ \
2646 arm_access_memory(load, no_op, imm, u32, yes, + offset); \
2650 /* STRT rd, [rn], +imm */ \
2651 arm_access_memory(store, no_op, imm, u32, yes, + offset); \
2655 /* LDRT rd, [rn], +imm */ \
2656 arm_access_memory(load, no_op, imm, u32, yes, + offset); \
2660 /* STRB rd, [rn], +imm */ \
2661 arm_access_memory(store, no_op, imm, u8, yes, + offset); \
2665 /* LDRB rd, [rn], +imm */ \
2666 arm_access_memory(load, no_op, imm, u8, yes, + offset); \
2670 /* STRBT rd, [rn], +imm */ \
2671 arm_access_memory(store, no_op, imm, u8, yes, + offset); \
2675 /* LDRBT rd, [rn], +imm */ \
2676 arm_access_memory(load, no_op, imm, u8, yes, + offset); \
2680 /* STR rd, [rn - imm] */ \
2681 arm_access_memory(store, - offset, imm, u32, no, no_op); \
2685 /* LDR rd, [rn - imm] */ \
2686 arm_access_memory(load, - offset, imm, u32, no, no_op); \
2690 /* STR rd, [rn - imm]! */ \
2691 arm_access_memory(store, - offset, imm, u32, yes, no_op); \
2695 /* LDR rd, [rn - imm]! */ \
2696 arm_access_memory(load, - offset, imm, u32, yes, no_op); \
2700 /* STRB rd, [rn - imm] */ \
2701 arm_access_memory(store, - offset, imm, u8, no, no_op); \
2705 /* LDRB rd, [rn - imm] */ \
2706 arm_access_memory(load, - offset, imm, u8, no, no_op); \
2710 /* STRB rd, [rn - imm]! */ \
2711 arm_access_memory(store, - offset, imm, u8, yes, no_op); \
2715 /* LDRB rd, [rn - imm]! */ \
2716 arm_access_memory(load, - offset, imm, u8, yes, no_op); \
2720 /* STR rd, [rn + imm] */ \
2721 arm_access_memory(store, + offset, imm, u32, no, no_op); \
2725 /* LDR rd, [rn + imm] */ \
2726 arm_access_memory(load, + offset, imm, u32, no, no_op); \
2730 /* STR rd, [rn + imm]! */ \
2731 arm_access_memory(store, + offset, imm, u32, yes, no_op); \
2735 /* LDR rd, [rn + imm]! */ \
2736 arm_access_memory(load, + offset, imm, u32, yes, no_op); \
2740 /* STRB rd, [rn + imm] */ \
2741 arm_access_memory(store, + offset, imm, u8, no, no_op); \
2745 /* LDRB rd, [rn + imm] */ \
2746 arm_access_memory(load, + offset, imm, u8, no, no_op); \
2750 /* STRB rd, [rn + imm]! */ \
2751 arm_access_memory(store, + offset, imm, u8, yes, no_op); \
2755 /* LDRBT rd, [rn + imm]! */ \
2756 arm_access_memory(load, + offset, imm, u8, yes, no_op); \
2760 /* STR rd, [rn], -reg_op */ \
2761 arm_access_memory(store, no_op, reg, u32, yes, - reg_offset); \
2765 /* LDR rd, [rn], -reg_op */ \
2766 arm_access_memory(load, no_op, reg, u32, yes, - reg_offset); \
2770 /* STRT rd, [rn], -reg_op */ \
2771 arm_access_memory(store, no_op, reg, u32, yes, - reg_offset); \
2775 /* LDRT rd, [rn], -reg_op */ \
2776 arm_access_memory(load, no_op, reg, u32, yes, - reg_offset); \
2780 /* STRB rd, [rn], -reg_op */ \
2781 arm_access_memory(store, no_op, reg, u8, yes, - reg_offset); \
2785 /* LDRB rd, [rn], -reg_op */ \
2786 arm_access_memory(load, no_op, reg, u8, yes, - reg_offset); \
2790 /* STRBT rd, [rn], -reg_op */ \
2791 arm_access_memory(store, no_op, reg, u8, yes, - reg_offset); \
2795 /* LDRBT rd, [rn], -reg_op */ \
2796 arm_access_memory(load, no_op, reg, u8, yes, - reg_offset); \
2800 /* STR rd, [rn], +reg_op */ \
2801 arm_access_memory(store, no_op, reg, u32, yes, + reg_offset); \
2805 /* LDR rd, [rn], +reg_op */ \
2806 arm_access_memory(load, no_op, reg, u32, yes, + reg_offset); \
2810 /* STRT rd, [rn], +reg_op */ \
2811 arm_access_memory(store, no_op, reg, u32, yes, + reg_offset); \
2815 /* LDRT rd, [rn], +reg_op */ \
2816 arm_access_memory(load, no_op, reg, u32, yes, + reg_offset); \
2820 /* STRB rd, [rn], +reg_op */ \
2821 arm_access_memory(store, no_op, reg, u8, yes, + reg_offset); \
2825 /* LDRB rd, [rn], +reg_op */ \
2826 arm_access_memory(load, no_op, reg, u8, yes, + reg_offset); \
2830 /* STRBT rd, [rn], +reg_op */ \
2831 arm_access_memory(store, no_op, reg, u8, yes, + reg_offset); \
2835 /* LDRBT rd, [rn], +reg_op */ \
2836 arm_access_memory(load, no_op, reg, u8, yes, + reg_offset); \
2840 /* STR rd, [rn - reg_op] */ \
2841 arm_access_memory(store, - reg_offset, reg, u32, no, no_op); \
2845 /* LDR rd, [rn - reg_op] */ \
2846 arm_access_memory(load, - reg_offset, reg, u32, no, no_op); \
2850 /* STR rd, [rn - reg_op]! */ \
2851 arm_access_memory(store, - reg_offset, reg, u32, yes, no_op); \
2855 /* LDR rd, [rn - reg_op]! */ \
2856 arm_access_memory(load, - reg_offset, reg, u32, yes, no_op); \
2860 /* STRB rd, [rn - reg_op] */ \
2861 arm_access_memory(store, - reg_offset, reg, u8, no, no_op); \
2865 /* LDRB rd, [rn - reg_op] */ \
2866 arm_access_memory(load, - reg_offset, reg, u8, no, no_op); \
2870 /* STRB rd, [rn - reg_op]! */ \
2871 arm_access_memory(store, - reg_offset, reg, u8, yes, no_op); \
2875 /* LDRB rd, [rn - reg_op]! */ \
2876 arm_access_memory(load, - reg_offset, reg, u8, yes, no_op); \
2880 /* STR rd, [rn + reg_op] */ \
2881 arm_access_memory(store, + reg_offset, reg, u32, no, no_op); \
2885 /* LDR rd, [rn + reg_op] */ \
2886 arm_access_memory(load, + reg_offset, reg, u32, no, no_op); \
2890 /* STR rd, [rn + reg_op]! */ \
2891 arm_access_memory(store, + reg_offset, reg, u32, yes, no_op); \
2895 /* LDR rd, [rn + reg_op]! */ \
2896 arm_access_memory(load, + reg_offset, reg, u32, yes, no_op); \
2900 /* STRB rd, [rn + reg_op] */ \
2901 arm_access_memory(store, + reg_offset, reg, u8, no, no_op); \
2905 /* LDRB rd, [rn + reg_op] */ \
2906 arm_access_memory(load, + reg_offset, reg, u8, no, no_op); \
2910 /* STRB rd, [rn + reg_op]! */ \
2911 arm_access_memory(store, + reg_offset, reg, u8, yes, no_op); \
2915 /* LDRBT rd, [rn + reg_op]! */ \
2916 arm_access_memory(load, + reg_offset, reg, u8, yes, no_op); \
2920 /* STMDA rn, rlist */ \
2921 arm_block_memory(store, down_a, no, no); \
2925 /* LDMDA rn, rlist */ \
2926 arm_block_memory(load, down_a, no, no); \
2930 /* STMDA rn!, rlist */ \
2931 arm_block_memory(store, down_a, down, no); \
2935 /* LDMDA rn!, rlist */ \
2936 arm_block_memory(load, down_a, down, no); \
2940 /* STMDA rn, rlist^ */ \
2941 arm_block_memory(store, down_a, no, yes); \
2945 /* LDMDA rn, rlist^ */ \
2946 arm_block_memory(load, down_a, no, yes); \
2950 /* STMDA rn!, rlist^ */ \
2951 arm_block_memory(store, down_a, down, yes); \
2955 /* LDMDA rn!, rlist^ */ \
2956 arm_block_memory(load, down_a, down, yes); \
2960 /* STMIA rn, rlist */ \
2961 arm_block_memory(store, no, no, no); \
2965 /* LDMIA rn, rlist */ \
2966 arm_block_memory(load, no, no, no); \
2970 /* STMIA rn!, rlist */ \
2971 arm_block_memory(store, no, up, no); \
2975 /* LDMIA rn!, rlist */ \
2976 arm_block_memory(load, no, up, no); \
2980 /* STMIA rn, rlist^ */ \
2981 arm_block_memory(store, no, no, yes); \
2985 /* LDMIA rn, rlist^ */ \
2986 arm_block_memory(load, no, no, yes); \
2990 /* STMIA rn!, rlist^ */ \
2991 arm_block_memory(store, no, up, yes); \
2995 /* LDMIA rn!, rlist^ */ \
2996 arm_block_memory(load, no, up, yes); \
3000 /* STMDB rn, rlist */ \
3001 arm_block_memory(store, down_b, no, no); \
3005 /* LDMDB rn, rlist */ \
3006 arm_block_memory(load, down_b, no, no); \
3010 /* STMDB rn!, rlist */ \
3011 arm_block_memory(store, down_b, down, no); \
3015 /* LDMDB rn!, rlist */ \
3016 arm_block_memory(load, down_b, down, no); \
3020 /* STMDB rn, rlist^ */ \
3021 arm_block_memory(store, down_b, no, yes); \
3025 /* LDMDB rn, rlist^ */ \
3026 arm_block_memory(load, down_b, no, yes); \
3030 /* STMDB rn!, rlist^ */ \
3031 arm_block_memory(store, down_b, down, yes); \
3035 /* LDMDB rn!, rlist^ */ \
3036 arm_block_memory(load, down_b, down, yes); \
3040 /* STMIB rn, rlist */ \
3041 arm_block_memory(store, up, no, no); \
3045 /* LDMIB rn, rlist */ \
3046 arm_block_memory(load, up, no, no); \
3050 /* STMIB rn!, rlist */ \
3051 arm_block_memory(store, up, up, no); \
3055 /* LDMIB rn!, rlist */ \
3056 arm_block_memory(load, up, up, no); \
3060 /* STMIB rn, rlist^ */ \
3061 arm_block_memory(store, up, no, yes); \
3065 /* LDMIB rn, rlist^ */ \
3066 arm_block_memory(load, up, no, yes); \
3070 /* STMIB rn!, rlist^ */ \
3071 arm_block_memory(store, up, up, yes); \
3075 /* LDMIB rn!, rlist^ */ \
3076 arm_block_memory(load, up, up, yes); \
3097 arm_decode_branch(); \
3098 arm_pc_offset_update(offset + 8); \
3102 case 0xB0 ... 0xBF: \
3105 arm_decode_branch(); \
3106 reg[REG_LR] = pc + 4; \
3107 arm_pc_offset_update(offset + 8); \
3111 case 0xC0 ... 0xEF: \
3112 /* coprocessor instructions, reserved on GBA */ \
3115 case 0xF0 ... 0xFF: \
3118 u32 swi_comment = opcode & 0x00FFFFFF; \
3120 switch(swi_comment >> 16) \
3122 /* Jump to BIOS SWI handler */ \
3124 reg_mode[MODE_SUPERVISOR][6] = pc + 4; \
3126 spsr[MODE_SUPERVISOR] = reg[REG_CPSR]; \
3127 reg[REG_PC] = 0x00000008; \
3129 reg[REG_CPSR] = (reg[REG_CPSR] & ~0x1F) | 0x13; \
3130 set_cpu_mode(MODE_SUPERVISOR); \
3139 #define execute_thumb_instruction() \
3140 using_instruction(thumb); \
3141 check_pc_region(); \
3143 opcode = address16(pc_address_block, (pc & 0x7FFF)); \
3145 switch((opcode >> 8) & 0xFF) \
3147 case 0x00 ... 0x07: \
3148 /* LSL rd, rs, offset */ \
3149 thumb_shift(shift, lsl, imm); \
3152 case 0x08 ... 0x0F: \
3153 /* LSR rd, rs, offset */ \
3154 thumb_shift(shift, lsr, imm); \
3157 case 0x10 ... 0x17: \
3158 /* ASR rd, rs, offset */ \
3159 thumb_shift(shift, asr, imm); \
3162 case 0x18 ... 0x19: \
3163 /* ADD rd, rs, rn */ \
3164 thumb_add(add_sub, rd, reg[rs], reg[rn]); \
3167 case 0x1A ... 0x1B: \
3168 /* SUB rd, rs, rn */ \
3169 thumb_sub(add_sub, rd, reg[rs], reg[rn]); \
3172 case 0x1C ... 0x1D: \
3173 /* ADD rd, rs, imm */ \
3174 thumb_add(add_sub_imm, rd, reg[rs], imm); \
3177 case 0x1E ... 0x1F: \
3178 /* SUB rd, rs, imm */ \
3179 thumb_sub(add_sub_imm, rd, reg[rs], imm); \
3184 thumb_logic(imm, 0, imm); \
3189 thumb_logic(imm, 1, imm); \
3194 thumb_logic(imm, 2, imm); \
3199 thumb_logic(imm, 3, imm); \
3204 thumb_logic(imm, 4, imm); \
3209 thumb_logic(imm, 5, imm); \
3214 thumb_logic(imm, 6, imm); \
3219 thumb_logic(imm, 7, imm); \
3224 thumb_test_sub(imm, reg[0], imm); \
3229 thumb_test_sub(imm, reg[1], imm); \
3234 thumb_test_sub(imm, reg[2], imm); \
3239 thumb_test_sub(imm, reg[3], imm); \
3244 thumb_test_sub(imm, reg[4], imm); \
3249 thumb_test_sub(imm, reg[5], imm); \
3254 thumb_test_sub(imm, reg[6], imm); \
3259 thumb_test_sub(imm, reg[7], imm); \
3264 thumb_add(imm, 0, reg[0], imm); \
3269 thumb_add(imm, 1, reg[1], imm); \
3274 thumb_add(imm, 2, reg[2], imm); \
3279 thumb_add(imm, 3, reg[3], imm); \
3284 thumb_add(imm, 4, reg[4], imm); \
3289 thumb_add(imm, 5, reg[5], imm); \
3294 thumb_add(imm, 6, reg[6], imm); \
3299 thumb_add(imm, 7, reg[7], imm); \
3304 thumb_sub(imm, 0, reg[0], imm); \
3309 thumb_sub(imm, 1, reg[1], imm); \
3314 thumb_sub(imm, 2, reg[2], imm); \
3319 thumb_sub(imm, 3, reg[3], imm); \
3324 thumb_sub(imm, 4, reg[4], imm); \
3329 thumb_sub(imm, 5, reg[5], imm); \
3334 thumb_sub(imm, 6, reg[6], imm); \
3339 thumb_sub(imm, 7, reg[7], imm); \
3343 switch((opcode >> 6) & 0x03) \
3347 thumb_logic(alu_op, rd, reg[rd] & reg[rs]); \
3352 thumb_logic(alu_op, rd, reg[rd] ^ reg[rs]); \
3357 thumb_shift(alu_op, lsl, reg); \
3362 thumb_shift(alu_op, lsr, reg); \
3368 switch((opcode >> 6) & 0x03) \
3372 thumb_shift(alu_op, asr, reg); \
3377 thumb_add(alu_op, rd, reg[rd] + reg[rs], c_flag); \
3382 thumb_sub(alu_op, rd, reg[rd] - reg[rs], (c_flag ^ 1)); \
3387 thumb_shift(alu_op, ror, reg); \
3393 switch((opcode >> 6) & 0x03) \
3397 thumb_test_logic(alu_op, reg[rd] & reg[rs]); \
3402 thumb_sub(alu_op, rd, 0, reg[rs]); \
3407 thumb_test_sub(alu_op, reg[rd], reg[rs]); \
3412 thumb_test_add(alu_op, reg[rd], reg[rs]); \
3418 switch((opcode >> 6) & 0x03) \
3422 thumb_logic(alu_op, rd, reg[rd] | reg[rs]); \
3427 thumb_logic(alu_op, rd, reg[rd] * reg[rs]); \
3432 thumb_logic(alu_op, rd, reg[rd] & (~reg[rs])); \
3437 thumb_logic(alu_op, rd, ~reg[rs]); \
3444 thumb_hireg_op(reg[rd] + reg[rs]); \
3450 thumb_pc_offset(4); \
3451 thumb_decode_hireg_op(); \
3452 u32 _sa = reg[rd]; \
3453 u32 _sb = reg[rs]; \
3454 u32 dest = _sa - _sb; \
3455 thumb_pc_offset(-2); \
3456 calculate_flags_sub(dest, _sa, _sb); \
3462 thumb_hireg_op(reg[rs]); \
3468 thumb_decode_hireg_op(); \
3470 thumb_pc_offset(4); \
3475 thumb_pc_offset_update_direct(src); \
3479 /* Switch to ARM mode */ \
3480 thumb_pc_offset_update_direct(src); \
3481 reg[REG_CPSR] &= ~0x20; \
3489 /* LDR r0, [pc + imm] */ \
3490 thumb_access_memory(load, imm, (pc & ~2) + (imm * 4) + 4, reg[0], u32); \
3494 /* LDR r1, [pc + imm] */ \
3495 thumb_access_memory(load, imm, (pc & ~2) + (imm * 4) + 4, reg[1], u32); \
3499 /* LDR r2, [pc + imm] */ \
3500 thumb_access_memory(load, imm, (pc & ~2) + (imm * 4) + 4, reg[2], u32); \
3504 /* LDR r3, [pc + imm] */ \
3505 thumb_access_memory(load, imm, (pc & ~2) + (imm * 4) + 4, reg[3], u32); \
3509 /* LDR r4, [pc + imm] */ \
3510 thumb_access_memory(load, imm, (pc & ~2) + (imm * 4) + 4, reg[4], u32); \
3514 /* LDR r5, [pc + imm] */ \
3515 thumb_access_memory(load, imm, (pc & ~2) + (imm * 4) + 4, reg[5], u32); \
3519 /* LDR r6, [pc + imm] */ \
3520 thumb_access_memory(load, imm, (pc & ~2) + (imm * 4) + 4, reg[6], u32); \
3524 /* LDR r7, [pc + imm] */ \
3525 thumb_access_memory(load, imm, (pc & ~2) + (imm * 4) + 4, reg[7], u32); \
3528 case 0x50 ... 0x51: \
3529 /* STR rd, [rb + ro] */ \
3530 thumb_access_memory(store, mem_reg, reg[rb] + reg[ro], reg[rd], u32); \
3533 case 0x52 ... 0x53: \
3534 /* STRH rd, [rb + ro] */ \
3535 thumb_access_memory(store, mem_reg, reg[rb] + reg[ro], reg[rd], u16); \
3538 case 0x54 ... 0x55: \
3539 /* STRB rd, [rb + ro] */ \
3540 thumb_access_memory(store, mem_reg, reg[rb] + reg[ro], reg[rd], u8); \
3543 case 0x56 ... 0x57: \
3544 /* LDSB rd, [rb + ro] */ \
3545 thumb_access_memory(load, mem_reg, reg[rb] + reg[ro], reg[rd], s8); \
3548 case 0x58 ... 0x59: \
3549 /* LDR rd, [rb + ro] */ \
3550 thumb_access_memory(load, mem_reg, reg[rb] + reg[ro], reg[rd], u32); \
3553 case 0x5A ... 0x5B: \
3554 /* LDRH rd, [rb + ro] */ \
3555 thumb_access_memory(load, mem_reg, reg[rb] + reg[ro], reg[rd], u16); \
3558 case 0x5C ... 0x5D: \
3559 /* LDRB rd, [rb + ro] */ \
3560 thumb_access_memory(load, mem_reg, reg[rb] + reg[ro], reg[rd], u8); \
3563 case 0x5E ... 0x5F: \
3564 /* LDSH rd, [rb + ro] */ \
3565 thumb_access_memory(load, mem_reg, reg[rb] + reg[ro], reg[rd], s16); \
3568 case 0x60 ... 0x67: \
3569 /* STR rd, [rb + imm] */ \
3570 thumb_access_memory(store, mem_imm, reg[rb] + (imm * 4), reg[rd], u32); \
3573 case 0x68 ... 0x6F: \
3574 /* LDR rd, [rb + imm] */ \
3575 thumb_access_memory(load, mem_imm, reg[rb] + (imm * 4), reg[rd], u32); \
3578 case 0x70 ... 0x77: \
3579 /* STRB rd, [rb + imm] */ \
3580 thumb_access_memory(store, mem_imm, reg[rb] + imm, reg[rd], u8); \
3583 case 0x78 ... 0x7F: \
3584 /* LDRB rd, [rb + imm] */ \
3585 thumb_access_memory(load, mem_imm, reg[rb] + imm, reg[rd], u8); \
3588 case 0x80 ... 0x87: \
3589 /* STRH rd, [rb + imm] */ \
3590 thumb_access_memory(store, mem_imm, reg[rb] + (imm * 2), reg[rd], u16); \
3593 case 0x88 ... 0x8F: \
3594 /* LDRH rd, [rb + imm] */ \
3595 thumb_access_memory(load, mem_imm, reg[rb] + (imm * 2), reg[rd], u16); \
/* SP-relative word STR/LDR, expanded as one handler per source/dest */ \
/* register r0-r7; the 8-bit immediate is scaled by 4 (word offset). */ \
3599 /* STR r0, [sp + imm] */ \
3600 thumb_access_memory(store, imm, reg[REG_SP] + (imm * 4), reg[0], u32); \
3604 /* STR r1, [sp + imm] */ \
3605 thumb_access_memory(store, imm, reg[REG_SP] + (imm * 4), reg[1], u32); \
3609 /* STR r2, [sp + imm] */ \
3610 thumb_access_memory(store, imm, reg[REG_SP] + (imm * 4), reg[2], u32); \
3614 /* STR r3, [sp + imm] */ \
3615 thumb_access_memory(store, imm, reg[REG_SP] + (imm * 4), reg[3], u32); \
3619 /* STR r4, [sp + imm] */ \
3620 thumb_access_memory(store, imm, reg[REG_SP] + (imm * 4), reg[4], u32); \
3624 /* STR r5, [sp + imm] */ \
3625 thumb_access_memory(store, imm, reg[REG_SP] + (imm * 4), reg[5], u32); \
3629 /* STR r6, [sp + imm] */ \
3630 thumb_access_memory(store, imm, reg[REG_SP] + (imm * 4), reg[6], u32); \
3634 /* STR r7, [sp + imm] */ \
3635 thumb_access_memory(store, imm, reg[REG_SP] + (imm * 4), reg[7], u32); \
3639 /* LDR r0, [sp + imm] */ \
3640 thumb_access_memory(load, imm, reg[REG_SP] + (imm * 4), reg[0], u32); \
3644 /* LDR r1, [sp + imm] */ \
3645 thumb_access_memory(load, imm, reg[REG_SP] + (imm * 4), reg[1], u32); \
3649 /* LDR r2, [sp + imm] */ \
3650 thumb_access_memory(load, imm, reg[REG_SP] + (imm * 4), reg[2], u32); \
3654 /* LDR r3, [sp + imm] */ \
3655 thumb_access_memory(load, imm, reg[REG_SP] + (imm * 4), reg[3], u32); \
3659 /* LDR r4, [sp + imm] */ \
3660 thumb_access_memory(load, imm, reg[REG_SP] + (imm * 4), reg[4], u32); \
3664 /* LDR r5, [sp + imm] */ \
3665 thumb_access_memory(load, imm, reg[REG_SP] + (imm * 4), reg[5], u32); \
3669 /* LDR r6, [sp + imm] */ \
3670 thumb_access_memory(load, imm, reg[REG_SP] + (imm * 4), reg[6], u32); \
3674 /* LDR r7, [sp + imm] */ \
3675 thumb_access_memory(load, imm, reg[REG_SP] + (imm * 4), reg[7], u32); \
/* ADD rd, pc/sp, imm*4 (flags unaffected). For the pc-relative form the */ \
/* base is (pc & ~2) + 4: pc forced word-aligned, plus the same +4 fetch */ \
/* offset used by the BL cases further down. */ \
3679 /* ADD r0, pc, +imm */ \
3680 thumb_add_noflags(imm, 0, (pc & ~2) + 4, (imm * 4)); \
3684 /* ADD r1, pc, +imm */ \
3685 thumb_add_noflags(imm, 1, (pc & ~2) + 4, (imm * 4)); \
3689 /* ADD r2, pc, +imm */ \
3690 thumb_add_noflags(imm, 2, (pc & ~2) + 4, (imm * 4)); \
3694 /* ADD r3, pc, +imm */ \
3695 thumb_add_noflags(imm, 3, (pc & ~2) + 4, (imm * 4)); \
3699 /* ADD r4, pc, +imm */ \
3700 thumb_add_noflags(imm, 4, (pc & ~2) + 4, (imm * 4)); \
3704 /* ADD r5, pc, +imm */ \
3705 thumb_add_noflags(imm, 5, (pc & ~2) + 4, (imm * 4)); \
3709 /* ADD r6, pc, +imm */ \
3710 thumb_add_noflags(imm, 6, (pc & ~2) + 4, (imm * 4)); \
3714 /* ADD r7, pc, +imm */ \
3715 thumb_add_noflags(imm, 7, (pc & ~2) + 4, (imm * 4)); \
3719 /* ADD r0, sp, +imm */ \
3720 thumb_add_noflags(imm, 0, reg[REG_SP], (imm * 4)); \
3724 /* ADD r1, sp, +imm */ \
3725 thumb_add_noflags(imm, 1, reg[REG_SP], (imm * 4)); \
3729 /* ADD r2, sp, +imm */ \
3730 thumb_add_noflags(imm, 2, reg[REG_SP], (imm * 4)); \
3734 /* ADD r3, sp, +imm */ \
3735 thumb_add_noflags(imm, 3, reg[REG_SP], (imm * 4)); \
3739 /* ADD r4, sp, +imm */ \
3740 thumb_add_noflags(imm, 4, reg[REG_SP], (imm * 4)); \
3744 /* ADD r5, sp, +imm */ \
3745 thumb_add_noflags(imm, 5, reg[REG_SP], (imm * 4)); \
3749 /* ADD r6, sp, +imm */ \
3750 thumb_add_noflags(imm, 6, reg[REG_SP], (imm * 4)); \
3754 /* ADD r7, sp, +imm */ \
3755 thumb_add_noflags(imm, 7, reg[REG_SP], (imm * 4)); \
/* Stack pointer adjustment: opcode bit 7 selects the sign of imm*4. */ \
3758 case 0xB0 ... 0xB3: \
3759 if((opcode >> 7) & 0x01) \
3761 /* ADD sp, -imm */ \
3762 thumb_add_noflags(add_sp, 13, reg[REG_SP], -(imm * 4)); \
3766 /* ADD sp, +imm (register 13 = sp) */ \
3767 thumb_add_noflags(add_sp, 13, reg[REG_SP], (imm * 4)); \
/* PUSH/POP: block transfers based on register 13 (sp). The second and */ \
/* third arguments select pre/post address adjustment and the extra */ \
/* lr/pc transfer respectively. */ \
3773 thumb_block_memory(store, down, no_op, 13); \
3777 /* PUSH rlist, lr */ \
3778 thumb_block_memory(store, push_lr, push_lr, 13); \
3783 thumb_block_memory(load, no_op, up, 13); \
3787 /* POP rlist, pc */ \
3788 thumb_block_memory(load, no_op, pop_pc, 13); \
/* STMIA/LDMIA rn!, rlist: multiple store/load with ascending addresses */ \
/* and base writeback, expanded per base register r0-r7. */ \
3792 /* STMIA r0!, rlist */ \
3793 thumb_block_memory(store, no_op, up, 0); \
3797 /* STMIA r1!, rlist */ \
3798 thumb_block_memory(store, no_op, up, 1); \
3802 /* STMIA r2!, rlist */ \
3803 thumb_block_memory(store, no_op, up, 2); \
3807 /* STMIA r3!, rlist */ \
3808 thumb_block_memory(store, no_op, up, 3); \
3812 /* STMIA r4!, rlist */ \
3813 thumb_block_memory(store, no_op, up, 4); \
3817 /* STMIA r5!, rlist */ \
3818 thumb_block_memory(store, no_op, up, 5); \
3822 /* STMIA r6!, rlist */ \
3823 thumb_block_memory(store, no_op, up, 6); \
3827 /* STMIA r7!, rlist */ \
3828 thumb_block_memory(store, no_op, up, 7); \
3832 /* LDMIA r0!, rlist */ \
3833 thumb_block_memory(load, no_op, up, 0); \
3837 /* LDMIA r1!, rlist */ \
3838 thumb_block_memory(load, no_op, up, 1); \
3842 /* LDMIA r2!, rlist */ \
3843 thumb_block_memory(load, no_op, up, 2); \
3847 /* LDMIA r3!, rlist */ \
3848 thumb_block_memory(load, no_op, up, 3); \
3852 /* LDMIA r4!, rlist */ \
3853 thumb_block_memory(load, no_op, up, 4); \
3857 /* LDMIA r5!, rlist */ \
3858 thumb_block_memory(load, no_op, up, 5); \
3862 /* LDMIA r6!, rlist */ \
3863 thumb_block_memory(load, no_op, up, 6); \
3867 /* LDMIA r7!, rlist */ \
3868 thumb_block_memory(load, no_op, up, 7); \
/* Conditional branches; the flag expressions implement the standard ARM */ \
/* condition codes. */ \
/* BEQ */ \
3873 thumb_conditional_branch(z_flag == 1); \
/* BNE */ \
3878 thumb_conditional_branch(z_flag == 0); \
/* BCS */ \
3883 thumb_conditional_branch(c_flag == 1); \
/* BCC */ \
3888 thumb_conditional_branch(c_flag == 0); \
/* BMI */ \
3893 thumb_conditional_branch(n_flag == 1); \
/* BPL */ \
3898 thumb_conditional_branch(n_flag == 0); \
/* BVS */ \
3903 thumb_conditional_branch(v_flag == 1); \
/* BVC */ \
3908 thumb_conditional_branch(v_flag == 0); \
/* BHI: C set and Z clear */ \
3913 thumb_conditional_branch(c_flag & (z_flag ^ 1)); \
/* BLS: C clear or Z set */ \
3918 thumb_conditional_branch((c_flag == 0) | z_flag); \
/* BGE: N == V */ \
3923 thumb_conditional_branch(n_flag == v_flag); \
/* BLT: N != V */ \
3928 thumb_conditional_branch(n_flag != v_flag); \
/* BGT: Z clear and N == V */ \
3933 thumb_conditional_branch((z_flag == 0) & (n_flag == v_flag)); \
/* BLE: Z set or N != V */ \
3938 thumb_conditional_branch(z_flag | (n_flag != v_flag)); \
/* SWI: enter supervisor mode. Return address (pc + 2, next Thumb insn) */ \
/* goes into the supervisor banked lr slot (index 6, matching the lr */ \
/* slot used by set_cpu_mode); current CPSR is saved to the supervisor */ \
/* SPSR; execution vectors to 0x00000008. */ \
3944 u32 swi_comment = opcode & 0xFF; \
3946 switch(swi_comment) \
3949 reg_mode[MODE_SUPERVISOR][6] = pc + 2; \
3950 spsr[MODE_SUPERVISOR] = reg[REG_CPSR]; \
3951 reg[REG_PC] = 0x00000008; \
3952 thumb_update_pc(); \
/* Clear mode + T bits (0x3F), set supervisor mode (0x13) -- this also */ \
/* switches the CPU to ARM state since the Thumb bit (0x20) is cleared. */ \
3953 reg[REG_CPSR] = (reg[REG_CPSR] & ~0x3F) | 0x13; \
3954 set_cpu_mode(MODE_SUPERVISOR); \
/* Unconditional B: 11-bit offset sign-extended via shift pair, scaled */ \
/* to halfwords, +4 fetch offset. */ \
3961 case 0xE0 ... 0xE7: \
3964 thumb_decode_branch(); \
3965 thumb_pc_offset_update(((s32)(offset << 21) >> 20) + 4); \
3969 case 0xF0 ... 0xF7: \
3971 /* (low word) BL label: stage the sign-extended high part of the */ \
3972 /* target in lr; the following high word completes the branch. */ \
/* (original numbered lines 3971-3972 above are the comment/decode pair) */ \
thumb_decode_branch(); \
reg[REG_LR] = pc + 4 + ((s32)(offset << 21) >> 9); \
3974 thumb_pc_offset(2); \
3978 case 0xF8 ... 0xFF: \
3980 /* (high word) BL label: compute final target from staged lr, and */ \
/* set the return address to the next instruction with bit 0 (Thumb */ \
/* state) set. NOTE(review): reg[REG_LR] presumably receives lr in the */ \
/* elided lines that follow -- confirm against the full source. */ \
3981 thumb_decode_branch(); \
3982 u32 lr = (pc + 2) | 0x01; \
3983 pc = reg[REG_LR] + (offset * 2); \
/* Dump R00-R15 to the debug screen as four rows of four registers. */
/* (Loop counters i/i2/i3 are declared in lines elided from this view.) */
3990 void print_arm_registers()
3994 for(i = 0, i3 = 0; i < 4; i++)
3996 debug_screen_printf(" ");
3997 for(i2 = 0; i2 < 4; i2++, i3++)
3999 debug_screen_printf("R%02d %08x ", i3, reg[i3]);
4001 debug_screen_newline(1);
/* Print the 16-bit Thumb opcode at the current PC. NOTE(review): the */
/* empty parameter list means unspecified parameters in C, so the call */
/* site that passes `cycles` compiles; the argument is ignored here. */
4005 void print_thumb_instruction()
4007 debug_screen_printf("Thumb instruction at PC: %04x",
4008 read_memory16(reg[REG_PC]));
4009 debug_screen_newline(1);
/* Print the 32-bit ARM opcode at the current PC (see the note on */
/* print_thumb_instruction regarding the empty parameter list). */
4012 void print_arm_instruction()
4014 debug_screen_printf("ARM instruction at PC: %08x",
4015 read_memory32(reg[REG_PC]))[
4016 debug_screen_newline(1);
/* Body of the flags/status printer (function signature elided from this */
/* view): N/Z/C/V are extracted from CPSR bits 31-28, followed by CPSR, */
/* the current mode's SPSR, and the mode name. */
4021 u32 cpsr = reg[REG_CPSR];
4022 debug_screen_newline(1);
4023 debug_screen_printf(
4024 " N: %d Z: %d C: %d V: %d CPSR: %08x SPSR: %08x mode: %s",
4025 (cpsr >> 31) & 0x01, (cpsr >> 30) & 0x01, (cpsr >> 29) & 0x01,
4026 (cpsr >> 28) & 0x01, cpsr, spsr[reg[CPU_MODE]],
4027 cpu_mode_names[reg[CPU_MODE]]);
4028 debug_screen_newline(2);
/* Stack dump: prints stack_print_lines rows of five 32-bit words */
/* starting at the current sp. */
4031 const u32 stack_print_lines = 2;
4037 debug_screen_printf("Stack:");
4038 debug_screen_newline(1);
4040 for(i = 0, i3 = reg[REG_SP]; i < stack_print_lines; i++)
4042 for(i2 = 0; i2 < 5; i2++, i3 += 4)
4044 debug_screen_printf(" %08x", read_memory32(i3));
/* NOTE(review): inside the loop i < stack_print_lines always holds, so */
/* this condition is always true; likely meant i != stack_print_lines-1. */
4046 if(i != stack_print_lines)
4047 debug_screen_newline(1);
4050 debug_screen_newline(1);
/* Debugger bookkeeping state. */
4053 u32 instruction_count = 0;   /* instructions executed since start */
4055 u32 output_field = 0;        /* current debug-screen output slot */
4056 const u32 num_output_fields = 2;
4058 u32 last_instruction = 0;    /* PC of the previously stepped insn */
4060 u32 in_interrupt = 0;        /* nonzero while inside an IRQ (debug view) */
/* Body of debug_on (signature elided): enter single-step mode and bring */
/* up the debug screen. */
4064 current_debug_state = STEP;
4065 debug_screen_start();
/* Leave interactive stepping and switch to the given debug state. */
/* NOTE(review): declared u32 but no return statement is visible here -- */
/* the return (or screen teardown) is in lines elided from this view. */
4068 u32 debug_off(debug_state new_debug_state)
4070 current_debug_state = new_debug_state;
/* Per-instruction debugger hook, called from the interpreter loops with */
/* the current pc and remaining cycle count. Checks the active breakpoint */
/* condition, optionally renders the debug screen and blocks for user */
/* input, and finally records last_instruction and advances REG_PC by 2 */
/* (Thumb, CPSR bit 0x20 set) or 4 (ARM). Many branch bodies are elided */
/* from this view. */
4074 u32 function_cc step_debug(u32 pc, u32 cycles)
4080 if(reg[REG_CPSR] & 0x20)
4083 instruction_count++;
/* Breakpoint dispatch: each state compares against breakpoint_value. */
4085 switch(current_debug_state)
4088 if(reg[REG_PC] == breakpoint_value)
4094 if(reg[REG_Z_FLAG] == 1)
4099 case VCOUNT_BREAKPOINT:
4100 if(io_registers[REG_VCOUNT] == breakpoint_value)
4105 case COUNTDOWN_BREAKPOINT:
4106 if(breakpoint_value == 0)
4113 case COUNTDOWN_BREAKPOINT_B:
4114 if(breakpoint_value == instruction_count)
4119 case COUNTDOWN_BREAKPOINT_C:
/* _C variant skips counting while in_interrupt is set; 0x13c appears to */
/* be the interrupt-return point being watched -- confirm in full source. */
4124 if((breakpoint_value == 0) && (in_interrupt == 0))
4130 if(in_interrupt == 0)
4133 if(in_interrupt && (pc == 0x13c))
/* Interactive path: render current instruction, registers and status. */
4140 if((current_debug_state == STEP) ||
4141 (current_debug_state == STEP_RUN))
4145 SDL_LockMutex(sound_mutex);
4148 if(output_field >= num_output_fields)
4151 debug_screen_clear();
4155 print_thumb_instruction(cycles);
4157 print_arm_instruction(cycles);
4159 print_arm_registers();
4164 printf("%x instructions in, VCOUNT %d, cycles remaining: %d \n",
4165 instruction_count, io_registers[REG_VCOUNT], cycles);
4167 debug_screen_update();
/* In STEP mode, block until the user supplies an action. */
4170 if(current_debug_state != STEP_RUN)
4177 gui_action_type next_input = CURSOR_NONE;
4178 while(next_input == CURSOR_NONE)
4180 next_input = get_gui_input();
4207 dump_translation_cache();
4211 debug_off(Z_BREAKPOINT);
/* NOTE(review): the scanf calls below are unchecked, and %08x/%d write */
/* through a u32* -- relies on unsigned int being 32 bits; the hex */
/* variants also mix %x parsing with decimal prompts elsewhere. */
4216 printf("break at PC (hex): ");
4217 scanf("%08x", &breakpoint_value);
4218 debug_off(PC_BREAKPOINT);
4222 printf("break after N instructions (hex): ");
4223 scanf("%08x", &breakpoint_value);
4224 breakpoint_value -= 1;
4225 debug_off(COUNTDOWN_BREAKPOINT);
4229 printf("break after N instructions, skip in IRQ (hex): ");
4230 scanf("%08x", &breakpoint_value);
4231 breakpoint_value -= 1;
4232 debug_off(COUNTDOWN_BREAKPOINT_C);
4236 printf("break after N instructions (since start): ");
4237 scanf("%d", &breakpoint_value);
4238 debug_off(COUNTDOWN_BREAKPOINT_B);
4242 printf("break at VCOUNT: ");
4243 scanf("%d", &breakpoint_value);
4244 debug_off(VCOUNT_BREAKPOINT);
4249 current_debug_state = STEP_RUN;
4257 debug_off(PC_BREAKPOINT);
4261 global_cycles_per_instruction = 0;
/* Savestate action: caller frees the copied screen buffer after save. */
4267 u8 current_savestate_filename[512];
4268 u16 *current_screen = copy_screen();
4269 get_savestate_filename_noshot(savestate_slot,
4270 current_savestate_filename);
4271 save_state(current_savestate_filename, current_screen);
4272 free(current_screen);
4281 SDL_UnlockMutex(sound_mutex);
4284 last_instruction = reg[REG_PC];
/* Advance the visible PC past the instruction just examined. */
4287 reg[REG_PC] = pc + 2;
4289 reg[REG_PC] = pc + 4;
/* Switch the active CPU mode, banking registers in/out of reg_mode[]. */
/* FIQ banks r8-r14 (stored at slots 0-6); every other mode banks only */
/* sp and lr (slots 5 and 6). No-op when the mode is unchanged. */
4294 void set_cpu_mode(cpu_mode_type new_mode)
4297 cpu_mode_type cpu_mode = reg[CPU_MODE];
4299 if(cpu_mode != new_mode)
4301 if(new_mode == MODE_FIQ)
/* Save the full r8-r14 window of the outgoing mode. */
4303 for(i = 8; i < 15; i++)
4305 reg_mode[cpu_mode][i - 8] = reg[i];
4310 reg_mode[cpu_mode][5] = reg[REG_SP];
4311 reg_mode[cpu_mode][6] = reg[REG_LR];
4314 if(cpu_mode == MODE_FIQ)
/* Restore the incoming mode's banked window. */
4316 for(i = 8; i < 15; i++)
4318 reg[i] = reg_mode[new_mode][i - 8];
4323 reg[REG_SP] = reg_mode[new_mode][5];
4324 reg[REG_LR] = reg_mode[new_mode][6];
4327 reg[CPU_MODE] = new_mode;
/* Latch irq_raised into IF and, if deliverable, vector the CPU into the
   BIOS IRQ handler at 0x18. Delivery requires: the IRQ enabled in IE,
   IME nonzero, and CPSR I-bit (0x80) clear. */
4331 void raise_interrupt(irq_type irq_raised)
4333 // The specific IRQ must be enabled in IE, master IRQ enable must be on,
4334 // and it must be on in the flags.
4335 io_registers[REG_IF] |= irq_raised;
4337 if((io_registers[REG_IE] & irq_raised) && io_registers[REG_IME] &&
4338 ((reg[REG_CPSR] & 0x80) == 0))
/* Value observed when reading protected BIOS during IRQ dispatch -- */
/* presumably the open-bus value; confirm against the full source. */
4340 bios_read_protect = 0xe55ec002;
4342 // Interrupt handler in BIOS
4343 reg_mode[MODE_IRQ][6] = reg[REG_PC] + 4;
4344 spsr[MODE_IRQ] = reg[REG_CPSR];
/* 0xD2 = IRQ mode (0x12) with IRQ and FIQ disable bits set; Thumb bit */
/* clear, so the handler runs in ARM state. */
4345 reg[REG_CPSR] = 0xD2;
4346 reg[REG_PC] = 0x00000018;
4348 bios_region_read_allow();
4350 set_cpu_mode(MODE_IRQ);
/* Wake a halted CPU and tell the core the PC changed out from under it. */
4351 reg[CPU_HALT_STATE] = CPU_ACTIVE;
4352 reg[CHANGED_PC_STATUS] = 1;
/* Interpreter main loop: execute instructions for (at least) the given
   number of cycles, then let update_gba() account for hardware events and
   supply the next cycle budget. Dispatches to the Thumb or ARM inner loop
   based on CPSR bit 0x20. Large portions of the loop bodies are elided
   from this view. */
4356 u32 execute_arm(u32 cycles)
4358 u32 pc = reg[REG_PC];
4361 u32 n_flag, z_flag, c_flag, v_flag;
/* pc >> 15: 32KB page index into the read map. */
4362 u32 pc_region = (pc >> 15);
4363 u8 *pc_address_block = memory_map_read[pc_region];
4365 s32 cycles_remaining;
4366 u32 cycles_per_instruction = global_cycles_per_instruction;
4367 cpu_alert_type cpu_alert;
/* Lazily map unloaded gamepak pages (region masked to 0x3FF pages). */
4371 if(pc_address_block == NULL)
4372 pc_address_block = load_gamepak_page(pc_region & 0x3FF);
4376 cycles_remaining = cycles;
4380 if(reg[REG_CPSR] & 0x20)
4388 step_debug(pc, cycles_remaining);
4389 cycles_per_instruction = global_cycles_per_instruction;
4392 execute_arm_instruction();
4393 cycles_remaining -= cycles_per_instruction;
4394 } while(cycles_remaining > 0);
4397 cycles = update_gba();
/* Thumb inner loop mirrors the ARM one above. */
4405 step_debug(pc, cycles_remaining);
4408 execute_thumb_instruction();
4409 cycles_remaining -= cycles_per_instruction;
4410 } while(cycles_remaining > 0);
4413 cycles = update_gba();
4418 if(cpu_alert == CPU_ALERT_IRQ)
4420 cycles = cycles_remaining;
/* While halted, keep servicing hardware until something wakes the CPU. */
4426 while(reg[CPU_HALT_STATE] != CPU_ACTIVE)
4428 cycles = update_gba();
/* CPU reset body (function signature elided from this view): clear the
   general registers, then set the post-BIOS boot state -- sp in IWRAM,
   pc at the cartridge ROM base, CPSR in user mode -- and seed the banked
   stack pointers for user/IRQ/FIQ/supervisor modes. */
4438 for(i = 0; i < 16; i++)
4443 reg[REG_SP] = 0x03007F00;
4444 reg[REG_PC] = 0x08000000;
4445 reg[REG_CPSR] = 0x0000001F;
4446 reg[CPU_HALT_STATE] = CPU_ACTIVE;
4447 reg[CPU_MODE] = MODE_USER;
4448 reg[CHANGED_PC_STATUS] = 0;
/* Slot 5 is the banked sp (see set_cpu_mode). */
4450 reg_mode[MODE_USER][5] = 0x03007F00;
4451 reg_mode[MODE_IRQ][5] = 0x03007FA0;
4452 reg_mode[MODE_FIQ][5] = 0x03007FA0;
4453 reg_mode[MODE_SUPERVISOR][5] = 0x03007FE0;
/* Copy all 32 words of the register file into new_reg. Presumably the
   elided remainder repoints the active register file -- confirm against
   the full source. */
4456 void move_reg(u32 *new_reg)
4460 for(i = 0; i < 32; i++)
4462 new_reg[i] = reg[i];
/* Generate cpu_read_savestate / cpu_write_mem_savestate: serialize the
   first 0x100 bytes of the register file plus the spsr and reg_mode
   arrays to/from a savestate file. */
#define cpu_savestate_builder(type) \
4470 void cpu_##type##_savestate(file_tag_type savestate_file) \
4472 file_##type(savestate_file, reg, 0x100); \
4473 file_##type##_array(savestate_file, spsr); \
4474 file_##type##_array(savestate_file, reg_mode); \
4477 cpu_savestate_builder(read);
4478 cpu_savestate_builder(write_mem);