3 * Copyright (C) 2006 Exophase <exophase@gmail.com>
5 * This program is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU General Public License as
7 * published by the Free Software Foundation; either version 2 of
8 * the License, or (at your option) any later version.
10 * This program is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * General Public License for more details.
15 * You should have received a copy of the GNU General Public License
16 * along with this program; if not, write to the Free Software
17 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21 // - stm reglist writeback when base is in the list needs adjustment
22 // - block memory needs psr swapping and user mode reg swapping
// Profiling counters: number of accesses per memory region, indexed by the
// top 4 bits of the address (address >> 24), one table per access width and
// signedness.  NOTE(review): the visible fast_read/fast_write paths only
// bump these for addresses below 0x10000000 — confirm elsewhere.
u32 memory_region_access_read_u8[16];
u32 memory_region_access_read_s8[16];
u32 memory_region_access_read_u16[16];
u32 memory_region_access_read_s16[16];
u32 memory_region_access_read_u32[16];
u32 memory_region_access_write_u8[16];
u32 memory_region_access_write_u16[16];
u32 memory_region_access_write_u32[16];
40 u32 memory_writes_u16;
41 u32 memory_writes_u32;
43 const u8 bit_count[256] =
45 0, 1, 1, 2, 1, 2, 2, 3, 1, 2, 2, 3, 2, 3, 3, 4, 1, 2, 2, 3, 2, 3, 3,
46 4, 2, 3, 3, 4, 3, 4, 4, 5, 1, 2, 2, 3, 2, 3, 3, 4, 2, 3, 3, 4, 3, 4,
47 4, 5, 2, 3, 3, 4, 3, 4, 4, 5, 3, 4, 4, 5, 4, 5, 5, 6, 1, 2, 2, 3, 2,
48 3, 3, 4, 2, 3, 3, 4, 3, 4, 4, 5, 2, 3, 3, 4, 3, 4, 4, 5, 3, 4, 4, 5,
49 4, 5, 5, 6, 2, 3, 3, 4, 3, 4, 4, 5, 3, 4, 4, 5, 4, 5, 5, 6, 3, 4, 4,
50 5, 4, 5, 5, 6, 4, 5, 5, 6, 5, 6, 6, 7, 1, 2, 2, 3, 2, 3, 3, 4, 2, 3,
51 3, 4, 3, 4, 4, 5, 2, 3, 3, 4, 3, 4, 4, 5, 3, 4, 4, 5, 4, 5, 5, 6, 2,
52 3, 3, 4, 3, 4, 4, 5, 3, 4, 4, 5, 4, 5, 5, 6, 3, 4, 4, 5, 4, 5, 5, 6,
53 4, 5, 5, 6, 5, 6, 6, 7, 2, 3, 3, 4, 3, 4, 4, 5, 3, 4, 4, 5, 4, 5, 5,
54 6, 3, 4, 4, 5, 4, 5, 5, 6, 4, 5, 5, 6, 5, 6, 6, 7, 3, 4, 4, 5, 4, 5,
55 5, 6, 4, 5, 5, 6, 5, 6, 6, 7, 4, 5, 5, 6, 5, 6, 6, 7, 5, 6, 6, 7, 6,
60 #ifdef REGISTER_USAGE_ANALYZE
62 u64 instructions_total = 0;
65 u64 arm_reg_access_total = 0;
66 u64 arm_instructions_total = 0;
68 u64 thumb_reg_freq[16];
69 u64 thumb_reg_access_total = 0;
70 u64 thumb_instructions_total = 0;
// The addition (accumulate) operands of mla/long mla are not counted yet.
74 #define using_register(instruction_set, register, type) \
75 instruction_set##_reg_freq[register]++; \
76 instruction_set##_reg_access_total++ \
78 #define using_register_list(instruction_set, rlist, count) \
81 for(i = 0; i < count; i++) \
83 if((reg_list >> i) & 0x01) \
85 using_register(instruction_set, i, memory_target); \
90 #define using_instruction(instruction_set) \
91 instruction_set##_instructions_total++; \
92 instructions_total++ \
94 int sort_tagged_element(const void *_a, const void *_b)
99 return (int)(b[1] - a[1]);
102 void print_register_usage()
105 u64 arm_reg_freq_tagged[32];
106 u64 thumb_reg_freq_tagged[32];
108 double percent_total = 0.0;
110 for(i = 0; i < 16; i++)
112 arm_reg_freq_tagged[i * 2] = i;
113 arm_reg_freq_tagged[(i * 2) + 1] = arm_reg_freq[i];
114 thumb_reg_freq_tagged[i * 2] = i;
115 thumb_reg_freq_tagged[(i * 2) + 1] = thumb_reg_freq[i];
118 qsort(arm_reg_freq_tagged, 16, sizeof(u64) * 2, sort_tagged_element);
119 qsort(thumb_reg_freq_tagged, 16, sizeof(u64) * 2, sort_tagged_element);
121 printf("ARM register usage (%lf%% ARM instructions):\n",
122 (arm_instructions_total * 100.0) / instructions_total);
123 for(i = 0; i < 16; i++)
125 percent = (arm_reg_freq_tagged[(i * 2) + 1] * 100.0) /
126 arm_reg_access_total;
127 percent_total += percent;
128 printf("r%02d: %lf%% (-- %lf%%)\n",
129 (u32)arm_reg_freq_tagged[(i * 2)], percent, percent_total);
134 printf("\nThumb register usage (%lf%% Thumb instructions):\n",
135 (thumb_instructions_total * 100.0) / instructions_total);
136 for(i = 0; i < 16; i++)
138 percent = (thumb_reg_freq_tagged[(i * 2) + 1] * 100.0) /
139 thumb_reg_access_total;
140 percent_total += percent;
141 printf("r%02d: %lf%% (-- %lf%%)\n",
142 (u32)thumb_reg_freq_tagged[(i * 2)], percent, percent_total);
145 memset(arm_reg_freq, 0, sizeof(u64) * 16);
146 memset(thumb_reg_freq, 0, sizeof(u64) * 16);
147 arm_reg_access_total = 0;
148 thumb_reg_access_total = 0;
153 #define using_register(instruction_set, register, type) \
155 #define using_register_list(instruction_set, rlist, count) \
157 #define using_instruction(instruction_set) \
// Decode an ARM data-processing instruction with a register second operand:
// rn = first operand, rd = destination, rm = register to be shifted.
// The using_register() calls compile away unless REGISTER_USAGE_ANALYZE.
#define arm_decode_data_proc_reg() \
u32 rn = (opcode >> 16) & 0x0F; \
u32 rd = (opcode >> 12) & 0x0F; \
u32 rm = opcode & 0x0F; \
using_register(arm, rd, op_dest); \
using_register(arm, rn, op_src); \
using_register(arm, rm, op_src) \
170 #define arm_decode_data_proc_imm() \
171 u32 rn = (opcode >> 16) & 0x0F; \
172 u32 rd = (opcode >> 12) & 0x0F; \
174 ror(imm, opcode & 0xFF, ((opcode >> 8) & 0x0F) * 2); \
175 using_register(arm, rd, op_dest); \
176 using_register(arm, rn, op_src) \
// Decode an MRS/MSR (register form): psr_field = MSR field mask bits,
// rd = destination for MRS, rm = source register for MSR.
#define arm_decode_psr_reg() \
u32 psr_field = (opcode >> 16) & 0x0F; \
u32 rd = (opcode >> 12) & 0x0F; \
u32 rm = opcode & 0x0F; \
using_register(arm, rd, op_dest); \
using_register(arm, rm, op_src) \
185 #define arm_decode_psr_imm() \
186 u32 psr_field = (opcode >> 16) & 0x0F; \
187 u32 rd = (opcode >> 12) & 0x0F; \
189 ror(imm, opcode & 0xFF, ((opcode >> 8) & 0x0F) * 2); \
190 using_register(arm, rd, op_dest) \
// Decode BX: rn holds the branch target (bit 0 selects Thumb state).
#define arm_decode_branchx() \
u32 rn = opcode & 0x0F; \
using_register(arm, rn, branch_target) \
// Decode MUL/MLA: rd = destination, rn = accumulate operand (MLA only),
// rs and rm = multiply operands.
// Fix: rs is a genuine source operand but was never tallied by
// using_register(); this only affects REGISTER_USAGE_ANALYZE builds —
// in normal builds using_register() expands to nothing.
#define arm_decode_multiply() \
u32 rd = (opcode >> 16) & 0x0F; \
u32 rn = (opcode >> 12) & 0x0F; \
u32 rs = (opcode >> 8) & 0x0F; \
u32 rm = opcode & 0x0F; \
using_register(arm, rd, op_dest); \
using_register(arm, rn, op_src); \
using_register(arm, rs, op_src); \
using_register(arm, rm, op_src) \
// Decode UMULL/SMULL/UMLAL/SMLAL: rdhi:rdlo = 64-bit destination pair,
// rn and rm = multiply operands.  Per the file-top note, the accumulate
// inputs (rdhi/rdlo as sources for the MLAL forms) are not counted.
#define arm_decode_multiply_long() \
u32 rdhi = (opcode >> 16) & 0x0F; \
u32 rdlo = (opcode >> 12) & 0x0F; \
u32 rn = (opcode >> 8) & 0x0F; \
u32 rm = opcode & 0x0F; \
using_register(arm, rdhi, op_dest); \
using_register(arm, rdlo, op_dest); \
using_register(arm, rn, op_src); \
using_register(arm, rm, op_src) \
// Decode SWP/SWPB: rn = base address register, rd = destination of the
// loaded value, rm = value to store.
#define arm_decode_swap() \
u32 rn = (opcode >> 16) & 0x0F; \
u32 rd = (opcode >> 12) & 0x0F; \
u32 rm = opcode & 0x0F; \
using_register(arm, rd, memory_target); \
using_register(arm, rn, memory_base); \
using_register(arm, rm, memory_target) \
// Decode halfword/signed transfer with register offset:
// rn = base, rd = data register, rm = offset register.
#define arm_decode_half_trans_r() \
u32 rn = (opcode >> 16) & 0x0F; \
u32 rd = (opcode >> 12) & 0x0F; \
u32 rm = opcode & 0x0F; \
using_register(arm, rd, memory_target); \
using_register(arm, rn, memory_base); \
using_register(arm, rm, memory_offset) \
// Decode halfword/signed transfer with immediate offset: the 8-bit offset
// is split across bits 11-8 (high nibble) and 3-0 (low nibble).
#define arm_decode_half_trans_of() \
u32 rn = (opcode >> 16) & 0x0F; \
u32 rd = (opcode >> 12) & 0x0F; \
u32 offset = ((opcode >> 4) & 0xF0) | (opcode & 0x0F); \
using_register(arm, rd, memory_target); \
using_register(arm, rn, memory_base) \
// Decode LDR/STR with 12-bit immediate offset: rn = base, rd = data.
#define arm_decode_data_trans_imm() \
u32 rn = (opcode >> 16) & 0x0F; \
u32 rd = (opcode >> 12) & 0x0F; \
u32 offset = opcode & 0x0FFF; \
using_register(arm, rd, memory_target); \
using_register(arm, rn, memory_base) \
// Decode LDR/STR with (shifted) register offset:
// rn = base, rd = data, rm = offset register.
#define arm_decode_data_trans_reg() \
u32 rn = (opcode >> 16) & 0x0F; \
u32 rd = (opcode >> 12) & 0x0F; \
u32 rm = opcode & 0x0F; \
using_register(arm, rd, memory_target); \
using_register(arm, rn, memory_base); \
using_register(arm, rm, memory_offset) \
// Decode LDM/STM: rn = base register, reg_list = 16-bit register bitmask.
#define arm_decode_block_trans() \
u32 rn = (opcode >> 16) & 0x0F; \
u32 reg_list = opcode & 0xFFFF; \
using_register(arm, rn, memory_base); \
using_register_list(arm, reg_list, 16) \
// Decode B/BL: sign-extend the 24-bit field and multiply by 4 in one step
// (<< 8 then >> 6 == sign-extend then << 2).  NOTE(review): relies on
// implementation-defined arithmetic right shift of negative s32 values
// (true on the GCC targets this code is written for).
#define arm_decode_branch() \
s32 offset = ((s32)(opcode & 0xFFFFFF) << 8) >> 6 \
// Decode Thumb shift-by-immediate: imm = shift amount (bits 10-6),
// rs = source register, rd = destination.
#define thumb_decode_shift() \
u32 imm = (opcode >> 6) & 0x1F; \
u32 rs = (opcode >> 3) & 0x07; \
u32 rd = opcode & 0x07; \
using_register(thumb, rd, op_dest); \
using_register(thumb, rs, op_shift) \
// Decode Thumb ADD/SUB (three-register form): rn = second operand
// (bits 8-6), rs = first operand (bits 5-3), rd = destination.
// Fix: the usage tally counted rn twice and never rs; count each source
// once.  Only affects REGISTER_USAGE_ANALYZE builds — in normal builds
// using_register() expands to nothing.
#define thumb_decode_add_sub() \
u32 rn = (opcode >> 6) & 0x07; \
u32 rs = (opcode >> 3) & 0x07; \
u32 rd = opcode & 0x07; \
using_register(thumb, rd, op_dest); \
using_register(thumb, rn, op_src); \
using_register(thumb, rs, op_src) \
// Decode Thumb ADD/SUB with 3-bit immediate: imm (bits 8-6),
// rs = source register, rd = destination.
// NOTE(review): rd is tallied as op_src_dest even though it is written
// only — presumably intentional for the stats; confirm if it matters.
#define thumb_decode_add_sub_imm() \
u32 imm = (opcode >> 6) & 0x07; \
u32 rs = (opcode >> 3) & 0x07; \
u32 rd = opcode & 0x07; \
using_register(thumb, rd, op_src_dest); \
using_register(thumb, rs, op_src) \
// Decode Thumb MOV/CMP/ADD/SUB with 8-bit immediate; the destination
// register lives in bits 10-8.
#define thumb_decode_imm() \
u32 imm = opcode & 0xFF; \
using_register(thumb, ((opcode >> 8) & 0x07), op_dest) \
// Decode Thumb ALU operations (format 4): rd is both source and
// destination, rs is the second operand.
#define thumb_decode_alu_op() \
u32 rs = (opcode >> 3) & 0x07; \
u32 rd = opcode & 0x07; \
using_register(thumb, rd, op_src_dest); \
using_register(thumb, rs, op_src) \
// Decode Thumb hi-register ops (format 5): rs is the full 4-bit source
// (bits 6-3); rd reassembles its high bit from opcode bit 7 (H1) with the
// low 3 bits, allowing access to r8-r15.
#define thumb_decode_hireg_op() \
u32 rs = (opcode >> 3) & 0x0F; \
u32 rd = ((opcode >> 4) & 0x08) | (opcode & 0x07); \
using_register(thumb, rd, op_src_dest); \
using_register(thumb, rs, op_src) \
// Decode Thumb load/store with register offset: ro = offset register,
// rb = base register, rd = data register.
#define thumb_decode_mem_reg() \
u32 ro = (opcode >> 6) & 0x07; \
u32 rb = (opcode >> 3) & 0x07; \
u32 rd = opcode & 0x07; \
using_register(thumb, rd, memory_target); \
using_register(thumb, rb, memory_base); \
using_register(thumb, ro, memory_offset) \
// Decode Thumb load/store with 5-bit immediate offset (unscaled here;
// callers scale by the access width): rb = base, rd = data register.
#define thumb_decode_mem_imm() \
u32 imm = (opcode >> 6) & 0x1F; \
u32 rb = (opcode >> 3) & 0x07; \
u32 rd = opcode & 0x07; \
using_register(thumb, rd, memory_target); \
using_register(thumb, rb, memory_base) \
// Decode Thumb ADD SP, #imm (format 13): 7-bit word offset, SP implicit.
#define thumb_decode_add_sp() \
u32 imm = opcode & 0x7F; \
using_register(thumb, REG_SP, op_dest) \
// Decode Thumb register-list instructions (PUSH/POP/STMIA/LDMIA):
// reg_list = low-register bitmask from bits 7-0.
// Fix: the list tally was passed the undeclared name 'rlist'; it only
// compiled because using_register_list() ignores that parameter and reads
// 'reg_list' directly.  Pass the real variable so the latent breakage
// cannot surface if the list macro ever uses its argument.
#define thumb_decode_rlist() \
u32 reg_list = opcode & 0xFF; \
using_register_list(thumb, reg_list, 8) \
// Decode Thumb conditional branch: signed 8-bit offset (in halfwords,
// scaled by the caller).
#define thumb_decode_branch_cond() \
s32 offset = (s8)(opcode & 0xFF) \

// Decode Thumb SWI: 8-bit comment field.
#define thumb_decode_swi() \
u32 comment = opcode & 0xFF \

// Decode Thumb unconditional branch / BL halves: 11-bit offset field.
#define thumb_decode_branch() \
u32 offset = opcode & 0x07FF \
337 #define get_shift_register(dest) \
338 u32 shift = reg[(opcode >> 8) & 0x0F]; \
339 using_register(arm, ((opcode >> 8) & 0x0F), op_shift); \
// Single-flag calculators.  These expand where the n/z/c/v flag variables
// are in scope; dest/src_a/src_b are u32 expressions, and the
// (signed)/(unsigned) casts pick the comparison type.

// Z: result is zero.
#define calculate_z_flag(dest) \
z_flag = (dest == 0) \

// N: sign bit of the result.
#define calculate_n_flag(dest) \
n_flag = ((signed)dest < 0) \

// C after subtraction: ARM carry = NOT borrow, set when src_b <= src_a.
#define calculate_c_flag_sub(dest, src_a, src_b) \
c_flag = ((unsigned)src_b <= (unsigned)src_a) \

// V after subtraction: signed overflow (operand order predicts a sign the
// result fails to have).
#define calculate_v_flag_sub(dest, src_a, src_b) \
v_flag = ((signed)src_b > (signed)src_a) != ((signed)dest < 0) \

// C after addition: unsigned carry out (result wrapped below src_a).
#define calculate_c_flag_add(dest, src_a, src_b) \
c_flag = ((unsigned)dest < (unsigned)src_a) \

// V after addition: signed overflow.
#define calculate_v_flag_add(dest, src_a, src_b) \
v_flag = ((signed)dest < (signed)src_a) != ((signed)src_b < 0) \
364 #define calculate_reg_sh() \
366 switch((opcode >> 4) & 0x07) \
371 reg_sh = reg[rm] << ((opcode >> 7) & 0x1F); \
378 get_shift_register(reg_sh); \
380 reg_sh = reg_sh << shift; \
389 u32 imm = (opcode >> 7) & 0x1F; \
393 reg_sh = reg[rm] >> imm; \
400 get_shift_register(reg_sh); \
402 reg_sh = reg_sh >> shift; \
411 u32 imm = (opcode >> 7) & 0x1F; \
415 reg_sh = (s32)reg_sh >> 31; \
417 reg_sh = (s32)reg_sh >> imm; \
424 get_shift_register(reg_sh); \
426 reg_sh = (s32)reg_sh >> shift; \
428 reg_sh = (s32)reg_sh >> 31; \
435 u32 imm = (opcode >> 7) & 0x1F; \
438 reg_sh = (reg[rm] >> 1) | (c_flag << 31); \
440 ror(reg_sh, reg[rm], imm); \
447 get_shift_register(reg_sh); \
448 ror(reg_sh, reg_sh, shift); \
453 #define calculate_reg_sh_flags() \
455 switch((opcode >> 4) & 0x07) \
460 u32 imm = (opcode >> 7) & 0x1F; \
465 c_flag = (reg_sh >> (32 - imm)) & 0x01; \
475 get_shift_register(reg_sh); \
481 c_flag = reg_sh & 0x01; \
488 c_flag = (reg_sh >> (32 - shift)) & 0x01; \
498 u32 imm = (opcode >> 7) & 0x1F; \
502 c_flag = reg_sh >> 31; \
507 c_flag = (reg_sh >> (imm - 1)) & 0x01; \
516 get_shift_register(reg_sh); \
522 c_flag = (reg_sh >> 31) & 0x01; \
529 c_flag = (reg_sh >> (shift - 1)) & 0x01; \
539 u32 imm = (opcode >> 7) & 0x1F; \
543 reg_sh = (s32)reg_sh >> 31; \
544 c_flag = reg_sh & 0x01; \
548 c_flag = (reg_sh >> (imm - 1)) & 0x01; \
549 reg_sh = (s32)reg_sh >> imm; \
557 get_shift_register(reg_sh); \
562 reg_sh = (s32)reg_sh >> 31; \
563 c_flag = reg_sh & 0x01; \
567 c_flag = (reg_sh >> (shift - 1)) & 0x01; \
568 reg_sh = (s32)reg_sh >> shift; \
577 u32 imm = (opcode >> 7) & 0x1F; \
581 u32 old_c_flag = c_flag; \
582 c_flag = reg_sh & 0x01; \
583 reg_sh = (reg_sh >> 1) | (old_c_flag << 31); \
587 c_flag = (reg_sh >> (imm - 1)) & 0x01; \
588 ror(reg_sh, reg_sh, imm); \
596 get_shift_register(reg_sh); \
599 c_flag = (reg_sh >> (shift - 1)) & 0x01; \
600 ror(reg_sh, reg_sh, shift); \
606 #define calculate_reg_offset() \
608 switch((opcode >> 5) & 0x03) \
613 reg_offset = reg[rm] << ((opcode >> 7) & 0x1F); \
620 u32 imm = (opcode >> 7) & 0x1F; \
624 reg_offset = reg[rm] >> imm; \
631 u32 imm = (opcode >> 7) & 0x1F; \
633 reg_offset = (s32)reg[rm] >> 31; \
635 reg_offset = (s32)reg[rm] >> imm; \
642 u32 imm = (opcode >> 7) & 0x1F; \
644 reg_offset = (reg[rm] >> 1) | (c_flag << 31); \
646 ror(reg_offset, reg[rm], imm); \
// Combined flag updates for arithmetic and logic results.

// N/Z/C/V after an addition.
#define calculate_flags_add(dest, src_a, src_b) \
calculate_z_flag(dest); \
calculate_n_flag(dest); \
calculate_c_flag_add(dest, src_a, src_b); \
calculate_v_flag_add(dest, src_a, src_b) \

// N/Z/C/V after a subtraction.
#define calculate_flags_sub(dest, src_a, src_b) \
calculate_z_flag(dest); \
calculate_n_flag(dest); \
calculate_c_flag_sub(dest, src_a, src_b); \
calculate_v_flag_sub(dest, src_a, src_b) \

// N/Z only, for logical operations (C is handled by the shifter).
#define calculate_flags_logic(dest) \
calculate_z_flag(dest); \
calculate_n_flag(dest) \
// Unpack the NZCV bits from CPSR[31:28] into the separate flag variables.
#define extract_flags() \
n_flag = reg[REG_CPSR] >> 31; \
z_flag = (reg[REG_CPSR] >> 30) & 0x01; \
c_flag = (reg[REG_CPSR] >> 29) & 0x01; \
v_flag = (reg[REG_CPSR] >> 28) & 0x01; \

// Repack the flag variables into CPSR, preserving only the low byte
// (mode/IRQ/state bits).  NOTE(review): bits 27-8 are cleared here —
// presumably unused by this core; confirm.
#define collapse_flags() \
reg[REG_CPSR] = (n_flag << 31) | (z_flag << 30) | (c_flag << 29) | \
(v_flag << 28) | (reg[REG_CPSR] & 0xFF) \
// Look up the backing pointer and size limit for the memory region that
// contains 'address' (region index = top 8 bits of the address).
#define memory_region(r_dest, l_dest, address) \
r_dest = memory_regions[address >> 24]; \
l_dest = memory_limits[address >> 24] \

// Refresh the cached region/limit for the current program counter.
#define pc_region() \
memory_region(pc_region, pc_limit, pc) \
685 #define check_pc_region() \
686 new_pc_region = (pc >> 15); \
687 if(new_pc_region != pc_region) \
689 pc_region = new_pc_region; \
690 pc_address_block = memory_map_read[new_pc_region]; \
692 if(pc_address_block == NULL) \
693 pc_address_block = load_gamepak_page(pc_region & 0x3FF); \
// Branch-target statistics; a target is "high frequency" once it has been
// hit more than BRANCH_ACTIVITY_THRESHOLD times.
u32 branch_targets = 0;
u32 high_frequency_branch_targets = 0;

#define BRANCH_ACTIVITY_THRESHOLD 50
701 #define arm_update_pc() \
704 #define arm_pc_offset(val) \
708 #define arm_pc_offset_update(val) \
712 #define arm_pc_offset_update_direct(val) \
717 // It should be okay to still generate result flags, spsr will overwrite them.
718 // This is pretty infrequent (returning from interrupt handlers, et al) so
719 // probably not worth optimizing for.
721 #define check_for_interrupts() \
722 if((io_registers[REG_IE] & io_registers[REG_IF]) && \
723 io_registers[REG_IME] && ((reg[REG_CPSR] & 0x80) == 0)) \
725 reg_mode[MODE_IRQ][6] = reg[REG_PC] + 4; \
726 spsr[MODE_IRQ] = reg[REG_CPSR]; \
727 reg[REG_CPSR] = 0xD2; \
728 reg[REG_PC] = 0x00000018; \
730 set_cpu_mode(MODE_IRQ); \
734 #define arm_spsr_restore() \
737 if(reg[CPU_MODE] != MODE_USER) \
739 reg[REG_CPSR] = spsr[reg[CPU_MODE]]; \
741 set_cpu_mode(cpu_modes[reg[REG_CPSR] & 0x1F]); \
742 check_for_interrupts(); \
746 if(reg[REG_CPSR] & 0x20) \
750 #define arm_data_proc_flags_reg() \
751 arm_decode_data_proc_reg(); \
752 calculate_reg_sh_flags() \
754 #define arm_data_proc_reg() \
755 arm_decode_data_proc_reg(); \
758 #define arm_data_proc_flags_imm() \
759 arm_decode_data_proc_imm() \
761 #define arm_data_proc_imm() \
762 arm_decode_data_proc_imm() \
764 #define arm_data_proc(expr, type) \
768 arm_data_proc_##type(); \
779 #define flags_vars(src_a, src_b) \
781 const u32 _sa = src_a; \
782 const u32 _sb = src_b \
784 #define arm_data_proc_logic_flags(expr, type) \
787 arm_data_proc_flags_##type(); \
789 calculate_flags_logic(dest); \
792 arm_spsr_restore(); \
795 #define arm_data_proc_add_flags(src_a, src_b, type) \
798 arm_data_proc_##type(); \
799 flags_vars(src_a, src_b); \
801 calculate_flags_add(dest, _sa, _sb); \
804 arm_spsr_restore(); \
807 #define arm_data_proc_sub_flags(src_a, src_b, type) \
810 arm_data_proc_##type(); \
811 flags_vars(src_a, src_b); \
813 calculate_flags_sub(dest, _sa, _sb); \
816 arm_spsr_restore(); \
819 #define arm_data_proc_test_logic(expr, type) \
822 arm_data_proc_flags_##type(); \
824 calculate_flags_logic(dest); \
828 #define arm_data_proc_test_add(src_a, src_b, type) \
831 arm_data_proc_##type(); \
832 flags_vars(src_a, src_b); \
834 calculate_flags_add(dest, _sa, _sb); \
838 #define arm_data_proc_test_sub(src_a, src_b, type) \
841 arm_data_proc_##type(); \
842 flags_vars(src_a, src_b); \
844 calculate_flags_sub(dest, _sa, _sb); \
// Flag-setting variants for multiplies, selected by token pasting
// (arm_multiply_flags_##flags).  The "_no" versions expand to nothing.

// MULS/MLAS: set N and Z from the 32-bit result.
#define arm_multiply_flags_yes(_dest) \
calculate_z_flag(_dest); \
calculate_n_flag(_dest); \

#define arm_multiply_flags_no(_dest) \

// Long multiply with S bit: Z tests both halves, N comes from the high word.
#define arm_multiply_long_flags_yes(_dest_lo, _dest_hi) \
z_flag = (_dest_lo == 0) & (_dest_hi == 0); \
calculate_n_flag(_dest_hi) \

#define arm_multiply_long_flags_no(_dest_lo, _dest_hi) \
860 #define arm_multiply(add_op, flags) \
863 arm_decode_multiply(); \
864 dest = (reg[rm] * reg[rs]) add_op; \
865 arm_multiply_flags_##flags(dest); \
870 #define arm_multiply_long_addop(type) \
871 + ((type##64)((((type##64)reg[rdhi]) << 32) | reg[rdlo])); \
873 #define arm_multiply_long(add_op, flags, type) \
878 arm_decode_multiply_long(); \
879 dest = ((type##64)((type##32)reg[rm]) * \
880 (type##64)((type##32)reg[rn])) add_op; \
881 dest_lo = (u32)dest; \
882 dest_hi = (u32)(dest >> 32); \
883 arm_multiply_long_flags_##flags(dest_lo, dest_hi); \
884 reg[rdlo] = dest_lo; \
885 reg[rdhi] = dest_hi; \
889 const u32 psr_masks[16] =
891 0x00000000, 0x000000FF, 0x0000FF00, 0x0000FFFF, 0x00FF0000,
892 0x00FF00FF, 0x00FFFF00, 0x00FFFFFF, 0xFF000000, 0xFF0000FF,
893 0xFF00FF00, 0xFF00FFFF, 0xFFFF0000, 0xFFFF00FF, 0xFFFFFF00,
897 #define arm_psr_read(dummy, psr_reg) \
901 #define arm_psr_store_cpsr(source) \
902 reg[REG_CPSR] = (source & store_mask) | (reg[REG_CPSR] & (~store_mask)); \
904 if(store_mask & 0xFF) \
906 set_cpu_mode(cpu_modes[reg[REG_CPSR] & 0x1F]); \
907 check_for_interrupts(); \
// MSR to SPSR: merge 'source' into the current mode's saved PSR under
// store_mask (declared by arm_psr_store below).
#define arm_psr_store_spsr(source) \
u32 _psr = spsr[reg[CPU_MODE]]; \
spsr[reg[CPU_MODE]] = (source & store_mask) | (_psr & (~store_mask)) \

// Dispatch an MSR: build the field write mask from psr_field, then store
// into CPSR or SPSR via token pasting.
#define arm_psr_store(source, psr_reg) \
const u32 store_mask = psr_masks[psr_field]; \
arm_psr_store_##psr_reg(source) \

// MSR source selectors: register form reads reg[rm], immediate form uses
// the decoded rotated immediate.
#define arm_psr_src_reg reg[rm]
#define arm_psr_src_imm imm
922 #define arm_psr(op_type, transfer_type, psr_reg) \
924 arm_decode_psr_##op_type(); \
926 arm_psr_##transfer_type(arm_psr_src_##op_type, psr_reg); \
929 #define arm_data_trans_reg() \
930 arm_decode_data_trans_reg(); \
931 calculate_reg_offset() \
933 #define arm_data_trans_imm() \
934 arm_decode_data_trans_imm() \
936 #define arm_data_trans_half_reg() \
937 arm_decode_half_trans_r() \
939 #define arm_data_trans_half_imm() \
940 arm_decode_half_trans_of() \
// Fast-path gate masks: (address & mask) == 0 requires both that the
// address is below 0x10000000 (top nibble clear, i.e. a mapped GBA region)
// and that it is naturally aligned for the access width (low 0/1/2 bits).
#define aligned_address_mask8 0xF0000000
#define aligned_address_mask16 0xF0000001
#define aligned_address_mask32 0xF0000003
946 #define fast_read_memory(size, type, address, dest) \
949 u32 _address = address; \
951 if(_address < 0x10000000) \
953 memory_region_access_read_##type[_address >> 24]++; \
954 memory_reads_##type++; \
956 if(((_address >> 24) == 0) && (pc >= 0x4000)) \
958 dest = *((type *)((u8 *)&bios_read_protect + (_address & 0x03))); \
962 if(((_address & aligned_address_mask##size) == 0) && \
963 (map = memory_map_read[address >> 15])) \
965 dest = *((type *)((u8 *)map + (_address & 0x7FFF))); \
969 dest = (type)read_memory##size(_address); \
973 #define fast_read_memory_s16(address, dest) \
976 u32 _address = address; \
977 if(_address < 0x10000000) \
979 memory_region_access_read_s16[_address >> 24]++; \
980 memory_reads_s16++; \
982 if(((_address & aligned_address_mask16) == 0) && \
983 (map = memory_map_read[_address >> 15])) \
985 dest = *((s16 *)((u8 *)map + (_address & 0x7FFF))); \
989 dest = (s16)read_memory16_signed(_address); \
994 #define fast_write_memory(size, type, address, value) \
997 u32 _address = (address) & ~(aligned_address_mask##size & 0x03); \
998 if(_address < 0x10000000) \
1000 memory_region_access_write_##type[_address >> 24]++; \
1001 memory_writes_##type++; \
1004 if(((_address & aligned_address_mask##size) == 0) && \
1005 (map = memory_map_write[_address >> 15])) \
1007 *((type *)((u8 *)map + (_address & 0x7FFF))) = value; \
1011 cpu_alert = write_memory##size(_address, value); \
1017 #define load_aligned32(address, dest) \
1019 u8 *map = memory_map_read[address >> 15]; \
1020 if(address < 0x10000000) \
1022 memory_region_access_read_u32[address >> 24]++; \
1023 memory_reads_u32++; \
1027 dest = address32(map, address & 0x7FFF); \
1031 dest = read_memory32(address); \
1035 #define store_aligned32(address, value) \
1037 u8 *map = memory_map_write[address >> 15]; \
1038 if(address < 0x10000000) \
1040 memory_region_access_write_u32[address >> 24]++; \
1041 memory_writes_u32++; \
1045 address32(map, address & 0x7FFF) = value; \
1049 cpu_alert = write_memory32(address, value); \
// Width/signedness-specific wrappers over the generic fast read/write
// paths; these are the entry points the instruction handlers use.

#define load_memory_u8(address, dest) \
fast_read_memory(8, u8, address, dest) \

#define load_memory_u16(address, dest) \
fast_read_memory(16, u16, address, dest) \

#define load_memory_u32(address, dest) \
fast_read_memory(32, u32, address, dest) \

#define load_memory_s8(address, dest) \
fast_read_memory(8, s8, address, dest) \

// Signed halfword loads go through a dedicated path (misaligned LDRSH has
// special behavior handled by read_memory16_signed).
#define load_memory_s16(address, dest) \
fast_read_memory_s16(address, dest) \

#define store_memory_u8(address, value) \
fast_write_memory(8, u8, address, value) \

#define store_memory_u16(address, value) \
fast_write_memory(16, u16, address, value) \

#define store_memory_u32(address, value) \
fast_write_memory(32, u32, address, value) \
// Base-register writeback selectors for single data transfers; 'off_op'
// is the post-index adjustment expression (may be empty).
#define arm_access_memory_writeback_yes(off_op) \
reg[rn] = address off_op \

#define arm_access_memory_writeback_no(off_op) \
1086 #define arm_access_memory_pc_preadjust_load() \
1088 #define arm_access_memory_pc_preadjust_store() \
1089 u32 reg_op = reg[rd]; \
1093 #define arm_access_memory_pc_postadjust_load() \
1096 #define arm_access_memory_pc_postadjust_store() \
// Data-operand selectors: loads write straight into reg[rd]; stores use
// the reg_op snapshot taken before any PC adjustment.
#define load_reg_op reg[rd] \

#define store_reg_op reg_op \
1102 #define arm_access_memory(access_type, off_op, off_type, mem_type, \
1106 arm_data_trans_##off_type(); \
1107 u32 address = reg[rn] off_op; \
1108 arm_access_memory_pc_preadjust_##access_type(); \
1110 arm_pc_offset(-4); \
1111 arm_access_memory_writeback_##wb(wb_off_op); \
1112 access_type##_memory_##mem_type(address, access_type##_reg_op); \
1113 arm_access_memory_pc_postadjust_##access_type(); \
// Popcount of a 16-bit register list via the byte lookup table above.
#define word_bit_count(word) \
(bit_count[word >> 8] + bit_count[word & 0xFF]) \

// Debug hooks for LDM/STM with the S bit set; the "no" variant is silent.
#define sprint_no(access_type, offset_type, writeback_type) \

#define sprint_yes(access_type, offset_type, writeback_type) \
printf("sbit on %s %s %s\n", #access_type, #offset_type, #writeback_type) \
1124 #define arm_block_writeback_load() \
1125 if(!((reg_list >> rn) & 0x01)) \
1127 reg[rn] = address; \
1130 #define arm_block_writeback_store() \
1133 #define arm_block_writeback_yes(access_type) \
1134 arm_block_writeback_##access_type() \
1136 #define arm_block_writeback_no(access_type) \
// Raw 32-bit access within the already-resolved 32KB page for block
// transfers; 'offset' advances through the register list.
#define load_block_memory(address, dest) \
dest = address32(address_region, (address + offset) & 0x7FFF) \

#define store_block_memory(address, dest) \
address32(address_region, (address + offset) & 0x7FFF) = dest \

// Starting address for descending LDM/STM: DA (after) starts one word
// higher than DB (before); both end just below the base.
#define arm_block_memory_offset_down_a() \
(base - (word_bit_count(reg_list) * 4) + 4) \

#define arm_block_memory_offset_down_b() \
(base - (word_bit_count(reg_list) * 4)) \
1150 #define arm_block_memory_offset_no() \
1153 #define arm_block_memory_offset_up() \
// LDM/STM base writeback: move the base down/up by 4 bytes per
// transferred register.
#define arm_block_memory_writeback_down() \
reg[rn] = base - (word_bit_count(reg_list) * 4) \

#define arm_block_memory_writeback_up() \
reg[rn] = base + (word_bit_count(reg_list) * 4) \
1162 #define arm_block_memory_writeback_no() \
1164 #define arm_block_memory_load_pc() \
1165 load_aligned32(address, pc); \
// STM with r15 in the list: stores pc + 4.  NOTE(review): 'pc' here is the
// interpreter's current value — the +4 presumably models the ARM7's
// "PC + 12" store quirk relative to it; confirm against the core loop.
#define arm_block_memory_store_pc() \
store_aligned32(address, pc + 4) \
1171 #define arm_block_memory(access_type, offset_type, writeback_type, s_bit) \
1173 arm_decode_block_trans(); \
1174 u32 base = reg[rn]; \
1175 u32 address = arm_block_memory_offset_##offset_type() & 0xFFFFFFFC; \
1178 arm_block_memory_writeback_##writeback_type(); \
1180 for(i = 0; i < 15; i++) \
1182 if((reg_list >> i) & 0x01) \
1184 access_type##_aligned32(address, reg[i]); \
1190 if(reg_list & 0x8000) \
1192 arm_block_memory_##access_type##_pc(); \
1196 #define arm_swap(type) \
1198 arm_decode_swap(); \
1200 load_memory_##type(reg[rn], temp); \
1201 store_memory_##type(reg[rn], reg[rm]); \
1206 #define arm_next_instruction() \
1209 goto skip_instruction; \
1212 #define thumb_update_pc() \
1215 #define thumb_pc_offset(val) \
1219 #define thumb_pc_offset_update(val) \
1223 #define thumb_pc_offset_update_direct(val) \
1227 // Types: add_sub, add_sub_imm, alu_op, imm
1228 // Affects N/Z/C/V flags
1230 #define thumb_add(type, dest_reg, src_a, src_b) \
1232 thumb_decode_##type(); \
1233 const u32 _sa = src_a; \
1234 const u32 _sb = src_b; \
1235 u32 dest = _sa + _sb; \
1236 calculate_flags_add(dest, src_a, src_b); \
1237 reg[dest_reg] = dest; \
1238 thumb_pc_offset(2); \
1241 #define thumb_add_noflags(type, dest_reg, src_a, src_b) \
1243 thumb_decode_##type(); \
1244 u32 dest = src_a + src_b; \
1245 reg[dest_reg] = dest; \
1246 thumb_pc_offset(2); \
1249 #define thumb_sub(type, dest_reg, src_a, src_b) \
1251 thumb_decode_##type(); \
1252 const u32 _sa = src_a; \
1253 const u32 _sb = src_b; \
1254 u32 dest = _sa - _sb; \
1255 calculate_flags_sub(dest, src_a, src_b); \
1256 reg[dest_reg] = dest; \
1257 thumb_pc_offset(2); \
1260 // Affects N/Z flags
1262 #define thumb_logic(type, dest_reg, expr) \
1264 thumb_decode_##type(); \
1266 calculate_flags_logic(dest); \
1267 reg[dest_reg] = dest; \
1268 thumb_pc_offset(2); \
1271 // Decode types: shift, alu_op
1272 // Operation types: lsl, lsr, asr, ror
1273 // Affects N/Z/C flags
1275 #define thumb_shift_lsl_reg() \
1276 u32 shift = reg[rs]; \
1277 u32 dest = reg[rd]; \
1283 c_flag = dest & 0x01; \
1290 c_flag = (dest >> (32 - shift)) & 0x01; \
1295 #define thumb_shift_lsr_reg() \
1296 u32 shift = reg[rs]; \
1297 u32 dest = reg[rd]; \
1303 c_flag = dest >> 31; \
1310 c_flag = (dest >> (shift - 1)) & 0x01; \
1315 #define thumb_shift_asr_reg() \
1316 u32 shift = reg[rs]; \
1317 u32 dest = reg[rd]; \
1322 dest = (s32)dest >> 31; \
1323 c_flag = dest & 0x01; \
1327 c_flag = (dest >> (shift - 1)) & 0x01; \
1328 dest = (s32)dest >> shift; \
1332 #define thumb_shift_ror_reg() \
1333 u32 shift = reg[rs]; \
1334 u32 dest = reg[rd]; \
1337 c_flag = (dest >> (shift - 1)) & 0x01; \
1338 ror(dest, dest, shift); \
1341 #define thumb_shift_lsl_imm() \
1342 u32 dest = reg[rs]; \
1345 c_flag = (dest >> (32 - imm)) & 0x01; \
1349 #define thumb_shift_lsr_imm() \
1354 c_flag = reg[rs] >> 31; \
1359 c_flag = (dest >> (imm - 1)) & 0x01; \
1363 #define thumb_shift_asr_imm() \
1367 dest = (s32)reg[rs] >> 31; \
1368 c_flag = dest & 0x01; \
1373 c_flag = (dest >> (imm - 1)) & 0x01; \
1374 dest = (s32)dest >> imm; \
1377 #define thumb_shift_ror_imm() \
1378 u32 dest = reg[rs]; \
1381 u32 old_c_flag = c_flag; \
1382 c_flag = dest & 0x01; \
1383 dest = (dest >> 1) | (old_c_flag << 31); \
1387 c_flag = (dest >> (imm - 1)) & 0x01; \
1388 ror(dest, dest, imm); \
1391 #define thumb_shift(decode_type, op_type, value_type) \
1393 thumb_decode_##decode_type(); \
1394 thumb_shift_##op_type##_##value_type(); \
1395 calculate_flags_logic(dest); \
1397 thumb_pc_offset(2); \
1400 #define thumb_test_add(type, src_a, src_b) \
1402 thumb_decode_##type(); \
1403 const u32 _sa = src_a; \
1404 const u32 _sb = src_b; \
1405 u32 dest = _sa + _sb; \
1406 calculate_flags_add(dest, src_a, src_b); \
1407 thumb_pc_offset(2); \
1410 #define thumb_test_sub(type, src_a, src_b) \
1412 thumb_decode_##type(); \
1413 const u32 _sa = src_a; \
1414 const u32 _sb = src_b; \
1415 u32 dest = _sa - _sb; \
1416 calculate_flags_sub(dest, src_a, src_b); \
1417 thumb_pc_offset(2); \
1420 #define thumb_test_logic(type, expr) \
1422 thumb_decode_##type(); \
1424 calculate_flags_logic(dest); \
1425 thumb_pc_offset(2); \
1428 #define thumb_hireg_op(expr) \
1430 thumb_pc_offset(4); \
1431 thumb_decode_hireg_op(); \
1433 thumb_pc_offset(-2); \
1436 reg[REG_PC] = dest & ~0x01; \
1437 thumb_update_pc(); \
1445 // Operation types: imm, mem_reg, mem_imm
1447 #define thumb_access_memory(access_type, op_type, address, reg_op, \
1450 thumb_decode_##op_type(); \
1451 access_type##_memory_##mem_type(address, reg_op); \
1452 thumb_pc_offset(2); \
// Pre-transfer base adjustments for Thumb block transfers, selected by
// token pasting; each transferred register occupies 4 bytes.

#define thumb_block_address_preadjust_no_op() \

#define thumb_block_address_preadjust_up() \
address += bit_count[reg_list] * 4 \

#define thumb_block_address_preadjust_down() \
address -= bit_count[reg_list] * 4 \

// PUSH with LR: one extra word beyond the register list.
#define thumb_block_address_preadjust_push_lr() \
address -= (bit_count[reg_list] + 1) * 4 \
1466 #define thumb_block_address_postadjust_no_op() \
1468 #define thumb_block_address_postadjust_up() \
1471 #define thumb_block_address_postadjust_down() \
1474 #define thumb_block_address_postadjust_pop_pc() \
1475 load_memory_u32(address + offset, pc); \
1478 address += offset + 4 \
1480 #define thumb_block_address_postadjust_push_lr() \
1481 store_memory_u32(address + offset, reg[REG_LR]); \
1483 #define thumb_block_memory_wb_load(base_reg) \
1484 if(!((reg_list >> base_reg) & 0x01)) \
1486 reg[base_reg] = address; \
// Store-multiple always writes the adjusted address back to the base.
#define thumb_block_memory_wb_store(base_reg) \
reg[base_reg] = address \
1492 #define thumb_block_memory(access_type, pre_op, post_op, base_reg) \
1496 thumb_decode_rlist(); \
1497 using_register(thumb, base_reg, memory_base); \
1498 u32 address = reg[base_reg] & ~0x03; \
1499 thumb_block_address_preadjust_##pre_op(); \
1501 for(i = 0; i < 8; i++) \
1503 if((reg_list >> i) & 1) \
1505 access_type##_aligned32(address + offset, reg[i]); \
1510 thumb_pc_offset(2); \
1512 thumb_block_address_postadjust_##post_op(); \
1513 thumb_block_memory_wb_##access_type(base_reg); \
1516 #define thumb_conditional_branch(condition) \
1518 thumb_decode_branch_cond(); \
1521 thumb_pc_offset((offset * 2) + 4); \
1525 thumb_pc_offset(2); \
1529 // When a mode change occurs from non-FIQ to non-FIQ retire the current
1530 // reg[13] and reg[14] into reg_mode[cpu_mode][5] and reg_mode[cpu_mode][6]
1531 // respectively and load into reg[13] and reg[14] reg_mode[new_mode][5] and
1532 // reg_mode[new_mode][6]. When swapping to/from FIQ retire/load reg[8]
1533 // through reg[14] to/from reg_mode[MODE_FIQ][0] through reg_mode[MODE_FIQ][6].
1539 MODE_INVALID, MODE_INVALID, MODE_INVALID, MODE_INVALID, MODE_INVALID,
1540 MODE_INVALID, MODE_INVALID, MODE_INVALID, MODE_INVALID, MODE_INVALID,
1541 MODE_INVALID, MODE_INVALID, MODE_INVALID, MODE_INVALID, MODE_INVALID,
1542 MODE_INVALID, MODE_USER, MODE_FIQ, MODE_IRQ, MODE_SUPERVISOR, MODE_INVALID,
1543 MODE_INVALID, MODE_INVALID, MODE_ABORT, MODE_INVALID, MODE_INVALID,
1544 MODE_INVALID, MODE_INVALID, MODE_UNDEFINED, MODE_INVALID, MODE_INVALID,
// CPSR mode-field encoding for each internal cpu_mode index
// (user, fiq, irq, supervisor, abort, undefined, system).
u32 cpu_modes_cpsr[7] = { 0x10, 0x11, 0x12, 0x13, 0x17, 0x1B, 0x1F };
1550 // When switching modes set spsr[new_mode] to cpsr. Modifying PC as the
1551 // target of a data proc instruction will set cpsr to spsr[cpu_mode].
// Register file backing store; 'reg' always points at the active bank
// (64 slots: r0-r15 plus the extra state words used by this core).
u32 initial_reg[64];
u32 *reg = initial_reg;
1557 // ARM/Thumb mode is stored in the flags directly, this is simpler than
1558 // shadowing it since it has a constant 1bit representation.
// Register names for the disassembler/debug output, space-padded to a
// fixed 3-character width so columns line up.
1560 char *reg_names[16] =
1562 " r0", " r1", " r2", " r3", " r4", " r5", " r6", " r7",
1563 " r8", " r9", "r10", " fp", " ip", " sp", " lr", " pc"
// Human-readable mode names, indexed by the internal CPU_MODE enum
// (same ordering as cpu_modes_cpsr above).
1566 char *cpu_mode_names[] =
1568 "user", "irq", "fiq", "svsr", "abrt", "undf", "invd"
/* Interpreter core: fetch, decode, and execute one ARM instruction at    */
/* the current pc.  The opcode is read from the cached pc_address_block;  */
/* the condition field (bits 31:28) is tested first - a failing condition */
/* falls through to arm_next_instruction() - then bits 27:20 select the   */
/* handler in the big switch.  NOTE(review): this listing is elided; the  */
/* case labels, braces, and break statements live on lines not shown.     */
1572 #define execute_arm_instruction() \
1573 using_instruction(arm); \
1574 check_pc_region(); \
1576 opcode = address32(pc_address_block, (pc & 0x7FFF)); \
1577 condition = opcode >> 28; \
1584 arm_next_instruction(); \
1590 arm_next_instruction(); \
1596 arm_next_instruction(); \
1602 arm_next_instruction(); \
1608 arm_next_instruction(); \
1614 arm_next_instruction(); \
1620 arm_next_instruction(); \
1626 arm_next_instruction(); \
1631 if((c_flag == 0) | z_flag) \
1632 arm_next_instruction(); \
1637 if(c_flag & (z_flag ^ 1)) \
1638 arm_next_instruction(); \
1643 if(n_flag != v_flag) \
1644 arm_next_instruction(); \
1649 if(n_flag == v_flag) \
1650 arm_next_instruction(); \
1655 if(z_flag | (n_flag != v_flag)) \
1656 arm_next_instruction(); \
1661 if((z_flag == 0) & (n_flag == v_flag)) \
1662 arm_next_instruction(); \
1670 /* Reserved - treat as "never" */ \
1672 arm_next_instruction(); \
/* Condition passed: dispatch on opcode bits 27:20. */ \
1676 switch((opcode >> 20) & 0xFF) \
1679 if((opcode & 0x90) == 0x90) \
1683 /* STRH rd, [rn], -rm */ \
1684 arm_access_memory(store, no_op, half_reg, u16, yes, - reg[rm]); \
1688 /* MUL rd, rm, rs */ \
1689 arm_multiply(no_op, no); \
1694 /* AND rd, rn, reg_op */ \
1695 arm_data_proc(reg[rn] & reg_sh, reg); \
1700 if((opcode & 0x90) == 0x90) \
1702 switch((opcode >> 5) & 0x03) \
1705 /* MULS rd, rm, rs */ \
1706 arm_multiply(no_op, yes); \
1710 /* LDRH rd, [rn], -rm */ \
1711 arm_access_memory(load, no_op, half_reg, u16, yes, - reg[rm]); \
1715 /* LDRSB rd, [rn], -rm */ \
1716 arm_access_memory(load, no_op, half_reg, s8, yes, - reg[rm]); \
1720 /* LDRSH rd, [rn], -rm */ \
1721 arm_access_memory(load, no_op, half_reg, s16, yes, - reg[rm]); \
1727 /* ANDS rd, rn, reg_op */ \
1728 arm_data_proc_logic_flags(reg[rn] & reg_sh, reg); \
1733 if((opcode & 0x90) == 0x90) \
1737 /* STRH rd, [rn], -rm */ \
1738 arm_access_memory(store, no_op, half_reg, u16, yes, - reg[rm]); \
1742 /* MLA rd, rm, rs, rn */ \
1743 arm_multiply(+ reg[rn], no); \
1748 /* EOR rd, rn, reg_op */ \
1749 arm_data_proc(reg[rn] ^ reg_sh, reg); \
1754 if((opcode & 0x90) == 0x90) \
1756 switch((opcode >> 5) & 0x03) \
1759 /* MLAS rd, rm, rs, rn */ \
1760 arm_multiply(+ reg[rn], yes); \
1764 /* LDRH rd, [rn], -rm */ \
1765 arm_access_memory(load, no_op, half_reg, u16, yes, - reg[rm]); \
1769 /* LDRSB rd, [rn], -rm */ \
1770 arm_access_memory(load, no_op, half_reg, s8, yes, - reg[rm]); \
1774 /* LDRSH rd, [rn], -rm */ \
1775 arm_access_memory(load, no_op, half_reg, s16, yes, - reg[rm]); \
1781 /* EORS rd, rn, reg_op */ \
1782 arm_data_proc_logic_flags(reg[rn] ^ reg_sh, reg); \
1787 if((opcode & 0x90) == 0x90) \
1789 /* STRH rd, [rn], -imm */ \
1790 arm_access_memory(store, no_op, half_imm, u16, yes, - offset); \
1794 /* SUB rd, rn, reg_op */ \
1795 arm_data_proc(reg[rn] - reg_sh, reg); \
1800 if((opcode & 0x90) == 0x90) \
1802 switch((opcode >> 5) & 0x03) \
1805 /* LDRH rd, [rn], -imm */ \
1806 arm_access_memory(load, no_op, half_imm, u16, yes, - offset); \
1810 /* LDRSB rd, [rn], -imm */ \
1811 arm_access_memory(load, no_op, half_imm, s8, yes, - offset); \
1815 /* LDRSH rd, [rn], -imm */ \
1816 arm_access_memory(load, no_op, half_imm, s16, yes, - offset); \
1822 /* SUBS rd, rn, reg_op */ \
1823 arm_data_proc_sub_flags(reg[rn], reg_sh, reg); \
1828 if((opcode & 0x90) == 0x90) \
1830 /* STRH rd, [rn], -imm */ \
1831 arm_access_memory(store, no_op, half_imm, u16, yes, - offset); \
1835 /* RSB rd, rn, reg_op */ \
1836 arm_data_proc(reg_sh - reg[rn], reg); \
1841 if((opcode & 0x90) == 0x90) \
1843 switch((opcode >> 5) & 0x03) \
1846 /* LDRH rd, [rn], -imm */ \
1847 arm_access_memory(load, no_op, half_imm, u16, yes, - offset); \
1851 /* LDRSB rd, [rn], -imm */ \
1852 arm_access_memory(load, no_op, half_imm, s8, yes, - offset); \
1856 /* LDRSH rd, [rn], -imm */ \
1857 arm_access_memory(load, no_op, half_imm, s16, yes, - offset); \
1863 /* RSBS rd, rn, reg_op */ \
1864 arm_data_proc_sub_flags(reg_sh, reg[rn], reg); \
1869 if((opcode & 0x90) == 0x90) \
1873 /* STRH rd, [rn], +rm */ \
1874 arm_access_memory(store, no_op, half_reg, u16, yes, + reg[rm]); \
1878 /* UMULL rdlo, rdhi, rm, rs */ \
1879 arm_multiply_long(no_op, no, u); \
1884 /* ADD rd, rn, reg_op */ \
1885 arm_data_proc(reg[rn] + reg_sh, reg); \
1890 if((opcode & 0x90) == 0x90) \
1892 switch((opcode >> 5) & 0x03) \
1895 /* UMULLS rdlo, rdhi, rm, rs */ \
1896 arm_multiply_long(no_op, yes, u); \
1900 /* LDRH rd, [rn], +rm */ \
1901 arm_access_memory(load, no_op, half_reg, u16, yes, + reg[rm]); \
1905 /* LDRSB rd, [rn], +rm */ \
1906 arm_access_memory(load, no_op, half_reg, s8, yes, + reg[rm]); \
1910 /* LDRSH rd, [rn], +rm */ \
1911 arm_access_memory(load, no_op, half_reg, s16, yes, + reg[rm]); \
1917 /* ADDS rd, rn, reg_op */ \
1918 arm_data_proc_add_flags(reg[rn], reg_sh, reg); \
1923 if((opcode & 0x90) == 0x90) \
1927 /* STRH rd, [rn], +rm */ \
1928 arm_access_memory(store, no_op, half_reg, u16, yes, + reg[rm]); \
1932 /* UMLAL rdlo, rdhi, rm, rs */ \
1933 arm_multiply_long(arm_multiply_long_addop(u), no, u); \
1938 /* ADC rd, rn, reg_op */ \
1939 arm_data_proc(reg[rn] + reg_sh + c_flag, reg); \
1944 if((opcode & 0x90) == 0x90) \
1946 switch((opcode >> 5) & 0x03) \
1949 /* UMLALS rdlo, rdhi, rm, rs */ \
1950 arm_multiply_long(arm_multiply_long_addop(u), yes, u); \
1954 /* LDRH rd, [rn], +rm */ \
1955 arm_access_memory(load, no_op, half_reg, u16, yes, + reg[rm]); \
1959 /* LDRSB rd, [rn], +rm */ \
1960 arm_access_memory(load, no_op, half_reg, s8, yes, + reg[rm]); \
1964 /* LDRSH rd, [rn], +rm */ \
1965 arm_access_memory(load, no_op, half_reg, s16, yes, + reg[rm]); \
1971 /* ADCS rd, rn, reg_op */ \
1972 arm_data_proc_add_flags(reg[rn], reg_sh + c_flag, reg); \
1977 if((opcode & 0x90) == 0x90) \
1981 /* STRH rd, [rn], +imm */ \
1982 arm_access_memory(store, no_op, half_imm, u16, yes, + offset); \
1986 /* SMULL rdlo, rdhi, rm, rs */ \
1987 arm_multiply_long(no_op, no, s); \
1992 /* SBC rd, rn, reg_op */ \
1993 arm_data_proc(reg[rn] - (reg_sh + (c_flag ^ 1)), reg); \
1998 if((opcode & 0x90) == 0x90) \
2000 switch((opcode >> 5) & 0x03) \
2003 /* SMULLS rdlo, rdhi, rm, rs */ \
2004 arm_multiply_long(no_op, yes, s); \
2008 /* LDRH rd, [rn], +imm */ \
2009 arm_access_memory(load, no_op, half_imm, u16, yes, + offset); \
2013 /* LDRSB rd, [rn], +imm */ \
2014 arm_access_memory(load, no_op, half_imm, s8, yes, + offset); \
2018 /* LDRSH rd, [rn], +imm */ \
2019 arm_access_memory(load, no_op, half_imm, s16, yes, + offset); \
2025 /* SBCS rd, rn, reg_op */ \
2026 arm_data_proc_sub_flags(reg[rn], (reg_sh + (c_flag ^ 1)), reg); \
2031 if((opcode & 0x90) == 0x90) \
2035 /* STRH rd, [rn], +imm */ \
2036 arm_access_memory(store, no_op, half_imm, u16, yes, + offset); \
2040 /* SMLAL rdlo, rdhi, rm, rs */ \
2041 arm_multiply_long(arm_multiply_long_addop(s), no, s); \
2046 /* RSC rd, rn, reg_op */ \
2047 arm_data_proc(reg_sh - reg[rn] + c_flag - 1, reg); \
2052 if((opcode & 0x90) == 0x90) \
2054 switch((opcode >> 5) & 0x03) \
2057 /* SMLALS rdlo, rdhi, rm, rs */ \
2058 arm_multiply_long(arm_multiply_long_addop(s), yes, s); \
2062 /* LDRH rd, [rn], +imm */ \
2063 arm_access_memory(load, no_op, half_imm, u16, yes, + offset); \
2067 /* LDRSB rd, [rn], +imm */ \
2068 arm_access_memory(load, no_op, half_imm, s8, yes, + offset); \
2072 /* LDRSH rd, [rn], +imm */ \
2073 arm_access_memory(load, no_op, half_imm, s16, yes, + offset); \
2079 /* RSCS rd, rn, reg_op */ \
2080 arm_data_proc_sub_flags((reg_sh + c_flag - 1), reg[rn], reg); \
2085 if((opcode & 0x90) == 0x90) \
2089 /* STRH rd, [rn - rm] */ \
2090 arm_access_memory(store, - reg[rm], half_reg, u16, no, no_op); \
2094 /* SWP rd, rm, [rn] */ \
2100 /* MRS rd, cpsr */ \
2101 arm_psr(reg, read, reg[REG_CPSR]); \
2106 if((opcode & 0x90) == 0x90) \
2108 switch((opcode >> 5) & 0x03) \
2111 /* LDRH rd, [rn - rm] */ \
2112 arm_access_memory(load, - reg[rm], half_reg, u16, no, no_op); \
2116 /* LDRSB rd, [rn - rm] */ \
2117 arm_access_memory(load, - reg[rm], half_reg, s8, no, no_op); \
2121 /* LDRSH rd, [rn - rm] */ \
2122 arm_access_memory(load, - reg[rm], half_reg, s16, no, no_op); \
2128 /* TST rn, reg_op */ \
2129 arm_data_proc_test_logic(reg[rn] & reg_sh, reg); \
2134 if((opcode & 0x90) == 0x90) \
2136 /* STRH rd, [rn - rm]! */ \
2137 arm_access_memory(store, - reg[rm], half_reg, u16, yes, no_op); \
/* BX rn: bit 0 of the target selects Thumb state (T bit, CPSR bit 5). */ \
2144 arm_decode_branchx(); \
2145 u32 src = reg[rn]; \
2149 arm_pc_offset_update_direct(src); \
2150 reg[REG_CPSR] |= 0x20; \
2155 arm_pc_offset_update_direct(src); \
2160 /* MSR cpsr, rm */ \
2161 arm_psr(reg, store, cpsr); \
2167 if((opcode & 0x90) == 0x90) \
2169 switch((opcode >> 5) & 0x03) \
2172 /* LDRH rd, [rn - rm]! */ \
2173 arm_access_memory(load, - reg[rm], half_reg, u16, yes, no_op); \
2177 /* LDRSB rd, [rn - rm]! */ \
2178 arm_access_memory(load, - reg[rm], half_reg, s8, yes, no_op); \
2182 /* LDRSH rd, [rn - rm]! */ \
2183 arm_access_memory(load, - reg[rm], half_reg, s16, yes, no_op); \
2189 /* TEQ rn, reg_op */ \
2190 arm_data_proc_test_logic(reg[rn] ^ reg_sh, reg); \
2195 if((opcode & 0x90) == 0x90) \
2199 /* STRH rd, [rn - imm] */ \
2200 arm_access_memory(store, - offset, half_imm, u16, no, no_op); \
2204 /* SWPB rd, rm, [rn] */ \
2210 /* MRS rd, spsr */ \
2211 arm_psr(reg, read, spsr[reg[CPU_MODE]]); \
2216 if((opcode & 0x90) == 0x90) \
2218 switch((opcode >> 5) & 0x03) \
2221 /* LDRH rd, [rn - imm] */ \
2222 arm_access_memory(load, - offset, half_imm, u16, no, no_op); \
2226 /* LDRSB rd, [rn - imm] */ \
2227 arm_access_memory(load, - offset, half_imm, s8, no, no_op); \
2231 /* LDRSH rd, [rn - imm] */ \
2232 arm_access_memory(load, - offset, half_imm, s16, no, no_op); \
2238 /* CMP rn, reg_op */ \
2239 arm_data_proc_test_sub(reg[rn], reg_sh, reg); \
2244 if((opcode & 0x90) == 0x90) \
2246 /* STRH rd, [rn - imm]! */ \
2247 arm_access_memory(store, - offset, half_imm, u16, yes, no_op); \
2251 /* MSR spsr, rm */ \
2252 arm_psr(reg, store, spsr); \
2257 if((opcode & 0x90) == 0x90) \
2259 switch((opcode >> 5) & 0x03) \
2262 /* LDRH rd, [rn - imm]! */ \
2263 arm_access_memory(load, - offset, half_imm, u16, yes, no_op); \
2267 /* LDRSB rd, [rn - imm]! */ \
2268 arm_access_memory(load, - offset, half_imm, s8, yes, no_op); \
2272 /* LDRSH rd, [rn - imm]! */ \
2273 arm_access_memory(load, - offset, half_imm, s16, yes, no_op); \
2279 /* CMN rn, reg_op */ \
2280 arm_data_proc_test_add(reg[rn], reg_sh, reg); \
2285 if((opcode & 0x90) == 0x90) \
2287 /* STRH rd, [rn + rm] */ \
2288 arm_access_memory(store, + reg[rm], half_reg, u16, no, no_op); \
2292 /* ORR rd, rn, reg_op */ \
2293 arm_data_proc(reg[rn] | reg_sh, reg); \
2298 if((opcode & 0x90) == 0x90) \
2300 switch((opcode >> 5) & 0x03) \
2303 /* LDRH rd, [rn + rm] */ \
2304 arm_access_memory(load, + reg[rm], half_reg, u16, no, no_op); \
2308 /* LDRSB rd, [rn + rm] */ \
2309 arm_access_memory(load, + reg[rm], half_reg, s8, no, no_op); \
2313 /* LDRSH rd, [rn + rm] */ \
2314 arm_access_memory(load, + reg[rm], half_reg, s16, no, no_op); \
2320 /* ORRS rd, rn, reg_op */ \
2321 arm_data_proc_logic_flags(reg[rn] | reg_sh, reg); \
2326 if((opcode & 0x90) == 0x90) \
2328 /* STRH rd, [rn + rm]! */ \
2329 arm_access_memory(store, + reg[rm], half_reg, u16, yes, no_op); \
2333 /* MOV rd, reg_op */ \
2334 arm_data_proc(reg_sh, reg); \
2339 if((opcode & 0x90) == 0x90) \
2341 switch((opcode >> 5) & 0x03) \
2344 /* LDRH rd, [rn + rm]! */ \
2345 arm_access_memory(load, + reg[rm], half_reg, u16, yes, no_op); \
2349 /* LDRSB rd, [rn + rm]! */ \
2350 arm_access_memory(load, + reg[rm], half_reg, s8, yes, no_op); \
2354 /* LDRSH rd, [rn + rm]! */ \
2355 arm_access_memory(load, + reg[rm], half_reg, s16, yes, no_op); \
2361 /* MOVS rd, reg_op */ \
2362 arm_data_proc_logic_flags(reg_sh, reg); \
2367 if((opcode & 0x90) == 0x90) \
2369 /* STRH rd, [rn + imm] */ \
2370 arm_access_memory(store, + offset, half_imm, u16, no, no_op); \
2374 /* BIC rd, rn, reg_op */ \
2375 arm_data_proc(reg[rn] & (~reg_sh), reg); \
2380 if((opcode & 0x90) == 0x90) \
2382 switch((opcode >> 5) & 0x03) \
2385 /* LDRH rd, [rn + imm] */ \
2386 arm_access_memory(load, + offset, half_imm, u16, no, no_op); \
2390 /* LDRSB rd, [rn + imm] */ \
2391 arm_access_memory(load, + offset, half_imm, s8, no, no_op); \
2395 /* LDRSH rd, [rn + imm] */ \
2396 arm_access_memory(load, + offset, half_imm, s16, no, no_op); \
2402 /* BICS rd, rn, reg_op */ \
2403 arm_data_proc_logic_flags(reg[rn] & (~reg_sh), reg); \
2408 if((opcode & 0x90) == 0x90) \
2410 /* STRH rd, [rn + imm]! */ \
2411 arm_access_memory(store, + offset, half_imm, u16, yes, no_op); \
2415 /* MVN rd, reg_op */ \
2416 arm_data_proc(~reg_sh, reg); \
2421 if((opcode & 0x90) == 0x90) \
2423 switch((opcode >> 5) & 0x03) \
2426 /* LDRH rd, [rn + imm]! */ \
2427 arm_access_memory(load, + offset, half_imm, u16, yes, no_op); \
2431 /* LDRSB rd, [rn + imm]! */ \
2432 arm_access_memory(load, + offset, half_imm, s8, yes, no_op); \
2436 /* LDRSH rd, [rn + imm]! */ \
2437 arm_access_memory(load, + offset, half_imm, s16, yes, no_op); \
2443 /* MVNS rd, reg_op */ \
2444 arm_data_proc_logic_flags(~reg_sh, reg); \
2449 /* AND rd, rn, imm */ \
2450 arm_data_proc(reg[rn] & imm, imm); \
2454 /* ANDS rd, rn, imm */ \
2455 arm_data_proc_logic_flags(reg[rn] & imm, imm); \
2459 /* EOR rd, rn, imm */ \
2460 arm_data_proc(reg[rn] ^ imm, imm); \
2464 /* EORS rd, rn, imm */ \
2465 arm_data_proc_logic_flags(reg[rn] ^ imm, imm); \
2469 /* SUB rd, rn, imm */ \
2470 arm_data_proc(reg[rn] - imm, imm); \
2474 /* SUBS rd, rn, imm */ \
2475 arm_data_proc_sub_flags(reg[rn], imm, imm); \
2479 /* RSB rd, rn, imm */ \
2480 arm_data_proc(imm - reg[rn], imm); \
2484 /* RSBS rd, rn, imm */ \
2485 arm_data_proc_sub_flags(imm, reg[rn], imm); \
2489 /* ADD rd, rn, imm */ \
2490 arm_data_proc(reg[rn] + imm, imm); \
2494 /* ADDS rd, rn, imm */ \
2495 arm_data_proc_add_flags(reg[rn], imm, imm); \
2499 /* ADC rd, rn, imm */ \
2500 arm_data_proc(reg[rn] + imm + c_flag, imm); \
2504 /* ADCS rd, rn, imm */ \
/* NOTE(review): flags here come from (rn+imm) plus c_flag, which drops */ \
/* any carry-out of the rn+imm step - inconsistent with the reg_op ADCS */ \
/* above (rn, reg_sh+c_flag); verify carry/overflow results match HW.   */ \
2505 arm_data_proc_add_flags(reg[rn] + imm, c_flag, imm); \
2509 /* SBC rd, rn, imm */ \
2510 arm_data_proc(reg[rn] - imm + c_flag - 1, imm); \
2514 /* SBCS rd, rn, imm */ \
2515 arm_data_proc_sub_flags(reg[rn], (imm + (c_flag ^ 1)), imm); \
2519 /* RSC rd, rn, imm */ \
2520 arm_data_proc(imm - reg[rn] + c_flag - 1, imm); \
2524 /* RSCS rd, rn, imm */ \
2525 arm_data_proc_sub_flags((imm + c_flag - 1), reg[rn], imm); \
2528 case 0x30 ... 0x31: \
2530 arm_data_proc_test_logic(reg[rn] & imm, imm); \
2534 /* MSR cpsr, imm */ \
2535 arm_psr(imm, store, cpsr); \
2540 arm_data_proc_test_logic(reg[rn] ^ imm, imm); \
2543 case 0x34 ... 0x35: \
2545 arm_data_proc_test_sub(reg[rn], imm, imm); \
2549 /* MSR spsr, imm */ \
2550 arm_psr(imm, store, spsr); \
2555 arm_data_proc_test_add(reg[rn], imm, imm); \
2559 /* ORR rd, rn, imm */ \
2560 arm_data_proc(reg[rn] | imm, imm); \
2564 /* ORRS rd, rn, imm */ \
2565 arm_data_proc_logic_flags(reg[rn] | imm, imm); \
2570 arm_data_proc(imm, imm); \
2574 /* MOVS rd, imm */ \
2575 arm_data_proc_logic_flags(imm, imm); \
2579 /* BIC rd, rn, imm */ \
2580 arm_data_proc(reg[rn] & (~imm), imm); \
2584 /* BICS rd, rn, imm */ \
2585 arm_data_proc_logic_flags(reg[rn] & (~imm), imm); \
2590 arm_data_proc(~imm, imm); \
2594 /* MVNS rd, imm */ \
2595 arm_data_proc_logic_flags(~imm, imm); \
2599 /* STR rd, [rn], -imm */ \
2600 arm_access_memory(store, no_op, imm, u32, yes, - offset); \
2604 /* LDR rd, [rn], -imm */ \
2605 arm_access_memory(load, no_op, imm, u32, yes, - offset); \
2609 /* STRT rd, [rn], -imm */ \
2610 arm_access_memory(store, no_op, imm, u32, yes, - offset); \
2614 /* LDRT rd, [rn], -imm */ \
2615 arm_access_memory(load, no_op, imm, u32, yes, - offset); \
2619 /* STRB rd, [rn], -imm */ \
2620 arm_access_memory(store, no_op, imm, u8, yes, - offset); \
2624 /* LDRB rd, [rn], -imm */ \
2625 arm_access_memory(load, no_op, imm, u8, yes, - offset); \
2629 /* STRBT rd, [rn], -imm */ \
2630 arm_access_memory(store, no_op, imm, u8, yes, - offset); \
2634 /* LDRBT rd, [rn], -imm */ \
2635 arm_access_memory(load, no_op, imm, u8, yes, - offset); \
2639 /* STR rd, [rn], +imm */ \
2640 arm_access_memory(store, no_op, imm, u32, yes, + offset); \
2644 /* LDR rd, [rn], +imm */ \
2645 arm_access_memory(load, no_op, imm, u32, yes, + offset); \
2649 /* STRT rd, [rn], +imm */ \
2650 arm_access_memory(store, no_op, imm, u32, yes, + offset); \
2654 /* LDRT rd, [rn], +imm */ \
2655 arm_access_memory(load, no_op, imm, u32, yes, + offset); \
2659 /* STRB rd, [rn], +imm */ \
2660 arm_access_memory(store, no_op, imm, u8, yes, + offset); \
2664 /* LDRB rd, [rn], +imm */ \
2665 arm_access_memory(load, no_op, imm, u8, yes, + offset); \
2669 /* STRBT rd, [rn], +imm */ \
2670 arm_access_memory(store, no_op, imm, u8, yes, + offset); \
2674 /* LDRBT rd, [rn], +imm */ \
2675 arm_access_memory(load, no_op, imm, u8, yes, + offset); \
2679 /* STR rd, [rn - imm] */ \
2680 arm_access_memory(store, - offset, imm, u32, no, no_op); \
2684 /* LDR rd, [rn - imm] */ \
2685 arm_access_memory(load, - offset, imm, u32, no, no_op); \
2689 /* STR rd, [rn - imm]! */ \
2690 arm_access_memory(store, - offset, imm, u32, yes, no_op); \
2694 /* LDR rd, [rn - imm]! */ \
2695 arm_access_memory(load, - offset, imm, u32, yes, no_op); \
2699 /* STRB rd, [rn - imm] */ \
2700 arm_access_memory(store, - offset, imm, u8, no, no_op); \
2704 /* LDRB rd, [rn - imm] */ \
2705 arm_access_memory(load, - offset, imm, u8, no, no_op); \
2709 /* STRB rd, [rn - imm]! */ \
2710 arm_access_memory(store, - offset, imm, u8, yes, no_op); \
2714 /* LDRB rd, [rn - imm]! */ \
2715 arm_access_memory(load, - offset, imm, u8, yes, no_op); \
2719 /* STR rd, [rn + imm] */ \
2720 arm_access_memory(store, + offset, imm, u32, no, no_op); \
2724 /* LDR rd, [rn + imm] */ \
2725 arm_access_memory(load, + offset, imm, u32, no, no_op); \
2729 /* STR rd, [rn + imm]! */ \
2730 arm_access_memory(store, + offset, imm, u32, yes, no_op); \
2734 /* LDR rd, [rn + imm]! */ \
2735 arm_access_memory(load, + offset, imm, u32, yes, no_op); \
2739 /* STRB rd, [rn + imm] */ \
2740 arm_access_memory(store, + offset, imm, u8, no, no_op); \
2744 /* LDRB rd, [rn + imm] */ \
2745 arm_access_memory(load, + offset, imm, u8, no, no_op); \
2749 /* STRB rd, [rn + imm]! */ \
2750 arm_access_memory(store, + offset, imm, u8, yes, no_op); \
2754 /* LDRB rd, [rn + imm]! */ \
2755 arm_access_memory(load, + offset, imm, u8, yes, no_op); \
2759 /* STR rd, [rn], -reg_op */ \
2760 arm_access_memory(store, no_op, reg, u32, yes, - reg_offset); \
2764 /* LDR rd, [rn], -reg_op */ \
2765 arm_access_memory(load, no_op, reg, u32, yes, - reg_offset); \
2769 /* STRT rd, [rn], -reg_op */ \
2770 arm_access_memory(store, no_op, reg, u32, yes, - reg_offset); \
2774 /* LDRT rd, [rn], -reg_op */ \
2775 arm_access_memory(load, no_op, reg, u32, yes, - reg_offset); \
2779 /* STRB rd, [rn], -reg_op */ \
2780 arm_access_memory(store, no_op, reg, u8, yes, - reg_offset); \
2784 /* LDRB rd, [rn], -reg_op */ \
2785 arm_access_memory(load, no_op, reg, u8, yes, - reg_offset); \
2789 /* STRBT rd, [rn], -reg_op */ \
2790 arm_access_memory(store, no_op, reg, u8, yes, - reg_offset); \
2794 /* LDRBT rd, [rn], -reg_op */ \
2795 arm_access_memory(load, no_op, reg, u8, yes, - reg_offset); \
2799 /* STR rd, [rn], +reg_op */ \
2800 arm_access_memory(store, no_op, reg, u32, yes, + reg_offset); \
2804 /* LDR rd, [rn], +reg_op */ \
2805 arm_access_memory(load, no_op, reg, u32, yes, + reg_offset); \
2809 /* STRT rd, [rn], +reg_op */ \
2810 arm_access_memory(store, no_op, reg, u32, yes, + reg_offset); \
2814 /* LDRT rd, [rn], +reg_op */ \
2815 arm_access_memory(load, no_op, reg, u32, yes, + reg_offset); \
2819 /* STRB rd, [rn], +reg_op */ \
2820 arm_access_memory(store, no_op, reg, u8, yes, + reg_offset); \
2824 /* LDRB rd, [rn], +reg_op */ \
2825 arm_access_memory(load, no_op, reg, u8, yes, + reg_offset); \
2829 /* STRBT rd, [rn], +reg_op */ \
2830 arm_access_memory(store, no_op, reg, u8, yes, + reg_offset); \
2834 /* LDRBT rd, [rn], +reg_op */ \
2835 arm_access_memory(load, no_op, reg, u8, yes, + reg_offset); \
2839 /* STR rd, [rn - reg_op] */ \
2840 arm_access_memory(store, - reg_offset, reg, u32, no, no_op); \
2844 /* LDR rd, [rn - reg_op] */ \
2845 arm_access_memory(load, - reg_offset, reg, u32, no, no_op); \
2849 /* STR rd, [rn - reg_op]! */ \
2850 arm_access_memory(store, - reg_offset, reg, u32, yes, no_op); \
2854 /* LDR rd, [rn - reg_op]! */ \
2855 arm_access_memory(load, - reg_offset, reg, u32, yes, no_op); \
2859 /* STRB rd, [rn - reg_op] */ \
2860 arm_access_memory(store, - reg_offset, reg, u8, no, no_op); \
2864 /* LDRB rd, [rn - reg_op] */ \
2865 arm_access_memory(load, - reg_offset, reg, u8, no, no_op); \
2869 /* STRB rd, [rn - reg_op]! */ \
2870 arm_access_memory(store, - reg_offset, reg, u8, yes, no_op); \
2874 /* LDRB rd, [rn - reg_op]! */ \
2875 arm_access_memory(load, - reg_offset, reg, u8, yes, no_op); \
2879 /* STR rd, [rn + reg_op] */ \
2880 arm_access_memory(store, + reg_offset, reg, u32, no, no_op); \
2884 /* LDR rd, [rn + reg_op] */ \
2885 arm_access_memory(load, + reg_offset, reg, u32, no, no_op); \
2889 /* STR rd, [rn + reg_op]! */ \
2890 arm_access_memory(store, + reg_offset, reg, u32, yes, no_op); \
2894 /* LDR rd, [rn + reg_op]! */ \
2895 arm_access_memory(load, + reg_offset, reg, u32, yes, no_op); \
2899 /* STRB rd, [rn + reg_op] */ \
2900 arm_access_memory(store, + reg_offset, reg, u8, no, no_op); \
2904 /* LDRB rd, [rn + reg_op] */ \
2905 arm_access_memory(load, + reg_offset, reg, u8, no, no_op); \
2909 /* STRB rd, [rn + reg_op]! */ \
2910 arm_access_memory(store, + reg_offset, reg, u8, yes, no_op); \
2914 /* LDRB rd, [rn + reg_op]! */ \
2915 arm_access_memory(load, + reg_offset, reg, u8, yes, no_op); \
2919 /* STMDA rn, rlist */ \
2920 arm_block_memory(store, down_a, no, no); \
2924 /* LDMDA rn, rlist */ \
2925 arm_block_memory(load, down_a, no, no); \
2929 /* STMDA rn!, rlist */ \
2930 arm_block_memory(store, down_a, down, no); \
2934 /* LDMDA rn!, rlist */ \
2935 arm_block_memory(load, down_a, down, no); \
2939 /* STMDA rn, rlist^ */ \
2940 arm_block_memory(store, down_a, no, yes); \
2944 /* LDMDA rn, rlist^ */ \
2945 arm_block_memory(load, down_a, no, yes); \
2949 /* STMDA rn!, rlist^ */ \
2950 arm_block_memory(store, down_a, down, yes); \
2954 /* LDMDA rn!, rlist^ */ \
2955 arm_block_memory(load, down_a, down, yes); \
2959 /* STMIA rn, rlist */ \
2960 arm_block_memory(store, no, no, no); \
2964 /* LDMIA rn, rlist */ \
2965 arm_block_memory(load, no, no, no); \
2969 /* STMIA rn!, rlist */ \
2970 arm_block_memory(store, no, up, no); \
2974 /* LDMIA rn!, rlist */ \
2975 arm_block_memory(load, no, up, no); \
2979 /* STMIA rn, rlist^ */ \
2980 arm_block_memory(store, no, no, yes); \
2984 /* LDMIA rn, rlist^ */ \
2985 arm_block_memory(load, no, no, yes); \
2989 /* STMIA rn!, rlist^ */ \
2990 arm_block_memory(store, no, up, yes); \
2994 /* LDMIA rn!, rlist^ */ \
2995 arm_block_memory(load, no, up, yes); \
2999 /* STMDB rn, rlist */ \
3000 arm_block_memory(store, down_b, no, no); \
3004 /* LDMDB rn, rlist */ \
3005 arm_block_memory(load, down_b, no, no); \
3009 /* STMDB rn!, rlist */ \
3010 arm_block_memory(store, down_b, down, no); \
3014 /* LDMDB rn!, rlist */ \
3015 arm_block_memory(load, down_b, down, no); \
3019 /* STMDB rn, rlist^ */ \
3020 arm_block_memory(store, down_b, no, yes); \
3024 /* LDMDB rn, rlist^ */ \
3025 arm_block_memory(load, down_b, no, yes); \
3029 /* STMDB rn!, rlist^ */ \
3030 arm_block_memory(store, down_b, down, yes); \
3034 /* LDMDB rn!, rlist^ */ \
3035 arm_block_memory(load, down_b, down, yes); \
3039 /* STMIB rn, rlist */ \
3040 arm_block_memory(store, up, no, no); \
3044 /* LDMIB rn, rlist */ \
3045 arm_block_memory(load, up, no, no); \
3049 /* STMIB rn!, rlist */ \
3050 arm_block_memory(store, up, up, no); \
3054 /* LDMIB rn!, rlist */ \
3055 arm_block_memory(load, up, up, no); \
3059 /* STMIB rn, rlist^ */ \
3060 arm_block_memory(store, up, no, yes); \
3064 /* LDMIB rn, rlist^ */ \
3065 arm_block_memory(load, up, no, yes); \
3069 /* STMIB rn!, rlist^ */ \
3070 arm_block_memory(store, up, up, yes); \
3074 /* LDMIB rn!, rlist^ */ \
3075 arm_block_memory(load, up, up, yes); \
/* B: pc-relative branch; +8 accounts for ARM prefetch. */ \
3096 arm_decode_branch(); \
3097 arm_pc_offset_update(offset + 8); \
3101 case 0xB0 ... 0xBF: \
/* BL: as B, but first save the return address in LR. */ \
3104 arm_decode_branch(); \
3105 reg[REG_LR] = pc + 4; \
3106 arm_pc_offset_update(offset + 8); \
3110 case 0xC0 ... 0xEF: \
3111 /* coprocessor instructions, reserved on GBA */ \
3114 case 0xF0 ... 0xFF: \
/* SWI: bank the return address and CPSR, then enter supervisor */ \
/* mode at the BIOS SWI vector (0x08). */ \
3117 u32 swi_comment = opcode & 0x00FFFFFF; \
3119 switch(swi_comment >> 16) \
3121 /* Jump to BIOS SWI handler */ \
3123 reg_mode[MODE_SUPERVISOR][6] = pc + 4; \
3125 spsr[MODE_SUPERVISOR] = reg[REG_CPSR]; \
3126 reg[REG_PC] = 0x00000008; \
3128 reg[REG_CPSR] = (reg[REG_CPSR] & ~0x1F) | 0x13; \
3129 set_cpu_mode(MODE_SUPERVISOR); \
3138 #define execute_thumb_instruction() \
3139 using_instruction(thumb); \
3140 check_pc_region(); \
3142 opcode = address16(pc_address_block, (pc & 0x7FFF)); \
3144 switch((opcode >> 8) & 0xFF) \
3146 case 0x00 ... 0x07: \
3147 /* LSL rd, rs, offset */ \
3148 thumb_shift(shift, lsl, imm); \
3151 case 0x08 ... 0x0F: \
3152 /* LSR rd, rs, offset */ \
3153 thumb_shift(shift, lsr, imm); \
3156 case 0x10 ... 0x17: \
3157 /* ASR rd, rs, offset */ \
3158 thumb_shift(shift, asr, imm); \
3161 case 0x18 ... 0x19: \
3162 /* ADD rd, rs, rn */ \
3163 thumb_add(add_sub, rd, reg[rs], reg[rn]); \
3166 case 0x1A ... 0x1B: \
3167 /* SUB rd, rs, rn */ \
3168 thumb_sub(add_sub, rd, reg[rs], reg[rn]); \
3171 case 0x1C ... 0x1D: \
3172 /* ADD rd, rs, imm */ \
3173 thumb_add(add_sub_imm, rd, reg[rs], imm); \
3176 case 0x1E ... 0x1F: \
3177 /* SUB rd, rs, imm */ \
3178 thumb_sub(add_sub_imm, rd, reg[rs], imm); \
3183 thumb_logic(imm, 0, imm); \
3188 thumb_logic(imm, 1, imm); \
3193 thumb_logic(imm, 2, imm); \
3198 thumb_logic(imm, 3, imm); \
3203 thumb_logic(imm, 4, imm); \
3208 thumb_logic(imm, 5, imm); \
3213 thumb_logic(imm, 6, imm); \
3218 thumb_logic(imm, 7, imm); \
3223 thumb_test_sub(imm, reg[0], imm); \
3228 thumb_test_sub(imm, reg[1], imm); \
3233 thumb_test_sub(imm, reg[2], imm); \
3238 thumb_test_sub(imm, reg[3], imm); \
3243 thumb_test_sub(imm, reg[4], imm); \
3248 thumb_test_sub(imm, reg[5], imm); \
3253 thumb_test_sub(imm, reg[6], imm); \
3258 thumb_test_sub(imm, reg[7], imm); \
3263 thumb_add(imm, 0, reg[0], imm); \
3268 thumb_add(imm, 1, reg[1], imm); \
3273 thumb_add(imm, 2, reg[2], imm); \
3278 thumb_add(imm, 3, reg[3], imm); \
3283 thumb_add(imm, 4, reg[4], imm); \
3288 thumb_add(imm, 5, reg[5], imm); \
3293 thumb_add(imm, 6, reg[6], imm); \
3298 thumb_add(imm, 7, reg[7], imm); \
3303 thumb_sub(imm, 0, reg[0], imm); \
3308 thumb_sub(imm, 1, reg[1], imm); \
3313 thumb_sub(imm, 2, reg[2], imm); \
3318 thumb_sub(imm, 3, reg[3], imm); \
3323 thumb_sub(imm, 4, reg[4], imm); \
3328 thumb_sub(imm, 5, reg[5], imm); \
3333 thumb_sub(imm, 6, reg[6], imm); \
3338 thumb_sub(imm, 7, reg[7], imm); \
3342 switch((opcode >> 6) & 0x03) \
3346 thumb_logic(alu_op, rd, reg[rd] & reg[rs]); \
3351 thumb_logic(alu_op, rd, reg[rd] ^ reg[rs]); \
3356 thumb_shift(alu_op, lsl, reg); \
3361 thumb_shift(alu_op, lsr, reg); \
3367 switch((opcode >> 6) & 0x03) \
3371 thumb_shift(alu_op, asr, reg); \
3376 thumb_add(alu_op, rd, reg[rd] + reg[rs], c_flag); \
3381 thumb_sub(alu_op, rd, reg[rd] - reg[rs], (c_flag ^ 1)); \
3386 thumb_shift(alu_op, ror, reg); \
3392 switch((opcode >> 6) & 0x03) \
3396 thumb_test_logic(alu_op, reg[rd] & reg[rs]); \
3401 thumb_sub(alu_op, rd, 0, reg[rs]); \
3406 thumb_test_sub(alu_op, reg[rd], reg[rs]); \
3411 thumb_test_add(alu_op, reg[rd], reg[rs]); \
3417 switch((opcode >> 6) & 0x03) \
3421 thumb_logic(alu_op, rd, reg[rd] | reg[rs]); \
3426 thumb_logic(alu_op, rd, reg[rd] * reg[rs]); \
3431 thumb_logic(alu_op, rd, reg[rd] & (~reg[rs])); \
3436 thumb_logic(alu_op, rd, ~reg[rs]); \
3443 thumb_hireg_op(reg[rd] + reg[rs]); \
3449 thumb_pc_offset(4); \
3450 thumb_decode_hireg_op(); \
3451 u32 _sa = reg[rd]; \
3452 u32 _sb = reg[rs]; \
3453 u32 dest = _sa - _sb; \
3454 thumb_pc_offset(-2); \
3455 calculate_flags_sub(dest, _sa, _sb); \
3461 thumb_hireg_op(reg[rs]); \
3467 thumb_decode_hireg_op(); \
3469 thumb_pc_offset(4); \
3474 thumb_pc_offset_update_direct(src); \
3478 /* Switch to ARM mode */ \
3479 thumb_pc_offset_update_direct(src); \
3480 reg[REG_CPSR] &= ~0x20; \
3488 /* LDR r0, [pc + imm] */ \
3489 thumb_access_memory(load, imm, (pc & ~2) + (imm * 4) + 4, reg[0], u32); \
3493 /* LDR r1, [pc + imm] */ \
3494 thumb_access_memory(load, imm, (pc & ~2) + (imm * 4) + 4, reg[1], u32); \
3498 /* LDR r2, [pc + imm] */ \
3499 thumb_access_memory(load, imm, (pc & ~2) + (imm * 4) + 4, reg[2], u32); \
3503 /* LDR r3, [pc + imm] */ \
3504 thumb_access_memory(load, imm, (pc & ~2) + (imm * 4) + 4, reg[3], u32); \
3508 /* LDR r4, [pc + imm] */ \
3509 thumb_access_memory(load, imm, (pc & ~2) + (imm * 4) + 4, reg[4], u32); \
3513 /* LDR r5, [pc + imm] */ \
3514 thumb_access_memory(load, imm, (pc & ~2) + (imm * 4) + 4, reg[5], u32); \
3518 /* LDR r6, [pc + imm] */ \
3519 thumb_access_memory(load, imm, (pc & ~2) + (imm * 4) + 4, reg[6], u32); \
3523 /* LDR r7, [pc + imm] */ \
3524 thumb_access_memory(load, imm, (pc & ~2) + (imm * 4) + 4, reg[7], u32); \
3527 case 0x50 ... 0x51: \
3528 /* STR rd, [rb + ro] */ \
3529 thumb_access_memory(store, mem_reg, reg[rb] + reg[ro], reg[rd], u32); \
3532 case 0x52 ... 0x53: \
3533 /* STRH rd, [rb + ro] */ \
3534 thumb_access_memory(store, mem_reg, reg[rb] + reg[ro], reg[rd], u16); \
3537 case 0x54 ... 0x55: \
3538 /* STRB rd, [rb + ro] */ \
3539 thumb_access_memory(store, mem_reg, reg[rb] + reg[ro], reg[rd], u8); \
3542 case 0x56 ... 0x57: \
3543 /* LDSB rd, [rb + ro] */ \
3544 thumb_access_memory(load, mem_reg, reg[rb] + reg[ro], reg[rd], s8); \
3547 case 0x58 ... 0x59: \
3548 /* LDR rd, [rb + ro] */ \
3549 thumb_access_memory(load, mem_reg, reg[rb] + reg[ro], reg[rd], u32); \
3552 case 0x5A ... 0x5B: \
3553 /* LDRH rd, [rb + ro] */ \
3554 thumb_access_memory(load, mem_reg, reg[rb] + reg[ro], reg[rd], u16); \
3557 case 0x5C ... 0x5D: \
3558 /* LDRB rd, [rb + ro] */ \
3559 thumb_access_memory(load, mem_reg, reg[rb] + reg[ro], reg[rd], u8); \
3562 case 0x5E ... 0x5F: \
3563 /* LDSH rd, [rb + ro] */ \
3564 thumb_access_memory(load, mem_reg, reg[rb] + reg[ro], reg[rd], s16); \
3567 case 0x60 ... 0x67: \
3568 /* STR rd, [rb + imm] */ \
3569 thumb_access_memory(store, mem_imm, reg[rb] + (imm * 4), reg[rd], u32); \
3572 case 0x68 ... 0x6F: \
3573 /* LDR rd, [rb + imm] */ \
3574 thumb_access_memory(load, mem_imm, reg[rb] + (imm * 4), reg[rd], u32); \
3577 case 0x70 ... 0x77: \
3578 /* STRB rd, [rb + imm] */ \
3579 thumb_access_memory(store, mem_imm, reg[rb] + imm, reg[rd], u8); \
3582 case 0x78 ... 0x7F: \
3583 /* LDRB rd, [rb + imm] */ \
3584 thumb_access_memory(load, mem_imm, reg[rb] + imm, reg[rd], u8); \
3587 case 0x80 ... 0x87: \
3588 /* STRH rd, [rb + imm] */ \
3589 thumb_access_memory(store, mem_imm, reg[rb] + (imm * 2), reg[rd], u16); \
3592 case 0x88 ... 0x8F: \
3593 /* LDRH rd, [rb + imm] */ \
3594 thumb_access_memory(load, mem_imm, reg[rb] + (imm * 2), reg[rd], u16); \
3598 /* STR r0, [sp + imm] */ \
3599 thumb_access_memory(store, imm, reg[REG_SP] + (imm * 4), reg[0], u32); \
3603 /* STR r1, [sp + imm] */ \
3604 thumb_access_memory(store, imm, reg[REG_SP] + (imm * 4), reg[1], u32); \
3608 /* STR r2, [sp + imm] */ \
3609 thumb_access_memory(store, imm, reg[REG_SP] + (imm * 4), reg[2], u32); \
3613 /* STR r3, [sp + imm] */ \
3614 thumb_access_memory(store, imm, reg[REG_SP] + (imm * 4), reg[3], u32); \
3618 /* STR r4, [sp + imm] */ \
3619 thumb_access_memory(store, imm, reg[REG_SP] + (imm * 4), reg[4], u32); \
3623 /* STR r5, [sp + imm] */ \
3624 thumb_access_memory(store, imm, reg[REG_SP] + (imm * 4), reg[5], u32); \
3628 /* STR r6, [sp + imm] */ \
3629 thumb_access_memory(store, imm, reg[REG_SP] + (imm * 4), reg[6], u32); \
3633 /* STR r7, [sp + imm] */ \
3634 thumb_access_memory(store, imm, reg[REG_SP] + (imm * 4), reg[7], u32); \
3638 /* LDR r0, [sp + imm] */ \
3639 thumb_access_memory(load, imm, reg[REG_SP] + (imm * 4), reg[0], u32); \
3643 /* LDR r1, [sp + imm] */ \
3644 thumb_access_memory(load, imm, reg[REG_SP] + (imm * 4), reg[1], u32); \
3648 /* LDR r2, [sp + imm] */ \
3649 thumb_access_memory(load, imm, reg[REG_SP] + (imm * 4), reg[2], u32); \
3653 /* LDR r3, [sp + imm] */ \
3654 thumb_access_memory(load, imm, reg[REG_SP] + (imm * 4), reg[3], u32); \
3658 /* LDR r4, [sp + imm] */ \
3659 thumb_access_memory(load, imm, reg[REG_SP] + (imm * 4), reg[4], u32); \
3663 /* LDR r5, [sp + imm] */ \
3664 thumb_access_memory(load, imm, reg[REG_SP] + (imm * 4), reg[5], u32); \
3668 /* LDR r6, [sp + imm] */ \
3669 thumb_access_memory(load, imm, reg[REG_SP] + (imm * 4), reg[6], u32); \
3673 /* LDR r7, [sp + imm] */ \
3674 thumb_access_memory(load, imm, reg[REG_SP] + (imm * 4), reg[7], u32); \
3678 /* ADD r0, pc, +imm */ \
3679 thumb_add_noflags(imm, 0, (pc & ~2) + 4, (imm * 4)); \
3683 /* ADD r1, pc, +imm */ \
3684 thumb_add_noflags(imm, 1, (pc & ~2) + 4, (imm * 4)); \
3688 /* ADD r2, pc, +imm */ \
3689 thumb_add_noflags(imm, 2, (pc & ~2) + 4, (imm * 4)); \
3693 /* ADD r3, pc, +imm */ \
3694 thumb_add_noflags(imm, 3, (pc & ~2) + 4, (imm * 4)); \
3698 /* ADD r4, pc, +imm */ \
3699 thumb_add_noflags(imm, 4, (pc & ~2) + 4, (imm * 4)); \
3703 /* ADD r5, pc, +imm */ \
3704 thumb_add_noflags(imm, 5, (pc & ~2) + 4, (imm * 4)); \
3708 /* ADD r6, pc, +imm */ \
3709 thumb_add_noflags(imm, 6, (pc & ~2) + 4, (imm * 4)); \
3713 /* ADD r7, pc, +imm */ \
3714 thumb_add_noflags(imm, 7, (pc & ~2) + 4, (imm * 4)); \
3718 /* ADD r0, sp, +imm */ \
3719 thumb_add_noflags(imm, 0, reg[REG_SP], (imm * 4)); \
3723 /* ADD r1, sp, +imm */ \
3724 thumb_add_noflags(imm, 1, reg[REG_SP], (imm * 4)); \
3728 /* ADD r2, sp, +imm */ \
3729 thumb_add_noflags(imm, 2, reg[REG_SP], (imm * 4)); \
3733 /* ADD r3, sp, +imm */ \
3734 thumb_add_noflags(imm, 3, reg[REG_SP], (imm * 4)); \
3738 /* ADD r4, sp, +imm */ \
3739 thumb_add_noflags(imm, 4, reg[REG_SP], (imm * 4)); \
3743 /* ADD r5, sp, +imm */ \
3744 thumb_add_noflags(imm, 5, reg[REG_SP], (imm * 4)); \
3748 /* ADD r6, sp, +imm */ \
3749 thumb_add_noflags(imm, 6, reg[REG_SP], (imm * 4)); \
3753 /* ADD r7, sp, +imm */ \
3754 thumb_add_noflags(imm, 7, reg[REG_SP], (imm * 4)); \
3757 case 0xB0 ... 0xB3: \
3758 if((opcode >> 7) & 0x01) \
3760 /* ADD sp, -imm */ \
3761 thumb_add_noflags(add_sp, 13, reg[REG_SP], -(imm * 4)); \
3765 /* ADD sp, +imm */ \
3766 thumb_add_noflags(add_sp, 13, reg[REG_SP], (imm * 4)); \
3772 thumb_block_memory(store, down, no_op, 13); \
3776 /* PUSH rlist, lr */ \
3777 thumb_block_memory(store, push_lr, push_lr, 13); \
3782 thumb_block_memory(load, no_op, up, 13); \
3786 /* POP rlist, pc */ \
3787 thumb_block_memory(load, no_op, pop_pc, 13); \
3791 /* STMIA r0!, rlist */ \
3792 thumb_block_memory(store, no_op, up, 0); \
3796 /* STMIA r1!, rlist */ \
3797 thumb_block_memory(store, no_op, up, 1); \
3801 /* STMIA r2!, rlist */ \
3802 thumb_block_memory(store, no_op, up, 2); \
3806 /* STMIA r3!, rlist */ \
3807 thumb_block_memory(store, no_op, up, 3); \
3811 /* STMIA r4!, rlist */ \
3812 thumb_block_memory(store, no_op, up, 4); \
3816 /* STMIA r5!, rlist */ \
3817 thumb_block_memory(store, no_op, up, 5); \
3821 /* STMIA r6!, rlist */ \
3822 thumb_block_memory(store, no_op, up, 6); \
3826 /* STMIA r7!, rlist */ \
3827 thumb_block_memory(store, no_op, up, 7); \
3831 /* LDMIA r0!, rlist */ \
3832 thumb_block_memory(load, no_op, up, 0); \
3836 /* LDMIA r1!, rlist */ \
3837 thumb_block_memory(load, no_op, up, 1); \
3841 /* LDMIA r2!, rlist */ \
3842 thumb_block_memory(load, no_op, up, 2); \
3846 /* LDMIA r3!, rlist */ \
3847 thumb_block_memory(load, no_op, up, 3); \
3851 /* LDMIA r4!, rlist */ \
3852 thumb_block_memory(load, no_op, up, 4); \
3856 /* LDMIA r5!, rlist */ \
3857 thumb_block_memory(load, no_op, up, 5); \
3861 /* LDMIA r6!, rlist */ \
3862 thumb_block_memory(load, no_op, up, 6); \
3866 /* LDMIA r7!, rlist */ \
3867 thumb_block_memory(load, no_op, up, 7); \
3872 thumb_conditional_branch(z_flag == 1); \
3877 thumb_conditional_branch(z_flag == 0); \
3882 thumb_conditional_branch(c_flag == 1); \
3887 thumb_conditional_branch(c_flag == 0); \
3892 thumb_conditional_branch(n_flag == 1); \
3897 thumb_conditional_branch(n_flag == 0); \
3902 thumb_conditional_branch(v_flag == 1); \
3907 thumb_conditional_branch(v_flag == 0); \
3912 thumb_conditional_branch(c_flag & (z_flag ^ 1)); \
3917 thumb_conditional_branch((c_flag == 0) | z_flag); \
3922 thumb_conditional_branch(n_flag == v_flag); \
3927 thumb_conditional_branch(n_flag != v_flag); \
3932 thumb_conditional_branch((z_flag == 0) & (n_flag == v_flag)); \
3937 thumb_conditional_branch(z_flag | (n_flag != v_flag)); \
3943 u32 swi_comment = opcode & 0xFF; \
3945 switch(swi_comment) \
3948 reg_mode[MODE_SUPERVISOR][6] = pc + 2; \
3949 spsr[MODE_SUPERVISOR] = reg[REG_CPSR]; \
3950 reg[REG_PC] = 0x00000008; \
3951 thumb_update_pc(); \
3952 reg[REG_CPSR] = (reg[REG_CPSR] & ~0x3F) | 0x13; \
3953 set_cpu_mode(MODE_SUPERVISOR); \
3960 case 0xE0 ... 0xE7: \
3963 thumb_decode_branch(); \
3964 thumb_pc_offset_update(((s32)(offset << 21) >> 20) + 4); \
3968 case 0xF0 ... 0xF7: \
3970 /* (low word) BL label */ \
3971 thumb_decode_branch(); \
3972 reg[REG_LR] = pc + 4 + ((s32)(offset << 21) >> 9); \
3973 thumb_pc_offset(2); \
3977 case 0xF8 ... 0xFF: \
3979 /* (high word) BL label */ \
3980 thumb_decode_branch(); \
3981 u32 lr = (pc + 2) | 0x01; \
3982 pc = reg[REG_LR] + (offset * 2); \
3989 void print_arm_registers()
3993 for(i = 0, i3 = 0; i < 4; i++)
3995 debug_screen_printf(" ");
3996 for(i2 = 0; i2 < 4; i2++, i3++)
3998 debug_screen_printf("R%02d %08x ", i3, reg[i3]);
4000 debug_screen_newline(1);
4004 void print_thumb_instruction()
4006 debug_screen_printf("Thumb instruction at PC: %04x",
4007 read_memory16(reg[REG_PC]));
4008 debug_screen_newline(1);
4011 void print_arm_instruction()
4013 debug_screen_printf("ARM instruction at PC: %08x",
4014 read_memory32(reg[REG_PC]));
4015 debug_screen_newline(1);
/* NOTE(review): interior fragment of a CPSR/flags dump helper; its
   signature (around original line 4018) is not visible in this view. */
4020 u32 cpsr = reg[REG_CPSR];
4021 debug_screen_newline(1);
/* N/Z/C/V are CPSR bits 31..28; also shows the raw CPSR, the current
   mode's banked SPSR, and the mode's printable name. */
4022 debug_screen_printf(
4023 " N: %d Z: %d C: %d V: %d CPSR: %08x SPSR: %08x mode: %s",
4024 (cpsr >> 31) & 0x01, (cpsr >> 30) & 0x01, (cpsr >> 29) & 0x01,
4025 (cpsr >> 28) & 0x01, cpsr, spsr[reg[CPU_MODE]],
4026 cpu_mode_names[reg[CPU_MODE]]);
4027 debug_screen_newline(2);
/* NOTE(review): interior fragment of a stack dump helper; its
   signature is not visible in this view.  Prints stack_print_lines
   rows of five 32-bit words read upward from the current SP. */
4030 const u32 stack_print_lines = 2;
4036 debug_screen_printf("Stack:");
4037 debug_screen_newline(1);
4039 for(i = 0, i3 = reg[REG_SP]; i < stack_print_lines; i++)
4041 for(i2 = 0; i2 < 5; i2++, i3 += 4)
4043 debug_screen_printf(" %08x", read_memory32(i3));
/* NOTE(review): i < stack_print_lines always holds inside the loop,
   so this condition looks always-true -- confirm against full file. */
4045 if(i != stack_print_lines)
4046 debug_screen_newline(1);
4049 debug_screen_newline(1);
/* Debugger bookkeeping (file scope). */
/* Count of instructions traced by step_debug since start. */
4052 u32 instruction_count = 0;
/* Which on-screen output field the next state dump writes into. */
4054 u32 output_field = 0;
4055 const u32 num_output_fields = 2;
/* PC of the most recently traced instruction. */
4057 u32 last_instruction = 0;
/* Nonzero while the trace is inside an IRQ handler (used by the
   COUNTDOWN_BREAKPOINT_C variant in step_debug). */
4059 u32 in_interrupt = 0;
/* NOTE(review): body fragment of debug_on() -- the signature line is
   elided in this view.  Enters single-step mode and opens the debug
   screen overlay. */
4063 current_debug_state = STEP;
4064 debug_screen_start();
/* Leave single-step debugging: record the debugger state to switch to
   (run, breakpoint wait, ...).
   NOTE(review): lines between the signature and this statement, and
   the function's closing/return lines, are elided in this view; the
   declared u32 return value must be produced outside the visible
   span -- confirm against the full file. */
4067 u32 debug_off(debug_state new_debug_state)
4069 current_debug_state = new_debug_state;
/* Per-instruction debugger hook, called before each emulated
   instruction with the current pc and remaining cycle budget.
   Matches the active breakpoint condition, runs the interactive
   single-step UI when stepping, then records last_instruction and
   advances REG_PC past the instruction (2 bytes in Thumb state, 4 in
   ARM state).
   NOTE(review): many lines of this function (braces, case labels,
   the input-dispatch switch) are elided in this view; comments below
   are hedged accordingly. */
4073 u32 function_cc step_debug(u32 pc, u32 cycles)
/* CPSR bit 5 (T, 0x20) set -> CPU is executing Thumb. */
4079 if(reg[REG_CPSR] & 0x20)
4082 instruction_count++;
/* Has the active breakpoint condition been hit? */
4084 switch(current_debug_state)
4087 if(reg[REG_PC] == breakpoint_value)
4093 if(reg[REG_Z_FLAG] == 1)
4098 case VCOUNT_BREAKPOINT:
4099 if(io_registers[REG_VCOUNT] == breakpoint_value)
4104 case COUNTDOWN_BREAKPOINT:
4105 if(breakpoint_value == 0)
4112 case COUNTDOWN_BREAKPOINT_B:
4113 if(breakpoint_value == instruction_count)
4118 case COUNTDOWN_BREAKPOINT_C:
/* Countdown variant that ignores instructions inside an IRQ. */
4123 if((breakpoint_value == 0) && (in_interrupt == 0))
4129 if(in_interrupt == 0)
/* NOTE(review): presumably 0x13c is the BIOS IRQ-return PC used to
   clear in_interrupt -- verify against the full file. */
4132 if(in_interrupt && (pc == 0x13c))
/* Interactive stepping UI: dump CPU state, then wait for input. */
4139 if((current_debug_state == STEP) ||
4140 (current_debug_state == STEP_RUN))
/* Hold the audio lock while the emulator sits in the UI. */
4144 SDL_LockMutex(sound_mutex);
4147 if(output_field >= num_output_fields)
4150 debug_screen_clear();
4154 print_thumb_instruction(cycles);
4156 print_arm_instruction(cycles);
4158 print_arm_registers();
4163 printf("%x instructions in, VCOUNT %d, cycles remaining: %d \n",
4164 instruction_count, io_registers[REG_VCOUNT], cycles);
4166 debug_screen_update();
4169 if(current_debug_state != STEP_RUN)
/* Block until the user presses something. */
4176 gui_action_type next_input = CURSOR_NONE;
4177 while(next_input == CURSOR_NONE)
4179 next_input = get_gui_input();
/* Input dispatch follows; its case labels are elided in this view. */
4206 dump_translation_cache();
4210 debug_off(Z_BREAKPOINT);
4215 printf("break at PC (hex): ");
4216 scanf("%08x", &breakpoint_value);
4217 debug_off(PC_BREAKPOINT);
4221 printf("break after N instructions (hex): ");
4222 scanf("%08x", &breakpoint_value);
4223 breakpoint_value -= 1;
4224 debug_off(COUNTDOWN_BREAKPOINT);
4228 printf("break after N instructions, skip in IRQ (hex): ");
4229 scanf("%08x", &breakpoint_value);
4230 breakpoint_value -= 1;
4231 debug_off(COUNTDOWN_BREAKPOINT_C);
4235 printf("break after N instructions (since start): ");
4236 scanf("%d", &breakpoint_value);
4237 debug_off(COUNTDOWN_BREAKPOINT_B);
4241 printf("break at VCOUNT: ");
4242 scanf("%d", &breakpoint_value);
4243 debug_off(VCOUNT_BREAKPOINT);
4248 current_debug_state = STEP_RUN;
4256 debug_off(PC_BREAKPOINT);
4260 global_cycles_per_instruction = 0;
/* Savestate hotkey: snapshot the screen and write the slot file. */
4266 u8 current_savestate_filename[512];
4267 u16 *current_screen = copy_screen();
4268 get_savestate_filename_noshot(savestate_slot,
4269 current_savestate_filename);
4270 save_state(current_savestate_filename, current_screen);
4271 free(current_screen);
4280 SDL_UnlockMutex(sound_mutex);
4283 last_instruction = reg[REG_PC];
/* Advance PC past the instruction just traced. */
4286 reg[REG_PC] = pc + 2;
4288 reg[REG_PC] = pc + 4;
4293 void set_cpu_mode(cpu_mode_type new_mode)
4296 cpu_mode_type cpu_mode = reg[CPU_MODE];
4298 if(cpu_mode != new_mode)
4300 if(new_mode == MODE_FIQ)
4302 for(i = 8; i < 15; i++)
4304 reg_mode[cpu_mode][i - 8] = reg[i];
4309 reg_mode[cpu_mode][5] = reg[REG_SP];
4310 reg_mode[cpu_mode][6] = reg[REG_LR];
4313 if(cpu_mode == MODE_FIQ)
4315 for(i = 8; i < 15; i++)
4317 reg[i] = reg_mode[new_mode][i - 8];
4322 reg[REG_SP] = reg_mode[new_mode][5];
4323 reg[REG_LR] = reg_mode[new_mode][6];
4326 reg[CPU_MODE] = new_mode;
4330 void raise_interrupt(irq_type irq_raised)
4332 // The specific IRQ must be enabled in IE, master IRQ enable must be on,
4333 // and it must be on in the flags.
4334 io_registers[REG_IF] |= irq_raised;
4336 if((io_registers[REG_IE] & irq_raised) && io_registers[REG_IME] &&
4337 ((reg[REG_CPSR] & 0x80) == 0))
4339 bios_read_protect = 0xe55ec002;
4341 // Interrupt handler in BIOS
4342 reg_mode[MODE_IRQ][6] = reg[REG_PC] + 4;
4343 spsr[MODE_IRQ] = reg[REG_CPSR];
4344 reg[REG_CPSR] = 0xD2;
4345 reg[REG_PC] = 0x00000018;
4347 bios_region_read_allow();
4349 set_cpu_mode(MODE_IRQ);
4350 reg[CPU_HALT_STATE] = CPU_ACTIVE;
4351 reg[CHANGED_PC_STATUS] = 1;
/* Top-level CPU run loop: execute ARM or Thumb instructions until the
   cycle budget is spent, then call update_gba() to advance the rest
   of the system and obtain the next budget.
   NOTE(review): large parts of this function (loop heads, cpu_alert
   handling, halt handling) are elided in this view; comments below
   are hedged accordingly. */
4355 u32 execute_arm(u32 cycles)
4357 u32 pc = reg[REG_PC];
4360 u32 n_flag, z_flag, c_flag, v_flag;
/* 32KB-granularity page index into the read map (pc >> 15). */
4361 u32 pc_region = (pc >> 15);
4362 u8 *pc_address_block = memory_map_read[pc_region];
4364 s32 cycles_remaining;
4365 u32 cycles_per_instruction = global_cycles_per_instruction;
4366 cpu_alert_type cpu_alert;
/* Unmapped gamepak page: fault it in on demand. */
4370 if(pc_address_block == NULL)
4371 pc_address_block = load_gamepak_page(pc_region & 0x3FF);
4375 cycles_remaining = cycles;
/* CPSR T bit (0x20) distinguishes Thumb vs ARM decode loops; the
   branch structure is elided in this view. */
4379 if(reg[REG_CPSR] & 0x20)
4387 step_debug(pc, cycles_remaining);
4388 cycles_per_instruction = global_cycles_per_instruction;
/* ARM-state inner loop body. */
4391 execute_arm_instruction();
4392 cycles_remaining -= cycles_per_instruction;
4393 } while(cycles_remaining > 0);
4396 cycles = update_gba();
/* Thumb-state inner loop body. */
4404 step_debug(pc, cycles_remaining);
4407 execute_thumb_instruction();
4408 cycles_remaining -= cycles_per_instruction;
4409 } while(cycles_remaining > 0);
4412 cycles = update_gba();
4417 if(cpu_alert == CPU_ALERT_IRQ)
4419 cycles = cycles_remaining;
/* Spin on update_gba() while the CPU is halted. */
4425 while(reg[CPU_HALT_STATE] != CPU_ACTIVE)
4427 cycles = update_gba();
/* NOTE(review): interior of the CPU reset/init function; its
   signature is not visible in this view. */
4437 for(i = 0; i < 16; i++)
/* Boot-from-cartridge register state: SP at top of IWRAM work area,
   PC at ROM start, CPSR mode bits 0x1F. */
4442 reg[REG_SP] = 0x03007F00;
4443 reg[REG_PC] = 0x08000000;
4444 reg[REG_CPSR] = 0x0000001F;
4445 reg[CPU_HALT_STATE] = CPU_ACTIVE;
4446 reg[CPU_MODE] = MODE_USER;
4447 reg[CHANGED_PC_STATUS] = 0;
/* Per-mode banked stack pointers (slot 5 = SP). */
4449 reg_mode[MODE_USER][5] = 0x03007F00;
4450 reg_mode[MODE_IRQ][5] = 0x03007FA0;
4451 reg_mode[MODE_FIQ][5] = 0x03007FA0;
4452 reg_mode[MODE_SUPERVISOR][5] = 0x03007FE0;
4455 void move_reg(u32 *new_reg)
4459 for(i = 0; i < 32; i++)
4461 new_reg[i] = reg[i];
/* Generates cpu_read_savestate()/cpu_write_mem_savestate(): serialize
   0x100 bytes of reg[], plus the spsr[] and reg_mode[] arrays, to or
   from a savestate file, with the direction selected by the `type`
   token (read / write_mem).
   NOTE(review): the macro's closing brace lines are elided in this
   view of the file; no comments are inserted between continuation
   lines to avoid breaking the backslash chain. */
4468 #define cpu_savestate_builder(type) \
4469 void cpu_##type##_savestate(file_tag_type savestate_file) \
4471 file_##type(savestate_file, reg, 0x100); \
4472 file_##type##_array(savestate_file, spsr); \
4473 file_##type##_array(savestate_file, reg_mode); \
/* Instantiate the reader and the writer. */
4476 cpu_savestate_builder(read);
4477 cpu_savestate_builder(write_mem);