3 .global arm_update_gba_arm
4 .global arm_update_gba_thumb
5 .global arm_update_gba_idle_arm
6 .global arm_update_gba_idle_thumb
8 .global arm_indirect_branch_arm
9 .global arm_indirect_branch_thumb
10 .global arm_indirect_branch_dual_arm
11 .global arm_indirect_branch_dual_thumb
13 .global execute_arm_translate
15 .global execute_store_u8
16 .global execute_store_u16
17 .global execute_store_u32
18 .global execute_store_u32_safe
20 .global execute_load_u8
21 .global execute_load_s8
22 .global execute_load_u16
23 .global execute_load_s16
24 .global execute_load_u32
26 .global execute_store_cpsr
27 .global execute_read_spsr
28 .global execute_store_spsr
29 .global execute_spsr_restore
31 .global execute_swi_arm
32 .global execute_swi_thumb
34 .global execute_patch_bios_read
35 .global execute_patch_bios_protect
37 .global execute_bios_ptr_protected
38 .global execute_bios_rom_ptr
41 .global step_debug_arm
43 .global invalidate_icache_region
44 .global invalidate_cache_region
46 .global memory_map_read
47 .global memory_map_write
@ Byte offsets into the emulated CPU context block addressed by reg_base.
@ The memory-handler pointer table lives in the REG_BASE_OFFSET bytes below
@ the register area; the architectural registers start at REG_BASE_OFFSET.
50 #define REG_BASE_OFFSET 1024
@ Emulated GBA registers r0-r14, one 32-bit slot each.
52 #define REG_R0 (REG_BASE_OFFSET + (0 * 4))
53 #define REG_R1 (REG_BASE_OFFSET + (1 * 4))
54 #define REG_R2 (REG_BASE_OFFSET + (2 * 4))
55 #define REG_R3 (REG_BASE_OFFSET + (3 * 4))
56 #define REG_R4 (REG_BASE_OFFSET + (4 * 4))
57 #define REG_R5 (REG_BASE_OFFSET + (5 * 4))
58 #define REG_R6 (REG_BASE_OFFSET + (6 * 4))
59 #define REG_R7 (REG_BASE_OFFSET + (7 * 4))
60 #define REG_R8 (REG_BASE_OFFSET + (8 * 4))
61 #define REG_R9 (REG_BASE_OFFSET + (9 * 4))
62 #define REG_R10 (REG_BASE_OFFSET + (10 * 4))
63 #define REG_R11 (REG_BASE_OFFSET + (11 * 4))
64 #define REG_R12 (REG_BASE_OFFSET + (12 * 4))
65 #define REG_R13 (REG_BASE_OFFSET + (13 * 4))
66 #define REG_R14 (REG_BASE_OFFSET + (14 * 4))
@ Aliases: SP/LR share the r13/r14 slots; the PC gets its own slot.
67 #define REG_SP (REG_BASE_OFFSET + (13 * 4))
68 #define REG_LR (REG_BASE_OFFSET + (14 * 4))
69 #define REG_PC (REG_BASE_OFFSET + (15 * 4))
@ Individually stored ALU flags plus the persistent CPSR value.
71 #define REG_N_FLAG (REG_BASE_OFFSET + (16 * 4))
72 #define REG_Z_FLAG (REG_BASE_OFFSET + (17 * 4))
73 #define REG_C_FLAG (REG_BASE_OFFSET + (18 * 4))
74 #define REG_V_FLAG (REG_BASE_OFFSET + (19 * 4))
75 #define REG_CPSR (REG_BASE_OFFSET + (20 * 4))
@ Scratch slots the stubs use to stash temporary values/results.
77 #define REG_SAVE (REG_BASE_OFFSET + (21 * 4))
78 #define REG_SAVE2 (REG_BASE_OFFSET + (22 * 4))
79 #define REG_SAVE3 (REG_BASE_OFFSET + (23 * 4))
@ Emulator bookkeeping: CPU mode index, halt flag, PC-changed flag.
81 #define CPU_MODE (REG_BASE_OFFSET + (29 * 4))
82 #define CPU_HALT_STATE (REG_BASE_OFFSET + (30 * 4))
83 #define CHANGED_PC_STATUS (REG_BASE_OFFSET + (31 * 4))
@ reg_cycles (host r12) caches the cycle counter; it is kept
@ one's-complemented (always loaded/read back with mvn in this file).
94 #define reg_cycles r12
@ Index of supervisor mode in the reg_mode/spsr tables.
104 #define MODE_SUPERVISOR 3
107 @ Will load the register set from memory into the appropriate cached registers.
108 @ See arm_emit.h for listing explanation.
@ ARM blocks cache a non-contiguous set: reg_x0/reg_x1 hold r0/r1, while
@ reg_x2..reg_x5 hold r6, r9, r12 and r14 respectively.
110 #define load_registers_arm() ;\
111 ldr reg_x0, [reg_base, #REG_R0] ;\
112 ldr reg_x1, [reg_base, #REG_R1] ;\
113 ldr reg_x2, [reg_base, #REG_R6] ;\
114 ldr reg_x3, [reg_base, #REG_R9] ;\
115 ldr reg_x4, [reg_base, #REG_R12] ;\
116 ldr reg_x5, [reg_base, #REG_R14] ;\
118 #define load_registers_thumb() /* Thumb blocks cache r0-r5 contiguously */;\
119 ldr reg_x0, [reg_base, #REG_R0] ;\
120 ldr reg_x1, [reg_base, #REG_R1] ;\
121 ldr reg_x2, [reg_base, #REG_R2] ;\
122 ldr reg_x3, [reg_base, #REG_R3] ;\
123 ldr reg_x4, [reg_base, #REG_R4] ;\
124 ldr reg_x5, [reg_base, #REG_R5] ;\
127 @ Will store the register set from cached registers back to memory.
@ These must mirror the load_registers_* mappings above exactly.
129 #define store_registers_arm() ;\
130 str reg_x0, [reg_base, #REG_R0] ;\
131 str reg_x1, [reg_base, #REG_R1] ;\
132 str reg_x2, [reg_base, #REG_R6] ;\
133 str reg_x3, [reg_base, #REG_R9] ;\
134 str reg_x4, [reg_base, #REG_R12] ;\
135 str reg_x5, [reg_base, #REG_R14] ;\
137 #define store_registers_thumb() /* inverse of load_registers_thumb */;\
138 str reg_x0, [reg_base, #REG_R0] ;\
139 str reg_x1, [reg_base, #REG_R1] ;\
140 str reg_x2, [reg_base, #REG_R2] ;\
141 str reg_x3, [reg_base, #REG_R3] ;\
142 str reg_x4, [reg_base, #REG_R4] ;\
143 str reg_x5, [reg_base, #REG_R5] ;\
146 @ Returns an updated persistent cpsr with the cached flags register.
147 @ Uses reg as a temporary register and returns the CPSR here.
149 #define collapse_flags_no_update(reg) ;\
150 ldr reg, [reg_base, #REG_CPSR] /* reg = cpsr */;\
151 bic reg, reg, #0xF0000000 /* clear ALU flags in cpsr */;\
152 and reg_flags, reg_flags, #0xF0000000 /* clear non-ALU flags */;\
153 orr reg, reg, reg_flags /* update cpsr with ALU flags */;\
155 @ Updates cpsr using the above macro.
157 #define collapse_flags(reg) ;\
158 collapse_flags_no_update(reg) ;\
159 str reg, [reg_base, #REG_CPSR] /* write merged cpsr back */;\
161 @ Loads the saved flags register from the persistent cpsr.
@ Also applies the loaded NZCV bits to the host flags via msr cpsr_f.
163 #define extract_flags() ;\
164 ldr reg_flags, [reg_base, #REG_CPSR] ;\
165 msr cpsr_f, reg_flags ;\
168 #define save_flags() /* snapshot host flags into reg_flags */;\
169 mrs reg_flags, cpsr ;\
171 #define restore_flags() /* reapply reg_flags to the host NZCV */;\
172 msr cpsr_f, reg_flags ;\
174 @ Calls a C function - all caller save registers which are important to the
175 @ dynarec and to returning from this function are saved.
@ NOTE(review): the actual branch ("bl function") between the stmdb/ldmia
@ pair is elided from this listing; confirm against the full source.
177 #define call_c_function(function) ;\
178 stmdb sp!, { r3, r12, lr } /* save volatile regs the stub still needs */;\
180 ldmia sp!, { r3, r12, lr } /* restore them after the C call */;\
183 @ Update the GBA hardware (video, sound, input, etc)
@ return_op selects "straight" (plain return) or "add" (idle-loop variant);
@ the bodies of the return_*/load_pc_* helpers are elided in this listing.
188 #define return_straight() ;\
191 #define return_add() ;\
194 #define load_pc_straight() ;\
197 #define load_pc_add() ;\
201 #define arm_update_gba_builder(name, mode, return_op) /* emits arm_update_gba_<name>: sync state, call update_gba, re-dispatch if the PC changed */;\
203 arm_update_gba_##name: ;\
204 load_pc_##return_op() /* fetch the emulated PC (body elided in listing) */;\
205 str r0, [reg_base, #REG_PC] /* write out the PC */;\
208 collapse_flags(r0) /* update the flags */;\
210 store_registers_##mode() /* save out registers */;\
211 call_c_function(update_gba) /* update GBA state */;\
213 mvn reg_cycles, r0 /* load new cycle count (stored inverted) */;\
215 ldr r0, [reg_base, #CHANGED_PC_STATUS] /* load PC changed status */;\
216 cmp r0, #0 /* see if PC has changed */;\
217 beq 1f /* if not return */;\
219 ldr r0, [reg_base, #REG_PC] /* load new PC */;\
220 ldr r1, [reg_base, #REG_CPSR] /* r1 = flags */;\
221 tst r1, #0x20 /* see if Thumb bit is set */;\
222 bne 2f /* if so load Thumb PC */;\
224 load_registers_arm() /* load ARM regs */;\
225 call_c_function(block_lookup_address_arm) ;\
227 bx r0 /* jump to new ARM block */;\
230 load_registers_##mode() /* reload registers */;\
232 return_##return_op() ;\
235 load_registers_thumb() /* load Thumb regs */;\
236 call_c_function(block_lookup_address_thumb) ;\
238 bx r0 /* jump to new Thumb block */;\
241 arm_update_gba_builder(arm, arm, straight)
242 arm_update_gba_builder(thumb, thumb, straight)
244 arm_update_gba_builder(idle_arm, arm, add)
245 arm_update_gba_builder(idle_thumb, thumb, add)
249 @ These are b stubs for performing indirect branches. They are not
250 @ linked to and don't return, instead they link elsewhere.
253 @ r0: PC to branch to
@ NOTE(review): the "bx r0" dispatch lines and the local "1:" labels are
@ elided from this listing; confirm against the full source.
255 arm_indirect_branch_arm: @ in: r0 = emulated PC (ARM target)
257 call_c_function(block_lookup_address_arm)
@ As above, for a Thumb-mode target.
261 arm_indirect_branch_thumb: @ in: r0 = emulated PC (Thumb target)
263 call_c_function(block_lookup_address_thumb)
@ Dual-mode branch from ARM code: bit 0 of r0 selects the target ISA.
267 arm_indirect_branch_dual_arm:
269 tst r0, #0x01 @ check lower bit
270 bne 1f @ if set going to Thumb mode
271 call_c_function(block_lookup_address_arm)
@ Taken when bit 0 was set: swap the cached register set over to Thumb
@ and set the Thumb bit in the emulated cpsr before the lookup.
277 store_registers_arm() @ save out ARM registers
278 load_registers_thumb() @ load in Thumb registers
279 ldr r1, [reg_base, #REG_CPSR] @ load cpsr
280 orr r1, r1, #0x20 @ set Thumb mode
281 str r1, [reg_base, #REG_CPSR] @ store flags
282 call_c_function(block_lookup_address_thumb)
@ Dual-mode branch from Thumb code: mirror image of the above.
286 arm_indirect_branch_dual_thumb:
288 tst r0, #0x01 @ check lower bit
289 beq 1f @ if clear, going to ARM mode
291 call_c_function(block_lookup_address_thumb)
@ Taken when bit 0 was clear: swap to the ARM cached register set and
@ clear the emulated Thumb flag before the lookup.
296 store_registers_thumb() @ save out Thumb registers
297 load_registers_arm() @ load in ARM registers
298 ldr r1, [reg_base, #REG_CPSR] @ load cpsr
299 bic r1, r1, #0x20 @ clear Thumb mode
300 str r1, [reg_base, #REG_CPSR] @ store flags
301 call_c_function(block_lookup_address_arm)
@ execute_store_cpsr body (its label line is elided from this listing).
@ in: r0 = new cpsr value, r1 = mask of cpsr bits to update.
@ The word at [lr] holds the emulated PC of the call site and control
@ returns to lr + 4 (see the "add pc, lr, #4" return convention below).
310 @ r1: bitmask of which bits in cpsr to update
315 and reg_flags, r0, r1 @ reg_flags = new_cpsr & store_mask
316 ldr r0, [reg_base, #REG_CPSR] @ r0 = cpsr
317 bic r0, r0, r1 @ r0 = cpsr & ~store_mask
318 orr reg_flags, reg_flags, r0 @ reg_flags = (new_cpsr & mask) | (cpsr & ~mask)
320 mov r0, reg_flags @ also put new cpsr in r0
322 store_registers_arm() @ save ARM registers
323 ldr r2, [lr] @ r2 = emulated pc (inline word at the call site)
324 call_c_function(execute_store_cpsr_body)
325 load_registers_arm() @ restore ARM registers
327 cmp r0, #0 @ check new PC
328 beq 1f @ if it's zero, return
330 call_c_function(block_lookup_address_arm)
333 bx r0 @ return to new ARM address
337 add pc, lr, #4 @ return (skip the inline PC word)
@ execute_store_spsr body (label elided): writes r0 into the spsr slot for
@ the current CPU mode. NOTE(review): the r1 masking lines are elided here.
340 @ Update the current spsr.
344 @ r1: bitmask of which bits in spsr to update
347 ldr r1, 1f @ r1 = spsr table address (literal pool)
348 ldr r2, [reg_base, #CPU_MODE] @ r2 = CPU_MODE
349 str r0, [r1, r2, lsl #2] @ spsr[CPU_MODE] = new_spsr
@ execute_read_spsr body (label elided): returns spsr[CPU_MODE] in r0.
355 @ Read the current spsr.
361 ldr r0, 1b @ r0 = spsr table address
362 ldr r1, [reg_base, #CPU_MODE] @ r1 = CPU_MODE
363 ldr r0, [r0, r1, lsl #2] @ r0 = spsr[CPU_MODE]
@ execute_spsr_restore: copy spsr[CPU_MODE] back into the cpsr (used when
@ leaving an exception mode), then resume execution at the address in r0
@ in whichever ISA the restored cpsr selects.
367 @ Restore the cpsr from the mode spsr and mode shift.
372 execute_spsr_restore:
374 ldr r1, 1f @ r1 = spsr table address (literal pool)
375 ldr r2, [reg_base, #CPU_MODE] @ r2 = cpu_mode
376 ldr r1, [r1, r2, lsl #2] @ r1 = spsr[cpu_mode] (new cpsr)
377 str r1, [reg_base, #REG_CPSR] @ update cpsr
378 mov reg_flags, r1 @ also, update shadow flags
380 @ This function call will pass r0 (address) and return it.
381 store_registers_arm() @ save ARM registers
382 call_c_function(execute_spsr_restore_body)
@ Dispatch to the block at r0 in the ISA the restored cpsr selects.
384 ldr r1, [reg_base, #REG_CPSR] @ r1 = cpsr
385 tst r1, #0x20 @ see if Thumb mode is set
386 bne 2f @ if so handle it
388 load_registers_arm() @ restore ARM registers
389 call_c_function(block_lookup_address_arm)
393 @ This will service execute_spsr_restore and execute_swi
398 load_registers_thumb() @ load Thumb registers
399 call_c_function(block_lookup_address_thumb)
405 @ Setup the mode transition work for calling an SWI.
@ Saves the return PC into reg_mode[MODE_SUPERVISOR][6] (banked r14),
@ banks the current cpsr into spsr[MODE_SUPERVISOR], then switches the
@ emulated CPU into supervisor mode and returns to the generated code.
410 #define execute_swi_builder(mode) ;\
412 execute_swi_##mode: ;\
414 ldr r1, 1f /* r1 = reg_mode */;\
415 /* reg_mode[MODE_SUPERVISOR][6] = pc */;\
416 ldr r0, [lr] /* load PC */;\
417 str r0, [r1, #((MODE_SUPERVISOR * (7 * 4)) + (6 * 4))] ;\
418 collapse_flags_no_update(r0) /* r0 = cpsr */;\
419 ldr r1, 2f /* r1 = spsr */;\
420 str r0, [r1, #(MODE_SUPERVISOR * 4)] /* spsr[MODE_SUPERVISOR] = cpsr */;\
421 bic r0, r0, #0x3F /* clear mode and Thumb bits */;\
422 orr r0, r0, #0x13 /* set to supervisor mode */;\
423 str r0, [reg_base, #REG_CPSR] /* update cpsr */;\
425 call_c_function(bios_region_read_allow) ;\
427 mov r0, #MODE_SUPERVISOR ;\
429 store_registers_##mode() /* store regs for mode */;\
430 call_c_function(set_cpu_mode) /* set the CPU mode to svsr */;\
431 load_registers_arm() /* load ARM regs (BIOS handler is ARM code) */;\
434 add pc, lr, #4 /* return */;\
443 .word execute_bios_rom_ptr ;\
445 execute_swi_builder(arm)
446 execute_swi_builder(thumb)
449 @ Wrapper for calling SWI functions in C (or can implement some in ASM if
@ it ends up being worth it). Spills the cached registers around the C
@ call so the C implementation sees up-to-date emulated registers.
452 #define execute_swi_function_builder(swi_function, mode) ;\
454 .global execute_swi_hle_##swi_function##_##mode ;\
455 execute_swi_hle_##swi_function##_##mode: ;\
457 store_registers_##mode() ;\
458 call_c_function(execute_swi_hle_##swi_function##_c) ;\
459 load_registers_##mode() ;\
463 execute_swi_function_builder(div, arm)
464 execute_swi_function_builder(div, thumb)
467 @ Start program execution. Normally the mode should be Thumb and the
468 @ PC should be 0x8000000, however if a save state is preloaded this
472 @ r0: initial value for cycle counter
474 @ Uses sp as reg_base; must hold consistently true.
476 execute_arm_translate:
477 sub sp, sp, #0x100 @ allocate room for register data
479 mvn reg_cycles, r0 @ load cycle counter (kept one's-complemented)
481 mov r0, reg_base @ load reg_base into first param
482 call_c_function(move_reg) @ make reg_base the new reg ptr
484 sub sp, sp, #REG_BASE_OFFSET @ allocate room for ptr table
485 bl load_ptr_read_function_table @ load read function ptr table
@ Dispatch to the first translated block in the ISA the cpsr selects.
487 ldr r0, [reg_base, #REG_PC] @ r0 = current pc
488 ldr r1, [reg_base, #REG_CPSR] @ r1 = flags
489 tst r1, #0x20 @ see if Thumb bit is set
491 bne 1f @ if so lookup thumb
493 load_registers_arm() @ load ARM registers
494 call_c_function(block_lookup_address_arm)
495 extract_flags() @ load flags
496 bx r0 @ jump to first ARM block
@ Thumb path (local "1:" label elided in this listing).
499 load_registers_thumb() @ load Thumb registers
500 call_c_function(block_lookup_address_thumb)
501 extract_flags() @ load flags
502 bx r0 @ jump to first Thumb block
505 @ Write out to memory.
@ Fast path: addresses below 0x10000000 whose 32KB page has a direct
@ pointer in memory_map_write; anything else falls back to the C path.
512 #define execute_store_body(store_type, store_op) ;\
514 stmdb sp!, { lr } /* save lr */;\
515 tst r0, #0xF0000000 /* make sure address is in range */;\
516 bne ext_store_u##store_type /* if not do ext store */;\
518 ldr r2, 1f /* r2 = memory_map_write */;\
519 mov lr, r0, lsr #15 /* lr = page index of address (32KB pages) */;\
520 ldr r2, [r2, lr, lsl #2] /* r2 = memory page */;\
522 cmp r2, #0 /* see if map is ext */;\
523 beq ext_store_u##store_type /* if so do ext store */;\
525 mov r0, r0, lsl #17 /* isolate bottom 15 bits in top */;\
526 mov r0, r0, lsr #17 /* like performing and 0x7FFF */;\
527 store_op r1, [r2, r0] /* store result */;\
530 #define store_align_8() /* per-size address alignment fixup; body elided in listing */;\
532 #define store_align_16() /* body elided in listing */;\
535 #define store_align_32() /* body elided in listing */;\
539 #define execute_store_builder(store_type, store_op, load_op) /* fast store plus SMC detection via the code-status mirror 0x8000 below the page */;\
541 execute_store_u##store_type: ;\
542 execute_store_body(store_type, store_op) ;\
543 sub r2, r2, #0x8000 /* Pointer to code status data */;\
544 load_op r0, [r2, r0] /* check code flag */;\
546 cmp r0, #0 /* see if it's not 0 */;\
547 bne 2f /* if so perform smc write */;\
548 ldmia sp!, { lr } /* restore lr */;\
550 add pc, lr, #4 /* return, skipping the inline PC word */;\
553 ldmia sp!, { lr } /* restore lr */;\
554 ldr r0, [lr] /* load PC */;\
555 str r0, [reg_base, #REG_PC] /* write out PC */;\
556 b smc_write /* perform smc write */;\
558 .word memory_map_write ;\
560 ext_store_u##store_type: ;\
561 ldmia sp!, { lr } /* pop lr off of stack */;\
562 ldr r2, [lr] /* load PC */;\
563 str r2, [reg_base, #REG_PC] /* write out PC */;\
564 store_align_##store_type() ;\
565 call_c_function(write_memory##store_type) ;\
566 b write_epilogue /* handle additional write stuff */;\
568 execute_store_builder(8, strb, ldrb)
569 execute_store_builder(16, strh, ldrh)
570 execute_store_builder(32, str, ldr)
@ u32 store variant without the SMC code-flag check.
@ NOTE(review): presumably used where self-modifying-code detection is
@ unnecessary; several body lines are elided in this listing - confirm.
573 execute_store_u32_safe:
574 execute_store_body(32_safe, str)
576 ldmia sp!, { pc } @ return (lr was pushed by execute_store_body)
579 .word memory_map_write
@ ext_store_u32_safe C fallback (label elided in this listing).
582 ldmia sp!, { lr } @ Restore lr
583 call_c_function(write_memory32) @ Perform 32bit store
@ write_epilogue (label elided in this listing): common tail for C-path
@ writes. r0 is the alert code from write_memory*: 0 = nothing to do,
@ 2 = self-modifying code, otherwise GBA state must be re-synced.
589 cmp r0, #0 @ check if the write rose an alert
590 beq 4f @ if not we can exit
592 collapse_flags(r1) @ interrupt needs current flags
594 cmp r0, #2 @ see if the alert is due to SMC
595 beq smc_write @ if so, goto SMC handler
597 ldr r1, [reg_base, #REG_CPSR] @ r1 = cpsr
598 tst r1, #0x20 @ see if Thumb bit is set
599 bne 1f @ if so do Thumb update
601 store_registers_arm() @ save ARM registers
604 bl update_gba @ update GBA until CPU isn't halted
@ Resume at the (possibly new) PC in the ISA the cpsr now selects.
606 mvn reg_cycles, r0 @ load new cycle count (stored inverted)
607 ldr r0, [reg_base, #REG_PC] @ load new PC
608 ldr r1, [reg_base, #REG_CPSR] @ r1 = flags
609 tst r1, #0x20 @ see if Thumb bit is set
613 call_c_function(block_lookup_address_arm)
615 bx r0 @ jump to new ARM block
618 store_registers_thumb() @ save Thumb registers
622 load_registers_thumb()
623 call_c_function(block_lookup_address_thumb)
625 bx r0 @ jump to new Thumb block
629 add pc, lr, #4 @ return
@ smc_write (label elided): a store hit translated code, so the RAM
@ translation cache is flushed before re-dispatching.
633 call_c_function(flush_translation_cache_ram)
@ lookup_pc (label elided): re-dispatch to the block at REG_PC.
636 ldr r0, [reg_base, #REG_PC] @ r0 = new pc
637 ldr r1, [reg_base, #REG_CPSR] @ r1 = flags
638 tst r1, #0x20 @ see if Thumb bit is set
639 beq lookup_pc_arm @ if not lookup ARM
642 call_c_function(block_lookup_address_thumb)
644 bx r0 @ jump to new Thumb block
647 call_c_function(block_lookup_address_arm)
649 bx r0 @ jump to new ARM block
@ No-op sign extensions for the unsigned load types.
652 #define sign_extend_u8(reg)
653 #define sign_extend_u16(reg)
654 #define sign_extend_u32(reg)
@ Sign-extend a loaded byte/halfword with a shift pair.
656 #define sign_extend_s8(reg) ;\
657 mov reg, reg, lsl #24 /* shift reg into upper 8bits */;\
658 mov reg, reg, asr #24 /* shift down, sign extending */;\
660 #define sign_extend_s16(reg) ;\
661 mov reg, reg, lsl #16 /* shift reg into upper 16bits */;\
662 mov reg, reg, asr #16 /* shift down, sign extending */;\
664 #define execute_load_op_u8(load_op) /* load from page + (address & 0x7FFF) */;\
665 mov r0, r0, lsl #17 ;\
666 load_op r0, [r2, r0, lsr #17] ;\
668 #define execute_load_op_s8(load_op) /* same, masking r0 in place first */;\
669 mov r0, r0, lsl #17 ;\
670 mov r0, r0, lsr #17 ;\
671 load_op r0, [r2, r0] ;\
673 #define execute_load_op_u16(load_op) ;\
674 execute_load_op_s8(load_op) ;\
676 #define execute_load_op_s16(load_op) ;\
677 execute_load_op_s8(load_op) ;\
679 #define execute_load_op_u16(load_op) /* NOTE(review): duplicate of the identical definition above - likely redundant; confirm against the full source */;\
680 execute_load_op_s8(load_op) ;\
682 #define execute_load_op_u32(load_op) ;\
683 execute_load_op_u8(load_op) ;\
686 #define execute_load_builder(load_type, load_function, load_op, mask) /* emits execute_load_<type> plus its ext_load_<type> C fallback; mask routes out-of-range (and, for 16-bit types, misaligned) addresses to the C path */;\
688 execute_load_##load_type: ;\
690 tst r0, mask /* make sure address is in range */;\
691 bne ext_load_##load_type /* if not do ext load */;\
693 ldr r2, 1f /* r2 = memory_map_read */;\
694 mov r1, r0, lsr #15 /* r1 = page index of address */;\
695 ldr r2, [r2, r1, lsl #2] /* r2 = memory page */;\
697 cmp r2, #0 /* see if map is ext */;\
698 beq ext_load_##load_type /* if so do ext load */;\
700 execute_load_op_##load_type(load_op) ;\
702 add pc, lr, #4 /* return */;\
704 ext_load_##load_type: ;\
705 ldr r1, [lr] /* r1 = PC */;\
706 str r1, [reg_base, #REG_PC] /* update PC */;\
707 call_c_function(read_memory##load_function) ;\
708 sign_extend_##load_type(r0) /* sign extend result */;\
710 add pc, lr, #4 /* return */;\
713 .word memory_map_read ;\
716 execute_load_builder(u8, 8, ldrneb, #0xF0000000)
717 execute_load_builder(s8, 8, ldrnesb, #0xF0000000)
718 execute_load_builder(u16, 16, ldrneh, #0xF0000001) @ bit 0 in the mask traps misaligned halfwords
719 execute_load_builder(s16, 16_signed, ldrnesh, #0xF0000001)
720 execute_load_builder(u32, 32, ldrne, #0xF0000000)
@ Generates a fast read-pointer handler for a mirrored region: masks the
@ address down to "bits" bits and adds the region base pointer (the
@ remaining lines of the macro body are elided from this listing).
723 #define execute_ptr_builder(region, ptr, bits) ;\
725 execute_##region##_ptr: ;\
726 ldr r1, 1f /* load region ptr */;\
727 mov r0, r0, lsl #(32 - bits) /* isolate bottom bits */;\
728 mov r0, r0, lsr #(32 - bits) ;\
735 execute_bios_ptr_protected: @ BIOS region handler when direct reads are disallowed; points into bios_read_protect
736 ldr r1, 1f @ load bios read ptr
737 and r0, r0, #0x03 @ only want bottom 2 bits
741 .word bios_read_protect
@ EWRAM pointer handler (label elided): the region is stored in 32KB
@ halves, interleaved per the formula below.
744 @ address = (address & 0x7FFF) + ((address & 0x38000) * 2) + 0x8000;
747 ldr r1, 1f @ load ewram read ptr
748 mov r2, r0, lsl #17 @ isolate bottom 15 bits (matching lsr elided in listing)
750 and r0, r0, #0x38000 @ isolate address bits 15-17
751 add r0, r2, r0, lsl #1 @ add (bits 15-17) * 2 to the low 15 bits
755 .word (ewram + 0x8000)
758 @ u32 gamepak_index = address >> 15;
759 @ u8 *map = memory_map_read[gamepak_index];
762 @ map = load_gamepak_page(gamepak_index & 0x3FF);
764 @ value = address##type(map, address & 0x7FFF)
@ Gamepak ROM pointer handler (label elided in this listing): demand-loads
@ the 32KB ROM page if it is not currently mapped.
767 ldr r1, 1f @ load memory_map_read
768 mov r2, r0, lsr #15 @ r2 = page index (top 17 bits of address)
769 ldr r1, [r1, r2, lsl #2] @ load memory map read ptr
772 cmp r1, #0 @ see if map entry is NULL
773 bne 2f @ if not resume
775 stmdb sp!, { r0 } @ save r0 on stack
776 mov r2, r2, lsl #20 @ mask page index (matching lsr elided in listing)
778 call_c_function(load_gamepak_page) @ read new page into r0
780 mov r1, r0 @ new map = return
781 ldmia sp!, { r0 } @ restore r0
784 mov r0, r0, lsl #17 @ isolate bottom 15 bits
790 .word memory_map_read
793 @ These will store the result in a pointer, then pass that pointer.
@ EEPROM read handler (label elided): stages the result in REG_SAVE and
@ returns a zeroed address/offset.
798 call_c_function(read_eeprom) @ load EEPROM result
799 add r1, reg_base, #(REG_SAVE & 0xFF00)
800 add r1, r1, #(REG_SAVE & 0xFF)
801 strh r0, [r1] @ write result out (halfword)
802 mov r0, #0 @ zero out address
@ Backup (SRAM/flash) read handler (label elided), same staging scheme.
811 mov r0, r0, lsl #16 @ keep low 16 bits shifted high (matching lsr elided in listing)
813 call_c_function(read_backup) @ load backup result
814 add r1, reg_base, #(REG_SAVE & 0xFF00)
815 add r1, r1, #(REG_SAVE & 0xFF)
816 strb r0, [r1] @ write result out (byte)
817 mov r0, #0 @ zero out address
@ execute_open_ptr body (label elided): handles reads from unmapped
@ ("open bus") addresses. It reads the opcode at the current PC and
@ stages it in REG_SAVE + 4, returning a small offset into that word.
@ NOTE(review): this appears to model open-bus reads as returning the
@ currently executing opcode - confirm against the full source.
824 ldr r1, [reg_base, #REG_CPSR] @ r1 = cpsr
827 stmdb sp!, { r0 } @ save r0
829 ldr r0, [lr, #-4] @ r0 = current PC (inline word before the call site)
831 tst r1, #0x20 @ see if Thumb bit is set
832 bne 1f @ if so load Thumb op
@ ARM path: fetch the 32-bit opcode at the PC.
834 call_c_function(read_memory32) @ read open address
836 add r1, reg_base, #((REG_SAVE + 4) & 0xFF00)
837 add r1, r1, #((REG_SAVE + 4) & 0xFF)
839 str r0, [r1] @ write out
841 ldmia sp!, { r0 } @ restore r0
842 and r0, r0, #0x03 @ isolate bottom 2 bits
@ Thumb path: fetch the 16-bit opcode and mirror it in both halves.
848 call_c_function(read_memory16) @ read open address
850 orr r0, r0, r0, lsl #16 @ duplicate opcode over halves
851 add r1, reg_base, #((REG_SAVE + 4) & 0xFF00)
852 add r1, r1, #((REG_SAVE + 4) & 0xFF)
855 str r0, [r1] @ write out
857 ldmia sp!, { r0 } @ restore r0
858 and r0, r0, #0x03 @ isolate bottom 2 bits
@ Instantiate the simple mirrored-region pointer handlers
@ (region name, base pointer, significant address bits).
864 execute_ptr_builder(bios_rom, bios_rom, 14)
865 execute_ptr_builder(iwram, iwram + 0x8000, 15)
866 execute_ptr_builder(vram, vram, 17)
867 execute_ptr_builder(oam_ram, oam_ram, 10)
868 execute_ptr_builder(io_registers, io_registers, 10)
869 execute_ptr_builder(palette_ram, palette_ram, 10)
@ Table of per-region read pointer handlers, indexed by memory region
@ number (256 entries). Copied onto the stack below reg_base at startup
@ by load_ptr_read_function_table.
871 ptr_read_function_table:
872 .word execute_bios_ptr_protected @ 0x00: BIOS
873 .word execute_open_ptr @ 0x01: open
874 .word execute_ewram_ptr @ 0x02: ewram
875 .word execute_iwram_ptr @ 0x03: iwram
876 .word execute_io_registers_ptr @ 0x04: I/O registers
877 .word execute_palette_ram_ptr @ 0x05: palette RAM
878 .word execute_vram_ptr @ 0x06: vram
879 .word execute_oam_ram_ptr @ 0x07: oam RAM
880 .word execute_gamepak_ptr @ 0x08: gamepak
881 .word execute_gamepak_ptr @ 0x09: gamepak
882 .word execute_gamepak_ptr @ 0x0A: gamepak
883 .word execute_gamepak_ptr @ 0x0B: gamepak
884 .word execute_gamepak_ptr @ 0x0C: gamepak
885 .word execute_eeprom_ptr @ 0x0D: EEPROM
886 .word execute_backup_ptr @ 0x0E: backup
888 .rept (256 - 15) @ 0x0F - 0xFF: open
889 .word execute_open_ptr
893 @ Setup the read function table.
894 @ Load this onto the stack; assume we're free to use r3
@ Copies all 256 entries of ptr_read_function_table to [sp].
896 load_ptr_read_function_table:
897 mov r0, #256 @ 256 elements
898 ldr r1, 1f @ r1 = ptr_read_function_table
899 mov r2, sp @ load here
@ Copy loop (local label and branch elided in this listing).
902 ldr r3, [r1], #4 @ read pointer
903 str r3, [r2], #4 @ write pointer
905 subs r0, r0, #1 @ decrement count, loop until zero
911 .word ptr_read_function_table
914 @ Patch the read function table to allow for BIOS reads.
916 execute_patch_bios_read:
917 ldr r0, 1f @ r0 = patch function (execute_bios_rom_ptr)
918 ldr r1, 2f @ r1 = reg
920 str r0, [r1, #-REG_BASE_OFFSET] @ install as BIOS (region 0) read handler
924 .word execute_bios_rom_ptr
930 @ Patch the read function table to protect (disallow direct) BIOS reads.
932 execute_patch_bios_protect:
933 ldr r0, 1f @ r0 = patch function (execute_bios_ptr_protected)
934 ldr r1, 2f @ r1 = reg
936 str r0, [r1, #-REG_BASE_OFFSET] @ install as BIOS (region 0) read handler
940 .word execute_bios_ptr_protected
@ Copy one emulated register slot into the scratch save area 128 bytes
@ above the register block; used to preserve the cached register set
@ around the step_debug call below.
946 #define save_reg_scratch(reg) ;\
947 ldr r2, [reg_base, #(REG_BASE_OFFSET + (reg * 4))] ;\
948 str r2, [reg_base, #(REG_BASE_OFFSET + (reg * 4) + 128)] ;\
950 #define restore_reg_scratch(reg) /* inverse of save_reg_scratch */;\
951 ldr r2, [reg_base, #(REG_BASE_OFFSET + (reg * 4) + 128)] ;\
952 str r2, [reg_base, #(REG_BASE_OFFSET + (reg * 4))] ;\
954 #define scratch_regs_thumb(type) /* r0-r5: the Thumb cached set */;\
955 type##_reg_scratch(0) ;\
956 type##_reg_scratch(1) ;\
957 type##_reg_scratch(2) ;\
958 type##_reg_scratch(3) ;\
959 type##_reg_scratch(4) ;\
960 type##_reg_scratch(5) ;\
962 #define scratch_regs_arm(type) /* r0,r1,r6,r9,r12,r14: the ARM cached set */;\
963 type##_reg_scratch(0) ;\
964 type##_reg_scratch(1) ;\
965 type##_reg_scratch(6) ;\
966 type##_reg_scratch(9) ;\
967 type##_reg_scratch(12) ;\
968 type##_reg_scratch(14) ;\
975 ldr r0, [reg_base, #REG_CPSR] @ r0 = cpsr (step_debug_arm body; label elided in listing)
976 tst r0, #0x20 @ see if Thumb bit is set
978 ldr r0, [lr] @ load PC
979 mvn r1, reg_cycles @ load cycle counter (stored inverted)
981 beq 1f @ if not goto ARM mode
983 scratch_regs_thumb(save)
985 store_registers_thumb() @ write back Thumb regs
986 call_c_function(step_debug) @ call debug step
987 scratch_regs_thumb(restore)
989 add pc, lr, #4 @ return
992 scratch_regs_arm(save)
993 store_registers_arm() @ write back ARM regs
994 call_c_function(step_debug) @ call debug step
995 scratch_regs_arm(restore)
997 add pc, lr, #4 @ return, skipping PC
@ Page tables for the 32KB-granular memory map: 0x8000 bytes each, i.e.
@ 8192 4-byte page pointers, indexed by address >> 15 (see the stubs above).
1000 .comm memory_map_read 0x8000
1001 .comm memory_map_write 0x8000