@ Exported entry points of the dynarec stub layer.

@ GBA hardware-update stubs (sync video/sound/input from translated code).
.global arm_update_gba_arm
.global arm_update_gba_thumb
.global arm_update_gba_idle_arm
.global arm_update_gba_idle_thumb

@ Indirect branch handlers (branch target computed at runtime).
.global arm_indirect_branch_arm
.global arm_indirect_branch_thumb
.global arm_indirect_branch_dual_arm
.global arm_indirect_branch_dual_thumb

@ Top-level entry into translated code.
.global execute_arm_translate

@ Memory store handlers.
.global execute_store_u8
.global execute_store_u16
.global execute_store_u32
.global execute_store_u32_safe

@ Memory load handlers.
.global execute_load_u8
.global execute_load_s8
.global execute_load_u16
.global execute_load_s16
.global execute_load_u32

@ CPSR/SPSR maintenance.
.global execute_store_cpsr
.global execute_read_spsr
.global execute_store_spsr
.global execute_spsr_restore

@ SWI (software interrupt) entry stubs.
.global execute_swi_arm
.global execute_swi_thumb

@ BIOS read-protection patching.
.global execute_patch_bios_read
.global execute_patch_bios_protect

.global execute_bios_ptr_protected
.global execute_bios_rom_ptr

.global step_debug_arm

.global invalidate_icache_region
.global invalidate_cache_region

@ Page-pointer tables shared with the C core (defined via .comm below).
.global memory_map_read
.global memory_map_write
@ Byte offset of the emulated register file within the reg_base block.
#define REG_BASE_OFFSET 1024

@ Byte offsets of the emulated ARM registers r0-r15 relative to reg_base.
#define REG_R0 (REG_BASE_OFFSET + (0 * 4))
#define REG_R1 (REG_BASE_OFFSET + (1 * 4))
#define REG_R2 (REG_BASE_OFFSET + (2 * 4))
#define REG_R3 (REG_BASE_OFFSET + (3 * 4))
#define REG_R4 (REG_BASE_OFFSET + (4 * 4))
#define REG_R5 (REG_BASE_OFFSET + (5 * 4))
#define REG_R6 (REG_BASE_OFFSET + (6 * 4))
#define REG_R7 (REG_BASE_OFFSET + (7 * 4))
#define REG_R8 (REG_BASE_OFFSET + (8 * 4))
#define REG_R9 (REG_BASE_OFFSET + (9 * 4))
#define REG_R10 (REG_BASE_OFFSET + (10 * 4))
#define REG_R11 (REG_BASE_OFFSET + (11 * 4))
#define REG_R12 (REG_BASE_OFFSET + (12 * 4))
#define REG_R13 (REG_BASE_OFFSET + (13 * 4))
#define REG_R14 (REG_BASE_OFFSET + (14 * 4))

@ Conventional aliases for r13/r14, plus the program counter slot.
#define REG_SP (REG_BASE_OFFSET + (13 * 4))
#define REG_LR (REG_BASE_OFFSET + (14 * 4))
#define REG_PC (REG_BASE_OFFSET + (15 * 4))

@ ALU flags stored unpacked (one word each), plus the packed CPSR copy.
#define REG_N_FLAG (REG_BASE_OFFSET + (16 * 4))
#define REG_Z_FLAG (REG_BASE_OFFSET + (17 * 4))
#define REG_C_FLAG (REG_BASE_OFFSET + (18 * 4))
#define REG_V_FLAG (REG_BASE_OFFSET + (19 * 4))
#define REG_CPSR (REG_BASE_OFFSET + (20 * 4))

@ Scratch save slots used by the stubs below (e.g. EEPROM/open reads).
#define REG_SAVE (REG_BASE_OFFSET + (21 * 4))
#define REG_SAVE2 (REG_BASE_OFFSET + (22 * 4))
#define REG_SAVE3 (REG_BASE_OFFSET + (23 * 4))

@ Current CPU mode index, halt flag, and "C code changed the PC" flag.
#define CPU_MODE (REG_BASE_OFFSET + (29 * 4))
#define CPU_HALT_STATE (REG_BASE_OFFSET + (30 * 4))
#define CHANGED_PC_STATUS (REG_BASE_OFFSET + (31 * 4))

@ Cached cycle counter lives in r12; it is kept bit-inverted (see the
@ mvn instructions that load it below).
#define reg_cycles r12

@ Index of supervisor mode in the reg_mode/spsr tables.
#define MODE_SUPERVISOR 3
@ extract_u16(rd, rs): rd = rs & 0xFFFF (zero-extend the low halfword).
@ NOTE(review): the ARMv7 branch and the #else/#endif of this #ifdef are
@ not visible in this view (presumably uxth) -- confirm against the full
@ file before editing.
#ifdef __ARM_ARCH_7A__
#define extract_u16(rd, rs) \
#define extract_u16(rd, rs) \
  bic rd, rs, #0xff000000 ;\
  bic rd, rd, #0x00ff0000
@ Will load the register set from memory into the appropriate cached registers.
@ See arm_emit.h for listing explanation.
@ ARM mode caches emulated r0, r1, r6, r9, r12, r14; Thumb mode caches r0-r5.

#define load_registers_arm() ;\
  ldr reg_x0, [reg_base, #REG_R0] /* reg_x0 = emulated r0 */ ;\
  ldr reg_x1, [reg_base, #REG_R1] /* reg_x1 = emulated r1 */ ;\
  ldr reg_x2, [reg_base, #REG_R6] /* reg_x2 = emulated r6 */ ;\
  ldr reg_x3, [reg_base, #REG_R9] /* reg_x3 = emulated r9 */ ;\
  ldr reg_x4, [reg_base, #REG_R12] /* reg_x4 = emulated r12 */ ;\
  ldr reg_x5, [reg_base, #REG_R14] /* reg_x5 = emulated r14 */ ;\

#define load_registers_thumb() ;\
  ldr reg_x0, [reg_base, #REG_R0] /* reg_x0 = emulated r0 */ ;\
  ldr reg_x1, [reg_base, #REG_R1] /* reg_x1 = emulated r1 */ ;\
  ldr reg_x2, [reg_base, #REG_R2] /* reg_x2 = emulated r2 */ ;\
  ldr reg_x3, [reg_base, #REG_R3] /* reg_x3 = emulated r3 */ ;\
  ldr reg_x4, [reg_base, #REG_R4] /* reg_x4 = emulated r4 */ ;\
  ldr reg_x5, [reg_base, #REG_R5] /* reg_x5 = emulated r5 */ ;\

@ Will store the register set from cached registers back to memory.
@ Mirror images of the load_registers_* macros above.

#define store_registers_arm() ;\
  str reg_x0, [reg_base, #REG_R0] /* emulated r0 = reg_x0 */ ;\
  str reg_x1, [reg_base, #REG_R1] /* emulated r1 = reg_x1 */ ;\
  str reg_x2, [reg_base, #REG_R6] /* emulated r6 = reg_x2 */ ;\
  str reg_x3, [reg_base, #REG_R9] /* emulated r9 = reg_x3 */ ;\
  str reg_x4, [reg_base, #REG_R12] /* emulated r12 = reg_x4 */ ;\
  str reg_x5, [reg_base, #REG_R14] /* emulated r14 = reg_x5 */ ;\

#define store_registers_thumb() ;\
  str reg_x0, [reg_base, #REG_R0] /* emulated r0 = reg_x0 */ ;\
  str reg_x1, [reg_base, #REG_R1] /* emulated r1 = reg_x1 */ ;\
  str reg_x2, [reg_base, #REG_R2] /* emulated r2 = reg_x2 */ ;\
  str reg_x3, [reg_base, #REG_R3] /* emulated r3 = reg_x3 */ ;\
  str reg_x4, [reg_base, #REG_R4] /* emulated r4 = reg_x4 */ ;\
  str reg_x5, [reg_base, #REG_R5] /* emulated r5 = reg_x5 */ ;\
@ Returns an updated persistent cpsr with the cached flags register.
@ Uses reg as a temporary register and returns the CPSR here.
@ reg_flags holds the host NZCV flags in its top nibble.

#define collapse_flags_no_update(reg) ;\
  ldr reg, [reg_base, #REG_CPSR] /* reg = cpsr */;\
  bic reg, reg, #0xF0000000 /* clear ALU flags in cpsr */;\
  and reg_flags, reg_flags, #0xF0000000 /* clear non-ALU flags */;\
  orr reg, reg, reg_flags /* update cpsr with ALU flags */;\

@ Updates cpsr using the above macro.

#define collapse_flags(reg) ;\
  collapse_flags_no_update(reg) ;\
  str reg, [reg_base, #REG_CPSR] ;\

@ Loads the saved flags register from the persistent cpsr.

#define extract_flags() ;\
  ldr reg_flags, [reg_base, #REG_CPSR] ;\
  msr cpsr_f, reg_flags ;\

@ Save/restore the host ALU flags around code that clobbers them.

#define save_flags() ;\
  mrs reg_flags, cpsr ;\

#define restore_flags() ;\
  msr cpsr_f, reg_flags ;\

@ Caller-save registers that must survive a C call made from translated
@ code. NOTE(review): two alternative definitions appear here; the
@ preprocessor conditional selecting between them is not visible in this
@ view -- confirm against the full file.
#define call_c_saved_regs r2, r3, r12, lr

#define call_c_saved_regs r3, r12, lr
@ Calls a C function - all caller save registers which are important to the
@ dynarec and to returning from this function are saved.
@ NOTE(review): the actual "bl function" line between the push and the pop
@ is not visible in this view.

#define call_c_function(function) ;\
  stmdb sp!, { call_c_saved_regs } ;\
  ldmia sp!, { call_c_saved_regs } ;\

@ Update the GBA hardware (video, sound, input, etc)
@ NOTE(review): the bodies of the four helper macros below (return and
@ PC-load variants for the straight/add stub flavors) are not visible in
@ this view.

#define return_straight() ;\

#define return_add() ;\

#define load_pc_straight() ;\

#define load_pc_add() ;\
@ arm_update_gba_builder(name, mode, return_op): emits one update-GBA stub.
@   name:      label suffix (arm/thumb/idle_arm/idle_thumb)
@   mode:      which cached register set to flush/reload (arm or thumb)
@   return_op: straight or add return/PC-load sequence
@ Writes out the PC and flags, calls update_gba(), then either returns to
@ the caller or re-enters translated code if C code moved the PC.

#define arm_update_gba_builder(name, mode, return_op) ;\
;\
arm_update_gba_##name: ;\
  load_pc_##return_op() ;\
  str r0, [reg_base, #REG_PC] /* write out the PC */;\
  collapse_flags(r0) /* update the flags */;\
  store_registers_##mode() /* save out registers */;\
  call_c_function(update_gba) /* update GBA state */;\
  mvn reg_cycles, r0 /* load new cycle count (inverted) */;\
  ldr r0, [reg_base, #CHANGED_PC_STATUS] /* load PC changed status */;\
  cmp r0, #0 /* see if PC has changed */;\
  beq 1f /* if not return */;\
  ldr r0, [reg_base, #REG_PC] /* load new PC */;\
  ldr r1, [reg_base, #REG_CPSR] /* r1 = flags */;\
  tst r1, #0x20 /* see if Thumb bit is set */;\
  bne 2f /* if so load Thumb PC */;\
  load_registers_arm() /* load ARM regs */;\
  call_c_function(block_lookup_address_arm) ;\
  bx r0 /* jump to new ARM block */;\
  load_registers_##mode() /* 1: reload registers */;\
  return_##return_op() ;\
  load_registers_thumb() /* 2: load Thumb regs */;\
  call_c_function(block_lookup_address_thumb) ;\
  bx r0 /* jump to new Thumb block */;\

@ Instantiate the four update stubs (idle variants use the add return).
arm_update_gba_builder(arm, arm, straight)
arm_update_gba_builder(thumb, thumb, straight)

arm_update_gba_builder(idle_arm, arm, add)
arm_update_gba_builder(idle_thumb, thumb, add)
@ These are b stubs for performing indirect branches. They are not
@ linked to and don't return, instead they link elsewhere.
@ Input:
@ r0: PC to branch to
@ The dual variants inspect bit 0 of the target to pick ARM vs Thumb,
@ matching bx semantics. (The bx r0 after each lookup and the local 1:
@ labels are not visible in this view.)

arm_indirect_branch_arm:
  call_c_function(block_lookup_address_arm)

arm_indirect_branch_thumb:
  call_c_function(block_lookup_address_thumb)

arm_indirect_branch_dual_arm:
  tst r0, #0x01 @ check lower bit
  bne 1f @ if set going to Thumb mode
  call_c_function(block_lookup_address_arm)

@ 1: switching ARM -> Thumb.
  store_registers_arm() @ save out ARM registers
  load_registers_thumb() @ load in Thumb registers
  ldr r1, [reg_base, #REG_CPSR] @ load cpsr
  orr r1, r1, #0x20 @ set Thumb mode
  str r1, [reg_base, #REG_CPSR] @ store flags
  call_c_function(block_lookup_address_thumb)

arm_indirect_branch_dual_thumb:
  tst r0, #0x01 @ check lower bit
  beq 1f @ if clear going to ARM mode
  call_c_function(block_lookup_address_thumb)

@ 1: switching Thumb -> ARM.
  store_registers_thumb() @ save out Thumb registers
  load_registers_arm() @ load in ARM registers
  ldr r1, [reg_base, #REG_CPSR] @ load cpsr
  bic r1, r1, #0x20 @ clear Thumb mode
  str r1, [reg_base, #REG_CPSR] @ store flags
  call_c_function(block_lookup_address_arm)
@ execute_store_cpsr body (the label and the "r0: new cpsr" input line
@ are not visible in this view).
@ r1: bitmask of which bits in cpsr to update
@ Merges the masked new value into the stored cpsr, lets C code handle
@ any side effects, and re-enters translated code if that changed the PC.

  and reg_flags, r0, r1 @ reg_flags = new_cpsr & store_mask
  ldr r0, [reg_base, #REG_CPSR] @ r0 = cpsr
  bic r0, r0, r1 @ r0 = cpsr & ~store_mask
  orr reg_flags, reg_flags, r0 @ reg_flags = masked new bits | kept old bits
  mov r0, reg_flags @ also put new cpsr in r0

  store_registers_arm() @ save ARM registers
  ldr r2, [lr] @ r2 = pc
  call_c_function(execute_store_cpsr_body)
  load_registers_arm() @ restore ARM registers

  cmp r0, #0 @ check new PC
  beq 1f @ if it's zero, return
  call_c_function(block_lookup_address_arm)
  bx r0 @ return to new ARM address

@ 1: (label itself not visible in this view)
  add pc, lr, #4 @ return
@ Update the current spsr.
@ (execute_store_spsr label and the "r0: new spsr" input line are not
@ visible in this view.)
@ r1: bitmask of which bits in spsr to update

  ldr r1, =spsr @ r1 = spsr
  ldr r2, [reg_base, #CPU_MODE] @ r2 = CPU_MODE
  str r0, [r1, r2, lsl #2] @ spsr[CPU_MODE] = new_spsr

@ Read the current spsr.
@ (execute_read_spsr label not visible in this view.) Returns it in r0.

  ldr r0, =spsr @ r0 = spsr
  ldr r1, [reg_base, #CPU_MODE] @ r1 = CPU_MODE
  ldr r0, [r0, r1, lsl #2] @ r0 = spsr[CPU_MODE]

@ Restore the cpsr from the mode spsr and mode shift.
@ May resume in Thumb depending on the restored cpsr's T bit.

execute_spsr_restore:
  ldr r1, =spsr @ r1 = spsr
  ldr r2, [reg_base, #CPU_MODE] @ r2 = cpu_mode
  ldr r1, [r1, r2, lsl #2] @ r1 = spsr[cpu_mode] (new cpsr)
  str r1, [reg_base, #REG_CPSR] @ update cpsr
  mov reg_flags, r1 @ also, update shadow flags

  @ This function call will pass r0 (address) and return it.
  store_registers_arm() @ save ARM registers
  call_c_function(execute_spsr_restore_body)

  ldr r1, [reg_base, #REG_CPSR] @ r1 = cpsr
  tst r1, #0x20 @ see if Thumb mode is set
  bne 2f @ if so handle it
  load_registers_arm() @ restore ARM registers
  call_c_function(block_lookup_address_arm)

@ 2: resume in Thumb (label itself not visible in this view).
  load_registers_thumb() @ load Thumb registers
  call_c_function(block_lookup_address_thumb)
@ Setup the mode transition work for calling an SWI.
@ Saves the return PC into supervisor r14 and the collapsed cpsr into
@ spsr[MODE_SUPERVISOR], switches the stored cpsr into supervisor mode,
@ then tells the C core to bank-switch via set_cpu_mode().

#define execute_swi_builder(mode) ;\
;\
execute_swi_##mode: ;\
  ldr r1, =reg_mode /* r1 = reg_mode */;\
  /* reg_mode[MODE_SUPERVISOR][6] = pc (7 banked regs per mode) */;\
  ldr r0, [lr] /* load PC */;\
  str r0, [r1, #((MODE_SUPERVISOR * (7 * 4)) + (6 * 4))] ;\
  collapse_flags_no_update(r0) /* r0 = cpsr */;\
  ldr r1, =spsr /* r1 = spsr */;\
  str r0, [r1, #(MODE_SUPERVISOR * 4)] /* spsr[MODE_SUPERVISOR] = cpsr */;\
  bic r0, r0, #0x3F /* clear mode flag in r0 */;\
  orr r0, r0, #0x13 /* set to supervisor mode */;\
  str r0, [reg_base, #REG_CPSR] /* update cpsr */;\
  call_c_function(bios_region_read_allow) ;\
  mov r0, #MODE_SUPERVISOR ;\
  store_registers_##mode() /* store regs for mode */;\
  call_c_function(set_cpu_mode) /* set the CPU mode to svsr */;\
  load_registers_arm() /* load ARM regs */;\
  add pc, lr, #4 /* return */;\

execute_swi_builder(arm)
execute_swi_builder(thumb)

@ Wrapper for calling SWI functions in C (or can implement some in ASM if
@ (continuation of this comment is not visible in this view)
@ Flushes/reloads the cached register set around the HLE C call.

#define execute_swi_function_builder(swi_function, mode) ;\
;\
  .global execute_swi_hle_##swi_function##_##mode ;\
execute_swi_hle_##swi_function##_##mode: ;\
  store_registers_##mode() ;\
  call_c_function(execute_swi_hle_##swi_function##_c) ;\
  load_registers_##mode() ;\

execute_swi_function_builder(div, arm)
execute_swi_function_builder(div, thumb)
@ Start program execution. Normally the mode should be Thumb and the
@ PC should be 0x8000000, however if a save state is preloaded this
@ (continuation of this comment is not visible in this view)
@ Input:
@ r0: initial value for cycle counter
@ Uses sp as reg_base; must hold consistently true.
@ Does not return: ends with a bx into the first translated block.

execute_arm_translate:
  sub sp, sp, #0x100 @ allocate room for register data
  mvn reg_cycles, r0 @ load cycle counter (kept inverted)
  mov r0, reg_base @ load reg_base into first param
  call_c_function(move_reg) @ make reg_base the new reg ptr
  sub sp, sp, #REG_BASE_OFFSET @ allocate room for ptr table
  bl load_ptr_read_function_table @ load read function ptr table

  ldr r0, [reg_base, #REG_PC] @ r0 = current pc
  ldr r1, [reg_base, #REG_CPSR] @ r1 = flags
  tst r1, #0x20 @ see if Thumb bit is set
  bne 1f @ if so lookup thumb

  load_registers_arm() @ load ARM registers
  call_c_function(block_lookup_address_arm)
  extract_flags() @ load flags
  bx r0 @ jump to first ARM block

@ 1: Thumb entry (label itself not visible in this view).
  load_registers_thumb() @ load Thumb registers
  call_c_function(block_lookup_address_thumb)
  extract_flags() @ load flags
  bx r0 @ jump to first Thumb block
@ Write out to memory.
@ r0 = address, r1 = value. [lr] holds the caller's PC literal and the
@ stubs return with "add pc, lr, #4", skipping past it.
@ Fast path: direct store through memory_map_write, then an SMC check
@ against the code-status mirror 0x8000 below the page.

#define execute_store_body(store_type, store_op) ;\
  stmdb sp!, { lr } /* save lr */;\
  tst r0, #0xF0000000 /* make sure address is in range */;\
  bne ext_store_u##store_type /* if not do ext store */;\
  ldr r2, =memory_map_write /* r2 = memory_map_write */;\
  mov lr, r0, lsr #15 /* lr = page index of address */;\
  ldr r2, [r2, lr, lsl #2] /* r2 = memory page */;\
  cmp r2, #0 /* see if map is ext */;\
  beq ext_store_u##store_type /* if so do ext store */;\
  mov r0, r0, lsl #17 /* isolate bottom 15 bits in top */;\
  mov r0, r0, lsr #17 /* like performing and 0x7FFF */;\
  store_op r1, [r2, r0] /* store result */;\

@ Align the store value for the access width. NOTE(review): the 8/32
@ bodies are not visible in this view (presumably empty or byte dup).

#define store_align_8() ;\

#define store_align_16() ;\
  extract_u16(r1, r1) ;\

#define store_align_32() ;\

#define execute_store_builder(store_type, store_op, load_op) ;\
;\
execute_store_u##store_type: ;\
  execute_store_body(store_type, store_op) ;\
  sub r2, r2, #0x8000 /* Pointer to code status data */;\
  load_op r0, [r2, r0] /* check code flag */;\
  cmp r0, #0 /* see if it's not 0 */;\
  bne 2f /* if so perform smc write */;\
  ldmia sp!, { lr } /* restore lr */;\
  add pc, lr, #4 /* return */;\
  ldmia sp!, { lr } /* 2: restore lr */;\
  ldr r0, [lr] /* load PC */;\
  str r0, [reg_base, #REG_PC] /* write out PC */;\
  b smc_write /* perform smc write */;\
;\
ext_store_u##store_type: ;\
  ldmia sp!, { lr } /* pop lr off of stack */;\
  ldr r2, [lr] /* load PC */;\
  str r2, [reg_base, #REG_PC] /* write out PC */;\
  store_align_##store_type() ;\
  call_c_function(write_memory##store_type) ;\
  b write_epilogue /* handle additional write stuff */;\

execute_store_builder(8, strb, ldrb)
execute_store_builder(16, strh, ldrh)
execute_store_builder(32, str, ldr)

@ "Safe" 32bit store: skips the SMC code-flag check.
execute_store_u32_safe:
  execute_store_body(32_safe, str)
  ldmia sp!, { pc } @ return

@ ext_store_u32_safe path (label itself not visible in this view):
  ldmia sp!, { lr } @ Restore lr
  call_c_function(write_memory32) @ Perform 32bit store
@ write_epilogue (label itself not visible in this view): after a C
@ memory write, r0 != 0 signals an alert; r0 == 2 means self-modifying
@ code, otherwise the CPU may be halted and the GBA must be updated.
  cmp r0, #0 @ check if the write rose an alert
  beq 4f @ if not we can exit

  collapse_flags(r1) @ interrupt needs current flags

  cmp r0, #2 @ see if the alert is due to SMC
  beq smc_write @ if so, goto SMC handler

  ldr r1, [reg_base, #REG_CPSR] @ r1 = cpsr
  tst r1, #0x20 @ see if Thumb bit is set
  bne 1f @ if so do Thumb update

  store_registers_arm() @ save ARM registers
  bl update_gba @ update GBA until CPU isn't halted
  mvn reg_cycles, r0 @ load new cycle count (inverted)
  ldr r0, [reg_base, #REG_PC] @ load new PC
  ldr r1, [reg_base, #REG_CPSR] @ r1 = flags
  tst r1, #0x20 @ see if Thumb bit is set
  call_c_function(block_lookup_address_arm)
  bx r0 @ jump to new ARM block

@ 1: Thumb update path (label itself not visible in this view).
  store_registers_thumb() @ save Thumb registers
  load_registers_thumb()
  call_c_function(block_lookup_address_thumb)
  bx r0 @ jump to new Thumb block

@ 4: no alert -- plain return (label itself not visible in this view).
  add pc, lr, #4 @ return

@ smc_write (label itself not visible in this view): self-modifying
@ code was written; flush the RAM translation cache, then fall through
@ to re-look-up the current PC.
  call_c_function(flush_translation_cache_ram)

@ lookup_pc (label itself not visible in this view):
  ldr r0, [reg_base, #REG_PC] @ r0 = new pc
  ldr r1, [reg_base, #REG_CPSR] @ r1 = flags
  tst r1, #0x20 @ see if Thumb bit is set
  beq lookup_pc_arm @ if not lookup ARM
  call_c_function(block_lookup_address_thumb)
  bx r0 @ jump to new Thumb block

@ lookup_pc_arm (label itself not visible in this view):
  call_c_function(block_lookup_address_arm)
  bx r0 @ jump to new ARM block
@ Sign extension helpers: unsigned variants expand to nothing.
#define sign_extend_u8(reg)
#define sign_extend_u16(reg)
#define sign_extend_u32(reg)

@ Sign-extend via shift up / arithmetic shift down.
#define sign_extend_s8(reg) ;\
  mov reg, reg, lsl #24 /* shift reg into upper 8bits */;\
  mov reg, reg, asr #24 /* shift down, sign extending */;\

#define sign_extend_s16(reg) ;\
  mov reg, reg, lsl #16 /* shift reg into upper 16bits */;\
  mov reg, reg, asr #16 /* shift down, sign extending */;\

@ Direct page loads: mask the address to its low 15 bits and index the
@ page pointer in r2. Plain loads can fold the mask into a shifted
@ offset; signed/halfword loads use a separate mask (ARM misc loads do
@ not accept a shifted register offset).

#define execute_load_op_u8(load_op) ;\
  mov r0, r0, lsl #17 ;\
  load_op r0, [r2, r0, lsr #17] ;\

#define execute_load_op_s8(load_op) ;\
  mov r0, r0, lsl #17 ;\
  mov r0, r0, lsr #17 ;\
  load_op r0, [r2, r0] ;\

#define execute_load_op_u16(load_op) ;\
  execute_load_op_s8(load_op) ;\

#define execute_load_op_s16(load_op) ;\
  execute_load_op_s8(load_op) ;\

@ NOTE(review): execute_load_op_u16 appears #defined twice identically
@ above and below; any preprocessor conditionals between them are not
@ visible in this view -- confirm against the full file.
#define execute_load_op_u16(load_op) ;\
  execute_load_op_s8(load_op) ;\

#define execute_load_op_u32(load_op) ;\
  execute_load_op_u8(load_op) ;\
@ execute_load_builder(load_type, load_function, load_op, mask):
@ emits one load stub. r0 = address; [lr] holds the caller's PC and the
@ stub returns with "add pc, lr, #4". mask selects address bits that
@ force the slow path (u16/s16 also trap on a set alignment bit 0).

#define execute_load_builder(load_type, load_function, load_op, mask) ;\
;\
execute_load_##load_type: ;\
  tst r0, mask /* make sure address is in range */;\
  bne ext_load_##load_type /* if not do ext load */;\
  ldr r2, =memory_map_read /* r2 = memory_map_read */;\
  mov r1, r0, lsr #15 /* r1 = page index of address */;\
  ldr r2, [r2, r1, lsl #2] /* r2 = memory page */;\
  cmp r2, #0 /* see if map is ext */;\
  beq ext_load_##load_type /* if so do ext load */;\
  execute_load_op_##load_type(load_op) ;\
  add pc, lr, #4 /* return */;\
;\
ext_load_##load_type: ;\
  ldr r1, [lr] /* r1 = PC */;\
  str r1, [reg_base, #REG_PC] /* update PC */;\
  call_c_function(read_memory##load_function) ;\
  sign_extend_##load_type(r0) /* sign extend result */;\
  add pc, lr, #4 /* return */;\

execute_load_builder(u8, 8, ldrneb, #0xF0000000)
execute_load_builder(s8, 8, ldrnesb, #0xF0000000)
execute_load_builder(u16, 16, ldrneh, #0xF0000001)
execute_load_builder(s16, 16_signed, ldrnesh, #0xF0000001)
execute_load_builder(u32, 32, ldrne, #0xF0000000)
@ execute_ptr_builder(region, ptr, bits): builds a read handler that
@ masks the address down to `bits` bits and indexes a fixed region
@ pointer. (The tail of the macro body is not visible in this view.)

#define execute_ptr_builder(region, ptr, bits) ;\
;\
execute_##region##_ptr: ;\
  ldr r1, =(ptr) /* load region ptr */;\
  mov r0, r0, lsl #(32 - bits) /* isolate bottom bits */;\
  mov r0, r0, lsr #(32 - bits) ;\

@ Protected BIOS reads return data from bios_read_protect.
execute_bios_ptr_protected:
  ldr r1, =bios_read_protect @ load bios read ptr
  and r0, r0, #0x03 @ only want bottom 2 bits

@ ewram read handler (label itself not visible in this view):
@ address = (address & 0x7FFF) + ((address & 0x38000) * 2) + 0x8000;
  ldr r1, =(ewram + 0x8000) @ load ewram read ptr
  mov r2, r0, lsl #17 @ isolate bottom 15 bits
  and r0, r0, #0x38000 @ isolate top 2 bits
  add r0, r2, r0, lsl #1 @ add top 2 bits * 2 to bottom 15

@ gamepak read handler (label itself not visible in this view):
@ u32 gamepak_index = address >> 15;
@ u8 *map = memory_map_read[gamepak_index];
@ if(map == NULL)
@   map = load_gamepak_page(gamepak_index & 0x3FF);
@ value = address##type(map, address & 0x7FFF)
  ldr r1, =memory_map_read @ load memory_map_read
  mov r2, r0, lsr #15 @ isolate top 17 bits
  ldr r1, [r1, r2, lsl #2] @ load memory map read ptr

  cmp r1, #0 @ see if map entry is NULL
  bne 2f @ if not resume

  stmdb sp!, { r0 } @ save r0 on stack
  mov r2, r2, lsl #20 @ isolate page index
  call_c_function(load_gamepak_page) @ read new page into r0
  mov r1, r0 @ new map = return
  ldmia sp!, { r0 } @ restore r0

@ 2: resume (label itself not visible in this view).
  mov r0, r0, lsl #17 @ isolate bottom 15 bits
@ These will store the result in a pointer, then pass that pointer.

@ EEPROM read handler (label itself not visible in this view): result
@ is parked in REG_SAVE and the address is zeroed for the caller.
  call_c_function(read_eeprom) @ load EEPROM result
  add r1, reg_base, #(REG_SAVE & 0xFF00) @ r1 = &REG_SAVE, built in two
  add r1, r1, #(REG_SAVE & 0xFF) @ adds to keep immediates encodable
  strh r0, [r1] @ write result out
  mov r0, #0 @ zero out address

@ backup read handler (label itself not visible in this view).
  mov r0, r0, lsl #16 @ only want top 16 bits
  call_c_function(read_backup) @ load backup result
  add r1, reg_base, #(REG_SAVE & 0xFF00)
  add r1, r1, #(REG_SAVE & 0xFF)
  strb r0, [r1] @ write result out
  mov r0, #0 @ zero out address

@ open-bus read handler (label itself not visible in this view):
@ returns the opcode at the current PC (32bit in ARM mode, the Thumb
@ opcode duplicated over both halves in Thumb mode), via REG_SAVE + 4.
  ldr r1, [reg_base, #REG_CPSR] @ r1 = cpsr

  stmdb sp!, { r0 } @ save r0

  ldr r0, [lr, #-4] @ r0 = current PC

  tst r1, #0x20 @ see if Thumb bit is set
  bne 1f @ if so load Thumb op

  call_c_function(read_memory32) @ read open address

  add r1, reg_base, #((REG_SAVE + 4) & 0xFF00)
  add r1, r1, #((REG_SAVE + 4) & 0xFF)
  str r0, [r1] @ write out

  ldmia sp!, { r0 } @ restore r0
  and r0, r0, #0x03 @ isolate bottom 2 bits

@ 1: Thumb open-bus read (label itself not visible in this view).
  call_c_function(read_memory16) @ read open address

  orr r0, r0, r0, lsl #16 @ duplicate opcode over halves
  add r1, reg_base, #((REG_SAVE + 4) & 0xFF00)
  add r1, r1, #((REG_SAVE + 4) & 0xFF)
  str r0, [r1] @ write out

  ldmia sp!, { r0 } @ restore r0
  and r0, r0, #0x03 @ isolate bottom 2 bits
@ Instantiate direct-pointer read handlers for the fixed-size regions
@ (region name, base pointer, number of significant address bits).
execute_ptr_builder(bios_rom, bios_rom, 14)
execute_ptr_builder(iwram, iwram + 0x8000, 15)
execute_ptr_builder(vram, vram, 17)
execute_ptr_builder(oam_ram, oam_ram, 10)
execute_ptr_builder(io_registers, io_registers, 10)
execute_ptr_builder(palette_ram, palette_ram, 10)

@ 256-entry handler dispatch table, indexed by the address's top byte.
@ (The closing .endr of the .rept is not visible in this view.)
ptr_read_function_table:
  .word execute_bios_ptr_protected @ 0x00: BIOS
  .word execute_open_ptr @ 0x01: open
  .word execute_ewram_ptr @ 0x02: ewram
  .word execute_iwram_ptr @ 0x03: iwram
  .word execute_io_registers_ptr @ 0x04: I/O registers
  .word execute_palette_ram_ptr @ 0x05: palette RAM
  .word execute_vram_ptr @ 0x06: vram
  .word execute_oam_ram_ptr @ 0x07: oam RAM
  .word execute_gamepak_ptr @ 0x08: gamepak
  .word execute_gamepak_ptr @ 0x09: gamepak
  .word execute_gamepak_ptr @ 0x0A: gamepak
  .word execute_gamepak_ptr @ 0x0B: gamepak
  .word execute_gamepak_ptr @ 0x0C: gamepak
  .word execute_eeprom_ptr @ 0x0D: EEPROM
  .word execute_backup_ptr @ 0x0E: backup

  .rept (256 - 15) @ 0x0F - 0xFF: open
  .word execute_open_ptr
@ Setup the read function table.
@ Copy all 256 entries onto the stack; assume we're free to use r3.

load_ptr_read_function_table:
  mov r0, #256 @ 256 elements
  ldr r1, =ptr_read_function_table @ r1 = ptr_read_function_table
  mov r2, sp @ load here

@ 1: copy loop (label itself not visible in this view).
  ldr r3, [r1], #4 @ read pointer
  str r3, [r2], #4 @ write pointer

  subs r0, r0, #1 @ goto next iteration

@ Patch the read function table to allow for BIOS reads.
@ Installs execute_bios_rom_ptr as table entry 0.

execute_patch_bios_read:
  ldr r1, =reg @ r1 = reg
  ldr r0, =execute_bios_rom_ptr @ r0 = patch function
  str r0, [r1, #-REG_BASE_OFFSET]

@ Patch the read function table to protect BIOS reads again
@ (installs execute_bios_ptr_protected as table entry 0).

execute_patch_bios_protect:
  ldr r1, =reg @ r1 = reg
  ldr r0, =execute_bios_ptr_protected @ r0 = patch function
  str r0, [r1, #-REG_BASE_OFFSET]
@ Spill one emulated register to a shadow slot 128 bytes above the
@ register file (and back). Clobbers r2. Used around step_debug calls.

#define save_reg_scratch(reg) ;\
  ldr r2, [reg_base, #(REG_BASE_OFFSET + (reg * 4))] ;\
  str r2, [reg_base, #(REG_BASE_OFFSET + (reg * 4) + 128)] ;\

#define restore_reg_scratch(reg) ;\
  ldr r2, [reg_base, #(REG_BASE_OFFSET + (reg * 4) + 128)] ;\
  str r2, [reg_base, #(REG_BASE_OFFSET + (reg * 4))] ;\

@ Apply save/restore (type) to the registers each mode keeps cached:
@ Thumb caches r0-r5, ARM caches r0, r1, r6, r9, r12, r14.

#define scratch_regs_thumb(type) ;\
  type##_reg_scratch(0) ;\
  type##_reg_scratch(1) ;\
  type##_reg_scratch(2) ;\
  type##_reg_scratch(3) ;\
  type##_reg_scratch(4) ;\
  type##_reg_scratch(5) ;\

#define scratch_regs_arm(type) ;\
  type##_reg_scratch(0) ;\
  type##_reg_scratch(1) ;\
  type##_reg_scratch(6) ;\
  type##_reg_scratch(9) ;\
  type##_reg_scratch(12) ;\
  type##_reg_scratch(14) ;\
@ step_debug_arm body (label itself not visible in this view):
@ per-instruction debug hook. Passes the current PC (r0) and the
@ un-inverted cycle counter (r1) to the C step_debug function,
@ preserving the cached register set around the call.
  ldr r0, [reg_base, #REG_CPSR] @ r0 = cpsr
  tst r0, #0x20 @ see if Thumb bit is set

  ldr r0, [lr] @ load PC
  mvn r1, reg_cycles @ load cycle counter (undo inversion)

  beq 1f @ if not goto ARM mode

  scratch_regs_thumb(save)

  store_registers_thumb() @ write back Thumb regs
  call_c_function(step_debug) @ call debug step
  scratch_regs_thumb(restore)

  add pc, lr, #4 @ return

@ 1: ARM mode (label itself not visible in this view).
  scratch_regs_arm(save)
  store_registers_arm() @ write back ARM regs
  call_c_function(step_debug) @ call debug step
  scratch_regs_arm(restore)

  add pc, lr, #4 @ return, skipping PC
@ Page-pointer tables shared with the C core, 0x8000 bytes each: one
@ pointer per 32KB page (the stubs index them with "address >> 15").
@ .comm places them in common (zero-initialized) storage.
@ Fix: GAS .comm syntax is ".comm symbol , length" -- the separating
@ comma was missing, which newer assemblers reject.
.comm memory_map_read, 0x8000
.comm memory_map_write, 0x8000