1 /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
2 * linkage_arm.s for PCSX *
3 * Copyright (C) 2009-2011 Ari64 *
4 * Copyright (C) 2010-2011 Gražvydas "notaz" Ignotas *
6 * This program is free software; you can redistribute it and/or modify *
7 * it under the terms of the GNU General Public License as published by *
8 * the Free Software Foundation; either version 2 of the License, or *
9 * (at your option) any later version. *
11 * This program is distributed in the hope that it will be useful, *
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of *
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
14 * GNU General Public License for more details. *
16 * You should have received a copy of the GNU General Public License *
17 * along with this program; if not, write to the *
18 * Free Software Foundation, Inc., *
19 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. *
20 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
/* dynarec_local: the dynarec's private data area.  At runtime fp points
 * at dynarec_local and every field below is addressed as an fp-relative
 * offset (e.g. [fp, #cycle_count-dynarec_local]).  The symbols are laid
 * out by successive "name = prev + size" assignments over one .space
 * reservation; .type/.size give the debugger/ELF proper object info.
 * Several assignment lines are elided in this excerpt. */
45 .global pending_exception
59 .global restore_candidate
68 .type dynarec_local, %object
69 .size dynarec_local, dynarec_local_end-dynarec_local
71 .space dynarec_local_end-dynarec_local /*0x400630*/
/* --- event scheduling / cycle accounting --- */
72 next_interupt = dynarec_local + 64
73 .type next_interupt, %object
74 .size next_interupt, 4
75 cycle_count = next_interupt + 4
76 .type cycle_count, %object
78 last_count = cycle_count + 4
79 .type last_count, %object
81 pending_exception = last_count + 4
82 .type pending_exception, %object
83 .size pending_exception, 4
84 stop = pending_exception + 4
/* --- memory-access scratch used by the memhandlers below --- */
88 .type invc_ptr, %object
90 address = invc_ptr + 4
91 .type address, %object
93 readmem_dword = address + 4
/* readmem_word aliases the low word of readmem_dword (same offset) */
94 readmem_word = readmem_dword
95 .type readmem_dword, %object
96 .size readmem_dword, 8
97 dword = readmem_dword + 8
108 .size byte, 1 /* 1 byte free */
/* --- psxRegs: emulated CPU register file (layout must match the C
 * psxRegisters struct — offsets are hard-coded below, e.g. reg_cop0+48
 * is Status).  Intermediate assignments (reg_cop0, cycle, ...) are
 * elided in this excerpt. */
118 .type psxRegs, %object
119 .size psxRegs, psxRegs_end-psxRegs
130 .type reg_cop0, %object
132 reg_cop2d = reg_cop0 + 128
133 .type reg_cop2d, %object
135 reg_cop2c = reg_cop2d + 128
136 .type reg_cop2c, %object
148 interrupt = cycle + 4
149 .type interrupt, %object
151 intCycle = interrupt + 4
152 .type intCycle, %object
154 psxRegs_end = intCycle + 256
/* --- nd_pcsx_io: I/O handler tables + SPU callbacks (must mirror the
 * corresponding C struct field-for-field) --- */
157 nd_pcsx_io = psxRegs_end
158 .type nd_pcsx_io, %object
159 .size nd_pcsx_io, nd_pcsx_io_end-nd_pcsx_io
160 tab_read8 = nd_pcsx_io
161 .type tab_read8, %object
163 tab_read16 = tab_read8 + 4
164 .type tab_read16, %object
166 tab_read32 = tab_read16 + 4
167 .type tab_read32, %object
169 tab_write8 = tab_read32 + 4
170 .type tab_write8, %object
172 tab_write16 = tab_write8 + 4
173 .type tab_write16, %object
175 tab_write32 = tab_write16 + 4
176 .type tab_write32, %object
178 spu_readf = tab_write32 + 4
179 .type spu_readf, %object
181 spu_writef = spu_readf + 4
182 .type spu_writef, %object
184 nd_pcsx_io_end = spu_writef + 4
186 psxH_ptr = nd_pcsx_io_end
187 .type psxH_ptr, %object
189 align0 = psxH_ptr + 4 /* just for alignment */
190 .type align0, %object
/* --- dynarec bookkeeping --- */
192 branch_target = align0 + 4
193 .type branch_target, %object
194 .size branch_target, 4
195 mini_ht = branch_target + 4
196 .type mini_ht, %object
/* restore_candidate: 512-byte bitmap used when moving dirty blocks back
 * to the clean list (see cc_interrupt) */
198 restore_candidate = mini_ht + 256
199 .type restore_candidate, %object
200 .size restore_candidate, 512
/* memory_map: 4 MiB (1M entries * 4 bytes) page-translation table */
201 memory_map = restore_candidate + 512
202 .type memory_map, %object
203 .size memory_map, 4194304
204 dynarec_local_end = memory_map + 4194304
/* load_var_adr reg, var: load the 32-bit absolute address of \var into
 * \reg with a movw/movt pair (ARMv7 movt/movw immediates).  The closing
 * .endm is outside this excerpt. */
206 .macro load_var_adr reg var
208 movw \reg, #:lower16:\var
209 movt \reg, #:upper16:\var
/* dyna_linker: runtime linker stub for branches out of translated code.
 * Resolves the virtual target (r0), patches the calling branch
 * instruction (r1) on a hit, and falls back to new_recompile_block on a
 * miss.  Large parts of the body (labels, loads of the lookup tables,
 * branch patching) are elided in this excerpt — comments below only
 * describe the visible lines. */
218 .type dyna_linker, %function
220 /* r0 = virtual target address */
221 /* r1 = instruction to patch */
228 eor r2, r2, r12, lsr #12
229 and r6, r6, r12, lsr #12
233 ldr r5, [r3, r2, lsl #2]                      @ index a word table by page
249 add r1, r1, r12, asr #6
251 moveq pc, r4 /* Stale i-cache */
254 and r1, r7, #0xff000000                      @ keep branch opcode/cond bits
257 add r1, r1, r2, lsr #8
261 /* hash_table lookup */
264 eor r4, r0, r0, lsl #16                      @ hash seed from target vaddr
270 ldr r5, [r3, r2, lsl #2]
277 /* jump_dirty lookup */
287 /* hash_table insert */
298 bl new_recompile_block                       @ miss: compile the target
306 .size dyna_linker, .-dyna_linker
/* exec_pagefault: raise a TLB-miss style exception for an instruction
 * fetch.  Updates the emulated COP0 registers (offsets into reg_cop0
 * identify Status=+48, Context=+16, EPC=+56, BadVAddr=+32, Cause=+52,
 * EntryHi=+40, per the inline comments).  Some lines are elided. */
307 .global exec_pagefault
308 .type exec_pagefault, %function
310 /* r0 = instruction pointer */
311 /* r1 = fault address */
313 ldr r3, [fp, #reg_cop0+48-dynarec_local] /* Status */
315 ldr r4, [fp, #reg_cop0+16-dynarec_local] /* Context */
316 bic r6, r6, #0x0F800000
317 str r0, [fp, #reg_cop0+56-dynarec_local] /* EPC */
319 str r1, [fp, #reg_cop0+32-dynarec_local] /* BadVAddr */
321 str r3, [fp, #reg_cop0+48-dynarec_local] /* Status */
322 and r5, r6, r1, lsr #9                       @ BadVPN2 field for Context
323 str r2, [fp, #reg_cop0+52-dynarec_local] /* Cause */
324 and r1, r1, r6, lsl #9
325 str r1, [fp, #reg_cop0+40-dynarec_local] /* EntryHi */
327 str r4, [fp, #reg_cop0+16-dynarec_local] /* Context */
331 .size exec_pagefault, .-exec_pagefault
333 /* Special dynamic linker for the case where a page fault
334 may occur in a branch delay slot */
/* dyna_linker_ds: mirror of dyna_linker (same lookup/patch sequence)
 * with one difference visible here: on the miss path it sets
 * r2 = 0x80000008, whose high bit marks "pagefault in delay slot".
 * Most of the body is elided in this excerpt. */
335 .global dyna_linker_ds
336 .type dyna_linker_ds, %function
338 /* r0 = virtual target address */
339 /* r1 = instruction to patch */
346 eor r2, r2, r12, lsr #12
347 and r6, r6, r12, lsr #12
351 ldr r5, [r3, r2, lsl #2]
367 add r1, r1, r12, asr #6
369 moveq pc, r4 /* Stale i-cache */
372 and r1, r7, #0xff000000
375 add r1, r1, r2, lsr #8
379 /* hash_table lookup */
382 eor r4, r0, r0, lsl #16
388 ldr r5, [r3, r2, lsl #2]
395 /* jump_dirty lookup */
405 /* hash_table insert */
418 bl new_recompile_block
425 mov r2, #0x80000008 /* High bit set indicates pagefault in delay slot */
428 .size dyna_linker_ds, .-dyna_linker_ds
/* jump_vaddr_rN: one entry point per register that generated code may
 * hold a jump target in.  Each visible line computes the hash seed
 * r2 = rN ^ (rN << 16); the elided lines presumably normalize the
 * target into a common register and fall into jump_vaddr below —
 * TODO(review): confirm against the full source. */
437 .global jump_vaddr_r0
438 .type jump_vaddr_r0, %function
440 eor r2, r0, r0, lsl #16
442 .size jump_vaddr_r0, .-jump_vaddr_r0
443 .global jump_vaddr_r1
444 .type jump_vaddr_r1, %function
446 eor r2, r1, r1, lsl #16
449 .size jump_vaddr_r1, .-jump_vaddr_r1
450 .global jump_vaddr_r2
451 .type jump_vaddr_r2, %function
454 eor r2, r2, r2, lsl #16
456 .size jump_vaddr_r2, .-jump_vaddr_r2
457 .global jump_vaddr_r3
458 .type jump_vaddr_r3, %function
460 eor r2, r3, r3, lsl #16
463 .size jump_vaddr_r3, .-jump_vaddr_r3
464 .global jump_vaddr_r4
465 .type jump_vaddr_r4, %function
467 eor r2, r4, r4, lsl #16
470 .size jump_vaddr_r4, .-jump_vaddr_r4
471 .global jump_vaddr_r5
472 .type jump_vaddr_r5, %function
474 eor r2, r5, r5, lsl #16
477 .size jump_vaddr_r5, .-jump_vaddr_r5
478 .global jump_vaddr_r6
479 .type jump_vaddr_r6, %function
481 eor r2, r6, r6, lsl #16
484 .size jump_vaddr_r6, .-jump_vaddr_r6
485 .global jump_vaddr_r8
486 .type jump_vaddr_r8, %function
488 eor r2, r8, r8, lsl #16
491 .size jump_vaddr_r8, .-jump_vaddr_r8
492 .global jump_vaddr_r9
493 .type jump_vaddr_r9, %function
495 eor r2, r9, r9, lsl #16
498 .size jump_vaddr_r9, .-jump_vaddr_r9
499 .global jump_vaddr_r10
500 .type jump_vaddr_r10, %function
502 eor r2, r10, r10, lsl #16
505 .size jump_vaddr_r10, .-jump_vaddr_r10
506 .global jump_vaddr_r12
507 .type jump_vaddr_r12, %function
509 eor r2, r12, r12, lsl #16
512 .size jump_vaddr_r12, .-jump_vaddr_r12
/* r7 variant last — placed out of numeric order in the original */
513 .global jump_vaddr_r7
514 .type jump_vaddr_r7, %function
516 eor r2, r7, r7, lsl #16
518 .size jump_vaddr_r7, .-jump_vaddr_r7
/* jump_vaddr: common tail of the jump_vaddr_rN stubs — hash-table
 * lookup for the target address, preserving cycle_count across the
 * elided call.  Most of the body is not visible in this excerpt. */
520 .type jump_vaddr, %function
524 and r2, r3, r2, lsr #12                      @ mask hash into table range
531 str r10, [fp, #cycle_count-dynarec_local]    @ spill cycle counter
533 ldr r10, [fp, #cycle_count-dynarec_local]    @ ...and restore it
535 .size jump_vaddr, .-jump_vaddr
/* verify_code_ds / verify_code_vm / verify_code: check that the source
 * code of a translated block is unmodified before re-entering it.  The
 * _ds variant additionally spills r8 (the branch target) around the
 * check; verify_code restores it at the end.  Bodies largely elided. */
538 .global verify_code_ds
539 .type verify_code_ds, %function
541 str r8, [fp, #branch_target-dynarec_local]
542 .size verify_code_ds, .-verify_code_ds
543 .global verify_code_vm
544 .type verify_code_vm, %function
547 .type verify_code, %function
576 ldr r8, [fp, #branch_target-dynarec_local]
581 .size verify_code, .-verify_code
582 .size verify_code_vm, .-verify_code_vm
/* cc_interrupt: invoked when the cycle counter (r10) expires.  Syncs
 * the PCSX cycle counter, runs pending events (elided), then either
 * pops the saved register frame and leaves the dynarec (stop set) or
 * resumes translated code.  Several lines elided in this excerpt. */
586 .type cc_interrupt, %function
588 ldr r0, [fp, #last_count-dynarec_local]
592 str r1, [fp, #pending_exception-dynarec_local]
593 and r2, r2, r10, lsr #17                     @ restore_candidate index
594 add r3, fp, #restore_candidate-dynarec_local
595 str r10, [fp, #cycle-dynarec_local] /* PCSX cycles */
596 @@ str r10, [fp, #reg_cop0+36-dynarec_local] /* Count */
604 ldr r10, [fp, #cycle-dynarec_local]
605 ldr r0, [fp, #next_interupt-dynarec_local]
606 ldr r1, [fp, #pending_exception-dynarec_local]
607 ldr r2, [fp, #stop-dynarec_local]
608 str r0, [fp, #last_count-dynarec_local]
/* conditional frame pop: returns all the way out of the dynarec
 * (matches the stmfd in new_dyna_start) */
611 ldmnefd sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, pc}
615 ldr r0, [fp, #pcaddr-dynarec_local]
619 /* Move 'dirty' blocks to the 'clean' list */
630 .size cc_interrupt, .-cc_interrupt
/* do_interrupt: re-enter translated code at pcaddr after an interrupt
 * has been serviced (body mostly elided in this excerpt). */
634 .type do_interrupt, %function
636 ldr r0, [fp, #pcaddr-dynarec_local]
640 .size do_interrupt, .-do_interrupt
/* fp_exception: raise a coprocessor-unusable style exception — writes
 * EPC (+56), Status (+48) and Cause (+52) in the emulated COP0 set.
 * fp_exception_ds is the delay-slot variant: it seeds r2 with
 * 0x90000000 (high bit = delay slot) and shares the tail above. */
644 .type fp_exception, %function
648 ldr r1, [fp, #reg_cop0+48-dynarec_local] /* Status */
650 str r0, [fp, #reg_cop0+56-dynarec_local] /* EPC */
653 str r1, [fp, #reg_cop0+48-dynarec_local] /* Status */
654 str r2, [fp, #reg_cop0+52-dynarec_local] /* Cause */
658 .size fp_exception, .-fp_exception
660 .global fp_exception_ds
661 .type fp_exception_ds, %function
663 mov r2, #0x90000000 /* Set high bit if delay slot */
665 .size fp_exception_ds, .-fp_exception_ds
/* jump_syscall: raise a SYSCALL exception — same COP0 update pattern
 * as fp_exception (EPC, Status, Cause); remaining lines elided. */
669 .type jump_syscall, %function
671 ldr r1, [fp, #reg_cop0+48-dynarec_local] /* Status */
673 str r0, [fp, #reg_cop0+56-dynarec_local] /* EPC */
676 str r1, [fp, #reg_cop0+48-dynarec_local] /* Status */
677 str r2, [fp, #reg_cop0+52-dynarec_local] /* Cause */
681 .size jump_syscall, .-jump_syscall
/* jump_syscall_hle: route a syscall to PCSX's HLE exception handler.
 * Saves pc/cycles, calls psxException with cause=0x20 (elided call),
 * then reloads the dynarec state that the recompiler may have changed. */
685 .global jump_syscall_hle
686 .type jump_syscall_hle, %function
688 str r0, [fp, #pcaddr-dynarec_local] /* PC must be set to EPC for psxException */
689 ldr r2, [fp, #last_count-dynarec_local]
690 mov r1, #0 /* in delay slot */
692 mov r0, #0x20 /* cause */
693 str r2, [fp, #cycle-dynarec_local] /* PCSX cycle counter */
696 /* note: psxException might do recursive recompiler call from its HLE code,
697 * so be ready for this */
699 ldr r1, [fp, #next_interupt-dynarec_local]
700 ldr r10, [fp, #cycle-dynarec_local]
701 ldr r0, [fp, #pcaddr-dynarec_local]
703 str r1, [fp, #last_count-dynarec_local]
706 .size jump_syscall_hle, .-jump_syscall_hle
/* jump_hlecall: enter an HLE (BIOS) call — save target pc and sync the
 * PCSX cycle counter before transferring control (tail elided). */
710 .type jump_hlecall, %function
712 ldr r2, [fp, #last_count-dynarec_local]
713 str r0, [fp, #pcaddr-dynarec_local]
716 str r2, [fp, #cycle-dynarec_local] /* PCSX cycle counter */
718 .size jump_hlecall, .-jump_hlecall
/* jump_intcall: like jump_hlecall but for interrupt-dispatch calls —
 * save target pc and sync the PCSX cycle counter before transferring
 * control (tail elided in this excerpt).
 * FIX: the .size directive named jump_hlecall (copy-paste from the
 * previous routine), which both duplicated jump_hlecall's .size and
 * left jump_intcall without one; it must reference jump_intcall. */
722 .type jump_intcall, %function
724 ldr r2, [fp, #last_count-dynarec_local]
725 str r0, [fp, #pcaddr-dynarec_local]
728 str r2, [fp, #cycle-dynarec_local] /* PCSX cycle counter */
730 .size jump_intcall, .-jump_intcall
/* new_dyna_leave: exit the dynarec back to C — sync the cycle counter
 * and pop the register frame saved by new_dyna_start (same register
 * list, ip included only for EABI 8-byte stack alignment). */
734 .global new_dyna_leave
735 .type new_dyna_leave, %function
736 ldr r0, [fp, #last_count-dynarec_local]
739 str r10, [fp, #cycle-dynarec_local]
740 ldmfd sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, pc}
741 .size new_dyna_leave, .-new_dyna_leave
743 /* these are used to call memhandlers */
/* indirect_jump_indexed: fetch the handler pointer from a table
 * (r0 = table base, r1 = index) and fall through to indirect_jump,
 * which syncs the cycle counter before dispatching (tail elided). */
745 .global indirect_jump_indexed
746 .type indirect_jump_indexed, %function
747 indirect_jump_indexed:
748 ldr r0, [r0, r1, lsl #2]                     @ r0 = table[index]
749 .global indirect_jump
750 .type indirect_jump, %function
752 ldr r12, [fp, #last_count-dynarec_local]
754 str r2, [fp, #cycle-dynarec_local]
756 .size indirect_jump, .-indirect_jump
757 .size indirect_jump_indexed, .-indirect_jump_indexed
/* invalidate_addr_rN: trampolines called from generated code when a
 * store may have hit translated code.  Each saves the caller-visible
 * scratch registers into the area at [fp], (elided lines presumably
 * move rN into r0) and branches to invalidate_addr_call, which runs the
 * C-side invalidation (elided) and restores everything — including the
 * return address — via the final ldmia into pc. */
760 .global invalidate_addr_r0
761 .type invalidate_addr_r0, %function
763 stmia fp, {r0, r1, r2, r3, r12, lr}
765 b invalidate_addr_call
766 .size invalidate_addr_r0, .-invalidate_addr_r0
768 .global invalidate_addr_r1
769 .type invalidate_addr_r1, %function
771 stmia fp, {r0, r1, r2, r3, r12, lr}
773 b invalidate_addr_call
774 .size invalidate_addr_r1, .-invalidate_addr_r1
776 .global invalidate_addr_r2
777 .type invalidate_addr_r2, %function
779 stmia fp, {r0, r1, r2, r3, r12, lr}
781 b invalidate_addr_call
782 .size invalidate_addr_r2, .-invalidate_addr_r2
784 .global invalidate_addr_r3
785 .type invalidate_addr_r3, %function
787 stmia fp, {r0, r1, r2, r3, r12, lr}
789 b invalidate_addr_call
790 .size invalidate_addr_r3, .-invalidate_addr_r3
792 .global invalidate_addr_r4
793 .type invalidate_addr_r4, %function
795 stmia fp, {r0, r1, r2, r3, r12, lr}
797 b invalidate_addr_call
798 .size invalidate_addr_r4, .-invalidate_addr_r4
800 .global invalidate_addr_r5
801 .type invalidate_addr_r5, %function
803 stmia fp, {r0, r1, r2, r3, r12, lr}
805 b invalidate_addr_call
806 .size invalidate_addr_r5, .-invalidate_addr_r5
808 .global invalidate_addr_r6
809 .type invalidate_addr_r6, %function
811 stmia fp, {r0, r1, r2, r3, r12, lr}
813 b invalidate_addr_call
814 .size invalidate_addr_r6, .-invalidate_addr_r6
816 .global invalidate_addr_r7
817 .type invalidate_addr_r7, %function
819 stmia fp, {r0, r1, r2, r3, r12, lr}
821 b invalidate_addr_call
822 .size invalidate_addr_r7, .-invalidate_addr_r7
824 .global invalidate_addr_r8
825 .type invalidate_addr_r8, %function
827 stmia fp, {r0, r1, r2, r3, r12, lr}
829 b invalidate_addr_call
830 .size invalidate_addr_r8, .-invalidate_addr_r8
832 .global invalidate_addr_r9
833 .type invalidate_addr_r9, %function
835 stmia fp, {r0, r1, r2, r3, r12, lr}
837 b invalidate_addr_call
838 .size invalidate_addr_r9, .-invalidate_addr_r9
840 .global invalidate_addr_r10
841 .type invalidate_addr_r10, %function
843 stmia fp, {r0, r1, r2, r3, r12, lr}
845 b invalidate_addr_call
846 .size invalidate_addr_r10, .-invalidate_addr_r10
/* NOTE(review): the r12 variant shows no branch in this excerpt — it
 * appears to fall straight through into invalidate_addr_call; confirm
 * against the full source. */
848 .global invalidate_addr_r12
849 .type invalidate_addr_r12, %function
851 stmia fp, {r0, r1, r2, r3, r12, lr}
853 .size invalidate_addr_r12, .-invalidate_addr_r12
855 .global invalidate_addr_call
856 .type invalidate_addr_call, %function
857 invalidate_addr_call:
/* restore the saved scratch regs; loading saved lr into pc returns */
859 ldmia fp, {r0, r1, r2, r3, r12, pc}
860 .size invalidate_addr_call, .-invalidate_addr_call
/* new_dyna_start: C entry point into the dynarec.  Saves callee-saved
 * registers (ip padding keeps sp 8-byte aligned per EABI), points fp at
 * dynarec_local, loads the starting pc and cycle state, then (elided)
 * jumps into translated code.  Frame is popped by new_dyna_leave /
 * cc_interrupt. */
863 .global new_dyna_start
864 .type new_dyna_start, %function
866 /* ip is stored to conform EABI alignment */
867 stmfd sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, lr}
868 load_var_adr fp, dynarec_local
869 ldr r0, [fp, #pcaddr-dynarec_local]
871 ldr r1, [fp, #next_interupt-dynarec_local]
872 ldr r10, [fp, #cycle-dynarec_local]
873 str r1, [fp, #last_count-dynarec_local]
876 .size new_dyna_start, .-new_dyna_start
878 /* --------------------------------------- */
/* Exported memory-handler entry points (generated by the macros
 * below); called from translated code via indirect_jump*. */
881 .global ari_read_ram8
882 .global ari_read_ram16
883 .global ari_read_ram32
884 .global ari_read_ram_mirror8
885 .global ari_read_ram_mirror16
886 .global ari_read_ram_mirror32
887 .global ari_write_ram8
888 .global ari_write_ram16
889 .global ari_write_ram32
890 .global ari_write_ram_mirror8
891 .global ari_write_ram_mirror16
892 .global ari_write_ram_mirror32
893 .global ari_write_ram_mirror_ro32
894 .global ari_read_bios8
895 .global ari_read_bios16
896 .global ari_read_bios32
898 .global ari_read_io16
899 .global ari_read_io32
900 .global ari_write_io8
901 .global ari_write_io16
902 .global ari_write_io32
/* ari_read_ram: RAM read handler template — takes the address from
 * [fp,#address], masks alignment bits with \bic_const, loads with \op
 * and stores the result to readmem_dword.  .endm and the plain
 * ari_read_ram8/16/32 instantiations are elided in this excerpt. */
904 .macro ari_read_ram bic_const op
905 ldr r0, [fp, #address-dynarec_local]
907 bic r0, r0, #\bic_const
910 str r0, [fp, #readmem_dword-dynarec_local]
/* ari_read_ram_mirror: as above for mirrored RAM regions; \mvn_const is
 * used on the elided masking lines. */
923 .macro ari_read_ram_mirror mvn_const, op
924 ldr r0, [fp, #address-dynarec_local]
929 str r0, [fp, #readmem_dword-dynarec_local]
933 ari_read_ram_mirror8:
934 ari_read_ram_mirror 0, ldrb
936 ari_read_ram_mirror16:
937 ari_read_ram_mirror (1<<11), ldrh
939 ari_read_ram_mirror32:
940 ari_read_ram_mirror (3<<11), ldr
942 /* invalidation is already taken care of by the caller */
/* ari_write_ram: RAM write handler template — loads the value to store
 * from the \var slot (byte/hword/word) with load-size suffix \pf, masks
 * the address with \bic_const, stores (elided), then returns. */
943 .macro ari_write_ram bic_const var pf
944 ldr r0, [fp, #address-dynarec_local]
945 ldr\pf r1, [fp, #\var-dynarec_local]
947 bic r0, r0, #\bic_const
/* instantiations: 8/16/32-bit stores (labels elided in this excerpt) */
954 ari_write_ram 0, byte, b
957 ari_write_ram 1, hword, h
960 ari_write_ram 3, word,
/* ari_write_ram_mirror: write handler for mirrored RAM — unlike the
 * plain variant it must check invalidation itself: it reads the
 * invalid-code byte map via invc_ptr indexed by page (addr >> 12). */
962 .macro ari_write_ram_mirror mvn_const var pf
963 ldr r0, [fp, #address-dynarec_local]
965 ldr\pf r1, [fp, #\var-dynarec_local]
967 ldr r2, [fp, #invc_ptr-dynarec_local]
969 ldrb r2, [r2, r0, lsr #12]                  @ per-page invalidation flag
977 ari_write_ram_mirror8:
978 ari_write_ram_mirror 0, byte, b
980 ari_write_ram_mirror16:
981 ari_write_ram_mirror (1<<11), hword, h
983 ari_write_ram_mirror32:
984 ari_write_ram_mirror (3<<11), word,
/* read-only variant: checks the pcsx_ram_is_ro flag (test elided) and
 * otherwise tail-branches into the normal mirrored 32-bit write */
986 ari_write_ram_mirror_ro32:
987 load_var_adr r0, pcsx_ram_is_ro
992 b ari_write_ram_mirror32
/* ari_read_bios_mirror: BIOS read handler — forces the address into
 * the 0x9fc00000 BIOS mapping (set bit 31, clear bit 29 plus the
 * alignment bits), loads with \op and stores to readmem_dword.
 * .endm and the entry labels are elided in this excerpt. */
995 .macro ari_read_bios_mirror bic_const op
996 ldr r0, [fp, #address-dynarec_local]
997 orr r0, r0, #0x80000000
998 bic r0, r0, #(0x20000000|\bic_const) @ map to 0x9fc...
1000 str r0, [fp, #readmem_dword-dynarec_local]
1005 ari_read_bios_mirror 0, ldrb
1008 ari_read_bios_mirror 1, ldrh
1011 ari_read_bios_mirror 3, ldr
/* ari_read_io_old: legacy fallback path for I/O reads — calls out
 * through the old interface (8-byte sp adjust keeps EABI alignment).
 * Kept only as a commented-out alternative inside ari_read_io. */
1015 .macro ari_read_io_old tab_shift
1016 str lr, [sp, #-8]! @ EABI alignment..
1026 str r0, [fp, #readmem_dword-dynarec_local]
/* ari_read_io: hardware-register read.  Addresses below 0x1f801000
 * (psxH scratch/regs) are read directly from psxH (the \readop at the
 * end); others dispatch through the per-register handler table
 * \mem_tab, with a special SPU case for 16-bit reads. */
1030 .macro ari_read_io readop mem_tab tab_shift
1031 ldr r0, [fp, #address-dynarec_local]
1032 ldr r1, [fp, #psxH_ptr-dynarec_local]
1039 bic r2, r0, #0x1f800000                     @ offset within I/O space
1040 ldr r12,[fp, #\mem_tab-dynarec_local]
1041 subs r3, r2, #0x1000                        @ <0x1000 = psxH area
1043 @ ari_read_io_old \tab_shift
1046 ldr r12,[r12, r3, lsl #\tab_shift]          @ fetch handler pointer
1050 str lr, [sp, #-8]! @ EABI alignment..
1052 str r0, [fp, #readmem_dword-dynarec_local]
1056 .if \tab_shift == 1 @ read16
1061 ldr r12,[fp, #spu_readf-dynarec_local]      @ SPU handler for 16-bit reads
1065 @ no handler, just read psxH
1066 \readop r0, [r1, r2]
1067 str r0, [fp, #readmem_dword-dynarec_local]
1072 ari_read_io ldrb, tab_read8, 2
1075 ari_read_io ldrh, tab_read16, 1
1078 ari_read_io ldr, tab_read32, 0
/* ari_write_io_old: legacy fallback path for I/O writes (body elided);
 * referenced only as a commented-out alternative below. */
1080 .macro ari_write_io_old tab_shift
/* ari_write_io: hardware-register write.  Mirrors ari_read_io: value
 * comes from the \var slot with size suffix \pf, low addresses go
 * straight to psxH, others dispatch through \mem_tab, with SPU writes
 * routed to spu_writef (including the rare 32-bit case). */
1092 .macro ari_write_io pf var mem_tab tab_shift
1093 ldr r0, [fp, #address-dynarec_local]
1094 ldr\pf r1, [fp, #\var-dynarec_local]
1101 bic r2, r0, #0x1f800000
1102 ldr r12,[fp, #\mem_tab-dynarec_local]
1103 subs r3, r2, #0x1000
1105 @ ari_write_io_old \tab_shift
1108 ldr r12,[r12, r3, lsl #\tab_shift]
1113 ldr r3, [fp, #psxH_ptr-dynarec_local]
1121 ldrlo pc, [fp, #spu_writef-dynarec_local]   @ tail-call SPU handler
1123 @ write32 to SPU - very rare case (is this correct?)
1129 ldr pc, [fp, #spu_writef-dynarec_local]
1131 ldr pc, [fp, #spu_writef-dynarec_local]
/* 8-bit write is spelled out (not via the macro) because its psxH
 * fallback differs; see comment below. */
1138 @ PCSX always writes to psxH, so do we for consistency
1139 ldr r0, [fp, #address-dynarec_local]
1140 ldr r3, [fp, #psxH_ptr-dynarec_local]
1141 ldrb r1, [fp, #byte-dynarec_local]
1142 bic r2, r0, #0x1f800000
1143 ldr r12,[fp, #tab_write8-dynarec_local]
1145 subs r3, r2, #0x1000
1147 @ ari_write_io_old 2
1150 ldr r12,[r12, r3, lsl #2]
1157 ari_write_io h, hword, tab_write16, 1
1160 ari_write_io , word, tab_write32, 0
1162 @ vim:filetype=armasm