1 /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
2 * linkage_arm.s for PCSX *
3 * Copyright (C) 2009-2011 Ari64 *
4 * Copyright (C) 2010-2011 Gražvydas "notaz" Ignotas *
6 * This program is free software; you can redistribute it and/or modify *
7 * it under the terms of the GNU General Public License as published by *
8 * the Free Software Foundation; either version 2 of the License, or *
9 * (at your option) any later version. *
11 * This program is distributed in the hope that it will be useful, *
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of *
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
14 * GNU General Public License for more details. *
16 * You should have received a copy of the GNU General Public License *
17 * along with this program; if not, write to the *
18 * Free Software Foundation, Inc., *
19 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. *
20 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
45 .global pending_exception
59 .global restore_candidate
68 .type dynarec_local, %object
69 .size dynarec_local, dynarec_local_end-dynarec_local
71 .space dynarec_local_end-dynarec_local /*0x400630*/
72 next_interupt = dynarec_local + 64
73 .type next_interupt, %object
74 .size next_interupt, 4
75 cycle_count = next_interupt + 4
76 .type cycle_count, %object
78 last_count = cycle_count + 4
79 .type last_count, %object
81 pending_exception = last_count + 4
82 .type pending_exception, %object
83 .size pending_exception, 4
84 stop = pending_exception + 4
88 .type invc_ptr, %object
90 address = invc_ptr + 4
91 .type address, %object
93 readmem_dword = address + 4
94 readmem_word = readmem_dword
95 .type readmem_dword, %object
96 .size readmem_dword, 8
97 dword = readmem_dword + 8
108 .size byte, 1 /* 1 byte free */
118 .type psxRegs, %object
119 .size psxRegs, psxRegs_end-psxRegs
130 .type reg_cop0, %object
132 reg_cop2d = reg_cop0 + 128
133 .type reg_cop2d, %object
135 reg_cop2c = reg_cop2d + 128
136 .type reg_cop2c, %object
148 interrupt = cycle + 4
149 .type interrupt, %object
151 intCycle = interrupt + 4
152 .type intCycle, %object
154 psxRegs_end = intCycle + 256
157 nd_pcsx_io = psxRegs_end
158 .type nd_pcsx_io, %object
159 .size nd_pcsx_io, nd_pcsx_io_end-nd_pcsx_io
160 tab_read8 = nd_pcsx_io
161 .type tab_read8, %object
163 tab_read16 = tab_read8 + 4
164 .type tab_read16, %object
166 tab_read32 = tab_read16 + 4
167 .type tab_read32, %object
169 tab_write8 = tab_read32 + 4
170 .type tab_write8, %object
172 tab_write16 = tab_write8 + 4
173 .type tab_write16, %object
175 tab_write32 = tab_write16 + 4
176 .type tab_write32, %object
178 spu_readf = tab_write32 + 4
179 .type spu_readf, %object
181 spu_writef = spu_readf + 4
182 .type spu_writef, %object
184 nd_pcsx_io_end = spu_writef + 4
186 psxH_ptr = nd_pcsx_io_end
187 .type psxH_ptr, %object
189 align0 = psxH_ptr + 4 /* just for alignment */
190 .type align0, %object
192 branch_target = align0 + 4
193 .type branch_target, %object
194 .size branch_target, 4
195 mini_ht = branch_target + 4
196 .type mini_ht, %object
198 restore_candidate = mini_ht + 256
199 .type restore_candidate, %object
200 .size restore_candidate, 512
201 memory_map = restore_candidate + 512
202 .type memory_map, %object
203 .size memory_map, 4194304
204 dynarec_local_end = memory_map + 4194304
209 .type dyna_linker, %function
211 /* r0 = virtual target address */
212 /* r1 = instruction to patch */
219 eor r2, r2, r12, lsr #12
220 and r6, r6, r12, lsr #12
224 ldr r5, [r3, r2, lsl #2]
240 add r1, r1, r12, asr #6
242 moveq pc, r4 /* Stale i-cache */
245 and r1, r7, #0xff000000
248 add r1, r1, r2, lsr #8
252 /* hash_table lookup */
255 eor r4, r0, r0, lsl #16
261 ldr r5, [r3, r2, lsl #2]
268 /* jump_dirty lookup */
278 /* hash_table insert */
289 bl new_recompile_block
297 .size dyna_linker, .-dyna_linker
298 .global exec_pagefault
299 .type exec_pagefault, %function
301 /* r0 = instruction pointer */
302 /* r1 = fault address */
304 ldr r3, [fp, #reg_cop0+48-dynarec_local] /* Status */
306 ldr r4, [fp, #reg_cop0+16-dynarec_local] /* Context */
307 bic r6, r6, #0x0F800000
308 str r0, [fp, #reg_cop0+56-dynarec_local] /* EPC */
310 str r1, [fp, #reg_cop0+32-dynarec_local] /* BadVAddr */
312 str r3, [fp, #reg_cop0+48-dynarec_local] /* Status */
313 and r5, r6, r1, lsr #9
314 str r2, [fp, #reg_cop0+52-dynarec_local] /* Cause */
315 and r1, r1, r6, lsl #9
316 str r1, [fp, #reg_cop0+40-dynarec_local] /* EntryHi */
318 str r4, [fp, #reg_cop0+16-dynarec_local] /* Context */
322 .size exec_pagefault, .-exec_pagefault
324 /* Special dynamic linker for the case where a page fault
325 may occur in a branch delay slot */
326 .global dyna_linker_ds
327 .type dyna_linker_ds, %function
329 /* r0 = virtual target address */
330 /* r1 = instruction to patch */
337 eor r2, r2, r12, lsr #12
338 and r6, r6, r12, lsr #12
342 ldr r5, [r3, r2, lsl #2]
358 add r1, r1, r12, asr #6
360 moveq pc, r4 /* Stale i-cache */
363 and r1, r7, #0xff000000
366 add r1, r1, r2, lsr #8
370 /* hash_table lookup */
373 eor r4, r0, r0, lsl #16
379 ldr r5, [r3, r2, lsl #2]
386 /* jump_dirty lookup */
396 /* hash_table insert */
409 bl new_recompile_block
416 mov r2, #0x80000008 /* High bit set indicates pagefault in delay slot */
419 .size dyna_linker_ds, .-dyna_linker_ds
428 .global jump_vaddr_r0
429 .type jump_vaddr_r0, %function
431 eor r2, r0, r0, lsl #16
433 .size jump_vaddr_r0, .-jump_vaddr_r0
434 .global jump_vaddr_r1
435 .type jump_vaddr_r1, %function
437 eor r2, r1, r1, lsl #16
440 .size jump_vaddr_r1, .-jump_vaddr_r1
441 .global jump_vaddr_r2
442 .type jump_vaddr_r2, %function
445 eor r2, r2, r2, lsl #16
447 .size jump_vaddr_r2, .-jump_vaddr_r2
448 .global jump_vaddr_r3
449 .type jump_vaddr_r3, %function
451 eor r2, r3, r3, lsl #16
454 .size jump_vaddr_r3, .-jump_vaddr_r3
455 .global jump_vaddr_r4
456 .type jump_vaddr_r4, %function
458 eor r2, r4, r4, lsl #16
461 .size jump_vaddr_r4, .-jump_vaddr_r4
462 .global jump_vaddr_r5
463 .type jump_vaddr_r5, %function
465 eor r2, r5, r5, lsl #16
468 .size jump_vaddr_r5, .-jump_vaddr_r5
469 .global jump_vaddr_r6
470 .type jump_vaddr_r6, %function
472 eor r2, r6, r6, lsl #16
475 .size jump_vaddr_r6, .-jump_vaddr_r6
476 .global jump_vaddr_r8
477 .type jump_vaddr_r8, %function
479 eor r2, r8, r8, lsl #16
482 .size jump_vaddr_r8, .-jump_vaddr_r8
483 .global jump_vaddr_r9
484 .type jump_vaddr_r9, %function
486 eor r2, r9, r9, lsl #16
489 .size jump_vaddr_r9, .-jump_vaddr_r9
490 .global jump_vaddr_r10
491 .type jump_vaddr_r10, %function
493 eor r2, r10, r10, lsl #16
496 .size jump_vaddr_r10, .-jump_vaddr_r10
497 .global jump_vaddr_r12
498 .type jump_vaddr_r12, %function
500 eor r2, r12, r12, lsl #16
503 .size jump_vaddr_r12, .-jump_vaddr_r12
504 .global jump_vaddr_r7
505 .type jump_vaddr_r7, %function
507 eor r2, r7, r7, lsl #16
509 .size jump_vaddr_r7, .-jump_vaddr_r7
511 .type jump_vaddr, %function
515 and r2, r3, r2, lsr #12
522 str r10, [fp, #cycle_count-dynarec_local]
524 ldr r10, [fp, #cycle_count-dynarec_local]
526 .size jump_vaddr, .-jump_vaddr
529 .global verify_code_ds
530 .type verify_code_ds, %function
532 str r8, [fp, #branch_target-dynarec_local]
533 .size verify_code_ds, .-verify_code_ds
534 .global verify_code_vm
535 .type verify_code_vm, %function
538 .type verify_code, %function
567 ldr r8, [fp, #branch_target-dynarec_local]
572 .size verify_code, .-verify_code
573 .size verify_code_vm, .-verify_code_vm
577 .type cc_interrupt, %function
579 ldr r0, [fp, #last_count-dynarec_local]
583 str r1, [fp, #pending_exception-dynarec_local]
584 and r2, r2, r10, lsr #17
585 add r3, fp, #restore_candidate-dynarec_local
586 str r10, [fp, #cycle-dynarec_local] /* PCSX cycles */
587 @@ str r10, [fp, #reg_cop0+36-dynarec_local] /* Count */
595 ldr r10, [fp, #cycle-dynarec_local]
596 ldr r0, [fp, #next_interupt-dynarec_local]
597 ldr r1, [fp, #pending_exception-dynarec_local]
598 ldr r2, [fp, #stop-dynarec_local]
599 str r0, [fp, #last_count-dynarec_local]
602 ldmnefd sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, pc}
606 ldr r0, [fp, #pcaddr-dynarec_local]
610 /* Move 'dirty' blocks to the 'clean' list */
621 .size cc_interrupt, .-cc_interrupt
625 .type do_interrupt, %function
627 ldr r0, [fp, #pcaddr-dynarec_local]
631 .size do_interrupt, .-do_interrupt
635 .type fp_exception, %function
639 ldr r1, [fp, #reg_cop0+48-dynarec_local] /* Status */
641 str r0, [fp, #reg_cop0+56-dynarec_local] /* EPC */
644 str r1, [fp, #reg_cop0+48-dynarec_local] /* Status */
645 str r2, [fp, #reg_cop0+52-dynarec_local] /* Cause */
649 .size fp_exception, .-fp_exception
651 .global fp_exception_ds
652 .type fp_exception_ds, %function
654 mov r2, #0x90000000 /* Set high bit if delay slot */
656 .size fp_exception_ds, .-fp_exception_ds
660 .type jump_syscall, %function
662 ldr r1, [fp, #reg_cop0+48-dynarec_local] /* Status */
664 str r0, [fp, #reg_cop0+56-dynarec_local] /* EPC */
667 str r1, [fp, #reg_cop0+48-dynarec_local] /* Status */
668 str r2, [fp, #reg_cop0+52-dynarec_local] /* Cause */
672 .size jump_syscall, .-jump_syscall
676 .global jump_syscall_hle
677 .type jump_syscall_hle, %function
679 str r0, [fp, #pcaddr-dynarec_local] /* PC must be set to EPC for psxException */
680 ldr r2, [fp, #last_count-dynarec_local]
681 mov r1, #0 /* in delay slot */
683 mov r0, #0x20 /* cause */
684 str r2, [fp, #cycle-dynarec_local] /* PCSX cycle counter */
687 /* note: psxException might do a recursive recompiler call from its HLE code,
688 * so be ready for this */
690 ldr r1, [fp, #next_interupt-dynarec_local]
691 ldr r10, [fp, #cycle-dynarec_local]
692 ldr r0, [fp, #pcaddr-dynarec_local]
694 str r1, [fp, #last_count-dynarec_local]
697 .size jump_syscall_hle, .-jump_syscall_hle
701 .type jump_hlecall, %function
703 ldr r2, [fp, #last_count-dynarec_local]
704 str r0, [fp, #pcaddr-dynarec_local]
707 str r2, [fp, #cycle-dynarec_local] /* PCSX cycle counter */
709 .size jump_hlecall, .-jump_hlecall
713 .type jump_intcall, %function
715 	ldr	r2, [fp, #last_count-dynarec_local]
716 	str	r0, [fp, #pcaddr-dynarec_local]
719 	str	r2, [fp, #cycle-dynarec_local] /* PCSX cycle counter */
/* FIX: was ".size jump_hlecall, .-jump_hlecall" — copy-paste from the
 * jump_hlecall epilogue above; it set jump_hlecall's ELF symbol size a
 * second time and left jump_intcall with no size at all. */
721 .size jump_intcall, .-jump_intcall
725 .global new_dyna_leave
726 .type new_dyna_leave, %function
727 ldr r0, [fp, #last_count-dynarec_local]
730 str r10, [fp, #cycle-dynarec_local]
731 ldmfd sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, pc}
732 .size new_dyna_leave, .-new_dyna_leave
734 /* these are used to call memhandlers */
736 .global indirect_jump_indexed
737 .type indirect_jump_indexed, %function
738 indirect_jump_indexed:
739 ldr r0, [r0, r1, lsl #2]
740 .global indirect_jump
741 .type indirect_jump, %function
743 ldr r12, [fp, #last_count-dynarec_local]
745 str r2, [fp, #cycle-dynarec_local]
747 .size indirect_jump, .-indirect_jump
748 .size indirect_jump_indexed, .-indirect_jump_indexed
751 .global invalidate_addr_r0
752 .type invalidate_addr_r0, %function
754 stmia fp, {r0, r1, r2, r3, r12, lr}
756 b invalidate_addr_call
757 .size invalidate_addr_r0, .-invalidate_addr_r0
759 .global invalidate_addr_r1
760 .type invalidate_addr_r1, %function
762 stmia fp, {r0, r1, r2, r3, r12, lr}
764 b invalidate_addr_call
765 .size invalidate_addr_r1, .-invalidate_addr_r1
767 .global invalidate_addr_r2
768 .type invalidate_addr_r2, %function
770 stmia fp, {r0, r1, r2, r3, r12, lr}
772 b invalidate_addr_call
773 .size invalidate_addr_r2, .-invalidate_addr_r2
775 .global invalidate_addr_r3
776 .type invalidate_addr_r3, %function
778 stmia fp, {r0, r1, r2, r3, r12, lr}
780 b invalidate_addr_call
781 .size invalidate_addr_r3, .-invalidate_addr_r3
783 .global invalidate_addr_r4
784 .type invalidate_addr_r4, %function
786 stmia fp, {r0, r1, r2, r3, r12, lr}
788 b invalidate_addr_call
789 .size invalidate_addr_r4, .-invalidate_addr_r4
791 .global invalidate_addr_r5
792 .type invalidate_addr_r5, %function
794 stmia fp, {r0, r1, r2, r3, r12, lr}
796 b invalidate_addr_call
797 .size invalidate_addr_r5, .-invalidate_addr_r5
799 .global invalidate_addr_r6
800 .type invalidate_addr_r6, %function
802 stmia fp, {r0, r1, r2, r3, r12, lr}
804 b invalidate_addr_call
805 .size invalidate_addr_r6, .-invalidate_addr_r6
807 .global invalidate_addr_r7
808 .type invalidate_addr_r7, %function
810 stmia fp, {r0, r1, r2, r3, r12, lr}
812 b invalidate_addr_call
813 .size invalidate_addr_r7, .-invalidate_addr_r7
815 .global invalidate_addr_r8
816 .type invalidate_addr_r8, %function
818 stmia fp, {r0, r1, r2, r3, r12, lr}
820 b invalidate_addr_call
821 .size invalidate_addr_r8, .-invalidate_addr_r8
823 .global invalidate_addr_r9
824 .type invalidate_addr_r9, %function
826 stmia fp, {r0, r1, r2, r3, r12, lr}
828 b invalidate_addr_call
829 .size invalidate_addr_r9, .-invalidate_addr_r9
831 .global invalidate_addr_r10
832 .type invalidate_addr_r10, %function
834 stmia fp, {r0, r1, r2, r3, r12, lr}
836 b invalidate_addr_call
837 .size invalidate_addr_r10, .-invalidate_addr_r10
839 .global invalidate_addr_r12
840 .type invalidate_addr_r12, %function
842 stmia fp, {r0, r1, r2, r3, r12, lr}
844 .size invalidate_addr_r12, .-invalidate_addr_r12
846 .global invalidate_addr_call
847 .type invalidate_addr_call, %function
848 invalidate_addr_call:
850 ldmia fp, {r0, r1, r2, r3, r12, pc}
851 .size invalidate_addr_call, .-invalidate_addr_call
854 .global new_dyna_start
855 .type new_dyna_start, %function
857 /* ip is stored to conform EABI alignment */
858 stmfd sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, lr}
860 movw fp, #:lower16:dynarec_local
861 movt fp, #:upper16:dynarec_local
865 ldr r0, [fp, #pcaddr-dynarec_local]
867 ldr r1, [fp, #next_interupt-dynarec_local]
868 ldr r10, [fp, #cycle-dynarec_local]
869 str r1, [fp, #last_count-dynarec_local]
874 .size new_dyna_start, .-new_dyna_start
876 /* --------------------------------------- */
879 .global ari_read_ram8
880 .global ari_read_ram16
881 .global ari_read_ram32
882 .global ari_read_ram_mirror8
883 .global ari_read_ram_mirror16
884 .global ari_read_ram_mirror32
885 .global ari_write_ram8
886 .global ari_write_ram16
887 .global ari_write_ram32
888 .global ari_write_ram_mirror8
889 .global ari_write_ram_mirror16
890 .global ari_write_ram_mirror32
891 .global ari_read_bios8
892 .global ari_read_bios16
893 .global ari_read_bios32
895 .global ari_read_io16
896 .global ari_read_io32
897 .global ari_write_io8
898 .global ari_write_io16
899 .global ari_write_io32
901 .macro ari_read_ram bic_const op
902 ldr r0, [fp, #address-dynarec_local]
904 bic r0, r0, #\bic_const
907 str r0, [fp, #readmem_dword-dynarec_local]
920 .macro ari_read_ram_mirror mvn_const, op
921 ldr r0, [fp, #address-dynarec_local]
926 str r0, [fp, #readmem_dword-dynarec_local]
930 ari_read_ram_mirror8:
931 ari_read_ram_mirror 0, ldrb
933 ari_read_ram_mirror16:
934 ari_read_ram_mirror (1<<11), ldrh
936 ari_read_ram_mirror32:
937 ari_read_ram_mirror (3<<11), ldr
939 /* invalidation is already taken care of by the caller */
940 .macro ari_write_ram bic_const var pf
941 ldr r0, [fp, #address-dynarec_local]
942 ldr\pf r1, [fp, #\var-dynarec_local]
944 bic r0, r0, #\bic_const
951 ari_write_ram 0, byte, b
954 ari_write_ram 1, hword, h
957 ari_write_ram 3, word,
959 .macro ari_write_ram_mirror mvn_const var pf
960 ldr r0, [fp, #address-dynarec_local]
962 ldr\pf r1, [fp, #\var-dynarec_local]
964 ldr r2, [fp, #invc_ptr-dynarec_local]
966 ldrb r2, [r2, r0, lsr #12]
974 ari_write_ram_mirror8:
975 ari_write_ram_mirror 0, byte, b
977 ari_write_ram_mirror16:
978 ari_write_ram_mirror (1<<11), hword, h
980 ari_write_ram_mirror32:
981 ari_write_ram_mirror (3<<11), word,
984 .macro ari_read_bios_mirror bic_const op
985 ldr r0, [fp, #address-dynarec_local]
986 orr r0, r0, #0x80000000
987 bic r0, r0, #(0x20000000|\bic_const) @ map to 0x9fc...
989 str r0, [fp, #readmem_dword-dynarec_local]
994 ari_read_bios_mirror 0, ldrb
997 ari_read_bios_mirror 1, ldrh
1000 ari_read_bios_mirror 3, ldr
1004 .macro ari_read_io_old tab_shift
1005 str lr, [sp, #-8]! @ EABI alignment..
1015 str r0, [fp, #readmem_dword-dynarec_local]
1019 .macro ari_read_io readop mem_tab tab_shift
1020 ldr r0, [fp, #address-dynarec_local]
1021 ldr r1, [fp, #psxH_ptr-dynarec_local]
1028 bic r2, r0, #0x1f800000
1029 ldr r12,[fp, #\mem_tab-dynarec_local]
1030 subs r3, r2, #0x1000
1032 @ ari_read_io_old \tab_shift
1035 ldr r12,[r12, r3, lsl #\tab_shift]
1039 str lr, [sp, #-8]! @ EABI alignment..
1041 str r0, [fp, #readmem_dword-dynarec_local]
1045 .if \tab_shift == 1 @ read16
1050 ldr r12,[fp, #spu_readf-dynarec_local]
1054 @ no handler, just read psxH
1055 \readop r0, [r1, r2]
1056 str r0, [fp, #readmem_dword-dynarec_local]
1061 ari_read_io ldrb, tab_read8, 2
1064 ari_read_io ldrh, tab_read16, 1
1067 ari_read_io ldr, tab_read32, 0
1069 .macro ari_write_io_old tab_shift
1081 .macro ari_write_io pf var mem_tab tab_shift
1082 ldr r0, [fp, #address-dynarec_local]
1083 ldr\pf r1, [fp, #\var-dynarec_local]
1090 bic r2, r0, #0x1f800000
1091 ldr r12,[fp, #\mem_tab-dynarec_local]
1092 subs r3, r2, #0x1000
1094 @ ari_write_io_old \tab_shift
1097 ldr r12,[r12, r3, lsl #\tab_shift]
1102 ldr r3, [fp, #psxH_ptr-dynarec_local]
1110 ldrlo pc, [fp, #spu_writef-dynarec_local]
1112 @ write32 to SPU - very rare case (is this correct?)
1118 ldr pc, [fp, #spu_writef-dynarec_local]
1120 ldr pc, [fp, #spu_writef-dynarec_local]
1127 @ PCSX always writes to psxH, so do we for consistency
1128 ldr r0, [fp, #address-dynarec_local]
1129 ldr r3, [fp, #psxH_ptr-dynarec_local]
1130 ldrb r1, [fp, #byte-dynarec_local]
1131 bic r2, r0, #0x1f800000
1132 ldr r12,[fp, #tab_write8-dynarec_local]
1134 subs r3, r2, #0x1000
1136 @ ari_write_io_old 2
1139 ldr r12,[r12, r3, lsl #2]
1146 ari_write_io h, hword, tab_write16, 1
1149 ari_write_io , word, tab_write32, 0
1151 @ vim:filetype=armasm