1 /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
2 * linkage_arm.s for PCSX *
3 * Copyright (C) 2009-2011 Ari64 *
4 * Copyright (C) 2010-2011 Gražvydas "notaz" Ignotas  *
6 * This program is free software; you can redistribute it and/or modify *
7 * it under the terms of the GNU General Public License as published by *
8 * the Free Software Foundation; either version 2 of the License, or *
9 * (at your option) any later version. *
11 * This program is distributed in the hope that it will be useful, *
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of *
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
14 * GNU General Public License for more details. *
16 * You should have received a copy of the GNU General Public License *
17 * along with this program; if not, write to the *
18 * Free Software Foundation, Inc., *
19 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. *
20 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
/* dynarec_local: a single large state block; the dynarec keeps fp pointing
   at it and addresses every field below as a fixed fp-relative offset.
   Each symbol is defined arithmetically as base + offset, not as a label. */
45 .global pending_exception
59 .global restore_candidate
68 .type dynarec_local, %object
69 .size dynarec_local, dynarec_local_end-dynarec_local
71 .space dynarec_local_end-dynarec_local /*0x400630*/
@ --- scalar scheduling/state variables (4 bytes each unless sized otherwise) ---
72 next_interupt = dynarec_local + 64
73 .type next_interupt, %object
74 .size next_interupt, 4
75 cycle_count = next_interupt + 4
76 .type cycle_count, %object
78 last_count = cycle_count + 4
79 .type last_count, %object
81 pending_exception = last_count + 4
82 .type pending_exception, %object
83 .size pending_exception, 4
84 stop = pending_exception + 4
88 .type invc_ptr, %object
90 address = invc_ptr + 4
91 .type address, %object
@ memhandler result slot; readmem_word aliases the low word of the dword slot
93 readmem_dword = address + 4
94 readmem_word = readmem_dword
95 .type readmem_dword, %object
96 .size readmem_dword, 8
97 dword = readmem_dword + 8
108 .size byte, 1 /* 1 byte free */
@ --- psxRegs: the emulated PSX CPU register file (layout must match the C
@     struct; reg_cop0/reg_cop2d/reg_cop2c are 128-byte coprocessor banks) ---
118 .type psxRegs, %object
119 .size psxRegs, psxRegs_end-psxRegs
130 .type reg_cop0, %object
132 reg_cop2d = reg_cop0 + 128
133 .type reg_cop2d, %object
135 reg_cop2c = reg_cop2d + 128
136 .type reg_cop2c, %object
148 interrupt = cycle + 4
149 .type interrupt, %object
151 intCycle = interrupt + 4
152 .type intCycle, %object
154 psxRegs_end = intCycle + 256
@ --- nd_pcsx_io: table of function pointers for PCSX I/O handlers,
@     consumed by the ari_read_io/ari_write_io handlers further below ---
157 nd_pcsx_io = psxRegs_end
158 .type nd_pcsx_io, %object
159 .size nd_pcsx_io, nd_pcsx_io_end-nd_pcsx_io
160 tab_read8 = nd_pcsx_io
161 .type tab_read8, %object
163 tab_read16 = tab_read8 + 4
164 .type tab_read16, %object
166 tab_read32 = tab_read16 + 4
167 .type tab_read32, %object
169 tab_write8 = tab_read32 + 4
170 .type tab_write8, %object
172 tab_write16 = tab_write8 + 4
173 .type tab_write16, %object
175 tab_write32 = tab_write16 + 4
176 .type tab_write32, %object
178 spu_readf = tab_write32 + 4
179 .type spu_readf, %object
181 spu_writef = spu_readf + 4
182 .type spu_writef, %object
184 nd_pcsx_io_end = spu_writef + 4
186 psxH_ptr = nd_pcsx_io_end
187 .type psxH_ptr, %object
189 align0 = psxH_ptr + 4 /* just for alignment */
190 .type align0, %object
@ --- dynarec working data: branch target scratch, a 256-byte mini hash
@     table, 512-byte restore-candidate bitmap and the 4 MB memory_map ---
192 branch_target = align0 + 4
193 .type branch_target, %object
194 .size branch_target, 4
195 mini_ht = branch_target + 4
196 .type mini_ht, %object
198 restore_candidate = mini_ht + 256
199 .type restore_candidate, %object
200 .size restore_candidate, 512
201 memory_map = restore_candidate + 512
202 .type memory_map, %object
203 .size memory_map, 4194304
204 dynarec_local_end = memory_map + 4194304
/* load_var_adr reg, var: materialize the full 32-bit address of \var in \reg
   with a movw/movt pair (ARMv7 immediate-move idiom; no literal pool load).
   NOTE(review): the closing .endm is not visible in this extract. */
206 .macro load_var_adr reg var
208 movw \reg, #:lower16:\var
209 movt \reg, #:upper16:\var
/* dyna_linker: resolve a branch from compiled code to its target block.
   Looks the target up (hash_table / jump_dirty, per the inline markers),
   patches the calling branch instruction when a block is found, and falls
   back to new_recompile_block when none exists.
   NOTE(review): body is heavily elided in this extract — only fragments of
   the lookup/patch sequence are visible. */
218 .type dyna_linker, %function
220 /* r0 = virtual target address */
221 /* r1 = instruction to patch */
235 ldr r5, [r3, r2, lsl #2]
251 add r1, r1, r12, asr #6
253 moveq pc, r4 /* Stale i-cache */
256 and r1, r7, #0xff000000
259 add r1, r1, r2, lsr #8
263 /* hash_table lookup */
266 eor r4, r0, r0, lsl #16
272 ldr r5, [r3, r2, lsl #2]
279 /* jump_dirty lookup */
289 /* hash_table insert */
300 bl new_recompile_block
308 .size dyna_linker, .-dyna_linker
/* exec_pagefault: raise a TLB-miss style exception for an instruction fetch.
   Fills in the emulated COP0 registers (offsets into reg_cop0, per the
   existing per-line comments): EPC <- faulting PC, BadVAddr <- fault address,
   and derived Status/Cause/Context/EntryHi values. */
309 .global exec_pagefault
310 .type exec_pagefault, %function
312 /* r0 = instruction pointer */
313 /* r1 = fault address */
315 ldr r3, [fp, #reg_cop0+48-dynarec_local] /* Status */
317 ldr r4, [fp, #reg_cop0+16-dynarec_local] /* Context */
318 bic r6, r6, #0x0F800000
319 str r0, [fp, #reg_cop0+56-dynarec_local] /* EPC */
321 str r1, [fp, #reg_cop0+32-dynarec_local] /* BadVAddr */
323 str r3, [fp, #reg_cop0+48-dynarec_local] /* Status */
324 and r5, r6, r1, lsr #9
325 str r2, [fp, #reg_cop0+52-dynarec_local] /* Cause */
326 and r1, r1, r6, lsl #9
327 str r1, [fp, #reg_cop0+40-dynarec_local] /* EntryHi */
329 str r4, [fp, #reg_cop0+16-dynarec_local] /* Context */
333 .size exec_pagefault, .-exec_pagefault
335 /* Special dynamic linker for the case where a page fault
336 may occur in a branch delay slot */
337 .global dyna_linker_ds
338 .type dyna_linker_ds, %function
340 /* r0 = virtual target address */
341 /* r1 = instruction to patch */
@ Same lookup/patch structure as dyna_linker above; differs in the pagefault
@ path, which flags "fault in delay slot" via the high bit of the cause code.
355 ldr r5, [r3, r2, lsl #2]
371 add r1, r1, r12, asr #6
373 moveq pc, r4 /* Stale i-cache */
376 and r1, r7, #0xff000000
379 add r1, r1, r2, lsr #8
383 /* hash_table lookup */
386 eor r4, r0, r0, lsl #16
392 ldr r5, [r3, r2, lsl #2]
399 /* jump_dirty lookup */
409 /* hash_table insert */
422 bl new_recompile_block
429 mov r2, #0x80000008 /* High bit set indicates pagefault in delay slot */
432 .size dyna_linker_ds, .-dyna_linker_ds
/* jump_vaddr_rN family: one entry stub per register that may hold the
   virtual target address.  Each stub pre-hashes its register into r2
   (eor rN, rN<<16) and presumably continues into the shared jump_vaddr
   lookup below — the fall-through/branch lines are elided in this extract.
   Note r7 is handled last and there is no r11 (r11 = fp) or r13-r15 stub. */
441 .global jump_vaddr_r0
442 .type jump_vaddr_r0, %function
444 eor r2, r0, r0, lsl #16
446 .size jump_vaddr_r0, .-jump_vaddr_r0
447 .global jump_vaddr_r1
448 .type jump_vaddr_r1, %function
450 eor r2, r1, r1, lsl #16
453 .size jump_vaddr_r1, .-jump_vaddr_r1
454 .global jump_vaddr_r2
455 .type jump_vaddr_r2, %function
458 eor r2, r2, r2, lsl #16
460 .size jump_vaddr_r2, .-jump_vaddr_r2
461 .global jump_vaddr_r3
462 .type jump_vaddr_r3, %function
464 eor r2, r3, r3, lsl #16
467 .size jump_vaddr_r3, .-jump_vaddr_r3
468 .global jump_vaddr_r4
469 .type jump_vaddr_r4, %function
471 eor r2, r4, r4, lsl #16
474 .size jump_vaddr_r4, .-jump_vaddr_r4
475 .global jump_vaddr_r5
476 .type jump_vaddr_r5, %function
478 eor r2, r5, r5, lsl #16
481 .size jump_vaddr_r5, .-jump_vaddr_r5
482 .global jump_vaddr_r6
483 .type jump_vaddr_r6, %function
485 eor r2, r6, r6, lsl #16
488 .size jump_vaddr_r6, .-jump_vaddr_r6
489 .global jump_vaddr_r8
490 .type jump_vaddr_r8, %function
492 eor r2, r8, r8, lsl #16
495 .size jump_vaddr_r8, .-jump_vaddr_r8
496 .global jump_vaddr_r9
497 .type jump_vaddr_r9, %function
499 eor r2, r9, r9, lsl #16
502 .size jump_vaddr_r9, .-jump_vaddr_r9
503 .global jump_vaddr_r10
504 .type jump_vaddr_r10, %function
506 eor r2, r10, r10, lsl #16
509 .size jump_vaddr_r10, .-jump_vaddr_r10
510 .global jump_vaddr_r12
511 .type jump_vaddr_r12, %function
513 eor r2, r12, r12, lsl #16
516 .size jump_vaddr_r12, .-jump_vaddr_r12
517 .global jump_vaddr_r7
518 .type jump_vaddr_r7, %function
520 eor r2, r7, r7, lsl #16
522 .size jump_vaddr_r7, .-jump_vaddr_r7
/* jump_vaddr: shared hash-table lookup tail for the jump_vaddr_rN stubs.
   Masks the hash in r2 down to a table index, spills/reloads the cycle
   counter (r10) around an out-of-line call.  Body partly elided here. */
524 .type jump_vaddr, %function
528 and r2, r3, r2, lsr #12
535 str r10, [fp, #cycle_count-dynarec_local]
537 ldr r10, [fp, #cycle_count-dynarec_local]
539 .size jump_vaddr, .-jump_vaddr
/* verify_code_ds / verify_code_vm / verify_code: check that the source
   MIPS code a compiled block was translated from is still unchanged.
   The _ds variant additionally saves/restores r8 via branch_target
   (delay-slot register); bodies are elided in this extract. */
542 .global verify_code_ds
543 .type verify_code_ds, %function
545 str r8, [fp, #branch_target-dynarec_local]
546 .size verify_code_ds, .-verify_code_ds
547 .global verify_code_vm
548 .type verify_code_vm, %function
551 .type verify_code, %function
580 ldr r8, [fp, #branch_target-dynarec_local]
585 .size verify_code, .-verify_code
586 .size verify_code_vm, .-verify_code_vm
/* cc_interrupt: called when the cycle counter (r10) expires.  Syncs the
   PCSX cycle count, runs pending events, then reloads next_interupt /
   pending_exception / stop and either returns to compiled code or unwinds
   the whole dynarec frame (the conditional ldmnefd pops into pc). */
590 .type cc_interrupt, %function
592 ldr r0, [fp, #last_count-dynarec_local]
596 str r1, [fp, #pending_exception-dynarec_local]
597 and r2, r2, r10, lsr #17
598 add r3, fp, #restore_candidate-dynarec_local
599 str r10, [fp, #cycle-dynarec_local] /* PCSX cycles */
600 @@ str r10, [fp, #reg_cop0+36-dynarec_local] /* Count */
608 ldr r10, [fp, #cycle-dynarec_local]
609 ldr r0, [fp, #next_interupt-dynarec_local]
610 ldr r1, [fp, #pending_exception-dynarec_local]
611 ldr r2, [fp, #stop-dynarec_local]
612 str r0, [fp, #last_count-dynarec_local]
@ full-frame exit path: restores callee-saved regs and returns to the
@ original new_dyna_start caller when the condition holds
615 ldmnefd sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, pc}
619 ldr r0, [fp, #pcaddr-dynarec_local]
623 /* Move 'dirty' blocks to the 'clean' list */
634 .size cc_interrupt, .-cc_interrupt
/* do_interrupt: resume compiled execution at pcaddr after an interrupt
   has been serviced.  Body mostly elided in this extract. */
638 .type do_interrupt, %function
640 ldr r0, [fp, #pcaddr-dynarec_local]
644 .size do_interrupt, .-do_interrupt
/* fp_exception: raise a coprocessor-unusable exception — sets EPC to the
   faulting PC and updates Status/Cause in the emulated COP0 bank.
   fp_exception_ds is the delay-slot variant: same path, but the cause
   value has the high bit set (0x90000000 vs 0x10000000). */
648 .type fp_exception, %function
652 ldr r1, [fp, #reg_cop0+48-dynarec_local] /* Status */
654 str r0, [fp, #reg_cop0+56-dynarec_local] /* EPC */
657 str r1, [fp, #reg_cop0+48-dynarec_local] /* Status */
658 str r2, [fp, #reg_cop0+52-dynarec_local] /* Cause */
662 .size fp_exception, .-fp_exception
664 .global fp_exception_ds
665 .type fp_exception_ds, %function
667 mov r2, #0x90000000 /* Set high bit if delay slot */
669 .size fp_exception_ds, .-fp_exception_ds
/* jump_syscall: raise a SYSCALL exception — EPC <- r0, update Status and
   Cause in the emulated COP0 bank, then jump to the exception vector
   (vector branch elided in this extract). */
673 .type jump_syscall, %function
675 ldr r1, [fp, #reg_cop0+48-dynarec_local] /* Status */
677 str r0, [fp, #reg_cop0+56-dynarec_local] /* EPC */
680 str r1, [fp, #reg_cop0+48-dynarec_local] /* Status */
681 str r2, [fp, #reg_cop0+52-dynarec_local] /* Cause */
685 .size jump_syscall, .-jump_syscall
/* jump_syscall_hle: HLE syscall path — hands control to PCSX's
   psxException (cause 0x20, not in delay slot) after syncing pcaddr and
   the cycle counter, then reloads dynarec state on return. */
689 .global jump_syscall_hle
690 .type jump_syscall_hle, %function
692 str r0, [fp, #pcaddr-dynarec_local] /* PC must be set to EPC for psxException */
693 ldr r2, [fp, #last_count-dynarec_local]
694 mov r1, #0 /* in delay slot */
696 mov r0, #0x20 /* cause */
697 str r2, [fp, #cycle-dynarec_local] /* PCSX cycle counter */
700 /* note: psxException might do recursive recompiler call from its HLE code,
701 * so be ready for this */
@ reload scheduling state that the C side may have changed
703 ldr r1, [fp, #next_interupt-dynarec_local]
704 ldr r10, [fp, #cycle-dynarec_local]
705 ldr r0, [fp, #pcaddr-dynarec_local]
707 str r1, [fp, #last_count-dynarec_local]
710 .size jump_syscall_hle, .-jump_syscall_hle
/* jump_hlecall: enter a PCSX HLE handler — store the return pcaddr and
   sync the cycle counter before calling out (call elided in extract). */
714 .type jump_hlecall, %function
716 ldr r2, [fp, #last_count-dynarec_local]
717 str r0, [fp, #pcaddr-dynarec_local]
720 str r2, [fp, #cycle-dynarec_local] /* PCSX cycle counter */
722 .size jump_hlecall, .-jump_hlecall
/* jump_intcall: enter a PCSX interrupt handler — same pcaddr/cycle sync
   pattern as jump_hlecall above. */
726 .type jump_intcall, %function
728 ldr r2, [fp, #last_count-dynarec_local]
729 str r0, [fp, #pcaddr-dynarec_local]
732 str r2, [fp, #cycle-dynarec_local] /* PCSX cycle counter */
/* fix: the .size directive named jump_hlecall (copy-paste from the block
 * above, which already closed itself); close jump_intcall instead so its
 * ELF symbol gets a correct size and jump_hlecall's is not redefined. */
734 .size jump_intcall, .-jump_intcall
/* new_dyna_leave: exit the dynarec — write the final cycle count back to
   psxRegs.cycle, then pop the frame pushed by new_dyna_start (note the
   matching reg list incl. ip for EABI 8-byte stack alignment) straight
   into pc, returning to the C caller. */
738 .global new_dyna_leave
739 .type new_dyna_leave, %function
740 ldr r0, [fp, #last_count-dynarec_local]
743 str r10, [fp, #cycle-dynarec_local]
744 ldmfd sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, pc}
745 .size new_dyna_leave, .-new_dyna_leave
747 /* these are used to call memhandlers */
/* indirect_jump_indexed: fetch handler pointer from table r0 at index r1,
   then fall into indirect_jump, which syncs the cycle counter before
   transferring control to the handler (tail elided in extract). */
749 .global indirect_jump_indexed
750 .type indirect_jump_indexed, %function
751 indirect_jump_indexed:
752 ldr r0, [r0, r1, lsl #2]
753 .global indirect_jump
754 .type indirect_jump, %function
756 ldr r12, [fp, #last_count-dynarec_local]
758 str r2, [fp, #cycle-dynarec_local]
760 .size indirect_jump, .-indirect_jump
761 .size indirect_jump_indexed, .-indirect_jump_indexed
/* invalidate_addr_rN family: per-register entry stubs used when compiled
   code must invalidate the block covering an address held in rN.  Each
   stub spills the caller-saved regs {r0-r3,r12,lr} into the start of
   dynarec_local (stmia fp,...) and branches to the common
   invalidate_addr_call, which restores them — loading the saved lr into
   pc — to return.  The per-stub "mov r0, rN" lines are elided here.
   There is no r11 stub (r11 = fp) and r12's fall-through needs no branch. */
764 .global invalidate_addr_r0
765 .type invalidate_addr_r0, %function
767 stmia fp, {r0, r1, r2, r3, r12, lr}
769 b invalidate_addr_call
770 .size invalidate_addr_r0, .-invalidate_addr_r0
772 .global invalidate_addr_r1
773 .type invalidate_addr_r1, %function
775 stmia fp, {r0, r1, r2, r3, r12, lr}
777 b invalidate_addr_call
778 .size invalidate_addr_r1, .-invalidate_addr_r1
780 .global invalidate_addr_r2
781 .type invalidate_addr_r2, %function
783 stmia fp, {r0, r1, r2, r3, r12, lr}
785 b invalidate_addr_call
786 .size invalidate_addr_r2, .-invalidate_addr_r2
788 .global invalidate_addr_r3
789 .type invalidate_addr_r3, %function
791 stmia fp, {r0, r1, r2, r3, r12, lr}
793 b invalidate_addr_call
794 .size invalidate_addr_r3, .-invalidate_addr_r3
796 .global invalidate_addr_r4
797 .type invalidate_addr_r4, %function
799 stmia fp, {r0, r1, r2, r3, r12, lr}
801 b invalidate_addr_call
802 .size invalidate_addr_r4, .-invalidate_addr_r4
804 .global invalidate_addr_r5
805 .type invalidate_addr_r5, %function
807 stmia fp, {r0, r1, r2, r3, r12, lr}
809 b invalidate_addr_call
810 .size invalidate_addr_r5, .-invalidate_addr_r5
812 .global invalidate_addr_r6
813 .type invalidate_addr_r6, %function
815 stmia fp, {r0, r1, r2, r3, r12, lr}
817 b invalidate_addr_call
818 .size invalidate_addr_r6, .-invalidate_addr_r6
820 .global invalidate_addr_r7
821 .type invalidate_addr_r7, %function
823 stmia fp, {r0, r1, r2, r3, r12, lr}
825 b invalidate_addr_call
826 .size invalidate_addr_r7, .-invalidate_addr_r7
828 .global invalidate_addr_r8
829 .type invalidate_addr_r8, %function
831 stmia fp, {r0, r1, r2, r3, r12, lr}
833 b invalidate_addr_call
834 .size invalidate_addr_r8, .-invalidate_addr_r8
836 .global invalidate_addr_r9
837 .type invalidate_addr_r9, %function
839 stmia fp, {r0, r1, r2, r3, r12, lr}
841 b invalidate_addr_call
842 .size invalidate_addr_r9, .-invalidate_addr_r9
844 .global invalidate_addr_r10
845 .type invalidate_addr_r10, %function
847 stmia fp, {r0, r1, r2, r3, r12, lr}
849 b invalidate_addr_call
850 .size invalidate_addr_r10, .-invalidate_addr_r10
852 .global invalidate_addr_r12
853 .type invalidate_addr_r12, %function
855 stmia fp, {r0, r1, r2, r3, r12, lr}
857 .size invalidate_addr_r12, .-invalidate_addr_r12
859 .global invalidate_addr_call
860 .type invalidate_addr_call, %function
861 invalidate_addr_call:
@ restore the spilled regs; loading saved lr into pc performs the return
863 ldmia fp, {r0, r1, r2, r3, r12, pc}
864 .size invalidate_addr_call, .-invalidate_addr_call
/* new_dyna_start: C entry point into the dynarec.  Saves callee-saved
   regs (ip included purely to keep the stack 8-byte aligned per EABI),
   points fp at dynarec_local, loads the starting pcaddr and cycle state,
   then jumps into compiled code (the dispatch tail is elided here).
   The matching frame pop lives in new_dyna_leave / cc_interrupt. */
867 .global new_dyna_start
868 .type new_dyna_start, %function
870 /* ip is stored to conform EABI alignment */
871 stmfd sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, lr}
872 load_var_adr fp, dynarec_local
873 ldr r0, [fp, #pcaddr-dynarec_local]
875 ldr r1, [fp, #next_interupt-dynarec_local]
876 ldr r10, [fp, #cycle-dynarec_local]
877 str r1, [fp, #last_count-dynarec_local]
880 .size new_dyna_start, .-new_dyna_start
882 /* --------------------------------------- */
/* Exported memory-access handlers (generated by the ari_* macros below):
   RAM, RAM-mirror, BIOS and hardware-I/O variants for 8/16/32-bit
   reads and writes, plus a read-only RAM-mirror write stub. */
885 .global ari_read_ram8
886 .global ari_read_ram16
887 .global ari_read_ram32
888 .global ari_read_ram_mirror8
889 .global ari_read_ram_mirror16
890 .global ari_read_ram_mirror32
891 .global ari_write_ram8
892 .global ari_write_ram16
893 .global ari_write_ram32
894 .global ari_write_ram_mirror8
895 .global ari_write_ram_mirror16
896 .global ari_write_ram_mirror32
897 .global ari_write_ram_mirror_ro32
898 .global ari_read_bios8
899 .global ari_read_bios16
900 .global ari_read_bios32
902 .global ari_read_io16
903 .global ari_read_io32
904 .global ari_write_io8
905 .global ari_write_io16
906 .global ari_write_io32
/* Memhandler templates.  Common calling convention (visible below): the
   target address is read from the `address` slot in dynarec_local, the
   value to write from byte/hword/word, and read results are stored to
   readmem_dword.  \bic_const / \mvn_const mask alignment/mirror bits;
   \op / \readop / \pf select the width-specific load/store form.
   NOTE(review): most macro bodies and their .endm lines are elided in
   this extract. */
908 .macro ari_read_ram bic_const op
909 ldr r0, [fp, #address-dynarec_local]
911 bic r0, r0, #\bic_const
914 str r0, [fp, #readmem_dword-dynarec_local]
@ RAM-mirror reads: fold mirrored address ranges onto the real RAM
927 .macro ari_read_ram_mirror mvn_const, op
928 ldr r0, [fp, #address-dynarec_local]
933 str r0, [fp, #readmem_dword-dynarec_local]
937 ari_read_ram_mirror8:
938 ari_read_ram_mirror 0, ldrb
940 ari_read_ram_mirror16:
941 ari_read_ram_mirror (1<<11), ldrh
943 ari_read_ram_mirror32:
944 ari_read_ram_mirror (3<<11), ldr
946 /* invalidation is already taken care of by the caller */
947 .macro ari_write_ram bic_const var pf
948 ldr r0, [fp, #address-dynarec_local]
949 ldr\pf r1, [fp, #\var-dynarec_local]
951 bic r0, r0, #\bic_const
958 ari_write_ram 0, byte, b
961 ari_write_ram 1, hword, h
964 ari_write_ram 3, word,
@ RAM-mirror writes also consult invc_ptr (per-page invalidation flags)
966 .macro ari_write_ram_mirror mvn_const var pf
967 ldr r0, [fp, #address-dynarec_local]
969 ldr\pf r1, [fp, #\var-dynarec_local]
971 ldr r2, [fp, #invc_ptr-dynarec_local]
973 ldrb r2, [r2, r0, lsr #12]
981 ari_write_ram_mirror8:
982 ari_write_ram_mirror 0, byte, b
984 ari_write_ram_mirror16:
985 ari_write_ram_mirror (1<<11), hword, h
987 ari_write_ram_mirror32:
988 ari_write_ram_mirror (3<<11), word,
@ read-only mirror write: checked against the pcsx_ram_is_ro flag first
990 ari_write_ram_mirror_ro32:
991 load_var_adr r0, pcsx_ram_is_ro
996 b ari_write_ram_mirror32
@ BIOS reads: force the address into the 0x9fc00000 BIOS mirror
999 .macro ari_read_bios_mirror bic_const op
1000 ldr r0, [fp, #address-dynarec_local]
1001 orr r0, r0, #0x80000000
1002 bic r0, r0, #(0x20000000|\bic_const) @ map to 0x9fc...
1004 str r0, [fp, #readmem_dword-dynarec_local]
1009 ari_read_bios_mirror 0, ldrb
1012 ari_read_bios_mirror 1, ldrh
1015 ari_read_bios_mirror 3, ldr
@ hardware I/O reads: dispatch through the tab_read* pointer tables from
@ nd_pcsx_io, with a fallback direct read from the psxH area; the _old
@ variant is kept but commented out at its call sites
1019 .macro ari_read_io_old tab_shift
1020 str lr, [sp, #-8]! @ EABI alignment..
1030 str r0, [fp, #readmem_dword-dynarec_local]
1034 .macro ari_read_io readop mem_tab tab_shift
1035 ldr r0, [fp, #address-dynarec_local]
1036 ldr r1, [fp, #psxH_ptr-dynarec_local]
1043 bic r2, r0, #0x1f800000
1044 ldr r12,[fp, #\mem_tab-dynarec_local]
1045 subs r3, r2, #0x1000
1047 @ ari_read_io_old \tab_shift
1050 ldr r12,[r12, r3, lsl #\tab_shift]
1054 str lr, [sp, #-8]! @ EABI alignment..
1056 str r0, [fp, #readmem_dword-dynarec_local]
1060 .if \tab_shift == 1 @ read16
1065 ldr r12,[fp, #spu_readf-dynarec_local]
1069 @ no handler, just read psxH
1070 \readop r0, [r1, r2]
1071 str r0, [fp, #readmem_dword-dynarec_local]
1076 ari_read_io ldrb, tab_read8, 2
1079 ari_read_io ldrh, tab_read16, 1
1082 ari_read_io ldr, tab_read32, 0
@ hardware I/O writes: same table dispatch via tab_write*, with special
@ cases routed to the SPU handler (spu_writef)
1084 .macro ari_write_io_old tab_shift
1096 .macro ari_write_io pf var mem_tab tab_shift
1097 ldr r0, [fp, #address-dynarec_local]
1098 ldr\pf r1, [fp, #\var-dynarec_local]
1105 bic r2, r0, #0x1f800000
1106 ldr r12,[fp, #\mem_tab-dynarec_local]
1107 subs r3, r2, #0x1000
1109 @ ari_write_io_old \tab_shift
1112 ldr r12,[r12, r3, lsl #\tab_shift]
1117 ldr r3, [fp, #psxH_ptr-dynarec_local]
1125 ldrlo pc, [fp, #spu_writef-dynarec_local]
1127 @ write32 to SPU - very rare case (is this correct?)
1133 ldr pc, [fp, #spu_writef-dynarec_local]
1135 ldr pc, [fp, #spu_writef-dynarec_local]
@ byte-wide I/O write is expanded inline rather than via the macro
1142 @ PCSX always writes to psxH, so do we for consistency
1143 ldr r0, [fp, #address-dynarec_local]
1144 ldr r3, [fp, #psxH_ptr-dynarec_local]
1145 ldrb r1, [fp, #byte-dynarec_local]
1146 bic r2, r0, #0x1f800000
1147 ldr r12,[fp, #tab_write8-dynarec_local]
1149 subs r3, r2, #0x1000
1151 @ ari_write_io_old 2
1154 ldr r12,[r12, r3, lsl #2]
1161 ari_write_io h, hword, tab_write16, 1
1164 ari_write_io , word, tab_write32, 0
1166 @ vim:filetype=armasm