1 /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
2 * linkage_arm.s for PCSX *
3 * Copyright (C) 2009-2011 Ari64 *
4 * Copyright (C) 2010-2011 Gražvydas "notaz" Ignotas *
6 * This program is free software; you can redistribute it and/or modify *
7 * it under the terms of the GNU General Public License as published by *
8 * the Free Software Foundation; either version 2 of the License, or *
9 * (at your option) any later version. *
11 * This program is distributed in the hope that it will be useful, *
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of *
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
14 * GNU General Public License for more details. *
16 * You should have received a copy of the GNU General Public License *
17 * along with this program; if not, write to the *
18 * Free Software Foundation, Inc., *
19 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. *
20 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
22 /* .equiv HAVE_ARMV7, 1 */
/* -------------------------------------------------------------------------
 * dynarec_local: one large BSS object holding all per-emulator state the
 * generated code accesses via fp-relative addressing (fp is kept pointing
 * at dynarec_local while translated code runs — see new_dyna_start).
 * Every symbol below is defined as an offset into this block; loads/stores
 * throughout this file use the [fp, #sym-dynarec_local] idiom.
 * ------------------------------------------------------------------------- */
38 .global pending_exception
52 .global restore_candidate
58 .global inv_code_start
63 .type dynarec_local, %object
64 .size dynarec_local, dynarec_local_end-dynarec_local
66 .space dynarec_local_end-dynarec_local /*0x400630*/
/* scratch/timing state, 4 bytes each unless sized otherwise */
67 next_interupt = dynarec_local + 64
68 .type next_interupt, %object
69 .size next_interupt, 4
70 cycle_count = next_interupt + 4
71 .type cycle_count, %object
73 last_count = cycle_count + 4
74 .type last_count, %object
76 pending_exception = last_count + 4
77 .type pending_exception, %object
78 .size pending_exception, 4
79 stop = pending_exception + 4
/* invc_ptr: pointer into the invalid-code bitmap (see ari_write_ram_mirror) */
83 .type invc_ptr, %object
/* address/readmem_dword: mailbox slots used by the ari_* memory handlers */
85 address = invc_ptr + 4
86 .type address, %object
88 readmem_dword = address + 4
89 readmem_word = readmem_dword /* alias: low word shares the dword slot */
90 .type readmem_dword, %object
91 .size readmem_dword, 8
92 dword = readmem_dword + 8
103 .size byte, 1 /* 1 byte free */
/* psxRegs: mirror of the PCSX register file (GPRs, cop0/cop2, cycle, ...) */
113 .type psxRegs, %object
114 .size psxRegs, psxRegs_end-psxRegs
125 .type reg_cop0, %object
127 reg_cop2d = reg_cop0 + 128 /* GTE data regs, 32 x 4 bytes */
128 .type reg_cop2d, %object
130 reg_cop2c = reg_cop2d + 128 /* GTE control regs, 32 x 4 bytes */
131 .type reg_cop2c, %object
143 interrupt = cycle + 4
144 .type interrupt, %object
146 intCycle = interrupt + 4
147 .type intCycle, %object
149 psxRegs_end = intCycle + 256
/* nd_pcsx_io: I/O dispatch tables + SPU callbacks consulted by ari_*_io */
152 nd_pcsx_io = psxRegs_end
153 .type nd_pcsx_io, %object
154 .size nd_pcsx_io, nd_pcsx_io_end-nd_pcsx_io
155 tab_read8 = nd_pcsx_io
156 .type tab_read8, %object
158 tab_read16 = tab_read8 + 4
159 .type tab_read16, %object
161 tab_read32 = tab_read16 + 4
162 .type tab_read32, %object
164 tab_write8 = tab_read32 + 4
165 .type tab_write8, %object
167 tab_write16 = tab_write8 + 4
168 .type tab_write16, %object
170 tab_write32 = tab_write16 + 4
171 .type tab_write32, %object
173 spu_readf = tab_write32 + 4
174 .type spu_readf, %object
176 spu_writef = spu_readf + 4
177 .type spu_writef, %object
179 nd_pcsx_io_end = spu_writef + 4
181 psxH_ptr = nd_pcsx_io_end /* base pointer of the psxH hardware area */
182 .type psxH_ptr, %object
/* [inv_code_start, inv_code_end]: cached range already known to hold no
 * translated code, checked by invalidate_addr_call as a fast-out */
184 inv_code_start = psxH_ptr + 4
185 .type inv_code_start, %object
186 .size inv_code_start, 4
187 inv_code_end = inv_code_start + 4
188 .type inv_code_end, %object
189 .size inv_code_end, 4
190 align0 = inv_code_end + 4 /* just for alignment */
191 .type align0, %object
193 branch_target = align0 + 12
194 .type branch_target, %object
195 .size branch_target, 4
196 mini_ht = branch_target + 4 /* small hash table for indirect jumps */
197 .type mini_ht, %object
199 restore_candidate = mini_ht + 256 /* bitmap of blocks eligible for un-dirtying */
200 .type restore_candidate, %object
201 .size restore_candidate, 512
202 memory_map = restore_candidate + 512 /* 1M-entry page table, 4 MB */
203 .type memory_map, %object
204 .size memory_map, 4194304
205 dynarec_local_end = memory_map + 4194304
/* load_var_adr reg, var: materialize the 32-bit address of \var in \reg
 * using the ARMv7 movw/movt pair (cf. the HAVE_ARMV7 switch at file top;
 * a pre-v7 fallback, if any, is outside this excerpt). */
207 .macro load_var_adr reg var
209 movw \reg, #:lower16:\var
210 movt \reg, #:upper16:\var
/* dyna_linker_main: shared body of the dynamic linkers. Looks up the
 * virtual target address in the jump_in / hash_table / jump_dirty
 * structures and either patches the call site (r1) to the found block
 * or falls through so the caller can recompile. Interior lines are
 * elided from this excerpt; comments below describe only what is shown. */
216 .macro dyna_linker_main
217 /* r0 = virtual target address */
218 /* r1 = instruction to patch */
232 ldr r5, [r3, r2, lsl #2] /* index table by r2 words */
234 add r6, r1, r12, asr #6 /* decode branch offset from patch site */
249 moveq pc, r4 /* Stale i-cache */
251 b 1b /* jump_in may have dupes, continue search */
254 beq 3f /* r0 not in jump_in */
260 and r1, r7, #0xff000000 /* keep condition/opcode field */
263 add r1, r1, r2, lsr #8 /* merge in new branch offset */
267 /* hash_table lookup */
270 eor r4, r0, r0, lsl #16 /* hash of target address */
276 ldr r5, [r3, r2, lsl #2]
283 /* jump_dirty lookup */
293 /* hash_table insert */
/* dyna_linker: entry used by generated code when a branch target has no
 * translation yet; runs dyna_linker_main (elided here) and, on miss,
 * recompiles the target block. */
307 .type dyna_linker, %function
309 /* r0 = virtual target address */
310 /* r1 = instruction to patch */
315 bl new_recompile_block
323 .size dyna_linker, .-dyna_linker
/* exec_pagefault: raise a TLB-miss-style exception for an instruction
 * fetch. Fills in the cop0 registers (EPC, BadVAddr, Status, Cause,
 * EntryHi, Context) from the faulting state. All cop0 accesses use the
 * fixed byte offsets reg_cop0+4*n for cop0 register n. */
324 .global exec_pagefault
325 .type exec_pagefault, %function
327 /* r0 = instruction pointer */
328 /* r1 = fault address */
330 ldr r3, [fp, #reg_cop0+48-dynarec_local] /* Status */
332 ldr r4, [fp, #reg_cop0+16-dynarec_local] /* Context */
333 bic r6, r6, #0x0F800000 /* clear mask bits (constant built off-view) */
334 str r0, [fp, #reg_cop0+56-dynarec_local] /* EPC */
336 str r1, [fp, #reg_cop0+32-dynarec_local] /* BadVAddr */
338 str r3, [fp, #reg_cop0+48-dynarec_local] /* Status */
339 and r5, r6, r1, lsr #9 /* BadVPN field for Context */
340 str r2, [fp, #reg_cop0+52-dynarec_local] /* Cause */
341 and r1, r1, r6, lsl #9 /* VPN field for EntryHi */
342 str r1, [fp, #reg_cop0+40-dynarec_local] /* EntryHi */
344 str r4, [fp, #reg_cop0+16-dynarec_local] /* Context */
348 .size exec_pagefault, .-exec_pagefault
350 /* Special dynamic linker for the case where a page fault
351 may occur in a branch delay slot */
352 .global dyna_linker_ds
353 .type dyna_linker_ds, %function
355 /* r0 = virtual target address */
356 /* r1 = instruction to patch */
363 bl new_recompile_block
370 mov r2, #0x80000008 /* High bit set indicates pagefault in delay slot */
373 .size dyna_linker_ds, .-dyna_linker_ds
/* jump_vaddr_rN family: one per-register entry stub for indirect jumps
 * from generated code. Each stub computes the hash seed
 * r2 = rN ^ (rN << 16) for its source register; the remaining lines
 * (moving rN to r0 and falling into jump_vaddr) are elided from this
 * excerpt. Note there is no jump_vaddr_r11: r11 (fp) is reserved as the
 * dynarec_local base pointer. */
382 .global jump_vaddr_r0
383 .type jump_vaddr_r0, %function
385 eor r2, r0, r0, lsl #16
387 .size jump_vaddr_r0, .-jump_vaddr_r0
388 .global jump_vaddr_r1
389 .type jump_vaddr_r1, %function
391 eor r2, r1, r1, lsl #16
394 .size jump_vaddr_r1, .-jump_vaddr_r1
395 .global jump_vaddr_r2
396 .type jump_vaddr_r2, %function
399 eor r2, r2, r2, lsl #16
401 .size jump_vaddr_r2, .-jump_vaddr_r2
402 .global jump_vaddr_r3
403 .type jump_vaddr_r3, %function
405 eor r2, r3, r3, lsl #16
408 .size jump_vaddr_r3, .-jump_vaddr_r3
409 .global jump_vaddr_r4
410 .type jump_vaddr_r4, %function
412 eor r2, r4, r4, lsl #16
415 .size jump_vaddr_r4, .-jump_vaddr_r4
416 .global jump_vaddr_r5
417 .type jump_vaddr_r5, %function
419 eor r2, r5, r5, lsl #16
422 .size jump_vaddr_r5, .-jump_vaddr_r5
423 .global jump_vaddr_r6
424 .type jump_vaddr_r6, %function
426 eor r2, r6, r6, lsl #16
429 .size jump_vaddr_r6, .-jump_vaddr_r6
430 .global jump_vaddr_r8
431 .type jump_vaddr_r8, %function
433 eor r2, r8, r8, lsl #16
436 .size jump_vaddr_r8, .-jump_vaddr_r8
437 .global jump_vaddr_r9
438 .type jump_vaddr_r9, %function
440 eor r2, r9, r9, lsl #16
443 .size jump_vaddr_r9, .-jump_vaddr_r9
444 .global jump_vaddr_r10
445 .type jump_vaddr_r10, %function
447 eor r2, r10, r10, lsl #16
450 .size jump_vaddr_r10, .-jump_vaddr_r10
451 .global jump_vaddr_r12
452 .type jump_vaddr_r12, %function
454 eor r2, r12, r12, lsl #16
457 .size jump_vaddr_r12, .-jump_vaddr_r12
/* r7 variant defined last; the stubs above presumably fall through past
 * it into jump_vaddr — elided lines would confirm (TODO verify) */
458 .global jump_vaddr_r7
459 .type jump_vaddr_r7, %function
461 eor r2, r7, r7, lsl #16
463 .size jump_vaddr_r7, .-jump_vaddr_r7
/* jump_vaddr: common tail of the jump_vaddr_rN stubs. Masks the hash
 * (r2, computed by the stub) to an index, then resolves and enters the
 * translated block; r10 (the cycle counter register) is spilled around
 * the lookup call. */
465 .type jump_vaddr, %function
469 and r2, r3, r2, lsr #12 /* hash -> table index */
476 str r10, [fp, #cycle_count-dynarec_local] /* save cycle counter across call */
478 ldr r10, [fp, #cycle_count-dynarec_local] /* restore cycle counter */
480 .size jump_vaddr, .-jump_vaddr
/* verify_code / verify_code_vm / verify_code_ds: check that the source
 * of a "dirty" translated block still matches the original RAM before
 * re-entering it. The _ds variant additionally saves r8 (the branch
 * target of a delay-slot pair) and restores it on the common exit path. */
483 .global verify_code_ds
484 .type verify_code_ds, %function
486 str r8, [fp, #branch_target-dynarec_local]
487 .size verify_code_ds, .-verify_code_ds
488 .global verify_code_vm
489 .type verify_code_vm, %function
492 .type verify_code, %function
521 ldr r8, [fp, #branch_target-dynarec_local]
526 .size verify_code, .-verify_code
527 .size verify_code_vm, .-verify_code_vm
/* cc_interrupt: called from generated code when the cycle counter (r10)
 * runs out. Syncs r10 into psxRegs.cycle, dispatches pending events,
 * then either returns to translated code or unwinds to new_dyna_start's
 * caller if stop was set (the ldmnefd epilogue matches new_dyna_start's
 * stmfd prologue, ip included for EABI stack alignment). */
531 .type cc_interrupt, %function
533 ldr r0, [fp, #last_count-dynarec_local]
537 str r1, [fp, #pending_exception-dynarec_local]
538 and r2, r2, r10, lsr #17 /* periodic index into restore_candidate */
539 add r3, fp, #restore_candidate-dynarec_local
540 str r10, [fp, #cycle-dynarec_local] /* PCSX cycles */
541 @@ str r10, [fp, #reg_cop0+36-dynarec_local] /* Count */
549 ldr r10, [fp, #cycle-dynarec_local]
550 ldr r0, [fp, #next_interupt-dynarec_local]
551 ldr r1, [fp, #pending_exception-dynarec_local]
552 ldr r2, [fp, #stop-dynarec_local]
553 str r0, [fp, #last_count-dynarec_local]
556 ldmnefd sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, pc} /* stop set: leave dynarec */
560 ldr r0, [fp, #pcaddr-dynarec_local]
564 /* Move 'dirty' blocks to the 'clean' list */
575 .size cc_interrupt, .-cc_interrupt
/* do_interrupt: re-enter translated code at psxRegs.pcaddr after an
 * interrupt has been serviced (body largely elided from this excerpt). */
579 .type do_interrupt, %function
581 ldr r0, [fp, #pcaddr-dynarec_local]
585 .size do_interrupt, .-do_interrupt
/* fp_exception: raise a coprocessor-unusable exception — set EPC to the
 * faulting PC (r0), update Status, and store the Cause code. */
589 .type fp_exception, %function
593 ldr r1, [fp, #reg_cop0+48-dynarec_local] /* Status */
595 str r0, [fp, #reg_cop0+56-dynarec_local] /* EPC */
598 str r1, [fp, #reg_cop0+48-dynarec_local] /* Status */
599 str r2, [fp, #reg_cop0+52-dynarec_local] /* Cause */
603 .size fp_exception, .-fp_exception
/* fp_exception_ds: delay-slot variant of fp_exception; only the Cause
 * seed differs (BD bit set), then control joins the common path. */
605 .global fp_exception_ds
606 .type fp_exception_ds, %function
608 mov r2, #0x90000000 /* Set high bit if delay slot */
610 .size fp_exception_ds, .-fp_exception_ds
/* jump_syscall: raise a SYSCALL exception — mirrors fp_exception's
 * cop0 bookkeeping (EPC/Status/Cause) with the syscall cause code. */
614 .type jump_syscall, %function
616 ldr r1, [fp, #reg_cop0+48-dynarec_local] /* Status */
618 str r0, [fp, #reg_cop0+56-dynarec_local] /* EPC */
621 str r1, [fp, #reg_cop0+48-dynarec_local] /* Status */
622 str r2, [fp, #reg_cop0+52-dynarec_local] /* Cause */
626 .size jump_syscall, .-jump_syscall
/* jump_syscall_hle: route a syscall through PCSX's C psxException
 * (HLE path). Syncs pcaddr and the cycle counter before the call, then
 * reloads timing state afterwards since the recompiler may have run. */
630 .global jump_syscall_hle
631 .type jump_syscall_hle, %function
633 str r0, [fp, #pcaddr-dynarec_local] /* PC must be set to EPC for psxException */
634 ldr r2, [fp, #last_count-dynarec_local]
635 mov r1, #0 /* in delay slot */
637 mov r0, #0x20 /* cause */
638 str r2, [fp, #cycle-dynarec_local] /* PCSX cycle counter */
641 /* note: psxException might do recursive recompiler call from its HLE code,
642 * so be ready for this */
644 ldr r1, [fp, #next_interupt-dynarec_local]
645 ldr r10, [fp, #cycle-dynarec_local]
646 ldr r0, [fp, #pcaddr-dynarec_local]
648 str r1, [fp, #last_count-dynarec_local]
651 .size jump_syscall_hle, .-jump_syscall_hle
/* jump_hlecall: transfer control to a HLE BIOS-call handler; syncs
 * pcaddr and the PCSX cycle counter first. */
655 .type jump_hlecall, %function
657 ldr r2, [fp, #last_count-dynarec_local]
658 str r0, [fp, #pcaddr-dynarec_local]
661 str r2, [fp, #cycle-dynarec_local] /* PCSX cycle counter */
663 .size jump_hlecall, .-jump_hlecall
/* jump_intcall: hand control to the interpreter core; syncs pcaddr and
 * the PCSX cycle counter exactly like jump_hlecall above. */
667 .type jump_intcall, %function
669 ldr r2, [fp, #last_count-dynarec_local]
670 str r0, [fp, #pcaddr-dynarec_local]
673 str r2, [fp, #cycle-dynarec_local] /* PCSX cycle counter */
/* BUGFIX: the .size directive previously read "jump_hlecall" (copy-paste
 * from the routine above), which both re-sized jump_hlecall incorrectly
 * and left jump_intcall with no size in the symbol table. */
675 .size jump_intcall, .-jump_intcall
/* new_dyna_leave: final exit from the dynarec back to the C caller of
 * new_dyna_start. Writes the cycle counter back, then pops the exact
 * register set new_dyna_start pushed (ip included for EABI alignment)
 * straight into pc. */
679 .global new_dyna_leave
680 .type new_dyna_leave, %function
681 ldr r0, [fp, #last_count-dynarec_local]
684 str r10, [fp, #cycle-dynarec_local]
685 ldmfd sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, pc}
686 .size new_dyna_leave, .-new_dyna_leave
688 /* these are used to call memhandlers */
690 .global indirect_jump_indexed
691 .type indirect_jump_indexed, %function
/* indirect_jump_indexed: fetch handler address from table r0 at word
 * index r1, then fall through into indirect_jump */
692 indirect_jump_indexed:
693 ldr r0, [r0, r1, lsl #2]
694 .global indirect_jump
695 .type indirect_jump, %function
/* indirect_jump: sync the cycle counter to psxRegs.cycle, then jump to
 * the handler in r0 (tail lines elided from this excerpt) */
697 ldr r12, [fp, #last_count-dynarec_local]
699 str r2, [fp, #cycle-dynarec_local]
701 .size indirect_jump, .-indirect_jump
702 .size indirect_jump_indexed, .-indirect_jump_indexed
/* invalidate_addr_rN family: entry stubs called from generated code when
 * address rN was written and any translation covering it must be thrown
 * away. Each stub saves the caller-volatile set {r0-r3, r12, lr} into
 * the scratch words at the start of dynarec_local (stmia fp), moves rN
 * into r0 (elided here), and branches to the common invalidate_addr_call,
 * which restores that set directly into pc to return.
 * There is no r11 variant (fp is the dynarec_local base); the r12 stub
 * falls through into invalidate_addr_call instead of branching. */
705 .global invalidate_addr_r0
706 .type invalidate_addr_r0, %function
708 stmia fp, {r0, r1, r2, r3, r12, lr}
709 b invalidate_addr_call
710 .size invalidate_addr_r0, .-invalidate_addr_r0
712 .global invalidate_addr_r1
713 .type invalidate_addr_r1, %function
715 stmia fp, {r0, r1, r2, r3, r12, lr}
717 b invalidate_addr_call
718 .size invalidate_addr_r1, .-invalidate_addr_r1
720 .global invalidate_addr_r2
721 .type invalidate_addr_r2, %function
723 stmia fp, {r0, r1, r2, r3, r12, lr}
725 b invalidate_addr_call
726 .size invalidate_addr_r2, .-invalidate_addr_r2
728 .global invalidate_addr_r3
729 .type invalidate_addr_r3, %function
731 stmia fp, {r0, r1, r2, r3, r12, lr}
733 b invalidate_addr_call
734 .size invalidate_addr_r3, .-invalidate_addr_r3
736 .global invalidate_addr_r4
737 .type invalidate_addr_r4, %function
739 stmia fp, {r0, r1, r2, r3, r12, lr}
741 b invalidate_addr_call
742 .size invalidate_addr_r4, .-invalidate_addr_r4
744 .global invalidate_addr_r5
745 .type invalidate_addr_r5, %function
747 stmia fp, {r0, r1, r2, r3, r12, lr}
749 b invalidate_addr_call
750 .size invalidate_addr_r5, .-invalidate_addr_r5
752 .global invalidate_addr_r6
753 .type invalidate_addr_r6, %function
755 stmia fp, {r0, r1, r2, r3, r12, lr}
757 b invalidate_addr_call
758 .size invalidate_addr_r6, .-invalidate_addr_r6
760 .global invalidate_addr_r7
761 .type invalidate_addr_r7, %function
763 stmia fp, {r0, r1, r2, r3, r12, lr}
765 b invalidate_addr_call
766 .size invalidate_addr_r7, .-invalidate_addr_r7
768 .global invalidate_addr_r8
769 .type invalidate_addr_r8, %function
771 stmia fp, {r0, r1, r2, r3, r12, lr}
773 b invalidate_addr_call
774 .size invalidate_addr_r8, .-invalidate_addr_r8
776 .global invalidate_addr_r9
777 .type invalidate_addr_r9, %function
779 stmia fp, {r0, r1, r2, r3, r12, lr}
781 b invalidate_addr_call
782 .size invalidate_addr_r9, .-invalidate_addr_r9
784 .global invalidate_addr_r10
785 .type invalidate_addr_r10, %function
787 stmia fp, {r0, r1, r2, r3, r12, lr}
789 b invalidate_addr_call
790 .size invalidate_addr_r10, .-invalidate_addr_r10
792 .global invalidate_addr_r12
793 .type invalidate_addr_r12, %function
795 stmia fp, {r0, r1, r2, r3, r12, lr}
797 .size invalidate_addr_r12, .-invalidate_addr_r12
799 .global invalidate_addr_call
800 .type invalidate_addr_call, %function
invalidate_addr_call itself: fast-path check against the cached
801 invalidate_addr_call:
802 ldr r12, [fp, #inv_code_start-dynarec_local]
803 ldr lr, [fp, #inv_code_end-dynarec_local]
/* restore saved {r0-r3, r12} and return via pc from the fp scratch area */
807 ldmia fp, {r0, r1, r2, r3, r12, pc}
808 .size invalidate_addr_call, .-invalidate_addr_call
/* new_dyna_start: C-callable entry into the dynarec. Saves all EABI
 * callee-saved registers (plus ip purely for 8-byte stack alignment),
 * points fp at dynarec_local, seeds the timing registers, and enters
 * translated code at psxRegs.pcaddr. The matching epilogues are in
 * cc_interrupt (stop path) and new_dyna_leave. */
811 .global new_dyna_start
812 .type new_dyna_start, %function
814 /* ip is stored to conform EABI alignment */
815 stmfd sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, lr}
816 load_var_adr fp, dynarec_local
817 ldr r0, [fp, #pcaddr-dynarec_local]
819 ldr r1, [fp, #next_interupt-dynarec_local]
820 ldr r10, [fp, #cycle-dynarec_local] /* r10 = cycle counter while in dynarec */
821 str r1, [fp, #last_count-dynarec_local]
824 .size new_dyna_start, .-new_dyna_start
826 /* --------------------------------------- */
/* Exported memory-access handlers (bodies generated by the ari_* macros
 * below). Each handler reads its operands from the `address`/`byte`/
 * `hword`/`word` slots in dynarec_local and returns results through
 * `readmem_dword`, rather than taking register arguments. */
829 .global ari_read_ram8
830 .global ari_read_ram16
831 .global ari_read_ram32
832 .global ari_read_ram_mirror8
833 .global ari_read_ram_mirror16
834 .global ari_read_ram_mirror32
835 .global ari_write_ram8
836 .global ari_write_ram16
837 .global ari_write_ram32
838 .global ari_write_ram_mirror8
839 .global ari_write_ram_mirror16
840 .global ari_write_ram_mirror32
841 .global ari_write_ram_mirror_ro32
842 .global ari_read_bios8
843 .global ari_read_bios16
844 .global ari_read_bios32
846 .global ari_read_io16
847 .global ari_read_io32
848 .global ari_write_io8
849 .global ari_write_io16
850 .global ari_write_io32
/* ari_read_ram bic_const, op: direct RAM read. Loads the target address
 * from the `address` slot, masks alignment bits with \bic_const, reads
 * with \op (ldrb/ldrh/ldr — width chosen by instantiation) and stores
 * the result into readmem_dword. */
852 .macro ari_read_ram bic_const op
853 ldr r0, [fp, #address-dynarec_local]
855 bic r0, r0, #\bic_const
858 str r0, [fp, #readmem_dword-dynarec_local]
/* ari_read_ram_mirror mvn_const, op: same, but for mirrored RAM ranges —
 * the address is wrapped with an mvn-built mask (\mvn_const) instead */
871 .macro ari_read_ram_mirror mvn_const, op
872 ldr r0, [fp, #address-dynarec_local]
877 str r0, [fp, #readmem_dword-dynarec_local]
/* instantiations: 8/16/32-bit mirrored reads */
881 ari_read_ram_mirror8:
882 ari_read_ram_mirror 0, ldrb
884 ari_read_ram_mirror16:
885 ari_read_ram_mirror (1<<11), ldrh
887 ari_read_ram_mirror32:
888 ari_read_ram_mirror (3<<11), ldr
890 /* invalidation is already taken care of by the caller */
/* ari_write_ram bic_const, var, pf: direct RAM write. Value comes from
 * the dynarec_local slot \var (byte/hword/word), loaded with width
 * suffix \pf; address comes from `address`, aligned via \bic_const. */
891 .macro ari_write_ram bic_const var pf
892 ldr r0, [fp, #address-dynarec_local]
893 ldr\pf r1, [fp, #\var-dynarec_local]
895 bic r0, r0, #\bic_const
902 ari_write_ram 0, byte, b
905 ari_write_ram 1, hword, h
908 ari_write_ram 3, word,
/* ari_write_ram_mirror: mirrored-range write. Unlike ari_write_ram it
 * must also check the invalid-code bitmap (invc_ptr, indexed by page)
 * and the inv_code_start/inv_code_end cache to decide whether translated
 * code covering the written page needs invalidation. */
910 .macro ari_write_ram_mirror mvn_const var pf
911 ldr r0, [fp, #address-dynarec_local]
913 ldr\pf r1, [fp, #\var-dynarec_local]
915 ldr r2, [fp, #invc_ptr-dynarec_local]
917 ldrb r2, [r2, r0, lsr #12] /* per-4K-page invalid-code flag */
921 ldr r1, [fp, #inv_code_start-dynarec_local]
922 ldr r2, [fp, #inv_code_end-dynarec_local]
930 ari_write_ram_mirror8:
931 ari_write_ram_mirror 0, byte, b
933 ari_write_ram_mirror16:
934 ari_write_ram_mirror (1<<11), hword, h
936 ari_write_ram_mirror32:
937 ari_write_ram_mirror (3<<11), word,
/* read-only variant: consult pcsx_ram_is_ro first; if writable, just
 * tail into the normal mirrored 32-bit write */
939 ari_write_ram_mirror_ro32:
940 load_var_adr r0, pcsx_ram_is_ro
945 b ari_write_ram_mirror32
/* ari_read_bios_mirror bic_const, op: BIOS read. Forces the address into
 * the kseg1 BIOS window (0x9fc.....) before reading with \op. */
948 .macro ari_read_bios_mirror bic_const op
949 ldr r0, [fp, #address-dynarec_local]
950 orr r0, r0, #0x80000000
951 bic r0, r0, #(0x20000000|\bic_const) @ map to 0x9fc...
953 str r0, [fp, #readmem_dword-dynarec_local]
958 ari_read_bios_mirror 0, ldrb
961 ari_read_bios_mirror 1, ldrh
964 ari_read_bios_mirror 3, ldr
/* ari_read_io_old tab_shift: legacy hardware-register read path, kept as
 * a (commented-out) fallback inside ari_read_io below. */
968 .macro ari_read_io_old tab_shift
969 str lr, [sp, #-8]! @ EABI alignment..
979 str r0, [fp, #readmem_dword-dynarec_local]
/* ari_read_io readop, mem_tab, tab_shift: hardware-register (0x1f80xxxx)
 * read. Looks the register up in the \mem_tab handler table; if a C
 * handler is registered it is called (lr saved with 8-byte stack
 * alignment), with a special case routing 16-bit SPU reads through
 * spu_readf; otherwise the value is read straight from the psxH area. */
983 .macro ari_read_io readop mem_tab tab_shift
984 ldr r0, [fp, #address-dynarec_local]
985 ldr r1, [fp, #psxH_ptr-dynarec_local]
992 bic r2, r0, #0x1f800000 @ offset within the I/O area
993 ldr r12,[fp, #\mem_tab-dynarec_local]
996 @ ari_read_io_old \tab_shift
999 ldr r12,[r12, r3, lsl #\tab_shift] @ fetch handler pointer
1003 str lr, [sp, #-8]! @ EABI alignment..
1005 str r0, [fp, #readmem_dword-dynarec_local]
1009 .if \tab_shift == 1 @ read16
1014 ldr r12,[fp, #spu_readf-dynarec_local]
1018 @ no handler, just read psxH
1019 \readop r0, [r1, r2]
1020 str r0, [fp, #readmem_dword-dynarec_local]
1025 ari_read_io ldrb, tab_read8, 2
1028 ari_read_io ldrh, tab_read16, 1
1031 ari_read_io ldr, tab_read32, 0
/* ari_write_io_old tab_shift: legacy hardware-register write path, kept
 * as a (commented-out) fallback inside ari_write_io below. */
1033 .macro ari_write_io_old tab_shift
/* ari_write_io pf, var, mem_tab, tab_shift: hardware-register write.
 * Value is loaded from slot \var with width \pf; the register is looked
 * up in \mem_tab and dispatched to its C handler, with SPU-range writes
 * tail-called through spu_writef; otherwise (and, per the note below,
 * always for consistency with PCSX) the value lands in the psxH area. */
1045 .macro ari_write_io pf var mem_tab tab_shift
1046 ldr r0, [fp, #address-dynarec_local]
1047 ldr\pf r1, [fp, #\var-dynarec_local]
1054 bic r2, r0, #0x1f800000 @ offset within the I/O area
1055 ldr r12,[fp, #\mem_tab-dynarec_local]
1056 subs r3, r2, #0x1000 @ registers proper start at +0x1000
1058 @ ari_write_io_old \tab_shift
1061 ldr r12,[r12, r3, lsl #\tab_shift] @ fetch handler pointer
1066 ldr r3, [fp, #psxH_ptr-dynarec_local]
1074 ldrlo pc, [fp, #spu_writef-dynarec_local] @ tail-call SPU handler
1076 @ write32 to SPU - very rare case (is this correct?)
1082 ldr pc, [fp, #spu_writef-dynarec_local]
1084 ldr pc, [fp, #spu_writef-dynarec_local]
1091 @ PCSX always writes to psxH, so do we for consistency
/* ari_write_io8 is expanded by hand rather than via the macro —
 * presumably because the 8-bit table uses shift 2; TODO confirm */
1092 ldr r0, [fp, #address-dynarec_local]
1093 ldr r3, [fp, #psxH_ptr-dynarec_local]
1094 ldrb r1, [fp, #byte-dynarec_local]
1095 bic r2, r0, #0x1f800000
1096 ldr r12,[fp, #tab_write8-dynarec_local]
1098 subs r3, r2, #0x1000
1100 @ ari_write_io_old 2
1103 ldr r12,[r12, r3, lsl #2]
1110 ari_write_io h, hword, tab_write16, 1
1113 ari_write_io , word, tab_write32, 0
1115 @ vim:filetype=armasm