/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
 *   linkage_arm.s for PCSX                                                *
 *   Copyright (C) 2009-2011 Ari64                                         *
 *   Copyright (C) 2010-2011 Gražvydas "notaz" Ignotas                     *
 *                                                                         *
 *   This program is free software; you can redistribute it and/or modify  *
 *   it under the terms of the GNU General Public License as published by  *
 *   the Free Software Foundation; either version 2 of the License, or     *
 *   (at your option) any later version.                                   *
 *                                                                         *
 *   This program is distributed in the hope that it will be useful,       *
 *   but WITHOUT ANY WARRANTY; without even the implied warranty of        *
 *   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the         *
 *   GNU General Public License for more details.                          *
 *                                                                         *
 *   You should have received a copy of the GNU General Public License     *
 *   along with this program; if not, write to the                         *
 *   Free Software Foundation, Inc.,                                       *
 *   51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.          *
 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
/* .equiv HAVE_ARMV7, 1 */
38 .global pending_exception
46 .global restore_candidate
53 .global inv_code_start
59 .type dynarec_local, %object
60 .size dynarec_local, dynarec_local_end-dynarec_local
62 .space dynarec_local_end-dynarec_local
63 next_interupt = dynarec_local + 64
64 .type next_interupt, %object
65 .size next_interupt, 4
66 cycle_count = next_interupt + 4
67 .type cycle_count, %object
69 last_count = cycle_count + 4
70 .type last_count, %object
72 pending_exception = last_count + 4
73 .type pending_exception, %object
74 .size pending_exception, 4
75 stop = pending_exception + 4
79 .type invc_ptr, %object
81 address = invc_ptr + 4
82 .type address, %object
87 .type psxRegs, %object
88 .size psxRegs, psxRegs_end-psxRegs
99 .type reg_cop0, %object
101 reg_cop2d = reg_cop0 + 128
102 .type reg_cop2d, %object
104 reg_cop2c = reg_cop2d + 128
105 .type reg_cop2c, %object
117 interrupt = cycle + 4
118 .type interrupt, %object
120 intCycle = interrupt + 4
121 .type intCycle, %object
123 psxRegs_end = intCycle + 256
128 rcnts_end = rcnts + 7*4*4
131 .type mem_rtab, %object
133 mem_wtab = mem_rtab + 4
134 .type mem_wtab, %object
136 psxH_ptr = mem_wtab + 4
137 .type psxH_ptr, %object
139 zeromem_ptr = psxH_ptr + 4
140 .type zeromem_ptr, %object
142 inv_code_start = zeromem_ptr + 4
143 .type inv_code_start, %object
144 .size inv_code_start, 4
145 inv_code_end = inv_code_start + 4
146 .type inv_code_end, %object
147 .size inv_code_end, 4
148 branch_target = inv_code_end + 4
149 .type branch_target, %object
150 .size branch_target, 4
151 align0 = branch_target + 4 /* unused/alignment */
152 .type align0, %object
154 mini_ht = align0 + 16
155 .type mini_ht, %object
157 restore_candidate = mini_ht + 256
158 .type restore_candidate, %object
159 .size restore_candidate, 512
160 dynarec_local_end = restore_candidate + 512
170 .macro load_var_adr reg var
172 movw \reg, #:lower16:\var
173 movt \reg, #:upper16:\var
179 .macro mov_16 reg imm
183 mov \reg, #(\imm & 0x00ff)
184 orr \reg, #(\imm & 0xff00)
188 .macro mov_24 reg imm
190 movw \reg, #(\imm & 0xffff)
191 movt \reg, #(\imm >> 16)
193 mov \reg, #(\imm & 0x0000ff)
194 orr \reg, #(\imm & 0x00ff00)
195 orr \reg, #(\imm & 0xff0000)
199 .macro dyna_linker_main
200 /* r0 = virtual target address */
201 /* r1 = instruction to patch */
215 ldr r5, [r3, r2, lsl #2]
217 add r6, r1, r12, asr #6
232 moveq pc, r4 /* Stale i-cache */
234 b 1b /* jump_in may have dupes, continue search */
237 beq 3f /* r0 not in jump_in */
243 and r1, r7, #0xff000000
246 add r1, r1, r2, lsr #8
250 /* hash_table lookup */
253 eor r4, r0, r0, lsl #16
259 ldr r5, [r3, r2, lsl #2]
266 /* jump_dirty lookup */
276 /* hash_table insert */
290 .type dyna_linker, %function
292 /* r0 = virtual target address */
293 /* r1 = instruction to patch */
298 bl new_recompile_block
306 .size dyna_linker, .-dyna_linker
307 .global exec_pagefault
308 .type exec_pagefault, %function
310 /* r0 = instruction pointer */
311 /* r1 = fault address */
313 ldr r3, [fp, #reg_cop0+48-dynarec_local] /* Status */
315 ldr r4, [fp, #reg_cop0+16-dynarec_local] /* Context */
316 bic r6, r6, #0x0F800000
317 str r0, [fp, #reg_cop0+56-dynarec_local] /* EPC */
319 str r1, [fp, #reg_cop0+32-dynarec_local] /* BadVAddr */
321 str r3, [fp, #reg_cop0+48-dynarec_local] /* Status */
322 and r5, r6, r1, lsr #9
323 str r2, [fp, #reg_cop0+52-dynarec_local] /* Cause */
324 and r1, r1, r6, lsl #9
325 str r1, [fp, #reg_cop0+40-dynarec_local] /* EntryHi */
327 str r4, [fp, #reg_cop0+16-dynarec_local] /* Context */
331 .size exec_pagefault, .-exec_pagefault
333 /* Special dynamic linker for the case where a page fault
334 may occur in a branch delay slot */
335 .global dyna_linker_ds
336 .type dyna_linker_ds, %function
338 /* r0 = virtual target address */
339 /* r1 = instruction to patch */
346 bl new_recompile_block
353 mov r2, #0x80000008 /* High bit set indicates pagefault in delay slot */
356 .size dyna_linker_ds, .-dyna_linker_ds
365 .global jump_vaddr_r0
366 .type jump_vaddr_r0, %function
368 eor r2, r0, r0, lsl #16
370 .size jump_vaddr_r0, .-jump_vaddr_r0
371 .global jump_vaddr_r1
372 .type jump_vaddr_r1, %function
374 eor r2, r1, r1, lsl #16
377 .size jump_vaddr_r1, .-jump_vaddr_r1
378 .global jump_vaddr_r2
379 .type jump_vaddr_r2, %function
382 eor r2, r2, r2, lsl #16
384 .size jump_vaddr_r2, .-jump_vaddr_r2
385 .global jump_vaddr_r3
386 .type jump_vaddr_r3, %function
388 eor r2, r3, r3, lsl #16
391 .size jump_vaddr_r3, .-jump_vaddr_r3
392 .global jump_vaddr_r4
393 .type jump_vaddr_r4, %function
395 eor r2, r4, r4, lsl #16
398 .size jump_vaddr_r4, .-jump_vaddr_r4
399 .global jump_vaddr_r5
400 .type jump_vaddr_r5, %function
402 eor r2, r5, r5, lsl #16
405 .size jump_vaddr_r5, .-jump_vaddr_r5
406 .global jump_vaddr_r6
407 .type jump_vaddr_r6, %function
409 eor r2, r6, r6, lsl #16
412 .size jump_vaddr_r6, .-jump_vaddr_r6
413 .global jump_vaddr_r8
414 .type jump_vaddr_r8, %function
416 eor r2, r8, r8, lsl #16
419 .size jump_vaddr_r8, .-jump_vaddr_r8
420 .global jump_vaddr_r9
421 .type jump_vaddr_r9, %function
423 eor r2, r9, r9, lsl #16
426 .size jump_vaddr_r9, .-jump_vaddr_r9
427 .global jump_vaddr_r10
428 .type jump_vaddr_r10, %function
430 eor r2, r10, r10, lsl #16
433 .size jump_vaddr_r10, .-jump_vaddr_r10
434 .global jump_vaddr_r12
435 .type jump_vaddr_r12, %function
437 eor r2, r12, r12, lsl #16
440 .size jump_vaddr_r12, .-jump_vaddr_r12
441 .global jump_vaddr_r7
442 .type jump_vaddr_r7, %function
444 eor r2, r7, r7, lsl #16
446 .size jump_vaddr_r7, .-jump_vaddr_r7
448 .type jump_vaddr, %function
452 and r2, r3, r2, lsr #12
459 str r10, [fp, #cycle_count-dynarec_local]
461 ldr r10, [fp, #cycle_count-dynarec_local]
463 .size jump_vaddr, .-jump_vaddr
466 .global verify_code_ds
467 .type verify_code_ds, %function
469 str r8, [fp, #branch_target-dynarec_local]
470 .size verify_code_ds, .-verify_code_ds
471 .global verify_code_vm
472 .type verify_code_vm, %function
475 .type verify_code, %function
504 ldr r8, [fp, #branch_target-dynarec_local]
509 .size verify_code, .-verify_code
510 .size verify_code_vm, .-verify_code_vm
514 .type cc_interrupt, %function
516 ldr r0, [fp, #last_count-dynarec_local]
520 str r1, [fp, #pending_exception-dynarec_local]
521 and r2, r2, r10, lsr #17
522 add r3, fp, #restore_candidate-dynarec_local
523 str r10, [fp, #cycle-dynarec_local] /* PCSX cycles */
524 @@ str r10, [fp, #reg_cop0+36-dynarec_local] /* Count */
532 ldr r10, [fp, #cycle-dynarec_local]
533 ldr r0, [fp, #next_interupt-dynarec_local]
534 ldr r1, [fp, #pending_exception-dynarec_local]
535 ldr r2, [fp, #stop-dynarec_local]
536 str r0, [fp, #last_count-dynarec_local]
539 ldmnefd sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, pc}
543 ldr r0, [fp, #pcaddr-dynarec_local]
547 /* Move 'dirty' blocks to the 'clean' list */
558 .size cc_interrupt, .-cc_interrupt
562 .type do_interrupt, %function
564 ldr r0, [fp, #pcaddr-dynarec_local]
568 .size do_interrupt, .-do_interrupt
572 .type fp_exception, %function
576 ldr r1, [fp, #reg_cop0+48-dynarec_local] /* Status */
578 str r0, [fp, #reg_cop0+56-dynarec_local] /* EPC */
581 str r1, [fp, #reg_cop0+48-dynarec_local] /* Status */
582 str r2, [fp, #reg_cop0+52-dynarec_local] /* Cause */
586 .size fp_exception, .-fp_exception
588 .global fp_exception_ds
589 .type fp_exception_ds, %function
591 mov r2, #0x90000000 /* Set high bit if delay slot */
593 .size fp_exception_ds, .-fp_exception_ds
597 .type jump_syscall, %function
599 ldr r1, [fp, #reg_cop0+48-dynarec_local] /* Status */
601 str r0, [fp, #reg_cop0+56-dynarec_local] /* EPC */
604 str r1, [fp, #reg_cop0+48-dynarec_local] /* Status */
605 str r2, [fp, #reg_cop0+52-dynarec_local] /* Cause */
609 .size jump_syscall, .-jump_syscall
613 .global jump_syscall_hle
614 .type jump_syscall_hle, %function
616 str r0, [fp, #pcaddr-dynarec_local] /* PC must be set to EPC for psxException */
617 ldr r2, [fp, #last_count-dynarec_local]
618 mov r1, #0 /* in delay slot */
620 mov r0, #0x20 /* cause */
621 str r2, [fp, #cycle-dynarec_local] /* PCSX cycle counter */
624 /* note: psxException might do recorsive recompiler call from it's HLE code,
625 * so be ready for this */
627 ldr r1, [fp, #next_interupt-dynarec_local]
628 ldr r10, [fp, #cycle-dynarec_local]
629 ldr r0, [fp, #pcaddr-dynarec_local]
631 str r1, [fp, #last_count-dynarec_local]
634 .size jump_syscall_hle, .-jump_syscall_hle
638 .type jump_hlecall, %function
640 ldr r2, [fp, #last_count-dynarec_local]
641 str r0, [fp, #pcaddr-dynarec_local]
644 str r2, [fp, #cycle-dynarec_local] /* PCSX cycle counter */
646 .size jump_hlecall, .-jump_hlecall
650 .type jump_intcall, %function
652 ldr r2, [fp, #last_count-dynarec_local]
653 str r0, [fp, #pcaddr-dynarec_local]
656 str r2, [fp, #cycle-dynarec_local] /* PCSX cycle counter */
658 .size jump_hlecall, .-jump_hlecall
662 .global new_dyna_leave
663 .type new_dyna_leave, %function
664 ldr r0, [fp, #last_count-dynarec_local]
667 str r10, [fp, #cycle-dynarec_local]
668 ldmfd sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, pc}
669 .size new_dyna_leave, .-new_dyna_leave
672 .global invalidate_addr_r0
673 .type invalidate_addr_r0, %function
675 stmia fp, {r0, r1, r2, r3, r12, lr}
676 b invalidate_addr_call
677 .size invalidate_addr_r0, .-invalidate_addr_r0
679 .global invalidate_addr_r1
680 .type invalidate_addr_r1, %function
682 stmia fp, {r0, r1, r2, r3, r12, lr}
684 b invalidate_addr_call
685 .size invalidate_addr_r1, .-invalidate_addr_r1
687 .global invalidate_addr_r2
688 .type invalidate_addr_r2, %function
690 stmia fp, {r0, r1, r2, r3, r12, lr}
692 b invalidate_addr_call
693 .size invalidate_addr_r2, .-invalidate_addr_r2
695 .global invalidate_addr_r3
696 .type invalidate_addr_r3, %function
698 stmia fp, {r0, r1, r2, r3, r12, lr}
700 b invalidate_addr_call
701 .size invalidate_addr_r3, .-invalidate_addr_r3
703 .global invalidate_addr_r4
704 .type invalidate_addr_r4, %function
706 stmia fp, {r0, r1, r2, r3, r12, lr}
708 b invalidate_addr_call
709 .size invalidate_addr_r4, .-invalidate_addr_r4
711 .global invalidate_addr_r5
712 .type invalidate_addr_r5, %function
714 stmia fp, {r0, r1, r2, r3, r12, lr}
716 b invalidate_addr_call
717 .size invalidate_addr_r5, .-invalidate_addr_r5
719 .global invalidate_addr_r6
720 .type invalidate_addr_r6, %function
722 stmia fp, {r0, r1, r2, r3, r12, lr}
724 b invalidate_addr_call
725 .size invalidate_addr_r6, .-invalidate_addr_r6
727 .global invalidate_addr_r7
728 .type invalidate_addr_r7, %function
730 stmia fp, {r0, r1, r2, r3, r12, lr}
732 b invalidate_addr_call
733 .size invalidate_addr_r7, .-invalidate_addr_r7
735 .global invalidate_addr_r8
736 .type invalidate_addr_r8, %function
738 stmia fp, {r0, r1, r2, r3, r12, lr}
740 b invalidate_addr_call
741 .size invalidate_addr_r8, .-invalidate_addr_r8
743 .global invalidate_addr_r9
744 .type invalidate_addr_r9, %function
746 stmia fp, {r0, r1, r2, r3, r12, lr}
748 b invalidate_addr_call
749 .size invalidate_addr_r9, .-invalidate_addr_r9
751 .global invalidate_addr_r10
752 .type invalidate_addr_r10, %function
754 stmia fp, {r0, r1, r2, r3, r12, lr}
756 b invalidate_addr_call
757 .size invalidate_addr_r10, .-invalidate_addr_r10
759 .global invalidate_addr_r12
760 .type invalidate_addr_r12, %function
762 stmia fp, {r0, r1, r2, r3, r12, lr}
764 .size invalidate_addr_r12, .-invalidate_addr_r12
766 .global invalidate_addr_call
767 .type invalidate_addr_call, %function
768 invalidate_addr_call:
769 ldr r12, [fp, #inv_code_start-dynarec_local]
770 ldr lr, [fp, #inv_code_end-dynarec_local]
774 ldmia fp, {r0, r1, r2, r3, r12, pc}
775 .size invalidate_addr_call, .-invalidate_addr_call
778 .global new_dyna_start
779 .type new_dyna_start, %function
781 /* ip is stored to conform EABI alignment */
782 stmfd sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, lr}
783 load_var_adr fp, dynarec_local
784 ldr r0, [fp, #pcaddr-dynarec_local]
786 ldr r1, [fp, #next_interupt-dynarec_local]
787 ldr r10, [fp, #cycle-dynarec_local]
788 str r1, [fp, #last_count-dynarec_local]
791 .size new_dyna_start, .-new_dyna_start
793 /* --------------------------------------- */
796 .global jump_handler_read8
797 .global jump_handler_read16
798 .global jump_handler_read32
799 .global jump_handler_write8
800 .global jump_handler_write16
801 .global jump_handler_write32
802 .global jump_handler_write_h
803 .global jump_handle_swl
804 .global jump_handle_swr
805 .global rcnt0_read_count_m0
806 .global rcnt0_read_count_m1
807 .global rcnt1_read_count_m0
808 .global rcnt1_read_count_m1
809 .global rcnt2_read_count_m0
810 .global rcnt2_read_count_m1
813 .macro pcsx_read_mem readop tab_shift
814 /* r0 = address, r1 = handler_tab, r2 = cycles */
816 lsr r3, #(20+\tab_shift)
817 ldr r12, [fp, #last_count-dynarec_local]
818 ldr r1, [r1, r3, lsl #2]
825 \readop r0, [r1, r3, lsl #\tab_shift]
828 str r2, [fp, #cycle-dynarec_local]
833 add r1, #0x1000/4*4 + 0x1000/2*4 @ shift to r8 part
834 pcsx_read_mem ldrccb, 0
837 add r1, #0x1000/4*4 @ shift to r16 part
838 pcsx_read_mem ldrcch, 1
841 pcsx_read_mem ldrcc, 2
844 .macro pcsx_write_mem wrtop tab_shift
845 /* r0 = address, r1 = data, r2 = cycles, r3 = handler_tab */
847 lsr r12, #(20+\tab_shift)
848 ldr r3, [r3, r12, lsl #2]
849 str r0, [fp, #address-dynarec_local] @ some handlers still need it..
851 mov r0, r2 @ cycle return in case of direct store
856 \wrtop r1, [r3, r12, lsl #\tab_shift]
859 ldr r12, [fp, #last_count-dynarec_local]
863 str r2, [fp, #cycle-dynarec_local]
866 ldr r0, [fp, #next_interupt-dynarec_local]
868 str r0, [fp, #last_count-dynarec_local]
874 add r3, #0x1000/4*4 + 0x1000/2*4 @ shift to r8 part
875 pcsx_write_mem strccb, 0
877 jump_handler_write16:
878 add r3, #0x1000/4*4 @ shift to r16 part
879 pcsx_write_mem strcch, 1
881 jump_handler_write32:
882 pcsx_write_mem strcc, 2
884 jump_handler_write_h:
885 /* r0 = address, r1 = data, r2 = cycles, r3 = handler */
886 ldr r12, [fp, #last_count-dynarec_local]
887 str r0, [fp, #address-dynarec_local] @ some handlers still need it..
891 str r2, [fp, #cycle-dynarec_local]
894 ldr r0, [fp, #next_interupt-dynarec_local]
896 str r0, [fp, #last_count-dynarec_local]
901 /* r0 = address, r1 = data, r2 = cycles */
902 ldr r3, [fp, #mem_wtab-dynarec_local]
904 ldr r3, [r3, r12, lsl #2]
925 lsreq r12, r1, #24 @ 0
936 /* r0 = address, r1 = data, r2 = cycles */
937 ldr r3, [fp, #mem_wtab-dynarec_local]
939 ldr r3, [r3, r12, lsl #2]
961 .macro rcntx_read_mode0 num
962 /* r0 = address, r2 = cycles */
963 ldr r3, [fp, #rcnts-dynarec_local+6*4+7*4*\num] @ cycleStart
980 /* r0 = address, r2 = cycles */
981 ldr r3, [fp, #rcnts-dynarec_local+6*4+7*4*0] @ cycleStart
984 mul r0, r1, r2 @ /= 5
989 /* r0 = address, r2 = cycles */
990 ldr r3, [fp, #rcnts-dynarec_local+6*4+7*4*1]
993 umull r3, r0, r1, r2 @ ~ /= hsync_cycles, max ~0x1e6cdd
997 /* r0 = address, r2 = cycles */
998 ldr r3, [fp, #rcnts-dynarec_local+6*4+7*4*2]
999 mov r0, r2, lsl #16-3
1000 sub r0, r3, lsl #16-3
1004 @ vim:filetype=armasm