1 /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
2 * linkage_arm.s for PCSX *
3 * Copyright (C) 2009-2011 Ari64 *
4 * Copyright (C) 2010-2011 Gražvydas "notaz" Ignotas *
6 * This program is free software; you can redistribute it and/or modify *
7 * it under the terms of the GNU General Public License as published by *
8 * the Free Software Foundation; either version 2 of the License, or *
9 * (at your option) any later version. *
11 * This program is distributed in the hope that it will be useful, *
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of *
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
14 * GNU General Public License for more details. *
16 * You should have received a copy of the GNU General Public License *
17 * along with this program; if not, write to the *
18 * Free Software Foundation, Inc., *
19 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. *
20 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
22 /* .equiv HAVE_ARMV7, 1 */
/* Exported symbols within the dynarec_local block (layout below). */
38 .global pending_exception
46 .global restore_candidate
52 .global inv_code_start
/*
 * dynarec_local: one contiguous .bss object addressed via fp throughout
 * this file (all loads/stores use [fp, #sym-dynarec_local]).  Every symbol
 * below is defined as an offset into this block, not as separate storage.
 * NOTE(review): this listing elides lines (gaps in the original numbering);
 * some member definitions (stop, invc_ptr, psxRegs, reg_cop0, cycle,
 * mini_ht, pcaddr) start outside the visible lines.
 */
57 .type dynarec_local, %object
58 .size dynarec_local, dynarec_local_end-dynarec_local
60 .space dynarec_local_end-dynarec_local /*0x400630*/
61 next_interupt = dynarec_local + 64
62 .type next_interupt, %object
63 .size next_interupt, 4
64 cycle_count = next_interupt + 4
65 .type cycle_count, %object
67 last_count = cycle_count + 4
68 .type last_count, %object
70 pending_exception = last_count + 4
71 .type pending_exception, %object
72 .size pending_exception, 4
73 stop = pending_exception + 4
77 .type invc_ptr, %object
79 address = invc_ptr + 4
80 .type address, %object
/* psxRegs: embedded copy of the emulated PSX CPU register file. */
85 .type psxRegs, %object
86 .size psxRegs, psxRegs_end-psxRegs
/* Coprocessor register banks: 32 words (128 bytes) each. */
97 .type reg_cop0, %object
99 reg_cop2d = reg_cop0 + 128
100 .type reg_cop2d, %object
102 reg_cop2c = reg_cop2d + 128
103 .type reg_cop2c, %object
115 interrupt = cycle + 4
116 .type interrupt, %object
118 intCycle = interrupt + 4
119 .type intCycle, %object
121 psxRegs_end = intCycle + 256
/* Memory-map dispatch tables and invalidation bookkeeping follow psxRegs. */
123 mem_rtab = psxRegs_end
124 .type mem_rtab, %object
126 mem_wtab = mem_rtab + 4
127 .type mem_wtab, %object
129 psxH_ptr = mem_wtab + 4
130 .type psxH_ptr, %object
/* [inv_code_start, inv_code_end]: cached range known to hold no compiled code
 * (checked by invalidate_addr_call before doing a full invalidation). */
132 inv_code_start = psxH_ptr + 4
133 .type inv_code_start, %object
134 .size inv_code_start, 4
135 inv_code_end = inv_code_start + 4
136 .type inv_code_end, %object
137 .size inv_code_end, 4
138 branch_target = inv_code_end + 4
139 .type branch_target, %object
140 .size branch_target, 4
141 align0 = branch_target + 4 /* unused/alignment */
142 .type align0, %object
/* mini_ht: small hash table for fast block lookup; restore_candidate:
 * bitmap of dirty blocks considered for restore to the clean list. */
145 .type mini_ht, %object
147 restore_candidate = mini_ht + 256
148 .type restore_candidate, %object
149 .size restore_candidate, 512
150 dynarec_local_end = restore_candidate + 512
/* load_var_adr: materialize the 32-bit address of \var into \reg using a
 * movw/movt pair (ARMv7 form; .endm and any pre-v7 fallback are outside
 * the visible lines). */
160 .macro load_var_adr reg var
162 movw \reg, #:lower16:\var
163 movt \reg, #:upper16:\var
/*
 * dyna_linker_main: shared body of the dynamic linker stubs.  Searches the
 * jump_in / hash_table / jump_dirty structures for compiled code matching
 * the virtual target address and patches the calling branch instruction.
 * NOTE(review): body heavily elided here — only fragments are visible.
 */
169 .macro dyna_linker_main
170 /* r0 = virtual target address */
171 /* r1 = instruction to patch */
185 ldr r5, [r3, r2, lsl #2]
187 add r6, r1, r12, asr #6
202 moveq pc, r4 /* Stale i-cache */
204 b 1b /* jump_in may have dupes, continue search */
207 beq 3f /* r0 not in jump_in */
213 and r1, r7, #0xff000000
216 add r1, r1, r2, lsr #8
220 /* hash_table lookup */
223 eor r4, r0, r0, lsl #16
229 ldr r5, [r3, r2, lsl #2]
236 /* jump_dirty lookup */
246 /* hash_table insert */
/* dyna_linker: no compiled block found — compile one, then retry. */
260 .type dyna_linker, %function
262 /* r0 = virtual target address */
263 /* r1 = instruction to patch */
268 bl new_recompile_block
276 .size dyna_linker, .-dyna_linker
/*
 * exec_pagefault: raise an emulated MIPS TLB/address-error exception for an
 * instruction fetch.  Updates the COP0 register file inside dynarec_local
 * (offsets into reg_cop0 are in bytes: Status=+48, Cause=+52, EPC=+56, etc.).
 * NOTE(review): lines elided in this view.
 */
277 .global exec_pagefault
278 .type exec_pagefault, %function
280 /* r0 = instruction pointer */
281 /* r1 = fault address */
283 ldr r3, [fp, #reg_cop0+48-dynarec_local] /* Status */
285 ldr r4, [fp, #reg_cop0+16-dynarec_local] /* Context */
286 bic r6, r6, #0x0F800000
287 str r0, [fp, #reg_cop0+56-dynarec_local] /* EPC */
289 str r1, [fp, #reg_cop0+32-dynarec_local] /* BadVAddr */
291 str r3, [fp, #reg_cop0+48-dynarec_local] /* Status */
292 and r5, r6, r1, lsr #9
293 str r2, [fp, #reg_cop0+52-dynarec_local] /* Cause */
294 and r1, r1, r6, lsl #9
295 str r1, [fp, #reg_cop0+40-dynarec_local] /* EntryHi */
297 str r4, [fp, #reg_cop0+16-dynarec_local] /* Context */
301 .size exec_pagefault, .-exec_pagefault
303 /* Special dynamic linker for the case where a page fault
304 may occur in a branch delay slot */
305 .global dyna_linker_ds
306 .type dyna_linker_ds, %function
308 /* r0 = virtual target address */
309 /* r1 = instruction to patch */
316 bl new_recompile_block
323 mov r2, #0x80000008 /* High bit set indicates pagefault in delay slot */
326 .size dyna_linker_ds, .-dyna_linker_ds
/*
 * jump_vaddr_rN family: per-register entry points for indirect jumps from
 * compiled code.  Each stub hashes the target address held in rN
 * (eor r2, rN, rN, lsl #16) and falls through / branches into the common
 * jump_vaddr lookup.  There is no _r11 variant — r11 (fp) permanently
 * holds the dynarec_local base.  Bodies are elided in this view.
 */
335 .global jump_vaddr_r0
336 .type jump_vaddr_r0, %function
338 eor r2, r0, r0, lsl #16
340 .size jump_vaddr_r0, .-jump_vaddr_r0
341 .global jump_vaddr_r1
342 .type jump_vaddr_r1, %function
344 eor r2, r1, r1, lsl #16
347 .size jump_vaddr_r1, .-jump_vaddr_r1
348 .global jump_vaddr_r2
349 .type jump_vaddr_r2, %function
352 eor r2, r2, r2, lsl #16
354 .size jump_vaddr_r2, .-jump_vaddr_r2
355 .global jump_vaddr_r3
356 .type jump_vaddr_r3, %function
358 eor r2, r3, r3, lsl #16
361 .size jump_vaddr_r3, .-jump_vaddr_r3
362 .global jump_vaddr_r4
363 .type jump_vaddr_r4, %function
365 eor r2, r4, r4, lsl #16
368 .size jump_vaddr_r4, .-jump_vaddr_r4
369 .global jump_vaddr_r5
370 .type jump_vaddr_r5, %function
372 eor r2, r5, r5, lsl #16
375 .size jump_vaddr_r5, .-jump_vaddr_r5
376 .global jump_vaddr_r6
377 .type jump_vaddr_r6, %function
379 eor r2, r6, r6, lsl #16
382 .size jump_vaddr_r6, .-jump_vaddr_r6
383 .global jump_vaddr_r8
384 .type jump_vaddr_r8, %function
386 eor r2, r8, r8, lsl #16
389 .size jump_vaddr_r8, .-jump_vaddr_r8
390 .global jump_vaddr_r9
391 .type jump_vaddr_r9, %function
393 eor r2, r9, r9, lsl #16
396 .size jump_vaddr_r9, .-jump_vaddr_r9
397 .global jump_vaddr_r10
398 .type jump_vaddr_r10, %function
400 eor r2, r10, r10, lsl #16
403 .size jump_vaddr_r10, .-jump_vaddr_r10
404 .global jump_vaddr_r12
405 .type jump_vaddr_r12, %function
407 eor r2, r12, r12, lsl #16
410 .size jump_vaddr_r12, .-jump_vaddr_r12
411 .global jump_vaddr_r7
412 .type jump_vaddr_r7, %function
414 eor r2, r7, r7, lsl #16
416 .size jump_vaddr_r7, .-jump_vaddr_r7
/* jump_vaddr: common tail — hash-table lookup for the target address;
 * spills/reloads the cycle counter (r10) around the C lookup call. */
418 .type jump_vaddr, %function
422 and r2, r3, r2, lsr #12
429 str r10, [fp, #cycle_count-dynarec_local]
431 ldr r10, [fp, #cycle_count-dynarec_local]
433 .size jump_vaddr, .-jump_vaddr
/*
 * verify_code_ds / verify_code_vm / verify_code: check that the source
 * bytes a dirty block was compiled from are unchanged before re-entering it.
 * verify_code_ds additionally preserves r8 (branch target of the pending
 * delay slot) across the check.  Bodies elided in this view.
 */
436 .global verify_code_ds
437 .type verify_code_ds, %function
439 str r8, [fp, #branch_target-dynarec_local]
440 .size verify_code_ds, .-verify_code_ds
441 .global verify_code_vm
442 .type verify_code_vm, %function
445 .type verify_code, %function
474 ldr r8, [fp, #branch_target-dynarec_local]
479 .size verify_code, .-verify_code
480 .size verify_code_vm, .-verify_code_vm
/*
 * cc_interrupt: called when the cycle counter (r10) expires.  Syncs the
 * PCSX cycle variable, runs pending events, then either resumes compiled
 * code or returns to the caller of new_dyna_start (ldmnefd restores the
 * full register frame pushed there).  Lines elided in this view.
 */
484 .type cc_interrupt, %function
486 ldr r0, [fp, #last_count-dynarec_local]
490 str r1, [fp, #pending_exception-dynarec_local]
491 and r2, r2, r10, lsr #17
492 add r3, fp, #restore_candidate-dynarec_local
493 str r10, [fp, #cycle-dynarec_local] /* PCSX cycles */
494 @@ str r10, [fp, #reg_cop0+36-dynarec_local] /* Count */
502 ldr r10, [fp, #cycle-dynarec_local]
503 ldr r0, [fp, #next_interupt-dynarec_local]
504 ldr r1, [fp, #pending_exception-dynarec_local]
505 ldr r2, [fp, #stop-dynarec_local]
506 str r0, [fp, #last_count-dynarec_local]
509 ldmnefd sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, pc}
513 ldr r0, [fp, #pcaddr-dynarec_local]
517 /* Move 'dirty' blocks to the 'clean' list */
528 .size cc_interrupt, .-cc_interrupt
/* do_interrupt: re-enter compiled code at pcaddr after an interrupt. */
532 .type do_interrupt, %function
534 ldr r0, [fp, #pcaddr-dynarec_local]
538 .size do_interrupt, .-do_interrupt
/* fp_exception: raise a coprocessor-unusable exception; writes EPC,
 * Status and Cause in COP0 (byte offsets +56/+48/+52 of reg_cop0). */
542 .type fp_exception, %function
546 ldr r1, [fp, #reg_cop0+48-dynarec_local] /* Status */
548 str r0, [fp, #reg_cop0+56-dynarec_local] /* EPC */
551 str r1, [fp, #reg_cop0+48-dynarec_local] /* Status */
552 str r2, [fp, #reg_cop0+52-dynarec_local] /* Cause */
556 .size fp_exception, .-fp_exception
558 .global fp_exception_ds
559 .type fp_exception_ds, %function
561 mov r2, #0x90000000 /* Set high bit if delay slot */
563 .size fp_exception_ds, .-fp_exception_ds
/* jump_syscall: raise the syscall exception (same COP0 update pattern). */
567 .type jump_syscall, %function
569 ldr r1, [fp, #reg_cop0+48-dynarec_local] /* Status */
571 str r0, [fp, #reg_cop0+56-dynarec_local] /* EPC */
574 str r1, [fp, #reg_cop0+48-dynarec_local] /* Status */
575 str r2, [fp, #reg_cop0+52-dynarec_local] /* Cause */
579 .size jump_syscall, .-jump_syscall
/*
 * jump_syscall_hle: deliver a syscall (cause 0x20) through PCSX's C-level
 * psxException, syncing the cycle counter out before the call and back in
 * (to r10) afterwards.  Lines elided in this view.
 */
583 .global jump_syscall_hle
584 .type jump_syscall_hle, %function
586 str r0, [fp, #pcaddr-dynarec_local] /* PC must be set to EPC for psxException */
587 ldr r2, [fp, #last_count-dynarec_local]
588 mov r1, #0 /* in delay slot */
590 mov r0, #0x20 /* cause */
591 str r2, [fp, #cycle-dynarec_local] /* PCSX cycle counter */
594 /* note: psxException might do recursive recompiler call from its HLE code,
595 * so be ready for this */
597 ldr r1, [fp, #next_interupt-dynarec_local]
598 ldr r10, [fp, #cycle-dynarec_local]
599 ldr r0, [fp, #pcaddr-dynarec_local]
601 str r1, [fp, #last_count-dynarec_local]
604 .size jump_syscall_hle, .-jump_syscall_hle
/* jump_hlecall: hand control to a BIOS HLE handler; r0 = emulated PC. */
608 .type jump_hlecall, %function
610 ldr r2, [fp, #last_count-dynarec_local]
611 str r0, [fp, #pcaddr-dynarec_local]
614 str r2, [fp, #cycle-dynarec_local] /* PCSX cycle counter */
616 .size jump_hlecall, .-jump_hlecall
/*
 * jump_intcall: leave the recompiler and fall back to the interpreter;
 * r0 = emulated PC.  Stores pcaddr and syncs the PCSX cycle counter,
 * mirroring jump_hlecall above.  Lines elided in this view.
 */
620 .type jump_intcall, %function
622 ldr r2, [fp, #last_count-dynarec_local]
623 str r0, [fp, #pcaddr-dynarec_local]
626 str r2, [fp, #cycle-dynarec_local] /* PCSX cycle counter */
/* Fix: .size previously named jump_hlecall (copy-paste error), redefining
 * that symbol's size and leaving jump_intcall unsized. */
628 .size jump_intcall, .-jump_intcall
/*
 * new_dyna_leave: exit the dynarec back to C.  Syncs the cycle counter,
 * then pops the exact register frame pushed by new_dyna_start (ip included
 * only to keep the stack 8-byte aligned per EABI) and returns via pc.
 */
632 .global new_dyna_leave
633 .type new_dyna_leave, %function
634 ldr r0, [fp, #last_count-dynarec_local]
637 str r10, [fp, #cycle-dynarec_local]
638 ldmfd sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, pc}
639 .size new_dyna_leave, .-new_dyna_leave
/*
 * invalidate_addr_rN family: called from compiled code when a store may
 * have hit compiled-from memory.  Each stub spills the caller-saved regs
 * {r0-r3, r12, lr} to the start of dynarec_local (stmia fp, ...), moves
 * the address from rN into place (moves elided in this view), and branches
 * to the common invalidate_addr_call, which restores the same regs with
 * ldmia ... pc to return.  No _r11 variant: r11 (fp) is reserved.
 */
642 .global invalidate_addr_r0
643 .type invalidate_addr_r0, %function
645 stmia fp, {r0, r1, r2, r3, r12, lr}
646 b invalidate_addr_call
647 .size invalidate_addr_r0, .-invalidate_addr_r0
649 .global invalidate_addr_r1
650 .type invalidate_addr_r1, %function
652 stmia fp, {r0, r1, r2, r3, r12, lr}
654 b invalidate_addr_call
655 .size invalidate_addr_r1, .-invalidate_addr_r1
657 .global invalidate_addr_r2
658 .type invalidate_addr_r2, %function
660 stmia fp, {r0, r1, r2, r3, r12, lr}
662 b invalidate_addr_call
663 .size invalidate_addr_r2, .-invalidate_addr_r2
665 .global invalidate_addr_r3
666 .type invalidate_addr_r3, %function
668 stmia fp, {r0, r1, r2, r3, r12, lr}
670 b invalidate_addr_call
671 .size invalidate_addr_r3, .-invalidate_addr_r3
673 .global invalidate_addr_r4
674 .type invalidate_addr_r4, %function
676 stmia fp, {r0, r1, r2, r3, r12, lr}
678 b invalidate_addr_call
679 .size invalidate_addr_r4, .-invalidate_addr_r4
681 .global invalidate_addr_r5
682 .type invalidate_addr_r5, %function
684 stmia fp, {r0, r1, r2, r3, r12, lr}
686 b invalidate_addr_call
687 .size invalidate_addr_r5, .-invalidate_addr_r5
689 .global invalidate_addr_r6
690 .type invalidate_addr_r6, %function
692 stmia fp, {r0, r1, r2, r3, r12, lr}
694 b invalidate_addr_call
695 .size invalidate_addr_r6, .-invalidate_addr_r6
697 .global invalidate_addr_r7
698 .type invalidate_addr_r7, %function
700 stmia fp, {r0, r1, r2, r3, r12, lr}
702 b invalidate_addr_call
703 .size invalidate_addr_r7, .-invalidate_addr_r7
705 .global invalidate_addr_r8
706 .type invalidate_addr_r8, %function
708 stmia fp, {r0, r1, r2, r3, r12, lr}
710 b invalidate_addr_call
711 .size invalidate_addr_r8, .-invalidate_addr_r8
713 .global invalidate_addr_r9
714 .type invalidate_addr_r9, %function
716 stmia fp, {r0, r1, r2, r3, r12, lr}
718 b invalidate_addr_call
719 .size invalidate_addr_r9, .-invalidate_addr_r9
721 .global invalidate_addr_r10
722 .type invalidate_addr_r10, %function
724 stmia fp, {r0, r1, r2, r3, r12, lr}
726 b invalidate_addr_call
727 .size invalidate_addr_r10, .-invalidate_addr_r10
729 .global invalidate_addr_r12
730 .type invalidate_addr_r12, %function
732 stmia fp, {r0, r1, r2, r3, r12, lr}
/* r12 variant falls through to invalidate_addr_call (no branch needed). */
734 .size invalidate_addr_r12, .-invalidate_addr_r12
736 .global invalidate_addr_call
737 .type invalidate_addr_call, %function
738 invalidate_addr_call:
/* Fast path: if the address lies in [inv_code_start, inv_code_end) the
 * range is known clean and the C invalidation call is skipped (check
 * itself elided in this view). */
739 ldr r12, [fp, #inv_code_start-dynarec_local]
740 ldr lr, [fp, #inv_code_end-dynarec_local]
744 ldmia fp, {r0, r1, r2, r3, r12, pc}
745 .size invalidate_addr_call, .-invalidate_addr_call
/*
 * new_dyna_start: C entry point into the recompiler.  Saves callee-saved
 * regs plus lr (ip is pushed only to keep sp 8-byte aligned per EABI),
 * points fp at dynarec_local for the whole dynarec session, primes the
 * cycle counter in r10, and jumps into compiled code at pcaddr.
 * The matching frame pop is in new_dyna_leave / cc_interrupt.
 */
748 .global new_dyna_start
749 .type new_dyna_start, %function
751 /* ip is stored to conform EABI alignment */
752 stmfd sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, lr}
753 load_var_adr fp, dynarec_local
754 ldr r0, [fp, #pcaddr-dynarec_local]
756 ldr r1, [fp, #next_interupt-dynarec_local]
757 ldr r10, [fp, #cycle-dynarec_local]
758 str r1, [fp, #last_count-dynarec_local]
761 .size new_dyna_start, .-new_dyna_start
763 /* --------------------------------------- */
/* Memory access handler entry points (bodies below / elided). */
766 .global jump_handler_read8
767 .global jump_handler_read16
768 .global jump_handler_read32
769 .global jump_handler_write8
770 .global jump_handler_write16
771 .global jump_handler_write32
772 .global jump_handler_write_h
773 .global jump_handle_swl
774 .global jump_handle_swr
/*
 * pcsx_read_mem: common body for the read handlers.  Indexes the handler
 * table by address >> (20+\tab_shift); direct-RAM entries perform the
 * conditional load (\readop = ldrcc{b,h,}) inline, otherwise a C handler
 * is invoked (call path elided in this view).  The 8/16-bit entry points
 * first offset r1 into the sub-table for that access width.
 */
777 .macro pcsx_read_mem readop tab_shift
778 /* r0 = address, r1 = handler_tab, r2 = cycles */
780 lsr r3, #(20+\tab_shift)
781 ldr r12, [fp, #last_count-dynarec_local]
782 ldr r1, [r1, r3, lsl #2]
789 \readop r0, [r1, r3, lsl #\tab_shift]
792 str r2, [fp, #cycle-dynarec_local]
797 add r1, #0x1000/4*4 + 0x1000/2*4 @ shift to r8 part
798 pcsx_read_mem ldrccb, 0
801 add r1, #0x1000/4*4 @ shift to r16 part
802 pcsx_read_mem ldrcch, 1
805 pcsx_read_mem ldrcc, 2
/*
 * pcsx_write_mem: write-side counterpart; \wrtop = strcc{b,h,} for the
 * direct-store case, with cycle-counter sync around C handler calls.
 */
808 .macro pcsx_write_mem wrtop tab_shift
809 /* r0 = address, r1 = data, r2 = cycles, r3 = handler_tab */
811 lsr r12, #(20+\tab_shift)
812 ldr r3, [r3, r12, lsl #2]
813 str r0, [fp, #address-dynarec_local] @ some handlers still need it..
815 mov r0, r2 @ cycle return in case of direct store
820 \wrtop r1, [r3, r12, lsl #\tab_shift]
823 ldr r12, [fp, #last_count-dynarec_local]
827 str r2, [fp, #cycle-dynarec_local]
830 ldr r0, [fp, #next_interupt-dynarec_local]
832 str r0, [fp, #last_count-dynarec_local]
838 add r3, #0x1000/4*4 + 0x1000/2*4 @ shift to r8 part
839 pcsx_write_mem strccb, 0
841 jump_handler_write16:
842 add r3, #0x1000/4*4 @ shift to r16 part
843 pcsx_write_mem strcch, 1
845 jump_handler_write32:
846 pcsx_write_mem strcc, 2
/* jump_handler_write_h: write through an explicit C handler in r3. */
848 jump_handler_write_h:
849 /* r0 = address, r1 = data, r2 = cycles, r3 = handler */
850 ldr r12, [fp, #last_count-dynarec_local]
851 str r0, [fp, #address-dynarec_local] @ some handlers still need it..
855 str r2, [fp, #cycle-dynarec_local]
858 ldr r0, [fp, #next_interupt-dynarec_local]
860 str r0, [fp, #last_count-dynarec_local]
/* jump_handle_swl / jump_handle_swr: unaligned store-word-left/right
 * emulation via mem_wtab (bodies heavily elided in this view). */
865 /* r0 = address, r1 = data, r2 = cycles */
866 ldr r3, [fp, #mem_wtab-dynarec_local]
868 ldr r3, [r3, r12, lsl #2]
889 lsreq r12, r1, #24 @ 0
900 /* r0 = address, r1 = data, r2 = cycles */
901 ldr r3, [fp, #mem_wtab-dynarec_local]
903 ldr r3, [r3, r12, lsl #2]
925 @ vim:filetype=armasm