1 /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
2 * linkage_arm.s for PCSX *
3 * Copyright (C) 2009-2011 Ari64 *
4 * Copyright (C) 2010-2013 Gražvydas "notaz" Ignotas *
6 * This program is free software; you can redistribute it and/or modify *
7 * it under the terms of the GNU General Public License as published by *
8 * the Free Software Foundation; either version 2 of the License, or *
9 * (at your option) any later version. *
11 * This program is distributed in the hope that it will be useful, *
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of *
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
14 * GNU General Public License for more details. *
16 * You should have received a copy of the GNU General Public License *
17 * along with this program; if not, write to the *
18 * Free Software Foundation, Inc., *
19 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. *
20 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
22 #include "arm_features.h"
23 #include "linkage_offsets.h"
/* Route references to C symbols through ESYM() (presumably provided by
 * arm_features.h — confirm) so the same source assembles on platforms
 * that do and do not prefix C symbols with an underscore. */
27 #define dynarec_local ESYM(dynarec_local)
28 #define add_link ESYM(add_link)
29 #define new_recompile_block ESYM(new_recompile_block)
30 #define get_addr ESYM(get_addr)
31 #define get_addr_ht ESYM(get_addr_ht)
32 #define clean_blocks ESYM(clean_blocks)
33 #define gen_interupt ESYM(gen_interupt)
34 #define psxException ESYM(psxException)
35 #define execI ESYM(execI)
36 #define invalidate_addr ESYM(invalidate_addr)
/* dynarec_local: one contiguous state block for the recompiler.
 * Individual fields live at the LO_* offsets (linkage_offsets.h) and are
 * accessed fp-relative throughout this file (fp == &dynarec_local). */
42 .type dynarec_local, %object
43 .size dynarec_local, LO_dynarec_local_size
45 .space LO_dynarec_local_size
/* DRC_VAR(name, size): expose field `name` of dynarec_local as a global
 * object symbol located at dynarec_local + LO_name, so C code can address
 * it directly. (Continuation lines not all visible in this view.) */
47 #define DRC_VAR_(name, vname, size_) \
48 vname = dynarec_local + LO_##name; \
50 .type vname, %object; \
53 #define DRC_VAR(name, size_) \
54 DRC_VAR_(name, ESYM(name), size_)
/* Exported views into dynarec_local. Sizes are in bytes; psxRegs spans
 * the whole emulated register file (LO_psxRegs..LO_psxRegs_end).
 * Entries commented out with @ are kept for reference only. */
56 DRC_VAR(next_interupt, 4)
57 DRC_VAR(cycle_count, 4)
58 DRC_VAR(last_count, 4)
59 DRC_VAR(pending_exception, 4)
63 DRC_VAR(psxRegs, LO_psxRegs_end - LO_psxRegs)
69 DRC_VAR(reg_cop0, 128)
70 DRC_VAR(reg_cop2d, 128)
71 DRC_VAR(reg_cop2c, 128)
75 @DRC_VAR(interrupt, 4)
76 @DRC_VAR(intCycle, 256)
82 DRC_VAR(zeromem_ptr, 4)
83 DRC_VAR(inv_code_start, 4)
84 DRC_VAR(inv_code_end, 4)
85 DRC_VAR(branch_target, 4)
86 DRC_VAR(scratch_buf_ptr, 4)
87 @DRC_VAR(align0, 12) /* unused/alignment */
89 DRC_VAR(restore_candidate, 512)
/* load_varadr reg, var: materialize the 32-bit address of \var in \reg.
 * On ARMv7-A non-PIC builds this is a movw/movt immediate pair; the
 * fallback path (#else branch) is outside this view. */
102 .macro load_varadr reg var
103 #if defined(__ARM_ARCH_7A__) && !defined(__PIC__)
104 movw \reg, #:lower16:\var
105 movt \reg, #:upper16:\var
/* mov_16 reg, imm: load a 16-bit constant. Pre-v7 fallback builds it
 * from two 8-bit rotated immediates (mov low byte, orr high byte). */
111 .macro mov_16 reg imm
112 #ifdef __ARM_ARCH_7A__
115 mov \reg, #(\imm & 0x00ff)
116 orr \reg, #(\imm & 0xff00)
/* mov_24 reg, imm: load a 24-bit constant. ARMv7-A uses movw/movt;
 * otherwise it is assembled byte-by-byte with mov + two orr. */
120 .macro mov_24 reg imm
121 #ifdef __ARM_ARCH_7A__
122 movw \reg, #(\imm & 0xffff)
123 movt \reg, #(\imm >> 16)
125 mov \reg, #(\imm & 0x0000ff)
126 orr \reg, #(\imm & 0x00ff00)
127 orr \reg, #(\imm & 0xff0000)
/* dyna_linker_main: shared body of dyna_linker/dyna_linker_ds.
 * Resolves the virtual target address (r0) to compiled code: searches
 * jump_in, falls back to a hash_table lookup, then jump_dirty, and
 * inserts hits into the hash table. On success it patches the calling
 * branch instruction (r1). Only fragments of the lookup loops are
 * visible here — do not reorder without the full listing. */
131 .macro dyna_linker_main
132 /* r0 = virtual target address */
133 /* r1 = instruction to patch */
147 ldr r5, [r3, r2, lsl #2]
149 add r6, r1, r12, asr #6
164 moveq pc, r4 /* Stale i-cache */
166 b 1b /* jump_in may have dupes, continue search */
169 beq 3f /* r0 not in jump_in */
175 and r1, r7, #0xff000000
178 add r1, r1, r2, lsr #8
182 /* hash_table lookup */
185 eor r4, r0, r0, lsl #16
191 ldr r5, [r3, r2, lsl #2]
198 /* jump_dirty lookup */
208 /* hash_table insert */
/* dyna_linker: normal (non-delay-slot) dynamic link entry. Expands
 * dyna_linker_main; when the target block is not yet compiled it calls
 * new_recompile_block to generate it. */
222 FUNCTION(dyna_linker):
223 /* r0 = virtual target address */
224 /* r1 = instruction to patch */
229 bl new_recompile_block
237 .size dyna_linker, .-dyna_linker
/* exec_pagefault: raise a MIPS memory exception for a faulting fetch.
 * Writes the COP0 register file at fp+LO_reg_cop0 (offsets are reg*4):
 * EPC(14)=r0, BadVAddr(8)=r1, Cause(13)=r2, and updates Status(12),
 * Context(4) and EntryHi(10) from the fault address. */
239 FUNCTION(exec_pagefault):
240 /* r0 = instruction pointer */
241 /* r1 = fault address */
243 ldr r3, [fp, #LO_reg_cop0+48] /* Status */
245 ldr r4, [fp, #LO_reg_cop0+16] /* Context */
246 bic r6, r6, #0x0F800000
247 str r0, [fp, #LO_reg_cop0+56] /* EPC */
249 str r1, [fp, #LO_reg_cop0+32] /* BadVAddr */
251 str r3, [fp, #LO_reg_cop0+48] /* Status */
252 and r5, r6, r1, lsr #9
253 str r2, [fp, #LO_reg_cop0+52] /* Cause */
254 and r1, r1, r6, lsl #9
255 str r1, [fp, #LO_reg_cop0+40] /* EntryHi */
257 str r4, [fp, #LO_reg_cop0+16] /* Context */
263 /* Special dynamic linker for the case where a page fault
264 may occur in a branch delay slot */
265 FUNCTION(dyna_linker_ds):
266 /* r0 = virtual target address */
267 /* r1 = instruction to patch */
274 bl new_recompile_block
/* Cause code 8 (AdEL-style) with bit 31 set to flag the delay slot. */
281 mov r2, #0x80000008 /* High bit set indicates pagefault in delay slot */
284 .size dyna_linker_ds, .-dyna_linker_ds
/* jump_vaddr_rN trampolines: one entry per register that may hold the
 * target virtual address. Each computes the first hash step
 * (r2 = rN ^ (rN << 16)) and then continues into the common jump_vaddr
 * path (the intervening mov/branch lines are not visible in this view).
 * r7 is emitted last and likely falls through to jump_vaddr. */
294 FUNCTION(jump_vaddr_r0):
295 eor r2, r0, r0, lsl #16
297 .size jump_vaddr_r0, .-jump_vaddr_r0
298 FUNCTION(jump_vaddr_r1):
299 eor r2, r1, r1, lsl #16
302 .size jump_vaddr_r1, .-jump_vaddr_r1
303 FUNCTION(jump_vaddr_r2):
305 eor r2, r2, r2, lsl #16
307 .size jump_vaddr_r2, .-jump_vaddr_r2
308 FUNCTION(jump_vaddr_r3):
309 eor r2, r3, r3, lsl #16
312 .size jump_vaddr_r3, .-jump_vaddr_r3
313 FUNCTION(jump_vaddr_r4):
314 eor r2, r4, r4, lsl #16
317 .size jump_vaddr_r4, .-jump_vaddr_r4
318 FUNCTION(jump_vaddr_r5):
319 eor r2, r5, r5, lsl #16
322 .size jump_vaddr_r5, .-jump_vaddr_r5
323 FUNCTION(jump_vaddr_r6):
324 eor r2, r6, r6, lsl #16
327 .size jump_vaddr_r6, .-jump_vaddr_r6
328 FUNCTION(jump_vaddr_r8):
329 eor r2, r8, r8, lsl #16
332 .size jump_vaddr_r8, .-jump_vaddr_r8
333 FUNCTION(jump_vaddr_r9):
334 eor r2, r9, r9, lsl #16
337 .size jump_vaddr_r9, .-jump_vaddr_r9
338 FUNCTION(jump_vaddr_r10):
339 eor r2, r10, r10, lsl #16
342 .size jump_vaddr_r10, .-jump_vaddr_r10
343 FUNCTION(jump_vaddr_r12):
344 eor r2, r12, r12, lsl #16
347 .size jump_vaddr_r12, .-jump_vaddr_r12
348 FUNCTION(jump_vaddr_r7):
349 eor r2, r7, r7, lsl #16
351 .size jump_vaddr_r7, .-jump_vaddr_r7
/* jump_vaddr: common tail of the jump_vaddr_rN stubs. Finishes the hash
 * (masking with r3, shifting by 12), saves the cycle counter (r10)
 * around the C lookup call, restores it, and jumps to the result. */
352 FUNCTION(jump_vaddr):
355 and r2, r3, r2, lsr #12
362 str r10, [fp, #LO_cycle_count]
364 ldr r10, [fp, #LO_cycle_count]
366 .size jump_vaddr, .-jump_vaddr
/* verify_code / verify_code_vm / verify_code_ds: check that a compiled
 * block's source memory is unchanged. The _ds variant additionally
 * spills r8 to branch_target first and restores it at the end (r8 holds
 * the delay-slot branch target). Compare-loop body not visible here. */
370 FUNCTION(verify_code_ds):
371 str r8, [fp, #LO_branch_target]
372 FUNCTION(verify_code_vm):
373 FUNCTION(verify_code):
401 ldr r8, [fp, #LO_branch_target]
406 .size verify_code, .-verify_code
407 .size verify_code_vm, .-verify_code_vm
/* cc_interrupt: invoked when the emulated cycle count (r10) expires.
 * Publishes r10 to psxRegs.cycle, runs interrupt processing, then
 * reloads cycle/next_interupt/pending_exception/stop and either returns
 * to compiled code or unwinds to the C caller (ldmnefd pops the frame
 * pushed by new_dyna_start when stop is set). Every 64K cycles-ish
 * (r10 >> 17 mask) it sweeps restore_candidate to re-clean blocks. */
410 FUNCTION(cc_interrupt):
411 ldr r0, [fp, #LO_last_count]
415 str r1, [fp, #LO_pending_exception]
416 and r2, r2, r10, lsr #17
417 add r3, fp, #LO_restore_candidate
418 str r10, [fp, #LO_cycle] /* PCSX cycles */
419 @@ str r10, [fp, #LO_reg_cop0+36] /* Count */
427 ldr r10, [fp, #LO_cycle]
428 ldr r0, [fp, #LO_next_interupt]
429 ldr r1, [fp, #LO_pending_exception]
430 ldr r2, [fp, #LO_stop]
431 str r0, [fp, #LO_last_count]
434 ldmnefd sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, pc}
438 ldr r0, [fp, #LO_pcaddr]
442 /* Move 'dirty' blocks to the 'clean' list */
453 .size cc_interrupt, .-cc_interrupt
/* do_interrupt: resume execution at the saved PC (pcaddr) after an
 * interrupt/exception has been delivered; remainder not visible. */
456 FUNCTION(do_interrupt):
457 ldr r0, [fp, #LO_pcaddr]
461 .size do_interrupt, .-do_interrupt
/* fp_exception: raise a coprocessor-unusable style exception —
 * r0 = faulting PC. Stores EPC, updates Status, and writes the Cause
 * code (value set in lines not visible here). */
464 FUNCTION(fp_exception):
467 ldr r1, [fp, #LO_reg_cop0+48] /* Status */
469 str r0, [fp, #LO_reg_cop0+56] /* EPC */
472 str r1, [fp, #LO_reg_cop0+48] /* Status */
473 str r2, [fp, #LO_reg_cop0+52] /* Cause */
477 .size fp_exception, .-fp_exception
/* fp_exception_ds: delay-slot variant of fp_exception; pre-loads the
 * Cause value with the BD (branch-delay) bit set and shares the rest
 * of fp_exception's body (fall-through lines not visible). */
479 FUNCTION(fp_exception_ds):
480 mov r2, #0x90000000 /* Set high bit if delay slot */
482 .size fp_exception_ds, .-fp_exception_ds
/* jump_syscall: raise a SYSCALL exception — r0 = PC of the syscall
 * instruction. Saves EPC, updates Status, writes Cause (value prepared
 * in lines not visible here). */
485 FUNCTION(jump_syscall):
486 ldr r1, [fp, #LO_reg_cop0+48] /* Status */
488 str r0, [fp, #LO_reg_cop0+56] /* EPC */
491 str r1, [fp, #LO_reg_cop0+48] /* Status */
492 str r2, [fp, #LO_reg_cop0+52] /* Cause */
496 .size jump_syscall, .-jump_syscall
/* jump_syscall_hle: deliver a syscall exception through the C-side
 * psxException (cause 0x20, not in delay slot). Cycle counter is
 * synced to psxRegs before the call and reloaded after, since the HLE
 * path may reenter the recompiler. */
500 FUNCTION(jump_syscall_hle):
501 str r0, [fp, #LO_pcaddr] /* PC must be set to EPC for psxException */
502 ldr r2, [fp, #LO_last_count]
503 mov r1, #0 /* in delay slot */
505 mov r0, #0x20 /* cause */
506 str r2, [fp, #LO_cycle] /* PCSX cycle counter */
509 /* note: psxException might do recursive recompiler call from its HLE code,
510 * so be ready for this */
512 ldr r1, [fp, #LO_next_interupt]
513 ldr r10, [fp, #LO_cycle]
514 ldr r0, [fp, #LO_pcaddr]
516 str r1, [fp, #LO_last_count]
519 .size jump_syscall_hle, .-jump_syscall_hle
/* jump_hlecall: enter a BIOS HLE handler — r0 = PC to resume at.
 * Syncs pcaddr and the PCSX cycle counter before dispatching (the
 * dispatch itself is in lines not visible here). */
522 FUNCTION(jump_hlecall):
523 ldr r2, [fp, #LO_last_count]
524 str r0, [fp, #LO_pcaddr]
527 str r2, [fp, #LO_cycle] /* PCSX cycle counter */
529 .size jump_hlecall, .-jump_hlecall
/* jump_intcall: enter the interpreter (execI path) — r0 = PC to resume
 * at. Mirrors jump_hlecall: sync pcaddr and the PCSX cycle counter,
 * then dispatch (dispatch lines not visible in this view). */
532 FUNCTION(jump_intcall):
533 ldr r2, [fp, #LO_last_count]
534 str r0, [fp, #LO_pcaddr]
537 str r2, [fp, #LO_cycle] /* PCSX cycle counter */
/* Fixed copy-paste bug: the .size directive previously named
 * jump_hlecall, giving that symbol a bogus overlapping extent in the
 * ELF symbol table and leaving jump_intcall with no size. */
539 .size jump_intcall, .-jump_intcall
/* new_dyna_leave: exit translated code back to the C caller of
 * new_dyna_start. Publishes the cycle counter (r10) to psxRegs.cycle,
 * then pops the exact register frame new_dyna_start pushed (ip included
 * only to keep the stack 8-byte aligned per EABI) and returns via pc. */
542 FUNCTION(new_dyna_leave):
543 ldr r0, [fp, #LO_last_count]
546 str r10, [fp, #LO_cycle]
547 ldmfd sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, pc}
548 .size new_dyna_leave, .-new_dyna_leave
/* invalidate_addr_rN: per-register entry stubs used by generated code
 * to invalidate the block containing an address held in register N.
 * Each saves the caller-saved set {r0-r3,r12,lr} into the start of
 * dynarec_local (stmia fp), moves rN into r0 (the mov lines are not
 * visible in this view), and branches to the shared
 * invalidate_addr_call. The r12 variant is last and appears to fall
 * through to invalidate_addr_call — confirm against full source. */
551 FUNCTION(invalidate_addr_r0):
552 stmia fp, {r0, r1, r2, r3, r12, lr}
553 b invalidate_addr_call
554 .size invalidate_addr_r0, .-invalidate_addr_r0
556 FUNCTION(invalidate_addr_r1):
557 stmia fp, {r0, r1, r2, r3, r12, lr}
559 b invalidate_addr_call
560 .size invalidate_addr_r1, .-invalidate_addr_r1
562 FUNCTION(invalidate_addr_r2):
563 stmia fp, {r0, r1, r2, r3, r12, lr}
565 b invalidate_addr_call
566 .size invalidate_addr_r2, .-invalidate_addr_r2
568 FUNCTION(invalidate_addr_r3):
569 stmia fp, {r0, r1, r2, r3, r12, lr}
571 b invalidate_addr_call
572 .size invalidate_addr_r3, .-invalidate_addr_r3
574 FUNCTION(invalidate_addr_r4):
575 stmia fp, {r0, r1, r2, r3, r12, lr}
577 b invalidate_addr_call
578 .size invalidate_addr_r4, .-invalidate_addr_r4
580 FUNCTION(invalidate_addr_r5):
581 stmia fp, {r0, r1, r2, r3, r12, lr}
583 b invalidate_addr_call
584 .size invalidate_addr_r5, .-invalidate_addr_r5
586 FUNCTION(invalidate_addr_r6):
587 stmia fp, {r0, r1, r2, r3, r12, lr}
589 b invalidate_addr_call
590 .size invalidate_addr_r6, .-invalidate_addr_r6
592 FUNCTION(invalidate_addr_r7):
593 stmia fp, {r0, r1, r2, r3, r12, lr}
595 b invalidate_addr_call
596 .size invalidate_addr_r7, .-invalidate_addr_r7
598 FUNCTION(invalidate_addr_r8):
599 stmia fp, {r0, r1, r2, r3, r12, lr}
601 b invalidate_addr_call
602 .size invalidate_addr_r8, .-invalidate_addr_r8
604 FUNCTION(invalidate_addr_r9):
605 stmia fp, {r0, r1, r2, r3, r12, lr}
607 b invalidate_addr_call
608 .size invalidate_addr_r9, .-invalidate_addr_r9
610 FUNCTION(invalidate_addr_r10):
611 stmia fp, {r0, r1, r2, r3, r12, lr}
613 b invalidate_addr_call
614 .size invalidate_addr_r10, .-invalidate_addr_r10
616 FUNCTION(invalidate_addr_r12):
617 stmia fp, {r0, r1, r2, r3, r12, lr}
619 .size invalidate_addr_r12, .-invalidate_addr_r12
/* invalidate_addr_call: shared tail for the invalidate_addr_rN stubs.
 * Checks r0 against the [inv_code_start, inv_code_end] fast-path range
 * (comparison lines not visible; presumably skips the C call when the
 * address is inside it), then restores the caller-saved registers from
 * dynarec_local and returns by loading pc (lr was saved in that slot). */
621 invalidate_addr_call:
622 ldr r12, [fp, #LO_inv_code_start]
623 ldr lr, [fp, #LO_inv_code_end]
627 ldmia fp, {r0, r1, r2, r3, r12, pc}
628 .size invalidate_addr_call, .-invalidate_addr_call
/* new_dyna_start: C-callable entry into translated code. Pushes all
 * EABI callee-saved registers (ip padded in to keep sp 8-byte aligned),
 * points fp at dynarec_local for the duration, loads the resume PC and
 * cycle state, and jumps into compiled code (jump not visible here).
 * The matching frame pop is in new_dyna_leave / cc_interrupt. */
631 FUNCTION(new_dyna_start):
632 /* ip is stored to conform EABI alignment */
633 stmfd sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, lr}
634 load_varadr fp, dynarec_local
635 ldr r0, [fp, #LO_pcaddr]
637 ldr r1, [fp, #LO_next_interupt]
638 ldr r10, [fp, #LO_cycle]
639 str r1, [fp, #LO_last_count]
642 .size new_dyna_start, .-new_dyna_start
644 /* --------------------------------------- */
/* pcsx_read_mem readop, tab_shift: common memory-read dispatcher.
 * Indexes the handler table (r1) by address >> (20+tab_shift); when the
 * entry encodes a direct pointer the load is performed inline with
 * \readop (a conditional ldrcc* variant), otherwise the cycle counter
 * is synced to psxRegs and the C handler is invoked (handler-call lines
 * not visible in this view). */
648 .macro pcsx_read_mem readop tab_shift
649 /* r0 = address, r1 = handler_tab, r2 = cycles */
651 lsr r3, #(20+\tab_shift)
652 ldr r12, [fp, #LO_last_count]
653 ldr r1, [r1, r3, lsl #2]
660 \readop r0, [r1, r3, lsl #\tab_shift]
663 str r2, [fp, #LO_cycle]
/* Read dispatchers by access size. The handler table packs 32-bit,
 * 16-bit and 8-bit sub-tables back to back; the add offsets skip to
 * the matching sub-table before expanding pcsx_read_mem with the
 * size-appropriate conditional load (ldrccb/ldrcch/ldrcc). */
667 FUNCTION(jump_handler_read8):
668 add r1, #0x1000/4*4 + 0x1000/2*4 @ shift to r8 part
669 pcsx_read_mem ldrccb, 0
671 FUNCTION(jump_handler_read16):
672 add r1, #0x1000/4*4 @ shift to r16 part
673 pcsx_read_mem ldrcch, 1
675 FUNCTION(jump_handler_read32):
676 pcsx_read_mem ldrcc, 2
/* pcsx_write_mem wrtop, tab_shift: common memory-write dispatcher.
 * Mirror of pcsx_read_mem: direct stores go through the conditional
 * \wrtop (strcc*), otherwise cycles are synced to psxRegs, the C
 * handler runs, and last_count/next_interupt are refreshed afterwards
 * (handler-call lines not visible in this view). */
679 .macro pcsx_write_mem wrtop tab_shift
680 /* r0 = address, r1 = data, r2 = cycles, r3 = handler_tab */
682 lsr r12, #(20+\tab_shift)
683 ldr r3, [r3, r12, lsl #2]
684 str r0, [fp, #LO_address] @ some handlers still need it..
686 mov r0, r2 @ cycle return in case of direct store
691 \wrtop r1, [r3, r12, lsl #\tab_shift]
694 ldr r12, [fp, #LO_last_count]
698 str r2, [fp, #LO_cycle]
701 ldr r0, [fp, #LO_next_interupt]
703 str r0, [fp, #LO_last_count]
/* Write dispatchers by access size; same sub-table layout as the read
 * side, with conditional stores (strccb/strcch/strcc). */
708 FUNCTION(jump_handler_write8):
709 add r3, #0x1000/4*4 + 0x1000/2*4 @ shift to r8 part
710 pcsx_write_mem strccb, 0
712 FUNCTION(jump_handler_write16):
713 add r3, #0x1000/4*4 @ shift to r16 part
714 pcsx_write_mem strcch, 1
716 FUNCTION(jump_handler_write32):
717 pcsx_write_mem strcc, 2
/* jump_handler_write_h: write path where the handler (r3) is already
 * resolved — no table lookup. Syncs cycles to psxRegs, calls the
 * handler (call line not visible), then refreshes last_count. */
719 FUNCTION(jump_handler_write_h):
720 /* r0 = address, r1 = data, r2 = cycles, r3 = handler */
721 ldr r12, [fp, #LO_last_count]
722 str r0, [fp, #LO_address] @ some handlers still need it..
726 str r2, [fp, #LO_cycle]
729 ldr r0, [fp, #LO_next_interupt]
731 str r0, [fp, #LO_last_count]
/* jump_handle_swl: helper for the MIPS SWL (store word left) unaligned
 * store. Looks up the write table (mem_wtab) for the page, then merges
 * the relevant bytes of r1 depending on address & 3 (only the aligned
 * "0" case's shift is visible here). */
735 FUNCTION(jump_handle_swl):
736 /* r0 = address, r1 = data, r2 = cycles */
737 ldr r3, [fp, #LO_mem_wtab]
739 ldr r3, [r3, r12, lsl #2]
760 lsreq r12, r1, #24 @ 0
/* jump_handle_swr: helper for the MIPS SWR (store word right) unaligned
 * store; mirror of jump_handle_swl (byte-merge cases not visible). */
770 FUNCTION(jump_handle_swr):
771 /* r0 = address, r1 = data, r2 = cycles */
772 ldr r3, [fp, #LO_mem_wtab]
774 ldr r3, [r3, r12, lsl #2]
/* rcntx_read_mode0 num: read a root counter's COUNT in mode 0.
 * rcnts is an array of 7-word entries at fp+LO_rcnts; word 6 of entry
 * \num is cycleStart, so count = cycles_elapsed - cycleStart
 * (subtraction lines not visible in this view). */
796 .macro rcntx_read_mode0 num
797 /* r0 = address, r2 = cycles */
798 ldr r3, [fp, #LO_rcnts+6*4+7*4*\num] @ cycleStart
/* Mode-0 COUNT readers for root counters 0..2; each body is a
 * rcntx_read_mode0 expansion (invocation lines not visible here). */
805 FUNCTION(rcnt0_read_count_m0):
808 FUNCTION(rcnt1_read_count_m0):
811 FUNCTION(rcnt2_read_count_m0):
/* rcnt0 mode-1 COUNT: counter 0 clocked by the pixel clock; elapsed
 * CPU cycles are divided by ~5 via multiply (reciprocal setup lines
 * not visible in this view). */
814 FUNCTION(rcnt0_read_count_m1):
815 /* r0 = address, r2 = cycles */
816 ldr r3, [fp, #LO_rcnts+6*4+7*4*0] @ cycleStart
819 mul r0, r1, r2 @ /= 5
/* rcnt1 mode-1 COUNT: counter 1 clocked by hsync; division by
 * hsync_cycles approximated with a umull reciprocal multiply. */
823 FUNCTION(rcnt1_read_count_m1):
824 /* r0 = address, r2 = cycles */
825 ldr r3, [fp, #LO_rcnts+6*4+7*4*1]
828 umull r3, r0, r1, r2 @ ~ /= hsync_cycles, max ~0x1e6cdd
/* rcnt2 mode-1 COUNT: counter 2 clocked at sysclk/8; the /8 is done
 * with a 16-3 bit shift pair to keep precision in the high half. */
831 FUNCTION(rcnt2_read_count_m1):
832 /* r0 = address, r2 = cycles */
833 ldr r3, [fp, #LO_rcnts+6*4+7*4*2]
834 mov r0, r2, lsl #16-3
835 sub r0, r3, lsl #16-3
839 @ vim:filetype=armasm