1 /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
2 * linkage_arm64.s for PCSX *
3 * Copyright (C) 2009-2011 Ari64 *
4 * Copyright (C) 2021 notaz *
6 * This program is free software; you can redistribute it and/or modify *
7 * it under the terms of the GNU General Public License as published by *
8 * the Free Software Foundation; either version 2 of the License, or *
9 * (at your option) any later version. *
11 * This program is distributed in the hope that it will be useful, *
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of *
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
14 * GNU General Public License for more details. *
16 * You should have received a copy of the GNU General Public License *
17 * along with this program; if not, write to the *
18 * Free Software Foundation, Inc., *
19 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. *
20 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
22 #include "arm_features.h"
23 #include "new_dynarec_config.h"
24 #include "assem_arm64.h"
25 #include "linkage_offsets.h"
/* dynarec_local: one contiguous block of emulator state/scratch storage;
 * fields are addressed through the LO_* byte offsets generated in
 * linkage_offsets.h. NOTE(review): the section/label directives that
 * precede these lines are not visible in this excerpt — confirm
 * placement against the full file. */
30 .type dynarec_local, %object
31 .size dynarec_local, LO_dynarec_local_size
33 .space LO_dynarec_local_size
/* DRC_VAR_(name, vname, size_): alias the symbol <vname> to the field of
 * dynarec_local at offset LO_<name>, so C code can reference the field
 * directly. DRC_VAR(name, size_) is the convenience form that first runs
 * the name through ESYM() (platform symbol-name wrapper, e.g. leading
 * underscore on some ABIs — defined in arm_features.h, not shown here).
 * NOTE(review): the closing continuation line(s) of DRC_VAR_ (its .size
 * directive) are not visible in this excerpt. */
35 #define DRC_VAR_(name, vname, size_) \
36 vname = dynarec_local + LO_##name; \
38 .type vname, %object; \
41 #define DRC_VAR(name, size_) \
42 DRC_VAR_(name, ESYM(name), size_)
/* C-visible views into dynarec_local. Second argument is the field size
 * in bytes; offsets must stay in sync with linkage_offsets.h. Entries
 * commented out with '#' are unused or exist only for alignment. */
44 DRC_VAR(next_interupt, 4)
45 DRC_VAR(cycle_count, 4)
46 DRC_VAR(last_count, 4)
47 DRC_VAR(pending_exception, 4)
49 DRC_VAR(branch_target, 4)
51 #DRC_VAR(align0, 16) /* unused/alignment */
52 DRC_VAR(psxRegs, LO_psxRegs_end - LO_psxRegs)
58 DRC_VAR(reg_cop0, 128)
59 DRC_VAR(reg_cop2d, 128)
60 DRC_VAR(reg_cop2c, 128)
64 #DRC_VAR(interrupt, 4)
65 #DRC_VAR(intCycle, 256)
68 DRC_VAR(inv_code_start, 4)
69 DRC_VAR(inv_code_end, 4)
74 DRC_VAR(zeromem_ptr, 8)
75 DRC_VAR(scratch_buf_ptr, 8)
76 #DRC_VAR(align1, 16) /* unused/alignment */
78 DRC_VAR(restore_candidate, 512)
/* dyna_linker_main: shared body for the dynamic-linker entry points
 * below. The macro body and its .endm are not visible in this excerpt.
 * NOTE(review): the r0/r1 register naming in the comments is an arm32
 * carry-over; on arm64 these presumably map to w0/w1 — confirm against
 * the full file. */
84 /* r0 = virtual target address */
85 /* r1 = instruction to patch */
86 .macro dyna_linker_main
87 /* XXX: should be able to do better than this... */
/* dyna_linker: resolve a direct branch to a not-yet-compiled block and
 * patch the branch instruction at the call site (instruction body is
 * elided in this excerpt; presumably expands dyna_linker_main — TODO
 * confirm). */
93 FUNCTION(dyna_linker):
94 /* r0 = virtual target address */
95 /* r1 = instruction to patch */
97 .size dyna_linker, .-dyna_linker
/* exec_pagefault: handle a page fault taken while executing translated
 * code (instruction body is elided in this excerpt). */
99 FUNCTION(exec_pagefault):
100 /* r0 = instruction pointer */
101 /* r1 = fault address */
104 .size exec_pagefault, .-exec_pagefault
106 /* Special dynamic linker for the case where a page fault
107 may occur in a branch delay slot */
/* (instruction body is elided in this excerpt) */
108 FUNCTION(dyna_linker_ds):
109 /* r0 = virtual target address */
110 /* r1 = instruction to patch */
112 .size dyna_linker_ds, .-dyna_linker_ds
/* jump_vaddr: dispatch to the compiled block for a virtual PSX address.
 * Register conventions and body are not visible in this excerpt — see
 * the full file. */
116 FUNCTION(jump_vaddr):
118 .size jump_vaddr, .-jump_vaddr
/* verify_code / verify_code_vm / verify_code_ds: verify that the source
 * a block was compiled from still matches guest memory; the three entry
 * points appear to share a common tail (bodies elided in this excerpt).
 * NOTE(review): no .size directive for verify_code_ds is visible here —
 * confirm against the full file. */
122 FUNCTION(verify_code_ds):
124 FUNCTION(verify_code_vm):
125 FUNCTION(verify_code):
130 .size verify_code, .-verify_code
131 .size verify_code_vm, .-verify_code_vm
/* cc_interrupt: entered from translated code when the cycle counter
 * expires; presumably services pending emulator events — body is elided
 * in this excerpt, confirm against the full file. */
134 FUNCTION(cc_interrupt):
136 .size cc_interrupt, .-cc_interrupt
/* do_interrupt: interrupt-dispatch entry point (body is elided in this
 * excerpt — see the full file for register usage). */
139 FUNCTION(do_interrupt):
141 .size do_interrupt, .-do_interrupt
/* fp_exception: raise a guest coprocessor exception. Reads/updates the
 * COP0 registers kept in dynarec_local: Status (reg_cop0+48), Cause
 * (reg_cop0+52) and EPC (reg_cop0+56). Several instructions between the
 * visible loads/stores are elided in this excerpt. */
144 FUNCTION(fp_exception):
147 ldr w1, [fp, #LO_reg_cop0+48] /* Status */
149 str w0, [fp, #LO_reg_cop0+56] /* EPC */
152 str w1, [fp, #LO_reg_cop0+48] /* Status */
153 str w2, [fp, #LO_reg_cop0+52] /* Cause */
157 .size fp_exception, .-fp_exception
/* fp_exception_ds: delay-slot variant of fp_exception. Seeds Cause in
 * w2 with the BD bit (bit 31) set; presumably falls through or branches
 * into the shared fp_exception tail — the rest of the body is elided in
 * this excerpt, confirm against the full file. */
159 FUNCTION(fp_exception_ds):
160 mov w2, #0x90000000 /* Set high bit if delay slot */
162 .size fp_exception_ds, .-fp_exception_ds
/* jump_syscall: raise a guest SYSCALL exception — updates COP0 Status
 * (reg_cop0+48), Cause (reg_cop0+52) and EPC (reg_cop0+56) in
 * dynarec_local. Several instructions between the visible loads/stores
 * are elided in this excerpt. */
165 FUNCTION(jump_syscall):
166 ldr w1, [fp, #LO_reg_cop0+48] /* Status */
168 str w0, [fp, #LO_reg_cop0+56] /* EPC */
171 str w1, [fp, #LO_reg_cop0+48] /* Status */
172 str w2, [fp, #LO_reg_cop0+52] /* Cause */
176 .size jump_syscall, .-jump_syscall
/* jump_syscall_hle: high-level-emulation path for SYSCALL. After the
 * (elided) HLE call it reloads scheduling state — next_interupt, cycle
 * and pcaddr — from dynarec_local, because the HLE code may have
 * re-entered the recompiler and changed them. */
180 FUNCTION(jump_syscall_hle):
183 /* note: psxException might do recursive recompiler call from its HLE code,
184 * so be ready for this */
187 ldr w1, [fp, #LO_next_interupt]
188 ldr w10, [fp, #LO_cycle]
189 ldr w0, [fp, #LO_pcaddr]
191 str w1, [fp, #LO_last_count]
194 .size jump_syscall_hle, .-jump_syscall_hle
/* jump_hlecall: trampoline into an HLE (high-level emulation) handler
 * (body is elided in this excerpt). */
197 FUNCTION(jump_hlecall):
199 .size jump_hlecall, .-jump_hlecall
/* jump_intcall: trampoline for an interpreter call-out (body is elided
 * in this excerpt — see the full file). */
202 FUNCTION(jump_intcall):
204 .size jump_intcall, .-jump_intcall
206 /* stack must be aligned by 16, and include space for save_regs() use */
/* new_dyna_start: C -> translated-code entry point. Builds an
 * SSP_ALL-byte frame saving fp/lr and the AAPCS64 callee-saved regs
 * x19-x28, then loads pcaddr/cycle bookkeeping and enters compiled
 * code. x0 is used as the dynarec_local base on entry (LO_* offsets are
 * applied to it); the instruction establishing rFP from x0, and the
 * final jump, are elided in this excerpt — TODO confirm. */
208 FUNCTION(new_dyna_start):
209 stp x29, x30, [sp, #-SSP_ALL]!
210 ldr w1, [x0, #LO_next_interupt]
211 ldr w2, [x0, #LO_cycle]
212 stp x19, x20, [sp, #16*1]
213 stp x21, x22, [sp, #16*2]
214 stp x23, x24, [sp, #16*3]
215 stp x25, x26, [sp, #16*4]
216 stp x27, x28, [sp, #16*5]
218 ldr w0, [rFP, #LO_pcaddr]
219 str w1, [rFP, #LO_last_count]
223 .size new_dyna_start, .-new_dyna_start
/* new_dyna_leave: translated-code -> C exit path. Writes the running
 * cycle counter (rCC) back to dynarec_local (an instruction between the
 * last_count load and the store is elided in this excerpt), restores
 * the callee-saved registers saved by new_dyna_start, pops the SSP_ALL
 * frame and returns to the C caller. */
226 FUNCTION(new_dyna_leave):
227 ldr w0, [rFP, #LO_last_count]
229 str rCC, [rFP, #LO_cycle]
230 ldp x19, x20, [sp, #16*1]
231 ldp x21, x22, [sp, #16*2]
232 ldp x23, x24, [sp, #16*3]
233 ldp x25, x26, [sp, #16*4]
234 ldp x27, x28, [sp, #16*5]
235 ldp x29, x30, [sp], #SSP_ALL
237 .size new_dyna_leave, .-new_dyna_leave
239 /* --------------------------------------- */
/* Memory-access trampolines invoked from translated code for loads,
 * stores and the unaligned swl/swr store halves. All bodies (and .size
 * directives) are elided in this excerpt — see the full file for the
 * register conventions of each handler. */
243 FUNCTION(jump_handler_read8):
246 FUNCTION(jump_handler_read16):
249 FUNCTION(jump_handler_read32):
252 FUNCTION(jump_handler_write8):
255 FUNCTION(jump_handler_write16):
258 FUNCTION(jump_handler_write32):
261 FUNCTION(jump_handler_write_h):
264 FUNCTION(jump_handle_swl):
267 FUNCTION(jump_handle_swr):