1 /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
2 * linkage_arm64.s for PCSX *
3 * Copyright (C) 2009-2011 Ari64 *
4 * Copyright (C) 2021 notaz *
6 * This program is free software; you can redistribute it and/or modify *
7 * it under the terms of the GNU General Public License as published by *
8 * the Free Software Foundation; either version 2 of the License, or *
9 * (at your option) any later version. *
11 * This program is distributed in the hope that it will be useful, *
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of *
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
14 * GNU General Public License for more details. *
16 * You should have received a copy of the GNU General Public License *
17 * along with this program; if not, write to the *
18 * Free Software Foundation, Inc., *
19 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. *
20 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
22 #include "arm_features.h"
23 #include "new_dynarec_config.h"
24 #include "assem_arm64.h"
25 #include "linkage_offsets.h"
/* dynarec_local: a single zero-initialized state block of
 * LO_dynarec_local_size bytes. All emulator state that generated code
 * touches lives in here and is reached via rFP-relative LO_* offsets
 * (offsets defined in linkage_offsets.h). */
30 .type dynarec_local, %object
31 .size dynarec_local, LO_dynarec_local_size
33 .space LO_dynarec_local_size
/* DRC_VAR_/DRC_VAR: alias a field of dynarec_local (at offset LO_<name>,
 * size_ bytes) as a named object symbol so C code and this file agree on
 * the layout. DRC_VAR wraps the name in ESYM() for targets that prefix
 * C symbols with an underscore. NOTE(review): these are backslash-
 * continued preprocessor macros — keep any added text outside their
 * bodies or the continuation breaks. */
35 #define DRC_VAR_(name, vname, size_) \
36 vname = dynarec_local + LO_##name; \
38 .type vname, %object; \
41 #define DRC_VAR(name, size_) \
42 DRC_VAR_(name, ESYM(name), size_)
/* Named fields inside dynarec_local (sizes in bytes).
 * "interupt" spelling matches the C-side symbol and must stay as-is. */
44 DRC_VAR(next_interupt, 4)
45 DRC_VAR(cycle_count, 4)
46 DRC_VAR(last_count, 4)
47 DRC_VAR(pending_exception, 4)
49 DRC_VAR(branch_target, 4)
51 #DRC_VAR(align0, 16) /* unused/alignment */
52 DRC_VAR(psxRegs, LO_psxRegs_end - LO_psxRegs)
/* coprocessor register files: 128 bytes = 32 regs x 4 bytes each */
58 DRC_VAR(reg_cop0, 128)
59 DRC_VAR(reg_cop2d, 128)
60 DRC_VAR(reg_cop2c, 128)
64 #DRC_VAR(interrupt, 4)
65 #DRC_VAR(intCycle, 256)
/* presumably the address range of code invalidated by writes — verify
 * against the C side */
68 DRC_VAR(inv_code_start, 4)
69 DRC_VAR(inv_code_end, 4)
74 DRC_VAR(zeromem_ptr, 8)
75 DRC_VAR(scratch_buf_ptr, 8)
76 #DRC_VAR(align1, 16) /* unused/alignment */
/* bitmap consulted by cc_interrupt below when moving 'dirty' blocks
 * back to the 'clean' list */
78 DRC_VAR(restore_candidate, 512)
84 /* r0 = virtual target address */
85 /* r1 = instruction to patch */
/* dyna_linker_main: shared body used by dyna_linker/dyna_linker_ds
 * below — resolves the target and patches the calling branch.
 * NOTE(review): the "r0/r1" comments are legacy 32-bit ARM naming; on
 * this AArch64 build the arguments arrive in w0/w1. */
86 .macro dyna_linker_main
87 /* XXX TODO: should be able to do better than this... */
/* dyna_linker: entered from compiled code when a branch target has not
 * been linked yet; looks up (or triggers compilation of) the target and
 * patches the branch instruction passed in. Args per the comments below
 * (w0/w1 on this AArch64 build). */
93 FUNCTION(dyna_linker):
94 /* r0 = virtual target address */
95 /* r1 = instruction to patch */
97 .size dyna_linker, .-dyna_linker
/* exec_pagefault: entered when execution faults; receives the faulting
 * instruction pointer and the fault address (comments below — w0/w1 on
 * this AArch64 build). NOTE(review): exact trigger/recovery path is in
 * lines not visible here — confirm against the dispatcher. */
99 FUNCTION(exec_pagefault):
100 /* r0 = instruction pointer */
101 /* r1 = fault address */
104 .size exec_pagefault, .-exec_pagefault
106 /* Special dynamic linker for the case where a page fault
107 may occur in a branch delay slot */
/* Same contract as dyna_linker above, but for branch-delay-slot
 * targets (w0 = target address, w1 = instruction to patch). */
108 FUNCTION(dyna_linker_ds):
109 /* r0 = virtual target address */
110 /* r1 = instruction to patch */
112 .size dyna_linker_ds, .-dyna_linker_ds
/* verify_code / verify_code_ds: presumably re-check that the guest code
 * a block was compiled from is unchanged (self-modifying-code guard);
 * _ds is the delay-slot entry that falls into the common path.
 * NOTE(review): bodies not visible here — confirm behavior. */
116 FUNCTION(verify_code_ds):
118 FUNCTION(verify_code):
123 .size verify_code, .-verify_code
124 .size verify_code_ds, .-verify_code_ds
/* cc_interrupt: called from generated code when the cycle counter (rCC)
 * expires. Publishes the current cycle count to the C side, clears the
 * pending-exception flag, scans the restore_candidate bitmap (moving
 * 'dirty' blocks back to 'clean'), then either resumes at LO_pcaddr or
 * exits through new_dyna_leave when LO_stop is set. Several interior
 * lines are elided between the instructions shown. */
127 FUNCTION(cc_interrupt):
128 ldr w0, [rFP, #LO_last_count]
/* clear pending-exception flag before running events */
131 str wzr, [rFP, #LO_pending_exception]
/* w2 = masked index derived from rCC>>17, used to walk the
 * restore_candidate bitmap below */
132 and w2, w2, rCC, lsr #17
133 add x3, rFP, #LO_restore_candidate
134 str rCC, [rFP, #LO_cycle] /* PCSX cycles */
135 # str rCC, [rFP, #LO_reg_cop0+36] /* Count */
/* fetch current restore_candidate word (w19 is callee-saved, so it
 * survives the C calls made in the elided lines) */
136 ldr w19, [x3, w2, uxtw]
/* reload timing/exception state after the C side may have changed it */
142 ldr rCC, [rFP, #LO_cycle]
143 ldr w0, [rFP, #LO_next_interupt]
144 ldr w1, [rFP, #LO_pending_exception]
145 ldr w2, [rFP, #LO_stop]
146 str w0, [rFP, #LO_last_count]
/* stop flag set -> leave the dynarec entirely */
148 cbnz w2, new_dyna_leave
152 ldr w0, [rFP, #LO_pcaddr]
156 /* Move 'dirty' blocks to the 'clean' list */
/* clear the processed restore_candidate word */
158 str wzr, [x3, w2, uxtw]
169 .size cc_interrupt, .-cc_interrupt
/* fp_exception: raise a guest exception — saves the exception PC to
 * COP0 EPC, updates COP0 Status, and writes the exception code into
 * COP0 Cause. Offsets: +48 = Status (reg 12), +52 = Cause (reg 13),
 * +56 = EPC (reg 14), each 4 bytes into reg_cop0. */
172 FUNCTION(fp_exception):
175 ldr w1, [fp, #LO_reg_cop0+48] /* Status */
177 str w0, [fp, #LO_reg_cop0+56] /* EPC */
180 str w1, [fp, #LO_reg_cop0+48] /* Status */
181 str w2, [fp, #LO_reg_cop0+52] /* Cause */
185 .size fp_exception, .-fp_exception
/* fp_exception_ds: delay-slot variant of fp_exception — pre-sets the
 * Cause value with the high (BD, branch-delay) bit before presumably
 * sharing the common fp_exception path (fall-through not visible here). */
187 FUNCTION(fp_exception_ds):
188 mov w2, #0x90000000 /* Set high bit if delay slot */
190 .size fp_exception_ds, .-fp_exception_ds
/* jump_syscall: raise a SYSCALL guest exception. Same COP0 update
 * pattern as fp_exception: save EPC (+56), update Status (+48), write
 * the exception code to Cause (+52). */
193 FUNCTION(jump_syscall):
194 ldr w1, [fp, #LO_reg_cop0+48] /* Status */
196 str w0, [fp, #LO_reg_cop0+56] /* EPC */
199 str w1, [fp, #LO_reg_cop0+48] /* Status */
200 str w2, [fp, #LO_reg_cop0+52] /* Cause */
204 .size jump_syscall, .-jump_syscall
/* jump_syscall_hle: syscall path for HLE BIOS emulation — syncs cycle
 * state with the C side around the psxException call (made in elided
 * lines), then reloads pcaddr to resume. */
208 FUNCTION(jump_syscall_hle):
211 /* note: psxException might do recursive recompiler call from its HLE code,
212 * so be ready for this */
/* reload timing state after the (possibly recursive) C call */
215 ldr w1, [fp, #LO_next_interupt]
216 ldr w10, [fp, #LO_cycle]
217 ldr w0, [fp, #LO_pcaddr]
219 str w1, [fp, #LO_last_count]
222 .size jump_syscall_hle, .-jump_syscall_hle
/* jump_hlecall: dispatch into an HLE (high-level emulated) BIOS call
 * handler. NOTE(review): body not visible here. */
225 FUNCTION(jump_hlecall):
227 .size jump_hlecall, .-jump_hlecall
/* jump_intcall: dispatch to the interpreter/interrupt call path.
 * NOTE(review): body not visible here — confirm against C side. */
230 FUNCTION(jump_intcall):
232 .size jump_intcall, .-jump_intcall
234 /* stack must be aligned by 16, and include space for save_regs() use */
/* new_dyna_start(x0 = dynarec state base): enter translated code.
 * Saves all AAPCS64 callee-saved GPRs (x19-x28 plus the x29/x30 frame
 * pair) in one SSP_ALL-byte frame — the pre-indexed stp keeps sp
 * 16-byte aligned — loads the starting pc and cycle state, then jumps
 * into compiled code (tail elided here). rFP is presumably set from x0
 * in an elided line before the rFP-relative loads below. */
236 FUNCTION(new_dyna_start):
237 stp x29, x30, [sp, #-SSP_ALL]!
238 ldr w1, [x0, #LO_next_interupt]
239 ldr w2, [x0, #LO_cycle]
240 stp x19, x20, [sp, #16*1]
241 stp x21, x22, [sp, #16*2]
242 stp x23, x24, [sp, #16*3]
243 stp x25, x26, [sp, #16*4]
244 stp x27, x28, [sp, #16*5]
/* w0 = guest pc to start at; last_count seeded from next_interupt */
246 ldr w0, [rFP, #LO_pcaddr]
247 str w1, [rFP, #LO_last_count]
251 .size new_dyna_start, .-new_dyna_start
/* new_dyna_leave: counterpart of new_dyna_start — publishes the final
 * cycle count to LO_cycle, restores the callee-saved registers saved on
 * entry, pops the SSP_ALL frame and returns to the C caller. */
254 FUNCTION(new_dyna_leave):
255 ldr w0, [rFP, #LO_last_count]
257 str rCC, [rFP, #LO_cycle]
258 ldp x19, x20, [sp, #16*1]
259 ldp x21, x22, [sp, #16*2]
260 ldp x23, x24, [sp, #16*3]
261 ldp x25, x26, [sp, #16*4]
262 ldp x27, x28, [sp, #16*5]
263 ldp x29, x30, [sp], #SSP_ALL
265 .size new_dyna_leave, .-new_dyna_leave
267 /* --------------------------------------- */
/* memhandler_pre: publish an up-to-date cycle count (derived from
 * LO_last_count and the cycle register; some lines elided here) to
 * LO_cycle before entering a C memory handler, so the handler sees
 * correct timing. */
271 .macro memhandler_pre
272 /* w0 = addr/data, x1 = rhandler, w2 = cycles, x3 = whandler */
273 ldr w4, [rFP, #LO_last_count]
275 str w4, [rFP, #LO_cycle]
/* memhandler_post: after a C memory handler returns, reload
 * next_interupt/cycle (the handler may have rescheduled events) and
 * refresh LO_last_count. */
278 .macro memhandler_post
279 ldr w2, [rFP, #LO_next_interupt]
280 ldr w1, [rFP, #LO_cycle]
282 str w2, [rFP, #LO_last_count]
/* do_memhandler_pre / do_memhandler_post: callable entry points,
 * presumably thin wrappers around the memhandler_pre/_post macros
 * above for use from generated code. NOTE(review): bodies elided. */
285 FUNCTION(do_memhandler_pre):
289 FUNCTION(do_memhandler_post):
/* pcsx_read_mem: common body for jump_handler_read{8,16,32}. Indexes
 * handler_tab by address bits [11:tab_shift]; each 8-byte entry is
 * either a direct pointer (fast path: \readop below) or a handler to
 * call (slow path in elided lines — confirm tag convention on C side). */
293 .macro pcsx_read_mem readop tab_shift
294 /* w0 = address, x1 = handler_tab, w2 = cycles */
/* save lr; xzr slot is padding to keep sp 16-byte aligned */
295 stp xzr, x30, [sp, #-16]!
/* w4 = address bits [11:tab_shift] -> table index */
296 ubfm w4, w0, #\tab_shift, #11
/* x3 = 8-byte table entry for this page */
297 ldr x3, [x1, w4, uxtw #3]
/* direct read: base x3 + index scaled by the access size */
300 \readop w0, [x3, w4, uxtw #\tab_shift]
/* jump_handler_read8: byte loads — advance x1 past the 32-bit and
 * 16-bit sections of handler_tab to its 8-bit part, then expand the
 * common read body with a byte load and no index scaling. */
307 FUNCTION(jump_handler_read8):
308 add x1, x1, #0x1000/4*4 + 0x1000/2*4 /* shift to r8 part */
309 pcsx_read_mem ldrb, 0
/* jump_handler_read16: halfword loads — skip the 32-bit section of
 * handler_tab, then expand the read body with ldrh and x2 scaling. */
312 FUNCTION(jump_handler_read16):
313 add x1, x1, #0x1000/4*4 /* shift to r16 part */
314 pcsx_read_mem ldrh, 1
/* jump_handler_read32: word loads; uses the start of handler_tab
 * directly (expansion elided here). The ldp below restores the lr
 * saved by the macro's stp and pops the 16-byte scratch frame. */
317 FUNCTION(jump_handler_read32):
321 ldp xzr, x30, [sp], #16
/* pcsx_write_mem: common body for jump_handler_write{8,16,32}. Same
 * table lookup as pcsx_read_mem (entry = direct pointer or handler;
 * handler path in elided lines). \movop presumably narrows the data for
 * the handler path — confirm in the full source. Returns the cycle
 * count in w0. */
324 .macro pcsx_write_mem wrtop movop tab_shift
325 /* w0 = address, w1 = data, w2 = cycles, x3 = handler_tab */
/* save lr; xzr slot is alignment padding */
326 stp xzr, x30, [sp, #-16]!
/* w4 = address bits [11:tab_shift] -> table index */
327 ubfm w4, w0, #\tab_shift, #11
/* x3 = 8-byte table entry for this page */
328 ldr x3, [x3, w4, uxtw #3]
329 str w0, [rFP, #LO_address] /* some handlers still need it... */
333 mov w0, w2 /* cycle return */
/* direct write: base x3 + index scaled by the access size */
334 \wrtop w1, [x3, w4, uxtw #\tab_shift]
/* jump_handler_write8: byte stores — advance x3 past the 32-bit and
 * 16-bit sections of handler_tab to its 8-bit part, then expand the
 * common write body with strb/uxtb and no index scaling. */
342 FUNCTION(jump_handler_write8):
343 add x3, x3, #0x1000/4*4 + 0x1000/2*4 /* shift to r8 part */
344 pcsx_write_mem strb uxtb 0
/* jump_handler_write16: halfword stores — skip the 32-bit section of
 * handler_tab, then expand the write body with strh/uxth. */
347 FUNCTION(jump_handler_write16):
348 add x3, x3, #0x1000/4*4 /* shift to r16 part */
349 pcsx_write_mem strh uxth 1
/* jump_handler_write32: word stores — handler_tab used from its start.
 * NOTE(review): lines are elided before the ldp below; it restores the
 * lr saved by the macro's stp and pops the 16-byte scratch frame. */
352 FUNCTION(jump_handler_write32):
353 pcsx_write_mem str mov 2
357 ldp xzr, x30, [sp], #16
360 FUNCTION(jump_handle_swl):
363 FUNCTION(jump_handle_swr):