1 /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
2 * linkage_arm64.s for PCSX *
3 * Copyright (C) 2009-2011 Ari64 *
4 * Copyright (C) 2021 notaz *
6 * This program is free software; you can redistribute it and/or modify *
7 * it under the terms of the GNU General Public License as published by *
8 * the Free Software Foundation; either version 2 of the License, or *
9 * (at your option) any later version. *
11 * This program is distributed in the hope that it will be useful, *
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of *
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
14 * GNU General Public License for more details. *
16 * You should have received a copy of the GNU General Public License *
17 * along with this program; if not, write to the *
18 * Free Software Foundation, Inc., *
19 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. *
20 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
22 #include "arm_features.h"
23 #include "new_dynarec_config.h"
24 #include "assem_arm64.h"
25 #include "linkage_offsets.h"
/* Single state blob for the dynarec core. Every runtime variable the
   generated code needs lives at a fixed LO_* offset (linkage_offsets.h)
   inside this object, so JIT code can address all of it from one base
   register (rFP) plus an immediate offset. */
30 .type dynarec_local, %object
31 .size dynarec_local, LO_dynarec_local_size
33 .space LO_dynarec_local_size
/* DRC_VAR_(name, vname, size_): bind symbol 'vname' to the fixed
   LO_<name> offset inside dynarec_local and type it as a data object,
   so the assembler and the C side share one agreed layout.
   DRC_VAR(name, size_) is the usual entry point; it applies the
   platform symbol decoration via ESYM() on top.
   (No comments inside the macros: the '\' continuations must stay
   unbroken.) */
35 #define DRC_VAR_(name, vname, size_) \
36 vname = dynarec_local + LO_##name; \
38 .type vname, %object; \
41 #define DRC_VAR(name, size_) \
42 DRC_VAR_(name, ESYM(name), size_)
/* Scalar dynarec bookkeeping state (sizes in bytes). */
44 DRC_VAR(next_interupt, 4)
45 DRC_VAR(cycle_count, 4)
46 DRC_VAR(last_count, 4)
47 DRC_VAR(pending_exception, 4)
49 DRC_VAR(branch_target, 4)
51 #DRC_VAR(align0, 16) /* unused/alignment */
/* Emulated PSX CPU register file, plus COP0/GTE register banks. */
52 DRC_VAR(psxRegs, LO_psxRegs_end - LO_psxRegs)
58 DRC_VAR(reg_cop0, 128)
59 DRC_VAR(reg_cop2d, 128)
60 DRC_VAR(reg_cop2c, 128)
64 #DRC_VAR(interrupt, 4)
65 #DRC_VAR(intCycle, 256)
/* NOTE(review): names suggest the address range of recently invalidated
   translated code - confirm against the invalidation logic in the C side. */
68 DRC_VAR(inv_code_start, 4)
69 DRC_VAR(inv_code_end, 4)
/* Pointer-sized (8-byte) entries. */
74 DRC_VAR(zeromem_ptr, 8)
75 DRC_VAR(scratch_buf_ptr, 8)
76 #DRC_VAR(align1, 16) /* unused/alignment */
/* Bitmap consulted by cc_interrupt when moving 'dirty' blocks to the
   'clean' list. */
78 DRC_VAR(restore_candidate, 512)
/* dyna_linker_main: shared body for the dynamic linker entry points
   below (dyna_linker / dyna_linker_ds). Resolves the virtual target
   address to compiled code and patches the calling branch instruction.
   The macro body is not shown in this view - see the full file. */
84 /* r0 = virtual target address */
85 /* r1 = instruction to patch */
86 .macro dyna_linker_main
87 /* XXX TODO: should be able to do better than this... */
/* Normal-path dynamic linker: jumped to from a not-yet-linked branch in
   generated code; expected to expand dyna_linker_main (body elided in
   this view). */
93 FUNCTION(dyna_linker):
94 /* r0 = virtual target address */
95 /* r1 = instruction to patch */
97 .size dyna_linker, .-dyna_linker
/* Handle a page fault raised while executing translated code; raises
   the corresponding exception in the emulated CPU (body elided in this
   view). */
99 FUNCTION(exec_pagefault):
100 /* r0 = instruction pointer */
101 /* r1 = fault address */
104 .size exec_pagefault, .-exec_pagefault
106 /* Special dynamic linker for the case where a page fault
107 may occur in a branch delay slot */
/* Same contract as dyna_linker, but for branch-delay-slot targets
   (body elided in this view). */
108 FUNCTION(dyna_linker_ds):
109 /* r0 = virtual target address */
110 /* r1 = instruction to patch */
112 .size dyna_linker_ds, .-dyna_linker_ds
/* cc_interrupt: called from generated code when the cycle counter (rCC)
   expires. Flushes cycle state to dynarec_local, services pending
   events (call site elided in this view), then either resumes
   execution or exits via new_dyna_leave when 'stop' is set.
   NOTE(review): several instructions between the numbered lines are
   not visible here; per-line comments describe only what is shown. */
115 FUNCTION(cc_interrupt):
116 ldr w0, [rFP, #LO_last_count]
119 str wzr, [rFP, #LO_pending_exception]  /* clear pending_exception flag */
120 and w2, w2, rCC, lsr #17               /* w2 = index into restore_candidate bitmap, derived from cycle count */
121 add x3, rFP, #LO_restore_candidate     /* x3 = &restore_candidate[0] */
122 str rCC, [rFP, #LO_cycle] /* PCSX cycles */
123 # str rCC, [rFP, #LO_reg_cop0+36] /* Count */
124 ldr w19, [x3, w2, uxtw]                /* w19 = restore_candidate word (x19 is callee-saved, survives calls) */
/* Reload state that the event handling may have changed. */
130 ldr rCC, [rFP, #LO_cycle]
131 ldr w0, [rFP, #LO_next_interupt]
132 ldr w1, [rFP, #LO_pending_exception]
133 ldr w2, [rFP, #LO_stop]
134 str w0, [rFP, #LO_last_count]
136 cbnz w2, new_dyna_leave                /* stop requested -> unwind out of the dynarec */
140 ldr w0, [rFP, #LO_pcaddr]
144 /* Move 'dirty' blocks to the 'clean' list */
146 str wzr, [x3, w2, uxtw]                /* clear the processed restore_candidate word */
157 .size cc_interrupt, .-cc_interrupt
/* Raise an emulated CPU exception: record EPC, update Status and set
   Cause in the COP0 register bank, then transfer control to the
   exception vector (jump elided in this view). */
160 FUNCTION(fp_exception):
163 ldr w1, [fp, #LO_reg_cop0+48] /* Status */
165 str w0, [fp, #LO_reg_cop0+56] /* EPC */
168 str w1, [fp, #LO_reg_cop0+48] /* Status */
169 str w2, [fp, #LO_reg_cop0+52] /* Cause */
173 .size fp_exception, .-fp_exception
/* Delay-slot variant of fp_exception: same path, but the Cause value
   carries the branch-delay (BD) high bit. */
175 FUNCTION(fp_exception_ds):
176 mov w2, #0x90000000 /* Set high bit if delay slot */
178 .size fp_exception_ds, .-fp_exception_ds
/* Raise the emulated SYSCALL exception: record EPC, update Status and
   Cause in COP0, then enter the exception vector (elided in this
   view). Mirrors the fp_exception sequence above. */
181 FUNCTION(jump_syscall):
182 ldr w1, [fp, #LO_reg_cop0+48] /* Status */
184 str w0, [fp, #LO_reg_cop0+56] /* EPC */
187 str w1, [fp, #LO_reg_cop0+48] /* Status */
188 str w2, [fp, #LO_reg_cop0+52] /* Cause */
192 .size jump_syscall, .-jump_syscall
195 /* note: psxException might do recursive recompiler call from its HLE code,
196 * so be ready for this */
/* Resume translated execution at the address stored in pcaddr:
   reload cycle bookkeeping from dynarec_local, then dispatch (the
   lookup/branch is elided in this view). */
197 FUNCTION(jump_to_new_pc):
198 ldr w1, [fp, #LO_next_interupt]
199 ldr rCC, [fp, #LO_cycle]
200 ldr w0, [fp, #LO_pcaddr]
202 str w1, [fp, #LO_last_count]
205 .size jump_to_new_pc, .-jump_to_new_pc
207 /* stack must be aligned by 16, and include space for save_regs() use */
/* Entry from C into translated code. Saves all AAPCS64 callee-saved
   registers (x19-x28, x29/x30) in one SSP_ALL-sized frame, loads the
   starting PC and cycle state, then jumps into the JIT (dispatch
   elided in this view). New_dyna_leave (below) unwinds this frame.
   NOTE(review): x0 is dereferenced with LO_* offsets before rFP is
   used, so x0 presumably carries the dynarec_local base on entry and
   rFP is set from it in lines not shown - confirm in the full file. */
209 FUNCTION(new_dyna_start):
210 stp x29, x30, [sp, #-SSP_ALL]!
211 ldr w1, [x0, #LO_next_interupt]
212 ldr w2, [x0, #LO_cycle]
213 stp x19, x20, [sp, #16*1]
214 stp x21, x22, [sp, #16*2]
215 stp x23, x24, [sp, #16*3]
216 stp x25, x26, [sp, #16*4]
217 stp x27, x28, [sp, #16*5]
219 ldr w0, [rFP, #LO_pcaddr]
220 str w1, [rFP, #LO_last_count]
224 .size new_dyna_start, .-new_dyna_start
/* Exit from translated code back to the C caller of new_dyna_start:
   flush the cycle counter to dynarec_local, restore all callee-saved
   registers saved by new_dyna_start, and pop its SSP_ALL frame. */
227 FUNCTION(new_dyna_leave):
228 ldr w0, [rFP, #LO_last_count]
230 str rCC, [rFP, #LO_cycle]
231 ldp x19, x20, [sp, #16*1]
232 ldp x21, x22, [sp, #16*2]
233 ldp x23, x24, [sp, #16*3]
234 ldp x25, x26, [sp, #16*4]
235 ldp x27, x28, [sp, #16*5]
236 ldp x29, x30, [sp], #SSP_ALL
238 .size new_dyna_leave, .-new_dyna_leave
240 /* --------------------------------------- */
/* memhandler_pre: sync emulated cycle state into dynarec_local before
   calling a C memory handler, so the handler sees an up-to-date cycle
   count. */
244 .macro memhandler_pre
245 /* w0 = addr/data, x1 = rhandler, w2 = cycles, x3 = whandler */
246 ldr w4, [rFP, #LO_last_count]
248 str w4, [rFP, #LO_cycle]
/* memhandler_post: reload cycle bookkeeping after a C memory handler
   returns (the handler may have scheduled events / changed cycle). */
251 .macro memhandler_post
252 ldr w2, [rFP, #LO_next_interupt]
253 ldr w1, [rFP, #LO_cycle]
255 str w2, [rFP, #LO_last_count]
/* Callable wrappers around the memhandler_pre/memhandler_post macros,
   for use by generated code (macro expansions elided in this view). */
258 FUNCTION(do_memhandler_pre):
262 FUNCTION(do_memhandler_post):
/* pcsx_read_mem readop tab_shift: common body for the read handlers.
   Extracts the in-page index from the address (ubfm), looks up the
   per-page entry in the handler table (8 bytes per entry), and either
   performs a direct \readop through the table pointer or falls back to
   calling a C handler (fallback path elided in this view).
   tab_shift matches the access size: 0 = byte, 1 = half, 2 = word. */
266 .macro pcsx_read_mem readop tab_shift
267 /* w0 = address, x1 = handler_tab, w2 = cycles */
268 ubfm w4, w0, #\tab_shift, #11          /* w4 = in-page element index */
269 ldr x3, [x1, w4, uxtw #3]              /* x3 = table entry for this page */
272 \readop w0, [x3, w4, uxtw #\tab_shift] /* direct load, scaled by access size */
275 stp xzr, x30, [sp, #-16]!              /* 16-byte-aligned frame to save lr before the C call */
/* Read dispatchers: the handler table holds three 0x1000/4-entry
   sub-tables (r32, r16, r8 in that order); the 8/16-bit entry points
   advance x1 to the matching sub-table before expanding pcsx_read_mem. */
280 FUNCTION(jump_handler_read8):
281 add x1, x1, #0x1000/4*8 + 0x1000/2*8 /* shift to r8 part */
282 pcsx_read_mem ldrb, 0
285 FUNCTION(jump_handler_read16):
286 add x1, x1, #0x1000/4*8 /* shift to r16 part */
287 pcsx_read_mem ldrh, 1
290 FUNCTION(jump_handler_read32):
/* Shared return path: restore lr saved by pcsx_read_mem. */
294 ldp xzr, x30, [sp], #16
/* pcsx_write_mem wrtop movop tab_shift: common body for the write
   handlers. Mirrors pcsx_read_mem: index the per-page table entry and
   either perform a direct \wrtop store or fall back to a C handler
   (fallback path elided in this view). Returns the cycle count in w0. */
297 .macro pcsx_write_mem wrtop movop tab_shift
298 /* w0 = address, w1 = data, w2 = cycles, x3 = handler_tab */
299 ubfm w4, w0, #\tab_shift, #11          /* w4 = in-page element index */
300 ldr x3, [x3, w4, uxtw #3]              /* x3 = table entry for this page */
303 mov w0, w2 /* cycle return */
304 \wrtop w1, [x3, w4, uxtw #\tab_shift]  /* direct store, scaled by access size */
307 stp xzr, x30, [sp, #-16]!              /* save lr before calling the C handler */
308 str w0, [rFP, #LO_address] /* some handlers still need it... */
/* Write dispatchers: same sub-table layout as the read side (r32, r16,
   r8); 8/16-bit entry points advance x3 to the matching sub-table. */
314 FUNCTION(jump_handler_write8):
315 add x3, x3, #0x1000/4*8 + 0x1000/2*8 /* shift to r8 part */
316 pcsx_write_mem strb uxtb 0
319 FUNCTION(jump_handler_write16):
320 add x3, x3, #0x1000/4*8 /* shift to r16 part */
321 pcsx_write_mem strh uxth 1
324 FUNCTION(jump_handler_write32):
325 pcsx_write_mem str mov 2
/* Shared return path: restore lr saved by pcsx_write_mem. */
329 ldp xzr, x30, [sp], #16
/* Emulate the MIPS SWL (store word left) unaligned store: look up the
   page entry in mem_wtab, then branch on the low address bits (carried
   in x3 here) to store the correct partial-word bytes. Several case
   bodies between the numbered lines are elided in this view. */
332 FUNCTION(jump_handle_swl):
333 /* w0 = address, w1 = data, w2 = cycles */
334 ldr x3, [fp, #LO_mem_wtab]
336 ldr x3, [x3, w4, uxtw #3]              /* x3 = write-table entry for this page */
341 tbz x3, #1, 10f // & 2
342 tbz x3, #0, 2f // & 1
353 tbz x3, #0, 0f // & 1
/* Emulate the MIPS SWR (store word right) unaligned store; mirror
   image of jump_handle_swl above. The function continues past the end
   of this view; case bodies between the numbered lines are elided. */
367 FUNCTION(jump_handle_swr):
368 /* w0 = address, w1 = data, w2 = cycles */
369 ldr x3, [fp, #LO_mem_wtab]
371 ldr x3, [x3, w4, uxtw #3]              /* x3 = write-table entry for this page */
376 tbz x3, #1, 10f // & 2
377 tbz x3, #0, 2f // & 1
385 tbz x3, #0, 0f // & 1