1 /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
2 * linkage_arm64.s for PCSX *
3 * Copyright (C) 2009-2011 Ari64 *
4 * Copyright (C) 2021 notaz *
6 * This program is free software; you can redistribute it and/or modify *
7 * it under the terms of the GNU General Public License as published by *
8 * the Free Software Foundation; either version 2 of the License, or *
9 * (at your option) any later version. *
11 * This program is distributed in the hope that it will be useful, *
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of *
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
14 * GNU General Public License for more details. *
16 * You should have received a copy of the GNU General Public License *
17 * along with this program; if not, write to the *
18 * Free Software Foundation, Inc., *
19 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. *
20 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
22 #include "arm_features.h"
23 #include "new_dynarec_config.h"
24 #include "assem_arm64.h"
25 #include "linkage_offsets.h"
28 #error misaligned pointers
/* Backing storage for all of the dynarec's private state (PSX register
 * file copy, cycle counters, handler tables, scratch buffers). Every
 * LO_* constant from linkage_offsets.h is a byte offset into this blob,
 * normally addressed via the rFP register throughout this file. */
34 .type dynarec_local, %object
35 .size dynarec_local, LO_dynarec_local_size
37 .space LO_dynarec_local_size
/* DRC_VAR_(name, vname, size_): declare an assembler symbol `vname` as an
 * alias for the dynarec_local field at offset LO_<name>, so the asm code
 * and the C side agree on one layout. DRC_VAR wraps it with ESYM() to
 * apply any platform symbol-name mangling (leading underscore etc.).
 * NOTE(review): one or more continuation lines of these #defines are not
 * visible in this chunk (embedded numbering jumps 40->42 and 42->45). */
39 #define DRC_VAR_(name, vname, size_) \
40 vname = dynarec_local + LO_##name; \
42 .type vname, %object; \
45 #define DRC_VAR(name, size_) \
46 DRC_VAR_(name, ESYM(name), size_)
/* Named views into dynarec_local. Sizes are in bytes; '#'-commented
 * entries are retained for layout documentation but not emitted.
 * "next_interupt" spelling matches the existing PCSX symbol name. */
48 DRC_VAR(next_interupt, 4)
49 DRC_VAR(cycle_count, 4)
50 DRC_VAR(last_count, 4)
51 DRC_VAR(pending_exception, 4)
53 DRC_VAR(branch_target, 4)
55 #DRC_VAR(align0, 16) /* unused/alignment */
56 DRC_VAR(psxRegs, LO_psxRegs_end - LO_psxRegs)
62 DRC_VAR(reg_cop0, 128)
63 DRC_VAR(reg_cop2d, 128)
64 DRC_VAR(reg_cop2c, 128)
68 #DRC_VAR(interrupt, 4)
69 #DRC_VAR(intCycle, 256)
72 DRC_VAR(inv_code_start, 4)
73 DRC_VAR(inv_code_end, 4)
78 DRC_VAR(zeromem_ptr, 8)
79 DRC_VAR(scratch_buf_ptr, 8)
80 DRC_VAR(ram_offset, 8)
82 DRC_VAR(restore_candidate, 512)
88 /* r0 = virtual target address */
89 /* r1 = instruction to patch */
/* dyna_linker_main: shared lookup/patch sequence used by both dynamic
 * linker entry points below.
 * NOTE(review): the macro body and its .endm are not visible in this
 * chunk (embedded numbering jumps 91 -> 97) — do not infer its contents. */
90 .macro dyna_linker_main
91 /* XXX TODO: should be able to do better than this... */
/* Entry taken from compiled code when a branch target has not yet been
 * linked; resolves/compiles the target block and patches the branch.
 * NOTE(review): body between the comments and .size is not visible here. */
97 FUNCTION(dyna_linker):
98 /* r0 = virtual target address */
99 /* r1 = instruction to patch */
101 .size dyna_linker, .-dyna_linker
/* Handle a fault raised while executing translated code.
 * NOTE(review): the function body is not visible in this chunk
 * (embedded numbering jumps 105 -> 108) — inputs below are from the
 * original comments, behavior cannot be documented further from here. */
103 FUNCTION(exec_pagefault):
104 /* r0 = instruction pointer */
105 /* r1 = fault address */
108 .size exec_pagefault, .-exec_pagefault
110 /* Special dynamic linker for the case where a page fault
111 may occur in a branch delay slot */
/* NOTE(review): body is not visible in this chunk (numbering jumps
 * 114 -> 116); same in/out contract as dyna_linker above. */
112 FUNCTION(dyna_linker_ds):
113 /* r0 = virtual target address */
114 /* r1 = instruction to patch */
116 .size dyna_linker_ds, .-dyna_linker_ds
/* Called from compiled code when the cycle counter (rCC) expires: flush
 * cycles back to psxRegs, run pending PSX events, then either resume
 * translated code or fall out to new_dyna_leave when stopping.
 * NOTE(review): several instruction runs are missing from this chunk
 * (numbering jumps 128->134, 140->144, 150->161); comments below cover
 * only the visible instructions. */
119 FUNCTION(cc_interrupt):
120 ldr w0, [rFP, #LO_last_count]
123 str wzr, [rFP, #LO_pending_exception] /* clear exception flag */
124 and w2, w2, rCC, lsr #17 /* index derived from cycle count — presumably picks a restore_candidate slot; verify against arm version */
125 add x3, rFP, #LO_restore_candidate
126 str rCC, [rFP, #LO_cycle] /* PCSX cycles */
127 # str rCC, [rFP, #LO_reg_cop0+36] /* Count */
128 ldr w19, [x3, w2, uxtw] /* w19 = restore_candidate word (x19 is callee-saved across the C call) */
/* ---- after C event processing (not visible here) ---- */
134 ldr rCC, [rFP, #LO_cycle] /* events may have changed the cycle count */
135 ldr w0, [rFP, #LO_next_interupt]
136 ldr w1, [rFP, #LO_pending_exception]
137 ldr w2, [rFP, #LO_stop]
138 str w0, [rFP, #LO_last_count]
140 cbnz w2, new_dyna_leave /* stop flag set -> exit the dynarec loop */
144 ldr w0, [rFP, #LO_pcaddr]
148 /* Move 'dirty' blocks to the 'clean' list */
150 str wzr, [x3, w2, uxtw] /* consume the restore_candidate word */
161 .size cc_interrupt, .-cc_interrupt
/* Raise a PSX coprocessor-unusable style exception: save EPC, update
 * Status and Cause in the cop0 register block, then (in code not visible
 * here — numbering jumps 167->169, 173->177) jump to the exception vector. */
164 FUNCTION(fp_exception):
167 ldr w1, [rFP, #LO_reg_cop0+48] /* Status */
169 str w0, [rFP, #LO_reg_cop0+56] /* EPC */
172 str w1, [rFP, #LO_reg_cop0+48] /* Status */
173 str w2, [rFP, #LO_reg_cop0+52] /* Cause */
177 .size fp_exception, .-fp_exception
/* Delay-slot variant of fp_exception: preload Cause with the BD bit set,
 * then (body not visible in this chunk) share the common exception path. */
179 FUNCTION(fp_exception_ds):
180 mov w2, #0x90000000 /* Set high bit if delay slot */
182 .size fp_exception_ds, .-fp_exception_ds
/* Raise a PSX SYSCALL exception: save EPC, update Status/Cause in cop0.
 * NOTE(review): instructions between the visible loads/stores and the
 * vector jump are missing from this chunk (numbering jumps 192 -> 196). */
185 FUNCTION(jump_syscall):
186 ldr w1, [rFP, #LO_reg_cop0+48] /* Status */
188 str w0, [rFP, #LO_reg_cop0+56] /* EPC */
191 str w1, [rFP, #LO_reg_cop0+48] /* Status */
192 str w2, [rFP, #LO_reg_cop0+52] /* Cause */
196 .size jump_syscall, .-jump_syscall
199 /* note: psxException might do recursive recompiler call from it's HLE code,
200 * so be ready for this */
/* Re-enter translated code at pcaddr after an exception/HLE call:
 * reload the cycle bookkeeping and (in code not visible here) dispatch
 * to the block for the new pc. */
201 FUNCTION(jump_to_new_pc):
202 ldr w1, [rFP, #LO_next_interupt]
203 ldr rCC, [rFP, #LO_cycle]
204 ldr w0, [rFP, #LO_pcaddr]
206 str w1, [rFP, #LO_last_count]
209 .size jump_to_new_pc, .-jump_to_new_pc
211 /* stack must be aligned by 16, and include space for save_regs() use */
/* C entry point into the recompiler loop. Saves all AAPCS64 callee-saved
 * GPRs (x19-x28, x29/x30) in one SSP_ALL-sized frame, loads the cycle
 * bookkeeping, and (in code not visible here — numbering jumps 221->223
 * and 224->228) dispatches to the block at pcaddr.
 * NOTE(review): x0 appears to be the dynarec context (it is indexed with
 * LO_* offsets before rFP is used) — confirm against the caller. */
213 FUNCTION(new_dyna_start):
214 stp x29, x30, [sp, #-SSP_ALL]!
215 ldr w1, [x0, #LO_next_interupt]
216 ldr w2, [x0, #LO_cycle]
217 stp x19, x20, [sp, #16*1]
218 stp x21, x22, [sp, #16*2]
219 stp x23, x24, [sp, #16*3]
220 stp x25, x26, [sp, #16*4]
221 stp x27, x28, [sp, #16*5]
223 ldr w0, [rFP, #LO_pcaddr]
224 str w1, [rFP, #LO_last_count]
228 .size new_dyna_start, .-new_dyna_start
/* Exit the recompiler loop back to the C caller of new_dyna_start:
 * flush the cycle counter to psxRegs, restore the callee-saved registers
 * pushed by new_dyna_start, and return (ret not visible in this chunk —
 * numbering jumps 240 -> 242). */
231 FUNCTION(new_dyna_leave):
232 ldr w0, [rFP, #LO_last_count]
234 str rCC, [rFP, #LO_cycle]
235 ldp x19, x20, [sp, #16*1]
236 ldp x21, x22, [sp, #16*2]
237 ldp x23, x24, [sp, #16*3]
238 ldp x25, x26, [sp, #16*4]
239 ldp x27, x28, [sp, #16*5]
240 ldp x29, x30, [sp], #SSP_ALL /* matches the pre-indexed push in new_dyna_start */
242 .size new_dyna_leave, .-new_dyna_leave
244 /* --------------------------------------- */
/* memhandler_pre: before calling a C memory handler, flush the current
 * cycle count into psxRegs so the handler sees up-to-date timing.
 * NOTE(review): .endm and some lines are missing from this chunk
 * (numbering jumps 252 -> 255 and 258 -> 262). */
248 .macro memhandler_pre
249 /* w0 = adddr/data, x1 = rhandler, w2 = cycles, x3 = whandler */
250 ldr w4, [rFP, #LO_last_count]
252 str w4, [rFP, #LO_cycle]
/* memhandler_post: after the handler returns, reload the (possibly
 * changed) cycle count and refresh last_count. */
255 .macro memhandler_post
256 ldr w0, [rFP, #LO_next_interupt]
257 ldr w2, [rFP, #LO_cycle] // memhandlers can modify cc, like dma
258 str w0, [rFP, #LO_last_count]
/* Callable wrappers for the memhandler_pre/post macros above.
 * NOTE(review): both function bodies are missing from this chunk
 * (numbering jumps 262 -> 266 and past 266) — presumably each expands
 * the matching macro and returns; verify against the full file. */
262 FUNCTION(do_memhandler_pre):
266 FUNCTION(do_memhandler_post):
/* pcsx_read_mem \readop \tab_shift: common body for the read handlers.
 * Indexes handler_tab by bits [tab_shift..11] of the address; the table
 * entry either encodes a direct memory base (fast path, \readop load) or
 * a C handler to call (slow path, lr saved below).
 * NOTE(review): .endm and several lines are missing from this chunk
 * (numbering jumps 273->276, 276->279, 279 onward). */
270 .macro pcsx_read_mem readop tab_shift
271 /* w0 = address, x1 = handler_tab, w2 = cycles */
272 ubfm w4, w0, #\tab_shift, #11 /* w4 = page index from address bits */
273 ldr x3, [x1, w4, uxtw #3] /* x3 = handler_tab[page] (8-byte entries) */
276 \readop w0, [x3, w4, uxtw #\tab_shift]
279 stp xzr, x30, [sp, #-16]! /* save lr (16-byte slot keeps sp aligned) */
/* Byte/half/word read entry points from translated code. The 8/16-bit
 * variants first offset x1 into the r16/r8 sub-tables of handler_tab
 * (each sub-table is 0x1000/width entries of 8 bytes). */
284 FUNCTION(jump_handler_read8):
285 add x1, x1, #0x1000/4*8 + 0x1000/2*8 /* shift to r8 part */
286 pcsx_read_mem ldrb, 0
289 FUNCTION(jump_handler_read16):
290 add x1, x1, #0x1000/4*8 /* shift to r16 part */
291 pcsx_read_mem ldrh, 1
/* NOTE(review): read32's macro invocation and the slow-path code between
 * 294 and 298 are missing from this chunk. */
294 FUNCTION(jump_handler_read32):
298 ldp xzr, x30, [sp], #16 /* restore lr saved by pcsx_read_mem */
/* pcsx_write_mem \wrtop \movop \tab_shift: common body for the write
 * handlers; mirrors pcsx_read_mem. Fast path stores with \wrtop through
 * the table-decoded base; slow path saves lr and stashes the address for
 * C handlers that still read LO_address.
 * NOTE(review): .endm and several lines are missing from this chunk
 * (numbering jumps 304->307, 308->311, 312 onward). */
301 .macro pcsx_write_mem wrtop movop tab_shift
302 /* w0 = address, w1 = data, w2 = cycles, x3 = handler_tab */
303 ubfm w4, w0, #\tab_shift, #11 /* w4 = page index from address bits */
304 ldr x3, [x3, w4, uxtw #3] /* x3 = handler_tab[page] */
307 mov w0, w2 /* cycle return */
308 \wrtop w1, [x3, w4, uxtw #\tab_shift]
311 stp xzr, x30, [sp, #-16]! /* save lr (16-byte slot keeps sp aligned) */
312 str w0, [rFP, #LO_address] /* some handlers still need it... */
/* Byte/half/word write entry points from translated code; the 8/16-bit
 * variants offset x3 into the matching sub-table first (same layout as
 * the read tables above). */
318 FUNCTION(jump_handler_write8):
319 add x3, x3, #0x1000/4*8 + 0x1000/2*8 /* shift to r8 part */
320 pcsx_write_mem strb uxtb 0
323 FUNCTION(jump_handler_write16):
324 add x3, x3, #0x1000/4*8 /* shift to r16 part */
325 pcsx_write_mem strh uxth 1
328 FUNCTION(jump_handler_write32):
329 pcsx_write_mem str mov 2
/* NOTE(review): code between 329 and 333 is missing from this chunk. */
333 ldp xzr, x30, [sp], #16 /* restore lr saved by pcsx_write_mem */
/* Implement the MIPS SWL (store word left, unaligned store) instruction:
 * decode the write-table entry for the address, then branch on the low
 * two address bits (kept in x3's low bits) to the per-alignment cases.
 * NOTE(review): large parts of the body are missing from this chunk
 * (numbering jumps 338->340, 340->345, 346->357, 357 onward). */
336 FUNCTION(jump_handle_swl):
337 /* w0 = address, w1 = data, w2 = cycles */
338 ldr x3, [rFP, #LO_mem_wtab]
340 ldr x3, [x3, w4, uxtw #3]
345 tbz x3, #1, 10f // & 2
346 tbz x3, #0, 2f // & 1
357 tbz x3, #0, 0f // & 1
/* Implement the MIPS SWR (store word right) instruction; mirrors
 * jump_handle_swl with the opposite byte selection.
 * NOTE(review): large parts of the body are missing from this chunk
 * (numbering jumps 373->375, 375->380, 381->389, 389 onward). */
371 FUNCTION(jump_handle_swr):
372 /* w0 = address, w1 = data, w2 = cycles */
373 ldr x3, [rFP, #LO_mem_wtab]
375 ldr x3, [x3, w4, uxtw #3]
380 tbz x3, #1, 10f // & 2
381 tbz x3, #0, 2f // & 1
389 tbz x3, #0, 0f // & 1
/* Trampoline from translated code into the C GTE-stall accounting:
 * flush cycles, preserve lr in dynarec_local (this is called outside the
 * normal ABI stack discipline), call out (call itself not visible here —
 * numbering jumps 409 -> 411), then restore lr. */
403 FUNCTION(call_gteStall):
404 /* w0 = op_cycles, w1 = cycles */
405 ldr w2, [rFP, #LO_last_count]
406 str lr, [rFP, #LO_saved_lr] /* lr saved in memory, not on the stack */
408 str w1, [rFP, #LO_cycle]
409 add x1, rFP, #LO_psxRegs
411 ldr lr, [rFP, #LO_saved_lr]