1 /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
2 * linkage_arm.s for PCSX *
3 * Copyright (C) 2009-2011 Ari64 *
4 * Copyright (C) 2021 notaz *
6 * This program is free software; you can redistribute it and/or modify *
7 * it under the terms of the GNU General Public License as published by *
8 * the Free Software Foundation; either version 2 of the License, or *
9 * (at your option) any later version. *
11 * This program is distributed in the hope that it will be useful, *
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of *
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
14 * GNU General Public License for more details. *
16 * You should have received a copy of the GNU General Public License *
17 * along with this program; if not, write to the *
18 * Free Software Foundation, Inc., *
19 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. *
20 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
22 #include "arm_features.h"
23 #include "new_dynarec_config.h"
24 #include "assem_arm64.h"
25 #include "linkage_offsets.h"
28 #error misaligned pointers
/* dynarec_local: one contiguous block of dynarec state, addressed at run
 * time via rFP plus the LO_* offsets generated in linkage_offsets.h.
 * NOTE(review): the section/.align directives for this object are elided
 * from this excerpt — confirm placement in the full file. */
34 .type dynarec_local, %object
35 .size dynarec_local, LO_dynarec_local_size
37 .space LO_dynarec_local_size
/* DRC_VAR_(name, vname, size_): binds the assembler symbol `vname` to the
 * slot dynarec_local + LO_<name>, so asm and C share the same storage.
 * DRC_VAR(name, size_) is the common wrapper that applies ESYM() name
 * mangling.  NOTE(review): some backslash-continued lines of these macros
 * (e.g. the .size directive, original lines 41/43-44) are elided here, so
 * no comments are inserted between the continuation lines. */
39 #define DRC_VAR_(name, vname, size_) \
40 vname = dynarec_local + LO_##name; \
42 .type vname, %object; \
45 #define DRC_VAR(name, size_) \
46 DRC_VAR_(name, ESYM(name), size_)
/* Layout of dynarec_local: each DRC_VAR(name, size) below publishes a
 * C-visible symbol aliasing a slot inside dynarec_local.  The sizes must
 * stay in sync with the C declarations and with linkage_offsets.h.
 * ("interupt" spelling is historical and must match the LO_next_interupt
 * offset macro used throughout this file.) */
48 DRC_VAR(next_interupt, 4)
49 DRC_VAR(cycle_count, 4)
50 DRC_VAR(last_count, 4)
51 DRC_VAR(pending_exception, 4)
53 DRC_VAR(branch_target, 4)
/* Entire PSX register file block; size derived from the two offsets. */
56 DRC_VAR(psxRegs, LO_psxRegs_end - LO_psxRegs)
62 DRC_VAR(reg_cop0, 128)
63 DRC_VAR(reg_cop2d, 128)
64 DRC_VAR(reg_cop2c, 128)
/* Disabled entries kept for reference ('#' starts a comment in GAS): */
68 #DRC_VAR(interrupt, 4)
69 #DRC_VAR(intCycle, 256)
72 DRC_VAR(inv_code_start, 4)
73 DRC_VAR(inv_code_end, 4)
78 DRC_VAR(zeromem_ptr, 8)
79 DRC_VAR(scratch_buf_ptr, 8)
80 DRC_VAR(ram_offset, 8)
82 DRC_VAR(restore_candidate, 512)
/* dyna_linker_main: common body shared by the dyna_linker* entry points
 * below.  NOTE(review): the macro body and .endm are elided from this
 * excerpt; only the argument contract is visible. */
88 /* r0 = virtual target address */
89 /* r1 = instruction to patch */
90 .macro dyna_linker_main
91 /* XXX TODO: should be able to do better than this... */
/* dyna_linker: resolves a not-yet-compiled branch target and patches the
 * branch instruction that reached it.  NOTE(review): body elided here —
 * presumably expands dyna_linker_main; confirm in the full file. */
97 FUNCTION(dyna_linker):
98 /* r0 = virtual target address */
99 /* r1 = instruction to patch */
101 .size dyna_linker, .-dyna_linker
/* exec_pagefault: entered when executing from an unmapped page; receives
 * the faulting instruction pointer and the fault address.  NOTE(review):
 * body elided from this excerpt. */
103 FUNCTION(exec_pagefault):
104 /* r0 = instruction pointer */
105 /* r1 = fault address */
108 .size exec_pagefault, .-exec_pagefault
110 /* Special dynamic linker for the case where a page fault
111 may occur in a branch delay slot */
/* Same contract as dyna_linker, but for targets reached from a MIPS
 * branch delay slot.  NOTE(review): body elided from this excerpt. */
112 FUNCTION(dyna_linker_ds):
113 /* r0 = virtual target address */
114 /* r1 = instruction to patch */
116 .size dyna_linker_ds, .-dyna_linker_ds
/* cc_interrupt: reached when the translated code's cycle budget (rCC)
 * expires.  Visible behavior: flushes rCC to LO_cycle, clears
 * pending_exception, indexes restore_candidate by (rCC >> 17) & w2,
 * then (after elided C calls) reloads cycle/next_interupt/stop and
 * either exits through new_dyna_leave when 'stop' is set or resumes at
 * LO_pcaddr.  NOTE(review): several instructions (original lines
 * 121-122, 129-133, 139, 141-160) are elided from this excerpt. */
119 FUNCTION(cc_interrupt):
120 ldr w0, [rFP, #LO_last_count]
123 str wzr, [rFP, #LO_pending_exception]
124 and w2, w2, rCC, lsr #17
125 add x3, rFP, #LO_restore_candidate
126 str rCC, [rFP, #LO_cycle] /* PCSX cycles */
127 # str rCC, [rFP, #LO_reg_cop0+36] /* Count */
/* w19 = restore_candidate word for this block's index */
128 ldr w19, [x3, w2, uxtw]
/* resync with state possibly modified by the (elided) event handlers */
134 ldr rCC, [rFP, #LO_cycle]
135 ldr w0, [rFP, #LO_next_interupt]
136 ldr w1, [rFP, #LO_pending_exception]
137 ldr w2, [rFP, #LO_stop]
138 str w0, [rFP, #LO_last_count]
/* emulator requested stop -> unwind to new_dyna_start's caller */
140 cbnz w2, new_dyna_leave
144 ldr w0, [rFP, #LO_pcaddr]
148 /* Move 'dirty' blocks to the 'clean' list */
/* clear the processed restore_candidate word */
150 str wzr, [x3, w2, uxtw]
161 .size cc_interrupt, .-cc_interrupt
/* fp_exception: raises a coprocessor-unusable style exception: records
 * EPC, updates Status, and writes the cause code into COP0 Cause.
 * In: w0 = faulting PC (stored to EPC).  NOTE(review): the instructions
 * computing w1/w2 updates are partly elided from this excerpt. */
164 FUNCTION(fp_exception):
167 ldr w1, [rFP, #LO_reg_cop0+48] /* Status */
169 str w0, [rFP, #LO_reg_cop0+56] /* EPC */
172 str w1, [rFP, #LO_reg_cop0+48] /* Status */
173 str w2, [rFP, #LO_reg_cop0+52] /* Cause */
177 .size fp_exception, .-fp_exception
/* fp_exception_ds: delay-slot variant of fp_exception; presets the Cause
 * value with the BD (branch delay) bit set, then presumably falls through
 * into the shared fp_exception path (elided from this excerpt). */
179 FUNCTION(fp_exception_ds):
180 mov w2, #0x90000000 /* Set high bit if delay slot */
182 .size fp_exception_ds, .-fp_exception_ds
/* jump_break{,_ds} / jump_syscall{,_ds}: entry points for the MIPS BREAK
 * and SYSCALL exceptions (and their branch-delay-slot variants).  The
 * visible shared tail stores the exception PC (w2) to LO_pcaddr and
 * flushes the cycle counter before calling into the C exception handler
 * (call elided from this excerpt). */
185 FUNCTION(jump_break_ds):
189 FUNCTION(jump_break):
193 FUNCTION(jump_syscall_ds):
197 FUNCTION(jump_syscall):
202 ldr w3, [rFP, #LO_last_count]
203 str w2, [rFP, #LO_pcaddr]
205 str rCC, [rFP, #LO_cycle] /* PCSX cycles */
208 /* note: psxException might do recursive recompiler call from it's HLE code,
209 * so be ready for this */
/* jump_to_new_pc: re-enters translated code at LO_pcaddr with a freshly
 * computed cycle budget (cycle vs next_interupt).  NOTE(review): the
 * subtraction producing rCC and the final dispatch are elided here. */
210 FUNCTION(jump_to_new_pc):
211 ldr w1, [rFP, #LO_next_interupt]
212 ldr rCC, [rFP, #LO_cycle]
213 ldr w0, [rFP, #LO_pcaddr]
215 str w1, [rFP, #LO_last_count]
218 .size jump_to_new_pc, .-jump_to_new_pc
220 /* stack must be aligned by 16, and include space for save_regs() use */
/* new_dyna_start: C -> dynarec entry point.  Saves all AArch64
 * callee-saved GPRs (x19-x28, fp, lr) in an SSP_ALL-sized frame, loads
 * the initial cycle budget from the context in x0, and enters translated
 * code at LO_pcaddr.  NOTE(review): x0 is used as the context base before
 * rFP — presumably rFP is set from x0 in an elided instruction; the
 * final dispatch is also elided.  Matching epilogue: new_dyna_leave. */
222 FUNCTION(new_dyna_start):
223 stp x29, x30, [sp, #-SSP_ALL]!
224 ldr w1, [x0, #LO_next_interupt]
225 ldr w2, [x0, #LO_cycle]
226 stp x19, x20, [sp, #16*1]
227 stp x21, x22, [sp, #16*2]
228 stp x23, x24, [sp, #16*3]
229 stp x25, x26, [sp, #16*4]
230 stp x27, x28, [sp, #16*5]
232 ldr w0, [rFP, #LO_pcaddr]
233 str w1, [rFP, #LO_last_count]
237 .size new_dyna_start, .-new_dyna_start
/* new_dyna_leave: dynarec -> C exit path.  Flushes rCC back to LO_cycle,
 * restores the callee-saved registers pushed by new_dyna_start, and pops
 * the SSP_ALL frame.  NOTE(review): the final `ret` is elided from this
 * excerpt. */
240 FUNCTION(new_dyna_leave):
241 ldr w0, [rFP, #LO_last_count]
243 str rCC, [rFP, #LO_cycle]
244 ldp x19, x20, [sp, #16*1]
245 ldp x21, x22, [sp, #16*2]
246 ldp x23, x24, [sp, #16*3]
247 ldp x25, x26, [sp, #16*4]
248 ldp x27, x28, [sp, #16*5]
249 ldp x29, x30, [sp], #SSP_ALL
251 .size new_dyna_leave, .-new_dyna_leave
253 /* --------------------------------------- */
/* memhandler_pre: sync emulated cycle count into LO_cycle before calling
 * a C memory handler (which may inspect/modify it, e.g. for DMA).
 * memhandler_post: reload cycle state after the handler returns.
 * NOTE(review): the .endm lines and some body instructions are elided
 * from this excerpt. */
257 .macro memhandler_pre
258 /* w0 = adddr/data, x1 = rhandler, w2 = cycles, x3 = whandler */
259 ldr w4, [rFP, #LO_last_count]
261 str w4, [rFP, #LO_cycle]
264 .macro memhandler_post
265 ldr w0, [rFP, #LO_next_interupt]
266 ldr w2, [rFP, #LO_cycle] // memhandlers can modify cc, like dma
267 str w0, [rFP, #LO_last_count]
/* Callable wrappers around the memhandler_pre/_post macros, for use from
 * generated code.  NOTE(review): bodies elided from this excerpt. */
271 FUNCTION(do_memhandler_pre):
275 FUNCTION(do_memhandler_post):
/* pcsx_read_mem: shared body for the jump_handler_read* entries.
 * Extracts the table index from address bits [tab_shift..11] (ubfm),
 * fetches the per-page entry from handler_tab, and for direct-mapped
 * pages performs the load with \readop; otherwise saves lr (stp) and
 * falls into the C-handler path (partly elided from this excerpt). */
279 .macro pcsx_read_mem readop tab_shift
280 /* w0 = address, x1 = handler_tab, w2 = cycles */
281 ubfm w4, w0, #\tab_shift, #11
282 ldr x3, [x1, w4, uxtw #3]
285 \readop w0, [x3, w4, uxtw #\tab_shift]
/* keep sp 16-aligned while preserving lr for the handler call */
288 stp xzr, x30, [sp, #-16]!
/* jump_handler_read8/16/32: memory-read trampolines for 8/16/32-bit
 * accesses.  The byte/halfword variants first offset x1 into the r8/r16
 * sub-table before expanding pcsx_read_mem.  NOTE(review): the read32
 * body and surrounding instructions are elided from this excerpt. */
293 FUNCTION(jump_handler_read8):
294 add x1, x1, #0x1000/4*8 + 0x1000/2*8 /* shift to r8 part */
295 pcsx_read_mem ldrb, 0
298 FUNCTION(jump_handler_read16):
299 add x1, x1, #0x1000/4*8 /* shift to r16 part */
300 pcsx_read_mem ldrh, 1
303 FUNCTION(jump_handler_read32):
/* restore lr pushed by pcsx_read_mem's handler path */
307 ldp xzr, x30, [sp], #16
/* pcsx_write_mem: shared body for the jump_handler_write* entries.
 * Mirrors pcsx_read_mem: index the per-page table by address bits
 * [tab_shift..11], store w1 with \wrtop on the direct path (returning
 * the cycle count in w0), otherwise save lr and hand off to the C
 * handler, stashing the address in LO_address for handlers that need it.
 * NOTE(review): the .endm and branch glue are elided from this excerpt. */
310 .macro pcsx_write_mem wrtop movop tab_shift
311 /* w0 = address, w1 = data, w2 = cycles, x3 = handler_tab */
312 ubfm w4, w0, #\tab_shift, #11
313 ldr x3, [x3, w4, uxtw #3]
316 mov w0, w2 /* cycle return */
317 \wrtop w1, [x3, w4, uxtw #\tab_shift]
320 stp xzr, x30, [sp, #-16]!
321 str w0, [rFP, #LO_address] /* some handlers still need it... */
/* jump_handler_write8/16/32: memory-write trampolines for 8/16/32-bit
 * accesses.  The byte/halfword variants offset x3 into the r8/r16
 * sub-table before expanding pcsx_write_mem.  NOTE(review): instructions
 * between the macro expansions are elided from this excerpt. */
327 FUNCTION(jump_handler_write8):
328 add x3, x3, #0x1000/4*8 + 0x1000/2*8 /* shift to r8 part */
329 pcsx_write_mem strb uxtb 0
332 FUNCTION(jump_handler_write16):
333 add x3, x3, #0x1000/4*8 /* shift to r16 part */
334 pcsx_write_mem strh uxth 1
337 FUNCTION(jump_handler_write32):
338 pcsx_write_mem str mov 2
/* restore lr pushed by pcsx_write_mem's handler path */
342 ldp xzr, x30, [sp], #16
/* jump_handle_swl: emulates the MIPS SWL (store word left, unaligned)
 * instruction.  Looks up the write-table entry for the address's 4 KB
 * page, then dispatches on the low bits tested by tbz (local labels and
 * the per-alignment store sequences are elided from this excerpt). */
345 FUNCTION(jump_handle_swl):
346 /* w0 = address, w1 = data, w2 = cycles */
347 ldr x3, [rFP, #LO_mem_wtab]
348 orr w4, wzr, w0, lsr #12
349 ldr x3, [x3, w4, uxtw #3]
354 tbz x3, #1, 10f // & 2
355 tbz x3, #0, 2f // & 1
366 tbz x3, #0, 0f // & 1
/* jump_handle_swr: emulates the MIPS SWR (store word right, unaligned)
 * instruction; same table lookup and tbz alignment dispatch structure as
 * jump_handle_swl (per-alignment store sequences elided from this
 * excerpt). */
380 FUNCTION(jump_handle_swr):
381 /* w0 = address, w1 = data, w2 = cycles */
382 ldr x3, [rFP, #LO_mem_wtab]
383 orr w4, wzr, w0, lsr #12
384 ldr x3, [x3, w4, uxtw #3]
389 tbz x3, #1, 10f // & 2
390 tbz x3, #0, 2f // & 1
398 tbz x3, #0, 0f // & 1
/* call_gteStall: GTE stall-cycle accounting helper.  Preserves lr in
 * LO_saved_lr (memory, not stack), flushes the cycle count, and sets
 * x1 = &psxRegs before calling into C code (the call itself and the
 * return-value handling are elided from this excerpt). */
412 FUNCTION(call_gteStall):
413 /* w0 = op_cycles, w1 = cycles */
414 ldr w2, [rFP, #LO_last_count]
415 str lr, [rFP, #LO_saved_lr]
417 str w1, [rFP, #LO_cycle]
418 add x1, rFP, #LO_psxRegs
420 ldr lr, [rFP, #LO_saved_lr]