1 /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
2 * linkage_arm.s for PCSX *
3 * Copyright (C) 2009-2011 Ari64 *
4 * Copyright (C) 2021 notaz *
6 * This program is free software; you can redistribute it and/or modify *
7 * it under the terms of the GNU General Public License as published by *
8 * the Free Software Foundation; either version 2 of the License, or *
9 * (at your option) any later version. *
11 * This program is distributed in the hope that it will be useful, *
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of *
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
14 * GNU General Public License for more details. *
16 * You should have received a copy of the GNU General Public License *
17 * along with this program; if not, write to the *
18 * Free Software Foundation, Inc., *
19 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. *
20 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
22 #include "arm_features.h"
23 #include "new_dynarec_config.h"
24 #include "assem_arm64.h"
25 #include "linkage_offsets.h"
28 #error misaligned pointers
/* Backing storage for the dynarec context block.  Every DRC_VAR field below
 * is an alias into this object at its LO_* offset (see linkage_offsets.h).
 * NOTE(review): rFP appears to be kept pointing at this block at runtime
 * (all code below uses [rFP, #LO_*]) — the rFP setup is elided; confirm. */
34 .type dynarec_local, %object
35 .size dynarec_local, LO_dynarec_local_size
37 .space LO_dynarec_local_size
/* DRC_VAR_(name, vname, size_): define symbol vname as an alias for the
 * field at dynarec_local + LO_<name>, marked as an object so C code can
 * reference it by name.  DRC_VAR applies the platform symbol decoration
 * (ESYM) to the C-visible name.  (Some continuation lines of the macro are
 * elided in this view.) */
39 #define DRC_VAR_(name, vname, size_) \
40 vname = dynarec_local + LO_##name; \
42 .type vname, %object; \
45 #define DRC_VAR(name, size_) \
46 DRC_VAR_(name, ESYM(name), size_)
/* Named fields carved out of dynarec_local; sizes are in bytes.
 * psxRegs spans the whole LO_psxRegs..LO_psxRegs_end region. */
48 DRC_VAR(next_interupt, 4)
49 DRC_VAR(cycle_count, 4)
50 DRC_VAR(last_count, 4)
51 DRC_VAR(pending_exception, 4)
53 DRC_VAR(branch_target, 4)
56 DRC_VAR(psxRegs, LO_psxRegs_end - LO_psxRegs)
/* coprocessor register files — 128 bytes each (presumably 32 x 32-bit
 * registers; layout defined by linkage_offsets.h) */
62 DRC_VAR(reg_cop0, 128)
63 DRC_VAR(reg_cop2d, 128)
64 DRC_VAR(reg_cop2c, 128)
/* kept for reference — currently disabled */
68 #DRC_VAR(interrupt, 4)
69 #DRC_VAR(intCycle, 256)
72 DRC_VAR(inv_code_start, 4)
73 DRC_VAR(inv_code_end, 4)
/* 8-byte fields: host-side pointers / offsets */
78 DRC_VAR(zeromem_ptr, 8)
79 DRC_VAR(scratch_buf_ptr, 8)
80 DRC_VAR(ram_offset, 8)
/* Common body shared by dyna_linker and dyna_linker_ds: resolve the target
 * address and patch the calling branch.  (Macro body is elided in this
 * view; no .endm visible here.) */
87 /* r0 = virtual target address */
88 /* r1 = instruction to patch */
89 .macro dyna_linker_main
90 /* XXX TODO: should be able to do better than this... */
/* Lazy-link entry point taken on the first execution of a not-yet-linked
 * branch (body elided in this view). */
96 FUNCTION(dyna_linker):
97 /* r0 = virtual target address */
98 /* r1 = instruction to patch */
100 .size dyna_linker, .-dyna_linker
/* Handle a fault raised while executing translated code (body elided in
 * this view). */
102 FUNCTION(exec_pagefault):
103 /* r0 = instruction pointer */
104 /* r1 = fault address */
107 .size exec_pagefault, .-exec_pagefault
109 /* Special dynamic linker for the case where a page fault
110 may occur in a branch delay slot */
/* Same interface as dyna_linker; body elided in this view. */
111 FUNCTION(dyna_linker_ds):
112 /* r0 = virtual target address */
113 /* r1 = instruction to patch */
115 .size dyna_linker_ds, .-dyna_linker_ds
/* Cycle-count interrupt check: flush rCC into psxRegs.cycle, run pending
 * event handling (C call elided in this view), then either exit through
 * new_dyna_leave when a stop was requested or resume translated code at
 * pcaddr.  Several lines are elided here. */
118 FUNCTION(cc_interrupt):
119 ldr w0, [rFP, #LO_last_count]
121 str wzr, [rFP, #LO_pending_exception] /* clear pending-exception flag */
122 str rCC, [rFP, #LO_cycle] /* PCSX cycles */
123 # str rCC, [rFP, #LO_reg_cop0+36] /* Count */
/* reload state — presumably modified by the elided C call above; confirm */
128 ldr rCC, [rFP, #LO_cycle]
129 ldr w0, [rFP, #LO_next_interupt]
130 ldr w1, [rFP, #LO_pending_exception]
131 ldr w2, [rFP, #LO_stop]
132 str w0, [rFP, #LO_last_count] /* last_count = next_interupt */
134 cbnz w2, new_dyna_leave /* stop flag set -> unwind to C caller */
138 ldr w0, [rFP, #LO_pcaddr]
141 .size cc_interrupt, .-cc_interrupt
/* Raise a guest CPU exception: record EPC and update cop0 Status/Cause.
 * The lines computing w0/w1/w2 values and the final dispatch are elided in
 * this view. */
144 FUNCTION(fp_exception):
147 ldr w1, [rFP, #LO_reg_cop0+48] /* Status */
149 str w0, [rFP, #LO_reg_cop0+56] /* EPC */
152 str w1, [rFP, #LO_reg_cop0+48] /* Status */
153 str w2, [rFP, #LO_reg_cop0+52] /* Cause */
157 .size fp_exception, .-fp_exception
/* Delay-slot variant of fp_exception — presumably shares fp_exception's
 * tail (intervening line elided in this view; confirm). */
159 FUNCTION(fp_exception_ds):
160 mov w2, #0x90000000 /* Set high bit if delay slot */
162 .size fp_exception_ds, .-fp_exception_ds
/* BREAK / SYSCALL exception entry points (the _ds variants are for delay
 * slots).  Bodies are mostly elided in this view; the visible tail saves
 * the guest pc and cycle count before entering C exception handling. */
165 FUNCTION(jump_break_ds):
169 FUNCTION(jump_break):
173 FUNCTION(jump_syscall_ds):
177 FUNCTION(jump_syscall):
182 ldr w3, [rFP, #LO_last_count]
183 str w2, [rFP, #LO_pcaddr] /* w2 presumably holds guest pc — confirm */
185 str rCC, [rFP, #LO_cycle] /* PCSX cycles */
188 /* note: psxException might do recursive recompiler call from its HLE code,
189 * so be ready for this */
/* Resume translated code at psxRegs.pcaddr with a freshly computed cycle
 * count (the final dispatch instruction(s) are elided in this view). */
190 FUNCTION(jump_to_new_pc):
191 ldr w1, [rFP, #LO_next_interupt]
192 ldr rCC, [rFP, #LO_cycle]
193 ldr w0, [rFP, #LO_pcaddr] /* w0 = target guest pc */
195 str w1, [rFP, #LO_last_count]
198 .size jump_to_new_pc, .-jump_to_new_pc
200 /* stack must be aligned by 16, and include space for save_regs() use */
/* Entry from C into translated code.  x0 points at a context block read
 * with the same LO_* offsets as rFP — presumably x0 == dynarec_local and
 * rFP is set from it in an elided line; confirm.  Saves all AAPCS64
 * callee-saved GPRs (x19-x28, x29/x30) in one SSP_ALL-sized frame. */
202 FUNCTION(new_dyna_start):
203 stp x29, x30, [sp, #-SSP_ALL]! /* allocate frame, save fp/lr */
204 ldr w1, [x0, #LO_next_interupt]
205 ldr w2, [x0, #LO_cycle]
206 stp x19, x20, [sp, #16*1]
207 stp x21, x22, [sp, #16*2]
208 stp x23, x24, [sp, #16*3]
209 stp x25, x26, [sp, #16*4]
210 stp x27, x28, [sp, #16*5]
212 ldr w0, [rFP, #LO_pcaddr] /* w0 = guest pc to start at */
213 str w1, [rFP, #LO_last_count]
217 .size new_dyna_start, .-new_dyna_start
/* Exit translated code back to the C caller of new_dyna_start: flush the
 * cycle counter, restore the callee-saved registers saved in new_dyna_start
 * (frame layout must stay in sync with it), and return. */
220 FUNCTION(new_dyna_leave):
221 ldr w0, [rFP, #LO_last_count]
223 str rCC, [rFP, #LO_cycle] /* publish final PCSX cycle count */
224 ldp x19, x20, [sp, #16*1]
225 ldp x21, x22, [sp, #16*2]
226 ldp x23, x24, [sp, #16*3]
227 ldp x25, x26, [sp, #16*4]
228 ldp x27, x28, [sp, #16*5]
229 ldp x29, x30, [sp], #SSP_ALL /* restore fp/lr, drop frame */
231 .size new_dyna_leave, .-new_dyna_leave
233 /* --------------------------------------- */
/* Bracket a call into a C memory handler: _pre converts the recompiler's
 * cycle counter into an absolute PCSX cycle value, _post recomputes it
 * afterwards.  (Some macro lines, including .endm, are elided here.) */
237 .macro memhandler_pre
238 /* w0 = addr/data, x1 = rhandler, w2 = cycles, x3 = whandler */
239 ldr w4, [rFP, #LO_last_count]
241 str w4, [rFP, #LO_cycle]
244 .macro memhandler_post
245 ldr w0, [rFP, #LO_next_interupt]
246 ldr w2, [rFP, #LO_cycle] // memhandlers can modify cc, like dma
247 str w0, [rFP, #LO_last_count]
/* Out-of-line wrappers around the memhandler_pre/_post macros above, for
 * call sites in generated code (bodies elided in this view). */
251 FUNCTION(do_memhandler_pre):
255 FUNCTION(do_memhandler_post):
/* Guest memory read dispatch.  The handler table holds one 8-byte entry
 * per 4 KiB page; the r16/r8 sub-tables follow the r32 one, hence the
 * add-offsets at each entry point.  (Several macro lines, including the
 * direct/handler-pointer test, are elided in this view.) */
259 .macro pcsx_read_mem readop tab_shift
260 /* w0 = address, x1 = handler_tab, w2 = cycles */
261 ubfm w4, w0, #\tab_shift, #11 /* w4 = addr bits [11:tab_shift] */
262 ldr x3, [x1, w4, uxtw #3] /* x3 = per-page table entry */
265 \readop w0, [x3, w4, uxtw #\tab_shift] /* direct-memory fast path */
268 stp xzr, x30, [sp, #-16]! /* save lr, keep sp 16-aligned */
273 FUNCTION(jump_handler_read8):
274 add x1, x1, #0x1000/4*8 + 0x1000/2*8 /* shift to r8 part */
275 pcsx_read_mem ldrb, 0
278 FUNCTION(jump_handler_read16):
279 add x1, x1, #0x1000/4*8 /* shift to r16 part */
280 pcsx_read_mem ldrh, 1
283 FUNCTION(jump_handler_read32):
287 ldp xzr, x30, [sp], #16 /* restore lr after C handler */
/* Guest memory write dispatch — mirror of pcsx_read_mem above, with the
 * handler table arriving in x3 and data in w1.  (Several macro lines,
 * including the direct/handler-pointer test, are elided in this view.) */
290 .macro pcsx_write_mem wrtop movop tab_shift
291 /* w0 = address, w1 = data, w2 = cycles, x3 = handler_tab */
292 ubfm w4, w0, #\tab_shift, #11 /* w4 = addr bits [11:tab_shift] */
293 ldr x3, [x3, w4, uxtw #3] /* x3 = per-page table entry */
296 mov w0, w2 /* cycle return */
297 \wrtop w1, [x3, w4, uxtw #\tab_shift] /* direct-memory fast path */
300 stp xzr, x30, [sp, #-16]! /* save lr, keep sp 16-aligned */
301 str w0, [rFP, #LO_address] /* some handlers still need it... */
307 FUNCTION(jump_handler_write8):
308 add x3, x3, #0x1000/4*8 + 0x1000/2*8 /* shift to r8 part */
309 pcsx_write_mem strb uxtb 0
312 FUNCTION(jump_handler_write16):
313 add x3, x3, #0x1000/4*8 /* shift to r16 part */
314 pcsx_write_mem strh uxth 1
317 FUNCTION(jump_handler_write32):
318 pcsx_write_mem str mov 2
322 ldp xzr, x30, [sp], #16 /* restore lr after C handler */
/* Guest SWL (store word left, unaligned store) handler.  Looks up the
 * write-table entry for the address's page, then dispatches on low bits
 * via tbz to the per-alignment sub-cases (case bodies and intervening
 * lines are elided in this view). */
325 FUNCTION(jump_handle_swl):
326 /* w0 = address, w1 = data, w2 = cycles */
327 ldr x3, [rFP, #LO_mem_wtab]
328 orr w4, wzr, w0, lsr #12 /* w4 = page index */
329 ldr x3, [x3, w4, uxtw #3] /* x3 = per-page entry */
334 tbz x3, #1, 10f // & 2
335 tbz x3, #0, 2f // & 1
346 tbz x3, #0, 0f // & 1
/* Guest SWR (store word right, unaligned store) handler — mirror of
 * jump_handle_swl above (case bodies and intervening lines are elided in
 * this view). */
360 FUNCTION(jump_handle_swr):
361 /* w0 = address, w1 = data, w2 = cycles */
362 ldr x3, [rFP, #LO_mem_wtab]
363 orr w4, wzr, w0, lsr #12 /* w4 = page index */
364 ldr x3, [x3, w4, uxtw #3] /* x3 = per-page entry */
369 tbz x3, #1, 10f // & 2
370 tbz x3, #0, 2f // & 1
378 tbz x3, #0, 0f // & 1
/* Flush cycle state and call into the C-side GTE stall accounting
 * (the actual C call is elided in this view).  lr must be preserved
 * manually because this is reached from generated code. */
392 FUNCTION(call_gteStall):
393 /* w0 = op_cycles, w1 = cycles */
394 ldr w2, [rFP, #LO_last_count]
395 str lr, [rFP, #LO_saved_lr] /* elided C call below clobbers lr */
397 str w1, [rFP, #LO_cycle]
398 add x1, rFP, #LO_psxRegs /* x1 = &psxRegs for the C helper */
400 ldr lr, [rFP, #LO_saved_lr]