1 /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
2 * linkage_arm.s for PCSX *
3 * Copyright (C) 2009-2011 Ari64 *
4 * Copyright (C) 2021 notaz *
6 * This program is free software; you can redistribute it and/or modify *
7 * it under the terms of the GNU General Public License as published by *
8 * the Free Software Foundation; either version 2 of the License, or *
9 * (at your option) any later version. *
11 * This program is distributed in the hope that it will be useful, *
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of *
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
14 * GNU General Public License for more details. *
16 * You should have received a copy of the GNU General Public License *
17 * along with this program; if not, write to the *
18 * Free Software Foundation, Inc., *
19 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. *
20 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
22 #include "arm_features.h"
23 #include "new_dynarec_config.h"
24 #include "assem_arm64.h"
25 #include "linkage_offsets.h"
28 #error misaligned pointers
/* dynarec_local: single block of recompiler state, LO_dynarec_local_size
 * bytes (offsets come from linkage_offsets.h).  Individual fields are
 * given named aliases by the DRC_VAR macros below. */
34 .type dynarec_local, %object
35 .size dynarec_local, LO_dynarec_local_size
37 .space LO_dynarec_local_size
/* DRC_VAR_(name, vname, size_): define symbol 'vname' as an object alias
 * into dynarec_local at offset LO_<name>, so asm and C agree on each
 * field's address.  DRC_VAR wraps it with ESYM() symbol-name mangling.
 * (Macro bodies are only partially visible in this chunk.) */
39 #define DRC_VAR_(name, vname, size_) \
40  vname = dynarec_local + LO_##name; \
42  .type vname, %object; \
45 #define DRC_VAR(name, size_) \
46  DRC_VAR_(name, ESYM(name), size_)
/* Named fields inside dynarec_local; second argument is the size in bytes.
 * Note: "next_interupt" spelling matches the symbol used by the C side —
 * do not "fix" it here. */
48 DRC_VAR(next_interupt, 4)
49 DRC_VAR(cycle_count, 4)
50 DRC_VAR(last_count, 4)
51 DRC_VAR(pending_exception, 4)
53 DRC_VAR(branch_target, 4)
56 DRC_VAR(psxRegs, LO_psxRegs_end - LO_psxRegs) /* emulated CPU register file */
62 DRC_VAR(reg_cop0, 128)
63 DRC_VAR(reg_cop2d, 128) /* coprocessor 2 data regs */
64 DRC_VAR(reg_cop2c, 128) /* coprocessor 2 control regs */
68 #DRC_VAR(interrupt, 4)
69 #DRC_VAR(intCycle, 256)
72 DRC_VAR(inv_code_start, 4)
73 DRC_VAR(inv_code_end, 4)
78 DRC_VAR(zeromem_ptr, 8)
79 DRC_VAR(scratch_buf_ptr, 8)
80 DRC_VAR(ram_offset, 8)
/* dyna_linker: entry used by generated code to resolve a branch whose
 * target block is not yet linked; patches the calling instruction.
 * NOTE(review): the body is elided in this chunk — confirm behavior
 * against the full file. */
87 FUNCTION(dyna_linker):
88 /* r0 = virtual target address */
89 /* r1 = instruction to patch */
92 .size dyna_linker, .-dyna_linker
/* cc_interrupt: called from generated code when the cycle counter (rCC)
 * requires servicing.  Flushes rCC to LO_cycle, clears pending_exception,
 * calls into the C event handling (call itself not visible in this chunk),
 * then reloads state and either resumes at LO_pcaddr or exits via
 * new_dyna_leave when LO_stop is set. */
95 FUNCTION(cc_interrupt):
96 ldr w0, [rFP, #LO_last_count]
98 str wzr, [rFP, #LO_pending_exception] /* clear before running events */
99 str rCC, [rFP, #LO_cycle] /* PCSX cycles */
102 add x0, rFP, #LO_reg_cop0 /* CP0 */
105 ldr rCC, [rFP, #LO_cycle] /* events may have consumed cycles */
106 ldr w0, [rFP, #LO_next_interupt]
107 ldr w1, [rFP, #LO_pending_exception]
108 ldr w2, [rFP, #LO_stop]
109 str w0, [rFP, #LO_last_count] /* last_count = next_interupt */
111 cbnz w2, new_dyna_leave /* stop requested -> leave dynarec */
115 ldr w0, [rFP, #LO_pcaddr] /* continue at saved pc */
118 .size cc_interrupt, .-cc_interrupt
/* Exception raise entry points.  Each loads w0 with a pre-shifted cause
 * value (value<<2; presumably matching the emulated CPU's Cause ExcCode
 * field position — confirm against psxException), then falls into a
 * common tail that saves state and calls the C exception handler.  The
 * "_ds" variants appear to be the branch-delay-slot versions (TODO
 * confirm; intermediate lines are elided in this chunk). */
121 FUNCTION(jump_addrerror_ds): /* R3000E_AdEL / R3000E_AdES in w0 */
122 str w1, [rFP, #(LO_psxRegs + (34+8)*4)] /* BadVaddr */
125 FUNCTION(jump_addrerror):
126 str w1, [rFP, #(LO_psxRegs + (34+8)*4)] /* BadVaddr */
129 FUNCTION(jump_overflow_ds):
130 mov w0, #(12<<2) /* R3000E_Ov */
133 FUNCTION(jump_overflow):
137 FUNCTION(jump_break_ds):
138 mov w0, #(9<<2) /* R3000E_Bp */
141 FUNCTION(jump_break):
145 FUNCTION(jump_syscall_ds):
146 mov w0, #(8<<2) /* R3000E_Syscall */
149 FUNCTION(jump_syscall):
/* common tail: save pc and cycle state before entering C code */
154 ldr w3, [rFP, #LO_last_count]
155 str w2, [rFP, #LO_pcaddr]
157 str rCC, [rFP, #LO_cycle] /* PCSX cycles */
158 add x2, rFP, #LO_reg_cop0 /* CP0 */
161 /* note: psxException might do recursive recompiler call from its HLE code,
162  * so be ready for this */
/* jump_to_new_pc: (re)enter generated code at LO_pcaddr after C code may
 * have changed it; bails out via new_dyna_leave when LO_stop is set. */
163 FUNCTION(jump_to_new_pc):
164 ldr w2, [rFP, #LO_stop]
165 ldr w1, [rFP, #LO_next_interupt]
166 ldr rCC, [rFP, #LO_cycle]
167 ldr w0, [rFP, #LO_pcaddr]
169 str w1, [rFP, #LO_last_count]
170 cbnz w2, new_dyna_leave /* stop requested -> leave dynarec */
173 .size jump_to_new_pc, .-jump_to_new_pc
175 /* stack must be aligned by 16, and include space for save_regs() use */
/* new_dyna_start: enter the dynarec from C.  On entry x0 appears to point
 * at the dynarec state block (it is used with LO_* offsets before rFP is
 * established in lines elided here — TODO confirm).  Saves all AArch64
 * callee-saved pairs x19..x28 plus fp/lr in the SSP_ALL frame, then jumps
 * into generated code at LO_pcaddr. */
177 FUNCTION(new_dyna_start):
178 stp x29, x30, [sp, #-SSP_ALL]! /* allocate frame, save fp/lr */
179 ldr w1, [x0, #LO_next_interupt]
180 ldr w2, [x0, #LO_cycle]
181 stp x19, x20, [sp, #16*1] /* preserve callee-saved regs; */
182 stp x21, x22, [sp, #16*2] /* generated code uses them freely */
183 stp x23, x24, [sp, #16*3]
184 stp x25, x26, [sp, #16*4]
185 stp x27, x28, [sp, #16*5]
187 ldr w0, [rFP, #LO_pcaddr]
188 str w1, [rFP, #LO_last_count]
192 .size new_dyna_start, .-new_dyna_start
/* new_dyna_leave: exit generated code back to the C caller of
 * new_dyna_start.  Flushes the cycle counter, restores the callee-saved
 * registers saved in new_dyna_start, and pops the SSP_ALL frame. */
195 FUNCTION(new_dyna_leave):
196 ldr w0, [rFP, #LO_last_count]
198 str rCC, [rFP, #LO_cycle] /* publish final cycle count */
199 ldp x19, x20, [sp, #16*1]
200 ldp x21, x22, [sp, #16*2]
201 ldp x23, x24, [sp, #16*3]
202 ldp x25, x26, [sp, #16*4]
203 ldp x27, x28, [sp, #16*5]
204 ldp x29, x30, [sp], #SSP_ALL /* restore fp/lr, drop frame */
206 .size new_dyna_leave, .-new_dyna_leave
208 /* --------------------------------------- */
/* memhandler_pre: flush the cycle count to LO_cycle before calling a C
 * memory handler (the arithmetic between the load and store is elided in
 * this chunk). */
212 .macro memhandler_pre
213 /* w0 = addr/data, x1 = rhandler, w2 = cycles, x3 = whandler */
214 ldr w4, [rFP, #LO_last_count]
216 str w4, [rFP, #LO_cycle]
/* memhandler_post: re-read the (possibly updated) cycle state after a C
 * memory handler returns. */
219 .macro memhandler_post
220 ldr w0, [rFP, #LO_next_interupt]
221 ldr w2, [rFP, #LO_cycle] // memhandlers can modify cc, like dma
222 str w0, [rFP, #LO_last_count]
/* Out-of-line callable wrappers; presumably expand memhandler_pre/
 * memhandler_post respectively (bodies not visible in this chunk —
 * confirm against the full file). */
226 FUNCTION(do_memhandler_pre):
230 FUNCTION(do_memhandler_post):
/* pcsx_read_mem: common body for the read handlers.  Extracts address
 * bits [11:tab_shift] into w4 (index scaled for the access size), loads
 * the per-page table entry into x3, and on the fast path performs the
 * read directly; otherwise spills lr and calls the C handler (the branch
 * logic between these lines is elided in this chunk). */
234 .macro pcsx_read_mem readop tab_shift
235 /* w0 = address, x1 = handler_tab, w2 = cycles */
236 ubfm w4, w0, #\tab_shift, #11 /* w4 = addr bits [11:tab_shift] */
237 ldr x3, [x1, w4, uxtw #3] /* x3 = table entry (8 bytes each) */
240 \readop w0, [x3, w4, uxtw #\tab_shift] /* direct read, fast path */
243 stp xzr, x30, [sp, #-16]! /* slow path: save lr, keep sp 16-aligned */
/* Read dispatchers for 8/16/32-bit accesses.  The handler table holds
 * three consecutive sub-tables (r32, r16, r8); the adds skip x1 to the
 * sub-table matching the access width before expanding pcsx_read_mem. */
248 FUNCTION(jump_handler_read8):
249 add x1, x1, #0x1000/4*8 + 0x1000/2*8 /* shift to r8 part */
250 pcsx_read_mem ldrb, 0
253 FUNCTION(jump_handler_read16):
254 add x1, x1, #0x1000/4*8 /* shift to r16 part */
255 pcsx_read_mem ldrh, 1
258 FUNCTION(jump_handler_read32):
262 ldp xzr, x30, [sp], #16 /* slow-path return: restore lr */
/* pcsx_write_mem: common body for the write handlers.  Same table lookup
 * scheme as pcsx_read_mem; on the fast path stores the data directly and
 * returns the cycle count in w0, otherwise spills lr and calls the C
 * handler (branch logic between these lines is elided in this chunk). */
265 .macro pcsx_write_mem wrtop movop tab_shift
266 /* w0 = address, w1 = data, w2 = cycles, x3 = handler_tab */
267 ubfm w4, w0, #\tab_shift, #11 /* w4 = addr bits [11:tab_shift] */
268 ldr x3, [x3, w4, uxtw #3] /* x3 = table entry (8 bytes each) */
271 mov w0, w2 /* cycle return */
272 \wrtop w1, [x3, w4, uxtw #\tab_shift] /* direct write, fast path */
275 stp xzr, x30, [sp, #-16]! /* slow path: save lr, keep sp 16-aligned */
276 str w0, [rFP, #LO_address] /* some handlers still need it... */
/* Write dispatchers for 8/16/32-bit accesses; same sub-table layout as
 * the read handlers (x3 advanced to the width-specific part). */
282 FUNCTION(jump_handler_write8):
283 add x3, x3, #0x1000/4*8 + 0x1000/2*8 /* shift to r8 part */
284 pcsx_write_mem strb uxtb 0
287 FUNCTION(jump_handler_write16):
288 add x3, x3, #0x1000/4*8 /* shift to r16 part */
289 pcsx_write_mem strh uxth 1
292 FUNCTION(jump_handler_write32):
293 pcsx_write_mem str mov 2
297 ldp xzr, x30, [sp], #16 /* slow-path return: restore lr */
/* jump_handle_swl: unaligned-store-left helper.  Looks up the write
 * table entry for the address's 4K page; falls back to the interpreter
 * path when carry is set (set by lines elided here), otherwise branches
 * on the low two address bits to perform the partial store. */
300 FUNCTION(jump_handle_swl):
301 /* w0 = address, w1 = data, w2 = cycles */
302 ldr x3, [rFP, #LO_mem_wtab]
303 orr w4, wzr, w0, lsr #12 /* w4 = page number */
304 ldr x3, [x3, w4, uxtw #3]
306 bcs jump_handle_swx_interp /* no direct-RAM mapping */
309 tbz x3, #1, 10f // & 2
310 tbz x3, #0, 2f // & 1
321 tbz x3, #0, 0f // & 1
/* jump_handle_swr: unaligned-store-right helper; mirrors
 * jump_handle_swl's page lookup and low-address-bit dispatch. */
331 FUNCTION(jump_handle_swr):
332 /* w0 = address, w1 = data, w2 = cycles */
333 ldr x3, [rFP, #LO_mem_wtab]
334 orr w4, wzr, w0, lsr #12 /* w4 = page number */
335 ldr x3, [x3, w4, uxtw #3]
337 bcs jump_handle_swx_interp /* no direct-RAM mapping */
340 tbz x3, #1, 10f // & 2
341 tbz x3, #0, 2f // & 1
349 tbz x3, #0, 0f // & 1
/* jump_handle_swx_interp: slow path shared by swl/swr — flushes cycle
 * state and hands the store to the C interpreter (call elided here). */
359 jump_handle_swx_interp: /* almost never happens */
360 ldr w3, [rFP, #LO_last_count]
361 add x0, rFP, #LO_psxRegs /* arg0 = emulated register file */
363 str w2, [rFP, #LO_cycle] /* PCSX cycles */
/* call_gteStall: wrapper around the C GTE-stall accounting.  lr is
 * preserved manually in LO_saved_lr (instead of on the stack) around the
 * C call, which is elided in this chunk. */
367 FUNCTION(call_gteStall):
368 /* w0 = op_cycles, w1 = cycles */
369 ldr w2, [rFP, #LO_last_count]
370 str lr, [rFP, #LO_saved_lr] /* save return address across C call */
372 str w1, [rFP, #LO_cycle]
373 add x1, rFP, #LO_psxRegs /* arg1 = emulated register file */
375 ldr lr, [rFP, #LO_saved_lr] /* restore return address */
/* do_insn_cmp_arm64: debug hook called from generated code.  Spills all
 * caller-volatile registers x2-x18 plus lr into the pre-reserved area
 * above SSP_CALLEE_REGS so the C call made between the stores and loads
 * (elided in this chunk) cannot disturb the generated code's state, then
 * restores them in the same order. */
381 FUNCTION(do_insn_cmp_arm64):
382 stp x2, x3, [sp, #(SSP_CALLEE_REGS + 2*8)]
383 stp x4, x5, [sp, #(SSP_CALLEE_REGS + 4*8)]
384 stp x6, x7, [sp, #(SSP_CALLEE_REGS + 6*8)]
385 stp x8, x9, [sp, #(SSP_CALLEE_REGS + 8*8)]
386 stp x10, x11, [sp, #(SSP_CALLEE_REGS + 10*8)]
387 stp x12, x13, [sp, #(SSP_CALLEE_REGS + 12*8)]
388 stp x14, x15, [sp, #(SSP_CALLEE_REGS + 14*8)]
389 stp x16, x17, [sp, #(SSP_CALLEE_REGS + 16*8)]
390 stp x18, x30, [sp, #(SSP_CALLEE_REGS + 18*8)]
/* ... C comparison call elided ... */
392 ldp x2, x3, [sp, #(SSP_CALLEE_REGS + 2*8)]
393 ldp x4, x5, [sp, #(SSP_CALLEE_REGS + 4*8)]
394 ldp x6, x7, [sp, #(SSP_CALLEE_REGS + 6*8)]
395 ldp x8, x9, [sp, #(SSP_CALLEE_REGS + 8*8)]
396 ldp x10, x11, [sp, #(SSP_CALLEE_REGS + 10*8)]
397 ldp x12, x13, [sp, #(SSP_CALLEE_REGS + 12*8)]
398 ldp x14, x15, [sp, #(SSP_CALLEE_REGS + 14*8)]
399 ldp x16, x17, [sp, #(SSP_CALLEE_REGS + 16*8)]
400 ldp x18, x30, [sp, #(SSP_CALLEE_REGS + 18*8)]