/*
 * pcsx_rearmed: libpcsxcore/new_dynarec/linkage_arm64.S
 * (from commit "patch up some clang/apple issues")
 */
630b122b 1/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
2 * linkage_arm.s for PCSX *
3 * Copyright (C) 2009-2011 Ari64 *
4 * Copyright (C) 2021 notaz *
5 * *
6 * This program is free software; you can redistribute it and/or modify *
7 * it under the terms of the GNU General Public License as published by *
8 * the Free Software Foundation; either version 2 of the License, or *
9 * (at your option) any later version. *
10 * *
11 * This program is distributed in the hope that it will be useful, *
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of *
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
14 * GNU General Public License for more details. *
15 * *
16 * You should have received a copy of the GNU General Public License *
17 * along with this program; if not, write to the *
18 * Free Software Foundation, Inc., *
19 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. *
20 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
21
22#include "arm_features.h"
23#include "new_dynarec_config.h"
24#include "assem_arm64.h"
25#include "linkage_offsets.h"
26
#ifdef __MACH__
/* Mach-O (Apple) mangles C symbol names with a leading underscore and
 * ESYM() adds it.  Remap the C symbols referenced by this file so the
 * same code assembles for both ELF and Mach-O targets. */
#define dynarec_local ESYM(dynarec_local)
#define ndrc_add_jump_out ESYM(ndrc_add_jump_out)
#define ndrc_get_addr_ht ESYM(ndrc_get_addr_ht)
#define gen_interupt ESYM(gen_interupt)
#define gteCheckStallRaw ESYM(gteCheckStallRaw)
#define psxException ESYM(psxException)
#define execI ESYM(execI)
#endif
36
/* The 64-bit pointer fields in dynarec_local are accessed with 8-byte
 * loads/stores below; refuse to build if the layout misaligns them. */
#if (LO_mem_wtab & 7)
#error misaligned pointers
#endif
40
.bss
	.align 4
/* dynarec_local: one contiguous blob of dynarec state.  The variables
 * declared via DRC_VAR below are just named offsets (LO_*) into it;
 * translated code addresses them relative to rFP, which is expected to
 * hold this base (set up in new_dyna_start). */
	.global dynarec_local
	EOBJECT(dynarec_local)
	ESIZE(dynarec_local, LO_dynarec_local_size)
dynarec_local:
	.space LO_dynarec_local_size
48
/* DRC_VAR_(name, vname, size_): expose a field of dynarec_local as the
 * global symbol "vname", located at offset LO_<name> from the base, so
 * C code can reference it directly.
 * Fix: record the per-variable size_ for the symbol instead of the
 * whole-blob LO_dynarec_local_size (size_ was previously unused). */
#define DRC_VAR_(name, vname, size_) \
	vname = dynarec_local + LO_##name ASM_SEPARATOR \
	.globl vname; \
	EOBJECT(vname); \
	ESIZE(vname, size_)

#define DRC_VAR(name, size_) \
	DRC_VAR_(name, ESYM(name), size_)

DRC_VAR(next_interupt, 4)
DRC_VAR(cycle_count, 4)
DRC_VAR(last_count, 4)
DRC_VAR(pending_exception, 4)
DRC_VAR(stop, 4)
DRC_VAR(branch_target, 4)
DRC_VAR(address, 4)
DRC_VAR(hack_addr, 4)
DRC_VAR(psxRegs, LO_psxRegs_end - LO_psxRegs)

/* psxRegs */
#DRC_VAR(reg, 128)
DRC_VAR(lo, 4)
DRC_VAR(hi, 4)
DRC_VAR(reg_cop0, 128)
DRC_VAR(reg_cop2d, 128)
DRC_VAR(reg_cop2c, 128)
DRC_VAR(pcaddr, 4)
#DRC_VAR(code, 4)
#DRC_VAR(cycle, 4)
#DRC_VAR(interrupt, 4)
#DRC_VAR(intCycle, 256)

DRC_VAR(rcnts, 7*4*4)
DRC_VAR(inv_code_start, 4)
DRC_VAR(inv_code_end, 4)
DRC_VAR(mem_rtab, 8)
DRC_VAR(mem_wtab, 8)
DRC_VAR(psxH_ptr, 8)
DRC_VAR(invc_ptr, 8)
DRC_VAR(zeromem_ptr, 8)
DRC_VAR(scratch_buf_ptr, 8)
DRC_VAR(ram_offset, 8)
DRC_VAR(mini_ht, 256)
93
	.text
	.align 2

/* dyna_linker: reached from translated code when a branch target has no
 * linked host code yet.  Resolves the target via ndrc_get_addr_ht and
 * jumps to the returned host address; never returns to the caller. */
FUNCTION(dyna_linker):
	/* r0 = virtual target address */
	/* r1 = instruction to patch */
	bl	ndrc_get_addr_ht
	br	x0
	ESIZE(dyna_linker, .-dyna_linker)
630b122b 103
	.align 2
/* cc_interrupt: cycle-count event entry, called from translated code.
 * Converts rCC (cycles relative to last_count) to an absolute count,
 * publishes it in psxRegs.cycle, runs pending events via gen_interupt,
 * then either returns to translated code, leaves the dynarec (stop
 * requested), or re-resolves the PC if an exception became pending. */
FUNCTION(cc_interrupt):
	ldr	w0, [rFP, #LO_last_count]
	add	rCC, w0, rCC		/* absolute cycle count */
	str	wzr, [rFP, #LO_pending_exception]
	str	rCC, [rFP, #LO_cycle]	/* PCSX cycles */
	mov	x21, lr			/* x21 is callee-saved; survives the C call */
1:
	add	x0, rFP, #LO_reg_cop0	/* CP0 */
	bl	gen_interupt
	mov	lr, x21
	ldr	rCC, [rFP, #LO_cycle]
	ldr	w0, [rFP, #LO_next_interupt]
	ldr	w1, [rFP, #LO_pending_exception]
	ldr	w2, [rFP, #LO_stop]
	str	w0, [rFP, #LO_last_count]
	sub	rCC, rCC, w0		/* back to relative-to-last_count form */
	cbnz	w2, new_dyna_leave	/* emulation stop requested */
	cbnz	w1, 2f			/* exception pending: re-resolve PC */
	ret
2:
	ldr	w0, [rFP, #LO_pcaddr]
	bl	ndrc_get_addr_ht
	br	x0
	ESIZE(cc_interrupt, .-cc_interrupt)
630b122b 129
	.align 2
/* Exception entry points, reached from translated code.
 * Convention at call_psxException:
 *   w0 = CP0 Cause code (exception number << 2)
 *   w1 = branch-delay-slot flag passed to psxException
 *   w2 = PC of the faulting instruction (stored to pcaddr)
 * For the address-error entries, w1 initially carries the bad virtual
 * address (stored to CP0 BadVaddr) before being reused as the ds flag.
 * call_psxException does not return here; control continues at
 * jump_to_new_pc below. */
FUNCTION(jump_addrerror_ds): /* R3000E_AdEL / R3000E_AdES in w0 */
	str	w1, [rFP, #(LO_psxRegs + (34+8)*4)] /* BadVaddr */
	mov	w1, #1
	b	call_psxException
FUNCTION(jump_addrerror):
	str	w1, [rFP, #(LO_psxRegs + (34+8)*4)] /* BadVaddr */
	mov	w1, #0
	b	call_psxException
FUNCTION(jump_overflow_ds):
	mov	w0, #(12<<2) /* R3000E_Ov */
	mov	w1, #1
	b	call_psxException
FUNCTION(jump_overflow):
	mov	w0, #(12<<2)
	mov	w1, #0
	b	call_psxException
FUNCTION(jump_break_ds):
	mov	w0, #(9<<2) /* R3000E_Bp */
	mov	w1, #1
	b	call_psxException
FUNCTION(jump_break):
	mov	w0, #(9<<2)
	mov	w1, #0
	b	call_psxException
FUNCTION(jump_syscall_ds):
	mov	w0, #(8<<2) /* R3000E_Syscall */
	mov	w1, #2
	b	call_psxException
FUNCTION(jump_syscall):
	mov	w0, #(8<<2)
	mov	w1, #0
	/* fallthrough into call_psxException */

call_psxException:
	ldr	w3, [rFP, #LO_last_count]
	str	w2, [rFP, #LO_pcaddr]
	add	rCC, w3, rCC		/* absolute cycle count */
	str	rCC, [rFP, #LO_cycle]	/* PCSX cycles */
	add	x2, rFP, #LO_reg_cop0	/* CP0 */
	bl	psxException
	/* falls through into jump_to_new_pc */
630b122b 170
	/* note: psxException might do a recursive recompiler call from its
	 * HLE code, so be ready for this */
/* jump_to_new_pc: re-enter translated code at psxRegs.pcaddr.  Reloads
 * the (possibly updated) cycle count, rebases rCC on next_interupt, and
 * bails out to new_dyna_leave if a stop was requested meanwhile. */
FUNCTION(jump_to_new_pc):
	ldr	w2, [rFP, #LO_stop]
	ldr	w1, [rFP, #LO_next_interupt]
	ldr	rCC, [rFP, #LO_cycle]
	ldr	w0, [rFP, #LO_pcaddr]
	sub	rCC, rCC, w1		/* back to relative-to-last_count form */
	str	w1, [rFP, #LO_last_count]
	cbnz	w2, new_dyna_leave
	bl	ndrc_get_addr_ht
	br	x0
	ESIZE(jump_to_new_pc, .-jump_to_new_pc)
630b122b 184
	/* stack must be aligned by 16, and include space for save_regs() use */
	.align 2
/* new_dyna_start: C entry point into translated code.
 * In: x0 = dynarec state base (becomes rFP).
 * Allocates an SSP_ALL-byte frame (translated code also spills into it),
 * saves all AAPCS64 callee-saved GPRs, sets rCC = cycle - next_interupt
 * (cycles relative to last_count), and jumps to the code for pcaddr.
 * The frame is torn down by new_dyna_leave. */
FUNCTION(new_dyna_start):
	stp	x29, x30, [sp, #-SSP_ALL]!
	ldr	w1, [x0, #LO_next_interupt]
	ldr	w2, [x0, #LO_cycle]
	stp	x19, x20, [sp, #16*1]
	stp	x21, x22, [sp, #16*2]
	stp	x23, x24, [sp, #16*3]
	stp	x25, x26, [sp, #16*4]
	stp	x27, x28, [sp, #16*5]
	mov	rFP, x0
	ldr	w0, [rFP, #LO_pcaddr]
	str	w1, [rFP, #LO_last_count]
	sub	rCC, w2, w1
	bl	ndrc_get_addr_ht
	br	x0
	ESIZE(new_dyna_start, .-new_dyna_start)
630b122b 203
	.align 2
/* new_dyna_leave: exit path back to the new_dyna_start caller.
 * Publishes the final absolute cycle count in psxRegs.cycle, restores
 * the callee-saved registers and the SSP_ALL frame, and returns. */
FUNCTION(new_dyna_leave):
	ldr	w0, [rFP, #LO_last_count]
	add	rCC, rCC, w0		/* absolute cycle count */
	str	rCC, [rFP, #LO_cycle]
	ldp	x19, x20, [sp, #16*1]
	ldp	x21, x22, [sp, #16*2]
	ldp	x23, x24, [sp, #16*3]
	ldp	x25, x26, [sp, #16*4]
	ldp	x27, x28, [sp, #16*5]
	ldp	x29, x30, [sp], #SSP_ALL
	ret
	ESIZE(new_dyna_leave, .-new_dyna_leave)
630b122b 217
/* --------------------------------------- */

.align 2

/* memhandler_pre: before entering a C memory handler, publish the
 * current absolute cycle count: psxRegs.cycle = last_count + w2. */
.macro memhandler_pre
	/* w0 = addr/data, x1 = rhandler, w2 = cycles, x3 = whandler */
	ldr	w4, [rFP, #LO_last_count]
	add	w4, w4, w2
	str	w4, [rFP, #LO_cycle]
.endm

/* memhandler_post: after a handler returns, recompute the relative
 * cycle count in w0 (the handler may have advanced psxRegs.cycle). */
.macro memhandler_post
	ldr	w0, [rFP, #LO_next_interupt]
	ldr	w2, [rFP, #LO_cycle] // memhandlers can modify cc, like dma
	str	w0, [rFP, #LO_last_count]
	sub	w0, w2, w0
.endm

/* Callable wrappers around the macros above, for use from generated code. */
FUNCTION(do_memhandler_pre):
	memhandler_pre
	ret

FUNCTION(do_memhandler_post):
	memhandler_post
	ret
243
/* pcsx_read_mem: common body of the read handlers.
 * Table entries are stored pre-halved: "adds x3,x3,x3" doubles the entry
 * back, and moves its top bit into the carry flag -- carry set means the
 * page needs a C handler, clear means x3 is a direct-access base.
 * ubfm extracts bits [11:tab_shift] of the address, i.e. the element
 * index within the 4 KB page for the given access size. */
.macro pcsx_read_mem readop tab_shift
	/* w0 = address, x1 = handler_tab, w2 = cycles */
	ubfm	w4, w0, #\tab_shift, #11
	ldr	x3, [x1, w4, uxtw #3]
	adds	x3, x3, x3
	bcs	0f
	\readop	w0, [x3, w4, uxtw #\tab_shift]
	ret
0:
	stp	xzr, x30, [sp, #-16]!	/* save lr, keep sp 16-aligned */
	memhandler_pre
	blr	x3			/* continues at handler_read_end */
.endm

FUNCTION(jump_handler_read8):
	add	x1, x1, #0x1000/4*8 + 0x1000/2*8	/* shift to r8 part */
	pcsx_read_mem ldrb, 0
	b	handler_read_end

FUNCTION(jump_handler_read16):
	add	x1, x1, #0x1000/4*8	/* shift to r16 part */
	pcsx_read_mem ldrh, 1
	b	handler_read_end

FUNCTION(jump_handler_read32):
	pcsx_read_mem ldr, 2

handler_read_end:
	ldp	xzr, x30, [sp], #16
	ret
274
/* pcsx_write_mem: common body of the write handlers; same pre-halved
 * table encoding as pcsx_read_mem (carry set after the doubling add
 * means "call the C handler").  Direct path returns the cycle count in
 * w0; the handler path continues at handler_write_end. */
.macro pcsx_write_mem wrtop movop tab_shift
	/* w0 = address, w1 = data, w2 = cycles, x3 = handler_tab */
	ubfm	w4, w0, #\tab_shift, #11	/* index within the 4 KB page */
	ldr	x3, [x3, w4, uxtw #3]
	adds	x3, x3, x3
	bcs	0f
	mov	w0, w2 /* cycle return */
	\wrtop	w1, [x3, w4, uxtw #\tab_shift]
	ret
0:
	stp	xzr, x30, [sp, #-16]!	/* save lr, keep sp 16-aligned */
	str	w0, [rFP, #LO_address] /* some handlers still need it... */
	\movop	w0, w1			/* handler takes the data in w0 */
	memhandler_pre
	blr	x3
.endm

FUNCTION(jump_handler_write8):
	add	x3, x3, #0x1000/4*8 + 0x1000/2*8	/* shift to r8 part */
	pcsx_write_mem strb, uxtb, 0
	b	handler_write_end

FUNCTION(jump_handler_write16):
	add	x3, x3, #0x1000/4*8	/* shift to r16 part */
	pcsx_write_mem strh, uxth, 1
	b	handler_write_end

FUNCTION(jump_handler_write32):
	pcsx_write_mem str, mov, 2

handler_write_end:
	memhandler_post
	ldp	xzr, x30, [sp], #16
	ret
309
/* jump_handle_swl: emulate the MIPS SWL (store word left) unaligned
 * store for direct-mapped pages; pages with a C handler fall back to
 * the interpreter (jump_handle_swx_interp).  The low two address bits
 * (tested on the host address, which shares the guest's alignment)
 * select how many high-order bytes of w1 are stored.  Returns the
 * cycle count in w0 on the direct path. */
FUNCTION(jump_handle_swl):
	/* w0 = address, w1 = data, w2 = cycles */
	ldr	x3, [rFP, #LO_mem_wtab]
	orr	w4, wzr, w0, lsr #12	/* page index */
	ldr	x3, [x3, w4, uxtw #3]
	adds	x3, x3, x3		/* undouble; C set -> handler page */
	bcs	jump_handle_swx_interp
	add	x3, x0, x3		/* host address for w0 */
	mov	w0, w2			/* cycle return */
	tbz	x3, #1, 10f // & 2
	tbz	x3, #0, 2f // & 1
3:
	/* addr&3 == 3: whole register to the aligned word */
	stur	w1, [x3, #-3]
	ret
2:
	/* addr&3 == 2: upper three bytes of w1 */
	lsr	w2, w1, #8
	lsr	w1, w1, #24
	sturh	w2, [x3, #-2]
	strb	w1, [x3]
	ret
10:
	tbz	x3, #0, 0f // & 1
1:
	/* addr&3 == 1: upper two bytes of w1 */
	lsr	w1, w1, #16
	sturh	w1, [x3, #-1]
	ret
0:
	/* addr&3 == 0: top byte of w1 only */
	lsr	w2, w1, #24
	strb	w2, [x3]
	ret
630b122b 340
/* jump_handle_swr: emulate the MIPS SWR (store word right) unaligned
 * store for direct-mapped pages; handler pages fall back to the
 * interpreter.  The low two address bits select how many low-order
 * bytes of w1 are stored, from the address upward.  Returns the cycle
 * count in w0 on the direct path. */
FUNCTION(jump_handle_swr):
	/* w0 = address, w1 = data, w2 = cycles */
	ldr	x3, [rFP, #LO_mem_wtab]
	orr	w4, wzr, w0, lsr #12	/* page index */
	ldr	x3, [x3, w4, uxtw #3]
	adds	x3, x3, x3		/* undouble; C set -> handler page */
	bcs	jump_handle_swx_interp
	add	x3, x0, x3		/* host address for w0 */
	mov	w0, w2			/* cycle return */
	tbz	x3, #1, 10f // & 2
	tbz	x3, #0, 2f // & 1
3:
	/* addr&3 == 3: lowest byte of w1 only */
	strb	w1, [x3]
	ret
2:
	/* addr&3 == 2: low halfword of w1 */
	strh	w1, [x3]
	ret
10:
	tbz	x3, #0, 0f // & 1
1:
	/* addr&3 == 1: low three bytes of w1 */
	lsr	w2, w1, #8
	strb	w1, [x3]
	sturh	w2, [x3, #1]
	ret
0:
	/* addr&3 == 0: whole register */
	str	w1, [x3]
	ret

/* Fallback: run the faulting store through the C interpreter (execI),
 * after publishing the absolute cycle count, then re-enter translated
 * code via jump_to_new_pc. */
jump_handle_swx_interp: /* almost never happens */
	ldr	w3, [rFP, #LO_last_count]
	add	x0, rFP, #LO_psxRegs
	add	w2, w3, w2
	str	w2, [rFP, #LO_cycle] /* PCSX cycles */
	bl	execI
	b	jump_to_new_pc
630b122b 376
/* call_gteStall: account for GTE operation stalls.  Publishes the
 * absolute cycle count, calls gteCheckStallRaw(op_cycles, &psxRegs),
 * and adds the returned stall cycles (w0) to rCC.  lr is preserved in
 * dynarec_local (saved_lr) instead of on the stack. */
FUNCTION(call_gteStall):
	/* w0 = op_cycles, w1 = cycles */
	ldr	w2, [rFP, #LO_last_count]
	str	lr, [rFP, #LO_saved_lr]
	add	w1, w1, w2
	str	w1, [rFP, #LO_cycle]
	add	x1, rFP, #LO_psxRegs
	bl	gteCheckStallRaw
	ldr	lr, [rFP, #LO_saved_lr]
	add	rCC, rCC, w0
	ret
388
#ifdef DRC_DBG
#undef do_insn_cmp
/* do_insn_cmp_arm64: debug-build per-instruction trace hook.  Spills
 * the scratch registers translated code may hold live (x2-x18 and lr)
 * into the current stack frame at SSP_CALLEE_REGS + n*8, calls the
 * C-side do_insn_cmp, and restores them.  x0/x1 are not preserved. */
FUNCTION(do_insn_cmp_arm64):
	stp	x2, x3, [sp, #(SSP_CALLEE_REGS + 2*8)]
	stp	x4, x5, [sp, #(SSP_CALLEE_REGS + 4*8)]
	stp	x6, x7, [sp, #(SSP_CALLEE_REGS + 6*8)]
	stp	x8, x9, [sp, #(SSP_CALLEE_REGS + 8*8)]
	stp	x10, x11, [sp, #(SSP_CALLEE_REGS + 10*8)]
	stp	x12, x13, [sp, #(SSP_CALLEE_REGS + 12*8)]
	stp	x14, x15, [sp, #(SSP_CALLEE_REGS + 14*8)]
	stp	x16, x17, [sp, #(SSP_CALLEE_REGS + 16*8)]
	stp	x18, x30, [sp, #(SSP_CALLEE_REGS + 18*8)]
	bl	do_insn_cmp
	ldp	x2, x3, [sp, #(SSP_CALLEE_REGS + 2*8)]
	ldp	x4, x5, [sp, #(SSP_CALLEE_REGS + 4*8)]
	ldp	x6, x7, [sp, #(SSP_CALLEE_REGS + 6*8)]
	ldp	x8, x9, [sp, #(SSP_CALLEE_REGS + 8*8)]
	ldp	x10, x11, [sp, #(SSP_CALLEE_REGS + 10*8)]
	ldp	x12, x13, [sp, #(SSP_CALLEE_REGS + 12*8)]
	ldp	x14, x15, [sp, #(SSP_CALLEE_REGS + 14*8)]
	ldp	x16, x17, [sp, #(SSP_CALLEE_REGS + 16*8)]
	ldp	x18, x30, [sp, #(SSP_CALLEE_REGS + 18*8)]
	ret
#endif