/*
 * libpcsxcore/new_dynarec/linkage_arm64.S — pcsx_rearmed
 * (exported from a git web viewer; commit subject: "cdrom: change pause timing again")
 */
1/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
2 * linkage_arm.s for PCSX *
3 * Copyright (C) 2009-2011 Ari64 *
4 * Copyright (C) 2021 notaz *
5 * *
6 * This program is free software; you can redistribute it and/or modify *
7 * it under the terms of the GNU General Public License as published by *
8 * the Free Software Foundation; either version 2 of the License, or *
9 * (at your option) any later version. *
10 * *
11 * This program is distributed in the hope that it will be useful, *
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of *
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
14 * GNU General Public License for more details. *
15 * *
16 * You should have received a copy of the GNU General Public License *
17 * along with this program; if not, write to the *
18 * Free Software Foundation, Inc., *
19 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. *
20 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
21
22#include "arm_features.h"
23#include "new_dynarec_config.h"
24#include "assem_arm64.h"
25#include "linkage_offsets.h"
26
#ifdef __MACH__
/* Mach-O exports C symbols with a leading underscore; map every C symbol
 * referenced from this file to its exported (ESYM-decorated) name. */
#define dynarec_local ESYM(dynarec_local)
#define ndrc_add_jump_out ESYM(ndrc_add_jump_out)
#define ndrc_get_addr_ht ESYM(ndrc_get_addr_ht)
#define gen_interupt ESYM(gen_interupt)
#define gteCheckStallRaw ESYM(gteCheckStallRaw)
#define psxException ESYM(psxException)
#define execI ESYM(execI)
#endif

/* The 8-byte ldr/str accesses below assume the pointer members of
 * dynarec_local are naturally (8-byte) aligned; fail the build otherwise. */
#if (LO_mem_wtab & 7)
#error misaligned pointers
#endif
40
/* All dynarec state is kept in one contiguous block so that both this
 * file and generated code can reach everything from a single base
 * register (rFP) using small immediate offsets (the LO_* constants). */
.bss
	.align 4	/* power-of-2 on AArch64 ELF: 16-byte alignment */
	.global dynarec_local
	EOBJECT(dynarec_local)
	ESIZE(dynarec_local, LO_dynarec_local_size)
dynarec_local:
	.space LO_dynarec_local_size
48
/*
 * DRC_VAR(name, size): bind a global assembler symbol for variable `name`
 * to its fixed offset inside dynarec_local (offsets come from
 * linkage_offsets.h), so C code and this file share one storage layout.
 * Note: size_ is unused; ESIZE records the whole dynarec_local size for
 * each symbol — presumably only symbol-table/debug cosmetics, confirm.
 */
#define DRC_VAR_(name, vname, size_) \
	vname = dynarec_local + LO_##name ASM_SEPARATOR \
	.globl vname; \
	EOBJECT(vname); \
	ESIZE(vname, LO_dynarec_local_size)

#define DRC_VAR(name, size_) \
	DRC_VAR_(name, ESYM(name), size_)

DRC_VAR(next_interupt, 4)
DRC_VAR(cycle_count, 4)
DRC_VAR(last_count, 4)
DRC_VAR(pending_exception, 4)
DRC_VAR(stop, 4)
DRC_VAR(branch_target, 4)
DRC_VAR(address, 4)
DRC_VAR(hack_addr, 4)
DRC_VAR(psxRegs, LO_psxRegs_end - LO_psxRegs)

/* psxRegs */
#DRC_VAR(reg, 128)
DRC_VAR(lo, 4)
DRC_VAR(hi, 4)
DRC_VAR(reg_cop0, 128)
DRC_VAR(reg_cop2d, 128)
DRC_VAR(reg_cop2c, 128)
DRC_VAR(pcaddr, 4)
#DRC_VAR(code, 4)
#DRC_VAR(cycle, 4)
#DRC_VAR(interrupt, 4)
#DRC_VAR(intCycle, 256)

DRC_VAR(rcnts, 7*4*4)
DRC_VAR(inv_code_start, 4)
DRC_VAR(inv_code_end, 4)
DRC_VAR(mem_rtab, 8)
DRC_VAR(mem_wtab, 8)
DRC_VAR(psxH_ptr, 8)
DRC_VAR(invc_ptr, 8)
DRC_VAR(zeromem_ptr, 8)
DRC_VAR(scratch_buf_ptr, 8)
DRC_VAR(ram_offset, 8)
DRC_VAR(mini_ht, 256)
92
93
	.text
	.align	2

/*
 * Target of not-yet-linked branches in translated code: looks up the
 * host-code address for the requested PC via ndrc_get_addr_ht and jumps
 * there.  Patching of the calling instruction is presumably handled on
 * the C side — confirm against new_dynarec.c.
 */
FUNCTION(dyna_linker):
	/* r0 = virtual target address */
	/* r1 = instruction to patch */
	bl	ndrc_get_addr_ht
	br	x0	/* tail-jump into translated code; never returns here */
	ESIZE(dyna_linker, .-dyna_linker)
103
	.align 2
/*
 * Called from translated code when the cycle counter (rCC) expires.
 * Flushes cycles to psxRegs.cycle, runs gen_interupt(&CP0), then:
 *   stop set              -> exit the dynarec via new_dyna_leave
 *   pending_exception set -> re-enter at the (possibly new) pcaddr
 *   otherwise             -> return to the translated caller.
 * lr is stashed in x21 across the C call: x21 is callee-saved and was
 * preserved by new_dyna_start — NOTE(review): assumes generated code
 * does not keep a live value in x21 here; confirm against assem_arm64.
 */
FUNCTION(cc_interrupt):
	ldr	w0, [rFP, #LO_last_count]
	add	rCC, w0, rCC			/* absolute cycle count */
	str	wzr, [rFP, #LO_pending_exception]
	str	rCC, [rFP, #LO_cycle]		/* PCSX cycles */
	mov	x21, lr
1:
	add	x0, rFP, #LO_reg_cop0		/* CP0 */
	bl	gen_interupt
	mov	lr, x21
	ldr	rCC, [rFP, #LO_cycle]		/* C code may have changed it */
	ldr	w0, [rFP, #LO_next_interupt]
	ldr	w1, [rFP, #LO_pending_exception]
	ldr	w2, [rFP, #LO_stop]
	str	w0, [rFP, #LO_last_count]
	sub	rCC, rCC, w0			/* back to counts-up-to-event form */
	cbnz	w2, new_dyna_leave
	cbnz	w1, 2f
	ret
2:
	ldr	w0, [rFP, #LO_pcaddr]
	bl	ndrc_get_addr_ht
	br	x0
	ESIZE(cc_interrupt, .-cc_interrupt)
129
	.align 2
/*
 * PSX exception raisers, entered from generated code.  Each one loads
 *   w0 = Cause.ExcCode << 2
 *   w1 = branch-delay-slot flag for psxException (plain: 0, _ds: 1;
 *        jump_syscall_ds passes 2 — meaning defined by psxException)
 * and reaches call_psxException below; w2 (the PC to report) is expected
 * to be set by the caller and is stored there.
 * The address-error entries additionally receive the faulting address in
 * w1 on entry and store it to CP0 BadVaddr (reg 8) before reusing w1.
 */
FUNCTION(jump_addrerror_ds): /* R3000E_AdEL / R3000E_AdES in w0 */
	str	w1, [rFP, #(LO_psxRegs + (34+8)*4)] /* BadVaddr */
	mov	w1, #1
	b	call_psxException
FUNCTION(jump_addrerror):
	str	w1, [rFP, #(LO_psxRegs + (34+8)*4)] /* BadVaddr */
	mov	w1, #0
	b	call_psxException
FUNCTION(jump_overflow_ds):
	mov	w0, #(12<<2) /* R3000E_Ov */
	mov	w1, #1
	b	call_psxException
FUNCTION(jump_overflow):
	mov	w0, #(12<<2)
	mov	w1, #0
	b	call_psxException
FUNCTION(jump_break_ds):
	mov	w0, #(9<<2) /* R3000E_Bp */
	mov	w1, #1
	b	call_psxException
FUNCTION(jump_break):
	mov	w0, #(9<<2)
	mov	w1, #0
	b	call_psxException
FUNCTION(jump_syscall_ds):
	mov	w0, #(8<<2) /* R3000E_Syscall */
	mov	w1, #2
	b	call_psxException
FUNCTION(jump_syscall):
	mov	w0, #(8<<2)
	mov	w1, #0
	/* falls through to call_psxException */
/*
 * in: w0 = cause, w1 = delay-slot flag, w2 = PC to report.
 * Stores the PC, flushes the cycle counter to psxRegs.cycle, and calls
 * psxException(cause, dslot, &CP0); then falls into jump_to_new_pc.
 */
call_psxException:
	ldr	w3, [rFP, #LO_last_count]
	str	w2, [rFP, #LO_pcaddr]
	add	rCC, w3, rCC
	str	rCC, [rFP, #LO_cycle]	/* PCSX cycles */
	add	x2, rFP, #LO_reg_cop0	/* CP0 */
	bl	psxException

	/* note: psxException might do recursive recompiler call from its HLE code,
	 * so be ready for this */
/*
 * Resume translated execution at psxRegs.pc (pcaddr), reloading the
 * cycle counter which the C code may have modified; exits the dynarec
 * instead if `stop` was set.
 */
FUNCTION(jump_to_new_pc):
	ldr	w2, [rFP, #LO_stop]
	ldr	w1, [rFP, #LO_next_interupt]
	ldr	rCC, [rFP, #LO_cycle]
	ldr	w0, [rFP, #LO_pcaddr]
	sub	rCC, rCC, w1
	str	w1, [rFP, #LO_last_count]
	cbnz	w2, new_dyna_leave
	bl	ndrc_get_addr_ht
	br	x0
	ESIZE(jump_to_new_pc, .-jump_to_new_pc)
184
	/* stack must be aligned by 16, and include space for save_regs() use */
	.align 2
/*
 * Entry from C into translated code; x0 = &dynarec_local.
 * Saves all AAPCS64 callee-saved GPRs (x19-x28, fp, lr) into an SSP_ALL-
 * sized frame (generated code is free to clobber them), sets up
 * rFP = dynarec_local base and rCC = cycle - next_interupt (counts up
 * toward 0 at the next scheduled event), then jumps to the code for
 * psxRegs.pc.  Returns only through new_dyna_leave.
 */
FUNCTION(new_dyna_start):
	stp	x29, x30, [sp, #-SSP_ALL]!
	ldr	w1, [x0, #LO_next_interupt]
	ldr	w2, [x0, #LO_cycle]
	stp	x19, x20, [sp, #16*1]
	stp	x21, x22, [sp, #16*2]
	stp	x23, x24, [sp, #16*3]
	stp	x25, x26, [sp, #16*4]
	stp	x27, x28, [sp, #16*5]
	mov	rFP, x0
	ldr	w0, [rFP, #LO_pcaddr]
	str	w1, [rFP, #LO_last_count]
	sub	rCC, w2, w1
	bl	ndrc_get_addr_ht
	br	x0
	ESIZE(new_dyna_start, .-new_dyna_start)
203
	.align 2
/*
 * Exit from translated code back to new_dyna_start's caller:
 * flushes the cycle counter to psxRegs.cycle, restores the callee-saved
 * registers from the SSP_ALL frame and returns.
 */
FUNCTION(new_dyna_leave):
	ldr	w0, [rFP, #LO_last_count]
	add	rCC, rCC, w0		/* back to absolute cycles */
	str	rCC, [rFP, #LO_cycle]
	ldp	x19, x20, [sp, #16*1]
	ldp	x21, x22, [sp, #16*2]
	ldp	x23, x24, [sp, #16*3]
	ldp	x25, x26, [sp, #16*4]
	ldp	x27, x28, [sp, #16*5]
	ldp	x29, x30, [sp], #SSP_ALL
	ret
	ESIZE(new_dyna_leave, .-new_dyna_leave)
217
/* --------------------------------------- */
/* Trampolines between generated code and the C memory handlers. */

.align 2

/* Flush the running cycle count to psxRegs.cycle before entering C. */
.macro memhandler_pre
	/* w0 = addr/data, x1 = rhandler, w2 = cycles, x3 = whandler */
	ldr	w4, [rFP, #LO_last_count]
	add	w4, w4, w2
	str	w4, [rFP, #LO_cycle]
.endm

/* Reload the (possibly modified) cycle count after a C handler. */
.macro memhandler_post
	/* w2 = cycles_out, x3 = tmp */
	ldr	w3, [rFP, #LO_next_interupt]
	ldr	w2, [rFP, #LO_cycle]	// memhandlers can modify cc, like dma
	str	w3, [rFP, #LO_last_count]
	sub	w2, w2, w3
.endm

/* Callable wrappers for the macros above, for use from generated code. */
FUNCTION(do_memhandler_pre):
	memhandler_pre
	ret

FUNCTION(do_memhandler_post):
	memhandler_post
	ret
244
/*
 * Common body of the read handlers.
 * in: w0 = address, x1 = handler/memory table, w2 = cycles
 * Table entries are stored halved: `adds x3,x3,x3` doubles the entry
 * back and moves its original top bit into C.  C clear -> x3 is a direct
 * memory base: do the load (w4 = in-page offset) and return.  C set ->
 * x3 is a C handler: save lr, sync cycles, call it; execution then
 * continues at the code following the macro expansion in each caller.
 */
.macro pcsx_read_mem readop tab_shift
	/* w0 = address, x1 = handler_tab, w2 = cycles */
	ubfm	w4, w0, #\tab_shift, #11	/* (addr & 0xfff) >> tab_shift */
	ldr	x3, [x1, w4, uxtw #3]
	adds	x3, x3, x3
	bcs	0f
	\readop	w0, [x3, w4, uxtw #\tab_shift]
	ret
0:
	stp	xzr, x30, [sp, #-16]!
	memhandler_pre
	blr	x3
.endm

FUNCTION(jump_handler_read8):
	add	x1, x1, #0x1000/4*8 + 0x1000/2*8 /* shift to r8 part */
	pcsx_read_mem ldrb, 0
	/* handler path resumes here: restore lr, result already in w0 */
	ldp	xzr, x30, [sp], #16
	ret

FUNCTION(jump_handler_read16):
	add	x1, x1, #0x1000/4*8	/* shift to r16 part */
	pcsx_read_mem ldrh, 1
	ldp	xzr, x30, [sp], #16
	ret

FUNCTION(jump_handler_read32):
	pcsx_read_mem ldr, 2
	/* memhandler_post */
	ldp	xzr, x30, [sp], #16
	ret
276
/*
 * Common body of the write handlers; same halved-table-entry scheme as
 * pcsx_read_mem (C set after `adds` -> C handler, clear -> memory base).
 * in: w0 = address, w1 = data, w2 = cycles, x3 = handler_tab
 * On the handler path the data is moved (size-extended by \movop) into
 * w0, the handler is called, and flow continues at handler_write_end.
 */
.macro pcsx_write_mem wrtop movop tab_shift
	/* w0 = address, w1 = data, w2 = cycles, x3 = handler_tab */
	ubfm	w4, w0, #\tab_shift, #11	/* (addr & 0xfff) >> tab_shift */
	ldr	x3, [x3, w4, uxtw #3]
	adds	x3, x3, x3
	bcs	0f
	\wrtop	w1, [x3, w4, uxtw #\tab_shift]
	ret
0:
	stp	xzr, x30, [sp, #-16]!
	str	w0, [rFP, #LO_address]	/* some handlers still need it... */
	\movop	w0, w1
	memhandler_pre
	blr	x3
.endm

FUNCTION(jump_handler_write8):
	add	x3, x3, #0x1000/4*8 + 0x1000/2*8 /* shift to r8 part */
	pcsx_write_mem strb, uxtb, 0
	b	handler_write_end

FUNCTION(jump_handler_write16):
	add	x3, x3, #0x1000/4*8	/* shift to r16 part */
	pcsx_write_mem strh, uxth, 1
	b	handler_write_end

FUNCTION(jump_handler_write32):
	pcsx_write_mem str, mov, 2
	/* falls through */

/* Shared handler-path tail: resync cycles (w2 out), restore lr. */
handler_write_end:
	memhandler_post
	ldp	xzr, x30, [sp], #16
	ret
310
/*
 * MIPS SWL helper (little-endian): store the most-significant
 * (4 - (addr&3) ... wait, rather (addr&3)+1) bytes of w1 so they end at
 * `addr` — NOTE(review): label digits below equal addr & 3; ==3 stores
 * the whole word, ==0 stores only the top byte, matching LE SWL.
 * Fast path requires a direct-memory page in mem_wtab (same halved-entry
 * test as the other handlers); otherwise the opcode is interpreted.
 * Returns with w0 = cycles (cycle count handed back to generated code).
 */
FUNCTION(jump_handle_swl):
	/* w0 = address, w1 = data, w2 = cycles */
	ldr	x3, [rFP, #LO_mem_wtab]
	orr	w4, wzr, w0, lsr #12		/* page index */
	ldr	x3, [x3, w4, uxtw #3]
	adds	x3, x3, x3			/* C set -> not direct memory */
	bcs	jump_handle_swx_interp
	add	x3, x0, x3			/* host address for `addr` */
	mov	w0, w2
	tbz	x3, #1, 10f	// & 2
	tbz	x3, #0, 2f	// & 1
3:	/* addr&3 == 3: all 4 bytes, at word start */
	stur	w1, [x3, #-3]
	ret
2:	/* addr&3 == 2: top 3 bytes */
	lsr	w2, w1, #8
	lsr	w1, w1, #24
	sturh	w2, [x3, #-2]
	strb	w1, [x3]
	ret
10:
	tbz	x3, #0, 0f	// & 1
1:	/* addr&3 == 1: top 2 bytes */
	lsr	w1, w1, #16
	sturh	w1, [x3, #-1]
	ret
0:	/* addr&3 == 0: top byte only */
	lsr	w2, w1, #24
	strb	w2, [x3]
	ret
341
/*
 * MIPS SWR helper (little-endian): store the least-significant
 * 4-(addr&3) bytes of w1 starting at `addr`.  Label digits = addr & 3:
 * ==0 stores the whole word, ==3 only the low byte.  Same fast-path /
 * interpreter-fallback scheme as jump_handle_swl; returns w0 = cycles.
 */
FUNCTION(jump_handle_swr):
	/* w0 = address, w1 = data, w2 = cycles */
	ldr	x3, [rFP, #LO_mem_wtab]
	orr	w4, wzr, w0, lsr #12		/* page index */
	ldr	x3, [x3, w4, uxtw #3]
	adds	x3, x3, x3			/* C set -> not direct memory */
	bcs	jump_handle_swx_interp
	add	x3, x0, x3			/* host address for `addr` */
	mov	w0, w2
	tbz	x3, #1, 10f	// & 2
	tbz	x3, #0, 2f	// & 1
3:	/* addr&3 == 3: low byte only */
	strb	w1, [x3]
	ret
2:	/* addr&3 == 2: low 2 bytes */
	strh	w1, [x3]
	ret
10:
	tbz	x3, #0, 0f	// & 1
1:	/* addr&3 == 1: low 3 bytes */
	lsr	w2, w1, #8
	strb	w1, [x3]
	sturh	w2, [x3, #1]
	ret
0:	/* addr&3 == 0: all 4 bytes */
	str	w1, [x3]
	ret
369
/*
 * Slow path for swl/swr hitting a non-direct-memory page: sync the cycle
 * count and let the C interpreter (execI) execute the instruction, then
 * resume translated execution via jump_to_new_pc.
 */
jump_handle_swx_interp: /* almost never happens */
	ldr	w3, [rFP, #LO_last_count]
	add	x0, rFP, #LO_psxRegs
	add	w2, w3, w2
	str	w2, [rFP, #LO_cycle]	/* PCSX cycles */
	bl	execI
	b	jump_to_new_pc
377
/*
 * GTE stall check, called from generated code before a GTE operation.
 * Syncs cycles, calls gteCheckStallRaw(op_cycles, &psxRegs) and adds its
 * result (presumably the stall cycle count — confirm in gte.c) to rCC.
 * lr is spilled to memory because the bl below overwrites it.
 */
FUNCTION(call_gteStall):
	/* w0 = op_cycles, w1 = cycles */
	ldr	w2, [rFP, #LO_last_count]
	str	lr, [rFP, #LO_saved_lr]
	add	w1, w1, w2
	str	w1, [rFP, #LO_cycle]
	add	x1, rFP, #LO_psxRegs
	bl	gteCheckStallRaw
	ldr	lr, [rFP, #LO_saved_lr]
	add	rCC, rCC, w0
	ret
389
#ifdef DRC_DBG
#undef do_insn_cmp
/*
 * Debug-build tracing hook: spills the caller-saved GPRs (x2-x18, lr)
 * into the new_dyna_start stack frame (sp still points at it while
 * translated code runs; SSP_CALLEE_REGS is the spill area offset),
 * calls the C comparator do_insn_cmp, and restores everything so the
 * translated caller's register state is unchanged.
 */
FUNCTION(do_insn_cmp_arm64):
	stp	x2,  x3,  [sp, #(SSP_CALLEE_REGS + 2*8)]
	stp	x4,  x5,  [sp, #(SSP_CALLEE_REGS + 4*8)]
	stp	x6,  x7,  [sp, #(SSP_CALLEE_REGS + 6*8)]
	stp	x8,  x9,  [sp, #(SSP_CALLEE_REGS + 8*8)]
	stp	x10, x11, [sp, #(SSP_CALLEE_REGS + 10*8)]
	stp	x12, x13, [sp, #(SSP_CALLEE_REGS + 12*8)]
	stp	x14, x15, [sp, #(SSP_CALLEE_REGS + 14*8)]
	stp	x16, x17, [sp, #(SSP_CALLEE_REGS + 16*8)]
	stp	x18, x30, [sp, #(SSP_CALLEE_REGS + 18*8)]
	bl	do_insn_cmp
	ldp	x2,  x3,  [sp, #(SSP_CALLEE_REGS + 2*8)]
	ldp	x4,  x5,  [sp, #(SSP_CALLEE_REGS + 4*8)]
	ldp	x6,  x7,  [sp, #(SSP_CALLEE_REGS + 6*8)]
	ldp	x8,  x9,  [sp, #(SSP_CALLEE_REGS + 8*8)]
	ldp	x10, x11, [sp, #(SSP_CALLEE_REGS + 10*8)]
	ldp	x12, x13, [sp, #(SSP_CALLEE_REGS + 12*8)]
	ldp	x14, x15, [sp, #(SSP_CALLEE_REGS + 14*8)]
	ldp	x16, x17, [sp, #(SSP_CALLEE_REGS + 16*8)]
	ldp	x18, x30, [sp, #(SSP_CALLEE_REGS + 18*8)]
	ret
#endif