/*
 * drc: arm64 wip
 * libpcsxcore/new_dynarec/linkage_arm64.S (pcsx_rearmed)
 */
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
 *   linkage_arm.s for PCSX                                                *
 *   Copyright (C) 2009-2011 Ari64                                         *
 *   Copyright (C) 2021 notaz                                              *
 *                                                                         *
 *   This program is free software; you can redistribute it and/or modify  *
 *   it under the terms of the GNU General Public License as published by  *
 *   the Free Software Foundation; either version 2 of the License, or     *
 *   (at your option) any later version.                                   *
 *                                                                         *
 *   This program is distributed in the hope that it will be useful,       *
 *   but WITHOUT ANY WARRANTY; without even the implied warranty of        *
 *   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the         *
 *   GNU General Public License for more details.                          *
 *                                                                         *
 *   You should have received a copy of the GNU General Public License     *
 *   along with this program; if not, write to the                         *
 *   Free Software Foundation, Inc.,                                       *
 *   51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.          *
 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
21
#include "arm_features.h"
#include "new_dynarec_config.h"
#include "assem_arm64.h"
#include "linkage_offsets.h"
26
/* Zero-initialized state block for the dynarec.  Generated code keeps its
 * base address in a reserved register (rFP, see assem_arm64.h) and reaches
 * all emulator state through the fixed LO_* byte offsets generated into
 * linkage_offsets.h. */
.bss
	.align	4
	.global	dynarec_local
	.type	dynarec_local, %object
	.size	dynarec_local, LO_dynarec_local_size
dynarec_local:
	.space	LO_dynarec_local_size

/* Define a global symbol 'vname' aliasing the field of dynarec_local at
 * byte offset LO_<name>, so C code can refer to the field by name while
 * assembly uses rFP + LO_<name>. */
#define DRC_VAR_(name, vname, size_) \
	vname = dynarec_local + LO_##name; \
	.global vname; \
	.type vname, %object; \
	.size vname, size_

/* Same, with the symbol run through ESYM() (platform symbol decoration,
 * e.g. a leading underscore — defined in arm_features.h; TODO confirm). */
#define DRC_VAR(name, size_) \
	DRC_VAR_(name, ESYM(name), size_)

/* Aliased views into dynarec_local; sizes are in bytes.  The commented-out
 * entries document padding/unexported fields in the layout. */
DRC_VAR(next_interupt, 4)
DRC_VAR(cycle_count, 4)
DRC_VAR(last_count, 4)
DRC_VAR(pending_exception, 4)
DRC_VAR(stop, 4)
DRC_VAR(branch_target, 4)
DRC_VAR(address, 4)
#DRC_VAR(align0, 16) /* unused/alignment */
DRC_VAR(psxRegs, LO_psxRegs_end - LO_psxRegs)

/* psxRegs sub-fields */
#DRC_VAR(reg, 128)
DRC_VAR(lo, 4)
DRC_VAR(hi, 4)
DRC_VAR(reg_cop0, 128)
DRC_VAR(reg_cop2d, 128)
DRC_VAR(reg_cop2c, 128)
DRC_VAR(pcaddr, 4)
#DRC_VAR(code, 4)
#DRC_VAR(cycle, 4)
#DRC_VAR(interrupt, 4)
#DRC_VAR(intCycle, 256)

DRC_VAR(rcnts, 7*4*4)
DRC_VAR(inv_code_start, 4)
DRC_VAR(inv_code_end, 4)
DRC_VAR(mem_rtab, 8)
DRC_VAR(mem_wtab, 8)
DRC_VAR(psxH_ptr, 8)
DRC_VAR(invc_ptr, 8)
DRC_VAR(zeromem_ptr, 8)
DRC_VAR(scratch_buf_ptr, 8)
#DRC_VAR(align1, 16) /* unused/alignment */
DRC_VAR(mini_ht, 256)
DRC_VAR(restore_candidate, 512)


	.text
	.align	2

/* Common body of the dynamic linkers.
 * in: w0 = virtual PSX target address, x1 = branch instruction to patch.
 * WIP: no branch patching yet — every miss just looks the block up via
 * get_addr_ht (native entry returned in x0) and jumps to it. */
.macro dyna_linker_main
	/* XXX TODO: should be able to do better than this... */
	bl	get_addr_ht
	br	x0
.endm


/* Called from generated code on a not-yet-linked branch.
 * in: w0 = virtual target address, x1 = instruction to patch */
FUNCTION(dyna_linker):
	dyna_linker_main
	.size	dyna_linker, .-dyna_linker

/* Page-fault exception entry — not implemented on arm64 yet (aborts).
 * in: w0 = instruction pointer, w1 = fault address, w2 = cause */
FUNCTION(exec_pagefault):
	bl	abort
	.size	exec_pagefault, .-exec_pagefault

/* Special dynamic linker for the case where a page fault
 * may occur in a branch delay slot.
 * in: w0 = virtual target address, x1 = instruction to patch */
FUNCTION(dyna_linker_ds):
	dyna_linker_main
	.size	dyna_linker_ds, .-dyna_linker_ds

	.align	2

/* Block verification stubs — not implemented on arm64 yet (abort).
 * verify_code: w1 = source, w2 = target, w3 = length.
 * NOTE(review): the .size for verify_code_ds is emitted after verify_code's
 * body, so it covers both stubs — harmless for these abort stubs. */
FUNCTION(verify_code_ds):
	bl	abort
FUNCTION(verify_code):
	bl	abort
	.size	verify_code, .-verify_code
	.size	verify_code_ds, .-verify_code_ds

	.align	2
/* Called from generated code when the cycle counter (rCC) runs out.
 * Converts rCC back to an absolute cycle count, runs pending events via
 * gen_interupt(), then either returns to the caller block, re-enters via
 * pcaddr, or leaves the dynarec entirely if 'stop' was set.
 * Also scans a 32-bit chunk of the restore_candidate bitmap and calls
 * clean_blocks() for each set bit ("dirty" -> "clean" list). */
FUNCTION(cc_interrupt):
	ldr	w0, [rFP, #LO_last_count]
	mov	w2, #0x1fc
	add	rCC, w0, rCC			/* rCC = absolute cycle count */
	str	wzr, [rFP, #LO_pending_exception]
	and	w2, w2, rCC, lsr #17		/* word-aligned bitmap offset derived from cycle count */
	add	x3, rFP, #LO_restore_candidate
	str	rCC, [rFP, #LO_cycle]		/* PCSX cycles */
#	str	rCC, [rFP, #LO_reg_cop0+36]	/* Count */
	ldr	w19, [x3, w2, uxtw]		/* w19 = 32 candidate bits */
	mov	x21, lr				/* lr survives in callee-saved x21 */
	cbnz	w19, 4f
1:
	bl	gen_interupt
	mov	lr, x21
	ldr	rCC, [rFP, #LO_cycle]
	ldr	w0, [rFP, #LO_next_interupt]
	ldr	w1, [rFP, #LO_pending_exception]
	ldr	w2, [rFP, #LO_stop]
	str	w0, [rFP, #LO_last_count]
	sub	rCC, rCC, w0			/* back to down-counting form */
	cbnz	w2, new_dyna_leave		/* stop requested: exit dynarec */
	cbnz	w1, 2f				/* exception: resume at pcaddr */
	ret
2:
	ldr	w0, [rFP, #LO_pcaddr]
	bl	get_addr_ht
	br	x0
4:
	/* Move 'dirty' blocks to the 'clean' list */
	lsl	w20, w2, #3			/* w20 = first block index (offset*8) */
	str	wzr, [x3, w2, uxtw]		/* clear the bitmap word */
5:
	mov	w0, w20
	add	w20, w20, #1
	tbz	w19, #0, 6f			/* bit clear: skip this block */
	bl	clean_blocks
6:
	lsr	w19, w19, #1
	tst	w20, #31
	bne	5b				/* loop over all 32 bits */
	b	1b
	.size	cc_interrupt, .-cc_interrupt

	.align	2
/* Raise a coprocessor-unusable exception (MIPS exc code 0x2c>>2 = 11).
 * in: w0 = EPC (faulting PC).  Writes EPC/Cause, sets Status bit 1,
 * then enters the exception vector at 0x80000080 via get_addr_ht.
 * NOTE(review): uses 'fp' directly where other routines use rFP —
 * presumably the same register (x29); confirm against assem_arm64.h. */
FUNCTION(fp_exception):
	mov	w2, #0x10000000
0:
	ldr	w1, [fp, #LO_reg_cop0+48]	/* Status */
	mov	w3, #0x80000000
	str	w0, [fp, #LO_reg_cop0+56]	/* EPC */
	orr	w1, w1, #2
	add	w2, w2, #0x2c
	str	w1, [fp, #LO_reg_cop0+48]	/* Status */
	str	w2, [fp, #LO_reg_cop0+52]	/* Cause */
	add	w0, w3, #0x80			/* vector 0x80000080 */
	bl	get_addr_ht
	br	x0
	.size	fp_exception, .-fp_exception
	.align	2
/* Same, for a fault in a branch delay slot (BD bit in Cause). */
FUNCTION(fp_exception_ds):
	mov	w2, #0x90000000			/* Set high bit if delay slot */
	b	0b
	.size	fp_exception_ds, .-fp_exception_ds

	.align	2
/* Raise a SYSCALL exception (Cause = 0x20, MIPS exc code 8).
 * in: w0 = EPC.  Writes EPC/Cause, sets Status bit 1, then enters the
 * exception vector at 0x80000080 via get_addr_ht. */
FUNCTION(jump_syscall):
	ldr	w1, [fp, #LO_reg_cop0+48]	/* Status */
	mov	w3, #0x80000000
	str	w0, [fp, #LO_reg_cop0+56]	/* EPC */
	orr	w1, w1, #2
	mov	w2, #0x20
	str	w1, [fp, #LO_reg_cop0+48]	/* Status */
	str	w2, [fp, #LO_reg_cop0+52]	/* Cause */
	add	w0, w3, #0x80			/* vector 0x80000080 */
	bl	get_addr_ht
	br	x0
	.size	jump_syscall, .-jump_syscall
	.align	2

	.align	2
/* HLE syscall entry — not implemented on arm64 yet (aborts immediately).
 * The code after pcsx_return is the intended resume path (reload cycle
 * state, continue at pcaddr); it is currently dead behind 'bl abort'. */
FUNCTION(jump_syscall_hle):
	bl	abort

	/* note: psxException might do recursive recompiler call from its HLE code,
	 * so be ready for this */
pcsx_return:
	bl	abort // w10
	ldr	w1, [fp, #LO_next_interupt]
	ldr	w10, [fp, #LO_cycle]
	ldr	w0, [fp, #LO_pcaddr]
	sub	w10, w10, w1
	str	w1, [fp, #LO_last_count]
	bl	get_addr_ht
	br	x0
	.size	jump_syscall_hle, .-jump_syscall_hle

	.align	2
/* HLE BIOS call entry — not implemented on arm64 yet (aborts). */
FUNCTION(jump_hlecall):
	bl	abort
	.size	jump_hlecall, .-jump_hlecall

	.align	2
/* Interpreter-call entry — not implemented on arm64 yet (aborts). */
FUNCTION(jump_intcall):
	bl	abort
	.size	jump_intcall, .-jump_intcall

	/* stack must be aligned by 16, and include space for save_regs() use */
	.align	2
/* Enter the dynarec.
 * in: x0 = &dynarec_local.  Saves the AAPCS64 callee-saved registers the
 * generated code may clobber (x19-x28, x29/x30) in one SSP_ALL-byte frame,
 * sets rFP = &dynarec_local, rCC = cycle - next_interupt (down-counter),
 * and jumps to the block for pcaddr.  Exits via new_dyna_leave. */
FUNCTION(new_dyna_start):
	stp	x29, x30, [sp, #-SSP_ALL]!
	ldr	w1, [x0, #LO_next_interupt]
	ldr	w2, [x0, #LO_cycle]
	stp	x19, x20, [sp, #16*1]
	stp	x21, x22, [sp, #16*2]
	stp	x23, x24, [sp, #16*3]
	stp	x25, x26, [sp, #16*4]
	stp	x27, x28, [sp, #16*5]
	mov	rFP, x0
	ldr	w0, [rFP, #LO_pcaddr]
	str	w1, [rFP, #LO_last_count]
	sub	rCC, w2, w1			/* rCC = cycle - next_interupt */
	bl	get_addr_ht
	br	x0
	.size	new_dyna_start, .-new_dyna_start

	.align	2
/* Leave the dynarec: flush rCC back to psxRegs.cycle as an absolute count,
 * restore the callee-saved registers stacked by new_dyna_start, and return
 * to new_dyna_start's caller. */
FUNCTION(new_dyna_leave):
	ldr	w0, [rFP, #LO_last_count]
	add	rCC, rCC, w0			/* absolute cycle count */
	str	rCC, [rFP, #LO_cycle]
	ldp	x19, x20, [sp, #16*1]
	ldp	x21, x22, [sp, #16*2]
	ldp	x23, x24, [sp, #16*3]
	ldp	x25, x26, [sp, #16*4]
	ldp	x27, x28, [sp, #16*5]
	ldp	x29, x30, [sp], #SSP_ALL
	ret
	.size	new_dyna_leave, .-new_dyna_leave

/* --------------------------------------- */

.align 2

/* Flush the current cycle count to psxRegs before calling a C memory
 * handler (handlers may schedule/inspect events based on it).
 * w0 = addr/data, x1 = rhandler, w2 = cycles, x3 = whandler */
.macro memhandler_pre
	ldr	w4, [rFP, #LO_last_count]
	add	w4, w4, w2			/* absolute cycle count */
	str	w4, [rFP, #LO_cycle]
.endm

/* Recompute the down-counter after a handler ran (it may have changed
 * cycle/next_interupt).  out: w0 = cycle - next_interupt */
.macro memhandler_post
	ldr	w2, [rFP, #LO_next_interupt]
	ldr	w1, [rFP, #LO_cycle]
	sub	w0, w1, w2
	str	w2, [rFP, #LO_last_count]
.endm

/* Callable wrappers around the macros above. */
FUNCTION(do_memhandler_pre):
	memhandler_pre
	ret

FUNCTION(do_memhandler_post):
	memhandler_post
	ret

/* Guest memory read through the mem_rtab-style handler table.
 * in: w0 = address, x1 = handler table, w2 = cycles.
 * ubfm extracts address bits [\tab_shift..11] as the table index.
 * Table entries are pre-halved, with the top bit marking handler entries:
 * 'adds x3, x3, x3' doubles the entry back and moves that tag into the
 * carry flag — direct memory read on fall-through, C handler on carry.
 * lr is stacked (paired with xzr to keep sp 16-aligned) and popped in
 * handler_read_end. */
.macro pcsx_read_mem readop tab_shift
	stp	xzr, x30, [sp, #-16]!
	ubfm	w4, w0, #\tab_shift, #11
	ldr	x3, [x1, w4, uxtw #3]
	adds	x3, x3, x3			/* entry*2; C = handler flag */
	bcs	0f
	\readop	w0, [x3, w4, uxtw #\tab_shift]	/* direct memory read */
	ret
0:
	memhandler_pre
	blr	x3				/* C handler; result in w0 */
.endm

FUNCTION(jump_handler_read8):
	add	x1, x1, #0x1000/4*4 + 0x1000/2*4	/* shift to r8 part */
	pcsx_read_mem ldrb, 0
	b	handler_read_end

FUNCTION(jump_handler_read16):
	add	x1, x1, #0x1000/4*4		/* shift to r16 part */
	pcsx_read_mem ldrh, 1
	b	handler_read_end

FUNCTION(jump_handler_read32):
	pcsx_read_mem ldr, 2

/* Common tail of the handler path: restore lr and return. */
handler_read_end:
	ldp	xzr, x30, [sp], #16
	ret

/* Guest memory write through the handler table (same entry tagging as
 * pcsx_read_mem: pre-halved entries, top bit = handler, recovered via
 * 'adds' into the carry flag).
 * in: w0 = address, w1 = data, w2 = cycles, x3 = handler table.
 * Direct path returns the unchanged cycle count in w0; handler path moves
 * the data into w0 (\movop zero-extends to the access width) and falls
 * through to handler_write_end, which recomputes the cycle down-counter. */
.macro pcsx_write_mem wrtop movop tab_shift
	stp	xzr, x30, [sp, #-16]!		/* save lr, keep sp 16-aligned */
	ubfm	w4, w0, #\tab_shift, #11
	ldr	x3, [x3, w4, uxtw #3]
	str	w0, [rFP, #LO_address]		/* some handlers still need it... */
	adds	x3, x3, x3			/* entry*2; C = handler flag */
#	str	lr, [rFP, #0]
	bcs	0f
	mov	w0, w2				/* cycle return */
	\wrtop	w1, [x3, w4, uxtw #\tab_shift]	/* direct memory write */
	ret
0:
	\movop	w0, w1				/* handler takes data in w0 */
	memhandler_pre
	blr	x3
.endm

FUNCTION(jump_handler_write8):
	add	x3, x3, #0x1000/4*4 + 0x1000/2*4	/* shift to r8 part */
	pcsx_write_mem strb uxtb 0
	b	handler_write_end

FUNCTION(jump_handler_write16):
	add	x3, x3, #0x1000/4*4		/* shift to r16 part */
	pcsx_write_mem strh uxth 1
	b	handler_write_end

FUNCTION(jump_handler_write32):
	pcsx_write_mem str mov 2

/* Common tail of the handler path: refresh cycle counter, restore lr. */
handler_write_end:
	memhandler_post
	ldp	xzr, x30, [sp], #16
	ret

/* Unaligned-store helpers (MIPS swl/swr) — not implemented on arm64 yet. */
FUNCTION(jump_handle_swl):
	bl	abort

FUNCTION(jump_handle_swr):
	bl	abort
