/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
 * linkage_arm.s for PCSX                                                   *
 * Copyright (C) 2009-2011 Ari64                                            *
 * Copyright (C) 2010-2013 Gražvydas "notaz" Ignotas                        *
 *                                                                          *
 * This program is free software; you can redistribute it and/or modify     *
 * it under the terms of the GNU General Public License as published by     *
 * the Free Software Foundation; either version 2 of the License, or        *
 * (at your option) any later version.                                      *
 *                                                                          *
 * This program is distributed in the hope that it will be useful,          *
 * but WITHOUT ANY WARRANTY; without even the implied warranty of           *
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the             *
 * GNU General Public License for more details.                             *
 *                                                                          *
 * You should have received a copy of the GNU General Public License        *
 * along with this program; if not, write to the                            *
 * Free Software Foundation, Inc.,                                          *
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.             *
 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */

#include "arm_features.h"
#include "new_dynarec_config.h"
#include "linkage_offsets.h"


#ifdef __MACH__
#define dynarec_local ESYM(dynarec_local)
#define ndrc_add_jump_out ESYM(ndrc_add_jump_out)
#define ndrc_get_addr_ht ESYM(ndrc_get_addr_ht)
#define ndrc_get_addr_ht_param ESYM(ndrc_get_addr_ht_param)
#define ndrc_write_invalidate_one ESYM(ndrc_write_invalidate_one)
#define gen_interupt ESYM(gen_interupt)
#define gteCheckStallRaw ESYM(gteCheckStallRaw)
#define psxException ESYM(psxException)
#define execI ESYM(execI)
#endif

    .bss
    .align 4
    .global dynarec_local
    .type dynarec_local, %object
    .size dynarec_local, LO_dynarec_local_size
dynarec_local:
    .space LO_dynarec_local_size

#define DRC_VAR_(name, vname, size_) \
    vname = dynarec_local + LO_##name; \
    .global vname; \
    .type vname, %object; \
    .size vname, size_

#define DRC_VAR(name, size_) \
    DRC_VAR_(name, ESYM(name), size_)

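/* Each DRC_VAR(name, size) below carves a named, globally visible object of
 * the given size out of dynarec_local, at the LO_<name> offset taken from
 * linkage_offsets.h, so C code and this asm agree on the layout.  For
 * example, DRC_VAR(next_interupt, 4) expands (via DRC_VAR_) to roughly:
 *   next_interupt = dynarec_local + LO_next_interupt
 *   .global next_interupt; .type next_interupt, %object; .size next_interupt, 4
 * with ESYM() adding the platform symbol prefix on targets that need it. */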
DRC_VAR(next_interupt, 4)
DRC_VAR(cycle_count, 4)
DRC_VAR(last_count, 4)
DRC_VAR(pending_exception, 4)
DRC_VAR(stop, 4)
DRC_VAR(branch_target, 4)
DRC_VAR(address, 4)
DRC_VAR(hack_addr, 4)
DRC_VAR(psxRegs, LO_psxRegs_end - LO_psxRegs)

/* psxRegs */
@DRC_VAR(reg, 128)
DRC_VAR(lo, 4)
DRC_VAR(hi, 4)
DRC_VAR(reg_cop0, 128)
DRC_VAR(reg_cop2d, 128)
DRC_VAR(reg_cop2c, 128)
DRC_VAR(pcaddr, 4)
@DRC_VAR(code, 4)
@DRC_VAR(cycle, 4)
@DRC_VAR(interrupt, 4)
@DRC_VAR(intCycle, 256)

DRC_VAR(rcnts, 7*4*4)
DRC_VAR(inv_code_start, 4)
DRC_VAR(inv_code_end, 4)
DRC_VAR(mem_rtab, 4)
DRC_VAR(mem_wtab, 4)
DRC_VAR(psxH_ptr, 4)
DRC_VAR(zeromem_ptr, 4)
DRC_VAR(invc_ptr, 4)
DRC_VAR(scratch_buf_ptr, 4)
DRC_VAR(ram_offset, 4)
DRC_VAR(mini_ht, 256)


    .syntax unified
    .text
    .align 2

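/* Register conventions used by the generated code and the stubs below
 * (see new_dyna_start and cc_interrupt): fp permanently holds the address
 * of dynarec_local, so emulator state is accessed as [fp, #LO_xxx]; r10
 * holds the cycle counter relative to last_count, i.e. psxRegs.cycle is
 * reconstructed as last_count + r10 whenever it has to be flushed. */
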
#ifndef HAVE_ARMV5
.macro blx rd
    mov lr, pc
    bx \rd
.endm
#endif

.macro load_varadr reg var
#if defined(HAVE_ARMV7) && defined(TEXRELS_FORBIDDEN)
    movw \reg, #:lower16:(\var-(1678f+8))
    movt \reg, #:upper16:(\var-(1678f+8))
1678:
    add \reg, pc
#elif defined(HAVE_ARMV7) && !defined(__PIC__)
    movw \reg, #:lower16:\var
    movt \reg, #:upper16:\var
#else
    ldr \reg, =\var
#endif
.endm

.macro load_varadr_ext reg var
#if defined(HAVE_ARMV7) && defined(TEXRELS_FORBIDDEN)
    movw \reg, #:lower16:(ptr_\var-(1678f+8))
    movt \reg, #:upper16:(ptr_\var-(1678f+8))
1678:
    ldr \reg, [pc, \reg]
#else
    load_varadr \reg \var
#endif
.endm

.macro mov_16 reg imm
#ifdef HAVE_ARMV7
    movw \reg, #\imm
#else
    mov \reg, #(\imm & 0x00ff)
    orr \reg, #(\imm & 0xff00)
#endif
.endm

.macro mov_24 reg imm
#ifdef HAVE_ARMV7
    movw \reg, #(\imm & 0xffff)
    movt \reg, #(\imm >> 16)
#else
    mov \reg, #(\imm & 0x0000ff)
    orr \reg, #(\imm & 0x00ff00)
    orr \reg, #(\imm & 0xff0000)
#endif
.endm

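/* dyna_linker: called from a not-yet-linked branch in a translated block.
 * It resolves the virtual target via the hash table and, when the code is
 * writable, patches the calling ARM b/bl in place: r7 holds the original
 * instruction, and ((imm24 + 2) << 8) >> 6 added to the branch address
 * recovers the old target (the +2 accounts for the pc+8 pipeline bias).
 * A sketch of the patch step (illustrative only):
 *   new_insn = (old_insn & 0xff000000)
 *            | (((u32)(target - branch_addr - 8) >> 2) & 0x00ffffff);
 */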
FUNCTION(dyna_linker):
    /* r0 = virtual target address */
    /* r1 = pointer to an instruction to patch */
#ifndef NO_WRITE_EXEC
    ldr r7, [r1]
    mov r4, r0
    add r6, r7, #2
    mov r5, r1
    lsl r6, r6, #8
    /* must not compile - that might expire the caller block */
    mov r1, #0
    bl ndrc_get_addr_ht_param

    movs r8, r0
    beq 0f
    add r6, r5, r6, asr #6 /* old target */
    teq r0, r6
    moveq pc, r0 /* Stale i-cache */
    mov r0, r4
    mov r1, r6
    bl ndrc_add_jump_out

    sub r2, r8, r5
    and r1, r7, #0xff000000
    lsl r2, r2, #6
    sub r1, r1, #2
    add r1, r1, r2, lsr #8
    str r1, [r5]
    mov pc, r8
0:
    mov r0, r4
#else
    /* XXX: should be able to do better than this... */
#endif
    bl ndrc_get_addr_ht
    mov pc, r0
    .size dyna_linker, .-dyna_linker

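/* jump_vaddr_rN: trampolines used for computed/indirect jumps.  Each one
 * moves the virtual target address from rN into r0 and branches to
 * jump_vaddr_r0, which looks the address up in the hash table
 * (ndrc_get_addr_ht) and jumps to the translated code. */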
    .align 2
FUNCTION(jump_vaddr_r1):
    mov r0, r1
    b jump_vaddr_r0
    .size jump_vaddr_r1, .-jump_vaddr_r1
FUNCTION(jump_vaddr_r2):
    mov r0, r2
    b jump_vaddr_r0
    .size jump_vaddr_r2, .-jump_vaddr_r2
FUNCTION(jump_vaddr_r3):
    mov r0, r3
    b jump_vaddr_r0
    .size jump_vaddr_r3, .-jump_vaddr_r3
FUNCTION(jump_vaddr_r4):
    mov r0, r4
    b jump_vaddr_r0
    .size jump_vaddr_r4, .-jump_vaddr_r4
FUNCTION(jump_vaddr_r5):
    mov r0, r5
    b jump_vaddr_r0
    .size jump_vaddr_r5, .-jump_vaddr_r5
FUNCTION(jump_vaddr_r6):
    mov r0, r6
    b jump_vaddr_r0
    .size jump_vaddr_r6, .-jump_vaddr_r6
FUNCTION(jump_vaddr_r8):
    mov r0, r8
    b jump_vaddr_r0
    .size jump_vaddr_r8, .-jump_vaddr_r8
FUNCTION(jump_vaddr_r9):
    mov r0, r9
    b jump_vaddr_r0
    .size jump_vaddr_r9, .-jump_vaddr_r9
FUNCTION(jump_vaddr_r10):
    mov r0, r10
    b jump_vaddr_r0
    .size jump_vaddr_r10, .-jump_vaddr_r10
FUNCTION(jump_vaddr_r12):
    mov r0, r12
    b jump_vaddr_r0
    .size jump_vaddr_r12, .-jump_vaddr_r12
FUNCTION(jump_vaddr_r7):
    add r0, r7, #0
    .size jump_vaddr_r7, .-jump_vaddr_r7
FUNCTION(jump_vaddr_r0):
    bl ndrc_get_addr_ht
    mov pc, r0
    .size jump_vaddr_r0, .-jump_vaddr_r0

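/* cc_interrupt: reached when the cycle counter (r10) runs out.  It flushes
 * the cycle count back to psxRegs.cycle, calls gen_interupt, then reloads
 * next_interupt/cycle.  If 'stop' is set it unwinds back to the caller of
 * new_dyna_start; if an exception became pending it re-resolves pcaddr
 * through the hash table, otherwise it returns to the translated block. */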
    .align 2
FUNCTION(cc_interrupt):
    ldr r0, [fp, #LO_last_count]
    mov r1, #0
    add r10, r0, r10
    str r1, [fp, #LO_pending_exception]
    str r10, [fp, #LO_cycle] /* PCSX cycles */
    mov r10, lr

    add r0, fp, #LO_reg_cop0 /* CP0 */
    bl gen_interupt
    mov lr, r10
    ldr r10, [fp, #LO_cycle]
    ldr r0, [fp, #LO_next_interupt]
    ldr r1, [fp, #LO_pending_exception]
    ldr r2, [fp, #LO_stop]
    str r0, [fp, #LO_last_count]
    sub r10, r10, r0
    tst r2, r2
    ldmfdne sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, pc}
    tst r1, r1
    moveq pc, lr
    ldr r0, [fp, #LO_pcaddr]
    bl ndrc_get_addr_ht
    mov pc, r0
    .size cc_interrupt, .-cc_interrupt

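/* Exception entry stubs.  All of them funnel into call_psxException with
 * r0 = Cause exception code (already shifted left by 2), r1 = branch-delay
 * flag passed on to psxException, and r2 = faulting PC (stored to pcaddr).
 * The address-error stubs are entered with the bad virtual address in r1
 * and save it to BadVaddr (cop0 reg 8, hence the (34+8)*4 offset into
 * psxRegs) before overwriting r1 with the flag. */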
    .align 2
FUNCTION(jump_addrerror_ds): /* R3000E_AdEL / R3000E_AdES in r0 */
    str r1, [fp, #(LO_psxRegs + (34+8)*4)] /* BadVaddr */
    mov r1, #1
    b call_psxException
FUNCTION(jump_addrerror):
    str r1, [fp, #(LO_psxRegs + (34+8)*4)] /* BadVaddr */
    mov r1, #0
    b call_psxException
FUNCTION(jump_overflow_ds):
    mov r0, #(12<<2) /* R3000E_Ov */
    mov r1, #1
    b call_psxException
FUNCTION(jump_overflow):
    mov r0, #(12<<2)
    mov r1, #0
    b call_psxException
FUNCTION(jump_break_ds):
    mov r0, #(9<<2) /* R3000E_Bp */
    mov r1, #1
    b call_psxException
FUNCTION(jump_break):
    mov r0, #(9<<2)
    mov r1, #0
    b call_psxException
FUNCTION(jump_syscall_ds):
    mov r0, #(8<<2) /* R3000E_Syscall */
    mov r1, #2
    b call_psxException
FUNCTION(jump_syscall):
    mov r0, #(8<<2)
    mov r1, #0

call_psxException:
    ldr r3, [fp, #LO_last_count]
    str r2, [fp, #LO_pcaddr]
    add r10, r3, r10
    str r10, [fp, #LO_cycle] /* PCSX cycles */
    add r2, fp, #LO_reg_cop0 /* CP0 */
    bl psxException

    /* note: psxException may make a recursive recompiler call from its
     * HLE code, so be ready for that */
FUNCTION(jump_to_new_pc):
    ldr r2, [fp, #LO_stop]
    ldr r1, [fp, #LO_next_interupt]
    ldr r10, [fp, #LO_cycle]
    ldr r0, [fp, #LO_pcaddr]
    tst r2, r2
    str r1, [fp, #LO_last_count]
    sub r10, r10, r1
    bne new_dyna_leave
    bl ndrc_get_addr_ht
    mov pc, r0
    .size jump_to_new_pc, .-jump_to_new_pc

    .align 2
FUNCTION(new_dyna_leave):
    ldr r0, [fp, #LO_last_count]
    add r10, r0, r10
    str r10, [fp, #LO_cycle]
    ldmfd sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, pc}
    .size new_dyna_leave, .-new_dyna_leave

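/* invalidate_addr_rN: called by generated code when a store may have hit
 * memory that holds already-translated code.  The caller-visible registers
 * are spilled to the start of dynarec_local (stmia fp), the written address
 * is moved to r0, and ndrc_write_invalidate_one is called unless the
 * address falls inside the already-known-safe range
 * inv_code_start..inv_code_end. */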
    .align 2
FUNCTION(invalidate_addr_r0):
    stmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, lr}
    b invalidate_addr_call
    .size invalidate_addr_r0, .-invalidate_addr_r0
    .align 2
FUNCTION(invalidate_addr_r1):
    stmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, lr}
    mov r0, r1
    b invalidate_addr_call
    .size invalidate_addr_r1, .-invalidate_addr_r1
    .align 2
FUNCTION(invalidate_addr_r2):
    stmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, lr}
    mov r0, r2
    b invalidate_addr_call
    .size invalidate_addr_r2, .-invalidate_addr_r2
    .align 2
FUNCTION(invalidate_addr_r3):
    stmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, lr}
    mov r0, r3
    b invalidate_addr_call
    .size invalidate_addr_r3, .-invalidate_addr_r3
    .align 2
FUNCTION(invalidate_addr_r4):
    stmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, lr}
    mov r0, r4
    b invalidate_addr_call
    .size invalidate_addr_r4, .-invalidate_addr_r4
    .align 2
FUNCTION(invalidate_addr_r5):
    stmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, lr}
    mov r0, r5
    b invalidate_addr_call
    .size invalidate_addr_r5, .-invalidate_addr_r5
    .align 2
FUNCTION(invalidate_addr_r6):
    stmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, lr}
    mov r0, r6
    b invalidate_addr_call
    .size invalidate_addr_r6, .-invalidate_addr_r6
    .align 2
FUNCTION(invalidate_addr_r7):
    stmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, lr}
    mov r0, r7
    b invalidate_addr_call
    .size invalidate_addr_r7, .-invalidate_addr_r7
    .align 2
FUNCTION(invalidate_addr_r8):
    stmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, lr}
    mov r0, r8
    b invalidate_addr_call
    .size invalidate_addr_r8, .-invalidate_addr_r8
    .align 2
FUNCTION(invalidate_addr_r9):
    stmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, lr}
    mov r0, r9
    b invalidate_addr_call
    .size invalidate_addr_r9, .-invalidate_addr_r9
    .align 2
FUNCTION(invalidate_addr_r10):
    stmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, lr}
    mov r0, r10
    b invalidate_addr_call
    .size invalidate_addr_r10, .-invalidate_addr_r10
    .align 2
FUNCTION(invalidate_addr_r12):
    stmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, lr}
    mov r0, r12
    .size invalidate_addr_r12, .-invalidate_addr_r12
    .align 2
invalidate_addr_call:
    ldr r12, [fp, #LO_inv_code_start]
    ldr lr, [fp, #LO_inv_code_end]
    cmp r0, r12
    cmpcs lr, r0
    blcc ndrc_write_invalidate_one
    ldmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, pc}
    .size invalidate_addr_call, .-invalidate_addr_call

    .align 2
FUNCTION(new_dyna_start):
    /* ip is pushed only to keep the stack 8-byte aligned, as the EABI requires */
    stmfd sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, lr}
    mov fp, r0 /* dynarec_local */
    ldr r0, [fp, #LO_pcaddr]
    bl ndrc_get_addr_ht
    ldr r1, [fp, #LO_next_interupt]
    ldr r10, [fp, #LO_cycle]
    str r1, [fp, #LO_last_count]
    sub r10, r10, r1
    mov pc, r0
    .size new_dyna_start, .-new_dyna_start

/* --------------------------------------- */

.macro memhandler_post
    /* r2 = cycles_out, r3 = tmp */
    ldr r3, [fp, #LO_next_interupt]
    ldr r2, [fp, #LO_cycle] @ memhandlers can modify cc, like dma
    str r3, [fp, #LO_last_count]
    sub r2, r2, r3
.endm

.align 2

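/* Memory access handlers.  The generated code resolves the per-page entry
 * from mem_rtab/mem_wtab and calls jump_handler_* with that page's
 * second-level handler table; these stubs index it by the offset within
 * the 4KB page (with separate 32/16/8-bit sub-tables for the different
 * access sizes, hence the "shift to rN part" adds).  Entries are stored
 * shifted right by one: "lsls rX, #1" restores the value and moves bit 31
 * into the carry flag, so carry clear means a direct pointer (handled by
 * the cc-conditional load/store) and carry set means a C handler that is
 * called with the cycle count flushed to psxRegs.cycle. */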
.macro pcsx_read_mem_part readop tab_shift
    /* r0 = address, r1 = handler_tab, r2 = cycles */
    lsl r3, r0, #20
    lsr r3, #(20+\tab_shift)
    ldr r12, [fp, #LO_last_count]
    ldr r1, [r1, r3, lsl #2]
    add r12, r2, r12
    lsls r1, #1
.if \tab_shift == 1
    lsl r3, #1
    \readop r0, [r1, r3]
.else
    \readop r0, [r1, r3, lsl #\tab_shift]
.endif
    movcc pc, lr
    mov r2, r12
    str r12, [fp, #LO_cycle]
.endm

FUNCTION(jump_handler_read8):
    add r1, #0x1000/4*4 + 0x1000/2*4 @ shift to r8 part
    pcsx_read_mem_part ldrbcc, 0
    bx r1 @ addr, unused, cycles

FUNCTION(jump_handler_read16):
    add r1, #0x1000/4*4 @ shift to r16 part
    pcsx_read_mem_part ldrhcc, 1
    bx r1 @ addr, unused, cycles

FUNCTION(jump_handler_read32):
    pcsx_read_mem_part ldrcc, 2
    bx r1 @ addr, unused, cycles
#if 0
    str lr, [fp, #LO_saved_lr]
    blx r1
    ldr lr, [fp, #LO_saved_lr]
    memhandler_post
    bx lr
#endif

.macro pcsx_write_mem wrtop tab_shift
    /* r0 = address, r1 = data, r2 = cycles, r3 = handler_tab */
    lsl r12, r0, #20
    lsr r12, #(20+\tab_shift)
    ldr r3, [r3, r12, lsl #2]
    str r0, [fp, #LO_address] @ some handlers still need it..
    lsls r3, #1
.if \tab_shift == 1
    lsl r12, #1
    \wrtop r1, [r3, r12]
.else
    \wrtop r1, [r3, r12, lsl #\tab_shift]
.endif
    movcc pc, lr
    ldr r12, [fp, #LO_last_count]
    mov r0, r1
    add r2, r2, r12
    str r2, [fp, #LO_cycle]

    str lr, [fp, #LO_saved_lr]
    blx r3
    ldr lr, [fp, #LO_saved_lr]

    memhandler_post
    bx lr
.endm

FUNCTION(jump_handler_write8):
    add r3, #0x1000/4*4 + 0x1000/2*4 @ shift to r8 part
    pcsx_write_mem strbcc, 0

FUNCTION(jump_handler_write16):
    add r3, #0x1000/4*4 @ shift to r16 part
    pcsx_write_mem strhcc, 1

FUNCTION(jump_handler_write32):
    pcsx_write_mem strcc, 2

FUNCTION(jump_handler_write_h):
    /* r0 = address, r1 = data, r2 = cycles, r3 = handler */
    ldr r12, [fp, #LO_last_count]
    str r0, [fp, #LO_address] @ some handlers still need it..
    add r2, r2, r12
    mov r0, r1
    str r2, [fp, #LO_cycle]

    str lr, [fp, #LO_saved_lr]
    blx r3
    ldr lr, [fp, #LO_saved_lr]

    memhandler_post
    bx lr

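/* jump_handle_swl / jump_handle_swr: emulate the MIPS SWL/SWR unaligned
 * store pair.  The page is looked up in mem_wtab; if it maps to plain
 * memory the partial word is stored directly, with the low two address
 * bits selecting which bytes are written, otherwise (carry set, i.e. a
 * handler page) the instruction is punted to the interpreter via execI. */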
5c6457c3 525FUNCTION(jump_handle_swl):
b96d3df7 526 /* r0 = address, r1 = data, r2 = cycles */
b1f89e6f 527 ldr r3, [fp, #LO_mem_wtab]
b96d3df7 528 mov r12,r0,lsr #12
529 ldr r3, [r3, r12, lsl #2]
530 lsls r3, #1
de6dbc52 531 bcs jump_handle_swx_interp
b96d3df7 532 add r3, r0, r3
533 mov r0, r2
534 tst r3, #2
535 beq 101f
536 tst r3, #1
537 beq 2f
5383:
539 str r1, [r3, #-3]
540 bx lr
5412:
542 lsr r2, r1, #8
543 lsr r1, #24
544 strh r2, [r3, #-2]
545 strb r1, [r3]
546 bx lr
547101:
548 tst r3, #1
549 lsrne r1, #16 @ 1
550 lsreq r12, r1, #24 @ 0
b861c0a9 551 strhne r1, [r3, #-1]
552 strbeq r12, [r3]
b96d3df7 553 bx lr
b96d3df7 554
5c6457c3 555FUNCTION(jump_handle_swr):
b96d3df7 556 /* r0 = address, r1 = data, r2 = cycles */
b1f89e6f 557 ldr r3, [fp, #LO_mem_wtab]
b96d3df7 558 mov r12,r0,lsr #12
559 ldr r3, [r3, r12, lsl #2]
560 lsls r3, #1
de6dbc52 561 bcs jump_handle_swx_interp
b96d3df7 562 add r3, r0, r3
563 and r12,r3, #3
564 mov r0, r2
565 cmp r12,#2
b861c0a9 566 strbgt r1, [r3] @ 3
567 strheq r1, [r3] @ 2
b96d3df7 568 cmp r12,#1
569 strlt r1, [r3] @ 0
570 bxne lr
571 lsr r2, r1, #8 @ 1
572 strb r1, [r3]
573 strh r2, [r3, #1]
574 bx lr
b96d3df7 575
de6dbc52 576jump_handle_swx_interp: /* almost never happens */
577 ldr r3, [fp, #LO_last_count]
578 add r0, fp, #LO_psxRegs
579 add r2, r3, r2
580 str r2, [fp, #LO_cycle] /* PCSX cycles */
581 bl execI
582 b jump_to_new_pc
b96d3df7 583
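/* Root counter (rcnt) read helpers.  Mode-0 counters simply return
 * (cycles - cycleStart) truncated to 16 bits.  The mode-1 variants scale
 * instead of counting raw cycles: rcnt0 multiplies by 0x3334/2^16, a
 * fixed-point approximation of 1/5 (13108/65536 ~= 0.2), rcnt1 divides by
 * the hsync period via the 0x1e6cde/2^32 reciprocal, and rcnt2 shifts
 * right by 3 to divide by 8. */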
.macro rcntx_read_mode0 num
    /* r0 = address, r2 = cycles */
    ldr r3, [fp, #LO_rcnts+6*4+7*4*\num] @ cycleStart
    mov r0, r2, lsl #16
    sub r0, r0, r3, lsl #16
    lsr r0, #16
    bx lr
.endm

FUNCTION(rcnt0_read_count_m0):
    rcntx_read_mode0 0

FUNCTION(rcnt1_read_count_m0):
    rcntx_read_mode0 1

FUNCTION(rcnt2_read_count_m0):
    rcntx_read_mode0 2

FUNCTION(rcnt0_read_count_m1):
    /* r0 = address, r2 = cycles */
    ldr r3, [fp, #LO_rcnts+6*4+7*4*0] @ cycleStart
    mov_16 r1, 0x3334
    sub r2, r2, r3
    mul r0, r1, r2 @ /= 5
    lsr r0, #16
    bx lr

FUNCTION(rcnt1_read_count_m1):
    /* r0 = address, r2 = cycles */
    ldr r3, [fp, #LO_rcnts+6*4+7*4*1]
    mov_24 r1, 0x1e6cde
    sub r2, r2, r3
    umull r3, r0, r1, r2 @ ~ /= hsync_cycles, max ~0x1e6cdd
    bx lr

FUNCTION(rcnt2_read_count_m1):
    /* r0 = address, r2 = cycles */
    ldr r3, [fp, #LO_rcnts+6*4+7*4*2]
    mov r0, r2, lsl #16-3
    sub r0, r0, r3, lsl #16-3
    lsr r0, #16 @ /= 8
    bx lr

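/* call_gteStall: flushes the cycle count, asks gteCheckStallRaw how many
 * cycles the current GTE operation stalls for, and charges the returned
 * amount to the block's cycle counter (r10). */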
FUNCTION(call_gteStall):
    /* r0 = op_cycles, r1 = cycles */
    ldr r2, [fp, #LO_last_count]
    str lr, [fp, #LO_saved_lr]
    add r1, r1, r2
    str r1, [fp, #LO_cycle]
    add r1, fp, #LO_psxRegs
    bl gteCheckStallRaw
    ldr lr, [fp, #LO_saved_lr]
    add r10, r10, r0
    bx lr

#ifdef HAVE_ARMV6

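/* get_reg(regmap, r): ARMv6 SIMD version of the register-map search.  The
 * wanted register number is replicated into every byte of r1, XORed with
 * 12 bytes of the map at once, and uadd8/sel turn "byte == 0" into a
 * per-byte match mask; clz then converts the first match in each word into
 * an index.  Returns the index of the map slot holding r (12 for the last,
 * separately checked byte), or -1 (all ones) if it is not present; slot 11
 * is masked out via EXCLUDE_REG. */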
FUNCTION(get_reg):
    ldr r12, [r0]
    and r1, r1, #0xff
    ldr r2, [r0, #4]
    orr r1, r1, r1, lsl #8
    ldr r3, [r0, #8]
    orr r1, r1, r1, lsl #16 @ searched char in every byte
    ldrb r0, [r0, #12] @ last byte
    eor r12, r12, r1
    eor r2, r2, r1
    eor r3, r3, r1
    cmp r0, r1, lsr #24
    mov r0, #12
    mvn r1, #0 @ r1=~0
    bxeq lr
    orr r3, r3, #0xff000000 @ EXCLUDE_REG
    uadd8 r0, r12, r1 @ add and set GE bits when not 0 (match)
    mov r12, #0
    sel r0, r12, r1 @ 0 if no match, else ff in some byte
    uadd8 r2, r2, r1
    sel r2, r12, r1
    uadd8 r3, r3, r1
    sel r3, r12, r1
    mov r12, #3
    clz r0, r0 @ 0, 8, 16, 24 or 32
    clz r2, r2
    clz r3, r3
    sub r0, r12, r0, lsr #3 @ 3, 2, 1, 0 or -1
    sub r2, r12, r2, lsr #3
    sub r3, r12, r3, lsr #3
    orr r2, r2, #4
    orr r3, r3, #8
    and r0, r0, r2
    and r0, r0, r3
    bx lr

#endif /* HAVE_ARMV6 */

@ vim:filetype=armasm