1 /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
2 * linkage_arm.s for PCSX *
3 * Copyright (C) 2009-2011 Ari64 *
4 * Copyright (C) 2010-2013 Gražvydas "notaz" Ignotas *
6 * This program is free software; you can redistribute it and/or modify *
7 * it under the terms of the GNU General Public License as published by *
8 * the Free Software Foundation; either version 2 of the License, or *
9 * (at your option) any later version. *
11 * This program is distributed in the hope that it will be useful, *
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of *
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
14 * GNU General Public License for more details. *
16 * You should have received a copy of the GNU General Public License *
17 * along with this program; if not, write to the *
18 * Free Software Foundation, Inc., *
19 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. *
20 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
22 #include "arm_features.h"
23 #include "new_dynarec_config.h"
24 #include "linkage_offsets.h"
/* Route C symbol references through ESYM() so this file assembles both on
 * ABIs that prefix C symbols with an underscore and on plain-ELF targets. */
28 #define dynarec_local ESYM(dynarec_local)
29 #define ndrc_add_jump_out ESYM(ndrc_add_jump_out)
30 #define ndrc_try_restore_block ESYM(ndrc_try_restore_block)
31 #define ndrc_get_addr_ht ESYM(ndrc_get_addr_ht)
32 #define ndrc_get_addr_ht_param ESYM(ndrc_get_addr_ht_param)
33 #define ndrc_write_invalidate_one ESYM(ndrc_write_invalidate_one)
34 #define gen_interupt ESYM(gen_interupt)
35 #define gteCheckStallRaw ESYM(gteCheckStallRaw)
36 #define psxException ESYM(psxException)
/* dynarec_local: one contiguous state block; at runtime fp points at its
 * base, and all emulator state is reached as [fp, #LO_xxx] offsets. */
42 .type dynarec_local, %object
43 .size dynarec_local, LO_dynarec_local_size
45 .space LO_dynarec_local_size
/* DRC_VAR_(name, vname, size_): expose a named slice of dynarec_local at
 * offset LO_<name>, so C code can address the same storage by symbol.
 * NOTE(review): the trailing lines of this macro (and the .size directive)
 * are not visible in this view. */
47 #define DRC_VAR_(name, vname, size_) \
48 vname = dynarec_local + LO_##name; \
50 .type vname, %object; \
/* DRC_VAR(name, size_): convenience wrapper applying the ESYM() mangling. */
53 #define DRC_VAR(name, size_) \
54 DRC_VAR_(name, ESYM(name), size_)
/* Named views into dynarec_local (offsets come from linkage_offsets.h).
 * The LO_ offsets used by the code below must stay in sync with these. */
56 DRC_VAR(next_interupt, 4)
57 DRC_VAR(cycle_count, 4)
58 DRC_VAR(last_count, 4)
59 DRC_VAR(pending_exception, 4)
61 DRC_VAR(branch_target, 4)
/* Full PSX register file lives inside dynarec_local. */
64 DRC_VAR(psxRegs, LO_psxRegs_end - LO_psxRegs)
70 DRC_VAR(reg_cop0, 128)
71 DRC_VAR(reg_cop2d, 128)
72 DRC_VAR(reg_cop2c, 128)
/* Kept for reference; currently not carved out as separate symbols. */
76 @DRC_VAR(interrupt, 4)
77 @DRC_VAR(intCycle, 256)
/* Cached range of the most recently invalidated code, checked by
 * invalidate_addr_call before calling out to C. */
80 DRC_VAR(inv_code_start, 4)
81 DRC_VAR(inv_code_end, 4)
85 DRC_VAR(zeromem_ptr, 4)
87 DRC_VAR(scratch_buf_ptr, 4)
88 DRC_VAR(ram_offset, 4)
/* load_varadr reg, var: materialize the address of 'var' in 'reg'.
 * With ARMv7 + TEXRELS_FORBIDDEN it is computed PC-relative via the
 * movw/movt pair against a local label (1678); the label itself and the
 * remaining #else/#endif/.endm arms are elided in this view. */
103 .macro load_varadr reg var
104 #if defined(HAVE_ARMV7) && defined(TEXRELS_FORBIDDEN)
105 movw \reg, #:lower16:(\var-(1678f+8))
106 movt \reg, #:upper16:(\var-(1678f+8))
109 #elif defined(HAVE_ARMV7) && !defined(__PIC__)
110 movw \reg, #:lower16:\var
111 movt \reg, #:upper16:\var
/* load_varadr_ext reg, var: like load_varadr but indirects through a
 * ptr_<var> trampoline slot when text relocations are forbidden;
 * otherwise falls back to plain load_varadr. */
117 .macro load_varadr_ext reg var
118 #if defined(HAVE_ARMV7) && defined(TEXRELS_FORBIDDEN)
119 movw \reg, #:lower16:(ptr_\var-(1678f+8))
120 movt \reg, #:upper16:(ptr_\var-(1678f+8))
124 load_varadr \reg \var
/* mov_16 reg, imm: build a 16-bit constant; this pre-ARMv7 arm splits it
 * into two 8-bit rotated immediates (the ARMv7 movw arm is elided here). */
128 .macro mov_16 reg imm
132 mov \reg, #(\imm & 0x00ff)
133 orr \reg, #(\imm & 0xff00)
/* mov_24 reg, imm: build a 24-bit constant; ARMv7 uses movw/movt,
 * older cores assemble it from three byte-sized immediates. */
137 .macro mov_24 reg imm
139 movw \reg, #(\imm & 0xffff)
140 movt \reg, #(\imm >> 16)
142 mov \reg, #(\imm & 0x0000ff)
143 orr \reg, #(\imm & 0x00ff00)
144 orr \reg, #(\imm & 0xff0000)
/* dyna_linker: runtime linker stub. Called from a not-yet-linked branch in
 * translated code; looks up (or compiles) the target block and patches the
 * calling branch instruction so subsequent executions jump directly.
 * NOTE(review): most of the body (register setup, patch write-back, final
 * jump) is elided in this view — comments below only cover visible lines. */
148 FUNCTION(dyna_linker):
149 /* r0 = virtual target address */
150 /* r1 = pointer to an instruction to patch */
151 #ifndef NO_WRITE_EXEC
157 /* must not compile - that might expire the caller block */
159 bl ndrc_get_addr_ht_param
/* Reconstruct the old branch target from the (shifted) branch offset. */
163 add r6, r5, r6, asr #6 /* old target */
165 moveq pc, r0 /* Stale i-cache */
/* Rebuild the branch opcode: keep the condition/opcode bits, insert the
 * new signed 24-bit offset. */
171 and r1, r7, #0xff000000
174 add r1, r1, r2, lsr #8
180 /* XXX: should be able to do better than this... */
184 .size dyna_linker, .-dyna_linker
/* jump_vaddr_rN: per-register entry points used by translated code for
 * indirect jumps when the target virtual address is held in rN.
 * NOTE(review): the one/two-instruction bodies (presumably moving rN into
 * the lookup register and branching to a common jump_vaddr path) are elided
 * in this view — only the labels and .size directives are visible. */
187 FUNCTION(jump_vaddr_r1):
190 .size jump_vaddr_r1, .-jump_vaddr_r1
191 FUNCTION(jump_vaddr_r2):
194 .size jump_vaddr_r2, .-jump_vaddr_r2
195 FUNCTION(jump_vaddr_r3):
198 .size jump_vaddr_r3, .-jump_vaddr_r3
199 FUNCTION(jump_vaddr_r4):
202 .size jump_vaddr_r4, .-jump_vaddr_r4
203 FUNCTION(jump_vaddr_r5):
206 .size jump_vaddr_r5, .-jump_vaddr_r5
207 FUNCTION(jump_vaddr_r6):
210 .size jump_vaddr_r6, .-jump_vaddr_r6
211 FUNCTION(jump_vaddr_r8):
214 .size jump_vaddr_r8, .-jump_vaddr_r8
215 FUNCTION(jump_vaddr_r9):
218 .size jump_vaddr_r9, .-jump_vaddr_r9
219 FUNCTION(jump_vaddr_r10):
222 .size jump_vaddr_r10, .-jump_vaddr_r10
223 FUNCTION(jump_vaddr_r12):
226 .size jump_vaddr_r12, .-jump_vaddr_r12
227 FUNCTION(jump_vaddr_r7):
229 .size jump_vaddr_r7, .-jump_vaddr_r7
230 FUNCTION(jump_vaddr_r0):
233 .size jump_vaddr_r0, .-jump_vaddr_r0
/* cc_interrupt: called from translated code when the cycle counter (r10)
 * expires. Syncs cycle state back to memory, runs pending scheduler events
 * (the gen_interupt call itself is elided in this view), then either resumes
 * translated code or returns to C when LO_stop is set.
 * Register convention here: fp = dynarec_local, r10 = cycle counter. */
236 FUNCTION(cc_interrupt):
237 ldr r0, [fp, #LO_last_count]
240 str r1, [fp, #LO_pending_exception]
241 str r10, [fp, #LO_cycle] /* PCSX cycles */
242 @@ str r10, [fp, #LO_reg_cop0+36] /* Count - not on PSX */
245 add r0, fp, #(LO_psxRegs + 34*4) /* CP0 */
/* Re-read state: the C event handlers may have changed cycle/stop/etc. */
248 ldr r10, [fp, #LO_cycle]
249 ldr r0, [fp, #LO_next_interupt]
250 ldr r1, [fp, #LO_pending_exception]
251 ldr r2, [fp, #LO_stop]
252 str r0, [fp, #LO_last_count]
/* If stopping, pop the frame pushed by new_dyna_start and return to C. */
255 ldmfdne sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, pc}
258 ldr r0, [fp, #LO_pcaddr]
261 .size cc_interrupt, .-cc_interrupt
/* fp_exception: raise a coprocessor-unusable style exception — fill in
 * CP0 EPC/Status/Cause and jump to the exception vector (the vector jump
 * and the Cause value setup are elided in this view).
 * In: r0 = faulting PC. */
264 FUNCTION(fp_exception):
267 ldr r1, [fp, #LO_reg_cop0+48] /* Status */
269 str r0, [fp, #LO_reg_cop0+56] /* EPC */
272 str r1, [fp, #LO_reg_cop0+48] /* Status */
273 str r2, [fp, #LO_reg_cop0+52] /* Cause */
277 .size fp_exception, .-fp_exception
/* fp_exception_ds: delay-slot variant — same as fp_exception but with the
 * branch-delay (BD) bit set in the Cause value before falling through /
 * branching into the common path (elided here). */
279 FUNCTION(fp_exception_ds):
280 mov r2, #0x90000000 /* Set high bit if delay slot */
282 .size fp_exception_ds, .-fp_exception_ds
/* jump_break[_ds] / jump_syscall[_ds]: entry points for the MIPS BREAK and
 * SYSCALL instructions (_ds = in a branch delay slot). Each entry sets up
 * its exception code (those instructions are elided in this view) and the
 * shared tail below syncs pc/cycles and hands off to psxException. */
285 FUNCTION(jump_break_ds):
289 FUNCTION(jump_break):
293 FUNCTION(jump_syscall_ds):
297 FUNCTION(jump_syscall):
/* Common tail: r2 = exception PC, r10 = cycle counter. */
302 ldr r3, [fp, #LO_last_count]
303 str r2, [fp, #LO_pcaddr]
305 str r10, [fp, #LO_cycle] /* PCSX cycles */
306 add r2, fp, #(LO_psxRegs + 34*4) /* CP0 */
309 /* note: psxException might do recursive recompiler call from its HLE code,
310 * so be ready for this */
/* jump_to_new_pc: re-enter translated code at the address stored in
 * LO_pcaddr, re-establishing the r10 cycle-counter convention.
 * NOTE(review): the address lookup and final jump are elided in this view. */
311 FUNCTION(jump_to_new_pc):
312 ldr r1, [fp, #LO_next_interupt]
313 ldr r10, [fp, #LO_cycle]
314 ldr r0, [fp, #LO_pcaddr]
316 str r1, [fp, #LO_last_count]
319 .size jump_to_new_pc, .-jump_to_new_pc
/* new_dyna_leave: exit translated code back to the C caller of
 * new_dyna_start — sync the cycle counter to memory, then restore the
 * callee-saved registers pushed on entry (ip popped only to keep the
 * frame size matching new_dyna_start's EABI-aligned push). */
322 FUNCTION(new_dyna_leave):
323 ldr r0, [fp, #LO_last_count]
326 str r10, [fp, #LO_cycle]
327 ldmfd sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, pc}
328 .size new_dyna_leave, .-new_dyna_leave
/* invalidate_addr_rN: called from translated code when a store may hit
 * compiled code; the written address is in rN. Each stub spills the
 * caller-saved registers into dynarec_local (stmia fp, ...) and branches to
 * the common invalidate_addr_call below.
 * NOTE(review): the per-register "mov r0, rN" instructions are elided in
 * this view; the r12 variant falls through into invalidate_addr_call. */
331 FUNCTION(invalidate_addr_r0):
332 stmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, lr}
333 b invalidate_addr_call
334 .size invalidate_addr_r0, .-invalidate_addr_r0
336 FUNCTION(invalidate_addr_r1):
337 stmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, lr}
339 b invalidate_addr_call
340 .size invalidate_addr_r1, .-invalidate_addr_r1
342 FUNCTION(invalidate_addr_r2):
343 stmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, lr}
345 b invalidate_addr_call
346 .size invalidate_addr_r2, .-invalidate_addr_r2
348 FUNCTION(invalidate_addr_r3):
349 stmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, lr}
351 b invalidate_addr_call
352 .size invalidate_addr_r3, .-invalidate_addr_r3
354 FUNCTION(invalidate_addr_r4):
355 stmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, lr}
357 b invalidate_addr_call
358 .size invalidate_addr_r4, .-invalidate_addr_r4
360 FUNCTION(invalidate_addr_r5):
361 stmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, lr}
363 b invalidate_addr_call
364 .size invalidate_addr_r5, .-invalidate_addr_r5
366 FUNCTION(invalidate_addr_r6):
367 stmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, lr}
369 b invalidate_addr_call
370 .size invalidate_addr_r6, .-invalidate_addr_r6
372 FUNCTION(invalidate_addr_r7):
373 stmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, lr}
375 b invalidate_addr_call
376 .size invalidate_addr_r7, .-invalidate_addr_r7
378 FUNCTION(invalidate_addr_r8):
379 stmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, lr}
381 b invalidate_addr_call
382 .size invalidate_addr_r8, .-invalidate_addr_r8
384 FUNCTION(invalidate_addr_r9):
385 stmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, lr}
387 b invalidate_addr_call
388 .size invalidate_addr_r9, .-invalidate_addr_r9
390 FUNCTION(invalidate_addr_r10):
391 stmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, lr}
393 b invalidate_addr_call
394 .size invalidate_addr_r10, .-invalidate_addr_r10
396 FUNCTION(invalidate_addr_r12):
397 stmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, lr}
399 .size invalidate_addr_r12, .-invalidate_addr_r12
/* invalidate_addr_call: common tail of the invalidate_addr_rN stubs.
 * Fast path: if r0 falls inside the cached [inv_code_start, inv_code_end]
 * range there is nothing compiled there and the C call is skipped
 * (the range compares setting the condition for blcc are elided in this
 * view). Otherwise call ndrc_write_invalidate_one, then restore all spilled
 * registers and return straight to translated code via the stored lr. */
401 invalidate_addr_call:
402 ldr r12, [fp, #LO_inv_code_start]
403 ldr lr, [fp, #LO_inv_code_end]
406 blcc ndrc_write_invalidate_one
407 ldmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, pc}
408 .size invalidate_addr_call, .-invalidate_addr_call
/* new_dyna_start(dynarec_local): C entry point into the recompiler core.
 * Saves callee-saved registers, establishes fp = dynarec_local and
 * r10 = cycle counter, then enters translated code at LO_pcaddr
 * (the lookup/branch into the translation is elided in this view).
 * Exits via new_dyna_leave, which pops the matching frame. */
411 FUNCTION(new_dyna_start):
412 /* ip is stored to conform EABI alignment */
413 stmfd sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, lr}
414 mov fp, r0 /* dynarec_local */
415 ldr r0, [fp, #LO_pcaddr]
417 ldr r1, [fp, #LO_next_interupt]
418 ldr r10, [fp, #LO_cycle]
419 str r1, [fp, #LO_last_count]
422 .size new_dyna_start, .-new_dyna_start
424 /* --------------------------------------- */
/* pcsx_read_mem readop, tab_shift: shared body for the memory-read
 * trampolines. Indexes the handler table by address >> (20+tab_shift);
 * a direct-RAM entry performs \readop inline (the cc condition comes from
 * a table-entry test elided in this view), otherwise it syncs cycles and
 * tail-calls the C handler (call sequence also elided). */
428 .macro pcsx_read_mem readop tab_shift
429 /* r0 = address, r1 = handler_tab, r2 = cycles */
431 lsr r3, #(20+\tab_shift)
432 ldr r12, [fp, #LO_last_count]
433 ldr r1, [r1, r3, lsl #2]
440 \readop r0, [r1, r3, lsl #\tab_shift]
443 str r2, [fp, #LO_cycle]
/* Byte read: table is laid out as [32-bit | 16-bit | 8-bit] sub-tables,
 * hence the offset adds below to select the right region. */
447 FUNCTION(jump_handler_read8):
448 add r1, #0x1000/4*4 + 0x1000/2*4 @ shift to r8 part
449 pcsx_read_mem ldrbcc, 0
451 FUNCTION(jump_handler_read16):
452 add r1, #0x1000/4*4 @ shift to r16 part
453 pcsx_read_mem ldrhcc, 1
455 FUNCTION(jump_handler_read32):
456 pcsx_read_mem ldrcc, 2
/* memhandler_post: after returning from a C memory handler, re-derive the
 * cycle bookkeeping since the handler may have rescheduled events. */
459 .macro memhandler_post
460 ldr r0, [fp, #LO_next_interupt]
461 ldr r2, [fp, #LO_cycle] @ memhandlers can modify cc, like dma
462 str r0, [fp, #LO_last_count]
/* pcsx_write_mem wrtop, tab_shift: shared body for memory-write
 * trampolines, mirroring pcsx_read_mem: direct stores execute \wrtop
 * inline, otherwise the C handler is called with lr preserved in
 * LO_saved_lr (the branch/call instructions between the visible lines are
 * elided in this view). */
466 .macro pcsx_write_mem wrtop tab_shift
467 /* r0 = address, r1 = data, r2 = cycles, r3 = handler_tab */
469 lsr r12, #(20+\tab_shift)
470 ldr r3, [r3, r12, lsl #2]
471 str r0, [fp, #LO_address] @ some handlers still need it..
473 mov r0, r2 @ cycle return in case of direct store
478 \wrtop r1, [r3, r12, lsl #\tab_shift]
481 ldr r12, [fp, #LO_last_count]
484 str r2, [fp, #LO_cycle]
/* lr is saved to memory (not the stack) because the handler call must not
 * disturb the translated-code stack discipline. */
486 str lr, [fp, #LO_saved_lr]
488 ldr lr, [fp, #LO_saved_lr]
/* Write trampolines: same sub-table layout as the read side
 * ([32-bit | 16-bit | 8-bit] regions selected by the offset adds). */
494 FUNCTION(jump_handler_write8):
495 add r3, #0x1000/4*4 + 0x1000/2*4 @ shift to r8 part
496 pcsx_write_mem strbcc, 0
498 FUNCTION(jump_handler_write16):
499 add r3, #0x1000/4*4 @ shift to r16 part
500 pcsx_write_mem strhcc, 1
502 FUNCTION(jump_handler_write32):
503 pcsx_write_mem strcc, 2
/* jump_handler_write_h: variant where r3 already holds the handler
 * function itself (no table lookup); calls it with cycles synced and lr
 * parked in LO_saved_lr (the call instruction is elided in this view). */
505 FUNCTION(jump_handler_write_h):
506 /* r0 = address, r1 = data, r2 = cycles, r3 = handler */
507 ldr r12, [fp, #LO_last_count]
508 str r0, [fp, #LO_address] @ some handlers still need it..
511 str r2, [fp, #LO_cycle]
513 str lr, [fp, #LO_saved_lr]
515 ldr lr, [fp, #LO_saved_lr]
/* jump_handle_swl / jump_handle_swr: helpers for the MIPS unaligned store
 * instructions SWL/SWR — look up the write handler table and merge the
 * relevant bytes of r1 into memory according to address alignment.
 * NOTE(review): most of both bodies (alignment dispatch, the actual
 * stores, non-RAM fallback) is elided in this view; the lsreq line is one
 * arm of the per-alignment byte-shift dispatch. */
520 FUNCTION(jump_handle_swl):
521 /* r0 = address, r1 = data, r2 = cycles */
522 ldr r3, [fp, #LO_mem_wtab]
524 ldr r3, [r3, r12, lsl #2]
545 lsreq r12, r1, #24 @ 0
555 FUNCTION(jump_handle_swr):
556 /* r0 = address, r1 = data, r2 = cycles */
557 ldr r3, [fp, #LO_mem_wtab]
559 ldr r3, [r3, r12, lsl #2]
/* rcntx_read_mode0 num: compute root counter <num>'s current COUNT in
 * mode 0 (counting raw cycles) from elapsed cycles since cycleStart.
 * NOTE(review): the macro's remaining lines (scaling/return) are elided. */
581 .macro rcntx_read_mode0 num
582 /* r0 = address, r2 = cycles */
583 ldr r3, [fp, #LO_rcnts+6*4+7*4*\num] @ cycleStart
585 sub r0, r0, r3, lsl #16
/* Mode-0 entry points — each expands rcntx_read_mode0 with its counter
 * index (the macro invocations are elided in this view). */
590 FUNCTION(rcnt0_read_count_m0):
593 FUNCTION(rcnt1_read_count_m0):
596 FUNCTION(rcnt2_read_count_m0):
/* Mode-1 variants: counter-specific clock sources require division —
 * done via reciprocal multiplication (constants loaded on elided lines). */
599 FUNCTION(rcnt0_read_count_m1):
600 /* r0 = address, r2 = cycles */
601 ldr r3, [fp, #LO_rcnts+6*4+7*4*0] @ cycleStart
604 mul r0, r1, r2 @ /= 5
608 FUNCTION(rcnt1_read_count_m1):
609 /* r0 = address, r2 = cycles */
610 ldr r3, [fp, #LO_rcnts+6*4+7*4*1]
613 umull r3, r0, r1, r2 @ ~ /= hsync_cycles, max ~0x1e6cdd
/* rcnt2 mode 1: clock is sysclk/8, hence the (16-3)-bit shifts. */
616 FUNCTION(rcnt2_read_count_m1):
617 /* r0 = address, r2 = cycles */
618 ldr r3, [fp, #LO_rcnts+6*4+7*4*2]
619 mov r0, r2, lsl #16-3
620 sub r0, r0, r3, lsl #16-3
/* call_gteStall: bridge from translated code to gteCheckStallRaw to
 * account for GTE (COP2) operation stalls. Syncs the cycle count, parks lr
 * in LO_saved_lr (stack must stay untouched for translated code), and
 * passes &psxRegs as the second argument; the actual bl and the
 * return-value handling are elided in this view. */
624 FUNCTION(call_gteStall):
625 /* r0 = op_cycles, r1 = cycles */
626 ldr r2, [fp, #LO_last_count]
627 str lr, [fp, #LO_saved_lr]
629 str r1, [fp, #LO_cycle]
630 add r1, fp, #LO_psxRegs
632 ldr lr, [fp, #LO_saved_lr]
/* NOTE(review): fragment of an ARMv6-only routine whose FUNCTION header is
 * not visible in this view. The visible pattern is a SIMD byte search:
 * replicate a byte across a word, use uadd8/sel to produce a per-byte
 * match mask, then clz to locate the first matching byte. Presumably this
 * scans a small fixed-size buffer (a 13-byte read is visible) — confirm
 * against the full source before relying on these comments. */
642 orr r1, r1, r1, lsl #8
644 orr r1, r1, r1, lsl #16 @ searched char in every byte
645 ldrb r0, [r0, #12] @ last byte
653 orr r3, r3, #0xff000000 @ EXCLUDE_REG
654 uadd8 r0, r12, r1 @ add and set GE bits when not 0 (match)
656 sel r0, r12, r1 @ 0 if no match, else ff in some byte
/* clz on the match mask gives the matching byte's bit position; >>3
 * converts it to a byte index (negative result = no match). */
662 clz r0, r0 @ 0, 8, 16, 24 or 32
665 sub r0, r12, r0, lsr #3 @ 3, 2, 1, 0 or -1
666 sub r2, r12, r2, lsr #3
667 sub r3, r12, r3, lsr #3
674 #endif /* HAVE_ARMV6 */
676 @ vim:filetype=armasm