1 /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
2 * linkage_arm.s for PCSX *
3 * Copyright (C) 2009-2011 Ari64 *
4 * Copyright (C) 2010-2013 Gražvydas "notaz" Ignotas *
6 * This program is free software; you can redistribute it and/or modify *
7 * it under the terms of the GNU General Public License as published by *
8 * the Free Software Foundation; either version 2 of the License, or *
9 * (at your option) any later version. *
11 * This program is distributed in the hope that it will be useful, *
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of *
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
14 * GNU General Public License for more details. *
16 * You should have received a copy of the GNU General Public License *
17 * along with this program; if not, write to the *
18 * Free Software Foundation, Inc., *
19 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. *
20 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
22 #include "arm_features.h"
23 #include "new_dynarec_config.h"
24 #include "linkage_offsets.h"
@ Alias the C-visible symbols through ESYM() so this file assembles the same
@ on ABIs with and without a leading-underscore symbol prefix (ESYM comes
@ from arm_features.h, included above).
28 #define dynarec_local ESYM(dynarec_local)
29 #define ndrc_patch_link ESYM(ndrc_patch_link)
30 #define ndrc_get_addr_ht ESYM(ndrc_get_addr_ht)
31 #define ndrc_get_addr_ht_param ESYM(ndrc_get_addr_ht_param)
32 #define ndrc_write_invalidate_one ESYM(ndrc_write_invalidate_one)
33 #define gen_interupt ESYM(gen_interupt)
34 #define psxException ESYM(psxException)
35 #define execI ESYM(execI)
38 /* make mini_ht reachable with a single armv4 insn */
@ LO_mini_ht must fit an ARMv4 8-bit-rotated immediate offset pattern
@ (only bits 4..11 may be set); abort the build otherwise.
@ Fix: corrected the misspelled diagnostic "misligned" -> "misaligned".
39 #if (LO_mini_ht & ~0xff0)
40 #error misaligned mini_ht
@ Reserve the dynarec_local state block (size comes from linkage_offsets.h).
@ At runtime fp points at this block: all the [fp, #LO_*] accesses below
@ address fields inside it.
46 .type dynarec_local, %object
47 .size dynarec_local, LO_dynarec_local_size
49 .space LO_dynarec_local_size
@ DRC_VAR_(name, vname, size_): define vname as an absolute alias for the
@ field at dynarec_local + LO_<name>, typed as an object so C code can
@ reference it directly.  DRC_VAR wraps it with the ESYM()-mangled name.
@ NOTE(review): interior macro lines (e.g. a .size directive) appear elided
@ in this chunk - the continuation backslashes imply more lines followed.
51 #define DRC_VAR_(name, vname, size_) \
52 vname = dynarec_local + LO_##name; \
54 .type vname, %object; \
57 #define DRC_VAR(name, size_) \
58 DRC_VAR_(name, ESYM(name), size_)
@ Exported views of individual dynarec_local fields.  Entries prefixed with
@ '@' are commented out (no longer exported from here).
60 @DRC_VAR(next_interupt, 4)
61 DRC_VAR(cycle_count, 4)
62 DRC_VAR(last_count, 4)
66 DRC_VAR(psxRegs, LO_psxRegs_end - LO_psxRegs)
71 DRC_VAR(reg_cop2d, 128)
72 DRC_VAR(reg_cop2c, 128)
75 @DRC_VAR(interrupt, 4)
76 @DRC_VAR(intCycle, 256)
79 DRC_VAR(inv_code_start, 4)
80 DRC_VAR(inv_code_end, 4)
84 DRC_VAR(zeromem_ptr, 4)
86 DRC_VAR(scratch_buf_ptr, 4)
87 DRC_VAR(ram_offset, 4)
88 DRC_VAR(hash_table_ptr, 4)
@ load_varadr reg, var: load the address of \var into \reg.
@ - ARMv7 + TEXRELS_FORBIDDEN: movw/movt a PC-relative delta against a
@   numbered local label (1678), presumably placed by elided lines that
@   add pc afterwards - TODO confirm against the full file.
@ - ARMv7 non-PIC: plain absolute movw/movt.
@ NOTE(review): the remaining #elif/#else branches, #endif and .endm are
@ elided from this chunk.
103 .macro load_varadr reg var
104 #if defined(HAVE_ARMV7) && defined(TEXRELS_FORBIDDEN)
105 movw \reg, #:lower16:(\var-(1678f+8))
106 movt \reg, #:upper16:(\var-(1678f+8))
109 #elif defined(HAVE_ARMV7) && !defined(__PIC__)
110 movw \reg, #:lower16:\var
111 movt \reg, #:upper16:\var
@ load_varadr_ext reg, var: like load_varadr, but when text relocations are
@ forbidden it goes through a ptr_\var indirection symbol (presumably a
@ local literal pointing at the external \var - confirm in full file);
@ otherwise it just delegates to load_varadr.  #endif/.endm elided.
117 .macro load_varadr_ext reg var
118 #if defined(HAVE_ARMV7) && defined(TEXRELS_FORBIDDEN)
119 movw \reg, #:lower16:(ptr_\var-(1678f+8))
120 movt \reg, #:upper16:(ptr_\var-(1678f+8))
124 load_varadr \reg \var
@ mov_16 / mov_24: materialize a 16-bit or 24-bit immediate in \reg.
@ The pre-ARMv7 fallback builds the value one byte-chunk at a time with
@ mov+orr (each chunk is a valid rotated-immediate); the ARMv7 path of
@ mov_24 uses movw/movt.  The #if/#else lines and .endm are elided here.
128 .macro mov_16 reg imm
132 mov \reg, #(\imm & 0x00ff)
133 orr \reg, #(\imm & 0xff00)
137 .macro mov_24 reg imm
139 movw \reg, #(\imm & 0xffff)
140 movt \reg, #(\imm >> 16)
142 mov \reg, #(\imm & 0x0000ff)
143 orr \reg, #(\imm & 0x00ff00)
144 orr \reg, #(\imm & 0xff0000)
@ dyna_linker: called from generated code when a jump target has not been
@ linked yet.  Looks the target up in the hash table without compiling
@ (r2=0 -> ndrc_cm_no_compile), since compiling here might evict the
@ caller's own block while it is still executing.
@ NOTE(review): many interior instructions are elided in this chunk; the
@ comments below describe only the visible lines.
148 FUNCTION(dyna_linker):
149 /* r0 = virtual target address */
150 /* r1 = pointer to an instruction to patch */
157 /* must not compile - that might expire the caller block */
158 ldr r0, [fp, #LO_hash_table_ptr]
160 mov r2, #0 /* ndrc_compile_mode=ndrc_cm_no_compile */
161 bl ndrc_get_addr_ht_param
165 add r6, r5, r6, asr #6 /* old target */
167 bxeq r0 /* Stale i-cache */
177 ldr r1, [fp, #LO_hash_table_ptr]
180 .size dyna_linker, .-dyna_linker
@ jump_vaddr_rN: per-register indirect-jump entry points from generated
@ code.  Each presumably moves rN into r0 and falls through to
@ jump_vaddr_r0, which does the hash-table dispatch - the mov instructions
@ are elided from this chunk, so only the labels/.size markers remain.
183 FUNCTION(jump_vaddr_r1):
186 .size jump_vaddr_r1, .-jump_vaddr_r1
187 FUNCTION(jump_vaddr_r2):
190 .size jump_vaddr_r2, .-jump_vaddr_r2
191 FUNCTION(jump_vaddr_r3):
194 .size jump_vaddr_r3, .-jump_vaddr_r3
195 FUNCTION(jump_vaddr_r4):
198 .size jump_vaddr_r4, .-jump_vaddr_r4
199 FUNCTION(jump_vaddr_r5):
202 .size jump_vaddr_r5, .-jump_vaddr_r5
203 FUNCTION(jump_vaddr_r6):
206 .size jump_vaddr_r6, .-jump_vaddr_r6
207 FUNCTION(jump_vaddr_r8):
210 .size jump_vaddr_r8, .-jump_vaddr_r8
211 FUNCTION(jump_vaddr_r9):
214 .size jump_vaddr_r9, .-jump_vaddr_r9
215 FUNCTION(jump_vaddr_r10):
218 .size jump_vaddr_r10, .-jump_vaddr_r10
219 FUNCTION(jump_vaddr_r12):
222 .size jump_vaddr_r12, .-jump_vaddr_r12
223 FUNCTION(jump_vaddr_r7):
225 .size jump_vaddr_r7, .-jump_vaddr_r7
@ Common tail: look the target address (r0) up via the hash table.
226 FUNCTION(jump_vaddr_r0):
227 ldr r1, [fp, #LO_hash_table_ptr]
230 .size jump_vaddr_r0, .-jump_vaddr_r0
@ cc_interrupt: reached when the cycle counter expires.  Flushes the cycle
@ count back to psxRegs state, runs the scheduled event (presumably
@ gen_interupt - the bl is elided), then reloads cycle/pcaddr/stop and
@ either returns to the caller (ldmfdne ... pc when stop != 0, restoring
@ all callee-saved regs) or re-enters translated code via the hash table.
@ NOTE(review): several interior instructions are elided in this chunk.
233 FUNCTION(cc_interrupt):
234 ldr r0, [fp, #LO_last_count]
235 ldr r9, [fp, #LO_pcaddr]
237 str r1, [fp, #LO_cycle] /* PCSX cycles */
240 add r0, fp, #LO_reg_cop0 /* CP0 */
243 ldr r10, [fp, #LO_cycle]
244 ldr r0, [fp, #LO_pcaddr]
245 ldr r1, [fp, #LO_next_interupt]
246 ldrb r2, [fp, #LO_stop]
247 str r1, [fp, #LO_last_count]
250 ldmfdne sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, pc}
253 ldr r1, [fp, #LO_hash_table_ptr]
256 .size cc_interrupt, .-cc_interrupt
@ Exception entry stubs.  Each loads the R3000E cause code (<<2, the CP0
@ Cause ExcCode field position) into r0; the address-error variants also
@ record BadVaddr (r1) into psxRegs.  The _ds variants are for exceptions
@ raised in a branch delay slot.  All presumably fall through into the
@ common psxException call sequence at the bottom (intervening
@ instructions are elided from this chunk).
259 FUNCTION(jump_addrerror_ds): /* R3000E_AdEL / R3000E_AdES in r0 */
260 str r1, [fp, #(LO_psxRegs + (34+8)*4)] /* BadVaddr */
263 FUNCTION(jump_addrerror):
264 str r1, [fp, #(LO_psxRegs + (34+8)*4)] /* BadVaddr */
267 FUNCTION(jump_overflow_ds):
268 mov r0, #(12<<2) /* R3000E_Ov */
271 FUNCTION(jump_overflow):
275 FUNCTION(jump_break_ds):
276 mov r0, #(9<<2) /* R3000E_Bp */
279 FUNCTION(jump_break):
283 FUNCTION(jump_syscall_ds):
284 mov r0, #(8<<2) /* R3000E_Syscall */
287 FUNCTION(jump_syscall):
@ Common tail: sync pcaddr/cycle state and set up r2 = &CP0 regs before
@ the (elided) call into psxException.
292 ldr r3, [fp, #LO_last_count]
293 str r2, [fp, #LO_pcaddr]
295 str r10, [fp, #LO_cycle] /* PCSX cycles */
296 add r2, fp, #LO_reg_cop0 /* CP0 */
299 /* note: psxException might do recursive recompiler call from its HLE code,
300 * so be ready for this */
@ jump_to_new_pc: (re)enter translated code at psxRegs.pc after an
@ exception or HLE call.  Reloads stop/next_interupt/cycle/pcaddr from
@ dynarec_local, updates last_count, then dispatches through the hash
@ table (the actual lookup/branch instructions are elided here).
301 FUNCTION(jump_to_new_pc):
302 ldrb r2, [fp, #LO_stop]
303 ldr r1, [fp, #LO_next_interupt]
304 ldr r10, [fp, #LO_cycle]
305 ldr r0, [fp, #LO_pcaddr]
307 str r1, [fp, #LO_last_count]
310 ldr r1, [fp, #LO_hash_table_ptr]
313 .size jump_to_new_pc, .-jump_to_new_pc
@ new_dyna_leave: exit the dynarec back to the C caller of new_dyna_start.
@ Flushes the current cycle count to dynarec_local, then restores all
@ callee-saved registers (plus ip, kept for EABI 8-byte stack alignment -
@ see new_dyna_start's matching stmfd) and returns via pc.
316 FUNCTION(new_dyna_leave):
317 ldr r0, [fp, #LO_last_count]
319 str r10, [fp, #LO_cycle]
320 ldmfd sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, pc}
321 .size new_dyna_leave, .-new_dyna_leave
@ invalidate_addr_rN: called from generated code when a write may hit
@ translated code.  Each spills the caller-saved registers into the start
@ of dynarec_local (stmia fp), presumably moves rN into r0 (those movs are
@ elided from this chunk), and branches to the common tail below.
324 FUNCTION(invalidate_addr_r0):
325 stmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, lr}
326 b invalidate_addr_call
327 .size invalidate_addr_r0, .-invalidate_addr_r0
329 FUNCTION(invalidate_addr_r1):
330 stmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, lr}
332 b invalidate_addr_call
333 .size invalidate_addr_r1, .-invalidate_addr_r1
335 FUNCTION(invalidate_addr_r2):
336 stmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, lr}
338 b invalidate_addr_call
339 .size invalidate_addr_r2, .-invalidate_addr_r2
341 FUNCTION(invalidate_addr_r3):
342 stmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, lr}
344 b invalidate_addr_call
345 .size invalidate_addr_r3, .-invalidate_addr_r3
347 FUNCTION(invalidate_addr_r4):
348 stmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, lr}
350 b invalidate_addr_call
351 .size invalidate_addr_r4, .-invalidate_addr_r4
353 FUNCTION(invalidate_addr_r5):
354 stmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, lr}
356 b invalidate_addr_call
357 .size invalidate_addr_r5, .-invalidate_addr_r5
359 FUNCTION(invalidate_addr_r6):
360 stmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, lr}
362 b invalidate_addr_call
363 .size invalidate_addr_r6, .-invalidate_addr_r6
365 FUNCTION(invalidate_addr_r7):
366 stmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, lr}
368 b invalidate_addr_call
369 .size invalidate_addr_r7, .-invalidate_addr_r7
371 FUNCTION(invalidate_addr_r8):
372 stmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, lr}
374 b invalidate_addr_call
375 .size invalidate_addr_r8, .-invalidate_addr_r8
377 FUNCTION(invalidate_addr_r9):
378 stmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, lr}
380 b invalidate_addr_call
381 .size invalidate_addr_r9, .-invalidate_addr_r9
383 FUNCTION(invalidate_addr_r10):
384 stmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, lr}
386 b invalidate_addr_call
387 .size invalidate_addr_r10, .-invalidate_addr_r10
@ r12 variant: address is already in scratch; falls straight through.
389 FUNCTION(invalidate_addr_r12):
390 stmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, lr}
392 .size invalidate_addr_r12, .-invalidate_addr_r12
@ Common tail: compare r0 against the cached [inv_code_start, inv_code_end]
@ range (the cmp instructions are elided here); only call
@ ndrc_write_invalidate_one when outside it (blcc), then restore every
@ spilled register and return via the saved lr (loaded into pc).
394 invalidate_addr_call:
395 ldr r12, [fp, #LO_inv_code_start]
396 ldr lr, [fp, #LO_inv_code_end]
399 blcc ndrc_write_invalidate_one
400 ldmia fp, {r0, r1, r2, r3, EXTRA_UNSAVED_REGS r12, pc}
401 .size invalidate_addr_call, .-invalidate_addr_call
@ new_dyna_start_at(dynarec_local, addr) / new_dyna_start(dynarec_local):
@ C entry points into translated code.  Both save all callee-saved
@ registers (ip is pushed only to keep sp 8-byte aligned per EABI) and set
@ fp = r0 = &dynarec_local.  new_dyna_start additionally resolves the
@ start address from pcaddr via the hash table; new_dyna_start_at jumps to
@ a caller-supplied address (the shared epilogue lines are elided here).
404 FUNCTION(new_dyna_start_at):
405 /* ip is stored to conform EABI alignment */
406 stmfd sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, lr}
407 mov fp, r0 /* dynarec_local */
409 b new_dyna_start_at_e
411 FUNCTION(new_dyna_start):
412 stmfd sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, lr}
413 mov fp, r0 /* dynarec_local */
414 ldr r0, [fp, #LO_pcaddr]
415 ldr r1, [fp, #LO_hash_table_ptr]
418 ldr r1, [fp, #LO_next_interupt]
419 ldr r10, [fp, #LO_cycle]
420 str r1, [fp, #LO_last_count]
423 .size new_dyna_start, .-new_dyna_start
425 /* --------------------------------------- */
@ memhandler_post: resync timing state after a C memory handler returned,
@ since handlers (e.g. DMA) may have rescheduled events and changed the
@ cycle count.  The trailing lines and .endm are elided from this chunk.
427 .macro memhandler_post
428 /* r2 = cycles_out, r3 = tmp */
429 ldr r3, [fp, #LO_next_interupt]
430 ldr r2, [fp, #LO_cycle] @ memhandlers can modify cc, like dma
431 str r3, [fp, #LO_last_count]
@ pcsx_read_mem_part readop, tab_shift: common body for the read handlers.
@ Indexes the per-megabyte handler table with addr >> (20 + tab_shift) and
@ loads the entry; the elided lines presumably test whether the entry is a
@ direct-memory pointer (conditional \readop on cc) or a C handler - TODO
@ confirm against the full file.  .endm and several lines are elided.
437 .macro pcsx_read_mem_part readop tab_shift
438 /* r0 = address, r1 = handler_tab, r2 = cycles */
440 lsr r3, #(20+\tab_shift)
441 ldr r12, [fp, #LO_last_count]
442 ldr r1, [r1, r3, lsl #2]
449 \readop r0, [r1, r3, lsl #\tab_shift]
453 str r12, [fp, #LO_cycle]
@ Byte/half/word read entry points.  The table holds three sub-tables
@ back to back (word, half, byte); the add offsets below skip to the
@ matching part before the shared macro body runs.  On the handler path
@ control transfers to the table entry via bx r1.
456 FUNCTION(jump_handler_read8):
457 add r1, #0x1000/4*4 + 0x1000/2*4 @ shift to r8 part
458 pcsx_read_mem_part ldrbcc, 0
459 bx r1 @ addr, unused, cycles
461 FUNCTION(jump_handler_read16):
462 add r1, #0x1000/4*4 @ shift to r16 part
463 pcsx_read_mem_part ldrhcc, 1
464 bx r1 @ addr, unused, cycles
466 FUNCTION(jump_handler_read32):
467 pcsx_read_mem_part ldrcc, 2
468 bx r1 @ addr, unused, cycles
@ lr is preserved around the handler call in dynarec_local, not on the
@ stack (surrounding call instructions are elided here).
470 str lr, [fp, #LO_saved_lr]
472 ldr lr, [fp, #LO_saved_lr]
@ pcsx_write_mem wrtop, tab_shift: common body for the write handlers,
@ mirroring pcsx_read_mem_part: index the handler table by
@ addr >> (20 + tab_shift), conditionally store direct (\wrtop on cc) or
@ fall through to a C handler.  The address is also stashed in
@ dynarec_local because some handlers read it from there.  Several lines
@ and .endm are elided from this chunk.
477 .macro pcsx_write_mem wrtop tab_shift
478 /* r0 = address, r1 = data, r2 = cycles, r3 = handler_tab */
480 lsr r12, #(20+\tab_shift)
481 ldr r3, [r3, r12, lsl #2]
482 str r0, [fp, #LO_address] @ some handlers still need it..
488 \wrtop r1, [r3, r12, lsl #\tab_shift]
491 ldr r12, [fp, #LO_last_count]
494 str r2, [fp, #LO_cycle]
@ lr saved/restored in dynarec_local around the handler call.
496 str lr, [fp, #LO_saved_lr]
498 ldr lr, [fp, #LO_saved_lr]
@ Byte/half/word write entry points; the adds skip to the matching
@ sub-table, as in the read path.
504 FUNCTION(jump_handler_write8):
505 add r3, #0x1000/4*4 + 0x1000/2*4 @ shift to r8 part
506 pcsx_write_mem strbcc, 0
508 FUNCTION(jump_handler_write16):
509 add r3, #0x1000/4*4 @ shift to r16 part
510 pcsx_write_mem strhcc, 1
512 FUNCTION(jump_handler_write32):
513 pcsx_write_mem strcc, 2
@ jump_handler_write_h: direct call with the handler already resolved in
@ r3 - no table walk, just state sync around the (elided) call.
515 FUNCTION(jump_handler_write_h):
516 /* r0 = address, r1 = data, r2 = cycles, r3 = handler */
517 ldr r12, [fp, #LO_last_count]
518 str r0, [fp, #LO_address] @ some handlers still need it..
521 str r2, [fp, #LO_cycle]
523 str lr, [fp, #LO_saved_lr]
525 ldr lr, [fp, #LO_saved_lr]
@ jump_handle_swl / jump_handle_swr: emulate the MIPS unaligned store
@ instructions SWL/SWR.  Each walks mem_wtab for the target page; if the
@ page needs a C handler (carry set from an elided compare) it bails to
@ jump_handle_swx_interp, otherwise it merges the relevant bytes of r1
@ into memory directly (merge instructions largely elided here).
530 FUNCTION(jump_handle_swl):
531 /* r0 = address, r1 = data, r2 = cycles */
532 ldr r3, [fp, #LO_mem_wtab]
534 ldr r3, [r3, r12, lsl #2]
536 bcs jump_handle_swx_interp
555 lsreq r12, r1, #24 @ 0
560 FUNCTION(jump_handle_swr):
561 /* r0 = address, r1 = data, r2 = cycles */
562 ldr r3, [fp, #LO_mem_wtab]
564 ldr r3, [r3, r12, lsl #2]
566 bcs jump_handle_swx_interp
@ Slow path: hand the whole instruction to the interpreter (execI is
@ presumably called from the elided lines) with psxRegs state synced.
581 jump_handle_swx_interp: /* almost never happens */
582 ldr r3, [fp, #LO_last_count]
583 add r0, fp, #LO_psxRegs
585 str r2, [fp, #LO_cycle] /* PCSX cycles */
@ Root-counter COUNT register read handlers, one pair of modes per
@ counter.  Mode 0 (macro): count = elapsed cycles since cycleStart,
@ scaled; mode 1 variants divide by the counter's clock source rate
@ (counter 0: /5 via multiply-by-reciprocal, counter 1: /hsync_cycles via
@ umull, counter 2: /8 via the lsl #16-3 scaling).  Many interior lines
@ (including returns and .size markers) are elided from this chunk.
589 .macro rcntx_read_mode0 num
590 /* r0 = address, r2 = cycles */
591 ldr r3, [fp, #LO_rcnts+6*4+7*4*\num] @ cycleStart
593 sub r0, r0, r3, lsl #16
598 FUNCTION(rcnt0_read_count_m0):
601 FUNCTION(rcnt1_read_count_m0):
604 FUNCTION(rcnt2_read_count_m0):
607 FUNCTION(rcnt0_read_count_m1):
608 /* r0 = address, r2 = cycles */
609 ldr r3, [fp, #LO_rcnts+6*4+7*4*0] @ cycleStart
612 mul r0, r1, r2 @ /= 5
616 FUNCTION(rcnt1_read_count_m1):
617 /* r0 = address, r2 = cycles */
618 ldr r3, [fp, #LO_rcnts+6*4+7*4*1]
621 umull r3, r0, r1, r2 @ ~ /= hsync_cycles, max ~0x1e6cdd
624 FUNCTION(rcnt2_read_count_m1):
625 /* r0 = address, r2 = cycles */
626 ldr r3, [fp, #LO_rcnts+6*4+7*4*2]
627 mov r0, r2, lsl #16-3
628 sub r0, r0, r3, lsl #16-3
@ NOTE(review): fragment of an ARMv6-only routine whose FUNCTION label is
@ elided from this chunk.  The visible pattern is a SIMD byte search:
@ broadcast a byte across r1 (orr-with-shift), then per word use uadd8 to
@ set GE flags on matching lanes, sel to pick 0xff in matching bytes, and
@ clz to turn the match position into a byte index.  Do not assume more
@ than that without the full file.
638 orr r1, r1, r1, lsl #8
640 orr r1, r1, r1, lsl #16 @ searched char in every byte
641 ldrb r0, [r0, #12] @ last byte
649 orr r3, r3, #0xff000000 @ EXCLUDE_REG
650 uadd8 r0, r12, r1 @ add and set GE bits when not 0 (match)
652 sel r0, r12, r1 @ 0 if no match, else ff in some byte
658 clz r0, r0 @ 0, 8, 16, 24 or 32
661 sub r0, r12, r0, lsr #3 @ 3, 2, 1, 0 or -1
662 sub r2, r12, r2, lsr #3
663 sub r3, r12, r3, lsr #3
672 @ vim:filetype=armasm