#ifdef __MACH__
#define dynarec_local ESYM(dynarec_local)
-#define add_link ESYM(add_link)
+#define add_jump_out ESYM(add_jump_out)
#define new_recompile_block ESYM(new_recompile_block)
#define get_addr ESYM(get_addr)
#define get_addr_ht ESYM(get_addr_ht)
#define clean_blocks ESYM(clean_blocks)
#define gen_interupt ESYM(gen_interupt)
-#define psxException ESYM(psxException)
-#define execI ESYM(execI)
#define invalidate_addr ESYM(invalidate_addr)
+#define gteCheckStallRaw ESYM(gteCheckStallRaw)
+#define psxException ESYM(psxException)
#endif
.bss
DRC_VAR(stop, 4)
DRC_VAR(branch_target, 4)
DRC_VAR(address, 4)
-@DRC_VAR(align0, 4) /* unused/alignment */
+DRC_VAR(hack_addr, 4)
DRC_VAR(psxRegs, LO_psxRegs_end - LO_psxRegs)
/* psxRegs */
DRC_VAR(zeromem_ptr, 4)
DRC_VAR(invc_ptr, 4)
DRC_VAR(scratch_buf_ptr, 4)
-@DRC_VAR(align1, 8) /* unused/alignment */
+DRC_VAR(ram_offset, 4)
DRC_VAR(mini_ht, 256)
DRC_VAR(restore_candidate, 512)
orrcs r2, r6, #2048
ldr r5, [r3, r2, lsl #2]
lsl r12, r12, #8
- add r6, r1, r12, asr #6
+ add r6, r1, r12, asr #6 /* old target */
mov r8, #0
/* jump_in lookup */
1:
mov r5, r1
mov r1, r6
- bl add_link
+ bl add_jump_out
sub r2, r8, r5
and r1, r7, #0xff000000
lsl r2, r2, #6
beq dyna_linker
/* pagefault */
mov r1, r0
- mov r2, #8
+ mov r2, #(4<<2) /* Address error (fetch) */
.size dyna_linker, .-dyna_linker
FUNCTION(exec_pagefault):
/* r1 = fault address */
/* r2 = cause */
ldr r3, [fp, #LO_reg_cop0+48] /* Status */
- mvn r6, #0xF000000F
- ldr r4, [fp, #LO_reg_cop0+16] /* Context */
- bic r6, r6, #0x0F800000
str r0, [fp, #LO_reg_cop0+56] /* EPC */
orr r3, r3, #2
str r1, [fp, #LO_reg_cop0+32] /* BadVAddr */
- bic r4, r4, r6
str r3, [fp, #LO_reg_cop0+48] /* Status */
- and r5, r6, r1, lsr #9
str r2, [fp, #LO_reg_cop0+52] /* Cause */
- and r1, r1, r6, lsl #9
- str r1, [fp, #LO_reg_cop0+40] /* EntryHi */
- orr r4, r4, r5
- str r4, [fp, #LO_reg_cop0+16] /* Context */
mov r0, #0x80000000
+ orr r0, r0, #0x80 /* r0 = 0x80000080 - continue at the exception vector */
bl get_addr_ht
mov pc, r0
.size exec_pagefault, .-exec_pagefault
.align 2
FUNCTION(verify_code_ds):
- str r8, [fp, #LO_branch_target]
+ str r8, [fp, #LO_branch_target] @ preserve HOST_BTREG?
FUNCTION(verify_code):
/* r1 = source */
/* r2 = target */
and r2, r2, r10, lsr #17
add r3, fp, #LO_restore_candidate
str r10, [fp, #LO_cycle] /* PCSX cycles */
-@@ str r10, [fp, #LO_reg_cop0+36] /* Count */
+@@ str r10, [fp, #LO_reg_cop0+36] /* Count - not on PSX */
ldr r4, [r2, r3]
mov r10, lr
tst r4, r4
b .E1
.size cc_interrupt, .-cc_interrupt
- .align 2
-FUNCTION(do_interrupt):
- ldr r0, [fp, #LO_pcaddr]
- bl get_addr_ht
- add r10, r10, #2
- mov pc, r0
- .size do_interrupt, .-do_interrupt
-
.align 2
FUNCTION(fp_exception):
mov r2, #0x10000000
.size fp_exception_ds, .-fp_exception_ds
.align 2
+FUNCTION(jump_break_ds):
+ mov r0, #0x24 /* cause = 9<<2 (breakpoint) */
+ mov r1, #1 /* in delay slot */
+ b call_psxException
+FUNCTION(jump_break):
+ mov r0, #0x24 /* cause = 9<<2 (breakpoint) */
+ mov r1, #0 /* not in delay slot */
+ b call_psxException
+FUNCTION(jump_syscall_ds):
+ mov r0, #0x20 /* cause = 8<<2 (syscall) */
+ mov r1, #1 /* in delay slot */
+ b call_psxException
FUNCTION(jump_syscall):
- ldr r1, [fp, #LO_reg_cop0+48] /* Status */
- mov r3, #0x80000000
- str r0, [fp, #LO_reg_cop0+56] /* EPC */
- orr r1, #2
- mov r2, #0x20
- str r1, [fp, #LO_reg_cop0+48] /* Status */
- str r2, [fp, #LO_reg_cop0+52] /* Cause */
- add r0, r3, #0x80
- bl get_addr_ht
- mov pc, r0
- .size jump_syscall, .-jump_syscall
- .align 2
+ mov r0, #0x20 /* cause = 8<<2 (syscall) */
+ mov r1, #0 /* not in delay slot */
- .align 2
-FUNCTION(jump_syscall_hle):
- str r0, [fp, #LO_pcaddr] /* PC must be set to EPC for psxException */
- ldr r2, [fp, #LO_last_count]
- mov r1, #0 /* in delay slot */
- add r2, r2, r10
- mov r0, #0x20 /* cause */
- str r2, [fp, #LO_cycle] /* PCSX cycle counter */
+call_psxException: /* r0 = cause, r1 = delay-slot flag, r2 = PC */
+ ldr r3, [fp, #LO_last_count]
+ str r2, [fp, #LO_pcaddr] /* PC must be set for psxException */
+ add r10, r3, r10 /* last_count + cc */
+ str r10, [fp, #LO_cycle] /* PCSX cycles */
bl psxException
/* note: psxException might do recursive recompiler call from it's HLE code,
* so be ready for this */
-pcsx_return:
+FUNCTION(jump_to_new_pc): /* also reached by falling through from call_psxException */
ldr r1, [fp, #LO_next_interupt]
ldr r10, [fp, #LO_cycle]
ldr r0, [fp, #LO_pcaddr]
str r1, [fp, #LO_last_count]
bl get_addr_ht
mov pc, r0
- .size jump_syscall_hle, .-jump_syscall_hle
-
- .align 2
-FUNCTION(jump_hlecall):
- ldr r2, [fp, #LO_last_count]
- str r0, [fp, #LO_pcaddr]
- add r2, r2, r10
- adr lr, pcsx_return
- str r2, [fp, #LO_cycle] /* PCSX cycle counter */
- bx r1
- .size jump_hlecall, .-jump_hlecall
-
- .align 2
-FUNCTION(jump_intcall):
- ldr r2, [fp, #LO_last_count]
- str r0, [fp, #LO_pcaddr]
- add r2, r2, r10
- adr lr, pcsx_return
- str r2, [fp, #LO_cycle] /* PCSX cycle counter */
- b execI
- .size jump_hlecall, .-jump_hlecall
+ .size jump_to_new_pc, .-jump_to_new_pc
.align 2
FUNCTION(new_dyna_leave):
pcsx_read_mem ldrcc, 2
+.macro memhandler_post @ shared tail after calling a memory handler
+ ldr r0, [fp, #LO_next_interupt]
+ ldr r2, [fp, #LO_cycle] @ memhandlers can modify cc, like dma
+ str r0, [fp, #LO_last_count] @ last_count = next_interupt
+ sub r0, r2, r0 @ r0 = cycle - next_interupt (returned cc)
+.endm
+
.macro pcsx_write_mem wrtop tab_shift
/* r0 = address, r1 = data, r2 = cycles, r3 = handler_tab */
lsl r12,r0, #20
ldr r3, [r3, r12, lsl #2]
str r0, [fp, #LO_address] @ some handlers still need it..
lsls r3, #1
- mov r0, r2 @ cycle return in case of direct store
+ mov r0, r2 @ cycle return in case of direct store
.if \tab_shift == 1
lsl r12, #1
\wrtop r1, [r3, r12]
ldr r12, [fp, #LO_last_count]
mov r0, r1
add r2, r2, r12
- push {r2, lr}
str r2, [fp, #LO_cycle]
+
+ str lr, [fp, #LO_saved_lr] @ blx below clobbers lr; avoid stack use
blx r3
+ ldr lr, [fp, #LO_saved_lr]
- ldr r0, [fp, #LO_next_interupt]
- pop {r2, lr}
- str r0, [fp, #LO_last_count]
- sub r0, r2, r0
+ memhandler_post @ r0 = cycle - next_interupt
bx lr
.endm
str r0, [fp, #LO_address] @ some handlers still need it..
add r2, r2, r12
mov r0, r1
- push {r2, lr}
str r2, [fp, #LO_cycle]
+
+ str lr, [fp, #LO_saved_lr]
blx r3
+ ldr lr, [fp, #LO_saved_lr]
- ldr r0, [fp, #LO_next_interupt]
- pop {r2, lr}
- str r0, [fp, #LO_last_count]
- sub r0, r2, r0
+ memhandler_post
bx lr
FUNCTION(jump_handle_swl):
lsr r0, #16 @ /= 8
bx lr
+FUNCTION(call_gteStall):
+ /* r0 = op_cycles, r1 = cycles */
+ ldr r2, [fp, #LO_last_count]
+ str lr, [fp, #LO_saved_lr] @ save return address across the call
+ add r1, r1, r2 @ r1 = last_count + cycles
+ str r1, [fp, #LO_cycle] @ flush PCSX cycle counter
+ add r1, fp, #LO_psxRegs @ r1 = &psxRegs
+ bl gteCheckStallRaw
+ ldr lr, [fp, #LO_saved_lr]
+ add r10, r10, r0 @ add returned stall cycles to cc
+ bx lr
+
+#ifdef HAVE_ARMV6
+
+FUNCTION(get_reg):
+ /* find the index of the byte in r1 within the 13-byte array at r0:
+  * returns 12 if the last byte matches, else the matching index 0..11
+  * (index 11 is forced to mismatch - EXCLUDE_REG), or -1 if none match;
+  * assumes at most one match among bytes 0..11 - see final ANDs */
+ ldr r12, [r0] @ bytes 0-3
+ and r1, r1, #0xff
+ ldr r2, [r0, #4] @ bytes 4-7
+ orr r1, r1, r1, lsl #8
+ ldr r3, [r0, #8] @ bytes 8-11
+ orr r1, r1, r1, lsl #16 @ searched char in every byte
+ ldrb r0, [r0, #12] @ last byte
+ eor r12, r12, r1 @ matching bytes become 0
+ eor r2, r2, r1
+ eor r3, r3, r1
+ cmp r0, r1, lsr #24 @ does the last byte match?
+ mov r0, #12
+ mvn r1, #0 @ r1=~0
+ bxeq lr @ -> return 12
+ orr r3, r3, #0xff000000 @ EXCLUDE_REG
+ uadd8 r0, r12, r1 @ per-byte add 0xff: GE set where byte != 0 (no match)
+ mov r12, #0
+ sel r0, r12, r1 @ 0 if no match, else ff in some byte
+ uadd8 r2, r2, r1
+ sel r2, r12, r1
+ uadd8 r3, r3, r1
+ sel r3, r12, r1
+ mov r12, #3
+ clz r0, r0 @ 0, 8, 16, 24 or 32
+ clz r2, r2
+ clz r3, r3
+ sub r0, r12, r0, lsr #3 @ 3, 2, 1, 0 or -1
+ sub r2, r12, r2, lsr #3
+ sub r3, r12, r3, lsr #3
+ orr r2, r2, #4 @ -> index 4..7 (no-match -1 stays all-ones)
+ orr r3, r3, #8 @ -> index 8..11
+ and r0, r0, r2 @ combine: -1 is the AND identity
+ and r0, r0, r3
+ bx lr
+
+#endif /* HAVE_ARMV6 */
+
+
@ vim:filetype=armasm