#ifdef __MACH__
#define dynarec_local ESYM(dynarec_local)
-#define add_link ESYM(add_link)
+#define add_jump_out ESYM(add_jump_out)
#define new_recompile_block ESYM(new_recompile_block)
#define get_addr ESYM(get_addr)
#define get_addr_ht ESYM(get_addr_ht)
#define clean_blocks ESYM(clean_blocks)
#define gen_interupt ESYM(gen_interupt)
-#define psxException ESYM(psxException)
-#define execI ESYM(execI)
#define invalidate_addr ESYM(invalidate_addr)
+#define gteCheckStallRaw ESYM(gteCheckStallRaw)
#endif
.bss
DRC_VAR(zeromem_ptr, 4)
DRC_VAR(invc_ptr, 4)
DRC_VAR(scratch_buf_ptr, 4)
-@DRC_VAR(align1, 8) /* unused/alignment */
+DRC_VAR(ram_offset, 4)
DRC_VAR(mini_ht, 256)
DRC_VAR(restore_candidate, 512)
orrcs r2, r6, #2048
ldr r5, [r3, r2, lsl #2]
lsl r12, r12, #8
- add r6, r1, r12, asr #6
+ add r6, r1, r12, asr #6 /* old target */
mov r8, #0
/* jump_in lookup */
1:
mov r5, r1
mov r1, r6
- bl add_link
+ bl add_jump_out
sub r2, r8, r5
and r1, r7, #0xff000000
lsl r2, r2, #6
.align 2
FUNCTION(verify_code_ds):
- str r8, [fp, #LO_branch_target]
+ str r8, [fp, #LO_branch_target] @ preserve HOST_BTREG?
FUNCTION(verify_code):
/* r1 = source */
/* r2 = target */
.size jump_syscall, .-jump_syscall
.align 2
- .align 2
-FUNCTION(jump_syscall_hle):
- str r0, [fp, #LO_pcaddr] /* PC must be set to EPC for psxException */
- ldr r2, [fp, #LO_last_count]
- mov r1, #0 /* in delay slot */
- add r2, r2, r10
- mov r0, #0x20 /* cause */
- str r2, [fp, #LO_cycle] /* PCSX cycle counter */
- bl psxException
-
/* note: psxException might do recursive recompiler call from its HLE code,
 * so be ready for this */
-pcsx_return:
+FUNCTION(jump_to_new_pc):
ldr r1, [fp, #LO_next_interupt]
ldr r10, [fp, #LO_cycle]
ldr r0, [fp, #LO_pcaddr]
str r1, [fp, #LO_last_count]
bl get_addr_ht
mov pc, r0
- .size jump_syscall_hle, .-jump_syscall_hle
-
- .align 2
-FUNCTION(jump_hlecall):
- ldr r2, [fp, #LO_last_count]
- str r0, [fp, #LO_pcaddr]
- add r2, r2, r10
- adr lr, pcsx_return
- str r2, [fp, #LO_cycle] /* PCSX cycle counter */
- bx r1
- .size jump_hlecall, .-jump_hlecall
-
- .align 2
-FUNCTION(jump_intcall):
- ldr r2, [fp, #LO_last_count]
- str r0, [fp, #LO_pcaddr]
- add r2, r2, r10
- adr lr, pcsx_return
- str r2, [fp, #LO_cycle] /* PCSX cycle counter */
- b execI
- .size jump_hlecall, .-jump_hlecall
+ .size jump_to_new_pc, .-jump_to_new_pc
.align 2
FUNCTION(new_dyna_leave):
lsr r0, #16 @ /= 8
bx lr
+FUNCTION(call_gteStall):
+ /* r0 = op_cycles, r1 = cycles */
+ ldr r2, [fp, #LO_last_count]
+ str lr, [fp, #LO_saved_lr]
+ add r1, r1, r2
+ str r1, [fp, #LO_cycle]
+ add r1, fp, #LO_psxRegs
+ bl gteCheckStallRaw
+ ldr lr, [fp, #LO_saved_lr]
+ add r10, r10, r0
+ bx lr
+
@ vim:filetype=armasm