* Free Software Foundation, Inc., *
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+.equiv HAVE_ARMV7, 1 @ build-time switch: 1 = ARMv7 (Cortex-A8 + VFP), 0 = legacy ARM9 soft-float
+
+.if HAVE_ARMV7
+	.cpu cortex-a8
+	.fpu vfp
+.else
	.cpu arm9tdmi
	.fpu softvfp
-	.eabi_attribute 20, 1
-	.eabi_attribute 21, 1
-	.eabi_attribute 23, 3
-	.eabi_attribute 24, 1
-	.eabi_attribute 25, 1
-	.eabi_attribute 26, 2
-	.eabi_attribute 30, 6
-	.eabi_attribute 18, 4
-	.file	"linkage_arm.s"
+.endif
	.global	rdram
rdram = 0x80000000
	.global dynarec_local
	.global hi
	.global lo
	.global reg_cop0
+	.global reg_cop2d @ coprocessor 2 (PSX GTE) data registers, exported for the C core
+	.global reg_cop2c @ coprocessor 2 (PSX GTE) control registers
	.global FCR0
	.global FCR31
	.global next_interupt
	.global memory_map
/* psx */
	.global psxRegs
-	.global psxHLEt_addr
-	.global code
.bss
	.align	4
code = PC + 4
	.type code, %object
	.size code, 4
-.global cycle
cycle = code + 4
	.type cycle, %object
	.size cycle, 4
	.size intCycle, 128
psxRegs_end = intCycle + 128
-psxHLEt_addr = psxRegs_end
-	.type psxHLEt_addr, %object
-	.size psxHLEt_addr, 4
-align0 = psxHLEt_addr + 4 /* just for alignment */
+align0 = psxRegs_end /* just for alignment */
	.type align0, %object
-	.size align0, 4
-branch_target = align0 + 4
+	.size align0, 8 @ grown 4->8: absorbs the 4 bytes freed by dropping psxHLEt_addr, so offsets after this point are unchanged
+branch_target = align0 + 8
	.type branch_target, %object
	.size branch_target, 4
mini_ht = branch_target + 4
	str r1, [fp, #pending_exception-dynarec_local]
	and r2, r2, r10, lsr #17
	add r3, fp, #restore_candidate-dynarec_local
-	str r10, [fp, #reg_cop0+36-dynarec_local] /* Count */
+	str r10, [fp, #cycle-dynarec_local] /* PCSX cycles */
+@@	str r10, [fp, #reg_cop0+36-dynarec_local] /* Count */
	ldr r4, [r2, r3]
	mov r10, lr
	tst r4, r4
.E1:
	bl gen_interupt
	mov lr, r10
-	ldr r10, [fp, #reg_cop0+36-dynarec_local] /* Count */
+	ldr r10, [fp, #cycle-dynarec_local] @ reload PCSX cycle counter (replaces cop0 Count)
	ldr r0, [fp, #next_interupt-dynarec_local]
	ldr r1, [fp, #pending_exception-dynarec_local]
	ldr r2, [fp, #stop-dynarec_local]
	str r0, [fp, #last_count-dynarec_local]
	sub r10, r10, r0
	tst r2, r2
-	bne .E3
+	ldmnefd sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, pc} @ stop!=0: pop the regs saved by new_dyna_start (incl. ip pad) and return to C (replaces branch to .E3)
	tst r1, r1
	moveq pc, lr
.E2:
	ldr r0, [fp, #pcaddr-dynarec_local]
	bl get_addr_ht
	mov pc, r0
-.E3:
-	add r12, fp, #28
-	ldmia r12, {r4, r5, r6, r7, r8, r9, sl, fp, pc}
.E4:
	/* Move 'dirty' blocks to the 'clean' list */
	lsl r5, r2, #3
	.global	do_interrupt
	.type	do_interrupt, %function
do_interrupt:
+	/* FIXME: cycles already calculated, not needed? */
	ldr r0, [fp, #pcaddr-dynarec_local]
	bl get_addr_ht
	ldr r1, [fp, #next_interupt-dynarec_local]
-	ldr r10, [fp, #reg_cop0+36-dynarec_local] /* Count */
+	ldr r10, [fp, #cycle-dynarec_local] @ r10 = absolute PCSX cycle counter
	str r1, [fp, #last_count-dynarec_local]
	sub r10, r10, r1
	add r10, r10, #2
	add r2, r2, r10
	mov r0, #0x20 /* cause */
	str r2, [fp, #cycle-dynarec_local] /* PCSX cycle counter */
-	str r2, [fp, #reg_cop0+36-dynarec_local] /* Count */
	bl psxException
	/* note: psxException might do recorsive recompiler call from it's HLE code,
	 * so be ready for this */
+pcsx_return: @ shared re-entry path: also used as the lr target for jump_hlecall's bx
+	ldr r1, [fp, #next_interupt-dynarec_local]
+	ldr r10, [fp, #cycle-dynarec_local]
	ldr r0, [fp, #pcaddr-dynarec_local]
-	mov r10, #0 /* FIXME */
+	sub r10, r10, r1 @ r10 = cycles relative to next_interupt, the in-translated-code convention
+	str r1, [fp, #last_count-dynarec_local] @ remember the base so absolute count can be rebuilt later
	bl get_addr_ht
	mov pc, r0
	.size	jump_syscall_hle, .-jump_syscall_hle
jump_hlecall:
	ldr r2, [fp, #last_count-dynarec_local]
	str r0, [fp, #pcaddr-dynarec_local]
-	and r1, r1, #7
	add r2, r2, r10
-	ldr r3, [fp, #psxHLEt_addr-dynarec_local] /* psxHLEt */
	str r2, [fp, #cycle-dynarec_local] /* PCSX cycle counter */
-	str r2, [fp, #reg_cop0+36-dynarec_local] /* Count */
-	mov lr, pc
-	ldr pc, [r3, r1, lsl #2]
-
-	ldr r0, [fp, #pcaddr-dynarec_local]
-	mov r10, #0 /* FIXME */
-	bl get_addr_ht
-	mov pc, r0
+	adr lr, pcsx_return @ handler returns through the shared pcsx_return path, which reloads cycle/next_interupt
+	bx r1 @ r1 now carries the HLE handler address directly (old code indexed the psxHLEt table with r1&7); bx also permits Thumb handlers
	.size	jump_hlecall, .-jump_hlecall
new_dyna_leave:
	ldr r0, [fp, #last_count-dynarec_local]
	add r12, fp, #28
	add r10, r0, r10
-	str r10, [fp, #reg_cop0+36-dynarec_local] /* Count */
-	ldmia r12, {r4, r5, r6, r7, r8, r9, sl, fp, pc}
+	str r10, [fp, #cycle-dynarec_local] @ write back absolute cycle count (last_count + relative r10)
+	ldmfd sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, pc} @ pop regs saved by new_dyna_start; NOTE(review): 'add r12, fp, #28' above is now dead code -- r12/ip is clobbered by this pop and no longer used as the restore base
	.size	new_dyna_leave, .-new_dyna_leave
+	/* these are used to call memhandlers */
	.align	2
	.global	indirect_jump_indexed
	.type	indirect_jump_indexed, %function
indirect_jump:
	ldr r12, [fp, #last_count-dynarec_local]
	add r2, r2, r12
-	str r2, [fp, #reg_cop0+36-dynarec_local] /* Count */
+	str r2, [fp, #cycle-dynarec_local] @ make the PCSX cycle counter current before entering the memhandler (replaces cop0 Count)
	mov pc, r0
	.size	indirect_jump, .-indirect_jump
	.size	indirect_jump_indexed, .-indirect_jump_indexed
	bic r1, r1, #2
	add r10, r0, r10
	str r1, [fp, #reg_cop0+48-dynarec_local] /* Status */
-	str r10, [fp, #reg_cop0+36-dynarec_local] /* Count */
+	str r10, [fp, #cycle-dynarec_local] @ store absolute cycles (last_count + r10) to PCSX counter before check_interupt
	bl check_interupt
	ldr r1, [fp, #next_interupt-dynarec_local]
	ldr r0, [fp, #reg_cop0+56-dynarec_local] /* EPC */
	.global	new_dyna_start
	.type	new_dyna_start, %function
new_dyna_start:
-	ldr r12, .dlptr
-	stmia r12, {r4, r5, r6, r7, r8, r9, sl, fp, lr}
-	sub fp, r12, #28
+	/* ip is stored to conform EABI alignment */
+	stmfd sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, lr} @ 10 regs = 40 bytes, keeps sp 8-byte aligned; popped by new_dyna_leave / the ldmnefd stop path
+.if HAVE_ARMV7
+	movw fp, #:lower16:dynarec_local @ fp = &dynarec_local via movw/movt pair (no literal-pool load on v7)
+	movt fp, #:upper16:dynarec_local
+.else
+	ldr fp, .dlptr @ pre-v7: fetch &dynarec_local from the literal word below
+.endif
	ldr r0, [fp, #pcaddr-dynarec_local]
-	/*bl new_recompile_block*/
	bl get_addr_ht
	ldr r1, [fp, #next_interupt-dynarec_local]
-	ldr r10, [fp, #reg_cop0+36-dynarec_local] /* Count */
+	ldr r10, [fp, #cycle-dynarec_local] @ r10 = PCSX cycles; held relative to last_count while inside translated code
	str r1, [fp, #last_count-dynarec_local]
	sub r10, r10, r1
	mov pc, r0
.dlptr:
-	.word dynarec_local+28
+	.word dynarec_local @ now the base address: callee-saved regs go to the stack, not dynarec_local+28 (literal still emitted in v7 builds; 4 wasted bytes)
	.size	new_dyna_start, .-new_dyna_start
	.align	2