@ ARM (32-bit) dynarec support code: exported entry points plus the
@ register conventions used by the generated (translated) code.
9 .global flush_inval_caches
12 .global ssp_drc_next_patch
16 @ translation cache buffer
19 .size tcache, TCACHE_SIZE
29 mov r2, #0x0 @ must be 0
@ Index order of the SSP general registers within the context's GR
@ block (addressed below as SSP_OFFS_GR + SSP_<reg>*4):
34 @ SSP_GR0, SSP_X, SSP_Y, SSP_A,
35 @ SSP_ST, SSP_STACK, SSP_PC, SSP_P,
36 @ SSP_PM0, SSP_PM1, SSP_PM2, SSP_XST,
37 @ SSP_PM4, SSP_gr13, SSP_PMC, SSP_AL
@ ARM register allocation while executing translated code:
42 @ r6: STACK and emu flags: sss0 * .uu. .lll NZCV (NZCV is PSR bits from ARM)
// Byte offsets of emulator-state fields within the SSP context
// structure; the context base pointer is kept in r7 (see uses below).
50 #define SSP_OFFS_GR 0x400 // general register file (SSP_GR0..SSP_AL, 4 bytes each)
54 #define SSP_OFFS_EMUSTAT 0x484 // emu_status
55 #define SSP_OFFS_IRAM_DIRTY 0x494 // nonzero after IRAM was written
56 #define SSP_OFFS_IRAM_CTX 0x498 // iram_context
57 #define SSP_OFFS_BLTAB 0x49c // block_table
58 #define SSP_OFFS_BLTAB_IRAM 0x4a0 // block_table_iram (indexed by iram_context)
59 #define SSP_OFFS_TMP0 0x4a4 // for entry PC
60 #define SSP_OFFS_TMP1 0x4a8 // scratch slot kept live across calls
61 #define SSP_OFFS_TMP2 0x4ac // caller's return address (patch site + 4)
62 #define SSP_WAIT_PM0 0x2000 // emu_status flag: SSP is busy-waiting on PM0
@ Dispatch to the translated handler for the SSP PC held in r0,
@ translating the block first when no cached handler exists.
@ r7 = SSP context base.  With patch_jump=1 the caller's address is
@ saved in TMP2 so the calling branch can later be rewritten to jump
@ to the handler directly -- NOTE(review): inferred from the TMP2
@ store and the patch code further down; confirm against full file.
65 .macro ssp_drc_do_next patch_jump=0
67 str lr, [r7, #SSP_OFFS_TMP2] @ jump instr. (actually call) address + 4
71 str r0, [r7, #SSP_OFFS_TMP0]
@ look up the cached handler: block_table[PC]
75 ldr r2, [r7, #SSP_OFFS_BLTAB]
76 ldr r2, [r2, r0, lsl #2]
@ translate and cache the resulting handler (r2) at block_table[PC]
83 bl ssp_translate_block
85 ldr r0, [r7, #SSP_OFFS_TMP0] @ entry PC
86 ldr r1, [r7, #SSP_OFFS_BLTAB]
87 str r2, [r1, r0, lsl #2]
@ IRAM path: if IRAM was written, recompute which IRAM context applies
95 ldr r1, [r7, #SSP_OFFS_IRAM_DIRTY]
97 ldreq r1, [r7, #SSP_OFFS_IRAM_CTX] @ clean: reuse cached context
98 beq 1f @ ssp_de_iram_ctx
100 bl ssp_get_iram_context
@ update dirty flag / cached context (intervening lines not shown here)
102 str r1, [r7, #SSP_OFFS_IRAM_DIRTY]
104 str r1, [r7, #SSP_OFFS_IRAM_CTX]
105 ldr r0, [r7, #SSP_OFFS_TMP0] @ entry PC
107 1: @ ssp_de_iram_ctx:
108 ldr r2, [r7, #SSP_OFFS_BLTAB_IRAM]
109 add r2, r2, r1, lsl #12 @ block_tab_iram + iram_context * 0x800/2*4
110 add r1, r2, r0, lsl #2 @ &block_table_iram[iram_context][PC]
@ TMP1 preserves the table slot across the translate call
118 str r1, [r7, #SSP_OFFS_TMP1]
119 bl ssp_translate_block
121 ldr r0, [r7, #SSP_OFFS_TMP0] @ entry PC
122 ldr r1, [r7, #SSP_OFFS_TMP1] @ &block_table_iram[iram_context][rPC]
129 .endm @ ssp_drc_do_next
@ DRC entry: save ARM callee-saved registers, then unpack the SSP
@ register file (r7 + SSP_OFFS_GR) into the ARM registers that the
@ translated code expects (layout described in the comments above).
133 stmfd sp!, {r4-r11, lr}
@ r2 presumably points at the GR block here (set up on lines not shown)
140 ldmia r2, {r3,r4,r5,r6,r8}
@ pack the 16-bit X and Y halves into a single register
143 orr r4, r3, r4, lsr #16 @ XXYY
@ assemble the r6 emu-flags word piece by piece
145 and r8, r8, #0x0f0000
146 mov r8, r8, lsl #13 @ sss0 *
147 and r9, r6, #0x670000
151 orrne r8, r8, #0x4 @ sss0 * NZ..
152 orr r6, r8, r9, lsr #12 @ sss0 * .uu. .lll NZ..
@ load packed SSP pointer registers, P, and the entry PC
154 ldr r8, [r7, #0x440] @ r0-r2
155 ldr r9, [r7, #0x444] @ r4-r6
156 ldr r10,[r7, #(0x400+SSP_P*4)] @ P
158 ldr r0, [r7, #(SSP_OFFS_GR+SSP_PC*4)] @ r0 = SSP PC
@ Call-site patching: rewrite the branch that got us here so future
@ executions jump straight to the translated handler, then flush the
@ caches so the modified code is visible to the instruction stream.
170 ldr r1, [r7, #SSP_OFFS_TMP2] @ jump instr. (actually call) address + 4
@ 0xe1000000 | 0x00a00000 = 0xe1a00000 = "mov r0, r0", the ARM nop
172 moveq r3, #0xe1000000
173 orreq r3, r3, #0x00a00000 @ nop
180 streq r3, [r1, #-4] @ move the other cond up
181 moveq r3, #0xe1000000
182 orreq r3, r3, #0x00a00000
183 streq r3, [r1] @ fill its place with nop
@ build the replacement branch: drop the link bit, splice in the new
@ offset from r12 (instruction word is rotated; un-rotate at the end)
189 bic r3, r3, #1 @ L bit
190 orr r3, r3, r12,lsl #6
191 mov r3, r3, ror #8 @ patched branch instruction
@ r2/r0 are live across the call, so park r2 in TMP1 (r0 already in TMP0)
195 str r2, [r7, #SSP_OFFS_TMP1]
198 bl flush_inval_caches
199 ldr r2, [r7, #SSP_OFFS_TMP1]
200 ldr r0, [r7, #SSP_OFFS_TMP0]
@ DRC exit: write the ARM-held SSP state back into the context
@ structure and restore the ARM callee-saved registers.
206 str r0, [r7, #(SSP_OFFS_GR+SSP_PC*4)] @ store SSP PC
209 str r10,[r7, #(0x400+SSP_P*4)] @ P
210 str r8, [r7, #0x440] @ r0-r2
211 str r9, [r7, #0x444] @ r4-r6
@ keep only the STACK field from r9
214 and r9, r9, #(7<<16) @ STACK
@ load emulated flags into the ARM PSR, then read N/Z back out of it
@ via the conditional orrs below
216 msr cpsr_flg, r3 @ to ARM PSR
219 orrmi r6, r6, #0x80000000 @ N
220 orreq r6, r6, #0x20000000 @ Z
@ re-split the packed XXYY word back into separate X and Y values
222 mov r3, r4, lsl #16 @ Y
224 mov r2, r2, lsl #16 @ X
227 stmia r8, {r2,r3,r5,r6,r9}
230 ldmfd sp!, {r4-r11, lr}
@ Busy-wait detection on PM0: when the comparison (made on lines not
@ shown here) matches, set SSP_WAIT_PM0 in emu_status.
@ NOTE(review): presumably lets the outer emulation loop skip cycles
@ while the SSP spins on PM0 -- confirm against the full file.
239 ldr r0, [r7, #(SSP_OFFS_GR+SSP_PM0*4)]
240 ldr r1, [r7, #SSP_OFFS_EMUSTAT]
242 orreq r1, r1, #SSP_WAIT_PM0 @ condition established on lines not shown
244 streq r1, [r7, #SSP_OFFS_EMUSTAT]