+// Emit the entry check for a translated block: compare the emulated
+// cycle counter (kept in ARM register r11) against zero and, when the
+// budget is exhausted (<= 0), call ssp_drc_end to leave translated code.
+// Precondition: r0 must hold the SSP PC of the block being entered
+// (presumably so ssp_drc_end can record where execution stopped — confirm).
+static void emit_block_prologue(void)
+{
+ // check if there are enough cycles..
+ // note: r0 must contain PC of current block
+ EOP_CMP_IMM(11,0,0); // cmp r11, #0
+ emit_call(A_COND_LE, ssp_drc_end); // taken only when r11 <= 0
+}
+
+/* Emit the exit sequence of a translated block: subtract the block's
+ * cycle cost from the counter in r11, then transfer control to the next
+ * block — directly when its translation already exists in the block
+ * tables, otherwise through the dispatcher (ssp_drc_next), which can
+ * resolve/translate the target at run time.
+ *
+ * cond:
+ * >0: direct (un)conditional jump
+ * <0: indirect jump
+ * pc: branch-taken target PC; end_pc: fall-through PC (used when the
+ * jump is conditional).
+ */
+static void emit_block_epilogue(int cycles, int cond, int pc, int end_pc)
+{
+ // cycle cost must fit an 8-bit ARM immediate; clamp and log oversize counts
+ if (cycles > 0xff) { elprintf(EL_ANOMALY, "large cycle count: %i\n", cycles); cycles = 0xff; }
+ EOP_SUB_IMM(11,11,0,cycles); // sub r11, r11, #cycles
+
+ if (cond < 0 || (end_pc >= 0x400 && pc < 0x400)) {
+ // indirect jump, or rom -> iram jump, must use dispatcher
+ // (iram targets depend on the current iram_context, so a cached
+ // direct link from rom code would be unsafe)
+ emit_jump(A_COND_AL, ssp_drc_next);
+ }
+ else if (cond == A_COND_AL) {
+ // unconditional direct jump: link straight to the target's
+ // translation if it exists (< 0x400 means iram, context-dependent)
+ u32 *target = (pc < 0x400) ? ssp_block_table_iram[ssp->drc.iram_context][pc] : ssp_block_table[pc];
+ if (target != NULL)
+ emit_jump(A_COND_AL, target);
+ else {
+ emit_jump(A_COND_AL, ssp_drc_next);
+ // cause the next block to be emitted over jump instruction
+ // (back tcache_ptr up one word so translation continues in place)
+ tcache_ptr--;
+ }
+ }
+ else {
+ // conditional direct jump: emit a pair of exits — one for the taken
+ // path (pc) and one for the fall-through (end_pc)
+ u32 *target1 = (pc < 0x400) ? ssp_block_table_iram[ssp->drc.iram_context][pc] : ssp_block_table[pc];
+ u32 *target2 = (end_pc < 0x400) ? ssp_block_table_iram[ssp->drc.iram_context][end_pc] : ssp_block_table[end_pc];
+ if (target1 != NULL)
+ emit_jump(cond, target1);
+ else emit_call(cond, ssp_drc_next_patch); // presumably patched to a direct jump once the target is translated — confirm
+ if (target2 != NULL)
+ emit_jump(tr_neg_cond(cond), target2); // neg_cond, to be able to swap jumps if needed
+ else emit_call(tr_neg_cond(cond), ssp_drc_next_patch);
+ }
+}
+