// SPDX-License-Identifier: LGPL-2.1-or-later
/*
 * Copyright (C) 2019-2021 Paul Cercueil <paul@crapouillou.net>
 */
#include "disassembler.h"
#include "interpreter.h"
#include "lightrec-private.h"

#include <stdbool.h>
#include <stddef.h>
struct interpreter;

static u32 int_CP0(struct interpreter *inter);
static u32 int_CP2(struct interpreter *inter);
static u32 int_SPECIAL(struct interpreter *inter);
static u32 int_REGIMM(struct interpreter *inter);
static u32 int_branch(struct interpreter *inter, u32 pc,
		      union code code, bool branch);

typedef u32 (*lightrec_int_func_t)(struct interpreter *inter);

static const lightrec_int_func_t int_standard[64];
struct interpreter {
	struct lightrec_state *state;
	struct block *block;
	struct opcode *op;
	u32 cycles;
	bool delay_slot;
	u16 offset;
};
static u32 int_get_branch_pc(const struct interpreter *inter)
{
	return get_branch_pc(inter->block, inter->offset, 0);
}

static inline u32 int_get_ds_pc(const struct interpreter *inter, s16 imm)
{
	return get_ds_pc(inter->block, inter->offset, imm);
}

static inline struct opcode *next_op(const struct interpreter *inter)
{
	return &inter->block->opcode_list[inter->offset + 1];
}

static inline u32 execute(lightrec_int_func_t func, struct interpreter *inter)
{
	return (*func)(inter);
}

static inline u32 lightrec_int_op(struct interpreter *inter)
{
	return execute(int_standard[inter->op->i.op], inter);
}
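/* Stepping helpers: jump_skip() advances to the next opcode in the block's
 * opcode list and dispatches it, flushing the accumulated cycle count into
 * the state when the next opcode is flagged LIGHTREC_SYNC. jump_next() adds
 * the current opcode's cycles first, and jump_after_branch() additionally
 * skips over the delay slot opcode, which has already been executed. Both
 * return 0 when interpreting a delay slot, to hand control back to the
 * caller. */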
static inline u32 jump_skip(struct interpreter *inter)
{
	inter->op = next_op(inter);
	inter->offset++;

	if (inter->op->flags & LIGHTREC_SYNC) {
		inter->state->current_cycle += inter->cycles;
		inter->cycles = 0;
	}

	return lightrec_int_op(inter);
}
static inline u32 jump_next(struct interpreter *inter)
{
	inter->cycles += lightrec_cycles_of_opcode(inter->op->c);

	if (unlikely(inter->delay_slot))
		return 0;

	return jump_skip(inter);
}
static inline u32 jump_after_branch(struct interpreter *inter)
{
	inter->cycles += lightrec_cycles_of_opcode(inter->op->c);

	if (unlikely(inter->delay_slot))
		return 0;

	/* Skip the delay slot opcode, which has already been executed */
	inter->op = next_op(inter);
	inter->offset++;

	return jump_skip(inter);
}
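/* Flush the accumulated cycle count to the state before executing a branch.
 * The cost of the branch opcode (and of its delay slot, when there is one)
 * is counted here in advance; inter->cycles is then set to the negated
 * cost, so that when those opcodes later add their own cycles the total is
 * only counted once. */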
static void update_cycles_before_branch(struct interpreter *inter)
{
	u32 cycles;

	if (!inter->delay_slot) {
		cycles = lightrec_cycles_of_opcode(inter->op->c);

		if (has_delay_slot(inter->op->c) &&
		    !(inter->op->flags & LIGHTREC_NO_DS))
			cycles += lightrec_cycles_of_opcode(next_op(inter)->c);

		inter->cycles += cycles;
		inter->state->current_cycle += inter->cycles;
		inter->cycles = -cycles;
	}
}
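/* Statically evaluate whether a branch opcode would be taken, given the
 * current contents of the register cache. Unconditional jumps (J, JAL, JR,
 * JALR) are always taken; conditional branches are decided from the cached
 * register values; anything else is treated as not taken. */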
static bool is_branch_taken(const u32 *reg_cache, union code op)
{
	switch (op.i.op) {
	case OP_SPECIAL:
		return op.r.op == OP_SPECIAL_JR || op.r.op == OP_SPECIAL_JALR;
	case OP_J:
	case OP_JAL:
		return true;
	case OP_BEQ:
		return reg_cache[op.r.rs] == reg_cache[op.r.rt];
	case OP_BNE:
		return reg_cache[op.r.rs] != reg_cache[op.r.rt];
	case OP_REGIMM:
		switch (op.r.rt) {
		case OP_REGIMM_BLTZ:
		case OP_REGIMM_BLTZAL:
			return (s32)reg_cache[op.r.rs] < 0;
		case OP_REGIMM_BGEZ:
		case OP_REGIMM_BGEZAL:
			return (s32)reg_cache[op.r.rs] >= 0;
		}
	default:
		break;
	}

	return false;
}
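/* Execute the opcode in a branch's delay slot and compute the address of
 * the next instruction to run. Plain delay slots are simply interpreted in
 * place; delayed loads whose value is consumed at the branch target, and
 * branches located inside delay slots ("impossible branches"), need the
 * special handling below. */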
static u32 int_delay_slot(struct interpreter *inter, u32 pc, bool branch)
{
	struct lightrec_state *state = inter->state;
	u32 *reg_cache = state->regs.gpr;
	struct opcode new_op, *op = next_op(inter);
	union code op_next;
	struct interpreter inter2 = {
		.state = state,
		.cycles = inter->cycles,
		.delay_slot = true,
		.block = NULL,
	};
	bool run_first_op = false, dummy_ld = false, save_rs = false,
	     load_in_ds, branch_in_ds = false, branch_at_addr = false,
	     branch_taken;
	u32 old_rs, new_rs, new_rt;
	u32 next_pc, ds_next_pc;
	u32 cause, epc;

	if (op->i.op == OP_CP0 && op->r.rs == OP_CP0_RFE) {
		/* When an IRQ happens, the PSX exception handlers (when done)
		 * will jump back to the instruction that was executed right
		 * before the IRQ, unless it was a GTE opcode; in that case, it
		 * jumps to the instruction right after.
		 * Since we will never handle the IRQ right after a GTE opcode,
		 * but on branch boundaries, we need to adjust the return
		 * address so that the GTE opcode is effectively executed. */
		cause = state->regs.cp0[13];
		epc = state->regs.cp0[14];

		if (!(cause & 0x7c) && epc == pc - 4)
			pc -= 4;
	}

	if (inter->delay_slot) {
		/* The branch opcode was in a delay slot of another branch
		 * opcode. Just return the target address of the second
		 * branch. */
		return pc;
	}

	/* An opcode located in the delay slot performing a delayed read
	 * requires special handling; we will always resort to using the
	 * interpreter in that case.
	 * Same goes for when we have a branch in a delay slot of another
	 * branch. */
	load_in_ds = load_in_delay_slot(op->c);
	branch_in_ds = has_delay_slot(op->c);

	if (branch) {
		if (load_in_ds || branch_in_ds)
			op_next = lightrec_read_opcode(state, pc);

		if (load_in_ds) {
			/* Verify that the next block actually reads the
			 * destination register of the delay slot opcode. */
			run_first_op = opcode_reads_register(op_next, op->r.rt);
		}

		if (branch_in_ds) {
			run_first_op = true;
			next_pc = pc + 4;
		}

		if (load_in_ds && run_first_op) {
			next_pc = pc + 4;

			/* If the first opcode of the next block writes the
			 * register used as the address for the load, we need
			 * to reset to the old value after it has been
			 * executed, then restore the new value after the delay
			 * slot opcode has been executed. */
			save_rs = opcode_reads_register(op->c, op->r.rs) &&
				  opcode_writes_register(op_next, op->r.rs);
			if (save_rs)
				old_rs = reg_cache[op->r.rs];

			/* If both the first opcode of the next block and the
			 * delay slot opcode write to the same register, the
			 * value written by the delay slot opcode is
			 * discarded. */
			dummy_ld = opcode_writes_register(op_next, op->r.rt);
		}

		if (!run_first_op) {
			next_pc = pc;
		} else if (has_delay_slot(op_next)) {
			/* The first opcode of the next block is a branch, so we
			 * cannot execute it here, because of the load delay.
			 * Just check whether or not the branch would be taken,
			 * and save that info into the interpreter struct. */
			branch_at_addr = true;
			branch_taken = is_branch_taken(reg_cache, op_next);
			pr_debug("Target of impossible branch is a branch, "
				 "%staken.\n", branch_taken ? "" : "not ");
			inter->cycles += lightrec_cycles_of_opcode(op_next);
			old_rs = reg_cache[op_next.r.rs];
		} else {
			new_op.c = op_next;
			new_op.flags = 0;
			inter2.op = &new_op;

			/* Execute the first opcode of the next block */
			lightrec_int_op(&inter2);

			if (save_rs) {
				new_rs = reg_cache[op->r.rs];
				reg_cache[op->r.rs] = old_rs;
			}

			inter->cycles += lightrec_cycles_of_opcode(op_next);
		}
	} else {
		next_pc = int_get_ds_pc(inter, 2);
	}

	inter2.block = inter->block;
	inter2.op = op;
	inter2.cycles = inter->cycles;

	if (dummy_ld)
		new_rt = reg_cache[op->r.rt];

	/* Execute delay slot opcode */
	ds_next_pc = lightrec_int_op(&inter2);

	if (branch_at_addr) {
		if (op_next.i.op == OP_SPECIAL)
			/* TODO: Handle JALR setting $ra */
			ds_next_pc = old_rs;
		else if (op_next.i.op == OP_J || op_next.i.op == OP_JAL)
			/* TODO: Handle JAL setting $ra */
			ds_next_pc = (pc & 0xf0000000) | (op_next.j.imm << 2);
		else
			ds_next_pc = pc + 4 + ((s16)op_next.i.imm << 2);
	}

	if (branch_at_addr && !branch_taken) {
		/* If the branch at the target of the branch opcode is not
		 * taken, we jump to its delay slot */
		next_pc = pc + sizeof(u32);
	} else if (branch_at_addr || (!branch && branch_in_ds)) {
		next_pc = ds_next_pc;
	}

	if (save_rs)
		reg_cache[op->r.rs] = new_rs;
	if (dummy_ld)
		reg_cache[op->r.rt] = new_rt;

	inter->cycles += lightrec_cycles_of_opcode(op->c);

	if (branch_at_addr && branch_taken) {
		/* If the branch at the target of the branch opcode is taken,
		 * we execute its delay slot here, and jump to its target
		 * address. */
		op_next = lightrec_read_opcode(state, pc + 4);

		new_op.c = op_next;
		new_op.flags = 0;
		inter2.op = &new_op;
		inter2.block = NULL;

		inter->cycles += lightrec_cycles_of_opcode(op_next);

		pr_debug("Running delay slot of branch at target of impossible "
			 "branch\n");
		lightrec_int_op(&inter2);
	}

	return next_pc;
}
static u32 int_unimplemented(struct interpreter *inter)
{
	pr_warn("Unimplemented opcode 0x%08x\n", inter->op->opcode);

	return jump_next(inter);
}
static u32 int_jump(struct interpreter *inter, bool link)
{
	struct lightrec_state *state = inter->state;
	u32 old_pc = int_get_branch_pc(inter);
	u32 pc = (old_pc & 0xf0000000) | (inter->op->j.imm << 2);

	if (link)
		state->regs.gpr[31] = old_pc + 8;

	if (inter->op->flags & LIGHTREC_NO_DS)
		return pc;

	return int_delay_slot(inter, pc, true);
}
static u32 int_J(struct interpreter *inter)
{
	return int_jump(inter, false);
}

static u32 int_JAL(struct interpreter *inter)
{
	return int_jump(inter, true);
}
static u32 int_jumpr(struct interpreter *inter, u8 link_reg)
{
	struct lightrec_state *state = inter->state;
	u32 old_pc, next_pc = state->regs.gpr[inter->op->r.rs];

	if (link_reg) {
		old_pc = int_get_branch_pc(inter);
		state->regs.gpr[link_reg] = old_pc + 8;
	}

	if (inter->op->flags & LIGHTREC_NO_DS)
		return next_pc;

	return int_delay_slot(inter, next_pc, true);
}
static u32 int_special_JR(struct interpreter *inter)
{
	return int_jumpr(inter, 0);
}

static u32 int_special_JALR(struct interpreter *inter)
{
	return int_jumpr(inter, inter->op->r.rd);
}
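/* When the branch target lies within the block currently being interpreted
 * (LIGHTREC_LOCAL_BRANCH) and the branch goes forward, the target can be
 * emulated directly by re-entering the block at the computed address. */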
static u32 int_do_branch(struct interpreter *inter, u32 old_pc, u32 next_pc)
{
	if (!inter->delay_slot &&
	    (inter->op->flags & LIGHTREC_LOCAL_BRANCH) &&
	    (s16)inter->op->c.i.imm >= 0) {
		next_pc = old_pc + ((1 + (s16)inter->op->c.i.imm) << 2);
		next_pc = lightrec_emulate_block(inter->state, inter->block, next_pc);
	}

	return next_pc;
}
static u32 int_branch(struct interpreter *inter, u32 pc,
		      union code code, bool branch)
{
	u32 next_pc = pc + 4 + ((s16)code.i.imm << 2);

	update_cycles_before_branch(inter);

	if (inter->op->flags & LIGHTREC_NO_DS) {
		if (branch)
			return int_do_branch(inter, pc, next_pc);
		else
			return jump_next(inter);
	}

	if (!inter->delay_slot)
		next_pc = int_delay_slot(inter, next_pc, branch);

	if (branch)
		return int_do_branch(inter, pc, next_pc);

	if (inter->op->flags & LIGHTREC_EMULATE_BRANCH)
		return next_pc;

	return jump_after_branch(inter);
}
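/* BEQ and BNE share one implementation: the comparison result is XOR'ed
 * with the 'bne' flag, which inverts the branch condition for BNE. */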
static u32 int_beq(struct interpreter *inter, bool bne)
{
	u32 rs, rt, old_pc = int_get_branch_pc(inter);

	rs = inter->state->regs.gpr[inter->op->i.rs];
	rt = inter->state->regs.gpr[inter->op->i.rt];

	return int_branch(inter, old_pc, inter->op->c, (rs == rt) ^ bne);
}

static u32 int_BEQ(struct interpreter *inter)
{
	return int_beq(inter, false);
}

static u32 int_BNE(struct interpreter *inter)
{
	return int_beq(inter, true);
}
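/* Common implementation for BLTZ/BGEZ/BLTZAL/BGEZAL/BLEZ/BGTZ. The base
 * condition ((regimm && !rs) || rs > 0) evaluates "rs >= 0" for the REGIMM
 * opcodes and "rs > 0" for BGTZ; XOR'ing with 'lt' inverts it for the
 * less-than variants (BLTZ, BLTZAL, BLEZ). The 'link' flag selects the AL
 * variants, which write the return address to $ra. */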
static u32 int_bgez(struct interpreter *inter, bool link, bool lt, bool regimm)
{
	u32 old_pc = int_get_branch_pc(inter);
	s32 rs;

	if (link)
		inter->state->regs.gpr[31] = old_pc + 8;

	rs = (s32)inter->state->regs.gpr[inter->op->i.rs];

	return int_branch(inter, old_pc, inter->op->c,
			  ((regimm && !rs) || rs > 0) ^ lt);
}

static u32 int_regimm_BLTZ(struct interpreter *inter)
{
	return int_bgez(inter, false, true, true);
}

static u32 int_regimm_BGEZ(struct interpreter *inter)
{
	return int_bgez(inter, false, false, true);
}

static u32 int_regimm_BLTZAL(struct interpreter *inter)
{
	return int_bgez(inter, true, true, true);
}

static u32 int_regimm_BGEZAL(struct interpreter *inter)
{
	return int_bgez(inter, true, false, true);
}

static u32 int_BLEZ(struct interpreter *inter)
{
	return int_bgez(inter, false, true, false);
}

static u32 int_BGTZ(struct interpreter *inter)
{
	return int_bgez(inter, false, false, false);
}
static u32 int_cfc(struct interpreter *inter)
{
	struct lightrec_state *state = inter->state;
	const struct opcode *op = inter->op;
	u32 val;

	val = lightrec_mfc(state, op->c);

	if (likely(op->r.rt))
		state->regs.gpr[op->r.rt] = val;

	return jump_next(inter);
}
static u32 int_ctc(struct interpreter *inter)
{
	struct lightrec_state *state = inter->state;
	const struct opcode *op = inter->op;

	lightrec_mtc(state, op->c, state->regs.gpr[op->r.rt]);

	/* If we have a MTC0 or CTC0 to CP0 register 12 (Status) or 13 (Cause),
	 * return early so that the emulator will be able to check software
	 * interrupt status. */
	if (!(inter->op->flags & LIGHTREC_NO_DS) &&
	    op->i.op == OP_CP0 && (op->r.rd == 12 || op->r.rd == 13))
		return int_get_ds_pc(inter, 1);

	return jump_next(inter);
}
static u32 int_cp0_RFE(struct interpreter *inter)
{
	lightrec_rfe(inter->state);

	return jump_next(inter);
}

static u32 int_CP(struct interpreter *inter)
{
	lightrec_cp(inter->state, inter->op->c);

	return jump_next(inter);
}
/* ADDI and ADDIU are handled identically; the overflow exception of ADDI
 * is not emulated. */
static u32 int_ADDI(struct interpreter *inter)
{
	u32 *reg_cache = inter->state->regs.gpr;
	struct opcode_i *op = &inter->op->i;

	if (likely(op->rt))
		reg_cache[op->rt] = reg_cache[op->rs] + (s32)(s16)op->imm;

	return jump_next(inter);
}

static u32 int_SLTI(struct interpreter *inter)
{
	u32 *reg_cache = inter->state->regs.gpr;
	struct opcode_i *op = &inter->op->i;

	if (likely(op->rt))
		reg_cache[op->rt] = (s32)reg_cache[op->rs] < (s32)(s16)op->imm;

	return jump_next(inter);
}

static u32 int_SLTIU(struct interpreter *inter)
{
	u32 *reg_cache = inter->state->regs.gpr;
	struct opcode_i *op = &inter->op->i;

	if (likely(op->rt))
		reg_cache[op->rt] = reg_cache[op->rs] < (u32)(s32)(s16)op->imm;

	return jump_next(inter);
}

static u32 int_ANDI(struct interpreter *inter)
{
	u32 *reg_cache = inter->state->regs.gpr;
	struct opcode_i *op = &inter->op->i;

	if (likely(op->rt))
		reg_cache[op->rt] = reg_cache[op->rs] & op->imm;

	return jump_next(inter);
}

static u32 int_ORI(struct interpreter *inter)
{
	u32 *reg_cache = inter->state->regs.gpr;
	struct opcode_i *op = &inter->op->i;

	if (likely(op->rt))
		reg_cache[op->rt] = reg_cache[op->rs] | op->imm;

	return jump_next(inter);
}

static u32 int_XORI(struct interpreter *inter)
{
	u32 *reg_cache = inter->state->regs.gpr;
	struct opcode_i *op = &inter->op->i;

	if (likely(op->rt))
		reg_cache[op->rt] = reg_cache[op->rs] ^ op->imm;

	return jump_next(inter);
}

static u32 int_LUI(struct interpreter *inter)
{
	struct opcode_i *op = &inter->op->i;

	inter->state->regs.gpr[op->rt] = op->imm << 16;

	return jump_next(inter);
}
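/* Memory accesses all funnel through lightrec_rw(), which performs the
 * address translation and I/O dispatch. For loads, the value read is
 * written back into the register cache unless the target is $zero. */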
static u32 int_io(struct interpreter *inter, bool is_load)
{
	struct opcode_i *op = &inter->op->i;
	u32 *reg_cache = inter->state->regs.gpr;
	u32 val;

	val = lightrec_rw(inter->state, inter->op->c,
			  reg_cache[op->rs], reg_cache[op->rt],
			  &inter->op->flags, inter->block);

	if (is_load && op->rt)
		reg_cache[op->rt] = val;

	return jump_next(inter);
}
static u32 int_load(struct interpreter *inter)
{
	return int_io(inter, true);
}
static u32 int_store(struct interpreter *inter)
{
	u32 next_pc;

	if (likely(!(inter->op->flags & LIGHTREC_SMC)))
		return int_io(inter, false);

	lightrec_rw(inter->state, inter->op->c,
		    inter->state->regs.gpr[inter->op->i.rs],
		    inter->state->regs.gpr[inter->op->i.rt],
		    &inter->op->flags, inter->block);

	next_pc = int_get_ds_pc(inter, 1);

	/* Invalidate next PC, to force the rest of the block to be rebuilt */
	lightrec_invalidate(inter->state, next_pc, 4);

	return next_pc;
}
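/* LWC2 loads into a GTE (CP2) register rather than a GPR, so it goes
 * through int_io() with is_load = false: lightrec_rw() delivers the value
 * to the coprocessor, and nothing is written to the register cache. */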
static u32 int_LWC2(struct interpreter *inter)
{
	return int_io(inter, false);
}
static u32 int_special_SLL(struct interpreter *inter)
{
	struct opcode *op = inter->op;
	u32 rt;

	if (op->opcode) { /* An all-zero opcode is a NOP */
		rt = inter->state->regs.gpr[op->r.rt];
		inter->state->regs.gpr[op->r.rd] = rt << op->r.imm;
	}

	return jump_next(inter);
}
static u32 int_special_SRL(struct interpreter *inter)
{
	struct opcode *op = inter->op;
	u32 rt = inter->state->regs.gpr[op->r.rt];

	inter->state->regs.gpr[op->r.rd] = rt >> op->r.imm;

	return jump_next(inter);
}

static u32 int_special_SRA(struct interpreter *inter)
{
	struct opcode *op = inter->op;
	s32 rt = inter->state->regs.gpr[op->r.rt];

	inter->state->regs.gpr[op->r.rd] = rt >> op->r.imm;

	return jump_next(inter);
}

static u32 int_special_SLLV(struct interpreter *inter)
{
	struct opcode *op = inter->op;
	u32 rs = inter->state->regs.gpr[op->r.rs];
	u32 rt = inter->state->regs.gpr[op->r.rt];

	inter->state->regs.gpr[op->r.rd] = rt << (rs & 0x1f);

	return jump_next(inter);
}

static u32 int_special_SRLV(struct interpreter *inter)
{
	struct opcode *op = inter->op;
	u32 rs = inter->state->regs.gpr[op->r.rs];
	u32 rt = inter->state->regs.gpr[op->r.rt];

	inter->state->regs.gpr[op->r.rd] = rt >> (rs & 0x1f);

	return jump_next(inter);
}

static u32 int_special_SRAV(struct interpreter *inter)
{
	struct opcode *op = inter->op;
	u32 rs = inter->state->regs.gpr[op->r.rs];
	s32 rt = inter->state->regs.gpr[op->r.rt];

	inter->state->regs.gpr[op->r.rd] = rt >> (rs & 0x1f);

	return jump_next(inter);
}
static u32 int_syscall_break(struct interpreter *inter)
{
	if (inter->op->r.op == OP_SPECIAL_BREAK)
		inter->state->exit_flags |= LIGHTREC_EXIT_BREAK;
	else
		inter->state->exit_flags |= LIGHTREC_EXIT_SYSCALL;

	return int_get_ds_pc(inter, 0);
}
static u32 int_special_MFHI(struct interpreter *inter)
{
	u32 *reg_cache = inter->state->regs.gpr;
	struct opcode_r *op = &inter->op->r;

	if (likely(op->rd))
		reg_cache[op->rd] = reg_cache[REG_HI];

	return jump_next(inter);
}

static u32 int_special_MTHI(struct interpreter *inter)
{
	u32 *reg_cache = inter->state->regs.gpr;

	reg_cache[REG_HI] = reg_cache[inter->op->r.rs];

	return jump_next(inter);
}

static u32 int_special_MFLO(struct interpreter *inter)
{
	u32 *reg_cache = inter->state->regs.gpr;
	struct opcode_r *op = &inter->op->r;

	if (likely(op->rd))
		reg_cache[op->rd] = reg_cache[REG_LO];

	return jump_next(inter);
}

static u32 int_special_MTLO(struct interpreter *inter)
{
	u32 *reg_cache = inter->state->regs.gpr;

	reg_cache[REG_LO] = reg_cache[inter->op->r.rs];

	return jump_next(inter);
}
static u32 int_special_MULT(struct interpreter *inter)
{
	u32 *reg_cache = inter->state->regs.gpr;
	s32 rs = reg_cache[inter->op->r.rs];
	s32 rt = reg_cache[inter->op->r.rt];
	u8 reg_lo = get_mult_div_lo(inter->op->c);
	u8 reg_hi = get_mult_div_hi(inter->op->c);
	u64 res = (s64)rs * (s64)rt;

	if (!(inter->op->flags & LIGHTREC_NO_HI))
		reg_cache[reg_hi] = res >> 32;
	if (!(inter->op->flags & LIGHTREC_NO_LO))
		reg_cache[reg_lo] = res;

	return jump_next(inter);
}

static u32 int_special_MULTU(struct interpreter *inter)
{
	u32 *reg_cache = inter->state->regs.gpr;
	u32 rs = reg_cache[inter->op->r.rs];
	u32 rt = reg_cache[inter->op->r.rt];
	u8 reg_lo = get_mult_div_lo(inter->op->c);
	u8 reg_hi = get_mult_div_hi(inter->op->c);
	u64 res = (u64)rs * (u64)rt;

	if (!(inter->op->flags & LIGHTREC_NO_HI))
		reg_cache[reg_hi] = res >> 32;
	if (!(inter->op->flags & LIGHTREC_NO_LO))
		reg_cache[reg_lo] = res;

	return jump_next(inter);
}
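/* The R3000 does not trap on division by zero; the result registers take
 * well-defined values instead. For a signed DIV with rt == 0, LO gets -1
 * if rs >= 0 and +1 if rs < 0, and HI gets rs; the expression
 * (rs < 0) * 2 - 1 computes that LO value branchlessly. DIVU yields
 * LO = 0xffffffff and HI = rs. */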
static u32 int_special_DIV(struct interpreter *inter)
{
	u32 *reg_cache = inter->state->regs.gpr;
	s32 rs = reg_cache[inter->op->r.rs];
	s32 rt = reg_cache[inter->op->r.rt];
	u8 reg_lo = get_mult_div_lo(inter->op->c);
	u8 reg_hi = get_mult_div_hi(inter->op->c);
	u32 lo, hi;

	if (rt == 0) {
		hi = rs;
		lo = (rs < 0) * 2 - 1;
	} else {
		lo = rs / rt;
		hi = rs % rt;
	}

	if (!(inter->op->flags & LIGHTREC_NO_HI))
		reg_cache[reg_hi] = hi;
	if (!(inter->op->flags & LIGHTREC_NO_LO))
		reg_cache[reg_lo] = lo;

	return jump_next(inter);
}
static u32 int_special_DIVU(struct interpreter *inter)
{
	u32 *reg_cache = inter->state->regs.gpr;
	u32 rs = reg_cache[inter->op->r.rs];
	u32 rt = reg_cache[inter->op->r.rt];
	u8 reg_lo = get_mult_div_lo(inter->op->c);
	u8 reg_hi = get_mult_div_hi(inter->op->c);
	u32 lo, hi;

	if (rt == 0) {
		hi = rs;
		lo = 0xffffffff;
	} else {
		lo = rs / rt;
		hi = rs % rt;
	}

	if (!(inter->op->flags & LIGHTREC_NO_HI))
		reg_cache[reg_hi] = hi;
	if (!(inter->op->flags & LIGHTREC_NO_LO))
		reg_cache[reg_lo] = lo;

	return jump_next(inter);
}
static u32 int_special_ADD(struct interpreter *inter)
{
	u32 *reg_cache = inter->state->regs.gpr;
	struct opcode_r *op = &inter->op->r;
	s32 rs = reg_cache[op->rs];
	s32 rt = reg_cache[op->rt];

	if (likely(op->rd))
		reg_cache[op->rd] = rs + rt;

	return jump_next(inter);
}

static u32 int_special_SUB(struct interpreter *inter)
{
	u32 *reg_cache = inter->state->regs.gpr;
	struct opcode_r *op = &inter->op->r;
	u32 rs = reg_cache[op->rs];
	u32 rt = reg_cache[op->rt];

	if (likely(op->rd))
		reg_cache[op->rd] = rs - rt;

	return jump_next(inter);
}

static u32 int_special_AND(struct interpreter *inter)
{
	u32 *reg_cache = inter->state->regs.gpr;
	struct opcode_r *op = &inter->op->r;
	u32 rs = reg_cache[op->rs];
	u32 rt = reg_cache[op->rt];

	if (likely(op->rd))
		reg_cache[op->rd] = rs & rt;

	return jump_next(inter);
}

static u32 int_special_OR(struct interpreter *inter)
{
	u32 *reg_cache = inter->state->regs.gpr;
	struct opcode_r *op = &inter->op->r;
	u32 rs = reg_cache[op->rs];
	u32 rt = reg_cache[op->rt];

	if (likely(op->rd))
		reg_cache[op->rd] = rs | rt;

	return jump_next(inter);
}

static u32 int_special_XOR(struct interpreter *inter)
{
	u32 *reg_cache = inter->state->regs.gpr;
	struct opcode_r *op = &inter->op->r;
	u32 rs = reg_cache[op->rs];
	u32 rt = reg_cache[op->rt];

	if (likely(op->rd))
		reg_cache[op->rd] = rs ^ rt;

	return jump_next(inter);
}

static u32 int_special_NOR(struct interpreter *inter)
{
	u32 *reg_cache = inter->state->regs.gpr;
	struct opcode_r *op = &inter->op->r;
	u32 rs = reg_cache[op->rs];
	u32 rt = reg_cache[op->rt];

	if (likely(op->rd))
		reg_cache[op->rd] = ~(rs | rt);

	return jump_next(inter);
}

static u32 int_special_SLT(struct interpreter *inter)
{
	u32 *reg_cache = inter->state->regs.gpr;
	struct opcode_r *op = &inter->op->r;
	s32 rs = reg_cache[op->rs];
	s32 rt = reg_cache[op->rt];

	if (likely(op->rd))
		reg_cache[op->rd] = rs < rt;

	return jump_next(inter);
}

static u32 int_special_SLTU(struct interpreter *inter)
{
	u32 *reg_cache = inter->state->regs.gpr;
	struct opcode_r *op = &inter->op->r;
	u32 rs = reg_cache[op->rs];
	u32 rt = reg_cache[op->rt];

	if (likely(op->rd))
		reg_cache[op->rd] = rs < rt;

	return jump_next(inter);
}
static u32 int_META_MOV(struct interpreter *inter)
{
	u32 *reg_cache = inter->state->regs.gpr;
	struct opcode_r *op = &inter->op->r;

	if (likely(op->rd))
		reg_cache[op->rd] = reg_cache[op->rs];

	return jump_next(inter);
}

static u32 int_META_EXTC(struct interpreter *inter)
{
	u32 *reg_cache = inter->state->regs.gpr;
	struct opcode_i *op = &inter->op->i;

	if (likely(op->rt))
		reg_cache[op->rt] = (u32)(s32)(s8)reg_cache[op->rs];

	return jump_next(inter);
}

static u32 int_META_EXTS(struct interpreter *inter)
{
	u32 *reg_cache = inter->state->regs.gpr;
	struct opcode_i *op = &inter->op->i;

	if (likely(op->rt))
		reg_cache[op->rt] = (u32)(s32)(s16)reg_cache[op->rs];

	return jump_next(inter);
}
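/* Dispatch tables, indexed by the primary or secondary opcode field. On
 * toolchains where SET_DEFAULT_ELM can pre-fill the table, unhandled slots
 * point to a default handler; otherwise NULL entries are caught at dispatch
 * time via HAS_DEFAULT_ELM. */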
static const lightrec_int_func_t int_standard[64] = {
	SET_DEFAULT_ELM(int_standard, int_unimplemented),
	[OP_SPECIAL] = int_SPECIAL,
	[OP_REGIMM] = int_REGIMM,
	[OP_J] = int_J,
	[OP_JAL] = int_JAL,
	[OP_BEQ] = int_BEQ,
	[OP_BNE] = int_BNE,
	[OP_BLEZ] = int_BLEZ,
	[OP_BGTZ] = int_BGTZ,
	[OP_ADDI] = int_ADDI,
	[OP_ADDIU] = int_ADDI,
	[OP_SLTI] = int_SLTI,
	[OP_SLTIU] = int_SLTIU,
	[OP_ANDI] = int_ANDI,
	[OP_ORI] = int_ORI,
	[OP_XORI] = int_XORI,
	[OP_LUI] = int_LUI,
	[OP_CP0] = int_CP0,
	[OP_CP2] = int_CP2,
	[OP_LB] = int_load,
	[OP_LH] = int_load,
	[OP_LWL] = int_load,
	[OP_LW] = int_load,
	[OP_LBU] = int_load,
	[OP_LHU] = int_load,
	[OP_LWR] = int_load,
	[OP_SB] = int_store,
	[OP_SH] = int_store,
	[OP_SWL] = int_store,
	[OP_SW] = int_store,
	[OP_SWR] = int_store,
	[OP_LWC2] = int_LWC2,
	[OP_SWC2] = int_store,

	[OP_META_MOV] = int_META_MOV,
	[OP_META_EXTC] = int_META_EXTC,
	[OP_META_EXTS] = int_META_EXTS,
};
static const lightrec_int_func_t int_special[64] = {
	SET_DEFAULT_ELM(int_special, int_unimplemented),
	[OP_SPECIAL_SLL] = int_special_SLL,
	[OP_SPECIAL_SRL] = int_special_SRL,
	[OP_SPECIAL_SRA] = int_special_SRA,
	[OP_SPECIAL_SLLV] = int_special_SLLV,
	[OP_SPECIAL_SRLV] = int_special_SRLV,
	[OP_SPECIAL_SRAV] = int_special_SRAV,
	[OP_SPECIAL_JR] = int_special_JR,
	[OP_SPECIAL_JALR] = int_special_JALR,
	[OP_SPECIAL_SYSCALL] = int_syscall_break,
	[OP_SPECIAL_BREAK] = int_syscall_break,
	[OP_SPECIAL_MFHI] = int_special_MFHI,
	[OP_SPECIAL_MTHI] = int_special_MTHI,
	[OP_SPECIAL_MFLO] = int_special_MFLO,
	[OP_SPECIAL_MTLO] = int_special_MTLO,
	[OP_SPECIAL_MULT] = int_special_MULT,
	[OP_SPECIAL_MULTU] = int_special_MULTU,
	[OP_SPECIAL_DIV] = int_special_DIV,
	[OP_SPECIAL_DIVU] = int_special_DIVU,
	[OP_SPECIAL_ADD] = int_special_ADD,
	[OP_SPECIAL_ADDU] = int_special_ADD,
	[OP_SPECIAL_SUB] = int_special_SUB,
	[OP_SPECIAL_SUBU] = int_special_SUB,
	[OP_SPECIAL_AND] = int_special_AND,
	[OP_SPECIAL_OR] = int_special_OR,
	[OP_SPECIAL_XOR] = int_special_XOR,
	[OP_SPECIAL_NOR] = int_special_NOR,
	[OP_SPECIAL_SLT] = int_special_SLT,
	[OP_SPECIAL_SLTU] = int_special_SLTU,
};
static const lightrec_int_func_t int_regimm[64] = {
	SET_DEFAULT_ELM(int_regimm, int_unimplemented),
	[OP_REGIMM_BLTZ] = int_regimm_BLTZ,
	[OP_REGIMM_BGEZ] = int_regimm_BGEZ,
	[OP_REGIMM_BLTZAL] = int_regimm_BLTZAL,
	[OP_REGIMM_BGEZAL] = int_regimm_BGEZAL,
};
static const lightrec_int_func_t int_cp0[64] = {
	SET_DEFAULT_ELM(int_cp0, int_CP),
	[OP_CP0_MFC0] = int_cfc,
	[OP_CP0_CFC0] = int_cfc,
	[OP_CP0_MTC0] = int_ctc,
	[OP_CP0_CTC0] = int_ctc,
	[OP_CP0_RFE] = int_cp0_RFE,
};
static const lightrec_int_func_t int_cp2_basic[64] = {
	SET_DEFAULT_ELM(int_cp2_basic, int_CP),
	[OP_CP2_BASIC_MFC2] = int_cfc,
	[OP_CP2_BASIC_CFC2] = int_cfc,
	[OP_CP2_BASIC_MTC2] = int_ctc,
	[OP_CP2_BASIC_CTC2] = int_ctc,
};
static u32 int_SPECIAL(struct interpreter *inter)
{
	lightrec_int_func_t f = int_special[inter->op->r.op];

	if (!HAS_DEFAULT_ELM && unlikely(!f))
		return int_unimplemented(inter);

	return execute(f, inter);
}

static u32 int_REGIMM(struct interpreter *inter)
{
	lightrec_int_func_t f = int_regimm[inter->op->r.rt];

	if (!HAS_DEFAULT_ELM && unlikely(!f))
		return int_unimplemented(inter);

	return execute(f, inter);
}
static u32 int_CP0(struct interpreter *inter)
{
	lightrec_int_func_t f = int_cp0[inter->op->r.rs];

	if (!HAS_DEFAULT_ELM && unlikely(!f))
		return int_CP(inter);

	return execute(f, inter);
}
static u32 int_CP2(struct interpreter *inter)
{
	if (inter->op->r.op == OP_CP2_BASIC) {
		lightrec_int_func_t f = int_cp2_basic[inter->op->r.rs];

		if (HAS_DEFAULT_ELM || likely(f))
			return execute(f, inter);
	}

	return int_CP(inter);
}
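/* Interpret a block starting at the given offset into its opcode list.
 * Returns the PC of the next block to execute, and accounts all elapsed
 * cycles into the state. */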
static u32 lightrec_emulate_block_list(struct lightrec_state *state,
				       struct block *block, u32 offset)
{
	struct interpreter inter;
	u32 pc;

	inter.block = block;
	inter.state = state;
	inter.offset = offset;
	inter.op = &block->opcode_list[offset];
	inter.cycles = 0;
	inter.delay_slot = false;

	pc = lightrec_int_op(&inter);

	/* Add the cycles of the last branch */
	inter.cycles += lightrec_cycles_of_opcode(inter.op->c);

	state->current_cycle += inter.cycles;

	return pc;
}
u32 lightrec_emulate_block(struct lightrec_state *state, struct block *block, u32 pc)
{
	u32 offset = (kunseg(pc) - kunseg(block->pc)) >> 2;

	if (offset < block->nb_ops)
		return lightrec_emulate_block_list(state, block, offset);

	pr_err("PC 0x%x is outside block at PC 0x%x\n", pc, block->pc);

	return 0;
}