1 /***************************************************************************
2 * Copyright (C) 2007 Ryan Schultz, PCSX-df Team, PCSX team *
4 * This program is free software; you can redistribute it and/or modify *
5 * it under the terms of the GNU General Public License as published by *
6 * the Free Software Foundation; either version 2 of the License, or *
7 * (at your option) any later version. *
9 * This program is distributed in the hope that it will be useful, *
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of *
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
12 * GNU General Public License for more details. *
14 * You should have received a copy of the GNU General Public License *
15 * along with this program; if not, write to the *
16 * Free Software Foundation, Inc., *
17 * 51 Franklin Street, Fifth Floor, Boston, MA 02111-1307 USA. *
18 ***************************************************************************/
21 * i386 assembly functions for R3000A core.
/* Portability: some BSD systems only provide MAP_ANON, not MAP_ANONYMOUS. */
28 #define MAP_ANONYMOUS MAP_ANON
/* PC_REC maps a PSX program counter to the slot holding the pointer to its
   recompiled block: psxRecLUT is indexed by the PC's upper 16 bits and the
   low 16 bits offset into the selected page.  The 8/16/32 variants
   dereference that slot at different widths.
   NOTE(review): the macro argument `x` is used unparenthesized (x >> 16),
   so passing a compound expression would mis-evaluate — verify callers. */
37 #define PC_REC(x) (psxRecLUT[x >> 16] + (x & 0xffff))
38 #define PC_REC8(x) (*(u8 *)PC_REC(x))
39 #define PC_REC16(x) (*(u16*)PC_REC(x))
40 #define PC_REC32(x) (*(u32*)PC_REC(x))
/* Size of the code cache that receives emitted x86 code (8 MB). */
42 #define RECMEM_SIZE (8 * 1024 * 1024)
44 static char *recMem; /* the recompiled blocks will be here */
45 static char *recRAM; /* and the ptr to the blocks here */
46 static char *recROM; /* and here */
48 static u32 pc; /* recompiler pc */
49 static u32 pcold; /* recompiler oldpc */
50 static int count; /* recompiler instruction count */
51 static int branch; /* set for branch */
52 static u32 target; /* branch target */
/* Per-guest-register constant-propagation state; iRegsS is a saved copy
   restored around conditionally-taken paths (see iBranch). */
61 static iRegisters iRegs[32];
62 static iRegisters iRegsS[32];
/* True when the recompiler currently tracks reg as a known constant /
   as mapped, respectively. */
68 #define IsConst(reg) (iRegs[reg].state == ST_CONST)
69 #define IsMapped(reg) (iRegs[reg].state == ST_MAPPED)
/* Opcode dispatch tables: basic (bits 31..26), SPECIAL, REGIMM,
   COP0, COP2 and COP2-basic sub-tables. */
71 static void (*recBSC[64])();
72 static void (*recSPC[64])();
73 static void (*recREG[32])();
74 static void (*recCP0[32])();
75 static void (*recCP2[64])();
76 static void (*recCP2BSC[32])();
/* Record that guest register `reg` currently holds the known constant
   `_const`; subsequent uses can then be folded at recompile time. */
78 static void MapConst(int reg, u32 _const) {
79 iRegs[reg].k = _const;
80 iRegs[reg].state = ST_CONST;
/* If `reg` is tracked as a constant, emit code storing that constant into
   the in-memory GPR slot, then forget the tracking (state -> ST_UNK). */
83 static void iFlushReg(int reg) {
85 MOV32ItoM((u32)&psxRegs.GPR.r[reg], iRegs[reg].k);
87 iRegs[reg].state = ST_UNK;
/* Flush every tracked register (r1..r31; r0 is always the constant 0)
   back to psxRegs.GPR before leaving recompiled code. */
90 static void iFlushRegs() {
93 for (i=1; i<32; i++) {
100 count = ((pc - pcold) / 4) * BIAS;
101 ADD32ItoM((u32)&psxRegs.cycle, count);
102 if (resp) ADD32ItoR(ESP, resp);
/* Return 1 when psxRegs.code (the delay-slot opcode) is a load instruction,
   i.e. when a load-delay-slot interaction must be handled by the
   interpreter helpers instead of straight-line recompiled code. */
106 static int iLoadTest() {
109 // check for load delay
110 tmp = psxRegs.code >> 26;
133 if (tmp >= 0x20 && tmp <= 0x26) { // LB/LH/LWL/LW/LBU/LHU/LWR
141 /* set a pending branch */
/* Emit the epilogue for a branch whose target is only known at run time
   (stored in `target`): recompile the delay slot, store the target into
   psxRegs.pc and call psxBranchTest.  When the delay slot is a load
   (iLoadTest), fall back to the psxDelayTest interpreter helper. */
142 static void SetBranch() {
144 psxRegs.code = PSXMu32(pc);
147 if (iLoadTest() == 1) {
149 MOV32ItoM((u32)&psxRegs.code, psxRegs.code);
/* Account the cycles of the block recompiled so far (BIAS cycles per op). */
151 count = ((pc - pcold) / 4) * BIAS;
152 ADD32ItoM((u32)&psxRegs.cycle, count);
153 if (resp) ADD32ItoR(ESP, resp);
155 PUSH32M((u32)&target);
157 CALLFunc((u32)psxDelayTest);
/* Normal path: recompile the delay-slot instruction in place. */
164 recBSC[psxRegs.code>>26]();
167 MOV32MtoR(EAX, (u32)&target);
168 MOV32RtoM((u32)&psxRegs.pc, EAX);
169 CALLFunc((u32)psxBranchTest);
174 static void iJump(u32 branchPC) {
176 psxRegs.code = PSXMu32(pc);
179 if (iLoadTest() == 1) {
181 MOV32ItoM((u32)&psxRegs.code, psxRegs.code);
183 count = ((pc - pcold) / 4) * BIAS;
184 ADD32ItoM((u32)&psxRegs.cycle, count);
185 if (resp) ADD32ItoR(ESP, resp);
189 CALLFunc((u32)psxDelayTest);
196 recBSC[psxRegs.code>>26]();
199 MOV32ItoM((u32)&psxRegs.pc, branchPC);
200 CALLFunc((u32)psxBranchTest);
202 count = ((pc - pcold) / 4) * BIAS;
203 ADD32ItoM((u32)&psxRegs.cycle, count);
204 if (resp) ADD32ItoR(ESP, resp);
206 // maybe just happened an interruption, check so
207 CMP32ItoM((u32)&psxRegs.pc, branchPC);
212 MOV32MtoR(EAX, PC_REC(branchPC));
213 TEST32RtoR(EAX, EAX);
222 static void iBranch(u32 branchPC, int savectx) {
227 memcpy(iRegsS, iRegs, sizeof(iRegs));
231 psxRegs.code = PSXMu32(pc);
233 // the delay test is only made when the branch is taken
234 // savectx == 0 will mean that :)
235 if (savectx == 0 && iLoadTest() == 1) {
237 MOV32ItoM((u32)&psxRegs.code, psxRegs.code);
239 count = (((pc+4) - pcold) / 4) * BIAS;
240 ADD32ItoM((u32)&psxRegs.cycle, count);
241 if (resp) ADD32ItoR(ESP, resp);
245 CALLFunc((u32)psxDelayTest);
253 recBSC[psxRegs.code>>26]();
256 MOV32ItoM((u32)&psxRegs.pc, branchPC);
257 CALLFunc((u32)psxBranchTest);
259 count = ((pc - pcold) / 4) * BIAS;
260 ADD32ItoM((u32)&psxRegs.cycle, count);
261 if (resp) ADD32ItoR(ESP, resp);
263 // maybe just happened an interruption, check so
264 CMP32ItoM((u32)&psxRegs.pc, branchPC);
269 MOV32MtoR(EAX, PC_REC(branchPC));
270 TEST32RtoR(EAX, EAX);
280 memcpy(iRegs, iRegsS, sizeof(iRegs));
285 char *txt0 = "EAX = %x : ECX = %x : EDX = %x\n";
286 char *txt1 = "EAX = %x\n";
287 char *txt2 = "M32 = %x\n";
295 PUSH32M ((u32)&txt0);
296 CALLFunc ((u32)SysPrintf);
304 PUSH32M ((u32)&txt1);
305 CALLFunc ((u32)SysPrintf);
309 void iLogM32(u32 mem) {
311 PUSH32M ((u32)&txt2);
312 CALLFunc ((u32)SysPrintf);
316 static void iDumpRegs() {
319 printf("%x %x\n", psxRegs.pc, psxRegs.cycle);
320 for (i = 0; i < 4; i++) {
321 for (j = 0; j < 8; j++)
322 printf("%x ", psxRegs.GPR.r[j * i]);
/* Debug aid: disassemble the guest instructions of the current block and
   dump the emitted x86 code to the file "dump1", then run ndisasm on it. */
327 void iDumpBlock(char *ptr) {
331 SysPrintf("dump1 %x:%x, %x\n", psxRegs.pc, pc, psxRegs.cycle);
333 for (i = psxRegs.pc; i < pc; i += 4)
334 SysPrintf("%s\n", disR3000AF(PSXMu32(i), i));
/* NOTE(review): fopen() result is not checked before fwrite(), and no
   fclose() is visible here — verify in the full source. */
337 f = fopen("dump1", "w");
338 fwrite(ptr, 1, (u32)x86Ptr - (u32)ptr, f);
340 system("ndisasmw -u dump1");
344 #define REC_FUNC(f) \
346 static void rec##f() { \
348 MOV32ItoM((u32)&psxRegs.code, (u32)psxRegs.code); \
349 MOV32ItoM((u32)&psxRegs.pc, (u32)pc); \
350 CALLFunc((u32)psx##f); \
356 static void rec##f() { \
358 MOV32ItoM((u32)&psxRegs.code, (u32)psxRegs.code); \
359 MOV32ItoM((u32)&psxRegs.pc, (u32)pc); \
360 CALLFunc((u32)psx##f); \
365 #define REC_BRANCH(f) \
367 static void rec##f() { \
369 MOV32ItoM((u32)&psxRegs.code, (u32)psxRegs.code); \
370 MOV32ItoM((u32)&psxRegs.pc, (u32)pc); \
371 CALLFunc((u32)psx##f); \
376 static void recRecompile();
/* One-time setup: allocate the executable code cache (mmap, since it needs
   PROT_EXEC), the per-page block-pointer tables and the PC lookup table.
   Returns 0 on success, -1 on allocation failure. */
378 static int recInit() {
381 psxRecLUT = (u32 *)malloc(0x010000 * 4);
383 recMem = mmap(0, RECMEM_SIZE + 0x1000,
384 PROT_EXEC | PROT_WRITE | PROT_READ, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
/* NOTE(review): mmap() signals failure with MAP_FAILED ((void *)-1), not
   NULL, so the recMem == NULL test below does not detect a failed mapping
   — confirm and fix in the full source. */
386 recRAM = (char *)malloc(0x200000);
387 recROM = (char *)malloc(0x080000);
388 if (recRAM == NULL || recROM == NULL || recMem == NULL || psxRecLUT == NULL) {
389 SysMessage("Error allocating memory"); return -1;
/* Map the 2 MB of RAM (mirrored every 2 MB within the 8 MB window) into
   KUSEG/KSEG0/KSEG1, and the 512 KB BIOS ROM at 0xbfc00000. */
392 for (i = 0; i < 0x80; i++) psxRecLUT[i + 0x0000] = (u32)&recRAM[(i & 0x1f) << 16];
393 memcpy(psxRecLUT + 0x8000, psxRecLUT, 0x80 * 4);
394 memcpy(psxRecLUT + 0xa000, psxRecLUT, 0x80 * 4);
396 for (i = 0; i < 0x08; i++) psxRecLUT[i + 0xbfc0] = (u32)&recROM[i << 16];
/* Invalidate all translated blocks (zero the block-pointer tables) and
   reset constant tracking; r0 is re-pinned as the constant 0. */
401 static void recReset() {
402 memset(recRAM, 0, 0x200000);
403 memset(recROM, 0, 0x080000);
410 memset(iRegs, 0, sizeof(iRegs));
411 iRegs[0].state = ST_CONST;
/* Release the code cache.  The companion malloc'd buffers (recRAM, recROM,
   psxRecLUT) are presumably freed in the elided lines — verify. */
415 static void recShutdown() {
416 if (recMem == NULL) return;
418 munmap(recMem, RECMEM_SIZE + 0x1000);
424 static void recError() {
427 SysMessage("Unrecoverable error while running recompiler\n");
/* Dispatch one block: look up the recompiled-code pointer for the current
   guest PC and jump to it (compiling first if empty, in the elided part). */
431 __inline static void execute() {
432 void (**recFunc)() = NULL;
435 p = (char *)PC_REC(psxRegs.pc);
436 if (p != NULL) recFunc = (void (**)()) (u32)p;
437 else { recError(); return; }
445 static void recExecute() {
449 static void recExecuteBlock() {
/* Invalidate `Size` guest instructions starting at `Addr` (e.g. after a
   DMA or store into code) by zeroing their block-pointer slots. */
453 static void recClear(u32 Addr, u32 Size) {
454 memset((void*)PC_REC(Addr), 0, Size * 4);
457 static void recNULL() {
458 // SysMessage("recUNK: %8.8x\n", psxRegs.code);
461 /*********************************************************
462 * goes to opcodes tables... *
463 * Format: table[something....] *
464 *********************************************************/
467 static void recSPECIAL() {
471 static void recREGIMM() {
475 static void recCOP0() {
480 static void recCOP2() {
484 static void recBASIC() {
488 //end of Tables opcodes...
490 /*********************************************************
491 * Arithmetic with immediate operand *
492 * Format: OP rt, rs, immediate *
493 *********************************************************/
/* ADDIU: Rt = Rs + sign-extended immediate (no overflow trap).
   Folds to a compile-time constant when Rs is tracked; otherwise emits
   INC/DEC for +/-1 and plain ADDs for the general case. */
503 static void recADDIU() {
/* Rt already holds a tracked constant: fold the addition now. */
511 iRegs[_Rt_].k+= _Imm_;
/* In-memory register path: INC/DEC special-cases, else immediate ADD. */
514 INC32M((u32)&psxRegs.GPR.r[_Rt_]);
515 } else if (_Imm_ == -1) {
516 DEC32M((u32)&psxRegs.GPR.r[_Rt_]);
518 ADD32ItoM((u32)&psxRegs.GPR.r[_Rt_], _Imm_);
/* Rs constant: result is itself a compile-time constant. */
523 MapConst(_Rt_, iRegs[_Rs_].k + _Imm_);
/* General case: load Rs, adjust, store to Rt; Rt is no longer tracked. */
525 iRegs[_Rt_].state = ST_UNK;
527 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
530 } else if (_Imm_ == -1) {
533 ADD32ItoR(EAX, _Imm_);
535 MOV32RtoM((u32)&psxRegs.GPR.r[_Rt_], EAX);
540 static void recADDI() {
/* SLTI: Rt = (signed)Rs < sign-extended immediate ? 1 : 0. */
545 static void recSLTI() {
546 // Rt = Rs < Im (signed)
/* Both operands known: fold the comparison at recompile time. */
552 MapConst(_Rt_, (s32)iRegs[_Rs_].k < _Imm_);
554 iRegs[_Rt_].state = ST_UNK;
556 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
557 CMP32ItoR(EAX, _Imm_);
/* A byte-setting instruction (SETL, in the elided lines — verify) leaves
   the 0/1 result in AL; mask to clear the upper bytes. */
559 AND32ItoR(EAX, 0xff);
560 MOV32RtoM((u32)&psxRegs.GPR.r[_Rt_], EAX);
/* SLTIU: Rt = (unsigned)Rs < (unsigned)sign-extended immediate ? 1 : 0.
   Per MIPS semantics the immediate is sign-extended first and then
   compared unsigned — hence _Imm_ (signed-extended) in the CMP below. */
564 static void recSLTIU() {
565 // Rt = Rs < Im (unsigned)
571 MapConst(_Rt_, iRegs[_Rs_].k < _ImmU_);
573 iRegs[_Rt_].state = ST_UNK;
575 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
576 CMP32ItoR(EAX, _Imm_);
/* Byte-setting instruction (SETB, in the elided lines — verify) then mask. */
578 AND32ItoR(EAX, 0xff);
579 MOV32RtoM((u32)&psxRegs.GPR.r[_Rt_], EAX);
583 static void recANDI() {
591 iRegs[_Rt_].k&= _ImmU_;
593 AND32ItoM((u32)&psxRegs.GPR.r[_Rt_], _ImmU_);
597 MapConst(_Rt_, iRegs[_Rs_].k & _ImmU_);
599 iRegs[_Rt_].state = ST_UNK;
601 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
602 AND32ItoR(EAX, _ImmU_);
603 MOV32RtoM((u32)&psxRegs.GPR.r[_Rt_], EAX);
/* ORI: Rt = Rs | zero-extended immediate.
   Folds when operands are tracked; skips the OR entirely when the
   immediate is zero (plain register copy). */
608 static void recORI() {
/* Rt already constant: fold in place. */
616 iRegs[_Rt_].k|= _ImmU_;
618 OR32ItoM((u32)&psxRegs.GPR.r[_Rt_], _ImmU_);
622 MapConst(_Rt_, iRegs[_Rs_].k | _ImmU_);
624 iRegs[_Rt_].state = ST_UNK;
626 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
627 if (_ImmU_) OR32ItoR (EAX, _ImmU_);
628 MOV32RtoM((u32)&psxRegs.GPR.r[_Rt_], EAX);
633 static void recXORI() {
641 iRegs[_Rt_].k^= _ImmU_;
643 XOR32ItoM((u32)&psxRegs.GPR.r[_Rt_], _ImmU_);
647 MapConst(_Rt_, iRegs[_Rs_].k ^ _ImmU_);
649 iRegs[_Rt_].state = ST_UNK;
651 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
652 XOR32ItoR(EAX, _ImmU_);
653 MOV32RtoM((u32)&psxRegs.GPR.r[_Rt_], EAX);
658 //end of * Arithmetic with immediate operand
660 /*********************************************************
661 * Load higher 16 bits of the first word in GPR with imm *
662 * Format: OP rt, immediate *
663 *********************************************************/
/* LUI: Rt = immediate << 16.  The immediate occupies the low 16 bits of
   the opcode, so shifting the whole opcode left by 16 yields the result;
   always a compile-time constant. */
666 static void recLUI() {
670 MapConst(_Rt_, psxRegs.code << 16);
673 //End of Load Higher .....
676 /*********************************************************
677 * Register arithmetic *
678 * Format: OP rd, rs, rt *
679 *********************************************************/
693 static void recADDU() {
699 if (IsConst(_Rs_) && IsConst(_Rt_)) {
700 MapConst(_Rd_, iRegs[_Rs_].k + iRegs[_Rt_].k);
701 } else if (IsConst(_Rs_)) {
702 iRegs[_Rd_].state = ST_UNK;
705 if (iRegs[_Rs_].k == 1) {
706 INC32M((u32)&psxRegs.GPR.r[_Rd_]);
707 } else if (iRegs[_Rs_].k == -1) {
708 DEC32M((u32)&psxRegs.GPR.r[_Rd_]);
709 } else if (iRegs[_Rs_].k) {
710 ADD32ItoM((u32)&psxRegs.GPR.r[_Rd_], iRegs[_Rs_].k);
713 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rt_]);
714 if (iRegs[_Rs_].k == 1) {
716 } else if (iRegs[_Rs_].k == 0xffffffff) {
718 } else if (iRegs[_Rs_].k) {
719 ADD32ItoR(EAX, iRegs[_Rs_].k);
721 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
723 } else if (IsConst(_Rt_)) {
724 iRegs[_Rd_].state = ST_UNK;
727 if (iRegs[_Rt_].k == 1) {
728 INC32M((u32)&psxRegs.GPR.r[_Rd_]);
729 } else if (iRegs[_Rt_].k == -1) {
730 DEC32M((u32)&psxRegs.GPR.r[_Rd_]);
731 } else if (iRegs[_Rt_].k) {
732 ADD32ItoM((u32)&psxRegs.GPR.r[_Rd_], iRegs[_Rt_].k);
735 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
736 if (iRegs[_Rt_].k == 1) {
738 } else if (iRegs[_Rt_].k == 0xffffffff) {
740 } else if (iRegs[_Rt_].k) {
741 ADD32ItoR(EAX, iRegs[_Rt_].k);
743 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
746 iRegs[_Rd_].state = ST_UNK;
748 if (_Rs_ == _Rd_) { // Rd+= Rt
749 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rt_]);
750 ADD32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
751 } else if (_Rt_ == _Rd_) { // Rd+= Rs
752 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
753 ADD32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
754 } else { // Rd = Rs + Rt
755 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
756 ADD32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rt_]);
757 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
762 static void recADD() {
/* SUBU: Rd = Rs - Rt (no overflow trap).  Four cases: both constant
   (fold), only Rs constant, only Rt constant, neither constant. */
767 static void recSUBU() {
773 if (IsConst(_Rs_) && IsConst(_Rt_)) {
774 MapConst(_Rd_, iRegs[_Rs_].k - iRegs[_Rt_].k);
775 } else if (IsConst(_Rs_)) {
776 iRegs[_Rd_].state = ST_UNK;
/* kRs - Rt: load the constant, subtract the in-memory register. */
778 MOV32ItoR(EAX, iRegs[_Rs_].k);
779 SUB32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rt_]);
780 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
781 } else if (IsConst(_Rt_)) {
782 iRegs[_Rd_].state = ST_UNK;
/* Rs - kRt: subtract an immediate. */
784 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
785 SUB32ItoR(EAX, iRegs[_Rt_].k);
786 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
/* General case: Rs - Rt through EAX. */
788 iRegs[_Rd_].state = ST_UNK;
790 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
791 SUB32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rt_]);
792 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
796 static void recSUB() {
801 static void recAND() {
807 if (IsConst(_Rs_) && IsConst(_Rt_)) {
808 MapConst(_Rd_, iRegs[_Rs_].k & iRegs[_Rt_].k);
809 } else if (IsConst(_Rs_)) {
810 iRegs[_Rd_].state = ST_UNK;
812 if (_Rd_ == _Rt_) { // Rd&= Rs
813 AND32ItoM((u32)&psxRegs.GPR.r[_Rd_], iRegs[_Rs_].k);
815 MOV32ItoR(EAX, iRegs[_Rs_].k);
816 AND32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rt_]);
817 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
819 } else if (IsConst(_Rt_)) {
820 iRegs[_Rd_].state = ST_UNK;
822 if (_Rd_ == _Rs_) { // Rd&= kRt
823 AND32ItoM((u32)&psxRegs.GPR.r[_Rd_], iRegs[_Rt_].k);
824 } else { // Rd = Rs & kRt
825 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
826 AND32ItoR(EAX, iRegs[_Rt_].k);
827 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
830 iRegs[_Rd_].state = ST_UNK;
832 if (_Rs_ == _Rd_) { // Rd&= Rt
833 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rt_]);
834 AND32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
835 } else if (_Rt_ == _Rd_) { // Rd&= Rs
836 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
837 AND32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
838 } else { // Rd = Rs & Rt
839 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
840 AND32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rt_]);
841 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
/* OR: Rd = Rs | Rt.  Same four-way constant-propagation structure as the
   other register-register ALU ops. */
846 static void recOR() {
852 if (IsConst(_Rs_) && IsConst(_Rt_)) {
853 MapConst(_Rd_, iRegs[_Rs_].k | iRegs[_Rt_].k);
854 } else if (IsConst(_Rs_)) {
855 iRegs[_Rd_].state = ST_UNK;
/* kRs | Rt */
857 MOV32ItoR(EAX, iRegs[_Rs_].k);
858 OR32MtoR (EAX, (u32)&psxRegs.GPR.r[_Rt_]);
859 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
860 } else if (IsConst(_Rt_)) {
861 iRegs[_Rd_].state = ST_UNK;
/* Rs | kRt */
863 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
864 OR32ItoR (EAX, iRegs[_Rt_].k);
865 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
/* Rs | Rt */
867 iRegs[_Rd_].state = ST_UNK;
869 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
870 OR32MtoR (EAX, (u32)&psxRegs.GPR.r[_Rt_]);
871 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
875 static void recXOR() {
881 if (IsConst(_Rs_) && IsConst(_Rt_)) {
882 MapConst(_Rd_, iRegs[_Rs_].k ^ iRegs[_Rt_].k);
883 } else if (IsConst(_Rs_)) {
884 iRegs[_Rd_].state = ST_UNK;
886 MOV32ItoR(EAX, iRegs[_Rs_].k);
887 XOR32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rt_]);
888 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
889 } else if (IsConst(_Rt_)) {
890 iRegs[_Rd_].state = ST_UNK;
892 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
893 XOR32ItoR(EAX, iRegs[_Rt_].k);
894 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
896 iRegs[_Rd_].state = ST_UNK;
898 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
899 XOR32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rt_]);
900 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
904 static void recNOR() {
910 if (IsConst(_Rs_) && IsConst(_Rt_)) {
911 MapConst(_Rd_, ~(iRegs[_Rs_].k | iRegs[_Rt_].k));
912 } else if (IsConst(_Rs_)) {
913 iRegs[_Rd_].state = ST_UNK;
915 MOV32ItoR(EAX, iRegs[_Rs_].k);
916 OR32MtoR (EAX, (u32)&psxRegs.GPR.r[_Rt_]);
918 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
919 } else if (IsConst(_Rt_)) {
920 iRegs[_Rd_].state = ST_UNK;
922 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
923 OR32ItoR (EAX, iRegs[_Rt_].k);
925 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
927 iRegs[_Rd_].state = ST_UNK;
929 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
930 OR32MtoR (EAX, (u32)&psxRegs.GPR.r[_Rt_]);
932 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
936 static void recSLT() {
937 // Rd = Rs < Rt (signed)
942 if (IsConst(_Rs_) && IsConst(_Rt_)) {
943 MapConst(_Rd_, (s32)iRegs[_Rs_].k < (s32)iRegs[_Rt_].k);
944 } else if (IsConst(_Rs_)) {
945 iRegs[_Rd_].state = ST_UNK;
947 MOV32ItoR(EAX, iRegs[_Rs_].k);
948 CMP32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rt_]);
950 AND32ItoR(EAX, 0xff);
951 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
952 } else if (IsConst(_Rt_)) {
953 iRegs[_Rd_].state = ST_UNK;
955 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
956 CMP32ItoR(EAX, iRegs[_Rt_].k);
958 AND32ItoR(EAX, 0xff);
959 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
961 iRegs[_Rd_].state = ST_UNK;
963 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
964 CMP32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rt_]);
966 AND32ItoR(EAX, 0xff);
967 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
/* SLTU: Rd = (unsigned)Rs < (unsigned)Rt ? 1 : 0.
   Uses the carry flag: CMP then SBB EAX,EAX yields 0 or 0xffffffff;
   a NEG (presumably in the elided lines — verify) turns that into 0/1. */
971 static void recSLTU() {
972 // Rd = Rs < Rt (unsigned)
977 if (IsConst(_Rs_) && IsConst(_Rt_)) {
978 MapConst(_Rd_, iRegs[_Rs_].k < iRegs[_Rt_].k);
979 } else if (IsConst(_Rs_)) {
980 iRegs[_Rd_].state = ST_UNK;
982 MOV32ItoR(EAX, iRegs[_Rs_].k);
983 CMP32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rt_]);
986 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
987 } else if (IsConst(_Rt_)) {
988 iRegs[_Rd_].state = ST_UNK;
990 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
991 CMP32ItoR(EAX, iRegs[_Rt_].k);
994 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
996 iRegs[_Rd_].state = ST_UNK;
998 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
999 CMP32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rt_]);
1000 SBB32RtoR(EAX, EAX);
1002 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
1006 //End of * Register arithmetic
1008 /*********************************************************
1009 * Register mult/div & Register trap logic *
1010 * Format: OP rs, rt *
1011 *********************************************************/
1018 static void recMULT() {
1019 // Lo/Hi = Rs * Rt (signed)
1023 if ((IsConst(_Rs_) && iRegs[_Rs_].k == 0) ||
1024 (IsConst(_Rt_) && iRegs[_Rt_].k == 0)) {
1025 XOR32RtoR(EAX, EAX);
1026 MOV32RtoM((u32)&psxRegs.GPR.n.lo, EAX);
1027 MOV32RtoM((u32)&psxRegs.GPR.n.hi, EAX);
1031 if (IsConst(_Rs_)) {
1032 MOV32ItoR(EAX, iRegs[_Rs_].k);// printf("multrsk %x\n", iRegs[_Rs_].k);
1034 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
1036 if (IsConst(_Rt_)) {
1037 MOV32ItoR(EDX, iRegs[_Rt_].k);// printf("multrtk %x\n", iRegs[_Rt_].k);
1040 IMUL32M ((u32)&psxRegs.GPR.r[_Rt_]);
1042 MOV32RtoM((u32)&psxRegs.GPR.n.lo, EAX);
1043 MOV32RtoM((u32)&psxRegs.GPR.n.hi, EDX);
1046 static void recMULTU() {
1047 // Lo/Hi = Rs * Rt (unsigned)
1051 if ((IsConst(_Rs_) && iRegs[_Rs_].k == 0) ||
1052 (IsConst(_Rt_) && iRegs[_Rt_].k == 0)) {
1053 XOR32RtoR(EAX, EAX);
1054 MOV32RtoM((u32)&psxRegs.GPR.n.lo, EAX);
1055 MOV32RtoM((u32)&psxRegs.GPR.n.hi, EAX);
1059 if (IsConst(_Rs_)) {
1060 MOV32ItoR(EAX, iRegs[_Rs_].k);// printf("multursk %x\n", iRegs[_Rs_].k);
1062 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
1064 if (IsConst(_Rt_)) {
1065 MOV32ItoR(EDX, iRegs[_Rt_].k);// printf("multurtk %x\n", iRegs[_Rt_].k);
1068 MUL32M ((u32)&psxRegs.GPR.r[_Rt_]);
1070 MOV32RtoM((u32)&psxRegs.GPR.n.lo, EAX);
1071 MOV32RtoM((u32)&psxRegs.GPR.n.hi, EDX);
/* DIV: Lo = Rs / Rt, Hi = Rs % Rt (signed), via x86 CDQ+IDIV with the
   divisor in ECX.  When the divisor is a known compile-time zero the whole
   op is skipped (MIPS leaves Lo/Hi in a defined-but-emulator-chosen state
   here — verify intended behavior); the run-time zero check for a
   non-constant divisor is in the elided lines (see the !IsConst branch). */
1074 static void recDIV() {
1075 // Lo/Hi = Rs / Rt (signed)
1079 if (IsConst(_Rt_)) {
1080 if (iRegs[_Rt_].k == 0) return;
1081 MOV32ItoR(ECX, iRegs[_Rt_].k);// printf("divrtk %x\n", iRegs[_Rt_].k);
1083 MOV32MtoR(ECX, (u32)&psxRegs.GPR.r[_Rt_]);
1087 if (IsConst(_Rs_)) {
1088 MOV32ItoR(EAX, iRegs[_Rs_].k);// printf("divrsk %x\n", iRegs[_Rs_].k);
1090 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
/* After IDIV: quotient in EAX -> lo, remainder in EDX -> hi. */
1094 MOV32RtoM((u32)&psxRegs.GPR.n.lo, EAX);
1095 MOV32RtoM((u32)&psxRegs.GPR.n.hi, EDX);
1096 if (!IsConst(_Rt_)) {
1101 static void recDIVU() {
1102 // Lo/Hi = Rs / Rt (unsigned)
1106 if (IsConst(_Rt_)) {
1107 if (iRegs[_Rt_].k == 0) return;
1108 MOV32ItoR(ECX, iRegs[_Rt_].k);// printf("divurtk %x\n", iRegs[_Rt_].k);
1110 MOV32MtoR(ECX, (u32)&psxRegs.GPR.r[_Rt_]);
1114 if (IsConst(_Rs_)) {
1115 MOV32ItoR(EAX, iRegs[_Rs_].k);// printf("divursk %x\n", iRegs[_Rs_].k);
1117 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
1119 XOR32RtoR(EDX, EDX);
1121 MOV32RtoM((u32)&psxRegs.GPR.n.lo, EAX);
1122 MOV32RtoM((u32)&psxRegs.GPR.n.hi, EDX);
1123 if (!IsConst(_Rt_)) {
1128 //End of * Register mult/div & Register trap logic
1145 /* Push OfB for Stores/Loads */
/* Push the effective address (Rs + sign-extended offset) as a call
   argument: an immediate when Rs is a tracked constant, a computed EAX
   when an offset must be added, or the GPR slot directly when offset==0. */
1146 static void iPushOfB() {
1147 if (IsConst(_Rs_)) {
1148 PUSH32I (iRegs[_Rs_].k + _Imm_);
1151 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
1152 ADD32ItoR(EAX, _Imm_);
/* offset == 0: push the register value straight from memory. */
1155 PUSH32M ((u32)&psxRegs.GPR.r[_Rs_]);
1161 static void recLB() {
1162 // Rt = mem[Rs + Im] (signed)
1166 if (IsConst(_Rs_)) {
1167 u32 addr = iRegs[_Rs_].k + _Imm_;
1170 if ((t & 0xfff0) == 0xbfc0) {
1172 // since bios is readonly it won't change
1173 MapConst(_Rt_, psxRs8(addr));
1176 if ((t & 0x1fe0) == 0 && (t & 0x1fff) != 0) {
1178 iRegs[_Rt_].state = ST_UNK;
1180 MOVSX32M8toR(EAX, (u32)&psxM[addr & 0x1fffff]);
1181 MOV32RtoM((u32)&psxRegs.GPR.r[_Rt_], EAX);
1184 if (t == 0x1f80 && addr < 0x1f801000) {
1186 iRegs[_Rt_].state = ST_UNK;
1188 MOVSX32M8toR(EAX, (u32)&psxH[addr & 0xfff]);
1189 MOV32RtoM((u32)&psxRegs.GPR.r[_Rt_], EAX);
1192 // SysPrintf("unhandled r8 %x\n", addr);
1196 CALLFunc((u32)psxMemRead8);
1198 iRegs[_Rt_].state = ST_UNK;
1199 MOVSX32R8toR(EAX, EAX);
1200 MOV32RtoM((u32)&psxRegs.GPR.r[_Rt_], EAX);
1202 // ADD32ItoR(ESP, 4);
1206 static void recLBU() {
1207 // Rt = mem[Rs + Im] (unsigned)
1211 if (IsConst(_Rs_)) {
1212 u32 addr = iRegs[_Rs_].k + _Imm_;
1215 if ((t & 0xfff0) == 0xbfc0) {
1217 // since bios is readonly it won't change
1218 MapConst(_Rt_, psxRu8(addr));
1221 if ((t & 0x1fe0) == 0 && (t & 0x1fff) != 0) {
1223 iRegs[_Rt_].state = ST_UNK;
1225 MOVZX32M8toR(EAX, (u32)&psxM[addr & 0x1fffff]);
1226 MOV32RtoM((u32)&psxRegs.GPR.r[_Rt_], EAX);
1229 if (t == 0x1f80 && addr < 0x1f801000) {
1231 iRegs[_Rt_].state = ST_UNK;
1233 MOVZX32M8toR(EAX, (u32)&psxH[addr & 0xfff]);
1234 MOV32RtoM((u32)&psxRegs.GPR.r[_Rt_], EAX);
1237 // SysPrintf("unhandled r8u %x\n", addr);
1241 CALLFunc((u32)psxMemRead8);
1243 iRegs[_Rt_].state = ST_UNK;
1244 MOVZX32R8toR(EAX, EAX);
1245 MOV32RtoM((u32)&psxRegs.GPR.r[_Rt_], EAX);
1247 // ADD32ItoR(ESP, 4);
1251 static void recLH() {
1252 // Rt = mem[Rs + Im] (signed)
1256 if (IsConst(_Rs_)) {
1257 u32 addr = iRegs[_Rs_].k + _Imm_;
1260 if ((t & 0xfff0) == 0xbfc0) {
1262 // since bios is readonly it won't change
1263 MapConst(_Rt_, psxRs16(addr));
1266 if ((t & 0x1fe0) == 0 && (t & 0x1fff) != 0) {
1268 iRegs[_Rt_].state = ST_UNK;
1270 MOVSX32M16toR(EAX, (u32)&psxM[addr & 0x1fffff]);
1271 MOV32RtoM((u32)&psxRegs.GPR.r[_Rt_], EAX);
1274 if (t == 0x1f80 && addr < 0x1f801000) {
1276 iRegs[_Rt_].state = ST_UNK;
1278 MOVSX32M16toR(EAX, (u32)&psxH[addr & 0xfff]);
1279 MOV32RtoM((u32)&psxRegs.GPR.r[_Rt_], EAX);
1282 // SysPrintf("unhandled r16 %x\n", addr);
1286 CALLFunc((u32)psxMemRead16);
1288 iRegs[_Rt_].state = ST_UNK;
1289 MOVSX32R16toR(EAX, EAX);
1290 MOV32RtoM((u32)&psxRegs.GPR.r[_Rt_], EAX);
1292 // ADD32ItoR(ESP, 4);
1296 static void recLHU() {
1297 // Rt = mem[Rs + Im] (unsigned)
1301 if (IsConst(_Rs_)) {
1302 u32 addr = iRegs[_Rs_].k + _Imm_;
1305 if ((t & 0xfff0) == 0xbfc0) {
1307 // since bios is readonly it won't change
1308 MapConst(_Rt_, psxRu16(addr));
1311 if ((t & 0x1fe0) == 0 && (t & 0x1fff) != 0) {
1313 iRegs[_Rt_].state = ST_UNK;
1315 MOVZX32M16toR(EAX, (u32)&psxM[addr & 0x1fffff]);
1316 MOV32RtoM((u32)&psxRegs.GPR.r[_Rt_], EAX);
1319 if (t == 0x1f80 && addr < 0x1f801000) {
1321 iRegs[_Rt_].state = ST_UNK;
1323 MOVZX32M16toR(EAX, (u32)&psxH[addr & 0xfff]);
1324 MOV32RtoM((u32)&psxRegs.GPR.r[_Rt_], EAX);
1328 if (addr >= 0x1f801c00 && addr < 0x1f801e00) {
1330 iRegs[_Rt_].state = ST_UNK;
1333 CALL32M ((u32)&SPU_readRegister);
1334 MOVZX32R16toR(EAX, EAX);
1335 MOV32RtoM((u32)&psxRegs.GPR.r[_Rt_], EAX);
1342 case 0x1f801100: case 0x1f801110: case 0x1f801120:
1344 iRegs[_Rt_].state = ST_UNK;
1346 PUSH32I((addr >> 4) & 0x3);
1347 CALLFunc((u32)psxRcntRcount);
1348 MOVZX32R16toR(EAX, EAX);
1349 MOV32RtoM((u32)&psxRegs.GPR.r[_Rt_], EAX);
1353 case 0x1f801104: case 0x1f801114: case 0x1f801124:
1355 iRegs[_Rt_].state = ST_UNK;
1357 PUSH32I((addr >> 4) & 0x3);
1358 CALLFunc((u32)psxRcntRmode);
1359 MOVZX32R16toR(EAX, EAX);
1360 MOV32RtoM((u32)&psxRegs.GPR.r[_Rt_], EAX);
1364 case 0x1f801108: case 0x1f801118: case 0x1f801128:
1366 iRegs[_Rt_].state = ST_UNK;
1368 PUSH32I((addr >> 4) & 0x3);
1369 CALLFunc((u32)psxRcntRtarget);
1370 MOVZX32R16toR(EAX, EAX);
1371 MOV32RtoM((u32)&psxRegs.GPR.r[_Rt_], EAX);
1376 // SysPrintf("unhandled r16u %x\n", addr);
1380 CALLFunc((u32)psxMemRead16);
1382 iRegs[_Rt_].state = ST_UNK;
1383 MOVZX32R16toR(EAX, EAX);
1384 MOV32RtoM((u32)&psxRegs.GPR.r[_Rt_], EAX);
1386 // ADD32ItoR(ESP, 4);
1390 static void recLW() {
1391 // Rt = mem[Rs + Im] (unsigned)
1395 if (IsConst(_Rs_)) {
1396 u32 addr = iRegs[_Rs_].k + _Imm_;
1399 if ((t & 0xfff0) == 0xbfc0) {
1401 // since bios is readonly it won't change
1402 MapConst(_Rt_, psxRu32(addr));
1405 if ((t & 0x1fe0) == 0 && (t & 0x1fff) != 0) {
1407 iRegs[_Rt_].state = ST_UNK;
1409 MOV32MtoR(EAX, (u32)&psxM[addr & 0x1fffff]);
1410 MOV32RtoM((u32)&psxRegs.GPR.r[_Rt_], EAX);
1413 if (t == 0x1f80 && addr < 0x1f801000) {
1415 iRegs[_Rt_].state = ST_UNK;
1417 MOV32MtoR(EAX, (u32)&psxH[addr & 0xfff]);
1418 MOV32RtoM((u32)&psxRegs.GPR.r[_Rt_], EAX);
1423 case 0x1f801080: case 0x1f801084: case 0x1f801088:
1424 case 0x1f801090: case 0x1f801094: case 0x1f801098:
1425 case 0x1f8010a0: case 0x1f8010a4: case 0x1f8010a8:
1426 case 0x1f8010b0: case 0x1f8010b4: case 0x1f8010b8:
1427 case 0x1f8010c0: case 0x1f8010c4: case 0x1f8010c8:
1428 case 0x1f8010d0: case 0x1f8010d4: case 0x1f8010d8:
1429 case 0x1f8010e0: case 0x1f8010e4: case 0x1f8010e8:
1430 case 0x1f801070: case 0x1f801074:
1431 case 0x1f8010f0: case 0x1f8010f4:
1433 iRegs[_Rt_].state = ST_UNK;
1435 MOV32MtoR(EAX, (u32)&psxH[addr & 0xffff]);
1436 MOV32RtoM((u32)&psxRegs.GPR.r[_Rt_], EAX);
1441 iRegs[_Rt_].state = ST_UNK;
1443 CALL32M((u32)&GPU_readData);
1444 MOV32RtoM((u32)&psxRegs.GPR.r[_Rt_], EAX);
1449 iRegs[_Rt_].state = ST_UNK;
1451 CALL32M((u32)&GPU_readStatus);
1452 MOV32RtoM((u32)&psxRegs.GPR.r[_Rt_], EAX);
1456 // SysPrintf("unhandled r32 %x\n", addr);
1460 CALLFunc((u32)psxMemRead32);
1462 iRegs[_Rt_].state = ST_UNK;
1463 MOV32RtoM((u32)&psxRegs.GPR.r[_Rt_], EAX);
1465 // ADD32ItoR(ESP, 4);
1469 extern u32 LWL_MASK[4];
1470 extern u32 LWL_SHIFT[4];
1472 void iLWLk(u32 shift) {
1473 if (IsConst(_Rt_)) {
1474 MOV32ItoR(ECX, iRegs[_Rt_].k);
1476 MOV32MtoR(ECX, (u32)&psxRegs.GPR.r[_Rt_]);
1478 AND32ItoR(ECX, LWL_MASK[shift]);
1479 SHL32ItoR(EAX, LWL_SHIFT[shift]);
1480 OR32RtoR (EAX, ECX);
1484 // Rt = Rt Merge mem[Rs + Im]
1486 if (IsConst(_Rs_)) {
1487 u32 addr = iRegs[_Rs_].k + _Imm_;
1490 if ((t & 0x1fe0) == 0 && (t & 0x1fff) != 0) {
1491 MOV32MtoR(EAX, (u32)&psxM[addr & 0x1ffffc]);
1494 iRegs[_Rt_].state = ST_UNK;
1495 MOV32RtoM((u32)&psxRegs.GPR.r[_Rt_], EAX);
1498 if (t == 0x1f80 && addr < 0x1f801000) {
1499 MOV32MtoR(EAX, (u32)&psxH[addr & 0xffc]);
1502 iRegs[_Rt_].state = ST_UNK;
1503 MOV32RtoM((u32)&psxRegs.GPR.r[_Rt_], EAX);
1508 if (IsConst(_Rs_)) MOV32ItoR(EAX, iRegs[_Rs_].k + _Imm_);
1510 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
1511 if (_Imm_) ADD32ItoR(EAX, _Imm_);
1516 CALLFunc((u32)psxMemRead32);
1521 AND32ItoR(EDX, 0x3); // shift = addr & 3;
1523 MOV32ItoR(ECX, (u32)LWL_SHIFT);
1524 MOV32RmStoR(ECX, ECX, EDX, 2);
1525 SHL32CLtoR(EAX); // mem(EAX) << LWL_SHIFT[shift]
1527 MOV32ItoR(ECX, (u32)LWL_MASK);
1528 MOV32RmStoR(ECX, ECX, EDX, 2);
1529 if (IsConst(_Rt_)) {
1530 MOV32ItoR(EDX, iRegs[_Rt_].k);
1532 MOV32MtoR(EDX, (u32)&psxRegs.GPR.r[_Rt_]);
1534 AND32RtoR(EDX, ECX); // _rRt_ & LWL_MASK[shift]
1538 iRegs[_Rt_].state = ST_UNK;
1539 MOV32RtoM((u32)&psxRegs.GPR.r[_Rt_], EAX);
1541 // ADD32ItoR(ESP, 8);
1546 static void recLWBlock(int count) {
1547 u32 *code = (u32 *)PSXM(pc);
1549 // Rt = mem[Rs + Im] (unsigned)
1553 if (IsConst(_Rs_)) {
1554 u32 addr = iRegs[_Rs_].k + _Imm_;
1557 if ((t & 0xfff0) == 0xbfc0) {
1558 // since bios is readonly it won't change
1559 for (i = 0; i < count; i++, code++, addr += 4) {
1561 MapConst(_fRt_(*code), psxRu32(addr));
1566 if ((t & 0x1fe0) == 0 && (t & 0x1fff) != 0) {
1567 for (i = 0; i < count; i++, code++, addr += 4) {
1570 iRegs[_fRt_(*code)].state = ST_UNK;
1572 MOV32MtoR(EAX, (u32)&psxM[addr & 0x1fffff]);
1573 MOV32RtoM((u32)&psxRegs.GPR.r[_fRt_(*code)], EAX);
1577 if (t == 0x1f80 && addr < 0x1f801000) {
1578 for (i = 0; i < count; i++, code++, addr += 4) {
1581 iRegs[_fRt_(*code)].state = ST_UNK;
1583 MOV32MtoR(EAX, (u32)&psxH[addr & 0xfff]);
1584 MOV32RtoM((u32)&psxRegs.GPR.r[_fRt_(*code)], EAX);
1590 SysPrintf("recLWBlock %d: %d\n", count, IsConst(_Rs_));
1592 CALLFunc((u32)psxMemPointer);
1593 // ADD32ItoR(ESP, 4);
1596 respsave = resp; resp = 0;
1597 TEST32RtoR(EAX, EAX);
1598 j32Ptr[4] = JZ32(0);
1599 XOR32RtoR(ECX, ECX);
1600 for (i = 0; i < count; i++, code++) {
1602 iRegs[_fRt_(*code)].state = ST_UNK;
1604 MOV32RmStoR(EDX, EAX, ECX, 2);
1605 MOV32RtoM((u32)&psxRegs.GPR.r[_fRt_(*code)], EDX);
1607 if (i != (count - 1))
1610 j32Ptr[5] = JMP32(0);
1611 x86SetJ32(j32Ptr[4]);
1612 for (i = 0, code = (u32 *)PSXM(pc); i < count; i++, code++) {
1613 psxRegs.code = *code;
1616 ADD32ItoR(ESP, resp);
1617 x86SetJ32(j32Ptr[5]);
1621 extern u32 LWR_MASK[4];
1622 extern u32 LWR_SHIFT[4];
1624 void iLWRk(u32 shift) {
1625 if (IsConst(_Rt_)) {
1626 MOV32ItoR(ECX, iRegs[_Rt_].k);
1628 MOV32MtoR(ECX, (u32)&psxRegs.GPR.r[_Rt_]);
1630 AND32ItoR(ECX, LWR_MASK[shift]);
1631 SHR32ItoR(EAX, LWR_SHIFT[shift]);
1636 // Rt = Rt Merge mem[Rs + Im]
1638 if (IsConst(_Rs_)) {
1639 u32 addr = iRegs[_Rs_].k + _Imm_;
1642 if ((t & 0x1fe0) == 0 && (t & 0x1fff) != 0) {
1643 MOV32MtoR(EAX, (u32)&psxM[addr & 0x1ffffc]);
1646 iRegs[_Rt_].state = ST_UNK;
1647 MOV32RtoM((u32)&psxRegs.GPR.r[_Rt_], EAX);
1650 if (t == 0x1f80 && addr < 0x1f801000) {
1651 MOV32MtoR(EAX, (u32)&psxH[addr & 0xffc]);
1654 iRegs[_Rt_].state = ST_UNK;
1655 MOV32RtoM((u32)&psxRegs.GPR.r[_Rt_], EAX);
1661 MOV32ItoR(EAX, iRegs[_Rs_].k + _Imm_);
1663 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
1664 if (_Imm_) ADD32ItoR(EAX, _Imm_);
1669 CALLFunc((u32)psxMemRead32);
1674 AND32ItoR(EDX, 0x3); // shift = addr & 3;
1676 MOV32ItoR(ECX, (u32)LWR_SHIFT);
1677 MOV32RmStoR(ECX, ECX, EDX, 2);
1678 SHR32CLtoR(EAX); // mem(EAX) >> LWR_SHIFT[shift]
1680 MOV32ItoR(ECX, (u32)LWR_MASK);
1681 MOV32RmStoR(ECX, ECX, EDX, 2);
1683 if (IsConst(_Rt_)) {
1684 MOV32ItoR(EDX, iRegs[_Rt_].k);
1686 MOV32MtoR(EDX, (u32)&psxRegs.GPR.r[_Rt_]);
1688 AND32RtoR(EDX, ECX); // _rRt_ & LWR_MASK[shift]
1692 iRegs[_Rt_].state = ST_UNK;
1693 MOV32RtoM((u32)&psxRegs.GPR.r[_Rt_], EAX);
1695 // ADD32ItoR(ESP, 8);
/* SB: mem[Rs + offset] = low byte of Rt.
   With a constant address, stores directly into the RAM (psxM) or
   scratchpad/hardware-page (psxH) host buffers; otherwise falls back to
   a psxMemWrite8 call with the address pushed by iPushOfB. */
1700 static void recSB() {
1701 // mem[Rs + Im] = Rt
1705 if (IsConst(_Rs_)) {
1706 u32 addr = iRegs[_Rs_].k + _Imm_;
/* Address decodes to main RAM (KUSEG/KSEG0/KSEG1 mirrors). */
1709 if ((t & 0x1fe0) == 0 && (t & 0x1fff) != 0) {
1710 if (IsConst(_Rt_)) {
1711 MOV8ItoM((u32)&psxM[addr & 0x1fffff], (u8)iRegs[_Rt_].k);
1713 MOV8MtoR(EAX, (u32)&psxRegs.GPR.r[_Rt_]);
1714 MOV8RtoM((u32)&psxM[addr & 0x1fffff], EAX);
/* Address decodes to the 0x1f800000 scratchpad page. */
1718 if (t == 0x1f80 && addr < 0x1f801000) {
1719 if (IsConst(_Rt_)) {
1720 MOV8ItoM((u32)&psxH[addr & 0xfff], (u8)iRegs[_Rt_].k);
1722 MOV8MtoR(EAX, (u32)&psxRegs.GPR.r[_Rt_]);
1723 MOV8RtoM((u32)&psxH[addr & 0xfff], EAX);
1727 // SysPrintf("unhandled w8 %x\n", addr);
/* Slow path: push value (constant or from the GPR slot) and call out. */
1730 if (IsConst(_Rt_)) {
1731 PUSH32I (iRegs[_Rt_].k);
1733 PUSH32M ((u32)&psxRegs.GPR.r[_Rt_]);
1736 CALLFunc((u32)psxMemWrite8);
1737 // ADD32ItoR(ESP, 8);
// recSH — emit x86 code for MIPS SH (store halfword): mem16[Rs + Imm] = (u16)Rt.
// Const-address fast paths: psxM (RAM), psxH (hardware page), and a direct
// SPU register write for 0x1f801c00..0x1f801dff; otherwise psxMemWrite16.
1741 static void recSH() {
1742 // mem[Rs + Im] = Rt
1746 if (IsConst(_Rs_)) {
1747 u32 addr = iRegs[_Rs_].k + _Imm_;
// t derived from addr on an elided line — presumably addr >> 16; TODO confirm.
1750 if ((t & 0x1fe0) == 0 && (t & 0x1fff) != 0) {
1751 if (IsConst(_Rt_)) {
1752 MOV16ItoM((u32)&psxM[addr & 0x1fffff], (u16)iRegs[_Rt_].k);
1754 MOV16MtoR(EAX, (u32)&psxRegs.GPR.r[_Rt_]);
1755 MOV16RtoM((u32)&psxM[addr & 0x1fffff], EAX);
1759 if (t == 0x1f80 && addr < 0x1f801000) {
1760 if (IsConst(_Rt_)) {
1761 MOV16ItoM((u32)&psxH[addr & 0xfff], (u16)iRegs[_Rt_].k);
1763 MOV16MtoR(EAX, (u32)&psxRegs.GPR.r[_Rt_]);
1764 MOV16RtoM((u32)&psxH[addr & 0xfff], EAX);
// SPU register range: push value (plus the address on an elided line) and
// call the SPU plugin's writeRegister entry point through memory.
1769 if (addr >= 0x1f801c00 && addr < 0x1f801e00) {
1770 if (IsConst(_Rt_)) {
1771 PUSH32I(iRegs[_Rt_].k);
1773 PUSH32M((u32)&psxRegs.GPR.r[_Rt_]);
1776 CALL32M ((u32)&SPU_writeRegister);
1783 // SysPrintf("unhandled w16 %x\n", addr);
// Generic slow path via psxMemWrite16.
1786 if (IsConst(_Rt_)) {
1787 PUSH32I (iRegs[_Rt_].k);
1789 PUSH32M ((u32)&psxRegs.GPR.r[_Rt_]);
1792 CALLFunc((u32)psxMemWrite16);
1793 // ADD32ItoR(ESP, 8);
// recSW — emit x86 code for MIPS SW (store word): mem32[Rs + Imm] = Rt.
// Const-address fast paths: psxM (RAM), psxH (hardware page), a switch over
// known hardware registers (DMA channel regs written straight into psxH,
// GPU data/status forwarded to the GPU plugin); otherwise psxMemWrite32.
1797 static void recSW() {
1798 // mem[Rs + Im] = Rt
1802 if (IsConst(_Rs_)) {
1803 u32 addr = iRegs[_Rs_].k + _Imm_;
// t derived from addr on an elided line — presumably addr >> 16; TODO confirm.
1806 if ((t & 0x1fe0) == 0 && (t & 0x1fff) != 0) {
1807 if (IsConst(_Rt_)) {
1808 MOV32ItoM((u32)&psxM[addr & 0x1fffff], iRegs[_Rt_].k);
1810 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rt_]);
1811 MOV32RtoM((u32)&psxM[addr & 0x1fffff], EAX);
1815 if (t == 0x1f80 && addr < 0x1f801000) {
1816 if (IsConst(_Rt_)) {
1817 MOV32ItoM((u32)&psxH[addr & 0xfff], iRegs[_Rt_].k);
1819 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rt_]);
1820 MOV32RtoM((u32)&psxH[addr & 0xfff], EAX);
// Registers in the 0x1f8010xx range that can be written without side
// effects: stored directly into the psxH hardware mirror (addr & 0xffff).
// (the opening switch(addr) and two further case lines are elided here)
1826 case 0x1f801080: case 0x1f801084:
1827 case 0x1f801090: case 0x1f801094:
1828 case 0x1f8010a0: case 0x1f8010a4:
1829 case 0x1f8010b0: case 0x1f8010b4:
1830 case 0x1f8010c0: case 0x1f8010c4:
1831 case 0x1f8010d0: case 0x1f8010d4:
1832 case 0x1f8010e0: case 0x1f8010e4:
1835 if (IsConst(_Rt_)) {
1836 MOV32ItoM((u32)&psxH[addr & 0xffff], iRegs[_Rt_].k);
1838 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rt_]);
1839 MOV32RtoM((u32)&psxH[addr & 0xffff], EAX);
// GPU data port (presumably case 0x1f801810 — the case label is elided):
1844 if (IsConst(_Rt_)) {
1845 PUSH32I(iRegs[_Rt_].k);
1847 PUSH32M((u32)&psxRegs.GPR.r[_Rt_]);
1849 CALL32M((u32)&GPU_writeData);
// GPU status/control port (presumably case 0x1f801814 — label elided):
1856 if (IsConst(_Rt_)) {
1857 PUSH32I(iRegs[_Rt_].k);
1859 PUSH32M((u32)&psxRegs.GPR.r[_Rt_]);
1861 CALL32M((u32)&GPU_writeStatus);
1867 // SysPrintf("unhandled w32 %x\n", addr);
// Generic slow path via psxMemWrite32.
1870 if (IsConst(_Rt_)) {
1871 PUSH32I (iRegs[_Rt_].k);
1873 PUSH32M ((u32)&psxRegs.GPR.r[_Rt_]);
1876 CALLFunc((u32)psxMemWrite32);
1877 // ADD32ItoR(ESP, 8);
// recSWBlock — emit x86 code for a run of `count` consecutive SW instructions
// that share the same base register Rs with ascending immediates (the run is
// validated by the caller, recRecompile). Const-Rs fast paths store each Rt
// directly into psxM / psxH; otherwise the base pointer is resolved once at
// runtime via psxMemPointer and the values are stored through it, with a
// fallback loop when the pointer is NULL.
//
// BUGFIX: the psxH fast path previously emitted LOADS — it read psxH into
// EAX and wrote it into GPR[rt], and invalidated rt's const-tracking state
// (the body of recLWBlock pasted in by mistake). A store block must do the
// opposite: write the register value into psxH, exactly like the psxM path
// above it, and must not touch iRegs[rt].state.
1882 static void recSWBlock(int count) {
1885 // mem[Rs + Im] = Rt
1889 if (IsConst(_Rs_)) {
1890 u32 addr = iRegs[_Rs_].k + _Imm_;
1892 code = (u32 *)PSXM(pc);
// RAM fast path: one direct 32-bit store per instruction in the run.
1894 if ((t & 0x1fe0) == 0 && (t & 0x1fff) != 0) {
1895 for (i = 0; i < count; i++, code++, addr += 4) {
1896 if (IsConst(_fRt_(*code))) {
1897 MOV32ItoM((u32)&psxM[addr & 0x1fffff], iRegs[_fRt_(*code)].k);
1899 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_fRt_(*code)]);
1900 MOV32RtoM((u32)&psxM[addr & 0x1fffff], EAX);
// Hardware-page fast path: same store pattern as the psxM path above.
1905 if (t == 0x1f80 && addr < 0x1f801000) {
1906 for (i = 0; i < count; i++, code++, addr += 4) {
1908 if (IsConst(_fRt_(*code))) {
1909 MOV32ItoM((u32)&psxH[addr & 0xfff], iRegs[_fRt_(*code)].k);
1911 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_fRt_(*code)]);
1912 MOV32RtoM((u32)&psxH[addr & 0xfff], EAX);
// Runtime path: resolve the host pointer for the base address once.
1918 SysPrintf("recSWBlock %d: %d\n", count, IsConst(_Rs_));
1920 CALLFunc((u32)psxMemPointer);
1921 // ADD32ItoR(ESP, 4);
// NULL pointer -> fall back to per-instruction generic stores below.
1926 TEST32RtoR(EAX, EAX);
1927 j32Ptr[4] = JZ32(0);
1928 XOR32RtoR(ECX, ECX);
// Direct path: store each value through the resolved pointer, ECX*4 offset.
1929 for (i = 0, code = (u32 *)PSXM(pc); i < count; i++, code++) {
1930 if (IsConst(_fRt_(*code))) {
1931 MOV32ItoR(EDX, iRegs[_fRt_(*code)].k);
1933 MOV32MtoR(EDX, (u32)&psxRegs.GPR.r[_fRt_(*code)]);
1935 MOV32RtoRmS(EAX, ECX, 2, EDX);
1936 if (i != (count - 1))
1939 j32Ptr[5] = JMP32(0);
// Fallback path: re-dispatch each instruction through the generic handler.
1940 x86SetJ32(j32Ptr[4]);
1941 for (i = 0, code = (u32 *)PSXM(pc); i < count; i++, code++) {
1942 psxRegs.code = *code;
1945 ADD32ItoR(ESP, resp);
1946 x86SetJ32(j32Ptr[5]);
// Lookup tables for SWL (store word left), indexed by addr & 3; defined
// elsewhere in the core.
1950 extern u32 SWL_MASK[4];
1951 extern u32 SWL_SHIFT[4];
// iSWLk — helper for SWL with a compile-time-known shift: merges Rt into the
// memory word already loaded in EAX, leaving the result in EAX:
// EAX = (EAX & SWL_MASK[shift]) | (Rt >> SWL_SHIFT[shift]).
1953 void iSWLk(u32 shift) {
1954 if (IsConst(_Rt_)) {
1955 MOV32ItoR(ECX, iRegs[_Rt_].k);
1957 MOV32MtoR(ECX, (u32)&psxRegs.GPR.r[_Rt_]);
1959 SHR32ItoR(ECX, SWL_SHIFT[shift]);
1960 AND32ItoR(EAX, SWL_MASK[shift]);
1961 OR32RtoR (EAX, ECX);
// Body of the SWL (store word left) recompiler — the function header line is
// elided from this dump; presumably `static void recSWL()`. SWL performs a
// read-modify-write: load the aligned word, merge in the left part of Rt
// (via iSWLk for const addresses, or the SWL_MASK/SWL_SHIFT tables at
// runtime), then store it back.
1965 // mem[Rs + Im] = Rt Merge mem[Rs + Im]
1967 if (IsConst(_Rs_)) {
1968 u32 addr = iRegs[_Rs_].k + _Imm_;
// Const-address fast paths: load aligned word, merge (iSWLk call elided),
// store back. t presumably = addr >> 16; TODO confirm.
1971 if ((t & 0x1fe0) == 0 && (t & 0x1fff) != 0) {
1972 MOV32MtoR(EAX, (u32)&psxM[addr & 0x1ffffc]);
1974 MOV32RtoM((u32)&psxM[addr & 0x1ffffc], EAX);
1977 if (t == 0x1f80 && addr < 0x1f801000) {
1978 MOV32MtoR(EAX, (u32)&psxH[addr & 0xffc]);
1980 MOV32RtoM((u32)&psxH[addr & 0xffc], EAX);
// Runtime path: compute the effective address in EAX...
1985 if (IsConst(_Rs_)) {
1986 MOV32ItoR(EAX, iRegs[_Rs_].k + _Imm_);
1988 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
1989 if (_Imm_) ADD32ItoR(EAX, _Imm_);
// ...read the aligned word (address push/align lines elided)...
1995 CALLFunc((u32)psxMemRead32);
// ...derive shift = addr & 3 in EDX (the reload of the address is elided)...
1999 AND32ItoR(EDX, 0x3); // shift = addr & 3;
// ...mask the loaded word with SWL_MASK[shift] (table indexed by EDX*4)...
2001 MOV32ItoR(ECX, (u32)SWL_MASK);
2002 MOV32RmStoR(ECX, ECX, EDX, 2);
2003 AND32RtoR(EAX, ECX); // mem & SWL_MASK[shift]
// ...fetch SWL_SHIFT[shift] into ECX for the CL-shift below...
2005 MOV32ItoR(ECX, (u32)SWL_SHIFT);
2006 MOV32RmStoR(ECX, ECX, EDX, 2);
2007 if (IsConst(_Rt_)) {
2008 MOV32ItoR(EDX, iRegs[_Rt_].k);
2010 MOV32MtoR(EDX, (u32)&psxRegs.GPR.r[_Rt_]);
2012 SHR32CLtoR(EDX); // _rRt_ >> SWL_SHIFT[shift]
2014 OR32RtoR (EAX, EDX);
// ...finally write the merged word back via psxMemWrite32.
2017 if (IsConst(_Rs_)) MOV32ItoR(EAX, iRegs[_Rs_].k + _Imm_);
2019 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
2020 if (_Imm_) ADD32ItoR(EAX, _Imm_);
2025 CALLFunc((u32)psxMemWrite32);
2026 // ADD32ItoR(ESP, 8);
// Lookup tables for SWR (store word right), indexed by addr & 3; defined
// elsewhere in the core.
2030 extern u32 SWR_MASK[4];
2031 extern u32 SWR_SHIFT[4];
// iSWRk — helper for SWR with a compile-time-known shift: merges Rt into the
// memory word already loaded in EAX, leaving the result in EAX:
// EAX = (EAX & SWR_MASK[shift]) | (Rt << SWR_SHIFT[shift]).
// Mirror of iSWLk but shifting left instead of right.
2033 void iSWRk(u32 shift) {
2034 if (IsConst(_Rt_)) {
2035 MOV32ItoR(ECX, iRegs[_Rt_].k);
2037 MOV32MtoR(ECX, (u32)&psxRegs.GPR.r[_Rt_]);
2039 SHL32ItoR(ECX, SWR_SHIFT[shift]);
2040 AND32ItoR(EAX, SWR_MASK[shift]);
2041 OR32RtoR (EAX, ECX);
// Body of the SWR (store word right) recompiler — the function header line is
// elided from this dump; presumably `static void recSWR()`. Structure is
// identical to the SWL body above, but uses SWR_MASK/SWR_SHIFT and shifts Rt
// LEFT when merging.
2045 // mem[Rs + Im] = Rt Merge mem[Rs + Im]
2047 if (IsConst(_Rs_)) {
2048 u32 addr = iRegs[_Rs_].k + _Imm_;
// Const-address fast paths: load aligned word, merge (iSWRk call elided),
// store back. t presumably = addr >> 16; TODO confirm.
2051 if ((t & 0x1fe0) == 0 && (t & 0x1fff) != 0) {
2052 MOV32MtoR(EAX, (u32)&psxM[addr & 0x1ffffc]);
2054 MOV32RtoM((u32)&psxM[addr & 0x1ffffc], EAX);
2057 if (t == 0x1f80 && addr < 0x1f801000) {
2058 MOV32MtoR(EAX, (u32)&psxH[addr & 0xffc]);
2060 MOV32RtoM((u32)&psxH[addr & 0xffc], EAX);
// Runtime path: effective address in EAX...
2065 if (IsConst(_Rs_)) {
2066 MOV32ItoR(EAX, iRegs[_Rs_].k + _Imm_);
2068 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
2069 if (_Imm_) ADD32ItoR(EAX, _Imm_);
// ...read the aligned word...
2075 CALLFunc((u32)psxMemRead32);
// ...shift = addr & 3 in EDX...
2079 AND32ItoR(EDX, 0x3); // shift = addr & 3;
// ...mask the loaded word with SWR_MASK[shift]...
2081 MOV32ItoR(ECX, (u32)SWR_MASK);
2082 MOV32RmStoR(ECX, ECX, EDX, 2);
2083 AND32RtoR(EAX, ECX); // mem & SWR_MASK[shift]
// ...fetch SWR_SHIFT[shift] into ECX, shift Rt left by CL, merge...
2085 MOV32ItoR(ECX, (u32)SWR_SHIFT);
2086 MOV32RmStoR(ECX, ECX, EDX, 2);
2087 if (IsConst(_Rt_)) {
2088 MOV32ItoR(EDX, iRegs[_Rt_].k);
2090 MOV32MtoR(EDX, (u32)&psxRegs.GPR.r[_Rt_]);
2092 SHL32CLtoR(EDX); // _rRt_ << SWR_SHIFT[shift]
2094 OR32RtoR (EAX, EDX);
// ...write the merged word back via psxMemWrite32.
2097 if (IsConst(_Rs_)) MOV32ItoR(EAX, iRegs[_Rs_].k + _Imm_);
2099 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
2100 if (_Imm_) ADD32ItoR(EAX, _Imm_);
2105 CALLFunc((u32)psxMemWrite32);
2106 // ADD32ItoR(ESP, 8);
// recSLL — MIPS SLL: Rd = Rt << Sa. If Rt is a known constant the result is
// folded at compile time (MapConst); otherwise emit a load/shift/store.
2114 static void recSLL() {
2121 if (IsConst(_Rt_)) {
2122 MapConst(_Rd_, iRegs[_Rt_].k << _Sa_);
2124 iRegs[_Rd_].state = ST_UNK;
2126 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rt_]);
// Skip the shift entirely when Sa == 0 (pure move).
2127 if (_Sa_) SHL32ItoR(EAX, _Sa_);
2128 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
// recSRL — MIPS SRL: Rd = Rt >> Sa (logical). Constant-folded when Rt is
// known; otherwise load/SHR/store.
2132 static void recSRL() {
2139 if (IsConst(_Rt_)) {
2140 MapConst(_Rd_, iRegs[_Rt_].k >> _Sa_);
2142 iRegs[_Rd_].state = ST_UNK;
2144 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rt_]);
2145 if (_Sa_) SHR32ItoR(EAX, _Sa_);
2146 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
// recSRA — MIPS SRA: Rd = (s32)Rt >> Sa (arithmetic). The (s32) cast gives
// the sign-propagating shift in the constant fold; SAR is its x86 analogue.
2150 static void recSRA() {
2157 if (IsConst(_Rt_)) {
2158 MapConst(_Rd_, (s32)iRegs[_Rt_].k >> _Sa_);
2160 iRegs[_Rd_].state = ST_UNK;
2162 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rt_]);
2163 if (_Sa_) SAR32ItoR(EAX, _Sa_);
2164 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
// recSLLV — MIPS SLLV: Rd = Rt << (Rs & 0x1f). Four cases by which operands
// are compile-time constants; the SHL32CLtoR emit lines (shift by CL) are
// elided from this dump between loading ECX and storing the result.
// NOTE(review): the constant fold shifts by iRegs[_Rs_].k unmasked — C UB if
// the stored shift amount were >= 32; presumably callers keep it in range or
// rely on x86's 5-bit masking at runtime — TODO confirm against upstream.
2173 static void recSLLV() {
2180 if (IsConst(_Rt_) && IsConst(_Rs_)) {
2181 MapConst(_Rd_, iRegs[_Rt_].k << iRegs[_Rs_].k);
2182 } else if (IsConst(_Rs_)) {
2183 iRegs[_Rd_].state = ST_UNK;
2185 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rt_]);
2186 MOV32ItoR(ECX, iRegs[_Rs_].k);
2188 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
2189 } else if (IsConst(_Rt_)) {
2190 iRegs[_Rd_].state = ST_UNK;
2192 MOV32ItoR(EAX, iRegs[_Rt_].k);
2193 MOV32MtoR(ECX, (u32)&psxRegs.GPR.r[_Rs_]);
2195 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
2197 iRegs[_Rd_].state = ST_UNK;
2199 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rt_]);
2200 MOV32MtoR(ECX, (u32)&psxRegs.GPR.r[_Rs_]);
2202 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
// recSRLV — MIPS SRLV: Rd = Rt >> (Rs & 0x1f), logical. Same four-case
// structure as recSLLV; the SHR32CLtoR emit lines are elided from this dump.
2206 static void recSRLV() {
2212 if (IsConst(_Rt_) && IsConst(_Rs_)) {
2213 MapConst(_Rd_, iRegs[_Rt_].k >> iRegs[_Rs_].k);
2214 } else if (IsConst(_Rs_)) {
2215 iRegs[_Rd_].state = ST_UNK;
2217 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rt_]);
2218 MOV32ItoR(ECX, iRegs[_Rs_].k);
2220 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
2221 } else if (IsConst(_Rt_)) {
2222 iRegs[_Rd_].state = ST_UNK;
2224 MOV32ItoR(EAX, iRegs[_Rt_].k);
2225 MOV32MtoR(ECX, (u32)&psxRegs.GPR.r[_Rs_]);
2227 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
2229 iRegs[_Rd_].state = ST_UNK;
2231 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rt_]);
2232 MOV32MtoR(ECX, (u32)&psxRegs.GPR.r[_Rs_]);
2234 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
// recSRAV — MIPS SRAV: Rd = (s32)Rt >> (Rs & 0x1f), arithmetic. Same
// four-case structure; the SAR32CLtoR emit lines are elided from this dump.
2238 static void recSRAV() {
2245 if (IsConst(_Rt_) && IsConst(_Rs_)) {
2246 MapConst(_Rd_, (s32)iRegs[_Rt_].k >> iRegs[_Rs_].k);
2247 } else if (IsConst(_Rs_)) {
2248 iRegs[_Rd_].state = ST_UNK;
2250 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rt_]);
2251 MOV32ItoR(ECX, iRegs[_Rs_].k);
2253 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
2254 } else if (IsConst(_Rt_)) {
2255 iRegs[_Rd_].state = ST_UNK;
2257 MOV32ItoR(EAX, iRegs[_Rt_].k);
2258 MOV32MtoR(ECX, (u32)&psxRegs.GPR.r[_Rs_]);
2260 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
2262 iRegs[_Rd_].state = ST_UNK;
2264 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rt_]);
2265 MOV32MtoR(ECX, (u32)&psxRegs.GPR.r[_Rs_]);
2267 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
// recSYSCALL — MIPS SYSCALL: store the faulting pc (pc - 4, the instruction
// itself) into psxRegs.pc, then raise the exception via psxException. The
// pushed flag tells the handler whether we are in a branch delay slot
// (the exception-code push and block-exit lines are elided).
2277 static void recSYSCALL() {
2281 MOV32ItoR(EAX, pc - 4);
2282 MOV32RtoM((u32)&psxRegs.pc, EAX);
2283 PUSH32I (branch == 1 ? 1 : 0);
2285 CALLFunc ((u32)psxException);
// recBREAK — MIPS BREAK. Body is elided from this dump; presumably empty or
// a stub (BREAK is rarely used by PSX titles) — TODO confirm against upstream.
2292 static void recBREAK() {
// recMFHI — MIPS MFHI: Rd = HI. Rd can no longer be tracked as a constant.
2301 static void recMFHI() {
2306 iRegs[_Rd_].state = ST_UNK;
2307 MOV32MtoR(EAX, (u32)&psxRegs.GPR.n.hi);
2308 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
// recMTHI — MIPS MTHI: HI = Rs. Immediate store when Rs is a known constant,
// otherwise copy through EAX.
2311 static void recMTHI() {
2314 if (IsConst(_Rs_)) {
2315 MOV32ItoM((u32)&psxRegs.GPR.n.hi, iRegs[_Rs_].k);
2317 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
2318 MOV32RtoM((u32)&psxRegs.GPR.n.hi, EAX);
// recMFLO — MIPS MFLO: Rd = LO. Mirror of recMFHI for the LO register.
2322 static void recMFLO() {
2327 iRegs[_Rd_].state = ST_UNK;
2328 MOV32MtoR(EAX, (u32)&psxRegs.GPR.n.lo);
2329 MOV32RtoM((u32)&psxRegs.GPR.r[_Rd_], EAX);
// recMTLO — MIPS MTLO: LO = Rs. Mirror of recMTHI for the LO register.
2332 static void recMTLO() {
2335 if (IsConst(_Rs_)) {
2336 MOV32ItoM((u32)&psxRegs.GPR.n.lo, iRegs[_Rs_].k);
2338 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
2339 MOV32RtoM((u32)&psxRegs.GPR.n.lo, EAX);
// recBLTZ — MIPS BLTZ: branch to pc + Imm*4 if (s32)Rs < 0.
// Degenerate branch-to-next-instruction with no load-delay hazard compiles
// to nothing; constant Rs resolves the branch at compile time (iJump lines
// elided); otherwise emit CMP + JL around the two iBranch paths (elided).
2358 static void recBLTZ() {
2360 u32 bpc = _Imm_ * 4 + pc;
2364 if (bpc == pc+4 && psxTestLoadDelay(_Rs_, PSXMu32(bpc)) == 0) {
2368 if (IsConst(_Rs_)) {
2369 if ((s32)iRegs[_Rs_].k < 0) {
// Runtime test: JL jumps to the taken-branch code emitted after SetJ32.
2378 CMP32ItoM((u32)&psxRegs.GPR.r[_Rs_], 0);
2379 j32Ptr[4] = JL32(0);
2383 x86SetJ32(j32Ptr[4]);
// recBGTZ — MIPS BGTZ: branch to pc + Imm*4 if (s32)Rs > 0.
// Same structure as recBLTZ with the JG condition.
2389 static void recBGTZ() {
2391 u32 bpc = _Imm_ * 4 + pc;
2394 if (bpc == pc + 4 && psxTestLoadDelay(_Rs_, PSXMu32(bpc)) == 0) {
2398 if (IsConst(_Rs_)) {
2399 if ((s32)iRegs[_Rs_].k > 0) {
2408 CMP32ItoM((u32)&psxRegs.GPR.r[_Rs_], 0);
2409 j32Ptr[4] = JG32(0);
2413 x86SetJ32(j32Ptr[4]);
// recBLTZAL — MIPS BLTZAL: branch if (s32)Rs < 0, linking the return address
// (pc + 4) into r31. Note the link is only written on the taken path here
// (matches this core's const path too) — MIPS proper links unconditionally;
// NOTE(review): presumably a known emulator simplification — verify upstream.
2419 static void recBLTZAL() {
2421 u32 bpc = _Imm_ * 4 + pc;
2424 if (bpc == pc + 4 && psxTestLoadDelay(_Rs_, PSXMu32(bpc)) == 0) {
2428 if (IsConst(_Rs_)) {
2429 if ((s32)iRegs[_Rs_].k < 0) {
2430 MOV32ItoM((u32)&psxRegs.GPR.r[31], pc + 4);
// (taken-path iJump(bpc) and the else are elided; below is the not-taken fold)
2433 iJump(pc + 4); return;
2437 CMP32ItoM((u32)&psxRegs.GPR.r[_Rs_], 0);
2438 j32Ptr[4] = JL32(0);
2442 x86SetJ32(j32Ptr[4]);
// Taken path at runtime: write the link register, then branch (elided).
2444 MOV32ItoM((u32)&psxRegs.GPR.r[31], pc + 4);
// recBGEZAL — MIPS BGEZAL: branch if (s32)Rs >= 0, linking pc + 4 into r31.
// Mirror of recBLTZAL with the JGE condition.
2449 static void recBGEZAL() {
2450 // Branch if Rs >= 0
2451 u32 bpc = _Imm_ * 4 + pc;
2454 if (bpc == pc + 4 && psxTestLoadDelay(_Rs_, PSXMu32(bpc)) == 0) {
2458 if (IsConst(_Rs_)) {
2459 if ((s32)iRegs[_Rs_].k >= 0) {
2460 MOV32ItoM((u32)&psxRegs.GPR.r[31], pc + 4);
// (taken-path iJump(bpc) and the else are elided; below is the not-taken fold)
2463 iJump(pc+4); return;
2467 CMP32ItoM((u32)&psxRegs.GPR.r[_Rs_], 0);
2468 j32Ptr[4] = JGE32(0);
2472 x86SetJ32(j32Ptr[4]);
// Taken path at runtime: write the link register, then branch (elided).
2474 MOV32ItoM((u32)&psxRegs.GPR.r[31], pc + 4);
// recJ — MIPS J: unconditional jump within the current 256MB segment
// (26-bit target field * 4, upper 4 bits taken from the current pc).
2479 static void recJ() {
2482 iJump(_Target_ * 4 + (pc & 0xf0000000));
// recJAL — MIPS JAL: like recJ but first records the return address
// (pc + 4) as a compile-time constant in r31.
2485 static void recJAL() {
2488 MapConst(31, pc + 4);
2490 iJump(_Target_ * 4 + (pc & 0xf0000000));
// recJR — MIPS JR: indirect jump to the address in Rs. The target is stored
// into the recompiler's `target` variable; the block-exit branch emit is on
// elided lines.
2493 static void recJR() {
2496 if (IsConst(_Rs_)) {
2497 MOV32ItoM((u32)&target, iRegs[_Rs_].k);
2499 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
2500 MOV32RtoM((u32)&target, EAX);
// recJALR — MIPS JALR: indirect jump to Rs, linking pc + 4 into Rd
// (tracked as a compile-time constant). Block-exit branch emit is elided.
2506 static void recJALR() {
2509 if (IsConst(_Rs_)) {
2510 MOV32ItoM((u32)&target, iRegs[_Rs_].k);
2512 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
2513 MOV32RtoM((u32)&target, EAX);
2517 MapConst(_Rd_, pc + 4);
// recBEQ — MIPS BEQ: branch to pc + Imm*4 if Rs == Rt.
// Fully-constant operands fold the branch at compile time (iJump lines
// elided); one constant operand compares memory against an immediate; the
// general case compares the two GPR slots through EAX. JE jumps to the
// taken-branch code emitted after x86SetJ32 (iBranch lines elided).
2523 static void recBEQ() {
2524 // Branch if Rs == Rt
2525 u32 bpc = _Imm_ * 4 + pc;
2528 if (bpc == pc + 4 && psxTestLoadDelay(_Rs_, PSXMu32(bpc)) == 0) {
2535 if (IsConst(_Rs_) && IsConst(_Rt_)) {
2536 if (iRegs[_Rs_].k == iRegs[_Rt_].k) {
2543 } else if (IsConst(_Rs_)) {
2544 CMP32ItoM((u32)&psxRegs.GPR.r[_Rt_], iRegs[_Rs_].k);
2545 } else if (IsConst(_Rt_)) {
2546 CMP32ItoM((u32)&psxRegs.GPR.r[_Rs_], iRegs[_Rt_].k);
2548 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
2549 CMP32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rt_]);
2552 j32Ptr[4] = JE32(0);
2556 x86SetJ32(j32Ptr[4]);
// recBNE — MIPS BNE: branch to pc + Imm*4 if Rs != Rt.
// Same structure as recBEQ with the JNE condition.
2563 static void recBNE() {
2564 // Branch if Rs != Rt
2565 u32 bpc = _Imm_ * 4 + pc;
2568 if (bpc == pc + 4 && psxTestLoadDelay(_Rs_, PSXMu32(bpc)) == 0) {
2572 if (IsConst(_Rs_) && IsConst(_Rt_)) {
2573 if (iRegs[_Rs_].k != iRegs[_Rt_].k) {
2580 } else if (IsConst(_Rs_)) {
2581 CMP32ItoM((u32)&psxRegs.GPR.r[_Rt_], iRegs[_Rs_].k);
2582 } else if (IsConst(_Rt_)) {
2583 CMP32ItoM((u32)&psxRegs.GPR.r[_Rs_], iRegs[_Rt_].k);
2585 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rs_]);
2586 CMP32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rt_]);
2588 j32Ptr[4] = JNE32(0);
2592 x86SetJ32(j32Ptr[4]);
// recBLEZ — MIPS BLEZ: branch to pc + Imm*4 if (s32)Rs <= 0.
// Same structure as recBLTZ with the JLE condition.
2598 static void recBLEZ() {
2599 // Branch if Rs <= 0
2600 u32 bpc = _Imm_ * 4 + pc;
2603 if (bpc == pc + 4 && psxTestLoadDelay(_Rs_, PSXMu32(bpc)) == 0) {
2607 if (IsConst(_Rs_)) {
2608 if ((s32)iRegs[_Rs_].k <= 0) {
2617 CMP32ItoM((u32)&psxRegs.GPR.r[_Rs_], 0);
2618 j32Ptr[4] = JLE32(0);
2622 x86SetJ32(j32Ptr[4]);
// recBGEZ — MIPS BGEZ: branch to pc + Imm*4 if (s32)Rs >= 0.
// Same structure as recBLTZ with the JGE condition.
2628 static void recBGEZ() {
2629 // Branch if Rs >= 0
2630 u32 bpc = _Imm_ * 4 + pc;
2633 if (bpc == pc + 4 && psxTestLoadDelay(_Rs_, PSXMu32(bpc)) == 0) {
2637 if (IsConst(_Rs_)) {
2638 if ((s32)iRegs[_Rs_].k >= 0) {
2647 CMP32ItoM((u32)&psxRegs.GPR.r[_Rs_], 0);
2648 j32Ptr[4] = JGE32(0);
2652 x86SetJ32(j32Ptr[4]);
// recMFC0 — MIPS MFC0: Rt = COP0 register Rd. Rt can no longer be tracked
// as a constant.
2665 static void recMFC0() {
2669 iRegs[_Rt_].state = ST_UNK;
2670 MOV32MtoR(EAX, (u32)&psxRegs.CP0.r[_Rd_]);
2671 MOV32RtoM((u32)&psxRegs.GPR.r[_Rt_], EAX);
// recCFC0 — MIPS CFC0. Body elided from this dump; presumably it mirrors
// recMFC0 (CFC0 behaves like MFC0 on this CPU) — TODO confirm upstream.
2674 static void recCFC0() {
// recMTC0 — MIPS MTC0: COP0 register Rd = Rt. The visible const-Rt stores
// are the arms of an elided switch(_Rd_); the 0xfc00-masked variant clears
// the write-protected interrupt-pending bits of the Cause register
// (presumably the _Rd_ == 13 arm — the case labels are elided).
2681 static void recMTC0() {
2684 if (IsConst(_Rt_)) {
2687 MOV32ItoM((u32)&psxRegs.CP0.r[_Rd_], iRegs[_Rt_].k);
2690 MOV32ItoM((u32)&psxRegs.CP0.r[_Rd_], iRegs[_Rt_].k & ~(0xfc00));
2693 MOV32ItoM((u32)&psxRegs.CP0.r[_Rd_], iRegs[_Rt_].k);
// Non-const Rt: copy through EAX, applying the same mask (the guarding
// `if (_Rd_ == 13)` line is elided before the AND).
2697 MOV32MtoR(EAX, (u32)&psxRegs.GPR.r[_Rt_]);
2700 AND32ItoR(EAX, ~(0xfc00));
2703 MOV32RtoM((u32)&psxRegs.CP0.r[_Rd_], EAX);
// Writes to registers 12/13 (Status/Cause) may unmask a pending software
// interrupt: sync psxRegs.pc and let psxTestSWInts check.
2706 if (_Rd_ == 12 || _Rd_ == 13) {
2708 MOV32ItoM((u32)&psxRegs.pc, (u32)pc);
2709 CALLFunc((u32)psxTestSWInts);
// recCTC0 — MIPS CTC0. Body elided from this dump; presumably it forwards to
// the MTC0 handling (CTC0 behaves like MTC0 on this CPU) — TODO confirm.
2717 static void recCTC0() {
// recRFE — MIPS RFE: pop the COP0 Status mode stack. Emits:
//   Status = (Status & ~0xf) | ((Status & 0x3c) >> 2)
// (the SHR of ECX by 2 is on an elided line between the AND and the OR).
// Afterwards a software interrupt may have become deliverable, so sync
// psxRegs.pc and call psxTestSWInts.
2723 static void recRFE() {
2724 MOV32MtoR(EAX, (u32)&psxRegs.CP0.n.Status);
2725 MOV32RtoR(ECX, EAX);
2726 AND32ItoR(EAX, 0xfffffff0);
2727 AND32ItoR(ECX, 0x3c);
2729 OR32RtoR (EAX, ECX);
2730 MOV32RtoM((u32)&psxRegs.CP0.n.Status, EAX);
2733 MOV32ItoM((u32)&psxRegs.pc, (u32)pc);
2734 CALLFunc((u32)psxTestSWInts);
// recHLE — dispatch a high-level-emulated BIOS call: load the handler address
// from psxHLEt indexed by the low bits of the opcode (the indirect call and
// block-exit lines are elided).
// NOTE(review): indexing with `& 0xffff` is only safe if psxHLEt has 64K
// entries or the opcode's low halfword is pre-validated — verify against the
// psxHLEt definition before touching this.
2746 static void recHLE() {
2749 MOV32ItoR(EAX, (u32)psxHLEt[psxRegs.code & 0xffff]);
// Primary opcode dispatch table, indexed by the top 6 bits of the
// instruction (see `recBSC[psxRegs.code >> 26]()` in recRecompile).
// Rows of 8: 0x00-0x07, 0x08-0x0f, ... recNULL marks unimplemented opcodes.
2757 static void (*recBSC[64])() = {
2758 recSPECIAL, recREGIMM, recJ , recJAL , recBEQ , recBNE , recBLEZ, recBGTZ,
2759 recADDI , recADDIU , recSLTI, recSLTIU, recANDI, recORI , recXORI, recLUI ,
2760 recCOP0 , recNULL , recCOP2, recNULL , recNULL, recNULL, recNULL, recNULL,
2761 recNULL , recNULL , recNULL, recNULL , recNULL, recNULL, recNULL, recNULL,
2762 recLB , recLH , recLWL , recLW , recLBU , recLHU , recLWR , recNULL,
2763 recSB , recSH , recSWL , recSW , recNULL, recNULL, recSWR , recNULL,
2764 recNULL , recNULL , recLWC2, recNULL , recNULL, recNULL, recNULL, recNULL,
2765 recNULL , recNULL , recSWC2, recHLE , recNULL, recNULL, recNULL, recNULL
// SPECIAL-opcode dispatch table, indexed by the 6-bit funct field
// (presumably dispatched by recSPECIAL, which is outside this dump).
2768 static void (*recSPC[64])() = {
2769 recSLL , recNULL, recSRL , recSRA , recSLLV , recNULL , recSRLV, recSRAV,
2770 recJR , recJALR, recNULL, recNULL, recSYSCALL, recBREAK, recNULL, recNULL,
2771 recMFHI, recMTHI, recMFLO, recMTLO, recNULL , recNULL , recNULL, recNULL,
2772 recMULT, recMULTU, recDIV, recDIVU, recNULL , recNULL , recNULL, recNULL,
2773 recADD , recADDU, recSUB , recSUBU, recAND , recOR , recXOR , recNOR ,
2774 recNULL, recNULL, recSLT , recSLTU, recNULL , recNULL , recNULL, recNULL,
2775 recNULL, recNULL, recNULL, recNULL, recNULL , recNULL , recNULL, recNULL,
2776 recNULL, recNULL, recNULL, recNULL, recNULL , recNULL , recNULL, recNULL
// REGIMM dispatch table, indexed by the rt field: 0x00 BLTZ, 0x01 BGEZ,
// 0x10 BLTZAL, 0x11 BGEZAL (presumably dispatched by recREGIMM).
2779 static void (*recREG[32])() = {
2780 recBLTZ , recBGEZ , recNULL, recNULL, recNULL, recNULL, recNULL, recNULL,
2781 recNULL , recNULL , recNULL, recNULL, recNULL, recNULL, recNULL, recNULL,
2782 recBLTZAL, recBGEZAL, recNULL, recNULL, recNULL, recNULL, recNULL, recNULL,
2783 recNULL , recNULL , recNULL, recNULL, recNULL, recNULL, recNULL, recNULL
// COP0 dispatch table, indexed by the rs field: MFC0/CFC0/MTC0/CTC0 plus
// RFE at 0x10 (presumably dispatched by recCOP0).
2786 static void (*recCP0[32])() = {
2787 recMFC0, recNULL, recCFC0, recNULL, recMTC0, recNULL, recCTC0, recNULL,
2788 recNULL, recNULL, recNULL, recNULL, recNULL, recNULL, recNULL, recNULL,
2789 recRFE , recNULL, recNULL, recNULL, recNULL, recNULL, recNULL, recNULL,
2790 recNULL, recNULL, recNULL, recNULL, recNULL, recNULL, recNULL, recNULL
// COP2 (GTE) dispatch table, indexed by the 6-bit funct field. Index 0
// (recBASIC) handles the MFC2/CFC2/MTC2/CTC2 sub-dispatch via recCP2BSC.
2793 static void (*recCP2[64])() = {
2794 recBASIC, recRTPS , recNULL , recNULL, recNULL, recNULL , recNCLIP, recNULL, // 00
2795 recNULL , recNULL , recNULL , recNULL, recOP , recNULL , recNULL , recNULL, // 08
2796 recDPCS , recINTPL, recMVMVA, recNCDS, recCDP , recNULL , recNCDT , recNULL, // 10
2797 recNULL , recNULL , recNULL , recNCCS, recCC , recNULL , recNCS , recNULL, // 18
2798 recNCT , recNULL , recNULL , recNULL, recNULL, recNULL , recNULL , recNULL, // 20
2799 recSQR , recDCPL , recDPCT , recNULL, recNULL, recAVSZ3, recAVSZ4, recNULL, // 28
2800 recRTPT , recNULL , recNULL , recNULL, recNULL, recNULL , recNULL , recNULL, // 30
2801 recNULL , recNULL , recNULL , recNULL, recNULL, recGPF , recGPL , recNCCT // 38
// COP2 basic-op dispatch table, indexed by the rs field:
// MFC2/CFC2/MTC2/CTC2 register transfers to/from the GTE.
2804 static void (*recCP2BSC[32])() = {
2805 recMFC2, recNULL, recCFC2, recNULL, recMTC2, recNULL, recCTC2, recNULL,
2806 recNULL, recNULL, recNULL, recNULL, recNULL, recNULL, recNULL, recNULL,
2807 recNULL, recNULL, recNULL, recNULL, recNULL, recNULL, recNULL, recNULL,
2808 recNULL, recNULL, recNULL, recNULL, recNULL, recNULL, recNULL, recNULL
// recRecompile — translate one block of guest code starting at psxRegs.pc
// into x86, registering the generated-code entry point in the PC_REC lookup.
// Scans up to 500 instructions, coalescing runs of consecutive LW/SW that
// share the base register Rs with sequentially increasing immediates into
// block transfers (the recLWBlock/recSWBlock calls are on elided lines).
2811 static void recRecompile() {
2818 /* if x86Ptr reached the mem limit reset whole mem */
// Keep a 64KB safety margin so one block can never overrun the code buffer.
2819 if (((u32)x86Ptr - (u32)recMem) >= (RECMEM_SIZE - 0x10000))
// Record where this block's code begins for future direct dispatch.
2825 PC_REC32(psxRegs.pc) = (u32)x86Ptr;
2829 for (count = 0; count < 500;) {
2830 p = (char *)PSXM(pc);
2831 if (p == NULL) recError();
2832 psxRegs.code = *(u32 *)p;
// LW run detection: count following instructions that are also LW on the
// same Rs with immediate _Imm_ + i*4 (loop body partly elided).
2834 if ((psxRegs.code >> 26) == 0x23) { // LW
2839 p = (char *)PSXM(pc+i*4);
2840 if (p == NULL) recError();
2843 if ((code >> 26) != 0x23 ||
2844 _fRs_(code) != _Rs_ ||
2845 _fImm_(code) != (_Imm_+i*4))
2850 pc = pc + i*4; continue;
// SW run detection: identical scan for opcode 0x2b (feeds recSWBlock).
2854 if ((psxRegs.code >> 26) == 0x2b) { // SW
2859 p = (char *)PSXM(pc+i*4);
2860 if (p == NULL) recError();
2863 if ((code >> 26) != 0x2b ||
2864 _fRs_(code) != _Rs_ ||
2865 _fImm_(code) != (_Imm_+i*4))
2870 pc = pc + i*4; continue;
// Normal case: dispatch on the primary opcode field.
2876 recBSC[psxRegs.code >> 26]();
2880 if (dump) iDumpBlock(ptr);
// Block epilogue: leave the guest pc pointing past the compiled code.
2887 MOV32ItoM((u32)&psxRegs.pc, pc);
2892 R3000Acpu psxRec = {