#include <stdint.h> //include for uint64_t
#include <assert.h>
-#include "../recomp.h"
-#include "../recomph.h" //include for function prototypes
-#include "../macros.h"
-#include "../r4300.h"
-#include "../ops.h"
-#include "../interupt.h"
-
-#include "../../memory/memory.h"
+#include "emu_if.h" //emulator interface
#include <sys/mman.h>
#define OTHER 23 // Other
#define SPAN 24 // Branch/delay slot spans 2 pages
#define NI 25 // Not implemented
+#define HLECALL 26// PCSX fake opcodes for HLE
/* stubs */
#define CC_STUB 1
void fp_exception();
void fp_exception_ds();
void jump_syscall();
+void jump_syscall_hle();
void jump_eret();
+void jump_hlecall();
+void new_dyna_leave();
// TLB
void TLBWI_new();
#define assem_debug nullf
#define inv_debug nullf
-void tlb_hacks()
+static void tlb_hacks()
{
+#ifndef DISABLE_TLB
// Goldeneye hack
if (strncmp((char *) ROM_HEADER->nom, "GOLDENEYE",9) == 0)
{
}
}
}
+#endif
}
-// Get address from virtual address
-// This is called from the recompiled JR/JALR instructions
-void *get_addr(u_int vaddr)
+// Return the page index used for the jump_in/jump_out tables.
+// XOR with 0x80000000 folds KSEG0 addresses down to low page numbers;
+// any page beyond 2048 is folded into the 2048..4095 overflow range.
+static u_int get_page(u_int vaddr)
{
u_int page=(vaddr^0x80000000)>>12;
- u_int vpage=page;
+#ifndef DISABLE_TLB
+// NOTE(review): assumes tlb_LUT_r[] holds the mapped physical address for
+// TLB-mapped pages (page index > 262143, i.e. above the 1GB window) — confirm.
if(page>262143&&tlb_LUT_r[vaddr>>12]) page=(tlb_LUT_r[vaddr>>12]^0x80000000)>>12;
+#endif
if(page>2048) page=2048+(page&2047);
+ return page;
+}
+
+// Return the page index used for the jump_dirty table.
+// Differs from get_page(): for TLB-mapped addresses it keeps a hash of the
+// *virtual* address (vpage&=2047) instead of translating to physical.
+static u_int get_vpage(u_int vaddr)
+{
+ u_int vpage=(vaddr^0x80000000)>>12;
+#ifndef DISABLE_TLB
if(vpage>262143&&tlb_LUT_r[vaddr>>12]) vpage&=2047; // jump_dirty uses a hash of the virtual address instead
+#endif
if(vpage>2048) vpage=2048+(vpage&2047);
+ return vpage;
+}
+
+// Get address from virtual address
+// This is called from the recompiled JR/JALR instructions
+void *get_addr(u_int vaddr)
+{
+ u_int page=get_page(vaddr);
+ u_int vpage=get_vpage(vaddr);
struct ll_entry *head;
//printf("TRACE: count=%d next=%d (get_addr %x,page %d)\n",Count,next_interupt,vaddr,page);
head=jump_in[page];
invalid_code[vaddr>>12]=0;
memory_map[vaddr>>12]|=0x40000000;
if(vpage<2048) {
+#ifndef DISABLE_TLB
if(tlb_LUT_r[vaddr>>12]) {
invalid_code[tlb_LUT_r[vaddr>>12]>>12]=0;
memory_map[tlb_LUT_r[vaddr>>12]>>12]|=0x40000000;
}
+#endif
restore_candidate[vpage>>3]|=1<<(vpage&7);
}
else restore_candidate[page>>3]|=1<<(page&7);
void *get_addr_32(u_int vaddr,u_int flags)
{
+#ifdef FORCE32
+ return get_addr(vaddr);
+#endif
//printf("TRACE: count=%d next=%d (get_addr_32 %x,flags %x)\n",Count,next_interupt,vaddr,flags);
int *ht_bin=hash_table[((vaddr>>16)^vaddr)&0xFFFF];
if(ht_bin[0]==vaddr) return (void *)ht_bin[1];
if(ht_bin[2]==vaddr) return (void *)ht_bin[3];
- u_int page=(vaddr^0x80000000)>>12;
- u_int vpage=page;
- if(page>262143&&tlb_LUT_r[vaddr>>12]) page=(tlb_LUT_r[vaddr>>12]^0x80000000)>>12;
- if(page>2048) page=2048+(page&2047);
- if(vpage>262143&&tlb_LUT_r[vaddr>>12]) vpage&=2047; // jump_dirty uses a hash of the virtual address instead
- if(vpage>2048) vpage=2048+(vpage&2047);
+ u_int page=get_page(vaddr);
+ u_int vpage=get_vpage(vaddr);
struct ll_entry *head;
head=jump_in[page];
while(head!=NULL) {
invalid_code[vaddr>>12]=0;
memory_map[vaddr>>12]|=0x40000000;
if(vpage<2048) {
+#ifndef DISABLE_TLB
if(tlb_LUT_r[vaddr>>12]) {
invalid_code[tlb_LUT_r[vaddr>>12]>>12]=0;
memory_map[tlb_LUT_r[vaddr>>12]>>12]|=0x40000000;
}
+#endif
restore_candidate[vpage>>3]|=1<<(vpage&7);
}
else restore_candidate[page>>3]|=1<<(page&7);
j++;
break;
}
- if(itype[i+j]==SYSCALL||((source[i+j]&0xfc00003f)==0x0d))
+ if(itype[i+j]==SYSCALL||itype[i+j]==HLECALL||((source[i+j]&0xfc00003f)==0x0d))
{
break;
}
// Add virtual address mapping for 32-bit compiled block
void ll_add_32(struct ll_entry **head,int vaddr,u_int reg32,void *addr)
{
- struct ll_entry *new_entry;
- new_entry=malloc(sizeof(struct ll_entry));
- assert(new_entry!=NULL);
- new_entry->vaddr=vaddr;
- new_entry->reg32=reg32;
- new_entry->addr=addr;
- new_entry->next=*head;
- *head=new_entry;
+ // Delegate the list insertion to ll_add(); ll_add() is assumed to prepend,
+ // so *head is the entry just added and its reg32 can be set afterwards.
+ ll_add(head,vaddr,addr);
+#ifndef FORCE32
+ // reg32 marks which registers were 32-bit at entry; FORCE32 builds treat
+ // everything as 32-bit, so the field is not needed there.
+ (*head)->reg32=reg32;
+#endif
}
// Check if an address is already compiled
if(((ht_bin[3]-MAX_OUTPUT_BLOCK_SIZE-(u_int)out)<<(32-TARGET_SIZE_2))>0x60000000+(MAX_OUTPUT_BLOCK_SIZE<<(32-TARGET_SIZE_2)))
if(isclean(ht_bin[3])) return (void *)ht_bin[3];
}
- u_int page=(vaddr^0x80000000)>>12;
- if(page>262143&&tlb_LUT_r[vaddr>>12]) page=(tlb_LUT_r[vaddr>>12]^0x80000000)>>12;
- if(page>2048) page=2048+(page&2047);
+ u_int page=get_page(vaddr);
struct ll_entry *head;
head=jump_in[page];
while(head!=NULL) {
void invalidate_block(u_int block)
{
int modified;
- u_int page,vpage;
- page=vpage=block^0x80000;
- if(page>262143&&tlb_LUT_r[block]) page=(tlb_LUT_r[block]^0x80000000)>>12;
- if(page>2048) page=2048+(page&2047);
- if(vpage>262143&&tlb_LUT_r[block]) vpage&=2047; // jump_dirty uses a hash of the virtual address instead
- if(vpage>2048) vpage=2048+(vpage&2047);
+ u_int page=get_page(block<<12);
+ u_int vpage=get_vpage(block<<12);
inv_debug("INVALIDATE: %x (%d)\n",block<<12,page);
//inv_debug("invalid_code[block]=%d\n",invalid_code[block]);
u_int first,last;
if((((end-1-(u_int)rdram)>>12)&2047)>last) last=((end-1-(u_int)rdram)>>12)&2047;
}
}
+#ifndef DISABLE_TLB
if(page<2048&&(signed int)start>=(signed int)0xC0000000&&(signed int)end>=(signed int)0xC0000000) {
if(((start+memory_map[start>>12]-(u_int)rdram)>>12)<=page&&((end-1+memory_map[(end-1)>>12]-(u_int)rdram)>>12)>=page) {
if((((start+memory_map[start>>12]-(u_int)rdram)>>12)&2047)<first) first=((start+memory_map[start>>12]-(u_int)rdram)>>12)&2047;
if((((end-1+memory_map[(end-1)>>12]-(u_int)rdram)>>12)&2047)>last) last=((end-1+memory_map[(end-1)>>12]-(u_int)rdram)>>12)&2047;
}
}
+#endif
}
head=head->next;
}
// Don't trap writes
invalid_code[block]=1;
+#ifndef DISABLE_TLB
// If there is a valid TLB entry for this page, remove write protect
if(tlb_LUT_w[block]) {
assert(tlb_LUT_r[block]==tlb_LUT_w[block]);
if(real_block>=0x80000&&real_block<0x80800) memory_map[real_block]=((u_int)rdram-0x80000000)>>2;
}
else if(block>=0x80000&&block<0x80800) memory_map[block]=((u_int)rdram-0x80000000)>>2;
+#endif
#ifdef __arm__
if(modified)
__clear_cache((void *)BASE_ADDR,(void *)BASE_ADDR+(1<<TARGET_SIZE_2));
#ifdef USE_MINI_HT
memset(mini_ht,-1,sizeof(mini_ht));
#endif
+ #ifndef DISABLE_TLB
// TLB
for(page=0;page<0x100000;page++) {
if(tlb_LUT_r[page]) {
if(page==0x80000) page=0xC0000;
}
tlb_hacks();
+ #endif
}
// Add an entry to jump_out after making a link
void add_link(u_int vaddr,void *src)
{
- u_int page=(vaddr^0x80000000)>>12;
- if(page>262143&&tlb_LUT_r[vaddr>>12]) page=(tlb_LUT_r[vaddr>>12]^0x80000000)>>12;
- if(page>4095) page=2048+(page&2047);
+ u_int page=get_page(vaddr);
inv_debug("add_link: %x -> %x (%d)\n",(int)src,vaddr,page);
ll_add(jump_out+page,vaddr,src);
//int ptr=get_pointer(src);
void * clean_addr=(void *)get_clean_addr((int)head->addr);
if((((u_int)clean_addr-(u_int)out)<<(32-TARGET_SIZE_2))>0x60000000+(MAX_OUTPUT_BLOCK_SIZE<<(32-TARGET_SIZE_2))) {
u_int ppage=page;
+#ifndef DISABLE_TLB
if(page<2048&&tlb_LUT_r[head->vaddr>>12]) ppage=(tlb_LUT_r[head->vaddr>>12]^0x80000000)>>12;
+#endif
inv_debug("INV: Restored %x (%x/%x)\n",head->vaddr, (int)head->addr, (int)clean_addr);
//printf("page=%x, addr=%x\n",page,head->vaddr);
//assert(head->vaddr>>12==(page|0x80000));
case RJUMP:
case FJUMP:
case SYSCALL:
+ case HLECALL:
case SPAN:
assem_debug("jump in the delay slot. this shouldn't happen.\n");//exit(1);
printf("Disabled speculative precompilation\n");
if((dirty>>hr)&1) {
if(regmap[hr]<64) {
emit_storereg(r,hr);
+#ifndef FORCE32
if((is32>>regmap[hr])&1) {
emit_sarimm(hr,31,hr);
emit_storereg(r|64,hr);
}
+#endif
}else{
emit_storereg(r|64,hr);
}
sum^=((u_int *)reg)[i];
return sum;
}
-int fchecksum()
-{
- int i;
- int sum=0;
- for(i=0;i<64;i++)
- sum^=((u_int *)reg_cop1_fgr_64)[i];
- return sum;
-}
+// Debug helper: print all 32 GPRs as hi:lo 32-bit halves, and (unless
+// DISABLE_COP1) the 32 single-precision FPR views as well.
void rlist()
{
int i;
for(i=0;i<32;i++)
printf("r%d:%8x%8x ",i,((int *)(reg+i))[1],((int *)(reg+i))[0]);
printf("\n");
+#ifndef DISABLE_COP1
printf("TRACE: ");
for(i=0;i<32;i++)
printf("f%d:%8x%8x ",i,((int*)reg_cop1_simple[i])[1],*((int*)reg_cop1_simple[i]));
printf("\n");
+#endif
}
void enabletrace()
//gen_tlb_addr_r(tl,map);
//emit_movsbl_indexed((int)rdram-0x80000000,tl,tl);
int x=0;
+#ifdef BIG_ENDIAN_MIPS
if(!c) emit_xorimm(addr,3,tl);
else x=((constmap[i][s]+offset)^3)-(constmap[i][s]+offset);
+#else
+ if(c) x=(constmap[i][s]+offset)-(constmap[i][s]+offset);
+ else if (tl!=addr) emit_mov(addr,tl);
+#endif
emit_movsbl_indexed_tlb(x,tl,map,tl);
}
if(jaddr)
#endif
{
int x=0;
+#ifdef BIG_ENDIAN_MIPS
if(!c) emit_xorimm(addr,2,tl);
else x=((constmap[i][s]+offset)^2)-(constmap[i][s]+offset);
+#else
+ if(c) x=(constmap[i][s]+offset)-(constmap[i][s]+offset);
+ else if (tl!=addr) emit_mov(addr,tl);
+#endif
//#ifdef
//emit_movswl_indexed_tlb(x,tl,map,tl);
//else
//gen_tlb_addr_r(tl,map);
//emit_movzbl_indexed((int)rdram-0x80000000,tl,tl);
int x=0;
+#ifdef BIG_ENDIAN_MIPS
if(!c) emit_xorimm(addr,3,tl);
else x=((constmap[i][s]+offset)^3)-(constmap[i][s]+offset);
+#else
+ if(c) x=(constmap[i][s]+offset)-(constmap[i][s]+offset);
+ else if (tl!=addr) emit_mov(addr,tl);
+#endif
emit_movzbl_indexed_tlb(x,tl,map,tl);
}
if(jaddr)
#endif
{
int x=0;
+#ifdef BIG_ENDIAN_MIPS
if(!c) emit_xorimm(addr,2,tl);
else x=((constmap[i][s]+offset)^2)-(constmap[i][s]+offset);
+#else
+ if(c) x=(constmap[i][s]+offset)-(constmap[i][s]+offset);
+ else if (tl!=addr) emit_mov(addr,tl);
+#endif
//#ifdef
//emit_movzwl_indexed_tlb(x,tl,map,tl);
//#else
int addr,temp;
int offset;
int jaddr=0,jaddr2,type;
- int memtarget,c=0;
+ int memtarget=0,c=0;
int agr=AGEN1+(i&1);
u_int hr,reglist=0;
th=get_reg(i_regs->regmap,rs2[i]|64);
if (opcode[i]==0x28) { // SB
if(!c||memtarget) {
int x=0;
+#ifdef BIG_ENDIAN_MIPS
if(!c) emit_xorimm(addr,3,temp);
else x=((constmap[i][s]+offset)^3)-(constmap[i][s]+offset);
+#else
+ if(c) x=(constmap[i][s]+offset)-(constmap[i][s]+offset);
+ else if (addr!=temp) emit_mov(addr,temp);
+#endif
//gen_tlb_addr_w(temp,map);
//emit_writebyte_indexed(tl,(int)rdram-0x80000000,temp);
emit_writebyte_indexed_tlb(tl,x,temp,map,temp);
if (opcode[i]==0x29) { // SH
if(!c||memtarget) {
int x=0;
+#ifdef BIG_ENDIAN_MIPS
if(!c) emit_xorimm(addr,2,temp);
else x=((constmap[i][s]+offset)^2)-(constmap[i][s]+offset);
+#else
+ if(c) x=(constmap[i][s]+offset)-(constmap[i][s]+offset);
+ else if (addr!=temp) emit_mov(addr,temp);
+#endif
//#ifdef
//emit_writehword_indexed_tlb(tl,x,temp,map,temp);
//#else
}
type=STORED_STUB;
}
+ if(!using_tlb&&(!c||memtarget))
+ // addr could be a temp, make sure it survives STORE*_STUB
+ reglist|=1<<addr;
if(jaddr) {
add_stub(type,jaddr,(int)out,i,addr,(int)i_regs,ccadj[i],reglist);
} else if(!memtarget) {
if(!rs2[i]) temp2=th=tl;
}
+#ifndef BIG_ENDIAN_MIPS
+ emit_xorimm(temp,3,temp);
+#endif
emit_testimm(temp,2);
case2=(int)out;
emit_jne(0);
void c1ls_assemble(int i,struct regstat *i_regs)
{
+#ifndef DISABLE_COP1
int s,th,tl;
int temp,ar;
int map=-1;
emit_call((int)memdebug);
emit_popa();
}/**/
+#else
+ cop1_unusable(i, i_regs);
+#endif
}
#ifndef multdiv_assemble
assert(!is_delayslot);
emit_movimm(start+i*4,EAX); // Get PC
emit_addimm(HOST_CCREG,CLOCK_DIVIDER*ccadj[i],HOST_CCREG); // CHECK: is this right? There should probably be an extra cycle...
- emit_jmp((int)jump_syscall);
+ emit_jmp((int)jump_syscall_hle); // XXX
+}
+
+// Emit code for a PCSX HLECALL pseudo-op (see the HLECALL itype): pass the
+// address of the next instruction and the raw opcode word to jump_hlecall,
+// which dispatches to the high-level-emulation handler.
+void hlecall_assemble(int i,struct regstat *i_regs)
+{
+ signed char ccreg=get_reg(i_regs->regmap,CCREG);
+ assert(ccreg==HOST_CCREG);
+ assert(!is_delayslot);
+ // Host reg 0 = PC after the HLECALL; host reg 1 = instruction word.
+ emit_movimm(start+i*4+4,0); // Get PC
+ emit_movimm(source[i],1); // opcode
+ // Update the cycle count before leaving the translated block.
+ emit_addimm(HOST_CCREG,CLOCK_DIVIDER*ccadj[i],HOST_CCREG); // XXX
+ emit_jmp((int)jump_hlecall); // XXX
+}
void ds_assemble(int i,struct regstat *i_regs)
case MOV:
mov_assemble(i,i_regs);break;
case SYSCALL:
+ case HLECALL:
case SPAN:
case UJUMP:
case RJUMP:
if((i_dirty>>hr)&1) {
if(i_regmap[hr]<64) {
emit_storereg(i_regmap[hr],hr);
+#ifndef FORCE32
if( ((i_is32>>i_regmap[hr])&1) ) {
#ifdef DESTRUCTIVE_WRITEBACK
emit_sarimm(hr,31,hr);
emit_storereg(i_regmap[hr]|64,HOST_TEMPREG);
#endif
}
+#endif
}else{
if( !((i_is32>>(i_regmap[hr]&63))&1) ) {
emit_storereg(i_regmap[hr],hr);
if((i_dirty>>hr)&1) {
if(i_regmap[hr]<64) {
emit_storereg(i_regmap[hr],hr);
+#ifndef FORCE32
if( ((i_is32>>i_regmap[hr])&1) ) {
#ifdef DESTRUCTIVE_WRITEBACK
emit_sarimm(hr,31,hr);
emit_storereg(i_regmap[hr]|64,HOST_TEMPREG);
#endif
}
+#endif
}else{
if( !((i_is32>>(i_regmap[hr]&63))&1) ) {
emit_storereg(i_regmap[hr],hr);
case MOV:
mov_assemble(t,®s[t]);break;
case SYSCALL:
+ case HLECALL:
case SPAN:
case UJUMP:
case RJUMP:
{
assem_debug("initial delay slot:\n");
u_int vaddr=start+1;
- u_int page=(0x80000000^vaddr)>>12;
- u_int vpage=page;
- if(page>262143&&tlb_LUT_r[vaddr>>12]) page=(tlb_LUT_r[page^0x80000]^0x80000000)>>12;
- if(page>2048) page=2048+(page&2047);
- if(vpage>262143&&tlb_LUT_r[vaddr>>12]) vpage&=2047; // jump_dirty uses a hash of the virtual address instead
- if(vpage>2048) vpage=2048+(vpage&2047);
+ u_int page=get_page(vaddr);
+ u_int vpage=get_vpage(vaddr);
ll_add(jump_dirty+vpage,vaddr,(void *)out);
do_dirty_stub_ds();
ll_add(jump_in+page,vaddr,(void *)out);
case MOV:
mov_assemble(0,®s[0]);break;
case SYSCALL:
+ case HLECALL:
case SPAN:
case UJUMP:
case RJUMP:
}
}
}
- else if(itype[i]==SYSCALL)
+ else if(itype[i]==SYSCALL||itype[i]==HLECALL)
{
// SYSCALL instruction (software interrupt)
u=1;
// Save it
unneeded_reg[i]=u;
unneeded_reg_upper[i]=uu;
+#ifdef FORCE32
+ unneeded_reg_upper[i]=-1LL;
+#endif
/*
printf("ur (%d,%d) %x: ",istart,iend,start+i*4);
printf("U:");
case FCOMP:
break;
case SYSCALL:
+ case HLECALL:
break;
default:
break;
if((regs[i].was32>>dep2[i+1])&1) r32|=1LL<<dep2[i+1];
}
}
- else if(itype[i]==SYSCALL)
+ else if(itype[i]==SYSCALL||itype[i]==HLECALL)
{
// SYSCALL instruction (software interrupt)
r32=0;
}
}
}
- else if(itype[i]==SYSCALL)
+ else if(itype[i]==SYSCALL||itype[i]==HLECALL)
{
// SYSCALL instruction (software interrupt)
will_dirty_i=0;
PROT_READ | PROT_WRITE | PROT_EXEC,
MAP_FIXED | MAP_PRIVATE | MAP_ANONYMOUS,
-1, 0) <= 0) {printf("mmap() failed\n");}
+#ifdef MUPEN64
rdword=&readmem_dword;
fake_pc.f.r.rs=&readmem_dword;
fake_pc.f.r.rt=&readmem_dword;
fake_pc.f.r.rd=&readmem_dword;
+#endif
int n;
for(n=0x80000;n<0x80800;n++)
invalid_code[n]=1;
memory_map[n]=((u_int)rdram-0x80000000)>>2;
for(n=526336;n<1048576;n++) // 0x80800000 .. 0xFFFFFFFF
memory_map[n]=-1;
+#ifdef MUPEN64
for(n=0;n<0x8000;n++) { // 0 .. 0x7FFFFFFF
writemem[n] = write_nomem_new;
writememb[n] = write_nomemb_new;
writememh[n] = write_nomemh_new;
+#ifndef FORCE32
writememd[n] = write_nomemd_new;
+#endif
readmem[n] = read_nomem_new;
readmemb[n] = read_nomemb_new;
readmemh[n] = read_nomemh_new;
+#ifndef FORCE32
readmemd[n] = read_nomemd_new;
+#endif
}
for(n=0x8000;n<0x8080;n++) { // 0x80000000 .. 0x807FFFFF
writemem[n] = write_rdram_new;
writememb[n] = write_rdramb_new;
writememh[n] = write_rdramh_new;
+#ifndef FORCE32
writememd[n] = write_rdramd_new;
+#endif
}
for(n=0xC000;n<0x10000;n++) { // 0xC0000000 .. 0xFFFFFFFF
writemem[n] = write_nomem_new;
writememb[n] = write_nomemb_new;
writememh[n] = write_nomemh_new;
+#ifndef FORCE32
writememd[n] = write_nomemd_new;
+#endif
readmem[n] = read_nomem_new;
readmemb[n] = read_nomemb_new;
readmemh[n] = read_nomemh_new;
+#ifndef FORCE32
readmemd[n] = read_nomemd_new;
+#endif
}
+#endif
tlb_hacks();
arch_init();
}
//rlist();
start = (u_int)addr&~3;
//assert(((u_int)addr&1)==0);
+#ifdef PCSX
+ if (Config.HLE && start == 0x80001000) {
+ // XXX: is this enough? Maybe check hleSoftCall?
+ u_int page=get_page(start);
+ ll_add(jump_in+page,start,out);
+ invalid_code[start>>12]=0;
+ emit_movimm(start,0);
+ emit_writeword(0,(int)&pcaddr);
+ emit_jmp((int)new_dyna_leave); // enough??
+ return 0;
+ }
+ else if ((u_int)addr < 0x00200000) {
+ // used for BIOS calls mostly?
+ source = (u_int *)((u_int)rdram+start-0);
+ pagelimit = 0x00200000;
+ }
+ else
+#endif
+#ifdef MUPEN64
if ((int)addr >= 0xa4000000 && (int)addr < 0xa4001000) {
source = (u_int *)((u_int)SP_DMEM+start-0xa4000000);
pagelimit = 0xa4001000;
}
- else if ((int)addr >= 0x80000000 && (int)addr < 0x80800000) {
+ else
+#endif
+ if ((int)addr >= 0x80000000 && (int)addr < 0x80800000) {
source = (u_int *)((u_int)rdram+start-0x80000000);
pagelimit = 0x80800000;
}
+#ifndef DISABLE_TLB
else if ((signed int)addr >= (signed int)0xC0000000) {
//printf("addr=%x mm=%x\n",(u_int)addr,(memory_map[start>>12]<<2));
//if(tlb_LUT_r[start>>12])
}
//printf("source= %x\n",(int)source);
}
+#endif
else {
printf("Compile at bogus memory address: %x \n", (int)addr);
exit(1);
case 0x37: strcpy(insn[i],"LD"); type=LOAD; break;
case 0x38: strcpy(insn[i],"SC"); type=NI; break;
case 0x39: strcpy(insn[i],"SWC1"); type=C1LS; break;
+#ifdef PCSX
+ case 0x3B: strcpy(insn[i],"HLECALL"); type=HLECALL; break;
+#endif
case 0x3C: strcpy(insn[i],"SCD"); type=NI; break;
case 0x3D: strcpy(insn[i],"SDC1"); type=C1LS; break;
case 0x3F: strcpy(insn[i],"SD"); type=STORE; break;
- default: strcpy(insn[i],"???"); type=NI; break;
+ default: strcpy(insn[i],"???"); type=NI;
+ printf("NI %08x @%08x\n", source[i], addr + i*4);
+ break;
}
itype[i]=type;
opcode2[i]=op2;
rt2[i]=0;
break;
case SYSCALL:
+ case HLECALL:
rs1[i]=CCREG;
rs2[i]=0;
rt1[i]=0;
if(i>MAXBLOCK/2) done=1;
}
if(i>0&&itype[i-1]==SYSCALL&&stop_after_jal) done=1;
+ if(itype[i-1]==HLECALL) done=1;
assert(i<MAXBLOCK-1);
if(start+i*4==pagelimit-4) done=1;
assert(start+i*4<pagelimit);
current.is32=temp_is32;
}
}
+#ifdef FORCE32
+ memset(p32, 0xff, sizeof(p32));
+ current.is32=-1LL;
+#endif
+
memcpy(regmap_pre[i],current.regmap,sizeof(current.regmap));
regs[i].wasconst=current.isconst;
regs[i].was32=current.is32;
fcomp_alloc(¤t,i);
break;
case SYSCALL:
+ case HLECALL:
syscall_alloc(¤t,i);
break;
case SPAN:
// Count cycles in between branches
ccadj[i]=cc;
- if(i>0&&(itype[i-1]==RJUMP||itype[i-1]==UJUMP||itype[i-1]==CJUMP||itype[i-1]==SJUMP||itype[i-1]==FJUMP||itype[i]==SYSCALL))
+ if(i>0&&(itype[i-1]==RJUMP||itype[i-1]==UJUMP||itype[i-1]==CJUMP||itype[i-1]==SJUMP||itype[i-1]==FJUMP||itype[i]==SYSCALL||itype[i]==HLECALL))
{
cc=0;
}
}
}
}
- else if(itype[i]==SYSCALL)
+ else if(itype[i]==SYSCALL||itype[i]==HLECALL)
{
// SYSCALL instruction (software interrupt)
nr=0;
if((regs[i].was32>>dep2[i+1])&1) r32|=1LL<<dep2[i+1];
}
}
- else if(itype[i]==SYSCALL)
+ else if(itype[i]==SYSCALL||itype[i]==HLECALL)
{
// SYSCALL instruction (software interrupt)
r32=0;
else printf(" r%d",r);
}
}
+#ifndef FORCE32
printf(" UU:");
for(r=1;r<=CCREG;r++) {
if(((unneeded_reg_upper[i]&~unneeded_reg[i])>>r)&1) {
else printf(" r%d",r);
}
}
+#endif
printf("\n");
#if defined(__i386__) || defined(__x86_64__)
printf("pre: eax=%d ecx=%d edx=%d ebx=%d ebp=%d esi=%d edi=%d\n",regmap_pre[i][0],regmap_pre[i][1],regmap_pre[i][2],regmap_pre[i][3],regmap_pre[i][5],regmap_pre[i][6],regmap_pre[i][7]);
#endif
printf("\n");
}
+#ifndef FORCE32
printf(" 32:");
for(r=0;r<=CCREG;r++) {
if((regs[i].is32>>r)&1) {
}
}
printf("\n");
+#endif
/*printf(" p32:");
for(r=0;r<=CCREG;r++) {
if((p32[i]>>r)&1) {
if((branch_regs[i].dirty>>10)&1) printf("r10 ");
if((branch_regs[i].dirty>>12)&1) printf("r12 ");
#endif
+#ifndef FORCE32
printf(" 32:");
for(r=0;r<=CCREG;r++) {
if((branch_regs[i].is32>>r)&1) {
}
}
printf("\n");
+#endif
}
}
mov_assemble(i,®s[i]);break;
case SYSCALL:
syscall_assemble(i,®s[i]);break;
+ case HLECALL:
+ hlecall_assemble(i,®s[i]);break;
case UJUMP:
ujump_assemble(i,®s[i]);ds=1;break;
case RJUMP:
if(instr_addr[i]) // TODO - delay slots (=null)
{
u_int vaddr=start+i*4;
- u_int page=(0x80000000^vaddr)>>12;
- u_int vpage=page;
- if(page>262143&&tlb_LUT_r[vaddr>>12]) page=(tlb_LUT_r[page^0x80000]^0x80000000)>>12;
- if(page>2048) page=2048+(page&2047);
- if(vpage>262143&&tlb_LUT_r[vaddr>>12]) vpage&=2047; // jump_dirty uses a hash of the virtual address instead
- if(vpage>2048) vpage=2048+(vpage&2047);
+ u_int page=get_page(vaddr);
+ u_int vpage=get_vpage(vaddr);
literal_pool(256);
//if(!(is32[i]&(~unneeded_reg_upper[i])&~(1LL<<CCREG)))
if(!requires_32bit[i])
// Trap writes to any of the pages we compiled
for(i=start>>12;i<=(start+slen*4)>>12;i++) {
invalid_code[i]=0;
+#ifndef DISABLE_TLB
memory_map[i]|=0x40000000;
if((signed int)start>=(signed int)0xC0000000) {
assert(using_tlb);
memory_map[j]|=0x40000000;
//printf("write protect physical page: %x (virtual %x)\n",j<<12,start);
}
+#endif
}
/* Pass 10 - Free memory by expiring oldest blocks */