[picodrive.git] / cpu / sh2 / compiler.c
1/*
2 * SH2 recompiler
3 * (C) notaz, 2009,2010,2013
4 *
5 * This work is licensed under the terms of MAME license.
6 * See COPYING file in the top-level directory.
7 *
8 * notes:
9 * - tcache, block descriptor, link buffer overflows result in sh2_translate()
10 * failure, followed by full tcache invalidation for that region
11 * - jumps between blocks are tracked for SMC handling (in block_entry->links),
12 * except jumps between different tcaches
13 *
14 * implemented:
15 * - static register allocation
16 * - remaining register caching and tracking in temporaries
17 * - block-local branch linking
18 * - block linking (except between tcaches)
19 * - some constant propagation
20 *
21 * TODO:
22 * - better constant propagation
23 * - stack caching?
24 * - bug fixing
25 */
26#include <stddef.h>
27#include <stdio.h>
28#include <stdlib.h>
29#include <assert.h>
30
31#include "../../pico/pico_int.h"
32#include "sh2.h"
33#include "compiler.h"
34#include "../drc/cmn.h"
35#include "../debug.h"
36
37// features
38#define PROPAGATE_CONSTANTS 1
39#define LINK_BRANCHES 1
40
41// limits (per block)
42#define MAX_BLOCK_SIZE (BLOCK_INSN_LIMIT * 6 * 6)
43
44// max literal offset from the block end
45#define MAX_LITERAL_OFFSET 32*2
46#define MAX_LITERALS (BLOCK_INSN_LIMIT / 4)
47#define MAX_LOCAL_BRANCHES 32
48
49// debug stuff
50// 1 - warnings/errors
51// 2 - block info/smc
52// 4 - asm
53// 8 - runtime block entry log
54// {
55#ifndef DRC_DEBUG
56#define DRC_DEBUG 0
57#endif
58
59#if DRC_DEBUG
60#define dbg(l,...) { \
61 if ((l) & DRC_DEBUG) \
62 elprintf(EL_STATUS, ##__VA_ARGS__); \
63}
64#include "mame/sh2dasm.h"
65#include <platform/libpicofe/linux/host_dasm.h>
66static int insns_compiled, hash_collisions, host_insn_count;
67#define COUNT_OP \
68 host_insn_count++
69#else // !DRC_DEBUG
70#define COUNT_OP
71#define dbg(...)
72#endif
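
// note: DRC_DEBUG is a bitmask of the levels listed above; e.g. a build with
// -DDRC_DEBUG=3 would enable warnings/errors plus block info, and matching
// dbg() calls then go through elprintf(EL_STATUS, ...)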
73
74///
75#define FETCH_OP(pc) \
76 dr_pc_base[(pc) / 2]
77
78#define FETCH32(a) \
79 ((dr_pc_base[(a) / 2] << 16) | dr_pc_base[(a) / 2 + 1])
80
81#define CHECK_UNHANDLED_BITS(mask, label) { \
82 if ((op & (mask)) != 0) \
83 goto label; \
84}
85
86#define GET_Fx() \
87 ((op >> 4) & 0x0f)
88
89#define GET_Rm GET_Fx
90
91#define GET_Rn() \
92 ((op >> 8) & 0x0f)
93
94#define BITMASK1(v0) (1 << (v0))
95#define BITMASK2(v0,v1) ((1 << (v0)) | (1 << (v1)))
96#define BITMASK3(v0,v1,v2) (BITMASK2(v0,v1) | (1 << (v2)))
97#define BITMASK4(v0,v1,v2,v3) (BITMASK3(v0,v1,v2) | (1 << (v3)))
98#define BITMASK5(v0,v1,v2,v3,v4) (BITMASK4(v0,v1,v2,v3) | (1 << (v4)))
99
100#define SHR_T SHR_SR // might make them separate someday
101
102static struct op_data {
103 u8 op;
104 u8 cycles;
105 u8 size; // 0, 1, 2 - byte, word, long
106 s8 rm; // branch or load/store data reg
107 u32 source; // bitmask of src regs
108 u32 dest; // bitmask of dest regs
109 u32 imm; // immediate/io address/branch target
110 // (for literal - address, not value)
111} ops[BLOCK_INSN_LIMIT];
112
113enum op_types {
114 OP_UNHANDLED = 0,
115 OP_BRANCH,
116 OP_BRANCH_CT, // conditional, branch if T set
117 OP_BRANCH_CF, // conditional, branch if T clear
118 OP_BRANCH_R, // indirect
119 OP_BRANCH_RF, // indirect far (PC + Rm)
120 OP_SETCLRT, // T flag set/clear
121 OP_MOVE, // register move
122 OP_LOAD_POOL, // literal pool load, imm is address
123 OP_MOVA,
124 OP_SLEEP,
125 OP_RTE,
126};
127
128#ifdef DRC_SH2
129
130static int literal_disabled_frames;
131
132#if (DRC_DEBUG & 4)
133static u8 *tcache_dsm_ptrs[3];
134static char sh2dasm_buff[64];
135#define do_host_disasm(tcid) \
136 host_dasm(tcache_dsm_ptrs[tcid], tcache_ptr - tcache_dsm_ptrs[tcid]); \
137 tcache_dsm_ptrs[tcid] = tcache_ptr
138#else
139#define do_host_disasm(x)
140#endif
141
142#if (DRC_DEBUG & 8) || defined(PDB)
143static void REGPARM(3) *sh2_drc_log_entry(void *block, SH2 *sh2, u32 sr)
144{
145 if (block != NULL) {
146 dbg(8, "= %csh2 enter %08x %p, c=%d", sh2->is_slave ? 's' : 'm',
147 sh2->pc, block, (signed int)sr >> 12);
148 pdb_step(sh2, sh2->pc);
149 }
150 return block;
151}
152#endif
153// } debug
154
155#define TCACHE_BUFFERS 3
156
157// we have 3 translation cache buffers, split from one drc/cmn buffer.
158// BIOS shares tcache with data array because it's only used for init
159// and can be discarded early
160// XXX: need to tune sizes
161static const int tcache_sizes[TCACHE_BUFFERS] = {
162 DRC_TCACHE_SIZE * 6 / 8, // ROM (rarely used), DRAM
163 DRC_TCACHE_SIZE / 8, // BIOS, data array in master sh2
164 DRC_TCACHE_SIZE / 8, // ... slave
165};
166
167static u8 *tcache_bases[TCACHE_BUFFERS];
168static u8 *tcache_ptrs[TCACHE_BUFFERS];
169
170// ptr for code emitters
171static u8 *tcache_ptr;
172
173#define MAX_BLOCK_ENTRIES (BLOCK_INSN_LIMIT / 8)
174
175struct block_link {
176 u32 target_pc;
177 void *jump; // insn address
178 struct block_link *next; // either in block_entry->links or unresolved_links
179};
180
181struct block_entry {
182 u32 pc;
183 void *tcache_ptr; // translated block for above PC
184 struct block_entry *next; // next block in hash_table with same pc hash
185 struct block_link *links; // links to this entry
186#if (DRC_DEBUG & 2)
187 struct block_desc *block;
188#endif
189};
190
191struct block_desc {
192 u32 addr; // block start SH2 PC address
193 u16 size; // size of recompiled insns + literal pool
194 u16 size_nolit; // same without literals
195#if (DRC_DEBUG & 2)
196 int refcount;
197#endif
198 int entry_count;
199 struct block_entry entryp[MAX_BLOCK_ENTRIES];
200};
201
202static const int block_max_counts[TCACHE_BUFFERS] = {
203 4*1024,
204 256,
205 256,
206};
207static struct block_desc *block_tables[TCACHE_BUFFERS];
208static int block_counts[TCACHE_BUFFERS];
209
210// we have block_link_pool to avoid using mallocs
211static const int block_link_pool_max_counts[TCACHE_BUFFERS] = {
212 4*1024,
213 256,
214 256,
215};
216static struct block_link *block_link_pool[TCACHE_BUFFERS];
217static int block_link_pool_counts[TCACHE_BUFFERS];
218static struct block_link *unresolved_links[TCACHE_BUFFERS];
219
220// used for invalidation
221static const int ram_sizes[TCACHE_BUFFERS] = {
222 0x40000,
223 0x1000,
224 0x1000,
225};
226#define INVAL_PAGE_SIZE 0x100
227
228struct block_list {
229 struct block_desc *block;
230 struct block_list *next;
231};
232
233// array of pointers to block_lists for RAM and 2 data arrays
234// each array has len: sizeof(mem) / INVAL_PAGE_SIZE
235static struct block_list **inval_lookup[TCACHE_BUFFERS];
236
237static const int hash_table_sizes[TCACHE_BUFFERS] = {
238 0x1000,
239 0x100,
240 0x100,
241};
242static struct block_entry **hash_tables[TCACHE_BUFFERS];
243
244#define HASH_FUNC(hash_tab, addr, mask) \
245 (hash_tab)[(((addr) >> 20) ^ ((addr) >> 2)) & (mask)]
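
// the hash folds the region bits (addr >> 20) into the word-address bits
// (addr >> 2); e.g. with mask 0xfff, pc 0x06000410 falls into bucket
// (0x60 ^ 0x01800104) & 0xfff = 0x164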
246
247// host register tracking
248enum {
249 HR_FREE,
250 HR_CACHED, // 'val' has sh2_reg_e
251// HR_CONST, // 'val' has a constant
252 HR_TEMP, // reg used for temp storage
253};
254
255enum {
256 HRF_DIRTY = 1 << 0, // reg has "dirty" value to be written to ctx
257 HRF_LOCKED = 1 << 1, // HR_CACHED can't be evicted
258};
259
260typedef struct {
261 u32 hreg:5; // "host" reg
262 u32 greg:5; // "guest" reg
263 u32 type:3;
264 u32 flags:3;
265 u32 stamp:16; // kind of a timestamp
266} temp_reg_t;
267
268// note: reg_temp[] must have at least as many registers as
269// the handlers use in the worst case (currently 4)
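
// the reg_map_g2h[] tables below are indexed by sh2_reg_e (r0..r15, then PC,
// PPC, PR, SR, GBR, VBR, MACH, MACL) and give the host register statically
// allocated to each guest register, or -1 if it is not statically mapped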
270#ifdef __arm__
271#include "../drc/emit_arm.c"
272
273#ifndef __MACH__
274
275static const int reg_map_g2h[] = {
276 4, 5, 6, 7,
277 8, -1, -1, -1,
278 -1, -1, -1, -1,
279 -1, -1, -1, 9, // r12 .. sp
280 -1, -1, -1, 10, // SHR_PC, SHR_PPC, SHR_PR, SHR_SR,
281 -1, -1, -1, -1, // SHR_GBR, SHR_VBR, SHR_MACH, SHR_MACL,
282};
283
284#else
285
286// no r9..
287static const int reg_map_g2h[] = {
288 4, 5, 6, 7,
289 -1, -1, -1, -1,
290 -1, -1, -1, -1,
291 -1, -1, -1, 8, // r12 .. sp
292 -1, -1, -1, 10, // SHR_PC, SHR_PPC, SHR_PR, SHR_SR,
293 -1, -1, -1, -1, // SHR_GBR, SHR_VBR, SHR_MACH, SHR_MACL,
294};
295
296#endif
297
298static temp_reg_t reg_temp[] = {
299 { 0, },
300 { 1, },
301 { 12, },
302 { 14, },
303 { 2, },
304 { 3, },
305};
306
307#elif defined(__i386__)
308#include "../drc/emit_x86.c"
309
310static const int reg_map_g2h[] = {
311 xSI,-1, -1, -1,
312 -1, -1, -1, -1,
313 -1, -1, -1, -1,
314 -1, -1, -1, -1,
315 -1, -1, -1, xDI,
316 -1, -1, -1, -1,
317};
318
319// ax, cx, dx are usually temporaries by convention
320static temp_reg_t reg_temp[] = {
321 { xAX, },
322 { xBX, },
323 { xCX, },
324 { xDX, },
325};
326
327#else
328#error unsupported arch
329#endif
330
331#define T 0x00000001
332#define S 0x00000002
333#define I 0x000000f0
334#define Q 0x00000100
335#define M 0x00000200
336#define T_save 0x00000800
337
338#define I_SHIFT 4
339#define Q_SHIFT 8
340#define M_SHIFT 9
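
// besides the architectural bits above, this DRC keeps the remaining cycle
// budget in the otherwise unused upper part of SR, scaled by 1 << 12 (see
// FLUSH_CYCLES and the cycle check emitted at block entries), and uses T_save
// as a scratch copy of T around delay slots (see DELAY_SAVE_T)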
341
342static void REGPARM(1) (*sh2_drc_entry)(SH2 *sh2);
343static void (*sh2_drc_dispatcher)(void);
344static void (*sh2_drc_exit)(void);
345static void (*sh2_drc_test_irq)(void);
346
347static u32 REGPARM(2) (*sh2_drc_read8)(u32 a, SH2 *sh2);
348static u32 REGPARM(2) (*sh2_drc_read16)(u32 a, SH2 *sh2);
349static u32 REGPARM(2) (*sh2_drc_read32)(u32 a, SH2 *sh2);
350static void REGPARM(2) (*sh2_drc_write8)(u32 a, u32 d);
351static void REGPARM(2) (*sh2_drc_write16)(u32 a, u32 d);
352static void REGPARM(3) (*sh2_drc_write32)(u32 a, u32 d, SH2 *sh2);
353
354// address space stuff
355static int dr_ctx_get_mem_ptr(u32 a, u32 *mask)
356{
357 int poffs = -1;
358
359 if ((a & ~0x7ff) == 0) {
360 // BIOS
361 poffs = offsetof(SH2, p_bios);
362 *mask = 0x7ff;
363 }
364 else if ((a & 0xfffff000) == 0xc0000000) {
365 // data array
366 // FIXME: access sh2->data_array instead
367 poffs = offsetof(SH2, p_da);
368 *mask = 0xfff;
369 }
370 else if ((a & 0xc6000000) == 0x06000000) {
371 // SDRAM
372 poffs = offsetof(SH2, p_sdram);
373 *mask = 0x03ffff;
374 }
375 else if ((a & 0xc6000000) == 0x02000000) {
376 // ROM
377 poffs = offsetof(SH2, p_rom);
378 *mask = 0x3fffff;
379 }
380
381 return poffs;
382}
383
384static struct block_entry *dr_get_entry(u32 pc, int is_slave, int *tcache_id)
385{
386 struct block_entry *be;
387 u32 tcid = 0, mask;
388
389 // data arrays have their own caches
390 if ((pc & 0xe0000000) == 0xc0000000 || (pc & ~0xfff) == 0)
391 tcid = 1 + is_slave;
392
393 *tcache_id = tcid;
394
395 mask = hash_table_sizes[tcid] - 1;
396 be = HASH_FUNC(hash_tables[tcid], pc, mask);
397 for (; be != NULL; be = be->next)
398 if (be->pc == pc)
399 return be;
400
401 return NULL;
402}
403
404// ---------------------------------------------------------------
405
406// block management
407static void add_to_block_list(struct block_list **blist, struct block_desc *block)
408{
409 struct block_list *added = malloc(sizeof(*added));
410 if (!added) {
411 elprintf(EL_ANOMALY, "drc OOM (1)");
412 return;
413 }
414 added->block = block;
415 added->next = *blist;
416 *blist = added;
417}
418
419static void rm_from_block_list(struct block_list **blist, struct block_desc *block)
420{
421 struct block_list *prev = NULL, *current = *blist;
422 for (; current != NULL; prev = current, current = current->next) {
423 if (current->block == block) {
424 if (prev == NULL)
425 *blist = current->next;
426 else
427 prev->next = current->next;
428 free(current);
429 return;
430 }
431 }
432 dbg(1, "can't rm block %p (%08x-%08x)",
433 block, block->addr, block->addr + block->size);
434}
435
436static void rm_block_list(struct block_list **blist)
437{
438 struct block_list *tmp, *current = *blist;
439 while (current != NULL) {
440 tmp = current;
441 current = current->next;
442 free(tmp);
443 }
444 *blist = NULL;
445}
446
447static void REGPARM(1) flush_tcache(int tcid)
448{
449 int i;
450
451 dbg(1, "tcache #%d flush! (%d/%d, bds %d/%d)", tcid,
452 tcache_ptrs[tcid] - tcache_bases[tcid], tcache_sizes[tcid],
453 block_counts[tcid], block_max_counts[tcid]);
454
455 block_counts[tcid] = 0;
456 block_link_pool_counts[tcid] = 0;
457 unresolved_links[tcid] = NULL;
458 memset(hash_tables[tcid], 0, sizeof(*hash_tables[0]) * hash_table_sizes[tcid]);
459 tcache_ptrs[tcid] = tcache_bases[tcid];
460 if (Pico32xMem != NULL) {
461 if (tcid == 0) // ROM, RAM
462 memset(Pico32xMem->drcblk_ram, 0,
463 sizeof(Pico32xMem->drcblk_ram));
464 else
465 memset(Pico32xMem->drcblk_da[tcid - 1], 0,
466 sizeof(Pico32xMem->drcblk_da[0]));
467 }
468#if (DRC_DEBUG & 4)
469 tcache_dsm_ptrs[tcid] = tcache_bases[tcid];
470#endif
471
472 for (i = 0; i < ram_sizes[tcid] / INVAL_PAGE_SIZE; i++)
473 rm_block_list(&inval_lookup[tcid][i]);
474}
475
476static void add_to_hashlist(struct block_entry *be, int tcache_id)
477{
478 u32 tcmask = hash_table_sizes[tcache_id] - 1;
479
480 be->next = HASH_FUNC(hash_tables[tcache_id], be->pc, tcmask);
481 HASH_FUNC(hash_tables[tcache_id], be->pc, tcmask) = be;
482
483#if (DRC_DEBUG & 2)
484 if (be->next != NULL) {
485 printf(" %08x: hash collision with %08x\n",
486 be->pc, be->next->pc);
487 hash_collisions++;
488 }
489#endif
490}
491
492static void rm_from_hashlist(struct block_entry *be, int tcache_id)
493{
494 u32 tcmask = hash_table_sizes[tcache_id] - 1;
495 struct block_entry *cur, *prev;
496
497 cur = HASH_FUNC(hash_tables[tcache_id], be->pc, tcmask);
498 if (cur == NULL)
499 goto missing;
500
501 if (be == cur) { // first
502 HASH_FUNC(hash_tables[tcache_id], be->pc, tcmask) = be->next;
503 return;
504 }
505
506 for (prev = cur, cur = cur->next; cur != NULL; cur = cur->next) {
507 if (cur == be) {
508 prev->next = cur->next;
509 return;
510 }
511 }
512
513missing:
514 dbg(1, "rm_from_hashlist: be %p %08x missing?", be, be->pc);
515}
516
517static struct block_desc *dr_add_block(u32 addr, u16 size_lit,
518 u16 size_nolit, int is_slave, int *blk_id)
519{
520 struct block_entry *be;
521 struct block_desc *bd;
522 int tcache_id;
523 int *bcount;
524
525 // do a lookup to get tcache_id and check for a block override
526 be = dr_get_entry(addr, is_slave, &tcache_id);
527 if (be != NULL)
528 dbg(1, "block override for %08x", addr);
529
530 bcount = &block_counts[tcache_id];
531 if (*bcount >= block_max_counts[tcache_id]) {
532 dbg(1, "bd overflow for tcache %d", tcache_id);
533 return NULL;
534 }
535
536 bd = &block_tables[tcache_id][*bcount];
537 bd->addr = addr;
538 bd->size = size_lit;
539 bd->size_nolit = size_nolit;
540
541 bd->entry_count = 1;
542 bd->entryp[0].pc = addr;
543 bd->entryp[0].tcache_ptr = tcache_ptr;
544 bd->entryp[0].links = NULL;
545#if (DRC_DEBUG & 2)
546 bd->entryp[0].block = bd;
547 bd->refcount = 0;
548#endif
549 add_to_hashlist(&bd->entryp[0], tcache_id);
550
551 *blk_id = *bcount;
552 (*bcount)++;
553
554 return bd;
555}
556
557static void REGPARM(3) *dr_lookup_block(u32 pc, int is_slave, int *tcache_id)
558{
559 struct block_entry *be = NULL;
560 void *block = NULL;
561
562 be = dr_get_entry(pc, is_slave, tcache_id);
563 if (be != NULL)
564 block = be->tcache_ptr;
565
566#if (DRC_DEBUG & 2)
567 if (be != NULL)
568 be->block->refcount++;
569#endif
570 return block;
571}
572
573static void *dr_failure(void)
574{
575 lprintf("recompilation failed\n");
576 exit(1);
577}
578
579static void *dr_prepare_ext_branch(u32 pc, int is_slave, int tcache_id)
580{
581#if LINK_BRANCHES
582 struct block_link *bl = block_link_pool[tcache_id];
583 int cnt = block_link_pool_counts[tcache_id];
584 struct block_entry *be = NULL;
585 int target_tcache_id;
586 int i;
587
588 be = dr_get_entry(pc, is_slave, &target_tcache_id);
589 if (target_tcache_id != tcache_id)
590 return sh2_drc_dispatcher;
591
592 // reuse trailing pool entries that have been freed
593 for (i = cnt - 1; i >= 0; i--)
594 if (bl[i].target_pc != 0)
595 break;
596 cnt = i + 1;
597 if (cnt >= block_link_pool_max_counts[tcache_id]) {
598 dbg(1, "bl overflow for tcache %d", tcache_id);
599 return NULL;
600 }
601 bl += cnt;
602 block_link_pool_counts[tcache_id]++;
603
604 bl->target_pc = pc;
605 bl->jump = tcache_ptr;
606
607 if (be != NULL) {
608 dbg(2, "- early link from %p to pc %08x", bl->jump, pc);
609 bl->next = be->links;
610 be->links = bl;
611 return be->tcache_ptr;
612 }
613 else {
614 bl->next = unresolved_links[tcache_id];
615 unresolved_links[tcache_id] = bl;
616 return sh2_drc_dispatcher;
617 }
618#else
619 return sh2_drc_dispatcher;
620#endif
621}
622
623static void dr_link_blocks(struct block_entry *be, int tcache_id)
624{
625#if LINK_BRANCHES
626 struct block_link *first = unresolved_links[tcache_id];
627 struct block_link *bl, *prev, *tmp;
628 u32 pc = be->pc;
629
630 for (bl = prev = first; bl != NULL; ) {
631 if (bl->target_pc == pc) {
632 dbg(2, "- link from %p to pc %08x", bl->jump, pc);
633 emith_jump_patch(bl->jump, tcache_ptr);
634
635 // move bl from unresolved_links to block_entry
636 tmp = bl->next;
637 bl->next = be->links;
638 be->links = bl;
639
640 if (bl == first)
641 first = prev = bl = tmp;
642 else
643 prev->next = bl = tmp;
644 continue;
645 }
646 prev = bl;
647 bl = bl->next;
648 }
649 unresolved_links[tcache_id] = first;
650
651 // could sync arm caches here, but that's unnecessary
652#endif
653}
654
655#define ADD_TO_ARRAY(array, count, item, failcode) \
656 if (count >= ARRAY_SIZE(array)) { \
657 dbg(1, "warning: " #array " overflow"); \
658 failcode; \
659 } \
660 array[count++] = item;
661
662static int find_in_array(u32 *array, size_t size, u32 what)
663{
664 size_t i;
665 for (i = 0; i < size; i++)
666 if (what == array[i])
667 return i;
668
669 return -1;
670}
671
672// ---------------------------------------------------------------
673
674// register cache / constant propagation stuff
675typedef enum {
676 RC_GR_READ,
677 RC_GR_WRITE,
678 RC_GR_RMW,
679} rc_gr_mode;
680
681static int rcache_get_reg_(sh2_reg_e r, rc_gr_mode mode, int do_locking);
682
683// guest regs with constants
684static u32 dr_gcregs[24];
685// dr_gcregs_mask: regs holding a known constant; dr_gcregs_dirty: constants not yet written to the actual reg
686static u32 dr_gcregs_mask;
687static u32 dr_gcregs_dirty;
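
// e.g. for a branch-with-link the return address is only recorded here via
// gconst_new(SHR_PR, pc + 2); no host instruction is emitted unless PR is
// later read, at which point gconst_try_read() materializes the constant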
688
689#if PROPAGATE_CONSTANTS
690static void gconst_new(sh2_reg_e r, u32 val)
691{
692 int i;
693
694 dr_gcregs_mask |= 1 << r;
695 dr_gcregs_dirty |= 1 << r;
696 dr_gcregs[r] = val;
697
698 // throw away old r that we might have cached
699 for (i = ARRAY_SIZE(reg_temp) - 1; i >= 0; i--) {
700 if ((reg_temp[i].type == HR_CACHED) &&
701 reg_temp[i].greg == r) {
702 reg_temp[i].type = HR_FREE;
703 reg_temp[i].flags = 0;
704 }
705 }
706}
707#endif
708
709static int gconst_get(sh2_reg_e r, u32 *val)
710{
711 if (dr_gcregs_mask & (1 << r)) {
712 *val = dr_gcregs[r];
713 return 1;
714 }
715 return 0;
716}
717
718static int gconst_check(sh2_reg_e r)
719{
720 if ((dr_gcregs_mask | dr_gcregs_dirty) & (1 << r))
721 return 1;
722 return 0;
723}
724
725// update hr if dirty, else do nothing
726static int gconst_try_read(int hr, sh2_reg_e r)
727{
728 if (dr_gcregs_dirty & (1 << r)) {
729 emith_move_r_imm(hr, dr_gcregs[r]);
730 dr_gcregs_dirty &= ~(1 << r);
731 return 1;
732 }
733 return 0;
734}
735
736static void gconst_check_evict(sh2_reg_e r)
737{
738 if (dr_gcregs_mask & (1 << r))
739 // no longer cached in reg, make dirty again
740 dr_gcregs_dirty |= 1 << r;
741}
742
743static void gconst_kill(sh2_reg_e r)
744{
745 dr_gcregs_mask &= ~(1 << r);
746 dr_gcregs_dirty &= ~(1 << r);
747}
748
749static void gconst_clean(void)
750{
751 int i;
752
753 for (i = 0; i < ARRAY_SIZE(dr_gcregs); i++)
754 if (dr_gcregs_dirty & (1 << i)) {
755 // using RC_GR_READ here: it will call gconst_try_read,
756 // cache the reg and mark it dirty.
757 rcache_get_reg_(i, RC_GR_READ, 0);
758 }
759}
760
761static void gconst_invalidate(void)
762{
763 dr_gcregs_mask = dr_gcregs_dirty = 0;
764}
765
766static u16 rcache_counter;
767
768static temp_reg_t *rcache_evict(void)
769{
770 // evict reg with oldest stamp
771 int i, oldest = -1;
772 u16 min_stamp = (u16)-1;
773
774 for (i = 0; i < ARRAY_SIZE(reg_temp); i++) {
775 if (reg_temp[i].type == HR_CACHED && !(reg_temp[i].flags & HRF_LOCKED) &&
776 reg_temp[i].stamp <= min_stamp) {
777 min_stamp = reg_temp[i].stamp;
778 oldest = i;
779 }
780 }
781
782 if (oldest == -1) {
783 printf("no registers to evict, aborting\n");
784 exit(1);
785 }
786
787 i = oldest;
788 if (reg_temp[i].type == HR_CACHED) {
789 if (reg_temp[i].flags & HRF_DIRTY)
790 // writeback
791 emith_ctx_write(reg_temp[i].hreg, reg_temp[i].greg * 4);
792 gconst_check_evict(reg_temp[i].greg);
793 }
794
795 reg_temp[i].type = HR_FREE;
796 reg_temp[i].flags = 0;
797 return &reg_temp[i];
798}
799
800static int get_reg_static(sh2_reg_e r, rc_gr_mode mode)
801{
802 int i = reg_map_g2h[r];
803 if (i != -1) {
804 if (mode != RC_GR_WRITE)
805 gconst_try_read(i, r);
806 }
807 return i;
808}
809
810// note: must not be called when doing conditional code
811static int rcache_get_reg_(sh2_reg_e r, rc_gr_mode mode, int do_locking)
812{
813 temp_reg_t *tr;
814 int i, ret;
815
816 // maybe statically mapped?
817 ret = get_reg_static(r, mode);
818 if (ret != -1)
819 goto end;
820
821 rcache_counter++;
822
823 // maybe already cached?
824 // if so, prefer it over gconst (they must be in sync)
825 for (i = ARRAY_SIZE(reg_temp) - 1; i >= 0; i--) {
826 if (reg_temp[i].type == HR_CACHED && reg_temp[i].greg == r) {
827 reg_temp[i].stamp = rcache_counter;
828 if (mode != RC_GR_READ)
829 reg_temp[i].flags |= HRF_DIRTY;
830 ret = reg_temp[i].hreg;
831 goto end;
832 }
833 }
834
835 // use any free reg
836 for (i = ARRAY_SIZE(reg_temp) - 1; i >= 0; i--) {
837 if (reg_temp[i].type == HR_FREE) {
838 tr = &reg_temp[i];
839 goto do_alloc;
840 }
841 }
842
843 tr = rcache_evict();
844
845do_alloc:
846 tr->type = HR_CACHED;
847 if (do_locking)
848 tr->flags |= HRF_LOCKED;
849 if (mode != RC_GR_READ)
850 tr->flags |= HRF_DIRTY;
851 tr->greg = r;
852 tr->stamp = rcache_counter;
853 ret = tr->hreg;
854
855 if (mode != RC_GR_WRITE) {
856 if (gconst_check(r)) {
857 if (gconst_try_read(ret, r))
858 tr->flags |= HRF_DIRTY;
859 }
860 else
861 emith_ctx_read(tr->hreg, r * 4);
862 }
863
864end:
865 if (mode != RC_GR_READ)
866 gconst_kill(r);
867
868 return ret;
869}
870
871static int rcache_get_reg(sh2_reg_e r, rc_gr_mode mode)
872{
873 return rcache_get_reg_(r, mode, 1);
874}
875
876static int rcache_get_tmp(void)
877{
878 temp_reg_t *tr;
879 int i;
880
881 for (i = 0; i < ARRAY_SIZE(reg_temp); i++)
882 if (reg_temp[i].type == HR_FREE) {
883 tr = &reg_temp[i];
884 goto do_alloc;
885 }
886
887 tr = rcache_evict();
888
889do_alloc:
890 tr->type = HR_TEMP;
891 return tr->hreg;
892}
893
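// find the reg_temp[] slot whose host register is the one used for function
// argument 'arg', spilling whatever it currently caches (dirty values are
// written back to the context first); aborts if that reg is held as a temp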
894static int rcache_get_arg_id(int arg)
895{
896 int i, r = 0;
897 host_arg2reg(r, arg);
898
899 for (i = 0; i < ARRAY_SIZE(reg_temp); i++)
900 if (reg_temp[i].hreg == r)
901 break;
902
903 if (i == ARRAY_SIZE(reg_temp)) // can't happen
904 exit(1);
905
906 if (reg_temp[i].type == HR_CACHED) {
907 // writeback
908 if (reg_temp[i].flags & HRF_DIRTY)
909 emith_ctx_write(reg_temp[i].hreg, reg_temp[i].greg * 4);
910 gconst_check_evict(reg_temp[i].greg);
911 }
912 else if (reg_temp[i].type == HR_TEMP) {
913 printf("arg %d reg %d already used, aborting\n", arg, r);
914 exit(1);
915 }
916
917 reg_temp[i].type = HR_FREE;
918 reg_temp[i].flags = 0;
919
920 return i;
921}
922
923// get a reg to be used as function arg
924static int rcache_get_tmp_arg(int arg)
925{
926 int id = rcache_get_arg_id(arg);
927 reg_temp[id].type = HR_TEMP;
928
929 return reg_temp[id].hreg;
930}
931
932// same but caches a reg. RC_GR_READ only.
933static int rcache_get_reg_arg(int arg, sh2_reg_e r)
934{
935 int i, srcr, dstr, dstid;
936 int dirty = 0, src_dirty = 0;
937
938 dstid = rcache_get_arg_id(arg);
939 dstr = reg_temp[dstid].hreg;
940
941 // maybe already statically mapped?
942 srcr = get_reg_static(r, RC_GR_READ);
943 if (srcr != -1)
944 goto do_cache;
945
946 // maybe already cached?
947 for (i = ARRAY_SIZE(reg_temp) - 1; i >= 0; i--) {
948 if ((reg_temp[i].type == HR_CACHED) &&
949 reg_temp[i].greg == r)
950 {
951 srcr = reg_temp[i].hreg;
952 if (reg_temp[i].flags & HRF_DIRTY)
953 src_dirty = 1;
954 goto do_cache;
955 }
956 }
957
958 // must read
959 srcr = dstr;
960 if (gconst_check(r)) {
961 if (gconst_try_read(srcr, r))
962 dirty = 1;
963 }
964 else
965 emith_ctx_read(srcr, r * 4);
966
967do_cache:
968 if (dstr != srcr)
969 emith_move_r_r(dstr, srcr);
970#if 1
971 else
972 dirty |= src_dirty;
973
974 if (dirty)
975 // must clean, callers might want to modify the arg before call
976 emith_ctx_write(dstr, r * 4);
977#else
978 if (dirty)
979 reg_temp[dstid].flags |= HRF_DIRTY;
980#endif
981
982 reg_temp[dstid].stamp = ++rcache_counter;
983 reg_temp[dstid].type = HR_CACHED;
984 reg_temp[dstid].greg = r;
985 reg_temp[dstid].flags |= HRF_LOCKED;
986 return dstr;
987}
988
989static void rcache_free_tmp(int hr)
990{
991 int i;
992 for (i = 0; i < ARRAY_SIZE(reg_temp); i++)
993 if (reg_temp[i].hreg == hr)
994 break;
995
996 if (i == ARRAY_SIZE(reg_temp) || reg_temp[i].type != HR_TEMP) {
997 printf("rcache_free_tmp fail: #%i hr %d, type %d\n", i, hr, reg_temp[i].type);
998 return;
999 }
1000
1001 reg_temp[i].type = HR_FREE;
1002 reg_temp[i].flags = 0;
1003}
1004
1005static void rcache_unlock(int hr)
1006{
1007 int i;
1008 for (i = 0; i < ARRAY_SIZE(reg_temp); i++)
1009 if (reg_temp[i].type == HR_CACHED && reg_temp[i].hreg == hr)
1010 reg_temp[i].flags &= ~HRF_LOCKED;
1011}
1012
1013static void rcache_unlock_all(void)
1014{
1015 int i;
1016 for (i = 0; i < ARRAY_SIZE(reg_temp); i++)
1017 reg_temp[i].flags &= ~HRF_LOCKED;
1018}
1019
1020static inline u32 rcache_used_hreg_mask(void)
1021{
1022 u32 mask = 0;
1023 int i;
1024
1025 for (i = 0; i < ARRAY_SIZE(reg_temp); i++)
1026 if (reg_temp[i].type != HR_FREE)
1027 mask |= 1 << reg_temp[i].hreg;
1028
1029 return mask;
1030}
1031
1032static void rcache_clean(void)
1033{
1034 int i;
1035 gconst_clean();
1036
1037 for (i = 0; i < ARRAY_SIZE(reg_temp); i++)
1038 if (reg_temp[i].type == HR_CACHED && (reg_temp[i].flags & HRF_DIRTY)) {
1039 // writeback
1040 emith_ctx_write(reg_temp[i].hreg, reg_temp[i].greg * 4);
1041 reg_temp[i].flags &= ~HRF_DIRTY;
1042 }
1043}
1044
1045static void rcache_invalidate(void)
1046{
1047 int i;
1048 for (i = 0; i < ARRAY_SIZE(reg_temp); i++) {
1049 reg_temp[i].type = HR_FREE;
1050 reg_temp[i].flags = 0;
1051 }
1052 rcache_counter = 0;
1053
1054 gconst_invalidate();
1055}
1056
1057static void rcache_flush(void)
1058{
1059 rcache_clean();
1060 rcache_invalidate();
1061}
1062
1063// ---------------------------------------------------------------
1064
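// for a constant guest address in a directly mapped region (BIOS, data array,
// SDRAM, ROM) load the region's host base pointer from the context into a
// temp, add all but the low 8 bits of the masked address, and return the temp
// together with the remaining byte offset; returns -1 for unmapped addresses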
1065static int emit_get_rbase_and_offs(u32 a, u32 *offs)
1066{
1067 u32 mask = 0;
1068 int poffs;
1069 int hr;
1070
1071 poffs = dr_ctx_get_mem_ptr(a, &mask);
1072 if (poffs == -1)
1073 return -1;
1074
1075 // XXX: could use some related reg
1076 hr = rcache_get_tmp();
1077 emith_ctx_read(hr, poffs);
1078 emith_add_r_imm(hr, a & mask & ~0xff);
1079 *offs = a & 0xff; // XXX: ARM oriented..
1080 return hr;
1081}
1082
1083static void emit_move_r_imm32(sh2_reg_e dst, u32 imm)
1084{
1085#if PROPAGATE_CONSTANTS
1086 gconst_new(dst, imm);
1087#else
1088 int hr = rcache_get_reg(dst, RC_GR_WRITE);
1089 emith_move_r_imm(hr, imm);
1090#endif
1091}
1092
1093static void emit_move_r_r(sh2_reg_e dst, sh2_reg_e src)
1094{
1095 int hr_d = rcache_get_reg(dst, RC_GR_WRITE);
1096 int hr_s = rcache_get_reg(src, RC_GR_READ);
1097
1098 emith_move_r_r(hr_d, hr_s);
1099}
1100
1101// T must be clear, and comparison done just before this
1102static void emit_or_t_if_eq(int srr)
1103{
1104 EMITH_SJMP_START(DCOND_NE);
1105 emith_or_r_imm_c(DCOND_EQ, srr, T);
1106 EMITH_SJMP_END(DCOND_NE);
1107}
1108
1109// arguments must be ready
1110// reg cache must be clean before call
1111static int emit_memhandler_read_(int size, int ram_check)
1112{
1113 int arg1;
1114#if 0
1115 int arg0;
1116 host_arg2reg(arg0, 0);
1117#endif
1118
1119 rcache_clean();
1120
1121 // must writeback cycles for poll detection stuff
1122 // FIXME: rm
1123 if (reg_map_g2h[SHR_SR] != -1)
1124 emith_ctx_write(reg_map_g2h[SHR_SR], SHR_SR * 4);
1125
1126 arg1 = rcache_get_tmp_arg(1);
1127 emith_move_r_r(arg1, CONTEXT_REG);
1128
1129#if 0 // can't do this because of unmapped reads
1130 // ndef PDB_NET
1131 if (ram_check && Pico.rom == (void *)0x02000000 && Pico32xMem->sdram == (void *)0x06000000) {
1132 int tmp = rcache_get_tmp();
1133 emith_and_r_r_imm(tmp, arg0, 0xfb000000);
1134 emith_cmp_r_imm(tmp, 0x02000000);
1135 switch (size) {
1136 case 0: // 8
1137 EMITH_SJMP3_START(DCOND_NE);
1138 emith_eor_r_imm_c(DCOND_EQ, arg0, 1);
1139 emith_read8_r_r_offs_c(DCOND_EQ, arg0, arg0, 0);
1140 EMITH_SJMP3_MID(DCOND_NE);
1141 emith_call_cond(DCOND_NE, sh2_drc_read8);
1142 EMITH_SJMP3_END();
1143 break;
1144 case 1: // 16
1145 EMITH_SJMP3_START(DCOND_NE);
1146 emith_read16_r_r_offs_c(DCOND_EQ, arg0, arg0, 0);
1147 EMITH_SJMP3_MID(DCOND_NE);
1148 emith_call_cond(DCOND_NE, sh2_drc_read16);
1149 EMITH_SJMP3_END();
1150 break;
1151 case 2: // 32
1152 EMITH_SJMP3_START(DCOND_NE);
1153 emith_read_r_r_offs_c(DCOND_EQ, arg0, arg0, 0);
1154 emith_ror_c(DCOND_EQ, arg0, arg0, 16);
1155 EMITH_SJMP3_MID(DCOND_NE);
1156 emith_call_cond(DCOND_NE, sh2_drc_read32);
1157 EMITH_SJMP3_END();
1158 break;
1159 }
1160 }
1161 else
1162#endif
1163 {
1164 switch (size) {
1165 case 0: // 8
1166 emith_call(sh2_drc_read8);
1167 break;
1168 case 1: // 16
1169 emith_call(sh2_drc_read16);
1170 break;
1171 case 2: // 32
1172 emith_call(sh2_drc_read32);
1173 break;
1174 }
1175 }
1176 rcache_invalidate();
1177
1178 if (reg_map_g2h[SHR_SR] != -1)
1179 emith_ctx_read(reg_map_g2h[SHR_SR], SHR_SR * 4);
1180
1181 // assuming arg0 and the retval reg match
1182 return rcache_get_tmp_arg(0);
1183}
1184
1185static int emit_memhandler_read(int size)
1186{
1187 return emit_memhandler_read_(size, 1);
1188}
1189
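// read a byte/word/long from @(Rs + offs) into Rd; when Rs holds a known
// constant pointing into a directly mapped region, the access bypasses the
// sh2_drc_read* handlers and goes through the host pointer instead (byte and
// word loads are sign-extended either way)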
1190static int emit_memhandler_read_rr(sh2_reg_e rd, sh2_reg_e rs, u32 offs, int size)
1191{
1192 int hr, hr2, ram_check = 1;
1193 u32 val, offs2;
1194
1195 if (gconst_get(rs, &val)) {
1196 hr = emit_get_rbase_and_offs(val + offs, &offs2);
1197 if (hr != -1) {
1198 hr2 = rcache_get_reg(rd, RC_GR_WRITE);
1199 switch (size) {
1200 case 0: // 8
1201 emith_read8_r_r_offs(hr2, hr, offs2 ^ 1);
1202 emith_sext(hr2, hr2, 8);
1203 break;
1204 case 1: // 16
1205 emith_read16_r_r_offs(hr2, hr, offs2);
1206 emith_sext(hr2, hr2, 16);
1207 break;
1208 case 2: // 32
1209 emith_read_r_r_offs(hr2, hr, offs2);
1210 emith_ror(hr2, hr2, 16);
1211 break;
1212 }
1213 rcache_free_tmp(hr);
1214 return hr2;
1215 }
1216
1217 ram_check = 0;
1218 }
1219
1220 hr = rcache_get_reg_arg(0, rs);
1221 if (offs != 0)
1222 emith_add_r_imm(hr, offs);
1223 hr = emit_memhandler_read_(size, ram_check);
1224 hr2 = rcache_get_reg(rd, RC_GR_WRITE);
1225 if (size != 2) {
1226 emith_sext(hr2, hr, (size == 1) ? 16 : 8);
1227 } else
1228 emith_move_r_r(hr2, hr);
1229 rcache_free_tmp(hr);
1230
1231 return hr2;
1232}
1233
1234static void emit_memhandler_write(int size)
1235{
1236 int ctxr;
1237 host_arg2reg(ctxr, 2);
1238 if (reg_map_g2h[SHR_SR] != -1)
1239 emith_ctx_write(reg_map_g2h[SHR_SR], SHR_SR * 4);
1240
1241 rcache_clean();
1242
1243 switch (size) {
1244 case 0: // 8
1245 // XXX: consider inlining sh2_drc_write8
1246 emith_call(sh2_drc_write8);
1247 break;
1248 case 1: // 16
1249 emith_call(sh2_drc_write16);
1250 break;
1251 case 2: // 32
1252 emith_move_r_r(ctxr, CONTEXT_REG);
1253 emith_call(sh2_drc_write32);
1254 break;
1255 }
1256
1257 rcache_invalidate();
1258 if (reg_map_g2h[SHR_SR] != -1)
1259 emith_ctx_read(reg_map_g2h[SHR_SR], SHR_SR * 4);
1260}
1261
1262// @(Rx,Ry)
1263static int emit_indirect_indexed_read(int rx, int ry, int size)
1264{
1265 int a0, t;
1266 a0 = rcache_get_reg_arg(0, rx);
1267 t = rcache_get_reg(ry, RC_GR_READ);
1268 emith_add_r_r(a0, t);
1269 return emit_memhandler_read(size);
1270}
1271
1272// read @Rn, @Rm
1273static void emit_indirect_read_double(u32 *rnr, u32 *rmr, int rn, int rm, int size)
1274{
1275 int tmp;
1276
1277 rcache_get_reg_arg(0, rn);
1278 tmp = emit_memhandler_read(size);
1279 emith_ctx_write(tmp, offsetof(SH2, drc_tmp));
1280 rcache_free_tmp(tmp);
1281 tmp = rcache_get_reg(rn, RC_GR_RMW);
1282 emith_add_r_imm(tmp, 1 << size);
1283 rcache_unlock(tmp);
1284
1285 rcache_get_reg_arg(0, rm);
1286 *rmr = emit_memhandler_read(size);
1287 *rnr = rcache_get_tmp();
1288 emith_ctx_read(*rnr, offsetof(SH2, drc_tmp));
1289 tmp = rcache_get_reg(rm, RC_GR_RMW);
1290 emith_add_r_imm(tmp, 1 << size);
1291 rcache_unlock(tmp);
1292}
1293
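// write (is_write != 0) or read all statically allocated guest registers
// to/from the SH2 context, coalescing runs of consecutive host registers into
// multi-register transfers; tmpr is passed through as scratch for those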
1294static void emit_do_static_regs(int is_write, int tmpr)
1295{
1296 int i, r, count;
1297
1298 for (i = 0; i < ARRAY_SIZE(reg_map_g2h); i++) {
1299 r = reg_map_g2h[i];
1300 if (r == -1)
1301 continue;
1302
1303 for (count = 1; i < ARRAY_SIZE(reg_map_g2h) - 1; i++, r++) {
1304 if (reg_map_g2h[i + 1] != r + 1)
1305 break;
1306 count++;
1307 }
1308
1309 if (count > 1) {
1310 // i, r point to last item
1311 if (is_write)
1312 emith_ctx_write_multiple(r - count + 1, (i - count + 1) * 4, count, tmpr);
1313 else
1314 emith_ctx_read_multiple(r - count + 1, (i - count + 1) * 4, count, tmpr);
1315 } else {
1316 if (is_write)
1317 emith_ctx_write(r, i * 4);
1318 else
1319 emith_ctx_read(r, i * 4);
1320 }
1321 }
1322}
1323
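// emits a test of arg0, which is expected to hold a looked-up host code
// pointer or NULL: jump to it when non-NULL, otherwise fall through;
// presumably used when generating the dispatcher/entry utility code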
1324static void emit_block_entry(void)
1325{
1326 int arg0;
1327
1328 host_arg2reg(arg0, 0);
1329
1330#if (DRC_DEBUG & 8) || defined(PDB)
1331 int arg1, arg2;
1332 host_arg2reg(arg1, 1);
1333 host_arg2reg(arg2, 2);
1334
1335 emit_do_static_regs(1, arg2);
1336 emith_move_r_r(arg1, CONTEXT_REG);
1337 emith_move_r_r(arg2, rcache_get_reg(SHR_SR, RC_GR_READ));
1338 emith_call(sh2_drc_log_entry);
1339 rcache_invalidate();
1340#endif
1341 emith_tst_r_r(arg0, arg0);
1342 EMITH_SJMP_START(DCOND_EQ);
1343 emith_jump_reg_c(DCOND_NE, arg0);
1344 EMITH_SJMP_END(DCOND_EQ);
1345}
1346
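// DELAY_SAVE_T copies the current T bit into T_save, so a delay-slot insn
// that overwrites T does not clobber the value the branch still depends on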
1347#define DELAY_SAVE_T(sr) { \
1348 emith_bic_r_imm(sr, T_save); \
1349 emith_tst_r_imm(sr, T); \
1350 EMITH_SJMP_START(DCOND_EQ); \
1351 emith_or_r_imm_c(DCOND_NE, sr, T_save); \
1352 EMITH_SJMP_END(DCOND_EQ); \
1353}
1354
1355#define FLUSH_CYCLES(sr) \
1356 if (cycles > 0) { \
1357 emith_sub_r_imm(sr, cycles << 12); \
1358 cycles = 0; \
1359 }
1360
1361static void *dr_get_pc_base(u32 pc, int is_slave);
1362
1363static void REGPARM(2) *sh2_translate(SH2 *sh2, int tcache_id)
1364{
1365 u32 branch_target_pc[MAX_LOCAL_BRANCHES];
1366 void *branch_target_ptr[MAX_LOCAL_BRANCHES];
1367 int branch_target_count = 0;
1368 void *branch_patch_ptr[MAX_LOCAL_BRANCHES];
1369 u32 branch_patch_pc[MAX_LOCAL_BRANCHES];
1370 int branch_patch_count = 0;
1371 u32 literal_addr[MAX_LITERALS];
1372 int literal_addr_count = 0;
1373 u8 op_flags[BLOCK_INSN_LIMIT];
1374 struct {
1375 u32 test_irq:1;
1376 u32 pending_branch_direct:1;
1377 u32 pending_branch_indirect:1;
1378 u32 literals_disabled:1;
1379 } drcf = { 0, };
1380
1381 // PC of current, first, last SH2 insn
1382 u32 pc, base_pc, end_pc;
1383 u32 end_literals;
1384 void *block_entry_ptr;
1385 struct block_desc *block;
1386 u16 *dr_pc_base;
1387 struct op_data *opd;
1388 int blkid_main = 0;
1389 int skip_op = 0;
1390 u32 tmp, tmp2;
1391 int cycles;
1392 int i, v;
1393 int op;
1394
1395 base_pc = sh2->pc;
1396 drcf.literals_disabled = literal_disabled_frames != 0;
1397
1398 // get base/validate PC
1399 dr_pc_base = dr_get_pc_base(base_pc, sh2->is_slave);
1400 if (dr_pc_base == (void *)-1) {
1401 printf("invalid PC, aborting: %08x\n", base_pc);
1402 // FIXME: be less destructive
1403 exit(1);
1404 }
1405
1406 tcache_ptr = tcache_ptrs[tcache_id];
1407
1408 // predict tcache overflow
1409 tmp = tcache_ptr - tcache_bases[tcache_id];
1410 if (tmp > tcache_sizes[tcache_id] - MAX_BLOCK_SIZE) {
1411 dbg(1, "tcache %d overflow", tcache_id);
1412 return NULL;
1413 }
1414
1415 // initial passes to disassemble and analyze the block
1416 scan_block(base_pc, sh2->is_slave, op_flags, &end_pc, &end_literals);
1417
1418 if (drcf.literals_disabled)
1419 end_literals = end_pc;
1420
1421 block = dr_add_block(base_pc, end_literals - base_pc,
1422 end_pc - base_pc, sh2->is_slave, &blkid_main);
1423 if (block == NULL)
1424 return NULL;
1425
1426 block_entry_ptr = tcache_ptr;
1427 dbg(2, "== %csh2 block #%d,%d %08x-%08x -> %p", sh2->is_slave ? 's' : 'm',
1428 tcache_id, blkid_main, base_pc, end_pc, block_entry_ptr);
1429
1430 dr_link_blocks(&block->entryp[0], tcache_id);
1431
1432 // collect branch_targets that don't land on delay slots
1433 for (pc = base_pc, i = 0; pc < end_pc; i++, pc += 2) {
1434 if (!(op_flags[i] & OF_BTARGET))
1435 continue;
1436 if (op_flags[i] & OF_DELAY_OP) {
1437 op_flags[i] &= ~OF_BTARGET;
1438 continue;
1439 }
1440 ADD_TO_ARRAY(branch_target_pc, branch_target_count, pc, break);
1441 }
1442
1443 if (branch_target_count > 0) {
1444 memset(branch_target_ptr, 0, sizeof(branch_target_ptr[0]) * branch_target_count);
1445 }
1446
1447 // clear stale state after compile errors
1448 rcache_invalidate();
1449
1450 // -------------------------------------------------
1451 // 3rd pass: actual compilation
1452 pc = base_pc;
1453 cycles = 0;
1454 for (i = 0; pc < end_pc; i++)
1455 {
1456 u32 delay_dep_fw = 0, delay_dep_bk = 0;
1457 u32 tmp3, tmp4, sr;
1458
1459 opd = &ops[i];
1460 op = FETCH_OP(pc);
1461
1462#if (DRC_DEBUG & 2)
1463 insns_compiled++;
1464#endif
1465#if (DRC_DEBUG & 4)
1466 DasmSH2(sh2dasm_buff, pc, op);
1467 printf("%c%08x %04x %s\n", (op_flags[i] & OF_BTARGET) ? '*' : ' ',
1468 pc, op, sh2dasm_buff);
1469#endif
1470
1471 if ((op_flags[i] & OF_BTARGET) || pc == base_pc)
1472 {
1473 if (pc != base_pc)
1474 {
1475 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1476 FLUSH_CYCLES(sr);
1477 rcache_flush();
1478
1479 // make block entry
1480 v = block->entry_count;
1481 if (v < ARRAY_SIZE(block->entryp)) {
1482 block->entryp[v].pc = pc;
1483 block->entryp[v].tcache_ptr = tcache_ptr;
1484 block->entryp[v].links = NULL;
1485#if (DRC_DEBUG & 2)
1486 block->entryp[v].block = block;
1487#endif
1488 add_to_hashlist(&block->entryp[v], tcache_id);
1489 block->entry_count++;
1490
1491 dbg(2, "-- %csh2 block #%d,%d entry %08x -> %p",
1492 sh2->is_slave ? 's' : 'm', tcache_id, blkid_main,
1493 pc, tcache_ptr);
1494
1495 // since we made a block entry, link any other blocks
1496 // that jump to current pc
1497 dr_link_blocks(&block->entryp[v], tcache_id);
1498 }
1499 else {
1500 dbg(1, "too many entryp for block #%d,%d pc=%08x",
1501 tcache_id, blkid_main, pc);
1502 }
1503
1504 do_host_disasm(tcache_id);
1505 }
1506
1507 v = find_in_array(branch_target_pc, branch_target_count, pc);
1508 if (v >= 0)
1509 branch_target_ptr[v] = tcache_ptr;
1510
1511 // must update PC
1512 emit_move_r_imm32(SHR_PC, pc);
1513 rcache_clean();
1514
1515 // check cycles
1516 sr = rcache_get_reg(SHR_SR, RC_GR_READ);
1517 emith_cmp_r_imm(sr, 0);
1518 emith_jump_cond(DCOND_LE, sh2_drc_exit);
1519 do_host_disasm(tcache_id);
1520 rcache_unlock_all();
1521 }
1522
1523#ifdef DRC_CMP
1524 if (!(op_flags[i] & OF_DELAY_OP)) {
1525 emit_move_r_imm32(SHR_PC, pc);
1526 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1527 FLUSH_CYCLES(sr);
1528 rcache_clean();
1529
1530 tmp = rcache_used_hreg_mask();
1531 emith_save_caller_regs(tmp);
1532 emit_do_static_regs(1, 0);
1533 emith_pass_arg_r(0, CONTEXT_REG);
1534 emith_call(do_sh2_cmp);
1535 emith_restore_caller_regs(tmp);
1536 }
1537#endif
1538
1539 pc += 2;
1540
1541 if (skip_op > 0) {
1542 skip_op--;
1543 continue;
1544 }
1545
1546 if (op_flags[i] & OF_DELAY_OP)
1547 {
1548 // handle delay slot dependencies
1549 delay_dep_fw = opd->dest & ops[i-1].source;
1550 delay_dep_bk = opd->source & ops[i-1].dest;
1551 if (delay_dep_fw & BITMASK1(SHR_T)) {
1552 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1553 DELAY_SAVE_T(sr);
1554 }
1555 if (delay_dep_bk & BITMASK1(SHR_PC)) {
1556 if (opd->op != OP_LOAD_POOL && opd->op != OP_MOVA) {
1557 // can only be those 2 really..
1558 elprintf_sh2(sh2, EL_ANOMALY,
1559 "drc: illegal slot insn %04x @ %08x?", op, pc - 2);
1560 }
1561 if (opd->imm != 0)
1562 ; // addr already resolved somehow
1563 else {
1564 switch (ops[i-1].op) {
1565 case OP_BRANCH:
1566 emit_move_r_imm32(SHR_PC, ops[i-1].imm);
1567 break;
1568 case OP_BRANCH_CT:
1569 case OP_BRANCH_CF:
1570 tmp = rcache_get_reg(SHR_PC, RC_GR_WRITE);
1571 sr = rcache_get_reg(SHR_SR, RC_GR_READ);
1572 emith_move_r_imm(tmp, pc);
1573 emith_tst_r_imm(sr, T);
1574 tmp2 = ops[i-1].op == OP_BRANCH_CT ? DCOND_NE : DCOND_EQ;
1575 emith_move_r_imm_c(tmp2, tmp, ops[i-1].imm);
1576 break;
1577 // case OP_BRANCH_R OP_BRANCH_RF - PC already loaded
1578 }
1579 }
1580 }
1581 //if (delay_dep_fw & ~BITMASK1(SHR_T))
1582 // dbg(1, "unhandled delay_dep_fw: %x", delay_dep_fw & ~BITMASK1(SHR_T));
1583 if (delay_dep_bk & ~BITMASK2(SHR_PC, SHR_PR))
1584 dbg(1, "unhandled delay_dep_bk: %x", delay_dep_bk);
1585 }
1586
1587 switch (opd->op)
1588 {
1589 case OP_BRANCH:
1590 case OP_BRANCH_CT:
1591 case OP_BRANCH_CF:
1592 if (opd->dest & BITMASK1(SHR_PR))
1593 emit_move_r_imm32(SHR_PR, pc + 2);
1594 drcf.pending_branch_direct = 1;
1595 goto end_op;
1596
1597 case OP_BRANCH_R:
1598 if (opd->dest & BITMASK1(SHR_PR))
1599 emit_move_r_imm32(SHR_PR, pc + 2);
1600 emit_move_r_r(SHR_PC, opd->rm);
1601 drcf.pending_branch_indirect = 1;
1602 goto end_op;
1603
1604 case OP_BRANCH_RF:
1605 tmp = rcache_get_reg(SHR_PC, RC_GR_WRITE);
1606 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_READ);
1607 if (opd->dest & BITMASK1(SHR_PR)) {
1608 tmp3 = rcache_get_reg(SHR_PR, RC_GR_WRITE);
1609 emith_move_r_imm(tmp3, pc + 2);
1610 emith_add_r_r_r(tmp, tmp2, tmp3);
1611 }
1612 else {
1613 emith_move_r_r(tmp, tmp2);
1614 emith_add_r_imm(tmp, pc + 2);
1615 }
1616 drcf.pending_branch_indirect = 1;
1617 goto end_op;
1618
1619 case OP_SLEEP:
1620 printf("TODO sleep\n");
1621 goto end_op;
1622
1623 case OP_RTE:
1624 // pop PC
1625 emit_memhandler_read_rr(SHR_PC, SHR_SP, 0, 2);
1626 // pop SR
1627 tmp = rcache_get_reg_arg(0, SHR_SP);
1628 emith_add_r_imm(tmp, 4);
1629 tmp = emit_memhandler_read(2);
1630 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1631 emith_write_sr(sr, tmp);
1632 rcache_free_tmp(tmp);
1633 tmp = rcache_get_reg(SHR_SP, RC_GR_RMW);
1634 emith_add_r_imm(tmp, 4*2);
1635 drcf.test_irq = 1;
1636 drcf.pending_branch_indirect = 1;
1637 goto end_op;
1638
1639 case OP_LOAD_POOL:
1640#if PROPAGATE_CONSTANTS
1641 if (opd->imm != 0 && opd->imm < end_literals
1642 && literal_addr_count < MAX_LITERALS)
1643 {
1644 ADD_TO_ARRAY(literal_addr, literal_addr_count, opd->imm,);
1645 if (opd->size == 2)
1646 tmp = FETCH32(opd->imm);
1647 else
1648 tmp = (u32)(int)(signed short)FETCH_OP(opd->imm);
1649 gconst_new(GET_Rn(), tmp);
1650 }
1651 else
1652#endif
1653 {
1654 tmp = rcache_get_tmp_arg(0);
1655 if (opd->imm != 0)
1656 emith_move_r_imm(tmp, opd->imm);
1657 else {
1658 // have to calculate read addr from PC
1659 tmp2 = rcache_get_reg(SHR_PC, RC_GR_READ);
1660 if (opd->size == 2) {
1661 emith_add_r_r_imm(tmp, tmp2, 2 + (op & 0xff) * 4);
1662 emith_bic_r_imm(tmp, 3);
1663 }
1664 else
1665 emith_add_r_r_imm(tmp, tmp2, 2 + (op & 0xff) * 2);
1666 }
1667 tmp2 = emit_memhandler_read(opd->size);
1668 tmp3 = rcache_get_reg(GET_Rn(), RC_GR_WRITE);
1669 if (opd->size == 2)
1670 emith_move_r_r(tmp3, tmp2);
1671 else
1672 emith_sext(tmp3, tmp2, 16);
1673 rcache_free_tmp(tmp2);
1674 }
1675 goto end_op;
1676
1677 case OP_MOVA:
1678 if (opd->imm != 0)
1679 emit_move_r_imm32(SHR_R0, opd->imm);
1680 else {
1681 tmp = rcache_get_reg(SHR_R0, RC_GR_WRITE);
1682 tmp2 = rcache_get_reg(SHR_PC, RC_GR_READ);
1683 emith_add_r_r_imm(tmp, tmp2, 2 + (op & 0xff) * 4);
1684 emith_bic_r_imm(tmp, 3);
1685 }
1686 goto end_op;
1687 }
1688
1689 switch ((op >> 12) & 0x0f)
1690 {
1691 /////////////////////////////////////////////
1692 case 0x00:
1693 switch (op & 0x0f)
1694 {
1695 case 0x02:
1696 tmp = rcache_get_reg(GET_Rn(), RC_GR_WRITE);
1697 switch (GET_Fx())
1698 {
1699 case 0: // STC SR,Rn 0000nnnn00000010
1700 tmp2 = SHR_SR;
1701 break;
1702 case 1: // STC GBR,Rn 0000nnnn00010010
1703 tmp2 = SHR_GBR;
1704 break;
1705 case 2: // STC VBR,Rn 0000nnnn00100010
1706 tmp2 = SHR_VBR;
1707 break;
1708 default:
1709 goto default_;
1710 }
1711 tmp3 = rcache_get_reg(tmp2, RC_GR_READ);
1712 emith_move_r_r(tmp, tmp3);
1713 if (tmp2 == SHR_SR)
1714 emith_clear_msb(tmp, tmp, 22); // reserved bits defined by ISA as 0
1715 goto end_op;
1716 case 0x04: // MOV.B Rm,@(R0,Rn) 0000nnnnmmmm0100
1717 case 0x05: // MOV.W Rm,@(R0,Rn) 0000nnnnmmmm0101
1718 case 0x06: // MOV.L Rm,@(R0,Rn) 0000nnnnmmmm0110
1719 rcache_clean();
1720 tmp = rcache_get_reg_arg(1, GET_Rm());
1721 tmp2 = rcache_get_reg_arg(0, SHR_R0);
1722 tmp3 = rcache_get_reg(GET_Rn(), RC_GR_READ);
1723 emith_add_r_r(tmp2, tmp3);
1724 emit_memhandler_write(op & 3);
1725 goto end_op;
1726 case 0x07:
1727 // MUL.L Rm,Rn 0000nnnnmmmm0111
1728 tmp = rcache_get_reg(GET_Rn(), RC_GR_READ);
1729 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1730 tmp3 = rcache_get_reg(SHR_MACL, RC_GR_WRITE);
1731 emith_mul(tmp3, tmp2, tmp);
1732 goto end_op;
1733 case 0x08:
1734 switch (GET_Fx())
1735 {
1736 case 0: // CLRT 0000000000001000
1737 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1738 emith_bic_r_imm(sr, T);
1739 break;
1740 case 1: // SETT 0000000000011000
1741 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1742 emith_or_r_imm(sr, T);
1743 break;
1744 case 2: // CLRMAC 0000000000101000
1745 emit_move_r_imm32(SHR_MACL, 0);
1746 emit_move_r_imm32(SHR_MACH, 0);
1747 break;
1748 default:
1749 goto default_;
1750 }
1751 goto end_op;
1752 case 0x09:
1753 switch (GET_Fx())
1754 {
1755 case 0: // NOP 0000000000001001
1756 break;
1757 case 1: // DIV0U 0000000000011001
1758 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1759 emith_bic_r_imm(sr, M|Q|T);
1760 break;
1761 case 2: // MOVT Rn 0000nnnn00101001
1762 sr = rcache_get_reg(SHR_SR, RC_GR_READ);
1763 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_WRITE);
1764 emith_clear_msb(tmp2, sr, 31);
1765 break;
1766 default:
1767 goto default_;
1768 }
1769 goto end_op;
1770 case 0x0a:
1771 tmp = rcache_get_reg(GET_Rn(), RC_GR_WRITE);
1772 switch (GET_Fx())
1773 {
1774 case 0: // STS MACH,Rn 0000nnnn00001010
1775 tmp2 = SHR_MACH;
1776 break;
1777 case 1: // STS MACL,Rn 0000nnnn00011010
1778 tmp2 = SHR_MACL;
1779 break;
1780 case 2: // STS PR,Rn 0000nnnn00101010
1781 tmp2 = SHR_PR;
1782 break;
1783 default:
1784 goto default_;
1785 }
1786 tmp2 = rcache_get_reg(tmp2, RC_GR_READ);
1787 emith_move_r_r(tmp, tmp2);
1788 goto end_op;
1789 case 0x0c: // MOV.B @(R0,Rm),Rn 0000nnnnmmmm1100
1790 case 0x0d: // MOV.W @(R0,Rm),Rn 0000nnnnmmmm1101
1791 case 0x0e: // MOV.L @(R0,Rm),Rn 0000nnnnmmmm1110
1792 tmp = emit_indirect_indexed_read(SHR_R0, GET_Rm(), op & 3);
1793 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_WRITE);
1794 if ((op & 3) != 2) {
1795 emith_sext(tmp2, tmp, (op & 1) ? 16 : 8);
1796 } else
1797 emith_move_r_r(tmp2, tmp);
1798 rcache_free_tmp(tmp);
1799 goto end_op;
1800 case 0x0f: // MAC.L @Rm+,@Rn+ 0000nnnnmmmm1111
1801 emit_indirect_read_double(&tmp, &tmp2, GET_Rn(), GET_Rm(), 2);
1802 tmp4 = rcache_get_reg(SHR_MACH, RC_GR_RMW);
1803 /* MS 16 MAC bits unused if saturated */
1804 sr = rcache_get_reg(SHR_SR, RC_GR_READ);
1805 emith_tst_r_imm(sr, S);
1806 EMITH_SJMP_START(DCOND_EQ);
1807 emith_clear_msb_c(DCOND_NE, tmp4, tmp4, 16);
1808 EMITH_SJMP_END(DCOND_EQ);
1809 rcache_unlock(sr);
1810 tmp3 = rcache_get_reg(SHR_MACL, RC_GR_RMW); // might evict SR
1811 emith_mula_s64(tmp3, tmp4, tmp, tmp2);
1812 rcache_free_tmp(tmp2);
1813 sr = rcache_get_reg(SHR_SR, RC_GR_READ); // reget just in case
1814 emith_tst_r_imm(sr, S);
1815
1816 EMITH_JMP_START(DCOND_EQ);
1817 emith_asr(tmp, tmp4, 15);
1818 emith_cmp_r_imm(tmp, -1); // negative overflow (0x80000000..0xffff7fff)
1819 EMITH_SJMP_START(DCOND_GE);
1820 emith_move_r_imm_c(DCOND_LT, tmp4, 0x8000);
1821 emith_move_r_imm_c(DCOND_LT, tmp3, 0x0000);
1822 EMITH_SJMP_END(DCOND_GE);
1823 emith_cmp_r_imm(tmp, 0); // positive overflow (0x00008000..0x7fffffff)
1824 EMITH_SJMP_START(DCOND_LE);
1825 emith_move_r_imm_c(DCOND_GT, tmp4, 0x00007fff);
1826 emith_move_r_imm_c(DCOND_GT, tmp3, 0xffffffff);
1827 EMITH_SJMP_END(DCOND_LE);
1828 EMITH_JMP_END(DCOND_EQ);
1829
1830 rcache_free_tmp(tmp);
1831 goto end_op;
1832 }
1833 goto default_;
1834
1835 /////////////////////////////////////////////
1836 case 0x01:
1837 // MOV.L Rm,@(disp,Rn) 0001nnnnmmmmdddd
1838 rcache_clean();
1839 tmp = rcache_get_reg_arg(0, GET_Rn());
1840 tmp2 = rcache_get_reg_arg(1, GET_Rm());
1841 if (op & 0x0f)
1842 emith_add_r_imm(tmp, (op & 0x0f) * 4);
1843 emit_memhandler_write(2);
1844 goto end_op;
1845
1846 case 0x02:
1847 switch (op & 0x0f)
1848 {
1849 case 0x00: // MOV.B Rm,@Rn 0010nnnnmmmm0000
1850 case 0x01: // MOV.W Rm,@Rn 0010nnnnmmmm0001
1851 case 0x02: // MOV.L Rm,@Rn 0010nnnnmmmm0010
1852 rcache_clean();
1853 rcache_get_reg_arg(0, GET_Rn());
1854 rcache_get_reg_arg(1, GET_Rm());
1855 emit_memhandler_write(op & 3);
1856 goto end_op;
1857 case 0x04: // MOV.B Rm,@-Rn 0010nnnnmmmm0100
1858 case 0x05: // MOV.W Rm,@-Rn 0010nnnnmmmm0101
1859 case 0x06: // MOV.L Rm,@-Rn 0010nnnnmmmm0110
1860 rcache_get_reg_arg(1, GET_Rm()); // for Rm == Rn
1861 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
1862 emith_sub_r_imm(tmp, (1 << (op & 3)));
1863 rcache_clean();
1864 rcache_get_reg_arg(0, GET_Rn());
1865 emit_memhandler_write(op & 3);
1866 goto end_op;
1867 case 0x07: // DIV0S Rm,Rn 0010nnnnmmmm0111
1868 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1869 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_READ);
1870 tmp3 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1871 emith_bic_r_imm(sr, M|Q|T);
1872 emith_tst_r_imm(tmp2, (1<<31));
1873 EMITH_SJMP_START(DCOND_EQ);
1874 emith_or_r_imm_c(DCOND_NE, sr, Q);
1875 EMITH_SJMP_END(DCOND_EQ);
1876 emith_tst_r_imm(tmp3, (1<<31));
1877 EMITH_SJMP_START(DCOND_EQ);
1878 emith_or_r_imm_c(DCOND_NE, sr, M);
1879 EMITH_SJMP_END(DCOND_EQ);
1880 emith_teq_r_r(tmp2, tmp3);
1881 EMITH_SJMP_START(DCOND_PL);
1882 emith_or_r_imm_c(DCOND_MI, sr, T);
1883 EMITH_SJMP_END(DCOND_PL);
1884 goto end_op;
1885 case 0x08: // TST Rm,Rn 0010nnnnmmmm1000
1886 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1887 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_READ);
1888 tmp3 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1889 emith_bic_r_imm(sr, T);
1890 emith_tst_r_r(tmp2, tmp3);
1891 emit_or_t_if_eq(sr);
1892 goto end_op;
1893 case 0x09: // AND Rm,Rn 0010nnnnmmmm1001
1894 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
1895 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1896 emith_and_r_r(tmp, tmp2);
1897 goto end_op;
1898 case 0x0a: // XOR Rm,Rn 0010nnnnmmmm1010
1899 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
1900 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1901 emith_eor_r_r(tmp, tmp2);
1902 goto end_op;
1903 case 0x0b: // OR Rm,Rn 0010nnnnmmmm1011
1904 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
1905 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1906 emith_or_r_r(tmp, tmp2);
1907 goto end_op;
1908 case 0x0c: // CMP/STR Rm,Rn 0010nnnnmmmm1100
1909 tmp = rcache_get_tmp();
1910 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_READ);
1911 tmp3 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1912 emith_eor_r_r_r(tmp, tmp2, tmp3);
1913 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1914 emith_bic_r_imm(sr, T);
1915 emith_tst_r_imm(tmp, 0x000000ff);
1916 emit_or_t_if_eq(sr);
1917 emith_tst_r_imm(tmp, 0x0000ff00);
1918 emit_or_t_if_eq(sr);
1919 emith_tst_r_imm(tmp, 0x00ff0000);
1920 emit_or_t_if_eq(sr);
1921 emith_tst_r_imm(tmp, 0xff000000);
1922 emit_or_t_if_eq(sr);
1923 rcache_free_tmp(tmp);
1924 goto end_op;
1925 case 0x0d: // XTRCT Rm,Rn 0010nnnnmmmm1101
1926 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
1927 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1928 emith_lsr(tmp, tmp, 16);
1929 emith_or_r_r_lsl(tmp, tmp2, 16);
1930 goto end_op;
1931 case 0x0e: // MULU.W Rm,Rn 0010nnnnmmmm1110
1932 case 0x0f: // MULS.W Rm,Rn 0010nnnnmmmm1111
1933 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_READ);
1934 tmp = rcache_get_reg(SHR_MACL, RC_GR_WRITE);
1935 if (op & 1) {
1936 emith_sext(tmp, tmp2, 16);
1937 } else
1938 emith_clear_msb(tmp, tmp2, 16);
1939 tmp3 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1940 tmp2 = rcache_get_tmp();
1941 if (op & 1) {
1942 emith_sext(tmp2, tmp3, 16);
1943 } else
1944 emith_clear_msb(tmp2, tmp3, 16);
1945 emith_mul(tmp, tmp, tmp2);
1946 rcache_free_tmp(tmp2);
1947 goto end_op;
1948 }
1949 goto default_;
1950
1951 /////////////////////////////////////////////
1952 case 0x03:
1953 switch (op & 0x0f)
1954 {
1955 case 0x00: // CMP/EQ Rm,Rn 0011nnnnmmmm0000
1956 case 0x02: // CMP/HS Rm,Rn 0011nnnnmmmm0010
1957 case 0x03: // CMP/GE Rm,Rn 0011nnnnmmmm0011
1958 case 0x06: // CMP/HI Rm,Rn 0011nnnnmmmm0110
1959 case 0x07: // CMP/GT Rm,Rn 0011nnnnmmmm0111
1960 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1961 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_READ);
1962 tmp3 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1963 emith_bic_r_imm(sr, T);
1964 emith_cmp_r_r(tmp2, tmp3);
1965 switch (op & 0x07)
1966 {
1967 case 0x00: // CMP/EQ
1968 emit_or_t_if_eq(sr);
1969 break;
1970 case 0x02: // CMP/HS
1971 EMITH_SJMP_START(DCOND_LO);
1972 emith_or_r_imm_c(DCOND_HS, sr, T);
1973 EMITH_SJMP_END(DCOND_LO);
1974 break;
1975 case 0x03: // CMP/GE
1976 EMITH_SJMP_START(DCOND_LT);
1977 emith_or_r_imm_c(DCOND_GE, sr, T);
1978 EMITH_SJMP_END(DCOND_LT);
1979 break;
1980 case 0x06: // CMP/HI
1981 EMITH_SJMP_START(DCOND_LS);
1982 emith_or_r_imm_c(DCOND_HI, sr, T);
1983 EMITH_SJMP_END(DCOND_LS);
1984 break;
1985 case 0x07: // CMP/GT
1986 EMITH_SJMP_START(DCOND_LE);
1987 emith_or_r_imm_c(DCOND_GT, sr, T);
1988 EMITH_SJMP_END(DCOND_LE);
1989 break;
1990 }
1991 goto end_op;
1992 case 0x04: // DIV1 Rm,Rn 0011nnnnmmmm0100
1993 // Q1 = carry(Rn = (Rn << 1) | T)
1994 // if Q ^ M
1995 // Q2 = carry(Rn += Rm)
1996 // else
1997 // Q2 = carry(Rn -= Rm)
1998 // Q = M ^ Q1 ^ Q2
1999 // T = (Q == M) = !(Q ^ M) = !(Q1 ^ Q2)
2000 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2001 tmp3 = rcache_get_reg(GET_Rm(), RC_GR_READ);
2002 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2003 emith_tpop_carry(sr, 0);
2004 emith_adcf_r_r(tmp2, tmp2);
2005 emith_tpush_carry(sr, 0); // keep Q1 in T for now
2006 tmp4 = rcache_get_tmp();
2007 emith_and_r_r_imm(tmp4, sr, M);
2008 emith_eor_r_r_lsr(sr, tmp4, M_SHIFT - Q_SHIFT); // Q ^= M
2009 rcache_free_tmp(tmp4);
2010 // add or sub, invert T if carry to get Q1 ^ Q2
2011 // in: (Q ^ M) passed in Q, Q1 in T
2012 emith_sh2_div1_step(tmp2, tmp3, sr);
2013 emith_bic_r_imm(sr, Q);
2014 emith_tst_r_imm(sr, M);
2015 EMITH_SJMP_START(DCOND_EQ);
2016 emith_or_r_imm_c(DCOND_NE, sr, Q); // Q = M
2017 EMITH_SJMP_END(DCOND_EQ);
2018 emith_tst_r_imm(sr, T);
2019 EMITH_SJMP_START(DCOND_EQ);
2020 emith_eor_r_imm_c(DCOND_NE, sr, Q); // Q = M ^ Q1 ^ Q2
2021 EMITH_SJMP_END(DCOND_EQ);
2022 emith_eor_r_imm(sr, T); // T = !(Q1 ^ Q2)
2023 goto end_op;
2024 case 0x05: // DMULU.L Rm,Rn 0011nnnnmmmm0101
2025 tmp = rcache_get_reg(GET_Rn(), RC_GR_READ);
2026 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
2027 tmp3 = rcache_get_reg(SHR_MACL, RC_GR_WRITE);
2028 tmp4 = rcache_get_reg(SHR_MACH, RC_GR_WRITE);
2029 emith_mul_u64(tmp3, tmp4, tmp, tmp2);
2030 goto end_op;
2031 case 0x08: // SUB Rm,Rn 0011nnnnmmmm1000
2032 case 0x0c: // ADD Rm,Rn 0011nnnnmmmm1100
2033 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2034 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
2035 if (op & 4) {
2036 emith_add_r_r(tmp, tmp2);
2037 } else
2038 emith_sub_r_r(tmp, tmp2);
2039 goto end_op;
2040 case 0x0a: // SUBC Rm,Rn 0011nnnnmmmm1010
2041 case 0x0e: // ADDC Rm,Rn 0011nnnnmmmm1110
2042 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2043 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
2044 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2045 if (op & 4) { // adc
2046 emith_tpop_carry(sr, 0);
2047 emith_adcf_r_r(tmp, tmp2);
2048 emith_tpush_carry(sr, 0);
2049 } else {
2050 emith_tpop_carry(sr, 1);
2051 emith_sbcf_r_r(tmp, tmp2);
2052 emith_tpush_carry(sr, 1);
2053 }
2054 goto end_op;
2055 case 0x0b: // SUBV Rm,Rn 0011nnnnmmmm1011
2056 case 0x0f: // ADDV Rm,Rn 0011nnnnmmmm1111
2057 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2058 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
2059 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2060 emith_bic_r_imm(sr, T);
2061 if (op & 4) {
2062 emith_addf_r_r(tmp, tmp2);
2063 } else
2064 emith_subf_r_r(tmp, tmp2);
2065 EMITH_SJMP_START(DCOND_VC);
2066 emith_or_r_imm_c(DCOND_VS, sr, T);
2067 EMITH_SJMP_END(DCOND_VC);
2068 goto end_op;
2069 case 0x0d: // DMULS.L Rm,Rn 0011nnnnmmmm1101
2070 tmp = rcache_get_reg(GET_Rn(), RC_GR_READ);
2071 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
2072 tmp3 = rcache_get_reg(SHR_MACL, RC_GR_WRITE);
2073 tmp4 = rcache_get_reg(SHR_MACH, RC_GR_WRITE);
2074 emith_mul_s64(tmp3, tmp4, tmp, tmp2);
2075 goto end_op;
2076 }
2077 goto default_;
2078
2079 /////////////////////////////////////////////
2080 case 0x04:
2081 switch (op & 0x0f)
2082 {
2083 case 0x00:
2084 switch (GET_Fx())
2085 {
2086 case 0: // SHLL Rn 0100nnnn00000000
2087 case 2: // SHAL Rn 0100nnnn00100000
2088 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2089 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2090 emith_tpop_carry(sr, 0); // dummy
2091 emith_lslf(tmp, tmp, 1);
2092 emith_tpush_carry(sr, 0);
2093 goto end_op;
2094 case 1: // DT Rn 0100nnnn00010000
2095 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2096#if 0 // scheduling needs tuning
2097 if (FETCH_OP(pc) == 0x8bfd) { // BF #-2
2098 if (gconst_get(GET_Rn(), &tmp)) {
2099 // XXX: limit burned cycles
2100 emit_move_r_imm32(GET_Rn(), 0);
2101 emith_or_r_imm(sr, T);
2102 cycles += tmp * 4 + 1; // +1 syncs with noconst version, not sure why
2103 skip_op = 1;
2104 }
2105 else
2106 emith_sh2_dtbf_loop();
2107 goto end_op;
2108 }
2109#endif
2110 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2111 emith_bic_r_imm(sr, T);
2112 emith_subf_r_imm(tmp, 1);
2113 emit_or_t_if_eq(sr);
2114 goto end_op;
2115 }
2116 goto default_;
2117 case 0x01:
2118 switch (GET_Fx())
2119 {
2120 case 0: // SHLR Rn 0100nnnn00000001
2121 case 2: // SHAR Rn 0100nnnn00100001
2122 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2123 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2124 emith_tpop_carry(sr, 0); // dummy
2125 if (op & 0x20) {
2126 emith_asrf(tmp, tmp, 1);
2127 } else
2128 emith_lsrf(tmp, tmp, 1);
2129 emith_tpush_carry(sr, 0);
2130 goto end_op;
2131 case 1: // CMP/PZ Rn 0100nnnn00010001
2132 tmp = rcache_get_reg(GET_Rn(), RC_GR_READ);
2133 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2134 emith_bic_r_imm(sr, T);
2135 emith_cmp_r_imm(tmp, 0);
2136 EMITH_SJMP_START(DCOND_LT);
2137 emith_or_r_imm_c(DCOND_GE, sr, T);
2138 EMITH_SJMP_END(DCOND_LT);
2139 goto end_op;
2140 }
2141 goto default_;
2142 case 0x02:
2143 case 0x03:
2144 switch (op & 0x3f)
2145 {
2146 case 0x02: // STS.L MACH,@-Rn 0100nnnn00000010
2147 tmp = SHR_MACH;
2148 break;
2149 case 0x12: // STS.L MACL,@-Rn 0100nnnn00010010
2150 tmp = SHR_MACL;
2151 break;
2152 case 0x22: // STS.L PR,@-Rn 0100nnnn00100010
2153 tmp = SHR_PR;
2154 break;
2155 case 0x03: // STC.L SR,@-Rn 0100nnnn00000011
2156 tmp = SHR_SR;
2157 break;
2158 case 0x13: // STC.L GBR,@-Rn 0100nnnn00010011
2159 tmp = SHR_GBR;
2160 break;
2161 case 0x23: // STC.L VBR,@-Rn 0100nnnn00100011
2162 tmp = SHR_VBR;
2163 break;
2164 default:
2165 goto default_;
2166 }
2167 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2168 emith_sub_r_imm(tmp2, 4);
2169 rcache_clean();
2170 rcache_get_reg_arg(0, GET_Rn());
2171 tmp3 = rcache_get_reg_arg(1, tmp);
2172 if (tmp == SHR_SR)
2173 emith_clear_msb(tmp3, tmp3, 22); // reserved bits defined by ISA as 0
2174 emit_memhandler_write(2);
2175 goto end_op;
2176 case 0x04:
2177 case 0x05:
2178 switch (op & 0x3f)
2179 {
2180 case 0x04: // ROTL Rn 0100nnnn00000100
2181 case 0x05: // ROTR Rn 0100nnnn00000101
2182 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2183 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2184 emith_tpop_carry(sr, 0); // dummy
2185 if (op & 1) {
2186 emith_rorf(tmp, tmp, 1);
2187 } else
2188 emith_rolf(tmp, tmp, 1);
2189 emith_tpush_carry(sr, 0);
2190 goto end_op;
2191 case 0x24: // ROTCL Rn 0100nnnn00100100
2192 case 0x25: // ROTCR Rn 0100nnnn00100101
2193 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2194 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2195 emith_tpop_carry(sr, 0);
2196 if (op & 1) {
2197 emith_rorcf(tmp);
2198 } else
2199 emith_rolcf(tmp);
2200 emith_tpush_carry(sr, 0);
2201 goto end_op;
2202 case 0x15: // CMP/PL Rn 0100nnnn00010101
2203 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2204 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2205 emith_bic_r_imm(sr, T);
2206 emith_cmp_r_imm(tmp, 0);
2207 EMITH_SJMP_START(DCOND_LE);
2208 emith_or_r_imm_c(DCOND_GT, sr, T);
2209 EMITH_SJMP_END(DCOND_LE);
2210 goto end_op;
2211 }
2212 goto default_;
2213 case 0x06:
2214 case 0x07:
2215 switch (op & 0x3f)
2216 {
2217 case 0x06: // LDS.L @Rm+,MACH 0100mmmm00000110
2218 tmp = SHR_MACH;
2219 break;
2220 case 0x16: // LDS.L @Rm+,MACL 0100mmmm00010110
2221 tmp = SHR_MACL;
2222 break;
2223 case 0x26: // LDS.L @Rm+,PR 0100mmmm00100110
2224 tmp = SHR_PR;
2225 break;
2226 case 0x07: // LDC.L @Rm+,SR 0100mmmm00000111
2227 tmp = SHR_SR;
2228 break;
2229 case 0x17: // LDC.L @Rm+,GBR 0100mmmm00010111
2230 tmp = SHR_GBR;
2231 break;
2232 case 0x27: // LDC.L @Rm+,VBR 0100mmmm00100111
2233 tmp = SHR_VBR;
2234 break;
2235 default:
2236 goto default_;
2237 }
2238 rcache_get_reg_arg(0, GET_Rn());
2239 tmp2 = emit_memhandler_read(2);
2240 if (tmp == SHR_SR) {
2241 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2242 emith_write_sr(sr, tmp2);
2243 drcf.test_irq = 1;
2244 } else {
2245 tmp = rcache_get_reg(tmp, RC_GR_WRITE);
2246 emith_move_r_r(tmp, tmp2);
2247 }
2248 rcache_free_tmp(tmp2);
2249 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2250 emith_add_r_imm(tmp, 4);
2251 goto end_op;
2252 case 0x08:
2253 case 0x09:
2254 switch (GET_Fx())
2255 {
2256 case 0:
2257 // SHLL2 Rn 0100nnnn00001000
2258 // SHLR2 Rn 0100nnnn00001001
2259 tmp = 2;
2260 break;
2261 case 1:
2262 // SHLL8 Rn 0100nnnn00011000
2263 // SHLR8 Rn 0100nnnn00011001
2264 tmp = 8;
2265 break;
2266 case 2:
2267 // SHLL16 Rn 0100nnnn00101000
2268 // SHLR16 Rn 0100nnnn00101001
2269 tmp = 16;
2270 break;
2271 default:
2272 goto default_;
2273 }
2274 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2275 if (op & 1) {
2276 emith_lsr(tmp2, tmp2, tmp);
2277 } else
2278 emith_lsl(tmp2, tmp2, tmp);
2279 goto end_op;
2280 case 0x0a:
2281 switch (GET_Fx())
2282 {
2283 case 0: // LDS Rm,MACH 0100mmmm00001010
2284 tmp2 = SHR_MACH;
2285 break;
2286 case 1: // LDS Rm,MACL 0100mmmm00011010
2287 tmp2 = SHR_MACL;
2288 break;
2289 case 2: // LDS Rm,PR 0100mmmm00101010
2290 tmp2 = SHR_PR;
2291 break;
2292 default:
2293 goto default_;
2294 }
2295 emit_move_r_r(tmp2, GET_Rn());
2296 goto end_op;
2297 case 0x0b:
2298 switch (GET_Fx())
2299 {
2300 case 1: // TAS.B @Rn 0100nnnn00011011
2301 // XXX: is TAS working on 32X?
2302 rcache_get_reg_arg(0, GET_Rn());
2303 tmp = emit_memhandler_read(0);
2304 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2305 emith_bic_r_imm(sr, T);
2306 emith_cmp_r_imm(tmp, 0);
2307 emit_or_t_if_eq(sr);
2308 rcache_clean();
2309 emith_or_r_imm(tmp, 0x80);
2310 tmp2 = rcache_get_tmp_arg(1); // assuming it differs from tmp
2311 emith_move_r_r(tmp2, tmp);
2312 rcache_free_tmp(tmp);
2313 rcache_get_reg_arg(0, GET_Rn());
2314 emit_memhandler_write(0);
2315 break;
2316 default:
2317 goto default_;
2318 }
2319 goto end_op;
2320 case 0x0e:
2321 tmp = rcache_get_reg(GET_Rn(), RC_GR_READ);
2322 switch (GET_Fx())
2323 {
2324 case 0: // LDC Rm,SR 0100mmmm00001110
2325 tmp2 = SHR_SR;
2326 break;
2327 case 1: // LDC Rm,GBR 0100mmmm00011110
2328 tmp2 = SHR_GBR;
2329 break;
2330 case 2: // LDC Rm,VBR 0100mmmm00101110
2331 tmp2 = SHR_VBR;
2332 break;
2333 default:
2334 goto default_;
2335 }
2336 if (tmp2 == SHR_SR) {
2337 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2338 emith_write_sr(sr, tmp);
2339 drcf.test_irq = 1;
2340 } else {
2341 tmp2 = rcache_get_reg(tmp2, RC_GR_WRITE);
2342 emith_move_r_r(tmp2, tmp);
2343 }
2344 goto end_op;
2345 case 0x0f:
2346 // MAC.W @Rm+,@Rn+ 0100nnnnmmmm1111
2347 emit_indirect_read_double(&tmp, &tmp2, GET_Rn(), GET_Rm(), 1);
2348 emith_sext(tmp, tmp, 16);
2349 emith_sext(tmp2, tmp2, 16);
2350 tmp3 = rcache_get_reg(SHR_MACL, RC_GR_RMW);
2351 tmp4 = rcache_get_reg(SHR_MACH, RC_GR_RMW);
2352 emith_mula_s64(tmp3, tmp4, tmp, tmp2);
2353 rcache_free_tmp(tmp2);
2354 // XXX: MACH should be untouched when S is set?
2355 sr = rcache_get_reg(SHR_SR, RC_GR_READ);
2356 emith_tst_r_imm(sr, S);
2357 EMITH_JMP_START(DCOND_EQ);
2358
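// saturation when S is set: overflow happened iff ((signed)MACL >> 31) != MACH;
// if so, clamp MACL to 0x80000000 (MACH negative) or 0x7fffffff (MACH positive)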
2359 emith_asr(tmp, tmp3, 31);
2360 emith_eorf_r_r(tmp, tmp4); // tmp = ((signed)macl >> 31) ^ mach
2361 EMITH_JMP_START(DCOND_EQ);
2362 emith_move_r_imm(tmp3, 0x80000000);
2363 emith_tst_r_r(tmp4, tmp4);
2364 EMITH_SJMP_START(DCOND_MI);
2365 emith_sub_r_imm_c(DCOND_PL, tmp3, 1); // positive
2366 EMITH_SJMP_END(DCOND_MI);
2367 EMITH_JMP_END(DCOND_EQ);
2368
2369 EMITH_JMP_END(DCOND_EQ);
2370 rcache_free_tmp(tmp);
2371 goto end_op;
2372 }
2373 goto default_;
2374
2375 /////////////////////////////////////////////
2376 case 0x05:
2377 // MOV.L @(disp,Rm),Rn 0101nnnnmmmmdddd
2378 emit_memhandler_read_rr(GET_Rn(), GET_Rm(), (op & 0x0f) * 4, 2);
2379 goto end_op;
2380
2381 /////////////////////////////////////////////
2382 case 0x06:
2383 switch (op & 0x0f)
2384 {
2385 case 0x00: // MOV.B @Rm,Rn 0110nnnnmmmm0000
2386 case 0x01: // MOV.W @Rm,Rn 0110nnnnmmmm0001
2387 case 0x02: // MOV.L @Rm,Rn 0110nnnnmmmm0010
2388 case 0x04: // MOV.B @Rm+,Rn 0110nnnnmmmm0100
2389 case 0x05: // MOV.W @Rm+,Rn 0110nnnnmmmm0101
2390 case 0x06: // MOV.L @Rm+,Rn 0110nnnnmmmm0110
2391 emit_memhandler_read_rr(GET_Rn(), GET_Rm(), 0, op & 3);
2392 if ((op & 7) >= 4 && GET_Rn() != GET_Rm()) {
2393 tmp = rcache_get_reg(GET_Rm(), RC_GR_RMW);
2394 emith_add_r_imm(tmp, (1 << (op & 3)));
2395 }
2396 goto end_op;
2397 case 0x03:
2398 case 0x07 ... 0x0f:
2399 tmp = rcache_get_reg(GET_Rm(), RC_GR_READ);
2400 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_WRITE);
2401 switch (op & 0x0f)
2402 {
2403 case 0x03: // MOV Rm,Rn 0110nnnnmmmm0011
2404 emith_move_r_r(tmp2, tmp);
2405 break;
2406 case 0x07: // NOT Rm,Rn 0110nnnnmmmm0111
2407 emith_mvn_r_r(tmp2, tmp);
2408 break;
2409 case 0x08: // SWAP.B Rm,Rn 0110nnnnmmmm1000
2410 tmp3 = tmp2;
2411 if (tmp == tmp2)
2412 tmp3 = rcache_get_tmp();
2413 tmp4 = rcache_get_tmp();
2414 emith_lsr(tmp3, tmp, 16);
2415 emith_or_r_r_lsl(tmp3, tmp, 24);
2416 emith_and_r_r_imm(tmp4, tmp, 0xff00);
2417 emith_or_r_r_lsl(tmp3, tmp4, 8);
2418 emith_rol(tmp2, tmp3, 16);
2419 rcache_free_tmp(tmp4);
2420 if (tmp == tmp2)
2421 rcache_free_tmp(tmp3);
2422 break;
2423 case 0x09: // SWAP.W Rm,Rn 0110nnnnmmmm1001
2424 emith_rol(tmp2, tmp, 16);
2425 break;
2426 case 0x0a: // NEGC Rm,Rn 0110nnnnmmmm1010
2427 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2428 emith_tpop_carry(sr, 1);
2429 emith_negcf_r_r(tmp2, tmp);
2430 emith_tpush_carry(sr, 1);
2431 break;
2432 case 0x0b: // NEG Rm,Rn 0110nnnnmmmm1011
2433 emith_neg_r_r(tmp2, tmp);
2434 break;
2435 case 0x0c: // EXTU.B Rm,Rn 0110nnnnmmmm1100
2436 emith_clear_msb(tmp2, tmp, 24);
2437 break;
2438 case 0x0d: // EXTU.W Rm,Rn 0110nnnnmmmm1101
2439 emith_clear_msb(tmp2, tmp, 16);
2440 break;
2441 case 0x0e: // EXTS.B Rm,Rn 0110nnnnmmmm1110
2442 emith_sext(tmp2, tmp, 8);
2443 break;
2444 case 0x0f: // EXTS.W Rm,Rn 0110nnnnmmmm1111
2445 emith_sext(tmp2, tmp, 16);
2446 break;
2447 }
2448 goto end_op;
2449 }
2450 goto default_;
2451
2452 /////////////////////////////////////////////
2453 case 0x07:
2454 // ADD #imm,Rn 0111nnnniiiiiiii
2455 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2456 if (op & 0x80) { // adding negative
2457 emith_sub_r_imm(tmp, -op & 0xff);
2458 } else
2459 emith_add_r_imm(tmp, op & 0xff);
2460 goto end_op;
2461
2462 /////////////////////////////////////////////
2463 case 0x08:
2464 switch (op & 0x0f00)
2465 {
2466 case 0x0000: // MOV.B R0,@(disp,Rn) 10000000nnnndddd
2467 case 0x0100: // MOV.W R0,@(disp,Rn) 10000001nnnndddd
2468 rcache_clean();
2469 tmp = rcache_get_reg_arg(0, GET_Rm());
2470 tmp2 = rcache_get_reg_arg(1, SHR_R0);
2471 tmp3 = (op & 0x100) >> 8;
2472 if (op & 0x0f)
2473 emith_add_r_imm(tmp, (op & 0x0f) << tmp3);
2474 emit_memhandler_write(tmp3);
2475 goto end_op;
2476 case 0x0400: // MOV.B @(disp,Rm),R0 10000100mmmmdddd
2477 case 0x0500: // MOV.W @(disp,Rm),R0 10000101mmmmdddd
2478 tmp = (op & 0x100) >> 8;
2479 emit_memhandler_read_rr(SHR_R0, GET_Rm(), (op & 0x0f) << tmp, tmp);
2480 goto end_op;
2481 case 0x0800: // CMP/EQ #imm,R0 10001000iiiiiiii
2482 // XXX: could use cmn
2483 tmp = rcache_get_tmp();
2484 tmp2 = rcache_get_reg(0, RC_GR_READ);
2485 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2486 emith_move_r_imm_s8(tmp, op & 0xff);
2487 emith_bic_r_imm(sr, T);
2488 emith_cmp_r_r(tmp2, tmp);
2489 emit_or_t_if_eq(sr);
2490 rcache_free_tmp(tmp);
2491 goto end_op;
2492 }
2493 goto default_;
2494
2495 /////////////////////////////////////////////
2496 case 0x0c:
2497 switch (op & 0x0f00)
2498 {
2499 case 0x0000: // MOV.B R0,@(disp,GBR) 11000000dddddddd
2500 case 0x0100: // MOV.W R0,@(disp,GBR) 11000001dddddddd
2501 case 0x0200: // MOV.L R0,@(disp,GBR) 11000010dddddddd
2502 rcache_clean();
2503 tmp = rcache_get_reg_arg(0, SHR_GBR);
2504 tmp2 = rcache_get_reg_arg(1, SHR_R0);
2505 tmp3 = (op & 0x300) >> 8;
2506 emith_add_r_imm(tmp, (op & 0xff) << tmp3);
2507 emit_memhandler_write(tmp3);
2508 goto end_op;
2509 case 0x0400: // MOV.B @(disp,GBR),R0 11000100dddddddd
2510 case 0x0500: // MOV.W @(disp,GBR),R0 11000101dddddddd
2511 case 0x0600: // MOV.L @(disp,GBR),R0 11000110dddddddd
2512 tmp = (op & 0x300) >> 8;
2513 emit_memhandler_read_rr(SHR_R0, SHR_GBR, (op & 0xff) << tmp, tmp);
2514 goto end_op;
2515 case 0x0300: // TRAPA #imm 11000011iiiiiiii
2516 tmp = rcache_get_reg(SHR_SP, RC_GR_RMW);
2517 emith_sub_r_imm(tmp, 4*2);
2518 // push SR
2519 tmp = rcache_get_reg_arg(0, SHR_SP);
2520 emith_add_r_imm(tmp, 4);
2521 tmp = rcache_get_reg_arg(1, SHR_SR);
2522 emith_clear_msb(tmp, tmp, 22);
2523 emit_memhandler_write(2);
2524 // push PC
2525 rcache_get_reg_arg(0, SHR_SP);
2526 tmp = rcache_get_tmp_arg(1);
2527 emith_move_r_imm(tmp, pc);
2528 emit_memhandler_write(2);
2529 // obtain new PC
2530 emit_memhandler_read_rr(SHR_PC, SHR_VBR, (op & 0xff) * 4, 2);
2531 // indirect jump -> back to dispatcher
2532 rcache_flush();
2533 emith_jump(sh2_drc_dispatcher);
2534 goto end_op;
2535 case 0x0800: // TST #imm,R0 11001000iiiiiiii
2536 tmp = rcache_get_reg(SHR_R0, RC_GR_READ);
2537 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2538 emith_bic_r_imm(sr, T);
2539 emith_tst_r_imm(tmp, op & 0xff);
2540 emit_or_t_if_eq(sr);
2541 goto end_op;
2542 case 0x0900: // AND #imm,R0 11001001iiiiiiii
2543 tmp = rcache_get_reg(SHR_R0, RC_GR_RMW);
2544 emith_and_r_imm(tmp, op & 0xff);
2545 goto end_op;
2546 case 0x0a00: // XOR #imm,R0 11001010iiiiiiii
2547 tmp = rcache_get_reg(SHR_R0, RC_GR_RMW);
2548 emith_eor_r_imm(tmp, op & 0xff);
2549 goto end_op;
2550 case 0x0b00: // OR #imm,R0 11001011iiiiiiii
2551 tmp = rcache_get_reg(SHR_R0, RC_GR_RMW);
2552 emith_or_r_imm(tmp, op & 0xff);
2553 goto end_op;
2554 case 0x0c00: // TST.B #imm,@(R0,GBR) 11001100iiiiiiii
2555 tmp = emit_indirect_indexed_read(SHR_R0, SHR_GBR, 0);
2556 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2557 emith_bic_r_imm(sr, T);
2558 emith_tst_r_imm(tmp, op & 0xff);
2559 emit_or_t_if_eq(sr);
2560 rcache_free_tmp(tmp);
2561 goto end_op;
2562 case 0x0d00: // AND.B #imm,@(R0,GBR) 11001101iiiiiiii
2563 tmp = emit_indirect_indexed_read(SHR_R0, SHR_GBR, 0);
2564 emith_and_r_imm(tmp, op & 0xff);
2565 goto end_rmw_op;
2566 case 0x0e00: // XOR.B #imm,@(R0,GBR) 11001110iiiiiiii
2567 tmp = emit_indirect_indexed_read(SHR_R0, SHR_GBR, 0);
2568 emith_eor_r_imm(tmp, op & 0xff);
2569 goto end_rmw_op;
2570 case 0x0f00: // OR.B #imm,@(R0,GBR) 11001111iiiiiiii
2571 tmp = emit_indirect_indexed_read(SHR_R0, SHR_GBR, 0);
2572 emith_or_r_imm(tmp, op & 0xff);
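// shared tail for AND.B/XOR.B/OR.B: write the modified byte back to @(R0,GBR)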
2573 end_rmw_op:
2574 tmp2 = rcache_get_tmp_arg(1);
2575 emith_move_r_r(tmp2, tmp);
2576 rcache_free_tmp(tmp);
2577 tmp3 = rcache_get_reg_arg(0, SHR_GBR);
2578 tmp4 = rcache_get_reg(SHR_R0, RC_GR_READ);
2579 emith_add_r_r(tmp3, tmp4);
2580 emit_memhandler_write(0);
2581 goto end_op;
2582 }
2583 goto default_;
2584
2585 /////////////////////////////////////////////
2586 case 0x0e:
2587 // MOV #imm,Rn 1110nnnniiiiiiii
2588 emit_move_r_imm32(GET_Rn(), (u32)(signed int)(signed char)op);
2589 goto end_op;
2590
2591 default:
2592 default_:
2593 elprintf_sh2(sh2, EL_ANOMALY,
2594 "drc: illegal op %04x @ %08x", op, pc - 2);
2595
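// emit an illegal instruction exception: push SR and the offending PC,
// then load the new PC from VBR vector 4 (general illegal instruction)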
2596 tmp = rcache_get_reg(SHR_SP, RC_GR_RMW);
2597 emith_sub_r_imm(tmp, 4*2);
2598 // push SR
2599 tmp = rcache_get_reg_arg(0, SHR_SP);
2600 emith_add_r_imm(tmp, 4);
2601 tmp = rcache_get_reg_arg(1, SHR_SR);
2602 emith_clear_msb(tmp, tmp, 22);
2603 emit_memhandler_write(2);
2604 // push PC
2605 rcache_get_reg_arg(0, SHR_SP);
2606 tmp = rcache_get_tmp_arg(1);
2607 emith_move_r_imm(tmp, pc - 2);
2608 emit_memhandler_write(2);
2609 // obtain new PC
2610 emit_memhandler_read_rr(SHR_PC, SHR_VBR, 4 * 4, 2);
2611 // indirect jump -> back to dispatcher
2612 rcache_flush();
2613 emith_jump(sh2_drc_dispatcher);
2614 break;
2615 }
2616
2617end_op:
2618 rcache_unlock_all();
2619
2620 cycles += opd->cycles;
2621
2622 if (op_flags[i+1] & OF_DELAY_OP) {
2623 do_host_disasm(tcache_id);
2624 continue;
2625 }
2626
2627 // test irq?
2628 if (drcf.test_irq && !drcf.pending_branch_direct) {
2629 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2630 FLUSH_CYCLES(sr);
2631 if (!drcf.pending_branch_indirect)
2632 emit_move_r_imm32(SHR_PC, pc);
2633 rcache_flush();
2634 emith_call(sh2_drc_test_irq);
2635 drcf.test_irq = 0;
2636 }
2637
2638 // branch handling (with/without delay)
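// direct branches: for conditional ones the extra taken-path cycles are
// subtracted from SR under the branch condition, then the target is either
// linked locally (branch_patch_*[]) or turned into a patchable external
// jump via dr_prepare_ext_branch(); indirect branches flush state and
// return to the dispatcher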
2639 if (drcf.pending_branch_direct)
2640 {
2641 struct op_data *opd_b =
2642 (op_flags[i] & OF_DELAY_OP) ? &ops[i-1] : opd;
2643 u32 target_pc = opd_b->imm;
2644 int cond = -1;
2645 void *target = NULL;
2646
2647 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2648 FLUSH_CYCLES(sr);
2649
2650 if (opd_b->op != OP_BRANCH)
2651 cond = (opd_b->op == OP_BRANCH_CF) ? DCOND_EQ : DCOND_NE;
2652 if (cond != -1) {
2653 int ctaken = (op_flags[i] & OF_DELAY_OP) ? 1 : 2;
2654
2655 if (delay_dep_fw & BITMASK1(SHR_T))
2656 emith_tst_r_imm(sr, T_save);
2657 else
2658 emith_tst_r_imm(sr, T);
2659
2660 emith_sub_r_imm_c(cond, sr, ctaken<<12);
2661 }
2662 rcache_clean();
2663
2664#if LINK_BRANCHES
2665 if (find_in_array(branch_target_pc, branch_target_count, target_pc) >= 0)
2666 {
2667 // local branch
2668 // XXX: jumps back can be linked already
2669 if (branch_patch_count < MAX_LOCAL_BRANCHES) {
2670 target = tcache_ptr;
2671 branch_patch_pc[branch_patch_count] = target_pc;
2672 branch_patch_ptr[branch_patch_count] = target;
2673 branch_patch_count++;
2674 }
2675 else
2676 dbg(1, "warning: too many local branches");
2677 }
2678
2679 if (target == NULL)
2680#endif
2681 {
2682 // can't resolve branch locally, make a block exit
2683 emit_move_r_imm32(SHR_PC, target_pc);
2684 rcache_clean();
2685
2686 target = dr_prepare_ext_branch(target_pc, sh2->is_slave, tcache_id);
2687 if (target == NULL)
2688 return NULL;
2689 }
2690
2691 if (cond != -1)
2692 emith_jump_cond_patchable(cond, target);
2693 else {
2694 emith_jump_patchable(target);
2695 rcache_invalidate();
2696 }
2697
2698 drcf.pending_branch_direct = 0;
2699 }
2700 else if (drcf.pending_branch_indirect) {
2701 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2702 FLUSH_CYCLES(sr);
2703 rcache_flush();
2704 emith_jump(sh2_drc_dispatcher);
2705 drcf.pending_branch_indirect = 0;
2706 }
2707
2708 do_host_disasm(tcache_id);
2709 }
2710
2711 tmp = rcache_get_reg(SHR_SR, RC_GR_RMW);
2712 FLUSH_CYCLES(tmp);
2713 rcache_flush();
2714
2715 // check the last op
2716 if (op_flags[i-1] & OF_DELAY_OP)
2717 opd = &ops[i-2];
2718 else
2719 opd = &ops[i-1];
2720
2721 if (opd->op != OP_BRANCH && opd->op != OP_BRANCH_R
2722 && opd->op != OP_BRANCH_RF && opd->op != OP_RTE)
2723 {
2724 void *target;
2725
2726 emit_move_r_imm32(SHR_PC, pc);
2727 rcache_flush();
2728
2729 target = dr_prepare_ext_branch(pc, sh2->is_slave, tcache_id);
2730 if (target == NULL)
2731 return NULL;
2732 emith_jump_patchable(target);
2733 }
2734
2735 // link local branches
2736 for (i = 0; i < branch_patch_count; i++) {
2737 void *target;
2738 int t;
2739 t = find_in_array(branch_target_pc, branch_target_count, branch_patch_pc[i]);
2740 target = branch_target_ptr[t];
2741 if (target == NULL) {
2742 // flush pc and go back to dispatcher (this should no longer happen)
2743 dbg(1, "stray branch to %08x %p", branch_patch_pc[i], tcache_ptr);
2744 target = tcache_ptr;
2745 emit_move_r_imm32(SHR_PC, branch_patch_pc[i]);
2746 rcache_flush();
2747 emith_jump(sh2_drc_dispatcher);
2748 }
2749 emith_jump_patch(branch_patch_ptr[i], target);
2750 }
2751
2752 // mark memory blocks as containing compiled code
2753 // override any overlay blocks as they become unreachable anyway
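// each u16 entry of drc_ram_blk covers (1 << shift) bytes; non-zero entries
// mark addresses holding translated code or literals and are cleared again
// by sh2_smc_rm_block() on invalidation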
2754 if ((block->addr & 0xc7fc0000) == 0x06000000
2755 || (block->addr & 0xfffff000) == 0xc0000000)
2756 {
2757 u16 *drc_ram_blk = NULL;
2758 u32 addr, mask = 0, shift = 0;
2759
2760 if (tcache_id != 0) {
2761 // data array, BIOS
2762 drc_ram_blk = Pico32xMem->drcblk_da[sh2->is_slave];
2763 shift = SH2_DRCBLK_DA_SHIFT;
2764 mask = 0xfff;
2765 }
2766 else {
2767 // SDRAM
2768 drc_ram_blk = Pico32xMem->drcblk_ram;
2769 shift = SH2_DRCBLK_RAM_SHIFT;
2770 mask = 0x3ffff;
2771 }
2772
2773 // mark recompiled insns
2774 drc_ram_blk[(base_pc & mask) >> shift] = 1;
2775 for (pc = base_pc; pc < end_pc; pc += 2)
2776 drc_ram_blk[(pc & mask) >> shift] = 1;
2777
2778 // mark literals
2779 for (i = 0; i < literal_addr_count; i++) {
2780 tmp = literal_addr[i];
2781 drc_ram_blk[(tmp & mask) >> shift] = 1;
2782 }
2783
2784 // add to invalidation lookup lists
2785 addr = base_pc & ~(INVAL_PAGE_SIZE - 1);
2786 for (; addr < end_literals; addr += INVAL_PAGE_SIZE) {
2787 i = (addr & mask) / INVAL_PAGE_SIZE;
2788 add_to_block_list(&inval_lookup[tcache_id][i], block);
2789 }
2790 }
2791
2792 tcache_ptrs[tcache_id] = tcache_ptr;
2793
2794 host_instructions_updated(block_entry_ptr, tcache_ptr);
2795
2796 do_host_disasm(tcache_id);
2797
2798 if (drcf.literals_disabled && literal_addr_count)
2799 dbg(1, "literals_disabled && literal_addr_count?");
2800 dbg(2, " block #%d,%d tcache %d/%d, insns %d -> %d %.3f",
2801 tcache_id, blkid_main,
2802 tcache_ptr - tcache_bases[tcache_id], tcache_sizes[tcache_id],
2803 insns_compiled, host_insn_count, (float)host_insn_count / insns_compiled);
2804 if ((sh2->pc & 0xc6000000) == 0x02000000) // ROM
2805 dbg(2, " hash collisions %d/%d", hash_collisions, block_counts[tcache_id]);
2806/*
2807 printf("~~~\n");
2808 tcache_dsm_ptrs[tcache_id] = block_entry_ptr;
2809 do_host_disasm(tcache_id);
2810 printf("~~~\n");
2811*/
2812
2813#if (DRC_DEBUG & 4)
2814 fflush(stdout);
2815#endif
2816
2817 return block_entry_ptr;
2818}
2819
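// emit the runtime helper routines (entry/exit, dispatcher, IRQ test, memory
// write handlers) at the start of the translation cache; called once from
// sh2_drc_init()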
2820static void sh2_generate_utils(void)
2821{
2822 int arg0, arg1, arg2, sr, tmp;
2823
2824 sh2_drc_write32 = p32x_sh2_write32;
2825 sh2_drc_read8 = p32x_sh2_read8;
2826 sh2_drc_read16 = p32x_sh2_read16;
2827 sh2_drc_read32 = p32x_sh2_read32;
2828
2829 host_arg2reg(arg0, 0);
2830 host_arg2reg(arg1, 1);
2831 host_arg2reg(arg2, 2);
2832 emith_move_r_r(arg0, arg0); // nop
2833
2834 // sh2_drc_exit(void)
2835 sh2_drc_exit = (void *)tcache_ptr;
2836 emit_do_static_regs(1, arg2);
2837 emith_sh2_drc_exit();
2838
2839 // sh2_drc_dispatcher(void)
2840 sh2_drc_dispatcher = (void *)tcache_ptr;
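// the cycle counter lives in the upper bits of SR (see sh2_execute);
// once it goes negative the timeslice is over and we leave the drc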
2841 sr = rcache_get_reg(SHR_SR, RC_GR_READ);
2842 emith_cmp_r_imm(sr, 0);
2843 emith_jump_cond(DCOND_LT, sh2_drc_exit);
2844 rcache_invalidate();
2845 emith_ctx_read(arg0, SHR_PC * 4);
2846 emith_ctx_read(arg1, offsetof(SH2, is_slave));
2847 emith_add_r_r_imm(arg2, CONTEXT_REG, offsetof(SH2, drc_tmp));
2848 emith_call(dr_lookup_block);
2849 emit_block_entry();
2850 // lookup failed, call sh2_translate()
2851 emith_move_r_r(arg0, CONTEXT_REG);
2852 emith_ctx_read(arg1, offsetof(SH2, drc_tmp)); // tcache_id
2853 emith_call(sh2_translate);
2854 emit_block_entry();
2855 // sh2_translate() failed, flush cache and retry
2856 emith_ctx_read(arg0, offsetof(SH2, drc_tmp));
2857 emith_call(flush_tcache);
2858 emith_move_r_r(arg0, CONTEXT_REG);
2859 emith_ctx_read(arg1, offsetof(SH2, drc_tmp));
2860 emith_call(sh2_translate);
2861 emit_block_entry();
2862 // XXX: can't translate, fail
2863 emith_call(dr_failure);
2864
2865 // sh2_drc_test_irq(void)
2866 // assumes it's called from main function (may jump to dispatcher)
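// compares sh2->pending_level against the I mask in SR; if the interrupt is
// accepted: push SR and PC, raise I to the accepted level, charge 13 cycles,
// fetch the vector via irq_callback and jump to the handler through the
// dispatcher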
2867 sh2_drc_test_irq = (void *)tcache_ptr;
2868 emith_ctx_read(arg1, offsetof(SH2, pending_level));
2869 sr = rcache_get_reg(SHR_SR, RC_GR_READ);
2870 emith_lsr(arg0, sr, I_SHIFT);
2871 emith_and_r_imm(arg0, 0x0f);
2872 emith_cmp_r_r(arg1, arg0); // pending_level > ((sr >> 4) & 0x0f)?
2873 EMITH_SJMP_START(DCOND_GT);
2874 emith_ret_c(DCOND_LE); // nope, return
2875 EMITH_SJMP_END(DCOND_GT);
2876 // adjust SP
2877 tmp = rcache_get_reg(SHR_SP, RC_GR_RMW);
2878 emith_sub_r_imm(tmp, 4*2);
2879 rcache_clean();
2880 // push SR
2881 tmp = rcache_get_reg_arg(0, SHR_SP);
2882 emith_add_r_imm(tmp, 4);
2883 tmp = rcache_get_reg_arg(1, SHR_SR);
2884 emith_clear_msb(tmp, tmp, 22);
2885 emith_move_r_r(arg2, CONTEXT_REG);
2886 emith_call(p32x_sh2_write32); // XXX: use sh2_drc_write32?
2887 rcache_invalidate();
2888 // push PC
2889 rcache_get_reg_arg(0, SHR_SP);
2890 emith_ctx_read(arg1, SHR_PC * 4);
2891 emith_move_r_r(arg2, CONTEXT_REG);
2892 emith_call(p32x_sh2_write32);
2893 rcache_invalidate();
2894 // update I, cycles, do callback
2895 emith_ctx_read(arg1, offsetof(SH2, pending_level));
2896 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2897 emith_bic_r_imm(sr, I);
2898 emith_or_r_r_lsl(sr, arg1, I_SHIFT);
2899 emith_sub_r_imm(sr, 13 << 12); // at least 13 cycles
2900 rcache_flush();
2901 emith_move_r_r(arg0, CONTEXT_REG);
2902 emith_call_ctx(offsetof(SH2, irq_callback)); // vector = sh2->irq_callback(sh2, level);
2903 // obtain new PC
2904 emith_lsl(arg0, arg0, 2);
2905 emith_ctx_read(arg1, SHR_VBR * 4);
2906 emith_add_r_r(arg0, arg1);
2907 emit_memhandler_read(2);
2908 emith_ctx_write(arg0, SHR_PC * 4);
2909#ifdef __i386__
2910 emith_add_r_imm(xSP, 4); // fix stack
2911#endif
2912 emith_jump(sh2_drc_dispatcher);
2913 rcache_invalidate();
2914
2915 // sh2_drc_entry(SH2 *sh2)
2916 sh2_drc_entry = (void *)tcache_ptr;
2917 emith_sh2_drc_entry();
2918 emith_move_r_r(CONTEXT_REG, arg0); // move ctx, arg0
2919 emit_do_static_regs(0, arg2);
2920 emith_call(sh2_drc_test_irq);
2921 emith_jump(sh2_drc_dispatcher);
2922
2923 // sh2_drc_write8(u32 a, u32 d)
2924 sh2_drc_write8 = (void *)tcache_ptr;
2925 emith_ctx_read(arg2, offsetof(SH2, write8_tab));
2926 emith_sh2_wcall(arg0, arg2);
2927
2928 // sh2_drc_write16(u32 a, u32 d)
2929 sh2_drc_write16 = (void *)tcache_ptr;
2930 emith_ctx_read(arg2, offsetof(SH2, write16_tab));
2931 emith_sh2_wcall(arg0, arg2);
2932
2933#ifdef PDB_NET
2934 // debug
2935 #define MAKE_READ_WRAPPER(func) { \
2936 void *tmp = (void *)tcache_ptr; \
2937 emith_push_ret(); \
2938 emith_call(func); \
2939 emith_ctx_read(arg2, offsetof(SH2, pdb_io_csum[0])); \
2940 emith_addf_r_r(arg2, arg0); \
2941 emith_ctx_write(arg2, offsetof(SH2, pdb_io_csum[0])); \
2942 emith_ctx_read(arg2, offsetof(SH2, pdb_io_csum[1])); \
2943 emith_adc_r_imm(arg2, 0x01000000); \
2944 emith_ctx_write(arg2, offsetof(SH2, pdb_io_csum[1])); \
2945 emith_pop_and_ret(); \
2946 func = tmp; \
2947 }
2948 #define MAKE_WRITE_WRAPPER(func) { \
2949 void *tmp = (void *)tcache_ptr; \
2950 emith_ctx_read(arg2, offsetof(SH2, pdb_io_csum[0])); \
2951 emith_addf_r_r(arg2, arg1); \
2952 emith_ctx_write(arg2, offsetof(SH2, pdb_io_csum[0])); \
2953 emith_ctx_read(arg2, offsetof(SH2, pdb_io_csum[1])); \
2954 emith_adc_r_imm(arg2, 0x01000000); \
2955 emith_ctx_write(arg2, offsetof(SH2, pdb_io_csum[1])); \
2956 emith_move_r_r(arg2, CONTEXT_REG); \
2957 emith_jump(func); \
2958 func = tmp; \
2959 }
2960
2961 MAKE_READ_WRAPPER(sh2_drc_read8);
2962 MAKE_READ_WRAPPER(sh2_drc_read16);
2963 MAKE_READ_WRAPPER(sh2_drc_read32);
2964 MAKE_WRITE_WRAPPER(sh2_drc_write8);
2965 MAKE_WRITE_WRAPPER(sh2_drc_write16);
2966 MAKE_WRITE_WRAPPER(sh2_drc_write32);
2967#if (DRC_DEBUG & 4)
2968 host_dasm_new_symbol(sh2_drc_read8);
2969 host_dasm_new_symbol(sh2_drc_read16);
2970 host_dasm_new_symbol(sh2_drc_read32);
2971 host_dasm_new_symbol(sh2_drc_write32);
2972#endif
2973#endif
2974
2975 rcache_invalidate();
2976#if (DRC_DEBUG & 4)
2977 host_dasm_new_symbol(sh2_drc_entry);
2978 host_dasm_new_symbol(sh2_drc_dispatcher);
2979 host_dasm_new_symbol(sh2_drc_exit);
2980 host_dasm_new_symbol(sh2_drc_test_irq);
2981 host_dasm_new_symbol(sh2_drc_write8);
2982 host_dasm_new_symbol(sh2_drc_write16);
2983#endif
2984}
2985
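// retire a single block: drop it from the invalidation lookup and hash lists,
// overwrite each of its entry points with a jump back to the dispatcher
// (tcache space of dead blocks is never reused), and requeue all incoming
// links as unresolved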
2986static void sh2_smc_rm_block_entry(struct block_desc *bd, int tcache_id, u32 ram_mask)
2987{
2988 struct block_link *bl, *bl_next, *bl_unresolved;
2989 u32 i, addr, end_addr;
2990 void *tmp;
2991
2992 dbg(2, " killing entry %08x-%08x-%08x, blkid %d,%d",
2993 bd->addr, bd->addr + bd->size_nolit, bd->addr + bd->size,
2994 tcache_id, bd - block_tables[tcache_id]);
2995 if (bd->addr == 0 || bd->entry_count == 0) {
2996 dbg(1, " killing dead block!? %08x", bd->addr);
2997 return;
2998 }
2999
3000 // remove from inval_lookup
3001 addr = bd->addr & ~(INVAL_PAGE_SIZE - 1);
3002 end_addr = bd->addr + bd->size;
3003 for (; addr < end_addr; addr += INVAL_PAGE_SIZE) {
3004 i = (addr & ram_mask) / INVAL_PAGE_SIZE;
3005 rm_from_block_list(&inval_lookup[tcache_id][i], bd);
3006 }
3007
3008 tmp = tcache_ptr;
3009 bl_unresolved = unresolved_links[tcache_id];
3010
3011 // remove from hash table, make incoming links unresolved
3012 // XXX: maybe patch branches w/flush instead?
3013 for (i = 0; i < bd->entry_count; i++) {
3014 rm_from_hashlist(&bd->entryp[i], tcache_id);
3015
3016 // since we never reuse tcache space of dead blocks,
3017 // insert a jump to the dispatcher at each entry point so blocks still linked to this one exit safely
3018 tcache_ptr = bd->entryp[i].tcache_ptr;
3019 emit_move_r_imm32(SHR_PC, bd->entryp[i].pc);
3020 rcache_flush();
3021 emith_jump(sh2_drc_dispatcher);
3022
3023 host_instructions_updated(bd->entryp[i].tcache_ptr, tcache_ptr);
3024
3025 for (bl = bd->entryp[i].links; bl != NULL; ) {
3026 bl_next = bl->next;
3027 bl->next = bl_unresolved;
3028 bl_unresolved = bl;
3029 bl = bl_next;
3030 }
3031 }
3032
3033 tcache_ptr = tmp;
3034 unresolved_links[tcache_id] = bl_unresolved;
3035
3036 bd->addr = bd->size = bd->size_nolit = 0;
3037 bd->entry_count = 0;
3038}
3039
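// handle a write hitting translated code at address a: kill every block
// covering a, then shrink the dirtied range so marks of still-live blocks
// survive and clear the code marks for the rest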
3040static void sh2_smc_rm_block(u32 a, u16 *drc_ram_blk, int tcache_id, u32 shift, u32 mask)
3041{
3042 struct block_list **blist = NULL, *entry;
3043 u32 from = ~0, to = 0, end_addr, taddr, i;
3044 struct block_desc *block;
3045
3046 blist = &inval_lookup[tcache_id][(a & mask) / INVAL_PAGE_SIZE];
3047 entry = *blist;
3048 while (entry != NULL) {
3049 block = entry->block;
3050 end_addr = block->addr + block->size;
3051 if (block->addr <= a && a < end_addr) {
3052 // get addr range that includes all removed blocks
3053 if (from > block->addr)
3054 from = block->addr;
3055 if (to < end_addr)
3056 to = end_addr;
3057
3058 sh2_smc_rm_block_entry(block, tcache_id, mask);
3059 if (a >= block->addr + block->size_nolit)
3060 literal_disabled_frames = 3;
3061
3062 // entry lost, restart search
3063 entry = *blist;
3064 continue;
3065 }
3066 entry = entry->next;
3067 }
3068
3069 if (from >= to)
3070 return;
3071
3072 // update range around a to match latest state
3073 from &= ~(INVAL_PAGE_SIZE - 1);
3074 to |= (INVAL_PAGE_SIZE - 1);
3075 for (taddr = from; taddr < to; taddr += INVAL_PAGE_SIZE) {
3076 i = (taddr & mask) / INVAL_PAGE_SIZE;
3077 entry = inval_lookup[tcache_id][i];
3078
3079 for (; entry != NULL; entry = entry->next) {
3080 block = entry->block;
3081
3082 if (block->addr > a) {
3083 if (to > block->addr)
3084 to = block->addr;
3085 }
3086 else {
3087 end_addr = block->addr + block->size;
3088 if (from < end_addr)
3089 from = end_addr;
3090 }
3091 }
3092 }
3093
3094 // clear code marks
3095 if (from < to) {
3096 u16 *p = drc_ram_blk + ((from & mask) >> shift);
3097 memset(p, 0, (to - from) >> (shift - 1));
3098 }
3099}
3100
3101void sh2_drc_wcheck_ram(unsigned int a, int val, int cpuid)
3102{
3103 dbg(2, "%csh2 smc check @%08x", cpuid ? 's' : 'm', a);
3104 sh2_smc_rm_block(a, Pico32xMem->drcblk_ram, 0, SH2_DRCBLK_RAM_SHIFT, 0x3ffff);
3105}
3106
3107void sh2_drc_wcheck_da(unsigned int a, int val, int cpuid)
3108{
3109 dbg(2, "%csh2 smc check @%08x", cpuid ? 's' : 'm', a);
3110 sh2_smc_rm_block(a, Pico32xMem->drcblk_da[cpuid],
3111 1 + cpuid, SH2_DRCBLK_DA_SHIFT, 0xfff);
3112}
3113
3114int sh2_execute(SH2 *sh2c, int cycles)
3115{
3116 int ret_cycles;
3117
3118 sh2c->cycles_timeslice = cycles;
3119
3120 // cycles are kept in SHR_SR unused bits (upper 20)
3121 // bit11 contains T saved for delay slot
3122 // others are usual SH2 flags
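// e.g. a 1000-cycle slice sets sr |= 1000 << 12; translated code subtracts
// its cycles from these bits and the dispatcher exits once (signed)sr < 0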
3123 sh2c->sr &= 0x3f3;
3124 sh2c->sr |= cycles << 12;
3125 sh2_drc_entry(sh2c);
3126
3127 // TODO: irq cycles
3128 ret_cycles = (signed int)sh2c->sr >> 12;
3129 if (ret_cycles > 0)
3130 dbg(1, "warning: drc returned with cycles: %d", ret_cycles);
3131
3132 sh2c->sr &= 0x3f3;
3133 return sh2c->cycles_timeslice - ret_cycles;
3134}
3135
3136#if (DRC_DEBUG & 2)
3137void block_stats(void)
3138{
3139 int c, b, i, total = 0;
3140
3141 printf("block stats:\n");
3142 for (b = 0; b < ARRAY_SIZE(block_tables); b++)
3143 for (i = 0; i < block_counts[b]; i++)
3144 if (block_tables[b][i].addr != 0)
3145 total += block_tables[b][i].refcount;
3146
3147 for (c = 0; c < 10; c++) {
3148 struct block_desc *blk, *maxb = NULL;
3149 int max = 0;
3150 for (b = 0; b < ARRAY_SIZE(block_tables); b++) {
3151 for (i = 0; i < block_counts[b]; i++) {
3152 blk = &block_tables[b][i];
3153 if (blk->addr != 0 && blk->refcount > max) {
3154 max = blk->refcount;
3155 maxb = blk;
3156 }
3157 }
3158 }
3159 if (maxb == NULL)
3160 break;
3161 printf("%08x %9d %2.3f%%\n", maxb->addr, maxb->refcount,
3162 (double)maxb->refcount / total * 100.0);
3163 maxb->refcount = 0;
3164 }
3165
3166 for (b = 0; b < ARRAY_SIZE(block_tables); b++)
3167 for (i = 0; i < block_counts[b]; i++)
3168 block_tables[b][i].refcount = 0;
3169}
3170#else
3171#define block_stats()
3172#endif
3173
3174void sh2_drc_flush_all(void)
3175{
3176 block_stats();
3177 flush_tcache(0);
3178 flush_tcache(1);
3179 flush_tcache(2);
3180}
3181
3182void sh2_drc_mem_setup(SH2 *sh2)
3183{
3184 // fill the convenience pointers
3185 sh2->p_bios = sh2->is_slave ? Pico32xMem->sh2_rom_s.w : Pico32xMem->sh2_rom_m.w;
3186 sh2->p_da = sh2->data_array;
3187 sh2->p_sdram = Pico32xMem->sdram;
3188 sh2->p_rom = Pico.rom;
3189}
3190
3191void sh2_drc_frame(void)
3192{
3193 if (literal_disabled_frames > 0)
3194 literal_disabled_frames--;
3195}
3196
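// one-time drc setup: allocate the per-tcache block tables, link pools,
// invalidation lookup and hash tables, then emit the helper routines
// (sh2_generate_utils) at the start of the translation cache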
3197int sh2_drc_init(SH2 *sh2)
3198{
3199 int i;
3200
3201 if (block_tables[0] == NULL)
3202 {
3203 for (i = 0; i < TCACHE_BUFFERS; i++) {
3204 block_tables[i] = calloc(block_max_counts[i], sizeof(*block_tables[0]));
3205 if (block_tables[i] == NULL)
3206 goto fail;
3207 // max 2 block links (exits) per block
3208 block_link_pool[i] = calloc(block_link_pool_max_counts[i],
3209 sizeof(*block_link_pool[0]));
3210 if (block_link_pool[i] == NULL)
3211 goto fail;
3212
3213 inval_lookup[i] = calloc(ram_sizes[i] / INVAL_PAGE_SIZE,
3214 sizeof(inval_lookup[0]));
3215 if (inval_lookup[i] == NULL)
3216 goto fail;
3217
3218 hash_tables[i] = calloc(hash_table_sizes[i], sizeof(*hash_tables[0]));
3219 if (hash_tables[i] == NULL)
3220 goto fail;
3221 }
3222 memset(block_counts, 0, sizeof(block_counts));
3223 memset(block_link_pool_counts, 0, sizeof(block_link_pool_counts));
3224
3225 drc_cmn_init();
3226 tcache_ptr = tcache;
3227 sh2_generate_utils();
3228 host_instructions_updated(tcache, tcache_ptr);
3229
3230 tcache_bases[0] = tcache_ptrs[0] = tcache_ptr;
3231 for (i = 1; i < ARRAY_SIZE(tcache_bases); i++)
3232 tcache_bases[i] = tcache_ptrs[i] = tcache_bases[i - 1] + tcache_sizes[i - 1];
3233
3234#if (DRC_DEBUG & 4)
3235 for (i = 0; i < ARRAY_SIZE(block_tables); i++)
3236 tcache_dsm_ptrs[i] = tcache_bases[i];
3237 // disasm the utils
3238 tcache_dsm_ptrs[0] = tcache;
3239 do_host_disasm(0);
3240#endif
3241#if (DRC_DEBUG & 1)
3242 hash_collisions = 0;
3243#endif
3244 }
3245
3246 return 0;
3247
3248fail:
3249 sh2_drc_finish(sh2);
3250 return -1;
3251}
3252
3253void sh2_drc_finish(SH2 *sh2)
3254{
3255 int i;
3256
3257 if (block_tables[0] == NULL)
3258 return;
3259
3260 sh2_drc_flush_all();
3261
3262 for (i = 0; i < TCACHE_BUFFERS; i++) {
3263#if (DRC_DEBUG & 4)
3264 printf("~~~ tcache %d\n", i);
3265 tcache_dsm_ptrs[i] = tcache_bases[i];
3266 tcache_ptr = tcache_ptrs[i];
3267 do_host_disasm(i);
3268#endif
3269
3270 if (block_tables[i] != NULL)
3271 free(block_tables[i]);
3272 block_tables[i] = NULL;
3273 if (block_link_pool[i] != NULL)
3274 free(block_link_pool[i]);
3275 block_link_pool[i] = NULL;
3276
3277 if (inval_lookup[i] != NULL)
3278 free(inval_lookup[i]);
3279 inval_lookup[i] = NULL;
3280
3281 if (hash_tables[i] != NULL) {
3282 free(hash_tables[i]);
3283 hash_tables[i] = NULL;
3284 }
3285 }
3286
3287 drc_cmn_cleanup();
3288}
3289
3290#endif /* DRC_SH2 */
3291
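// map an SH2 code address to a host pointer usable with FETCH_OP()/FETCH32();
// returns (void *)-1 if the PC is not in BIOS, data array, SDRAM or ROM
// (NULL can't serve as the error value since it is a valid result)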
3292static void *dr_get_pc_base(u32 pc, int is_slave)
3293{
3294 void *ret = NULL;
3295 u32 mask = 0;
3296
3297 if ((pc & ~0x7ff) == 0) {
3298 // BIOS
3299 ret = is_slave ? Pico32xMem->sh2_rom_s.w : Pico32xMem->sh2_rom_m.w;
3300 mask = 0x7ff;
3301 }
3302 else if ((pc & 0xfffff000) == 0xc0000000) {
3303 // data array
3304 ret = sh2s[is_slave].data_array;
3305 mask = 0xfff;
3306 }
3307 else if ((pc & 0xc6000000) == 0x06000000) {
3308 // SDRAM
3309 ret = Pico32xMem->sdram;
3310 mask = 0x03ffff;
3311 }
3312 else if ((pc & 0xc6000000) == 0x02000000) {
3313 // ROM
3314 if ((pc & 0x3fffff) < Pico.romsize)
3315 ret = Pico.rom;
3316 mask = 0x3fffff;
3317 }
3318
3319 if (ret == NULL)
3320 return (void *)-1; // NULL is valid value
3321
3322 return (char *)ret - (pc & ~mask);
3323}
3324
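// first pass of the translator: disassemble up to BLOCK_INSN_LIMIT insns
// starting at base_pc into ops[]/op_flags[], do some analysis (T propagation,
// literal pool extent) and report where the block and its literals end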
3325void scan_block(u32 base_pc, int is_slave, u8 *op_flags, u32 *end_pc_out,
3326 u32 *end_literals_out)
3327{
3328 u16 *dr_pc_base;
3329 u32 pc, op, tmp;
3330 u32 end_pc, end_literals = 0;
3331 u32 lowest_mova = 0;
3332 struct op_data *opd;
3333 int next_is_delay = 0;
3334 int end_block = 0;
3335 int i, i_end;
3336
3337 memset(op_flags, 0, BLOCK_INSN_LIMIT);
3338
3339 dr_pc_base = dr_get_pc_base(base_pc, is_slave);
3340
3341 // 1st pass: disassemble
3342 for (i = 0, pc = base_pc; ; i++, pc += 2) {
3343 // we need an ops[] entry after the last one initialized,
3344 // so do it before end_block checks
3345 opd = &ops[i];
3346 opd->op = OP_UNHANDLED;
3347 opd->rm = -1;
3348 opd->source = opd->dest = 0;
3349 opd->cycles = 1;
3350 opd->imm = 0;
3351
3352 if (next_is_delay) {
3353 op_flags[i] |= OF_DELAY_OP;
3354 next_is_delay = 0;
3355 }
3356 else if (end_block || i >= BLOCK_INSN_LIMIT - 2)
3357 break;
3358
3359 op = FETCH_OP(pc);
3360 switch ((op & 0xf000) >> 12)
3361 {
3362 /////////////////////////////////////////////
3363 case 0x00:
3364 switch (op & 0x0f)
3365 {
3366 case 0x02:
3367 switch (GET_Fx())
3368 {
3369 case 0: // STC SR,Rn 0000nnnn00000010
3370 tmp = SHR_SR;
3371 break;
3372 case 1: // STC GBR,Rn 0000nnnn00010010
3373 tmp = SHR_GBR;
3374 break;
3375 case 2: // STC VBR,Rn 0000nnnn00100010
3376 tmp = SHR_VBR;
3377 break;
3378 default:
3379 goto undefined;
3380 }
3381 opd->op = OP_MOVE;
3382 opd->source = BITMASK1(tmp);
3383 opd->dest = BITMASK1(GET_Rn());
3384 break;
3385 case 0x03:
3386 CHECK_UNHANDLED_BITS(0xd0, undefined);
3387 // BRAF Rm 0000mmmm00100011
3388 // BSRF Rm 0000mmmm00000011
3389 opd->op = OP_BRANCH_RF;
3390 opd->rm = GET_Rn();
3391 opd->source = BITMASK1(opd->rm);
3392 opd->dest = BITMASK1(SHR_PC);
3393 if (!(op & 0x20))
3394 opd->dest |= BITMASK1(SHR_PR);
3395 opd->cycles = 2;
3396 next_is_delay = 1;
3397 end_block = 1;
3398 break;
3399 case 0x04: // MOV.B Rm,@(R0,Rn) 0000nnnnmmmm0100
3400 case 0x05: // MOV.W Rm,@(R0,Rn) 0000nnnnmmmm0101
3401 case 0x06: // MOV.L Rm,@(R0,Rn) 0000nnnnmmmm0110
3402 opd->source = BITMASK3(GET_Rm(), SHR_R0, GET_Rn());
3403 break;
3404 case 0x07:
3405 // MUL.L Rm,Rn 0000nnnnmmmm0111
3406 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3407 opd->dest = BITMASK1(SHR_MACL);
3408 opd->cycles = 2;
3409 break;
3410 case 0x08:
3411 CHECK_UNHANDLED_BITS(0xf00, undefined);
3412 switch (GET_Fx())
3413 {
3414 case 0: // CLRT 0000000000001000
3415 opd->op = OP_SETCLRT;
3416 opd->dest = BITMASK1(SHR_T);
3417 opd->imm = 0;
3418 break;
3419 case 1: // SETT 0000000000011000
3420 opd->op = OP_SETCLRT;
3421 opd->dest = BITMASK1(SHR_T);
3422 opd->imm = 1;
3423 break;
3424 case 2: // CLRMAC 0000000000101000
3425 opd->dest = BITMASK3(SHR_T, SHR_MACL, SHR_MACH);
3426 break;
3427 default:
3428 goto undefined;
3429 }
3430 break;
3431 case 0x09:
3432 switch (GET_Fx())
3433 {
3434 case 0: // NOP 0000000000001001
3435 CHECK_UNHANDLED_BITS(0xf00, undefined);
3436 break;
3437 case 1: // DIV0U 0000000000011001
3438 CHECK_UNHANDLED_BITS(0xf00, undefined);
3439 opd->dest = BITMASK2(SHR_SR, SHR_T);
3440 break;
3441 case 2: // MOVT Rn 0000nnnn00101001
3442 opd->source = BITMASK1(SHR_T);
3443 opd->dest = BITMASK1(GET_Rn());
3444 break;
3445 default:
3446 goto undefined;
3447 }
3448 break;
3449 case 0x0a:
3450 switch (GET_Fx())
3451 {
3452 case 0: // STS MACH,Rn 0000nnnn00001010
3453 tmp = SHR_MACH;
3454 break;
3455 case 1: // STS MACL,Rn 0000nnnn00011010
3456 tmp = SHR_MACL;
3457 break;
3458 case 2: // STS PR,Rn 0000nnnn00101010
3459 tmp = SHR_PR;
3460 break;
3461 default:
3462 goto undefined;
3463 }
3464 opd->op = OP_MOVE;
3465 opd->source = BITMASK1(tmp);
3466 opd->dest = BITMASK1(GET_Rn());
3467 break;
3468 case 0x0b:
3469 CHECK_UNHANDLED_BITS(0xf00, undefined);
3470 switch (GET_Fx())
3471 {
3472 case 0: // RTS 0000000000001011
3473 opd->op = OP_BRANCH_R;
3474 opd->rm = SHR_PR;
3475 opd->source = BITMASK1(opd->rm);
3476 opd->dest = BITMASK1(SHR_PC);
3477 opd->cycles = 2;
3478 next_is_delay = 1;
3479 end_block = 1;
3480 break;
3481 case 1: // SLEEP 0000000000011011
3482 opd->op = OP_SLEEP;
3483 end_block = 1;
3484 break;
3485 case 2: // RTE 0000000000101011
3486 opd->op = OP_RTE;
3487 opd->source = BITMASK1(SHR_SP);
3488 opd->dest = BITMASK2(SHR_SR, SHR_PC);
3489 opd->cycles = 4;
3490 next_is_delay = 1;
3491 end_block = 1;
3492 break;
3493 default:
3494 goto undefined;
3495 }
3496 break;
3497 case 0x0c: // MOV.B @(R0,Rm),Rn 0000nnnnmmmm1100
3498 case 0x0d: // MOV.W @(R0,Rm),Rn 0000nnnnmmmm1101
3499 case 0x0e: // MOV.L @(R0,Rm),Rn 0000nnnnmmmm1110
3500 opd->source = BITMASK2(GET_Rm(), SHR_R0);
3501 opd->dest = BITMASK1(GET_Rn());
3502 break;
3503 case 0x0f: // MAC.L @Rm+,@Rn+ 0000nnnnmmmm1111
3504 opd->source = BITMASK5(GET_Rm(), GET_Rn(), SHR_SR, SHR_MACL, SHR_MACH);
3505 opd->dest = BITMASK4(GET_Rm(), GET_Rn(), SHR_MACL, SHR_MACH);
3506 opd->cycles = 3;
3507 break;
3508 default:
3509 goto undefined;
3510 }
3511 break;
3512
3513 /////////////////////////////////////////////
3514 case 0x01:
3515 // MOV.L Rm,@(disp,Rn) 0001nnnnmmmmdddd
3516 opd->source = BITMASK1(GET_Rm());
3517 opd->source |= BITMASK1(GET_Rn());
3518 opd->imm = (op & 0x0f) * 4;
3519 break;
3520
3521 /////////////////////////////////////////////
3522 case 0x02:
3523 switch (op & 0x0f)
3524 {
3525 case 0x00: // MOV.B Rm,@Rn 0010nnnnmmmm0000
3526 case 0x01: // MOV.W Rm,@Rn 0010nnnnmmmm0001
3527 case 0x02: // MOV.L Rm,@Rn 0010nnnnmmmm0010
3528 opd->source = BITMASK1(GET_Rm());
3529 opd->source |= BITMASK1(GET_Rn());
3530 break;
3531 case 0x04: // MOV.B Rm,@-Rn 0010nnnnmmmm0100
3532 case 0x05: // MOV.W Rm,@-Rn 0010nnnnmmmm0101
3533 case 0x06: // MOV.L Rm,@-Rn 0010nnnnmmmm0110
3534 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3535 opd->dest = BITMASK1(GET_Rn());
3536 break;
3537 case 0x07: // DIV0S Rm,Rn 0010nnnnmmmm0111
3538 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3539 opd->dest = BITMASK1(SHR_SR);
3540 break;
3541 case 0x08: // TST Rm,Rn 0010nnnnmmmm1000
3542 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3543 opd->dest = BITMASK1(SHR_T);
3544 break;
3545 case 0x09: // AND Rm,Rn 0010nnnnmmmm1001
3546 case 0x0a: // XOR Rm,Rn 0010nnnnmmmm1010
3547 case 0x0b: // OR Rm,Rn 0010nnnnmmmm1011
3548 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3549 opd->dest = BITMASK1(GET_Rn());
3550 break;
3551 case 0x0c: // CMP/STR Rm,Rn 0010nnnnmmmm1100
3552 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3553 opd->dest = BITMASK1(SHR_T);
3554 break;
3555 case 0x0d: // XTRCT Rm,Rn 0010nnnnmmmm1101
3556 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3557 opd->dest = BITMASK1(GET_Rn());
3558 break;
3559 case 0x0e: // MULU.W Rm,Rn 0010nnnnmmmm1110
3560 case 0x0f: // MULS.W Rm,Rn 0010nnnnmmmm1111
3561 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3562 opd->dest = BITMASK1(SHR_MACL);
3563 break;
3564 default:
3565 goto undefined;
3566 }
3567 break;
3568
3569 /////////////////////////////////////////////
3570 case 0x03:
3571 switch (op & 0x0f)
3572 {
3573 case 0x00: // CMP/EQ Rm,Rn 0011nnnnmmmm0000
3574 case 0x02: // CMP/HS Rm,Rn 0011nnnnmmmm0010
3575 case 0x03: // CMP/GE Rm,Rn 0011nnnnmmmm0011
3576 case 0x06: // CMP/HI Rm,Rn 0011nnnnmmmm0110
3577 case 0x07: // CMP/GT Rm,Rn 0011nnnnmmmm0111
3578 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3579 opd->dest = BITMASK1(SHR_T);
3580 break;
3581 case 0x04: // DIV1 Rm,Rn 0011nnnnmmmm0100
3582 opd->source = BITMASK3(GET_Rm(), GET_Rn(), SHR_SR);
3583 opd->dest = BITMASK2(GET_Rn(), SHR_SR);
3584 break;
3585 case 0x05: // DMULU.L Rm,Rn 0011nnnnmmmm0101
3586 case 0x0d: // DMULS.L Rm,Rn 0011nnnnmmmm1101
3587 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3588 opd->dest = BITMASK2(SHR_MACL, SHR_MACH);
3589 opd->cycles = 2;
3590 break;
3591 case 0x08: // SUB Rm,Rn 0011nnnnmmmm1000
3592 case 0x0c: // ADD Rm,Rn 0011nnnnmmmm1100
3593 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3594 opd->dest = BITMASK1(GET_Rn());
3595 break;
3596 case 0x0a: // SUBC Rm,Rn 0011nnnnmmmm1010
3597 case 0x0e: // ADDC Rm,Rn 0011nnnnmmmm1110
3598 opd->source = BITMASK3(GET_Rm(), GET_Rn(), SHR_T);
3599 opd->dest = BITMASK2(GET_Rn(), SHR_T);
3600 break;
3601 case 0x0b: // SUBV Rm,Rn 0011nnnnmmmm1011
3602 case 0x0f: // ADDV Rm,Rn 0011nnnnmmmm1111
3603 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3604 opd->dest = BITMASK2(GET_Rn(), SHR_T);
3605 break;
3606 default:
3607 goto undefined;
3608 }
3609 break;
3610
3611 /////////////////////////////////////////////
3612 case 0x04:
3613 switch (op & 0x0f)
3614 {
3615 case 0x00:
3616 switch (GET_Fx())
3617 {
3618 case 0: // SHLL Rn 0100nnnn00000000
3619 case 2: // SHAL Rn 0100nnnn00100000
3620 opd->source = BITMASK1(GET_Rn());
3621 opd->dest = BITMASK2(GET_Rn(), SHR_T);
3622 break;
3623 case 1: // DT Rn 0100nnnn00010000
3624 opd->source = BITMASK1(GET_Rn());
3625 opd->dest = BITMASK2(GET_Rn(), SHR_T);
3626 break;
3627 default:
3628 goto undefined;
3629 }
3630 break;
3631 case 0x01:
3632 switch (GET_Fx())
3633 {
3634 case 0: // SHLR Rn 0100nnnn00000001
3635 case 2: // SHAR Rn 0100nnnn00100001
3636 opd->source = BITMASK1(GET_Rn());
3637 opd->dest = BITMASK2(GET_Rn(), SHR_T);
3638 break;
3639 case 1: // CMP/PZ Rn 0100nnnn00010001
3640 opd->source = BITMASK1(GET_Rn());
3641 opd->dest = BITMASK1(SHR_T);
3642 break;
3643 default:
3644 goto undefined;
3645 }
3646 break;
3647 case 0x02:
3648 case 0x03:
3649 switch (op & 0x3f)
3650 {
3651 case 0x02: // STS.L MACH,@-Rn 0100nnnn00000010
3652 tmp = SHR_MACH;
3653 break;
3654 case 0x12: // STS.L MACL,@-Rn 0100nnnn00010010
3655 tmp = SHR_MACL;
3656 break;
3657 case 0x22: // STS.L PR,@-Rn 0100nnnn00100010
3658 tmp = SHR_PR;
3659 break;
3660 case 0x03: // STC.L SR,@-Rn 0100nnnn00000011
3661 tmp = SHR_SR;
3662 opd->cycles = 2;
3663 break;
3664 case 0x13: // STC.L GBR,@-Rn 0100nnnn00010011
3665 tmp = SHR_GBR;
3666 opd->cycles = 2;
3667 break;
3668 case 0x23: // STC.L VBR,@-Rn 0100nnnn00100011
3669 tmp = SHR_VBR;
3670 opd->cycles = 2;
3671 break;
3672 default:
3673 goto undefined;
3674 }
3675 opd->source = BITMASK2(GET_Rn(), tmp);
3676 opd->dest = BITMASK1(GET_Rn());
3677 break;
3678 case 0x04:
3679 case 0x05:
3680 switch (op & 0x3f)
3681 {
3682 case 0x04: // ROTL Rn 0100nnnn00000100
3683 case 0x05: // ROTR Rn 0100nnnn00000101
3684 opd->source = BITMASK1(GET_Rn());
3685 opd->dest = BITMASK2(GET_Rn(), SHR_T);
3686 break;
3687 case 0x24: // ROTCL Rn 0100nnnn00100100
3688 case 0x25: // ROTCR Rn 0100nnnn00100101
3689 opd->source = BITMASK2(GET_Rn(), SHR_T);
3690 opd->dest = BITMASK2(GET_Rn(), SHR_T);
3691 break;
3692 case 0x15: // CMP/PL Rn 0100nnnn00010101
3693 opd->source = BITMASK1(GET_Rn());
3694 opd->dest = BITMASK1(SHR_T);
3695 break;
3696 default:
3697 goto undefined;
3698 }
3699 break;
3700 case 0x06:
3701 case 0x07:
3702 switch (op & 0x3f)
3703 {
3704 case 0x06: // LDS.L @Rm+,MACH 0100mmmm00000110
3705 tmp = SHR_MACH;
3706 break;
3707 case 0x16: // LDS.L @Rm+,MACL 0100mmmm00010110
3708 tmp = SHR_MACL;
3709 break;
3710 case 0x26: // LDS.L @Rm+,PR 0100mmmm00100110
3711 tmp = SHR_PR;
3712 break;
3713 case 0x07: // LDC.L @Rm+,SR 0100mmmm00000111
3714 tmp = SHR_SR;
3715 opd->cycles = 3;
3716 break;
3717 case 0x17: // LDC.L @Rm+,GBR 0100mmmm00010111
3718 tmp = SHR_GBR;
3719 opd->cycles = 3;
3720 break;
3721 case 0x27: // LDC.L @Rm+,VBR 0100mmmm00100111
3722 tmp = SHR_VBR;
3723 opd->cycles = 3;
3724 break;
3725 default:
3726 goto undefined;
3727 }
3728 opd->source = BITMASK1(GET_Rn());
3729 opd->dest = BITMASK2(GET_Rn(), tmp);
3730 break;
3731 case 0x08:
3732 case 0x09:
3733 switch (GET_Fx())
3734 {
3735 case 0:
3736 // SHLL2 Rn 0100nnnn00001000
3737 // SHLR2 Rn 0100nnnn00001001
3738 break;
3739 case 1:
3740 // SHLL8 Rn 0100nnnn00011000
3741 // SHLR8 Rn 0100nnnn00011001
3742 break;
3743 case 2:
3744 // SHLL16 Rn 0100nnnn00101000
3745 // SHLR16 Rn 0100nnnn00101001
3746 break;
3747 default:
3748 goto undefined;
3749 }
3750 opd->source = BITMASK1(GET_Rn());
3751 opd->dest = BITMASK1(GET_Rn());
3752 break;
3753 case 0x0a:
3754 switch (GET_Fx())
3755 {
3756 case 0: // LDS Rm,MACH 0100mmmm00001010
3757 tmp = SHR_MACH;
3758 break;
3759 case 1: // LDS Rm,MACL 0100mmmm00011010
3760 tmp = SHR_MACL;
3761 break;
3762 case 2: // LDS Rm,PR 0100mmmm00101010
3763 tmp = SHR_PR;
3764 break;
3765 default:
3766 goto undefined;
3767 }
3768 opd->op = OP_MOVE;
3769 opd->source = BITMASK1(GET_Rn());
3770 opd->dest = BITMASK1(tmp);
3771 break;
3772 case 0x0b:
3773 switch (GET_Fx())
3774 {
3775 case 0: // JSR @Rm 0100mmmm00001011
3776 opd->dest = BITMASK1(SHR_PR);
3777 case 2: // JMP @Rm 0100mmmm00101011
3778 opd->op = OP_BRANCH_R;
3779 opd->rm = GET_Rn();
3780 opd->source = BITMASK1(opd->rm);
3781 opd->dest |= BITMASK1(SHR_PC);
3782 opd->cycles = 2;
3783 next_is_delay = 1;
3784 end_block = 1;
3785 break;
3786 case 1: // TAS.B @Rn 0100nnnn00011011
3787 opd->source = BITMASK1(GET_Rn());
3788 opd->dest = BITMASK1(SHR_T);
3789 opd->cycles = 4;
3790 break;
3791 default:
3792 goto undefined;
3793 }
3794 break;
3795 case 0x0e:
3796 switch (GET_Fx())
3797 {
3798 case 0: // LDC Rm,SR 0100mmmm00001110
3799 tmp = SHR_SR;
3800 break;
3801 case 1: // LDC Rm,GBR 0100mmmm00011110
3802 tmp = SHR_GBR;
3803 break;
3804 case 2: // LDC Rm,VBR 0100mmmm00101110
3805 tmp = SHR_VBR;
3806 break;
3807 default:
3808 goto undefined;
3809 }
3810 opd->op = OP_MOVE;
3811 opd->source = BITMASK1(GET_Rn());
3812 opd->dest = BITMASK1(tmp);
3813 break;
3814 case 0x0f:
3815 // MAC.W @Rm+,@Rn+ 0100nnnnmmmm1111
3816 opd->source = BITMASK5(GET_Rm(), GET_Rn(), SHR_SR, SHR_MACL, SHR_MACH);
3817 opd->dest = BITMASK4(GET_Rm(), GET_Rn(), SHR_MACL, SHR_MACH);
3818 opd->cycles = 3;
3819 break;
3820 default:
3821 goto undefined;
3822 }
3823 break;
3824
3825 /////////////////////////////////////////////
3826 case 0x05:
3827 // MOV.L @(disp,Rm),Rn 0101nnnnmmmmdddd
3828 opd->source = BITMASK1(GET_Rm());
3829 opd->dest = BITMASK1(GET_Rn());
3830 opd->imm = (op & 0x0f) * 4;
3831 break;
3832
3833 /////////////////////////////////////////////
3834 case 0x06:
3835 switch (op & 0x0f)
3836 {
3837 case 0x04: // MOV.B @Rm+,Rn 0110nnnnmmmm0100
3838 case 0x05: // MOV.W @Rm+,Rn 0110nnnnmmmm0101
3839 case 0x06: // MOV.L @Rm+,Rn 0110nnnnmmmm0110
3840 opd->dest = BITMASK1(GET_Rm());
3841 case 0x00: // MOV.B @Rm,Rn 0110nnnnmmmm0000
3842 case 0x01: // MOV.W @Rm,Rn 0110nnnnmmmm0001
3843 case 0x02: // MOV.L @Rm,Rn 0110nnnnmmmm0010
3844 opd->source = BITMASK1(GET_Rm());
3845 opd->dest |= BITMASK1(GET_Rn());
3846 break;
3847 case 0x0a: // NEGC Rm,Rn 0110nnnnmmmm1010
3848 opd->source = BITMASK2(GET_Rm(), SHR_T);
3849 opd->dest = BITMASK2(GET_Rn(), SHR_T);
3850 break;
3851 case 0x03: // MOV Rm,Rn 0110nnnnmmmm0011
3852 opd->op = OP_MOVE;
3853 goto arith_rmrn;
3854 case 0x07: // NOT Rm,Rn 0110nnnnmmmm0111
3855 case 0x08: // SWAP.B Rm,Rn 0110nnnnmmmm1000
3856 case 0x09: // SWAP.W Rm,Rn 0110nnnnmmmm1001
3857 case 0x0b: // NEG Rm,Rn 0110nnnnmmmm1011
3858 case 0x0c: // EXTU.B Rm,Rn 0110nnnnmmmm1100
3859 case 0x0d: // EXTU.W Rm,Rn 0110nnnnmmmm1101
3860 case 0x0e: // EXTS.B Rm,Rn 0110nnnnmmmm1110
3861 case 0x0f: // EXTS.W Rm,Rn 0110nnnnmmmm1111
3862 arith_rmrn:
3863 opd->source = BITMASK1(GET_Rm());
3864 opd->dest = BITMASK1(GET_Rn());
3865 break;
3866 }
3867 break;
3868
3869 /////////////////////////////////////////////
3870 case 0x07:
3871 // ADD #imm,Rn 0111nnnniiiiiiii
3872 opd->source = opd->dest = BITMASK1(GET_Rn());
3873 opd->imm = (int)(signed char)op;
3874 break;
3875
3876 /////////////////////////////////////////////
3877 case 0x08:
3878 switch (op & 0x0f00)
3879 {
3880 case 0x0000: // MOV.B R0,@(disp,Rn) 10000000nnnndddd
3881 opd->source = BITMASK2(GET_Rm(), SHR_R0);
3882 opd->imm = (op & 0x0f);
3883 break;
3884 case 0x0100: // MOV.W R0,@(disp,Rn) 10000001nnnndddd
3885 opd->source = BITMASK2(GET_Rm(), SHR_R0);
3886 opd->imm = (op & 0x0f) * 2;
3887 break;
3888 case 0x0400: // MOV.B @(disp,Rm),R0 10000100mmmmdddd
3889 opd->source = BITMASK1(GET_Rm());
3890 opd->dest = BITMASK1(SHR_R0);
3891 opd->imm = (op & 0x0f);
3892 break;
3893 case 0x0500: // MOV.W @(disp,Rm),R0 10000101mmmmdddd
3894 opd->source = BITMASK1(GET_Rm());
3895 opd->dest = BITMASK1(SHR_R0);
3896 opd->imm = (op & 0x0f) * 2;
3897 break;
3898 case 0x0800: // CMP/EQ #imm,R0 10001000iiiiiiii
3899 opd->source = BITMASK1(SHR_R0);
3900 opd->dest = BITMASK1(SHR_T);
3901 opd->imm = (int)(signed char)op;
3902 break;
3903 case 0x0d00: // BT/S label 10001101dddddddd
3904 case 0x0f00: // BF/S label 10001111dddddddd
3905 next_is_delay = 1;
3906 // fallthrough
3907 case 0x0900: // BT label 10001001dddddddd
3908 case 0x0b00: // BF label 10001011dddddddd
3909 opd->op = (op & 0x0200) ? OP_BRANCH_CF : OP_BRANCH_CT;
3910 opd->source = BITMASK1(SHR_T);
3911 opd->dest = BITMASK1(SHR_PC);
3912 opd->imm = ((signed int)(op << 24) >> 23);
3913 opd->imm += pc + 4;
3914 if (base_pc <= opd->imm && opd->imm < base_pc + BLOCK_INSN_LIMIT * 2)
3915 op_flags[(opd->imm - base_pc) / 2] |= OF_BTARGET;
3916 break;
3917 default:
3918 goto undefined;
3919 }
3920 break;
3921
3922 /////////////////////////////////////////////
3923 case 0x09:
3924 // MOV.W @(disp,PC),Rn 1001nnnndddddddd
3925 opd->op = OP_LOAD_POOL;
3926 tmp = pc + 2;
3927 if (op_flags[i] & OF_DELAY_OP) {
3928 if (ops[i-1].op == OP_BRANCH)
3929 tmp = ops[i-1].imm;
3930 else
3931 tmp = 0;
3932 }
3933 opd->source = BITMASK1(SHR_PC);
3934 opd->dest = BITMASK1(GET_Rn());
3935 if (tmp)
3936 opd->imm = tmp + 2 + (op & 0xff) * 2;
3937 opd->size = 1;
3938 break;
3939
3940 /////////////////////////////////////////////
3941 case 0x0b:
3942 // BSR label 1011dddddddddddd
3943 opd->dest = BITMASK1(SHR_PR);
3944 case 0x0a:
3945 // BRA label 1010dddddddddddd
3946 opd->op = OP_BRANCH;
3947 opd->dest |= BITMASK1(SHR_PC);
3948 opd->imm = ((signed int)(op << 20) >> 19);
3949 opd->imm += pc + 4;
3950 opd->cycles = 2;
3951 next_is_delay = 1;
3952 end_block = 1;
3953 if (base_pc <= opd->imm && opd->imm < base_pc + BLOCK_INSN_LIMIT * 2)
3954 op_flags[(opd->imm - base_pc) / 2] |= OF_BTARGET;
3955 break;
3956
3957 /////////////////////////////////////////////
3958 case 0x0c:
3959 switch (op & 0x0f00)
3960 {
3961 case 0x0000: // MOV.B R0,@(disp,GBR) 11000000dddddddd
3962 case 0x0100: // MOV.W R0,@(disp,GBR) 11000001dddddddd
3963 case 0x0200: // MOV.L R0,@(disp,GBR) 11000010dddddddd
3964 opd->source = BITMASK2(SHR_GBR, SHR_R0);
3965 opd->size = (op & 0x300) >> 8;
3966 opd->imm = (op & 0xff) << opd->size;
3967 break;
3968 case 0x0400: // MOV.B @(disp,GBR),R0 11000100dddddddd
3969 case 0x0500: // MOV.W @(disp,GBR),R0 11000101dddddddd
3970 case 0x0600: // MOV.L @(disp,GBR),R0 11000110dddddddd
3971 opd->source = BITMASK1(SHR_GBR);
3972 opd->dest = BITMASK1(SHR_R0);
3973 opd->size = (op & 0x300) >> 8;
3974 opd->imm = (op & 0xff) << opd->size;
3975 break;
3976 case 0x0300: // TRAPA #imm 11000011iiiiiiii
3977 opd->source = BITMASK2(SHR_PC, SHR_SR);
3978 opd->dest = BITMASK1(SHR_PC);
3979 opd->imm = (op & 0xff) * 4;
3980 opd->cycles = 8;
3981 end_block = 1; // FIXME
3982 break;
3983 case 0x0700: // MOVA @(disp,PC),R0 11000111dddddddd
3984 opd->op = OP_MOVA;
3985 tmp = pc + 2;
3986 if (op_flags[i] & OF_DELAY_OP) {
3987 if (ops[i-1].op == OP_BRANCH)
3988 tmp = ops[i-1].imm;
3989 else
3990 tmp = 0;
3991 }
3992 opd->dest = BITMASK1(SHR_R0);
3993 if (tmp) {
3994 opd->imm = (tmp + 2 + (op & 0xff) * 4) & ~3;
3995 if (opd->imm >= base_pc) {
3996 if (lowest_mova == 0 || opd->imm < lowest_mova)
3997 lowest_mova = opd->imm;
3998 }
3999 }
4000 break;
4001 case 0x0800: // TST #imm,R0 11001000iiiiiiii
4002 opd->source = BITMASK1(SHR_R0);
4003 opd->dest = BITMASK1(SHR_T);
4004 opd->imm = op & 0xff;
4005 break;
4006 case 0x0900: // AND #imm,R0 11001001iiiiiiii
4007 opd->source = opd->dest = BITMASK1(SHR_R0);
4008 opd->imm = op & 0xff;
4009 break;
4010 case 0x0a00: // XOR #imm,R0 11001010iiiiiiii
4011 opd->source = opd->dest = BITMASK1(SHR_R0);
4012 opd->imm = op & 0xff;
4013 break;
4014 case 0x0b00: // OR #imm,R0 11001011iiiiiiii
4015 opd->source = opd->dest = BITMASK1(SHR_R0);
4016 opd->imm = op & 0xff;
4017 break;
4018 case 0x0c00: // TST.B #imm,@(R0,GBR) 11001100iiiiiiii
4019 opd->source = BITMASK2(SHR_GBR, SHR_R0);
4020 opd->dest = BITMASK1(SHR_T);
4021 opd->imm = op & 0xff;
4022 opd->cycles = 3;
4023 break;
4024 case 0x0d00: // AND.B #imm,@(R0,GBR) 11001101iiiiiiii
4025 case 0x0e00: // XOR.B #imm,@(R0,GBR) 11001110iiiiiiii
4026 case 0x0f00: // OR.B #imm,@(R0,GBR) 11001111iiiiiiii
4027 opd->source = BITMASK2(SHR_GBR, SHR_R0);
4028 opd->imm = op & 0xff;
4029 opd->cycles = 3;
4030 break;
4031 default:
4032 goto undefined;
4033 }
4034 break;
4035
4036 /////////////////////////////////////////////
4037 case 0x0d:
4038 // MOV.L @(disp,PC),Rn 1101nnnndddddddd
4039 opd->op = OP_LOAD_POOL;
4040 tmp = pc + 2;
4041 if (op_flags[i] & OF_DELAY_OP) {
4042 if (ops[i-1].op == OP_BRANCH)
4043 tmp = ops[i-1].imm;
4044 else
4045 tmp = 0;
4046 }
4047 opd->source = BITMASK1(SHR_PC);
4048 opd->dest = BITMASK1(GET_Rn());
4049 if (tmp)
4050 opd->imm = (tmp + 2 + (op & 0xff) * 4) & ~3;
4051 opd->size = 2;
4052 break;
4053
4054 /////////////////////////////////////////////
4055 case 0x0e:
4056 // MOV #imm,Rn 1110nnnniiiiiiii
4057 opd->dest = BITMASK1(GET_Rn());
4058 opd->imm = (u32)(signed int)(signed char)op;
4059 break;
4060
4061 default:
4062 undefined:
4063 elprintf(EL_ANOMALY, "%csh2 drc: unhandled op %04x @ %08x",
4064 is_slave ? 's' : 'm', op, pc);
4065 break;
4066 }
4067 }
4068 i_end = i;
4069 end_pc = pc;
4070
4071 // 2nd pass: some analysis
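// if a conditional branch is reached with T statically known to satisfy its
// condition, it is promoted to an unconditional OP_BRANCH and the block is
// cut short at that point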
4072 for (i = 0; i < i_end; i++) {
4073 opd = &ops[i];
4074
4075 // propagate T (TODO: DIV0U)
4076 if ((opd->op == OP_SETCLRT && !opd->imm) || opd->op == OP_BRANCH_CT)
4077 op_flags[i + 1] |= OF_T_CLEAR;
4078 else if ((opd->op == OP_SETCLRT && opd->imm) || opd->op == OP_BRANCH_CF)
4079 op_flags[i + 1] |= OF_T_SET;
4080
4081 if ((op_flags[i] & OF_BTARGET) || (opd->dest & BITMASK1(SHR_T)))
4082 op_flags[i] &= ~(OF_T_SET | OF_T_CLEAR);
4083 else
4084 op_flags[i + 1] |= op_flags[i] & (OF_T_SET | OF_T_CLEAR);
4085
4086 if ((opd->op == OP_BRANCH_CT && (op_flags[i] & OF_T_SET))
4087 || (opd->op == OP_BRANCH_CF && (op_flags[i] & OF_T_CLEAR)))
4088 {
4089 opd->op = OP_BRANCH;
4090 opd->cycles = 3;
4091 i_end = i + 1;
4092 if (op_flags[i + 1] & OF_DELAY_OP) {
4093 opd->cycles = 2;
4094 i_end++;
4095 }
4096 }
4097 else if (opd->op == OP_LOAD_POOL)
4098 {
4099 if (opd->imm < end_pc + MAX_LITERAL_OFFSET) {
4100 if (end_literals < opd->imm + opd->size * 2)
4101 end_literals = opd->imm + opd->size * 2;
4102 }
4103 }
4104 }
4105 end_pc = base_pc + i_end * 2;
4106 if (end_literals < end_pc)
4107 end_literals = end_pc;
4108
4109 // end_literals is used to decide to inline a literal or not
4110 // XXX: need better detection of whether this is actually used for writes
4111 if (lowest_mova >= base_pc) {
4112 if (lowest_mova < end_literals) {
4113 dbg(1, "mova for %08x, block %08x", lowest_mova, base_pc);
4114 end_literals = end_pc;
4115 }
4116 if (lowest_mova < end_pc) {
4117 dbg(1, "warning: mova inside of blk for %08x, block %08x",
4118 lowest_mova, base_pc);
4119 end_literals = end_pc;
4120 }
4121 }
4122
4123 *end_pc_out = end_pc;
4124 if (end_literals_out != NULL)
4125 *end_literals_out = end_literals;
4126}
4127
4128// vim:shiftwidth=2:ts=2:expandtab