comment adjustments
[picodrive.git] / cpu / sh2 / compiler.c
1/*
2 * SH2 recompiler
3 * (C) notaz, 2009,2010,2013
4 *
5 * This work is licensed under the terms of MAME license.
6 * See COPYING file in the top-level directory.
7 *
8 * notes:
9 * - tcache, block descriptor, link buffer overflows result in sh2_translate()
10 * failure, followed by full tcache invalidation for that region
11 * - jumps between blocks are tracked for SMC handling (in block_entry->links),
12 * except jumps between different tcaches
13 *
14 * implemented:
15 * - static register allocation
16 * - remaining register caching and tracking in temporaries
17 * - block-local branch linking
18 * - block linking (except between tcaches)
19 * - some constant propagation
20 *
21 * TODO:
22 * - better constant propagation
23 * - stack caching?
24 * - bug fixing
25 */
26#include <stddef.h>
27#include <stdio.h>
28#include <stdlib.h>
29#include <assert.h>
30
31#include "../../pico/pico_int.h"
32#include "sh2.h"
33#include "compiler.h"
34#include "../drc/cmn.h"
35#include "../debug.h"
36
37// features
38#define PROPAGATE_CONSTANTS 1
39#define LINK_BRANCHES 1
40
41// limits (per block)
42#define MAX_BLOCK_SIZE (BLOCK_INSN_LIMIT * 6 * 6)
43
44// max literal offset from the block end
45#define MAX_LITERAL_OFFSET 32*2
46#define MAX_LITERALS (BLOCK_INSN_LIMIT / 4)
47#define MAX_LOCAL_BRANCHES 32
48
49// debug stuff
50// 1 - warnings/errors
51// 2 - block info/smc
52// 4 - asm
53// 8 - runtime block entry log
54// {
55#ifndef DRC_DEBUG
56#define DRC_DEBUG 0
57#endif
58
59#if DRC_DEBUG
60#define dbg(l,...) { \
61 if ((l) & DRC_DEBUG) \
62 elprintf(EL_STATUS, ##__VA_ARGS__); \
63}
64#include "mame/sh2dasm.h"
65#include <platform/libpicofe/linux/host_dasm.h>
66static int insns_compiled, hash_collisions, host_insn_count;
67#define COUNT_OP \
68 host_insn_count++
69#else // !DRC_DEBUG
70#define COUNT_OP
71#define dbg(...)
72#endif
73
74///
75#define FETCH_OP(pc) \
76 dr_pc_base[(pc) / 2]
77
78#define FETCH32(a) \
79 ((dr_pc_base[(a) / 2] << 16) | dr_pc_base[(a) / 2 + 1])
80
81#define CHECK_UNHANDLED_BITS(mask, label) { \
82 if ((op & (mask)) != 0) \
83 goto label; \
84}
85
86#define GET_Fx() \
87 ((op >> 4) & 0x0f)
88
89#define GET_Rm GET_Fx
90
91#define GET_Rn() \
92 ((op >> 8) & 0x0f)
93
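// helpers for building register usage bitmasks, e.g. BITMASK3(0, 1, 3) == 0x0b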
94#define BITMASK1(v0) (1 << (v0))
95#define BITMASK2(v0,v1) ((1 << (v0)) | (1 << (v1)))
96#define BITMASK3(v0,v1,v2) (BITMASK2(v0,v1) | (1 << (v2)))
97#define BITMASK4(v0,v1,v2,v3) (BITMASK3(v0,v1,v2) | (1 << (v3)))
98#define BITMASK5(v0,v1,v2,v3,v4) (BITMASK4(v0,v1,v2,v3) | (1 << (v4)))
99
100#define SHR_T SHR_SR // might make them separate someday
101
102static struct op_data {
103 u8 op;
104 u8 cycles;
105 u8 size; // 0, 1, 2 - byte, word, long
106 s8 rm; // branch or load/store data reg
107 u32 source; // bitmask of src regs
108 u32 dest; // bitmask of dest regs
109 u32 imm; // immediate/io address/branch target
110 // (for literal - address, not value)
111} ops[BLOCK_INSN_LIMIT];
112
113enum op_types {
114 OP_UNHANDLED = 0,
115 OP_BRANCH,
116 OP_BRANCH_CT, // conditional, branch if T set
117 OP_BRANCH_CF, // conditional, branch if T clear
118 OP_BRANCH_R, // indirect
119 OP_BRANCH_RF, // indirect far (PC + Rm)
120 OP_SETCLRT, // T flag set/clear
121 OP_MOVE, // register move
122 OP_LOAD_POOL, // literal pool load, imm is address
123 OP_MOVA,
124 OP_SLEEP,
125 OP_RTE,
126};
127
128#ifdef DRC_SH2
129
130#if (DRC_DEBUG & 4)
131static u8 *tcache_dsm_ptrs[3];
132static char sh2dasm_buff[64];
133#define do_host_disasm(tcid) \
134 host_dasm(tcache_dsm_ptrs[tcid], tcache_ptr - tcache_dsm_ptrs[tcid]); \
135 tcache_dsm_ptrs[tcid] = tcache_ptr
136#else
137#define do_host_disasm(x)
138#endif
139
140#if (DRC_DEBUG & 8) || defined(PDB)
141static void REGPARM(3) *sh2_drc_log_entry(void *block, SH2 *sh2, u32 sr)
142{
143 if (block != NULL) {
144 dbg(8, "= %csh2 enter %08x %p, c=%d", sh2->is_slave ? 's' : 'm',
145 sh2->pc, block, (signed int)sr >> 12);
146 pdb_step(sh2, sh2->pc);
147 }
148 return block;
149}
150#endif
151// } debug
152
153#define TCACHE_BUFFERS 3
154
155// we have 3 translation cache buffers, split from one drc/cmn buffer.
156// BIOS shares tcache with data array because it's only used for init
157// and can be discarded early
158// XXX: need to tune sizes
159static const int tcache_sizes[TCACHE_BUFFERS] = {
160 DRC_TCACHE_SIZE * 6 / 8, // ROM (rarely used), DRAM
161 DRC_TCACHE_SIZE / 8, // BIOS, data array in master sh2
162 DRC_TCACHE_SIZE / 8, // ... slave
163};
164
165static u8 *tcache_bases[TCACHE_BUFFERS];
166static u8 *tcache_ptrs[TCACHE_BUFFERS];
167
168// ptr for code emitters
169static u8 *tcache_ptr;
170
171#define MAX_BLOCK_ENTRIES (BLOCK_INSN_LIMIT / 8)
172
173struct block_link {
174 u32 target_pc;
175 void *jump; // insn address
176 struct block_link *next; // either in block_entry->links or unresolved_links
177};
178
179struct block_entry {
180 u32 pc;
181 void *tcache_ptr; // translated block for above PC
182 struct block_entry *next; // next block in hash_table with same pc hash
183 struct block_link *links; // links to this entry
184#if (DRC_DEBUG & 2)
185 struct block_desc *block;
186#endif
187};
188
189struct block_desc {
190 u32 addr; // block start SH2 PC address
191 u32 end_addr; // address after last op or literal
192#if (DRC_DEBUG & 2)
193 int refcount;
194#endif
195 int entry_count;
196 struct block_entry entryp[MAX_BLOCK_ENTRIES];
197};
198
199static const int block_max_counts[TCACHE_BUFFERS] = {
200 4*1024,
201 256,
202 256,
203};
204static struct block_desc *block_tables[TCACHE_BUFFERS];
205static int block_counts[TCACHE_BUFFERS];
206
207// we have block_link_pool to avoid using mallocs
208static const int block_link_pool_max_counts[TCACHE_BUFFERS] = {
209 4*1024,
210 256,
211 256,
212};
213static struct block_link *block_link_pool[TCACHE_BUFFERS];
214static int block_link_pool_counts[TCACHE_BUFFERS];
215static struct block_link *unresolved_links[TCACHE_BUFFERS];
216
217// used for invalidation
218static const int ram_sizes[TCACHE_BUFFERS] = {
219 0x40000,
220 0x1000,
221 0x1000,
222};
223#define ADDR_TO_BLOCK_PAGE 0x100
224
225struct block_list {
226 struct block_desc *block;
227 struct block_list *next;
228};
229
230// array of pointers to block_lists for RAM and 2 data arrays
231// each array has len: sizeof(mem) / ADDR_TO_BLOCK_PAGE
232static struct block_list **inval_lookup[TCACHE_BUFFERS];
233
234static const int hash_table_sizes[TCACHE_BUFFERS] = {
235 0x1000,
236 0x100,
237 0x100,
238};
239static struct block_entry **hash_tables[TCACHE_BUFFERS];
240
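// PC hash: mix the high and low address bits, then mask by (table size - 1);
// usage: be = HASH_FUNC(hash_tables[tcid], pc, hash_table_sizes[tcid] - 1)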
241#define HASH_FUNC(hash_tab, addr, mask) \
242 (hash_tab)[(((addr) >> 20) ^ ((addr) >> 2)) & (mask)]
243
244// host register tracking
245enum {
246 HR_FREE,
247 HR_CACHED, // 'val' has sh2_reg_e
248// HR_CONST, // 'val' has a constant
249 HR_TEMP, // reg used for temp storage
250};
251
252enum {
253 HRF_DIRTY = 1 << 0, // reg has "dirty" value to be written to ctx
254 HRF_LOCKED = 1 << 1, // HR_CACHED can't be evicted
255};
256
257typedef struct {
258 u32 hreg:5; // "host" reg
259 u32 greg:5; // "guest" reg
260 u32 type:3;
261 u32 flags:3;
262 u32 stamp:16; // kind of a timestamp
263} temp_reg_t;
264
265// note: reg_temp[] must have at least as many registers as the
266// handlers need in the worst case (currently 4)
267#ifdef __arm__
268#include "../drc/emit_arm.c"
269
270static const int reg_map_g2h[] = {
271 4, 5, 6, 7,
272 8, -1, -1, -1,
273 -1, -1, -1, -1,
274 -1, -1, -1, 9, // r12 .. sp
275 -1, -1, -1, 10, // SHR_PC, SHR_PPC, SHR_PR, SHR_SR,
276 -1, -1, -1, -1, // SHR_GBR, SHR_VBR, SHR_MACH, SHR_MACL,
277};
278
279static temp_reg_t reg_temp[] = {
280 { 0, },
281 { 1, },
282 { 12, },
283 { 14, },
284 { 2, },
285 { 3, },
286};
287
288#elif defined(__i386__)
289#include "../drc/emit_x86.c"
290
291static const int reg_map_g2h[] = {
292 xSI,-1, -1, -1,
293 -1, -1, -1, -1,
294 -1, -1, -1, -1,
295 -1, -1, -1, -1,
296 -1, -1, -1, xDI,
297 -1, -1, -1, -1,
298};
299
300// ax, cx, dx are usually temporaries by convention
301static temp_reg_t reg_temp[] = {
302 { xAX, },
303 { xBX, },
304 { xCX, },
305 { xDX, },
306};
307
308#else
309#error unsupported arch
310#endif
311
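// SR flag bits as kept in the cached SR value; T_save is a scratch copy of T
// used around branch delay slots (see DELAY_SAVE_T below)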
312#define T 0x00000001
313#define S 0x00000002
314#define I 0x000000f0
315#define Q 0x00000100
316#define M 0x00000200
317#define T_save 0x00000800
318
319#define I_SHIFT 4
320#define Q_SHIFT 8
321#define M_SHIFT 9
322
323static void REGPARM(1) (*sh2_drc_entry)(SH2 *sh2);
324static void (*sh2_drc_dispatcher)(void);
325static void (*sh2_drc_exit)(void);
326static void (*sh2_drc_test_irq)(void);
327
328static u32 REGPARM(2) (*sh2_drc_read8)(u32 a, SH2 *sh2);
329static u32 REGPARM(2) (*sh2_drc_read16)(u32 a, SH2 *sh2);
330static u32 REGPARM(2) (*sh2_drc_read32)(u32 a, SH2 *sh2);
331static void REGPARM(2) (*sh2_drc_write8)(u32 a, u32 d);
332static void REGPARM(2) (*sh2_drc_write16)(u32 a, u32 d);
333static int REGPARM(3) (*sh2_drc_write32)(u32 a, u32 d, SH2 *sh2);
334
335// address space stuff
336static int dr_ctx_get_mem_ptr(u32 a, u32 *mask)
337{
338 int poffs = -1;
339
340 if ((a & ~0x7ff) == 0) {
341 // BIOS
342 poffs = offsetof(SH2, p_bios);
343 *mask = 0x7ff;
344 }
345 else if ((a & 0xfffff000) == 0xc0000000) {
346 // data array
347 poffs = offsetof(SH2, p_da);
348 *mask = 0xfff;
349 }
350 else if ((a & 0xc6000000) == 0x06000000) {
351 // SDRAM
352 poffs = offsetof(SH2, p_sdram);
353 *mask = 0x03ffff;
354 }
355 else if ((a & 0xc6000000) == 0x02000000) {
356 // ROM
357 poffs = offsetof(SH2, p_rom);
358 *mask = 0x3fffff;
359 }
360
361 return poffs;
362}
363
364static struct block_entry *dr_get_entry(u32 pc, int is_slave, int *tcache_id)
365{
366 struct block_entry *be;
367 u32 tcid = 0, mask;
368
369 // data arrays have their own caches
370 if ((pc & 0xe0000000) == 0xc0000000 || (pc & ~0xfff) == 0)
371 tcid = 1 + is_slave;
372
373 *tcache_id = tcid;
374
375 mask = hash_table_sizes[tcid] - 1;
376 be = HASH_FUNC(hash_tables[tcid], pc, mask);
377 for (; be != NULL; be = be->next)
378 if (be->pc == pc)
379 return be;
380
381 return NULL;
382}
383
384// ---------------------------------------------------------------
385
386// block management
387static void add_to_block_list(struct block_list **blist, struct block_desc *block)
388{
389 struct block_list *added = malloc(sizeof(*added));
390 if (!added) {
391 elprintf(EL_ANOMALY, "drc OOM (1)");
392 return;
393 }
394 added->block = block;
395 added->next = *blist;
396 *blist = added;
397}
398
399static void rm_from_block_list(struct block_list **blist, struct block_desc *block)
400{
401 struct block_list *prev = NULL, *current = *blist;
402 for (; current != NULL; prev = current, current = current->next) {
403 if (current->block == block) {
404 if (prev == NULL)
405 *blist = current->next;
406 else
407 prev->next = current->next;
408 free(current);
409 return;
410 }
411 }
412 dbg(1, "can't rm block %p (%08x-%08x)",
413 block, block->addr, block->end_addr);
414}
415
416static void rm_block_list(struct block_list **blist)
417{
418 struct block_list *tmp, *current = *blist;
419 while (current != NULL) {
420 tmp = current;
421 current = current->next;
422 free(tmp);
423 }
424 *blist = NULL;
425}
426
427static void REGPARM(1) flush_tcache(int tcid)
428{
429 int i;
430
431 dbg(1, "tcache #%d flush! (%d/%d, bds %d/%d)", tcid,
432 tcache_ptrs[tcid] - tcache_bases[tcid], tcache_sizes[tcid],
433 block_counts[tcid], block_max_counts[tcid]);
434
435 block_counts[tcid] = 0;
436 block_link_pool_counts[tcid] = 0;
437 unresolved_links[tcid] = NULL;
438 memset(hash_tables[tcid], 0, sizeof(*hash_tables[0]) * hash_table_sizes[tcid]);
439 tcache_ptrs[tcid] = tcache_bases[tcid];
440 if (Pico32xMem != NULL) {
441 if (tcid == 0) // ROM, RAM
442 memset(Pico32xMem->drcblk_ram, 0,
443 sizeof(Pico32xMem->drcblk_ram));
444 else
445 memset(Pico32xMem->drcblk_da[tcid - 1], 0,
446 sizeof(Pico32xMem->drcblk_da[0]));
447 }
448#if (DRC_DEBUG & 4)
449 tcache_dsm_ptrs[tcid] = tcache_bases[tcid];
450#endif
451
452 for (i = 0; i < ram_sizes[tcid] / ADDR_TO_BLOCK_PAGE; i++)
453 rm_block_list(&inval_lookup[tcid][i]);
454}
455
456static void add_to_hashlist(struct block_entry *be, int tcache_id)
457{
458 u32 tcmask = hash_table_sizes[tcache_id] - 1;
459
460 be->next = HASH_FUNC(hash_tables[tcache_id], be->pc, tcmask);
461 HASH_FUNC(hash_tables[tcache_id], be->pc, tcmask) = be;
462
463#if (DRC_DEBUG & 2)
464 if (be->next != NULL) {
465 printf(" %08x: hash collision with %08x\n",
466 be->pc, be->next->pc);
467 hash_collisions++;
468 }
469#endif
470}
471
472static void rm_from_hashlist(struct block_entry *be, int tcache_id)
473{
474 u32 tcmask = hash_table_sizes[tcache_id] - 1;
475 struct block_entry *cur, *prev;
476
477 cur = HASH_FUNC(hash_tables[tcache_id], be->pc, tcmask);
478 if (cur == NULL)
479 goto missing;
480
481 if (be == cur) { // first
482 HASH_FUNC(hash_tables[tcache_id], be->pc, tcmask) = be->next;
483 return;
484 }
485
486 for (prev = cur, cur = cur->next; cur != NULL; cur = cur->next) {
487 if (cur == be) {
488 prev->next = cur->next;
489 return;
490 }
491 }
492
493missing:
494 dbg(1, "rm_from_hashlist: be %p %08x missing?", be, be->pc);
495}
496
497static struct block_desc *dr_add_block(u32 addr, u32 end_addr, int is_slave, int *blk_id)
498{
499 struct block_entry *be;
500 struct block_desc *bd;
501 int tcache_id;
502 int *bcount;
503
504 // do a lookup to get tcache_id and check for a block override
505 be = dr_get_entry(addr, is_slave, &tcache_id);
506 if (be != NULL)
507 dbg(1, "block override for %08x", addr);
508
509 bcount = &block_counts[tcache_id];
510 if (*bcount >= block_max_counts[tcache_id]) {
511 dbg(1, "bd overflow for tcache %d", tcache_id);
512 return NULL;
513 }
514
515 bd = &block_tables[tcache_id][*bcount];
516 bd->addr = addr;
517 bd->end_addr = end_addr;
518
519 bd->entry_count = 1;
520 bd->entryp[0].pc = addr;
521 bd->entryp[0].tcache_ptr = tcache_ptr;
522 bd->entryp[0].links = NULL;
523#if (DRC_DEBUG & 2)
524 bd->entryp[0].block = bd;
525 bd->refcount = 0;
526#endif
527 add_to_hashlist(&bd->entryp[0], tcache_id);
528
529 *blk_id = *bcount;
530 (*bcount)++;
531
532 return bd;
533}
534
535static void REGPARM(3) *dr_lookup_block(u32 pc, int is_slave, int *tcache_id)
536{
537 struct block_entry *be = NULL;
538 void *block = NULL;
539
540 be = dr_get_entry(pc, is_slave, tcache_id);
541 if (be != NULL)
542 block = be->tcache_ptr;
543
544#if (DRC_DEBUG & 2)
545 if (be != NULL)
546 be->block->refcount++;
547#endif
548 return block;
549}
550
551static void *dr_failure(void)
552{
553 lprintf("recompilation failed\n");
554 exit(1);
555}
556
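// prepare the jump target for a branch that leaves the current block:
// link directly to the target block if it is already translated in this
// tcache, otherwise record an unresolved link and go through the dispatcher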
557static void *dr_prepare_ext_branch(u32 pc, int is_slave, int tcache_id)
558{
559#if LINK_BRANCHES
560 struct block_link *bl = block_link_pool[tcache_id];
561 int cnt = block_link_pool_counts[tcache_id];
562 struct block_entry *be = NULL;
563 int target_tcache_id;
564 int i;
565
566 be = dr_get_entry(pc, is_slave, &target_tcache_id);
567 if (target_tcache_id != tcache_id)
568 return sh2_drc_dispatcher;
569
570 // reuse entries freed at the end of the pool (target_pc == 0)
571 for (i = cnt - 1; i >= 0; i--)
572 if (bl[i].target_pc != 0)
573 break;
574 cnt = i + 1;
575 if (cnt >= block_link_pool_max_counts[tcache_id]) {
576 dbg(1, "bl overflow for tcache %d", tcache_id);
577 return NULL;
578 }
579 bl += cnt;
580 block_link_pool_counts[tcache_id]++;
581
582 bl->target_pc = pc;
583 bl->jump = tcache_ptr;
584
585 if (be != NULL) {
586 dbg(2, "- early link from %p to pc %08x", bl->jump, pc);
587 bl->next = be->links;
588 be->links = bl;
589 return be->tcache_ptr;
590 }
591 else {
592 bl->next = unresolved_links[tcache_id];
593 unresolved_links[tcache_id] = bl;
594 return sh2_drc_dispatcher;
595 }
596#else
597 return sh2_drc_dispatcher;
598#endif
599}
600
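// a new entry for be->pc was just emitted; patch any unresolved links that
// target this pc so they jump straight to the new code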
601static void dr_link_blocks(struct block_entry *be, int tcache_id)
602{
603#if LINK_BRANCHES
604 struct block_link *first = unresolved_links[tcache_id];
605 struct block_link *bl, *prev, *tmp;
606 u32 pc = be->pc;
607
608 for (bl = prev = first; bl != NULL; ) {
609 if (bl->target_pc == pc) {
610 dbg(2, "- link from %p to pc %08x", bl->jump, pc);
611 emith_jump_patch(bl->jump, tcache_ptr);
612
613 // move bl from unresolved_links to block_entry
614 tmp = bl->next;
615 bl->next = be->links;
616 be->links = bl;
617
618 if (bl == first)
619 first = prev = bl = tmp;
620 else
621 prev->next = bl = tmp;
622 continue;
623 }
624 prev = bl;
625 bl = bl->next;
626 }
627 unresolved_links[tcache_id] = first;
628
629 // could sync arm caches here, but that's unnecessary
630#endif
631}
632
633#define ADD_TO_ARRAY(array, count, item, failcode) \
634 if (count >= ARRAY_SIZE(array)) { \
635 dbg(1, "warning: " #array " overflow"); \
636 failcode; \
637 } \
638 array[count++] = item;
639
640static int find_in_array(u32 *array, size_t size, u32 what)
641{
642 size_t i;
643 for (i = 0; i < size; i++)
644 if (what == array[i])
645 return i;
646
647 return -1;
648}
649
650// ---------------------------------------------------------------
651
652// register cache / constant propagation stuff
653typedef enum {
654 RC_GR_READ,
655 RC_GR_WRITE,
656 RC_GR_RMW,
657} rc_gr_mode;
658
659static int rcache_get_reg_(sh2_reg_e r, rc_gr_mode mode, int do_locking);
660
661// guest regs with constants
662static u32 dr_gcregs[24];
663// masks of regs holding a known constant / with a constant not yet materialized
664static u32 dr_gcregs_mask;
665static u32 dr_gcregs_dirty;
666
667#if PROPAGATE_CONSTANTS
668static void gconst_new(sh2_reg_e r, u32 val)
669{
670 int i;
671
672 dr_gcregs_mask |= 1 << r;
673 dr_gcregs_dirty |= 1 << r;
674 dr_gcregs[r] = val;
675
676 // throw away old r that we might have cached
677 for (i = ARRAY_SIZE(reg_temp) - 1; i >= 0; i--) {
678 if ((reg_temp[i].type == HR_CACHED) &&
679 reg_temp[i].greg == r) {
680 reg_temp[i].type = HR_FREE;
681 reg_temp[i].flags = 0;
682 }
683 }
684}
685#endif
686
687static int gconst_get(sh2_reg_e r, u32 *val)
688{
689 if (dr_gcregs_mask & (1 << r)) {
690 *val = dr_gcregs[r];
691 return 1;
692 }
693 return 0;
694}
695
696static int gconst_check(sh2_reg_e r)
697{
698 if ((dr_gcregs_mask | dr_gcregs_dirty) & (1 << r))
699 return 1;
700 return 0;
701}
702
703// update hr if dirty, else do nothing
704static int gconst_try_read(int hr, sh2_reg_e r)
705{
706 if (dr_gcregs_dirty & (1 << r)) {
707 emith_move_r_imm(hr, dr_gcregs[r]);
708 dr_gcregs_dirty &= ~(1 << r);
709 return 1;
710 }
711 return 0;
712}
713
714static void gconst_check_evict(sh2_reg_e r)
715{
716 if (dr_gcregs_mask & (1 << r))
717 // no longer cached in reg, make dirty again
718 dr_gcregs_dirty |= 1 << r;
719}
720
721static void gconst_kill(sh2_reg_e r)
722{
723 dr_gcregs_mask &= ~(1 << r);
724 dr_gcregs_dirty &= ~(1 << r);
725}
726
727static void gconst_clean(void)
728{
729 int i;
730
731 for (i = 0; i < ARRAY_SIZE(dr_gcregs); i++)
732 if (dr_gcregs_dirty & (1 << i)) {
733 // using RC_GR_READ here: it will call gconst_try_read,
734 // cache the reg and mark it dirty.
735 rcache_get_reg_(i, RC_GR_READ, 0);
736 }
737}
738
739static void gconst_invalidate(void)
740{
741 dr_gcregs_mask = dr_gcregs_dirty = 0;
742}
743
744static u16 rcache_counter;
745
746static temp_reg_t *rcache_evict(void)
747{
748 // evict reg with oldest stamp
749 int i, oldest = -1;
750 u16 min_stamp = (u16)-1;
751
752 for (i = 0; i < ARRAY_SIZE(reg_temp); i++) {
753 if (reg_temp[i].type == HR_CACHED && !(reg_temp[i].flags & HRF_LOCKED) &&
754 reg_temp[i].stamp <= min_stamp) {
755 min_stamp = reg_temp[i].stamp;
756 oldest = i;
757 }
758 }
759
760 if (oldest == -1) {
761 printf("no registers to evict, aborting\n");
762 exit(1);
763 }
764
765 i = oldest;
766 if (reg_temp[i].type == HR_CACHED) {
767 if (reg_temp[i].flags & HRF_DIRTY)
768 // writeback
769 emith_ctx_write(reg_temp[i].hreg, reg_temp[i].greg * 4);
770 gconst_check_evict(reg_temp[i].greg);
771 }
772
773 reg_temp[i].type = HR_FREE;
774 reg_temp[i].flags = 0;
775 return &reg_temp[i];
776}
777
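// return the statically allocated host reg for guest reg r, or -1 if none;
// materializes a pending constant into it when the reg is going to be read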
778static int get_reg_static(sh2_reg_e r, rc_gr_mode mode)
779{
780 int i = reg_map_g2h[r];
781 if (i != -1) {
782 if (mode != RC_GR_WRITE)
783 gconst_try_read(i, r);
784 }
785 return i;
786}
787
788// note: must not be called when doing conditional code
789static int rcache_get_reg_(sh2_reg_e r, rc_gr_mode mode, int do_locking)
790{
791 temp_reg_t *tr;
792 int i, ret;
793
794 // maybe statically mapped?
795 ret = get_reg_static(r, mode);
796 if (ret != -1)
797 goto end;
798
799 rcache_counter++;
800
801 // maybe already cached?
802 // if so, prefer it over gconst (the two must be kept in sync)
803 for (i = ARRAY_SIZE(reg_temp) - 1; i >= 0; i--) {
804 if (reg_temp[i].type == HR_CACHED && reg_temp[i].greg == r) {
805 reg_temp[i].stamp = rcache_counter;
806 if (mode != RC_GR_READ)
807 reg_temp[i].flags |= HRF_DIRTY;
808 ret = reg_temp[i].hreg;
809 goto end;
810 }
811 }
812
813 // use any free reg
814 for (i = ARRAY_SIZE(reg_temp) - 1; i >= 0; i--) {
815 if (reg_temp[i].type == HR_FREE) {
816 tr = &reg_temp[i];
817 goto do_alloc;
818 }
819 }
820
821 tr = rcache_evict();
822
823do_alloc:
824 tr->type = HR_CACHED;
825 if (do_locking)
826 tr->flags |= HRF_LOCKED;
827 if (mode != RC_GR_READ)
828 tr->flags |= HRF_DIRTY;
829 tr->greg = r;
830 tr->stamp = rcache_counter;
831 ret = tr->hreg;
832
833 if (mode != RC_GR_WRITE) {
834 if (gconst_check(r)) {
835 if (gconst_try_read(ret, r))
836 tr->flags |= HRF_DIRTY;
837 }
838 else
839 emith_ctx_read(tr->hreg, r * 4);
840 }
841
842end:
843 if (mode != RC_GR_READ)
844 gconst_kill(r);
845
846 return ret;
847}
848
849static int rcache_get_reg(sh2_reg_e r, rc_gr_mode mode)
850{
851 return rcache_get_reg_(r, mode, 1);
852}
853
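// allocate a host reg for temporary use (not bound to any guest reg)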
854static int rcache_get_tmp(void)
855{
856 temp_reg_t *tr;
857 int i;
858
859 for (i = 0; i < ARRAY_SIZE(reg_temp); i++)
860 if (reg_temp[i].type == HR_FREE) {
861 tr = &reg_temp[i];
862 goto do_alloc;
863 }
864
865 tr = rcache_evict();
866
867do_alloc:
868 tr->type = HR_TEMP;
869 return tr->hreg;
870}
871
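// free up the host reg that will carry call argument 'arg', writing back any
// guest reg currently cached in it; returns the reg_temp[] index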
872static int rcache_get_arg_id(int arg)
873{
874 int i, r = 0;
875 host_arg2reg(r, arg);
876
877 for (i = 0; i < ARRAY_SIZE(reg_temp); i++)
878 if (reg_temp[i].hreg == r)
879 break;
880
881 if (i == ARRAY_SIZE(reg_temp)) // can't happen
882 exit(1);
883
884 if (reg_temp[i].type == HR_CACHED) {
885 // writeback
886 if (reg_temp[i].flags & HRF_DIRTY)
887 emith_ctx_write(reg_temp[i].hreg, reg_temp[i].greg * 4);
888 gconst_check_evict(reg_temp[i].greg);
889 }
890 else if (reg_temp[i].type == HR_TEMP) {
891 printf("arg %d reg %d already used, aborting\n", arg, r);
892 exit(1);
893 }
894
895 reg_temp[i].type = HR_FREE;
896 reg_temp[i].flags = 0;
897
898 return i;
899}
900
901// get a reg to be used as function arg
902static int rcache_get_tmp_arg(int arg)
903{
904 int id = rcache_get_arg_id(arg);
905 reg_temp[id].type = HR_TEMP;
906
907 return reg_temp[id].hreg;
908}
909
910// same but caches a reg. RC_GR_READ only.
911static int rcache_get_reg_arg(int arg, sh2_reg_e r)
912{
913 int i, srcr, dstr, dstid;
914 int dirty = 0, src_dirty = 0;
915
916 dstid = rcache_get_arg_id(arg);
917 dstr = reg_temp[dstid].hreg;
918
919 // maybe already statically mapped?
920 srcr = get_reg_static(r, RC_GR_READ);
921 if (srcr != -1)
922 goto do_cache;
923
924 // maybe already cached?
925 for (i = ARRAY_SIZE(reg_temp) - 1; i >= 0; i--) {
926 if ((reg_temp[i].type == HR_CACHED) &&
927 reg_temp[i].greg == r)
928 {
929 srcr = reg_temp[i].hreg;
930 if (reg_temp[i].flags & HRF_DIRTY)
931 src_dirty = 1;
932 goto do_cache;
933 }
934 }
935
936 // must read
937 srcr = dstr;
938 if (gconst_check(r)) {
939 if (gconst_try_read(srcr, r))
940 dirty = 1;
941 }
942 else
943 emith_ctx_read(srcr, r * 4);
944
945do_cache:
946 if (dstr != srcr)
947 emith_move_r_r(dstr, srcr);
948#if 1
949 else
950 dirty |= src_dirty;
951
952 if (dirty)
953 // must clean, callers might want to modify the arg before call
954 emith_ctx_write(dstr, r * 4);
955#else
956 if (dirty)
957 reg_temp[dstid].flags |= HRF_DIRTY;
958#endif
959
960 reg_temp[dstid].stamp = ++rcache_counter;
961 reg_temp[dstid].type = HR_CACHED;
962 reg_temp[dstid].greg = r;
963 reg_temp[dstid].flags |= HRF_LOCKED;
964 return dstr;
965}
966
967static void rcache_free_tmp(int hr)
968{
969 int i;
970 for (i = 0; i < ARRAY_SIZE(reg_temp); i++)
971 if (reg_temp[i].hreg == hr)
972 break;
973
974 if (i == ARRAY_SIZE(reg_temp) || reg_temp[i].type != HR_TEMP) {
975 printf("rcache_free_tmp fail: #%i hr %d, type %d\n", i, hr, reg_temp[i].type);
976 return;
977 }
978
979 reg_temp[i].type = HR_FREE;
980 reg_temp[i].flags = 0;
981}
982
983static void rcache_unlock(int hr)
984{
985 int i;
986 for (i = 0; i < ARRAY_SIZE(reg_temp); i++)
987 if (reg_temp[i].type == HR_CACHED && reg_temp[i].hreg == hr)
988 reg_temp[i].flags &= ~HRF_LOCKED;
989}
990
991static void rcache_unlock_all(void)
992{
993 int i;
994 for (i = 0; i < ARRAY_SIZE(reg_temp); i++)
995 reg_temp[i].flags &= ~HRF_LOCKED;
996}
997
998static inline u32 rcache_used_hreg_mask(void)
999{
1000 u32 mask = 0;
1001 int i;
1002
1003 for (i = 0; i < ARRAY_SIZE(reg_temp); i++)
1004 if (reg_temp[i].type != HR_FREE)
1005 mask |= 1 << reg_temp[i].hreg;
1006
1007 return mask;
1008}
1009
1010static void rcache_clean(void)
1011{
1012 int i;
1013 gconst_clean();
1014
1015 for (i = 0; i < ARRAY_SIZE(reg_temp); i++)
1016 if (reg_temp[i].type == HR_CACHED && (reg_temp[i].flags & HRF_DIRTY)) {
1017 // writeback
1018 emith_ctx_write(reg_temp[i].hreg, reg_temp[i].greg * 4);
1019 reg_temp[i].flags &= ~HRF_DIRTY;
1020 }
1021}
1022
1023static void rcache_invalidate(void)
1024{
1025 int i;
1026 for (i = 0; i < ARRAY_SIZE(reg_temp); i++) {
1027 reg_temp[i].type = HR_FREE;
1028 reg_temp[i].flags = 0;
1029 }
1030 rcache_counter = 0;
1031
1032 gconst_invalidate();
1033}
1034
1035static void rcache_flush(void)
1036{
1037 rcache_clean();
1038 rcache_invalidate();
1039}
1040
1041// ---------------------------------------------------------------
1042
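// for a directly mapped address, fetch the base pointer from the SH2 context
// into a temp reg and return it together with a small remaining offset;
// returns -1 if the address can't be accessed directly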
1043static int emit_get_rbase_and_offs(u32 a, u32 *offs)
1044{
1045 u32 mask = 0;
1046 int poffs;
1047 int hr;
1048
1049 poffs = dr_ctx_get_mem_ptr(a, &mask);
1050 if (poffs == -1)
1051 return -1;
1052
1053 // XXX: could use some related reg
1054 hr = rcache_get_tmp();
1055 emith_ctx_read(hr, poffs);
1056 emith_add_r_imm(hr, a & mask & ~0xff);
1057 *offs = a & 0xff; // XXX: ARM oriented..
1058 return hr;
1059}
1060
1061static void emit_move_r_imm32(sh2_reg_e dst, u32 imm)
1062{
1063#if PROPAGATE_CONSTANTS
1064 gconst_new(dst, imm);
1065#else
1066 int hr = rcache_get_reg(dst, RC_GR_WRITE);
1067 emith_move_r_imm(hr, imm);
1068#endif
1069}
1070
1071static void emit_move_r_r(sh2_reg_e dst, sh2_reg_e src)
1072{
1073 int hr_d = rcache_get_reg(dst, RC_GR_WRITE);
1074 int hr_s = rcache_get_reg(src, RC_GR_READ);
1075
1076 emith_move_r_r(hr_d, hr_s);
1077}
1078
1079// T must be clear, and comparison done just before this
1080static void emit_or_t_if_eq(int srr)
1081{
1082 EMITH_SJMP_START(DCOND_NE);
1083 emith_or_r_imm_c(DCOND_EQ, srr, T);
1084 EMITH_SJMP_END(DCOND_NE);
1085}
1086
1087// arguments must be ready
1088// reg cache must be clean before call
1089static int emit_memhandler_read_(int size, int ram_check)
1090{
1091 int arg0, arg1;
1092 host_arg2reg(arg0, 0);
1093
1094 rcache_clean();
1095
1096 // must writeback cycles for poll detection stuff
1097 // FIXME: rm
1098 if (reg_map_g2h[SHR_SR] != -1)
1099 emith_ctx_write(reg_map_g2h[SHR_SR], SHR_SR * 4);
1100
1101 arg1 = rcache_get_tmp_arg(1);
1102 emith_move_r_r(arg1, CONTEXT_REG);
1103
1104#if 0 // can't do this because of unmapped reads
1105 // ndef PDB_NET
1106 if (ram_check && Pico.rom == (void *)0x02000000 && Pico32xMem->sdram == (void *)0x06000000) {
1107 int tmp = rcache_get_tmp();
1108 emith_and_r_r_imm(tmp, arg0, 0xfb000000);
1109 emith_cmp_r_imm(tmp, 0x02000000);
1110 switch (size) {
1111 case 0: // 8
1112 EMITH_SJMP3_START(DCOND_NE);
1113 emith_eor_r_imm_c(DCOND_EQ, arg0, 1);
1114 emith_read8_r_r_offs_c(DCOND_EQ, arg0, arg0, 0);
1115 EMITH_SJMP3_MID(DCOND_NE);
1116 emith_call_cond(DCOND_NE, sh2_drc_read8);
1117 EMITH_SJMP3_END();
1118 break;
1119 case 1: // 16
1120 EMITH_SJMP3_START(DCOND_NE);
1121 emith_read16_r_r_offs_c(DCOND_EQ, arg0, arg0, 0);
1122 EMITH_SJMP3_MID(DCOND_NE);
1123 emith_call_cond(DCOND_NE, sh2_drc_read16);
1124 EMITH_SJMP3_END();
1125 break;
1126 case 2: // 32
1127 EMITH_SJMP3_START(DCOND_NE);
1128 emith_read_r_r_offs_c(DCOND_EQ, arg0, arg0, 0);
1129 emith_ror_c(DCOND_EQ, arg0, arg0, 16);
1130 EMITH_SJMP3_MID(DCOND_NE);
1131 emith_call_cond(DCOND_NE, sh2_drc_read32);
1132 EMITH_SJMP3_END();
1133 break;
1134 }
1135 }
1136 else
1137#endif
1138 {
1139 switch (size) {
1140 case 0: // 8
1141 emith_call(sh2_drc_read8);
1142 break;
1143 case 1: // 16
1144 emith_call(sh2_drc_read16);
1145 break;
1146 case 2: // 32
1147 emith_call(sh2_drc_read32);
1148 break;
1149 }
1150 }
1151 rcache_invalidate();
1152
1153 if (reg_map_g2h[SHR_SR] != -1)
1154 emith_ctx_read(reg_map_g2h[SHR_SR], SHR_SR * 4);
1155
1156 // assuming the arg0 and return value registers match
1157 return rcache_get_tmp_arg(0);
1158}
1159
1160static int emit_memhandler_read(int size)
1161{
1162 return emit_memhandler_read_(size, 1);
1163}
1164
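// load rd from memory at @(rs + offs): uses a direct pointer access when rs
// holds a known constant address, otherwise calls the read handlers;
// byte and word results are sign-extended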
1165static int emit_memhandler_read_rr(sh2_reg_e rd, sh2_reg_e rs, u32 offs, int size)
1166{
1167 int hr, hr2, ram_check = 1;
1168 u32 val, offs2;
1169
1170 if (gconst_get(rs, &val)) {
1171 hr = emit_get_rbase_and_offs(val + offs, &offs2);
1172 if (hr != -1) {
1173 hr2 = rcache_get_reg(rd, RC_GR_WRITE);
1174 switch (size) {
1175 case 0: // 8
1176 emith_read8_r_r_offs(hr2, hr, offs2 ^ 1);
1177 emith_sext(hr2, hr2, 8);
1178 break;
1179 case 1: // 16
1180 emith_read16_r_r_offs(hr2, hr, offs2);
1181 emith_sext(hr2, hr2, 16);
1182 break;
1183 case 2: // 32
1184 emith_read_r_r_offs(hr2, hr, offs2);
1185 emith_ror(hr2, hr2, 16);
1186 break;
1187 }
1188 rcache_free_tmp(hr);
1189 return hr2;
1190 }
1191
1192 ram_check = 0;
1193 }
1194
1195 hr = rcache_get_reg_arg(0, rs);
1196 if (offs != 0)
1197 emith_add_r_imm(hr, offs);
1198 hr = emit_memhandler_read_(size, ram_check);
1199 hr2 = rcache_get_reg(rd, RC_GR_WRITE);
1200 if (size != 2) {
1201 emith_sext(hr2, hr, (size == 1) ? 16 : 8);
1202 } else
1203 emith_move_r_r(hr2, hr);
1204 rcache_free_tmp(hr);
1205
1206 return hr2;
1207}
1208
1209static void emit_memhandler_write(int size, u32 pc)
1210{
1211 int ctxr;
1212 host_arg2reg(ctxr, 2);
1213 if (reg_map_g2h[SHR_SR] != -1)
1214 emith_ctx_write(reg_map_g2h[SHR_SR], SHR_SR * 4);
1215
1216 rcache_clean();
1217
1218 switch (size) {
1219 case 0: // 8
1220 // XXX: consider inlining sh2_drc_write8
1221 emith_call(sh2_drc_write8);
1222 break;
1223 case 1: // 16
1224 emith_call(sh2_drc_write16);
1225 break;
1226 case 2: // 32
1227 emith_move_r_r(ctxr, CONTEXT_REG);
1228 emith_call(sh2_drc_write32);
1229 break;
1230 }
1231
1232 rcache_invalidate();
1233 if (reg_map_g2h[SHR_SR] != -1)
1234 emith_ctx_read(reg_map_g2h[SHR_SR], SHR_SR * 4);
1235}
1236
1237// @(Rx,Ry)
1238static int emit_indirect_indexed_read(int rx, int ry, int size)
1239{
1240 int a0, t;
1241 a0 = rcache_get_reg_arg(0, rx);
1242 t = rcache_get_reg(ry, RC_GR_READ);
1243 emith_add_r_r(a0, t);
1244 return emit_memhandler_read(size);
1245}
1246
1247// read @Rn, @Rm
1248static void emit_indirect_read_double(u32 *rnr, u32 *rmr, int rn, int rm, int size)
1249{
1250 int tmp;
1251
1252 rcache_get_reg_arg(0, rn);
1253 tmp = emit_memhandler_read(size);
1254 emith_ctx_write(tmp, offsetof(SH2, drc_tmp));
1255 rcache_free_tmp(tmp);
1256 tmp = rcache_get_reg(rn, RC_GR_RMW);
1257 emith_add_r_imm(tmp, 1 << size);
1258 rcache_unlock(tmp);
1259
1260 rcache_get_reg_arg(0, rm);
1261 *rmr = emit_memhandler_read(size);
1262 *rnr = rcache_get_tmp();
1263 emith_ctx_read(*rnr, offsetof(SH2, drc_tmp));
1264 tmp = rcache_get_reg(rm, RC_GR_RMW);
1265 emith_add_r_imm(tmp, 1 << size);
1266 rcache_unlock(tmp);
1267}
1268
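// write (is_write != 0) or read all statically mapped guest regs to/from the
// SH2 context, batching runs of consecutive host regs into multi-reg transfers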
1269static void emit_do_static_regs(int is_write, int tmpr)
1270{
1271 int i, r, count;
1272
1273 for (i = 0; i < ARRAY_SIZE(reg_map_g2h); i++) {
1274 r = reg_map_g2h[i];
1275 if (r == -1)
1276 continue;
1277
1278 for (count = 1; i < ARRAY_SIZE(reg_map_g2h) - 1; i++, r++) {
1279 if (reg_map_g2h[i + 1] != r + 1)
1280 break;
1281 count++;
1282 }
1283
1284 if (count > 1) {
1285 // i, r point to last item
1286 if (is_write)
1287 emith_ctx_write_multiple(r - count + 1, (i - count + 1) * 4, count, tmpr);
1288 else
1289 emith_ctx_read_multiple(r - count + 1, (i - count + 1) * 4, count, tmpr);
1290 } else {
1291 if (is_write)
1292 emith_ctx_write(r, i * 4);
1293 else
1294 emith_ctx_read(r, i * 4);
1295 }
1296 }
1297}
1298
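// emit the common block-entry check: jump to the translated code whose
// pointer was returned in arg0, or fall through if the lookup gave NULL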
1299static void emit_block_entry(void)
1300{
1301 int arg0;
1302
1303 host_arg2reg(arg0, 0);
1304
1305#if (DRC_DEBUG & 8) || defined(PDB)
1306 int arg1, arg2;
1307 host_arg2reg(arg1, 1);
1308 host_arg2reg(arg2, 2);
1309
1310 emit_do_static_regs(1, arg2);
1311 emith_move_r_r(arg1, CONTEXT_REG);
1312 emith_move_r_r(arg2, rcache_get_reg(SHR_SR, RC_GR_READ));
1313 emith_call(sh2_drc_log_entry);
1314 rcache_invalidate();
1315#endif
1316 emith_tst_r_r(arg0, arg0);
1317 EMITH_SJMP_START(DCOND_EQ);
1318 emith_jump_reg_c(DCOND_NE, arg0);
1319 EMITH_SJMP_END(DCOND_EQ);
1320}
1321
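// preserve T in the T_save bit of SR so a delay slot insn can modify T
// without clobbering the branch condition that was evaluated earlier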
1322#define DELAY_SAVE_T(sr) { \
1323 emith_bic_r_imm(sr, T_save); \
1324 emith_tst_r_imm(sr, T); \
1325 EMITH_SJMP_START(DCOND_EQ); \
1326 emith_or_r_imm_c(DCOND_NE, sr, T_save); \
1327 EMITH_SJMP_END(DCOND_EQ); \
1328}
1329
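// the cycle counter is kept in the upper bits of the cached SR (<< 12);
// subtract all cycles accumulated since the last flush in one operation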
1330#define FLUSH_CYCLES(sr) \
1331 if (cycles > 0) { \
1332 emith_sub_r_imm(sr, cycles << 12); \
1333 cycles = 0; \
1334 }
1335
1336static void *dr_get_pc_base(u32 pc, int is_slave);
1337
1338static void REGPARM(2) *sh2_translate(SH2 *sh2, int tcache_id)
1339{
1340 u32 branch_target_pc[MAX_LOCAL_BRANCHES];
1341 void *branch_target_ptr[MAX_LOCAL_BRANCHES];
1342 int branch_target_count = 0;
1343 void *branch_patch_ptr[MAX_LOCAL_BRANCHES];
1344 u32 branch_patch_pc[MAX_LOCAL_BRANCHES];
1345 int branch_patch_count = 0;
1346 u32 literal_addr[MAX_LITERALS];
1347 int literal_addr_count = 0;
1348 u8 op_flags[BLOCK_INSN_LIMIT];
1349 struct {
1350 u32 test_irq:1;
1351 u32 pending_branch_direct:1;
1352 u32 pending_branch_indirect:1;
1353 } drcf = { 0, };
1354
1355 // PC of current, first, last SH2 insn
1356 u32 pc, base_pc, end_pc;
1357 u32 end_literals;
1358 void *block_entry_ptr;
1359 struct block_desc *block;
1360 u16 *dr_pc_base;
1361 struct op_data *opd;
1362 int blkid_main = 0;
1363 int skip_op = 0;
1364 u32 tmp, tmp2;
1365 int cycles;
1366 int i, v;
1367 int op;
1368
1369 base_pc = sh2->pc;
1370
1371 // get base/validate PC
1372 dr_pc_base = dr_get_pc_base(base_pc, sh2->is_slave);
1373 if (dr_pc_base == (void *)-1) {
1374 printf("invalid PC, aborting: %08x\n", base_pc);
1375 // FIXME: be less destructive
1376 exit(1);
1377 }
1378
1379 tcache_ptr = tcache_ptrs[tcache_id];
1380
1381 // predict tcache overflow
1382 tmp = tcache_ptr - tcache_bases[tcache_id];
1383 if (tmp > tcache_sizes[tcache_id] - MAX_BLOCK_SIZE) {
1384 dbg(1, "tcache %d overflow", tcache_id);
1385 return NULL;
1386 }
1387
1388 // initial passes to disassemble and analyze the block
1389 scan_block(base_pc, sh2->is_slave, op_flags, &end_pc, &end_literals);
1390
1391 block = dr_add_block(base_pc, end_literals, sh2->is_slave, &blkid_main);
1392 if (block == NULL)
1393 return NULL;
1394
1395 block_entry_ptr = tcache_ptr;
1396 dbg(2, "== %csh2 block #%d,%d %08x-%08x -> %p", sh2->is_slave ? 's' : 'm',
1397 tcache_id, blkid_main, base_pc, end_pc, block_entry_ptr);
1398
1399 dr_link_blocks(&block->entryp[0], tcache_id);
1400
1401 // collect branch_targets that don't land on delay slots
1402 for (pc = base_pc, i = 0; pc < end_pc; i++, pc += 2) {
1403 if (!(op_flags[i] & OF_BTARGET))
1404 continue;
1405 if (op_flags[i] & OF_DELAY_OP) {
1406 op_flags[i] &= ~OF_BTARGET;
1407 continue;
1408 }
1409 ADD_TO_ARRAY(branch_target_pc, branch_target_count, pc, break);
1410 }
1411
1412 if (branch_target_count > 0) {
1413 memset(branch_target_ptr, 0, sizeof(branch_target_ptr[0]) * branch_target_count);
1414 }
1415
1416 // clear stale state after compile errors
1417 rcache_invalidate();
1418
1419 // -------------------------------------------------
1420 // 3rd pass: actual compilation
1421 pc = base_pc;
1422 cycles = 0;
1423 for (i = 0; pc < end_pc; i++)
1424 {
1425 u32 delay_dep_fw = 0, delay_dep_bk = 0;
1426 u32 tmp3, tmp4, sr;
1427
1428 opd = &ops[i];
1429 op = FETCH_OP(pc);
1430
1431#if (DRC_DEBUG & 2)
1432 insns_compiled++;
1433#endif
1434#if (DRC_DEBUG & 4)
1435 DasmSH2(sh2dasm_buff, pc, op);
1436 printf("%c%08x %04x %s\n", (op_flags[i] & OF_BTARGET) ? '*' : ' ',
1437 pc, op, sh2dasm_buff);
1438#endif
1439
1440 if ((op_flags[i] & OF_BTARGET) || pc == base_pc)
1441 {
1442 if (pc != base_pc)
1443 {
1444 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1445 FLUSH_CYCLES(sr);
1446 rcache_flush();
1447
1448 // make block entry
1449 v = block->entry_count;
1450 if (v < ARRAY_SIZE(block->entryp)) {
1451 block->entryp[v].pc = pc;
1452 block->entryp[v].tcache_ptr = tcache_ptr;
1453 block->entryp[v].links = NULL;
1454#if (DRC_DEBUG & 2)
1455 block->entryp[v].block = block;
1456#endif
1457 add_to_hashlist(&block->entryp[v], tcache_id);
1458 block->entry_count++;
1459
1460 dbg(2, "-- %csh2 block #%d,%d entry %08x -> %p",
1461 sh2->is_slave ? 's' : 'm', tcache_id, blkid_main,
1462 pc, tcache_ptr);
1463
1464 // since we made a block entry, link any other blocks
1465 // that jump to current pc
1466 dr_link_blocks(&block->entryp[v], tcache_id);
1467 }
1468 else {
1469 dbg(1, "too many entryp for block #%d,%d pc=%08x",
1470 tcache_id, blkid_main, pc);
1471 }
1472
1473 do_host_disasm(tcache_id);
1474 }
1475
1476 v = find_in_array(branch_target_pc, branch_target_count, pc);
1477 if (v >= 0)
1478 branch_target_ptr[v] = tcache_ptr;
1479
1480 // must update PC
1481 emit_move_r_imm32(SHR_PC, pc);
1482 rcache_clean();
1483
1484 // check cycles
1485 sr = rcache_get_reg(SHR_SR, RC_GR_READ);
1486 emith_cmp_r_imm(sr, 0);
1487 emith_jump_cond(DCOND_LE, sh2_drc_exit);
1488 do_host_disasm(tcache_id);
1489 rcache_unlock_all();
1490 }
1491
1492#ifdef DRC_CMP
1493 if (!(op_flags[i] & OF_DELAY_OP)) {
1494 emit_move_r_imm32(SHR_PC, pc);
1495 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1496 FLUSH_CYCLES(sr);
1497 rcache_clean();
1498
1499 tmp = rcache_used_hreg_mask();
1500 emith_save_caller_regs(tmp);
1501 emit_do_static_regs(1, 0);
1502 emith_pass_arg_r(0, CONTEXT_REG);
1503 emith_call(do_sh2_cmp);
1504 emith_restore_caller_regs(tmp);
1505 }
1506#endif
1507
1508 pc += 2;
1509
1510 if (skip_op > 0) {
1511 skip_op--;
1512 continue;
1513 }
1514
1515 if (op_flags[i] & OF_DELAY_OP)
1516 {
1517 // handle delay slot dependencies
1518 delay_dep_fw = opd->dest & ops[i-1].source;
1519 delay_dep_bk = opd->source & ops[i-1].dest;
1520 if (delay_dep_fw & BITMASK1(SHR_T)) {
1521 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1522 DELAY_SAVE_T(sr);
1523 }
1524 if (delay_dep_bk & BITMASK1(SHR_PC)) {
1525 if (opd->op != OP_LOAD_POOL && opd->op != OP_MOVA) {
1526 // can only be those 2 really..
1527 elprintf(EL_ANOMALY, "%csh2 drc: illegal slot insn %04x @ %08x?",
1528 sh2->is_slave ? 's' : 'm', op, pc - 2);
1529 }
1530 if (opd->imm != 0)
1531 ; // addr already resolved somehow
1532 else {
1533 switch (ops[i-1].op) {
1534 case OP_BRANCH:
1535 emit_move_r_imm32(SHR_PC, ops[i-1].imm);
1536 break;
1537 case OP_BRANCH_CT:
1538 case OP_BRANCH_CF:
1539 tmp = rcache_get_reg(SHR_PC, RC_GR_WRITE);
1540 sr = rcache_get_reg(SHR_SR, RC_GR_READ);
1541 emith_move_r_imm(tmp, pc);
1542 emith_tst_r_imm(sr, T);
1543 tmp2 = ops[i-1].op == OP_BRANCH_CT ? DCOND_NE : DCOND_EQ;
1544 emith_move_r_imm_c(tmp2, tmp, ops[i-1].imm);
1545 break;
1546 // case OP_BRANCH_R OP_BRANCH_RF - PC already loaded
1547 }
1548 }
1549 }
1550 //if (delay_dep_fw & ~BITMASK1(SHR_T))
1551 // dbg(1, "unhandled delay_dep_fw: %x", delay_dep_fw & ~BITMASK1(SHR_T));
1552 if (delay_dep_bk & ~BITMASK2(SHR_PC, SHR_PR))
1553 dbg(1, "unhandled delay_dep_bk: %x", delay_dep_bk);
1554 }
1555
1556 switch (opd->op)
1557 {
1558 case OP_BRANCH:
1559 case OP_BRANCH_CT:
1560 case OP_BRANCH_CF:
1561 if (opd->dest & BITMASK1(SHR_PR))
1562 emit_move_r_imm32(SHR_PR, pc + 2);
1563 drcf.pending_branch_direct = 1;
1564 goto end_op;
1565
1566 case OP_BRANCH_R:
1567 if (opd->dest & BITMASK1(SHR_PR))
1568 emit_move_r_imm32(SHR_PR, pc + 2);
1569 emit_move_r_r(SHR_PC, opd->rm);
1570 drcf.pending_branch_indirect = 1;
1571 goto end_op;
1572
1573 case OP_BRANCH_RF:
1574 tmp = rcache_get_reg(SHR_PC, RC_GR_WRITE);
1575 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_READ);
1576 if (opd->dest & BITMASK1(SHR_PR)) {
1577 tmp3 = rcache_get_reg(SHR_PR, RC_GR_WRITE);
1578 emith_move_r_imm(tmp3, pc + 2);
1579 emith_add_r_r_r(tmp, tmp2, tmp3);
1580 }
1581 else {
1582 emith_move_r_r(tmp, tmp2);
1583 emith_add_r_imm(tmp, pc + 2);
1584 }
1585 drcf.pending_branch_indirect = 1;
1586 goto end_op;
1587
1588 case OP_SLEEP:
1589 printf("TODO sleep\n");
1590 goto end_op;
1591
1592 case OP_RTE:
1593 // pop PC
1594 emit_memhandler_read_rr(SHR_PC, SHR_SP, 0, 2);
1595 // pop SR
1596 tmp = rcache_get_reg_arg(0, SHR_SP);
1597 emith_add_r_imm(tmp, 4);
1598 tmp = emit_memhandler_read(2);
1599 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1600 emith_write_sr(sr, tmp);
1601 rcache_free_tmp(tmp);
1602 tmp = rcache_get_reg(SHR_SP, RC_GR_RMW);
1603 emith_add_r_imm(tmp, 4*2);
1604 drcf.test_irq = 1;
1605 drcf.pending_branch_indirect = 1;
1606 goto end_op;
1607
1608 case OP_LOAD_POOL:
1609#if PROPAGATE_CONSTANTS
1610 if (opd->imm != 0 && opd->imm < end_pc + MAX_LITERAL_OFFSET
1611 && literal_addr_count < MAX_LITERALS)
1612 {
1613 ADD_TO_ARRAY(literal_addr, literal_addr_count, opd->imm,);
1614 if (opd->size == 2)
1615 tmp = FETCH32(opd->imm);
1616 else
1617 tmp = (u32)(int)(signed short)FETCH_OP(opd->imm);
1618 gconst_new(GET_Rn(), tmp);
1619 }
1620 else
1621#endif
1622 {
1623 tmp = rcache_get_tmp_arg(0);
1624 if (opd->imm != 0)
1625 emith_move_r_imm(tmp, opd->imm);
1626 else {
1627 // have to calculate read addr from PC
1628 tmp2 = rcache_get_reg(SHR_PC, RC_GR_READ);
1629 if (opd->size == 2) {
1630 emith_add_r_r_imm(tmp, tmp2, 2 + (op & 0xff) * 4);
1631 emith_bic_r_imm(tmp, 3);
1632 }
1633 else
1634 emith_add_r_r_imm(tmp, tmp2, 2 + (op & 0xff) * 2);
1635 }
1636 tmp2 = emit_memhandler_read(opd->size);
1637 tmp3 = rcache_get_reg(GET_Rn(), RC_GR_WRITE);
1638 if (opd->size == 2)
1639 emith_move_r_r(tmp3, tmp2);
1640 else
1641 emith_sext(tmp3, tmp2, 16);
1642 rcache_free_tmp(tmp2);
1643 }
1644 goto end_op;
1645
1646 case OP_MOVA:
1647 if (opd->imm != 0)
1648 emit_move_r_imm32(SHR_R0, opd->imm);
1649 else {
1650 tmp = rcache_get_reg(SHR_R0, RC_GR_WRITE);
1651 tmp2 = rcache_get_reg(SHR_PC, RC_GR_READ);
1652 emith_add_r_r_imm(tmp, tmp2, 2 + (op & 0xff) * 4);
1653 emith_bic_r_imm(tmp, 3);
1654 }
1655 goto end_op;
1656 }
1657
1658 switch ((op >> 12) & 0x0f)
1659 {
1660 /////////////////////////////////////////////
1661 case 0x00:
1662 switch (op & 0x0f)
1663 {
1664 case 0x02:
1665 tmp = rcache_get_reg(GET_Rn(), RC_GR_WRITE);
1666 switch (GET_Fx())
1667 {
1668 case 0: // STC SR,Rn 0000nnnn00000010
1669 tmp2 = SHR_SR;
1670 break;
1671 case 1: // STC GBR,Rn 0000nnnn00010010
1672 tmp2 = SHR_GBR;
1673 break;
1674 case 2: // STC VBR,Rn 0000nnnn00100010
1675 tmp2 = SHR_VBR;
1676 break;
1677 default:
1678 goto default_;
1679 }
1680 tmp3 = rcache_get_reg(tmp2, RC_GR_READ);
1681 emith_move_r_r(tmp, tmp3);
1682 if (tmp2 == SHR_SR)
1683 emith_clear_msb(tmp, tmp, 22); // reserved bits defined by ISA as 0
1684 goto end_op;
1685 case 0x04: // MOV.B Rm,@(R0,Rn) 0000nnnnmmmm0100
1686 case 0x05: // MOV.W Rm,@(R0,Rn) 0000nnnnmmmm0101
1687 case 0x06: // MOV.L Rm,@(R0,Rn) 0000nnnnmmmm0110
1688 rcache_clean();
1689 tmp = rcache_get_reg_arg(1, GET_Rm());
1690 tmp2 = rcache_get_reg_arg(0, SHR_R0);
1691 tmp3 = rcache_get_reg(GET_Rn(), RC_GR_READ);
1692 emith_add_r_r(tmp2, tmp3);
1693 emit_memhandler_write(op & 3, pc);
1694 goto end_op;
1695 case 0x07:
1696 // MUL.L Rm,Rn 0000nnnnmmmm0111
1697 tmp = rcache_get_reg(GET_Rn(), RC_GR_READ);
1698 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1699 tmp3 = rcache_get_reg(SHR_MACL, RC_GR_WRITE);
1700 emith_mul(tmp3, tmp2, tmp);
1701 goto end_op;
1702 case 0x08:
1703 switch (GET_Fx())
1704 {
1705 case 0: // CLRT 0000000000001000
1706 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1707 emith_bic_r_imm(sr, T);
1708 break;
1709 case 1: // SETT 0000000000011000
1710 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1711 emith_or_r_imm(sr, T);
1712 break;
1713 case 2: // CLRMAC 0000000000101000
1714 emit_move_r_imm32(SHR_MACL, 0);
1715 emit_move_r_imm32(SHR_MACH, 0);
1716 break;
1717 default:
1718 goto default_;
1719 }
1720 goto end_op;
1721 case 0x09:
1722 switch (GET_Fx())
1723 {
1724 case 0: // NOP 0000000000001001
1725 break;
1726 case 1: // DIV0U 0000000000011001
1727 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1728 emith_bic_r_imm(sr, M|Q|T);
1729 break;
1730 case 2: // MOVT Rn 0000nnnn00101001
1731 sr = rcache_get_reg(SHR_SR, RC_GR_READ);
1732 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_WRITE);
1733 emith_clear_msb(tmp2, sr, 31);
1734 break;
1735 default:
1736 goto default_;
1737 }
1738 goto end_op;
1739 case 0x0a:
1740 tmp = rcache_get_reg(GET_Rn(), RC_GR_WRITE);
1741 switch (GET_Fx())
1742 {
1743 case 0: // STS MACH,Rn 0000nnnn00001010
1744 tmp2 = SHR_MACH;
1745 break;
1746 case 1: // STS MACL,Rn 0000nnnn00011010
1747 tmp2 = SHR_MACL;
1748 break;
1749 case 2: // STS PR,Rn 0000nnnn00101010
1750 tmp2 = SHR_PR;
1751 break;
1752 default:
1753 goto default_;
1754 }
1755 tmp2 = rcache_get_reg(tmp2, RC_GR_READ);
1756 emith_move_r_r(tmp, tmp2);
1757 goto end_op;
1758 case 0x0c: // MOV.B @(R0,Rm),Rn 0000nnnnmmmm1100
1759 case 0x0d: // MOV.W @(R0,Rm),Rn 0000nnnnmmmm1101
1760 case 0x0e: // MOV.L @(R0,Rm),Rn 0000nnnnmmmm1110
1761 tmp = emit_indirect_indexed_read(SHR_R0, GET_Rm(), op & 3);
1762 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_WRITE);
1763 if ((op & 3) != 2) {
1764 emith_sext(tmp2, tmp, (op & 1) ? 16 : 8);
1765 } else
1766 emith_move_r_r(tmp2, tmp);
1767 rcache_free_tmp(tmp);
1768 goto end_op;
1769 case 0x0f: // MAC.L @Rm+,@Rn+ 0000nnnnmmmm1111
1770 emit_indirect_read_double(&tmp, &tmp2, GET_Rn(), GET_Rm(), 2);
1771 tmp4 = rcache_get_reg(SHR_MACH, RC_GR_RMW);
1772 /* MS 16 MAC bits unused if saturated */
1773 sr = rcache_get_reg(SHR_SR, RC_GR_READ);
1774 emith_tst_r_imm(sr, S);
1775 EMITH_SJMP_START(DCOND_EQ);
1776 emith_clear_msb_c(DCOND_NE, tmp4, tmp4, 16);
1777 EMITH_SJMP_END(DCOND_EQ);
1778 rcache_unlock(sr);
1779 tmp3 = rcache_get_reg(SHR_MACL, RC_GR_RMW); // might evict SR
1780 emith_mula_s64(tmp3, tmp4, tmp, tmp2);
1781 rcache_free_tmp(tmp2);
1782 sr = rcache_get_reg(SHR_SR, RC_GR_READ); // reget just in case
1783 emith_tst_r_imm(sr, S);
1784
1785 EMITH_JMP_START(DCOND_EQ);
1786 emith_asr(tmp, tmp4, 15);
1787 emith_cmp_r_imm(tmp, -1); // negative overflow (0x80000000..0xffff7fff)
1788 EMITH_SJMP_START(DCOND_GE);
1789 emith_move_r_imm_c(DCOND_LT, tmp4, 0x8000);
1790 emith_move_r_imm_c(DCOND_LT, tmp3, 0x0000);
1791 EMITH_SJMP_END(DCOND_GE);
1792 emith_cmp_r_imm(tmp, 0); // positive overflow (0x00008000..0x7fffffff)
1793 EMITH_SJMP_START(DCOND_LE);
1794 emith_move_r_imm_c(DCOND_GT, tmp4, 0x00007fff);
1795 emith_move_r_imm_c(DCOND_GT, tmp3, 0xffffffff);
1796 EMITH_SJMP_END(DCOND_LE);
1797 EMITH_JMP_END(DCOND_EQ);
1798
1799 rcache_free_tmp(tmp);
1800 goto end_op;
1801 }
1802 goto default_;
1803
1804 /////////////////////////////////////////////
1805 case 0x01:
1806 // MOV.L Rm,@(disp,Rn) 0001nnnnmmmmdddd
1807 rcache_clean();
1808 tmp = rcache_get_reg_arg(0, GET_Rn());
1809 tmp2 = rcache_get_reg_arg(1, GET_Rm());
1810 if (op & 0x0f)
1811 emith_add_r_imm(tmp, (op & 0x0f) * 4);
1812 emit_memhandler_write(2, pc);
1813 goto end_op;
1814
1815 case 0x02:
1816 switch (op & 0x0f)
1817 {
1818 case 0x00: // MOV.B Rm,@Rn 0010nnnnmmmm0000
1819 case 0x01: // MOV.W Rm,@Rn 0010nnnnmmmm0001
1820 case 0x02: // MOV.L Rm,@Rn 0010nnnnmmmm0010
1821 rcache_clean();
1822 rcache_get_reg_arg(0, GET_Rn());
1823 rcache_get_reg_arg(1, GET_Rm());
1824 emit_memhandler_write(op & 3, pc);
1825 goto end_op;
1826 case 0x04: // MOV.B Rm,@-Rn 0010nnnnmmmm0100
1827 case 0x05: // MOV.W Rm,@-Rn 0010nnnnmmmm0101
1828 case 0x06: // MOV.L Rm,@-Rn 0010nnnnmmmm0110
1829 rcache_get_reg_arg(1, GET_Rm()); // for Rm == Rn
1830 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
1831 emith_sub_r_imm(tmp, (1 << (op & 3)));
1832 rcache_clean();
1833 rcache_get_reg_arg(0, GET_Rn());
1834 emit_memhandler_write(op & 3, pc);
1835 goto end_op;
1836 case 0x07: // DIV0S Rm,Rn 0010nnnnmmmm0111
1837 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1838 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_READ);
1839 tmp3 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1840 emith_bic_r_imm(sr, M|Q|T);
1841 emith_tst_r_imm(tmp2, (1<<31));
1842 EMITH_SJMP_START(DCOND_EQ);
1843 emith_or_r_imm_c(DCOND_NE, sr, Q);
1844 EMITH_SJMP_END(DCOND_EQ);
1845 emith_tst_r_imm(tmp3, (1<<31));
1846 EMITH_SJMP_START(DCOND_EQ);
1847 emith_or_r_imm_c(DCOND_NE, sr, M);
1848 EMITH_SJMP_END(DCOND_EQ);
1849 emith_teq_r_r(tmp2, tmp3);
1850 EMITH_SJMP_START(DCOND_PL);
1851 emith_or_r_imm_c(DCOND_MI, sr, T);
1852 EMITH_SJMP_END(DCOND_PL);
1853 goto end_op;
1854 case 0x08: // TST Rm,Rn 0010nnnnmmmm1000
1855 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1856 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_READ);
1857 tmp3 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1858 emith_bic_r_imm(sr, T);
1859 emith_tst_r_r(tmp2, tmp3);
1860 emit_or_t_if_eq(sr);
1861 goto end_op;
1862 case 0x09: // AND Rm,Rn 0010nnnnmmmm1001
1863 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
1864 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1865 emith_and_r_r(tmp, tmp2);
1866 goto end_op;
1867 case 0x0a: // XOR Rm,Rn 0010nnnnmmmm1010
1868 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
1869 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1870 emith_eor_r_r(tmp, tmp2);
1871 goto end_op;
1872 case 0x0b: // OR Rm,Rn 0010nnnnmmmm1011
1873 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
1874 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1875 emith_or_r_r(tmp, tmp2);
1876 goto end_op;
1877 case 0x0c: // CMP/STR Rm,Rn 0010nnnnmmmm1100
1878 tmp = rcache_get_tmp();
1879 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_READ);
1880 tmp3 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1881 emith_eor_r_r_r(tmp, tmp2, tmp3);
1882 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1883 emith_bic_r_imm(sr, T);
1884 emith_tst_r_imm(tmp, 0x000000ff);
1885 emit_or_t_if_eq(sr);
1886 emith_tst_r_imm(tmp, 0x0000ff00);
1887 emit_or_t_if_eq(sr);
1888 emith_tst_r_imm(tmp, 0x00ff0000);
1889 emit_or_t_if_eq(sr);
1890 emith_tst_r_imm(tmp, 0xff000000);
1891 emit_or_t_if_eq(sr);
1892 rcache_free_tmp(tmp);
1893 goto end_op;
1894 case 0x0d: // XTRCT Rm,Rn 0010nnnnmmmm1101
1895 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
1896 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1897 emith_lsr(tmp, tmp, 16);
1898 emith_or_r_r_lsl(tmp, tmp2, 16);
1899 goto end_op;
1900 case 0x0e: // MULU.W Rm,Rn 0010nnnnmmmm1110
1901 case 0x0f: // MULS.W Rm,Rn 0010nnnnmmmm1111
1902 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_READ);
1903 tmp = rcache_get_reg(SHR_MACL, RC_GR_WRITE);
1904 if (op & 1) {
1905 emith_sext(tmp, tmp2, 16);
1906 } else
1907 emith_clear_msb(tmp, tmp2, 16);
1908 tmp3 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1909 tmp2 = rcache_get_tmp();
1910 if (op & 1) {
1911 emith_sext(tmp2, tmp3, 16);
1912 } else
1913 emith_clear_msb(tmp2, tmp3, 16);
1914 emith_mul(tmp, tmp, tmp2);
1915 rcache_free_tmp(tmp2);
1916 goto end_op;
1917 }
1918 goto default_;
1919
1920 /////////////////////////////////////////////
1921 case 0x03:
1922 switch (op & 0x0f)
1923 {
1924 case 0x00: // CMP/EQ Rm,Rn 0011nnnnmmmm0000
1925 case 0x02: // CMP/HS Rm,Rn 0011nnnnmmmm0010
1926 case 0x03: // CMP/GE Rm,Rn 0011nnnnmmmm0011
1927 case 0x06: // CMP/HI Rm,Rn 0011nnnnmmmm0110
1928 case 0x07: // CMP/GT Rm,Rn 0011nnnnmmmm0111
1929 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1930 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_READ);
1931 tmp3 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1932 emith_bic_r_imm(sr, T);
1933 emith_cmp_r_r(tmp2, tmp3);
1934 switch (op & 0x07)
1935 {
1936 case 0x00: // CMP/EQ
1937 emit_or_t_if_eq(sr);
1938 break;
1939 case 0x02: // CMP/HS
1940 EMITH_SJMP_START(DCOND_LO);
1941 emith_or_r_imm_c(DCOND_HS, sr, T);
1942 EMITH_SJMP_END(DCOND_LO);
1943 break;
1944 case 0x03: // CMP/GE
1945 EMITH_SJMP_START(DCOND_LT);
1946 emith_or_r_imm_c(DCOND_GE, sr, T);
1947 EMITH_SJMP_END(DCOND_LT);
1948 break;
1949 case 0x06: // CMP/HI
1950 EMITH_SJMP_START(DCOND_LS);
1951 emith_or_r_imm_c(DCOND_HI, sr, T);
1952 EMITH_SJMP_END(DCOND_LS);
1953 break;
1954 case 0x07: // CMP/GT
1955 EMITH_SJMP_START(DCOND_LE);
1956 emith_or_r_imm_c(DCOND_GT, sr, T);
1957 EMITH_SJMP_END(DCOND_LE);
1958 break;
1959 }
1960 goto end_op;
1961 case 0x04: // DIV1 Rm,Rn 0011nnnnmmmm0100
1962 // Q1 = carry(Rn = (Rn << 1) | T)
1963 // if Q ^ M
1964 // Q2 = carry(Rn += Rm)
1965 // else
1966 // Q2 = carry(Rn -= Rm)
1967 // Q = M ^ Q1 ^ Q2
1968 // T = (Q == M) = !(Q ^ M) = !(Q1 ^ Q2)
1969 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_RMW);
1970 tmp3 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1971 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1972 emith_tpop_carry(sr, 0);
1973 emith_adcf_r_r(tmp2, tmp2);
1974 emith_tpush_carry(sr, 0); // keep Q1 in T for now
1975 tmp4 = rcache_get_tmp();
1976 emith_and_r_r_imm(tmp4, sr, M);
1977 emith_eor_r_r_lsr(sr, tmp4, M_SHIFT - Q_SHIFT); // Q ^= M
1978 rcache_free_tmp(tmp4);
1979 // add or sub, invert T if carry to get Q1 ^ Q2
1980 // in: (Q ^ M) passed in Q, Q1 in T
1981 emith_sh2_div1_step(tmp2, tmp3, sr);
1982 emith_bic_r_imm(sr, Q);
1983 emith_tst_r_imm(sr, M);
1984 EMITH_SJMP_START(DCOND_EQ);
1985 emith_or_r_imm_c(DCOND_NE, sr, Q); // Q = M
1986 EMITH_SJMP_END(DCOND_EQ);
1987 emith_tst_r_imm(sr, T);
1988 EMITH_SJMP_START(DCOND_EQ);
1989 emith_eor_r_imm_c(DCOND_NE, sr, Q); // Q = M ^ Q1 ^ Q2
1990 EMITH_SJMP_END(DCOND_EQ);
1991 emith_eor_r_imm(sr, T); // T = !(Q1 ^ Q2)
1992 goto end_op;
1993 case 0x05: // DMULU.L Rm,Rn 0011nnnnmmmm0101
1994 tmp = rcache_get_reg(GET_Rn(), RC_GR_READ);
1995 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1996 tmp3 = rcache_get_reg(SHR_MACL, RC_GR_WRITE);
1997 tmp4 = rcache_get_reg(SHR_MACH, RC_GR_WRITE);
1998 emith_mul_u64(tmp3, tmp4, tmp, tmp2);
1999 goto end_op;
2000 case 0x08: // SUB Rm,Rn 0011nnnnmmmm1000
2001 case 0x0c: // ADD Rm,Rn 0011nnnnmmmm1100
2002 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2003 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
2004 if (op & 4) {
2005 emith_add_r_r(tmp, tmp2);
2006 } else
2007 emith_sub_r_r(tmp, tmp2);
2008 goto end_op;
2009 case 0x0a: // SUBC Rm,Rn 0011nnnnmmmm1010
2010 case 0x0e: // ADDC Rm,Rn 0011nnnnmmmm1110
2011 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2012 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
2013 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2014 if (op & 4) { // adc
2015 emith_tpop_carry(sr, 0);
2016 emith_adcf_r_r(tmp, tmp2);
2017 emith_tpush_carry(sr, 0);
2018 } else {
2019 emith_tpop_carry(sr, 1);
2020 emith_sbcf_r_r(tmp, tmp2);
2021 emith_tpush_carry(sr, 1);
2022 }
2023 goto end_op;
2024 case 0x0b: // SUBV Rm,Rn 0011nnnnmmmm1011
2025 case 0x0f: // ADDV Rm,Rn 0011nnnnmmmm1111
2026 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2027 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
2028 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2029 emith_bic_r_imm(sr, T);
2030 if (op & 4) {
2031 emith_addf_r_r(tmp, tmp2);
2032 } else
2033 emith_subf_r_r(tmp, tmp2);
2034 EMITH_SJMP_START(DCOND_VC);
2035 emith_or_r_imm_c(DCOND_VS, sr, T);
2036 EMITH_SJMP_END(DCOND_VC);
2037 goto end_op;
2038 case 0x0d: // DMULS.L Rm,Rn 0011nnnnmmmm1101
2039 tmp = rcache_get_reg(GET_Rn(), RC_GR_READ);
2040 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
2041 tmp3 = rcache_get_reg(SHR_MACL, RC_GR_WRITE);
2042 tmp4 = rcache_get_reg(SHR_MACH, RC_GR_WRITE);
2043 emith_mul_s64(tmp3, tmp4, tmp, tmp2);
2044 goto end_op;
2045 }
2046 goto default_;
2047
2048 /////////////////////////////////////////////
2049 case 0x04:
2050 switch (op & 0x0f)
2051 {
2052 case 0x00:
2053 switch (GET_Fx())
2054 {
2055 case 0: // SHLL Rn 0100nnnn00000000
2056 case 2: // SHAL Rn 0100nnnn00100000
2057 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2058 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2059 emith_tpop_carry(sr, 0); // dummy
2060 emith_lslf(tmp, tmp, 1);
2061 emith_tpush_carry(sr, 0);
2062 goto end_op;
2063 case 1: // DT Rn 0100nnnn00010000
2064 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2065#if 0 // scheduling needs tuning
2066 if (FETCH_OP(pc) == 0x8bfd) { // BF #-2
2067 if (gconst_get(GET_Rn(), &tmp)) {
2068 // XXX: limit burned cycles
2069 emit_move_r_imm32(GET_Rn(), 0);
2070 emith_or_r_imm(sr, T);
2071 cycles += tmp * 4 + 1; // +1 syncs with noconst version, not sure why
2072 skip_op = 1;
2073 }
2074 else
2075 emith_sh2_dtbf_loop();
2076 goto end_op;
2077 }
2078#endif
2079 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2080 emith_bic_r_imm(sr, T);
2081 emith_subf_r_imm(tmp, 1);
2082 emit_or_t_if_eq(sr);
2083 goto end_op;
2084 }
2085 goto default_;
2086 case 0x01:
2087 switch (GET_Fx())
2088 {
2089 case 0: // SHLR Rn 0100nnnn00000001
2090 case 2: // SHAR Rn 0100nnnn00100001
2091 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2092 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2093 emith_tpop_carry(sr, 0); // dummy
2094 if (op & 0x20) {
2095 emith_asrf(tmp, tmp, 1);
2096 } else
2097 emith_lsrf(tmp, tmp, 1);
2098 emith_tpush_carry(sr, 0);
2099 goto end_op;
2100 case 1: // CMP/PZ Rn 0100nnnn00010001
2101 tmp = rcache_get_reg(GET_Rn(), RC_GR_READ);
2102 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2103 emith_bic_r_imm(sr, T);
2104 emith_cmp_r_imm(tmp, 0);
2105 EMITH_SJMP_START(DCOND_LT);
2106 emith_or_r_imm_c(DCOND_GE, sr, T);
2107 EMITH_SJMP_END(DCOND_LT);
2108 goto end_op;
2109 }
2110 goto default_;
2111 case 0x02:
2112 case 0x03:
2113 switch (op & 0x3f)
2114 {
2115 case 0x02: // STS.L MACH,@-Rn 0100nnnn00000010
2116 tmp = SHR_MACH;
2117 break;
2118 case 0x12: // STS.L MACL,@-Rn 0100nnnn00010010
2119 tmp = SHR_MACL;
2120 break;
2121 case 0x22: // STS.L PR,@-Rn 0100nnnn00100010
2122 tmp = SHR_PR;
2123 break;
2124 case 0x03: // STC.L SR,@-Rn 0100nnnn00000011
2125 tmp = SHR_SR;
2126 break;
2127 case 0x13: // STC.L GBR,@-Rn 0100nnnn00010011
2128 tmp = SHR_GBR;
2129 break;
2130 case 0x23: // STC.L VBR,@-Rn 0100nnnn00100011
2131 tmp = SHR_VBR;
2132 break;
2133 default:
2134 goto default_;
2135 }
2136 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2137 emith_sub_r_imm(tmp2, 4);
2138 rcache_clean();
2139 rcache_get_reg_arg(0, GET_Rn());
2140 tmp3 = rcache_get_reg_arg(1, tmp);
2141 if (tmp == SHR_SR)
2142 emith_clear_msb(tmp3, tmp3, 22); // reserved bits defined by ISA as 0
2143 emit_memhandler_write(2, pc);
2144 goto end_op;
2145 case 0x04:
2146 case 0x05:
2147 switch (op & 0x3f)
2148 {
2149 case 0x04: // ROTL Rn 0100nnnn00000100
2150 case 0x05: // ROTR Rn 0100nnnn00000101
2151 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2152 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2153 emith_tpop_carry(sr, 0); // dummy
2154 if (op & 1) {
2155 emith_rorf(tmp, tmp, 1);
2156 } else
2157 emith_rolf(tmp, tmp, 1);
2158 emith_tpush_carry(sr, 0);
2159 goto end_op;
2160 case 0x24: // ROTCL Rn 0100nnnn00100100
2161 case 0x25: // ROTCR Rn 0100nnnn00100101
2162 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2163 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2164 emith_tpop_carry(sr, 0);
2165 if (op & 1) {
2166 emith_rorcf(tmp);
2167 } else
2168 emith_rolcf(tmp);
2169 emith_tpush_carry(sr, 0);
2170 goto end_op;
2171 case 0x15: // CMP/PL Rn 0100nnnn00010101
2172 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2173 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2174 emith_bic_r_imm(sr, T);
2175 emith_cmp_r_imm(tmp, 0);
2176 EMITH_SJMP_START(DCOND_LE);
2177 emith_or_r_imm_c(DCOND_GT, sr, T);
2178 EMITH_SJMP_END(DCOND_LE);
2179 goto end_op;
2180 }
2181 goto default_;
2182 case 0x06:
2183 case 0x07:
2184 switch (op & 0x3f)
2185 {
2186 case 0x06: // LDS.L @Rm+,MACH 0100mmmm00000110
2187 tmp = SHR_MACH;
2188 break;
2189 case 0x16: // LDS.L @Rm+,MACL 0100mmmm00010110
2190 tmp = SHR_MACL;
2191 break;
2192 case 0x26: // LDS.L @Rm+,PR 0100mmmm00100110
2193 tmp = SHR_PR;
2194 break;
2195 case 0x07: // LDC.L @Rm+,SR 0100mmmm00000111
2196 tmp = SHR_SR;
2197 break;
2198 case 0x17: // LDC.L @Rm+,GBR 0100mmmm00010111
2199 tmp = SHR_GBR;
2200 break;
2201 case 0x27: // LDC.L @Rm+,VBR 0100mmmm00100111
2202 tmp = SHR_VBR;
2203 break;
2204 default:
2205 goto default_;
2206 }
2207 rcache_get_reg_arg(0, GET_Rn());
2208 tmp2 = emit_memhandler_read(2);
2209 if (tmp == SHR_SR) {
2210 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2211 emith_write_sr(sr, tmp2);
2212 drcf.test_irq = 1;
2213 } else {
2214 tmp = rcache_get_reg(tmp, RC_GR_WRITE);
2215 emith_move_r_r(tmp, tmp2);
2216 }
2217 rcache_free_tmp(tmp2);
2218 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2219 emith_add_r_imm(tmp, 4);
2220 goto end_op;
2221 case 0x08:
2222 case 0x09:
2223 switch (GET_Fx())
2224 {
2225 case 0:
2226 // SHLL2 Rn 0100nnnn00001000
2227 // SHLR2 Rn 0100nnnn00001001
2228 tmp = 2;
2229 break;
2230 case 1:
2231 // SHLL8 Rn 0100nnnn00011000
2232 // SHLR8 Rn 0100nnnn00011001
2233 tmp = 8;
2234 break;
2235 case 2:
2236 // SHLL16 Rn 0100nnnn00101000
2237 // SHLR16 Rn 0100nnnn00101001
2238 tmp = 16;
2239 break;
2240 default:
2241 goto default_;
2242 }
2243 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2244 if (op & 1) {
2245 emith_lsr(tmp2, tmp2, tmp);
2246 } else
2247 emith_lsl(tmp2, tmp2, tmp);
2248 goto end_op;
2249 case 0x0a:
2250 switch (GET_Fx())
2251 {
2252 case 0: // LDS Rm,MACH 0100mmmm00001010
2253 tmp2 = SHR_MACH;
2254 break;
2255 case 1: // LDS Rm,MACL 0100mmmm00011010
2256 tmp2 = SHR_MACL;
2257 break;
2258 case 2: // LDS Rm,PR 0100mmmm00101010
2259 tmp2 = SHR_PR;
2260 break;
2261 default:
2262 goto default_;
2263 }
2264 emit_move_r_r(tmp2, GET_Rn());
2265 goto end_op;
2266 case 0x0b:
2267 switch (GET_Fx())
2268 {
2269 case 1: // TAS.B @Rn 0100nnnn00011011
2270 // XXX: is TAS working on 32X?
2271 rcache_get_reg_arg(0, GET_Rn());
2272 tmp = emit_memhandler_read(0);
2273 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2274 emith_bic_r_imm(sr, T);
2275 emith_cmp_r_imm(tmp, 0);
2276 emit_or_t_if_eq(sr);
2277 rcache_clean();
2278 emith_or_r_imm(tmp, 0x80);
2279 tmp2 = rcache_get_tmp_arg(1); // assuming it differs from tmp
2280 emith_move_r_r(tmp2, tmp);
2281 rcache_free_tmp(tmp);
2282 rcache_get_reg_arg(0, GET_Rn());
2283 emit_memhandler_write(0, pc);
2284 break;
2285 default:
2286 goto default_;
2287 }
2288 goto end_op;
2289 case 0x0e:
2290 tmp = rcache_get_reg(GET_Rn(), RC_GR_READ);
2291 switch (GET_Fx())
2292 {
2293 case 0: // LDC Rm,SR 0100mmmm00001110
2294 tmp2 = SHR_SR;
2295 break;
2296 case 1: // LDC Rm,GBR 0100mmmm00011110
2297 tmp2 = SHR_GBR;
2298 break;
2299 case 2: // LDC Rm,VBR 0100mmmm00101110
2300 tmp2 = SHR_VBR;
2301 break;
2302 default:
2303 goto default_;
2304 }
2305 if (tmp2 == SHR_SR) {
2306 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2307 emith_write_sr(sr, tmp);
2308 drcf.test_irq = 1;
2309 } else {
2310 tmp2 = rcache_get_reg(tmp2, RC_GR_WRITE);
2311 emith_move_r_r(tmp2, tmp);
2312 }
2313 goto end_op;
2314 case 0x0f:
2315 // MAC.W @Rm+,@Rn+ 0100nnnnmmmm1111
2316 emit_indirect_read_double(&tmp, &tmp2, GET_Rn(), GET_Rm(), 1);
2317 emith_sext(tmp, tmp, 16);
2318 emith_sext(tmp2, tmp2, 16);
2319 tmp3 = rcache_get_reg(SHR_MACL, RC_GR_RMW);
2320 tmp4 = rcache_get_reg(SHR_MACH, RC_GR_RMW);
2321 emith_mula_s64(tmp3, tmp4, tmp, tmp2);
2322 rcache_free_tmp(tmp2);
2323 // XXX: MACH should be untouched when S is set?
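// when S is set the 64-bit MAC result saturates to 32 bits; roughly
// (illustrative C): if (((s32)macl >> 31) != (s32)mach)
//                     macl = (mach < 0) ? 0x80000000 : 0x7fffffff;
// the ASR/EOR/TST sequence below implements this check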
2324 sr = rcache_get_reg(SHR_SR, RC_GR_READ);
2325 emith_tst_r_imm(sr, S);
2326 EMITH_JMP_START(DCOND_EQ);
2327
2328 emith_asr(tmp, tmp3, 31);
2329 emith_eorf_r_r(tmp, tmp4); // tmp = ((signed)macl >> 31) ^ mach
2330 EMITH_JMP_START(DCOND_EQ);
2331 emith_move_r_imm(tmp3, 0x80000000);
2332 emith_tst_r_r(tmp4, tmp4);
2333 EMITH_SJMP_START(DCOND_MI);
2334 emith_sub_r_imm_c(DCOND_PL, tmp3, 1); // positive
2335 EMITH_SJMP_END(DCOND_MI);
2336 EMITH_JMP_END(DCOND_EQ);
2337
2338 EMITH_JMP_END(DCOND_EQ);
2339 rcache_free_tmp(tmp);
2340 goto end_op;
2341 }
2342 goto default_;
2343
2344 /////////////////////////////////////////////
2345 case 0x05:
2346 // MOV.L @(disp,Rm),Rn 0101nnnnmmmmdddd
2347 emit_memhandler_read_rr(GET_Rn(), GET_Rm(), (op & 0x0f) * 4, 2);
2348 goto end_op;
2349
2350 /////////////////////////////////////////////
2351 case 0x06:
2352 switch (op & 0x0f)
2353 {
2354 case 0x00: // MOV.B @Rm,Rn 0110nnnnmmmm0000
2355 case 0x01: // MOV.W @Rm,Rn 0110nnnnmmmm0001
2356 case 0x02: // MOV.L @Rm,Rn 0110nnnnmmmm0010
2357 case 0x04: // MOV.B @Rm+,Rn 0110nnnnmmmm0100
2358 case 0x05: // MOV.W @Rm+,Rn 0110nnnnmmmm0101
2359 case 0x06: // MOV.L @Rm+,Rn 0110nnnnmmmm0110
2360 emit_memhandler_read_rr(GET_Rn(), GET_Rm(), 0, op & 3);
2361 if ((op & 7) >= 4 && GET_Rn() != GET_Rm()) {
2362 tmp = rcache_get_reg(GET_Rm(), RC_GR_RMW);
2363 emith_add_r_imm(tmp, (1 << (op & 3)));
2364 }
2365 goto end_op;
2366 case 0x03:
2367 case 0x07 ... 0x0f:
2368 tmp = rcache_get_reg(GET_Rm(), RC_GR_READ);
2369 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_WRITE);
2370 switch (op & 0x0f)
2371 {
2372 case 0x03: // MOV Rm,Rn 0110nnnnmmmm0011
2373 emith_move_r_r(tmp2, tmp);
2374 break;
2375 case 0x07: // NOT Rm,Rn 0110nnnnmmmm0111
2376 emith_mvn_r_r(tmp2, tmp);
2377 break;
2378 case 0x08: // SWAP.B Rm,Rn 0110nnnnmmmm1000
2379 tmp3 = tmp2;
2380 if (tmp == tmp2)
2381 tmp3 = rcache_get_tmp();
2382 tmp4 = rcache_get_tmp();
2383 emith_lsr(tmp3, tmp, 16);
2384 emith_or_r_r_lsl(tmp3, tmp, 24);
2385 emith_and_r_r_imm(tmp4, tmp, 0xff00);
2386 emith_or_r_r_lsl(tmp3, tmp4, 8);
2387 emith_rol(tmp2, tmp3, 16);
2388 rcache_free_tmp(tmp4);
2389 if (tmp == tmp2)
2390 rcache_free_tmp(tmp3);
2391 break;
2392 case 0x09: // SWAP.W Rm,Rn 0110nnnnmmmm1001
2393 emith_rol(tmp2, tmp, 16);
2394 break;
2395 case 0x0a: // NEGC Rm,Rn 0110nnnnmmmm1010
2396 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2397 emith_tpop_carry(sr, 1);
2398 emith_negcf_r_r(tmp2, tmp);
2399 emith_tpush_carry(sr, 1);
2400 break;
2401 case 0x0b: // NEG Rm,Rn 0110nnnnmmmm1011
2402 emith_neg_r_r(tmp2, tmp);
2403 break;
2404 case 0x0c: // EXTU.B Rm,Rn 0110nnnnmmmm1100
2405 emith_clear_msb(tmp2, tmp, 24);
2406 break;
2407 case 0x0d: // EXTU.W Rm,Rn 0110nnnnmmmm1101
2408 emith_clear_msb(tmp2, tmp, 16);
2409 break;
2410 case 0x0e: // EXTS.B Rm,Rn 0110nnnnmmmm1110
2411 emith_sext(tmp2, tmp, 8);
2412 break;
2413 case 0x0f: // EXTS.W Rm,Rn 0110nnnnmmmm1111
2414 emith_sext(tmp2, tmp, 16);
2415 break;
2416 }
2417 goto end_op;
2418 }
2419 goto default_;
2420
2421 /////////////////////////////////////////////
2422 case 0x07:
2423 // ADD #imm,Rn 0111nnnniiiiiiii
2424 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2425 if (op & 0x80) { // adding negative
2426 emith_sub_r_imm(tmp, -op & 0xff);
2427 } else
2428 emith_add_r_imm(tmp, op & 0xff);
2429 goto end_op;
2430
2431 /////////////////////////////////////////////
2432 case 0x08:
2433 switch (op & 0x0f00)
2434 {
2435 case 0x0000: // MOV.B R0,@(disp,Rn) 10000000nnnndddd
2436 case 0x0100: // MOV.W R0,@(disp,Rn) 10000001nnnndddd
2437 rcache_clean();
2438 tmp = rcache_get_reg_arg(0, GET_Rm());
2439 tmp2 = rcache_get_reg_arg(1, SHR_R0);
2440 tmp3 = (op & 0x100) >> 8;
2441 if (op & 0x0f)
2442 emith_add_r_imm(tmp, (op & 0x0f) << tmp3);
2443 emit_memhandler_write(tmp3, pc);
2444 goto end_op;
2445 case 0x0400: // MOV.B @(disp,Rm),R0 10000100mmmmdddd
2446 case 0x0500: // MOV.W @(disp,Rm),R0 10000101mmmmdddd
2447 tmp = (op & 0x100) >> 8;
2448 emit_memhandler_read_rr(SHR_R0, GET_Rm(), (op & 0x0f) << tmp, tmp);
2449 goto end_op;
2450 case 0x0800: // CMP/EQ #imm,R0 10001000iiiiiiii
2451 // XXX: could use cmn
2452 tmp = rcache_get_tmp();
2453 tmp2 = rcache_get_reg(0, RC_GR_READ);
2454 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2455 emith_move_r_imm_s8(tmp, op & 0xff);
2456 emith_bic_r_imm(sr, T);
2457 emith_cmp_r_r(tmp2, tmp);
2458 emit_or_t_if_eq(sr);
2459 rcache_free_tmp(tmp);
2460 goto end_op;
2461 }
2462 goto default_;
2463
2464 /////////////////////////////////////////////
2465 case 0x0c:
2466 switch (op & 0x0f00)
2467 {
2468 case 0x0000: // MOV.B R0,@(disp,GBR) 11000000dddddddd
2469 case 0x0100: // MOV.W R0,@(disp,GBR) 11000001dddddddd
2470 case 0x0200: // MOV.L R0,@(disp,GBR) 11000010dddddddd
2471 rcache_clean();
2472 tmp = rcache_get_reg_arg(0, SHR_GBR);
2473 tmp2 = rcache_get_reg_arg(1, SHR_R0);
2474 tmp3 = (op & 0x300) >> 8;
2475 emith_add_r_imm(tmp, (op & 0xff) << tmp3);
2476 emit_memhandler_write(tmp3, pc);
2477 goto end_op;
2478 case 0x0400: // MOV.B @(disp,GBR),R0 11000100dddddddd
2479 case 0x0500: // MOV.W @(disp,GBR),R0 11000101dddddddd
2480 case 0x0600: // MOV.L @(disp,GBR),R0 11000110dddddddd
2481 tmp = (op & 0x300) >> 8;
2482 emit_memhandler_read_rr(SHR_R0, SHR_GBR, (op & 0xff) << tmp, tmp);
2483 goto end_op;
2484 case 0x0300: // TRAPA #imm 11000011iiiiiiii
2485 tmp = rcache_get_reg(SHR_SP, RC_GR_RMW);
2486 emith_sub_r_imm(tmp, 4*2);
2487 // push SR
2488 tmp = rcache_get_reg_arg(0, SHR_SP);
2489 emith_add_r_imm(tmp, 4);
2490 tmp = rcache_get_reg_arg(1, SHR_SR);
2491 emith_clear_msb(tmp, tmp, 22);
2492 emit_memhandler_write(2, pc);
2493 // push PC
2494 rcache_get_reg_arg(0, SHR_SP);
2495 tmp = rcache_get_tmp_arg(1);
2496 emith_move_r_imm(tmp, pc);
2497 emit_memhandler_write(2, pc);
2498 // obtain new PC
2499 emit_memhandler_read_rr(SHR_PC, SHR_VBR, (op & 0xff) * 4, 2);
2500 // indirect jump -> back to dispatcher
2501 rcache_flush();
2502 emith_jump(sh2_drc_dispatcher);
2503 goto end_op;
2504 case 0x0800: // TST #imm,R0 11001000iiiiiiii
2505 tmp = rcache_get_reg(SHR_R0, RC_GR_READ);
2506 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2507 emith_bic_r_imm(sr, T);
2508 emith_tst_r_imm(tmp, op & 0xff);
2509 emit_or_t_if_eq(sr);
2510 goto end_op;
2511 case 0x0900: // AND #imm,R0 11001001iiiiiiii
2512 tmp = rcache_get_reg(SHR_R0, RC_GR_RMW);
2513 emith_and_r_imm(tmp, op & 0xff);
2514 goto end_op;
2515 case 0x0a00: // XOR #imm,R0 11001010iiiiiiii
2516 tmp = rcache_get_reg(SHR_R0, RC_GR_RMW);
2517 emith_eor_r_imm(tmp, op & 0xff);
2518 goto end_op;
2519 case 0x0b00: // OR #imm,R0 11001011iiiiiiii
2520 tmp = rcache_get_reg(SHR_R0, RC_GR_RMW);
2521 emith_or_r_imm(tmp, op & 0xff);
2522 goto end_op;
2523 case 0x0c00: // TST.B #imm,@(R0,GBR) 11001100iiiiiiii
2524 tmp = emit_indirect_indexed_read(SHR_R0, SHR_GBR, 0);
2525 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2526 emith_bic_r_imm(sr, T);
2527 emith_tst_r_imm(tmp, op & 0xff);
2528 emit_or_t_if_eq(sr);
2529 rcache_free_tmp(tmp);
2530 goto end_op;
2531 case 0x0d00: // AND.B #imm,@(R0,GBR) 11001101iiiiiiii
2532 tmp = emit_indirect_indexed_read(SHR_R0, SHR_GBR, 0);
2533 emith_and_r_imm(tmp, op & 0xff);
2534 goto end_rmw_op;
2535 case 0x0e00: // XOR.B #imm,@(R0,GBR) 11001110iiiiiiii
2536 tmp = emit_indirect_indexed_read(SHR_R0, SHR_GBR, 0);
2537 emith_eor_r_imm(tmp, op & 0xff);
2538 goto end_rmw_op;
2539 case 0x0f00: // OR.B #imm,@(R0,GBR) 11001111iiiiiiii
2540 tmp = emit_indirect_indexed_read(SHR_R0, SHR_GBR, 0);
2541 emith_or_r_imm(tmp, op & 0xff);
2542 end_rmw_op:
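// shared write-back tail for the AND.B/XOR.B/OR.B @(R0,GBR) cases above:
// the modified byte in tmp is stored back to address R0 + GBR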
2543 tmp2 = rcache_get_tmp_arg(1);
2544 emith_move_r_r(tmp2, tmp);
2545 rcache_free_tmp(tmp);
2546 tmp3 = rcache_get_reg_arg(0, SHR_GBR);
2547 tmp4 = rcache_get_reg(SHR_R0, RC_GR_READ);
2548 emith_add_r_r(tmp3, tmp4);
2549 emit_memhandler_write(0, pc);
2550 goto end_op;
2551 }
2552 goto default_;
2553
2554 /////////////////////////////////////////////
2555 case 0x0e:
2556 // MOV #imm,Rn 1110nnnniiiiiiii
2557 emit_move_r_imm32(GET_Rn(), (u32)(signed int)(signed char)op);
2558 goto end_op;
2559
2560 default:
2561 default_:
2562 elprintf(EL_ANOMALY, "%csh2 drc: unhandled op %04x @ %08x",
2563 sh2->is_slave ? 's' : 'm', op, pc - 2);
2564 break;
2565 }
2566
2567end_op:
2568 rcache_unlock_all();
2569
2570 cycles += opd->cycles;
2571
2572 if (op_flags[i+1] & OF_DELAY_OP) {
2573 do_host_disasm(tcache_id);
2574 continue;
2575 }
2576
2577 // test irq?
2578 if (drcf.test_irq && !drcf.pending_branch_direct) {
2579 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2580 FLUSH_CYCLES(sr);
2581 if (!drcf.pending_branch_indirect)
2582 emit_move_r_imm32(SHR_PC, pc);
2583 rcache_flush();
2584 emith_call(sh2_drc_test_irq);
2585 drcf.test_irq = 0;
2586 }
2587
2588 // branch handling (with/without delay)
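// (a direct branch is either linked to a local target within this block,
//  patched after translation, or becomes a block exit via
//  dr_prepare_ext_branch(); a taken conditional branch also pays its extra
//  cycles by adjusting the counter kept in the SR upper bits)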
2589 if (drcf.pending_branch_direct)
2590 {
2591 struct op_data *opd_b =
2592 (op_flags[i] & OF_DELAY_OP) ? &ops[i-1] : opd;
2593 u32 target_pc = opd_b->imm;
2594 int cond = -1;
2595 void *target = NULL;
2596
2597 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2598 FLUSH_CYCLES(sr);
2599
2600 if (opd_b->op != OP_BRANCH)
2601 cond = (opd_b->op == OP_BRANCH_CF) ? DCOND_EQ : DCOND_NE;
2602 if (cond != -1) {
2603 int ctaken = (op_flags[i] & OF_DELAY_OP) ? 1 : 2;
2604
2605 if (delay_dep_fw & BITMASK1(SHR_T))
2606 emith_tst_r_imm(sr, T_save);
2607 else
2608 emith_tst_r_imm(sr, T);
2609
2610 emith_sub_r_imm_c(cond, sr, ctaken<<12);
2611 }
2612 rcache_clean();
2613
2614#if LINK_BRANCHES
2615 if (find_in_array(branch_target_pc, branch_target_count, target_pc) >= 0)
2616 {
2617 // local branch
2618 // XXX: backward jumps could already be linked here
2619 if (branch_patch_count < MAX_LOCAL_BRANCHES) {
2620 target = tcache_ptr;
2621 branch_patch_pc[branch_patch_count] = target_pc;
2622 branch_patch_ptr[branch_patch_count] = target;
2623 branch_patch_count++;
2624 }
2625 else
2626 dbg(1, "warning: too many local branches");
2627 }
2628
2629 if (target == NULL)
2630#endif
2631 {
2632 // can't resolve branch locally, make a block exit
2633 emit_move_r_imm32(SHR_PC, target_pc);
2634 rcache_clean();
2635
2636 target = dr_prepare_ext_branch(target_pc, sh2->is_slave, tcache_id);
2637 if (target == NULL)
2638 return NULL;
2639 }
2640
2641 if (cond != -1)
2642 emith_jump_cond_patchable(cond, target);
2643 else {
2644 emith_jump_patchable(target);
2645 rcache_invalidate();
2646 }
2647
2648 drcf.pending_branch_direct = 0;
2649 }
2650 else if (drcf.pending_branch_indirect) {
2651 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2652 FLUSH_CYCLES(sr);
2653 rcache_flush();
2654 emith_jump(sh2_drc_dispatcher);
2655 drcf.pending_branch_indirect = 0;
2656 }
2657
2658 do_host_disasm(tcache_id);
2659 }
2660
2661 tmp = rcache_get_reg(SHR_SR, RC_GR_RMW);
2662 FLUSH_CYCLES(tmp);
2663 rcache_flush();
2664
2665 // check the last op
2666 if (op_flags[i-1] & OF_DELAY_OP)
2667 opd = &ops[i-2];
2668 else
2669 opd = &ops[i-1];
2670
2671 if (opd->op != OP_BRANCH && opd->op != OP_BRANCH_R
2672 && opd->op != OP_BRANCH_RF && opd->op != OP_RTE)
2673 {
2674 void *target;
2675
2676 emit_move_r_imm32(SHR_PC, pc);
2677 rcache_flush();
2678
2679 target = dr_prepare_ext_branch(pc, sh2->is_slave, tcache_id);
2680 if (target == NULL)
2681 return NULL;
2682 emith_jump_patchable(target);
2683 }
2684
2685 // link local branches
2686 for (i = 0; i < branch_patch_count; i++) {
2687 void *target;
2688 int t;
2689 t = find_in_array(branch_target_pc, branch_target_count, branch_patch_pc[i]);
2690 target = branch_target_ptr[t];
2691 if (target == NULL) {
2692 // flush pc and go back to dispatcher (this should no longer happen)
2693 dbg(1, "stray branch to %08x %p", branch_patch_pc[i], tcache_ptr);
2694 target = tcache_ptr;
2695 emit_move_r_imm32(SHR_PC, branch_patch_pc[i]);
2696 rcache_flush();
2697 emith_jump(sh2_drc_dispatcher);
2698 }
2699 emith_jump_patch(branch_patch_ptr[i], target);
2700 }
2701
2702 // mark memory blocks as containing compiled code
2703 // override any overlay blocks as they become unreachable anyway
2704 if ((block->addr & 0xc7fc0000) == 0x06000000
2705 || (block->addr & 0xfffff000) == 0xc0000000)
2706 {
2707 u16 *drc_ram_blk = NULL;
2708 u32 addr, mask = 0, shift = 0;
2709
2710 if (tcache_id != 0) {
2711 // data array, BIOS
2712 drc_ram_blk = Pico32xMem->drcblk_da[sh2->is_slave];
2713 shift = SH2_DRCBLK_DA_SHIFT;
2714 mask = 0xfff;
2715 }
2716 else {
2717 // SDRAM
2718 drc_ram_blk = Pico32xMem->drcblk_ram;
2719 shift = SH2_DRCBLK_RAM_SHIFT;
2720 mask = 0x3ffff;
2721 }
2722
2723 // mark recompiled insns
2724 drc_ram_blk[(base_pc & mask) >> shift] = 1;
2725 for (pc = base_pc; pc < end_pc; pc += 2)
2726 drc_ram_blk[(pc & mask) >> shift] = 1;
2727
2728 // mark literals
2729 for (i = 0; i < literal_addr_count; i++) {
2730 tmp = literal_addr[i];
2731 drc_ram_blk[(tmp & mask) >> shift] = 1;
2732 }
2733
2734 // add to invalidation lookup lists
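// (one block list per ADDR_TO_BLOCK_PAGE-sized page, so a write only has to
//  scan the blocks overlapping its own page - see sh2_smc_rm_block)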
2735 addr = base_pc & ~(ADDR_TO_BLOCK_PAGE - 1);
2736 for (; addr < end_literals; addr += ADDR_TO_BLOCK_PAGE) {
2737 i = (addr & mask) / ADDR_TO_BLOCK_PAGE;
2738 add_to_block_list(&inval_lookup[tcache_id][i], block);
2739 }
2740 }
2741
2742 tcache_ptrs[tcache_id] = tcache_ptr;
2743
2744 host_instructions_updated(block_entry_ptr, tcache_ptr);
2745
2746 do_host_disasm(tcache_id);
2747 dbg(2, " block #%d,%d tcache %d/%d, insns %d -> %d %.3f",
2748 tcache_id, blkid_main,
2749 tcache_ptr - tcache_bases[tcache_id], tcache_sizes[tcache_id],
2750 insns_compiled, host_insn_count, (float)host_insn_count / insns_compiled);
2751 if ((sh2->pc & 0xc6000000) == 0x02000000) // ROM
2752 dbg(2, " hash collisions %d/%d", hash_collisions, block_counts[tcache_id]);
2753/*
2754 printf("~~~\n");
2755 tcache_dsm_ptrs[tcache_id] = block_entry_ptr;
2756 do_host_disasm(tcache_id);
2757 printf("~~~\n");
2758*/
2759
2760#if (DRC_DEBUG & 4)
2761 fflush(stdout);
2762#endif
2763
2764 return block_entry_ptr;
2765}
2766
2767static void sh2_generate_utils(void)
2768{
2769 int arg0, arg1, arg2, sr, tmp;
2770
2771 sh2_drc_write32 = p32x_sh2_write32;
2772 sh2_drc_read8 = p32x_sh2_read8;
2773 sh2_drc_read16 = p32x_sh2_read16;
2774 sh2_drc_read32 = p32x_sh2_read32;
2775
2776 host_arg2reg(arg0, 0);
2777 host_arg2reg(arg1, 1);
2778 host_arg2reg(arg2, 2);
2779 emith_move_r_r(arg0, arg0); // nop
2780
2781 // sh2_drc_exit(void)
2782 sh2_drc_exit = (void *)tcache_ptr;
2783 emit_do_static_regs(1, arg2);
2784 emith_sh2_drc_exit();
2785
2786 // sh2_drc_dispatcher(void)
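// exits when the cycle counter in the SR upper bits runs out (SR < 0);
// otherwise looks up the block for the current PC and jumps to it,
// translating it first (and flushing the tcache as a last resort) on a miss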
2787 sh2_drc_dispatcher = (void *)tcache_ptr;
2788 sr = rcache_get_reg(SHR_SR, RC_GR_READ);
2789 emith_cmp_r_imm(sr, 0);
2790 emith_jump_cond(DCOND_LT, sh2_drc_exit);
2791 rcache_invalidate();
2792 emith_ctx_read(arg0, SHR_PC * 4);
2793 emith_ctx_read(arg1, offsetof(SH2, is_slave));
2794 emith_add_r_r_imm(arg2, CONTEXT_REG, offsetof(SH2, drc_tmp));
2795 emith_call(dr_lookup_block);
2796 emit_block_entry();
2797 // lookup failed, call sh2_translate()
2798 emith_move_r_r(arg0, CONTEXT_REG);
2799 emith_ctx_read(arg1, offsetof(SH2, drc_tmp)); // tcache_id
2800 emith_call(sh2_translate);
2801 emit_block_entry();
2802 // sh2_translate() failed, flush cache and retry
2803 emith_ctx_read(arg0, offsetof(SH2, drc_tmp));
2804 emith_call(flush_tcache);
2805 emith_move_r_r(arg0, CONTEXT_REG);
2806 emith_ctx_read(arg1, offsetof(SH2, drc_tmp));
2807 emith_call(sh2_translate);
2808 emit_block_entry();
2809 // XXX: can't translate, fail
2810 emith_call(dr_failure);
2811
2812 // sh2_drc_test_irq(void)
2813 // assumes it's called from main function (may jump to dispatcher)
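// roughly (illustrative): if (pending_level <= I) return;
//   push(SR); push(PC); SR.I = pending_level; cycles -= 13;
//   vector = irq_callback(); PC = read32(VBR + vector * 4); -> dispatcher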
2814 sh2_drc_test_irq = (void *)tcache_ptr;
2815 emith_ctx_read(arg1, offsetof(SH2, pending_level));
2816 sr = rcache_get_reg(SHR_SR, RC_GR_READ);
2817 emith_lsr(arg0, sr, I_SHIFT);
2818 emith_and_r_imm(arg0, 0x0f);
2819 emith_cmp_r_r(arg1, arg0); // pending_level > ((sr >> 4) & 0x0f)?
2820 EMITH_SJMP_START(DCOND_GT);
2821 emith_ret_c(DCOND_LE); // nope, return
2822 EMITH_SJMP_END(DCOND_GT);
2823 // adjust SP
2824 tmp = rcache_get_reg(SHR_SP, RC_GR_RMW);
2825 emith_sub_r_imm(tmp, 4*2);
2826 rcache_clean();
2827 // push SR
2828 tmp = rcache_get_reg_arg(0, SHR_SP);
2829 emith_add_r_imm(tmp, 4);
2830 tmp = rcache_get_reg_arg(1, SHR_SR);
2831 emith_clear_msb(tmp, tmp, 22);
2832 emith_move_r_r(arg2, CONTEXT_REG);
2833 emith_call(p32x_sh2_write32); // XXX: use sh2_drc_write32?
2834 rcache_invalidate();
2835 // push PC
2836 rcache_get_reg_arg(0, SHR_SP);
2837 emith_ctx_read(arg1, SHR_PC * 4);
2838 emith_move_r_r(arg2, CONTEXT_REG);
2839 emith_call(p32x_sh2_write32);
2840 rcache_invalidate();
2841 // update I, cycles, do callback
2842 emith_ctx_read(arg1, offsetof(SH2, pending_level));
2843 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2844 emith_bic_r_imm(sr, I);
2845 emith_or_r_r_lsl(sr, arg1, I_SHIFT);
2846 emith_sub_r_imm(sr, 13 << 12); // at least 13 cycles
2847 rcache_flush();
2848 emith_move_r_r(arg0, CONTEXT_REG);
2849 emith_call_ctx(offsetof(SH2, irq_callback)); // vector = sh2->irq_callback(sh2, level);
2850 // obtain new PC
2851 emith_lsl(arg0, arg0, 2);
2852 emith_ctx_read(arg1, SHR_VBR * 4);
2853 emith_add_r_r(arg0, arg1);
2854 emit_memhandler_read(2);
2855 emith_ctx_write(arg0, SHR_PC * 4);
2856#ifdef __i386__
2857 emith_add_r_imm(xSP, 4); // fix stack
2858#endif
2859 emith_jump(sh2_drc_dispatcher);
2860 rcache_invalidate();
2861
2862 // sh2_drc_entry(SH2 *sh2)
2863 sh2_drc_entry = (void *)tcache_ptr;
2864 emith_sh2_drc_entry();
2865 emith_move_r_r(CONTEXT_REG, arg0); // move ctx, arg0
2866 emit_do_static_regs(0, arg2);
2867 emith_call(sh2_drc_test_irq);
2868 emith_jump(sh2_drc_dispatcher);
2869
2870 // sh2_drc_write8(u32 a, u32 d)
2871 sh2_drc_write8 = (void *)tcache_ptr;
2872 emith_ctx_read(arg2, offsetof(SH2, write8_tab));
2873 emith_sh2_wcall(arg0, arg2);
2874
2875 // sh2_drc_write16(u32 a, u32 d)
2876 sh2_drc_write16 = (void *)tcache_ptr;
2877 emith_ctx_read(arg2, offsetof(SH2, write16_tab));
2878 emith_sh2_wcall(arg0, arg2);
2879
2880#ifdef PDB_NET
2881 // debug
2882 #define MAKE_READ_WRAPPER(func) { \
2883 void *tmp = (void *)tcache_ptr; \
2884 emith_push_ret(); \
2885 emith_call(func); \
2886 emith_ctx_read(arg2, offsetof(SH2, pdb_io_csum[0])); \
2887 emith_addf_r_r(arg2, arg0); \
2888 emith_ctx_write(arg2, offsetof(SH2, pdb_io_csum[0])); \
2889 emith_ctx_read(arg2, offsetof(SH2, pdb_io_csum[1])); \
2890 emith_adc_r_imm(arg2, 0x01000000); \
2891 emith_ctx_write(arg2, offsetof(SH2, pdb_io_csum[1])); \
2892 emith_pop_and_ret(); \
2893 func = tmp; \
2894 }
2895 #define MAKE_WRITE_WRAPPER(func) { \
2896 void *tmp = (void *)tcache_ptr; \
2897 emith_ctx_read(arg2, offsetof(SH2, pdb_io_csum[0])); \
2898 emith_addf_r_r(arg2, arg1); \
2899 emith_ctx_write(arg2, offsetof(SH2, pdb_io_csum[0])); \
2900 emith_ctx_read(arg2, offsetof(SH2, pdb_io_csum[1])); \
2901 emith_adc_r_imm(arg2, 0x01000000); \
2902 emith_ctx_write(arg2, offsetof(SH2, pdb_io_csum[1])); \
2903 emith_move_r_r(arg2, CONTEXT_REG); \
2904 emith_jump(func); \
2905 func = tmp; \
2906 }
2907
2908 MAKE_READ_WRAPPER(sh2_drc_read8);
2909 MAKE_READ_WRAPPER(sh2_drc_read16);
2910 MAKE_READ_WRAPPER(sh2_drc_read32);
2911 MAKE_WRITE_WRAPPER(sh2_drc_write8);
2912 MAKE_WRITE_WRAPPER(sh2_drc_write16);
2913 MAKE_WRITE_WRAPPER(sh2_drc_write32);
2914#if (DRC_DEBUG & 4)
2915 host_dasm_new_symbol(sh2_drc_read8);
2916 host_dasm_new_symbol(sh2_drc_read16);
2917 host_dasm_new_symbol(sh2_drc_read32);
2918 host_dasm_new_symbol(sh2_drc_write32);
2919#endif
2920#endif
2921
2922 rcache_invalidate();
2923#if (DRC_DEBUG & 4)
2924 host_dasm_new_symbol(sh2_drc_entry);
2925 host_dasm_new_symbol(sh2_drc_dispatcher);
2926 host_dasm_new_symbol(sh2_drc_exit);
2927 host_dasm_new_symbol(sh2_drc_test_irq);
2928 host_dasm_new_symbol(sh2_drc_write8);
2929 host_dasm_new_symbol(sh2_drc_write16);
2930#endif
2931}
2932
2933static void sh2_smc_rm_block_entry(struct block_desc *bd, int tcache_id, u32 ram_mask)
2934{
2935 struct block_link *bl, *bl_next, *bl_unresolved;
2936 void *tmp;
2937 u32 i, addr;
2938
2939 dbg(2, " killing entry %08x-%08x, blkid %d,%d",
2940 bd->addr, bd->end_addr, tcache_id, bd - block_tables[tcache_id]);
2941 if (bd->addr == 0 || bd->entry_count == 0) {
2942 dbg(1, " killing dead block!? %08x", bd->addr);
2943 return;
2944 }
2945
2946 // remove from inval_lookup
2947 addr = bd->addr & ~(ADDR_TO_BLOCK_PAGE - 1);
2948 for (; addr < bd->end_addr; addr += ADDR_TO_BLOCK_PAGE) {
2949 i = (addr & ram_mask) / ADDR_TO_BLOCK_PAGE;
2950 rm_from_block_list(&inval_lookup[tcache_id][i], bd);
2951 }
2952
2953 tmp = tcache_ptr;
2954 bl_unresolved = unresolved_links[tcache_id];
2955
2956 // remove from hash table, make incoming links unresolved
2957 // XXX: maybe patch branches w/flush instead?
2958 for (i = 0; i < bd->entry_count; i++) {
2959 rm_from_hashlist(&bd->entryp[i], tcache_id);
2960
2961 // since we never reuse tcache space of dead blocks,
2962 // insert a jump to the dispatcher for blocks that link to this one
2963 tcache_ptr = bd->entryp[i].tcache_ptr;
2964 emit_move_r_imm32(SHR_PC, bd->entryp[i].pc);
2965 rcache_flush();
2966 emith_jump(sh2_drc_dispatcher);
2967
2968 host_instructions_updated(bd->entryp[i].tcache_ptr, tcache_ptr);
2969
2970 for (bl = bd->entryp[i].links; bl != NULL; ) {
2971 bl_next = bl->next;
2972 bl->next = bl_unresolved;
2973 bl_unresolved = bl;
2974 bl = bl_next;
2975 }
2976 }
2977
2978 tcache_ptr = tmp;
2979 unresolved_links[tcache_id] = bl_unresolved;
2980
2981 bd->addr = bd->end_addr = 0;
2982 bd->entry_count = 0;
2983}
2984
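// remove all blocks that cover address a, then clear the code marks for the
// part of the range that is no longer covered by any remaining block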
2985static void sh2_smc_rm_block(u32 a, u16 *drc_ram_blk, int tcache_id, u32 shift, u32 mask)
2986{
2987 struct block_list **blist = NULL, *entry;
2988 u32 from = ~0, to = 0;
2989 struct block_desc *block;
2990
2991 blist = &inval_lookup[tcache_id][(a & mask) / ADDR_TO_BLOCK_PAGE];
2992 entry = *blist;
2993 while (entry != NULL) {
2994 block = entry->block;
2995 if (block->addr <= a && a < block->end_addr) {
2996 if (block->addr < from)
2997 from = block->addr;
2998 if (block->end_addr > to)
2999 to = block->end_addr;
3000
3001 sh2_smc_rm_block_entry(block, tcache_id, mask);
3002
3003 // entry lost, restart search
3004 entry = *blist;
3005 continue;
3006 }
3007 entry = entry->next;
3008 }
3009
3010 // update the range so still-alive blocks are not cleared
3011 for (entry = *blist; entry != NULL; entry = entry->next) {
3012 block = entry->block;
3013 if (block->addr > a) {
3014 if (to > block->addr)
3015 to = block->addr;
3016 }
3017 else {
3018 if (from < block->end_addr)
3019 from = block->end_addr;
3020 }
3021 }
3022
3023 // clear code marks
3024 if (from < to) {
3025 u16 *p = drc_ram_blk + ((from & mask) >> shift);
3026 memset(p, 0, (to - from) >> (shift - 1));
3027 }
3028}
3029
3030void sh2_drc_wcheck_ram(unsigned int a, int val, int cpuid)
3031{
3032 dbg(2, "%csh2 smc check @%08x", cpuid ? 's' : 'm', a);
3033 sh2_smc_rm_block(a, Pico32xMem->drcblk_ram, 0, SH2_DRCBLK_RAM_SHIFT, 0x3ffff);
3034}
3035
3036void sh2_drc_wcheck_da(unsigned int a, int val, int cpuid)
3037{
3038 dbg(2, "%csh2 smc check @%08x", cpuid ? 's' : 'm', a);
3039 sh2_smc_rm_block(a, Pico32xMem->drcblk_da[cpuid],
3040 1 + cpuid, SH2_DRCBLK_DA_SHIFT, 0xfff);
3041}
3042
3043int sh2_execute(SH2 *sh2c, int cycles)
3044{
3045 int ret_cycles;
3046
3047 sh2c->cycles_timeslice = cycles;
3048
3049 // cycles are kept in SHR_SR unused bits (upper 20)
3050 // bit11 contains T saved for delay slot
3051 // others are usual SH2 flags
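// illustrative layout: sr = (cycles_left << 12) | (T_save << 11) | (M Q I3-I0 S T)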
3052 sh2c->sr &= 0x3f3;
3053 sh2c->sr |= cycles << 12;
3054 sh2_drc_entry(sh2c);
3055
3056 // TODO: irq cycles
3057 ret_cycles = (signed int)sh2c->sr >> 12;
3058 if (ret_cycles > 0)
3059 dbg(1, "warning: drc returned with cycles: %d", ret_cycles);
3060
3061 return sh2c->cycles_timeslice - ret_cycles;
3062}
3063
3064#if (DRC_DEBUG & 2)
3065void block_stats(void)
3066{
3067 int c, b, i, total = 0;
3068
3069 printf("block stats:\n");
3070 for (b = 0; b < ARRAY_SIZE(block_tables); b++)
3071 for (i = 0; i < block_counts[b]; i++)
3072 if (block_tables[b][i].addr != 0)
3073 total += block_tables[b][i].refcount;
3074
3075 for (c = 0; c < 10; c++) {
3076 struct block_desc *blk, *maxb = NULL;
3077 int max = 0;
3078 for (b = 0; b < ARRAY_SIZE(block_tables); b++) {
3079 for (i = 0; i < block_counts[b]; i++) {
3080 blk = &block_tables[b][i];
3081 if (blk->addr != 0 && blk->refcount > max) {
3082 max = blk->refcount;
3083 maxb = blk;
3084 }
3085 }
3086 }
3087 if (maxb == NULL)
3088 break;
3089 printf("%08x %9d %2.3f%%\n", maxb->addr, maxb->refcount,
3090 (double)maxb->refcount / total * 100.0);
3091 maxb->refcount = 0;
3092 }
3093
3094 for (b = 0; b < ARRAY_SIZE(block_tables); b++)
3095 for (i = 0; i < block_counts[b]; i++)
3096 block_tables[b][i].refcount = 0;
3097}
3098#else
3099#define block_stats()
3100#endif
3101
3102void sh2_drc_flush_all(void)
3103{
3104 block_stats();
3105 flush_tcache(0);
3106 flush_tcache(1);
3107 flush_tcache(2);
3108}
3109
3110void sh2_drc_mem_setup(SH2 *sh2)
3111{
3112 // fill the convenience pointers
3113 sh2->p_bios = sh2->is_slave ? Pico32xMem->sh2_rom_s : Pico32xMem->sh2_rom_m;
3114 sh2->p_da = Pico32xMem->data_array[sh2->is_slave];
3115 sh2->p_sdram = Pico32xMem->sdram;
3116 sh2->p_rom = Pico.rom;
3117}
3118
3119int sh2_drc_init(SH2 *sh2)
3120{
3121 int i;
3122
3123 if (block_tables[0] == NULL)
3124 {
3125 for (i = 0; i < TCACHE_BUFFERS; i++) {
3126 block_tables[i] = calloc(block_max_counts[i], sizeof(*block_tables[0]));
3127 if (block_tables[i] == NULL)
3128 goto fail;
3129 // max 2 block links (exits) per block
3130 block_link_pool[i] = calloc(block_link_pool_max_counts[i],
3131 sizeof(*block_link_pool[0]));
3132 if (block_link_pool[i] == NULL)
3133 goto fail;
3134
3135 inval_lookup[i] = calloc(ram_sizes[i] / ADDR_TO_BLOCK_PAGE,
3136 sizeof(inval_lookup[0]));
3137 if (inval_lookup[i] == NULL)
3138 goto fail;
3139
3140 hash_tables[i] = calloc(hash_table_sizes[i], sizeof(*hash_tables[0]));
3141 if (hash_tables[i] == NULL)
3142 goto fail;
3143 }
3144 memset(block_counts, 0, sizeof(block_counts));
3145 memset(block_link_pool_counts, 0, sizeof(block_link_pool_counts));
3146
3147 drc_cmn_init();
3148 tcache_ptr = tcache;
3149 sh2_generate_utils();
3150 host_instructions_updated(tcache, tcache_ptr);
3151
3152 tcache_bases[0] = tcache_ptrs[0] = tcache_ptr;
3153 for (i = 1; i < ARRAY_SIZE(tcache_bases); i++)
3154 tcache_bases[i] = tcache_ptrs[i] = tcache_bases[i - 1] + tcache_sizes[i - 1];
3155
3156#if (DRC_DEBUG & 4)
3157 for (i = 0; i < ARRAY_SIZE(block_tables); i++)
3158 tcache_dsm_ptrs[i] = tcache_bases[i];
3159 // disasm the utils
3160 tcache_dsm_ptrs[0] = tcache;
3161 do_host_disasm(0);
3162#endif
3163#if (DRC_DEBUG & 1)
3164 hash_collisions = 0;
3165#endif
3166 }
3167
3168 return 0;
3169
3170fail:
3171 sh2_drc_finish(sh2);
3172 return -1;
3173}
3174
3175void sh2_drc_finish(SH2 *sh2)
3176{
3177 int i;
3178
3179 if (block_tables[0] == NULL)
3180 return;
3181
3182 sh2_drc_flush_all();
3183
3184 for (i = 0; i < TCACHE_BUFFERS; i++) {
3185#if (DRC_DEBUG & 4)
3186 printf("~~~ tcache %d\n", i);
3187 tcache_dsm_ptrs[i] = tcache_bases[i];
3188 tcache_ptr = tcache_ptrs[i];
3189 do_host_disasm(i);
3190#endif
3191
3192 if (block_tables[i] != NULL)
3193 free(block_tables[i]);
3194 block_tables[i] = NULL;
3195 if (block_link_pool[i] != NULL)
3196 free(block_link_pool[i]);
3197 block_link_pool[i] = NULL;
3198
3199 if (inval_lookup[i] != NULL)
3200 free(inval_lookup[i]);
3201 inval_lookup[i] = NULL;
3202
3203 if (hash_tables[i] != NULL) {
3204 free(hash_tables[i]);
3205 hash_tables[i] = NULL;
3206 }
3207 }
3208
3209 drc_cmn_cleanup();
3210}
3211
3212#endif /* DRC_SH2 */
3213
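// return a host pointer that can be indexed directly with the SH2 PC
// (BIOS, data array, SDRAM or ROM), or -1 if the PC maps to none of these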
3214static void *dr_get_pc_base(u32 pc, int is_slave)
3215{
3216 void *ret = NULL;
3217 u32 mask = 0;
3218
3219 if ((pc & ~0x7ff) == 0) {
3220 // BIOS
3221 ret = is_slave ? Pico32xMem->sh2_rom_s : Pico32xMem->sh2_rom_m;
3222 mask = 0x7ff;
3223 }
3224 else if ((pc & 0xfffff000) == 0xc0000000) {
3225 // data array
3226 ret = Pico32xMem->data_array[is_slave];
3227 mask = 0xfff;
3228 }
3229 else if ((pc & 0xc6000000) == 0x06000000) {
3230 // SDRAM
3231 ret = Pico32xMem->sdram;
3232 mask = 0x03ffff;
3233 }
3234 else if ((pc & 0xc6000000) == 0x02000000) {
3235 // ROM
3236 ret = Pico.rom;
3237 mask = 0x3fffff;
3238 }
3239
3240 if (ret == NULL)
3241 return (void *)-1; // NULL is valid value
3242
3243 return (char *)ret - (pc & ~mask);
3244}
3245
3246void scan_block(u32 base_pc, int is_slave, u8 *op_flags, u32 *end_pc_out,
3247 u32 *end_literals_out)
3248{
3249 u16 *dr_pc_base;
3250 u32 pc, op, tmp;
3251 u32 end_pc, end_literals = 0;
3252 struct op_data *opd;
3253 int next_is_delay = 0;
3254 int end_block = 0;
3255 int i, i_end;
3256
3257 memset(op_flags, 0, BLOCK_INSN_LIMIT);
3258
3259 dr_pc_base = dr_get_pc_base(base_pc, is_slave);
3260
3261 // 1st pass: disassemble
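// (fills ops[] with per-insn type, source/dest register masks, cycles and
//  immediates, and marks branch targets / delay slots in op_flags[]; stops
//  after a block-ending insn and its delay slot, or at the insn limit)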
3262 for (i = 0, pc = base_pc; ; i++, pc += 2) {
3263 // we need an ops[] entry after the last one initialized,
3264 // so do it before end_block checks
3265 opd = &ops[i];
3266 opd->op = OP_UNHANDLED;
3267 opd->rm = -1;
3268 opd->source = opd->dest = 0;
3269 opd->cycles = 1;
3270 opd->imm = 0;
3271
3272 if (next_is_delay) {
3273 op_flags[i] |= OF_DELAY_OP;
3274 next_is_delay = 0;
3275 }
3276 else if (end_block || i >= BLOCK_INSN_LIMIT - 2)
3277 break;
3278
3279 op = FETCH_OP(pc);
3280 switch ((op & 0xf000) >> 12)
3281 {
3282 /////////////////////////////////////////////
3283 case 0x00:
3284 switch (op & 0x0f)
3285 {
3286 case 0x02:
3287 switch (GET_Fx())
3288 {
3289 case 0: // STC SR,Rn 0000nnnn00000010
3290 tmp = SHR_SR;
3291 break;
3292 case 1: // STC GBR,Rn 0000nnnn00010010
3293 tmp = SHR_GBR;
3294 break;
3295 case 2: // STC VBR,Rn 0000nnnn00100010
3296 tmp = SHR_VBR;
3297 break;
3298 default:
3299 goto undefined;
3300 }
3301 opd->op = OP_MOVE;
3302 opd->source = BITMASK1(tmp);
3303 opd->dest = BITMASK1(GET_Rn());
3304 break;
3305 case 0x03:
3306 CHECK_UNHANDLED_BITS(0xd0, undefined);
3307 // BRAF Rm 0000mmmm00100011
3308 // BSRF Rm 0000mmmm00000011
3309 opd->op = OP_BRANCH_RF;
3310 opd->rm = GET_Rn();
3311 opd->source = BITMASK1(opd->rm);
3312 opd->dest = BITMASK1(SHR_PC);
3313 if (!(op & 0x20))
3314 opd->dest |= BITMASK1(SHR_PR);
3315 opd->cycles = 2;
3316 next_is_delay = 1;
3317 end_block = 1;
3318 break;
3319 case 0x04: // MOV.B Rm,@(R0,Rn) 0000nnnnmmmm0100
3320 case 0x05: // MOV.W Rm,@(R0,Rn) 0000nnnnmmmm0101
3321 case 0x06: // MOV.L Rm,@(R0,Rn) 0000nnnnmmmm0110
3322 opd->source = BITMASK3(GET_Rm(), SHR_R0, GET_Rn());
3323 break;
3324 case 0x07:
3325 // MUL.L Rm,Rn 0000nnnnmmmm0111
3326 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3327 opd->dest = BITMASK1(SHR_MACL);
3328 opd->cycles = 2;
3329 break;
3330 case 0x08:
3331 CHECK_UNHANDLED_BITS(0xf00, undefined);
3332 switch (GET_Fx())
3333 {
3334 case 0: // CLRT 0000000000001000
3335 opd->op = OP_SETCLRT;
3336 opd->dest = BITMASK1(SHR_T);
3337 opd->imm = 0;
3338 break;
3339 case 1: // SETT 0000000000011000
3340 opd->op = OP_SETCLRT;
3341 opd->dest = BITMASK1(SHR_T);
3342 opd->imm = 1;
3343 break;
3344 case 2: // CLRMAC 0000000000101000
3345 opd->dest = BITMASK3(SHR_T, SHR_MACL, SHR_MACH);
3346 break;
3347 default:
3348 goto undefined;
3349 }
3350 break;
3351 case 0x09:
3352 switch (GET_Fx())
3353 {
3354 case 0: // NOP 0000000000001001
3355 CHECK_UNHANDLED_BITS(0xf00, undefined);
3356 break;
3357 case 1: // DIV0U 0000000000011001
3358 CHECK_UNHANDLED_BITS(0xf00, undefined);
3359 opd->dest = BITMASK2(SHR_SR, SHR_T);
3360 break;
3361 case 2: // MOVT Rn 0000nnnn00101001
3362 opd->source = BITMASK1(SHR_T);
3363 opd->dest = BITMASK1(GET_Rn());
3364 break;
3365 default:
3366 goto undefined;
3367 }
3368 break;
3369 case 0x0a:
3370 switch (GET_Fx())
3371 {
3372 case 0: // STS MACH,Rn 0000nnnn00001010
3373 tmp = SHR_MACH;
3374 break;
3375 case 1: // STS MACL,Rn 0000nnnn00011010
3376 tmp = SHR_MACL;
3377 break;
3378 case 2: // STS PR,Rn 0000nnnn00101010
3379 tmp = SHR_PR;
3380 break;
3381 default:
3382 goto undefined;
3383 }
3384 opd->op = OP_MOVE;
3385 opd->source = BITMASK1(tmp);
3386 opd->dest = BITMASK1(GET_Rn());
3387 break;
3388 case 0x0b:
3389 CHECK_UNHANDLED_BITS(0xf00, undefined);
3390 switch (GET_Fx())
3391 {
3392 case 0: // RTS 0000000000001011
3393 opd->op = OP_BRANCH_R;
3394 opd->rm = SHR_PR;
3395 opd->source = BITMASK1(opd->rm);
3396 opd->dest = BITMASK1(SHR_PC);
3397 opd->cycles = 2;
3398 next_is_delay = 1;
3399 end_block = 1;
3400 break;
3401 case 1: // SLEEP 0000000000011011
3402 opd->op = OP_SLEEP;
3403 end_block = 1;
3404 break;
3405 case 2: // RTE 0000000000101011
3406 opd->op = OP_RTE;
3407 opd->source = BITMASK1(SHR_SP);
3408 opd->dest = BITMASK2(SHR_SR, SHR_PC);
3409 opd->cycles = 4;
3410 next_is_delay = 1;
3411 end_block = 1;
3412 break;
3413 default:
3414 goto undefined;
3415 }
3416 break;
3417 case 0x0c: // MOV.B @(R0,Rm),Rn 0000nnnnmmmm1100
3418 case 0x0d: // MOV.W @(R0,Rm),Rn 0000nnnnmmmm1101
3419 case 0x0e: // MOV.L @(R0,Rm),Rn 0000nnnnmmmm1110
3420 opd->source = BITMASK2(GET_Rm(), SHR_R0);
3421 opd->dest = BITMASK1(GET_Rn());
3422 break;
3423 case 0x0f: // MAC.L @Rm+,@Rn+ 0000nnnnmmmm1111
3424 opd->source = BITMASK5(GET_Rm(), GET_Rn(), SHR_SR, SHR_MACL, SHR_MACH);
3425 opd->dest = BITMASK4(GET_Rm(), GET_Rn(), SHR_MACL, SHR_MACH);
3426 opd->cycles = 3;
3427 break;
3428 default:
3429 goto undefined;
3430 }
3431 break;
3432
3433 /////////////////////////////////////////////
3434 case 0x01:
3435 // MOV.L Rm,@(disp,Rn) 0001nnnnmmmmdddd
3436 opd->source = BITMASK1(GET_Rm());
3437 opd->source |= BITMASK1(GET_Rn());
3438 opd->imm = (op & 0x0f) * 4;
3439 break;
3440
3441 /////////////////////////////////////////////
3442 case 0x02:
3443 switch (op & 0x0f)
3444 {
3445 case 0x00: // MOV.B Rm,@Rn 0010nnnnmmmm0000
3446 case 0x01: // MOV.W Rm,@Rn 0010nnnnmmmm0001
3447 case 0x02: // MOV.L Rm,@Rn 0010nnnnmmmm0010
3448 opd->source = BITMASK1(GET_Rm());
3449 opd->source |= BITMASK1(GET_Rn());
3450 break;
3451 case 0x04: // MOV.B Rm,@-Rn 0010nnnnmmmm0100
3452 case 0x05: // MOV.W Rm,@-Rn 0010nnnnmmmm0101
3453 case 0x06: // MOV.L Rm,@-Rn 0010nnnnmmmm0110
3454 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3455 opd->dest = BITMASK1(GET_Rn());
3456 break;
3457 case 0x07: // DIV0S Rm,Rn 0010nnnnmmmm0111
3458 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3459 opd->dest = BITMASK1(SHR_SR);
3460 break;
3461 case 0x08: // TST Rm,Rn 0010nnnnmmmm1000
3462 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3463 opd->dest = BITMASK1(SHR_T);
3464 break;
3465 case 0x09: // AND Rm,Rn 0010nnnnmmmm1001
3466 case 0x0a: // XOR Rm,Rn 0010nnnnmmmm1010
3467 case 0x0b: // OR Rm,Rn 0010nnnnmmmm1011
3468 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3469 opd->dest = BITMASK1(GET_Rn());
3470 break;
3471 case 0x0c: // CMP/STR Rm,Rn 0010nnnnmmmm1100
3472 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3473 opd->dest = BITMASK1(SHR_T);
3474 break;
3475 case 0x0d: // XTRCT Rm,Rn 0010nnnnmmmm1101
3476 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3477 opd->dest = BITMASK1(GET_Rn());
3478 break;
3479 case 0x0e: // MULU.W Rm,Rn 0010nnnnmmmm1110
3480 case 0x0f: // MULS.W Rm,Rn 0010nnnnmmmm1111
3481 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3482 opd->dest = BITMASK1(SHR_MACL);
3483 break;
3484 default:
3485 goto undefined;
3486 }
3487 break;
3488
3489 /////////////////////////////////////////////
3490 case 0x03:
3491 switch (op & 0x0f)
3492 {
3493 case 0x00: // CMP/EQ Rm,Rn 0011nnnnmmmm0000
3494 case 0x02: // CMP/HS Rm,Rn 0011nnnnmmmm0010
3495 case 0x03: // CMP/GE Rm,Rn 0011nnnnmmmm0011
3496 case 0x06: // CMP/HI Rm,Rn 0011nnnnmmmm0110
3497 case 0x07: // CMP/GT Rm,Rn 0011nnnnmmmm0111
3498 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3499 opd->dest = BITMASK1(SHR_T);
3500 break;
3501 case 0x04: // DIV1 Rm,Rn 0011nnnnmmmm0100
3502 opd->source = BITMASK3(GET_Rm(), GET_Rn(), SHR_SR);
3503 opd->dest = BITMASK2(GET_Rn(), SHR_SR);
3504 break;
3505 case 0x05: // DMULU.L Rm,Rn 0011nnnnmmmm0101
3506 case 0x0d: // DMULS.L Rm,Rn 0011nnnnmmmm1101
3507 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3508 opd->dest = BITMASK2(SHR_MACL, SHR_MACH);
3509 opd->cycles = 2;
3510 break;
3511 case 0x08: // SUB Rm,Rn 0011nnnnmmmm1000
3512 case 0x0c: // ADD Rm,Rn 0011nnnnmmmm1100
3513 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3514 opd->dest = BITMASK1(GET_Rn());
3515 break;
3516 case 0x0a: // SUBC Rm,Rn 0011nnnnmmmm1010
3517 case 0x0e: // ADDC Rm,Rn 0011nnnnmmmm1110
3518 opd->source = BITMASK3(GET_Rm(), GET_Rn(), SHR_T);
3519 opd->dest = BITMASK2(GET_Rn(), SHR_T);
3520 break;
3521 case 0x0b: // SUBV Rm,Rn 0011nnnnmmmm1011
3522 case 0x0f: // ADDV Rm,Rn 0011nnnnmmmm1111
3523 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3524 opd->dest = BITMASK2(GET_Rn(), SHR_T);
3525 break;
3526 default:
3527 goto undefined;
3528 }
3529 break;
3530
3531 /////////////////////////////////////////////
3532 case 0x04:
3533 switch (op & 0x0f)
3534 {
3535 case 0x00:
3536 switch (GET_Fx())
3537 {
3538 case 0: // SHLL Rn 0100nnnn00000000
3539 case 2: // SHAL Rn 0100nnnn00100000
3540 opd->source = BITMASK1(GET_Rn());
3541 opd->dest = BITMASK2(GET_Rn(), SHR_T);
3542 break;
3543 case 1: // DT Rn 0100nnnn00010000
3544 opd->source = BITMASK1(GET_Rn());
3545 opd->dest = BITMASK2(GET_Rn(), SHR_T);
3546 break;
3547 default:
3548 goto undefined;
3549 }
3550 break;
3551 case 0x01:
3552 switch (GET_Fx())
3553 {
3554 case 0: // SHLR Rn 0100nnnn00000001
3555 case 2: // SHAR Rn 0100nnnn00100001
3556 opd->source = BITMASK1(GET_Rn());
3557 opd->dest = BITMASK2(GET_Rn(), SHR_T);
3558 break;
3559 case 1: // CMP/PZ Rn 0100nnnn00010001
3560 opd->source = BITMASK1(GET_Rn());
3561 opd->dest = BITMASK1(SHR_T);
3562 break;
3563 default:
3564 goto undefined;
3565 }
3566 break;
3567 case 0x02:
3568 case 0x03:
3569 switch (op & 0x3f)
3570 {
3571 case 0x02: // STS.L MACH,@-Rn 0100nnnn00000010
3572 tmp = SHR_MACH;
3573 break;
3574 case 0x12: // STS.L MACL,@-Rn 0100nnnn00010010
3575 tmp = SHR_MACL;
3576 break;
3577 case 0x22: // STS.L PR,@-Rn 0100nnnn00100010
3578 tmp = SHR_PR;
3579 break;
3580 case 0x03: // STC.L SR,@-Rn 0100nnnn00000011
3581 tmp = SHR_SR;
3582 opd->cycles = 2;
3583 break;
3584 case 0x13: // STC.L GBR,@-Rn 0100nnnn00010011
3585 tmp = SHR_GBR;
3586 opd->cycles = 2;
3587 break;
3588 case 0x23: // STC.L VBR,@-Rn 0100nnnn00100011
3589 tmp = SHR_VBR;
3590 opd->cycles = 2;
3591 break;
3592 default:
3593 goto undefined;
3594 }
3595 opd->source = BITMASK2(GET_Rn(), tmp);
3596 opd->dest = BITMASK1(GET_Rn());
3597 break;
3598 case 0x04:
3599 case 0x05:
3600 switch (op & 0x3f)
3601 {
3602 case 0x04: // ROTL Rn 0100nnnn00000100
3603 case 0x05: // ROTR Rn 0100nnnn00000101
3604 opd->source = BITMASK1(GET_Rn());
3605 opd->dest = BITMASK2(GET_Rn(), SHR_T);
3606 break;
3607 case 0x24: // ROTCL Rn 0100nnnn00100100
3608 case 0x25: // ROTCR Rn 0100nnnn00100101
3609 opd->source = BITMASK2(GET_Rn(), SHR_T);
3610 opd->dest = BITMASK2(GET_Rn(), SHR_T);
3611 break;
3612 case 0x15: // CMP/PL Rn 0100nnnn00010101
3613 opd->source = BITMASK1(GET_Rn());
3614 opd->dest = BITMASK1(SHR_T);
3615 break;
3616 default:
3617 goto undefined;
3618 }
3619 break;
3620 case 0x06:
3621 case 0x07:
3622 switch (op & 0x3f)
3623 {
3624 case 0x06: // LDS.L @Rm+,MACH 0100mmmm00000110
3625 tmp = SHR_MACH;
3626 break;
3627 case 0x16: // LDS.L @Rm+,MACL 0100mmmm00010110
3628 tmp = SHR_MACL;
3629 break;
3630 case 0x26: // LDS.L @Rm+,PR 0100mmmm00100110
3631 tmp = SHR_PR;
3632 break;
3633 case 0x07: // LDC.L @Rm+,SR 0100mmmm00000111
3634 tmp = SHR_SR;
3635 opd->cycles = 3;
3636 break;
3637 case 0x17: // LDC.L @Rm+,GBR 0100mmmm00010111
3638 tmp = SHR_GBR;
3639 opd->cycles = 3;
3640 break;
3641 case 0x27: // LDC.L @Rm+,VBR 0100mmmm00100111
3642 tmp = SHR_VBR;
3643 opd->cycles = 3;
3644 break;
3645 default:
3646 goto undefined;
3647 }
3648 opd->source = BITMASK1(GET_Rn());
3649 opd->dest = BITMASK2(GET_Rn(), tmp);
3650 break;
3651 case 0x08:
3652 case 0x09:
3653 switch (GET_Fx())
3654 {
3655 case 0:
3656 // SHLL2 Rn 0100nnnn00001000
3657 // SHLR2 Rn 0100nnnn00001001
3658 break;
3659 case 1:
3660 // SHLL8 Rn 0100nnnn00011000
3661 // SHLR8 Rn 0100nnnn00011001
3662 break;
3663 case 2:
3664 // SHLL16 Rn 0100nnnn00101000
3665 // SHLR16 Rn 0100nnnn00101001
3666 break;
3667 default:
3668 goto undefined;
3669 }
3670 opd->source = BITMASK1(GET_Rn());
3671 opd->dest = BITMASK1(GET_Rn());
3672 break;
3673 case 0x0a:
3674 switch (GET_Fx())
3675 {
3676 case 0: // LDS Rm,MACH 0100mmmm00001010
3677 tmp = SHR_MACH;
3678 break;
3679 case 1: // LDS Rm,MACL 0100mmmm00011010
3680 tmp = SHR_MACL;
3681 break;
3682 case 2: // LDS Rm,PR 0100mmmm00101010
3683 tmp = SHR_PR;
3684 break;
3685 default:
3686 goto undefined;
3687 }
3688 opd->op = OP_MOVE;
3689 opd->source = BITMASK1(GET_Rn());
3690 opd->dest = BITMASK1(tmp);
3691 break;
3692 case 0x0b:
3693 switch (GET_Fx())
3694 {
3695 case 0: // JSR @Rm 0100mmmm00001011
3696 opd->dest = BITMASK1(SHR_PR);
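// fallthrough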
3697 case 2: // JMP @Rm 0100mmmm00101011
3698 opd->op = OP_BRANCH_R;
3699 opd->rm = GET_Rn();
3700 opd->source = BITMASK1(opd->rm);
3701 opd->dest |= BITMASK1(SHR_PC);
3702 opd->cycles = 2;
3703 next_is_delay = 1;
3704 end_block = 1;
3705 break;
3706 case 1: // TAS.B @Rn 0100nnnn00011011
3707 opd->source = BITMASK1(GET_Rn());
3708 opd->dest = BITMASK1(SHR_T);
3709 opd->cycles = 4;
3710 break;
3711 default:
3712 goto undefined;
3713 }
3714 break;
3715 case 0x0e:
3716 switch (GET_Fx())
3717 {
3718 case 0: // LDC Rm,SR 0100mmmm00001110
3719 tmp = SHR_SR;
3720 break;
3721 case 1: // LDC Rm,GBR 0100mmmm00011110
3722 tmp = SHR_GBR;
3723 break;
3724 case 2: // LDC Rm,VBR 0100mmmm00101110
3725 tmp = SHR_VBR;
3726 break;
3727 default:
3728 goto undefined;
3729 }
3730 opd->op = OP_MOVE;
3731 opd->source = BITMASK1(GET_Rn());
3732 opd->dest = BITMASK1(tmp);
3733 break;
3734 case 0x0f:
3735 // MAC.W @Rm+,@Rn+ 0100nnnnmmmm1111
3736 opd->source = BITMASK5(GET_Rm(), GET_Rn(), SHR_SR, SHR_MACL, SHR_MACH);
3737 opd->dest = BITMASK4(GET_Rm(), GET_Rn(), SHR_MACL, SHR_MACH);
3738 opd->cycles = 3;
3739 break;
3740 default:
3741 goto undefined;
3742 }
3743 break;
3744
3745 /////////////////////////////////////////////
3746 case 0x05:
3747 // MOV.L @(disp,Rm),Rn 0101nnnnmmmmdddd
3748 opd->source = BITMASK1(GET_Rm());
3749 opd->dest = BITMASK1(GET_Rn());
3750 opd->imm = (op & 0x0f) * 4;
3751 break;
3752
3753 /////////////////////////////////////////////
3754 case 0x06:
3755 switch (op & 0x0f)
3756 {
3757 case 0x04: // MOV.B @Rm+,Rn 0110nnnnmmmm0100
3758 case 0x05: // MOV.W @Rm+,Rn 0110nnnnmmmm0101
3759 case 0x06: // MOV.L @Rm+,Rn 0110nnnnmmmm0110
3760 opd->dest = BITMASK1(GET_Rm());
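// fallthrough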
3761 case 0x00: // MOV.B @Rm,Rn 0110nnnnmmmm0000
3762 case 0x01: // MOV.W @Rm,Rn 0110nnnnmmmm0001
3763 case 0x02: // MOV.L @Rm,Rn 0110nnnnmmmm0010
3764 opd->source = BITMASK1(GET_Rm());
3765 opd->dest |= BITMASK1(GET_Rn());
3766 break;
3767 case 0x0a: // NEGC Rm,Rn 0110nnnnmmmm1010
3768 opd->source = BITMASK2(GET_Rm(), SHR_T);
3769 opd->dest = BITMASK2(GET_Rn(), SHR_T);
3770 break;
3771 case 0x03: // MOV Rm,Rn 0110nnnnmmmm0011
3772 opd->op = OP_MOVE;
3773 goto arith_rmrn;
3774 case 0x07: // NOT Rm,Rn 0110nnnnmmmm0111
3775 case 0x08: // SWAP.B Rm,Rn 0110nnnnmmmm1000
3776 case 0x09: // SWAP.W Rm,Rn 0110nnnnmmmm1001
3777 case 0x0b: // NEG Rm,Rn 0110nnnnmmmm1011
3778 case 0x0c: // EXTU.B Rm,Rn 0110nnnnmmmm1100
3779 case 0x0d: // EXTU.W Rm,Rn 0110nnnnmmmm1101
3780 case 0x0e: // EXTS.B Rm,Rn 0110nnnnmmmm1110
3781 case 0x0f: // EXTS.W Rm,Rn 0110nnnnmmmm1111
3782 arith_rmrn:
3783 opd->source = BITMASK1(GET_Rm());
3784 opd->dest = BITMASK1(GET_Rn());
3785 break;
3786 }
3787 break;
3788
3789 /////////////////////////////////////////////
3790 case 0x07:
3791 // ADD #imm,Rn 0111nnnniiiiiiii
3792 opd->source = opd->dest = BITMASK1(GET_Rn());
3793 opd->imm = (int)(signed char)op;
3794 break;
3795
3796 /////////////////////////////////////////////
3797 case 0x08:
3798 switch (op & 0x0f00)
3799 {
3800 case 0x0000: // MOV.B R0,@(disp,Rn) 10000000nnnndddd
3801 opd->source = BITMASK2(GET_Rm(), SHR_R0);
3802 opd->imm = (op & 0x0f);
3803 break;
3804 case 0x0100: // MOV.W R0,@(disp,Rn) 10000001nnnndddd
3805 opd->source = BITMASK2(GET_Rm(), SHR_R0);
3806 opd->imm = (op & 0x0f) * 2;
3807 break;
3808 case 0x0400: // MOV.B @(disp,Rm),R0 10000100mmmmdddd
3809 opd->source = BITMASK1(GET_Rm());
3810 opd->dest = BITMASK1(SHR_R0);
3811 opd->imm = (op & 0x0f);
3812 break;
3813 case 0x0500: // MOV.W @(disp,Rm),R0 10000101mmmmdddd
3814 opd->source = BITMASK1(GET_Rm());
3815 opd->dest = BITMASK1(SHR_R0);
3816 opd->imm = (op & 0x0f) * 2;
3817 break;
3818 case 0x0800: // CMP/EQ #imm,R0 10001000iiiiiiii
3819 opd->source = BITMASK1(SHR_R0);
3820 opd->dest = BITMASK1(SHR_T);
3821 opd->imm = (int)(signed char)op;
3822 break;
3823 case 0x0d00: // BT/S label 10001101dddddddd
3824 case 0x0f00: // BF/S label 10001111dddddddd
3825 next_is_delay = 1;
3826 // fallthrough
3827 case 0x0900: // BT label 10001001dddddddd
3828 case 0x0b00: // BF label 10001011dddddddd
3829 opd->op = (op & 0x0200) ? OP_BRANCH_CF : OP_BRANCH_CT;
3830 opd->source = BITMASK1(SHR_T);
3831 opd->dest = BITMASK1(SHR_PC);
3832 opd->imm = ((signed int)(op << 24) >> 23);
3833 opd->imm += pc + 4;
3834 if (base_pc <= opd->imm && opd->imm < base_pc + BLOCK_INSN_LIMIT * 2)
3835 op_flags[(opd->imm - base_pc) / 2] |= OF_BTARGET;
3836 break;
3837 default:
3838 goto undefined;
3839 }
3840 break;
3841
3842 /////////////////////////////////////////////
3843 case 0x09:
3844 // MOV.W @(disp,PC),Rn 1001nnnndddddddd
3845 opd->op = OP_LOAD_POOL;
3846 tmp = pc + 2;
3847 if (op_flags[i] & OF_DELAY_OP) {
3848 if (ops[i-1].op == OP_BRANCH)
3849 tmp = ops[i-1].imm;
3850 else
3851 tmp = 0;
3852 }
3853 opd->source = BITMASK1(SHR_PC);
3854 opd->dest = BITMASK1(GET_Rn());
3855 if (tmp)
3856 opd->imm = tmp + 2 + (op & 0xff) * 2;
3857 opd->size = 1;
3858 break;
3859
3860 /////////////////////////////////////////////
3861 case 0x0b:
3862 // BSR label 1011dddddddddddd
3863 opd->dest = BITMASK1(SHR_PR);
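// fallthrough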
3864 case 0x0a:
3865 // BRA label 1010dddddddddddd
3866 opd->op = OP_BRANCH;
3867 opd->dest |= BITMASK1(SHR_PC);
3868 opd->imm = ((signed int)(op << 20) >> 19);
3869 opd->imm += pc + 4;
3870 opd->cycles = 2;
3871 next_is_delay = 1;
3872 end_block = 1;
3873 if (base_pc <= opd->imm && opd->imm < base_pc + BLOCK_INSN_LIMIT * 2)
3874 op_flags[(opd->imm - base_pc) / 2] |= OF_BTARGET;
3875 break;
3876
3877 /////////////////////////////////////////////
3878 case 0x0c:
3879 switch (op & 0x0f00)
3880 {
3881 case 0x0000: // MOV.B R0,@(disp,GBR) 11000000dddddddd
3882 case 0x0100: // MOV.W R0,@(disp,GBR) 11000001dddddddd
3883 case 0x0200: // MOV.L R0,@(disp,GBR) 11000010dddddddd
3884 opd->source = BITMASK2(SHR_GBR, SHR_R0);
3885 opd->size = (op & 0x300) >> 8;
3886 opd->imm = (op & 0xff) << opd->size;
3887 break;
3888 case 0x0400: // MOV.B @(disp,GBR),R0 11000100dddddddd
3889 case 0x0500: // MOV.W @(disp,GBR),R0 11000101dddddddd
3890 case 0x0600: // MOV.L @(disp,GBR),R0 11000110dddddddd
3891 opd->source = BITMASK1(SHR_GBR);
3892 opd->dest = BITMASK1(SHR_R0);
3893 opd->size = (op & 0x300) >> 8;
3894 opd->imm = (op & 0xff) << opd->size;
3895 break;
3896 case 0x0300: // TRAPA #imm 11000011iiiiiiii
3897 opd->source = BITMASK2(SHR_PC, SHR_SR);
3898 opd->dest = BITMASK1(SHR_PC);
3899 opd->imm = (op & 0xff) * 4;
3900 opd->cycles = 8;
3901 end_block = 1; // FIXME
3902 break;
3903 case 0x0700: // MOVA @(disp,PC),R0 11000111dddddddd
3904 opd->op = OP_MOVA;
3905 tmp = pc + 2;
3906 if (op_flags[i] & OF_DELAY_OP) {
3907 if (ops[i-1].op == OP_BRANCH)
3908 tmp = ops[i-1].imm;
3909 else
3910 tmp = 0;
3911 }
3912 opd->dest = BITMASK1(SHR_R0);
3913 if (tmp)
3914 opd->imm = (tmp + 2 + (op & 0xff) * 4) & ~3;
3915 break;
3916 case 0x0800: // TST #imm,R0 11001000iiiiiiii
3917 opd->source = BITMASK1(SHR_R0);
3918 opd->dest = BITMASK1(SHR_T);
3919 opd->imm = op & 0xff;
3920 break;
3921 case 0x0900: // AND #imm,R0 11001001iiiiiiii
3922 opd->source = opd->dest = BITMASK1(SHR_R0);
3923 opd->imm = op & 0xff;
3924 break;
3925 case 0x0a00: // XOR #imm,R0 11001010iiiiiiii
3926 opd->source = opd->dest = BITMASK1(SHR_R0);
3927 opd->imm = op & 0xff;
3928 break;
3929 case 0x0b00: // OR #imm,R0 11001011iiiiiiii
3930 opd->source = opd->dest = BITMASK1(SHR_R0);
3931 opd->imm = op & 0xff;
3932 break;
3933 case 0x0c00: // TST.B #imm,@(R0,GBR) 11001100iiiiiiii
3934 opd->source = BITMASK2(SHR_GBR, SHR_R0);
3935 opd->dest = BITMASK1(SHR_T);
3936 opd->imm = op & 0xff;
3937 opd->cycles = 3;
3938 break;
3939 case 0x0d00: // AND.B #imm,@(R0,GBR) 11001101iiiiiiii
3940 case 0x0e00: // XOR.B #imm,@(R0,GBR) 11001110iiiiiiii
3941 case 0x0f00: // OR.B #imm,@(R0,GBR) 11001111iiiiiiii
3942 opd->source = BITMASK2(SHR_GBR, SHR_R0);
3943 opd->imm = op & 0xff;
3944 opd->cycles = 3;
3945 break;
3946 default:
3947 goto undefined;
3948 }
3949 break;
3950
3951 /////////////////////////////////////////////
3952 case 0x0d:
3953 // MOV.L @(disp,PC),Rn 1101nnnndddddddd
3954 opd->op = OP_LOAD_POOL;
3955 tmp = pc + 2;
3956 if (op_flags[i] & OF_DELAY_OP) {
3957 if (ops[i-1].op == OP_BRANCH)
3958 tmp = ops[i-1].imm;
3959 else
3960 tmp = 0;
3961 }
3962 opd->source = BITMASK1(SHR_PC);
3963 opd->dest = BITMASK1(GET_Rn());
3964 if (tmp)
3965 opd->imm = (tmp + 2 + (op & 0xff) * 4) & ~3;
3966 opd->size = 2;
3967 break;
3968
3969 /////////////////////////////////////////////
3970 case 0x0e:
3971 // MOV #imm,Rn 1110nnnniiiiiiii
3972 opd->dest = BITMASK1(GET_Rn());
3973 opd->imm = (u32)(signed int)(signed char)op;
3974 break;
3975
3976 default:
3977 undefined:
3978 elprintf(EL_ANOMALY, "%csh2 drc: unhandled op %04x @ %08x",
3979 is_slave ? 's' : 'm', op, pc);
3980 break;
3981 }
3982 }
3983 i_end = i;
3984 end_pc = pc;
3985
3986 // 2nd pass: some analysis
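// (propagates a statically known T value so conditional branches with a
//  known outcome can become unconditional, and extends end_literals over
//  any literal pool words referenced by OP_LOAD_POOL insns)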
3987 for (i = 0; i < i_end; i++) {
3988 opd = &ops[i];
3989
3990 // propagate T (TODO: DIV0U)
3991 if ((opd->op == OP_SETCLRT && !opd->imm) || opd->op == OP_BRANCH_CT)
3992 op_flags[i + 1] |= OF_T_CLEAR;
3993 else if ((opd->op == OP_SETCLRT && opd->imm) || opd->op == OP_BRANCH_CF)
3994 op_flags[i + 1] |= OF_T_SET;
3995
3996 if ((op_flags[i] & OF_BTARGET) || (opd->dest & BITMASK1(SHR_T)))
3997 op_flags[i] &= ~(OF_T_SET | OF_T_CLEAR);
3998 else
3999 op_flags[i + 1] |= op_flags[i] & (OF_T_SET | OF_T_CLEAR);
4000
4001 if ((opd->op == OP_BRANCH_CT && (op_flags[i] & OF_T_SET))
4002 || (opd->op == OP_BRANCH_CF && (op_flags[i] & OF_T_CLEAR)))
4003 {
4004 opd->op = OP_BRANCH;
4005 opd->cycles = 3;
4006 i_end = i + 1;
4007 if (op_flags[i + 1] & OF_DELAY_OP) {
4008 opd->cycles = 2;
4009 i_end++;
4010 }
4011 }
4012 else if (opd->op == OP_LOAD_POOL)
4013 {
4014 if (opd->imm < end_pc + MAX_LITERAL_OFFSET) {
4015 if (end_literals < opd->imm + opd->size * 2)
4016 end_literals = opd->imm + opd->size * 2;
4017 }
4018 }
4019 }
4020 end_pc = base_pc + i_end * 2;
4021 if (end_literals < end_pc)
4022 end_literals = end_pc;
4023
4024 *end_pc_out = end_pc;
4025 if (end_literals_out != NULL)
4026 *end_literals_out = end_literals;
4027}
4028
4029// vim:shiftwidth=2:ts=2:expandtab