[picodrive.git] / cpu / sh2 / compiler.c
1/*
2 * SH2 recompiler
3 * (C) notaz, 2009,2010,2013
4 *
5 * This work is licensed under the terms of MAME license.
6 * See COPYING file in the top-level directory.
7 *
8 * notes:
9 * - tcache, block descriptor, link buffer overflows result in sh2_translate()
10 * failure, followed by full tcache invalidation for that region
11 * - jumps between blocks are tracked for SMC handling (in block_entry->links),
12 * except jumps between different tcaches
13 *
14 * implemented:
15 * - static register allocation
16 * - remaining register caching and tracking in temporaries
17 * - block-local branch linking
18 * - block linking (except between tcaches)
19 * - some constant propagation
20 *
21 * TODO:
22 * - better constant propagation
23 * - stack caching?
24 * - bug fixing
25 */
26#include <stddef.h>
27#include <stdio.h>
28#include <stdlib.h>
29#include <assert.h>
30
31#include "../../pico/pico_int.h"
32#include "sh2.h"
33#include "compiler.h"
34#include "../drc/cmn.h"
35#include "../debug.h"
36
37// features
38#define PROPAGATE_CONSTANTS 1
39#define LINK_BRANCHES 1
40
41// limits (per block)
42#define MAX_BLOCK_SIZE (BLOCK_INSN_LIMIT * 6 * 6)
43
44// max literal offset from the block end
45#define MAX_LITERAL_OFFSET 32*2
46#define MAX_LITERALS (BLOCK_INSN_LIMIT / 4)
47#define MAX_LOCAL_BRANCHES 32
48
49// debug stuff
50// 1 - warnings/errors
51// 2 - block info/smc
52// 4 - asm
53// 8 - runtime block entry log
54// {
55#ifndef DRC_DEBUG
56#define DRC_DEBUG 0
57#endif
58
59#if DRC_DEBUG
60#define dbg(l,...) { \
61 if ((l) & DRC_DEBUG) \
62 elprintf(EL_STATUS, ##__VA_ARGS__); \
63}
64#include "mame/sh2dasm.h"
65#include <platform/libpicofe/linux/host_dasm.h>
66static int insns_compiled, hash_collisions, host_insn_count;
67#define COUNT_OP \
68 host_insn_count++
69#else // !DRC_DEBUG
70#define COUNT_OP
71#define dbg(...)
72#endif
73
74///
75#define FETCH_OP(pc) \
76 dr_pc_base[(pc) / 2]
77
78#define FETCH32(a) \
79 ((dr_pc_base[(a) / 2] << 16) | dr_pc_base[(a) / 2 + 1])
80
81#define CHECK_UNHANDLED_BITS(mask, label) { \
82 if ((op & (mask)) != 0) \
83 goto label; \
84}
85
86#define GET_Fx() \
87 ((op >> 4) & 0x0f)
88
89#define GET_Rm GET_Fx
90
91#define GET_Rn() \
92 ((op >> 8) & 0x0f)
93
94#define BITMASK1(v0) (1 << (v0))
95#define BITMASK2(v0,v1) ((1 << (v0)) | (1 << (v1)))
96#define BITMASK3(v0,v1,v2) (BITMASK2(v0,v1) | (1 << (v2)))
97#define BITMASK4(v0,v1,v2,v3) (BITMASK3(v0,v1,v2) | (1 << (v3)))
98#define BITMASK5(v0,v1,v2,v3,v4) (BITMASK4(v0,v1,v2,v3) | (1 << (v4)))
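// (these build the register bitmasks kept in op_data.source/dest below,
// indexed by sh2_reg_e - e.g. BITMASK2(SHR_PC, SHR_PR) describes an insn
// that touches both PC and PR)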
99
100#define SHR_T SHR_SR // might make them separate someday
101
102static struct op_data {
103 u8 op;
104 u8 cycles;
105 u8 size; // 0, 1, 2 - byte, word, long
106 s8 rm; // branch or load/store data reg
107 u32 source; // bitmask of src regs
108 u32 dest; // bitmask of dest regs
109 u32 imm; // immediate/io address/branch target
110 // (for literal - address, not value)
111} ops[BLOCK_INSN_LIMIT];
112
113enum op_types {
114 OP_UNHANDLED = 0,
115 OP_BRANCH,
116 OP_BRANCH_CT, // conditional, branch if T set
117 OP_BRANCH_CF, // conditional, branch if T clear
118 OP_BRANCH_R, // indirect
119 OP_BRANCH_RF, // indirect far (PC + Rm)
120 OP_SETCLRT, // T flag set/clear
121 OP_MOVE, // register move
122 OP_LOAD_POOL, // literal pool load, imm is address
123 OP_MOVA,
124 OP_SLEEP,
125 OP_RTE,
126};
127
128#ifdef DRC_SH2
129
130static int literal_disabled_frames;
131
132#if (DRC_DEBUG & 4)
133static u8 *tcache_dsm_ptrs[3];
134static char sh2dasm_buff[64];
135#define do_host_disasm(tcid) \
136 host_dasm(tcache_dsm_ptrs[tcid], tcache_ptr - tcache_dsm_ptrs[tcid]); \
137 tcache_dsm_ptrs[tcid] = tcache_ptr
138#else
139#define do_host_disasm(x)
140#endif
141
142#if (DRC_DEBUG & 8) || defined(PDB)
143static void REGPARM(3) *sh2_drc_log_entry(void *block, SH2 *sh2, u32 sr)
144{
145 if (block != NULL) {
146 dbg(8, "= %csh2 enter %08x %p, c=%d", sh2->is_slave ? 's' : 'm',
147 sh2->pc, block, (signed int)sr >> 12);
148 pdb_step(sh2, sh2->pc);
149 }
150 return block;
151}
152#endif
153// } debug
154
155#define TCACHE_BUFFERS 3
156
157// we have 3 translation cache buffers, split from one drc/cmn buffer.
158// BIOS shares tcache with data array because it's only used for init
159// and can be discarded early
160// XXX: need to tune sizes
161static const int tcache_sizes[TCACHE_BUFFERS] = {
162 DRC_TCACHE_SIZE * 6 / 8, // ROM (rarely used), DRAM
163 DRC_TCACHE_SIZE / 8, // BIOS, data array in master sh2
164 DRC_TCACHE_SIZE / 8, // ... slave
165};
166
167static u8 *tcache_bases[TCACHE_BUFFERS];
168static u8 *tcache_ptrs[TCACHE_BUFFERS];
169
170// ptr for code emitters
171static u8 *tcache_ptr;
172
173#define MAX_BLOCK_ENTRIES (BLOCK_INSN_LIMIT / 8)
174
175struct block_link {
176 u32 target_pc;
177 void *jump; // insn address
178  struct block_link *next;       // either in block_entry->links or unresolved_links
179};
180
181struct block_entry {
182 u32 pc;
183 void *tcache_ptr; // translated block for above PC
184 struct block_entry *next; // next block in hash_table with same pc hash
185 struct block_link *links; // links to this entry
186#if (DRC_DEBUG & 2)
187 struct block_desc *block;
188#endif
189};
190
191struct block_desc {
192 u32 addr; // block start SH2 PC address
193  u16 size;                  // size of recompiled insns + literal pool
194 u16 size_nolit; // same without literals
195#if (DRC_DEBUG & 2)
196 int refcount;
197#endif
198 int entry_count;
199 struct block_entry entryp[MAX_BLOCK_ENTRIES];
200};
201
202static const int block_max_counts[TCACHE_BUFFERS] = {
203 4*1024,
204 256,
205 256,
206};
207static struct block_desc *block_tables[TCACHE_BUFFERS];
208static int block_counts[TCACHE_BUFFERS];
209
210// we have block_link_pool to avoid using mallocs
211static const int block_link_pool_max_counts[TCACHE_BUFFERS] = {
212 4*1024,
213 256,
214 256,
215};
216static struct block_link *block_link_pool[TCACHE_BUFFERS];
217static int block_link_pool_counts[TCACHE_BUFFERS];
218static struct block_link *unresolved_links[TCACHE_BUFFERS];
219
220// used for invalidation
221static const int ram_sizes[TCACHE_BUFFERS] = {
222 0x40000,
223 0x1000,
224 0x1000,
225};
226#define INVAL_PAGE_SIZE 0x100
227
228struct block_list {
229 struct block_desc *block;
230 struct block_list *next;
231};
232
233// array of pointers to block_lists for RAM and 2 data arrays
234// each array has len: sizeof(mem) / INVAL_PAGE_SIZE
235static struct block_list **inval_lookup[TCACHE_BUFFERS];
236
237static const int hash_table_sizes[TCACHE_BUFFERS] = {
238 0x1000,
239 0x100,
240 0x100,
241};
242static struct block_entry **hash_tables[TCACHE_BUFFERS];
243
244#define HASH_FUNC(hash_tab, addr, mask) \
245 (hash_tab)[(((addr) >> 20) ^ ((addr) >> 2)) & (mask)]
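// example: pc 0x06000122 with the 0x1000-entry table (mask 0xfff) hashes to
// (0x06000122 >> 20) ^ (0x06000122 >> 2) = 0x60 ^ 0x01800048 = 0x01800028,
// i.e. bucket 0x028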
246
247// host register tracking
248enum {
249 HR_FREE,
250 HR_CACHED, // 'val' has sh2_reg_e
251// HR_CONST, // 'val' has a constant
252 HR_TEMP, // reg used for temp storage
253};
254
255enum {
256 HRF_DIRTY = 1 << 0, // reg has "dirty" value to be written to ctx
257 HRF_LOCKED = 1 << 1, // HR_CACHED can't be evicted
258};
259
260typedef struct {
261 u32 hreg:5; // "host" reg
262 u32 greg:5; // "guest" reg
263 u32 type:3;
264 u32 flags:3;
265 u32 stamp:16; // kind of a timestamp
266} temp_reg_t;
267
268// note: reg_temp[] must have at least as many registers as
269// the handlers use in the worst case (currently 4)
270#ifdef __arm__
271#include "../drc/emit_arm.c"
272
273static const int reg_map_g2h[] = {
274 4, 5, 6, 7,
275 8, -1, -1, -1,
276 -1, -1, -1, -1,
277 -1, -1, -1, 9, // r12 .. sp
278 -1, -1, -1, 10, // SHR_PC, SHR_PPC, SHR_PR, SHR_SR,
279 -1, -1, -1, -1, // SHR_GBR, SHR_VBR, SHR_MACH, SHR_MACL,
280};
281
282static temp_reg_t reg_temp[] = {
283 { 0, },
284 { 1, },
285 { 12, },
286 { 14, },
287 { 2, },
288 { 3, },
289};
290
291#elif defined(__i386__)
292#include "../drc/emit_x86.c"
293
294static const int reg_map_g2h[] = {
295 xSI,-1, -1, -1,
296 -1, -1, -1, -1,
297 -1, -1, -1, -1,
298 -1, -1, -1, -1,
299 -1, -1, -1, xDI,
300 -1, -1, -1, -1,
301};
302
303// ax, cx, dx are usually temporaries by convention
304static temp_reg_t reg_temp[] = {
305 { xAX, },
306 { xBX, },
307 { xCX, },
308 { xDX, },
309};
310
311#else
312#error unsupported arch
313#endif
314
315#define T 0x00000001
316#define S 0x00000002
317#define I 0x000000f0
318#define Q 0x00000100
319#define M 0x00000200
320#define T_save 0x00000800
321
322#define I_SHIFT 4
323#define Q_SHIFT 8
324#define M_SHIFT 9
325
326static void REGPARM(1) (*sh2_drc_entry)(SH2 *sh2);
327static void (*sh2_drc_dispatcher)(void);
328static void (*sh2_drc_exit)(void);
329static void (*sh2_drc_test_irq)(void);
330
331static u32 REGPARM(2) (*sh2_drc_read8)(u32 a, SH2 *sh2);
332static u32 REGPARM(2) (*sh2_drc_read16)(u32 a, SH2 *sh2);
333static u32 REGPARM(2) (*sh2_drc_read32)(u32 a, SH2 *sh2);
334static void REGPARM(2) (*sh2_drc_write8)(u32 a, u32 d);
335static void REGPARM(2) (*sh2_drc_write16)(u32 a, u32 d);
336static int REGPARM(3) (*sh2_drc_write32)(u32 a, u32 d, SH2 *sh2);
337
338// address space stuff
339static int dr_ctx_get_mem_ptr(u32 a, u32 *mask)
340{
341 int poffs = -1;
342
343 if ((a & ~0x7ff) == 0) {
344 // BIOS
345 poffs = offsetof(SH2, p_bios);
346 *mask = 0x7ff;
347 }
348 else if ((a & 0xfffff000) == 0xc0000000) {
349 // data array
350 poffs = offsetof(SH2, p_da);
351 *mask = 0xfff;
352 }
353 else if ((a & 0xc6000000) == 0x06000000) {
354 // SDRAM
355 poffs = offsetof(SH2, p_sdram);
356 *mask = 0x03ffff;
357 }
358 else if ((a & 0xc6000000) == 0x02000000) {
359 // ROM
360 poffs = offsetof(SH2, p_rom);
361 *mask = 0x3fffff;
362 }
363
364 return poffs;
365}
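// example: 0x06000010 (and its cache-through mirror 0x26000010) matches the
// SDRAM case, giving p_sdram with mask 0x3ffff; anything not covered above
// returns -1 and the caller must fall back to the memhandler call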
366
367static struct block_entry *dr_get_entry(u32 pc, int is_slave, int *tcache_id)
368{
369 struct block_entry *be;
370 u32 tcid = 0, mask;
371
372 // data arrays have their own caches
373 if ((pc & 0xe0000000) == 0xc0000000 || (pc & ~0xfff) == 0)
374 tcid = 1 + is_slave;
375
376 *tcache_id = tcid;
377
378 mask = hash_table_sizes[tcid] - 1;
379 be = HASH_FUNC(hash_tables[tcid], pc, mask);
380 for (; be != NULL; be = be->next)
381 if (be->pc == pc)
382 return be;
383
384 return NULL;
385}
386
387// ---------------------------------------------------------------
388
389// block management
390static void add_to_block_list(struct block_list **blist, struct block_desc *block)
391{
392 struct block_list *added = malloc(sizeof(*added));
393 if (!added) {
394 elprintf(EL_ANOMALY, "drc OOM (1)");
395 return;
396 }
397 added->block = block;
398 added->next = *blist;
399 *blist = added;
400}
401
402static void rm_from_block_list(struct block_list **blist, struct block_desc *block)
403{
404 struct block_list *prev = NULL, *current = *blist;
405 for (; current != NULL; prev = current, current = current->next) {
406 if (current->block == block) {
407 if (prev == NULL)
408 *blist = current->next;
409 else
410 prev->next = current->next;
411 free(current);
412 return;
413 }
414 }
415 dbg(1, "can't rm block %p (%08x-%08x)",
416 block, block->addr, block->addr + block->size);
417}
418
419static void rm_block_list(struct block_list **blist)
420{
421 struct block_list *tmp, *current = *blist;
422 while (current != NULL) {
423 tmp = current;
424 current = current->next;
425 free(tmp);
426 }
427 *blist = NULL;
428}
429
430static void REGPARM(1) flush_tcache(int tcid)
431{
432 int i;
433
434 dbg(1, "tcache #%d flush! (%d/%d, bds %d/%d)", tcid,
435 tcache_ptrs[tcid] - tcache_bases[tcid], tcache_sizes[tcid],
436 block_counts[tcid], block_max_counts[tcid]);
437
438 block_counts[tcid] = 0;
439 block_link_pool_counts[tcid] = 0;
440 unresolved_links[tcid] = NULL;
441 memset(hash_tables[tcid], 0, sizeof(*hash_tables[0]) * hash_table_sizes[tcid]);
442 tcache_ptrs[tcid] = tcache_bases[tcid];
443 if (Pico32xMem != NULL) {
444 if (tcid == 0) // ROM, RAM
445 memset(Pico32xMem->drcblk_ram, 0,
446 sizeof(Pico32xMem->drcblk_ram));
447 else
448 memset(Pico32xMem->drcblk_da[tcid - 1], 0,
449 sizeof(Pico32xMem->drcblk_da[0]));
450 }
451#if (DRC_DEBUG & 4)
452 tcache_dsm_ptrs[tcid] = tcache_bases[tcid];
453#endif
454
455 for (i = 0; i < ram_sizes[tcid] / INVAL_PAGE_SIZE; i++)
456 rm_block_list(&inval_lookup[tcid][i]);
457}
458
459static void add_to_hashlist(struct block_entry *be, int tcache_id)
460{
461 u32 tcmask = hash_table_sizes[tcache_id] - 1;
462
463 be->next = HASH_FUNC(hash_tables[tcache_id], be->pc, tcmask);
464 HASH_FUNC(hash_tables[tcache_id], be->pc, tcmask) = be;
465
466#if (DRC_DEBUG & 2)
467 if (be->next != NULL) {
468 printf(" %08x: hash collision with %08x\n",
469 be->pc, be->next->pc);
470 hash_collisions++;
471 }
472#endif
473}
474
475static void rm_from_hashlist(struct block_entry *be, int tcache_id)
476{
477 u32 tcmask = hash_table_sizes[tcache_id] - 1;
478 struct block_entry *cur, *prev;
479
480 cur = HASH_FUNC(hash_tables[tcache_id], be->pc, tcmask);
481 if (cur == NULL)
482 goto missing;
483
484 if (be == cur) { // first
485 HASH_FUNC(hash_tables[tcache_id], be->pc, tcmask) = be->next;
486 return;
487 }
488
489 for (prev = cur, cur = cur->next; cur != NULL; cur = cur->next) {
490 if (cur == be) {
491 prev->next = cur->next;
492 return;
493 }
494 }
495
496missing:
497 dbg(1, "rm_from_hashlist: be %p %08x missing?", be, be->pc);
498}
499
500static struct block_desc *dr_add_block(u32 addr, u16 size_lit,
501 u16 size_nolit, int is_slave, int *blk_id)
502{
503 struct block_entry *be;
504 struct block_desc *bd;
505 int tcache_id;
506 int *bcount;
507
508  // do a lookup to get tcache_id and check if we're overriding an existing block
509 be = dr_get_entry(addr, is_slave, &tcache_id);
510 if (be != NULL)
511 dbg(1, "block override for %08x", addr);
512
513 bcount = &block_counts[tcache_id];
514 if (*bcount >= block_max_counts[tcache_id]) {
515 dbg(1, "bd overflow for tcache %d", tcache_id);
516 return NULL;
517 }
518
519 bd = &block_tables[tcache_id][*bcount];
520 bd->addr = addr;
521 bd->size = size_lit;
522 bd->size_nolit = size_nolit;
523
524 bd->entry_count = 1;
525 bd->entryp[0].pc = addr;
526 bd->entryp[0].tcache_ptr = tcache_ptr;
527 bd->entryp[0].links = NULL;
528#if (DRC_DEBUG & 2)
529 bd->entryp[0].block = bd;
530 bd->refcount = 0;
531#endif
532 add_to_hashlist(&bd->entryp[0], tcache_id);
533
534 *blk_id = *bcount;
535 (*bcount)++;
536
537 return bd;
538}
539
540static void REGPARM(3) *dr_lookup_block(u32 pc, int is_slave, int *tcache_id)
541{
542 struct block_entry *be = NULL;
543 void *block = NULL;
544
545 be = dr_get_entry(pc, is_slave, tcache_id);
546 if (be != NULL)
547 block = be->tcache_ptr;
548
549#if (DRC_DEBUG & 2)
550 if (be != NULL)
551 be->block->refcount++;
552#endif
553 return block;
554}
555
556static void *dr_failure(void)
557{
558 lprintf("recompilation failed\n");
559 exit(1);
560}
561
562static void *dr_prepare_ext_branch(u32 pc, int is_slave, int tcache_id)
563{
564#if LINK_BRANCHES
565 struct block_link *bl = block_link_pool[tcache_id];
566 int cnt = block_link_pool_counts[tcache_id];
567 struct block_entry *be = NULL;
568 int target_tcache_id;
569 int i;
570
571 be = dr_get_entry(pc, is_slave, &target_tcache_id);
572 if (target_tcache_id != tcache_id)
573 return sh2_drc_dispatcher;
574
575  // if entries at the end of the pool have been freed, reuse them
576 for (i = cnt - 1; i >= 0; i--)
577 if (bl[i].target_pc != 0)
578 break;
579 cnt = i + 1;
580 if (cnt >= block_link_pool_max_counts[tcache_id]) {
581 dbg(1, "bl overflow for tcache %d", tcache_id);
582 return NULL;
583 }
584 bl += cnt;
585 block_link_pool_counts[tcache_id]++;
586
587 bl->target_pc = pc;
588 bl->jump = tcache_ptr;
589
590 if (be != NULL) {
591 dbg(2, "- early link from %p to pc %08x", bl->jump, pc);
592 bl->next = be->links;
593 be->links = bl;
594 return be->tcache_ptr;
595 }
596 else {
597 bl->next = unresolved_links[tcache_id];
598 unresolved_links[tcache_id] = bl;
599 return sh2_drc_dispatcher;
600 }
601#else
602 return sh2_drc_dispatcher;
603#endif
604}
605
606static void dr_link_blocks(struct block_entry *be, int tcache_id)
607{
608#if LINK_BRANCHES
609 struct block_link *first = unresolved_links[tcache_id];
610 struct block_link *bl, *prev, *tmp;
611 u32 pc = be->pc;
612
613 for (bl = prev = first; bl != NULL; ) {
614 if (bl->target_pc == pc) {
615 dbg(2, "- link from %p to pc %08x", bl->jump, pc);
616 emith_jump_patch(bl->jump, tcache_ptr);
617
618 // move bl from unresolved_links to block_entry
619 tmp = bl->next;
620 bl->next = be->links;
621 be->links = bl;
622
623 if (bl == first)
624 first = prev = bl = tmp;
625 else
626 prev->next = bl = tmp;
627 continue;
628 }
629 prev = bl;
630 bl = bl->next;
631 }
632 unresolved_links[tcache_id] = first;
633
634 // could sync arm caches here, but that's unnecessary
635#endif
636}
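// how the two halves fit together: when a branch is compiled,
// dr_prepare_ext_branch() either returns the target's code directly (target
// already translated; the link is recorded in its block_entry->links) or
// queues the jump on unresolved_links[] and returns sh2_drc_dispatcher.
// Once a block entry for that pc is created, dr_link_blocks() patches every
// queued jump to the fresh code and moves it onto the entry's link list.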
637
638#define ADD_TO_ARRAY(array, count, item, failcode) \
639 if (count >= ARRAY_SIZE(array)) { \
640 dbg(1, "warning: " #array " overflow"); \
641 failcode; \
642 } \
643 array[count++] = item;
644
645static int find_in_array(u32 *array, size_t size, u32 what)
646{
647 size_t i;
648 for (i = 0; i < size; i++)
649 if (what == array[i])
650 return i;
651
652 return -1;
653}
654
655// ---------------------------------------------------------------
656
657// register cache / constant propagation stuff
658typedef enum {
659 RC_GR_READ,
660 RC_GR_WRITE,
661 RC_GR_RMW,
662} rc_gr_mode;
663
664static int rcache_get_reg_(sh2_reg_e r, rc_gr_mode mode, int do_locking);
665
666// guest regs with constants
667static u32 dr_gcregs[24];
668// a mask of constant/dirty regs
669static u32 dr_gcregs_mask;
670static u32 dr_gcregs_dirty;
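// a set bit in dr_gcregs_mask means the guest reg's value is known at
// compile time (kept in dr_gcregs[]); a set bit in dr_gcregs_dirty means
// that constant hasn't been materialized into a host reg/the context yet,
// so gconst_try_read()/gconst_clean() must emit it before the reg is read
// through the normal paths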
671
672#if PROPAGATE_CONSTANTS
673static void gconst_new(sh2_reg_e r, u32 val)
674{
675 int i;
676
677 dr_gcregs_mask |= 1 << r;
678 dr_gcregs_dirty |= 1 << r;
679 dr_gcregs[r] = val;
680
681 // throw away old r that we might have cached
682 for (i = ARRAY_SIZE(reg_temp) - 1; i >= 0; i--) {
683 if ((reg_temp[i].type == HR_CACHED) &&
684 reg_temp[i].greg == r) {
685 reg_temp[i].type = HR_FREE;
686 reg_temp[i].flags = 0;
687 }
688 }
689}
690#endif
691
692static int gconst_get(sh2_reg_e r, u32 *val)
693{
694 if (dr_gcregs_mask & (1 << r)) {
695 *val = dr_gcregs[r];
696 return 1;
697 }
698 return 0;
699}
700
701static int gconst_check(sh2_reg_e r)
702{
703 if ((dr_gcregs_mask | dr_gcregs_dirty) & (1 << r))
704 return 1;
705 return 0;
706}
707
708// update hr if dirty, else do nothing
709static int gconst_try_read(int hr, sh2_reg_e r)
710{
711 if (dr_gcregs_dirty & (1 << r)) {
712 emith_move_r_imm(hr, dr_gcregs[r]);
713 dr_gcregs_dirty &= ~(1 << r);
714 return 1;
715 }
716 return 0;
717}
718
719static void gconst_check_evict(sh2_reg_e r)
720{
721 if (dr_gcregs_mask & (1 << r))
722 // no longer cached in reg, make dirty again
723 dr_gcregs_dirty |= 1 << r;
724}
725
726static void gconst_kill(sh2_reg_e r)
727{
728 dr_gcregs_mask &= ~(1 << r);
729 dr_gcregs_dirty &= ~(1 << r);
730}
731
732static void gconst_clean(void)
733{
734 int i;
735
736 for (i = 0; i < ARRAY_SIZE(dr_gcregs); i++)
737 if (dr_gcregs_dirty & (1 << i)) {
738 // using RC_GR_READ here: it will call gconst_try_read,
739 // cache the reg and mark it dirty.
740 rcache_get_reg_(i, RC_GR_READ, 0);
741 }
742}
743
744static void gconst_invalidate(void)
745{
746 dr_gcregs_mask = dr_gcregs_dirty = 0;
747}
748
749static u16 rcache_counter;
750
751static temp_reg_t *rcache_evict(void)
752{
753 // evict reg with oldest stamp
754 int i, oldest = -1;
755 u16 min_stamp = (u16)-1;
756
757 for (i = 0; i < ARRAY_SIZE(reg_temp); i++) {
758 if (reg_temp[i].type == HR_CACHED && !(reg_temp[i].flags & HRF_LOCKED) &&
759 reg_temp[i].stamp <= min_stamp) {
760 min_stamp = reg_temp[i].stamp;
761 oldest = i;
762 }
763 }
764
765 if (oldest == -1) {
766 printf("no registers to evict, aborting\n");
767 exit(1);
768 }
769
770 i = oldest;
771 if (reg_temp[i].type == HR_CACHED) {
772 if (reg_temp[i].flags & HRF_DIRTY)
773 // writeback
774 emith_ctx_write(reg_temp[i].hreg, reg_temp[i].greg * 4);
775 gconst_check_evict(reg_temp[i].greg);
776 }
777
778 reg_temp[i].type = HR_FREE;
779 reg_temp[i].flags = 0;
780 return &reg_temp[i];
781}
782
783static int get_reg_static(sh2_reg_e r, rc_gr_mode mode)
784{
785 int i = reg_map_g2h[r];
786 if (i != -1) {
787 if (mode != RC_GR_WRITE)
788 gconst_try_read(i, r);
789 }
790 return i;
791}
792
793// note: must not be called when doing conditional code
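// allocation order: statically mapped host reg -> reg already present in
// reg_temp[] -> a free reg_temp[] slot -> evict the least recently used
// (lowest stamp) unlocked entry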
794static int rcache_get_reg_(sh2_reg_e r, rc_gr_mode mode, int do_locking)
795{
796 temp_reg_t *tr;
797 int i, ret;
798
799 // maybe statically mapped?
800 ret = get_reg_static(r, mode);
801 if (ret != -1)
802 goto end;
803
804 rcache_counter++;
805
806 // maybe already cached?
807  // if so, prefer the cached reg over gconst (they must be in sync)
808 for (i = ARRAY_SIZE(reg_temp) - 1; i >= 0; i--) {
809 if (reg_temp[i].type == HR_CACHED && reg_temp[i].greg == r) {
810 reg_temp[i].stamp = rcache_counter;
811 if (mode != RC_GR_READ)
812 reg_temp[i].flags |= HRF_DIRTY;
813 ret = reg_temp[i].hreg;
814 goto end;
815 }
816 }
817
818 // use any free reg
819 for (i = ARRAY_SIZE(reg_temp) - 1; i >= 0; i--) {
820 if (reg_temp[i].type == HR_FREE) {
821 tr = &reg_temp[i];
822 goto do_alloc;
823 }
824 }
825
826 tr = rcache_evict();
827
828do_alloc:
829 tr->type = HR_CACHED;
830 if (do_locking)
831 tr->flags |= HRF_LOCKED;
832 if (mode != RC_GR_READ)
833 tr->flags |= HRF_DIRTY;
834 tr->greg = r;
835 tr->stamp = rcache_counter;
836 ret = tr->hreg;
837
838 if (mode != RC_GR_WRITE) {
839 if (gconst_check(r)) {
840 if (gconst_try_read(ret, r))
841 tr->flags |= HRF_DIRTY;
842 }
843 else
844 emith_ctx_read(tr->hreg, r * 4);
845 }
846
847end:
848 if (mode != RC_GR_READ)
849 gconst_kill(r);
850
851 return ret;
852}
853
854static int rcache_get_reg(sh2_reg_e r, rc_gr_mode mode)
855{
856 return rcache_get_reg_(r, mode, 1);
857}
858
859static int rcache_get_tmp(void)
860{
861 temp_reg_t *tr;
862 int i;
863
864 for (i = 0; i < ARRAY_SIZE(reg_temp); i++)
865 if (reg_temp[i].type == HR_FREE) {
866 tr = &reg_temp[i];
867 goto do_alloc;
868 }
869
870 tr = rcache_evict();
871
872do_alloc:
873 tr->type = HR_TEMP;
874 return tr->hreg;
875}
876
877static int rcache_get_arg_id(int arg)
878{
879 int i, r = 0;
880 host_arg2reg(r, arg);
881
882 for (i = 0; i < ARRAY_SIZE(reg_temp); i++)
883 if (reg_temp[i].hreg == r)
884 break;
885
886 if (i == ARRAY_SIZE(reg_temp)) // can't happen
887 exit(1);
888
889 if (reg_temp[i].type == HR_CACHED) {
890 // writeback
891 if (reg_temp[i].flags & HRF_DIRTY)
892 emith_ctx_write(reg_temp[i].hreg, reg_temp[i].greg * 4);
893 gconst_check_evict(reg_temp[i].greg);
894 }
895 else if (reg_temp[i].type == HR_TEMP) {
896 printf("arg %d reg %d already used, aborting\n", arg, r);
897 exit(1);
898 }
899
900 reg_temp[i].type = HR_FREE;
901 reg_temp[i].flags = 0;
902
903 return i;
904}
905
906// get a reg to be used as function arg
907static int rcache_get_tmp_arg(int arg)
908{
909 int id = rcache_get_arg_id(arg);
910 reg_temp[id].type = HR_TEMP;
911
912 return reg_temp[id].hreg;
913}
914
915// same but caches a reg. RC_GR_READ only.
916static int rcache_get_reg_arg(int arg, sh2_reg_e r)
917{
918 int i, srcr, dstr, dstid;
919 int dirty = 0, src_dirty = 0;
920
921 dstid = rcache_get_arg_id(arg);
922 dstr = reg_temp[dstid].hreg;
923
924 // maybe already statically mapped?
925 srcr = get_reg_static(r, RC_GR_READ);
926 if (srcr != -1)
927 goto do_cache;
928
929 // maybe already cached?
930 for (i = ARRAY_SIZE(reg_temp) - 1; i >= 0; i--) {
931 if ((reg_temp[i].type == HR_CACHED) &&
932 reg_temp[i].greg == r)
933 {
934 srcr = reg_temp[i].hreg;
935 if (reg_temp[i].flags & HRF_DIRTY)
936 src_dirty = 1;
937 goto do_cache;
938 }
939 }
940
941 // must read
942 srcr = dstr;
943 if (gconst_check(r)) {
944 if (gconst_try_read(srcr, r))
945 dirty = 1;
946 }
947 else
948 emith_ctx_read(srcr, r * 4);
949
950do_cache:
951 if (dstr != srcr)
952 emith_move_r_r(dstr, srcr);
953#if 1
954 else
955 dirty |= src_dirty;
956
957 if (dirty)
958 // must clean, callers might want to modify the arg before call
959 emith_ctx_write(dstr, r * 4);
960#else
961 if (dirty)
962 reg_temp[dstid].flags |= HRF_DIRTY;
963#endif
964
965 reg_temp[dstid].stamp = ++rcache_counter;
966 reg_temp[dstid].type = HR_CACHED;
967 reg_temp[dstid].greg = r;
968 reg_temp[dstid].flags |= HRF_LOCKED;
969 return dstr;
970}
971
972static void rcache_free_tmp(int hr)
973{
974 int i;
975 for (i = 0; i < ARRAY_SIZE(reg_temp); i++)
976 if (reg_temp[i].hreg == hr)
977 break;
978
979 if (i == ARRAY_SIZE(reg_temp) || reg_temp[i].type != HR_TEMP) {
980 printf("rcache_free_tmp fail: #%i hr %d, type %d\n", i, hr, reg_temp[i].type);
981 return;
982 }
983
984 reg_temp[i].type = HR_FREE;
985 reg_temp[i].flags = 0;
986}
987
988static void rcache_unlock(int hr)
989{
990 int i;
991 for (i = 0; i < ARRAY_SIZE(reg_temp); i++)
992 if (reg_temp[i].type == HR_CACHED && reg_temp[i].hreg == hr)
993 reg_temp[i].flags &= ~HRF_LOCKED;
994}
995
996static void rcache_unlock_all(void)
997{
998 int i;
999 for (i = 0; i < ARRAY_SIZE(reg_temp); i++)
1000 reg_temp[i].flags &= ~HRF_LOCKED;
1001}
1002
1003static inline u32 rcache_used_hreg_mask(void)
1004{
1005 u32 mask = 0;
1006 int i;
1007
1008 for (i = 0; i < ARRAY_SIZE(reg_temp); i++)
1009 if (reg_temp[i].type != HR_FREE)
1010 mask |= 1 << reg_temp[i].hreg;
1011
1012 return mask;
1013}
1014
1015static void rcache_clean(void)
1016{
1017 int i;
1018 gconst_clean();
1019
1020 for (i = 0; i < ARRAY_SIZE(reg_temp); i++)
1021 if (reg_temp[i].type == HR_CACHED && (reg_temp[i].flags & HRF_DIRTY)) {
1022 // writeback
1023 emith_ctx_write(reg_temp[i].hreg, reg_temp[i].greg * 4);
1024 reg_temp[i].flags &= ~HRF_DIRTY;
1025 }
1026}
1027
1028static void rcache_invalidate(void)
1029{
1030 int i;
1031 for (i = 0; i < ARRAY_SIZE(reg_temp); i++) {
1032 reg_temp[i].type = HR_FREE;
1033 reg_temp[i].flags = 0;
1034 }
1035 rcache_counter = 0;
1036
1037 gconst_invalidate();
1038}
1039
1040static void rcache_flush(void)
1041{
1042 rcache_clean();
1043 rcache_invalidate();
1044}
1045
1046// ---------------------------------------------------------------
1047
1048static int emit_get_rbase_and_offs(u32 a, u32 *offs)
1049{
1050 u32 mask = 0;
1051 int poffs;
1052 int hr;
1053
1054 poffs = dr_ctx_get_mem_ptr(a, &mask);
1055 if (poffs == -1)
1056 return -1;
1057
1058 // XXX: could use some related reg
1059 hr = rcache_get_tmp();
1060 emith_ctx_read(hr, poffs);
1061 emith_add_r_imm(hr, a & mask & ~0xff);
1062 *offs = a & 0xff; // XXX: ARM oriented..
1063 return hr;
1064}
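// example: for a = 0x06000123 (SDRAM, mask 0x3ffff) the temp reg gets
// p_sdram + 0x100 and *offs becomes 0x23; splitting off a small byte offset
// suits the host's load/store immediates (hence the "ARM oriented" note)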
1065
1066static void emit_move_r_imm32(sh2_reg_e dst, u32 imm)
1067{
1068#if PROPAGATE_CONSTANTS
1069 gconst_new(dst, imm);
1070#else
1071 int hr = rcache_get_reg(dst, RC_GR_WRITE);
1072 emith_move_r_imm(hr, imm);
1073#endif
1074}
1075
1076static void emit_move_r_r(sh2_reg_e dst, sh2_reg_e src)
1077{
1078 int hr_d = rcache_get_reg(dst, RC_GR_WRITE);
1079 int hr_s = rcache_get_reg(src, RC_GR_READ);
1080
1081 emith_move_r_r(hr_d, hr_s);
1082}
1083
1084// T must be clear, and comparison done just before this
1085static void emit_or_t_if_eq(int srr)
1086{
1087 EMITH_SJMP_START(DCOND_NE);
1088 emith_or_r_imm_c(DCOND_EQ, srr, T);
1089 EMITH_SJMP_END(DCOND_NE);
1090}
1091
1092// arguments must be ready
1093// reg cache must be clean before call
1094static int emit_memhandler_read_(int size, int ram_check)
1095{
1096 int arg0, arg1;
1097 host_arg2reg(arg0, 0);
1098
1099 rcache_clean();
1100
1101 // must writeback cycles for poll detection stuff
1102 // FIXME: rm
1103 if (reg_map_g2h[SHR_SR] != -1)
1104 emith_ctx_write(reg_map_g2h[SHR_SR], SHR_SR * 4);
1105
1106 arg1 = rcache_get_tmp_arg(1);
1107 emith_move_r_r(arg1, CONTEXT_REG);
1108
1109#if 0 // can't do this because of unmapped reads
1110 // ndef PDB_NET
1111 if (ram_check && Pico.rom == (void *)0x02000000 && Pico32xMem->sdram == (void *)0x06000000) {
1112 int tmp = rcache_get_tmp();
1113 emith_and_r_r_imm(tmp, arg0, 0xfb000000);
1114 emith_cmp_r_imm(tmp, 0x02000000);
1115 switch (size) {
1116 case 0: // 8
1117 EMITH_SJMP3_START(DCOND_NE);
1118 emith_eor_r_imm_c(DCOND_EQ, arg0, 1);
1119 emith_read8_r_r_offs_c(DCOND_EQ, arg0, arg0, 0);
1120 EMITH_SJMP3_MID(DCOND_NE);
1121 emith_call_cond(DCOND_NE, sh2_drc_read8);
1122 EMITH_SJMP3_END();
1123 break;
1124 case 1: // 16
1125 EMITH_SJMP3_START(DCOND_NE);
1126 emith_read16_r_r_offs_c(DCOND_EQ, arg0, arg0, 0);
1127 EMITH_SJMP3_MID(DCOND_NE);
1128 emith_call_cond(DCOND_NE, sh2_drc_read16);
1129 EMITH_SJMP3_END();
1130 break;
1131 case 2: // 32
1132 EMITH_SJMP3_START(DCOND_NE);
1133 emith_read_r_r_offs_c(DCOND_EQ, arg0, arg0, 0);
1134 emith_ror_c(DCOND_EQ, arg0, arg0, 16);
1135 EMITH_SJMP3_MID(DCOND_NE);
1136 emith_call_cond(DCOND_NE, sh2_drc_read32);
1137 EMITH_SJMP3_END();
1138 break;
1139 }
1140 }
1141 else
1142#endif
1143 {
1144 switch (size) {
1145 case 0: // 8
1146 emith_call(sh2_drc_read8);
1147 break;
1148 case 1: // 16
1149 emith_call(sh2_drc_read16);
1150 break;
1151 case 2: // 32
1152 emith_call(sh2_drc_read32);
1153 break;
1154 }
1155 }
1156 rcache_invalidate();
1157
1158 if (reg_map_g2h[SHR_SR] != -1)
1159 emith_ctx_read(reg_map_g2h[SHR_SR], SHR_SR * 4);
1160
1161  // assuming arg0 and the retval reg match
1162 return rcache_get_tmp_arg(0);
1163}
1164
1165static int emit_memhandler_read(int size)
1166{
1167 return emit_memhandler_read_(size, 1);
1168}
1169
1170static int emit_memhandler_read_rr(sh2_reg_e rd, sh2_reg_e rs, u32 offs, int size)
1171{
1172 int hr, hr2, ram_check = 1;
1173 u32 val, offs2;
1174
1175 if (gconst_get(rs, &val)) {
1176 hr = emit_get_rbase_and_offs(val + offs, &offs2);
1177 if (hr != -1) {
1178 hr2 = rcache_get_reg(rd, RC_GR_WRITE);
1179 switch (size) {
1180 case 0: // 8
1181 emith_read8_r_r_offs(hr2, hr, offs2 ^ 1);
1182 emith_sext(hr2, hr2, 8);
1183 break;
1184 case 1: // 16
1185 emith_read16_r_r_offs(hr2, hr, offs2);
1186 emith_sext(hr2, hr2, 16);
1187 break;
1188 case 2: // 32
1189 emith_read_r_r_offs(hr2, hr, offs2);
1190 emith_ror(hr2, hr2, 16);
1191 break;
1192 }
1193 rcache_free_tmp(hr);
1194 return hr2;
1195 }
1196
1197 ram_check = 0;
1198 }
1199
1200 hr = rcache_get_reg_arg(0, rs);
1201 if (offs != 0)
1202 emith_add_r_imm(hr, offs);
1203 hr = emit_memhandler_read_(size, ram_check);
1204 hr2 = rcache_get_reg(rd, RC_GR_WRITE);
1205 if (size != 2) {
1206 emith_sext(hr2, hr, (size == 1) ? 16 : 8);
1207 } else
1208 emith_move_r_r(hr2, hr);
1209 rcache_free_tmp(hr);
1210
1211 return hr2;
1212}
1213
1214static void emit_memhandler_write(int size, u32 pc)
1215{
1216 int ctxr;
1217 host_arg2reg(ctxr, 2);
1218 if (reg_map_g2h[SHR_SR] != -1)
1219 emith_ctx_write(reg_map_g2h[SHR_SR], SHR_SR * 4);
1220
1221 rcache_clean();
1222
1223 switch (size) {
1224 case 0: // 8
1225 // XXX: consider inlining sh2_drc_write8
1226 emith_call(sh2_drc_write8);
1227 break;
1228 case 1: // 16
1229 emith_call(sh2_drc_write16);
1230 break;
1231 case 2: // 32
1232 emith_move_r_r(ctxr, CONTEXT_REG);
1233 emith_call(sh2_drc_write32);
1234 break;
1235 }
1236
1237 rcache_invalidate();
1238 if (reg_map_g2h[SHR_SR] != -1)
1239 emith_ctx_read(reg_map_g2h[SHR_SR], SHR_SR * 4);
1240}
1241
1242// @(Rx,Ry)
1243static int emit_indirect_indexed_read(int rx, int ry, int size)
1244{
1245 int a0, t;
1246 a0 = rcache_get_reg_arg(0, rx);
1247 t = rcache_get_reg(ry, RC_GR_READ);
1248 emith_add_r_r(a0, t);
1249 return emit_memhandler_read(size);
1250}
1251
1252// read @Rn, @Rm
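// the value read from @Rn is spilled to sh2->drc_tmp because the second
// memhandler call invalidates the register cache, so no temp would survive it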
1253static void emit_indirect_read_double(u32 *rnr, u32 *rmr, int rn, int rm, int size)
1254{
1255 int tmp;
1256
1257 rcache_get_reg_arg(0, rn);
1258 tmp = emit_memhandler_read(size);
1259 emith_ctx_write(tmp, offsetof(SH2, drc_tmp));
1260 rcache_free_tmp(tmp);
1261 tmp = rcache_get_reg(rn, RC_GR_RMW);
1262 emith_add_r_imm(tmp, 1 << size);
1263 rcache_unlock(tmp);
1264
1265 rcache_get_reg_arg(0, rm);
1266 *rmr = emit_memhandler_read(size);
1267 *rnr = rcache_get_tmp();
1268 emith_ctx_read(*rnr, offsetof(SH2, drc_tmp));
1269 tmp = rcache_get_reg(rm, RC_GR_RMW);
1270 emith_add_r_imm(tmp, 1 << size);
1271 rcache_unlock(tmp);
1272}
1273
1274static void emit_do_static_regs(int is_write, int tmpr)
1275{
1276 int i, r, count;
1277
1278 for (i = 0; i < ARRAY_SIZE(reg_map_g2h); i++) {
1279 r = reg_map_g2h[i];
1280 if (r == -1)
1281 continue;
1282
1283 for (count = 1; i < ARRAY_SIZE(reg_map_g2h) - 1; i++, r++) {
1284 if (reg_map_g2h[i + 1] != r + 1)
1285 break;
1286 count++;
1287 }
1288
1289 if (count > 1) {
1290 // i, r point to last item
1291 if (is_write)
1292 emith_ctx_write_multiple(r - count + 1, (i - count + 1) * 4, count, tmpr);
1293 else
1294 emith_ctx_read_multiple(r - count + 1, (i - count + 1) * 4, count, tmpr);
1295 } else {
1296 if (is_write)
1297 emith_ctx_write(r, i * 4);
1298 else
1299 emith_ctx_read(r, i * 4);
1300 }
1301 }
1302}
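// runs of consecutive guest regs mapped to consecutive host regs are
// transferred via the emith_ctx_*_multiple() helpers (which can become a
// single load/store-multiple on ARM); everything else gets one ctx access
// per register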
1303
1304static void emit_block_entry(void)
1305{
1306 int arg0;
1307
1308 host_arg2reg(arg0, 0);
1309
1310#if (DRC_DEBUG & 8) || defined(PDB)
1311 int arg1, arg2;
1312 host_arg2reg(arg1, 1);
1313 host_arg2reg(arg2, 2);
1314
1315 emit_do_static_regs(1, arg2);
1316 emith_move_r_r(arg1, CONTEXT_REG);
1317 emith_move_r_r(arg2, rcache_get_reg(SHR_SR, RC_GR_READ));
1318 emith_call(sh2_drc_log_entry);
1319 rcache_invalidate();
1320#endif
1321 emith_tst_r_r(arg0, arg0);
1322 EMITH_SJMP_START(DCOND_EQ);
1323 emith_jump_reg_c(DCOND_NE, arg0);
1324 EMITH_SJMP_END(DCOND_EQ);
1325}
1326
1327#define DELAY_SAVE_T(sr) { \
1328 emith_bic_r_imm(sr, T_save); \
1329 emith_tst_r_imm(sr, T); \
1330 EMITH_SJMP_START(DCOND_EQ); \
1331 emith_or_r_imm_c(DCOND_NE, sr, T_save); \
1332 EMITH_SJMP_END(DCOND_EQ); \
1333}
1334
1335#define FLUSH_CYCLES(sr) \
1336 if (cycles > 0) { \
1337 emith_sub_r_imm(sr, cycles << 12); \
1338 cycles = 0; \
1339 }
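// the remaining cycle budget is kept in the upper bits of the cached SR
// (note the << 12 above and the >> 12 in sh2_drc_log_entry); each block
// entry compares SR against 0 and exits through sh2_drc_exit once the
// budget is exhausted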
1340
1341static void *dr_get_pc_base(u32 pc, int is_slave);
1342
1343static void REGPARM(2) *sh2_translate(SH2 *sh2, int tcache_id)
1344{
1345 u32 branch_target_pc[MAX_LOCAL_BRANCHES];
1346 void *branch_target_ptr[MAX_LOCAL_BRANCHES];
1347 int branch_target_count = 0;
1348 void *branch_patch_ptr[MAX_LOCAL_BRANCHES];
1349 u32 branch_patch_pc[MAX_LOCAL_BRANCHES];
1350 int branch_patch_count = 0;
1351 u32 literal_addr[MAX_LITERALS];
1352 int literal_addr_count = 0;
1353 u8 op_flags[BLOCK_INSN_LIMIT];
1354 struct {
1355 u32 test_irq:1;
1356 u32 pending_branch_direct:1;
1357 u32 pending_branch_indirect:1;
1358 u32 literals_disabled:1;
1359 } drcf = { 0, };
1360
1361 // PC of current, first, last SH2 insn
1362 u32 pc, base_pc, end_pc;
1363 u32 end_literals;
1364 void *block_entry_ptr;
1365 struct block_desc *block;
1366 u16 *dr_pc_base;
1367 struct op_data *opd;
1368 int blkid_main = 0;
1369 int skip_op = 0;
1370 u32 tmp, tmp2;
1371 int cycles;
1372 int i, v;
1373 int op;
1374
1375 base_pc = sh2->pc;
1376 drcf.literals_disabled = literal_disabled_frames != 0;
1377
1378 // get base/validate PC
1379 dr_pc_base = dr_get_pc_base(base_pc, sh2->is_slave);
1380 if (dr_pc_base == (void *)-1) {
1381 printf("invalid PC, aborting: %08x\n", base_pc);
1382 // FIXME: be less destructive
1383 exit(1);
1384 }
1385
1386 tcache_ptr = tcache_ptrs[tcache_id];
1387
1388 // predict tcache overflow
1389 tmp = tcache_ptr - tcache_bases[tcache_id];
1390 if (tmp > tcache_sizes[tcache_id] - MAX_BLOCK_SIZE) {
1391 dbg(1, "tcache %d overflow", tcache_id);
1392 return NULL;
1393 }
1394
1395 // initial passes to disassemble and analyze the block
1396 scan_block(base_pc, sh2->is_slave, op_flags, &end_pc, &end_literals);
1397
1398 if (drcf.literals_disabled)
1399 end_literals = end_pc;
1400
1401 block = dr_add_block(base_pc, end_literals - base_pc,
1402 end_pc - base_pc, sh2->is_slave, &blkid_main);
1403 if (block == NULL)
1404 return NULL;
1405
1406 block_entry_ptr = tcache_ptr;
1407 dbg(2, "== %csh2 block #%d,%d %08x-%08x -> %p", sh2->is_slave ? 's' : 'm',
1408 tcache_id, blkid_main, base_pc, end_pc, block_entry_ptr);
1409
1410 dr_link_blocks(&block->entryp[0], tcache_id);
1411
1412 // collect branch_targets that don't land on delay slots
1413 for (pc = base_pc, i = 0; pc < end_pc; i++, pc += 2) {
1414 if (!(op_flags[i] & OF_BTARGET))
1415 continue;
1416 if (op_flags[i] & OF_DELAY_OP) {
1417 op_flags[i] &= ~OF_BTARGET;
1418 continue;
1419 }
1420 ADD_TO_ARRAY(branch_target_pc, branch_target_count, pc, break);
1421 }
1422
1423 if (branch_target_count > 0) {
1424 memset(branch_target_ptr, 0, sizeof(branch_target_ptr[0]) * branch_target_count);
1425 }
1426
1427 // clear stale state after compile errors
1428 rcache_invalidate();
1429
1430 // -------------------------------------------------
1431 // 3rd pass: actual compilation
1432 pc = base_pc;
1433 cycles = 0;
1434 for (i = 0; pc < end_pc; i++)
1435 {
1436 u32 delay_dep_fw = 0, delay_dep_bk = 0;
1437 u32 tmp3, tmp4, sr;
1438
1439 opd = &ops[i];
1440 op = FETCH_OP(pc);
1441
1442#if (DRC_DEBUG & 2)
1443 insns_compiled++;
1444#endif
1445#if (DRC_DEBUG & 4)
1446 DasmSH2(sh2dasm_buff, pc, op);
1447 printf("%c%08x %04x %s\n", (op_flags[i] & OF_BTARGET) ? '*' : ' ',
1448 pc, op, sh2dasm_buff);
1449#endif
1450
1451 if ((op_flags[i] & OF_BTARGET) || pc == base_pc)
1452 {
1453 if (pc != base_pc)
1454 {
1455 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1456 FLUSH_CYCLES(sr);
1457 rcache_flush();
1458
1459 // make block entry
1460 v = block->entry_count;
1461 if (v < ARRAY_SIZE(block->entryp)) {
1462 block->entryp[v].pc = pc;
1463 block->entryp[v].tcache_ptr = tcache_ptr;
1464 block->entryp[v].links = NULL;
1465#if (DRC_DEBUG & 2)
1466 block->entryp[v].block = block;
1467#endif
1468 add_to_hashlist(&block->entryp[v], tcache_id);
1469 block->entry_count++;
1470
1471 dbg(2, "-- %csh2 block #%d,%d entry %08x -> %p",
1472 sh2->is_slave ? 's' : 'm', tcache_id, blkid_main,
1473 pc, tcache_ptr);
1474
1475 // since we made a block entry, link any other blocks
1476 // that jump to current pc
1477 dr_link_blocks(&block->entryp[v], tcache_id);
1478 }
1479 else {
1480 dbg(1, "too many entryp for block #%d,%d pc=%08x",
1481 tcache_id, blkid_main, pc);
1482 }
1483
1484 do_host_disasm(tcache_id);
1485 }
1486
1487 v = find_in_array(branch_target_pc, branch_target_count, pc);
1488 if (v >= 0)
1489 branch_target_ptr[v] = tcache_ptr;
1490
1491 // must update PC
1492 emit_move_r_imm32(SHR_PC, pc);
1493 rcache_clean();
1494
1495 // check cycles
1496 sr = rcache_get_reg(SHR_SR, RC_GR_READ);
1497 emith_cmp_r_imm(sr, 0);
1498 emith_jump_cond(DCOND_LE, sh2_drc_exit);
1499 do_host_disasm(tcache_id);
1500 rcache_unlock_all();
1501 }
1502
1503#ifdef DRC_CMP
1504 if (!(op_flags[i] & OF_DELAY_OP)) {
1505 emit_move_r_imm32(SHR_PC, pc);
1506 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1507 FLUSH_CYCLES(sr);
1508 rcache_clean();
1509
1510 tmp = rcache_used_hreg_mask();
1511 emith_save_caller_regs(tmp);
1512 emit_do_static_regs(1, 0);
1513 emith_pass_arg_r(0, CONTEXT_REG);
1514 emith_call(do_sh2_cmp);
1515 emith_restore_caller_regs(tmp);
1516 }
1517#endif
1518
1519 pc += 2;
1520
1521 if (skip_op > 0) {
1522 skip_op--;
1523 continue;
1524 }
1525
1526 if (op_flags[i] & OF_DELAY_OP)
1527 {
1528 // handle delay slot dependencies
1529 delay_dep_fw = opd->dest & ops[i-1].source;
1530 delay_dep_bk = opd->source & ops[i-1].dest;
1531 if (delay_dep_fw & BITMASK1(SHR_T)) {
1532 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1533 DELAY_SAVE_T(sr);
1534 }
1535 if (delay_dep_bk & BITMASK1(SHR_PC)) {
1536 if (opd->op != OP_LOAD_POOL && opd->op != OP_MOVA) {
1537 // can only be those 2 really..
1538 elprintf(EL_ANOMALY, "%csh2 drc: illegal slot insn %04x @ %08x?",
1539 sh2->is_slave ? 's' : 'm', op, pc - 2);
1540 }
1541 if (opd->imm != 0)
1542 ; // addr already resolved somehow
1543 else {
1544 switch (ops[i-1].op) {
1545 case OP_BRANCH:
1546 emit_move_r_imm32(SHR_PC, ops[i-1].imm);
1547 break;
1548 case OP_BRANCH_CT:
1549 case OP_BRANCH_CF:
1550 tmp = rcache_get_reg(SHR_PC, RC_GR_WRITE);
1551 sr = rcache_get_reg(SHR_SR, RC_GR_READ);
1552 emith_move_r_imm(tmp, pc);
1553 emith_tst_r_imm(sr, T);
1554 tmp2 = ops[i-1].op == OP_BRANCH_CT ? DCOND_NE : DCOND_EQ;
1555 emith_move_r_imm_c(tmp2, tmp, ops[i-1].imm);
1556 break;
1557 // case OP_BRANCH_R OP_BRANCH_RF - PC already loaded
1558 }
1559 }
1560 }
1561 //if (delay_dep_fw & ~BITMASK1(SHR_T))
1562 // dbg(1, "unhandled delay_dep_fw: %x", delay_dep_fw & ~BITMASK1(SHR_T));
1563 if (delay_dep_bk & ~BITMASK2(SHR_PC, SHR_PR))
1564 dbg(1, "unhandled delay_dep_bk: %x", delay_dep_bk);
1565 }
1566
1567 switch (opd->op)
1568 {
1569 case OP_BRANCH:
1570 case OP_BRANCH_CT:
1571 case OP_BRANCH_CF:
1572 if (opd->dest & BITMASK1(SHR_PR))
1573 emit_move_r_imm32(SHR_PR, pc + 2);
1574 drcf.pending_branch_direct = 1;
1575 goto end_op;
1576
1577 case OP_BRANCH_R:
1578 if (opd->dest & BITMASK1(SHR_PR))
1579 emit_move_r_imm32(SHR_PR, pc + 2);
1580 emit_move_r_r(SHR_PC, opd->rm);
1581 drcf.pending_branch_indirect = 1;
1582 goto end_op;
1583
1584 case OP_BRANCH_RF:
1585 tmp = rcache_get_reg(SHR_PC, RC_GR_WRITE);
1586 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_READ);
1587 if (opd->dest & BITMASK1(SHR_PR)) {
1588 tmp3 = rcache_get_reg(SHR_PR, RC_GR_WRITE);
1589 emith_move_r_imm(tmp3, pc + 2);
1590 emith_add_r_r_r(tmp, tmp2, tmp3);
1591 }
1592 else {
1593 emith_move_r_r(tmp, tmp2);
1594 emith_add_r_imm(tmp, pc + 2);
1595 }
1596 drcf.pending_branch_indirect = 1;
1597 goto end_op;
1598
1599 case OP_SLEEP:
1600 printf("TODO sleep\n");
1601 goto end_op;
1602
1603 case OP_RTE:
1604 // pop PC
1605 emit_memhandler_read_rr(SHR_PC, SHR_SP, 0, 2);
1606 // pop SR
1607 tmp = rcache_get_reg_arg(0, SHR_SP);
1608 emith_add_r_imm(tmp, 4);
1609 tmp = emit_memhandler_read(2);
1610 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1611 emith_write_sr(sr, tmp);
1612 rcache_free_tmp(tmp);
1613 tmp = rcache_get_reg(SHR_SP, RC_GR_RMW);
1614 emith_add_r_imm(tmp, 4*2);
1615 drcf.test_irq = 1;
1616 drcf.pending_branch_indirect = 1;
1617 goto end_op;
1618
1619 case OP_LOAD_POOL:
1620#if PROPAGATE_CONSTANTS
1621 if (opd->imm != 0 && opd->imm < end_literals
1622 && literal_addr_count < MAX_LITERALS)
1623 {
1624 ADD_TO_ARRAY(literal_addr, literal_addr_count, opd->imm,);
1625 if (opd->size == 2)
1626 tmp = FETCH32(opd->imm);
1627 else
1628 tmp = (u32)(int)(signed short)FETCH_OP(opd->imm);
1629 gconst_new(GET_Rn(), tmp);
1630 }
1631 else
1632#endif
1633 {
1634 tmp = rcache_get_tmp_arg(0);
1635 if (opd->imm != 0)
1636 emith_move_r_imm(tmp, opd->imm);
1637 else {
1638 // have to calculate read addr from PC
1639 tmp2 = rcache_get_reg(SHR_PC, RC_GR_READ);
1640 if (opd->size == 2) {
1641 emith_add_r_r_imm(tmp, tmp2, 2 + (op & 0xff) * 4);
1642 emith_bic_r_imm(tmp, 3);
1643 }
1644 else
1645 emith_add_r_r_imm(tmp, tmp2, 2 + (op & 0xff) * 2);
1646 }
1647 tmp2 = emit_memhandler_read(opd->size);
1648 tmp3 = rcache_get_reg(GET_Rn(), RC_GR_WRITE);
1649 if (opd->size == 2)
1650 emith_move_r_r(tmp3, tmp2);
1651 else
1652 emith_sext(tmp3, tmp2, 16);
1653 rcache_free_tmp(tmp2);
1654 }
1655 goto end_op;
1656
1657 case OP_MOVA:
1658 if (opd->imm != 0)
1659 emit_move_r_imm32(SHR_R0, opd->imm);
1660 else {
1661 tmp = rcache_get_reg(SHR_R0, RC_GR_WRITE);
1662 tmp2 = rcache_get_reg(SHR_PC, RC_GR_READ);
1663 emith_add_r_r_imm(tmp, tmp2, 2 + (op & 0xff) * 4);
1664 emith_bic_r_imm(tmp, 3);
1665 }
1666 goto end_op;
1667 }
1668
1669 switch ((op >> 12) & 0x0f)
1670 {
1671 /////////////////////////////////////////////
1672 case 0x00:
1673 switch (op & 0x0f)
1674 {
1675 case 0x02:
1676 tmp = rcache_get_reg(GET_Rn(), RC_GR_WRITE);
1677 switch (GET_Fx())
1678 {
1679 case 0: // STC SR,Rn 0000nnnn00000010
1680 tmp2 = SHR_SR;
1681 break;
1682 case 1: // STC GBR,Rn 0000nnnn00010010
1683 tmp2 = SHR_GBR;
1684 break;
1685 case 2: // STC VBR,Rn 0000nnnn00100010
1686 tmp2 = SHR_VBR;
1687 break;
1688 default:
1689 goto default_;
1690 }
1691 tmp3 = rcache_get_reg(tmp2, RC_GR_READ);
1692 emith_move_r_r(tmp, tmp3);
1693 if (tmp2 == SHR_SR)
1694 emith_clear_msb(tmp, tmp, 22); // reserved bits defined by ISA as 0
1695 goto end_op;
1696 case 0x04: // MOV.B Rm,@(R0,Rn) 0000nnnnmmmm0100
1697 case 0x05: // MOV.W Rm,@(R0,Rn) 0000nnnnmmmm0101
1698 case 0x06: // MOV.L Rm,@(R0,Rn) 0000nnnnmmmm0110
1699 rcache_clean();
1700 tmp = rcache_get_reg_arg(1, GET_Rm());
1701 tmp2 = rcache_get_reg_arg(0, SHR_R0);
1702 tmp3 = rcache_get_reg(GET_Rn(), RC_GR_READ);
1703 emith_add_r_r(tmp2, tmp3);
1704 emit_memhandler_write(op & 3, pc);
1705 goto end_op;
1706 case 0x07:
1707 // MUL.L Rm,Rn 0000nnnnmmmm0111
1708 tmp = rcache_get_reg(GET_Rn(), RC_GR_READ);
1709 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1710 tmp3 = rcache_get_reg(SHR_MACL, RC_GR_WRITE);
1711 emith_mul(tmp3, tmp2, tmp);
1712 goto end_op;
1713 case 0x08:
1714 switch (GET_Fx())
1715 {
1716 case 0: // CLRT 0000000000001000
1717 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1718 emith_bic_r_imm(sr, T);
1719 break;
1720 case 1: // SETT 0000000000011000
1721 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1722 emith_or_r_imm(sr, T);
1723 break;
1724 case 2: // CLRMAC 0000000000101000
1725 emit_move_r_imm32(SHR_MACL, 0);
1726 emit_move_r_imm32(SHR_MACH, 0);
1727 break;
1728 default:
1729 goto default_;
1730 }
1731 goto end_op;
1732 case 0x09:
1733 switch (GET_Fx())
1734 {
1735 case 0: // NOP 0000000000001001
1736 break;
1737 case 1: // DIV0U 0000000000011001
1738 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1739 emith_bic_r_imm(sr, M|Q|T);
1740 break;
1741 case 2: // MOVT Rn 0000nnnn00101001
1742 sr = rcache_get_reg(SHR_SR, RC_GR_READ);
1743 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_WRITE);
1744 emith_clear_msb(tmp2, sr, 31);
1745 break;
1746 default:
1747 goto default_;
1748 }
1749 goto end_op;
1750 case 0x0a:
1751 tmp = rcache_get_reg(GET_Rn(), RC_GR_WRITE);
1752 switch (GET_Fx())
1753 {
1754 case 0: // STS MACH,Rn 0000nnnn00001010
1755 tmp2 = SHR_MACH;
1756 break;
1757 case 1: // STS MACL,Rn 0000nnnn00011010
1758 tmp2 = SHR_MACL;
1759 break;
1760 case 2: // STS PR,Rn 0000nnnn00101010
1761 tmp2 = SHR_PR;
1762 break;
1763 default:
1764 goto default_;
1765 }
1766 tmp2 = rcache_get_reg(tmp2, RC_GR_READ);
1767 emith_move_r_r(tmp, tmp2);
1768 goto end_op;
1769 case 0x0c: // MOV.B @(R0,Rm),Rn 0000nnnnmmmm1100
1770 case 0x0d: // MOV.W @(R0,Rm),Rn 0000nnnnmmmm1101
1771 case 0x0e: // MOV.L @(R0,Rm),Rn 0000nnnnmmmm1110
1772 tmp = emit_indirect_indexed_read(SHR_R0, GET_Rm(), op & 3);
1773 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_WRITE);
1774 if ((op & 3) != 2) {
1775 emith_sext(tmp2, tmp, (op & 1) ? 16 : 8);
1776 } else
1777 emith_move_r_r(tmp2, tmp);
1778 rcache_free_tmp(tmp);
1779 goto end_op;
1780 case 0x0f: // MAC.L @Rm+,@Rn+ 0000nnnnmmmm1111
1781 emit_indirect_read_double(&tmp, &tmp2, GET_Rn(), GET_Rm(), 2);
1782 tmp4 = rcache_get_reg(SHR_MACH, RC_GR_RMW);
1783 /* MS 16 MAC bits unused if saturated */
1784 sr = rcache_get_reg(SHR_SR, RC_GR_READ);
1785 emith_tst_r_imm(sr, S);
1786 EMITH_SJMP_START(DCOND_EQ);
1787 emith_clear_msb_c(DCOND_NE, tmp4, tmp4, 16);
1788 EMITH_SJMP_END(DCOND_EQ);
1789 rcache_unlock(sr);
1790 tmp3 = rcache_get_reg(SHR_MACL, RC_GR_RMW); // might evict SR
1791 emith_mula_s64(tmp3, tmp4, tmp, tmp2);
1792 rcache_free_tmp(tmp2);
1793 sr = rcache_get_reg(SHR_SR, RC_GR_READ); // reget just in case
1794 emith_tst_r_imm(sr, S);
1795
1796 EMITH_JMP_START(DCOND_EQ);
1797 emith_asr(tmp, tmp4, 15);
1798 emith_cmp_r_imm(tmp, -1); // negative overflow (0x80000000..0xffff7fff)
1799 EMITH_SJMP_START(DCOND_GE);
1800 emith_move_r_imm_c(DCOND_LT, tmp4, 0x8000);
1801 emith_move_r_imm_c(DCOND_LT, tmp3, 0x0000);
1802 EMITH_SJMP_END(DCOND_GE);
1803 emith_cmp_r_imm(tmp, 0); // positive overflow (0x00008000..0x7fffffff)
1804 EMITH_SJMP_START(DCOND_LE);
1805 emith_move_r_imm_c(DCOND_GT, tmp4, 0x00007fff);
1806 emith_move_r_imm_c(DCOND_GT, tmp3, 0xffffffff);
1807 EMITH_SJMP_END(DCOND_LE);
1808 EMITH_JMP_END(DCOND_EQ);
1809
1810 rcache_free_tmp(tmp);
1811 goto end_op;
1812 }
1813 goto default_;
1814
1815 /////////////////////////////////////////////
1816 case 0x01:
1817 // MOV.L Rm,@(disp,Rn) 0001nnnnmmmmdddd
1818 rcache_clean();
1819 tmp = rcache_get_reg_arg(0, GET_Rn());
1820 tmp2 = rcache_get_reg_arg(1, GET_Rm());
1821 if (op & 0x0f)
1822 emith_add_r_imm(tmp, (op & 0x0f) * 4);
1823 emit_memhandler_write(2, pc);
1824 goto end_op;
1825
1826 case 0x02:
1827 switch (op & 0x0f)
1828 {
1829 case 0x00: // MOV.B Rm,@Rn 0010nnnnmmmm0000
1830 case 0x01: // MOV.W Rm,@Rn 0010nnnnmmmm0001
1831 case 0x02: // MOV.L Rm,@Rn 0010nnnnmmmm0010
1832 rcache_clean();
1833 rcache_get_reg_arg(0, GET_Rn());
1834 rcache_get_reg_arg(1, GET_Rm());
1835 emit_memhandler_write(op & 3, pc);
1836 goto end_op;
1837 case 0x04: // MOV.B Rm,@-Rn 0010nnnnmmmm0100
1838 case 0x05: // MOV.W Rm,@-Rn 0010nnnnmmmm0101
1839 case 0x06: // MOV.L Rm,@-Rn 0010nnnnmmmm0110
1840 rcache_get_reg_arg(1, GET_Rm()); // for Rm == Rn
1841 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
1842 emith_sub_r_imm(tmp, (1 << (op & 3)));
1843 rcache_clean();
1844 rcache_get_reg_arg(0, GET_Rn());
1845 emit_memhandler_write(op & 3, pc);
1846 goto end_op;
1847 case 0x07: // DIV0S Rm,Rn 0010nnnnmmmm0111
1848 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1849 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_READ);
1850 tmp3 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1851 emith_bic_r_imm(sr, M|Q|T);
1852 emith_tst_r_imm(tmp2, (1<<31));
1853 EMITH_SJMP_START(DCOND_EQ);
1854 emith_or_r_imm_c(DCOND_NE, sr, Q);
1855 EMITH_SJMP_END(DCOND_EQ);
1856 emith_tst_r_imm(tmp3, (1<<31));
1857 EMITH_SJMP_START(DCOND_EQ);
1858 emith_or_r_imm_c(DCOND_NE, sr, M);
1859 EMITH_SJMP_END(DCOND_EQ);
1860 emith_teq_r_r(tmp2, tmp3);
1861 EMITH_SJMP_START(DCOND_PL);
1862 emith_or_r_imm_c(DCOND_MI, sr, T);
1863 EMITH_SJMP_END(DCOND_PL);
1864 goto end_op;
1865 case 0x08: // TST Rm,Rn 0010nnnnmmmm1000
1866 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1867 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_READ);
1868 tmp3 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1869 emith_bic_r_imm(sr, T);
1870 emith_tst_r_r(tmp2, tmp3);
1871 emit_or_t_if_eq(sr);
1872 goto end_op;
1873 case 0x09: // AND Rm,Rn 0010nnnnmmmm1001
1874 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
1875 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1876 emith_and_r_r(tmp, tmp2);
1877 goto end_op;
1878 case 0x0a: // XOR Rm,Rn 0010nnnnmmmm1010
1879 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
1880 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1881 emith_eor_r_r(tmp, tmp2);
1882 goto end_op;
1883 case 0x0b: // OR Rm,Rn 0010nnnnmmmm1011
1884 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
1885 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1886 emith_or_r_r(tmp, tmp2);
1887 goto end_op;
1888 case 0x0c: // CMP/STR Rm,Rn 0010nnnnmmmm1100
1889 tmp = rcache_get_tmp();
1890 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_READ);
1891 tmp3 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1892 emith_eor_r_r_r(tmp, tmp2, tmp3);
1893 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1894 emith_bic_r_imm(sr, T);
1895 emith_tst_r_imm(tmp, 0x000000ff);
1896 emit_or_t_if_eq(sr);
1897 emith_tst_r_imm(tmp, 0x0000ff00);
1898 emit_or_t_if_eq(sr);
1899 emith_tst_r_imm(tmp, 0x00ff0000);
1900 emit_or_t_if_eq(sr);
1901 emith_tst_r_imm(tmp, 0xff000000);
1902 emit_or_t_if_eq(sr);
1903 rcache_free_tmp(tmp);
1904 goto end_op;
1905 case 0x0d: // XTRCT Rm,Rn 0010nnnnmmmm1101
1906 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
1907 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1908 emith_lsr(tmp, tmp, 16);
1909 emith_or_r_r_lsl(tmp, tmp2, 16);
1910 goto end_op;
1911 case 0x0e: // MULU.W Rm,Rn 0010nnnnmmmm1110
1912 case 0x0f: // MULS.W Rm,Rn 0010nnnnmmmm1111
1913 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_READ);
1914 tmp = rcache_get_reg(SHR_MACL, RC_GR_WRITE);
1915 if (op & 1) {
1916 emith_sext(tmp, tmp2, 16);
1917 } else
1918 emith_clear_msb(tmp, tmp2, 16);
1919 tmp3 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1920 tmp2 = rcache_get_tmp();
1921 if (op & 1) {
1922 emith_sext(tmp2, tmp3, 16);
1923 } else
1924 emith_clear_msb(tmp2, tmp3, 16);
1925 emith_mul(tmp, tmp, tmp2);
1926 rcache_free_tmp(tmp2);
1927 goto end_op;
1928 }
1929 goto default_;
1930
1931 /////////////////////////////////////////////
1932 case 0x03:
1933 switch (op & 0x0f)
1934 {
1935 case 0x00: // CMP/EQ Rm,Rn 0011nnnnmmmm0000
1936 case 0x02: // CMP/HS Rm,Rn 0011nnnnmmmm0010
1937 case 0x03: // CMP/GE Rm,Rn 0011nnnnmmmm0011
1938 case 0x06: // CMP/HI Rm,Rn 0011nnnnmmmm0110
1939 case 0x07: // CMP/GT Rm,Rn 0011nnnnmmmm0111
1940 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1941 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_READ);
1942 tmp3 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1943 emith_bic_r_imm(sr, T);
1944 emith_cmp_r_r(tmp2, tmp3);
1945 switch (op & 0x07)
1946 {
1947 case 0x00: // CMP/EQ
1948 emit_or_t_if_eq(sr);
1949 break;
1950 case 0x02: // CMP/HS
1951 EMITH_SJMP_START(DCOND_LO);
1952 emith_or_r_imm_c(DCOND_HS, sr, T);
1953 EMITH_SJMP_END(DCOND_LO);
1954 break;
1955 case 0x03: // CMP/GE
1956 EMITH_SJMP_START(DCOND_LT);
1957 emith_or_r_imm_c(DCOND_GE, sr, T);
1958 EMITH_SJMP_END(DCOND_LT);
1959 break;
1960 case 0x06: // CMP/HI
1961 EMITH_SJMP_START(DCOND_LS);
1962 emith_or_r_imm_c(DCOND_HI, sr, T);
1963 EMITH_SJMP_END(DCOND_LS);
1964 break;
1965 case 0x07: // CMP/GT
1966 EMITH_SJMP_START(DCOND_LE);
1967 emith_or_r_imm_c(DCOND_GT, sr, T);
1968 EMITH_SJMP_END(DCOND_LE);
1969 break;
1970 }
1971 goto end_op;
1972 case 0x04: // DIV1 Rm,Rn 0011nnnnmmmm0100
1973 // Q1 = carry(Rn = (Rn << 1) | T)
1974 // if Q ^ M
1975 // Q2 = carry(Rn += Rm)
1976 // else
1977 // Q2 = carry(Rn -= Rm)
1978 // Q = M ^ Q1 ^ Q2
1979 // T = (Q == M) = !(Q ^ M) = !(Q1 ^ Q2)
1980 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_RMW);
1981 tmp3 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1982 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1983 emith_tpop_carry(sr, 0);
1984 emith_adcf_r_r(tmp2, tmp2);
1985 emith_tpush_carry(sr, 0); // keep Q1 in T for now
1986 tmp4 = rcache_get_tmp();
1987 emith_and_r_r_imm(tmp4, sr, M);
1988 emith_eor_r_r_lsr(sr, tmp4, M_SHIFT - Q_SHIFT); // Q ^= M
1989 rcache_free_tmp(tmp4);
1990 // add or sub, invert T if carry to get Q1 ^ Q2
1991 // in: (Q ^ M) passed in Q, Q1 in T
1992 emith_sh2_div1_step(tmp2, tmp3, sr);
1993 emith_bic_r_imm(sr, Q);
1994 emith_tst_r_imm(sr, M);
1995 EMITH_SJMP_START(DCOND_EQ);
1996 emith_or_r_imm_c(DCOND_NE, sr, Q); // Q = M
1997 EMITH_SJMP_END(DCOND_EQ);
1998 emith_tst_r_imm(sr, T);
1999 EMITH_SJMP_START(DCOND_EQ);
2000 emith_eor_r_imm_c(DCOND_NE, sr, Q); // Q = M ^ Q1 ^ Q2
2001 EMITH_SJMP_END(DCOND_EQ);
2002 emith_eor_r_imm(sr, T); // T = !(Q1 ^ Q2)
2003 goto end_op;
2004 case 0x05: // DMULU.L Rm,Rn 0011nnnnmmmm0101
2005 tmp = rcache_get_reg(GET_Rn(), RC_GR_READ);
2006 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
2007 tmp3 = rcache_get_reg(SHR_MACL, RC_GR_WRITE);
2008 tmp4 = rcache_get_reg(SHR_MACH, RC_GR_WRITE);
2009 emith_mul_u64(tmp3, tmp4, tmp, tmp2);
2010 goto end_op;
2011 case 0x08: // SUB Rm,Rn 0011nnnnmmmm1000
2012 case 0x0c: // ADD Rm,Rn 0011nnnnmmmm1100
2013 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2014 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
2015 if (op & 4) {
2016 emith_add_r_r(tmp, tmp2);
2017 } else
2018 emith_sub_r_r(tmp, tmp2);
2019 goto end_op;
2020 case 0x0a: // SUBC Rm,Rn 0011nnnnmmmm1010
2021 case 0x0e: // ADDC Rm,Rn 0011nnnnmmmm1110
2022 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2023 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
2024 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2025 if (op & 4) { // adc
2026 emith_tpop_carry(sr, 0);
2027 emith_adcf_r_r(tmp, tmp2);
2028 emith_tpush_carry(sr, 0);
2029 } else {
2030 emith_tpop_carry(sr, 1);
2031 emith_sbcf_r_r(tmp, tmp2);
2032 emith_tpush_carry(sr, 1);
2033 }
2034 goto end_op;
2035 case 0x0b: // SUBV Rm,Rn 0011nnnnmmmm1011
2036 case 0x0f: // ADDV Rm,Rn 0011nnnnmmmm1111
2037 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2038 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
2039 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2040 emith_bic_r_imm(sr, T);
2041 if (op & 4) {
2042 emith_addf_r_r(tmp, tmp2);
2043 } else
2044 emith_subf_r_r(tmp, tmp2);
2045 EMITH_SJMP_START(DCOND_VC);
2046 emith_or_r_imm_c(DCOND_VS, sr, T);
2047 EMITH_SJMP_END(DCOND_VC);
2048 goto end_op;
2049 case 0x0d: // DMULS.L Rm,Rn 0011nnnnmmmm1101
2050 tmp = rcache_get_reg(GET_Rn(), RC_GR_READ);
2051 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
2052 tmp3 = rcache_get_reg(SHR_MACL, RC_GR_WRITE);
2053 tmp4 = rcache_get_reg(SHR_MACH, RC_GR_WRITE);
2054 emith_mul_s64(tmp3, tmp4, tmp, tmp2);
2055 goto end_op;
2056 }
2057 goto default_;
2058
2059 /////////////////////////////////////////////
2060 case 0x04:
2061 switch (op & 0x0f)
2062 {
2063 case 0x00:
2064 switch (GET_Fx())
2065 {
2066 case 0: // SHLL Rn 0100nnnn00000000
2067 case 2: // SHAL Rn 0100nnnn00100000
2068 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2069 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2070 emith_tpop_carry(sr, 0); // dummy
2071 emith_lslf(tmp, tmp, 1);
2072 emith_tpush_carry(sr, 0);
2073 goto end_op;
2074 case 1: // DT Rn 0100nnnn00010000
2075 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2076#if 0 // scheduling needs tuning
2077 if (FETCH_OP(pc) == 0x8bfd) { // BF #-2
2078 if (gconst_get(GET_Rn(), &tmp)) {
2079 // XXX: limit burned cycles
2080 emit_move_r_imm32(GET_Rn(), 0);
2081 emith_or_r_imm(sr, T);
2082 cycles += tmp * 4 + 1; // +1 syncs with noconst version, not sure why
2083 skip_op = 1;
2084 }
2085 else
2086 emith_sh2_dtbf_loop();
2087 goto end_op;
2088 }
2089#endif
2090 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2091 emith_bic_r_imm(sr, T);
2092 emith_subf_r_imm(tmp, 1);
2093 emit_or_t_if_eq(sr);
2094 goto end_op;
2095 }
2096 goto default_;
2097 case 0x01:
2098 switch (GET_Fx())
2099 {
2100 case 0: // SHLR Rn 0100nnnn00000001
2101 case 2: // SHAR Rn 0100nnnn00100001
2102 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2103 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2104 emith_tpop_carry(sr, 0); // dummy
2105 if (op & 0x20) {
2106 emith_asrf(tmp, tmp, 1);
2107 } else
2108 emith_lsrf(tmp, tmp, 1);
2109 emith_tpush_carry(sr, 0);
2110 goto end_op;
2111 case 1: // CMP/PZ Rn 0100nnnn00010001
2112 tmp = rcache_get_reg(GET_Rn(), RC_GR_READ);
2113 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2114 emith_bic_r_imm(sr, T);
2115 emith_cmp_r_imm(tmp, 0);
2116 EMITH_SJMP_START(DCOND_LT);
2117 emith_or_r_imm_c(DCOND_GE, sr, T);
2118 EMITH_SJMP_END(DCOND_LT);
2119 goto end_op;
2120 }
2121 goto default_;
2122 case 0x02:
2123 case 0x03:
2124 switch (op & 0x3f)
2125 {
2126 case 0x02: // STS.L MACH,@-Rn 0100nnnn00000010
2127 tmp = SHR_MACH;
2128 break;
2129 case 0x12: // STS.L MACL,@-Rn 0100nnnn00010010
2130 tmp = SHR_MACL;
2131 break;
2132 case 0x22: // STS.L PR,@-Rn 0100nnnn00100010
2133 tmp = SHR_PR;
2134 break;
2135 case 0x03: // STC.L SR,@-Rn 0100nnnn00000011
2136 tmp = SHR_SR;
2137 break;
2138 case 0x13: // STC.L GBR,@-Rn 0100nnnn00010011
2139 tmp = SHR_GBR;
2140 break;
2141 case 0x23: // STC.L VBR,@-Rn 0100nnnn00100011
2142 tmp = SHR_VBR;
2143 break;
2144 default:
2145 goto default_;
2146 }
2147 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2148 emith_sub_r_imm(tmp2, 4);
2149 rcache_clean();
2150 rcache_get_reg_arg(0, GET_Rn());
2151 tmp3 = rcache_get_reg_arg(1, tmp);
2152 if (tmp == SHR_SR)
2153 emith_clear_msb(tmp3, tmp3, 22); // reserved bits defined by ISA as 0
2154 emit_memhandler_write(2, pc);
2155 goto end_op;
2156 case 0x04:
2157 case 0x05:
2158 switch (op & 0x3f)
2159 {
2160 case 0x04: // ROTL Rn 0100nnnn00000100
2161 case 0x05: // ROTR Rn 0100nnnn00000101
2162 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2163 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2164 emith_tpop_carry(sr, 0); // dummy
2165 if (op & 1) {
2166 emith_rorf(tmp, tmp, 1);
2167 } else
2168 emith_rolf(tmp, tmp, 1);
2169 emith_tpush_carry(sr, 0);
2170 goto end_op;
2171 case 0x24: // ROTCL Rn 0100nnnn00100100
2172 case 0x25: // ROTCR Rn 0100nnnn00100101
2173 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2174 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2175 emith_tpop_carry(sr, 0);
2176 if (op & 1) {
2177 emith_rorcf(tmp);
2178 } else
2179 emith_rolcf(tmp);
2180 emith_tpush_carry(sr, 0);
2181 goto end_op;
2182 case 0x15: // CMP/PL Rn 0100nnnn00010101
2183 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2184 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2185 emith_bic_r_imm(sr, T);
2186 emith_cmp_r_imm(tmp, 0);
2187 EMITH_SJMP_START(DCOND_LE);
2188 emith_or_r_imm_c(DCOND_GT, sr, T);
2189 EMITH_SJMP_END(DCOND_LE);
2190 goto end_op;
2191 }
2192 goto default_;
2193 case 0x06:
2194 case 0x07:
2195 switch (op & 0x3f)
2196 {
2197 case 0x06: // LDS.L @Rm+,MACH 0100mmmm00000110
2198 tmp = SHR_MACH;
2199 break;
2200 case 0x16: // LDS.L @Rm+,MACL 0100mmmm00010110
2201 tmp = SHR_MACL;
2202 break;
2203 case 0x26: // LDS.L @Rm+,PR 0100mmmm00100110
2204 tmp = SHR_PR;
2205 break;
2206 case 0x07: // LDC.L @Rm+,SR 0100mmmm00000111
2207 tmp = SHR_SR;
2208 break;
2209 case 0x17: // LDC.L @Rm+,GBR 0100mmmm00010111
2210 tmp = SHR_GBR;
2211 break;
2212 case 0x27: // LDC.L @Rm+,VBR 0100mmmm00100111
2213 tmp = SHR_VBR;
2214 break;
2215 default:
2216 goto default_;
2217 }
2218 rcache_get_reg_arg(0, GET_Rn());
2219 tmp2 = emit_memhandler_read(2);
2220 if (tmp == SHR_SR) {
2221 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2222 emith_write_sr(sr, tmp2);
2223 drcf.test_irq = 1;
2224 } else {
2225 tmp = rcache_get_reg(tmp, RC_GR_WRITE);
2226 emith_move_r_r(tmp, tmp2);
2227 }
2228 rcache_free_tmp(tmp2);
2229 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2230 emith_add_r_imm(tmp, 4);
2231 goto end_op;
2232 case 0x08:
2233 case 0x09:
2234 switch (GET_Fx())
2235 {
2236 case 0:
2237 // SHLL2 Rn 0100nnnn00001000
2238 // SHLR2 Rn 0100nnnn00001001
2239 tmp = 2;
2240 break;
2241 case 1:
2242 // SHLL8 Rn 0100nnnn00011000
2243 // SHLR8 Rn 0100nnnn00011001
2244 tmp = 8;
2245 break;
2246 case 2:
2247 // SHLL16 Rn 0100nnnn00101000
2248 // SHLR16 Rn 0100nnnn00101001
2249 tmp = 16;
2250 break;
2251 default:
2252 goto default_;
2253 }
2254 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2255 if (op & 1) {
2256 emith_lsr(tmp2, tmp2, tmp);
2257 } else
2258 emith_lsl(tmp2, tmp2, tmp);
2259 goto end_op;
2260 case 0x0a:
2261 switch (GET_Fx())
2262 {
2263 case 0: // LDS Rm,MACH 0100mmmm00001010
2264 tmp2 = SHR_MACH;
2265 break;
2266 case 1: // LDS Rm,MACL 0100mmmm00011010
2267 tmp2 = SHR_MACL;
2268 break;
2269 case 2: // LDS Rm,PR 0100mmmm00101010
2270 tmp2 = SHR_PR;
2271 break;
2272 default:
2273 goto default_;
2274 }
2275 emit_move_r_r(tmp2, GET_Rn());
2276 goto end_op;
2277 case 0x0b:
2278 switch (GET_Fx())
2279 {
2280 case 1: // TAS.B @Rn 0100nnnn00011011
2281 // XXX: is TAS working on 32X?
2282 rcache_get_reg_arg(0, GET_Rn());
2283 tmp = emit_memhandler_read(0);
2284 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2285 emith_bic_r_imm(sr, T);
2286 emith_cmp_r_imm(tmp, 0);
2287 emit_or_t_if_eq(sr);
2288 rcache_clean();
2289 emith_or_r_imm(tmp, 0x80);
2290      tmp2 = rcache_get_tmp_arg(1); // assuming it differs from tmp
2291 emith_move_r_r(tmp2, tmp);
2292 rcache_free_tmp(tmp);
2293 rcache_get_reg_arg(0, GET_Rn());
2294 emit_memhandler_write(0, pc);
2295 break;
2296 default:
2297 goto default_;
2298 }
2299 goto end_op;
2300 case 0x0e:
2301 tmp = rcache_get_reg(GET_Rn(), RC_GR_READ);
2302 switch (GET_Fx())
2303 {
2304 case 0: // LDC Rm,SR 0100mmmm00001110
2305 tmp2 = SHR_SR;
2306 break;
2307 case 1: // LDC Rm,GBR 0100mmmm00011110
2308 tmp2 = SHR_GBR;
2309 break;
2310 case 2: // LDC Rm,VBR 0100mmmm00101110
2311 tmp2 = SHR_VBR;
2312 break;
2313 default:
2314 goto default_;
2315 }
2316 if (tmp2 == SHR_SR) {
2317 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2318 emith_write_sr(sr, tmp);
2319 drcf.test_irq = 1;
2320 } else {
2321 tmp2 = rcache_get_reg(tmp2, RC_GR_WRITE);
2322 emith_move_r_r(tmp2, tmp);
2323 }
2324 goto end_op;
2325 case 0x0f:
2326 // MAC.W @Rm+,@Rn+ 0100nnnnmmmm1111
2327 emit_indirect_read_double(&tmp, &tmp2, GET_Rn(), GET_Rm(), 1);
2328 emith_sext(tmp, tmp, 16);
2329 emith_sext(tmp2, tmp2, 16);
2330 tmp3 = rcache_get_reg(SHR_MACL, RC_GR_RMW);
2331 tmp4 = rcache_get_reg(SHR_MACH, RC_GR_RMW);
2332 emith_mula_s64(tmp3, tmp4, tmp, tmp2);
2333 rcache_free_tmp(tmp2);
2334 // XXX: MACH should be untouched when S is set?
2335 sr = rcache_get_reg(SHR_SR, RC_GR_READ);
2336 emith_tst_r_imm(sr, S);
2337 EMITH_JMP_START(DCOND_EQ);
2338
2339 emith_asr(tmp, tmp3, 31);
2340 emith_eorf_r_r(tmp, tmp4); // tmp = ((signed)macl >> 31) ^ mach
2341 EMITH_JMP_START(DCOND_EQ);
2342 emith_move_r_imm(tmp3, 0x80000000);
2343 emith_tst_r_r(tmp4, tmp4);
2344 EMITH_SJMP_START(DCOND_MI);
2345 emith_sub_r_imm_c(DCOND_PL, tmp3, 1); // positive
2346 EMITH_SJMP_END(DCOND_MI);
2347 EMITH_JMP_END(DCOND_EQ);
2348
2349 EMITH_JMP_END(DCOND_EQ);
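      // the nested blocks above implement the S-bit saturation of MAC.W:
      // if S is set and the accumulate left the signed 32-bit range
      // (((s32)MACL >> 31) != MACH), MACL is clamped to 0x80000000 or
      // 0x7fffffff depending on the sign in MACH; with S clear the full
      // 64-bit MAC result is kept.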
2350 rcache_free_tmp(tmp);
2351 goto end_op;
2352 }
2353 goto default_;
2354
2355 /////////////////////////////////////////////
2356 case 0x05:
2357 // MOV.L @(disp,Rm),Rn 0101nnnnmmmmdddd
2358 emit_memhandler_read_rr(GET_Rn(), GET_Rm(), (op & 0x0f) * 4, 2);
2359 goto end_op;
2360
2361 /////////////////////////////////////////////
2362 case 0x06:
2363 switch (op & 0x0f)
2364 {
2365 case 0x00: // MOV.B @Rm,Rn 0110nnnnmmmm0000
2366 case 0x01: // MOV.W @Rm,Rn 0110nnnnmmmm0001
2367 case 0x02: // MOV.L @Rm,Rn 0110nnnnmmmm0010
2368 case 0x04: // MOV.B @Rm+,Rn 0110nnnnmmmm0100
2369 case 0x05: // MOV.W @Rm+,Rn 0110nnnnmmmm0101
2370 case 0x06: // MOV.L @Rm+,Rn 0110nnnnmmmm0110
2371 emit_memhandler_read_rr(GET_Rn(), GET_Rm(), 0, op & 3);
2372 if ((op & 7) >= 4 && GET_Rn() != GET_Rm()) {
2373 tmp = rcache_get_reg(GET_Rm(), RC_GR_RMW);
2374 emith_add_r_imm(tmp, (1 << (op & 3)));
2375 }
2376 goto end_op;
2377 case 0x03:
2378 case 0x07 ... 0x0f:
2379 tmp = rcache_get_reg(GET_Rm(), RC_GR_READ);
2380 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_WRITE);
2381 switch (op & 0x0f)
2382 {
2383 case 0x03: // MOV Rm,Rn 0110nnnnmmmm0011
2384 emith_move_r_r(tmp2, tmp);
2385 break;
2386 case 0x07: // NOT Rm,Rn 0110nnnnmmmm0111
2387 emith_mvn_r_r(tmp2, tmp);
2388 break;
2389 case 0x08: // SWAP.B Rm,Rn 0110nnnnmmmm1000
2390 tmp3 = tmp2;
2391 if (tmp == tmp2)
2392 tmp3 = rcache_get_tmp();
2393 tmp4 = rcache_get_tmp();
2394 emith_lsr(tmp3, tmp, 16);
2395 emith_or_r_r_lsl(tmp3, tmp, 24);
2396 emith_and_r_r_imm(tmp4, tmp, 0xff00);
2397 emith_or_r_r_lsl(tmp3, tmp4, 8);
2398 emith_rol(tmp2, tmp3, 16);
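        // e.g. Rm = 0x11223344:
        //   tmp3 = Rm >> 16             -> 0x00001122
        //   tmp3 |= Rm << 24            -> 0x44001122
        //   tmp3 |= (Rm & 0xff00) << 8  -> 0x44331122
        //   Rn = rol(tmp3, 16)          -> 0x11224433
        // i.e. the two low bytes swap and the upper word is preserved, as
        // SWAP.B requires.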
2399 rcache_free_tmp(tmp4);
2400 if (tmp == tmp2)
2401 rcache_free_tmp(tmp3);
2402 break;
2403 case 0x09: // SWAP.W Rm,Rn 0110nnnnmmmm1001
2404 emith_rol(tmp2, tmp, 16);
2405 break;
2406 case 0x0a: // NEGC Rm,Rn 0110nnnnmmmm1010
2407 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2408 emith_tpop_carry(sr, 1);
2409 emith_negcf_r_r(tmp2, tmp);
2410 emith_tpush_carry(sr, 1);
2411 break;
2412 case 0x0b: // NEG Rm,Rn 0110nnnnmmmm1011
2413 emith_neg_r_r(tmp2, tmp);
2414 break;
2415 case 0x0c: // EXTU.B Rm,Rn 0110nnnnmmmm1100
2416 emith_clear_msb(tmp2, tmp, 24);
2417 break;
2418 case 0x0d: // EXTU.W Rm,Rn 0110nnnnmmmm1101
2419 emith_clear_msb(tmp2, tmp, 16);
2420 break;
2421 case 0x0e: // EXTS.B Rm,Rn 0110nnnnmmmm1110
2422 emith_sext(tmp2, tmp, 8);
2423 break;
2424 case 0x0f: // EXTS.W Rm,Rn 0110nnnnmmmm1111
2425 emith_sext(tmp2, tmp, 16);
2426 break;
2427 }
2428 goto end_op;
2429 }
2430 goto default_;
2431
2432 /////////////////////////////////////////////
2433 case 0x07:
2434 // ADD #imm,Rn 0111nnnniiiiiiii
2435 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2436 if (op & 0x80) { // adding negative
2437 emith_sub_r_imm(tmp, -op & 0xff);
2438 } else
2439 emith_add_r_imm(tmp, op & 0xff);
2440 goto end_op;
2441
2442 /////////////////////////////////////////////
2443 case 0x08:
2444 switch (op & 0x0f00)
2445 {
2446 case 0x0000: // MOV.B R0,@(disp,Rn) 10000000nnnndddd
2447 case 0x0100: // MOV.W R0,@(disp,Rn) 10000001nnnndddd
2448 rcache_clean();
2449 tmp = rcache_get_reg_arg(0, GET_Rm());
2450 tmp2 = rcache_get_reg_arg(1, SHR_R0);
2451 tmp3 = (op & 0x100) >> 8;
2452 if (op & 0x0f)
2453 emith_add_r_imm(tmp, (op & 0x0f) << tmp3);
2454 emit_memhandler_write(tmp3, pc);
2455 goto end_op;
2456 case 0x0400: // MOV.B @(disp,Rm),R0 10000100mmmmdddd
2457 case 0x0500: // MOV.W @(disp,Rm),R0 10000101mmmmdddd
2458 tmp = (op & 0x100) >> 8;
2459 emit_memhandler_read_rr(SHR_R0, GET_Rm(), (op & 0x0f) << tmp, tmp);
2460 goto end_op;
2461 case 0x0800: // CMP/EQ #imm,R0 10001000iiiiiiii
2462 // XXX: could use cmn
2463 tmp = rcache_get_tmp();
2464 tmp2 = rcache_get_reg(0, RC_GR_READ);
2465 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2466 emith_move_r_imm_s8(tmp, op & 0xff);
2467 emith_bic_r_imm(sr, T);
2468 emith_cmp_r_r(tmp2, tmp);
2469 emit_or_t_if_eq(sr);
2470 rcache_free_tmp(tmp);
2471 goto end_op;
2472 }
2473 goto default_;
2474
2475 /////////////////////////////////////////////
2476 case 0x0c:
2477 switch (op & 0x0f00)
2478 {
2479 case 0x0000: // MOV.B R0,@(disp,GBR) 11000000dddddddd
2480 case 0x0100: // MOV.W R0,@(disp,GBR) 11000001dddddddd
2481 case 0x0200: // MOV.L R0,@(disp,GBR) 11000010dddddddd
2482 rcache_clean();
2483 tmp = rcache_get_reg_arg(0, SHR_GBR);
2484 tmp2 = rcache_get_reg_arg(1, SHR_R0);
2485 tmp3 = (op & 0x300) >> 8;
2486 emith_add_r_imm(tmp, (op & 0xff) << tmp3);
2487 emit_memhandler_write(tmp3, pc);
2488 goto end_op;
2489 case 0x0400: // MOV.B @(disp,GBR),R0 11000100dddddddd
2490 case 0x0500: // MOV.W @(disp,GBR),R0 11000101dddddddd
2491 case 0x0600: // MOV.L @(disp,GBR),R0 11000110dddddddd
2492 tmp = (op & 0x300) >> 8;
2493 emit_memhandler_read_rr(SHR_R0, SHR_GBR, (op & 0xff) << tmp, tmp);
2494 goto end_op;
2495 case 0x0300: // TRAPA #imm 11000011iiiiiiii
2496 tmp = rcache_get_reg(SHR_SP, RC_GR_RMW);
2497 emith_sub_r_imm(tmp, 4*2);
2498 // push SR
2499 tmp = rcache_get_reg_arg(0, SHR_SP);
2500 emith_add_r_imm(tmp, 4);
2501 tmp = rcache_get_reg_arg(1, SHR_SR);
2502 emith_clear_msb(tmp, tmp, 22);
2503 emit_memhandler_write(2, pc);
2504 // push PC
2505 rcache_get_reg_arg(0, SHR_SP);
2506 tmp = rcache_get_tmp_arg(1);
2507 emith_move_r_imm(tmp, pc);
2508 emit_memhandler_write(2, pc);
2509 // obtain new PC
2510 emit_memhandler_read_rr(SHR_PC, SHR_VBR, (op & 0xff) * 4, 2);
2511 // indirect jump -> back to dispatcher
2512 rcache_flush();
2513 emith_jump(sh2_drc_dispatcher);
2514 goto end_op;
2515 case 0x0800: // TST #imm,R0 11001000iiiiiiii
2516 tmp = rcache_get_reg(SHR_R0, RC_GR_READ);
2517 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2518 emith_bic_r_imm(sr, T);
2519 emith_tst_r_imm(tmp, op & 0xff);
2520 emit_or_t_if_eq(sr);
2521 goto end_op;
2522 case 0x0900: // AND #imm,R0 11001001iiiiiiii
2523 tmp = rcache_get_reg(SHR_R0, RC_GR_RMW);
2524 emith_and_r_imm(tmp, op & 0xff);
2525 goto end_op;
2526 case 0x0a00: // XOR #imm,R0 11001010iiiiiiii
2527 tmp = rcache_get_reg(SHR_R0, RC_GR_RMW);
2528 emith_eor_r_imm(tmp, op & 0xff);
2529 goto end_op;
2530 case 0x0b00: // OR #imm,R0 11001011iiiiiiii
2531 tmp = rcache_get_reg(SHR_R0, RC_GR_RMW);
2532 emith_or_r_imm(tmp, op & 0xff);
2533 goto end_op;
2534 case 0x0c00: // TST.B #imm,@(R0,GBR) 11001100iiiiiiii
2535 tmp = emit_indirect_indexed_read(SHR_R0, SHR_GBR, 0);
2536 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2537 emith_bic_r_imm(sr, T);
2538 emith_tst_r_imm(tmp, op & 0xff);
2539 emit_or_t_if_eq(sr);
2540 rcache_free_tmp(tmp);
2541 goto end_op;
2542 case 0x0d00: // AND.B #imm,@(R0,GBR) 11001101iiiiiiii
2543 tmp = emit_indirect_indexed_read(SHR_R0, SHR_GBR, 0);
2544 emith_and_r_imm(tmp, op & 0xff);
2545 goto end_rmw_op;
2546 case 0x0e00: // XOR.B #imm,@(R0,GBR) 11001110iiiiiiii
2547 tmp = emit_indirect_indexed_read(SHR_R0, SHR_GBR, 0);
2548 emith_eor_r_imm(tmp, op & 0xff);
2549 goto end_rmw_op;
2550 case 0x0f00: // OR.B #imm,@(R0,GBR) 11001111iiiiiiii
2551 tmp = emit_indirect_indexed_read(SHR_R0, SHR_GBR, 0);
2552 emith_or_r_imm(tmp, op & 0xff);
2553 end_rmw_op:
2554 tmp2 = rcache_get_tmp_arg(1);
2555 emith_move_r_r(tmp2, tmp);
2556 rcache_free_tmp(tmp);
2557 tmp3 = rcache_get_reg_arg(0, SHR_GBR);
2558 tmp4 = rcache_get_reg(SHR_R0, RC_GR_READ);
2559 emith_add_r_r(tmp3, tmp4);
2560 emit_memhandler_write(0, pc);
2561 goto end_op;
2562 }
2563 goto default_;
2564
2565 /////////////////////////////////////////////
2566 case 0x0e:
2567 // MOV #imm,Rn 1110nnnniiiiiiii
2568 emit_move_r_imm32(GET_Rn(), (u32)(signed int)(signed char)op);
2569 goto end_op;
2570
2571 default:
2572 default_:
2573 elprintf(EL_ANOMALY, "%csh2 drc: unhandled op %04x @ %08x",
2574 sh2->is_slave ? 's' : 'm', op, pc - 2);
2575 break;
2576 }
2577
2578end_op:
2579 rcache_unlock_all();
2580
2581 cycles += opd->cycles;
2582
2583 if (op_flags[i+1] & OF_DELAY_OP) {
2584 do_host_disasm(tcache_id);
2585 continue;
2586 }
2587
2588 // test irq?
2589 if (drcf.test_irq && !drcf.pending_branch_direct) {
2590 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2591 FLUSH_CYCLES(sr);
2592 if (!drcf.pending_branch_indirect)
2593 emit_move_r_imm32(SHR_PC, pc);
2594 rcache_flush();
2595 emith_call(sh2_drc_test_irq);
2596 drcf.test_irq = 0;
2597 }
2598
2599 // branch handling (with/without delay)
2600 if (drcf.pending_branch_direct)
2601 {
2602 struct op_data *opd_b =
2603 (op_flags[i] & OF_DELAY_OP) ? &ops[i-1] : opd;
2604 u32 target_pc = opd_b->imm;
2605 int cond = -1;
2606 void *target = NULL;
2607
2608 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2609 FLUSH_CYCLES(sr);
2610
2611 if (opd_b->op != OP_BRANCH)
2612 cond = (opd_b->op == OP_BRANCH_CF) ? DCOND_EQ : DCOND_NE;
2613 if (cond != -1) {
2614 int ctaken = (op_flags[i] & OF_DELAY_OP) ? 1 : 2;
2615
2616 if (delay_dep_fw & BITMASK1(SHR_T))
2617 emith_tst_r_imm(sr, T_save);
2618 else
2619 emith_tst_r_imm(sr, T);
2620
2621 emith_sub_r_imm_c(cond, sr, ctaken<<12);
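        // the cycle counter lives in the upper bits of the cached SR (see
        // sh2_execute), so subtracting ctaken<<12 conditionally charges the
        // extra 1-2 cycles a taken conditional branch costs over the
        // untaken path without disturbing the real flag bits.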
2622 }
2623 rcache_clean();
2624
2625#if LINK_BRANCHES
2626 if (find_in_array(branch_target_pc, branch_target_count, target_pc) >= 0)
2627 {
2628 // local branch
2629      // XXX: backward jumps could be linked right away here instead of patched later
2630 if (branch_patch_count < MAX_LOCAL_BRANCHES) {
2631 target = tcache_ptr;
2632 branch_patch_pc[branch_patch_count] = target_pc;
2633 branch_patch_ptr[branch_patch_count] = target;
2634 branch_patch_count++;
2635 }
2636 else
2637 dbg(1, "warning: too many local branches");
2638 }
2639
2640 if (target == NULL)
2641#endif
2642 {
2643 // can't resolve branch locally, make a block exit
2644 emit_move_r_imm32(SHR_PC, target_pc);
2645 rcache_clean();
2646
2647 target = dr_prepare_ext_branch(target_pc, sh2->is_slave, tcache_id);
2648 if (target == NULL)
2649 return NULL;
2650 }
2651
2652 if (cond != -1)
2653 emith_jump_cond_patchable(cond, target);
2654 else {
2655 emith_jump_patchable(target);
2656 rcache_invalidate();
2657 }
2658
2659 drcf.pending_branch_direct = 0;
2660 }
2661 else if (drcf.pending_branch_indirect) {
2662 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2663 FLUSH_CYCLES(sr);
2664 rcache_flush();
2665 emith_jump(sh2_drc_dispatcher);
2666 drcf.pending_branch_indirect = 0;
2667 }
2668
2669 do_host_disasm(tcache_id);
2670 }
2671
2672 tmp = rcache_get_reg(SHR_SR, RC_GR_RMW);
2673 FLUSH_CYCLES(tmp);
2674 rcache_flush();
2675
2676 // check the last op
2677 if (op_flags[i-1] & OF_DELAY_OP)
2678 opd = &ops[i-2];
2679 else
2680 opd = &ops[i-1];
2681
2682 if (opd->op != OP_BRANCH && opd->op != OP_BRANCH_R
2683 && opd->op != OP_BRANCH_RF && opd->op != OP_RTE)
2684 {
2685 void *target;
2686
2687 emit_move_r_imm32(SHR_PC, pc);
2688 rcache_flush();
2689
2690 target = dr_prepare_ext_branch(pc, sh2->is_slave, tcache_id);
2691 if (target == NULL)
2692 return NULL;
2693 emith_jump_patchable(target);
2694 }
2695
2696 // link local branches
2697 for (i = 0; i < branch_patch_count; i++) {
2698 void *target;
2699 int t;
2700 t = find_in_array(branch_target_pc, branch_target_count, branch_patch_pc[i]);
2701 target = branch_target_ptr[t];
2702 if (target == NULL) {
2703 // flush pc and go back to dispatcher (this should no longer happen)
2704 dbg(1, "stray branch to %08x %p", branch_patch_pc[i], tcache_ptr);
2705 target = tcache_ptr;
2706 emit_move_r_imm32(SHR_PC, branch_patch_pc[i]);
2707 rcache_flush();
2708 emith_jump(sh2_drc_dispatcher);
2709 }
2710 emith_jump_patch(branch_patch_ptr[i], target);
2711 }
2712
2713 // mark memory blocks as containing compiled code
2714 // override any overlay blocks as they become unreachable anyway
2715 if ((block->addr & 0xc7fc0000) == 0x06000000
2716 || (block->addr & 0xfffff000) == 0xc0000000)
2717 {
2718 u16 *drc_ram_blk = NULL;
2719 u32 addr, mask = 0, shift = 0;
2720
2721 if (tcache_id != 0) {
2722 // data array, BIOS
2723 drc_ram_blk = Pico32xMem->drcblk_da[sh2->is_slave];
2724 shift = SH2_DRCBLK_DA_SHIFT;
2725 mask = 0xfff;
2726 }
2727 else {
2728 // SDRAM
2729 drc_ram_blk = Pico32xMem->drcblk_ram;
2730 shift = SH2_DRCBLK_RAM_SHIFT;
2731 mask = 0x3ffff;
2732 }
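    // mapping sketch: each (1 << shift)-byte unit of the region has one
    // u16 entry, indexed by (addr & mask) >> shift; a non-zero entry marks
    // that unit as holding compiled code, so that stores there can be
    // routed to the SMC invalidation paths (sh2_drc_wcheck_ram/_da below).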
2733
2734 // mark recompiled insns
2735 drc_ram_blk[(base_pc & mask) >> shift] = 1;
2736 for (pc = base_pc; pc < end_pc; pc += 2)
2737 drc_ram_blk[(pc & mask) >> shift] = 1;
2738
2739 // mark literals
2740 for (i = 0; i < literal_addr_count; i++) {
2741 tmp = literal_addr[i];
2742 drc_ram_blk[(tmp & mask) >> shift] = 1;
2743 }
2744
2745 // add to invalidation lookup lists
2746 addr = base_pc & ~(INVAL_PAGE_SIZE - 1);
2747 for (; addr < end_literals; addr += INVAL_PAGE_SIZE) {
2748 i = (addr & mask) / INVAL_PAGE_SIZE;
2749 add_to_block_list(&inval_lookup[tcache_id][i], block);
2750 }
2751 }
2752
2753 tcache_ptrs[tcache_id] = tcache_ptr;
2754
2755 host_instructions_updated(block_entry_ptr, tcache_ptr);
2756
2757 do_host_disasm(tcache_id);
2758
2759 if (drcf.literals_disabled && literal_addr_count)
2760 dbg(1, "literals_disabled && literal_addr_count?");
2761 dbg(2, " block #%d,%d tcache %d/%d, insns %d -> %d %.3f",
2762 tcache_id, blkid_main,
2763 tcache_ptr - tcache_bases[tcache_id], tcache_sizes[tcache_id],
2764 insns_compiled, host_insn_count, (float)host_insn_count / insns_compiled);
2765 if ((sh2->pc & 0xc6000000) == 0x02000000) // ROM
2766 dbg(2, " hash collisions %d/%d", hash_collisions, block_counts[tcache_id]);
2767/*
2768 printf("~~~\n");
2769 tcache_dsm_ptrs[tcache_id] = block_entry_ptr;
2770 do_host_disasm(tcache_id);
2771 printf("~~~\n");
2772*/
2773
2774#if (DRC_DEBUG & 4)
2775 fflush(stdout);
2776#endif
2777
2778 return block_entry_ptr;
2779}
2780
2781static void sh2_generate_utils(void)
2782{
2783 int arg0, arg1, arg2, sr, tmp;
2784
2785 sh2_drc_write32 = p32x_sh2_write32;
2786 sh2_drc_read8 = p32x_sh2_read8;
2787 sh2_drc_read16 = p32x_sh2_read16;
2788 sh2_drc_read32 = p32x_sh2_read32;
2789
2790 host_arg2reg(arg0, 0);
2791 host_arg2reg(arg1, 1);
2792 host_arg2reg(arg2, 2);
2793 emith_move_r_r(arg0, arg0); // nop
2794
2795 // sh2_drc_exit(void)
2796 sh2_drc_exit = (void *)tcache_ptr;
2797 emit_do_static_regs(1, arg2);
2798 emith_sh2_drc_exit();
2799
2800 // sh2_drc_dispatcher(void)
2801 sh2_drc_dispatcher = (void *)tcache_ptr;
2802 sr = rcache_get_reg(SHR_SR, RC_GR_READ);
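  // cycles are kept in the top bits of SR, so (signed) SR < 0 means the
  // timeslice is used up; in that case control returns to the caller of
  // sh2_drc_entry() via sh2_drc_exit, otherwise the next block is looked
  // up by PC below.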
2803 emith_cmp_r_imm(sr, 0);
2804 emith_jump_cond(DCOND_LT, sh2_drc_exit);
2805 rcache_invalidate();
2806 emith_ctx_read(arg0, SHR_PC * 4);
2807 emith_ctx_read(arg1, offsetof(SH2, is_slave));
2808 emith_add_r_r_imm(arg2, CONTEXT_REG, offsetof(SH2, drc_tmp));
2809 emith_call(dr_lookup_block);
2810 emit_block_entry();
2811 // lookup failed, call sh2_translate()
2812 emith_move_r_r(arg0, CONTEXT_REG);
2813 emith_ctx_read(arg1, offsetof(SH2, drc_tmp)); // tcache_id
2814 emith_call(sh2_translate);
2815 emit_block_entry();
2816 // sh2_translate() failed, flush cache and retry
2817 emith_ctx_read(arg0, offsetof(SH2, drc_tmp));
2818 emith_call(flush_tcache);
2819 emith_move_r_r(arg0, CONTEXT_REG);
2820 emith_ctx_read(arg1, offsetof(SH2, drc_tmp));
2821 emith_call(sh2_translate);
2822 emit_block_entry();
2823 // XXX: can't translate, fail
2824 emith_call(dr_failure);
2825
2826 // sh2_drc_test_irq(void)
2827 // assumes it's called from main function (may jump to dispatcher)
2828 sh2_drc_test_irq = (void *)tcache_ptr;
2829 emith_ctx_read(arg1, offsetof(SH2, pending_level));
2830 sr = rcache_get_reg(SHR_SR, RC_GR_READ);
2831 emith_lsr(arg0, sr, I_SHIFT);
2832 emith_and_r_imm(arg0, 0x0f);
2833 emith_cmp_r_r(arg1, arg0); // pending_level > ((sr >> 4) & 0x0f)?
2834 EMITH_SJMP_START(DCOND_GT);
2835 emith_ret_c(DCOND_LE); // nope, return
2836 EMITH_SJMP_END(DCOND_GT);
2837 // adjust SP
2838 tmp = rcache_get_reg(SHR_SP, RC_GR_RMW);
2839 emith_sub_r_imm(tmp, 4*2);
2840 rcache_clean();
2841 // push SR
2842 tmp = rcache_get_reg_arg(0, SHR_SP);
2843 emith_add_r_imm(tmp, 4);
2844 tmp = rcache_get_reg_arg(1, SHR_SR);
2845 emith_clear_msb(tmp, tmp, 22);
2846 emith_move_r_r(arg2, CONTEXT_REG);
2847 emith_call(p32x_sh2_write32); // XXX: use sh2_drc_write32?
2848 rcache_invalidate();
2849 // push PC
2850 rcache_get_reg_arg(0, SHR_SP);
2851 emith_ctx_read(arg1, SHR_PC * 4);
2852 emith_move_r_r(arg2, CONTEXT_REG);
2853 emith_call(p32x_sh2_write32);
2854 rcache_invalidate();
2855 // update I, cycles, do callback
2856 emith_ctx_read(arg1, offsetof(SH2, pending_level));
2857 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2858 emith_bic_r_imm(sr, I);
2859 emith_or_r_r_lsl(sr, arg1, I_SHIFT);
2860 emith_sub_r_imm(sr, 13 << 12); // at least 13 cycles
2861 rcache_flush();
2862 emith_move_r_r(arg0, CONTEXT_REG);
2863 emith_call_ctx(offsetof(SH2, irq_callback)); // vector = sh2->irq_callback(sh2, level);
2864 // obtain new PC
2865 emith_lsl(arg0, arg0, 2);
2866 emith_ctx_read(arg1, SHR_VBR * 4);
2867 emith_add_r_r(arg0, arg1);
2868 emit_memhandler_read(2);
2869 emith_ctx_write(arg0, SHR_PC * 4);
2870#ifdef __i386__
2871 emith_add_r_imm(xSP, 4); // fix stack
2872#endif
2873 emith_jump(sh2_drc_dispatcher);
2874 rcache_invalidate();
2875
2876 // sh2_drc_entry(SH2 *sh2)
2877 sh2_drc_entry = (void *)tcache_ptr;
2878 emith_sh2_drc_entry();
2879 emith_move_r_r(CONTEXT_REG, arg0); // move ctx, arg0
2880 emit_do_static_regs(0, arg2);
2881 emith_call(sh2_drc_test_irq);
2882 emith_jump(sh2_drc_dispatcher);
2883
2884 // sh2_drc_write8(u32 a, u32 d)
2885 sh2_drc_write8 = (void *)tcache_ptr;
2886 emith_ctx_read(arg2, offsetof(SH2, write8_tab));
2887 emith_sh2_wcall(arg0, arg2);
2888
2889 // sh2_drc_write16(u32 a, u32 d)
2890 sh2_drc_write16 = (void *)tcache_ptr;
2891 emith_ctx_read(arg2, offsetof(SH2, write16_tab));
2892 emith_sh2_wcall(arg0, arg2);
2893
2894#ifdef PDB_NET
2895 // debug
2896 #define MAKE_READ_WRAPPER(func) { \
2897 void *tmp = (void *)tcache_ptr; \
2898 emith_push_ret(); \
2899 emith_call(func); \
2900 emith_ctx_read(arg2, offsetof(SH2, pdb_io_csum[0])); \
2901 emith_addf_r_r(arg2, arg0); \
2902 emith_ctx_write(arg2, offsetof(SH2, pdb_io_csum[0])); \
2903 emith_ctx_read(arg2, offsetof(SH2, pdb_io_csum[1])); \
2904 emith_adc_r_imm(arg2, 0x01000000); \
2905 emith_ctx_write(arg2, offsetof(SH2, pdb_io_csum[1])); \
2906 emith_pop_and_ret(); \
2907 func = tmp; \
2908 }
2909 #define MAKE_WRITE_WRAPPER(func) { \
2910 void *tmp = (void *)tcache_ptr; \
2911 emith_ctx_read(arg2, offsetof(SH2, pdb_io_csum[0])); \
2912 emith_addf_r_r(arg2, arg1); \
2913 emith_ctx_write(arg2, offsetof(SH2, pdb_io_csum[0])); \
2914 emith_ctx_read(arg2, offsetof(SH2, pdb_io_csum[1])); \
2915 emith_adc_r_imm(arg2, 0x01000000); \
2916 emith_ctx_write(arg2, offsetof(SH2, pdb_io_csum[1])); \
2917 emith_move_r_r(arg2, CONTEXT_REG); \
2918 emith_jump(func); \
2919 func = tmp; \
2920 }
2921
2922 MAKE_READ_WRAPPER(sh2_drc_read8);
2923 MAKE_READ_WRAPPER(sh2_drc_read16);
2924 MAKE_READ_WRAPPER(sh2_drc_read32);
2925 MAKE_WRITE_WRAPPER(sh2_drc_write8);
2926 MAKE_WRITE_WRAPPER(sh2_drc_write16);
2927 MAKE_WRITE_WRAPPER(sh2_drc_write32);
2928#if (DRC_DEBUG & 4)
2929 host_dasm_new_symbol(sh2_drc_read8);
2930 host_dasm_new_symbol(sh2_drc_read16);
2931 host_dasm_new_symbol(sh2_drc_read32);
2932 host_dasm_new_symbol(sh2_drc_write32);
2933#endif
2934#endif
2935
2936 rcache_invalidate();
2937#if (DRC_DEBUG & 4)
2938 host_dasm_new_symbol(sh2_drc_entry);
2939 host_dasm_new_symbol(sh2_drc_dispatcher);
2940 host_dasm_new_symbol(sh2_drc_exit);
2941 host_dasm_new_symbol(sh2_drc_test_irq);
2942 host_dasm_new_symbol(sh2_drc_write8);
2943 host_dasm_new_symbol(sh2_drc_write16);
2944#endif
2945}
2946
2947static void sh2_smc_rm_block_entry(struct block_desc *bd, int tcache_id, u32 ram_mask)
2948{
2949 struct block_link *bl, *bl_next, *bl_unresolved;
2950 u32 i, addr, end_addr;
2951 void *tmp;
2952
2953 dbg(2, " killing entry %08x-%08x-%08x, blkid %d,%d",
2954 bd->addr, bd->addr + bd->size_nolit, bd->addr + bd->size,
2955 tcache_id, bd - block_tables[tcache_id]);
2956 if (bd->addr == 0 || bd->entry_count == 0) {
2957 dbg(1, " killing dead block!? %08x", bd->addr);
2958 return;
2959 }
2960
2961 // remove from inval_lookup
2962 addr = bd->addr & ~(INVAL_PAGE_SIZE - 1);
2963 end_addr = bd->addr + bd->size;
2964 for (; addr < end_addr; addr += INVAL_PAGE_SIZE) {
2965 i = (addr & ram_mask) / INVAL_PAGE_SIZE;
2966 rm_from_block_list(&inval_lookup[tcache_id][i], bd);
2967 }
2968
2969 tmp = tcache_ptr;
2970 bl_unresolved = unresolved_links[tcache_id];
2971
2972 // remove from hash table, make incoming links unresolved
2973 // XXX: maybe patch branches w/flush instead?
2974 for (i = 0; i < bd->entry_count; i++) {
2975 rm_from_hashlist(&bd->entryp[i], tcache_id);
2976
2977 // since we never reuse tcache space of dead blocks,
2978 // insert jump to dispatcher for blocks that are linked to this
2979 tcache_ptr = bd->entryp[i].tcache_ptr;
2980 emit_move_r_imm32(SHR_PC, bd->entryp[i].pc);
2981 rcache_flush();
2982 emith_jump(sh2_drc_dispatcher);
2983
2984 host_instructions_updated(bd->entryp[i].tcache_ptr, tcache_ptr);
2985
2986 for (bl = bd->entryp[i].links; bl != NULL; ) {
2987 bl_next = bl->next;
2988 bl->next = bl_unresolved;
2989 bl_unresolved = bl;
2990 bl = bl_next;
2991 }
2992 }
2993
2994 tcache_ptr = tmp;
2995 unresolved_links[tcache_id] = bl_unresolved;
2996
2997 bd->addr = bd->size = bd->size_nolit = 0;
2998 bd->entry_count = 0;
2999}
3000
3001static void sh2_smc_rm_block(u32 a, u16 *drc_ram_blk, int tcache_id, u32 shift, u32 mask)
3002{
3003 struct block_list **blist = NULL, *entry;
3004 u32 from = ~0, to = 0, end_addr, taddr, i;
3005 struct block_desc *block;
3006
3007 blist = &inval_lookup[tcache_id][(a & mask) / INVAL_PAGE_SIZE];
3008 entry = *blist;
3009 while (entry != NULL) {
3010 block = entry->block;
3011 end_addr = block->addr + block->size;
3012 if (block->addr <= a && a < end_addr) {
3013 // get addr range that includes all removed blocks
3014 if (from > block->addr)
3015 from = block->addr;
3016 if (to < end_addr)
3017 to = end_addr;
3018
3019 sh2_smc_rm_block_entry(block, tcache_id, mask);
3020 if (a >= block->addr + block->size_nolit)
3021 literal_disabled_frames = 3;
3022
3023 // entry lost, restart search
3024 entry = *blist;
3025 continue;
3026 }
3027 entry = entry->next;
3028 }
3029
3030 if (from >= to)
3031 return;
3032
3033 // update range around a to match latest state
3034 from &= ~(INVAL_PAGE_SIZE - 1);
3035 to |= (INVAL_PAGE_SIZE - 1);
3036 for (taddr = from; taddr < to; taddr += INVAL_PAGE_SIZE) {
3037 i = (taddr & mask) / INVAL_PAGE_SIZE;
3038 entry = inval_lookup[tcache_id][i];
3039
3040 for (; entry != NULL; entry = entry->next) {
3041 block = entry->block;
3042
3043 if (block->addr > a) {
3044 if (to > block->addr)
3045 to = block->addr;
3046 }
3047 else {
3048 end_addr = block->addr + block->size;
3049 if (from < end_addr)
3050 from = end_addr;
3051 }
3052 }
3053 }
3054
3055 // clear code marks
3056 if (from < to) {
3057 u16 *p = drc_ram_blk + ((from & mask) >> shift);
3058 memset(p, 0, (to - from) >> (shift - 1));
3059 }
3060}
3061
3062void sh2_drc_wcheck_ram(unsigned int a, int val, int cpuid)
3063{
3064 dbg(2, "%csh2 smc check @%08x", cpuid ? 's' : 'm', a);
3065 sh2_smc_rm_block(a, Pico32xMem->drcblk_ram, 0, SH2_DRCBLK_RAM_SHIFT, 0x3ffff);
3066}
3067
3068void sh2_drc_wcheck_da(unsigned int a, int val, int cpuid)
3069{
3070 dbg(2, "%csh2 smc check @%08x", cpuid ? 's' : 'm', a);
3071 sh2_smc_rm_block(a, Pico32xMem->drcblk_da[cpuid],
3072 1 + cpuid, SH2_DRCBLK_DA_SHIFT, 0xfff);
3073}
3074
3075int sh2_execute(SH2 *sh2c, int cycles)
3076{
3077 int ret_cycles;
3078
3079 sh2c->cycles_timeslice = cycles;
3080
3081 // cycles are kept in SHR_SR unused bits (upper 20)
3082 // bit11 contains T saved for delay slot
3083 // others are usual SH2 flags
3084 sh2c->sr &= 0x3f3;
3085 sh2c->sr |= cycles << 12;
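  // e.g. cycles = 1000 starts the counter at 1000 << 12; translated code
  // subtracts insn_cycles << 12 as it runs and the dispatcher exits once
  // SR goes negative, leaving the low 12 bits for the real flags and the
  // saved T in bit 11.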
3086 sh2_drc_entry(sh2c);
3087
3088 // TODO: irq cycles
3089 ret_cycles = (signed int)sh2c->sr >> 12;
3090 if (ret_cycles > 0)
3091 dbg(1, "warning: drc returned with cycles: %d", ret_cycles);
3092
3093 return sh2c->cycles_timeslice - ret_cycles;
3094}
3095
3096#if (DRC_DEBUG & 2)
3097void block_stats(void)
3098{
3099 int c, b, i, total = 0;
3100
3101 printf("block stats:\n");
3102 for (b = 0; b < ARRAY_SIZE(block_tables); b++)
3103 for (i = 0; i < block_counts[b]; i++)
3104 if (block_tables[b][i].addr != 0)
3105 total += block_tables[b][i].refcount;
3106
3107 for (c = 0; c < 10; c++) {
3108 struct block_desc *blk, *maxb = NULL;
3109 int max = 0;
3110 for (b = 0; b < ARRAY_SIZE(block_tables); b++) {
3111 for (i = 0; i < block_counts[b]; i++) {
3112 blk = &block_tables[b][i];
3113 if (blk->addr != 0 && blk->refcount > max) {
3114 max = blk->refcount;
3115 maxb = blk;
3116 }
3117 }
3118 }
3119 if (maxb == NULL)
3120 break;
3121 printf("%08x %9d %2.3f%%\n", maxb->addr, maxb->refcount,
3122 (double)maxb->refcount / total * 100.0);
3123 maxb->refcount = 0;
3124 }
3125
3126 for (b = 0; b < ARRAY_SIZE(block_tables); b++)
3127 for (i = 0; i < block_counts[b]; i++)
3128 block_tables[b][i].refcount = 0;
3129}
3130#else
3131#define block_stats()
3132#endif
3133
3134void sh2_drc_flush_all(void)
3135{
3136 block_stats();
3137 flush_tcache(0);
3138 flush_tcache(1);
3139 flush_tcache(2);
3140}
3141
3142void sh2_drc_mem_setup(SH2 *sh2)
3143{
3144 // fill the convenience pointers
3145 sh2->p_bios = sh2->is_slave ? Pico32xMem->sh2_rom_s : Pico32xMem->sh2_rom_m;
3146 sh2->p_da = Pico32xMem->data_array[sh2->is_slave];
3147 sh2->p_sdram = Pico32xMem->sdram;
3148 sh2->p_rom = Pico.rom;
3149}
3150
3151void sh2_drc_frame(void)
3152{
3153 if (literal_disabled_frames > 0)
3154 literal_disabled_frames--;
3155}
3156
3157int sh2_drc_init(SH2 *sh2)
3158{
3159 int i;
3160
3161 if (block_tables[0] == NULL)
3162 {
3163 for (i = 0; i < TCACHE_BUFFERS; i++) {
3164 block_tables[i] = calloc(block_max_counts[i], sizeof(*block_tables[0]));
3165 if (block_tables[i] == NULL)
3166 goto fail;
3167 // max 2 block links (exits) per block
3168 block_link_pool[i] = calloc(block_link_pool_max_counts[i],
3169 sizeof(*block_link_pool[0]));
3170 if (block_link_pool[i] == NULL)
3171 goto fail;
3172
3173 inval_lookup[i] = calloc(ram_sizes[i] / INVAL_PAGE_SIZE,
3174 sizeof(inval_lookup[0]));
3175 if (inval_lookup[i] == NULL)
3176 goto fail;
3177
3178 hash_tables[i] = calloc(hash_table_sizes[i], sizeof(*hash_tables[0]));
3179 if (hash_tables[i] == NULL)
3180 goto fail;
3181 }
3182 memset(block_counts, 0, sizeof(block_counts));
3183 memset(block_link_pool_counts, 0, sizeof(block_link_pool_counts));
3184
3185 drc_cmn_init();
3186 tcache_ptr = tcache;
3187 sh2_generate_utils();
3188 host_instructions_updated(tcache, tcache_ptr);
3189
3190 tcache_bases[0] = tcache_ptrs[0] = tcache_ptr;
3191 for (i = 1; i < ARRAY_SIZE(tcache_bases); i++)
3192 tcache_bases[i] = tcache_ptrs[i] = tcache_bases[i - 1] + tcache_sizes[i - 1];
3193
3194#if (DRC_DEBUG & 4)
3195 for (i = 0; i < ARRAY_SIZE(block_tables); i++)
3196 tcache_dsm_ptrs[i] = tcache_bases[i];
3197 // disasm the utils
3198 tcache_dsm_ptrs[0] = tcache;
3199 do_host_disasm(0);
3200#endif
3201#if (DRC_DEBUG & 1)
3202 hash_collisions = 0;
3203#endif
3204 }
3205
3206 return 0;
3207
3208fail:
3209 sh2_drc_finish(sh2);
3210 return -1;
3211}
3212
3213void sh2_drc_finish(SH2 *sh2)
3214{
3215 int i;
3216
3217 if (block_tables[0] == NULL)
3218 return;
3219
3220 sh2_drc_flush_all();
3221
3222 for (i = 0; i < TCACHE_BUFFERS; i++) {
3223#if (DRC_DEBUG & 4)
3224 printf("~~~ tcache %d\n", i);
3225 tcache_dsm_ptrs[i] = tcache_bases[i];
3226 tcache_ptr = tcache_ptrs[i];
3227 do_host_disasm(i);
3228#endif
3229
3230 if (block_tables[i] != NULL)
3231 free(block_tables[i]);
3232 block_tables[i] = NULL;
3233    if (block_link_pool[i] != NULL)
3234      free(block_link_pool[i]);
3235 block_link_pool[i] = NULL;
3236
3237    if (inval_lookup[i] != NULL)
3238      free(inval_lookup[i]);
3239 inval_lookup[i] = NULL;
3240
3241 if (hash_tables[i] != NULL) {
3242 free(hash_tables[i]);
3243 hash_tables[i] = NULL;
3244 }
3245 }
3246
3247 drc_cmn_cleanup();
3248}
3249
3250#endif /* DRC_SH2 */
3251
3252static void *dr_get_pc_base(u32 pc, int is_slave)
3253{
3254 void *ret = NULL;
3255 u32 mask = 0;
3256
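  // the pointer returned is biased by -(pc & ~mask) so callers can index
  // it with the full PC: e.g. for SDRAM (mask 0x03ffff), pc = 0x06000100
  // gives base[pc / 2] == the u16 at sdram + 0x100, which is what
  // FETCH_OP()/FETCH32() rely on.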
3257 if ((pc & ~0x7ff) == 0) {
3258 // BIOS
3259 ret = is_slave ? Pico32xMem->sh2_rom_s : Pico32xMem->sh2_rom_m;
3260 mask = 0x7ff;
3261 }
3262 else if ((pc & 0xfffff000) == 0xc0000000) {
3263 // data array
3264 ret = Pico32xMem->data_array[is_slave];
3265 mask = 0xfff;
3266 }
3267 else if ((pc & 0xc6000000) == 0x06000000) {
3268 // SDRAM
3269 ret = Pico32xMem->sdram;
3270 mask = 0x03ffff;
3271 }
3272 else if ((pc & 0xc6000000) == 0x02000000) {
3273 // ROM
3274 ret = Pico.rom;
3275 mask = 0x3fffff;
3276 }
3277
3278 if (ret == NULL)
3279 return (void *)-1; // NULL is valid value
3280
3281 return (char *)ret - (pc & ~mask);
3282}
3283
3284void scan_block(u32 base_pc, int is_slave, u8 *op_flags, u32 *end_pc_out,
3285 u32 *end_literals_out)
3286{
3287 u16 *dr_pc_base;
3288 u32 pc, op, tmp;
3289 u32 end_pc, end_literals = 0;
3290 u32 lowest_mova = 0;
3291 struct op_data *opd;
3292 int next_is_delay = 0;
3293 int end_block = 0;
3294 int i, i_end;
3295
3296 memset(op_flags, 0, BLOCK_INSN_LIMIT);
3297
3298 dr_pc_base = dr_get_pc_base(base_pc, is_slave);
3299
3300 // 1st pass: disassemble
3301 for (i = 0, pc = base_pc; ; i++, pc += 2) {
3302 // we need an ops[] entry after the last one initialized,
3303 // so do it before end_block checks
3304 opd = &ops[i];
3305 opd->op = OP_UNHANDLED;
3306 opd->rm = -1;
3307 opd->source = opd->dest = 0;
3308 opd->cycles = 1;
3309 opd->imm = 0;
3310
3311 if (next_is_delay) {
3312 op_flags[i] |= OF_DELAY_OP;
3313 next_is_delay = 0;
3314 }
3315 else if (end_block || i >= BLOCK_INSN_LIMIT - 2)
3316 break;
3317
3318 op = FETCH_OP(pc);
3319 switch ((op & 0xf000) >> 12)
3320 {
3321 /////////////////////////////////////////////
3322 case 0x00:
3323 switch (op & 0x0f)
3324 {
3325 case 0x02:
3326 switch (GET_Fx())
3327 {
3328 case 0: // STC SR,Rn 0000nnnn00000010
3329 tmp = SHR_SR;
3330 break;
3331 case 1: // STC GBR,Rn 0000nnnn00010010
3332 tmp = SHR_GBR;
3333 break;
3334 case 2: // STC VBR,Rn 0000nnnn00100010
3335 tmp = SHR_VBR;
3336 break;
3337 default:
3338 goto undefined;
3339 }
3340 opd->op = OP_MOVE;
3341 opd->source = BITMASK1(tmp);
3342 opd->dest = BITMASK1(GET_Rn());
3343 break;
3344 case 0x03:
3345 CHECK_UNHANDLED_BITS(0xd0, undefined);
3346 // BRAF Rm 0000mmmm00100011
3347 // BSRF Rm 0000mmmm00000011
3348 opd->op = OP_BRANCH_RF;
3349 opd->rm = GET_Rn();
3350 opd->source = BITMASK1(opd->rm);
3351 opd->dest = BITMASK1(SHR_PC);
3352 if (!(op & 0x20))
3353 opd->dest |= BITMASK1(SHR_PR);
3354 opd->cycles = 2;
3355 next_is_delay = 1;
3356 end_block = 1;
3357 break;
3358 case 0x04: // MOV.B Rm,@(R0,Rn) 0000nnnnmmmm0100
3359 case 0x05: // MOV.W Rm,@(R0,Rn) 0000nnnnmmmm0101
3360 case 0x06: // MOV.L Rm,@(R0,Rn) 0000nnnnmmmm0110
3361 opd->source = BITMASK3(GET_Rm(), SHR_R0, GET_Rn());
3362 break;
3363 case 0x07:
3364 // MUL.L Rm,Rn 0000nnnnmmmm0111
3365 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3366 opd->dest = BITMASK1(SHR_MACL);
3367 opd->cycles = 2;
3368 break;
3369 case 0x08:
3370 CHECK_UNHANDLED_BITS(0xf00, undefined);
3371 switch (GET_Fx())
3372 {
3373 case 0: // CLRT 0000000000001000
3374 opd->op = OP_SETCLRT;
3375 opd->dest = BITMASK1(SHR_T);
3376 opd->imm = 0;
3377 break;
3378 case 1: // SETT 0000000000011000
3379 opd->op = OP_SETCLRT;
3380 opd->dest = BITMASK1(SHR_T);
3381 opd->imm = 1;
3382 break;
3383 case 2: // CLRMAC 0000000000101000
3384 opd->dest = BITMASK3(SHR_T, SHR_MACL, SHR_MACH);
3385 break;
3386 default:
3387 goto undefined;
3388 }
3389 break;
3390 case 0x09:
3391 switch (GET_Fx())
3392 {
3393 case 0: // NOP 0000000000001001
3394 CHECK_UNHANDLED_BITS(0xf00, undefined);
3395 break;
3396 case 1: // DIV0U 0000000000011001
3397 CHECK_UNHANDLED_BITS(0xf00, undefined);
3398 opd->dest = BITMASK2(SHR_SR, SHR_T);
3399 break;
3400 case 2: // MOVT Rn 0000nnnn00101001
3401 opd->source = BITMASK1(SHR_T);
3402 opd->dest = BITMASK1(GET_Rn());
3403 break;
3404 default:
3405 goto undefined;
3406 }
3407 break;
3408 case 0x0a:
3409 switch (GET_Fx())
3410 {
3411 case 0: // STS MACH,Rn 0000nnnn00001010
3412 tmp = SHR_MACH;
3413 break;
3414 case 1: // STS MACL,Rn 0000nnnn00011010
3415 tmp = SHR_MACL;
3416 break;
3417 case 2: // STS PR,Rn 0000nnnn00101010
3418 tmp = SHR_PR;
3419 break;
3420 default:
3421 goto undefined;
3422 }
3423 opd->op = OP_MOVE;
3424 opd->source = BITMASK1(tmp);
3425 opd->dest = BITMASK1(GET_Rn());
3426 break;
3427 case 0x0b:
3428 CHECK_UNHANDLED_BITS(0xf00, undefined);
3429 switch (GET_Fx())
3430 {
3431 case 0: // RTS 0000000000001011
3432 opd->op = OP_BRANCH_R;
3433 opd->rm = SHR_PR;
3434 opd->source = BITMASK1(opd->rm);
3435 opd->dest = BITMASK1(SHR_PC);
3436 opd->cycles = 2;
3437 next_is_delay = 1;
3438 end_block = 1;
3439 break;
3440 case 1: // SLEEP 0000000000011011
3441 opd->op = OP_SLEEP;
3442 end_block = 1;
3443 break;
3444 case 2: // RTE 0000000000101011
3445 opd->op = OP_RTE;
3446 opd->source = BITMASK1(SHR_SP);
3447 opd->dest = BITMASK2(SHR_SR, SHR_PC);
3448 opd->cycles = 4;
3449 next_is_delay = 1;
3450 end_block = 1;
3451 break;
3452 default:
3453 goto undefined;
3454 }
3455 break;
3456 case 0x0c: // MOV.B @(R0,Rm),Rn 0000nnnnmmmm1100
3457 case 0x0d: // MOV.W @(R0,Rm),Rn 0000nnnnmmmm1101
3458 case 0x0e: // MOV.L @(R0,Rm),Rn 0000nnnnmmmm1110
3459 opd->source = BITMASK2(GET_Rm(), SHR_R0);
3460 opd->dest = BITMASK1(GET_Rn());
3461 break;
3462 case 0x0f: // MAC.L @Rm+,@Rn+ 0000nnnnmmmm1111
3463 opd->source = BITMASK5(GET_Rm(), GET_Rn(), SHR_SR, SHR_MACL, SHR_MACH);
3464 opd->dest = BITMASK4(GET_Rm(), GET_Rn(), SHR_MACL, SHR_MACH);
3465 opd->cycles = 3;
3466 break;
3467 default:
3468 goto undefined;
3469 }
3470 break;
3471
3472 /////////////////////////////////////////////
3473 case 0x01:
3474 // MOV.L Rm,@(disp,Rn) 0001nnnnmmmmdddd
3475      opd->source = BITMASK2(GET_Rm(), GET_Rn());
3477 opd->imm = (op & 0x0f) * 4;
3478 break;
3479
3480 /////////////////////////////////////////////
3481 case 0x02:
3482 switch (op & 0x0f)
3483 {
3484 case 0x00: // MOV.B Rm,@Rn 0010nnnnmmmm0000
3485 case 0x01: // MOV.W Rm,@Rn 0010nnnnmmmm0001
3486 case 0x02: // MOV.L Rm,@Rn 0010nnnnmmmm0010
3487        opd->source = BITMASK2(GET_Rm(), GET_Rn());
3489 break;
3490 case 0x04: // MOV.B Rm,@-Rn 0010nnnnmmmm0100
3491 case 0x05: // MOV.W Rm,@-Rn 0010nnnnmmmm0101
3492 case 0x06: // MOV.L Rm,@-Rn 0010nnnnmmmm0110
3493 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3494 opd->dest = BITMASK1(GET_Rn());
3495 break;
3496 case 0x07: // DIV0S Rm,Rn 0010nnnnmmmm0111
3497 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3498 opd->dest = BITMASK1(SHR_SR);
3499 break;
3500 case 0x08: // TST Rm,Rn 0010nnnnmmmm1000
3501 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3502 opd->dest = BITMASK1(SHR_T);
3503 break;
3504 case 0x09: // AND Rm,Rn 0010nnnnmmmm1001
3505 case 0x0a: // XOR Rm,Rn 0010nnnnmmmm1010
3506 case 0x0b: // OR Rm,Rn 0010nnnnmmmm1011
3507 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3508 opd->dest = BITMASK1(GET_Rn());
3509 break;
3510 case 0x0c: // CMP/STR Rm,Rn 0010nnnnmmmm1100
3511 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3512 opd->dest = BITMASK1(SHR_T);
3513 break;
3514 case 0x0d: // XTRCT Rm,Rn 0010nnnnmmmm1101
3515 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3516 opd->dest = BITMASK1(GET_Rn());
3517 break;
3518 case 0x0e: // MULU.W Rm,Rn 0010nnnnmmmm1110
3519 case 0x0f: // MULS.W Rm,Rn 0010nnnnmmmm1111
3520 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3521 opd->dest = BITMASK1(SHR_MACL);
3522 break;
3523 default:
3524 goto undefined;
3525 }
3526 break;
3527
3528 /////////////////////////////////////////////
3529 case 0x03:
3530 switch (op & 0x0f)
3531 {
3532 case 0x00: // CMP/EQ Rm,Rn 0011nnnnmmmm0000
3533 case 0x02: // CMP/HS Rm,Rn 0011nnnnmmmm0010
3534 case 0x03: // CMP/GE Rm,Rn 0011nnnnmmmm0011
3535 case 0x06: // CMP/HI Rm,Rn 0011nnnnmmmm0110
3536 case 0x07: // CMP/GT Rm,Rn 0011nnnnmmmm0111
3537 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3538 opd->dest = BITMASK1(SHR_T);
3539 break;
3540 case 0x04: // DIV1 Rm,Rn 0011nnnnmmmm0100
3541 opd->source = BITMASK3(GET_Rm(), GET_Rn(), SHR_SR);
3542 opd->dest = BITMASK2(GET_Rn(), SHR_SR);
3543 break;
3544 case 0x05: // DMULU.L Rm,Rn 0011nnnnmmmm0101
3545 case 0x0d: // DMULS.L Rm,Rn 0011nnnnmmmm1101
3546 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3547 opd->dest = BITMASK2(SHR_MACL, SHR_MACH);
3548 opd->cycles = 2;
3549 break;
3550 case 0x08: // SUB Rm,Rn 0011nnnnmmmm1000
3551 case 0x0c: // ADD Rm,Rn 0011nnnnmmmm1100
3552 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3553 opd->dest = BITMASK1(GET_Rn());
3554 break;
3555 case 0x0a: // SUBC Rm,Rn 0011nnnnmmmm1010
3556 case 0x0e: // ADDC Rm,Rn 0011nnnnmmmm1110
3557 opd->source = BITMASK3(GET_Rm(), GET_Rn(), SHR_T);
3558 opd->dest = BITMASK2(GET_Rn(), SHR_T);
3559 break;
3560 case 0x0b: // SUBV Rm,Rn 0011nnnnmmmm1011
3561 case 0x0f: // ADDV Rm,Rn 0011nnnnmmmm1111
3562 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3563 opd->dest = BITMASK2(GET_Rn(), SHR_T);
3564 break;
3565 default:
3566 goto undefined;
3567 }
3568 break;
3569
3570 /////////////////////////////////////////////
3571 case 0x04:
3572 switch (op & 0x0f)
3573 {
3574 case 0x00:
3575 switch (GET_Fx())
3576 {
3577 case 0: // SHLL Rn 0100nnnn00000000
3578 case 2: // SHAL Rn 0100nnnn00100000
3579 opd->source = BITMASK1(GET_Rn());
3580 opd->dest = BITMASK2(GET_Rn(), SHR_T);
3581 break;
3582 case 1: // DT Rn 0100nnnn00010000
3583 opd->source = BITMASK1(GET_Rn());
3584 opd->dest = BITMASK2(GET_Rn(), SHR_T);
3585 break;
3586 default:
3587 goto undefined;
3588 }
3589 break;
3590 case 0x01:
3591 switch (GET_Fx())
3592 {
3593 case 0: // SHLR Rn 0100nnnn00000001
3594 case 2: // SHAR Rn 0100nnnn00100001
3595 opd->source = BITMASK1(GET_Rn());
3596 opd->dest = BITMASK2(GET_Rn(), SHR_T);
3597 break;
3598 case 1: // CMP/PZ Rn 0100nnnn00010001
3599 opd->source = BITMASK1(GET_Rn());
3600 opd->dest = BITMASK1(SHR_T);
3601 break;
3602 default:
3603 goto undefined;
3604 }
3605 break;
3606 case 0x02:
3607 case 0x03:
3608 switch (op & 0x3f)
3609 {
3610 case 0x02: // STS.L MACH,@-Rn 0100nnnn00000010
3611 tmp = SHR_MACH;
3612 break;
3613 case 0x12: // STS.L MACL,@-Rn 0100nnnn00010010
3614 tmp = SHR_MACL;
3615 break;
3616 case 0x22: // STS.L PR,@-Rn 0100nnnn00100010
3617 tmp = SHR_PR;
3618 break;
3619 case 0x03: // STC.L SR,@-Rn 0100nnnn00000011
3620 tmp = SHR_SR;
3621 opd->cycles = 2;
3622 break;
3623 case 0x13: // STC.L GBR,@-Rn 0100nnnn00010011
3624 tmp = SHR_GBR;
3625 opd->cycles = 2;
3626 break;
3627 case 0x23: // STC.L VBR,@-Rn 0100nnnn00100011
3628 tmp = SHR_VBR;
3629 opd->cycles = 2;
3630 break;
3631 default:
3632 goto undefined;
3633 }
3634 opd->source = BITMASK2(GET_Rn(), tmp);
3635 opd->dest = BITMASK1(GET_Rn());
3636 break;
3637 case 0x04:
3638 case 0x05:
3639 switch (op & 0x3f)
3640 {
3641 case 0x04: // ROTL Rn 0100nnnn00000100
3642 case 0x05: // ROTR Rn 0100nnnn00000101
3643 opd->source = BITMASK1(GET_Rn());
3644 opd->dest = BITMASK2(GET_Rn(), SHR_T);
3645 break;
3646 case 0x24: // ROTCL Rn 0100nnnn00100100
3647 case 0x25: // ROTCR Rn 0100nnnn00100101
3648 opd->source = BITMASK2(GET_Rn(), SHR_T);
3649 opd->dest = BITMASK2(GET_Rn(), SHR_T);
3650 break;
3651 case 0x15: // CMP/PL Rn 0100nnnn00010101
3652 opd->source = BITMASK1(GET_Rn());
3653 opd->dest = BITMASK1(SHR_T);
3654 break;
3655 default:
3656 goto undefined;
3657 }
3658 break;
3659 case 0x06:
3660 case 0x07:
3661 switch (op & 0x3f)
3662 {
3663 case 0x06: // LDS.L @Rm+,MACH 0100mmmm00000110
3664 tmp = SHR_MACH;
3665 break;
3666 case 0x16: // LDS.L @Rm+,MACL 0100mmmm00010110
3667 tmp = SHR_MACL;
3668 break;
3669 case 0x26: // LDS.L @Rm+,PR 0100mmmm00100110
3670 tmp = SHR_PR;
3671 break;
3672 case 0x07: // LDC.L @Rm+,SR 0100mmmm00000111
3673 tmp = SHR_SR;
3674 opd->cycles = 3;
3675 break;
3676 case 0x17: // LDC.L @Rm+,GBR 0100mmmm00010111
3677 tmp = SHR_GBR;
3678 opd->cycles = 3;
3679 break;
3680 case 0x27: // LDC.L @Rm+,VBR 0100mmmm00100111
3681 tmp = SHR_VBR;
3682 opd->cycles = 3;
3683 break;
3684 default:
3685 goto undefined;
3686 }
3687 opd->source = BITMASK1(GET_Rn());
3688 opd->dest = BITMASK2(GET_Rn(), tmp);
3689 break;
3690 case 0x08:
3691 case 0x09:
3692 switch (GET_Fx())
3693 {
3694 case 0:
3695 // SHLL2 Rn 0100nnnn00001000
3696 // SHLR2 Rn 0100nnnn00001001
3697 break;
3698 case 1:
3699 // SHLL8 Rn 0100nnnn00011000
3700 // SHLR8 Rn 0100nnnn00011001
3701 break;
3702 case 2:
3703 // SHLL16 Rn 0100nnnn00101000
3704 // SHLR16 Rn 0100nnnn00101001
3705 break;
3706 default:
3707 goto undefined;
3708 }
3709 opd->source = BITMASK1(GET_Rn());
3710 opd->dest = BITMASK1(GET_Rn());
3711 break;
3712 case 0x0a:
3713 switch (GET_Fx())
3714 {
3715 case 0: // LDS Rm,MACH 0100mmmm00001010
3716 tmp = SHR_MACH;
3717 break;
3718 case 1: // LDS Rm,MACL 0100mmmm00011010
3719 tmp = SHR_MACL;
3720 break;
3721 case 2: // LDS Rm,PR 0100mmmm00101010
3722 tmp = SHR_PR;
3723 break;
3724 default:
3725 goto undefined;
3726 }
3727 opd->op = OP_MOVE;
3728 opd->source = BITMASK1(GET_Rn());
3729 opd->dest = BITMASK1(tmp);
3730 break;
3731 case 0x0b:
3732 switch (GET_Fx())
3733 {
3734 case 0: // JSR @Rm 0100mmmm00001011
3735 opd->dest = BITMASK1(SHR_PR);
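          // fallthrough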
3736 case 2: // JMP @Rm 0100mmmm00101011
3737 opd->op = OP_BRANCH_R;
3738 opd->rm = GET_Rn();
3739 opd->source = BITMASK1(opd->rm);
3740 opd->dest |= BITMASK1(SHR_PC);
3741 opd->cycles = 2;
3742 next_is_delay = 1;
3743 end_block = 1;
3744 break;
3745 case 1: // TAS.B @Rn 0100nnnn00011011
3746 opd->source = BITMASK1(GET_Rn());
3747 opd->dest = BITMASK1(SHR_T);
3748 opd->cycles = 4;
3749 break;
3750 default:
3751 goto undefined;
3752 }
3753 break;
3754 case 0x0e:
3755 switch (GET_Fx())
3756 {
3757 case 0: // LDC Rm,SR 0100mmmm00001110
3758 tmp = SHR_SR;
3759 break;
3760 case 1: // LDC Rm,GBR 0100mmmm00011110
3761 tmp = SHR_GBR;
3762 break;
3763 case 2: // LDC Rm,VBR 0100mmmm00101110
3764 tmp = SHR_VBR;
3765 break;
3766 default:
3767 goto undefined;
3768 }
3769 opd->op = OP_MOVE;
3770 opd->source = BITMASK1(GET_Rn());
3771 opd->dest = BITMASK1(tmp);
3772 break;
3773 case 0x0f:
3774 // MAC.W @Rm+,@Rn+ 0100nnnnmmmm1111
3775 opd->source = BITMASK5(GET_Rm(), GET_Rn(), SHR_SR, SHR_MACL, SHR_MACH);
3776 opd->dest = BITMASK4(GET_Rm(), GET_Rn(), SHR_MACL, SHR_MACH);
3777 opd->cycles = 3;
3778 break;
3779 default:
3780 goto undefined;
3781 }
3782 break;
3783
3784 /////////////////////////////////////////////
3785 case 0x05:
3786 // MOV.L @(disp,Rm),Rn 0101nnnnmmmmdddd
3787 opd->source = BITMASK1(GET_Rm());
3788 opd->dest = BITMASK1(GET_Rn());
3789 opd->imm = (op & 0x0f) * 4;
3790 break;
3791
3792 /////////////////////////////////////////////
3793 case 0x06:
3794 switch (op & 0x0f)
3795 {
3796 case 0x04: // MOV.B @Rm+,Rn 0110nnnnmmmm0100
3797 case 0x05: // MOV.W @Rm+,Rn 0110nnnnmmmm0101
3798 case 0x06: // MOV.L @Rm+,Rn 0110nnnnmmmm0110
3799 opd->dest = BITMASK1(GET_Rm());
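        // fallthrough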
3800 case 0x00: // MOV.B @Rm,Rn 0110nnnnmmmm0000
3801 case 0x01: // MOV.W @Rm,Rn 0110nnnnmmmm0001
3802 case 0x02: // MOV.L @Rm,Rn 0110nnnnmmmm0010
3803 opd->source = BITMASK1(GET_Rm());
3804 opd->dest |= BITMASK1(GET_Rn());
3805 break;
3806 case 0x0a: // NEGC Rm,Rn 0110nnnnmmmm1010
3807 opd->source = BITMASK2(GET_Rm(), SHR_T);
3808 opd->dest = BITMASK2(GET_Rn(), SHR_T);
3809 break;
3810 case 0x03: // MOV Rm,Rn 0110nnnnmmmm0011
3811 opd->op = OP_MOVE;
3812 goto arith_rmrn;
3813 case 0x07: // NOT Rm,Rn 0110nnnnmmmm0111
3814 case 0x08: // SWAP.B Rm,Rn 0110nnnnmmmm1000
3815 case 0x09: // SWAP.W Rm,Rn 0110nnnnmmmm1001
3816 case 0x0b: // NEG Rm,Rn 0110nnnnmmmm1011
3817 case 0x0c: // EXTU.B Rm,Rn 0110nnnnmmmm1100
3818 case 0x0d: // EXTU.W Rm,Rn 0110nnnnmmmm1101
3819 case 0x0e: // EXTS.B Rm,Rn 0110nnnnmmmm1110
3820 case 0x0f: // EXTS.W Rm,Rn 0110nnnnmmmm1111
3821 arith_rmrn:
3822 opd->source = BITMASK1(GET_Rm());
3823 opd->dest = BITMASK1(GET_Rn());
3824 break;
3825 }
3826 break;
3827
3828 /////////////////////////////////////////////
3829 case 0x07:
3830 // ADD #imm,Rn 0111nnnniiiiiiii
3831 opd->source = opd->dest = BITMASK1(GET_Rn());
3832 opd->imm = (int)(signed char)op;
3833 break;
3834
3835 /////////////////////////////////////////////
3836 case 0x08:
3837 switch (op & 0x0f00)
3838 {
3839 case 0x0000: // MOV.B R0,@(disp,Rn) 10000000nnnndddd
3840 opd->source = BITMASK2(GET_Rm(), SHR_R0);
3841 opd->imm = (op & 0x0f);
3842 break;
3843 case 0x0100: // MOV.W R0,@(disp,Rn) 10000001nnnndddd
3844 opd->source = BITMASK2(GET_Rm(), SHR_R0);
3845 opd->imm = (op & 0x0f) * 2;
3846 break;
3847 case 0x0400: // MOV.B @(disp,Rm),R0 10000100mmmmdddd
3848 opd->source = BITMASK1(GET_Rm());
3849 opd->dest = BITMASK1(SHR_R0);
3850 opd->imm = (op & 0x0f);
3851 break;
3852 case 0x0500: // MOV.W @(disp,Rm),R0 10000101mmmmdddd
3853 opd->source = BITMASK1(GET_Rm());
3854 opd->dest = BITMASK1(SHR_R0);
3855 opd->imm = (op & 0x0f) * 2;
3856 break;
3857 case 0x0800: // CMP/EQ #imm,R0 10001000iiiiiiii
3858 opd->source = BITMASK1(SHR_R0);
3859 opd->dest = BITMASK1(SHR_T);
3860 opd->imm = (int)(signed char)op;
3861 break;
3862 case 0x0d00: // BT/S label 10001101dddddddd
3863 case 0x0f00: // BF/S label 10001111dddddddd
3864 next_is_delay = 1;
3865 // fallthrough
3866 case 0x0900: // BT label 10001001dddddddd
3867 case 0x0b00: // BF label 10001011dddddddd
3868 opd->op = (op & 0x0200) ? OP_BRANCH_CF : OP_BRANCH_CT;
3869 opd->source = BITMASK1(SHR_T);
3870 opd->dest = BITMASK1(SHR_PC);
3871 opd->imm = ((signed int)(op << 24) >> 23);
3872 opd->imm += pc + 4;
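        // the shift pair sign-extends the 8-bit displacement and doubles
        // it, i.e. imm = (s8)op * 2 + pc + 4, per the SH-2 rule that
        // conditional branch targets are disp*2 from the insn address + 4.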
3873 if (base_pc <= opd->imm && opd->imm < base_pc + BLOCK_INSN_LIMIT * 2)
3874 op_flags[(opd->imm - base_pc) / 2] |= OF_BTARGET;
3875 break;
3876 default:
3877 goto undefined;
3878 }
3879 break;
3880
3881 /////////////////////////////////////////////
3882 case 0x09:
3883 // MOV.W @(disp,PC),Rn 1001nnnndddddddd
3884 opd->op = OP_LOAD_POOL;
3885 tmp = pc + 2;
3886 if (op_flags[i] & OF_DELAY_OP) {
3887 if (ops[i-1].op == OP_BRANCH)
3888 tmp = ops[i-1].imm;
3889 else
3890 tmp = 0;
3891 }
3892 opd->source = BITMASK1(SHR_PC);
3893 opd->dest = BITMASK1(GET_Rn());
3894 if (tmp)
3895 opd->imm = tmp + 2 + (op & 0xff) * 2;
3896 opd->size = 1;
3897 break;
3898
3899 /////////////////////////////////////////////
3900 case 0x0b:
3901 // BSR label 1011dddddddddddd
3902 opd->dest = BITMASK1(SHR_PR);
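      // fallthrough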
3903 case 0x0a:
3904 // BRA label 1010dddddddddddd
3905 opd->op = OP_BRANCH;
3906 opd->dest |= BITMASK1(SHR_PC);
3907 opd->imm = ((signed int)(op << 20) >> 19);
3908 opd->imm += pc + 4;
3909 opd->cycles = 2;
3910 next_is_delay = 1;
3911 end_block = 1;
3912 if (base_pc <= opd->imm && opd->imm < base_pc + BLOCK_INSN_LIMIT * 2)
3913 op_flags[(opd->imm - base_pc) / 2] |= OF_BTARGET;
3914 break;
3915
3916 /////////////////////////////////////////////
3917 case 0x0c:
3918 switch (op & 0x0f00)
3919 {
3920 case 0x0000: // MOV.B R0,@(disp,GBR) 11000000dddddddd
3921 case 0x0100: // MOV.W R0,@(disp,GBR) 11000001dddddddd
3922 case 0x0200: // MOV.L R0,@(disp,GBR) 11000010dddddddd
3923 opd->source = BITMASK2(SHR_GBR, SHR_R0);
3924 opd->size = (op & 0x300) >> 8;
3925 opd->imm = (op & 0xff) << opd->size;
3926 break;
3927 case 0x0400: // MOV.B @(disp,GBR),R0 11000100dddddddd
3928 case 0x0500: // MOV.W @(disp,GBR),R0 11000101dddddddd
3929 case 0x0600: // MOV.L @(disp,GBR),R0 11000110dddddddd
3930 opd->source = BITMASK1(SHR_GBR);
3931 opd->dest = BITMASK1(SHR_R0);
3932 opd->size = (op & 0x300) >> 8;
3933 opd->imm = (op & 0xff) << opd->size;
3934 break;
3935 case 0x0300: // TRAPA #imm 11000011iiiiiiii
3936 opd->source = BITMASK2(SHR_PC, SHR_SR);
3937 opd->dest = BITMASK1(SHR_PC);
3938 opd->imm = (op & 0xff) * 4;
3939 opd->cycles = 8;
3940 end_block = 1; // FIXME
3941 break;
3942 case 0x0700: // MOVA @(disp,PC),R0 11000111dddddddd
3943 opd->op = OP_MOVA;
3944 tmp = pc + 2;
3945 if (op_flags[i] & OF_DELAY_OP) {
3946 if (ops[i-1].op == OP_BRANCH)
3947 tmp = ops[i-1].imm;
3948 else
3949 tmp = 0;
3950 }
3951 opd->dest = BITMASK1(SHR_R0);
3952 if (tmp) {
3953 opd->imm = (tmp + 2 + (op & 0xff) * 4) & ~3;
3954 if (opd->imm >= base_pc) {
3955 if (lowest_mova == 0 || opd->imm < lowest_mova)
3956 lowest_mova = opd->imm;
3957 }
3958 }
3959 break;
3960 case 0x0800: // TST #imm,R0 11001000iiiiiiii
3961 opd->source = BITMASK1(SHR_R0);
3962 opd->dest = BITMASK1(SHR_T);
3963 opd->imm = op & 0xff;
3964 break;
3965 case 0x0900: // AND #imm,R0 11001001iiiiiiii
3966 opd->source = opd->dest = BITMASK1(SHR_R0);
3967 opd->imm = op & 0xff;
3968 break;
3969 case 0x0a00: // XOR #imm,R0 11001010iiiiiiii
3970 opd->source = opd->dest = BITMASK1(SHR_R0);
3971 opd->imm = op & 0xff;
3972 break;
3973 case 0x0b00: // OR #imm,R0 11001011iiiiiiii
3974 opd->source = opd->dest = BITMASK1(SHR_R0);
3975 opd->imm = op & 0xff;
3976 break;
3977 case 0x0c00: // TST.B #imm,@(R0,GBR) 11001100iiiiiiii
3978 opd->source = BITMASK2(SHR_GBR, SHR_R0);
3979 opd->dest = BITMASK1(SHR_T);
3980 opd->imm = op & 0xff;
3981 opd->cycles = 3;
3982 break;
3983 case 0x0d00: // AND.B #imm,@(R0,GBR) 11001101iiiiiiii
3984 case 0x0e00: // XOR.B #imm,@(R0,GBR) 11001110iiiiiiii
3985 case 0x0f00: // OR.B #imm,@(R0,GBR) 11001111iiiiiiii
3986 opd->source = BITMASK2(SHR_GBR, SHR_R0);
3987 opd->imm = op & 0xff;
3988 opd->cycles = 3;
3989 break;
3990 default:
3991 goto undefined;
3992 }
3993 break;
3994
3995 /////////////////////////////////////////////
3996 case 0x0d:
3997 // MOV.L @(disp,PC),Rn 1101nnnndddddddd
3998 opd->op = OP_LOAD_POOL;
3999 tmp = pc + 2;
4000 if (op_flags[i] & OF_DELAY_OP) {
4001 if (ops[i-1].op == OP_BRANCH)
4002 tmp = ops[i-1].imm;
4003 else
4004 tmp = 0;
4005 }
4006 opd->source = BITMASK1(SHR_PC);
4007 opd->dest = BITMASK1(GET_Rn());
4008 if (tmp)
4009 opd->imm = (tmp + 2 + (op & 0xff) * 4) & ~3;
4010 opd->size = 2;
4011 break;
4012
4013 /////////////////////////////////////////////
4014 case 0x0e:
4015 // MOV #imm,Rn 1110nnnniiiiiiii
4016 opd->dest = BITMASK1(GET_Rn());
4017 opd->imm = (u32)(signed int)(signed char)op;
4018 break;
4019
4020 default:
4021 undefined:
4022 elprintf(EL_ANOMALY, "%csh2 drc: unhandled op %04x @ %08x",
4023 is_slave ? 's' : 'm', op, pc);
4024 break;
4025 }
4026 }
4027 i_end = i;
4028 end_pc = pc;
4029
4030 // 2nd pass: some analysis
4031 for (i = 0; i < i_end; i++) {
4032 opd = &ops[i];
4033
4034 // propagate T (TODO: DIV0U)
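    // e.g. after SETT a following BT is always taken, and on the
    // fall-through path of a BT the T bit is known to be clear; the flags
    // set below record this so such branches can be rewritten as
    // unconditional OP_BRANCH further down.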
4035 if ((opd->op == OP_SETCLRT && !opd->imm) || opd->op == OP_BRANCH_CT)
4036 op_flags[i + 1] |= OF_T_CLEAR;
4037 else if ((opd->op == OP_SETCLRT && opd->imm) || opd->op == OP_BRANCH_CF)
4038 op_flags[i + 1] |= OF_T_SET;
4039
4040 if ((op_flags[i] & OF_BTARGET) || (opd->dest & BITMASK1(SHR_T)))
4041 op_flags[i] &= ~(OF_T_SET | OF_T_CLEAR);
4042 else
4043 op_flags[i + 1] |= op_flags[i] & (OF_T_SET | OF_T_CLEAR);
4044
4045 if ((opd->op == OP_BRANCH_CT && (op_flags[i] & OF_T_SET))
4046 || (opd->op == OP_BRANCH_CF && (op_flags[i] & OF_T_CLEAR)))
4047 {
4048 opd->op = OP_BRANCH;
4049 opd->cycles = 3;
4050 i_end = i + 1;
4051 if (op_flags[i + 1] & OF_DELAY_OP) {
4052 opd->cycles = 2;
4053 i_end++;
4054 }
4055 }
4056 else if (opd->op == OP_LOAD_POOL)
4057 {
4058 if (opd->imm < end_pc + MAX_LITERAL_OFFSET) {
4059 if (end_literals < opd->imm + opd->size * 2)
4060 end_literals = opd->imm + opd->size * 2;
4061 }
4062 }
4063 }
4064 end_pc = base_pc + i_end * 2;
4065 if (end_literals < end_pc)
4066 end_literals = end_pc;
4067
4068 // end_literals is used to decide to inline a literal or not
4069 // XXX: need better detection if this actually is used in write
4070 if (lowest_mova >= base_pc) {
4071 if (lowest_mova < end_literals) {
4072 dbg(1, "mova for %08x, block %08x", lowest_mova, base_pc);
4073 end_literals = end_pc;
4074 }
4075 if (lowest_mova < end_pc) {
4076 dbg(1, "warning: mova inside of blk for %08x, block %08x",
4077 lowest_mova, base_pc);
4078 end_literals = end_pc;
4079 }
4080 }
4081
4082 *end_pc_out = end_pc;
4083 if (end_literals_out != NULL)
4084 *end_literals_out = end_literals;
4085}
4086
4087// vim:shiftwidth=2:ts=2:expandtab