improve 64bit portability
[picodrive.git] / cpu / sh2 / compiler.c
1/*
2 * SH2 recompiler
3 * (C) notaz, 2009,2010,2013
4 *
5 * This work is licensed under the terms of MAME license.
6 * See COPYING file in the top-level directory.
7 *
8 * notes:
9 * - tcache, block descriptor, link buffer overflows result in sh2_translate()
10 * failure, followed by full tcache invalidation for that region
11 * - jumps between blocks are tracked for SMC handling (in block_entry->links),
12 * except jumps between different tcaches
13 *
14 * implemented:
15 * - static register allocation
16 * - remaining register caching and tracking in temporaries
17 * - block-local branch linking
18 * - block linking (except between tcaches)
19 * - some constant propagation
20 *
21 * TODO:
22 * - better constant propagation
23 * - stack caching?
24 * - bug fixing
25 */
26#include <stddef.h>
27#include <stdio.h>
28#include <stdlib.h>
29#include <assert.h>
30
31#include "../../pico/pico_int.h"
32#include "../../pico/arm_features.h"
33#include "sh2.h"
34#include "compiler.h"
35#include "../drc/cmn.h"
36#include "../debug.h"
37
38// features
39#define PROPAGATE_CONSTANTS 1
40#define LINK_BRANCHES 1
41
42// limits (per block)
43#define MAX_BLOCK_SIZE (BLOCK_INSN_LIMIT * 6 * 6)
44
45// max literal offset from the block end
46#define MAX_LITERAL_OFFSET 32*2
47#define MAX_LITERALS (BLOCK_INSN_LIMIT / 4)
48#define MAX_LOCAL_BRANCHES 32
49
50// debug stuff
51// 01 - warnings/errors
52// 02 - block info/smc
53// 04 - asm
54// 08 - runtime block entry log
55// 10 - smc self-check
56// {
57#ifndef DRC_DEBUG
58#define DRC_DEBUG 0
59#endif
60
61#if DRC_DEBUG
62#define dbg(l,...) { \
63 if ((l) & DRC_DEBUG) \
64 elprintf(EL_STATUS, ##__VA_ARGS__); \
65}
66#include "mame/sh2dasm.h"
67#include <platform/libpicofe/linux/host_dasm.h>
68static int insns_compiled, hash_collisions, host_insn_count;
69#define COUNT_OP \
70 host_insn_count++
71#else // !DRC_DEBUG
72#define COUNT_OP
73#define dbg(...)
74#endif
75
76///
77#define FETCH_OP(pc) \
78 dr_pc_base[(pc) / 2]
79
80#define FETCH32(a) \
81 ((dr_pc_base[(a) / 2] << 16) | dr_pc_base[(a) / 2 + 1])
82
83#define CHECK_UNHANDLED_BITS(mask, label) { \
84 if ((op & (mask)) != 0) \
85 goto label; \
86}
87
88#define GET_Fx() \
89 ((op >> 4) & 0x0f)
90
91#define GET_Rm GET_Fx
92
93#define GET_Rn() \
94 ((op >> 8) & 0x0f)
95
96#define BITMASK1(v0) (1 << (v0))
97#define BITMASK2(v0,v1) ((1 << (v0)) | (1 << (v1)))
98#define BITMASK3(v0,v1,v2) (BITMASK2(v0,v1) | (1 << (v2)))
99#define BITMASK4(v0,v1,v2,v3) (BITMASK3(v0,v1,v2) | (1 << (v3)))
100#define BITMASK5(v0,v1,v2,v3,v4) (BITMASK4(v0,v1,v2,v3) | (1 << (v4)))
101
102#define SHR_T SHR_SR // might make them separate someday
103
104static struct op_data {
105 u8 op;
106 u8 cycles;
107 u8 size; // 0, 1, 2 - byte, word, long
108 s8 rm; // branch or load/store data reg
109 u32 source; // bitmask of src regs
110 u32 dest; // bitmask of dest regs
111 u32 imm; // immediate/io address/branch target
112 // (for literal - address, not value)
113} ops[BLOCK_INSN_LIMIT];
114
115enum op_types {
116 OP_UNHANDLED = 0,
117 OP_BRANCH,
118 OP_BRANCH_CT, // conditional, branch if T set
119 OP_BRANCH_CF, // conditional, branch if T clear
120 OP_BRANCH_R, // indirect
121 OP_BRANCH_RF, // indirect far (PC + Rm)
122 OP_SETCLRT, // T flag set/clear
123 OP_MOVE, // register move
124 OP_LOAD_POOL, // literal pool load, imm is address
125 OP_MOVA,
126 OP_SLEEP,
127 OP_RTE,
128};
129
130#ifdef DRC_SH2
131
132static int literal_disabled_frames;
133
134#if (DRC_DEBUG & 4)
135static u8 *tcache_dsm_ptrs[3];
136static char sh2dasm_buff[64];
137#define do_host_disasm(tcid) \
138 host_dasm(tcache_dsm_ptrs[tcid], tcache_ptr - tcache_dsm_ptrs[tcid]); \
139 tcache_dsm_ptrs[tcid] = tcache_ptr
140#else
141#define do_host_disasm(x)
142#endif
143
144#if (DRC_DEBUG & 8) || defined(PDB)
145static void REGPARM(3) *sh2_drc_log_entry(void *block, SH2 *sh2, u32 sr)
146{
147 if (block != NULL) {
148 dbg(8, "= %csh2 enter %08x %p, c=%d", sh2->is_slave ? 's' : 'm',
149 sh2->pc, block, (signed int)sr >> 12);
150 pdb_step(sh2, sh2->pc);
151 }
152 return block;
153}
154#endif
155// } debug
156
157#define TCACHE_BUFFERS 3
158
159// we have 3 translation cache buffers, split from one drc/cmn buffer.
160// BIOS shares tcache with data array because it's only used for init
161// and can be discarded early
162// XXX: need to tune sizes
163static const int tcache_sizes[TCACHE_BUFFERS] = {
164 DRC_TCACHE_SIZE * 6 / 8, // ROM (rarely used), DRAM
165 DRC_TCACHE_SIZE / 8, // BIOS, data array in master sh2
166 DRC_TCACHE_SIZE / 8, // ... slave
167};
168
169static u8 *tcache_bases[TCACHE_BUFFERS];
170static u8 *tcache_ptrs[TCACHE_BUFFERS];
171
172// ptr for code emitters
173static u8 *tcache_ptr;
174
175#define MAX_BLOCK_ENTRIES (BLOCK_INSN_LIMIT / 8)
176
177struct block_link {
178 u32 target_pc;
179 void *jump; // insn address
180 struct block_link *next; // either in block_entry->links or unresolved_links
181};
182
183struct block_entry {
184 u32 pc;
185 void *tcache_ptr; // translated block for above PC
186 struct block_entry *next; // next block in hash_table with same pc hash
187 struct block_link *links; // links to this entry
188#if (DRC_DEBUG & 2)
189 struct block_desc *block;
190#endif
191};
192
193struct block_desc {
194 u32 addr; // block start SH2 PC address
195 u16 size; // ..of recompiled insns+lit. pool
196 u16 size_nolit; // same without literals
197#if (DRC_DEBUG & 2)
198 int refcount;
199#endif
200 int entry_count;
201 struct block_entry entryp[MAX_BLOCK_ENTRIES];
202};
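// a block_desc covers one contiguous stretch of guest code (incl. its literal
// pool); entryp[] holds one entry per entry point (the block start plus branch
// targets inside it), each hashed by pc for lookup (add_to_hashlist) and
// carrying the list of incoming jumps (links) so they can be moved back to
// unresolved_links when the entry is killed (unregister_links).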
203
204static const int block_max_counts[TCACHE_BUFFERS] = {
205 4*1024,
206 256,
207 256,
208};
209static struct block_desc *block_tables[TCACHE_BUFFERS];
210static int block_counts[TCACHE_BUFFERS];
211
212// we have block_link_pool to avoid using mallocs
213static const int block_link_pool_max_counts[TCACHE_BUFFERS] = {
214 4*1024,
215 256,
216 256,
217};
218static struct block_link *block_link_pool[TCACHE_BUFFERS];
219static int block_link_pool_counts[TCACHE_BUFFERS];
220static struct block_link *unresolved_links[TCACHE_BUFFERS];
221
222// used for invalidation
223static const int ram_sizes[TCACHE_BUFFERS] = {
224 0x40000,
225 0x1000,
226 0x1000,
227};
228#define INVAL_PAGE_SIZE 0x100
229
230struct block_list {
231 struct block_desc *block;
232 struct block_list *next;
233};
234
235// array of pointers to block_lists for RAM and 2 data arrays
236// each array has len: sizeof(mem) / INVAL_PAGE_SIZE
237static struct block_list **inval_lookup[TCACHE_BUFFERS];
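// (the write-side lookup itself lives elsewhere in this file; presumably a
// write to guest address a indexes
// inval_lookup[tcid][(a & (ram_sizes[tcid] - 1)) / INVAL_PAGE_SIZE]
// to find the blocks overlapping that 256-byte page)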
238
239static const int hash_table_sizes[TCACHE_BUFFERS] = {
240 0x1000,
241 0x100,
242 0x100,
243};
244static struct block_entry **hash_tables[TCACHE_BUFFERS];
245
246#define HASH_FUNC(hash_tab, addr, mask) \
247 (hash_tab)[(((addr) >> 20) ^ ((addr) >> 2)) & (mask)]
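// e.g. (illustrative numbers): pc 0x06000abc with the ROM/RAM table
// (0x1000 buckets, mask 0xfff) hashes to
//   (0x06000abc >> 20) ^ (0x06000abc >> 2) = 0x60 ^ 0x018002af = 0x018002cf,
//   0x018002cf & 0xfff = 0x2cf  ->  hash_tables[0][0x2cf]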
248
249// host register tracking
250enum {
251 HR_FREE,
252 HR_CACHED, // 'val' has sh2_reg_e
253// HR_CONST, // 'val' has a constant
254 HR_TEMP, // reg used for temp storage
255};
256
257enum {
258 HRF_DIRTY = 1 << 0, // reg has "dirty" value to be written to ctx
259 HRF_LOCKED = 1 << 1, // HR_CACHED can't be evicted
260};
261
262typedef struct {
263 u32 hreg:5; // "host" reg
264 u32 greg:5; // "guest" reg
265 u32 type:3;
266 u32 flags:3;
267 u32 stamp:16; // kind of a timestamp
268} temp_reg_t;
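// how the register cache works, roughly: guest regs with a reg_map_g2h[]
// entry stay pinned in that host reg; the rest are allocated on demand from
// reg_temp[], evicting the entry with the oldest stamp first (rcache_evict).
// HRF_DIRTY marks values that still need a writeback to the SH2 context,
// HRF_LOCKED keeps regs still needed by the code being emitted from eviction.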
269
270// note: reg_temp[] must have at least as many registers as
271// the handlers use in the worst case (currently 4)
272#ifdef __arm__
273#include "../drc/emit_arm.c"
274
275#ifndef __MACH__
276
277static const int reg_map_g2h[] = {
278 4, 5, 6, 7,
279 8, -1, -1, -1,
280 -1, -1, -1, -1,
281 -1, -1, -1, 9, // r12 .. sp
282 -1, -1, -1, 10, // SHR_PC, SHR_PPC, SHR_PR, SHR_SR,
283 -1, -1, -1, -1, // SHR_GBR, SHR_VBR, SHR_MACH, SHR_MACL,
284};
285
286#else
287
288// no r9..
289static const int reg_map_g2h[] = {
290 4, 5, 6, 7,
291 -1, -1, -1, -1,
292 -1, -1, -1, -1,
293 -1, -1, -1, 8, // r12 .. sp
294 -1, -1, -1, 10, // SHR_PC, SHR_PPC, SHR_PR, SHR_SR,
295 -1, -1, -1, -1, // SHR_GBR, SHR_VBR, SHR_MACH, SHR_MACL,
296};
297
298#endif
299
300static temp_reg_t reg_temp[] = {
301 { 0, },
302 { 1, },
303 { 12, },
304 { 14, },
305 { 2, },
306 { 3, },
307};
308
309#elif defined(__i386__)
310#include "../drc/emit_x86.c"
311
312static const int reg_map_g2h[] = {
313 xSI,-1, -1, -1,
314 -1, -1, -1, -1,
315 -1, -1, -1, -1,
316 -1, -1, -1, -1,
317 -1, -1, -1, xDI,
318 -1, -1, -1, -1,
319};
320
321// ax, cx, dx are usually temporaries by convention
322static temp_reg_t reg_temp[] = {
323 { xAX, },
324 { xBX, },
325 { xCX, },
326 { xDX, },
327};
328
329#elif defined(__x86_64__)
330#include "../drc/emit_x86.c"
331
332static const int reg_map_g2h[] = {
333 -1, -1, -1, -1,
334 -1, -1, -1, -1,
335 -1, -1, -1, -1,
336 -1, -1, -1, -1,
337 -1, -1, -1, xBX,
338 -1, -1, -1, -1,
339};
340
341// ax, cx, dx are usually temporaries by convention
342static temp_reg_t reg_temp[] = {
343 { xAX, },
344 { xCX, },
345 { xDX, },
346 { xSI, },
347 { xDI, },
348};
349
350#else
351#error unsupported arch
352#endif
353
354#define T 0x00000001
355#define S 0x00000002
356#define I 0x000000f0
357#define Q 0x00000100
358#define M 0x00000200
359#define T_save 0x00000800
360
361#define I_SHIFT 4
362#define Q_SHIFT 8
363#define M_SHIFT 9
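// SR layout as used by the drc: T/S/I/Q/M in their architectural bits,
// T_save (bit 11) holds a copy of T for delay slots, and bits 12+ hold the
// remaining cycle budget as a signed value; that is why FLUSH_CYCLES
// subtracts cycles << 12 and the block prologue exits on a signed
// "SR <= 0" check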
364
365static void REGPARM(1) (*sh2_drc_entry)(SH2 *sh2);
366static void (*sh2_drc_dispatcher)(void);
367static void (*sh2_drc_exit)(void);
368static void (*sh2_drc_test_irq)(void);
369
370static u32 REGPARM(2) (*sh2_drc_read8)(u32 a, SH2 *sh2);
371static u32 REGPARM(2) (*sh2_drc_read16)(u32 a, SH2 *sh2);
372static u32 REGPARM(2) (*sh2_drc_read32)(u32 a, SH2 *sh2);
373static void REGPARM(2) (*sh2_drc_write8)(u32 a, u32 d);
374static void REGPARM(2) (*sh2_drc_write16)(u32 a, u32 d);
375static void REGPARM(3) (*sh2_drc_write32)(u32 a, u32 d, SH2 *sh2);
376
377// address space stuff
378static int dr_ctx_get_mem_ptr(u32 a, u32 *mask)
379{
380 int poffs = -1;
381
382 if ((a & ~0x7ff) == 0) {
383 // BIOS
384 poffs = offsetof(SH2, p_bios);
385 *mask = 0x7ff;
386 }
387 else if ((a & 0xfffff000) == 0xc0000000) {
388 // data array
389 // FIXME: access sh2->data_array instead
390 poffs = offsetof(SH2, p_da);
391 *mask = 0xfff;
392 }
393 else if ((a & 0xc6000000) == 0x06000000) {
394 // SDRAM
395 poffs = offsetof(SH2, p_sdram);
396 *mask = 0x03ffff;
397 }
398 else if ((a & 0xc6000000) == 0x02000000) {
399 // ROM
400 poffs = offsetof(SH2, p_rom);
401 *mask = 0x3fffff;
402 }
403
404 return poffs;
405}
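// e.g. a = 0x06001234 hits the SDRAM case: poffs = offsetof(SH2, p_sdram),
// *mask = 0x3ffff; emit_get_rbase_and_offs() (further down) then adds
// 0x06001234 & 0x3ffff & ~0xff = 0x1200 to sh2->p_sdram and returns the
// remaining 0x34 as the load/store offset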
406
407static struct block_entry *dr_get_entry(u32 pc, int is_slave, int *tcache_id)
408{
409 struct block_entry *be;
410 u32 tcid = 0, mask;
411
412 // data arrays have their own caches
413 if ((pc & 0xe0000000) == 0xc0000000 || (pc & ~0xfff) == 0)
414 tcid = 1 + is_slave;
415
416 *tcache_id = tcid;
417
418 mask = hash_table_sizes[tcid] - 1;
419 be = HASH_FUNC(hash_tables[tcid], pc, mask);
420 for (; be != NULL; be = be->next)
421 if (be->pc == pc)
422 return be;
423
424 return NULL;
425}
426
427// ---------------------------------------------------------------
428
429// block management
430static void add_to_block_list(struct block_list **blist, struct block_desc *block)
431{
432 struct block_list *added = malloc(sizeof(*added));
433 if (!added) {
434 elprintf(EL_ANOMALY, "drc OOM (1)");
435 return;
436 }
437 added->block = block;
438 added->next = *blist;
439 *blist = added;
440}
441
442static void rm_from_block_list(struct block_list **blist, struct block_desc *block)
443{
444 struct block_list *prev = NULL, *current = *blist;
445 for (; current != NULL; current = current->next) {
446 if (current->block == block) {
447 if (prev == NULL)
448 *blist = current->next;
449 else
450 prev->next = current->next;
451 free(current);
452 return;
453 }
454 prev = current;
455 }
456 dbg(1, "can't rm block %p (%08x-%08x)",
457 block, block->addr, block->addr + block->size);
458}
459
460static void rm_block_list(struct block_list **blist)
461{
462 struct block_list *tmp, *current = *blist;
463 while (current != NULL) {
464 tmp = current;
465 current = current->next;
466 free(tmp);
467 }
468 *blist = NULL;
469}
470
471static void REGPARM(1) flush_tcache(int tcid)
472{
473 int i;
474
475 dbg(1, "tcache #%d flush! (%d/%d, bds %d/%d)", tcid,
476 tcache_ptrs[tcid] - tcache_bases[tcid], tcache_sizes[tcid],
477 block_counts[tcid], block_max_counts[tcid]);
478
479 block_counts[tcid] = 0;
480 block_link_pool_counts[tcid] = 0;
481 unresolved_links[tcid] = NULL;
482 memset(hash_tables[tcid], 0, sizeof(*hash_tables[0]) * hash_table_sizes[tcid]);
483 tcache_ptrs[tcid] = tcache_bases[tcid];
484 if (Pico32xMem != NULL) {
485 if (tcid == 0) // ROM, RAM
486 memset(Pico32xMem->drcblk_ram, 0,
487 sizeof(Pico32xMem->drcblk_ram));
488 else
489 memset(Pico32xMem->drcblk_da[tcid - 1], 0,
490 sizeof(Pico32xMem->drcblk_da[0]));
491 }
492#if (DRC_DEBUG & 4)
493 tcache_dsm_ptrs[tcid] = tcache_bases[tcid];
494#endif
495
496 for (i = 0; i < ram_sizes[tcid] / INVAL_PAGE_SIZE; i++)
497 rm_block_list(&inval_lookup[tcid][i]);
498}
499
500static void add_to_hashlist(struct block_entry *be, int tcache_id)
501{
502 u32 tcmask = hash_table_sizes[tcache_id] - 1;
503
504 be->next = HASH_FUNC(hash_tables[tcache_id], be->pc, tcmask);
505 HASH_FUNC(hash_tables[tcache_id], be->pc, tcmask) = be;
506
507#if (DRC_DEBUG & 2)
508 if (be->next != NULL) {
509 printf(" %08x: hash collision with %08x\n",
510 be->pc, be->next->pc);
511 hash_collisions++;
512 }
513#endif
514}
515
516static void rm_from_hashlist(struct block_entry *be, int tcache_id)
517{
518 u32 tcmask = hash_table_sizes[tcache_id] - 1;
519 struct block_entry *cur, *prev;
520
521 cur = HASH_FUNC(hash_tables[tcache_id], be->pc, tcmask);
522 if (cur == NULL)
523 goto missing;
524
525 if (be == cur) { // first
526 HASH_FUNC(hash_tables[tcache_id], be->pc, tcmask) = be->next;
527 return;
528 }
529
530 for (prev = cur, cur = cur->next; cur != NULL; cur = cur->next) {
531 if (cur == be) {
532 prev->next = cur->next;
533 return;
534 }
535 }
536
537missing:
538 dbg(1, "rm_from_hashlist: be %p %08x missing?", be, be->pc);
539}
540
541static void unregister_links(struct block_entry *be, int tcache_id)
542{
543 struct block_link *bl_unresolved = unresolved_links[tcache_id];
544 struct block_link *bl, *bl_next;
545
546 for (bl = be->links; bl != NULL; ) {
547 bl_next = bl->next;
548 bl->next = bl_unresolved;
549 bl_unresolved = bl;
550 bl = bl_next;
551 }
552 be->links = NULL;
553 unresolved_links[tcache_id] = bl_unresolved;
554}
555
556// unlike sh2_smc_rm_block, the block stays and can still be accessed
557// by other already directly linked blocks, just not preferred
558static void kill_block_entry(struct block_entry *be, int tcache_id)
559{
560 rm_from_hashlist(be, tcache_id);
561 unregister_links(be, tcache_id);
562}
563
564static struct block_desc *dr_add_block(u32 addr, u16 size_lit,
565 u16 size_nolit, int is_slave, int *blk_id)
566{
567 struct block_entry *be;
568 struct block_desc *bd;
569 int tcache_id;
570 int *bcount;
571
572 // do a lookup to get tcache_id and check for an override
573 be = dr_get_entry(addr, is_slave, &tcache_id);
574 if (be != NULL) {
575 dbg(1, "block override for %08x, was %p", addr, be->tcache_ptr);
576 kill_block_entry(be, tcache_id);
577 }
578
579 bcount = &block_counts[tcache_id];
580 if (*bcount >= block_max_counts[tcache_id]) {
581 dbg(1, "bd overflow for tcache %d", tcache_id);
582 return NULL;
583 }
584
585 bd = &block_tables[tcache_id][*bcount];
586 bd->addr = addr;
587 bd->size = size_lit;
588 bd->size_nolit = size_nolit;
589
590 bd->entry_count = 1;
591 bd->entryp[0].pc = addr;
592 bd->entryp[0].tcache_ptr = tcache_ptr;
593 bd->entryp[0].links = NULL;
594#if (DRC_DEBUG & 2)
595 bd->entryp[0].block = bd;
596 bd->refcount = 0;
597#endif
598 add_to_hashlist(&bd->entryp[0], tcache_id);
599
600 *blk_id = *bcount;
601 (*bcount)++;
602
603 return bd;
604}
605
606static void REGPARM(3) *dr_lookup_block(u32 pc, int is_slave, int *tcache_id)
607{
608 struct block_entry *be = NULL;
609 void *block = NULL;
610
611 be = dr_get_entry(pc, is_slave, tcache_id);
612 if (be != NULL)
613 block = be->tcache_ptr;
614
615#if (DRC_DEBUG & 2)
616 if (be != NULL)
617 be->block->refcount++;
618#endif
619 return block;
620}
621
622static void *dr_failure(void)
623{
624 lprintf("recompilation failed\n");
625 exit(1);
626}
627
628static void *dr_prepare_ext_branch(u32 pc, int is_slave, int tcache_id)
629{
630#if LINK_BRANCHES
631 struct block_link *bl = block_link_pool[tcache_id];
632 int cnt = block_link_pool_counts[tcache_id];
633 struct block_entry *be = NULL;
634 int target_tcache_id;
635 int i;
636
637 be = dr_get_entry(pc, is_slave, &target_tcache_id);
638 if (target_tcache_id != tcache_id)
639 return sh2_drc_dispatcher;
640
641 // if pool has been freed, reuse
642 for (i = cnt - 1; i >= 0; i--)
643 if (bl[i].target_pc != 0)
644 break;
645 cnt = i + 1;
646 if (cnt >= block_link_pool_max_counts[tcache_id]) {
647 dbg(1, "bl overflow for tcache %d", tcache_id);
648 return NULL;
649 }
650 bl += cnt;
651 block_link_pool_counts[tcache_id]++;
652
653 bl->target_pc = pc;
654 bl->jump = tcache_ptr;
655
656 if (be != NULL) {
657 dbg(2, "- early link from %p to pc %08x", bl->jump, pc);
658 bl->next = be->links;
659 be->links = bl;
660 return be->tcache_ptr;
661 }
662 else {
663 bl->next = unresolved_links[tcache_id];
664 unresolved_links[tcache_id] = bl;
665 return sh2_drc_dispatcher;
666 }
667#else
668 return sh2_drc_dispatcher;
669#endif
670}
671
672static void dr_link_blocks(struct block_entry *be, int tcache_id)
673{
674#if LINK_BRANCHES
675 struct block_link *first = unresolved_links[tcache_id];
676 struct block_link *bl, *prev, *tmp;
677 u32 pc = be->pc;
678
679 for (bl = prev = first; bl != NULL; ) {
680 if (bl->target_pc == pc) {
681 dbg(2, "- link from %p to pc %08x", bl->jump, pc);
682 emith_jump_patch(bl->jump, tcache_ptr);
683
684 // move bl from unresolved_links to block_entry
685 tmp = bl->next;
686 bl->next = be->links;
687 be->links = bl;
688
689 if (bl == first)
690 first = prev = bl = tmp;
691 else
692 prev->next = bl = tmp;
693 continue;
694 }
695 prev = bl;
696 bl = bl->next;
697 }
698 unresolved_links[tcache_id] = first;
699
700 // could sync arm caches here, but that's unnecessary
701#endif
702}
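// linking flow in short: dr_prepare_ext_branch() is used when a jump leaves
// the current block; if the target pc is already compiled in the same tcache,
// the jump goes straight to its code and the block_link is recorded on the
// target entry's links, otherwise the jump targets sh2_drc_dispatcher and the
// link waits on unresolved_links[]. Once an entry for that pc appears,
// dr_link_blocks() patches the recorded jump to the new code.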
703
704#define ADD_TO_ARRAY(array, count, item, failcode) \
705 if (count >= ARRAY_SIZE(array)) { \
706 dbg(1, "warning: " #array " overflow"); \
707 failcode; \
708 } \
709 array[count++] = item;
710
711static int find_in_array(u32 *array, size_t size, u32 what)
712{
713 size_t i;
714 for (i = 0; i < size; i++)
715 if (what == array[i])
716 return i;
717
718 return -1;
719}
720
721// ---------------------------------------------------------------
722
723// register cache / constant propagation stuff
724typedef enum {
725 RC_GR_READ,
726 RC_GR_WRITE,
727 RC_GR_RMW,
728} rc_gr_mode;
729
730static int rcache_get_reg_(sh2_reg_e r, rc_gr_mode mode, int do_locking);
731
732// guest regs with constants
733static u32 dr_gcregs[24];
734// masks of regs holding constants / of constants not yet materialized
735static u32 dr_gcregs_mask;
736static u32 dr_gcregs_dirty;
737
738#if PROPAGATE_CONSTANTS
739static void gconst_new(sh2_reg_e r, u32 val)
740{
741 int i;
742
743 dr_gcregs_mask |= 1 << r;
744 dr_gcregs_dirty |= 1 << r;
745 dr_gcregs[r] = val;
746
747 // throw away old r that we might have cached
748 for (i = ARRAY_SIZE(reg_temp) - 1; i >= 0; i--) {
749 if ((reg_temp[i].type == HR_CACHED) &&
750 reg_temp[i].greg == r) {
751 reg_temp[i].type = HR_FREE;
752 reg_temp[i].flags = 0;
753 }
754 }
755}
756#endif
757
758static int gconst_get(sh2_reg_e r, u32 *val)
759{
760 if (dr_gcregs_mask & (1 << r)) {
761 *val = dr_gcregs[r];
762 return 1;
763 }
764 return 0;
765}
766
767static int gconst_check(sh2_reg_e r)
768{
769 if ((dr_gcregs_mask | dr_gcregs_dirty) & (1 << r))
770 return 1;
771 return 0;
772}
773
774// update hr if dirty, else do nothing
775static int gconst_try_read(int hr, sh2_reg_e r)
776{
777 if (dr_gcregs_dirty & (1 << r)) {
778 emith_move_r_imm(hr, dr_gcregs[r]);
779 dr_gcregs_dirty &= ~(1 << r);
780 return 1;
781 }
782 return 0;
783}
784
785static void gconst_check_evict(sh2_reg_e r)
786{
787 if (dr_gcregs_mask & (1 << r))
788 // no longer cached in reg, make dirty again
789 dr_gcregs_dirty |= 1 << r;
790}
791
792static void gconst_kill(sh2_reg_e r)
793{
794 dr_gcregs_mask &= ~(1 << r);
795 dr_gcregs_dirty &= ~(1 << r);
796}
797
798static void gconst_clean(void)
799{
800 int i;
801
802 for (i = 0; i < ARRAY_SIZE(dr_gcregs); i++)
803 if (dr_gcregs_dirty & (1 << i)) {
804 // using RC_GR_READ here: it will call gconst_try_read,
805 // cache the reg and mark it dirty.
806 rcache_get_reg_(i, RC_GR_READ, 0);
807 }
808}
809
810static void gconst_invalidate(void)
811{
812 dr_gcregs_mask = dr_gcregs_dirty = 0;
813}
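// constant propagation in short: gconst_new() records a known guest reg value
// (emit_move_r_imm32, literal pool loads); later reads can then be satisfied
// with emith_move_r_imm instead of a context load. gconst_clean() materializes
// still-dirty constants before they must be visible in memory (called from
// rcache_clean), and gconst_invalidate() forgets everything once host regs
// may have been clobbered (rcache_invalidate, i.e. after calls).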
814
815static u16 rcache_counter;
816
817static temp_reg_t *rcache_evict(void)
818{
819 // evict reg with oldest stamp
820 int i, oldest = -1;
821 u16 min_stamp = (u16)-1;
822
823 for (i = 0; i < ARRAY_SIZE(reg_temp); i++) {
824 if (reg_temp[i].type == HR_CACHED && !(reg_temp[i].flags & HRF_LOCKED) &&
825 reg_temp[i].stamp <= min_stamp) {
826 min_stamp = reg_temp[i].stamp;
827 oldest = i;
828 }
829 }
830
831 if (oldest == -1) {
832 printf("no registers to evict, aborting\n");
833 exit(1);
834 }
835
836 i = oldest;
837 if (reg_temp[i].type == HR_CACHED) {
838 if (reg_temp[i].flags & HRF_DIRTY)
839 // writeback
840 emith_ctx_write(reg_temp[i].hreg, reg_temp[i].greg * 4);
841 gconst_check_evict(reg_temp[i].greg);
842 }
843
844 reg_temp[i].type = HR_FREE;
845 reg_temp[i].flags = 0;
846 return &reg_temp[i];
847}
848
849static int get_reg_static(sh2_reg_e r, rc_gr_mode mode)
850{
851 int i = reg_map_g2h[r];
852 if (i != -1) {
853 if (mode != RC_GR_WRITE)
854 gconst_try_read(i, r);
855 }
856 return i;
857}
858
859// note: must not be called when doing conditional code
860static int rcache_get_reg_(sh2_reg_e r, rc_gr_mode mode, int do_locking)
861{
862 temp_reg_t *tr;
863 int i, ret;
864
865 // maybe statically mapped?
866 ret = get_reg_static(r, mode);
867 if (ret != -1)
868 goto end;
869
870 rcache_counter++;
871
872 // maybe already cached?
873 // if so, prefer it over gconst (they must be in sync)
874 for (i = ARRAY_SIZE(reg_temp) - 1; i >= 0; i--) {
875 if (reg_temp[i].type == HR_CACHED && reg_temp[i].greg == r) {
876 reg_temp[i].stamp = rcache_counter;
877 if (mode != RC_GR_READ)
878 reg_temp[i].flags |= HRF_DIRTY;
879 ret = reg_temp[i].hreg;
880 goto end;
881 }
882 }
883
884 // use any free reg
885 for (i = ARRAY_SIZE(reg_temp) - 1; i >= 0; i--) {
886 if (reg_temp[i].type == HR_FREE) {
887 tr = &reg_temp[i];
888 goto do_alloc;
889 }
890 }
891
892 tr = rcache_evict();
893
894do_alloc:
895 tr->type = HR_CACHED;
896 if (do_locking)
897 tr->flags |= HRF_LOCKED;
898 if (mode != RC_GR_READ)
899 tr->flags |= HRF_DIRTY;
900 tr->greg = r;
901 tr->stamp = rcache_counter;
902 ret = tr->hreg;
903
904 if (mode != RC_GR_WRITE) {
905 if (gconst_check(r)) {
906 if (gconst_try_read(ret, r))
907 tr->flags |= HRF_DIRTY;
908 }
909 else
910 emith_ctx_read(tr->hreg, r * 4);
911 }
912
913end:
914 if (mode != RC_GR_READ)
915 gconst_kill(r);
916
917 return ret;
918}
919
920static int rcache_get_reg(sh2_reg_e r, rc_gr_mode mode)
921{
922 return rcache_get_reg_(r, mode, 1);
923}
924
925static int rcache_get_tmp(void)
926{
927 temp_reg_t *tr;
928 int i;
929
930 for (i = 0; i < ARRAY_SIZE(reg_temp); i++)
931 if (reg_temp[i].type == HR_FREE) {
932 tr = &reg_temp[i];
933 goto do_alloc;
934 }
935
936 tr = rcache_evict();
937
938do_alloc:
939 tr->type = HR_TEMP;
940 return tr->hreg;
941}
942
943static int rcache_get_hr_id(int hr)
944{
945 int i;
946
947 for (i = 0; i < ARRAY_SIZE(reg_temp); i++)
948 if (reg_temp[i].hreg == hr)
949 break;
950
951 if (i == ARRAY_SIZE(reg_temp)) // can't happen
952 exit(1);
953
954 if (reg_temp[i].type == HR_CACHED) {
955 // writeback
956 if (reg_temp[i].flags & HRF_DIRTY)
957 emith_ctx_write(reg_temp[i].hreg, reg_temp[i].greg * 4);
958 gconst_check_evict(reg_temp[i].greg);
959 }
960 else if (reg_temp[i].type == HR_TEMP) {
961 printf("host reg %d already used, aborting\n", hr);
962 exit(1);
963 }
964
965 reg_temp[i].type = HR_FREE;
966 reg_temp[i].flags = 0;
967
968 return i;
969}
970
971static int rcache_get_arg_id(int arg)
972{
973 int r = 0;
974 host_arg2reg(r, arg);
975 return rcache_get_hr_id(r);
976}
977
978// get a reg to be used as function arg
979static int rcache_get_tmp_arg(int arg)
980{
981 int id = rcache_get_arg_id(arg);
982 reg_temp[id].type = HR_TEMP;
983
984 return reg_temp[id].hreg;
985}
986
987// ... as return value after a call
988static int rcache_get_tmp_ret(void)
989{
990 int id = rcache_get_hr_id(RET_REG);
991 reg_temp[id].type = HR_TEMP;
992
993 return reg_temp[id].hreg;
994}
995
996// same but caches a reg. RC_GR_READ only.
997static int rcache_get_reg_arg(int arg, sh2_reg_e r)
998{
999 int i, srcr, dstr, dstid;
1000 int dirty = 0, src_dirty = 0;
1001
1002 dstid = rcache_get_arg_id(arg);
1003 dstr = reg_temp[dstid].hreg;
1004
1005 // maybe already statically mapped?
1006 srcr = get_reg_static(r, RC_GR_READ);
1007 if (srcr != -1)
1008 goto do_cache;
1009
1010 // maybe already cached?
1011 for (i = ARRAY_SIZE(reg_temp) - 1; i >= 0; i--) {
1012 if ((reg_temp[i].type == HR_CACHED) &&
1013 reg_temp[i].greg == r)
1014 {
1015 srcr = reg_temp[i].hreg;
1016 if (reg_temp[i].flags & HRF_DIRTY)
1017 src_dirty = 1;
1018 goto do_cache;
1019 }
1020 }
1021
1022 // must read
1023 srcr = dstr;
1024 if (gconst_check(r)) {
1025 if (gconst_try_read(srcr, r))
1026 dirty = 1;
1027 }
1028 else
1029 emith_ctx_read(srcr, r * 4);
1030
1031do_cache:
1032 if (dstr != srcr)
1033 emith_move_r_r(dstr, srcr);
1034#if 1
1035 else
1036 dirty |= src_dirty;
1037
1038 if (dirty)
1039 // must clean, callers might want to modify the arg before call
1040 emith_ctx_write(dstr, r * 4);
1041#else
1042 if (dirty)
1043 reg_temp[dstid].flags |= HRF_DIRTY;
1044#endif
1045
1046 reg_temp[dstid].stamp = ++rcache_counter;
1047 reg_temp[dstid].type = HR_CACHED;
1048 reg_temp[dstid].greg = r;
1049 reg_temp[dstid].flags |= HRF_LOCKED;
1050 return dstr;
1051}
1052
1053static void rcache_free_tmp(int hr)
1054{
1055 int i;
1056 for (i = 0; i < ARRAY_SIZE(reg_temp); i++)
1057 if (reg_temp[i].hreg == hr)
1058 break;
1059
1060 if (i == ARRAY_SIZE(reg_temp) || reg_temp[i].type != HR_TEMP) {
1061 printf("rcache_free_tmp fail: #%i hr %d, type %d\n", i, hr, i < (int)ARRAY_SIZE(reg_temp) ? reg_temp[i].type : -1);
1062 return;
1063 }
1064
1065 reg_temp[i].type = HR_FREE;
1066 reg_temp[i].flags = 0;
1067}
1068
1069static void rcache_unlock(int hr)
1070{
1071 int i;
1072 for (i = 0; i < ARRAY_SIZE(reg_temp); i++)
1073 if (reg_temp[i].type == HR_CACHED && reg_temp[i].hreg == hr)
1074 reg_temp[i].flags &= ~HRF_LOCKED;
1075}
1076
1077static void rcache_unlock_all(void)
1078{
1079 int i;
1080 for (i = 0; i < ARRAY_SIZE(reg_temp); i++)
1081 reg_temp[i].flags &= ~HRF_LOCKED;
1082}
1083
1084#ifdef DRC_CMP
1085static u32 rcache_used_hreg_mask(void)
1086{
1087 u32 mask = 0;
1088 int i;
1089
1090 for (i = 0; i < ARRAY_SIZE(reg_temp); i++)
1091 if (reg_temp[i].type != HR_FREE)
1092 mask |= 1 << reg_temp[i].hreg;
1093
1094 return mask;
1095}
1096#endif
1097
1098static void rcache_clean(void)
1099{
1100 int i;
1101 gconst_clean();
1102
1103 for (i = 0; i < ARRAY_SIZE(reg_temp); i++)
1104 if (reg_temp[i].type == HR_CACHED && (reg_temp[i].flags & HRF_DIRTY)) {
1105 // writeback
1106 emith_ctx_write(reg_temp[i].hreg, reg_temp[i].greg * 4);
1107 reg_temp[i].flags &= ~HRF_DIRTY;
1108 }
1109}
1110
1111static void rcache_invalidate(void)
1112{
1113 int i;
1114 for (i = 0; i < ARRAY_SIZE(reg_temp); i++) {
1115 reg_temp[i].type = HR_FREE;
1116 reg_temp[i].flags = 0;
1117 }
1118 rcache_counter = 0;
1119
1120 gconst_invalidate();
1121}
1122
1123static void rcache_flush(void)
1124{
1125 rcache_clean();
1126 rcache_invalidate();
1127}
1128
1129// ---------------------------------------------------------------
1130
1131static int emit_get_rbase_and_offs(u32 a, u32 *offs)
1132{
1133 u32 mask = 0;
1134 int poffs;
1135 int hr;
1136
1137 poffs = dr_ctx_get_mem_ptr(a, &mask);
1138 if (poffs == -1)
1139 return -1;
1140
1141 // XXX: could use some related reg
1142 hr = rcache_get_tmp();
1143 emith_ctx_read_ptr(hr, poffs);
1144 emith_add_r_r_ptr_imm(hr, hr, a & mask & ~0xff);
1145 *offs = a & 0xff; // XXX: ARM oriented..
1146 return hr;
1147}
1148
1149static void emit_move_r_imm32(sh2_reg_e dst, u32 imm)
1150{
1151#if PROPAGATE_CONSTANTS
1152 gconst_new(dst, imm);
1153#else
1154 int hr = rcache_get_reg(dst, RC_GR_WRITE);
1155 emith_move_r_imm(hr, imm);
1156#endif
1157}
1158
1159static void emit_move_r_r(sh2_reg_e dst, sh2_reg_e src)
1160{
1161 int hr_d = rcache_get_reg(dst, RC_GR_WRITE);
1162 int hr_s = rcache_get_reg(src, RC_GR_READ);
1163
1164 emith_move_r_r(hr_d, hr_s);
1165}
1166
1167// T must be clear, and comparison done just before this
1168static void emit_or_t_if_eq(int srr)
1169{
1170 EMITH_SJMP_START(DCOND_NE);
1171 emith_or_r_imm_c(DCOND_EQ, srr, T);
1172 EMITH_SJMP_END(DCOND_NE);
1173}
1174
1175// arguments must be ready
1176// reg cache must be clean before call
1177static int emit_memhandler_read_(int size, int ram_check)
1178{
1179 int arg1;
1180#if 0
1181 int arg0;
1182 host_arg2reg(arg0, 0);
1183#endif
1184
1185 rcache_clean();
1186
1187 // must writeback cycles for poll detection stuff
1188 // FIXME: rm
1189 if (reg_map_g2h[SHR_SR] != -1)
1190 emith_ctx_write(reg_map_g2h[SHR_SR], SHR_SR * 4);
1191
1192 arg1 = rcache_get_tmp_arg(1);
1193 emith_move_r_r_ptr(arg1, CONTEXT_REG);
1194
1195#if 0 // can't do this because of unmapped reads
1196 // ndef PDB_NET
1197 if (ram_check && Pico.rom == (void *)0x02000000 && Pico32xMem->sdram == (void *)0x06000000) {
1198 int tmp = rcache_get_tmp();
1199 emith_and_r_r_imm(tmp, arg0, 0xfb000000);
1200 emith_cmp_r_imm(tmp, 0x02000000);
1201 switch (size) {
1202 case 0: // 8
1203 EMITH_SJMP3_START(DCOND_NE);
1204 emith_eor_r_imm_c(DCOND_EQ, arg0, 1);
1205 emith_read8_r_r_offs_c(DCOND_EQ, arg0, arg0, 0);
1206 EMITH_SJMP3_MID(DCOND_NE);
1207 emith_call_cond(DCOND_NE, sh2_drc_read8);
1208 EMITH_SJMP3_END();
1209 break;
1210 case 1: // 16
1211 EMITH_SJMP3_START(DCOND_NE);
1212 emith_read16_r_r_offs_c(DCOND_EQ, arg0, arg0, 0);
1213 EMITH_SJMP3_MID(DCOND_NE);
1214 emith_call_cond(DCOND_NE, sh2_drc_read16);
1215 EMITH_SJMP3_END();
1216 break;
1217 case 2: // 32
1218 EMITH_SJMP3_START(DCOND_NE);
1219 emith_read_r_r_offs_c(DCOND_EQ, arg0, arg0, 0);
1220 emith_ror_c(DCOND_EQ, arg0, arg0, 16);
1221 EMITH_SJMP3_MID(DCOND_NE);
1222 emith_call_cond(DCOND_NE, sh2_drc_read32);
1223 EMITH_SJMP3_END();
1224 break;
1225 }
1226 }
1227 else
1228#endif
1229 {
1230 switch (size) {
1231 case 0: // 8
1232 emith_call(sh2_drc_read8);
1233 break;
1234 case 1: // 16
1235 emith_call(sh2_drc_read16);
1236 break;
1237 case 2: // 32
1238 emith_call(sh2_drc_read32);
1239 break;
1240 }
1241 }
1242 rcache_invalidate();
1243
1244 if (reg_map_g2h[SHR_SR] != -1)
1245 emith_ctx_read(reg_map_g2h[SHR_SR], SHR_SR * 4);
1246
1247 return rcache_get_tmp_ret();
1248}
1249
1250static int emit_memhandler_read(int size)
1251{
1252 return emit_memhandler_read_(size, 1);
1253}
1254
1255static int emit_memhandler_read_rr(sh2_reg_e rd, sh2_reg_e rs, u32 offs, int size)
1256{
1257 int hr, hr2, ram_check = 1;
1258 u32 val, offs2;
1259
1260 if (gconst_get(rs, &val)) {
1261 hr = emit_get_rbase_and_offs(val + offs, &offs2);
1262 if (hr != -1) {
1263 hr2 = rcache_get_reg(rd, RC_GR_WRITE);
1264 switch (size) {
1265 case 0: // 8
1266 emith_read8_r_r_offs(hr2, hr, offs2 ^ 1);
1267 emith_sext(hr2, hr2, 8);
1268 break;
1269 case 1: // 16
1270 emith_read16_r_r_offs(hr2, hr, offs2);
1271 emith_sext(hr2, hr2, 16);
1272 break;
1273 case 2: // 32
1274 emith_read_r_r_offs(hr2, hr, offs2);
1275 emith_ror(hr2, hr2, 16);
1276 break;
1277 }
1278 rcache_free_tmp(hr);
1279 return hr2;
1280 }
1281
1282 ram_check = 0;
1283 }
1284
1285 hr = rcache_get_reg_arg(0, rs);
1286 if (offs != 0)
1287 emith_add_r_imm(hr, offs);
1288 hr = emit_memhandler_read_(size, ram_check);
1289 hr2 = rcache_get_reg(rd, RC_GR_WRITE);
1290 if (size != 2) {
1291 emith_sext(hr2, hr, (size == 1) ? 16 : 8);
1292 } else
1293 emith_move_r_r(hr2, hr);
1294 rcache_free_tmp(hr);
1295
1296 return hr2;
1297}
1298
1299static void emit_memhandler_write(int size)
1300{
1301 int ctxr;
1302 host_arg2reg(ctxr, 2);
1303 if (reg_map_g2h[SHR_SR] != -1)
1304 emith_ctx_write(reg_map_g2h[SHR_SR], SHR_SR * 4);
1305
1306 rcache_clean();
1307
1308 switch (size) {
1309 case 0: // 8
1310 // XXX: consider inlining sh2_drc_write8
1311 emith_call(sh2_drc_write8);
1312 break;
1313 case 1: // 16
1314 emith_call(sh2_drc_write16);
1315 break;
1316 case 2: // 32
1317 emith_move_r_r_ptr(ctxr, CONTEXT_REG);
1318 emith_call(sh2_drc_write32);
1319 break;
1320 }
1321
1322 rcache_invalidate();
1323 if (reg_map_g2h[SHR_SR] != -1)
1324 emith_ctx_read(reg_map_g2h[SHR_SR], SHR_SR * 4);
1325}
1326
1327// @(Rx,Ry)
1328static int emit_indirect_indexed_read(int rx, int ry, int size)
1329{
1330 int a0, t;
1331 a0 = rcache_get_reg_arg(0, rx);
1332 t = rcache_get_reg(ry, RC_GR_READ);
1333 emith_add_r_r(a0, t);
1334 return emit_memhandler_read(size);
1335}
1336
1337// read @Rn, @Rm
1338static void emit_indirect_read_double(u32 *rnr, u32 *rmr, int rn, int rm, int size)
1339{
1340 int tmp;
1341
1342 rcache_get_reg_arg(0, rn);
1343 tmp = emit_memhandler_read(size);
1344 emith_ctx_write(tmp, offsetof(SH2, drc_tmp));
1345 rcache_free_tmp(tmp);
1346 tmp = rcache_get_reg(rn, RC_GR_RMW);
1347 emith_add_r_imm(tmp, 1 << size);
1348 rcache_unlock(tmp);
1349
1350 rcache_get_reg_arg(0, rm);
1351 *rmr = emit_memhandler_read(size);
1352 *rnr = rcache_get_tmp();
1353 emith_ctx_read(*rnr, offsetof(SH2, drc_tmp));
1354 tmp = rcache_get_reg(rm, RC_GR_RMW);
1355 emith_add_r_imm(tmp, 1 << size);
1356 rcache_unlock(tmp);
1357}
1358
1359static void emit_do_static_regs(int is_write, int tmpr)
1360{
1361 int i, r, count;
1362
1363 for (i = 0; i < ARRAY_SIZE(reg_map_g2h); i++) {
1364 r = reg_map_g2h[i];
1365 if (r == -1)
1366 continue;
1367
1368 for (count = 1; i < ARRAY_SIZE(reg_map_g2h) - 1; i++, r++) {
1369 if (reg_map_g2h[i + 1] != r + 1)
1370 break;
1371 count++;
1372 }
1373
1374 if (count > 1) {
1375 // i, r point to last item
1376 if (is_write)
1377 emith_ctx_write_multiple(r - count + 1, (i - count + 1) * 4, count, tmpr);
1378 else
1379 emith_ctx_read_multiple(r - count + 1, (i - count + 1) * 4, count, tmpr);
1380 } else {
1381 if (is_write)
1382 emith_ctx_write(r, i * 4);
1383 else
1384 emith_ctx_read(r, i * 4);
1385 }
1386 }
1387}
1388
1389/* just after lookup function, jump to address returned */
1390static void emit_block_entry(void)
1391{
1392#if (DRC_DEBUG & 8) || defined(PDB)
1393 int arg1, arg2;
1394 host_arg2reg(arg1, 1);
1395 host_arg2reg(arg2, 2);
1396
1397 emit_do_static_regs(1, arg2);
1398 emith_move_r_r_ptr(arg1, CONTEXT_REG);
1399 emith_move_r_r(arg2, rcache_get_reg(SHR_SR, RC_GR_READ));
1400 emith_call(sh2_drc_log_entry);
1401 rcache_invalidate();
1402#endif
1403 emith_tst_r_r(RET_REG, RET_REG);
1404 EMITH_SJMP_START(DCOND_EQ);
1405 emith_jump_reg_c(DCOND_NE, RET_REG);
1406 EMITH_SJMP_END(DCOND_EQ);
1407}
1408
1409#define DELAY_SAVE_T(sr) { \
1410 emith_bic_r_imm(sr, T_save); \
1411 emith_tst_r_imm(sr, T); \
1412 EMITH_SJMP_START(DCOND_EQ); \
1413 emith_or_r_imm_c(DCOND_NE, sr, T_save); \
1414 EMITH_SJMP_END(DCOND_EQ); \
1415}
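// a conditional branch samples T before its delay slot runs; if the slot insn
// itself writes T (the delay_dep_fw case in sh2_translate), the pre-slot value
// is parked in T_save so the branch emission code can presumably test that
// instead of the clobbered T bit.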
1416
1417#define FLUSH_CYCLES(sr) \
1418 if (cycles > 0) { \
1419 emith_sub_r_imm(sr, cycles << 12); \
1420 cycles = 0; \
1421 }
1422
1423static void *dr_get_pc_base(u32 pc, int is_slave);
1424
1425static void REGPARM(2) *sh2_translate(SH2 *sh2, int tcache_id)
1426{
1427 u32 branch_target_pc[MAX_LOCAL_BRANCHES];
1428 void *branch_target_ptr[MAX_LOCAL_BRANCHES];
1429 int branch_target_count = 0;
1430 void *branch_patch_ptr[MAX_LOCAL_BRANCHES];
1431 u32 branch_patch_pc[MAX_LOCAL_BRANCHES];
1432 int branch_patch_count = 0;
1433 u32 literal_addr[MAX_LITERALS];
1434 int literal_addr_count = 0;
1435 u8 op_flags[BLOCK_INSN_LIMIT];
1436 struct {
1437 u32 test_irq:1;
1438 u32 pending_branch_direct:1;
1439 u32 pending_branch_indirect:1;
1440 u32 literals_disabled:1;
1441 } drcf = { 0, };
1442
1443 // PC of current, first, last SH2 insn
1444 u32 pc, base_pc, end_pc;
1445 u32 end_literals;
1446 void *block_entry_ptr;
1447 struct block_desc *block;
1448 u16 *dr_pc_base;
1449 struct op_data *opd;
1450 int blkid_main = 0;
1451 int skip_op = 0;
1452 u32 tmp, tmp2;
1453 int cycles;
1454 int i, v;
1455 int op;
1456
1457 base_pc = sh2->pc;
1458 drcf.literals_disabled = literal_disabled_frames != 0;
1459
1460 // get base/validate PC
1461 dr_pc_base = dr_get_pc_base(base_pc, sh2->is_slave);
1462 if (dr_pc_base == (void *)-1) {
1463 printf("invalid PC, aborting: %08x\n", base_pc);
1464 // FIXME: be less destructive
1465 exit(1);
1466 }
1467
1468 tcache_ptr = tcache_ptrs[tcache_id];
1469
1470 // predict tcache overflow
1471 tmp = tcache_ptr - tcache_bases[tcache_id];
1472 if (tmp > tcache_sizes[tcache_id] - MAX_BLOCK_SIZE) {
1473 dbg(1, "tcache %d overflow", tcache_id);
1474 return NULL;
1475 }
1476
1477 // initial passes to disassemble and analyze the block
1478 scan_block(base_pc, sh2->is_slave, op_flags, &end_pc, &end_literals);
1479
1480 if (drcf.literals_disabled)
1481 end_literals = end_pc;
1482
1483 block = dr_add_block(base_pc, end_literals - base_pc,
1484 end_pc - base_pc, sh2->is_slave, &blkid_main);
1485 if (block == NULL)
1486 return NULL;
1487
1488 block_entry_ptr = tcache_ptr;
1489 dbg(2, "== %csh2 block #%d,%d %08x-%08x -> %p", sh2->is_slave ? 's' : 'm',
1490 tcache_id, blkid_main, base_pc, end_pc, block_entry_ptr);
1491
1492 dr_link_blocks(&block->entryp[0], tcache_id);
1493
1494 // collect branch_targets that don't land on delay slots
1495 for (pc = base_pc, i = 0; pc < end_pc; i++, pc += 2) {
1496 if (!(op_flags[i] & OF_BTARGET))
1497 continue;
1498 if (op_flags[i] & OF_DELAY_OP) {
1499 op_flags[i] &= ~OF_BTARGET;
1500 continue;
1501 }
1502 ADD_TO_ARRAY(branch_target_pc, branch_target_count, pc, break);
1503 }
1504
1505 if (branch_target_count > 0) {
1506 memset(branch_target_ptr, 0, sizeof(branch_target_ptr[0]) * branch_target_count);
1507 }
1508
1509 // clear stale state after compile errors
1510 rcache_invalidate();
1511
1512 // -------------------------------------------------
1513 // 3rd pass: actual compilation
1514 pc = base_pc;
1515 cycles = 0;
1516 for (i = 0; pc < end_pc; i++)
1517 {
1518 u32 delay_dep_fw = 0, delay_dep_bk = 0;
1519 u32 tmp3, tmp4, sr;
1520
1521 opd = &ops[i];
1522 op = FETCH_OP(pc);
1523
1524#if (DRC_DEBUG & 2)
1525 insns_compiled++;
1526#endif
1527#if (DRC_DEBUG & 4)
1528 DasmSH2(sh2dasm_buff, pc, op);
1529 printf("%c%08x %04x %s\n", (op_flags[i] & OF_BTARGET) ? '*' : ' ',
1530 pc, op, sh2dasm_buff);
1531#endif
1532
1533 if ((op_flags[i] & OF_BTARGET) || pc == base_pc)
1534 {
1535 if (pc != base_pc)
1536 {
1537 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1538 FLUSH_CYCLES(sr);
1539 rcache_flush();
1540
1541 // make block entry
1542 v = block->entry_count;
1543 if (v < ARRAY_SIZE(block->entryp))
1544 {
1545 struct block_entry *be_old;
1546
1547 block->entryp[v].pc = pc;
1548 block->entryp[v].tcache_ptr = tcache_ptr;
1549 block->entryp[v].links = NULL;
1550#if (DRC_DEBUG & 2)
1551 block->entryp[v].block = block;
1552#endif
1553 be_old = dr_get_entry(pc, sh2->is_slave, &tcache_id);
1554 if (be_old != NULL) {
1555 dbg(1, "entry override for %08x, was %p", pc, be_old->tcache_ptr);
1556 kill_block_entry(be_old, tcache_id);
1557 }
1558
1559 add_to_hashlist(&block->entryp[v], tcache_id);
1560 block->entry_count++;
1561
1562 dbg(2, "-- %csh2 block #%d,%d entry %08x -> %p",
1563 sh2->is_slave ? 's' : 'm', tcache_id, blkid_main,
1564 pc, tcache_ptr);
1565
1566 // since we made a block entry, link any other blocks
1567 // that jump to current pc
1568 dr_link_blocks(&block->entryp[v], tcache_id);
1569 }
1570 else {
1571 dbg(1, "too many entryp for block #%d,%d pc=%08x",
1572 tcache_id, blkid_main, pc);
1573 }
1574
1575 do_host_disasm(tcache_id);
1576 }
1577
1578 v = find_in_array(branch_target_pc, branch_target_count, pc);
1579 if (v >= 0)
1580 branch_target_ptr[v] = tcache_ptr;
1581
1582 // must update PC
1583 emit_move_r_imm32(SHR_PC, pc);
1584 rcache_clean();
1585
1586#if (DRC_DEBUG & 0x10)
1587 rcache_get_reg_arg(0, SHR_PC);
1588 tmp = emit_memhandler_read(2);
1589 tmp2 = rcache_get_tmp();
1590 tmp3 = rcache_get_tmp();
1591 emith_move_r_imm(tmp2, FETCH32(pc));
1592 emith_move_r_imm(tmp3, 0);
1593 emith_cmp_r_r(tmp, tmp2);
1594 EMITH_SJMP_START(DCOND_EQ);
1595 emith_read_r_r_offs_c(DCOND_NE, tmp3, tmp3, 0); // crash
1596 EMITH_SJMP_END(DCOND_EQ);
1597 rcache_free_tmp(tmp);
1598 rcache_free_tmp(tmp2);
1599 rcache_free_tmp(tmp3);
1600#endif
1601
1602 // check cycles
1603 sr = rcache_get_reg(SHR_SR, RC_GR_READ);
1604 emith_cmp_r_imm(sr, 0);
1605 emith_jump_cond(DCOND_LE, sh2_drc_exit);
1606 do_host_disasm(tcache_id);
1607 rcache_unlock_all();
1608 }
1609
1610#ifdef DRC_CMP
1611 if (!(op_flags[i] & OF_DELAY_OP)) {
1612 emit_move_r_imm32(SHR_PC, pc);
1613 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1614 FLUSH_CYCLES(sr);
1615 rcache_clean();
1616
1617 tmp = rcache_used_hreg_mask();
1618 emith_save_caller_regs(tmp);
1619 emit_do_static_regs(1, 0);
1620 emith_pass_arg_r(0, CONTEXT_REG);
1621 emith_call(do_sh2_cmp);
1622 emith_restore_caller_regs(tmp);
1623 }
1624#endif
1625
1626 pc += 2;
1627
1628 if (skip_op > 0) {
1629 skip_op--;
1630 continue;
1631 }
1632
1633 if (op_flags[i] & OF_DELAY_OP)
1634 {
1635 // handle delay slot dependencies
1636 delay_dep_fw = opd->dest & ops[i-1].source;
1637 delay_dep_bk = opd->source & ops[i-1].dest;
1638 if (delay_dep_fw & BITMASK1(SHR_T)) {
1639 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1640 DELAY_SAVE_T(sr);
1641 }
1642 if (delay_dep_bk & BITMASK1(SHR_PC)) {
1643 if (opd->op != OP_LOAD_POOL && opd->op != OP_MOVA) {
1644 // can only be those 2 really..
1645 elprintf_sh2(sh2, EL_ANOMALY,
1646 "drc: illegal slot insn %04x @ %08x?", op, pc - 2);
1647 }
1648 if (opd->imm != 0)
1649 ; // addr already resolved somehow
1650 else {
1651 switch (ops[i-1].op) {
1652 case OP_BRANCH:
1653 emit_move_r_imm32(SHR_PC, ops[i-1].imm);
1654 break;
1655 case OP_BRANCH_CT:
1656 case OP_BRANCH_CF:
1657 tmp = rcache_get_reg(SHR_PC, RC_GR_WRITE);
1658 sr = rcache_get_reg(SHR_SR, RC_GR_READ);
1659 emith_move_r_imm(tmp, pc);
1660 emith_tst_r_imm(sr, T);
1661 tmp2 = ops[i-1].op == OP_BRANCH_CT ? DCOND_NE : DCOND_EQ;
1662 emith_move_r_imm_c(tmp2, tmp, ops[i-1].imm);
1663 break;
1664 // case OP_BRANCH_R OP_BRANCH_RF - PC already loaded
1665 }
1666 }
1667 }
1668 //if (delay_dep_fw & ~BITMASK1(SHR_T))
1669 // dbg(1, "unhandled delay_dep_fw: %x", delay_dep_fw & ~BITMASK1(SHR_T));
1670 if (delay_dep_bk & ~BITMASK2(SHR_PC, SHR_PR))
1671 dbg(1, "unhandled delay_dep_bk: %x", delay_dep_bk);
1672 }
1673
1674 switch (opd->op)
1675 {
1676 case OP_BRANCH:
1677 case OP_BRANCH_CT:
1678 case OP_BRANCH_CF:
1679 if (opd->dest & BITMASK1(SHR_PR))
1680 emit_move_r_imm32(SHR_PR, pc + 2);
1681 drcf.pending_branch_direct = 1;
1682 goto end_op;
1683
1684 case OP_BRANCH_R:
1685 if (opd->dest & BITMASK1(SHR_PR))
1686 emit_move_r_imm32(SHR_PR, pc + 2);
1687 emit_move_r_r(SHR_PC, opd->rm);
1688 drcf.pending_branch_indirect = 1;
1689 goto end_op;
1690
1691 case OP_BRANCH_RF:
1692 tmp = rcache_get_reg(SHR_PC, RC_GR_WRITE);
1693 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_READ);
1694 if (opd->dest & BITMASK1(SHR_PR)) {
1695 tmp3 = rcache_get_reg(SHR_PR, RC_GR_WRITE);
1696 emith_move_r_imm(tmp3, pc + 2);
1697 emith_add_r_r_r(tmp, tmp2, tmp3);
1698 }
1699 else {
1700 emith_move_r_r(tmp, tmp2);
1701 emith_add_r_imm(tmp, pc + 2);
1702 }
1703 drcf.pending_branch_indirect = 1;
1704 goto end_op;
1705
1706 case OP_SLEEP:
1707 printf("TODO sleep\n");
1708 goto end_op;
1709
1710 case OP_RTE:
1711 // pop PC
1712 emit_memhandler_read_rr(SHR_PC, SHR_SP, 0, 2);
1713 // pop SR
1714 tmp = rcache_get_reg_arg(0, SHR_SP);
1715 emith_add_r_imm(tmp, 4);
1716 tmp = emit_memhandler_read(2);
1717 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1718 emith_write_sr(sr, tmp);
1719 rcache_free_tmp(tmp);
1720 tmp = rcache_get_reg(SHR_SP, RC_GR_RMW);
1721 emith_add_r_imm(tmp, 4*2);
1722 drcf.test_irq = 1;
1723 drcf.pending_branch_indirect = 1;
1724 goto end_op;
1725
1726 case OP_LOAD_POOL:
1727#if PROPAGATE_CONSTANTS
1728 if (opd->imm != 0 && opd->imm < end_literals
1729 && literal_addr_count < MAX_LITERALS)
1730 {
1731 ADD_TO_ARRAY(literal_addr, literal_addr_count, opd->imm,);
1732 if (opd->size == 2)
1733 tmp = FETCH32(opd->imm);
1734 else
1735 tmp = (u32)(int)(signed short)FETCH_OP(opd->imm);
1736 gconst_new(GET_Rn(), tmp);
1737 }
1738 else
1739#endif
1740 {
1741 tmp = rcache_get_tmp_arg(0);
1742 if (opd->imm != 0)
1743 emith_move_r_imm(tmp, opd->imm);
1744 else {
1745 // have to calculate read addr from PC
1746 tmp2 = rcache_get_reg(SHR_PC, RC_GR_READ);
1747 if (opd->size == 2) {
1748 emith_add_r_r_imm(tmp, tmp2, 2 + (op & 0xff) * 4);
1749 emith_bic_r_imm(tmp, 3);
1750 }
1751 else
1752 emith_add_r_r_imm(tmp, tmp2, 2 + (op & 0xff) * 2);
1753 }
1754 tmp2 = emit_memhandler_read(opd->size);
1755 tmp3 = rcache_get_reg(GET_Rn(), RC_GR_WRITE);
1756 if (opd->size == 2)
1757 emith_move_r_r(tmp3, tmp2);
1758 else
1759 emith_sext(tmp3, tmp2, 16);
1760 rcache_free_tmp(tmp2);
1761 }
1762 goto end_op;
1763
1764 case OP_MOVA:
1765 if (opd->imm != 0)
1766 emit_move_r_imm32(SHR_R0, opd->imm);
1767 else {
1768 tmp = rcache_get_reg(SHR_R0, RC_GR_WRITE);
1769 tmp2 = rcache_get_reg(SHR_PC, RC_GR_READ);
1770 emith_add_r_r_imm(tmp, tmp2, 2 + (op & 0xff) * 4);
1771 emith_bic_r_imm(tmp, 3);
1772 }
1773 goto end_op;
1774 }
1775
1776 switch ((op >> 12) & 0x0f)
1777 {
1778 /////////////////////////////////////////////
1779 case 0x00:
1780 switch (op & 0x0f)
1781 {
1782 case 0x02:
1783 tmp = rcache_get_reg(GET_Rn(), RC_GR_WRITE);
1784 switch (GET_Fx())
1785 {
1786 case 0: // STC SR,Rn 0000nnnn00000010
1787 tmp2 = SHR_SR;
1788 break;
1789 case 1: // STC GBR,Rn 0000nnnn00010010
1790 tmp2 = SHR_GBR;
1791 break;
1792 case 2: // STC VBR,Rn 0000nnnn00100010
1793 tmp2 = SHR_VBR;
1794 break;
1795 default:
1796 goto default_;
1797 }
1798 tmp3 = rcache_get_reg(tmp2, RC_GR_READ);
1799 emith_move_r_r(tmp, tmp3);
1800 if (tmp2 == SHR_SR)
1801 emith_clear_msb(tmp, tmp, 22); // reserved bits defined by ISA as 0
1802 goto end_op;
1803 case 0x04: // MOV.B Rm,@(R0,Rn) 0000nnnnmmmm0100
1804 case 0x05: // MOV.W Rm,@(R0,Rn) 0000nnnnmmmm0101
1805 case 0x06: // MOV.L Rm,@(R0,Rn) 0000nnnnmmmm0110
1806 rcache_clean();
1807 tmp = rcache_get_reg_arg(1, GET_Rm());
1808 tmp2 = rcache_get_reg_arg(0, SHR_R0);
1809 tmp3 = rcache_get_reg(GET_Rn(), RC_GR_READ);
1810 emith_add_r_r(tmp2, tmp3);
1811 emit_memhandler_write(op & 3);
1812 goto end_op;
1813 case 0x07:
1814 // MUL.L Rm,Rn 0000nnnnmmmm0111
1815 tmp = rcache_get_reg(GET_Rn(), RC_GR_READ);
1816 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1817 tmp3 = rcache_get_reg(SHR_MACL, RC_GR_WRITE);
1818 emith_mul(tmp3, tmp2, tmp);
1819 goto end_op;
1820 case 0x08:
1821 switch (GET_Fx())
1822 {
1823 case 0: // CLRT 0000000000001000
1824 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1825 emith_bic_r_imm(sr, T);
1826 break;
1827 case 1: // SETT 0000000000011000
1828 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1829 emith_or_r_imm(sr, T);
1830 break;
1831 case 2: // CLRMAC 0000000000101000
1832 emit_move_r_imm32(SHR_MACL, 0);
1833 emit_move_r_imm32(SHR_MACH, 0);
1834 break;
1835 default:
1836 goto default_;
1837 }
1838 goto end_op;
1839 case 0x09:
1840 switch (GET_Fx())
1841 {
1842 case 0: // NOP 0000000000001001
1843 break;
1844 case 1: // DIV0U 0000000000011001
1845 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1846 emith_bic_r_imm(sr, M|Q|T);
1847 break;
1848 case 2: // MOVT Rn 0000nnnn00101001
1849 sr = rcache_get_reg(SHR_SR, RC_GR_READ);
1850 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_WRITE);
1851 emith_clear_msb(tmp2, sr, 31);
1852 break;
1853 default:
1854 goto default_;
1855 }
1856 goto end_op;
1857 case 0x0a:
1858 tmp = rcache_get_reg(GET_Rn(), RC_GR_WRITE);
1859 switch (GET_Fx())
1860 {
1861 case 0: // STS MACH,Rn 0000nnnn00001010
1862 tmp2 = SHR_MACH;
1863 break;
1864 case 1: // STS MACL,Rn 0000nnnn00011010
1865 tmp2 = SHR_MACL;
1866 break;
1867 case 2: // STS PR,Rn 0000nnnn00101010
1868 tmp2 = SHR_PR;
1869 break;
1870 default:
1871 goto default_;
1872 }
1873 tmp2 = rcache_get_reg(tmp2, RC_GR_READ);
1874 emith_move_r_r(tmp, tmp2);
1875 goto end_op;
1876 case 0x0c: // MOV.B @(R0,Rm),Rn 0000nnnnmmmm1100
1877 case 0x0d: // MOV.W @(R0,Rm),Rn 0000nnnnmmmm1101
1878 case 0x0e: // MOV.L @(R0,Rm),Rn 0000nnnnmmmm1110
1879 tmp = emit_indirect_indexed_read(SHR_R0, GET_Rm(), op & 3);
1880 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_WRITE);
1881 if ((op & 3) != 2) {
1882 emith_sext(tmp2, tmp, (op & 1) ? 16 : 8);
1883 } else
1884 emith_move_r_r(tmp2, tmp);
1885 rcache_free_tmp(tmp);
1886 goto end_op;
1887 case 0x0f: // MAC.L @Rm+,@Rn+ 0000nnnnmmmm1111
1888 emit_indirect_read_double(&tmp, &tmp2, GET_Rn(), GET_Rm(), 2);
1889 tmp4 = rcache_get_reg(SHR_MACH, RC_GR_RMW);
1890 /* MS 16 MAC bits unused if saturated */
1891 sr = rcache_get_reg(SHR_SR, RC_GR_READ);
1892 emith_tst_r_imm(sr, S);
1893 EMITH_SJMP_START(DCOND_EQ);
1894 emith_clear_msb_c(DCOND_NE, tmp4, tmp4, 16);
1895 EMITH_SJMP_END(DCOND_EQ);
1896 rcache_unlock(sr);
1897 tmp3 = rcache_get_reg(SHR_MACL, RC_GR_RMW); // might evict SR
1898 emith_mula_s64(tmp3, tmp4, tmp, tmp2);
1899 rcache_free_tmp(tmp2);
1900 sr = rcache_get_reg(SHR_SR, RC_GR_READ); // reget just in case
1901 emith_tst_r_imm(sr, S);
1902
1903 EMITH_JMP_START(DCOND_EQ);
1904 emith_asr(tmp, tmp4, 15);
1905 emith_cmp_r_imm(tmp, -1); // negative overflow (0x80000000..0xffff7fff)
1906 EMITH_SJMP_START(DCOND_GE);
1907 emith_move_r_imm_c(DCOND_LT, tmp4, 0x8000);
1908 emith_move_r_imm_c(DCOND_LT, tmp3, 0x0000);
1909 EMITH_SJMP_END(DCOND_GE);
1910 emith_cmp_r_imm(tmp, 0); // positive overflow (0x00008000..0x7fffffff)
1911 EMITH_SJMP_START(DCOND_LE);
1912 emith_move_r_imm_c(DCOND_GT, tmp4, 0x00007fff);
1913 emith_move_r_imm_c(DCOND_GT, tmp3, 0xffffffff);
1914 EMITH_SJMP_END(DCOND_LE);
1915 EMITH_JMP_END(DCOND_EQ);
1916
1917 rcache_free_tmp(tmp);
1918 goto end_op;
1919 }
1920 goto default_;
1921
1922 /////////////////////////////////////////////
1923 case 0x01:
1924 // MOV.L Rm,@(disp,Rn) 0001nnnnmmmmdddd
1925 rcache_clean();
1926 tmp = rcache_get_reg_arg(0, GET_Rn());
1927 tmp2 = rcache_get_reg_arg(1, GET_Rm());
1928 if (op & 0x0f)
1929 emith_add_r_imm(tmp, (op & 0x0f) * 4);
1930 emit_memhandler_write(2);
1931 goto end_op;
1932
1933 case 0x02:
1934 switch (op & 0x0f)
1935 {
1936 case 0x00: // MOV.B Rm,@Rn 0010nnnnmmmm0000
1937 case 0x01: // MOV.W Rm,@Rn 0010nnnnmmmm0001
1938 case 0x02: // MOV.L Rm,@Rn 0010nnnnmmmm0010
1939 rcache_clean();
1940 rcache_get_reg_arg(0, GET_Rn());
1941 rcache_get_reg_arg(1, GET_Rm());
1942 emit_memhandler_write(op & 3);
1943 goto end_op;
1944 case 0x04: // MOV.B Rm,@-Rn 0010nnnnmmmm0100
1945 case 0x05: // MOV.W Rm,@-Rn 0010nnnnmmmm0101
1946 case 0x06: // MOV.L Rm,@-Rn 0010nnnnmmmm0110
1947 rcache_get_reg_arg(1, GET_Rm()); // for Rm == Rn
1948 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
1949 emith_sub_r_imm(tmp, (1 << (op & 3)));
1950 rcache_clean();
1951 rcache_get_reg_arg(0, GET_Rn());
1952 emit_memhandler_write(op & 3);
1953 goto end_op;
1954 case 0x07: // DIV0S Rm,Rn 0010nnnnmmmm0111
1955 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1956 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_READ);
1957 tmp3 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1958 emith_bic_r_imm(sr, M|Q|T);
1959 emith_tst_r_imm(tmp2, (1<<31));
1960 EMITH_SJMP_START(DCOND_EQ);
1961 emith_or_r_imm_c(DCOND_NE, sr, Q);
1962 EMITH_SJMP_END(DCOND_EQ);
1963 emith_tst_r_imm(tmp3, (1<<31));
1964 EMITH_SJMP_START(DCOND_EQ);
1965 emith_or_r_imm_c(DCOND_NE, sr, M);
1966 EMITH_SJMP_END(DCOND_EQ);
1967 emith_teq_r_r(tmp2, tmp3);
1968 EMITH_SJMP_START(DCOND_PL);
1969 emith_or_r_imm_c(DCOND_MI, sr, T);
1970 EMITH_SJMP_END(DCOND_PL);
1971 goto end_op;
1972 case 0x08: // TST Rm,Rn 0010nnnnmmmm1000
1973 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
1974 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_READ);
1975 tmp3 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1976 emith_bic_r_imm(sr, T);
1977 emith_tst_r_r(tmp2, tmp3);
1978 emit_or_t_if_eq(sr);
1979 goto end_op;
1980 case 0x09: // AND Rm,Rn 0010nnnnmmmm1001
1981 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
1982 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1983 emith_and_r_r(tmp, tmp2);
1984 goto end_op;
1985 case 0x0a: // XOR Rm,Rn 0010nnnnmmmm1010
1986 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
1987 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1988 emith_eor_r_r(tmp, tmp2);
1989 goto end_op;
1990 case 0x0b: // OR Rm,Rn 0010nnnnmmmm1011
1991 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
1992 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1993 emith_or_r_r(tmp, tmp2);
1994 goto end_op;
1995 case 0x0c: // CMP/STR Rm,Rn 0010nnnnmmmm1100
1996 tmp = rcache_get_tmp();
1997 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_READ);
1998 tmp3 = rcache_get_reg(GET_Rm(), RC_GR_READ);
1999 emith_eor_r_r_r(tmp, tmp2, tmp3);
2000 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2001 emith_bic_r_imm(sr, T);
2002 emith_tst_r_imm(tmp, 0x000000ff);
2003 emit_or_t_if_eq(sr);
2004 emith_tst_r_imm(tmp, 0x0000ff00);
2005 emit_or_t_if_eq(sr);
2006 emith_tst_r_imm(tmp, 0x00ff0000);
2007 emit_or_t_if_eq(sr);
2008 emith_tst_r_imm(tmp, 0xff000000);
2009 emit_or_t_if_eq(sr);
2010 rcache_free_tmp(tmp);
2011 goto end_op;
2012 case 0x0d: // XTRCT Rm,Rn 0010nnnnmmmm1101
2013 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2014 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
2015 emith_lsr(tmp, tmp, 16);
2016 emith_or_r_r_lsl(tmp, tmp2, 16);
2017 goto end_op;
2018 case 0x0e: // MULU.W Rm,Rn 0010nnnnmmmm1110
2019 case 0x0f: // MULS.W Rm,Rn 0010nnnnmmmm1111
2020 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_READ);
2021 tmp = rcache_get_reg(SHR_MACL, RC_GR_WRITE);
2022 if (op & 1) {
2023 emith_sext(tmp, tmp2, 16);
2024 } else
2025 emith_clear_msb(tmp, tmp2, 16);
2026 tmp3 = rcache_get_reg(GET_Rm(), RC_GR_READ);
2027 tmp2 = rcache_get_tmp();
2028 if (op & 1) {
2029 emith_sext(tmp2, tmp3, 16);
2030 } else
2031 emith_clear_msb(tmp2, tmp3, 16);
2032 emith_mul(tmp, tmp, tmp2);
2033 rcache_free_tmp(tmp2);
2034 goto end_op;
2035 }
2036 goto default_;
2037
2038 /////////////////////////////////////////////
2039 case 0x03:
2040 switch (op & 0x0f)
2041 {
2042 case 0x00: // CMP/EQ Rm,Rn 0011nnnnmmmm0000
2043 case 0x02: // CMP/HS Rm,Rn 0011nnnnmmmm0010
2044 case 0x03: // CMP/GE Rm,Rn 0011nnnnmmmm0011
2045 case 0x06: // CMP/HI Rm,Rn 0011nnnnmmmm0110
2046 case 0x07: // CMP/GT Rm,Rn 0011nnnnmmmm0111
2047 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2048 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_READ);
2049 tmp3 = rcache_get_reg(GET_Rm(), RC_GR_READ);
2050 emith_bic_r_imm(sr, T);
2051 emith_cmp_r_r(tmp2, tmp3);
2052 switch (op & 0x07)
2053 {
2054 case 0x00: // CMP/EQ
2055 emit_or_t_if_eq(sr);
2056 break;
2057 case 0x02: // CMP/HS
2058 EMITH_SJMP_START(DCOND_LO);
2059 emith_or_r_imm_c(DCOND_HS, sr, T);
2060 EMITH_SJMP_END(DCOND_LO);
2061 break;
2062 case 0x03: // CMP/GE
2063 EMITH_SJMP_START(DCOND_LT);
2064 emith_or_r_imm_c(DCOND_GE, sr, T);
2065 EMITH_SJMP_END(DCOND_LT);
2066 break;
2067 case 0x06: // CMP/HI
2068 EMITH_SJMP_START(DCOND_LS);
2069 emith_or_r_imm_c(DCOND_HI, sr, T);
2070 EMITH_SJMP_END(DCOND_LS);
2071 break;
2072 case 0x07: // CMP/GT
2073 EMITH_SJMP_START(DCOND_LE);
2074 emith_or_r_imm_c(DCOND_GT, sr, T);
2075 EMITH_SJMP_END(DCOND_LE);
2076 break;
2077 }
2078 goto end_op;
2079 case 0x04: // DIV1 Rm,Rn 0011nnnnmmmm0100
2080 // Q1 = carry(Rn = (Rn << 1) | T)
2081 // if Q ^ M
2082 // Q2 = carry(Rn += Rm)
2083 // else
2084 // Q2 = carry(Rn -= Rm)
2085 // Q = M ^ Q1 ^ Q2
2086 // T = (Q == M) = !(Q ^ M) = !(Q1 ^ Q2)
2087 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2088 tmp3 = rcache_get_reg(GET_Rm(), RC_GR_READ);
2089 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2090 emith_tpop_carry(sr, 0);
2091 emith_adcf_r_r(tmp2, tmp2);
2092 emith_tpush_carry(sr, 0); // keep Q1 in T for now
2093 tmp4 = rcache_get_tmp();
2094 emith_and_r_r_imm(tmp4, sr, M);
2095 emith_eor_r_r_lsr(sr, tmp4, M_SHIFT - Q_SHIFT); // Q ^= M
2096 rcache_free_tmp(tmp4);
2097 // add or sub, invert T if carry to get Q1 ^ Q2
2098 // in: (Q ^ M) passed in Q, Q1 in T
2099 emith_sh2_div1_step(tmp2, tmp3, sr);
2100 emith_bic_r_imm(sr, Q);
2101 emith_tst_r_imm(sr, M);
2102 EMITH_SJMP_START(DCOND_EQ);
2103 emith_or_r_imm_c(DCOND_NE, sr, Q); // Q = M
2104 EMITH_SJMP_END(DCOND_EQ);
2105 emith_tst_r_imm(sr, T);
2106 EMITH_SJMP_START(DCOND_EQ);
2107 emith_eor_r_imm_c(DCOND_NE, sr, Q); // Q = M ^ Q1 ^ Q2
2108 EMITH_SJMP_END(DCOND_EQ);
2109 emith_eor_r_imm(sr, T); // T = !(Q1 ^ Q2)
2110 goto end_op;
2111 case 0x05: // DMULU.L Rm,Rn 0011nnnnmmmm0101
2112 tmp = rcache_get_reg(GET_Rn(), RC_GR_READ);
2113 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
2114 tmp3 = rcache_get_reg(SHR_MACL, RC_GR_WRITE);
2115 tmp4 = rcache_get_reg(SHR_MACH, RC_GR_WRITE);
2116 emith_mul_u64(tmp3, tmp4, tmp, tmp2);
2117 goto end_op;
2118 case 0x08: // SUB Rm,Rn 0011nnnnmmmm1000
2119 case 0x0c: // ADD Rm,Rn 0011nnnnmmmm1100
2120 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2121 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
2122 if (op & 4) {
2123 emith_add_r_r(tmp, tmp2);
2124 } else
2125 emith_sub_r_r(tmp, tmp2);
2126 goto end_op;
2127 case 0x0a: // SUBC Rm,Rn 0011nnnnmmmm1010
2128 case 0x0e: // ADDC Rm,Rn 0011nnnnmmmm1110
2129 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2130 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
2131 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2132 if (op & 4) { // adc
2133 emith_tpop_carry(sr, 0);
2134 emith_adcf_r_r(tmp, tmp2);
2135 emith_tpush_carry(sr, 0);
2136 } else {
2137 emith_tpop_carry(sr, 1);
2138 emith_sbcf_r_r(tmp, tmp2);
2139 emith_tpush_carry(sr, 1);
2140 }
2141 goto end_op;
2142 case 0x0b: // SUBV Rm,Rn 0011nnnnmmmm1011
2143 case 0x0f: // ADDV Rm,Rn 0011nnnnmmmm1111
2144 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2145 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
2146 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2147 emith_bic_r_imm(sr, T);
2148 if (op & 4) {
2149 emith_addf_r_r(tmp, tmp2);
2150 } else
2151 emith_subf_r_r(tmp, tmp2);
2152 EMITH_SJMP_START(DCOND_VC);
2153 emith_or_r_imm_c(DCOND_VS, sr, T);
2154 EMITH_SJMP_END(DCOND_VC);
2155 goto end_op;
2156 case 0x0d: // DMULS.L Rm,Rn 0011nnnnmmmm1101
2157 tmp = rcache_get_reg(GET_Rn(), RC_GR_READ);
2158 tmp2 = rcache_get_reg(GET_Rm(), RC_GR_READ);
2159 tmp3 = rcache_get_reg(SHR_MACL, RC_GR_WRITE);
2160 tmp4 = rcache_get_reg(SHR_MACH, RC_GR_WRITE);
2161 emith_mul_s64(tmp3, tmp4, tmp, tmp2);
2162 goto end_op;
2163 }
2164 goto default_;
2165
2166 /////////////////////////////////////////////
2167 case 0x04:
2168 switch (op & 0x0f)
2169 {
2170 case 0x00:
2171 switch (GET_Fx())
2172 {
2173 case 0: // SHLL Rn 0100nnnn00000000
2174 case 2: // SHAL Rn 0100nnnn00100000
2175 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2176 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2177 emith_tpop_carry(sr, 0); // dummy
2178 emith_lslf(tmp, tmp, 1);
2179 emith_tpush_carry(sr, 0);
2180 goto end_op;
2181 case 1: // DT Rn 0100nnnn00010000
2182 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2183#if 0 // scheduling needs tuning
2184 if (FETCH_OP(pc) == 0x8bfd) { // BF #-2
2185 if (gconst_get(GET_Rn(), &tmp)) {
2186 // XXX: limit burned cycles
2187 emit_move_r_imm32(GET_Rn(), 0);
2188 emith_or_r_imm(sr, T);
2189 cycles += tmp * 4 + 1; // +1 syncs with noconst version, not sure why
2190 skip_op = 1;
2191 }
2192 else
2193 emith_sh2_dtbf_loop();
2194 goto end_op;
2195 }
2196#endif
2197 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2198 emith_bic_r_imm(sr, T);
2199 emith_subf_r_imm(tmp, 1);
2200 emit_or_t_if_eq(sr);
2201 goto end_op;
2202 }
2203 goto default_;
2204 case 0x01:
2205 switch (GET_Fx())
2206 {
2207 case 0: // SHLR Rn 0100nnnn00000001
2208 case 2: // SHAR Rn 0100nnnn00100001
2209 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2210 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2211 emith_tpop_carry(sr, 0); // dummy
2212 if (op & 0x20) {
2213 emith_asrf(tmp, tmp, 1);
2214 } else
2215 emith_lsrf(tmp, tmp, 1);
2216 emith_tpush_carry(sr, 0);
2217 goto end_op;
2218 case 1: // CMP/PZ Rn 0100nnnn00010001
2219 tmp = rcache_get_reg(GET_Rn(), RC_GR_READ);
2220 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2221 emith_bic_r_imm(sr, T);
2222 emith_cmp_r_imm(tmp, 0);
2223 EMITH_SJMP_START(DCOND_LT);
2224 emith_or_r_imm_c(DCOND_GE, sr, T);
2225 EMITH_SJMP_END(DCOND_LT);
2226 goto end_op;
2227 }
2228 goto default_;
2229 case 0x02:
2230 case 0x03:
2231 switch (op & 0x3f)
2232 {
2233 case 0x02: // STS.L MACH,@-Rn 0100nnnn00000010
2234 tmp = SHR_MACH;
2235 break;
2236 case 0x12: // STS.L MACL,@-Rn 0100nnnn00010010
2237 tmp = SHR_MACL;
2238 break;
2239 case 0x22: // STS.L PR,@-Rn 0100nnnn00100010
2240 tmp = SHR_PR;
2241 break;
2242 case 0x03: // STC.L SR,@-Rn 0100nnnn00000011
2243 tmp = SHR_SR;
2244 break;
2245 case 0x13: // STC.L GBR,@-Rn 0100nnnn00010011
2246 tmp = SHR_GBR;
2247 break;
2248 case 0x23: // STC.L VBR,@-Rn 0100nnnn00100011
2249 tmp = SHR_VBR;
2250 break;
2251 default:
2252 goto default_;
2253 }
2254 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2255 emith_sub_r_imm(tmp2, 4);
2256 rcache_clean();
2257 rcache_get_reg_arg(0, GET_Rn());
2258 tmp3 = rcache_get_reg_arg(1, tmp);
2259 if (tmp == SHR_SR)
2260 emith_clear_msb(tmp3, tmp3, 22); // reserved bits defined by ISA as 0
2261 emit_memhandler_write(2);
2262 goto end_op;
2263 case 0x04:
2264 case 0x05:
2265 switch (op & 0x3f)
2266 {
2267 case 0x04: // ROTL Rn 0100nnnn00000100
2268 case 0x05: // ROTR Rn 0100nnnn00000101
2269 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2270 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2271 emith_tpop_carry(sr, 0); // dummy
2272 if (op & 1) {
2273 emith_rorf(tmp, tmp, 1);
2274 } else
2275 emith_rolf(tmp, tmp, 1);
2276 emith_tpush_carry(sr, 0);
2277 goto end_op;
2278 case 0x24: // ROTCL Rn 0100nnnn00100100
2279 case 0x25: // ROTCR Rn 0100nnnn00100101
2280 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2281 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2282 emith_tpop_carry(sr, 0);
2283 if (op & 1) {
2284 emith_rorcf(tmp);
2285 } else
2286 emith_rolcf(tmp);
2287 emith_tpush_carry(sr, 0);
2288 goto end_op;
2289 case 0x15: // CMP/PL Rn 0100nnnn00010101
2290 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2291 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2292 emith_bic_r_imm(sr, T);
2293 emith_cmp_r_imm(tmp, 0);
2294 EMITH_SJMP_START(DCOND_LE);
2295 emith_or_r_imm_c(DCOND_GT, sr, T);
2296 EMITH_SJMP_END(DCOND_LE);
2297 goto end_op;
2298 }
2299 goto default_;
2300 case 0x06:
2301 case 0x07:
2302 switch (op & 0x3f)
2303 {
2304 case 0x06: // LDS.L @Rm+,MACH 0100mmmm00000110
2305 tmp = SHR_MACH;
2306 break;
2307 case 0x16: // LDS.L @Rm+,MACL 0100mmmm00010110
2308 tmp = SHR_MACL;
2309 break;
2310 case 0x26: // LDS.L @Rm+,PR 0100mmmm00100110
2311 tmp = SHR_PR;
2312 break;
2313 case 0x07: // LDC.L @Rm+,SR 0100mmmm00000111
2314 tmp = SHR_SR;
2315 break;
2316 case 0x17: // LDC.L @Rm+,GBR 0100mmmm00010111
2317 tmp = SHR_GBR;
2318 break;
2319 case 0x27: // LDC.L @Rm+,VBR 0100mmmm00100111
2320 tmp = SHR_VBR;
2321 break;
2322 default:
2323 goto default_;
2324 }
2325 rcache_get_reg_arg(0, GET_Rn());
2326 tmp2 = emit_memhandler_read(2);
2327 if (tmp == SHR_SR) {
2328 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2329 emith_write_sr(sr, tmp2);
2330 drcf.test_irq = 1;
2331 } else {
2332 tmp = rcache_get_reg(tmp, RC_GR_WRITE);
2333 emith_move_r_r(tmp, tmp2);
2334 }
2335 rcache_free_tmp(tmp2);
2336 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2337 emith_add_r_imm(tmp, 4);
2338 goto end_op;
2339 case 0x08:
2340 case 0x09:
2341 switch (GET_Fx())
2342 {
2343 case 0:
2344 // SHLL2 Rn 0100nnnn00001000
2345 // SHLR2 Rn 0100nnnn00001001
2346 tmp = 2;
2347 break;
2348 case 1:
2349 // SHLL8 Rn 0100nnnn00011000
2350 // SHLR8 Rn 0100nnnn00011001
2351 tmp = 8;
2352 break;
2353 case 2:
2354 // SHLL16 Rn 0100nnnn00101000
2355 // SHLR16 Rn 0100nnnn00101001
2356 tmp = 16;
2357 break;
2358 default:
2359 goto default_;
2360 }
2361 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2362 if (op & 1) {
2363 emith_lsr(tmp2, tmp2, tmp);
2364 } else
2365 emith_lsl(tmp2, tmp2, tmp);
2366 goto end_op;
2367 case 0x0a:
2368 switch (GET_Fx())
2369 {
2370 case 0: // LDS Rm,MACH 0100mmmm00001010
2371 tmp2 = SHR_MACH;
2372 break;
2373 case 1: // LDS Rm,MACL 0100mmmm00011010
2374 tmp2 = SHR_MACL;
2375 break;
2376 case 2: // LDS Rm,PR 0100mmmm00101010
2377 tmp2 = SHR_PR;
2378 break;
2379 default:
2380 goto default_;
2381 }
2382 emit_move_r_r(tmp2, GET_Rn());
2383 goto end_op;
2384 case 0x0b:
2385 switch (GET_Fx())
2386 {
2387 case 1: // TAS.B @Rn 0100nnnn00011011
2388 // XXX: is TAS working on 32X?
2389 rcache_get_reg_arg(0, GET_Rn());
2390 tmp = emit_memhandler_read(0);
2391 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2392 emith_bic_r_imm(sr, T);
2393 emith_cmp_r_imm(tmp, 0);
2394 emit_or_t_if_eq(sr);
2395 rcache_clean();
2396 emith_or_r_imm(tmp, 0x80);
2397 tmp2 = rcache_get_tmp_arg(1); // assuming it differs from tmp
2398 emith_move_r_r(tmp2, tmp);
2399 rcache_free_tmp(tmp);
2400 rcache_get_reg_arg(0, GET_Rn());
2401 emit_memhandler_write(0);
2402 break;
2403 default:
2404 goto default_;
2405 }
2406 goto end_op;
2407 case 0x0e:
2408 tmp = rcache_get_reg(GET_Rn(), RC_GR_READ);
2409 switch (GET_Fx())
2410 {
2411 case 0: // LDC Rm,SR 0100mmmm00001110
2412 tmp2 = SHR_SR;
2413 break;
2414 case 1: // LDC Rm,GBR 0100mmmm00011110
2415 tmp2 = SHR_GBR;
2416 break;
2417 case 2: // LDC Rm,VBR 0100mmmm00101110
2418 tmp2 = SHR_VBR;
2419 break;
2420 default:
2421 goto default_;
2422 }
2423 if (tmp2 == SHR_SR) {
2424 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2425 emith_write_sr(sr, tmp);
2426 drcf.test_irq = 1;
2427 } else {
2428 tmp2 = rcache_get_reg(tmp2, RC_GR_WRITE);
2429 emith_move_r_r(tmp2, tmp);
2430 }
2431 goto end_op;
2432 case 0x0f:
2433 // MAC.W @Rm+,@Rn+ 0100nnnnmmmm1111
2434 emit_indirect_read_double(&tmp, &tmp2, GET_Rn(), GET_Rm(), 1);
2435 emith_sext(tmp, tmp, 16);
2436 emith_sext(tmp2, tmp2, 16);
2437 tmp3 = rcache_get_reg(SHR_MACL, RC_GR_RMW);
2438 tmp4 = rcache_get_reg(SHR_MACH, RC_GR_RMW);
2439 emith_mula_s64(tmp3, tmp4, tmp, tmp2);
2440 rcache_free_tmp(tmp2);
2441 // XXX: MACH should be untouched when S is set?
2442 sr = rcache_get_reg(SHR_SR, RC_GR_READ);
2443 emith_tst_r_imm(sr, S);
2444 EMITH_JMP_START(DCOND_EQ);
2445
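 // S set: if the 32-bit MACL result overflowed (sign of MACL != MACH),
 // saturate MACL to 0x80000000 / 0x7fffffff depending on the sign in MACH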
2446 emith_asr(tmp, tmp3, 31);
2447 emith_eorf_r_r(tmp, tmp4); // tmp = ((signed)macl >> 31) ^ mach
2448 EMITH_JMP_START(DCOND_EQ);
2449 emith_move_r_imm(tmp3, 0x80000000);
2450 emith_tst_r_r(tmp4, tmp4);
2451 EMITH_SJMP_START(DCOND_MI);
2452 emith_sub_r_imm_c(DCOND_PL, tmp3, 1); // positive
2453 EMITH_SJMP_END(DCOND_MI);
2454 EMITH_JMP_END(DCOND_EQ);
2455
2456 EMITH_JMP_END(DCOND_EQ);
2457 rcache_free_tmp(tmp);
2458 goto end_op;
2459 }
2460 goto default_;
2461
2462 /////////////////////////////////////////////
2463 case 0x05:
2464 // MOV.L @(disp,Rm),Rn 0101nnnnmmmmdddd
2465 emit_memhandler_read_rr(GET_Rn(), GET_Rm(), (op & 0x0f) * 4, 2);
2466 goto end_op;
2467
2468 /////////////////////////////////////////////
2469 case 0x06:
2470 switch (op & 0x0f)
2471 {
2472 case 0x00: // MOV.B @Rm,Rn 0110nnnnmmmm0000
2473 case 0x01: // MOV.W @Rm,Rn 0110nnnnmmmm0001
2474 case 0x02: // MOV.L @Rm,Rn 0110nnnnmmmm0010
2475 case 0x04: // MOV.B @Rm+,Rn 0110nnnnmmmm0100
2476 case 0x05: // MOV.W @Rm+,Rn 0110nnnnmmmm0101
2477 case 0x06: // MOV.L @Rm+,Rn 0110nnnnmmmm0110
2478 emit_memhandler_read_rr(GET_Rn(), GET_Rm(), 0, op & 3);
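 // post-increment forms: advance Rm by the access size,
 // unless Rm doubles as the load destination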
2479 if ((op & 7) >= 4 && GET_Rn() != GET_Rm()) {
2480 tmp = rcache_get_reg(GET_Rm(), RC_GR_RMW);
2481 emith_add_r_imm(tmp, (1 << (op & 3)));
2482 }
2483 goto end_op;
2484 case 0x03:
2485 case 0x07 ... 0x0f:
2486 tmp = rcache_get_reg(GET_Rm(), RC_GR_READ);
2487 tmp2 = rcache_get_reg(GET_Rn(), RC_GR_WRITE);
2488 switch (op & 0x0f)
2489 {
2490 case 0x03: // MOV Rm,Rn 0110nnnnmmmm0011
2491 emith_move_r_r(tmp2, tmp);
2492 break;
2493 case 0x07: // NOT Rm,Rn 0110nnnnmmmm0111
2494 emith_mvn_r_r(tmp2, tmp);
2495 break;
2496 case 0x08: // SWAP.B Rm,Rn 0110nnnnmmmm1000
2497 tmp3 = tmp2;
2498 if (tmp == tmp2)
2499 tmp3 = rcache_get_tmp();
2500 tmp4 = rcache_get_tmp();
2501 emith_lsr(tmp3, tmp, 16);
2502 emith_or_r_r_lsl(tmp3, tmp, 24);
2503 emith_and_r_r_imm(tmp4, tmp, 0xff00);
2504 emith_or_r_r_lsl(tmp3, tmp4, 8);
2505 emith_rol(tmp2, tmp3, 16);
2506 rcache_free_tmp(tmp4);
2507 if (tmp == tmp2)
2508 rcache_free_tmp(tmp3);
2509 break;
2510 case 0x09: // SWAP.W Rm,Rn 0110nnnnmmmm1001
2511 emith_rol(tmp2, tmp, 16);
2512 break;
2513 case 0x0a: // NEGC Rm,Rn 0110nnnnmmmm1010
2514 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2515 emith_tpop_carry(sr, 1);
2516 emith_negcf_r_r(tmp2, tmp);
2517 emith_tpush_carry(sr, 1);
2518 break;
2519 case 0x0b: // NEG Rm,Rn 0110nnnnmmmm1011
2520 emith_neg_r_r(tmp2, tmp);
2521 break;
2522 case 0x0c: // EXTU.B Rm,Rn 0110nnnnmmmm1100
2523 emith_clear_msb(tmp2, tmp, 24);
2524 break;
2525 case 0x0d: // EXTU.W Rm,Rn 0110nnnnmmmm1101
2526 emith_clear_msb(tmp2, tmp, 16);
2527 break;
2528 case 0x0e: // EXTS.B Rm,Rn 0110nnnnmmmm1110
2529 emith_sext(tmp2, tmp, 8);
2530 break;
2531 case 0x0f: // EXTS.W Rm,Rn 0110nnnnmmmm1111
2532 emith_sext(tmp2, tmp, 16);
2533 break;
2534 }
2535 goto end_op;
2536 }
2537 goto default_;
2538
2539 /////////////////////////////////////////////
2540 case 0x07:
2541 // ADD #imm,Rn 0111nnnniiiiiiii
2542 tmp = rcache_get_reg(GET_Rn(), RC_GR_RMW);
2543 if (op & 0x80) { // adding negative
2544 emith_sub_r_imm(tmp, -op & 0xff);
2545 } else
2546 emith_add_r_imm(tmp, op & 0xff);
2547 goto end_op;
2548
2549 /////////////////////////////////////////////
2550 case 0x08:
2551 switch (op & 0x0f00)
2552 {
2553 case 0x0000: // MOV.B R0,@(disp,Rn) 10000000nnnndddd
2554 case 0x0100: // MOV.W R0,@(disp,Rn) 10000001nnnndddd
2555 rcache_clean();
2556 tmp = rcache_get_reg_arg(0, GET_Rm());
2557 tmp2 = rcache_get_reg_arg(1, SHR_R0);
2558 tmp3 = (op & 0x100) >> 8;
2559 if (op & 0x0f)
2560 emith_add_r_imm(tmp, (op & 0x0f) << tmp3);
2561 emit_memhandler_write(tmp3);
2562 goto end_op;
2563 case 0x0400: // MOV.B @(disp,Rm),R0 10000100mmmmdddd
2564 case 0x0500: // MOV.W @(disp,Rm),R0 10000101mmmmdddd
2565 tmp = (op & 0x100) >> 8;
2566 emit_memhandler_read_rr(SHR_R0, GET_Rm(), (op & 0x0f) << tmp, tmp);
2567 goto end_op;
2568 case 0x0800: // CMP/EQ #imm,R0 10001000iiiiiiii
2569 // XXX: could use cmn
2570 tmp = rcache_get_tmp();
2571 tmp2 = rcache_get_reg(0, RC_GR_READ);
2572 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2573 emith_move_r_imm_s8(tmp, op & 0xff);
2574 emith_bic_r_imm(sr, T);
2575 emith_cmp_r_r(tmp2, tmp);
2576 emit_or_t_if_eq(sr);
2577 rcache_free_tmp(tmp);
2578 goto end_op;
2579 }
2580 goto default_;
2581
2582 /////////////////////////////////////////////
2583 case 0x0c:
2584 switch (op & 0x0f00)
2585 {
2586 case 0x0000: // MOV.B R0,@(disp,GBR) 11000000dddddddd
2587 case 0x0100: // MOV.W R0,@(disp,GBR) 11000001dddddddd
2588 case 0x0200: // MOV.L R0,@(disp,GBR) 11000010dddddddd
2589 rcache_clean();
2590 tmp = rcache_get_reg_arg(0, SHR_GBR);
2591 tmp2 = rcache_get_reg_arg(1, SHR_R0);
2592 tmp3 = (op & 0x300) >> 8;
2593 emith_add_r_imm(tmp, (op & 0xff) << tmp3);
2594 emit_memhandler_write(tmp3);
2595 goto end_op;
2596 case 0x0400: // MOV.B @(disp,GBR),R0 11000100dddddddd
2597 case 0x0500: // MOV.W @(disp,GBR),R0 11000101dddddddd
2598 case 0x0600: // MOV.L @(disp,GBR),R0 11000110dddddddd
2599 tmp = (op & 0x300) >> 8;
2600 emit_memhandler_read_rr(SHR_R0, SHR_GBR, (op & 0xff) << tmp, tmp);
2601 goto end_op;
2602 case 0x0300: // TRAPA #imm 11000011iiiiiiii
2603 tmp = rcache_get_reg(SHR_SP, RC_GR_RMW);
2604 emith_sub_r_imm(tmp, 4*2);
2605 // push SR
2606 tmp = rcache_get_reg_arg(0, SHR_SP);
2607 emith_add_r_imm(tmp, 4);
2608 tmp = rcache_get_reg_arg(1, SHR_SR);
2609 emith_clear_msb(tmp, tmp, 22);
2610 emit_memhandler_write(2);
2611 // push PC
2612 rcache_get_reg_arg(0, SHR_SP);
2613 tmp = rcache_get_tmp_arg(1);
2614 emith_move_r_imm(tmp, pc);
2615 emit_memhandler_write(2);
2616 // obtain new PC
2617 emit_memhandler_read_rr(SHR_PC, SHR_VBR, (op & 0xff) * 4, 2);
2618 // indirect jump -> back to dispatcher
2619 rcache_flush();
2620 emith_jump(sh2_drc_dispatcher);
2621 goto end_op;
2622 case 0x0800: // TST #imm,R0 11001000iiiiiiii
2623 tmp = rcache_get_reg(SHR_R0, RC_GR_READ);
2624 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2625 emith_bic_r_imm(sr, T);
2626 emith_tst_r_imm(tmp, op & 0xff);
2627 emit_or_t_if_eq(sr);
2628 goto end_op;
2629 case 0x0900: // AND #imm,R0 11001001iiiiiiii
2630 tmp = rcache_get_reg(SHR_R0, RC_GR_RMW);
2631 emith_and_r_imm(tmp, op & 0xff);
2632 goto end_op;
2633 case 0x0a00: // XOR #imm,R0 11001010iiiiiiii
2634 tmp = rcache_get_reg(SHR_R0, RC_GR_RMW);
2635 emith_eor_r_imm(tmp, op & 0xff);
2636 goto end_op;
2637 case 0x0b00: // OR #imm,R0 11001011iiiiiiii
2638 tmp = rcache_get_reg(SHR_R0, RC_GR_RMW);
2639 emith_or_r_imm(tmp, op & 0xff);
2640 goto end_op;
2641 case 0x0c00: // TST.B #imm,@(R0,GBR) 11001100iiiiiiii
2642 tmp = emit_indirect_indexed_read(SHR_R0, SHR_GBR, 0);
2643 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2644 emith_bic_r_imm(sr, T);
2645 emith_tst_r_imm(tmp, op & 0xff);
2646 emit_or_t_if_eq(sr);
2647 rcache_free_tmp(tmp);
2648 goto end_op;
2649 case 0x0d00: // AND.B #imm,@(R0,GBR) 11001101iiiiiiii
2650 tmp = emit_indirect_indexed_read(SHR_R0, SHR_GBR, 0);
2651 emith_and_r_imm(tmp, op & 0xff);
2652 goto end_rmw_op;
2653 case 0x0e00: // XOR.B #imm,@(R0,GBR) 11001110iiiiiiii
2654 tmp = emit_indirect_indexed_read(SHR_R0, SHR_GBR, 0);
2655 emith_eor_r_imm(tmp, op & 0xff);
2656 goto end_rmw_op;
2657 case 0x0f00: // OR.B #imm,@(R0,GBR) 11001111iiiiiiii
2658 tmp = emit_indirect_indexed_read(SHR_R0, SHR_GBR, 0);
2659 emith_or_r_imm(tmp, op & 0xff);
2660 end_rmw_op:
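 // shared tail for the GBR read-modify-write ops:
 // store the modified byte back to @(R0,GBR)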
2661 tmp2 = rcache_get_tmp_arg(1);
2662 emith_move_r_r(tmp2, tmp);
2663 rcache_free_tmp(tmp);
2664 tmp3 = rcache_get_reg_arg(0, SHR_GBR);
2665 tmp4 = rcache_get_reg(SHR_R0, RC_GR_READ);
2666 emith_add_r_r(tmp3, tmp4);
2667 emit_memhandler_write(0);
2668 goto end_op;
2669 }
2670 goto default_;
2671
2672 /////////////////////////////////////////////
2673 case 0x0e:
2674 // MOV #imm,Rn 1110nnnniiiiiiii
2675 emit_move_r_imm32(GET_Rn(), (u32)(signed int)(signed char)op);
2676 goto end_op;
2677
2678 default:
2679 default_:
2680 if (!(op_flags[i] & OF_B_IN_DS))
2681 elprintf_sh2(sh2, EL_ANOMALY,
2682 "drc: illegal op %04x @ %08x", op, pc - 2);
2683
2684 tmp = rcache_get_reg(SHR_SP, RC_GR_RMW);
2685 emith_sub_r_imm(tmp, 4*2);
2686 // push SR
2687 tmp = rcache_get_reg_arg(0, SHR_SP);
2688 emith_add_r_imm(tmp, 4);
2689 tmp = rcache_get_reg_arg(1, SHR_SR);
2690 emith_clear_msb(tmp, tmp, 22);
2691 emit_memhandler_write(2);
2692 // push PC
2693 rcache_get_reg_arg(0, SHR_SP);
2694 tmp = rcache_get_tmp_arg(1);
2695 if (drcf.pending_branch_indirect) {
2696 tmp2 = rcache_get_reg(SHR_PC, RC_GR_READ);
2697 emith_move_r_r(tmp, tmp2);
2698 }
2699 else
2700 emith_move_r_imm(tmp, pc - 2);
2701 emit_memhandler_write(2);
2702 // obtain new PC
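 // vector 4: general illegal instruction, vector 6: slot illegal instruction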
2703 v = (op_flags[i] & OF_B_IN_DS) ? 6 : 4;
2704 emit_memhandler_read_rr(SHR_PC, SHR_VBR, v * 4, 2);
2705 // indirect jump -> back to dispatcher
2706 rcache_flush();
2707 emith_jump(sh2_drc_dispatcher);
2708 break;
2709 }
2710
2711end_op:
2712 rcache_unlock_all();
2713
2714 cycles += opd->cycles;
2715
2716 if (op_flags[i+1] & OF_DELAY_OP) {
2717 do_host_disasm(tcache_id);
2718 continue;
2719 }
2720
2721 // test irq?
2722 if (drcf.test_irq && !drcf.pending_branch_direct) {
2723 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2724 FLUSH_CYCLES(sr);
2725 if (!drcf.pending_branch_indirect)
2726 emit_move_r_imm32(SHR_PC, pc);
2727 rcache_flush();
2728 emith_call(sh2_drc_test_irq);
2729 drcf.test_irq = 0;
2730 }
2731
2732 // branch handling (with/without delay)
2733 if (drcf.pending_branch_direct)
2734 {
2735 struct op_data *opd_b =
2736 (op_flags[i] & OF_DELAY_OP) ? &ops[i-1] : opd;
2737 u32 target_pc = opd_b->imm;
2738 int cond = -1, ncond = -1;
2739 void *target = NULL;
2740 EMITH_SJMP_DECL_();
2741
2742 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2743 FLUSH_CYCLES(sr);
2744 rcache_clean();
2745
2746 if (opd_b->op != OP_BRANCH) {
2747 cond = (opd_b->op == OP_BRANCH_CF) ? DCOND_EQ : DCOND_NE;
2748 ncond = (opd_b->op == OP_BRANCH_CF) ? DCOND_NE : DCOND_EQ;
2749 }
2750 if (cond != -1) {
2751 int ctaken = (op_flags[i] & OF_DELAY_OP) ? 1 : 2;
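 // extra cycles to charge when the branch is taken (1 if it has a delay
 // slot, 2 otherwise); the cycle counter lives in SR bits 12 and up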
2752
2753 if (delay_dep_fw & BITMASK1(SHR_T))
2754 emith_tst_r_imm(sr, T_save);
2755 else
2756 emith_tst_r_imm(sr, T);
2757
2758 EMITH_SJMP_START_(ncond);
2759 emith_sub_r_imm_c(cond, sr, ctaken<<12);
2760 }
2761
2762#if LINK_BRANCHES
2763 if (find_in_array(branch_target_pc, branch_target_count, target_pc) >= 0)
2764 {
2765 // local branch
2766 // XXX: jumps back can be linked already
2767 if (branch_patch_count < MAX_LOCAL_BRANCHES) {
2768 target = tcache_ptr;
2769 branch_patch_pc[branch_patch_count] = target_pc;
2770 branch_patch_ptr[branch_patch_count] = target;
2771 branch_patch_count++;
2772 }
2773 else
2774 dbg(1, "warning: too many local branches");
2775 }
2776
2777 if (target == NULL)
2778#endif
2779 {
2780 // can't resolve branch locally, make a block exit
2781 emit_move_r_imm32(SHR_PC, target_pc);
2782 rcache_clean();
2783
2784 target = dr_prepare_ext_branch(target_pc, sh2->is_slave, tcache_id);
2785 if (target == NULL)
2786 return NULL;
2787 }
2788
2789 if (cond != -1) {
2790 emith_jump_cond_patchable(cond, target);
2791 EMITH_SJMP_END_(ncond);
2792 }
2793 else {
2794 emith_jump_patchable(target);
2795 rcache_invalidate();
2796 }
2797
2798 drcf.pending_branch_direct = 0;
2799 }
2800 else if (drcf.pending_branch_indirect) {
2801 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2802 FLUSH_CYCLES(sr);
2803 rcache_flush();
2804 emith_jump(sh2_drc_dispatcher);
2805 drcf.pending_branch_indirect = 0;
2806 }
2807
2808 do_host_disasm(tcache_id);
2809 }
2810
2811 tmp = rcache_get_reg(SHR_SR, RC_GR_RMW);
2812 FLUSH_CYCLES(tmp);
2813 rcache_flush();
2814
2815 // check the last op
2816 if (op_flags[i-1] & OF_DELAY_OP)
2817 opd = &ops[i-2];
2818 else
2819 opd = &ops[i-1];
2820
2821 if (opd->op != OP_BRANCH && opd->op != OP_BRANCH_R
2822 && opd->op != OP_BRANCH_RF && opd->op != OP_RTE)
2823 {
2824 void *target;
2825
2826 emit_move_r_imm32(SHR_PC, pc);
2827 rcache_flush();
2828
2829 target = dr_prepare_ext_branch(pc, sh2->is_slave, tcache_id);
2830 if (target == NULL)
2831 return NULL;
2832 emith_jump_patchable(target);
2833 }
2834
2835 // link local branches
2836 for (i = 0; i < branch_patch_count; i++) {
2837 void *target;
2838 int t;
2839 t = find_in_array(branch_target_pc, branch_target_count, branch_patch_pc[i]);
2840 target = branch_target_ptr[t];
2841 if (target == NULL) {
2842 // flush pc and go back to dispatcher (this should no longer happen)
2843 dbg(1, "stray branch to %08x %p", branch_patch_pc[i], tcache_ptr);
2844 target = tcache_ptr;
2845 emit_move_r_imm32(SHR_PC, branch_patch_pc[i]);
2846 rcache_flush();
2847 emith_jump(sh2_drc_dispatcher);
2848 }
2849 emith_jump_patch(branch_patch_ptr[i], target);
2850 }
2851
2852 // mark memory blocks as containing compiled code
2853 // overwrite marks of any overlapping blocks, since those become unreachable anyway
2854 if ((block->addr & 0xc7fc0000) == 0x06000000
2855 || (block->addr & 0xfffff000) == 0xc0000000)
2856 {
2857 u16 *drc_ram_blk = NULL;
2858 u32 addr, mask = 0, shift = 0;
2859
2860 if (tcache_id != 0) {
2861 // data array, BIOS
2862 drc_ram_blk = Pico32xMem->drcblk_da[sh2->is_slave];
2863 shift = SH2_DRCBLK_DA_SHIFT;
2864 mask = 0xfff;
2865 }
2866 else {
2867 // SDRAM
2868 drc_ram_blk = Pico32xMem->drcblk_ram;
2869 shift = SH2_DRCBLK_RAM_SHIFT;
2870 mask = 0x3ffff;
2871 }
2872
2873 // mark recompiled insns
2874 drc_ram_blk[(base_pc & mask) >> shift] = 1;
2875 for (pc = base_pc; pc < end_pc; pc += 2)
2876 drc_ram_blk[(pc & mask) >> shift] = 1;
2877
2878 // mark literals
2879 for (i = 0; i < literal_addr_count; i++) {
2880 tmp = literal_addr[i];
2881 drc_ram_blk[(tmp & mask) >> shift] = 1;
2882 }
2883
2884 // add to invalidation lookup lists
2885 addr = base_pc & ~(INVAL_PAGE_SIZE - 1);
2886 for (; addr < end_literals; addr += INVAL_PAGE_SIZE) {
2887 i = (addr & mask) / INVAL_PAGE_SIZE;
2888 add_to_block_list(&inval_lookup[tcache_id][i], block);
2889 }
2890 }
2891
2892 tcache_ptrs[tcache_id] = tcache_ptr;
2893
2894 host_instructions_updated(block_entry_ptr, tcache_ptr);
2895
2896 do_host_disasm(tcache_id);
2897
2898 if (drcf.literals_disabled && literal_addr_count)
2899 dbg(1, "literals_disabled && literal_addr_count?");
2900 dbg(2, " block #%d,%d tcache %d/%d, insns %d -> %d %.3f",
2901 tcache_id, blkid_main,
2902 tcache_ptr - tcache_bases[tcache_id], tcache_sizes[tcache_id],
2903 insns_compiled, host_insn_count, (float)host_insn_count / insns_compiled);
2904 if ((sh2->pc & 0xc6000000) == 0x02000000) // ROM
2905 dbg(2, " hash collisions %d/%d", hash_collisions, block_counts[tcache_id]);
2906/*
2907 printf("~~~\n");
2908 tcache_dsm_ptrs[tcache_id] = block_entry_ptr;
2909 do_host_disasm(tcache_id);
2910 printf("~~~\n");
2911*/
2912
2913#if (DRC_DEBUG & 4)
2914 fflush(stdout);
2915#endif
2916
2917 return block_entry_ptr;
2918}
2919
2920static void sh2_generate_utils(void)
2921{
2922 int arg0, arg1, arg2, sr, tmp;
2923
2924 sh2_drc_write32 = p32x_sh2_write32;
2925 sh2_drc_read8 = p32x_sh2_read8;
2926 sh2_drc_read16 = p32x_sh2_read16;
2927 sh2_drc_read32 = p32x_sh2_read32;
2928
2929 host_arg2reg(arg0, 0);
2930 host_arg2reg(arg1, 1);
2931 host_arg2reg(arg2, 2);
2932 emith_move_r_r(arg0, arg0); // nop
2933
2934 // sh2_drc_exit(void)
2935 sh2_drc_exit = (void *)tcache_ptr;
2936 emit_do_static_regs(1, arg2);
2937 emith_sh2_drc_exit();
2938
2939 // sh2_drc_dispatcher(void)
2940 sh2_drc_dispatcher = (void *)tcache_ptr;
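 // exit to the caller when the cycle counter in SR drops below zero,
 // otherwise look up (or translate) the block for the current PC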
2941 sr = rcache_get_reg(SHR_SR, RC_GR_READ);
2942 emith_cmp_r_imm(sr, 0);
2943 emith_jump_cond(DCOND_LT, sh2_drc_exit);
2944 rcache_invalidate();
2945 emith_ctx_read(arg0, SHR_PC * 4);
2946 emith_ctx_read(arg1, offsetof(SH2, is_slave));
2947 emith_add_r_r_ptr_imm(arg2, CONTEXT_REG, offsetof(SH2, drc_tmp));
2948 emith_call(dr_lookup_block);
2949 emit_block_entry();
2950 // lookup failed, call sh2_translate()
2951 emith_move_r_r_ptr(arg0, CONTEXT_REG);
2952 emith_ctx_read(arg1, offsetof(SH2, drc_tmp)); // tcache_id
2953 emith_call(sh2_translate);
2954 emit_block_entry();
2955 // sh2_translate() failed, flush cache and retry
2956 emith_ctx_read(arg0, offsetof(SH2, drc_tmp));
2957 emith_call(flush_tcache);
2958 emith_move_r_r_ptr(arg0, CONTEXT_REG);
2959 emith_ctx_read(arg1, offsetof(SH2, drc_tmp));
2960 emith_call(sh2_translate);
2961 emit_block_entry();
2962 // XXX: can't translate, fail
2963 emith_call(dr_failure);
2964
2965 // sh2_drc_test_irq(void)
2966 // assumes it's called from main function (may jump to dispatcher)
2967 sh2_drc_test_irq = (void *)tcache_ptr;
2968 emith_ctx_read(arg1, offsetof(SH2, pending_level));
2969 sr = rcache_get_reg(SHR_SR, RC_GR_READ);
2970 emith_lsr(arg0, sr, I_SHIFT);
2971 emith_and_r_imm(arg0, 0x0f);
2972 emith_cmp_r_r(arg1, arg0); // pending_level > ((sr >> 4) & 0x0f)?
2973 EMITH_SJMP_START(DCOND_GT);
2974 emith_ret_c(DCOND_LE); // nope, return
2975 EMITH_SJMP_END(DCOND_GT);
2976 // adjust SP
2977 tmp = rcache_get_reg(SHR_SP, RC_GR_RMW);
2978 emith_sub_r_imm(tmp, 4*2);
2979 rcache_clean();
2980 // push SR
2981 tmp = rcache_get_reg_arg(0, SHR_SP);
2982 emith_add_r_imm(tmp, 4);
2983 tmp = rcache_get_reg_arg(1, SHR_SR);
2984 emith_clear_msb(tmp, tmp, 22);
2985 emith_move_r_r_ptr(arg2, CONTEXT_REG);
2986 emith_call(p32x_sh2_write32); // XXX: use sh2_drc_write32?
2987 rcache_invalidate();
2988 // push PC
2989 rcache_get_reg_arg(0, SHR_SP);
2990 emith_ctx_read(arg1, SHR_PC * 4);
2991 emith_move_r_r_ptr(arg2, CONTEXT_REG);
2992 emith_call(p32x_sh2_write32);
2993 rcache_invalidate();
2994 // update I, cycles, do callback
2995 emith_ctx_read(arg1, offsetof(SH2, pending_level));
2996 sr = rcache_get_reg(SHR_SR, RC_GR_RMW);
2997 emith_bic_r_imm(sr, I);
2998 emith_or_r_r_lsl(sr, arg1, I_SHIFT);
2999 emith_sub_r_imm(sr, 13 << 12); // at least 13 cycles
3000 rcache_flush();
3001 emith_move_r_r_ptr(arg0, CONTEXT_REG);
3002 emith_call_ctx(offsetof(SH2, irq_callback)); // vector = sh2->irq_callback(sh2, level);
3003 // obtain new PC
3004 emith_lsl(arg0, RET_REG, 2);
3005 emith_ctx_read(arg1, SHR_VBR * 4);
3006 emith_add_r_r(arg0, arg1);
3007 tmp = emit_memhandler_read(2);
3008 emith_ctx_write(tmp, SHR_PC * 4);
3009#if defined(__i386__) || defined(__x86_64__)
3010 emith_add_r_r_ptr_imm(xSP, xSP, sizeof(void *)); // fix stack
3011#endif
3012 emith_jump(sh2_drc_dispatcher);
3013 rcache_invalidate();
3014
3015 // sh2_drc_entry(SH2 *sh2)
3016 sh2_drc_entry = (void *)tcache_ptr;
3017 emith_sh2_drc_entry();
3018 emith_move_r_r_ptr(CONTEXT_REG, arg0); // move ctx, arg0
3019 emit_do_static_regs(0, arg2);
3020 emith_call(sh2_drc_test_irq);
3021 emith_jump(sh2_drc_dispatcher);
3022
3023 // sh2_drc_write8(u32 a, u32 d)
3024 sh2_drc_write8 = (void *)tcache_ptr;
3025 emith_ctx_read_ptr(arg2, offsetof(SH2, write8_tab));
3026 emith_sh2_wcall(arg0, arg2);
3027
3028 // sh2_drc_write16(u32 a, u32 d)
3029 sh2_drc_write16 = (void *)tcache_ptr;
3030 emith_ctx_read_ptr(arg2, offsetof(SH2, write16_tab));
3031 emith_sh2_wcall(arg0, arg2);
3032
3033#ifdef PDB_NET
3034 // debug
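 // wrappers that fold all transferred data into pdb_io_csum, apparently so
 // I/O activity can be checksummed and compared between runs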
3035 #define MAKE_READ_WRAPPER(func) { \
3036 void *tmp = (void *)tcache_ptr; \
3037 emith_push_ret(); \
3038 emith_call(func); \
3039 emith_ctx_read(arg2, offsetof(SH2, pdb_io_csum[0])); \
3040 emith_addf_r_r(arg2, arg0); \
3041 emith_ctx_write(arg2, offsetof(SH2, pdb_io_csum[0])); \
3042 emith_ctx_read(arg2, offsetof(SH2, pdb_io_csum[1])); \
3043 emith_adc_r_imm(arg2, 0x01000000); \
3044 emith_ctx_write(arg2, offsetof(SH2, pdb_io_csum[1])); \
3045 emith_pop_and_ret(); \
3046 func = tmp; \
3047 }
3048 #define MAKE_WRITE_WRAPPER(func) { \
3049 void *tmp = (void *)tcache_ptr; \
3050 emith_ctx_read(arg2, offsetof(SH2, pdb_io_csum[0])); \
3051 emith_addf_r_r(arg2, arg1); \
3052 emith_ctx_write(arg2, offsetof(SH2, pdb_io_csum[0])); \
3053 emith_ctx_read(arg2, offsetof(SH2, pdb_io_csum[1])); \
3054 emith_adc_r_imm(arg2, 0x01000000); \
3055 emith_ctx_write(arg2, offsetof(SH2, pdb_io_csum[1])); \
3056 emith_move_r_r_ptr(arg2, CONTEXT_REG); \
3057 emith_jump(func); \
3058 func = tmp; \
3059 }
3060
3061 MAKE_READ_WRAPPER(sh2_drc_read8);
3062 MAKE_READ_WRAPPER(sh2_drc_read16);
3063 MAKE_READ_WRAPPER(sh2_drc_read32);
3064 MAKE_WRITE_WRAPPER(sh2_drc_write8);
3065 MAKE_WRITE_WRAPPER(sh2_drc_write16);
3066 MAKE_WRITE_WRAPPER(sh2_drc_write32);
3067#if (DRC_DEBUG & 4)
3068 host_dasm_new_symbol(sh2_drc_read8);
3069 host_dasm_new_symbol(sh2_drc_read16);
3070 host_dasm_new_symbol(sh2_drc_read32);
3071 host_dasm_new_symbol(sh2_drc_write32);
3072#endif
3073#endif
3074
3075 rcache_invalidate();
3076#if (DRC_DEBUG & 4)
3077 host_dasm_new_symbol(sh2_drc_entry);
3078 host_dasm_new_symbol(sh2_drc_dispatcher);
3079 host_dasm_new_symbol(sh2_drc_exit);
3080 host_dasm_new_symbol(sh2_drc_test_irq);
3081 host_dasm_new_symbol(sh2_drc_write8);
3082 host_dasm_new_symbol(sh2_drc_write16);
3083#endif
3084}
3085
3086static void sh2_smc_rm_block(struct block_desc *bd, int tcache_id, u32 ram_mask)
3087{
3088 u32 i, addr, end_addr;
3089 void *tmp;
3090
3091 dbg(2, " killing block %08x-%08x-%08x, blkid %d,%d",
3092 bd->addr, bd->addr + bd->size_nolit, bd->addr + bd->size,
3093 tcache_id, bd - block_tables[tcache_id]);
3094 if (bd->addr == 0 || bd->entry_count == 0) {
3095 dbg(1, " killing dead block!? %08x", bd->addr);
3096 return;
3097 }
3098
3099 // remove from inval_lookup
3100 addr = bd->addr & ~(INVAL_PAGE_SIZE - 1);
3101 end_addr = bd->addr + bd->size;
3102 for (; addr < end_addr; addr += INVAL_PAGE_SIZE) {
3103 i = (addr & ram_mask) / INVAL_PAGE_SIZE;
3104 rm_from_block_list(&inval_lookup[tcache_id][i], bd);
3105 }
3106
3107 tmp = tcache_ptr;
3108
3109 // remove from hash table, make incoming links unresolved
3110 // XXX: maybe patch branches w/flush instead?
3111 for (i = 0; i < bd->entry_count; i++) {
3112 rm_from_hashlist(&bd->entryp[i], tcache_id);
3113
3114 // since we never reuse tcache space of dead blocks,
3115 // insert jump to dispatcher for blocks that are linked to this
3116 tcache_ptr = bd->entryp[i].tcache_ptr;
3117 emit_move_r_imm32(SHR_PC, bd->entryp[i].pc);
3118 rcache_flush();
3119 emith_jump(sh2_drc_dispatcher);
3120
3121 host_instructions_updated(bd->entryp[i].tcache_ptr, tcache_ptr);
3122
3123 unregister_links(&bd->entryp[i], tcache_id);
3124 }
3125
3126 tcache_ptr = tmp;
3127
3128 bd->addr = bd->size = bd->size_nolit = 0;
3129 bd->entry_count = 0;
3130}
3131
3132/*
313304205:243: == msh2 block #0,200 060017a8-060017f0 -> 0x27cb9c
3134 060017a8 d11c MOV.L @($70,PC),R1 ; @$0600181c
3135
313604230:261: msh2 xsh w32 [260017a8] d225e304
313704230:261: msh2 smc check @260017a8
313804239:226: = ssh2 enter 060017a8 0x27cb9c, c=173
3139*/
3140static void sh2_smc_rm_blocks(u32 a, u16 *drc_ram_blk, int tcache_id, u32 shift, u32 mask)
3141{
3142 struct block_list **blist = NULL, *entry;
3143 struct block_desc *block;
3144 u32 start_addr, end_addr, taddr, i;
3145 u32 from = ~0, to = 0;
3146
3147 // ignore cache-through
3148 a &= ~0x20000000;
3149
3150 blist = &inval_lookup[tcache_id][(a & mask) / INVAL_PAGE_SIZE];
3151 entry = *blist;
3152 while (entry != NULL) {
3153 block = entry->block;
3154 start_addr = block->addr & ~0x20000000;
3155 end_addr = start_addr + block->size;
3156 if (start_addr <= a && a < end_addr) {
3157 // get addr range that includes all removed blocks
3158 if (from > start_addr)
3159 from = start_addr;
3160 if (to < end_addr)
3161 to = end_addr;
3162
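 // the write landed in this block's literal area; back off
 // literal handling for a few frames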
3163 if (a >= start_addr + block->size_nolit)
3164 literal_disabled_frames = 3;
3165 sh2_smc_rm_block(block, tcache_id, mask);
3166
3167 // entry lost, restart search
3168 entry = *blist;
3169 continue;
3170 }
3171 entry = entry->next;
3172 }
3173
3174 if (from >= to)
3175 return;
3176
3177 // update range around a to match latest state
3178 from &= ~(INVAL_PAGE_SIZE - 1);
3179 to |= (INVAL_PAGE_SIZE - 1);
3180 for (taddr = from; taddr < to; taddr += INVAL_PAGE_SIZE) {
3181 i = (taddr & mask) / INVAL_PAGE_SIZE;
3182 entry = inval_lookup[tcache_id][i];
3183
3184 for (; entry != NULL; entry = entry->next) {
3185 block = entry->block;
3186
3187 start_addr = block->addr & ~0x20000000;
3188 if (start_addr > a) {
3189 if (to > start_addr)
3190 to = start_addr;
3191 }
3192 else {
3193 end_addr = start_addr + block->size;
3194 if (from < end_addr)
3195 from = end_addr;
3196 }
3197 }
3198 }
3199
3200 // clear code marks
3201 if (from < to) {
3202 u16 *p = drc_ram_blk + ((from & mask) >> shift);
3203 memset(p, 0, (to - from) >> (shift - 1));
3204 }
3205}
3206
3207void sh2_drc_wcheck_ram(unsigned int a, int val, int cpuid)
3208{
3209 dbg(2, "%csh2 smc check @%08x", cpuid ? 's' : 'm', a);
3210 sh2_smc_rm_blocks(a, Pico32xMem->drcblk_ram, 0, SH2_DRCBLK_RAM_SHIFT, 0x3ffff);
3211}
3212
3213void sh2_drc_wcheck_da(unsigned int a, int val, int cpuid)
3214{
3215 dbg(2, "%csh2 smc check @%08x", cpuid ? 's' : 'm', a);
3216 sh2_smc_rm_blocks(a, Pico32xMem->drcblk_da[cpuid],
3217 1 + cpuid, SH2_DRCBLK_DA_SHIFT, 0xfff);
3218}
3219
3220int sh2_execute_drc(SH2 *sh2c, int cycles)
3221{
3222 int ret_cycles;
3223
3224 // cycles are kept in SHR_SR unused bits (upper 20)
3225 // bit11 contains T saved for delay slot
3226 // others are usual SH2 flags
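 // informal layout as used by the DRC: bits 31..12 = cycle counter,
 // bit 11 = T_save; the 0x3f3 mask below keeps the architectural
 // T, S, I3-I0, Q, M bits only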
3227 sh2c->sr &= 0x3f3;
3228 sh2c->sr |= cycles << 12;
3229 sh2_drc_entry(sh2c);
3230
3231 // TODO: irq cycles
3232 ret_cycles = (signed int)sh2c->sr >> 12;
3233 if (ret_cycles > 0)
3234 dbg(1, "warning: drc returned with cycles: %d", ret_cycles);
3235
3236 sh2c->sr &= 0x3f3;
3237 return ret_cycles;
3238}
3239
3240#if (DRC_DEBUG & 2)
3241void block_stats(void)
3242{
3243 int c, b, i, total = 0;
3244
3245 printf("block stats:\n");
3246 for (b = 0; b < ARRAY_SIZE(block_tables); b++)
3247 for (i = 0; i < block_counts[b]; i++)
3248 if (block_tables[b][i].addr != 0)
3249 total += block_tables[b][i].refcount;
3250
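 // report the 10 most frequently entered blocks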
3251 for (c = 0; c < 10; c++) {
3252 struct block_desc *blk, *maxb = NULL;
3253 int max = 0;
3254 for (b = 0; b < ARRAY_SIZE(block_tables); b++) {
3255 for (i = 0; i < block_counts[b]; i++) {
3256 blk = &block_tables[b][i];
3257 if (blk->addr != 0 && blk->refcount > max) {
3258 max = blk->refcount;
3259 maxb = blk;
3260 }
3261 }
3262 }
3263 if (maxb == NULL)
3264 break;
3265 printf("%08x %9d %2.3f%%\n", maxb->addr, maxb->refcount,
3266 (double)maxb->refcount / total * 100.0);
3267 maxb->refcount = 0;
3268 }
3269
3270 for (b = 0; b < ARRAY_SIZE(block_tables); b++)
3271 for (i = 0; i < block_counts[b]; i++)
3272 block_tables[b][i].refcount = 0;
3273}
3274#else
3275#define block_stats()
3276#endif
3277
3278void sh2_drc_flush_all(void)
3279{
3280 block_stats();
3281 flush_tcache(0);
3282 flush_tcache(1);
3283 flush_tcache(2);
3284}
3285
3286void sh2_drc_mem_setup(SH2 *sh2)
3287{
3288 // fill the convenience pointers
3289 sh2->p_bios = sh2->is_slave ? Pico32xMem->sh2_rom_s.w : Pico32xMem->sh2_rom_m.w;
3290 sh2->p_da = sh2->data_array;
3291 sh2->p_sdram = Pico32xMem->sdram;
3292 sh2->p_rom = Pico.rom;
3293}
3294
3295void sh2_drc_frame(void)
3296{
3297 if (literal_disabled_frames > 0)
3298 literal_disabled_frames--;
3299}
3300
3301int sh2_drc_init(SH2 *sh2)
3302{
3303 int i;
3304
3305 if (block_tables[0] == NULL)
3306 {
3307 for (i = 0; i < TCACHE_BUFFERS; i++) {
3308 block_tables[i] = calloc(block_max_counts[i], sizeof(*block_tables[0]));
3309 if (block_tables[i] == NULL)
3310 goto fail;
3311 // max 2 block links (exits) per block
3312 block_link_pool[i] = calloc(block_link_pool_max_counts[i],
3313 sizeof(*block_link_pool[0]));
3314 if (block_link_pool[i] == NULL)
3315 goto fail;
3316
3317 inval_lookup[i] = calloc(ram_sizes[i] / INVAL_PAGE_SIZE,
3318 sizeof(*inval_lookup[0]));
3319 if (inval_lookup[i] == NULL)
3320 goto fail;
3321
3322 hash_tables[i] = calloc(hash_table_sizes[i], sizeof(*hash_tables[0]));
3323 if (hash_tables[i] == NULL)
3324 goto fail;
3325 }
3326 memset(block_counts, 0, sizeof(block_counts));
3327 memset(block_link_pool_counts, 0, sizeof(block_link_pool_counts));
3328
3329 drc_cmn_init();
3330 tcache_ptr = tcache;
3331 sh2_generate_utils();
3332 host_instructions_updated(tcache, tcache_ptr);
3333
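 // lay the translation caches out back to back: buffer 0 (SDRAM/ROM code)
 // starts right after the utils; buffers 1 and 2 appear to hold data
 // array/BIOS code for the two SH2s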
3334 tcache_bases[0] = tcache_ptrs[0] = tcache_ptr;
3335 for (i = 1; i < ARRAY_SIZE(tcache_bases); i++)
3336 tcache_bases[i] = tcache_ptrs[i] = tcache_bases[i - 1] + tcache_sizes[i - 1];
3337
3338#if (DRC_DEBUG & 4)
3339 for (i = 0; i < ARRAY_SIZE(block_tables); i++)
3340 tcache_dsm_ptrs[i] = tcache_bases[i];
3341 // disasm the utils
3342 tcache_dsm_ptrs[0] = tcache;
3343 do_host_disasm(0);
3344#endif
3345#if (DRC_DEBUG & 1)
3346 hash_collisions = 0;
3347#endif
3348 }
3349
3350 return 0;
3351
3352fail:
3353 sh2_drc_finish(sh2);
3354 return -1;
3355}
3356
3357void sh2_drc_finish(SH2 *sh2)
3358{
3359 int i;
3360
3361 if (block_tables[0] == NULL)
3362 return;
3363
3364 sh2_drc_flush_all();
3365
3366 for (i = 0; i < TCACHE_BUFFERS; i++) {
3367#if (DRC_DEBUG & 4)
3368 printf("~~~ tcache %d\n", i);
3369 tcache_dsm_ptrs[i] = tcache_bases[i];
3370 tcache_ptr = tcache_ptrs[i];
3371 do_host_disasm(i);
3372#endif
3373
3374 if (block_tables[i] != NULL)
3375 free(block_tables[i]);
3376 block_tables[i] = NULL;
3377 if (block_link_pool[i] != NULL)
3378 free(block_link_pool[i]);
3379 block_link_pool[i] = NULL;
3380
3381 if (inval_lookup[i] != NULL)
3382 free(inval_lookup[i]);
3383 inval_lookup[i] = NULL;
3384
3385 if (hash_tables[i] != NULL) {
3386 free(hash_tables[i]);
3387 hash_tables[i] = NULL;
3388 }
3389 }
3390
3391 drc_cmn_cleanup();
3392}
3393
3394#endif /* DRC_SH2 */
3395
3396static void *dr_get_pc_base(u32 pc, int is_slave)
3397{
3398 void *ret = NULL;
3399 u32 mask = 0;
3400
3401 if ((pc & ~0x7ff) == 0) {
3402 // BIOS
3403 ret = is_slave ? Pico32xMem->sh2_rom_s.w : Pico32xMem->sh2_rom_m.w;
3404 mask = 0x7ff;
3405 }
3406 else if ((pc & 0xfffff000) == 0xc0000000) {
3407 // data array
3408 ret = sh2s[is_slave].data_array;
3409 mask = 0xfff;
3410 }
3411 else if ((pc & 0xc6000000) == 0x06000000) {
3412 // SDRAM
3413 ret = Pico32xMem->sdram;
3414 mask = 0x03ffff;
3415 }
3416 else if ((pc & 0xc6000000) == 0x02000000) {
3417 // ROM
3418 if ((pc & 0x3fffff) < Pico.romsize)
3419 ret = Pico.rom;
3420 mask = 0x3fffff;
3421 }
3422
3423 if (ret == NULL)
3424 return (void *)-1; // NULL is a valid base value, so signal failure with -1
3425
3426 return (char *)ret - (pc & ~mask);
3427}
3428
3429void scan_block(u32 base_pc, int is_slave, u8 *op_flags, u32 *end_pc_out,
3430 u32 *end_literals_out)
3431{
3432 u16 *dr_pc_base;
3433 u32 pc, op, tmp;
3434 u32 end_pc, end_literals = 0;
3435 u32 lowest_mova = 0;
3436 struct op_data *opd;
3437 int next_is_delay = 0;
3438 int end_block = 0;
3439 int i, i_end;
3440
3441 memset(op_flags, 0, BLOCK_INSN_LIMIT);
3442
3443 dr_pc_base = dr_get_pc_base(base_pc, is_slave);
3444
3445 // 1st pass: disassemble
3446 for (i = 0, pc = base_pc; ; i++, pc += 2) {
3447 // we need an ops[] entry after the last one initialized,
3448 // so do it before end_block checks
3449 opd = &ops[i];
3450 opd->op = OP_UNHANDLED;
3451 opd->rm = -1;
3452 opd->source = opd->dest = 0;
3453 opd->cycles = 1;
3454 opd->imm = 0;
3455
3456 if (next_is_delay) {
3457 op_flags[i] |= OF_DELAY_OP;
3458 next_is_delay = 0;
3459 }
3460 else if (end_block || i >= BLOCK_INSN_LIMIT - 2)
3461 break;
3462
3463 op = FETCH_OP(pc);
3464 switch ((op & 0xf000) >> 12)
3465 {
3466 /////////////////////////////////////////////
3467 case 0x00:
3468 switch (op & 0x0f)
3469 {
3470 case 0x02:
3471 switch (GET_Fx())
3472 {
3473 case 0: // STC SR,Rn 0000nnnn00000010
3474 tmp = SHR_SR;
3475 break;
3476 case 1: // STC GBR,Rn 0000nnnn00010010
3477 tmp = SHR_GBR;
3478 break;
3479 case 2: // STC VBR,Rn 0000nnnn00100010
3480 tmp = SHR_VBR;
3481 break;
3482 default:
3483 goto undefined;
3484 }
3485 opd->op = OP_MOVE;
3486 opd->source = BITMASK1(tmp);
3487 opd->dest = BITMASK1(GET_Rn());
3488 break;
3489 case 0x03:
3490 CHECK_UNHANDLED_BITS(0xd0, undefined);
3491 // BRAF Rm 0000mmmm00100011
3492 // BSRF Rm 0000mmmm00000011
3493 opd->op = OP_BRANCH_RF;
3494 opd->rm = GET_Rn();
3495 opd->source = BITMASK1(opd->rm);
3496 opd->dest = BITMASK1(SHR_PC);
3497 if (!(op & 0x20))
3498 opd->dest |= BITMASK1(SHR_PR);
3499 opd->cycles = 2;
3500 next_is_delay = 1;
3501 end_block = 1;
3502 break;
3503 case 0x04: // MOV.B Rm,@(R0,Rn) 0000nnnnmmmm0100
3504 case 0x05: // MOV.W Rm,@(R0,Rn) 0000nnnnmmmm0101
3505 case 0x06: // MOV.L Rm,@(R0,Rn) 0000nnnnmmmm0110
3506 opd->source = BITMASK3(GET_Rm(), SHR_R0, GET_Rn());
3507 break;
3508 case 0x07:
3509 // MUL.L Rm,Rn 0000nnnnmmmm0111
3510 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3511 opd->dest = BITMASK1(SHR_MACL);
3512 opd->cycles = 2;
3513 break;
3514 case 0x08:
3515 CHECK_UNHANDLED_BITS(0xf00, undefined);
3516 switch (GET_Fx())
3517 {
3518 case 0: // CLRT 0000000000001000
3519 opd->op = OP_SETCLRT;
3520 opd->dest = BITMASK1(SHR_T);
3521 opd->imm = 0;
3522 break;
3523 case 1: // SETT 0000000000011000
3524 opd->op = OP_SETCLRT;
3525 opd->dest = BITMASK1(SHR_T);
3526 opd->imm = 1;
3527 break;
3528 case 2: // CLRMAC 0000000000101000
3529 opd->dest = BITMASK3(SHR_T, SHR_MACL, SHR_MACH);
3530 break;
3531 default:
3532 goto undefined;
3533 }
3534 break;
3535 case 0x09:
3536 switch (GET_Fx())
3537 {
3538 case 0: // NOP 0000000000001001
3539 CHECK_UNHANDLED_BITS(0xf00, undefined);
3540 break;
3541 case 1: // DIV0U 0000000000011001
3542 CHECK_UNHANDLED_BITS(0xf00, undefined);
3543 opd->dest = BITMASK2(SHR_SR, SHR_T);
3544 break;
3545 case 2: // MOVT Rn 0000nnnn00101001
3546 opd->source = BITMASK1(SHR_T);
3547 opd->dest = BITMASK1(GET_Rn());
3548 break;
3549 default:
3550 goto undefined;
3551 }
3552 break;
3553 case 0x0a:
3554 switch (GET_Fx())
3555 {
3556 case 0: // STS MACH,Rn 0000nnnn00001010
3557 tmp = SHR_MACH;
3558 break;
3559 case 1: // STS MACL,Rn 0000nnnn00011010
3560 tmp = SHR_MACL;
3561 break;
3562 case 2: // STS PR,Rn 0000nnnn00101010
3563 tmp = SHR_PR;
3564 break;
3565 default:
3566 goto undefined;
3567 }
3568 opd->op = OP_MOVE;
3569 opd->source = BITMASK1(tmp);
3570 opd->dest = BITMASK1(GET_Rn());
3571 break;
3572 case 0x0b:
3573 CHECK_UNHANDLED_BITS(0xf00, undefined);
3574 switch (GET_Fx())
3575 {
3576 case 0: // RTS 0000000000001011
3577 opd->op = OP_BRANCH_R;
3578 opd->rm = SHR_PR;
3579 opd->source = BITMASK1(opd->rm);
3580 opd->dest = BITMASK1(SHR_PC);
3581 opd->cycles = 2;
3582 next_is_delay = 1;
3583 end_block = 1;
3584 break;
3585 case 1: // SLEEP 0000000000011011
3586 opd->op = OP_SLEEP;
3587 end_block = 1;
3588 break;
3589 case 2: // RTE 0000000000101011
3590 opd->op = OP_RTE;
3591 opd->source = BITMASK1(SHR_SP);
3592 opd->dest = BITMASK2(SHR_SR, SHR_PC);
3593 opd->cycles = 4;
3594 next_is_delay = 1;
3595 end_block = 1;
3596 break;
3597 default:
3598 goto undefined;
3599 }
3600 break;
3601 case 0x0c: // MOV.B @(R0,Rm),Rn 0000nnnnmmmm1100
3602 case 0x0d: // MOV.W @(R0,Rm),Rn 0000nnnnmmmm1101
3603 case 0x0e: // MOV.L @(R0,Rm),Rn 0000nnnnmmmm1110
3604 opd->source = BITMASK2(GET_Rm(), SHR_R0);
3605 opd->dest = BITMASK1(GET_Rn());
3606 break;
3607 case 0x0f: // MAC.L @Rm+,@Rn+ 0000nnnnmmmm1111
3608 opd->source = BITMASK5(GET_Rm(), GET_Rn(), SHR_SR, SHR_MACL, SHR_MACH);
3609 opd->dest = BITMASK4(GET_Rm(), GET_Rn(), SHR_MACL, SHR_MACH);
3610 opd->cycles = 3;
3611 break;
3612 default:
3613 goto undefined;
3614 }
3615 break;
3616
3617 /////////////////////////////////////////////
3618 case 0x01:
3619 // MOV.L Rm,@(disp,Rn) 0001nnnnmmmmdddd
3620 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3622 opd->imm = (op & 0x0f) * 4;
3623 break;
3624
3625 /////////////////////////////////////////////
3626 case 0x02:
3627 switch (op & 0x0f)
3628 {
3629 case 0x00: // MOV.B Rm,@Rn 0010nnnnmmmm0000
3630 case 0x01: // MOV.W Rm,@Rn 0010nnnnmmmm0001
3631 case 0x02: // MOV.L Rm,@Rn 0010nnnnmmmm0010
3632 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3634 break;
3635 case 0x04: // MOV.B Rm,@-Rn 0010nnnnmmmm0100
3636 case 0x05: // MOV.W Rm,@-Rn 0010nnnnmmmm0101
3637 case 0x06: // MOV.L Rm,@-Rn 0010nnnnmmmm0110
3638 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3639 opd->dest = BITMASK1(GET_Rn());
3640 break;
3641 case 0x07: // DIV0S Rm,Rn 0010nnnnmmmm0111
3642 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3643 opd->dest = BITMASK1(SHR_SR);
3644 break;
3645 case 0x08: // TST Rm,Rn 0010nnnnmmmm1000
3646 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3647 opd->dest = BITMASK1(SHR_T);
3648 break;
3649 case 0x09: // AND Rm,Rn 0010nnnnmmmm1001
3650 case 0x0a: // XOR Rm,Rn 0010nnnnmmmm1010
3651 case 0x0b: // OR Rm,Rn 0010nnnnmmmm1011
3652 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3653 opd->dest = BITMASK1(GET_Rn());
3654 break;
3655 case 0x0c: // CMP/STR Rm,Rn 0010nnnnmmmm1100
3656 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3657 opd->dest = BITMASK1(SHR_T);
3658 break;
3659 case 0x0d: // XTRCT Rm,Rn 0010nnnnmmmm1101
3660 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3661 opd->dest = BITMASK1(GET_Rn());
3662 break;
3663 case 0x0e: // MULU.W Rm,Rn 0010nnnnmmmm1110
3664 case 0x0f: // MULS.W Rm,Rn 0010nnnnmmmm1111
3665 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3666 opd->dest = BITMASK1(SHR_MACL);
3667 break;
3668 default:
3669 goto undefined;
3670 }
3671 break;
3672
3673 /////////////////////////////////////////////
3674 case 0x03:
3675 switch (op & 0x0f)
3676 {
3677 case 0x00: // CMP/EQ Rm,Rn 0011nnnnmmmm0000
3678 case 0x02: // CMP/HS Rm,Rn 0011nnnnmmmm0010
3679 case 0x03: // CMP/GE Rm,Rn 0011nnnnmmmm0011
3680 case 0x06: // CMP/HI Rm,Rn 0011nnnnmmmm0110
3681 case 0x07: // CMP/GT Rm,Rn 0011nnnnmmmm0111
3682 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3683 opd->dest = BITMASK1(SHR_T);
3684 break;
3685 case 0x04: // DIV1 Rm,Rn 0011nnnnmmmm0100
3686 opd->source = BITMASK3(GET_Rm(), GET_Rn(), SHR_SR);
3687 opd->dest = BITMASK2(GET_Rn(), SHR_SR);
3688 break;
3689 case 0x05: // DMULU.L Rm,Rn 0011nnnnmmmm0101
3690 case 0x0d: // DMULS.L Rm,Rn 0011nnnnmmmm1101
3691 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3692 opd->dest = BITMASK2(SHR_MACL, SHR_MACH);
3693 opd->cycles = 2;
3694 break;
3695 case 0x08: // SUB Rm,Rn 0011nnnnmmmm1000
3696 case 0x0c: // ADD Rm,Rn 0011nnnnmmmm1100
3697 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3698 opd->dest = BITMASK1(GET_Rn());
3699 break;
3700 case 0x0a: // SUBC Rm,Rn 0011nnnnmmmm1010
3701 case 0x0e: // ADDC Rm,Rn 0011nnnnmmmm1110
3702 opd->source = BITMASK3(GET_Rm(), GET_Rn(), SHR_T);
3703 opd->dest = BITMASK2(GET_Rn(), SHR_T);
3704 break;
3705 case 0x0b: // SUBV Rm,Rn 0011nnnnmmmm1011
3706 case 0x0f: // ADDV Rm,Rn 0011nnnnmmmm1111
3707 opd->source = BITMASK2(GET_Rm(), GET_Rn());
3708 opd->dest = BITMASK2(GET_Rn(), SHR_T);
3709 break;
3710 default:
3711 goto undefined;
3712 }
3713 break;
3714
3715 /////////////////////////////////////////////
3716 case 0x04:
3717 switch (op & 0x0f)
3718 {
3719 case 0x00:
3720 switch (GET_Fx())
3721 {
3722 case 0: // SHLL Rn 0100nnnn00000000
3723 case 2: // SHAL Rn 0100nnnn00100000
3724 opd->source = BITMASK1(GET_Rn());
3725 opd->dest = BITMASK2(GET_Rn(), SHR_T);
3726 break;
3727 case 1: // DT Rn 0100nnnn00010000
3728 opd->source = BITMASK1(GET_Rn());
3729 opd->dest = BITMASK2(GET_Rn(), SHR_T);
3730 break;
3731 default:
3732 goto undefined;
3733 }
3734 break;
3735 case 0x01:
3736 switch (GET_Fx())
3737 {
3738 case 0: // SHLR Rn 0100nnnn00000001
3739 case 2: // SHAR Rn 0100nnnn00100001
3740 opd->source = BITMASK1(GET_Rn());
3741 opd->dest = BITMASK2(GET_Rn(), SHR_T);
3742 break;
3743 case 1: // CMP/PZ Rn 0100nnnn00010001
3744 opd->source = BITMASK1(GET_Rn());
3745 opd->dest = BITMASK1(SHR_T);
3746 break;
3747 default:
3748 goto undefined;
3749 }
3750 break;
3751 case 0x02:
3752 case 0x03:
3753 switch (op & 0x3f)
3754 {
3755 case 0x02: // STS.L MACH,@-Rn 0100nnnn00000010
3756 tmp = SHR_MACH;
3757 break;
3758 case 0x12: // STS.L MACL,@-Rn 0100nnnn00010010
3759 tmp = SHR_MACL;
3760 break;
3761 case 0x22: // STS.L PR,@-Rn 0100nnnn00100010
3762 tmp = SHR_PR;
3763 break;
3764 case 0x03: // STC.L SR,@-Rn 0100nnnn00000011
3765 tmp = SHR_SR;
3766 opd->cycles = 2;
3767 break;
3768 case 0x13: // STC.L GBR,@-Rn 0100nnnn00010011
3769 tmp = SHR_GBR;
3770 opd->cycles = 2;
3771 break;
3772 case 0x23: // STC.L VBR,@-Rn 0100nnnn00100011
3773 tmp = SHR_VBR;
3774 opd->cycles = 2;
3775 break;
3776 default:
3777 goto undefined;
3778 }
3779 opd->source = BITMASK2(GET_Rn(), tmp);
3780 opd->dest = BITMASK1(GET_Rn());
3781 break;
3782 case 0x04:
3783 case 0x05:
3784 switch (op & 0x3f)
3785 {
3786 case 0x04: // ROTL Rn 0100nnnn00000100
3787 case 0x05: // ROTR Rn 0100nnnn00000101
3788 opd->source = BITMASK1(GET_Rn());
3789 opd->dest = BITMASK2(GET_Rn(), SHR_T);
3790 break;
3791 case 0x24: // ROTCL Rn 0100nnnn00100100
3792 case 0x25: // ROTCR Rn 0100nnnn00100101
3793 opd->source = BITMASK2(GET_Rn(), SHR_T);
3794 opd->dest = BITMASK2(GET_Rn(), SHR_T);
3795 break;
3796 case 0x15: // CMP/PL Rn 0100nnnn00010101
3797 opd->source = BITMASK1(GET_Rn());
3798 opd->dest = BITMASK1(SHR_T);
3799 break;
3800 default:
3801 goto undefined;
3802 }
3803 break;
3804 case 0x06:
3805 case 0x07:
3806 switch (op & 0x3f)
3807 {
3808 case 0x06: // LDS.L @Rm+,MACH 0100mmmm00000110
3809 tmp = SHR_MACH;
3810 break;
3811 case 0x16: // LDS.L @Rm+,MACL 0100mmmm00010110
3812 tmp = SHR_MACL;
3813 break;
3814 case 0x26: // LDS.L @Rm+,PR 0100mmmm00100110
3815 tmp = SHR_PR;
3816 break;
3817 case 0x07: // LDC.L @Rm+,SR 0100mmmm00000111
3818 tmp = SHR_SR;
3819 opd->cycles = 3;
3820 break;
3821 case 0x17: // LDC.L @Rm+,GBR 0100mmmm00010111
3822 tmp = SHR_GBR;
3823 opd->cycles = 3;
3824 break;
3825 case 0x27: // LDC.L @Rm+,VBR 0100mmmm00100111
3826 tmp = SHR_VBR;
3827 opd->cycles = 3;
3828 break;
3829 default:
3830 goto undefined;
3831 }
3832 opd->source = BITMASK1(GET_Rn());
3833 opd->dest = BITMASK2(GET_Rn(), tmp);
3834 break;
3835 case 0x08:
3836 case 0x09:
3837 switch (GET_Fx())
3838 {
3839 case 0:
3840 // SHLL2 Rn 0100nnnn00001000
3841 // SHLR2 Rn 0100nnnn00001001
3842 break;
3843 case 1:
3844 // SHLL8 Rn 0100nnnn00011000
3845 // SHLR8 Rn 0100nnnn00011001
3846 break;
3847 case 2:
3848 // SHLL16 Rn 0100nnnn00101000
3849 // SHLR16 Rn 0100nnnn00101001
3850 break;
3851 default:
3852 goto undefined;
3853 }
3854 opd->source = BITMASK1(GET_Rn());
3855 opd->dest = BITMASK1(GET_Rn());
3856 break;
3857 case 0x0a:
3858 switch (GET_Fx())
3859 {
3860 case 0: // LDS Rm,MACH 0100mmmm00001010
3861 tmp = SHR_MACH;
3862 break;
3863 case 1: // LDS Rm,MACL 0100mmmm00011010
3864 tmp = SHR_MACL;
3865 break;
3866 case 2: // LDS Rm,PR 0100mmmm00101010
3867 tmp = SHR_PR;
3868 break;
3869 default:
3870 goto undefined;
3871 }
3872 opd->op = OP_MOVE;
3873 opd->source = BITMASK1(GET_Rn());
3874 opd->dest = BITMASK1(tmp);
3875 break;
3876 case 0x0b:
3877 switch (GET_Fx())
3878 {
3879 case 0: // JSR @Rm 0100mmmm00001011
3880 opd->dest = BITMASK1(SHR_PR);
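 // fallthrough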
3881 case 2: // JMP @Rm 0100mmmm00101011
3882 opd->op = OP_BRANCH_R;
3883 opd->rm = GET_Rn();
3884 opd->source = BITMASK1(opd->rm);
3885 opd->dest |= BITMASK1(SHR_PC);
3886 opd->cycles = 2;
3887 next_is_delay = 1;
3888 end_block = 1;
3889 break;
3890 case 1: // TAS.B @Rn 0100nnnn00011011
3891 opd->source = BITMASK1(GET_Rn());
3892 opd->dest = BITMASK1(SHR_T);
3893 opd->cycles = 4;
3894 break;
3895 default:
3896 goto undefined;
3897 }
3898 break;
3899 case 0x0e:
3900 switch (GET_Fx())
3901 {
3902 case 0: // LDC Rm,SR 0100mmmm00001110
3903 tmp = SHR_SR;
3904 break;
3905 case 1: // LDC Rm,GBR 0100mmmm00011110
3906 tmp = SHR_GBR;
3907 break;
3908 case 2: // LDC Rm,VBR 0100mmmm00101110
3909 tmp = SHR_VBR;
3910 break;
3911 default:
3912 goto undefined;
3913 }
3914 opd->op = OP_MOVE;
3915 opd->source = BITMASK1(GET_Rn());
3916 opd->dest = BITMASK1(tmp);
3917 break;
3918 case 0x0f:
3919 // MAC.W @Rm+,@Rn+ 0100nnnnmmmm1111
3920 opd->source = BITMASK5(GET_Rm(), GET_Rn(), SHR_SR, SHR_MACL, SHR_MACH);
3921 opd->dest = BITMASK4(GET_Rm(), GET_Rn(), SHR_MACL, SHR_MACH);
3922 opd->cycles = 3;
3923 break;
3924 default:
3925 goto undefined;
3926 }
3927 break;
3928
3929 /////////////////////////////////////////////
3930 case 0x05:
3931 // MOV.L @(disp,Rm),Rn 0101nnnnmmmmdddd
3932 opd->source = BITMASK1(GET_Rm());
3933 opd->dest = BITMASK1(GET_Rn());
3934 opd->imm = (op & 0x0f) * 4;
3935 break;
3936
3937 /////////////////////////////////////////////
3938 case 0x06:
3939 switch (op & 0x0f)
3940 {
3941 case 0x04: // MOV.B @Rm+,Rn 0110nnnnmmmm0100
3942 case 0x05: // MOV.W @Rm+,Rn 0110nnnnmmmm0101
3943 case 0x06: // MOV.L @Rm+,Rn 0110nnnnmmmm0110
3944 opd->dest = BITMASK1(GET_Rm());
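 // fallthrough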
3945 case 0x00: // MOV.B @Rm,Rn 0110nnnnmmmm0000
3946 case 0x01: // MOV.W @Rm,Rn 0110nnnnmmmm0001
3947 case 0x02: // MOV.L @Rm,Rn 0110nnnnmmmm0010
3948 opd->source = BITMASK1(GET_Rm());
3949 opd->dest |= BITMASK1(GET_Rn());
3950 break;
3951 case 0x0a: // NEGC Rm,Rn 0110nnnnmmmm1010
3952 opd->source = BITMASK2(GET_Rm(), SHR_T);
3953 opd->dest = BITMASK2(GET_Rn(), SHR_T);
3954 break;
3955 case 0x03: // MOV Rm,Rn 0110nnnnmmmm0011
3956 opd->op = OP_MOVE;
3957 goto arith_rmrn;
3958 case 0x07: // NOT Rm,Rn 0110nnnnmmmm0111
3959 case 0x08: // SWAP.B Rm,Rn 0110nnnnmmmm1000
3960 case 0x09: // SWAP.W Rm,Rn 0110nnnnmmmm1001
3961 case 0x0b: // NEG Rm,Rn 0110nnnnmmmm1011
3962 case 0x0c: // EXTU.B Rm,Rn 0110nnnnmmmm1100
3963 case 0x0d: // EXTU.W Rm,Rn 0110nnnnmmmm1101
3964 case 0x0e: // EXTS.B Rm,Rn 0110nnnnmmmm1110
3965 case 0x0f: // EXTS.W Rm,Rn 0110nnnnmmmm1111
3966 arith_rmrn:
3967 opd->source = BITMASK1(GET_Rm());
3968 opd->dest = BITMASK1(GET_Rn());
3969 break;
3970 }
3971 break;
3972
3973 /////////////////////////////////////////////
3974 case 0x07:
3975 // ADD #imm,Rn 0111nnnniiiiiiii
3976 opd->source = opd->dest = BITMASK1(GET_Rn());
3977 opd->imm = (int)(signed char)op;
3978 break;
3979
3980 /////////////////////////////////////////////
3981 case 0x08:
3982 switch (op & 0x0f00)
3983 {
3984 case 0x0000: // MOV.B R0,@(disp,Rn) 10000000nnnndddd
3985 opd->source = BITMASK2(GET_Rm(), SHR_R0);
3986 opd->imm = (op & 0x0f);
3987 break;
3988 case 0x0100: // MOV.W R0,@(disp,Rn) 10000001nnnndddd
3989 opd->source = BITMASK2(GET_Rm(), SHR_R0);
3990 opd->imm = (op & 0x0f) * 2;
3991 break;
3992 case 0x0400: // MOV.B @(disp,Rm),R0 10000100mmmmdddd
3993 opd->source = BITMASK1(GET_Rm());
3994 opd->dest = BITMASK1(SHR_R0);
3995 opd->imm = (op & 0x0f);
3996 break;
3997 case 0x0500: // MOV.W @(disp,Rm),R0 10000101mmmmdddd
3998 opd->source = BITMASK1(GET_Rm());
3999 opd->dest = BITMASK1(SHR_R0);
4000 opd->imm = (op & 0x0f) * 2;
4001 break;
4002 case 0x0800: // CMP/EQ #imm,R0 10001000iiiiiiii
4003 opd->source = BITMASK1(SHR_R0);
4004 opd->dest = BITMASK1(SHR_T);
4005 opd->imm = (int)(signed char)op;
4006 break;
4007 case 0x0d00: // BT/S label 10001101dddddddd
4008 case 0x0f00: // BF/S label 10001111dddddddd
4009 next_is_delay = 1;
4010 // fallthrough
4011 case 0x0900: // BT label 10001001dddddddd
4012 case 0x0b00: // BF label 10001011dddddddd
4013 opd->op = (op & 0x0200) ? OP_BRANCH_CF : OP_BRANCH_CT;
4014 opd->source = BITMASK1(SHR_T);
4015 opd->dest = BITMASK1(SHR_PC);
4016 opd->imm = ((signed int)(op << 24) >> 23);
4017 opd->imm += pc + 4;
4018 if (base_pc <= opd->imm && opd->imm < base_pc + BLOCK_INSN_LIMIT * 2)
4019 op_flags[(opd->imm - base_pc) / 2] |= OF_BTARGET;
4020 break;
4021 default:
4022 goto undefined;
4023 }
4024 break;
4025
4026 /////////////////////////////////////////////
4027 case 0x09:
4028 // MOV.W @(disp,PC),Rn 1001nnnndddddddd
4029 opd->op = OP_LOAD_POOL;
4030 tmp = pc + 2;
4031 if (op_flags[i] & OF_DELAY_OP) {
4032 if (ops[i-1].op == OP_BRANCH)
4033 tmp = ops[i-1].imm;
4034 else
4035 tmp = 0;
4036 }
4037 opd->source = BITMASK1(SHR_PC);
4038 opd->dest = BITMASK1(GET_Rn());
4039 if (tmp)
4040 opd->imm = tmp + 2 + (op & 0xff) * 2;
4041 opd->size = 1;
4042 break;
4043
4044 /////////////////////////////////////////////
4045 case 0x0b:
4046 // BSR label 1011dddddddddddd
4047 opd->dest = BITMASK1(SHR_PR);
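 // fallthrough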
4048 case 0x0a:
4049 // BRA label 1010dddddddddddd
4050 opd->op = OP_BRANCH;
4051 opd->dest |= BITMASK1(SHR_PC);
4052 opd->imm = ((signed int)(op << 20) >> 19);
4053 opd->imm += pc + 4;
4054 opd->cycles = 2;
4055 next_is_delay = 1;
4056 end_block = 1;
4057 if (base_pc <= opd->imm && opd->imm < base_pc + BLOCK_INSN_LIMIT * 2)
4058 op_flags[(opd->imm - base_pc) / 2] |= OF_BTARGET;
4059 break;
4060
4061 /////////////////////////////////////////////
4062 case 0x0c:
4063 switch (op & 0x0f00)
4064 {
4065 case 0x0000: // MOV.B R0,@(disp,GBR) 11000000dddddddd
4066 case 0x0100: // MOV.W R0,@(disp,GBR) 11000001dddddddd
4067 case 0x0200: // MOV.L R0,@(disp,GBR) 11000010dddddddd
4068 opd->source = BITMASK2(SHR_GBR, SHR_R0);
4069 opd->size = (op & 0x300) >> 8;
4070 opd->imm = (op & 0xff) << opd->size;
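      // the displacement is scaled by the access size (*1/*2/*4 for B/W/L)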
4071 break;
4072 case 0x0400: // MOV.B @(disp,GBR),R0 11000100dddddddd
4073 case 0x0500: // MOV.W @(disp,GBR),R0 11000101dddddddd
4074 case 0x0600: // MOV.L @(disp,GBR),R0 11000110dddddddd
4075 opd->source = BITMASK1(SHR_GBR);
4076 opd->dest = BITMASK1(SHR_R0);
4077 opd->size = (op & 0x300) >> 8;
4078 opd->imm = (op & 0xff) << opd->size;
4079 break;
4080 case 0x0300: // TRAPA #imm 11000011iiiiiiii
4081 opd->source = BITMASK2(SHR_PC, SHR_SR);
4082 opd->dest = BITMASK1(SHR_PC);
4083 opd->imm = (op & 0xff) * 4;
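      // imm is the vector table offset; the handler address sits at VBR + imm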
4084 opd->cycles = 8;
4085 end_block = 1; // FIXME
4086 break;
4087 case 0x0700: // MOVA @(disp,PC),R0 11000111dddddddd
4088 opd->op = OP_MOVA;
4089 tmp = pc + 2;
4090 if (op_flags[i] & OF_DELAY_OP) {
4091 if (ops[i-1].op == OP_BRANCH)
4092 tmp = ops[i-1].imm;
4093 else
4094 tmp = 0;
4095 }
4096 opd->dest = BITMASK1(SHR_R0);
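      // the MOVA target is longword-aligned (hence the & ~3); the lowest
      // target at/above the block start is remembered for the literal pool
      // checks after the scan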
4097 if (tmp) {
4098 opd->imm = (tmp + 2 + (op & 0xff) * 4) & ~3;
4099 if (opd->imm >= base_pc) {
4100 if (lowest_mova == 0 || opd->imm < lowest_mova)
4101 lowest_mova = opd->imm;
4102 }
4103 }
4104 break;
4105 case 0x0800: // TST #imm,R0 11001000iiiiiiii
4106 opd->source = BITMASK1(SHR_R0);
4107 opd->dest = BITMASK1(SHR_T);
4108 opd->imm = op & 0xff;
4109 break;
4110 case 0x0900: // AND #imm,R0 11001001iiiiiiii
4111 opd->source = opd->dest = BITMASK1(SHR_R0);
4112 opd->imm = op & 0xff;
4113 break;
4114 case 0x0a00: // XOR #imm,R0 11001010iiiiiiii
4115 opd->source = opd->dest = BITMASK1(SHR_R0);
4116 opd->imm = op & 0xff;
4117 break;
4118 case 0x0b00: // OR #imm,R0 11001011iiiiiiii
4119 opd->source = opd->dest = BITMASK1(SHR_R0);
4120 opd->imm = op & 0xff;
4121 break;
4122 case 0x0c00: // TST.B #imm,@(R0,GBR) 11001100iiiiiiii
4123 opd->source = BITMASK2(SHR_GBR, SHR_R0);
4124 opd->dest = BITMASK1(SHR_T);
4125 opd->imm = op & 0xff;
4126 opd->cycles = 3;
4127 break;
4128 case 0x0d00: // AND.B #imm,@(R0,GBR) 11001101iiiiiiii
4129 case 0x0e00: // XOR.B #imm,@(R0,GBR) 11001110iiiiiiii
4130 case 0x0f00: // OR.B #imm,@(R0,GBR) 11001111iiiiiiii
4131 opd->source = BITMASK2(SHR_GBR, SHR_R0);
4132 opd->imm = op & 0xff;
4133 opd->cycles = 3;
4134 break;
4135 default:
4136 goto undefined;
4137 }
4138 break;
4139
4140 /////////////////////////////////////////////
4141 case 0x0d:
4142 // MOV.L @(disp,PC),Rn 1101nnnndddddddd
4143 opd->op = OP_LOAD_POOL;
4144 tmp = pc + 2;
4145 if (op_flags[i] & OF_DELAY_OP) {
4146 if (ops[i-1].op == OP_BRANCH)
4147 tmp = ops[i-1].imm;
4148 else
4149 tmp = 0;
4150 }
4151 opd->source = BITMASK1(SHR_PC);
4152 opd->dest = BITMASK1(GET_Rn());
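      // long PC-relative loads use a longword-aligned base, hence the & ~3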
4153 if (tmp)
4154 opd->imm = (tmp + 2 + (op & 0xff) * 4) & ~3;
4155 opd->size = 2;
4156 break;
4157
4158 /////////////////////////////////////////////
4159 case 0x0e:
4160 // MOV #imm,Rn 1110nnnniiiiiiii
4161 opd->dest = BITMASK1(GET_Rn());
4162 opd->imm = (u32)(signed int)(signed char)op;
4163 break;
4164
4165 default:
4166 undefined:
4167 elprintf(EL_ANOMALY, "%csh2 drc: unhandled op %04x @ %08x",
4168 is_slave ? 's' : 'm', op, pc);
4169 break;
4170 }
4171
4172 if (op_flags[i] & OF_DELAY_OP) {
4173 switch (opd->op) {
4174 case OP_BRANCH:
4175 case OP_BRANCH_CT:
4176 case OP_BRANCH_CF:
4177 case OP_BRANCH_R:
4178 case OP_BRANCH_RF:
4179 elprintf(EL_ANOMALY, "%csh2 drc: branch in DS @ %08x",
4180 is_slave ? 's' : 'm', pc);
4181 opd->op = OP_UNHANDLED;
4182 op_flags[i] |= OF_B_IN_DS;
4183 next_is_delay = 0;
4184 break;
4185 }
4186 }
4187 }
4188 i_end = i;
4189 end_pc = pc;
4190
4191 // 2nd pass: T bit propagation, statically known branches, literal bounds
4192 for (i = 0; i < i_end; i++) {
4193 opd = &ops[i];
4194
4195 // propagate known T bit values (TODO: DIV0U, which also clears T)
4196 if ((opd->op == OP_SETCLRT && !opd->imm) || opd->op == OP_BRANCH_CT)
4197 op_flags[i + 1] |= OF_T_CLEAR;
4198 else if ((opd->op == OP_SETCLRT && opd->imm) || opd->op == OP_BRANCH_CF)
4199 op_flags[i + 1] |= OF_T_SET;
4200
4201 if ((op_flags[i] & OF_BTARGET) || (opd->dest & BITMASK1(SHR_T)))
4202 op_flags[i] &= ~(OF_T_SET | OF_T_CLEAR);
4203 else
4204 op_flags[i + 1] |= op_flags[i] & (OF_T_SET | OF_T_CLEAR);
4205
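    // if T is statically known at a conditional branch, rewrite it as an
    // unconditional branch and stop scanning after it (and its delay slot)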
4206 if ((opd->op == OP_BRANCH_CT && (op_flags[i] & OF_T_SET))
4207 || (opd->op == OP_BRANCH_CF && (op_flags[i] & OF_T_CLEAR)))
4208 {
4209 opd->op = OP_BRANCH;
4210 opd->cycles = 3;
4211 i_end = i + 1;
4212 if (op_flags[i + 1] & OF_DELAY_OP) {
4213 opd->cycles = 2;
4214 i_end++;
4215 }
4216 }
4217 else if (opd->op == OP_LOAD_POOL)
4218 {
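      // literals within MAX_LITERAL_OFFSET of the code end count as this
      // block's literal pool; end_literals ends up just past the farthest one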
4219 if (opd->imm < end_pc + MAX_LITERAL_OFFSET) {
4220 if (end_literals < opd->imm + opd->size * 2)
4221 end_literals = opd->imm + opd->size * 2;
4222 }
4223 }
4224 }
4225 end_pc = base_pc + i_end * 2;
4226 if (end_literals < end_pc)
4227 end_literals = end_pc;
4228
4229 // end_literals is used to decide whether a literal can be inlined
4230 // XXX: need better detection of whether the mova target is actually used for writes
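 // a mova target in (or below) the literal area means the pool may also be
 // reached through R0, possibly for writes, so the literal range is dropped
 // (end_literals = end_pc) instead of risking stale inlined values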
4231 if (lowest_mova >= base_pc) {
4232 if (lowest_mova < end_literals) {
4233 dbg(1, "mova for %08x, block %08x", lowest_mova, base_pc);
4234 end_literals = end_pc;
4235 }
4236 if (lowest_mova < end_pc) {
4237 dbg(1, "warning: mova inside of blk for %08x, block %08x",
4238 lowest_mova, base_pc);
4239 end_literals = end_pc;
4240 }
4241 }
4242
4243 *end_pc_out = end_pc;
4244 if (end_literals_out != NULL)
4245 *end_literals_out = end_literals;
4246}
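
#if 0
/* Illustrative sketch only, not used by the translator: the PC-relative
 * branch target math from the decoder above, pulled out into standalone
 * helpers. The helper names are invented for this example.
 */
static u32 example_branch8_target(u32 op, u32 pc) // BT/BF/BT_S/BF_S
{
  // sign-extend the 8-bit displacement and double it in one shift pair
  return pc + 4 + (((signed int)(op << 24)) >> 23);
}

static u32 example_branch12_target(u32 op, u32 pc) // BRA/BSR
{
  // same trick for the 12-bit displacement
  return pc + 4 + (((signed int)(op << 20)) >> 19);
}
#endif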
4247
4248// vim:shiftwidth=2:ts=2:expandtab