psxcounters: try to support a dynarec with a very long timeslice
[pcsx_rearmed.git] / deps / lightrec / optimizer.c
CommitLineData
98fa08a5 1// SPDX-License-Identifier: LGPL-2.1-or-later
d16005f8 2/*
98fa08a5 3 * Copyright (C) 2014-2021 Paul Cercueil <paul@crapouillou.net>
d16005f8
PC
4 */
5
98fa08a5 6#include "lightrec-config.h"
d16005f8
PC
7#include "disassembler.h"
8#include "lightrec.h"
9#include "memmanager.h"
10#include "optimizer.h"
11#include "regcache.h"
12
13#include <errno.h>
14#include <stdbool.h>
15#include <stdlib.h>
98fa08a5
PC
16#include <string.h>
17
18#define IF_OPT(opt, ptr) ((opt) ? (ptr) : NULL)
d16005f8
PC
19
20struct optimizer_list {
21 void (**optimizers)(struct opcode *);
22 unsigned int nb_optimizers;
23};
24
98fa08a5
PC
25static bool is_nop(union code op);
26
27bool is_unconditional_jump(union code c)
28{
29 switch (c.i.op) {
30 case OP_SPECIAL:
31 return c.r.op == OP_SPECIAL_JR || c.r.op == OP_SPECIAL_JALR;
32 case OP_J:
33 case OP_JAL:
34 return true;
35 case OP_BEQ:
36 case OP_BLEZ:
37 return c.i.rs == c.i.rt;
38 case OP_REGIMM:
39 return (c.r.rt == OP_REGIMM_BGEZ ||
40 c.r.rt == OP_REGIMM_BGEZAL) && c.i.rs == 0;
41 default:
42 return false;
43 }
44}
45
46bool is_syscall(union code c)
47{
48 return (c.i.op == OP_SPECIAL && c.r.op == OP_SPECIAL_SYSCALL) ||
49 (c.i.op == OP_CP0 && (c.r.rs == OP_CP0_MTC0 ||
50 c.r.rs == OP_CP0_CTC0) &&
51 (c.r.rd == 12 || c.r.rd == 13));
52}
53
54static u64 opcode_read_mask(union code op)
d16005f8
PC
55{
56 switch (op.i.op) {
57 case OP_SPECIAL:
58 switch (op.r.op) {
59 case OP_SPECIAL_SYSCALL:
60 case OP_SPECIAL_BREAK:
98fa08a5 61 return 0;
d16005f8
PC
62 case OP_SPECIAL_JR:
63 case OP_SPECIAL_JALR:
64 case OP_SPECIAL_MTHI:
65 case OP_SPECIAL_MTLO:
98fa08a5 66 return BIT(op.r.rs);
d16005f8 67 case OP_SPECIAL_MFHI:
98fa08a5 68 return BIT(REG_HI);
d16005f8 69 case OP_SPECIAL_MFLO:
98fa08a5 70 return BIT(REG_LO);
d16005f8
PC
71 case OP_SPECIAL_SLL:
72 case OP_SPECIAL_SRL:
73 case OP_SPECIAL_SRA:
98fa08a5 74 return BIT(op.r.rt);
d16005f8 75 default:
98fa08a5 76 return BIT(op.r.rs) | BIT(op.r.rt);
d16005f8
PC
77 }
78 case OP_CP0:
79 switch (op.r.rs) {
80 case OP_CP0_MTC0:
81 case OP_CP0_CTC0:
98fa08a5 82 return BIT(op.r.rt);
d16005f8 83 default:
98fa08a5 84 return 0;
d16005f8
PC
85 }
86 case OP_CP2:
87 if (op.r.op == OP_CP2_BASIC) {
88 switch (op.r.rs) {
89 case OP_CP2_BASIC_MTC2:
90 case OP_CP2_BASIC_CTC2:
98fa08a5 91 return BIT(op.r.rt);
d16005f8 92 default:
98fa08a5 93 break;
d16005f8 94 }
d16005f8 95 }
98fa08a5 96 return 0;
d16005f8
PC
97 case OP_J:
98 case OP_JAL:
99 case OP_LUI:
98fa08a5 100 return 0;
d16005f8
PC
101 case OP_BEQ:
102 case OP_BNE:
103 case OP_LWL:
104 case OP_LWR:
105 case OP_SB:
106 case OP_SH:
107 case OP_SWL:
108 case OP_SW:
109 case OP_SWR:
98fa08a5 110 return BIT(op.i.rs) | BIT(op.i.rt);
d16005f8 111 default:
98fa08a5 112 return BIT(op.i.rs);
d16005f8
PC
113 }
114}
115
98fa08a5 116static u64 opcode_write_mask(union code op)
d16005f8 117{
98fa08a5
PC
118 u64 flags;
119
d16005f8
PC
120 switch (op.i.op) {
121 case OP_SPECIAL:
122 switch (op.r.op) {
123 case OP_SPECIAL_JR:
d16005f8
PC
124 case OP_SPECIAL_SYSCALL:
125 case OP_SPECIAL_BREAK:
98fa08a5 126 return 0;
d16005f8
PC
127 case OP_SPECIAL_MULT:
128 case OP_SPECIAL_MULTU:
129 case OP_SPECIAL_DIV:
130 case OP_SPECIAL_DIVU:
98fa08a5
PC
131 if (!OPT_FLAG_MULT_DIV)
132 return BIT(REG_LO) | BIT(REG_HI);
133
134 if (op.r.rd)
135 flags = BIT(op.r.rd);
136 else
137 flags = BIT(REG_LO);
138 if (op.r.imm)
139 flags |= BIT(op.r.imm);
140 else
141 flags |= BIT(REG_HI);
142 return flags;
d16005f8 143 case OP_SPECIAL_MTHI:
98fa08a5 144 return BIT(REG_HI);
d16005f8 145 case OP_SPECIAL_MTLO:
98fa08a5 146 return BIT(REG_LO);
d16005f8 147 default:
98fa08a5 148 return BIT(op.r.rd);
d16005f8
PC
149 }
150 case OP_ADDI:
151 case OP_ADDIU:
152 case OP_SLTI:
153 case OP_SLTIU:
154 case OP_ANDI:
155 case OP_ORI:
156 case OP_XORI:
157 case OP_LUI:
158 case OP_LB:
159 case OP_LH:
160 case OP_LWL:
161 case OP_LW:
162 case OP_LBU:
163 case OP_LHU:
164 case OP_LWR:
98fa08a5
PC
165 return BIT(op.i.rt);
166 case OP_JAL:
167 return BIT(31);
d16005f8
PC
168 case OP_CP0:
169 switch (op.r.rs) {
170 case OP_CP0_MFC0:
171 case OP_CP0_CFC0:
98fa08a5 172 return BIT(op.i.rt);
d16005f8 173 default:
98fa08a5 174 return 0;
d16005f8
PC
175 }
176 case OP_CP2:
177 if (op.r.op == OP_CP2_BASIC) {
178 switch (op.r.rs) {
179 case OP_CP2_BASIC_MFC2:
180 case OP_CP2_BASIC_CFC2:
98fa08a5 181 return BIT(op.i.rt);
d16005f8 182 default:
98fa08a5 183 break;
d16005f8 184 }
98fa08a5
PC
185 }
186 return 0;
187 case OP_REGIMM:
188 switch (op.r.rt) {
189 case OP_REGIMM_BLTZAL:
190 case OP_REGIMM_BGEZAL:
191 return BIT(31);
192 default:
193 return 0;
d16005f8
PC
194 }
195 case OP_META_MOV:
98fa08a5 196 return BIT(op.r.rd);
d16005f8 197 default:
98fa08a5
PC
198 return 0;
199 }
200}
201
202bool opcode_reads_register(union code op, u8 reg)
203{
204 return opcode_read_mask(op) & BIT(reg);
205}
206
207bool opcode_writes_register(union code op, u8 reg)
208{
209 return opcode_write_mask(op) & BIT(reg);
210}
211
212static int find_prev_writer(const struct opcode *list, unsigned int offset, u8 reg)
213{
214 union code c;
215 unsigned int i;
216
217 if (list[offset].flags & LIGHTREC_SYNC)
218 return -1;
219
220 for (i = offset; i > 0; i--) {
221 c = list[i - 1].c;
222
223 if (opcode_writes_register(c, reg)) {
224 if (i > 1 && has_delay_slot(list[i - 2].c))
225 break;
226
227 return i - 1;
228 }
229
230 if ((list[i - 1].flags & LIGHTREC_SYNC) ||
231 has_delay_slot(c) ||
232 opcode_reads_register(c, reg))
233 break;
234 }
235
236 return -1;
237}
238
239static int find_next_reader(const struct opcode *list, unsigned int offset, u8 reg)
240{
241 unsigned int i;
242 union code c;
243
244 if (list[offset].flags & LIGHTREC_SYNC)
245 return -1;
246
247 for (i = offset; ; i++) {
248 c = list[i].c;
249
250 if (opcode_reads_register(c, reg)) {
251 if (i > 0 && has_delay_slot(list[i - 1].c))
252 break;
253
254 return i;
255 }
256
257 if ((list[i].flags & LIGHTREC_SYNC) ||
258 has_delay_slot(c) || opcode_writes_register(c, reg))
259 break;
260 }
261
262 return -1;
263}
264
265static bool reg_is_dead(const struct opcode *list, unsigned int offset, u8 reg)
266{
267 unsigned int i;
268
269 if (list[offset].flags & LIGHTREC_SYNC)
d16005f8 270 return false;
98fa08a5
PC
271
272 for (i = offset + 1; ; i++) {
273 if (opcode_reads_register(list[i].c, reg))
274 return false;
275
276 if (opcode_writes_register(list[i].c, reg))
277 return true;
278
279 if (has_delay_slot(list[i].c)) {
22eee2ac
PC
280 if (list[i].flags & LIGHTREC_NO_DS ||
281 opcode_reads_register(list[i + 1].c, reg))
98fa08a5
PC
282 return false;
283
284 return opcode_writes_register(list[i + 1].c, reg);
285 }
d16005f8
PC
286 }
287}
288
98fa08a5
PC
289static bool reg_is_read(const struct opcode *list,
290 unsigned int a, unsigned int b, u8 reg)
291{
292 /* Return true if reg is read in one of the opcodes of the interval
293 * [a, b[ */
294 for (; a < b; a++) {
295 if (!is_nop(list[a].c) && opcode_reads_register(list[a].c, reg))
296 return true;
297 }
298
299 return false;
300}
301
302static bool reg_is_written(const struct opcode *list,
303 unsigned int a, unsigned int b, u8 reg)
304{
305 /* Return true if reg is written in one of the opcodes of the interval
306 * [a, b[ */
307
308 for (; a < b; a++) {
309 if (!is_nop(list[a].c) && opcode_writes_register(list[a].c, reg))
310 return true;
311 }
312
313 return false;
314}
315
316static bool reg_is_read_or_written(const struct opcode *list,
317 unsigned int a, unsigned int b, u8 reg)
318{
319 return reg_is_read(list, a, b, reg) || reg_is_written(list, a, b, reg);
320}
321
322static bool opcode_is_load(union code op)
323{
324 switch (op.i.op) {
325 case OP_LB:
326 case OP_LH:
327 case OP_LWL:
328 case OP_LW:
329 case OP_LBU:
330 case OP_LHU:
331 case OP_LWR:
332 case OP_LWC2:
333 return true;
334 default:
335 return false;
336 }
337}
338
339static bool opcode_is_store(union code op)
340{
341 switch (op.i.op) {
342 case OP_SB:
343 case OP_SH:
344 case OP_SW:
345 case OP_SWL:
346 case OP_SWR:
347 case OP_SWC2:
348 return true;
349 default:
350 return false;
351 }
352}
353
354bool opcode_is_io(union code op)
355{
356 return opcode_is_load(op) || opcode_is_store(op);
357}
358
d16005f8
PC
359/* TODO: Complete */
360static bool is_nop(union code op)
361{
362 if (opcode_writes_register(op, 0)) {
363 switch (op.i.op) {
364 case OP_CP0:
365 return op.r.rs != OP_CP0_MFC0;
366 case OP_LB:
367 case OP_LH:
368 case OP_LWL:
369 case OP_LW:
370 case OP_LBU:
371 case OP_LHU:
372 case OP_LWR:
373 return false;
374 default:
375 return true;
376 }
377 }
378
379 switch (op.i.op) {
380 case OP_SPECIAL:
381 switch (op.r.op) {
382 case OP_SPECIAL_AND:
383 return op.r.rd == op.r.rt && op.r.rd == op.r.rs;
384 case OP_SPECIAL_ADD:
385 case OP_SPECIAL_ADDU:
386 return (op.r.rd == op.r.rt && op.r.rs == 0) ||
387 (op.r.rd == op.r.rs && op.r.rt == 0);
388 case OP_SPECIAL_SUB:
389 case OP_SPECIAL_SUBU:
390 return op.r.rd == op.r.rs && op.r.rt == 0;
391 case OP_SPECIAL_OR:
392 if (op.r.rd == op.r.rt)
393 return op.r.rd == op.r.rs || op.r.rs == 0;
394 else
395 return (op.r.rd == op.r.rs) && op.r.rt == 0;
396 case OP_SPECIAL_SLL:
397 case OP_SPECIAL_SRA:
398 case OP_SPECIAL_SRL:
399 return op.r.rd == op.r.rt && op.r.imm == 0;
98fa08a5
PC
400 case OP_SPECIAL_MFHI:
401 case OP_SPECIAL_MFLO:
402 return op.r.rd == 0;
d16005f8
PC
403 default:
404 return false;
405 }
406 case OP_ORI:
407 case OP_ADDI:
408 case OP_ADDIU:
409 return op.i.rt == op.i.rs && op.i.imm == 0;
410 case OP_BGTZ:
411 return (op.i.rs == 0 || op.i.imm == 1);
412 case OP_REGIMM:
413 return (op.i.op == OP_REGIMM_BLTZ ||
414 op.i.op == OP_REGIMM_BLTZAL) &&
415 (op.i.rs == 0 || op.i.imm == 1);
416 case OP_BNE:
417 return (op.i.rs == op.i.rt || op.i.imm == 1);
418 default:
419 return false;
420 }
421}
422
423bool load_in_delay_slot(union code op)
424{
425 switch (op.i.op) {
426 case OP_CP0:
427 switch (op.r.rs) {
428 case OP_CP0_MFC0:
429 case OP_CP0_CFC0:
430 return true;
431 default:
432 break;
433 }
434
435 break;
436 case OP_CP2:
437 if (op.r.op == OP_CP2_BASIC) {
438 switch (op.r.rs) {
439 case OP_CP2_BASIC_MFC2:
440 case OP_CP2_BASIC_CFC2:
441 return true;
442 default:
443 break;
444 }
445 }
446
447 break;
448 case OP_LB:
449 case OP_LH:
450 case OP_LW:
451 case OP_LWL:
452 case OP_LWR:
453 case OP_LBU:
454 case OP_LHU:
455 return true;
456 default:
457 break;
458 }
459
460 return false;
461}
462
22eee2ac
PC
463static u32 lightrec_propagate_consts(const struct opcode *op,
464 const struct opcode *prev,
465 u32 known, u32 *v)
d16005f8 466{
22eee2ac 467 union code c = prev->c;
98fa08a5 468
fd58fa32
PC
469 /* Register $zero is always, well, zero */
470 known |= BIT(0);
471 v[0] = 0;
472
98fa08a5 473 if (op->flags & LIGHTREC_SYNC)
22eee2ac 474 return BIT(0);
98fa08a5 475
d16005f8
PC
476 switch (c.i.op) {
477 case OP_SPECIAL:
478 switch (c.r.op) {
479 case OP_SPECIAL_SLL:
480 if (known & BIT(c.r.rt)) {
481 known |= BIT(c.r.rd);
482 v[c.r.rd] = v[c.r.rt] << c.r.imm;
483 } else {
484 known &= ~BIT(c.r.rd);
485 }
486 break;
487 case OP_SPECIAL_SRL:
488 if (known & BIT(c.r.rt)) {
489 known |= BIT(c.r.rd);
490 v[c.r.rd] = v[c.r.rt] >> c.r.imm;
491 } else {
492 known &= ~BIT(c.r.rd);
493 }
494 break;
495 case OP_SPECIAL_SRA:
496 if (known & BIT(c.r.rt)) {
497 known |= BIT(c.r.rd);
498 v[c.r.rd] = (s32)v[c.r.rt] >> c.r.imm;
499 } else {
500 known &= ~BIT(c.r.rd);
501 }
502 break;
503 case OP_SPECIAL_SLLV:
504 if (known & BIT(c.r.rt) && known & BIT(c.r.rs)) {
505 known |= BIT(c.r.rd);
506 v[c.r.rd] = v[c.r.rt] << (v[c.r.rs] & 0x1f);
507 } else {
508 known &= ~BIT(c.r.rd);
509 }
510 break;
511 case OP_SPECIAL_SRLV:
512 if (known & BIT(c.r.rt) && known & BIT(c.r.rs)) {
513 known |= BIT(c.r.rd);
514 v[c.r.rd] = v[c.r.rt] >> (v[c.r.rs] & 0x1f);
515 } else {
516 known &= ~BIT(c.r.rd);
517 }
518 break;
519 case OP_SPECIAL_SRAV:
520 if (known & BIT(c.r.rt) && known & BIT(c.r.rs)) {
521 known |= BIT(c.r.rd);
522 v[c.r.rd] = (s32)v[c.r.rt]
523 >> (v[c.r.rs] & 0x1f);
524 } else {
525 known &= ~BIT(c.r.rd);
526 }
527 break;
528 case OP_SPECIAL_ADD:
529 case OP_SPECIAL_ADDU:
530 if (known & BIT(c.r.rt) && known & BIT(c.r.rs)) {
531 known |= BIT(c.r.rd);
532 v[c.r.rd] = (s32)v[c.r.rt] + (s32)v[c.r.rs];
533 } else {
534 known &= ~BIT(c.r.rd);
535 }
536 break;
537 case OP_SPECIAL_SUB:
538 case OP_SPECIAL_SUBU:
539 if (known & BIT(c.r.rt) && known & BIT(c.r.rs)) {
540 known |= BIT(c.r.rd);
541 v[c.r.rd] = v[c.r.rt] - v[c.r.rs];
542 } else {
543 known &= ~BIT(c.r.rd);
544 }
545 break;
546 case OP_SPECIAL_AND:
547 if (known & BIT(c.r.rt) && known & BIT(c.r.rs)) {
548 known |= BIT(c.r.rd);
549 v[c.r.rd] = v[c.r.rt] & v[c.r.rs];
550 } else {
551 known &= ~BIT(c.r.rd);
552 }
553 break;
554 case OP_SPECIAL_OR:
555 if (known & BIT(c.r.rt) && known & BIT(c.r.rs)) {
556 known |= BIT(c.r.rd);
557 v[c.r.rd] = v[c.r.rt] | v[c.r.rs];
558 } else {
559 known &= ~BIT(c.r.rd);
560 }
561 break;
562 case OP_SPECIAL_XOR:
563 if (known & BIT(c.r.rt) && known & BIT(c.r.rs)) {
564 known |= BIT(c.r.rd);
565 v[c.r.rd] = v[c.r.rt] ^ v[c.r.rs];
566 } else {
567 known &= ~BIT(c.r.rd);
568 }
569 break;
570 case OP_SPECIAL_NOR:
571 if (known & BIT(c.r.rt) && known & BIT(c.r.rs)) {
572 known |= BIT(c.r.rd);
573 v[c.r.rd] = ~(v[c.r.rt] | v[c.r.rs]);
574 } else {
575 known &= ~BIT(c.r.rd);
576 }
577 break;
578 case OP_SPECIAL_SLT:
579 if (known & BIT(c.r.rt) && known & BIT(c.r.rs)) {
580 known |= BIT(c.r.rd);
581 v[c.r.rd] = (s32)v[c.r.rs] < (s32)v[c.r.rt];
582 } else {
583 known &= ~BIT(c.r.rd);
584 }
585 break;
586 case OP_SPECIAL_SLTU:
587 if (known & BIT(c.r.rt) && known & BIT(c.r.rs)) {
588 known |= BIT(c.r.rd);
589 v[c.r.rd] = v[c.r.rs] < v[c.r.rt];
590 } else {
591 known &= ~BIT(c.r.rd);
592 }
593 break;
594 default:
595 break;
596 }
597 break;
598 case OP_REGIMM:
599 break;
600 case OP_ADDI:
601 case OP_ADDIU:
602 if (known & BIT(c.i.rs)) {
603 known |= BIT(c.i.rt);
604 v[c.i.rt] = v[c.i.rs] + (s32)(s16)c.i.imm;
605 } else {
606 known &= ~BIT(c.i.rt);
607 }
608 break;
609 case OP_SLTI:
610 if (known & BIT(c.i.rs)) {
611 known |= BIT(c.i.rt);
612 v[c.i.rt] = (s32)v[c.i.rs] < (s32)(s16)c.i.imm;
613 } else {
614 known &= ~BIT(c.i.rt);
615 }
616 break;
617 case OP_SLTIU:
618 if (known & BIT(c.i.rs)) {
619 known |= BIT(c.i.rt);
620 v[c.i.rt] = v[c.i.rs] < (u32)(s32)(s16)c.i.imm;
621 } else {
622 known &= ~BIT(c.i.rt);
623 }
624 break;
625 case OP_ANDI:
626 if (known & BIT(c.i.rs)) {
627 known |= BIT(c.i.rt);
628 v[c.i.rt] = v[c.i.rs] & c.i.imm;
629 } else {
630 known &= ~BIT(c.i.rt);
631 }
632 break;
633 case OP_ORI:
634 if (known & BIT(c.i.rs)) {
635 known |= BIT(c.i.rt);
636 v[c.i.rt] = v[c.i.rs] | c.i.imm;
637 } else {
638 known &= ~BIT(c.i.rt);
639 }
640 break;
641 case OP_XORI:
642 if (known & BIT(c.i.rs)) {
643 known |= BIT(c.i.rt);
644 v[c.i.rt] = v[c.i.rs] ^ c.i.imm;
645 } else {
646 known &= ~BIT(c.i.rt);
647 }
648 break;
649 case OP_LUI:
650 known |= BIT(c.i.rt);
651 v[c.i.rt] = c.i.imm << 16;
652 break;
653 case OP_CP0:
654 switch (c.r.rs) {
655 case OP_CP0_MFC0:
656 case OP_CP0_CFC0:
657 known &= ~BIT(c.r.rt);
658 break;
659 }
660 break;
661 case OP_CP2:
662 if (c.r.op == OP_CP2_BASIC) {
663 switch (c.r.rs) {
664 case OP_CP2_BASIC_MFC2:
665 case OP_CP2_BASIC_CFC2:
666 known &= ~BIT(c.r.rt);
667 break;
668 }
669 }
670 break;
671 case OP_LB:
672 case OP_LH:
673 case OP_LWL:
674 case OP_LW:
675 case OP_LBU:
676 case OP_LHU:
677 case OP_LWR:
678 case OP_LWC2:
679 known &= ~BIT(c.i.rt);
680 break;
681 case OP_META_MOV:
682 if (known & BIT(c.r.rs)) {
683 known |= BIT(c.r.rd);
684 v[c.r.rd] = v[c.r.rs];
685 } else {
686 known &= ~BIT(c.r.rd);
687 }
688 break;
689 default:
690 break;
691 }
692
693 return known;
694}
695
/* Detect SLL/SRA pairs that implement an 8-bit or 16-bit sign extension
 * and replace them with the EXTC/EXTS meta-opcodes — or with a narrower
 * load (LB/LH) when the value comes straight from a LBU/LHU. */
static void lightrec_optimize_sll_sra(struct opcode *list, unsigned int offset)
{
	struct opcode *prev, *prev2 = NULL, *curr = &list[offset];
	struct opcode *to_change, *to_nop;
	int idx, idx2;

	/* Only shift amounts 24 (u8 cast) and 16 (u16 cast) are of interest */
	if (curr->r.imm != 24 && curr->r.imm != 16)
		return;

	/* Find the SLL that feeds this SRA */
	idx = find_prev_writer(list, offset, curr->r.rt);
	if (idx < 0)
		return;

	prev = &list[idx];

	if (prev->i.op != OP_SPECIAL || prev->r.op != OP_SPECIAL_SLL ||
	    prev->r.imm != curr->r.imm || prev->r.rd != curr->r.rt)
		return;

	if (prev->r.rd != prev->r.rt && curr->r.rd != curr->r.rt) {
		/* sll rY, rX, 16
		 * ...
		 * srl rZ, rY, 16 */

		if (!reg_is_dead(list, offset, curr->r.rt) ||
		    reg_is_read_or_written(list, idx, offset, curr->r.rd))
			return;

		/* If rY is dead after the SRL, and rZ is not used after the SLL,
		 * we can change rY to rZ */

		pr_debug("Detected SLL/SRA with middle temp register\n");
		prev->r.rd = curr->r.rd;
		curr->r.rt = prev->r.rd;
	}

	/* We got a SLL/SRA combo. If imm #16, that's a cast to u16.
	 * If imm #24 that's a cast to u8.
	 *
	 * First of all, make sure that the target register of the SLL is not
	 * read before the SRA. */

	if (prev->r.rd == prev->r.rt) {
		/* sll rX, rX, 16
		 * ...
		 * srl rY, rX, 16 */
		to_change = curr;
		to_nop = prev;

		/* rX is used after the SRA - we cannot convert it. */
		if (prev->r.rd != curr->r.rd && !reg_is_dead(list, offset, prev->r.rd))
			return;
	} else {
		/* sll rY, rX, 16
		 * ...
		 * srl rY, rY, 16 */
		to_change = prev;
		to_nop = curr;
	}

	idx2 = find_prev_writer(list, idx, prev->r.rt);
	if (idx2 >= 0) {
		/* Note that PSX games sometimes do casts after
		 * a LHU or LBU; in this case we can change the
		 * load opcode to a LH or LB, and the cast can
		 * be changed to a MOV or a simple NOP. */

		prev2 = &list[idx2];

		if (curr->r.rd != prev2->i.rt &&
		    !reg_is_dead(list, offset, prev2->i.rt))
			prev2 = NULL;
		else if (curr->r.imm == 16 && prev2->i.op == OP_LHU)
			prev2->i.op = OP_LH;
		else if (curr->r.imm == 24 && prev2->i.op == OP_LBU)
			prev2->i.op = OP_LB;
		else
			prev2 = NULL;

		if (prev2) {
			if (curr->r.rd == prev2->i.rt) {
				/* Load already targets the SRA's register:
				 * the cast collapses to nothing */
				to_change->opcode = 0;
			} else if (reg_is_dead(list, offset, prev2->i.rt) &&
				   !reg_is_read_or_written(list, idx2 + 1, offset, curr->r.rd)) {
				/* The target register of the SRA is dead after the
				 * LBU/LHU; we can change the target register of the
				 * LBU/LHU to the one of the SRA. */
				prev2->i.rt = curr->r.rd;
				to_change->opcode = 0;
			} else {
				/* Keep a register-to-register copy */
				to_change->i.op = OP_META_MOV;
				to_change->r.rd = curr->r.rd;
				to_change->r.rs = prev2->i.rt;
			}

			if (to_nop->r.imm == 24)
				pr_debug("Convert LBU+SLL+SRA to LB\n");
			else
				pr_debug("Convert LHU+SLL+SRA to LH\n");
		}
	}

	if (!prev2) {
		/* No matching load: lower the pair to a single
		 * sign-extension meta-opcode */
		pr_debug("Convert SLL/SRA #%u to EXT%c\n",
			 prev->r.imm,
			 prev->r.imm == 24 ? 'C' : 'S');

		if (to_change == prev) {
			to_change->i.rs = prev->r.rt;
			to_change->i.rt = curr->r.rd;
		} else {
			to_change->i.rt = curr->r.rd;
			to_change->i.rs = prev->r.rt;
		}

		if (to_nop->r.imm == 24)
			to_change->i.op = OP_META_EXTC;
		else
			to_change->i.op = OP_META_EXTS;
	}

	to_nop->opcode = 0;
}
819
02487de7
PC
/* Drop a LUI whose value is already known to sit in its target register,
 * or a "LUI rX, 0" whose zero can be folded into the next user of rX. */
static void lightrec_remove_useless_lui(struct block *block, unsigned int offset,
					u32 known, u32 *values)
{
	struct opcode *list = block->opcode_list,
		      *op = &block->opcode_list[offset];
	int reader;

	/* The register already holds exactly this constant: pure NOP */
	if (!(op->flags & LIGHTREC_SYNC) && (known & BIT(op->i.rt)) &&
	    values[op->i.rt] == op->i.imm << 16) {
		pr_debug("Converting duplicated LUI to NOP\n");
		op->opcode = 0x0;
		return;
	}

	if (op->i.imm != 0 || op->i.rt == 0)
		return;

	reader = find_next_reader(list, offset + 1, op->i.rt);
	if (reader <= 0)
		return;

	/* Safe only if the reader overwrites the register, or the register
	 * dies right after that read */
	if (opcode_writes_register(list[reader].c, op->i.rt) ||
	    reg_is_dead(list, reader, op->i.rt)) {
		pr_debug("Removing useless LUI 0x0\n");

		/* Redirect the reader to $zero, then NOP the LUI out */
		if (list[reader].i.rs == op->i.rt)
			list[reader].i.rs = 0;
		if (list[reader].i.op == OP_SPECIAL &&
		    list[reader].i.rt == op->i.rt)
			list[reader].i.rt = 0;
		op->opcode = 0x0;
	}
}
853
/* If the value built by this LUI is only ever used as a load/store
 * address before being overwritten, convert the immediate to its
 * kunseg (unsegmented) equivalent so address masking can be skipped. */
static void lightrec_modify_lui(struct block *block, unsigned int offset)
{
	union code c, *lui = &block->opcode_list[offset].c;
	bool stop = false, stop_next = false;
	unsigned int i;

	for (i = offset + 1; !stop && i < block->nb_ops; i++) {
		c = block->opcode_list[i].c;
		/* Process one extra opcode (the delay slot) after a branch */
		stop = stop_next;

		/* Bail out if the register is used as anything other than
		 * a load address (stores of the register itself included) */
		if ((opcode_is_store(c) && c.i.rt == lui->i.rt)
		    || (!opcode_is_load(c) && opcode_reads_register(c, lui->i.rt)))
			break;

		if (opcode_writes_register(c, lui->i.rt)) {
			/* NOTE(review): "i - 1 << 2" parses as "(i - 1) << 2"
			 * (additive binds tighter than shift); it looks like
			 * the LUI's own offset may have been intended here —
			 * verify which offset this debug line should print. */
			pr_debug("Convert LUI at offset 0x%x to kuseg\n",
				 i - 1 << 2);
			lui->i.imm = kunseg(lui->i.imm << 16) >> 16;
			break;
		}

		if (has_delay_slot(c))
			stop_next = true;
	}
}
879
/* Main opcode transformation pass: NOP-out useless opcodes, normalize
 * branch operands, and lower trivial ALU forms to meta-opcodes.
 * Always returns 0. */
static int lightrec_transform_ops(struct lightrec_state *state, struct block *block)
{
	struct opcode *list = block->opcode_list;
	struct opcode *prev, *op = NULL;
	u32 known = BIT(0);
	u32 values[32] = { 0 };
	unsigned int i;

	for (i = 0; i < block->nb_ops; i++) {
		prev = op;
		op = &list[i];

		if (prev)
			known = lightrec_propagate_consts(op, prev, known, values);

		/* Transform all opcodes detected as useless to real NOPs
		 * (0x0: SLL r0, r0, #0) */
		if (op->opcode != 0 && is_nop(op->c)) {
			pr_debug("Converting useless opcode 0x%08x to NOP\n",
				 op->opcode);
			op->opcode = 0x0;
		}

		if (!op->opcode)
			continue;

		switch (op->i.op) {
		case OP_BEQ:
			/* Canonicalize: $zero, if present, goes in rt */
			if (op->i.rs == op->i.rt) {
				op->i.rs = 0;
				op->i.rt = 0;
			} else if (op->i.rs == 0) {
				op->i.rs = op->i.rt;
				op->i.rt = 0;
			}
			break;

		case OP_BNE:
			if (op->i.rs == 0) {
				op->i.rs = op->i.rt;
				op->i.rt = 0;
			}
			break;

		case OP_LUI:
			lightrec_modify_lui(block, i);
			lightrec_remove_useless_lui(block, i, known, values);
			break;

		/* Transform ORI/ADDI/ADDIU with imm #0 or ORR/ADD/ADDU/SUB/SUBU
		 * with register $zero to the MOV meta-opcode */
		case OP_ORI:
		case OP_ADDI:
		case OP_ADDIU:
			if (op->i.imm == 0) {
				pr_debug("Convert ORI/ADDI/ADDIU #0 to MOV\n");
				op->i.op = OP_META_MOV;
				op->r.rd = op->i.rt;
			}
			break;
		case OP_SPECIAL:
			switch (op->r.op) {
			case OP_SPECIAL_SRA:
				if (op->r.imm == 0) {
					pr_debug("Convert SRA #0 to MOV\n");
					op->i.op = OP_META_MOV;
					op->r.rs = op->r.rt;
					break;
				}

				/* Possible tail of a SLL/SRA sign-extension */
				lightrec_optimize_sll_sra(block->opcode_list, i);
				break;
			case OP_SPECIAL_SLL:
			case OP_SPECIAL_SRL:
				if (op->r.imm == 0) {
					pr_debug("Convert SLL/SRL #0 to MOV\n");
					op->i.op = OP_META_MOV;
					op->r.rs = op->r.rt;
				}
				break;
			case OP_SPECIAL_OR:
			case OP_SPECIAL_ADD:
			case OP_SPECIAL_ADDU:
				if (op->r.rs == 0) {
					pr_debug("Convert OR/ADD $zero to MOV\n");
					op->i.op = OP_META_MOV;
					op->r.rs = op->r.rt;
				}
				/* Intentional fall-through: also check rt */
			case OP_SPECIAL_SUB: /* fall-through */
			case OP_SPECIAL_SUBU:
				if (op->r.rt == 0) {
					pr_debug("Convert OR/ADD/SUB $zero to MOV\n");
					op->i.op = OP_META_MOV;
				}
			default: /* fall-through */
				break;
			}
		default: /* fall-through */
			break;
		}
	}

	return 0;
}
984
/* When safe, swap a branch with its delay slot opcode so that the slot
 * runs first, then flag both with LIGHTREC_NO_DS. Always returns 0. */
static int lightrec_switch_delay_slots(struct lightrec_state *state, struct block *block)
{
	struct opcode *list, *next = &block->opcode_list[0];
	unsigned int i;
	union code op, next_op;
	u8 flags;

	for (i = 0; i < block->nb_ops - 1; i++) {
		list = next;
		next = &block->opcode_list[i + 1];
		next_op = next->c;
		op = list->c;

		if (!has_delay_slot(op) ||
		    list->flags & (LIGHTREC_NO_DS | LIGHTREC_EMULATE_BRANCH) ||
		    op.opcode == 0 || next_op.opcode == 0)
			continue;

		/* Don't touch a branch that itself sits in a delay slot */
		if (i && has_delay_slot(block->opcode_list[i - 1].c) &&
		    !(block->opcode_list[i - 1].flags & LIGHTREC_NO_DS))
			continue;

		if ((list->flags & LIGHTREC_SYNC) ||
		    (next->flags & LIGHTREC_SYNC))
			continue;

		/* The swap is only valid if the delay slot opcode does not
		 * read or clobber the registers the branch depends on.
		 * Note the deliberate fall-throughs between related cases. */
		switch (list->i.op) {
		case OP_SPECIAL:
			switch (op.r.op) {
			case OP_SPECIAL_JALR:
				if (opcode_reads_register(next_op, op.r.rd) ||
				    opcode_writes_register(next_op, op.r.rd))
					continue;
			case OP_SPECIAL_JR: /* fall-through */
				if (opcode_writes_register(next_op, op.r.rs))
					continue;
			default: /* fall-through */
				break;
			}
		case OP_J: /* fall-through */
			break;
		case OP_JAL:
			if (opcode_reads_register(next_op, 31) ||
			    opcode_writes_register(next_op, 31))
				continue;
			else
				break;
		case OP_BEQ:
		case OP_BNE:
			if (op.i.rt && opcode_writes_register(next_op, op.i.rt))
				continue;
		case OP_BLEZ: /* fall-through */
		case OP_BGTZ:
			if (op.i.rs && opcode_writes_register(next_op, op.i.rs))
				continue;
			break;
		case OP_REGIMM:
			switch (op.r.rt) {
			case OP_REGIMM_BLTZAL:
			case OP_REGIMM_BGEZAL:
				if (opcode_reads_register(next_op, 31) ||
				    opcode_writes_register(next_op, 31))
					continue;
			case OP_REGIMM_BLTZ: /* fall-through */
			case OP_REGIMM_BGEZ:
				if (op.i.rs &&
				    opcode_writes_register(next_op, op.i.rs))
					continue;
				break;
			}
		default: /* fall-through */
			break;
		}

		pr_debug("Swap branch and delay slot opcodes "
			 "at offsets 0x%x / 0x%x\n",
			 i << 2, (i + 1) << 2);

		/* Swap the two opcodes; each keeps the other's flags,
		 * plus NO_DS on both */
		flags = next->flags;
		list->c = next_op;
		next->c = op;
		next->flags = list->flags | LIGHTREC_NO_DS;
		list->flags = flags | LIGHTREC_NO_DS;
	}

	return 0;
}
1072
98fa08a5
PC
1073static int shrink_opcode_list(struct lightrec_state *state, struct block *block, u16 new_size)
1074{
1075 struct opcode *list;
1076
1077 if (new_size >= block->nb_ops) {
1078 pr_err("Invalid shrink size (%u vs %u)\n",
1079 new_size, block->nb_ops);
1080 return -EINVAL;
1081 }
1082
1083
1084 list = lightrec_malloc(state, MEM_FOR_IR,
1085 sizeof(*list) * new_size);
1086 if (!list) {
1087 pr_err("Unable to allocate memory\n");
1088 return -ENOMEM;
1089 }
1090
1091 memcpy(list, block->opcode_list, sizeof(*list) * new_size);
1092
1093 lightrec_free_opcode_list(state, block);
1094 block->opcode_list = list;
1095 block->nb_ops = new_size;
1096
1097 pr_debug("Shrunk opcode list of block PC 0x%08x to %u opcodes\n",
1098 block->pc, new_size);
1099
1100 return 0;
1101}
1102
/* Flag branches whose delay slot cannot be compiled (load with delay,
 * nested branch, or RFE in the slot) so they get interpreted instead.
 * Returns 0, or a negative errno from shrink_opcode_list(). */
static int lightrec_detect_impossible_branches(struct lightrec_state *state,
					       struct block *block)
{
	struct opcode *op, *next = &block->opcode_list[0];
	unsigned int i;
	int ret = 0;

	for (i = 0; i < block->nb_ops - 1; i++) {
		op = next;
		next = &block->opcode_list[i + 1];

		if (!has_delay_slot(op->c) ||
		    (!load_in_delay_slot(next->c) &&
		     !has_delay_slot(next->c) &&
		     !(next->i.op == OP_CP0 && next->r.rs == OP_CP0_RFE)))
			continue;

		if (op->c.opcode == next->c.opcode) {
			/* The delay slot is the exact same opcode as the branch
			 * opcode: this is effectively a NOP */
			next->c.opcode = 0;
			continue;
		}

		op->flags |= LIGHTREC_EMULATE_BRANCH;

		if (op == block->opcode_list) {
			pr_debug("First opcode of block PC 0x%08x is an impossible branch\n",
				 block->pc);

			/* If the first opcode is an 'impossible' branch, we
			 * only keep the first two opcodes of the block (the
			 * branch itself + its delay slot) */
			if (block->nb_ops > 2)
				ret = shrink_opcode_list(state, block, 2);
			break;
		}
	}

	return ret;
}
1144
/* Detect branches whose target lies inside the current block; flag the
 * branch LOCAL_BRANCH and its target opcode SYNC. Always returns 0. */
static int lightrec_local_branches(struct lightrec_state *state, struct block *block)
{
	struct opcode *list;
	unsigned int i;
	s32 offset;

	for (i = 0; i < block->nb_ops; i++) {
		list = &block->opcode_list[i];

		if (should_emulate(list))
			continue;

		switch (list->i.op) {
		case OP_BEQ:
		case OP_BNE:
		case OP_BLEZ:
		case OP_BGTZ:
		case OP_REGIMM:
			/* Branch target, in opcodes, relative to block start */
			offset = i + 1 + (s16)list->i.imm;
			if (offset >= 0 && offset < block->nb_ops)
				break;
		default: /* fall-through */
			continue;
		}

		pr_debug("Found local branch to offset 0x%x\n", offset << 2);

		if (should_emulate(&block->opcode_list[offset])) {
			pr_debug("Branch target must be emulated - skip\n");
			continue;
		}

		if (offset && has_delay_slot(block->opcode_list[offset - 1].c)) {
			pr_debug("Branch target is a delay slot - skip\n");
			continue;
		}

		pr_debug("Adding sync at offset 0x%x\n", offset << 2);

		block->opcode_list[offset].flags |= LIGHTREC_SYNC;
		list->flags |= LIGHTREC_LOCAL_BRANCH;
	}

	return 0;
}
1190
1191bool has_delay_slot(union code op)
1192{
1193 switch (op.i.op) {
1194 case OP_SPECIAL:
1195 switch (op.r.op) {
1196 case OP_SPECIAL_JR:
1197 case OP_SPECIAL_JALR:
1198 return true;
1199 default:
1200 return false;
1201 }
1202 case OP_J:
1203 case OP_JAL:
1204 case OP_BEQ:
1205 case OP_BNE:
1206 case OP_BLEZ:
1207 case OP_BGTZ:
1208 case OP_REGIMM:
d16005f8
PC
1209 return true;
1210 default:
1211 return false;
1212 }
1213}
1214
98fa08a5 1215bool should_emulate(const struct opcode *list)
d16005f8 1216{
98fa08a5
PC
1217 return has_delay_slot(list->c) &&
1218 (list->flags & LIGHTREC_EMULATE_BRANCH);
d16005f8
PC
1219}
1220
98fa08a5 1221static void lightrec_add_unload(struct opcode *op, u8 reg)
d16005f8 1222{
98fa08a5
PC
1223 if (op->i.op == OP_SPECIAL && reg == op->r.rd)
1224 op->flags |= LIGHTREC_UNLOAD_RD;
d16005f8 1225
98fa08a5
PC
1226 if (op->i.rs == reg)
1227 op->flags |= LIGHTREC_UNLOAD_RS;
1228 if (op->i.rt == reg)
1229 op->flags |= LIGHTREC_UNLOAD_RT;
1230}
d16005f8 1231
98fa08a5
PC
/* For each register, find its last use in the block and flag that
 * opcode so the register is unloaded from the cache as early as
 * possible. Always returns 0. */
static int lightrec_early_unload(struct lightrec_state *state, struct block *block)
{
	unsigned int i, offset;
	struct opcode *op;
	u8 reg;

	/* 34 slots: 32 GPRs plus REG_LO / REG_HI; $zero is skipped */
	for (reg = 1; reg < 34; reg++) {
		int last_r_id = -1, last_w_id = -1;

		for (i = 0; i < block->nb_ops; i++) {
			union code c = block->opcode_list[i].c;

			if (opcode_reads_register(c, reg))
				last_r_id = i;
			if (opcode_writes_register(c, reg))
				last_w_id = i;
		}

		/* Unload at the later of the last read / last write */
		if (last_w_id > last_r_id)
			offset = (unsigned int)last_w_id;
		else if (last_r_id >= 0)
			offset = (unsigned int)last_r_id;
		else
			continue;

		op = &block->opcode_list[offset];

		/* If the last use is a swapped branch (NO_DS), the delay
		 * slot runs first: unload after the slot instead */
		if (has_delay_slot(op->c) && (op->flags & LIGHTREC_NO_DS))
			offset++;

		if (offset == block->nb_ops)
			continue;

		lightrec_add_unload(&block->opcode_list[offset], reg);
	}

	return 0;
}
1270
98fa08a5 1271static int lightrec_flag_io(struct lightrec_state *state, struct block *block)
d16005f8 1272{
02487de7
PC
1273 struct opcode *prev = NULL, *list = NULL;
1274 enum psx_map psx_map;
d16005f8
PC
1275 u32 known = BIT(0);
1276 u32 values[32] = { 0 };
98fa08a5 1277 unsigned int i;
02487de7 1278 u32 val, kunseg_val;
98fa08a5
PC
1279
1280 for (i = 0; i < block->nb_ops; i++) {
22eee2ac 1281 prev = list;
98fa08a5 1282 list = &block->opcode_list[i];
d16005f8 1283
22eee2ac
PC
1284 if (prev)
1285 known = lightrec_propagate_consts(list, prev, known, values);
1286
d16005f8
PC
1287 switch (list->i.op) {
1288 case OP_SB:
1289 case OP_SH:
1290 case OP_SW:
98fa08a5
PC
1291 if (OPT_FLAG_STORES) {
1292 /* Mark all store operations that target $sp or $gp
1293 * as not requiring code invalidation. This is based
1294 * on the heuristic that stores using one of these
1295 * registers as address will never hit a code page. */
1296 if (list->i.rs >= 28 && list->i.rs <= 29 &&
1297 !state->maps[PSX_MAP_KERNEL_USER_RAM].ops) {
1298 pr_debug("Flaging opcode 0x%08x as not "
1299 "requiring invalidation\n",
1300 list->opcode);
1301 list->flags |= LIGHTREC_NO_INVALIDATE;
1302 }
1303
1304 /* Detect writes whose destination address is inside the
1305 * current block, using constant propagation. When these
1306 * occur, we mark the blocks as not compilable. */
1307 if ((known & BIT(list->i.rs)) &&
1308 kunseg(values[list->i.rs]) >= kunseg(block->pc) &&
1309 kunseg(values[list->i.rs]) < (kunseg(block->pc) +
1310 block->nb_ops * 4)) {
1311 pr_debug("Self-modifying block detected\n");
1312 block->flags |= BLOCK_NEVER_COMPILE;
1313 list->flags |= LIGHTREC_SMC;
1314 }
1315 }
1316 case OP_SWL: /* fall-through */
1317 case OP_SWR:
1318 case OP_SWC2:
1319 case OP_LB:
1320 case OP_LBU:
1321 case OP_LH:
1322 case OP_LHU:
1323 case OP_LW:
1324 case OP_LWL:
1325 case OP_LWR:
1326 case OP_LWC2:
1327 if (OPT_FLAG_IO && (known & BIT(list->i.rs))) {
22eee2ac 1328 val = values[list->i.rs] + (s16) list->i.imm;
02487de7
PC
1329 kunseg_val = kunseg(val);
1330 psx_map = lightrec_get_map_idx(state, kunseg_val);
1331
1332 switch (psx_map) {
1333 case PSX_MAP_KERNEL_USER_RAM:
1334 if (val == kunseg_val)
1335 list->flags |= LIGHTREC_NO_MASK;
1336 /* fall-through */
1337 case PSX_MAP_MIRROR1:
1338 case PSX_MAP_MIRROR2:
1339 case PSX_MAP_MIRROR3:
22eee2ac
PC
1340 pr_debug("Flaging opcode %u as RAM access\n", i);
1341 list->flags |= LIGHTREC_IO_MODE(LIGHTREC_IO_RAM);
02487de7
PC
1342 break;
1343 case PSX_MAP_BIOS:
22eee2ac
PC
1344 pr_debug("Flaging opcode %u as BIOS access\n", i);
1345 list->flags |= LIGHTREC_IO_MODE(LIGHTREC_IO_BIOS);
02487de7
PC
1346 break;
1347 case PSX_MAP_SCRATCH_PAD:
22eee2ac
PC
1348 pr_debug("Flaging opcode %u as scratchpad access\n", i);
1349 list->flags |= LIGHTREC_IO_MODE(LIGHTREC_IO_SCRATCH);
02487de7
PC
1350
1351 /* Consider that we're never going to run code from
1352 * the scratchpad. */
1353 list->flags |= LIGHTREC_NO_INVALIDATE;
1354 break;
1355 default:
1356 pr_debug("Flagging opcode %u as I/O access\n",
1357 i);
1358 list->flags |= LIGHTREC_IO_MODE(LIGHTREC_IO_HW);
1359 break;
98fa08a5 1360 }
d16005f8
PC
1361 }
1362 default: /* fall-through */
1363 break;
1364 }
d16005f8
PC
1365 }
1366
1367 return 0;
1368}
1369
98fa08a5
PC
/* Starting at "offset", scan forward through the block to decide where the
 * LO (if "mflo") or HI (otherwise) half of a preceding MULT/DIV result is
 * consumed.
 *
 * Returns:
 *  - 0 when that half of the result is provably never read before being
 *    overwritten (so it need not be computed at all);
 *  - REG_LO/REG_HI when the value must stay in its dedicated register;
 *  - otherwise the GPR that the consuming MFLO/MFHI targets, so the move
 *    can be folded directly into the MULT/DIV.
 *
 * NOTE(review): parameter semantics below are inferred from this function
 * and its callers in this file — confirm upstream:
 *  - "mask": registers read/written along the path so far; a clobbered
 *    register can no longer serve as the direct target.
 *  - "sync": a LIGHTREC_SYNC point was crossed, forcing REG_LO/REG_HI.
 *  - "last": non-NULL when re-scanning a fall-through path whose branch
 *    was already handled (suppresses branch recursion).
 *  - "another": set while probing for a second MFHI/MFLO consumer. */
static u8 get_mfhi_mflo_reg(const struct block *block, u16 offset,
			    const struct opcode *last,
			    u32 mask, bool sync, bool mflo, bool another)
{
	const struct opcode *op, *next = &block->opcode_list[offset];
	u32 old_mask;
	u8 reg2, reg = mflo ? REG_LO : REG_HI;
	u16 branch_offset;
	unsigned int i;

	for (i = offset; i < block->nb_ops; i++) {
		op = next;
		next = &block->opcode_list[i + 1];
		old_mask = mask;

		/* If any other opcode writes or reads to the register
		 * we'd use, then we cannot use it anymore. */
		mask |= opcode_read_mask(op->c);
		mask |= opcode_write_mask(op->c);

		if (op->flags & LIGHTREC_SYNC)
			sync = true;

		switch (op->i.op) {
		case OP_BEQ:
		case OP_BNE:
		case OP_BLEZ:
		case OP_BGTZ:
		case OP_REGIMM:
			/* TODO: handle backwards branches too */
			if (!last &&
			    (op->flags & LIGHTREC_LOCAL_BRANCH) &&
			    (s16)op->c.i.imm >= 0) {
				/* Branch target inside the block: recurse down
				 * both the taken and the fall-through paths,
				 * adjusting for a swapped delay slot. */
				branch_offset = i + 1 + (s16)op->c.i.imm
					- !!(OPT_SWITCH_DELAY_SLOTS && (op->flags & LIGHTREC_NO_DS));

				reg = get_mfhi_mflo_reg(block, branch_offset, NULL,
							mask, sync, mflo, false);
				reg2 = get_mfhi_mflo_reg(block, offset + 1, next,
							 mask, sync, mflo, false);
				/* Both paths agree on a usable target. */
				if (reg > 0 && reg == reg2)
					return reg;
				/* Neither path consumes the value. */
				if (!reg && !reg2)
					return 0;
			}

			/* Paths disagree (or branch not analyzable): play it
			 * safe and keep the value in its dedicated register. */
			return mflo ? REG_LO : REG_HI;
		case OP_SPECIAL:
			switch (op->r.op) {
			case OP_SPECIAL_MULT:
			case OP_SPECIAL_MULTU:
			case OP_SPECIAL_DIV:
			case OP_SPECIAL_DIVU:
				/* HI/LO overwritten before being read. */
				return 0;
			case OP_SPECIAL_MTHI:
				if (!mflo)
					return 0;
				continue;
			case OP_SPECIAL_MTLO:
				if (mflo)
					return 0;
				continue;
			case OP_SPECIAL_JR:
				/* Not a function return: value may be read by
				 * the jump target, keep the safe register. */
				if (op->r.rs != 31)
					return reg;

				/* JR $ra with an MFHI/MFLO in the delay slot:
				 * that move's destination is the consumer. */
				if (!sync &&
				    !(op->flags & LIGHTREC_NO_DS) &&
				    (next->i.op == OP_SPECIAL) &&
				    ((!mflo && next->r.op == OP_SPECIAL_MFHI) ||
				    (mflo && next->r.op == OP_SPECIAL_MFLO)))
					return next->r.rd;

				return 0;
			case OP_SPECIAL_JALR:
				return reg;
			case OP_SPECIAL_MFHI:
				if (!mflo) {
					if (another)
						return op->r.rd;
					/* Must use REG_HI if there is another MFHI target*/
					reg2 = get_mfhi_mflo_reg(block, i + 1, next,
							 0, sync, mflo, true);
					if (reg2 > 0 && reg2 != REG_HI)
						return REG_HI;

					/* Target GPR usable only if untouched
					 * before this MFHI and no sync point. */
					if (!sync && !(old_mask & BIT(op->r.rd)))
						return op->r.rd;
					else
						return REG_HI;
				}
				continue;
			case OP_SPECIAL_MFLO:
				if (mflo) {
					if (another)
						return op->r.rd;
					/* Must use REG_LO if there is another MFLO target*/
					reg2 = get_mfhi_mflo_reg(block, i + 1, next,
							 0, sync, mflo, true);
					if (reg2 > 0 && reg2 != REG_LO)
						return REG_LO;

					/* Target GPR usable only if untouched
					 * before this MFLO and no sync point. */
					if (!sync && !(old_mask & BIT(op->r.rd)))
						return op->r.rd;
					else
						return REG_LO;
				}
				continue;
			default:
				break;
			}

			/* fall-through */
		default:
			continue;
		}
	}

	/* End of block reached without a decisive consumer. */
	return reg;
}
1490
1491static void lightrec_replace_lo_hi(struct block *block, u16 offset,
1492 u16 last, bool lo)
1493{
1494 unsigned int i;
1495 u32 branch_offset;
1496
1497 /* This function will remove the following MFLO/MFHI. It must be called
1498 * only if get_mfhi_mflo_reg() returned a non-zero value. */
1499
1500 for (i = offset; i < last; i++) {
1501 struct opcode *op = &block->opcode_list[i];
1502
1503 switch (op->i.op) {
1504 case OP_BEQ:
1505 case OP_BNE:
1506 case OP_BLEZ:
1507 case OP_BGTZ:
1508 case OP_REGIMM:
1509 /* TODO: handle backwards branches too */
1510 if ((op->flags & LIGHTREC_LOCAL_BRANCH) &&
1511 (s16)op->c.i.imm >= 0) {
1512 branch_offset = i + 1 + (s16)op->c.i.imm
1513 - !!(OPT_SWITCH_DELAY_SLOTS && (op->flags & LIGHTREC_NO_DS));
1514
1515 lightrec_replace_lo_hi(block, branch_offset, last, lo);
1516 lightrec_replace_lo_hi(block, i + 1, branch_offset, lo);
1517 }
1518 break;
1519
1520 case OP_SPECIAL:
1521 if (lo && op->r.op == OP_SPECIAL_MFLO) {
1522 pr_debug("Removing MFLO opcode at offset 0x%x\n",
1523 i << 2);
1524 op->opcode = 0;
1525 return;
1526 } else if (!lo && op->r.op == OP_SPECIAL_MFHI) {
1527 pr_debug("Removing MFHI opcode at offset 0x%x\n",
1528 i << 2);
1529 op->opcode = 0;
1530 return;
1531 }
1532
1533 /* fall-through */
1534 default:
1535 break;
1536 }
1537 }
d16005f8
PC
1538}
1539
fd58fa32
PC
/* On a MIPS host the div-by-zero guard can always be skipped; on every
 * other architecture it must stay unless proven unnecessary. */
static bool lightrec_always_skip_div_check(void)
{
#ifdef __mips__
	const bool host_is_mips = true;
#else
	const bool host_is_mips = false;
#endif
	return host_is_mips;
}
1548
/* Annotate every MULT/MULTU/DIV/DIVU in the block:
 *  - skip the div-by-zero check when the divisor is a known non-zero
 *    constant (or the host makes the check unnecessary);
 *  - when LO/HI is never consumed, set LIGHTREC_NO_LO/NO_HI so codegen can
 *    drop that half of the result;
 *  - when a following MFLO/MFHI moves the result into a GPR, remove that
 *    move and store the GPR in the opcode's rd (LO) / imm (HI) fields.
 * Always returns 0. */
static int lightrec_flag_mults_divs(struct lightrec_state *state, struct block *block)
{
	struct opcode *prev, *list = NULL;
	u8 reg_hi, reg_lo;
	unsigned int i;
	u32 known = BIT(0);
	u32 values[32] = { 0 };

	/* nb_ops - 1: a MULT/DIV as the last opcode has no room for a
	 * following consumer in this block anyway. */
	for (i = 0; i < block->nb_ops - 1; i++) {
		prev = list;
		list = &block->opcode_list[i];

		if (prev)
			known = lightrec_propagate_consts(list, prev, known, values);

		if (list->i.op != OP_SPECIAL)
			continue;

		switch (list->r.op) {
		case OP_SPECIAL_DIV:
		case OP_SPECIAL_DIVU:
			/* If we are dividing by a non-zero constant, don't
			 * emit the div-by-zero check. */
			if (lightrec_always_skip_div_check() ||
			    (known & BIT(list->c.r.rt) && values[list->c.r.rt]))
				list->flags |= LIGHTREC_NO_DIV_CHECK;
		case OP_SPECIAL_MULT: /* fall-through */
		case OP_SPECIAL_MULTU:
			break;
		default:
			continue;
		}

		/* Don't support opcodes in delay slots */
		if ((i && has_delay_slot(block->opcode_list[i - 1].c)) ||
		    (list->flags & LIGHTREC_NO_DS)) {
			continue;
		}

		/* Where (if anywhere) is the LO half consumed? */
		reg_lo = get_mfhi_mflo_reg(block, i + 1, NULL, 0, false, true, false);
		if (reg_lo == 0) {
			pr_debug("Mark MULT(U)/DIV(U) opcode at offset 0x%x as"
				 " not writing LO\n", i << 2);
			list->flags |= LIGHTREC_NO_LO;
		}

		/* Same question for the HI half. */
		reg_hi = get_mfhi_mflo_reg(block, i + 1, NULL, 0, false, false, false);
		if (reg_hi == 0) {
			pr_debug("Mark MULT(U)/DIV(U) opcode at offset 0x%x as"
				 " not writing HI\n", i << 2);
			list->flags |= LIGHTREC_NO_HI;
		}

		/* Both halves unused here most likely means a caller block
		 * reads them: don't drop the computation. */
		if (!reg_lo && !reg_hi) {
			pr_debug("Both LO/HI unused in this block, they will "
				 "probably be used in parent block - removing "
				 "flags.\n");
			list->flags &= ~(LIGHTREC_NO_LO | LIGHTREC_NO_HI);
		}

		/* Fold the MFLO into the MULT/DIV: rd holds the LO target. */
		if (reg_lo > 0 && reg_lo != REG_LO) {
			pr_debug("Found register %s to hold LO (rs = %u, rt = %u)\n",
				 lightrec_reg_name(reg_lo), list->r.rs, list->r.rt);

			lightrec_replace_lo_hi(block, i + 1, block->nb_ops, true);
			list->r.rd = reg_lo;
		} else {
			list->r.rd = 0;
		}

		/* Fold the MFHI likewise: imm holds the HI target. */
		if (reg_hi > 0 && reg_hi != REG_HI) {
			pr_debug("Found register %s to hold HI (rs = %u, rt = %u)\n",
				 lightrec_reg_name(reg_hi), list->r.rs, list->r.rt);

			lightrec_replace_lo_hi(block, i + 1, block->nb_ops, false);
			list->r.imm = reg_hi;
		} else {
			list->r.imm = 0;
		}
	}

	return 0;
}
1632
1633static bool remove_div_sequence(struct block *block, unsigned int offset)
1634{
1635 struct opcode *op;
1636 unsigned int i, found = 0;
1637
1638 /*
1639 * Scan for the zero-checking sequence that GCC automatically introduced
1640 * after most DIV/DIVU opcodes. This sequence checks the value of the
1641 * divisor, and if zero, executes a BREAK opcode, causing the BIOS
1642 * handler to crash the PS1.
1643 *
1644 * For DIV opcodes, this sequence additionally checks that the signed
1645 * operation does not overflow.
1646 *
1647 * With the assumption that the games never crashed the PS1, we can
1648 * therefore assume that the games never divided by zero or overflowed,
1649 * and these sequences can be removed.
1650 */
1651
1652 for (i = offset; i < block->nb_ops; i++) {
1653 op = &block->opcode_list[i];
1654
1655 if (!found) {
1656 if (op->i.op == OP_SPECIAL &&
1657 (op->r.op == OP_SPECIAL_DIV || op->r.op == OP_SPECIAL_DIVU))
1658 break;
1659
1660 if ((op->opcode & 0xfc1fffff) == 0x14000002) {
1661 /* BNE ???, zero, +8 */
1662 found++;
1663 } else {
1664 offset++;
1665 }
1666 } else if (found == 1 && !op->opcode) {
1667 /* NOP */
1668 found++;
1669 } else if (found == 2 && op->opcode == 0x0007000d) {
1670 /* BREAK 0x1c00 */
1671 found++;
1672 } else if (found == 3 && op->opcode == 0x2401ffff) {
1673 /* LI at, -1 */
1674 found++;
1675 } else if (found == 4 && (op->opcode & 0xfc1fffff) == 0x14010004) {
1676 /* BNE ???, at, +16 */
1677 found++;
1678 } else if (found == 5 && op->opcode == 0x3c018000) {
1679 /* LUI at, 0x8000 */
1680 found++;
1681 } else if (found == 6 && (op->opcode & 0x141fffff) == 0x14010002) {
1682 /* BNE ???, at, +16 */
1683 found++;
1684 } else if (found == 7 && !op->opcode) {
1685 /* NOP */
1686 found++;
1687 } else if (found == 8 && op->opcode == 0x0006000d) {
1688 /* BREAK 0x1800 */
1689 found++;
1690 break;
1691 } else {
1692 break;
1693 }
1694 }
1695
1696 if (found >= 3) {
1697 if (found != 9)
1698 found = 3;
1699
1700 pr_debug("Removing DIV%s sequence at offset 0x%x\n",
1701 found == 9 ? "" : "U", offset << 2);
1702
1703 for (i = 0; i < found; i++)
1704 block->opcode_list[offset + i].opcode = 0;
1705
1706 return true;
1707 }
1708
1709 return false;
1710}
1711
1712static int lightrec_remove_div_by_zero_check_sequence(struct lightrec_state *state,
1713 struct block *block)
1714{
1715 struct opcode *op;
1716 unsigned int i;
1717
1718 for (i = 0; i < block->nb_ops; i++) {
1719 op = &block->opcode_list[i];
1720
1721 if (op->i.op == OP_SPECIAL &&
1722 (op->r.op == OP_SPECIAL_DIVU || op->r.op == OP_SPECIAL_DIV) &&
1723 remove_div_sequence(block, i + 1))
1724 op->flags |= LIGHTREC_NO_DIV_CHECK;
1725 }
1726
1727 return 0;
1728}
1729
/* Reference opcode sequence of a memset loop; lightrec_replace_memset()
 * compares blocks word-for-word against it to detect memset blocks. */
static const u32 memset_code[] = {
	0x10a00006,	// beqz a1, 2f
	0x24a2ffff,	// addiu v0,a1,-1
	0x2403ffff,	// li v1,-1
	0xac800000,	// 1: sw zero,0(a0)
	0x2442ffff,	// addiu v0,v0,-1
	0x1443fffd,	// bne v0,v1, 1b
	0x24840004,	// addiu a0,a0,4
	0x03e00008,	// 2: jr ra
	0x00000000,	// nop
};
1741
1742static int lightrec_replace_memset(struct lightrec_state *state, struct block *block)
1743{
1744 unsigned int i;
1745 union code c;
1746
1747 for (i = 0; i < block->nb_ops; i++) {
1748 c = block->opcode_list[i].c;
1749
1750 if (c.opcode != memset_code[i])
1751 return 0;
1752
1753 if (i == ARRAY_SIZE(memset_code) - 1) {
1754 /* success! */
1755 pr_debug("Block at PC 0x%x is a memset\n", block->pc);
1756 block->flags |= BLOCK_IS_MEMSET | BLOCK_NEVER_COMPILE;
1757
1758 /* Return non-zero to skip other optimizers. */
1759 return 1;
d16005f8
PC
1760 }
1761 }
1762
1763 return 0;
1764}
1765
98fa08a5
PC
/* Ordered table of optimizer passes; disabled passes (compile-time OPT_*
 * switches) are NULL entries, skipped by lightrec_optimize(). */
static int (*lightrec_optimizers[])(struct lightrec_state *state, struct block *) = {
	IF_OPT(OPT_REMOVE_DIV_BY_ZERO_SEQ, &lightrec_remove_div_by_zero_check_sequence),
	IF_OPT(OPT_REPLACE_MEMSET, &lightrec_replace_memset),
	IF_OPT(OPT_DETECT_IMPOSSIBLE_BRANCHES, &lightrec_detect_impossible_branches),
	IF_OPT(OPT_LOCAL_BRANCHES, &lightrec_local_branches),
	IF_OPT(OPT_TRANSFORM_OPS, &lightrec_transform_ops),
	IF_OPT(OPT_SWITCH_DELAY_SLOTS, &lightrec_switch_delay_slots),
	IF_OPT(OPT_FLAG_IO || OPT_FLAG_STORES, &lightrec_flag_io),
	IF_OPT(OPT_FLAG_MULT_DIV, &lightrec_flag_mults_divs),
	IF_OPT(OPT_EARLY_UNLOAD, &lightrec_early_unload),
};
1777
98fa08a5 1778int lightrec_optimize(struct lightrec_state *state, struct block *block)
d16005f8
PC
1779{
1780 unsigned int i;
98fa08a5 1781 int ret;
d16005f8
PC
1782
1783 for (i = 0; i < ARRAY_SIZE(lightrec_optimizers); i++) {
98fa08a5
PC
1784 if (lightrec_optimizers[i]) {
1785 ret = (*lightrec_optimizers[i])(state, block);
1786 if (ret)
1787 return ret;
1788 }
d16005f8
PC
1789 }
1790
1791 return 0;
1792}