psxmem: Add support for Lightrec's custom mem init sequence
[pcsx_rearmed.git] / deps / lightrec / optimizer.c
CommitLineData
98fa08a5 1// SPDX-License-Identifier: LGPL-2.1-or-later
d16005f8 2/*
98fa08a5 3 * Copyright (C) 2014-2021 Paul Cercueil <paul@crapouillou.net>
d16005f8
PC
4 */
5
98fa08a5 6#include "lightrec-config.h"
d16005f8
PC
7#include "disassembler.h"
8#include "lightrec.h"
9#include "memmanager.h"
10#include "optimizer.h"
11#include "regcache.h"
12
13#include <errno.h>
14#include <stdbool.h>
15#include <stdlib.h>
98fa08a5
PC
16#include <string.h>
17
18#define IF_OPT(opt, ptr) ((opt) ? (ptr) : NULL)
d16005f8
PC
19
/* An ordered set of optimization passes to run over a block's opcode list. */
struct optimizer_list {
	void (**optimizers)(struct opcode *);
	unsigned int nb_optimizers;
};
24
98fa08a5
PC
25static bool is_nop(union code op);
26
27bool is_unconditional_jump(union code c)
28{
29 switch (c.i.op) {
30 case OP_SPECIAL:
31 return c.r.op == OP_SPECIAL_JR || c.r.op == OP_SPECIAL_JALR;
32 case OP_J:
33 case OP_JAL:
34 return true;
35 case OP_BEQ:
36 case OP_BLEZ:
37 return c.i.rs == c.i.rt;
38 case OP_REGIMM:
39 return (c.r.rt == OP_REGIMM_BGEZ ||
40 c.r.rt == OP_REGIMM_BGEZAL) && c.i.rs == 0;
41 default:
42 return false;
43 }
44}
45
46bool is_syscall(union code c)
47{
48 return (c.i.op == OP_SPECIAL && c.r.op == OP_SPECIAL_SYSCALL) ||
49 (c.i.op == OP_CP0 && (c.r.rs == OP_CP0_MTC0 ||
50 c.r.rs == OP_CP0_CTC0) &&
51 (c.r.rd == 12 || c.r.rd == 13));
52}
53
54static u64 opcode_read_mask(union code op)
d16005f8
PC
55{
56 switch (op.i.op) {
57 case OP_SPECIAL:
58 switch (op.r.op) {
59 case OP_SPECIAL_SYSCALL:
60 case OP_SPECIAL_BREAK:
98fa08a5 61 return 0;
d16005f8
PC
62 case OP_SPECIAL_JR:
63 case OP_SPECIAL_JALR:
64 case OP_SPECIAL_MTHI:
65 case OP_SPECIAL_MTLO:
98fa08a5 66 return BIT(op.r.rs);
d16005f8 67 case OP_SPECIAL_MFHI:
98fa08a5 68 return BIT(REG_HI);
d16005f8 69 case OP_SPECIAL_MFLO:
98fa08a5 70 return BIT(REG_LO);
d16005f8
PC
71 case OP_SPECIAL_SLL:
72 case OP_SPECIAL_SRL:
73 case OP_SPECIAL_SRA:
98fa08a5 74 return BIT(op.r.rt);
d16005f8 75 default:
98fa08a5 76 return BIT(op.r.rs) | BIT(op.r.rt);
d16005f8
PC
77 }
78 case OP_CP0:
79 switch (op.r.rs) {
80 case OP_CP0_MTC0:
81 case OP_CP0_CTC0:
98fa08a5 82 return BIT(op.r.rt);
d16005f8 83 default:
98fa08a5 84 return 0;
d16005f8
PC
85 }
86 case OP_CP2:
87 if (op.r.op == OP_CP2_BASIC) {
88 switch (op.r.rs) {
89 case OP_CP2_BASIC_MTC2:
90 case OP_CP2_BASIC_CTC2:
98fa08a5 91 return BIT(op.r.rt);
d16005f8 92 default:
98fa08a5 93 break;
d16005f8 94 }
d16005f8 95 }
98fa08a5 96 return 0;
d16005f8
PC
97 case OP_J:
98 case OP_JAL:
99 case OP_LUI:
98fa08a5 100 return 0;
d16005f8
PC
101 case OP_BEQ:
102 case OP_BNE:
103 case OP_LWL:
104 case OP_LWR:
105 case OP_SB:
106 case OP_SH:
107 case OP_SWL:
108 case OP_SW:
109 case OP_SWR:
98fa08a5 110 return BIT(op.i.rs) | BIT(op.i.rt);
d16005f8 111 default:
98fa08a5 112 return BIT(op.i.rs);
d16005f8
PC
113 }
114}
115
98fa08a5 116static u64 opcode_write_mask(union code op)
d16005f8 117{
98fa08a5
PC
118 u64 flags;
119
d16005f8
PC
120 switch (op.i.op) {
121 case OP_SPECIAL:
122 switch (op.r.op) {
123 case OP_SPECIAL_JR:
d16005f8
PC
124 case OP_SPECIAL_SYSCALL:
125 case OP_SPECIAL_BREAK:
98fa08a5 126 return 0;
d16005f8
PC
127 case OP_SPECIAL_MULT:
128 case OP_SPECIAL_MULTU:
129 case OP_SPECIAL_DIV:
130 case OP_SPECIAL_DIVU:
98fa08a5
PC
131 if (!OPT_FLAG_MULT_DIV)
132 return BIT(REG_LO) | BIT(REG_HI);
133
134 if (op.r.rd)
135 flags = BIT(op.r.rd);
136 else
137 flags = BIT(REG_LO);
138 if (op.r.imm)
139 flags |= BIT(op.r.imm);
140 else
141 flags |= BIT(REG_HI);
142 return flags;
d16005f8 143 case OP_SPECIAL_MTHI:
98fa08a5 144 return BIT(REG_HI);
d16005f8 145 case OP_SPECIAL_MTLO:
98fa08a5 146 return BIT(REG_LO);
d16005f8 147 default:
98fa08a5 148 return BIT(op.r.rd);
d16005f8
PC
149 }
150 case OP_ADDI:
151 case OP_ADDIU:
152 case OP_SLTI:
153 case OP_SLTIU:
154 case OP_ANDI:
155 case OP_ORI:
156 case OP_XORI:
157 case OP_LUI:
158 case OP_LB:
159 case OP_LH:
160 case OP_LWL:
161 case OP_LW:
162 case OP_LBU:
163 case OP_LHU:
164 case OP_LWR:
98fa08a5
PC
165 return BIT(op.i.rt);
166 case OP_JAL:
167 return BIT(31);
d16005f8
PC
168 case OP_CP0:
169 switch (op.r.rs) {
170 case OP_CP0_MFC0:
171 case OP_CP0_CFC0:
98fa08a5 172 return BIT(op.i.rt);
d16005f8 173 default:
98fa08a5 174 return 0;
d16005f8
PC
175 }
176 case OP_CP2:
177 if (op.r.op == OP_CP2_BASIC) {
178 switch (op.r.rs) {
179 case OP_CP2_BASIC_MFC2:
180 case OP_CP2_BASIC_CFC2:
98fa08a5 181 return BIT(op.i.rt);
d16005f8 182 default:
98fa08a5 183 break;
d16005f8 184 }
98fa08a5
PC
185 }
186 return 0;
187 case OP_REGIMM:
188 switch (op.r.rt) {
189 case OP_REGIMM_BLTZAL:
190 case OP_REGIMM_BGEZAL:
191 return BIT(31);
192 default:
193 return 0;
d16005f8
PC
194 }
195 case OP_META_MOV:
98fa08a5 196 return BIT(op.r.rd);
d16005f8 197 default:
98fa08a5
PC
198 return 0;
199 }
200}
201
202bool opcode_reads_register(union code op, u8 reg)
203{
204 return opcode_read_mask(op) & BIT(reg);
205}
206
207bool opcode_writes_register(union code op, u8 reg)
208{
209 return opcode_write_mask(op) & BIT(reg);
210}
211
212static int find_prev_writer(const struct opcode *list, unsigned int offset, u8 reg)
213{
214 union code c;
215 unsigned int i;
216
217 if (list[offset].flags & LIGHTREC_SYNC)
218 return -1;
219
220 for (i = offset; i > 0; i--) {
221 c = list[i - 1].c;
222
223 if (opcode_writes_register(c, reg)) {
224 if (i > 1 && has_delay_slot(list[i - 2].c))
225 break;
226
227 return i - 1;
228 }
229
230 if ((list[i - 1].flags & LIGHTREC_SYNC) ||
231 has_delay_slot(c) ||
232 opcode_reads_register(c, reg))
233 break;
234 }
235
236 return -1;
237}
238
239static int find_next_reader(const struct opcode *list, unsigned int offset, u8 reg)
240{
241 unsigned int i;
242 union code c;
243
244 if (list[offset].flags & LIGHTREC_SYNC)
245 return -1;
246
247 for (i = offset; ; i++) {
248 c = list[i].c;
249
250 if (opcode_reads_register(c, reg)) {
251 if (i > 0 && has_delay_slot(list[i - 1].c))
252 break;
253
254 return i;
255 }
256
257 if ((list[i].flags & LIGHTREC_SYNC) ||
258 has_delay_slot(c) || opcode_writes_register(c, reg))
259 break;
260 }
261
262 return -1;
263}
264
265static bool reg_is_dead(const struct opcode *list, unsigned int offset, u8 reg)
266{
267 unsigned int i;
268
269 if (list[offset].flags & LIGHTREC_SYNC)
d16005f8 270 return false;
98fa08a5
PC
271
272 for (i = offset + 1; ; i++) {
273 if (opcode_reads_register(list[i].c, reg))
274 return false;
275
276 if (opcode_writes_register(list[i].c, reg))
277 return true;
278
279 if (has_delay_slot(list[i].c)) {
22eee2ac
PC
280 if (list[i].flags & LIGHTREC_NO_DS ||
281 opcode_reads_register(list[i + 1].c, reg))
98fa08a5
PC
282 return false;
283
284 return opcode_writes_register(list[i + 1].c, reg);
285 }
d16005f8
PC
286 }
287}
288
98fa08a5
PC
289static bool reg_is_read(const struct opcode *list,
290 unsigned int a, unsigned int b, u8 reg)
291{
292 /* Return true if reg is read in one of the opcodes of the interval
293 * [a, b[ */
294 for (; a < b; a++) {
295 if (!is_nop(list[a].c) && opcode_reads_register(list[a].c, reg))
296 return true;
297 }
298
299 return false;
300}
301
302static bool reg_is_written(const struct opcode *list,
303 unsigned int a, unsigned int b, u8 reg)
304{
305 /* Return true if reg is written in one of the opcodes of the interval
306 * [a, b[ */
307
308 for (; a < b; a++) {
309 if (!is_nop(list[a].c) && opcode_writes_register(list[a].c, reg))
310 return true;
311 }
312
313 return false;
314}
315
316static bool reg_is_read_or_written(const struct opcode *list,
317 unsigned int a, unsigned int b, u8 reg)
318{
319 return reg_is_read(list, a, b, reg) || reg_is_written(list, a, b, reg);
320}
321
322static bool opcode_is_load(union code op)
323{
324 switch (op.i.op) {
325 case OP_LB:
326 case OP_LH:
327 case OP_LWL:
328 case OP_LW:
329 case OP_LBU:
330 case OP_LHU:
331 case OP_LWR:
332 case OP_LWC2:
333 return true;
334 default:
335 return false;
336 }
337}
338
339static bool opcode_is_store(union code op)
340{
341 switch (op.i.op) {
342 case OP_SB:
343 case OP_SH:
344 case OP_SW:
345 case OP_SWL:
346 case OP_SWR:
347 case OP_SWC2:
348 return true;
349 default:
350 return false;
351 }
352}
353
354bool opcode_is_io(union code op)
355{
356 return opcode_is_load(op) || opcode_is_store(op);
357}
358
d16005f8
PC
359/* TODO: Complete */
360static bool is_nop(union code op)
361{
362 if (opcode_writes_register(op, 0)) {
363 switch (op.i.op) {
364 case OP_CP0:
365 return op.r.rs != OP_CP0_MFC0;
366 case OP_LB:
367 case OP_LH:
368 case OP_LWL:
369 case OP_LW:
370 case OP_LBU:
371 case OP_LHU:
372 case OP_LWR:
373 return false;
374 default:
375 return true;
376 }
377 }
378
379 switch (op.i.op) {
380 case OP_SPECIAL:
381 switch (op.r.op) {
382 case OP_SPECIAL_AND:
383 return op.r.rd == op.r.rt && op.r.rd == op.r.rs;
384 case OP_SPECIAL_ADD:
385 case OP_SPECIAL_ADDU:
386 return (op.r.rd == op.r.rt && op.r.rs == 0) ||
387 (op.r.rd == op.r.rs && op.r.rt == 0);
388 case OP_SPECIAL_SUB:
389 case OP_SPECIAL_SUBU:
390 return op.r.rd == op.r.rs && op.r.rt == 0;
391 case OP_SPECIAL_OR:
392 if (op.r.rd == op.r.rt)
393 return op.r.rd == op.r.rs || op.r.rs == 0;
394 else
395 return (op.r.rd == op.r.rs) && op.r.rt == 0;
396 case OP_SPECIAL_SLL:
397 case OP_SPECIAL_SRA:
398 case OP_SPECIAL_SRL:
399 return op.r.rd == op.r.rt && op.r.imm == 0;
98fa08a5
PC
400 case OP_SPECIAL_MFHI:
401 case OP_SPECIAL_MFLO:
402 return op.r.rd == 0;
d16005f8
PC
403 default:
404 return false;
405 }
406 case OP_ORI:
407 case OP_ADDI:
408 case OP_ADDIU:
409 return op.i.rt == op.i.rs && op.i.imm == 0;
410 case OP_BGTZ:
411 return (op.i.rs == 0 || op.i.imm == 1);
412 case OP_REGIMM:
413 return (op.i.op == OP_REGIMM_BLTZ ||
414 op.i.op == OP_REGIMM_BLTZAL) &&
415 (op.i.rs == 0 || op.i.imm == 1);
416 case OP_BNE:
417 return (op.i.rs == op.i.rt || op.i.imm == 1);
418 default:
419 return false;
420 }
421}
422
423bool load_in_delay_slot(union code op)
424{
425 switch (op.i.op) {
426 case OP_CP0:
427 switch (op.r.rs) {
428 case OP_CP0_MFC0:
429 case OP_CP0_CFC0:
430 return true;
431 default:
432 break;
433 }
434
435 break;
436 case OP_CP2:
437 if (op.r.op == OP_CP2_BASIC) {
438 switch (op.r.rs) {
439 case OP_CP2_BASIC_MFC2:
440 case OP_CP2_BASIC_CFC2:
441 return true;
442 default:
443 break;
444 }
445 }
446
447 break;
448 case OP_LB:
449 case OP_LH:
450 case OP_LW:
451 case OP_LWL:
452 case OP_LWR:
453 case OP_LBU:
454 case OP_LHU:
455 return true;
456 default:
457 break;
458 }
459
460 return false;
461}
462
22eee2ac
PC
463static u32 lightrec_propagate_consts(const struct opcode *op,
464 const struct opcode *prev,
465 u32 known, u32 *v)
d16005f8 466{
22eee2ac 467 union code c = prev->c;
98fa08a5 468
fd58fa32
PC
469 /* Register $zero is always, well, zero */
470 known |= BIT(0);
471 v[0] = 0;
472
98fa08a5 473 if (op->flags & LIGHTREC_SYNC)
22eee2ac 474 return BIT(0);
98fa08a5 475
d16005f8
PC
476 switch (c.i.op) {
477 case OP_SPECIAL:
478 switch (c.r.op) {
479 case OP_SPECIAL_SLL:
480 if (known & BIT(c.r.rt)) {
481 known |= BIT(c.r.rd);
482 v[c.r.rd] = v[c.r.rt] << c.r.imm;
483 } else {
484 known &= ~BIT(c.r.rd);
485 }
486 break;
487 case OP_SPECIAL_SRL:
488 if (known & BIT(c.r.rt)) {
489 known |= BIT(c.r.rd);
490 v[c.r.rd] = v[c.r.rt] >> c.r.imm;
491 } else {
492 known &= ~BIT(c.r.rd);
493 }
494 break;
495 case OP_SPECIAL_SRA:
496 if (known & BIT(c.r.rt)) {
497 known |= BIT(c.r.rd);
498 v[c.r.rd] = (s32)v[c.r.rt] >> c.r.imm;
499 } else {
500 known &= ~BIT(c.r.rd);
501 }
502 break;
503 case OP_SPECIAL_SLLV:
504 if (known & BIT(c.r.rt) && known & BIT(c.r.rs)) {
505 known |= BIT(c.r.rd);
506 v[c.r.rd] = v[c.r.rt] << (v[c.r.rs] & 0x1f);
507 } else {
508 known &= ~BIT(c.r.rd);
509 }
510 break;
511 case OP_SPECIAL_SRLV:
512 if (known & BIT(c.r.rt) && known & BIT(c.r.rs)) {
513 known |= BIT(c.r.rd);
514 v[c.r.rd] = v[c.r.rt] >> (v[c.r.rs] & 0x1f);
515 } else {
516 known &= ~BIT(c.r.rd);
517 }
518 break;
519 case OP_SPECIAL_SRAV:
520 if (known & BIT(c.r.rt) && known & BIT(c.r.rs)) {
521 known |= BIT(c.r.rd);
522 v[c.r.rd] = (s32)v[c.r.rt]
523 >> (v[c.r.rs] & 0x1f);
524 } else {
525 known &= ~BIT(c.r.rd);
526 }
527 break;
528 case OP_SPECIAL_ADD:
529 case OP_SPECIAL_ADDU:
530 if (known & BIT(c.r.rt) && known & BIT(c.r.rs)) {
531 known |= BIT(c.r.rd);
532 v[c.r.rd] = (s32)v[c.r.rt] + (s32)v[c.r.rs];
533 } else {
534 known &= ~BIT(c.r.rd);
535 }
536 break;
537 case OP_SPECIAL_SUB:
538 case OP_SPECIAL_SUBU:
539 if (known & BIT(c.r.rt) && known & BIT(c.r.rs)) {
540 known |= BIT(c.r.rd);
541 v[c.r.rd] = v[c.r.rt] - v[c.r.rs];
542 } else {
543 known &= ~BIT(c.r.rd);
544 }
545 break;
546 case OP_SPECIAL_AND:
547 if (known & BIT(c.r.rt) && known & BIT(c.r.rs)) {
548 known |= BIT(c.r.rd);
549 v[c.r.rd] = v[c.r.rt] & v[c.r.rs];
550 } else {
551 known &= ~BIT(c.r.rd);
552 }
553 break;
554 case OP_SPECIAL_OR:
555 if (known & BIT(c.r.rt) && known & BIT(c.r.rs)) {
556 known |= BIT(c.r.rd);
557 v[c.r.rd] = v[c.r.rt] | v[c.r.rs];
558 } else {
559 known &= ~BIT(c.r.rd);
560 }
561 break;
562 case OP_SPECIAL_XOR:
563 if (known & BIT(c.r.rt) && known & BIT(c.r.rs)) {
564 known |= BIT(c.r.rd);
565 v[c.r.rd] = v[c.r.rt] ^ v[c.r.rs];
566 } else {
567 known &= ~BIT(c.r.rd);
568 }
569 break;
570 case OP_SPECIAL_NOR:
571 if (known & BIT(c.r.rt) && known & BIT(c.r.rs)) {
572 known |= BIT(c.r.rd);
573 v[c.r.rd] = ~(v[c.r.rt] | v[c.r.rs]);
574 } else {
575 known &= ~BIT(c.r.rd);
576 }
577 break;
578 case OP_SPECIAL_SLT:
579 if (known & BIT(c.r.rt) && known & BIT(c.r.rs)) {
580 known |= BIT(c.r.rd);
581 v[c.r.rd] = (s32)v[c.r.rs] < (s32)v[c.r.rt];
582 } else {
583 known &= ~BIT(c.r.rd);
584 }
585 break;
586 case OP_SPECIAL_SLTU:
587 if (known & BIT(c.r.rt) && known & BIT(c.r.rs)) {
588 known |= BIT(c.r.rd);
589 v[c.r.rd] = v[c.r.rs] < v[c.r.rt];
590 } else {
591 known &= ~BIT(c.r.rd);
592 }
593 break;
594 default:
595 break;
596 }
597 break;
598 case OP_REGIMM:
599 break;
600 case OP_ADDI:
601 case OP_ADDIU:
602 if (known & BIT(c.i.rs)) {
603 known |= BIT(c.i.rt);
604 v[c.i.rt] = v[c.i.rs] + (s32)(s16)c.i.imm;
605 } else {
606 known &= ~BIT(c.i.rt);
607 }
608 break;
609 case OP_SLTI:
610 if (known & BIT(c.i.rs)) {
611 known |= BIT(c.i.rt);
612 v[c.i.rt] = (s32)v[c.i.rs] < (s32)(s16)c.i.imm;
613 } else {
614 known &= ~BIT(c.i.rt);
615 }
616 break;
617 case OP_SLTIU:
618 if (known & BIT(c.i.rs)) {
619 known |= BIT(c.i.rt);
620 v[c.i.rt] = v[c.i.rs] < (u32)(s32)(s16)c.i.imm;
621 } else {
622 known &= ~BIT(c.i.rt);
623 }
624 break;
625 case OP_ANDI:
626 if (known & BIT(c.i.rs)) {
627 known |= BIT(c.i.rt);
628 v[c.i.rt] = v[c.i.rs] & c.i.imm;
629 } else {
630 known &= ~BIT(c.i.rt);
631 }
632 break;
633 case OP_ORI:
634 if (known & BIT(c.i.rs)) {
635 known |= BIT(c.i.rt);
636 v[c.i.rt] = v[c.i.rs] | c.i.imm;
637 } else {
638 known &= ~BIT(c.i.rt);
639 }
640 break;
641 case OP_XORI:
642 if (known & BIT(c.i.rs)) {
643 known |= BIT(c.i.rt);
644 v[c.i.rt] = v[c.i.rs] ^ c.i.imm;
645 } else {
646 known &= ~BIT(c.i.rt);
647 }
648 break;
649 case OP_LUI:
650 known |= BIT(c.i.rt);
651 v[c.i.rt] = c.i.imm << 16;
652 break;
653 case OP_CP0:
654 switch (c.r.rs) {
655 case OP_CP0_MFC0:
656 case OP_CP0_CFC0:
657 known &= ~BIT(c.r.rt);
658 break;
659 }
660 break;
661 case OP_CP2:
662 if (c.r.op == OP_CP2_BASIC) {
663 switch (c.r.rs) {
664 case OP_CP2_BASIC_MFC2:
665 case OP_CP2_BASIC_CFC2:
666 known &= ~BIT(c.r.rt);
667 break;
668 }
669 }
670 break;
671 case OP_LB:
672 case OP_LH:
673 case OP_LWL:
674 case OP_LW:
675 case OP_LBU:
676 case OP_LHU:
677 case OP_LWR:
678 case OP_LWC2:
679 known &= ~BIT(c.i.rt);
680 break;
681 case OP_META_MOV:
682 if (known & BIT(c.r.rs)) {
683 known |= BIT(c.r.rd);
684 v[c.r.rd] = v[c.r.rs];
685 } else {
686 known &= ~BIT(c.r.rd);
687 }
688 break;
689 default:
690 break;
691 }
692
693 return known;
694}
695
98fa08a5 696static void lightrec_optimize_sll_sra(struct opcode *list, unsigned int offset)
d16005f8 697{
98fa08a5
PC
698 struct opcode *prev, *prev2 = NULL, *curr = &list[offset];
699 struct opcode *to_change, *to_nop;
700 int idx, idx2;
d16005f8 701
98fa08a5
PC
702 if (curr->r.imm != 24 && curr->r.imm != 16)
703 return;
704
705 idx = find_prev_writer(list, offset, curr->r.rt);
706 if (idx < 0)
707 return;
708
709 prev = &list[idx];
710
711 if (prev->i.op != OP_SPECIAL || prev->r.op != OP_SPECIAL_SLL ||
712 prev->r.imm != curr->r.imm || prev->r.rd != curr->r.rt)
713 return;
d16005f8 714
98fa08a5
PC
715 if (prev->r.rd != prev->r.rt && curr->r.rd != curr->r.rt) {
716 /* sll rY, rX, 16
717 * ...
718 * srl rZ, rY, 16 */
d16005f8 719
98fa08a5
PC
720 if (!reg_is_dead(list, offset, curr->r.rt) ||
721 reg_is_read_or_written(list, idx, offset, curr->r.rd))
722 return;
723
724 /* If rY is dead after the SRL, and rZ is not used after the SLL,
725 * we can change rY to rZ */
726
727 pr_debug("Detected SLL/SRA with middle temp register\n");
728 prev->r.rd = curr->r.rd;
729 curr->r.rt = prev->r.rd;
730 }
731
732 /* We got a SLL/SRA combo. If imm #16, that's a cast to u16.
733 * If imm #24 that's a cast to u8.
734 *
735 * First of all, make sure that the target register of the SLL is not
736 * read before the SRA. */
737
738 if (prev->r.rd == prev->r.rt) {
739 /* sll rX, rX, 16
740 * ...
741 * srl rY, rX, 16 */
742 to_change = curr;
743 to_nop = prev;
744
745 /* rX is used after the SRA - we cannot convert it. */
746 if (prev->r.rd != curr->r.rd && !reg_is_dead(list, offset, prev->r.rd))
747 return;
d16005f8 748 } else {
98fa08a5
PC
749 /* sll rY, rX, 16
750 * ...
751 * srl rY, rY, 16 */
752 to_change = prev;
753 to_nop = curr;
d16005f8
PC
754 }
755
98fa08a5
PC
756 idx2 = find_prev_writer(list, idx, prev->r.rt);
757 if (idx2 >= 0) {
758 /* Note that PSX games sometimes do casts after
759 * a LHU or LBU; in this case we can change the
760 * load opcode to a LH or LB, and the cast can
761 * be changed to a MOV or a simple NOP. */
762
763 prev2 = &list[idx2];
764
765 if (curr->r.rd != prev2->i.rt &&
766 !reg_is_dead(list, offset, prev2->i.rt))
767 prev2 = NULL;
768 else if (curr->r.imm == 16 && prev2->i.op == OP_LHU)
769 prev2->i.op = OP_LH;
770 else if (curr->r.imm == 24 && prev2->i.op == OP_LBU)
771 prev2->i.op = OP_LB;
772 else
773 prev2 = NULL;
774
775 if (prev2) {
776 if (curr->r.rd == prev2->i.rt) {
777 to_change->opcode = 0;
778 } else if (reg_is_dead(list, offset, prev2->i.rt) &&
779 !reg_is_read_or_written(list, idx2 + 1, offset, curr->r.rd)) {
780 /* The target register of the SRA is dead after the
781 * LBU/LHU; we can change the target register of the
782 * LBU/LHU to the one of the SRA. */
783 prev2->i.rt = curr->r.rd;
784 to_change->opcode = 0;
785 } else {
786 to_change->i.op = OP_META_MOV;
787 to_change->r.rd = curr->r.rd;
788 to_change->r.rs = prev2->i.rt;
789 }
d16005f8 790
98fa08a5
PC
791 if (to_nop->r.imm == 24)
792 pr_debug("Convert LBU+SLL+SRA to LB\n");
793 else
794 pr_debug("Convert LHU+SLL+SRA to LH\n");
795 }
796 }
797
798 if (!prev2) {
799 pr_debug("Convert SLL/SRA #%u to EXT%c\n",
800 prev->r.imm,
801 prev->r.imm == 24 ? 'C' : 'S');
802
803 if (to_change == prev) {
804 to_change->i.rs = prev->r.rt;
805 to_change->i.rt = curr->r.rd;
806 } else {
807 to_change->i.rt = curr->r.rd;
808 to_change->i.rs = prev->r.rt;
809 }
810
811 if (to_nop->r.imm == 24)
812 to_change->i.op = OP_META_EXTC;
813 else
814 to_change->i.op = OP_META_EXTS;
815 }
816
817 to_nop->opcode = 0;
d16005f8
PC
818}
819
98fa08a5 820static int lightrec_transform_ops(struct lightrec_state *state, struct block *block)
d16005f8
PC
821{
822 struct opcode *list = block->opcode_list;
22eee2ac 823 struct opcode *prev, *op = NULL;
98fa08a5
PC
824 u32 known = BIT(0);
825 u32 values[32] = { 0 };
826 unsigned int i;
827 int reader;
d16005f8 828
98fa08a5 829 for (i = 0; i < block->nb_ops; i++) {
22eee2ac 830 prev = op;
98fa08a5 831 op = &list[i];
d16005f8 832
22eee2ac
PC
833 if (prev)
834 known = lightrec_propagate_consts(op, prev, known, values);
835
d16005f8
PC
836 /* Transform all opcodes detected as useless to real NOPs
837 * (0x0: SLL r0, r0, #0) */
98fa08a5 838 if (op->opcode != 0 && is_nop(op->c)) {
d16005f8 839 pr_debug("Converting useless opcode 0x%08x to NOP\n",
98fa08a5
PC
840 op->opcode);
841 op->opcode = 0x0;
d16005f8
PC
842 }
843
98fa08a5 844 if (!op->opcode)
d16005f8
PC
845 continue;
846
98fa08a5 847 switch (op->i.op) {
d16005f8 848 case OP_BEQ:
98fa08a5
PC
849 if (op->i.rs == op->i.rt) {
850 op->i.rs = 0;
851 op->i.rt = 0;
852 } else if (op->i.rs == 0) {
853 op->i.rs = op->i.rt;
854 op->i.rt = 0;
d16005f8
PC
855 }
856 break;
98fa08a5 857
d16005f8 858 case OP_BNE:
98fa08a5
PC
859 if (op->i.rs == 0) {
860 op->i.rs = op->i.rt;
861 op->i.rt = 0;
862 }
863 break;
864
865 case OP_LUI:
866 if (!(op->flags & LIGHTREC_SYNC) &&
867 (known & BIT(op->i.rt)) &&
868 values[op->i.rt] == op->i.imm << 16) {
869 pr_debug("Converting duplicated LUI to NOP\n");
870 op->opcode = 0x0;
871 }
872
873 if (op->i.imm != 0 || op->i.rt == 0)
874 break;
875
876 reader = find_next_reader(list, i + 1, op->i.rt);
877 if (reader > 0 &&
878 (opcode_writes_register(list[reader].c, op->i.rt) ||
879 reg_is_dead(list, reader, op->i.rt))) {
880
881 pr_debug("Removing useless LUI 0x0\n");
882
883 if (list[reader].i.rs == op->i.rt)
884 list[reader].i.rs = 0;
885 if (list[reader].i.op == OP_SPECIAL &&
886 list[reader].i.rt == op->i.rt)
887 list[reader].i.rt = 0;
888 op->opcode = 0x0;
d16005f8
PC
889 }
890 break;
891
892 /* Transform ORI/ADDI/ADDIU with imm #0 or ORR/ADD/ADDU/SUB/SUBU
893 * with register $zero to the MOV meta-opcode */
894 case OP_ORI:
895 case OP_ADDI:
896 case OP_ADDIU:
98fa08a5 897 if (op->i.imm == 0) {
d16005f8 898 pr_debug("Convert ORI/ADDI/ADDIU #0 to MOV\n");
98fa08a5
PC
899 op->i.op = OP_META_MOV;
900 op->r.rd = op->i.rt;
d16005f8
PC
901 }
902 break;
903 case OP_SPECIAL:
98fa08a5 904 switch (op->r.op) {
d16005f8 905 case OP_SPECIAL_SRA:
98fa08a5
PC
906 if (op->r.imm == 0) {
907 pr_debug("Convert SRA #0 to MOV\n");
908 op->i.op = OP_META_MOV;
909 op->r.rs = op->r.rt;
910 break;
911 }
912
913 lightrec_optimize_sll_sra(block->opcode_list, i);
914 break;
915 case OP_SPECIAL_SLL:
d16005f8 916 case OP_SPECIAL_SRL:
98fa08a5
PC
917 if (op->r.imm == 0) {
918 pr_debug("Convert SLL/SRL #0 to MOV\n");
919 op->i.op = OP_META_MOV;
920 op->r.rs = op->r.rt;
d16005f8
PC
921 }
922 break;
923 case OP_SPECIAL_OR:
924 case OP_SPECIAL_ADD:
925 case OP_SPECIAL_ADDU:
98fa08a5 926 if (op->r.rs == 0) {
d16005f8 927 pr_debug("Convert OR/ADD $zero to MOV\n");
98fa08a5
PC
928 op->i.op = OP_META_MOV;
929 op->r.rs = op->r.rt;
d16005f8
PC
930 }
931 case OP_SPECIAL_SUB: /* fall-through */
932 case OP_SPECIAL_SUBU:
98fa08a5 933 if (op->r.rt == 0) {
d16005f8 934 pr_debug("Convert OR/ADD/SUB $zero to MOV\n");
98fa08a5 935 op->i.op = OP_META_MOV;
d16005f8
PC
936 }
937 default: /* fall-through */
938 break;
939 }
940 default: /* fall-through */
941 break;
942 }
943 }
944
945 return 0;
946}
947
98fa08a5 948static int lightrec_switch_delay_slots(struct lightrec_state *state, struct block *block)
d16005f8 949{
98fa08a5
PC
950 struct opcode *list, *next = &block->opcode_list[0];
951 unsigned int i;
952 union code op, next_op;
d16005f8
PC
953 u8 flags;
954
98fa08a5
PC
955 for (i = 0; i < block->nb_ops - 1; i++) {
956 list = next;
957 next = &block->opcode_list[i + 1];
958 next_op = next->c;
959 op = list->c;
d16005f8
PC
960
961 if (!has_delay_slot(op) ||
962 list->flags & (LIGHTREC_NO_DS | LIGHTREC_EMULATE_BRANCH) ||
98fa08a5
PC
963 op.opcode == 0 || next_op.opcode == 0)
964 continue;
965
966 if (i && has_delay_slot(block->opcode_list[i - 1].c) &&
967 !(block->opcode_list[i - 1].flags & LIGHTREC_NO_DS))
d16005f8
PC
968 continue;
969
98fa08a5
PC
970 if ((list->flags & LIGHTREC_SYNC) ||
971 (next->flags & LIGHTREC_SYNC))
d16005f8
PC
972 continue;
973
974 switch (list->i.op) {
975 case OP_SPECIAL:
976 switch (op.r.op) {
977 case OP_SPECIAL_JALR:
978 if (opcode_reads_register(next_op, op.r.rd) ||
979 opcode_writes_register(next_op, op.r.rd))
980 continue;
981 case OP_SPECIAL_JR: /* fall-through */
982 if (opcode_writes_register(next_op, op.r.rs))
983 continue;
984 default: /* fall-through */
985 break;
986 }
987 case OP_J: /* fall-through */
988 break;
989 case OP_JAL:
990 if (opcode_reads_register(next_op, 31) ||
991 opcode_writes_register(next_op, 31))
992 continue;
993 else
994 break;
995 case OP_BEQ:
996 case OP_BNE:
997 if (op.i.rt && opcode_writes_register(next_op, op.i.rt))
998 continue;
999 case OP_BLEZ: /* fall-through */
1000 case OP_BGTZ:
d16005f8
PC
1001 if (op.i.rs && opcode_writes_register(next_op, op.i.rs))
1002 continue;
1003 break;
1004 case OP_REGIMM:
1005 switch (op.r.rt) {
1006 case OP_REGIMM_BLTZAL:
1007 case OP_REGIMM_BGEZAL:
1008 if (opcode_reads_register(next_op, 31) ||
1009 opcode_writes_register(next_op, 31))
1010 continue;
1011 case OP_REGIMM_BLTZ: /* fall-through */
1012 case OP_REGIMM_BGEZ:
1013 if (op.i.rs &&
1014 opcode_writes_register(next_op, op.i.rs))
1015 continue;
1016 break;
1017 }
1018 default: /* fall-through */
1019 break;
1020 }
1021
1022 pr_debug("Swap branch and delay slot opcodes "
98fa08a5
PC
1023 "at offsets 0x%x / 0x%x\n",
1024 i << 2, (i + 1) << 2);
d16005f8 1025
98fa08a5 1026 flags = next->flags;
d16005f8 1027 list->c = next_op;
98fa08a5
PC
1028 next->c = op;
1029 next->flags = list->flags | LIGHTREC_NO_DS;
a59e5536 1030 list->flags = flags | LIGHTREC_NO_DS;
d16005f8
PC
1031 }
1032
1033 return 0;
1034}
1035
98fa08a5
PC
1036static int shrink_opcode_list(struct lightrec_state *state, struct block *block, u16 new_size)
1037{
1038 struct opcode *list;
1039
1040 if (new_size >= block->nb_ops) {
1041 pr_err("Invalid shrink size (%u vs %u)\n",
1042 new_size, block->nb_ops);
1043 return -EINVAL;
1044 }
1045
1046
1047 list = lightrec_malloc(state, MEM_FOR_IR,
1048 sizeof(*list) * new_size);
1049 if (!list) {
1050 pr_err("Unable to allocate memory\n");
1051 return -ENOMEM;
1052 }
1053
1054 memcpy(list, block->opcode_list, sizeof(*list) * new_size);
1055
1056 lightrec_free_opcode_list(state, block);
1057 block->opcode_list = list;
1058 block->nb_ops = new_size;
1059
1060 pr_debug("Shrunk opcode list of block PC 0x%08x to %u opcodes\n",
1061 block->pc, new_size);
1062
1063 return 0;
1064}
1065
1066static int lightrec_detect_impossible_branches(struct lightrec_state *state,
1067 struct block *block)
d16005f8 1068{
98fa08a5
PC
1069 struct opcode *op, *next = &block->opcode_list[0];
1070 unsigned int i;
1071 int ret = 0;
1072
1073 for (i = 0; i < block->nb_ops - 1; i++) {
1074 op = next;
1075 next = &block->opcode_list[i + 1];
d16005f8 1076
d16005f8
PC
1077 if (!has_delay_slot(op->c) ||
1078 (!load_in_delay_slot(next->c) &&
1079 !has_delay_slot(next->c) &&
1080 !(next->i.op == OP_CP0 && next->r.rs == OP_CP0_RFE)))
1081 continue;
1082
1083 if (op->c.opcode == next->c.opcode) {
1084 /* The delay slot is the exact same opcode as the branch
1085 * opcode: this is effectively a NOP */
1086 next->c.opcode = 0;
1087 continue;
1088 }
1089
98fa08a5
PC
1090 op->flags |= LIGHTREC_EMULATE_BRANCH;
1091
d16005f8 1092 if (op == block->opcode_list) {
98fa08a5
PC
1093 pr_debug("First opcode of block PC 0x%08x is an impossible branch\n",
1094 block->pc);
1095
d16005f8
PC
1096 /* If the first opcode is an 'impossible' branch, we
1097 * only keep the first two opcodes of the block (the
1098 * branch itself + its delay slot) */
98fa08a5
PC
1099 if (block->nb_ops > 2)
1100 ret = shrink_opcode_list(state, block, 2);
1101 break;
d16005f8 1102 }
d16005f8
PC
1103 }
1104
98fa08a5 1105 return ret;
d16005f8
PC
1106}
1107
98fa08a5 1108static int lightrec_local_branches(struct lightrec_state *state, struct block *block)
d16005f8 1109{
98fa08a5
PC
1110 struct opcode *list;
1111 unsigned int i;
d16005f8 1112 s32 offset;
d16005f8 1113
98fa08a5
PC
1114 for (i = 0; i < block->nb_ops; i++) {
1115 list = &block->opcode_list[i];
1116
1117 if (should_emulate(list))
d16005f8
PC
1118 continue;
1119
1120 switch (list->i.op) {
1121 case OP_BEQ:
1122 case OP_BNE:
1123 case OP_BLEZ:
1124 case OP_BGTZ:
1125 case OP_REGIMM:
98fa08a5 1126 offset = i + 1 + (s16)list->i.imm;
d16005f8
PC
1127 if (offset >= 0 && offset < block->nb_ops)
1128 break;
1129 default: /* fall-through */
1130 continue;
1131 }
1132
1133 pr_debug("Found local branch to offset 0x%x\n", offset << 2);
1134
98fa08a5
PC
1135 if (should_emulate(&block->opcode_list[offset])) {
1136 pr_debug("Branch target must be emulated - skip\n");
1137 continue;
1138 }
d16005f8 1139
98fa08a5
PC
1140 if (offset && has_delay_slot(block->opcode_list[offset - 1].c)) {
1141 pr_debug("Branch target is a delay slot - skip\n");
1142 continue;
1143 }
d16005f8 1144
98fa08a5 1145 pr_debug("Adding sync at offset 0x%x\n", offset << 2);
d16005f8 1146
98fa08a5
PC
1147 block->opcode_list[offset].flags |= LIGHTREC_SYNC;
1148 list->flags |= LIGHTREC_LOCAL_BRANCH;
d16005f8
PC
1149 }
1150
1151 return 0;
1152}
1153
1154bool has_delay_slot(union code op)
1155{
1156 switch (op.i.op) {
1157 case OP_SPECIAL:
1158 switch (op.r.op) {
1159 case OP_SPECIAL_JR:
1160 case OP_SPECIAL_JALR:
1161 return true;
1162 default:
1163 return false;
1164 }
1165 case OP_J:
1166 case OP_JAL:
1167 case OP_BEQ:
1168 case OP_BNE:
1169 case OP_BLEZ:
1170 case OP_BGTZ:
1171 case OP_REGIMM:
d16005f8
PC
1172 return true;
1173 default:
1174 return false;
1175 }
1176}
1177
98fa08a5 1178bool should_emulate(const struct opcode *list)
d16005f8 1179{
98fa08a5
PC
1180 return has_delay_slot(list->c) &&
1181 (list->flags & LIGHTREC_EMULATE_BRANCH);
d16005f8
PC
1182}
1183
98fa08a5 1184static void lightrec_add_unload(struct opcode *op, u8 reg)
d16005f8 1185{
98fa08a5
PC
1186 if (op->i.op == OP_SPECIAL && reg == op->r.rd)
1187 op->flags |= LIGHTREC_UNLOAD_RD;
d16005f8 1188
98fa08a5
PC
1189 if (op->i.rs == reg)
1190 op->flags |= LIGHTREC_UNLOAD_RS;
1191 if (op->i.rt == reg)
1192 op->flags |= LIGHTREC_UNLOAD_RT;
1193}
d16005f8 1194
98fa08a5
PC
1195static int lightrec_early_unload(struct lightrec_state *state, struct block *block)
1196{
1197 unsigned int i, offset;
1198 struct opcode *op;
1199 u8 reg;
d16005f8 1200
98fa08a5
PC
1201 for (reg = 1; reg < 34; reg++) {
1202 int last_r_id = -1, last_w_id = -1;
1203
1204 for (i = 0; i < block->nb_ops; i++) {
1205 union code c = block->opcode_list[i].c;
1206
1207 if (opcode_reads_register(c, reg))
1208 last_r_id = i;
1209 if (opcode_writes_register(c, reg))
1210 last_w_id = i;
d16005f8
PC
1211 }
1212
98fa08a5
PC
1213 if (last_w_id > last_r_id)
1214 offset = (unsigned int)last_w_id;
1215 else if (last_r_id >= 0)
1216 offset = (unsigned int)last_r_id;
1217 else
1218 continue;
d16005f8 1219
98fa08a5 1220 op = &block->opcode_list[offset];
d16005f8 1221
98fa08a5
PC
1222 if (has_delay_slot(op->c) && (op->flags & LIGHTREC_NO_DS))
1223 offset++;
1224
1225 if (offset == block->nb_ops)
1226 continue;
1227
1228 lightrec_add_unload(&block->opcode_list[offset], reg);
d16005f8
PC
1229 }
1230
1231 return 0;
1232}
1233
98fa08a5 1234static int lightrec_flag_io(struct lightrec_state *state, struct block *block)
d16005f8 1235{
98fa08a5 1236 const struct lightrec_mem_map *map;
22eee2ac 1237 struct opcode *prev2, *prev = NULL, *list = NULL;
d16005f8
PC
1238 u32 known = BIT(0);
1239 u32 values[32] = { 0 };
98fa08a5
PC
1240 unsigned int i;
1241 u32 val;
1242
1243 for (i = 0; i < block->nb_ops; i++) {
22eee2ac
PC
1244 prev2 = prev;
1245 prev = list;
98fa08a5 1246 list = &block->opcode_list[i];
d16005f8 1247
22eee2ac
PC
1248 if (prev)
1249 known = lightrec_propagate_consts(list, prev, known, values);
1250
d16005f8
PC
1251 switch (list->i.op) {
1252 case OP_SB:
1253 case OP_SH:
1254 case OP_SW:
98fa08a5
PC
1255 if (OPT_FLAG_STORES) {
1256 /* Mark all store operations that target $sp or $gp
1257 * as not requiring code invalidation. This is based
1258 * on the heuristic that stores using one of these
1259 * registers as address will never hit a code page. */
1260 if (list->i.rs >= 28 && list->i.rs <= 29 &&
1261 !state->maps[PSX_MAP_KERNEL_USER_RAM].ops) {
1262 pr_debug("Flaging opcode 0x%08x as not "
1263 "requiring invalidation\n",
1264 list->opcode);
1265 list->flags |= LIGHTREC_NO_INVALIDATE;
1266 }
1267
1268 /* Detect writes whose destination address is inside the
1269 * current block, using constant propagation. When these
1270 * occur, we mark the blocks as not compilable. */
1271 if ((known & BIT(list->i.rs)) &&
1272 kunseg(values[list->i.rs]) >= kunseg(block->pc) &&
1273 kunseg(values[list->i.rs]) < (kunseg(block->pc) +
1274 block->nb_ops * 4)) {
1275 pr_debug("Self-modifying block detected\n");
1276 block->flags |= BLOCK_NEVER_COMPILE;
1277 list->flags |= LIGHTREC_SMC;
1278 }
1279 }
1280 case OP_SWL: /* fall-through */
1281 case OP_SWR:
1282 case OP_SWC2:
1283 case OP_LB:
1284 case OP_LBU:
1285 case OP_LH:
1286 case OP_LHU:
1287 case OP_LW:
1288 case OP_LWL:
1289 case OP_LWR:
1290 case OP_LWC2:
1291 if (OPT_FLAG_IO && (known & BIT(list->i.rs))) {
22eee2ac
PC
1292 if (prev && prev->i.op == OP_LUI &&
1293 !(prev2 && has_delay_slot(prev2->c)) &&
1294 prev->i.rt == list->i.rs &&
1295 list->i.rt == list->i.rs &&
1296 prev->i.imm & 0x8000) {
1297 pr_debug("Convert LUI at offset 0x%x to kuseg\n",
1298 i - 1 << 2);
1299
1300 val = kunseg(prev->i.imm << 16);
1301 prev->i.imm = val >> 16;
1302 values[list->i.rs] = val;
1303 }
1304
1305 val = values[list->i.rs] + (s16) list->i.imm;
1306 map = lightrec_get_map(state, NULL, kunseg(val));
98fa08a5
PC
1307
1308 if (!map || map->ops ||
1309 map == &state->maps[PSX_MAP_PARALLEL_PORT]) {
22eee2ac 1310 pr_debug("Flagging opcode %u as I/O access\n",
98fa08a5 1311 i);
22eee2ac
PC
1312 list->flags |= LIGHTREC_IO_MODE(LIGHTREC_IO_HW);
1313 break;
1314 }
1315
1316 if (val - map->pc < map->length)
1317 list->flags |= LIGHTREC_NO_MASK;
1318
1319 if (map == &state->maps[PSX_MAP_KERNEL_USER_RAM]) {
1320 pr_debug("Flaging opcode %u as RAM access\n", i);
1321 list->flags |= LIGHTREC_IO_MODE(LIGHTREC_IO_RAM);
1322 } else if (map == &state->maps[PSX_MAP_BIOS]) {
1323 pr_debug("Flaging opcode %u as BIOS access\n", i);
1324 list->flags |= LIGHTREC_IO_MODE(LIGHTREC_IO_BIOS);
1325 } else if (map == &state->maps[PSX_MAP_SCRATCH_PAD]) {
1326 pr_debug("Flaging opcode %u as scratchpad access\n", i);
1327 list->flags |= LIGHTREC_IO_MODE(LIGHTREC_IO_SCRATCH);
98fa08a5 1328 }
d16005f8
PC
1329 }
1330 default: /* fall-through */
1331 break;
1332 }
d16005f8
PC
1333 }
1334
1335 return 0;
1336}
1337
98fa08a5
PC
/* Determine which register can directly receive the HI (or LO, when
 * @mflo is true) result of a MULT/DIV, by scanning the block forward
 * from @offset for the matching MFHI/MFLO consumer.
 *
 * Return value:
 *  - 0        : HI/LO is overwritten before being read (result unused);
 *  - REG_HI / REG_LO : the value must stay in the dedicated register
 *                      (unfollowable branch, clobbered target, sync...);
 *  - otherwise: the MIPS register written by the MFHI/MFLO, so the
 *               MULT/DIV can target it directly.
 *
 * @last:    non-NULL when scanning the fall-through path of a branch
 *           (prevents recursing into further branches);
 * @mask:    registers already read/written since the MULT/DIV — a
 *           direct target must not be in this set;
 * @sync:    set once an opcode with LIGHTREC_SYNC has been crossed;
 * @another: true when probing for a second MFHI/MFLO consumer.
 */
static u8 get_mfhi_mflo_reg(const struct block *block, u16 offset,
			    const struct opcode *last,
			    u32 mask, bool sync, bool mflo, bool another)
{
	const struct opcode *op, *next = &block->opcode_list[offset];
	u32 old_mask;
	u8 reg2, reg = mflo ? REG_LO : REG_HI;
	u16 branch_offset;
	unsigned int i;

	for (i = offset; i < block->nb_ops; i++) {
		op = next;
		next = &block->opcode_list[i + 1];
		old_mask = mask;

		/* If any other opcode writes or reads to the register
		 * we'd use, then we cannot use it anymore. */
		mask |= opcode_read_mask(op->c);
		mask |= opcode_write_mask(op->c);

		if (op->flags & LIGHTREC_SYNC)
			sync = true;

		switch (op->i.op) {
		case OP_BEQ:
		case OP_BNE:
		case OP_BLEZ:
		case OP_BGTZ:
		case OP_REGIMM:
			/* TODO: handle backwards branches too */
			if (!last &&
			    (op->flags & LIGHTREC_LOCAL_BRANCH) &&
			    (s16)op->c.i.imm >= 0) {
				/* Local forward branch: scan both the taken
				 * path and the fall-through path; they must
				 * agree on the register to use. */
				branch_offset = i + 1 + (s16)op->c.i.imm
					- !!(OPT_SWITCH_DELAY_SLOTS && (op->flags & LIGHTREC_NO_DS));

				reg = get_mfhi_mflo_reg(block, branch_offset, NULL,
							mask, sync, mflo, false);
				reg2 = get_mfhi_mflo_reg(block, offset + 1, next,
							 mask, sync, mflo, false);
				if (reg > 0 && reg == reg2)
					return reg;
				if (!reg && !reg2)
					return 0;
			}

			/* Branch we cannot (or may not) follow: play it safe
			 * and keep the result in the dedicated register. */
			return mflo ? REG_LO : REG_HI;
		case OP_SPECIAL:
			switch (op->r.op) {
			case OP_SPECIAL_MULT:
			case OP_SPECIAL_MULTU:
			case OP_SPECIAL_DIV:
			case OP_SPECIAL_DIVU:
				/* HI/LO overwritten before being read */
				return 0;
			case OP_SPECIAL_MTHI:
				if (!mflo)
					return 0;
				continue;
			case OP_SPECIAL_MTLO:
				if (mflo)
					return 0;
				continue;
			case OP_SPECIAL_JR:
				/* JR to anything but $ra: unknown target */
				if (op->r.rs != 31)
					return reg;

				/* JR $ra with the wanted MFHI/MFLO in its
				 * delay slot: target its destination
				 * register directly. */
				if (!sync &&
				    !(op->flags & LIGHTREC_NO_DS) &&
				    (next->i.op == OP_SPECIAL) &&
				    ((!mflo && next->r.op == OP_SPECIAL_MFHI) ||
				    (mflo && next->r.op == OP_SPECIAL_MFLO)))
					return next->r.rd;

				return 0;
			case OP_SPECIAL_JALR:
				return reg;
			case OP_SPECIAL_MFHI:
				if (!mflo) {
					if (another)
						return op->r.rd;
					/* Must use REG_HI if there is another MFHI target*/
					reg2 = get_mfhi_mflo_reg(block, i + 1, next,
							 0, sync, mflo, true);
					if (reg2 > 0 && reg2 != REG_HI)
						return REG_HI;

					if (!sync && !(old_mask & BIT(op->r.rd)))
						return op->r.rd;
					else
						return REG_HI;
				}
				continue;
			case OP_SPECIAL_MFLO:
				if (mflo) {
					if (another)
						return op->r.rd;
					/* Must use REG_LO if there is another MFLO target*/
					reg2 = get_mfhi_mflo_reg(block, i + 1, next,
							 0, sync, mflo, true);
					if (reg2 > 0 && reg2 != REG_LO)
						return REG_LO;

					if (!sync && !(old_mask & BIT(op->r.rd)))
						return op->r.rd;
					else
						return REG_LO;
				}
				continue;
			default:
				break;
			}

			/* fall-through */
		default:
			continue;
		}
	}

	return reg;
}
1458
/* NOP out the first MFLO (@lo == true) or MFHI (@lo == false) found
 * between @offset and @last, following local forward branches. Used by
 * lightrec_flag_mults_divs() after the MULT/DIV has been retargeted to
 * write the consumer's register directly, which makes the MFLO/MFHI
 * redundant. */
static void lightrec_replace_lo_hi(struct block *block, u16 offset,
				   u16 last, bool lo)
{
	unsigned int i;
	u32 branch_offset;

	/* This function will remove the following MFLO/MFHI. It must be called
	 * only if get_mfhi_mflo_reg() returned a non-zero value. */

	for (i = offset; i < last; i++) {
		struct opcode *op = &block->opcode_list[i];

		switch (op->i.op) {
		case OP_BEQ:
		case OP_BNE:
		case OP_BLEZ:
		case OP_BGTZ:
		case OP_REGIMM:
			/* TODO: handle backwards branches too */
			if ((op->flags & LIGHTREC_LOCAL_BRANCH) &&
			    (s16)op->c.i.imm >= 0) {
				/* Forward local branch: recurse into both the
				 * taken path and the fall-through path. */
				branch_offset = i + 1 + (s16)op->c.i.imm
					- !!(OPT_SWITCH_DELAY_SLOTS && (op->flags & LIGHTREC_NO_DS));

				lightrec_replace_lo_hi(block, branch_offset, last, lo);
				lightrec_replace_lo_hi(block, i + 1, branch_offset, lo);
			}
			break;

		case OP_SPECIAL:
			if (lo && op->r.op == OP_SPECIAL_MFLO) {
				pr_debug("Removing MFLO opcode at offset 0x%x\n",
					 i << 2);
				op->opcode = 0; /* turn into a NOP */
				return;
			} else if (!lo && op->r.op == OP_SPECIAL_MFHI) {
				pr_debug("Removing MFHI opcode at offset 0x%x\n",
					 i << 2);
				op->opcode = 0; /* turn into a NOP */
				return;
			}

			/* fall-through */
		default:
			break;
		}
	}
}
1507
fd58fa32
PC
/* Whether the div-by-zero check can always be omitted on this host.
 * True only when compiling for MIPS — presumably because the host's
 * native DIV/DIVU then matches the guest's behavior (NOTE(review):
 * confirm against the code generator). */
static bool lightrec_always_skip_div_check(void)
{
#ifdef __mips__
	const bool host_is_mips = true;
#else
	const bool host_is_mips = false;
#endif
	return host_is_mips;
}
1516
/* For each MULT/MULTU/DIV/DIVU, determine whether its LO and/or HI
 * results are actually consumed, and if possible retarget the opcode to
 * write the consumer's register directly (removing the MFLO/MFHI).
 * Also flags divisions whose divisor is a known non-zero constant so no
 * div-by-zero check is emitted.
 *
 * Encoding: the chosen LO target register is stored in r.rd and the HI
 * target in r.imm; both are 0 when the dedicated HI/LO registers must
 * be used. Always returns 0. */
static int lightrec_flag_mults_divs(struct lightrec_state *state, struct block *block)
{
	struct opcode *prev, *list = NULL;
	u8 reg_hi, reg_lo;
	unsigned int i;
	u32 known = BIT(0); /* $zero is always known to be 0 */
	u32 values[32] = { 0 };

	for (i = 0; i < block->nb_ops - 1; i++) {
		prev = list;
		list = &block->opcode_list[i];

		if (prev)
			known = lightrec_propagate_consts(list, prev, known, values);

		if (list->i.op != OP_SPECIAL)
			continue;

		switch (list->r.op) {
		case OP_SPECIAL_DIV:
		case OP_SPECIAL_DIVU:
			/* If we are dividing by a non-zero constant, don't
			 * emit the div-by-zero check. */
			if (lightrec_always_skip_div_check() ||
			    (known & BIT(list->c.r.rt) && values[list->c.r.rt]))
				list->flags |= LIGHTREC_NO_DIV_CHECK;
		case OP_SPECIAL_MULT: /* fall-through */
		case OP_SPECIAL_MULTU:
			break;
		default:
			continue;
		}

		/* Don't support opcodes in delay slots */
		if ((i && has_delay_slot(block->opcode_list[i - 1].c)) ||
		    (list->flags & LIGHTREC_NO_DS)) {
			continue;
		}

		/* Find direct target registers for LO and HI, scanning
		 * forward from the opcode after the MULT/DIV. */
		reg_lo = get_mfhi_mflo_reg(block, i + 1, NULL, 0, false, true, false);
		if (reg_lo == 0) {
			pr_debug("Mark MULT(U)/DIV(U) opcode at offset 0x%x as"
				 " not writing LO\n", i << 2);
			list->flags |= LIGHTREC_NO_LO;
		}

		reg_hi = get_mfhi_mflo_reg(block, i + 1, NULL, 0, false, false, false);
		if (reg_hi == 0) {
			pr_debug("Mark MULT(U)/DIV(U) opcode at offset 0x%x as"
				 " not writing HI\n", i << 2);
			list->flags |= LIGHTREC_NO_HI;
		}

		if (!reg_lo && !reg_hi) {
			pr_debug("Both LO/HI unused in this block, they will "
				 "probably be used in parent block - removing "
				 "flags.\n");
			list->flags &= ~(LIGHTREC_NO_LO | LIGHTREC_NO_HI);
		}

		if (reg_lo > 0 && reg_lo != REG_LO) {
			pr_debug("Found register %s to hold LO (rs = %u, rt = %u)\n",
				 lightrec_reg_name(reg_lo), list->r.rs, list->r.rt);

			/* The MFLO is now redundant: remove it and store the
			 * target register in the rd field. */
			lightrec_replace_lo_hi(block, i + 1, block->nb_ops, true);
			list->r.rd = reg_lo;
		} else {
			list->r.rd = 0;
		}

		if (reg_hi > 0 && reg_hi != REG_HI) {
			pr_debug("Found register %s to hold HI (rs = %u, rt = %u)\n",
				 lightrec_reg_name(reg_hi), list->r.rs, list->r.rt);

			/* The MFHI is now redundant: remove it and store the
			 * target register in the imm field. */
			lightrec_replace_lo_hi(block, i + 1, block->nb_ops, false);
			list->r.imm = reg_hi;
		} else {
			list->r.imm = 0;
		}
	}

	return 0;
}
1600
1601static bool remove_div_sequence(struct block *block, unsigned int offset)
1602{
1603 struct opcode *op;
1604 unsigned int i, found = 0;
1605
1606 /*
1607 * Scan for the zero-checking sequence that GCC automatically introduced
1608 * after most DIV/DIVU opcodes. This sequence checks the value of the
1609 * divisor, and if zero, executes a BREAK opcode, causing the BIOS
1610 * handler to crash the PS1.
1611 *
1612 * For DIV opcodes, this sequence additionally checks that the signed
1613 * operation does not overflow.
1614 *
1615 * With the assumption that the games never crashed the PS1, we can
1616 * therefore assume that the games never divided by zero or overflowed,
1617 * and these sequences can be removed.
1618 */
1619
1620 for (i = offset; i < block->nb_ops; i++) {
1621 op = &block->opcode_list[i];
1622
1623 if (!found) {
1624 if (op->i.op == OP_SPECIAL &&
1625 (op->r.op == OP_SPECIAL_DIV || op->r.op == OP_SPECIAL_DIVU))
1626 break;
1627
1628 if ((op->opcode & 0xfc1fffff) == 0x14000002) {
1629 /* BNE ???, zero, +8 */
1630 found++;
1631 } else {
1632 offset++;
1633 }
1634 } else if (found == 1 && !op->opcode) {
1635 /* NOP */
1636 found++;
1637 } else if (found == 2 && op->opcode == 0x0007000d) {
1638 /* BREAK 0x1c00 */
1639 found++;
1640 } else if (found == 3 && op->opcode == 0x2401ffff) {
1641 /* LI at, -1 */
1642 found++;
1643 } else if (found == 4 && (op->opcode & 0xfc1fffff) == 0x14010004) {
1644 /* BNE ???, at, +16 */
1645 found++;
1646 } else if (found == 5 && op->opcode == 0x3c018000) {
1647 /* LUI at, 0x8000 */
1648 found++;
1649 } else if (found == 6 && (op->opcode & 0x141fffff) == 0x14010002) {
1650 /* BNE ???, at, +16 */
1651 found++;
1652 } else if (found == 7 && !op->opcode) {
1653 /* NOP */
1654 found++;
1655 } else if (found == 8 && op->opcode == 0x0006000d) {
1656 /* BREAK 0x1800 */
1657 found++;
1658 break;
1659 } else {
1660 break;
1661 }
1662 }
1663
1664 if (found >= 3) {
1665 if (found != 9)
1666 found = 3;
1667
1668 pr_debug("Removing DIV%s sequence at offset 0x%x\n",
1669 found == 9 ? "" : "U", offset << 2);
1670
1671 for (i = 0; i < found; i++)
1672 block->opcode_list[offset + i].opcode = 0;
1673
1674 return true;
1675 }
1676
1677 return false;
1678}
1679
1680static int lightrec_remove_div_by_zero_check_sequence(struct lightrec_state *state,
1681 struct block *block)
1682{
1683 struct opcode *op;
1684 unsigned int i;
1685
1686 for (i = 0; i < block->nb_ops; i++) {
1687 op = &block->opcode_list[i];
1688
1689 if (op->i.op == OP_SPECIAL &&
1690 (op->r.op == OP_SPECIAL_DIVU || op->r.op == OP_SPECIAL_DIV) &&
1691 remove_div_sequence(block, i + 1))
1692 op->flags |= LIGHTREC_NO_DIV_CHECK;
1693 }
1694
1695 return 0;
1696}
1697
/* Opcode sequence matched verbatim by lightrec_replace_memset(): a
 * word-filling loop that stores zero over a1 words starting at a0
 * (presumably the PS1 BIOS/libc memset — confirm against the BIOS
 * disassembly). */
static const u32 memset_code[] = {
	0x10a00006,	// beqz a1, 2f
	0x24a2ffff,	// addiu v0,a1,-1
	0x2403ffff,	// li v1,-1
	0xac800000,	// 1: sw zero,0(a0)
	0x2442ffff,	// addiu v0,v0,-1
	0x1443fffd,	// bne v0,v1, 1b
	0x24840004,	// addiu a0,a0,4
	0x03e00008,	// 2: jr ra
	0x00000000,	// nop
};
1709
1710static int lightrec_replace_memset(struct lightrec_state *state, struct block *block)
1711{
1712 unsigned int i;
1713 union code c;
1714
1715 for (i = 0; i < block->nb_ops; i++) {
1716 c = block->opcode_list[i].c;
1717
1718 if (c.opcode != memset_code[i])
1719 return 0;
1720
1721 if (i == ARRAY_SIZE(memset_code) - 1) {
1722 /* success! */
1723 pr_debug("Block at PC 0x%x is a memset\n", block->pc);
1724 block->flags |= BLOCK_IS_MEMSET | BLOCK_NEVER_COMPILE;
1725
1726 /* Return non-zero to skip other optimizers. */
1727 return 1;
d16005f8
PC
1728 }
1729 }
1730
1731 return 0;
1732}
1733
98fa08a5
PC
/* Optimization passes, run in order by lightrec_optimize(). A pass
 * disabled at compile time (its OPT_* constant is 0) leaves a NULL
 * entry, which the driver skips. A pass returning non-zero aborts the
 * chain (e.g. lightrec_replace_memset() returns 1 on a match). */
static int (*lightrec_optimizers[])(struct lightrec_state *state, struct block *) = {
	IF_OPT(OPT_REMOVE_DIV_BY_ZERO_SEQ, &lightrec_remove_div_by_zero_check_sequence),
	IF_OPT(OPT_REPLACE_MEMSET, &lightrec_replace_memset),
	IF_OPT(OPT_DETECT_IMPOSSIBLE_BRANCHES, &lightrec_detect_impossible_branches),
	IF_OPT(OPT_LOCAL_BRANCHES, &lightrec_local_branches),
	IF_OPT(OPT_TRANSFORM_OPS, &lightrec_transform_ops),
	IF_OPT(OPT_SWITCH_DELAY_SLOTS, &lightrec_switch_delay_slots),
	IF_OPT(OPT_FLAG_IO || OPT_FLAG_STORES, &lightrec_flag_io),
	IF_OPT(OPT_FLAG_MULT_DIV, &lightrec_flag_mults_divs),
	IF_OPT(OPT_EARLY_UNLOAD, &lightrec_early_unload),
};
1745
98fa08a5 1746int lightrec_optimize(struct lightrec_state *state, struct block *block)
d16005f8
PC
1747{
1748 unsigned int i;
98fa08a5 1749 int ret;
d16005f8
PC
1750
1751 for (i = 0; i < ARRAY_SIZE(lightrec_optimizers); i++) {
98fa08a5
PC
1752 if (lightrec_optimizers[i]) {
1753 ret = (*lightrec_optimizers[i])(state, block);
1754 if (ret)
1755 return ret;
1756 }
d16005f8
PC
1757 }
1758
1759 return 0;
1760}