// SPDX-License-Identifier: LGPL-2.1-or-later
/*
 * Copyright (C) 2014-2021 Paul Cercueil <paul@crapouillou.net>
 */

#include "lightrec-config.h"
#include "disassembler.h"
#include "lightrec.h"
#include "memmanager.h"
#include "optimizer.h"
#include "regcache.h"

#include <errno.h>
#include <stdbool.h>
#include <stdlib.h>
#include <string.h>

#define IF_OPT(opt, ptr) ((opt) ? (ptr) : NULL)

struct optimizer_list {
	void (**optimizers)(struct opcode *);
	unsigned int nb_optimizers;
};

static bool is_nop(union code op);

bool is_unconditional_jump(union code c)
{
	switch (c.i.op) {
	case OP_SPECIAL:
		return c.r.op == OP_SPECIAL_JR || c.r.op == OP_SPECIAL_JALR;
	case OP_J:
	case OP_JAL:
		return true;
	case OP_BEQ:
	case OP_BLEZ:
		return c.i.rs == c.i.rt;
	case OP_REGIMM:
		return (c.r.rt == OP_REGIMM_BGEZ ||
			c.r.rt == OP_REGIMM_BGEZAL) && c.i.rs == 0;
	default:
		return false;
	}
}

bool is_syscall(union code c)
{
	return (c.i.op == OP_SPECIAL && c.r.op == OP_SPECIAL_SYSCALL) ||
		(c.i.op == OP_CP0 && (c.r.rs == OP_CP0_MTC0 ||
				      c.r.rs == OP_CP0_CTC0) &&
		 (c.r.rd == 12 || c.r.rd == 13));
}

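/* Register usage is tracked with 64-bit masks: bits 0-31 map to the MIPS
 * general-purpose registers, while the REG_LO / REG_HI constants (defined by
 * the register cache) give the LO and HI special registers bit positions of
 * their own, so the passes below can track them like any other register. */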
static u64 opcode_read_mask(union code op)
{
	switch (op.i.op) {
	case OP_SPECIAL:
		switch (op.r.op) {
		case OP_SPECIAL_SYSCALL:
		case OP_SPECIAL_BREAK:
			return 0;
		case OP_SPECIAL_JR:
		case OP_SPECIAL_JALR:
		case OP_SPECIAL_MTHI:
		case OP_SPECIAL_MTLO:
			return BIT(op.r.rs);
		case OP_SPECIAL_MFHI:
			return BIT(REG_HI);
		case OP_SPECIAL_MFLO:
			return BIT(REG_LO);
		case OP_SPECIAL_SLL:
		case OP_SPECIAL_SRL:
		case OP_SPECIAL_SRA:
			return BIT(op.r.rt);
		default:
			return BIT(op.r.rs) | BIT(op.r.rt);
		}
	case OP_CP0:
		switch (op.r.rs) {
		case OP_CP0_MTC0:
		case OP_CP0_CTC0:
			return BIT(op.r.rt);
		default:
			return 0;
		}
	case OP_CP2:
		if (op.r.op == OP_CP2_BASIC) {
			switch (op.r.rs) {
			case OP_CP2_BASIC_MTC2:
			case OP_CP2_BASIC_CTC2:
				return BIT(op.r.rt);
			default:
				break;
			}
		}
		return 0;
	case OP_J:
	case OP_JAL:
	case OP_LUI:
		return 0;
	case OP_BEQ:
	case OP_BNE:
	case OP_LWL:
	case OP_LWR:
	case OP_SB:
	case OP_SH:
	case OP_SWL:
	case OP_SW:
	case OP_SWR:
		return BIT(op.i.rs) | BIT(op.i.rt);
	default:
		return BIT(op.i.rs);
	}
}

static u64 opcode_write_mask(union code op)
{
	u64 flags;

	switch (op.i.op) {
	case OP_SPECIAL:
		switch (op.r.op) {
		case OP_SPECIAL_JR:
		case OP_SPECIAL_SYSCALL:
		case OP_SPECIAL_BREAK:
			return 0;
		case OP_SPECIAL_MULT:
		case OP_SPECIAL_MULTU:
		case OP_SPECIAL_DIV:
		case OP_SPECIAL_DIVU:
			if (!OPT_FLAG_MULT_DIV)
				return BIT(REG_LO) | BIT(REG_HI);

			if (op.r.rd)
				flags = BIT(op.r.rd);
			else
				flags = BIT(REG_LO);
			if (op.r.imm)
				flags |= BIT(op.r.imm);
			else
				flags |= BIT(REG_HI);
			return flags;
		case OP_SPECIAL_MTHI:
			return BIT(REG_HI);
		case OP_SPECIAL_MTLO:
			return BIT(REG_LO);
		default:
			return BIT(op.r.rd);
		}
	case OP_ADDI:
	case OP_ADDIU:
	case OP_SLTI:
	case OP_SLTIU:
	case OP_ANDI:
	case OP_ORI:
	case OP_XORI:
	case OP_LUI:
	case OP_LB:
	case OP_LH:
	case OP_LWL:
	case OP_LW:
	case OP_LBU:
	case OP_LHU:
	case OP_LWR:
		return BIT(op.i.rt);
	case OP_JAL:
		return BIT(31);
	case OP_CP0:
		switch (op.r.rs) {
		case OP_CP0_MFC0:
		case OP_CP0_CFC0:
			return BIT(op.i.rt);
		default:
			return 0;
		}
	case OP_CP2:
		if (op.r.op == OP_CP2_BASIC) {
			switch (op.r.rs) {
			case OP_CP2_BASIC_MFC2:
			case OP_CP2_BASIC_CFC2:
				return BIT(op.i.rt);
			default:
				break;
			}
		}
		return 0;
	case OP_REGIMM:
		switch (op.r.rt) {
		case OP_REGIMM_BLTZAL:
		case OP_REGIMM_BGEZAL:
			return BIT(31);
		default:
			return 0;
		}
	case OP_META_MOV:
		return BIT(op.r.rd);
	default:
		return 0;
	}
}

bool opcode_reads_register(union code op, u8 reg)
{
	return opcode_read_mask(op) & BIT(reg);
}

bool opcode_writes_register(union code op, u8 reg)
{
	return opcode_write_mask(op) & BIT(reg);
}

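/* find_prev_writer() / find_next_reader() walk the opcode list backwards or
 * forwards from 'offset' and return the index of the closest opcode that
 * writes or reads 'reg', or -1 if the search hits a sync point, a delay
 * slot or a conflicting access first. */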
static int find_prev_writer(const struct opcode *list, unsigned int offset, u8 reg)
{
	union code c;
	unsigned int i;

	if (list[offset].flags & LIGHTREC_SYNC)
		return -1;

	for (i = offset; i > 0; i--) {
		c = list[i - 1].c;

		if (opcode_writes_register(c, reg)) {
			if (i > 1 && has_delay_slot(list[i - 2].c))
				break;

			return i - 1;
		}

		if ((list[i - 1].flags & LIGHTREC_SYNC) ||
		    has_delay_slot(c) ||
		    opcode_reads_register(c, reg))
			break;
	}

	return -1;
}

static int find_next_reader(const struct opcode *list, unsigned int offset, u8 reg)
{
	unsigned int i;
	union code c;

	if (list[offset].flags & LIGHTREC_SYNC)
		return -1;

	for (i = offset; ; i++) {
		c = list[i].c;

		if (opcode_reads_register(c, reg)) {
			if (i > 0 && has_delay_slot(list[i - 1].c))
				break;

			return i;
		}

		if ((list[i].flags & LIGHTREC_SYNC) ||
		    has_delay_slot(c) || opcode_writes_register(c, reg))
			break;
	}

	return -1;
}

static bool reg_is_dead(const struct opcode *list, unsigned int offset, u8 reg)
{
	unsigned int i;

	if (list[offset].flags & LIGHTREC_SYNC)
		return false;

	for (i = offset + 1; ; i++) {
		if (opcode_reads_register(list[i].c, reg))
			return false;

		if (opcode_writes_register(list[i].c, reg))
			return true;

		if (has_delay_slot(list[i].c)) {
			if (list[i].flags & LIGHTREC_NO_DS)
				return false;

			return opcode_writes_register(list[i + 1].c, reg);
		}
	}
}

static bool reg_is_read(const struct opcode *list,
			unsigned int a, unsigned int b, u8 reg)
{
	/* Return true if reg is read in one of the opcodes of the interval
	 * [a, b[ */
	for (; a < b; a++) {
		if (!is_nop(list[a].c) && opcode_reads_register(list[a].c, reg))
			return true;
	}

	return false;
}

static bool reg_is_written(const struct opcode *list,
			   unsigned int a, unsigned int b, u8 reg)
{
	/* Return true if reg is written in one of the opcodes of the interval
	 * [a, b[ */

	for (; a < b; a++) {
		if (!is_nop(list[a].c) && opcode_writes_register(list[a].c, reg))
			return true;
	}

	return false;
}

static bool reg_is_read_or_written(const struct opcode *list,
				   unsigned int a, unsigned int b, u8 reg)
{
	return reg_is_read(list, a, b, reg) || reg_is_written(list, a, b, reg);
}

static bool opcode_is_load(union code op)
{
	switch (op.i.op) {
	case OP_LB:
	case OP_LH:
	case OP_LWL:
	case OP_LW:
	case OP_LBU:
	case OP_LHU:
	case OP_LWR:
	case OP_LWC2:
		return true;
	default:
		return false;
	}
}

static bool opcode_is_store(union code op)
{
	switch (op.i.op) {
	case OP_SB:
	case OP_SH:
	case OP_SW:
	case OP_SWL:
	case OP_SWR:
	case OP_SWC2:
		return true;
	default:
		return false;
	}
}

bool opcode_is_io(union code op)
{
	return opcode_is_load(op) || opcode_is_store(op);
}

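/* Detect opcodes that have no architectural effect, e.g. "addu $t0, $t0, $zero",
 * a shift by zero into the same register, or most opcodes whose destination is
 * $zero. These get rewritten to a real NOP by lightrec_transform_ops(). */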
/* TODO: Complete */
static bool is_nop(union code op)
{
	if (opcode_writes_register(op, 0)) {
		switch (op.i.op) {
		case OP_CP0:
			return op.r.rs != OP_CP0_MFC0;
		case OP_LB:
		case OP_LH:
		case OP_LWL:
		case OP_LW:
		case OP_LBU:
		case OP_LHU:
		case OP_LWR:
			return false;
		default:
			return true;
		}
	}

	switch (op.i.op) {
	case OP_SPECIAL:
		switch (op.r.op) {
		case OP_SPECIAL_AND:
			return op.r.rd == op.r.rt && op.r.rd == op.r.rs;
		case OP_SPECIAL_ADD:
		case OP_SPECIAL_ADDU:
			return (op.r.rd == op.r.rt && op.r.rs == 0) ||
				(op.r.rd == op.r.rs && op.r.rt == 0);
		case OP_SPECIAL_SUB:
		case OP_SPECIAL_SUBU:
			return op.r.rd == op.r.rs && op.r.rt == 0;
		case OP_SPECIAL_OR:
			if (op.r.rd == op.r.rt)
				return op.r.rd == op.r.rs || op.r.rs == 0;
			else
				return (op.r.rd == op.r.rs) && op.r.rt == 0;
		case OP_SPECIAL_SLL:
		case OP_SPECIAL_SRA:
		case OP_SPECIAL_SRL:
			return op.r.rd == op.r.rt && op.r.imm == 0;
		case OP_SPECIAL_MFHI:
		case OP_SPECIAL_MFLO:
			return op.r.rd == 0;
		default:
			return false;
		}
	case OP_ORI:
	case OP_ADDI:
	case OP_ADDIU:
		return op.i.rt == op.i.rs && op.i.imm == 0;
	case OP_BGTZ:
		return (op.i.rs == 0 || op.i.imm == 1);
	case OP_REGIMM:
		return (op.i.op == OP_REGIMM_BLTZ ||
			op.i.op == OP_REGIMM_BLTZAL) &&
			(op.i.rs == 0 || op.i.imm == 1);
	case OP_BNE:
		return (op.i.rs == op.i.rt || op.i.imm == 1);
	default:
		return false;
	}
}

bool load_in_delay_slot(union code op)
{
	switch (op.i.op) {
	case OP_CP0:
		switch (op.r.rs) {
		case OP_CP0_MFC0:
		case OP_CP0_CFC0:
			return true;
		default:
			break;
		}

		break;
	case OP_CP2:
		if (op.r.op == OP_CP2_BASIC) {
			switch (op.r.rs) {
			case OP_CP2_BASIC_MFC2:
			case OP_CP2_BASIC_CFC2:
				return true;
			default:
				break;
			}
		}

		break;
	case OP_LB:
	case OP_LH:
	case OP_LW:
	case OP_LWL:
	case OP_LWR:
	case OP_LBU:
	case OP_LHU:
		return true;
	default:
		break;
	}

	return false;
}

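/* Forward constant propagation: 'known' is a bitmask of registers whose value
 * is known at this point in the block, and v[] holds those values. Each
 * handled opcode either computes its destination value from known sources, or
 * clears the destination's bit when the result cannot be known statically. */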
static u32 lightrec_propagate_consts(const struct opcode *op, u32 known, u32 *v)
{
	union code c = op->c;

	if (op->flags & LIGHTREC_SYNC)
		return 0;

	switch (c.i.op) {
	case OP_SPECIAL:
		switch (c.r.op) {
		case OP_SPECIAL_SLL:
			if (known & BIT(c.r.rt)) {
				known |= BIT(c.r.rd);
				v[c.r.rd] = v[c.r.rt] << c.r.imm;
			} else {
				known &= ~BIT(c.r.rd);
			}
			break;
		case OP_SPECIAL_SRL:
			if (known & BIT(c.r.rt)) {
				known |= BIT(c.r.rd);
				v[c.r.rd] = v[c.r.rt] >> c.r.imm;
			} else {
				known &= ~BIT(c.r.rd);
			}
			break;
		case OP_SPECIAL_SRA:
			if (known & BIT(c.r.rt)) {
				known |= BIT(c.r.rd);
				v[c.r.rd] = (s32)v[c.r.rt] >> c.r.imm;
			} else {
				known &= ~BIT(c.r.rd);
			}
			break;
		case OP_SPECIAL_SLLV:
			if (known & BIT(c.r.rt) && known & BIT(c.r.rs)) {
				known |= BIT(c.r.rd);
				v[c.r.rd] = v[c.r.rt] << (v[c.r.rs] & 0x1f);
			} else {
				known &= ~BIT(c.r.rd);
			}
			break;
		case OP_SPECIAL_SRLV:
			if (known & BIT(c.r.rt) && known & BIT(c.r.rs)) {
				known |= BIT(c.r.rd);
				v[c.r.rd] = v[c.r.rt] >> (v[c.r.rs] & 0x1f);
			} else {
				known &= ~BIT(c.r.rd);
			}
			break;
		case OP_SPECIAL_SRAV:
			if (known & BIT(c.r.rt) && known & BIT(c.r.rs)) {
				known |= BIT(c.r.rd);
				v[c.r.rd] = (s32)v[c.r.rt]
					  >> (v[c.r.rs] & 0x1f);
			} else {
				known &= ~BIT(c.r.rd);
			}
			break;
		case OP_SPECIAL_ADD:
		case OP_SPECIAL_ADDU:
			if (known & BIT(c.r.rt) && known & BIT(c.r.rs)) {
				known |= BIT(c.r.rd);
				v[c.r.rd] = (s32)v[c.r.rt] + (s32)v[c.r.rs];
			} else {
				known &= ~BIT(c.r.rd);
			}
			break;
		case OP_SPECIAL_SUB:
		case OP_SPECIAL_SUBU:
			if (known & BIT(c.r.rt) && known & BIT(c.r.rs)) {
				known |= BIT(c.r.rd);
				v[c.r.rd] = v[c.r.rt] - v[c.r.rs];
			} else {
				known &= ~BIT(c.r.rd);
			}
			break;
		case OP_SPECIAL_AND:
			if (known & BIT(c.r.rt) && known & BIT(c.r.rs)) {
				known |= BIT(c.r.rd);
				v[c.r.rd] = v[c.r.rt] & v[c.r.rs];
			} else {
				known &= ~BIT(c.r.rd);
			}
			break;
		case OP_SPECIAL_OR:
			if (known & BIT(c.r.rt) && known & BIT(c.r.rs)) {
				known |= BIT(c.r.rd);
				v[c.r.rd] = v[c.r.rt] | v[c.r.rs];
			} else {
				known &= ~BIT(c.r.rd);
			}
			break;
		case OP_SPECIAL_XOR:
			if (known & BIT(c.r.rt) && known & BIT(c.r.rs)) {
				known |= BIT(c.r.rd);
				v[c.r.rd] = v[c.r.rt] ^ v[c.r.rs];
			} else {
				known &= ~BIT(c.r.rd);
			}
			break;
		case OP_SPECIAL_NOR:
			if (known & BIT(c.r.rt) && known & BIT(c.r.rs)) {
				known |= BIT(c.r.rd);
				v[c.r.rd] = ~(v[c.r.rt] | v[c.r.rs]);
			} else {
				known &= ~BIT(c.r.rd);
			}
			break;
		case OP_SPECIAL_SLT:
			if (known & BIT(c.r.rt) && known & BIT(c.r.rs)) {
				known |= BIT(c.r.rd);
				v[c.r.rd] = (s32)v[c.r.rs] < (s32)v[c.r.rt];
			} else {
				known &= ~BIT(c.r.rd);
			}
			break;
		case OP_SPECIAL_SLTU:
			if (known & BIT(c.r.rt) && known & BIT(c.r.rs)) {
				known |= BIT(c.r.rd);
				v[c.r.rd] = v[c.r.rs] < v[c.r.rt];
			} else {
				known &= ~BIT(c.r.rd);
			}
			break;
		default:
			break;
		}
		break;
	case OP_REGIMM:
		break;
	case OP_ADDI:
	case OP_ADDIU:
		if (known & BIT(c.i.rs)) {
			known |= BIT(c.i.rt);
			v[c.i.rt] = v[c.i.rs] + (s32)(s16)c.i.imm;
		} else {
			known &= ~BIT(c.i.rt);
		}
		break;
	case OP_SLTI:
		if (known & BIT(c.i.rs)) {
			known |= BIT(c.i.rt);
			v[c.i.rt] = (s32)v[c.i.rs] < (s32)(s16)c.i.imm;
		} else {
			known &= ~BIT(c.i.rt);
		}
		break;
	case OP_SLTIU:
		if (known & BIT(c.i.rs)) {
			known |= BIT(c.i.rt);
			v[c.i.rt] = v[c.i.rs] < (u32)(s32)(s16)c.i.imm;
		} else {
			known &= ~BIT(c.i.rt);
		}
		break;
	case OP_ANDI:
		if (known & BIT(c.i.rs)) {
			known |= BIT(c.i.rt);
			v[c.i.rt] = v[c.i.rs] & c.i.imm;
		} else {
			known &= ~BIT(c.i.rt);
		}
		break;
	case OP_ORI:
		if (known & BIT(c.i.rs)) {
			known |= BIT(c.i.rt);
			v[c.i.rt] = v[c.i.rs] | c.i.imm;
		} else {
			known &= ~BIT(c.i.rt);
		}
		break;
	case OP_XORI:
		if (known & BIT(c.i.rs)) {
			known |= BIT(c.i.rt);
			v[c.i.rt] = v[c.i.rs] ^ c.i.imm;
		} else {
			known &= ~BIT(c.i.rt);
		}
		break;
	case OP_LUI:
		known |= BIT(c.i.rt);
		v[c.i.rt] = c.i.imm << 16;
		break;
	case OP_CP0:
		switch (c.r.rs) {
		case OP_CP0_MFC0:
		case OP_CP0_CFC0:
			known &= ~BIT(c.r.rt);
			break;
		}
		break;
	case OP_CP2:
		if (c.r.op == OP_CP2_BASIC) {
			switch (c.r.rs) {
			case OP_CP2_BASIC_MFC2:
			case OP_CP2_BASIC_CFC2:
				known &= ~BIT(c.r.rt);
				break;
			}
		}
		break;
	case OP_LB:
	case OP_LH:
	case OP_LWL:
	case OP_LW:
	case OP_LBU:
	case OP_LHU:
	case OP_LWR:
	case OP_LWC2:
		known &= ~BIT(c.i.rt);
		break;
	case OP_META_MOV:
		if (known & BIT(c.r.rs)) {
			known |= BIT(c.r.rd);
			v[c.r.rd] = v[c.r.rs];
		} else {
			known &= ~BIT(c.r.rd);
		}
		break;
	default:
		break;
	}

	return known;
}

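/* Recognize the SLL/SRA pair that compilers emit for 8/16-bit sign
 * extensions, e.g.:
 *
 *     sll   $v0, $v0, 16
 *     sra   $v0, $v0, 16
 *
 * and rewrite it to the EXTS/EXTC meta-opcode, or fold it into a preceding
 * LBU/LHU by turning that load into LB/LH. */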
static void lightrec_optimize_sll_sra(struct opcode *list, unsigned int offset)
{
	struct opcode *prev, *prev2 = NULL, *curr = &list[offset];
	struct opcode *to_change, *to_nop;
	int idx, idx2;

	if (curr->r.imm != 24 && curr->r.imm != 16)
		return;

	idx = find_prev_writer(list, offset, curr->r.rt);
	if (idx < 0)
		return;

	prev = &list[idx];

	if (prev->i.op != OP_SPECIAL || prev->r.op != OP_SPECIAL_SLL ||
	    prev->r.imm != curr->r.imm || prev->r.rd != curr->r.rt)
		return;

	if (prev->r.rd != prev->r.rt && curr->r.rd != curr->r.rt) {
		/* sll rY, rX, 16
		 * ...
		 * sra rZ, rY, 16 */

		if (!reg_is_dead(list, offset, curr->r.rt) ||
		    reg_is_read_or_written(list, idx, offset, curr->r.rd))
			return;

		/* If rY is dead after the SRA, and rZ is not used after the SLL,
		 * we can change rY to rZ */

		pr_debug("Detected SLL/SRA with middle temp register\n");
		prev->r.rd = curr->r.rd;
		curr->r.rt = prev->r.rd;
	}

	/* We got a SLL/SRA combo. If imm #16, that's a sign-extending cast to
	 * s16. If imm #24, that's a sign-extending cast to s8.
	 *
	 * First of all, make sure that the target register of the SLL is not
	 * read before the SRA. */

	if (prev->r.rd == prev->r.rt) {
		/* sll rX, rX, 16
		 * ...
		 * sra rY, rX, 16 */
		to_change = curr;
		to_nop = prev;

		/* rX is used after the SRA - we cannot convert it. */
		if (prev->r.rd != curr->r.rd && !reg_is_dead(list, offset, prev->r.rd))
			return;
	} else {
		/* sll rY, rX, 16
		 * ...
		 * sra rY, rY, 16 */
		to_change = prev;
		to_nop = curr;
	}

	idx2 = find_prev_writer(list, idx, prev->r.rt);
	if (idx2 >= 0) {
		/* Note that PSX games sometimes do casts after
		 * a LHU or LBU; in this case we can change the
		 * load opcode to a LH or LB, and the cast can
		 * be changed to a MOV or a simple NOP. */

		prev2 = &list[idx2];

		if (curr->r.rd != prev2->i.rt &&
		    !reg_is_dead(list, offset, prev2->i.rt))
			prev2 = NULL;
		else if (curr->r.imm == 16 && prev2->i.op == OP_LHU)
			prev2->i.op = OP_LH;
		else if (curr->r.imm == 24 && prev2->i.op == OP_LBU)
			prev2->i.op = OP_LB;
		else
			prev2 = NULL;

		if (prev2) {
			if (curr->r.rd == prev2->i.rt) {
				to_change->opcode = 0;
			} else if (reg_is_dead(list, offset, prev2->i.rt) &&
				   !reg_is_read_or_written(list, idx2 + 1, offset, curr->r.rd)) {
				/* The target register of the SRA is dead after the
				 * LBU/LHU; we can change the target register of the
				 * LBU/LHU to the one of the SRA. */
				prev2->i.rt = curr->r.rd;
				to_change->opcode = 0;
			} else {
				to_change->i.op = OP_META_MOV;
				to_change->r.rd = curr->r.rd;
				to_change->r.rs = prev2->i.rt;
			}

			if (to_nop->r.imm == 24)
				pr_debug("Convert LBU+SLL+SRA to LB\n");
			else
				pr_debug("Convert LHU+SLL+SRA to LH\n");
		}
	}

	if (!prev2) {
		pr_debug("Convert SLL/SRA #%u to EXT%c\n",
			 prev->r.imm,
			 prev->r.imm == 24 ? 'C' : 'S');

		if (to_change == prev) {
			to_change->i.rs = prev->r.rt;
			to_change->i.rt = curr->r.rd;
		} else {
			to_change->i.rt = curr->r.rd;
			to_change->i.rs = prev->r.rt;
		}

		if (to_nop->r.imm == 24)
			to_change->i.op = OP_META_EXTC;
		else
			to_change->i.op = OP_META_EXTS;
	}

	to_nop->opcode = 0;
}

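/* Main peephole pass: NOP out useless opcodes, canonicalize BEQ/BNE against
 * $zero, drop redundant or useless LUIs, rewrite trivial ALU forms to the MOV
 * meta-opcode and trigger the SLL/SRA optimization above, running constant
 * propagation after each opcode. */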
static int lightrec_transform_ops(struct lightrec_state *state, struct block *block)
{
	struct opcode *list = block->opcode_list;
	struct opcode *op;
	u32 known = BIT(0);
	u32 values[32] = { 0 };
	unsigned int i;
	int reader;

	for (i = 0; i < block->nb_ops; i++) {
		op = &list[i];

		/* Transform all opcodes detected as useless to real NOPs
		 * (0x0: SLL r0, r0, #0) */
		if (op->opcode != 0 && is_nop(op->c)) {
			pr_debug("Converting useless opcode 0x%08x to NOP\n",
				 op->opcode);
			op->opcode = 0x0;
		}

		if (!op->opcode)
			continue;

		/* Register $zero is always, well, zero */
		known |= BIT(0);
		values[0] = 0;

		switch (op->i.op) {
		case OP_BEQ:
			if (op->i.rs == op->i.rt) {
				op->i.rs = 0;
				op->i.rt = 0;
			} else if (op->i.rs == 0) {
				op->i.rs = op->i.rt;
				op->i.rt = 0;
			}
			break;

		case OP_BNE:
			if (op->i.rs == 0) {
				op->i.rs = op->i.rt;
				op->i.rt = 0;
			}
			break;

		case OP_LUI:
			if (!(op->flags & LIGHTREC_SYNC) &&
			    (known & BIT(op->i.rt)) &&
			    values[op->i.rt] == op->i.imm << 16) {
				pr_debug("Converting duplicated LUI to NOP\n");
				op->opcode = 0x0;
			}

			if (op->i.imm != 0 || op->i.rt == 0)
				break;

			reader = find_next_reader(list, i + 1, op->i.rt);
			if (reader > 0 &&
			    (opcode_writes_register(list[reader].c, op->i.rt) ||
			     reg_is_dead(list, reader, op->i.rt))) {

				pr_debug("Removing useless LUI 0x0\n");

				if (list[reader].i.rs == op->i.rt)
					list[reader].i.rs = 0;
				if (list[reader].i.op == OP_SPECIAL &&
				    list[reader].i.rt == op->i.rt)
					list[reader].i.rt = 0;
				op->opcode = 0x0;
			}
			break;

		/* Transform ORI/ADDI/ADDIU with imm #0 or OR/ADD/ADDU/SUB/SUBU
		 * with register $zero to the MOV meta-opcode */
		case OP_ORI:
		case OP_ADDI:
		case OP_ADDIU:
			if (op->i.imm == 0) {
				pr_debug("Convert ORI/ADDI/ADDIU #0 to MOV\n");
				op->i.op = OP_META_MOV;
				op->r.rd = op->i.rt;
			}
			break;
		case OP_SPECIAL:
			switch (op->r.op) {
			case OP_SPECIAL_SRA:
				if (op->r.imm == 0) {
					pr_debug("Convert SRA #0 to MOV\n");
					op->i.op = OP_META_MOV;
					op->r.rs = op->r.rt;
					break;
				}

				lightrec_optimize_sll_sra(block->opcode_list, i);
				break;
			case OP_SPECIAL_SLL:
			case OP_SPECIAL_SRL:
				if (op->r.imm == 0) {
					pr_debug("Convert SLL/SRL #0 to MOV\n");
					op->i.op = OP_META_MOV;
					op->r.rs = op->r.rt;
				}
				break;
			case OP_SPECIAL_OR:
			case OP_SPECIAL_ADD:
			case OP_SPECIAL_ADDU:
				if (op->r.rs == 0) {
					pr_debug("Convert OR/ADD $zero to MOV\n");
					op->i.op = OP_META_MOV;
					op->r.rs = op->r.rt;
				}
			case OP_SPECIAL_SUB: /* fall-through */
			case OP_SPECIAL_SUBU:
				if (op->r.rt == 0) {
					pr_debug("Convert OR/ADD/SUB $zero to MOV\n");
					op->i.op = OP_META_MOV;
				}
			default: /* fall-through */
				break;
			}
		default: /* fall-through */
			break;
		}

		known = lightrec_propagate_consts(op, known, values);
	}

	return 0;
}

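/* If a branch and the instruction in its delay slot do not depend on each
 * other, swap them and flag both with LIGHTREC_NO_DS, so the delay slot no
 * longer needs special handling when the branch is compiled. */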
static int lightrec_switch_delay_slots(struct lightrec_state *state, struct block *block)
{
	struct opcode *list, *next = &block->opcode_list[0];
	unsigned int i;
	union code op, next_op;
	u8 flags;

	for (i = 0; i < block->nb_ops - 1; i++) {
		list = next;
		next = &block->opcode_list[i + 1];
		next_op = next->c;
		op = list->c;

		if (!has_delay_slot(op) ||
		    list->flags & (LIGHTREC_NO_DS | LIGHTREC_EMULATE_BRANCH) ||
		    op.opcode == 0 || next_op.opcode == 0)
			continue;

		if (i && has_delay_slot(block->opcode_list[i - 1].c) &&
		    !(block->opcode_list[i - 1].flags & LIGHTREC_NO_DS))
			continue;

		if ((list->flags & LIGHTREC_SYNC) ||
		    (next->flags & LIGHTREC_SYNC))
			continue;

		switch (list->i.op) {
		case OP_SPECIAL:
			switch (op.r.op) {
			case OP_SPECIAL_JALR:
				if (opcode_reads_register(next_op, op.r.rd) ||
				    opcode_writes_register(next_op, op.r.rd))
					continue;
			case OP_SPECIAL_JR: /* fall-through */
				if (opcode_writes_register(next_op, op.r.rs))
					continue;
			default: /* fall-through */
				break;
			}
		case OP_J: /* fall-through */
			break;
		case OP_JAL:
			if (opcode_reads_register(next_op, 31) ||
			    opcode_writes_register(next_op, 31))
				continue;
			else
				break;
		case OP_BEQ:
		case OP_BNE:
			if (op.i.rt && opcode_writes_register(next_op, op.i.rt))
				continue;
		case OP_BLEZ: /* fall-through */
		case OP_BGTZ:
			if (op.i.rs && opcode_writes_register(next_op, op.i.rs))
				continue;
			break;
		case OP_REGIMM:
			switch (op.r.rt) {
			case OP_REGIMM_BLTZAL:
			case OP_REGIMM_BGEZAL:
				if (opcode_reads_register(next_op, 31) ||
				    opcode_writes_register(next_op, 31))
					continue;
			case OP_REGIMM_BLTZ: /* fall-through */
			case OP_REGIMM_BGEZ:
				if (op.i.rs &&
				    opcode_writes_register(next_op, op.i.rs))
					continue;
				break;
			}
		default: /* fall-through */
			break;
		}

		pr_debug("Swap branch and delay slot opcodes "
			 "at offsets 0x%x / 0x%x\n",
			 i << 2, (i + 1) << 2);

		flags = next->flags;
		list->c = next_op;
		next->c = op;
		next->flags = list->flags | LIGHTREC_NO_DS;
		list->flags = flags | LIGHTREC_NO_DS;
	}

	return 0;
}

static int shrink_opcode_list(struct lightrec_state *state, struct block *block, u16 new_size)
{
	struct opcode *list;

	if (new_size >= block->nb_ops) {
		pr_err("Invalid shrink size (%u vs %u)\n",
		       new_size, block->nb_ops);
		return -EINVAL;
	}

	list = lightrec_malloc(state, MEM_FOR_IR,
			       sizeof(*list) * new_size);
	if (!list) {
		pr_err("Unable to allocate memory\n");
		return -ENOMEM;
	}

	memcpy(list, block->opcode_list, sizeof(*list) * new_size);

	lightrec_free_opcode_list(state, block);
	block->opcode_list = list;
	block->nb_ops = new_size;

	pr_debug("Shrunk opcode list of block PC 0x%08x to %u opcodes\n",
		 block->pc, new_size);

	return 0;
}

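/* "Impossible" branches are branches whose delay slot contains another
 * branch, a load, or an RFE; these are flagged with LIGHTREC_EMULATE_BRANCH
 * so that they are emulated rather than recompiled directly. */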
static int lightrec_detect_impossible_branches(struct lightrec_state *state,
					       struct block *block)
{
	struct opcode *op, *next = &block->opcode_list[0];
	unsigned int i;
	int ret = 0;

	for (i = 0; i < block->nb_ops - 1; i++) {
		op = next;
		next = &block->opcode_list[i + 1];

		if (!has_delay_slot(op->c) ||
		    (!load_in_delay_slot(next->c) &&
		     !has_delay_slot(next->c) &&
		     !(next->i.op == OP_CP0 && next->r.rs == OP_CP0_RFE)))
			continue;

		if (op->c.opcode == next->c.opcode) {
			/* The delay slot is the exact same opcode as the branch
			 * opcode: this is effectively a NOP */
			next->c.opcode = 0;
			continue;
		}

		op->flags |= LIGHTREC_EMULATE_BRANCH;

		if (op == block->opcode_list) {
			pr_debug("First opcode of block PC 0x%08x is an impossible branch\n",
				 block->pc);

			/* If the first opcode is an 'impossible' branch, we
			 * only keep the first two opcodes of the block (the
			 * branch itself + its delay slot) */
			if (block->nb_ops > 2)
				ret = shrink_opcode_list(state, block, 2);
			break;
		}
	}

	return ret;
}

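/* Detect branches whose target lies within the current block: the branch is
 * flagged LIGHTREC_LOCAL_BRANCH and its target LIGHTREC_SYNC, unless the
 * target must be emulated or is itself a delay slot. */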
static int lightrec_local_branches(struct lightrec_state *state, struct block *block)
{
	struct opcode *list;
	unsigned int i;
	s32 offset;

	for (i = 0; i < block->nb_ops; i++) {
		list = &block->opcode_list[i];

		if (should_emulate(list))
			continue;

		switch (list->i.op) {
		case OP_BEQ:
		case OP_BNE:
		case OP_BLEZ:
		case OP_BGTZ:
		case OP_REGIMM:
			offset = i + 1 + (s16)list->i.imm;
			if (offset >= 0 && offset < block->nb_ops)
				break;
		default: /* fall-through */
			continue;
		}

		pr_debug("Found local branch to offset 0x%x\n", offset << 2);

		if (should_emulate(&block->opcode_list[offset])) {
			pr_debug("Branch target must be emulated - skip\n");
			continue;
		}

		if (offset && has_delay_slot(block->opcode_list[offset - 1].c)) {
			pr_debug("Branch target is a delay slot - skip\n");
			continue;
		}

		pr_debug("Adding sync at offset 0x%x\n", offset << 2);

		block->opcode_list[offset].flags |= LIGHTREC_SYNC;
		list->flags |= LIGHTREC_LOCAL_BRANCH;
	}

	return 0;
}

bool has_delay_slot(union code op)
{
	switch (op.i.op) {
	case OP_SPECIAL:
		switch (op.r.op) {
		case OP_SPECIAL_JR:
		case OP_SPECIAL_JALR:
			return true;
		default:
			return false;
		}
	case OP_J:
	case OP_JAL:
	case OP_BEQ:
	case OP_BNE:
	case OP_BLEZ:
	case OP_BGTZ:
	case OP_REGIMM:
		return true;
	default:
		return false;
	}
}

bool should_emulate(const struct opcode *list)
{
	return has_delay_slot(list->c) &&
		(list->flags & LIGHTREC_EMULATE_BRANCH);
}

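/* For each register, find the last opcode in the block that uses it and flag
 * that opcode (LIGHTREC_UNLOAD_RD/RS/RT) so the register can be released from
 * the register cache as soon as it is no longer needed. */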
static void lightrec_add_unload(struct opcode *op, u8 reg)
{
	if (op->i.op == OP_SPECIAL && reg == op->r.rd)
		op->flags |= LIGHTREC_UNLOAD_RD;

	if (op->i.rs == reg)
		op->flags |= LIGHTREC_UNLOAD_RS;
	if (op->i.rt == reg)
		op->flags |= LIGHTREC_UNLOAD_RT;
}

static int lightrec_early_unload(struct lightrec_state *state, struct block *block)
{
	unsigned int i, offset;
	struct opcode *op;
	u8 reg;

	for (reg = 1; reg < 34; reg++) {
		int last_r_id = -1, last_w_id = -1;

		for (i = 0; i < block->nb_ops; i++) {
			union code c = block->opcode_list[i].c;

			if (opcode_reads_register(c, reg))
				last_r_id = i;
			if (opcode_writes_register(c, reg))
				last_w_id = i;
		}

		if (last_w_id > last_r_id)
			offset = (unsigned int)last_w_id;
		else if (last_r_id >= 0)
			offset = (unsigned int)last_r_id;
		else
			continue;

		op = &block->opcode_list[offset];

		if (has_delay_slot(op->c) && (op->flags & LIGHTREC_NO_DS))
			offset++;

		if (offset == block->nb_ops)
			continue;

		lightrec_add_unload(&block->opcode_list[offset], reg);
	}

	return 0;
}

static int lightrec_flag_io(struct lightrec_state *state, struct block *block)
{
	const struct lightrec_mem_map *map;
	struct opcode *list;
	u32 known = BIT(0);
	u32 values[32] = { 0 };
	unsigned int i;
	u32 val;

	for (i = 0; i < block->nb_ops; i++) {
		list = &block->opcode_list[i];

		/* Register $zero is always, well, zero */
		known |= BIT(0);
		values[0] = 0;

		switch (list->i.op) {
		case OP_SB:
		case OP_SH:
		case OP_SW:
			if (OPT_FLAG_STORES) {
				/* Mark all store operations that target $sp or $gp
				 * as not requiring code invalidation. This is based
				 * on the heuristic that stores using one of these
				 * registers as address will never hit a code page. */
				if (list->i.rs >= 28 && list->i.rs <= 29 &&
				    !state->maps[PSX_MAP_KERNEL_USER_RAM].ops) {
					pr_debug("Flagging opcode 0x%08x as not "
						 "requiring invalidation\n",
						 list->opcode);
					list->flags |= LIGHTREC_NO_INVALIDATE;
				}

				/* Detect writes whose destination address is inside the
				 * current block, using constant propagation. When these
				 * occur, we mark the blocks as not compilable. */
				if ((known & BIT(list->i.rs)) &&
				    kunseg(values[list->i.rs]) >= kunseg(block->pc) &&
				    kunseg(values[list->i.rs]) < (kunseg(block->pc) +
								  block->nb_ops * 4)) {
					pr_debug("Self-modifying block detected\n");
					block->flags |= BLOCK_NEVER_COMPILE;
					list->flags |= LIGHTREC_SMC;
				}
			}
		case OP_SWL: /* fall-through */
		case OP_SWR:
		case OP_SWC2:
		case OP_LB:
		case OP_LBU:
		case OP_LH:
		case OP_LHU:
		case OP_LW:
		case OP_LWL:
		case OP_LWR:
		case OP_LWC2:
			if (OPT_FLAG_IO && (known & BIT(list->i.rs))) {
				val = kunseg(values[list->i.rs] + (s16) list->i.imm);
				map = lightrec_get_map(state, NULL, val);

				if (!map || map->ops ||
				    map == &state->maps[PSX_MAP_PARALLEL_PORT]) {
					pr_debug("Flagging opcode %u as accessing I/O registers\n",
						 i);
					list->flags |= LIGHTREC_HW_IO;
				} else {
					pr_debug("Flagging opcode %u as direct memory access\n", i);
					list->flags |= LIGHTREC_DIRECT_IO;
				}
			}
		default: /* fall-through */
			break;
		}

		known = lightrec_propagate_consts(list, known, values);
	}

	return 0;
}

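/* Follow the code after a MULT/DIV to figure out which register the HI or LO
 * result should be stored into: returns the rd of a matching MFHI/MFLO when
 * that register can safely be written directly, REG_HI/REG_LO when the value
 * must stay in the dedicated register, or 0 when the result is never read. */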
static u8 get_mfhi_mflo_reg(const struct block *block, u16 offset,
			    const struct opcode *last,
			    u32 mask, bool sync, bool mflo, bool another)
{
	const struct opcode *op, *next = &block->opcode_list[offset];
	u32 old_mask;
	u8 reg2, reg = mflo ? REG_LO : REG_HI;
	u16 branch_offset;
	unsigned int i;

	for (i = offset; i < block->nb_ops; i++) {
		op = next;
		next = &block->opcode_list[i + 1];
		old_mask = mask;

		/* If any other opcode writes or reads to the register
		 * we'd use, then we cannot use it anymore. */
		mask |= opcode_read_mask(op->c);
		mask |= opcode_write_mask(op->c);

		if (op->flags & LIGHTREC_SYNC)
			sync = true;

		switch (op->i.op) {
		case OP_BEQ:
		case OP_BNE:
		case OP_BLEZ:
		case OP_BGTZ:
		case OP_REGIMM:
			/* TODO: handle backwards branches too */
			if (!last &&
			    (op->flags & LIGHTREC_LOCAL_BRANCH) &&
			    (s16)op->c.i.imm >= 0) {
				branch_offset = i + 1 + (s16)op->c.i.imm
					- !!(OPT_SWITCH_DELAY_SLOTS && (op->flags & LIGHTREC_NO_DS));

				reg = get_mfhi_mflo_reg(block, branch_offset, NULL,
							mask, sync, mflo, false);
				reg2 = get_mfhi_mflo_reg(block, offset + 1, next,
							 mask, sync, mflo, false);
				if (reg > 0 && reg == reg2)
					return reg;
				if (!reg && !reg2)
					return 0;
			}

			return mflo ? REG_LO : REG_HI;
		case OP_SPECIAL:
			switch (op->r.op) {
			case OP_SPECIAL_MULT:
			case OP_SPECIAL_MULTU:
			case OP_SPECIAL_DIV:
			case OP_SPECIAL_DIVU:
				return 0;
			case OP_SPECIAL_MTHI:
				if (!mflo)
					return 0;
				continue;
			case OP_SPECIAL_MTLO:
				if (mflo)
					return 0;
				continue;
			case OP_SPECIAL_JR:
				if (op->r.rs != 31)
					return reg;

				if (!sync &&
				    !(op->flags & LIGHTREC_NO_DS) &&
				    (next->i.op == OP_SPECIAL) &&
				    ((!mflo && next->r.op == OP_SPECIAL_MFHI) ||
				     (mflo && next->r.op == OP_SPECIAL_MFLO)))
					return next->r.rd;

				return 0;
			case OP_SPECIAL_JALR:
				return reg;
			case OP_SPECIAL_MFHI:
				if (!mflo) {
					if (another)
						return op->r.rd;
					/* Must use REG_HI if there is another MFHI target */
					reg2 = get_mfhi_mflo_reg(block, i + 1, next,
								 0, sync, mflo, true);
					if (reg2 > 0 && reg2 != REG_HI)
						return REG_HI;

					if (!sync && !(old_mask & BIT(op->r.rd)))
						return op->r.rd;
					else
						return REG_HI;
				}
				continue;
			case OP_SPECIAL_MFLO:
				if (mflo) {
					if (another)
						return op->r.rd;
					/* Must use REG_LO if there is another MFLO target */
					reg2 = get_mfhi_mflo_reg(block, i + 1, next,
								 0, sync, mflo, true);
					if (reg2 > 0 && reg2 != REG_LO)
						return REG_LO;

					if (!sync && !(old_mask & BIT(op->r.rd)))
						return op->r.rd;
					else
						return REG_LO;
				}
				continue;
			default:
				break;
			}

			/* fall-through */
		default:
			continue;
		}
	}

	return reg;
}

static void lightrec_replace_lo_hi(struct block *block, u16 offset,
				   u16 last, bool lo)
{
	unsigned int i;
	u32 branch_offset;

	/* This function will remove the following MFLO/MFHI. It must be called
	 * only if get_mfhi_mflo_reg() returned a non-zero value. */

	for (i = offset; i < last; i++) {
		struct opcode *op = &block->opcode_list[i];

		switch (op->i.op) {
		case OP_BEQ:
		case OP_BNE:
		case OP_BLEZ:
		case OP_BGTZ:
		case OP_REGIMM:
			/* TODO: handle backwards branches too */
			if ((op->flags & LIGHTREC_LOCAL_BRANCH) &&
			    (s16)op->c.i.imm >= 0) {
				branch_offset = i + 1 + (s16)op->c.i.imm
					- !!(OPT_SWITCH_DELAY_SLOTS && (op->flags & LIGHTREC_NO_DS));

				lightrec_replace_lo_hi(block, branch_offset, last, lo);
				lightrec_replace_lo_hi(block, i + 1, branch_offset, lo);
			}
			break;

		case OP_SPECIAL:
			if (lo && op->r.op == OP_SPECIAL_MFLO) {
				pr_debug("Removing MFLO opcode at offset 0x%x\n",
					 i << 2);
				op->opcode = 0;
				return;
			} else if (!lo && op->r.op == OP_SPECIAL_MFHI) {
				pr_debug("Removing MFHI opcode at offset 0x%x\n",
					 i << 2);
				op->opcode = 0;
				return;
			}

			/* fall-through */
		default:
			break;
		}
	}
}

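/* Use get_mfhi_mflo_reg() to annotate each MULT/MULTU/DIV/DIVU: unused HI/LO
 * results are flagged LIGHTREC_NO_HI/LIGHTREC_NO_LO, and when a suitable
 * MFHI/MFLO consumer exists, its destination register is stored in the
 * opcode's rd/imm fields so the result can be written there directly. */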
static int lightrec_flag_mults_divs(struct lightrec_state *state, struct block *block)
{
	struct opcode *list;
	u8 reg_hi, reg_lo;
	unsigned int i;

	for (i = 0; i < block->nb_ops - 1; i++) {
		list = &block->opcode_list[i];

		if (list->i.op != OP_SPECIAL)
			continue;

		switch (list->r.op) {
		case OP_SPECIAL_MULT:
		case OP_SPECIAL_MULTU:
		case OP_SPECIAL_DIV:
		case OP_SPECIAL_DIVU:
			break;
		default:
			continue;
		}

		/* Don't support opcodes in delay slots */
		if ((i && has_delay_slot(block->opcode_list[i - 1].c)) ||
		    (list->flags & LIGHTREC_NO_DS))
			continue;

		reg_lo = get_mfhi_mflo_reg(block, i + 1, NULL, 0, false, true, false);
		if (reg_lo == 0) {
			pr_debug("Mark MULT(U)/DIV(U) opcode at offset 0x%x as"
				 " not writing LO\n", i << 2);
			list->flags |= LIGHTREC_NO_LO;
		}

		reg_hi = get_mfhi_mflo_reg(block, i + 1, NULL, 0, false, false, false);
		if (reg_hi == 0) {
			pr_debug("Mark MULT(U)/DIV(U) opcode at offset 0x%x as"
				 " not writing HI\n", i << 2);
			list->flags |= LIGHTREC_NO_HI;
		}

		if (!reg_lo && !reg_hi) {
			pr_debug("Both LO/HI unused in this block, they will "
				 "probably be used in parent block - removing "
				 "flags.\n");
			list->flags &= ~(LIGHTREC_NO_LO | LIGHTREC_NO_HI);
		}

		if (reg_lo > 0 && reg_lo != REG_LO) {
			pr_debug("Found register %s to hold LO (rs = %u, rt = %u)\n",
				 lightrec_reg_name(reg_lo), list->r.rs, list->r.rt);

			lightrec_replace_lo_hi(block, i + 1, block->nb_ops, true);
			list->r.rd = reg_lo;
		} else {
			list->r.rd = 0;
		}

		if (reg_hi > 0 && reg_hi != REG_HI) {
			pr_debug("Found register %s to hold HI (rs = %u, rt = %u)\n",
				 lightrec_reg_name(reg_hi), list->r.rs, list->r.rt);

			lightrec_replace_lo_hi(block, i + 1, block->nb_ops, false);
			list->r.imm = reg_hi;
		} else {
			list->r.imm = 0;
		}
	}

	return 0;
}

static bool remove_div_sequence(struct block *block, unsigned int offset)
{
	struct opcode *op;
	unsigned int i, found = 0;

	/*
	 * Scan for the zero-checking sequence that GCC automatically introduced
	 * after most DIV/DIVU opcodes. This sequence checks the value of the
	 * divisor, and if zero, executes a BREAK opcode, causing the BIOS
	 * handler to crash the PS1.
	 *
	 * For DIV opcodes, this sequence additionally checks that the signed
	 * operation does not overflow.
	 *
	 * With the assumption that the games never crashed the PS1, we can
	 * therefore assume that the games never divided by zero or overflowed,
	 * and these sequences can be removed.
	 */

	for (i = offset; i < block->nb_ops; i++) {
		op = &block->opcode_list[i];

		if (!found) {
			if (op->i.op == OP_SPECIAL &&
			    (op->r.op == OP_SPECIAL_DIV || op->r.op == OP_SPECIAL_DIVU))
				break;

			if ((op->opcode & 0xfc1fffff) == 0x14000002) {
				/* BNE ???, zero, +8 */
				found++;
			} else {
				offset++;
			}
		} else if (found == 1 && !op->opcode) {
			/* NOP */
			found++;
		} else if (found == 2 && op->opcode == 0x0007000d) {
			/* BREAK 0x1c00 */
			found++;
		} else if (found == 3 && op->opcode == 0x2401ffff) {
			/* LI at, -1 */
			found++;
		} else if (found == 4 && (op->opcode & 0xfc1fffff) == 0x14010004) {
			/* BNE ???, at, +16 */
			found++;
		} else if (found == 5 && op->opcode == 0x3c018000) {
			/* LUI at, 0x8000 */
			found++;
		} else if (found == 6 && (op->opcode & 0x141fffff) == 0x14010002) {
			/* BNE ???, at, +16 */
			found++;
		} else if (found == 7 && !op->opcode) {
			/* NOP */
			found++;
		} else if (found == 8 && op->opcode == 0x0006000d) {
			/* BREAK 0x1800 */
			found++;
			break;
		} else {
			break;
		}
	}

	if (found >= 3) {
		if (found != 9)
			found = 3;

		pr_debug("Removing DIV%s sequence at offset 0x%x\n",
			 found == 9 ? "" : "U", offset << 2);

		for (i = 0; i < found; i++)
			block->opcode_list[offset + i].opcode = 0;

		return true;
	}

	return false;
}

static int lightrec_remove_div_by_zero_check_sequence(struct lightrec_state *state,
						      struct block *block)
{
	struct opcode *op;
	unsigned int i;

	for (i = 0; i < block->nb_ops; i++) {
		op = &block->opcode_list[i];

		if (op->i.op == OP_SPECIAL &&
		    (op->r.op == OP_SPECIAL_DIVU || op->r.op == OP_SPECIAL_DIV) &&
		    remove_div_sequence(block, i + 1))
			op->flags |= LIGHTREC_NO_DIV_CHECK;
	}

	return 0;
}

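/* Opcode-for-opcode image of the small memset loop that
 * lightrec_replace_memset() below matches blocks against, one word at a time,
 * before flagging them BLOCK_IS_MEMSET. */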
static const u32 memset_code[] = {
	0x10a00006,	// beqz a1, 2f
	0x24a2ffff,	// addiu v0,a1,-1
	0x2403ffff,	// li v1,-1
	0xac800000,	// 1: sw zero,0(a0)
	0x2442ffff,	// addiu v0,v0,-1
	0x1443fffd,	// bne v0,v1, 1b
	0x24840004,	// addiu a0,a0,4
	0x03e00008,	// 2: jr ra
	0x00000000,	// nop
};

static int lightrec_replace_memset(struct lightrec_state *state, struct block *block)
{
	unsigned int i;
	union code c;

	for (i = 0; i < block->nb_ops; i++) {
		c = block->opcode_list[i].c;

		if (c.opcode != memset_code[i])
			return 0;

		if (i == ARRAY_SIZE(memset_code) - 1) {
			/* success! */
			pr_debug("Block at PC 0x%x is a memset\n", block->pc);
			block->flags |= BLOCK_IS_MEMSET | BLOCK_NEVER_COMPILE;

			/* Return non-zero to skip other optimizers. */
			return 1;
		}
	}

	return 0;
}

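/* Optimizer pipeline: each enabled pass runs in order (IF_OPT() turns a
 * disabled pass into a NULL entry, which lightrec_optimize() skips). A pass
 * returning non-zero stops the pipeline, which lightrec_replace_memset() uses
 * to skip the remaining passes, and which also propagates errors such as
 * -ENOMEM from shrink_opcode_list(). */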
static int (*lightrec_optimizers[])(struct lightrec_state *state, struct block *) = {
	IF_OPT(OPT_REMOVE_DIV_BY_ZERO_SEQ, &lightrec_remove_div_by_zero_check_sequence),
	IF_OPT(OPT_REPLACE_MEMSET, &lightrec_replace_memset),
	IF_OPT(OPT_DETECT_IMPOSSIBLE_BRANCHES, &lightrec_detect_impossible_branches),
	IF_OPT(OPT_LOCAL_BRANCHES, &lightrec_local_branches),
	IF_OPT(OPT_TRANSFORM_OPS, &lightrec_transform_ops),
	IF_OPT(OPT_SWITCH_DELAY_SLOTS, &lightrec_switch_delay_slots),
	IF_OPT(OPT_FLAG_IO || OPT_FLAG_STORES, &lightrec_flag_io),
	IF_OPT(OPT_FLAG_MULT_DIV, &lightrec_flag_mults_divs),
	IF_OPT(OPT_EARLY_UNLOAD, &lightrec_early_unload),
};

int lightrec_optimize(struct lightrec_state *state, struct block *block)
{
	unsigned int i;
	int ret;

	for (i = 0; i < ARRAY_SIZE(lightrec_optimizers); i++) {
		if (lightrec_optimizers[i]) {
			ret = (*lightrec_optimizers[i])(state, block);
			if (ret)
				return ret;
		}
	}

	return 0;
}