1 /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
2 * Mupen64plus - linkage_x86.s *
3 * Copyright (C) 2009-2011 Ari64 *
5 * This program is free software; you can redistribute it and/or modify *
6 * it under the terms of the GNU General Public License as published by *
7 * the Free Software Foundation; either version 2 of the License, or *
8 * (at your option) any later version. *
10 * This program is distributed in the hope that it will be useful, *
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of *
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
13 * GNU General Public License for more details. *
15 * You should have received a copy of the GNU General Public License *
16 * along with this program; if not, write to the *
17 * Free Software Foundation, Inc., *
18 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. *
19 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
/* dyna_linker — resolve a virtual target address to already-compiled
   host code and patch the calling jump.  In: eax = virtual target
   address, ebx = instruction to patch.  Tries jump_in, then the hash
   table, then jump_dirty; on a complete miss it calls
   new_recompile_block to compile the target now. */
28 .type dyna_linker, @function
30 /* eax = virtual target address */
31 /* ebx = instruction to patch */
36 cmovge tlb_LUT_r(,%edi,4), %ecx /* mapped region: translate via TLB read LUT (condition set off-view — TODO confirm) */
47 mov jump_in(,%ecx,4), %edx /* head of compiled-entry list for this page */
60 lea 4(%ebx,%edi,1), %esi /* esi = patch site + displacement + 4 — presumably the jump's current target; verify */
72 /* hash_table lookup */
83 cmp hash_table(%edi), %eax /* first slot: tag match? */
86 mov hash_table+4(%edi), %edx /* first slot: compiled-code pointer */
89 cmp hash_table+8(%edi), %eax /* second slot: tag match? */
92 /* jump_dirty lookup */
93 mov jump_dirty(,%ecx,4), %edx /* blocks that need re-verification before reuse — presumed; verify */
105 /* hash_table insert */
106 mov hash_table-8(%edi), %ebx /* demote the old first-slot entry to the second slot */
107 mov hash_table-4(%edi), %ecx
108 mov %eax, hash_table-8(%edi) /* new entry takes the first slot */
109 mov %edx, hash_table-4(%edi)
110 mov %ebx, hash_table(%edi)
111 mov %ecx, hash_table+4(%edi)
116 call new_recompile_block /* nothing compiled for this address yet */
123 .size dyna_linker, .-dyna_linker
/* exec_pagefault — deliver a TLB-miss page fault to the emulated CPU.
   In: eax = faulting instruction pointer, ebx = fault (virtual)
   address.  Fills the COP0 registers the guest handler reads:
   BadVAddr, Context, EntryHi, Status, Cause, EPC. */
125 .type exec_pagefault, @function
127 /* eax = instruction pointer */
128 /* ebx = fault address */
130 mov reg_cop0+48, %edx /* current Status */
132 mov reg_cop0+16, %edi /* current Context */
134 mov %ebx, reg_cop0+32 /* BadVAddr */
135 and $0xFF80000F, %edi /* keep Context.PTEBase (high bits), clear the BadVPN2 field */
136 mov %edx, reg_cop0+48 /* Status */
137 mov %ecx, reg_cop0+52 /* Cause */
138 mov %eax, reg_cop0+56 /* EPC */
141 and $0xFFFFE000, %ecx /* VPN2: drop the low 13 bits of the vaddr */
142 and $0x007FFFF0, %ebx /* BadVPN2 bits destined for Context */
143 mov %ecx, reg_cop0+40 /* EntryHi */
145 mov %edi, reg_cop0+16 /* Context */
150 .size exec_pagefault, .-exec_pagefault
152 /* Special dynamic linker for the case where a page fault
153 may occur in a branch delay slot */
/* dyna_linker_ds — mirror of dyna_linker for branch-delay-slot targets.
   Same lookup order (jump_in, hash table, jump_dirty) and the same
   hash-table insertion; differs in how the fault address/flags are
   encoded for the delay-slot case (see the 0x80000008 marker below). */
154 .globl dyna_linker_ds
155 .hidden dyna_linker_ds
156 .type dyna_linker_ds, @function
161 cmp $0xC0000000, %eax /* mapped (TLB) region starts at 0xC0000000 here */
162 cmovge tlb_LUT_r(,%edi,4), %ecx /* translate through the TLB read LUT when mapped */
165 xor $0x80000000, %ecx /* flip the high bit — delay-slot address encoding; TODO confirm */
173 mov jump_in(,%ecx,4), %edx /* compiled-entry list for this page */
186 lea 4(%ebx,%edi,1), %esi /* esi = patch site + displacement + 4 — presumed; verify */
198 /* hash_table lookup */
209 cmp hash_table(%edi), %eax /* first slot: tag match? */
212 mov hash_table+4(%edi), %edx /* first slot: code pointer */
215 cmp hash_table+8(%edi), %eax /* second slot: tag match? */
218 /* jump_dirty lookup */
219 mov jump_dirty(,%ecx,4), %edx
231 /* hash_table insert */
232 mov hash_table-8(%edi), %ebx /* demote old first-slot entry */
233 mov hash_table-4(%edi), %ecx
234 mov %eax, hash_table-8(%edi) /* new entry into first slot */
235 mov %edx, hash_table-4(%edi)
236 mov %ebx, hash_table(%edi)
237 mov %ecx, hash_table+4(%edi)
241 and $0xFFFFFFF8, %edi /* strip the low flag bits from the address */
244 call new_recompile_block
249 and $0xFFFFFFF8, %eax /* strip flag bits before the pagefault path */
250 mov $0x80000008, %ecx /* High bit set indicates pagefault in delay slot */
254 .size dyna_linker_ds, .-dyna_linker_ds
/* jump_vaddr_eax — per-register entry stub; presumably moves %eax into
   the register the common jump_vaddr path expects — TODO confirm. */
256 .globl jump_vaddr_eax
257 .hidden jump_vaddr_eax
258 .type jump_vaddr_eax, @function
262 .size jump_vaddr_eax, .-jump_vaddr_eax
/* jump_vaddr_ecx — per-register entry stub; presumably moves %ecx into
   the register the common jump_vaddr path expects — TODO confirm. */
263 .globl jump_vaddr_ecx
264 .hidden jump_vaddr_ecx
265 .type jump_vaddr_ecx, @function
269 .size jump_vaddr_ecx, .-jump_vaddr_ecx
/* jump_vaddr_edx — per-register entry stub; presumably moves %edx into
   the register the common jump_vaddr path expects — TODO confirm. */
270 .globl jump_vaddr_edx
271 .hidden jump_vaddr_edx
272 .type jump_vaddr_edx, @function
276 .size jump_vaddr_edx, .-jump_vaddr_edx
/* jump_vaddr_ebx — per-register entry stub; presumably moves %ebx into
   the register the common jump_vaddr path expects — TODO confirm. */
277 .globl jump_vaddr_ebx
278 .hidden jump_vaddr_ebx
279 .type jump_vaddr_ebx, @function
283 .size jump_vaddr_ebx, .-jump_vaddr_ebx
/* jump_vaddr_ebp — per-register entry stub; presumably moves %ebp into
   the register the common jump_vaddr path expects — TODO confirm. */
284 .globl jump_vaddr_ebp
285 .hidden jump_vaddr_ebp
286 .type jump_vaddr_ebp, @function
289 .size jump_vaddr_ebp, .-jump_vaddr_ebp
/* jump_vaddr_edi — per-register entry stub; presumably moves %edi into
   the register the common jump_vaddr path expects — TODO confirm. */
290 .globl jump_vaddr_edi
291 .hidden jump_vaddr_edi
292 .type jump_vaddr_edi, @function
295 .size jump_vaddr_edi, .-jump_vaddr_edi
/* jump_vaddr — common indirect-jump dispatch: look the target address
   up in the two-slot hash table and jump to compiled code on a hit;
   on a miss, spill the cycle count and invoke the compiler. */
297 .type jump_vaddr, @function
299 /* Check hash table */
304 cmp hash_table(%eax), %edi /* tag match in the first slot? */
307 mov hash_table+4(%eax), %edi
310 cmp hash_table+8(%eax), %edi /* tag match in the second slot? */
313 /* No hit on hash table, call compiler */
316 mov %esi, cycle_count /* CCREG */
318 mov cycle_count, %esi /* restore cycle count after the call */
321 .size jump_vaddr, .-jump_vaddr
/* verify_code_ds — delay-slot variant of verify_code: stashes %ebp
   (the branch target) in branch_target, then presumably falls through
   into the common verification path — TODO confirm. */
323 .globl verify_code_ds
324 .hidden verify_code_ds
325 .type verify_code_ds, @function
327 mov %ebp, branch_target /* save branch target so the verify path can restore it */
328 .size verify_code_ds, .-verify_code_ds
/* verify_code_vm — verify_code entry for TLB-mapped (virtual) source
   addresses: translates the source range through memory_map and checks
   the whole range uses one consistent mapping before comparing. */
330 .globl verify_code_vm
331 .hidden verify_code_vm
332 .type verify_code_vm, @function
334 /* eax = source (virtual address) */
337 cmp $0xC0000000, %eax /* only addresses in the mapped region need translation */
340 lea -1(%eax,%ecx,1), %ebp /* ebp = last byte of the source range */
343 mov memory_map(,%edx,4), %edi /* mapping entry for the first page */
346 lea (%eax,%edi,4), %eax /* translate guest vaddr to host address */
348 xor memory_map(,%edx,4), %edi /* mapping must be identical across the range */
351 mov memory_map(,%edx,4), %edi
355 .size verify_code_vm, .-verify_code_vm
/* verify_code — compare the current guest code (addressed via eax)
   against the copy taken at compile time (via ebx) over ecx bytes,
   scanning from the end; on mismatch the block is stale and the
   compiler is re-entered.  Roles of eax/ebx inferred from the
   addressing pattern — TODO confirm against the full source. */
359 .type verify_code, @function
364 mov -4(%eax,%ecx,1), %edi /* compare last dword of each buffer first */
365 xor -4(%ebx,%ecx,1), %edi /* nonzero => code has been modified */
372 mov %esi, cycle_count /* spill cycle count before calling out */
374 mov -4(%eax,%ecx,1), %edx
375 mov -4(%ebx,%ecx,1), %ebp
376 mov -8(%eax,%ecx,1), %esi
378 mov -8(%ebx,%ecx,1), %edi
384 mov cycle_count, %esi /* restore cycle count */
385 mov branch_target, %ebp /* restore branch target (saved by verify_code_ds) */
389 mov cycle_count, %esi
391 mov branch_target, %ebp
392 push %esi /* for stack alignment, unused */
395 add $16, %esp /* pop stack */
397 .size verify_code, .-verify_code
/* cc_interrupt — called from compiled code when the cycle count hits
   the next interrupt: writes Count back to COP0, runs the interrupt
   machinery, optionally moves 'dirty' blocks back to the clean list,
   and either resumes compiled code or pops the saved callee-saved
   registers and exits the dynarec entirely. */
401 .type cc_interrupt, @function
404 add $-28, %esp /* Align stack */
405 mov %esi, reg_cop0+36 /* Count */
407 movl $0, pending_exception
409 cmpl $0, restore_candidate(,%esi,4) /* any dirty blocks waiting to be restored? */
413 mov reg_cop0+36, %esi /* reload Count after the handler may have changed it */
414 mov next_interupt, %eax
415 mov pending_exception, %ebx
428 mov %esi, cycle_count /* CCREG */
431 mov cycle_count, %esi
435 add $16, %esp /* pop stack */
436 pop %edi /* restore edi */
437 pop %esi /* restore esi */
438 pop %ebx /* restore ebx */
439 pop %ebp /* restore ebp */
440 ret /* exit dynarec */
442 /* Move 'dirty' blocks to the 'clean' list */
443 mov restore_candidate(,%esi,4), %ebx /* fetch and clear one bitmap word */
445 movl $0, restore_candidate(,%esi,4)
457 .size cc_interrupt, .-cc_interrupt
/* do_interrupt — re-enter compiled code after an interrupt: reload
   Count and next_interupt (project spelling) into their registers. */
461 .type do_interrupt, @function
468 mov reg_cop0+36, %esi /* esi = Count */
469 mov next_interupt, %ebx
474 .size do_interrupt, .-do_interrupt
/* fp_exception — raise a Coprocessor Unusable exception (COP1): set
   Cause/Status/EPC and enter the guest exception vector. */
478 .type fp_exception, @function
480 mov $0x1000002c, %edx /* Cause: exc code 11 (CpU) with CE=1 — per MIPS Cause layout; verify */
482 mov reg_cop0+48, %ebx /* current Status */
485 mov %ebx, reg_cop0+48 /* Status */
486 mov %edx, reg_cop0+52 /* Cause */
487 mov %eax, reg_cop0+56 /* EPC */
492 .size fp_exception, .-fp_exception
/* fp_exception_ds — delay-slot variant of fp_exception: same Cause
   value but with the branch-delay (BD) high bit set, then presumably
   joins the common fp_exception path — TODO confirm. */
494 .globl fp_exception_ds
495 .hidden fp_exception_ds
496 .type fp_exception_ds, @function
498 mov $0x9000002c, %edx /* Set high bit if delay slot */
500 .size fp_exception_ds, .-fp_exception_ds
/* jump_syscall — raise a SYSCALL exception: store Status/Cause/EPC and
   enter the guest exception vector.  The Cause value in %edx is set up
   on lines not visible here — TODO confirm. */
504 .type jump_syscall, @function
507 mov reg_cop0+48, %ebx /* current Status */
510 mov %ebx, reg_cop0+48 /* Status */
511 mov %edx, reg_cop0+52 /* Cause */
512 mov %eax, reg_cop0+56 /* EPC */
517 .size jump_syscall, .-jump_syscall
/* jump_eret — emulate the ERET instruction: clear Status.EXL, sync the
   cycle count with COP0 Count, and resume guest execution at EPC. */
521 .type jump_eret, @function
523 mov reg_cop0+48, %ebx /* Status */
525 and $0xFFFFFFFD, %ebx /* clear EXL (bit 1) */
526 mov %esi, reg_cop0+36 /* Count */
527 mov %ebx, reg_cop0+48 /* Status */
529 mov next_interupt, %eax
530 mov reg_cop0+36, %esi
533 mov reg_cop0+56, %eax /* EPC */
540 mov reg+4(%ebx), %edx /* high word of a guest register — presumably checking 64-bit state; verify */
562 mov %esi, cycle_count
564 mov cycle_count, %esi
572 .size jump_eret, .-jump_eret
/* new_dyna_start — C-callable entry point into the dynarec: aligns the
   stack, compiles the initial block, loads the cycle-count registers
   and enters recompiled code. */
574 .globl new_dyna_start
575 .hidden new_dyna_start
576 .type new_dyna_start, @function
582 add $-8, %esp /* align stack */
584 call new_recompile_block
585 movl next_interupt, %edi
586 movl reg_cop0+36, %esi /* esi = Count (cycle counter used by compiled code) */
587 movl %edi, last_count
590 .size new_dyna_start, .-new_dyna_start
592 /* Note: Assumes %ebx, %ebp, %esi, %edi are callee-saved */
/* invalidate_block_eax — stub: presumably saves scratch registers and
   pushes %eax (block/page index) before the common path — TODO confirm. */
593 .globl invalidate_block_eax
594 .hidden invalidate_block_eax
595 .type invalidate_block_eax, @function
596 invalidate_block_eax:
601 jmp invalidate_block_call
602 .size invalidate_block_eax, .-invalidate_block_eax
/* invalidate_block_ecx — stub: presumably saves scratch registers and
   pushes %ecx (block/page index) before the common path — TODO confirm. */
603 .globl invalidate_block_ecx
604 .hidden invalidate_block_ecx
605 .type invalidate_block_ecx, @function
606 invalidate_block_ecx:
611 jmp invalidate_block_call
612 .size invalidate_block_ecx, .-invalidate_block_ecx
/* invalidate_block_edx — stub: presumably saves scratch registers and
   pushes %edx (block/page index) before the common path — TODO confirm. */
613 .globl invalidate_block_edx
614 .hidden invalidate_block_edx
615 .type invalidate_block_edx, @function
616 invalidate_block_edx:
621 jmp invalidate_block_call
622 .size invalidate_block_edx, .-invalidate_block_edx
/* invalidate_block_ebx — stub: presumably saves scratch registers and
   pushes %ebx (block/page index) before the common path — TODO confirm. */
623 .globl invalidate_block_ebx
624 .hidden invalidate_block_ebx
625 .type invalidate_block_ebx, @function
626 invalidate_block_ebx:
631 jmp invalidate_block_call
632 .size invalidate_block_ebx, .-invalidate_block_ebx
/* invalidate_block_ebp — stub: presumably saves scratch registers and
   pushes %ebp (block/page index) before the common path — TODO confirm. */
633 .globl invalidate_block_ebp
634 .hidden invalidate_block_ebp
635 .type invalidate_block_ebp, @function
636 invalidate_block_ebp:
641 jmp invalidate_block_call
642 .size invalidate_block_ebp, .-invalidate_block_ebp
/* invalidate_block_esi — stub: presumably saves scratch registers and
   pushes %esi (block/page index) before the common path — TODO confirm. */
643 .globl invalidate_block_esi
644 .hidden invalidate_block_esi
645 .type invalidate_block_esi, @function
646 invalidate_block_esi:
651 jmp invalidate_block_call
652 .size invalidate_block_esi, .-invalidate_block_esi
/* invalidate_block_edi — stub for %edi; no jmp needed: it falls
   through directly into invalidate_block_call, which follows it. */
653 .globl invalidate_block_edi
654 .hidden invalidate_block_edi
655 .type invalidate_block_edi, @function
656 invalidate_block_edi:
661 .size invalidate_block_edi, .-invalidate_block_edi
/* invalidate_block_call — common tail of the invalidate_block_* stubs:
   the argument is already on the stack; call the C invalidate_block,
   then discard the argument slot. */
663 .type invalidate_block_call, @function
664 invalidate_block_call:
665 call invalidate_block
666 pop %eax /* Throw away */
671 .size invalidate_block_call, .-invalidate_block_call
/* write_rdram_new — 32-bit store handler for RDRAM.  %edi holds a
   0x80000000-based guest address; subtracting the base indexes the
   host rdram[] array directly. */
673 .globl write_rdram_new
674 .hidden write_rdram_new
675 .type write_rdram_new, @function
679 mov %ecx, rdram-0x80000000(%edi)
681 .size write_rdram_new, .-write_rdram_new
/* write_rdramb_new — 8-bit store handler for RDRAM (same base-offset
   addressing as write_rdram_new). */
683 .globl write_rdramb_new
684 .hidden write_rdramb_new
685 .type write_rdramb_new, @function
690 movb %cl, rdram-0x80000000(%edi)
692 .size write_rdramb_new, .-write_rdramb_new
/* write_rdramh_new — 16-bit store handler for RDRAM (same base-offset
   addressing as write_rdram_new). */
694 .globl write_rdramh_new
695 .hidden write_rdramh_new
696 .type write_rdramh_new, @function
701 movw %cx, rdram-0x80000000(%edi)
703 .size write_rdramh_new, .-write_rdramh_new
/* write_rdramd_new — 64-bit store handler for RDRAM: the value arrives
   split across %ecx (first word stored) and %edx (word at +4). */
705 .globl write_rdramd_new
706 .hidden write_rdramd_new
707 .type write_rdramd_new, @function
712 mov %ecx, rdram-0x80000000(%edi)
713 mov %edx, rdram-0x80000000+4(%edi)
715 .size write_rdramd_new, .-write_rdramd_new
/* do_invalidate — after a write into code memory, invalidate the
   compiled block(s) for that page unless the page is already marked
   invalid in invalid_code[]. */
717 .type do_invalidate, @function
720 mov %edi, %ebx /* Return ebx to caller */
723 cmpb $1, invalid_code(%edi) /* skip the call if the page is already flagged */
726 call invalidate_block
730 .size do_invalidate, .-do_invalidate
/* read_nomem_new — 32-bit load handler for unmapped/special addresses:
   translate the page through memory_map, load through the mapping, and
   hand the result back in readmem_dword. */
732 .globl read_nomem_new
733 .hidden read_nomem_new
734 .type read_nomem_new, @function
739 mov memory_map(,%edi,4),%edi /* edi = page -> host mapping entry */
743 mov (%ebx,%edi,4), %ecx /* load via the mapping (entry scaled by 4) */
744 mov %ecx, readmem_dword /* result returned via readmem_dword */
746 .size read_nomem_new, .-read_nomem_new
/* read_nomemb_new — 8-bit load handler: same memory_map translation as
   read_nomem_new, zero-extending the byte into readmem_dword. */
748 .globl read_nomemb_new
749 .hidden read_nomemb_new
750 .type read_nomemb_new, @function
755 mov memory_map(,%edi,4),%edi /* page -> host mapping entry */
760 movzbl (%ebx,%edi,4), %ecx /* zero-extend the loaded byte */
761 mov %ecx, readmem_dword
763 .size read_nomemb_new, .-read_nomemb_new
/* read_nomemh_new — 16-bit load handler: same memory_map translation as
   read_nomem_new, zero-extending the halfword into readmem_dword. */
765 .globl read_nomemh_new
766 .hidden read_nomemh_new
767 .type read_nomemh_new, @function
772 mov memory_map(,%edi,4),%edi /* page -> host mapping entry */
777 movzwl (%ebx,%edi,4), %ecx /* zero-extend the loaded halfword */
778 mov %ecx, readmem_dword
780 .size read_nomemh_new, .-read_nomemh_new
/* read_nomemd_new — 64-bit load handler: loads both 32-bit halves via
   the memory_map translation.  The word at +4 lands in readmem_dword
   and the word at +0 in readmem_dword+4 — presumably converting the
   guest's big-endian doubleword into host order; verify. */
782 .globl read_nomemd_new
783 .hidden read_nomemd_new
784 .type read_nomemd_new, @function
789 mov memory_map(,%edi,4),%edi /* page -> host mapping entry */
793 mov 4(%ebx,%edi,4), %ecx
794 mov (%ebx,%edi,4), %edx
795 mov %ecx, readmem_dword
796 mov %edx, readmem_dword+4
798 .size read_nomemd_new, .-read_nomemd_new
/* write_nomem_new — 32-bit store handler: translate via memory_map and
   store through the mapping.  Unlike the read handlers, no *4 scale on
   %edi here — it is presumably pre-scaled off-view; TODO confirm. */
800 .globl write_nomem_new
801 .hidden write_nomem_new
802 .type write_nomem_new, @function
805 mov memory_map(,%edi,4),%edi /* page -> host mapping entry */
810 mov %ecx, (%ebx,%edi)
812 .size write_nomem_new, .-write_nomem_new
/* write_nomemb_new — 8-bit store handler: memory_map translation, then
   byte store through the mapping (edi presumably pre-scaled; verify). */
814 .globl write_nomemb_new
815 .hidden write_nomemb_new
816 .type write_nomemb_new, @function
819 mov memory_map(,%edi,4),%edi /* page -> host mapping entry */
825 movb %cl, (%ebx,%edi)
827 .size write_nomemb_new, .-write_nomemb_new
/* write_nomemh_new — 16-bit store handler: memory_map translation, then
   halfword store through the mapping (edi presumably pre-scaled; verify). */
829 .globl write_nomemh_new
830 .hidden write_nomemh_new
831 .type write_nomemh_new, @function
834 mov memory_map(,%edi,4),%edi /* page -> host mapping entry */
840 movw %cx, (%ebx,%edi)
842 .size write_nomemh_new, .-write_nomemh_new
/* write_nomemd_new — 64-bit store handler: stores %edx at +0 and %ecx
   at +4 through the memory_map translation (half ordering mirrors
   read_nomemd_new's guest-endianness handling — presumed; verify). */
844 .globl write_nomemd_new
845 .hidden write_nomemd_new
846 .type write_nomemd_new, @function
849 mov memory_map(,%edi,4),%edi /* page -> host mapping entry */
855 mov %edx, (%ebx,%edi)
856 mov %ecx, 4(%ebx,%edi)
858 .size write_nomemd_new, .-write_nomemd_new
/* tlb_exception — TLB read/write fault raised from inside compiled
   code: re-fetch the faulting guest instruction, fix up the affected
   guest register(s), fill COP0 (Status, Cause, EPC, BadVAddr, Context)
   and return to the guest exception vector. */
860 .type tlb_exception, @function
864 /* ebp = instr addr + flags */
875 mov reg_cop0+48, %esi /* current Status */
883 and $0xFFFFFFFC, %edx /* word-align the faulting instruction address */
884 mov memory_map(,%ecx,4), %ecx
886 mov (%edx, %ecx, 4), %ecx /* fetch the faulting guest instruction */
888 mov %esi, reg_cop0+48 /* Status */
889 mov %eax, reg_cop0+52 /* Cause */
890 mov %edx, reg_cop0+56 /* EPC */
900 mov reg_cop0+16, %esi /* current Context */
901 cmovc reg(,%ebp,8), %ebx /* conditionally reload a guest register — selection logic off-view; verify */
902 and $0xFF80000F, %esi /* Context: keep PTEBase, clear BadVPN2 */
903 mov %ebx, reg(,%ebp,8) /* write the fixed-up low word back */
906 mov %eax, reg_cop0+32 /* BadVAddr */
909 cmove reg+4(,%ebp,8), %ebx /* conditionally preserve the high word */
911 and $0x007FFFF0, %eax /* BadVPN2 bits for Context */
912 mov %ebx, reg+4(,%ebp,8)
915 mov %esi, reg_cop0+16 /* Context */
918 movl next_interupt, %edi
919 movl reg_cop0+36, %esi /* Count */
920 movl %edi, last_count
923 .size tlb_exception, .-tlb_exception
/* breakpoint — debugging hook symbol (no body between the directives
   shown here). */
927 .type breakpoint, @function
929 .size breakpoint, .-breakpoint