1 /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
2 * Mupen64plus - linkage_x86.s *
3 * Copyright (C) 2009-2010 Ari64 *
5 * This program is free software; you can redistribute it and/or modify *
6 * it under the terms of the GNU General Public License as published by *
7 * the Free Software Foundation; either version 2 of the License, or *
8 * (at your option) any later version. *
10 * This program is distributed in the hope that it will be useful, *
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of *
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
13 * GNU General Public License for more details. *
15 * You should have received a copy of the GNU General Public License *
16 * along with this program; if not, write to the *
17 * Free Software Foundation, Inc., *
18 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. *
19 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
/*
 * dyna_linker: resolve a virtual branch target to compiled code and patch
 * the jump instruction that reached us.
 * In: eax = virtual target address, ebx = address of instruction to patch.
 * NOTE(review): this extraction elides many interior lines (labels,
 * branches, the actual patch); comments cover only the visible fragments.
 */
33 .type dyna_linker, @function
35 /* eax = virtual target address */
36 /* ebx = instruction to patch */
41 cmovge tlb_LUT_r(,%edi,4), %ecx /* presumably remap via TLB read LUT -- predicate setup elided, confirm */
52 mov jump_in(,%ecx,4), %edx /* jump_in entry for this page index */
65 lea 4(%ebx,%edi,1), %esi
77 /* hash_table lookup */
88 cmp hash_table(%edi), %eax /* first slot: tag match? */
91 mov hash_table+4(%edi), %edx /* first slot: compiled-code pointer */
94 cmp hash_table+8(%edi), %eax /* second slot: tag match? */
97 /* jump_dirty lookup */
98 mov jump_dirty(,%ecx,4), %edx
110 /* hash_table insert */
/* Two-way set behavior: promote the new (eax,edx) pair into the first
   slot and demote the previous first-slot entry into the second slot. */
111 mov hash_table-8(%edi), %ebx
112 mov hash_table-4(%edi), %ecx
113 mov %eax, hash_table-8(%edi)
114 mov %edx, hash_table-4(%edi)
115 mov %ebx, hash_table(%edi)
116 mov %ecx, hash_table+4(%edi)
121 call new_recompile_block /* no hit anywhere: compile the target block */
128 .size dyna_linker, .-dyna_linker
/*
 * exec_pagefault: record a page-fault exception in CP0 state.
 * In: eax = instruction pointer (becomes EPC), ebx = faulting address.
 * Writes BadVAddr, Status, Cause, EPC, EntryHi, Context.
 * NOTE(review): interior lines (handler dispatch, ecx setup) are elided
 * in this extraction.
 */
130 .globl exec_pagefault
131 .type exec_pagefault, @function
133 /* eax = instruction pointer */
134 /* ebx = fault address */
136 mov reg_cop0+48, %edx /* current Status */
137 mov reg_cop0+16, %edi /* current Context */
139 mov %ebx, reg_cop0+32 /* BadVAddr */
140 and $0xFF80000F, %edi /* keep PTEBase/low field of Context -- confirm field layout */
141 mov %edx, reg_cop0+48 /* Status */
142 mov %ecx, reg_cop0+52 /* Cause */
143 mov %eax, reg_cop0+56 /* EPC */
146 and $0xFFFFE000, %ecx /* clear low 13 bits of the address before EntryHi */
147 and $0x007FFFF0, %ebx /* BadVPN2 field for Context -- confirm */
148 mov %ecx, reg_cop0+40 /* EntryHI */
150 mov %edi, reg_cop0+16 /* Context */
157 .size exec_pagefault, .-exec_pagefault
159 /* Special dynamic linker for the case where a page fault
160 may occur in a branch delay slot */
/*
 * dyna_linker_ds: same lookup/patch flow as dyna_linker, but flags the
 * pagefault path as occurring in a delay slot (high bit of ecx).
 * In: eax = virtual target address, ebx = instruction to patch.
 * NOTE(review): many interior lines are elided in this extraction.
 */
161 .globl dyna_linker_ds
162 .type dyna_linker_ds, @function
167 cmp $0xC0000000, %eax /* addresses at/above 0xC0000000 take the TLB path */
168 cmovge tlb_LUT_r(,%edi,4), %ecx
171 xor $0x80000000, %ecx
179 mov jump_in(,%ecx,4), %edx /* jump_in entry for this page index */
192 lea 4(%ebx,%edi,1), %esi
204 /* hash_table lookup */
215 cmp hash_table(%edi), %eax /* first slot: tag match? */
218 mov hash_table+4(%edi), %edx /* first slot: compiled-code pointer */
221 cmp hash_table+8(%edi), %eax /* second slot: tag match? */
224 /* jump_dirty lookup */
225 mov jump_dirty(,%ecx,4), %edx
237 /* hash_table insert */
/* Promote new entry to slot 0; demote old slot-0 entry to slot 1. */
238 mov hash_table-8(%edi), %ebx
239 mov hash_table-4(%edi), %ecx
240 mov %eax, hash_table-8(%edi)
241 mov %edx, hash_table-4(%edi)
242 mov %ebx, hash_table(%edi)
243 mov %ecx, hash_table+4(%edi)
247 and $0xFFFFFFF8, %edi /* mask low bits -- surrounding context elided */
250 call new_recompile_block
255 and $0xFFFFFFF8, %eax
256 mov $0x80000008, %ecx /* High bit set indicates pagefault in delay slot */
260 .size dyna_linker_ds, .-dyna_linker_ds
/* jump_vaddr_eax: per-register entry stub; body elided in this extraction
   (presumably shuffles %eax into the register jump_vaddr expects). */
262 .globl jump_vaddr_eax
263 .type jump_vaddr_eax, @function
267 .size jump_vaddr_eax, .-jump_vaddr_eax
/* jump_vaddr_ecx: per-register entry stub; body elided in this extraction
   (presumably shuffles %ecx into the register jump_vaddr expects). */
268 .globl jump_vaddr_ecx
269 .type jump_vaddr_ecx, @function
273 .size jump_vaddr_ecx, .-jump_vaddr_ecx
/* jump_vaddr_edx: per-register entry stub; body elided in this extraction
   (presumably shuffles %edx into the register jump_vaddr expects). */
274 .globl jump_vaddr_edx
275 .type jump_vaddr_edx, @function
279 .size jump_vaddr_edx, .-jump_vaddr_edx
/* jump_vaddr_ebx: per-register entry stub; body elided in this extraction
   (presumably shuffles %ebx into the register jump_vaddr expects). */
280 .globl jump_vaddr_ebx
281 .type jump_vaddr_ebx, @function
285 .size jump_vaddr_ebx, .-jump_vaddr_ebx
/* jump_vaddr_ebp: per-register entry stub; body elided in this extraction
   (presumably shuffles %ebp into the register jump_vaddr expects). */
286 .globl jump_vaddr_ebp
287 .type jump_vaddr_ebp, @function
290 .size jump_vaddr_ebp, .-jump_vaddr_ebp
/* jump_vaddr_edi: per-register entry stub; body elided in this extraction
   (presumably shuffles %edi into the register jump_vaddr expects). */
291 .globl jump_vaddr_edi
292 .type jump_vaddr_edi, @function
295 .size jump_vaddr_edi, .-jump_vaddr_edi
/*
 * jump_vaddr: common tail of the jump_vaddr_* stubs. Probes the two-way
 * hash table (target tag compared against %edi, indexed via %eax); on a
 * miss, saves CCREG and calls the compiler (call site elided).
 * NOTE(review): branches between these fragments are elided.
 */
298 .type jump_vaddr, @function
300 /* Check hash table */
305 cmp hash_table(%eax), %edi /* first slot: tag match? */
308 mov hash_table+4(%eax), %edi /* first slot: compiled-code pointer */
311 cmp hash_table+8(%eax), %edi /* second slot: tag match? */
314 /* No hit on hash table, call compiler */
316 mov %esi, cycle_count /* CCREG */
318 mov cycle_count, %esi /* reload CCREG (intervening call elided) */
321 .size jump_vaddr, .-jump_vaddr
/* verify_code_ds: delay-slot variant of code verification. Saves ebp to
   branch_target, then presumably falls through into verify_code_vm
   (fall-through not visible in this extraction). */
323 .globl verify_code_ds
324 .type verify_code_ds, @function
326 mov %ebp, branch_target
327 .size verify_code_ds, .-verify_code_ds
/*
 * verify_code_vm: verify a block whose source address is virtual.
 * In: eax = source (virtual address). Compares against the 0xC0000000
 * boundary and checks memory_map entries for the pages the range spans
 * before translating eax to a host pointer.
 * NOTE(review): the branches between these fragments are elided.
 */
329 .globl verify_code_vm
330 .type verify_code_vm, @function
332 /* eax = source (virtual address) */
335 cmp $0xC0000000, %eax
338 lea -1(%eax,%ecx,1), %ebp /* ebp = eax + ecx - 1 (last byte of range) */
341 mov memory_map(,%edx,4), %edi /* translation entry for the first page */
344 lea (%eax,%edi,4), %eax /* translate virtual -> host (entry scaled by 4) */
346 xor memory_map(,%edx,4), %edi /* nonzero iff the two pages translate differently -- confirm */
349 mov memory_map(,%edx,4), %edi
353 .size verify_code_vm, .-verify_code_vm
/*
 * verify_code: compare a compiled block's saved source against current
 * memory, walking backwards from offset ecx. On mismatch the compiled
 * block is abandoned: the return address and the pushed virtual address
 * are discarded instead of returning.
 * Assumes eax = saved copy, ebx = live source, ecx = length -- TODO
 * confirm; the comparison loop and its branches are elided here.
 */
356 .type verify_code, @function
361 mov -4(%eax,%ecx,1), %edi
362 xor -4(%ebx,%ecx,1), %edi /* edi = XOR of trailing words; zero iff equal */
369 mov %esi, cycle_count
371 mov -4(%eax,%ecx,1), %edx
372 mov -4(%ebx,%ecx,1), %ebp
373 mov -8(%eax,%ecx,1), %esi
375 mov -8(%ebx,%ecx,1), %edi
381 mov cycle_count, %esi
382 mov branch_target, %ebp
386 mov cycle_count, %esi
388 mov branch_target, %ebp
389 add $4, %esp /* pop return address, we're not returning */
391 add $4, %esp /* pop virtual address */
393 .size verify_code, .-verify_code
/*
 * cc_interrupt: cycle-counter interrupt check reached from compiled code.
 * Saves Count, clears pending_exception, checks restore_candidate for
 * dirty blocks to re-clean, and on exit either resumes compiled code or
 * unwinds the dynarec frame (pops + ret).
 * NOTE(review): the event-handler call and branches are elided here.
 */
396 .type cc_interrupt, @function
399 add $-28, %esp /* Align stack */
400 mov %esi, reg_cop0+36 /* Count */
402 movl $0, pending_exception
404 cmpl $0, restore_candidate(,%esi,4) /* any blocks flagged for restore? */
408 mov reg_cop0+36, %esi
409 mov next_interupt, %eax
410 mov pending_exception, %ebx
422 mov %esi, cycle_count /* CCREG */
425 mov cycle_count, %esi
429 add $16, %esp /* pop stack */
430 pop %edi /* restore edi */
431 pop %esi /* restore esi */
432 pop %ebx /* restore ebx */
433 pop %ebp /* restore ebp */
434 ret /* exit dynarec */
436 /* Move 'dirty' blocks to the 'clean' list */
437 mov restore_candidate(,%esi,4), %ebx /* fetch dirty bitmap word */
439 movl $0, restore_candidate(,%esi,4) /* and clear it */
451 .size cc_interrupt, .-cc_interrupt
/* do_interrupt: reload Count and the next interrupt time after an
   interrupt was serviced, then (elided) re-enter compiled code. */
454 .type do_interrupt, @function
460 mov reg_cop0+36, %esi /* Count */
461 mov next_interupt, %ebx
466 .size do_interrupt, .-do_interrupt
/*
 * fp_exception: raise an FPU-related exception.
 * Cause = 0x1000002c (looks like coprocessor-unusable, CE=1 -- confirm
 * against the MIPS Cause encoding); updates Status/Cause/EPC, then
 * (elided) enters the exception handler.
 */
469 .type fp_exception, @function
471 mov $0x1000002c, %edx
473 mov reg_cop0+48, %ebx
475 mov %ebx, reg_cop0+48 /* Status */
476 mov %edx, reg_cop0+52 /* Cause */
477 mov %eax, reg_cop0+56 /* EPC */
484 .size fp_exception, .-fp_exception
/* fp_exception_ds: delay-slot variant of fp_exception; identical except
   the Cause value carries the branch-delay bit. Presumably falls through
   into fp_exception's store sequence (fall-through elided). */
486 .globl fp_exception_ds
487 .type fp_exception_ds, @function
489 mov $0x9000002c, %edx /* Set high bit if delay slot */
491 .size fp_exception_ds, .-fp_exception_ds
/*
 * jump_syscall: raise a SYSCALL exception: update Status/Cause/EPC, then
 * (elided) enter the handler. The Cause value setup is not visible in
 * this extraction.
 */
494 .type jump_syscall, @function
497 mov reg_cop0+48, %ebx
499 mov %ebx, reg_cop0+48 /* Status */
500 mov %edx, reg_cop0+52 /* Cause */
501 mov %eax, reg_cop0+56 /* EPC */
508 .size jump_syscall, .-jump_syscall
/*
 * jump_eret: implement ERET. Clears Status bit 1 (EXL -- confirm),
 * updates Count, fetches EPC as the continuation address, and (elided)
 * looks up / jumps to the compiled code at that address.
 */
511 .type jump_eret, @function
513 mov reg_cop0+48, %ebx /* Status */
515 and $0xFFFFFFFD, %ebx /* clear bit 1 of Status */
516 mov %esi, reg_cop0+36 /* Count */
517 mov %ebx, reg_cop0+48 /* Status */
519 mov next_interupt, %eax
520 mov reg_cop0+36, %esi
523 mov reg_cop0+56, %eax /* EPC */
530 mov reg+4(%ebx), %edx /* upper word of a GPR; surrounding loop elided */
551 mov %esi, cycle_count
553 mov cycle_count, %esi
561 .size jump_eret, .-jump_eret
/*
 * new_dyna_start: C-callable entry point into the dynarec. Compiles the
 * initial block, loads Count / next_interupt into the cycle registers,
 * and (elided) jumps into generated code. Prologue/epilogue not visible
 * in this extraction.
 */
563 .globl new_dyna_start
564 .type new_dyna_start, @function
571 call new_recompile_block
572 add $-8, %esp /* align stack */
573 movl next_interupt, %edi
574 movl reg_cop0+36, %esi /* Count -> cycle register (esi, cf. CCREG use elsewhere) */
575 movl %edi, last_count
578 .size new_dyna_start, .-new_dyna_start
/* write_rdram_new: 32-bit store to RDRAM. edi holds a 0x80000000-based
   address, so the rdram array is indexed with a -0x80000000 bias.
   Code invalidation (presumably via do_invalidate) is elided. */
580 .globl write_rdram_new
581 .type write_rdram_new, @function
585 mov %ecx, rdram-0x80000000(%edi)
587 .size write_rdram_new, .-write_rdram_new
/* write_rdramb_new: byte store to RDRAM (same -0x80000000 bias as
   write_rdram_new); invalidation path elided in this extraction. */
589 .globl write_rdramb_new
590 .type write_rdramb_new, @function
595 movb %cl, rdram-0x80000000(%edi)
597 .size write_rdramb_new, .-write_rdramb_new
/* write_rdramh_new: halfword store to RDRAM (same -0x80000000 bias as
   write_rdram_new); invalidation path elided in this extraction. */
599 .globl write_rdramh_new
600 .type write_rdramh_new, @function
605 movw %cx, rdram-0x80000000(%edi)
607 .size write_rdramh_new, .-write_rdramh_new
/* write_rdramd_new: 64-bit store to RDRAM as two 32-bit halves
   (ecx at +0, edx at +4 -- half ordering is byte-order handling,
   confirm against the read_nomemd counterpart). Invalidation elided. */
609 .globl write_rdramd_new
610 .type write_rdramd_new, @function
615 mov %ecx, rdram-0x80000000(%edi)
616 mov %edx, rdram-0x80000000+4(%edi)
618 .size write_rdramd_new, .-write_rdramd_new
/*
 * do_invalidate: after a store, invalidate any compiled code on the
 * written page. Compares invalid_code[page] against 1 (skip branch
 * elided) and calls invalidate_block otherwise. Preserves the caller's
 * address by copying edi into ebx.
 */
621 .type do_invalidate, @function
624 mov %edi, %ebx /* Return ebx to caller */
627 cmpb $1, invalid_code(%edi) /* already invalid? (branch elided) */
630 call invalidate_block
634 .size do_invalidate, .-do_invalidate
/* read_nomem_new: 32-bit read through the memory_map translation table;
   the result is published in readmem_dword. The page-fault branch for an
   unmapped entry is elided in this extraction. */
636 .globl read_nomem_new
637 .type read_nomem_new, @function
642 mov memory_map(,%edi,4),%edi /* edi = translation entry for the page */
646 mov (%ebx,%edi,4), %ecx /* load from translated host address */
647 mov %ecx, readmem_dword
649 .size read_nomem_new, .-read_nomem_new
/* read_nomemb_new: byte read through memory_map, zero-extended into
   readmem_dword. Fault branch elided in this extraction. */
651 .globl read_nomemb_new
652 .type read_nomemb_new, @function
657 mov memory_map(,%edi,4),%edi /* translation entry for the page */
662 movzbl (%ebx,%edi,4), %ecx /* zero-extend u8 -> u32 */
663 mov %ecx, readmem_dword
665 .size read_nomemb_new, .-read_nomemb_new
/* read_nomemh_new: halfword read through memory_map, zero-extended into
   readmem_dword. Fault branch elided in this extraction. */
667 .globl read_nomemh_new
668 .type read_nomemh_new, @function
673 mov memory_map(,%edi,4),%edi /* translation entry for the page */
678 movzwl (%ebx,%edi,4), %ecx /* zero-extend u16 -> u32 */
679 mov %ecx, readmem_dword
681 .size read_nomemh_new, .-read_nomemh_new
/* read_nomemd_new: 64-bit read through memory_map. The word at +4 goes
   to readmem_dword and the word at +0 to readmem_dword+4 -- the halves
   are swapped on store (byte-order handling, confirm against
   write_rdramd_new). Fault branch elided. */
683 .globl read_nomemd_new
684 .type read_nomemd_new, @function
689 mov memory_map(,%edi,4),%edi /* translation entry for the page */
693 mov 4(%ebx,%edi,4), %ecx
694 mov (%ebx,%edi,4), %edx
695 mov %ecx, readmem_dword
696 mov %edx, readmem_dword+4
698 .size read_nomemd_new, .-read_nomemd_new
/* write_nomem_new: 32-bit store through the memory_map translation
   table. NOTE(review): unlike the reads, the store uses (%ebx,%edi)
   without a scale -- the shift that pre-scales edi is elided here.
   Invalidation/fault branches elided. */
700 .globl write_nomem_new
701 .type write_nomem_new, @function
704 mov memory_map(,%edi,4),%edi /* translation entry for the page */
709 mov %ecx, (%ebx,%edi)
711 .size write_nomem_new, .-write_nomem_new
/* write_nomemb_new: byte store through memory_map; scaling of edi and
   invalidation/fault branches are elided in this extraction. */
713 .globl write_nomemb_new
714 .type write_nomemb_new, @function
717 mov memory_map(,%edi,4),%edi /* translation entry for the page */
723 movb %cl, (%ebx,%edi)
725 .size write_nomemb_new, .-write_nomemb_new
/* write_nomemh_new: halfword store through memory_map; scaling of edi
   and invalidation/fault branches are elided in this extraction. */
727 .globl write_nomemh_new
728 .type write_nomemh_new, @function
731 mov memory_map(,%edi,4),%edi /* translation entry for the page */
737 movw %cx, (%ebx,%edi)
739 .size write_nomemh_new, .-write_nomemh_new
/* write_nomemd_new: 64-bit store through memory_map as two 32-bit
   halves (edx at +0, ecx at +4 -- mirror of read_nomemd_new's swap,
   confirm). Scaling of edi and invalidation branches elided. */
741 .globl write_nomemd_new
742 .type write_nomemd_new, @function
745 mov memory_map(,%edi,4),%edi /* translation entry for the page */
751 mov %edx, (%ebx,%edi)
752 mov %ecx, 4(%ebx,%edi)
754 .size write_nomemd_new, .-write_nomemd_new
/*
 * tlb_exception: TLB miss/invalid handler. Re-fetches the faulting
 * instruction through memory_map, patches up the affected 64-bit GPR in
 * reg[] (entries are 8 bytes: low word at +0, high word at +4), and
 * fills CP0 BadVAddr/Context/Status/Cause/EPC before entering the
 * handler. ebp = instr addr + flags.
 * NOTE(review): most of the decode logic and all branches are elided in
 * this extraction; the cmovc/cmove predicates are set by elided code.
 */
757 .type tlb_exception, @function
761 /* ebp = instr addr + flags */
772 mov reg_cop0+48, %esi /* current Status */
780 and $0xFFFFFFFC, %edx /* word-align the instruction address */
781 mov memory_map(,%ecx,4), %ecx
783 mov (%edx, %ecx, 4), %ecx /* fetch the faulting instruction word -- confirm */
785 mov %esi, reg_cop0+48 /* Status */
786 mov %eax, reg_cop0+52 /* Cause */
787 mov %edx, reg_cop0+56 /* EPC */
797 mov reg_cop0+16, %esi /* current Context */
798 cmovc reg(,%ebp,8), %ebx /* conditionally keep the old low word */
799 and $0xFF80000F, %esi /* keep PTEBase/low field of Context -- confirm */
800 mov %ebx, reg(,%ebp,8) /* write back GPR low word */
803 mov %eax, reg_cop0+32 /* BadVAddr */
806 cmove reg+4(,%ebp,8), %ebx /* conditionally keep the old high word */
807 and $0x007FFFF0, %eax /* BadVPN2 field for Context -- confirm */
809 mov %ebx, reg+4(,%ebp,8) /* write back GPR high word */
811 mov %esi, reg_cop0+16 /* Context */
814 movl next_interupt, %edi
815 movl reg_cop0+36, %esi /* Count */
816 movl %edi, last_count
819 .size tlb_exception, .-tlb_exception