| 1 | /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * |
| 2 | * Mupen64plus - linkage_x86.s * |
| 3 | * Copyright (C) 2009-2011 Ari64 * |
| 4 | * * |
| 5 | * This program is free software; you can redistribute it and/or modify * |
| 6 | * it under the terms of the GNU General Public License as published by * |
| 7 | * the Free Software Foundation; either version 2 of the License, or * |
| 8 | * (at your option) any later version. * |
| 9 | * * |
| 10 | * This program is distributed in the hope that it will be useful, * |
| 11 | * but WITHOUT ANY WARRANTY; without even the implied warranty of * |
| 12 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * |
| 13 | * GNU General Public License for more details. * |
| 14 | * * |
| 15 | * You should have received a copy of the GNU General Public License * |
| 16 | * along with this program; if not, write to the * |
| 17 | * Free Software Foundation, Inc., * |
| 18 | * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. * |
| 19 | * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ |
| 20 | .file "linkage_x86.s" |
| 21 | .bss |
| 22 | .align 4 |
| 23 | |
| 24 | .section .rodata |
| 25 | .text |
.globl dyna_linker
.hidden dyna_linker
.type dyna_linker, @function
/*
 * Dynamic linker stub, reached from a compiled block whose jump target
 * has not been linked yet.  Finds (or compiles) the target block,
 * patches the call site so future executions jump directly, then enters
 * the target code.
 *
 * In:  eax = virtual target address
 *      ebx = address of the call-site word to patch
 *      esi = cycle count (dynarec CCREG), preserved across this routine
 */
dyna_linker:
	/* eax = virtual target address */
	/* ebx = instruction to patch */
	mov %eax, %edi
	mov %eax, %ecx
	shr $12, %edi			/* edi = virtual page number */
	cmp $0xC0000000, %eax
	cmovge tlb_LUT_r(,%edi,4), %ecx	/* signed >= 0xC0000000: translate via TLB read LUT */
	test %ecx, %ecx
	cmovz %eax, %ecx		/* no mapping: fall back to the virtual address */
	xor $0x80000000, %ecx
	mov $2047, %edx
	shr $12, %ecx			/* ecx = page index for jump_in/jump_dirty */
	and %ecx, %edx
	or $2048, %edx			/* candidate index in the upper half of the table */
	cmp %edx, %ecx
	cmova %edx, %ecx		/* out-of-range pages map into entries 2048..4095 */
	/* jump_in lookup: linked list of compiled entry points for this page */
	mov jump_in(,%ecx,4), %edx
.A1:
	test %edx, %edx
	je .A3				/* end of list: try the hash table */
	mov (%edx), %edi
	xor %eax, %edi			/* compare recorded vaddr with the target... */
	or 4(%edx), %edi		/* ...and require the second word to be zero */
	je .A2				/* match found */
	movl 12(%edx), %edx		/* edx = next list node */
	jmp .A1
.A2:
	/* Match: patch the caller so it jumps straight to the block next time */
	mov (%ebx), %edi		/* edi = current displacement at the call site */
	mov %esi, %ebp			/* save cycle count; esi is clobbered by the lea */
	lea 4(%ebx,%edi,1), %esi	/* esi = original branch target address */
	mov %eax, %edi
	pusha
	call add_link			/* record the link so it can be undone later */
	popa
	mov 8(%edx), %edi		/* edi = compiled code entry point */
	mov %ebp, %esi			/* restore cycle count */
	lea -4(%edi), %edx
	subl %ebx, %edx
	movl %edx, (%ebx)		/* write the new relative displacement */
	jmp *%edi
.A3:
	/* hash_table lookup (16-byte buckets: two vaddr/pointer pairs) */
	mov %eax, %edi
	mov %eax, %edx
	shr $16, %edi
	shr $12, %edx
	xor %eax, %edi
	and $2047, %edx
	movzwl %di, %edi
	shl $4, %edi			/* edi = hash * 16 */
	cmp $2048, %ecx
	cmovc %edx, %ecx		/* normal pages: jump_dirty index = vaddr page & 2047 */
	cmp hash_table(%edi), %eax
	jne .A5
.A4:
	mov hash_table+4(%edi), %edx
	jmp *%edx			/* hash hit: enter compiled code */
.A5:
	cmp hash_table+8(%edi), %eax	/* second slot of the bucket */
	lea 8(%edi), %edi
	je .A4
	/* jump_dirty lookup: blocks that must be re-verified before reuse */
	mov jump_dirty(,%ecx,4), %edx
.A6:
	testl %edx, %edx
	je .A8
	mov (%edx), %ecx
	xor %eax, %ecx
	or 4(%edx), %ecx
	je .A7
	movl 12(%edx), %edx
	jmp .A6
.A7:
	mov 8(%edx), %edx		/* edx = dirty block entry point */
	/* hash_table insert: promote this entry to the front slot */
	mov hash_table-8(%edi), %ebx
	mov hash_table-4(%edi), %ecx
	mov %eax, hash_table-8(%edi)
	mov %edx, hash_table-4(%edi)
	mov %ebx, hash_table(%edi)
	mov %ecx, hash_table+4(%edi)
	jmp *%edx
.A8:
	/* Nothing compiled for this address: invoke the compiler */
	mov %eax, %edi
	pusha
	call new_recompile_block
	test %eax, %eax			/* nonzero return = compile failed (bad address) */
	popa				/* popa does not modify flags */
	je dyna_linker			/* success: retry the whole lookup */
	/* pagefault */
	mov %eax, %ebx			/* ebx = fault address */
	mov $0x08, %ecx			/* cause; fall through into exec_pagefault */
.size dyna_linker, .-dyna_linker
| 124 | |
.type exec_pagefault, @function
/*
 * Raise a TLB-miss exception and jump to compiled code at the exception
 * vector (0x80000000).
 * In:  eax = EPC (faulting instruction address)
 *      ebx = faulting virtual address
 *      ecx = Cause value
 */
exec_pagefault:
	/* eax = instruction pointer */
	/* ebx = fault address */
	/* ecx = cause */
	mov reg_cop0+48, %edx		/* edx = Status */
	add $-12, %esp			/* keep stack 16-byte aligned across the call */
	mov reg_cop0+16, %edi		/* edi = Context */
	or $2, %edx			/* set Status.EXL */
	mov %ebx, reg_cop0+32 /* BadVAddr */
	and $0xFF80000F, %edi		/* clear the Context.BadVPN2 field */
	mov %edx, reg_cop0+48 /* Status */
	mov %ecx, reg_cop0+52 /* Cause */
	mov %eax, reg_cop0+56 /* EPC */
	mov %ebx, %ecx
	shr $9, %ebx
	and $0xFFFFE000, %ecx		/* EntryHi = VPN2 of the fault address */
	and $0x007FFFF0, %ebx		/* BadVPN2 bits for Context */
	mov %ecx, reg_cop0+40 /* EntryHI */
	or %ebx, %edi
	mov %edi, reg_cop0+16 /* Context */
	push $0x80000000		/* TLB refill vector */
	call get_addr_ht		/* look up / compile the handler code */
	add $16, %esp
	jmp *%eax
.size exec_pagefault, .-exec_pagefault
| 151 | |
/* Special dynamic linker for the case where a page fault
   may occur in a branch delay slot */
.globl dyna_linker_ds
.hidden dyna_linker_ds
.type dyna_linker_ds, @function
/*
 * Same lookup/patch sequence as dyna_linker (eax = virtual target,
 * ebx = call-site word, esi = cycle count).  Differs only in the miss
 * path (.B8): the address handed to the compiler is tagged (low bit set)
 * to mark a delay slot, and a page fault reports the delay-slot cause.
 */
dyna_linker_ds:
	mov %eax, %edi
	mov %eax, %ecx
	shr $12, %edi			/* edi = virtual page number */
	cmp $0xC0000000, %eax
	cmovge tlb_LUT_r(,%edi,4), %ecx	/* translate high addresses via TLB read LUT */
	test %ecx, %ecx
	cmovz %eax, %ecx		/* unmapped: keep the virtual address */
	xor $0x80000000, %ecx
	mov $2047, %edx
	shr $12, %ecx
	and %ecx, %edx
	or $2048, %edx
	cmp %edx, %ecx
	cmova %edx, %ecx		/* clamp into mirror entries 2048..4095 */
	/* jump_in lookup */
	mov jump_in(,%ecx,4), %edx
.B1:
	test %edx, %edx
	je .B3				/* end of list: try the hash table */
	mov (%edx), %edi
	xor %eax, %edi			/* match vaddr... */
	or 4(%edx), %edi		/* ...and require the second word to be zero */
	je .B2
	movl 12(%edx), %edx		/* next node */
	jmp .B1
.B2:
	/* Match: patch the call site */
	mov (%ebx), %edi		/* edi = current displacement */
	mov %esi, %ecx			/* save cycle count; esi clobbered by the lea */
	lea 4(%ebx,%edi,1), %esi	/* esi = original branch target */
	mov %eax, %edi
	pusha
	call add_link
	popa
	mov 8(%edx), %edi		/* edi = compiled entry point */
	mov %ecx, %esi			/* restore cycle count */
	lea -4(%edi), %edx
	subl %ebx, %edx
	movl %edx, (%ebx)		/* write new relative displacement */
	jmp *%edi
.B3:
	/* hash_table lookup */
	mov %eax, %edi
	mov %eax, %edx
	shr $16, %edi
	shr $12, %edx
	xor %eax, %edi
	and $2047, %edx
	movzwl %di, %edi
	shl $4, %edi			/* 16-byte buckets */
	cmp $2048, %ecx
	cmovc %edx, %ecx
	cmp hash_table(%edi), %eax
	jne .B5
.B4:
	mov hash_table+4(%edi), %edx
	jmp *%edx
.B5:
	cmp hash_table+8(%edi), %eax
	lea 8(%edi), %edi
	je .B4
	/* jump_dirty lookup */
	mov jump_dirty(,%ecx,4), %edx
.B6:
	testl %edx, %edx
	je .B8
	mov (%edx), %ecx
	xor %eax, %ecx
	or 4(%edx), %ecx
	je .B7
	movl 12(%edx), %edx
	jmp .B6
.B7:
	mov 8(%edx), %edx
	/* hash_table insert (promote to front slot) */
	mov hash_table-8(%edi), %ebx
	mov hash_table-4(%edi), %ecx
	mov %eax, hash_table-8(%edi)
	mov %edx, hash_table-4(%edi)
	mov %ebx, hash_table(%edi)
	mov %ecx, hash_table+4(%edi)
	jmp *%edx
.B8:
	/* Not compiled: compile with delay-slot tag (aligned address | 1) */
	mov %eax, %edi
	and $0xFFFFFFF8, %edi
	inc %edi
	pusha
	call new_recompile_block
	test %eax, %eax
	popa				/* popa preserves flags */
	je dyna_linker_ds		/* compiled OK: retry */
	/* pagefault */
	and $0xFFFFFFF8, %eax
	mov $0x80000008, %ecx /* High bit set indicates pagefault in delay slot */
	mov %eax, %ebx			/* ebx = fault address */
	sub $4, %eax			/* EPC = the branch (previous instruction) */
	jmp exec_pagefault
.size dyna_linker_ds, .-dyna_linker_ds
| 255 | |
.globl jump_vaddr_eax
.hidden jump_vaddr_eax
.type jump_vaddr_eax, @function
/* Indirect jump with the virtual target in eax: move it to edi and join
   the common jump_vaddr_edi entry. */
jump_vaddr_eax:
	mov %eax, %edi
	jmp jump_vaddr_edi
.size jump_vaddr_eax, .-jump_vaddr_eax
.globl jump_vaddr_ecx
.hidden jump_vaddr_ecx
.type jump_vaddr_ecx, @function
/* Indirect jump with the virtual target in ecx: move it to edi and join
   the common jump_vaddr_edi entry. */
jump_vaddr_ecx:
	mov %ecx, %edi
	jmp jump_vaddr_edi
.size jump_vaddr_ecx, .-jump_vaddr_ecx
.globl jump_vaddr_edx
.hidden jump_vaddr_edx
.type jump_vaddr_edx, @function
/* Indirect jump with the virtual target in edx: move it to edi and join
   the common jump_vaddr_edi entry. */
jump_vaddr_edx:
	mov %edx, %edi
	jmp jump_vaddr_edi
.size jump_vaddr_edx, .-jump_vaddr_edx
.globl jump_vaddr_ebx
.hidden jump_vaddr_ebx
.type jump_vaddr_ebx, @function
/* Indirect jump with the virtual target in ebx: move it to edi and join
   the common jump_vaddr_edi entry. */
jump_vaddr_ebx:
	mov %ebx, %edi
	jmp jump_vaddr_edi
.size jump_vaddr_ebx, .-jump_vaddr_ebx
.globl jump_vaddr_ebp
.hidden jump_vaddr_ebp
.type jump_vaddr_ebp, @function
/* Indirect jump with the virtual target in ebp: move it to edi, then
   fall straight through into jump_vaddr_edi (no jmp needed). */
jump_vaddr_ebp:
	mov %ebp, %edi
.size jump_vaddr_ebp, .-jump_vaddr_ebp
.globl jump_vaddr_edi
.hidden jump_vaddr_edi
.type jump_vaddr_edi, @function
/* Common entry: target address in edi.  Copy it to eax (used for the
   hash computation) and fall through into jump_vaddr. */
jump_vaddr_edi:
	mov %edi, %eax
.size jump_vaddr_edi, .-jump_vaddr_edi
| 296 | |
.type jump_vaddr, @function
/*
 * Common indirect-jump path: edi = virtual target address, eax = copy.
 * Probes the two slots of the hash-table bucket first; on a miss, calls
 * get_addr to look up or compile the target.  esi (cycle count) is
 * preserved via the cycle_count spill slot across the call.
 */
jump_vaddr:
	/* Check hash table */
	shr $16, %eax
	xor %edi, %eax
	movzwl %ax, %eax
	shl $4, %eax			/* eax = bucket offset (16-byte entries) */
	cmp hash_table(%eax), %edi
	jne .C2
.C1:
	mov hash_table+4(%eax), %edi
	jmp *%edi			/* hit: enter compiled block */
.C2:
	cmp hash_table+8(%eax), %edi	/* second slot of the bucket */
	lea 8(%eax), %eax
	je .C1
	/* No hit on hash table, call compiler */
	add $-12, %esp			/* stack alignment for the call */
	push %edi
	mov %esi, cycle_count /* CCREG */
	call get_addr
	mov cycle_count, %esi
	add $16, %esp
	jmp *%eax
.size jump_vaddr, .-jump_vaddr
| 322 | |
.globl verify_code_ds
.hidden verify_code_ds
.type verify_code_ds, @function
/* Verify-entry used when the block was reached via a branch delay slot:
   spill ebp (the branch target) to branch_target first, then fall
   through into verify_code_vm. */
verify_code_ds:
	mov %ebp, branch_target
.size verify_code_ds, .-verify_code_ds
| 329 | |
.globl verify_code_vm
.hidden verify_code_vm
.type verify_code_vm, @function
/*
 * Verify a block whose source lies in TLB-mapped space.
 * In:  eax = source (virtual address), ebx = saved copy, ecx = length.
 * Addresses below 0xC0000000 (signed compare) need no translation.
 * Otherwise walk memory_map over every source page: any unmapped page
 * (negative entry) or inconsistent mapping bails out to the recompile
 * path (.D5 in verify_code); on success eax is translated to a host
 * pointer and execution falls through into verify_code.
 */
verify_code_vm:
	/* eax = source (virtual address) */
	/* ebx = target */
	/* ecx = length */
	cmp $0xC0000000, %eax
	jl verify_code			/* directly-usable address: compare as-is */
	mov %eax, %edx
	lea -1(%eax,%ecx,1), %ebp	/* ebp = page of the last source byte */
	shr $12, %edx			/* edx = first page */
	shr $12, %ebp
	mov memory_map(,%edx,4), %edi
	test %edi, %edi
	js .D5				/* negative map entry = unmapped */
	lea (%eax,%edi,4), %eax		/* eax = host address of the source */
.D1:
	xor memory_map(,%edx,4), %edi	/* entries must match across all pages... */
	shl $2, %edi			/* ...ignoring the top two (flag) bits */
	jne .D5
	mov memory_map(,%edx,4), %edi
	inc %edx
	cmp %ebp, %edx
	jbe .D1
.size verify_code_vm, .-verify_code_vm
| 356 | |
.globl verify_code
.hidden verify_code
.type verify_code, @function
/*
 * Compare a block's current source against the copy captured at compile
 * time.  In: eax = source (host pointer), ebx = saved copy, ecx = length
 * in bytes.  Equal: plain ret back into the compiled block (restoring
 * esi/ebp).  Different: fall to .D5, which replaces the return address
 * with freshly looked-up (possibly recompiled) code via get_addr.
 */
verify_code:
	/* eax = source */
	/* ebx = target */
	/* ecx = length */
	mov -4(%eax,%ecx,1), %edi	/* compare the last word first */
	xor -4(%ebx,%ecx,1), %edi
	jne .D5
	mov %ecx, %edx
	add $-4, %ecx
	je .D3				/* length was exactly one word: done */
	test $4, %edx
	cmove %edx, %ecx		/* make the remaining length a multiple of 8 */
	mov %esi, cycle_count		/* spill cycle count; esi is used as scratch */
.D2:
	mov -4(%eax,%ecx,1), %edx	/* compare 8 bytes per iteration, downward */
	mov -4(%ebx,%ecx,1), %ebp
	mov -8(%eax,%ecx,1), %esi
	xor %edx, %ebp
	mov -8(%ebx,%ecx,1), %edi
	jne .D4
	xor %esi, %edi
	jne .D4
	add $-8, %ecx
	jne .D2
	mov cycle_count, %esi		/* all equal: restore registers and return */
	mov branch_target, %ebp
.D3:
	ret
.D4:
	mov cycle_count, %esi		/* mismatch inside the loop: restore esi */
.D5:
	mov branch_target, %ebp
	push %esi /* for stack alignment, unused */
	push 8(%esp)			/* re-push the address left on the caller's stack */
	call get_addr			/* look up / recompile current code */
	add $16, %esp /* pop stack */
	jmp *%eax
.size verify_code, .-verify_code
| 398 | |
.globl cc_interrupt
.hidden cc_interrupt
.type cc_interrupt, @function
/*
 * Called by compiled code when the cycle counter expires.
 * In/out: esi = cycle count (relative to last_count).
 * Updates Count, optionally re-cleans dirty blocks (.E4), runs
 * gen_interupt, then either returns to the compiled block, jumps to a
 * new target (pending exception, .E2), or unwinds the registers pushed
 * by new_dyna_start and exits the dynarec (stop flag, .E3).
 */
cc_interrupt:
	add last_count, %esi
	add $-28, %esp /* Align stack */
	mov %esi, reg_cop0+36 /* Count */
	shr $19, %esi
	movl $0, pending_exception
	and $0x7f, %esi			/* esi = index into the restore_candidate bitmap */
	cmpl $0, restore_candidate(,%esi,4)
	jne .E4				/* candidate dirty blocks to re-clean */
.E1:
	call gen_interupt
	mov reg_cop0+36, %esi
	mov next_interupt, %eax
	mov pending_exception, %ebx
	mov stop, %ecx
	add $28, %esp			/* undo the alignment adjustment */
	mov %eax, last_count
	sub %eax, %esi			/* esi = Count - next_interupt */
	test %ecx, %ecx
	jne .E3				/* stop requested: exit dynarec */
	test %ebx, %ebx
	jne .E2				/* exception pending: jump to new address */
	ret				/* resume the compiled block */
.E2:
	add $-8, %esp
	mov pcaddr, %edi
	mov %esi, cycle_count /* CCREG */
	push %edi
	call get_addr_ht		/* find/compile code at the exception target */
	mov cycle_count, %esi
	add $16, %esp			/* drop arg, padding and the return address */
	jmp *%eax
.E3:
	add $16, %esp /* pop stack */
	pop %edi /* restore edi */
	pop %esi /* restore esi */
	pop %ebx /* restore ebx */
	pop %ebp /* restore ebp */
	ret /* exit dynarec */
.E4:
	/* Move 'dirty' blocks to the 'clean' list */
	mov restore_candidate(,%esi,4), %ebx	/* ebx = bitmap word (32 pages) */
	mov %esi, %ebp
	movl $0, restore_candidate(,%esi,4)
	shl $5, %ebp				/* ebp = first page number of this word */
.E5:
	shr $1, %ebx
	jnc .E6
	mov %ebp, (%esp)			/* arg slot already reserved by the -28 */
	call clean_blocks
.E6:
	inc %ebp
	test $31, %ebp
	jne .E5					/* 32 pages per bitmap word */
	jmp .E1
.size cc_interrupt, .-cc_interrupt
| 458 | |
.globl do_interrupt
.hidden do_interrupt
.type do_interrupt, @function
/*
 * Resume execution at pcaddr after an interrupt: look up (or compile)
 * the target, recompute esi = Count - next_interupt, and jump in.
 */
do_interrupt:
	mov pcaddr, %edi
	add $-12, %esp			/* stack alignment for the call */
	push %edi
	call get_addr_ht
	add $16, %esp
	mov reg_cop0+36, %esi		/* Count */
	mov next_interupt, %ebx
	mov %ebx, last_count
	sub %ebx, %esi
	add $2, %esi			/* two-cycle bias -- NOTE(review): confirm rationale */
	jmp *%eax
.size do_interrupt, .-do_interrupt
| 475 | |
.globl fp_exception
.hidden fp_exception
.type fp_exception, @function
/*
 * Raise a coprocessor-unusable exception: Cause = 0x1000002c
 * (ExcCode 11, CE=1).  In: eax = EPC.  The tail (.E7) is shared with
 * fp_exception_ds, which supplies a different Cause in edx.
 */
fp_exception:
	mov $0x1000002c, %edx
.E7:
	mov reg_cop0+48, %ebx
	add $-12, %esp			/* stack alignment for the call */
	or $2, %ebx			/* set Status.EXL */
	mov %ebx, reg_cop0+48 /* Status */
	mov %edx, reg_cop0+52 /* Cause */
	mov %eax, reg_cop0+56 /* EPC */
	push $0x80000180		/* general exception vector */
	call get_addr_ht
	add $16, %esp
	jmp *%eax
.size fp_exception, .-fp_exception
| 493 | |
.globl fp_exception_ds
.hidden fp_exception_ds
.type fp_exception_ds, @function
/* As fp_exception, but with the branch-delay (BD) bit set in Cause. */
fp_exception_ds:
	mov $0x9000002c, %edx /* Set high bit if delay slot */
	jmp .E7
.size fp_exception_ds, .-fp_exception_ds
| 501 | |
.globl jump_syscall
.hidden jump_syscall
.type jump_syscall, @function
/*
 * Raise a MIPS SYSCALL exception.
 * In: eax = EPC (address of the syscall instruction).
 * Sets Status.EXL, Cause = 0x20 (ExcCode 8 = Syscall), EPC, then jumps
 * to the compiled code for the general exception vector (0x80000180).
 */
jump_syscall:
	mov $0x20, %edx
	mov reg_cop0+48, %ebx
	add $-12, %esp			/* keep stack 16-byte aligned across the call */
	or $2, %ebx			/* set Status.EXL */
	mov %ebx, reg_cop0+48 /* Status */
	mov %edx, reg_cop0+52 /* Cause */
	mov %eax, reg_cop0+56 /* EPC */
	push $0x80000180		/* general exception vector */
	call get_addr_ht
	add $16, %esp			/* bug fix: was "mov $16, %esp", which set the
					   stack pointer to absolute address 16 instead
					   of popping the 16 bytes (cf. fp_exception) */
	jmp *%eax
.size jump_syscall, .-jump_syscall
| 518 | |
.globl jump_eret
.hidden jump_eret
.type jump_eret, @function
/*
 * MIPS ERET: clear Status.EXL, update Count, check for interrupts, then
 * jump to code at EPC.  Builds a "32-bitness" mask over the 64-bit GPRs
 * (a bit is set when a register is NOT a sign-extended 32-bit value),
 * plus one combined bit for hi/lo, and passes it to get_addr_32 so a
 * matching block specialization is chosen.  esi = cycle count.
 */
jump_eret:
	mov reg_cop0+48, %ebx /* Status */
	add last_count, %esi
	and $0xFFFFFFFD, %ebx		/* clear EXL (bit 1) */
	mov %esi, reg_cop0+36 /* Count */
	mov %ebx, reg_cop0+48 /* Status */
	call check_interupt
	mov next_interupt, %eax
	mov reg_cop0+36, %esi
	mov %eax, last_count
	sub %eax, %esi			/* esi = Count - next_interupt */
	mov reg_cop0+56, %eax /* EPC */
	jns .E11			/* interrupt already due: service it first */
.E8:
	mov $248, %ebx			/* iterate the GPR file downward, 8 bytes each */
	xor %edi, %edi			/* edi accumulates the mask */
.E9:
	mov reg(%ebx), %ecx		/* low word */
	mov reg+4(%ebx), %edx		/* high word */
	sar $31, %ecx			/* expected sign extension of the low word */
	xor %ecx, %edx			/* zero iff the value is sign-extended 32-bit */
	neg %edx			/* CF = 1 iff nonzero */
	adc %edi, %edi			/* shift that flag into the mask */
	sub $8, %ebx
	jne .E9
	mov hi(%ebx), %ecx		/* ebx is 0 here; same test for hi... */
	mov hi+4(%ebx), %edx
	sar $31, %ecx
	xor %ecx, %edx
	jne .E10
	mov lo(%ebx), %ecx		/* ...and, only if hi is clean, for lo */
	mov lo+4(%ebx), %edx
	sar $31, %ecx
	xor %ecx, %edx
.E10:
	neg %edx
	adc %edi, %edi			/* one combined hi/lo bit */
	add $-8, %esp			/* stack alignment for the call */
	push %edi			/* arg 2: 32-bitness mask */
	push %eax			/* arg 1: EPC */
	mov %esi, cycle_count
	call get_addr_32
	mov cycle_count, %esi
	add $16, %esp
	jmp *%eax
.E11:
	mov %eax, pcaddr
	call cc_interrupt
	mov pcaddr, %eax		/* pcaddr may have changed during the interrupt */
	jmp .E8
.size jump_eret, .-jump_eret
| 573 | |
.globl new_dyna_start
.hidden new_dyna_start
.type new_dyna_start, @function
/*
 * C entry point into the dynarec.  Saves the callee-saved registers
 * (restored by cc_interrupt's exit path, .E3), compiles the block at
 * the boot address 0xa4000040, initializes esi = Count - next_interupt,
 * and jumps to the first compiled block at base_addr.
 */
new_dyna_start:
	push %ebp
	push %ebx
	push %esi
	push %edi
	add $-8, %esp /* align stack */
	push $0xa4000040		/* N64 boot address */
	call new_recompile_block
	movl next_interupt, %edi
	movl reg_cop0+36, %esi		/* Count */
	movl %edi, last_count
	subl %edi, %esi			/* esi = cycle count relative to last_count */
	jmp *base_addr
.size new_dyna_start, .-new_dyna_start
| 591 | |
/* Note: Assumes %ebx, %ebp, %esi, %edi are callee-saved */
.globl invalidate_block_eax
.hidden invalidate_block_eax
.type invalidate_block_eax, @function
/* invalidate_block(page) with the page number in eax: save the
   caller-saved registers, push the argument, and join the common tail. */
invalidate_block_eax:
	push %eax			/* preserve caller-saved regs... */
	push %ecx
	push %edx
	push %eax			/* ...then push the argument */
	jmp invalidate_block_call
.size invalidate_block_eax, .-invalidate_block_eax
.globl invalidate_block_ecx
.hidden invalidate_block_ecx
.type invalidate_block_ecx, @function
/* invalidate_block(page) with the page number in ecx. */
invalidate_block_ecx:
	push %eax			/* preserve caller-saved regs... */
	push %ecx
	push %edx
	push %ecx			/* ...then push the argument */
	jmp invalidate_block_call
.size invalidate_block_ecx, .-invalidate_block_ecx
.globl invalidate_block_edx
.hidden invalidate_block_edx
.type invalidate_block_edx, @function
/* invalidate_block(page) with the page number in edx. */
invalidate_block_edx:
	push %eax			/* preserve caller-saved regs... */
	push %ecx
	push %edx
	push %edx			/* ...then push the argument */
	jmp invalidate_block_call
.size invalidate_block_edx, .-invalidate_block_edx
.globl invalidate_block_ebx
.hidden invalidate_block_ebx
.type invalidate_block_ebx, @function
/* invalidate_block(page) with the page number in ebx (callee-saved, so
   only the argument copy is pushed). */
invalidate_block_ebx:
	push %eax			/* preserve caller-saved regs... */
	push %ecx
	push %edx
	push %ebx			/* ...then push the argument */
	jmp invalidate_block_call
.size invalidate_block_ebx, .-invalidate_block_ebx
.globl invalidate_block_ebp
.hidden invalidate_block_ebp
.type invalidate_block_ebp, @function
/* invalidate_block(page) with the page number in ebp. */
invalidate_block_ebp:
	push %eax			/* preserve caller-saved regs... */
	push %ecx
	push %edx
	push %ebp			/* ...then push the argument */
	jmp invalidate_block_call
.size invalidate_block_ebp, .-invalidate_block_ebp
.globl invalidate_block_esi
.hidden invalidate_block_esi
.type invalidate_block_esi, @function
/* invalidate_block(page) with the page number in esi. */
invalidate_block_esi:
	push %eax			/* preserve caller-saved regs... */
	push %ecx
	push %edx
	push %esi			/* ...then push the argument */
	jmp invalidate_block_call
.size invalidate_block_esi, .-invalidate_block_esi
.globl invalidate_block_edi
.hidden invalidate_block_edi
.type invalidate_block_edi, @function
/* invalidate_block(page) with the page number in edi; falls straight
   through into invalidate_block_call. */
invalidate_block_edi:
	push %eax			/* preserve caller-saved regs... */
	push %ecx
	push %edx
	push %edi			/* ...then push the argument */
.size invalidate_block_edi, .-invalidate_block_edi
| 662 | |
.type invalidate_block_call, @function
/* Common tail for the invalidate_block_* stubs: call
   invalidate_block(page), drop the argument, and restore edx/ecx/eax
   in the reverse of the order they were pushed. */
invalidate_block_call:
	call invalidate_block
	pop %eax /* Throw away */
	pop %edx
	pop %ecx
	pop %eax
	ret
.size invalidate_block_call, .-invalidate_block_call
| 672 | |
.globl write_rdram_new
.hidden write_rdram_new
.type write_rdram_new, @function
/* Store the 32-bit 'word' to RDRAM at 'address' (0x80000000-based),
   then invalidate any compiled code in that page (.E12 in
   do_invalidate, entered with edi = address). */
write_rdram_new:
	mov address, %edi
	mov word, %ecx
	mov %ecx, rdram-0x80000000(%edi)
	jmp .E12
.size write_rdram_new, .-write_rdram_new
| 682 | |
.globl write_rdramb_new
.hidden write_rdramb_new
.type write_rdramb_new, @function
/* Store the byte 'cpu_byte' to RDRAM at 'address', then invalidate the
   page (.E12).  The xor $3 swaps the byte lane for the big-endian
   word layout. */
write_rdramb_new:
	mov address, %edi
	xor $3, %edi			/* big-endian byte-lane swap */
	movb cpu_byte, %cl
	movb %cl, rdram-0x80000000(%edi)
	jmp .E12
.size write_rdramb_new, .-write_rdramb_new
| 693 | |
.globl write_rdramh_new
.hidden write_rdramh_new
.type write_rdramh_new, @function
/* Store the halfword 'hword' to RDRAM at 'address', then invalidate the
   page (.E12).  The xor $2 swaps the halfword lane for the big-endian
   word layout. */
write_rdramh_new:
	mov address, %edi
	xor $2, %edi			/* big-endian halfword-lane swap */
	movw hword, %cx
	movw %cx, rdram-0x80000000(%edi)
	jmp .E12
.size write_rdramh_new, .-write_rdramh_new
| 704 | |
.globl write_rdramd_new
.hidden write_rdramd_new
.type write_rdramd_new, @function
/* Store the 64-bit 'dword' to RDRAM at 'address', then invalidate the
   page (.E12).  The two 32-bit halves are stored swapped to match the
   big-endian doubleword layout. */
write_rdramd_new:
	mov address, %edi
	mov dword+4, %ecx		/* high half first... */
	mov dword, %edx
	mov %ecx, rdram-0x80000000(%edi)
	mov %edx, rdram-0x80000000+4(%edi)
	jmp .E12
.size write_rdramd_new, .-write_rdramd_new
| 716 | |
.type do_invalidate, @function
/*
 * Invalidate compiled code covering the page of 'address', unless the
 * page is already marked invalid.  .E12 is entered directly by the
 * write_rdram* handlers with edi = address.  On return, ebx holds the
 * address and edi holds the page number.
 */
do_invalidate:
	mov address, %edi
	mov %edi, %ebx /* Return ebx to caller */
.E12:
	shr $12, %edi			/* edi = page number */
	cmpb $1, invalid_code(%edi)
	je .E13				/* already invalid: nothing to do */
	push %edi
	call invalidate_block
	pop %edi
.E13:
	ret
.size do_invalidate, .-do_invalidate
| 731 | |
.globl read_nomem_new
.hidden read_nomem_new
.type read_nomem_new, @function
/*
 * TLB-mapped 32-bit read: translate 'address' through memory_map.
 * A negative map entry means no mapping and raises a TLB load exception
 * (eax = cause 0x8).  The value read is stored to readmem_dword.
 */
read_nomem_new:
	mov address, %edi
	mov %edi, %ebx
	shr $12, %edi			/* edi = page number */
	mov memory_map(,%edi,4),%edi	/* per-page word offset to the host address */
	mov $0x8, %eax			/* cause value in case this faults */
	test %edi, %edi
	js tlb_exception		/* sign bit = unmapped */
	mov (%ebx,%edi,4), %ecx
	mov %ecx, readmem_dword
	ret
.size read_nomem_new, .-read_nomem_new
| 747 | |
.globl read_nomemb_new
.hidden read_nomemb_new
.type read_nomemb_new, @function
/*
 * TLB-mapped byte read: translate 'address' through memory_map; on a
 * miss raise a TLB load exception (eax = cause 0x8).  The byte lane is
 * swapped (xor $3) for the big-endian layout; result is zero-extended
 * into readmem_dword.
 */
read_nomemb_new:
	mov address, %edi
	mov %edi, %ebx
	shr $12, %edi
	mov memory_map(,%edi,4),%edi
	mov $0x8, %eax			/* cause value in case this faults */
	test %edi, %edi
	js tlb_exception		/* sign bit = unmapped */
	xor $3, %ebx			/* big-endian byte-lane swap */
	movzbl (%ebx,%edi,4), %ecx
	mov %ecx, readmem_dword
	ret
.size read_nomemb_new, .-read_nomemb_new
| 764 | |
.globl read_nomemh_new
.hidden read_nomemh_new
.type read_nomemh_new, @function
/*
 * TLB-mapped halfword read: translate 'address' through memory_map; on
 * a miss raise a TLB load exception (eax = cause 0x8).  The halfword
 * lane is swapped (xor $2) for the big-endian layout; result is
 * zero-extended into readmem_dword.
 */
read_nomemh_new:
	mov address, %edi
	mov %edi, %ebx
	shr $12, %edi
	mov memory_map(,%edi,4),%edi
	mov $0x8, %eax			/* cause value in case this faults */
	test %edi, %edi
	js tlb_exception		/* sign bit = unmapped */
	xor $2, %ebx			/* big-endian halfword-lane swap */
	movzwl (%ebx,%edi,4), %ecx
	mov %ecx, readmem_dword
	ret
.size read_nomemh_new, .-read_nomemh_new
| 781 | |
.globl read_nomemd_new
.hidden read_nomemd_new
.type read_nomemd_new, @function
/*
 * TLB-mapped 64-bit read: translate 'address' through memory_map; on a
 * miss raise a TLB load exception (eax = cause 0x8).  The two 32-bit
 * halves are stored swapped into readmem_dword to match the big-endian
 * doubleword layout.
 */
read_nomemd_new:
	mov address, %edi
	mov %edi, %ebx
	shr $12, %edi
	mov memory_map(,%edi,4),%edi
	mov $0x8, %eax			/* cause value in case this faults */
	test %edi, %edi
	js tlb_exception		/* sign bit = unmapped */
	mov 4(%ebx,%edi,4), %ecx	/* second word -> low half... */
	mov (%ebx,%edi,4), %edx		/* ...first word -> high half */
	mov %ecx, readmem_dword
	mov %edx, readmem_dword+4
	ret
.size read_nomemd_new, .-read_nomemd_new
| 799 | |
.globl write_nomem_new
.hidden write_nomem_new
.type write_nomem_new, @function
/*
 * TLB-mapped 32-bit write: invalidate compiled code in the page, then
 * translate and store 'word'.  do_invalidate returns with ebx = address
 * and edi = page number.  The shl $2 moves the map entry's flag bit
 * into CF (set = not writable -- NOTE(review): confirm encoding), which
 * raises a TLB store exception (eax = cause 0xc).
 */
write_nomem_new:
	call do_invalidate
	mov memory_map(,%edi,4),%edi
	mov word, %ecx
	mov $0xc, %eax			/* cause value in case this faults */
	shl $2, %edi			/* CF = write-protect flag */
	jc tlb_exception
	mov %ecx, (%ebx,%edi)
	ret
.size write_nomem_new, .-write_nomem_new
| 813 | |
.globl write_nomemb_new
.hidden write_nomemb_new
.type write_nomemb_new, @function
/*
 * TLB-mapped byte write: invalidate the page, translate, and store
 * 'cpu_byte' with the big-endian byte-lane swap (xor $3).  A set flag
 * bit in the map entry (CF after shl) raises a TLB store exception
 * (eax = cause 0xc).
 */
write_nomemb_new:
	call do_invalidate
	mov memory_map(,%edi,4),%edi
	movb cpu_byte, %cl
	mov $0xc, %eax			/* cause value in case this faults */
	shl $2, %edi			/* CF = write-protect flag */
	jc tlb_exception
	xor $3, %ebx			/* big-endian byte-lane swap */
	movb %cl, (%ebx,%edi)
	ret
.size write_nomemb_new, .-write_nomemb_new
| 828 | |
.globl write_nomemh_new
.hidden write_nomemh_new
.type write_nomemh_new, @function
/*
 * TLB-mapped halfword write: invalidate the page, translate, and store
 * 'hword' with the big-endian halfword-lane swap (xor $2).  A set flag
 * bit in the map entry (CF after shl) raises a TLB store exception
 * (eax = cause 0xc).
 */
write_nomemh_new:
	call do_invalidate
	mov memory_map(,%edi,4),%edi
	movw hword, %cx
	mov $0xc, %eax			/* cause value in case this faults */
	shl $2, %edi			/* CF = write-protect flag */
	jc tlb_exception
	xor $2, %ebx			/* big-endian halfword-lane swap */
	movw %cx, (%ebx,%edi)
	ret
.size write_nomemh_new, .-write_nomemh_new
| 843 | |
.globl write_nomemd_new
.hidden write_nomemd_new
.type write_nomemd_new, @function
/*
 * TLB-mapped 64-bit write: invalidate the page, translate, and store
 * 'dword' with its two 32-bit halves swapped (big-endian doubleword
 * layout).  A set flag bit in the map entry (CF after shl) raises a
 * TLB store exception (eax = cause 0xc).
 */
write_nomemd_new:
	call do_invalidate
	mov memory_map(,%edi,4),%edi
	mov dword+4, %edx		/* high half first... */
	mov dword, %ecx
	mov $0xc, %eax			/* cause value in case this faults */
	shl $2, %edi			/* CF = write-protect flag */
	jc tlb_exception
	mov %edx, (%ebx,%edi)
	mov %ecx, 4(%ebx,%edi)
	ret
.size write_nomemd_new, .-write_nomemd_new
| 859 | |
.type tlb_exception, @function
/*
 * TLB miss raised by the read/write handlers above.
 * In:  eax = cause code, ebx = faulting virtual address,
 *      0x24(%esp) = faulting instruction address + flag bits
 *      (bit 0: fault in branch delay slot; bit 1 used below when
 *      restoring the base register's high word).
 *
 * Decodes the faulting MIPS load/store instruction in order to undo the
 * address computation (the dynarec may have overwritten the base
 * register with the computed address), fills in the cop0 registers,
 * unwinds the handler stack, and jumps to compiled code at the general
 * exception vector (0x80000180).
 */
tlb_exception:
	/* eax = cause */
	/* ebx = address */
	/* ebp = instr addr + flags */
	mov 0x24(%esp), %ebp
	/* Debug:
	push %ebp
	push %ebx
	push %eax
	call tlb_debug
	pop %eax
	pop %ebx
	pop %ebp
	/* end debug */
	mov reg_cop0+48, %esi		/* esi = Status */
	mov %ebp, %ecx
	mov %ebp, %edx
	mov %ebp, %edi
	shl $31, %ebp			/* delay-slot flag (bit 0) -> sign bit */
	shr $12, %ecx			/* page of the faulting instruction */
	or %ebp, %eax			/* set Cause.BD when in a delay slot */
	sar $29, %ebp			/* -4 if delay slot, else 0 */
	and $0xFFFFFFFC, %edx		/* instruction address without flag bits */
	mov memory_map(,%ecx,4), %ecx
	or $2, %esi			/* set Status.EXL */
	mov (%edx, %ecx, 4), %ecx	/* ecx = faulting MIPS opcode word */
	add %ebp, %edx			/* EPC points at the branch for delay slots */
	mov %esi, reg_cop0+48 /* Status */
	mov %eax, reg_cop0+52 /* Cause */
	mov %edx, reg_cop0+56 /* EPC */
	add $0x24, %esp			/* unwind the memory-handler stack frame */
	mov $0x6000022, %edx		/* opcode-class bitmask, rotated below */
	mov %ecx, %ebp
	movswl %cx, %eax		/* eax = sign-extended 16-bit offset */
	shr $26, %ecx			/* ecx = primary opcode */
	shr $21, %ebp
	sub %eax, %ebx			/* undo the offset: candidate base value */
	and $0x1f, %ebp			/* ebp = base register number (rs) */
	ror %cl, %edx			/* CF = mask bit for this opcode class;
					   NOTE(review): selects whether the original
					   register value is kept instead -- verify */
	mov reg_cop0+16, %esi		/* esi = Context */
	cmovc reg(,%ebp,8), %ebx	/* masked opcodes: keep current register value */
	and $0xFF80000F, %esi		/* clear Context.BadVPN2 field */
	mov %ebx, reg(,%ebp,8)		/* restore base register (low word) */
	add %ebx, %eax			/* reconstruct the bad virtual address */
	sar $31, %ebx			/* sign-extension of the restored value */
	mov %eax, reg_cop0+32 /* BadVAddr */
	shr $9, %eax
	test $2, %edi			/* flag bit 1: keep the existing high word */
	cmove reg+4(,%ebp,8), %ebx
	add $-12, %esp			/* stack alignment for the call */
	and $0x007FFFF0, %eax		/* Context.BadVPN2 bits */
	mov %ebx, reg+4(,%ebp,8)	/* restore base register (high word) */
	push $0x80000180		/* general exception vector */
	or %eax, %esi
	mov %esi, reg_cop0+16 /* Context */
	call get_addr_ht
	add $16, %esp
	movl next_interupt, %edi
	movl reg_cop0+36, %esi /* Count */
	movl %edi, last_count
	subl %edi, %esi			/* esi = cycle count for the handler */
	jmp *%eax
.size tlb_exception, .-tlb_exception
| 924 | |
.globl breakpoint
.hidden breakpoint
.type breakpoint, @function
/* Empty, zero-length symbol: a named address on which to set a debugger
   breakpoint.  It has no body and no ret, so it must never actually be
   called -- presumably referenced by address only; TODO confirm. */
breakpoint:
.size breakpoint, .-breakpoint