| 1 | /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * |
| 2 | * Mupen64plus - linkage_x86_64.s * |
| 3 | * Copyright (C) 2009-2010 Ari64 * |
| 4 | * * |
| 5 | * This program is free software; you can redistribute it and/or modify * |
| 6 | * it under the terms of the GNU General Public License as published by * |
| 7 | * the Free Software Foundation; either version 2 of the License, or * |
| 8 | * (at your option) any later version. * |
| 9 | * * |
| 10 | * This program is distributed in the hope that it will be useful, * |
| 11 | * but WITHOUT ANY WARRANTY; without even the implied warranty of * |
| 12 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * |
| 13 | * GNU General Public License for more details. * |
| 14 | * * |
| 15 | * You should have received a copy of the GNU General Public License * |
| 16 | * along with this program; if not, write to the * |
| 17 | * Free Software Foundation, Inc., * |
| 18 | * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. * |
| 19 | * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ |
/* File-scope setup.  GAS/AT&T syntax, x86-64.
 * These stubs glue recompiler-generated code to the C core; they use a
 * custom internal register convention (documented per function below),
 * not the SysV AMD64 ABI. */
.file "linkage_x86_64.s"
.bss
.align 4
/* rdram is defined elsewhere; the fixed-address definition is disabled. */
//.globl rdram
//rdram = 0x80000000
.section .rodata
.text
/* dyna_linker -- resolve a dynamic branch target and patch the caller.
 *
 * Custom dynarec convention (not SysV):
 *   eax = virtual target address
 *   ebx = address of the call instruction to patch (assumed in low 4GB)
 *   esi = cycle count (CCREG, see cycle_count usage below); preserved
 *
 * Lookup order: jump_in list -> hash_table -> jump_dirty list; if nothing
 * is cached, compiles via new_recompile_block and retries.  On compile
 * failure, falls through into exec_pagefault (next block). */
.globl dyna_linker
.type dyna_linker, @function
dyna_linker:
	/* eax = virtual target address */
	/* ebx = instruction to patch */
	mov	%eax, %edi
	mov	%eax, %ecx
	shr	$12, %edi			/* edi = virtual page number */
	cmp	$0xC0000000, %eax
	cmovge	tlb_LUT_r(,%edi,4), %ecx	/* TLB-mapped range: translate via read LUT */
	test	%ecx, %ecx
	cmovz	%eax, %ecx			/* no mapping: keep the virtual address */
	xor	$0x80000000, %ecx
	mov	$2047, %edx
	shr	$12, %ecx			/* ecx = page index */
	and	%ecx, %edx
	or	$2048, %edx			/* page folded into 2048..4095 mirror slot */
	cmp	%edx, %ecx
	cmova	%edx, %ecx			/* pages above 2047 use the mirrored slot */
	/* jump_in lookup: list of compiled entry points for this page */
	movq	jump_in(,%ecx,8), %r12
.A1:
	test	%r12, %r12
	je	.A3				/* end of list: try the hash table */
	mov	(%r12), %edi
	xor	%eax, %edi
	or	4(%r12), %edi			/* hit iff vaddr matches and field at +4 is 0 */
	je	.A2
	movq	16(%r12), %r12			/* next list node */
	jmp	.A1
.A2:
	/* Found a compiled target: record the link, then patch the call
	   displacement at (ebx) to jump directly to the compiled code. */
	mov	(%ebx), %edi
	mov	%esi, %ebp			/* save cycle count across add_link */
	lea	4(%ebx,%edi,1), %esi		/* esi = current branch destination */
	mov	%eax, %edi
	call	add_link
	mov	8(%r12), %edi			/* edi = compiled entry point */
	mov	%ebp, %esi			/* restore cycle count */
	lea	-4(%edi), %edx
	subl	%ebx, %edx			/* edx = new relative displacement */
	movl	%edx, (%ebx)			/* patch the caller in place */
	jmp	*%rdi				/* enter the compiled block */
.A3:
	/* hash_table lookup: 16-byte buckets of two {vaddr, entry} pairs */
	mov	%eax, %edi
	mov	%eax, %edx
	shr	$16, %edi
	shr	$12, %edx
	xor	%eax, %edi
	and	$2047, %edx
	movzwl	%di, %edi
	shl	$4, %edi			/* edi = bucket byte offset */
	cmp	$2048, %ecx
	cmovc	%edx, %ecx			/* unmirrored page index when below 2048 */
	cmp	hash_table(%edi), %eax
	jne	.A5
.A4:
	mov	hash_table+4(%edi), %edx
	jmp	*%rdx				/* hash hit: jump to cached entry point */
.A5:
	cmp	hash_table+8(%edi), %eax	/* second slot of the bucket */
	lea	8(%edi), %edi
	je	.A4
	/* jump_dirty lookup: blocks whose source pages may have been written */
	movq	jump_dirty(,%ecx,8), %r12
.A6:
	test	%r12, %r12
	je	.A8				/* nothing cached at all: compile */
	mov	(%r12), %ecx
	xor	%eax, %ecx
	or	4(%r12), %ecx
	je	.A7
	movq	16(%r12), %r12
	jmp	.A6
.A7:
	movl	8(%r12), %edx			/* edx = entry (dirty-check stub) for this block */
	/* hash_table insert: promote this entry to the bucket's front slot */
	mov	hash_table-8(%edi), %ebx
	mov	hash_table-4(%edi), %ecx
	mov	%eax, hash_table-8(%edi)
	mov	%edx, hash_table-4(%edi)
	mov	%ebx, hash_table(%edi)
	mov	%ecx, hash_table+4(%edi)
	jmp	*%rdx
.A8:
	/* Not compiled yet: run the recompiler, then retry the whole lookup. */
	mov	%eax, %edi
	mov	%eax, %ebp /* Note: assumes %rbx and %rbp are callee-saved */
	mov	%esi, %r12d
	call	new_recompile_block
	test	%eax, %eax			/* nonzero return = compilation failed */
	mov	%ebp, %eax
	mov	%r12d, %esi
	je	dyna_linker
	/* pagefault: fall through into exec_pagefault below */
	mov	%eax, %ebx			/* ebx = fault address */
	mov	$0x08, %ecx			/* ecx = cause (presumably MIPS TLBL, ExcCode 2<<2) */
.size dyna_linker, .-dyna_linker
| 124 | |
/* exec_pagefault -- record a TLB fault in CP0 and enter the exception vector.
 *
 *   eax = faulting instruction pointer (written to EPC)
 *   ebx = fault address (written to BadVAddr, EntryHi, Context)
 *   ecx = cause value (written to Cause)
 *   esi = cycle count; preserved across get_addr_ht
 *
 * Jumps to the compiled code for the 0x80000000 exception vector. */
.globl exec_pagefault
.type exec_pagefault, @function
exec_pagefault:
	/* eax = instruction pointer */
	/* ebx = fault address */
	/* ecx = cause */
	mov	reg_cop0+48, %edx		/* Status */
	mov	reg_cop0+16, %edi		/* Context */
	or	$2, %edx			/* set Status bit 1 (EXL) */
	mov	%ebx, reg_cop0+32 /* BadVAddr */
	and	$0xFF80000F, %edi		/* clear the BadVPN2 field of Context */
	mov	%edx, reg_cop0+48 /* Status */
	mov	%ecx, reg_cop0+52 /* Cause */
	mov	%eax, reg_cop0+56 /* EPC */
	mov	%ebx, %ecx
	shr	$9, %ebx
	and	$0xFFFFE000, %ecx		/* VPN2 of the fault address */
	and	$0x007FFFF0, %ebx		/* new BadVPN2 field value */
	mov	%ecx, reg_cop0+40 /* EntryHI */
	or	%ebx, %edi
	mov	%edi, reg_cop0+16 /* Context */
	mov	%esi, %ebx			/* save cycle count */
	mov	$0x80000000, %edi		/* exception vector address */
	call	get_addr_ht			/* look up / compile the vector code */
	mov	%ebx, %esi
	jmp	*%rax
.size exec_pagefault, .-exec_pagefault
| 152 | |
/* Special dynamic linker for the case where a page fault
   may occur in a branch delay slot.
 *
 * Same convention and lookup sequence as dyna_linker:
 *   eax = virtual target address (low 3 bits may carry flags, see .B8)
 *   ebx = address of the call instruction to patch
 *   esi = cycle count; preserved (saved in r13d here instead of ebp)
 * On compile failure, jumps to exec_pagefault with the delay-slot bit
 * set in the cause. */
.globl dyna_linker_ds
.type dyna_linker_ds, @function
dyna_linker_ds:
	mov	%eax, %edi
	mov	%eax, %ecx
	shr	$12, %edi			/* edi = virtual page number */
	cmp	$0xC0000000, %eax
	cmovge	tlb_LUT_r(,%edi,4), %ecx	/* TLB-mapped range: translate via read LUT */
	test	%ecx, %ecx
	cmovz	%eax, %ecx			/* no mapping: keep the virtual address */
	xor	$0x80000000, %ecx
	mov	$2047, %edx
	shr	$12, %ecx			/* ecx = page index */
	and	%ecx, %edx
	or	$2048, %edx
	cmp	%edx, %ecx
	cmova	%edx, %ecx			/* pages above 2047 use the mirrored slot */
	/* jump_in lookup */
	movq	jump_in(,%ecx,8), %r12
.B1:
	test	%r12, %r12
	je	.B3				/* end of list: try the hash table */
	mov	(%r12), %edi
	xor	%eax, %edi
	or	4(%r12), %edi			/* hit iff vaddr matches and field at +4 is 0 */
	je	.B2
	movq	16(%r12), %r12
	jmp	.B1
.B2:
	/* Found a compiled target: record the link and patch the caller. */
	mov	(%ebx), %edi
	mov	%esi, %r13d			/* save cycle count across add_link */
	lea	4(%ebx,%edi,1), %esi		/* esi = current branch destination */
	mov	%eax, %edi
	call	add_link
	mov	8(%r12), %edi			/* edi = compiled entry point */
	mov	%r13d, %esi			/* restore cycle count */
	lea	-4(%edi), %edx
	subl	%ebx, %edx			/* edx = new relative displacement */
	movl	%edx, (%ebx)			/* patch the caller in place */
	jmp	*%rdi
.B3:
	/* hash_table lookup */
	mov	%eax, %edi
	mov	%eax, %edx
	shr	$16, %edi
	shr	$12, %edx
	xor	%eax, %edi
	and	$2047, %edx
	movzwl	%di, %edi
	shl	$4, %edi			/* edi = bucket byte offset */
	cmp	$2048, %ecx
	cmovc	%edx, %ecx
	cmp	hash_table(%edi), %eax
	jne	.B5
.B4:
	mov	hash_table+4(%edi), %edx
	jmp	*%rdx				/* hash hit */
.B5:
	cmp	hash_table+8(%edi), %eax	/* second slot of the bucket */
	lea	8(%edi), %edi
	je	.B4
	/* jump_dirty lookup */
	movq	jump_dirty(,%ecx,8), %r12
.B6:
	test	%r12, %r12
	je	.B8				/* nothing cached: compile */
	mov	(%r12), %ecx
	xor	%eax, %ecx
	or	4(%r12), %ecx
	je	.B7
	movq	16(%r12), %r12
	jmp	.B6
.B7:
	movl	8(%r12), %edx			/* edx = entry (dirty-check stub) */
	/* hash_table insert */
	mov	hash_table-8(%edi), %ebx
	mov	hash_table-4(%edi), %ecx
	mov	%eax, hash_table-8(%edi)
	mov	%edx, hash_table-4(%edi)
	mov	%ebx, hash_table(%edi)
	mov	%ecx, hash_table+4(%edi)
	jmp	*%rdx
.B8:
	/* Compile the block.  The low 3 bits of eax appear to carry flags:
	   they are masked off and bit 0 set before recompiling. */
	mov	%eax, %edi
	mov	%eax, %r12d /* Note: assumes %rbx and %rbp are callee-saved */
	and	$0xFFFFFFF8, %edi
	mov	%esi, %r13d			/* save cycle count */
	inc	%edi				/* mark as delay-slot compile -- TODO confirm */
	call	new_recompile_block
	test	%eax, %eax			/* nonzero return = compilation failed */
	mov	%r12d, %eax
	mov	%r13d, %esi
	je	dyna_linker_ds
	/* pagefault */
	and	$0xFFFFFFF8, %eax
	mov	$0x80000008, %ecx /* High bit set indicates pagefault in delay slot */
	mov	%eax, %ebx			/* ebx = fault address */
	sub	$4, %eax			/* EPC = branch, one insn before the slot */
	jmp	exec_pagefault
.size dyna_linker_ds, .-dyna_linker_ds
| 255 | |
/* jump_vaddr_eax -- dispatch an indirect jump whose target is in %eax.
 * Moves the target into %edi and tail-calls the common jump_vaddr path. */
.globl jump_vaddr_eax
.type jump_vaddr_eax, @function
jump_vaddr_eax:
	mov	%eax, %edi
	jmp	jump_vaddr_edi
.size jump_vaddr_eax, .-jump_vaddr_eax
/* jump_vaddr_ecx -- as jump_vaddr_eax, but the target address is in %ecx. */
.globl jump_vaddr_ecx
.type jump_vaddr_ecx, @function
jump_vaddr_ecx:
	mov	%ecx, %edi
	jmp	jump_vaddr_edi
.size jump_vaddr_ecx, .-jump_vaddr_ecx
/* jump_vaddr_edx -- as jump_vaddr_eax, but the target address is in %edx. */
.globl jump_vaddr_edx
.type jump_vaddr_edx, @function
jump_vaddr_edx:
	mov	%edx, %edi
	jmp	jump_vaddr_edi
.size jump_vaddr_edx, .-jump_vaddr_edx
/* jump_vaddr_ebx -- as jump_vaddr_eax, but the target address is in %ebx. */
.globl jump_vaddr_ebx
.type jump_vaddr_ebx, @function
jump_vaddr_ebx:
	mov	%ebx, %edi
	jmp	jump_vaddr_edi
.size jump_vaddr_ebx, .-jump_vaddr_ebx
/* jump_vaddr_ebp -- as jump_vaddr_eax, but the target address is in %ebp.
 * Note: no jmp -- deliberately falls through into jump_vaddr_edi below. */
.globl jump_vaddr_ebp
.type jump_vaddr_ebp, @function
jump_vaddr_ebp:
	mov	%ebp, %edi
.size jump_vaddr_ebp, .-jump_vaddr_ebp
/* jump_vaddr_edi -- common tail of the jump_vaddr_* thunks: target in %edi.
 * Copies the target into %eax (jump_vaddr uses eax as hash scratch) and
 * falls through into jump_vaddr below (no jmp). */
.globl jump_vaddr_edi
.type jump_vaddr_edi, @function
jump_vaddr_edi:
	mov	%edi, %eax
.size jump_vaddr_edi, .-jump_vaddr_edi
| 290 | |
/* jump_vaddr -- jump to the compiled code for a virtual address.
 *
 *   edi = target virtual address
 *   eax = copy of the target (consumed as hash scratch)
 *   esi = cycle count; spilled to cycle_count around the C call
 *
 * Checks the two slots of the hash-table bucket; on miss, calls get_addr
 * to (re)compile and jumps to the result. */
.globl jump_vaddr
.type jump_vaddr, @function
jump_vaddr:
	/* Check hash table */
	shr	$16, %eax
	xor	%edi, %eax
	movzwl	%ax, %eax
	shl	$4, %eax			/* eax = bucket byte offset */
	cmp	hash_table(%eax), %edi
	jne	.C2
.C1:
	mov	hash_table+4(%eax), %edi
	jmp	*%rdi				/* hash hit: enter cached entry point */
.C2:
	cmp	hash_table+8(%eax), %edi	/* second slot of the bucket */
	lea	8(%eax), %eax
	je	.C1
	/* No hit on hash table, call compiler */
	mov	%esi, cycle_count /* CCREG */
	call	get_addr
	mov	cycle_count, %esi
	jmp	*%rax
.size jump_vaddr, .-jump_vaddr
| 314 | |
/* verify_code_ds -- delay-slot variant of code verification.
 * On x86-64 this is just an alias: the nop falls through into
 * verify_code_vm below. */
.globl verify_code_ds
.type verify_code_ds, @function
verify_code_ds:
	nop
.size verify_code_ds, .-verify_code_ds
| 320 | |
/* verify_code_vm -- verify a block whose source is a TLB-mapped address.
 *
 *   eax = source (virtual address)
 *   ebx = target (copy to compare against)
 *   ecx = length in bytes
 *
 * Addresses below 0xC0000000 (signed compare) go straight to verify_code.
 * Otherwise, checks that every page of the range has the same memory_map
 * translation, converts eax to a host pointer, and falls through into
 * verify_code.  On any inconsistency, jumps to .D4 (the recompile path
 * inside verify_code below). */
.globl verify_code_vm
.type verify_code_vm, @function
verify_code_vm:
	/* eax = source (virtual address) */
	/* ebx = target */
	/* ecx = length */
	cmp	$0xC0000000, %eax
	jl	verify_code			/* not TLB-mapped: compare directly */
	mov	%eax, %edx
	lea	-1(%eax,%ecx,1), %r9d		/* r9d = last byte of the range */
	shr	$12, %edx			/* edx = first page */
	shr	$12, %r9d			/* r9d = last page */
	mov	memory_map(,%edx,4), %edi
	test	%edi, %edi
	js	.D4				/* negative map entry = invalid mapping */
	lea	(%eax,%edi,4), %eax		/* eax = translated host address */
	mov	%edi, %r8d			/* r8d = expected map entry */
.D1:
	xor	memory_map(,%edx,4), %edi
	shl	$2, %edi
	jne	.D4				/* page maps differently: re-verify via compiler */
	mov	%r8d, %edi
	inc	%edx
	cmp	%r9d, %edx
	jbe	.D1				/* check every page of the range */
.size verify_code_vm, .-verify_code_vm
| 347 | |
/* verify_code -- compare a compiled block's source against its saved copy.
 *
 *   eax = source (host-addressable)
 *   ebx = target (saved copy)
 *   ecx = length in bytes (multiple of 4, per the 4/8-byte stepping below)
 *   r12d = instruction pointer, used only on mismatch
 *
 * Returns normally when the two ranges are identical.  On mismatch (.D4),
 * discards its own return address and jumps to freshly compiled code from
 * get_addr instead. */
.globl verify_code
.type verify_code, @function
verify_code:
	/* eax = source */
	/* ebx = target */
	/* ecx = length */
	/* r12d = instruction pointer */
	mov	-4(%eax,%ecx,1), %edi		/* compare the trailing 4 bytes first */
	xor	-4(%ebx,%ecx,1), %edi
	jne	.D4
	mov	%ecx, %edx
	add	$-4, %ecx
	je	.D3				/* length was exactly 4: done */
	test	$4, %edx
	cmove	%edx, %ecx			/* align remaining count to a multiple of 8 */
.D2:
	mov	-8(%eax,%ecx,1), %rdi		/* compare 8 bytes per iteration, downward */
	cmp	-8(%ebx,%ecx,1), %rdi
	jne	.D4
	add	$-8, %ecx
	jne	.D2
.D3:
	ret					/* code unchanged */
.D4:
	add	$8, %rsp /* pop return address, we're not returning */
	mov	%r12d, %edi			/* recompile at the instruction pointer */
	mov	%esi, %ebx			/* save cycle count */
	call	get_addr
	mov	%ebx, %esi
	jmp	*%rax
.size verify_code, .-verify_code
| 379 | |
/* cc_interrupt -- cycle-count expired: run the interrupt handler.
 *
 *   esi = cycle count relative to last_count on entry
 *
 * Updates CP0 Count, clears pending_exception, optionally restores
 * "dirty" blocks flagged in restore_candidate, then calls gen_interupt.
 * Afterwards:
 *   - stop set        -> unwind and exit the dynarec (.E3)
 *   - exception set   -> re-enter generated code at pcaddr (.E2)
 *   - otherwise       -> return to the generated caller with esi updated */
.globl cc_interrupt
.type cc_interrupt, @function
cc_interrupt:
	add	last_count, %esi		/* esi = absolute Count */
	add	$-8, %rsp /* Align stack */
	mov	%esi, reg_cop0+36 /* Count */
	shr	$19, %esi
	movl	$0, pending_exception
	and	$0x7f, %esi			/* index into the restore_candidate bitmap */
	cmpl	$0, restore_candidate(,%esi,4)
	jne	.E4				/* some blocks may be restorable to clean */
.E1:
	call	gen_interupt
	mov	reg_cop0+36, %esi
	mov	next_interupt, %eax
	mov	pending_exception, %ebx
	mov	stop, %ecx
	add	$8, %rsp			/* undo the alignment adjustment */
	mov	%eax, last_count
	sub	%eax, %esi			/* esi = Count relative to next_interupt */
	test	%ecx, %ecx
	jne	.E3				/* stop requested: leave the dynarec */
	test	%ebx, %ebx
	jne	.E2				/* exception raised: resume at pcaddr */
	ret
.E2:
	mov	pcaddr, %edi
	mov	%esi, cycle_count /* CCREG */
	call	get_addr_ht
	mov	cycle_count, %esi
	add	$8, %rsp /* pop return address */
	jmp	*%rax
.E3:
	/* Exit the dynarec: unwind back through new_dyna_start's frame. */
	pop	%rbp /* pop return address and discard it */
	pop	%rbp /* pop junk */
	pop	%r15 /* restore callee-save registers */
	pop	%r14
	pop	%r13
	pop	%r12
	pop	%rbx
	pop	%rbp
	ret /* exit dynarec */
.E4:
	/* Move 'dirty' blocks to the 'clean' list */
	mov	restore_candidate(,%esi,4), %ebx	/* 32-bit bitmap of candidates */
	mov	%esi, %ebp
	movl	$0, restore_candidate(,%esi,4)
	shl	$5, %ebp			/* first block index of this 32-entry group */
.E5:
	shr	$1, %ebx			/* CF = next candidate bit */
	jnc	.E6
	mov	%ebp, %edi
	call	clean_blocks
.E6:
	inc	%ebp
	test	$31, %ebp
	jne	.E5				/* iterate all 32 bits of the group */
	jmp	.E1
.size cc_interrupt, .-cc_interrupt
| 439 | |
/* do_interrupt -- resume generated code after an interrupt.
 * Looks up (or compiles) the code at pcaddr, recomputes the relative
 * cycle count from Count and next_interupt, and jumps in. */
.globl do_interrupt
.type do_interrupt, @function
do_interrupt:
	mov	pcaddr, %edi			/* resume address recorded by the core */
	call	get_addr_ht
	mov	reg_cop0+36, %esi		/* Count */
	mov	next_interupt, %ebx
	mov	%ebx, last_count
	sub	%ebx, %esi			/* esi = cycle count relative to next_interupt */
	add	$2, %esi			/* small fixed adjustment -- TODO confirm why +2 */
	jmp	*%rax
.size do_interrupt, .-do_interrupt
| 452 | |
/* fp_exception -- raise a floating-point (coprocessor) exception.
 *
 *   eax = faulting instruction pointer (EPC)
 *   esi = cycle count; preserved across get_addr_ht
 *
 * Sets Cause = 0x1000002c, Status.EXL, EPC, then enters the compiled code
 * for the 0x80000180 general exception vector.  .E7 is shared with
 * fp_exception_ds below, which supplies a Cause with the BD bit set. */
.globl fp_exception
.type fp_exception, @function
fp_exception:
	mov	$0x1000002c, %edx
.E7:
	mov	reg_cop0+48, %ebx
	or	$2, %ebx			/* set Status bit 1 (EXL) */
	mov	%ebx, reg_cop0+48 /* Status */
	mov	%edx, reg_cop0+52 /* Cause */
	mov	%eax, reg_cop0+56 /* EPC */
	mov	%esi, %ebx			/* save cycle count */
	mov	$0x80000180, %edi		/* general exception vector */
	call	get_addr_ht
	mov	%ebx, %esi
	jmp	*%rax
.size fp_exception, .-fp_exception
| 469 | |
/* fp_exception_ds -- fp_exception variant for a fault in a branch delay
 * slot: same path (.E7) with the high (BD) bit set in the Cause value. */
.globl fp_exception_ds
.type fp_exception_ds, @function
fp_exception_ds:
	mov	$0x9000002c, %edx /* Set high bit if delay slot */
	jmp	.E7
.size fp_exception_ds, .-fp_exception_ds
| 476 | |
/* jump_syscall -- raise a SYSCALL exception.
 *
 *   eax = faulting instruction pointer (EPC)
 *   esi = cycle count; preserved across get_addr_ht
 *
 * Cause = 0x20 (presumably ExcCode 8 << 2, Syscall), then enters the
 * compiled code for the 0x80000180 vector. */
.globl jump_syscall
.type jump_syscall, @function
jump_syscall:
	mov	$0x20, %edx
	mov	reg_cop0+48, %ebx
	or	$2, %ebx			/* set Status bit 1 (EXL) */
	mov	%ebx, reg_cop0+48 /* Status */
	mov	%edx, reg_cop0+52 /* Cause */
	mov	%eax, reg_cop0+56 /* EPC */
	mov	%esi, %ebx			/* save cycle count */
	mov	$0x80000180, %edi		/* general exception vector */
	call	get_addr_ht
	mov	%ebx, %esi
	jmp	*%rax
.size jump_syscall, .-jump_syscall
| 492 | |
/* jump_eret -- emulate ERET: return from exception to EPC.
 *
 *   esi = cycle count relative to last_count
 *
 * Clears Status.EXL, updates Count, calls check_interupt.  If the count
 * has already reached next_interupt (sign of esi clear after the sub),
 * services the interrupt first via cc_interrupt (.E11).  Then builds in
 * esi a 32-bit map describing, for each 64-bit GPR (and hi/lo), whether
 * its upper half is NOT a plain sign-extension of the lower half (the
 * neg/adc pair shifts CF = "edx != 0" into esi), and calls get_addr_32
 * with edi = EPC so the compiler knows which registers are 32-bit. */
.globl jump_eret
.type jump_eret, @function
jump_eret:
	mov	reg_cop0+48, %ebx /* Status */
	add	last_count, %esi
	and	$0xFFFFFFFD, %ebx		/* clear Status bit 1 (EXL) */
	mov	%esi, reg_cop0+36 /* Count */
	mov	%ebx, reg_cop0+48 /* Status */
	call	check_interupt
	mov	next_interupt, %eax
	mov	reg_cop0+36, %esi
	mov	%eax, last_count
	sub	%eax, %esi			/* esi = Count - next_interupt (sets SF) */
	mov	reg_cop0+56, %edi /* EPC */
	jns	.E11				/* count expired: service interrupt first */
.E8:
	mov	%esi, %r12d			/* save cycle count */
	mov	$248, %ebx			/* start at reg[31] (offset 31*8) */
	xor	%esi, %esi			/* esi = register-width bitmap, built below */
.E9:
	mov	reg(%ebx), %ecx			/* low word */
	mov	reg+4(%ebx), %edx		/* high word */
	sar	$31, %ecx			/* ecx = sign-extension of the low word */
	xor	%ecx, %edx			/* zero iff high word == sign extension */
	neg	%edx				/* CF = (high word differs) */
	adc	%esi, %esi			/* shift that bit into the map */
	sub	$8, %ebx
	jne	.E9				/* loop reg[31] .. reg[1] */
	mov	hi(%ebx), %ecx			/* same check for hi... */
	mov	hi+4(%ebx), %edx
	sar	$31, %ecx
	xor	%ecx, %edx
	jne	.E10				/* hi already differs: skip checking lo */
	mov	lo(%ebx), %ecx			/* ...else check lo */
	mov	lo+4(%ebx), %edx
	sar	$31, %ecx
	xor	%ecx, %edx
.E10:
	neg	%edx				/* CF = (hi or lo has a dirty upper half) */
	adc	%esi, %esi
	call	get_addr_32			/* edi = EPC, esi = 32-bit-ness map */
	mov	%r12d, %esi			/* restore cycle count */
	jmp	*%rax
.E11:
	mov	%edi, pcaddr			/* resume at EPC after the interrupt */
	call	cc_interrupt
	mov	pcaddr, %edi
	jmp	.E8
.size jump_eret, .-jump_eret
| 542 | |
/* new_dyna_start -- enter the dynarec from C (SysV caller).
 *
 * Saves the SysV callee-saved registers (popped again by cc_interrupt's
 * exit path, .E3), compiles the block at 0xa4000040 (presumably the N64
 * boot entry point in SP DMEM -- TODO confirm), sets up the cycle count
 * in esi, and jumps to the fixed translation-cache base 0x70000000. */
.globl new_dyna_start
.type new_dyna_start, @function
new_dyna_start:
	push	%rbp
	push	%rbx
	push	%r12
	push	%r13
	push	%r14
	push	%r15
	mov	$0xa4000040, %edi
	call	new_recompile_block
	add	$-8, %rsp /* align stack */
	movl	next_interupt, %edi
	movl	reg_cop0+36, %esi		/* Count */
	movl	%edi, last_count
	subl	%edi, %esi			/* esi = cycle count (CCREG) */
	jmp	0x70000000			/* enter generated code at the cache base */
.size new_dyna_start, .-new_dyna_start
| 561 | |
/* write_rdram_new -- 32-bit RDRAM store handler.
 * Reads the globals `address` and `word` (set by generated code), strips
 * the 0x80000000 segment bit, stores to rdram, then jumps to .E12 in
 * do_invalidate to flush any compiled code in the written page. */
.globl write_rdram_new
.type write_rdram_new, @function
write_rdram_new:
	mov	address, %edi
	mov	word, %ecx
	and	$0x7FFFFFFF, %edi		/* virtual -> physical offset */
	mov	%ecx, rdram(%rdi)
	jmp	.E12				/* invalidate the page (in do_invalidate) */
.size write_rdram_new, .-write_rdram_new
| 571 | |
/* write_rdramb_new -- 8-bit RDRAM store handler.
 * As write_rdram_new; the address is XORed with 3 to account for the
 * byte-swapped (big-endian-in-little-endian) RDRAM layout. */
.globl write_rdramb_new
.type write_rdramb_new, @function
write_rdramb_new:
	mov	address, %edi
	xor	$3, %edi			/* endianness byte swizzle */
	movb	byte, %cl
	and	$0x7FFFFFFF, %edi
	movb	%cl, rdram(%rdi)
	jmp	.E12				/* invalidate the page (in do_invalidate) */
.size write_rdramb_new, .-write_rdramb_new
| 582 | |
/* write_rdramh_new -- 16-bit RDRAM store handler.
 * As write_rdram_new; the address is XORed with 2 for the halfword
 * swizzle of the byte-swapped RDRAM layout. */
.globl write_rdramh_new
.type write_rdramh_new, @function
write_rdramh_new:
	mov	address, %edi
	xor	$2, %edi			/* endianness halfword swizzle */
	movw	hword, %cx
	and	$0x7FFFFFFF, %edi
	movw	%cx, rdram(%rdi)
	jmp	.E12				/* invalidate the page (in do_invalidate) */
.size write_rdramh_new, .-write_rdramh_new
| 593 | |
/* write_rdramd_new -- 64-bit RDRAM store handler.
 * Stores the high word (dword+4) at the lower address and the low word
 * 4 bytes above it, matching the word-swapped doubleword layout used by
 * the read handlers below. */
.globl write_rdramd_new
.type write_rdramd_new, @function
write_rdramd_new:
	mov	address, %edi
	mov	dword+4, %ecx			/* high word of the 64-bit value */
	mov	dword, %edx			/* low word */
	and	$0x7FFFFFFF, %edi
	mov	%ecx, rdram(%rdi)
	mov	%edx, rdram+4(%rdi)
	jmp	.E12				/* invalidate the page (in do_invalidate) */
.size write_rdramd_new, .-write_rdramd_new
| 605 | |
/* do_invalidate -- invalidate compiled code covering `address`, if any.
 *
 * .E12 is the shared tail jumped to by the write_rdram*_new handlers
 * above (which enter with edi already holding the store address).
 * Leaves ebx = address and r12d = page number for the write_nomem*_new
 * callers.  Skips invalidate_block when invalid_code[page] is already 1. */
.globl do_invalidate
.type do_invalidate, @function
do_invalidate:
	mov	address, %edi
	mov	%edi, %ebx /* Return ebx to caller */
.E12:
	shr	$12, %edi			/* edi = page number */
	mov	%edi, %r12d /* Return r12 to caller */
	cmpb	$1, invalid_code(%edi)
	je	.E13				/* page already marked: nothing to flush */
	call	invalidate_block
.E13:
	ret
.size do_invalidate, .-do_invalidate
| 620 | |
/* read_nomem_new -- 32-bit load through the TLB memory_map.
 * Translates `address` via memory_map (entries are word offsets, hence
 * the *4 scaling); a negative entry means no mapping, raising a TLB
 * exception with eax = 0x8 (presumably TLBL).  Result goes to
 * readmem_dword. */
.globl read_nomem_new
.type read_nomem_new, @function
read_nomem_new:
	mov	address, %edi
	mov	%edi, %ebx
	shr	$12, %edi			/* page number */
	mov	memory_map(,%edi,4),%edi
	mov	$0x8, %eax			/* cause if the access faults */
	test	%edi, %edi
	js	tlb_exception			/* negative entry = unmapped */
	mov	(%ebx,%edi,4), %ecx		/* host address = vaddr + map*4 */
	mov	%ecx, readmem_dword
	ret
.size read_nomem_new, .-read_nomem_new
| 635 | |
/* read_nomemb_new -- 8-bit load through the TLB memory_map.
 * As read_nomem_new, with the byte-swizzle XOR 3 applied before the
 * zero-extending byte load. */
.globl read_nomemb_new
.type read_nomemb_new, @function
read_nomemb_new:
	mov	address, %edi
	mov	%edi, %ebx
	shr	$12, %edi			/* page number */
	mov	memory_map(,%edi,4),%edi
	mov	$0x8, %eax			/* cause if the access faults */
	test	%edi, %edi
	js	tlb_exception			/* negative entry = unmapped */
	xor	$3, %ebx			/* endianness byte swizzle */
	movzbl	(%ebx,%edi,4), %ecx
	mov	%ecx, readmem_dword
	ret
.size read_nomemb_new, .-read_nomemb_new
| 651 | |
/* read_nomemh_new -- 16-bit load through the TLB memory_map.
 * As read_nomem_new, with the halfword swizzle XOR 2 applied before the
 * zero-extending halfword load. */
.globl read_nomemh_new
.type read_nomemh_new, @function
read_nomemh_new:
	mov	address, %edi
	mov	%edi, %ebx
	shr	$12, %edi			/* page number */
	mov	memory_map(,%edi,4),%edi
	mov	$0x8, %eax			/* cause if the access faults */
	test	%edi, %edi
	js	tlb_exception			/* negative entry = unmapped */
	xor	$2, %ebx			/* endianness halfword swizzle */
	movzwl	(%ebx,%edi,4), %ecx
	mov	%ecx, readmem_dword
	ret
.size read_nomemh_new, .-read_nomemh_new
| 667 | |
/* read_nomemd_new -- 64-bit load through the TLB memory_map.
 * As read_nomem_new; reads the high word from the lower address (the
 * word-swapped layout also used by write_rdramd_new) and stores both
 * halves to readmem_dword. */
.globl read_nomemd_new
.type read_nomemd_new, @function
read_nomemd_new:
	mov	address, %edi
	mov	%edi, %ebx
	shr	$12, %edi			/* page number */
	mov	memory_map(,%edi,4),%edi
	mov	$0x8, %eax			/* cause if the access faults */
	test	%edi, %edi
	js	tlb_exception			/* negative entry = unmapped */
	mov	4(%ebx,%edi,4), %ecx		/* low half (word-swapped layout) */
	mov	(%ebx,%edi,4), %edx		/* high half */
	mov	%ecx, readmem_dword
	mov	%edx, readmem_dword+4
	ret
.size read_nomemd_new, .-read_nomemd_new
| 684 | |
/* write_nomem_new -- 32-bit store through the TLB memory_map.
 * do_invalidate leaves ebx = address and r12d = page.  The map entry is
 * scaled to a byte offset with shl $2; the carry out (bit 30 of the
 * entry -- presumably the write-protect/invalid flag, TODO confirm)
 * raises a TLB exception with eax = 0xc (presumably TLBS). */
.globl write_nomem_new
.type write_nomem_new, @function
write_nomem_new:
	call	do_invalidate			/* flush compiled code; sets ebx, r12d */
	mov	memory_map(,%r12d,4),%edi
	mov	word, %ecx
	mov	$0xc, %eax			/* cause if the access faults */
	shl	$2, %edi			/* scale map entry; CF = protection flag */
	jc	tlb_exception
	mov	%ecx, (%ebx,%edi)		/* edi already scaled by 4 */
	ret
.size write_nomem_new, .-write_nomem_new
| 697 | |
/* write_nomemb_new -- 8-bit store through the TLB memory_map.
 * As write_nomem_new, with the byte swizzle XOR 3. */
.globl write_nomemb_new
.type write_nomemb_new, @function
write_nomemb_new:
	call	do_invalidate			/* flush compiled code; sets ebx, r12d */
	mov	memory_map(,%r12d,4),%edi
	movb	byte, %cl
	mov	$0xc, %eax			/* cause if the access faults */
	shl	$2, %edi			/* scale map entry; CF = protection flag */
	jc	tlb_exception
	xor	$3, %ebx			/* endianness byte swizzle */
	movb	%cl, (%ebx,%edi)
	ret
.size write_nomemb_new, .-write_nomemb_new
| 711 | |
/* write_nomemh_new -- 16-bit store through the TLB memory_map.
 * As write_nomem_new, with the halfword swizzle XOR 2. */
.globl write_nomemh_new
.type write_nomemh_new, @function
write_nomemh_new:
	call	do_invalidate			/* flush compiled code; sets ebx, r12d */
	mov	memory_map(,%r12d,4),%edi
	movw	hword, %cx
	mov	$0xc, %eax			/* cause if the access faults */
	shl	$2, %edi			/* scale map entry; CF = protection flag */
	jc	tlb_exception
	xor	$2, %ebx			/* endianness halfword swizzle */
	movw	%cx, (%ebx,%edi)
	ret
.size write_nomemh_new, .-write_nomemh_new
| 725 | |
/* write_nomemd_new -- 64-bit store through the TLB memory_map.
 * As write_nomem_new; stores the high word (dword+4) at the lower
 * address, matching the word-swapped layout of read_nomemd_new. */
.globl write_nomemd_new
.type write_nomemd_new, @function
write_nomemd_new:
	call	do_invalidate			/* flush compiled code; sets ebx, r12d */
	mov	memory_map(,%r12d,4),%edi
	mov	dword+4, %edx			/* high word */
	mov	dword, %ecx			/* low word */
	mov	$0xc, %eax			/* cause if the access faults */
	shl	$2, %edi			/* scale map entry; CF = protection flag */
	jc	tlb_exception
	mov	%edx, (%ebx,%edi)
	mov	%ecx, 4(%ebx,%edi)
	ret
.size write_nomemd_new, .-write_nomemd_new
| 740 | |
/* tlb_exception -- raise a TLB exception from a read/write handler.
 *
 *   eax = cause value (0x8/0xc from the handlers above)
 *   ebx = faulting (swizzled) address
 *   8(%rsp) = instruction address + flag bits (pushed by generated code):
 *             bit 0 appears to be the branch-delay flag, bit 1 a
 *             "don't restore register" flag -- TODO confirm.
 *
 * Fetches the faulting MIPS instruction (via memory_map), decodes its
 * base register (rs, bits 21-25) and signed 16-bit offset, reconstructs
 * the base register value from the computed address, writes it back to
 * the register file with proper sign extension, fills in the CP0 state,
 * discards the generated code's stack frame, and enters the compiled
 * code for the 0x80000180 vector. */
.globl tlb_exception
.type tlb_exception, @function
tlb_exception:
	/* eax = cause */
	/* ebx = address */
	/* ebp = instr addr + flags */
	mov	8(%rsp), %ebp
	mov	reg_cop0+48, %esi		/* Status */
	mov	%ebp, %ecx
	mov	%ebp, %edx
	mov	%ebp, %edi
	shl	$31, %ebp			/* bit31 = delay-slot flag (flag bit 0) */
	shr	$12, %ecx			/* page of the instruction address */
	or	%ebp, %eax			/* fold the BD flag into Cause */
	sar	$29, %ebp			/* ebp = -4 in a delay slot, else 0 */
	and	$0xFFFFFFFC, %edx		/* instruction address without flag bits */
	mov	memory_map(,%ecx,4), %ecx
	or	$2, %esi			/* set Status bit 1 (EXL) */
	mov	(%edx, %ecx, 4), %ecx		/* ecx = the faulting MIPS instruction word */
	add	%ebp, %edx			/* EPC = branch when in a delay slot */
	mov	%esi, reg_cop0+48 /* Status */
	mov	%eax, reg_cop0+52 /* Cause */
	mov	%edx, reg_cop0+56 /* EPC */
	add	$0x48, %rsp			/* discard the generated code's frame */
	mov	$0x6000022, %edx		/* per-opcode flag mask, indexed via ror below */
	mov	%ecx, %ebp
	movswl	%cx, %eax			/* eax = sign-extended 16-bit offset */
	shr	$26, %ecx			/* ecx = opcode */
	shr	$21, %ebp
	sub	%eax, %ebx			/* base register value = address - offset */
	and	$0x1f, %ebp			/* ebp = base register number (rs) */
	ror	%cl, %edx			/* CF = mask bit for this opcode */
	mov	reg_cop0+16, %esi		/* Context */
	cmovc	reg(,%ebp,8), %ebx		/* flagged opcodes: keep current reg value */
	and	$0xFF80000F, %esi		/* clear the BadVPN2 field of Context */
	mov	%ebx, reg(,%ebp,8)		/* write back the base register (low word) */
	add	%ebx, %eax			/* eax = faulting virtual address */
	sar	$31, %ebx			/* sign extension for the high word */
	mov	%eax, reg_cop0+32 /* BadVAddr */
	shr	$9, %eax
	test	$2, %edi			/* flag bit 1 set: force sign-extended high */
	cmove	reg+4(,%ebp,8), %ebx		/* otherwise keep the existing high word */
	and	$0x007FFFF0, %eax		/* new BadVPN2 field value */
	mov	$0x80000180, %edi		/* general exception vector */
	mov	%ebx, reg+4(,%ebp,8)
	or	%eax, %esi
	mov	%esi, reg_cop0+16 /* Context */
	call	get_addr_ht
	movl	next_interupt, %edi
	movl	reg_cop0+36, %esi /* Count */
	movl	%edi, last_count
	subl	%edi, %esi			/* esi = cycle count (CCREG) */
	jmp	*%rax
.size tlb_exception, .-tlb_exception