57871462 |
1 | /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * |
2 | * Mupen64plus - linkage_x86.s * |
3 | * Copyright (C) 2009-2010 Ari64 * |
4 | * * |
5 | * This program is free software; you can redistribute it and/or modify * |
6 | * it under the terms of the GNU General Public License as published by * |
7 | * the Free Software Foundation; either version 2 of the License, or * |
8 | * (at your option) any later version. * |
9 | * * |
10 | * This program is distributed in the hope that it will be useful, * |
11 | * but WITHOUT ANY WARRANTY; without even the implied warranty of * |
12 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * |
13 | * GNU General Public License for more details. * |
14 | * * |
15 | * You should have received a copy of the GNU General Public License * |
16 | * along with this program; if not, write to the * |
17 | * Free Software Foundation, Inc., * |
18 | * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. * |
19 | * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ |
.file "linkage_x86.s"
.bss
.align 4
.globl rdram
/* rdram is not reserved here: it is defined as the absolute address
   0x80000000, i.e. the emulator presumably maps the 8MB RDRAM at that
   fixed virtual address so generated code can address it directly
   (the commented-out .space reservation below was the old approach). */
rdram = 0x80000000
/*rdram:
.space 8388608
.type rdram, %object
.size rdram, 8388608
*/
.section .rodata
.text
.globl dyna_linker
.type dyna_linker, @function
dyna_linker:
/* Link a direct jump in generated code to its target block, patching
 * the jump in place, then enter the block.  Compiles the target first
 * if needed.  Nonstandard convention (not cdecl):
 *   eax = virtual target address
 *   ebx = address of the 4-byte jump displacement to patch
 *   esi = cycle count (CCREG), preserved
 * Does not return; ends with an indirect jump into compiled code. */
/* eax = virtual target address */
/* ebx = instruction to patch */
  mov %eax, %edi
  mov %eax, %ecx
  shr $12, %edi                      /* edi = virtual page number */
  cmp $0xC0000000, %eax              /* signed: taken for KUSEG and KSEG2/3, */
  cmovge tlb_LUT_r(,%edi,4), %ecx    /*   i.e. TLB-mapped regions -> translate */
  test %ecx, %ecx
  cmovz %eax, %ecx                   /* no TLB entry: keep the virtual address */
  xor $0x80000000, %ecx              /* strip the KSEG0 base */
  mov $2047, %edx
  shr $12, %ecx                      /* ecx = physical-ish page index */
  and %ecx, %edx
  or $2048, %edx                     /* candidate index in the upper half of the table */
  cmp %edx, %ecx
  cmova %edx, %ecx                   /* out-of-range pages fold into 2048..4095 */
  /* jump_in lookup */
  mov jump_in(,%ecx,4), %edx         /* linked list of blocks starting in this page */
.A1:
  test %edx, %edx
  je .A3                             /* end of list: fall back to hash table */
  mov (%edx), %edi                   /* entry vaddr */
  xor %eax, %edi
  or 4(%edx), %edi                   /* match requires vaddr equal and word 1 zero */
  je .A2
  movl 12(%edx), %edx                /* entry = entry->next */
  jmp .A1
.A2:
  /* Match found: register the link, then patch the jump displacement */
  mov (%ebx), %edi                   /* current displacement at the patch site */
  mov %esi, %ebp                     /* save CCREG (ebp is scratch here) */
  lea 4(%ebx,%edi,1), %esi           /* esi = address the jump currently targets */
  mov %eax, %edi
  pusha                              /* pusha leaves edi/esi topmost: they become */
  call add_link                      /*   add_link's two stack arguments */
  popa
  mov 8(%edx), %edi                  /* edi = compiled entry point */
  mov %ebp, %esi                     /* restore CCREG */
  lea -4(%edi), %edx
  subl %ebx, %edx                    /* new relative displacement */
  movl %edx, (%ebx)                  /* patch the jump in place */
  jmp *%edi
.A3:
  /* hash_table lookup */
  mov %eax, %edi
  mov %eax, %edx
  shr $16, %edi
  shr $12, %edx
  xor %eax, %edi                     /* hash = (vaddr ^ (vaddr >> 16)) & 0xFFFF */
  and $2047, %edx
  movzwl %di, %edi
  shl $4, %edi                       /* 16 bytes per bucket (two addr/ptr pairs) */
  cmp $2048, %ecx
  cmovc %edx, %ecx                   /* small index: use the plain page number */
  cmp hash_table(%edi), %eax
  jne .A5
.A4:
  mov hash_table+4(%edi), %edx
  jmp *%edx                          /* hash hit: enter compiled code */
.A5:
  cmp hash_table+8(%edi), %eax       /* second slot of the bucket */
  lea 8(%edi), %edi
  je .A4
  /* jump_dirty lookup */
  mov jump_dirty(,%ecx,4), %edx
.A6:
  testl %edx, %edx
  je .A8                             /* nothing compiled at all: recompile */
  mov (%edx), %ecx
  xor %eax, %ecx
  or 4(%edx), %ecx
  je .A7
  movl 12(%edx), %edx
  jmp .A6
.A7:
  mov 8(%edx), %edx                  /* edx = (dirty) compiled entry point */
  /* hash_table insert */
  mov hash_table-8(%edi), %ebx       /* edi still points 8 past the bucket base, */
  mov hash_table-4(%edi), %ecx       /*   so -8/-4 is slot 0 and +0/+4 is slot 1 */
  mov %eax, hash_table-8(%edi)       /* promote this entry to slot 0, */
  mov %edx, hash_table-4(%edi)       /*   demoting the old slot 0 to slot 1 */
  mov %ebx, hash_table(%edi)
  mov %ecx, hash_table+4(%edi)
  jmp *%edx
.A8:
  /* Not compiled: compile now (edi on top of pusha frame = argument),
     then restart the whole lookup */
  mov %eax, %edi
  pusha
  call new_recompile_block
  test %eax, %eax                    /* nonzero return = instruction fetch fault */
  popa                               /* popa does not disturb the flags */
  je dyna_linker
  /* pagefault: fall through into exec_pagefault */
  mov %eax, %ebx                     /* fault address */
  mov $0x08, %ecx                    /* cause: TLBL (load/fetch miss) */
.size dyna_linker, .-dyna_linker
129 | |
.globl exec_pagefault
.type exec_pagefault, @function
exec_pagefault:
/* Raise a TLB exception: fill in the COP0 registers and jump to the
 * compiled code at the exception vector (0x80000000).
 *   eax = instruction pointer (becomes EPC)
 *   ebx = fault address
 *   ecx = cause value
 *   esi = cycle count, preserved across the call
 * Does not return. */
/* eax = instruction pointer */
/* ebx = fault address */
/* ecx = cause */
  mov reg_cop0+48, %edx              /* Status */
  mov reg_cop0+16, %edi              /* Context */
  or $2, %edx                        /* set Status.EXL */
  mov %ebx, reg_cop0+32 /* BadVAddr */
  and $0xFF80000F, %edi              /* clear the Context.BadVPN2 field */
  mov %edx, reg_cop0+48 /* Status */
  mov %ecx, reg_cop0+52 /* Cause */
  mov %eax, reg_cop0+56 /* EPC */
  mov %ebx, %ecx
  shr $9, %ebx
  and $0xFFFFE000, %ecx              /* VPN2 for EntryHi */
  and $0x007FFFF0, %ebx              /* BadVPN2 for Context */
  mov %ecx, reg_cop0+40 /* EntryHI */
  or %ebx, %edi
  mov %edi, reg_cop0+16 /* Context */
  push %esi                          /* preserve CCREG across get_addr_ht */
  push $0x80000000                   /* exception vector address */
  call get_addr_ht
  pop %esi                           /* discard the argument */
  pop %esi                           /* restore CCREG */
  jmp *%eax
.size exec_pagefault, .-exec_pagefault
158 | |
/* Special dynamic linker for the case where a page fault
   may occur in a branch delay slot */
.globl dyna_linker_ds
.type dyna_linker_ds, @function
dyna_linker_ds:
/* Same contract as dyna_linker (eax = target vaddr, ebx = patch site,
 * esi = CCREG), but compiles the target as a delay-slot entry and, on
 * a fetch fault, reports the fault as occurring in the delay slot. */
  mov %eax, %edi
  mov %eax, %ecx
  shr $12, %edi                      /* edi = virtual page number */
  cmp $0xC0000000, %eax              /* TLB-mapped region? translate if so */
  cmovge tlb_LUT_r(,%edi,4), %ecx
  test %ecx, %ecx
  cmovz %eax, %ecx                   /* unmapped: keep the virtual address */
  xor $0x80000000, %ecx              /* strip the KSEG0 base */
  mov $2047, %edx
  shr $12, %ecx                      /* page index */
  and %ecx, %edx
  or $2048, %edx
  cmp %edx, %ecx
  cmova %edx, %ecx                   /* fold large indices into 2048..4095 */
  /* jump_in lookup */
  mov jump_in(,%ecx,4), %edx
.B1:
  test %edx, %edx
  je .B3                             /* end of list: try the hash table */
  mov (%edx), %edi
  xor %eax, %edi
  or 4(%edx), %edi
  je .B2
  movl 12(%edx), %edx                /* next entry */
  jmp .B1
.B2:
  /* Match: register the link and patch the jump (ecx saves CCREG here) */
  mov (%ebx), %edi                   /* current displacement at the patch site */
  mov %esi, %ecx                     /* save CCREG */
  lea 4(%ebx,%edi,1), %esi           /* esi = address the jump currently targets */
  mov %eax, %edi
  pusha                              /* edi/esi on top of stack = add_link args */
  call add_link
  popa
  mov 8(%edx), %edi                  /* compiled entry point */
  mov %ecx, %esi                     /* restore CCREG */
  lea -4(%edi), %edx
  subl %ebx, %edx                    /* new relative displacement */
  movl %edx, (%ebx)                  /* patch in place */
  jmp *%edi
.B3:
  /* hash_table lookup */
  mov %eax, %edi
  mov %eax, %edx
  shr $16, %edi
  shr $12, %edx
  xor %eax, %edi                     /* hash = (vaddr ^ (vaddr >> 16)) & 0xFFFF */
  and $2047, %edx
  movzwl %di, %edi
  shl $4, %edi                       /* 16-byte buckets */
  cmp $2048, %ecx
  cmovc %edx, %ecx
  cmp hash_table(%edi), %eax
  jne .B5
.B4:
  mov hash_table+4(%edi), %edx
  jmp *%edx
.B5:
  cmp hash_table+8(%edi), %eax       /* second slot */
  lea 8(%edi), %edi
  je .B4
  /* jump_dirty lookup */
  mov jump_dirty(,%ecx,4), %edx
.B6:
  testl %edx, %edx
  je .B8
  mov (%edx), %ecx
  xor %eax, %ecx
  or 4(%edx), %ecx
  je .B7
  movl 12(%edx), %edx
  jmp .B6
.B7:
  mov 8(%edx), %edx                  /* dirty compiled entry point */
  /* hash_table insert: promote this entry to slot 0 of its bucket */
  mov hash_table-8(%edi), %ebx
  mov hash_table-4(%edi), %ecx
  mov %eax, hash_table-8(%edi)
  mov %edx, hash_table-4(%edi)
  mov %ebx, hash_table(%edi)
  mov %ecx, hash_table+4(%edi)
  jmp *%edx
.B8:
  /* Compile as a delay-slot entry: (addr & ~7) | 1 marks it as such */
  mov %eax, %edi
  and $0xFFFFFFF8, %edi
  inc %edi
  pusha                              /* edi on top = argument */
  call new_recompile_block
  test %eax, %eax                    /* nonzero = fetch fault */
  popa
  je dyna_linker_ds
  /* pagefault */
  and $0xFFFFFFF8, %eax
  mov $0x80000008, %ecx /* High bit set indicates pagefault in delay slot */
  mov %eax, %ebx                     /* fault address = delay slot */
  sub $4, %eax                       /* EPC = the branch instruction */
  jmp exec_pagefault
.size dyna_linker_ds, .-dyna_linker_ds
261 | |
.globl jump_vaddr_eax
.type jump_vaddr_eax, @function
jump_vaddr_eax:
  /* Indirect-jump helper: target virtual address arrives in eax */
  mov %eax, %edi
  jmp jump_vaddr_edi
.size jump_vaddr_eax, .-jump_vaddr_eax
.globl jump_vaddr_ecx
.type jump_vaddr_ecx, @function
jump_vaddr_ecx:
  /* Indirect-jump helper: target virtual address arrives in ecx */
  mov %ecx, %edi
  jmp jump_vaddr_edi
.size jump_vaddr_ecx, .-jump_vaddr_ecx
.globl jump_vaddr_edx
.type jump_vaddr_edx, @function
jump_vaddr_edx:
  /* Indirect-jump helper: target virtual address arrives in edx */
  mov %edx, %edi
  jmp jump_vaddr_edi
.size jump_vaddr_edx, .-jump_vaddr_edx
.globl jump_vaddr_ebx
.type jump_vaddr_ebx, @function
jump_vaddr_ebx:
  /* Indirect-jump helper: target virtual address arrives in ebx */
  mov %ebx, %edi
  jmp jump_vaddr_edi
.size jump_vaddr_ebx, .-jump_vaddr_ebx
.globl jump_vaddr_ebp
.type jump_vaddr_ebp, @function
jump_vaddr_ebp:
  /* Indirect-jump helper: target arrives in ebp.
     NOTE: deliberately falls through into jump_vaddr_edi below —
     the adjacency of these functions is load-bearing. */
  mov %ebp, %edi
.size jump_vaddr_ebp, .-jump_vaddr_ebp
.globl jump_vaddr_edi
.type jump_vaddr_edi, @function
jump_vaddr_edi:
  /* Canonical entry: target in edi; copy to eax for the hash below.
     Falls through into jump_vaddr — adjacency is load-bearing. */
  mov %edi, %eax
.size jump_vaddr_edi, .-jump_vaddr_edi
296 | |
.globl jump_vaddr
.type jump_vaddr, @function
jump_vaddr:
/* Enter compiled code for the virtual address in edi (copy in eax).
 * Tries the hash table first; on miss, saves CCREG (esi) and calls the
 * compiler via get_addr.  Does not return. */
/* Check hash table */
  shr $16, %eax
  xor %edi, %eax                     /* hash = (addr ^ (addr >> 16)) & 0xFFFF */
  movzwl %ax, %eax
  shl $4, %eax                       /* 16-byte buckets (two addr/ptr pairs) */
  cmp hash_table(%eax), %edi
  jne .C2
.C1:
  mov hash_table+4(%eax), %edi
  jmp *%edi                          /* hit: enter compiled code */
.C2:
  cmp hash_table+8(%eax), %edi       /* second slot of the bucket */
  lea 8(%eax), %eax
  je .C1
/* No hit on hash table, call compiler */
  push %edi                          /* argument: virtual address */
  mov %esi, cycle_count /* CCREG */
  call get_addr
  mov cycle_count, %esi
  add $4, %esp
  jmp *%eax
.size jump_vaddr, .-jump_vaddr
322 | |
.globl verify_code_ds
.type verify_code_ds, @function
verify_code_ds:
  /* Delay-slot variant: stash ebp (the branch target) so the verify
     loop below may clobber it, then fall through into verify_code_vm.
     Adjacency is load-bearing. */
  mov %ebp, branch_target
.size verify_code_ds, .-verify_code_ds
328 | |
.globl verify_code_vm
.type verify_code_vm, @function
verify_code_vm:
/* Verify a dirty block whose source may be TLB-mapped: translate the
 * source address through memory_map, check that every page of the
 * block has the same mapping, then fall through into verify_code.
 * On any failure, .D5 recompiles instead of returning. */
/* eax = source (virtual address) */
/* ebx = target */
/* ecx = length */
  cmp $0xC0000000, %eax
  jl verify_code                     /* signed: only KSEG0/KSEG1 take this — direct */
  mov %eax, %edx
  lea -1(%eax,%ecx,1), %ebp          /* address of the last source byte */
  shr $12, %edx                      /* first page */
  shr $12, %ebp                      /* last page */
  mov memory_map(,%edx,4), %edi
  test %edi, %edi
  js .D5                             /* negative map entry = unmapped: recompile */
  lea (%eax,%edi,4), %eax            /* translate source to a host address */
.D1:
  xor memory_map(,%edx,4), %edi      /* every page must share the same offset */
  shl $2, %edi                       /* ignore the flag bits in the top of the entry */
  jne .D5
  mov memory_map(,%edx,4), %edi
  inc %edx
  cmp %ebp, %edx
  jbe .D1
  /* falls through into verify_code */
.size verify_code_vm, .-verify_code_vm
354 | |
.globl verify_code
.type verify_code, @function
verify_code:
/* Compare a block's source against the copy taken at compile time.
 * On match: restore esi/ebp and return to the compiled code.
 * On mismatch: discard the return address and recompile via get_addr
 * (the virtual-address argument was already pushed by the caller). */
/* eax = source */
/* ebx = target */
/* ecx = length */
  mov -4(%eax,%ecx,1), %edi          /* check the last word first */
  xor -4(%ebx,%ecx,1), %edi
  jne .D5
  mov %ecx, %edx
  add $-4, %ecx
  je .D3                             /* length was exactly 4: done */
  test $4, %edx
  cmove %edx, %ecx                   /* round remaining count to a multiple of 8 */
  mov %esi, cycle_count              /* free esi for use in the compare loop */
.D2:
  /* Compare two words per iteration, walking down from the end */
  mov -4(%eax,%ecx,1), %edx
  mov -4(%ebx,%ecx,1), %ebp
  mov -8(%eax,%ecx,1), %esi
  xor %edx, %ebp
  mov -8(%ebx,%ecx,1), %edi
  jne .D4
  xor %esi, %edi
  jne .D4
  add $-8, %ecx
  jne .D2
  mov cycle_count, %esi              /* all equal: restore and return */
  mov branch_target, %ebp
.D3:
  ret
.D4:
  mov cycle_count, %esi              /* mismatch inside the loop: restore esi */
.D5:
  /* Code has changed: recompile instead of returning */
  mov branch_target, %ebp
  add $4, %esp /* pop return address, we're not returning */
  call get_addr
  add $4, %esp /* pop virtual address */
  jmp *%eax
.size verify_code, .-verify_code
394 | |
.globl cc_interrupt
.type cc_interrupt, @function
cc_interrupt:
/* Called from compiled code when the cycle count expires.  Updates
 * COP0 Count, runs gen_interupt, then either returns to the compiled
 * caller, jumps to a pending-exception target, or unwinds and exits
 * the dynarec entirely when 'stop' is set.
 *   esi = cycle count (CCREG), relative to last_count */
  add last_count, %esi
  add $-28, %esp /* Align stack */
  mov %esi, reg_cop0+36 /* Count */
  shr $19, %esi
  movl $0, pending_exception
  and $0x7f, %esi                    /* index into the restore_candidate bitmap */
  cmpl $0, restore_candidate(,%esi,4)
  jne .E4                            /* some dirty blocks may be clean again */
.E1:
  call gen_interupt
  mov reg_cop0+36, %esi
  mov next_interupt, %eax
  mov pending_exception, %ebx
  mov stop, %ecx
  add $28, %esp
  mov %eax, last_count
  sub %eax, %esi                     /* new CCREG = Count - next_interupt */
  test %ecx, %ecx
  jne .E3                            /* stop requested: exit the dynarec */
  test %ebx, %ebx
  jne .E2                            /* exception raised: enter its handler */
  ret                                /* back to the compiled caller */
.E2:
  mov pcaddr, %edi
  mov %esi, cycle_count /* CCREG */
  push %edi
  call get_addr_ht
  mov cycle_count, %esi
  add $8, %esp                       /* drop the argument and the return address */
  jmp *%eax
.E3:
  /* Unwind the frame set up by new_dyna_start and return to C */
  add $16, %esp /* pop stack */
  pop %edi /* restore edi */
  pop %esi /* restore esi */
  pop %ebx /* restore ebx */
  pop %ebp /* restore ebp */
  ret /* exit dynarec */
.E4:
  /* Move 'dirty' blocks to the 'clean' list */
  mov restore_candidate(,%esi,4), %ebx
  mov %esi, %ebp
  movl $0, restore_candidate(,%esi,4)
  shl $5, %ebp                       /* first page of this 32-page group */
.E5:
  shr $1, %ebx                       /* CF = candidate bit for page ebp */
  jnc .E6
  mov %ebp, (%esp)                   /* page number as clean_blocks' stack arg */
  call clean_blocks
.E6:
  inc %ebp
  test $31, %ebp
  jne .E5                            /* loop over all 32 pages of the group */
  jmp .E1
.size cc_interrupt, .-cc_interrupt
452 | |
.globl do_interrupt
.type do_interrupt, @function
do_interrupt:
/* Resume execution after an interrupt: look up the block at pcaddr,
 * recompute CCREG from Count/next_interupt, and enter it. */
  mov pcaddr, %edi
  push %edi
  call get_addr_ht
  add $4, %esp
  mov reg_cop0+36, %esi              /* Count */
  mov next_interupt, %ebx
  mov %ebx, last_count
  sub %ebx, %esi                     /* CCREG = Count - next_interupt */
  add $2, %esi                       /* small cycle adjustment — TODO confirm why 2 */
  jmp *%eax
.size do_interrupt, .-do_interrupt
467 | |
.globl fp_exception
.type fp_exception, @function
fp_exception:
/* Raise a coprocessor-unusable exception (COP1).  eax = EPC.
 * Shares its tail (.E7) with fp_exception_ds.  Does not return. */
  mov $0x1000002c, %edx              /* Cause: CpU exception, coprocessor 1 */
.E7:
  mov reg_cop0+48, %ebx
  or $2, %ebx                        /* set Status.EXL */
  mov %ebx, reg_cop0+48 /* Status */
  mov %edx, reg_cop0+52 /* Cause */
  mov %eax, reg_cop0+56 /* EPC */
  push %esi                          /* preserve CCREG */
  push $0x80000180                   /* general exception vector */
  call get_addr_ht
  pop %esi                           /* discard argument */
  pop %esi                           /* restore CCREG */
  jmp *%eax
.size fp_exception, .-fp_exception
485 | |
.globl fp_exception_ds
.type fp_exception_ds, @function
fp_exception_ds:
  /* Delay-slot variant of fp_exception: same Cause plus the BD bit */
  mov $0x9000002c, %edx /* Set high bit if delay slot */
  jmp .E7
.size fp_exception_ds, .-fp_exception_ds
492 | |
.globl jump_syscall
.type jump_syscall, @function
jump_syscall:
/* Raise a SYSCALL exception.  eax = EPC.  Does not return. */
  mov $0x20, %edx                    /* Cause: syscall */
  mov reg_cop0+48, %ebx
  or $2, %ebx                        /* set Status.EXL */
  mov %ebx, reg_cop0+48 /* Status */
  mov %edx, reg_cop0+52 /* Cause */
  mov %eax, reg_cop0+56 /* EPC */
  push %esi                          /* preserve CCREG */
  push $0x80000180                   /* general exception vector */
  call get_addr_ht
  pop %esi                           /* discard argument */
  pop %esi                           /* restore CCREG */
  jmp *%eax
.size jump_syscall, .-jump_syscall
509 | |
.globl jump_eret
.type jump_eret, @function
jump_eret:
/* Implement ERET: clear Status.EXL, check for pending interrupts,
 * then enter the block at EPC.  Builds a bitmap in edi — one bit per
 * GPR pair (r31 down to r1) plus one combined bit for hi/lo — marking
 * which values are NOT 32-bit sign-extension clean, and passes it to
 * get_addr_32 so the matching 32/64-bit block variant is chosen. */
  mov reg_cop0+48, %ebx /* Status */
  add last_count, %esi
  and $0xFFFFFFFD, %ebx              /* clear EXL */
  mov %esi, reg_cop0+36 /* Count */
  mov %ebx, reg_cop0+48 /* Status */
  call check_interupt
  mov next_interupt, %eax
  mov reg_cop0+36, %esi
  mov %eax, last_count
  sub %eax, %esi                     /* CCREG = Count - next_interupt */
  mov reg_cop0+56, %eax /* EPC */
  jns .E11                           /* sign of the sub: interrupt due, service first */
.E8:
  mov $248, %ebx                     /* iterate r31 down to r1, 8 bytes per reg */
  xor %edi, %edi
.E9:
  mov reg(%ebx), %ecx
  mov reg+4(%ebx), %edx
  sar $31, %ecx                      /* expected upper word (sign of low word) */
  xor %ecx, %edx                     /* zero iff upper word is pure sign extension */
  neg %edx                           /* CF = (register not 32-bit clean) */
  adc %edi, %edi                     /* shift the flag into the bitmap */
  sub $8, %ebx
  jne .E9
  mov hi(%ebx), %ecx                 /* ebx == 0 here */
  mov hi+4(%ebx), %edx
  sar $31, %ecx
  xor %ecx, %edx
  jne .E10                           /* hi already dirty: no need to check lo */
  mov lo(%ebx), %ecx
  mov lo+4(%ebx), %edx
  sar $31, %ecx
  xor %ecx, %edx
.E10:
  neg %edx                           /* combined hi/lo flag */
  adc %edi, %edi
  push %edi                          /* get_addr_32(EPC, bitmap) */
  push %eax
  mov %esi, cycle_count
  call get_addr_32
  mov cycle_count, %esi
  add $8, %esp
  jmp *%eax
.E11:
  mov %eax, pcaddr
  call cc_interrupt
  mov pcaddr, %eax                   /* EPC may have been redirected */
  jmp .E8
.size jump_eret, .-jump_eret
562 | |
.globl new_dyna_start
.type new_dyna_start, @function
new_dyna_start:
/* C entry point into the dynarec.  Saves the callee-saved registers,
 * compiles the block at the boot address, initializes CCREG, and jumps
 * to the start of the translation cache.  The dynarec is exited via
 * cc_interrupt's .E3 path, which unwinds this frame and returns. */
  push %ebp
  push %ebx
  push %esi
  push %edi
  push $0xa4000040                   /* boot address (SP DMEM entry point) */
  call new_recompile_block
  add $-8, %esp /* align stack */
  movl next_interupt, %edi
  movl reg_cop0+36, %esi             /* Count */
  movl %edi, last_count
  subl %edi, %esi                    /* esi = CCREG */
  jmp 0x70000000                     /* fixed base of the translation cache */
.size new_dyna_start, .-new_dyna_start
579 | |
.globl write_rdram_new
.type write_rdram_new, @function
write_rdram_new:
/* RDRAM write handler: store the 32-bit 'word' at 'address', then
 * invalidate any compiled code in that page (shared tail .E12). */
  mov address, %edi
  mov word, %ecx
  mov %ecx, rdram-0x80000000(%edi)   /* rdram is the absolute base 0x80000000 */
  jmp .E12
.size write_rdram_new, .-write_rdram_new
588 | |
.globl write_rdramb_new
.type write_rdramb_new, @function
write_rdramb_new:
/* RDRAM byte write handler, then invalidate the page (.E12). */
  mov address, %edi
  xor $3, %edi                       /* big-endian byte position in the host word */
  movb byte, %cl
  movb %cl, rdram-0x80000000(%edi)
  jmp .E12
.size write_rdramb_new, .-write_rdramb_new
598 | |
.globl write_rdramh_new
.type write_rdramh_new, @function
write_rdramh_new:
/* RDRAM halfword write handler, then invalidate the page (.E12). */
  mov address, %edi
  xor $2, %edi                       /* big-endian halfword position in the host word */
  movw hword, %cx
  movw %cx, rdram-0x80000000(%edi)
  jmp .E12
.size write_rdramh_new, .-write_rdramh_new
608 | |
.globl write_rdramd_new
.type write_rdramd_new, @function
write_rdramd_new:
/* RDRAM doubleword write handler, then invalidate the page (.E12).
 * The high word (dword+4) goes to the lower address: big-endian
 * doubleword layout over little-endian host words. */
  mov address, %edi
  mov dword+4, %ecx
  mov dword, %edx
  mov %ecx, rdram-0x80000000(%edi)
  mov %edx, rdram-0x80000000+4(%edi)
  jmp .E12
.size write_rdramd_new, .-write_rdramd_new
619 | |
.globl do_invalidate
.type do_invalidate, @function
do_invalidate:
/* Invalidate compiled code in the page containing 'address', unless
 * the page is already marked invalid.  Leaves the address in ebx and
 * the page number in edi for callers (write_nomem* depend on this).
 * .E12 is the shared tail used by the write_rdram* handlers. */
  mov address, %edi
  mov %edi, %ebx /* Return ebx to caller */
.E12:
  shr $12, %edi                      /* page number */
  cmpb $1, invalid_code(%edi)
  je .E13                            /* already invalid: nothing to do */
  push %edi
  call invalidate_block
  pop %edi
.E13:
  ret
.size do_invalidate, .-do_invalidate
635 | |
.globl read_nomem_new
.type read_nomem_new, @function
read_nomem_new:
/* TLB-mapped 32-bit read: translate 'address' via memory_map and
 * store the value in readmem_dword.  A negative map entry means
 * unmapped -> raise a TLB load exception (cause 0x8). */
  mov address, %edi
  mov %edi, %ebx
  shr $12, %edi                      /* page number */
  mov memory_map(,%edi,4),%edi
  mov $0x8, %eax                     /* cause: TLBL, in case we fault */
  test %edi, %edi
  js tlb_exception                   /* sign bit set = unmapped */
  mov (%ebx,%edi,4), %ecx            /* host addr = vaddr + (map entry << 2) */
  mov %ecx, readmem_dword
  ret
.size read_nomem_new, .-read_nomem_new
650 | |
.globl read_nomemb_new
.type read_nomemb_new, @function
read_nomemb_new:
/* TLB-mapped byte read (zero-extended into readmem_dword); raises a
 * TLB load exception (cause 0x8) on an unmapped page. */
  mov address, %edi
  mov %edi, %ebx
  shr $12, %edi
  mov memory_map(,%edi,4),%edi
  mov $0x8, %eax                     /* cause: TLBL */
  test %edi, %edi
  js tlb_exception
  xor $3, %ebx                       /* big-endian byte position */
  movzbl (%ebx,%edi,4), %ecx
  mov %ecx, readmem_dword
  ret
.size read_nomemb_new, .-read_nomemb_new
666 | |
.globl read_nomemh_new
.type read_nomemh_new, @function
read_nomemh_new:
/* TLB-mapped halfword read (zero-extended into readmem_dword); raises
 * a TLB load exception (cause 0x8) on an unmapped page. */
  mov address, %edi
  mov %edi, %ebx
  shr $12, %edi
  mov memory_map(,%edi,4),%edi
  mov $0x8, %eax                     /* cause: TLBL */
  test %edi, %edi
  js tlb_exception
  xor $2, %ebx                       /* big-endian halfword position */
  movzwl (%ebx,%edi,4), %ecx
  mov %ecx, readmem_dword
  ret
.size read_nomemh_new, .-read_nomemh_new
682 | |
.globl read_nomemd_new
.type read_nomemd_new, @function
read_nomemd_new:
/* TLB-mapped doubleword read into readmem_dword (high word from the
 * lower address — big-endian layout); raises a TLB load exception
 * (cause 0x8) on an unmapped page. */
  mov address, %edi
  mov %edi, %ebx
  shr $12, %edi
  mov memory_map(,%edi,4),%edi
  mov $0x8, %eax                     /* cause: TLBL */
  test %edi, %edi
  js tlb_exception
  mov 4(%ebx,%edi,4), %ecx
  mov (%ebx,%edi,4), %edx
  mov %ecx, readmem_dword
  mov %edx, readmem_dword+4
  ret
.size read_nomemd_new, .-read_nomemd_new
699 | |
.globl write_nomem_new
.type write_nomem_new, @function
write_nomem_new:
/* TLB-mapped 32-bit write: invalidate the page's compiled code, then
 * translate and store 'word'.  do_invalidate leaves ebx = address and
 * edi = page number.  shl $2 moves the map entry's protection bit into
 * CF — set for unmapped/write-protected pages (TODO confirm bit 30
 * semantics) -> TLB store exception (cause 0xc). */
  call do_invalidate
  mov memory_map(,%edi,4),%edi
  mov word, %ecx
  mov $0xc, %eax                     /* cause: TLBS, in case we fault */
  shl $2, %edi
  jc tlb_exception
  mov %ecx, (%ebx,%edi)              /* host addr = vaddr + (map entry << 2) */
  ret
.size write_nomem_new, .-write_nomem_new
712 | |
.globl write_nomemb_new
.type write_nomemb_new, @function
write_nomemb_new:
/* TLB-mapped byte write; see write_nomem_new for the invalidate /
 * translate / protection-bit (CF) pattern.  Cause 0xc on fault. */
  call do_invalidate
  mov memory_map(,%edi,4),%edi
  movb byte, %cl
  mov $0xc, %eax                     /* cause: TLBS */
  shl $2, %edi
  jc tlb_exception
  xor $3, %ebx                       /* big-endian byte position */
  movb %cl, (%ebx,%edi)
  ret
.size write_nomemb_new, .-write_nomemb_new
726 | |
.globl write_nomemh_new
.type write_nomemh_new, @function
write_nomemh_new:
/* TLB-mapped halfword write; see write_nomem_new for the invalidate /
 * translate / protection-bit (CF) pattern.  Cause 0xc on fault. */
  call do_invalidate
  mov memory_map(,%edi,4),%edi
  movw hword, %cx
  mov $0xc, %eax                     /* cause: TLBS */
  shl $2, %edi
  jc tlb_exception
  xor $2, %ebx                       /* big-endian halfword position */
  movw %cx, (%ebx,%edi)
  ret
.size write_nomemh_new, .-write_nomemh_new
740 | |
.globl write_nomemd_new
.type write_nomemd_new, @function
write_nomemd_new:
/* TLB-mapped doubleword write; high word (dword+4) to the lower
 * address (big-endian layout).  See write_nomem_new for the
 * invalidate / translate / protection-bit (CF) pattern. */
  call do_invalidate
  mov memory_map(,%edi,4),%edi
  mov dword+4, %edx
  mov dword, %ecx
  mov $0xc, %eax                     /* cause: TLBS */
  shl $2, %edi
  jc tlb_exception
  mov %edx, (%ebx,%edi)
  mov %ecx, 4(%ebx,%edi)
  ret
.size write_nomemd_new, .-write_nomemd_new
755 | |
.globl tlb_exception
.type tlb_exception, @function
tlb_exception:
/* Raise a TLB miss exception from one of the memory handlers above.
 * Decodes the faulting MIPS instruction to recover the base register
 * value (which the generated address computation may have modified),
 * fills in COP0 state, unwinds the handler's stack frame, and enters
 * the compiled exception handler at 0x80000180.  Does not return. */
/* eax = cause */
/* ebx = address */
/* ebp = instr addr + flags */
  mov 0x24(%esp), %ebp               /* pushed by the generated memory-access stub */
  /* Debug:
  push %ebp
  push %ebx
  push %eax
  call tlb_debug
  pop %eax
  pop %ebx
  pop %ebp
  /* end debug */
  mov reg_cop0+48, %esi              /* Status */
  mov %ebp, %ecx
  mov %ebp, %edx
  mov %ebp, %edi
  shl $31, %ebp                      /* flag bit 0 (delay slot) -> sign bit */
  shr $12, %ecx                      /* page of the faulting instruction */
  or %ebp, %eax                      /* set Cause.BD when in a delay slot */
  sar $29, %ebp                      /* -4 if delay slot, else 0 */
  and $0xFFFFFFFC, %edx              /* instruction address without flag bits */
  mov memory_map(,%ecx,4), %ecx
  or $2, %esi                        /* Status.EXL */
  mov (%edx, %ecx, 4), %ecx          /* ecx = the faulting MIPS opcode word */
  add %ebp, %edx                     /* EPC = branch address when in a delay slot */
  mov %esi, reg_cop0+48 /* Status */
  mov %eax, reg_cop0+52 /* Cause */
  mov %edx, reg_cop0+56 /* EPC */
  add $0x24, %esp                    /* unwind the handler's stack frame */
  mov $0x6000022, %edx               /* opcode mask, rotated below — marks ops */
  mov %ecx, %ebp                     /*   (LWL/LWR/LDL/LDR-type — TODO confirm) */
  movswl %cx, %eax                   /* sign-extended immediate offset */
  shr $26, %ecx                      /* primary opcode */
  shr $21, %ebp
  sub %eax, %ebx                     /* undo the offset: recover the base value */
  and $0x1f, %ebp                    /* base register number (rs) */
  ror %cl, %edx                      /* CF = mask bit for this opcode */
  mov reg_cop0+16, %esi              /* Context */
  cmovc reg(,%ebp,8), %ebx           /* flagged opcodes: keep rs as stored instead */
  and $0xFF80000F, %esi              /* clear Context.BadVPN2 */
  mov %ebx, reg(,%ebp,8)             /* write back the recovered base register */
  add %ebx, %eax                     /* faulting virtual address */
  sar $31, %ebx                      /* sign-extend for the upper word */
  mov %eax, reg_cop0+32 /* BadVAddr */
  shr $9, %eax
  test $2, %edi                      /* flag bit 1: update upper word? TODO confirm */
  cmove reg+4(,%ebp,8), %ebx         /* bit clear: keep the old upper word */
  and $0x007FFFF0, %eax              /* Context.BadVPN2 */
  push $0x80000180                   /* general exception vector */
  mov %ebx, reg+4(,%ebp,8)
  or %eax, %esi
  mov %esi, reg_cop0+16 /* Context */
  call get_addr_ht
  pop %esi
  movl next_interupt, %edi
  movl reg_cop0+36, %esi /* Count */
  movl %edi, last_count
  subl %edi, %esi                    /* esi = CCREG */
  jmp *%eax
.size tlb_exception, .-tlb_exception