Add copyright message to gles_video
[gpsp.git] / x86 / x86_emit.h
1 /* gameplaySP
2  *
3  * Copyright (C) 2006 Exophase <exophase@gmail.com>
4  *
5  * This program is free software; you can redistribute it and/or
6  * modify it under the terms of the GNU General Public License as
7  * published by the Free Software Foundation; either version 2 of
8  * the License, or (at your option) any later version.
9  *
10  * This program is distributed in the hope that it will be useful,
11  * but WITHOUT ANY WARRANTY; without even the implied warranty of
12  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13  * General Public License for more details.
14  *
15  * You should have received a copy of the GNU General Public License
16  * along with this program; if not, write to the Free Software
17  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18  */
19
#ifndef X86_EMIT_H
#define X86_EMIT_H

/* Handles a change of control to the given GBA pc and returns the new
   target — presumably also flushing pending emulation state; confirm in
   the implementation. */
u32 x86_update_gba(u32 pc);

// Although these are defined as a function, don't call them as
// such (jump to it instead)
void x86_indirect_branch_arm(u32 address);
void x86_indirect_branch_thumb(u32 address);
void x86_indirect_branch_dual(u32 address);

/* Writes new_cpsr into the CPSR under store_mask. */
void function_cc execute_store_cpsr(u32 new_cpsr, u32 store_mask);

/* Per-instruction debug hook invoked from translated code. */
void step_debug_x86(u32 pc);
34
/* x86 register numbers in hardware encoding order: the enum value is the
   3-bit register field used in opcode and mod/rm bytes (eax = 0 ... edi = 7). */
typedef enum
{
  x86_reg_number_eax,
  x86_reg_number_ecx,
  x86_reg_number_edx,
  x86_reg_number_ebx,
  x86_reg_number_esp,
  x86_reg_number_ebp,
  x86_reg_number_esi,
  x86_reg_number_edi
} x86_reg_number;
46
/* Emit one byte at the current translation pointer and advance it.
 * Wrapped in do { } while(0) so each invocation is a single statement
 * (safe in unbraced if/else); the argument is parenthesized against
 * operator-precedence surprises. */
#define x86_emit_byte(value)                                                  \
  do                                                                          \
  {                                                                           \
    *translation_ptr = (value);                                               \
    translation_ptr++;                                                        \
  } while(0)

/* Emit a 32-bit little-endian word and advance the translation pointer
 * by four bytes. */
#define x86_emit_dword(value)                                                 \
  do                                                                          \
  {                                                                           \
    *((u32 *)translation_ptr) = (value);                                      \
    translation_ptr += 4;                                                     \
  } while(0)
/* Values for the top two bits of the mod/rm byte: memory operand with no
   displacement, with a signed 8-bit displacement, with a 32-bit
   displacement, or register-direct. */
typedef enum
{
  x86_mod_mem        = 0,
  x86_mod_mem_disp8  = 1,
  x86_mod_mem_disp32 = 2,
  x86_mod_reg        = 3
} x86_mod;
62
/* Emit a mod/rm byte from its three fields: mod (2 bits), the spare/reg
   field (3 bits, also used for /digit opcode extensions), and rm (3 bits). */
#define x86_emit_mod_rm(mod, rm, spare)                                       \
  x86_emit_byte((mod << 6) | (spare << 3) | rm)
/* Emit the mod/rm byte and displacement for a [base + offset] memory
 * operand, choosing the shortest encoding: none, signed disp8, or disp32.
 * The disp8 window is the full signed-byte range -128..127; the original
 * bounds were exclusive, so the two endpoint values needlessly fell
 * through to the 4-byte form (output still decoded correctly, just longer).
 * NOTE(review): mod 0 with ebp as base encodes an absolute address, and
 * esp as rm requires a SIB byte — visible callers only pass other bases;
 * confirm before widening usage. */
#define x86_emit_mem_op(dest, base, offset)                                   \
  if(offset == 0)                                                             \
  {                                                                           \
    x86_emit_mod_rm(x86_mod_mem, base, dest);                                 \
  }                                                                           \
  else                                                                        \
                                                                              \
  if(((s32)offset <= 127) && ((s32)offset >= -128))                           \
  {                                                                           \
    x86_emit_mod_rm(x86_mod_mem_disp8, base, dest);                           \
    x86_emit_byte((s8)offset);                                                \
  }                                                                           \
  else                                                                        \
  {                                                                           \
    x86_emit_mod_rm(x86_mod_mem_disp32, base, dest);                          \
    x86_emit_dword(offset);                                                   \
  }
/* Emit a register-direct mod/rm byte (mod = 3): dest in the spare/reg
   field, source in the rm field. */
#define x86_emit_reg_op(dest, source)                                         \
  x86_emit_mod_rm(x86_mod_reg, source, dest)
87
/* Opcode table. Plain entries are the raw one-byte opcode. Entries with a
 * nonzero high byte are opcode-extension forms consumed by
 * x86_emit_opcode_1b_ext_reg/_mem: the low byte is the opcode and the high
 * byte is the /digit that goes into the mod/rm spare field. */
typedef enum
{
  x86_opcode_mov_rm_reg                 = 0x89,
  x86_opcode_mov_reg_rm                 = 0x8B,
  x86_opcode_mov_reg_imm                = 0xB8,     // B8+r, imm32 follows
  x86_opcode_mov_rm_imm                 = 0x00C7,   // C7 /0
  x86_opcode_ror_reg_imm                = 0x01C1,   // C1 /1
  x86_opcode_shl_reg_imm                = 0x04C1,   // C1 /4
  x86_opcode_shr_reg_imm                = 0x05C1,   // C1 /5
  x86_opcode_sar_reg_imm                = 0x07C1,   // C1 /7
  x86_opcode_push_reg                   = 0x50,     // 50+r
  x86_opcode_push_rm                    = 0xFF,     // FF /6, spare supplied by caller
  x86_opcode_push_imm                   = 0x0668,   // only the low byte (68) is emitted
  x86_opcode_call_offset                = 0xE8,
  x86_opcode_ret                        = 0xC3,
  x86_opcode_test_rm_imm                = 0x00F7,   // F7 /0
  x86_opcode_test_reg_rm                = 0x85,
  x86_opcode_mul_eax_rm                 = 0x04F7,   // F7 /4
  x86_opcode_imul_eax_rm                = 0x05F7,   // F7 /5
  x86_opcode_idiv_eax_rm                = 0x07F7,   // F7 /7
  x86_opcode_add_rm_imm                 = 0x0081,   // 81 /0
  x86_opcode_and_rm_imm                 = 0x0481,   // 81 /4
  x86_opcode_sub_rm_imm                 = 0x0581,   // 81 /5
  x86_opcode_xor_rm_imm                 = 0x0681,   // 81 /6
  x86_opcode_add_reg_rm                 = 0x03,
  x86_opcode_adc_reg_rm                 = 0x13,
  x86_opcode_or_reg_rm                  = 0x0B,
  x86_opcode_sub_reg_rm                 = 0x2B,
  x86_opcode_xor_reg_rm                 = 0x33,
  x86_opcode_cmp_reg_rm                 = 0x39,     // cmp r/m, r: operands reversed vs. name
  // Fixed from 0x053B: 3B is cmp r,r/m (no /digit) and /5 is sub's digit,
  // so the emitted "cmp reg, imm" sequence was never a valid compare.
  // CMP r/m32, imm32 is 81 /7.
  x86_opcode_cmp_rm_imm                 = 0x0781,
  x86_opcode_lea_reg_rm                 = 0x8D,
  x86_opcode_j                          = 0x80,     // second byte of 0F 8x Jcc rel32
  x86_opcode_jmp                        = 0xE9,
  x86_opcode_jmp_reg                    = 0x04FF,   // FF /4
  x86_opcode_ext                        = 0x0F      // two-byte opcode escape
} x86_opcodes;
125
/* x86 condition codes: the low nibble ORed into the Jcc opcode byte
   (0F 8x rel32 when combined with x86_opcode_j). */
typedef enum
{
  x86_condition_code_o                  = 0x00,   // overflow
  x86_condition_code_no                 = 0x01,   // no overflow
  x86_condition_code_c                  = 0x02,   // carry / below
  x86_condition_code_nc                 = 0x03,   // no carry / above-equal
  x86_condition_code_z                  = 0x04,   // zero / equal
  x86_condition_code_nz                 = 0x05,   // not zero / not equal
  x86_condition_code_na                 = 0x06,   // not above (unsigned <=)
  x86_condition_code_a                  = 0x07,   // above (unsigned >)
  x86_condition_code_s                  = 0x08,   // sign (negative)
  x86_condition_code_ns                 = 0x09,   // no sign (non-negative)
  x86_condition_code_p                  = 0x0A,   // parity even
  x86_condition_code_np                 = 0x0B,   // parity odd
  x86_condition_code_l                  = 0x0C,   // less (signed <)
  x86_condition_code_nl                 = 0x0D,   // not less (signed >=)
  x86_condition_code_ng                 = 0x0E,   // not greater (signed <=)
  x86_condition_code_g                  = 0x0F    // greater (signed >)
} x86_condition_codes;
145
/* rel32 displacement from an instruction at `source` whose remaining
   encoded length is `next` bytes, to the target address `offset`. */
#define x86_relative_offset(source, offset, next)                             \
  ((u32)offset - ((u32)source + next))

/* Nonzero when the two symbolic operands name different x86 registers;
   lets emitters elide redundant reg-to-same-reg moves. */
#define x86_unequal_operands(op_a, op_b)                                      \
  (x86_reg_number_##op_a != x86_reg_number_##op_b)
/* One-byte opcode with register-direct operands (dest in reg field,
   source in rm field). */
#define x86_emit_opcode_1b_reg(opcode, dest, source)                          \
{                                                                             \
  x86_emit_byte(x86_opcode_##opcode);                                         \
  x86_emit_reg_op(x86_reg_number_##dest, x86_reg_number_##source);            \
}

/* One-byte opcode with a register and a [base + offset] memory operand. */
#define x86_emit_opcode_1b_mem(opcode, dest, base, offset)                    \
{                                                                             \
  x86_emit_byte(x86_opcode_##opcode);                                         \
  x86_emit_mem_op(x86_reg_number_##dest, x86_reg_number_##base, offset);      \
}

/* One-byte opcode with the register encoded in the low three opcode bits
   (e.g. mov reg, imm32 = B8+r). */
#define x86_emit_opcode_1b(opcode, reg)                                       \
  x86_emit_byte(x86_opcode_##opcode | x86_reg_number_##reg)

/* Opcode-extension form, register-direct: the enum's low byte is the
   opcode and its high byte is the /digit for the mod/rm spare field. */
#define x86_emit_opcode_1b_ext_reg(opcode, dest)                              \
  x86_emit_byte(x86_opcode_##opcode & 0xFF);                                  \
  x86_emit_reg_op(x86_opcode_##opcode >> 8, x86_reg_number_##dest)

/* Opcode-extension form with a [base + offset] memory operand. */
#define x86_emit_opcode_1b_ext_mem(opcode, base, offset)                      \
  x86_emit_byte(x86_opcode_##opcode & 0xFF);                                  \
  x86_emit_mem_op(x86_opcode_##opcode >> 8, x86_reg_number_##base, offset)
/* mov dest, [base + offset] */
#define x86_emit_mov_reg_mem(dest, base, offset)                              \
  x86_emit_opcode_1b_mem(mov_reg_rm, dest, base, offset)

/* mov [base + offset], source */
#define x86_emit_mov_mem_reg(source, base, offset)                            \
  x86_emit_opcode_1b_mem(mov_rm_reg, source, base, offset)

/* mov dest, source — emits nothing when both name the same register. */
#define x86_emit_mov_reg_reg(dest, source)                                    \
  if(x86_unequal_operands(dest, source))                                      \
  {                                                                           \
    x86_emit_opcode_1b_reg(mov_reg_rm, dest, source)                          \
  }

/* mov dest, imm32 */
#define x86_emit_mov_reg_imm(dest, imm)                                       \
  x86_emit_opcode_1b(mov_reg_imm, dest);                                      \
  x86_emit_dword(imm)

/* mov dword [base + offset], imm32 */
#define x86_emit_mov_mem_imm(imm, base, offset)                               \
  x86_emit_opcode_1b_ext_mem(mov_rm_imm, base, offset);                       \
  x86_emit_dword(imm)

/* shl dest, imm8 */
#define x86_emit_shl_reg_imm(dest, imm)                                       \
  x86_emit_opcode_1b_ext_reg(shl_reg_imm, dest);                              \
  x86_emit_byte(imm)

/* shr dest, imm8 */
#define x86_emit_shr_reg_imm(dest, imm)                                       \
  x86_emit_opcode_1b_ext_reg(shr_reg_imm, dest);                              \
  x86_emit_byte(imm)

/* sar dest, imm8 */
#define x86_emit_sar_reg_imm(dest, imm)                                       \
  x86_emit_opcode_1b_ext_reg(sar_reg_imm, dest);                              \
  x86_emit_byte(imm)

/* ror dest, imm8 */
#define x86_emit_ror_reg_imm(dest, imm)                                       \
  x86_emit_opcode_1b_ext_reg(ror_reg_imm, dest);                              \
  x86_emit_byte(imm)
/* add dest, source */
#define x86_emit_add_reg_reg(dest, source)                                    \
  x86_emit_opcode_1b_reg(add_reg_rm, dest, source)

/* adc dest, source */
#define x86_emit_adc_reg_reg(dest, source)                                    \
  x86_emit_opcode_1b_reg(adc_reg_rm, dest, source)

/* sub dest, source */
#define x86_emit_sub_reg_reg(dest, source)                                    \
  x86_emit_opcode_1b_reg(sub_reg_rm, dest, source)

/* or dest, source */
#define x86_emit_or_reg_reg(dest, source)                                     \
  x86_emit_opcode_1b_reg(or_reg_rm, dest, source)

/* xor dest, source */
#define x86_emit_xor_reg_reg(dest, source)                                    \
  x86_emit_opcode_1b_reg(xor_reg_rm, dest, source)

/* add dest, imm32 — emits nothing when the immediate is zero. */
#define x86_emit_add_reg_imm(dest, imm)                                       \
  if(imm != 0)                                                                \
  {                                                                           \
    x86_emit_opcode_1b_ext_reg(add_rm_imm, dest);                             \
    x86_emit_dword(imm);                                                      \
  }

/* sub dest, imm32 — emits nothing when the immediate is zero. */
#define x86_emit_sub_reg_imm(dest, imm)                                       \
  if(imm != 0)                                                                \
  {                                                                           \
    x86_emit_opcode_1b_ext_reg(sub_rm_imm, dest);                             \
    x86_emit_dword(imm);                                                      \
  }

/* and dest, imm32 */
#define x86_emit_and_reg_imm(dest, imm)                                       \
  x86_emit_opcode_1b_ext_reg(and_rm_imm, dest);                               \
  x86_emit_dword(imm)

/* xor dest, imm32 */
#define x86_emit_xor_reg_imm(dest, imm)                                       \
  x86_emit_opcode_1b_ext_reg(xor_rm_imm, dest);                               \
  x86_emit_dword(imm)

/* test dest, imm32 */
#define x86_emit_test_reg_imm(dest, imm)                                      \
  x86_emit_opcode_1b_ext_reg(test_rm_imm, dest);                              \
  x86_emit_dword(imm)

/* NOTE(review): opcode 0x39 is "cmp r/m, r", so this encodes
   cmp source, dest — reversed relative to the name. Visible users only
   branch on (in)equality, where operand order is irrelevant; confirm
   before pairing with signed condition codes. */
#define x86_emit_cmp_reg_reg(dest, source)                                    \
  x86_emit_opcode_1b_reg(cmp_reg_rm, dest, source)

/* test dest, source — sets SF/ZF from (dest & source). */
#define x86_emit_test_reg_reg(dest, source)                                   \
  x86_emit_opcode_1b_reg(test_reg_rm, dest, source)

/* cmp dest, imm32 — NOTE(review): relies on x86_opcode_cmp_rm_imm
   encoding CMP as 81 /7; verify the enum value before relying on this
   emitter. */
#define x86_emit_cmp_reg_imm(dest, imm)                                       \
  x86_emit_opcode_1b_ext_reg(cmp_rm_imm, dest);                               \
  x86_emit_dword(imm)

/* mul source: edx:eax = eax * source (unsigned). */
#define x86_emit_mul_eax_reg(source)                                          \
  x86_emit_opcode_1b_ext_reg(mul_eax_rm, source)

/* imul source: edx:eax = eax * source (signed). */
#define x86_emit_imul_eax_reg(source)                                         \
  x86_emit_opcode_1b_ext_reg(imul_eax_rm, source)

/* idiv source: eax = edx:eax / source, edx = remainder (signed). */
#define x86_emit_idiv_eax_reg(source)                                         \
  x86_emit_opcode_1b_ext_reg(idiv_eax_rm, source)
/* push dword [base + offset]: opcode FF with /6 in the mod/rm spare field.
 * The spare digit is passed directly as 6 because the original routed the
 * literal 0x06 through x86_emit_opcode_1b_mem, whose x86_reg_number_##
 * paste produced the undefined identifier x86_reg_number_0x06 (a compile
 * error at any use site). */
#define x86_emit_push_mem(base, offset)                                       \
{                                                                             \
  x86_emit_byte(x86_opcode_push_rm);                                          \
  x86_emit_mem_op(6, x86_reg_number_##base, offset);                          \
}
/* push imm32 (x86_emit_byte truncates the enum to its low byte, 0x68). */
#define x86_emit_push_imm(imm)                                                \
  x86_emit_byte(x86_opcode_push_imm);                                         \
  x86_emit_dword(imm)

/* call rel32 */
#define x86_emit_call_offset(relative_offset)                                 \
  x86_emit_byte(x86_opcode_call_offset);                                      \
  x86_emit_dword(relative_offset)

/* ret */
#define x86_emit_ret()                                                        \
  x86_emit_byte(x86_opcode_ret)

/* lea dest, [base + offset] */
#define x86_emit_lea_reg_mem(dest, base, offset)                              \
  x86_emit_opcode_1b_mem(lea_reg_rm, dest, base, offset)

/* Jcc rel32 with the 4-byte displacement left unwritten; the slot's
   address is stored in writeback_location so it can be patched once the
   target is known. */
#define x86_emit_j_filler(condition_code, writeback_location)                 \
  x86_emit_byte(x86_opcode_ext);                                              \
  x86_emit_byte(x86_opcode_j | condition_code);                               \
  (writeback_location) = translation_ptr;                                     \
  translation_ptr += 4

/* Jcc rel32 with a known displacement. */
#define x86_emit_j_offset(condition_code, offset)                             \
  x86_emit_byte(x86_opcode_ext);                                              \
  x86_emit_byte(x86_opcode_j | condition_code);                               \
  x86_emit_dword(offset)

/* jmp rel32, displacement patched later through writeback_location. */
#define x86_emit_jmp_filler(writeback_location)                               \
  x86_emit_byte(x86_opcode_jmp);                                              \
  (writeback_location) = translation_ptr;                                     \
  translation_ptr += 4

/* jmp rel32 */
#define x86_emit_jmp_offset(offset)                                           \
  x86_emit_byte(x86_opcode_jmp);                                              \
  x86_emit_dword(offset)

/* jmp source (FF /4, register-indirect) */
#define x86_emit_jmp_reg(source)                                              \
  x86_emit_opcode_1b_ext_reg(jmp_reg, source)
/* Fixed x86 register roles used by emitted code:
   reg_base   — base pointer of the GBA register file (indexed reg_index * 4)
   reg_cycles — countdown cycle counter (decremented and sign-tested)
   reg_a0..a2 — scratch/argument registers; reg_rv aliases eax as the
                function_cc return value; reg_s0 is an extra scratch
                (callee-saved esi — confirm against the calling convention). */
#define reg_base    ebx
#define reg_cycles  edi
#define reg_a0      eax
#define reg_a1      edx
#define reg_a2      ecx
#define reg_rv      eax
#define reg_s0      esi
318
/* Load GBA register reg_index (word-indexed off reg_base) into ireg. */
#define generate_load_reg(ireg, reg_index)                                    \
  x86_emit_mov_reg_mem(reg_##ireg, reg_base, reg_index * 4);

/* Load a literal PC value into ireg. */
#define generate_load_pc(ireg, new_pc)                                        \
  x86_emit_mov_reg_imm(reg_##ireg, new_pc)

/* Load a 32-bit immediate into ireg. */
#define generate_load_imm(ireg, imm)                                          \
  x86_emit_mov_reg_imm(reg_##ireg, imm)

/* Store ireg back into GBA register reg_index. */
#define generate_store_reg(ireg, reg_index)                                   \
  x86_emit_mov_mem_reg(reg_##ireg, reg_base, reg_index * 4)

/* Constant-amount shifts and rotate on ireg. */
#define generate_shift_left(ireg, imm)                                        \
  x86_emit_shl_reg_imm(reg_##ireg, imm)

#define generate_shift_right(ireg, imm)                                       \
  x86_emit_shr_reg_imm(reg_##ireg, imm)

#define generate_shift_right_arithmetic(ireg, imm)                            \
  x86_emit_sar_reg_imm(reg_##ireg, imm)

#define generate_rotate_right(ireg, imm)                                      \
  x86_emit_ror_reg_imm(reg_##ireg, imm)

/* Register/register ALU operations on intermediate registers. */
#define generate_add(ireg_dest, ireg_src)                                     \
  x86_emit_add_reg_reg(reg_##ireg_dest, reg_##ireg_src)

#define generate_sub(ireg_dest, ireg_src)                                     \
  x86_emit_sub_reg_reg(reg_##ireg_dest, reg_##ireg_src)

#define generate_or(ireg_dest, ireg_src)                                      \
  x86_emit_or_reg_reg(reg_##ireg_dest, reg_##ireg_src)

#define generate_xor(ireg_dest, ireg_src)                                     \
  x86_emit_xor_reg_reg(reg_##ireg_dest, reg_##ireg_src)

/* Immediate ALU operations (add/sub elide an immediate of zero). */
#define generate_add_imm(ireg, imm)                                           \
  x86_emit_add_reg_imm(reg_##ireg, imm)

#define generate_sub_imm(ireg, imm)                                           \
  x86_emit_sub_reg_imm(reg_##ireg, imm)

#define generate_xor_imm(ireg, imm)                                           \
  x86_emit_xor_reg_imm(reg_##ireg, imm)

/* dest = src + imm in a single non-flag-setting instruction via lea. */
#define generate_add_reg_reg_imm(ireg_dest, ireg_src, imm)                    \
  x86_emit_lea_reg_mem(reg_##ireg_dest, reg_##ireg_src, imm)

#define generate_and_imm(ireg, imm)                                           \
  x86_emit_and_reg_imm(reg_##ireg, imm)

/* Register move, elided when source and dest are the same register. */
#define generate_mov(ireg_dest, ireg_src)                                     \
  x86_emit_mov_reg_reg(reg_##ireg_dest, reg_##ireg_src)

/* 32x32 multiplies; the full product lands in edx:eax (reg_a1:reg_a0). */
#define generate_multiply(ireg)                                               \
  x86_emit_imul_eax_reg(reg_##ireg)

#define generate_multiply_s64(ireg)                                           \
  x86_emit_imul_eax_reg(reg_##ireg)

#define generate_multiply_u64(ireg)                                           \
  x86_emit_mul_eax_reg(reg_##ireg)

/* Multiply-accumulate: 64-bit add of ireg_hi:ireg_lo into the product
   using add/adc across the edx:eax pair. */
#define generate_multiply_s64_add(ireg_src, ireg_lo, ireg_hi)                 \
  x86_emit_imul_eax_reg(reg_##ireg_src);                                      \
  x86_emit_add_reg_reg(reg_a0, reg_##ireg_lo);                                \
  x86_emit_adc_reg_reg(reg_a1, reg_##ireg_hi)

#define generate_multiply_u64_add(ireg_src, ireg_lo, ireg_hi)                 \
  x86_emit_mul_eax_reg(reg_##ireg_src);                                       \
  x86_emit_add_reg_reg(reg_a0, reg_##ireg_lo);                                \
  x86_emit_adc_reg_reg(reg_a1, reg_##ireg_hi)
391
392
/* call rel32 to a C helper (call instruction has 4 bytes left after the
   opcode byte, hence next = 4). */
#define generate_function_call(function_location)                             \
  x86_emit_call_offset(x86_relative_offset(translation_ptr,                   \
   function_location, 4));

/* End a translated block by returning to the dispatcher. */
#define generate_exit_block()                                                 \
  x86_emit_ret();

/* Conditional-execution fillers: test bit 0 of the condition register and
   emit a to-be-patched jump. "true" jumps away when the bit is clear
   (falls through when the condition holds); "false" is the inverse.
   ireg_src is unused in these two variants. */
#define generate_branch_filler_true(ireg_dest, ireg_src, writeback_location)  \
  x86_emit_test_reg_imm(reg_##ireg_dest, 1);                                  \
  x86_emit_j_filler(x86_condition_code_z, writeback_location)

#define generate_branch_filler_false(ireg_dest, ireg_src, writeback_location) \
  x86_emit_test_reg_imm(reg_##ireg_dest, 1);                                  \
  x86_emit_j_filler(x86_condition_code_nz, writeback_location)

/* Compare two registers and jump away when they differ ("equal" falls
   through on equality) / when they match ("not_equal" falls through on
   inequality). */
#define generate_branch_filler_equal(ireg_dest, ireg_src, writeback_location) \
  x86_emit_cmp_reg_reg(reg_##ireg_dest, reg_##ireg_src);                      \
  x86_emit_j_filler(x86_condition_code_nz, writeback_location)

#define generate_branch_filler_not_equal(ireg_dest, ireg_src,                 \
 writeback_location)                                                          \
  x86_emit_cmp_reg_reg(reg_##ireg_dest, reg_##ireg_src);                      \
  x86_emit_j_filler(x86_condition_code_z, writeback_location)

/* Load the new PC into eax (reg_a0). */
#define generate_update_pc(new_pc)                                            \
  x86_emit_mov_reg_imm(eax, new_pc)

/* Load the translator's current pc and write it to the GBA PC register. */
#define generate_update_pc_reg()                                              \
  generate_update_pc(pc);                                                     \
  generate_store_reg(a0, REG_PC)

/* Flush the pending translation-time cycle count into the runtime
   counter and reset it. */
#define generate_cycle_update()                                               \
  x86_emit_sub_reg_imm(reg_cycles, cycle_count);                              \
  cycle_count = 0

/* Back-patch a previously emitted filler jump's rel32 slot at dest so it
   lands on offset. */
#define generate_branch_patch_conditional(dest, offset)                       \
  *((u32 *)(dest)) = x86_relative_offset(dest, offset, 4)

#define generate_branch_patch_unconditional(dest, offset)                     \
  *((u32 *)(dest)) = x86_relative_offset(dest, offset, 4)

/* Direct branch: for idle loops always call x86_update_gba; otherwise
   test the cycle counter and, while it is still non-negative (jns), skip
   the update path. The literal 10 is the length of the skipped code:
   mov eax, imm32 (5 bytes) + call rel32 (5 bytes). The final jmp's
   displacement is patched later via writeback_location. */
#define generate_branch_no_cycle_update(writeback_location, new_pc)           \
  if(pc == idle_loop_target_pc)                                               \
  {                                                                           \
    x86_emit_mov_reg_imm(eax, new_pc);                                        \
    generate_function_call(x86_update_gba);                                   \
    x86_emit_jmp_filler(writeback_location);                                  \
  }                                                                           \
  else                                                                        \
  {                                                                           \
    x86_emit_test_reg_reg(reg_cycles, reg_cycles);                            \
    x86_emit_j_offset(x86_condition_code_ns, 10);                             \
    x86_emit_mov_reg_imm(eax, new_pc);                                        \
    generate_function_call(x86_update_gba);                                   \
    x86_emit_jmp_filler(writeback_location);                                  \
  }

/* As above, but flush the pending cycle count first. */
#define generate_branch_cycle_update(writeback_location, new_pc)              \
  generate_cycle_update();                                                    \
  generate_branch_no_cycle_update(writeback_location, new_pc)

/* Dispatch to the branch filler matching the comparison type. */
#define generate_conditional_branch(ireg_a, ireg_b, type, writeback_location) \
  generate_branch_filler_##type(ireg_a, ireg_b, writeback_location)
456
// a0 holds the destination

/* Tail-jump into the assembly indirect-branch dispatcher for the given
   mode (arm/thumb/dual), flushing the pending cycle count first. */
#define generate_indirect_branch_cycle_update(type)                           \
  generate_cycle_update();                                                    \
  x86_emit_jmp_offset(x86_relative_offset(translation_ptr,                    \
   x86_indirect_branch_##type, 4))

/* Same dispatch, without touching the cycle counter. */
#define generate_indirect_branch_no_cycle_update(type)                        \
  x86_emit_jmp_offset(x86_relative_offset(translation_ptr,                    \
   x86_indirect_branch_##type, 4))
467
/* No per-block entry code is needed on x86. */
#define generate_block_prologue()

/* Per-block helpers for ARM mode, written as GCC nested functions so they
   capture `condition` (and the emitter state) from the enclosing
   translator function. 0x0E is the ARM AL (always) condition — the only
   case where the cycle counter can be flushed unconditionally before the
   indirect branch. */
#define generate_block_extra_vars_arm()                                       \
  void generate_indirect_branch_arm()                                         \
  {                                                                           \
    if(condition == 0x0E)                                                     \
    {                                                                         \
      generate_indirect_branch_cycle_update(arm);                             \
    }                                                                         \
    else                                                                      \
    {                                                                         \
      generate_indirect_branch_no_cycle_update(arm);                          \
    }                                                                         \
  }                                                                           \
                                                                              \
  void generate_indirect_branch_dual()                                        \
  {                                                                           \
    if(condition == 0x0E)                                                     \
    {                                                                         \
      generate_indirect_branch_cycle_update(dual);                            \
    }                                                                         \
    else                                                                      \
    {                                                                         \
      generate_indirect_branch_no_cycle_update(dual);                         \
    }                                                                         \
  }

/* Thumb instructions are unconditional; no extra helpers needed. */
#define generate_block_extra_vars_thumb()

/* Nothing to flush on x86. */
#define translate_invalidate_dcache()

/* Matches the empty generate_block_prologue() above. */
#define block_prologue_size 0
501
/* Interpreter-side flag computation; dest/src_a/src_b are the result and
   operands of the ALU operation. */

/* Z: result is zero. */
#define calculate_z_flag(dest)                                                \
  reg[REG_Z_FLAG] = (dest == 0)

/* N: sign bit of the result. */
#define calculate_n_flag(dest)                                                \
  reg[REG_N_FLAG] = ((signed)dest < 0)

/* C for a - b: set when no borrow occurred, i.e. b <= a unsigned. */
#define calculate_c_flag_sub(dest, src_a, src_b)                              \
  reg[REG_C_FLAG] = ((unsigned)src_b <= (unsigned)src_a)                      \

/* V for a - b: signed overflow iff the "b > a" prediction disagrees with
   the result's sign. */
#define calculate_v_flag_sub(dest, src_a, src_b)                              \
  reg[REG_V_FLAG] = ((signed)src_b > (signed)src_a) != ((signed)dest < 0)     \

/* C for a + b: set when the unsigned sum wrapped below an operand. */
#define calculate_c_flag_add(dest, src_a, src_b)                              \
  reg[REG_C_FLAG] = ((unsigned)dest < (unsigned)src_a)                        \

/* V for a + b: signed overflow iff "result < a" disagrees with b's sign. */
#define calculate_v_flag_add(dest, src_a, src_b)                              \
  reg[REG_V_FLAG] = ((signed)dest < (signed)src_a) != ((signed)src_b < 0)     \

519
520
521
/* Immediate shift amount: bits 11:7 of the ARM opcode. */
#define get_shift_imm()                                                       \
  u32 shift = (opcode >> 7) & 0x1F

/* Shift-by-register: load rm (12 passed as the PC-read adjustment for
   generate_load_reg_pc), load the shift amount from the register in
   opcode bits 11:8, call the C helper for this shift type and flags
   variant, and move the return value into ireg. */
#define generate_shift_reg(ireg, name, flags_op)                              \
  generate_load_reg_pc(ireg, rm, 12);                                         \
  generate_load_reg(a1, ((opcode >> 8) & 0x0F));                              \
  generate_function_call(execute_##name##_##flags_op##_reg);                  \
  generate_mov(ireg, rv)
530
531 u32 function_cc execute_lsl_no_flags_reg(u32 value, u32 shift)
532 {
533   if(shift != 0)
534   {
535     if(shift > 31)
536       value = 0;
537     else
538       value <<= shift;
539   }
540   return value;
541 }
542
543 u32 function_cc execute_lsr_no_flags_reg(u32 value, u32 shift)
544 {
545   if(shift != 0)
546   {
547     if(shift > 31)
548       value = 0;
549     else
550       value >>= shift;
551   }
552   return value;
553 }
554
555 u32 function_cc execute_asr_no_flags_reg(u32 value, u32 shift)
556 {
557   if(shift != 0)
558   {
559     if(shift > 31)
560       value = (s32)value >> 31;
561     else
562       value = (s32)value >> shift;
563   }
564   return value;
565 }
566
567 u32 function_cc execute_ror_no_flags_reg(u32 value, u32 shift)
568 {
569   if(shift != 0)
570   {
571     ror(value, value, shift);
572   }
573
574   return value;
575 }
576
577
578 u32 function_cc execute_lsl_flags_reg(u32 value, u32 shift)
579 {
580   if(shift != 0)
581   {
582     if(shift > 31)
583     {
584       reg[REG_C_FLAG] = value & 0x01;
585
586       if(shift != 32)
587         reg[REG_C_FLAG] = 0;
588
589       value = 0;
590     }
591     else
592     {
593       reg[REG_C_FLAG] = (value >> (32 - shift)) & 0x01;
594       value <<= shift;
595     }
596   }
597   return value;
598 }
599
600 u32 function_cc execute_lsr_flags_reg(u32 value, u32 shift)
601 {
602   if(shift != 0)
603   {
604     if(shift > 31)
605     {
606       reg[REG_C_FLAG] = value >> 31;
607
608       if(shift != 32)
609         reg[REG_C_FLAG] = 0;
610
611       value = 0;
612     }
613     else
614     {
615       reg[REG_C_FLAG] = (value >> (shift - 1)) & 0x01;
616       value >>= shift;
617     }
618   }
619   return value;
620 }
621
622 u32 function_cc execute_asr_flags_reg(u32 value, u32 shift)
623 {
624   if(shift != 0)
625   {
626     if(shift > 31)
627     {
628       value = (s32)value >> 31;
629       reg[REG_C_FLAG] = value & 0x01;
630     }
631     else
632     {
633       reg[REG_C_FLAG] = (value >> (shift - 1)) & 0x01;
634       value = (s32)value >> shift;
635     }
636   }
637   return value;
638 }
639
640 u32 function_cc execute_ror_flags_reg(u32 value, u32 shift)
641 {
642   if(shift != 0)
643   {
644     reg[REG_C_FLAG] = (value >> (shift - 1)) & 0x01;
645     ror(value, value, shift);
646   }
647
648   return value;
649 }
650
651 u32 function_cc execute_rrx_flags(u32 value)
652 {
653   u32 c_flag = reg[REG_C_FLAG];
654   reg[REG_C_FLAG] = value & 0x01;
655   return (value >> 1) | (c_flag << 31);
656 }
657
658 u32 function_cc execute_rrx(u32 value)
659 {
660   return (value >> 1) | (reg[REG_C_FLAG] << 31);
661 }
662
/* Operand generators for immediate-shifted rm without flag updates.
   "shift" is the 5-bit immediate produced by get_shift_imm(); reads of
   r15 see PC + 8 for immediate-shift operands.  A zero immediate
   encodes the ARM special cases: LSL #0 is a plain move, LSR #0 means
   LSR #32 (result 0), ASR #0 means ASR #32 (sign fill), and ROR #0
   means RRX (rotate right through carry). */
#define generate_shift_imm_lsl_no_flags(ireg)                                 \
  generate_load_reg_pc(ireg, rm, 8);                                          \
  if(shift != 0)                                                              \
  {                                                                           \
    generate_shift_left(ireg, shift);                                         \
  }                                                                           \

#define generate_shift_imm_lsr_no_flags(ireg)                                 \
  if(shift != 0)                                                              \
  {                                                                           \
    generate_load_reg_pc(ireg, rm, 8);                                        \
    generate_shift_right(ireg, shift);                                        \
  }                                                                           \
  else                                                                        \
  {                                                                           \
    generate_load_imm(ireg, 0);                                               \
  }                                                                           \

#define generate_shift_imm_asr_no_flags(ireg)                                 \
  generate_load_reg_pc(ireg, rm, 8);                                          \
  if(shift != 0)                                                              \
  {                                                                           \
    generate_shift_right_arithmetic(ireg, shift);                             \
  }                                                                           \
  else                                                                        \
  {                                                                           \
    generate_shift_right_arithmetic(ireg, 31);                                \
  }                                                                           \

#define generate_shift_imm_ror_no_flags(ireg)                                 \
  if(shift != 0)                                                              \
  {                                                                           \
    generate_load_reg_pc(ireg, rm, 8);                                        \
    generate_rotate_right(ireg, shift);                                       \
  }                                                                           \
  else                                                                        \
  {                                                                           \
    /* ROR #0 encodes RRX; done out of line so the carry is read */           \
    generate_load_reg_pc(a0, rm, 8);                                          \
    generate_function_call(execute_rrx);                                      \
    generate_mov(ireg, rv);                                                   \
  }                                                                           \
704
/* Operand generators for immediate-shifted rm that also compute the
   shifter carry-out into REG_C_FLAG (scratch register a1 is used to
   isolate the last bit shifted out).  shift == 0 keeps the ARM
   special-case encodings: LSL #0 is a plain move with carry untouched;
   LSR #0 means LSR #32, i.e. result 0 with carry = bit 31. */
#define generate_shift_imm_lsl_flags(ireg)                                    \
  generate_load_reg_pc(ireg, rm, 8);                                          \
  if(shift != 0)                                                              \
  {                                                                           \
    generate_mov(a1, ireg);                                                   \
    generate_shift_right(a1, (32 - shift));                                   \
    generate_and_imm(a1, 1);                                                  \
    generate_store_reg(a1, REG_C_FLAG);                                       \
    generate_shift_left(ireg, shift);                                         \
  }                                                                           \

#define generate_shift_imm_lsr_flags(ireg)                                    \
  if(shift != 0)                                                              \
  {                                                                           \
    generate_load_reg_pc(ireg, rm, 8);                                        \
    generate_mov(a1, ireg);                                                   \
    generate_shift_right(a1, shift - 1);                                      \
    generate_and_imm(a1, 1);                                                  \
    generate_store_reg(a1, REG_C_FLAG);                                       \
    generate_shift_right(ireg, shift);                                        \
  }                                                                           \
  else                                                                        \
  {                                                                           \
    generate_load_reg_pc(a1, rm, 8);                                          \
    generate_shift_right(a1, 31);                                             \
    generate_store_reg(a1, REG_C_FLAG);                                       \
    generate_load_imm(ireg, 0);                                               \
  }                                                                           \
733
/* ASR-by-immediate operand generator with carry computation.
 * shift == 0 encodes ASR #32 on ARM: the result is the sign bit
 * replicated across the register and the carry becomes that sign bit.
 *
 * Fix: the shift == 0 path loaded rm into a0 but then shifted ireg,
 * which was only correct when the caller happened to pass ireg == a0
 * (as generate_load_rm_sh does).  Load into ireg so the macro is
 * correct for any destination register, matching the other
 * generate_shift_imm_*_flags macros.
 */
#define generate_shift_imm_asr_flags(ireg)                                    \
  if(shift != 0)                                                              \
  {                                                                           \
    generate_load_reg_pc(ireg, rm, 8);                                        \
    generate_mov(a1, ireg);                                                   \
    generate_shift_right_arithmetic(a1, shift - 1);                           \
    generate_and_imm(a1, 1);                                                  \
    generate_store_reg(a1, REG_C_FLAG);                                       \
    generate_shift_right_arithmetic(ireg, shift);                             \
  }                                                                           \
  else                                                                        \
  {                                                                           \
    generate_load_reg_pc(ireg, rm, 8);                                        \
    generate_shift_right_arithmetic(ireg, 31);                                \
    generate_mov(a1, ireg);                                                   \
    generate_and_imm(a1, 1);                                                  \
    generate_store_reg(a1, REG_C_FLAG);                                       \
  }                                                                           \
752
/* ROR-by-immediate operand generator with carry computation.  For a
   nonzero immediate the carry is the last bit rotated out (bit
   shift-1).  shift == 0 encodes RRX, handled out of line so the old
   carry can be rotated in and the new carry stored. */
#define generate_shift_imm_ror_flags(ireg)                                    \
  generate_load_reg_pc(ireg, rm, 8);                                          \
  if(shift != 0)                                                              \
  {                                                                           \
    generate_mov(a1, ireg);                                                   \
    generate_shift_right(a1, shift - 1);                                      \
    generate_and_imm(a1, 1);                                                  \
    generate_store_reg(a1, REG_C_FLAG);                                       \
    generate_rotate_right(ireg, shift);                                       \
  }                                                                           \
  else                                                                        \
  {                                                                           \
    generate_function_call(execute_rrx_flags);                                \
    generate_mov(ireg, rv);                                                   \
  }                                                                           \
768
/* Emits code computing an immediate-shifted operand: extracts the
   5-bit immediate from the opcode, then expands the matching
   generate_shift_imm_<name>_<flags_op> generator above. */
#define generate_shift_imm(ireg, name, flags_op)                              \
  get_shift_imm();                                                            \
  generate_shift_imm_##name##_##flags_op(ireg)                                \

/* Emits code materializing the shifted rm operand of a data-processing
   instruction into a0.  Dispatches on opcode bits 6..4: bits 6..5 pick
   the shift type (LSL/LSR/ASR/ROR) and bit 4 selects register- vs
   immediate-specified amounts.  flags_op is "flags" or "no_flags" and
   selects whether the shifter carry-out is computed. */
#define generate_load_rm_sh(flags_op)                                         \
  switch((opcode >> 4) & 0x07)                                                \
  {                                                                           \
    /* LSL imm */                                                             \
    case 0x0:                                                                 \
    {                                                                         \
      generate_shift_imm(a0, lsl, flags_op);                                  \
      break;                                                                  \
    }                                                                         \
                                                                              \
    /* LSL reg */                                                             \
    case 0x1:                                                                 \
    {                                                                         \
      generate_shift_reg(a0, lsl, flags_op);                                  \
      break;                                                                  \
    }                                                                         \
                                                                              \
    /* LSR imm */                                                             \
    case 0x2:                                                                 \
    {                                                                         \
      generate_shift_imm(a0, lsr, flags_op);                                  \
      break;                                                                  \
    }                                                                         \
                                                                              \
    /* LSR reg */                                                             \
    case 0x3:                                                                 \
    {                                                                         \
      generate_shift_reg(a0, lsr, flags_op);                                  \
      break;                                                                  \
    }                                                                         \
                                                                              \
    /* ASR imm */                                                             \
    case 0x4:                                                                 \
    {                                                                         \
      generate_shift_imm(a0, asr, flags_op);                                  \
      break;                                                                  \
    }                                                                         \
                                                                              \
    /* ASR reg */                                                             \
    case 0x5:                                                                 \
    {                                                                         \
      generate_shift_reg(a0, asr, flags_op);                                  \
      break;                                                                  \
    }                                                                         \
                                                                              \
    /* ROR imm */                                                             \
    case 0x6:                                                                 \
    {                                                                         \
      generate_shift_imm(a0, ror, flags_op);                                  \
      break;                                                                  \
    }                                                                         \
                                                                              \
    /* ROR reg */                                                             \
    case 0x7:                                                                 \
    {                                                                         \
      generate_shift_reg(a0, ror, flags_op);                                  \
      break;                                                                  \
    }                                                                         \
  }                                                                           \
832
/* Emits code materializing the scaled register offset of a load/store
   into a1.  Only immediate shift amounts exist for addressing-mode
   offsets (opcode bits 6..5 pick the type), and the shifter carry-out
   is never needed, so all four cases use the no_flags generators. */
#define generate_load_offset_sh()                                             \
  switch((opcode >> 5) & 0x03)                                                \
  {                                                                           \
    /* LSL imm */                                                             \
    case 0x0:                                                                 \
    {                                                                         \
      generate_shift_imm(a1, lsl, no_flags);                                  \
      break;                                                                  \
    }                                                                         \
                                                                              \
    /* LSR imm */                                                             \
    case 0x1:                                                                 \
    {                                                                         \
      generate_shift_imm(a1, lsr, no_flags);                                  \
      break;                                                                  \
    }                                                                         \
                                                                              \
    /* ASR imm */                                                             \
    case 0x2:                                                                 \
    {                                                                         \
      generate_shift_imm(a1, asr, no_flags);                                  \
      break;                                                                  \
    }                                                                         \
                                                                              \
    /* ROR imm */                                                             \
    case 0x3:                                                                 \
    {                                                                         \
      generate_shift_imm(a1, ror, no_flags);                                  \
      break;                                                                  \
    }                                                                         \
  }                                                                           \
864
/* Emits flag computation for an addition: N/Z from the result, C and V
   from the operands and result. */
#define calculate_flags_add(dest, src_a, src_b)                               \
  calculate_z_flag(dest);                                                     \
  calculate_n_flag(dest);                                                     \
  calculate_c_flag_add(dest, src_a, src_b);                                   \
  calculate_v_flag_add(dest, src_a, src_b)                                    \

/* Emits flag computation for a subtraction (C is the ARM "no borrow"
   convention, handled by calculate_c_flag_sub). */
#define calculate_flags_sub(dest, src_a, src_b)                               \
  calculate_z_flag(dest);                                                     \
  calculate_n_flag(dest);                                                     \
  calculate_c_flag_sub(dest, src_a, src_b);                                   \
  calculate_v_flag_sub(dest, src_a, src_b)                                    \

/* Emits flag computation for a logical op: only N and Z change here
   (any shifter carry-out is stored by the operand generators). */
#define calculate_flags_logic(dest)                                           \
  calculate_z_flag(dest);                                                     \
  calculate_n_flag(dest)                                                      \

/* Unpacks CPSR bits 31..28 into the four separate flag slots of reg[]
   that the generated code reads and writes directly. */
#define extract_flags()                                                       \
  reg[REG_N_FLAG] = reg[REG_CPSR] >> 31;                                      \
  reg[REG_Z_FLAG] = (reg[REG_CPSR] >> 30) & 0x01;                             \
  reg[REG_C_FLAG] = (reg[REG_CPSR] >> 29) & 0x01;                             \
  reg[REG_V_FLAG] = (reg[REG_CPSR] >> 28) & 0x01;                             \

/* Repacks the separate flag slots into CPSR bits 31..28, preserving
   the low control byte (mode, IRQ/FIQ masks, Thumb bit). */
#define collapse_flags()                                                      \
  reg[REG_CPSR] = (reg[REG_N_FLAG] << 31) | (reg[REG_Z_FLAG] << 30) |         \
   (reg[REG_C_FLAG] << 29) | (reg[REG_V_FLAG] << 28) |                        \
   (reg[REG_CPSR] & 0xFF)                                                     \
891
// It should be okay to still generate result flags, spsr will overwrite them.
// This is pretty infrequent (returning from interrupt handlers, et al) so
// probably not worth optimizing for.

/* If an enabled interrupt is pending (IE & IF nonzero, IME set) and
   IRQs are not masked in CPSR (bit 7 clear): save the return address
   into the IRQ-mode banked r14, save CPSR into SPSR_irq, force
   CPSR = 0xD2 (IRQ mode, IRQs disabled, ARM state), switch the banked
   registers to IRQ mode and redirect "address" to the IRQ vector. */
#define check_for_interrupts()                                                \
  if((io_registers[REG_IE] & io_registers[REG_IF]) &&                         \
   io_registers[REG_IME] && ((reg[REG_CPSR] & 0x80) == 0))                    \
  {                                                                           \
    reg_mode[MODE_IRQ][6] = reg[REG_PC] + 4;                                  \
    spsr[MODE_IRQ] = reg[REG_CPSR];                                           \
    reg[REG_CPSR] = 0xD2;                                                     \
    address = 0x00000018;                                                     \
    set_cpu_mode(MODE_IRQ);                                                   \
  }                                                                           \
906
/* Loads an ARM register into a native register, special-casing r15:
   reads of the PC see the current instruction address plus pc_offset
   (ARM pipeline visibility: +8 or +12 depending on the operand). */
#define generate_load_reg_pc(ireg, reg_index, pc_offset)                      \
  if(reg_index == 15)                                                         \
  {                                                                           \
    generate_load_pc(ireg, pc + pc_offset);                                   \
  }                                                                           \
  else                                                                        \
  {                                                                           \
    generate_load_reg(ireg, reg_index);                                       \
  }                                                                           \

/* Stores a native register into an ARM register; a write to r15 is a
   branch, so control leaves the block through the ARM-mode indirect
   branch stub (flags untouched). */
#define generate_store_reg_pc_no_flags(ireg, reg_index)                       \
  generate_store_reg(ireg, reg_index);                                        \
  if(reg_index == 15)                                                         \
  {                                                                           \
    generate_mov(a0, ireg);                                                   \
    generate_indirect_branch_arm();                                           \
  }                                                                           \
924
/* Restores CPSR from the current mode's SPSR — the flag-setting
   write-to-r15 path used when returning from exception handlers.
   No-op in user mode (which has no SPSR).  After the restore the flag
   slots are re-extracted, the register bank is switched to the mode
   encoded in the new CPSR, and pending interrupts are checked (which
   may redirect "address" to the IRQ vector).  If the restored CPSR has
   the Thumb bit (0x20) set, bit 0 of the returned address is set so
   the caller dispatches into Thumb state.  Returns the (possibly
   adjusted) branch target. */
u32 function_cc execute_spsr_restore(u32 address)
{
  if(reg[CPU_MODE] != MODE_USER)
  {
    reg[REG_CPSR] = spsr[reg[CPU_MODE]];
    extract_flags();
    set_cpu_mode(cpu_modes[reg[REG_CPSR] & 0x1F]);
    check_for_interrupts();

    if(reg[REG_CPSR] & 0x20)
      address |= 0x01;
  }

  return address;
}
940
/* Stores a native register into an ARM register for flag-setting ops;
   a write to r15 additionally restores CPSR from SPSR (exception
   return) and branches through the dual ARM/Thumb stub, which uses
   bit 0 of the target to pick the instruction set. */
#define generate_store_reg_pc_flags(ireg, reg_index)                          \
  generate_store_reg(ireg, reg_index);                                        \
  if(reg_index == 15)                                                         \
  {                                                                           \
    generate_mov(a0, ireg);                                                   \
    generate_function_call(execute_spsr_restore);                             \
    generate_mov(a0, rv);                                                     \
    generate_indirect_branch_dual();                                          \
  }                                                                           \

/* How the emitted conditional branch should test the value(s) loaded
   by the generate_condition_* macros below. */
typedef enum
{
  CONDITION_TRUE,       // branch taken when the loaded value is nonzero
  CONDITION_FALSE,      // branch taken when the loaded value is zero
  CONDITION_EQUAL,      // branch taken when the two loaded values match
  CONDITION_NOT_EQUAL   // branch taken when the two loaded values differ
} condition_check_type;
958
959
/* Per-condition-code emitters.  Each loads the relevant flag slot(s)
   into native registers and records in condition_check how the caller
   should test them (see condition_check_type).  Simple conditions map
   one flag to TRUE/FALSE; the compound ones (hi/ls/gt/le) combine
   flags so a single zero/nonzero test suffices. */
#define generate_condition_eq(ireg_a, ireg_b)                                 \
  generate_load_reg(ireg_a, REG_Z_FLAG);                                      \
  condition_check = CONDITION_TRUE                                            \

#define generate_condition_ne(ireg_a, ireg_b)                                 \
  generate_load_reg(ireg_a, REG_Z_FLAG);                                      \
  condition_check = CONDITION_FALSE                                           \

#define generate_condition_cs(ireg_a, ireg_b)                                 \
  generate_load_reg(ireg_a, REG_C_FLAG);                                      \
  condition_check = CONDITION_TRUE                                            \

#define generate_condition_cc(ireg_a, ireg_b)                                 \
  generate_load_reg(ireg_a, REG_C_FLAG);                                      \
  condition_check = CONDITION_FALSE                                           \

#define generate_condition_mi(ireg_a, ireg_b)                                 \
  generate_load_reg(ireg_a, REG_N_FLAG);                                      \
  condition_check = CONDITION_TRUE                                            \

#define generate_condition_pl(ireg_a, ireg_b)                                 \
  generate_load_reg(ireg_a, REG_N_FLAG);                                      \
  condition_check = CONDITION_FALSE                                           \

#define generate_condition_vs(ireg_a, ireg_b)                                 \
  generate_load_reg(ireg_a, REG_V_FLAG);                                      \
  condition_check = CONDITION_TRUE                                            \

#define generate_condition_vc(ireg_a, ireg_b)                                 \
  generate_load_reg(ireg_a, REG_V_FLAG);                                      \
  condition_check = CONDITION_FALSE                                           \

/* hi: C set and Z clear, tested as (!C | Z) == 0. */
#define generate_condition_hi(ireg_a, ireg_b)                                 \
  generate_load_reg(ireg_a, REG_C_FLAG);                                      \
  generate_xor_imm(ireg_a, 1);                                                \
  generate_load_reg(ireg_b, REG_Z_FLAG);                                      \
  generate_or(ireg_a, ireg_b);                                                \
  condition_check = CONDITION_FALSE                                           \

/* ls: C clear or Z set, tested as (!C | Z) != 0. */
#define generate_condition_ls(ireg_a, ireg_b)                                 \
  generate_load_reg(ireg_a, REG_C_FLAG);                                      \
  generate_xor_imm(ireg_a, 1);                                                \
  generate_load_reg(ireg_b, REG_Z_FLAG);                                      \
  generate_or(ireg_a, ireg_b);                                                \
  condition_check = CONDITION_TRUE                                            \

/* ge: N == V, tested by comparing the two loaded flags. */
#define generate_condition_ge(ireg_a, ireg_b)                                 \
  generate_load_reg(ireg_a, REG_N_FLAG);                                      \
  generate_load_reg(ireg_b, REG_V_FLAG);                                      \
  condition_check = CONDITION_EQUAL                                           \

/* lt: N != V. */
#define generate_condition_lt(ireg_a, ireg_b)                                 \
  generate_load_reg(ireg_a, REG_N_FLAG);                                      \
  generate_load_reg(ireg_b, REG_V_FLAG);                                      \
  condition_check = CONDITION_NOT_EQUAL                                       \

/* gt: Z clear and N == V, tested as (Z | (N ^ V)) == 0.
   NOTE(review): Z is loaded into a0, so ireg_a is expected to be a0
   when the final OR runs — relies on the caller's register choice;
   confirm against generate_condition's invocation. */
#define generate_condition_gt(ireg_a, ireg_b)                                 \
  generate_load_reg(ireg_a, REG_N_FLAG);                                      \
  generate_load_reg(ireg_b, REG_V_FLAG);                                      \
  generate_xor(ireg_b, ireg_a);                                               \
  generate_load_reg(a0, REG_Z_FLAG);                                          \
  generate_or(ireg_a, ireg_b);                                                \
  condition_check = CONDITION_FALSE                                           \

/* le: Z set or N != V, tested as (Z | (N ^ V)) != 0.
   NOTE(review): same ireg_a == a0 assumption as gt above. */
#define generate_condition_le(ireg_a, ireg_b)                                 \
  generate_load_reg(ireg_a, REG_N_FLAG);                                      \
  generate_load_reg(ireg_b, REG_V_FLAG);                                      \
  generate_xor(ireg_b, ireg_a);                                               \
  generate_load_reg(a0, REG_Z_FLAG);                                          \
  generate_or(ireg_a, ireg_b);                                                \
  condition_check = CONDITION_TRUE                                            \
1031
1032
/* Dispatches on the 4-bit ARM condition field, emitting the flag loads
   for that condition (and setting the local condition_check).  AL
   (0xE) and the reserved encoding (0xF) emit nothing.  Always emits
   the cycle-counter update afterwards, since a conditional instruction
   may skip the rest of its emitted body. */
#define generate_condition(ireg_a, ireg_b)                                    \
  switch(condition)                                                           \
  {                                                                           \
    case 0x0:                                                                 \
      generate_condition_eq(ireg_a, ireg_b);                                  \
      break;                                                                  \
                                                                              \
    case 0x1:                                                                 \
      generate_condition_ne(ireg_a, ireg_b);                                  \
      break;                                                                  \
                                                                              \
    case 0x2:                                                                 \
      generate_condition_cs(ireg_a, ireg_b);                                  \
      break;                                                                  \
                                                                              \
    case 0x3:                                                                 \
      generate_condition_cc(ireg_a, ireg_b);                                  \
      break;                                                                  \
                                                                              \
    case 0x4:                                                                 \
      generate_condition_mi(ireg_a, ireg_b);                                  \
      break;                                                                  \
                                                                              \
    case 0x5:                                                                 \
      generate_condition_pl(ireg_a, ireg_b);                                  \
      break;                                                                  \
                                                                              \
    case 0x6:                                                                 \
      generate_condition_vs(ireg_a, ireg_b);                                  \
      break;                                                                  \
                                                                              \
    case 0x7:                                                                 \
      generate_condition_vc(ireg_a, ireg_b);                                  \
      break;                                                                  \
                                                                              \
    case 0x8:                                                                 \
      generate_condition_hi(ireg_a, ireg_b);                                  \
      break;                                                                  \
                                                                              \
    case 0x9:                                                                 \
      generate_condition_ls(ireg_a, ireg_b);                                  \
      break;                                                                  \
                                                                              \
    case 0xA:                                                                 \
      generate_condition_ge(ireg_a, ireg_b);                                  \
      break;                                                                  \
                                                                              \
    case 0xB:                                                                 \
      generate_condition_lt(ireg_a, ireg_b);                                  \
      break;                                                                  \
                                                                              \
    case 0xC:                                                                 \
      generate_condition_gt(ireg_a, ireg_b);                                  \
      break;                                                                  \
                                                                              \
    case 0xD:                                                                 \
      generate_condition_le(ireg_a, ireg_b);                                  \
      break;                                                                  \
                                                                              \
    case 0xE:                                                                 \
      /* AL       */                                                          \
      break;                                                                  \
                                                                              \
    case 0xF:                                                                 \
      /* Reserved */                                                          \
      break;                                                                  \
  }                                                                           \
  generate_cycle_update()                                                     \
1101
/* Emit the guard for one conditionally-executed instruction.
   condition_check (set by the translator) selects which comparison of
   ireg_a/ireg_b is emitted; the emitted jump's patch location is
   recorded in backpatch_address for later fixup. */
#define generate_conditional_branch_type(ireg_a, ireg_b)                      \
  switch(condition_check)                                                     \
  {                                                                           \
    case CONDITION_TRUE:                                                      \
      generate_conditional_branch(ireg_a, ireg_b, true, backpatch_address);   \
      break;                                                                  \
                                                                              \
    case CONDITION_FALSE:                                                     \
      generate_conditional_branch(ireg_a, ireg_b, false, backpatch_address);  \
      break;                                                                  \
                                                                              \
    case CONDITION_EQUAL:                                                     \
      generate_conditional_branch(ireg_a, ireg_b, equal, backpatch_address);  \
      break;                                                                  \
                                                                              \
    case CONDITION_NOT_EQUAL:                                                 \
      generate_conditional_branch(ireg_a, ireg_b, not_equal,                  \
       backpatch_address);                                                    \
      break;                                                                  \
  }                                                                           \
1122
/* Emit a direct branch out of the current block.  The always (AL,
   0x0E) condition folds the cycle-counter update into the branch;
   other conditions use the no_cycle_update variant (their cycle
   update is emitted by the condition-check code).  Consumes one
   block_exits entry. */
#define generate_branch()                                                     \
{                                                                             \
  if(condition == 0x0E)                                                       \
  {                                                                           \
    generate_branch_cycle_update(                                             \
     block_exits[block_exit_position].branch_source,                          \
     block_exits[block_exit_position].branch_target);                         \
  }                                                                           \
  else                                                                        \
  {                                                                           \
    generate_branch_no_cycle_update(                                          \
     block_exits[block_exit_position].branch_source,                          \
     block_exits[block_exit_position].branch_target);                         \
  }                                                                           \
  block_exit_position++;                                                      \
}                                                                             \
1139
/* Token aliases: the second operand of a data processing op is the
   decoded register (rm) for register forms, the immediate for
   immediate forms. */
#define rm_op_reg rm
#define rm_op_imm imm
1142
/* Decode a register-form data processing op and load the shifted rm
   operand into a0.  flag_status bit 0x02 indicates the carry result
   of the shifter is live, so the flag-updating shifter variant must
   be emitted. */
#define arm_data_proc_reg_flags()                                             \
  arm_decode_data_proc_reg();                                                 \
  if(flag_status & 0x02)                                                      \
  {                                                                           \
    generate_load_rm_sh(flags)                                                \
  }                                                                           \
  else                                                                        \
  {                                                                           \
    generate_load_rm_sh(no_flags);                                            \
  }                                                                           \
1153
/* Register form, flags not live: decode and load shifted rm into a0. */
#define arm_data_proc_reg()                                                   \
  arm_decode_data_proc_reg();                                                 \
  generate_load_rm_sh(no_flags)                                               \

/* Immediate form: rotate the immediate right by imm_ror and load the
   result into a0. */
#define arm_data_proc_imm()                                                   \
  arm_decode_data_proc_imm();                                                 \
  ror(imm, imm, imm_ror);                                                    \
  generate_load_imm(a0, imm)                                                  \
1162
/* Immediate form when the carry flag is live: a nonzero rotate also
   yields the shifter carry-out (bit imm_ror-1 of the unrotated
   immediate), stored to REG_C_FLAG before the operand is loaded. */
#define arm_data_proc_imm_flags()                                             \
  arm_decode_data_proc_imm();                                                 \
  if((flag_status & 0x02) && (imm_ror != 0))                                  \
  {                                                                           \
    /* Generate carry flag from integer rotation */                           \
    generate_load_imm(a0, ((imm >> (imm_ror - 1)) & 0x01));                   \
    generate_store_reg(a0, REG_C_FLAG);                                       \
  }                                                                           \
  ror(imm, imm, imm_ror);                                                     \
  generate_load_imm(a0, imm)                                                  \
1173
1174
/* Two-operand data processing op: shifted/rotated operand in a0,
   rn in a1 (PC reads as pc + 8 for prefetch), result from the
   execute_ helper stored to rd via the flags_op PC-aware store. */
#define arm_data_proc(name, type, flags_op)                                   \
{                                                                             \
  arm_data_proc_##type();                                                     \
  generate_load_reg_pc(a1, rn, 8);                                            \
  generate_function_call(execute_##name);                                     \
  generate_store_reg_pc_##flags_op(rv, rd);                                   \
}                                                                             \

/* Compare/test ops (CMP/CMN/TST/TEQ): same operands, no destination. */
#define arm_data_proc_test(name, type)                                        \
{                                                                             \
  arm_data_proc_##type();                                                     \
  generate_load_reg_pc(a1, rn, 8);                                            \
  generate_function_call(execute_##name);                                     \
}                                                                             \

/* One-operand ops: no rn read, result stored to rd. */
#define arm_data_proc_unary(name, type, flags_op)                             \
{                                                                             \
  arm_data_proc_##type();                                                     \
  generate_function_call(execute_##name);                                     \
  generate_store_reg_pc_##flags_op(rv, rd);                                   \
}                                                                             \

/* MOV without flags: store the loaded operand (a0) directly to rd. */
#define arm_data_proc_mov(type)                                               \
{                                                                             \
  arm_data_proc_##type();                                                     \
  generate_store_reg_pc_no_flags(a0, rd);                                     \
}                                                                             \
1202
1203 static void function_cc execute_mul_flags(u32 dest)
1204 {
1205   calculate_z_flag(dest);
1206   calculate_n_flag(dest);
1207 }
1208
/* S-variant multiplies: update N/Z from the result via the helper. */
#define arm_multiply_flags_yes()                                              \
  generate_function_call(execute_mul_flags)                                   \

/* Non-S variant: no flag update.  Declared with no parameters so the
   arm_multiply_flags_##flags() call site matches both variants (the
   old unused _dest parameter relied on C99 empty-argument calls). */
#define arm_multiply_flags_no()                                               \

/* Plain MUL: no accumulate step. */
#define arm_multiply_add_no()                                                 \

/* MLA: add rn into the product held in a0. */
#define arm_multiply_add_yes()                                                \
  generate_load_reg(a1, rn);                                                  \
  generate_add(a0, a1)                                                        \
1219
/* MUL/MLA: a0 = rm * rs (plus rn when add_op is yes), stored to rd,
   optionally updating N/Z via the flags variant. */
#define arm_multiply(add_op, flags)                                           \
{                                                                             \
  arm_decode_multiply();                                                      \
  generate_load_reg(a0, rm);                                                  \
  generate_load_reg(a1, rs);                                                  \
  generate_multiply(a1);                                                      \
  arm_multiply_add_##add_op();                                                \
  generate_store_reg(a0, rd);                                                 \
  arm_multiply_flags_##flags();                                               \
}                                                                             \
1230
1231 static void function_cc execute_mul_long_flags(u32 dest_lo, u32 dest_hi)
1232 {
1233   reg[REG_Z_FLAG] = (dest_lo == 0) & (dest_hi == 0);
1234   calculate_n_flag(dest_hi);
1235 }
1236
/* S-variant long multiplies: update N/Z from the 64-bit result. */
#define arm_multiply_long_flags_yes()                                         \
  generate_function_call(execute_mul_long_flags)                              \

/* Non-S variant: no flag update.  Declared with no parameters so the
   arm_multiply_long_flags_##flags() call site matches both variants
   (the old unused _dest parameter relied on C99 empty-argument
   calls). */
#define arm_multiply_long_flags_no()                                          \

/* Accumulating forms (UMLAL/SMLAL): feed the current rdlo/rdhi pair
   into the multiply helper. */
#define arm_multiply_long_add_yes(name)                                       \
  generate_load_reg(a2, rdlo);                                                \
  generate_load_reg(s0, rdhi);                                                \
  generate_multiply_##name(a1, a2, s0)                                        \

/* Non-accumulating forms (UMULL/SMULL). */
#define arm_multiply_long_add_no(name)                                        \
  generate_multiply_##name(a1)                                                \
1249
/* Long multiply: rdhi:rdlo = rm * rs (plus the old rdhi:rdlo when
   accumulating), optionally updating N/Z from the 64-bit result. */
#define arm_multiply_long(name, add_op, flags)                                \
{                                                                             \
  arm_decode_multiply_long();                                                 \
  generate_load_reg(a0, rm);                                                  \
  generate_load_reg(a1, rs);                                                  \
  arm_multiply_long_add_##add_op(name);                                       \
  generate_store_reg(a0, rdlo);                                               \
  generate_store_reg(a1, rdhi);                                               \
  arm_multiply_long_flags_##flags();                                          \
}                                                                             \
1260
1261 u32 function_cc execute_read_cpsr()
1262 {
1263   collapse_flags();
1264   return reg[REG_CPSR];
1265 }
1266
1267 u32 function_cc execute_read_spsr()
1268 {
1269   collapse_flags();
1270   return spsr[reg[CPU_MODE]];
1271 }
1272
/* MRS: call the cpsr/spsr reader and store the result to rd.
   (op_type is unused here; kept for signature symmetry with
   arm_psr_store so arm_psr() can pass it uniformly.) */
#define arm_psr_read(op_type, psr_reg)                                        \
  generate_function_call(execute_read_##psr_reg);                             \
  generate_store_reg(rv, rd)                                                  \
1276
1277 // store_mask and address are stored in the SAVE slots, since there's no real
1278 // register space to nicely pass them.
1279
/* Commit a new CPSR value and deliver any IRQ that becomes pending as
   a result.  Per the note above, the store mask is stashed in
   reg[REG_SAVE] (low byte nonzero => control field is written) and
   the current PC in reg[REG_SAVE2].  Returns 0 normally, or the IRQ
   vector 0x00000018 after switching the CPU into IRQ mode. */
u32 function_cc execute_store_cpsr_body(u32 _cpsr)
{
  reg[REG_CPSR] = _cpsr;
  /* Only a write touching the control field can change mode or
     unmask interrupts. */
  if(reg[REG_SAVE] & 0xFF)
  {
    set_cpu_mode(cpu_modes[_cpsr & 0x1F]);
    /* IRQ pending (IE & IF), master enable on, and I bit now clear? */
    if((io_registers[REG_IE] & io_registers[REG_IF]) &&
     io_registers[REG_IME] && ((_cpsr & 0x80) == 0))
    {
      /* Take the IRQ exception: LR_irq = PC + 4, SPSR_irq = old CPSR,
         new CPSR = IRQ mode with interrupt disable bits (0xD2). */
      reg_mode[MODE_IRQ][6] = reg[REG_SAVE2] + 4;
      spsr[MODE_IRQ] = _cpsr;
      reg[REG_CPSR] = (_cpsr & 0xFFFFFF00) | 0xD2;
      set_cpu_mode(MODE_IRQ);
      return 0x00000018;
    }
  }

  return 0;
}
1299
1300
1301 void function_cc execute_store_spsr(u32 new_spsr, u32 store_mask)
1302 {
1303   u32 _spsr = spsr[reg[CPU_MODE]];
1304   spsr[reg[CPU_MODE]] = (new_spsr & store_mask) | (_spsr & (~store_mask));
1305 }
1306
/* MSR operand loaders: fetch the new PSR value into a0, from rm or
   from the rotated 8-bit immediate. */
#define arm_psr_load_new_reg()                                                \
  generate_load_reg(a0, rm)                                                   \

#define arm_psr_load_new_imm()                                                \
  ror(imm, imm, imm_ror);                                                     \
  generate_load_imm(a0, imm)                                                  \

/* MSR: a0 = new value, a1 = field mask from psr_masks, a2 = pc + 4
   (stashed for the store helper), then call the cpsr/spsr store. */
#define arm_psr_store(op_type, psr_reg)                                       \
  arm_psr_load_new_##op_type();                                               \
  generate_load_imm(a1, psr_masks[psr_field]);                                \
  generate_load_pc(a2, (pc + 4));                                             \
  generate_function_call(execute_store_##psr_reg)                             \

/* Top-level MRS/MSR dispatcher: decode, then emit a read or store. */
#define arm_psr(op_type, transfer_type, psr_reg)                              \
{                                                                             \
  arm_decode_psr_##op_type();                                                 \
  arm_psr_##transfer_type(op_type, psr_reg);                                  \
}                                                                             \
1325
/* Fast-path gate masks: any of the top four address bits set means the
   address never hits the direct memory map; the low bits reject
   accesses not naturally aligned for the access size.  A nonzero AND
   result routes the access to the slow-path handler. */
#define aligned_address_mask8  0xF0000000
#define aligned_address_mask16 0xF0000001
#define aligned_address_mask32 0xF0000003
1329
/* Read a value of the given size/type from emulated memory into dest.
   Fast paths: (1) BIOS read protection - reads of the 0x00xxxxxx
   region while PC is outside the BIOS return bytes from
   bios_read_protect; (2) aligned addresses backed by a direct page in
   memory_map_read (32KB pages keyed by address >> 15).  Everything
   else falls back to the out-of-line read_memory##size handler. */
#define read_memory(size, type, address, dest)                                \
{                                                                             \
  u8 *map;                                                                    \
                                                                              \
  if(((address >> 24) == 0) && (reg[REG_PC] >= 0x4000))                       \
  {                                                                           \
    dest = *((type *)((u8 *)&bios_read_protect + (address & 0x03)));          \
  }                                                                           \
  else                                                                        \
                                                                              \
  if(((address & aligned_address_mask##size) == 0) &&                         \
   (map = memory_map_read[address >> 15]))                                    \
  {                                                                           \
    dest = *((type *)((u8 *)map + (address & 0x7FFF)));                       \
  }                                                                           \
  else                                                                        \
  {                                                                           \
    dest = (type)read_memory##size(address);                                  \
  }                                                                           \
}                                                                             \
1350
/* Sign-extending 16-bit read: same fast paths as read_memory(), but
   reading through an s16 pointer and using the dedicated signed
   slow-path handler. */
#define read_memory_s16(address, dest)                                        \
{                                                                             \
  u8 *map;                                                                    \
                                                                              \
  if(((address >> 24) == 0) && (reg[REG_PC] >= 0x4000))                       \
  {                                                                           \
    dest = *((s16 *)((u8 *)&bios_read_protect + (address & 0x03)));           \
  }                                                                           \
  else                                                                        \
                                                                              \
  if(((address & aligned_address_mask16) == 0) &&                             \
   (map = memory_map_read[address >> 15]))                                    \
  {                                                                           \
    dest = *((s16 *)((u8 *)map + (address & 0x7FFF)));                        \
  }                                                                           \
  else                                                                        \
  {                                                                           \
    dest = (s16)read_memory16_signed(address);                                \
  }                                                                           \
}                                                                             \
1371
/* Generate a load helper per access width/type; the value is widened
   into the u32 return through the mem_type read in read_memory(). */
#define access_memory_generate_read_function(mem_size, mem_type)              \
u32 function_cc execute_load_##mem_type(u32 address)                          \
{                                                                             \
  u32 dest;                                                                   \
  read_memory(mem_size, mem_type, address, dest);                             \
  return dest;                                                                \
}                                                                             \

access_memory_generate_read_function(8, u8);
access_memory_generate_read_function(8, s8);
access_memory_generate_read_function(16, u16);
access_memory_generate_read_function(32, u32);
1384
1385 u32 function_cc execute_load_s16(u32 address)
1386 {
1387   u32 dest;
1388   read_memory_s16(address, dest);
1389   return dest;
1390 }
1391
/* Generate a store helper per access width/type: aligned addresses
   with a writable direct page in memory_map_write go straight to
   memory; everything else goes through the out-of-line
   write_memory##mem_size handler. */
#define access_memory_generate_write_function(mem_size, mem_type)             \
void function_cc execute_store_##mem_type(u32 address, u32 source)            \
{                                                                             \
  u8 *map;                                                                    \
                                                                              \
  if(((address & aligned_address_mask##mem_size) == 0) &&                     \
   (map = memory_map_write[address >> 15]))                                   \
  {                                                                           \
    *((mem_type *)((u8 *)map + (address & 0x7FFF))) = source;                 \
  }                                                                           \
  else                                                                        \
  {                                                                           \
    write_memory##mem_size(address, source);                                  \
  }                                                                           \
}                                                                             \
1407
/* Emit a single load: call the typed load helper and store the result
   to rd through the PC-aware store. */
#define arm_access_memory_load(mem_type)                                      \
  cycle_count += 2;                                                           \
  generate_function_call(execute_load_##mem_type);                            \
  generate_store_reg_pc_no_flags(rv, rd)                                      \

/* Emit a single store: a1 = rd (PC reads as pc + 12 here), a2 = the
   stashed pc + 4, then call the typed store helper. */
#define arm_access_memory_store(mem_type)                                     \
  cycle_count++;                                                              \
  generate_load_reg_pc(a1, rd, 12);                                           \
  generate_load_pc(a2, (pc + 4));                                             \
  generate_function_call(execute_store_##mem_type)                            \

/* Expands to nothing; placeholder for table-driven expansions. */
#define no_op                                                                 \

/* Base register writeback: off_op is a textual offset expression
   pasted after 'address'. */
#define arm_access_memory_writeback_yes(off_op)                               \
  reg[rn] = address off_op                                                    \

#define arm_access_memory_writeback_no(off_op)                                \

#define load_reg_op reg[rd]                                                   \

#define store_reg_op reg_op                                                   \
1429
/* Address adjustment op for each access direction, plus the op that
   undoes it (used by post-indexed forms to recover the pre-update
   address for the access itself). */
#define arm_access_memory_adjust_op_up      add
#define arm_access_memory_adjust_op_down    sub
#define arm_access_memory_reverse_op_up     sub
#define arm_access_memory_reverse_op_down   add
1434
/* Pre-indexed, register offset: a0 = rn (PC reads as pc + 8) +/- a1. */
#define arm_access_memory_reg_pre(adjust_dir_op, reverse_dir_op)              \
  generate_load_reg_pc(a0, rn, 8);                                            \
  generate_##adjust_dir_op(a0, a1)                                            \

/* Pre-indexed with writeback: also store the new address back to rn. */
#define arm_access_memory_reg_pre_wb(adjust_dir_op, reverse_dir_op)           \
  arm_access_memory_reg_pre(adjust_dir_op, reverse_dir_op);                   \
  generate_store_reg(a0, rn)                                                  \

/* Post-indexed, register offset: rn is updated, but the access uses
   the original address (the reverse op undoes the adjustment in a0). */
#define arm_access_memory_reg_post(adjust_dir_op, reverse_dir_op)             \
  generate_load_reg(a0, rn);                                                  \
  generate_##adjust_dir_op(a0, a1);                                           \
  generate_store_reg(a0, rn);                                                 \
  generate_##reverse_dir_op(a0, a1)                                           \

/* Immediate-offset counterparts of the three register forms. */
#define arm_access_memory_imm_pre(adjust_dir_op, reverse_dir_op)              \
  generate_load_reg_pc(a0, rn, 8);                                            \
  generate_##adjust_dir_op##_imm(a0, offset)                                  \

#define arm_access_memory_imm_pre_wb(adjust_dir_op, reverse_dir_op)           \
  arm_access_memory_imm_pre(adjust_dir_op, reverse_dir_op);                   \
  generate_store_reg(a0, rn)                                                  \

#define arm_access_memory_imm_post(adjust_dir_op, reverse_dir_op)             \
  generate_load_reg(a0, rn);                                                  \
  generate_##adjust_dir_op##_imm(a0, offset);                                 \
  generate_store_reg(a0, rn);                                                 \
  generate_##reverse_dir_op##_imm(a0, offset)                                 \
1462
1463
/* Decode + address generation for each single-transfer encoding:
   shifted register, immediate, halfword register, halfword immediate. */
#define arm_data_trans_reg(adjust_op, adjust_dir_op, reverse_dir_op)          \
  arm_decode_data_trans_reg();                                                \
  generate_load_offset_sh();                                                  \
  arm_access_memory_reg_##adjust_op(adjust_dir_op, reverse_dir_op)            \

#define arm_data_trans_imm(adjust_op, adjust_dir_op, reverse_dir_op)          \
  arm_decode_data_trans_imm();                                                \
  arm_access_memory_imm_##adjust_op(adjust_dir_op, reverse_dir_op)            \

#define arm_data_trans_half_reg(adjust_op, adjust_dir_op, reverse_dir_op)     \
  arm_decode_half_trans_r();                                                  \
  generate_load_reg(a1, rm);                                                  \
  arm_access_memory_reg_##adjust_op(adjust_dir_op, reverse_dir_op)            \

#define arm_data_trans_half_imm(adjust_op, adjust_dir_op, reverse_dir_op)     \
  arm_decode_half_trans_of();                                                 \
  arm_access_memory_imm_##adjust_op(adjust_dir_op, reverse_dir_op)            \
1481
/* Top-level single data transfer emitter: compute the address (with
   the indexing/writeback mode selected by adjust_op and direction),
   then emit the load or store itself. */
#define arm_access_memory(access_type, direction, adjust_op, mem_type,        \
 offset_type)                                                                 \
{                                                                             \
  arm_data_trans_##offset_type(adjust_op,                                     \
   arm_access_memory_adjust_op_##direction,                                   \
   arm_access_memory_reverse_op_##direction);                                 \
                                                                              \
  arm_access_memory_##access_type(mem_type);                                  \
}                                                                             \
1491
/* Popcount of a 16-bit register list via the 256-entry bit_count LUT. */
#define word_bit_count(word)                                                  \
  (bit_count[word >> 8] + bit_count[word & 0xFF])                             \

/* Debug hooks for block transfers encoded with the S bit set. */
#define sprint_no(access_type, pre_op, post_op, wb)                           \

#define sprint_yes(access_type, pre_op, post_op, wb)                          \
  printf("sbit on %s %s %s %s\n", #access_type, #pre_op, #post_op, #wb)       \
1499
1500 u32 function_cc execute_aligned_load32(u32 address)
1501 {
1502   u8 *map;
1503   if(!(address & 0xF0000000) && (map = memory_map_read[address >> 15]))
1504     return address32(map, address & 0x7FFF);
1505   else
1506     return read_memory32(address);
1507 }
1508
1509 void function_cc execute_aligned_store32(u32 address, u32 source)
1510 {
1511   u8 *map;
1512
1513   if(!(address & 0xF0000000) && (map = memory_map_write[address >> 15]))
1514     address32(map, address & 0x7FFF) = source;
1515   else
1516     write_memory32(address, source);
1517 }
1518
/* Per-register emitters for block transfers; i is the register index
   from the emit loop.  Non-final accesses use the fast aligned
   helpers. */
#define arm_block_memory_load()                                               \
  generate_function_call(execute_aligned_load32);                             \
  generate_store_reg(rv, i)                                                   \

#define arm_block_memory_store()                                              \
  generate_load_reg_pc(a1, i, 8);                                             \
  generate_function_call(execute_aligned_store32)                             \

#define arm_block_memory_final_load()                                         \
  arm_block_memory_load()                                                     \

/* The last store goes through the full store path, with the stashed
   pc + 4 in a2 for the handler. */
#define arm_block_memory_final_store()                                        \
  generate_load_reg_pc(a1, i, 12);                                            \
  generate_load_pc(a2, (pc + 4));                                             \
  generate_function_call(execute_store_u32)                                   \
1534
/* Post-transfer PC handling: STM never writes the PC; an LDM whose
   register list includes r15 branches to the loaded value. */
#define arm_block_memory_adjust_pc_store()                                    \

#define arm_block_memory_adjust_pc_load()                                     \
  if(reg_list & 0x8000)                                                       \
  {                                                                           \
    generate_mov(a0, rv);                                                     \
    generate_indirect_branch_arm();                                           \
  }                                                                           \
1543
/* Initial base adjustment in s0 per addressing mode: the two
   descending modes start count*4-4 or count*4 below the base, the
   'up' (IB) mode starts one word above, and 'no' (IA) starts at the
   base itself. */
#define arm_block_memory_offset_down_a()                                      \
  generate_add_imm(s0, -((word_bit_count(reg_list) * 4) - 4))                 \

#define arm_block_memory_offset_down_b()                                      \
  generate_add_imm(s0, -(word_bit_count(reg_list) * 4))                       \

#define arm_block_memory_offset_no()                                          \

#define arm_block_memory_offset_up()                                          \
  generate_add_imm(s0, 4)                                                     \
1554
/* Descending writeback: rn -= 4 * (number of registers transferred).
   A ';' after generate_load_reg was missing here, unlike the matching
   arm_block_memory_writeback_up() below. */
#define arm_block_memory_writeback_down()                                     \
  generate_load_reg(a0, rn);                                                  \
  generate_add_imm(a0, -(word_bit_count(reg_list) * 4));                      \
  generate_store_reg(a0, rn)                                                  \
1559
/* Ascending writeback: rn += 4 * (number of registers transferred). */
#define arm_block_memory_writeback_up()                                       \
  generate_load_reg(a0, rn);                                                  \
  generate_add_imm(a0, (word_bit_count(reg_list) * 4));                       \
  generate_store_reg(a0, rn)                                                  \

/* No writeback requested by the encoding. */
#define arm_block_memory_writeback_no()
1566
1567 // Only emit writeback if the register is not in the list
1568
/* LDM: skip the writeback when rn itself is in the register list (the
   loaded value takes precedence). */
#define arm_block_memory_writeback_load(writeback_type)                       \
  if(!((reg_list >> rn) & 0x01))                                              \
  {                                                                           \
    arm_block_memory_writeback_##writeback_type();                            \
  }                                                                           \

/* STM: writeback is emitted unconditionally. */
#define arm_block_memory_writeback_store(writeback_type)                      \
  arm_block_memory_writeback_##writeback_type()                               \
1577
/* Emit an LDM/STM.  s0 holds the word-aligned running base address;
   offset advances by 4 per emitted register at translation time, and
   one cycle is charged per transferred register.  The s_bit argument
   is accepted but not acted on here. */
#define arm_block_memory(access_type, offset_type, writeback_type, s_bit)     \
{                                                                             \
  arm_decode_block_trans();                                                   \
  u32 offset = 0;                                                             \
  u32 i;                                                                      \
                                                                              \
  generate_load_reg(s0, rn);                                                  \
  arm_block_memory_offset_##offset_type();                                    \
  arm_block_memory_writeback_##access_type(writeback_type);                   \
  generate_and_imm(s0, ~0x03);                                                \
                                                                              \
  for(i = 0; i < 16; i++)                                                     \
  {                                                                           \
    if((reg_list >> i) & 0x01)                                                \
    {                                                                         \
      cycle_count++;                                                          \
      generate_add_reg_reg_imm(a0, s0, offset)                                \
      /* Registers after this one remaining? If not, emit the final   */      \
      /* variant of the access.                                       */      \
      if(reg_list & ~((2 << i) - 1))                                          \
      {                                                                       \
        arm_block_memory_##access_type();                                     \
        offset += 4;                                                          \
      }                                                                       \
      else                                                                    \
      {                                                                       \
        arm_block_memory_final_##access_type();                               \
      }                                                                       \
    }                                                                         \
  }                                                                           \
                                                                              \
  arm_block_memory_adjust_pc_##access_type();                                 \
}                                                                             \
1609
/* SWP/SWPB emitter: load the old value from [rn] (saved in s0), store   */
/* rm to [rn], then write the old value into rd.                         */
#define arm_swap(type)                                                        \
{                                                                             \
  arm_decode_swap();                                                          \
  cycle_count += 3;                                                           \
  generate_load_reg(a0, rn);                                                  \
  generate_function_call(execute_load_##type);                                \
  generate_mov(s0, rv);                                                       \
  generate_load_reg(a0, rn);                                                  \
  generate_load_reg(a1, rm);                                                  \
  generate_function_call(execute_store_##type);                               \
  generate_store_reg(s0, rd);                                                 \
}                                                                             \

1622
/* Load the first ALU operand into a0, from a register or an immediate. */
#define thumb_rn_op_reg(_rn)                                                  \
  generate_load_reg(a0, _rn)                                                  \

#define thumb_rn_op_imm(_imm)                                                 \
  generate_load_imm(a0, _imm)                                                 \

1629 // Types: add_sub, add_sub_imm, alu_op, imm
1630 // Affects N/Z/C/V flags
1631
/* Binary data-processing op: a0 = rn operand, a1 = _rs, call            */
/* execute_<name>, store the result in _rd.                              */
#define thumb_data_proc(type, name, rn_type, _rd, _rs, _rn)                   \
{                                                                             \
  thumb_decode_##type();                                                      \
  thumb_rn_op_##rn_type(_rn);                                                 \
  generate_load_reg(a1, _rs);                                                 \
  generate_function_call(execute_##name);                                     \
  generate_store_reg(rv, _rd);                                                \
}                                                                             \

/* Flag-setting comparison: same as above but no result is stored. */
#define thumb_data_proc_test(type, name, rn_type, _rs, _rn)                   \
{                                                                             \
  thumb_decode_##type();                                                      \
  thumb_rn_op_##rn_type(_rn);                                                 \
  generate_load_reg(a1, _rs);                                                 \
  generate_function_call(execute_##name);                                     \
}                                                                             \

/* Unary op: single operand in a0, result stored in _rd. */
#define thumb_data_proc_unary(type, name, rn_type, _rd, _rn)                  \
{                                                                             \
  thumb_decode_##type();                                                      \
  thumb_rn_op_##rn_type(_rn);                                                 \
  generate_function_call(execute_##name);                                     \
  generate_store_reg(rv, _rd);                                                \
}                                                                             \

/* Plain move: operand goes straight from a0 into _rd, no ALU call. */
#define thumb_data_proc_mov(type, rn_type, _rd, _rn)                          \
{                                                                             \
  thumb_decode_##type();                                                      \
  thumb_rn_op_##rn_type(_rn);                                                 \
  generate_store_reg(a0, _rd);                                                \
}                                                                             \

1663
/* Store ireg into rd; when rd is the PC (r15) this is a branch, so      */
/* also emit an indirect Thumb branch with cycle accounting.             */
#define generate_store_reg_pc_thumb(ireg)                                     \
  generate_store_reg(ireg, rd);                                               \
  if(rd == 15)                                                                \
  {                                                                           \
    generate_indirect_branch_cycle_update(thumb);                             \
  }                                                                           \
1670
/* High-register binary op: operands are loaded PC-aware (PC reads as    */
/* pc + 4), and the store handles rd == 15 as a branch.                  */
#define thumb_data_proc_hi(name)                                              \
{                                                                             \
  thumb_decode_hireg_op();                                                    \
  generate_load_reg_pc(a0, rs, 4);                                            \
  generate_load_reg_pc(a1, rd, 4);                                            \
  generate_function_call(execute_##name);                                     \
  generate_store_reg_pc_thumb(rv);                                            \
}                                                                             \

/* High-register comparison: flags only, no result stored. */
#define thumb_data_proc_test_hi(name)                                         \
{                                                                             \
  thumb_decode_hireg_op();                                                    \
  generate_load_reg_pc(a0, rs, 4);                                            \
  generate_load_reg_pc(a1, rd, 4);                                            \
  generate_function_call(execute_##name);                                     \
}                                                                             \

/* High-register unary op on rn, result to rd (PC-aware store). */
#define thumb_data_proc_unary_hi(name)                                        \
{                                                                             \
  thumb_decode_hireg_op();                                                    \
  generate_load_reg_pc(a0, rn, 4);                                            \
  generate_function_call(execute_##name);                                     \
  generate_store_reg_pc_thumb(rv);                                            \
}                                                                             \

/* High-register MOV: copy rs to rd, branching if rd is the PC. */
#define thumb_data_proc_mov_hi()                                              \
{                                                                             \
  thumb_decode_hireg_op();                                                    \
  generate_load_reg_pc(a0, rs, 4);                                            \
  generate_store_reg_pc_thumb(a0);                                            \
}                                                                             \

1702
/* ADR-style load: _rd = (word-aligned pc + 4) + imm * 4. The value is   */
/* a translation-time constant, so it is loaded as an immediate.         */
#define thumb_load_pc(_rd)                                                    \
{                                                                             \
  thumb_decode_imm();                                                         \
  generate_load_pc(a0, (((pc & ~2) + 4) + (imm * 4)));                        \
  generate_store_reg(a0, _rd);                                                \
}                                                                             \

/* _rd = SP (r13) + imm * 4. */
#define thumb_load_sp(_rd)                                                    \
{                                                                             \
  thumb_decode_imm();                                                         \
  generate_load_reg(a0, 13);                                                  \
  generate_add_imm(a0, (imm * 4));                                            \
  generate_store_reg(a0, _rd);                                                \
}                                                                             \

/* Direction helpers for ADD/SUB SP, #imm (operand scaled by 4). */
#define thumb_adjust_sp_up()                                                  \
  generate_add_imm(a0, imm * 4)                                               \

#define thumb_adjust_sp_down()                                                \
  generate_sub_imm(a0, imm * 4)                                               \


/* SP = SP +/- imm * 4, via the direction helpers above. */
#define thumb_adjust_sp(direction)                                            \
{                                                                             \
  thumb_decode_add_sp();                                                      \
  generate_load_reg(a0, REG_SP);                                              \
  thumb_adjust_sp_##direction();                                              \
  generate_store_reg(a0, REG_SP);                                             \
}                                                                             \
1732
1733 // Decode types: shift, alu_op
1734 // Operation types: lsl, lsr, asr, ror
1735 // Affects N/Z/C flags
1736
1737 u32 function_cc execute_lsl_reg_op(u32 value, u32 shift)
1738 {
1739   if(shift != 0)
1740   {
1741     if(shift > 31)
1742     {
1743       if(shift == 32)
1744         reg[REG_C_FLAG] = value & 0x01;
1745       else
1746         reg[REG_C_FLAG] = 0;
1747
1748       value = 0;
1749     }
1750     else
1751     {
1752       reg[REG_C_FLAG] = (value >> (32 - shift)) & 0x01;
1753       value <<= shift;
1754     }
1755   }
1756
1757   calculate_flags_logic(value);
1758   return value;
1759 }
1760
1761 u32 function_cc execute_lsr_reg_op(u32 value, u32 shift)
1762 {
1763   if(shift != 0)
1764   {
1765     if(shift > 31)
1766     {
1767       if(shift == 32)
1768         reg[REG_C_FLAG] = (value >> 31) & 0x01;
1769       else
1770         reg[REG_C_FLAG] = 0;
1771
1772       value = 0;
1773     }
1774     else
1775     {
1776       reg[REG_C_FLAG] = (value >> (shift - 1)) & 0x01;
1777       value >>= shift;
1778     }
1779   }
1780
1781   calculate_flags_logic(value);
1782   return value;
1783 }
1784
1785 u32 function_cc execute_asr_reg_op(u32 value, u32 shift)
1786 {
1787   if(shift != 0)
1788   {
1789     if(shift > 31)
1790     {
1791       value = (s32)value >> 31;
1792       reg[REG_C_FLAG] = value & 0x01;
1793     }
1794     else
1795     {
1796       reg[REG_C_FLAG] = (value >> (shift - 1)) & 0x01;
1797       value = (s32)value >> shift;
1798     }
1799   }
1800
1801   calculate_flags_logic(value);
1802   return value;
1803 }
1804
1805 u32 function_cc execute_ror_reg_op(u32 value, u32 shift)
1806 {
1807   if(shift != 0)
1808   {
1809     reg[REG_C_FLAG] = (value >> (shift - 1)) & 0x01;
1810     ror(value, value, shift);
1811   }
1812
1813   calculate_flags_logic(value);
1814   return value;
1815 }
1816
1817 u32 function_cc execute_lsl_imm_op(u32 value, u32 shift)
1818 {
1819   if(shift != 0)
1820   {
1821     reg[REG_C_FLAG] = (value >> (32 - shift)) & 0x01;
1822     value <<= shift;
1823   }
1824
1825   calculate_flags_logic(value);
1826   return value;
1827 }
1828
1829 u32 function_cc execute_lsr_imm_op(u32 value, u32 shift)
1830 {
1831   if(shift != 0)
1832   {
1833     reg[REG_C_FLAG] = (value >> (shift - 1)) & 0x01;
1834     value >>= shift;
1835   }
1836   else
1837   {
1838     reg[REG_C_FLAG] = value >> 31;
1839     value = 0;
1840   }
1841
1842   calculate_flags_logic(value);
1843   return value;
1844 }
1845
1846 u32 function_cc execute_asr_imm_op(u32 value, u32 shift)
1847 {
1848   if(shift != 0)
1849   {
1850     reg[REG_C_FLAG] = (value >> (shift - 1)) & 0x01;
1851     value = (s32)value >> shift;
1852   }
1853   else
1854   {
1855     value = (s32)value >> 31;
1856     reg[REG_C_FLAG] = value & 0x01;
1857   }
1858
1859   calculate_flags_logic(value);
1860   return value;
1861 }
1862
1863 u32 function_cc execute_ror_imm_op(u32 value, u32 shift)
1864 {
1865   if(shift != 0)
1866   {
1867     reg[REG_C_FLAG] = (value >> (shift - 1)) & 0x01;
1868     ror(value, value, shift);
1869   }
1870   else
1871   {
1872     u32 c_flag = reg[REG_C_FLAG];
1873     reg[REG_C_FLAG] = value & 0x01;
1874     value = (value >> 1) | (c_flag << 31);
1875   }
1876
1877   calculate_flags_logic(value);
1878   return value;
1879 }
1880
/* Operand loaders for the shift helpers: register form shifts rd by rs, */
/* immediate form shifts rs by imm.                                      */
#define generate_shift_load_operands_reg()                                    \
  generate_load_reg(a0, rd);                                                  \
  generate_load_reg(a1, rs)                                                   \

#define generate_shift_load_operands_imm()                                    \
  generate_load_reg(a0, rs);                                                  \
  generate_load_imm(a1, imm)                                                  \

/* Emit a Thumb shift: load operands, call execute_<op>_<form>_op        */
/* (defined above), store the result in rd.                              */
#define thumb_shift(decode_type, op_type, value_type)                         \
{                                                                             \
  thumb_decode_##decode_type();                                               \
  generate_shift_load_operands_##value_type();                                \
  generate_function_call(execute_##op_type##_##value_type##_op);              \
  generate_store_reg(rv, rd);                                                 \
}                                                                             \
1896
1897 // Operation types: imm, mem_reg, mem_imm
1898
/* Perform the access once the address is in a0: loads store the result  */
/* into reg_rd; stores pass the value in a1 and pc + 2 in a2.            */
#define thumb_access_memory_load(mem_type, reg_rd)                            \
  cycle_count += 2;                                                           \
  generate_function_call(execute_load_##mem_type);                            \
  generate_store_reg(rv, reg_rd)                                              \

#define thumb_access_memory_store(mem_type, reg_rd)                           \
  cycle_count++;                                                              \
  generate_load_reg(a1, reg_rd);                                              \
  generate_load_pc(a2, (pc + 2));                                             \
  generate_function_call(execute_store_##mem_type)                            \

/* Address generators: put the effective address in a0. */
#define thumb_access_memory_generate_address_pc_relative(offset, _rb, _ro)    \
  generate_load_pc(a0, (offset))                                              \

/* SP-relative: _rb + offset * 4 (offset is in words). */
#define thumb_access_memory_generate_address_reg_imm_sp(offset, _rb, _ro)     \
  generate_load_reg(a0, _rb);                                                 \
  generate_add_imm(a0, (offset * 4))                                          \

/* Base + byte offset. */
#define thumb_access_memory_generate_address_reg_imm(offset, _rb, _ro)        \
  generate_load_reg(a0, _rb);                                                 \
  generate_add_imm(a0, (offset))                                              \

/* Base + index register. */
#define thumb_access_memory_generate_address_reg_reg(offset, _rb, _ro)        \
  generate_load_reg(a0, _rb);                                                 \
  generate_load_reg(a1, _ro);                                                 \
  generate_add(a0, a1)                                                        \

/* Top-level Thumb load/store emitter: decode, form the address, then    */
/* perform the access.                                                   */
#define thumb_access_memory(access_type, op_type, _rd, _rb, _ro,              \
 address_type, offset, mem_type)                                              \
{                                                                             \
  thumb_decode_##op_type();                                                   \
  thumb_access_memory_generate_address_##address_type(offset, _rb, _ro);      \
  thumb_access_memory_##access_type(mem_type, _rd);                           \
}                                                                             \
1933
/* Pre-adjust the transfer base in s0 before the loop; push_lr reserves  */
/* one extra word for LR.                                                */
#define thumb_block_address_preadjust_up()                                    \
  generate_add_imm(s0, (bit_count[reg_list] * 4))                             \

#define thumb_block_address_preadjust_down()                                  \
  generate_sub_imm(s0, (bit_count[reg_list] * 4))                             \

#define thumb_block_address_preadjust_push_lr()                               \
  generate_sub_imm(s0, ((bit_count[reg_list] + 1) * 4))                       \

#define thumb_block_address_preadjust_no()                                    \

/* Post-adjust: write the final base value back to base_reg. The _no     */
/* and _push_lr forms store s0 as-is (any adjustment already happened    */
/* in the preadjust step).                                               */
#define thumb_block_address_postadjust_no(base_reg)                           \
  generate_store_reg(s0, base_reg)                                            \

#define thumb_block_address_postadjust_up(base_reg)                           \
  generate_add_reg_reg_imm(a0, s0, (bit_count[reg_list] * 4));                \
  generate_store_reg(a0, base_reg)                                            \

#define thumb_block_address_postadjust_down(base_reg)                         \
  generate_mov(a0, s0);                                                       \
  generate_sub_imm(a0, (bit_count[reg_list] * 4));                            \
  generate_store_reg(a0, base_reg)                                            \

/* pop_pc accounts for the extra PC word popped from the stack. */
#define thumb_block_address_postadjust_pop_pc(base_reg)                       \
  generate_add_reg_reg_imm(a0, s0, ((bit_count[reg_list] + 1) * 4));          \
  generate_store_reg(a0, base_reg)                                            \

#define thumb_block_address_postadjust_push_lr(base_reg)                      \
  generate_store_reg(s0, base_reg)                                            \
1963
/* Extra work after the register loop; only pop_pc and push_lr have any. */
#define thumb_block_memory_extra_no()                                         \

#define thumb_block_memory_extra_up()                                         \

#define thumb_block_memory_extra_down()                                       \

/* POP {..., pc}: load the PC word after the listed registers and take   */
/* an indirect Thumb branch to it.                                       */
#define thumb_block_memory_extra_pop_pc()                                     \
  generate_add_reg_reg_imm(a0, s0, (bit_count[reg_list] * 4));                \
  generate_function_call(execute_aligned_load32);                             \
  generate_store_reg(rv, REG_PC);                                             \
  generate_mov(a0, rv);                                                       \
  generate_indirect_branch_cycle_update(thumb)                                \

/* PUSH {..., lr}: store LR in the slot after the listed registers. */
#define thumb_block_memory_extra_push_lr(base_reg)                            \
  generate_add_reg_reg_imm(a0, s0, (bit_count[reg_list] * 4));                \
  generate_load_reg(a1, REG_LR);                                              \
  generate_function_call(execute_aligned_store32)                             \

/* Per-register transfer bodies (address already in a0, register i). */
#define thumb_block_memory_load()                                             \
  generate_function_call(execute_aligned_load32);                             \
  generate_store_reg(rv, i)                                                   \

#define thumb_block_memory_store()                                            \
  generate_load_reg(a1, i);                                                   \
  generate_function_call(execute_aligned_store32)                             \

/* Variants for the last register in the list. The final store passes    */
/* pc + 2 and uses execute_store_u32.                                    */
#define thumb_block_memory_final_load()                                       \
  thumb_block_memory_load()                                                   \

#define thumb_block_memory_final_store()                                      \
  generate_load_reg(a1, i);                                                   \
  generate_load_pc(a2, (pc + 2));                                             \
  generate_function_call(execute_store_u32)                                   \

/* Dispatch the final transfer by post_op; push_lr/pop_pc use the        */
/* non-final body since their last real access is not the last store.    */
#define thumb_block_memory_final_no(access_type)                              \
  thumb_block_memory_final_##access_type()                                    \

#define thumb_block_memory_final_up(access_type)                              \
  thumb_block_memory_final_##access_type()                                    \

#define thumb_block_memory_final_down(access_type)                           \
  thumb_block_memory_final_##access_type()                                    \

#define thumb_block_memory_final_push_lr(access_type)                         \
  thumb_block_memory_##access_type()                                          \

#define thumb_block_memory_final_pop_pc(access_type)                          \
  thumb_block_memory_##access_type()                                          \
2012
/* Emit a Thumb block transfer (LDMIA/STMIA/PUSH/POP). Walks the 8-bit   */
/* reg_list at translation time, emitting one aligned load/store per set */
/* bit from the word-aligned base in s0; the last register goes through  */
/* the _final_ variant and post_op handles PC/LR extras and writeback.   */
#define thumb_block_memory(access_type, pre_op, post_op, base_reg)            \
{                                                                             \
  thumb_decode_rlist();                                                       \
  u32 i;                                                                      \
  u32 offset = 0;                                                             \
                                                                              \
  generate_load_reg(s0, base_reg);                                            \
  generate_and_imm(s0, ~0x03);                                                \
  thumb_block_address_preadjust_##pre_op();                                   \
  thumb_block_address_postadjust_##post_op(base_reg);                         \
                                                                              \
  for(i = 0; i < 8; i++)                                                      \
  {                                                                           \
    if((reg_list >> i) & 0x01)                                                \
    {                                                                         \
      cycle_count++;                                                          \
      generate_add_reg_reg_imm(a0, s0, offset)                                \
      if(reg_list & ~((2 << i) - 1))                                          \
      {                                                                       \
        thumb_block_memory_##access_type();                                   \
        offset += 4;                                                          \
      }                                                                       \
      else                                                                    \
      {                                                                       \
        thumb_block_memory_final_##post_op(access_type);                      \
      }                                                                       \
    }                                                                         \
  }                                                                           \
                                                                              \
  thumb_block_memory_extra_##post_op();                                       \
}                                                                             \
2044
2045
/* Emit a conditional Thumb branch: evaluate the condition, emit the     */
/* exit branch for the current block-exit entry, patch the conditional   */
/* jump over it, and advance block_exit_position.                        */
#define thumb_conditional_branch(condition)                                   \
{                                                                             \
  condition_check_type condition_check = CONDITION_TRUE;                      \
  generate_cycle_update();                                                    \
  generate_condition_##condition(a0, a1);                                     \
  generate_conditional_branch_type(a0, a1);                                   \
  generate_branch_no_cycle_update(                                            \
   block_exits[block_exit_position].branch_source,                            \
   block_exits[block_exit_position].branch_target);                           \
  generate_branch_patch_conditional(backpatch_address, translation_ptr);      \
  block_exit_position++;                                                      \
}                                                                             \
2058
/* Declare locals for the flag-setting ALU bodies: dest plus             */
/* single-evaluation copies of the two source expressions.               */
#define flags_vars(src_a, src_b)                                              \
  u32 dest;                                                                   \
  const u32 _sa = src_a;                                                      \
  const u32 _sb = src_b                                                       \
2063
/* Generates execute_<name> (no flags) and execute_<name>s (sets N/Z)    */
/* for a two-operand logic/multiply expression over rm/rn. The flag      */
/* variant returns the already-computed dest; the original evaluated     */
/* expr a second time in the return statement.                           */
#define data_proc_generate_logic_function(name, expr)                         \
u32 function_cc execute_##name(u32 rm, u32 rn)                                \
{                                                                             \
  return expr;                                                                \
}                                                                             \
                                                                              \
u32 function_cc execute_##name##s(u32 rm, u32 rn)                             \
{                                                                             \
  u32 dest = expr;                                                            \
  calculate_z_flag(dest);                                                     \
  calculate_n_flag(dest);                                                     \
  return dest;                                                                \
}                                                                             \
2077
/* Unary counterpart of the above (MOV/MVN): execute_<name> and the      */
/* N/Z-setting execute_<name>s. Returns dest rather than re-evaluating   */
/* expr as the original did.                                             */
#define data_proc_generate_logic_unary_function(name, expr)                   \
u32 function_cc execute_##name(u32 rm)                                        \
{                                                                             \
  return expr;                                                                \
}                                                                             \
                                                                              \
u32 function_cc execute_##name##s(u32 rm)                                     \
{                                                                             \
  u32 dest = expr;                                                            \
  calculate_z_flag(dest);                                                     \
  calculate_n_flag(dest);                                                     \
  return dest;                                                                \
}                                                                             \
2091
2092
/* Generates execute_<name> / execute_<name>s for a subtraction          */
/* (src_a - src_b); the flag variant sets N/Z/C/V via                    */
/* calculate_flags_sub using single-evaluation copies of the operands.   */
#define data_proc_generate_sub_function(name, src_a, src_b)                   \
u32 function_cc execute_##name(u32 rm, u32 rn)                                \
{                                                                             \
  return (src_a) - (src_b);                                                   \
}                                                                             \
                                                                              \
u32 function_cc execute_##name##s(u32 rm, u32 rn)                             \
{                                                                             \
  flags_vars(src_a, src_b);                                                   \
  dest = _sa - _sb;                                                           \
  calculate_flags_sub(dest, _sa, _sb);                                        \
  return dest;                                                                \
}                                                                             \

/* Addition counterpart (src_a + src_b, flags via calculate_flags_add). */
#define data_proc_generate_add_function(name, src_a, src_b)                   \
u32 function_cc execute_##name(u32 rm, u32 rn)                                \
{                                                                             \
  return (src_a) + (src_b);                                                   \
}                                                                             \
                                                                              \
u32 function_cc execute_##name##s(u32 rm, u32 rn)                             \
{                                                                             \
  flags_vars(src_a, src_b);                                                   \
  dest = _sa + _sb;                                                           \
  calculate_flags_add(dest, _sa, _sb);                                        \
}                                                                             \

/* Flag-only variants for comparison ops (CMP/CMN): no value returned. */
#define data_proc_generate_sub_test_function(name, src_a, src_b)              \
void function_cc execute_##name(u32 rm, u32 rn)                               \
{                                                                             \
  flags_vars(src_a, src_b);                                                   \
  dest = _sa - _sb;                                                           \
  calculate_flags_sub(dest, _sa, _sb);                                        \
}                                                                             \

#define data_proc_generate_add_test_function(name, src_a, src_b)              \
void function_cc execute_##name(u32 rm, u32 rn)                               \
{                                                                             \
  flags_vars(src_a, src_b);                                                   \
  dest = _sa + _sb;                                                           \
  calculate_flags_add(dest, _sa, _sb);                                        \
}                                                                             \

/* Flag-only logic test (TST/TEQ): sets N/Z from expr. */
#define data_proc_generate_logic_test_function(name, expr)                    \
void function_cc execute_##name(u32 rm, u32 rn)                               \
{                                                                             \
  u32 dest = expr;                                                            \
  calculate_z_flag(dest);                                                     \
  calculate_n_flag(dest);                                                     \
}                                                                             \
2144
2145 u32 function_cc execute_neg(u32 rm)                                           \
2146 {                                                                             \
2147   u32 dest = 0 - rm;                                                          \
2148   calculate_flags_sub(dest, 0, rm);                                           \
2149   return dest;                                                                \
2150 }                                                                             \
2151
// Execute functions
// Instantiate the interpreter fallbacks for every data-processing op.
// Each macro above expands to execute_<name> (non-flag) and, where defined,
// execute_<name>s (flag-setting) variants taking (rm, rn).

data_proc_generate_logic_function(and, rn & rm);
data_proc_generate_logic_function(eor, rn ^ rm);
data_proc_generate_logic_function(orr, rn | rm);
data_proc_generate_logic_function(bic, rn & (~rm));
data_proc_generate_logic_function(mul, rn * rm);
data_proc_generate_logic_unary_function(mov, rm);
data_proc_generate_logic_unary_function(mvn, ~rm);

// Carry-using variants fold the cached C flag into an operand:
// SBC subtracts an extra 1 when C is clear; RSC/ADC add C directly.
data_proc_generate_sub_function(sub, rn, rm);
data_proc_generate_sub_function(rsb, rm, rn);
data_proc_generate_sub_function(sbc, rn, (rm + (reg[REG_C_FLAG] ^ 1)));
data_proc_generate_sub_function(rsc, (rm + reg[REG_C_FLAG] - 1), rn);
data_proc_generate_add_function(add, rn, rm);
data_proc_generate_add_function(adc, rn, rm + reg[REG_C_FLAG]);

// Compare/test ops: update flags only, no result written back.
data_proc_generate_logic_test_function(tst, rn & rm);
data_proc_generate_logic_test_function(teq, rn ^ rm);
data_proc_generate_sub_test_function(cmp, rn, rm);
data_proc_generate_add_test_function(cmn, rn, rm);
2173
// Called from translated code when a SWI is not HLE-handled: performs the
// mode switch the SWI exception would. 'pc' is the return address for the
// BIOS handler. Order matters: flags must be collapsed into REG_CPSR
// before it is copied into SPSR_svc.
static void function_cc execute_swi(u32 pc)
{
  reg_mode[MODE_SUPERVISOR][6] = pc;   // LR_svc = return address
  collapse_flags();                    // fold cached flag vars into REG_CPSR
  spsr[MODE_SUPERVISOR] = reg[REG_CPSR];
  // Clear mode bits (0x1F) and Thumb bit (0x20), enter supervisor (0x13).
  reg[REG_CPSR] = (reg[REG_CPSR] & ~0x3F) | 0x13;
  set_cpu_mode(MODE_SUPERVISOR);
}
2182
// Opens a conditional ARM instruction: emits the condition test and a
// conditional branch that skips the instruction body when the condition
// fails. The matching close is emitted elsewhere (not visible here).
#define arm_conditional_block_header()                                        \
{                                                                             \
  condition_check_type condition_check = CONDITION_TRUE;                      \
  generate_condition(a0, a1);                                                 \
  generate_conditional_branch_type(a0, a1);                                   \
}
2189
// ARM B: plain direct branch (target resolution handled by generate_branch).
#define arm_b()                                                               \
  generate_branch()                                                           \
2192
// ARM BL: store the return address (pc + 4) into LR, then branch.
#define arm_bl()                                                              \
  generate_update_pc((pc + 4));                                               \
  generate_store_reg(a0, REG_LR);                                             \
  generate_branch()                                                           \
2197
// ARM BX: indirect branch through rn; the 'dual' handler selects ARM or
// Thumb mode from bit 0 of the target address.
#define arm_bx()                                                              \
  arm_decode_branchx();                                                       \
  generate_load_reg(a0, rn);                                                  \
  generate_indirect_branch_dual();                                            \
2202
// ARM SWI: first try the HLE handler (which breaks out of the enclosing
// translation context if it emits one); otherwise call execute_swi with the
// return pc (pc + 4) and branch to the handler.
#define arm_swi()                                                             \
  generate_swi_hle_handler((opcode >> 16) & 0xFF);                            \
  generate_update_pc((pc + 4));                                               \
  generate_function_call(execute_swi);                                        \
  generate_branch()                                                           \
2208
// Thumb B: direct branch using the pre-scanned block exit table; consumes
// one block_exits entry and accounts cycles at the branch.
#define thumb_b()                                                             \
  generate_branch_cycle_update(                                               \
   block_exits[block_exit_position].branch_source,                            \
   block_exits[block_exit_position].branch_target);                           \
  block_exit_position++                                                       \
2214
// Thumb BL (resolved as a direct branch): LR = (pc + 2) | 1 -- bit 0 set
// marks Thumb state for a later BX return -- then branch via the block
// exit table, consuming one entry.
#define thumb_bl()                                                            \
  generate_update_pc(((pc + 2) | 0x01));                                      \
  generate_store_reg(a0, REG_LR);                                             \
  generate_branch_cycle_update(                                               \
   block_exits[block_exit_position].branch_source,                            \
   block_exits[block_exit_position].branch_target);                           \
  block_exit_position++                                                       \
2222
// Thumb BL second half (BLH): target = old LR + (offset * 2); new LR is
// set to (pc + 2) | 1 before the old LR value is consumed. Ends with an
// indirect Thumb branch.
#define thumb_blh()                                                           \
{                                                                             \
  thumb_decode_branch();                                                      \
  generate_update_pc(((pc + 2) | 0x01));                                      \
  generate_load_reg(a1, REG_LR);     /* read old LR before overwriting */     \
  generate_store_reg(a0, REG_LR);                                             \
  generate_mov(a0, a1);                                                       \
  generate_add_imm(a0, (offset * 2));                                         \
  generate_indirect_branch_cycle_update(thumb);                               \
}                                                                             \
2233
// Thumb BX: indirect branch through rs (pc-relative reads use pc + 4);
// 'dual' selects ARM or Thumb mode from bit 0 of the target.
#define thumb_bx()                                                            \
{                                                                             \
  thumb_decode_hireg_op();                                                    \
  generate_load_reg_pc(a0, rs, 4);                                            \
  generate_indirect_branch_cycle_update(dual);                                \
}                                                                             \
2240
// Thumb SWI: same shape as arm_swi but with a 2-byte return offset, and the
// handler entry is taken from the block exit table (one entry consumed).
#define thumb_swi()                                                           \
  generate_swi_hle_handler(opcode & 0xFF);                                    \
  generate_update_pc((pc + 2));                                               \
  generate_function_call(execute_swi);                                        \
  generate_branch_cycle_update(                                               \
   block_exits[block_exit_position].branch_source,                            \
   block_exits[block_exit_position].branch_target);                           \
  block_exit_position++                                                       \
2249
2250 u8 swi_hle_handle[256] =
2251 {
2252   0x0,    // SWI 0:  SoftReset
2253   0x0,    // SWI 1:  RegisterRAMReset
2254   0x0,    // SWI 2:  Halt
2255   0x0,    // SWI 3:  Stop/Sleep
2256   0x0,    // SWI 4:  IntrWait
2257   0x0,    // SWI 5:  VBlankIntrWait
2258   0x1,    // SWI 6:  Div
2259   0x0,    // SWI 7:  DivArm
2260   0x0,    // SWI 8:  Sqrt
2261   0x0,    // SWI 9:  ArcTan
2262   0x0,    // SWI A:  ArcTan2
2263   0x0,    // SWI B:  CpuSet
2264   0x0,    // SWI C:  CpuFastSet
2265   0x0,    // SWI D:  GetBIOSCheckSum
2266   0x0,    // SWI E:  BgAffineSet
2267   0x0,    // SWI F:  ObjAffineSet
2268   0x0,    // SWI 10: BitUnpack
2269   0x0,    // SWI 11: LZ77UnCompWram
2270   0x0,    // SWI 12: LZ77UnCompVram
2271   0x0,    // SWI 13: HuffUnComp
2272   0x0,    // SWI 14: RLUnCompWram
2273   0x0,    // SWI 15: RLUnCompVram
2274   0x0,    // SWI 16: Diff8bitUnFilterWram
2275   0x0,    // SWI 17: Diff8bitUnFilterVram
2276   0x0,    // SWI 18: Diff16bitUnFilter
2277   0x0,    // SWI 19: SoundBias
2278   0x0,    // SWI 1A: SoundDriverInit
2279   0x0,    // SWI 1B: SoundDriverMode
2280   0x0,    // SWI 1C: SoundDriverMain
2281   0x0,    // SWI 1D: SoundDriverVSync
2282   0x0,    // SWI 1E: SoundChannelClear
2283   0x0,    // SWI 1F: MidiKey2Freq
2284   0x0,    // SWI 20: SoundWhatever0
2285   0x0,    // SWI 21: SoundWhatever1
2286   0x0,    // SWI 22: SoundWhatever2
2287   0x0,    // SWI 23: SoundWhatever3
2288   0x0,    // SWI 24: SoundWhatever4
2289   0x0,    // SWI 25: MultiBoot
2290   0x0,    // SWI 26: HardReset
2291   0x0,    // SWI 27: CustomHalt
2292   0x0,    // SWI 28: SoundDriverVSyncOff
2293   0x0,    // SWI 29: SoundDriverVSyncOn
2294   0x0     // SWI 2A: SoundGetJumpList
2295 };
2296
2297 void function_cc swi_hle_div()
2298 {
2299   s32 result = (s32)reg[0] / (s32)reg[1];
2300   reg[1] = (s32)reg[0] % (s32)reg[1];
2301   reg[0] = result;
2302   reg[3] = (result ^ (result >> 31)) - (result >> 31);
2303 }
2304
// If the SWI number has a nonzero entry in swi_hle_handle, emit a call to
// its native implementation instead of the BIOS path. NOTE: the 'break'
// exits an enclosing switch/loop in the expansion site, skipping the
// regular SWI emission that follows this macro -- it only works when
// expanded inside such a context (arm_swi / thumb_swi).
#define generate_swi_hle_handler(_swi_number)                                 \
{                                                                             \
  u32 swi_number = _swi_number;                                               \
  if(swi_hle_handle[swi_number])                                              \
  {                                                                           \
    /* Div */                                                                 \
    if(swi_number == 0x06)                                                    \
    {                                                                         \
      generate_function_call(swi_hle_div);                                    \
    }                                                                         \
    break;                                                                    \
  }                                                                           \
}                                                                             \
2318
// Emits a translation gate: sync the emulated pc, then take an indirect
// branch of the given type (arm/thumb/dual) without charging cycles.
#define generate_translation_gate(type)                                       \
  generate_update_pc(pc);                                                     \
  generate_indirect_branch_no_cycle_update(type)                              \
2322
// Emits a per-instruction debug hook: calls step_debug_x86 with the
// current emulated pc as its argument.
#define generate_step_debug()                                                 \
  generate_load_imm(a0, pc);                                                  \
  generate_function_call(step_debug_x86)                                      \
2326
2327 #endif