/* Load a sign-extended 8-bit immediate into register r.
 * Negative values (bit 7 set) are emitted as MVN of the complemented low
 * byte, so the register ends up sign-extended to 32 bits; positive values
 * use a plain MOV. Assumes imm fits in 8 bits (caller's contract).
 * do/while(0) makes the macro a single statement, safe in if/else bodies. */
#define emith_move_r_imm_s8(r, imm) do { \
	if ((imm) & 0x80) \
		EOP_MVN_IMM(r, 0, ((imm) ^ 0xff)); \
	else \
		EOP_MOV_IMM(r, 0, (imm)); \
} while (0)
+
/* d = s & imm — 32-bit bitwise AND with an immediate, unconditional, flags untouched. */
#define emith_and_r_r_imm(d, s, imm) \
	emith_op_imm2(A_COND_AL, 0, A_OP_AND, (d), (s), (imm))
+
/* d = s + imm — 32-bit add with an immediate, unconditional, flags untouched. */
#define emith_add_r_r_imm(d, s, imm) \
	emith_op_imm2(A_COND_AL, 0, A_OP_ADD, (d), (s), (imm))
+
/* Pointer-width add — alias of the 32-bit add (pointer width presumably
 * equals word width on this 32-bit ARM target). */
#define emith_add_r_r_ptr_imm(d, s, imm) \
	emith_add_r_r_imm((d), (s), (imm))
+
/* d = s - imm — 32-bit subtract with an immediate, unconditional, flags untouched. */
#define emith_sub_r_r_imm(d, s, imm) \
	emith_op_imm2(A_COND_AL, 0, A_OP_SUB, (d), (s), (imm))
+
/* d = -s, emitted as RSB d, s, #0 (reverse subtract: 0 - s). */
#define emith_neg_r_r(d, s) \
	EOP_RSB_IMM((d), (s), 0, 0)
+