+static int scan_for_reg_clear(int i, int reg)
+{
+ while (i >= 0) {
+ if (g_labels[i][0] != 0) {
+ if (g_label_refs[i].next != NULL)
+ return -1;
+ if (i > 0 && LAST_OP(i - 1)) {
+ i = g_label_refs[i].i;
+ continue;
+ }
+ return -1;
+ }
+ i--;
+
+ if (ops[i].op == OP_XOR
+ && ops[i].operand[0].lmod == OPLM_DWORD
+ && ops[i].operand[0].reg == ops[i].operand[1].reg
+ && ops[i].operand[0].reg == reg)
+ return i;
+
+ if (ops[i].regmask_dst & (1 << reg))
+ return -1;
+ if (g_labels[i][0] != 0)
+ return -1;
+ }
+
+ return -1;
+}
+
// scan for positive, constant esp adjust
// Scans forward from op @i for the stack cleanup that follows a call:
// either an explicit "add esp, const" or an equivalent run of pops.
// Pushes seen along the way subtract from the running total.
// On success returns the index of the adjusting op with the byte count
// stored in *adj; returns -1 if no adjust is found before a label,
// a non-call branch or the end of the scan window.
static int scan_for_esp_adjust(int i, int opcnt, int *adj)
{
  struct parsed_op *po;
  int first_pop = -1;  // first pop index; -2 once a push was seen first
  *adj = 0;

  for (; i < opcnt; i++) {
    po = &ops[i];

    // a label means other flow may join here - stop scanning
    if (g_labels[i][0] != 0)
      break;

    if (po->op == OP_ADD && po->operand[0].reg == xSP) {
      if (po->operand[1].type != OPT_CONST)
        ferr(&ops[i], "non-const esp adjust?\n");
      *adj += po->operand[1].val;
      if (*adj & 3)
        ferr(&ops[i], "unaligned esp adjust: %x\n", *adj);
      return i;
    }
    else if (po->op == OP_PUSH) {
      if (first_pop == -1)
        first_pop = -2; // none
      *adj -= lmod_bytes(po, po->operand[0].lmod);
    }
    else if (po->op == OP_POP) {
      if (first_pop == -1)
        first_pop = i;
      *adj += lmod_bytes(po, po->operand[0].lmod);
    }
    else if (po->flags & (OPF_JMP|OPF_TAIL)) {
      // only direct calls may be scanned through
      if (po->op != OP_CALL)
        break;
      if (po->operand[0].type != OPT_LABEL)
        break;
      // TODO: should only allow combining __cdecl calls..
    }
  }

  // no explicit "add esp"; a single dword pop into ecx also counts
  if (*adj == 4 && first_pop >= 0 && ops[first_pop].op == OP_POP
    && ops[first_pop].operand[0].type == OPT_REG
    && ops[first_pop].operand[0].reg == xCX)
  {
    // probably 'pop ecx' was used..
    return first_pop;
  }

  return -1;
}
+
+static void scan_fwd_set_flags(int i, int opcnt, int magic, int flags)
+{
+ struct parsed_op *po;
+ int j;
+
+ if (i < 0)
+ ferr(ops, "%s: followed bad branch?\n", __func__);
+
+ for (; i < opcnt; i++) {
+ po = &ops[i];
+ if (po->cc_scratch == magic)
+ return;
+ po->cc_scratch = magic;
+ po->flags |= flags;
+
+ if ((po->flags & OPF_JMP) && po->op != OP_CALL) {
+ if (po->btj != NULL) {
+ // jumptable
+ for (j = 0; j < po->btj->count; j++)
+ scan_fwd_set_flags(po->btj->d[j].bt_i, opcnt, magic, flags);
+ return;
+ }
+
+ scan_fwd_set_flags(po->bt_i, opcnt, magic, flags);
+ if (!(po->flags & OPF_CC))
+ return;
+ }
+ if (po->flags & OPF_TAIL)
+ return;
+ }
+}
+
// Try to recover a function prototype for an indirect call whose
// target comes from operand @opr of op @po.
// Two cases are handled: the target is read from a stack argument of
// the current function (that argument must be declared as a function
// pointer), or it is a direct offset/label that can be looked up in
// the parsed header.  Register-passed args in the recovered prototype
// are not supported yet and abort via ferr().
// Returns the prototype, or NULL when @opr is neither case.
static const struct parsed_proto *try_recover_pp(
  struct parsed_op *po, const struct parsed_opr *opr)
{
  const struct parsed_proto *pp = NULL;

  // maybe an arg of g_func?
  if (opr->type == OPT_REGMEM && is_stack_access(po, opr))
  {
    char ofs_reg[16] = { 0, };
    int arg, arg_s, arg_i;
    int stack_ra = 0;
    int offset = 0;

    parse_stack_access(po, opr->name, ofs_reg,
      &offset, &stack_ra, NULL);
    if (ofs_reg[0] != 0)
      ferr(po, "offset reg on arg access?\n");
    if (offset <= stack_ra)
      ferr(po, "stack var call unhandled yet\n");

    // stack args start just above the return address, 4 bytes each
    arg_i = (offset - stack_ra - 4) / 4;
    // map stack-arg slot arg_i to prototype index, skipping reg args
    for (arg = arg_s = 0; arg < g_func_pp->argc; arg++) {
      if (g_func_pp->arg[arg].reg != NULL)
        continue;
      if (arg_s == arg_i)
        break;
      arg_s++;
    }
    if (arg == g_func_pp->argc)
      ferr(po, "stack arg %d not in prototype?\n", arg_i);

    pp = g_func_pp->arg[arg].fptr;
    if (pp == NULL)
      ferr(po, "icall sa: arg%d is not a fptr?\n", arg + 1);
    if (pp->argc_reg != 0)
      ferr(po, "icall sa: reg arg in arg-call unhandled yet\n");
  }
  else if (opr->type == OPT_OFFSET || opr->type == OPT_LABEL) {
    pp = proto_parse(g_fhdr, opr->name);
    if (pp == NULL)
      ferr(po, "proto_parse failed for icall from '%s'\n", opr->name);
    if (pp->argc_reg != 0)
      ferr(po, "arg-call unhandled yet for '%s'\n", opr->name);
  }

  return pp;
}
+
+static void scan_for_call_type(int i, struct parsed_opr *opr,
+ int magic, const struct parsed_proto **pp_found)
+{
+ const struct parsed_proto *pp = NULL;
+ struct parsed_op *po;
+ struct label_ref *lr;
+
+ while (i >= 0) {
+ if (ops[i].cc_scratch == magic)
+ return;
+ ops[i].cc_scratch = magic;
+
+ if (g_labels[i][0] != 0) {
+ lr = &g_label_refs[i];
+ for (; lr != NULL; lr = lr->next)
+ scan_for_call_type(lr->i, opr, magic, pp_found);
+ if (i > 0 && LAST_OP(i - 1))
+ return;
+ }
+ i--;
+
+ if (!(ops[i].flags & OPF_DATA))
+ continue;
+ if (!is_opr_modified(opr, &ops[i]))
+ continue;
+ if (ops[i].op != OP_MOV && ops[i].op != OP_LEA) {
+ // most probably trashed by some processing
+ *pp_found = NULL;
+ return;
+ }
+
+ opr = &ops[i].operand[1];
+ if (opr->type != OPT_REG)
+ break;
+ }
+
+ po = (i >= 0) ? &ops[i] : ops;
+
+ if (i < 0) {
+ // reached the top - can only be an arg-reg
+ if (opr->type != OPT_REG)
+ return;
+
+ for (i = 0; i < g_func_pp->argc; i++) {
+ if (g_func_pp->arg[i].reg == NULL)
+ continue;
+ if (IS(opr->name, g_func_pp->arg[i].reg))
+ break;
+ }
+ if (i == g_func_pp->argc)
+ return;
+ pp = g_func_pp->arg[i].fptr;
+ if (pp == NULL)
+ ferr(po, "icall: arg%d (%s) is not a fptr?\n",
+ i + 1, g_func_pp->arg[i].reg);
+ if (pp->argc_reg != 0)
+ ferr(po, "icall: reg arg in arg-call unhandled yet\n");
+ }
+ else
+ pp = try_recover_pp(po, opr);
+
+ if (*pp_found != NULL && pp != NULL) {
+ if (!IS((*pp_found)->ret_type.name, pp->ret_type.name)
+ || (*pp_found)->is_stdcall != pp->is_stdcall
+ || (*pp_found)->argc != pp->argc
+ || (*pp_found)->argc_reg != pp->argc_reg
+ || (*pp_found)->argc_stack != pp->argc_stack)
+ {
+ ferr(po, "icall: parsed_proto mismatch\n");
+ }
+ }
+ if (pp != NULL)
+ *pp_found = pp;
+}
+
+static const struct parsed_proto *resolve_call(int i, int opcnt)
+{
+ const struct parsed_proto *pp = NULL;
+
+ switch (ops[i].operand[0].type) {
+ case OPT_REGMEM:
+ case OPT_LABEL:
+ case OPT_OFFSET:
+ pp = try_recover_pp(&ops[i], &ops[i].operand[0]);
+ break;
+ default:
+ scan_for_call_type(i, &ops[i].operand[0], i + opcnt * 9, &pp);
+ break;
+ }
+
+ return pp;
+}
+
+static int collect_call_args(struct parsed_op *po, int i,
+ struct parsed_proto *pp, int *save_arg_vars, int arg,
+ int magic, int need_op_saving, int may_reuse)
+{
+ struct parsed_proto *pp_tmp;
+ struct label_ref *lr;
+ int need_to_save_current;
+ int ret = 0;
+ int j;
+
+ if (i < 0) {
+ ferr(po, "dead label encountered\n");
+ return -1;
+ }
+
+ for (; arg < pp->argc; arg++)
+ if (pp->arg[arg].reg == NULL)
+ break;
+ magic = (magic & 0xffffff) | (arg << 24);
+
+ for (j = i; j >= 0 && arg < pp->argc; )
+ {
+ if (((ops[j].cc_scratch ^ magic) & 0xffffff) == 0) {
+ if (ops[j].cc_scratch != magic) {
+ ferr(&ops[j], "arg collect hit same path with diff args for %s\n",
+ pp->name);
+ return -1;
+ }
+ // ok: have already been here
+ return 0;
+ }
+ ops[j].cc_scratch = magic;
+
+ if (g_labels[j][0] != 0 && g_label_refs[j].i != -1) {
+ lr = &g_label_refs[j];
+ if (lr->next != NULL)
+ need_op_saving = 1;
+ for (; lr->next; lr = lr->next) {
+ if ((ops[lr->i].flags & (OPF_JMP|OPF_CC)) != OPF_JMP)
+ may_reuse = 1;
+ ret = collect_call_args(po, lr->i, pp, save_arg_vars,
+ arg, magic, need_op_saving, may_reuse);
+ if (ret < 0)
+ return ret;
+ }
+
+ if ((ops[lr->i].flags & (OPF_JMP|OPF_CC)) != OPF_JMP)
+ may_reuse = 1;
+ if (j > 0 && LAST_OP(j - 1)) {
+ // follow last branch in reverse
+ j = lr->i;
+ continue;
+ }
+ need_op_saving = 1;
+ ret = collect_call_args(po, lr->i, pp, save_arg_vars,
+ arg, magic, need_op_saving, may_reuse);
+ if (ret < 0)
+ return ret;
+ }
+ j--;
+
+ if (ops[j].op == OP_CALL)
+ {
+ pp_tmp = ops[j].datap;
+ if (pp_tmp == NULL)
+ ferr(po, "arg collect hit unparsed call '%s'\n",
+ ops[j].operand[0].name);
+ if (may_reuse && pp_tmp->argc_stack > 0)
+ ferr(po, "arg collect %d/%d hit '%s' with %d stack args\n",
+ arg, pp->argc, opr_name(&ops[j], 0), pp_tmp->argc_stack);
+ }
+ else if (ops[j].op == OP_ADD && ops[j].operand[0].reg == xSP) {
+ ferr(po, "arg collect %d/%d hit esp adjust\n",
+ arg, pp->argc);
+ }
+ else if (ops[j].op == OP_POP) {
+ ferr(po, "arg collect %d/%d hit pop\n", arg, pp->argc);
+ }
+ else if ((ops[j].flags & (OPF_JMP|OPF_CC)) == (OPF_JMP|OPF_CC))
+ {
+ may_reuse = 1;
+ }
+ else if (ops[j].op == OP_PUSH && !(ops[j].flags & OPF_FARG))
+ {
+ pp->arg[arg].datap = &ops[j];
+ need_to_save_current = 0;
+ if (!need_op_saving) {
+ ret = scan_for_mod(&ops[j], j + 1, i);
+ need_to_save_current = (ret >= 0);
+ }
+ if (need_op_saving || need_to_save_current) {
+ // mark this push as one that needs operand saving
+ ops[j].flags &= ~OPF_RMD;
+ if (ops[j].argnum == 0) {
+ ops[j].argnum = arg + 1;
+ *save_arg_vars |= 1 << arg;
+ }
+ else if (ops[j].argnum < arg + 1)
+ ferr(&ops[j], "argnum conflict (%d<%d) for '%s'\n",
+ ops[j].argnum, arg + 1, pp->name);
+ }
+ else if (ops[j].argnum == 0)
+ ops[j].flags |= OPF_RMD;
+
+ // some PUSHes are reused by different calls on other branches,
+ // but that can't happen if we didn't branch, so they
+ // can be removed from future searches (handles nested calls)
+ if (!may_reuse)
+ ops[j].flags |= OPF_FARG;
+
+ // next arg
+ for (arg++; arg < pp->argc; arg++)
+ if (pp->arg[arg].reg == NULL)
+ break;
+ magic = (magic & 0xffffff) | (arg << 24);
+ }
+ }
+
+ if (arg < pp->argc) {
+ ferr(po, "arg collect failed for '%s': %d/%d\n",
+ pp->name, arg, pp->argc);
+ ret = -1;
+ }
+ return ret;
+}
+
+static void add_label_ref(struct label_ref *lr, int op_i)
+{
+ struct label_ref *lr_new;
+
+ if (lr->i == -1) {
+ lr->i = op_i;
+ return;
+ }
+
+ lr_new = calloc(1, sizeof(*lr_new));
+ lr_new->i = op_i;
+ lr_new->next = lr->next;
+ lr->next = lr_new;
+}
+
+static void output_std_flags(FILE *fout, struct parsed_op *po,
+ int *pfomask, const char *dst_opr_text)
+{
+ if (*pfomask & (1 << PFO_Z)) {
+ fprintf(fout, "\n cond_z = (%s%s == 0);",
+ lmod_cast_u(po, po->operand[0].lmod), dst_opr_text);
+ *pfomask &= ~(1 << PFO_Z);
+ }
+ if (*pfomask & (1 << PFO_S)) {
+ fprintf(fout, "\n cond_s = (%s%s < 0);",
+ lmod_cast_s(po, po->operand[0].lmod), dst_opr_text);
+ *pfomask &= ~(1 << PFO_S);
+ }
+}
+