Index: tcg/tcg-op.h
===================================================================
--- tcg/tcg-op.h	(revision 5617)
+++ tcg/tcg-op.h	(working copy)
@@ -499,6 +499,12 @@
     }
 }
 
+static inline void tcg_gen_setcond_i32(int cond, TCGv ret,
+                                       TCGv arg1, TCGv arg2)
+{
+    tcg_gen_op4i(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
+}
+
 static inline void tcg_gen_brcond_i32(int cond, TCGv arg1, TCGv arg2,
                                       int label_index)
 {
Index: tcg/tcg-opc.h
===================================================================
--- tcg/tcg-opc.h	(revision 5617)
+++ tcg/tcg-opc.h	(working copy)
@@ -76,6 +76,8 @@
 DEF2(shr_i32, 1, 2, 0, 0)
 DEF2(sar_i32, 1, 2, 0, 0)
 
+DEF2(setcond_i32, 1, 2, 1, 0)
+
 DEF2(brcond_i32, 0, 2, 2, TCG_OPF_BB_END | TCG_OPF_SIDE_EFFECTS)
 #if TCG_TARGET_REG_BITS == 32
 DEF2(add2_i32, 2, 4, 0, 0)
@@ -129,6 +131,8 @@
 DEF2(shr_i64, 1, 2, 0, 0)
 DEF2(sar_i64, 1, 2, 0, 0)
 
+DEF2(setcond_i64, 1, 2, 1, 0)
+
 DEF2(brcond_i64, 0, 2, 2, TCG_OPF_BB_END | TCG_OPF_SIDE_EFFECTS)
 #ifdef TCG_TARGET_HAS_ext8s_i64
 DEF2(ext8s_i64, 1, 1, 0, 0)
Index: tcg/x86_64/tcg-target.c
===================================================================
--- tcg/x86_64/tcg-target.c	(revision 5617)
+++ tcg/x86_64/tcg-target.c	(working copy)
@@ -198,6 +198,7 @@
 #define SHIFT_SHR 5
 #define SHIFT_SAR 7
 
+/* The following defines also apply to the setcc instruction. */
 #define JCC_JMP (-1)
 #define JCC_JO  0x0
 #define JCC_JNO 0x1
@@ -464,6 +465,27 @@
     }
 }
 
+/* TODO: should also work for setcond_i64, but that path is untested. */
+static void tcg_out_setcond(TCGContext *s, int cond,
+                            TCGArg ret, TCGArg arg1, TCGArg arg2, int rexw)
+{
+    /* Clear ret first, since setcc only writes the low 8 bits. */
+    tcg_out_modrm(s, 0x01 | (ARITH_XOR << 3) | rexw, ret, ret);
+    /* cmp arg1 with arg2 */
+    tcg_out_modrm(s, 0x01 | (ARITH_CMP << 3) | rexw, arg2, arg1);
+    /* setcc ret */
+    /* TODO: this should use tcg_out_modrm, but tcg_out_modrm currently
+       emits an extra byte for the byte registers [abcd]l:
+       tcg_out_modrm(s, (0x90 + tcg_cond_to_jcc[cond]) | P_EXT | P_REXB, ret, 0); */
+    if (ret > 7)
+        tcg_out8(s, 0x41);  /* REX.B: ret is one of r8-r15 */
+    else if (ret > 3)
+        tcg_out8(s, 0x40);  /* REX: make spl/bpl/sil/dil encodable */
+    tcg_out8(s, 0x0f);
+    tcg_out8(s, 0x90 + tcg_cond_to_jcc[cond]);
+    tcg_out8(s, 0xc0 + (ret & 7));
+}
+
 static void tcg_out_brcond(TCGContext *s, int cond,
                            TCGArg arg1, TCGArg arg2, int const_arg2,
                            int label_index, int rexw)
@@ -1065,6 +1087,14 @@
         c = SHIFT_SAR;
         goto gen_shift64;
 
+    case INDEX_op_setcond_i32:
+        tcg_out_setcond(s, args[3], args[0], args[1], args[2], 0);
+        break;
+
+    case INDEX_op_setcond_i64:
+        tcg_out_setcond(s, args[3], args[0], args[1], args[2], P_REXW);
+        break;
+
     case INDEX_op_brcond_i32:
         tcg_out_brcond(s, args[2], args[0], args[1], const_args[1],
                        args[3], 0);
@@ -1225,6 +1255,8 @@
     { INDEX_op_shr_i32, { "r", "0", "ci" } },
     { INDEX_op_sar_i32, { "r", "0", "ci" } },
 
+    { INDEX_op_setcond_i32, { "r", "r", "r" } },
+
     { INDEX_op_brcond_i32, { "r", "ri" } },
 
     { INDEX_op_mov_i64, { "r", "r" } },
@@ -1254,6 +1286,8 @@
     { INDEX_op_shr_i64, { "r", "0", "ci" } },
     { INDEX_op_sar_i64, { "r", "0", "ci" } },
 
+    { INDEX_op_setcond_i64, { "r", "r", "r" } },
+
     { INDEX_op_brcond_i64, { "r", "re" } },
 
     { INDEX_op_bswap_i32, { "r", "0" } },
Index: tcg/tcg.c
===================================================================
--- tcg/tcg.c	(revision 5617)
+++ tcg/tcg.c	(working copy)
@@ -877,6 +877,8 @@
 #elif TCG_TARGET_REG_BITS == 64
                     || c == INDEX_op_brcond_i64
 #endif
+                    || c == INDEX_op_setcond_i32
+                    || c == INDEX_op_setcond_i64
                     ) {
                 if (args[k] < ARRAY_SIZE(cond_name) && cond_name[args[k]])
                     fprintf(outfile, ",%s", cond_name[args[k++]]);
Index: target-arm/translate.c
===================================================================
--- target-arm/translate.c	(revision 5617)
+++ target-arm/translate.c	(working copy)
@@ -201,7 +201,6 @@
 
 #define gen_op_addl_T0_T1_cc() gen_helper_add_cc(cpu_T[0], cpu_T[0], cpu_T[1])
 #define gen_op_adcl_T0_T1_cc() gen_helper_adc_cc(cpu_T[0], cpu_T[0], cpu_T[1])
-#define gen_op_subl_T0_T1_cc() gen_helper_sub_cc(cpu_T[0], cpu_T[0], cpu_T[1])
 #define gen_op_sbcl_T0_T1_cc() gen_helper_sbc_cc(cpu_T[0], cpu_T[0], cpu_T[1])
 #define gen_op_rsbl_T0_T1_cc() gen_helper_sub_cc(cpu_T[0], cpu_T[1], cpu_T[0])
 #define gen_op_rscl_T0_T1_cc() gen_helper_sbc_cc(cpu_T[0], cpu_T[1], cpu_T[0])
@@ -243,6 +242,38 @@
     dead_tmp(tmp);
 }
 
+/* was: #define gen_op_subl_T0_T1_cc() gen_helper_sub_cc(cpu_T[0], cpu_T[0], cpu_T[1]) */
+static inline void gen_op_subl_T0_T1_cc(void)
+{
+    TCGv tmp32_res;
+    TCGv tmp1;
+    TCGv tmp2;
+
+    tmp32_res = new_tmp();
+    tcg_gen_sub_i32(tmp32_res, cpu_T[0], cpu_T[1]);
+    /* N and Z are evaluated lazily from the stored result.  */
+    tcg_gen_st_i32(tmp32_res, cpu_env, offsetof(CPUState, NF));
+    tcg_gen_st_i32(tmp32_res, cpu_env, offsetof(CPUState, ZF));
+
+    /* C is set when there is no borrow, i.e. T0 >= T1 unsigned.  */
+    tmp1 = new_tmp();
+    tcg_gen_setcond_i32(TCG_COND_GEU, tmp1, cpu_T[0], cpu_T[1]);
+    tcg_gen_st_i32(tmp1, cpu_env, offsetof(CPUState, CF));
+
+    /* V is the sign bit of (T0 ^ T1) & (T0 ^ result).  */
+    tmp2 = new_tmp();
+    tcg_gen_xor_i32(tmp1, cpu_T[0], cpu_T[1]);
+    tcg_gen_xor_i32(tmp2, cpu_T[0], tmp32_res);
+    tcg_gen_and_i32(tmp1, tmp1, tmp2);
+    dead_tmp(tmp2);
+    tcg_gen_st_i32(tmp1, cpu_env, offsetof(CPUState, VF));
+    dead_tmp(tmp1);
+
+    tcg_gen_mov_i32(cpu_T[0], tmp32_res);
+
+    dead_tmp(tmp32_res);
+}
+
 static void gen_smul_dual(TCGv a, TCGv b)
 {
     TCGv tmp1 = new_tmp();
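
For reference, here is a small standalone sanity check (not part of the patch, plain host C) of the two flag formulas that the new gen_op_subl_T0_T1_cc generates: C is the unsigned comparison T0 >= T1 (what TCG_COND_GEU computes), and V is the sign bit of (T0 ^ T1) & (T0 ^ result). The helper names (sub_cf, sub_vf) and the widened reference computation are made up for this sketch; only the formulas themselves come from the code above.

/* Standalone check of the SUB flag formulas used in gen_op_subl_T0_T1_cc.
 * Build with e.g.: gcc -O2 -o subflags subflags.c */
#include <stdint.h>
#include <inttypes.h>
#include <stdio.h>

/* C flag: no borrow, i.e. a >= b unsigned (TCG_COND_GEU). */
static uint32_t sub_cf(uint32_t a, uint32_t b)
{
    return a >= b;
}

/* V flag: sign bit of (a ^ b) & (a ^ (a - b)). */
static uint32_t sub_vf(uint32_t a, uint32_t b)
{
    uint32_t res = a - b;
    return ((a ^ b) & (a ^ res)) >> 31;
}

int main(void)
{
    static const uint32_t vals[] = {
        0, 1, 2, 0x7fffffff, 0x80000000, 0x80000001, 0xfffffffe, 0xffffffff
    };
    size_t i, j;

    for (i = 0; i < sizeof(vals) / sizeof(vals[0]); i++) {
        for (j = 0; j < sizeof(vals) / sizeof(vals[0]); j++) {
            uint32_t a = vals[i], b = vals[j];
            /* Reference: widen to 64 bits to observe the borrow, and use
             * signed arithmetic to detect overflow. */
            uint32_t ref_c = ((uint64_t)a - (uint64_t)b) <= 0xffffffffULL;
            int64_t sres = (int64_t)(int32_t)a - (int64_t)(int32_t)b;
            uint32_t ref_v = sres != (int32_t)sres;

            if (sub_cf(a, b) != ref_c || sub_vf(a, b) != ref_v) {
                printf("mismatch: a=%08" PRIx32 " b=%08" PRIx32 "\n", a, b);
                return 1;
            }
        }
    }
    printf("all flag checks passed\n");
    return 0;
}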