From 4bd3d5005e5c272c22f100bdc538cb0adf102b04 Mon Sep 17 00:00:00 2001
From: Aurelien Jarno
Date: Sat, 10 Feb 2018 22:47:05 -0500
Subject: [PATCH] tcg/optimize: add optimizations for ext_i32_i64 and
 extu_i32_i64 ops

They behave the same as ext32s_i64 and ext32u_i64 from the constant
folding and zero propagation point of view, except that they can't be
replaced by a mov, so we don't compute the affected value.

Backports commit 8bcb5c8f34f9215d4f88f388c7ff14c9bd5cecd3 from qemu
---
 qemu/tcg/optimize.c | 13 +++++++++++++
 1 file changed, 13 insertions(+)

diff --git a/qemu/tcg/optimize.c b/qemu/tcg/optimize.c
index 7d29e757..fbe6c61c 100644
--- a/qemu/tcg/optimize.c
+++ b/qemu/tcg/optimize.c
@@ -348,9 +348,11 @@ static TCGArg do_constant_folding_2(TCGOpcode op, TCGArg x, TCGArg y)
     CASE_OP_32_64(ext16u):
         return (uint16_t)x;
 
+    case INDEX_op_ext_i32_i64:
     case INDEX_op_ext32s_i64:
         return (int32_t)x;
 
+    case INDEX_op_extu_i32_i64:
     case INDEX_op_ext32u_i64:
         return (uint32_t)x;
 
@@ -849,6 +851,15 @@ void tcg_optimize(TCGContext *s)
             mask = temps[args[1]].mask & mask;
             break;
 
+        case INDEX_op_ext_i32_i64:
+            if ((temps[args[1]].mask & 0x80000000) != 0) {
+                break;
+            }
+        case INDEX_op_extu_i32_i64:
+            /* We do not compute affected as it is a size changing op.  */
+            mask = (uint32_t)temps[args[1]].mask;
+            break;
+
         CASE_OP_32_64(andc):
             /* Known-zeros does not imply known-ones.  Therefore unless
                args[2] is constant, we can't infer anything from it.  */
@@ -1027,6 +1038,8 @@ void tcg_optimize(TCGContext *s)
         CASE_OP_32_64(ext16u):
         case INDEX_op_ext32s_i64:
         case INDEX_op_ext32u_i64:
+        case INDEX_op_ext_i32_i64:
+        case INDEX_op_extu_i32_i64:
             if (temp_is_const(s, args[1])) {
                 tmp = do_constant_folding(s, opc, temps[args[1]].val, 0);
                 tcg_opt_gen_movi(s, op, args, args[0], tmp);
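
The standalone sketch below (not part of the patch) illustrates the folding
semantics the commit message refers to: ext_i32_i64 sign-extends a 32-bit
constant to 64 bits and extu_i32_i64 zero-extends it, giving the same results
as ext32s_i64 and ext32u_i64. The fold_* helper names are made up for the
example and do not exist in tcg/optimize.c.

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical helpers mirroring the constant-folding rules added to
 * do_constant_folding_2(): sign extension for ext_i32_i64, zero
 * extension for extu_i32_i64. */
static uint64_t fold_ext_i32_i64(uint64_t x)
{
    return (uint64_t)(int32_t)x;    /* sign extension */
}

static uint64_t fold_extu_i32_i64(uint64_t x)
{
    return (uint32_t)x;             /* zero extension */
}

int main(void)
{
    uint64_t c = 0x80000000u;       /* 32-bit value with bit 31 set */

    /* Prints ffffffff80000000 and 0000000080000000 respectively; this is
     * also why the zero-propagation hunk above only keeps the low 32 bits
     * of the mask for ext_i32_i64 when bit 31 of the source is known to
     * be zero. */
    printf("ext_i32_i64 : %016" PRIx64 "\n", fold_ext_i32_i64(c));
    printf("extu_i32_i64: %016" PRIx64 "\n", fold_extu_i32_i64(c));
    return 0;
}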