From 4c74f704644c2f843cb304aba0c901723097ff14 Mon Sep 17 00:00:00 2001
From: Mike Pall
Date: Sun, 6 Feb 2011 00:45:39 +0100
Subject: Strength-reduce 32 to 64 bit widening for XLOAD U8/U16 inputs.

---
 src/lj_opt_fold.c | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/src/lj_opt_fold.c b/src/lj_opt_fold.c
index 1172f4fc..f4872046 100644
--- a/src/lj_opt_fold.c
+++ b/src/lj_opt_fold.c
@@ -917,6 +917,8 @@ LJFOLDF(simplify_conv_sext)
   if (!(fins->op2 & IRCONV_SEXT))
     return NEXTFOLD;
   PHIBARRIER(fleft);
+  if (fleft->o == IR_XLOAD && (irt_isu8(fleft->t) || irt_isu16(fleft->t)))
+    goto ok_reduce;
   if (fleft->o == IR_ADD && irref_isk(fleft->op2)) {
     ofs = (int64_t)IR(fleft->op2)->i;
     ref = fleft->op1;
@@ -926,6 +928,7 @@ LJFOLDF(simplify_conv_sext)
     IRRef lo = J->scev.dir ? J->scev.start : J->scev.stop;
     lua_assert(irt_isint(J->scev.t));
     if (lo && IR(lo)->i + ofs >= 0) {
+    ok_reduce:
#if LJ_TARGET_X64
       /* Eliminate widening. All 32 bit ops do an implicit zero-extension. */
       return LEFTFOLD;
--
cgit v1.2.3-55-g6feb
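
Note (not part of the patch): a minimal standalone C sketch of why the new fast path is safe. An XLOAD of type u8 or u16 yields a 32-bit value in [0, 0xFF] or [0, 0xFFFF], so it is never negative; sign-extending such a value to 64 bits gives the same result as zero-extending it, and on x64 every 32-bit operation already zero-extends into the full 64-bit register, which is why the CONV ... SEXT can simply be dropped (LEFTFOLD). This program is an illustration only and uses no LuaJIT code or identifiers.

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

int main(void)
{
  /* Exhaustively check every value an XLOAD of type u8 or u16 can produce. */
  for (uint32_t v = 0; v <= 0xFFFFu; v++) {
    int32_t loaded = (int32_t)v;               /* 32-bit result of the widened load */
    int64_t sext = (int64_t)loaded;            /* explicit sign-extension (the CONV being folded) */
    int64_t zext = (int64_t)(uint32_t)loaded;  /* what x64 32-bit ops do implicitly */
    assert(sext == zext);                      /* never differ, since loaded >= 0 */
  }
  printf("sign-extension == zero-extension for all u8/u16 load results\n");
  return 0;
}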