aboutsummaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
authorMike Pall <mike>2023-09-11 21:06:25 +0200
committerMike Pall <mike>2023-09-11 21:06:25 +0200
commit9e0437240f1fb4bfa7248f6ec8be0e3181016119 (patch)
treeb1a94cc6056ce34218a8482feb9736a42ac20d6a /src
parent1c33f46314cc4e3cb52ac83c5b27419bc06b5154 (diff)
downloadluajit-9e0437240f1fb4bfa7248f6ec8be0e3181016119.tar.gz
luajit-9e0437240f1fb4bfa7248f6ec8be0e3181016119.tar.bz2
luajit-9e0437240f1fb4bfa7248f6ec8be0e3181016119.zip
FFI: Fix 64 bit shift fold rules.
Thanks to Peter Cawley. #1079
Diffstat (limited to 'src')
-rw-r--r-- src/lj_opt_fold.c | 8
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/src/lj_opt_fold.c b/src/lj_opt_fold.c
index d90477f6..743dfb07 100644
--- a/src/lj_opt_fold.c
+++ b/src/lj_opt_fold.c
@@ -377,10 +377,10 @@ static uint64_t kfold_int64arith(jit_State *J, uint64_t k1, uint64_t k2,
377 case IR_BOR: k1 |= k2; break; 377 case IR_BOR: k1 |= k2; break;
378 case IR_BXOR: k1 ^= k2; break; 378 case IR_BXOR: k1 ^= k2; break;
379 case IR_BSHL: k1 <<= (k2 & 63); break; 379 case IR_BSHL: k1 <<= (k2 & 63); break;
380 case IR_BSHR: k1 = (int32_t)((uint32_t)k1 >> (k2 & 63)); break; 380 case IR_BSHR: k1 >>= (k2 & 63); break;
381 case IR_BSAR: k1 >>= (k2 & 63); break; 381 case IR_BSAR: k1 = (uint64_t)((int64_t)k1 >> (k2 & 63)); break;
382 case IR_BROL: k1 = (int32_t)lj_rol((uint32_t)k1, (k2 & 63)); break; 382 case IR_BROL: k1 = lj_rol(k1, (k2 & 63)); break;
383 case IR_BROR: k1 = (int32_t)lj_ror((uint32_t)k1, (k2 & 63)); break; 383 case IR_BROR: k1 = lj_ror(k1, (k2 & 63)); break;
384 default: lj_assertJ(0, "bad IR op %d", op); break; 384 default: lj_assertJ(0, "bad IR op %d", op); break;
385 } 385 }
386#else 386#else