aboutsummaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
authorMike Pall <mike>2023-11-12 15:25:14 +0100
committerMike Pall <mike>2023-11-12 15:25:14 +0100
commit6807e60af13c7f9a582dbf4cabc56e57dc791808 (patch)
tree6372ae90b7b9186b9892bcbf07e68bc411909565 /src
parent7c9671a043468b27c02301b6261a9e2a8eff787f (diff)
parentd854d00ce94b274359e5181bed13e977420daf5c (diff)
downloadluajit-6807e60af13c7f9a582dbf4cabc56e57dc791808.tar.gz
luajit-6807e60af13c7f9a582dbf4cabc56e57dc791808.tar.bz2
luajit-6807e60af13c7f9a582dbf4cabc56e57dc791808.zip
Merge branch 'master' into v2.1
Diffstat (limited to 'src')
-rw-r--r--src/lj_asm_x86.h8
1 file changed, 6 insertions, 2 deletions
diff --git a/src/lj_asm_x86.h b/src/lj_asm_x86.h
index 0e0b28a4..d98fb827 100644
--- a/src/lj_asm_x86.h
+++ b/src/lj_asm_x86.h
@@ -815,6 +815,7 @@ static void asm_tointg(ASMState *as, IRIns *ir, Reg left)
   emit_rr(as, XO_UCOMISD, left, tmp);
   emit_rr(as, XO_CVTSI2SD, tmp, dest);
   emit_rr(as, XO_XORPS, tmp, tmp);  /* Avoid partial register stall. */
+  checkmclim(as);
   emit_rr(as, XO_CVTTSD2SI, dest, left);
   /* Can't fuse since left is needed twice. */
 }
@@ -857,6 +858,7 @@ static void asm_conv(ASMState *as, IRIns *ir)
     emit_rr(as, XO_SUBSD, dest, bias);  /* Subtract 2^52+2^51 bias. */
     emit_rr(as, XO_XORPS, dest, bias);  /* Merge bias and integer. */
     emit_rma(as, XO_MOVSD, bias, k);
+    checkmclim(as);
     emit_mrm(as, XO_MOVD, dest, asm_fuseload(as, lref, RSET_GPR));
     return;
   } else {  /* Integer to FP conversion. */
@@ -1173,6 +1175,7 @@ static void asm_href(ASMState *as, IRIns *ir, IROp merge)
     asm_guardcc(as, CC_E);
   else
     emit_sjcc(as, CC_E, l_end);
+  checkmclim(as);
   if (irt_isnum(kt)) {
     if (isk) {
       /* Assumes -0.0 is already canonicalized to +0.0. */
@@ -1232,7 +1235,6 @@ static void asm_href(ASMState *as, IRIns *ir, IROp merge)
 #endif
   }
   emit_sfixup(as, l_loop);
-  checkmclim(as);
 #if LJ_GC64
   if (!isk && irt_isaddr(kt)) {
     emit_rr(as, XO_OR, tmp|REX_64, key);
@@ -1259,6 +1261,7 @@ static void asm_href(ASMState *as, IRIns *ir, IROp merge)
     emit_rr(as, XO_ARITH(XOg_SUB), dest, tmp);
     emit_shifti(as, XOg_ROL, tmp, HASH_ROT3);
     emit_rr(as, XO_ARITH(XOg_XOR), dest, tmp);
+    checkmclim(as);
     emit_shifti(as, XOg_ROL, dest, HASH_ROT2);
     emit_rr(as, XO_ARITH(XOg_SUB), tmp, dest);
     emit_shifti(as, XOg_ROL, dest, HASH_ROT1);
@@ -1276,7 +1279,6 @@ static void asm_href(ASMState *as, IRIns *ir, IROp merge)
   } else {
     emit_rr(as, XO_MOV, tmp, key);
 #if LJ_GC64
-    checkmclim(as);
     emit_gri(as, XG_ARITHi(XOg_XOR), dest, irt_toitype(kt) << 15);
     if ((as->flags & JIT_F_BMI2)) {
       emit_i8(as, 32);
@@ -1554,6 +1556,7 @@ static void asm_ahuvload(ASMState *as, IRIns *ir)
   if (irt_islightud(ir->t)) {
     Reg dest = asm_load_lightud64(as, ir, 1);
     if (ra_hasreg(dest)) {
+      checkmclim(as);
      asm_fuseahuref(as, ir->op1, RSET_GPR);
      if (ir->o == IR_VLOAD) as->mrm.ofs += 8 * ir->op2;
      emit_mrm(as, XO_MOV, dest|REX_64, RID_MRM);
@@ -1601,6 +1604,7 @@ static void asm_ahuvload(ASMState *as, IRIns *ir)
   if (LJ_64 && irt_type(ir->t) >= IRT_NUM) {
     lj_assertA(irt_isinteger(ir->t) || irt_isnum(ir->t),
	       "bad load type %d", irt_type(ir->t));
+    checkmclim(as);
 #if LJ_GC64
     emit_u32(as, LJ_TISNUM << 15);
 #else