aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorMike Pall <mike>2023-11-12 15:18:44 +0100
committerMike Pall <mike>2023-11-12 15:18:44 +0100
commitd854d00ce94b274359e5181bed13e977420daf5c (patch)
treebadafd05123b0443cfd29e090b7fba6c2847fda5
parenta4c1640432a9d8a60624cdc8065b15078c228e36 (diff)
downloadluajit-d854d00ce94b274359e5181bed13e977420daf5c.tar.gz
luajit-d854d00ce94b274359e5181bed13e977420daf5c.tar.bz2
luajit-d854d00ce94b274359e5181bed13e977420daf5c.zip
x86/x64: Add more red zone checks to assembler backend.
Thanks to Peter Cawley. #1116
-rw-r--r--src/lj_asm_x86.h7
1 file changed, 6 insertions, 1 deletion
diff --git a/src/lj_asm_x86.h b/src/lj_asm_x86.h
index e01def59..6b114802 100644
--- a/src/lj_asm_x86.h
+++ b/src/lj_asm_x86.h
@@ -674,6 +674,7 @@ static void asm_tointg(ASMState *as, IRIns *ir, Reg left)
674 emit_rr(as, XO_CVTSI2SD, tmp, dest); 674 emit_rr(as, XO_CVTSI2SD, tmp, dest);
675 if (!(as->flags & JIT_F_SPLIT_XMM)) 675 if (!(as->flags & JIT_F_SPLIT_XMM))
676 emit_rr(as, XO_XORPS, tmp, tmp); /* Avoid partial register stall. */ 676 emit_rr(as, XO_XORPS, tmp, tmp); /* Avoid partial register stall. */
677 checkmclim(as);
677 emit_rr(as, XO_CVTTSD2SI, dest, left); 678 emit_rr(as, XO_CVTTSD2SI, dest, left);
678 /* Can't fuse since left is needed twice. */ 679 /* Can't fuse since left is needed twice. */
679} 680}
@@ -713,6 +714,7 @@ static void asm_conv(ASMState *as, IRIns *ir)
713 emit_rr(as, XO_SUBSD, dest, bias); /* Subtract 2^52+2^51 bias. */ 714 emit_rr(as, XO_SUBSD, dest, bias); /* Subtract 2^52+2^51 bias. */
714 emit_rr(as, XO_XORPS, dest, bias); /* Merge bias and integer. */ 715 emit_rr(as, XO_XORPS, dest, bias); /* Merge bias and integer. */
715 emit_loadn(as, bias, k); 716 emit_loadn(as, bias, k);
717 checkmclim(as);
716 emit_mrm(as, XO_MOVD, dest, asm_fuseload(as, lref, RSET_GPR)); 718 emit_mrm(as, XO_MOVD, dest, asm_fuseload(as, lref, RSET_GPR));
717 return; 719 return;
718 } else { /* Integer to FP conversion. */ 720 } else { /* Integer to FP conversion. */
@@ -1025,6 +1027,7 @@ static void asm_href(ASMState *as, IRIns *ir)
1025 emit_jcc(as, CC_E, nilexit); 1027 emit_jcc(as, CC_E, nilexit);
1026 else 1028 else
1027 emit_sjcc(as, CC_E, l_end); 1029 emit_sjcc(as, CC_E, l_end);
1030 checkmclim(as);
1028 if (irt_isnum(kt)) { 1031 if (irt_isnum(kt)) {
1029 if (isk) { 1032 if (isk) {
1030 /* Assumes -0.0 is already canonicalized to +0.0. */ 1033 /* Assumes -0.0 is already canonicalized to +0.0. */
@@ -1065,7 +1068,6 @@ static void asm_href(ASMState *as, IRIns *ir)
1065 emit_rmro(as, XO_ARITHi8, XOg_CMP, dest, offsetof(Node, key.it)); 1068 emit_rmro(as, XO_ARITHi8, XOg_CMP, dest, offsetof(Node, key.it));
1066 } 1069 }
1067 emit_sfixup(as, l_loop); 1070 emit_sfixup(as, l_loop);
1068 checkmclim(as);
1069 1071
1070 /* Load main position relative to tab->node into dest. */ 1072 /* Load main position relative to tab->node into dest. */
1071 khash = isk ? ir_khash(irkey) : 1; 1073 khash = isk ? ir_khash(irkey) : 1;
@@ -1091,6 +1093,7 @@ static void asm_href(ASMState *as, IRIns *ir)
1091 emit_rr(as, XO_ARITH(XOg_SUB), dest, tmp); 1093 emit_rr(as, XO_ARITH(XOg_SUB), dest, tmp);
1092 emit_shifti(as, XOg_ROL, tmp, HASH_ROT3); 1094 emit_shifti(as, XOg_ROL, tmp, HASH_ROT3);
1093 emit_rr(as, XO_ARITH(XOg_XOR), dest, tmp); 1095 emit_rr(as, XO_ARITH(XOg_XOR), dest, tmp);
1096 checkmclim(as);
1094 emit_shifti(as, XOg_ROL, dest, HASH_ROT2); 1097 emit_shifti(as, XOg_ROL, dest, HASH_ROT2);
1095 emit_rr(as, XO_ARITH(XOg_SUB), tmp, dest); 1098 emit_rr(as, XO_ARITH(XOg_SUB), tmp, dest);
1096 emit_shifti(as, XOg_ROL, dest, HASH_ROT1); 1099 emit_shifti(as, XOg_ROL, dest, HASH_ROT1);
@@ -1375,6 +1378,7 @@ static void asm_ahuvload(ASMState *as, IRIns *ir)
1375 if (irt_islightud(ir->t)) { 1378 if (irt_islightud(ir->t)) {
1376 Reg dest = asm_load_lightud64(as, ir, 1); 1379 Reg dest = asm_load_lightud64(as, ir, 1);
1377 if (ra_hasreg(dest)) { 1380 if (ra_hasreg(dest)) {
1381 checkmclim(as);
1378 asm_fuseahuref(as, ir->op1, RSET_GPR); 1382 asm_fuseahuref(as, ir->op1, RSET_GPR);
1379 emit_mrm(as, XO_MOV, dest|REX_64, RID_MRM); 1383 emit_mrm(as, XO_MOV, dest|REX_64, RID_MRM);
1380 } 1384 }
@@ -1394,6 +1398,7 @@ static void asm_ahuvload(ASMState *as, IRIns *ir)
1394 asm_guardcc(as, irt_isnum(ir->t) ? CC_AE : CC_NE); 1398 asm_guardcc(as, irt_isnum(ir->t) ? CC_AE : CC_NE);
1395 if (LJ_64 && irt_type(ir->t) >= IRT_NUM) { 1399 if (LJ_64 && irt_type(ir->t) >= IRT_NUM) {
1396 lua_assert(irt_isinteger(ir->t) || irt_isnum(ir->t)); 1400 lua_assert(irt_isinteger(ir->t) || irt_isnum(ir->t));
1401 checkmclim(as);
1397 emit_u32(as, LJ_TISNUM); 1402 emit_u32(as, LJ_TISNUM);
1398 emit_mrm(as, XO_ARITHi, XOg_CMP, RID_MRM); 1403 emit_mrm(as, XO_ARITHi, XOg_CMP, RID_MRM);
1399 } else { 1404 } else {