author     Mike Pall <mike>  2020-05-20 20:42:04 +0200
committer  Mike Pall <mike>  2020-05-20 20:42:04 +0200
commit     0eddcbead2d67c16dcd4039a6765b9d2fc8ea631 (patch)
tree       614b63b87bb6ba476b616b10b95e278d4af0c452 /src/lj_asm_x86.h
parent     5bf0da3d7c02f9959fa3a9fb721e0565137b70c8 (diff)
Cleanup CPU detection and tuning for old CPUs.
Diffstat (limited to 'src/lj_asm_x86.h')
 src/lj_asm_x86.h | 33 +++++++++------------------------
 1 file changed, 9 insertions(+), 24 deletions(-)
diff --git a/src/lj_asm_x86.h b/src/lj_asm_x86.h
index db3409b9..bf818f5a 100644
--- a/src/lj_asm_x86.h
+++ b/src/lj_asm_x86.h
@@ -1214,13 +1214,8 @@ static void asm_href(ASMState *as, IRIns *ir, IROp merge)
     emit_rmro(as, XO_MOV, dest|REX_GC64, tab, offsetof(GCtab, node));
   } else {
     emit_rmro(as, XO_ARITH(XOg_ADD), dest|REX_GC64, tab, offsetof(GCtab,node));
-    if ((as->flags & JIT_F_PREFER_IMUL)) {
-      emit_i8(as, sizeof(Node));
-      emit_rr(as, XO_IMULi8, dest, dest);
-    } else {
-      emit_shifti(as, XOg_SHL, dest, 3);
-      emit_rmrxo(as, XO_LEA, dest, dest, dest, XM_SCALE2, 0);
-    }
+    emit_shifti(as, XOg_SHL, dest, 3);
+    emit_rmrxo(as, XO_LEA, dest, dest, dest, XM_SCALE2, 0);
     if (isk) {
       emit_gri(as, XG_ARITHi(XOg_AND), dest, (int32_t)khash);
       emit_rmro(as, XO_MOV, dest, tab, offsetof(GCtab, hmask));
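
Note: both the removed IMUL path and the kept LEA+SHL sequence compute the main hash position as node + hash*sizeof(Node); the scale-by-3-then-shift-by-3 emitted here implies a 24-byte Node. A minimal C sketch of that arithmetic with made-up names (not code from the commit):

/* Sketch only: hash*24 == (hash + hash*2) << 3, which is why one LEA with
** scale 2 plus one SHL by 3 can replace IMUL dest, dest, sizeof(Node).
*/
#include <stdint.h>

static uintptr_t href_mainpos(uintptr_t node, uint32_t hash)
{
  uintptr_t d = (uintptr_t)hash + (uintptr_t)hash*2;  /* lea dest, [dest+dest*2] */
  d <<= 3;                                            /* shl dest, 3 */
  return node + d;                                    /* add dest, tab->node */
}
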
@@ -1279,7 +1274,7 @@ static void asm_hrefk(ASMState *as, IRIns *ir)
   lua_assert(ofs % sizeof(Node) == 0);
   if (ra_hasreg(dest)) {
     if (ofs != 0) {
-      if (dest == node && !(as->flags & JIT_F_LEA_AGU))
+      if (dest == node)
         emit_gri(as, XG_ARITHi(XOg_ADD), dest|REX_GC64, ofs);
       else
         emit_rmro(as, XO_LEA, dest|REX_GC64, node, ofs);
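
Here ofs is a constant multiple of sizeof(Node), which is what the lua_assert checks. With the JIT_F_LEA_AGU special case gone, ADD is chosen only when dest already holds the node pointer; otherwise LEA leaves node intact. A hedged sketch with illustrative names, again assuming a 24-byte Node:

/* Sketch only: the value asm_hrefk materializes for a constant-key slot. */
#include <stdint.h>

static uintptr_t hrefk_slot(uintptr_t node, uint32_t slot)
{
  uint32_t ofs = slot * 24;   /* always a multiple of sizeof(Node) */
  return node + ofs;          /* add dest, ofs         if dest == node */
                              /* lea dest, [node+ofs]  otherwise       */
}
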
@@ -2180,8 +2175,7 @@ static void asm_add(ASMState *as, IRIns *ir)
 {
   if (irt_isnum(ir->t))
     asm_fparith(as, ir, XO_ADDSD);
-  else if ((as->flags & JIT_F_LEA_AGU) || as->flagmcp == as->mcp ||
-           irt_is64(ir->t) || !asm_lea(as, ir))
+  else if (as->flagmcp == as->mcp || irt_is64(ir->t) || !asm_lea(as, ir))
     asm_intarith(as, ir, XOg_ADD);
 }
 
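
With the JIT_F_LEA_AGU test removed, an integer add prefers LEA whenever asm_lea() can encode the operands, and falls back to a flag-setting ADD when the flag-merge marker is set (as->flagmcp == as->mcp), the result is 64 bit, or the operands don't fit an addressing mode. A rough restatement of that selection in plain C; none of these names exist in the source:

typedef enum { EMIT_ADDSD, EMIT_ADD, EMIT_LEA } AddChoice;

/* Sketch only: mirrors the post-change condition in asm_add(). */
static AddChoice pick_add(int is_num, int flags_needed, int is_64, int lea_ok)
{
  if (is_num) return EMIT_ADDSD;        /* FP add via ADDSD */
  if (flags_needed || is_64 || !lea_ok)
    return EMIT_ADD;                    /* add: two-operand, sets flags */
  return EMIT_LEA;                      /* lea: three-operand, flag-neutral */
}
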
@@ -2903,7 +2897,7 @@ static void asm_tail_fixup(ASMState *as, TraceNo lnk)
   MCode *target, *q;
   int32_t spadj = as->T->spadjust;
   if (spadj == 0) {
-    p -= ((as->flags & JIT_F_LEA_AGU) ? 7 : 6) + (LJ_64 ? 1 : 0);
+    p -= LJ_64 ? 7 : 6;
   } else {
     MCode *p1;
     /* Patch stack adjustment. */
@@ -2915,20 +2909,11 @@ static void asm_tail_fixup(ASMState *as, TraceNo lnk)
       p1 = p-9;
       *(int32_t *)p1 = spadj;
     }
-    if ((as->flags & JIT_F_LEA_AGU)) {
-#if LJ_64
-      p1[-4] = 0x48;
-#endif
-      p1[-3] = (MCode)XI_LEA;
-      p1[-2] = MODRM(checki8(spadj) ? XM_OFS8 : XM_OFS32, RID_ESP, RID_ESP);
-      p1[-1] = MODRM(XM_SCALE1, RID_ESP, RID_ESP);
-    } else {
 #if LJ_64
-      p1[-3] = 0x48;
+    p1[-3] = 0x48;
 #endif
-      p1[-2] = (MCode)(checki8(spadj) ? XI_ARITHi8 : XI_ARITHi);
-      p1[-1] = MODRM(XM_REG, XOg_ADD, RID_ESP);
-    }
+    p1[-2] = (MCode)(checki8(spadj) ? XI_ARITHi8 : XI_ARITHi);
+    p1[-1] = MODRM(XM_REG, XOg_ADD, RID_ESP);
   }
   /* Patch exit branch. */
   target = lnk ? traceref(as->J, lnk)->mcode : (MCode *)lj_vm_exit_interp;
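
The bytes patched here spell out the ESP adjustment in place: for the kept form, p1[-1] is the ModRM byte selecting ADD on ESP, p1[-2] the opcode (imm8 or imm32 variant), and p1[-3] the REX.W prefix on x64; the removed branch instead wrote an LEA and used p1[-1] as its SIB byte, costing one extra byte. A sketch of the kept encoding, with standard x86 opcode values and an illustrative helper name:

/* Sketch only: add esp/rsp, imm32  ->  [48] 81 C4 imm32  (6 or 7 bytes). */
#include <stdint.h>
#include <string.h>

static int enc_add_esp_imm32(uint8_t *p, int32_t imm, int is64)
{
  int n = 0;
  if (is64) p[n++] = 0x48;            /* REX.W prefix */
  p[n++] = 0x81;                      /* arith-imm32 group opcode */
  p[n++] = 0xC4;                      /* ModRM: mod=11, reg=ADD, rm=ESP */
  memcpy(p + n, &imm, 4); n += 4;     /* 32-bit immediate */
  return n;                           /* 6 bytes, 7 with REX.W */
}
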
@@ -2959,7 +2944,7 @@ static void asm_tail_prep(ASMState *as)
     as->invmcp = as->mcp = p;
   } else {
     /* Leave room for ESP adjustment: add esp, imm or lea esp, [esp+imm] */
-    as->mcp = p - (((as->flags & JIT_F_LEA_AGU) ? 7 : 6) + (LJ_64 ? 1 : 0));
+    as->mcp = p - (LJ_64 ? 7 : 6);
     as->invmcp = NULL;
   }
 }
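
asm_tail_prep must reserve exactly the room that asm_tail_fixup later patches or trims away, which is why the same 7-or-6 constant appears in both functions; after this change both sides assume only the add form. The arithmetic, under the same encoding assumptions as the sketch above:

/* Sketch only: room needed for the worst-case ESP adjustment. */
static int esp_adjust_room(int is64)
{
  /* [REX.W] + opcode + ModRM + imm32 */
  return (is64 ? 1 : 0) + 1 + 1 + 4;  /* 7 on x64, 6 on x86 */
}
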