about summary refs log tree commit diff
path: root/src/lj_asm_x86.h
diff options
context:
space:
mode:
authorMike Pall <mike>2020-06-23 03:06:45 +0200
committerMike Pall <mike>2020-06-23 03:06:45 +0200
commitff34b48ddd6f2b3bdd26d6088662a214ba6b0288 (patch)
tree5585ab1933d148b046061a1e061686aa09e63789 /src/lj_asm_x86.h
parenta44f53acf53603e7d9b88352de035b1804be4e88 (diff)
downloadluajit-ff34b48ddd6f2b3bdd26d6088662a214ba6b0288.tar.gz
luajit-ff34b48ddd6f2b3bdd26d6088662a214ba6b0288.tar.bz2
luajit-ff34b48ddd6f2b3bdd26d6088662a214ba6b0288.zip
Redesign and harden string interning.
Up to 40% faster on hash-intensive benchmarks. With some ideas from Sokolov Yura.
Diffstat (limited to 'src/lj_asm_x86.h')
-rw-r--r--src/lj_asm_x86.h2
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/lj_asm_x86.h b/src/lj_asm_x86.h
index a3adee14..e40b5e54 100644
--- a/src/lj_asm_x86.h
+++ b/src/lj_asm_x86.h
@@ -1228,7 +1228,7 @@ static void asm_href(ASMState *as, IRIns *ir, IROp merge)
       emit_gri(as, XG_ARITHi(XOg_AND), dest, (int32_t)khash);
       emit_rmro(as, XO_MOV, dest, tab, offsetof(GCtab, hmask));
     } else if (irt_isstr(kt)) {
-      emit_rmro(as, XO_ARITH(XOg_AND), dest, key, offsetof(GCstr, hash));
+      emit_rmro(as, XO_ARITH(XOg_AND), dest, key, offsetof(GCstr, sid));
       emit_rmro(as, XO_MOV, dest, tab, offsetof(GCtab, hmask));
     } else {  /* Must match with hashrot() in lj_tab.c. */
       emit_rmro(as, XO_ARITH(XOg_AND), dest, tab, offsetof(GCtab, hmask));