author     Mike Pall <mike>  2020-06-23 03:06:45 +0200
committer  Mike Pall <mike>  2020-06-23 03:06:45 +0200
commit     ff34b48ddd6f2b3bdd26d6088662a214ba6b0288 (patch)
tree       5585ab1933d148b046061a1e061686aa09e63789 /src/lj_asm_arm64.h
parent     a44f53acf53603e7d9b88352de035b1804be4e88 (diff)
Redesign and harden string interning.
Up to 40% faster on hash-intensive benchmarks. With some ideas from Sokolov Yura.
Diffstat (limited to 'src/lj_asm_arm64.h')
-rw-r--r--  src/lj_asm_arm64.h  |  4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/src/lj_asm_arm64.h b/src/lj_asm_arm64.h
index 0729a3a5..b1fd3acc 100644
--- a/src/lj_asm_arm64.h
+++ b/src/lj_asm_arm64.h
@@ -847,9 +847,9 @@ static void asm_href(ASMState *as, IRIns *ir, IROp merge)
     emit_dnm(as, A64I_ANDw, dest, dest, tmphash);
     emit_lso(as, A64I_LDRw, dest, tab, offsetof(GCtab, hmask));
   } else if (irt_isstr(kt)) {
-    /* Fetch of str->hash is cheaper than ra_allock. */
+    /* Fetch of str->sid is cheaper than ra_allock. */
     emit_dnm(as, A64I_ANDw, dest, dest, tmp);
-    emit_lso(as, A64I_LDRw, tmp, key, offsetof(GCstr, hash));
+    emit_lso(as, A64I_LDRw, tmp, key, offsetof(GCstr, sid));
     emit_lso(as, A64I_LDRw, dest, tab, offsetof(GCtab, hmask));
   } else {  /* Must match with hash*() in lj_tab.c. */
     emit_dnm(as, A64I_ANDw, dest, dest, tmp);
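
For context, the string-key branch in the hunk above emits two 32-bit loads and an AND: it fetches the interned string's ID (str->sid, which this commit uses in place of str->hash), fetches the table's hash mask, and masks one with the other to obtain the hash-slot index. Below is a minimal C sketch of the equivalent computation; the trimmed struct definitions and the helper name hash_slot_index are illustrative only, and per the trailing comment in the hunk the real computation must stay in sync with the hash*() helpers in lj_tab.c.

#include <stdint.h>

/* Pared-down stand-ins for the real LuaJIT structs: only the two fields the
** emitted loads touch are kept, so the offsets here do not match the real
** layouts. The field names GCstr.sid and GCtab.hmask are taken from the
** hunk above. */
typedef struct GCstr { uint32_t sid; } GCstr;    /* interned string ID */
typedef struct GCtab { uint32_t hmask; } GCtab;  /* hash-part mask, 2^n - 1 */

/* Equivalent of the three instructions in the string-key branch:
**   LDRw tmp  <- key->sid
**   LDRw dest <- tab->hmask
**   ANDw dest <- dest & tmp
*/
static uint32_t hash_slot_index(const GCtab *tab, const GCstr *key)
{
  return tab->hmask & key->sid;
}

In asm_href the masked value ends up in dest, where it selects the hash chain that the generated code then walks to look up the key.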