aboutsummaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
authorMike Pall <mike>2010-09-21 02:28:14 +0200
committerMike Pall <mike>2010-09-21 02:28:14 +0200
commit38628d93b819602c70cf7cf4b3da5eb2ad4f8744 (patch)
tree35ab19d0924593cfe4313bd5d896663f72084d4f /src
parent23b5c56d41d24a29cfd17d943a9a849a7b9ac20c (diff)
downloadluajit-38628d93b819602c70cf7cf4b3da5eb2ad4f8744.tar.gz
luajit-38628d93b819602c70cf7cf4b3da5eb2ad4f8744.tar.bz2
luajit-38628d93b819602c70cf7cf4b3da5eb2ad4f8744.zip
Improve FOLD/CSE of field loads and array/hash refs across NEWREF.
Diffstat (limited to 'src')
-rw-r--r--src/lj_iropt.h1
-rw-r--r--src/lj_opt_fold.c23
-rw-r--r--src/lj_opt_mem.c14
3 files changed, 23 insertions(+), 15 deletions(-)
diff --git a/src/lj_iropt.h b/src/lj_iropt.h
index ee593b40..00bb2496 100644
--- a/src/lj_iropt.h
+++ b/src/lj_iropt.h
@@ -111,6 +111,7 @@ LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_uload(jit_State *J);
111LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_fload(jit_State *J); 111LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_fload(jit_State *J);
112LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_tab_len(jit_State *J); 112LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_tab_len(jit_State *J);
113LJ_FUNC int LJ_FASTCALL lj_opt_fwd_href_nokey(jit_State *J); 113LJ_FUNC int LJ_FASTCALL lj_opt_fwd_href_nokey(jit_State *J);
114LJ_FUNC int LJ_FASTCALL lj_opt_fwd_tptr(jit_State *J, IRRef lim);
114LJ_FUNC int lj_opt_fwd_wasnonnil(jit_State *J, IROpT loadop, IRRef xref); 115LJ_FUNC int lj_opt_fwd_wasnonnil(jit_State *J, IROpT loadop, IRRef xref);
115 116
116/* Dead-store elimination. */ 117/* Dead-store elimination. */
diff --git a/src/lj_opt_fold.c b/src/lj_opt_fold.c
index 5a6ea29b..e6f6d3b6 100644
--- a/src/lj_opt_fold.c
+++ b/src/lj_opt_fold.c
@@ -1233,23 +1233,15 @@ LJFOLDF(fwd_href_tdup)
1233} 1233}
1234 1234
1235/* We can safely FOLD/CSE array/hash refs and field loads, since there 1235/* We can safely FOLD/CSE array/hash refs and field loads, since there
1236** are no corresponding stores. But NEWREF may invalidate all of them. 1236** are no corresponding stores. But we need to check for any NEWREF with
1237** Lacking better disambiguation for table references, these optimizations 1237** an aliased table, as it may invalidate all of the pointers and fields.
1238** are simply disabled across any NEWREF.
1239** Only HREF needs the NEWREF check -- AREF and HREFK already depend on 1238** Only HREF needs the NEWREF check -- AREF and HREFK already depend on
1240** FLOADs. And NEWREF itself is treated like a store (see below). 1239** FLOADs. And NEWREF itself is treated like a store (see below).
1241*/ 1240*/
1242LJFOLD(HREF any any)
1243LJFOLDF(cse_href)
1244{
1245 TRef tr = lj_opt_cse(J);
1246 return tref_ref(tr) < J->chain[IR_NEWREF] ? EMITFOLD : tr;
1247}
1248
1249LJFOLD(FLOAD TNEW IRFL_TAB_ASIZE) 1241LJFOLD(FLOAD TNEW IRFL_TAB_ASIZE)
1250LJFOLDF(fload_tab_tnew_asize) 1242LJFOLDF(fload_tab_tnew_asize)
1251{ 1243{
1252 if (LJ_LIKELY(J->flags & JIT_F_OPT_FOLD) && fins->op1 > J->chain[IR_NEWREF]) 1244 if (LJ_LIKELY(J->flags & JIT_F_OPT_FOLD) && lj_opt_fwd_tptr(J, fins->op1))
1253 return INTFOLD(fleft->op1); 1245 return INTFOLD(fleft->op1);
1254 return NEXTFOLD; 1246 return NEXTFOLD;
1255} 1247}
@@ -1257,7 +1249,7 @@ LJFOLDF(fload_tab_tnew_asize)
1257LJFOLD(FLOAD TNEW IRFL_TAB_HMASK) 1249LJFOLD(FLOAD TNEW IRFL_TAB_HMASK)
1258LJFOLDF(fload_tab_tnew_hmask) 1250LJFOLDF(fload_tab_tnew_hmask)
1259{ 1251{
1260 if (LJ_LIKELY(J->flags & JIT_F_OPT_FOLD) && fins->op1 > J->chain[IR_NEWREF]) 1252 if (LJ_LIKELY(J->flags & JIT_F_OPT_FOLD) && lj_opt_fwd_tptr(J, fins->op1))
1261 return INTFOLD((1 << fleft->op2)-1); 1253 return INTFOLD((1 << fleft->op2)-1);
1262 return NEXTFOLD; 1254 return NEXTFOLD;
1263} 1255}
@@ -1265,7 +1257,7 @@ LJFOLDF(fload_tab_tnew_hmask)
1265LJFOLD(FLOAD TDUP IRFL_TAB_ASIZE) 1257LJFOLD(FLOAD TDUP IRFL_TAB_ASIZE)
1266LJFOLDF(fload_tab_tdup_asize) 1258LJFOLDF(fload_tab_tdup_asize)
1267{ 1259{
1268 if (LJ_LIKELY(J->flags & JIT_F_OPT_FOLD) && fins->op1 > J->chain[IR_NEWREF]) 1260 if (LJ_LIKELY(J->flags & JIT_F_OPT_FOLD) && lj_opt_fwd_tptr(J, fins->op1))
1269 return INTFOLD((int32_t)ir_ktab(IR(fleft->op1))->asize); 1261 return INTFOLD((int32_t)ir_ktab(IR(fleft->op1))->asize);
1270 return NEXTFOLD; 1262 return NEXTFOLD;
1271} 1263}
@@ -1273,11 +1265,12 @@ LJFOLDF(fload_tab_tdup_asize)
1273LJFOLD(FLOAD TDUP IRFL_TAB_HMASK) 1265LJFOLD(FLOAD TDUP IRFL_TAB_HMASK)
1274LJFOLDF(fload_tab_tdup_hmask) 1266LJFOLDF(fload_tab_tdup_hmask)
1275{ 1267{
1276 if (LJ_LIKELY(J->flags & JIT_F_OPT_FOLD) && fins->op1 > J->chain[IR_NEWREF]) 1268 if (LJ_LIKELY(J->flags & JIT_F_OPT_FOLD) && lj_opt_fwd_tptr(J, fins->op1))
1277 return INTFOLD((int32_t)ir_ktab(IR(fleft->op1))->hmask); 1269 return INTFOLD((int32_t)ir_ktab(IR(fleft->op1))->hmask);
1278 return NEXTFOLD; 1270 return NEXTFOLD;
1279} 1271}
1280 1272
1273LJFOLD(HREF any any)
1281LJFOLD(FLOAD any IRFL_TAB_ARRAY) 1274LJFOLD(FLOAD any IRFL_TAB_ARRAY)
1282LJFOLD(FLOAD any IRFL_TAB_NODE) 1275LJFOLD(FLOAD any IRFL_TAB_NODE)
1283LJFOLD(FLOAD any IRFL_TAB_ASIZE) 1276LJFOLD(FLOAD any IRFL_TAB_ASIZE)
@@ -1285,7 +1278,7 @@ LJFOLD(FLOAD any IRFL_TAB_HMASK)
1285LJFOLDF(fload_tab_ah) 1278LJFOLDF(fload_tab_ah)
1286{ 1279{
1287 TRef tr = lj_opt_cse(J); 1280 TRef tr = lj_opt_cse(J);
1288 return tref_ref(tr) < J->chain[IR_NEWREF] ? EMITFOLD : tr; 1281 return lj_opt_fwd_tptr(J, tref_ref(tr)) ? tr : EMITFOLD;
1289} 1282}
1290 1283
1291/* Strings are immutable, so we can safely FOLD/CSE the related FLOAD. */ 1284/* Strings are immutable, so we can safely FOLD/CSE the related FLOAD. */
diff --git a/src/lj_opt_mem.c b/src/lj_opt_mem.c
index 4d2e9664..7e364f0b 100644
--- a/src/lj_opt_mem.c
+++ b/src/lj_opt_mem.c
@@ -277,6 +277,20 @@ int LJ_FASTCALL lj_opt_fwd_href_nokey(jit_State *J)
277 return 1; /* No conflict. Can fold to niltv. */ 277 return 1; /* No conflict. Can fold to niltv. */
278} 278}
279 279
280/* Check whether there's no aliasing NEWREF for the left operand. */
281int LJ_FASTCALL lj_opt_fwd_tptr(jit_State *J, IRRef lim)
282{
283 IRRef ta = fins->op1;
284 IRRef ref = J->chain[IR_NEWREF];
285 while (ref > lim) {
286 IRIns *newref = IR(ref);
287 if (ta == newref->op1 || aa_table(J, ta, newref->op1) != ALIAS_NO)
288 return 0; /* Conflict. */
289 ref = newref->prev;
290 }
291 return 1; /* No conflict. Can safely FOLD/CSE. */
292}
293
280/* ASTORE/HSTORE elimination. */ 294/* ASTORE/HSTORE elimination. */
281TRef LJ_FASTCALL lj_opt_dse_ahstore(jit_State *J) 295TRef LJ_FASTCALL lj_opt_dse_ahstore(jit_State *J)
282{ 296{