author     Mike Pall <mike>  2010-12-31 03:56:30 +0100
committer  Mike Pall <mike>  2010-12-31 03:56:30 +0100
commit     1716540c55e8d432f17ea6c0666b1427a485ff4a (patch)
tree       ee192f7d8e0b4f296504b92226fbf8d9149acdd9 /src
parent     1f269610925829f55ed3e88e4af2b6575598adbc (diff)
Refactoring of conversion ops, part 4: use CONV instead of TOINT/TONUM.
Also narrow CONV.int.num and CONV.i64.num.
Diffstat (limited to 'src')
-rw-r--r--   src/lj_crecord.c     14
-rw-r--r--   src/lj_ir.c           4
-rw-r--r--   src/lj_ir.h           4
-rw-r--r--   src/lj_jit.h          2
-rw-r--r--   src/lj_opt_fold.c    13
-rw-r--r--   src/lj_opt_loop.c     2
-rw-r--r--   src/lj_opt_narrow.c  89
-rw-r--r--   src/lj_record.c      22
8 files changed, 89 insertions, 61 deletions
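
A note for readers skimming the patch: the standalone sketch below is not part of the commit; it only illustrates how the reworked CONV operand word packs the source IRType, destination IRType and numbered check mode, reusing the masks defined in the lj_ir.h hunk further down. The IRT_* enum values here are placeholders for illustration, not LuaJIT's real ones.

/* Minimal sketch (assumed values, not LuaJIT source): decode a CONV op2 word
** using the field layout from the lj_ir.h hunk of this patch.
*/
#include <stdio.h>
#include <stdint.h>

enum { IRT_NUM = 14, IRT_INT = 19 };      /* placeholder IRType values */

#define IRCONV_SRCMASK   0x001f           /* Source IRType. */
#define IRCONV_DSTMASK   0x03e0           /* Dest. IRType (also in ir->t). */
#define IRCONV_CONVMASK  0xf000           /* Numbered conversion check mode. */
#define IRCONV_CSH       12
#define IRCONV_INDEX     (2<<IRCONV_CSH)  /* Check + special backprop rules. */

#define IRCONV_INT_NUM   ((IRT_INT<<5)|IRT_NUM)  /* CONV.int.num */

int main(void)
{
  /* The narrowing conversion the recorder emits for an array index. */
  uint32_t op2 = IRCONV_INT_NUM|IRCONV_INDEX;
  printf("dst=%u src=%u check=%u\n",
         (op2 & IRCONV_DSTMASK) >> 5, op2 & IRCONV_SRCMASK,
         (op2 & IRCONV_CONVMASK) >> IRCONV_CSH);
  return 0;
}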
diff --git a/src/lj_crecord.c b/src/lj_crecord.c
index 23b92831..791c177c 100644
--- a/src/lj_crecord.c
+++ b/src/lj_crecord.c
@@ -165,7 +165,7 @@ static void crec_ct_ct(jit_State *J, CType *d, CType *s, TRef dp, TRef sp)
     /* fallthrough */
   case CCX(I, F):
     if (dt == IRT_CDATA || st == IRT_CDATA) goto err_nyi;
-    sp = emitconv(sp, dsize < 4 ? IRT_INT : dt, st, IRCONV_TRUNC);
+    sp = emitconv(sp, dsize < 4 ? IRT_INT : dt, st, IRCONV_TRUNC|IRCONV_ANY);
     goto xstore;
   case CCX(I, P):
   case CCX(I, A):
@@ -241,7 +241,7 @@ static void crec_ct_ct(jit_State *J, CType *d, CType *s, TRef dp, TRef sp)
     if (st == IRT_CDATA) goto err_nyi;
     /* The signed conversion is cheaper. x64 really has 47 bit pointers. */
     sp = emitconv(sp, (LJ_64 && dsize == 8) ? IRT_I64 : IRT_U32,
-                  st, IRCONV_TRUNC);
+                  st, IRCONV_TRUNC|IRCONV_ANY);
     goto xstore;
 
   /* Destination is an array. */
@@ -401,11 +401,13 @@ void LJ_FASTCALL recff_cdata_index(jit_State *J, RecordFFData *rd)
   if (tref_isnumber(idx)) {
     /* The size of a ptrdiff_t is target-specific. */
 #if LJ_64
-    idx = emitir(IRT(IR_TOI64, IRT_INTP), idx,
-                 tref_isinteger(idx) ? IRTOINT_SEXT64 : IRTOINT_TRUNCI64);
+    if (tref_isnum(idx))
+      idx = emitconv(idx, IRT_I64, IRT_NUM, IRCONV_TRUNC|IRCONV_ANY);
+    else
+      idx = emitconv(idx, IRT_I64, IRT_INT, IRCONV_SEXT);
 #else
-    if (!tref_isinteger(idx))
-      idx = emitir(IRT(IR_TOINT, IRT_INTP), idx, IRTOINT_ANY);
+    if (tref_isnum(idx))
+      idx = emitconv(idx, IRT_INT, IRT_NUM, IRCONV_TRUNC|IRCONV_ANY);
 #endif
     if (ctype_ispointer(ct->info)) {
       CTSize sz = lj_ctype_size(cts, (sid = ctype_cid(ct->info)));
diff --git a/src/lj_ir.c b/src/lj_ir.c
index e9364762..3217bc1e 100644
--- a/src/lj_ir.c
+++ b/src/lj_ir.c
@@ -393,7 +393,7 @@ TRef LJ_FASTCALL lj_ir_tonum(jit_State *J, TRef tr)
 {
   if (!tref_isnum(tr)) {
     if (tref_isinteger(tr))
-      tr = emitir(IRTN(IR_TONUM), tr, 0);
+      tr = emitir(IRTN(IR_CONV), tr, IRCONV_NUM_INT);
     else if (tref_isstr(tr))
       tr = emitir(IRTG(IR_STRTO, IRT_NUM), tr, 0);
     else
@@ -434,7 +434,7 @@ TRef LJ_FASTCALL lj_ir_toint(jit_State *J, TRef tr)
       tr = emitir(IRTG(IR_STRTO, IRT_NUM), tr, 0);
     else if (!tref_isnum(tr))
       lj_trace_err(J, LJ_TRERR_BADTYPE);
-    tr = emitir(IRTI(IR_TOINT), tr, IRTOINT_ANY);
+    tr = emitir(IRTI(IR_CONV), tr, IRCONV_INT_NUM|IRCONV_ANY);
   }
   return tr;
 }
diff --git a/src/lj_ir.h b/src/lj_ir.h
index a6d36283..5733f6af 100644
--- a/src/lj_ir.h
+++ b/src/lj_ir.h
@@ -222,9 +222,12 @@ IRFLDEF(FLENUM)
 /* CONV mode, stored in op2. */
 #define IRCONV_SRCMASK   0x001f  /* Source IRType. */
 #define IRCONV_DSTMASK   0x03e0  /* Dest. IRType (also in ir->t). */
+#define IRCONV_NUM_INT   ((IRT_NUM<<5)|IRT_INT)
+#define IRCONV_INT_NUM   ((IRT_INT<<5)|IRT_NUM)
 #define IRCONV_TRUNC     0x0400  /* Truncate number to integer. */
 #define IRCONV_SEXT      0x0800  /* Sign-extend integer to integer. */
 #define IRCONV_MODEMASK  0x0fff
+#define IRCONV_CONVMASK  0xf000
 #define IRCONV_CSH       12
 /* Number to integer conversion mode. Ordered by strength of the checks. */
 #define IRCONV_TOBIT  (0<<IRCONV_CSH)  /* None. Cache only: TOBIT conv. */
@@ -232,6 +235,7 @@ IRFLDEF(FLENUM)
 #define IRCONV_INDEX  (2<<IRCONV_CSH)  /* Check + special backprop rules. */
 #define IRCONV_CHECK  (3<<IRCONV_CSH)  /* Number checked for integerness. */
 
+
 /* C call info for CALL* instructions. */
 typedef struct CCallInfo {
   ASMFunction func;  /* Function pointer. */
diff --git a/src/lj_jit.h b/src/lj_jit.h
index c5902a61..cb7ce982 100644
--- a/src/lj_jit.h
+++ b/src/lj_jit.h
@@ -205,7 +205,7 @@ typedef struct HotPenalty {
 typedef struct BPropEntry {
   IRRef1 key;  /* Key: original reference. */
   IRRef1 val;  /* Value: reference after conversion. */
-  IRRef mode;  /* Mode for this entry (currently IRTOINT_*). */
+  IRRef mode;  /* Mode for this entry (currently IRCONV_*). */
 } BPropEntry;
 
 /* Number of slots for the backpropagation cache. Must be a power of 2. */
diff --git a/src/lj_opt_fold.c b/src/lj_opt_fold.c
index 92b9dfb4..adf88f4d 100644
--- a/src/lj_opt_fold.c
+++ b/src/lj_opt_fold.c
@@ -796,9 +796,9 @@ LJFOLDF(simplify_powi_kx)
 {
   lua_Number n = knumleft;
   if (n == 2.0) {  /* 2.0 ^ i ==> ldexp(1.0, tonum(i)) */
-    fins->o = IR_TONUM;
+    fins->o = IR_CONV;
     fins->op1 = fins->op2;
-    fins->op2 = 0;
+    fins->op2 = IRCONV_NUM_INT;
     fins->op2 = (IRRef1)lj_opt_fold(J);
     fins->op1 = (IRRef1)lj_ir_knum_one(J);
     fins->o = IR_LDEXP;
@@ -953,18 +953,19 @@ LJFOLDF(cse_conv)
 }
 
 /* FP conversion narrowing. */
-LJFOLD(TOINT ADD any)
-LJFOLD(TOINT SUB any)
 LJFOLD(TOBIT ADD KNUM)
 LJFOLD(TOBIT SUB KNUM)
-LJFOLD(TOI64 ADD 5)  /* IRTOINT_TRUNCI64 */
-LJFOLD(TOI64 SUB 5)  /* IRTOINT_TRUNCI64 */
+LJFOLD(CONV ADD IRCONV_INT_NUM)
+LJFOLD(CONV SUB IRCONV_INT_NUM)
+LJFOLD(CONV ADD IRCONV_I64_NUM)
+LJFOLD(CONV SUB IRCONV_I64_NUM)
 LJFOLDF(narrow_convert)
 {
   PHIBARRIER(fleft);
   /* Narrowing ignores PHIs and repeating it inside the loop is not useful. */
   if (J->chain[IR_LOOP])
     return NEXTFOLD;
+  lua_assert(fins->o != IR_CONV || (fins->op2&IRCONV_CONVMASK) != IRCONV_TOBIT);
   return lj_opt_narrow_convert(J);
 }
 
diff --git a/src/lj_opt_loop.c b/src/lj_opt_loop.c
index f370d59f..4f1d31a4 100644
--- a/src/lj_opt_loop.c
+++ b/src/lj_opt_loop.c
@@ -301,7 +301,7 @@ static void loop_unroll(jit_State *J)
       /* Check all loop-carried dependencies for type instability. */
       if (!irt_sametype(t, irr->t)) {
         if (irt_isnum(t) && irt_isinteger(irr->t))  /* Fix int->num case. */
-          subst[ins] = tref_ref(emitir(IRTN(IR_TONUM), ref, 0));
+          subst[ins] = tref_ref(emitir(IRTN(IR_CONV), ref, IRCONV_NUM_INT));
         else if (!(irt_isinteger(t) && irt_isinteger(irr->t)))
           lj_trace_err(J, LJ_TRERR_TYPEINS);
       }
diff --git a/src/lj_opt_narrow.c b/src/lj_opt_narrow.c
index b6615f32..fb6601e9 100644
--- a/src/lj_opt_narrow.c
+++ b/src/lj_opt_narrow.c
@@ -89,16 +89,17 @@
 /* -- Elimination of narrowing type conversions --------------------------- */
 
 /* Narrowing of index expressions and bit operations is demand-driven. The
-** trace recorder emits a narrowing type conversion (TOINT or TOBIT) in
-** all of these cases (e.g. array indexing or string indexing). FOLD
+** trace recorder emits a narrowing type conversion (CONV.int.num or TOBIT)
+** in all of these cases (e.g. array indexing or string indexing). FOLD
 ** already takes care of eliminating simple redundant conversions like
-** TOINT(TONUM(x)) ==> x.
+** CONV.int.num(CONV.num.int(x)) ==> x.
 **
 ** But the surrounding code is FP-heavy and all arithmetic operations are
 ** performed on FP numbers. Consider a common example such as 'x=t[i+1]',
 ** with 'i' already an integer (due to induction variable narrowing). The
-** index expression would be recorded as TOINT(ADD(TONUM(i), 1)), which is
-** clearly suboptimal.
+** index expression would be recorded as
+**   CONV.int.num(ADD(CONV.num.int(i), 1))
+** which is clearly suboptimal.
 **
 ** One can do better by recursively backpropagating the narrowing type
 ** conversion across FP arithmetic operations. This turns FP ops into
@@ -106,9 +107,10 @@
 ** the conversion they also need to check for overflow. Currently only ADD
 ** and SUB are supported.
 **
-** The above example can be rewritten as ADDOV(TOINT(TONUM(i)), 1) and
-** then into ADDOV(i, 1) after folding of the conversions. The original FP
-** ops remain in the IR and are eliminated by DCE since all references to
+** The above example can be rewritten as
+**   ADDOV(CONV.int.num(CONV.num.int(i)), 1)
+** and then into ADDOV(i, 1) after folding of the conversions. The original
+** FP ops remain in the IR and are eliminated by DCE since all references to
 ** them are gone.
 **
 ** Special care has to be taken to avoid narrowing across an operation
@@ -173,6 +175,7 @@
 enum {
   NARROW_REF,   /* Push ref. */
   NARROW_CONV,  /* Push conversion of ref. */
+  NARROW_SEXT,  /* Push sign-extension of ref. */
   NARROW_INT    /* Push KINT ref. The next code holds an int32_t. */
 };
 
@@ -188,7 +191,8 @@ typedef struct NarrowConv {
   NarrowIns *sp;     /* Current stack pointer. */
   NarrowIns *maxsp;  /* Maximum stack pointer minus redzone. */
   int lim;           /* Limit on the number of emitted conversions. */
-  IRRef mode;        /* Conversion mode (IRTOINT_*). */
+  IRRef mode;        /* Conversion mode (IRCONV_*). */
+  IRType t;          /* Destination type: IRT_INT or IRT_I64. */
   NarrowIns stack[NARROW_MAX_STACK];  /* Stack holding stack-machine code. */
 } NarrowConv;
 
@@ -198,7 +202,9 @@ static BPropEntry *narrow_bpc_get(jit_State *J, IRRef1 key, IRRef mode)
   ptrdiff_t i;
   for (i = 0; i < BPROP_SLOTS; i++) {
     BPropEntry *bp = &J->bpropcache[i];
-    if (bp->key == key && bp->mode <= mode)  /* Stronger checks are ok, too. */
+    /* Stronger checks are ok, too. */
+    if (bp->key == key && bp->mode >= mode &&
+        ((bp->mode ^ mode) & IRCONV_MODEMASK) == 0)
       return bp;
   }
   return NULL;
@@ -223,16 +229,16 @@ static int narrow_conv_backprop(NarrowConv *nc, IRRef ref, int depth)
   IRRef cref;
 
   /* Check the easy cases first. */
-  if (ir->o == IR_TONUM) {  /* Undo inverse conversion. */
-    *nc->sp++ = NARROWINS(NARROW_REF, ir->op1);
-    if (nc->mode == IRTOINT_TRUNCI64) {
-      *nc->sp++ = NARROWINS(NARROW_REF, IRTOINT_SEXT64);
-      *nc->sp++ = NARROWINS(IRT(IR_TOI64, IRT_I64), 0);
-    }
+  if (ir->o == IR_CONV && (ir->op2 & IRCONV_SRCMASK) == IRT_INT) {
+    if (nc->t == IRT_I64)
+      *nc->sp++ = NARROWINS(NARROW_SEXT, ir->op1);  /* Reduce to sign-ext. */
+    else
+      *nc->sp++ = NARROWINS(NARROW_REF, ir->op1);  /* Undo conversion. */
     return 0;
   } else if (ir->o == IR_KNUM) {  /* Narrow FP constant. */
     lua_Number n = ir_knum(ir)->n;
-    if (nc->mode == IRTOINT_TOBIT) {  /* Allows a wider range of constants. */
+    if ((nc->mode & IRCONV_CONVMASK) == IRCONV_TOBIT) {
+      /* Allows a wider range of constants. */
       int64_t k64 = (int64_t)n;
       if (n == cast_num(k64)) {  /* Only if constant doesn't lose precision. */
         *nc->sp++ = NARROWINS(NARROW_INT, 0);
@@ -251,36 +257,46 @@ static int narrow_conv_backprop(NarrowConv *nc, IRRef ref, int depth)
   }
 
   /* Try to CSE the conversion. Stronger checks are ok, too. */
-  for (cref = J->chain[fins->o]; cref > ref; cref = IR(cref)->prev)
-    if (IR(cref)->op1 == ref &&
-        irt_isguard(IR(cref)->t) >= irt_isguard(fins->t)) {
+  cref = J->chain[fins->o];
+  while (cref > ref) {
+    IRIns *cr = IR(cref);
+    if (cr->op1 == ref &&
+        (fins->o == IR_TOBIT ||
+         ((cr->op2 & IRCONV_MODEMASK) == (nc->mode & IRCONV_MODEMASK) &&
+          irt_isguard(cr->t) >= irt_isguard(fins->t)))) {
      *nc->sp++ = NARROWINS(NARROW_REF, cref);
      return 0;  /* Already there, no additional conversion needed. */
    }
+    cref = cr->prev;
+  }
 
   /* Backpropagate across ADD/SUB. */
   if (ir->o == IR_ADD || ir->o == IR_SUB) {
     /* Try cache lookup first. */
     IRRef mode = nc->mode;
     BPropEntry *bp;
-    if (mode == IRTOINT_INDEX && depth > 0)
-      mode = IRTOINT_CHECK;  /* Inner conversions need a stronger check. */
+    /* Inner conversions need a stronger check. */
+    if ((mode & IRCONV_CONVMASK) == IRCONV_INDEX && depth > 0)
+      mode += IRCONV_CHECK-IRCONV_INDEX;
     bp = narrow_bpc_get(nc->J, (IRRef1)ref, mode);
     if (bp) {
       *nc->sp++ = NARROWINS(NARROW_REF, bp->val);
-      if (mode == IRTOINT_TRUNCI64 && mode != bp->mode) {
-        *nc->sp++ = NARROWINS(NARROW_REF, IRTOINT_SEXT64);
-        *nc->sp++ = NARROWINS(IRT(IR_TOI64, IRT_I64), 0);
-      }
       return 0;
+    } else if (nc->t == IRT_I64) {
+      /* Try sign-extending from an existing (checked) conversion to int. */
+      mode = (IRT_INT<<5)|IRT_NUM|IRCONV_INDEX;
+      bp = narrow_bpc_get(nc->J, (IRRef1)ref, mode);
+      if (bp) {
+        *nc->sp++ = NARROWINS(NARROW_SEXT, bp->val);
+        return 0;
+      }
     }
     if (++depth < NARROW_MAX_BACKPROP && nc->sp < nc->maxsp) {
       NarrowIns *savesp = nc->sp;
       int count = narrow_conv_backprop(nc, ir->op1, depth);
       count += narrow_conv_backprop(nc, ir->op2, depth);
       if (count <= nc->lim) {  /* Limit total number of conversions. */
-        IRType t = mode == IRTOINT_TRUNCI64 ? IRT_I64 : IRT_INT;
-        *nc->sp++ = NARROWINS(IRT(ir->o, t), ref);
+        *nc->sp++ = NARROWINS(IRT(ir->o, nc->t), ref);
         return count;
       }
       nc->sp = savesp;  /* Too many conversions, need to backtrack. */
@@ -309,9 +325,12 @@ static IRRef narrow_conv_emit(jit_State *J, NarrowConv *nc)
       *sp++ = ref;
     } else if (op == NARROW_CONV) {
       *sp++ = emitir_raw(convot, ref, convop2);  /* Raw emit avoids a loop. */
+    } else if (op == NARROW_SEXT) {
+      *sp++ = emitir(IRT(IR_CONV, IRT_I64), ref,
+                     (IRT_I64<<5)|IRT_INT|IRCONV_SEXT);
     } else if (op == NARROW_INT) {
       lua_assert(next < last);
-      *sp++ = nc->mode == IRTOINT_TRUNCI64 ?
+      *sp++ = nc->t == IRT_I64 ?
               lj_ir_kint64(J, (int64_t)(int32_t)*next++) :
               lj_ir_kint(J, *next++);
     } else {  /* Regular IROpT. Pops two operands and pushes one result. */
@@ -319,12 +338,12 @@ static IRRef narrow_conv_emit(jit_State *J, NarrowConv *nc)
       lua_assert(sp >= nc->stack+2);
       sp--;
       /* Omit some overflow checks for array indexing. See comments above. */
-      if (mode == IRTOINT_INDEX) {
+      if ((mode & IRCONV_CONVMASK) == IRCONV_INDEX) {
        if (next == last && irref_isk(narrow_ref(sp[0])) &&
            (uint32_t)IR(narrow_ref(sp[0]))->i + 0x40000000 < 0x80000000)
          guardot = 0;
-       else
-         mode = IRTOINT_CHECK;  /* Otherwise cache a stronger check. */
+       else  /* Otherwise cache a stronger check. */
+         mode += IRCONV_CHECK-IRCONV_INDEX;
      }
      sp[-1] = emitir(op+guardot, sp[-1], sp[0]);
      /* Add to cache. */
@@ -344,8 +363,9 @@ TRef LJ_FASTCALL lj_opt_narrow_convert(jit_State *J)
   nc.J = J;
   nc.sp = nc.stack;
   nc.maxsp = &nc.stack[NARROW_MAX_STACK-4];
+  nc.t = irt_type(fins->t);
   if (fins->o == IR_TOBIT) {
-    nc.mode = IRTOINT_TOBIT;  /* Used only in the backpropagation cache. */
+    nc.mode = IRCONV_TOBIT;  /* Used only in the backpropagation cache. */
     nc.lim = 2;  /* TOBIT can use a more optimistic rule. */
   } else {
     nc.mode = fins->op2;
@@ -401,7 +421,8 @@ TRef lj_opt_narrow_pow(jit_State *J, TRef rb, TRef rc, TValue *vc)
   if (!tref_isinteger(rc)) {
     if (tref_isstr(rc))
       rc = emitir(IRTG(IR_STRTO, IRT_NUM), rc, 0);
-    rc = emitir(IRTGI(IR_TOINT), rc, IRTOINT_CHECK);  /* Guarded TOINT! */
+    /* Guarded conversion to integer! */
+    rc = emitir(IRTGI(IR_CONV), rc, IRCONV_INT_NUM|IRCONV_CHECK);
   }
   if (!tref_isk(rc)) {  /* Range guard: -65536 <= i <= 65536 */
     tmp = emitir(IRTI(IR_ADD), rc, lj_ir_kint(J, 65536-2147483647-1));
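
A second standalone sketch (again not part of the commit, with placeholder IRT_* values) mirrors the relaxed backprop-cache match in narrow_bpc_get() above: an entry is reusable when its source/destination types are identical and its numbered check mode is at least as strong, which is why a cached IRCONV_CHECK conversion now satisfies a request for IRCONV_INDEX of the same type pair.

/* Sketch of the narrow_bpc_get() matching rule from the hunk above
** (assumed placeholder IRT_* values, not LuaJIT source).
*/
#include <stdio.h>
#include <stdint.h>

enum { IRT_NUM = 14, IRT_INT = 19 };      /* placeholder IRType values */

#define IRCONV_MODEMASK  0x0fff
#define IRCONV_CSH       12
#define IRCONV_INDEX     (2<<IRCONV_CSH)
#define IRCONV_CHECK     (3<<IRCONV_CSH)
#define IRCONV_INT_NUM   ((IRT_INT<<5)|IRT_NUM)

/* Same src/dst types and an equal-or-stronger check mode. */
static int bpc_match(uint32_t cached, uint32_t wanted)
{
  return cached >= wanted && ((cached ^ wanted) & IRCONV_MODEMASK) == 0;
}

int main(void)
{
  uint32_t cached = IRCONV_INT_NUM|IRCONV_CHECK;  /* stronger check cached */
  uint32_t wanted = IRCONV_INT_NUM|IRCONV_INDEX;  /* weaker check requested */
  printf("stronger satisfies weaker: %d\n", bpc_match(cached, wanted));  /* 1 */
  printf("weaker satisfies stronger: %d\n", bpc_match(wanted, cached));  /* 0 */
  return 0;
}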
diff --git a/src/lj_record.c b/src/lj_record.c
index ae47f236..ec03afe1 100644
--- a/src/lj_record.c
+++ b/src/lj_record.c
@@ -169,10 +169,10 @@ int lj_record_objcmp(jit_State *J, TRef a, TRef b, cTValue *av, cTValue *bv)
   if (ta != tb) {
     /* Widen mixed number/int comparisons to number/number comparison. */
     if (ta == IRT_INT && tb == IRT_NUM) {
-      a = emitir(IRTN(IR_TONUM), a, 0);
+      a = emitir(IRTN(IR_CONV), a, IRCONV_NUM_INT);
       ta = IRT_NUM;
     } else if (ta == IRT_NUM && tb == IRT_INT) {
-      b = emitir(IRTN(IR_TONUM), b, 0);
+      b = emitir(IRTN(IR_CONV), b, IRCONV_NUM_INT);
     } else {
       return 2;  /* Two different types are never equal. */
     }
@@ -199,7 +199,7 @@ static void canonicalize_slots(jit_State *J)
     if (tref_isinteger(tr)) {
       IRIns *ir = IR(tref_ref(tr));
       if (!(ir->o == IR_SLOAD && (ir->op2 & IRSLOAD_READONLY)))
-        J->slot[s] = emitir(IRTN(IR_TONUM), tr, 0);
+        J->slot[s] = emitir(IRTN(IR_CONV), tr, IRCONV_NUM_INT);
     }
   }
 }
@@ -869,7 +869,7 @@ static TRef rec_idx_key(jit_State *J, RecordIndex *ix)
     if ((MSize)k < LJ_MAX_ASIZE && n == cast_num(k)) {
       TRef asizeref, ikey = key;
       if (!tref_isinteger(ikey))
-        ikey = emitir(IRTGI(IR_TOINT), ikey, IRTOINT_INDEX);
+        ikey = emitir(IRTGI(IR_CONV), ikey, IRCONV_INT_NUM|IRCONV_INDEX);
       asizeref = emitir(IRTI(IR_FLOAD), ix->tab, IRFL_TAB_ASIZE);
       if ((MSize)k < t->asize) {  /* Currently an array key? */
         TRef arrayref;
@@ -898,7 +898,7 @@ static TRef rec_idx_key(jit_State *J, RecordIndex *ix)
 
   /* Otherwise the key is located in the hash part. */
   if (tref_isinteger(key))  /* Hash keys are based on numbers, not ints. */
-    ix->key = key = emitir(IRTN(IR_TONUM), key, 0);
+    ix->key = key = emitir(IRTN(IR_CONV), key, IRCONV_NUM_INT);
   if (tref_isk(key)) {
     /* Optimize lookup of constant hash keys. */
     MSize hslot = (MSize)((char *)ix->oldv - (char *)&noderef(t->node)[0].val);
@@ -1024,7 +1024,7 @@ TRef lj_record_idx(jit_State *J, RecordIndex *ix)
     if (oldv == niltvg(J2G(J))) {  /* Need to insert a new key. */
       TRef key = ix->key;
       if (tref_isinteger(key))  /* NEWREF needs a TValue as a key. */
-        key = emitir(IRTN(IR_TONUM), key, 0);
+        key = emitir(IRTN(IR_CONV), key, IRCONV_NUM_INT);
       xref = emitir(IRT(IR_NEWREF, IRT_P32), ix->tab, key);
       keybarrier = 0;  /* NEWREF already takes care of the key barrier. */
     }
@@ -1046,7 +1046,7 @@ TRef lj_record_idx(jit_State *J, RecordIndex *ix)
       keybarrier = 0;  /* Previous non-nil value kept the key alive. */
     }
     if (tref_isinteger(ix->val))  /* Convert int to number before storing. */
-      ix->val = emitir(IRTN(IR_TONUM), ix->val, 0);
+      ix->val = emitir(IRTN(IR_CONV), ix->val, IRCONV_NUM_INT);
     emitir(IRT(loadop+IRDELTA_L2S, tref_type(ix->val)), xref, ix->val);
     if (keybarrier || tref_isgcv(ix->val))
       emitir(IRT(IR_TBAR, IRT_NIL), ix->tab, 0);
@@ -1100,7 +1100,7 @@ static TRef rec_upvalue(jit_State *J, uint32_t uv, TRef val)
     return res;
   } else {  /* Upvalue store. */
     if (tref_isinteger(val))  /* Convert int to number before storing. */
-      val = emitir(IRTN(IR_TONUM), val, 0);
+      val = emitir(IRTN(IR_CONV), val, IRCONV_NUM_INT);
     emitir(IRT(IR_USTORE, tref_type(val)), uref, val);
     if (needbarrier && tref_isgcv(val))
       emitir(IRT(IR_OBAR, IRT_NIL), uref, val);
@@ -1254,7 +1254,7 @@ static void rec_varg(jit_State *J, BCReg dst, ptrdiff_t nresults)
     ptrdiff_t idx = lj_ffrecord_select_mode(J, tridx, &J->L->base[dst-1]);
     if (idx < 0) goto nyivarg;
     if (idx != 0 && !tref_isinteger(tridx))
-      tridx = emitir(IRTGI(IR_TOINT), tridx, IRTOINT_INDEX);
+      tridx = emitir(IRTGI(IR_CONV), tridx, IRCONV_INT_NUM|IRCONV_INDEX);
     if (idx != 0 && tref_isk(tridx)) {
       emitir(IRTGI(idx <= nvararg ? IR_GE : IR_LT),
              fr, lj_ir_kint(J, frofs+8*(int32_t)idx));
@@ -1418,10 +1418,10 @@ void lj_record_ins(jit_State *J)
     if (ta != tc) {
       /* Widen mixed number/int comparisons to number/number comparison. */
       if (ta == IRT_INT && tc == IRT_NUM) {
-        ra = emitir(IRTN(IR_TONUM), ra, 0);
+        ra = emitir(IRTN(IR_CONV), ra, IRCONV_NUM_INT);
         ta = IRT_NUM;
       } else if (ta == IRT_NUM && tc == IRT_INT) {
-        rc = emitir(IRTN(IR_TONUM), rc, 0);
+        rc = emitir(IRTN(IR_CONV), rc, IRCONV_NUM_INT);
       } else if (!((ta == IRT_FALSE || ta == IRT_TRUE) &&
                    (tc == IRT_FALSE || tc == IRT_TRUE))) {
         break;  /* Interpreter will throw for two different types. */