diff options
author | Mike Pall <mike> | 2010-01-30 06:50:39 +0100 |
---|---|---|
committer | Mike Pall <mike> | 2010-01-30 06:50:39 +0100 |
commit | 02e58f5e56b15cae536ba026f364a274b43f0749 (patch) | |
tree | 92a806c9e8922809791259e0b8084fdf3428a04c /src | |
parent | 96e8a56260cd3bd76bc824e43d824140a303a159 (diff) | |
download | luajit-02e58f5e56b15cae536ba026f364a274b43f0749.tar.gz luajit-02e58f5e56b15cae536ba026f364a274b43f0749.tar.bz2 luajit-02e58f5e56b15cae536ba026f364a274b43f0749.zip |
Add support for weak IR references to register allocator.
Spilling a weak ref forces a spill slot, but omits the restore.
Spill slots for snapshot refs override the register, anyway.
Marking snapshot refs weak avoids pointless restores.
Diffstat (limited to 'src')
-rw-r--r-- | src/lj_asm.c | 69 |
1 file changed, 51 insertions, 18 deletions
diff --git a/src/lj_asm.c b/src/lj_asm.c index c7527c15..bb8a6fe7 100644 --- a/src/lj_asm.c +++ b/src/lj_asm.c | |||
@@ -42,6 +42,7 @@ typedef struct ASMState { | |||
42 | 42 | ||
43 | RegSet freeset; /* Set of free registers. */ | 43 | RegSet freeset; /* Set of free registers. */ |
44 | RegSet modset; /* Set of registers modified inside the loop. */ | 44 | RegSet modset; /* Set of registers modified inside the loop. */ |
45 | RegSet weakset; /* Set of weakly referenced registers. */ | ||
45 | RegSet phiset; /* Set of PHI registers. */ | 46 | RegSet phiset; /* Set of PHI registers. */ |
46 | 47 | ||
47 | uint32_t flags; /* Copy of JIT compiler flags. */ | 48 | uint32_t flags; /* Copy of JIT compiler flags. */ |
@@ -565,6 +566,8 @@ static void ra_dprintf(ASMState *as, const char *fmt, ...) | |||
565 | 566 | ||
566 | #define ra_free(as, r) rset_set(as->freeset, (r)) | 567 | #define ra_free(as, r) rset_set(as->freeset, (r)) |
567 | #define ra_modified(as, r) rset_set(as->modset, (r)) | 568 | #define ra_modified(as, r) rset_set(as->modset, (r)) |
569 | #define ra_weak(as, r) rset_set(as->weakset, (r)) | ||
570 | #define ra_noweak(as, r) rset_clear(as->weakset, (r)) | ||
568 | 571 | ||
569 | #define ra_used(ir) (ra_hasreg((ir)->r) || ra_hasspill((ir)->s)) | 572 | #define ra_used(ir) (ra_hasreg((ir)->r) || ra_hasspill((ir)->s)) |
570 | 573 | ||
@@ -574,6 +577,7 @@ static void ra_setup(ASMState *as) | |||
574 | /* Initially all regs (except the stack pointer) are free for use. */ | 577 | /* Initially all regs (except the stack pointer) are free for use. */ |
575 | as->freeset = RSET_ALL; | 578 | as->freeset = RSET_ALL; |
576 | as->modset = RSET_EMPTY; | 579 | as->modset = RSET_EMPTY; |
580 | as->weakset = RSET_EMPTY; | ||
577 | as->phiset = RSET_EMPTY; | 581 | as->phiset = RSET_EMPTY; |
578 | memset(as->phireg, 0, sizeof(as->phireg)); | 582 | memset(as->phireg, 0, sizeof(as->phireg)); |
579 | memset(as->cost, 0, sizeof(as->cost)); | 583 | memset(as->cost, 0, sizeof(as->cost)); |
@@ -647,13 +651,16 @@ static Reg ra_restore(ASMState *as, IRRef ref) | |||
647 | if (irref_isk(ref) || ref == REF_BASE) { | 651 | if (irref_isk(ref) || ref == REF_BASE) { |
648 | return ra_rematk(as, ir); | 652 | return ra_rematk(as, ir); |
649 | } else { | 653 | } else { |
654 | int32_t ofs = ra_spill(as, ir); /* Force a spill slot. */ | ||
650 | Reg r = ir->r; | 655 | Reg r = ir->r; |
651 | lua_assert(ra_hasreg(r)); | 656 | lua_assert(ra_hasreg(r)); |
652 | ra_free(as, r); | ||
653 | ra_modified(as, r); | ||
654 | ra_sethint(ir->r, r); /* Keep hint. */ | 657 | ra_sethint(ir->r, r); /* Keep hint. */ |
655 | RA_DBGX((as, "restore $i $r", ir, r)); | 658 | ra_free(as, r); |
656 | emit_movrmro(as, r, RID_ESP, ra_spill(as, ir)); /* Force a spill. */ | 659 | if (!rset_test(as->weakset, r)) { /* Only restore non-weak references. */ |
660 | ra_modified(as, r); | ||
661 | RA_DBGX((as, "restore $i $r", ir, r)); | ||
662 | emit_movrmro(as, r, RID_ESP, ofs); | ||
663 | } | ||
657 | return r; | 664 | return r; |
658 | } | 665 | } |
659 | } | 666 | } |
@@ -673,7 +680,9 @@ static LJ_AINLINE void ra_save(ASMState *as, IRIns *ir, Reg r) | |||
673 | /* Evict the register with the lowest cost, forcing a restore. */ | 680 | /* Evict the register with the lowest cost, forcing a restore. */ |
674 | static Reg ra_evict(ASMState *as, RegSet allow) | 681 | static Reg ra_evict(ASMState *as, RegSet allow) |
675 | { | 682 | { |
683 | IRRef ref; | ||
676 | RegCost cost = ~(RegCost)0; | 684 | RegCost cost = ~(RegCost)0; |
685 | lua_assert(allow != RSET_EMPTY); | ||
677 | if (allow < RID2RSET(RID_MAX_GPR)) { | 686 | if (allow < RID2RSET(RID_MAX_GPR)) { |
678 | MINCOST(RID_EAX);MINCOST(RID_ECX);MINCOST(RID_EDX);MINCOST(RID_EBX); | 687 | MINCOST(RID_EAX);MINCOST(RID_ECX);MINCOST(RID_EDX);MINCOST(RID_EBX); |
679 | MINCOST(RID_EBP);MINCOST(RID_ESI);MINCOST(RID_EDI); | 688 | MINCOST(RID_EBP);MINCOST(RID_ESI);MINCOST(RID_EDI); |
@@ -689,9 +698,15 @@ static Reg ra_evict(ASMState *as, RegSet allow) | |||
689 | MINCOST(RID_XMM12);MINCOST(RID_XMM13);MINCOST(RID_XMM14);MINCOST(RID_XMM15); | 698 | MINCOST(RID_XMM12);MINCOST(RID_XMM13);MINCOST(RID_XMM14);MINCOST(RID_XMM15); |
690 | #endif | 699 | #endif |
691 | } | 700 | } |
692 | lua_assert(allow != RSET_EMPTY); | 701 | ref = regcost_ref(cost); |
693 | lua_assert(regcost_ref(cost) >= as->T->nk && regcost_ref(cost) < as->T->nins); | 702 | lua_assert(ref >= as->T->nk && ref < as->T->nins); |
694 | return ra_restore(as, regcost_ref(cost)); | 703 | /* Preferably pick any weak ref instead of a non-weak, non-const ref. */ |
704 | if (!irref_isk(ref) && (as->weakset & allow)) { | ||
705 | IRIns *ir = IR(ref); | ||
706 | if (!rset_test(as->weakset, ir->r)) | ||
707 | ref = regcost_ref(as->cost[rset_pickbot((as->weakset & allow))]); | ||
708 | } | ||
709 | return ra_restore(as, ref); | ||
695 | } | 710 | } |
696 | 711 | ||
697 | /* Pick any register (marked as free). Evict on-demand. */ | 712 | /* Pick any register (marked as free). Evict on-demand. */ |
@@ -764,6 +779,7 @@ found: | |||
764 | RA_DBGX((as, "alloc $f $r", ref, r)); | 779 | RA_DBGX((as, "alloc $f $r", ref, r)); |
765 | ir->r = (uint8_t)r; | 780 | ir->r = (uint8_t)r; |
766 | rset_clear(as->freeset, r); | 781 | rset_clear(as->freeset, r); |
782 | ra_noweak(as, r); | ||
767 | as->cost[r] = REGCOST_REF_T(ref, irt_t(ir->t)); | 783 | as->cost[r] = REGCOST_REF_T(ref, irt_t(ir->t)); |
768 | return r; | 784 | return r; |
769 | } | 785 | } |
@@ -774,6 +790,7 @@ static LJ_INLINE Reg ra_alloc1(ASMState *as, IRRef ref, RegSet allow) | |||
774 | Reg r = IR(ref)->r; | 790 | Reg r = IR(ref)->r; |
775 | /* Note: allow is ignored if the register is already allocated. */ | 791 | /* Note: allow is ignored if the register is already allocated. */ |
776 | if (ra_noreg(r)) r = ra_allocref(as, ref, allow); | 792 | if (ra_noreg(r)) r = ra_allocref(as, ref, allow); |
793 | ra_noweak(as, r); | ||
777 | return r; | 794 | return r; |
778 | } | 795 | } |
779 | 796 | ||
@@ -787,6 +804,7 @@ static void ra_rename(ASMState *as, Reg down, Reg up) | |||
787 | lua_assert(!rset_test(as->freeset, down) && rset_test(as->freeset, up)); | 804 | lua_assert(!rset_test(as->freeset, down) && rset_test(as->freeset, up)); |
788 | rset_set(as->freeset, down); /* 'down' is free ... */ | 805 | rset_set(as->freeset, down); /* 'down' is free ... */ |
789 | rset_clear(as->freeset, up); /* ... and 'up' is now allocated. */ | 806 | rset_clear(as->freeset, up); /* ... and 'up' is now allocated. */ |
807 | ra_noweak(as, up); | ||
790 | RA_DBGX((as, "rename $f $r $r", regcost_ref(as->cost[up]), down, up)); | 808 | RA_DBGX((as, "rename $f $r $r", regcost_ref(as->cost[up]), down, up)); |
791 | emit_movrr(as, down, up); /* Backwards code generation needs inverse move. */ | 809 | emit_movrr(as, down, up); /* Backwards code generation needs inverse move. */ |
792 | if (!ra_hasspill(IR(ref)->s)) { /* Add the rename to the IR. */ | 810 | if (!ra_hasspill(IR(ref)->s)) { /* Add the rename to the IR. */ |
@@ -852,6 +870,7 @@ static void ra_left(ASMState *as, Reg dest, IRRef lref) | |||
852 | ra_sethint(ir->r, dest); /* Propagate register hint. */ | 870 | ra_sethint(ir->r, dest); /* Propagate register hint. */ |
853 | left = ra_allocref(as, lref, dest < RID_MAX_GPR ? RSET_GPR : RSET_FPR); | 871 | left = ra_allocref(as, lref, dest < RID_MAX_GPR ? RSET_GPR : RSET_FPR); |
854 | } | 872 | } |
873 | ra_noweak(as, left); | ||
855 | /* Move needed for true 3-operand instruction: y=a+b ==> y=a; y+=b. */ | 874 | /* Move needed for true 3-operand instruction: y=a+b ==> y=a; y+=b. */ |
856 | if (dest != left) { | 875 | if (dest != left) { |
857 | /* Use register renaming if dest is the PHI reg. */ | 876 | /* Use register renaming if dest is the PHI reg. */ |
@@ -933,11 +952,12 @@ static void asm_snap_alloc(ASMState *as) | |||
933 | IRIns *ir = IR(ref); | 952 | IRIns *ir = IR(ref); |
934 | if (!ra_used(ir)) { | 953 | if (!ra_used(ir)) { |
935 | RegSet allow = irt_isnum(ir->t) ? RSET_FPR : RSET_GPR; | 954 | RegSet allow = irt_isnum(ir->t) ? RSET_FPR : RSET_GPR; |
936 | /* Not a var-to-invar ref and got a free register (or a remat)? */ | 955 | /* Get a weak register if we have a free one or can rematerialize. */ |
937 | if ((!iscrossref(as, ref) || irt_isphi(ir->t)) && | 956 | if ((as->freeset & allow) || |
938 | ((as->freeset & allow) || | 957 | (allow == RSET_FPR && asm_snap_canremat(as))) { |
939 | (allow == RSET_FPR && asm_snap_canremat(as)))) { | 958 | Reg r = ra_allocref(as, ref, allow); /* Allocate a register. */ |
940 | ra_allocref(as, ref, allow); /* Allocate a register. */ | 959 | if (!irt_isphi(ir->t)) |
960 | ra_weak(as, r); /* But mark it as weakly referenced. */ | ||
941 | checkmclim(as); | 961 | checkmclim(as); |
942 | RA_DBGX((as, "snapreg $f $r", ref, ir->r)); | 962 | RA_DBGX((as, "snapreg $f $r", ref, ir->r)); |
943 | } else { | 963 | } else { |
@@ -1185,7 +1205,10 @@ static Reg asm_fuseload(ASMState *as, IRRef ref, RegSet allow) | |||
1185 | { | 1205 | { |
1186 | IRIns *ir = IR(ref); | 1206 | IRIns *ir = IR(ref); |
1187 | if (ra_hasreg(ir->r)) { | 1207 | if (ra_hasreg(ir->r)) { |
1188 | if (allow != RSET_EMPTY) return ir->r; /* Fast path. */ | 1208 | if (allow != RSET_EMPTY) { /* Fast path. */ |
1209 | ra_noweak(as, ir->r); | ||
1210 | return ir->r; | ||
1211 | } | ||
1189 | fusespill: | 1212 | fusespill: |
1190 | /* Force a spill if only memory operands are allowed (asm_x87load). */ | 1213 | /* Force a spill if only memory operands are allowed (asm_x87load). */ |
1191 | as->mrm.base = RID_ESP; | 1214 | as->mrm.base = RID_ESP; |
@@ -1275,10 +1298,12 @@ static void asm_gencall(ASMState *as, const CCallInfo *ci, IRRef *args) | |||
1275 | } else { | 1298 | } else { |
1276 | lua_assert(rset_test(as->freeset, r)); /* Must have been evicted. */ | 1299 | lua_assert(rset_test(as->freeset, r)); /* Must have been evicted. */ |
1277 | allow &= ~RID2RSET(r); | 1300 | allow &= ~RID2RSET(r); |
1278 | if (ra_hasreg(ir->r)) | 1301 | if (ra_hasreg(ir->r)) { |
1302 | ra_noweak(as, ir->r); | ||
1279 | emit_movrr(as, r, ir->r); | 1303 | emit_movrr(as, r, ir->r); |
1280 | else | 1304 | } else { |
1281 | ra_allocref(as, args[n], RID2RSET(r)); | 1305 | ra_allocref(as, args[n], RID2RSET(r)); |
1306 | } | ||
1282 | } | 1307 | } |
1283 | } else { | 1308 | } else { |
1284 | if (args[n] < ASMREF_TMP1) { | 1309 | if (args[n] < ASMREF_TMP1) { |
@@ -2151,8 +2176,10 @@ static void asm_fparith(ASMState *as, IRIns *ir, x86Op xo) | |||
2151 | RegSet allow = RSET_FPR; | 2176 | RegSet allow = RSET_FPR; |
2152 | Reg dest; | 2177 | Reg dest; |
2153 | Reg right = IR(rref)->r; | 2178 | Reg right = IR(rref)->r; |
2154 | if (ra_hasreg(right)) | 2179 | if (ra_hasreg(right)) { |
2155 | rset_clear(allow, right); | 2180 | rset_clear(allow, right); |
2181 | ra_noweak(as, right); | ||
2182 | } | ||
2156 | dest = ra_dest(as, ir, allow); | 2183 | dest = ra_dest(as, ir, allow); |
2157 | if (lref == rref) { | 2184 | if (lref == rref) { |
2158 | right = dest; | 2185 | right = dest; |
@@ -2177,8 +2204,10 @@ static void asm_intarith(ASMState *as, IRIns *ir, x86Arith xa) | |||
2177 | as->mcp += (LJ_64 && *as->mcp != XI_TEST) ? 3 : 2; | 2204 | as->mcp += (LJ_64 && *as->mcp != XI_TEST) ? 3 : 2; |
2178 | } | 2205 | } |
2179 | right = IR(rref)->r; | 2206 | right = IR(rref)->r; |
2180 | if (ra_hasreg(right)) | 2207 | if (ra_hasreg(right)) { |
2181 | rset_clear(allow, right); | 2208 | rset_clear(allow, right); |
2209 | ra_noweak(as, right); | ||
2210 | } | ||
2182 | dest = ra_dest(as, ir, allow); | 2211 | dest = ra_dest(as, ir, allow); |
2183 | if (lref == rref) { | 2212 | if (lref == rref) { |
2184 | right = dest; | 2213 | right = dest; |
@@ -2225,6 +2254,7 @@ static int asm_lea(ASMState *as, IRIns *ir) | |||
2225 | as->mrm.ofs = 0; | 2254 | as->mrm.ofs = 0; |
2226 | if (ra_hasreg(irl->r)) { | 2255 | if (ra_hasreg(irl->r)) { |
2227 | rset_clear(allow, irl->r); | 2256 | rset_clear(allow, irl->r); |
2257 | ra_noweak(as, irl->r); | ||
2228 | as->mrm.base = irl->r; | 2258 | as->mrm.base = irl->r; |
2229 | if (irref_isk(ir->op2) || ra_hasreg(irr->r)) { | 2259 | if (irref_isk(ir->op2) || ra_hasreg(irr->r)) { |
2230 | /* The PHI renaming logic does a better job in some cases. */ | 2260 | /* The PHI renaming logic does a better job in some cases. */ |
@@ -2236,6 +2266,7 @@ static int asm_lea(ASMState *as, IRIns *ir) | |||
2236 | as->mrm.ofs = irr->i; | 2266 | as->mrm.ofs = irr->i; |
2237 | } else { | 2267 | } else { |
2238 | rset_clear(allow, irr->r); | 2268 | rset_clear(allow, irr->r); |
2269 | ra_noweak(as, irr->r); | ||
2239 | as->mrm.idx = irr->r; | 2270 | as->mrm.idx = irr->r; |
2240 | } | 2271 | } |
2241 | } else if (irr->o == IR_ADD && mayfuse(as, ir->op2) && | 2272 | } else if (irr->o == IR_ADD && mayfuse(as, ir->op2) && |
@@ -2322,8 +2353,10 @@ static void asm_bitshift(ASMState *as, IRIns *ir, x86Shift xs) | |||
2322 | } | 2353 | } |
2323 | dest = ra_dest(as, ir, allow); | 2354 | dest = ra_dest(as, ir, allow); |
2324 | emit_rr(as, XO_SHIFTcl, (Reg)xs, dest); | 2355 | emit_rr(as, XO_SHIFTcl, (Reg)xs, dest); |
2325 | if (right != RID_ECX) | 2356 | if (right != RID_ECX) { |
2357 | ra_noweak(as, right); | ||
2326 | emit_rr(as, XO_MOV, RID_ECX, right); | 2358 | emit_rr(as, XO_MOV, RID_ECX, right); |
2359 | } | ||
2327 | } | 2360 | } |
2328 | ra_left(as, dest, ir->op1); | 2361 | ra_left(as, dest, ir->op1); |
2329 | /* | 2362 | /* |