author     Mike Pall <mike>  2010-04-19 23:41:48 +0200
committer  Mike Pall <mike>  2010-04-19 23:41:48 +0200
commit     d8cb69ed076c3444258f63314662451c9d117cae (patch)
tree       1557535f38b1b8aba610690672c3d797fcbc193f /src
parent     932cda0fe3cbd34e60aa68479935c946f69b756f (diff)
Simplify GC step calls from on-trace code.
Diffstat (limited to 'src')
-rw-r--r--  src/lj_asm.c  67
-rw-r--r--  src/lj_gc.c    4
-rw-r--r--  src/lj_gc.h    2
3 files changed, 32 insertions, 41 deletions
diff --git a/src/lj_asm.c b/src/lj_asm.c
index 73416768..b57cf118 100644
--- a/src/lj_asm.c
+++ b/src/lj_asm.c
@@ -1530,9 +1530,9 @@ static void asm_strto(ASMState *as, IRIns *ir)
     rset_set(drop, ir->r); /* WIN64 doesn't spill all FPRs. */
   ra_evictset(as, drop);
   asm_guardcc(as, CC_E);
-  emit_rr(as, XO_TEST, RID_RET, RID_RET);
-  args[0] = ir->op1;
-  args[1] = ASMREF_TMP1;
+  emit_rr(as, XO_TEST, RID_RET, RID_RET); /* Test return status. */
+  args[0] = ir->op1; /* GCstr *str */
+  args[1] = ASMREF_TMP1; /* TValue *n */
   asm_gencall(as, ci, args);
   /* Store the result to the spill slot or temp slots. */
   emit_rmro(as, XO_LEA, ra_releasetmp(as, ASMREF_TMP1)|REX_64,
@@ -1547,15 +1547,15 @@ static void asm_tostr(ASMState *as, IRIns *ir)
   as->gcsteps++;
   if (irt_isnum(irl->t)) {
     const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_str_fromnum];
-    args[1] = ASMREF_TMP1;
-    asm_setupresult(as, ir, ci);
+    args[1] = ASMREF_TMP1; /* const lua_Number * */
+    asm_setupresult(as, ir, ci); /* GCstr * */
     asm_gencall(as, ci, args);
     emit_rmro(as, XO_LEA, ra_releasetmp(as, ASMREF_TMP1)|REX_64,
               RID_ESP, ra_spill(as, irl));
   } else {
     const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_str_fromint];
-    args[1] = ir->op1;
-    asm_setupresult(as, ir, ci);
+    args[1] = ir->op1; /* int32_t k */
+    asm_setupresult(as, ir, ci); /* GCstr * */
     asm_gencall(as, ci, args);
   }
 }
@@ -1812,10 +1812,10 @@ static void asm_newref(ASMState *as, IRIns *ir)
   IRRef args[3];
   IRIns *irkey;
   Reg tmp;
-  args[0] = ASMREF_L;
-  args[1] = ir->op1;
-  args[2] = ASMREF_TMP1;
-  asm_setupresult(as, ir, ci);
+  args[0] = ASMREF_L; /* lua_State *L */
+  args[1] = ir->op1; /* GCtab *t */
+  args[2] = ASMREF_TMP1; /* cTValue *key */
+  asm_setupresult(as, ir, ci); /* TValue * */
   asm_gencall(as, ci, args);
   tmp = ra_releasetmp(as, ASMREF_TMP1);
   irkey = IR(ir->op2);
@@ -2086,11 +2086,11 @@ static void asm_snew(ASMState *as, IRIns *ir)
 {
   const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_str_new];
   IRRef args[3];
-  args[0] = ASMREF_L;
-  args[1] = ir->op1;
-  args[2] = ir->op2;
+  args[0] = ASMREF_L; /* lua_State *L */
+  args[1] = ir->op1; /* const char *str */
+  args[2] = ir->op2; /* size_t len */
   as->gcsteps++;
-  asm_setupresult(as, ir, ci);
+  asm_setupresult(as, ir, ci); /* GCstr * */
   asm_gencall(as, ci, args);
 }
 
@@ -2098,10 +2098,10 @@ static void asm_tnew(ASMState *as, IRIns *ir)
 {
   const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_tab_new1];
   IRRef args[2];
-  args[0] = ASMREF_L;
-  args[1] = ASMREF_TMP1;
+  args[0] = ASMREF_L; /* lua_State *L */
+  args[1] = ASMREF_TMP1; /* uint32_t ahsize */
   as->gcsteps++;
-  asm_setupresult(as, ir, ci);
+  asm_setupresult(as, ir, ci); /* GCtab * */
   asm_gencall(as, ci, args);
   emit_loadi(as, ra_releasetmp(as, ASMREF_TMP1), ir->op1 | (ir->op2 << 24));
 }
@@ -2110,10 +2110,10 @@ static void asm_tdup(ASMState *as, IRIns *ir)
 {
   const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_tab_dup];
   IRRef args[2];
-  args[0] = ASMREF_L;
-  args[1] = ir->op1;
+  args[0] = ASMREF_L; /* lua_State *L */
+  args[1] = ir->op1; /* const GCtab *kt */
   as->gcsteps++;
-  asm_setupresult(as, ir, ci);
+  asm_setupresult(as, ir, ci); /* GCtab * */
   asm_gencall(as, ci, args);
 }
 
@@ -2144,8 +2144,8 @@ static void asm_obar(ASMState *as, IRIns *ir)
   lua_assert(IR(ir->op1)->o == IR_UREFC);
   ra_evictset(as, RSET_SCRATCH);
   l_end = emit_label(as);
-  args[0] = ASMREF_TMP1;
-  args[1] = ir->op1;
+  args[0] = ASMREF_TMP1; /* global_State *g */
+  args[1] = ir->op1; /* TValue *tv */
   asm_gencall(as, ci, args);
   emit_loada(as, ra_releasetmp(as, ASMREF_TMP1), J2G(as->J));
   obj = IR(ir->op1)->r;
@@ -2758,30 +2758,19 @@ static void asm_gc_check(ASMState *as)
   const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_gc_step_jit];
   IRRef args[2];
   MCLabel l_end;
-  Reg base, lstate, tmp;
+  Reg tmp;
   ra_evictset(as, RSET_SCRATCH);
   l_end = emit_label(as);
   /* Exit trace if in GCSatomic or GCSfinalize. Avoids syncing GC objects. */
   asm_guardcc(as, CC_NE); /* Assumes asm_snap_prep() already done. */
   emit_rr(as, XO_TEST, RID_RET, RID_RET);
-  args[0] = ASMREF_L;
-  args[1] = ASMREF_TMP1;
+  args[0] = ASMREF_TMP1; /* global_State *g */
+  args[1] = ASMREF_TMP2; /* MSize steps */
   asm_gencall(as, ci, args);
   tmp = ra_releasetmp(as, ASMREF_TMP1);
-  emit_loadi(as, tmp, (int32_t)as->gcsteps);
-  /* It's ok if lstate is already in a non-scratch reg. But all allocations
-  ** in the non-fast path must use a scratch reg (avoids unification).
-  */
-  lstate = IR(ASMREF_L)->r;
-  base = ra_alloc1(as, REF_BASE, rset_exclude(RSET_SCRATCH & RSET_GPR, lstate));
-  emit_movtomro(as, base|REX_64, lstate, offsetof(lua_State, base));
-  /* BASE/L get restored anyway, better do it inside the slow path. */
-  if (rset_test(RSET_SCRATCH, base) && (as->parent || as->snapno != 0))
-    ra_restore(as, REF_BASE);
-  if (rset_test(RSET_SCRATCH, lstate) && ra_hasreg(IR(ASMREF_L)->r))
-    ra_restore(as, ASMREF_L);
+  emit_loada(as, tmp, J2G(as->J));
+  emit_loadi(as, ra_releasetmp(as, ASMREF_TMP2), (int32_t)as->gcsteps);
   /* Jump around GC step if GC total < GC threshold. */
-  tmp = ra_scratch(as, RSET_SCRATCH & RSET_GPR);
   emit_sjcc(as, CC_B, l_end);
   emit_opgl(as, XO_ARITH(XOg_CMP), tmp, gc.threshold);
   emit_getgl(as, tmp, gc.total);
diff --git a/src/lj_gc.c b/src/lj_gc.c
index 18c07533..b97fb955 100644
--- a/src/lj_gc.c
+++ b/src/lj_gc.c
@@ -631,8 +631,10 @@ void LJ_FASTCALL lj_gc_step_fixtop(lua_State *L)
 
 #if LJ_HASJIT
 /* Perform multiple GC steps. Called from JIT-compiled code. */
-int LJ_FASTCALL lj_gc_step_jit(lua_State *L, MSize steps)
+int LJ_FASTCALL lj_gc_step_jit(global_State *g, MSize steps)
 {
+  lua_State *L = gco2th(gcref(g->jit_L));
+  L->base = mref(G(L)->jit_base, TValue);
   L->top = curr_topL(L);
   while (steps-- > 0 && lj_gc_step(L) == 0)
     ;
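
The lj_gc.c hunk above is the callee side of the simplification: lj_gc_step_jit() now takes only the global_State and recovers the running coroutine and its base pointer from g->jit_L and jit_base, so the generated trace code (see the asm_gc_check hunk earlier) no longer has to materialize L or spill BASE before the call. A minimal, compilable sketch of that calling-convention change, using toy types and hypothetical names rather than LuaJIT's actual structs:

#include <stdio.h>
#include <stddef.h>

/* Toy stand-ins for lua_State/global_State; names are hypothetical and only
** illustrate the calling-convention change made by this commit.
*/
typedef struct toy_State {
  void *base;                   /* stack base, kept in a register on-trace */
} toy_State;

typedef struct toy_Global {
  toy_State *jit_L;             /* coroutine currently executing JIT code */
  void *jit_base;               /* its current base, maintained by traces */
  size_t gc_total, gc_threshold;
} toy_Global;

/* One incremental GC step; returns nonzero once a cycle has finished. */
static int toy_gc_step(toy_State *L) { (void)L; return 1; }

/* New shape: generated code passes only g and a step count; the callee
** recovers L and syncs L->base itself, as lj_gc_step_jit() now does.
*/
static int toy_gc_step_jit(toy_Global *g, unsigned steps)
{
  toy_State *L = g->jit_L;
  L->base = g->jit_base;        /* previously stored by the generated code */
  while (steps-- > 0 && toy_gc_step(L) == 0)
    ;
  return 0;                     /* nonzero would force the trace to exit */
}

int main(void)
{
  toy_State L = { NULL };
  toy_Global g = { &L, &L, 10, 8 };
  /* Model of the emitted slow path: compare total against threshold,
  ** call with (g, steps), exit the trace if the call returns nonzero. */
  if (g.gc_total >= g.gc_threshold && toy_gc_step_jit(&g, 2) != 0)
    printf("exit trace\n");
  else
    printf("stay on trace\n");
  return 0;
}
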
diff --git a/src/lj_gc.h b/src/lj_gc.h
index 4832eca8..7279b93c 100644
--- a/src/lj_gc.h
+++ b/src/lj_gc.h
@@ -47,7 +47,7 @@ LJ_FUNC void lj_gc_freeall(global_State *g);
 LJ_FUNCA int LJ_FASTCALL lj_gc_step(lua_State *L);
 LJ_FUNCA void LJ_FASTCALL lj_gc_step_fixtop(lua_State *L);
 #if LJ_HASJIT
-LJ_FUNC int LJ_FASTCALL lj_gc_step_jit(lua_State *L, MSize steps);
+LJ_FUNC int LJ_FASTCALL lj_gc_step_jit(global_State *g, MSize steps);
 #endif
 LJ_FUNC void lj_gc_fullgc(lua_State *L);
 