Diffstat (limited to 'src/lj_asm_ppc.h')
-rw-r--r--  src/lj_asm_ppc.h  34
1 file changed, 26 insertions, 8 deletions
diff --git a/src/lj_asm_ppc.h b/src/lj_asm_ppc.h
index 5d538fc8..142ef212 100644
--- a/src/lj_asm_ppc.h
+++ b/src/lj_asm_ppc.h
@@ -773,6 +773,8 @@ static void asm_newref(ASMState *as, IRIns *ir)
 {
   const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_tab_newkey];
   IRRef args[3];
+  if (ir->r == RID_SINK)  /* Sink newref. */
+    return;
   args[0] = ASMREF_L;     /* lua_State *L */
   args[1] = ir->op1;      /* GCtab *t */
   args[2] = ASMREF_TMP1;  /* cTValue *key */
@@ -892,12 +894,16 @@ static void asm_fload(ASMState *as, IRIns *ir)
 
 static void asm_fstore(ASMState *as, IRIns *ir)
 {
-  Reg src = ra_alloc1(as, ir->op2, RSET_GPR);
-  IRIns *irf = IR(ir->op1);
-  Reg idx = ra_alloc1(as, irf->op1, rset_exclude(RSET_GPR, src));
-  int32_t ofs = field_ofs[irf->op2];
-  PPCIns pi = asm_fxstoreins(ir);
-  emit_tai(as, pi, src, idx, ofs);
+  if (ir->r == RID_SINK) {  /* Sink store. */
+    asm_snap_prep(as);
+  } else {
+    Reg src = ra_alloc1(as, ir->op2, RSET_GPR);
+    IRIns *irf = IR(ir->op1);
+    Reg idx = ra_alloc1(as, irf->op1, rset_exclude(RSET_GPR, src));
+    int32_t ofs = field_ofs[irf->op2];
+    PPCIns pi = asm_fxstoreins(ir);
+    emit_tai(as, pi, src, idx, ofs);
+  }
 }
 
 static void asm_xload(ASMState *as, IRIns *ir)
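The same RID_SINK guard recurs throughout this change: a sunk NEWREF simply returns, while a sunk store only calls asm_snap_prep() so the value can still be materialized from the snapshot on a trace exit. Below is a minimal stand-alone sketch of that control flow; the types, the RID_SINK value, and the emit/snapshot helpers are simplified stand-ins, not LuaJIT's real definitions.

/* Stand-alone sketch, not LuaJIT source: simplified stand-in types and a
** made-up RID_SINK value illustrate the guard added to each store emitter. */
#include <stdio.h>

#define RID_SINK 0xfe  /* stand-in marker for "this store was sunk" */

typedef struct IRIns { int r, op1, op2; } IRIns;     /* simplified */
typedef struct ASMState { int nsnapprep; } ASMState; /* simplified */

/* In LuaJIT, asm_snap_prep() readies the snapshot state so sunk stores can
** be materialized on a trace exit; here it is reduced to a counter. */
static void asm_snap_prep(ASMState *as)
{
  as->nsnapprep++;
  puts("sunk: snapshot prepared, no machine code emitted");
}

static void emit_store(ASMState *as, IRIns *ir)
{
  (void)as;
  printf("emitted store: op1=%d op2=%d\n", ir->op1, ir->op2);
}

/* The pattern each hunk applies to its store emitter. */
static void asm_anystore(ASMState *as, IRIns *ir)
{
  if (ir->r == RID_SINK) {  /* Sink store. */
    asm_snap_prep(as);
    return;
  }
  emit_store(as, ir);
}

int main(void)
{
  ASMState as = { 0 };
  IRIns sunk = { RID_SINK, 1, 2 }, live = { 0, 3, 4 };
  asm_anystore(&as, &sunk);  /* only prepares the snapshot */
  asm_anystore(&as, &live);  /* actually emits the store */
  return 0;
}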
@@ -912,6 +918,10 @@ static void asm_xload(ASMState *as, IRIns *ir)
 static void asm_xstore(ASMState *as, IRIns *ir, int32_t ofs)
 {
   IRIns *irb;
+  if (ir->r == RID_SINK) {  /* Sink store. */
+    asm_snap_prep(as);
+    return;
+  }
   if (ofs == 0 && mayfuse(as, ir->op2) && (irb = IR(ir->op2))->o == IR_BSWAP &&
       ra_noreg(irb->r) && (irt_isint(ir->t) || irt_isu32(ir->t))) {
     /* Fuse BSWAP with XSTORE to stwbrx. */
@@ -968,6 +978,10 @@ static void asm_ahustore(ASMState *as, IRIns *ir)
   RegSet allow = RSET_GPR;
   Reg idx, src = RID_NONE, type = RID_NONE;
   int32_t ofs = AHUREF_LSX;
+  if (ir->r == RID_SINK) {  /* Sink store. */
+    asm_snap_prep(as);
+    return;
+  }
   if (irt_isnum(ir->t)) {
     src = ra_alloc1(as, ir->op2, RSET_FPR);
   } else {
@@ -1747,8 +1761,12 @@ static void asm_hiop(ASMState *as, IRIns *ir)
     return;
   } else if ((ir-1)->o == IR_XSTORE) {
     as->curins--;  /* Handle both stores here. */
-    asm_xstore(as, ir, 0);
-    asm_xstore(as, ir-1, 4);
+    if ((ir-1)->r == RID_SINK) {
+      asm_snap_prep(as);
+    } else {
+      asm_xstore(as, ir, 0);
+      asm_xstore(as, ir-1, 4);
+    }
     return;
   }
   if (!usehi) return;  /* Skip unused hiword op for all remaining ops. */
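In asm_hiop the two 32-bit halves of a 64-bit XSTORE are handled together, at offsets 0 and 4, and the hunk above sinks or emits them as a pair. A rough stand-alone illustration of that two-word placement follows; it assumes the high word lands at the lower offset (big-endian word order) and uses plain C in place of the emitted store instructions.

/* Stand-alone sketch, not LuaJIT source: a 64-bit value written as two
** 32-bit words at ofs and ofs+4, high word first (big-endian word order
** is assumed here). */
#include <stdint.h>
#include <string.h>
#include <stdio.h>

static void store_u64_as_two_words(uint8_t *base, int32_t ofs, uint64_t v)
{
  uint32_t hi = (uint32_t)(v >> 32);  /* counterpart of the hiword store */
  uint32_t lo = (uint32_t)v;          /* counterpart of the loword store */
  memcpy(base + ofs, &hi, 4);         /* offset 0 in the hunk above */
  memcpy(base + ofs + 4, &lo, 4);     /* offset 4 in the hunk above */
}

int main(void)
{
  uint8_t buf[8];
  store_u64_as_two_words(buf, 0, 0x0123456789abcdefULL);
  for (int i = 0; i < 8; i++) printf("%02x ", buf[i]);
  putchar('\n');
  return 0;
}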