diff options
| author | Mike Pall <mike> | 2021-09-19 17:47:11 +0200 |
|---|---|---|
| committer | Mike Pall <mike> | 2021-09-19 17:47:11 +0200 |
| commit | 986bb406ad6af93eebd781860c384cc853103827 (patch) | |
| tree | 8aadc7e59e7926dfdd527a74205e3f9c665072d3 /src | |
| parent | c6f5ef649b645db9cf3d11d1b5c63602c49c6411 (diff) | |
| download | luajit-986bb406ad6af93eebd781860c384cc853103827.tar.gz luajit-986bb406ad6af93eebd781860c384cc853103827.tar.bz2 luajit-986bb406ad6af93eebd781860c384cc853103827.zip | |
Use IR_HIOP for generalized two-register returns.
Sponsored by OpenResty Inc.
Diffstat (limited to 'src')
| -rw-r--r-- | src/lj_asm.c | 31 | ||||
| -rw-r--r-- | src/lj_asm_arm.h | 27 | ||||
| -rw-r--r-- | src/lj_asm_arm64.h | 25 | ||||
| -rw-r--r-- | src/lj_asm_mips.h | 37 | ||||
| -rw-r--r-- | src/lj_asm_ppc.h | 29 | ||||
| -rw-r--r-- | src/lj_asm_x86.h | 26 | ||||
| -rw-r--r-- | src/lj_target_arm64.h | 2 | ||||
| -rw-r--r-- | src/lj_target_x86.h | 3 |
8 files changed, 84 insertions, 96 deletions
diff --git a/src/lj_asm.c b/src/lj_asm.c index 1ecbe45e..d377eb4d 100644 --- a/src/lj_asm.c +++ b/src/lj_asm.c | |||
| @@ -818,11 +818,11 @@ static void ra_leftov(ASMState *as, Reg dest, IRRef lref) | |||
| 818 | } | 818 | } |
| 819 | #endif | 819 | #endif |
| 820 | 820 | ||
| 821 | #if !LJ_64 | ||
| 822 | /* Force a RID_RETLO/RID_RETHI destination register pair (marked as free). */ | 821 | /* Force a RID_RETLO/RID_RETHI destination register pair (marked as free). */ |
| 823 | static void ra_destpair(ASMState *as, IRIns *ir) | 822 | static void ra_destpair(ASMState *as, IRIns *ir) |
| 824 | { | 823 | { |
| 825 | Reg destlo = ir->r, desthi = (ir+1)->r; | 824 | Reg destlo = ir->r, desthi = (ir+1)->r; |
| 825 | IRIns *irx = (LJ_64 && !irt_is64(ir->t)) ? ir+1 : ir; | ||
| 826 | /* First spill unrelated refs blocking the destination registers. */ | 826 | /* First spill unrelated refs blocking the destination registers. */ |
| 827 | if (!rset_test(as->freeset, RID_RETLO) && | 827 | if (!rset_test(as->freeset, RID_RETLO) && |
| 828 | destlo != RID_RETLO && desthi != RID_RETLO) | 828 | destlo != RID_RETLO && desthi != RID_RETLO) |
| @@ -846,29 +846,28 @@ static void ra_destpair(ASMState *as, IRIns *ir) | |||
| 846 | /* Check for conflicts and shuffle the registers as needed. */ | 846 | /* Check for conflicts and shuffle the registers as needed. */ |
| 847 | if (destlo == RID_RETHI) { | 847 | if (destlo == RID_RETHI) { |
| 848 | if (desthi == RID_RETLO) { | 848 | if (desthi == RID_RETLO) { |
| 849 | #if LJ_TARGET_X86 | 849 | #if LJ_TARGET_X86ORX64 |
| 850 | *--as->mcp = XI_XCHGa + RID_RETHI; | 850 | *--as->mcp = REX_64IR(irx, XI_XCHGa + RID_RETHI); |
| 851 | #else | 851 | #else |
| 852 | emit_movrr(as, ir, RID_RETHI, RID_TMP); | 852 | emit_movrr(as, irx, RID_RETHI, RID_TMP); |
| 853 | emit_movrr(as, ir, RID_RETLO, RID_RETHI); | 853 | emit_movrr(as, irx, RID_RETLO, RID_RETHI); |
| 854 | emit_movrr(as, ir, RID_TMP, RID_RETLO); | 854 | emit_movrr(as, irx, RID_TMP, RID_RETLO); |
| 855 | #endif | 855 | #endif |
| 856 | } else { | 856 | } else { |
| 857 | emit_movrr(as, ir, RID_RETHI, RID_RETLO); | 857 | emit_movrr(as, irx, RID_RETHI, RID_RETLO); |
| 858 | if (desthi != RID_RETHI) emit_movrr(as, ir, desthi, RID_RETHI); | 858 | if (desthi != RID_RETHI) emit_movrr(as, irx, desthi, RID_RETHI); |
| 859 | } | 859 | } |
| 860 | } else if (desthi == RID_RETLO) { | 860 | } else if (desthi == RID_RETLO) { |
| 861 | emit_movrr(as, ir, RID_RETLO, RID_RETHI); | 861 | emit_movrr(as, irx, RID_RETLO, RID_RETHI); |
| 862 | if (destlo != RID_RETLO) emit_movrr(as, ir, destlo, RID_RETLO); | 862 | if (destlo != RID_RETLO) emit_movrr(as, irx, destlo, RID_RETLO); |
| 863 | } else { | 863 | } else { |
| 864 | if (desthi != RID_RETHI) emit_movrr(as, ir, desthi, RID_RETHI); | 864 | if (desthi != RID_RETHI) emit_movrr(as, irx, desthi, RID_RETHI); |
| 865 | if (destlo != RID_RETLO) emit_movrr(as, ir, destlo, RID_RETLO); | 865 | if (destlo != RID_RETLO) emit_movrr(as, irx, destlo, RID_RETLO); |
| 866 | } | 866 | } |
| 867 | /* Restore spill slots (if any). */ | 867 | /* Restore spill slots (if any). */ |
| 868 | if (ra_hasspill((ir+1)->s)) ra_save(as, ir+1, RID_RETHI); | 868 | if (ra_hasspill((ir+1)->s)) ra_save(as, ir+1, RID_RETHI); |
| 869 | if (ra_hasspill(ir->s)) ra_save(as, ir, RID_RETLO); | 869 | if (ra_hasspill(ir->s)) ra_save(as, ir, RID_RETLO); |
| 870 | } | 870 | } |
| 871 | #endif | ||
| 872 | 871 | ||
| 873 | /* -- Snapshot handling --------------------------------------------------- */ | 872 | /* -- Snapshot handling --------------------------------------------------- */ |
| 874 | 873 | ||
| @@ -2234,7 +2233,6 @@ static void asm_setup_regsp(ASMState *as) | |||
| 2234 | (RSET_SCRATCH & ~RSET_FPR) : RSET_SCRATCH; | 2233 | (RSET_SCRATCH & ~RSET_FPR) : RSET_SCRATCH; |
| 2235 | continue; | 2234 | continue; |
| 2236 | } | 2235 | } |
| 2237 | #if LJ_SOFTFP || (LJ_32 && LJ_HASFFI) | ||
| 2238 | case IR_HIOP: | 2236 | case IR_HIOP: |
| 2239 | switch ((ir-1)->o) { | 2237 | switch ((ir-1)->o) { |
| 2240 | #if LJ_SOFTFP && LJ_TARGET_ARM | 2238 | #if LJ_SOFTFP && LJ_TARGET_ARM |
| @@ -2245,7 +2243,7 @@ static void asm_setup_regsp(ASMState *as) | |||
| 2245 | } | 2243 | } |
| 2246 | break; | 2244 | break; |
| 2247 | #endif | 2245 | #endif |
| 2248 | #if !LJ_SOFTFP && LJ_NEED_FP64 | 2246 | #if !LJ_SOFTFP && LJ_NEED_FP64 && LJ_32 && LJ_HASFFI |
| 2249 | case IR_CONV: | 2247 | case IR_CONV: |
| 2250 | if (irt_isfp((ir-1)->t)) { | 2248 | if (irt_isfp((ir-1)->t)) { |
| 2251 | ir->prev = REGSP_HINT(RID_FPRET); | 2249 | ir->prev = REGSP_HINT(RID_FPRET); |
| @@ -2253,7 +2251,7 @@ static void asm_setup_regsp(ASMState *as) | |||
| 2253 | } | 2251 | } |
| 2254 | #endif | 2252 | #endif |
| 2255 | /* fallthrough */ | 2253 | /* fallthrough */ |
| 2256 | case IR_CALLN: case IR_CALLXS: | 2254 | case IR_CALLN: case IR_CALLL: case IR_CALLS: case IR_CALLXS: |
| 2257 | #if LJ_SOFTFP | 2255 | #if LJ_SOFTFP |
| 2258 | case IR_MIN: case IR_MAX: | 2256 | case IR_MIN: case IR_MAX: |
| 2259 | #endif | 2257 | #endif |
| @@ -2264,7 +2262,6 @@ static void asm_setup_regsp(ASMState *as) | |||
| 2264 | break; | 2262 | break; |
| 2265 | } | 2263 | } |
| 2266 | break; | 2264 | break; |
| 2267 | #endif | ||
| 2268 | #if LJ_SOFTFP | 2265 | #if LJ_SOFTFP |
| 2269 | case IR_MIN: case IR_MAX: | 2266 | case IR_MIN: case IR_MAX: |
| 2270 | if ((ir+1)->o != IR_HIOP) break; | 2267 | if ((ir+1)->o != IR_HIOP) break; |
diff --git a/src/lj_asm_arm.h b/src/lj_asm_arm.h index 3adb534b..e53f9b08 100644 --- a/src/lj_asm_arm.h +++ b/src/lj_asm_arm.h | |||
| @@ -1885,15 +1885,15 @@ static void asm_int64comp(ASMState *as, IRIns *ir) | |||
| 1885 | } | 1885 | } |
| 1886 | #endif | 1886 | #endif |
| 1887 | 1887 | ||
| 1888 | /* -- Support for 64 bit ops in 32 bit mode ------------------------------- */ | 1888 | /* -- Split register ops -------------------------------------------------- */ |
| 1889 | 1889 | ||
| 1890 | /* Hiword op of a split 64 bit op. Previous op must be the loword op. */ | 1890 | /* Hiword op of a split 32/32 bit op. Previous op is the loword op. */ |
| 1891 | static void asm_hiop(ASMState *as, IRIns *ir) | 1891 | static void asm_hiop(ASMState *as, IRIns *ir) |
| 1892 | { | 1892 | { |
| 1893 | #if LJ_HASFFI || LJ_SOFTFP | ||
| 1894 | /* HIOP is marked as a store because it needs its own DCE logic. */ | 1893 | /* HIOP is marked as a store because it needs its own DCE logic. */ |
| 1895 | int uselo = ra_used(ir-1), usehi = ra_used(ir); /* Loword/hiword used? */ | 1894 | int uselo = ra_used(ir-1), usehi = ra_used(ir); /* Loword/hiword used? */ |
| 1896 | if (LJ_UNLIKELY(!(as->flags & JIT_F_OPT_DCE))) uselo = usehi = 1; | 1895 | if (LJ_UNLIKELY(!(as->flags & JIT_F_OPT_DCE))) uselo = usehi = 1; |
| 1896 | #if LJ_HASFFI || LJ_SOFTFP | ||
| 1897 | if ((ir-1)->o <= IR_NE) { /* 64 bit integer or FP comparisons. ORDER IR. */ | 1897 | if ((ir-1)->o <= IR_NE) { /* 64 bit integer or FP comparisons. ORDER IR. */ |
| 1898 | as->curins--; /* Always skip the loword comparison. */ | 1898 | as->curins--; /* Always skip the loword comparison. */ |
| 1899 | #if LJ_SOFTFP | 1899 | #if LJ_SOFTFP |
| @@ -1924,6 +1924,7 @@ static void asm_hiop(ASMState *as, IRIns *ir) | |||
| 1924 | asm_xstore_(as, ir, 4); | 1924 | asm_xstore_(as, ir, 4); |
| 1925 | return; | 1925 | return; |
| 1926 | } | 1926 | } |
| 1927 | #endif | ||
| 1927 | if (!usehi) return; /* Skip unused hiword op for all remaining ops. */ | 1928 | if (!usehi) return; /* Skip unused hiword op for all remaining ops. */ |
| 1928 | switch ((ir-1)->o) { | 1929 | switch ((ir-1)->o) { |
| 1929 | #if LJ_HASFFI | 1930 | #if LJ_HASFFI |
| @@ -1942,6 +1943,9 @@ static void asm_hiop(ASMState *as, IRIns *ir) | |||
| 1942 | asm_intneg(as, ir, ARMI_RSC); | 1943 | asm_intneg(as, ir, ARMI_RSC); |
| 1943 | asm_intneg(as, ir-1, ARMI_RSB|ARMI_S); | 1944 | asm_intneg(as, ir-1, ARMI_RSB|ARMI_S); |
| 1944 | break; | 1945 | break; |
| 1946 | case IR_CNEWI: | ||
| 1947 | /* Nothing to do here. Handled by lo op itself. */ | ||
| 1948 | break; | ||
| 1945 | #endif | 1949 | #endif |
| 1946 | #if LJ_SOFTFP | 1950 | #if LJ_SOFTFP |
| 1947 | case IR_SLOAD: case IR_ALOAD: case IR_HLOAD: case IR_ULOAD: case IR_VLOAD: | 1951 | case IR_SLOAD: case IR_ALOAD: case IR_HLOAD: case IR_ULOAD: case IR_VLOAD: |
| @@ -1949,25 +1953,16 @@ static void asm_hiop(ASMState *as, IRIns *ir) | |||
| 1949 | if (!uselo) | 1953 | if (!uselo) |
| 1950 | ra_allocref(as, ir->op1, RSET_GPR); /* Mark lo op as used. */ | 1954 | ra_allocref(as, ir->op1, RSET_GPR); /* Mark lo op as used. */ |
| 1951 | break; | 1955 | break; |
| 1956 | case IR_ASTORE: case IR_HSTORE: case IR_USTORE: case IR_TOSTR: case IR_TMPREF: | ||
| 1957 | /* Nothing to do here. Handled by lo op itself. */ | ||
| 1958 | break; | ||
| 1952 | #endif | 1959 | #endif |
| 1953 | case IR_CALLN: | 1960 | case IR_CALLN: case IR_CALLL: case IR_CALLS: case IR_CALLXS: |
| 1954 | case IR_CALLS: | ||
| 1955 | case IR_CALLXS: | ||
| 1956 | if (!uselo) | 1961 | if (!uselo) |
| 1957 | ra_allocref(as, ir->op1, RID2RSET(RID_RETLO)); /* Mark lo op as used. */ | 1962 | ra_allocref(as, ir->op1, RID2RSET(RID_RETLO)); /* Mark lo op as used. */ |
| 1958 | break; | 1963 | break; |
| 1959 | #if LJ_SOFTFP | ||
| 1960 | case IR_ASTORE: case IR_HSTORE: case IR_USTORE: case IR_TOSTR: case IR_TMPREF: | ||
| 1961 | #endif | ||
| 1962 | case IR_CNEWI: | ||
| 1963 | /* Nothing to do here. Handled by lo op itself. */ | ||
| 1964 | break; | ||
| 1965 | default: lj_assertA(0, "bad HIOP for op %d", (ir-1)->o); break; | 1964 | default: lj_assertA(0, "bad HIOP for op %d", (ir-1)->o); break; |
| 1966 | } | 1965 | } |
| 1967 | #else | ||
| 1968 | /* Unused without SOFTFP or FFI. */ | ||
| 1969 | UNUSED(as); UNUSED(ir); lj_assertA(0, "unexpected HIOP"); | ||
| 1970 | #endif | ||
| 1971 | } | 1966 | } |
| 1972 | 1967 | ||
| 1973 | /* -- Profiling ----------------------------------------------------------- */ | 1968 | /* -- Profiling ----------------------------------------------------------- */ |
diff --git a/src/lj_asm_arm64.h b/src/lj_asm_arm64.h index f51c6f76..3cedd021 100644 --- a/src/lj_asm_arm64.h +++ b/src/lj_asm_arm64.h | |||
| @@ -460,8 +460,11 @@ static void asm_gencall(ASMState *as, const CCallInfo *ci, IRRef *args) | |||
| 460 | static void asm_setupresult(ASMState *as, IRIns *ir, const CCallInfo *ci) | 460 | static void asm_setupresult(ASMState *as, IRIns *ir, const CCallInfo *ci) |
| 461 | { | 461 | { |
| 462 | RegSet drop = RSET_SCRATCH; | 462 | RegSet drop = RSET_SCRATCH; |
| 463 | int hiop = ((ir+1)->o == IR_HIOP && !irt_isnil((ir+1)->t)); | ||
| 463 | if (ra_hasreg(ir->r)) | 464 | if (ra_hasreg(ir->r)) |
| 464 | rset_clear(drop, ir->r); /* Dest reg handled below. */ | 465 | rset_clear(drop, ir->r); /* Dest reg handled below. */ |
| 466 | if (hiop && ra_hasreg((ir+1)->r)) | ||
| 467 | rset_clear(drop, (ir+1)->r); /* Dest reg handled below. */ | ||
| 465 | ra_evictset(as, drop); /* Evictions must be performed first. */ | 468 | ra_evictset(as, drop); /* Evictions must be performed first. */ |
| 466 | if (ra_used(ir)) { | 469 | if (ra_used(ir)) { |
| 467 | lj_assertA(!irt_ispri(ir->t), "PRI dest"); | 470 | lj_assertA(!irt_ispri(ir->t), "PRI dest"); |
| @@ -473,6 +476,8 @@ static void asm_setupresult(ASMState *as, IRIns *ir, const CCallInfo *ci) | |||
| 473 | } else { | 476 | } else { |
| 474 | ra_destreg(as, ir, RID_FPRET); | 477 | ra_destreg(as, ir, RID_FPRET); |
| 475 | } | 478 | } |
| 479 | } else if (hiop) { | ||
| 480 | ra_destpair(as, ir); | ||
| 476 | } else { | 481 | } else { |
| 477 | ra_destreg(as, ir, RID_RET); | 482 | ra_destreg(as, ir, RID_RET); |
| 478 | } | 483 | } |
| @@ -1720,13 +1725,25 @@ static void asm_comp(ASMState *as, IRIns *ir) | |||
| 1720 | 1725 | ||
| 1721 | #define asm_equal(as, ir) asm_comp(as, ir) | 1726 | #define asm_equal(as, ir) asm_comp(as, ir) |
| 1722 | 1727 | ||
| 1723 | /* -- Support for 64 bit ops in 32 bit mode ------------------------------- */ | 1728 | /* -- Split register ops -------------------------------------------------- */ |
| 1724 | 1729 | ||
| 1725 | /* Hiword op of a split 64 bit op. Previous op must be the loword op. */ | 1730 | /* Hiword op of a split 64/64 bit op. Previous op is the loword op. */ |
| 1726 | static void asm_hiop(ASMState *as, IRIns *ir) | 1731 | static void asm_hiop(ASMState *as, IRIns *ir) |
| 1727 | { | 1732 | { |
| 1728 | UNUSED(as); UNUSED(ir); | 1733 | /* HIOP is marked as a store because it needs its own DCE logic. */ |
| 1729 | lj_assertA(0, "unexpected HIOP"); /* Unused on 64 bit. */ | 1734 | int uselo = ra_used(ir-1), usehi = ra_used(ir); /* Loword/hiword used? */ |
| 1735 | if (LJ_UNLIKELY(!(as->flags & JIT_F_OPT_DCE))) uselo = usehi = 1; | ||
| 1736 | if (!usehi) return; /* Skip unused hiword op for all remaining ops. */ | ||
| 1737 | switch ((ir-1)->o) { | ||
| 1738 | case IR_CALLN: | ||
| 1739 | case IR_CALLL: | ||
| 1740 | case IR_CALLS: | ||
| 1741 | case IR_CALLXS: | ||
| 1742 | if (!uselo) | ||
| 1743 | ra_allocref(as, ir->op1, RID2RSET(RID_RETLO)); /* Mark lo op as used. */ | ||
| 1744 | break; | ||
| 1745 | default: lj_assertA(0, "bad HIOP for op %d", (ir-1)->o); break; | ||
| 1746 | } | ||
| 1730 | } | 1747 | } |
| 1731 | 1748 | ||
| 1732 | /* -- Profiling ----------------------------------------------------------- */ | 1749 | /* -- Profiling ----------------------------------------------------------- */ |
diff --git a/src/lj_asm_mips.h b/src/lj_asm_mips.h index cd32d038..7f7dc6a0 100644 --- a/src/lj_asm_mips.h +++ b/src/lj_asm_mips.h | |||
| @@ -351,19 +351,15 @@ static void asm_gencall(ASMState *as, const CCallInfo *ci, IRRef *args) | |||
| 351 | static void asm_setupresult(ASMState *as, IRIns *ir, const CCallInfo *ci) | 351 | static void asm_setupresult(ASMState *as, IRIns *ir, const CCallInfo *ci) |
| 352 | { | 352 | { |
| 353 | RegSet drop = RSET_SCRATCH; | 353 | RegSet drop = RSET_SCRATCH; |
| 354 | #if LJ_32 | ||
| 355 | int hiop = ((ir+1)->o == IR_HIOP && !irt_isnil((ir+1)->t)); | 354 | int hiop = ((ir+1)->o == IR_HIOP && !irt_isnil((ir+1)->t)); |
| 356 | #endif | ||
| 357 | #if !LJ_SOFTFP | 355 | #if !LJ_SOFTFP |
| 358 | if ((ci->flags & CCI_NOFPRCLOBBER)) | 356 | if ((ci->flags & CCI_NOFPRCLOBBER)) |
| 359 | drop &= ~RSET_FPR; | 357 | drop &= ~RSET_FPR; |
| 360 | #endif | 358 | #endif |
| 361 | if (ra_hasreg(ir->r)) | 359 | if (ra_hasreg(ir->r)) |
| 362 | rset_clear(drop, ir->r); /* Dest reg handled below. */ | 360 | rset_clear(drop, ir->r); /* Dest reg handled below. */ |
| 363 | #if LJ_32 | ||
| 364 | if (hiop && ra_hasreg((ir+1)->r)) | 361 | if (hiop && ra_hasreg((ir+1)->r)) |
| 365 | rset_clear(drop, (ir+1)->r); /* Dest reg handled below. */ | 362 | rset_clear(drop, (ir+1)->r); /* Dest reg handled below. */ |
| 366 | #endif | ||
| 367 | ra_evictset(as, drop); /* Evictions must be performed first. */ | 363 | ra_evictset(as, drop); /* Evictions must be performed first. */ |
| 368 | if (ra_used(ir)) { | 364 | if (ra_used(ir)) { |
| 369 | lj_assertA(!irt_ispri(ir->t), "PRI dest"); | 365 | lj_assertA(!irt_ispri(ir->t), "PRI dest"); |
| @@ -392,10 +388,8 @@ static void asm_setupresult(ASMState *as, IRIns *ir, const CCallInfo *ci) | |||
| 392 | } else { | 388 | } else { |
| 393 | ra_destreg(as, ir, RID_FPRET); | 389 | ra_destreg(as, ir, RID_FPRET); |
| 394 | } | 390 | } |
| 395 | #if LJ_32 | ||
| 396 | } else if (hiop) { | 391 | } else if (hiop) { |
| 397 | ra_destpair(as, ir); | 392 | ra_destpair(as, ir); |
| 398 | #endif | ||
| 399 | } else { | 393 | } else { |
| 400 | ra_destreg(as, ir, RID_RET); | 394 | ra_destreg(as, ir, RID_RET); |
| 401 | } | 395 | } |
| @@ -2393,15 +2387,15 @@ static void asm_comp64eq(ASMState *as, IRIns *ir) | |||
| 2393 | } | 2387 | } |
| 2394 | #endif | 2388 | #endif |
| 2395 | 2389 | ||
| 2396 | /* -- Support for 64 bit ops in 32 bit mode ------------------------------- */ | 2390 | /* -- Split register ops -------------------------------------------------- */ |
| 2397 | 2391 | ||
| 2398 | /* Hiword op of a split 64 bit op. Previous op must be the loword op. */ | 2392 | /* Hiword op of a split 32/32 or 64/64 bit op. Previous op is the loword op. */ |
| 2399 | static void asm_hiop(ASMState *as, IRIns *ir) | 2393 | static void asm_hiop(ASMState *as, IRIns *ir) |
| 2400 | { | 2394 | { |
| 2401 | #if LJ_32 && (LJ_HASFFI || LJ_SOFTFP) | ||
| 2402 | /* HIOP is marked as a store because it needs its own DCE logic. */ | 2395 | /* HIOP is marked as a store because it needs its own DCE logic. */ |
| 2403 | int uselo = ra_used(ir-1), usehi = ra_used(ir); /* Loword/hiword used? */ | 2396 | int uselo = ra_used(ir-1), usehi = ra_used(ir); /* Loword/hiword used? */ |
| 2404 | if (LJ_UNLIKELY(!(as->flags & JIT_F_OPT_DCE))) uselo = usehi = 1; | 2397 | if (LJ_UNLIKELY(!(as->flags & JIT_F_OPT_DCE))) uselo = usehi = 1; |
| 2398 | #if LJ_32 && (LJ_HASFFI || LJ_SOFTFP) | ||
| 2405 | if ((ir-1)->o == IR_CONV) { /* Conversions to/from 64 bit. */ | 2399 | if ((ir-1)->o == IR_CONV) { /* Conversions to/from 64 bit. */ |
| 2406 | as->curins--; /* Always skip the CONV. */ | 2400 | as->curins--; /* Always skip the CONV. */ |
| 2407 | #if LJ_HASFFI && !LJ_SOFTFP | 2401 | #if LJ_HASFFI && !LJ_SOFTFP |
| @@ -2448,38 +2442,33 @@ static void asm_hiop(ASMState *as, IRIns *ir) | |||
| 2448 | } | 2442 | } |
| 2449 | return; | 2443 | return; |
| 2450 | } | 2444 | } |
| 2445 | #endif | ||
| 2451 | if (!usehi) return; /* Skip unused hiword op for all remaining ops. */ | 2446 | if (!usehi) return; /* Skip unused hiword op for all remaining ops. */ |
| 2452 | switch ((ir-1)->o) { | 2447 | switch ((ir-1)->o) { |
| 2453 | #if LJ_HASFFI | 2448 | #if LJ_32 && LJ_HASFFI |
| 2454 | case IR_ADD: as->curins--; asm_add64(as, ir); break; | 2449 | case IR_ADD: as->curins--; asm_add64(as, ir); break; |
| 2455 | case IR_SUB: as->curins--; asm_sub64(as, ir); break; | 2450 | case IR_SUB: as->curins--; asm_sub64(as, ir); break; |
| 2456 | case IR_NEG: as->curins--; asm_neg64(as, ir); break; | 2451 | case IR_NEG: as->curins--; asm_neg64(as, ir); break; |
| 2452 | case IR_CNEWI: | ||
| 2453 | /* Nothing to do here. Handled by lo op itself. */ | ||
| 2454 | break; | ||
| 2457 | #endif | 2455 | #endif |
| 2458 | #if LJ_SOFTFP | 2456 | #if LJ_32 && LJ_SOFTFP |
| 2459 | case IR_SLOAD: case IR_ALOAD: case IR_HLOAD: case IR_ULOAD: case IR_VLOAD: | 2457 | case IR_SLOAD: case IR_ALOAD: case IR_HLOAD: case IR_ULOAD: case IR_VLOAD: |
| 2460 | case IR_STRTO: | 2458 | case IR_STRTO: |
| 2461 | if (!uselo) | 2459 | if (!uselo) |
| 2462 | ra_allocref(as, ir->op1, RSET_GPR); /* Mark lo op as used. */ | 2460 | ra_allocref(as, ir->op1, RSET_GPR); /* Mark lo op as used. */ |
| 2463 | break; | 2461 | break; |
| 2462 | case IR_ASTORE: case IR_HSTORE: case IR_USTORE: case IR_TOSTR: case IR_TMPREF: | ||
| 2463 | /* Nothing to do here. Handled by lo op itself. */ | ||
| 2464 | break; | ||
| 2464 | #endif | 2465 | #endif |
| 2465 | case IR_CALLN: | 2466 | case IR_CALLN: case IR_CALLL: case IR_CALLS: case IR_CALLXS: |
| 2466 | case IR_CALLS: | ||
| 2467 | case IR_CALLXS: | ||
| 2468 | if (!uselo) | 2467 | if (!uselo) |
| 2469 | ra_allocref(as, ir->op1, RID2RSET(RID_RETLO)); /* Mark lo op as used. */ | 2468 | ra_allocref(as, ir->op1, RID2RSET(RID_RETLO)); /* Mark lo op as used. */ |
| 2470 | break; | 2469 | break; |
| 2471 | #if LJ_SOFTFP | ||
| 2472 | case IR_ASTORE: case IR_HSTORE: case IR_USTORE: case IR_TOSTR: case IR_TMPREF: | ||
| 2473 | #endif | ||
| 2474 | case IR_CNEWI: | ||
| 2475 | /* Nothing to do here. Handled by lo op itself. */ | ||
| 2476 | break; | ||
| 2477 | default: lj_assertA(0, "bad HIOP for op %d", (ir-1)->o); break; | 2470 | default: lj_assertA(0, "bad HIOP for op %d", (ir-1)->o); break; |
| 2478 | } | 2471 | } |
| 2479 | #else | ||
| 2480 | /* Unused on MIPS64 or without SOFTFP or FFI. */ | ||
| 2481 | UNUSED(as); UNUSED(ir); lj_assertA(0, "unexpected HIOP"); | ||
| 2482 | #endif | ||
| 2483 | } | 2472 | } |
| 2484 | 2473 | ||
| 2485 | /* -- Profiling ----------------------------------------------------------- */ | 2474 | /* -- Profiling ----------------------------------------------------------- */ |
diff --git a/src/lj_asm_ppc.h b/src/lj_asm_ppc.h index ba60b7e6..f99561b3 100644 --- a/src/lj_asm_ppc.h +++ b/src/lj_asm_ppc.h | |||
| @@ -340,10 +340,8 @@ static void asm_setupresult(ASMState *as, IRIns *ir, const CCallInfo *ci) | |||
| 340 | } else { | 340 | } else { |
| 341 | ra_destreg(as, ir, RID_FPRET); | 341 | ra_destreg(as, ir, RID_FPRET); |
| 342 | } | 342 | } |
| 343 | #if LJ_32 | ||
| 344 | } else if (hiop) { | 343 | } else if (hiop) { |
| 345 | ra_destpair(as, ir); | 344 | ra_destpair(as, ir); |
| 346 | #endif | ||
| 347 | } else { | 345 | } else { |
| 348 | ra_destreg(as, ir, RID_RET); | 346 | ra_destreg(as, ir, RID_RET); |
| 349 | } | 347 | } |
| @@ -1942,15 +1940,15 @@ static void asm_comp64(ASMState *as, IRIns *ir) | |||
| 1942 | } | 1940 | } |
| 1943 | #endif | 1941 | #endif |
| 1944 | 1942 | ||
| 1945 | /* -- Support for 64 bit ops in 32 bit mode ------------------------------- */ | 1943 | /* -- Split register ops -------------------------------------------------- */ |
| 1946 | 1944 | ||
| 1947 | /* Hiword op of a split 64 bit op. Previous op must be the loword op. */ | 1945 | /* Hiword op of a split 32/32 bit op. Previous op is the loword op. */ |
| 1948 | static void asm_hiop(ASMState *as, IRIns *ir) | 1946 | static void asm_hiop(ASMState *as, IRIns *ir) |
| 1949 | { | 1947 | { |
| 1950 | #if LJ_HASFFI || LJ_SOFTFP | ||
| 1951 | /* HIOP is marked as a store because it needs its own DCE logic. */ | 1948 | /* HIOP is marked as a store because it needs its own DCE logic. */ |
| 1952 | int uselo = ra_used(ir-1), usehi = ra_used(ir); /* Loword/hiword used? */ | 1949 | int uselo = ra_used(ir-1), usehi = ra_used(ir); /* Loword/hiword used? */ |
| 1953 | if (LJ_UNLIKELY(!(as->flags & JIT_F_OPT_DCE))) uselo = usehi = 1; | 1950 | if (LJ_UNLIKELY(!(as->flags & JIT_F_OPT_DCE))) uselo = usehi = 1; |
| 1951 | #if LJ_HASFFI || LJ_SOFTFP | ||
| 1954 | if ((ir-1)->o == IR_CONV) { /* Conversions to/from 64 bit. */ | 1952 | if ((ir-1)->o == IR_CONV) { /* Conversions to/from 64 bit. */ |
| 1955 | as->curins--; /* Always skip the CONV. */ | 1953 | as->curins--; /* Always skip the CONV. */ |
| 1956 | #if LJ_HASFFI && !LJ_SOFTFP | 1954 | #if LJ_HASFFI && !LJ_SOFTFP |
| @@ -1985,12 +1983,16 @@ static void asm_hiop(ASMState *as, IRIns *ir) | |||
| 1985 | } | 1983 | } |
| 1986 | return; | 1984 | return; |
| 1987 | } | 1985 | } |
| 1986 | #endif | ||
| 1988 | if (!usehi) return; /* Skip unused hiword op for all remaining ops. */ | 1987 | if (!usehi) return; /* Skip unused hiword op for all remaining ops. */ |
| 1989 | switch ((ir-1)->o) { | 1988 | switch ((ir-1)->o) { |
| 1990 | #if LJ_HASFFI | 1989 | #if LJ_HASFFI |
| 1991 | case IR_ADD: as->curins--; asm_add64(as, ir); break; | 1990 | case IR_ADD: as->curins--; asm_add64(as, ir); break; |
| 1992 | case IR_SUB: as->curins--; asm_sub64(as, ir); break; | 1991 | case IR_SUB: as->curins--; asm_sub64(as, ir); break; |
| 1993 | case IR_NEG: as->curins--; asm_neg64(as, ir); break; | 1992 | case IR_NEG: as->curins--; asm_neg64(as, ir); break; |
| 1993 | case IR_CNEWI: | ||
| 1994 | /* Nothing to do here. Handled by lo op itself. */ | ||
| 1995 | break; | ||
| 1994 | #endif | 1996 | #endif |
| 1995 | #if LJ_SOFTFP | 1997 | #if LJ_SOFTFP |
| 1996 | case IR_SLOAD: case IR_ALOAD: case IR_HLOAD: case IR_ULOAD: case IR_VLOAD: | 1998 | case IR_SLOAD: case IR_ALOAD: case IR_HLOAD: case IR_ULOAD: case IR_VLOAD: |
| @@ -1998,25 +2000,16 @@ static void asm_hiop(ASMState *as, IRIns *ir) | |||
| 1998 | if (!uselo) | 2000 | if (!uselo) |
| 1999 | ra_allocref(as, ir->op1, RSET_GPR); /* Mark lo op as used. */ | 2001 | ra_allocref(as, ir->op1, RSET_GPR); /* Mark lo op as used. */ |
| 2000 | break; | 2002 | break; |
| 2003 | case IR_ASTORE: case IR_HSTORE: case IR_USTORE: case IR_TOSTR: case IR_TMPREF: | ||
| 2004 | /* Nothing to do here. Handled by lo op itself. */ | ||
| 2005 | break; | ||
| 2001 | #endif | 2006 | #endif |
| 2002 | case IR_CALLN: | 2007 | case IR_CALLN: case IR_CALLL: case IR_CALLS: case IR_CALLXS: |
| 2003 | case IR_CALLS: | ||
| 2004 | case IR_CALLXS: | ||
| 2005 | if (!uselo) | 2008 | if (!uselo) |
| 2006 | ra_allocref(as, ir->op1, RID2RSET(RID_RETLO)); /* Mark lo op as used. */ | 2009 | ra_allocref(as, ir->op1, RID2RSET(RID_RETLO)); /* Mark lo op as used. */ |
| 2007 | break; | 2010 | break; |
| 2008 | #if LJ_SOFTFP | ||
| 2009 | case IR_ASTORE: case IR_HSTORE: case IR_USTORE: case IR_TOSTR: case IR_TMPREF: | ||
| 2010 | #endif | ||
| 2011 | case IR_CNEWI: | ||
| 2012 | /* Nothing to do here. Handled by lo op itself. */ | ||
| 2013 | break; | ||
| 2014 | default: lj_assertA(0, "bad HIOP for op %d", (ir-1)->o); break; | 2011 | default: lj_assertA(0, "bad HIOP for op %d", (ir-1)->o); break; |
| 2015 | } | 2012 | } |
| 2016 | #else | ||
| 2017 | /* Unused without SOFTFP or FFI. */ | ||
| 2018 | UNUSED(as); UNUSED(ir); lj_assertA(0, "unexpected HIOP"); | ||
| 2019 | #endif | ||
| 2020 | } | 2013 | } |
| 2021 | 2014 | ||
| 2022 | /* -- Profiling ----------------------------------------------------------- */ | 2015 | /* -- Profiling ----------------------------------------------------------- */ |
diff --git a/src/lj_asm_x86.h b/src/lj_asm_x86.h index 512f1afd..48c31fe3 100644 --- a/src/lj_asm_x86.h +++ b/src/lj_asm_x86.h | |||
| @@ -659,7 +659,7 @@ static void asm_gencall(ASMState *as, const CCallInfo *ci, IRRef *args) | |||
| 659 | static void asm_setupresult(ASMState *as, IRIns *ir, const CCallInfo *ci) | 659 | static void asm_setupresult(ASMState *as, IRIns *ir, const CCallInfo *ci) |
| 660 | { | 660 | { |
| 661 | RegSet drop = RSET_SCRATCH; | 661 | RegSet drop = RSET_SCRATCH; |
| 662 | int hiop = (LJ_32 && (ir+1)->o == IR_HIOP && !irt_isnil((ir+1)->t)); | 662 | int hiop = ((ir+1)->o == IR_HIOP && !irt_isnil((ir+1)->t)); |
| 663 | if ((ci->flags & CCI_NOFPRCLOBBER)) | 663 | if ((ci->flags & CCI_NOFPRCLOBBER)) |
| 664 | drop &= ~RSET_FPR; | 664 | drop &= ~RSET_FPR; |
| 665 | if (ra_hasreg(ir->r)) | 665 | if (ra_hasreg(ir->r)) |
| @@ -699,10 +699,8 @@ static void asm_setupresult(ASMState *as, IRIns *ir, const CCallInfo *ci) | |||
| 699 | irt_isnum(ir->t) ? XOg_FSTPq : XOg_FSTPd, RID_ESP, ofs); | 699 | irt_isnum(ir->t) ? XOg_FSTPq : XOg_FSTPd, RID_ESP, ofs); |
| 700 | } | 700 | } |
| 701 | #endif | 701 | #endif |
| 702 | #if LJ_32 | ||
| 703 | } else if (hiop) { | 702 | } else if (hiop) { |
| 704 | ra_destpair(as, ir); | 703 | ra_destpair(as, ir); |
| 705 | #endif | ||
| 706 | } else { | 704 | } else { |
| 707 | lj_assertA(!irt_ispri(ir->t), "PRI dest"); | 705 | lj_assertA(!irt_ispri(ir->t), "PRI dest"); |
| 708 | ra_destreg(as, ir, RID_RET); | 706 | ra_destreg(as, ir, RID_RET); |
| @@ -2611,15 +2609,15 @@ static void asm_comp_int64(ASMState *as, IRIns *ir) | |||
| 2611 | } | 2609 | } |
| 2612 | #endif | 2610 | #endif |
| 2613 | 2611 | ||
| 2614 | /* -- Support for 64 bit ops in 32 bit mode ------------------------------- */ | 2612 | /* -- Split register ops -------------------------------------------------- */ |
| 2615 | 2613 | ||
| 2616 | /* Hiword op of a split 64 bit op. Previous op must be the loword op. */ | 2614 | /* Hiword op of a split 32/32 or 64/64 bit op. Previous op is the loword op. */ |
| 2617 | static void asm_hiop(ASMState *as, IRIns *ir) | 2615 | static void asm_hiop(ASMState *as, IRIns *ir) |
| 2618 | { | 2616 | { |
| 2619 | #if LJ_32 && LJ_HASFFI | ||
| 2620 | /* HIOP is marked as a store because it needs its own DCE logic. */ | 2617 | /* HIOP is marked as a store because it needs its own DCE logic. */ |
| 2621 | int uselo = ra_used(ir-1), usehi = ra_used(ir); /* Loword/hiword used? */ | 2618 | int uselo = ra_used(ir-1), usehi = ra_used(ir); /* Loword/hiword used? */ |
| 2622 | if (LJ_UNLIKELY(!(as->flags & JIT_F_OPT_DCE))) uselo = usehi = 1; | 2619 | if (LJ_UNLIKELY(!(as->flags & JIT_F_OPT_DCE))) uselo = usehi = 1; |
| 2620 | #if LJ_32 && LJ_HASFFI | ||
| 2623 | if ((ir-1)->o == IR_CONV) { /* Conversions to/from 64 bit. */ | 2621 | if ((ir-1)->o == IR_CONV) { /* Conversions to/from 64 bit. */ |
| 2624 | as->curins--; /* Always skip the CONV. */ | 2622 | as->curins--; /* Always skip the CONV. */ |
| 2625 | if (usehi || uselo) | 2623 | if (usehi || uselo) |
| @@ -2633,8 +2631,10 @@ static void asm_hiop(ASMState *as, IRIns *ir) | |||
| 2633 | asm_fxstore(as, ir); | 2631 | asm_fxstore(as, ir); |
| 2634 | return; | 2632 | return; |
| 2635 | } | 2633 | } |
| 2634 | #endif | ||
| 2636 | if (!usehi) return; /* Skip unused hiword op for all remaining ops. */ | 2635 | if (!usehi) return; /* Skip unused hiword op for all remaining ops. */ |
| 2637 | switch ((ir-1)->o) { | 2636 | switch ((ir-1)->o) { |
| 2637 | #if LJ_32 && LJ_HASFFI | ||
| 2638 | case IR_ADD: | 2638 | case IR_ADD: |
| 2639 | as->flagmcp = NULL; | 2639 | as->flagmcp = NULL; |
| 2640 | as->curins--; | 2640 | as->curins--; |
| @@ -2657,20 +2657,16 @@ static void asm_hiop(ASMState *as, IRIns *ir) | |||
| 2657 | asm_neg_not(as, ir-1, XOg_NEG); | 2657 | asm_neg_not(as, ir-1, XOg_NEG); |
| 2658 | break; | 2658 | break; |
| 2659 | } | 2659 | } |
| 2660 | case IR_CALLN: | ||
| 2661 | case IR_CALLXS: | ||
| 2662 | if (!uselo) | ||
| 2663 | ra_allocref(as, ir->op1, RID2RSET(RID_RETLO)); /* Mark lo op as used. */ | ||
| 2664 | break; | ||
| 2665 | case IR_CNEWI: | 2660 | case IR_CNEWI: |
| 2666 | /* Nothing to do here. Handled by CNEWI itself. */ | 2661 | /* Nothing to do here. Handled by CNEWI itself. */ |
| 2667 | break; | 2662 | break; |
| 2663 | #endif | ||
| 2664 | case IR_CALLN: case IR_CALLL: case IR_CALLS: case IR_CALLXS: | ||
| 2665 | if (!uselo) | ||
| 2666 | ra_allocref(as, ir->op1, RID2RSET(RID_RETLO)); /* Mark lo op as used. */ | ||
| 2667 | break; | ||
| 2668 | default: lj_assertA(0, "bad HIOP for op %d", (ir-1)->o); break; | 2668 | default: lj_assertA(0, "bad HIOP for op %d", (ir-1)->o); break; |
| 2669 | } | 2669 | } |
| 2670 | #else | ||
| 2671 | /* Unused on x64 or without FFI. */ | ||
| 2672 | UNUSED(as); UNUSED(ir); lj_assertA(0, "unexpected HIOP"); | ||
| 2673 | #endif | ||
| 2674 | } | 2670 | } |
| 2675 | 2671 | ||
| 2676 | /* -- Profiling ----------------------------------------------------------- */ | 2672 | /* -- Profiling ----------------------------------------------------------- */ |
diff --git a/src/lj_target_arm64.h b/src/lj_target_arm64.h index bf568a8d..6d39ffb8 100644 --- a/src/lj_target_arm64.h +++ b/src/lj_target_arm64.h | |||
| @@ -31,6 +31,8 @@ enum { | |||
| 31 | 31 | ||
| 32 | /* Calling conventions. */ | 32 | /* Calling conventions. */ |
| 33 | RID_RET = RID_X0, | 33 | RID_RET = RID_X0, |
| 34 | RID_RETLO = RID_X0, | ||
| 35 | RID_RETHI = RID_X1, | ||
| 34 | RID_FPRET = RID_D0, | 36 | RID_FPRET = RID_D0, |
| 35 | 37 | ||
| 36 | /* These definitions must match with the *.dasc file(s): */ | 38 | /* These definitions must match with the *.dasc file(s): */ |
diff --git a/src/lj_target_x86.h b/src/lj_target_x86.h index a403f820..d0ce196c 100644 --- a/src/lj_target_x86.h +++ b/src/lj_target_x86.h | |||
| @@ -38,10 +38,9 @@ enum { | |||
| 38 | RID_RET = RID_EAX, | 38 | RID_RET = RID_EAX, |
| 39 | #if LJ_64 | 39 | #if LJ_64 |
| 40 | RID_FPRET = RID_XMM0, | 40 | RID_FPRET = RID_XMM0, |
| 41 | #else | 41 | #endif |
| 42 | RID_RETLO = RID_EAX, | 42 | RID_RETLO = RID_EAX, |
| 43 | RID_RETHI = RID_EDX, | 43 | RID_RETHI = RID_EDX, |
| 44 | #endif | ||
| 45 | 44 | ||
| 46 | /* These definitions must match with the *.dasc file(s): */ | 45 | /* These definitions must match with the *.dasc file(s): */ |
| 47 | RID_BASE = RID_EDX, /* Interpreter BASE. */ | 46 | RID_BASE = RID_EDX, /* Interpreter BASE. */ |
