about summary refs log tree commit diff
path: root/src/lj_asm_x86.h
diff options
context:
space:
mode:
authorMike Pall <mike>2013-04-22 22:32:41 +0200
committerMike Pall <mike>2013-04-22 22:32:41 +0200
commita2c78810ca0162c06b3ae02b52d6b4c04a8d5be3 (patch)
treed82fe00c6ca8ff6a2bfce89176e0d97b3095be38 /src/lj_asm_x86.h
parent2ab5e7c5dce9e8bd19b7f4c9d7a90ef30af53d0a (diff)
downloadluajit-a2c78810ca0162c06b3ae02b52d6b4c04a8d5be3.tar.gz
luajit-a2c78810ca0162c06b3ae02b52d6b4c04a8d5be3.tar.bz2
luajit-a2c78810ca0162c06b3ae02b52d6b4c04a8d5be3.zip
Combine IR instruction dispatch for all assembler backends.
Diffstat (limited to 'src/lj_asm_x86.h')
-rw-r--r-- src/lj_asm_x86.h | 279
1 file changed, 107 insertions(+), 172 deletions(-)
diff --git a/src/lj_asm_x86.h b/src/lj_asm_x86.h
index 45fc7e85..2ab1dbf5 100644
--- a/src/lj_asm_x86.h
+++ b/src/lj_asm_x86.h
@@ -1218,6 +1218,9 @@ static void asm_fxload(ASMState *as, IRIns *ir)
1218 emit_mrm(as, xo, dest, RID_MRM); 1218 emit_mrm(as, xo, dest, RID_MRM);
1219} 1219}
1220 1220
1221#define asm_fload(as, ir) asm_fxload(as, ir)
1222#define asm_xload(as, ir) asm_fxload(as, ir)
1223
1221static void asm_fxstore(ASMState *as, IRIns *ir) 1224static void asm_fxstore(ASMState *as, IRIns *ir)
1222{ 1225{
1223 RegSet allow = RSET_GPR; 1226 RegSet allow = RSET_GPR;
@@ -1281,6 +1284,9 @@ static void asm_fxstore(ASMState *as, IRIns *ir)
1281 } 1284 }
1282} 1285}
1283 1286
1287#define asm_fstore(as, ir) asm_fxstore(as, ir)
1288#define asm_xstore(as, ir) asm_fxstore(as, ir)
1289
1284#if LJ_64 1290#if LJ_64
1285static Reg asm_load_lightud64(ASMState *as, IRIns *ir, int typecheck) 1291static Reg asm_load_lightud64(ASMState *as, IRIns *ir, int typecheck)
1286{ 1292{
@@ -1666,6 +1672,9 @@ static void asm_fpmath(ASMState *as, IRIns *ir)
1666 } 1672 }
1667} 1673}
1668 1674
1675#define asm_atan2(as, ir) asm_fpmath(as, ir)
1676#define asm_ldexp(as, ir) asm_fpmath(as, ir)
1677
1669static void asm_fppowi(ASMState *as, IRIns *ir) 1678static void asm_fppowi(ASMState *as, IRIns *ir)
1670{ 1679{
1671 /* The modified regs must match with the *.dasc implementation. */ 1680 /* The modified regs must match with the *.dasc implementation. */
@@ -1679,6 +1688,17 @@ static void asm_fppowi(ASMState *as, IRIns *ir)
1679 ra_left(as, RID_EAX, ir->op2); 1688 ra_left(as, RID_EAX, ir->op2);
1680} 1689}
1681 1690
1691static void asm_pow(ASMState *as, IRIns *ir)
1692{
1693#if LJ_64 && LJ_HASFFI
1694 if (!irt_isnum(ir->t))
1695 asm_callid(as, ir, irt_isi64(ir->t) ? IRCALL_lj_carith_powi64 :
1696 IRCALL_lj_carith_powu64);
1697 else
1698#endif
1699 asm_fppowi(as, ir);
1700}
1701
1682static int asm_swapops(ASMState *as, IRIns *ir) 1702static int asm_swapops(ASMState *as, IRIns *ir)
1683{ 1703{
1684 IRIns *irl = IR(ir->op1); 1704 IRIns *irl = IR(ir->op1);
@@ -1855,6 +1875,44 @@ static void asm_add(ASMState *as, IRIns *ir)
1855 asm_intarith(as, ir, XOg_ADD); 1875 asm_intarith(as, ir, XOg_ADD);
1856} 1876}
1857 1877
1878static void asm_sub(ASMState *as, IRIns *ir)
1879{
1880 if (irt_isnum(ir->t))
1881 asm_fparith(as, ir, XO_SUBSD);
1882 else /* Note: no need for LEA trick here. i-k is encoded as i+(-k). */
1883 asm_intarith(as, ir, XOg_SUB);
1884}
1885
1886static void asm_mul(ASMState *as, IRIns *ir)
1887{
1888 if (irt_isnum(ir->t))
1889 asm_fparith(as, ir, XO_MULSD);
1890 else
1891 asm_intarith(as, ir, XOg_X_IMUL);
1892}
1893
1894static void asm_div(ASMState *as, IRIns *ir)
1895{
1896#if LJ_64 && LJ_HASFFI
1897 if (!irt_isnum(ir->t))
1898 asm_callid(as, ir, irt_isi64(ir->t) ? IRCALL_lj_carith_divi64 :
1899 IRCALL_lj_carith_divu64);
1900 else
1901#endif
1902 asm_fparith(as, ir, XO_DIVSD);
1903}
1904
1905static void asm_mod(ASMState *as, IRIns *ir)
1906{
1907#if LJ_64 && LJ_HASFFI
1908 if (!irt_isint(ir->t))
1909 asm_callid(as, ir, irt_isi64(ir->t) ? IRCALL_lj_carith_modi64 :
1910 IRCALL_lj_carith_modu64);
1911 else
1912#endif
1913 asm_callid(as, ir, IRCALL_lj_vm_modi);
1914}
1915
1858static void asm_neg_not(ASMState *as, IRIns *ir, x86Group3 xg) 1916static void asm_neg_not(ASMState *as, IRIns *ir, x86Group3 xg)
1859{ 1917{
1860 Reg dest = ra_dest(as, ir, RSET_GPR); 1918 Reg dest = ra_dest(as, ir, RSET_GPR);
@@ -1862,7 +1920,17 @@ static void asm_neg_not(ASMState *as, IRIns *ir, x86Group3 xg)
1862 ra_left(as, dest, ir->op1); 1920 ra_left(as, dest, ir->op1);
1863} 1921}
1864 1922
1865static void asm_min_max(ASMState *as, IRIns *ir, int cc) 1923static void asm_neg(ASMState *as, IRIns *ir)
1924{
1925 if (irt_isnum(ir->t))
1926 asm_fparith(as, ir, XO_XORPS);
1927 else
1928 asm_neg_not(as, ir, XOg_NEG);
1929}
1930
1931#define asm_abs(as, ir) asm_fparith(as, ir, XO_ANDPS)
1932
1933static void asm_intmin_max(ASMState *as, IRIns *ir, int cc)
1866{ 1934{
1867 Reg right, dest = ra_dest(as, ir, RSET_GPR); 1935 Reg right, dest = ra_dest(as, ir, RSET_GPR);
1868 IRRef lref = ir->op1, rref = ir->op2; 1936 IRRef lref = ir->op1, rref = ir->op2;
@@ -1873,7 +1941,30 @@ static void asm_min_max(ASMState *as, IRIns *ir, int cc)
1873 ra_left(as, dest, lref); 1941 ra_left(as, dest, lref);
1874} 1942}
1875 1943
1876static void asm_bitswap(ASMState *as, IRIns *ir) 1944static void asm_min(ASMState *as, IRIns *ir)
1945{
1946 if (irt_isnum(ir->t))
1947 asm_fparith(as, ir, XO_MINSD);
1948 else
1949 asm_intmin_max(as, ir, CC_G);
1950}
1951
1952static void asm_max(ASMState *as, IRIns *ir)
1953{
1954 if (irt_isnum(ir->t))
1955 asm_fparith(as, ir, XO_MAXSD);
1956 else
1957 asm_intmin_max(as, ir, CC_L);
1958}
1959
1960/* Note: don't use LEA for overflow-checking arithmetic! */
1961#define asm_addov(as, ir) asm_intarith(as, ir, XOg_ADD)
1962#define asm_subov(as, ir) asm_intarith(as, ir, XOg_SUB)
1963#define asm_mulov(as, ir) asm_intarith(as, ir, XOg_X_IMUL)
1964
1965#define asm_bnot(as, ir) asm_neg_not(as, ir, XOg_NOT)
1966
1967static void asm_bswap(ASMState *as, IRIns *ir)
1877{ 1968{
1878 Reg dest = ra_dest(as, ir, RSET_GPR); 1969 Reg dest = ra_dest(as, ir, RSET_GPR);
1879 as->mcp = emit_op(XO_BSWAP + ((dest&7) << 24), 1970 as->mcp = emit_op(XO_BSWAP + ((dest&7) << 24),
@@ -1881,6 +1972,10 @@ static void asm_bitswap(ASMState *as, IRIns *ir)
1881 ra_left(as, dest, ir->op1); 1972 ra_left(as, dest, ir->op1);
1882} 1973}
1883 1974
1975#define asm_band(as, ir) asm_intarith(as, ir, XOg_AND)
1976#define asm_bor(as, ir) asm_intarith(as, ir, XOg_OR)
1977#define asm_bxor(as, ir) asm_intarith(as, ir, XOg_XOR)
1978
1884static void asm_bitshift(ASMState *as, IRIns *ir, x86Shift xs) 1979static void asm_bitshift(ASMState *as, IRIns *ir, x86Shift xs)
1885{ 1980{
1886 IRRef rref = ir->op2; 1981 IRRef rref = ir->op2;
@@ -1920,6 +2015,12 @@ static void asm_bitshift(ASMState *as, IRIns *ir, x86Shift xs)
1920 */ 2015 */
1921} 2016}
1922 2017
2018#define asm_bshl(as, ir) asm_bitshift(as, ir, XOg_SHL)
2019#define asm_bshr(as, ir) asm_bitshift(as, ir, XOg_SHR)
2020#define asm_bsar(as, ir) asm_bitshift(as, ir, XOg_SAR)
2021#define asm_brol(as, ir) asm_bitshift(as, ir, XOg_ROL)
2022#define asm_bror(as, ir) asm_bitshift(as, ir, XOg_ROR)
2023
1923/* -- Comparisons --------------------------------------------------------- */ 2024/* -- Comparisons --------------------------------------------------------- */
1924 2025
1925/* Virtual flags for unordered FP comparisons. */ 2026/* Virtual flags for unordered FP comparisons. */
@@ -1946,8 +2047,9 @@ static const uint16_t asm_compmap[IR_ABC+1] = {
1946}; 2047};
1947 2048
1948/* FP and integer comparisons. */ 2049/* FP and integer comparisons. */
1949static void asm_comp(ASMState *as, IRIns *ir, uint32_t cc) 2050static void asm_comp(ASMState *as, IRIns *ir)
1950{ 2051{
2052 uint32_t cc = asm_compmap[ir->o];
1951 if (irt_isnum(ir->t)) { 2053 if (irt_isnum(ir->t)) {
1952 IRRef lref = ir->op1; 2054 IRRef lref = ir->op1;
1953 IRRef rref = ir->op2; 2055 IRRef rref = ir->op2;
@@ -2102,6 +2204,8 @@ static void asm_comp(ASMState *as, IRIns *ir, uint32_t cc)
2102 } 2204 }
2103} 2205}
2104 2206
2207#define asm_equal(as, ir) asm_comp(as, ir)
2208
2105#if LJ_32 && LJ_HASFFI 2209#if LJ_32 && LJ_HASFFI
2106/* 64 bit integer comparisons in 32 bit mode. */ 2210/* 64 bit integer comparisons in 32 bit mode. */
2107static void asm_comp_int64(ASMState *as, IRIns *ir) 2211static void asm_comp_int64(ASMState *as, IRIns *ir)
@@ -2484,175 +2588,6 @@ static void asm_tail_prep(ASMState *as)
2484 } 2588 }
2485} 2589}
2486 2590
2487/* -- Instruction dispatch ------------------------------------------------ */
2488
2489/* Assemble a single instruction. */
2490static void asm_ir(ASMState *as, IRIns *ir)
2491{
2492 switch ((IROp)ir->o) {
2493 /* Miscellaneous ops. */
2494 case IR_LOOP: asm_loop(as); break;
2495 case IR_NOP: case IR_XBAR: lua_assert(!ra_used(ir)); break;
2496 case IR_USE:
2497 ra_alloc1(as, ir->op1, irt_isfp(ir->t) ? RSET_FPR : RSET_GPR); break;
2498 case IR_PHI: asm_phi(as, ir); break;
2499 case IR_HIOP: asm_hiop(as, ir); break;
2500 case IR_GCSTEP: asm_gcstep(as, ir); break;
2501
2502 /* Guarded assertions. */
2503 case IR_EQ: case IR_NE:
2504 if ((ir-1)->o == IR_HREF && ir->op1 == as->curins-1) {
2505 as->curins--;
2506 asm_href(as, ir-1, (IROp)ir->o);
2507 break;
2508 }
2509 /* fallthrough */
2510 case IR_LT: case IR_GE: case IR_LE: case IR_GT:
2511 case IR_ULT: case IR_UGE: case IR_ULE: case IR_UGT:
2512 case IR_ABC:
2513 asm_comp(as, ir, asm_compmap[ir->o]);
2514 break;
2515
2516 case IR_RETF: asm_retf(as, ir); break;
2517
2518 /* Bit ops. */
2519 case IR_BNOT: asm_neg_not(as, ir, XOg_NOT); break;
2520 case IR_BSWAP: asm_bitswap(as, ir); break;
2521
2522 case IR_BAND: asm_intarith(as, ir, XOg_AND); break;
2523 case IR_BOR: asm_intarith(as, ir, XOg_OR); break;
2524 case IR_BXOR: asm_intarith(as, ir, XOg_XOR); break;
2525
2526 case IR_BSHL: asm_bitshift(as, ir, XOg_SHL); break;
2527 case IR_BSHR: asm_bitshift(as, ir, XOg_SHR); break;
2528 case IR_BSAR: asm_bitshift(as, ir, XOg_SAR); break;
2529 case IR_BROL: asm_bitshift(as, ir, XOg_ROL); break;
2530 case IR_BROR: asm_bitshift(as, ir, XOg_ROR); break;
2531
2532 /* Arithmetic ops. */
2533 case IR_ADD: asm_add(as, ir); break;
2534 case IR_SUB:
2535 if (irt_isnum(ir->t))
2536 asm_fparith(as, ir, XO_SUBSD);
2537 else /* Note: no need for LEA trick here. i-k is encoded as i+(-k). */
2538 asm_intarith(as, ir, XOg_SUB);
2539 break;
2540 case IR_MUL:
2541 if (irt_isnum(ir->t))
2542 asm_fparith(as, ir, XO_MULSD);
2543 else
2544 asm_intarith(as, ir, XOg_X_IMUL);
2545 break;
2546 case IR_DIV:
2547#if LJ_64 && LJ_HASFFI
2548 if (!irt_isnum(ir->t))
2549 asm_callid(as, ir, irt_isi64(ir->t) ? IRCALL_lj_carith_divi64 :
2550 IRCALL_lj_carith_divu64);
2551 else
2552#endif
2553 asm_fparith(as, ir, XO_DIVSD);
2554 break;
2555 case IR_MOD:
2556#if LJ_64 && LJ_HASFFI
2557 if (!irt_isint(ir->t))
2558 asm_callid(as, ir, irt_isi64(ir->t) ? IRCALL_lj_carith_modi64 :
2559 IRCALL_lj_carith_modu64);
2560 else
2561#endif
2562 asm_callid(as, ir, IRCALL_lj_vm_modi);
2563 break;
2564
2565 case IR_NEG:
2566 if (irt_isnum(ir->t))
2567 asm_fparith(as, ir, XO_XORPS);
2568 else
2569 asm_neg_not(as, ir, XOg_NEG);
2570 break;
2571 case IR_ABS: asm_fparith(as, ir, XO_ANDPS); break;
2572
2573 case IR_MIN:
2574 if (irt_isnum(ir->t))
2575 asm_fparith(as, ir, XO_MINSD);
2576 else
2577 asm_min_max(as, ir, CC_G);
2578 break;
2579 case IR_MAX:
2580 if (irt_isnum(ir->t))
2581 asm_fparith(as, ir, XO_MAXSD);
2582 else
2583 asm_min_max(as, ir, CC_L);
2584 break;
2585
2586 case IR_FPMATH: case IR_ATAN2: case IR_LDEXP:
2587 asm_fpmath(as, ir);
2588 break;
2589 case IR_POW:
2590#if LJ_64 && LJ_HASFFI
2591 if (!irt_isnum(ir->t))
2592 asm_callid(as, ir, irt_isi64(ir->t) ? IRCALL_lj_carith_powi64 :
2593 IRCALL_lj_carith_powu64);
2594 else
2595#endif
2596 asm_fppowi(as, ir);
2597 break;
2598
2599 /* Overflow-checking arithmetic ops. Note: don't use LEA here! */
2600 case IR_ADDOV: asm_intarith(as, ir, XOg_ADD); break;
2601 case IR_SUBOV: asm_intarith(as, ir, XOg_SUB); break;
2602 case IR_MULOV: asm_intarith(as, ir, XOg_X_IMUL); break;
2603
2604 /* Memory references. */
2605 case IR_AREF: asm_aref(as, ir); break;
2606 case IR_HREF: asm_href(as, ir, 0); break;
2607 case IR_HREFK: asm_hrefk(as, ir); break;
2608 case IR_NEWREF: asm_newref(as, ir); break;
2609 case IR_UREFO: case IR_UREFC: asm_uref(as, ir); break;
2610 case IR_FREF: asm_fref(as, ir); break;
2611 case IR_STRREF: asm_strref(as, ir); break;
2612
2613 /* Loads and stores. */
2614 case IR_ALOAD: case IR_HLOAD: case IR_ULOAD: case IR_VLOAD:
2615 asm_ahuvload(as, ir);
2616 break;
2617 case IR_FLOAD: case IR_XLOAD: asm_fxload(as, ir); break;
2618 case IR_SLOAD: asm_sload(as, ir); break;
2619
2620 case IR_ASTORE: case IR_HSTORE: case IR_USTORE: asm_ahustore(as, ir); break;
2621 case IR_FSTORE: case IR_XSTORE: asm_fxstore(as, ir); break;
2622
2623 /* Allocations. */
2624 case IR_SNEW: case IR_XSNEW: asm_snew(as, ir); break;
2625 case IR_TNEW: asm_tnew(as, ir); break;
2626 case IR_TDUP: asm_tdup(as, ir); break;
2627 case IR_CNEW: case IR_CNEWI: asm_cnew(as, ir); break;
2628
2629 /* Buffer operations. */
2630 case IR_BUFHDR: asm_bufhdr(as, ir); break;
2631 case IR_BUFPUT: asm_bufput(as, ir); break;
2632 case IR_BUFSTR: asm_bufstr(as, ir); break;
2633
2634 /* Write barriers. */
2635 case IR_TBAR: asm_tbar(as, ir); break;
2636 case IR_OBAR: asm_obar(as, ir); break;
2637
2638 /* Type conversions. */
2639 case IR_TOBIT: asm_tobit(as, ir); break;
2640 case IR_CONV: asm_conv(as, ir); break;
2641 case IR_TOSTR: asm_tostr(as, ir); break;
2642 case IR_STRTO: asm_strto(as, ir); break;
2643
2644 /* Calls. */
2645 case IR_CALLN: case IR_CALLL: case IR_CALLS: asm_call(as, ir); break;
2646 case IR_CALLXS: asm_callx(as, ir); break;
2647 case IR_CARG: break;
2648
2649 default:
2650 setintV(&as->J->errinfo, ir->o);
2651 lj_trace_err_info(as->J, LJ_TRERR_NYIIR);
2652 break;
2653 }
2654}
2655
2656/* -- Trace setup --------------------------------------------------------- */ 2591/* -- Trace setup --------------------------------------------------------- */
2657 2592
2658/* Ensure there are enough stack slots for call arguments. */ 2593/* Ensure there are enough stack slots for call arguments. */