diff options
author | Mike Pall <mike> | 2023-09-09 17:52:43 +0200 |
---|---|---|
committer | Mike Pall <mike> | 2023-09-09 17:52:43 +0200 |
commit | 0705ef6ce41320b097cfb4f3c9a2a876c1949e86 (patch) | |
tree | bc274941774eeab10f11069351048b1363c36072 /src | |
parent | 59be97edb6aaa4e898ae2d0f63d91bae27698f57 (diff) | |
download | luajit-0705ef6ce41320b097cfb4f3c9a2a876c1949e86.tar.gz luajit-0705ef6ce41320b097cfb4f3c9a2a876c1949e86.tar.bz2 luajit-0705ef6ce41320b097cfb4f3c9a2a876c1949e86.zip |
ARM64: Ensure branch is in range before emitting TBZ/TBNZ.
Thanks to Peter Cawley. #1074
Diffstat (limited to 'src')
-rw-r--r-- | src/lj_asm_arm64.h | 24 |
1 file changed, 14 insertions, 10 deletions
diff --git a/src/lj_asm_arm64.h b/src/lj_asm_arm64.h index d9866e9d..05bdc78a 100644 --- a/src/lj_asm_arm64.h +++ b/src/lj_asm_arm64.h | |||
@@ -84,18 +84,23 @@ static void asm_guardcc(ASMState *as, A64CC cc) | |||
84 | emit_cond_branch(as, cc, target); | 84 | emit_cond_branch(as, cc, target); |
85 | } | 85 | } |
86 | 86 | ||
87 | /* Emit test and branch instruction to exit for guard. */ | 87 | /* Emit test and branch instruction to exit for guard, if in range. */ |
88 | static void asm_guardtnb(ASMState *as, A64Ins ai, Reg r, uint32_t bit) | 88 | static int asm_guardtnb(ASMState *as, A64Ins ai, Reg r, uint32_t bit) |
89 | { | 89 | { |
90 | MCode *target = asm_exitstub_addr(as, as->snapno); | 90 | MCode *target = asm_exitstub_addr(as, as->snapno); |
91 | MCode *p = as->mcp; | 91 | MCode *p = as->mcp; |
92 | ptrdiff_t delta = target - p; | ||
92 | if (LJ_UNLIKELY(p == as->invmcp)) { | 93 | if (LJ_UNLIKELY(p == as->invmcp)) { |
94 | if (as->orignins > 1023) return 0; /* Delta might end up too large. */ | ||
93 | as->loopinv = 1; | 95 | as->loopinv = 1; |
94 | *p = A64I_B | A64F_S26(target-p); | 96 | *p = A64I_B | A64F_S26(delta); |
95 | emit_tnb(as, ai^0x01000000u, r, bit, p-1); | 97 | ai ^= 0x01000000u; |
96 | return; | 98 | target = p-1; |
99 | } else if (LJ_UNLIKELY(delta >= 0x1fff)) { | ||
100 | return 0; | ||
97 | } | 101 | } |
98 | emit_tnb(as, ai, r, bit, target); | 102 | emit_tnb(as, ai, r, bit, target); |
103 | return 1; | ||
99 | } | 104 | } |
100 | 105 | ||
101 | /* Emit compare and branch instruction to exit for guard. */ | 106 | /* Emit compare and branch instruction to exit for guard. */ |
@@ -1651,16 +1656,15 @@ static void asm_intcomp(ASMState *as, IRIns *ir) | |||
1651 | if (asm_swapops(as, blref, brref)) { | 1656 | if (asm_swapops(as, blref, brref)) { |
1652 | Reg tmp = blref; blref = brref; brref = tmp; | 1657 | Reg tmp = blref; blref = brref; brref = tmp; |
1653 | } | 1658 | } |
1659 | bleft = ra_alloc1(as, blref, RSET_GPR); | ||
1654 | if (irref_isk(brref)) { | 1660 | if (irref_isk(brref)) { |
1655 | uint64_t k = get_k64val(as, brref); | 1661 | uint64_t k = get_k64val(as, brref); |
1656 | if (k && !(k & (k-1)) && (cc == CC_EQ || cc == CC_NE)) { | 1662 | if (k && !(k & (k-1)) && (cc == CC_EQ || cc == CC_NE) && |
1657 | asm_guardtnb(as, cc == CC_EQ ? A64I_TBZ : A64I_TBNZ, | 1663 | asm_guardtnb(as, cc == CC_EQ ? A64I_TBZ : A64I_TBNZ, bleft, |
1658 | ra_alloc1(as, blref, RSET_GPR), emit_ctz64(k)); | 1664 | emit_ctz64(k))) |
1659 | return; | 1665 | return; |
1660 | } | ||
1661 | m2 = emit_isk13(k, irt_is64(irl->t)); | 1666 | m2 = emit_isk13(k, irt_is64(irl->t)); |
1662 | } | 1667 | } |
1663 | bleft = ra_alloc1(as, blref, RSET_GPR); | ||
1664 | ai = (irt_is64(irl->t) ? A64I_TSTx : A64I_TSTw); | 1668 | ai = (irt_is64(irl->t) ? A64I_TSTx : A64I_TSTw); |
1665 | if (!m2) | 1669 | if (!m2) |
1666 | m2 = asm_fuseopm(as, ai, brref, rset_exclude(RSET_GPR, bleft)); | 1670 | m2 = asm_fuseopm(as, ai, brref, rset_exclude(RSET_GPR, bleft)); |