author     Mike Pall <mike>    2013-04-22 17:34:36 +0200
committer  Mike Pall <mike>    2013-04-22 17:34:36 +0200
commit     988e1839658523e772de53f89df389b568883fde (patch)
tree       2ba73f16385b3c8765a05580071308f2a4f11123
parent     5f1781a1277508c2b7bec527f722da98d8556e26 (diff)
Reorganize generic operations common to all assembler backends.
-rw-r--r--    src/lj_asm.c        209
-rw-r--r--    src/lj_asm_arm.h    102
-rw-r--r--    src/lj_asm_mips.h    97
-rw-r--r--    src/lj_asm_ppc.h     97
-rw-r--r--    src/lj_asm_x86.h    175
5 files changed, 219 insertions(+), 461 deletions(-)
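
For orientation, here is the pattern the reorganization settles on, condensed from the new generic section of lj_asm.c in the first hunk below (an illustration assembled from the diff, not a drop-in file): the shared IR-to-call helpers are written purely in terms of the per-backend primitives asm_gencall(), asm_setupresult() and asm_tvptr(), which remain in lj_asm_arm.h, lj_asm_mips.h, lj_asm_ppc.h and lj_asm_x86.h. This is what lets the later hunks delete the near-identical copies from each backend.

/* Generic helpers now defined once in lj_asm.c; only asm_gencall(),
** asm_setupresult() and asm_tvptr() stay target-specific.
** (Condensed from the hunks below for illustration.)
*/
static void asm_callid(ASMState *as, IRIns *ir, IRCallID id)
{
  const CCallInfo *ci = &lj_ir_callinfo[id];  /* Descriptor for the C call. */
  IRRef args[2];
  args[0] = ir->op1;
  args[1] = ir->op2;
  asm_setupresult(as, ir, ci);  /* Backend places the result register(s). */
  asm_gencall(as, ci, args);    /* Backend emits the actual call sequence. */
}

static void asm_newref(ASMState *as, IRIns *ir)
{
  const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_tab_newkey];
  IRRef args[3];
  if (ir->r == RID_SINK)  /* Sunk instruction: nothing to emit. */
    return;
  args[0] = ASMREF_L;     /* lua_State *L */
  args[1] = ir->op1;      /* GCtab *t */
  args[2] = ASMREF_TMP1;  /* cTValue *key */
  asm_setupresult(as, ir, ci);  /* TValue * */
  asm_gencall(as, ci, args);
  asm_tvptr(as, ra_releasetmp(as, ASMREF_TMP1), ir->op2);  /* Target-specific. */
}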
diff --git a/src/lj_asm.c b/src/lj_asm.c
index 1304c180..231e76fc 100644
--- a/src/lj_asm.c
+++ b/src/lj_asm.c
@@ -949,44 +949,6 @@ static void asm_snap_prep(ASMState *as)
 
 /* -- Miscellaneous helpers ----------------------------------------------- */
 
-/* Collect arguments from CALL* and CARG instructions. */
-static void asm_collectargs(ASMState *as, IRIns *ir,
-                            const CCallInfo *ci, IRRef *args)
-{
-  uint32_t n = CCI_NARGS(ci);
-  lua_assert(n <= CCI_NARGS_MAX);
-  if ((ci->flags & CCI_L)) { *args++ = ASMREF_L; n--; }
-  while (n-- > 1) {
-    ir = IR(ir->op1);
-    lua_assert(ir->o == IR_CARG);
-    args[n] = ir->op2 == REF_NIL ? 0 : ir->op2;
-  }
-  args[0] = ir->op1 == REF_NIL ? 0 : ir->op1;
-  lua_assert(IR(ir->op1)->o != IR_CARG);
-}
-
-/* Reconstruct CCallInfo flags for CALLX*. */
-static uint32_t asm_callx_flags(ASMState *as, IRIns *ir)
-{
-  uint32_t nargs = 0;
-  if (ir->op1 != REF_NIL) {  /* Count number of arguments first. */
-    IRIns *ira = IR(ir->op1);
-    nargs++;
-    while (ira->o == IR_CARG) { nargs++; ira = IR(ira->op1); }
-  }
-#if LJ_HASFFI
-  if (IR(ir->op2)->o == IR_CARG) {  /* Copy calling convention info. */
-    CTypeID id = (CTypeID)IR(IR(ir->op2)->op2)->i;
-    CType *ct = ctype_get(ctype_ctsG(J2G(as->J)), id);
-    nargs |= ((ct->info & CTF_VARARG) ? CCI_VARARG : 0);
-#if LJ_TARGET_X86
-    nargs |= (ctype_cconv(ct->info) << CCI_CC_SHIFT);
-#endif
-  }
-#endif
-  return (nargs | (ir->t.irt << CCI_OTSHIFT));
-}
-
 /* Calculate stack adjustment. */
 static int32_t asm_stack_adjust(ASMState *as)
 {
@@ -1071,7 +1033,9 @@ static void asm_gcstep(ASMState *as, IRIns *ir)
   as->gcsteps = 0x80000000;  /* Prevent implicit GC check further up. */
 }
 
-/* -- Buffer handling ----------------------------------------------------- */
+/* -- Buffer operations --------------------------------------------------- */
+
+static void asm_tvptr(ASMState *as, Reg dest, IRRef ref);
 
 static void asm_bufhdr(ASMState *as, IRIns *ir)
 {
@@ -1091,10 +1055,6 @@ static void asm_bufhdr(ASMState *as, IRIns *ir)
   }
 }
 
-#if !LJ_TARGET_X86ORX64
-static void asm_tvptr(ASMState *as, Reg dest, IRRef ref);
-#endif
-
 static void asm_bufput(ASMState *as, IRIns *ir)
 {
   const CCallInfo *ci;
@@ -1115,14 +1075,8 @@ static void asm_bufput(ASMState *as, IRIns *ir)
   }
   asm_setupresult(as, ir, ci);  /* SBuf * */
   asm_gencall(as, ci, args);
-  if (args[1] == ASMREF_TMP1) {
-#if LJ_TARGET_X86ORX64
-    emit_rmro(as, XO_LEA, ra_releasetmp(as, ASMREF_TMP1)|REX_64,
-              RID_ESP, ra_spill(as, IR(ir->op2)));
-#else
+  if (args[1] == ASMREF_TMP1)
     asm_tvptr(as, ra_releasetmp(as, ASMREF_TMP1), ir->op2);
-#endif
-  }
 }
 
 static void asm_bufstr(ASMState *as, IRIns *ir)
@@ -1135,6 +1089,161 @@ static void asm_bufstr(ASMState *as, IRIns *ir)
   asm_gencall(as, ci, args);
 }
 
+/* -- Type conversions ---------------------------------------------------- */
+
+static void asm_tostr(ASMState *as, IRIns *ir)
+{
+  IRRef args[2];
+  args[0] = ASMREF_L;
+  as->gcsteps++;
+  if (irt_isnum(IR(ir->op1)->t) || (LJ_SOFTFP && (ir+1)->o == IR_HIOP)) {
+    const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_str_fromnum];
+    args[1] = ASMREF_TMP1;  /* const lua_Number * */
+    asm_setupresult(as, ir, ci);  /* GCstr * */
+    asm_gencall(as, ci, args);
+    asm_tvptr(as, ra_releasetmp(as, ASMREF_TMP1), ir->op1);
+  } else {
+    const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_str_fromint];
+    args[1] = ir->op1;  /* int32_t k */
+    asm_setupresult(as, ir, ci);  /* GCstr * */
+    asm_gencall(as, ci, args);
+  }
+}
+
+#if LJ_32 && LJ_HASFFI && !LJ_SOFTFP && !LJ_TARGET_X86
+static void asm_conv64(ASMState *as, IRIns *ir)
+{
+  IRType st = (IRType)((ir-1)->op2 & IRCONV_SRCMASK);
+  IRType dt = (((ir-1)->op2 & IRCONV_DSTMASK) >> IRCONV_DSH);
+  IRCallID id;
+  IRRef args[2];
+  lua_assert((ir-1)->o == IR_CONV && ir->o == IR_HIOP);
+  args[LJ_BE] = (ir-1)->op1;
+  args[LJ_LE] = ir->op1;
+  if (st == IRT_NUM || st == IRT_FLOAT) {
+    id = IRCALL_fp64_d2l + ((st == IRT_FLOAT) ? 2 : 0) + (dt - IRT_I64);
+    ir--;
+  } else {
+    id = IRCALL_fp64_l2d + ((dt == IRT_FLOAT) ? 2 : 0) + (st - IRT_I64);
+  }
+  {
+#if LJ_TARGET_ARM && !LJ_ABI_SOFTFP
+    CCallInfo cim = lj_ir_callinfo[id], *ci = &cim;
+    cim.flags |= CCI_VARARG;  /* These calls don't use the hard-float ABI! */
+#else
+    const CCallInfo *ci = &lj_ir_callinfo[id];
+#endif
+    asm_setupresult(as, ir, ci);
+    asm_gencall(as, ci, args);
+  }
+}
+#endif
+
+/* -- Memory references --------------------------------------------------- */
+
+static void asm_newref(ASMState *as, IRIns *ir)
+{
+  const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_tab_newkey];
+  IRRef args[3];
+  if (ir->r == RID_SINK)
+    return;
+  args[0] = ASMREF_L;     /* lua_State *L */
+  args[1] = ir->op1;      /* GCtab *t */
+  args[2] = ASMREF_TMP1;  /* cTValue *key */
+  asm_setupresult(as, ir, ci);  /* TValue * */
+  asm_gencall(as, ci, args);
+  asm_tvptr(as, ra_releasetmp(as, ASMREF_TMP1), ir->op2);
+}
+
+/* -- Calls --------------------------------------------------------------- */
+
+/* Collect arguments from CALL* and CARG instructions. */
+static void asm_collectargs(ASMState *as, IRIns *ir,
+                            const CCallInfo *ci, IRRef *args)
+{
+  uint32_t n = CCI_NARGS(ci);
+  lua_assert(n <= CCI_NARGS_MAX);
+  if ((ci->flags & CCI_L)) { *args++ = ASMREF_L; n--; }
+  while (n-- > 1) {
+    ir = IR(ir->op1);
+    lua_assert(ir->o == IR_CARG);
+    args[n] = ir->op2 == REF_NIL ? 0 : ir->op2;
+  }
+  args[0] = ir->op1 == REF_NIL ? 0 : ir->op1;
+  lua_assert(IR(ir->op1)->o != IR_CARG);
+}
+
+/* Reconstruct CCallInfo flags for CALLX*. */
+static uint32_t asm_callx_flags(ASMState *as, IRIns *ir)
+{
+  uint32_t nargs = 0;
+  if (ir->op1 != REF_NIL) {  /* Count number of arguments first. */
+    IRIns *ira = IR(ir->op1);
+    nargs++;
+    while (ira->o == IR_CARG) { nargs++; ira = IR(ira->op1); }
+  }
+#if LJ_HASFFI
+  if (IR(ir->op2)->o == IR_CARG) {  /* Copy calling convention info. */
+    CTypeID id = (CTypeID)IR(IR(ir->op2)->op2)->i;
+    CType *ct = ctype_get(ctype_ctsG(J2G(as->J)), id);
+    nargs |= ((ct->info & CTF_VARARG) ? CCI_VARARG : 0);
+#if LJ_TARGET_X86
+    nargs |= (ctype_cconv(ct->info) << CCI_CC_SHIFT);
+#endif
+  }
+#endif
+  return (nargs | (ir->t.irt << CCI_OTSHIFT));
+}
+
+static void asm_callid(ASMState *as, IRIns *ir, IRCallID id)
+{
+  const CCallInfo *ci = &lj_ir_callinfo[id];
+  IRRef args[2];
+  args[0] = ir->op1;
+  args[1] = ir->op2;
+  asm_setupresult(as, ir, ci);
+  asm_gencall(as, ci, args);
+}
+
+static void asm_call(ASMState *as, IRIns *ir)
+{
+  IRRef args[CCI_NARGS_MAX];
+  const CCallInfo *ci = &lj_ir_callinfo[ir->op2];
+  asm_collectargs(as, ir, ci, args);
+  asm_setupresult(as, ir, ci);
+  asm_gencall(as, ci, args);
+}
+
+#if !LJ_SOFTFP
+static void asm_fppow(ASMState *as, IRIns *ir, IRRef lref, IRRef rref);
+
+#if !LJ_TARGET_X86ORX64
+static void asm_fppow(ASMState *as, IRIns *ir, IRRef lref, IRRef rref)
+{
+  const CCallInfo *ci = &lj_ir_callinfo[IRCALL_pow];
+  IRRef args[2];
+  args[0] = lref;
+  args[1] = rref;
+  asm_setupresult(as, ir, ci);
+  asm_gencall(as, ci, args);
+}
+#endif
+
+static int asm_fpjoin_pow(ASMState *as, IRIns *ir)
+{
+  IRIns *irp = IR(ir->op1);
+  if (irp == ir-1 && irp->o == IR_MUL && !ra_used(irp)) {
+    IRIns *irpp = IR(irp->op1);
+    if (irpp == ir-2 && irpp->o == IR_FPMATH &&
+        irpp->op2 == IRFPM_LOG2 && !ra_used(irpp)) {
+      asm_fppow(as, ir, irpp->op1, irp->op2);
+      return 1;
+    }
+  }
+  return 0;
+}
+#endif
+
 /* -- PHI and loop handling ----------------------------------------------- */
 
 /* Break a PHI cycle by renaming to a free register (evict if needed). */
diff --git a/src/lj_asm_arm.h b/src/lj_asm_arm.h
index 57c2dd81..37cbb5be 100644
--- a/src/lj_asm_arm.h
+++ b/src/lj_asm_arm.h
@@ -453,15 +453,6 @@ static void asm_setupresult(ASMState *as, IRIns *ir, const CCallInfo *ci)
   UNUSED(ci);
 }
 
-static void asm_call(ASMState *as, IRIns *ir)
-{
-  IRRef args[CCI_NARGS_MAX];
-  const CCallInfo *ci = &lj_ir_callinfo[ir->op2];
-  asm_collectargs(as, ir, ci, args);
-  asm_setupresult(as, ir, ci);
-  asm_gencall(as, ci, args);
-}
-
 static void asm_callx(ASMState *as, IRIns *ir)
 {
   IRRef args[CCI_NARGS_MAX];
@@ -600,31 +591,6 @@ static void asm_conv(ASMState *as, IRIns *ir)
   }
 }
 
-#if !LJ_SOFTFP && LJ_HASFFI
-static void asm_conv64(ASMState *as, IRIns *ir)
-{
-  IRType st = (IRType)((ir-1)->op2 & IRCONV_SRCMASK);
-  IRType dt = (((ir-1)->op2 & IRCONV_DSTMASK) >> IRCONV_DSH);
-  IRCallID id;
-  CCallInfo ci;
-  IRRef args[2];
-  args[0] = (ir-1)->op1;
-  args[1] = ir->op1;
-  if (st == IRT_NUM || st == IRT_FLOAT) {
-    id = IRCALL_fp64_d2l + ((st == IRT_FLOAT) ? 2 : 0) + (dt - IRT_I64);
-    ir--;
-  } else {
-    id = IRCALL_fp64_l2d + ((dt == IRT_FLOAT) ? 2 : 0) + (st - IRT_I64);
-  }
-  ci = lj_ir_callinfo[id];
-#if !LJ_ABI_SOFTFP
-  ci.flags |= CCI_VARARG;  /* These calls don't use the hard-float ABI! */
-#endif
-  asm_setupresult(as, ir, &ci);
-  asm_gencall(as, &ci, args);
-}
-#endif
-
 static void asm_strto(ASMState *as, IRIns *ir)
 {
   const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_strscan_num];
@@ -688,6 +654,8 @@ static void asm_strto(ASMState *as, IRIns *ir)
   emit_opk(as, ARMI_ADD, tmp, RID_SP, ofs, RSET_GPR);
 }
 
+/* -- Memory references --------------------------------------------------- */
+
 /* Get pointer to TValue. */
 static void asm_tvptr(ASMState *as, Reg dest, IRRef ref)
 {
@@ -713,7 +681,7 @@ static void asm_tvptr(ASMState *as, Reg dest, IRRef ref)
       Reg src = ra_alloc1(as, ref, allow);
       emit_lso(as, ARMI_STR, src, RID_SP, 0);
     }
-    if ((ir+1)->o == IR_HIOP)
+    if (LJ_SOFTFP && (ir+1)->o == IR_HIOP)
       type = ra_alloc1(as, ref+1, allow);
     else
       type = ra_allock(as, irt_toitype(ir->t), allow);
@@ -721,27 +689,6 @@ static void asm_tvptr(ASMState *as, Reg dest, IRRef ref)
   }
 }
 
-static void asm_tostr(ASMState *as, IRIns *ir)
-{
-  IRRef args[2];
-  args[0] = ASMREF_L;
-  as->gcsteps++;
-  if (irt_isnum(IR(ir->op1)->t) || (ir+1)->o == IR_HIOP) {
-    const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_str_fromnum];
-    args[1] = ASMREF_TMP1;  /* const lua_Number * */
-    asm_setupresult(as, ir, ci);  /* GCstr * */
-    asm_gencall(as, ci, args);
-    asm_tvptr(as, ra_releasetmp(as, ASMREF_TMP1), ir->op1);
-  } else {
-    const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_str_fromint];
-    args[1] = ir->op1;  /* int32_t k */
-    asm_setupresult(as, ir, ci);  /* GCstr * */
-    asm_gencall(as, ci, args);
-  }
-}
-
-/* -- Memory references --------------------------------------------------- */
-
 static void asm_aref(ASMState *as, IRIns *ir)
 {
   Reg dest = ra_dest(as, ir, RSET_GPR);
@@ -959,20 +906,6 @@ static void asm_hrefk(ASMState *as, IRIns *ir)
   emit_opk(as, ARMI_ADD, dest, node, ofs, RSET_GPR);
 }
 
-static void asm_newref(ASMState *as, IRIns *ir)
-{
-  const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_tab_newkey];
-  IRRef args[3];
-  if (ir->r == RID_SINK)
-    return;
-  args[0] = ASMREF_L;     /* lua_State *L */
-  args[1] = ir->op1;      /* GCtab *t */
-  args[2] = ASMREF_TMP1;  /* cTValue *key */
-  asm_setupresult(as, ir, ci);  /* TValue * */
-  asm_gencall(as, ci, args);
-  asm_tvptr(as, ra_releasetmp(as, ASMREF_TMP1), ir->op2);
-}
-
 static void asm_uref(ASMState *as, IRIns *ir)
 {
   /* NYI: Check that UREFO is still open and not aliasing a slot. */
@@ -1391,25 +1324,6 @@ static void asm_fpunary(ASMState *as, IRIns *ir, ARMIns ai)
   Reg left = ra_hintalloc(as, ir->op1, dest, RSET_FPR);
   emit_dm(as, ai, (dest & 15), (left & 15));
 }
-
-static int asm_fpjoin_pow(ASMState *as, IRIns *ir)
-{
-  IRIns *irp = IR(ir->op1);
-  if (irp == ir-1 && irp->o == IR_MUL && !ra_used(irp)) {
-    IRIns *irpp = IR(irp->op1);
-    if (irpp == ir-2 && irpp->o == IR_FPMATH &&
-        irpp->op2 == IRFPM_LOG2 && !ra_used(irpp)) {
-      const CCallInfo *ci = &lj_ir_callinfo[IRCALL_pow];
-      IRRef args[2];
-      args[0] = irpp->op1;
-      args[1] = irp->op2;
-      asm_setupresult(as, ir, ci);
-      asm_gencall(as, ci, args);
-      return 1;
-    }
-  }
-  return 0;
-}
 #endif
 
 static int asm_swapops(ASMState *as, IRRef lref, IRRef rref)
@@ -1561,16 +1475,6 @@ static void asm_neg(ASMState *as, IRIns *ir)
     asm_intneg(as, ir, ARMI_RSB);
 }
 
-static void asm_callid(ASMState *as, IRIns *ir, IRCallID id)
-{
-  const CCallInfo *ci = &lj_ir_callinfo[id];
-  IRRef args[2];
-  args[0] = ir->op1;
-  args[1] = ir->op2;
-  asm_setupresult(as, ir, ci);
-  asm_gencall(as, ci, args);
-}
-
 #if !LJ_SOFTFP
 static void asm_callround(ASMState *as, IRIns *ir, int id)
 {
diff --git a/src/lj_asm_mips.h b/src/lj_asm_mips.h
index 55fe10b8..44cb85e5 100644
--- a/src/lj_asm_mips.h
+++ b/src/lj_asm_mips.h
@@ -326,15 +326,6 @@ static void asm_setupresult(ASMState *as, IRIns *ir, const CCallInfo *ci)
   }
 }
 
-static void asm_call(ASMState *as, IRIns *ir)
-{
-  IRRef args[CCI_NARGS_MAX];
-  const CCallInfo *ci = &lj_ir_callinfo[ir->op2];
-  asm_collectargs(as, ir, ci, args);
-  asm_setupresult(as, ir, ci);
-  asm_gencall(as, ci, args);
-}
-
 static void asm_callx(ASMState *as, IRIns *ir)
 {
   IRRef args[CCI_NARGS_MAX];
@@ -362,16 +353,6 @@ static void asm_callx(ASMState *as, IRIns *ir)
   asm_gencall(as, &ci, args);
 }
 
-static void asm_callid(ASMState *as, IRIns *ir, IRCallID id)
-{
-  const CCallInfo *ci = &lj_ir_callinfo[id];
-  IRRef args[2];
-  args[0] = ir->op1;
-  args[1] = ir->op2;
-  asm_setupresult(as, ir, ci);
-  asm_gencall(as, ci, args);
-}
-
 static void asm_callround(ASMState *as, IRIns *ir, IRCallID id)
 {
   /* The modified regs must match with the *.dasc implementation. */
@@ -519,28 +500,6 @@ static void asm_conv(ASMState *as, IRIns *ir)
   }
 }
 
-#if LJ_HASFFI
-static void asm_conv64(ASMState *as, IRIns *ir)
-{
-  IRType st = (IRType)((ir-1)->op2 & IRCONV_SRCMASK);
-  IRType dt = (((ir-1)->op2 & IRCONV_DSTMASK) >> IRCONV_DSH);
-  IRCallID id;
-  const CCallInfo *ci;
-  IRRef args[2];
-  args[LJ_BE?0:1] = ir->op1;
-  args[LJ_BE?1:0] = (ir-1)->op1;
-  if (st == IRT_NUM || st == IRT_FLOAT) {
-    id = IRCALL_fp64_d2l + ((st == IRT_FLOAT) ? 2 : 0) + (dt - IRT_I64);
-    ir--;
-  } else {
-    id = IRCALL_fp64_l2d + ((dt == IRT_FLOAT) ? 2 : 0) + (st - IRT_I64);
-  }
-  ci = &lj_ir_callinfo[id];
-  asm_setupresult(as, ir, ci);
-  asm_gencall(as, ci, args);
-}
-#endif
-
 static void asm_strto(ASMState *as, IRIns *ir)
 {
   const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_strscan_num];
@@ -557,6 +516,8 @@ static void asm_strto(ASMState *as, IRIns *ir)
            RID_SP, sps_scale(ir->s));
 }
 
+/* -- Memory references --------------------------------------------------- */
+
 /* Get pointer to TValue. */
 static void asm_tvptr(ASMState *as, Reg dest, IRRef ref)
 {
@@ -580,27 +541,6 @@ static void asm_tvptr(ASMState *as, Reg dest, IRRef ref)
   }
 }
 
-static void asm_tostr(ASMState *as, IRIns *ir)
-{
-  IRRef args[2];
-  args[0] = ASMREF_L;
-  as->gcsteps++;
-  if (irt_isnum(IR(ir->op1)->t) || (ir+1)->o == IR_HIOP) {
-    const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_str_fromnum];
-    args[1] = ASMREF_TMP1;  /* const lua_Number * */
-    asm_setupresult(as, ir, ci);  /* GCstr * */
-    asm_gencall(as, ci, args);
-    asm_tvptr(as, ra_releasetmp(as, ASMREF_TMP1), ir->op1);
-  } else {
-    const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_str_fromint];
-    args[1] = ir->op1;  /* int32_t k */
-    asm_setupresult(as, ir, ci);  /* GCstr * */
-    asm_gencall(as, ci, args);
-  }
-}
-
-/* -- Memory references --------------------------------------------------- */
-
 static void asm_aref(ASMState *as, IRIns *ir)
 {
   Reg dest = ra_dest(as, ir, RSET_GPR);
@@ -776,20 +716,6 @@ nolo:
   emit_tsi(as, MIPSI_ADDU, dest, node, ra_allock(as, ofs, allow));
 }
 
-static void asm_newref(ASMState *as, IRIns *ir)
-{
-  if (ir->r != RID_SINK) {
-    const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_tab_newkey];
-    IRRef args[3];
-    args[0] = ASMREF_L;     /* lua_State *L */
-    args[1] = ir->op1;      /* GCtab *t */
-    args[2] = ASMREF_TMP1;  /* cTValue *key */
-    asm_setupresult(as, ir, ci);  /* TValue * */
-    asm_gencall(as, ci, args);
-    asm_tvptr(as, ra_releasetmp(as, ASMREF_TMP1), ir->op2);
-  }
-}
-
 static void asm_uref(ASMState *as, IRIns *ir)
 {
   /* NYI: Check that UREFO is still open and not aliasing a slot. */
@@ -1152,25 +1078,6 @@ static void asm_fpunary(ASMState *as, IRIns *ir, MIPSIns mi)
   emit_fg(as, mi, dest, left);
 }
 
-static int asm_fpjoin_pow(ASMState *as, IRIns *ir)
-{
-  IRIns *irp = IR(ir->op1);
-  if (irp == ir-1 && irp->o == IR_MUL && !ra_used(irp)) {
-    IRIns *irpp = IR(irp->op1);
-    if (irpp == ir-2 && irpp->o == IR_FPMATH &&
-        irpp->op2 == IRFPM_LOG2 && !ra_used(irpp)) {
-      const CCallInfo *ci = &lj_ir_callinfo[IRCALL_pow];
-      IRRef args[2];
-      args[0] = irpp->op1;
-      args[1] = irp->op2;
-      asm_setupresult(as, ir, ci);
-      asm_gencall(as, ci, args);
-      return 1;
-    }
-  }
-  return 0;
-}
-
 static void asm_add(ASMState *as, IRIns *ir)
 {
   if (irt_isnum(ir->t)) {
diff --git a/src/lj_asm_ppc.h b/src/lj_asm_ppc.h
index d0feb43a..9c9c3ea4 100644
--- a/src/lj_asm_ppc.h
+++ b/src/lj_asm_ppc.h
@@ -329,15 +329,6 @@ static void asm_setupresult(ASMState *as, IRIns *ir, const CCallInfo *ci)
   }
 }
 
-static void asm_call(ASMState *as, IRIns *ir)
-{
-  IRRef args[CCI_NARGS_MAX];
-  const CCallInfo *ci = &lj_ir_callinfo[ir->op2];
-  asm_collectargs(as, ir, ci, args);
-  asm_setupresult(as, ir, ci);
-  asm_gencall(as, ci, args);
-}
-
 static void asm_callx(ASMState *as, IRIns *ir)
 {
   IRRef args[CCI_NARGS_MAX];
@@ -361,16 +352,6 @@ static void asm_callx(ASMState *as, IRIns *ir)
   asm_gencall(as, &ci, args);
 }
 
-static void asm_callid(ASMState *as, IRIns *ir, IRCallID id)
-{
-  const CCallInfo *ci = &lj_ir_callinfo[id];
-  IRRef args[2];
-  args[0] = ir->op1;
-  args[1] = ir->op2;
-  asm_setupresult(as, ir, ci);
-  asm_gencall(as, ci, args);
-}
-
 /* -- Returns ------------------------------------------------------------- */
 
 /* Return to lower frame. Guard that it goes to the right spot. */
@@ -510,28 +491,6 @@ static void asm_conv(ASMState *as, IRIns *ir)
   }
 }
 
-#if LJ_HASFFI
-static void asm_conv64(ASMState *as, IRIns *ir)
-{
-  IRType st = (IRType)((ir-1)->op2 & IRCONV_SRCMASK);
-  IRType dt = (((ir-1)->op2 & IRCONV_DSTMASK) >> IRCONV_DSH);
-  IRCallID id;
-  const CCallInfo *ci;
-  IRRef args[2];
-  args[0] = ir->op1;
-  args[1] = (ir-1)->op1;
-  if (st == IRT_NUM || st == IRT_FLOAT) {
-    id = IRCALL_fp64_d2l + ((st == IRT_FLOAT) ? 2 : 0) + (dt - IRT_I64);
-    ir--;
-  } else {
-    id = IRCALL_fp64_l2d + ((dt == IRT_FLOAT) ? 2 : 0) + (st - IRT_I64);
-  }
-  ci = &lj_ir_callinfo[id];
-  asm_setupresult(as, ir, ci);
-  asm_gencall(as, ci, args);
-}
-#endif
-
 static void asm_strto(ASMState *as, IRIns *ir)
 {
   const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_strscan_num];
@@ -550,6 +509,8 @@ static void asm_strto(ASMState *as, IRIns *ir)
   emit_tai(as, PPCI_ADDI, ra_releasetmp(as, ASMREF_TMP1), RID_SP, ofs);
 }
 
+/* -- Memory references --------------------------------------------------- */
+
 /* Get pointer to TValue. */
 static void asm_tvptr(ASMState *as, Reg dest, IRRef ref)
 {
@@ -573,27 +534,6 @@ static void asm_tvptr(ASMState *as, Reg dest, IRRef ref)
   }
 }
 
-static void asm_tostr(ASMState *as, IRIns *ir)
-{
-  IRRef args[2];
-  args[0] = ASMREF_L;
-  as->gcsteps++;
-  if (irt_isnum(IR(ir->op1)->t) || (ir+1)->o == IR_HIOP) {
-    const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_str_fromnum];
-    args[1] = ASMREF_TMP1;  /* const lua_Number * */
-    asm_setupresult(as, ir, ci);  /* GCstr * */
-    asm_gencall(as, ci, args);
-    asm_tvptr(as, ra_releasetmp(as, ASMREF_TMP1), ir->op1);
-  } else {
-    const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_str_fromint];
-    args[1] = ir->op1;  /* int32_t k */
-    asm_setupresult(as, ir, ci);  /* GCstr * */
-    asm_gencall(as, ci, args);
-  }
-}
-
-/* -- Memory references --------------------------------------------------- */
-
 static void asm_aref(ASMState *as, IRIns *ir)
 {
   Reg dest = ra_dest(as, ir, RSET_GPR);
@@ -770,20 +710,6 @@ static void asm_hrefk(ASMState *as, IRIns *ir)
   }
 }
 
-static void asm_newref(ASMState *as, IRIns *ir)
-{
-  const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_tab_newkey];
-  IRRef args[3];
-  if (ir->r == RID_SINK)
-    return;
-  args[0] = ASMREF_L;     /* lua_State *L */
-  args[1] = ir->op1;      /* GCtab *t */
-  args[2] = ASMREF_TMP1;  /* cTValue *key */
-  asm_setupresult(as, ir, ci);  /* TValue * */
-  asm_gencall(as, ci, args);
-  asm_tvptr(as, ra_releasetmp(as, ASMREF_TMP1), ir->op2);
-}
-
 static void asm_uref(ASMState *as, IRIns *ir)
 {
   /* NYI: Check that UREFO is still open and not aliasing a slot. */
@@ -1194,25 +1120,6 @@ static void asm_fpunary(ASMState *as, IRIns *ir, PPCIns pi)
   emit_fb(as, pi, dest, left);
 }
 
-static int asm_fpjoin_pow(ASMState *as, IRIns *ir)
-{
-  IRIns *irp = IR(ir->op1);
-  if (irp == ir-1 && irp->o == IR_MUL && !ra_used(irp)) {
-    IRIns *irpp = IR(irp->op1);
-    if (irpp == ir-2 && irpp->o == IR_FPMATH &&
-        irpp->op2 == IRFPM_LOG2 && !ra_used(irpp)) {
-      const CCallInfo *ci = &lj_ir_callinfo[IRCALL_pow];
-      IRRef args[2];
-      args[0] = irpp->op1;
-      args[1] = irp->op2;
-      asm_setupresult(as, ir, ci);
-      asm_gencall(as, ci, args);
-      return 1;
-    }
-  }
-  return 0;
-}
-
 static void asm_add(ASMState *as, IRIns *ir)
 {
   if (irt_isnum(ir->t)) {
diff --git a/src/lj_asm_x86.h b/src/lj_asm_x86.h
index 1e32b6c9..e79dca93 100644
--- a/src/lj_asm_x86.h
+++ b/src/lj_asm_x86.h
@@ -576,15 +576,6 @@ static void asm_setupresult(ASMState *as, IRIns *ir, const CCallInfo *ci)
   }
 }
 
-static void asm_call(ASMState *as, IRIns *ir)
-{
-  IRRef args[CCI_NARGS_MAX];
-  const CCallInfo *ci = &lj_ir_callinfo[ir->op2];
-  asm_collectargs(as, ir, ci, args);
-  asm_setupresult(as, ir, ci);
-  asm_gencall(as, ci, args);
-}
-
 /* Return a constant function pointer or NULL for indirect calls. */
 static void *asm_callx_func(ASMState *as, IRIns *irf, IRRef func)
 {
@@ -891,6 +882,14 @@ static void asm_conv_int64_fp(ASMState *as, IRIns *ir)
             st == IRT_NUM ? XOg_FLDq: XOg_FLDd,
             asm_fuseload(as, ir->op1, RSET_EMPTY));
 }
+
+static void asm_conv64(ASMState *as, IRIns *ir)
+{
+  if (irt_isfp(ir->t))
+    asm_conv_fp_int64(as, ir);
+  else
+    asm_conv_int64_fp(as, ir);
+}
 #endif
 
 static void asm_strto(ASMState *as, IRIns *ir)
@@ -912,29 +911,32 @@ static void asm_strto(ASMState *as, IRIns *ir)
             RID_ESP, sps_scale(ir->s));
 }
 
-static void asm_tostr(ASMState *as, IRIns *ir)
+/* -- Memory references --------------------------------------------------- */
+
+/* Get pointer to TValue. */
+static void asm_tvptr(ASMState *as, Reg dest, IRRef ref)
 {
-  IRIns *irl = IR(ir->op1);
-  IRRef args[2];
-  args[0] = ASMREF_L;
-  as->gcsteps++;
-  if (irt_isnum(irl->t)) {
-    const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_str_fromnum];
-    args[1] = ASMREF_TMP1;  /* const lua_Number * */
-    asm_setupresult(as, ir, ci);  /* GCstr * */
-    asm_gencall(as, ci, args);
-    emit_rmro(as, XO_LEA, ra_releasetmp(as, ASMREF_TMP1)|REX_64,
-              RID_ESP, ra_spill(as, irl));
+  IRIns *ir = IR(ref);
+  if (irt_isnum(ir->t)) {
+    /* For numbers use the constant itself or a spill slot as a TValue. */
+    if (irref_isk(ref))
+      emit_loada(as, dest, ir_knum(ir));
+    else
+      emit_rmro(as, XO_LEA, dest|REX_64, RID_ESP, ra_spill(as, ir));
   } else {
-    const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_str_fromint];
-    args[1] = ir->op1;  /* int32_t k */
-    asm_setupresult(as, ir, ci);  /* GCstr * */
-    asm_gencall(as, ci, args);
+    /* Otherwise use g->tmptv to hold the TValue. */
+    if (!irref_isk(ref)) {
+      Reg src = ra_alloc1(as, ref, rset_exclude(RSET_GPR, dest));
+      emit_movtomro(as, REX_64IR(ir, src), dest, 0);
+    } else if (!irt_ispri(ir->t)) {
+      emit_movmroi(as, dest, 0, ir->i);
+    }
+    if (!(LJ_64 && irt_islightud(ir->t)))
+      emit_movmroi(as, dest, 4, irt_toitype(ir->t));
+    emit_loada(as, dest, &J2G(as->J)->tmptv);
   }
 }
 
-/* -- Memory references --------------------------------------------------- */
-
 static void asm_aref(ASMState *as, IRIns *ir)
 {
   Reg dest = ra_dest(as, ir, RSET_GPR);
@@ -1163,41 +1165,6 @@ static void asm_hrefk(ASMState *as, IRIns *ir)
 #endif
 }
 
-static void asm_newref(ASMState *as, IRIns *ir)
-{
-  const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_tab_newkey];
-  IRRef args[3];
-  IRIns *irkey;
-  Reg tmp;
-  if (ir->r == RID_SINK)
-    return;
-  args[0] = ASMREF_L;     /* lua_State *L */
-  args[1] = ir->op1;      /* GCtab *t */
-  args[2] = ASMREF_TMP1;  /* cTValue *key */
-  asm_setupresult(as, ir, ci);  /* TValue * */
-  asm_gencall(as, ci, args);
-  tmp = ra_releasetmp(as, ASMREF_TMP1);
-  irkey = IR(ir->op2);
-  if (irt_isnum(irkey->t)) {
-    /* For numbers use the constant itself or a spill slot as a TValue. */
-    if (irref_isk(ir->op2))
-      emit_loada(as, tmp, ir_knum(irkey));
-    else
-      emit_rmro(as, XO_LEA, tmp|REX_64, RID_ESP, ra_spill(as, irkey));
-  } else {
-    /* Otherwise use g->tmptv to hold the TValue. */
-    if (!irref_isk(ir->op2)) {
-      Reg src = ra_alloc1(as, ir->op2, rset_exclude(RSET_GPR, tmp));
-      emit_movtomro(as, REX_64IR(irkey, src), tmp, 0);
-    } else if (!irt_ispri(irkey->t)) {
-      emit_movmroi(as, tmp, 0, irkey->i);
-    }
-    if (!(LJ_64 && irt_islightud(irkey->t)))
-      emit_movmroi(as, tmp, 4, irt_toitype(irkey->t));
-    emit_loada(as, tmp, &J2G(as->J)->tmptv);
-  }
-}
-
 static void asm_uref(ASMState *as, IRIns *ir)
 {
   /* NYI: Check that UREFO is still open and not aliasing a slot. */
@@ -1621,31 +1588,21 @@ static void asm_x87load(ASMState *as, IRRef ref)
   }
 }
 
-/* Try to rejoin pow from EXP2, MUL and LOG2 (if still unsplit). */
-static int fpmjoin_pow(ASMState *as, IRIns *ir)
+static void asm_fppow(ASMState *as, IRIns *ir, IRRef lref, IRRef rref)
 {
-  IRIns *irp = IR(ir->op1);
-  if (irp == ir-1 && irp->o == IR_MUL && !ra_used(irp)) {
-    IRIns *irpp = IR(irp->op1);
-    if (irpp == ir-2 && irpp->o == IR_FPMATH &&
-        irpp->op2 == IRFPM_LOG2 && !ra_used(irpp)) {
-      /* The modified regs must match with the *.dasc implementation. */
-      RegSet drop = RSET_RANGE(RID_XMM0, RID_XMM2+1)|RID2RSET(RID_EAX);
-      IRIns *irx;
-      if (ra_hasreg(ir->r))
-        rset_clear(drop, ir->r);  /* Dest reg handled below. */
-      ra_evictset(as, drop);
-      ra_destreg(as, ir, RID_XMM0);
-      emit_call(as, lj_vm_pow_sse);
-      irx = IR(irpp->op1);
-      if (ra_noreg(irx->r) && ra_gethint(irx->r) == RID_XMM1)
-        irx->r = RID_INIT;  /* Avoid allocating xmm1 for x. */
-      ra_left(as, RID_XMM0, irpp->op1);
-      ra_left(as, RID_XMM1, irp->op2);
-      return 1;
-    }
-  }
-  return 0;
+  /* The modified regs must match with the *.dasc implementation. */
+  RegSet drop = RSET_RANGE(RID_XMM0, RID_XMM2+1)|RID2RSET(RID_EAX);
+  IRIns *irx;
+  if (ra_hasreg(ir->r))
+    rset_clear(drop, ir->r);  /* Dest reg handled below. */
+  ra_evictset(as, drop);
+  ra_destreg(as, ir, RID_XMM0);
+  emit_call(as, lj_vm_pow_sse);
+  irx = IR(lref);
+  if (ra_noreg(irx->r) && ra_gethint(irx->r) == RID_XMM1)
+    irx->r = RID_INIT;  /* Avoid allocating xmm1 for x. */
+  ra_left(as, RID_XMM0, lref);
+  ra_left(as, RID_XMM1, rref);
 }
 
 static void asm_fpmath(ASMState *as, IRIns *ir)
@@ -1681,7 +1638,7 @@ static void asm_fpmath(ASMState *as, IRIns *ir)
                 fpm == IRFPM_CEIL ? lj_vm_ceil_sse : lj_vm_trunc_sse);
       ra_left(as, RID_XMM0, ir->op1);
     }
-  } else if (fpm == IRFPM_EXP2 && fpmjoin_pow(as, ir)) {
+  } else if (fpm == IRFPM_EXP2 && asm_fpjoin_pow(as, ir)) {
     /* Rejoined to pow(). */
   } else {  /* Handle x87 ops. */
     int32_t ofs = sps_scale(ir->s);  /* Use spill slot or temp slots. */
@@ -1741,28 +1698,6 @@ static void asm_fppowi(ASMState *as, IRIns *ir)
   ra_left(as, RID_EAX, ir->op2);
 }
 
-#if LJ_64 && LJ_HASFFI
-static void asm_arith64(ASMState *as, IRIns *ir, IRCallID id)
-{
-  const CCallInfo *ci = &lj_ir_callinfo[id];
-  IRRef args[2];
-  args[0] = ir->op1;
-  args[1] = ir->op2;
-  asm_setupresult(as, ir, ci);
-  asm_gencall(as, ci, args);
-}
-#endif
-
-static void asm_intmod(ASMState *as, IRIns *ir)
-{
-  const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_vm_modi];
-  IRRef args[2];
-  args[0] = ir->op1;
-  args[1] = ir->op2;
-  asm_setupresult(as, ir, ci);
-  asm_gencall(as, ci, args);
-}
-
 static int asm_swapops(ASMState *as, IRIns *ir)
 {
   IRIns *irl = IR(ir->op1);
@@ -2268,13 +2203,9 @@ static void asm_hiop(ASMState *as, IRIns *ir)
   int uselo = ra_used(ir-1), usehi = ra_used(ir);  /* Loword/hiword used? */
   if (LJ_UNLIKELY(!(as->flags & JIT_F_OPT_DCE))) uselo = usehi = 1;
   if ((ir-1)->o == IR_CONV) {  /* Conversions to/from 64 bit. */
-    if (usehi || uselo) {
-      if (irt_isfp(ir->t))
-        asm_conv_fp_int64(as, ir);
-      else
-        asm_conv_int64_fp(as, ir);
-    }
     as->curins--;  /* Always skip the CONV. */
+    if (usehi || uselo)
+      asm_conv64(as, ir);
     return;
   } else if ((ir-1)->o <= IR_NE) {  /* 64 bit integer comparisons. ORDER IR. */
     asm_comp_int64(as, ir);
@@ -2627,8 +2558,8 @@ static void asm_ir(ASMState *as, IRIns *ir)
   case IR_DIV:
 #if LJ_64 && LJ_HASFFI
     if (!irt_isnum(ir->t))
-      asm_arith64(as, ir, irt_isi64(ir->t) ? IRCALL_lj_carith_divi64 :
-                                             IRCALL_lj_carith_divu64);
+      asm_callid(as, ir, irt_isi64(ir->t) ? IRCALL_lj_carith_divi64 :
+                                            IRCALL_lj_carith_divu64);
     else
 #endif
       asm_fparith(as, ir, XO_DIVSD);
@@ -2636,11 +2567,11 @@ static void asm_ir(ASMState *as, IRIns *ir)
   case IR_MOD:
 #if LJ_64 && LJ_HASFFI
     if (!irt_isint(ir->t))
-      asm_arith64(as, ir, irt_isi64(ir->t) ? IRCALL_lj_carith_modi64 :
-                                             IRCALL_lj_carith_modu64);
+      asm_callid(as, ir, irt_isi64(ir->t) ? IRCALL_lj_carith_modi64 :
+                                            IRCALL_lj_carith_modu64);
     else
 #endif
-      asm_intmod(as, ir);
+      asm_callid(as, ir, IRCALL_lj_vm_modi);
     break;
 
   case IR_NEG:
@@ -2670,8 +2601,8 @@ static void asm_ir(ASMState *as, IRIns *ir)
   case IR_POW:
 #if LJ_64 && LJ_HASFFI
     if (!irt_isnum(ir->t))
-      asm_arith64(as, ir, irt_isi64(ir->t) ? IRCALL_lj_carith_powi64 :
-                                             IRCALL_lj_carith_powu64);
+      asm_callid(as, ir, irt_isi64(ir->t) ? IRCALL_lj_carith_powi64 :
+                                            IRCALL_lj_carith_powu64);
     else
 #endif
       asm_fppowi(as, ir);