@@ -2283,55 +2283,12 @@ static void asm_tdup(ASMState *as, IRIns *ir)
 }
 
 #if LJ_HASFFI
-static RegSet asm_cnew_init(ASMState *as, IRRef ref, int32_t ofs, RegSet allow)
-{
-  IRIns *ir = IR(ref);
-  if (irref_isk(ref)) {
-#if LJ_64
-    if (ir->o == IR_KNUM || ir->o == IR_KINT64) {
-      uint64_t k = ir_k64(ir)->u64;
-      if (checki32((int64_t)k)) {
-        emit_i32(as, (int32_t)k);
-        emit_rmro(as, XO_MOVmi, REX_64, RID_RET, ofs);
-      } else {
-        emit_movtomro(as, RID_ECX|REX_64, RID_RET, ofs);
-        emit_loadu64(as, RID_ECX, k);
-      }
-    } else {
-      emit_movmroi(as, RID_RET, ofs, ir->i);
-    }
-#else
-    if (ir->o == IR_KNUM) {
-      emit_rmro(as, XO_MOVSDto, RID_XMM0, RID_RET, ofs);
-      emit_loadn(as, RID_XMM0, ir_k64(ir));
-    } else if (ir->o == IR_KINT64) {
-      uint64_t k = ir_k64(ir)->u64;
-      emit_movmroi(as, RID_RET, ofs, (int32_t)k);
-      emit_movmroi(as, RID_RET, ofs+4, (int32_t)(k >> 32));
-    } else {
-      emit_movmroi(as, RID_RET, ofs, ir->i);
-    }
-#endif
-  } else {
-    Reg r;
-    if (irt_isnum(ir->t)) {
-      r = ra_alloc1(as, ref, (RSET_FPR & allow));
-      emit_rmro(as, XO_MOVSDto, r, RID_RET, ofs);
-    } else {
-      r = ra_alloc1(as, ref, (RSET_GPR & allow));
-      emit_movtomro(as, REX_64IR(ir, r), RID_RET, ofs);
-    }
-    rset_clear(allow, r);
-  }
-  return allow;
-}
-
 static void asm_cnew(ASMState *as, IRIns *ir)
 {
   CTState *cts = ctype_ctsG(J2G(as->J));
-  CTypeID typeid = (CTypeID)IR(ir->op2)->i;
-  CTSize sz = (ir->o == IR_CNEWI || ir->op1 == REF_NIL) ?
-              lj_ctype_size(cts, typeid) : (CTSize)IR(ir->op1)->i;
+  CTypeID typeid = (CTypeID)IR(ir->op1)->i;
+  CTSize sz = (ir->o == IR_CNEWP || ir->op2 == REF_NIL) ?
+              lj_ctype_size(cts, typeid) : (CTSize)IR(ir->op2)->i;
   const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_mem_newgco];
   IRRef args[2];
   lua_assert(sz != CTSIZE_INVALID);
@@ -2339,18 +2296,35 @@ static void asm_cnew(ASMState *as, IRIns *ir)
   args[0] = ASMREF_L;     /* lua_State *L */
   args[1] = ASMREF_TMP1;  /* MSize size   */
   as->gcsteps++;
-  asm_setupresult(as, ir, ci);  /* GCobj * */
+  asm_setupresult(as, ir, ci);  /* GCcdata * */
 
-  /* Initialize immutable cdata object. */
-  if (ir->o == IR_CNEWI) {
-    RegSet allow = ~RSET_SCRATCH;
-    IRRef ref = ir->op1;
-    if (IR(ref)->o == IR_CARG) {  /* 2nd initializer. */
-      IRIns *ira = IR(ref);
-      allow = asm_cnew_init(as, ira->op2, sizeof(GCcdata) + (sz>>1), allow);
-      ref = ira->op1;
+  /* Initialize pointer cdata object. */
+  if (ir->o == IR_CNEWP) {
+    if (irref_isk(ir->op2)) {
+      IRIns *irk = IR(ir->op2);
+#if LJ_64
+      if (irk->o == IR_KINT64) {
+        uint64_t k = ir_k64(irk)->u64;
+        lua_assert(sz == 8);
+        if (checki32((int64_t)k)) {
+          emit_i32(as, (int32_t)k);
+          emit_rmro(as, XO_MOVmi, REX_64, RID_RET, sizeof(GCcdata));
+        } else {
+          emit_movtomro(as, RID_ECX|REX_64, RID_RET, sizeof(GCcdata));
+          emit_loadu64(as, RID_ECX, k);
+        }
+      } else {
+#endif
+        lua_assert(sz == 4);
+        emit_movmroi(as, RID_RET, sizeof(GCcdata), irk->i);
+#if LJ_64
+      }
+#endif
+    } else {
+      Reg r = ra_alloc1(as, ir->op2, (RSET_GPR & ~RSET_SCRATCH));
+      emit_movtomro(as, r + ((LJ_64 && sz == 8) ? REX_64 : 0),
+                    RID_RET, sizeof(GCcdata));
     }
-    asm_cnew_init(as, ref, sizeof(GCcdata), allow);  /* 1st initializer. */
   }
 
   /* Combine initialization of marked, gct and typeid. */
@@ -3675,7 +3649,7 @@ static void asm_ir(ASMState *as, IRIns *ir)
   case IR_SNEW: asm_snew(as, ir); break;
   case IR_TNEW: asm_tnew(as, ir); break;
   case IR_TDUP: asm_tdup(as, ir); break;
-  case IR_CNEW: case IR_CNEWI: asm_cnew(as, ir); break;
+  case IR_CNEW: case IR_CNEWP: asm_cnew(as, ir); break;
 
   /* Write barriers. */
   case IR_TBAR: asm_tbar(as, ir); break;
@@ -3793,7 +3767,7 @@ static void asm_setup_regsp(ASMState *as, GCtrace *T)
       if (as->evenspill < 3)  /* lj_str_new and lj_tab_newkey need 3 args. */
        as->evenspill = 3;
 #endif
-    case IR_TNEW: case IR_TDUP: case IR_CNEW: case IR_CNEWI: case IR_TOSTR:
+    case IR_TNEW: case IR_TDUP: case IR_CNEW: case IR_CNEWP: case IR_TOSTR:
       ir->prev = REGSP_HINT(RID_RET);
       if (inloop)
        as->modset = RSET_SCRATCH;
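
Note (not part of the patch): the IR_CNEWP path above writes the pointer payload immediately after the GCcdata header, i.e. at offset sizeof(GCcdata) from the allocation base held in RID_RET. A 64 bit constant that passes checki32() can use a single mov with a sign-extended 32 bit immediate; larger constants go through RCX first. The following is a rough C analogue of that store, using a hypothetical GCcdata_demo struct purely for illustration (the real header layout is in lj_obj.h):

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Hypothetical stand-in for the GCcdata header; the field layout here is
** illustrative only, what matters for the sketch is sizeof(). */
typedef struct GCcdata_demo {
  uint32_t nextgc;   /* GCRef (simplified to a 32 bit reference) */
  uint8_t marked;    /* GC mark bits */
  uint8_t gct;       /* GC object type */
  uint16_t ctypeid;  /* C type ID of the cdata */
} GCcdata_demo;

/* Store a pointer payload of size sz (4 on 32 bit, 8 on 64 bit targets)
** right after the header, mirroring the emitted mov relative to RID_RET. */
static void cnewp_store_payload(void *cd, uint64_t k, size_t sz)
{
  memcpy((char *)cd + sizeof(GCcdata_demo), &k, sz);
}

int main(void)
{
  unsigned char obj[sizeof(GCcdata_demo) + 8] = {0};
  cnewp_store_payload(obj, 0x1234567890abcdefULL, 8);  /* 64 bit case */
  printf("first payload byte: %02x\n", obj[sizeof(GCcdata_demo)]);
  return 0;
}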