
Add explicit IR_GCSTEP instruction.

Mike Pall  13 years ago
Parent
Commit
5d0115ef8d
6 changed files with 27 additions and 7 deletions
  1. src/lj_asm.c      +18 -3
  2. src/lj_asm_arm.h   +2 -1
  3. src/lj_asm_mips.h  +2 -1
  4. src/lj_asm_ppc.h   +2 -1
  5. src/lj_asm_x86.h   +2 -1
  6. src/lj_ir.h        +1 -0

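The assembler already keeps an accumulated as->gcsteps counter and, if it is nonzero, emits a GC check when the head of the trace is assembled. This commit adds an explicit IR_GCSTEP instruction: asm_gcstep() counts the allocating instructions before it in the IR that actually produced a live result (TNEW, TDUP and, with the FFI, CNEW/CNEWI), emits the GC check right there, and then stores the sentinel 0x80000000 into the counter. Because the counter type changes from MSize to int32_t, that sentinel is negative, the head-of-trace test becomes as->gcsteps > 0, and the (int32_t) casts in the four backends become redundant. The following is a minimal standalone model of that counter/sentinel protocol, not LuaJIT code; all names are hypothetical and INT32_MIN stands in for the 0x80000000 sentinel written in the patch.

#include <stdint.h>
#include <stdio.h>

/* Hypothetical standalone model of the gcsteps counter/sentinel protocol. */
static int32_t gcsteps = 0;

static void note_allocation(void)
{
  gcsteps++;  /* One GC step per allocation that made it into the code. */
}

static void explicit_gc_step(void)
{
  if (gcsteps)
    printf("emit GC check covering %d step(s)\n", (int)gcsteps);
  gcsteps = INT32_MIN;  /* Negative sentinel: suppress the implicit check. */
}

static void trace_head(void)
{
  if (gcsteps > 0)  /* Signed test, so the sentinel never triggers it. */
    printf("emit implicit GC check at trace head\n");
}

int main(void)
{
  note_allocation();
  note_allocation();
  explicit_gc_step();  /* Prints: emit GC check covering 2 step(s) */
  trace_head();        /* Prints nothing: the counter is negative. */
  return 0;
}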
+ 18 - 3
src/lj_asm.c

@@ -71,7 +71,7 @@ typedef struct ASMState {
   IRRef loopref;	/* Reference of LOOP instruction (or 0). */
 
   BCReg topslot;	/* Number of slots for stack check (unless 0). */
-  MSize gcsteps;	/* Accumulated number of GC steps (per section). */
+  int32_t gcsteps;	/* Accumulated number of GC steps (per section). */
 
   GCtrace *T;		/* Trace to assemble. */
   GCtrace *parent;	/* Parent trace (or NULL). */
@@ -972,6 +972,22 @@ static void asm_tdup(ASMState *as, IRIns *ir)
   asm_gencall(as, ci, args);
 }
 
+static void asm_gc_check(ASMState *as);
+
+/* Explicit GC step. */
+static void asm_gcstep(ASMState *as, IRIns *ir)
+{
+  IRIns *ira;
+  for (ira = IR(as->stopins+1); ira < ir; ira++)
+    if ((ira->o == IR_TNEW || ira->o == IR_TDUP ||
+	 (LJ_HASFFI && (ira->o == IR_CNEW || ira->o == IR_CNEWI))) &&
+	ra_used(ira))
+      as->gcsteps++;
+  if (as->gcsteps)
+    asm_gc_check(as);
+  as->gcsteps = 0x80000000;  /* Prevent implicit GC check further up. */
+}
+
 /* -- PHI and loop handling ----------------------------------------------- */
 
 /* Break a PHI cycle by renaming to a free register (evict if needed). */
@@ -1191,7 +1207,6 @@ static void asm_phi(ASMState *as, IRIns *ir)
   }
 }
 
-static void asm_gc_check(ASMState *as);
 static void asm_loop_fixup(ASMState *as);
 
 /* Middle part of a loop. */
@@ -1757,7 +1772,7 @@ void lj_asm_trace(jit_State *J, GCtrace *T)
   /* Emit head of trace. */
   RA_DBG_REF();
   checkmclim(as);
-  if (as->gcsteps) {
+  if (as->gcsteps > 0) {
     as->curins = as->T->snap[0].ref;
     asm_snap_prep(as);  /* The GC check is a guard. */
     asm_gc_check(as);

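Note that asm_gcstep() only charges a GC step for allocations whose result was actually kept (ra_used()), and the forward declaration of asm_gc_check() moves up so the new function can call it. A small sketch of the counting filter follows, with simplified stand-in types instead of LuaJIT's IRIns (the real code walks the instructions from as->stopins+1 up to the GCSTEP instruction).

#include <stdint.h>
#include <stddef.h>
#include <stdio.h>

/* Hypothetical sketch of the allocation scan in asm_gcstep(). */
typedef enum { OP_OTHER, OP_TNEW, OP_TDUP, OP_CNEW, OP_CNEWI } Op;
typedef struct { Op o; int used; } Ins;

static int32_t count_gc_steps(const Ins *ins, size_t n)
{
  int32_t steps = 0;
  size_t i;
  for (i = 0; i < n; i++)
    if ((ins[i].o == OP_TNEW || ins[i].o == OP_TDUP ||
         ins[i].o == OP_CNEW || ins[i].o == OP_CNEWI) && ins[i].used)
      steps++;  /* Count only allocations that produced a live result. */
  return steps;
}

int main(void)
{
  Ins ir[] = {
    { OP_TNEW,  1 },  /* counted */
    { OP_OTHER, 1 },
    { OP_TDUP,  0 },  /* result unused: not counted */
    { OP_CNEWI, 1 },  /* counted */
  };
  printf("%d GC step(s)\n", (int)count_gc_steps(ir, sizeof(ir)/sizeof(ir[0])));
  return 0;
}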
+ 2 - 1
src/lj_asm_arm.h

@@ -1532,7 +1532,7 @@ static void asm_gc_check(ASMState *as)
   asm_gencall(as, ci, args);
   tmp1 = ra_releasetmp(as, ASMREF_TMP1);
   tmp2 = ra_releasetmp(as, ASMREF_TMP2);
-  emit_loadi(as, tmp2, (int32_t)as->gcsteps);
+  emit_loadi(as, tmp2, as->gcsteps);
   /* Jump around GC step if GC total < GC threshold. */
   emit_branch(as, ARMF_CC(ARMI_B, CC_LS), l_end);
   emit_nm(as, ARMI_CMP, RID_TMP, tmp2);
@@ -1646,6 +1646,7 @@ static void asm_ir(ASMState *as, IRIns *ir)
   case IR_USE: ra_alloc1(as, ir->op1, RSET_GPR); break;
   case IR_PHI: asm_phi(as, ir); break;
   case IR_HIOP: asm_hiop(as, ir); break;
+  case IR_GCSTEP: asm_gcstep(as, ir); break;
 
   /* Guarded assertions. */
   case IR_EQ: case IR_NE:

+ 2 - 1
src/lj_asm_mips.h

@@ -1673,7 +1673,7 @@ static void asm_gc_check(ASMState *as)
   asm_gencall(as, ci, args);
   emit_tsi(as, MIPSI_ADDIU, ra_releasetmp(as, ASMREF_TMP1), RID_JGL, -32768);
   tmp = ra_releasetmp(as, ASMREF_TMP2);
-  emit_loadi(as, tmp, (int32_t)as->gcsteps);
+  emit_loadi(as, tmp, as->gcsteps);
   /* Jump around GC step if GC total < GC threshold. */
   emit_branch(as, MIPSI_BNE, RID_TMP, RID_ZERO, l_end);
   emit_dst(as, MIPSI_SLTU, RID_TMP, RID_TMP, tmp);
@@ -1770,6 +1770,7 @@ static void asm_ir(ASMState *as, IRIns *ir)
     ra_alloc1(as, ir->op1, irt_isfp(ir->t) ? RSET_FPR : RSET_GPR); break;
   case IR_PHI: asm_phi(as, ir); break;
   case IR_HIOP: asm_hiop(as, ir); break;
+  case IR_GCSTEP: asm_gcstep(as, ir); break;
 
   /* Guarded assertions. */
   case IR_EQ: case IR_NE: asm_compeq(as, ir); break;

+ 2 - 1
src/lj_asm_ppc.h

@@ -1856,7 +1856,7 @@ static void asm_gc_check(ASMState *as)
   asm_gencall(as, ci, args);
   emit_tai(as, PPCI_ADDI, ra_releasetmp(as, ASMREF_TMP1), RID_JGL, -32768);
   tmp = ra_releasetmp(as, ASMREF_TMP2);
-  emit_loadi(as, tmp, (int32_t)as->gcsteps);
+  emit_loadi(as, tmp, as->gcsteps);
   /* Jump around GC step if GC total < GC threshold. */
   emit_condbranch(as, PPCI_BC|PPCF_Y, CC_LT, l_end);
   emit_ab(as, PPCI_CMPLW, RID_TMP, tmp);
@@ -1966,6 +1966,7 @@ static void asm_ir(ASMState *as, IRIns *ir)
     ra_alloc1(as, ir->op1, irt_isfp(ir->t) ? RSET_FPR : RSET_GPR); break;
   case IR_PHI: asm_phi(as, ir); break;
   case IR_HIOP: asm_hiop(as, ir); break;
+  case IR_GCSTEP: asm_gcstep(as, ir); break;
 
   /* Guarded assertions. */
   case IR_EQ: case IR_NE:

+ 2 - 1
src/lj_asm_x86.h

@@ -2379,7 +2379,7 @@ static void asm_gc_check(ASMState *as)
   asm_gencall(as, ci, args);
   tmp = ra_releasetmp(as, ASMREF_TMP1);
   emit_loada(as, tmp, J2G(as->J));
-  emit_loadi(as, ra_releasetmp(as, ASMREF_TMP2), (int32_t)as->gcsteps);
+  emit_loadi(as, ra_releasetmp(as, ASMREF_TMP2), as->gcsteps);
   /* Jump around GC step if GC total < GC threshold. */
   emit_sjcc(as, CC_B, l_end);
   emit_opgl(as, XO_ARITH(XOg_CMP), tmp, gc.threshold);
@@ -2556,6 +2556,7 @@ static void asm_ir(ASMState *as, IRIns *ir)
     ra_alloc1(as, ir->op1, irt_isfp(ir->t) ? RSET_FPR : RSET_GPR); break;
   case IR_PHI: asm_phi(as, ir); break;
   case IR_HIOP: asm_hiop(as, ir); break;
+  case IR_GCSTEP: asm_gcstep(as, ir); break;
 
   /* Guarded assertions. */
   case IR_LT: case IR_GE: case IR_LE: case IR_GT:

+ 1 - 0
src/lj_ir.h

@@ -33,6 +33,7 @@
   /* Miscellaneous ops. */ \
   _(NOP,	N , ___, ___) \
   _(BASE,	N , lit, lit) \
+  _(GCSTEP,	S , ___, ___) \
   _(HIOP,	S , ref, ref) \
   _(LOOP,	S , ___, ___) \
   _(USE,	S , ref, ___) \
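On the IR side, GCSTEP joins the miscellaneous ops with the S mode and no operands (___), the same mode used by HIOP, LOOP and USE in this list, so it is emitted for its effect rather than its value. The list is consumed as an X-macro, so this one added line yields both the IR_GCSTEP enum value and its entries in the name and mode tables. A reduced model of that expansion pattern, with hypothetical macro names rather than the real IRDEF machinery, is:

#include <stdio.h>

/* Reduced model of the X-macro pattern behind the IR opcode list. */
#define MISCOPDEF(_) \
  _(NOP) _(BASE) _(GCSTEP) _(HIOP) _(LOOP) _(USE)

#define OPENUM(name) OP_##name,
#define OPNAME(name) #name,

typedef enum { MISCOPDEF(OPENUM) OP__MAX } MiscOp;
static const char *const op_names[] = { MISCOPDEF(OPNAME) };

int main(void)
{
  /* One list entry gives both the enum value and the printable name. */
  printf("%s = %d\n", op_names[OP_GCSTEP], (int)OP_GCSTEP);  /* GCSTEP = 2 */
  return 0;
}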