  1. /*
  2. ** Common header for IR emitter and optimizations.
  3. ** Copyright (C) 2005-2023 Mike Pall. See Copyright Notice in luajit.h
  4. */
#ifndef _LJ_IROPT_H
#define _LJ_IROPT_H

#include <stdarg.h>

#include "lj_obj.h"
#include "lj_jit.h"

#if LJ_HASJIT

/* -- IR emitter ---------------------------------------------------------- */

/* Grow the IR instruction buffer; called when J->cur.nins reaches
** J->irtoplim (see lj_ir_nextins below).
*/
LJ_FUNC void LJ_FASTCALL lj_ir_growtop(jit_State *J);

/* Emit the instruction currently staged in J->fold.ins and return its TRef. */
LJ_FUNC TRef LJ_FASTCALL lj_ir_emit(jit_State *J);
  14. /* Save current IR in J->fold.ins, but do not emit it (yet). */
  15. static LJ_AINLINE void lj_ir_set_(jit_State *J, uint16_t ot, IRRef1 a, IRRef1 b)
  16. {
  17. J->fold.ins.ot = ot; J->fold.ins.op1 = a; J->fold.ins.op2 = b;
  18. }
  19. #define lj_ir_set(J, ot, a, b) \
  20. lj_ir_set_(J, (uint16_t)(ot), (IRRef1)(a), (IRRef1)(b))
/* Get ref of next IR instruction and optionally grow IR.
** Note: this may invalidate all IRIns*!  (lj_ir_growtop may move the
** IR buffer, so re-derive any IRIns pointers after calling this.)
*/
static LJ_AINLINE IRRef lj_ir_nextins(jit_State *J)
{
  IRRef ref = J->cur.nins;  /* Capture before a potential grow/move. */
  if (LJ_UNLIKELY(ref >= J->irtoplim)) lj_ir_growtop(J);
  J->cur.nins = ref + 1;
  return ref;
}
/* Load a field from the GG state at offset ofs with IR type t
** (see lj_ir_ksimd below for an example).
*/
LJ_FUNC TRef lj_ir_ggfload(jit_State *J, IRType t, uintptr_t ofs);

/* -- Interning of constants ---------------------------------------------- */
LJ_FUNC TRef LJ_FASTCALL lj_ir_kint(jit_State *J, int32_t k);  /* 32 bit int. */
LJ_FUNC TRef lj_ir_k64(jit_State *J, IROp op, uint64_t u64);   /* 64 bit const for op. */
LJ_FUNC TRef lj_ir_knum_u64(jit_State *J, uint64_t u64);       /* Number from raw bits. */
LJ_FUNC TRef lj_ir_knumint(jit_State *J, lua_Number n);
LJ_FUNC TRef lj_ir_kint64(jit_State *J, uint64_t u64);
LJ_FUNC TRef lj_ir_kgc(jit_State *J, GCobj *o, IRType t);      /* GC object const. */
LJ_FUNC TRef lj_ir_kptr_(jit_State *J, IROp op, void *ptr);    /* Use lj_ir_kptr/kkptr. */
LJ_FUNC TRef lj_ir_knull(jit_State *J, IRType t);
LJ_FUNC TRef lj_ir_kslot(jit_State *J, TRef key, IRRef slot);
LJ_FUNC TRef lj_ir_ktrace(jit_State *J);

/* Pointer-sized integer constant: 64 bit on LJ_64 targets, 32 bit otherwise. */
#if LJ_64
#define lj_ir_kintp(J, k) lj_ir_kint64(J, (uint64_t)(k))
#else
#define lj_ir_kintp(J, k) lj_ir_kint(J, (int32_t)(k))
#endif

/* Integer constant wide enough for a pointer into GC memory. */
#if LJ_GC64
#define lj_ir_kintpgc lj_ir_kintp
#else
#define lj_ir_kintpgc lj_ir_kint
#endif
  53. static LJ_AINLINE TRef lj_ir_knum(jit_State *J, lua_Number n)
  54. {
  55. TValue tv;
  56. tv.n = n;
  57. return lj_ir_knum_u64(J, tv.u64);
  58. }
/* Convenience wrappers: intern a GC object constant with its IR type. */
#define lj_ir_kstr(J, str) lj_ir_kgc(J, obj2gco((str)), IRT_STR)
#define lj_ir_ktab(J, tab) lj_ir_kgc(J, obj2gco((tab)), IRT_TAB)
#define lj_ir_kfunc(J, func) lj_ir_kgc(J, obj2gco((func)), IRT_FUNC)

/* Pointer constants (KPTR vs. KKPTR: distinct IR ops, same interning path). */
#define lj_ir_kptr(J, ptr) lj_ir_kptr_(J, IR_KPTR, (ptr))
#define lj_ir_kkptr(J, ptr) lj_ir_kptr_(J, IR_KKPTR, (ptr))

/* Special FP constants, given as raw IEEE-754 double bit patterns. */
#define lj_ir_knum_zero(J) lj_ir_knum_u64(J, U64x(00000000,00000000))
#define lj_ir_knum_one(J) lj_ir_knum_u64(J, U64x(3ff00000,00000000))
#define lj_ir_knum_tobit(J) lj_ir_knum_u64(J, U64x(43380000,00000000))

/* Special 128 bit SIMD constants, loaded from the GG state area
** (offset computed relative to J2GG(J)).
*/
#define lj_ir_ksimd(J, idx) \
  lj_ir_ggfload(J, IRT_NUM, (uintptr_t)LJ_KSIMD(J, idx) - (uintptr_t)J2GG(J))
/* -- Access to constants ------------------------------------------------- */

/* Convert the IR constant ir to the TValue *tv. */
LJ_FUNC void lj_ir_kvalue(lua_State *L, TValue *tv, const IRIns *ir);

/* -- Convert IR operand types -------------------------------------------- */
LJ_FUNC TRef LJ_FASTCALL lj_ir_tonumber(jit_State *J, TRef tr);
LJ_FUNC TRef LJ_FASTCALL lj_ir_tonum(jit_State *J, TRef tr);
LJ_FUNC TRef LJ_FASTCALL lj_ir_tostr(jit_State *J, TRef tr);

/* -- Miscellaneous IR ops ------------------------------------------------ */
/* Evaluate a comparison op on two constant operands. */
LJ_FUNC int lj_ir_numcmp(lua_Number a, lua_Number b, IROp op);
LJ_FUNC int lj_ir_strcmp(GCstr *a, GCstr *b, IROp op);
/* Roll back the IR to ref, discarding later instructions. */
LJ_FUNC void lj_ir_rollback(jit_State *J, IRRef ref);

/* -- Emit IR instructions with on-the-fly optimizations ------------------ */
LJ_FUNC TRef LJ_FASTCALL lj_opt_fold(jit_State *J);   /* Fold engine for J->fold.ins. */
LJ_FUNC TRef LJ_FASTCALL lj_opt_cse(jit_State *J);    /* Common subexpression elim. */
LJ_FUNC TRef LJ_FASTCALL lj_opt_cselim(jit_State *J, IRRef lim);  /* CSE with ref limit. */

/* Special return values for the fold functions.
** Note: CONDFOLD below relies on FAILFOLD+1 == DROPFOLD.
*/
enum {
  NEXTFOLD,   /* Couldn't fold, pass on. */
  RETRYFOLD,  /* Retry fold with modified fins. */
  KINTFOLD,   /* Return ref for int constant in fins->i. */
  FAILFOLD,   /* Guard would always fail. */
  DROPFOLD,   /* Guard eliminated. */
  MAX_FOLD
};

/* Helpers for fold rules to construct their return value. */
#define INTFOLD(k) ((J->fold.ins.i = (k)), (TRef)KINTFOLD)  /* Fold to int const k. */
#define INT64FOLD(k) (lj_ir_kint64(J, (k)))                 /* Fold to 64 bit int const. */
#define CONDFOLD(cond) ((TRef)FAILFOLD + (TRef)(cond))      /* cond ? DROPFOLD : FAILFOLD. */
#define LEFTFOLD (J->fold.ins.op1)   /* Replace with left operand. */
#define RIGHTFOLD (J->fold.ins.op2)  /* Replace with right operand. */
#define CSEFOLD (lj_opt_cse(J))      /* Try CSE on the staged instruction. */
#define EMITFOLD (lj_ir_emit(J))     /* Emit the staged instruction as-is. */
/* -- Load/store forwarding -----------------------------------------------
** Called from the fold engine on the load staged in J->fold.ins; one
** entry point per load kind.  (NOTE(review): per-op semantics inferred
** from naming only -- confirm against the implementation file.)
*/
LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_aload(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_hload(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_uload(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_fload(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_xload(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_alen(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_hrefk(jit_State *J);
LJ_FUNC int LJ_FASTCALL lj_opt_fwd_href_nokey(jit_State *J);
LJ_FUNC int LJ_FASTCALL lj_opt_fwd_tptr(jit_State *J, IRRef lim);
LJ_FUNC int LJ_FASTCALL lj_opt_fwd_sbuf(jit_State *J, IRRef lim);
LJ_FUNC int lj_opt_fwd_wasnonnil(jit_State *J, IROpT loadop, IRRef xref);

/* -- Dead-store elimination ----------------------------------------------
** One entry point per store kind, for the store staged in J->fold.ins.
*/
LJ_FUNC TRef LJ_FASTCALL lj_opt_dse_ahstore(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_dse_ustore(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_dse_fstore(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_dse_xstore(jit_State *J);

/* -- Narrowing ------------------------------------------------------------ */
LJ_FUNC TRef LJ_FASTCALL lj_opt_narrow_convert(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_narrow_index(jit_State *J, TRef key);
LJ_FUNC TRef LJ_FASTCALL lj_opt_narrow_toint(jit_State *J, TRef tr);
LJ_FUNC TRef LJ_FASTCALL lj_opt_narrow_tobit(jit_State *J, TRef tr);
#if LJ_HASFFI
LJ_FUNC TRef LJ_FASTCALL lj_opt_narrow_cindex(jit_State *J, TRef key);
#endif
/* Narrowing for arithmetic: vb/vc hold the runtime operand values. */
LJ_FUNC TRef lj_opt_narrow_arith(jit_State *J, TRef rb, TRef rc,
                                 TValue *vb, TValue *vc, IROp op);
LJ_FUNC TRef lj_opt_narrow_unm(jit_State *J, TRef rc, TValue *vc);
LJ_FUNC TRef lj_opt_narrow_mod(jit_State *J, TRef rb, TRef rc, TValue *vb, TValue *vc);
/* Pick the result IR type for a numeric 'for' loop from its base slots. */
LJ_FUNC IRType lj_opt_narrow_forl(jit_State *J, cTValue *forbase);
/* -- Optimization passes -------------------------------------------------- */
LJ_FUNC void lj_opt_dce(jit_State *J);  /* Dead-code elimination. */
LJ_FUNC int lj_opt_loop(jit_State *J);  /* Loop optimization. */
/* The split pass is only needed on targets that can't handle some 64 bit
** or FP operations natively (soft-float 32 bit, or 32 bit with FFI).
*/
#if LJ_SOFTFP32 || (LJ_32 && LJ_HASFFI)
LJ_FUNC void lj_opt_split(jit_State *J);
#else
#define lj_opt_split(J) UNUSED(J)  /* No-op elsewhere. */
#endif
LJ_FUNC void lj_opt_sink(jit_State *J);  /* Sinking pass. */

#endif  /* LJ_HASJIT */

#endif  /* _LJ_IROPT_H */