lj_crecord.c 63 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
7177817791780178117821783178417851786178717881789179017911792179317941795179617971798179918001801180218031804180518061807180818091810181118121813181418151816181718181819182018211822182318241825182618271828182918301831183218331834183518361837183818391840184118421843184418451846184718481849185018511852185318541855185618571858185918601861186218631864186518661867186818691870187118721873187418751876187718781879188018811882188318841885188618871888188918901891189218931894189518961897189818991900190119021903190419051906190719081909191019111912191319141915191619171918191919201921192219231924192519261927192819291930193119321933193419351936193719381939194019411942194319441945194619471948194919501951195219531954195519561957195819591960196119621963196419651966196719681969197019711972
  1. /*
  2. ** Trace recorder for C data operations.
  3. ** Copyright (C) 2005-2023 Mike Pall. See Copyright Notice in luajit.h
  4. */
  5. #define lj_ffrecord_c
  6. #define LUA_CORE
  7. #include "lj_obj.h"
  8. #if LJ_HASJIT && LJ_HASFFI
  9. #include "lj_err.h"
  10. #include "lj_tab.h"
  11. #include "lj_frame.h"
  12. #include "lj_ctype.h"
  13. #include "lj_cdata.h"
  14. #include "lj_cparse.h"
  15. #include "lj_cconv.h"
  16. #include "lj_carith.h"
  17. #include "lj_clib.h"
  18. #include "lj_ccall.h"
  19. #include "lj_ff.h"
  20. #include "lj_ir.h"
  21. #include "lj_jit.h"
  22. #include "lj_ircall.h"
  23. #include "lj_iropt.h"
  24. #include "lj_trace.h"
  25. #include "lj_record.h"
  26. #include "lj_ffrecord.h"
  27. #include "lj_snap.h"
  28. #include "lj_crecord.h"
  29. #include "lj_dispatch.h"
  30. #include "lj_strfmt.h"
/* Some local macros to save typing. Undef'd at the end. */

/* Shorthand for an IR instruction slot in the current trace. */
#define IR(ref)			(&J->cur.ir[(ref)])

/* Pass IR on to next optimization in chain (FOLD). */
#define emitir(ot, a, b)	(lj_ir_set(J, (ot), (a), (b)), lj_opt_fold(J))

/* Emit a CONV with destination type dt, source type st and conversion flags.
** The source type is packed into the low bits of op2, dt into bits 5+.
*/
#define emitconv(a, dt, st, flags) \
  emitir(IRT(IR_CONV, (dt)), (a), (st)|((dt) << 5)|(flags))
/* -- C type checks ------------------------------------------------------- */

/* Check that argument tr/o is a cdata and specialize the trace to its CTypeID.
** tr is the IR reference, o the corresponding interpreter stack slot.
** Aborts recording with a type error if the argument is not a cdata.
** Returns the GCcdata object from the interpreter state.
*/
static GCcdata *argv2cdata(jit_State *J, TRef tr, cTValue *o)
{
  GCcdata *cd;
  TRef trtypeid;
  if (!tref_iscdata(tr))
    lj_trace_err(J, LJ_TRERR_BADTYPE);
  cd = cdataV(o);
  /* Specialize to the CTypeID: load the on-trace ctypeid and guard that it
  ** equals the ctypeid observed by the interpreter.
  */
  trtypeid = emitir(IRT(IR_FLOAD, IRT_U16), tr, IRFL_CDATA_CTYPEID);
  emitir(IRTG(IR_EQ, IRT_INT), trtypeid, lj_ir_kint(J, (int32_t)cd->ctypeid));
  return cd;
}
/* Specialize to the CTypeID held by a cdata constructor.
** cd must be a CTID_CTYPEID cdata (a ctype constructor object); the payload
** is the constructed type's CTypeID. Emits a guard on the payload value.
*/
static CTypeID crec_constructor(jit_State *J, GCcdata *cd, TRef tr)
{
  CTypeID id;
  lj_assertJ(tref_iscdata(tr) && cd->ctypeid == CTID_CTYPEID,
	     "expected CTypeID cdata");
  id = *(CTypeID *)cdataptr(cd);  /* CTypeID stored in the cdata payload. */
  tr = emitir(IRT(IR_FLOAD, IRT_INT), tr, IRFL_CDATA_INT);
  emitir(IRTG(IR_EQ, IRT_INT), tr, lj_ir_kint(J, (int32_t)id));
  return id;
}
/* Resolve an argument to a CTypeID: either a string holding a C type
** declaration (parsed in abstract mode) or a cdata (plain or constructor).
** Specializes the trace to the string or the cdata's CTypeID.
*/
static CTypeID argv2ctype(jit_State *J, TRef tr, cTValue *o)
{
  if (tref_isstr(tr)) {
    GCstr *s = strV(o);
    CPState cp;
    CTypeID oldtop;
    /* Specialize to the string containing the C type declaration. */
    emitir(IRTG(IR_EQ, IRT_STR), tr, lj_ir_kstr(J, s));
    cp.L = J->L;
    cp.cts = ctype_cts(J->L);
    oldtop = cp.cts->top;
    cp.srcname = strdata(s);
    cp.p = strdata(s);
    cp.param = NULL;
    cp.mode = CPARSE_MODE_ABSTRACT|CPARSE_MODE_NOIMPLICIT;
    /* Reject parse errors, and also reject declarations that intern new
    ** types (top grew): re-parsing on trace replay must be side-effect free.
    */
    if (lj_cparse(&cp) || cp.cts->top > oldtop)  /* Avoid new struct defs. */
      lj_trace_err(J, LJ_TRERR_BADTYPE);
    return cp.val.id;
  } else {
    GCcdata *cd = argv2cdata(J, tr, o);
    /* A CTID_CTYPEID cdata is a constructor; unwrap it to the real type. */
    return cd->ctypeid == CTID_CTYPEID ? crec_constructor(J, cd, tr) :
	   cd->ctypeid;
  }
}
  85. /* Convert CType to IRType (if possible). */
  86. static IRType crec_ct2irt(CTState *cts, CType *ct)
  87. {
  88. if (ctype_isenum(ct->info)) ct = ctype_child(cts, ct);
  89. if (LJ_LIKELY(ctype_isnum(ct->info))) {
  90. if ((ct->info & CTF_FP)) {
  91. if (ct->size == sizeof(double))
  92. return IRT_NUM;
  93. else if (ct->size == sizeof(float))
  94. return IRT_FLOAT;
  95. } else {
  96. uint32_t b = lj_fls(ct->size);
  97. if (b <= 3)
  98. return IRT_I8 + 2*b + ((ct->info & CTF_UNSIGNED) ? 1 : 0);
  99. }
  100. } else if (ctype_isptr(ct->info)) {
  101. return (LJ_64 && ct->size == 8) ? IRT_P64 : IRT_P32;
  102. } else if (ctype_iscomplex(ct->info)) {
  103. if (ct->size == 2*sizeof(double))
  104. return IRT_NUM;
  105. else if (ct->size == 2*sizeof(float))
  106. return IRT_FLOAT;
  107. }
  108. return IRT_CDATA;
  109. }
/* -- Optimized memory fill and copy -------------------------------------- */

/* Maximum length and unroll of inlined copy/fill. */
#define CREC_COPY_MAXUNROLL	16   /* Max. number of emitted load/stores. */
#define CREC_COPY_MAXLEN	128  /* Max. byte length for inlined copy. */

#define CREC_FILL_MAXUNROLL	16   /* Max. number of emitted stores. */

/* Number of windowed registers used for optimized memory copy. */
#if LJ_TARGET_X86
#define CREC_COPY_REGWIN	2
#elif LJ_TARGET_PPC || LJ_TARGET_MIPS
#define CREC_COPY_REGWIN	8
#else
#define CREC_COPY_REGWIN	4
#endif

/* List of memory offsets for copy/fill. */
typedef struct CRecMemList {
  CTSize ofs;	/* Offset in bytes. */
  IRType tp;	/* Type of load/store. */
  TRef trofs;	/* TRef of interned offset. */
  TRef trval;	/* TRef of load value. */
} CRecMemList;
  130. /* Generate copy list for element-wise struct copy. */
  131. static MSize crec_copy_struct(CRecMemList *ml, CTState *cts, CType *ct)
  132. {
  133. CTypeID fid = ct->sib;
  134. MSize mlp = 0;
  135. while (fid) {
  136. CType *df = ctype_get(cts, fid);
  137. fid = df->sib;
  138. if (ctype_isfield(df->info)) {
  139. CType *cct;
  140. IRType tp;
  141. if (!gcref(df->name)) continue; /* Ignore unnamed fields. */
  142. cct = ctype_rawchild(cts, df); /* Field type. */
  143. tp = crec_ct2irt(cts, cct);
  144. if (tp == IRT_CDATA) return 0; /* NYI: aggregates. */
  145. if (mlp >= CREC_COPY_MAXUNROLL) return 0;
  146. ml[mlp].ofs = df->size;
  147. ml[mlp].tp = tp;
  148. mlp++;
  149. if (ctype_iscomplex(cct->info)) {
  150. if (mlp >= CREC_COPY_MAXUNROLL) return 0;
  151. ml[mlp].ofs = df->size + (cct->size >> 1);
  152. ml[mlp].tp = tp;
  153. mlp++;
  154. }
  155. } else if (!ctype_isconstval(df->info)) {
  156. /* NYI: bitfields and sub-structures. */
  157. return 0;
  158. }
  159. }
  160. return mlp;
  161. }
  162. /* Generate unrolled copy list, from highest to lowest step size/alignment. */
  163. static MSize crec_copy_unroll(CRecMemList *ml, CTSize len, CTSize step,
  164. IRType tp)
  165. {
  166. CTSize ofs = 0;
  167. MSize mlp = 0;
  168. if (tp == IRT_CDATA) tp = IRT_U8 + 2*lj_fls(step);
  169. do {
  170. while (ofs + step <= len) {
  171. if (mlp >= CREC_COPY_MAXUNROLL) return 0;
  172. ml[mlp].ofs = ofs;
  173. ml[mlp].tp = tp;
  174. mlp++;
  175. ofs += step;
  176. }
  177. step >>= 1;
  178. tp -= 2;
  179. } while (ofs < len);
  180. return mlp;
  181. }
  182. /*
  183. ** Emit copy list with windowed loads/stores.
  184. ** LJ_TARGET_UNALIGNED: may emit unaligned loads/stores (not marked as such).
  185. */
  186. static void crec_copy_emit(jit_State *J, CRecMemList *ml, MSize mlp,
  187. TRef trdst, TRef trsrc)
  188. {
  189. MSize i, j, rwin = 0;
  190. for (i = 0, j = 0; i < mlp; ) {
  191. TRef trofs = lj_ir_kintp(J, ml[i].ofs);
  192. TRef trsptr = emitir(IRT(IR_ADD, IRT_PTR), trsrc, trofs);
  193. ml[i].trval = emitir(IRT(IR_XLOAD, ml[i].tp), trsptr, 0);
  194. ml[i].trofs = trofs;
  195. i++;
  196. rwin += (LJ_SOFTFP32 && ml[i].tp == IRT_NUM) ? 2 : 1;
  197. if (rwin >= CREC_COPY_REGWIN || i >= mlp) { /* Flush buffered stores. */
  198. rwin = 0;
  199. for ( ; j < i; j++) {
  200. TRef trdptr = emitir(IRT(IR_ADD, IRT_PTR), trdst, ml[j].trofs);
  201. emitir(IRT(IR_XSTORE, ml[j].tp), trdptr, ml[j].trval);
  202. }
  203. }
  204. }
  205. }
/* Optimized memory copy.
** Tries to emit an unrolled sequence of loads/stores for a constant-length
** copy; falls back to a memcpy call otherwise. ct, if non-NULL, is the
** aggregate type being copied (array or struct) and steers element sizes.
*/
static void crec_copy(jit_State *J, TRef trdst, TRef trsrc, TRef trlen,
		      CType *ct)
{
  if (tref_isk(trlen)) {  /* Length must be constant. */
    CRecMemList ml[CREC_COPY_MAXUNROLL];
    MSize mlp = 0;
    CTSize step = 1, len = (CTSize)IR(tref_ref(trlen))->i;
    IRType tp = IRT_CDATA;
    int needxbar = 0;
    if (len == 0) return;  /* Shortcut. */
    if (len > CREC_COPY_MAXLEN) goto fallback;
    if (ct) {
      CTState *cts = ctype_ctsG(J2G(J));
      lj_assertJ(ctype_isarray(ct->info) || ctype_isstruct(ct->info),
		 "copy of non-aggregate");
      if (ctype_isarray(ct->info)) {
	/* Copy array element-wise with the element's IR type. */
	CType *cct = ctype_rawchild(cts, ct);
	tp = crec_ct2irt(cts, cct);
	if (tp == IRT_CDATA) goto rawcopy;
	step = lj_ir_type_size[tp];
	lj_assertJ((len & (step-1)) == 0, "copy of fractional size");
      } else if ((ct->info & CTF_UNION)) {
	/* Unions are copied as raw bytes at their alignment. */
	step = (1u << ctype_align(ct->info));
	goto rawcopy;
      } else {
	/* Structs get a per-field copy list. */
	mlp = crec_copy_struct(ml, cts, ct);
	goto emitcopy;
      }
    } else {
    rawcopy:
      /* Untyped copy: loads/stores may alias anything, needs a barrier. */
      needxbar = 1;
      if (LJ_TARGET_UNALIGNED || step >= CTSIZE_PTR)
	step = CTSIZE_PTR;
    }
    mlp = crec_copy_unroll(ml, len, step, tp);
  emitcopy:
    if (mlp) {
      crec_copy_emit(J, ml, mlp, trdst, trsrc);
      if (needxbar)
	emitir(IRT(IR_XBAR, IRT_NIL), 0, 0);
      return;
    }
  }
fallback:
  /* Call memcpy. Always needs a barrier to disable alias analysis. */
  lj_ir_call(J, IRCALL_memcpy, trdst, trsrc, trlen);
  emitir(IRT(IR_XBAR, IRT_NIL), 0, 0);
}
  255. /* Generate unrolled fill list, from highest to lowest step size/alignment. */
  256. static MSize crec_fill_unroll(CRecMemList *ml, CTSize len, CTSize step)
  257. {
  258. CTSize ofs = 0;
  259. MSize mlp = 0;
  260. IRType tp = IRT_U8 + 2*lj_fls(step);
  261. do {
  262. while (ofs + step <= len) {
  263. if (mlp >= CREC_COPY_MAXUNROLL) return 0;
  264. ml[mlp].ofs = ofs;
  265. ml[mlp].tp = tp;
  266. mlp++;
  267. ofs += step;
  268. }
  269. step >>= 1;
  270. tp -= 2;
  271. } while (ofs < len);
  272. return mlp;
  273. }
  274. /*
  275. ** Emit stores for fill list.
  276. ** LJ_TARGET_UNALIGNED: may emit unaligned stores (not marked as such).
  277. */
  278. static void crec_fill_emit(jit_State *J, CRecMemList *ml, MSize mlp,
  279. TRef trdst, TRef trfill)
  280. {
  281. MSize i;
  282. for (i = 0; i < mlp; i++) {
  283. TRef trofs = lj_ir_kintp(J, ml[i].ofs);
  284. TRef trdptr = emitir(IRT(IR_ADD, IRT_PTR), trdst, trofs);
  285. emitir(IRT(IR_XSTORE, ml[i].tp), trdptr, trfill);
  286. }
  287. }
/* Optimized memory fill.
** Emits an unrolled sequence of stores for a constant-length fill, scattering
** the 8 bit fill value across wider store types; otherwise calls memset.
*/
static void crec_fill(jit_State *J, TRef trdst, TRef trlen, TRef trfill,
		      CTSize step)
{
  if (tref_isk(trlen)) {  /* Length must be constant. */
    CRecMemList ml[CREC_FILL_MAXUNROLL];
    MSize mlp;
    CTSize len = (CTSize)IR(tref_ref(trlen))->i;
    if (len == 0) return;  /* Shortcut. */
    if (LJ_TARGET_UNALIGNED || step >= CTSIZE_PTR)
      step = CTSIZE_PTR;
    if (step * CREC_FILL_MAXUNROLL < len) goto fallback;
    mlp = crec_fill_unroll(ml, len, step);
    if (!mlp) goto fallback;
    /* Mask the fill value to 8 bits: constants fold to a masked constant,
    ** non-constants need the truncation before being scattered. A plain
    ** non-constant U8 store truncates anyway, so no conversion needed then.
    */
    if (tref_isk(trfill) || ml[0].tp != IRT_U8)
      trfill = emitconv(trfill, IRT_INT, IRT_U8, 0);
    if (ml[0].tp != IRT_U8) {  /* Scatter U8 to U16/U32/U64. */
      if (CTSIZE_PTR == 8 && ml[0].tp == IRT_U64) {
	if (tref_isk(trfill))  /* Pointless on x64 with zero-extended regs. */
	  trfill = emitconv(trfill, IRT_U64, IRT_U32, 0);
	trfill = emitir(IRT(IR_MUL, IRT_U64), trfill,
			lj_ir_kint64(J, U64x(01010101,01010101)));
      } else {
	/* Replicate the byte across 16 or 32 bits. */
	trfill = emitir(IRTI(IR_MUL), trfill,
		   lj_ir_kint(J, ml[0].tp == IRT_U16 ? 0x0101 : 0x01010101));
      }
    }
    crec_fill_emit(J, ml, mlp, trdst, trfill);
  } else {
fallback:
    /* Call memset. Always needs a barrier to disable alias analysis. */
    lj_ir_call(J, IRCALL_memset, trdst, trfill, trlen);  /* Note: arg order! */
  }
  emitir(IRT(IR_XBAR, IRT_NIL), 0, 0);
}
  323. /* -- Convert C type to C type -------------------------------------------- */
  324. /*
  325. ** This code mirrors the code in lj_cconv.c. It performs the same steps
  326. ** for the trace recorder that lj_cconv.c does for the interpreter.
  327. **
  328. ** One major difference is that we can get away with much fewer checks
  329. ** here. E.g. checks for casts, constness or correct types can often be
  330. ** omitted, even if they might fail. The interpreter subsequently throws
  331. ** an error, which aborts the trace.
  332. **
  333. ** All operations are specialized to their C types, so the on-trace
  334. ** outcome must be the same as the outcome in the interpreter. If the
  335. ** interpreter doesn't throw an error, then the trace is correct, too.
  336. ** Care must be taken not to generate invalid (temporary) IR or to
  337. ** trigger asserts.
  338. */
  339. /* Determine whether a passed number or cdata number is non-zero. */
  340. static int crec_isnonzero(CType *s, void *p)
  341. {
  342. if (p == (void *)0)
  343. return 0;
  344. if (p == (void *)1)
  345. return 1;
  346. if ((s->info & CTF_FP)) {
  347. if (s->size == sizeof(float))
  348. return (*(float *)p != 0);
  349. else
  350. return (*(double *)p != 0);
  351. } else {
  352. if (s->size == 1)
  353. return (*(uint8_t *)p != 0);
  354. else if (s->size == 2)
  355. return (*(uint16_t *)p != 0);
  356. else if (s->size == 4)
  357. return (*(uint32_t *)p != 0);
  358. else
  359. return (*(uint64_t *)p != 0);
  360. }
  361. }
/* Convert a value of C type s to C type d, on trace.
** dp is the destination address (0 = return the converted value instead of
** storing it), sp is the source (value or pointer, see note below), svisnz
** conveys whether the interpreter-time source value is non-zero (used to
** specialize bool conversions). Returns the value TRef when dp == 0.
*/
static TRef crec_ct_ct(jit_State *J, CType *d, CType *s, TRef dp, TRef sp,
		       void *svisnz)
{
  IRType dt = crec_ct2irt(ctype_ctsG(J2G(J)), d);
  IRType st = crec_ct2irt(ctype_ctsG(J2G(J)), s);
  CTSize dsize = d->size, ssize = s->size;
  CTInfo dinfo = d->info, sinfo = s->info;

  if (ctype_type(dinfo) > CT_MAYCONVERT || ctype_type(sinfo) > CT_MAYCONVERT)
    goto err_conv;

  /*
  ** Note: Unlike lj_cconv_ct_ct(), sp holds the _value_ of pointers and
  ** numbers up to 8 bytes. Otherwise sp holds a pointer.
  */

  switch (cconv_idx2(dinfo, sinfo)) {
  /* Destination is a bool. */
  case CCX(B, B):
    goto xstore;  /* Source operand is already normalized. */
  case CCX(B, I):
  case CCX(B, F):
    if (st != IRT_CDATA) {
      /* Specialize to the result of a comparison against 0. */
      TRef zero = (st == IRT_NUM || st == IRT_FLOAT) ? lj_ir_knum(J, 0) :
		  (st == IRT_I64 || st == IRT_U64) ? lj_ir_kint64(J, 0) :
		  lj_ir_kint(J, 0);
      int isnz = crec_isnonzero(s, svisnz);
      emitir(IRTG(isnz ? IR_NE : IR_EQ, st), sp, zero);
      sp = lj_ir_kint(J, isnz);
      goto xstore;
    }
    goto err_nyi;

  /* Destination is an integer. */
  case CCX(I, B):
  case CCX(I, I):
  conv_I_I:
    if (dt == IRT_CDATA || st == IRT_CDATA) goto err_nyi;
    /* Extend 32 to 64 bit integer. */
    if (dsize == 8 && ssize < 8 && !(LJ_64 && (sinfo & CTF_UNSIGNED)))
      sp = emitconv(sp, dt, ssize < 4 ? IRT_INT : st,
		    (sinfo & CTF_UNSIGNED) ? 0 : IRCONV_SEXT);
    else if (dsize < 8 && ssize == 8)  /* Truncate from 64 bit integer. */
      sp = emitconv(sp, dsize < 4 ? IRT_INT : dt, st, 0);
    else if (st == IRT_INT)
      sp = lj_opt_narrow_toint(J, sp);
  xstore:
    if (dt == IRT_I64 || dt == IRT_U64) lj_needsplit(J);
    if (dp == 0) return sp;  /* Value-only conversion requested. */
    emitir(IRT(IR_XSTORE, dt), dp, sp);
    break;
  case CCX(I, C):
    sp = emitir(IRT(IR_XLOAD, st), sp, 0);  /* Load re. */
    /* fallthrough */
  case CCX(I, F):
    if (dt == IRT_CDATA || st == IRT_CDATA) goto err_nyi;
    sp = emitconv(sp, dsize < 4 ? IRT_INT : dt, st, IRCONV_ANY);
    goto xstore;
  case CCX(I, P):
  case CCX(I, A):
    /* Reinterpret the pointer as an unsigned pointer-sized integer. */
    sinfo = CTINFO(CT_NUM, CTF_UNSIGNED);
    ssize = CTSIZE_PTR;
    st = IRT_UINTP;
    if (((dsize ^ ssize) & 8) == 0) {  /* Must insert no-op type conversion. */
      sp = emitconv(sp, dsize < 4 ? IRT_INT : dt, IRT_PTR, 0);
      goto xstore;
    }
    goto conv_I_I;

  /* Destination is a floating-point number. */
  case CCX(F, B):
  case CCX(F, I):
  conv_F_I:
    if (dt == IRT_CDATA || st == IRT_CDATA) goto err_nyi;
    sp = emitconv(sp, dt, ssize < 4 ? IRT_INT : st, 0);
    goto xstore;
  case CCX(F, C):
    sp = emitir(IRT(IR_XLOAD, st), sp, 0);  /* Load re. */
    /* fallthrough */
  case CCX(F, F):
  conv_F_F:
    if (dt == IRT_CDATA || st == IRT_CDATA) goto err_nyi;
    if (dt != st) sp = emitconv(sp, dt, st, 0);
    goto xstore;

  /* Destination is a complex number. */
  case CCX(C, I):
  case CCX(C, F):
    {  /* Clear im. */
      TRef ptr = emitir(IRT(IR_ADD, IRT_PTR), dp, lj_ir_kintp(J, (dsize >> 1)));
      emitir(IRT(IR_XSTORE, dt), ptr, lj_ir_knum(J, 0));
    }
    /* Convert to re. */
    if ((sinfo & CTF_FP)) goto conv_F_F; else goto conv_F_I;

  case CCX(C, C):
    if (dt == IRT_CDATA || st == IRT_CDATA) goto err_nyi;
    {
      TRef re, im, ptr;
      re = emitir(IRT(IR_XLOAD, st), sp, 0);
      ptr = emitir(IRT(IR_ADD, IRT_PTR), sp, lj_ir_kintp(J, (ssize >> 1)));
      im = emitir(IRT(IR_XLOAD, st), ptr, 0);
      if (dt != st) {
	re = emitconv(re, dt, st, 0);
	im = emitconv(im, dt, st, 0);
      }
      emitir(IRT(IR_XSTORE, dt), dp, re);
      ptr = emitir(IRT(IR_ADD, IRT_PTR), dp, lj_ir_kintp(J, (dsize >> 1)));
      emitir(IRT(IR_XSTORE, dt), ptr, im);
    }
    break;

  /* Destination is a vector. */
  case CCX(V, I):
  case CCX(V, F):
  case CCX(V, C):
  case CCX(V, V):
    goto err_nyi;

  /* Destination is a pointer. */
  case CCX(P, P):
  case CCX(P, A):
  case CCX(P, S):
    /* There are only 32 bit pointers/addresses on 32 bit machines.
    ** Also ok on x64, since all 32 bit ops clear the upper part of the reg.
    */
    goto xstore;
  case CCX(P, I):
    if (st == IRT_CDATA) goto err_nyi;
    if (!LJ_64 && ssize == 8)  /* Truncate from 64 bit integer. */
      sp = emitconv(sp, IRT_U32, st, 0);
    goto xstore;
  case CCX(P, F):
    if (st == IRT_CDATA) goto err_nyi;
    /* The signed conversion is cheaper. x64 really has 47 bit pointers. */
    sp = emitconv(sp, (LJ_64 && dsize == 8) ? IRT_I64 : IRT_U32,
		  st, IRCONV_ANY);
    goto xstore;

  /* Destination is an array. */
  case CCX(A, A):
  /* Destination is a struct/union. */
  case CCX(S, S):
    if (dp == 0) goto err_conv;  /* Aggregates need a destination. */
    crec_copy(J, dp, sp, lj_ir_kint(J, dsize), d);
    break;

  default:
  err_conv:
  err_nyi:
    lj_trace_err(J, LJ_TRERR_NYICONV);
    break;
  }
  return 0;
}
/* -- Convert C type to TValue (load) ------------------------------------- */

/* Load a value of C type s (id sid) from address sp and convert it to a
** TValue representation on trace: numbers stay unboxed where possible,
** 64 bit integers, pointers, refs and complex values are boxed as cdata.
*/
static TRef crec_tv_ct(jit_State *J, CType *s, CTypeID sid, TRef sp)
{
  CTState *cts = ctype_ctsG(J2G(J));
  IRType t = crec_ct2irt(cts, s);
  CTInfo sinfo = s->info;
  if (ctype_isnum(sinfo)) {
    TRef tr;
    if (t == IRT_CDATA)
      goto err_nyi;  /* NYI: copyval of >64 bit integers. */
    tr = emitir(IRT(IR_XLOAD, t), sp, 0);
    if (t == IRT_FLOAT || t == IRT_U32) {  /* Keep uint32_t/float as numbers. */
      return emitconv(tr, IRT_NUM, t, 0);
    } else if (t == IRT_I64 || t == IRT_U64) {  /* Box 64 bit integer. */
      sp = tr;
      lj_needsplit(J);
    } else if ((sinfo & CTF_BOOL)) {
      /* Assume not equal to zero. Fixup and emit pending guard later. */
      lj_ir_set(J, IRTGI(IR_NE), tr, lj_ir_kint(J, 0));
      J->postproc = LJ_POST_FIXGUARD;
      return TREF_TRUE;
    } else {
      return tr;  /* Small integers fit directly into a TValue. */
    }
  } else if (ctype_isptr(sinfo) || ctype_isenum(sinfo)) {
    sp = emitir(IRT(IR_XLOAD, t), sp, 0);  /* Box pointers and enums. */
  } else if (ctype_isrefarray(sinfo) || ctype_isstruct(sinfo)) {
    cts->L = J->L;
    sid = lj_ctype_intern(cts, CTINFO_REF(sid), CTSIZE_PTR);  /* Create ref. */
  } else if (ctype_iscomplex(sinfo)) {  /* Unbox/box complex. */
    ptrdiff_t esz = (ptrdiff_t)(s->size >> 1);  /* Size of one component. */
    TRef ptr, tr1, tr2, dp;
    dp = emitir(IRTG(IR_CNEW, IRT_CDATA), lj_ir_kint(J, sid), TREF_NIL);
    tr1 = emitir(IRT(IR_XLOAD, t), sp, 0);
    ptr = emitir(IRT(IR_ADD, IRT_PTR), sp, lj_ir_kintp(J, esz));
    tr2 = emitir(IRT(IR_XLOAD, t), ptr, 0);
    ptr = emitir(IRT(IR_ADD, IRT_PTR), dp, lj_ir_kintp(J, sizeof(GCcdata)));
    emitir(IRT(IR_XSTORE, t), ptr, tr1);
    ptr = emitir(IRT(IR_ADD, IRT_PTR), dp, lj_ir_kintp(J, sizeof(GCcdata)+esz));
    emitir(IRT(IR_XSTORE, t), ptr, tr2);
    return dp;
  } else {
    /* NYI: copyval of vectors. */
  err_nyi:
    lj_trace_err(J, LJ_TRERR_NYICONV);
  }
  /* Box pointer, ref, enum or 64 bit integer. */
  return emitir(IRTG(IR_CNEWI, IRT_CDATA), lj_ir_kint(J, sid), sp);
}
/* -- Convert TValue to C type (store) ------------------------------------ */

/* Convert a TValue (IR ref sp, interpreter value sval) to destination C type
** d, storing it at dp (or returning the value when dp == 0, via crec_ct_ct).
** Determines the effective source C type from the TValue's tag/contents and
** specializes the trace accordingly.
*/
static TRef crec_ct_tv(jit_State *J, CType *d, TRef dp, TRef sp, cTValue *sval)
{
  CTState *cts = ctype_ctsG(J2G(J));
  CTypeID sid = CTID_P_VOID;
  void *svisnz = 0;
  CType *s;
  if (LJ_LIKELY(tref_isinteger(sp))) {
    sid = CTID_INT32;
    svisnz = (void *)(intptr_t)(tvisint(sval)?(intV(sval)!=0):!tviszero(sval));
  } else if (tref_isnum(sp)) {
    sid = CTID_DOUBLE;
    svisnz = (void *)(intptr_t)(tvisint(sval)?(intV(sval)!=0):!tviszero(sval));
  } else if (tref_isbool(sp)) {
    sp = lj_ir_kint(J, tref_istrue(sp) ? 1 : 0);
    sid = CTID_BOOL;
  } else if (tref_isnil(sp)) {
    sp = lj_ir_kptr(J, NULL);  /* nil converts to a NULL pointer. */
  } else if (tref_isudata(sp)) {
    GCudata *ud = udataV(sval);
    if (ud->udtype == UDTYPE_IO_FILE || ud->udtype == UDTYPE_BUFFER) {
      /* Guard on the udata subtype, then load the wrapped pointer. */
      TRef tr = emitir(IRT(IR_FLOAD, IRT_U8), sp, IRFL_UDATA_UDTYPE);
      emitir(IRTGI(IR_EQ), tr, lj_ir_kint(J, ud->udtype));
      sp = emitir(IRT(IR_FLOAD, IRT_PTR), sp,
		  ud->udtype == UDTYPE_IO_FILE ? IRFL_UDATA_FILE :
						 IRFL_SBUF_R);
    } else {
      /* Other udata converts to a pointer to its payload. */
      sp = emitir(IRT(IR_ADD, IRT_PTR), sp, lj_ir_kintp(J, sizeof(GCudata)));
    }
  } else if (tref_isstr(sp)) {
    if (ctype_isenum(d->info)) {  /* Match string against enum constant. */
      GCstr *str = strV(sval);
      CTSize ofs;
      CType *cct = lj_ctype_getfield(cts, d, str, &ofs);
      /* Specialize to the name of the enum constant. */
      emitir(IRTG(IR_EQ, IRT_STR), sp, lj_ir_kstr(J, str));
      if (cct && ctype_isconstval(cct->info)) {
	lj_assertJ(ctype_child(cts, cct)->size == 4,
		   "only 32 bit const supported");  /* NYI */
	svisnz = (void *)(intptr_t)(ofs != 0);
	sp = lj_ir_kint(J, (int32_t)ofs);
	sid = ctype_cid(cct->info);
      }  /* else: interpreter will throw. */
    } else if (ctype_isrefarray(d->info)) {  /* Copy string to array. */
      lj_trace_err(J, LJ_TRERR_BADTYPE);  /* NYI */
    } else {  /* Otherwise pass the string data as a const char[]. */
      /* Don't use STRREF. It folds with SNEW, which loses the trailing NUL. */
      sp = emitir(IRT(IR_ADD, IRT_PTR), sp, lj_ir_kintp(J, sizeof(GCstr)));
      sid = CTID_A_CCHAR;
    }
  } else if (tref_islightud(sp)) {
#if LJ_64
    lj_trace_err(J, LJ_TRERR_NYICONV);
#endif
  } else {  /* NYI: tref_istab(sp). */
    IRType t;
    sid = argv2cdata(J, sp, sval)->ctypeid;
    s = ctype_raw(cts, sid);
    svisnz = cdataptr(cdataV(sval));
    if (ctype_isfunc(s->info)) {
      /* Functions convert to a pointer to the function. */
      sid = lj_ctype_intern(cts, CTINFO(CT_PTR, CTALIGN_PTR|sid), CTSIZE_PTR);
      s = ctype_get(cts, sid);
      t = IRT_PTR;
    } else {
      t = crec_ct2irt(cts, s);
    }
    if (ctype_isptr(s->info)) {
      sp = emitir(IRT(IR_FLOAD, t), sp, IRFL_CDATA_PTR);
      if (ctype_isref(s->info)) {
	/* Dereference a reference: follow to the referenced type. */
	svisnz = *(void **)svisnz;
	s = ctype_rawchild(cts, s);
	if (ctype_isenum(s->info)) s = ctype_child(cts, s);
	t = crec_ct2irt(cts, s);
      } else {
	goto doconv;
      }
    } else if (t == IRT_I64 || t == IRT_U64) {
      sp = emitir(IRT(IR_FLOAD, t), sp, IRFL_CDATA_INT64);
      lj_needsplit(J);
      goto doconv;
    } else if (t == IRT_INT || t == IRT_U32) {
      if (ctype_isenum(s->info)) s = ctype_child(cts, s);
      sp = emitir(IRT(IR_FLOAD, t), sp, IRFL_CDATA_INT);
      goto doconv;
    } else {
      /* Other cdata converts to a pointer to its payload. */
      sp = emitir(IRT(IR_ADD, IRT_PTR), sp, lj_ir_kintp(J, sizeof(GCcdata)));
    }
    if (ctype_isnum(s->info) && t != IRT_CDATA)
      sp = emitir(IRT(IR_XLOAD, t), sp, 0);  /* Load number value. */
    goto doconv;
  }
  s = ctype_get(cts, sid);
doconv:
  if (ctype_isenum(d->info)) d = ctype_child(cts, d);
  return crec_ct_ct(J, d, s, dp, sp, svisnz);
}
  652. /* -- C data metamethods -------------------------------------------------- */
/* This would be rather difficult in FOLD, so do it here:
** (base+k)+(idx*sz)+ofs ==> (base+idx*sz)+(ofs+k)
** (base+(idx+k)*sz)+ofs ==> (base+idx*sz)+(ofs+k*sz)
**
** Reassociates a constant addend out of tr into *ofsp (scaled by sz),
** so the constant part of an address can be folded into the final
** displacement instead of being recomputed per access.
*/
static TRef crec_reassoc_ofs(jit_State *J, TRef tr, ptrdiff_t *ofsp, MSize sz)
{
  IRIns *ir = IR(tref_ref(tr));
  /* Only rewrite if FOLD is enabled and the second operand is a constant. */
  if (LJ_LIKELY(J->flags & JIT_F_OPT_FOLD) && irref_isk(ir->op2) &&
      (ir->o == IR_ADD || ir->o == IR_ADDOV || ir->o == IR_SUBOV)) {
    IRIns *irk = IR(ir->op2);
    ptrdiff_t k;
    if (LJ_64 && irk->o == IR_KINT64)
      k = (ptrdiff_t)ir_kint64(irk)->u64 * sz;
    else
      k = (ptrdiff_t)irk->i * sz;
    /* SUBOV subtracts its constant, so fold with the opposite sign. */
    if (ir->o == IR_SUBOV) *ofsp -= k; else *ofsp += k;
    tr = ir->op1;  /* Not a TRef, but the caller doesn't care. */
  }
  return tr;
}
/* Tailcall to function. Sets up the recorder's frame slots so that the
** metamethod function in *tv is (tail-)called in place of the fast function
** currently being recorded.
*/
static void crec_tailcall(jit_State *J, RecordFFData *rd, cTValue *tv)
{
  TRef kfunc = lj_ir_kfunc(J, funcV(tv));
#if LJ_FR2
  /* Two-slot frame layout: function and frame marker in separate slots. */
  J->base[-2] = kfunc;
  J->base[-1] = TREF_FRAME;
#else
  /* One-slot frame layout: frame marker is OR'ed into the function TRef. */
  J->base[-1] = kfunc | TREF_FRAME;
#endif
  rd->nres = -1;  /* Pending tailcall. */
}
/* Record ctype __index/__newindex metamethods.
** rd->data == 0 selects __index, nonzero selects __newindex.
** Aborts the trace (LJ_TRERR_BADTYPE) for unsupported metamethod kinds.
*/
static void crec_index_meta(jit_State *J, CTState *cts, CType *ct,
			    RecordFFData *rd)
{
  CTypeID id = ctype_typeid(cts, ct);
  cTValue *tv = lj_ctype_meta(cts, id, rd->data ? MM_newindex : MM_index);
  if (!tv)
    lj_trace_err(J, LJ_TRERR_BADTYPE);
  if (tvisfunc(tv)) {
    /* Function metamethod: record as a pending tailcall. */
    crec_tailcall(J, rd, tv);
  } else if (rd->data == 0 && tvistab(tv) && tref_isstr(J->base[1])) {
    /* Specialize to result of __index lookup. */
    cTValue *o = lj_tab_get(J->L, tabV(tv), &rd->argv[1]);
    J->base[0] = lj_record_constify(J, o);
    if (!J->base[0])
      lj_trace_err(J, LJ_TRERR_BADTYPE);
    /* Always specialize to the key. */
    emitir(IRTG(IR_EQ, IRT_STR), J->base[1], lj_ir_kstr(J, strV(&rd->argv[1])));
  } else {
    /* NYI: resolving of non-function metamethods. */
    /* NYI: non-string keys for __index table. */
    /* NYI: stores to __newindex table. */
    lj_trace_err(J, LJ_TRERR_BADTYPE);
  }
}
/* Record bitfield load/store.
** ptr points at the storage unit containing the bitfield; info carries the
** packed bit position/size/container-size (ctype_bitpos/bitbsz/bitcsz).
** rd->data == 0 records a load (__index), nonzero a store (__newindex).
*/
static void crec_index_bf(jit_State *J, RecordFFData *rd, TRef ptr, CTInfo info)
{
  /* Map container size + signedness to the matching integer IRType. */
  IRType t = IRT_I8 + 2*lj_fls(ctype_bitcsz(info)) + ((info&CTF_UNSIGNED)?1:0);
  TRef tr = emitir(IRT(IR_XLOAD, t), ptr, 0);
  CTSize pos = ctype_bitpos(info), bsz = ctype_bitbsz(info), shift = 32 - bsz;
  lj_assertJ(t <= IRT_U32, "only 32 bit bitfields supported");  /* NYI */
  if (rd->data == 0) {  /* __index metamethod. */
    if ((info & CTF_BOOL)) {
      /* Boolean bitfield: test the single bit. */
      tr = emitir(IRTI(IR_BAND), tr, lj_ir_kint(J, (int32_t)((1u << pos))));
      /* Assume not equal to zero. Fixup and emit pending guard later. */
      lj_ir_set(J, IRTGI(IR_NE), tr, lj_ir_kint(J, 0));
      J->postproc = LJ_POST_FIXGUARD;
      tr = TREF_TRUE;
    } else if (!(info & CTF_UNSIGNED)) {
      /* Signed: shift left then arithmetic shift right to sign-extend. */
      tr = emitir(IRTI(IR_BSHL), tr, lj_ir_kint(J, shift - pos));
      tr = emitir(IRTI(IR_BSAR), tr, lj_ir_kint(J, shift));
    } else {
      /* Unsigned: shift down and mask. */
      lj_assertJ(bsz < 32, "unexpected full bitfield index");
      tr = emitir(IRTI(IR_BSHR), tr, lj_ir_kint(J, pos));
      tr = emitir(IRTI(IR_BAND), tr, lj_ir_kint(J, (int32_t)((1u << bsz)-1)));
      /* We can omit the U32 to NUM conversion, since bsz < 32. */
    }
    J->base[0] = tr;
  } else {  /* __newindex metamethod. */
    CTState *cts = ctype_ctsG(J2G(J));
    CType *ct = ctype_get(cts,
			  (info & CTF_BOOL) ? CTID_BOOL :
			  (info & CTF_UNSIGNED) ? CTID_UINT32 : CTID_INT32);
    int32_t mask = (int32_t)(((1u << bsz)-1) << pos);
    TRef sp = crec_ct_tv(J, ct, 0, J->base[2], &rd->argv[2]);
    sp = emitir(IRTI(IR_BSHL), sp, lj_ir_kint(J, pos));
    /* Use of the target type avoids forwarding conversions. */
    sp = emitir(IRT(IR_BAND, t), sp, lj_ir_kint(J, mask));
    /* Read-modify-write: clear the field bits, then OR in the new value. */
    tr = emitir(IRT(IR_BAND, t), tr, lj_ir_kint(J, (int32_t)~mask));
    tr = emitir(IRT(IR_BOR, t), tr, sp);
    emitir(IRT(IR_XSTORE, t), ptr, tr);
    rd->nres = 0;
    J->needsnap = 1;
  }
}
/* Record cdata indexing: handles numeric indices (pointer/array element
** access), cdata integer indices, and string keys (struct fields, complex
** .re/.im, enum constants via constructor types). Falls back to
** crec_index_meta() when no direct access can be resolved.
** rd->data == 0 records __index, nonzero records __newindex.
*/
void LJ_FASTCALL recff_cdata_index(jit_State *J, RecordFFData *rd)
{
  TRef idx, ptr = J->base[0];
  ptrdiff_t ofs = sizeof(GCcdata);  /* Payload starts after the GC header. */
  GCcdata *cd = argv2cdata(J, ptr, &rd->argv[0]);
  CTState *cts = ctype_ctsG(J2G(J));
  CType *ct = ctype_raw(cts, cd->ctypeid);
  CTypeID sid = 0;  /* Resolved element/field ctype; 0 = unresolved. */
  /* Resolve pointer or reference for cdata object. */
  if (ctype_isptr(ct->info)) {
    IRType t = (LJ_64 && ct->size == 8) ? IRT_P64 : IRT_P32;
    if (ctype_isref(ct->info)) ct = ctype_rawchild(cts, ct);
    ptr = emitir(IRT(IR_FLOAD, t), ptr, IRFL_CDATA_PTR);
    ofs = 0;  /* Pointer already addresses the payload. */
    ptr = crec_reassoc_ofs(J, ptr, &ofs, 1);
  }
again:
  idx = J->base[1];
  if (tref_isnumber(idx)) {
    idx = lj_opt_narrow_cindex(J, idx);
    if (ctype_ispointer(ct->info)) {
      CTSize sz;
  integer_key:
      /* Complex numbers index their two components; mask idx to 0/1. */
      if ((ct->info & CTF_COMPLEX))
	idx = emitir(IRT(IR_BAND, IRT_INTP), idx, lj_ir_kintp(J, 1));
      sz = lj_ctype_size(cts, (sid = ctype_cid(ct->info)));
      idx = crec_reassoc_ofs(J, idx, &ofs, sz);
#if LJ_TARGET_ARM || LJ_TARGET_PPC
      /* Hoist base add to allow fusion of index/shift into operands. */
      if (LJ_LIKELY(J->flags & JIT_F_OPT_LOOP) && ofs
#if LJ_TARGET_ARM
	  && (sz == 1 || sz == 4)
#endif
	  ) {
	ptr = emitir(IRT(IR_ADD, IRT_PTR), ptr, lj_ir_kintp(J, ofs));
	ofs = 0;
      }
#endif
      idx = emitir(IRT(IR_MUL, IRT_INTP), idx, lj_ir_kintp(J, sz));
      ptr = emitir(IRT(IR_ADD, IRT_PTR), idx, ptr);
    }
  } else if (tref_iscdata(idx)) {
    /* Cdata key: load its integer value, widen/convert, then treat as index. */
    GCcdata *cdk = cdataV(&rd->argv[1]);
    CType *ctk = ctype_raw(cts, cdk->ctypeid);
    IRType t = crec_ct2irt(cts, ctk);
    if (ctype_ispointer(ct->info) && t >= IRT_I8 && t <= IRT_U64) {
      if (ctk->size == 8) {
	idx = emitir(IRT(IR_FLOAD, t), idx, IRFL_CDATA_INT64);
      } else if (ctk->size == 4) {
	idx = emitir(IRT(IR_FLOAD, t), idx, IRFL_CDATA_INT);
      } else {
	idx = emitir(IRT(IR_ADD, IRT_PTR), idx,
		     lj_ir_kintp(J, sizeof(GCcdata)));
	idx = emitir(IRT(IR_XLOAD, t), idx, 0);
      }
      /* Sign-extend sub-pointer-size signed keys on 64 bit targets. */
      if (LJ_64 && ctk->size < sizeof(intptr_t) && !(ctk->info & CTF_UNSIGNED))
	idx = emitconv(idx, IRT_INTP, IRT_INT, IRCONV_SEXT);
      /* Truncate 64 bit keys on 32 bit targets (needs SPLIT pass). */
      if (!LJ_64 && ctk->size > sizeof(intptr_t)) {
	idx = emitconv(idx, IRT_INTP, t, 0);
	lj_needsplit(J);
      }
      goto integer_key;
    }
  } else if (tref_isstr(idx)) {
    GCstr *name = strV(&rd->argv[1]);
    /* Constructor (ctype) object: resolve to the constructed type. */
    if (cd && cd->ctypeid == CTID_CTYPEID)
      ct = ctype_raw(cts, crec_constructor(J, cd, ptr));
    if (ctype_isstruct(ct->info)) {
      CTSize fofs;
      CType *fct;
      fct = lj_ctype_getfield(cts, ct, name, &fofs);
      if (fct) {
	ofs += (ptrdiff_t)fofs;
	/* Always specialize to the field name. */
	emitir(IRTG(IR_EQ, IRT_STR), idx, lj_ir_kstr(J, name));
	if (ctype_isconstval(fct->info)) {
	  /* Constant field (e.g. enum member stored in fct->size). */
	  if (fct->size >= 0x80000000u &&
	      (ctype_child(cts, fct)->info & CTF_UNSIGNED)) {
	    /* Doesn't fit an int32_t constant; use a number constant. */
	    J->base[0] = lj_ir_knum(J, (lua_Number)(uint32_t)fct->size);
	    return;
	  }
	  J->base[0] = lj_ir_kint(J, (int32_t)fct->size);
	  return;  /* Interpreter will throw for newindex. */
	} else if (ctype_isbitfield(fct->info)) {
	  if (ofs)
	    ptr = emitir(IRT(IR_ADD, IRT_PTR), ptr, lj_ir_kintp(J, ofs));
	  crec_index_bf(J, rd, ptr, fct->info);
	  return;
	} else {
	  lj_assertJ(ctype_isfield(fct->info), "field expected");
	  sid = ctype_cid(fct->info);
	}
      }
    } else if (ctype_iscomplex(ct->info)) {
      /* Complex number: only "re" and "im" pseudo-fields are supported. */
      if (name->len == 2 &&
	  ((strdata(name)[0] == 'r' && strdata(name)[1] == 'e') ||
	   (strdata(name)[0] == 'i' && strdata(name)[1] == 'm'))) {
	/* Always specialize to the field name. */
	emitir(IRTG(IR_EQ, IRT_STR), idx, lj_ir_kstr(J, name));
	if (strdata(name)[0] == 'i') ofs += (ct->size >> 1);
	sid = ctype_cid(ct->info);
      }
    }
  }
  if (!sid) {
    if (ctype_isptr(ct->info)) {  /* Automatically perform '->'. */
      CType *cct = ctype_rawchild(cts, ct);
      if (ctype_isstruct(cct->info)) {
	ct = cct;
	cd = NULL;
	if (tref_isstr(idx)) goto again;  /* Retry as struct field access. */
      }
    }
    crec_index_meta(J, cts, ct, rd);
    return;
  }
  if (ofs)
    ptr = emitir(IRT(IR_ADD, IRT_PTR), ptr, lj_ir_kintp(J, ofs));
  /* Resolve reference for field. */
  ct = ctype_get(cts, sid);
  if (ctype_isref(ct->info)) {
    ptr = emitir(IRT(IR_XLOAD, IRT_PTR), ptr, 0);
    sid = ctype_cid(ct->info);
    ct = ctype_get(cts, sid);
  }
  while (ctype_isattrib(ct->info))
    ct = ctype_child(cts, ct);  /* Skip attributes. */
  if (rd->data == 0) {  /* __index metamethod. */
    J->base[0] = crec_tv_ct(J, ct, sid, ptr);
  } else {  /* __newindex metamethod. */
    rd->nres = 0;
    J->needsnap = 1;
    crec_ct_tv(J, ct, ptr, J->base[2], &rd->argv[2]);
  }
}
/* Record setting a finalizer.
** trcd is the cdata object; fin is the finalizer value (GC object, nil to
** clear, anything else aborts the trace). trfin may be 0, in which case a
** constant pointer to the finalizer's GC value is used.
*/
static void crec_finalizer(jit_State *J, TRef trcd, TRef trfin, cTValue *fin)
{
  if (tvisgcv(fin)) {
    if (!trfin) trfin = lj_ir_kptr(J, gcval(fin));
  } else if (tvisnil(fin)) {
    trfin = lj_ir_kptr(J, NULL);  /* nil clears the finalizer. */
  } else {
    lj_trace_err(J, LJ_TRERR_BADTYPE);
  }
  /* Pass the finalizer's internal type tag so the call can rebuild the TValue. */
  lj_ir_call(J, IRCALL_lj_cdata_setfin, trcd,
	     trfin, lj_ir_kint(J, (int32_t)itype(fin)));
  J->needsnap = 1;
}
/* Record cdata allocation (ffi.new / constructor call path).
** Handles: boxed pointer/int via CNEWI; VLA/VLS with runtime size; bulk
** zero-fill for large aggregates; unrolled element-wise init for small
** arrays; field-wise init for structs/unions; and scalar single init.
** Aborts with LJ_TRERR_NYICONV for initializations it cannot record.
*/
static void crec_alloc(jit_State *J, RecordFFData *rd, CTypeID id)
{
  CTState *cts = ctype_ctsG(J2G(J));
  CTSize sz;
  CTInfo info = lj_ctype_info(cts, id, &sz);
  CType *d = ctype_raw(cts, id);
  TRef trcd, trid = lj_ir_kint(J, id);
  cTValue *fin;
  /* Use special instruction to box pointer or 32/64 bit integer. */
  if (ctype_isptr(info) || (ctype_isinteger(info) && (sz == 4 || sz == 8))) {
    /* Initializer if given, otherwise a zero of the right kind/width. */
    TRef sp = J->base[1] ? crec_ct_tv(J, d, 0, J->base[1], &rd->argv[1]) :
	      ctype_isptr(info) ? lj_ir_kptr(J, NULL) :
	      sz == 4 ? lj_ir_kint(J, 0) :
	      (lj_needsplit(J), lj_ir_kint64(J, 0));
    J->base[0] = emitir(IRTG(IR_CNEWI, IRT_CDATA), trid, sp);
    return;
  } else {
    TRef trsz = TREF_NIL;
    if ((info & CTF_VLA)) {  /* Calculate VLA/VLS size at runtime. */
      CTSize sz0, sz1;
      if (!J->base[1] || J->base[2])
	lj_trace_err(J, LJ_TRERR_NYICONV);  /* NYI: init VLA/VLS. */
      trsz = crec_ct_tv(J, ctype_get(cts, CTID_INT32), 0,
			J->base[1], &rd->argv[1]);
      /* Linear size formula: size(n) = sz0 + n*(sz1-sz0), with overflow
      ** guards on both the multiply and the add.
      */
      sz0 = lj_ctype_vlsize(cts, d, 0);
      sz1 = lj_ctype_vlsize(cts, d, 1);
      trsz = emitir(IRTGI(IR_MULOV), trsz, lj_ir_kint(J, (int32_t)(sz1-sz0)));
      trsz = emitir(IRTGI(IR_ADDOV), trsz, lj_ir_kint(J, (int32_t)sz0));
      J->base[1] = 0;  /* Simplify logic below. */
    } else if (ctype_align(info) > CT_MEMALIGN) {
      trsz = lj_ir_kint(J, sz);  /* Over-aligned: pass explicit size. */
    }
    trcd = emitir(IRTG(IR_CNEW, IRT_CDATA), trid, trsz);
    if (sz > 128 || (info & CTF_VLA)) {
      TRef dp;
      CTSize align;
    special:  /* Only handle bulk zero-fill for large/VLA/VLS types. */
      if (J->base[1])
	lj_trace_err(J, LJ_TRERR_NYICONV);  /* NYI: init large/VLA/VLS types. */
      dp = emitir(IRT(IR_ADD, IRT_PTR), trcd, lj_ir_kintp(J, sizeof(GCcdata)));
      if (trsz == TREF_NIL) trsz = lj_ir_kint(J, sz);
      align = ctype_align(info);
      if (align < CT_MEMALIGN) align = CT_MEMALIGN;
      crec_fill(J, dp, trsz, lj_ir_kint(J, 0), (1u << align));
    } else if (J->base[1] && !J->base[2] &&
	       !lj_cconv_multi_init(cts, d, &rd->argv[1])) {
      /* Exactly one initializer that isn't a multi-value init. */
      goto single_init;
    } else if (ctype_isarray(d->info)) {
      CType *dc = ctype_rawchild(cts, d);  /* Array element type. */
      CTSize ofs, esize = dc->size;
      TRef sp = 0;
      TValue tv;
      TValue *sval = &tv;
      MSize i;
      tv.u64 = 0;
      /* Only unroll scalar/pointer elements up to CREC_FILL_MAXUNROLL. */
      if (!(ctype_isnum(dc->info) || ctype_isptr(dc->info)) ||
	  esize * CREC_FILL_MAXUNROLL < sz)
	goto special;
      for (i = 1, ofs = 0; ofs < sz; ofs += esize) {
	TRef dp = emitir(IRT(IR_ADD, IRT_PTR), trcd,
			 lj_ir_kintp(J, ofs + sizeof(GCcdata)));
	if (J->base[i]) {
	  sp = J->base[i];
	  sval = &rd->argv[i];
	  i++;
	} else if (i != 2) {
	  /* No more initializers: zero-fill remaining elements.
	  ** (i == 2 keeps repeating the single given initializer.)
	  */
	  sp = ctype_isnum(dc->info) ? lj_ir_kint(J, 0) : TREF_NIL;
	}
	crec_ct_tv(J, dc, dp, sp, sval);
      }
    } else if (ctype_isstruct(d->info)) {
      CTypeID fid;
      MSize i = 1;
      if (!J->base[1]) {  /* Handle zero-fill of struct-of-NYI. */
	/* First pass: verify all fields are zero-initializable inline,
	** otherwise fall back to bulk zero-fill.
	*/
	fid = d->sib;
	while (fid) {
	  CType *df = ctype_get(cts, fid);
	  fid = df->sib;
	  if (ctype_isfield(df->info)) {
	    CType *dc;
	    if (!gcref(df->name)) continue;  /* Ignore unnamed fields. */
	    dc = ctype_rawchild(cts, df);  /* Field type. */
	    if (!(ctype_isnum(dc->info) || ctype_isptr(dc->info) ||
		  ctype_isenum(dc->info)))
	      goto special;
	  } else if (!ctype_isconstval(df->info)) {
	    goto special;
	  }
	}
      }
      /* Second pass: initialize each named field in declaration order. */
      fid = d->sib;
      while (fid) {
	CType *df = ctype_get(cts, fid);
	fid = df->sib;
	if (ctype_isfield(df->info)) {
	  CType *dc;
	  TRef sp, dp;
	  TValue tv;
	  TValue *sval = &tv;
	  setintV(&tv, 0);
	  if (!gcref(df->name)) continue;  /* Ignore unnamed fields. */
	  dc = ctype_rawchild(cts, df);  /* Field type. */
	  if (!(ctype_isnum(dc->info) || ctype_isptr(dc->info) ||
		ctype_isenum(dc->info)))
	    lj_trace_err(J, LJ_TRERR_NYICONV);  /* NYI: init aggregates. */
	  if (J->base[i]) {
	    sp = J->base[i];
	    sval = &rd->argv[i];
	    i++;
	  } else {
	    sp = ctype_isptr(dc->info) ? TREF_NIL : lj_ir_kint(J, 0);
	  }
	  dp = emitir(IRT(IR_ADD, IRT_PTR), trcd,
		      lj_ir_kintp(J, df->size + sizeof(GCcdata)));
	  crec_ct_tv(J, dc, dp, sp, sval);
	  if ((d->info & CTF_UNION)) {
	    if (d->size != dc->size)  /* NYI: partial init of union. */
	      lj_trace_err(J, LJ_TRERR_NYICONV);
	    break;  /* A union initializes only its first named field. */
	  }
	} else if (!ctype_isconstval(df->info)) {
	  /* NYI: init bitfields and sub-structures. */
	  lj_trace_err(J, LJ_TRERR_NYICONV);
	}
      }
    } else {
      TRef dp;
    single_init:
      dp = emitir(IRT(IR_ADD, IRT_PTR), trcd, lj_ir_kintp(J, sizeof(GCcdata)));
      if (J->base[1]) {
	crec_ct_tv(J, d, dp, J->base[1], &rd->argv[1]);
      } else {
	TValue tv;
	tv.u64 = 0;
	crec_ct_tv(J, d, dp, lj_ir_kint(J, 0), &tv);  /* Zero-init scalar. */
      }
    }
  }
  J->base[0] = trcd;
  /* Handle __gc metamethod. */
  fin = lj_ctype_meta(cts, id, MM_gc);
  if (fin)
    crec_finalizer(J, trcd, 0, fin);
}
/* Record argument conversions for an FFI C call.
** Converts each Lua argument to its declared (or vararg-inferred) C type,
** applies per-ABI register/alignment fixups (x86 fastcall/thiscall,
** Windows/x86 64 bit reordering, macOS/arm64 vararg marking), and chains
** the converted args into a CARG list for the CALLXS instruction.
** Aborts with LJ_TRERR_NYICALL for unsupported argument types or counts.
*/
static TRef crec_call_args(jit_State *J, RecordFFData *rd,
			   CTState *cts, CType *ct)
{
  TRef args[CCI_NARGS_MAX];
  CTypeID fid;
  MSize i, n;
  TRef tr, *base;
  cTValue *o;
#if LJ_TARGET_X86
#if LJ_ABI_WIN
  TRef *arg0 = NULL, *arg1 = NULL;  /* Placeholder slots for reordering. */
#endif
  int ngpr = 0;  /* Remaining register argument slots for this cconv. */
  if (ctype_cconv(ct->info) == CTCC_THISCALL)
    ngpr = 1;
  else if (ctype_cconv(ct->info) == CTCC_FASTCALL)
    ngpr = 2;
#elif LJ_TARGET_ARM64 && LJ_TARGET_OSX
  int ngpr = CCALL_NARG_GPR;
#endif
  /* Skip initial attributes. */
  fid = ct->sib;
  while (fid) {
    CType *ctf = ctype_get(cts, fid);
    if (!ctype_isattrib(ctf->info)) break;
    fid = ctf->sib;
  }
  args[0] = TREF_NIL;
  for (n = 0, base = J->base+1, o = rd->argv+1; *base; n++, base++, o++) {
    CTypeID did;
    CType *d;
    if (n >= CCI_NARGS_MAX)
      lj_trace_err(J, LJ_TRERR_NYICALL);
    if (fid) {  /* Get argument type from field. */
      CType *ctf = ctype_get(cts, fid);
      fid = ctf->sib;
      lj_assertJ(ctype_isfield(ctf->info), "field expected");
      did = ctype_cid(ctf->info);
    } else {
      if (!(ct->info & CTF_VARARG))
	lj_trace_err(J, LJ_TRERR_NYICALL);  /* Too many arguments. */
#if LJ_TARGET_ARM64 && LJ_TARGET_OSX
      if (ngpr >= 0) {
	ngpr = -1;
	args[n++] = TREF_NIL;  /* Marker for start of varargs. */
	if (n >= CCI_NARGS_MAX)
	  lj_trace_err(J, LJ_TRERR_NYICALL);
      }
#endif
      did = lj_ccall_ctid_vararg(cts, o);  /* Infer vararg type. */
    }
    d = ctype_raw(cts, did);
    /* Only scalar/pointer/enum arguments are supported on traces. */
    if (!(ctype_isnum(d->info) || ctype_isptr(d->info) ||
	  ctype_isenum(d->info)))
      lj_trace_err(J, LJ_TRERR_NYICALL);
    tr = crec_ct_tv(J, d, 0, *base, o);
    if (ctype_isinteger_or_bool(d->info)) {
#if LJ_TARGET_ARM64 && LJ_TARGET_OSX
      if (!ngpr) {
	/* Fixed args passed on the stack use their unpromoted size. */
	if (d->size != lj_ir_type_size[tref_type(tr)]) {
	  lj_assertJ(d->size == 1 || d->size==2, "unexpected size %d", d->size);
	  tr = emitconv(tr, d->size==1 ? IRT_U8 : IRT_U16, tref_type(tr), 0);
	}
      } else
#endif
      if (d->size < 4) {
	/* Promote sub-int integers to int, zero- or sign-extending. */
	if ((d->info & CTF_UNSIGNED))
	  tr = emitconv(tr, IRT_INT, d->size==1 ? IRT_U8 : IRT_U16, 0);
	else
	  tr = emitconv(tr, IRT_INT, d->size==1 ? IRT_I8 : IRT_I16,IRCONV_SEXT);
      }
    } else if (LJ_SOFTFP32 && ctype_isfp(d->info) && d->size > 4) {
      lj_needsplit(J);  /* 64 bit FP args need the SPLIT pass on soft-FP. */
    }
#if LJ_TARGET_X86
    /* 64 bit args must not end up in registers for fastcall/thiscall. */
#if LJ_ABI_WIN
    if (!ctype_isfp(d->info)) {
      /* Sigh, the Windows/x86 ABI allows reordering across 64 bit args. */
      if (tref_typerange(tr, IRT_I64, IRT_U64)) {
	if (ngpr) {
	  arg0 = &args[n]; args[n++] = TREF_NIL; ngpr--;
	  if (ngpr) {
	    arg1 = &args[n]; args[n++] = TREF_NIL; ngpr--;
	  }
	}
      } else {
	/* Fill an earlier placeholder slot with this register-sized arg. */
	if (arg0) { *arg0 = tr; arg0 = NULL; n--; continue; }
	if (arg1) { *arg1 = tr; arg1 = NULL; n--; continue; }
	if (ngpr) ngpr--;
      }
    }
#else
    if (!ctype_isfp(d->info) && ngpr) {
      if (tref_typerange(tr, IRT_I64, IRT_U64)) {
	/* No reordering for other x86 ABIs. Simply add alignment args. */
	do { args[n++] = TREF_NIL; } while (--ngpr);
      } else {
	ngpr--;
      }
    }
#endif
#elif LJ_TARGET_ARM64 && LJ_TARGET_OSX
    if (!ctype_isfp(d->info) && ngpr) {
      ngpr--;
    }
#endif
    args[n] = tr;
  }
  /* Chain the collected arguments into a CARG list. */
  tr = args[0];
  for (i = 1; i < n; i++)
    tr = emitir(IRT(IR_CARG, IRT_NIL), tr, args[i]);
  return tr;
}
/* Create a snapshot for the caller, simulating a 'false' return value.
** Temporarily unwinds the recorder's view to the calling Lua frame, takes a
** snapshot with TREF_FALSE in the result slot, then restores all state.
** Used by crec_call() to fix up boolean C call results later.
*/
static void crec_snap_caller(jit_State *J)
{
  lua_State *L = J->L;
  TValue *base = L->base, *top = L->top;  /* Saved to restore below. */
  const BCIns *pc = J->pc;
  TRef ftr = J->base[-1-LJ_FR2];  /* Saved function slot. */
  ptrdiff_t delta;
  /* Only a plain Lua caller frame can be snapshotted this way. */
  if (!frame_islua(base-1) || J->framedepth <= 0)
    lj_trace_err(J, LJ_TRERR_NYICALL);
  /* Unwind to the caller: delta = frame link + call base from the CALL op. */
  J->pc = frame_pc(base-1); delta = 1+LJ_FR2+bc_a(J->pc[-1]);
  L->top = base; L->base = base - delta;
  J->base[-1-LJ_FR2] = TREF_FALSE;  /* Pretend the call returned false. */
  J->base -= delta; J->baseslot -= (BCReg)delta;
  J->maxslot = (BCReg)delta-LJ_FR2; J->framedepth--;
  lj_snap_add(J);
  /* Restore the recorder and interpreter state exactly as before. */
  L->base = base; L->top = top;
  J->framedepth++; J->maxslot = 1;
  J->base += delta; J->baseslot += (BCReg)delta;
  J->base[-1-LJ_FR2] = ftr; J->pc = pc;
}
/* Record function call.
** Returns 1 if cd is (a pointer to) a C function and the call was recorded,
** 0 if the caller should try metamethods instead. Emits CALLXS with the
** converted argument chain and boxes/converts the result per return type.
*/
static int crec_call(jit_State *J, RecordFFData *rd, GCcdata *cd)
{
  CTState *cts = ctype_ctsG(J2G(J));
  CType *ct = ctype_raw(cts, cd->ctypeid);
  IRType tp = IRT_PTR;
  if (ctype_isptr(ct->info)) {
    tp = (LJ_64 && ct->size == 8) ? IRT_P64 : IRT_P32;
    ct = ctype_rawchild(cts, ct);  /* Call through a function pointer. */
  }
  if (ctype_isfunc(ct->info)) {
    TRef func = emitir(IRT(IR_FLOAD, tp), J->base[0], IRFL_CDATA_PTR);
    CType *ctr = ctype_rawchild(cts, ct);  /* Return type. */
    IRType t = crec_ct2irt(cts, ctr);
    TRef tr;
    TValue tv;
    /* Check for blacklisted C functions that might call a callback. */
    tv.u64 = ((uintptr_t)cdata_getptr(cdataptr(cd), (LJ_64 && tp == IRT_P64) ? 8 : 4) >> 2) | U64x(800000000, 00000000);
    if (tvistrue(lj_tab_get(J->L, cts->miscmap, &tv)))
      lj_trace_err(J, LJ_TRERR_BLACKL);
    if (ctype_isvoid(ctr->info)) {
      t = IRT_NIL;
      rd->nres = 0;
    } else if (!(ctype_isnum(ctr->info) || ctype_isptr(ctr->info) ||
		 ctype_isenum(ctr->info)) || t == IRT_CDATA) {
      lj_trace_err(J, LJ_TRERR_NYICALL);  /* Unsupported return type. */
    }
    /* Varargs (and non-cdecl on x86) pass the function's typeid along. */
    if ((ct->info & CTF_VARARG)
#if LJ_TARGET_X86
	|| ctype_cconv(ct->info) != CTCC_CDECL
#endif
	)
      func = emitir(IRT(IR_CARG, IRT_NIL), func,
		    lj_ir_kint(J, ctype_typeid(cts, ct)));
    tr = emitir(IRT(IR_CALLXS, t), crec_call_args(J, rd, cts, ct), func);
    if (ctype_isbool(ctr->info)) {
      if (frame_islua(J->L->base-1) && bc_b(frame_pc(J->L->base-1)[-1]) == 1) {
	/* Don't check result if ignored. */
	tr = TREF_NIL;
      } else {
	crec_snap_caller(J);
#if LJ_TARGET_X86ORX64
	/* Note: only the x86/x64 backend supports U8 and only for EQ(tr, 0). */
	lj_ir_set(J, IRTG(IR_NE, IRT_U8), tr, lj_ir_kint(J, 0));
#else
	lj_ir_set(J, IRTGI(IR_NE), tr, lj_ir_kint(J, 0));
#endif
	J->postproc = LJ_POST_FIXGUARDSNAP;
	tr = TREF_TRUE;  /* Assume true result; guard fixed up later. */
      }
    } else if (t == IRT_PTR || (LJ_64 && t == IRT_P32) ||
	       t == IRT_I64 || t == IRT_U64 || ctype_isenum(ctr->info)) {
      /* Pointer/64 bit/enum results are boxed as cdata. */
      TRef trid = lj_ir_kint(J, ctype_cid(ct->info));
      tr = emitir(IRTG(IR_CNEWI, IRT_CDATA), trid, tr);
      if (t == IRT_I64 || t == IRT_U64) lj_needsplit(J);
    } else if (t == IRT_FLOAT || t == IRT_U32) {
      tr = emitconv(tr, IRT_NUM, t, 0);  /* Widen to a Lua number. */
    } else if (t == IRT_I8 || t == IRT_I16) {
      tr = emitconv(tr, IRT_INT, t, IRCONV_SEXT);
    } else if (t == IRT_U8 || t == IRT_U16) {
      tr = emitconv(tr, IRT_INT, t, 0);
    }
    J->base[0] = tr;
    J->needsnap = 1;
    return 1;
  }
  return 0;
}
/* Record a call on a cdata object: constructor (ctype object -> __new /
** allocation), direct C function call, or the __call metamethod.
*/
void LJ_FASTCALL recff_cdata_call(jit_State *J, RecordFFData *rd)
{
  CTState *cts = ctype_ctsG(J2G(J));
  GCcdata *cd = argv2cdata(J, J->base[0], &rd->argv[0]);
  CTypeID id = cd->ctypeid;
  CType *ct;
  cTValue *tv;
  MMS mm = MM_call;
  if (id == CTID_CTYPEID) {
    /* Calling a ctype object: treat as a constructor (__new). */
    id = crec_constructor(J, cd, J->base[0]);
    mm = MM_new;
  } else if (crec_call(J, rd, cd)) {
    return;  /* Recorded as a direct C function call. */
  }
  /* Record ctype __call/__new metamethod. */
  ct = ctype_raw(cts, id);
  tv = lj_ctype_meta(cts, ctype_isptr(ct->info) ? ctype_cid(ct->info) : id, mm);
  if (tv) {
    if (tvisfunc(tv)) {
      crec_tailcall(J, rd, tv);
      return;
    }
  } else if (mm == MM_new) {
    /* No __new metamethod: record a plain allocation. */
    crec_alloc(J, rd, id);
    return;
  }
  /* No metamethod or NYI: non-function metamethods. */
  lj_trace_err(J, LJ_TRERR_BADTYPE);
}
/* Record 64 bit integer arithmetic or comparison on two numeric operands.
** Returns the boxed result TRef, TREF_TRUE for a (pending) comparison,
** or 0 if the operands are not both numbers (caller tries other handlers).
** Comparisons of two 32 bit operands are narrowed where sign-safe.
*/
static TRef crec_arith_int64(jit_State *J, TRef *sp, CType **s, MMS mm)
{
  if (sp[0] && sp[1] && ctype_isnum(s[0]->info) && ctype_isnum(s[1]->info)) {
    IRType dt;
    CTypeID id;
    TRef tr;
    MSize i;
    IROp op;
    lj_needsplit(J);
    /* Any unsigned 64 bit operand makes the whole operation unsigned. */
    if (((s[0]->info & CTF_UNSIGNED) && s[0]->size == 8) ||
	((s[1]->info & CTF_UNSIGNED) && s[1]->size == 8)) {
      dt = IRT_U64; id = CTID_UINT64;
    } else {
      dt = IRT_I64; id = CTID_INT64;
      if (mm < MM_add &&
	  !((s[0]->info | s[1]->info) & CTF_FP) &&
	  s[0]->size == 4 && s[1]->size == 4) {  /* Try to narrow comparison. */
	/* Narrowing is safe if signedness matches, or the other operand is
	** a known non-negative constant.
	*/
	if (!((s[0]->info ^ s[1]->info) & CTF_UNSIGNED) ||
	    (tref_isk(sp[1]) && IR(tref_ref(sp[1]))->i >= 0)) {
	  dt = (s[0]->info & CTF_UNSIGNED) ? IRT_U32 : IRT_INT;
	  goto comp;
	} else if (tref_isk(sp[0]) && IR(tref_ref(sp[0]))->i >= 0) {
	  dt = (s[1]->info & CTF_UNSIGNED) ? IRT_U32 : IRT_INT;
	  goto comp;
	}
      }
    }
    /* Convert both operands to the common destination type. */
    for (i = 0; i < 2; i++) {
      IRType st = tref_type(sp[i]);
      if (st == IRT_NUM || st == IRT_FLOAT)
	sp[i] = emitconv(sp[i], dt, st, IRCONV_ANY);
      else if (!(st == IRT_I64 || st == IRT_U64))
	sp[i] = emitconv(sp[i], dt, IRT_INT,
			 (s[i]->info & CTF_UNSIGNED) ? 0 : IRCONV_SEXT);
    }
    if (mm < MM_add) {
    comp:
      /* Assume true comparison. Fixup and emit pending guard later. */
      if (mm == MM_eq) {
	op = IR_EQ;
      } else {
	op = mm == MM_lt ? IR_LT : IR_LE;
	if (dt == IRT_U32 || dt == IRT_U64)
	  op += (IR_ULT-IR_LT);  /* Unsigned comparison variants. */
      }
      lj_ir_set(J, IRTG(op, dt), sp[0], sp[1]);
      J->postproc = LJ_POST_FIXGUARD;
      return TREF_TRUE;
    } else {
      /* Arithmetic: MM_add..MM_* map directly onto IR_ADD.. opcodes. */
      tr = emitir(IRT(mm+(int)IR_ADD-(int)MM_add, dt), sp[0], sp[1]);
    }
    return emitir(IRTG(IR_CNEWI, IRT_CDATA), lj_ir_kint(J, id), tr);
  }
  return 0;
}
/* Record pointer arithmetic: pointer difference, pointer comparison, and
** pointer +/- integer (in either operand order for +).
** Returns the result TRef, TREF_TRUE for a pending comparison, or 0 if the
** operand combination isn't handled here (caller tries metamethods).
*/
static TRef crec_arith_ptr(jit_State *J, TRef *sp, CType **s, MMS mm)
{
  CTState *cts = ctype_ctsG(J2G(J));
  CType *ctp = s[0];
  if (!(sp[0] && sp[1])) return 0;
  if (ctype_isptr(ctp->info) || ctype_isrefarray(ctp->info)) {
    if ((mm == MM_sub || mm == MM_eq || mm == MM_lt || mm == MM_le) &&
	(ctype_isptr(s[1]->info) || ctype_isrefarray(s[1]->info))) {
      if (mm == MM_sub) {  /* Pointer difference. */
	TRef tr;
	CTSize sz = lj_ctype_size(cts, ctype_cid(ctp->info));
	/* Only power-of-two element sizes: difference becomes a shift. */
	if (sz == 0 || (sz & (sz-1)) != 0)
	  return 0;  /* NYI: integer division. */
	tr = emitir(IRT(IR_SUB, IRT_INTP), sp[0], sp[1]);
	tr = emitir(IRT(IR_BSAR, IRT_INTP), tr, lj_ir_kint(J, lj_fls(sz)));
#if LJ_64
	tr = emitconv(tr, IRT_NUM, IRT_INTP, 0);
#endif
	return tr;
      } else {  /* Pointer comparison (unsigned). */
	/* Assume true comparison. Fixup and emit pending guard later. */
	IROp op = mm == MM_eq ? IR_EQ : mm == MM_lt ? IR_ULT : IR_ULE;
	lj_ir_set(J, IRTG(op, IRT_PTR), sp[0], sp[1]);
	J->postproc = LJ_POST_FIXGUARD;
	return TREF_TRUE;
      }
    }
    if (!((mm == MM_add || mm == MM_sub) && ctype_isnum(s[1]->info)))
      return 0;
  } else if (mm == MM_add && ctype_isnum(ctp->info) &&
	     (ctype_isptr(s[1]->info) || ctype_isrefarray(s[1]->info))) {
    TRef tr = sp[0]; sp[0] = sp[1]; sp[1] = tr;  /* Swap pointer and index. */
    ctp = s[1];
  } else {
    return 0;
  }
  {
    /* Pointer +/- integer: scale the index and box the result pointer. */
    TRef tr = sp[1];
    IRType t = tref_type(tr);
    CTSize sz = lj_ctype_size(cts, ctype_cid(ctp->info));
    CTypeID id;
#if LJ_64
    if (t == IRT_NUM || t == IRT_FLOAT)
      tr = emitconv(tr, IRT_INTP, t, IRCONV_ANY);
    else if (!(t == IRT_I64 || t == IRT_U64))
      tr = emitconv(tr, IRT_INTP, IRT_INT,
		    ((t - IRT_I8) & 1) ? 0 : IRCONV_SEXT);
#else
    if (!tref_typerange(sp[1], IRT_I8, IRT_U32)) {
      tr = emitconv(tr, IRT_INTP, t,
		    (t == IRT_NUM || t == IRT_FLOAT) ? IRCONV_ANY : 0);
    }
#endif
    tr = emitir(IRT(IR_MUL, IRT_INTP), tr, lj_ir_kintp(J, sz));
    tr = emitir(IRT(mm+(int)IR_ADD-(int)MM_add, IRT_PTR), sp[0], tr);
    /* The result is always a pointer to the element type. */
    id = lj_ctype_intern(cts, CTINFO(CT_PTR, CTALIGN_PTR|ctype_cid(ctp->info)),
			 CTSIZE_PTR);
    return emitir(IRTG(IR_CNEWI, IRT_CDATA), lj_ir_kint(J, id), tr);
  }
}
/* Record ctype arithmetic metamethods.
** Looks up the metamethod (rd->data) on either operand's ctype; function
** metamethods become a tailcall. Falls back to raw pointer equality for
** MM_eq, otherwise aborts the trace.
*/
static TRef crec_arith_meta(jit_State *J, TRef *sp, CType **s, CTState *cts,
			    RecordFFData *rd)
{
  cTValue *tv = NULL;
  if (J->base[0]) {
    /* Try the first cdata operand's metamethod, then the second's. */
    if (tviscdata(&rd->argv[0])) {
      CTypeID id = argv2cdata(J, J->base[0], &rd->argv[0])->ctypeid;
      CType *ct = ctype_raw(cts, id);
      if (ctype_isptr(ct->info)) id = ctype_cid(ct->info);
      tv = lj_ctype_meta(cts, id, (MMS)rd->data);
    }
    if (!tv && J->base[1] && tviscdata(&rd->argv[1])) {
      CTypeID id = argv2cdata(J, J->base[1], &rd->argv[1])->ctypeid;
      CType *ct = ctype_raw(cts, id);
      if (ctype_isptr(ct->info)) id = ctype_cid(ct->info);
      tv = lj_ctype_meta(cts, id, (MMS)rd->data);
    }
  }
  if (tv) {
    if (tvisfunc(tv)) {
      crec_tailcall(J, rd, tv);
      return 0;
    }  /* NYI: non-function metamethods. */
  } else if ((MMS)rd->data == MM_eq) {  /* Fallback cdata pointer comparison. */
    if (sp[0] && sp[1] && ctype_isnum(s[0]->info) == ctype_isnum(s[1]->info)) {
      /* Assume true comparison. Fixup and emit pending guard later. */
      lj_ir_set(J, IRTG(IR_EQ, IRT_PTR), sp[0], sp[1]);
      J->postproc = LJ_POST_FIXGUARD;
      return TREF_TRUE;
    } else {
      /* Mixed number/non-number operands can never compare equal. */
      return TREF_FALSE;
    }
  }
  lj_trace_err(J, LJ_TRERR_BADTYPE);
  return 0;
}
/* Record cdata arithmetic/comparison fast function.
** Normalizes both operands (cdata, nil, integer, string, number) into a
** (TRef, CType) pair each, then dispatches to the int64, pointer, or
** metamethod handlers. sp[i] == 0 marks an operand that couldn't be loaded
** as a value (handlers then fall through to metamethods).
*/
void LJ_FASTCALL recff_cdata_arith(jit_State *J, RecordFFData *rd)
{
  CTState *cts = ctype_ctsG(J2G(J));
  TRef sp[2];
  CType *s[2];
  MSize i;
  for (i = 0; i < 2; i++) {
    TRef tr = J->base[i];
    CType *ct = ctype_get(cts, CTID_DOUBLE);  /* Default for plain numbers. */
    if (!tr) {
      lj_trace_err(J, LJ_TRERR_BADTYPE);
    } else if (tref_iscdata(tr)) {
      CTypeID id = argv2cdata(J, tr, &rd->argv[i])->ctypeid;
      IRType t;
      ct = ctype_raw(cts, id);
      t = crec_ct2irt(cts, ct);
      if (ctype_isptr(ct->info)) {  /* Resolve pointer or reference. */
	tr = emitir(IRT(IR_FLOAD, t), tr, IRFL_CDATA_PTR);
	if (ctype_isref(ct->info)) {
	  ct = ctype_rawchild(cts, ct);
	  t = crec_ct2irt(cts, ct);
	}
      } else if (t == IRT_I64 || t == IRT_U64) {
	tr = emitir(IRT(IR_FLOAD, t), tr, IRFL_CDATA_INT64);
	lj_needsplit(J);
	goto ok;
      } else if (t == IRT_INT || t == IRT_U32) {
	tr = emitir(IRT(IR_FLOAD, t), tr, IRFL_CDATA_INT);
	if (ctype_isenum(ct->info)) ct = ctype_child(cts, ct);
	goto ok;
      } else if (ctype_isfunc(ct->info)) {
	/* Function cdata participates as a pointer-to-function. */
	CTypeID id0 = i ? ctype_typeid(cts, s[0]) : 0;
	tr = emitir(IRT(IR_FLOAD, IRT_PTR), tr, IRFL_CDATA_PTR);
	ct = ctype_get(cts,
	  lj_ctype_intern(cts, CTINFO(CT_PTR, CTALIGN_PTR|id), CTSIZE_PTR));
	if (i) {
	  s[0] = ctype_get(cts, id0);  /* cts->tab may have been reallocated. */
	}
	goto ok;
      } else {
	/* Embedded payload: address it past the GC header. */
	tr = emitir(IRT(IR_ADD, IRT_PTR), tr, lj_ir_kintp(J, sizeof(GCcdata)));
      }
      if (ctype_isenum(ct->info)) ct = ctype_child(cts, ct);
      if (ctype_isnum(ct->info)) {
	if (t == IRT_CDATA) {
	  tr = 0;  /* Not loadable as a plain value. */
	} else {
	  if (t == IRT_I64 || t == IRT_U64) lj_needsplit(J);
	  tr = emitir(IRT(IR_XLOAD, t), tr, 0);
	}
      }
    } else if (tref_isnil(tr)) {
      tr = lj_ir_kptr(J, NULL);
      ct = ctype_get(cts, CTID_P_VOID);
    } else if (tref_isinteger(tr)) {
      ct = ctype_get(cts, CTID_INT32);
    } else if (tref_isstr(tr)) {
      TRef tr2 = J->base[1-i];
      CTypeID id = argv2cdata(J, tr2, &rd->argv[1-i])->ctypeid;
      ct = ctype_raw(cts, id);
      if (ctype_isenum(ct->info)) {  /* Match string against enum constant. */
	GCstr *str = strV(&rd->argv[i]);
	CTSize ofs;
	CType *cct = lj_ctype_getfield(cts, ct, str, &ofs);
	if (cct && ctype_isconstval(cct->info)) {
	  /* Specialize to the name of the enum constant. */
	  emitir(IRTG(IR_EQ, IRT_STR), tr, lj_ir_kstr(J, str));
	  ct = ctype_child(cts, cct);
	  tr = lj_ir_kint(J, (int32_t)ofs);
	} else {  /* Interpreter will throw or return false. */
	  ct = ctype_get(cts, CTID_P_VOID);
	}
      } else if (ctype_isptr(ct->info)) {
	/* String used as a pointer: address its character data. */
	tr = emitir(IRT(IR_ADD, IRT_PTR), tr, lj_ir_kintp(J, sizeof(GCstr)));
      } else {
	ct = ctype_get(cts, CTID_P_VOID);
      }
    } else if (!tref_isnum(tr)) {
      tr = 0;
      ct = ctype_get(cts, CTID_P_VOID);
    }
  ok:
    s[i] = ct;
    sp[i] = tr;
  }
  {
    TRef tr;
    MMS mm = (MMS)rd->data;
    /* Try int64 arith, then pointer arith; __len/__concat and anything
    ** unhandled go straight to the metamethod path.
    */
    if ((mm == MM_len || mm == MM_concat ||
	 (!(tr = crec_arith_int64(J, sp, s, mm)) &&
	  !(tr = crec_arith_ptr(J, sp, s, mm)))) &&
	!(tr = crec_arith_meta(J, sp, s, cts, rd)))
      return;
    J->base[0] = tr;
    /* Fixup cdata comparisons, too. Avoids some cdata escapes. */
    if (J->postproc == LJ_POST_FIXGUARD && frame_iscont(J->L->base-1) &&
	!irt_isguard(J->guardemit)) {
      const BCIns *pc = frame_contpc(J->L->base-1) - 1;
      if (bc_op(*pc) <= BC_ISNEP) {
	J2G(J)->tmptv.u64 = (uint64_t)(uintptr_t)pc;
	J->postproc = LJ_POST_FIXCOMP;
      }
    }
  }
}
  1536. /* -- C library namespace metamethods ------------------------------------- */
/* Record indexing (or index-store) of a C library namespace object. */
void LJ_FASTCALL recff_clib_index(jit_State *J, RecordFFData *rd)
{
  CTState *cts = ctype_ctsG(J2G(J));
  if (tref_isudata(J->base[0]) && tref_isstr(J->base[1]) &&
      udataV(&rd->argv[0])->udtype == UDTYPE_FFI_CLIB) {
    CLibrary *cl = (CLibrary *)uddata(udataV(&rd->argv[0]));
    GCstr *name = strV(&rd->argv[1]);
    CType *ct;
    CTypeID id = lj_ctype_getname(cts, &ct, name, CLNS_INDEX);
    cTValue *tv = lj_tab_getstr(cl->cache, name);
    rd->nres = rd->data;  /* rd->data distinguishes load vs. store form. */
    if (id && tv && !tvisnil(tv)) {
      /* Specialize to the symbol name and make the result a constant. */
      emitir(IRTG(IR_EQ, IRT_STR), J->base[1], lj_ir_kstr(J, name));
      if (ctype_isconstval(ct->info)) {
	/* Constant: embed its value directly in the trace.
	** A large unsigned value doesn't fit an int32_t IR constant,
	** so emit it as a number constant instead.
	*/
	if (ct->size >= 0x80000000u &&
	    (ctype_child(cts, ct)->info & CTF_UNSIGNED))
	  J->base[0] = lj_ir_knum(J, (lua_Number)(uint32_t)ct->size);
	else
	  J->base[0] = lj_ir_kint(J, (int32_t)ct->size);
      } else if (ctype_isextern(ct->info)) {
	/* External variable: access it through its (constant) address. */
	CTypeID sid = ctype_cid(ct->info);
	void *sp = *(void **)cdataptr(cdataV(tv));
	TRef ptr;
	ct = ctype_raw(cts, sid);
	if (LJ_64 && !checkptr32(sp))
	  ptr = lj_ir_kintp(J, (uintptr_t)sp);  /* Address above 4GB. */
	else
	  ptr = lj_ir_kptr(J, sp);
	if (rd->data) {  /* Load form. */
	  J->base[0] = crec_tv_ct(J, ct, sid, ptr);
	} else {  /* Store form: need a snapshot in case the store fails. */
	  J->needsnap = 1;
	  crec_ct_tv(J, ct, ptr, J->base[2], &rd->argv[2]);
	}
      } else {
	/* Otherwise (e.g. a function) return the cached cdata object. */
	J->base[0] = lj_ir_kgc(J, obj2gco(cdataV(tv)), IRT_CDATA);
      }
    } else {
      lj_trace_err(J, LJ_TRERR_NOCACHE);  /* Symbol not in cache yet. */
    }
  }  /* else: interpreter will throw. */
}
  1580. /* -- FFI library functions ----------------------------------------------- */
  1581. static TRef crec_toint(jit_State *J, CTState *cts, TRef sp, TValue *sval)
  1582. {
  1583. return crec_ct_tv(J, ctype_get(cts, CTID_INT32), 0, sp, sval);
  1584. }
  1585. void LJ_FASTCALL recff_ffi_new(jit_State *J, RecordFFData *rd)
  1586. {
  1587. crec_alloc(J, rd, argv2ctype(J, J->base[0], &rd->argv[0]));
  1588. }
/* Record ffi.errno(). Only the getter form (no argument) is supported. */
void LJ_FASTCALL recff_ffi_errno(jit_State *J, RecordFFData *rd)
{
  UNUSED(rd);
  if (J->base[0])  /* Setting errno is not implemented for traces. */
    lj_trace_err(J, LJ_TRERR_NYICALL);
  J->base[0] = lj_ir_call(J, IRCALL_lj_vm_errno);
}
  1596. void LJ_FASTCALL recff_ffi_string(jit_State *J, RecordFFData *rd)
  1597. {
  1598. CTState *cts = ctype_ctsG(J2G(J));
  1599. TRef tr = J->base[0];
  1600. if (tr) {
  1601. TRef trlen = J->base[1];
  1602. if (!tref_isnil(trlen)) {
  1603. trlen = crec_toint(J, cts, trlen, &rd->argv[1]);
  1604. tr = crec_ct_tv(J, ctype_get(cts, CTID_P_CVOID), 0, tr, &rd->argv[0]);
  1605. } else {
  1606. tr = crec_ct_tv(J, ctype_get(cts, CTID_P_CCHAR), 0, tr, &rd->argv[0]);
  1607. trlen = lj_ir_call(J, IRCALL_strlen, tr);
  1608. }
  1609. J->base[0] = emitir(IRT(IR_XSNEW, IRT_STR), tr, trlen);
  1610. } /* else: interpreter will throw. */
  1611. }
  1612. void LJ_FASTCALL recff_ffi_copy(jit_State *J, RecordFFData *rd)
  1613. {
  1614. CTState *cts = ctype_ctsG(J2G(J));
  1615. TRef trdst = J->base[0], trsrc = J->base[1], trlen = J->base[2];
  1616. if (trdst && trsrc && (trlen || tref_isstr(trsrc))) {
  1617. trdst = crec_ct_tv(J, ctype_get(cts, CTID_P_VOID), 0, trdst, &rd->argv[0]);
  1618. trsrc = crec_ct_tv(J, ctype_get(cts, CTID_P_CVOID), 0, trsrc, &rd->argv[1]);
  1619. if (trlen) {
  1620. trlen = crec_toint(J, cts, trlen, &rd->argv[2]);
  1621. } else {
  1622. trlen = emitir(IRTI(IR_FLOAD), J->base[1], IRFL_STR_LEN);
  1623. trlen = emitir(IRTI(IR_ADD), trlen, lj_ir_kint(J, 1));
  1624. }
  1625. rd->nres = 0;
  1626. crec_copy(J, trdst, trsrc, trlen, NULL);
  1627. } /* else: interpreter will throw. */
  1628. }
  1629. void LJ_FASTCALL recff_ffi_fill(jit_State *J, RecordFFData *rd)
  1630. {
  1631. CTState *cts = ctype_ctsG(J2G(J));
  1632. TRef trdst = J->base[0], trlen = J->base[1], trfill = J->base[2];
  1633. if (trdst && trlen) {
  1634. CTSize step = 1;
  1635. if (tviscdata(&rd->argv[0])) { /* Get alignment of original destination. */
  1636. CTSize sz;
  1637. CType *ct = ctype_raw(cts, cdataV(&rd->argv[0])->ctypeid);
  1638. if (ctype_isptr(ct->info))
  1639. ct = ctype_rawchild(cts, ct);
  1640. step = (1u<<ctype_align(lj_ctype_info(cts, ctype_typeid(cts, ct), &sz)));
  1641. }
  1642. trdst = crec_ct_tv(J, ctype_get(cts, CTID_P_VOID), 0, trdst, &rd->argv[0]);
  1643. trlen = crec_toint(J, cts, trlen, &rd->argv[1]);
  1644. if (trfill)
  1645. trfill = crec_toint(J, cts, trfill, &rd->argv[2]);
  1646. else
  1647. trfill = lj_ir_kint(J, 0);
  1648. rd->nres = 0;
  1649. crec_fill(J, trdst, trlen, trfill, step);
  1650. } /* else: interpreter will throw. */
  1651. }
  1652. void LJ_FASTCALL recff_ffi_typeof(jit_State *J, RecordFFData *rd)
  1653. {
  1654. if (tref_iscdata(J->base[0])) {
  1655. TRef trid = lj_ir_kint(J, argv2ctype(J, J->base[0], &rd->argv[0]));
  1656. J->base[0] = emitir(IRTG(IR_CNEWI, IRT_CDATA),
  1657. lj_ir_kint(J, CTID_CTYPEID), trid);
  1658. } else {
  1659. setfuncV(J->L, &J->errinfo, J->fn);
  1660. lj_trace_err_info(J, LJ_TRERR_NYIFFU);
  1661. }
  1662. }
  1663. void LJ_FASTCALL recff_ffi_istype(jit_State *J, RecordFFData *rd)
  1664. {
  1665. argv2ctype(J, J->base[0], &rd->argv[0]);
  1666. if (tref_iscdata(J->base[1])) {
  1667. argv2ctype(J, J->base[1], &rd->argv[1]);
  1668. J->postproc = LJ_POST_FIXBOOL;
  1669. J->base[0] = TREF_TRUE;
  1670. } else {
  1671. J->base[0] = TREF_FALSE;
  1672. }
  1673. }
  1674. void LJ_FASTCALL recff_ffi_abi(jit_State *J, RecordFFData *rd)
  1675. {
  1676. if (tref_isstr(J->base[0])) {
  1677. /* Specialize to the ABI string to make the boolean result a constant. */
  1678. emitir(IRTG(IR_EQ, IRT_STR), J->base[0], lj_ir_kstr(J, strV(&rd->argv[0])));
  1679. J->postproc = LJ_POST_FIXBOOL;
  1680. J->base[0] = TREF_TRUE;
  1681. } else {
  1682. lj_trace_err(J, LJ_TRERR_BADTYPE);
  1683. }
  1684. }
  1685. /* Record ffi.sizeof(), ffi.alignof(), ffi.offsetof(). */
  1686. void LJ_FASTCALL recff_ffi_xof(jit_State *J, RecordFFData *rd)
  1687. {
  1688. CTypeID id = argv2ctype(J, J->base[0], &rd->argv[0]);
  1689. if (rd->data == FF_ffi_sizeof) {
  1690. CType *ct = lj_ctype_rawref(ctype_ctsG(J2G(J)), id);
  1691. if (ctype_isvltype(ct->info))
  1692. lj_trace_err(J, LJ_TRERR_BADTYPE);
  1693. } else if (rd->data == FF_ffi_offsetof) { /* Specialize to the field name. */
  1694. if (!tref_isstr(J->base[1]))
  1695. lj_trace_err(J, LJ_TRERR_BADTYPE);
  1696. emitir(IRTG(IR_EQ, IRT_STR), J->base[1], lj_ir_kstr(J, strV(&rd->argv[1])));
  1697. rd->nres = 3; /* Just in case. */
  1698. }
  1699. J->postproc = LJ_POST_FIXCONST;
  1700. J->base[0] = J->base[1] = J->base[2] = TREF_NIL;
  1701. }
  1702. void LJ_FASTCALL recff_ffi_gc(jit_State *J, RecordFFData *rd)
  1703. {
  1704. argv2cdata(J, J->base[0], &rd->argv[0]);
  1705. if (!J->base[1])
  1706. lj_trace_err(J, LJ_TRERR_BADTYPE);
  1707. crec_finalizer(J, J->base[0], J->base[1], &rd->argv[1]);
  1708. }
  1709. /* -- 64 bit bit.* library functions -------------------------------------- */
  1710. /* Determine bit operation type from argument type. */
  1711. static CTypeID crec_bit64_type(CTState *cts, cTValue *tv)
  1712. {
  1713. if (tviscdata(tv)) {
  1714. CType *ct = lj_ctype_rawref(cts, cdataV(tv)->ctypeid);
  1715. if (ctype_isenum(ct->info)) ct = ctype_child(cts, ct);
  1716. if ((ct->info & (CTMASK_NUM|CTF_BOOL|CTF_FP|CTF_UNSIGNED)) ==
  1717. CTINFO(CT_NUM, CTF_UNSIGNED) && ct->size == 8)
  1718. return CTID_UINT64; /* Use uint64_t, since it has the highest rank. */
  1719. return CTID_INT64; /* Otherwise use int64_t. */
  1720. }
  1721. return 0; /* Use regular 32 bit ops. */
  1722. }
  1723. void LJ_FASTCALL recff_bit64_tobit(jit_State *J, RecordFFData *rd)
  1724. {
  1725. CTState *cts = ctype_ctsG(J2G(J));
  1726. TRef tr = crec_ct_tv(J, ctype_get(cts, CTID_INT64), 0,
  1727. J->base[0], &rd->argv[0]);
  1728. if (!tref_isinteger(tr))
  1729. tr = emitconv(tr, IRT_INT, tref_type(tr), 0);
  1730. J->base[0] = tr;
  1731. }
  1732. int LJ_FASTCALL recff_bit64_unary(jit_State *J, RecordFFData *rd)
  1733. {
  1734. CTState *cts = ctype_ctsG(J2G(J));
  1735. CTypeID id = crec_bit64_type(cts, &rd->argv[0]);
  1736. if (id) {
  1737. TRef tr = crec_ct_tv(J, ctype_get(cts, id), 0, J->base[0], &rd->argv[0]);
  1738. tr = emitir(IRT(rd->data, id-CTID_INT64+IRT_I64), tr, 0);
  1739. J->base[0] = emitir(IRTG(IR_CNEWI, IRT_CDATA), lj_ir_kint(J, id), tr);
  1740. return 1;
  1741. }
  1742. return 0;
  1743. }
  1744. int LJ_FASTCALL recff_bit64_nary(jit_State *J, RecordFFData *rd)
  1745. {
  1746. CTState *cts = ctype_ctsG(J2G(J));
  1747. CTypeID id = 0;
  1748. MSize i;
  1749. for (i = 0; J->base[i] != 0; i++) {
  1750. CTypeID aid = crec_bit64_type(cts, &rd->argv[i]);
  1751. if (id < aid) id = aid; /* Determine highest type rank of all arguments. */
  1752. }
  1753. if (id) {
  1754. CType *ct = ctype_get(cts, id);
  1755. uint32_t ot = IRT(rd->data, id-CTID_INT64+IRT_I64);
  1756. TRef tr = crec_ct_tv(J, ct, 0, J->base[0], &rd->argv[0]);
  1757. for (i = 1; J->base[i] != 0; i++) {
  1758. TRef tr2 = crec_ct_tv(J, ct, 0, J->base[i], &rd->argv[i]);
  1759. tr = emitir(ot, tr, tr2);
  1760. }
  1761. J->base[0] = emitir(IRTG(IR_CNEWI, IRT_CDATA), lj_ir_kint(J, id), tr);
  1762. return 1;
  1763. }
  1764. return 0;
  1765. }
/* Record a 64 bit shift/rotate bit.* op. Returns 0 for the 32 bit path. */
int LJ_FASTCALL recff_bit64_shift(jit_State *J, RecordFFData *rd)
{
  CTState *cts = ctype_ctsG(J2G(J));
  CTypeID id;
  TRef tsh = 0;
  if (J->base[0] && tref_iscdata(J->base[1])) {
    /* Shift count passed as cdata: convert it to an int32_t up front. */
    tsh = crec_ct_tv(J, ctype_get(cts, CTID_INT64), 0,
		     J->base[1], &rd->argv[1]);
    if (!tref_isinteger(tsh))
      tsh = emitconv(tsh, IRT_INT, tref_type(tsh), 0);
    J->base[1] = tsh;
  }
  id = crec_bit64_type(cts, &rd->argv[0]);
  if (id) {
    TRef tr = crec_ct_tv(J, ctype_get(cts, id), 0, J->base[0], &rd->argv[0]);
    uint32_t op = rd->data;
    if (!tsh) tsh = lj_opt_narrow_tobit(J, J->base[1]);
    /* Mask a non-constant shift count if the target doesn't mask it. */
    if (!(op < IR_BROL ? LJ_TARGET_MASKSHIFT : LJ_TARGET_MASKROT) &&
	!tref_isk(tsh))
      tsh = emitir(IRTI(IR_BAND), tsh, lj_ir_kint(J, 63));
#ifdef LJ_TARGET_UNIFYROT
    /* Target has only one rotate direction: negate count for the other. */
    if (op == (LJ_TARGET_UNIFYROT == 1 ? IR_BROR : IR_BROL)) {
      op = LJ_TARGET_UNIFYROT == 1 ? IR_BROL : IR_BROR;
      tsh = emitir(IRTI(IR_NEG), tsh, tsh);
    }
#endif
    tr = emitir(IRT(op, id-CTID_INT64+IRT_I64), tr, tsh);
    /* Box the 64 bit result as cdata. */
    J->base[0] = emitir(IRTG(IR_CNEWI, IRT_CDATA), lj_ir_kint(J, id), tr);
    return 1;
  }
  return 0;
}
/* Record bit.tohex() for 32 or 64 bit arguments.
** Returns the IR call that formats the value into the string buffer hdr.
*/
TRef recff_bit64_tohex(jit_State *J, RecordFFData *rd, TRef hdr)
{
  CTState *cts = ctype_ctsG(J2G(J));
  CTypeID id = crec_bit64_type(cts, &rd->argv[0]);
  TRef tr, trsf = J->base[1];
  SFormat sf = (STRFMT_UINT|STRFMT_T_HEX);
  int32_t n;
  if (trsf) {  /* Got the number-of-digits argument. */
    CTypeID id2 = 0;
    n = (int32_t)lj_carith_check64(J->L, 2, &id2);
    if (id2)  /* Digit count itself may be a 64 bit cdata. */
      trsf = crec_ct_tv(J, ctype_get(cts, CTID_INT32), 0, trsf, &rd->argv[1]);
    else
      trsf = lj_opt_narrow_tobit(J, trsf);
    emitir(IRTGI(IR_EQ), trsf, lj_ir_kint(J, n));  /* Specialize to n. */
  } else {
    n = id ? 16 : 8;  /* Default: full width of the argument type. */
  }
  /* Negative digit count selects uppercase hex digits. */
  if (n < 0) { n = (int32_t)(~n+1u); sf |= STRFMT_F_UPPER; }
  if ((uint32_t)n > 254) n = 254;  /* Clamp to the format precision field. */
  sf |= ((SFormat)((n+1)&255) << STRFMT_SH_PREC);
  if (id) {  /* 64 bit argument. */
    tr = crec_ct_tv(J, ctype_get(cts, id), 0, J->base[0], &rd->argv[0]);
    if (n < 16)  /* Mask off the high bits that won't be printed. */
      tr = emitir(IRT(IR_BAND, IRT_U64), tr,
		  lj_ir_kint64(J, ((uint64_t)1 << 4*n)-1));
  } else {  /* 32 bit argument, widened to uint64_t for the format call. */
    tr = lj_opt_narrow_tobit(J, J->base[0]);
    if (n < 8)
      tr = emitir(IRTI(IR_BAND), tr, lj_ir_kint(J, (int32_t)((1u << 4*n)-1)));
    tr = emitconv(tr, IRT_U64, IRT_INT, 0);  /* No sign-extension. */
    lj_needsplit(J);
  }
  return lj_ir_call(J, IRCALL_lj_strfmt_putfxint, hdr, lj_ir_kint(J, sf), tr);
}
  1833. /* -- Miscellaneous library functions ------------------------------------- */
  1834. void LJ_FASTCALL lj_crecord_tonumber(jit_State *J, RecordFFData *rd)
  1835. {
  1836. CTState *cts = ctype_ctsG(J2G(J));
  1837. CType *d, *ct = lj_ctype_rawref(cts, cdataV(&rd->argv[0])->ctypeid);
  1838. if (ctype_isenum(ct->info)) ct = ctype_child(cts, ct);
  1839. if (ctype_isnum(ct->info) || ctype_iscomplex(ct->info)) {
  1840. if (ctype_isinteger_or_bool(ct->info) && ct->size <= 4 &&
  1841. !(ct->size == 4 && (ct->info & CTF_UNSIGNED)))
  1842. d = ctype_get(cts, CTID_INT32);
  1843. else
  1844. d = ctype_get(cts, CTID_DOUBLE);
  1845. J->base[0] = crec_ct_tv(J, d, 0, J->base[0], &rd->argv[0]);
  1846. } else {
  1847. /* Specialize to the ctype that couldn't be converted. */
  1848. argv2cdata(J, J->base[0], &rd->argv[0]);
  1849. J->base[0] = TREF_NIL;
  1850. }
  1851. }
  1852. TRef lj_crecord_loadiu64(jit_State *J, TRef tr, cTValue *o)
  1853. {
  1854. CTypeID id = argv2cdata(J, tr, o)->ctypeid;
  1855. if (!(id == CTID_INT64 || id == CTID_UINT64))
  1856. lj_trace_err(J, LJ_TRERR_BADTYPE);
  1857. lj_needsplit(J);
  1858. return emitir(IRT(IR_FLOAD, id == CTID_INT64 ? IRT_I64 : IRT_U64), tr,
  1859. IRFL_CDATA_INT64);
  1860. }
  1861. #if LJ_HASBUFFER
  1862. TRef lj_crecord_topcvoid(jit_State *J, TRef tr, cTValue *o)
  1863. {
  1864. CTState *cts = ctype_ctsG(J2G(J));
  1865. if (!tref_iscdata(tr)) lj_trace_err(J, LJ_TRERR_BADTYPE);
  1866. return crec_ct_tv(J, ctype_get(cts, CTID_P_CVOID), 0, tr, o);
  1867. }
  1868. TRef lj_crecord_topuint8(jit_State *J, TRef tr)
  1869. {
  1870. return emitir(IRTG(IR_CNEWI, IRT_CDATA), lj_ir_kint(J, CTID_P_UINT8), tr);
  1871. }
  1872. #endif
  1873. #undef IR
  1874. #undef emitir
  1875. #undef emitconv
  1876. #endif