// llvm_backend_expr.cpp

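
// Lowers the short-circuiting operators `&&` and `||`: the left operand is emitted as a
// conditional branch, the right operand is only evaluated in the `logical.cmp.rhs` block,
// and the two paths are merged in `logical.cmp.done` with a phi node. Illustrative Odin
// input (hypothetical example): in `ok := a && b`, `b` is only evaluated when `a` is true.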
lbValue lb_emit_logical_binary_expr(lbProcedure *p, TokenKind op, Ast *left, Ast *right, Type *type) {
	lbModule *m = p->module;

	lbBlock *rhs  = lb_create_block(p, "logical.cmp.rhs");
	lbBlock *done = lb_create_block(p, "logical.cmp.done");

	type = default_type(type);

	lbValue short_circuit = {};
	if (op == Token_CmpAnd) {
		lb_build_cond(p, left, rhs, done);
		short_circuit = lb_const_bool(m, type, false);
	} else if (op == Token_CmpOr) {
		lb_build_cond(p, left, done, rhs);
		short_circuit = lb_const_bool(m, type, true);
	}

	if (rhs->preds.count == 0) {
		lb_start_block(p, done);
		return short_circuit;
	}

	if (done->preds.count == 0) {
		lb_start_block(p, rhs);
		if (lb_is_expr_untyped_const(right)) {
			return lb_expr_untyped_const_to_typed(m, right, type);
		}
		return lb_build_expr(p, right);
	}

	Array<LLVMValueRef> incoming_values = {};
	Array<LLVMBasicBlockRef> incoming_blocks = {};
	array_init(&incoming_values, heap_allocator(), done->preds.count+1);
	array_init(&incoming_blocks, heap_allocator(), done->preds.count+1);

	for_array(i, done->preds) {
		incoming_values[i] = short_circuit.value;
		incoming_blocks[i] = done->preds[i]->block;
	}

	lb_start_block(p, rhs);

	lbValue edge = {};
	if (lb_is_expr_untyped_const(right)) {
		edge = lb_expr_untyped_const_to_typed(m, right, type);
	} else {
		edge = lb_build_expr(p, right);
	}

	incoming_values[done->preds.count] = edge.value;
	incoming_blocks[done->preds.count] = p->curr_block->block;

	lb_emit_jump(p, done);
	lb_start_block(p, done);

	LLVMTypeRef dst_type = lb_type(m, type);
	LLVMValueRef phi = nullptr;

	GB_ASSERT(incoming_values.count == incoming_blocks.count);
	GB_ASSERT(incoming_values.count > 0);

	LLVMTypeRef phi_type = nullptr;
	for_array(i, incoming_values) {
		LLVMValueRef incoming_value = incoming_values[i];
		if (!LLVMIsConstant(incoming_value)) {
			phi_type = LLVMTypeOf(incoming_value);
			break;
		}
	}

	if (phi_type == nullptr) {
		phi = LLVMBuildPhi(p->builder, dst_type, "");
		LLVMAddIncoming(phi, incoming_values.data, incoming_blocks.data, cast(unsigned)incoming_values.count);

		lbValue res = {};
		res.type = type;
		res.value = phi;
		return res;
	}

	for_array(i, incoming_values) {
		LLVMValueRef incoming_value = incoming_values[i];
		LLVMTypeRef incoming_type = LLVMTypeOf(incoming_value);
		if (phi_type != incoming_type) {
			GB_ASSERT_MSG(LLVMIsConstant(incoming_value), "%s vs %s", LLVMPrintTypeToString(phi_type), LLVMPrintTypeToString(incoming_type));
			bool ok = !!LLVMConstIntGetZExtValue(incoming_value);
			incoming_values[i] = LLVMConstInt(phi_type, ok, false);
		}
	}

	phi = LLVMBuildPhi(p->builder, phi_type, "");
	LLVMAddIncoming(phi, incoming_values.data, incoming_blocks.data, cast(unsigned)incoming_values.count);

	LLVMTypeRef i1 = LLVMInt1TypeInContext(m->ctx);
	if ((phi_type == i1) ^ (dst_type == i1)) {
		if (phi_type == i1) {
			phi = LLVMBuildZExt(p->builder, phi, dst_type, "");
		} else {
			phi = LLVMBuildTruncOrBitCast(p->builder, phi, dst_type, "");
		}
	} else if (lb_sizeof(phi_type) < lb_sizeof(dst_type)) {
		phi = LLVMBuildZExt(p->builder, phi, dst_type, "");
	} else {
		phi = LLVMBuildTruncOrBitCast(p->builder, phi, dst_type, "");
	}

	lbValue res = {};
	res.type = type;
	res.value = phi;
	return res;
}
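
// Emits unary `+`, `-`, `~` (bitwise not) and `!` (boolean not). Array-like operands are
// handled element-wise, with a single LLVM vector operation attempted as a fast path when
// lb_try_vector_cast succeeds; scalar operands whose endianness differs from the target
// architecture are byte-swapped to the platform representation, negated, and swapped back.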
lbValue lb_emit_unary_arith(lbProcedure *p, TokenKind op, lbValue x, Type *type) {
	switch (op) {
	case Token_Add:
		return x;
	case Token_Not: // Boolean not
	case Token_Xor: // Bitwise not
	case Token_Sub: // Number negation
		break;
	case Token_Pointer:
		GB_PANIC("This should be handled elsewhere");
		break;
	}

	if (is_type_array_like(x.type)) {
		// IMPORTANT TODO(bill): This is very wasteful with regards to stack memory
		Type *tl = base_type(x.type);
		lbValue val = lb_address_from_load_or_generate_local(p, x);
		GB_ASSERT(is_type_array_like(type));
		Type *elem_type = base_array_type(type);

		// NOTE(bill): Doesn't need to be zero because it will be initialized in the loops
		lbAddr res_addr = lb_add_local(p, type, nullptr, false, 0, true);
		lbValue res = lb_addr_get_ptr(p, res_addr);

		bool inline_array_arith = type_size_of(type) <= build_context.max_align;

		i32 count = cast(i32)get_array_type_count(tl);

		LLVMTypeRef vector_type = nullptr;
		if (op != Token_Not && lb_try_vector_cast(p->module, val, &vector_type)) {
			LLVMValueRef vp = LLVMBuildPointerCast(p->builder, val.value, LLVMPointerType(vector_type, 0), "");
			LLVMValueRef v = LLVMBuildLoad2(p->builder, vector_type, vp, "");

			LLVMValueRef opv = nullptr;
			switch (op) {
			case Token_Xor:
				opv = LLVMBuildNot(p->builder, v, "");
				break;
			case Token_Sub:
				if (is_type_float(elem_type)) {
					opv = LLVMBuildFNeg(p->builder, v, "");
				} else {
					opv = LLVMBuildNeg(p->builder, v, "");
				}
				break;
			}

			if (opv != nullptr) {
				LLVMSetAlignment(res.value, cast(unsigned)lb_alignof(vector_type));

				LLVMValueRef res_ptr = LLVMBuildPointerCast(p->builder, res.value, LLVMPointerType(vector_type, 0), "");
				LLVMBuildStore(p->builder, opv, res_ptr);
				return lb_emit_conv(p, lb_emit_load(p, res), type);
			}
		}

		if (inline_array_arith) {
			// inline
			for (i32 i = 0; i < count; i++) {
				lbValue e = lb_emit_load(p, lb_emit_array_epi(p, val, i));
				lbValue z = lb_emit_unary_arith(p, op, e, elem_type);
				lb_emit_store(p, lb_emit_array_epi(p, res, i), z);
			}
		} else {
			auto loop_data = lb_loop_start(p, count, t_i32);

			lbValue e = lb_emit_load(p, lb_emit_array_ep(p, val, loop_data.idx));
			lbValue z = lb_emit_unary_arith(p, op, e, elem_type);
			lb_emit_store(p, lb_emit_array_ep(p, res, loop_data.idx), z);

			lb_loop_end(p, loop_data);
		}
		return lb_emit_load(p, res);
	}

	if (op == Token_Xor) {
		lbValue cmp = {};
		cmp.value = LLVMBuildNot(p->builder, x.value, "");
		cmp.type = x.type;
		return lb_emit_conv(p, cmp, type);
	}

	if (op == Token_Not) {
		lbValue cmp = {};
		LLVMValueRef zero = LLVMConstInt(lb_type(p->module, x.type), 0, false);
		cmp.value = LLVMBuildICmp(p->builder, LLVMIntEQ, x.value, zero, "");
		cmp.type = t_llvm_bool;
		return lb_emit_conv(p, cmp, type);
	}

	if (op == Token_Sub && is_type_integer(type) && is_type_different_to_arch_endianness(type)) {
		Type *platform_type = integer_endian_type_to_platform_type(type);
		lbValue v = lb_emit_byte_swap(p, x, platform_type);

		lbValue res = {};
		res.value = LLVMBuildNeg(p->builder, v.value, "");
		res.type = platform_type;

		return lb_emit_byte_swap(p, res, type);
	}

	if (op == Token_Sub && is_type_float(type) && is_type_different_to_arch_endianness(type)) {
		Type *platform_type = integer_endian_type_to_platform_type(type);
		lbValue v = lb_emit_byte_swap(p, x, platform_type);

		lbValue res = {};
		res.value = LLVMBuildFNeg(p->builder, v.value, "");
		res.type = platform_type;

		return lb_emit_byte_swap(p, res, type);
	}

	lbValue res = {};

	switch (op) {
	case Token_Not: // Boolean not
	case Token_Xor: // Bitwise not
		res.value = LLVMBuildNot(p->builder, x.value, "");
		res.type = x.type;
		return res;
	case Token_Sub: // Number negation
		if (is_type_integer(x.type)) {
			res.value = LLVMBuildNeg(p->builder, x.value, "");
		} else if (is_type_float(x.type)) {
			res.value = LLVMBuildFNeg(p->builder, x.value, "");
		} else if (is_type_complex(x.type)) {
			LLVMValueRef v0 = LLVMBuildFNeg(p->builder, LLVMBuildExtractValue(p->builder, x.value, 0, ""), "");
			LLVMValueRef v1 = LLVMBuildFNeg(p->builder, LLVMBuildExtractValue(p->builder, x.value, 1, ""), "");

			lbAddr addr = lb_add_local_generated(p, x.type, false);
			LLVMBuildStore(p->builder, v0, LLVMBuildStructGEP(p->builder, addr.addr.value, 0, ""));
			LLVMBuildStore(p->builder, v1, LLVMBuildStructGEP(p->builder, addr.addr.value, 1, ""));
			return lb_addr_load(p, addr);
		} else if (is_type_quaternion(x.type)) {
			LLVMValueRef v0 = LLVMBuildFNeg(p->builder, LLVMBuildExtractValue(p->builder, x.value, 0, ""), "");
			LLVMValueRef v1 = LLVMBuildFNeg(p->builder, LLVMBuildExtractValue(p->builder, x.value, 1, ""), "");
			LLVMValueRef v2 = LLVMBuildFNeg(p->builder, LLVMBuildExtractValue(p->builder, x.value, 2, ""), "");
			LLVMValueRef v3 = LLVMBuildFNeg(p->builder, LLVMBuildExtractValue(p->builder, x.value, 3, ""), "");

			lbAddr addr = lb_add_local_generated(p, x.type, false);
			LLVMBuildStore(p->builder, v0, LLVMBuildStructGEP(p->builder, addr.addr.value, 0, ""));
			LLVMBuildStore(p->builder, v1, LLVMBuildStructGEP(p->builder, addr.addr.value, 1, ""));
			LLVMBuildStore(p->builder, v2, LLVMBuildStructGEP(p->builder, addr.addr.value, 2, ""));
			LLVMBuildStore(p->builder, v3, LLVMBuildStructGEP(p->builder, addr.addr.value, 3, ""));
			return lb_addr_load(p, addr);
		} else {
			GB_PANIC("Unhandled type %s", type_to_string(x.type));
		}
		res.type = x.type;
		return res;
	}
	return res;
}
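
// Tries to lower array arithmetic as a single operation on an LLVM vector type. Both
// operands must be plain load instructions so that their source pointers can be
// reinterpreted as vector pointers. Shifts are rejected up front; Odin's shift semantics
// (out-of-range shift counts yield zero, see Token_Shl/Token_Shr in lb_emit_arith below)
// are not expressed here as a single vector instruction.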
bool lb_try_direct_vector_arith(lbProcedure *p, TokenKind op, lbValue lhs, lbValue rhs, Type *type, lbValue *res_) {
	GB_ASSERT(is_type_array_like(type));
	Type *elem_type = base_array_type(type);

	// NOTE(bill): Shift operations cannot be easily dealt with due to Odin's semantics
	if (op == Token_Shl || op == Token_Shr) {
		return false;
	}

	if (!LLVMIsALoadInst(lhs.value) || !LLVMIsALoadInst(rhs.value)) {
		return false;
	}

	lbValue lhs_ptr = {};
	lbValue rhs_ptr = {};
	lhs_ptr.value = LLVMGetOperand(lhs.value, 0);
	lhs_ptr.type = alloc_type_pointer(lhs.type);
	rhs_ptr.value = LLVMGetOperand(rhs.value, 0);
	rhs_ptr.type = alloc_type_pointer(rhs.type);

	LLVMTypeRef vector_type0 = nullptr;
	LLVMTypeRef vector_type1 = nullptr;
	if (lb_try_vector_cast(p->module, lhs_ptr, &vector_type0) &&
	    lb_try_vector_cast(p->module, rhs_ptr, &vector_type1)) {
		GB_ASSERT(vector_type0 == vector_type1);
		LLVMTypeRef vector_type = vector_type0;

		LLVMValueRef lhs_vp = LLVMBuildPointerCast(p->builder, lhs_ptr.value, LLVMPointerType(vector_type, 0), "");
		LLVMValueRef rhs_vp = LLVMBuildPointerCast(p->builder, rhs_ptr.value, LLVMPointerType(vector_type, 0), "");
		LLVMValueRef x = LLVMBuildLoad2(p->builder, vector_type, lhs_vp, "");
		LLVMValueRef y = LLVMBuildLoad2(p->builder, vector_type, rhs_vp, "");
		LLVMValueRef z = nullptr;

		Type *integral_type = base_type(elem_type);
		if (is_type_simd_vector(integral_type)) {
			integral_type = core_array_type(integral_type);
		}
		if (is_type_bit_set(integral_type)) {
			switch (op) {
			case Token_Add: op = Token_Or;     break;
			case Token_Sub: op = Token_AndNot; break;
			}
		}

		if (is_type_float(integral_type)) {
			switch (op) {
			case Token_Add:
				z = LLVMBuildFAdd(p->builder, x, y, "");
				break;
			case Token_Sub:
				z = LLVMBuildFSub(p->builder, x, y, "");
				break;
			case Token_Mul:
				z = LLVMBuildFMul(p->builder, x, y, "");
				break;
			case Token_Quo:
				z = LLVMBuildFDiv(p->builder, x, y, "");
				break;
			case Token_Mod:
				z = LLVMBuildFRem(p->builder, x, y, "");
				break;
			default:
				GB_PANIC("Unsupported vector operation");
				break;
			}
		} else {
			switch (op) {
			case Token_Add:
				z = LLVMBuildAdd(p->builder, x, y, "");
				break;
			case Token_Sub:
				z = LLVMBuildSub(p->builder, x, y, "");
				break;
			case Token_Mul:
				z = LLVMBuildMul(p->builder, x, y, "");
				break;
			case Token_Quo:
				if (is_type_unsigned(integral_type)) {
					z = LLVMBuildUDiv(p->builder, x, y, "");
				} else {
					z = LLVMBuildSDiv(p->builder, x, y, "");
				}
				break;
			case Token_Mod:
				if (is_type_unsigned(integral_type)) {
					z = LLVMBuildURem(p->builder, x, y, "");
				} else {
					z = LLVMBuildSRem(p->builder, x, y, "");
				}
				break;
			case Token_ModMod:
				if (is_type_unsigned(integral_type)) {
					z = LLVMBuildURem(p->builder, x, y, "");
				} else {
					LLVMValueRef a = LLVMBuildSRem(p->builder, x, y, "");
					LLVMValueRef b = LLVMBuildAdd(p->builder, a, y, "");
					z = LLVMBuildSRem(p->builder, b, y, "");
				}
				break;
			case Token_And:
				z = LLVMBuildAnd(p->builder, x, y, "");
				break;
			case Token_AndNot:
				z = LLVMBuildAnd(p->builder, x, LLVMBuildNot(p->builder, y, ""), "");
				break;
			case Token_Or:
				z = LLVMBuildOr(p->builder, x, y, "");
				break;
			case Token_Xor:
				z = LLVMBuildXor(p->builder, x, y, "");
				break;
			default:
				GB_PANIC("Unsupported vector operation");
				break;
			}
		}

		if (z != nullptr) {
			lbAddr res = lb_add_local_generated_temp(p, type, lb_alignof(vector_type));

			LLVMValueRef vp = LLVMBuildPointerCast(p->builder, res.addr.value, LLVMPointerType(vector_type, 0), "");
			LLVMBuildStore(p->builder, z, vp);

			lbValue v = lb_addr_load(p, res);
			if (res_) *res_ = v;
			return true;
		}
	}

	return false;
}
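
// Element-wise binary arithmetic on array-like types. A direct vector lowering is tried
// first via lb_try_direct_vector_arith. Small arrays (total size within
// build_context.max_align) are unrolled with extractvalue and per-element stores; larger
// arrays fall back to a runtime loop built with lb_loop_start/lb_loop_end.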
lbValue lb_emit_arith_array(lbProcedure *p, TokenKind op, lbValue lhs, lbValue rhs, Type *type) {
	GB_ASSERT(is_type_array_like(lhs.type) || is_type_array_like(rhs.type));

	lhs = lb_emit_conv(p, lhs, type);
	rhs = lb_emit_conv(p, rhs, type);

	GB_ASSERT(is_type_array_like(type));
	Type *elem_type = base_array_type(type);

	i64 count = get_array_type_count(type);
	unsigned n = cast(unsigned)count;

	// NOTE(bill, 2021-06-12): Try to do a direct operation as a vector, if possible
	lbValue direct_vector_res = {};
	if (lb_try_direct_vector_arith(p, op, lhs, rhs, type, &direct_vector_res)) {
		return direct_vector_res;
	}

	bool inline_array_arith = type_size_of(type) <= build_context.max_align;
	if (inline_array_arith) {
		auto dst_ptrs = slice_make<lbValue>(temporary_allocator(), n);

		auto a_loads = slice_make<lbValue>(temporary_allocator(), n);
		auto b_loads = slice_make<lbValue>(temporary_allocator(), n);
		auto c_ops = slice_make<lbValue>(temporary_allocator(), n);

		for (unsigned i = 0; i < n; i++) {
			a_loads[i].value = LLVMBuildExtractValue(p->builder, lhs.value, i, "");
			a_loads[i].type = elem_type;
		}
		for (unsigned i = 0; i < n; i++) {
			b_loads[i].value = LLVMBuildExtractValue(p->builder, rhs.value, i, "");
			b_loads[i].type = elem_type;
		}
		for (unsigned i = 0; i < n; i++) {
			c_ops[i] = lb_emit_arith(p, op, a_loads[i], b_loads[i], elem_type);
		}

		lbAddr res = lb_add_local_generated(p, type, false);
		for (unsigned i = 0; i < n; i++) {
			dst_ptrs[i] = lb_emit_array_epi(p, res.addr, i);
		}
		for (unsigned i = 0; i < n; i++) {
			lb_emit_store(p, dst_ptrs[i], c_ops[i]);
		}
		return lb_addr_load(p, res);
	} else {
		lbValue x = lb_address_from_load_or_generate_local(p, lhs);
		lbValue y = lb_address_from_load_or_generate_local(p, rhs);

		lbAddr res = lb_add_local_generated(p, type, false);

		auto loop_data = lb_loop_start(p, cast(isize)count, t_i32);

		lbValue a_ptr = lb_emit_array_ep(p, x, loop_data.idx);
		lbValue b_ptr = lb_emit_array_ep(p, y, loop_data.idx);
		lbValue dst_ptr = lb_emit_array_ep(p, res.addr, loop_data.idx);

		lbValue a = lb_emit_load(p, a_ptr);
		lbValue b = lb_emit_load(p, b_ptr);
		lbValue c = lb_emit_arith(p, op, a, b, elem_type);

		lb_emit_store(p, dst_ptr, c);

		lb_loop_end(p, loop_data);

		return lb_addr_load(p, res);
	}
}
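
// Reports whether a matrix element type is "simple" enough for special-cased lowering;
// used by lb_emit_matrix_mul to decide whether to skip to its slow form. Complex elements,
// elements whose endianness differs from the architecture, and the f16 variants are excluded.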
bool lb_matrix_elem_simple(Type *t) {
	Type *mt = base_type(t);
	GB_ASSERT(mt->kind == Type_Matrix);

	Type *elem = core_type(mt->Matrix.elem);
	if (is_type_complex(elem)) {
		return false;
	}
	if (is_type_different_to_arch_endianness(elem)) {
		return false;
	}
	if (elem->kind == Type_Basic) {
		switch (elem->Basic.kind) {
		case Basic_f16:
		case Basic_f16le:
		case Basic_f16be:
			// TODO(bill): determine when this is fine
			return false;
		}
	}
	return true;
}
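
// Transposes a matrix value by copying element (i, j) of the source into element (j, i)
// of a stack-allocated result. A plain array operand (a row/column vector) is returned
// unchanged apart from its type.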
lbValue lb_emit_matrix_tranpose(lbProcedure *p, lbValue m, Type *type) {
	if (is_type_array(m.type)) {
		m.type = type;
		return m;
	}
	Type *mt = base_type(m.type);
	GB_ASSERT(mt->kind == Type_Matrix);

	lbAddr res = lb_add_local_generated(p, type, true);

	i64 row_count = mt->Matrix.row_count;
	i64 column_count = mt->Matrix.column_count;
	for (i64 j = 0; j < column_count; j++) {
		for (i64 i = 0; i < row_count; i++) {
			lbValue src = lb_emit_matrix_ev(p, m, i, j);
			lbValue dst = lb_emit_matrix_epi(p, res.addr, j, i);
			lb_emit_store(p, dst, src);
		}
	}
	return lb_addr_load(p, res);
}
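
// Matrix * matrix multiplication. In this listing the lb_matrix_elem_simple guard jumps
// to the same slow form that is reached by falling through, so only the slow form is
// shown: a straightforward triple loop that accumulates each destination element with
// lb_emit_mul_add (which may lower to a fused multiply-add for float elements).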
lbValue lb_emit_matrix_mul(lbProcedure *p, lbValue lhs, lbValue rhs, Type *type) {
	Type *xt = base_type(lhs.type);
	Type *yt = base_type(rhs.type);

	GB_ASSERT(is_type_matrix(type));
	GB_ASSERT(is_type_matrix(xt));
	GB_ASSERT(is_type_matrix(yt));
	GB_ASSERT(xt->Matrix.column_count == yt->Matrix.row_count);
	GB_ASSERT(are_types_identical(xt->Matrix.elem, yt->Matrix.elem));

	if (!lb_matrix_elem_simple(xt)) {
		goto slow_form;
	}

slow_form:
	{
		Type *elem = xt->Matrix.elem;

		lbAddr res = lb_add_local_generated(p, type, true);

		i64 outer_rows    = xt->Matrix.row_count;
		i64 inner         = xt->Matrix.column_count;
		i64 outer_columns = yt->Matrix.column_count;

		auto inners = slice_make<lbValue[2]>(permanent_allocator(), inner);

		for (i64 j = 0; j < outer_columns; j++) {
			for (i64 i = 0; i < outer_rows; i++) {
				lbValue dst = lb_emit_matrix_epi(p, res.addr, i, j);
				for (i64 k = 0; k < inner; k++) {
					inners[k][0] = lb_emit_matrix_ev(p, lhs, i, k);
					inners[k][1] = lb_emit_matrix_ev(p, rhs, k, j);
				}

				lbValue sum = lb_emit_load(p, dst);
				for (i64 k = 0; k < inner; k++) {
					lbValue a = inners[k][0];
					lbValue b = inners[k][1];
					sum = lb_emit_mul_add(p, a, b, sum, elem);
				}
				lb_emit_store(p, dst, sum);
			}
		}

		return lb_addr_load(p, res);
	}
}
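
// Matrix * column-vector multiplication: result[i] accumulates lhs[i][j] * rhs[j] over
// every column j, read-modify-writing each element of the zero-initialized local `res`.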
lbValue lb_emit_matrix_mul_vector(lbProcedure *p, lbValue lhs, lbValue rhs, Type *type) {
	Type *mt = base_type(lhs.type);
	Type *vt = base_type(rhs.type);
	GB_ASSERT(is_type_matrix(mt));
	GB_ASSERT(is_type_array_like(vt));

	i64 vector_count = get_array_type_count(vt);

	GB_ASSERT(mt->Matrix.column_count == vector_count);
	GB_ASSERT(are_types_identical(mt->Matrix.elem, base_array_type(vt)));

	Type *elem = mt->Matrix.elem;

	lbAddr res = lb_add_local_generated(p, type, true);

	for (i64 i = 0; i < mt->Matrix.row_count; i++) {
		for (i64 j = 0; j < mt->Matrix.column_count; j++) {
			lbValue dst = lb_emit_matrix_epi(p, res.addr, i, 0);
			lbValue d0 = lb_emit_load(p, dst);

			lbValue a = lb_emit_matrix_ev(p, lhs, i, j);
			lbValue b = lb_emit_struct_ev(p, rhs, cast(i32)j);
			lbValue c = lb_emit_arith(p, Token_Mul, a, b, elem);
			lbValue d = lb_emit_arith(p, Token_Add, d0, c, elem);

			lb_emit_store(p, dst, d);
		}
	}

	return lb_addr_load(p, res);
}
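
// Row-vector * matrix multiplication: result[j] accumulates lhs[k] * rhs[k][j] over every
// row k, mirroring lb_emit_matrix_mul_vector with the operands swapped.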
lbValue lb_emit_vector_mul_matrix(lbProcedure *p, lbValue lhs, lbValue rhs, Type *type) {
	Type *mt = base_type(rhs.type);
	Type *vt = base_type(lhs.type);
	GB_ASSERT(is_type_matrix(mt));
	GB_ASSERT(is_type_array_like(vt));

	i64 vector_count = get_array_type_count(vt);

	GB_ASSERT(mt->Matrix.row_count == vector_count);
	GB_ASSERT(are_types_identical(mt->Matrix.elem, base_array_type(vt)));

	Type *elem = mt->Matrix.elem;

	lbAddr res = lb_add_local_generated(p, type, true);

	for (i64 j = 0; j < mt->Matrix.column_count; j++) {
		for (i64 k = 0; k < mt->Matrix.row_count; k++) {
			lbValue dst = lb_emit_matrix_epi(p, res.addr, 0, j);
			lbValue d0 = lb_emit_load(p, dst);

			lbValue a = lb_emit_struct_ev(p, lhs, cast(i32)k);
			lbValue b = lb_emit_matrix_ev(p, rhs, k, j);
			lbValue c = lb_emit_arith(p, Token_Mul, a, b, elem);
			lbValue d = lb_emit_arith(p, Token_Add, d0, c, elem);

			lb_emit_store(p, dst, d);
		}
	}

	return lb_addr_load(p, res);
}
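
// Dispatches matrix arithmetic: `*` goes to the matrix*matrix, matrix*vector, or
// vector*matrix helpers above; every other operator is treated element-wise by viewing
// the matrix storage as a flat array and reusing lb_emit_arith_array.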
lbValue lb_emit_arith_matrix(lbProcedure *p, TokenKind op, lbValue lhs, lbValue rhs, Type *type) {
	GB_ASSERT(is_type_matrix(lhs.type) || is_type_matrix(rhs.type));

	Type *xt = base_type(lhs.type);
	Type *yt = base_type(rhs.type);

	if (op == Token_Mul) {
		if (xt->kind == Type_Matrix) {
			if (yt->kind == Type_Matrix) {
				return lb_emit_matrix_mul(p, lhs, rhs, type);
			} else if (is_type_array_like(yt)) {
				return lb_emit_matrix_mul_vector(p, lhs, rhs, type);
			}
		} else if (is_type_array_like(xt)) {
			GB_ASSERT(yt->kind == Type_Matrix);
			return lb_emit_vector_mul_matrix(p, lhs, rhs, type);
		}
	} else {
		GB_ASSERT(are_types_identical(xt, yt));
		GB_ASSERT(xt->kind == Type_Matrix);

		// element-wise arithmetic
		// pretend it is an array
		lbValue array_lhs = lhs;
		lbValue array_rhs = rhs;
		Type *array_type = alloc_type_array(xt->Matrix.elem, matrix_type_total_elems(xt));
		GB_ASSERT(type_size_of(array_type) == type_size_of(type));

		array_lhs.type = array_type;
		array_rhs.type = array_type;

		lbValue array = lb_emit_arith_array(p, op, array_lhs, array_rhs, array_type);
		array.type = type;
		return array;
	}

	GB_PANIC("TODO: lb_emit_arith_matrix");

	return {};
}
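
// General binary arithmetic. Array-like and matrix operands are delegated to the helpers
// above; complex and quaternion values are expanded component-wise or routed to runtime
// calls (quo_complex*, mul_quaternion*, quo_quaternion*); scalars whose endianness differs
// from the architecture are converted to the platform representation and swapped back.
// The scalar cases at the end map each Odin operator onto the corresponding LLVM
// instruction, with Odin-specific handling for %%, <<, >>, and &~.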
lbValue lb_emit_arith(lbProcedure *p, TokenKind op, lbValue lhs, lbValue rhs, Type *type) {
	if (is_type_array_like(lhs.type) || is_type_array_like(rhs.type)) {
		return lb_emit_arith_array(p, op, lhs, rhs, type);
	} else if (is_type_matrix(lhs.type) || is_type_matrix(rhs.type)) {
		return lb_emit_arith_matrix(p, op, lhs, rhs, type);
	} else if (is_type_complex(type)) {
		lhs = lb_emit_conv(p, lhs, type);
		rhs = lb_emit_conv(p, rhs, type);

		Type *ft = base_complex_elem_type(type);

		if (op == Token_Quo) {
			auto args = array_make<lbValue>(permanent_allocator(), 2);
			args[0] = lhs;
			args[1] = rhs;

			switch (type_size_of(ft)) {
			case 4: return lb_emit_runtime_call(p, "quo_complex64", args);
			case 8: return lb_emit_runtime_call(p, "quo_complex128", args);
			default: GB_PANIC("Unknown float type"); break;
			}
		}

		lbAddr res = lb_add_local_generated(p, type, false); // NOTE: initialized in full later
		lbValue a = lb_emit_struct_ev(p, lhs, 0);
		lbValue b = lb_emit_struct_ev(p, lhs, 1);
		lbValue c = lb_emit_struct_ev(p, rhs, 0);
		lbValue d = lb_emit_struct_ev(p, rhs, 1);

		lbValue real = {};
		lbValue imag = {};

		switch (op) {
		case Token_Add:
			real = lb_emit_arith(p, Token_Add, a, c, ft);
			imag = lb_emit_arith(p, Token_Add, b, d, ft);
			break;
		case Token_Sub:
			real = lb_emit_arith(p, Token_Sub, a, c, ft);
			imag = lb_emit_arith(p, Token_Sub, b, d, ft);
			break;
		case Token_Mul: {
			lbValue x = lb_emit_arith(p, Token_Mul, a, c, ft);
			lbValue y = lb_emit_arith(p, Token_Mul, b, d, ft);
			real = lb_emit_arith(p, Token_Sub, x, y, ft);
			lbValue z = lb_emit_arith(p, Token_Mul, b, c, ft);
			lbValue w = lb_emit_arith(p, Token_Mul, a, d, ft);
			imag = lb_emit_arith(p, Token_Add, z, w, ft);
			break;
		}
		}

		lb_emit_store(p, lb_emit_struct_ep(p, res.addr, 0), real);
		lb_emit_store(p, lb_emit_struct_ep(p, res.addr, 1), imag);

		return lb_addr_load(p, res);
	} else if (is_type_quaternion(type)) {
		lhs = lb_emit_conv(p, lhs, type);
		rhs = lb_emit_conv(p, rhs, type);

		Type *ft = base_complex_elem_type(type);

		if (op == Token_Add || op == Token_Sub) {
			lbAddr res = lb_add_local_generated(p, type, false); // NOTE: initialized in full later
			lbValue x0 = lb_emit_struct_ev(p, lhs, 0);
			lbValue x1 = lb_emit_struct_ev(p, lhs, 1);
			lbValue x2 = lb_emit_struct_ev(p, lhs, 2);
			lbValue x3 = lb_emit_struct_ev(p, lhs, 3);

			lbValue y0 = lb_emit_struct_ev(p, rhs, 0);
			lbValue y1 = lb_emit_struct_ev(p, rhs, 1);
			lbValue y2 = lb_emit_struct_ev(p, rhs, 2);
			lbValue y3 = lb_emit_struct_ev(p, rhs, 3);

			lbValue z0 = lb_emit_arith(p, op, x0, y0, ft);
			lbValue z1 = lb_emit_arith(p, op, x1, y1, ft);
			lbValue z2 = lb_emit_arith(p, op, x2, y2, ft);
			lbValue z3 = lb_emit_arith(p, op, x3, y3, ft);

			lb_emit_store(p, lb_emit_struct_ep(p, res.addr, 0), z0);
			lb_emit_store(p, lb_emit_struct_ep(p, res.addr, 1), z1);
			lb_emit_store(p, lb_emit_struct_ep(p, res.addr, 2), z2);
			lb_emit_store(p, lb_emit_struct_ep(p, res.addr, 3), z3);

			return lb_addr_load(p, res);
		} else if (op == Token_Mul) {
			auto args = array_make<lbValue>(permanent_allocator(), 2);
			args[0] = lhs;
			args[1] = rhs;

			switch (8*type_size_of(ft)) {
			case 32: return lb_emit_runtime_call(p, "mul_quaternion128", args);
			case 64: return lb_emit_runtime_call(p, "mul_quaternion256", args);
			default: GB_PANIC("Unknown float type"); break;
			}
		} else if (op == Token_Quo) {
			auto args = array_make<lbValue>(permanent_allocator(), 2);
			args[0] = lhs;
			args[1] = rhs;

			switch (8*type_size_of(ft)) {
			case 32: return lb_emit_runtime_call(p, "quo_quaternion128", args);
			case 64: return lb_emit_runtime_call(p, "quo_quaternion256", args);
			default: GB_PANIC("Unknown float type"); break;
			}
		}
	}

	if (is_type_integer(type) && is_type_different_to_arch_endianness(type)) {
		switch (op) {
		case Token_AndNot:
		case Token_And:
		case Token_Or:
		case Token_Xor:
			goto handle_op;
		}
		Type *platform_type = integer_endian_type_to_platform_type(type);
		lbValue x = lb_emit_byte_swap(p, lhs, integer_endian_type_to_platform_type(lhs.type));
		lbValue y = lb_emit_byte_swap(p, rhs, integer_endian_type_to_platform_type(rhs.type));

		lbValue res = lb_emit_arith(p, op, x, y, platform_type);

		return lb_emit_byte_swap(p, res, type);
	}

	if (is_type_float(type) && is_type_different_to_arch_endianness(type)) {
		Type *platform_type = integer_endian_type_to_platform_type(type);
		lbValue x = lb_emit_conv(p, lhs, integer_endian_type_to_platform_type(lhs.type));
		lbValue y = lb_emit_conv(p, rhs, integer_endian_type_to_platform_type(rhs.type));

		lbValue res = lb_emit_arith(p, op, x, y, platform_type);

		return lb_emit_byte_swap(p, res, type);
	}

handle_op:
	lhs = lb_emit_conv(p, lhs, type);
	rhs = lb_emit_conv(p, rhs, type);

	lbValue res = {};
	res.type = type;

	// NOTE(bill): Bit Set Aliases for + and -
	if (is_type_bit_set(type)) {
		switch (op) {
		case Token_Add: op = Token_Or;     break;
		case Token_Sub: op = Token_AndNot; break;
		}
	}

	Type *integral_type = type;
	if (is_type_simd_vector(integral_type)) {
		integral_type = core_array_type(integral_type);
	}

	switch (op) {
	case Token_Add:
		if (is_type_float(integral_type)) {
			res.value = LLVMBuildFAdd(p->builder, lhs.value, rhs.value, "");
			return res;
		}
		res.value = LLVMBuildAdd(p->builder, lhs.value, rhs.value, "");
		return res;
	case Token_Sub:
		if (is_type_float(integral_type)) {
			res.value = LLVMBuildFSub(p->builder, lhs.value, rhs.value, "");
			return res;
		}
		res.value = LLVMBuildSub(p->builder, lhs.value, rhs.value, "");
		return res;
	case Token_Mul:
		if (is_type_float(integral_type)) {
			res.value = LLVMBuildFMul(p->builder, lhs.value, rhs.value, "");
			return res;
		}
		res.value = LLVMBuildMul(p->builder, lhs.value, rhs.value, "");
		return res;
	case Token_Quo:
		if (is_type_float(integral_type)) {
			res.value = LLVMBuildFDiv(p->builder, lhs.value, rhs.value, "");
			return res;
		} else if (is_type_unsigned(integral_type)) {
			res.value = LLVMBuildUDiv(p->builder, lhs.value, rhs.value, "");
			return res;
		}
		res.value = LLVMBuildSDiv(p->builder, lhs.value, rhs.value, "");
		return res;
	case Token_Mod:
		if (is_type_float(integral_type)) {
			res.value = LLVMBuildFRem(p->builder, lhs.value, rhs.value, "");
			return res;
		} else if (is_type_unsigned(integral_type)) {
			res.value = LLVMBuildURem(p->builder, lhs.value, rhs.value, "");
			return res;
		}
		res.value = LLVMBuildSRem(p->builder, lhs.value, rhs.value, "");
		return res;
	case Token_ModMod:
		if (is_type_unsigned(integral_type)) {
			res.value = LLVMBuildURem(p->builder, lhs.value, rhs.value, "");
			return res;
		} else {
			LLVMValueRef a = LLVMBuildSRem(p->builder, lhs.value, rhs.value, "");
			LLVMValueRef b = LLVMBuildAdd(p->builder, a, rhs.value, "");
			LLVMValueRef c = LLVMBuildSRem(p->builder, b, rhs.value, "");
			res.value = c;
			return res;
		}
	case Token_And:
		res.value = LLVMBuildAnd(p->builder, lhs.value, rhs.value, "");
		return res;
	case Token_Or:
		res.value = LLVMBuildOr(p->builder, lhs.value, rhs.value, "");
		return res;
	case Token_Xor:
		res.value = LLVMBuildXor(p->builder, lhs.value, rhs.value, "");
		return res;
	case Token_Shl:
		{
			rhs = lb_emit_conv(p, rhs, lhs.type);
			LLVMValueRef lhsval = lhs.value;
			LLVMValueRef bits = rhs.value;

			LLVMValueRef bit_size = LLVMConstInt(lb_type(p->module, rhs.type), 8*type_size_of(lhs.type), false);

			LLVMValueRef width_test = LLVMBuildICmp(p->builder, LLVMIntULT, bits, bit_size, "");

			res.value = LLVMBuildShl(p->builder, lhsval, bits, "");
			LLVMValueRef zero = LLVMConstNull(lb_type(p->module, lhs.type));
			res.value = LLVMBuildSelect(p->builder, width_test, res.value, zero, "");
			return res;
		}
	case Token_Shr:
		{
			rhs = lb_emit_conv(p, rhs, lhs.type);
			LLVMValueRef lhsval = lhs.value;
			LLVMValueRef bits = rhs.value;
			bool is_unsigned = is_type_unsigned(integral_type);

			LLVMValueRef bit_size = LLVMConstInt(lb_type(p->module, rhs.type), 8*type_size_of(lhs.type), false);

			LLVMValueRef width_test = LLVMBuildICmp(p->builder, LLVMIntULT, bits, bit_size, "");

			if (is_unsigned) {
				res.value = LLVMBuildLShr(p->builder, lhsval, bits, "");
			} else {
				res.value = LLVMBuildAShr(p->builder, lhsval, bits, "");
			}

			LLVMValueRef zero = LLVMConstNull(lb_type(p->module, lhs.type));
			res.value = LLVMBuildSelect(p->builder, width_test, res.value, zero, "");
			return res;
		}
	case Token_AndNot:
		{
			LLVMValueRef new_rhs = LLVMBuildNot(p->builder, rhs.value, "");
			res.value = LLVMBuildAnd(p->builder, lhs.value, new_rhs, "");
			return res;
		}
		break;
	}

	GB_PANIC("unhandled operator of lb_emit_arith");

	return {};
}
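
// Builds a binary expression AST node. Matrix operands are handled first; arithmetic and
// shifts go through lb_emit_arith (with untyped constant RHS shift operands typed in
// place); comparisons go through lb_emit_comp / lb_emit_comp_against_nil; `&&`/`||` go
// through lb_emit_logical_binary_expr; `in`/`not_in` is lowered for maps and bit sets.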
lbValue lb_build_binary_expr(lbProcedure *p, Ast *expr) {
	ast_node(be, BinaryExpr, expr);

	TypeAndValue tv = type_and_value_of_expr(expr);

	if (is_type_matrix(be->left->tav.type) || is_type_matrix(be->right->tav.type)) {
		lbValue left = lb_build_expr(p, be->left);
		lbValue right = lb_build_expr(p, be->right);
		return lb_emit_arith_matrix(p, be->op.kind, left, right, default_type(tv.type));
	}

	switch (be->op.kind) {
	case Token_Add:
	case Token_Sub:
	case Token_Mul:
	case Token_Quo:
	case Token_Mod:
	case Token_ModMod:
	case Token_And:
	case Token_Or:
	case Token_Xor:
	case Token_AndNot: {
		Type *type = default_type(tv.type);
		lbValue left = lb_build_expr(p, be->left);
		lbValue right = lb_build_expr(p, be->right);
		return lb_emit_arith(p, be->op.kind, left, right, type);
	}

	case Token_Shl:
	case Token_Shr: {
		lbValue left, right;
		Type *type = default_type(tv.type);
		left = lb_build_expr(p, be->left);

		if (lb_is_expr_untyped_const(be->right)) {
			// NOTE(bill): RHS shift operands can still be untyped
			// Just bypass the standard lb_build_expr
			right = lb_expr_untyped_const_to_typed(p->module, be->right, type);
		} else {
			right = lb_build_expr(p, be->right);
		}
		return lb_emit_arith(p, be->op.kind, left, right, type);
	}

	case Token_CmpEq:
	case Token_NotEq:
		if (is_type_untyped_nil(be->right->tav.type)) {
			lbValue left = lb_build_expr(p, be->left);
			lbValue cmp = lb_emit_comp_against_nil(p, be->op.kind, left);
			Type *type = default_type(tv.type);
			return lb_emit_conv(p, cmp, type);
		} else if (is_type_untyped_nil(be->left->tav.type)) {
			lbValue right = lb_build_expr(p, be->right);
			lbValue cmp = lb_emit_comp_against_nil(p, be->op.kind, right);
			Type *type = default_type(tv.type);
			return lb_emit_conv(p, cmp, type);
		}
		/*fallthrough*/
	case Token_Lt:
	case Token_LtEq:
	case Token_Gt:
	case Token_GtEq:
		{
			lbValue left = {};
			lbValue right = {};

			if (be->left->tav.mode == Addressing_Type) {
				left = lb_typeid(p->module, be->left->tav.type);
			}
			if (be->right->tav.mode == Addressing_Type) {
				right = lb_typeid(p->module, be->right->tav.type);
			}
			if (left.value == nullptr)  left  = lb_build_expr(p, be->left);
			if (right.value == nullptr) right = lb_build_expr(p, be->right);
			lbValue cmp = lb_emit_comp(p, be->op.kind, left, right);
			Type *type = default_type(tv.type);
			return lb_emit_conv(p, cmp, type);
		}

	case Token_CmpAnd:
	case Token_CmpOr:
		return lb_emit_logical_binary_expr(p, be->op.kind, be->left, be->right, tv.type);

	case Token_in:
	case Token_not_in:
		{
			lbValue left = lb_build_expr(p, be->left);
			lbValue right = lb_build_expr(p, be->right);
			Type *rt = base_type(right.type);
			if (is_type_pointer(rt)) {
				right = lb_emit_load(p, right);
				rt = type_deref(rt);
			}

			switch (rt->kind) {
			case Type_Map:
				{
					lbValue addr = lb_address_from_load_or_generate_local(p, right);
					lbValue h = lb_gen_map_header(p, addr, rt);
					lbValue key = lb_gen_map_hash(p, left, rt->Map.key);

					auto args = array_make<lbValue>(permanent_allocator(), 2);
					args[0] = h;
					args[1] = key;

					lbValue ptr = lb_emit_runtime_call(p, "__dynamic_map_get", args);
					if (be->op.kind == Token_in) {
						return lb_emit_conv(p, lb_emit_comp_against_nil(p, Token_NotEq, ptr), t_bool);
					} else {
						return lb_emit_conv(p, lb_emit_comp_against_nil(p, Token_CmpEq, ptr), t_bool);
					}
				}
				break;
			case Type_BitSet:
				{
					Type *key_type = rt->BitSet.elem;
					GB_ASSERT(are_types_identical(left.type, key_type));

					Type *it = bit_set_to_int(rt);
					left = lb_emit_conv(p, left, it);

					lbValue lower = lb_const_value(p->module, it, exact_value_i64(rt->BitSet.lower));
					lbValue key = lb_emit_arith(p, Token_Sub, left, lower, it);
					lbValue bit = lb_emit_arith(p, Token_Shl, lb_const_int(p->module, it, 1), key, it);
					bit = lb_emit_conv(p, bit, it);

					lbValue old_value = lb_emit_transmute(p, right, it);
					lbValue new_value = lb_emit_arith(p, Token_And, old_value, bit, it);

					if (be->op.kind == Token_in) {
						return lb_emit_conv(p, lb_emit_comp(p, Token_NotEq, new_value, lb_const_int(p->module, new_value.type, 0)), t_bool);
					} else {
						return lb_emit_conv(p, lb_emit_comp(p, Token_CmpEq, new_value, lb_const_int(p->module, new_value.type, 0)), t_bool);
					}
				}
				break;
			default:
				GB_PANIC("Invalid 'in' type");
			}
			break;
		}
		break;

	default:
		GB_PANIC("Invalid binary expression");
		break;
	}

	return {};
}
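
// Emits a value conversion between Odin types, mapping each source/destination pairing
// (booleans, integers of differing size and endianness, cstring/string/pointer variants,
// floats, `any`, ...) onto the appropriate LLVM cast, transmute, or runtime call such as
// "cstring_to_string".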
  915. lbValue lb_emit_conv(lbProcedure *p, lbValue value, Type *t) {
  916. lbModule *m = p->module;
  917. t = reduce_tuple_to_single_type(t);
  918. Type *src_type = value.type;
  919. if (are_types_identical(t, src_type)) {
  920. return value;
  921. }
  922. Type *src = core_type(src_type);
  923. Type *dst = core_type(t);
  924. GB_ASSERT(src != nullptr);
  925. GB_ASSERT(dst != nullptr);
  926. if (is_type_untyped_nil(src)) {
  927. return lb_const_nil(m, t);
  928. }
  929. if (is_type_untyped_undef(src)) {
  930. return lb_const_undef(m, t);
  931. }
  932. if (LLVMIsConstant(value.value)) {
  933. if (is_type_any(dst)) {
  934. Type *st = default_type(src_type);
  935. lbAddr default_value = lb_add_local_generated(p, st, false);
  936. lb_addr_store(p, default_value, value);
  937. lbValue data = lb_emit_conv(p, default_value.addr, t_rawptr);
  938. lbValue id = lb_typeid(m, st);
  939. lbAddr res = lb_add_local_generated(p, t, false);
  940. lbValue a0 = lb_emit_struct_ep(p, res.addr, 0);
  941. lbValue a1 = lb_emit_struct_ep(p, res.addr, 1);
  942. lb_emit_store(p, a0, data);
  943. lb_emit_store(p, a1, id);
  944. return lb_addr_load(p, res);
  945. } else if (dst->kind == Type_Basic) {
  946. if (src->Basic.kind == Basic_string && dst->Basic.kind == Basic_cstring) {
  947. String str = lb_get_const_string(m, value);
  948. lbValue res = {};
  949. res.type = t;
  950. res.value = llvm_cstring(m, str);
  951. return res;
  952. }
  953. // if (is_type_float(dst)) {
  954. // return value;
  955. // } else if (is_type_integer(dst)) {
  956. // return value;
  957. // }
  958. // ExactValue ev = value->Constant.value;
  959. // if (is_type_float(dst)) {
  960. // ev = exact_value_to_float(ev);
  961. // } else if (is_type_complex(dst)) {
  962. // ev = exact_value_to_complex(ev);
  963. // } else if (is_type_quaternion(dst)) {
  964. // ev = exact_value_to_quaternion(ev);
  965. // } else if (is_type_string(dst)) {
  966. // // Handled elsewhere
  967. // GB_ASSERT_MSG(ev.kind == ExactValue_String, "%d", ev.kind);
  968. // } else if (is_type_integer(dst)) {
  969. // ev = exact_value_to_integer(ev);
  970. // } else if (is_type_pointer(dst)) {
  971. // // IMPORTANT NOTE(bill): LLVM doesn't support pointer constants expect 'null'
  972. // lbValue i = lb_add_module_constant(p->module, t_uintptr, ev);
  973. // return lb_emit(p, lb_instr_conv(p, irConv_inttoptr, i, t_uintptr, dst));
  974. // }
  975. // return lb_const_value(p->module, t, ev);
  976. }
  977. }
  978. if (are_types_identical(src, dst)) {
  979. if (!are_types_identical(src_type, t)) {
  980. return lb_emit_transmute(p, value, t);
  981. }
  982. return value;
  983. }
  984. // bool <-> llvm bool
  985. if (is_type_boolean(src) && dst == t_llvm_bool) {
  986. lbValue res = {};
  987. res.value = LLVMBuildTrunc(p->builder, value.value, lb_type(m, dst), "");
  988. res.type = dst;
  989. return res;
  990. }
  991. if (src == t_llvm_bool && is_type_boolean(dst)) {
  992. lbValue res = {};
  993. res.value = LLVMBuildZExt(p->builder, value.value, lb_type(m, dst), "");
  994. res.type = dst;
  995. return res;
  996. }
  997. // integer -> integer
  998. if (is_type_integer(src) && is_type_integer(dst)) {
  999. GB_ASSERT(src->kind == Type_Basic &&
  1000. dst->kind == Type_Basic);
  1001. i64 sz = type_size_of(default_type(src));
  1002. i64 dz = type_size_of(default_type(dst));
  1003. if (sz == dz) {
  1004. if (dz > 1 && !types_have_same_internal_endian(src, dst)) {
  1005. return lb_emit_byte_swap(p, value, t);
  1006. }
  1007. lbValue res = {};
  1008. res.value = value.value;
  1009. res.type = t;
  1010. return res;
  1011. }
  1012. if (sz > 1 && is_type_different_to_arch_endianness(src)) {
  1013. Type *platform_src_type = integer_endian_type_to_platform_type(src);
  1014. value = lb_emit_byte_swap(p, value, platform_src_type);
  1015. }
  1016. LLVMOpcode op = LLVMTrunc;
  1017. if (dz < sz) {
  1018. op = LLVMTrunc;
  1019. } else if (dz == sz) {
1020. // NOTE(bill): In LLVM, integer types carry no signedness and rely upon 2's complement
  1021. // NOTE(bill): Copy the value just for type correctness
  1022. op = LLVMBitCast;
  1023. } else if (dz > sz) {
1024. op = is_type_unsigned(src) ? LLVMZExt : LLVMSExt; // zero extend for unsigned, sign extend for signed
  1025. }
  1026. if (dz > 1 && is_type_different_to_arch_endianness(dst)) {
  1027. Type *platform_dst_type = integer_endian_type_to_platform_type(dst);
  1028. lbValue res = {};
  1029. res.value = LLVMBuildCast(p->builder, op, value.value, lb_type(m, platform_dst_type), "");
  1030. res.type = t;
  1031. return lb_emit_byte_swap(p, res, t);
  1032. } else {
  1033. lbValue res = {};
  1034. res.value = LLVMBuildCast(p->builder, op, value.value, lb_type(m, t), "");
  1035. res.type = t;
  1036. return res;
  1037. }
  1038. }
  1039. // boolean -> boolean/integer
  1040. if (is_type_boolean(src) && (is_type_boolean(dst) || is_type_integer(dst))) {
  1041. LLVMValueRef b = LLVMBuildICmp(p->builder, LLVMIntNE, value.value, LLVMConstNull(lb_type(m, value.type)), "");
  1042. lbValue res = {};
  1043. res.value = LLVMBuildIntCast2(p->builder, b, lb_type(m, t), false, "");
  1044. res.type = t;
  1045. return res;
  1046. }
  1047. if (is_type_cstring(src) && is_type_u8_ptr(dst)) {
  1048. return lb_emit_transmute(p, value, dst);
  1049. }
  1050. if (is_type_u8_ptr(src) && is_type_cstring(dst)) {
  1051. return lb_emit_transmute(p, value, dst);
  1052. }
  1053. if (is_type_cstring(src) && is_type_u8_multi_ptr(dst)) {
  1054. return lb_emit_transmute(p, value, dst);
  1055. }
  1056. if (is_type_u8_multi_ptr(src) && is_type_cstring(dst)) {
  1057. return lb_emit_transmute(p, value, dst);
  1058. }
  1059. if (is_type_cstring(src) && is_type_rawptr(dst)) {
  1060. return lb_emit_transmute(p, value, dst);
  1061. }
  1062. if (is_type_rawptr(src) && is_type_cstring(dst)) {
  1063. return lb_emit_transmute(p, value, dst);
  1064. }
  1065. if (are_types_identical(src, t_cstring) && are_types_identical(dst, t_string)) {
  1066. lbValue c = lb_emit_conv(p, value, t_cstring);
  1067. auto args = array_make<lbValue>(permanent_allocator(), 1);
  1068. args[0] = c;
  1069. lbValue s = lb_emit_runtime_call(p, "cstring_to_string", args);
  1070. return lb_emit_conv(p, s, dst);
  1071. }
  1072. // integer -> boolean
  1073. if (is_type_integer(src) && is_type_boolean(dst)) {
  1074. lbValue res = {};
  1075. res.value = LLVMBuildICmp(p->builder, LLVMIntNE, value.value, LLVMConstNull(lb_type(m, value.type)), "");
  1076. res.type = t_llvm_bool;
  1077. return lb_emit_conv(p, res, t);
  1078. }
  1079. // float -> float
  1080. if (is_type_float(src) && is_type_float(dst)) {
  1081. i64 sz = type_size_of(src);
  1082. i64 dz = type_size_of(dst);
  1083. if (dz == sz) {
  1084. if (types_have_same_internal_endian(src, dst)) {
  1085. lbValue res = {};
  1086. res.type = t;
  1087. res.value = value.value;
  1088. return res;
  1089. } else {
  1090. return lb_emit_byte_swap(p, value, t);
  1091. }
  1092. }
  1093. if (is_type_different_to_arch_endianness(src) || is_type_different_to_arch_endianness(dst)) {
  1094. Type *platform_src_type = integer_endian_type_to_platform_type(src);
  1095. Type *platform_dst_type = integer_endian_type_to_platform_type(dst);
  1096. lbValue res = {};
  1097. res = lb_emit_conv(p, value, platform_src_type);
  1098. res = lb_emit_conv(p, res, platform_dst_type);
  1099. if (is_type_different_to_arch_endianness(dst)) {
  1100. res = lb_emit_byte_swap(p, res, t);
  1101. }
  1102. return lb_emit_conv(p, res, t);
  1103. }
  1104. lbValue res = {};
  1105. res.type = t;
  1106. if (dz >= sz) {
  1107. res.value = LLVMBuildFPExt(p->builder, value.value, lb_type(m, t), "");
  1108. } else {
  1109. res.value = LLVMBuildFPTrunc(p->builder, value.value, lb_type(m, t), "");
  1110. }
  1111. return res;
  1112. }
  1113. if (is_type_complex(src) && is_type_complex(dst)) {
  1114. Type *ft = base_complex_elem_type(dst);
  1115. lbAddr gen = lb_add_local_generated(p, dst, false);
  1116. lbValue gp = lb_addr_get_ptr(p, gen);
  1117. lbValue real = lb_emit_conv(p, lb_emit_struct_ev(p, value, 0), ft);
  1118. lbValue imag = lb_emit_conv(p, lb_emit_struct_ev(p, value, 1), ft);
  1119. lb_emit_store(p, lb_emit_struct_ep(p, gp, 0), real);
  1120. lb_emit_store(p, lb_emit_struct_ep(p, gp, 1), imag);
  1121. return lb_addr_load(p, gen);
  1122. }
  1123. if (is_type_quaternion(src) && is_type_quaternion(dst)) {
  1124. // @QuaternionLayout
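// (the scalar/real component lives at index 3; the scalar -> quaternion
//  conversions below write 'real' into field 3 accordingly)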
  1125. Type *ft = base_complex_elem_type(dst);
  1126. lbAddr gen = lb_add_local_generated(p, dst, false);
  1127. lbValue gp = lb_addr_get_ptr(p, gen);
  1128. lbValue q0 = lb_emit_conv(p, lb_emit_struct_ev(p, value, 0), ft);
  1129. lbValue q1 = lb_emit_conv(p, lb_emit_struct_ev(p, value, 1), ft);
  1130. lbValue q2 = lb_emit_conv(p, lb_emit_struct_ev(p, value, 2), ft);
  1131. lbValue q3 = lb_emit_conv(p, lb_emit_struct_ev(p, value, 3), ft);
  1132. lb_emit_store(p, lb_emit_struct_ep(p, gp, 0), q0);
  1133. lb_emit_store(p, lb_emit_struct_ep(p, gp, 1), q1);
  1134. lb_emit_store(p, lb_emit_struct_ep(p, gp, 2), q2);
  1135. lb_emit_store(p, lb_emit_struct_ep(p, gp, 3), q3);
  1136. return lb_addr_load(p, gen);
  1137. }
  1138. if (is_type_integer(src) && is_type_complex(dst)) {
  1139. Type *ft = base_complex_elem_type(dst);
  1140. lbAddr gen = lb_add_local_generated(p, dst, true);
  1141. lbValue gp = lb_addr_get_ptr(p, gen);
  1142. lbValue real = lb_emit_conv(p, value, ft);
  1143. lb_emit_store(p, lb_emit_struct_ep(p, gp, 0), real);
  1144. return lb_addr_load(p, gen);
  1145. }
  1146. if (is_type_float(src) && is_type_complex(dst)) {
  1147. Type *ft = base_complex_elem_type(dst);
  1148. lbAddr gen = lb_add_local_generated(p, dst, true);
  1149. lbValue gp = lb_addr_get_ptr(p, gen);
  1150. lbValue real = lb_emit_conv(p, value, ft);
  1151. lb_emit_store(p, lb_emit_struct_ep(p, gp, 0), real);
  1152. return lb_addr_load(p, gen);
  1153. }
  1154. if (is_type_integer(src) && is_type_quaternion(dst)) {
  1155. Type *ft = base_complex_elem_type(dst);
  1156. lbAddr gen = lb_add_local_generated(p, dst, true);
  1157. lbValue gp = lb_addr_get_ptr(p, gen);
  1158. lbValue real = lb_emit_conv(p, value, ft);
  1159. // @QuaternionLayout
  1160. lb_emit_store(p, lb_emit_struct_ep(p, gp, 3), real);
  1161. return lb_addr_load(p, gen);
  1162. }
  1163. if (is_type_float(src) && is_type_quaternion(dst)) {
  1164. Type *ft = base_complex_elem_type(dst);
  1165. lbAddr gen = lb_add_local_generated(p, dst, true);
  1166. lbValue gp = lb_addr_get_ptr(p, gen);
  1167. lbValue real = lb_emit_conv(p, value, ft);
  1168. // @QuaternionLayout
  1169. lb_emit_store(p, lb_emit_struct_ep(p, gp, 3), real);
  1170. return lb_addr_load(p, gen);
  1171. }
  1172. if (is_type_complex(src) && is_type_quaternion(dst)) {
  1173. Type *ft = base_complex_elem_type(dst);
  1174. lbAddr gen = lb_add_local_generated(p, dst, true);
  1175. lbValue gp = lb_addr_get_ptr(p, gen);
  1176. lbValue real = lb_emit_conv(p, lb_emit_struct_ev(p, value, 0), ft);
  1177. lbValue imag = lb_emit_conv(p, lb_emit_struct_ev(p, value, 1), ft);
  1178. // @QuaternionLayout
  1179. lb_emit_store(p, lb_emit_struct_ep(p, gp, 3), real);
  1180. lb_emit_store(p, lb_emit_struct_ep(p, gp, 0), imag);
  1181. return lb_addr_load(p, gen);
  1182. }
  1183. // float <-> integer
  1184. if (is_type_float(src) && is_type_integer(dst)) {
  1185. if (is_type_different_to_arch_endianness(src) || is_type_different_to_arch_endianness(dst)) {
  1186. Type *platform_src_type = integer_endian_type_to_platform_type(src);
  1187. Type *platform_dst_type = integer_endian_type_to_platform_type(dst);
  1188. lbValue res = {};
  1189. res = lb_emit_conv(p, value, platform_src_type);
  1190. res = lb_emit_conv(p, res, platform_dst_type);
  1191. if (is_type_different_to_arch_endianness(dst)) {
  1192. res = lb_emit_byte_swap(p, res, platform_dst_type);
  1193. }
  1194. return lb_emit_conv(p, res, t);
  1195. }
  1196. if (is_type_integer_128bit(dst)) {
  1197. auto args = array_make<lbValue>(temporary_allocator(), 1);
  1198. args[0] = value;
  1199. char const *call = "fixunsdfdi";
  1200. if (is_type_unsigned(dst)) {
  1201. call = "fixunsdfti";
  1202. }
  1203. lbValue res_i128 = lb_emit_runtime_call(p, call, args);
  1204. return lb_emit_conv(p, res_i128, t);
  1205. }
  1206. lbValue res = {};
  1207. res.type = t;
  1208. if (is_type_unsigned(dst)) {
  1209. res.value = LLVMBuildFPToUI(p->builder, value.value, lb_type(m, t), "");
  1210. } else {
  1211. res.value = LLVMBuildFPToSI(p->builder, value.value, lb_type(m, t), "");
  1212. }
  1213. return res;
  1214. }
  1215. if (is_type_integer(src) && is_type_float(dst)) {
  1216. if (is_type_different_to_arch_endianness(src) || is_type_different_to_arch_endianness(dst)) {
  1217. Type *platform_src_type = integer_endian_type_to_platform_type(src);
  1218. Type *platform_dst_type = integer_endian_type_to_platform_type(dst);
  1219. lbValue res = {};
  1220. res = lb_emit_conv(p, value, platform_src_type);
  1221. res = lb_emit_conv(p, res, platform_dst_type);
  1222. if (is_type_different_to_arch_endianness(dst)) {
  1223. res = lb_emit_byte_swap(p, res, t);
  1224. }
  1225. return lb_emit_conv(p, res, t);
  1226. }
  1227. if (is_type_integer_128bit(src)) {
  1228. auto args = array_make<lbValue>(temporary_allocator(), 1);
  1229. args[0] = value;
  1230. char const *call = "floattidf";
  1231. if (is_type_unsigned(src)) {
  1232. call = "floattidf_unsigned";
  1233. }
  1234. lbValue res_f64 = lb_emit_runtime_call(p, call, args);
  1235. return lb_emit_conv(p, res_f64, t);
  1236. }
  1237. lbValue res = {};
  1238. res.type = t;
  1239. if (is_type_unsigned(src)) {
  1240. res.value = LLVMBuildUIToFP(p->builder, value.value, lb_type(m, t), "");
  1241. } else {
  1242. res.value = LLVMBuildSIToFP(p->builder, value.value, lb_type(m, t), "");
  1243. }
  1244. return res;
  1245. }
  1246. // Pointer <-> uintptr
  1247. if (is_type_pointer(src) && is_type_uintptr(dst)) {
  1248. lbValue res = {};
  1249. res.type = t;
  1250. res.value = LLVMBuildPtrToInt(p->builder, value.value, lb_type(m, t), "");
  1251. return res;
  1252. }
  1253. if (is_type_uintptr(src) && is_type_pointer(dst)) {
  1254. lbValue res = {};
  1255. res.type = t;
  1256. res.value = LLVMBuildIntToPtr(p->builder, value.value, lb_type(m, t), "");
  1257. return res;
  1258. }
  1259. if (is_type_multi_pointer(src) && is_type_uintptr(dst)) {
  1260. lbValue res = {};
  1261. res.type = t;
  1262. res.value = LLVMBuildPtrToInt(p->builder, value.value, lb_type(m, t), "");
  1263. return res;
  1264. }
  1265. if (is_type_uintptr(src) && is_type_multi_pointer(dst)) {
  1266. lbValue res = {};
  1267. res.type = t;
  1268. res.value = LLVMBuildIntToPtr(p->builder, value.value, lb_type(m, t), "");
  1269. return res;
  1270. }
  1271. #if 1
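// Wrap the value into a union when the source type is exactly one of the
// destination union's variants.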
  1272. if (is_type_union(dst)) {
  1273. for_array(i, dst->Union.variants) {
  1274. Type *vt = dst->Union.variants[i];
  1275. if (are_types_identical(vt, src_type)) {
  1276. lbAddr parent = lb_add_local_generated(p, t, true);
  1277. lb_emit_store_union_variant(p, parent.addr, value, vt);
  1278. return lb_addr_load(p, parent);
  1279. }
  1280. }
  1281. }
  1282. #endif
  1283. // NOTE(bill): This has to be done before 'Pointer <-> Pointer' as it's
  1284. // subtype polymorphism casting
  1285. if (check_is_assignable_to_using_subtype(src_type, t)) {
  1286. Type *st = type_deref(src_type);
  1287. st = type_deref(st);
  1288. bool st_is_ptr = is_type_pointer(src_type);
  1289. st = base_type(st);
  1290. Type *dt = t;
  1291. GB_ASSERT(is_type_struct(st) || is_type_raw_union(st));
  1292. String field_name = lookup_subtype_polymorphic_field(t, src_type);
  1293. if (field_name.len > 0) {
  1294. // NOTE(bill): It can be casted
  1295. Selection sel = lookup_field(st, field_name, false, true);
  1296. if (sel.entity != nullptr) {
  1297. if (st_is_ptr) {
  1298. lbValue res = lb_emit_deep_field_gep(p, value, sel);
  1299. Type *rt = res.type;
  1300. if (!are_types_identical(rt, dt) && are_types_identical(type_deref(rt), dt)) {
  1301. res = lb_emit_load(p, res);
  1302. }
  1303. return res;
  1304. } else {
  1305. if (is_type_pointer(value.type)) {
  1306. Type *rt = value.type;
  1307. if (!are_types_identical(rt, dt) && are_types_identical(type_deref(rt), dt)) {
  1308. value = lb_emit_load(p, value);
  1309. } else {
  1310. value = lb_emit_deep_field_gep(p, value, sel);
  1311. return lb_emit_load(p, value);
  1312. }
  1313. }
  1314. return lb_emit_deep_field_ev(p, value, sel);
  1315. }
  1316. } else {
  1317. GB_PANIC("invalid subtype cast %s.%.*s", type_to_string(src_type), LIT(field_name));
  1318. }
  1319. }
  1320. }
  1321. // Pointer <-> Pointer
  1322. if (is_type_pointer(src) && is_type_pointer(dst)) {
  1323. lbValue res = {};
  1324. res.type = t;
  1325. res.value = LLVMBuildPointerCast(p->builder, value.value, lb_type(m, t), "");
  1326. return res;
  1327. }
  1328. if (is_type_multi_pointer(src) && is_type_pointer(dst)) {
  1329. lbValue res = {};
  1330. res.type = t;
  1331. res.value = LLVMBuildPointerCast(p->builder, value.value, lb_type(m, t), "");
  1332. return res;
  1333. }
  1334. if (is_type_pointer(src) && is_type_multi_pointer(dst)) {
  1335. lbValue res = {};
  1336. res.type = t;
  1337. res.value = LLVMBuildPointerCast(p->builder, value.value, lb_type(m, t), "");
  1338. return res;
  1339. }
  1340. if (is_type_multi_pointer(src) && is_type_multi_pointer(dst)) {
  1341. lbValue res = {};
  1342. res.type = t;
  1343. res.value = LLVMBuildPointerCast(p->builder, value.value, lb_type(m, t), "");
  1344. return res;
  1345. }
  1346. // proc <-> proc
  1347. if (is_type_proc(src) && is_type_proc(dst)) {
  1348. lbValue res = {};
  1349. res.type = t;
  1350. res.value = LLVMBuildPointerCast(p->builder, value.value, lb_type(m, t), "");
  1351. return res;
  1352. }
  1353. // pointer -> proc
  1354. if (is_type_pointer(src) && is_type_proc(dst)) {
  1355. lbValue res = {};
  1356. res.type = t;
  1357. res.value = LLVMBuildPointerCast(p->builder, value.value, lb_type(m, t), "");
  1358. return res;
  1359. }
  1360. // proc -> pointer
  1361. if (is_type_proc(src) && is_type_pointer(dst)) {
  1362. lbValue res = {};
  1363. res.type = t;
  1364. res.value = LLVMBuildPointerCast(p->builder, value.value, lb_type(m, t), "");
  1365. return res;
  1366. }
  1367. // []byte/[]u8 <-> string
  1368. if (is_type_u8_slice(src) && is_type_string(dst)) {
  1369. return lb_emit_transmute(p, value, t);
  1370. }
  1371. if (is_type_string(src) && is_type_u8_slice(dst)) {
  1372. return lb_emit_transmute(p, value, t);
  1373. }
  1374. if (is_type_array_like(dst)) {
  1375. Type *elem = base_array_type(dst);
  1376. lbValue e = lb_emit_conv(p, value, elem);
1377. // NOTE(bill): Doesn't need to be zero because it will be initialized in the loop below
  1378. lbAddr v = lb_add_local_generated(p, t, false);
  1379. isize index_count = cast(isize)get_array_type_count(dst);
  1380. for (isize i = 0; i < index_count; i++) {
  1381. lbValue elem = lb_emit_array_epi(p, v.addr, i);
  1382. lb_emit_store(p, elem, e);
  1383. }
  1384. return lb_addr_load(p, v);
  1385. }
  1386. if (is_type_matrix(dst) && !is_type_matrix(src)) {
  1387. GB_ASSERT_MSG(dst->Matrix.row_count == dst->Matrix.column_count, "%s <- %s", type_to_string(dst), type_to_string(src));
  1388. Type *elem = base_array_type(dst);
  1389. lbValue e = lb_emit_conv(p, value, elem);
  1390. lbAddr v = lb_add_local_generated(p, t, false);
  1391. for (i64 i = 0; i < dst->Matrix.row_count; i++) {
  1392. isize j = cast(isize)i;
  1393. lbValue ptr = lb_emit_matrix_epi(p, v.addr, j, j);
  1394. lb_emit_store(p, ptr, e);
  1395. }
  1396. return lb_addr_load(p, v);
  1397. }
  1398. if (is_type_any(dst)) {
  1399. if (is_type_untyped_nil(src)) {
  1400. return lb_const_nil(p->module, t);
  1401. }
  1402. if (is_type_untyped_undef(src)) {
  1403. return lb_const_undef(p->module, t);
  1404. }
  1405. lbAddr result = lb_add_local_generated(p, t, true);
  1406. Type *st = default_type(src_type);
  1407. lbValue data = lb_address_from_load_or_generate_local(p, value);
  1408. GB_ASSERT_MSG(is_type_pointer(data.type), "%s", type_to_string(data.type));
  1409. GB_ASSERT_MSG(is_type_typed(st), "%s", type_to_string(st));
  1410. data = lb_emit_conv(p, data, t_rawptr);
  1411. lbValue id = lb_typeid(p->module, st);
  1412. lbValue any_data = lb_emit_struct_ep(p, result.addr, 0);
  1413. lbValue any_id = lb_emit_struct_ep(p, result.addr, 1);
  1414. lb_emit_store(p, any_data, data);
  1415. lb_emit_store(p, any_id, id);
  1416. return lb_addr_load(p, result);
  1417. }
  1418. i64 src_sz = type_size_of(src);
  1419. i64 dst_sz = type_size_of(dst);
  1420. if (src_sz == dst_sz) {
  1421. // bit_set <-> integer
  1422. if (is_type_integer(src) && is_type_bit_set(dst)) {
  1423. lbValue res = lb_emit_conv(p, value, bit_set_to_int(dst));
  1424. res.type = dst;
  1425. return res;
  1426. }
  1427. if (is_type_bit_set(src) && is_type_integer(dst)) {
  1428. lbValue bs = value;
  1429. bs.type = bit_set_to_int(src);
  1430. return lb_emit_conv(p, bs, dst);
  1431. }
  1432. // typeid <-> integer
  1433. if (is_type_integer(src) && is_type_typeid(dst)) {
  1434. return lb_emit_transmute(p, value, dst);
  1435. }
  1436. if (is_type_typeid(src) && is_type_integer(dst)) {
  1437. return lb_emit_transmute(p, value, dst);
  1438. }
  1439. }
  1440. if (is_type_untyped(src)) {
  1441. if (is_type_string(src) && is_type_string(dst)) {
  1442. lbAddr result = lb_add_local_generated(p, t, false);
  1443. lb_addr_store(p, result, value);
  1444. return lb_addr_load(p, result);
  1445. }
  1446. }
  1447. gb_printf_err("%.*s\n", LIT(p->name));
  1448. gb_printf_err("lb_emit_conv: src -> dst\n");
  1449. gb_printf_err("Not Identical %s != %s\n", type_to_string(src_type), type_to_string(t));
  1450. gb_printf_err("Not Identical %s != %s\n", type_to_string(src), type_to_string(dst));
  1451. gb_printf_err("Not Identical %p != %p\n", src_type, t);
  1452. gb_printf_err("Not Identical %p != %p\n", src, dst);
  1453. GB_PANIC("Invalid type conversion: '%s' to '%s' for procedure '%.*s'",
  1454. type_to_string(src_type), type_to_string(t),
  1455. LIT(p->name));
  1456. return {};
  1457. }
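// Equality comparison for struct/union values: trivially comparable types use
// the runtime's memory_equal on the raw bytes, everything else calls the
// type's generated equality procedure; '!=' is emitted as the negation.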
  1458. lbValue lb_compare_records(lbProcedure *p, TokenKind op_kind, lbValue left, lbValue right, Type *type) {
  1459. GB_ASSERT((is_type_struct(type) || is_type_union(type)) && is_type_comparable(type));
  1460. lbValue left_ptr = lb_address_from_load_or_generate_local(p, left);
  1461. lbValue right_ptr = lb_address_from_load_or_generate_local(p, right);
  1462. lbValue res = {};
  1463. if (is_type_simple_compare(type)) {
  1464. // TODO(bill): Test to see if this is actually faster!!!!
  1465. auto args = array_make<lbValue>(permanent_allocator(), 3);
  1466. args[0] = lb_emit_conv(p, left_ptr, t_rawptr);
  1467. args[1] = lb_emit_conv(p, right_ptr, t_rawptr);
  1468. args[2] = lb_const_int(p->module, t_int, type_size_of(type));
  1469. res = lb_emit_runtime_call(p, "memory_equal", args);
  1470. } else {
  1471. lbValue value = lb_get_equal_proc_for_type(p->module, type);
  1472. auto args = array_make<lbValue>(permanent_allocator(), 2);
  1473. args[0] = lb_emit_conv(p, left_ptr, t_rawptr);
  1474. args[1] = lb_emit_conv(p, right_ptr, t_rawptr);
  1475. res = lb_emit_call(p, value, args);
  1476. }
  1477. if (op_kind == Token_NotEq) {
  1478. res = lb_emit_unary_arith(p, Token_Not, res, res.type);
  1479. }
  1480. return res;
  1481. }
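// Emits 'left op_kind right'. Operands are first brought to a common type
// (constants are converted towards the non-constant side, otherwise the
// smaller operand is converted to the larger). Arrays compare element-wise,
// records via lb_compare_records, strings/complex/quaternions via runtime
// calls, bit_sets as subset tests, and the remaining scalar types map onto
// LLVM integer/float predicates.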
  1482. lbValue lb_emit_comp(lbProcedure *p, TokenKind op_kind, lbValue left, lbValue right) {
  1483. Type *a = core_type(left.type);
  1484. Type *b = core_type(right.type);
  1485. GB_ASSERT(gb_is_between(op_kind, Token__ComparisonBegin+1, Token__ComparisonEnd-1));
  1486. lbValue nil_check = {};
  1487. if (is_type_untyped_nil(left.type)) {
  1488. nil_check = lb_emit_comp_against_nil(p, op_kind, right);
  1489. } else if (is_type_untyped_nil(right.type)) {
  1490. nil_check = lb_emit_comp_against_nil(p, op_kind, left);
  1491. }
  1492. if (nil_check.value != nullptr) {
  1493. return nil_check;
  1494. }
  1495. if (are_types_identical(a, b)) {
  1496. // NOTE(bill): No need for a conversion
  1497. } else if (lb_is_const(left) || lb_is_const_nil(left)) {
  1498. left = lb_emit_conv(p, left, right.type);
  1499. } else if (lb_is_const(right) || lb_is_const_nil(right)) {
  1500. right = lb_emit_conv(p, right, left.type);
  1501. } else {
  1502. Type *lt = left.type;
  1503. Type *rt = right.type;
  1506. i64 ls = type_size_of(lt);
  1507. i64 rs = type_size_of(rt);
  1508. // NOTE(bill): Quick heuristic, larger types are usually the target type
  1509. if (ls < rs) {
  1510. left = lb_emit_conv(p, left, rt);
  1511. } else if (ls > rs) {
  1512. right = lb_emit_conv(p, right, lt);
  1513. } else {
  1514. if (is_type_union(rt)) {
  1515. left = lb_emit_conv(p, left, rt);
  1516. } else {
  1517. right = lb_emit_conv(p, right, lt);
  1518. }
  1519. }
  1520. }
  1521. if (is_type_array(a) || is_type_enumerated_array(a)) {
  1522. Type *tl = base_type(a);
  1523. lbValue lhs = lb_address_from_load_or_generate_local(p, left);
  1524. lbValue rhs = lb_address_from_load_or_generate_local(p, right);
  1525. TokenKind cmp_op = Token_And;
  1526. lbValue res = lb_const_bool(p->module, t_llvm_bool, true);
  1527. if (op_kind == Token_NotEq) {
  1528. res = lb_const_bool(p->module, t_llvm_bool, false);
  1529. cmp_op = Token_Or;
  1530. } else if (op_kind == Token_CmpEq) {
  1531. res = lb_const_bool(p->module, t_llvm_bool, true);
  1532. cmp_op = Token_And;
  1533. }
  1534. bool inline_array_arith = type_size_of(tl) <= build_context.max_align;
  1535. i32 count = 0;
  1536. switch (tl->kind) {
  1537. case Type_Array: count = cast(i32)tl->Array.count; break;
  1538. case Type_EnumeratedArray: count = cast(i32)tl->EnumeratedArray.count; break;
  1539. }
  1540. if (inline_array_arith) {
  1541. // inline
  1542. lbAddr val = lb_add_local_generated(p, t_bool, false);
  1543. lb_addr_store(p, val, res);
  1544. for (i32 i = 0; i < count; i++) {
  1545. lbValue x = lb_emit_load(p, lb_emit_array_epi(p, lhs, i));
  1546. lbValue y = lb_emit_load(p, lb_emit_array_epi(p, rhs, i));
  1547. lbValue cmp = lb_emit_comp(p, op_kind, x, y);
  1548. lbValue new_res = lb_emit_arith(p, cmp_op, lb_addr_load(p, val), cmp, t_bool);
  1549. lb_addr_store(p, val, lb_emit_conv(p, new_res, t_bool));
  1550. }
  1551. return lb_addr_load(p, val);
  1552. } else {
  1553. if (is_type_simple_compare(tl) && (op_kind == Token_CmpEq || op_kind == Token_NotEq)) {
  1554. // TODO(bill): Test to see if this is actually faster!!!!
  1555. auto args = array_make<lbValue>(permanent_allocator(), 3);
  1556. args[0] = lb_emit_conv(p, lhs, t_rawptr);
  1557. args[1] = lb_emit_conv(p, rhs, t_rawptr);
  1558. args[2] = lb_const_int(p->module, t_int, type_size_of(tl));
  1559. lbValue val = lb_emit_runtime_call(p, "memory_compare", args);
  1560. lbValue res = lb_emit_comp(p, op_kind, val, lb_const_nil(p->module, val.type));
  1561. return lb_emit_conv(p, res, t_bool);
  1562. } else {
  1563. lbAddr val = lb_add_local_generated(p, t_bool, false);
  1564. lb_addr_store(p, val, res);
  1565. auto loop_data = lb_loop_start(p, count, t_i32);
  1566. {
  1567. lbValue i = loop_data.idx;
  1568. lbValue x = lb_emit_load(p, lb_emit_array_ep(p, lhs, i));
  1569. lbValue y = lb_emit_load(p, lb_emit_array_ep(p, rhs, i));
  1570. lbValue cmp = lb_emit_comp(p, op_kind, x, y);
  1571. lbValue new_res = lb_emit_arith(p, cmp_op, lb_addr_load(p, val), cmp, t_bool);
  1572. lb_addr_store(p, val, lb_emit_conv(p, new_res, t_bool));
  1573. }
  1574. lb_loop_end(p, loop_data);
  1575. return lb_addr_load(p, val);
  1576. }
  1577. }
  1578. }
  1579. if ((is_type_struct(a) || is_type_union(a)) && is_type_comparable(a)) {
  1580. return lb_compare_records(p, op_kind, left, right, a);
  1581. }
  1582. if ((is_type_struct(b) || is_type_union(b)) && is_type_comparable(b)) {
  1583. return lb_compare_records(p, op_kind, left, right, b);
  1584. }
  1585. if (is_type_string(a)) {
  1586. if (is_type_cstring(a)) {
  1587. left = lb_emit_conv(p, left, t_string);
  1588. right = lb_emit_conv(p, right, t_string);
  1589. }
  1590. char const *runtime_procedure = nullptr;
  1591. switch (op_kind) {
  1592. case Token_CmpEq: runtime_procedure = "string_eq"; break;
  1593. case Token_NotEq: runtime_procedure = "string_ne"; break;
  1594. case Token_Lt: runtime_procedure = "string_lt"; break;
  1595. case Token_Gt: runtime_procedure = "string_gt"; break;
  1596. case Token_LtEq: runtime_procedure = "string_le"; break;
1597. case Token_GtEq: runtime_procedure = "string_ge"; break;
  1598. }
  1599. GB_ASSERT(runtime_procedure != nullptr);
  1600. auto args = array_make<lbValue>(permanent_allocator(), 2);
  1601. args[0] = left;
  1602. args[1] = right;
  1603. return lb_emit_runtime_call(p, runtime_procedure, args);
  1604. }
  1605. if (is_type_complex(a)) {
  1606. char const *runtime_procedure = "";
  1607. i64 sz = 8*type_size_of(a);
  1608. switch (sz) {
  1609. case 32:
  1610. switch (op_kind) {
  1611. case Token_CmpEq: runtime_procedure = "complex32_eq"; break;
  1612. case Token_NotEq: runtime_procedure = "complex32_ne"; break;
  1613. }
  1614. break;
  1615. case 64:
  1616. switch (op_kind) {
  1617. case Token_CmpEq: runtime_procedure = "complex64_eq"; break;
  1618. case Token_NotEq: runtime_procedure = "complex64_ne"; break;
  1619. }
  1620. break;
  1621. case 128:
  1622. switch (op_kind) {
  1623. case Token_CmpEq: runtime_procedure = "complex128_eq"; break;
  1624. case Token_NotEq: runtime_procedure = "complex128_ne"; break;
  1625. }
  1626. break;
  1627. }
  1628. GB_ASSERT(runtime_procedure != nullptr);
  1629. auto args = array_make<lbValue>(permanent_allocator(), 2);
  1630. args[0] = left;
  1631. args[1] = right;
  1632. return lb_emit_runtime_call(p, runtime_procedure, args);
  1633. }
  1634. if (is_type_quaternion(a)) {
  1635. char const *runtime_procedure = "";
  1636. i64 sz = 8*type_size_of(a);
  1637. switch (sz) {
  1638. case 64:
  1639. switch (op_kind) {
  1640. case Token_CmpEq: runtime_procedure = "quaternion64_eq"; break;
  1641. case Token_NotEq: runtime_procedure = "quaternion64_ne"; break;
  1642. }
  1643. break;
  1644. case 128:
  1645. switch (op_kind) {
  1646. case Token_CmpEq: runtime_procedure = "quaternion128_eq"; break;
  1647. case Token_NotEq: runtime_procedure = "quaternion128_ne"; break;
  1648. }
  1649. break;
  1650. case 256:
  1651. switch (op_kind) {
  1652. case Token_CmpEq: runtime_procedure = "quaternion256_eq"; break;
  1653. case Token_NotEq: runtime_procedure = "quaternion256_ne"; break;
  1654. }
  1655. break;
  1656. }
  1657. GB_ASSERT(runtime_procedure != nullptr);
  1658. auto args = array_make<lbValue>(permanent_allocator(), 2);
  1659. args[0] = left;
  1660. args[1] = right;
  1661. return lb_emit_runtime_call(p, runtime_procedure, args);
  1662. }
  1663. if (is_type_bit_set(a)) {
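// Ordered comparisons on bit_sets are subset tests:
//   a <= b  iff  (a & b) == a,    a >= b  iff  (a & b) == b,
// and '<' / '>' additionally exclude the case a == b.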
  1664. switch (op_kind) {
  1665. case Token_Lt:
  1666. case Token_LtEq:
  1667. case Token_Gt:
  1668. case Token_GtEq:
  1669. {
  1670. Type *it = bit_set_to_int(a);
  1671. lbValue lhs = lb_emit_transmute(p, left, it);
  1672. lbValue rhs = lb_emit_transmute(p, right, it);
  1673. lbValue res = lb_emit_arith(p, Token_And, lhs, rhs, it);
  1674. if (op_kind == Token_Lt || op_kind == Token_LtEq) {
  1675. // (lhs & rhs) == lhs
  1676. res.value = LLVMBuildICmp(p->builder, LLVMIntEQ, res.value, lhs.value, "");
  1677. res.type = t_llvm_bool;
  1678. } else if (op_kind == Token_Gt || op_kind == Token_GtEq) {
  1679. // (lhs & rhs) == rhs
  1680. res.value = LLVMBuildICmp(p->builder, LLVMIntEQ, res.value, rhs.value, "");
  1681. res.type = t_llvm_bool;
  1682. }
  1683. // NOTE(bill): Strict subsets
  1684. if (op_kind == Token_Lt || op_kind == Token_Gt) {
  1685. // res &~ (lhs == rhs)
  1686. lbValue eq = {};
  1687. eq.value = LLVMBuildICmp(p->builder, LLVMIntEQ, lhs.value, rhs.value, "");
  1688. eq.type = t_llvm_bool;
  1689. res = lb_emit_arith(p, Token_AndNot, res, eq, t_llvm_bool);
  1690. }
  1691. return res;
  1692. }
  1693. case Token_CmpEq:
  1694. case Token_NotEq:
  1695. {
  1696. LLVMIntPredicate pred = {};
  1697. switch (op_kind) {
  1698. case Token_CmpEq: pred = LLVMIntEQ; break;
  1699. case Token_NotEq: pred = LLVMIntNE; break;
  1700. }
  1701. lbValue res = {};
  1702. res.type = t_llvm_bool;
  1703. res.value = LLVMBuildICmp(p->builder, pred, left.value, right.value, "");
  1704. return res;
  1705. }
  1706. }
  1707. }
  1708. if (op_kind != Token_CmpEq && op_kind != Token_NotEq) {
  1709. Type *t = left.type;
  1710. if (is_type_integer(t) && is_type_different_to_arch_endianness(t)) {
  1711. Type *platform_type = integer_endian_type_to_platform_type(t);
  1712. lbValue x = lb_emit_byte_swap(p, left, platform_type);
  1713. lbValue y = lb_emit_byte_swap(p, right, platform_type);
  1714. left = x;
  1715. right = y;
  1716. } else if (is_type_float(t) && is_type_different_to_arch_endianness(t)) {
  1717. Type *platform_type = integer_endian_type_to_platform_type(t);
  1718. lbValue x = lb_emit_conv(p, left, platform_type);
  1719. lbValue y = lb_emit_conv(p, right, platform_type);
  1720. left = x;
  1721. right = y;
  1722. }
  1723. }
  1724. a = core_type(left.type);
  1725. b = core_type(right.type);
  1726. lbValue res = {};
  1727. res.type = t_llvm_bool;
  1728. if (is_type_integer(a) ||
  1729. is_type_boolean(a) ||
  1730. is_type_pointer(a) ||
  1731. is_type_multi_pointer(a) ||
  1732. is_type_proc(a) ||
  1733. is_type_enum(a)) {
  1734. LLVMIntPredicate pred = {};
  1735. if (is_type_unsigned(left.type)) {
  1736. switch (op_kind) {
  1737. case Token_Gt: pred = LLVMIntUGT; break;
  1738. case Token_GtEq: pred = LLVMIntUGE; break;
  1739. case Token_Lt: pred = LLVMIntULT; break;
  1740. case Token_LtEq: pred = LLVMIntULE; break;
  1741. }
  1742. } else {
  1743. switch (op_kind) {
  1744. case Token_Gt: pred = LLVMIntSGT; break;
  1745. case Token_GtEq: pred = LLVMIntSGE; break;
  1746. case Token_Lt: pred = LLVMIntSLT; break;
  1747. case Token_LtEq: pred = LLVMIntSLE; break;
  1748. }
  1749. }
  1750. switch (op_kind) {
  1751. case Token_CmpEq: pred = LLVMIntEQ; break;
  1752. case Token_NotEq: pred = LLVMIntNE; break;
  1753. }
  1754. LLVMValueRef lhs = left.value;
  1755. LLVMValueRef rhs = right.value;
  1756. if (LLVMTypeOf(lhs) != LLVMTypeOf(rhs)) {
  1757. if (lb_is_type_kind(LLVMTypeOf(lhs), LLVMPointerTypeKind)) {
  1758. rhs = LLVMBuildPointerCast(p->builder, rhs, LLVMTypeOf(lhs), "");
  1759. }
  1760. }
  1761. res.value = LLVMBuildICmp(p->builder, pred, lhs, rhs, "");
  1762. } else if (is_type_float(a)) {
  1763. LLVMRealPredicate pred = {};
  1764. switch (op_kind) {
  1765. case Token_CmpEq: pred = LLVMRealOEQ; break;
  1766. case Token_Gt: pred = LLVMRealOGT; break;
  1767. case Token_GtEq: pred = LLVMRealOGE; break;
  1768. case Token_Lt: pred = LLVMRealOLT; break;
  1769. case Token_LtEq: pred = LLVMRealOLE; break;
  1770. case Token_NotEq: pred = LLVMRealONE; break;
  1771. }
  1772. res.value = LLVMBuildFCmp(p->builder, pred, left.value, right.value, "");
  1773. } else if (is_type_typeid(a)) {
  1774. LLVMIntPredicate pred = {};
  1775. switch (op_kind) {
  1776. case Token_Gt: pred = LLVMIntUGT; break;
  1777. case Token_GtEq: pred = LLVMIntUGE; break;
  1778. case Token_Lt: pred = LLVMIntULT; break;
  1779. case Token_LtEq: pred = LLVMIntULE; break;
  1780. case Token_CmpEq: pred = LLVMIntEQ; break;
  1781. case Token_NotEq: pred = LLVMIntNE; break;
  1782. }
  1783. res.value = LLVMBuildICmp(p->builder, pred, left.value, right.value, "");
  1784. } else {
  1785. GB_PANIC("Unhandled comparison kind %s (%s) %.*s %s (%s)", type_to_string(left.type), type_to_string(base_type(left.type)), LIT(token_strings[op_kind]), type_to_string(right.type), type_to_string(base_type(right.type)));
  1786. }
  1787. return res;
  1788. }
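// Emits a comparison of 'x' against 'nil', choosing what "nil" means per type:
// a null pointer for pointer-like and cstring values, a nil data pointer (or
// type id) for any/slice/dynamic array values, a nil internal data pointer for
// maps, a zero tag for unions, and an all-zero value for structs with a nil state.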
  1789. lbValue lb_emit_comp_against_nil(lbProcedure *p, TokenKind op_kind, lbValue x) {
  1790. lbValue res = {};
  1791. res.type = t_llvm_bool;
  1792. Type *t = x.type;
  1793. Type *bt = base_type(t);
  1794. TypeKind type_kind = bt->kind;
  1795. switch (type_kind) {
  1796. case Type_Basic:
  1797. switch (bt->Basic.kind) {
  1798. case Basic_rawptr:
  1799. case Basic_cstring:
  1800. if (op_kind == Token_CmpEq) {
  1801. res.value = LLVMBuildIsNull(p->builder, x.value, "");
  1802. } else if (op_kind == Token_NotEq) {
  1803. res.value = LLVMBuildIsNotNull(p->builder, x.value, "");
  1804. }
  1805. return res;
  1806. case Basic_any:
  1807. {
  1808. // TODO(bill): is this correct behaviour for nil comparison for any?
  1809. lbValue data = lb_emit_struct_ev(p, x, 0);
  1810. lbValue ti = lb_emit_struct_ev(p, x, 1);
  1811. if (op_kind == Token_CmpEq) {
  1812. LLVMValueRef a = LLVMBuildIsNull(p->builder, data.value, "");
  1813. LLVMValueRef b = LLVMBuildIsNull(p->builder, ti.value, "");
  1814. res.value = LLVMBuildOr(p->builder, a, b, "");
  1815. return res;
  1816. } else if (op_kind == Token_NotEq) {
  1817. LLVMValueRef a = LLVMBuildIsNotNull(p->builder, data.value, "");
  1818. LLVMValueRef b = LLVMBuildIsNotNull(p->builder, ti.value, "");
  1819. res.value = LLVMBuildAnd(p->builder, a, b, "");
  1820. return res;
  1821. }
  1822. }
  1823. break;
  1824. case Basic_typeid:
  1825. lbValue invalid_typeid = lb_const_value(p->module, t_typeid, exact_value_i64(0));
  1826. return lb_emit_comp(p, op_kind, x, invalid_typeid);
  1827. }
  1828. break;
  1829. case Type_Enum:
  1830. case Type_Pointer:
  1831. case Type_MultiPointer:
  1832. case Type_Proc:
  1833. case Type_BitSet:
  1834. if (op_kind == Token_CmpEq) {
  1835. res.value = LLVMBuildIsNull(p->builder, x.value, "");
  1836. } else if (op_kind == Token_NotEq) {
  1837. res.value = LLVMBuildIsNotNull(p->builder, x.value, "");
  1838. }
  1839. return res;
  1840. case Type_Slice:
  1841. {
  1842. lbValue data = lb_emit_struct_ev(p, x, 0);
  1843. if (op_kind == Token_CmpEq) {
  1844. res.value = LLVMBuildIsNull(p->builder, data.value, "");
  1845. return res;
  1846. } else if (op_kind == Token_NotEq) {
  1847. res.value = LLVMBuildIsNotNull(p->builder, data.value, "");
  1848. return res;
  1849. }
  1850. }
  1851. break;
  1852. case Type_DynamicArray:
  1853. {
  1854. lbValue data = lb_emit_struct_ev(p, x, 0);
  1855. if (op_kind == Token_CmpEq) {
  1856. res.value = LLVMBuildIsNull(p->builder, data.value, "");
  1857. return res;
  1858. } else if (op_kind == Token_NotEq) {
  1859. res.value = LLVMBuildIsNotNull(p->builder, data.value, "");
  1860. return res;
  1861. }
  1862. }
  1863. break;
  1864. case Type_Map:
  1865. {
  1866. lbValue map_ptr = lb_address_from_load_or_generate_local(p, x);
  1867. unsigned indices[2] = {0, 0};
  1868. lbValue hashes_data = lb_emit_struct_ep(p, map_ptr, 0);
  1869. lbValue hashes_data_ptr_ptr = lb_emit_struct_ep(p, hashes_data, 0);
  1870. LLVMValueRef hashes_data_ptr = LLVMBuildLoad(p->builder, hashes_data_ptr_ptr.value, "");
  1871. if (op_kind == Token_CmpEq) {
  1872. res.value = LLVMBuildIsNull(p->builder, hashes_data_ptr, "");
  1873. return res;
  1874. } else {
  1875. res.value = LLVMBuildIsNotNull(p->builder, hashes_data_ptr, "");
  1876. return res;
  1877. }
  1878. }
  1879. break;
  1880. case Type_Union:
  1881. {
  1882. if (type_size_of(t) == 0) {
  1883. if (op_kind == Token_CmpEq) {
  1884. return lb_const_bool(p->module, t_llvm_bool, true);
  1885. } else if (op_kind == Token_NotEq) {
  1886. return lb_const_bool(p->module, t_llvm_bool, false);
  1887. }
  1888. } else if (is_type_union_maybe_pointer(t)) {
  1889. lbValue tag = lb_emit_transmute(p, x, t_rawptr);
  1890. return lb_emit_comp_against_nil(p, op_kind, tag);
  1891. } else {
  1892. lbValue tag = lb_emit_union_tag_value(p, x);
  1893. return lb_emit_comp(p, op_kind, tag, lb_zero(p->module, tag.type));
  1894. }
  1895. }
  1896. case Type_Struct:
  1897. if (is_type_soa_struct(t)) {
  1898. Type *bt = base_type(t);
  1899. if (bt->Struct.soa_kind == StructSoa_Slice) {
  1900. LLVMValueRef the_value = {};
  1901. if (bt->Struct.fields.count == 0) {
  1902. lbValue len = lb_soa_struct_len(p, x);
  1903. the_value = len.value;
  1904. } else {
  1905. lbValue first_field = lb_emit_struct_ev(p, x, 0);
  1906. the_value = first_field.value;
  1907. }
  1908. if (op_kind == Token_CmpEq) {
  1909. res.value = LLVMBuildIsNull(p->builder, the_value, "");
  1910. return res;
  1911. } else if (op_kind == Token_NotEq) {
  1912. res.value = LLVMBuildIsNotNull(p->builder, the_value, "");
  1913. return res;
  1914. }
  1915. } else if (bt->Struct.soa_kind == StructSoa_Dynamic) {
  1916. LLVMValueRef the_value = {};
  1917. if (bt->Struct.fields.count == 0) {
  1918. lbValue cap = lb_soa_struct_cap(p, x);
  1919. the_value = cap.value;
  1920. } else {
  1921. lbValue first_field = lb_emit_struct_ev(p, x, 0);
  1922. the_value = first_field.value;
  1923. }
  1924. if (op_kind == Token_CmpEq) {
  1925. res.value = LLVMBuildIsNull(p->builder, the_value, "");
  1926. return res;
  1927. } else if (op_kind == Token_NotEq) {
  1928. res.value = LLVMBuildIsNotNull(p->builder, the_value, "");
  1929. return res;
  1930. }
  1931. }
  1932. } else if (is_type_struct(t) && type_has_nil(t)) {
  1933. auto args = array_make<lbValue>(permanent_allocator(), 2);
  1934. lbValue lhs = lb_address_from_load_or_generate_local(p, x);
  1935. args[0] = lb_emit_conv(p, lhs, t_rawptr);
  1936. args[1] = lb_const_int(p->module, t_int, type_size_of(t));
  1937. lbValue val = lb_emit_runtime_call(p, "memory_compare_zero", args);
  1938. lbValue res = lb_emit_comp(p, op_kind, val, lb_const_int(p->module, t_int, 0));
  1939. return res;
  1940. }
  1941. break;
  1942. }
  1943. GB_PANIC("Unknown handled type: %s -> %s", type_to_string(t), type_to_string(bt));
  1944. return {};
  1945. }
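// Builds '&expr'. Handles the special forms: '&map[key]' with a (ptr, ok)
// tuple result, '&' of a compound literal (materialised into a local, or a
// global when inside the startup procedure), and '&expr.(T)' type assertions;
// any other operand falls through to lb_build_addr_ptr.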
  1946. lbValue lb_build_unary_and(lbProcedure *p, Ast *expr) {
  1947. ast_node(ue, UnaryExpr, expr);
  1948. auto tv = type_and_value_of_expr(expr);
  1949. Ast *ue_expr = unparen_expr(ue->expr);
  1950. if (ue_expr->kind == Ast_IndexExpr && tv.mode == Addressing_OptionalOkPtr && is_type_tuple(tv.type)) {
  1951. Type *tuple = tv.type;
  1952. Type *map_type = type_of_expr(ue_expr->IndexExpr.expr);
  1953. Type *ot = base_type(map_type);
  1954. Type *t = base_type(type_deref(ot));
  1955. bool deref = t != ot;
  1956. GB_ASSERT(t->kind == Type_Map);
  1957. ast_node(ie, IndexExpr, ue_expr);
  1958. lbValue map_val = lb_build_addr_ptr(p, ie->expr);
  1959. if (deref) {
  1960. map_val = lb_emit_load(p, map_val);
  1961. }
  1962. lbValue key = lb_build_expr(p, ie->index);
  1963. key = lb_emit_conv(p, key, t->Map.key);
  1964. lbAddr addr = lb_addr_map(map_val, key, t, alloc_type_pointer(t->Map.value));
  1965. lbValue ptr = lb_addr_get_ptr(p, addr);
  1966. lbValue ok = lb_emit_comp_against_nil(p, Token_NotEq, ptr);
  1967. ok = lb_emit_conv(p, ok, tuple->Tuple.variables[1]->type);
  1968. lbAddr res = lb_add_local_generated(p, tuple, false);
  1969. lbValue gep0 = lb_emit_struct_ep(p, res.addr, 0);
  1970. lbValue gep1 = lb_emit_struct_ep(p, res.addr, 1);
  1971. lb_emit_store(p, gep0, ptr);
  1972. lb_emit_store(p, gep1, ok);
  1973. return lb_addr_load(p, res);
1974. } else if (ue_expr->kind == Ast_CompoundLit) {
  1975. lbValue v = lb_build_expr(p, ue->expr);
  1976. Type *type = v.type;
  1977. lbAddr addr = {};
  1978. if (p->is_startup) {
  1979. addr = lb_add_global_generated(p->module, type, v);
  1980. } else {
  1981. addr = lb_add_local_generated(p, type, false);
  1982. }
  1983. lb_addr_store(p, addr, v);
  1984. return addr.addr;
  1985. } else if (ue_expr->kind == Ast_TypeAssertion) {
  1986. if (is_type_tuple(tv.type)) {
  1987. Type *tuple = tv.type;
  1988. Type *ptr_type = tuple->Tuple.variables[0]->type;
  1989. Type *ok_type = tuple->Tuple.variables[1]->type;
  1990. ast_node(ta, TypeAssertion, ue_expr);
  1991. TokenPos pos = ast_token(expr).pos;
  1992. Type *type = type_of_expr(ue_expr);
  1993. GB_ASSERT(!is_type_tuple(type));
  1994. lbValue e = lb_build_expr(p, ta->expr);
  1995. Type *t = type_deref(e.type);
  1996. if (is_type_union(t)) {
  1997. lbValue v = e;
  1998. if (!is_type_pointer(v.type)) {
  1999. v = lb_address_from_load_or_generate_local(p, v);
  2000. }
  2001. Type *src_type = type_deref(v.type);
  2002. Type *dst_type = type;
  2003. lbValue src_tag = {};
  2004. lbValue dst_tag = {};
  2005. if (is_type_union_maybe_pointer(src_type)) {
  2006. src_tag = lb_emit_comp_against_nil(p, Token_NotEq, v);
  2007. dst_tag = lb_const_bool(p->module, t_bool, true);
  2008. } else {
  2009. src_tag = lb_emit_load(p, lb_emit_union_tag_ptr(p, v));
  2010. dst_tag = lb_const_union_tag(p->module, src_type, dst_type);
  2011. }
  2012. lbValue ok = lb_emit_comp(p, Token_CmpEq, src_tag, dst_tag);
  2013. lbValue data_ptr = lb_emit_conv(p, v, ptr_type);
  2014. lbAddr res = lb_add_local_generated(p, tuple, true);
  2015. lbValue gep0 = lb_emit_struct_ep(p, res.addr, 0);
  2016. lbValue gep1 = lb_emit_struct_ep(p, res.addr, 1);
  2017. lb_emit_store(p, gep0, lb_emit_select(p, ok, data_ptr, lb_const_nil(p->module, ptr_type)));
  2018. lb_emit_store(p, gep1, lb_emit_conv(p, ok, ok_type));
  2019. return lb_addr_load(p, res);
  2020. } else if (is_type_any(t)) {
  2021. lbValue v = e;
  2022. if (is_type_pointer(v.type)) {
  2023. v = lb_emit_load(p, v);
  2024. }
  2025. lbValue data_ptr = lb_emit_conv(p, lb_emit_struct_ev(p, v, 0), ptr_type);
  2026. lbValue any_id = lb_emit_struct_ev(p, v, 1);
  2027. lbValue id = lb_typeid(p->module, type);
  2028. lbValue ok = lb_emit_comp(p, Token_CmpEq, any_id, id);
  2029. lbAddr res = lb_add_local_generated(p, tuple, false);
  2030. lbValue gep0 = lb_emit_struct_ep(p, res.addr, 0);
  2031. lbValue gep1 = lb_emit_struct_ep(p, res.addr, 1);
  2032. lb_emit_store(p, gep0, lb_emit_select(p, ok, data_ptr, lb_const_nil(p->module, ptr_type)));
  2033. lb_emit_store(p, gep1, lb_emit_conv(p, ok, ok_type));
  2034. return lb_addr_load(p, res);
  2035. } else {
  2036. GB_PANIC("TODO(bill): type assertion %s", type_to_string(type));
  2037. }
  2038. } else {
  2039. GB_ASSERT(is_type_pointer(tv.type));
  2040. ast_node(ta, TypeAssertion, ue_expr);
  2041. TokenPos pos = ast_token(expr).pos;
  2042. Type *type = type_of_expr(ue_expr);
  2043. GB_ASSERT(!is_type_tuple(type));
  2044. lbValue e = lb_build_expr(p, ta->expr);
  2045. Type *t = type_deref(e.type);
  2046. if (is_type_union(t)) {
  2047. lbValue v = e;
  2048. if (!is_type_pointer(v.type)) {
  2049. v = lb_address_from_load_or_generate_local(p, v);
  2050. }
  2051. Type *src_type = type_deref(v.type);
  2052. Type *dst_type = type;
  2053. lbValue src_tag = {};
  2054. lbValue dst_tag = {};
  2055. if (is_type_union_maybe_pointer(src_type)) {
  2056. src_tag = lb_emit_comp_against_nil(p, Token_NotEq, v);
  2057. dst_tag = lb_const_bool(p->module, t_bool, true);
  2058. } else {
  2059. src_tag = lb_emit_load(p, lb_emit_union_tag_ptr(p, v));
  2060. dst_tag = lb_const_union_tag(p->module, src_type, dst_type);
  2061. }
  2062. lbValue ok = lb_emit_comp(p, Token_CmpEq, src_tag, dst_tag);
  2063. auto args = array_make<lbValue>(permanent_allocator(), 6);
  2064. args[0] = ok;
  2065. args[1] = lb_find_or_add_entity_string(p->module, get_file_path_string(pos.file_id));
  2066. args[2] = lb_const_int(p->module, t_i32, pos.line);
  2067. args[3] = lb_const_int(p->module, t_i32, pos.column);
  2068. args[4] = lb_typeid(p->module, src_type);
  2069. args[5] = lb_typeid(p->module, dst_type);
  2070. lb_emit_runtime_call(p, "type_assertion_check", args);
  2071. lbValue data_ptr = v;
  2072. return lb_emit_conv(p, data_ptr, tv.type);
  2073. } else if (is_type_any(t)) {
  2074. lbValue v = e;
  2075. if (is_type_pointer(v.type)) {
  2076. v = lb_emit_load(p, v);
  2077. }
  2078. lbValue data_ptr = lb_emit_struct_ev(p, v, 0);
  2079. lbValue any_id = lb_emit_struct_ev(p, v, 1);
  2080. lbValue id = lb_typeid(p->module, type);
  2081. lbValue ok = lb_emit_comp(p, Token_CmpEq, any_id, id);
  2082. auto args = array_make<lbValue>(permanent_allocator(), 6);
  2083. args[0] = ok;
  2084. args[1] = lb_find_or_add_entity_string(p->module, get_file_path_string(pos.file_id));
  2085. args[2] = lb_const_int(p->module, t_i32, pos.line);
  2086. args[3] = lb_const_int(p->module, t_i32, pos.column);
  2087. args[4] = any_id;
  2088. args[5] = id;
  2089. lb_emit_runtime_call(p, "type_assertion_check", args);
  2090. return lb_emit_conv(p, data_ptr, tv.type);
  2091. } else {
  2092. GB_PANIC("TODO(bill): type assertion %s", type_to_string(type));
  2093. }
  2094. }
  2095. }
  2096. return lb_build_addr_ptr(p, ue->expr);
  2097. }
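// Builds an expression as an rvalue. Bounds-check state flags are pushed for
// the duration of the expression, constant expressions are emitted directly
// via lb_const_value, and everything else dispatches on the AST node kind.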
  2098. lbValue lb_build_expr(lbProcedure *p, Ast *expr) {
  2099. lbModule *m = p->module;
  2100. u16 prev_state_flags = p->state_flags;
  2101. defer (p->state_flags = prev_state_flags);
  2102. if (expr->state_flags != 0) {
  2103. u16 in = expr->state_flags;
  2104. u16 out = p->state_flags;
  2105. if (in & StateFlag_bounds_check) {
  2106. out |= StateFlag_bounds_check;
  2107. out &= ~StateFlag_no_bounds_check;
  2108. } else if (in & StateFlag_no_bounds_check) {
  2109. out |= StateFlag_no_bounds_check;
  2110. out &= ~StateFlag_bounds_check;
  2111. }
  2112. p->state_flags = out;
  2113. }
  2114. expr = unparen_expr(expr);
  2115. TokenPos expr_pos = ast_token(expr).pos;
  2116. TypeAndValue tv = type_and_value_of_expr(expr);
  2117. GB_ASSERT_MSG(tv.mode != Addressing_Invalid, "invalid expression '%s' (tv.mode = %d, tv.type = %s) @ %s\n Current Proc: %.*s : %s", expr_to_string(expr), tv.mode, type_to_string(tv.type), token_pos_to_string(expr_pos), LIT(p->name), type_to_string(p->type));
  2118. if (tv.value.kind != ExactValue_Invalid) {
2119. // NOTE(bill): The commented-out code below is for debug purposes only
  2120. // GB_ASSERT_MSG(!is_type_untyped(tv.type), "%s @ %s\n%s", type_to_string(tv.type), token_pos_to_string(expr_pos), expr_to_string(expr));
  2121. // if (is_type_untyped(tv.type)) {
  2122. // gb_printf_err("%s %s\n", token_pos_to_string(expr_pos), expr_to_string(expr));
  2123. // }
2124. // NOTE(bill): Short-circuit on constant values
  2125. return lb_const_value(p->module, tv.type, tv.value);
  2126. }
  2127. #if 0
  2128. LLVMMetadataRef prev_debug_location = nullptr;
  2129. if (p->debug_info != nullptr) {
  2130. prev_debug_location = LLVMGetCurrentDebugLocation2(p->builder);
  2131. LLVMSetCurrentDebugLocation2(p->builder, lb_debug_location_from_ast(p, expr));
  2132. }
  2133. defer (if (prev_debug_location != nullptr) {
  2134. LLVMSetCurrentDebugLocation2(p->builder, prev_debug_location);
  2135. });
  2136. #endif
  2137. switch (expr->kind) {
  2138. case_ast_node(bl, BasicLit, expr);
  2139. TokenPos pos = bl->token.pos;
  2140. GB_PANIC("Non-constant basic literal %s - %.*s", token_pos_to_string(pos), LIT(token_strings[bl->token.kind]));
  2141. case_end;
  2142. case_ast_node(bd, BasicDirective, expr);
  2143. TokenPos pos = bd->token.pos;
  2144. GB_PANIC("Non-constant basic literal %s - %.*s", token_pos_to_string(pos), LIT(bd->name.string));
  2145. case_end;
  2146. case_ast_node(i, Implicit, expr);
  2147. return lb_addr_load(p, lb_build_addr(p, expr));
  2148. case_end;
  2149. case_ast_node(u, Undef, expr)
  2150. lbValue res = {};
  2151. if (is_type_untyped(tv.type)) {
  2152. res.value = nullptr;
  2153. res.type = t_untyped_undef;
  2154. } else {
  2155. res.value = LLVMGetUndef(lb_type(m, tv.type));
  2156. res.type = tv.type;
  2157. }
  2158. return res;
  2159. case_end;
  2160. case_ast_node(i, Ident, expr);
  2161. Entity *e = entity_from_expr(expr);
  2162. e = strip_entity_wrapping(e);
  2163. GB_ASSERT_MSG(e != nullptr, "%s", expr_to_string(expr));
  2164. if (e->kind == Entity_Builtin) {
  2165. Token token = ast_token(expr);
  2166. GB_PANIC("TODO(bill): lb_build_expr Entity_Builtin '%.*s'\n"
  2167. "\t at %s", LIT(builtin_procs[e->Builtin.id].name),
  2168. token_pos_to_string(token.pos));
  2169. return {};
  2170. } else if (e->kind == Entity_Nil) {
  2171. lbValue res = {};
  2172. res.value = nullptr;
  2173. res.type = e->type;
  2174. return res;
  2175. }
  2176. GB_ASSERT(e->kind != Entity_ProcGroup);
  2177. return lb_find_ident(p, m, e, expr);
  2178. case_end;
  2179. case_ast_node(de, DerefExpr, expr);
  2180. return lb_addr_load(p, lb_build_addr(p, expr));
  2181. case_end;
  2182. case_ast_node(se, SelectorExpr, expr);
  2183. TypeAndValue tav = type_and_value_of_expr(expr);
  2184. GB_ASSERT(tav.mode != Addressing_Invalid);
  2185. return lb_addr_load(p, lb_build_addr(p, expr));
  2186. case_end;
  2187. case_ast_node(ise, ImplicitSelectorExpr, expr);
  2188. TypeAndValue tav = type_and_value_of_expr(expr);
  2189. GB_ASSERT(tav.mode == Addressing_Constant);
  2190. return lb_const_value(p->module, tv.type, tv.value);
  2191. case_end;
  2192. case_ast_node(se, SelectorCallExpr, expr);
  2193. GB_ASSERT(se->modified_call);
  2194. TypeAndValue tav = type_and_value_of_expr(expr);
  2195. GB_ASSERT(tav.mode != Addressing_Invalid);
  2196. lbValue res = lb_build_call_expr(p, se->call);
  2197. ast_node(ce, CallExpr, se->call);
  2198. ce->sce_temp_data = gb_alloc_copy(permanent_allocator(), &res, gb_size_of(res));
  2199. return res;
  2200. case_end;
  2201. case_ast_node(te, TernaryIfExpr, expr);
  2202. LLVMValueRef incoming_values[2] = {};
  2203. LLVMBasicBlockRef incoming_blocks[2] = {};
  2204. GB_ASSERT(te->y != nullptr);
  2205. lbBlock *then = lb_create_block(p, "if.then");
  2206. lbBlock *done = lb_create_block(p, "if.done"); // NOTE(bill): Append later
  2207. lbBlock *else_ = lb_create_block(p, "if.else");
  2208. lbValue cond = lb_build_cond(p, te->cond, then, else_);
  2209. lb_start_block(p, then);
  2210. Type *type = default_type(type_of_expr(expr));
  2211. incoming_values[0] = lb_emit_conv(p, lb_build_expr(p, te->x), type).value;
  2212. lb_emit_jump(p, done);
  2213. lb_start_block(p, else_);
  2214. incoming_values[1] = lb_emit_conv(p, lb_build_expr(p, te->y), type).value;
  2215. lb_emit_jump(p, done);
  2216. lb_start_block(p, done);
  2217. lbValue res = {};
  2218. res.value = LLVMBuildPhi(p->builder, lb_type(p->module, type), "");
  2219. res.type = type;
  2220. GB_ASSERT(p->curr_block->preds.count >= 2);
  2221. incoming_blocks[0] = p->curr_block->preds[0]->block;
  2222. incoming_blocks[1] = p->curr_block->preds[1]->block;
  2223. LLVMAddIncoming(res.value, incoming_values, incoming_blocks, 2);
  2224. return res;
  2225. case_end;
  2226. case_ast_node(te, TernaryWhenExpr, expr);
  2227. TypeAndValue tav = type_and_value_of_expr(te->cond);
  2228. GB_ASSERT(tav.mode == Addressing_Constant);
  2229. GB_ASSERT(tav.value.kind == ExactValue_Bool);
  2230. if (tav.value.value_bool) {
  2231. return lb_build_expr(p, te->x);
  2232. } else {
  2233. return lb_build_expr(p, te->y);
  2234. }
  2235. case_end;
  2236. case_ast_node(oe, OrElseExpr, expr);
  2237. return lb_emit_or_else(p, oe->x, oe->y, tv);
  2238. case_end;
  2239. case_ast_node(oe, OrReturnExpr, expr);
  2240. return lb_emit_or_return(p, oe->expr, tv);
  2241. case_end;
  2242. case_ast_node(ta, TypeAssertion, expr);
  2243. TokenPos pos = ast_token(expr).pos;
  2244. Type *type = tv.type;
  2245. lbValue e = lb_build_expr(p, ta->expr);
  2246. Type *t = type_deref(e.type);
  2247. if (is_type_union(t)) {
  2248. if (ta->ignores[0]) {
  2249. // NOTE(bill): This is not needed for optimization levels other than 0
  2250. return lb_emit_union_cast_only_ok_check(p, e, type, pos);
  2251. }
  2252. return lb_emit_union_cast(p, e, type, pos);
  2253. } else if (is_type_any(t)) {
  2254. return lb_emit_any_cast(p, e, type, pos);
  2255. } else {
  2256. GB_PANIC("TODO(bill): type assertion %s", type_to_string(e.type));
  2257. }
  2258. case_end;
  2259. case_ast_node(tc, TypeCast, expr);
  2260. lbValue e = lb_build_expr(p, tc->expr);
  2261. switch (tc->token.kind) {
  2262. case Token_cast:
  2263. return lb_emit_conv(p, e, tv.type);
  2264. case Token_transmute:
  2265. return lb_emit_transmute(p, e, tv.type);
  2266. }
  2267. GB_PANIC("Invalid AST TypeCast");
  2268. case_end;
  2269. case_ast_node(ac, AutoCast, expr);
  2270. lbValue value = lb_build_expr(p, ac->expr);
  2271. return lb_emit_conv(p, value, tv.type);
  2272. case_end;
  2273. case_ast_node(ue, UnaryExpr, expr);
  2274. switch (ue->op.kind) {
  2275. case Token_And:
  2276. return lb_build_unary_and(p, expr);
  2277. default:
  2278. {
  2279. lbValue v = lb_build_expr(p, ue->expr);
  2280. return lb_emit_unary_arith(p, ue->op.kind, v, tv.type);
  2281. }
  2282. }
  2283. case_end;
  2284. case_ast_node(be, BinaryExpr, expr);
  2285. return lb_build_binary_expr(p, expr);
  2286. case_end;
  2287. case_ast_node(pl, ProcLit, expr);
  2288. return lb_generate_anonymous_proc_lit(p->module, p->name, expr, p);
  2289. case_end;
  2290. case_ast_node(cl, CompoundLit, expr);
  2291. return lb_addr_load(p, lb_build_addr(p, expr));
  2292. case_end;
  2293. case_ast_node(ce, CallExpr, expr);
  2294. return lb_build_call_expr(p, expr);
  2295. case_end;
  2296. case_ast_node(se, SliceExpr, expr);
  2297. if (is_type_slice(type_of_expr(se->expr))) {
  2298. // NOTE(bill): Quick optimization
  2299. if (se->high == nullptr &&
  2300. (se->low == nullptr || lb_is_expr_constant_zero(se->low))) {
  2301. return lb_build_expr(p, se->expr);
  2302. }
  2303. }
  2304. return lb_addr_load(p, lb_build_addr(p, expr));
  2305. case_end;
  2306. case_ast_node(ie, IndexExpr, expr);
  2307. return lb_addr_load(p, lb_build_addr(p, expr));
  2308. case_end;
  2309. case_ast_node(ie, MatrixIndexExpr, expr);
  2310. return lb_addr_load(p, lb_build_addr(p, expr));
  2311. case_end;
  2312. case_ast_node(ia, InlineAsmExpr, expr);
  2313. Type *t = type_of_expr(expr);
  2314. GB_ASSERT(is_type_asm_proc(t));
  2315. String asm_string = {};
  2316. String constraints_string = {};
  2317. TypeAndValue tav;
  2318. tav = type_and_value_of_expr(ia->asm_string);
  2319. GB_ASSERT(is_type_string(tav.type));
  2320. GB_ASSERT(tav.value.kind == ExactValue_String);
  2321. asm_string = tav.value.value_string;
  2322. tav = type_and_value_of_expr(ia->constraints_string);
  2323. GB_ASSERT(is_type_string(tav.type));
  2324. GB_ASSERT(tav.value.kind == ExactValue_String);
  2325. constraints_string = tav.value.value_string;
  2326. LLVMInlineAsmDialect dialect = LLVMInlineAsmDialectATT;
  2327. switch (ia->dialect) {
  2328. case InlineAsmDialect_Default: dialect = LLVMInlineAsmDialectATT; break;
  2329. case InlineAsmDialect_ATT: dialect = LLVMInlineAsmDialectATT; break;
  2330. case InlineAsmDialect_Intel: dialect = LLVMInlineAsmDialectIntel; break;
  2331. default: GB_PANIC("Unhandled inline asm dialect"); break;
  2332. }
  2333. LLVMTypeRef func_type = LLVMGetElementType(lb_type(p->module, t));
  2334. LLVMValueRef the_asm = LLVMGetInlineAsm(func_type,
  2335. cast(char *)asm_string.text, cast(size_t)asm_string.len,
  2336. cast(char *)constraints_string.text, cast(size_t)constraints_string.len,
  2337. ia->has_side_effects, ia->is_align_stack, dialect
  2338. );
  2339. GB_ASSERT(the_asm != nullptr);
  2340. return {the_asm, t};
  2341. case_end;
  2342. }
  2343. GB_PANIC("lb_build_expr: %.*s", LIT(ast_strings[expr->kind]));
  2344. return {};
  2345. }
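// Returns the previously registered address of an SOA pseudo-variable.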
  2346. lbAddr lb_get_soa_variable_addr(lbProcedure *p, Entity *e) {
  2347. return map_must_get(&p->module->soa_values, hash_entity(e));
  2348. }
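// Resolves a 'using' field entity to a pointer into its parent: the parent's
// address is looked up (or rebuilt from the 'using' expression) and the
// selection path is applied with a deep field GEP.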
  2349. lbValue lb_get_using_variable(lbProcedure *p, Entity *e) {
  2350. GB_ASSERT(e->kind == Entity_Variable && e->flags & EntityFlag_Using);
  2351. String name = e->token.string;
  2352. Entity *parent = e->using_parent;
  2353. Selection sel = lookup_field(parent->type, name, false);
  2354. GB_ASSERT(sel.entity != nullptr);
  2355. lbValue *pv = map_get(&p->module->values, hash_entity(parent));
  2356. lbValue v = {};
  2357. if (pv == nullptr && parent->flags & EntityFlag_SoaPtrField) {
  2358. // NOTE(bill): using SOA value (probably from for-in statement)
  2359. lbAddr parent_addr = lb_get_soa_variable_addr(p, parent);
  2360. v = lb_addr_get_ptr(p, parent_addr);
  2361. } else if (pv != nullptr) {
  2362. v = *pv;
  2363. } else {
  2364. GB_ASSERT_MSG(e->using_expr != nullptr, "%.*s", LIT(name));
  2365. v = lb_build_addr_ptr(p, e->using_expr);
  2366. }
  2367. GB_ASSERT(v.value != nullptr);
  2368. GB_ASSERT_MSG(parent->type == type_deref(v.type), "%s %s", type_to_string(parent->type), type_to_string(v.type));
  2369. lbValue ptr = lb_emit_deep_field_gep(p, v, sel);
  2370. if (parent->scope) {
  2371. if ((parent->scope->flags & (ScopeFlag_File|ScopeFlag_Pkg)) == 0) {
  2372. lb_add_debug_local_variable(p, ptr.value, e->type, e->token);
  2373. }
  2374. } else {
  2375. lb_add_debug_local_variable(p, ptr.value, e->type, e->token);
  2376. }
  2377. return ptr;
  2378. }
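
// Resolves the addressable storage for an entity: constants are materialized into a
// generated global, `using` fields are recomputed on each use, SOA pointer fields go
// through the SOA address cache, and everything else is looked up in the module's
// value map (falling back to lb_find_value_from_entity).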
lbAddr lb_build_addr_from_entity(lbProcedure *p, Entity *e, Ast *expr) {
	GB_ASSERT(e != nullptr);
	if (e->kind == Entity_Constant) {
		Type *t = default_type(type_of_expr(expr));
		lbValue v = lb_const_value(p->module, t, e->Constant.value);
		lbAddr g = lb_add_global_generated(p->module, t, v);
		return g;
	}

	lbValue v = {};
	lbValue *found = map_get(&p->module->values, hash_entity(e));
	if (found) {
		v = *found;
	} else if (e->kind == Entity_Variable && e->flags & EntityFlag_Using) {
		// NOTE(bill): Calculate the using variable every time
		v = lb_get_using_variable(p, e);
	} else if (e->flags & EntityFlag_SoaPtrField) {
		return lb_get_soa_variable_addr(p, e);
	}

	if (v.value == nullptr) {
		return lb_addr(lb_find_value_from_entity(p->module, e));
		// error(expr, "%.*s Unknown value: %.*s, entity: %p %.*s",
		//       LIT(p->name),
		//       LIT(e->token.string), e, LIT(entity_strings[e->kind]));
		// GB_PANIC("Unknown value");
	}

	return lb_addr(v);
}
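
// Builds an addressable result for the `swizzle` builtin applied to a fixed array:
// up to four constant indices are packed into the compact lbAddr_Swizzle form, while
// larger index lists fall back to lbAddr_SwizzleLarge.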
lbAddr lb_build_array_swizzle_addr(lbProcedure *p, AstCallExpr *ce, TypeAndValue const &tv) {
	isize index_count = ce->args.count-1;
	lbAddr addr = lb_build_addr(p, ce->args[0]);
	if (index_count == 0) {
		return addr;
	}

	Type *type = base_type(lb_addr_type(addr));
	GB_ASSERT(type->kind == Type_Array);
	i64 count = type->Array.count;

	if (count <= 4) {
		u8 indices[4] = {};
		u8 index_count = 0;
		for (i32 i = 1; i < ce->args.count; i++) {
			TypeAndValue tv = type_and_value_of_expr(ce->args[i]);
			GB_ASSERT(is_type_integer(tv.type));
			GB_ASSERT(tv.value.kind == ExactValue_Integer);
			i64 src_index = big_int_to_i64(&tv.value.value_integer);
			indices[index_count++] = cast(u8)src_index;
		}
		return lb_addr_swizzle(lb_addr_get_ptr(p, addr), tv.type, index_count, indices);
	}

	auto indices = slice_make<i32>(permanent_allocator(), ce->args.count-1);
	isize index_index = 0;
	for (i32 i = 1; i < ce->args.count; i++) {
		TypeAndValue tv = type_and_value_of_expr(ce->args[i]);
		GB_ASSERT(is_type_integer(tv.type));
		GB_ASSERT(tv.value.kind == ExactValue_Integer);
		i64 src_index = big_int_to_i64(&tv.value.value_integer);
		indices[index_index++] = cast(i32)src_index;
	}
	return lb_addr_swizzle_large(lb_addr_get_ptr(p, addr), tv.type, indices);
}
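
// lb_build_addr is the addressable counterpart of lb_build_expr: it lowers an expression
// to an lbAddr describing where the value lives (local, global, map slot, SOA column,
// swizzle, context, etc.) so callers can load from or store through it.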
lbAddr lb_build_addr(lbProcedure *p, Ast *expr) {
	expr = unparen_expr(expr);

	switch (expr->kind) {
	case_ast_node(i, Implicit, expr);
		lbAddr v = {};
		switch (i->kind) {
		case Token_context:
			v = lb_find_or_generate_context_ptr(p);
			break;
		}
		GB_ASSERT(v.addr.value != nullptr);
		return v;
	case_end;

	case_ast_node(i, Ident, expr);
		if (is_blank_ident(expr)) {
			lbAddr val = {};
			return val;
		}
		String name = i->token.string;
		Entity *e = entity_of_node(expr);
		return lb_build_addr_from_entity(p, e, expr);
	case_end;
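
	// Selector expressions (`x.y`): plain fields become deep field GEPs, while map,
	// context, SOA, and swizzle addresses each need their own addressing form below.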
	case_ast_node(se, SelectorExpr, expr);
		Ast *sel = unparen_expr(se->selector);
		if (sel->kind == Ast_Ident) {
			String selector = sel->Ident.token.string;
			TypeAndValue tav = type_and_value_of_expr(se->expr);

			if (tav.mode == Addressing_Invalid) {
				// NOTE(bill): Imports
				Entity *imp = entity_of_node(se->expr);
				if (imp != nullptr) {
					GB_ASSERT(imp->kind == Entity_ImportName);
				}
				return lb_build_addr(p, unparen_expr(se->selector));
			}

			Type *type = base_type(tav.type);
			if (tav.mode == Addressing_Type) {
				GB_PANIC("Unreachable");
			}

			if (se->swizzle_count > 0) {
				Type *array_type = base_type(type_deref(tav.type));
				GB_ASSERT(array_type->kind == Type_Array);
				u8 swizzle_count = se->swizzle_count;
				u8 swizzle_indices_raw = se->swizzle_indices;
				u8 swizzle_indices[4] = {};
				for (u8 i = 0; i < swizzle_count; i++) {
					u8 index = swizzle_indices_raw>>(i*2) & 3;
					swizzle_indices[i] = index;
				}
				lbValue a = {};
				if (is_type_pointer(tav.type)) {
					a = lb_build_expr(p, se->expr);
				} else {
					lbAddr addr = lb_build_addr(p, se->expr);
					a = lb_addr_get_ptr(p, addr);
				}

				GB_ASSERT(is_type_array(expr->tav.type));
				return lb_addr_swizzle(a, expr->tav.type, swizzle_count, swizzle_indices);
			}

			Selection sel = lookup_field(type, selector, false);
			GB_ASSERT(sel.entity != nullptr);

			{
				lbAddr addr = lb_build_addr(p, se->expr);
				if (addr.kind == lbAddr_Map) {
					lbValue v = lb_addr_load(p, addr);
					lbValue a = lb_address_from_load_or_generate_local(p, v);
					a = lb_emit_deep_field_gep(p, a, sel);
					return lb_addr(a);
				} else if (addr.kind == lbAddr_Context) {
					GB_ASSERT(sel.index.count > 0);
					if (addr.ctx.sel.index.count >= 0) {
						sel = selection_combine(addr.ctx.sel, sel);
					}
					addr.ctx.sel = sel;
					addr.kind = lbAddr_Context;
					return addr;
				} else if (addr.kind == lbAddr_SoaVariable) {
					lbValue index = addr.soa.index;
					i32 first_index = sel.index[0];
					Selection sub_sel = sel;
					sub_sel.index.data += 1;
					sub_sel.index.count -= 1;

					lbValue arr = lb_emit_struct_ep(p, addr.addr, first_index);

					Type *t = base_type(type_deref(addr.addr.type));
					GB_ASSERT(is_type_soa_struct(t));

					if (addr.soa.index_expr != nullptr && (!lb_is_const(addr.soa.index) || t->Struct.soa_kind != StructSoa_Fixed)) {
						lbValue len = lb_soa_struct_len(p, addr.addr);
						lb_emit_bounds_check(p, ast_token(addr.soa.index_expr), addr.soa.index, len);
					}

					lbValue item = {};
					if (t->Struct.soa_kind == StructSoa_Fixed) {
						item = lb_emit_array_ep(p, arr, index);
					} else {
						item = lb_emit_ptr_offset(p, lb_emit_load(p, arr), index);
					}
					if (sub_sel.index.count > 0) {
						item = lb_emit_deep_field_gep(p, item, sub_sel);
					}
					return lb_addr(item);
				} else if (addr.kind == lbAddr_Swizzle) {
					GB_ASSERT(sel.index.count > 0);
					// NOTE(bill): just patch the index in place
					sel.index[0] = addr.swizzle.indices[sel.index[0]];
				} else if (addr.kind == lbAddr_SwizzleLarge) {
					GB_ASSERT(sel.index.count > 0);
					// NOTE(bill): just patch the index in place
					sel.index[0] = addr.swizzle.indices[sel.index[0]];
				}

				lbValue a = lb_addr_get_ptr(p, addr);
				a = lb_emit_deep_field_gep(p, a, sel);
				return lb_addr(a);
			}
		} else {
			GB_PANIC("Unsupported selector expression");
		}
	case_end;
	case_ast_node(se, SelectorCallExpr, expr);
		GB_ASSERT(se->modified_call);
		TypeAndValue tav = type_and_value_of_expr(expr);
		GB_ASSERT(tav.mode != Addressing_Invalid);
		lbValue e = lb_build_expr(p, expr);
		return lb_addr(lb_address_from_load_or_generate_local(p, e));
	case_end;

	case_ast_node(ta, TypeAssertion, expr);
		TokenPos pos = ast_token(expr).pos;
		lbValue e = lb_build_expr(p, ta->expr);
		Type *t = type_deref(e.type);
		if (is_type_union(t)) {
			Type *type = type_of_expr(expr);
			lbAddr v = lb_add_local_generated(p, type, false);
			lb_addr_store(p, v, lb_emit_union_cast(p, lb_build_expr(p, ta->expr), type, pos));
			return v;
		} else if (is_type_any(t)) {
			Type *type = type_of_expr(expr);
			return lb_emit_any_cast_addr(p, lb_build_expr(p, ta->expr), type, pos);
		} else {
			GB_PANIC("TODO(bill): type assertion %s", type_to_string(e.type));
		}
	case_end;

	case_ast_node(ue, UnaryExpr, expr);
		switch (ue->op.kind) {
		case Token_And: {
			lbValue ptr = lb_build_expr(p, expr);
			return lb_addr(lb_address_from_load_or_generate_local(p, ptr));
		}
		default:
			GB_PANIC("Invalid unary expression for lb_build_addr");
		}
	case_end;

	case_ast_node(be, BinaryExpr, expr);
		lbValue v = lb_build_expr(p, expr);
		Type *t = v.type;
		if (is_type_pointer(t)) {
			return lb_addr(v);
		}
		return lb_addr(lb_address_from_load_or_generate_local(p, v));
	case_end;
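
	// Index expressions (`x[i]`): maps and SOA structs get dedicated address kinds;
	// every other indexable type is lowered to a pointer offset or element GEP, with
	// runtime bounds checks emitted along the way.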
	case_ast_node(ie, IndexExpr, expr);
		Type *t = base_type(type_of_expr(ie->expr));
		bool deref = is_type_pointer(t);
		t = base_type(type_deref(t));

		if (is_type_soa_struct(t)) {
			// SOA STRUCTURES!!!!
			lbValue val = lb_build_addr_ptr(p, ie->expr);
			if (deref) {
				val = lb_emit_load(p, val);
			}
			lbValue index = lb_build_expr(p, ie->index);
			return lb_addr_soa_variable(val, index, ie->index);
		}

		if (ie->expr->tav.mode == Addressing_SoaVariable) {
			// SOA Structures for slices/dynamic arrays
			GB_ASSERT(is_type_pointer(type_of_expr(ie->expr)));

			lbValue field = lb_build_expr(p, ie->expr);
			lbValue index = lb_build_expr(p, ie->index);

			if (!build_context.no_bounds_check) {
				// TODO HACK(bill): Clean up this hack to get the length for bounds checking
				// GB_ASSERT(LLVMIsALoadInst(field.value));
				// lbValue a = {};
				// a.value = LLVMGetOperand(field.value, 0);
				// a.type = alloc_type_pointer(field.type);
				// irInstr *b = &a->Instr;
				// GB_ASSERT(b->kind == irInstr_StructElementPtr);
				// lbValue base_struct = b->StructElementPtr.address;
				// GB_ASSERT(is_type_soa_struct(type_deref(ir_type(base_struct))));
				// lbValue len = ir_soa_struct_len(p, base_struct);
				// lb_emit_bounds_check(p, ast_token(ie->index), index, len);
			}

			lbValue val = lb_emit_ptr_offset(p, field, index);
			return lb_addr(val);
		}

		GB_ASSERT_MSG(is_type_indexable(t), "%s %s", type_to_string(t), expr_to_string(expr));

		if (is_type_map(t)) {
			lbValue map_val = lb_build_addr_ptr(p, ie->expr);
			if (deref) {
				map_val = lb_emit_load(p, map_val);
			}

			lbValue key = lb_build_expr(p, ie->index);
			key = lb_emit_conv(p, key, t->Map.key);

			Type *result_type = type_of_expr(expr);
			return lb_addr_map(map_val, key, t, result_type);
		}

		switch (t->kind) {
		case Type_Array: {
			lbValue array = {};
			array = lb_build_addr_ptr(p, ie->expr);
			if (deref) {
				array = lb_emit_load(p, array);
			}
			lbValue index = lb_build_expr(p, ie->index);
			index = lb_emit_conv(p, index, t_int);
			lbValue elem = lb_emit_array_ep(p, array, index);

			auto index_tv = type_and_value_of_expr(ie->index);
			if (index_tv.mode != Addressing_Constant) {
				lbValue len = lb_const_int(p->module, t_int, t->Array.count);
				lb_emit_bounds_check(p, ast_token(ie->index), index, len);
			}
			return lb_addr(elem);
		}

		case Type_EnumeratedArray: {
			lbValue array = {};
			array = lb_build_addr_ptr(p, ie->expr);
			if (deref) {
				array = lb_emit_load(p, array);
			}

			Type *index_type = t->EnumeratedArray.index;

			auto index_tv = type_and_value_of_expr(ie->index);

			lbValue index = {};
			if (compare_exact_values(Token_NotEq, *t->EnumeratedArray.min_value, exact_value_i64(0))) {
				if (index_tv.mode == Addressing_Constant) {
					ExactValue idx = exact_value_sub(index_tv.value, *t->EnumeratedArray.min_value);
					index = lb_const_value(p->module, index_type, idx);
				} else {
					index = lb_emit_conv(p, lb_build_expr(p, ie->index), t_int);
					index = lb_emit_arith(p, Token_Sub, index, lb_const_value(p->module, index_type, *t->EnumeratedArray.min_value), index_type);
				}
			} else {
				index = lb_emit_conv(p, lb_build_expr(p, ie->index), t_int);
			}

			lbValue elem = lb_emit_array_ep(p, array, index);

			if (index_tv.mode != Addressing_Constant) {
				lbValue len = lb_const_int(p->module, t_int, t->EnumeratedArray.count);
				lb_emit_bounds_check(p, ast_token(ie->index), index, len);
			}

			return lb_addr(elem);
		}

		case Type_Slice: {
			lbValue slice = {};
			slice = lb_build_expr(p, ie->expr);
			if (deref) {
				slice = lb_emit_load(p, slice);
			}
			lbValue elem = lb_slice_elem(p, slice);
			lbValue index = lb_emit_conv(p, lb_build_expr(p, ie->index), t_int);
			lbValue len = lb_slice_len(p, slice);
			lb_emit_bounds_check(p, ast_token(ie->index), index, len);
			lbValue v = lb_emit_ptr_offset(p, elem, index);
			return lb_addr(v);
		}

		case Type_MultiPointer: {
			lbValue multi_ptr = {};
			multi_ptr = lb_build_expr(p, ie->expr);
			if (deref) {
				multi_ptr = lb_emit_load(p, multi_ptr);
			}
			lbValue index = lb_build_expr(p, ie->index);
			lbValue v = {};

			LLVMValueRef indices[1] = {index.value};
			v.value = LLVMBuildGEP(p->builder, multi_ptr.value, indices, 1, "");
			v.type = alloc_type_pointer(t->MultiPointer.elem);
			return lb_addr(v);
		}

		case Type_RelativeSlice: {
			lbAddr slice_addr = {};
			if (deref) {
				slice_addr = lb_addr(lb_build_expr(p, ie->expr));
			} else {
				slice_addr = lb_build_addr(p, ie->expr);
			}
			lbValue slice = lb_addr_load(p, slice_addr);

			lbValue elem = lb_slice_elem(p, slice);
			lbValue index = lb_emit_conv(p, lb_build_expr(p, ie->index), t_int);
			lbValue len = lb_slice_len(p, slice);
			lb_emit_bounds_check(p, ast_token(ie->index), index, len);
			lbValue v = lb_emit_ptr_offset(p, elem, index);
			return lb_addr(v);
		}

		case Type_DynamicArray: {
			lbValue dynamic_array = {};
			dynamic_array = lb_build_expr(p, ie->expr);
			if (deref) {
				dynamic_array = lb_emit_load(p, dynamic_array);
			}
			lbValue elem = lb_dynamic_array_elem(p, dynamic_array);
			lbValue len = lb_dynamic_array_len(p, dynamic_array);
			lbValue index = lb_emit_conv(p, lb_build_expr(p, ie->index), t_int);
			lb_emit_bounds_check(p, ast_token(ie->index), index, len);
			lbValue v = lb_emit_ptr_offset(p, elem, index);
			return lb_addr(v);
		}

		case Type_Matrix: {
			lbValue matrix = {};
			matrix = lb_build_addr_ptr(p, ie->expr);
			if (deref) {
				matrix = lb_emit_load(p, matrix);
			}
			lbValue index = lb_build_expr(p, ie->index);
			index = lb_emit_conv(p, index, t_int);
			lbValue elem = lb_emit_matrix_ep(p, matrix, lb_const_int(p->module, t_int, 0), index);
			elem = lb_emit_conv(p, elem, alloc_type_pointer(type_of_expr(expr)));

			auto index_tv = type_and_value_of_expr(ie->index);
			if (index_tv.mode != Addressing_Constant) {
				lbValue len = lb_const_int(p->module, t_int, t->Matrix.column_count);
				lb_emit_bounds_check(p, ast_token(ie->index), index, len);
			}
			return lb_addr(elem);
		}

		case Type_Basic: { // Basic_string
			lbValue str;
			lbValue elem;
			lbValue len;
			lbValue index;

			str = lb_build_expr(p, ie->expr);
			if (deref) {
				str = lb_emit_load(p, str);
			}
			elem = lb_string_elem(p, str);
			len = lb_string_len(p, str);

			index = lb_emit_conv(p, lb_build_expr(p, ie->index), t_int);
			lb_emit_bounds_check(p, ast_token(ie->index), index, len);

			return lb_addr(lb_emit_ptr_offset(p, elem, index));
		}
		}
	case_end;
	case_ast_node(ie, MatrixIndexExpr, expr);
		Type *t = base_type(type_of_expr(ie->expr));
		bool deref = is_type_pointer(t);
		t = base_type(type_deref(t));

		lbValue m = {};
		m = lb_build_addr_ptr(p, ie->expr);
		if (deref) {
			m = lb_emit_load(p, m);
		}
		lbValue row_index = lb_build_expr(p, ie->row_index);
		lbValue column_index = lb_build_expr(p, ie->column_index);
		row_index = lb_emit_conv(p, row_index, t_int);
		column_index = lb_emit_conv(p, column_index, t_int);
		lbValue elem = lb_emit_matrix_ep(p, m, row_index, column_index);

		auto row_index_tv = type_and_value_of_expr(ie->row_index);
		auto column_index_tv = type_and_value_of_expr(ie->column_index);
		if (row_index_tv.mode != Addressing_Constant || column_index_tv.mode != Addressing_Constant) {
			lbValue row_count = lb_const_int(p->module, t_int, t->Matrix.row_count);
			lbValue column_count = lb_const_int(p->module, t_int, t->Matrix.column_count);
			lb_emit_matrix_bounds_check(p, ast_token(ie->row_index), row_index, column_index, row_count, column_count);
		}
		return lb_addr(elem);
	case_end;
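
	// Slice expressions (`x[lo:hi]`): each sliceable type computes a base pointer and a
	// new length, performs a slice bounds check when explicit indices are given, and
	// fills a freshly generated local slice (or string) value.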
	case_ast_node(se, SliceExpr, expr);
		lbValue low = lb_const_int(p->module, t_int, 0);
		lbValue high = {};

		if (se->low != nullptr) {
			low = lb_correct_endianness(p, lb_build_expr(p, se->low));
		}
		if (se->high != nullptr) {
			high = lb_correct_endianness(p, lb_build_expr(p, se->high));
		}

		bool no_indices = se->low == nullptr && se->high == nullptr;

		lbAddr addr = lb_build_addr(p, se->expr);
		lbValue base = lb_addr_load(p, addr);
		Type *type = base_type(base.type);

		if (is_type_pointer(type)) {
			type = base_type(type_deref(type));
			addr = lb_addr(base);
			base = lb_addr_load(p, addr);
		}

		switch (type->kind) {
		case Type_Slice: {
			Type *slice_type = type;
			lbValue len = lb_slice_len(p, base);
			if (high.value == nullptr) high = len;

			if (!no_indices) {
				lb_emit_slice_bounds_check(p, se->open, low, high, len, se->low != nullptr);
			}

			lbValue elem = lb_emit_ptr_offset(p, lb_slice_elem(p, base), low);
			lbValue new_len = lb_emit_arith(p, Token_Sub, high, low, t_int);

			lbAddr slice = lb_add_local_generated(p, slice_type, false);
			lb_fill_slice(p, slice, elem, new_len);
			return slice;
		}

		case Type_RelativeSlice:
			GB_PANIC("TODO(bill): Type_RelativeSlice should be handled above already on the lb_addr_load");
			break;

		case Type_DynamicArray: {
			Type *elem_type = type->DynamicArray.elem;
			Type *slice_type = alloc_type_slice(elem_type);

			lbValue len = lb_dynamic_array_len(p, base);
			if (high.value == nullptr) high = len;

			if (!no_indices) {
				lb_emit_slice_bounds_check(p, se->open, low, high, len, se->low != nullptr);
			}

			lbValue elem = lb_emit_ptr_offset(p, lb_dynamic_array_elem(p, base), low);
			lbValue new_len = lb_emit_arith(p, Token_Sub, high, low, t_int);

			lbAddr slice = lb_add_local_generated(p, slice_type, false);
			lb_fill_slice(p, slice, elem, new_len);
			return slice;
		}

		case Type_MultiPointer: {
			lbAddr res = lb_add_local_generated(p, type_of_expr(expr), false);
			if (se->high == nullptr) {
				lbValue offset = base;
				LLVMValueRef indices[1] = {low.value};
				offset.value = LLVMBuildGEP(p->builder, offset.value, indices, 1, "");
				lb_addr_store(p, res, offset);
			} else {
				low = lb_emit_conv(p, low, t_int);
				high = lb_emit_conv(p, high, t_int);

				lb_emit_multi_pointer_slice_bounds_check(p, se->open, low, high);

				LLVMValueRef indices[1] = {low.value};
				LLVMValueRef ptr = LLVMBuildGEP(p->builder, base.value, indices, 1, "");
				LLVMValueRef len = LLVMBuildSub(p->builder, high.value, low.value, "");

				LLVMValueRef gep0 = lb_emit_struct_ep(p, res.addr, 0).value;
				LLVMValueRef gep1 = lb_emit_struct_ep(p, res.addr, 1).value;
				LLVMBuildStore(p->builder, ptr, gep0);
				LLVMBuildStore(p->builder, len, gep1);
			}
			return res;
		}

		case Type_Array: {
			Type *slice_type = alloc_type_slice(type->Array.elem);
			lbValue len = lb_const_int(p->module, t_int, type->Array.count);
			if (high.value == nullptr) high = len;

			bool low_const = type_and_value_of_expr(se->low).mode == Addressing_Constant;
			bool high_const = type_and_value_of_expr(se->high).mode == Addressing_Constant;

			if (!low_const || !high_const) {
				if (!no_indices) {
					lb_emit_slice_bounds_check(p, se->open, low, high, len, se->low != nullptr);
				}
			}
			lbValue elem = lb_emit_ptr_offset(p, lb_array_elem(p, lb_addr_get_ptr(p, addr)), low);
			lbValue new_len = lb_emit_arith(p, Token_Sub, high, low, t_int);

			lbAddr slice = lb_add_local_generated(p, slice_type, false);
			lb_fill_slice(p, slice, elem, new_len);
			return slice;
		}

		case Type_Basic: {
			GB_ASSERT(type == t_string);
			lbValue len = lb_string_len(p, base);
			if (high.value == nullptr) high = len;

			if (!no_indices) {
				lb_emit_slice_bounds_check(p, se->open, low, high, len, se->low != nullptr);
			}

			lbValue elem = lb_emit_ptr_offset(p, lb_string_elem(p, base), low);
			lbValue new_len = lb_emit_arith(p, Token_Sub, high, low, t_int);

			lbAddr str = lb_add_local_generated(p, t_string, false);
			lb_fill_string(p, str, elem, new_len);
			return str;
		}

		case Type_Struct:
			if (is_type_soa_struct(type)) {
				lbValue len = lb_soa_struct_len(p, lb_addr_get_ptr(p, addr));
				if (high.value == nullptr) high = len;

				if (!no_indices) {
					lb_emit_slice_bounds_check(p, se->open, low, high, len, se->low != nullptr);
				}
				#if 1
				lbAddr dst = lb_add_local_generated(p, type_of_expr(expr), true);
				if (type->Struct.soa_kind == StructSoa_Fixed) {
					i32 field_count = cast(i32)type->Struct.fields.count;
					for (i32 i = 0; i < field_count; i++) {
						lbValue field_dst = lb_emit_struct_ep(p, dst.addr, i);
						lbValue field_src = lb_emit_struct_ep(p, lb_addr_get_ptr(p, addr), i);
						field_src = lb_emit_array_ep(p, field_src, low);
						lb_emit_store(p, field_dst, field_src);
					}

					lbValue len_dst = lb_emit_struct_ep(p, dst.addr, field_count);
					lbValue new_len = lb_emit_arith(p, Token_Sub, high, low, t_int);
					lb_emit_store(p, len_dst, new_len);
				} else if (type->Struct.soa_kind == StructSoa_Slice) {
					if (no_indices) {
						lb_addr_store(p, dst, base);
					} else {
						i32 field_count = cast(i32)type->Struct.fields.count - 1;
						for (i32 i = 0; i < field_count; i++) {
							lbValue field_dst = lb_emit_struct_ep(p, dst.addr, i);
							lbValue field_src = lb_emit_struct_ev(p, base, i);
							field_src = lb_emit_ptr_offset(p, field_src, low);
							lb_emit_store(p, field_dst, field_src);
						}

						lbValue len_dst = lb_emit_struct_ep(p, dst.addr, field_count);
						lbValue new_len = lb_emit_arith(p, Token_Sub, high, low, t_int);
						lb_emit_store(p, len_dst, new_len);
					}
				} else if (type->Struct.soa_kind == StructSoa_Dynamic) {
					i32 field_count = cast(i32)type->Struct.fields.count - 3;
					for (i32 i = 0; i < field_count; i++) {
						lbValue field_dst = lb_emit_struct_ep(p, dst.addr, i);
						lbValue field_src = lb_emit_struct_ev(p, base, i);
						field_src = lb_emit_ptr_offset(p, field_src, low);
						lb_emit_store(p, field_dst, field_src);
					}

					lbValue len_dst = lb_emit_struct_ep(p, dst.addr, field_count);
					lbValue new_len = lb_emit_arith(p, Token_Sub, high, low, t_int);
					lb_emit_store(p, len_dst, new_len);
				}

				return dst;
				#endif
			}
			break;
		}
		GB_PANIC("Unknown slicable type");
	case_end;
	case_ast_node(de, DerefExpr, expr);
		if (is_type_relative_pointer(type_of_expr(de->expr))) {
			lbAddr addr = lb_build_addr(p, de->expr);
			addr.relative.deref = true;
			return addr;
		}
		lbValue addr = lb_build_expr(p, de->expr);
		return lb_addr(addr);
	case_end;
	case_ast_node(ce, CallExpr, expr);
		BuiltinProcId builtin_id = BuiltinProc_Invalid;
		if (ce->proc->tav.mode == Addressing_Builtin) {
			Entity *e = entity_of_node(ce->proc);
			if (e != nullptr) {
				builtin_id = cast(BuiltinProcId)e->Builtin.id;
			} else {
				builtin_id = BuiltinProc_DIRECTIVE;
			}
		}
		auto const &tv = expr->tav;
		if (builtin_id == BuiltinProc_swizzle &&
		    is_type_array(tv.type)) {
			// NOTE(bill, 2021-08-09): `swizzle` has some bizarre semantics, so it needs to be
			// specialized here in order to be addressable
			return lb_build_array_swizzle_addr(p, ce, tv);
		}

		// NOTE(bill): This is to make sure you never need to have an 'array_ev'
		lbValue e = lb_build_expr(p, expr);
		#if 1
		return lb_addr(lb_address_from_load_or_generate_local(p, e));
		#else
		lbAddr v = lb_add_local_generated(p, e.type, false);
		lb_addr_store(p, v, e);
		return v;
		#endif
	case_end;
	case_ast_node(cl, CompoundLit, expr);
		Type *type = type_of_expr(expr);
		Type *bt = base_type(type);

		lbAddr v = lb_add_local_generated(p, type, true);

		Type *et = nullptr;
		switch (bt->kind) {
		case Type_Array:           et = bt->Array.elem;           break;
		case Type_EnumeratedArray: et = bt->EnumeratedArray.elem; break;
		case Type_Slice:           et = bt->Slice.elem;           break;
		case Type_BitSet:          et = bt->BitSet.elem;          break;
		case Type_SimdVector:      et = bt->SimdVector.elem;      break;
		case Type_Matrix:          et = bt->Matrix.elem;          break;
		}

		String proc_name = {};
		if (p->entity) {
			proc_name = p->entity->token.string;
		}
		TokenPos pos = ast_token(expr).pos;
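
		// General strategy: store the constant portion of the literal in one go, then
		// overwrite only the elements/fields whose values are not compile-time constants.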
		switch (bt->kind) {
		default: GB_PANIC("Unknown CompoundLit type: %s", type_to_string(type)); break;

		case Type_Struct: {
			// TODO(bill): "constant" '#raw_union's are not initialized constantly at the moment.
			// NOTE(bill): This is due to the layout of the unions when printed to LLVM-IR
			bool is_raw_union = is_type_raw_union(bt);
			GB_ASSERT(is_type_struct(bt) || is_raw_union);
			TypeStruct *st = &bt->Struct;
			if (cl->elems.count > 0) {
				lb_addr_store(p, v, lb_const_value(p->module, type, exact_value_compound(expr)));
				lbValue comp_lit_ptr = lb_addr_get_ptr(p, v);

				for_array(field_index, cl->elems) {
					Ast *elem = cl->elems[field_index];

					lbValue field_expr = {};
					Entity *field = nullptr;
					isize index = field_index;

					if (elem->kind == Ast_FieldValue) {
						ast_node(fv, FieldValue, elem);
						String name = fv->field->Ident.token.string;
						Selection sel = lookup_field(bt, name, false);
						index = sel.index[0];
						elem = fv->value;
						TypeAndValue tav = type_and_value_of_expr(elem);
					} else {
						TypeAndValue tav = type_and_value_of_expr(elem);
						Selection sel = lookup_field_from_index(bt, st->fields[field_index]->Variable.field_index);
						index = sel.index[0];
					}

					field = st->fields[index];
					Type *ft = field->type;
					if (!is_raw_union && !is_type_typeid(ft) && lb_is_elem_const(elem, ft)) {
						continue;
					}

					field_expr = lb_build_expr(p, elem);

					lbValue gep = {};
					if (is_raw_union) {
						gep = lb_emit_conv(p, comp_lit_ptr, alloc_type_pointer(ft));
					} else {
						gep = lb_emit_struct_ep(p, comp_lit_ptr, cast(i32)index);
					}

					Type *fet = field_expr.type;
					GB_ASSERT(fet->kind != Type_Tuple);

					// HACK TODO(bill): THIS IS A MASSIVE HACK!!!!
					if (is_type_union(ft) && !are_types_identical(fet, ft) && !is_type_untyped(fet)) {
						GB_ASSERT_MSG(union_variant_index(ft, fet) > 0, "%s", type_to_string(fet));
						lb_emit_store_union_variant(p, gep, field_expr, fet);
					} else {
						lbValue fv = lb_emit_conv(p, field_expr, ft);
						lb_emit_store(p, gep, fv);
					}
				}
			}
			break;
		}
		case Type_Map: {
			if (cl->elems.count == 0) {
				break;
			}
			{
				auto args = array_make<lbValue>(permanent_allocator(), 3);
				args[0] = lb_gen_map_header(p, v.addr, type);
				args[1] = lb_const_int(p->module, t_int, 2*cl->elems.count);
				args[2] = lb_emit_source_code_location(p, proc_name, pos);
				lb_emit_runtime_call(p, "__dynamic_map_reserve", args);
			}
			for_array(field_index, cl->elems) {
				Ast *elem = cl->elems[field_index];
				ast_node(fv, FieldValue, elem);

				lbValue key = lb_build_expr(p, fv->field);
				lbValue value = lb_build_expr(p, fv->value);
				lb_insert_dynamic_map_key_and_value(p, v, type, key, value, elem);
			}
			break;
		}
		case Type_Array: {
			if (cl->elems.count > 0) {
				lb_addr_store(p, v, lb_const_value(p->module, type, exact_value_compound(expr)));

				auto temp_data = array_make<lbCompoundLitElemTempData>(temporary_allocator(), 0, cl->elems.count);

				// NOTE(bill): Separate value, gep, store into their own chunks
				for_array(i, cl->elems) {
					Ast *elem = cl->elems[i];
					if (elem->kind == Ast_FieldValue) {
						ast_node(fv, FieldValue, elem);
						if (lb_is_elem_const(fv->value, et)) {
							continue;
						}
						if (is_ast_range(fv->field)) {
							ast_node(ie, BinaryExpr, fv->field);
							TypeAndValue lo_tav = ie->left->tav;
							TypeAndValue hi_tav = ie->right->tav;
							GB_ASSERT(lo_tav.mode == Addressing_Constant);
							GB_ASSERT(hi_tav.mode == Addressing_Constant);

							TokenKind op = ie->op.kind;
							i64 lo = exact_value_to_i64(lo_tav.value);
							i64 hi = exact_value_to_i64(hi_tav.value);
							if (op != Token_RangeHalf) {
								hi += 1;
							}

							lbValue value = lb_build_expr(p, fv->value);

							for (i64 k = lo; k < hi; k++) {
								lbCompoundLitElemTempData data = {};
								data.value = value;
								data.elem_index = cast(i32)k;
								array_add(&temp_data, data);
							}
						} else {
							auto tav = fv->field->tav;
							GB_ASSERT(tav.mode == Addressing_Constant);
							i64 index = exact_value_to_i64(tav.value);

							lbValue value = lb_build_expr(p, fv->value);
							lbCompoundLitElemTempData data = {};
							data.value = lb_emit_conv(p, value, et);
							data.expr = fv->value;
							data.elem_index = cast(i32)index;
							array_add(&temp_data, data);
						}
					} else {
						if (lb_is_elem_const(elem, et)) {
							continue;
						}
						lbCompoundLitElemTempData data = {};
						data.expr = elem;
						data.elem_index = cast(i32)i;
						array_add(&temp_data, data);
					}
				}

				for_array(i, temp_data) {
					temp_data[i].gep = lb_emit_array_epi(p, lb_addr_get_ptr(p, v), temp_data[i].elem_index);
				}

				for_array(i, temp_data) {
					lbValue field_expr = temp_data[i].value;
					Ast *expr = temp_data[i].expr;

					auto prev_hint = lb_set_copy_elision_hint(p, lb_addr(temp_data[i].gep), expr);

					if (field_expr.value == nullptr) {
						field_expr = lb_build_expr(p, expr);
					}
					Type *t = field_expr.type;
					GB_ASSERT(t->kind != Type_Tuple);
					lbValue ev = lb_emit_conv(p, field_expr, et);

					if (!p->copy_elision_hint.used) {
						temp_data[i].value = ev;
					}

					lb_reset_copy_elision_hint(p, prev_hint);
				}

				for_array(i, temp_data) {
					if (temp_data[i].value.value != nullptr) {
						lb_emit_store(p, temp_data[i].gep, temp_data[i].value);
					}
				}
			}
			break;
		}
		case Type_EnumeratedArray: {
			if (cl->elems.count > 0) {
				lb_addr_store(p, v, lb_const_value(p->module, type, exact_value_compound(expr)));

				auto temp_data = array_make<lbCompoundLitElemTempData>(temporary_allocator(), 0, cl->elems.count);

				// NOTE(bill): Separate value, gep, store into their own chunks
				for_array(i, cl->elems) {
					Ast *elem = cl->elems[i];
					if (elem->kind == Ast_FieldValue) {
						ast_node(fv, FieldValue, elem);
						if (lb_is_elem_const(fv->value, et)) {
							continue;
						}
						if (is_ast_range(fv->field)) {
							ast_node(ie, BinaryExpr, fv->field);
							TypeAndValue lo_tav = ie->left->tav;
							TypeAndValue hi_tav = ie->right->tav;
							GB_ASSERT(lo_tav.mode == Addressing_Constant);
							GB_ASSERT(hi_tav.mode == Addressing_Constant);

							TokenKind op = ie->op.kind;
							i64 lo = exact_value_to_i64(lo_tav.value);
							i64 hi = exact_value_to_i64(hi_tav.value);
							if (op != Token_RangeHalf) {
								hi += 1;
							}

							lbValue value = lb_build_expr(p, fv->value);

							for (i64 k = lo; k < hi; k++) {
								lbCompoundLitElemTempData data = {};
								data.value = value;
								data.elem_index = cast(i32)k;
								array_add(&temp_data, data);
							}
						} else {
							auto tav = fv->field->tav;
							GB_ASSERT(tav.mode == Addressing_Constant);
							i64 index = exact_value_to_i64(tav.value);

							lbValue value = lb_build_expr(p, fv->value);
							lbCompoundLitElemTempData data = {};
							data.value = lb_emit_conv(p, value, et);
							data.expr = fv->value;
							data.elem_index = cast(i32)index;
							array_add(&temp_data, data);
						}
					} else {
						if (lb_is_elem_const(elem, et)) {
							continue;
						}
						lbCompoundLitElemTempData data = {};
						data.expr = elem;
						data.elem_index = cast(i32)i;
						array_add(&temp_data, data);
					}
				}

				i32 index_offset = cast(i32)exact_value_to_i64(*bt->EnumeratedArray.min_value);

				for_array(i, temp_data) {
					i32 index = temp_data[i].elem_index - index_offset;
					temp_data[i].gep = lb_emit_array_epi(p, lb_addr_get_ptr(p, v), index);
				}

				for_array(i, temp_data) {
					lbValue field_expr = temp_data[i].value;
					Ast *expr = temp_data[i].expr;

					auto prev_hint = lb_set_copy_elision_hint(p, lb_addr(temp_data[i].gep), expr);

					if (field_expr.value == nullptr) {
						field_expr = lb_build_expr(p, expr);
					}
					Type *t = field_expr.type;
					GB_ASSERT(t->kind != Type_Tuple);
					lbValue ev = lb_emit_conv(p, field_expr, et);

					if (!p->copy_elision_hint.used) {
						temp_data[i].value = ev;
					}

					lb_reset_copy_elision_hint(p, prev_hint);
				}

				for_array(i, temp_data) {
					if (temp_data[i].value.value != nullptr) {
						lb_emit_store(p, temp_data[i].gep, temp_data[i].value);
					}
				}
			}
			break;
		}
		case Type_Slice: {
			if (cl->elems.count > 0) {
				lbValue slice = lb_const_value(p->module, type, exact_value_compound(expr));

				lbValue data = lb_slice_elem(p, slice);

				auto temp_data = array_make<lbCompoundLitElemTempData>(temporary_allocator(), 0, cl->elems.count);

				for_array(i, cl->elems) {
					Ast *elem = cl->elems[i];
					if (elem->kind == Ast_FieldValue) {
						ast_node(fv, FieldValue, elem);
						if (lb_is_elem_const(fv->value, et)) {
							continue;
						}
						if (is_ast_range(fv->field)) {
							ast_node(ie, BinaryExpr, fv->field);
							TypeAndValue lo_tav = ie->left->tav;
							TypeAndValue hi_tav = ie->right->tav;
							GB_ASSERT(lo_tav.mode == Addressing_Constant);
							GB_ASSERT(hi_tav.mode == Addressing_Constant);

							TokenKind op = ie->op.kind;
							i64 lo = exact_value_to_i64(lo_tav.value);
							i64 hi = exact_value_to_i64(hi_tav.value);
							if (op != Token_RangeHalf) {
								hi += 1;
							}

							lbValue value = lb_emit_conv(p, lb_build_expr(p, fv->value), et);

							for (i64 k = lo; k < hi; k++) {
								lbCompoundLitElemTempData data = {};
								data.value = value;
								data.elem_index = cast(i32)k;
								array_add(&temp_data, data);
							}
						} else {
							GB_ASSERT(fv->field->tav.mode == Addressing_Constant);
							i64 index = exact_value_to_i64(fv->field->tav.value);

							lbValue field_expr = lb_build_expr(p, fv->value);
							GB_ASSERT(!is_type_tuple(field_expr.type));

							lbValue ev = lb_emit_conv(p, field_expr, et);

							lbCompoundLitElemTempData data = {};
							data.value = ev;
							data.elem_index = cast(i32)index;
							array_add(&temp_data, data);
						}
					} else {
						if (lb_is_elem_const(elem, et)) {
							continue;
						}
						lbValue field_expr = lb_build_expr(p, elem);
						GB_ASSERT(!is_type_tuple(field_expr.type));

						lbValue ev = lb_emit_conv(p, field_expr, et);

						lbCompoundLitElemTempData data = {};
						data.value = ev;
						data.elem_index = cast(i32)i;
						array_add(&temp_data, data);
					}
				}

				for_array(i, temp_data) {
					temp_data[i].gep = lb_emit_ptr_offset(p, data, lb_const_int(p->module, t_int, temp_data[i].elem_index));
				}

				for_array(i, temp_data) {
					lb_emit_store(p, temp_data[i].gep, temp_data[i].value);
				}

				{
					lbValue count = {};
					count.type = t_int;
					if (lb_is_const(slice)) {
						unsigned indices[1] = {1};
						count.value = LLVMConstExtractValue(slice.value, indices, gb_count_of(indices));
					} else {
						count.value = LLVMBuildExtractValue(p->builder, slice.value, 1, "");
					}
					lb_fill_slice(p, v, data, count);
				}
			}
			break;
		}
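
		// Dynamic array literals are built at runtime: reserve capacity, stage the
		// elements in a temporary local array, then append them through the runtime.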
		case Type_DynamicArray: {
			if (cl->elems.count == 0) {
				break;
			}
			Type *et = bt->DynamicArray.elem;
			lbValue size = lb_const_int(p->module, t_int, type_size_of(et));
			lbValue align = lb_const_int(p->module, t_int, type_align_of(et));

			i64 item_count = gb_max(cl->max_count, cl->elems.count);
			{
				auto args = array_make<lbValue>(permanent_allocator(), 5);
				args[0] = lb_emit_conv(p, lb_addr_get_ptr(p, v), t_rawptr);
				args[1] = size;
				args[2] = align;
				args[3] = lb_const_int(p->module, t_int, 2*item_count); // TODO(bill): Is this too much waste?
				args[4] = lb_emit_source_code_location(p, proc_name, pos);
				lb_emit_runtime_call(p, "__dynamic_array_reserve", args);
			}

			lbValue items = lb_generate_local_array(p, et, item_count);
			// lbValue items = lb_generate_global_array(p->module, et, item_count, str_lit("dacl$"), cast(i64)cast(intptr)expr);

			for_array(i, cl->elems) {
				Ast *elem = cl->elems[i];
				if (elem->kind == Ast_FieldValue) {
					ast_node(fv, FieldValue, elem);
					if (is_ast_range(fv->field)) {
						ast_node(ie, BinaryExpr, fv->field);
						TypeAndValue lo_tav = ie->left->tav;
						TypeAndValue hi_tav = ie->right->tav;
						GB_ASSERT(lo_tav.mode == Addressing_Constant);
						GB_ASSERT(hi_tav.mode == Addressing_Constant);

						TokenKind op = ie->op.kind;
						i64 lo = exact_value_to_i64(lo_tav.value);
						i64 hi = exact_value_to_i64(hi_tav.value);
						if (op != Token_RangeHalf) {
							hi += 1;
						}

						lbValue value = lb_emit_conv(p, lb_build_expr(p, fv->value), et);

						for (i64 k = lo; k < hi; k++) {
							lbValue ep = lb_emit_array_epi(p, items, cast(i32)k);
							lb_emit_store(p, ep, value);
						}
					} else {
						GB_ASSERT(fv->field->tav.mode == Addressing_Constant);

						i64 field_index = exact_value_to_i64(fv->field->tav.value);

						lbValue ev = lb_build_expr(p, fv->value);
						lbValue value = lb_emit_conv(p, ev, et);
						lbValue ep = lb_emit_array_epi(p, items, cast(i32)field_index);
						lb_emit_store(p, ep, value);
					}
				} else {
					lbValue value = lb_emit_conv(p, lb_build_expr(p, elem), et);
					lbValue ep = lb_emit_array_epi(p, items, cast(i32)i);
					lb_emit_store(p, ep, value);
				}
			}

			{
				auto args = array_make<lbValue>(permanent_allocator(), 6);
				args[0] = lb_emit_conv(p, v.addr, t_rawptr);
				args[1] = size;
				args[2] = align;
				args[3] = lb_emit_conv(p, items, t_rawptr);
				args[4] = lb_const_int(p->module, t_int, item_count);
				args[5] = lb_emit_source_code_location(p, proc_name, pos);
				lb_emit_runtime_call(p, "__dynamic_array_append", args);
			}
			break;
		}
		case Type_Basic: {
			GB_ASSERT(is_type_any(bt));
			if (cl->elems.count > 0) {
				lb_addr_store(p, v, lb_const_value(p->module, type, exact_value_compound(expr)));
				String field_names[2] = {
					str_lit("data"),
					str_lit("id"),
				};
				Type *field_types[2] = {
					t_rawptr,
					t_typeid,
				};

				for_array(field_index, cl->elems) {
					Ast *elem = cl->elems[field_index];

					lbValue field_expr = {};
					isize index = field_index;

					if (elem->kind == Ast_FieldValue) {
						ast_node(fv, FieldValue, elem);
						Selection sel = lookup_field(bt, fv->field->Ident.token.string, false);
						index = sel.index[0];
						elem = fv->value;
					} else {
						TypeAndValue tav = type_and_value_of_expr(elem);
						Selection sel = lookup_field(bt, field_names[field_index], false);
						index = sel.index[0];
					}

					field_expr = lb_build_expr(p, elem);

					GB_ASSERT(field_expr.type->kind != Type_Tuple);

					Type *ft = field_types[index];
					lbValue fv = lb_emit_conv(p, field_expr, ft);
					lbValue gep = lb_emit_struct_ep(p, lb_addr_get_ptr(p, v), cast(i32)index);
					lb_emit_store(p, gep, fv);
				}
			}
			break;
		}
		case Type_BitSet: {
			i64 sz = type_size_of(type);
			if (cl->elems.count > 0 && sz > 0) {
				lb_addr_store(p, v, lb_const_value(p->module, type, exact_value_compound(expr)));

				lbValue lower = lb_const_value(p->module, t_int, exact_value_i64(bt->BitSet.lower));
				for_array(i, cl->elems) {
					Ast *elem = cl->elems[i];
					GB_ASSERT(elem->kind != Ast_FieldValue);

					if (lb_is_elem_const(elem, et)) {
						continue;
					}

					lbValue expr = lb_build_expr(p, elem);
					GB_ASSERT(expr.type->kind != Type_Tuple);

					Type *it = bit_set_to_int(bt);
					lbValue one = lb_const_value(p->module, it, exact_value_i64(1));
					lbValue e = lb_emit_conv(p, expr, it);
					e = lb_emit_arith(p, Token_Sub, e, lower, it);
					e = lb_emit_arith(p, Token_Shl, one, e, it);

					lbValue old_value = lb_emit_transmute(p, lb_addr_load(p, v), it);
					lbValue new_value = lb_emit_arith(p, Token_Or, old_value, e, it);
					new_value = lb_emit_transmute(p, new_value, type);
					lb_addr_store(p, v, new_value);
				}
			}
			break;
		}
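
		// Matrix literals reuse the array staging pattern, but each logical element index
		// is mapped through matrix_index_to_offset to its padded storage offset first.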
		case Type_Matrix: {
			if (cl->elems.count > 0) {
				lb_addr_store(p, v, lb_const_value(p->module, type, exact_value_compound(expr)));

				auto temp_data = array_make<lbCompoundLitElemTempData>(temporary_allocator(), 0, cl->elems.count);

				// NOTE(bill): Separate value, gep, store into their own chunks
				for_array(i, cl->elems) {
					Ast *elem = cl->elems[i];
					if (elem->kind == Ast_FieldValue) {
						ast_node(fv, FieldValue, elem);
						if (lb_is_elem_const(fv->value, et)) {
							continue;
						}
						if (is_ast_range(fv->field)) {
							ast_node(ie, BinaryExpr, fv->field);
							TypeAndValue lo_tav = ie->left->tav;
							TypeAndValue hi_tav = ie->right->tav;
							GB_ASSERT(lo_tav.mode == Addressing_Constant);
							GB_ASSERT(hi_tav.mode == Addressing_Constant);

							TokenKind op = ie->op.kind;
							i64 lo = exact_value_to_i64(lo_tav.value);
							i64 hi = exact_value_to_i64(hi_tav.value);
							if (op != Token_RangeHalf) {
								hi += 1;
							}

							lbValue value = lb_build_expr(p, fv->value);

							for (i64 k = lo; k < hi; k++) {
								lbCompoundLitElemTempData data = {};
								data.value = value;
								data.elem_index = cast(i32)matrix_index_to_offset(bt, k);
								array_add(&temp_data, data);
							}
						} else {
							auto tav = fv->field->tav;
							GB_ASSERT(tav.mode == Addressing_Constant);
							i64 index = exact_value_to_i64(tav.value);

							lbValue value = lb_build_expr(p, fv->value);
							lbCompoundLitElemTempData data = {};
							data.value = lb_emit_conv(p, value, et);
							data.expr = fv->value;
							data.elem_index = cast(i32)matrix_index_to_offset(bt, index);
							array_add(&temp_data, data);
						}
					} else {
						if (lb_is_elem_const(elem, et)) {
							continue;
						}
						lbCompoundLitElemTempData data = {};
						data.expr = elem;
						data.elem_index = cast(i32)matrix_index_to_offset(bt, i);
						array_add(&temp_data, data);
					}
				}

				for_array(i, temp_data) {
					temp_data[i].gep = lb_emit_array_epi(p, lb_addr_get_ptr(p, v), temp_data[i].elem_index);
				}

				for_array(i, temp_data) {
					lbValue field_expr = temp_data[i].value;
					Ast *expr = temp_data[i].expr;

					auto prev_hint = lb_set_copy_elision_hint(p, lb_addr(temp_data[i].gep), expr);

					if (field_expr.value == nullptr) {
						field_expr = lb_build_expr(p, expr);
					}
					Type *t = field_expr.type;
					GB_ASSERT(t->kind != Type_Tuple);
					lbValue ev = lb_emit_conv(p, field_expr, et);

					if (!p->copy_elision_hint.used) {
						temp_data[i].value = ev;
					}

					lb_reset_copy_elision_hint(p, prev_hint);
				}

				for_array(i, temp_data) {
					if (temp_data[i].value.value != nullptr) {
						lb_emit_store(p, temp_data[i].gep, temp_data[i].value);
					}
				}
			}
			break;
		}
		}

		return v;
	case_end;
	case_ast_node(tc, TypeCast, expr);
		Type *type = type_of_expr(expr);
		lbValue x = lb_build_expr(p, tc->expr);
		lbValue e = {};
		switch (tc->token.kind) {
		case Token_cast:
			e = lb_emit_conv(p, x, type);
			break;
		case Token_transmute:
			e = lb_emit_transmute(p, x, type);
			break;
		default:
			GB_PANIC("Invalid AST TypeCast");
		}
		lbAddr v = lb_add_local_generated(p, type, false);
		lb_addr_store(p, v, e);
		return v;
	case_end;

	case_ast_node(ac, AutoCast, expr);
		return lb_build_addr(p, ac->expr);
	case_end;
	}

	TokenPos token_pos = ast_token(expr).pos;
	GB_PANIC("Unexpected address expression\n"
	         "\tAst: %.*s @ "
	         "%s\n",
	         LIT(ast_strings[expr->kind]),
	         token_pos_to_string(token_pos));

	return {};
}