// parser.cpp
#include "parser_pos.cpp"
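// Returns a copy of `tok` whose column has been advanced to the last character of the line on which the token starts.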
Token token_end_of_line(AstFile *f, Token tok) {
	u8 const *start = f->tokenizer.start + tok.pos.offset;
	u8 const *s = start;
	while (*s && *s != '\n' && s < f->tokenizer.end) {
		s += 1;
	}
	tok.pos.column += cast(i32)(s - start) - 1;
	return tok;
}
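// Extracts the whitespace-trimmed source line containing `pos` as a newly allocated gbString.
// If `offset_` is non-null, it receives the byte offset of `pos` within the returned line.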
gbString get_file_line_as_string(TokenPos const &pos, i32 *offset_) {
	AstFile *file = get_ast_file_from_id(pos.file_id);
	if (file == nullptr) {
		return nullptr;
	}
	isize offset = pos.offset;

	u8 *start = file->tokenizer.start;
	u8 *end = file->tokenizer.end;
	isize len = end-start;
	if (len < offset) {
		return nullptr;
	}

	u8 *pos_offset = start+offset;
	u8 *line_start = pos_offset;
	u8 *line_end = pos_offset;
	while (line_start >= start) {
		if (*line_start == '\n') {
			line_start += 1;
			break;
		}
		line_start -= 1;
	}
	while (line_end < end) {
		if (*line_end == '\n') {
			line_end -= 1;
			break;
		}
		line_end += 1;
	}

	String the_line = make_string(line_start, line_end-line_start);
	the_line = string_trim_whitespace(the_line);

	if (offset_) *offset_ = cast(i32)(pos_offset - the_line.text);

	return gb_string_make_length(heap_allocator(), the_line.text, the_line.len);
}
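// Size of an Ast node of the given kind: the common header plus the variant payload, rounded up to pointer alignment.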
isize ast_node_size(AstKind kind) {
	return align_formula_isize(gb_size_of(AstCommonStuff) + ast_variant_sizes[kind], gb_align_of(void *));
}
// NOTE(bill): And this below is why I/we need a new language! Discriminated unions are a pain in C/C++
Ast *alloc_ast_node(AstFile *f, AstKind kind) {
	gbAllocator a = ast_allocator(f);
	isize size = ast_node_size(kind);
	Ast *node = cast(Ast *)gb_alloc(a, size);
	node->kind = kind;
	node->file = f;
	return node;
}

Ast *clone_ast(Ast *node);
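// Deep-clones an array/slice of Ast pointers; an empty input yields an empty result.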
Array<Ast *> clone_ast_array(Array<Ast *> const &array) {
	Array<Ast *> result = {};
	if (array.count > 0) {
		result = array_make<Ast *>(ast_allocator(nullptr), array.count);
		for_array(i, array) {
			result[i] = clone_ast(array[i]);
		}
	}
	return result;
}

Slice<Ast *> clone_ast_array(Slice<Ast *> const &array) {
	Slice<Ast *> result = {};
	if (array.count > 0) {
		result = slice_clone(permanent_allocator(), array);
		for_array(i, array) {
			result[i] = clone_ast(array[i]);
		}
	}
	return result;
}
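// Deep copy of an Ast subtree: the node is copied bitwise, then every child pointer is
// recursively cloned per variant. Checker-derived fields (e.g. Ident.entity, CaseClause.implicit_entity) are reset.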
Ast *clone_ast(Ast *node) {
	if (node == nullptr) {
		return nullptr;
	}
	Ast *n = alloc_ast_node(node->file, node->kind);
	gb_memmove(n, node, ast_node_size(node->kind));

	switch (n->kind) {
	default: GB_PANIC("Unhandled Ast %.*s", LIT(ast_strings[n->kind])); break;

	case Ast_Invalid: break;
	case Ast_Ident:
		n->Ident.entity = nullptr;
		break;
	case Ast_Implicit: break;
	case Ast_Undef: break;
	case Ast_BasicLit: break;
	case Ast_BasicDirective: break;

	case Ast_PolyType:
		n->PolyType.type = clone_ast(n->PolyType.type);
		n->PolyType.specialization = clone_ast(n->PolyType.specialization);
		break;
	case Ast_Ellipsis:
		n->Ellipsis.expr = clone_ast(n->Ellipsis.expr);
		break;
	case Ast_ProcGroup:
		n->ProcGroup.args = clone_ast_array(n->ProcGroup.args);
		break;
	case Ast_ProcLit:
		n->ProcLit.type = clone_ast(n->ProcLit.type);
		n->ProcLit.body = clone_ast(n->ProcLit.body);
		n->ProcLit.where_clauses = clone_ast_array(n->ProcLit.where_clauses);
		break;
	case Ast_CompoundLit:
		n->CompoundLit.type = clone_ast(n->CompoundLit.type);
		n->CompoundLit.elems = clone_ast_array(n->CompoundLit.elems);
		break;

	case Ast_BadExpr: break;
	case Ast_TagExpr:
		n->TagExpr.expr = clone_ast(n->TagExpr.expr);
		break;
	case Ast_UnaryExpr:
		n->UnaryExpr.expr = clone_ast(n->UnaryExpr.expr);
		break;
	case Ast_BinaryExpr:
		n->BinaryExpr.left = clone_ast(n->BinaryExpr.left);
		n->BinaryExpr.right = clone_ast(n->BinaryExpr.right);
		break;
	case Ast_ParenExpr:
		n->ParenExpr.expr = clone_ast(n->ParenExpr.expr);
		break;
	case Ast_SelectorExpr:
		n->SelectorExpr.expr = clone_ast(n->SelectorExpr.expr);
		n->SelectorExpr.selector = clone_ast(n->SelectorExpr.selector);
		break;
	case Ast_ImplicitSelectorExpr:
		n->ImplicitSelectorExpr.selector = clone_ast(n->ImplicitSelectorExpr.selector);
		break;
	case Ast_SelectorCallExpr:
		n->SelectorCallExpr.expr = clone_ast(n->SelectorCallExpr.expr);
		n->SelectorCallExpr.call = clone_ast(n->SelectorCallExpr.call);
		break;
	case Ast_IndexExpr:
		n->IndexExpr.expr = clone_ast(n->IndexExpr.expr);
		n->IndexExpr.index = clone_ast(n->IndexExpr.index);
		break;
	case Ast_DerefExpr:
		n->DerefExpr.expr = clone_ast(n->DerefExpr.expr);
		break;
	case Ast_SliceExpr:
		n->SliceExpr.expr = clone_ast(n->SliceExpr.expr);
		n->SliceExpr.low = clone_ast(n->SliceExpr.low);
		n->SliceExpr.high = clone_ast(n->SliceExpr.high);
		break;
	case Ast_CallExpr:
		n->CallExpr.proc = clone_ast(n->CallExpr.proc);
		n->CallExpr.args = clone_ast_array(n->CallExpr.args);
		break;
	case Ast_FieldValue:
		n->FieldValue.field = clone_ast(n->FieldValue.field);
		n->FieldValue.value = clone_ast(n->FieldValue.value);
		break;
	case Ast_TernaryIfExpr:
		n->TernaryIfExpr.x = clone_ast(n->TernaryIfExpr.x);
		n->TernaryIfExpr.cond = clone_ast(n->TernaryIfExpr.cond);
		n->TernaryIfExpr.y = clone_ast(n->TernaryIfExpr.y);
		break;
	case Ast_TernaryWhenExpr:
		n->TernaryWhenExpr.x = clone_ast(n->TernaryWhenExpr.x);
		n->TernaryWhenExpr.cond = clone_ast(n->TernaryWhenExpr.cond);
		n->TernaryWhenExpr.y = clone_ast(n->TernaryWhenExpr.y);
		break;
	case Ast_OrElseExpr:
		n->OrElseExpr.x = clone_ast(n->OrElseExpr.x);
		n->OrElseExpr.y = clone_ast(n->OrElseExpr.y);
		break;
	case Ast_OrReturnExpr:
		n->OrReturnExpr.expr = clone_ast(n->OrReturnExpr.expr);
		break;
	case Ast_TypeAssertion:
		n->TypeAssertion.expr = clone_ast(n->TypeAssertion.expr);
		n->TypeAssertion.type = clone_ast(n->TypeAssertion.type);
		break;
	case Ast_TypeCast:
		n->TypeCast.type = clone_ast(n->TypeCast.type);
		n->TypeCast.expr = clone_ast(n->TypeCast.expr);
		break;
	case Ast_AutoCast:
		n->AutoCast.expr = clone_ast(n->AutoCast.expr);
		break;
	case Ast_InlineAsmExpr:
		n->InlineAsmExpr.param_types = clone_ast_array(n->InlineAsmExpr.param_types);
		n->InlineAsmExpr.return_type = clone_ast(n->InlineAsmExpr.return_type);
		n->InlineAsmExpr.asm_string = clone_ast(n->InlineAsmExpr.asm_string);
		n->InlineAsmExpr.constraints_string = clone_ast(n->InlineAsmExpr.constraints_string);
		break;

	case Ast_BadStmt: break;
	case Ast_EmptyStmt: break;
	case Ast_ExprStmt:
		n->ExprStmt.expr = clone_ast(n->ExprStmt.expr);
		break;
	case Ast_TagStmt:
		n->TagStmt.stmt = clone_ast(n->TagStmt.stmt);
		break;
	case Ast_AssignStmt:
		n->AssignStmt.lhs = clone_ast_array(n->AssignStmt.lhs);
		n->AssignStmt.rhs = clone_ast_array(n->AssignStmt.rhs);
		break;
	case Ast_BlockStmt:
		n->BlockStmt.label = clone_ast(n->BlockStmt.label);
		n->BlockStmt.stmts = clone_ast_array(n->BlockStmt.stmts);
		break;
	case Ast_IfStmt:
		n->IfStmt.label = clone_ast(n->IfStmt.label);
		n->IfStmt.init = clone_ast(n->IfStmt.init);
		n->IfStmt.cond = clone_ast(n->IfStmt.cond);
		n->IfStmt.body = clone_ast(n->IfStmt.body);
		n->IfStmt.else_stmt = clone_ast(n->IfStmt.else_stmt);
		break;
	case Ast_WhenStmt:
		n->WhenStmt.cond = clone_ast(n->WhenStmt.cond);
		n->WhenStmt.body = clone_ast(n->WhenStmt.body);
		n->WhenStmt.else_stmt = clone_ast(n->WhenStmt.else_stmt);
		break;
	case Ast_ReturnStmt:
		n->ReturnStmt.results = clone_ast_array(n->ReturnStmt.results);
		break;
	case Ast_ForStmt:
		n->ForStmt.label = clone_ast(n->ForStmt.label);
		n->ForStmt.init = clone_ast(n->ForStmt.init);
		n->ForStmt.cond = clone_ast(n->ForStmt.cond);
		n->ForStmt.post = clone_ast(n->ForStmt.post);
		n->ForStmt.body = clone_ast(n->ForStmt.body);
		break;
	case Ast_RangeStmt:
		n->RangeStmt.label = clone_ast(n->RangeStmt.label);
		n->RangeStmt.vals = clone_ast_array(n->RangeStmt.vals);
		n->RangeStmt.expr = clone_ast(n->RangeStmt.expr);
		n->RangeStmt.body = clone_ast(n->RangeStmt.body);
		break;
	case Ast_UnrollRangeStmt:
		n->UnrollRangeStmt.val0 = clone_ast(n->UnrollRangeStmt.val0);
		n->UnrollRangeStmt.val1 = clone_ast(n->UnrollRangeStmt.val1);
		n->UnrollRangeStmt.expr = clone_ast(n->UnrollRangeStmt.expr);
		n->UnrollRangeStmt.body = clone_ast(n->UnrollRangeStmt.body);
		break;
	case Ast_CaseClause:
		n->CaseClause.list = clone_ast_array(n->CaseClause.list);
		n->CaseClause.stmts = clone_ast_array(n->CaseClause.stmts);
		n->CaseClause.implicit_entity = nullptr;
		break;
	case Ast_SwitchStmt:
		n->SwitchStmt.label = clone_ast(n->SwitchStmt.label);
		n->SwitchStmt.init = clone_ast(n->SwitchStmt.init);
		n->SwitchStmt.tag = clone_ast(n->SwitchStmt.tag);
		n->SwitchStmt.body = clone_ast(n->SwitchStmt.body);
		break;
	case Ast_TypeSwitchStmt:
		n->TypeSwitchStmt.label = clone_ast(n->TypeSwitchStmt.label);
		n->TypeSwitchStmt.tag = clone_ast(n->TypeSwitchStmt.tag);
		n->TypeSwitchStmt.body = clone_ast(n->TypeSwitchStmt.body);
		break;
	case Ast_DeferStmt:
		n->DeferStmt.stmt = clone_ast(n->DeferStmt.stmt);
		break;
	case Ast_BranchStmt:
		n->BranchStmt.label = clone_ast(n->BranchStmt.label);
		break;
	case Ast_UsingStmt:
		n->UsingStmt.list = clone_ast_array(n->UsingStmt.list);
		break;

	case Ast_BadDecl: break;
	case Ast_ForeignBlockDecl:
		n->ForeignBlockDecl.foreign_library = clone_ast(n->ForeignBlockDecl.foreign_library);
		n->ForeignBlockDecl.body = clone_ast(n->ForeignBlockDecl.body);
		n->ForeignBlockDecl.attributes = clone_ast_array(n->ForeignBlockDecl.attributes);
		break;
	case Ast_Label:
		n->Label.name = clone_ast(n->Label.name);
		break;
	case Ast_ValueDecl:
		n->ValueDecl.names = clone_ast_array(n->ValueDecl.names);
		n->ValueDecl.type = clone_ast(n->ValueDecl.type);
		n->ValueDecl.values = clone_ast_array(n->ValueDecl.values);
		n->ValueDecl.attributes = clone_ast_array(n->ValueDecl.attributes);
		break;
	case Ast_Attribute:
		n->Attribute.elems = clone_ast_array(n->Attribute.elems);
		break;
	case Ast_Field:
		n->Field.names = clone_ast_array(n->Field.names);
		n->Field.type = clone_ast(n->Field.type);
		break;
	case Ast_FieldList:
		n->FieldList.list = clone_ast_array(n->FieldList.list);
		break;

	case Ast_TypeidType:
		n->TypeidType.specialization = clone_ast(n->TypeidType.specialization);
		break;
	case Ast_HelperType:
		n->HelperType.type = clone_ast(n->HelperType.type);
		break;
	case Ast_DistinctType:
		n->DistinctType.type = clone_ast(n->DistinctType.type);
		break;
	case Ast_ProcType:
		n->ProcType.params = clone_ast(n->ProcType.params);
		n->ProcType.results = clone_ast(n->ProcType.results);
		break;
	case Ast_RelativeType:
		n->RelativeType.tag = clone_ast(n->RelativeType.tag);
		n->RelativeType.type = clone_ast(n->RelativeType.type);
		break;
	case Ast_PointerType:
		n->PointerType.type = clone_ast(n->PointerType.type);
		break;
	case Ast_ArrayType:
		n->ArrayType.count = clone_ast(n->ArrayType.count);
		n->ArrayType.elem = clone_ast(n->ArrayType.elem);
		break;
	case Ast_DynamicArrayType:
		n->DynamicArrayType.elem = clone_ast(n->DynamicArrayType.elem);
		break;
	case Ast_StructType:
		n->StructType.fields = clone_ast_array(n->StructType.fields);
		n->StructType.polymorphic_params = clone_ast(n->StructType.polymorphic_params);
		n->StructType.align = clone_ast(n->StructType.align);
		n->StructType.where_clauses = clone_ast_array(n->StructType.where_clauses);
		break;
	case Ast_UnionType:
		n->UnionType.variants = clone_ast_array(n->UnionType.variants);
		n->UnionType.polymorphic_params = clone_ast(n->UnionType.polymorphic_params);
		n->UnionType.where_clauses = clone_ast_array(n->UnionType.where_clauses);
		break;
	case Ast_EnumType:
		n->EnumType.base_type = clone_ast(n->EnumType.base_type);
		n->EnumType.fields = clone_ast_array(n->EnumType.fields);
		break;
	case Ast_BitSetType:
		n->BitSetType.elem = clone_ast(n->BitSetType.elem);
		n->BitSetType.underlying = clone_ast(n->BitSetType.underlying);
		break;
	case Ast_MapType:
		n->MapType.count = clone_ast(n->MapType.count);
		n->MapType.key = clone_ast(n->MapType.key);
		n->MapType.value = clone_ast(n->MapType.value);
		break;
	}

	return n;
}
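// Error-reporting helpers: error, error_no_newline, warning, and syntax_error report at the
// node's token position (and end position where applicable); all but warning also increment
// the owning file's error count.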
void error(Ast *node, char const *fmt, ...) {
	Token token = {};
	TokenPos end_pos = {};
	if (node != nullptr) {
		token = ast_token(node);
		end_pos = ast_end_pos(node);
	}
	va_list va;
	va_start(va, fmt);
	error_va(token.pos, end_pos, fmt, va);
	va_end(va);
	if (node != nullptr && node->file != nullptr) {
		node->file->error_count += 1;
	}
}

void error_no_newline(Ast *node, char const *fmt, ...) {
	Token token = {};
	if (node != nullptr) {
		token = ast_token(node);
	}
	va_list va;
	va_start(va, fmt);
	error_no_newline_va(token.pos, fmt, va);
	va_end(va);
	if (node != nullptr && node->file != nullptr) {
		node->file->error_count += 1;
	}
}

void warning(Ast *node, char const *fmt, ...) {
	Token token = {};
	TokenPos end_pos = {};
	if (node != nullptr) {
		token = ast_token(node);
		end_pos = ast_end_pos(node);
	}
	va_list va;
	va_start(va, fmt);
	warning_va(token.pos, end_pos, fmt, va);
	va_end(va);
}

void syntax_error(Ast *node, char const *fmt, ...) {
	Token token = {};
	TokenPos end_pos = {};
	if (node != nullptr) {
		token = ast_token(node);
		end_pos = ast_end_pos(node);
	}
	va_list va;
	va_start(va, fmt);
	syntax_error_va(token.pos, end_pos, fmt, va);
	va_end(va);
	if (node != nullptr && node->file != nullptr) {
		node->file->error_count += 1;
	}
}

bool ast_node_expect(Ast *node, AstKind kind) {
	if (node->kind != kind) {
		syntax_error(node, "Expected %.*s, got %.*s", LIT(ast_strings[kind]), LIT(ast_strings[node->kind]));
		return false;
	}
	return true;
}

bool ast_node_expect2(Ast *node, AstKind kind0, AstKind kind1) {
	if (node->kind != kind0 && node->kind != kind1) {
		syntax_error(node, "Expected %.*s or %.*s, got %.*s", LIT(ast_strings[kind0]), LIT(ast_strings[kind1]), LIT(ast_strings[node->kind]));
		return false;
	}
	return true;
}
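// Constructors for the individual Ast variants: each allocates a node of the matching kind
// and fills in its fields from the parser's tokens and sub-expressions.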
Ast *ast_bad_expr(AstFile *f, Token begin, Token end) {
	Ast *result = alloc_ast_node(f, Ast_BadExpr);
	result->BadExpr.begin = begin;
	result->BadExpr.end = end;
	return result;
}

Ast *ast_tag_expr(AstFile *f, Token token, Token name, Ast *expr) {
	Ast *result = alloc_ast_node(f, Ast_TagExpr);
	result->TagExpr.token = token;
	result->TagExpr.name = name;
	result->TagExpr.expr = expr;
	return result;
}

Ast *ast_tag_stmt(AstFile *f, Token token, Token name, Ast *stmt) {
	Ast *result = alloc_ast_node(f, Ast_TagStmt);
	result->TagStmt.token = token;
	result->TagStmt.name = name;
	result->TagStmt.stmt = stmt;
	return result;
}

Ast *ast_unary_expr(AstFile *f, Token op, Ast *expr) {
	Ast *result = alloc_ast_node(f, Ast_UnaryExpr);
	result->UnaryExpr.op = op;
	result->UnaryExpr.expr = expr;
	return result;
}

Ast *ast_binary_expr(AstFile *f, Token op, Ast *left, Ast *right) {
	Ast *result = alloc_ast_node(f, Ast_BinaryExpr);

	if (left == nullptr) {
		syntax_error(op, "No lhs expression for binary expression '%.*s'", LIT(op.string));
		left = ast_bad_expr(f, op, op);
	}
	if (right == nullptr) {
		syntax_error(op, "No rhs expression for binary expression '%.*s'", LIT(op.string));
		right = ast_bad_expr(f, op, op);
	}

	result->BinaryExpr.op = op;
	result->BinaryExpr.left = left;
	result->BinaryExpr.right = right;
	return result;
}

Ast *ast_paren_expr(AstFile *f, Ast *expr, Token open, Token close) {
	Ast *result = alloc_ast_node(f, Ast_ParenExpr);
	result->ParenExpr.expr = expr;
	result->ParenExpr.open = open;
	result->ParenExpr.close = close;
	return result;
}

Ast *ast_call_expr(AstFile *f, Ast *proc, Array<Ast *> const &args, Token open, Token close, Token ellipsis) {
	Ast *result = alloc_ast_node(f, Ast_CallExpr);
	result->CallExpr.proc = proc;
	result->CallExpr.args = slice_from_array(args);
	result->CallExpr.open = open;
	result->CallExpr.close = close;
	result->CallExpr.ellipsis = ellipsis;
	return result;
}

Ast *ast_selector_expr(AstFile *f, Token token, Ast *expr, Ast *selector) {
	Ast *result = alloc_ast_node(f, Ast_SelectorExpr);
	result->SelectorExpr.token = token;
	result->SelectorExpr.expr = expr;
	result->SelectorExpr.selector = selector;
	return result;
}

Ast *ast_implicit_selector_expr(AstFile *f, Token token, Ast *selector) {
	Ast *result = alloc_ast_node(f, Ast_ImplicitSelectorExpr);
	result->ImplicitSelectorExpr.token = token;
	result->ImplicitSelectorExpr.selector = selector;
	return result;
}

Ast *ast_selector_call_expr(AstFile *f, Token token, Ast *expr, Ast *call) {
	Ast *result = alloc_ast_node(f, Ast_SelectorCallExpr);
	result->SelectorCallExpr.token = token;
	result->SelectorCallExpr.expr = expr;
	result->SelectorCallExpr.call = call;
	return result;
}

Ast *ast_index_expr(AstFile *f, Ast *expr, Ast *index, Token open, Token close) {
	Ast *result = alloc_ast_node(f, Ast_IndexExpr);
	result->IndexExpr.expr = expr;
	result->IndexExpr.index = index;
	result->IndexExpr.open = open;
	result->IndexExpr.close = close;
	return result;
}

Ast *ast_slice_expr(AstFile *f, Ast *expr, Token open, Token close, Token interval, Ast *low, Ast *high) {
	Ast *result = alloc_ast_node(f, Ast_SliceExpr);
	result->SliceExpr.expr = expr;
	result->SliceExpr.open = open;
	result->SliceExpr.close = close;
	result->SliceExpr.interval = interval;
	result->SliceExpr.low = low;
	result->SliceExpr.high = high;
	return result;
}

Ast *ast_deref_expr(AstFile *f, Ast *expr, Token op) {
	Ast *result = alloc_ast_node(f, Ast_DerefExpr);
	result->DerefExpr.expr = expr;
	result->DerefExpr.op = op;
	return result;
}

Ast *ast_ident(AstFile *f, Token token) {
	Ast *result = alloc_ast_node(f, Ast_Ident);
	result->Ident.token = token;
	return result;
}

Ast *ast_implicit(AstFile *f, Token token) {
	Ast *result = alloc_ast_node(f, Ast_Implicit);
	result->Implicit = token;
	return result;
}

Ast *ast_undef(AstFile *f, Token token) {
	Ast *result = alloc_ast_node(f, Ast_Undef);
	result->Undef = token;
	return result;
}
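// Converts a rune/string/number literal token into an ExactValue, unquoting rune and string
// literals in place (a leading backtick marks a raw string literal).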
ExactValue exact_value_from_token(AstFile *f, Token const &token) {
	String s = token.string;
	switch (token.kind) {
	case Token_Rune:
		if (!unquote_string(ast_allocator(f), &s, 0)) {
			syntax_error(token, "Invalid rune literal");
		}
		break;
	case Token_String:
		if (!unquote_string(ast_allocator(f), &s, 0, s.text[0] == '`')) {
			syntax_error(token, "Invalid string literal");
		}
		break;
	}
	return exact_value_from_basic_literal(token.kind, s);
}

String string_value_from_token(AstFile *f, Token const &token) {
	ExactValue value = exact_value_from_token(f, token);
	String str = {};
	if (value.kind == ExactValue_String) {
		str = value.value_string;
	}
	return str;
}
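// Basic literals are constant-folded at parse time: the node's tav (type-and-value) info is
// marked Addressing_Constant and carries the literal's ExactValue.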
Ast *ast_basic_lit(AstFile *f, Token basic_lit) {
	Ast *result = alloc_ast_node(f, Ast_BasicLit);
	result->BasicLit.token = basic_lit;
	result->tav.mode = Addressing_Constant;
	result->tav.value = exact_value_from_token(f, basic_lit);
	return result;
}

Ast *ast_basic_directive(AstFile *f, Token token, Token name) {
	Ast *result = alloc_ast_node(f, Ast_BasicDirective);
	result->BasicDirective.token = token;
	result->BasicDirective.name = name;
	return result;
}

Ast *ast_ellipsis(AstFile *f, Token token, Ast *expr) {
	Ast *result = alloc_ast_node(f, Ast_Ellipsis);
	result->Ellipsis.token = token;
	result->Ellipsis.expr = expr;
	return result;
}

Ast *ast_proc_group(AstFile *f, Token token, Token open, Token close, Array<Ast *> const &args) {
	Ast *result = alloc_ast_node(f, Ast_ProcGroup);
	result->ProcGroup.token = token;
	result->ProcGroup.open = open;
	result->ProcGroup.close = close;
	result->ProcGroup.args = slice_from_array(args);
	return result;
}

Ast *ast_proc_lit(AstFile *f, Ast *type, Ast *body, u64 tags, Token where_token, Array<Ast *> const &where_clauses) {
	Ast *result = alloc_ast_node(f, Ast_ProcLit);
	result->ProcLit.type = type;
	result->ProcLit.body = body;
	result->ProcLit.tags = tags;
	result->ProcLit.where_token = where_token;
	result->ProcLit.where_clauses = slice_from_array(where_clauses);
	return result;
}

Ast *ast_field_value(AstFile *f, Ast *field, Ast *value, Token eq) {
	Ast *result = alloc_ast_node(f, Ast_FieldValue);
	result->FieldValue.field = field;
	result->FieldValue.value = value;
	result->FieldValue.eq = eq;
	return result;
}

Ast *ast_compound_lit(AstFile *f, Ast *type, Array<Ast *> const &elems, Token open, Token close) {
	Ast *result = alloc_ast_node(f, Ast_CompoundLit);
	result->CompoundLit.type = type;
	result->CompoundLit.elems = slice_from_array(elems);
	result->CompoundLit.open = open;
	result->CompoundLit.close = close;
	return result;
}

Ast *ast_ternary_if_expr(AstFile *f, Ast *x, Ast *cond, Ast *y) {
	Ast *result = alloc_ast_node(f, Ast_TernaryIfExpr);
	result->TernaryIfExpr.x = x;
	result->TernaryIfExpr.cond = cond;
	result->TernaryIfExpr.y = y;
	return result;
}

Ast *ast_ternary_when_expr(AstFile *f, Ast *x, Ast *cond, Ast *y) {
	Ast *result = alloc_ast_node(f, Ast_TernaryWhenExpr);
	result->TernaryWhenExpr.x = x;
	result->TernaryWhenExpr.cond = cond;
	result->TernaryWhenExpr.y = y;
	return result;
}

Ast *ast_or_else_expr(AstFile *f, Ast *x, Token const &token, Ast *y) {
	Ast *result = alloc_ast_node(f, Ast_OrElseExpr);
	result->OrElseExpr.x = x;
	result->OrElseExpr.token = token;
	result->OrElseExpr.y = y;
	return result;
}

Ast *ast_or_return_expr(AstFile *f, Ast *expr, Token const &token) {
	Ast *result = alloc_ast_node(f, Ast_OrReturnExpr);
	result->OrReturnExpr.expr = expr;
	result->OrReturnExpr.token = token;
	return result;
}

Ast *ast_type_assertion(AstFile *f, Ast *expr, Token dot, Ast *type) {
	Ast *result = alloc_ast_node(f, Ast_TypeAssertion);
	result->TypeAssertion.expr = expr;
	result->TypeAssertion.dot = dot;
	result->TypeAssertion.type = type;
	return result;
}

Ast *ast_type_cast(AstFile *f, Token token, Ast *type, Ast *expr) {
	Ast *result = alloc_ast_node(f, Ast_TypeCast);
	result->TypeCast.token = token;
	result->TypeCast.type = type;
	result->TypeCast.expr = expr;
	return result;
}

Ast *ast_auto_cast(AstFile *f, Token token, Ast *expr) {
	Ast *result = alloc_ast_node(f, Ast_AutoCast);
	result->AutoCast.token = token;
	result->AutoCast.expr = expr;
	return result;
}

Ast *ast_inline_asm_expr(AstFile *f, Token token, Token open, Token close,
                         Array<Ast *> const &param_types,
                         Ast *return_type,
                         Ast *asm_string,
                         Ast *constraints_string,
                         bool has_side_effects,
                         bool is_align_stack,
                         InlineAsmDialectKind dialect) {
	Ast *result = alloc_ast_node(f, Ast_InlineAsmExpr);
	result->InlineAsmExpr.token = token;
	result->InlineAsmExpr.open = open;
	result->InlineAsmExpr.close = close;
	result->InlineAsmExpr.param_types = slice_from_array(param_types);
	result->InlineAsmExpr.return_type = return_type;
	result->InlineAsmExpr.asm_string = asm_string;
	result->InlineAsmExpr.constraints_string = constraints_string;
	result->InlineAsmExpr.has_side_effects = has_side_effects;
	result->InlineAsmExpr.is_align_stack = is_align_stack;
	result->InlineAsmExpr.dialect = dialect;
	return result;
}
Ast *ast_bad_stmt(AstFile *f, Token begin, Token end) {
	Ast *result = alloc_ast_node(f, Ast_BadStmt);
	result->BadStmt.begin = begin;
	result->BadStmt.end = end;
	return result;
}

Ast *ast_empty_stmt(AstFile *f, Token token) {
	Ast *result = alloc_ast_node(f, Ast_EmptyStmt);
	result->EmptyStmt.token = token;
	return result;
}

Ast *ast_expr_stmt(AstFile *f, Ast *expr) {
	Ast *result = alloc_ast_node(f, Ast_ExprStmt);
	result->ExprStmt.expr = expr;
	return result;
}

Ast *ast_assign_stmt(AstFile *f, Token op, Array<Ast *> const &lhs, Array<Ast *> const &rhs) {
	Ast *result = alloc_ast_node(f, Ast_AssignStmt);
	result->AssignStmt.op = op;
	result->AssignStmt.lhs = slice_from_array(lhs);
	result->AssignStmt.rhs = slice_from_array(rhs);
	return result;
}

Ast *ast_block_stmt(AstFile *f, Array<Ast *> const &stmts, Token open, Token close) {
	Ast *result = alloc_ast_node(f, Ast_BlockStmt);
	result->BlockStmt.stmts = slice_from_array(stmts);
	result->BlockStmt.open = open;
	result->BlockStmt.close = close;
	return result;
}

Ast *ast_if_stmt(AstFile *f, Token token, Ast *init, Ast *cond, Ast *body, Ast *else_stmt) {
	Ast *result = alloc_ast_node(f, Ast_IfStmt);
	result->IfStmt.token = token;
	result->IfStmt.init = init;
	result->IfStmt.cond = cond;
	result->IfStmt.body = body;
	result->IfStmt.else_stmt = else_stmt;
	return result;
}

Ast *ast_when_stmt(AstFile *f, Token token, Ast *cond, Ast *body, Ast *else_stmt) {
	Ast *result = alloc_ast_node(f, Ast_WhenStmt);
	result->WhenStmt.token = token;
	result->WhenStmt.cond = cond;
	result->WhenStmt.body = body;
	result->WhenStmt.else_stmt = else_stmt;
	return result;
}

Ast *ast_return_stmt(AstFile *f, Token token, Array<Ast *> const &results) {
	Ast *result = alloc_ast_node(f, Ast_ReturnStmt);
	result->ReturnStmt.token = token;
	result->ReturnStmt.results = slice_from_array(results);
	return result;
}

Ast *ast_for_stmt(AstFile *f, Token token, Ast *init, Ast *cond, Ast *post, Ast *body) {
	Ast *result = alloc_ast_node(f, Ast_ForStmt);
	result->ForStmt.token = token;
	result->ForStmt.init = init;
	result->ForStmt.cond = cond;
	result->ForStmt.post = post;
	result->ForStmt.body = body;
	return result;
}

Ast *ast_range_stmt(AstFile *f, Token token, Slice<Ast *> vals, Token in_token, Ast *expr, Ast *body) {
	Ast *result = alloc_ast_node(f, Ast_RangeStmt);
	result->RangeStmt.token = token;
	result->RangeStmt.vals = vals;
	result->RangeStmt.in_token = in_token;
	result->RangeStmt.expr = expr;
	result->RangeStmt.body = body;
	return result;
}

Ast *ast_unroll_range_stmt(AstFile *f, Token unroll_token, Token for_token, Ast *val0, Ast *val1, Token in_token, Ast *expr, Ast *body) {
	Ast *result = alloc_ast_node(f, Ast_UnrollRangeStmt);
	result->UnrollRangeStmt.unroll_token = unroll_token;
	result->UnrollRangeStmt.for_token = for_token;
	result->UnrollRangeStmt.val0 = val0;
	result->UnrollRangeStmt.val1 = val1;
	result->UnrollRangeStmt.in_token = in_token;
	result->UnrollRangeStmt.expr = expr;
	result->UnrollRangeStmt.body = body;
	return result;
}

Ast *ast_switch_stmt(AstFile *f, Token token, Ast *init, Ast *tag, Ast *body) {
	Ast *result = alloc_ast_node(f, Ast_SwitchStmt);
	result->SwitchStmt.token = token;
	result->SwitchStmt.init = init;
	result->SwitchStmt.tag = tag;
	result->SwitchStmt.body = body;
	result->SwitchStmt.partial = false;
	return result;
}

Ast *ast_type_switch_stmt(AstFile *f, Token token, Ast *tag, Ast *body) {
	Ast *result = alloc_ast_node(f, Ast_TypeSwitchStmt);
	result->TypeSwitchStmt.token = token;
	result->TypeSwitchStmt.tag = tag;
	result->TypeSwitchStmt.body = body;
	result->TypeSwitchStmt.partial = false;
	return result;
}

Ast *ast_case_clause(AstFile *f, Token token, Array<Ast *> const &list, Array<Ast *> const &stmts) {
	Ast *result = alloc_ast_node(f, Ast_CaseClause);
	result->CaseClause.token = token;
	result->CaseClause.list = slice_from_array(list);
	result->CaseClause.stmts = slice_from_array(stmts);
	return result;
}

Ast *ast_defer_stmt(AstFile *f, Token token, Ast *stmt) {
	Ast *result = alloc_ast_node(f, Ast_DeferStmt);
	result->DeferStmt.token = token;
	result->DeferStmt.stmt = stmt;
	return result;
}

Ast *ast_branch_stmt(AstFile *f, Token token, Ast *label) {
	Ast *result = alloc_ast_node(f, Ast_BranchStmt);
	result->BranchStmt.token = token;
	result->BranchStmt.label = label;
	return result;
}

Ast *ast_using_stmt(AstFile *f, Token token, Array<Ast *> const &list) {
	Ast *result = alloc_ast_node(f, Ast_UsingStmt);
	result->UsingStmt.token = token;
	result->UsingStmt.list = slice_from_array(list);
	return result;
}

Ast *ast_bad_decl(AstFile *f, Token begin, Token end) {
	Ast *result = alloc_ast_node(f, Ast_BadDecl);
	result->BadDecl.begin = begin;
	result->BadDecl.end = end;
	return result;
}
Ast *ast_field(AstFile *f, Array<Ast *> const &names, Ast *type, Ast *default_value, u32 flags, Token tag,
               CommentGroup *docs, CommentGroup *comment) {
	Ast *result = alloc_ast_node(f, Ast_Field);
	result->Field.names = slice_from_array(names);
	result->Field.type = type;
	result->Field.default_value = default_value;
	result->Field.flags = flags;
	result->Field.tag = tag;
	result->Field.docs = docs;
	result->Field.comment = comment;
	return result;
}

Ast *ast_field_list(AstFile *f, Token token, Array<Ast *> const &list) {
	Ast *result = alloc_ast_node(f, Ast_FieldList);
	result->FieldList.token = token;
	result->FieldList.list = slice_from_array(list);
	return result;
}

Ast *ast_typeid_type(AstFile *f, Token token, Ast *specialization) {
	Ast *result = alloc_ast_node(f, Ast_TypeidType);
	result->TypeidType.token = token;
	result->TypeidType.specialization = specialization;
	return result;
}

Ast *ast_helper_type(AstFile *f, Token token, Ast *type) {
	Ast *result = alloc_ast_node(f, Ast_HelperType);
	result->HelperType.token = token;
	result->HelperType.type = type;
	return result;
}

Ast *ast_distinct_type(AstFile *f, Token token, Ast *type) {
	Ast *result = alloc_ast_node(f, Ast_DistinctType);
	result->DistinctType.token = token;
	result->DistinctType.type = type;
	return result;
}

Ast *ast_poly_type(AstFile *f, Token token, Ast *type, Ast *specialization) {
	Ast *result = alloc_ast_node(f, Ast_PolyType);
	result->PolyType.token = token;
	result->PolyType.type = type;
	result->PolyType.specialization = specialization;
	return result;
}

Ast *ast_proc_type(AstFile *f, Token token, Ast *params, Ast *results, u64 tags, ProcCallingConvention calling_convention, bool generic, bool diverging) {
	Ast *result = alloc_ast_node(f, Ast_ProcType);
	result->ProcType.token = token;
	result->ProcType.params = params;
	result->ProcType.results = results;
	result->ProcType.tags = tags;
	result->ProcType.calling_convention = calling_convention;
	result->ProcType.generic = generic;
	result->ProcType.diverging = diverging;
	return result;
}

Ast *ast_relative_type(AstFile *f, Ast *tag, Ast *type) {
	Ast *result = alloc_ast_node(f, Ast_RelativeType);
	result->RelativeType.tag = tag;
	result->RelativeType.type = type;
	return result;
}

Ast *ast_pointer_type(AstFile *f, Token token, Ast *type) {
	Ast *result = alloc_ast_node(f, Ast_PointerType);
	result->PointerType.token = token;
	result->PointerType.type = type;
	return result;
}

Ast *ast_array_type(AstFile *f, Token token, Ast *count, Ast *elem) {
	Ast *result = alloc_ast_node(f, Ast_ArrayType);
	result->ArrayType.token = token;
	result->ArrayType.count = count;
	result->ArrayType.elem = elem;
	return result;
}

Ast *ast_dynamic_array_type(AstFile *f, Token token, Ast *elem) {
	Ast *result = alloc_ast_node(f, Ast_DynamicArrayType);
	result->DynamicArrayType.token = token;
	result->DynamicArrayType.elem = elem;
	return result;
}

Ast *ast_struct_type(AstFile *f, Token token, Slice<Ast *> fields, isize field_count,
                     Ast *polymorphic_params, bool is_packed, bool is_raw_union,
                     Ast *align,
                     Token where_token, Array<Ast *> const &where_clauses) {
	Ast *result = alloc_ast_node(f, Ast_StructType);
	result->StructType.token = token;
	result->StructType.fields = fields;
	result->StructType.field_count = field_count;
	result->StructType.polymorphic_params = polymorphic_params;
	result->StructType.is_packed = is_packed;
	result->StructType.is_raw_union = is_raw_union;
	result->StructType.align = align;
	result->StructType.where_token = where_token;
	result->StructType.where_clauses = slice_from_array(where_clauses);
	return result;
}

Ast *ast_union_type(AstFile *f, Token token, Array<Ast *> const &variants, Ast *polymorphic_params, Ast *align, bool no_nil, bool maybe,
                    Token where_token, Array<Ast *> const &where_clauses) {
	Ast *result = alloc_ast_node(f, Ast_UnionType);
	result->UnionType.token = token;
	result->UnionType.variants = slice_from_array(variants);
	result->UnionType.polymorphic_params = polymorphic_params;
	result->UnionType.align = align;
	result->UnionType.no_nil = no_nil;
	result->UnionType.maybe = maybe;
	result->UnionType.where_token = where_token;
	result->UnionType.where_clauses = slice_from_array(where_clauses);
	return result;
}

Ast *ast_enum_type(AstFile *f, Token token, Ast *base_type, Array<Ast *> const &fields) {
	Ast *result = alloc_ast_node(f, Ast_EnumType);
	result->EnumType.token = token;
	result->EnumType.base_type = base_type;
	result->EnumType.fields = slice_from_array(fields);
	return result;
}

Ast *ast_bit_set_type(AstFile *f, Token token, Ast *elem, Ast *underlying) {
	Ast *result = alloc_ast_node(f, Ast_BitSetType);
	result->BitSetType.token = token;
	result->BitSetType.elem = elem;
	result->BitSetType.underlying = underlying;
	return result;
}

Ast *ast_map_type(AstFile *f, Token token, Ast *key, Ast *value) {
	Ast *result = alloc_ast_node(f, Ast_MapType);
	result->MapType.token = token;
	result->MapType.key = key;
	result->MapType.value = value;
	return result;
}

Ast *ast_foreign_block_decl(AstFile *f, Token token, Ast *foreign_library, Ast *body,
                            CommentGroup *docs) {
	Ast *result = alloc_ast_node(f, Ast_ForeignBlockDecl);
	result->ForeignBlockDecl.token = token;
	result->ForeignBlockDecl.foreign_library = foreign_library;
	result->ForeignBlockDecl.body = body;
	result->ForeignBlockDecl.docs = docs;
	result->ForeignBlockDecl.attributes.allocator = heap_allocator();
	return result;
}

Ast *ast_label_decl(AstFile *f, Token token, Ast *name) {
	Ast *result = alloc_ast_node(f, Ast_Label);
	result->Label.token = token;
	result->Label.name = name;
	return result;
}

Ast *ast_value_decl(AstFile *f, Array<Ast *> const &names, Ast *type, Array<Ast *> const &values, bool is_mutable,
                    CommentGroup *docs, CommentGroup *comment) {
	Ast *result = alloc_ast_node(f, Ast_ValueDecl);
	result->ValueDecl.names = slice_from_array(names);
	result->ValueDecl.type = type;
	result->ValueDecl.values = slice_from_array(values);
  955. result->ValueDecl.is_mutable = is_mutable;
  956. result->ValueDecl.docs = docs;
  957. result->ValueDecl.comment = comment;
  958. result->ValueDecl.attributes.allocator = heap_allocator();
  959. return result;
  960. }
  961. Ast *ast_package_decl(AstFile *f, Token token, Token name, CommentGroup *docs, CommentGroup *comment) {
  962. Ast *result = alloc_ast_node(f, Ast_PackageDecl);
  963. result->PackageDecl.token = token;
  964. result->PackageDecl.name = name;
  965. result->PackageDecl.docs = docs;
  966. result->PackageDecl.comment = comment;
  967. return result;
  968. }
  969. Ast *ast_import_decl(AstFile *f, Token token, bool is_using, Token relpath, Token import_name,
  970. CommentGroup *docs, CommentGroup *comment) {
  971. Ast *result = alloc_ast_node(f, Ast_ImportDecl);
  972. result->ImportDecl.token = token;
  973. result->ImportDecl.is_using = is_using;
  974. result->ImportDecl.relpath = relpath;
  975. result->ImportDecl.import_name = import_name;
  976. result->ImportDecl.docs = docs;
  977. result->ImportDecl.comment = comment;
  978. return result;
  979. }
  980. Ast *ast_foreign_import_decl(AstFile *f, Token token, Array<Token> filepaths, Token library_name,
  981. CommentGroup *docs, CommentGroup *comment) {
  982. Ast *result = alloc_ast_node(f, Ast_ForeignImportDecl);
  983. result->ForeignImportDecl.token = token;
  984. result->ForeignImportDecl.filepaths = slice_from_array(filepaths);
  985. result->ForeignImportDecl.library_name = library_name;
  986. result->ForeignImportDecl.docs = docs;
  987. result->ForeignImportDecl.comment = comment;
  988. result->ForeignImportDecl.attributes.allocator = heap_allocator();
  989. return result;
  990. }
  991. Ast *ast_attribute(AstFile *f, Token token, Token open, Token close, Array<Ast *> const &elems) {
  992. Ast *result = alloc_ast_node(f, Ast_Attribute);
  993. result->Attribute.token = token;
  994. result->Attribute.open = open;
  995. result->Attribute.elems = slice_from_array(elems);
  996. result->Attribute.close = close;
  997. return result;
  998. }
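// Advances to the next raw token in the file's token array (comments included).
// Returns false and reports an error if the end of the token stream has been reached.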
  999. bool next_token0(AstFile *f) {
  1000. if (f->curr_token_index+1 < f->tokens.count) {
  1001. f->curr_token = f->tokens[++f->curr_token_index];
  1002. return true;
  1003. }
  1004. syntax_error(f->curr_token, "Token is EOF");
  1005. return false;
  1006. }
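// Consumes a single comment token and reports (via end_line_) the last source line the
// comment occupies, counting embedded newlines for block comments.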
  1007. Token consume_comment(AstFile *f, isize *end_line_) {
  1008. Token tok = f->curr_token;
  1009. GB_ASSERT(tok.kind == Token_Comment);
  1010. isize end_line = tok.pos.line;
  1011. if (tok.string[1] == '*') {
  1012. for (isize i = 2; i < tok.string.len; i++) {
  1013. if (tok.string[i] == '\n') {
  1014. end_line++;
  1015. }
  1016. }
  1017. }
  1018. if (end_line_) *end_line_ = end_line;
  1019. next_token0(f);
if (f->curr_token.pos.line > tok.pos.line || f->curr_token.kind == Token_EOF) {
  1021. end_line++;
  1022. }
  1023. return tok;
  1024. }
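// Consumes consecutive comment tokens that are at most 'n' lines apart and bundles them
// into a single CommentGroup; returns nullptr if no comments were consumed.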
  1025. CommentGroup *consume_comment_group(AstFile *f, isize n, isize *end_line_) {
  1026. Array<Token> list = {};
  1027. list.allocator = heap_allocator();
  1028. isize end_line = f->curr_token.pos.line;
  1029. if (f->curr_token_index == 1 &&
  1030. f->prev_token.kind == Token_Comment &&
  1031. f->prev_token.pos.line+1 == f->curr_token.pos.line) {
  1032. // NOTE(bill): Special logic for the first comment in the file
  1033. array_add(&list, f->prev_token);
  1034. }
  1035. while (f->curr_token.kind == Token_Comment &&
  1036. f->curr_token.pos.line <= end_line+n) {
  1037. array_add(&list, consume_comment(f, &end_line));
  1038. }
  1039. if (end_line_) *end_line_ = end_line;
  1040. CommentGroup *comments = nullptr;
  1041. if (list.count > 0) {
  1042. comments = gb_alloc_item(heap_allocator(), CommentGroup);
  1043. comments->list = slice_from_array(list);
  1044. array_add(&f->comments, comments);
  1045. }
  1046. return comments;
  1047. }
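// Collects the comment groups around the current position: a comment on the same line as
// 'prev' becomes the line comment, and a group directly preceding the next token becomes
// the lead comment.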
  1048. void comsume_comment_groups(AstFile *f, Token prev) {
  1049. if (f->curr_token.kind == Token_Comment) {
  1050. CommentGroup *comment = nullptr;
  1051. isize end_line = 0;
  1052. if (f->curr_token.pos.line == prev.pos.line) {
  1053. comment = consume_comment_group(f, 0, &end_line);
  1054. if (f->curr_token.pos.line != end_line || f->curr_token.kind == Token_EOF) {
  1055. f->line_comment = comment;
  1056. }
  1057. }
  1058. end_line = -1;
  1059. while (f->curr_token.kind == Token_Comment) {
  1060. comment = consume_comment_group(f, 1, &end_line);
  1061. }
  1062. if (end_line+1 == f->curr_token.pos.line || end_line < 0) {
  1063. f->lead_comment = comment;
  1064. }
  1065. GB_ASSERT(f->curr_token.kind != Token_Comment);
  1066. }
  1067. }
  1068. bool ignore_newlines(AstFile *f) {
  1069. if (f->allow_newline) {
  1070. return f->expr_level > 0;
  1071. }
  1072. return f->expr_level >= 0;
  1073. }
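// Advances to the next meaningful token, gathering comment groups and skipping inserted
// "\n" semicolons while newlines are being ignored. Returns the previously current token.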
  1074. Token advance_token(AstFile *f) {
  1075. f->lead_comment = nullptr;
  1076. f->line_comment = nullptr;
  1077. Token prev = f->prev_token = f->curr_token;
  1078. bool ok = next_token0(f);
  1079. if (ok) {
  1080. switch (f->curr_token.kind) {
  1081. case Token_Comment:
  1082. comsume_comment_groups(f, prev);
  1083. break;
  1084. case Token_Semicolon:
  1085. if (ignore_newlines(f) && f->curr_token.string == "\n") {
  1086. advance_token(f);
  1087. }
  1088. break;
  1089. }
  1090. }
  1091. return prev;
  1092. }
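// Looks ahead past any comment tokens and reports whether the next token has the given kind.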
  1093. bool peek_token_kind(AstFile *f, TokenKind kind) {
  1094. for (isize i = f->curr_token_index+1; i < f->tokens.count; i++) {
  1095. Token tok = f->tokens[i];
  1096. if (kind != Token_Comment && tok.kind == Token_Comment) {
  1097. continue;
  1098. }
  1099. return tok.kind == kind;
  1100. }
  1101. return false;
  1102. }
  1103. Token peek_token(AstFile *f) {
  1104. for (isize i = f->curr_token_index+1; i < f->tokens.count; i++) {
  1105. Token tok = f->tokens[i];
  1106. if (tok.kind == Token_Comment) {
  1107. continue;
  1108. }
  1109. return tok;
  1110. }
  1111. return {};
  1112. }
  1113. bool skip_possible_newline(AstFile *f) {
  1114. if ((f->tokenizer.flags & TokenizerFlag_InsertSemicolon) == 0) {
  1115. return false;
  1116. }
  1117. if (token_is_newline(f->curr_token)) {
  1118. advance_token(f);
  1119. return true;
  1120. }
  1121. return false;
  1122. }
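// Skips an inserted newline only when the following token would continue the current
// construct on the next line (an opening brace, 'else', or 'where'), so that a body or
// clause may begin on its own line without triggering a missing ';' error.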
  1123. bool skip_possible_newline_for_literal(AstFile *f) {
  1124. if ((f->tokenizer.flags & TokenizerFlag_InsertSemicolon) == 0) {
  1125. return false;
  1126. }
  1127. Token curr = f->curr_token;
  1128. if (token_is_newline(curr)) {
  1129. Token next = peek_token(f);
  1130. if (curr.pos.line+1 >= next.pos.line) {
  1131. switch (next.kind) {
  1132. case Token_OpenBrace:
  1133. case Token_else:
  1134. case Token_where:
  1135. advance_token(f);
  1136. return true;
  1137. }
  1138. }
  1139. }
  1140. return false;
  1141. }
  1142. String token_to_string(Token const &tok) {
  1143. String p = token_strings[tok.kind];
  1144. if (token_is_newline(tok)) {
  1145. p = str_lit("newline");
  1146. }
  1147. return p;
  1148. }
  1149. Token expect_token(AstFile *f, TokenKind kind) {
  1150. Token prev = f->curr_token;
  1151. if (prev.kind != kind) {
  1152. String c = token_strings[kind];
  1153. String p = token_to_string(prev);
  1154. syntax_error(f->curr_token, "Expected '%.*s', got '%.*s'", LIT(c), LIT(p));
  1155. if (prev.kind == Token_EOF) {
  1156. gb_exit(1);
  1157. }
  1158. }
  1159. advance_token(f);
  1160. return prev;
  1161. }
  1162. Token expect_token_after(AstFile *f, TokenKind kind, char const *msg) {
  1163. Token prev = f->curr_token;
  1164. if (prev.kind != kind) {
  1165. String p = token_to_string(prev);
  1166. syntax_error(f->curr_token, "Expected '%.*s' after %s, got '%.*s'",
  1167. LIT(token_strings[kind]),
  1168. msg,
  1169. LIT(p));
  1170. }
  1171. advance_token(f);
  1172. return prev;
  1173. }
  1174. bool is_token_range(TokenKind kind) {
  1175. switch (kind) {
  1176. case Token_Ellipsis:
  1177. case Token_RangeFull:
  1178. case Token_RangeHalf:
  1179. return true;
  1180. }
  1181. return false;
  1182. }
  1183. bool is_token_range(Token tok) {
  1184. return is_token_range(tok.kind);
  1185. }
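// Expects the current token to be usable as a binary operator. 'in'/'not_in', 'if'/'when',
// and 'or_else'/'or_return' are accepted where the context allows them; range operators
// are rejected unless ranges are currently permitted.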
  1186. Token expect_operator(AstFile *f) {
  1187. Token prev = f->curr_token;
  1188. if ((prev.kind == Token_in || prev.kind == Token_not_in) && (f->expr_level >= 0 || f->allow_in_expr)) {
  1189. // okay
  1190. } else if (prev.kind == Token_if || prev.kind == Token_when) {
  1191. // okay
  1192. } else if (prev.kind == Token_or_else || prev.kind == Token_or_return) {
  1193. // okay
  1194. } else if (!gb_is_between(prev.kind, Token__OperatorBegin+1, Token__OperatorEnd-1)) {
  1195. String p = token_to_string(prev);
  1196. syntax_error(f->curr_token, "Expected an operator, got '%.*s'",
  1197. LIT(p));
  1198. } else if (!f->allow_range && is_token_range(prev)) {
  1199. String p = token_to_string(prev);
syntax_error(f->curr_token, "Expected a non-range operator, got '%.*s'",
  1201. LIT(p));
  1202. }
  1203. advance_token(f);
  1204. return prev;
  1205. }
  1206. Token expect_keyword(AstFile *f) {
  1207. Token prev = f->curr_token;
  1208. if (!gb_is_between(prev.kind, Token__KeywordBegin+1, Token__KeywordEnd-1)) {
  1209. String p = token_to_string(prev);
  1210. syntax_error(f->curr_token, "Expected a keyword, got '%.*s'",
  1211. LIT(p));
  1212. }
  1213. advance_token(f);
  1214. return prev;
  1215. }
  1216. bool allow_token(AstFile *f, TokenKind kind) {
  1217. Token prev = f->curr_token;
  1218. if (prev.kind == kind) {
  1219. advance_token(f);
  1220. return true;
  1221. }
  1222. return false;
  1223. }
  1224. Token expect_closing_brace_of_field_list(AstFile *f) {
  1225. Token token = f->curr_token;
  1226. if (allow_token(f, Token_CloseBrace)) {
  1227. return token;
  1228. }
  1229. if (allow_token(f, Token_Semicolon)) {
  1230. String p = token_to_string(token);
  1231. syntax_error(token_end_of_line(f, f->prev_token), "Expected a comma, got a %.*s", LIT(p));
  1232. }
  1233. return expect_token(f, Token_CloseBrace);
  1234. }
  1235. bool is_blank_ident(String str) {
  1236. if (str.len == 1) {
  1237. return str[0] == '_';
  1238. }
  1239. return false;
  1240. }
  1241. bool is_blank_ident(Token token) {
  1242. if (token.kind == Token_Ident) {
  1243. return is_blank_ident(token.string);
  1244. }
  1245. return false;
  1246. }
  1247. bool is_blank_ident(Ast *node) {
  1248. if (node->kind == Ast_Ident) {
  1249. ast_node(i, Ident, node);
  1250. return is_blank_ident(i->token.string);
  1251. }
  1252. return false;
  1253. }
  1254. // NOTE(bill): Go to next statement to prevent numerous error messages popping up
  1255. void fix_advance_to_next_stmt(AstFile *f) {
  1256. for (;;) {
  1257. Token t = f->curr_token;
  1258. switch (t.kind) {
  1259. case Token_EOF:
  1260. case Token_Semicolon:
  1261. return;
  1262. case Token_package:
  1263. case Token_foreign:
  1264. case Token_import:
  1265. case Token_if:
  1266. case Token_for:
  1267. case Token_when:
  1268. case Token_return:
  1269. case Token_switch:
  1270. case Token_defer:
  1271. case Token_using:
  1272. case Token_break:
  1273. case Token_continue:
  1274. case Token_fallthrough:
  1275. case Token_Hash:
  1276. {
  1277. if (t.pos == f->fix_prev_pos &&
  1278. f->fix_count < PARSER_MAX_FIX_COUNT) {
  1279. f->fix_count++;
  1280. return;
  1281. }
  1282. if (f->fix_prev_pos < t.pos) {
  1283. f->fix_prev_pos = t.pos;
  1284. f->fix_count = 0; // NOTE(bill): Reset
  1285. return;
  1286. }
  1287. // NOTE(bill): Reaching here means there is a parsing bug
  1288. } break;
  1289. }
  1290. advance_token(f);
  1291. }
  1292. }
  1293. Token expect_closing(AstFile *f, TokenKind kind, String context) {
  1294. if (f->curr_token.kind != kind &&
  1295. f->curr_token.kind == Token_Semicolon &&
  1296. f->curr_token.string == "\n") {
  1297. Token tok = f->prev_token;
  1298. tok.pos.column += cast(i32)tok.string.len;
  1299. syntax_error(tok, "Missing ',' before newline in %.*s", LIT(context));
  1300. advance_token(f);
  1301. }
  1302. return expect_token(f, kind);
  1303. }
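// Reports whether the statement 's' may omit its terminating semicolon, e.g. after
// block-like statements, struct/union/enum declarations at file scope, or procedure literals.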
  1304. bool is_semicolon_optional_for_node(AstFile *f, Ast *s) {
  1305. if (s == nullptr) {
  1306. return false;
  1307. }
  1308. if (build_context.insert_semicolon) {
  1309. return true;
  1310. }
  1311. switch (s->kind) {
  1312. case Ast_EmptyStmt:
  1313. case Ast_BlockStmt:
  1314. return true;
  1315. case Ast_IfStmt:
  1316. case Ast_WhenStmt:
  1317. case Ast_ForStmt:
  1318. case Ast_RangeStmt:
  1319. case Ast_SwitchStmt:
  1320. case Ast_TypeSwitchStmt:
  1321. return true;
  1322. case Ast_HelperType:
  1323. return is_semicolon_optional_for_node(f, s->HelperType.type);
  1324. case Ast_DistinctType:
  1325. return is_semicolon_optional_for_node(f, s->DistinctType.type);
  1326. case Ast_PointerType:
  1327. return is_semicolon_optional_for_node(f, s->PointerType.type);
  1328. case Ast_StructType:
  1329. case Ast_UnionType:
  1330. case Ast_EnumType:
  1331. // Require semicolon within a procedure body
  1332. return f->curr_proc == nullptr;
  1333. case Ast_ProcLit:
  1334. return true;
  1335. case Ast_PackageDecl:
  1336. case Ast_ImportDecl:
  1337. case Ast_ForeignImportDecl:
  1338. return true;
  1339. case Ast_ValueDecl:
  1340. if (s->ValueDecl.is_mutable) {
  1341. return false;
  1342. }
  1343. if (s->ValueDecl.values.count > 0) {
  1344. return is_semicolon_optional_for_node(f, s->ValueDecl.values[s->ValueDecl.values.count-1]);
  1345. }
  1346. break;
  1347. case Ast_ForeignBlockDecl:
  1348. return is_semicolon_optional_for_node(f, s->ForeignBlockDecl.body);
  1349. }
  1350. return false;
  1351. }
  1352. void expect_semicolon_newline_error(AstFile *f, Token const &token, Ast *s) {
  1353. if (!build_context.insert_semicolon && token.string == "\n") {
switch (f->curr_token.kind) {
  1355. case Token_CloseBrace:
  1356. case Token_CloseParen:
  1357. case Token_else:
  1358. return;
  1359. }
  1360. if (is_semicolon_optional_for_node(f, s)) {
  1361. return;
  1362. }
  1363. Token tok = token;
  1364. tok.pos.column -= 1;
  1365. syntax_error(tok, "Expected ';', got newline");
  1366. }
  1367. }
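// Expects a statement terminator after 's': an explicit ';', an inserted newline, or a
// closing token on the same line. Otherwise an error is reported and the parser
// resynchronises at the next statement.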
  1368. void expect_semicolon(AstFile *f, Ast *s) {
  1369. Token prev_token = {};
  1370. if (allow_token(f, Token_Semicolon)) {
  1371. expect_semicolon_newline_error(f, f->prev_token, s);
  1372. return;
  1373. }
  1374. switch (f->curr_token.kind) {
  1375. case Token_CloseBrace:
  1376. case Token_CloseParen:
  1377. if (f->curr_token.pos.line == f->prev_token.pos.line) {
  1378. return;
  1379. }
  1380. break;
  1381. }
  1382. prev_token = f->prev_token;
  1383. if (prev_token.kind == Token_Semicolon) {
  1384. expect_semicolon_newline_error(f, f->prev_token, s);
  1385. return;
  1386. }
  1387. if (f->curr_token.kind == Token_EOF) {
  1388. return;
  1389. }
  1390. if (s != nullptr) {
  1391. bool insert_semi = (f->tokenizer.flags & TokenizerFlag_InsertSemicolon) != 0;
  1392. if (insert_semi) {
  1393. switch (f->curr_token.kind) {
  1394. case Token_CloseBrace:
  1395. case Token_CloseParen:
  1396. case Token_else:
  1397. case Token_EOF:
  1398. return;
  1399. default:
  1400. if (is_semicolon_optional_for_node(f, s)) {
  1401. return;
  1402. }
  1403. break;
  1404. }
  1405. } else if (prev_token.pos.line != f->curr_token.pos.line) {
  1406. if (is_semicolon_optional_for_node(f, s)) {
  1407. return;
  1408. }
  1409. } else {
  1410. switch (f->curr_token.kind) {
  1411. case Token_CloseBrace:
  1412. case Token_CloseParen:
  1413. case Token_else:
  1414. return;
  1415. case Token_EOF:
  1416. if (is_semicolon_optional_for_node(f, s)) {
  1417. return;
  1418. }
  1419. break;
  1420. }
  1421. }
  1422. String node_string = ast_strings[s->kind];
  1423. String p = token_to_string(f->curr_token);
  1424. syntax_error(prev_token, "Expected ';' after %.*s, got %.*s",
  1425. LIT(node_string), LIT(p));
  1426. } else {
  1427. switch (f->curr_token.kind) {
  1428. case Token_EOF:
  1429. return;
  1430. }
  1431. String p = token_to_string(f->curr_token);
  1432. syntax_error(prev_token, "Expected ';', got %.*s", LIT(p));
  1433. }
  1434. fix_advance_to_next_stmt(f);
  1435. }
  1436. Ast * parse_expr(AstFile *f, bool lhs);
  1437. Ast * parse_proc_type(AstFile *f, Token proc_token);
  1438. Array<Ast *> parse_stmt_list(AstFile *f);
  1439. Ast * parse_stmt(AstFile *f);
  1440. Ast * parse_body(AstFile *f);
  1441. Ast * parse_block_stmt(AstFile *f, b32 is_when);
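// Parses an identifier. With 'allow_poly_names', a '$'-prefixed name (e.g. '$T') is parsed
// as a polymorphic type; otherwise a missing identifier is substituted with '_' so parsing
// can continue after the error.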
  1442. Ast *parse_ident(AstFile *f, bool allow_poly_names=false) {
  1443. Token token = f->curr_token;
  1444. if (token.kind == Token_Ident) {
  1445. advance_token(f);
  1446. } else if (allow_poly_names && token.kind == Token_Dollar) {
  1447. Token dollar = expect_token(f, Token_Dollar);
  1448. Ast *name = ast_ident(f, expect_token(f, Token_Ident));
  1449. if (is_blank_ident(name)) {
  1450. syntax_error(name, "Invalid polymorphic type definition with a blank identifier");
  1451. }
  1452. return ast_poly_type(f, dollar, name, nullptr);
  1453. } else {
  1454. token.string = str_lit("_");
  1455. expect_token(f, Token_Ident);
  1456. }
  1457. return ast_ident(f, token);
  1458. }
  1459. Ast *parse_tag_expr(AstFile *f, Ast *expression) {
  1460. Token token = expect_token(f, Token_Hash);
  1461. Token name = expect_token(f, Token_Ident);
  1462. return ast_tag_expr(f, token, name, expression);
  1463. }
  1464. Ast *unparen_expr(Ast *node) {
  1465. for (;;) {
  1466. if (node == nullptr) {
  1467. return nullptr;
  1468. }
  1469. if (node->kind != Ast_ParenExpr) {
  1470. return node;
  1471. }
  1472. node = node->ParenExpr.expr;
  1473. }
  1474. }
  1475. Ast *unselector_expr(Ast *node) {
  1476. node = unparen_expr(node);
  1477. if (node == nullptr) {
  1478. return nullptr;
  1479. }
  1480. while (node->kind == Ast_SelectorExpr) {
  1481. node = node->SelectorExpr.selector;
  1482. }
  1483. return node;
  1484. }
  1485. Ast *strip_or_return_expr(Ast *node) {
  1486. for (;;) {
  1487. if (node == nullptr) {
  1488. return node;
  1489. }
  1490. if (node->kind == Ast_OrReturnExpr) {
  1491. node = node->OrReturnExpr.expr;
  1492. } else if (node->kind == Ast_ParenExpr) {
  1493. node = node->ParenExpr.expr;
  1494. } else {
  1495. return node;
  1496. }
  1497. }
  1498. }
  1499. Ast *parse_value(AstFile *f);
  1500. Array<Ast *> parse_element_list(AstFile *f) {
  1501. auto elems = array_make<Ast *>(heap_allocator());
  1502. while (f->curr_token.kind != Token_CloseBrace &&
  1503. f->curr_token.kind != Token_EOF) {
  1504. Ast *elem = parse_value(f);
  1505. if (f->curr_token.kind == Token_Eq) {
  1506. Token eq = expect_token(f, Token_Eq);
  1507. Ast *value = parse_value(f);
  1508. elem = ast_field_value(f, elem, value, eq);
  1509. }
  1510. array_add(&elems, elem);
  1511. if (!allow_token(f, Token_Comma)) {
  1512. break;
  1513. }
  1514. }
  1515. return elems;
  1516. }
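// Parses the braced element list of a compound literal, e.g. the '{1, 2, 3}' or
// '{x = 1, y = 2}' part that follows the optional 'type' prefix.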
  1517. Ast *parse_literal_value(AstFile *f, Ast *type) {
  1518. Array<Ast *> elems = {};
  1519. Token open = expect_token(f, Token_OpenBrace);
  1520. isize expr_level = f->expr_level;
  1521. f->expr_level = 0;
  1522. if (f->curr_token.kind != Token_CloseBrace) {
  1523. elems = parse_element_list(f);
  1524. }
  1525. f->expr_level = expr_level;
  1526. Token close = expect_closing(f, Token_CloseBrace, str_lit("compound literal"));
  1527. return ast_compound_lit(f, type, elems, open, close);
  1528. }
  1529. Ast *parse_value(AstFile *f) {
  1530. if (f->curr_token.kind == Token_OpenBrace) {
  1531. return parse_literal_value(f, nullptr);
  1532. }
  1533. Ast *value;
  1534. bool prev_allow_range = f->allow_range;
  1535. f->allow_range = true;
  1536. value = parse_expr(f, false);
  1537. f->allow_range = prev_allow_range;
  1538. return value;
  1539. }
  1540. Ast *parse_type_or_ident(AstFile *f);
  1541. void check_proc_add_tag(AstFile *f, Ast *tag_expr, u64 *tags, ProcTag tag, String tag_name) {
  1542. if (*tags & tag) {
  1543. syntax_error(tag_expr, "Procedure tag already used: %.*s", LIT(tag_name));
  1544. }
  1545. *tags |= tag;
  1546. }
  1547. bool is_foreign_name_valid(String name) {
  1548. if (name.len == 0) {
  1549. return false;
  1550. }
  1551. isize offset = 0;
  1552. while (offset < name.len) {
  1553. Rune rune;
  1554. isize remaining = name.len - offset;
  1555. isize width = utf8_decode(name.text+offset, remaining, &rune);
  1556. if (rune == GB_RUNE_INVALID && width == 1) {
  1557. return false;
  1558. } else if (rune == GB_RUNE_BOM && remaining > 0) {
  1559. return false;
  1560. }
  1561. if (offset == 0) {
  1562. switch (rune) {
  1563. case '-':
  1564. case '$':
  1565. case '.':
  1566. case '_':
  1567. break;
  1568. default:
  1569. if (!gb_char_is_alpha(cast(char)rune))
  1570. return false;
  1571. break;
  1572. }
  1573. } else {
  1574. switch (rune) {
  1575. case '-':
  1576. case '$':
  1577. case '.':
  1578. case '_':
  1579. break;
  1580. default:
  1581. if (!gb_char_is_alphanumeric(cast(char)rune)) {
  1582. return false;
  1583. }
  1584. break;
  1585. }
  1586. }
  1587. offset += width;
  1588. }
  1589. return true;
  1590. }
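// Parses the '#tag' directives that may follow a procedure type (e.g. '#optional_ok',
// '#no_bounds_check') and accumulates the corresponding ProcTag bits into 'tags'.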
  1591. void parse_proc_tags(AstFile *f, u64 *tags) {
  1592. GB_ASSERT(tags != nullptr);
  1593. while (f->curr_token.kind == Token_Hash) {
  1594. Ast *tag_expr = parse_tag_expr(f, nullptr);
  1595. ast_node(te, TagExpr, tag_expr);
  1596. String tag_name = te->name.string;
  1597. #define ELSE_IF_ADD_TAG(name) \
  1598. else if (tag_name == #name) { \
  1599. check_proc_add_tag(f, tag_expr, tags, ProcTag_##name, tag_name); \
  1600. }
  1601. if (false) {}
  1602. ELSE_IF_ADD_TAG(optional_ok)
  1603. ELSE_IF_ADD_TAG(optional_second)
  1604. ELSE_IF_ADD_TAG(require_results)
  1605. ELSE_IF_ADD_TAG(bounds_check)
  1606. ELSE_IF_ADD_TAG(no_bounds_check)
  1607. else {
  1608. syntax_error(tag_expr, "Unknown procedure type tag #%.*s", LIT(tag_name));
  1609. }
  1610. #undef ELSE_IF_ADD_TAG
  1611. }
  1612. if ((*tags & ProcTag_bounds_check) && (*tags & ProcTag_no_bounds_check)) {
  1613. syntax_error(f->curr_token, "You cannot apply both #bounds_check and #no_bounds_check to a procedure");
  1614. }
  1615. }
  1616. Array<Ast *> parse_lhs_expr_list (AstFile *f);
  1617. Array<Ast *> parse_rhs_expr_list (AstFile *f);
  1618. Ast * parse_simple_stmt (AstFile *f, u32 flags);
  1619. Ast * parse_type (AstFile *f);
  1620. Ast * parse_call_expr (AstFile *f, Ast *operand);
  1621. Ast * parse_struct_field_list(AstFile *f, isize *name_count_);
  1622. Ast *parse_field_list(AstFile *f, isize *name_count_, u32 allowed_flags, TokenKind follow, bool allow_default_parameters, bool allow_typeid_token);
  1623. Ast *parse_unary_expr(AstFile *f, bool lhs);
  1624. Ast *convert_stmt_to_expr(AstFile *f, Ast *statement, String kind) {
  1625. if (statement == nullptr) {
  1626. return nullptr;
  1627. }
  1628. if (statement->kind == Ast_ExprStmt) {
  1629. return statement->ExprStmt.expr;
  1630. }
  1631. syntax_error(f->curr_token, "Expected '%.*s', found a simple statement.", LIT(kind));
  1632. Token end = f->curr_token;
if (f->curr_token_index+1 < f->tokens.count) {
  1634. end = f->tokens[f->curr_token_index+1];
  1635. }
  1636. return ast_bad_expr(f, f->curr_token, end);
  1637. }
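// Wraps a single statement in a block statement so that 'do <stmt>' bodies have the same
// shape as brace-delimited bodies.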
  1638. Ast *convert_stmt_to_body(AstFile *f, Ast *stmt) {
  1639. if (stmt->kind == Ast_BlockStmt) {
  1640. syntax_error(stmt, "Expected a normal statement rather than a block statement");
  1641. return stmt;
  1642. }
  1643. if (stmt->kind == Ast_EmptyStmt) {
  1644. syntax_error(stmt, "Expected a non-empty statement");
  1645. }
  1646. GB_ASSERT(is_ast_stmt(stmt) || is_ast_decl(stmt));
  1647. Token open = ast_token(stmt);
  1648. Token close = ast_token(stmt);
  1649. auto stmts = array_make<Ast *>(heap_allocator(), 0, 1);
  1650. array_add(&stmts, stmt);
  1651. return ast_block_stmt(f, stmts, open, close);
  1652. }
  1653. void check_polymorphic_params_for_type(AstFile *f, Ast *polymorphic_params, Token token) {
  1654. if (polymorphic_params == nullptr) {
  1655. return;
  1656. }
  1657. if (polymorphic_params->kind != Ast_FieldList) {
  1658. return;
  1659. }
  1660. ast_node(fl, FieldList, polymorphic_params);
  1661. for_array(fi, fl->list) {
  1662. Ast *field = fl->list[fi];
  1663. if (field->kind != Ast_Field) {
  1664. continue;
  1665. }
  1666. for_array(i, field->Field.names) {
  1667. Ast *name = field->Field.names[i];
  1668. if (name->kind != field->Field.names[0]->kind) {
syntax_error(name, "Mixture of polymorphic names with and without '$' for %.*s parameters", LIT(token.string));
  1670. return;
  1671. }
  1672. }
  1673. }
  1674. }
  1675. bool ast_on_same_line(Token const &x, Ast *yp) {
  1676. Token y = ast_token(yp);
  1677. return x.pos.line == y.pos.line;
  1678. }
  1679. bool ast_on_same_line(Ast *x, Ast *y) {
  1680. return ast_on_same_line(ast_token(x), y);
  1681. }
  1682. Ast *parse_force_inlining_operand(AstFile *f, Token token) {
  1683. Ast *expr = parse_unary_expr(f, false);
  1684. Ast *e = strip_or_return_expr(expr);
  1685. if (e->kind != Ast_ProcLit && e->kind != Ast_CallExpr) {
  1686. syntax_error(expr, "%.*s must be followed by a procedure literal or call, got %.*s", LIT(token.string), LIT(ast_strings[expr->kind]));
  1687. return ast_bad_expr(f, token, f->curr_token);
  1688. }
  1689. ProcInlining pi = ProcInlining_none;
  1690. if (token.kind == Token_inline) {
  1691. syntax_warning(token, "'inline' is deprecated in favour of '#force_inline'");
  1692. pi = ProcInlining_inline;
  1693. } else if (token.kind == Token_no_inline) {
  1694. syntax_warning(token, "'no_inline' is deprecated in favour of '#force_no_inline'");
  1695. pi = ProcInlining_no_inline;
  1696. } else if (token.kind == Token_Ident) {
  1697. if (token.string == "force_inline") {
  1698. pi = ProcInlining_inline;
  1699. } else if (token.string == "force_no_inline") {
  1700. pi = ProcInlining_no_inline;
  1701. }
  1702. }
  1703. if (pi != ProcInlining_none) {
  1704. if (e->kind == Ast_ProcLit) {
  1705. if (expr->ProcLit.inlining != ProcInlining_none &&
  1706. expr->ProcLit.inlining != pi) {
  1707. syntax_error(expr, "Cannot apply both '#force_inline' and '#force_no_inline' to a procedure literal");
  1708. }
  1709. expr->ProcLit.inlining = pi;
  1710. } else if (e->kind == Ast_CallExpr) {
  1711. if (expr->CallExpr.inlining != ProcInlining_none &&
  1712. expr->CallExpr.inlining != pi) {
  1713. syntax_error(expr, "Cannot apply both '#force_inline' and '#force_no_inline' to a procedure call");
  1714. }
  1715. expr->CallExpr.inlining = pi;
  1716. }
  1717. }
  1718. return expr;
  1719. }
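// Applies the state flag of a '#bounds_check'/'#no_bounds_check' style directive to a
// statement, reporting directives that are duplicated, contradictory, or attached to a
// statement kind that cannot carry them.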
  1720. Ast *parse_check_directive_for_statement(Ast *s, Token const &tag_token, u16 state_flag) {
  1721. String name = tag_token.string;
  1722. if (s == nullptr) {
  1723. syntax_error(tag_token, "Invalid operand for #%.*s", LIT(name));
  1724. return nullptr;
  1725. }
  1726. if (s != nullptr && s->kind == Ast_EmptyStmt) {
  1727. if (s->EmptyStmt.token.string == "\n") {
  1728. syntax_error(tag_token, "#%.*s cannot be followed by a newline", LIT(name));
  1729. } else {
  1730. syntax_error(tag_token, "#%.*s cannot be applied to an empty statement ';'", LIT(name));
  1731. }
  1732. }
  1733. if (s->state_flags & state_flag) {
  1734. syntax_error(tag_token, "#%.*s has been applied multiple times", LIT(name));
  1735. }
  1736. s->state_flags |= state_flag;
  1737. switch (state_flag) {
  1738. case StateFlag_bounds_check:
  1739. if ((s->state_flags & StateFlag_no_bounds_check) != 0) {
  1740. syntax_error(tag_token, "#bounds_check and #no_bounds_check cannot be applied together");
  1741. }
  1742. break;
  1743. case StateFlag_no_bounds_check:
  1744. if ((s->state_flags & StateFlag_bounds_check) != 0) {
  1745. syntax_error(tag_token, "#bounds_check and #no_bounds_check cannot be applied together");
  1746. }
  1747. break;
  1748. }
  1749. switch (state_flag) {
  1750. case StateFlag_bounds_check:
  1751. case StateFlag_no_bounds_check:
  1752. switch (s->kind) {
  1753. case Ast_BlockStmt:
  1754. case Ast_IfStmt:
  1755. case Ast_WhenStmt:
  1756. case Ast_ForStmt:
  1757. case Ast_RangeStmt:
  1758. case Ast_UnrollRangeStmt:
  1759. case Ast_SwitchStmt:
  1760. case Ast_TypeSwitchStmt:
  1761. case Ast_ReturnStmt:
  1762. case Ast_DeferStmt:
  1763. case Ast_AssignStmt:
  1764. // Okay
  1765. break;
  1766. case Ast_ValueDecl:
  1767. if (!s->ValueDecl.is_mutable) {
  1768. syntax_error(tag_token, "#%.*s may only be applied to a variable declaration, and not a constant value declaration", LIT(name));
  1769. }
  1770. break;
  1771. default:
  1772. syntax_error(tag_token, "#%.*s may only be applied to the following statements: '{}', 'if', 'when', 'for', 'switch', 'return', 'defer', assignment, variable declaration", LIT(name));
  1773. break;
  1774. }
  1775. break;
  1776. }
  1777. return s;
  1778. }
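// Parses a primary operand: identifiers, basic and compound literals, parenthesised
// expressions, '#' directives, and the type expressions (proc, struct, union, enum,
// bit_set, map, pointer, array, ...) that may begin an expression.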
  1779. Ast *parse_operand(AstFile *f, bool lhs) {
  1780. Ast *operand = nullptr; // Operand
  1781. switch (f->curr_token.kind) {
  1782. case Token_Ident:
  1783. return parse_ident(f);
  1784. case Token_Undef:
  1785. return ast_undef(f, expect_token(f, Token_Undef));
  1786. case Token_context:
  1787. return ast_implicit(f, expect_token(f, Token_context));
  1788. case Token_Integer:
  1789. case Token_Float:
  1790. case Token_Imag:
  1791. case Token_Rune:
  1792. return ast_basic_lit(f, advance_token(f));
  1793. case Token_String:
  1794. return ast_basic_lit(f, advance_token(f));
  1795. case Token_OpenBrace:
  1796. if (!lhs) return parse_literal_value(f, nullptr);
  1797. break;
  1798. case Token_OpenParen: {
  1799. bool allow_newline;
  1800. Token open, close;
  1801. // NOTE(bill): Skip the Paren Expression
  1802. open = expect_token(f, Token_OpenParen);
if (f->curr_token.kind == Token_CloseParen) {
close = expect_token(f, Token_CloseParen);
syntax_error(open, "Invalid parenthesized expression with no inner expression");
  1806. return ast_bad_expr(f, open, close);
  1807. }
  1808. allow_newline = f->allow_newline;
  1809. if (f->expr_level < 0) {
  1810. f->allow_newline = false;
  1811. }
  1812. f->expr_level++;
  1813. operand = parse_expr(f, false);
  1814. f->expr_level--;
  1815. f->allow_newline = allow_newline;
  1816. close = expect_token(f, Token_CloseParen);
  1817. return ast_paren_expr(f, operand, open, close);
  1818. }
  1819. case Token_distinct: {
  1820. Token token = expect_token(f, Token_distinct);
  1821. Ast *type = parse_type(f);
  1822. return ast_distinct_type(f, token, type);
  1823. }
  1824. case Token_Hash: {
  1825. Token token = expect_token(f, Token_Hash);
  1826. Token name = expect_token(f, Token_Ident);
  1827. if (name.string == "type") {
  1828. return ast_helper_type(f, token, parse_type(f));
  1829. } else if (name.string == "file") {
  1830. return ast_basic_directive(f, token, name);
  1831. } else if (name.string == "line") { return ast_basic_directive(f, token, name);
  1832. } else if (name.string == "procedure") { return ast_basic_directive(f, token, name);
  1833. } else if (name.string == "caller_location") { return ast_basic_directive(f, token, name);
  1834. } else if (name.string == "location") {
  1835. Ast *tag = ast_basic_directive(f, token, name);
  1836. return parse_call_expr(f, tag);
  1837. } else if (name.string == "load") {
  1838. Ast *tag = ast_basic_directive(f, token, name);
  1839. return parse_call_expr(f, tag);
  1840. } else if (name.string == "assert") {
  1841. Ast *tag = ast_basic_directive(f, token, name);
  1842. return parse_call_expr(f, tag);
  1843. } else if (name.string == "defined") {
  1844. Ast *tag = ast_basic_directive(f, token, name);
  1845. return parse_call_expr(f, tag);
  1846. } else if (name.string == "config") {
  1847. Ast *tag = ast_basic_directive(f, token, name);
  1848. return parse_call_expr(f, tag);
  1849. } else if (name.string == "soa" || name.string == "simd") {
  1850. Ast *tag = ast_basic_directive(f, token, name);
  1851. Ast *original_type = parse_type(f);
  1852. Ast *type = unparen_expr(original_type);
  1853. switch (type->kind) {
  1854. case Ast_ArrayType: type->ArrayType.tag = tag; break;
  1855. case Ast_DynamicArrayType: type->DynamicArrayType.tag = tag; break;
  1856. default:
  1857. syntax_error(type, "Expected an array type after #%.*s, got %.*s", LIT(name.string), LIT(ast_strings[type->kind]));
  1858. break;
  1859. }
  1860. return original_type;
  1861. } else if (name.string == "partial") {
  1862. Ast *tag = ast_basic_directive(f, token, name);
  1863. Ast *original_type = parse_type(f);
  1864. Ast *type = unparen_expr(original_type);
  1865. switch (type->kind) {
  1866. case Ast_ArrayType: type->ArrayType.tag = tag; break;
  1867. default:
  1868. syntax_error(type, "Expected an enumerated array type after #%.*s, got %.*s", LIT(name.string), LIT(ast_strings[type->kind]));
  1869. break;
  1870. }
  1871. return original_type;
  1872. } else if (name.string == "bounds_check") {
  1873. Ast *operand = parse_expr(f, lhs);
  1874. return parse_check_directive_for_statement(operand, name, StateFlag_bounds_check);
  1875. } else if (name.string == "no_bounds_check") {
  1876. Ast *operand = parse_expr(f, lhs);
  1877. return parse_check_directive_for_statement(operand, name, StateFlag_no_bounds_check);
  1878. } else if (name.string == "relative") {
  1879. Ast *tag = ast_basic_directive(f, token, name);
  1880. tag = parse_call_expr(f, tag);
  1881. Ast *type = parse_type(f);
  1882. return ast_relative_type(f, tag, type);
  1883. } else if (name.string == "opaque") {
  1884. syntax_warning(token, "'#opaque' has been removed and will do nothing to the applied type");
  1885. return parse_type(f);
  1886. } else if (name.string == "force_inline" ||
  1887. name.string == "force_no_inline") {
  1888. return parse_force_inlining_operand(f, name);
  1889. } else {
  1890. operand = ast_tag_expr(f, token, name, parse_expr(f, false));
  1891. }
  1892. return operand;
  1893. }
  1894. case Token_inline:
  1895. case Token_no_inline:
  1896. {
  1897. Token token = advance_token(f);
  1898. return parse_force_inlining_operand(f, token);
  1899. } break;
  1900. // Parse Procedure Type or Literal or Group
  1901. case Token_proc: {
  1902. Token token = expect_token(f, Token_proc);
  1903. if (f->curr_token.kind == Token_OpenBrace) { // ProcGroup
  1904. Token open = expect_token(f, Token_OpenBrace);
  1905. auto args = array_make<Ast *>(heap_allocator());
  1906. while (f->curr_token.kind != Token_CloseBrace &&
  1907. f->curr_token.kind != Token_EOF) {
  1908. Ast *elem = parse_expr(f, false);
  1909. array_add(&args, elem);
  1910. if (!allow_token(f, Token_Comma)) {
  1911. break;
  1912. }
  1913. }
  1914. Token close = expect_token(f, Token_CloseBrace);
  1915. if (args.count == 0) {
syntax_error(token, "Expected at least 1 argument in a procedure group");
  1917. }
  1918. return ast_proc_group(f, token, open, close, args);
  1919. }
  1920. Ast *type = parse_proc_type(f, token);
  1921. Token where_token = {};
  1922. Array<Ast *> where_clauses = {};
  1923. u64 tags = 0;
  1924. skip_possible_newline_for_literal(f);
  1925. if (f->curr_token.kind == Token_where) {
  1926. where_token = expect_token(f, Token_where);
  1927. isize prev_level = f->expr_level;
  1928. f->expr_level = -1;
  1929. where_clauses = parse_rhs_expr_list(f);
  1930. f->expr_level = prev_level;
  1931. }
  1932. parse_proc_tags(f, &tags);
  1933. if ((tags & ProcTag_require_results) != 0) {
syntax_error(f->curr_token, "#require_results has been replaced by the attribute @(require_results) on the declaration");
  1935. tags &= ~ProcTag_require_results;
  1936. }
  1937. GB_ASSERT(type->kind == Ast_ProcType);
  1938. type->ProcType.tags = tags;
  1939. if (f->allow_type && f->expr_level < 0) {
  1940. if (tags != 0) {
  1941. syntax_error(token, "A procedure type cannot have suffix tags");
  1942. }
  1943. if (where_token.kind != Token_Invalid) {
  1944. syntax_error(where_token, "'where' clauses are not allowed on procedure types");
  1945. }
  1946. return type;
  1947. }
  1948. skip_possible_newline_for_literal(f);
  1949. if (allow_token(f, Token_Undef)) {
  1950. if (where_token.kind != Token_Invalid) {
  1951. syntax_error(where_token, "'where' clauses are not allowed on procedure literals without a defined body (replaced with ---)");
  1952. }
  1953. return ast_proc_lit(f, type, nullptr, tags, where_token, where_clauses);
  1954. } else if (f->curr_token.kind == Token_OpenBrace) {
  1955. Ast *curr_proc = f->curr_proc;
  1956. Ast *body = nullptr;
  1957. f->curr_proc = type;
  1958. body = parse_body(f);
  1959. f->curr_proc = curr_proc;
  1960. // Apply the tags directly to the body rather than the type
  1961. if (tags & ProcTag_no_bounds_check) {
  1962. body->state_flags |= StateFlag_no_bounds_check;
  1963. }
  1964. if (tags & ProcTag_bounds_check) {
  1965. body->state_flags |= StateFlag_bounds_check;
  1966. }
  1967. return ast_proc_lit(f, type, body, tags, where_token, where_clauses);
  1968. } else if (allow_token(f, Token_do)) {
  1969. Ast *curr_proc = f->curr_proc;
  1970. Ast *body = nullptr;
  1971. f->curr_proc = type;
  1972. body = convert_stmt_to_body(f, parse_stmt(f));
  1973. f->curr_proc = curr_proc;
  1974. if (build_context.disallow_do) {
  1975. syntax_error(body, "'do' has been disallowed");
  1976. } else if (!ast_on_same_line(type, body)) {
  1977. syntax_error(body, "The body of a 'do' must be on the same line as the signature");
  1978. }
  1979. return ast_proc_lit(f, type, body, tags, where_token, where_clauses);
  1980. }
  1981. if (tags != 0) {
  1982. syntax_error(token, "A procedure type cannot have suffix tags");
  1983. }
  1984. if (where_token.kind != Token_Invalid) {
  1985. syntax_error(where_token, "'where' clauses are not allowed on procedure types");
  1986. }
  1987. return type;
  1988. }
  1989. // Check for Types
  1990. case Token_Dollar: {
  1991. Token token = expect_token(f, Token_Dollar);
  1992. Ast *type = parse_ident(f);
  1993. if (is_blank_ident(type)) {
  1994. syntax_error(type, "Invalid polymorphic type definition with a blank identifier");
  1995. }
  1996. Ast *specialization = nullptr;
  1997. if (allow_token(f, Token_Quo)) {
  1998. specialization = parse_type(f);
  1999. }
  2000. return ast_poly_type(f, token, type, specialization);
  2001. } break;
  2002. case Token_typeid: {
  2003. Token token = expect_token(f, Token_typeid);
  2004. return ast_typeid_type(f, token, nullptr);
  2005. } break;
  2006. case Token_Pointer: {
  2007. Token token = expect_token(f, Token_Pointer);
  2008. Ast *elem = parse_type(f);
  2009. return ast_pointer_type(f, token, elem);
  2010. } break;
  2011. case Token_OpenBracket: {
  2012. Token token = expect_token(f, Token_OpenBracket);
  2013. Ast *count_expr = nullptr;
  2014. if (f->curr_token.kind == Token_Question) {
  2015. count_expr = ast_unary_expr(f, expect_token(f, Token_Question), nullptr);
  2016. } else if (allow_token(f, Token_dynamic)) {
  2017. expect_token(f, Token_CloseBracket);
  2018. return ast_dynamic_array_type(f, token, parse_type(f));
  2019. } else if (f->curr_token.kind != Token_CloseBracket) {
  2020. f->expr_level++;
  2021. count_expr = parse_expr(f, false);
  2022. f->expr_level--;
  2023. }
  2024. expect_token(f, Token_CloseBracket);
  2025. return ast_array_type(f, token, count_expr, parse_type(f));
  2026. } break;
  2027. case Token_map: {
  2028. Token token = expect_token(f, Token_map);
  2029. Ast *key = nullptr;
  2030. Ast *value = nullptr;
  2031. Token open, close;
  2032. open = expect_token_after(f, Token_OpenBracket, "map");
  2033. key = parse_expr(f, true);
  2034. close = expect_token(f, Token_CloseBracket);
  2035. value = parse_type(f);
  2036. return ast_map_type(f, token, key, value);
  2037. } break;
  2038. case Token_struct: {
  2039. Token token = expect_token(f, Token_struct);
  2040. Ast *polymorphic_params = nullptr;
  2041. bool is_packed = false;
  2042. bool is_raw_union = false;
  2043. Ast *align = nullptr;
  2044. if (allow_token(f, Token_OpenParen)) {
  2045. isize param_count = 0;
  2046. polymorphic_params = parse_field_list(f, &param_count, 0, Token_CloseParen, true, true);
  2047. if (param_count == 0) {
  2048. syntax_error(polymorphic_params, "Expected at least 1 polymorphic parameter");
  2049. polymorphic_params = nullptr;
  2050. }
  2051. expect_token_after(f, Token_CloseParen, "parameter list");
  2052. check_polymorphic_params_for_type(f, polymorphic_params, token);
  2053. }
  2054. isize prev_level = f->expr_level;
  2055. f->expr_level = -1;
  2056. while (allow_token(f, Token_Hash)) {
  2057. Token tag = expect_token_after(f, Token_Ident, "#");
  2058. if (tag.string == "packed") {
  2059. if (is_packed) {
  2060. syntax_error(tag, "Duplicate struct tag '#%.*s'", LIT(tag.string));
  2061. }
  2062. is_packed = true;
  2063. } else if (tag.string == "align") {
  2064. if (align) {
  2065. syntax_error(tag, "Duplicate struct tag '#%.*s'", LIT(tag.string));
  2066. }
  2067. align = parse_expr(f, true);
  2068. } else if (tag.string == "raw_union") {
  2069. if (is_raw_union) {
  2070. syntax_error(tag, "Duplicate struct tag '#%.*s'", LIT(tag.string));
  2071. }
  2072. is_raw_union = true;
  2073. } else {
  2074. syntax_error(tag, "Invalid struct tag '#%.*s'", LIT(tag.string));
  2075. }
  2076. }
  2077. f->expr_level = prev_level;
  2078. if (is_raw_union && is_packed) {
  2079. is_packed = false;
  2080. syntax_error(token, "'#raw_union' cannot also be '#packed'");
  2081. }
  2082. Token where_token = {};
  2083. Array<Ast *> where_clauses = {};
  2084. skip_possible_newline_for_literal(f);
  2085. if (f->curr_token.kind == Token_where) {
  2086. where_token = expect_token(f, Token_where);
  2087. isize prev_level = f->expr_level;
  2088. f->expr_level = -1;
  2089. where_clauses = parse_rhs_expr_list(f);
  2090. f->expr_level = prev_level;
  2091. }
  2092. skip_possible_newline_for_literal(f);
  2093. Token open = expect_token_after(f, Token_OpenBrace, "struct");
  2094. isize name_count = 0;
  2095. Ast *fields = parse_struct_field_list(f, &name_count);
  2096. Token close = expect_closing_brace_of_field_list(f);
  2097. Slice<Ast *> decls = {};
  2098. if (fields != nullptr) {
  2099. GB_ASSERT(fields->kind == Ast_FieldList);
  2100. decls = fields->FieldList.list;
  2101. }
  2102. return ast_struct_type(f, token, decls, name_count, polymorphic_params, is_packed, is_raw_union, align, where_token, where_clauses);
  2103. } break;
  2104. case Token_union: {
  2105. Token token = expect_token(f, Token_union);
  2106. auto variants = array_make<Ast *>(heap_allocator());
  2107. Ast *polymorphic_params = nullptr;
  2108. Ast *align = nullptr;
  2109. bool no_nil = false;
  2110. bool maybe = false;
  2111. CommentGroup *docs = f->lead_comment;
  2112. Token start_token = f->curr_token;
  2113. if (allow_token(f, Token_OpenParen)) {
  2114. isize param_count = 0;
  2115. polymorphic_params = parse_field_list(f, &param_count, 0, Token_CloseParen, true, true);
  2116. if (param_count == 0) {
syntax_error(polymorphic_params, "Expected at least 1 polymorphic parameter");
  2118. polymorphic_params = nullptr;
  2119. }
  2120. expect_token_after(f, Token_CloseParen, "parameter list");
  2121. check_polymorphic_params_for_type(f, polymorphic_params, token);
  2122. }
  2123. while (allow_token(f, Token_Hash)) {
  2124. Token tag = expect_token_after(f, Token_Ident, "#");
  2125. if (tag.string == "align") {
  2126. if (align) {
  2127. syntax_error(tag, "Duplicate union tag '#%.*s'", LIT(tag.string));
  2128. }
  2129. align = parse_expr(f, true);
  2130. } else if (tag.string == "no_nil") {
  2131. if (no_nil) {
  2132. syntax_error(tag, "Duplicate union tag '#%.*s'", LIT(tag.string));
  2133. }
  2134. no_nil = true;
  2135. } else if (tag.string == "maybe") {
  2136. if (maybe) {
  2137. syntax_error(tag, "Duplicate union tag '#%.*s'", LIT(tag.string));
  2138. }
  2139. maybe = true;
} else {
  2141. syntax_error(tag, "Invalid union tag '#%.*s'", LIT(tag.string));
  2142. }
  2143. }
  2144. if (no_nil && maybe) {
  2145. syntax_error(f->curr_token, "#maybe and #no_nil cannot be applied together");
  2146. }
  2147. skip_possible_newline_for_literal(f);
  2148. Token where_token = {};
  2149. Array<Ast *> where_clauses = {};
  2150. if (f->curr_token.kind == Token_where) {
  2151. where_token = expect_token(f, Token_where);
  2152. isize prev_level = f->expr_level;
  2153. f->expr_level = -1;
  2154. where_clauses = parse_rhs_expr_list(f);
  2155. f->expr_level = prev_level;
  2156. }
  2157. skip_possible_newline_for_literal(f);
  2158. Token open = expect_token_after(f, Token_OpenBrace, "union");
  2159. while (f->curr_token.kind != Token_CloseBrace &&
  2160. f->curr_token.kind != Token_EOF) {
  2161. Ast *type = parse_type(f);
  2162. if (type->kind != Ast_BadExpr) {
  2163. array_add(&variants, type);
  2164. }
  2165. if (!allow_token(f, Token_Comma)) {
  2166. break;
  2167. }
  2168. }
  2169. Token close = expect_closing_brace_of_field_list(f);
  2170. return ast_union_type(f, token, variants, polymorphic_params, align, no_nil, maybe, where_token, where_clauses);
  2171. } break;
  2172. case Token_enum: {
  2173. Token token = expect_token(f, Token_enum);
  2174. Ast *base_type = nullptr;
  2175. if (f->curr_token.kind != Token_OpenBrace) {
  2176. base_type = parse_type(f);
  2177. }
  2178. skip_possible_newline_for_literal(f);
  2179. Token open = expect_token(f, Token_OpenBrace);
  2180. Array<Ast *> values = parse_element_list(f);
  2181. Token close = expect_closing_brace_of_field_list(f);
  2182. return ast_enum_type(f, token, base_type, values);
  2183. } break;
  2184. case Token_bit_set: {
  2185. Token token = expect_token(f, Token_bit_set);
  2186. expect_token(f, Token_OpenBracket);
  2187. Ast *elem = nullptr;
  2188. Ast *underlying = nullptr;
  2189. bool prev_allow_range = f->allow_range;
  2190. f->allow_range = true;
  2191. elem = parse_expr(f, true);
  2192. f->allow_range = prev_allow_range;
  2193. if (allow_token(f, Token_Semicolon)) {
  2194. underlying = parse_type(f);
  2195. } else if (allow_token(f, Token_Comma)) {
  2196. String p = token_to_string(f->prev_token);
  2197. syntax_error(token_end_of_line(f, f->prev_token), "Expected a semicolon, got a %.*s", LIT(p));
  2198. underlying = parse_type(f);
  2199. }
  2200. expect_token(f, Token_CloseBracket);
  2201. return ast_bit_set_type(f, token, elem, underlying);
  2202. }
  2203. case Token_asm: {
  2204. Token token = expect_token(f, Token_asm);
  2205. Array<Ast *> param_types = {};
  2206. Ast *return_type = nullptr;
  2207. if (allow_token(f, Token_OpenParen)) {
  2208. param_types = array_make<Ast *>(heap_allocator());
  2209. while (f->curr_token.kind != Token_CloseParen && f->curr_token.kind != Token_EOF) {
  2210. Ast *t = parse_type(f);
  2211. array_add(&param_types, t);
  2212. if (f->curr_token.kind != Token_Comma ||
  2213. f->curr_token.kind == Token_EOF) {
  2214. break;
  2215. }
  2216. advance_token(f);
  2217. }
  2218. expect_token(f, Token_CloseParen);
  2219. if (allow_token(f, Token_ArrowRight)) {
  2220. return_type = parse_type(f);
  2221. }
  2222. }
  2223. bool has_side_effects = false;
  2224. bool is_align_stack = false;
  2225. InlineAsmDialectKind dialect = InlineAsmDialect_Default;
  2226. while (f->curr_token.kind == Token_Hash) {
  2227. advance_token(f);
  2228. if (f->curr_token.kind == Token_Ident) {
  2229. Token token = advance_token(f);
  2230. String name = token.string;
  2231. if (name == "side_effects") {
  2232. if (has_side_effects) {
  2233. syntax_error(token, "Duplicate directive on inline asm expression: '#side_effects'");
  2234. }
  2235. has_side_effects = true;
  2236. } else if (name == "align_stack") {
  2237. if (is_align_stack) {
  2238. syntax_error(token, "Duplicate directive on inline asm expression: '#align_stack'");
  2239. }
  2240. is_align_stack = true;
  2241. } else if (name == "att") {
  2242. if (dialect == InlineAsmDialect_ATT) {
  2243. syntax_error(token, "Duplicate directive on inline asm expression: '#att'");
  2244. } else if (dialect != InlineAsmDialect_Default) {
  2245. syntax_error(token, "Conflicting asm dialects");
  2246. } else {
  2247. dialect = InlineAsmDialect_ATT;
  2248. }
  2249. } else if (name == "intel") {
  2250. if (dialect == InlineAsmDialect_Intel) {
  2251. syntax_error(token, "Duplicate directive on inline asm expression: '#intel'");
  2252. } else if (dialect != InlineAsmDialect_Default) {
  2253. syntax_error(token, "Conflicting asm dialects");
  2254. } else {
  2255. dialect = InlineAsmDialect_Intel;
  2256. }
  2257. }
  2258. } else {
  2259. syntax_error(f->curr_token, "Expected an identifier after hash");
  2260. }
  2261. }
  2262. skip_possible_newline_for_literal(f);
  2263. Token open = expect_token(f, Token_OpenBrace);
  2264. Ast *asm_string = parse_expr(f, false);
  2265. expect_token(f, Token_Comma);
  2266. Ast *constraints_string = parse_expr(f, false);
  2267. allow_token(f, Token_Comma);
  2268. Token close = expect_closing_brace_of_field_list(f);
  2269. return ast_inline_asm_expr(f, token, open, close, param_types, return_type, asm_string, constraints_string, has_side_effects, is_align_stack, dialect);
  2270. }
  2271. }
  2272. return nullptr;
  2273. }
  2274. bool is_literal_type(Ast *node) {
  2275. node = unparen_expr(node);
  2276. switch (node->kind) {
  2277. case Ast_BadExpr:
  2278. case Ast_Ident:
  2279. case Ast_SelectorExpr:
  2280. case Ast_ArrayType:
  2281. case Ast_StructType:
  2282. case Ast_UnionType:
  2283. case Ast_EnumType:
  2284. case Ast_DynamicArrayType:
  2285. case Ast_MapType:
  2286. case Ast_BitSetType:
  2287. case Ast_CallExpr:
  2288. return true;
  2289. }
  2290. return false;
  2291. }
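// Parses a call's parenthesised argument list, including 'name = value' arguments and a
// '..' spread argument, and builds the call node. A call made through '->' is additionally
// wrapped in a selector call expression.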
  2292. Ast *parse_call_expr(AstFile *f, Ast *operand) {
  2293. auto args = array_make<Ast *>(heap_allocator());
  2294. Token open_paren, close_paren;
  2295. Token ellipsis = {};
  2296. isize prev_expr_level = f->expr_level;
  2297. bool prev_allow_newline = f->allow_newline;
  2298. f->expr_level = 0;
  2299. f->allow_newline = true;
  2300. open_paren = expect_token(f, Token_OpenParen);
  2301. while (f->curr_token.kind != Token_CloseParen &&
  2302. f->curr_token.kind != Token_EOF &&
  2303. ellipsis.pos.line == 0) {
  2304. if (f->curr_token.kind == Token_Comma) {
syntax_error(f->curr_token, "Expected an expression, not ','");
} else if (f->curr_token.kind == Token_Eq) {
syntax_error(f->curr_token, "Expected an expression, not '='");
  2308. }
  2309. bool prefix_ellipsis = false;
  2310. if (f->curr_token.kind == Token_Ellipsis) {
  2311. prefix_ellipsis = true;
  2312. ellipsis = expect_token(f, Token_Ellipsis);
  2313. }
  2314. Ast *arg = parse_expr(f, false);
  2315. if (f->curr_token.kind == Token_Eq) {
  2316. Token eq = expect_token(f, Token_Eq);
  2317. if (prefix_ellipsis) {
syntax_error(ellipsis, "'..' must be applied to the value rather than the field name");
  2319. }
  2320. Ast *value = parse_value(f);
  2321. arg = ast_field_value(f, arg, value, eq);
  2322. }
  2323. array_add(&args, arg);
  2324. if (!allow_token(f, Token_Comma)) {
  2325. break;
  2326. }
  2327. }
  2328. f->allow_newline = prev_allow_newline;
  2329. f->expr_level = prev_expr_level;
  2330. close_paren = expect_closing(f, Token_CloseParen, str_lit("argument list"));
  2331. Ast *call = ast_call_expr(f, operand, args, open_paren, close_paren, ellipsis);
  2332. Ast *o = unparen_expr(operand);
  2333. if (o->kind == Ast_SelectorExpr && o->SelectorExpr.token.kind == Token_ArrowRight) {
  2334. return ast_selector_call_expr(f, o->SelectorExpr.token, o, call);
  2335. }
  2336. return call;
  2337. }
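// Parses the suffixes that may follow an operand: call expressions, '.' selectors and type
// assertions, '->' selectors, indexing and slicing, '^' dereferences, 'or_return', and
// trailing compound literal bodies.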
  2338. Ast *parse_atom_expr(AstFile *f, Ast *operand, bool lhs) {
  2339. if (operand == nullptr) {
  2340. if (f->allow_type) return nullptr;
  2341. Token begin = f->curr_token;
  2342. syntax_error(begin, "Expected an operand");
  2343. fix_advance_to_next_stmt(f);
  2344. operand = ast_bad_expr(f, begin, f->curr_token);
  2345. }
  2346. bool loop = true;
  2347. while (loop) {
  2348. switch (f->curr_token.kind) {
  2349. case Token_OpenParen:
  2350. operand = parse_call_expr(f, operand);
  2351. break;
  2352. case Token_Period: {
  2353. Token token = advance_token(f);
  2354. switch (f->curr_token.kind) {
  2355. case Token_Ident:
  2356. operand = ast_selector_expr(f, token, operand, parse_ident(f));
  2357. break;
  2358. // case Token_Integer:
  2359. // operand = ast_selector_expr(f, token, operand, parse_expr(f, lhs));
  2360. // break;
  2361. case Token_OpenParen: {
  2362. Token open = expect_token(f, Token_OpenParen);
  2363. Ast *type = parse_type(f);
  2364. Token close = expect_token(f, Token_CloseParen);
  2365. operand = ast_type_assertion(f, operand, token, type);
  2366. } break;
  2367. case Token_Question: {
  2368. Token question = expect_token(f, Token_Question);
  2369. Ast *type = ast_unary_expr(f, question, nullptr);
  2370. operand = ast_type_assertion(f, operand, token, type);
  2371. } break;
  2372. default:
  2373. syntax_error(f->curr_token, "Expected a selector");
  2374. advance_token(f);
  2375. operand = ast_bad_expr(f, ast_token(operand), f->curr_token);
  2376. // operand = ast_selector_expr(f, f->curr_token, operand, nullptr);
  2377. break;
  2378. }
  2379. } break;
  2380. case Token_ArrowRight: {
  2381. Token token = advance_token(f);
  2382. operand = ast_selector_expr(f, token, operand, parse_ident(f));
  2383. // Ast *call = parse_call_expr(f, sel);
  2384. // operand = ast_selector_call_expr(f, token, sel, call);
  2385. break;
  2386. }
  2387. case Token_OpenBracket: {
  2388. bool prev_allow_range = f->allow_range;
  2389. f->allow_range = false;
  2390. Token open = {}, close = {}, interval = {};
  2391. Ast *indices[2] = {};
  2392. bool is_interval = false;
  2393. f->expr_level++;
  2394. open = expect_token(f, Token_OpenBracket);
  2395. switch (f->curr_token.kind) {
  2396. case Token_Ellipsis:
  2397. case Token_RangeFull:
  2398. case Token_RangeHalf:
  2399. // NOTE(bill): Do not err yet
  2400. case Token_Colon:
  2401. break;
  2402. default:
  2403. indices[0] = parse_expr(f, false);
  2404. break;
  2405. }
  2406. switch (f->curr_token.kind) {
  2407. case Token_Ellipsis:
  2408. case Token_RangeFull:
  2409. case Token_RangeHalf:
  2410. syntax_error(f->curr_token, "Expected a colon, not a range");
  2411. /* fallthrough */
  2412. case Token_Colon:
  2413. interval = advance_token(f);
  2414. is_interval = true;
  2415. if (f->curr_token.kind != Token_CloseBracket &&
  2416. f->curr_token.kind != Token_EOF) {
  2417. indices[1] = parse_expr(f, false);
  2418. }
  2419. break;
  2420. }
  2421. f->expr_level--;
  2422. close = expect_token(f, Token_CloseBracket);
  2423. if (is_interval) {
  2424. operand = ast_slice_expr(f, operand, open, close, interval, indices[0], indices[1]);
  2425. } else {
  2426. operand = ast_index_expr(f, operand, indices[0], open, close);
  2427. }
  2428. f->allow_range = prev_allow_range;
  2429. } break;
case Token_Pointer: // Dereference
  2431. operand = ast_deref_expr(f, operand, expect_token(f, Token_Pointer));
  2432. break;
  2433. case Token_or_return:
  2434. operand = ast_or_return_expr(f, operand, expect_token(f, Token_or_return));
  2435. break;
  2436. case Token_OpenBrace:
  2437. if (!lhs && is_literal_type(operand) && f->expr_level >= 0) {
  2438. operand = parse_literal_value(f, operand);
  2439. } else {
  2440. loop = false;
  2441. }
  2442. break;
  2443. case Token_Increment:
  2444. case Token_Decrement:
  2445. if (!lhs) {
  2446. Token token = advance_token(f);
  2447. syntax_error(token, "Postfix '%.*s' operator is not supported", LIT(token.string));
  2448. } else {
  2449. loop = false;
  2450. }
  2451. break;
  2452. default:
  2453. loop = false;
  2454. break;
  2455. }
  2456. lhs = false; // NOTE(bill): 'tis not lhs anymore
  2457. }
  2458. return operand;
  2459. }
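// Parses prefix forms: 'cast(T)'/'transmute(T)', 'auto_cast', the unary add/sub/xor/and/not operators, rejected '++'/'--', and implicit selectors '.name', then hands off to parse_atom_expr.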
  2460. Ast *parse_unary_expr(AstFile *f, bool lhs) {
  2461. switch (f->curr_token.kind) {
  2462. case Token_transmute:
  2463. case Token_cast: {
  2464. Token token = advance_token(f);
  2465. expect_token(f, Token_OpenParen);
  2466. Ast *type = parse_type(f);
  2467. expect_token(f, Token_CloseParen);
  2468. Ast *expr = parse_unary_expr(f, lhs);
  2469. return ast_type_cast(f, token, type, expr);
  2470. }
  2471. case Token_auto_cast: {
  2472. Token token = advance_token(f);
  2473. Ast *expr = parse_unary_expr(f, lhs);
  2474. return ast_auto_cast(f, token, expr);
  2475. }
  2476. case Token_Add:
  2477. case Token_Sub:
  2478. case Token_Xor:
  2479. case Token_And:
  2480. case Token_Not: {
  2481. Token token = advance_token(f);
  2482. Ast *expr = parse_unary_expr(f, lhs);
  2483. return ast_unary_expr(f, token, expr);
  2484. }
  2485. case Token_Increment:
  2486. case Token_Decrement: {
  2487. Token token = advance_token(f);
  2488. syntax_error(token, "Unary '%.*s' operator is not supported", LIT(token.string));
  2489. Ast *expr = parse_unary_expr(f, lhs);
  2490. return ast_unary_expr(f, token, expr);
  2491. }
  2492. case Token_Period: {
  2493. Token token = expect_token(f, Token_Period);
  2494. Ast *ident = parse_ident(f);
  2495. return ast_implicit_selector_expr(f, token, ident);
  2496. }
  2497. }
  2498. return parse_atom_expr(f, parse_operand(f, lhs), lhs);
  2499. }
  2500. bool is_ast_range(Ast *expr) {
  2501. if (expr == nullptr) {
  2502. return false;
  2503. }
  2504. if (expr->kind != Ast_BinaryExpr) {
  2505. return false;
  2506. }
  2507. return is_token_range(expr->BinaryExpr.op.kind);
  2508. }
  2509. // NOTE(bill): result == priority
  2510. i32 token_precedence(AstFile *f, TokenKind t) {
  2511. switch (t) {
  2512. case Token_Question:
  2513. case Token_if:
  2514. case Token_when:
  2515. case Token_or_else:
  2516. return 1;
  2517. case Token_Ellipsis:
  2518. case Token_RangeFull:
  2519. case Token_RangeHalf:
  2520. if (!f->allow_range) {
  2521. return 0;
  2522. }
  2523. return 2;
  2524. case Token_CmpOr:
  2525. return 3;
  2526. case Token_CmpAnd:
  2527. return 4;
  2528. case Token_CmpEq:
  2529. case Token_NotEq:
  2530. case Token_Lt:
  2531. case Token_Gt:
  2532. case Token_LtEq:
  2533. case Token_GtEq:
  2534. return 5;
  2535. case Token_in:
  2536. case Token_not_in:
  2537. if (f->expr_level < 0 && !f->allow_in_expr) {
  2538. return 0;
  2539. }
  2540. /*fallthrough*/
  2541. case Token_Add:
  2542. case Token_Sub:
  2543. case Token_Or:
  2544. case Token_Xor:
  2545. return 6;
  2546. case Token_Mul:
  2547. case Token_Quo:
  2548. case Token_Mod:
  2549. case Token_ModMod:
  2550. case Token_And:
  2551. case Token_AndNot:
  2552. case Token_Shl:
  2553. case Token_Shr:
  2554. return 7;
  2555. }
  2556. return 0;
  2557. }
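// Operator-precedence parser for binary expressions; also builds the 'x if cond else y', 'x when cond else y', 'cond ? x : y', and 'or_else' forms.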
  2558. Ast *parse_binary_expr(AstFile *f, bool lhs, i32 prec_in) {
  2559. Ast *expr = parse_unary_expr(f, lhs);
  2560. for (i32 prec = token_precedence(f, f->curr_token.kind); prec >= prec_in; prec--) {
  2561. for (;;) {
  2562. Token op = f->curr_token;
  2563. i32 op_prec = token_precedence(f, op.kind);
  2564. if (op_prec != prec) {
  2565. // NOTE(bill): This will also catch operators that are not valid "binary" operators
  2566. break;
  2567. }
  2568. Token prev = f->prev_token;
  2569. switch (op.kind) {
  2570. case Token_if:
  2571. case Token_when:
  2572. if (prev.pos.line < op.pos.line) {
// NOTE(bill): Check to see if the `if` or `when` is on the same line as the `lhs` condition
  2574. goto loop_end;
  2575. }
  2576. break;
  2577. }
  2578. expect_operator(f); // NOTE(bill): error checks too
  2579. if (op.kind == Token_Question) {
  2580. Ast *cond = expr;
  2581. // Token_Question
  2582. Ast *x = parse_expr(f, lhs);
  2583. Token token_c = expect_token(f, Token_Colon);
  2584. Ast *y = parse_expr(f, lhs);
  2585. expr = ast_ternary_if_expr(f, x, cond, y);
  2586. } else if (op.kind == Token_if) {
  2587. Ast *x = expr;
  2588. // Token_if
  2589. Ast *cond = parse_expr(f, lhs);
  2590. Token tok_else = expect_token(f, Token_else);
  2591. Ast *y = parse_expr(f, lhs);
  2592. expr = ast_ternary_if_expr(f, x, cond, y);
  2593. } else if (op.kind == Token_when) {
  2594. Ast *x = expr;
  2595. // Token_when
  2596. Ast *cond = parse_expr(f, lhs);
  2597. Token tok_else = expect_token(f, Token_else);
  2598. Ast *y = parse_expr(f, lhs);
  2599. expr = ast_ternary_when_expr(f, x, cond, y);
  2600. } else if (op.kind == Token_or_else) {
  2601. Ast *x = expr;
  2602. Ast *y = parse_expr(f, lhs);
  2603. expr = ast_or_else_expr(f, x, op, y);
  2604. } else {
  2605. Ast *right = parse_binary_expr(f, false, prec+1);
  2606. if (right == nullptr) {
  2607. syntax_error(op, "Expected expression on the right-hand side of the binary operator");
  2608. }
  2609. expr = ast_binary_expr(f, op, expr, right);
  2610. }
  2611. lhs = false;
  2612. }
  2613. loop_end:;
  2614. }
  2615. return expr;
  2616. }
  2617. Ast *parse_expr(AstFile *f, bool lhs) {
  2618. return parse_binary_expr(f, lhs, 0+1);
  2619. }
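// Parses a comma-separated expression list, temporarily allowing newlines between elements.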
  2620. Array<Ast *> parse_expr_list(AstFile *f, bool lhs) {
  2621. bool allow_newline = f->allow_newline;
  2622. f->allow_newline = true;
  2623. auto list = array_make<Ast *>(heap_allocator());
  2624. for (;;) {
  2625. Ast *e = parse_expr(f, lhs);
  2626. array_add(&list, e);
  2627. if (f->curr_token.kind != Token_Comma ||
  2628. f->curr_token.kind == Token_EOF) {
  2629. break;
  2630. }
  2631. advance_token(f);
  2632. }
  2633. f->allow_newline = allow_newline;
  2634. return list;
  2635. }
  2636. Array<Ast *> parse_lhs_expr_list(AstFile *f) {
  2637. return parse_expr_list(f, true);
  2638. }
  2639. Array<Ast *> parse_rhs_expr_list(AstFile *f) {
  2640. return parse_expr_list(f, false);
  2641. }
  2642. Array<Ast *> parse_ident_list(AstFile *f, bool allow_poly_names) {
  2643. auto list = array_make<Ast *>(heap_allocator());
  2644. for (;;) {
  2645. array_add(&list, parse_ident(f, allow_poly_names));
  2646. if (f->curr_token.kind != Token_Comma ||
  2647. f->curr_token.kind == Token_EOF) {
  2648. break;
  2649. }
  2650. advance_token(f);
  2651. }
  2652. return list;
  2653. }
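// Parses a type, reporting an error and producing a bad expression node when no type is present.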
  2654. Ast *parse_type(AstFile *f) {
  2655. Ast *type = parse_type_or_ident(f);
  2656. if (type == nullptr) {
  2657. Token token = advance_token(f);
  2658. syntax_error(token, "Expected a type");
  2659. return ast_bad_expr(f, token, f->curr_token);
  2660. }
  2661. return type;
  2662. }
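// Parses one statement inside a 'foreign' block, keeping only value declarations and 'when' statements.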
  2663. void parse_foreign_block_decl(AstFile *f, Array<Ast *> *decls) {
  2664. Ast *decl = parse_stmt(f);
  2665. switch (decl->kind) {
  2666. case Ast_EmptyStmt:
  2667. case Ast_BadStmt:
  2668. case Ast_BadDecl:
  2669. return;
  2670. case Ast_WhenStmt:
  2671. case Ast_ValueDecl:
  2672. array_add(decls, decl);
  2673. return;
  2674. default:
  2675. syntax_error(decl, "Foreign blocks only allow procedure and variable declarations");
  2676. return;
  2677. }
  2678. }
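// Parses 'foreign [lib] { ... }'; the library name may be omitted, and the body may only contain declarations accepted by parse_foreign_block_decl.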
  2679. Ast *parse_foreign_block(AstFile *f, Token token) {
  2680. CommentGroup *docs = f->lead_comment;
  2681. Ast *foreign_library = nullptr;
  2682. if (f->curr_token.kind == Token_OpenBrace) {
  2683. foreign_library = ast_ident(f, blank_token);
  2684. } else {
  2685. foreign_library = parse_ident(f);
  2686. }
  2687. Token open = {};
  2688. Token close = {};
  2689. auto decls = array_make<Ast *>(heap_allocator());
  2690. bool prev_in_foreign_block = f->in_foreign_block;
  2691. defer (f->in_foreign_block = prev_in_foreign_block);
  2692. f->in_foreign_block = true;
  2693. skip_possible_newline_for_literal(f);
  2694. open = expect_token(f, Token_OpenBrace);
  2695. while (f->curr_token.kind != Token_CloseBrace &&
  2696. f->curr_token.kind != Token_EOF) {
  2697. parse_foreign_block_decl(f, &decls);
  2698. }
  2699. close = expect_token(f, Token_CloseBrace);
  2700. Ast *body = ast_block_stmt(f, decls, open, close);
  2701. Ast *decl = ast_foreign_block_decl(f, token, foreign_library, body, docs);
  2702. expect_semicolon(f, decl);
  2703. return decl;
  2704. }
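// Parses the remainder of a value declaration after its names: an optional type, then '=' (variable) or ':' (constant) followed by the initial values.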
  2705. Ast *parse_value_decl(AstFile *f, Array<Ast *> names, CommentGroup *docs) {
  2706. bool is_mutable = true;
  2707. Array<Ast *> values = {};
  2708. Ast *type = parse_type_or_ident(f);
  2709. if (f->curr_token.kind == Token_Eq ||
  2710. f->curr_token.kind == Token_Colon) {
  2711. Token sep = {};
  2712. if (!is_mutable) {
  2713. sep = expect_token_after(f, Token_Colon, "type");
  2714. } else {
  2715. sep = advance_token(f);
  2716. is_mutable = sep.kind != Token_Colon;
  2717. }
  2718. values = parse_rhs_expr_list(f);
  2719. if (values.count > names.count) {
syntax_error(f->curr_token, "Too many values on the right-hand side of the declaration");
  2721. } else if (values.count < names.count && !is_mutable) {
  2722. syntax_error(f->curr_token, "All constant declarations must be defined");
  2723. } else if (values.count == 0) {
  2724. syntax_error(f->curr_token, "Expected an expression for this declaration");
  2725. }
  2726. }
  2727. if (is_mutable) {
  2728. if (type == nullptr && values.count == 0) {
  2729. syntax_error(f->curr_token, "Missing variable type or initialization");
  2730. return ast_bad_decl(f, f->curr_token, f->curr_token);
  2731. }
  2732. } else {
  2733. if (type == nullptr && values.count == 0 && names.count > 0) {
  2734. syntax_error(f->curr_token, "Missing constant value");
  2735. return ast_bad_decl(f, f->curr_token, f->curr_token);
  2736. }
  2737. }
  2738. if (values.data == nullptr) {
  2739. values.allocator = heap_allocator();
  2740. }
  2741. if (f->expr_level >= 0) {
  2742. Ast *end = nullptr;
  2743. if (!is_mutable && values.count > 0) {
  2744. end = values[values.count-1];
  2745. }
  2746. if (f->curr_token.kind == Token_CloseBrace &&
  2747. f->curr_token.pos.line == f->prev_token.pos.line) {
  2748. } else {
  2749. expect_semicolon(f, end);
  2750. }
  2751. }
  2752. if (f->curr_proc == nullptr) {
  2753. if (values.count > 0 && names.count != values.count) {
  2754. syntax_error(
  2755. values[0],
  2756. "Expected %td expressions on the right hand side, got %td\n"
  2757. "\tNote: Global declarations do not allow for multi-valued expressions",
  2758. names.count, values.count
  2759. );
  2760. }
  2761. }
  2762. return ast_value_decl(f, names, type, values, is_mutable, docs, f->line_comment);
  2763. }
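// Parses a simple statement from a leading expression list: assignments, 'x in y' (when allowed), labels, value declarations, or a single expression statement.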
  2764. Ast *parse_simple_stmt(AstFile *f, u32 flags) {
  2765. Token token = f->curr_token;
  2766. CommentGroup *docs = f->lead_comment;
  2767. Array<Ast *> lhs = parse_lhs_expr_list(f);
  2768. token = f->curr_token;
  2769. switch (token.kind) {
  2770. case Token_Eq:
  2771. case Token_AddEq:
  2772. case Token_SubEq:
  2773. case Token_MulEq:
  2774. case Token_QuoEq:
  2775. case Token_ModEq:
  2776. case Token_ModModEq:
  2777. case Token_AndEq:
  2778. case Token_OrEq:
  2779. case Token_XorEq:
  2780. case Token_ShlEq:
  2781. case Token_ShrEq:
  2782. case Token_AndNotEq:
  2783. case Token_CmpAndEq:
  2784. case Token_CmpOrEq:
  2785. {
  2786. if (f->curr_proc == nullptr) {
  2787. syntax_error(f->curr_token, "You cannot use a simple statement in the file scope");
  2788. return ast_bad_stmt(f, f->curr_token, f->curr_token);
  2789. }
  2790. advance_token(f);
  2791. Array<Ast *> rhs = parse_rhs_expr_list(f);
  2792. if (rhs.count == 0) {
  2793. syntax_error(token, "No right-hand side in assignment statement.");
  2794. return ast_bad_stmt(f, token, f->curr_token);
  2795. }
  2796. return ast_assign_stmt(f, token, lhs, rhs);
  2797. } break;
  2798. case Token_in:
  2799. if (flags&StmtAllowFlag_In) {
  2800. allow_token(f, Token_in);
  2801. bool prev_allow_range = f->allow_range;
  2802. f->allow_range = true;
  2803. Ast *expr = parse_expr(f, true);
  2804. f->allow_range = prev_allow_range;
  2805. auto rhs = array_make<Ast *>(heap_allocator(), 0, 1);
  2806. array_add(&rhs, expr);
  2807. return ast_assign_stmt(f, token, lhs, rhs);
  2808. }
  2809. break;
  2810. case Token_Colon:
  2811. expect_token_after(f, Token_Colon, "identifier list");
  2812. if ((flags&StmtAllowFlag_Label) && lhs.count == 1) {
  2813. switch (f->curr_token.kind) {
  2814. case Token_OpenBrace: // block statement
  2815. case Token_if:
  2816. case Token_for:
  2817. case Token_switch: {
  2818. Ast *name = lhs[0];
  2819. Ast *label = ast_label_decl(f, ast_token(name), name);
  2820. Ast *stmt = parse_stmt(f);
  2821. #define _SET_LABEL(Kind_, label_) case GB_JOIN2(Ast_, Kind_): (stmt->Kind_).label = label_; break
  2822. switch (stmt->kind) {
  2823. _SET_LABEL(BlockStmt, label);
  2824. _SET_LABEL(IfStmt, label);
  2825. _SET_LABEL(ForStmt, label);
  2826. _SET_LABEL(RangeStmt, label);
  2827. _SET_LABEL(SwitchStmt, label);
  2828. _SET_LABEL(TypeSwitchStmt, label);
  2829. default:
  2830. syntax_error(token, "Labels can only be applied to a loop or switch statement");
  2831. break;
  2832. }
  2833. #undef _SET_LABEL
  2834. return stmt;
  2835. } break;
  2836. }
  2837. }
  2838. return parse_value_decl(f, lhs, docs);
  2839. }
  2840. if (lhs.count > 1) {
  2841. syntax_error(token, "Expected 1 expression");
  2842. return ast_bad_stmt(f, token, f->curr_token);
  2843. }
  2844. switch (token.kind) {
  2845. case Token_Increment:
  2846. case Token_Decrement:
  2847. advance_token(f);
  2848. syntax_error(token, "Postfix '%.*s' statement is not supported", LIT(token.string));
  2849. break;
  2850. }
  2851. #if 0
  2852. switch (token.kind) {
  2853. case Token_Inc:
  2854. case Token_Dec:
  2855. advance_token(f);
  2856. return ast_inc_dec_stmt(f, token, lhs[0]);
  2857. }
  2858. #endif
  2859. return ast_expr_stmt(f, lhs[0]);
  2860. }
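// Parses a block statement; blocks are rejected at file scope unless they belong to a 'when' statement.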
  2861. Ast *parse_block_stmt(AstFile *f, b32 is_when) {
  2862. skip_possible_newline_for_literal(f);
  2863. if (!is_when && f->curr_proc == nullptr) {
  2864. syntax_error(f->curr_token, "You cannot use a block statement in the file scope");
  2865. return ast_bad_stmt(f, f->curr_token, f->curr_token);
  2866. }
  2867. return parse_body(f);
  2868. }
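// Parses an optional '->' results clause; '-> !' marks the procedure as diverging, and a single unparenthesised type becomes a one-field result list.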
  2869. Ast *parse_results(AstFile *f, bool *diverging) {
  2870. if (!allow_token(f, Token_ArrowRight)) {
  2871. return nullptr;
  2872. }
  2873. if (allow_token(f, Token_Not)) {
  2874. if (diverging) *diverging = true;
  2875. return nullptr;
  2876. }
  2877. isize prev_level = f->expr_level;
  2878. defer (f->expr_level = prev_level);
  2879. // f->expr_level = -1;
  2880. if (f->curr_token.kind != Token_OpenParen) {
  2881. Token begin_token = f->curr_token;
  2882. Array<Ast *> empty_names = {};
  2883. auto list = array_make<Ast *>(heap_allocator(), 0, 1);
  2884. Ast *type = parse_type(f);
  2885. Token tag = {};
  2886. array_add(&list, ast_field(f, empty_names, type, nullptr, 0, tag, nullptr, nullptr));
  2887. return ast_field_list(f, begin_token, list);
  2888. }
  2889. Ast *list = nullptr;
  2890. expect_token(f, Token_OpenParen);
  2891. list = parse_field_list(f, nullptr, FieldFlag_Results, Token_CloseParen, true, false);
  2892. expect_token_after(f, Token_CloseParen, "parameter list");
  2893. return list;
  2894. }
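// Maps a calling convention string such as "c" or "stdcall" to its ProcCallingConvention; unknown names yield ProcCC_Invalid.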
  2895. ProcCallingConvention string_to_calling_convention(String s) {
  2896. if (s == "odin") return ProcCC_Odin;
  2897. if (s == "contextless") return ProcCC_Contextless;
  2898. if (s == "cdecl") return ProcCC_CDecl;
  2899. if (s == "c") return ProcCC_CDecl;
  2900. if (s == "stdcall") return ProcCC_StdCall;
  2901. if (s == "std") return ProcCC_StdCall;
  2902. if (s == "fastcall") return ProcCC_FastCall;
  2903. if (s == "fast") return ProcCC_FastCall;
  2904. if (s == "none") return ProcCC_None;
  2905. if (s == "naked") return ProcCC_Naked;
  2906. return ProcCC_Invalid;
  2907. }
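// Parses a procedure type: optional calling convention string, parameter list, results, and detection of polymorphic ('$') parameters which mark the type as generic.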
  2908. Ast *parse_proc_type(AstFile *f, Token proc_token) {
  2909. Ast *params = nullptr;
  2910. Ast *results = nullptr;
  2911. bool diverging = false;
  2912. ProcCallingConvention cc = ProcCC_Invalid;
  2913. if (f->curr_token.kind == Token_String) {
  2914. Token token = expect_token(f, Token_String);
  2915. auto c = string_to_calling_convention(string_value_from_token(f, token));
  2916. if (c == ProcCC_Invalid) {
  2917. syntax_error(token, "Unknown procedure calling convention: '%.*s'", LIT(token.string));
  2918. } else {
  2919. cc = c;
  2920. }
  2921. }
  2922. if (cc == ProcCC_Invalid) {
  2923. if (f->in_foreign_block) {
  2924. cc = ProcCC_ForeignBlockDefault;
  2925. } else {
  2926. cc = default_calling_convention();
  2927. }
  2928. }
  2929. expect_token(f, Token_OpenParen);
  2930. params = parse_field_list(f, nullptr, FieldFlag_Signature, Token_CloseParen, true, true);
  2931. expect_token_after(f, Token_CloseParen, "parameter list");
  2932. results = parse_results(f, &diverging);
  2933. u64 tags = 0;
  2934. bool is_generic = false;
  2935. for_array(i, params->FieldList.list) {
  2936. Ast *param = params->FieldList.list[i];
  2937. ast_node(field, Field, param);
  2938. if (field->type != nullptr) {
  2939. if (field->type->kind == Ast_PolyType) {
  2940. is_generic = true;
  2941. goto end;
  2942. }
  2943. for_array(j, field->names) {
  2944. Ast *name = field->names[j];
  2945. if (name->kind == Ast_PolyType) {
  2946. is_generic = true;
  2947. goto end;
  2948. }
  2949. }
  2950. }
  2951. }
  2952. end:
  2953. return ast_proc_type(f, proc_token, params, results, tags, cc, is_generic, diverging);
  2954. }
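// Parses a field/variable type, optionally accepting '..T' variadic types and 'typeid' (with a '/' specialization).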
  2955. Ast *parse_var_type(AstFile *f, bool allow_ellipsis, bool allow_typeid_token) {
  2956. if (allow_ellipsis && f->curr_token.kind == Token_Ellipsis) {
  2957. Token tok = advance_token(f);
  2958. Ast *type = parse_type_or_ident(f);
  2959. if (type == nullptr) {
syntax_error(tok, "Variadic field is missing a type after '..'");
  2961. type = ast_bad_expr(f, tok, f->curr_token);
  2962. }
  2963. return ast_ellipsis(f, tok, type);
  2964. }
  2965. Ast *type = nullptr;
  2966. if (allow_typeid_token &&
  2967. f->curr_token.kind == Token_typeid) {
  2968. Token token = expect_token(f, Token_typeid);
  2969. Ast *specialization = nullptr;
  2970. if (allow_token(f, Token_Quo)) {
  2971. specialization = parse_type(f);
  2972. }
  2973. type = ast_typeid_type(f, token, specialization);
  2974. } else {
  2975. type = parse_type(f);
  2976. }
  2977. return type;
  2978. }
  2979. enum FieldPrefixKind : i32 {
  2980. FieldPrefix_Unknown = -1,
  2981. FieldPrefix_Invalid = 0,
  2982. FieldPrefix_using,
  2983. FieldPrefix_const,
  2984. FieldPrefix_no_alias,
  2985. FieldPrefix_c_vararg,
  2986. FieldPrefix_auto_cast,
  2987. FieldPrefix_any_int,
  2988. };
  2989. struct ParseFieldPrefixMapping {
  2990. String name;
  2991. TokenKind token_kind;
  2992. FieldPrefixKind prefix;
  2993. FieldFlag flag;
  2994. };
  2995. gb_global ParseFieldPrefixMapping parse_field_prefix_mappings[] = {
  2996. {str_lit("using"), Token_using, FieldPrefix_using, FieldFlag_using},
  2997. {str_lit("auto_cast"), Token_auto_cast, FieldPrefix_auto_cast, FieldFlag_auto_cast},
  2998. {str_lit("no_alias"), Token_Hash, FieldPrefix_no_alias, FieldFlag_no_alias},
  2999. {str_lit("c_vararg"), Token_Hash, FieldPrefix_c_vararg, FieldFlag_c_vararg},
  3000. {str_lit("const"), Token_Hash, FieldPrefix_const, FieldFlag_const},
  3001. {str_lit("any_int"), Token_Hash, FieldPrefix_any_int, FieldFlag_any_int},
  3002. };
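// Classifies the current token as a field prefix ('using', 'auto_cast', or a '#' directive such as '#no_alias'); note that it advances past '#' while inspecting the directive name.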
  3003. FieldPrefixKind is_token_field_prefix(AstFile *f) {
  3004. switch (f->curr_token.kind) {
  3005. case Token_EOF:
  3006. return FieldPrefix_Invalid;
  3007. case Token_using:
  3008. return FieldPrefix_using;
  3009. case Token_auto_cast:
  3010. return FieldPrefix_auto_cast;
  3011. case Token_Hash:
  3012. advance_token(f);
  3013. switch (f->curr_token.kind) {
  3014. case Token_Ident:
  3015. for (i32 i = 0; i < gb_count_of(parse_field_prefix_mappings); i++) {
  3016. auto const &mapping = parse_field_prefix_mappings[i];
  3017. if (mapping.token_kind == Token_Hash) {
  3018. if (f->curr_token.string == mapping.name) {
  3019. return mapping.prefix;
  3020. }
  3021. }
  3022. }
  3023. break;
  3024. }
  3025. return FieldPrefix_Unknown;
  3026. }
  3027. return FieldPrefix_Invalid;
  3028. }
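// Consumes any field prefixes before a field and folds them into a flag mask, diagnosing duplicates and unknown '#' prefixes.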
  3029. u32 parse_field_prefixes(AstFile *f) {
  3030. i32 counts[gb_count_of(parse_field_prefix_mappings)] = {};
  3031. for (;;) {
  3032. FieldPrefixKind kind = is_token_field_prefix(f);
  3033. if (kind == FieldPrefix_Invalid) {
  3034. break;
  3035. }
  3036. if (kind == FieldPrefix_Unknown) {
  3037. syntax_error(f->curr_token, "Unknown prefix kind '#%.*s'", LIT(f->curr_token.string));
  3038. advance_token(f);
  3039. continue;
  3040. }
  3041. for (i32 i = 0; i < gb_count_of(parse_field_prefix_mappings); i++) {
  3042. if (parse_field_prefix_mappings[i].prefix == kind) {
  3043. counts[i] += 1;
  3044. advance_token(f);
  3045. break;
  3046. }
  3047. }
  3048. }
  3049. u32 field_flags = 0;
  3050. for (i32 i = 0; i < gb_count_of(parse_field_prefix_mappings); i++) {
  3051. if (counts[i] > 0) {
  3052. field_flags |= parse_field_prefix_mappings[i].flag;
  3053. if (counts[i] != 1) {
  3054. auto const &mapping = parse_field_prefix_mappings[i];
  3055. String name = mapping.name;
  3056. char const *prefix = "";
  3057. if (mapping.token_kind == Token_Hash) {
  3058. prefix = "#";
  3059. }
  3060. syntax_error(f->curr_token, "Multiple '%s%.*s' in this field list", prefix, LIT(name));
  3061. }
  3062. }
  3063. }
  3064. return field_flags;
  3065. }
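// Validates collected prefix flags against the flags permitted for this field list, dropping any flag that is not allowed.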
  3066. u32 check_field_prefixes(AstFile *f, isize name_count, u32 allowed_flags, u32 set_flags) {
  3067. for (i32 i = 0; i < gb_count_of(parse_field_prefix_mappings); i++) {
  3068. bool err = false;
  3069. auto const &m = parse_field_prefix_mappings[i];
  3070. if ((set_flags & m.flag) != 0) {
  3071. if (m.flag == FieldFlag_using && name_count > 1) {
  3072. err = true;
  3073. syntax_error(f->curr_token, "Cannot apply 'using' to more than one of the same type");
  3074. }
  3075. if ((allowed_flags & m.flag) == 0) {
  3076. err = true;
  3077. char const *prefix = "";
  3078. if (m.token_kind == Token_Hash) {
  3079. prefix = "#";
  3080. }
syntax_error(f->curr_token, "'%s%.*s' is not allowed within this field list", prefix, LIT(m.name));
  3082. }
  3083. }
  3084. if (err) {
  3085. set_flags &= ~m.flag;
  3086. }
  3087. }
  3088. return set_flags;
  3089. }
  3090. struct AstAndFlags {
  3091. Ast *node;
  3092. u32 flags;
  3093. };
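// Reinterprets an already-parsed expression list as the name list of a field declaration, diagnosing non-identifiers and misplaced prefixes.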
  3094. Array<Ast *> convert_to_ident_list(AstFile *f, Array<AstAndFlags> list, bool ignore_flags, bool allow_poly_names) {
  3095. auto idents = array_make<Ast *>(heap_allocator(), 0, list.count);
  3096. // Convert to ident list
  3097. for_array(i, list) {
  3098. Ast *ident = list[i].node;
  3099. if (!ignore_flags) {
  3100. if (i != 0) {
  3101. syntax_error(ident, "Illegal use of prefixes in parameter list");
  3102. }
  3103. }
  3104. switch (ident->kind) {
  3105. case Ast_Ident:
  3106. case Ast_BadExpr:
  3107. break;
  3108. case Ast_PolyType:
  3109. if (allow_poly_names) {
  3110. if (ident->PolyType.specialization == nullptr) {
  3111. break;
  3112. } else {
  3113. syntax_error(ident, "Expected a polymorphic identifier without any specialization");
  3114. }
  3115. } else {
  3116. syntax_error(ident, "Expected a non-polymorphic identifier");
  3117. }
  3118. /*fallthrough*/
  3119. default:
  3120. syntax_error(ident, "Expected an identifier");
  3121. ident = ast_ident(f, blank_token);
  3122. break;
  3123. }
  3124. array_add(&idents, ident);
  3125. }
  3126. return idents;
  3127. }
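// Expects a ',' between fields; a ';' is accepted with an error so that parsing can continue.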
  3128. bool parse_expect_field_separator(AstFile *f, Ast *param) {
  3129. Token token = f->curr_token;
  3130. if (allow_token(f, Token_Comma)) {
  3131. return true;
  3132. }
  3133. if (token.kind == Token_Semicolon) {
  3134. String p = token_to_string(token);
  3135. syntax_error(token_end_of_line(f, f->prev_token), "Expected a comma, got a %.*s", LIT(p));
  3136. advance_token(f);
  3137. return true;
  3138. }
  3139. return false;
  3140. }
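// Parses the field list of a struct body up to the closing brace and reports the total number of field names.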
  3141. Ast *parse_struct_field_list(AstFile *f, isize *name_count_) {
  3142. CommentGroup *docs = f->lead_comment;
  3143. Token start_token = f->curr_token;
  3144. auto decls = array_make<Ast *>(heap_allocator());
  3145. isize total_name_count = 0;
  3146. Ast *params = parse_field_list(f, &total_name_count, FieldFlag_Struct, Token_CloseBrace, false, false);
  3147. if (name_count_) *name_count_ = total_name_count;
  3148. return params;
  3149. }
  3150. // Returns true if any are polymorphic names
  3151. bool check_procedure_name_list(Array<Ast *> const &names) {
  3152. if (names.count == 0) {
  3153. return false;
  3154. }
  3155. bool first_is_polymorphic = names[0]->kind == Ast_PolyType;
  3156. bool any_polymorphic_names = first_is_polymorphic;
  3157. for (isize i = 1; i < names.count; i++) {
  3158. Ast *name = names[i];
  3159. if (first_is_polymorphic) {
  3160. if (name->kind == Ast_PolyType) {
  3161. any_polymorphic_names = true;
  3162. } else {
  3163. syntax_error(name, "Mixture of polymorphic and non-polymorphic identifiers");
  3164. return any_polymorphic_names;
  3165. }
  3166. } else {
  3167. if (name->kind == Ast_PolyType) {
  3168. any_polymorphic_names = true;
  3169. syntax_error(name, "Mixture of polymorphic and non-polymorphic identifiers");
  3170. return any_polymorphic_names;
  3171. } else {
  3172. // Okay
  3173. }
  3174. }
  3175. }
  3176. return any_polymorphic_names;
  3177. }
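// Parses a general field list (parameters, results, struct fields): either bare types or 'names: type = default' groups, applying prefix flags, tags, and variadic checks.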
  3178. Ast *parse_field_list(AstFile *f, isize *name_count_, u32 allowed_flags, TokenKind follow, bool allow_default_parameters, bool allow_typeid_token) {
  3179. Token start_token = f->curr_token;
  3180. CommentGroup *docs = f->lead_comment;
  3181. auto params = array_make<Ast *>(heap_allocator());
  3182. auto list = array_make<AstAndFlags>(heap_allocator());
  3183. defer (array_free(&list));
  3184. bool allow_poly_names = allow_typeid_token;
  3185. isize total_name_count = 0;
  3186. bool allow_ellipsis = allowed_flags&FieldFlag_ellipsis;
  3187. bool seen_ellipsis = false;
  3188. bool is_signature = (allowed_flags & FieldFlag_Signature) == FieldFlag_Signature;
  3189. while (f->curr_token.kind != follow &&
  3190. f->curr_token.kind != Token_Colon &&
  3191. f->curr_token.kind != Token_EOF) {
  3192. u32 flags = parse_field_prefixes(f);
  3193. Ast *param = parse_var_type(f, allow_ellipsis, allow_typeid_token);
  3194. if (param->kind == Ast_Ellipsis) {
  3195. if (seen_ellipsis) syntax_error(param, "Extra variadic parameter after ellipsis");
  3196. seen_ellipsis = true;
  3197. } else if (seen_ellipsis) {
  3198. syntax_error(param, "Extra parameter after ellipsis");
  3199. }
  3200. AstAndFlags naf = {param, flags};
  3201. array_add(&list, naf);
  3202. if (!allow_token(f, Token_Comma)) {
  3203. break;
  3204. }
  3205. }
  3206. if (f->curr_token.kind == Token_Colon) {
  3207. Array<Ast *> names = convert_to_ident_list(f, list, true, allow_poly_names); // Copy for semantic reasons
  3208. if (names.count == 0) {
  3209. syntax_error(f->curr_token, "Empty field declaration");
  3210. }
  3211. bool any_polymorphic_names = check_procedure_name_list(names);
  3212. u32 set_flags = 0;
  3213. if (list.count > 0) {
  3214. set_flags = list[0].flags;
  3215. }
  3216. set_flags = check_field_prefixes(f, names.count, allowed_flags, set_flags);
  3217. total_name_count += names.count;
  3218. Ast *type = nullptr;
  3219. Ast *default_value = nullptr;
  3220. Token tag = {};
  3221. expect_token_after(f, Token_Colon, "field list");
  3222. if (f->curr_token.kind != Token_Eq) {
  3223. type = parse_var_type(f, allow_ellipsis, allow_typeid_token);
  3224. Ast *tt = unparen_expr(type);
  3225. if (tt == nullptr) {
  3226. syntax_error(f->prev_token, "Invalid type expression in field list");
  3227. } else if (is_signature && !any_polymorphic_names && tt->kind == Ast_TypeidType && tt->TypeidType.specialization != nullptr) {
  3228. syntax_error(type, "Specialization of typeid is not allowed without polymorphic names");
  3229. }
  3230. }
  3231. if (allow_token(f, Token_Eq)) {
  3232. default_value = parse_expr(f, false);
  3233. if (!allow_default_parameters) {
  3234. syntax_error(f->curr_token, "Default parameters are only allowed for procedures");
  3235. default_value = nullptr;
  3236. }
  3237. }
  3238. if (default_value != nullptr && names.count > 1) {
  3239. syntax_error(f->curr_token, "Default parameters can only be applied to single values");
  3240. }
  3241. if (allowed_flags == FieldFlag_Struct && default_value != nullptr) {
  3242. syntax_error(default_value, "Default parameters are not allowed for structs");
  3243. default_value = nullptr;
  3244. }
  3245. if (type != nullptr && type->kind == Ast_Ellipsis) {
  3246. if (seen_ellipsis) syntax_error(type, "Extra variadic parameter after ellipsis");
  3247. seen_ellipsis = true;
  3248. if (names.count != 1) {
  3249. syntax_error(type, "Variadic parameters can only have one field name");
  3250. }
  3251. } else if (seen_ellipsis && default_value == nullptr) {
  3252. syntax_error(f->curr_token, "Extra parameter after ellipsis without a default value");
  3253. }
  3254. if (type != nullptr && default_value == nullptr) {
  3255. if (f->curr_token.kind == Token_String) {
  3256. tag = expect_token(f, Token_String);
  3257. if ((allowed_flags & FieldFlag_Tags) == 0) {
  3258. syntax_error(tag, "Field tags are only allowed within structures");
  3259. }
  3260. }
  3261. }
  3262. parse_expect_field_separator(f, type);
  3263. Ast *param = ast_field(f, names, type, default_value, set_flags, tag, docs, f->line_comment);
  3264. array_add(&params, param);
  3265. while (f->curr_token.kind != follow &&
  3266. f->curr_token.kind != Token_EOF) {
  3267. CommentGroup *docs = f->lead_comment;
  3268. u32 set_flags = parse_field_prefixes(f);
  3269. Token tag = {};
  3270. Array<Ast *> names = parse_ident_list(f, allow_poly_names);
  3271. if (names.count == 0) {
  3272. syntax_error(f->curr_token, "Empty field declaration");
  3273. break;
  3274. }
  3275. bool any_polymorphic_names = check_procedure_name_list(names);
  3276. set_flags = check_field_prefixes(f, names.count, allowed_flags, set_flags);
  3277. total_name_count += names.count;
  3278. Ast *type = nullptr;
  3279. Ast *default_value = nullptr;
  3280. expect_token_after(f, Token_Colon, "field list");
  3281. if (f->curr_token.kind != Token_Eq) {
  3282. type = parse_var_type(f, allow_ellipsis, allow_typeid_token);
  3283. Ast *tt = unparen_expr(type);
  3284. if (is_signature && !any_polymorphic_names && tt->kind == Ast_TypeidType && tt->TypeidType.specialization != nullptr) {
  3285. syntax_error(type, "Specialization of typeid is not allowed without polymorphic names");
  3286. }
  3287. }
  3288. if (allow_token(f, Token_Eq)) {
  3289. default_value = parse_expr(f, false);
  3290. if (!allow_default_parameters) {
  3291. syntax_error(f->curr_token, "Default parameters are only allowed for procedures");
  3292. default_value = nullptr;
  3293. }
  3294. }
  3295. if (default_value != nullptr && names.count > 1) {
  3296. syntax_error(f->curr_token, "Default parameters can only be applied to single values");
  3297. }
  3298. if (type != nullptr && type->kind == Ast_Ellipsis) {
  3299. if (seen_ellipsis) syntax_error(type, "Extra variadic parameter after ellipsis");
  3300. seen_ellipsis = true;
  3301. if (names.count != 1) {
  3302. syntax_error(type, "Variadic parameters can only have one field name");
  3303. }
  3304. } else if (seen_ellipsis && default_value == nullptr) {
  3305. syntax_error(f->curr_token, "Extra parameter after ellipsis without a default value");
  3306. }
  3307. if (type != nullptr && default_value == nullptr) {
  3308. if (f->curr_token.kind == Token_String) {
  3309. tag = expect_token(f, Token_String);
  3310. if ((allowed_flags & FieldFlag_Tags) == 0) {
  3311. syntax_error(tag, "Field tags are only allowed within structures");
  3312. }
  3313. }
  3314. }
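// NOTE: at this point 'param' still refers to the field declared before this loop; the declaration on the next line shadows it for the remainder of the iteration.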
  3315. bool ok = parse_expect_field_separator(f, param);
  3316. Ast *param = ast_field(f, names, type, default_value, set_flags, tag, docs, f->line_comment);
  3317. array_add(&params, param);
  3318. if (!ok) {
  3319. break;
  3320. }
  3321. }
  3322. if (name_count_) *name_count_ = total_name_count;
  3323. return ast_field_list(f, start_token, params);
  3324. }
  3325. for_array(i, list) {
  3326. Ast *type = list[i].node;
  3327. Token token = blank_token;
  3328. if (allowed_flags&FieldFlag_Results) {
  3329. // NOTE(bill): Make this nothing and not `_`
  3330. token.string = str_lit("");
  3331. }
  3332. auto names = array_make<Ast *>(heap_allocator(), 1);
  3333. token.pos = ast_token(type).pos;
  3334. names[0] = ast_ident(f, token);
  3335. u32 flags = check_field_prefixes(f, list.count, allowed_flags, list[i].flags);
  3336. Token tag = {};
  3337. Ast *param = ast_field(f, names, list[i].node, nullptr, flags, tag, docs, f->line_comment);
  3338. array_add(&params, param);
  3339. }
  3340. if (name_count_) *name_count_ = total_name_count;
  3341. return ast_field_list(f, start_token, params);
  3342. }
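// Parses either a type or an identifier expression with type-parsing rules enabled (allow_type set and a negative expression level).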
  3343. Ast *parse_type_or_ident(AstFile *f) {
  3344. bool prev_allow_type = f->allow_type;
  3345. isize prev_expr_level = f->expr_level;
  3346. defer ({
  3347. f->allow_type = prev_allow_type;
  3348. f->expr_level = prev_expr_level;
  3349. });
  3350. f->allow_type = true;
  3351. f->expr_level = -1;
  3352. bool lhs = true;
  3353. Ast *operand = parse_operand(f, lhs);
  3354. Ast *type = parse_atom_expr(f, operand, lhs);
  3355. return type;
  3356. }
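// Parses a brace-delimited statement list with the expression level reset to zero, since a body may occur inside an expression.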
  3357. Ast *parse_body(AstFile *f) {
  3358. Array<Ast *> stmts = {};
  3359. Token open, close;
  3360. isize prev_expr_level = f->expr_level;
  3361. // NOTE(bill): The body may be within an expression so reset to zero
  3362. f->expr_level = 0;
  3363. open = expect_token(f, Token_OpenBrace);
  3364. stmts = parse_stmt_list(f);
  3365. close = expect_token(f, Token_CloseBrace);
  3366. f->expr_level = prev_expr_level;
  3367. return ast_block_stmt(f, stmts, open, close);
  3368. }
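// Consumes the ';' separating a control statement's init from its condition; when the next token is '{', only an explicit ';' (not an inserted newline) counts.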
  3369. bool parse_control_statement_semicolon_separator(AstFile *f) {
  3370. Token tok = peek_token(f);
  3371. if (tok.kind != Token_OpenBrace) {
  3372. return allow_token(f, Token_Semicolon);
  3373. }
  3374. if (f->curr_token.string == ";") {
  3375. return allow_token(f, Token_Semicolon);
  3376. }
  3377. return false;
  3378. }
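// Parses 'if [init;] cond' with either a 'do' single-statement body or a block, plus optional 'else'; not allowed at file scope.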
  3379. Ast *parse_if_stmt(AstFile *f) {
  3380. if (f->curr_proc == nullptr) {
  3381. syntax_error(f->curr_token, "You cannot use an if statement in the file scope");
  3382. return ast_bad_stmt(f, f->curr_token, f->curr_token);
  3383. }
  3384. Token token = expect_token(f, Token_if);
  3385. Ast *init = nullptr;
  3386. Ast *cond = nullptr;
  3387. Ast *body = nullptr;
  3388. Ast *else_stmt = nullptr;
  3389. isize prev_level = f->expr_level;
  3390. f->expr_level = -1;
  3391. bool prev_allow_in_expr = f->allow_in_expr;
  3392. f->allow_in_expr = true;
  3393. if (allow_token(f, Token_Semicolon)) {
  3394. cond = parse_expr(f, false);
  3395. } else {
  3396. init = parse_simple_stmt(f, StmtAllowFlag_None);
  3397. if (parse_control_statement_semicolon_separator(f)) {
  3398. cond = parse_expr(f, false);
  3399. } else {
  3400. cond = convert_stmt_to_expr(f, init, str_lit("boolean expression"));
  3401. init = nullptr;
  3402. }
  3403. }
  3404. f->expr_level = prev_level;
  3405. f->allow_in_expr = prev_allow_in_expr;
  3406. if (cond == nullptr) {
  3407. syntax_error(f->curr_token, "Expected condition for if statement");
  3408. }
  3409. if (allow_token(f, Token_do)) {
  3410. body = convert_stmt_to_body(f, parse_stmt(f));
  3411. if (build_context.disallow_do) {
  3412. syntax_error(body, "'do' has been disallowed");
  3413. } else if (!ast_on_same_line(cond, body)) {
syntax_error(body, "The body of a 'do' must be on the same line as the if condition");
  3415. }
  3416. } else {
  3417. body = parse_block_stmt(f, false);
  3418. }
  3419. skip_possible_newline_for_literal(f);
  3420. if (f->curr_token.kind == Token_else) {
  3421. Token else_token = expect_token(f, Token_else);
  3422. switch (f->curr_token.kind) {
  3423. case Token_if:
  3424. else_stmt = parse_if_stmt(f);
  3425. break;
  3426. case Token_OpenBrace:
  3427. else_stmt = parse_block_stmt(f, false);
  3428. break;
  3429. case Token_do: {
  3430. expect_token(f, Token_do);
  3431. else_stmt = convert_stmt_to_body(f, parse_stmt(f));
  3432. if (build_context.disallow_do) {
  3433. syntax_error(else_stmt, "'do' has been disallowed");
  3434. } else if (!ast_on_same_line(else_token, else_stmt)) {
syntax_error(else_stmt, "The body of a 'do' must be on the same line as 'else'");
  3436. }
  3437. } break;
  3438. default:
syntax_error(f->curr_token, "Expected an if statement or a block statement");
  3440. else_stmt = ast_bad_stmt(f, f->curr_token, f->tokens[f->curr_token_index+1]);
  3441. break;
  3442. }
  3443. }
  3444. return ast_if_stmt(f, token, init, cond, body, else_stmt);
  3445. }
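// Parses compile-time 'when cond' statements with an optional 'else'; unlike 'if', these are permitted at file scope.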
  3446. Ast *parse_when_stmt(AstFile *f) {
  3447. Token token = expect_token(f, Token_when);
  3448. Ast *cond = nullptr;
  3449. Ast *body = nullptr;
  3450. Ast *else_stmt = nullptr;
  3451. isize prev_level = f->expr_level;
  3452. f->expr_level = -1;
  3453. cond = parse_expr(f, false);
  3454. f->expr_level = prev_level;
  3455. if (cond == nullptr) {
  3456. syntax_error(f->curr_token, "Expected condition for when statement");
  3457. }
  3458. if (allow_token(f, Token_do)) {
  3459. body = convert_stmt_to_body(f, parse_stmt(f));
  3460. if (build_context.disallow_do) {
  3461. syntax_error(body, "'do' has been disallowed");
  3462. } else if (!ast_on_same_line(cond, body)) {
syntax_error(body, "The body of a 'do' must be on the same line as the when statement");
  3464. }
  3465. } else {
  3466. body = parse_block_stmt(f, true);
  3467. }
  3468. skip_possible_newline_for_literal(f);
  3469. if (f->curr_token.kind == Token_else) {
  3470. Token else_token = expect_token(f, Token_else);
  3471. switch (f->curr_token.kind) {
  3472. case Token_when:
  3473. else_stmt = parse_when_stmt(f);
  3474. break;
  3475. case Token_OpenBrace:
  3476. else_stmt = parse_block_stmt(f, true);
  3477. break;
  3478. case Token_do: {
  3479. expect_token(f, Token_do);
  3480. else_stmt = convert_stmt_to_body(f, parse_stmt(f));
  3481. if (build_context.disallow_do) {
  3482. syntax_error(else_stmt, "'do' has been disallowed");
  3483. } else if (!ast_on_same_line(else_token, else_stmt)) {
syntax_error(else_stmt, "The body of a 'do' must be on the same line as 'else'");
  3485. }
  3486. } break;
  3487. default:
syntax_error(f->curr_token, "Expected a when statement or a block statement");
  3489. else_stmt = ast_bad_stmt(f, f->curr_token, f->tokens[f->curr_token_index+1]);
  3490. break;
  3491. }
  3492. }
  3493. return ast_when_stmt(f, token, cond, body, else_stmt);
  3494. }
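// Parses 'return' with an optional comma-separated result list; rejected at file scope and inside expressions.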
  3495. Ast *parse_return_stmt(AstFile *f) {
  3496. Token token = expect_token(f, Token_return);
  3497. if (f->curr_proc == nullptr) {
  3498. syntax_error(f->curr_token, "You cannot use a return statement in the file scope");
  3499. return ast_bad_stmt(f, token, f->curr_token);
  3500. }
  3501. if (f->expr_level > 0) {
  3502. syntax_error(f->curr_token, "You cannot use a return statement within an expression");
  3503. return ast_bad_stmt(f, token, f->curr_token);
  3504. }
  3505. auto results = array_make<Ast *>(heap_allocator());
  3506. while (f->curr_token.kind != Token_Semicolon && f->curr_token.kind != Token_CloseBrace) {
  3507. Ast *arg = parse_expr(f, false);
  3508. array_add(&results, arg);
  3509. if (f->curr_token.kind != Token_Comma ||
  3510. f->curr_token.kind == Token_EOF) {
  3511. break;
  3512. }
  3513. advance_token(f);
  3514. }
  3515. Ast *end = nullptr;
  3516. if (results.count > 0) {
  3517. end = results[results.count-1];
  3518. }
  3519. expect_semicolon(f, end);
  3520. return ast_return_stmt(f, token, results);
  3521. }
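// Parses every 'for' form: infinite, condition-only, C-style 'init; cond; post', and range loops ('for x in y' or 'for in y').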
  3522. Ast *parse_for_stmt(AstFile *f) {
  3523. if (f->curr_proc == nullptr) {
  3524. syntax_error(f->curr_token, "You cannot use a for statement in the file scope");
  3525. return ast_bad_stmt(f, f->curr_token, f->curr_token);
  3526. }
  3527. Token token = expect_token(f, Token_for);
  3528. Ast *init = nullptr;
  3529. Ast *cond = nullptr;
  3530. Ast *post = nullptr;
  3531. Ast *body = nullptr;
  3532. bool is_range = false;
  3533. if (f->curr_token.kind != Token_OpenBrace &&
  3534. f->curr_token.kind != Token_do) {
  3535. isize prev_level = f->expr_level;
  3536. defer (f->expr_level = prev_level);
  3537. f->expr_level = -1;
  3538. if (f->curr_token.kind == Token_in) {
  3539. Token in_token = expect_token(f, Token_in);
  3540. Ast *rhs = nullptr;
  3541. bool prev_allow_range = f->allow_range;
  3542. f->allow_range = true;
  3543. rhs = parse_expr(f, false);
  3544. f->allow_range = prev_allow_range;
  3545. if (allow_token(f, Token_do)) {
  3546. body = convert_stmt_to_body(f, parse_stmt(f));
  3547. if (build_context.disallow_do) {
  3548. syntax_error(body, "'do' has been disallowed");
  3549. } else if (!ast_on_same_line(token, body)) {
syntax_error(body, "The body of a 'do' must be on the same line as the 'for' token");
  3551. }
  3552. } else {
  3553. body = parse_block_stmt(f, false);
  3554. }
  3555. return ast_range_stmt(f, token, {}, in_token, rhs, body);
  3556. }
  3557. if (f->curr_token.kind != Token_Semicolon) {
  3558. cond = parse_simple_stmt(f, StmtAllowFlag_In);
  3559. if (cond->kind == Ast_AssignStmt && cond->AssignStmt.op.kind == Token_in) {
  3560. is_range = true;
  3561. }
  3562. }
  3563. if (!is_range && parse_control_statement_semicolon_separator(f)) {
  3564. init = cond;
  3565. cond = nullptr;
  3566. if (f->curr_token.kind != Token_Semicolon) {
  3567. cond = parse_simple_stmt(f, StmtAllowFlag_None);
  3568. }
  3569. expect_semicolon(f, cond);
  3570. if (f->curr_token.kind != Token_OpenBrace &&
  3571. f->curr_token.kind != Token_do) {
  3572. post = parse_simple_stmt(f, StmtAllowFlag_None);
  3573. }
  3574. }
  3575. }
  3576. if (allow_token(f, Token_do)) {
  3577. body = convert_stmt_to_body(f, parse_stmt(f));
  3578. if (build_context.disallow_do) {
  3579. syntax_error(body, "'do' has been disallowed");
  3580. } else if (!ast_on_same_line(token, body)) {
syntax_error(body, "The body of a 'do' must be on the same line as the 'for' token");
  3582. }
  3583. } else {
  3584. body = parse_block_stmt(f, false);
  3585. }
  3586. if (is_range) {
  3587. GB_ASSERT(cond->kind == Ast_AssignStmt);
  3588. Token in_token = cond->AssignStmt.op;
  3589. Slice<Ast *> vals = cond->AssignStmt.lhs;
  3590. Ast *rhs = nullptr;
  3591. if (cond->AssignStmt.rhs.count > 0) {
  3592. rhs = cond->AssignStmt.rhs[0];
  3593. }
  3594. return ast_range_stmt(f, token, vals, in_token, rhs, body);
  3595. }
  3596. cond = convert_stmt_to_expr(f, cond, str_lit("boolean expression"));
  3597. return ast_for_stmt(f, token, init, cond, post, body);
  3598. }
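// Parses a 'case expr, ...:' clause and its statements; ranges and 'in' expressions are only permitted for value (non-type) switches.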
  3599. Ast *parse_case_clause(AstFile *f, bool is_type) {
  3600. Token token = f->curr_token;
  3601. Array<Ast *> list = {};
  3602. expect_token(f, Token_case);
  3603. bool prev_allow_range = f->allow_range;
  3604. bool prev_allow_in_expr = f->allow_in_expr;
  3605. f->allow_range = !is_type;
  3606. f->allow_in_expr = !is_type;
  3607. if (f->curr_token.kind != Token_Colon) {
  3608. list = parse_rhs_expr_list(f);
  3609. }
  3610. f->allow_range = prev_allow_range;
  3611. f->allow_in_expr = prev_allow_in_expr;
  3612. expect_token(f, Token_Colon);
  3613. Array<Ast *> stmts = parse_stmt_list(f);
  3614. return ast_case_clause(f, token, list, stmts);
  3615. }
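// Parses 'switch' statements, distinguishing type switches ('switch x in y' or 'switch in y') from value switches; not allowed at file scope.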
  3616. Ast *parse_switch_stmt(AstFile *f) {
  3617. if (f->curr_proc == nullptr) {
  3618. syntax_error(f->curr_token, "You cannot use a switch statement in the file scope");
  3619. return ast_bad_stmt(f, f->curr_token, f->curr_token);
  3620. }
  3621. Token token = expect_token(f, Token_switch);
  3622. Ast *init = nullptr;
  3623. Ast *tag = nullptr;
  3624. Ast *body = nullptr;
  3625. Token open, close;
  3626. bool is_type_switch = false;
  3627. auto list = array_make<Ast *>(heap_allocator());
  3628. if (f->curr_token.kind != Token_OpenBrace) {
  3629. isize prev_level = f->expr_level;
  3630. f->expr_level = -1;
  3631. defer (f->expr_level = prev_level);
  3632. if (allow_token(f, Token_in)) {
  3633. auto lhs = array_make<Ast *>(heap_allocator(), 0, 1);
  3634. auto rhs = array_make<Ast *>(heap_allocator(), 0, 1);
  3635. Token blank_ident = token;
  3636. blank_ident.kind = Token_Ident;
  3637. blank_ident.string = str_lit("_");
  3638. Ast *blank = ast_ident(f, blank_ident);
  3639. array_add(&lhs, blank);
  3640. array_add(&rhs, parse_expr(f, true));
  3641. tag = ast_assign_stmt(f, token, lhs, rhs);
  3642. is_type_switch = true;
  3643. } else {
  3644. tag = parse_simple_stmt(f, StmtAllowFlag_In);
  3645. if (tag->kind == Ast_AssignStmt && tag->AssignStmt.op.kind == Token_in) {
  3646. is_type_switch = true;
  3647. } else if (parse_control_statement_semicolon_separator(f)) {
  3648. init = tag;
  3649. tag = nullptr;
  3650. if (f->curr_token.kind != Token_OpenBrace) {
  3651. tag = parse_simple_stmt(f, StmtAllowFlag_None);
  3652. }
  3653. }
  3654. }
  3655. }
  3656. skip_possible_newline(f);
  3657. open = expect_token(f, Token_OpenBrace);
  3658. while (f->curr_token.kind == Token_case) {
  3659. array_add(&list, parse_case_clause(f, is_type_switch));
  3660. }
  3661. close = expect_token(f, Token_CloseBrace);
  3662. body = ast_block_stmt(f, list, open, close);
  3663. if (is_type_switch) {
  3664. return ast_type_switch_stmt(f, token, tag, body);
  3665. }
  3666. tag = convert_stmt_to_expr(f, tag, str_lit("switch expression"));
  3667. return ast_switch_stmt(f, token, init, tag, body);
  3668. }
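// Parses 'defer stmt', diagnosing an empty statement, a nested 'defer', or a deferred 'return'.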
  3669. Ast *parse_defer_stmt(AstFile *f) {
  3670. if (f->curr_proc == nullptr) {
  3671. syntax_error(f->curr_token, "You cannot use a defer statement in the file scope");
  3672. return ast_bad_stmt(f, f->curr_token, f->curr_token);
  3673. }
  3674. Token token = expect_token(f, Token_defer);
  3675. Ast *stmt = parse_stmt(f);
  3676. switch (stmt->kind) {
  3677. case Ast_EmptyStmt:
  3678. syntax_error(token, "Empty statement after defer (e.g. ';')");
  3679. break;
  3680. case Ast_DeferStmt:
  3681. syntax_error(token, "You cannot defer a defer statement");
  3682. stmt = stmt->DeferStmt.stmt;
  3683. break;
  3684. case Ast_ReturnStmt:
  3685. syntax_error(token, "You cannot defer a return statement");
  3686. break;
  3687. }
  3688. return ast_defer_stmt(f, token, stmt);
  3689. }
  3690. enum ImportDeclKind {
  3691. ImportDecl_Standard,
  3692. ImportDecl_Using,
  3693. };
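// Parses 'import [name] "path"'; only valid at file scope, and the deprecated 'using import' form is reported as an error.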
  3694. Ast *parse_import_decl(AstFile *f, ImportDeclKind kind) {
  3695. CommentGroup *docs = f->lead_comment;
  3696. Token token = expect_token(f, Token_import);
  3697. Token import_name = {};
  3698. bool is_using = kind != ImportDecl_Standard;
  3699. switch (f->curr_token.kind) {
  3700. case Token_Ident:
  3701. import_name = advance_token(f);
  3702. break;
  3703. default:
  3704. import_name.pos = f->curr_token.pos;
  3705. break;
  3706. }
  3707. if (!is_using && is_blank_ident(import_name)) {
  3708. syntax_error(import_name, "Illegal import name: '_'");
  3709. }
  3710. Token file_path = expect_token_after(f, Token_String, "import");
  3711. Ast *s = nullptr;
  3712. if (f->curr_proc != nullptr) {
  3713. syntax_error(import_name, "You cannot use 'import' within a procedure. This must be done at the file scope");
  3714. s = ast_bad_decl(f, import_name, file_path);
  3715. } else {
  3716. s = ast_import_decl(f, token, is_using, file_path, import_name, docs, f->line_comment);
  3717. array_add(&f->imports, s);
  3718. }
  3719. if (is_using) {
  3720. syntax_error(import_name, "'using import' is not allowed, please use the import name explicitly");
  3721. }
  3722. expect_semicolon(f, s);
  3723. return s;
  3724. }
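// Dispatches 'foreign' declarations: a foreign block, or 'foreign import [name]' followed by one path or a braced list of paths.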
  3725. Ast *parse_foreign_decl(AstFile *f) {
  3726. CommentGroup *docs = f->lead_comment;
  3727. Token token = expect_token(f, Token_foreign);
  3728. switch (f->curr_token.kind) {
  3729. case Token_Ident:
  3730. case Token_OpenBrace:
  3731. return parse_foreign_block(f, token);
  3732. case Token_import: {
  3733. Token import_token = expect_token(f, Token_import);
  3734. Token lib_name = {};
  3735. switch (f->curr_token.kind) {
  3736. case Token_Ident:
  3737. lib_name = advance_token(f);
  3738. break;
  3739. default:
  3740. lib_name.pos = token.pos;
  3741. break;
  3742. }
  3743. if (is_blank_ident(lib_name)) {
  3744. syntax_error(lib_name, "Illegal foreign import name: '_'");
  3745. }
  3746. Array<Token> filepaths = {};
  3747. if (allow_token(f, Token_OpenBrace)) {
  3748. array_init(&filepaths, heap_allocator());
  3749. while (f->curr_token.kind != Token_CloseBrace &&
  3750. f->curr_token.kind != Token_EOF) {
  3751. Token path = expect_token(f, Token_String);
  3752. array_add(&filepaths, path);
  3753. if (!allow_token(f, Token_Comma)) {
  3754. break;
  3755. }
  3756. }
  3757. expect_token(f, Token_CloseBrace);
  3758. } else {
  3759. filepaths = array_make<Token>(heap_allocator(), 0, 1);
  3760. Token path = expect_token(f, Token_String);
  3761. array_add(&filepaths, path);
  3762. }
  3763. Ast *s = nullptr;
  3764. if (filepaths.count == 0) {
  3765. syntax_error(lib_name, "foreign import without any paths");
  3766. s = ast_bad_decl(f, lib_name, f->curr_token);
  3767. } else if (f->curr_proc != nullptr) {
  3768. syntax_error(lib_name, "You cannot use foreign import within a procedure. This must be done at the file scope");
  3769. s = ast_bad_decl(f, lib_name, filepaths[0]);
  3770. } else {
  3771. s = ast_foreign_import_decl(f, token, filepaths, lib_name, docs, f->line_comment);
  3772. }
  3773. expect_semicolon(f, s);
  3774. return s;
  3775. }
  3776. }
  3777. syntax_error(token, "Invalid foreign declaration");
  3778. return ast_bad_decl(f, token, f->curr_token);
  3779. }
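// Parses an '@(...)' (or single-identifier '@name') attribute and attaches it to the following value, foreign block, or foreign import declaration.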
  3780. Ast *parse_attribute(AstFile *f, Token token, TokenKind open_kind, TokenKind close_kind) {
  3781. Array<Ast *> elems = {};
  3782. Token open = {};
  3783. Token close = {};
  3784. if (f->curr_token.kind == Token_Ident) {
  3785. elems = array_make<Ast *>(heap_allocator(), 0, 1);
  3786. Ast *elem = parse_ident(f);
  3787. array_add(&elems, elem);
  3788. } else {
  3789. open = expect_token(f, open_kind);
  3790. f->expr_level++;
  3791. if (f->curr_token.kind != close_kind) {
  3792. elems = array_make<Ast *>(heap_allocator());
  3793. while (f->curr_token.kind != close_kind &&
  3794. f->curr_token.kind != Token_EOF) {
  3795. Ast *elem = nullptr;
  3796. elem = parse_ident(f);
  3797. if (f->curr_token.kind == Token_Eq) {
  3798. Token eq = expect_token(f, Token_Eq);
  3799. Ast *value = parse_value(f);
  3800. elem = ast_field_value(f, elem, value, eq);
  3801. }
  3802. array_add(&elems, elem);
  3803. if (!allow_token(f, Token_Comma)) {
  3804. break;
  3805. }
  3806. }
  3807. }
  3808. f->expr_level--;
  3809. close = expect_closing(f, close_kind, str_lit("attribute"));
  3810. }
  3811. Ast *attribute = ast_attribute(f, token, open, close, elems);
  3812. skip_possible_newline(f);
  3813. Ast *decl = parse_stmt(f);
  3814. if (decl->kind == Ast_ValueDecl) {
  3815. array_add(&decl->ValueDecl.attributes, attribute);
  3816. } else if (decl->kind == Ast_ForeignBlockDecl) {
  3817. array_add(&decl->ForeignBlockDecl.attributes, attribute);
  3818. } else if (decl->kind == Ast_ForeignImportDecl) {
  3819. array_add(&decl->ForeignImportDecl.attributes, attribute);
} else {
  3821. syntax_error(decl, "Expected a value or foreign declaration after an attribute, got %.*s", LIT(ast_strings[decl->kind]));
  3822. return ast_bad_stmt(f, token, f->curr_token);
  3823. }
  3824. return decl;
  3825. }
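// Parses '#unroll for' (or the deprecated 'inline for') range loops with one or two iteration variables.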
  3826. Ast *parse_unrolled_for_loop(AstFile *f, Token unroll_token) {
  3827. if (unroll_token.kind == Token_inline) {
syntax_warning(unroll_token, "'inline for' is deprecated in favour of '#unroll for'");
  3829. }
  3830. Token for_token = expect_token(f, Token_for);
  3831. Ast *val0 = nullptr;
  3832. Ast *val1 = nullptr;
  3833. Token in_token = {};
  3834. Ast *expr = nullptr;
  3835. Ast *body = nullptr;
  3836. bool bad_stmt = false;
  3837. if (f->curr_token.kind != Token_in) {
  3838. Array<Ast *> idents = parse_ident_list(f, false);
  3839. switch (idents.count) {
  3840. case 1:
  3841. val0 = idents[0];
  3842. break;
  3843. case 2:
  3844. val0 = idents[0];
  3845. val1 = idents[1];
  3846. break;
  3847. default:
  3848. syntax_error(for_token, "Expected either 1 or 2 identifiers");
  3849. bad_stmt = true;
  3850. break;
  3851. }
  3852. }
  3853. in_token = expect_token(f, Token_in);
  3854. bool prev_allow_range = f->allow_range;
  3855. isize prev_level = f->expr_level;
  3856. f->allow_range = true;
  3857. f->expr_level = -1;
  3858. expr = parse_expr(f, false);
  3859. f->expr_level = prev_level;
  3860. f->allow_range = prev_allow_range;
  3861. if (allow_token(f, Token_do)) {
  3862. body = convert_stmt_to_body(f, parse_stmt(f));
  3863. if (build_context.disallow_do) {
  3864. syntax_error(body, "'do' has been disallowed");
  3865. } else if (!ast_on_same_line(for_token, body)) {
syntax_error(body, "The body of a 'do' must be on the same line as the 'for' token");
  3867. }
  3868. } else {
  3869. body = parse_block_stmt(f, false);
  3870. }
  3871. if (bad_stmt) {
  3872. return ast_bad_stmt(f, unroll_token, f->curr_token);
  3873. }
  3874. return ast_unroll_range_stmt(f, unroll_token, for_token, val0, val1, in_token, expr, body);
  3875. }
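// Main statement dispatcher: routes on the current token to the specific statement and declaration parsers.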
Ast *parse_stmt(AstFile *f) {
	Ast *s = nullptr;
	Token token = f->curr_token;
	switch (token.kind) {
	// Operands
	case Token_inline:
		if (peek_token_kind(f, Token_for)) {
			Token unroll_token = expect_token(f, Token_inline);
			return parse_unrolled_for_loop(f, unroll_token);
		}
		/* fallthrough */
	case Token_no_inline:
	case Token_context: // Also allows for `context =`
	case Token_proc:
	case Token_Ident:
	case Token_Integer:
	case Token_Float:
	case Token_Imag:
	case Token_Rune:
	case Token_String:
	case Token_OpenParen:
	case Token_Pointer:
	case Token_asm: // Inline assembly
	// Unary Operators
	case Token_Add:
	case Token_Sub:
	case Token_Xor:
	case Token_Not:
	case Token_And:
		s = parse_simple_stmt(f, StmtAllowFlag_Label);
		expect_semicolon(f, s);
		return s;

	case Token_foreign:
		return parse_foreign_decl(f);

	case Token_import:
		return parse_import_decl(f, ImportDecl_Standard);

	case Token_if:     return parse_if_stmt(f);
	case Token_when:   return parse_when_stmt(f);
	case Token_for:    return parse_for_stmt(f);
	case Token_switch: return parse_switch_stmt(f);
	case Token_defer:  return parse_defer_stmt(f);
	case Token_return: return parse_return_stmt(f);

	case Token_break:
	case Token_continue:
	case Token_fallthrough: {
		Token token = advance_token(f);
		Ast *label = nullptr;
		if (token.kind != Token_fallthrough &&
		    f->curr_token.kind == Token_Ident) {
			label = parse_ident(f);
		}
		s = ast_branch_stmt(f, token, label);
		expect_semicolon(f, s);
		return s;
	}

	case Token_using: {
		CommentGroup *docs = f->lead_comment;
		Token token = expect_token(f, Token_using);
		if (f->curr_token.kind == Token_import) {
			return parse_import_decl(f, ImportDecl_Using);
		}

		Ast *decl = nullptr;
		Array<Ast *> list = parse_lhs_expr_list(f);
		if (list.count == 0) {
			syntax_error(token, "Illegal use of 'using' statement");
			expect_semicolon(f, nullptr);
			return ast_bad_stmt(f, token, f->curr_token);
		}

		if (f->curr_token.kind != Token_Colon) {
			expect_semicolon(f, list[list.count-1]);
			return ast_using_stmt(f, token, list);
		}
		expect_token_after(f, Token_Colon, "identifier list");
		decl = parse_value_decl(f, list, docs);

		if (decl != nullptr && decl->kind == Ast_ValueDecl) {
			decl->ValueDecl.is_using = true;
			return decl;
		}

		syntax_error(token, "Illegal use of 'using' statement");
		return ast_bad_stmt(f, token, f->curr_token);
	} break;

	case Token_At: {
		Token token = expect_token(f, Token_At);
		return parse_attribute(f, token, Token_OpenParen, Token_CloseParen);
	}

	case Token_Hash: {
		Ast *s = nullptr;
		Token hash_token = expect_token(f, Token_Hash);
		Token name = expect_token(f, Token_Ident);
		String tag = name.string;

		if (tag == "bounds_check") {
			s = parse_stmt(f);
			return parse_check_directive_for_statement(s, name, StateFlag_bounds_check);
		} else if (tag == "no_bounds_check") {
			s = parse_stmt(f);
			return parse_check_directive_for_statement(s, name, StateFlag_no_bounds_check);
		} else if (tag == "partial") {
			s = parse_stmt(f);
			switch (s->kind) {
			case Ast_SwitchStmt:
				s->SwitchStmt.partial = true;
				break;
			case Ast_TypeSwitchStmt:
				s->TypeSwitchStmt.partial = true;
				break;
			case Ast_EmptyStmt:
				return parse_check_directive_for_statement(s, name, 0);
			default:
				syntax_error(token, "#partial can only be applied to a switch statement");
				break;
			}
			return s;
		} else if (tag == "assert") {
			Ast *t = ast_basic_directive(f, hash_token, name);
			return ast_expr_stmt(f, parse_call_expr(f, t));
		} else if (tag == "panic") {
			Ast *t = ast_basic_directive(f, hash_token, name);
			return ast_expr_stmt(f, parse_call_expr(f, t));
		} else if (name.string == "force_inline" ||
		           name.string == "force_no_inline") {
			Ast *expr = parse_force_inlining_operand(f, name);
			return ast_expr_stmt(f, expr);
		} else if (tag == "unroll") {
			return parse_unrolled_for_loop(f, name);
		} else if (tag == "include") {
			syntax_error(token, "#include is not a valid import declaration kind. Did you mean 'import'?");
			s = ast_bad_stmt(f, token, f->curr_token);
		} else {
			syntax_error(token, "Unknown tag directive used: '%.*s'", LIT(tag));
			s = ast_bad_stmt(f, token, f->curr_token);
		}

		fix_advance_to_next_stmt(f);
		return s;
	} break;

	case Token_OpenBrace:
		return parse_block_stmt(f, false);

	case Token_Semicolon:
		s = ast_empty_stmt(f, token);
		advance_token(f);
		return s;
	}

	// Error correction statements
	switch (token.kind) {
	case Token_else:
		expect_token(f, Token_else);
		syntax_error(token, "'else' unattached to an 'if' statement");
		switch (f->curr_token.kind) {
		case Token_if:
			return parse_if_stmt(f);
		case Token_when:
			return parse_when_stmt(f);
		case Token_OpenBrace:
			return parse_block_stmt(f, true);
		case Token_do: {
			expect_token(f, Token_do);
			Ast *stmt = convert_stmt_to_body(f, parse_stmt(f));
			if (build_context.disallow_do) {
				syntax_error(stmt, "'do' has been disallowed");
			}
			return stmt;
		} break;
		default:
			fix_advance_to_next_stmt(f);
			return ast_bad_stmt(f, token, f->curr_token);
		}
	}

	syntax_error(token, "Expected a statement, got '%.*s'", LIT(token_strings[token.kind]));
	fix_advance_to_next_stmt(f);
	return ast_bad_stmt(f, token, f->curr_token);
}

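// Collects statements until 'case', '}' or EOF; empty statements are dropped, and a bare
// procedure literal at statement level is reported as evaluated but not used.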
Array<Ast *> parse_stmt_list(AstFile *f) {
	auto list = array_make<Ast *>(heap_allocator());
	while (f->curr_token.kind != Token_case &&
	       f->curr_token.kind != Token_CloseBrace &&
	       f->curr_token.kind != Token_EOF) {
		Ast *stmt = parse_stmt(f);
		if (stmt && stmt->kind != Ast_EmptyStmt) {
			array_add(&list, stmt);
			if (stmt->kind == Ast_ExprStmt &&
			    stmt->ExprStmt.expr != nullptr &&
			    stmt->ExprStmt.expr->kind == Ast_ProcLit) {
				syntax_error(stmt, "Procedure literal evaluated but not used");
			}
		}
	}
	return list;
}

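// Tokenizes an entire file up front: validates the '.odin' extension, sizes the token
// array from the file size, records the time spent tokenizing, and sizes the per-file
// AST arena from the resulting token count.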
ParseFileError init_ast_file(AstFile *f, String fullpath, TokenPos *err_pos) {
	GB_ASSERT(f != nullptr);
	f->fullpath = string_trim_whitespace(fullpath); // Just in case
	set_file_path_string(f->id, fullpath);
	set_ast_file_from_id(f->id, f);
	if (!string_ends_with(f->fullpath, str_lit(".odin"))) {
		return ParseFile_WrongExtension;
	}
	TokenizerFlags tokenizer_flags = TokenizerFlag_InsertSemicolon;
	zero_item(&f->tokenizer);
	f->tokenizer.curr_file_id = f->id;
	TokenizerInitError err = init_tokenizer_from_fullpath(&f->tokenizer, f->fullpath, tokenizer_flags);
	if (err != TokenizerInit_None) {
		switch (err) {
		case TokenizerInit_Empty:
			break;
		case TokenizerInit_NotExists:
			return ParseFile_NotFound;
		case TokenizerInit_Permission:
			return ParseFile_Permission;
		case TokenizerInit_FileTooLarge:
			return ParseFile_FileTooLarge;
		default:
			return ParseFile_InvalidFile;
		}
	}

	isize file_size = f->tokenizer.end - f->tokenizer.start;

	// NOTE(bill): Determine allocation size required for tokens
	isize token_cap = file_size/3ll;
	isize pow2_cap = gb_max(cast(isize)prev_pow2(cast(i64)token_cap)/2, 16);
	token_cap = ((token_cap + pow2_cap-1)/pow2_cap) * pow2_cap;

	isize init_token_cap = gb_max(token_cap, 16);
	array_init(&f->tokens, heap_allocator(), 0, gb_max(init_token_cap, 16));
	isize cap0 = f->tokens.capacity;

	if (err == TokenizerInit_Empty) {
		Token token = {Token_EOF};
		token.pos.file_id = f->id;
		token.pos.line = 1;
		token.pos.column = 1;
		array_add(&f->tokens, token);
		return ParseFile_None;
	}

	u64 start = time_stamp_time_now();
	for (;;) {
		Token *token = array_add_and_get(&f->tokens);
		tokenizer_get_token(&f->tokenizer, token);
		if (token->kind == Token_Invalid) {
			err_pos->line = token->pos.line;
			err_pos->column = token->pos.column;
			return ParseFile_InvalidToken;
		}

		if (token->kind == Token_EOF) {
			break;
		}
	}
	u64 end = time_stamp_time_now();
	f->time_to_tokenize = cast(f64)(end-start)/cast(f64)time_stamp__freq();

	f->curr_token_index = 0;
	f->prev_token = f->tokens[f->curr_token_index];
	f->curr_token = f->tokens[f->curr_token_index];

	isize const page_size = 4*1024;
	isize block_size = 2*f->tokens.count*gb_size_of(Ast);
	block_size = ((block_size + page_size-1)/page_size) * page_size;
	block_size = gb_clamp(block_size, page_size, ARENA_DEFAULT_BLOCK_SIZE);

	arena_init(&f->arena, heap_allocator(), block_size);
	array_init(&f->comments, heap_allocator(), 0, 0);
	array_init(&f->imports, heap_allocator(), 0, 0);
	f->curr_proc = nullptr;

	return ParseFile_None;
}

void destroy_ast_file(AstFile *f) {
	GB_ASSERT(f != nullptr);
	array_free(&f->tokens);
	array_free(&f->comments);
	array_free(&f->imports);
	gb_free(heap_allocator(), f->tokenizer.fullpath.text);
	destroy_tokenizer(&f->tokenizer);
}

bool init_parser(Parser *p) {
	GB_ASSERT(p != nullptr);
	string_set_init(&p->imported_files, heap_allocator());
	array_init(&p->packages, heap_allocator());
	array_init(&p->package_imports, heap_allocator());
	mutex_init(&p->import_mutex);
	mutex_init(&p->file_add_mutex);
	mutex_init(&p->file_decl_mutex);
	mutex_init(&p->packages_mutex);
	mpmc_init(&p->file_error_queue, heap_allocator(), 1024);
	return true;
}

void destroy_parser(Parser *p) {
	GB_ASSERT(p != nullptr);
	// TODO(bill): Fix memory leak
	for_array(i, p->packages) {
		AstPackage *pkg = p->packages[i];
		for_array(j, pkg->files) {
			destroy_ast_file(pkg->files[j]);
		}
		array_free(&pkg->files);
		array_free(&pkg->foreign_files);
	}
#if 0
	for_array(i, p->package_imports) {
		// gb_free(heap_allocator(), p->package_imports[i].text);
	}
#endif
	array_free(&p->packages);
	array_free(&p->package_imports);
	string_set_destroy(&p->imported_files);
	mutex_destroy(&p->import_mutex);
	mutex_destroy(&p->file_add_mutex);
	mutex_destroy(&p->file_decl_mutex);
	mutex_destroy(&p->packages_mutex);
	mpmc_destroy(&p->file_error_queue);
}

void parser_add_package(Parser *p, AstPackage *pkg) {
	mutex_lock(&p->packages_mutex);
	pkg->id = p->packages.count+1;
	array_add(&p->packages, pkg);
	mutex_unlock(&p->packages_mutex);
}

ParseFileError process_imported_file(Parser *p, ImportedFile imported_file);

WORKER_TASK_PROC(parser_worker_proc) {
	ParserWorkerData *wd = cast(ParserWorkerData *)data;
	ParseFileError err = process_imported_file(wd->parser, wd->imported_file);
	if (err != ParseFile_None) {
		mpmc_enqueue(&wd->parser->file_error_queue, err);
	}
	return cast(isize)err;
}

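// Queues a single source file to be parsed on the shared parser thread pool; failures are
// reported back through the parser's file_error_queue by parser_worker_proc above.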
void parser_add_file_to_process(Parser *p, AstPackage *pkg, FileInfo fi, TokenPos pos) {
	// TODO(bill): Use a better allocator
	ImportedFile f = {pkg, fi, pos, p->file_to_process_count++};
	auto wd = gb_alloc_item(heap_allocator(), ParserWorkerData);
	wd->parser = p;
	wd->imported_file = f;
	thread_pool_add_task(&parser_thread_pool, parser_worker_proc, wd);
}

WORKER_TASK_PROC(foreign_file_worker_proc) {
	ForeignFileWorkerData *wd = cast(ForeignFileWorkerData *)data;
	Parser *p = wd->parser;
	ImportedFile *imp = &wd->imported_file;
	AstPackage *pkg = imp->pkg;

	AstForeignFile foreign_file = {wd->foreign_kind};
	String fullpath = string_trim_whitespace(imp->fi.fullpath); // Just in case
	char *c_str = alloc_cstring(heap_allocator(), fullpath);
	defer (gb_free(heap_allocator(), c_str));
	gbFileContents fc = gb_file_read_contents(heap_allocator(), true, c_str);
	foreign_file.source.text = (u8 *)fc.data;
	foreign_file.source.len = fc.size;
	switch (wd->foreign_kind) {
	case AstForeignFile_S:
		// TODO(bill): Actually do something with it
		break;
	}
	mutex_lock(&p->file_add_mutex);
	array_add(&pkg->foreign_files, foreign_file);
	mutex_unlock(&p->file_add_mutex);
	return 0;
}

void parser_add_foreign_file_to_process(Parser *p, AstPackage *pkg, AstForeignFileKind kind, FileInfo fi, TokenPos pos) {
	// TODO(bill): Use a better allocator
	ImportedFile f = {pkg, fi, pos, p->file_to_process_count++};
	auto wd = gb_alloc_item(heap_allocator(), ForeignFileWorkerData);
	wd->parser = p;
	wd->imported_file = f;
	wd->foreign_kind = kind;
	thread_pool_add_task(&parser_thread_pool, foreign_file_worker_proc, wd);
}

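// Registers an import path exactly once (guarded by import_mutex and the imported_files set).
// A single '.odin' file used as the initial package is queued directly; otherwise the directory
// is scanned and every non-excluded '.odin' (and '.S'/'.s' foreign) file is queued for parsing.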
// NOTE(bill): Returns the package if it was added, or nullptr if the path was already imported
AstPackage *try_add_import_path(Parser *p, String const &path, String const &rel_path, TokenPos pos, PackageKind kind = Package_Normal) {
	String const FILE_EXT = str_lit(".odin");

	mutex_lock(&p->import_mutex);
	defer (mutex_unlock(&p->import_mutex));

	if (string_set_exists(&p->imported_files, path)) {
		return nullptr;
	}
	string_set_add(&p->imported_files, path);

	AstPackage *pkg = gb_alloc_item(heap_allocator(), AstPackage);
	pkg->kind = kind;
	pkg->fullpath = path;
	array_init(&pkg->files, heap_allocator());
	pkg->foreign_files.allocator = heap_allocator();

	// NOTE(bill): Single file initial package
	if (kind == Package_Init && string_ends_with(path, FILE_EXT)) {
		FileInfo fi = {};
		fi.name = filename_from_path(path);
		fi.fullpath = path;
		fi.size = get_file_size(path);
		fi.is_dir = false;

		pkg->is_single_file = true;
		parser_add_file_to_process(p, pkg, fi, pos);
		parser_add_package(p, pkg);
		return pkg;
	}

	Array<FileInfo> list = {};
	ReadDirectoryError rd_err = read_directory(path, &list);
	defer (array_free(&list));

	if (list.count == 1) {
		GB_ASSERT(path != list[0].fullpath);
	}

	switch (rd_err) {
	case ReadDirectory_InvalidPath:
		syntax_error(pos, "Invalid path: %.*s", LIT(rel_path));
		return nullptr;
	case ReadDirectory_NotExists:
		syntax_error(pos, "Path does not exist: %.*s", LIT(rel_path));
		return nullptr;
	case ReadDirectory_Permission:
		syntax_error(pos, "Insufficient permissions whilst reading path %.*s", LIT(rel_path));
		return nullptr;
	case ReadDirectory_NotDir:
		syntax_error(pos, "Expected a directory for a package, got a file: %.*s", LIT(rel_path));
		return nullptr;
	case ReadDirectory_Empty:
		syntax_error(pos, "Empty directory: %.*s", LIT(rel_path));
		return nullptr;
	case ReadDirectory_Unknown:
		syntax_error(pos, "Unknown error whilst reading path %.*s", LIT(rel_path));
		return nullptr;
	}

	for_array(list_index, list) {
		FileInfo fi = list[list_index];
		String name = fi.name;
		String ext = path_extension(name);
		if (ext == FILE_EXT) {
			if (is_excluded_target_filename(name)) {
				continue;
			}
			parser_add_file_to_process(p, pkg, fi, pos);
		} else if (ext == ".S" || ext == ".s") {
			if (is_excluded_target_filename(name)) {
				continue;
			}
			parser_add_foreign_file_to_process(p, pkg, AstForeignFile_S, fi, pos);
		}
	}

	parser_add_package(p, pkg);
	return pkg;
}

gb_global Rune illegal_import_runes[] = {
	'"', '\'', '`',
	'\t', '\r', '\n', '\v', '\f',
	'\\', // NOTE(bill): Disallow windows style filepaths
	'!', '$', '%', '^', '&', '*', '(', ')', '=',
	'[', ']', '{', '}',
	';',
	':', // NOTE(bill): Disallow windows style absolute filepaths
	'#',
	'|', ',', '<', '>', '?',
};

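// An import path is valid only if it is non-empty, decodes as valid UTF-8, and contains
// none of the runes listed in illegal_import_runes above.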
bool is_import_path_valid(String path) {
	if (path.len > 0) {
		u8 *start = path.text;
		u8 *end = path.text + path.len;
		u8 *curr = start;
		while (curr < end) {
			isize width = 1;
			Rune r = *curr;
			if (r >= 0x80) {
				width = utf8_decode(curr, end-curr, &r);
				if (r == GB_RUNE_INVALID && width == 1) {
					return false;
				}
				else if (r == GB_RUNE_BOM && curr-start > 0) {
					return false;
				}
			}

			for (isize i = 0; i < gb_count_of(illegal_import_runes); i++) {
				if (r == illegal_import_runes[i]) {
					return false;
				}
			}

			curr += width;
		}

		return true;
	}
	return false;
}

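// Same validation as is_import_path_valid, except that on Windows '\\' and ':' are skipped
// from the illegal-rune check so that drive-qualified build flag paths are accepted.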
bool is_build_flag_path_valid(String path) {
	if (path.len > 0) {
		u8 *start = path.text;
		u8 *end = path.text + path.len;
		u8 *curr = start;
		isize index = 0;
		while (curr < end) {
			isize width = 1;
			Rune r = *curr;
			if (r >= 0x80) {
				width = utf8_decode(curr, end-curr, &r);
				if (r == GB_RUNE_INVALID && width == 1) {
					return false;
				}
				else if (r == GB_RUNE_BOM && curr-start > 0) {
					return false;
				}
			}

			for (isize i = 0; i < gb_count_of(illegal_import_runes); i++) {
#if defined(GB_SYSTEM_WINDOWS)
				if (r == '\\') {
					break;
				} else if (r == ':') {
					break;
				}
#endif
				if (r == illegal_import_runes[i]) {
					return false;
				}
			}

			curr += width;
			index += 1;
		}

		return true;
	}
	return false;
}

bool is_package_name_reserved(String const &name) {
	if (name == "builtin") {
		return true;
	} else if (name == "intrinsics") {
		return true;
	}
	return false;
}

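// Resolves an import string of the form "collection:path" (or a bare relative path) into a
// full path. The 'system' collection is only legal for foreign import declarations, and on
// non-Windows systems foreign '.so' paths are passed through untouched so the linker
// receives a relative path.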
bool determine_path_from_string(BlockingMutex *file_mutex, Ast *node, String base_dir, String original_string, String *path) {
	GB_ASSERT(path != nullptr);

	// NOTE(bill): if file_mutex == nullptr, this means that the code is used within the semantics stage
	gbAllocator a = heap_allocator();
	String collection_name = {};

	isize colon_pos = -1;
	for (isize j = 0; j < original_string.len; j++) {
		if (original_string[j] == ':') {
			colon_pos = j;
			break;
		}
	}

	bool has_windows_drive = false;
#if defined(GB_SYSTEM_WINDOWS)
	if (file_mutex == nullptr) {
		if (colon_pos == 1 && original_string.len > 2) {
			if (original_string[2] == '/' || original_string[2] == '\\') {
				colon_pos = -1;
				has_windows_drive = true;
			}
		}
	}
#endif

	String file_str = {};
	if (colon_pos == 0) {
		syntax_error(node, "Expected a collection name");
		return false;
	}

	if (original_string.len > 0 && colon_pos > 0) {
		collection_name = substring(original_string, 0, colon_pos);
		file_str = substring(original_string, colon_pos+1, original_string.len);
	} else {
		file_str = original_string;
	}

	if (has_windows_drive) {
		String sub_file_path = substring(file_str, 3, file_str.len);
		if (!is_import_path_valid(sub_file_path)) {
			syntax_error(node, "Invalid import path: '%.*s'", LIT(file_str));
			return false;
		}
	} else if (!is_import_path_valid(file_str)) {
		syntax_error(node, "Invalid import path: '%.*s'", LIT(file_str));
		return false;
	}

	if (collection_name.len > 0) {
		if (collection_name == "system") {
			if (node->kind != Ast_ForeignImportDecl) {
				syntax_error(node, "The library collection 'system' is restricted for 'foreign_library'");
				return false;
			} else {
				*path = file_str;
				return true;
			}
		} else if (!find_library_collection_path(collection_name, &base_dir)) {
			// NOTE(bill): It's a naughty name
			syntax_error(node, "Unknown library collection: '%.*s'", LIT(collection_name));
			return false;
		}
	} else {
#if !defined(GB_SYSTEM_WINDOWS)
		// @NOTE(vassvik): foreign imports of shared libraries that are not in the system collection on
		// linux/mac have to be local to the executable for consistency with shared libraries.
		// Unix does not have a concept of "import library" for shared/dynamic libraries,
		// so we need to pass the relative path to the linker, and add the current
		// working directory of the exe to the library search paths.
		// Static libraries can be linked directly with the full pathname
		//
		if (node->kind == Ast_ForeignImportDecl && string_ends_with(file_str, str_lit(".so"))) {
			*path = file_str;
			return true;
		}
#endif
	}

	if (is_package_name_reserved(file_str)) {
		*path = file_str;
		return true;
	}

	if (file_mutex) mutex_lock(file_mutex);
	defer (if (file_mutex) mutex_unlock(file_mutex));

	if (node->kind == Ast_ForeignImportDecl) {
		node->ForeignImportDecl.collection_name = collection_name;
	}

	if (has_windows_drive) {
		*path = file_str;
	} else {
		String fullpath = string_trim_whitespace(get_fullpath_relative(a, base_dir, file_str));
		*path = fullpath;
	}
	return true;
}

void parse_setup_file_decls(Parser *p, AstFile *f, String base_dir, Slice<Ast *> &decls);

void parse_setup_file_when_stmt(Parser *p, AstFile *f, String base_dir, AstWhenStmt *ws) {
	if (ws->body != nullptr) {
		auto stmts = ws->body->BlockStmt.stmts;
		parse_setup_file_decls(p, f, base_dir, stmts);
	}
	if (ws->else_stmt != nullptr) {
		switch (ws->else_stmt->kind) {
		case Ast_BlockStmt: {
			auto stmts = ws->else_stmt->BlockStmt.stmts;
			parse_setup_file_decls(p, f, base_dir, stmts);
		} break;
		case Ast_WhenStmt:
			parse_setup_file_when_stmt(p, f, base_dir, &ws->else_stmt->WhenStmt);
			break;
		}
	}
}

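// Walks the file-scope declarations: rejects non-declarations (except basic directive calls),
// resolves the paths of 'import' and 'foreign import' declarations, and recurses into the
// branches of 'when' statements.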
void parse_setup_file_decls(Parser *p, AstFile *f, String base_dir, Slice<Ast *> &decls) {
	for_array(i, decls) {
		Ast *node = decls[i];
		if (!is_ast_decl(node) &&
		    node->kind != Ast_WhenStmt &&
		    node->kind != Ast_BadStmt &&
		    node->kind != Ast_EmptyStmt) {
			// NOTE(bill): Sanity check
			if (node->kind == Ast_ExprStmt) {
				Ast *expr = node->ExprStmt.expr;
				if (expr->kind == Ast_CallExpr &&
				    expr->CallExpr.proc->kind == Ast_BasicDirective) {
					f->directive_count += 1;
					continue;
				}
			}

			syntax_error(node, "Only declarations are allowed at file scope, got %.*s", LIT(ast_strings[node->kind]));
		} else if (node->kind == Ast_ImportDecl) {
			ast_node(id, ImportDecl, node);

			String original_string = string_trim_whitespace(string_value_from_token(f, id->relpath));
			String import_path = {};
			bool ok = determine_path_from_string(&p->file_decl_mutex, node, base_dir, original_string, &import_path);
			if (!ok) {
				decls[i] = ast_bad_decl(f, id->relpath, id->relpath);
				continue;
			}
			import_path = string_trim_whitespace(import_path);

			id->fullpath = import_path;
			if (is_package_name_reserved(import_path)) {
				continue;
			}
			try_add_import_path(p, import_path, original_string, ast_token(node).pos);
		} else if (node->kind == Ast_ForeignImportDecl) {
			ast_node(fl, ForeignImportDecl, node);

			auto fullpaths = array_make<String>(permanent_allocator(), 0, fl->filepaths.count);

			for_array(fp_idx, fl->filepaths) {
				String file_str = string_trim_whitespace(string_value_from_token(f, fl->filepaths[fp_idx]));
				String fullpath = file_str;
				if (allow_check_foreign_filepath()) {
					String foreign_path = {};
					bool ok = determine_path_from_string(&p->file_decl_mutex, node, base_dir, file_str, &foreign_path);
					if (!ok) {
						decls[i] = ast_bad_decl(f, fl->filepaths[fp_idx], fl->filepaths[fl->filepaths.count-1]);
						goto end;
					}
					fullpath = foreign_path;
				}
				array_add(&fullpaths, fullpath);
			}

			if (fullpaths.count == 0) {
				syntax_error(decls[i], "No foreign paths found");
				decls[i] = ast_bad_decl(f, fl->filepaths[0], fl->filepaths[fl->filepaths.count-1]);
				goto end;
			}

			fl->fullpaths = slice_from_array(fullpaths);
		} else if (node->kind == Ast_WhenStmt) {
			ast_node(ws, WhenStmt, node);
			parse_setup_file_when_stmt(p, f, base_dir, ws);
		}

		end:;
	}
}

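// Splits the next build-tag token (a run of letters/digits, optionally prefixed with '!')
// off the front of 's'; the remainder is returned through 'out'.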
String build_tag_get_token(String s, String *out) {
	s = string_trim_whitespace(s);
	isize n = 0;
	while (n < s.len) {
		Rune rune = 0;
		isize width = utf8_decode(&s[n], s.len-n, &rune);
		if (n == 0 && rune == '!') {
		} else if (!rune_is_letter(rune) && !rune_is_digit(rune)) {
			isize k = gb_max(gb_max(n, width), 1);
			*out = substring(s, k, s.len);
			return substring(s, 0, k);
		}
		n += width;
	}
	out->len = 0;
	return s;
}

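// Evaluates a '+build' tag comment against the current target. Comma-separated groups are
// OR'd together, the whitespace-separated tokens inside a group are AND'd, a leading '!'
// negates a single OS/architecture token, and 'ignore' forces the whole group to fail.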
bool parse_build_tag(Token token_for_pos, String s) {
	String const prefix = str_lit("+build");
	GB_ASSERT(string_starts_with(s, prefix));
	s = string_trim_whitespace(substring(s, prefix.len, s.len));

	if (s.len == 0) {
		return true;
	}

	bool any_correct = false;
	while (s.len > 0) {
		bool this_kind_correct = true;

		do {
			String p = string_trim_whitespace(build_tag_get_token(s, &s));
			if (p.len == 0) break;
			if (p == ",") break;

			bool is_notted = false;
			if (p[0] == '!') {
				is_notted = true;
				p = substring(p, 1, p.len);
				if (p.len == 0) {
					syntax_error(token_for_pos, "Expected a build platform after '!'");
					break;
				}
			}

			if (p.len == 0) {
				continue;
			}

			if (p == "ignore") {
				this_kind_correct = false;
				continue;
			}

			TargetOsKind os = get_target_os_from_string(p);
			TargetArchKind arch = get_target_arch_from_string(p);
			if (os != TargetOs_Invalid) {
				GB_ASSERT(arch == TargetArch_Invalid);
				if (is_notted) {
					this_kind_correct = this_kind_correct && (os != build_context.metrics.os);
				} else {
					this_kind_correct = this_kind_correct && (os == build_context.metrics.os);
				}
			} else if (arch != TargetArch_Invalid) {
				if (is_notted) {
					this_kind_correct = this_kind_correct && (arch != build_context.metrics.arch);
				} else {
					this_kind_correct = this_kind_correct && (arch == build_context.metrics.arch);
				}
			}

			if (os == TargetOs_Invalid && arch == TargetArch_Invalid) {
				syntax_error(token_for_pos, "Invalid build tag platform: %.*s", LIT(p));
				break;
			}
		} while (s.len > 0);

		any_correct = any_correct || this_kind_correct;
	}

	return any_correct;
}

String dir_from_path(String path) {
	String base_dir = path;
	for (isize i = path.len-1; i >= 0; i--) {
		if (base_dir[i] == '\\' ||
		    base_dir[i] == '/') {
			break;
		}
		base_dir.len--;
	}
	return base_dir;
}

isize calc_decl_count(Ast *decl) {
	isize count = 0;
	switch (decl->kind) {
	case Ast_BlockStmt:
		for_array(i, decl->BlockStmt.stmts) {
			count += calc_decl_count(decl->BlockStmt.stmts.data[i]);
		}
		break;
	case Ast_ValueDecl:
		count = decl->ValueDecl.names.count;
		break;
	case Ast_ForeignBlockDecl:
		count = calc_decl_count(decl->ForeignBlockDecl.body);
		break;
	case Ast_ImportDecl:
	case Ast_ForeignImportDecl:
		count = 1;
		break;
	}
	return count;
}

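// Parses one already-tokenized file: the mandatory package declaration first, then any
// '+build', '+private' and '+lazy' tags in the package doc comment, then every top-level
// statement until EOF. Import path resolution is delegated to parse_setup_file_decls.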
bool parse_file(Parser *p, AstFile *f) {
	if (f->tokens.count == 0) {
		return true;
	}
	if (f->tokens.count > 0 && f->tokens[0].kind == Token_EOF) {
		return true;
	}

	u64 start = time_stamp_time_now();

	String filepath = f->tokenizer.fullpath;
	String base_dir = dir_from_path(filepath);
	if (f->curr_token.kind == Token_Comment) {
		comsume_comment_groups(f, f->prev_token);
	}
	CommentGroup *docs = f->lead_comment;

	if (f->curr_token.kind != Token_package) {
		syntax_error(f->curr_token, "Expected a package declaration at the beginning of the file");
		return false;
	}

	f->package_token = expect_token(f, Token_package);
	if (f->package_token.kind != Token_package) {
		return false;
	}

	Token package_name = expect_token_after(f, Token_Ident, "package");
	if (package_name.kind == Token_Ident) {
		if (package_name.string == "_") {
			syntax_error(package_name, "Invalid package name '_'");
		} else if (f->pkg->kind != Package_Runtime && package_name.string == "runtime") {
			syntax_error(package_name, "Use of reserved package name '%.*s'", LIT(package_name.string));
		} else if (is_package_name_reserved(package_name.string)) {
			syntax_error(package_name, "Use of reserved package name '%.*s'", LIT(package_name.string));
		}
	}
	f->package_name = package_name.string;

	if (!f->pkg->is_single_file && docs != nullptr && docs->list.count > 0) {
		for_array(i, docs->list) {
			Token tok = docs->list[i]; GB_ASSERT(tok.kind == Token_Comment);
			String str = tok.string;
			if (string_starts_with(str, str_lit("//"))) {
				String lc = string_trim_whitespace(substring(str, 2, str.len));
				if (lc.len > 0 && lc[0] == '+') {
					if (string_starts_with(lc, str_lit("+build"))) {
						if (!parse_build_tag(tok, lc)) {
							return false;
						}
					} else if (lc == "+private") {
						f->flags |= AstFile_IsPrivate;
					} else if (lc == "+lazy") {
						if (build_context.ignore_lazy) {
							// Ignore
						} else if (f->flags & AstFile_IsTest) {
							// Ignore
						} else if (build_context.command_kind == Command_doc &&
						           f->pkg->kind == Package_Init) {
							// Ignore
						} else {
							f->flags |= AstFile_IsLazy;
						}
					}
				}
			}
		}
	}

	Ast *pd = ast_package_decl(f, f->package_token, package_name, docs, f->line_comment);
	expect_semicolon(f, pd);
	f->pkg_decl = pd;

	if (f->error_count == 0) {
		auto decls = array_make<Ast *>(heap_allocator());

		while (f->curr_token.kind != Token_EOF) {
			Ast *stmt = parse_stmt(f);
			if (stmt && stmt->kind != Ast_EmptyStmt) {
				array_add(&decls, stmt);
				if (stmt->kind == Ast_ExprStmt &&
				    stmt->ExprStmt.expr != nullptr &&
				    stmt->ExprStmt.expr->kind == Ast_ProcLit) {
					syntax_error(stmt, "Procedure literal evaluated but not used");
				}

				f->total_file_decl_count += calc_decl_count(stmt);
				if (stmt->kind == Ast_WhenStmt || stmt->kind == Ast_ExprStmt || stmt->kind == Ast_ImportDecl) {
					f->delayed_decl_count += 1;
				}
			}
		}

		f->decls = slice_from_array(decls);

		parse_setup_file_decls(p, f, base_dir, f->decls);
	}

	u64 end = time_stamp_time_now();
	f->time_to_parse = cast(f64)(end-start)/cast(f64)time_stamp__freq();

	for (int i = 0; i < AstDelayQueue_COUNT; i++) {
		mpmc_init(f->delayed_decls_queues+i, heap_allocator(), f->delayed_decl_count);
	}

	return f->error_count == 0;
}

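// Worker-side entry point for a single imported file: allocates the AstFile, tokenizes and
// parses it, reports init/parse errors, and adds the file to its package under file_add_mutex
// while checking that every file in the package agrees on the package name.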
ParseFileError process_imported_file(Parser *p, ImportedFile imported_file) {
	AstPackage *pkg = imported_file.pkg;
	FileInfo fi = imported_file.fi;
	TokenPos pos = imported_file.pos;
	AstFile *file = gb_alloc_item(heap_allocator(), AstFile);
	file->pkg = pkg;
	file->id = cast(i32)(imported_file.index+1);
	TokenPos err_pos = {0};
	ParseFileError err = init_ast_file(file, fi.fullpath, &err_pos);
	err_pos.file_id = file->id;
	file->last_error = err;

	if (err != ParseFile_None) {
		if (err == ParseFile_EmptyFile) {
			if (fi.fullpath == p->init_fullpath) {
				syntax_error(pos, "Initial file is empty - %.*s\n", LIT(p->init_fullpath));
				gb_exit(1);
			}
		} else {
			switch (err) {
			case ParseFile_WrongExtension:
				syntax_error(pos, "Failed to parse file: %.*s; invalid file extension: File must have the extension '.odin'", LIT(fi.name));
				break;
			case ParseFile_InvalidFile:
				syntax_error(pos, "Failed to parse file: %.*s; invalid file or cannot be found", LIT(fi.name));
				break;
			case ParseFile_Permission:
				syntax_error(pos, "Failed to parse file: %.*s; file permissions problem", LIT(fi.name));
				break;
			case ParseFile_NotFound:
				syntax_error(pos, "Failed to parse file: %.*s; file cannot be found ('%.*s')", LIT(fi.name), LIT(fi.fullpath));
				break;
			case ParseFile_InvalidToken:
				syntax_error(err_pos, "Failed to parse file: %.*s; invalid token found in file", LIT(fi.name));
				break;
			case ParseFile_EmptyFile:
				syntax_error(pos, "Failed to parse file: %.*s; file contains no tokens", LIT(fi.name));
				break;
			case ParseFile_FileTooLarge:
				syntax_error(pos, "Failed to parse file: %.*s; file is too large, exceeds maximum file size of 2 GiB", LIT(fi.name));
				break;
			}

			return err;
		}
	}

	if (build_context.command_kind == Command_test) {
		String name = file->fullpath;
		name = remove_extension_from_path(name);
		String test_suffix = str_lit("_test");
		if (string_ends_with(name, test_suffix) && name != test_suffix) {
			file->flags |= AstFile_IsTest;
		}
	}

	if (parse_file(p, file)) {
		mutex_lock(&p->file_add_mutex);
		defer (mutex_unlock(&p->file_add_mutex));
		array_add(&pkg->files, file);

		if (pkg->name.len == 0) {
			pkg->name = file->package_name;
		} else if (pkg->name != file->package_name) {
			if (file->tokens.count > 0 && file->tokens[0].kind != Token_EOF) {
				Token tok = file->package_token;
				tok.pos.file_id = file->id;
				tok.pos.line = gb_max(tok.pos.line, 1);
				tok.pos.column = gb_max(tok.pos.column, 1);
				syntax_error(tok, "Different package name, expected '%.*s', got '%.*s'", LIT(pkg->name), LIT(file->package_name));
			}
		}

		p->total_line_count += file->tokenizer.line_count;
		p->total_token_count += file->tokens.count;
	}

	return ParseFile_None;
}

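// Top-level driver: sets up the parser thread pool, queues the core 'runtime' package, the
// initial package, the core 'testing' package when running 'odin test', and any extra
// packages from the command line, then waits for all workers and returns the first error
// reported by any file.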
ParseFileError parse_packages(Parser *p, String init_filename) {
	GB_ASSERT(init_filename.text[init_filename.len] == 0);

	isize thread_count = gb_max(build_context.thread_count, 1);
	isize worker_count = thread_count-1; // NOTE(bill): The main thread will also be used for work
	thread_pool_init(&parser_thread_pool, heap_allocator(), worker_count, "ParserWork");

	String init_fullpath = path_to_full_path(heap_allocator(), init_filename);
	if (!path_is_directory(init_fullpath)) {
		String const ext = str_lit(".odin");
		if (!string_ends_with(init_fullpath, ext)) {
			error_line("Expected either a directory or a .odin file, got '%.*s'\n", LIT(init_filename));
			return ParseFile_WrongExtension;
		}
	}

	TokenPos init_pos = {};
	{
		String s = get_fullpath_core(heap_allocator(), str_lit("runtime"));
		try_add_import_path(p, s, s, init_pos, Package_Runtime);
	}

	try_add_import_path(p, init_fullpath, init_fullpath, init_pos, Package_Init);
	p->init_fullpath = init_fullpath;

	if (build_context.command_kind == Command_test) {
		String s = get_fullpath_core(heap_allocator(), str_lit("testing"));
		try_add_import_path(p, s, s, init_pos, Package_Normal);
	}

	for_array(i, build_context.extra_packages) {
		String path = build_context.extra_packages[i];
		String fullpath = path_to_full_path(heap_allocator(), path); // LEAK?
		if (!path_is_directory(fullpath)) {
			String const ext = str_lit(".odin");
			if (!string_ends_with(fullpath, ext)) {
				error_line("Expected either a directory or a .odin file, got '%.*s'\n", LIT(fullpath));
				return ParseFile_WrongExtension;
			}
		}
		AstPackage *pkg = try_add_import_path(p, fullpath, fullpath, init_pos, Package_Normal);
		if (pkg) {
			pkg->is_extra = true;
		}
	}

	thread_pool_start(&parser_thread_pool);
	thread_pool_wait_to_process(&parser_thread_pool);

	for (ParseFileError err = ParseFile_None; mpmc_dequeue(&p->file_error_queue, &err); /**/) {
		if (err != ParseFile_None) {
			return err;
		}
	}

	for (isize i = p->packages.count-1; i >= 0; i--) {
		AstPackage *pkg = p->packages[i];
		for (isize j = pkg->files.count-1; j >= 0; j--) {
			AstFile *file = pkg->files[j];
			if (file->error_count != 0) {
				if (file->last_error != ParseFile_None) {
					return file->last_error;
				}
				return ParseFile_GeneralError;
			}
		}
	}

	return ParseFile_None;
}