// parser.cpp

#include "parser_pos.cpp"

Token token_end_of_line(AstFile *f, Token tok) {
    u8 const *start = f->tokenizer.start + tok.pos.offset;
    u8 const *s = start;
    while (*s && *s != '\n' && s < f->tokenizer.end) {
        s += 1;
    }
    tok.pos.column += cast(i32)(s - start) - 1;
    return tok;
}

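// Returns the trimmed source line containing `pos` as a heap-allocated gbString,
// or nullptr if the file cannot be found or the offset is out of range.
// If `offset_` is non-null, it receives the position's offset within the trimmed line.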
gbString get_file_line_as_string(TokenPos const &pos, i32 *offset_) {
    AstFile *file = thread_safe_get_ast_file_from_id(pos.file_id);
    if (file == nullptr) {
        return nullptr;
    }
    isize offset = pos.offset;

    u8 *start = file->tokenizer.start;
    u8 *end = file->tokenizer.end;
    isize len = end-start;
    if (len < offset) {
        return nullptr;
    }

    u8 *pos_offset = start+offset;

    u8 *line_start = pos_offset;
    u8 *line_end = pos_offset;
    while (line_start >= start) {
        if (*line_start == '\n') {
            line_start += 1;
            break;
        }
        line_start -= 1;
    }
    while (line_end < end) {
        if (*line_end == '\n') {
            line_end -= 1;
            break;
        }
        line_end += 1;
    }

    String the_line = make_string(line_start, line_end-line_start);
    the_line = string_trim_whitespace(the_line);

    if (offset_) *offset_ = cast(i32)(pos_offset - the_line.text);

    return gb_string_make_length(heap_allocator(), the_line.text, the_line.len);
}

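// Size in bytes of an Ast node of the given kind: the common header plus the
// variant payload, rounded up to pointer alignment.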
isize ast_node_size(AstKind kind) {
    return align_formula_isize(gb_size_of(AstCommonStuff) + ast_variant_sizes[kind], gb_align_of(void *));
}

gb_global std::atomic<isize> global_total_node_memory_allocated;

// NOTE(bill): And this below is why I/we need a new language! Discriminated unions are a pain in C/C++
Ast *alloc_ast_node(AstFile *f, AstKind kind) {
    gbAllocator a = ast_allocator(f);
    isize size = ast_node_size(kind);
    Ast *node = cast(Ast *)gb_alloc(a, size);
    node->kind = kind;
    node->file_id = f ? f->id : 0;
    global_total_node_memory_allocated += size;
    return node;
}

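// Deep-copies an array/slice of Ast pointers by cloning each element.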
Ast *clone_ast(Ast *node);

Array<Ast *> clone_ast_array(Array<Ast *> const &array) {
    Array<Ast *> result = {};
    if (array.count > 0) {
        result = array_make<Ast *>(ast_allocator(nullptr), array.count);
        for_array(i, array) {
            result[i] = clone_ast(array[i]);
        }
    }
    return result;
}

Slice<Ast *> clone_ast_array(Slice<Ast *> const &array) {
    Slice<Ast *> result = {};
    if (array.count > 0) {
        result = slice_clone(permanent_allocator(), array);
        for_array(i, array) {
            result[i] = clone_ast(array[i]);
        }
    }
    return result;
}

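// Deep-clones an Ast node: copies the node's bytes, then recursively clones every
// child node according to its kind. Checker-resolved pointers (e.g. Ident.entity,
// CaseClause.implicit_entity) are reset to nullptr in the clone.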
Ast *clone_ast(Ast *node) {
    if (node == nullptr) {
        return nullptr;
    }
    AstFile *f = node->thread_safe_file();
    Ast *n = alloc_ast_node(f, node->kind);
    gb_memmove(n, node, ast_node_size(node->kind));

    switch (n->kind) {
    default: GB_PANIC("Unhandled Ast %.*s", LIT(ast_strings[n->kind])); break;

    case Ast_Invalid: break;
    case Ast_Ident:
        n->Ident.entity = nullptr;
        break;
    case Ast_Implicit: break;
    case Ast_Undef: break;
    case Ast_BasicLit: break;
    case Ast_BasicDirective: break;

    case Ast_PolyType:
        n->PolyType.type = clone_ast(n->PolyType.type);
        n->PolyType.specialization = clone_ast(n->PolyType.specialization);
        break;
    case Ast_Ellipsis:
        n->Ellipsis.expr = clone_ast(n->Ellipsis.expr);
        break;
    case Ast_ProcGroup:
        n->ProcGroup.args = clone_ast_array(n->ProcGroup.args);
        break;
    case Ast_ProcLit:
        n->ProcLit.type = clone_ast(n->ProcLit.type);
        n->ProcLit.body = clone_ast(n->ProcLit.body);
        n->ProcLit.where_clauses = clone_ast_array(n->ProcLit.where_clauses);
        break;
    case Ast_CompoundLit:
        n->CompoundLit.type = clone_ast(n->CompoundLit.type);
        n->CompoundLit.elems = clone_ast_array(n->CompoundLit.elems);
        break;

    case Ast_BadExpr: break;
    case Ast_TagExpr:
        n->TagExpr.expr = clone_ast(n->TagExpr.expr);
        break;
    case Ast_UnaryExpr:
        n->UnaryExpr.expr = clone_ast(n->UnaryExpr.expr);
        break;
    case Ast_BinaryExpr:
        n->BinaryExpr.left = clone_ast(n->BinaryExpr.left);
        n->BinaryExpr.right = clone_ast(n->BinaryExpr.right);
        break;
    case Ast_ParenExpr:
        n->ParenExpr.expr = clone_ast(n->ParenExpr.expr);
        break;
    case Ast_SelectorExpr:
        n->SelectorExpr.expr = clone_ast(n->SelectorExpr.expr);
        n->SelectorExpr.selector = clone_ast(n->SelectorExpr.selector);
        break;
    case Ast_ImplicitSelectorExpr:
        n->ImplicitSelectorExpr.selector = clone_ast(n->ImplicitSelectorExpr.selector);
        break;
    case Ast_SelectorCallExpr:
        n->SelectorCallExpr.expr = clone_ast(n->SelectorCallExpr.expr);
        n->SelectorCallExpr.call = clone_ast(n->SelectorCallExpr.call);
        break;
    case Ast_IndexExpr:
        n->IndexExpr.expr = clone_ast(n->IndexExpr.expr);
        n->IndexExpr.index = clone_ast(n->IndexExpr.index);
        break;
    case Ast_MatrixIndexExpr:
        n->MatrixIndexExpr.expr = clone_ast(n->MatrixIndexExpr.expr);
        n->MatrixIndexExpr.row_index = clone_ast(n->MatrixIndexExpr.row_index);
        n->MatrixIndexExpr.column_index = clone_ast(n->MatrixIndexExpr.column_index);
        break;
    case Ast_DerefExpr:
        n->DerefExpr.expr = clone_ast(n->DerefExpr.expr);
        break;
    case Ast_SliceExpr:
        n->SliceExpr.expr = clone_ast(n->SliceExpr.expr);
        n->SliceExpr.low = clone_ast(n->SliceExpr.low);
        n->SliceExpr.high = clone_ast(n->SliceExpr.high);
        break;
    case Ast_CallExpr:
        n->CallExpr.proc = clone_ast(n->CallExpr.proc);
        n->CallExpr.args = clone_ast_array(n->CallExpr.args);
        break;

    case Ast_FieldValue:
        n->FieldValue.field = clone_ast(n->FieldValue.field);
        n->FieldValue.value = clone_ast(n->FieldValue.value);
        break;
    case Ast_EnumFieldValue:
        n->EnumFieldValue.name = clone_ast(n->EnumFieldValue.name);
        n->EnumFieldValue.value = clone_ast(n->EnumFieldValue.value);
        break;

    case Ast_TernaryIfExpr:
        n->TernaryIfExpr.x = clone_ast(n->TernaryIfExpr.x);
        n->TernaryIfExpr.cond = clone_ast(n->TernaryIfExpr.cond);
        n->TernaryIfExpr.y = clone_ast(n->TernaryIfExpr.y);
        break;
    case Ast_TernaryWhenExpr:
        n->TernaryWhenExpr.x = clone_ast(n->TernaryWhenExpr.x);
        n->TernaryWhenExpr.cond = clone_ast(n->TernaryWhenExpr.cond);
        n->TernaryWhenExpr.y = clone_ast(n->TernaryWhenExpr.y);
        break;
    case Ast_OrElseExpr:
        n->OrElseExpr.x = clone_ast(n->OrElseExpr.x);
        n->OrElseExpr.y = clone_ast(n->OrElseExpr.y);
        break;
    case Ast_OrReturnExpr:
        n->OrReturnExpr.expr = clone_ast(n->OrReturnExpr.expr);
        break;
    case Ast_TypeAssertion:
        n->TypeAssertion.expr = clone_ast(n->TypeAssertion.expr);
        n->TypeAssertion.type = clone_ast(n->TypeAssertion.type);
        break;
    case Ast_TypeCast:
        n->TypeCast.type = clone_ast(n->TypeCast.type);
        n->TypeCast.expr = clone_ast(n->TypeCast.expr);
        break;
    case Ast_AutoCast:
        n->AutoCast.expr = clone_ast(n->AutoCast.expr);
        break;
    case Ast_InlineAsmExpr:
        n->InlineAsmExpr.param_types = clone_ast_array(n->InlineAsmExpr.param_types);
        n->InlineAsmExpr.return_type = clone_ast(n->InlineAsmExpr.return_type);
        n->InlineAsmExpr.asm_string = clone_ast(n->InlineAsmExpr.asm_string);
        n->InlineAsmExpr.constraints_string = clone_ast(n->InlineAsmExpr.constraints_string);
        break;

    case Ast_BadStmt: break;
    case Ast_EmptyStmt: break;
    case Ast_ExprStmt:
        n->ExprStmt.expr = clone_ast(n->ExprStmt.expr);
        break;
    case Ast_TagStmt:
        n->TagStmt.stmt = clone_ast(n->TagStmt.stmt);
        break;
    case Ast_AssignStmt:
        n->AssignStmt.lhs = clone_ast_array(n->AssignStmt.lhs);
        n->AssignStmt.rhs = clone_ast_array(n->AssignStmt.rhs);
        break;
    case Ast_BlockStmt:
        n->BlockStmt.label = clone_ast(n->BlockStmt.label);
        n->BlockStmt.stmts = clone_ast_array(n->BlockStmt.stmts);
        break;
    case Ast_IfStmt:
        n->IfStmt.label = clone_ast(n->IfStmt.label);
        n->IfStmt.init = clone_ast(n->IfStmt.init);
        n->IfStmt.cond = clone_ast(n->IfStmt.cond);
        n->IfStmt.body = clone_ast(n->IfStmt.body);
        n->IfStmt.else_stmt = clone_ast(n->IfStmt.else_stmt);
        break;
    case Ast_WhenStmt:
        n->WhenStmt.cond = clone_ast(n->WhenStmt.cond);
        n->WhenStmt.body = clone_ast(n->WhenStmt.body);
        n->WhenStmt.else_stmt = clone_ast(n->WhenStmt.else_stmt);
        break;
    case Ast_ReturnStmt:
        n->ReturnStmt.results = clone_ast_array(n->ReturnStmt.results);
        break;
    case Ast_ForStmt:
        n->ForStmt.label = clone_ast(n->ForStmt.label);
        n->ForStmt.init = clone_ast(n->ForStmt.init);
        n->ForStmt.cond = clone_ast(n->ForStmt.cond);
        n->ForStmt.post = clone_ast(n->ForStmt.post);
        n->ForStmt.body = clone_ast(n->ForStmt.body);
        break;
    case Ast_RangeStmt:
        n->RangeStmt.label = clone_ast(n->RangeStmt.label);
        n->RangeStmt.vals = clone_ast_array(n->RangeStmt.vals);
        n->RangeStmt.expr = clone_ast(n->RangeStmt.expr);
        n->RangeStmt.body = clone_ast(n->RangeStmt.body);
        break;
    case Ast_UnrollRangeStmt:
        n->UnrollRangeStmt.val0 = clone_ast(n->UnrollRangeStmt.val0);
        n->UnrollRangeStmt.val1 = clone_ast(n->UnrollRangeStmt.val1);
        n->UnrollRangeStmt.expr = clone_ast(n->UnrollRangeStmt.expr);
        n->UnrollRangeStmt.body = clone_ast(n->UnrollRangeStmt.body);
        break;
    case Ast_CaseClause:
        n->CaseClause.list = clone_ast_array(n->CaseClause.list);
        n->CaseClause.stmts = clone_ast_array(n->CaseClause.stmts);
        n->CaseClause.implicit_entity = nullptr;
        break;
    case Ast_SwitchStmt:
        n->SwitchStmt.label = clone_ast(n->SwitchStmt.label);
        n->SwitchStmt.init = clone_ast(n->SwitchStmt.init);
        n->SwitchStmt.tag = clone_ast(n->SwitchStmt.tag);
        n->SwitchStmt.body = clone_ast(n->SwitchStmt.body);
        break;
    case Ast_TypeSwitchStmt:
        n->TypeSwitchStmt.label = clone_ast(n->TypeSwitchStmt.label);
        n->TypeSwitchStmt.tag = clone_ast(n->TypeSwitchStmt.tag);
        n->TypeSwitchStmt.body = clone_ast(n->TypeSwitchStmt.body);
        break;
    case Ast_DeferStmt:
        n->DeferStmt.stmt = clone_ast(n->DeferStmt.stmt);
        break;
    case Ast_BranchStmt:
        n->BranchStmt.label = clone_ast(n->BranchStmt.label);
        break;
    case Ast_UsingStmt:
        n->UsingStmt.list = clone_ast_array(n->UsingStmt.list);
        break;

    case Ast_BadDecl: break;
    case Ast_ForeignBlockDecl:
        n->ForeignBlockDecl.foreign_library = clone_ast(n->ForeignBlockDecl.foreign_library);
        n->ForeignBlockDecl.body = clone_ast(n->ForeignBlockDecl.body);
        n->ForeignBlockDecl.attributes = clone_ast_array(n->ForeignBlockDecl.attributes);
        break;
    case Ast_Label:
        n->Label.name = clone_ast(n->Label.name);
        break;
    case Ast_ValueDecl:
        n->ValueDecl.names = clone_ast_array(n->ValueDecl.names);
        n->ValueDecl.type = clone_ast(n->ValueDecl.type);
        n->ValueDecl.values = clone_ast_array(n->ValueDecl.values);
        n->ValueDecl.attributes = clone_ast_array(n->ValueDecl.attributes);
        break;
    case Ast_Attribute:
        n->Attribute.elems = clone_ast_array(n->Attribute.elems);
        break;
    case Ast_Field:
        n->Field.names = clone_ast_array(n->Field.names);
        n->Field.type = clone_ast(n->Field.type);
        break;
    case Ast_FieldList:
        n->FieldList.list = clone_ast_array(n->FieldList.list);
        break;

    case Ast_TypeidType:
        n->TypeidType.specialization = clone_ast(n->TypeidType.specialization);
        break;
    case Ast_HelperType:
        n->HelperType.type = clone_ast(n->HelperType.type);
        break;
    case Ast_DistinctType:
        n->DistinctType.type = clone_ast(n->DistinctType.type);
        break;
    case Ast_ProcType:
        n->ProcType.params = clone_ast(n->ProcType.params);
        n->ProcType.results = clone_ast(n->ProcType.results);
        break;
    case Ast_RelativeType:
        n->RelativeType.tag = clone_ast(n->RelativeType.tag);
        n->RelativeType.type = clone_ast(n->RelativeType.type);
        break;
    case Ast_PointerType:
        n->PointerType.type = clone_ast(n->PointerType.type);
        break;
    case Ast_MultiPointerType:
        n->MultiPointerType.type = clone_ast(n->MultiPointerType.type);
        break;
    case Ast_ArrayType:
        n->ArrayType.count = clone_ast(n->ArrayType.count);
        n->ArrayType.elem = clone_ast(n->ArrayType.elem);
        break;
    case Ast_DynamicArrayType:
        n->DynamicArrayType.elem = clone_ast(n->DynamicArrayType.elem);
        break;
    case Ast_StructType:
        n->StructType.fields = clone_ast_array(n->StructType.fields);
        n->StructType.polymorphic_params = clone_ast(n->StructType.polymorphic_params);
        n->StructType.align = clone_ast(n->StructType.align);
        n->StructType.where_clauses = clone_ast_array(n->StructType.where_clauses);
        break;
    case Ast_UnionType:
        n->UnionType.variants = clone_ast_array(n->UnionType.variants);
        n->UnionType.polymorphic_params = clone_ast(n->UnionType.polymorphic_params);
        n->UnionType.where_clauses = clone_ast_array(n->UnionType.where_clauses);
        break;
    case Ast_EnumType:
        n->EnumType.base_type = clone_ast(n->EnumType.base_type);
        n->EnumType.fields = clone_ast_array(n->EnumType.fields);
        break;
    case Ast_BitSetType:
        n->BitSetType.elem = clone_ast(n->BitSetType.elem);
        n->BitSetType.underlying = clone_ast(n->BitSetType.underlying);
        break;
    case Ast_MapType:
        n->MapType.count = clone_ast(n->MapType.count);
        n->MapType.key = clone_ast(n->MapType.key);
        n->MapType.value = clone_ast(n->MapType.value);
        break;
    case Ast_MatrixType:
        n->MatrixType.row_count = clone_ast(n->MatrixType.row_count);
        n->MatrixType.column_count = clone_ast(n->MatrixType.column_count);
        n->MatrixType.elem = clone_ast(n->MatrixType.elem);
        break;
    }

    return n;
}

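// Diagnostic helpers that report at an Ast node's source position (printf-style).
// error/syntax_error also bump the owning file's error_count.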
void error(Ast *node, char const *fmt, ...) {
    Token token = {};
    TokenPos end_pos = {};
    if (node != nullptr) {
        token = ast_token(node);
        end_pos = ast_end_pos(node);
    }
    va_list va;
    va_start(va, fmt);
    error_va(token.pos, end_pos, fmt, va);
    va_end(va);
    if (node != nullptr && node->file_id != 0) {
        AstFile *f = node->thread_safe_file();
        f->error_count += 1;
    }
}

void error_no_newline(Ast *node, char const *fmt, ...) {
    Token token = {};
    if (node != nullptr) {
        token = ast_token(node);
    }
    va_list va;
    va_start(va, fmt);
    error_no_newline_va(token.pos, fmt, va);
    va_end(va);
    if (node != nullptr && node->file_id != 0) {
        AstFile *f = node->thread_safe_file();
        f->error_count += 1;
    }
}

void warning(Ast *node, char const *fmt, ...) {
    Token token = {};
    TokenPos end_pos = {};
    if (node != nullptr) {
        token = ast_token(node);
        end_pos = ast_end_pos(node);
    }
    va_list va;
    va_start(va, fmt);
    warning_va(token.pos, end_pos, fmt, va);
    va_end(va);
}

void syntax_error(Ast *node, char const *fmt, ...) {
    Token token = {};
    TokenPos end_pos = {};
    if (node != nullptr) {
        token = ast_token(node);
        end_pos = ast_end_pos(node);
    }
    va_list va;
    va_start(va, fmt);
    syntax_error_va(token.pos, end_pos, fmt, va);
    va_end(va);
    if (node != nullptr && node->file_id != 0) {
        AstFile *f = node->thread_safe_file();
        f->error_count += 1;
    }
}

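// Checks that a node has the expected kind(s), reporting a syntax error otherwise.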
bool ast_node_expect(Ast *node, AstKind kind) {
    if (node->kind != kind) {
        syntax_error(node, "Expected %.*s, got %.*s", LIT(ast_strings[kind]), LIT(ast_strings[node->kind]));
        return false;
    }
    return true;
}

bool ast_node_expect2(Ast *node, AstKind kind0, AstKind kind1) {
    if (node->kind != kind0 && node->kind != kind1) {
        syntax_error(node, "Expected %.*s or %.*s, got %.*s", LIT(ast_strings[kind0]), LIT(ast_strings[kind1]), LIT(ast_strings[node->kind]));
        return false;
    }
    return true;
}

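// The ast_* constructors below each allocate a node of the corresponding kind and
// fill in its variant fields from the given tokens and sub-nodes.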
Ast *ast_bad_expr(AstFile *f, Token begin, Token end) {
    Ast *result = alloc_ast_node(f, Ast_BadExpr);
    result->BadExpr.begin = begin;
    result->BadExpr.end = end;
    return result;
}

Ast *ast_tag_expr(AstFile *f, Token token, Token name, Ast *expr) {
    Ast *result = alloc_ast_node(f, Ast_TagExpr);
    result->TagExpr.token = token;
    result->TagExpr.name = name;
    result->TagExpr.expr = expr;
    return result;
}

Ast *ast_tag_stmt(AstFile *f, Token token, Token name, Ast *stmt) {
    Ast *result = alloc_ast_node(f, Ast_TagStmt);
    result->TagStmt.token = token;
    result->TagStmt.name = name;
    result->TagStmt.stmt = stmt;
    return result;
}

Ast *ast_unary_expr(AstFile *f, Token op, Ast *expr) {
    Ast *result = alloc_ast_node(f, Ast_UnaryExpr);
    result->UnaryExpr.op = op;
    result->UnaryExpr.expr = expr;
    return result;
}

Ast *ast_binary_expr(AstFile *f, Token op, Ast *left, Ast *right) {
    Ast *result = alloc_ast_node(f, Ast_BinaryExpr);

    if (left == nullptr) {
        syntax_error(op, "No lhs expression for binary expression '%.*s'", LIT(op.string));
        left = ast_bad_expr(f, op, op);
    }
    if (right == nullptr) {
        syntax_error(op, "No rhs expression for binary expression '%.*s'", LIT(op.string));
        right = ast_bad_expr(f, op, op);
    }

    result->BinaryExpr.op = op;
    result->BinaryExpr.left = left;
    result->BinaryExpr.right = right;
    return result;
}

Ast *ast_paren_expr(AstFile *f, Ast *expr, Token open, Token close) {
    Ast *result = alloc_ast_node(f, Ast_ParenExpr);
    result->ParenExpr.expr = expr;
    result->ParenExpr.open = open;
    result->ParenExpr.close = close;
    return result;
}

Ast *ast_call_expr(AstFile *f, Ast *proc, Array<Ast *> const &args, Token open, Token close, Token ellipsis) {
    Ast *result = alloc_ast_node(f, Ast_CallExpr);
    result->CallExpr.proc = proc;
    result->CallExpr.args = slice_from_array(args);
    result->CallExpr.open = open;
    result->CallExpr.close = close;
    result->CallExpr.ellipsis = ellipsis;
    return result;
}

Ast *ast_selector_expr(AstFile *f, Token token, Ast *expr, Ast *selector) {
    Ast *result = alloc_ast_node(f, Ast_SelectorExpr);
    result->SelectorExpr.token = token;
    result->SelectorExpr.expr = expr;
    result->SelectorExpr.selector = selector;
    return result;
}

Ast *ast_implicit_selector_expr(AstFile *f, Token token, Ast *selector) {
    Ast *result = alloc_ast_node(f, Ast_ImplicitSelectorExpr);
    result->ImplicitSelectorExpr.token = token;
    result->ImplicitSelectorExpr.selector = selector;
    return result;
}

Ast *ast_selector_call_expr(AstFile *f, Token token, Ast *expr, Ast *call) {
    Ast *result = alloc_ast_node(f, Ast_SelectorCallExpr);
    result->SelectorCallExpr.token = token;
    result->SelectorCallExpr.expr = expr;
    result->SelectorCallExpr.call = call;
    return result;
}

Ast *ast_index_expr(AstFile *f, Ast *expr, Ast *index, Token open, Token close) {
    Ast *result = alloc_ast_node(f, Ast_IndexExpr);
    result->IndexExpr.expr = expr;
    result->IndexExpr.index = index;
    result->IndexExpr.open = open;
    result->IndexExpr.close = close;
    return result;
}

Ast *ast_slice_expr(AstFile *f, Ast *expr, Token open, Token close, Token interval, Ast *low, Ast *high) {
    Ast *result = alloc_ast_node(f, Ast_SliceExpr);
    result->SliceExpr.expr = expr;
    result->SliceExpr.open = open;
    result->SliceExpr.close = close;
    result->SliceExpr.interval = interval;
    result->SliceExpr.low = low;
    result->SliceExpr.high = high;
    return result;
}

Ast *ast_deref_expr(AstFile *f, Ast *expr, Token op) {
    Ast *result = alloc_ast_node(f, Ast_DerefExpr);
    result->DerefExpr.expr = expr;
    result->DerefExpr.op = op;
    return result;
}

Ast *ast_matrix_index_expr(AstFile *f, Ast *expr, Token open, Token close, Token interval, Ast *row, Ast *column) {
    Ast *result = alloc_ast_node(f, Ast_MatrixIndexExpr);
    result->MatrixIndexExpr.expr = expr;
    result->MatrixIndexExpr.row_index = row;
    result->MatrixIndexExpr.column_index = column;
    result->MatrixIndexExpr.open = open;
    result->MatrixIndexExpr.close = close;
    return result;
}

Ast *ast_ident(AstFile *f, Token token) {
    Ast *result = alloc_ast_node(f, Ast_Ident);
    result->Ident.token = token;
    return result;
}

Ast *ast_implicit(AstFile *f, Token token) {
    Ast *result = alloc_ast_node(f, Ast_Implicit);
    result->Implicit = token;
    return result;
}

Ast *ast_undef(AstFile *f, Token token) {
    Ast *result = alloc_ast_node(f, Ast_Undef);
    result->Undef = token;
    return result;
}

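// Converts a literal token into an ExactValue, unquoting rune and string literals
// first and reporting a syntax error if the quoting is invalid.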
ExactValue exact_value_from_token(AstFile *f, Token const &token) {
    String s = token.string;
    switch (token.kind) {
    case Token_Rune:
        if (!unquote_string(ast_allocator(f), &s, 0)) {
            syntax_error(token, "Invalid rune literal");
        }
        break;
    case Token_String:
        if (!unquote_string(ast_allocator(f), &s, 0, s.text[0] == '`')) {
            syntax_error(token, "Invalid string literal");
        }
        break;
    }
    return exact_value_from_basic_literal(token.kind, s);
}

String string_value_from_token(AstFile *f, Token const &token) {
    ExactValue value = exact_value_from_token(f, token);
    String str = {};
    if (value.kind == ExactValue_String) {
        str = value.value_string;
    }
    return str;
}

Ast *ast_basic_lit(AstFile *f, Token basic_lit) {
    Ast *result = alloc_ast_node(f, Ast_BasicLit);
    result->BasicLit.token = basic_lit;
    result->tav.mode = Addressing_Constant;
    result->tav.value = exact_value_from_token(f, basic_lit);
    return result;
}

Ast *ast_basic_directive(AstFile *f, Token token, Token name) {
    Ast *result = alloc_ast_node(f, Ast_BasicDirective);
    result->BasicDirective.token = token;
    result->BasicDirective.name = name;
    return result;
}

Ast *ast_ellipsis(AstFile *f, Token token, Ast *expr) {
    Ast *result = alloc_ast_node(f, Ast_Ellipsis);
    result->Ellipsis.token = token;
    result->Ellipsis.expr = expr;
    return result;
}

Ast *ast_proc_group(AstFile *f, Token token, Token open, Token close, Array<Ast *> const &args) {
    Ast *result = alloc_ast_node(f, Ast_ProcGroup);
    result->ProcGroup.token = token;
    result->ProcGroup.open = open;
    result->ProcGroup.close = close;
    result->ProcGroup.args = slice_from_array(args);
    return result;
}

Ast *ast_proc_lit(AstFile *f, Ast *type, Ast *body, u64 tags, Token where_token, Array<Ast *> const &where_clauses) {
    Ast *result = alloc_ast_node(f, Ast_ProcLit);
    result->ProcLit.type = type;
    result->ProcLit.body = body;
    result->ProcLit.tags = tags;
    result->ProcLit.where_token = where_token;
    result->ProcLit.where_clauses = slice_from_array(where_clauses);
    return result;
}

Ast *ast_field_value(AstFile *f, Ast *field, Ast *value, Token eq) {
    Ast *result = alloc_ast_node(f, Ast_FieldValue);
    result->FieldValue.field = field;
    result->FieldValue.value = value;
    result->FieldValue.eq = eq;
    return result;
}

Ast *ast_enum_field_value(AstFile *f, Ast *name, Ast *value, CommentGroup *docs, CommentGroup *comment) {
    Ast *result = alloc_ast_node(f, Ast_EnumFieldValue);
    result->EnumFieldValue.name = name;
    result->EnumFieldValue.value = value;
    result->EnumFieldValue.docs = docs;
    result->EnumFieldValue.comment = comment;
    return result;
}

Ast *ast_compound_lit(AstFile *f, Ast *type, Array<Ast *> const &elems, Token open, Token close) {
    Ast *result = alloc_ast_node(f, Ast_CompoundLit);
    result->CompoundLit.type = type;
    result->CompoundLit.elems = slice_from_array(elems);
    result->CompoundLit.open = open;
    result->CompoundLit.close = close;
    return result;
}

Ast *ast_ternary_if_expr(AstFile *f, Ast *x, Ast *cond, Ast *y) {
    Ast *result = alloc_ast_node(f, Ast_TernaryIfExpr);
    result->TernaryIfExpr.x = x;
    result->TernaryIfExpr.cond = cond;
    result->TernaryIfExpr.y = y;
    return result;
}

Ast *ast_ternary_when_expr(AstFile *f, Ast *x, Ast *cond, Ast *y) {
    Ast *result = alloc_ast_node(f, Ast_TernaryWhenExpr);
    result->TernaryWhenExpr.x = x;
    result->TernaryWhenExpr.cond = cond;
    result->TernaryWhenExpr.y = y;
    return result;
}

Ast *ast_or_else_expr(AstFile *f, Ast *x, Token const &token, Ast *y) {
    Ast *result = alloc_ast_node(f, Ast_OrElseExpr);
    result->OrElseExpr.x = x;
    result->OrElseExpr.token = token;
    result->OrElseExpr.y = y;
    return result;
}

Ast *ast_or_return_expr(AstFile *f, Ast *expr, Token const &token) {
    Ast *result = alloc_ast_node(f, Ast_OrReturnExpr);
    result->OrReturnExpr.expr = expr;
    result->OrReturnExpr.token = token;
    return result;
}

Ast *ast_type_assertion(AstFile *f, Ast *expr, Token dot, Ast *type) {
    Ast *result = alloc_ast_node(f, Ast_TypeAssertion);
    result->TypeAssertion.expr = expr;
    result->TypeAssertion.dot = dot;
    result->TypeAssertion.type = type;
    return result;
}

Ast *ast_type_cast(AstFile *f, Token token, Ast *type, Ast *expr) {
    Ast *result = alloc_ast_node(f, Ast_TypeCast);
    result->TypeCast.token = token;
    result->TypeCast.type = type;
    result->TypeCast.expr = expr;
    return result;
}

Ast *ast_auto_cast(AstFile *f, Token token, Ast *expr) {
    Ast *result = alloc_ast_node(f, Ast_AutoCast);
    result->AutoCast.token = token;
    result->AutoCast.expr = expr;
    return result;
}

Ast *ast_inline_asm_expr(AstFile *f, Token token, Token open, Token close,
                         Array<Ast *> const &param_types,
                         Ast *return_type,
                         Ast *asm_string,
                         Ast *constraints_string,
                         bool has_side_effects,
                         bool is_align_stack,
                         InlineAsmDialectKind dialect) {
    Ast *result = alloc_ast_node(f, Ast_InlineAsmExpr);
    result->InlineAsmExpr.token = token;
    result->InlineAsmExpr.open = open;
    result->InlineAsmExpr.close = close;
    result->InlineAsmExpr.param_types = slice_from_array(param_types);
    result->InlineAsmExpr.return_type = return_type;
    result->InlineAsmExpr.asm_string = asm_string;
    result->InlineAsmExpr.constraints_string = constraints_string;
    result->InlineAsmExpr.has_side_effects = has_side_effects;
    result->InlineAsmExpr.is_align_stack = is_align_stack;
    result->InlineAsmExpr.dialect = dialect;
    return result;
}

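// Statement node constructors.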
Ast *ast_bad_stmt(AstFile *f, Token begin, Token end) {
    Ast *result = alloc_ast_node(f, Ast_BadStmt);
    result->BadStmt.begin = begin;
    result->BadStmt.end = end;
    return result;
}

Ast *ast_empty_stmt(AstFile *f, Token token) {
    Ast *result = alloc_ast_node(f, Ast_EmptyStmt);
    result->EmptyStmt.token = token;
    return result;
}

Ast *ast_expr_stmt(AstFile *f, Ast *expr) {
    Ast *result = alloc_ast_node(f, Ast_ExprStmt);
    result->ExprStmt.expr = expr;
    return result;
}

Ast *ast_assign_stmt(AstFile *f, Token op, Array<Ast *> const &lhs, Array<Ast *> const &rhs) {
    Ast *result = alloc_ast_node(f, Ast_AssignStmt);
    result->AssignStmt.op = op;
    result->AssignStmt.lhs = slice_from_array(lhs);
    result->AssignStmt.rhs = slice_from_array(rhs);
    return result;
}

Ast *ast_block_stmt(AstFile *f, Array<Ast *> const &stmts, Token open, Token close) {
    Ast *result = alloc_ast_node(f, Ast_BlockStmt);
    result->BlockStmt.stmts = slice_from_array(stmts);
    result->BlockStmt.open = open;
    result->BlockStmt.close = close;
    return result;
}

Ast *ast_if_stmt(AstFile *f, Token token, Ast *init, Ast *cond, Ast *body, Ast *else_stmt) {
    Ast *result = alloc_ast_node(f, Ast_IfStmt);
    result->IfStmt.token = token;
    result->IfStmt.init = init;
    result->IfStmt.cond = cond;
    result->IfStmt.body = body;
    result->IfStmt.else_stmt = else_stmt;
    return result;
}

Ast *ast_when_stmt(AstFile *f, Token token, Ast *cond, Ast *body, Ast *else_stmt) {
    Ast *result = alloc_ast_node(f, Ast_WhenStmt);
    result->WhenStmt.token = token;
    result->WhenStmt.cond = cond;
    result->WhenStmt.body = body;
    result->WhenStmt.else_stmt = else_stmt;
    return result;
}

Ast *ast_return_stmt(AstFile *f, Token token, Array<Ast *> const &results) {
    Ast *result = alloc_ast_node(f, Ast_ReturnStmt);
    result->ReturnStmt.token = token;
    result->ReturnStmt.results = slice_from_array(results);
    return result;
}

Ast *ast_for_stmt(AstFile *f, Token token, Ast *init, Ast *cond, Ast *post, Ast *body) {
    Ast *result = alloc_ast_node(f, Ast_ForStmt);
    result->ForStmt.token = token;
    result->ForStmt.init = init;
    result->ForStmt.cond = cond;
    result->ForStmt.post = post;
    result->ForStmt.body = body;
    return result;
}

Ast *ast_range_stmt(AstFile *f, Token token, Slice<Ast *> vals, Token in_token, Ast *expr, Ast *body) {
    Ast *result = alloc_ast_node(f, Ast_RangeStmt);
    result->RangeStmt.token = token;
    result->RangeStmt.vals = vals;
    result->RangeStmt.in_token = in_token;
    result->RangeStmt.expr = expr;
    result->RangeStmt.body = body;
    return result;
}

Ast *ast_unroll_range_stmt(AstFile *f, Token unroll_token, Token for_token, Ast *val0, Ast *val1, Token in_token, Ast *expr, Ast *body) {
    Ast *result = alloc_ast_node(f, Ast_UnrollRangeStmt);
    result->UnrollRangeStmt.unroll_token = unroll_token;
    result->UnrollRangeStmt.for_token = for_token;
    result->UnrollRangeStmt.val0 = val0;
    result->UnrollRangeStmt.val1 = val1;
    result->UnrollRangeStmt.in_token = in_token;
    result->UnrollRangeStmt.expr = expr;
    result->UnrollRangeStmt.body = body;
    return result;
}

Ast *ast_switch_stmt(AstFile *f, Token token, Ast *init, Ast *tag, Ast *body) {
    Ast *result = alloc_ast_node(f, Ast_SwitchStmt);
    result->SwitchStmt.token = token;
    result->SwitchStmt.init = init;
    result->SwitchStmt.tag = tag;
    result->SwitchStmt.body = body;
    result->SwitchStmt.partial = false;
    return result;
}

Ast *ast_type_switch_stmt(AstFile *f, Token token, Ast *tag, Ast *body) {
    Ast *result = alloc_ast_node(f, Ast_TypeSwitchStmt);
    result->TypeSwitchStmt.token = token;
    result->TypeSwitchStmt.tag = tag;
    result->TypeSwitchStmt.body = body;
    result->TypeSwitchStmt.partial = false;
    return result;
}

Ast *ast_case_clause(AstFile *f, Token token, Array<Ast *> const &list, Array<Ast *> const &stmts) {
    Ast *result = alloc_ast_node(f, Ast_CaseClause);
    result->CaseClause.token = token;
    result->CaseClause.list = slice_from_array(list);
    result->CaseClause.stmts = slice_from_array(stmts);
    return result;
}

Ast *ast_defer_stmt(AstFile *f, Token token, Ast *stmt) {
    Ast *result = alloc_ast_node(f, Ast_DeferStmt);
    result->DeferStmt.token = token;
    result->DeferStmt.stmt = stmt;
    return result;
}

Ast *ast_branch_stmt(AstFile *f, Token token, Ast *label) {
    Ast *result = alloc_ast_node(f, Ast_BranchStmt);
    result->BranchStmt.token = token;
    result->BranchStmt.label = label;
    return result;
}

Ast *ast_using_stmt(AstFile *f, Token token, Array<Ast *> const &list) {
    Ast *result = alloc_ast_node(f, Ast_UsingStmt);
    result->UsingStmt.token = token;
    result->UsingStmt.list = slice_from_array(list);
    return result;
}

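// Declaration and type node constructors.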
Ast *ast_bad_decl(AstFile *f, Token begin, Token end) {
    Ast *result = alloc_ast_node(f, Ast_BadDecl);
    result->BadDecl.begin = begin;
    result->BadDecl.end = end;
    return result;
}

Ast *ast_field(AstFile *f, Array<Ast *> const &names, Ast *type, Ast *default_value, u32 flags, Token tag,
               CommentGroup *docs, CommentGroup *comment) {
    Ast *result = alloc_ast_node(f, Ast_Field);
    result->Field.names = slice_from_array(names);
    result->Field.type = type;
    result->Field.default_value = default_value;
    result->Field.flags = flags;
    result->Field.tag = tag;
    result->Field.docs = docs;
    result->Field.comment = comment;
    return result;
}

Ast *ast_field_list(AstFile *f, Token token, Array<Ast *> const &list) {
    Ast *result = alloc_ast_node(f, Ast_FieldList);
    result->FieldList.token = token;
    result->FieldList.list = slice_from_array(list);
    return result;
}

Ast *ast_typeid_type(AstFile *f, Token token, Ast *specialization) {
    Ast *result = alloc_ast_node(f, Ast_TypeidType);
    result->TypeidType.token = token;
    result->TypeidType.specialization = specialization;
    return result;
}

Ast *ast_helper_type(AstFile *f, Token token, Ast *type) {
    Ast *result = alloc_ast_node(f, Ast_HelperType);
    result->HelperType.token = token;
    result->HelperType.type = type;
    return result;
}

Ast *ast_distinct_type(AstFile *f, Token token, Ast *type) {
    Ast *result = alloc_ast_node(f, Ast_DistinctType);
    result->DistinctType.token = token;
    result->DistinctType.type = type;
    return result;
}

Ast *ast_poly_type(AstFile *f, Token token, Ast *type, Ast *specialization) {
    Ast *result = alloc_ast_node(f, Ast_PolyType);
    result->PolyType.token = token;
    result->PolyType.type = type;
    result->PolyType.specialization = specialization;
    return result;
}

Ast *ast_proc_type(AstFile *f, Token token, Ast *params, Ast *results, u64 tags, ProcCallingConvention calling_convention, bool generic, bool diverging) {
    Ast *result = alloc_ast_node(f, Ast_ProcType);
    result->ProcType.token = token;
    result->ProcType.params = params;
    result->ProcType.results = results;
    result->ProcType.tags = tags;
    result->ProcType.calling_convention = calling_convention;
    result->ProcType.generic = generic;
    result->ProcType.diverging = diverging;
    return result;
}

Ast *ast_relative_type(AstFile *f, Ast *tag, Ast *type) {
    Ast *result = alloc_ast_node(f, Ast_RelativeType);
    result->RelativeType.tag = tag;
    result->RelativeType.type = type;
    return result;
}

Ast *ast_pointer_type(AstFile *f, Token token, Ast *type) {
    Ast *result = alloc_ast_node(f, Ast_PointerType);
    result->PointerType.token = token;
    result->PointerType.type = type;
    return result;
}

Ast *ast_multi_pointer_type(AstFile *f, Token token, Ast *type) {
    Ast *result = alloc_ast_node(f, Ast_MultiPointerType);
    result->MultiPointerType.token = token;
    result->MultiPointerType.type = type;
    return result;
}

Ast *ast_array_type(AstFile *f, Token token, Ast *count, Ast *elem) {
    Ast *result = alloc_ast_node(f, Ast_ArrayType);
    result->ArrayType.token = token;
    result->ArrayType.count = count;
    result->ArrayType.elem = elem;
    return result;
}

Ast *ast_dynamic_array_type(AstFile *f, Token token, Ast *elem) {
    Ast *result = alloc_ast_node(f, Ast_DynamicArrayType);
    result->DynamicArrayType.token = token;
    result->DynamicArrayType.elem = elem;
    return result;
}

Ast *ast_struct_type(AstFile *f, Token token, Slice<Ast *> fields, isize field_count,
                     Ast *polymorphic_params, bool is_packed, bool is_raw_union,
                     Ast *align,
                     Token where_token, Array<Ast *> const &where_clauses) {
    Ast *result = alloc_ast_node(f, Ast_StructType);
    result->StructType.token = token;
    result->StructType.fields = fields;
    result->StructType.field_count = field_count;
    result->StructType.polymorphic_params = polymorphic_params;
    result->StructType.is_packed = is_packed;
    result->StructType.is_raw_union = is_raw_union;
    result->StructType.align = align;
    result->StructType.where_token = where_token;
    result->StructType.where_clauses = slice_from_array(where_clauses);
  943. return result;
  944. }
  945. Ast *ast_union_type(AstFile *f, Token token, Array<Ast *> const &variants, Ast *polymorphic_params, Ast *align, UnionTypeKind kind,
  946. Token where_token, Array<Ast *> const &where_clauses) {
  947. Ast *result = alloc_ast_node(f, Ast_UnionType);
  948. result->UnionType.token = token;
  949. result->UnionType.variants = slice_from_array(variants);
  950. result->UnionType.polymorphic_params = polymorphic_params;
  951. result->UnionType.align = align;
  952. result->UnionType.kind = kind;
  953. result->UnionType.where_token = where_token;
  954. result->UnionType.where_clauses = slice_from_array(where_clauses);
  955. return result;
  956. }
  957. Ast *ast_enum_type(AstFile *f, Token token, Ast *base_type, Array<Ast *> const &fields) {
  958. Ast *result = alloc_ast_node(f, Ast_EnumType);
  959. result->EnumType.token = token;
  960. result->EnumType.base_type = base_type;
  961. result->EnumType.fields = slice_from_array(fields);
  962. return result;
  963. }
  964. Ast *ast_bit_set_type(AstFile *f, Token token, Ast *elem, Ast *underlying) {
  965. Ast *result = alloc_ast_node(f, Ast_BitSetType);
  966. result->BitSetType.token = token;
  967. result->BitSetType.elem = elem;
  968. result->BitSetType.underlying = underlying;
  969. return result;
  970. }
  971. Ast *ast_map_type(AstFile *f, Token token, Ast *key, Ast *value) {
  972. Ast *result = alloc_ast_node(f, Ast_MapType);
  973. result->MapType.token = token;
  974. result->MapType.key = key;
  975. result->MapType.value = value;
  976. return result;
  977. }
  978. Ast *ast_matrix_type(AstFile *f, Token token, Ast *row_count, Ast *column_count, Ast *elem) {
  979. Ast *result = alloc_ast_node(f, Ast_MatrixType);
  980. result->MatrixType.token = token;
  981. result->MatrixType.row_count = row_count;
  982. result->MatrixType.column_count = column_count;
  983. result->MatrixType.elem = elem;
  984. return result;
  985. }
  986. Ast *ast_foreign_block_decl(AstFile *f, Token token, Ast *foreign_library, Ast *body,
  987. CommentGroup *docs) {
  988. Ast *result = alloc_ast_node(f, Ast_ForeignBlockDecl);
  989. result->ForeignBlockDecl.token = token;
  990. result->ForeignBlockDecl.foreign_library = foreign_library;
  991. result->ForeignBlockDecl.body = body;
  992. result->ForeignBlockDecl.docs = docs;
  993. result->ForeignBlockDecl.attributes.allocator = heap_allocator();
  994. return result;
  995. }
  996. Ast *ast_label_decl(AstFile *f, Token token, Ast *name) {
  997. Ast *result = alloc_ast_node(f, Ast_Label);
  998. result->Label.token = token;
  999. result->Label.name = name;
  1000. return result;
  1001. }
  1002. Ast *ast_value_decl(AstFile *f, Array<Ast *> const &names, Ast *type, Array<Ast *> const &values, bool is_mutable,
  1003. CommentGroup *docs, CommentGroup *comment) {
  1004. Ast *result = alloc_ast_node(f, Ast_ValueDecl);
  1005. result->ValueDecl.names = slice_from_array(names);
  1006. result->ValueDecl.type = type;
  1007. result->ValueDecl.values = slice_from_array(values);
  1008. result->ValueDecl.is_mutable = is_mutable;
  1009. result->ValueDecl.docs = docs;
  1010. result->ValueDecl.comment = comment;
  1011. result->ValueDecl.attributes.allocator = heap_allocator();
  1012. return result;
  1013. }
  1014. Ast *ast_package_decl(AstFile *f, Token token, Token name, CommentGroup *docs, CommentGroup *comment) {
  1015. Ast *result = alloc_ast_node(f, Ast_PackageDecl);
  1016. result->PackageDecl.token = token;
  1017. result->PackageDecl.name = name;
  1018. result->PackageDecl.docs = docs;
  1019. result->PackageDecl.comment = comment;
  1020. return result;
  1021. }
  1022. Ast *ast_import_decl(AstFile *f, Token token, bool is_using, Token relpath, Token import_name,
  1023. CommentGroup *docs, CommentGroup *comment) {
  1024. Ast *result = alloc_ast_node(f, Ast_ImportDecl);
  1025. result->ImportDecl.token = token;
  1026. result->ImportDecl.is_using = is_using;
  1027. result->ImportDecl.relpath = relpath;
  1028. result->ImportDecl.import_name = import_name;
  1029. result->ImportDecl.docs = docs;
  1030. result->ImportDecl.comment = comment;
  1031. return result;
  1032. }
  1033. Ast *ast_foreign_import_decl(AstFile *f, Token token, Array<Token> filepaths, Token library_name,
  1034. CommentGroup *docs, CommentGroup *comment) {
  1035. Ast *result = alloc_ast_node(f, Ast_ForeignImportDecl);
  1036. result->ForeignImportDecl.token = token;
  1037. result->ForeignImportDecl.filepaths = slice_from_array(filepaths);
  1038. result->ForeignImportDecl.library_name = library_name;
  1039. result->ForeignImportDecl.docs = docs;
  1040. result->ForeignImportDecl.comment = comment;
  1041. result->ForeignImportDecl.attributes.allocator = heap_allocator();
  1042. return result;
  1043. }
  1044. Ast *ast_attribute(AstFile *f, Token token, Token open, Token close, Array<Ast *> const &elems) {
  1045. Ast *result = alloc_ast_node(f, Ast_Attribute);
  1046. result->Attribute.token = token;
  1047. result->Attribute.open = open;
  1048. result->Attribute.elems = slice_from_array(elems);
  1049. result->Attribute.close = close;
  1050. return result;
  1051. }
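// Raw cursor advance over f->tokens. Unlike advance_token() below, this does
// not group comments or skip newline "semicolons"; it only moves the index and
// reports failure at end of file.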
  1052. bool next_token0(AstFile *f) {
  1053. if (f->curr_token_index+1 < f->tokens.count) {
  1054. f->curr_token = f->tokens[++f->curr_token_index];
  1055. return true;
  1056. }
  1057. syntax_error(f->curr_token, "Token is EOF");
  1058. return false;
  1059. }
  1060. Token consume_comment(AstFile *f, isize *end_line_) {
  1061. Token tok = f->curr_token;
  1062. GB_ASSERT(tok.kind == Token_Comment);
  1063. isize end_line = tok.pos.line;
  1064. if (tok.string[1] == '*') {
  1065. for (isize i = 2; i < tok.string.len; i++) {
  1066. if (tok.string[i] == '\n') {
  1067. end_line++;
  1068. }
  1069. }
  1070. }
1071. next_token0(f);
1072. if (f->curr_token.pos.line > tok.pos.line || tok.kind == Token_EOF) {
1073. end_line++;
1074. }
1075. if (end_line_) *end_line_ = end_line;
  1076. return tok;
  1077. }
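// Collects consecutive comment tokens into one CommentGroup. 'n' is how many
// lines past the group's current end a comment may start and still be joined
// (n == 1 allows comments on directly adjacent lines). Each group is also
// appended to f->comments so later passes can see every comment in the file.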
  1078. CommentGroup *consume_comment_group(AstFile *f, isize n, isize *end_line_) {
  1079. Array<Token> list = {};
  1080. list.allocator = heap_allocator();
  1081. isize end_line = f->curr_token.pos.line;
  1082. if (f->curr_token_index == 1 &&
  1083. f->prev_token.kind == Token_Comment &&
  1084. f->prev_token.pos.line+1 == f->curr_token.pos.line) {
  1085. // NOTE(bill): Special logic for the first comment in the file
  1086. array_add(&list, f->prev_token);
  1087. }
  1088. while (f->curr_token.kind == Token_Comment &&
  1089. f->curr_token.pos.line <= end_line+n) {
  1090. array_add(&list, consume_comment(f, &end_line));
  1091. }
  1092. if (end_line_) *end_line_ = end_line;
  1093. CommentGroup *comments = nullptr;
  1094. if (list.count > 0) {
  1095. comments = gb_alloc_item(permanent_allocator(), CommentGroup);
  1096. comments->list = slice_from_array(list);
  1097. array_add(&f->comments, comments);
  1098. }
  1099. return comments;
  1100. }
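// Consumes every comment group following 'prev' and classifies the results:
// a group starting on the same line as 'prev' is recorded as f->line_comment
// (a trailing comment on that line), while the last group ending directly
// above the next real token becomes f->lead_comment (documentation for the
// declaration that follows).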
  1101. void consume_comment_groups(AstFile *f, Token prev) {
  1102. if (f->curr_token.kind == Token_Comment) {
  1103. CommentGroup *comment = nullptr;
  1104. isize end_line = 0;
  1105. if (f->curr_token.pos.line == prev.pos.line) {
  1106. comment = consume_comment_group(f, 0, &end_line);
  1107. if (f->curr_token.pos.line != end_line || f->curr_token.kind == Token_EOF) {
  1108. f->line_comment = comment;
  1109. }
  1110. }
  1111. end_line = -1;
  1112. while (f->curr_token.kind == Token_Comment) {
  1113. comment = consume_comment_group(f, 1, &end_line);
  1114. }
  1115. if (end_line+1 == f->curr_token.pos.line || end_line < 0) {
  1116. f->lead_comment = comment;
  1117. }
  1118. GB_ASSERT(f->curr_token.kind != Token_Comment);
  1119. }
  1120. }
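// Newlines are lexed as implicit semicolons (Token_Semicolon with string "\n").
// This predicate tells advance_token() whether such a newline may be skipped in
// the current context, based on f->allow_newline and the expression nesting
// depth f->expr_level.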
  1121. bool ignore_newlines(AstFile *f) {
  1122. if (f->allow_newline) {
  1123. return f->expr_level > 0;
  1124. }
  1125. return f->expr_level >= 0;
  1126. }
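// Advances to the next significant token: consumes any comment groups (filling
// f->lead_comment/f->line_comment) and, when ignore_newlines() allows it, skips
// newline "semicolons". Returns the token that was current before advancing.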
  1127. Token advance_token(AstFile *f) {
  1128. f->lead_comment = nullptr;
  1129. f->line_comment = nullptr;
  1130. f->prev_token_index = f->curr_token_index;
  1131. Token prev = f->prev_token = f->curr_token;
  1132. bool ok = next_token0(f);
  1133. if (ok) {
  1134. switch (f->curr_token.kind) {
  1135. case Token_Comment:
  1136. consume_comment_groups(f, prev);
  1137. break;
  1138. case Token_Semicolon:
  1139. if (ignore_newlines(f) && f->curr_token.string == "\n") {
  1140. advance_token(f);
  1141. }
  1142. break;
  1143. }
  1144. }
  1145. return prev;
  1146. }
  1147. bool peek_token_kind(AstFile *f, TokenKind kind) {
  1148. for (isize i = f->curr_token_index+1; i < f->tokens.count; i++) {
  1149. Token tok = f->tokens[i];
  1150. if (kind != Token_Comment && tok.kind == Token_Comment) {
  1151. continue;
  1152. }
  1153. return tok.kind == kind;
  1154. }
  1155. return false;
  1156. }
  1157. Token peek_token(AstFile *f) {
  1158. for (isize i = f->curr_token_index+1; i < f->tokens.count; i++) {
  1159. Token tok = f->tokens[i];
  1160. if (tok.kind == Token_Comment) {
  1161. continue;
  1162. }
  1163. return tok;
  1164. }
  1165. return {};
  1166. }
  1167. bool skip_possible_newline(AstFile *f) {
  1168. if (token_is_newline(f->curr_token)) {
  1169. advance_token(f);
  1170. return true;
  1171. }
  1172. return false;
  1173. }
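// Lets the '{' of a body/literal, or a following 'else'/'where' clause, sit on
// the line directly after its header without the intervening newline being
// treated as a statement terminator. At most one newline is skipped.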
  1174. bool skip_possible_newline_for_literal(AstFile *f) {
  1175. Token curr = f->curr_token;
  1176. if (token_is_newline(curr)) {
  1177. Token next = peek_token(f);
  1178. if (curr.pos.line+1 >= next.pos.line) {
  1179. switch (next.kind) {
  1180. case Token_OpenBrace:
  1181. case Token_else:
  1182. case Token_where:
  1183. advance_token(f);
  1184. return true;
  1185. }
  1186. }
  1187. }
  1188. return false;
  1189. }
  1190. String token_to_string(Token const &tok) {
  1191. String p = token_strings[tok.kind];
  1192. if (token_is_newline(tok)) {
  1193. p = str_lit("newline");
  1194. }
  1195. return p;
  1196. }
  1197. Token expect_token(AstFile *f, TokenKind kind) {
  1198. Token prev = f->curr_token;
  1199. if (prev.kind != kind) {
  1200. String c = token_strings[kind];
  1201. String p = token_to_string(prev);
  1202. syntax_error(f->curr_token, "Expected '%.*s', got '%.*s'", LIT(c), LIT(p));
  1203. if (prev.kind == Token_EOF) {
  1204. gb_exit(1);
  1205. }
  1206. }
  1207. advance_token(f);
  1208. return prev;
  1209. }
  1210. Token expect_token_after(AstFile *f, TokenKind kind, char const *msg) {
  1211. Token prev = f->curr_token;
  1212. if (prev.kind != kind) {
  1213. String p = token_to_string(prev);
  1214. syntax_error(f->curr_token, "Expected '%.*s' after %s, got '%.*s'",
  1215. LIT(token_strings[kind]),
  1216. msg,
  1217. LIT(p));
  1218. }
  1219. advance_token(f);
  1220. return prev;
  1221. }
  1222. bool is_token_range(TokenKind kind) {
  1223. switch (kind) {
  1224. case Token_Ellipsis:
  1225. case Token_RangeFull:
  1226. case Token_RangeHalf:
  1227. return true;
  1228. }
  1229. return false;
  1230. }
  1231. bool is_token_range(Token tok) {
  1232. return is_token_range(tok.kind);
  1233. }
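// Consumes the current token, expecting something usable as a binary operator.
// A few keywords are accepted in specific contexts: 'in'/'not_in' inside
// expressions (or when f->allow_in_expr is set), 'if'/'when' for the ternary
// forms, and 'or_else'/'or_return'. Range operators (e.g. '..<', '..=') are
// only valid when f->allow_range has been set by the caller (see parse_value()
// and the bit_set element parsing below).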
  1234. Token expect_operator(AstFile *f) {
  1235. Token prev = f->curr_token;
  1236. if ((prev.kind == Token_in || prev.kind == Token_not_in) && (f->expr_level >= 0 || f->allow_in_expr)) {
  1237. // okay
  1238. } else if (prev.kind == Token_if || prev.kind == Token_when) {
  1239. // okay
  1240. } else if (prev.kind == Token_or_else || prev.kind == Token_or_return) {
  1241. // okay
  1242. } else if (!gb_is_between(prev.kind, Token__OperatorBegin+1, Token__OperatorEnd-1)) {
  1243. String p = token_to_string(prev);
  1244. syntax_error(f->curr_token, "Expected an operator, got '%.*s'",
  1245. LIT(p));
  1246. } else if (!f->allow_range && is_token_range(prev)) {
  1247. String p = token_to_string(prev);
1248. syntax_error(f->curr_token, "Expected a non-range operator, got '%.*s'",
  1249. LIT(p));
  1250. }
  1251. if (f->curr_token.kind == Token_Ellipsis) {
  1252. f->tokens[f->curr_token_index].flags |= TokenFlag_Replace;
  1253. }
  1254. advance_token(f);
  1255. return prev;
  1256. }
  1257. Token expect_keyword(AstFile *f) {
  1258. Token prev = f->curr_token;
  1259. if (!gb_is_between(prev.kind, Token__KeywordBegin+1, Token__KeywordEnd-1)) {
  1260. String p = token_to_string(prev);
  1261. syntax_error(f->curr_token, "Expected a keyword, got '%.*s'",
  1262. LIT(p));
  1263. }
  1264. advance_token(f);
  1265. return prev;
  1266. }
  1267. bool allow_token(AstFile *f, TokenKind kind) {
  1268. Token prev = f->curr_token;
  1269. if (prev.kind == kind) {
  1270. advance_token(f);
  1271. return true;
  1272. }
  1273. return false;
  1274. }
  1275. Token expect_closing_brace_of_field_list(AstFile *f) {
  1276. Token token = f->curr_token;
  1277. if (allow_token(f, Token_CloseBrace)) {
  1278. return token;
  1279. }
  1280. if (allow_token(f, Token_Semicolon)) {
  1281. String p = token_to_string(token);
  1282. syntax_error(token_end_of_line(f, f->prev_token), "Expected a comma, got a %.*s", LIT(p));
  1283. }
  1284. return expect_token(f, Token_CloseBrace);
  1285. }
  1286. bool is_blank_ident(String str) {
  1287. if (str.len == 1) {
  1288. return str[0] == '_';
  1289. }
  1290. return false;
  1291. }
  1292. bool is_blank_ident(Token token) {
  1293. if (token.kind == Token_Ident) {
  1294. return is_blank_ident(token.string);
  1295. }
  1296. return false;
  1297. }
  1298. bool is_blank_ident(Ast *node) {
  1299. if (node->kind == Ast_Ident) {
  1300. ast_node(i, Ident, node);
  1301. return is_blank_ident(i->token.string);
  1302. }
  1303. return false;
  1304. }
  1305. // NOTE(bill): Go to next statement to prevent numerous error messages popping up
  1306. void fix_advance_to_next_stmt(AstFile *f) {
  1307. for (;;) {
  1308. Token t = f->curr_token;
  1309. switch (t.kind) {
  1310. case Token_EOF:
  1311. case Token_Semicolon:
  1312. return;
  1313. case Token_package:
  1314. case Token_foreign:
  1315. case Token_import:
  1316. case Token_if:
  1317. case Token_for:
  1318. case Token_when:
  1319. case Token_return:
  1320. case Token_switch:
  1321. case Token_defer:
  1322. case Token_using:
  1323. case Token_break:
  1324. case Token_continue:
  1325. case Token_fallthrough:
  1326. case Token_Hash:
  1327. {
  1328. if (t.pos == f->fix_prev_pos &&
  1329. f->fix_count < PARSER_MAX_FIX_COUNT) {
  1330. f->fix_count++;
  1331. return;
  1332. }
  1333. if (f->fix_prev_pos < t.pos) {
  1334. f->fix_prev_pos = t.pos;
  1335. f->fix_count = 0; // NOTE(bill): Reset
  1336. return;
  1337. }
  1338. // NOTE(bill): Reaching here means there is a parsing bug
  1339. } break;
  1340. }
  1341. advance_token(f);
  1342. }
  1343. }
  1344. Token expect_closing(AstFile *f, TokenKind kind, String context) {
  1345. if (f->curr_token.kind != kind &&
  1346. f->curr_token.kind == Token_Semicolon &&
  1347. (f->curr_token.string == "\n" || f->curr_token.kind == Token_EOF)) {
  1348. Token tok = f->prev_token;
  1349. tok.pos.column += cast(i32)tok.string.len;
  1350. syntax_error(tok, "Missing ',' before newline in %.*s", LIT(context));
  1351. advance_token(f);
  1352. }
  1353. return expect_token(f, kind);
  1354. }
  1355. void assign_removal_flag_to_semicolon(AstFile *f) {
  1356. // NOTE(bill): this is used for rewriting files to strip unneeded semicolons
  1357. Token *prev_token = &f->tokens[f->prev_token_index];
  1358. Token *curr_token = &f->tokens[f->curr_token_index];
  1359. GB_ASSERT(prev_token->kind == Token_Semicolon);
  1360. if (prev_token->string == ";") {
  1361. bool ok = false;
  1362. if (curr_token->pos.line > prev_token->pos.line) {
  1363. ok = true;
  1364. } else if (curr_token->pos.line == prev_token->pos.line) {
  1365. switch (curr_token->kind) {
  1366. case Token_CloseBrace:
  1367. case Token_CloseParen:
  1368. case Token_EOF:
  1369. ok = true;
  1370. break;
  1371. }
  1372. }
  1373. if (ok) {
  1374. if (build_context.strict_style) {
  1375. syntax_error(*prev_token, "Found unneeded semicolon");
  1376. } else if (build_context.strict_style_init_only && f->pkg->kind == Package_Init) {
  1377. syntax_error(*prev_token, "Found unneeded semicolon");
  1378. }
  1379. prev_token->flags |= TokenFlag_Remove;
  1380. }
  1381. }
  1382. }
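// Statement terminators are flexible: an explicit ';', an implicit newline
// semicolon, a closing '}' or ')' on the same line, or EOF all end a statement.
// Explicit ';' tokens that turn out to be unnecessary are flagged via
// assign_removal_flag_to_semicolon() so tooling can strip them.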
  1383. void expect_semicolon(AstFile *f) {
  1384. Token prev_token = {};
  1385. if (allow_token(f, Token_Semicolon)) {
  1386. assign_removal_flag_to_semicolon(f);
  1387. return;
  1388. }
  1389. switch (f->curr_token.kind) {
  1390. case Token_CloseBrace:
  1391. case Token_CloseParen:
  1392. if (f->curr_token.pos.line == f->prev_token.pos.line) {
  1393. return;
  1394. }
  1395. break;
  1396. }
  1397. prev_token = f->prev_token;
  1398. if (prev_token.kind == Token_Semicolon) {
  1399. assign_removal_flag_to_semicolon(f);
  1400. return;
  1401. }
  1402. if (f->curr_token.kind == Token_EOF) {
  1403. return;
  1404. }
  1409. if (f->curr_token.pos.line == f->prev_token.pos.line) {
  1410. String p = token_to_string(f->curr_token);
  1411. prev_token.pos = token_pos_end(prev_token);
  1412. syntax_error(prev_token, "Expected ';', got %.*s", LIT(p));
  1413. fix_advance_to_next_stmt(f);
  1414. }
  1415. }
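// Forward declarations for the mutually recursive descent routines defined
// further below.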
  1416. Ast * parse_expr(AstFile *f, bool lhs);
  1417. Ast * parse_proc_type(AstFile *f, Token proc_token);
  1418. Array<Ast *> parse_stmt_list(AstFile *f);
  1419. Ast * parse_stmt(AstFile *f);
  1420. Ast * parse_body(AstFile *f);
  1421. Ast * parse_block_stmt(AstFile *f, b32 is_when);
  1422. Ast *parse_ident(AstFile *f, bool allow_poly_names=false) {
  1423. Token token = f->curr_token;
  1424. if (token.kind == Token_Ident) {
  1425. advance_token(f);
  1426. } else if (allow_poly_names && token.kind == Token_Dollar) {
  1427. Token dollar = expect_token(f, Token_Dollar);
  1428. Ast *name = ast_ident(f, expect_token(f, Token_Ident));
  1429. if (is_blank_ident(name)) {
  1430. syntax_error(name, "Invalid polymorphic type definition with a blank identifier");
  1431. }
  1432. return ast_poly_type(f, dollar, name, nullptr);
  1433. } else {
  1434. token.string = str_lit("_");
  1435. expect_token(f, Token_Ident);
  1436. }
  1437. return ast_ident(f, token);
  1438. }
  1439. Ast *parse_tag_expr(AstFile *f, Ast *expression) {
  1440. Token token = expect_token(f, Token_Hash);
  1441. Token name = expect_token(f, Token_Ident);
  1442. return ast_tag_expr(f, token, name, expression);
  1443. }
  1444. Ast *unparen_expr(Ast *node) {
  1445. for (;;) {
  1446. if (node == nullptr) {
  1447. return nullptr;
  1448. }
  1449. if (node->kind != Ast_ParenExpr) {
  1450. return node;
  1451. }
  1452. node = node->ParenExpr.expr;
  1453. }
  1454. }
  1455. Ast *unselector_expr(Ast *node) {
  1456. node = unparen_expr(node);
  1457. if (node == nullptr) {
  1458. return nullptr;
  1459. }
  1460. while (node->kind == Ast_SelectorExpr) {
  1461. node = node->SelectorExpr.selector;
  1462. }
  1463. return node;
  1464. }
  1465. Ast *strip_or_return_expr(Ast *node) {
  1466. for (;;) {
  1467. if (node == nullptr) {
  1468. return node;
  1469. }
  1470. if (node->kind == Ast_OrReturnExpr) {
  1471. node = node->OrReturnExpr.expr;
  1472. } else if (node->kind == Ast_ParenExpr) {
  1473. node = node->ParenExpr.expr;
  1474. } else {
  1475. return node;
  1476. }
  1477. }
  1478. }
  1479. Ast *parse_value(AstFile *f);
  1480. Array<Ast *> parse_element_list(AstFile *f) {
  1481. auto elems = array_make<Ast *>(heap_allocator());
  1482. while (f->curr_token.kind != Token_CloseBrace &&
  1483. f->curr_token.kind != Token_EOF) {
  1484. Ast *elem = parse_value(f);
  1485. if (f->curr_token.kind == Token_Eq) {
  1486. Token eq = expect_token(f, Token_Eq);
  1487. Ast *value = parse_value(f);
  1488. elem = ast_field_value(f, elem, value, eq);
  1489. }
  1490. array_add(&elems, elem);
  1491. if (!allow_token(f, Token_Comma)) {
  1492. break;
  1493. }
  1494. }
  1495. return elems;
  1496. }
  1497. CommentGroup *consume_line_comment(AstFile *f) {
  1498. CommentGroup *comment = f->line_comment;
  1499. if (f->line_comment == f->lead_comment) {
  1500. f->lead_comment = nullptr;
  1501. }
  1502. f->line_comment = nullptr;
  1503. return comment;
  1504. }
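// Parses the comma-separated fields of an enum body. Each field is either a
// bare name or `name = value`, and may carry a doc comment group before it and
// a trailing line comment after it, roughly (illustrative):
//
//     Apple,      // line comment attached to Apple
//     Banana = 3,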
  1505. Array<Ast *> parse_enum_field_list(AstFile *f) {
  1506. auto elems = array_make<Ast *>(heap_allocator());
  1507. while (f->curr_token.kind != Token_CloseBrace &&
  1508. f->curr_token.kind != Token_EOF) {
  1509. CommentGroup *docs = f->lead_comment;
  1510. CommentGroup *comment = nullptr;
  1511. Ast *name = parse_value(f);
  1512. Ast *value = nullptr;
  1513. if (f->curr_token.kind == Token_Eq) {
  1514. Token eq = expect_token(f, Token_Eq);
  1515. value = parse_value(f);
  1516. }
  1517. comment = consume_line_comment(f);
  1518. Ast *elem = ast_enum_field_value(f, name, value, docs, comment);
  1519. array_add(&elems, elem);
  1520. if (!allow_token(f, Token_Comma)) {
  1521. break;
  1522. }
  1523. if (!elem->EnumFieldValue.comment) {
  1524. elem->EnumFieldValue.comment = consume_line_comment(f);
  1525. }
  1526. }
  1527. return elems;
  1528. }
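// Parses the braced element list of a compound literal, e.g. `{1, 2, 3}` or
// `{x = 1, y = 2}`; any type prefix has already been parsed by the caller.
// f->expr_level is saved and reset to 0 around the element list, and
// expect_closing() reports a missing ',' if a newline appears before the
// closing brace.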
  1529. Ast *parse_literal_value(AstFile *f, Ast *type) {
  1530. Array<Ast *> elems = {};
  1531. Token open = expect_token(f, Token_OpenBrace);
  1532. isize expr_level = f->expr_level;
  1533. f->expr_level = 0;
  1534. if (f->curr_token.kind != Token_CloseBrace) {
  1535. elems = parse_element_list(f);
  1536. }
  1537. f->expr_level = expr_level;
  1538. Token close = expect_closing(f, Token_CloseBrace, str_lit("compound literal"));
  1539. return ast_compound_lit(f, type, elems, open, close);
  1540. }
  1541. Ast *parse_value(AstFile *f) {
  1542. if (f->curr_token.kind == Token_OpenBrace) {
  1543. return parse_literal_value(f, nullptr);
  1544. }
  1545. Ast *value;
  1546. bool prev_allow_range = f->allow_range;
  1547. f->allow_range = true;
  1548. value = parse_expr(f, false);
  1549. f->allow_range = prev_allow_range;
  1550. return value;
  1551. }
  1552. Ast *parse_type_or_ident(AstFile *f);
  1553. void check_proc_add_tag(AstFile *f, Ast *tag_expr, u64 *tags, ProcTag tag, String tag_name) {
  1554. if (*tags & tag) {
  1555. syntax_error(tag_expr, "Procedure tag already used: %.*s", LIT(tag_name));
  1556. }
  1557. *tags |= tag;
  1558. }
  1559. bool is_foreign_name_valid(String name) {
  1560. if (name.len == 0) {
  1561. return false;
  1562. }
  1563. isize offset = 0;
  1564. while (offset < name.len) {
  1565. Rune rune;
  1566. isize remaining = name.len - offset;
  1567. isize width = utf8_decode(name.text+offset, remaining, &rune);
  1568. if (rune == GB_RUNE_INVALID && width == 1) {
  1569. return false;
  1570. } else if (rune == GB_RUNE_BOM && remaining > 0) {
  1571. return false;
  1572. }
  1573. if (offset == 0) {
  1574. switch (rune) {
  1575. case '-':
  1576. case '$':
  1577. case '.':
  1578. case '_':
  1579. break;
  1580. default:
  1581. if (!gb_char_is_alpha(cast(char)rune))
  1582. return false;
  1583. break;
  1584. }
  1585. } else {
  1586. switch (rune) {
  1587. case '-':
  1588. case '$':
  1589. case '.':
  1590. case '_':
  1591. break;
  1592. default:
  1593. if (!gb_char_is_alphanumeric(cast(char)rune)) {
  1594. return false;
  1595. }
  1596. break;
  1597. }
  1598. }
  1599. offset += width;
  1600. }
  1601. return true;
  1602. }
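// Parses the '#tag' suffixes that may follow a procedure type, e.g.
// `proc(x: int) -> (int, bool) #optional_ok` (illustrative). Each recognised
// tag sets a bit in *tags; duplicates and the mutually exclusive pairs
// #bounds_check/#no_bounds_check and #type_assert/#no_type_assert are reported
// as errors.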
  1603. void parse_proc_tags(AstFile *f, u64 *tags) {
  1604. GB_ASSERT(tags != nullptr);
  1605. while (f->curr_token.kind == Token_Hash) {
  1606. Ast *tag_expr = parse_tag_expr(f, nullptr);
  1607. ast_node(te, TagExpr, tag_expr);
  1608. String tag_name = te->name.string;
  1609. #define ELSE_IF_ADD_TAG(name) \
  1610. else if (tag_name == #name) { \
  1611. check_proc_add_tag(f, tag_expr, tags, ProcTag_##name, tag_name); \
  1612. }
  1613. if (false) {}
  1614. ELSE_IF_ADD_TAG(optional_ok)
  1615. ELSE_IF_ADD_TAG(optional_second)
  1616. ELSE_IF_ADD_TAG(require_results)
  1617. ELSE_IF_ADD_TAG(bounds_check)
  1618. ELSE_IF_ADD_TAG(no_bounds_check)
  1619. ELSE_IF_ADD_TAG(type_assert)
  1620. ELSE_IF_ADD_TAG(no_type_assert)
  1621. else {
  1622. syntax_error(tag_expr, "Unknown procedure type tag #%.*s", LIT(tag_name));
  1623. }
  1624. #undef ELSE_IF_ADD_TAG
  1625. }
  1626. if ((*tags & ProcTag_bounds_check) && (*tags & ProcTag_no_bounds_check)) {
  1627. syntax_error(f->curr_token, "You cannot apply both #bounds_check and #no_bounds_check to a procedure");
  1628. }
  1629. if ((*tags & ProcTag_type_assert) && (*tags & ProcTag_no_type_assert)) {
  1630. syntax_error(f->curr_token, "You cannot apply both #type_assert and #no_type_assert to a procedure");
  1631. }
  1632. }
  1633. Array<Ast *> parse_lhs_expr_list (AstFile *f);
  1634. Array<Ast *> parse_rhs_expr_list (AstFile *f);
  1635. Ast * parse_simple_stmt (AstFile *f, u32 flags);
  1636. Ast * parse_type (AstFile *f);
  1637. Ast * parse_call_expr (AstFile *f, Ast *operand);
  1638. Ast * parse_struct_field_list(AstFile *f, isize *name_count_);
  1639. Ast *parse_field_list(AstFile *f, isize *name_count_, u32 allowed_flags, TokenKind follow, bool allow_default_parameters, bool allow_typeid_token);
  1640. Ast *parse_unary_expr(AstFile *f, bool lhs);
  1641. Ast *convert_stmt_to_expr(AstFile *f, Ast *statement, String kind) {
  1642. if (statement == nullptr) {
  1643. return nullptr;
  1644. }
  1645. if (statement->kind == Ast_ExprStmt) {
  1646. return statement->ExprStmt.expr;
  1647. }
  1648. syntax_error(f->curr_token, "Expected '%.*s', found a simple statement.", LIT(kind));
  1649. Token end = f->curr_token;
1650. if (f->curr_token_index+1 < f->tokens.count) {
  1651. end = f->tokens[f->curr_token_index+1];
  1652. }
  1653. return ast_bad_expr(f, f->curr_token, end);
  1654. }
  1655. Ast *convert_stmt_to_body(AstFile *f, Ast *stmt) {
  1656. if (stmt->kind == Ast_BlockStmt) {
  1657. syntax_error(stmt, "Expected a normal statement rather than a block statement");
  1658. return stmt;
  1659. }
  1660. if (stmt->kind == Ast_EmptyStmt) {
  1661. syntax_error(stmt, "Expected a non-empty statement");
  1662. }
  1663. GB_ASSERT(is_ast_stmt(stmt) || is_ast_decl(stmt));
  1664. Token open = ast_token(stmt);
  1665. Token close = ast_token(stmt);
  1666. auto stmts = array_make<Ast *>(heap_allocator(), 0, 1);
  1667. array_add(&stmts, stmt);
  1668. return ast_block_stmt(f, stmts, open, close);
  1669. }
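// Within each field of a struct/union polymorphic parameter list, all names
// must use the same form: either all '$'-prefixed or all plain identifiers.
// A mixture such as `$T, N` in one field is rejected here.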
  1670. void check_polymorphic_params_for_type(AstFile *f, Ast *polymorphic_params, Token token) {
  1671. if (polymorphic_params == nullptr) {
  1672. return;
  1673. }
  1674. if (polymorphic_params->kind != Ast_FieldList) {
  1675. return;
  1676. }
  1677. ast_node(fl, FieldList, polymorphic_params);
  1678. for_array(fi, fl->list) {
  1679. Ast *field = fl->list[fi];
  1680. if (field->kind != Ast_Field) {
  1681. continue;
  1682. }
  1683. for_array(i, field->Field.names) {
  1684. Ast *name = field->Field.names[i];
  1685. if (name->kind != field->Field.names[0]->kind) {
1686. syntax_error(name, "Mixture of polymorphic names using both '$' and non-'$' forms for %.*s parameters", LIT(token.string));
  1687. return;
  1688. }
  1689. }
  1690. }
  1691. }
  1692. bool ast_on_same_line(Token const &x, Ast *yp) {
  1693. Token y = ast_token(yp);
  1694. return x.pos.line == y.pos.line;
  1695. }
  1696. bool ast_on_same_line(Ast *x, Ast *y) {
  1697. return ast_on_same_line(ast_token(x), y);
  1698. }
  1699. Ast *parse_force_inlining_operand(AstFile *f, Token token) {
  1700. Ast *expr = parse_unary_expr(f, false);
  1701. Ast *e = strip_or_return_expr(expr);
  1702. if (e->kind != Ast_ProcLit && e->kind != Ast_CallExpr) {
  1703. syntax_error(expr, "%.*s must be followed by a procedure literal or call, got %.*s", LIT(token.string), LIT(ast_strings[expr->kind]));
  1704. return ast_bad_expr(f, token, f->curr_token);
  1705. }
  1706. ProcInlining pi = ProcInlining_none;
  1707. if (token.kind == Token_Ident) {
  1708. if (token.string == "force_inline") {
  1709. pi = ProcInlining_inline;
  1710. } else if (token.string == "force_no_inline") {
  1711. pi = ProcInlining_no_inline;
  1712. }
  1713. }
  1714. if (pi != ProcInlining_none) {
  1715. if (e->kind == Ast_ProcLit) {
  1716. if (expr->ProcLit.inlining != ProcInlining_none &&
  1717. expr->ProcLit.inlining != pi) {
  1718. syntax_error(expr, "Cannot apply both '#force_inline' and '#force_no_inline' to a procedure literal");
  1719. }
  1720. expr->ProcLit.inlining = pi;
  1721. } else if (e->kind == Ast_CallExpr) {
  1722. if (expr->CallExpr.inlining != ProcInlining_none &&
  1723. expr->CallExpr.inlining != pi) {
  1724. syntax_error(expr, "Cannot apply both '#force_inline' and '#force_no_inline' to a procedure call");
  1725. }
  1726. expr->CallExpr.inlining = pi;
  1727. }
  1728. }
  1729. return expr;
  1730. }
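// Validates a '#bounds_check', '#no_bounds_check', '#type_assert', or
// '#no_type_assert' directive applied to a statement, e.g. (illustrative)
//
//     #no_bounds_check for i in 0..<n { ... }
//
// The corresponding flag is recorded in s->state_flags; conflicting pairs,
// repeated directives, and unsupported statement kinds are syntax errors.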
  1731. Ast *parse_check_directive_for_statement(Ast *s, Token const &tag_token, u16 state_flag) {
  1732. String name = tag_token.string;
  1733. if (s == nullptr) {
  1734. syntax_error(tag_token, "Invalid operand for #%.*s", LIT(name));
  1735. return nullptr;
  1736. }
  1737. if (s != nullptr && s->kind == Ast_EmptyStmt) {
  1738. if (s->EmptyStmt.token.string == "\n") {
  1739. syntax_error(tag_token, "#%.*s cannot be followed by a newline", LIT(name));
  1740. } else {
  1741. syntax_error(tag_token, "#%.*s cannot be applied to an empty statement ';'", LIT(name));
  1742. }
  1743. }
  1744. if (s->state_flags & state_flag) {
  1745. syntax_error(tag_token, "#%.*s has been applied multiple times", LIT(name));
  1746. }
  1747. s->state_flags |= state_flag;
  1748. switch (state_flag) {
  1749. case StateFlag_bounds_check:
  1750. if ((s->state_flags & StateFlag_no_bounds_check) != 0) {
  1751. syntax_error(tag_token, "#bounds_check and #no_bounds_check cannot be applied together");
  1752. }
  1753. break;
  1754. case StateFlag_no_bounds_check:
  1755. if ((s->state_flags & StateFlag_bounds_check) != 0) {
  1756. syntax_error(tag_token, "#bounds_check and #no_bounds_check cannot be applied together");
  1757. }
  1758. break;
  1759. case StateFlag_type_assert:
  1760. if ((s->state_flags & StateFlag_no_type_assert) != 0) {
  1761. syntax_error(tag_token, "#type_assert and #no_type_assert cannot be applied together");
  1762. }
  1763. break;
  1764. case StateFlag_no_type_assert:
  1765. if ((s->state_flags & StateFlag_type_assert) != 0) {
  1766. syntax_error(tag_token, "#type_assert and #no_type_assert cannot be applied together");
  1767. }
  1768. break;
  1769. }
  1770. switch (state_flag) {
  1771. case StateFlag_bounds_check:
  1772. case StateFlag_no_bounds_check:
  1773. case StateFlag_type_assert:
  1774. case StateFlag_no_type_assert:
  1775. switch (s->kind) {
  1776. case Ast_BlockStmt:
  1777. case Ast_IfStmt:
  1778. case Ast_WhenStmt:
  1779. case Ast_ForStmt:
  1780. case Ast_RangeStmt:
  1781. case Ast_UnrollRangeStmt:
  1782. case Ast_SwitchStmt:
  1783. case Ast_TypeSwitchStmt:
  1784. case Ast_ReturnStmt:
  1785. case Ast_DeferStmt:
  1786. case Ast_AssignStmt:
  1787. // Okay
  1788. break;
  1789. case Ast_ValueDecl:
  1790. if (!s->ValueDecl.is_mutable) {
  1791. syntax_error(tag_token, "#%.*s may only be applied to a variable declaration, and not a constant value declaration", LIT(name));
  1792. }
  1793. break;
  1794. default:
  1795. syntax_error(tag_token, "#%.*s may only be applied to the following statements: '{}', 'if', 'when', 'for', 'switch', 'return', 'defer', assignment, variable declaration", LIT(name));
  1796. break;
  1797. }
  1798. break;
  1799. }
  1800. return s;
  1801. }
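// Parses a primary operand: identifiers, basic literals, parenthesised
// expressions, '#' directives, procedure types/literals/groups, polymorphic
// '$' names, and the built-in type forms (pointer, array/dynamic array/
// multi-pointer, map, matrix, struct, union, enum, bit_set, inline asm).
// Returns nullptr when the current token cannot start an operand; the caller
// (parse_atom_expr) reports the error.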
  1802. Ast *parse_operand(AstFile *f, bool lhs) {
  1803. Ast *operand = nullptr; // Operand
  1804. switch (f->curr_token.kind) {
  1805. case Token_Ident:
  1806. return parse_ident(f);
  1807. case Token_Undef:
  1808. return ast_undef(f, expect_token(f, Token_Undef));
  1809. case Token_context:
  1810. return ast_implicit(f, expect_token(f, Token_context));
  1811. case Token_Integer:
  1812. case Token_Float:
  1813. case Token_Imag:
  1814. case Token_Rune:
  1815. return ast_basic_lit(f, advance_token(f));
  1816. case Token_String:
  1817. return ast_basic_lit(f, advance_token(f));
  1818. case Token_OpenBrace:
  1819. if (!lhs) return parse_literal_value(f, nullptr);
  1820. break;
  1821. case Token_OpenParen: {
  1822. bool allow_newline;
  1823. Token open, close;
  1824. // NOTE(bill): Skip the Paren Expression
  1825. open = expect_token(f, Token_OpenParen);
  1826. if (f->prev_token.kind == Token_CloseParen) {
  1827. close = expect_token(f, Token_CloseParen);
1828. syntax_error(open, "Invalid parenthesized expression with no expression inside");
  1829. return ast_bad_expr(f, open, close);
  1830. }
  1831. allow_newline = f->allow_newline;
  1832. if (f->expr_level < 0) {
  1833. f->allow_newline = false;
  1834. }
  1835. f->expr_level++;
  1836. operand = parse_expr(f, false);
  1837. f->expr_level--;
  1838. f->allow_newline = allow_newline;
  1839. close = expect_token(f, Token_CloseParen);
  1840. return ast_paren_expr(f, operand, open, close);
  1841. }
  1842. case Token_distinct: {
  1843. Token token = expect_token(f, Token_distinct);
  1844. Ast *type = parse_type(f);
  1845. return ast_distinct_type(f, token, type);
  1846. }
  1847. case Token_Hash: {
  1848. Token token = expect_token(f, Token_Hash);
  1849. Token name = expect_token(f, Token_Ident);
  1850. if (name.string == "type") {
  1851. return ast_helper_type(f, token, parse_type(f));
  1852. } else if (name.string == "soa" || name.string == "simd") {
  1853. Ast *tag = ast_basic_directive(f, token, name);
  1854. Ast *original_type = parse_type(f);
  1855. Ast *type = unparen_expr(original_type);
  1856. switch (type->kind) {
  1857. case Ast_ArrayType: type->ArrayType.tag = tag; break;
  1858. case Ast_DynamicArrayType: type->DynamicArrayType.tag = tag; break;
  1859. default:
  1860. syntax_error(type, "Expected an array type after #%.*s, got %.*s", LIT(name.string), LIT(ast_strings[type->kind]));
  1861. break;
  1862. }
  1863. return original_type;
  1864. } else if (name.string == "partial") {
  1865. Ast *tag = ast_basic_directive(f, token, name);
  1866. Ast *original_expr = parse_expr(f, lhs);
  1867. Ast *expr = unparen_expr(original_expr);
  1868. switch (expr->kind) {
  1869. case Ast_ArrayType:
  1870. syntax_error(expr, "#partial has been replaced with #sparse for non-contiguous enumerated array types");
  1871. break;
  1872. case Ast_CompoundLit:
  1873. expr->CompoundLit.tag = tag;
  1874. break;
  1875. default:
  1876. syntax_error(expr, "Expected a compound literal after #%.*s, got %.*s", LIT(name.string), LIT(ast_strings[expr->kind]));
  1877. break;
  1878. }
  1879. return original_expr;
  1880. } else if (name.string == "sparse") {
  1881. Ast *tag = ast_basic_directive(f, token, name);
  1882. Ast *original_type = parse_type(f);
  1883. Ast *type = unparen_expr(original_type);
  1884. switch (type->kind) {
  1885. case Ast_ArrayType: type->ArrayType.tag = tag; break;
  1886. default:
  1887. syntax_error(type, "Expected an enumerated array type after #%.*s, got %.*s", LIT(name.string), LIT(ast_strings[type->kind]));
  1888. break;
  1889. }
  1890. return original_type;
  1891. } else if (name.string == "bounds_check") {
  1892. Ast *operand = parse_expr(f, lhs);
  1893. return parse_check_directive_for_statement(operand, name, StateFlag_bounds_check);
  1894. } else if (name.string == "no_bounds_check") {
  1895. Ast *operand = parse_expr(f, lhs);
  1896. return parse_check_directive_for_statement(operand, name, StateFlag_no_bounds_check);
  1897. } else if (name.string == "type_assert") {
  1898. Ast *operand = parse_expr(f, lhs);
  1899. return parse_check_directive_for_statement(operand, name, StateFlag_type_assert);
  1900. } else if (name.string == "no_type_assert") {
  1901. Ast *operand = parse_expr(f, lhs);
  1902. return parse_check_directive_for_statement(operand, name, StateFlag_no_type_assert);
  1903. } else if (name.string == "relative") {
  1904. Ast *tag = ast_basic_directive(f, token, name);
  1905. tag = parse_call_expr(f, tag);
  1906. Ast *type = parse_type(f);
  1907. return ast_relative_type(f, tag, type);
  1908. } else if (name.string == "force_inline" ||
  1909. name.string == "force_no_inline") {
  1910. return parse_force_inlining_operand(f, name);
  1911. }
  1912. return ast_basic_directive(f, token, name);
  1913. }
  1914. // Parse Procedure Type or Literal or Group
  1915. case Token_proc: {
  1916. Token token = expect_token(f, Token_proc);
  1917. if (f->curr_token.kind == Token_OpenBrace) { // ProcGroup
  1918. Token open = expect_token(f, Token_OpenBrace);
  1919. auto args = array_make<Ast *>(heap_allocator());
  1920. while (f->curr_token.kind != Token_CloseBrace &&
  1921. f->curr_token.kind != Token_EOF) {
  1922. Ast *elem = parse_expr(f, false);
  1923. array_add(&args, elem);
  1924. if (!allow_token(f, Token_Comma)) {
  1925. break;
  1926. }
  1927. }
  1928. Token close = expect_token(f, Token_CloseBrace);
  1929. if (args.count == 0) {
1930. syntax_error(token, "Expected at least 1 argument in a procedure group");
  1931. }
  1932. return ast_proc_group(f, token, open, close, args);
  1933. }
  1934. Ast *type = parse_proc_type(f, token);
  1935. Token where_token = {};
  1936. Array<Ast *> where_clauses = {};
  1937. u64 tags = 0;
  1938. skip_possible_newline_for_literal(f);
  1939. if (f->curr_token.kind == Token_where) {
  1940. where_token = expect_token(f, Token_where);
  1941. isize prev_level = f->expr_level;
  1942. f->expr_level = -1;
  1943. where_clauses = parse_rhs_expr_list(f);
  1944. f->expr_level = prev_level;
  1945. }
  1946. parse_proc_tags(f, &tags);
  1947. if ((tags & ProcTag_require_results) != 0) {
1948. syntax_error(f->curr_token, "#require_results has been replaced with the attribute @(require_results) on the declaration");
  1949. tags &= ~ProcTag_require_results;
  1950. }
  1951. GB_ASSERT(type->kind == Ast_ProcType);
  1952. type->ProcType.tags = tags;
  1953. if (f->allow_type && f->expr_level < 0) {
  1954. if (tags != 0) {
  1955. syntax_error(token, "A procedure type cannot have suffix tags");
  1956. }
  1957. if (where_token.kind != Token_Invalid) {
  1958. syntax_error(where_token, "'where' clauses are not allowed on procedure types");
  1959. }
  1960. return type;
  1961. }
  1962. skip_possible_newline_for_literal(f);
  1963. if (allow_token(f, Token_Undef)) {
  1964. if (where_token.kind != Token_Invalid) {
  1965. syntax_error(where_token, "'where' clauses are not allowed on procedure literals without a defined body (replaced with ---)");
  1966. }
  1967. return ast_proc_lit(f, type, nullptr, tags, where_token, where_clauses);
  1968. } else if (f->curr_token.kind == Token_OpenBrace) {
  1969. Ast *curr_proc = f->curr_proc;
  1970. Ast *body = nullptr;
  1971. f->curr_proc = type;
  1972. body = parse_body(f);
  1973. f->curr_proc = curr_proc;
  1974. // Apply the tags directly to the body rather than the type
  1975. if (tags & ProcTag_no_bounds_check) {
  1976. body->state_flags |= StateFlag_no_bounds_check;
  1977. }
  1978. if (tags & ProcTag_bounds_check) {
  1979. body->state_flags |= StateFlag_bounds_check;
  1980. }
  1981. if (tags & ProcTag_no_type_assert) {
  1982. body->state_flags |= StateFlag_no_type_assert;
  1983. }
  1984. if (tags & ProcTag_type_assert) {
  1985. body->state_flags |= StateFlag_type_assert;
  1986. }
  1987. return ast_proc_lit(f, type, body, tags, where_token, where_clauses);
  1988. } else if (allow_token(f, Token_do)) {
  1989. Ast *curr_proc = f->curr_proc;
  1990. Ast *body = nullptr;
  1991. f->curr_proc = type;
  1992. body = convert_stmt_to_body(f, parse_stmt(f));
  1993. f->curr_proc = curr_proc;
  1994. if (build_context.disallow_do) {
  1995. syntax_error(body, "'do' has been disallowed");
  1996. } else if (!ast_on_same_line(type, body)) {
  1997. syntax_error(body, "The body of a 'do' must be on the same line as the signature");
  1998. }
  1999. return ast_proc_lit(f, type, body, tags, where_token, where_clauses);
  2000. }
  2001. if (tags != 0) {
  2002. syntax_error(token, "A procedure type cannot have suffix tags");
  2003. }
  2004. if (where_token.kind != Token_Invalid) {
  2005. syntax_error(where_token, "'where' clauses are not allowed on procedure types");
  2006. }
  2007. return type;
  2008. }
  2009. // Check for Types
  2010. case Token_Dollar: {
  2011. Token token = expect_token(f, Token_Dollar);
  2012. Ast *type = parse_ident(f);
  2013. if (is_blank_ident(type)) {
  2014. syntax_error(type, "Invalid polymorphic type definition with a blank identifier");
  2015. }
  2016. Ast *specialization = nullptr;
  2017. if (allow_token(f, Token_Quo)) {
  2018. specialization = parse_type(f);
  2019. }
  2020. return ast_poly_type(f, token, type, specialization);
  2021. } break;
  2022. case Token_typeid: {
  2023. Token token = expect_token(f, Token_typeid);
  2024. return ast_typeid_type(f, token, nullptr);
  2025. } break;
  2026. case Token_Pointer: {
  2027. Token token = expect_token(f, Token_Pointer);
  2028. Ast *elem = parse_type(f);
  2029. return ast_pointer_type(f, token, elem);
  2030. } break;
  2031. case Token_OpenBracket: {
  2032. Token token = expect_token(f, Token_OpenBracket);
  2033. Ast *count_expr = nullptr;
  2034. if (f->curr_token.kind == Token_Pointer) {
  2035. expect_token(f, Token_Pointer);
  2036. expect_token(f, Token_CloseBracket);
  2037. return ast_multi_pointer_type(f, token, parse_type(f));
  2038. } else if (f->curr_token.kind == Token_Question) {
  2039. count_expr = ast_unary_expr(f, expect_token(f, Token_Question), nullptr);
  2040. } else if (allow_token(f, Token_dynamic)) {
  2041. expect_token(f, Token_CloseBracket);
  2042. return ast_dynamic_array_type(f, token, parse_type(f));
  2043. } else if (f->curr_token.kind != Token_CloseBracket) {
  2044. f->expr_level++;
  2045. count_expr = parse_expr(f, false);
  2046. f->expr_level--;
  2047. }
  2048. expect_token(f, Token_CloseBracket);
  2049. return ast_array_type(f, token, count_expr, parse_type(f));
  2050. } break;
  2051. case Token_map: {
  2052. Token token = expect_token(f, Token_map);
  2053. Ast *key = nullptr;
  2054. Ast *value = nullptr;
  2055. Token open, close;
  2056. open = expect_token_after(f, Token_OpenBracket, "map");
  2057. key = parse_expr(f, true);
  2058. close = expect_token(f, Token_CloseBracket);
  2059. value = parse_type(f);
  2060. return ast_map_type(f, token, key, value);
  2061. } break;
  2062. case Token_matrix: {
  2063. Token token = expect_token(f, Token_matrix);
  2064. Ast *row_count = nullptr;
  2065. Ast *column_count = nullptr;
  2066. Ast *type = nullptr;
  2067. Token open, close;
  2068. open = expect_token_after(f, Token_OpenBracket, "matrix");
  2069. row_count = parse_expr(f, true);
  2070. expect_token(f, Token_Comma);
  2071. column_count = parse_expr(f, true);
  2072. close = expect_token(f, Token_CloseBracket);
  2073. type = parse_type(f);
  2074. return ast_matrix_type(f, token, row_count, column_count, type);
  2075. } break;
  2076. case Token_struct: {
  2077. Token token = expect_token(f, Token_struct);
  2078. Ast *polymorphic_params = nullptr;
  2079. bool is_packed = false;
  2080. bool is_raw_union = false;
  2081. Ast *align = nullptr;
  2082. if (allow_token(f, Token_OpenParen)) {
  2083. isize param_count = 0;
  2084. polymorphic_params = parse_field_list(f, &param_count, 0, Token_CloseParen, true, true);
  2085. if (param_count == 0) {
  2086. syntax_error(polymorphic_params, "Expected at least 1 polymorphic parameter");
  2087. polymorphic_params = nullptr;
  2088. }
  2089. expect_token_after(f, Token_CloseParen, "parameter list");
  2090. check_polymorphic_params_for_type(f, polymorphic_params, token);
  2091. }
  2092. isize prev_level = f->expr_level;
  2093. f->expr_level = -1;
  2094. while (allow_token(f, Token_Hash)) {
  2095. Token tag = expect_token_after(f, Token_Ident, "#");
  2096. if (tag.string == "packed") {
  2097. if (is_packed) {
  2098. syntax_error(tag, "Duplicate struct tag '#%.*s'", LIT(tag.string));
  2099. }
  2100. is_packed = true;
  2101. } else if (tag.string == "align") {
  2102. if (align) {
  2103. syntax_error(tag, "Duplicate struct tag '#%.*s'", LIT(tag.string));
  2104. }
  2105. align = parse_expr(f, true);
  2106. } else if (tag.string == "raw_union") {
  2107. if (is_raw_union) {
  2108. syntax_error(tag, "Duplicate struct tag '#%.*s'", LIT(tag.string));
  2109. }
  2110. is_raw_union = true;
  2111. } else {
  2112. syntax_error(tag, "Invalid struct tag '#%.*s'", LIT(tag.string));
  2113. }
  2114. }
  2115. f->expr_level = prev_level;
  2116. if (is_raw_union && is_packed) {
  2117. is_packed = false;
  2118. syntax_error(token, "'#raw_union' cannot also be '#packed'");
  2119. }
  2120. Token where_token = {};
  2121. Array<Ast *> where_clauses = {};
  2122. skip_possible_newline_for_literal(f);
  2123. if (f->curr_token.kind == Token_where) {
  2124. where_token = expect_token(f, Token_where);
  2125. isize prev_level = f->expr_level;
  2126. f->expr_level = -1;
  2127. where_clauses = parse_rhs_expr_list(f);
  2128. f->expr_level = prev_level;
  2129. }
  2130. skip_possible_newline_for_literal(f);
  2131. Token open = expect_token_after(f, Token_OpenBrace, "struct");
  2132. isize name_count = 0;
  2133. Ast *fields = parse_struct_field_list(f, &name_count);
  2134. Token close = expect_closing_brace_of_field_list(f);
  2135. Slice<Ast *> decls = {};
  2136. if (fields != nullptr) {
  2137. GB_ASSERT(fields->kind == Ast_FieldList);
  2138. decls = fields->FieldList.list;
  2139. }
  2140. return ast_struct_type(f, token, decls, name_count, polymorphic_params, is_packed, is_raw_union, align, where_token, where_clauses);
  2141. } break;
  2142. case Token_union: {
  2143. Token token = expect_token(f, Token_union);
  2144. auto variants = array_make<Ast *>(heap_allocator());
  2145. Ast *polymorphic_params = nullptr;
  2146. Ast *align = nullptr;
  2147. bool no_nil = false;
  2148. bool maybe = false;
  2149. bool shared_nil = false;
  2150. UnionTypeKind union_kind = UnionType_Normal;
  2151. Token start_token = f->curr_token;
  2152. if (allow_token(f, Token_OpenParen)) {
  2153. isize param_count = 0;
  2154. polymorphic_params = parse_field_list(f, &param_count, 0, Token_CloseParen, true, true);
  2155. if (param_count == 0) {
2156. syntax_error(polymorphic_params, "Expected at least 1 polymorphic parameter");
  2157. polymorphic_params = nullptr;
  2158. }
  2159. expect_token_after(f, Token_CloseParen, "parameter list");
  2160. check_polymorphic_params_for_type(f, polymorphic_params, token);
  2161. }
  2162. while (allow_token(f, Token_Hash)) {
  2163. Token tag = expect_token_after(f, Token_Ident, "#");
  2164. if (tag.string == "align") {
  2165. if (align) {
  2166. syntax_error(tag, "Duplicate union tag '#%.*s'", LIT(tag.string));
  2167. }
  2168. align = parse_expr(f, true);
  2169. } else if (tag.string == "no_nil") {
  2170. if (no_nil) {
  2171. syntax_error(tag, "Duplicate union tag '#%.*s'", LIT(tag.string));
  2172. }
  2173. no_nil = true;
  2174. } else if (tag.string == "shared_nil") {
  2175. if (shared_nil) {
  2176. syntax_error(tag, "Duplicate union tag '#%.*s'", LIT(tag.string));
  2177. }
  2178. shared_nil = true;
  2179. } else if (tag.string == "maybe") {
  2180. if (maybe) {
  2181. syntax_error(tag, "Duplicate union tag '#%.*s'", LIT(tag.string));
  2182. }
  2183. maybe = true;
2184. } else {
  2185. syntax_error(tag, "Invalid union tag '#%.*s'", LIT(tag.string));
  2186. }
  2187. }
  2188. if (no_nil && maybe) {
  2189. syntax_error(f->curr_token, "#maybe and #no_nil cannot be applied together");
  2190. }
  2191. if (no_nil && shared_nil) {
  2192. syntax_error(f->curr_token, "#shared_nil and #no_nil cannot be applied together");
  2193. }
  2194. if (shared_nil && maybe) {
  2195. syntax_error(f->curr_token, "#maybe and #shared_nil cannot be applied together");
  2196. }
  2197. if (maybe) {
  2198. union_kind = UnionType_maybe;
  2199. } else if (no_nil) {
  2200. union_kind = UnionType_no_nil;
  2201. } else if (shared_nil) {
  2202. union_kind = UnionType_shared_nil;
  2203. }
  2204. skip_possible_newline_for_literal(f);
  2205. Token where_token = {};
  2206. Array<Ast *> where_clauses = {};
  2207. if (f->curr_token.kind == Token_where) {
  2208. where_token = expect_token(f, Token_where);
  2209. isize prev_level = f->expr_level;
  2210. f->expr_level = -1;
  2211. where_clauses = parse_rhs_expr_list(f);
  2212. f->expr_level = prev_level;
  2213. }
  2214. skip_possible_newline_for_literal(f);
  2215. Token open = expect_token_after(f, Token_OpenBrace, "union");
  2216. while (f->curr_token.kind != Token_CloseBrace &&
  2217. f->curr_token.kind != Token_EOF) {
  2218. Ast *type = parse_type(f);
  2219. if (type->kind != Ast_BadExpr) {
  2220. array_add(&variants, type);
  2221. }
  2222. if (!allow_token(f, Token_Comma)) {
  2223. break;
  2224. }
  2225. }
  2226. Token close = expect_closing_brace_of_field_list(f);
  2227. return ast_union_type(f, token, variants, polymorphic_params, align, union_kind, where_token, where_clauses);
  2228. } break;
  2229. case Token_enum: {
  2230. Token token = expect_token(f, Token_enum);
  2231. Ast *base_type = nullptr;
  2232. if (f->curr_token.kind != Token_OpenBrace) {
  2233. base_type = parse_type(f);
  2234. }
  2235. skip_possible_newline_for_literal(f);
  2236. Token open = expect_token(f, Token_OpenBrace);
  2237. Array<Ast *> values = parse_enum_field_list(f);
  2238. Token close = expect_closing_brace_of_field_list(f);
  2239. return ast_enum_type(f, token, base_type, values);
  2240. } break;
  2241. case Token_bit_set: {
  2242. Token token = expect_token(f, Token_bit_set);
  2243. expect_token(f, Token_OpenBracket);
  2244. Ast *elem = nullptr;
  2245. Ast *underlying = nullptr;
  2246. bool prev_allow_range = f->allow_range;
  2247. f->allow_range = true;
  2248. elem = parse_expr(f, true);
  2249. f->allow_range = prev_allow_range;
  2250. if (allow_token(f, Token_Semicolon)) {
  2251. underlying = parse_type(f);
  2252. } else if (allow_token(f, Token_Comma)) {
  2253. String p = token_to_string(f->prev_token);
  2254. syntax_error(token_end_of_line(f, f->prev_token), "Expected a semicolon, got a %.*s", LIT(p));
  2255. underlying = parse_type(f);
  2256. }
  2257. expect_token(f, Token_CloseBracket);
  2258. return ast_bit_set_type(f, token, elem, underlying);
  2259. }
  2260. case Token_asm: {
  2261. Token token = expect_token(f, Token_asm);
  2262. Array<Ast *> param_types = {};
  2263. Ast *return_type = nullptr;
  2264. if (allow_token(f, Token_OpenParen)) {
  2265. param_types = array_make<Ast *>(heap_allocator());
  2266. while (f->curr_token.kind != Token_CloseParen && f->curr_token.kind != Token_EOF) {
  2267. Ast *t = parse_type(f);
  2268. array_add(&param_types, t);
  2269. if (f->curr_token.kind != Token_Comma ||
  2270. f->curr_token.kind == Token_EOF) {
  2271. break;
  2272. }
  2273. advance_token(f);
  2274. }
  2275. expect_token(f, Token_CloseParen);
  2276. if (allow_token(f, Token_ArrowRight)) {
  2277. return_type = parse_type(f);
  2278. }
  2279. }
  2280. bool has_side_effects = false;
  2281. bool is_align_stack = false;
  2282. InlineAsmDialectKind dialect = InlineAsmDialect_Default;
  2283. while (f->curr_token.kind == Token_Hash) {
  2284. advance_token(f);
  2285. if (f->curr_token.kind == Token_Ident) {
  2286. Token token = advance_token(f);
  2287. String name = token.string;
  2288. if (name == "side_effects") {
  2289. if (has_side_effects) {
  2290. syntax_error(token, "Duplicate directive on inline asm expression: '#side_effects'");
  2291. }
  2292. has_side_effects = true;
  2293. } else if (name == "align_stack") {
  2294. if (is_align_stack) {
  2295. syntax_error(token, "Duplicate directive on inline asm expression: '#align_stack'");
  2296. }
  2297. is_align_stack = true;
  2298. } else if (name == "att") {
  2299. if (dialect == InlineAsmDialect_ATT) {
  2300. syntax_error(token, "Duplicate directive on inline asm expression: '#att'");
  2301. } else if (dialect != InlineAsmDialect_Default) {
  2302. syntax_error(token, "Conflicting asm dialects");
  2303. } else {
  2304. dialect = InlineAsmDialect_ATT;
  2305. }
  2306. } else if (name == "intel") {
  2307. if (dialect == InlineAsmDialect_Intel) {
  2308. syntax_error(token, "Duplicate directive on inline asm expression: '#intel'");
  2309. } else if (dialect != InlineAsmDialect_Default) {
  2310. syntax_error(token, "Conflicting asm dialects");
  2311. } else {
  2312. dialect = InlineAsmDialect_Intel;
  2313. }
  2314. }
  2315. } else {
  2316. syntax_error(f->curr_token, "Expected an identifier after hash");
  2317. }
  2318. }
  2319. skip_possible_newline_for_literal(f);
  2320. Token open = expect_token(f, Token_OpenBrace);
  2321. Ast *asm_string = parse_expr(f, false);
  2322. expect_token(f, Token_Comma);
  2323. Ast *constraints_string = parse_expr(f, false);
  2324. allow_token(f, Token_Comma);
  2325. Token close = expect_closing_brace_of_field_list(f);
  2326. return ast_inline_asm_expr(f, token, open, close, param_types, return_type, asm_string, constraints_string, has_side_effects, is_align_stack, dialect);
  2327. }
  2328. }
  2329. return nullptr;
  2330. }
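// is_literal_type reports whether the operand preceding a '{' may start a compound
// literal. Illustrative Odin examples (not taken from this file):
//     Vec3{1, 2, 3}            // Ast_Ident / Ast_SelectorExpr
//     [4]int{1, 2, 3, 4}       // Ast_ArrayType
//     map[string]int{}         // Ast_MapType
// Ast_MultiPointerType is accepted only so the checker can produce a better error later.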
  2331. bool is_literal_type(Ast *node) {
  2332. node = unparen_expr(node);
  2333. switch (node->kind) {
  2334. case Ast_BadExpr:
  2335. case Ast_Ident:
  2336. case Ast_SelectorExpr:
  2337. case Ast_ArrayType:
  2338. case Ast_StructType:
  2339. case Ast_UnionType:
  2340. case Ast_EnumType:
  2341. case Ast_DynamicArrayType:
  2342. case Ast_MapType:
  2343. case Ast_BitSetType:
  2344. case Ast_MatrixType:
  2345. case Ast_CallExpr:
  2346. return true;
  2347. case Ast_MultiPointerType:
  2348. // For better error messages
  2349. return true;
  2350. }
  2351. return false;
  2352. }
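// parse_call_expr parses the parenthesised argument list of a call. Illustrative Odin
// examples (not taken from this file):
//     foo(x, y)
//     foo(count = 3, allocator = context.allocator)   // 'name = value' becomes a field value
//     foo(..args)                                      // '..' spreads a variadic argument
// A callee of the form obj->method(x) is rewritten into a selector-call expression below.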
  2353. Ast *parse_call_expr(AstFile *f, Ast *operand) {
  2354. auto args = array_make<Ast *>(heap_allocator());
  2355. Token open_paren, close_paren;
  2356. Token ellipsis = {};
  2357. isize prev_expr_level = f->expr_level;
  2358. bool prev_allow_newline = f->allow_newline;
  2359. f->expr_level = 0;
  2360. f->allow_newline = true;
  2361. open_paren = expect_token(f, Token_OpenParen);
  2362. while (f->curr_token.kind != Token_CloseParen &&
  2363. f->curr_token.kind != Token_EOF &&
  2364. ellipsis.pos.line == 0) {
  2365. if (f->curr_token.kind == Token_Comma) {
2366. syntax_error(f->curr_token, "Expected an expression, not ','");
  2367. } else if (f->curr_token.kind == Token_Eq) {
2368. syntax_error(f->curr_token, "Expected an expression, not '='");
  2369. }
  2370. bool prefix_ellipsis = false;
  2371. if (f->curr_token.kind == Token_Ellipsis) {
  2372. prefix_ellipsis = true;
  2373. ellipsis = expect_token(f, Token_Ellipsis);
  2374. }
  2375. Ast *arg = parse_expr(f, false);
  2376. if (f->curr_token.kind == Token_Eq) {
  2377. Token eq = expect_token(f, Token_Eq);
  2378. if (prefix_ellipsis) {
2379. syntax_error(ellipsis, "'..' must be applied to the value rather than the field name");
  2380. }
  2381. Ast *value = parse_value(f);
  2382. arg = ast_field_value(f, arg, value, eq);
  2383. }
  2384. array_add(&args, arg);
  2385. if (!allow_token(f, Token_Comma)) {
  2386. break;
  2387. }
  2388. }
  2389. f->allow_newline = prev_allow_newline;
  2390. f->expr_level = prev_expr_level;
  2391. close_paren = expect_closing(f, Token_CloseParen, str_lit("argument list"));
  2392. Ast *call = ast_call_expr(f, operand, args, open_paren, close_paren, ellipsis);
  2393. Ast *o = unparen_expr(operand);
  2394. if (o->kind == Ast_SelectorExpr && o->SelectorExpr.token.kind == Token_ArrowRight) {
  2395. return ast_selector_call_expr(f, o->SelectorExpr.token, o, call);
  2396. }
  2397. return call;
  2398. }
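// parse_atom_expr repeatedly applies suffix forms to an operand. Illustrative Odin
// examples (not taken from this file):
//     f(x)            call                      x.y          selector
//     x.(T)  x.?      type assertion            x->y(...)    selector call
//     a[i]  a[r, c]   index / matrix index      a[lo:hi]     slice
//     p^              dereference               expr or_return
//     T{...}          compound literal (only when the operand is a literal type)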
  2399. Ast *parse_atom_expr(AstFile *f, Ast *operand, bool lhs) {
  2400. if (operand == nullptr) {
  2401. if (f->allow_type) return nullptr;
  2402. Token begin = f->curr_token;
  2403. syntax_error(begin, "Expected an operand");
  2404. fix_advance_to_next_stmt(f);
  2405. operand = ast_bad_expr(f, begin, f->curr_token);
  2406. }
  2407. bool loop = true;
  2408. while (loop) {
  2409. switch (f->curr_token.kind) {
  2410. case Token_OpenParen:
  2411. operand = parse_call_expr(f, operand);
  2412. break;
  2413. case Token_Period: {
  2414. Token token = advance_token(f);
  2415. switch (f->curr_token.kind) {
  2416. case Token_Ident:
  2417. operand = ast_selector_expr(f, token, operand, parse_ident(f));
  2418. break;
  2419. // case Token_Integer:
  2420. // operand = ast_selector_expr(f, token, operand, parse_expr(f, lhs));
  2421. // break;
  2422. case Token_OpenParen: {
  2423. Token open = expect_token(f, Token_OpenParen);
  2424. Ast *type = parse_type(f);
  2425. Token close = expect_token(f, Token_CloseParen);
  2426. operand = ast_type_assertion(f, operand, token, type);
  2427. } break;
  2428. case Token_Question: {
  2429. Token question = expect_token(f, Token_Question);
  2430. Ast *type = ast_unary_expr(f, question, nullptr);
  2431. operand = ast_type_assertion(f, operand, token, type);
  2432. } break;
  2433. default:
  2434. syntax_error(f->curr_token, "Expected a selector");
  2435. advance_token(f);
  2436. operand = ast_bad_expr(f, ast_token(operand), f->curr_token);
  2437. // operand = ast_selector_expr(f, f->curr_token, operand, nullptr);
  2438. break;
  2439. }
  2440. } break;
  2441. case Token_ArrowRight: {
  2442. Token token = advance_token(f);
  2443. operand = ast_selector_expr(f, token, operand, parse_ident(f));
  2444. // Ast *call = parse_call_expr(f, sel);
  2445. // operand = ast_selector_call_expr(f, token, sel, call);
  2446. break;
  2447. }
  2448. case Token_OpenBracket: {
  2449. bool prev_allow_range = f->allow_range;
  2450. f->allow_range = false;
  2451. Token open = {}, close = {}, interval = {};
  2452. Ast *indices[2] = {};
  2453. bool is_interval = false;
  2454. f->expr_level++;
  2455. open = expect_token(f, Token_OpenBracket);
  2456. switch (f->curr_token.kind) {
  2457. case Token_Ellipsis:
  2458. case Token_RangeFull:
  2459. case Token_RangeHalf:
  2460. // NOTE(bill): Do not err yet
  2461. case Token_Colon:
  2462. break;
  2463. default:
  2464. indices[0] = parse_expr(f, false);
  2465. break;
  2466. }
  2467. switch (f->curr_token.kind) {
  2468. case Token_Ellipsis:
  2469. case Token_RangeFull:
  2470. case Token_RangeHalf:
  2471. syntax_error(f->curr_token, "Expected a colon, not a range");
  2472. /* fallthrough */
  2473. case Token_Comma: // matrix index
  2474. case Token_Colon:
  2475. interval = advance_token(f);
  2476. is_interval = true;
  2477. if (f->curr_token.kind != Token_CloseBracket &&
  2478. f->curr_token.kind != Token_EOF) {
  2479. indices[1] = parse_expr(f, false);
  2480. }
  2481. break;
  2482. }
  2483. f->expr_level--;
  2484. close = expect_token(f, Token_CloseBracket);
  2485. if (is_interval) {
  2486. if (interval.kind == Token_Comma) {
  2487. if (indices[0] == nullptr || indices[1] == nullptr) {
  2488. syntax_error(open, "Matrix index expressions require both row and column indices");
  2489. }
  2490. operand = ast_matrix_index_expr(f, operand, open, close, interval, indices[0], indices[1]);
  2491. } else {
  2492. operand = ast_slice_expr(f, operand, open, close, interval, indices[0], indices[1]);
  2493. }
  2494. } else {
  2495. operand = ast_index_expr(f, operand, indices[0], open, close);
  2496. }
  2497. f->allow_range = prev_allow_range;
  2498. } break;
2499. case Token_Pointer: // Dereference
  2500. operand = ast_deref_expr(f, operand, expect_token(f, Token_Pointer));
  2501. break;
  2502. case Token_or_return:
  2503. operand = ast_or_return_expr(f, operand, expect_token(f, Token_or_return));
  2504. break;
  2505. case Token_OpenBrace:
  2506. if (!lhs && is_literal_type(operand) && f->expr_level >= 0) {
  2507. operand = parse_literal_value(f, operand);
  2508. } else {
  2509. loop = false;
  2510. }
  2511. break;
  2512. case Token_Increment:
  2513. case Token_Decrement:
  2514. if (!lhs) {
  2515. Token token = advance_token(f);
  2516. syntax_error(token, "Postfix '%.*s' operator is not supported", LIT(token.string));
  2517. } else {
  2518. loop = false;
  2519. }
  2520. break;
  2521. default:
  2522. loop = false;
  2523. break;
  2524. }
  2525. lhs = false; // NOTE(bill): 'tis not lhs anymore
  2526. }
  2527. return operand;
  2528. }
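// parse_unary_expr handles prefix forms before delegating to parse_atom_expr.
// Illustrative Odin examples (not taken from this file):
//     cast(int)x    transmute(u32)f    auto_cast x
//     +x  -x  ~x  &x  !x
//     .Red                              // implicit selector expression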
  2529. Ast *parse_unary_expr(AstFile *f, bool lhs) {
  2530. switch (f->curr_token.kind) {
  2531. case Token_transmute:
  2532. case Token_cast: {
  2533. Token token = advance_token(f);
  2534. expect_token(f, Token_OpenParen);
  2535. Ast *type = parse_type(f);
  2536. expect_token(f, Token_CloseParen);
  2537. Ast *expr = parse_unary_expr(f, lhs);
  2538. return ast_type_cast(f, token, type, expr);
  2539. }
  2540. case Token_auto_cast: {
  2541. Token token = advance_token(f);
  2542. Ast *expr = parse_unary_expr(f, lhs);
  2543. return ast_auto_cast(f, token, expr);
  2544. }
  2545. case Token_Add:
  2546. case Token_Sub:
  2547. case Token_Xor:
  2548. case Token_And:
  2549. case Token_Not: {
  2550. Token token = advance_token(f);
  2551. Ast *expr = parse_unary_expr(f, lhs);
  2552. return ast_unary_expr(f, token, expr);
  2553. }
  2554. case Token_Increment:
  2555. case Token_Decrement: {
  2556. Token token = advance_token(f);
  2557. syntax_error(token, "Unary '%.*s' operator is not supported", LIT(token.string));
  2558. Ast *expr = parse_unary_expr(f, lhs);
  2559. return ast_unary_expr(f, token, expr);
  2560. }
  2561. case Token_Period: {
  2562. Token token = expect_token(f, Token_Period);
  2563. Ast *ident = parse_ident(f);
  2564. return ast_implicit_selector_expr(f, token, ident);
  2565. }
  2566. }
  2567. return parse_atom_expr(f, parse_operand(f, lhs), lhs);
  2568. }
  2569. bool is_ast_range(Ast *expr) {
  2570. if (expr == nullptr) {
  2571. return false;
  2572. }
  2573. if (expr->kind != Ast_BinaryExpr) {
  2574. return false;
  2575. }
  2576. return is_token_range(expr->BinaryExpr.op.kind);
  2577. }
  2578. // NOTE(bill): result == priority
  2579. i32 token_precedence(AstFile *f, TokenKind t) {
  2580. switch (t) {
  2581. case Token_Question:
  2582. case Token_if:
  2583. case Token_when:
  2584. case Token_or_else:
  2585. return 1;
  2586. case Token_Ellipsis:
  2587. case Token_RangeFull:
  2588. case Token_RangeHalf:
  2589. if (!f->allow_range) {
  2590. return 0;
  2591. }
  2592. return 2;
  2593. case Token_CmpOr:
  2594. return 3;
  2595. case Token_CmpAnd:
  2596. return 4;
  2597. case Token_CmpEq:
  2598. case Token_NotEq:
  2599. case Token_Lt:
  2600. case Token_Gt:
  2601. case Token_LtEq:
  2602. case Token_GtEq:
  2603. return 5;
  2604. case Token_in:
  2605. case Token_not_in:
  2606. if (f->expr_level < 0 && !f->allow_in_expr) {
  2607. return 0;
  2608. }
  2609. /*fallthrough*/
  2610. case Token_Add:
  2611. case Token_Sub:
  2612. case Token_Or:
  2613. case Token_Xor:
  2614. return 6;
  2615. case Token_Mul:
  2616. case Token_Quo:
  2617. case Token_Mod:
  2618. case Token_ModMod:
  2619. case Token_And:
  2620. case Token_AndNot:
  2621. case Token_Shl:
  2622. case Token_Shr:
  2623. return 7;
  2624. }
  2625. return 0;
  2626. }
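// parse_binary_expr is a precedence-climbing parser: it folds every operator whose
// precedence is at least prec_in, recursing with op_prec+1 for the right-hand side so
// that operators of equal precedence associate to the left. Given the table above,
// 'a + b * c - d' therefore groups as '(a + (b * c)) - d'. The ternary forms
// 'cond ? x : y', 'x if cond else y' and 'x when cond else y' are rewritten into
// dedicated ternary nodes instead of binary expressions.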
  2627. Ast *parse_binary_expr(AstFile *f, bool lhs, i32 prec_in) {
  2628. Ast *expr = parse_unary_expr(f, lhs);
  2629. for (;;) {
  2630. Token op = f->curr_token;
  2631. i32 op_prec = token_precedence(f, op.kind);
  2632. if (op_prec < prec_in) {
  2633. // NOTE(bill): This will also catch operators that are not valid "binary" operators
  2634. break;
  2635. }
  2636. Token prev = f->prev_token;
  2637. switch (op.kind) {
  2638. case Token_if:
  2639. case Token_when:
  2640. if (prev.pos.line < op.pos.line) {
2641. // NOTE(bill): Check to see if the `if` or `when` is on the same line as the `lhs` condition
  2642. goto loop_end;
  2643. }
  2644. break;
  2645. }
  2646. expect_operator(f); // NOTE(bill): error checks too
  2647. if (op.kind == Token_Question) {
  2648. Ast *cond = expr;
  2649. // Token_Question
  2650. Ast *x = parse_expr(f, lhs);
  2651. Token token_c = expect_token(f, Token_Colon);
  2652. Ast *y = parse_expr(f, lhs);
  2653. expr = ast_ternary_if_expr(f, x, cond, y);
  2654. } else if (op.kind == Token_if || op.kind == Token_when) {
  2655. Ast *x = expr;
  2656. Ast *cond = parse_expr(f, lhs);
  2657. Token tok_else = expect_token(f, Token_else);
  2658. Ast *y = parse_expr(f, lhs);
  2659. switch (op.kind) {
  2660. case Token_if:
  2661. expr = ast_ternary_if_expr(f, x, cond, y);
  2662. break;
  2663. case Token_when:
  2664. expr = ast_ternary_when_expr(f, x, cond, y);
  2665. break;
  2666. }
  2667. } else {
  2668. Ast *right = parse_binary_expr(f, false, op_prec+1);
  2669. if (right == nullptr) {
  2670. syntax_error(op, "Expected expression on the right-hand side of the binary operator '%.*s'", LIT(op.string));
  2671. }
  2672. if (op.kind == Token_or_else) {
2673. // NOTE(bill): easier to handle its logic differently with its own AST kind
  2674. expr = ast_or_else_expr(f, expr, op, right);
  2675. } else {
  2676. expr = ast_binary_expr(f, op, expr, right);
  2677. }
  2678. }
  2679. lhs = false;
  2680. }
  2681. loop_end:;
  2682. return expr;
  2683. }
  2684. Ast *parse_expr(AstFile *f, bool lhs) {
  2685. return parse_binary_expr(f, lhs, 0+1);
  2686. }
  2687. Array<Ast *> parse_expr_list(AstFile *f, bool lhs) {
  2688. bool allow_newline = f->allow_newline;
  2689. f->allow_newline = true;
  2690. auto list = array_make<Ast *>(heap_allocator());
  2691. for (;;) {
  2692. Ast *e = parse_expr(f, lhs);
  2693. array_add(&list, e);
  2694. if (f->curr_token.kind != Token_Comma ||
  2695. f->curr_token.kind == Token_EOF) {
  2696. break;
  2697. }
  2698. advance_token(f);
  2699. }
  2700. f->allow_newline = allow_newline;
  2701. return list;
  2702. }
  2703. Array<Ast *> parse_lhs_expr_list(AstFile *f) {
  2704. return parse_expr_list(f, true);
  2705. }
  2706. Array<Ast *> parse_rhs_expr_list(AstFile *f) {
  2707. return parse_expr_list(f, false);
  2708. }
  2709. Array<Ast *> parse_ident_list(AstFile *f, bool allow_poly_names) {
  2710. auto list = array_make<Ast *>(heap_allocator());
  2711. for (;;) {
  2712. array_add(&list, parse_ident(f, allow_poly_names));
  2713. if (f->curr_token.kind != Token_Comma ||
  2714. f->curr_token.kind == Token_EOF) {
  2715. break;
  2716. }
  2717. advance_token(f);
  2718. }
  2719. return list;
  2720. }
  2721. Ast *parse_type(AstFile *f) {
  2722. Ast *type = parse_type_or_ident(f);
  2723. if (type == nullptr) {
  2724. Token token = advance_token(f);
  2725. syntax_error(token, "Expected a type");
  2726. return ast_bad_expr(f, token, f->curr_token);
  2727. }
  2728. return type;
  2729. }
  2730. void parse_foreign_block_decl(AstFile *f, Array<Ast *> *decls) {
  2731. Ast *decl = parse_stmt(f);
  2732. switch (decl->kind) {
  2733. case Ast_EmptyStmt:
  2734. case Ast_BadStmt:
  2735. case Ast_BadDecl:
  2736. return;
  2737. case Ast_WhenStmt:
  2738. case Ast_ValueDecl:
  2739. array_add(decls, decl);
  2740. return;
  2741. default:
  2742. syntax_error(decl, "Foreign blocks only allow procedure and variable declarations");
  2743. return;
  2744. }
  2745. }
  2746. Ast *parse_foreign_block(AstFile *f, Token token) {
  2747. CommentGroup *docs = f->lead_comment;
  2748. Ast *foreign_library = nullptr;
  2749. if (f->curr_token.kind == Token_OpenBrace) {
  2750. foreign_library = ast_ident(f, blank_token);
  2751. } else {
  2752. foreign_library = parse_ident(f);
  2753. }
  2754. Token open = {};
  2755. Token close = {};
  2756. auto decls = array_make<Ast *>(heap_allocator());
  2757. bool prev_in_foreign_block = f->in_foreign_block;
  2758. defer (f->in_foreign_block = prev_in_foreign_block);
  2759. f->in_foreign_block = true;
  2760. skip_possible_newline_for_literal(f);
  2761. open = expect_token(f, Token_OpenBrace);
  2762. while (f->curr_token.kind != Token_CloseBrace &&
  2763. f->curr_token.kind != Token_EOF) {
  2764. parse_foreign_block_decl(f, &decls);
  2765. }
  2766. close = expect_token(f, Token_CloseBrace);
  2767. Ast *body = ast_block_stmt(f, decls, open, close);
  2768. Ast *decl = ast_foreign_block_decl(f, token, foreign_library, body, docs);
  2769. expect_semicolon(f);
  2770. return decl;
  2771. }
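// parse_value_decl handles what follows the first ':' of a declaration. Illustrative
// Odin examples (not taken from this file):
//     x: int             // type only
//     y := 10            // '=' separator keeps the declaration mutable
//     Z :: 3.14          // ':' separator makes it a constant
//     a, b: f32 = 1, 2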
  2772. Ast *parse_value_decl(AstFile *f, Array<Ast *> names, CommentGroup *docs) {
  2773. bool is_mutable = true;
  2774. Array<Ast *> values = {};
  2775. Ast *type = parse_type_or_ident(f);
  2776. if (f->curr_token.kind == Token_Eq ||
  2777. f->curr_token.kind == Token_Colon) {
  2778. Token sep = {};
  2779. if (!is_mutable) {
  2780. sep = expect_token_after(f, Token_Colon, "type");
  2781. } else {
  2782. sep = advance_token(f);
  2783. is_mutable = sep.kind != Token_Colon;
  2784. }
  2785. values = parse_rhs_expr_list(f);
  2786. if (values.count > names.count) {
  2787. syntax_error(f->curr_token, "Too many values on the right hand side of the declaration");
  2788. } else if (values.count < names.count && !is_mutable) {
  2789. syntax_error(f->curr_token, "All constant declarations must be defined");
  2790. } else if (values.count == 0) {
  2791. syntax_error(f->curr_token, "Expected an expression for this declaration");
  2792. }
  2793. }
  2794. if (is_mutable) {
  2795. if (type == nullptr && values.count == 0) {
  2796. syntax_error(f->curr_token, "Missing variable type or initialization");
  2797. return ast_bad_decl(f, f->curr_token, f->curr_token);
  2798. }
  2799. } else {
  2800. if (type == nullptr && values.count == 0 && names.count > 0) {
  2801. syntax_error(f->curr_token, "Missing constant value");
  2802. return ast_bad_decl(f, f->curr_token, f->curr_token);
  2803. }
  2804. }
  2805. if (values.data == nullptr) {
  2806. values.allocator = heap_allocator();
  2807. }
  2808. if (f->expr_level >= 0) {
  2809. if (f->curr_token.kind == Token_CloseBrace &&
  2810. f->curr_token.pos.line == f->prev_token.pos.line) {
  2811. } else {
  2812. expect_semicolon(f);
  2813. }
  2814. }
  2815. if (f->curr_proc == nullptr) {
  2816. if (values.count > 0 && names.count != values.count) {
  2817. syntax_error(
  2818. values[0],
  2819. "Expected %td expressions on the right hand side, got %td\n"
  2820. "\tNote: Global declarations do not allow for multi-valued expressions",
  2821. names.count, values.count
  2822. );
  2823. }
  2824. }
  2825. return ast_value_decl(f, names, type, values, is_mutable, docs, f->line_comment);
  2826. }
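// parse_simple_stmt covers assignments, 'in' clauses, labels, and value declarations.
// Illustrative Odin examples (not taken from this file):
//     x = 1        x += 2        a, b = b, a
//     for x in xs                 // the 'in' form requires StmtAllowFlag_In
//     loop: for { }               // labels require StmtAllowFlag_Label and a single name
//     x := 10                     // falls through to parse_value_decl after ':'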
  2827. Ast *parse_simple_stmt(AstFile *f, u32 flags) {
  2828. Token token = f->curr_token;
  2829. CommentGroup *docs = f->lead_comment;
  2830. Array<Ast *> lhs = parse_lhs_expr_list(f);
  2831. token = f->curr_token;
  2832. switch (token.kind) {
  2833. case Token_Eq:
  2834. case Token_AddEq:
  2835. case Token_SubEq:
  2836. case Token_MulEq:
  2837. case Token_QuoEq:
  2838. case Token_ModEq:
  2839. case Token_ModModEq:
  2840. case Token_AndEq:
  2841. case Token_OrEq:
  2842. case Token_XorEq:
  2843. case Token_ShlEq:
  2844. case Token_ShrEq:
  2845. case Token_AndNotEq:
  2846. case Token_CmpAndEq:
  2847. case Token_CmpOrEq:
  2848. {
  2849. if (f->curr_proc == nullptr) {
  2850. syntax_error(f->curr_token, "You cannot use a simple statement in the file scope");
  2851. return ast_bad_stmt(f, f->curr_token, f->curr_token);
  2852. }
  2853. advance_token(f);
  2854. Array<Ast *> rhs = parse_rhs_expr_list(f);
  2855. if (rhs.count == 0) {
  2856. syntax_error(token, "No right-hand side in assignment statement.");
  2857. return ast_bad_stmt(f, token, f->curr_token);
  2858. }
  2859. return ast_assign_stmt(f, token, lhs, rhs);
  2860. } break;
  2861. case Token_in:
  2862. if (flags&StmtAllowFlag_In) {
  2863. allow_token(f, Token_in);
  2864. bool prev_allow_range = f->allow_range;
  2865. f->allow_range = true;
  2866. Ast *expr = parse_expr(f, true);
  2867. f->allow_range = prev_allow_range;
  2868. auto rhs = array_make<Ast *>(heap_allocator(), 0, 1);
  2869. array_add(&rhs, expr);
  2870. return ast_assign_stmt(f, token, lhs, rhs);
  2871. }
  2872. break;
  2873. case Token_Colon:
  2874. expect_token_after(f, Token_Colon, "identifier list");
  2875. if ((flags&StmtAllowFlag_Label) && lhs.count == 1) {
  2876. switch (f->curr_token.kind) {
  2877. case Token_OpenBrace: // block statement
  2878. case Token_if:
  2879. case Token_for:
  2880. case Token_switch: {
  2881. Ast *name = lhs[0];
  2882. Ast *label = ast_label_decl(f, ast_token(name), name);
  2883. Ast *stmt = parse_stmt(f);
  2884. #define _SET_LABEL(Kind_, label_) case GB_JOIN2(Ast_, Kind_): (stmt->Kind_).label = label_; break
  2885. switch (stmt->kind) {
  2886. _SET_LABEL(BlockStmt, label);
  2887. _SET_LABEL(IfStmt, label);
  2888. _SET_LABEL(ForStmt, label);
  2889. _SET_LABEL(RangeStmt, label);
  2890. _SET_LABEL(SwitchStmt, label);
  2891. _SET_LABEL(TypeSwitchStmt, label);
  2892. default:
  2893. syntax_error(token, "Labels can only be applied to a loop or switch statement");
  2894. break;
  2895. }
  2896. #undef _SET_LABEL
  2897. return stmt;
  2898. } break;
  2899. }
  2900. }
  2901. return parse_value_decl(f, lhs, docs);
  2902. }
  2903. if (lhs.count > 1) {
  2904. syntax_error(token, "Expected 1 expression");
  2905. return ast_bad_stmt(f, token, f->curr_token);
  2906. }
  2907. switch (token.kind) {
  2908. case Token_Increment:
  2909. case Token_Decrement:
  2910. advance_token(f);
  2911. syntax_error(token, "Postfix '%.*s' statement is not supported", LIT(token.string));
  2912. break;
  2913. }
  2914. #if 0
  2915. switch (token.kind) {
  2916. case Token_Inc:
  2917. case Token_Dec:
  2918. advance_token(f);
  2919. return ast_inc_dec_stmt(f, token, lhs[0]);
  2920. }
  2921. #endif
  2922. return ast_expr_stmt(f, lhs[0]);
  2923. }
  2924. Ast *parse_block_stmt(AstFile *f, b32 is_when) {
  2925. skip_possible_newline_for_literal(f);
  2926. if (!is_when && f->curr_proc == nullptr) {
  2927. syntax_error(f->curr_token, "You cannot use a block statement in the file scope");
  2928. return ast_bad_stmt(f, f->curr_token, f->curr_token);
  2929. }
  2930. return parse_body(f);
  2931. }
  2932. Ast *parse_results(AstFile *f, bool *diverging) {
  2933. if (!allow_token(f, Token_ArrowRight)) {
  2934. return nullptr;
  2935. }
  2936. if (allow_token(f, Token_Not)) {
  2937. if (diverging) *diverging = true;
  2938. return nullptr;
  2939. }
  2940. isize prev_level = f->expr_level;
  2941. defer (f->expr_level = prev_level);
  2942. // f->expr_level = -1;
  2943. if (f->curr_token.kind != Token_OpenParen) {
  2944. Token begin_token = f->curr_token;
  2945. Array<Ast *> empty_names = {};
  2946. auto list = array_make<Ast *>(heap_allocator(), 0, 1);
  2947. Ast *type = parse_type(f);
  2948. Token tag = {};
  2949. array_add(&list, ast_field(f, empty_names, type, nullptr, 0, tag, nullptr, nullptr));
  2950. return ast_field_list(f, begin_token, list);
  2951. }
  2952. Ast *list = nullptr;
  2953. expect_token(f, Token_OpenParen);
  2954. list = parse_field_list(f, nullptr, FieldFlag_Results, Token_CloseParen, true, false);
  2955. expect_token_after(f, Token_CloseParen, "parameter list");
  2956. return list;
  2957. }
  2958. ProcCallingConvention string_to_calling_convention(String s) {
  2959. if (s == "odin") return ProcCC_Odin;
  2960. if (s == "contextless") return ProcCC_Contextless;
  2961. if (s == "cdecl") return ProcCC_CDecl;
  2962. if (s == "c") return ProcCC_CDecl;
  2963. if (s == "stdcall") return ProcCC_StdCall;
  2964. if (s == "std") return ProcCC_StdCall;
  2965. if (s == "fastcall") return ProcCC_FastCall;
  2966. if (s == "fast") return ProcCC_FastCall;
  2967. if (s == "none") return ProcCC_None;
  2968. if (s == "naked") return ProcCC_Naked;
  2969. if (s == "win64") return ProcCC_Win64;
  2970. if (s == "sysv") return ProcCC_SysV;
  2971. if (s == "system") {
  2972. if (build_context.metrics.os == TargetOs_windows) {
  2973. return ProcCC_StdCall;
  2974. }
  2975. return ProcCC_CDecl;
  2976. }
  2977. return ProcCC_Invalid;
  2978. }
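// parse_proc_type parses a procedure type after the 'proc' token. Illustrative Odin
// examples (not taken from this file):
//     proc(a: int, b: string) -> bool
//     proc "c" (argc: i32, argv: ^^u8) -> i32   // optional calling-convention string
//     proc(x: $T) -> T                          // '$' poly types mark the type generic
//     proc() -> !                               // diverging: never returns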
  2979. Ast *parse_proc_type(AstFile *f, Token proc_token) {
  2980. Ast *params = nullptr;
  2981. Ast *results = nullptr;
  2982. bool diverging = false;
  2983. ProcCallingConvention cc = ProcCC_Invalid;
  2984. if (f->curr_token.kind == Token_String) {
  2985. Token token = expect_token(f, Token_String);
  2986. auto c = string_to_calling_convention(string_value_from_token(f, token));
  2987. if (c == ProcCC_Invalid) {
  2988. syntax_error(token, "Unknown procedure calling convention: '%.*s'", LIT(token.string));
  2989. } else {
  2990. cc = c;
  2991. }
  2992. }
  2993. if (cc == ProcCC_Invalid) {
  2994. if (f->in_foreign_block) {
  2995. cc = ProcCC_ForeignBlockDefault;
  2996. } else {
  2997. cc = default_calling_convention();
  2998. }
  2999. }
  3000. expect_token(f, Token_OpenParen);
  3001. params = parse_field_list(f, nullptr, FieldFlag_Signature, Token_CloseParen, true, true);
  3002. expect_token_after(f, Token_CloseParen, "parameter list");
  3003. results = parse_results(f, &diverging);
  3004. u64 tags = 0;
  3005. bool is_generic = false;
  3006. for_array(i, params->FieldList.list) {
  3007. Ast *param = params->FieldList.list[i];
  3008. ast_node(field, Field, param);
  3009. if (field->type != nullptr) {
  3010. if (field->type->kind == Ast_PolyType) {
  3011. is_generic = true;
  3012. goto end;
  3013. }
  3014. for_array(j, field->names) {
  3015. Ast *name = field->names[j];
  3016. if (name->kind == Ast_PolyType) {
  3017. is_generic = true;
  3018. goto end;
  3019. }
  3020. }
  3021. }
  3022. }
  3023. end:
  3024. return ast_proc_type(f, proc_token, params, results, tags, cc, is_generic, diverging);
  3025. }
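// parse_var_type parses the type of a single field, optionally accepting the variadic
// and typeid forms. Illustrative Odin examples (not taken from this file):
//     ..T                       // variadic parameter, when allow_ellipsis is set
//     typeid                    // when allow_typeid_token is set
//     typeid/Specialization     // '/' introduces a specialization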
  3026. Ast *parse_var_type(AstFile *f, bool allow_ellipsis, bool allow_typeid_token) {
  3027. if (allow_ellipsis && f->curr_token.kind == Token_Ellipsis) {
  3028. Token tok = advance_token(f);
  3029. Ast *type = parse_type_or_ident(f);
  3030. if (type == nullptr) {
  3031. syntax_error(tok, "variadic field missing type after '..'");
  3032. type = ast_bad_expr(f, tok, f->curr_token);
  3033. }
  3034. return ast_ellipsis(f, tok, type);
  3035. }
  3036. Ast *type = nullptr;
  3037. if (allow_typeid_token &&
  3038. f->curr_token.kind == Token_typeid) {
  3039. Token token = expect_token(f, Token_typeid);
  3040. Ast *specialization = nullptr;
  3041. if (allow_token(f, Token_Quo)) {
  3042. specialization = parse_type(f);
  3043. }
  3044. type = ast_typeid_type(f, token, specialization);
  3045. } else {
  3046. type = parse_type(f);
  3047. }
  3048. return type;
  3049. }
  3050. enum FieldPrefixKind : i32 {
  3051. FieldPrefix_Unknown = -1,
  3052. FieldPrefix_Invalid = 0,
  3053. FieldPrefix_using, // implies #subtype
  3054. FieldPrefix_const,
  3055. FieldPrefix_no_alias,
  3056. FieldPrefix_c_vararg,
  3057. FieldPrefix_auto_cast,
  3058. FieldPrefix_any_int,
  3059. FieldPrefix_subtype, // does not imply `using` semantics
  3060. };
  3061. struct ParseFieldPrefixMapping {
  3062. String name;
  3063. TokenKind token_kind;
  3064. FieldPrefixKind prefix;
  3065. FieldFlag flag;
  3066. };
  3067. gb_global ParseFieldPrefixMapping parse_field_prefix_mappings[] = {
  3068. {str_lit("using"), Token_using, FieldPrefix_using, FieldFlag_using},
  3069. {str_lit("auto_cast"), Token_auto_cast, FieldPrefix_auto_cast, FieldFlag_auto_cast},
  3070. {str_lit("no_alias"), Token_Hash, FieldPrefix_no_alias, FieldFlag_no_alias},
  3071. {str_lit("c_vararg"), Token_Hash, FieldPrefix_c_vararg, FieldFlag_c_vararg},
  3072. {str_lit("const"), Token_Hash, FieldPrefix_const, FieldFlag_const},
  3073. {str_lit("any_int"), Token_Hash, FieldPrefix_any_int, FieldFlag_any_int},
  3074. {str_lit("subtype"), Token_Hash, FieldPrefix_subtype, FieldFlag_subtype},
  3075. };
  3076. FieldPrefixKind is_token_field_prefix(AstFile *f) {
  3077. switch (f->curr_token.kind) {
  3078. case Token_EOF:
  3079. return FieldPrefix_Invalid;
  3080. case Token_using:
  3081. return FieldPrefix_using;
  3082. case Token_auto_cast:
  3083. return FieldPrefix_auto_cast;
  3084. case Token_Hash:
  3085. advance_token(f);
  3086. switch (f->curr_token.kind) {
  3087. case Token_Ident:
  3088. for (i32 i = 0; i < gb_count_of(parse_field_prefix_mappings); i++) {
  3089. auto const &mapping = parse_field_prefix_mappings[i];
  3090. if (mapping.token_kind == Token_Hash) {
  3091. if (f->curr_token.string == mapping.name) {
  3092. return mapping.prefix;
  3093. }
  3094. }
  3095. }
  3096. break;
  3097. }
  3098. return FieldPrefix_Unknown;
  3099. }
  3100. return FieldPrefix_Invalid;
  3101. }
  3102. u32 parse_field_prefixes(AstFile *f) {
  3103. i32 counts[gb_count_of(parse_field_prefix_mappings)] = {};
  3104. for (;;) {
  3105. FieldPrefixKind kind = is_token_field_prefix(f);
  3106. if (kind == FieldPrefix_Invalid) {
  3107. break;
  3108. }
  3109. if (kind == FieldPrefix_Unknown) {
  3110. syntax_error(f->curr_token, "Unknown prefix kind '#%.*s'", LIT(f->curr_token.string));
  3111. advance_token(f);
  3112. continue;
  3113. }
  3114. for (i32 i = 0; i < gb_count_of(parse_field_prefix_mappings); i++) {
  3115. if (parse_field_prefix_mappings[i].prefix == kind) {
  3116. counts[i] += 1;
  3117. advance_token(f);
  3118. break;
  3119. }
  3120. }
  3121. }
  3122. u32 field_flags = 0;
  3123. for (i32 i = 0; i < gb_count_of(parse_field_prefix_mappings); i++) {
  3124. if (counts[i] > 0) {
  3125. field_flags |= parse_field_prefix_mappings[i].flag;
  3126. if (counts[i] != 1) {
  3127. auto const &mapping = parse_field_prefix_mappings[i];
  3128. String name = mapping.name;
  3129. char const *prefix = "";
  3130. if (mapping.token_kind == Token_Hash) {
  3131. prefix = "#";
  3132. }
  3133. syntax_error(f->curr_token, "Multiple '%s%.*s' in this field list", prefix, LIT(name));
  3134. }
  3135. }
  3136. }
  3137. return field_flags;
  3138. }
  3139. u32 check_field_prefixes(AstFile *f, isize name_count, u32 allowed_flags, u32 set_flags) {
  3140. for (i32 i = 0; i < gb_count_of(parse_field_prefix_mappings); i++) {
  3141. bool err = false;
  3142. auto const &m = parse_field_prefix_mappings[i];
  3143. if ((set_flags & m.flag) != 0) {
  3144. if (m.flag == FieldFlag_using && name_count > 1) {
  3145. err = true;
  3146. syntax_error(f->curr_token, "Cannot apply 'using' to more than one of the same type");
  3147. }
  3148. if ((allowed_flags & m.flag) == 0) {
  3149. err = true;
  3150. char const *prefix = "";
  3151. if (m.token_kind == Token_Hash) {
  3152. prefix = "#";
  3153. }
  3154. syntax_error(f->curr_token, "'%s%.*s' in not allowed within this field list", prefix, LIT(m.name));
  3155. }
  3156. }
  3157. if (err) {
  3158. set_flags &= ~m.flag;
  3159. }
  3160. }
  3161. return set_flags;
  3162. }
  3163. struct AstAndFlags {
  3164. Ast *node;
  3165. u32 flags;
  3166. };
  3167. Array<Ast *> convert_to_ident_list(AstFile *f, Array<AstAndFlags> list, bool ignore_flags, bool allow_poly_names) {
  3168. auto idents = array_make<Ast *>(heap_allocator(), 0, list.count);
  3169. // Convert to ident list
  3170. for_array(i, list) {
  3171. Ast *ident = list[i].node;
  3172. if (!ignore_flags) {
  3173. if (i != 0) {
  3174. syntax_error(ident, "Illegal use of prefixes in parameter list");
  3175. }
  3176. }
  3177. switch (ident->kind) {
  3178. case Ast_Ident:
  3179. case Ast_BadExpr:
  3180. break;
  3181. case Ast_PolyType:
  3182. if (allow_poly_names) {
  3183. if (ident->PolyType.specialization == nullptr) {
  3184. break;
  3185. } else {
  3186. syntax_error(ident, "Expected a polymorphic identifier without any specialization");
  3187. }
  3188. } else {
  3189. syntax_error(ident, "Expected a non-polymorphic identifier");
  3190. }
  3191. /*fallthrough*/
  3192. default:
  3193. syntax_error(ident, "Expected an identifier");
  3194. ident = ast_ident(f, blank_token);
  3195. break;
  3196. }
  3197. array_add(&idents, ident);
  3198. }
  3199. return idents;
  3200. }
  3201. bool parse_expect_field_separator(AstFile *f, Ast *param) {
  3202. Token token = f->curr_token;
  3203. if (allow_token(f, Token_Comma)) {
  3204. return true;
  3205. }
  3206. if (token.kind == Token_Semicolon) {
  3207. String p = token_to_string(token);
  3208. syntax_error(token_end_of_line(f, f->prev_token), "Expected a comma, got a %.*s", LIT(p));
  3209. advance_token(f);
  3210. return true;
  3211. }
  3212. return false;
  3213. }
  3214. Ast *parse_struct_field_list(AstFile *f, isize *name_count_) {
  3215. Token start_token = f->curr_token;
  3216. auto decls = array_make<Ast *>(heap_allocator());
  3217. isize total_name_count = 0;
  3218. Ast *params = parse_field_list(f, &total_name_count, FieldFlag_Struct, Token_CloseBrace, false, false);
  3219. if (name_count_) *name_count_ = total_name_count;
  3220. return params;
  3221. }
  3222. // Returns true if any are polymorphic names
  3223. bool check_procedure_name_list(Array<Ast *> const &names) {
  3224. if (names.count == 0) {
  3225. return false;
  3226. }
  3227. bool first_is_polymorphic = names[0]->kind == Ast_PolyType;
  3228. bool any_polymorphic_names = first_is_polymorphic;
  3229. for (isize i = 1; i < names.count; i++) {
  3230. Ast *name = names[i];
  3231. if (first_is_polymorphic) {
  3232. if (name->kind == Ast_PolyType) {
  3233. any_polymorphic_names = true;
  3234. } else {
  3235. syntax_error(name, "Mixture of polymorphic and non-polymorphic identifiers");
  3236. return any_polymorphic_names;
  3237. }
  3238. } else {
  3239. if (name->kind == Ast_PolyType) {
  3240. any_polymorphic_names = true;
  3241. syntax_error(name, "Mixture of polymorphic and non-polymorphic identifiers");
  3242. return any_polymorphic_names;
  3243. } else {
  3244. // Okay
  3245. }
  3246. }
  3247. }
  3248. return any_polymorphic_names;
  3249. }
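// parse_field_list parses either a type-only list or a 'names: type' list; the ':' after
// the first run of expressions is what switches it into the named form. Illustrative Odin
// examples (not taken from this file):
//     (int, string, ..any)                  // type-only: blank names are synthesised
//     (a, b: int, c := 1, d: f32 = 2.0)     // named fields with optional default values
//     (#no_alias p: ^int, #any_int n: int)  // prefixes map onto FieldFlag_* bits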
  3250. Ast *parse_field_list(AstFile *f, isize *name_count_, u32 allowed_flags, TokenKind follow, bool allow_default_parameters, bool allow_typeid_token) {
  3251. Token start_token = f->curr_token;
  3252. CommentGroup *docs = f->lead_comment;
  3253. auto params = array_make<Ast *>(heap_allocator());
  3254. auto list = array_make<AstAndFlags>(heap_allocator());
  3255. defer (array_free(&list));
  3256. bool allow_poly_names = allow_typeid_token;
  3257. isize total_name_count = 0;
  3258. bool allow_ellipsis = allowed_flags&FieldFlag_ellipsis;
  3259. bool seen_ellipsis = false;
  3260. bool is_signature = (allowed_flags & FieldFlag_Signature) == FieldFlag_Signature;
  3261. while (f->curr_token.kind != follow &&
  3262. f->curr_token.kind != Token_Colon &&
  3263. f->curr_token.kind != Token_EOF) {
  3264. u32 flags = parse_field_prefixes(f);
  3265. Ast *param = parse_var_type(f, allow_ellipsis, allow_typeid_token);
  3266. if (param->kind == Ast_Ellipsis) {
  3267. if (seen_ellipsis) syntax_error(param, "Extra variadic parameter after ellipsis");
  3268. seen_ellipsis = true;
  3269. } else if (seen_ellipsis) {
  3270. syntax_error(param, "Extra parameter after ellipsis");
  3271. }
  3272. AstAndFlags naf = {param, flags};
  3273. array_add(&list, naf);
  3274. if (!allow_token(f, Token_Comma)) {
  3275. break;
  3276. }
  3277. }
  3278. if (f->curr_token.kind == Token_Colon) {
  3279. Array<Ast *> names = convert_to_ident_list(f, list, true, allow_poly_names); // Copy for semantic reasons
  3280. if (names.count == 0) {
  3281. syntax_error(f->curr_token, "Empty field declaration");
  3282. }
  3283. bool any_polymorphic_names = check_procedure_name_list(names);
  3284. u32 set_flags = 0;
  3285. if (list.count > 0) {
  3286. set_flags = list[0].flags;
  3287. }
  3288. set_flags = check_field_prefixes(f, names.count, allowed_flags, set_flags);
  3289. total_name_count += names.count;
  3290. Ast *type = nullptr;
  3291. Ast *default_value = nullptr;
  3292. Token tag = {};
  3293. expect_token_after(f, Token_Colon, "field list");
  3294. if (f->curr_token.kind != Token_Eq) {
  3295. type = parse_var_type(f, allow_ellipsis, allow_typeid_token);
  3296. Ast *tt = unparen_expr(type);
  3297. if (tt == nullptr) {
  3298. syntax_error(f->prev_token, "Invalid type expression in field list");
  3299. } else if (is_signature && !any_polymorphic_names && tt->kind == Ast_TypeidType && tt->TypeidType.specialization != nullptr) {
  3300. syntax_error(type, "Specialization of typeid is not allowed without polymorphic names");
  3301. }
  3302. }
  3303. if (allow_token(f, Token_Eq)) {
  3304. default_value = parse_expr(f, false);
  3305. if (!allow_default_parameters) {
  3306. syntax_error(f->curr_token, "Default parameters are only allowed for procedures");
  3307. default_value = nullptr;
  3308. }
  3309. }
  3310. if (default_value != nullptr && names.count > 1) {
  3311. syntax_error(f->curr_token, "Default parameters can only be applied to single values");
  3312. }
  3313. if (allowed_flags == FieldFlag_Struct && default_value != nullptr) {
  3314. syntax_error(default_value, "Default parameters are not allowed for structs");
  3315. default_value = nullptr;
  3316. }
  3317. if (type != nullptr && type->kind == Ast_Ellipsis) {
  3318. if (seen_ellipsis) syntax_error(type, "Extra variadic parameter after ellipsis");
  3319. seen_ellipsis = true;
  3320. if (names.count != 1) {
  3321. syntax_error(type, "Variadic parameters can only have one field name");
  3322. }
  3323. } else if (seen_ellipsis && default_value == nullptr) {
  3324. syntax_error(f->curr_token, "Extra parameter after ellipsis without a default value");
  3325. }
  3326. if (type != nullptr && default_value == nullptr) {
  3327. if (f->curr_token.kind == Token_String) {
  3328. tag = expect_token(f, Token_String);
  3329. if ((allowed_flags & FieldFlag_Tags) == 0) {
  3330. syntax_error(tag, "Field tags are only allowed within structures");
  3331. }
  3332. }
  3333. }
  3334. parse_expect_field_separator(f, type);
  3335. Ast *param = ast_field(f, names, type, default_value, set_flags, tag, docs, f->line_comment);
  3336. array_add(&params, param);
  3337. while (f->curr_token.kind != follow &&
  3338. f->curr_token.kind != Token_EOF) {
  3339. CommentGroup *docs = f->lead_comment;
  3340. u32 set_flags = parse_field_prefixes(f);
  3341. Token tag = {};
  3342. Array<Ast *> names = parse_ident_list(f, allow_poly_names);
  3343. if (names.count == 0) {
  3344. syntax_error(f->curr_token, "Empty field declaration");
  3345. break;
  3346. }
  3347. bool any_polymorphic_names = check_procedure_name_list(names);
  3348. set_flags = check_field_prefixes(f, names.count, allowed_flags, set_flags);
  3349. total_name_count += names.count;
  3350. Ast *type = nullptr;
  3351. Ast *default_value = nullptr;
  3352. expect_token_after(f, Token_Colon, "field list");
  3353. if (f->curr_token.kind != Token_Eq) {
  3354. type = parse_var_type(f, allow_ellipsis, allow_typeid_token);
  3355. Ast *tt = unparen_expr(type);
  3356. if (is_signature && !any_polymorphic_names && tt->kind == Ast_TypeidType && tt->TypeidType.specialization != nullptr) {
  3357. syntax_error(type, "Specialization of typeid is not allowed without polymorphic names");
  3358. }
  3359. }
  3360. if (allow_token(f, Token_Eq)) {
  3361. default_value = parse_expr(f, false);
  3362. if (!allow_default_parameters) {
  3363. syntax_error(f->curr_token, "Default parameters are only allowed for procedures");
  3364. default_value = nullptr;
  3365. }
  3366. }
  3367. if (default_value != nullptr && names.count > 1) {
  3368. syntax_error(f->curr_token, "Default parameters can only be applied to single values");
  3369. }
  3370. if (type != nullptr && type->kind == Ast_Ellipsis) {
  3371. if (seen_ellipsis) syntax_error(type, "Extra variadic parameter after ellipsis");
  3372. seen_ellipsis = true;
  3373. if (names.count != 1) {
  3374. syntax_error(type, "Variadic parameters can only have one field name");
  3375. }
  3376. } else if (seen_ellipsis && default_value == nullptr) {
  3377. syntax_error(f->curr_token, "Extra parameter after ellipsis without a default value");
  3378. }
  3379. if (type != nullptr && default_value == nullptr) {
  3380. if (f->curr_token.kind == Token_String) {
  3381. tag = expect_token(f, Token_String);
  3382. if ((allowed_flags & FieldFlag_Tags) == 0) {
  3383. syntax_error(tag, "Field tags are only allowed within structures");
  3384. }
  3385. }
  3386. }
  3387. bool ok = parse_expect_field_separator(f, param);
  3388. Ast *param = ast_field(f, names, type, default_value, set_flags, tag, docs, f->line_comment);
  3389. array_add(&params, param);
  3390. if (!ok) {
  3391. break;
  3392. }
  3393. }
  3394. if (name_count_) *name_count_ = total_name_count;
  3395. return ast_field_list(f, start_token, params);
  3396. }
  3397. for_array(i, list) {
  3398. Ast *type = list[i].node;
  3399. Token token = blank_token;
  3400. if (allowed_flags&FieldFlag_Results) {
  3401. // NOTE(bill): Make this nothing and not `_`
  3402. token.string = str_lit("");
  3403. }
  3404. auto names = array_make<Ast *>(heap_allocator(), 1);
  3405. token.pos = ast_token(type).pos;
  3406. names[0] = ast_ident(f, token);
  3407. u32 flags = check_field_prefixes(f, list.count, allowed_flags, list[i].flags);
  3408. Token tag = {};
  3409. Ast *param = ast_field(f, names, list[i].node, nullptr, flags, tag, docs, f->line_comment);
  3410. array_add(&params, param);
  3411. }
  3412. if (name_count_) *name_count_ = total_name_count;
  3413. return ast_field_list(f, start_token, params);
  3414. }
  3415. Ast *parse_type_or_ident(AstFile *f) {
  3416. bool prev_allow_type = f->allow_type;
  3417. isize prev_expr_level = f->expr_level;
  3418. defer ({
  3419. f->allow_type = prev_allow_type;
  3420. f->expr_level = prev_expr_level;
  3421. });
  3422. f->allow_type = true;
  3423. f->expr_level = -1;
  3424. bool lhs = true;
  3425. Ast *operand = parse_operand(f, lhs);
  3426. Ast *type = parse_atom_expr(f, operand, lhs);
  3427. return type;
  3428. }
  3429. Ast *parse_body(AstFile *f) {
  3430. Array<Ast *> stmts = {};
  3431. Token open, close;
  3432. isize prev_expr_level = f->expr_level;
  3433. // NOTE(bill): The body may be within an expression so reset to zero
  3434. f->expr_level = 0;
  3435. open = expect_token(f, Token_OpenBrace);
  3436. stmts = parse_stmt_list(f);
  3437. close = expect_token(f, Token_CloseBrace);
  3438. f->expr_level = prev_expr_level;
  3439. return ast_block_stmt(f, stmts, open, close);
  3440. }
  3441. bool parse_control_statement_semicolon_separator(AstFile *f) {
  3442. Token tok = peek_token(f);
  3443. if (tok.kind != Token_OpenBrace) {
  3444. return allow_token(f, Token_Semicolon);
  3445. }
  3446. if (f->curr_token.string == ";") {
  3447. return allow_token(f, Token_Semicolon);
  3448. }
  3449. return false;
  3450. }
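// parse_if_stmt accepts an optional init statement separated from the condition by ';'.
// Illustrative Odin examples (not taken from this file):
//     if cond { ... } else if other { ... } else { ... }
//     if ok := check(); ok { ... }      // 'check' is just a placeholder name
//     if cond do thing()                // a 'do' body must stay on the same line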
  3451. Ast *parse_if_stmt(AstFile *f) {
  3452. if (f->curr_proc == nullptr) {
  3453. syntax_error(f->curr_token, "You cannot use an if statement in the file scope");
  3454. return ast_bad_stmt(f, f->curr_token, f->curr_token);
  3455. }
  3456. Token token = expect_token(f, Token_if);
  3457. Ast *init = nullptr;
  3458. Ast *cond = nullptr;
  3459. Ast *body = nullptr;
  3460. Ast *else_stmt = nullptr;
  3461. isize prev_level = f->expr_level;
  3462. f->expr_level = -1;
  3463. bool prev_allow_in_expr = f->allow_in_expr;
  3464. f->allow_in_expr = true;
  3465. if (allow_token(f, Token_Semicolon)) {
  3466. cond = parse_expr(f, false);
  3467. } else {
  3468. init = parse_simple_stmt(f, StmtAllowFlag_None);
  3469. if (parse_control_statement_semicolon_separator(f)) {
  3470. cond = parse_expr(f, false);
  3471. } else {
  3472. cond = convert_stmt_to_expr(f, init, str_lit("boolean expression"));
  3473. init = nullptr;
  3474. }
  3475. }
  3476. f->expr_level = prev_level;
  3477. f->allow_in_expr = prev_allow_in_expr;
  3478. if (cond == nullptr) {
  3479. syntax_error(f->curr_token, "Expected condition for if statement");
  3480. }
  3481. if (allow_token(f, Token_do)) {
  3482. body = convert_stmt_to_body(f, parse_stmt(f));
  3483. if (build_context.disallow_do) {
  3484. syntax_error(body, "'do' has been disallowed");
  3485. } else if (!ast_on_same_line(cond, body)) {
  3486. syntax_error(body, "The body of a 'do' be on the same line as if condition");
  3487. }
  3488. } else {
  3489. body = parse_block_stmt(f, false);
  3490. }
  3491. skip_possible_newline_for_literal(f);
  3492. if (f->curr_token.kind == Token_else) {
  3493. Token else_token = expect_token(f, Token_else);
  3494. switch (f->curr_token.kind) {
  3495. case Token_if:
  3496. else_stmt = parse_if_stmt(f);
  3497. break;
  3498. case Token_OpenBrace:
  3499. else_stmt = parse_block_stmt(f, false);
  3500. break;
  3501. case Token_do: {
  3502. expect_token(f, Token_do);
  3503. else_stmt = convert_stmt_to_body(f, parse_stmt(f));
  3504. if (build_context.disallow_do) {
  3505. syntax_error(else_stmt, "'do' has been disallowed");
  3506. } else if (!ast_on_same_line(else_token, else_stmt)) {
  3507. syntax_error(else_stmt, "The body of a 'do' be on the same line as 'else'");
  3508. }
  3509. } break;
  3510. default:
  3511. syntax_error(f->curr_token, "Expected if statement block statement");
  3512. else_stmt = ast_bad_stmt(f, f->curr_token, f->tokens[f->curr_token_index+1]);
  3513. break;
  3514. }
  3515. }
  3516. return ast_if_stmt(f, token, init, cond, body, else_stmt);
  3517. }
  3518. Ast *parse_when_stmt(AstFile *f) {
  3519. Token token = expect_token(f, Token_when);
  3520. Ast *cond = nullptr;
  3521. Ast *body = nullptr;
  3522. Ast *else_stmt = nullptr;
  3523. isize prev_level = f->expr_level;
  3524. f->expr_level = -1;
  3525. cond = parse_expr(f, false);
  3526. f->expr_level = prev_level;
  3527. if (cond == nullptr) {
  3528. syntax_error(f->curr_token, "Expected condition for when statement");
  3529. }
  3530. if (allow_token(f, Token_do)) {
  3531. body = convert_stmt_to_body(f, parse_stmt(f));
  3532. if (build_context.disallow_do) {
  3533. syntax_error(body, "'do' has been disallowed");
  3534. } else if (!ast_on_same_line(cond, body)) {
  3535. syntax_error(body, "The body of a 'do' be on the same line as when statement");
  3536. }
  3537. } else {
  3538. body = parse_block_stmt(f, true);
  3539. }
  3540. skip_possible_newline_for_literal(f);
  3541. if (f->curr_token.kind == Token_else) {
  3542. Token else_token = expect_token(f, Token_else);
  3543. switch (f->curr_token.kind) {
  3544. case Token_when:
  3545. else_stmt = parse_when_stmt(f);
  3546. break;
  3547. case Token_OpenBrace:
  3548. else_stmt = parse_block_stmt(f, true);
  3549. break;
  3550. case Token_do: {
  3551. expect_token(f, Token_do);
  3552. else_stmt = convert_stmt_to_body(f, parse_stmt(f));
  3553. if (build_context.disallow_do) {
  3554. syntax_error(else_stmt, "'do' has been disallowed");
  3555. } else if (!ast_on_same_line(else_token, else_stmt)) {
  3556. syntax_error(else_stmt, "The body of a 'do' be on the same line as 'else'");
  3557. }
  3558. } break;
  3559. default:
  3560. syntax_error(f->curr_token, "Expected when statement block statement");
  3561. else_stmt = ast_bad_stmt(f, f->curr_token, f->tokens[f->curr_token_index+1]);
  3562. break;
  3563. }
  3564. }
  3565. return ast_when_stmt(f, token, cond, body, else_stmt);
  3566. }
  3567. Ast *parse_return_stmt(AstFile *f) {
  3568. Token token = expect_token(f, Token_return);
  3569. if (f->curr_proc == nullptr) {
  3570. syntax_error(f->curr_token, "You cannot use a return statement in the file scope");
  3571. return ast_bad_stmt(f, token, f->curr_token);
  3572. }
  3573. if (f->expr_level > 0) {
  3574. syntax_error(f->curr_token, "You cannot use a return statement within an expression");
  3575. return ast_bad_stmt(f, token, f->curr_token);
  3576. }
  3577. auto results = array_make<Ast *>(heap_allocator());
  3578. while (f->curr_token.kind != Token_Semicolon && f->curr_token.kind != Token_CloseBrace) {
  3579. Ast *arg = parse_expr(f, false);
  3580. array_add(&results, arg);
  3581. if (f->curr_token.kind != Token_Comma ||
  3582. f->curr_token.kind == Token_EOF) {
  3583. break;
  3584. }
  3585. advance_token(f);
  3586. }
  3587. expect_semicolon(f);
  3588. return ast_return_stmt(f, token, results);
  3589. }
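// parse_for_stmt covers all loop forms. Illustrative Odin examples (not taken from this file):
//     for { }                          // infinite loop
//     for cond { }                     // while-style
//     for i := 0; i < n; i += 1 { }    // init; condition; post
//     for x, i in xs { }               // range form ('in' assignment is rewritten below)
//     for in xs { }                    // range form without names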
  3590. Ast *parse_for_stmt(AstFile *f) {
  3591. if (f->curr_proc == nullptr) {
  3592. syntax_error(f->curr_token, "You cannot use a for statement in the file scope");
  3593. return ast_bad_stmt(f, f->curr_token, f->curr_token);
  3594. }
  3595. Token token = expect_token(f, Token_for);
  3596. Ast *init = nullptr;
  3597. Ast *cond = nullptr;
  3598. Ast *post = nullptr;
  3599. Ast *body = nullptr;
  3600. bool is_range = false;
  3601. if (f->curr_token.kind != Token_OpenBrace &&
  3602. f->curr_token.kind != Token_do) {
  3603. isize prev_level = f->expr_level;
  3604. defer (f->expr_level = prev_level);
  3605. f->expr_level = -1;
  3606. if (f->curr_token.kind == Token_in) {
  3607. Token in_token = expect_token(f, Token_in);
  3608. Ast *rhs = nullptr;
  3609. bool prev_allow_range = f->allow_range;
  3610. f->allow_range = true;
  3611. rhs = parse_expr(f, false);
  3612. f->allow_range = prev_allow_range;
  3613. if (allow_token(f, Token_do)) {
  3614. body = convert_stmt_to_body(f, parse_stmt(f));
  3615. if (build_context.disallow_do) {
  3616. syntax_error(body, "'do' has been disallowed");
  3617. } else if (!ast_on_same_line(token, body)) {
  3618. syntax_error(body, "The body of a 'do' be on the same line as the 'for' token");
  3619. }
  3620. } else {
  3621. body = parse_block_stmt(f, false);
  3622. }
  3623. return ast_range_stmt(f, token, {}, in_token, rhs, body);
  3624. }
  3625. if (f->curr_token.kind != Token_Semicolon) {
  3626. cond = parse_simple_stmt(f, StmtAllowFlag_In);
  3627. if (cond->kind == Ast_AssignStmt && cond->AssignStmt.op.kind == Token_in) {
  3628. is_range = true;
  3629. }
  3630. }
  3631. if (!is_range && parse_control_statement_semicolon_separator(f)) {
  3632. init = cond;
  3633. cond = nullptr;
  3634. if (f->curr_token.kind == Token_OpenBrace || f->curr_token.kind == Token_do) {
  3635. syntax_error(f->curr_token, "Expected ';', followed by a condition expression and post statement, got %.*s", LIT(token_strings[f->curr_token.kind]));
  3636. } else {
  3637. if (f->curr_token.kind != Token_Semicolon) {
  3638. cond = parse_simple_stmt(f, StmtAllowFlag_None);
  3639. }
  3640. if (f->curr_token.string != ";") {
  3641. syntax_error(f->curr_token, "Expected ';', got %.*s", LIT(token_to_string(f->curr_token)));
  3642. } else {
  3643. expect_token(f, Token_Semicolon);
  3644. }
  3645. if (f->curr_token.kind != Token_OpenBrace &&
  3646. f->curr_token.kind != Token_do) {
  3647. post = parse_simple_stmt(f, StmtAllowFlag_None);
  3648. }
  3649. }
  3650. }
  3651. }
  3652. if (allow_token(f, Token_do)) {
  3653. body = convert_stmt_to_body(f, parse_stmt(f));
  3654. if (build_context.disallow_do) {
  3655. syntax_error(body, "'do' has been disallowed");
  3656. } else if (!ast_on_same_line(token, body)) {
  3657. syntax_error(body, "The body of a 'do' be on the same line as the 'for' token");
  3658. }
  3659. } else {
  3660. body = parse_block_stmt(f, false);
  3661. }
  3662. if (is_range) {
  3663. GB_ASSERT(cond->kind == Ast_AssignStmt);
  3664. Token in_token = cond->AssignStmt.op;
  3665. Slice<Ast *> vals = cond->AssignStmt.lhs;
  3666. Ast *rhs = nullptr;
  3667. if (cond->AssignStmt.rhs.count > 0) {
  3668. rhs = cond->AssignStmt.rhs[0];
  3669. }
  3670. return ast_range_stmt(f, token, vals, in_token, rhs, body);
  3671. }
  3672. cond = convert_stmt_to_expr(f, cond, str_lit("boolean expression"));
  3673. return ast_for_stmt(f, token, init, cond, post, body);
  3674. }
  3675. Ast *parse_case_clause(AstFile *f, bool is_type) {
  3676. Token token = f->curr_token;
  3677. Array<Ast *> list = {};
  3678. expect_token(f, Token_case);
  3679. bool prev_allow_range = f->allow_range;
  3680. bool prev_allow_in_expr = f->allow_in_expr;
  3681. f->allow_range = !is_type;
  3682. f->allow_in_expr = !is_type;
  3683. if (f->curr_token.kind != Token_Colon) {
  3684. list = parse_rhs_expr_list(f);
  3685. }
  3686. f->allow_range = prev_allow_range;
  3687. f->allow_in_expr = prev_allow_in_expr;
  3688. expect_token(f, Token_Colon);
  3689. Array<Ast *> stmts = parse_stmt_list(f);
  3690. return ast_case_clause(f, token, list, stmts);
  3691. }
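// parse_switch_stmt distinguishes value switches from type switches. Illustrative Odin
// examples (not taken from this file):
//     switch x { case 1, 2: ...  case: ... }
//     switch v := expr; v { ... }               // optional init statement before ';'
//     switch kind in variant { case int: ... }  // 'in' turns it into a type switch
//     switch in variant { ... }                 // a blank '_' identifier is synthesised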
  3692. Ast *parse_switch_stmt(AstFile *f) {
  3693. if (f->curr_proc == nullptr) {
  3694. syntax_error(f->curr_token, "You cannot use a switch statement in the file scope");
  3695. return ast_bad_stmt(f, f->curr_token, f->curr_token);
  3696. }
  3697. Token token = expect_token(f, Token_switch);
  3698. Ast *init = nullptr;
  3699. Ast *tag = nullptr;
  3700. Ast *body = nullptr;
  3701. Token open, close;
  3702. bool is_type_switch = false;
  3703. auto list = array_make<Ast *>(heap_allocator());
  3704. if (f->curr_token.kind != Token_OpenBrace) {
  3705. isize prev_level = f->expr_level;
  3706. f->expr_level = -1;
  3707. defer (f->expr_level = prev_level);
  3708. if (allow_token(f, Token_in)) {
  3709. auto lhs = array_make<Ast *>(heap_allocator(), 0, 1);
  3710. auto rhs = array_make<Ast *>(heap_allocator(), 0, 1);
  3711. Token blank_ident = token;
  3712. blank_ident.kind = Token_Ident;
  3713. blank_ident.string = str_lit("_");
  3714. Ast *blank = ast_ident(f, blank_ident);
  3715. array_add(&lhs, blank);
  3716. array_add(&rhs, parse_expr(f, true));
  3717. tag = ast_assign_stmt(f, token, lhs, rhs);
  3718. is_type_switch = true;
  3719. } else {
  3720. tag = parse_simple_stmt(f, StmtAllowFlag_In);
  3721. if (tag->kind == Ast_AssignStmt && tag->AssignStmt.op.kind == Token_in) {
  3722. is_type_switch = true;
  3723. } else if (parse_control_statement_semicolon_separator(f)) {
  3724. init = tag;
  3725. tag = nullptr;
  3726. if (f->curr_token.kind != Token_OpenBrace) {
  3727. tag = parse_simple_stmt(f, StmtAllowFlag_None);
  3728. }
  3729. }
  3730. }
  3731. }
  3732. skip_possible_newline(f);
  3733. open = expect_token(f, Token_OpenBrace);
  3734. while (f->curr_token.kind == Token_case) {
  3735. array_add(&list, parse_case_clause(f, is_type_switch));
  3736. }
  3737. close = expect_token(f, Token_CloseBrace);
  3738. body = ast_block_stmt(f, list, open, close);
  3739. if (is_type_switch) {
  3740. return ast_type_switch_stmt(f, token, tag, body);
  3741. }
  3742. tag = convert_stmt_to_expr(f, tag, str_lit("switch expression"));
  3743. return ast_switch_stmt(f, token, init, tag, body);
  3744. }
  3745. Ast *parse_defer_stmt(AstFile *f) {
  3746. if (f->curr_proc == nullptr) {
  3747. syntax_error(f->curr_token, "You cannot use a defer statement in the file scope");
  3748. return ast_bad_stmt(f, f->curr_token, f->curr_token);
  3749. }
  3750. Token token = expect_token(f, Token_defer);
  3751. Ast *stmt = parse_stmt(f);
  3752. switch (stmt->kind) {
  3753. case Ast_EmptyStmt:
  3754. syntax_error(token, "Empty statement after defer (e.g. ';')");
  3755. break;
  3756. case Ast_DeferStmt:
  3757. syntax_error(token, "You cannot defer a defer statement");
  3758. stmt = stmt->DeferStmt.stmt;
  3759. break;
  3760. case Ast_ReturnStmt:
  3761. syntax_error(token, "You cannot defer a return statement");
  3762. break;
  3763. }
  3764. return ast_defer_stmt(f, token, stmt);
  3765. }
  3766. enum ImportDeclKind {
  3767. ImportDecl_Standard,
  3768. ImportDecl_Using,
  3769. };
  3770. Ast *parse_import_decl(AstFile *f, ImportDeclKind kind) {
  3771. CommentGroup *docs = f->lead_comment;
  3772. Token token = expect_token(f, Token_import);
  3773. Token import_name = {};
  3774. bool is_using = kind != ImportDecl_Standard;
  3775. switch (f->curr_token.kind) {
  3776. case Token_Ident:
  3777. import_name = advance_token(f);
  3778. break;
  3779. default:
  3780. import_name.pos = f->curr_token.pos;
  3781. break;
  3782. }
  3783. if (!is_using && is_blank_ident(import_name)) {
  3784. syntax_error(import_name, "Illegal import name: '_'");
  3785. }
  3786. Token file_path = expect_token_after(f, Token_String, "import");
  3787. Ast *s = nullptr;
  3788. if (f->curr_proc != nullptr) {
  3789. syntax_error(import_name, "You cannot use 'import' within a procedure. This must be done at the file scope");
  3790. s = ast_bad_decl(f, import_name, file_path);
  3791. } else {
  3792. s = ast_import_decl(f, token, is_using, file_path, import_name, docs, f->line_comment);
  3793. array_add(&f->imports, s);
  3794. }
  3795. if (is_using) {
  3796. syntax_error(import_name, "'using import' is not allowed, please use the import name explicitly");
  3797. }
  3798. expect_semicolon(f);
  3799. return s;
  3800. }
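// parse_foreign_decl handles both foreign blocks and foreign imports. Illustrative Odin
// examples (not taken from this file; the paths are placeholders):
//     foreign libc { putchar :: proc "c" (c: i32) -> i32 --- }
//     foreign import libc "system:c"
//     foreign import gl { "lib/gl_a.lib", "lib/gl_b.lib" }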
  3801. Ast *parse_foreign_decl(AstFile *f) {
  3802. CommentGroup *docs = f->lead_comment;
  3803. Token token = expect_token(f, Token_foreign);
  3804. switch (f->curr_token.kind) {
  3805. case Token_Ident:
  3806. case Token_OpenBrace:
  3807. return parse_foreign_block(f, token);
  3808. case Token_import: {
  3809. Token import_token = expect_token(f, Token_import);
  3810. Token lib_name = {};
  3811. switch (f->curr_token.kind) {
  3812. case Token_Ident:
  3813. lib_name = advance_token(f);
  3814. break;
  3815. default:
  3816. lib_name.pos = token.pos;
  3817. break;
  3818. }
  3819. if (is_blank_ident(lib_name)) {
  3820. syntax_error(lib_name, "Illegal foreign import name: '_'");
  3821. }
  3822. Array<Token> filepaths = {};
  3823. if (allow_token(f, Token_OpenBrace)) {
  3824. array_init(&filepaths, heap_allocator());
  3825. while (f->curr_token.kind != Token_CloseBrace &&
  3826. f->curr_token.kind != Token_EOF) {
  3827. Token path = expect_token(f, Token_String);
  3828. array_add(&filepaths, path);
  3829. if (!allow_token(f, Token_Comma)) {
  3830. break;
  3831. }
  3832. }
  3833. expect_token(f, Token_CloseBrace);
  3834. } else {
  3835. filepaths = array_make<Token>(heap_allocator(), 0, 1);
  3836. Token path = expect_token(f, Token_String);
  3837. array_add(&filepaths, path);
  3838. }
  3839. Ast *s = nullptr;
  3840. if (filepaths.count == 0) {
  3841. syntax_error(lib_name, "foreign import without any paths");
  3842. s = ast_bad_decl(f, lib_name, f->curr_token);
  3843. } else if (f->curr_proc != nullptr) {
3844. syntax_error(lib_name, "You cannot use 'foreign import' within a procedure. This must be done at the file scope");
  3845. s = ast_bad_decl(f, lib_name, filepaths[0]);
  3846. } else {
  3847. s = ast_foreign_import_decl(f, token, filepaths, lib_name, docs, f->line_comment);
  3848. }
  3849. expect_semicolon(f);
  3850. return s;
  3851. }
  3852. }
  3853. syntax_error(token, "Invalid foreign declaration");
  3854. return ast_bad_decl(f, token, f->curr_token);
  3855. }
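// parse_attribute parses '@ident' or '@(elem, key=value, ...)' and attaches the
// attribute to the following value, foreign block, or foreign import declaration.
// Illustrative Odin input (assumed example):
//
//     @(private="file")
//     helper :: proc() {}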
  3856. Ast *parse_attribute(AstFile *f, Token token, TokenKind open_kind, TokenKind close_kind) {
  3857. Array<Ast *> elems = {};
  3858. Token open = {};
  3859. Token close = {};
  3860. if (f->curr_token.kind == Token_Ident) {
  3861. elems = array_make<Ast *>(heap_allocator(), 0, 1);
  3862. Ast *elem = parse_ident(f);
  3863. array_add(&elems, elem);
  3864. } else {
  3865. open = expect_token(f, open_kind);
  3866. f->expr_level++;
  3867. if (f->curr_token.kind != close_kind) {
  3868. elems = array_make<Ast *>(heap_allocator());
  3869. while (f->curr_token.kind != close_kind &&
  3870. f->curr_token.kind != Token_EOF) {
  3871. Ast *elem = nullptr;
  3872. elem = parse_ident(f);
  3873. if (f->curr_token.kind == Token_Eq) {
  3874. Token eq = expect_token(f, Token_Eq);
  3875. Ast *value = parse_value(f);
  3876. elem = ast_field_value(f, elem, value, eq);
  3877. }
  3878. array_add(&elems, elem);
  3879. if (!allow_token(f, Token_Comma)) {
  3880. break;
  3881. }
  3882. }
  3883. }
  3884. f->expr_level--;
  3885. close = expect_closing(f, close_kind, str_lit("attribute"));
  3886. }
  3887. Ast *attribute = ast_attribute(f, token, open, close, elems);
  3888. skip_possible_newline(f);
  3889. Ast *decl = parse_stmt(f);
  3890. if (decl->kind == Ast_ValueDecl) {
  3891. array_add(&decl->ValueDecl.attributes, attribute);
  3892. } else if (decl->kind == Ast_ForeignBlockDecl) {
  3893. array_add(&decl->ForeignBlockDecl.attributes, attribute);
  3894. } else if (decl->kind == Ast_ForeignImportDecl) {
  3895. array_add(&decl->ForeignImportDecl.attributes, attribute);
3896. } else {
  3897. syntax_error(decl, "Expected a value or foreign declaration after an attribute, got %.*s", LIT(ast_strings[decl->kind]));
  3898. return ast_bad_stmt(f, token, f->curr_token);
  3899. }
  3900. return decl;
  3901. }
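// parse_unrolled_for_loop parses the loop following '#unroll': one or two loop
// variables, 'in', a range/expression, and either a block or a 'do' body.
// Illustrative Odin input (assumed example):
//
//     #unroll for i in 0..<4 {
//         fmt.println(i)
//     }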
  3902. Ast *parse_unrolled_for_loop(AstFile *f, Token unroll_token) {
  3903. Token for_token = expect_token(f, Token_for);
  3904. Ast *val0 = nullptr;
  3905. Ast *val1 = nullptr;
  3906. Token in_token = {};
  3907. Ast *expr = nullptr;
  3908. Ast *body = nullptr;
  3909. bool bad_stmt = false;
  3910. if (f->curr_token.kind != Token_in) {
  3911. Array<Ast *> idents = parse_ident_list(f, false);
  3912. switch (idents.count) {
  3913. case 1:
  3914. val0 = idents[0];
  3915. break;
  3916. case 2:
  3917. val0 = idents[0];
  3918. val1 = idents[1];
  3919. break;
  3920. default:
  3921. syntax_error(for_token, "Expected either 1 or 2 identifiers");
  3922. bad_stmt = true;
  3923. break;
  3924. }
  3925. }
  3926. in_token = expect_token(f, Token_in);
  3927. bool prev_allow_range = f->allow_range;
  3928. isize prev_level = f->expr_level;
  3929. f->allow_range = true;
  3930. f->expr_level = -1;
  3931. expr = parse_expr(f, false);
  3932. f->expr_level = prev_level;
  3933. f->allow_range = prev_allow_range;
  3934. if (allow_token(f, Token_do)) {
  3935. body = convert_stmt_to_body(f, parse_stmt(f));
  3936. if (build_context.disallow_do) {
  3937. syntax_error(body, "'do' has been disallowed");
  3938. } else if (!ast_on_same_line(for_token, body)) {
3939. syntax_error(body, "The body of a 'do' must be on the same line as the 'for' token");
  3940. }
  3941. } else {
  3942. body = parse_block_stmt(f, false);
  3943. }
  3944. if (bad_stmt) {
  3945. return ast_bad_stmt(f, unroll_token, f->curr_token);
  3946. }
  3947. return ast_unroll_range_stmt(f, unroll_token, for_token, val0, val1, in_token, expr, body);
  3948. }
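// parse_stmt is the statement-level dispatcher. Besides keywords and simple
// statements it recognises the statement directives handled below:
// #bounds_check/#no_bounds_check, #type_assert/#no_type_assert, #partial,
// #assert/#panic, #force_inline/#force_no_inline, and #unroll
// (e.g. '#partial switch x { ... }', an assumed illustrative example).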
  3949. Ast *parse_stmt(AstFile *f) {
  3950. Ast *s = nullptr;
  3951. Token token = f->curr_token;
  3952. switch (token.kind) {
  3953. // Operands
  3954. case Token_context: // Also allows for `context =`
  3955. case Token_proc:
  3956. case Token_Ident:
  3957. case Token_Integer:
  3958. case Token_Float:
  3959. case Token_Imag:
  3960. case Token_Rune:
  3961. case Token_String:
  3962. case Token_OpenParen:
  3963. case Token_Pointer:
  3964. case Token_asm: // Inline assembly
  3965. // Unary Operators
  3966. case Token_Add:
  3967. case Token_Sub:
  3968. case Token_Xor:
  3969. case Token_Not:
  3970. case Token_And:
  3971. s = parse_simple_stmt(f, StmtAllowFlag_Label);
  3972. expect_semicolon(f);
  3973. return s;
  3974. case Token_foreign:
  3975. return parse_foreign_decl(f);
  3976. case Token_import:
  3977. return parse_import_decl(f, ImportDecl_Standard);
  3978. case Token_if: return parse_if_stmt(f);
  3979. case Token_when: return parse_when_stmt(f);
  3980. case Token_for: return parse_for_stmt(f);
  3981. case Token_switch: return parse_switch_stmt(f);
  3982. case Token_defer: return parse_defer_stmt(f);
  3983. case Token_return: return parse_return_stmt(f);
  3984. case Token_break:
  3985. case Token_continue:
  3986. case Token_fallthrough: {
  3987. Token token = advance_token(f);
  3988. Ast *label = nullptr;
  3989. if (token.kind != Token_fallthrough &&
  3990. f->curr_token.kind == Token_Ident) {
  3991. label = parse_ident(f);
  3992. }
  3993. s = ast_branch_stmt(f, token, label);
  3994. expect_semicolon(f);
  3995. return s;
  3996. }
  3997. case Token_using: {
  3998. CommentGroup *docs = f->lead_comment;
  3999. Token token = expect_token(f, Token_using);
  4000. if (f->curr_token.kind == Token_import) {
  4001. return parse_import_decl(f, ImportDecl_Using);
  4002. }
  4003. Ast *decl = nullptr;
  4004. Array<Ast *> list = parse_lhs_expr_list(f);
  4005. if (list.count == 0) {
  4006. syntax_error(token, "Illegal use of 'using' statement");
  4007. expect_semicolon(f);
  4008. return ast_bad_stmt(f, token, f->curr_token);
  4009. }
  4010. if (f->curr_token.kind != Token_Colon) {
  4011. expect_semicolon(f);
  4012. return ast_using_stmt(f, token, list);
  4013. }
  4014. expect_token_after(f, Token_Colon, "identifier list");
  4015. decl = parse_value_decl(f, list, docs);
  4016. if (decl != nullptr && decl->kind == Ast_ValueDecl) {
  4017. decl->ValueDecl.is_using = true;
  4018. return decl;
  4019. }
  4020. syntax_error(token, "Illegal use of 'using' statement");
  4021. return ast_bad_stmt(f, token, f->curr_token);
  4022. } break;
  4023. case Token_At: {
  4024. Token token = expect_token(f, Token_At);
  4025. return parse_attribute(f, token, Token_OpenParen, Token_CloseParen);
  4026. }
  4027. case Token_Hash: {
  4028. Ast *s = nullptr;
  4029. Token hash_token = expect_token(f, Token_Hash);
  4030. Token name = expect_token(f, Token_Ident);
  4031. String tag = name.string;
  4032. if (tag == "bounds_check") {
  4033. s = parse_stmt(f);
  4034. return parse_check_directive_for_statement(s, name, StateFlag_bounds_check);
  4035. } else if (tag == "no_bounds_check") {
  4036. s = parse_stmt(f);
  4037. return parse_check_directive_for_statement(s, name, StateFlag_no_bounds_check);
  4038. } else if (tag == "type_assert") {
  4039. s = parse_stmt(f);
  4040. return parse_check_directive_for_statement(s, name, StateFlag_type_assert);
  4041. } else if (tag == "no_type_assert") {
  4042. s = parse_stmt(f);
  4043. return parse_check_directive_for_statement(s, name, StateFlag_no_type_assert);
  4044. } else if (tag == "partial") {
  4045. s = parse_stmt(f);
  4046. switch (s->kind) {
  4047. case Ast_SwitchStmt:
  4048. s->SwitchStmt.partial = true;
  4049. break;
  4050. case Ast_TypeSwitchStmt:
  4051. s->TypeSwitchStmt.partial = true;
  4052. break;
  4053. case Ast_EmptyStmt:
  4054. return parse_check_directive_for_statement(s, name, 0);
  4055. default:
  4056. syntax_error(token, "#partial can only be applied to a switch statement");
  4057. break;
  4058. }
  4059. return s;
  4060. } else if (tag == "assert" || tag == "panic") {
  4061. Ast *t = ast_basic_directive(f, hash_token, name);
  4062. Ast *stmt = ast_expr_stmt(f, parse_call_expr(f, t));
  4063. expect_semicolon(f);
  4064. return stmt;
4065. } else if (tag == "force_inline" ||
4066. tag == "force_no_inline") {
  4067. Ast *expr = parse_force_inlining_operand(f, name);
  4068. Ast *stmt = ast_expr_stmt(f, expr);
  4069. expect_semicolon(f);
  4070. return stmt;
  4071. } else if (tag == "unroll") {
  4072. return parse_unrolled_for_loop(f, name);
  4073. } else if (tag == "include") {
  4074. syntax_error(token, "#include is not a valid import declaration kind. Did you mean 'import'?");
  4075. s = ast_bad_stmt(f, token, f->curr_token);
  4076. } else {
  4077. syntax_error(token, "Unknown tag directive used: '%.*s'", LIT(tag));
  4078. s = ast_bad_stmt(f, token, f->curr_token);
  4079. }
  4080. fix_advance_to_next_stmt(f);
  4081. return s;
  4082. } break;
  4083. case Token_OpenBrace:
  4084. return parse_block_stmt(f, false);
  4085. case Token_Semicolon:
  4086. s = ast_empty_stmt(f, token);
  4087. expect_semicolon(f);
  4088. return s;
  4089. }
  4090. // Error correction statements
  4091. switch (token.kind) {
  4092. case Token_else:
  4093. expect_token(f, Token_else);
  4094. syntax_error(token, "'else' unattached to an 'if' statement");
  4095. switch (f->curr_token.kind) {
  4096. case Token_if:
  4097. return parse_if_stmt(f);
  4098. case Token_when:
  4099. return parse_when_stmt(f);
  4100. case Token_OpenBrace:
  4101. return parse_block_stmt(f, true);
  4102. case Token_do: {
  4103. expect_token(f, Token_do);
  4104. Ast *stmt = convert_stmt_to_body(f, parse_stmt(f));
  4105. if (build_context.disallow_do) {
  4106. syntax_error(stmt, "'do' has been disallowed");
  4107. }
  4108. return stmt;
  4109. } break;
  4110. default:
  4111. fix_advance_to_next_stmt(f);
  4112. return ast_bad_stmt(f, token, f->curr_token);
  4113. }
  4114. }
  4115. syntax_error(token, "Expected a statement, got '%.*s'", LIT(token_strings[token.kind]));
  4116. fix_advance_to_next_stmt(f);
  4117. return ast_bad_stmt(f, token, f->curr_token);
  4118. }
  4119. Array<Ast *> parse_stmt_list(AstFile *f) {
  4120. auto list = array_make<Ast *>(heap_allocator());
  4121. while (f->curr_token.kind != Token_case &&
  4122. f->curr_token.kind != Token_CloseBrace &&
  4123. f->curr_token.kind != Token_EOF) {
  4124. Ast *stmt = parse_stmt(f);
  4125. if (stmt && stmt->kind != Ast_EmptyStmt) {
  4126. array_add(&list, stmt);
  4127. if (stmt->kind == Ast_ExprStmt &&
  4128. stmt->ExprStmt.expr != nullptr &&
  4129. stmt->ExprStmt.expr->kind == Ast_ProcLit) {
  4130. syntax_error(stmt, "Procedure literal evaluated but not used");
  4131. }
  4132. }
  4133. }
  4134. return list;
  4135. }
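// init_ast_file sets up the tokenizer for a single file, pre-sizes the token
// array from the file size (roughly one token per three bytes, rounded to a
// power-of-two based granularity), then tokenizes the whole file up front and
// records the time taken in f->time_to_tokenize.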
  4136. ParseFileError init_ast_file(AstFile *f, String fullpath, TokenPos *err_pos) {
  4137. GB_ASSERT(f != nullptr);
  4138. f->fullpath = string_trim_whitespace(fullpath); // Just in case
  4139. set_file_path_string(f->id, fullpath);
  4140. thread_safe_set_ast_file_from_id(f->id, f);
  4141. if (!string_ends_with(f->fullpath, str_lit(".odin"))) {
  4142. return ParseFile_WrongExtension;
  4143. }
  4144. zero_item(&f->tokenizer);
  4145. f->tokenizer.curr_file_id = f->id;
  4146. TokenizerInitError err = init_tokenizer_from_fullpath(&f->tokenizer, f->fullpath, build_context.copy_file_contents);
  4147. if (err != TokenizerInit_None) {
  4148. switch (err) {
  4149. case TokenizerInit_Empty:
  4150. break;
  4151. case TokenizerInit_NotExists:
  4152. return ParseFile_NotFound;
  4153. case TokenizerInit_Permission:
  4154. return ParseFile_Permission;
  4155. case TokenizerInit_FileTooLarge:
  4156. return ParseFile_FileTooLarge;
  4157. default:
  4158. return ParseFile_InvalidFile;
  4159. }
  4160. }
  4161. isize file_size = f->tokenizer.end - f->tokenizer.start;
  4162. // NOTE(bill): Determine allocation size required for tokens
  4163. isize token_cap = file_size/3ll;
  4164. isize pow2_cap = gb_max(cast(isize)prev_pow2(cast(i64)token_cap)/2, 16);
  4165. token_cap = ((token_cap + pow2_cap-1)/pow2_cap) * pow2_cap;
  4166. isize init_token_cap = gb_max(token_cap, 16);
  4167. array_init(&f->tokens, heap_allocator(), 0, gb_max(init_token_cap, 16));
  4168. if (err == TokenizerInit_Empty) {
  4169. Token token = {Token_EOF};
  4170. token.pos.file_id = f->id;
  4171. token.pos.line = 1;
  4172. token.pos.column = 1;
  4173. array_add(&f->tokens, token);
  4174. return ParseFile_None;
  4175. }
  4176. u64 start = time_stamp_time_now();
  4177. for (;;) {
  4178. Token *token = array_add_and_get(&f->tokens);
  4179. tokenizer_get_token(&f->tokenizer, token);
  4180. if (token->kind == Token_Invalid) {
  4181. err_pos->line = token->pos.line;
  4182. err_pos->column = token->pos.column;
  4183. return ParseFile_InvalidToken;
  4184. }
  4185. if (token->kind == Token_EOF) {
  4186. break;
  4187. }
  4188. }
  4189. u64 end = time_stamp_time_now();
  4190. f->time_to_tokenize = cast(f64)(end-start)/cast(f64)time_stamp__freq();
  4191. f->prev_token_index = 0;
  4192. f->curr_token_index = 0;
  4193. f->prev_token = f->tokens[f->prev_token_index];
  4194. f->curr_token = f->tokens[f->curr_token_index];
  4195. array_init(&f->comments, heap_allocator(), 0, 0);
  4196. array_init(&f->imports, heap_allocator(), 0, 0);
  4197. f->curr_proc = nullptr;
  4198. return ParseFile_None;
  4199. }
  4200. void destroy_ast_file(AstFile *f) {
  4201. GB_ASSERT(f != nullptr);
  4202. array_free(&f->tokens);
  4203. array_free(&f->comments);
  4204. array_free(&f->imports);
  4205. }
  4206. bool init_parser(Parser *p) {
  4207. GB_ASSERT(p != nullptr);
  4208. string_set_init(&p->imported_files, heap_allocator());
  4209. array_init(&p->packages, heap_allocator());
  4210. array_init(&p->package_imports, heap_allocator());
  4211. mutex_init(&p->wait_mutex);
  4212. mutex_init(&p->import_mutex);
  4213. mutex_init(&p->file_add_mutex);
  4214. mutex_init(&p->file_decl_mutex);
  4215. mutex_init(&p->packages_mutex);
  4216. mpmc_init(&p->file_error_queue, heap_allocator(), 1024);
  4217. return true;
  4218. }
  4219. void destroy_parser(Parser *p) {
  4220. GB_ASSERT(p != nullptr);
  4221. // TODO(bill): Fix memory leak
  4222. for_array(i, p->packages) {
  4223. AstPackage *pkg = p->packages[i];
  4224. for_array(j, pkg->files) {
  4225. destroy_ast_file(pkg->files[j]);
  4226. }
  4227. array_free(&pkg->files);
  4228. array_free(&pkg->foreign_files);
  4229. }
  4230. #if 0
  4231. for_array(i, p->package_imports) {
  4232. // gb_free(heap_allocator(), p->package_imports[i].text);
  4233. }
  4234. #endif
  4235. array_free(&p->packages);
  4236. array_free(&p->package_imports);
  4237. string_set_destroy(&p->imported_files);
  4238. mutex_destroy(&p->wait_mutex);
  4239. mutex_destroy(&p->import_mutex);
  4240. mutex_destroy(&p->file_add_mutex);
  4241. mutex_destroy(&p->file_decl_mutex);
  4242. mutex_destroy(&p->packages_mutex);
  4243. mpmc_destroy(&p->file_error_queue);
  4244. }
  4245. void parser_add_package(Parser *p, AstPackage *pkg) {
  4246. mutex_lock(&p->packages_mutex);
  4247. pkg->id = p->packages.count+1;
  4248. array_add(&p->packages, pkg);
  4249. mutex_unlock(&p->packages_mutex);
  4250. }
  4251. ParseFileError process_imported_file(Parser *p, ImportedFile imported_file);
  4252. WORKER_TASK_PROC(parser_worker_proc) {
  4253. ParserWorkerData *wd = cast(ParserWorkerData *)data;
  4254. ParseFileError err = process_imported_file(wd->parser, wd->imported_file);
  4255. if (err != ParseFile_None) {
  4256. mpmc_enqueue(&wd->parser->file_error_queue, err);
  4257. }
  4258. return cast(isize)err;
  4259. }
  4260. void parser_add_file_to_process(Parser *p, AstPackage *pkg, FileInfo fi, TokenPos pos) {
  4261. // TODO(bill): Use a better allocator
  4262. ImportedFile f = {pkg, fi, pos, p->file_to_process_count++};
  4263. auto wd = gb_alloc_item(permanent_allocator(), ParserWorkerData);
  4264. wd->parser = p;
  4265. wd->imported_file = f;
  4266. global_thread_pool_add_task(parser_worker_proc, wd);
  4267. }
  4268. WORKER_TASK_PROC(foreign_file_worker_proc) {
  4269. ForeignFileWorkerData *wd = cast(ForeignFileWorkerData *)data;
  4270. Parser *p = wd->parser;
  4271. ImportedFile *imp = &wd->imported_file;
  4272. AstPackage *pkg = imp->pkg;
  4273. AstForeignFile foreign_file = {wd->foreign_kind};
  4274. String fullpath = string_trim_whitespace(imp->fi.fullpath); // Just in case
  4275. char *c_str = alloc_cstring(heap_allocator(), fullpath);
  4276. defer (gb_free(heap_allocator(), c_str));
  4277. gbFileContents fc = gb_file_read_contents(heap_allocator(), true, c_str);
  4278. foreign_file.source.text = (u8 *)fc.data;
  4279. foreign_file.source.len = fc.size;
  4280. switch (wd->foreign_kind) {
  4281. case AstForeignFile_S:
  4282. // TODO(bill): Actually do something with it
  4283. break;
  4284. }
  4285. mutex_lock(&p->file_add_mutex);
  4286. array_add(&pkg->foreign_files, foreign_file);
  4287. mutex_unlock(&p->file_add_mutex);
  4288. return 0;
  4289. }
  4290. void parser_add_foreign_file_to_process(Parser *p, AstPackage *pkg, AstForeignFileKind kind, FileInfo fi, TokenPos pos) {
  4291. // TODO(bill): Use a better allocator
  4292. ImportedFile f = {pkg, fi, pos, p->file_to_process_count++};
  4293. auto wd = gb_alloc_item(permanent_allocator(), ForeignFileWorkerData);
  4294. wd->parser = p;
  4295. wd->imported_file = f;
  4296. wd->foreign_kind = kind;
  4297. global_thread_pool_add_task(foreign_file_worker_proc, wd);
  4298. }
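// try_add_import_path deduplicates imports via p->imported_files, so a path is
// only turned into a package once. An initial package whose path ends in '.odin'
// becomes a single-file package; otherwise the directory is read and every
// non-excluded '.odin' file (plus '.S'/'.s' foreign files) is queued for parsing.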
4299. // NOTE(bill): Returns the added package, or nullptr if the path was already imported or the directory could not be read
  4300. AstPackage *try_add_import_path(Parser *p, String const &path, String const &rel_path, TokenPos pos, PackageKind kind = Package_Normal) {
  4301. String const FILE_EXT = str_lit(".odin");
  4302. mutex_lock(&p->import_mutex);
  4303. defer (mutex_unlock(&p->import_mutex));
  4304. if (string_set_exists(&p->imported_files, path)) {
  4305. return nullptr;
  4306. }
  4307. string_set_add(&p->imported_files, path);
  4308. AstPackage *pkg = gb_alloc_item(permanent_allocator(), AstPackage);
  4309. pkg->kind = kind;
  4310. pkg->fullpath = path;
  4311. array_init(&pkg->files, heap_allocator());
  4312. pkg->foreign_files.allocator = heap_allocator();
  4313. // NOTE(bill): Single file initial package
  4314. if (kind == Package_Init && string_ends_with(path, FILE_EXT)) {
  4315. FileInfo fi = {};
  4316. fi.name = filename_from_path(path);
  4317. fi.fullpath = path;
  4318. fi.size = get_file_size(path);
  4319. fi.is_dir = false;
  4320. pkg->is_single_file = true;
  4321. parser_add_file_to_process(p, pkg, fi, pos);
  4322. parser_add_package(p, pkg);
  4323. return pkg;
  4324. }
  4325. Array<FileInfo> list = {};
  4326. ReadDirectoryError rd_err = read_directory(path, &list);
  4327. defer (array_free(&list));
  4328. if (list.count == 1) {
  4329. GB_ASSERT(path != list[0].fullpath);
  4330. }
  4331. switch (rd_err) {
  4332. case ReadDirectory_InvalidPath:
  4333. syntax_error(pos, "Invalid path: %.*s", LIT(rel_path));
  4334. return nullptr;
  4335. case ReadDirectory_NotExists:
  4336. syntax_error(pos, "Path does not exist: %.*s", LIT(rel_path));
  4337. return nullptr;
  4338. case ReadDirectory_Permission:
  4339. syntax_error(pos, "Unknown error whilst reading path %.*s", LIT(rel_path));
  4340. return nullptr;
  4341. case ReadDirectory_NotDir:
  4342. syntax_error(pos, "Expected a directory for a package, got a file: %.*s", LIT(rel_path));
  4343. return nullptr;
  4344. case ReadDirectory_Empty:
  4345. syntax_error(pos, "Empty directory: %.*s", LIT(rel_path));
  4346. return nullptr;
  4347. case ReadDirectory_Unknown:
  4348. syntax_error(pos, "Unknown error whilst reading path %.*s", LIT(rel_path));
  4349. return nullptr;
  4350. }
  4351. for_array(list_index, list) {
  4352. FileInfo fi = list[list_index];
  4353. String name = fi.name;
  4354. String ext = path_extension(name);
  4355. if (ext == FILE_EXT) {
  4356. if (is_excluded_target_filename(name)) {
  4357. continue;
  4358. }
  4359. parser_add_file_to_process(p, pkg, fi, pos);
4360. } else if (ext == ".S" || ext == ".s") {
  4361. if (is_excluded_target_filename(name)) {
  4362. continue;
  4363. }
  4364. parser_add_foreign_file_to_process(p, pkg, AstForeignFile_S, fi, pos);
  4365. }
  4366. }
  4367. parser_add_package(p, pkg);
  4368. return pkg;
  4369. }
  4370. gb_global Rune illegal_import_runes[] = {
  4371. '"', '\'', '`',
  4372. '\t', '\r', '\n', '\v', '\f',
  4373. '\\', // NOTE(bill): Disallow windows style filepaths
  4374. '!', '$', '%', '^', '&', '*', '(', ')', '=',
  4375. '[', ']', '{', '}',
  4376. ';',
  4377. ':', // NOTE(bill): Disallow windows style absolute filepaths
  4378. '#',
  4379. '|', ',', '<', '>', '?',
  4380. };
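// is_import_path_valid checks every rune of the path against
// illegal_import_runes above and also rejects invalid UTF-8 and interior
// byte-order marks. For example (assumed, illustrative): "fmt" is accepted,
// while "a|b" or a path containing '\' is rejected.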
  4381. bool is_import_path_valid(String path) {
  4382. if (path.len > 0) {
  4383. u8 *start = path.text;
  4384. u8 *end = path.text + path.len;
  4385. u8 *curr = start;
  4386. while (curr < end) {
  4387. isize width = 1;
  4388. Rune r = *curr;
  4389. if (r >= 0x80) {
  4390. width = utf8_decode(curr, end-curr, &r);
  4391. if (r == GB_RUNE_INVALID && width == 1) {
  4392. return false;
  4393. }
  4394. else if (r == GB_RUNE_BOM && curr-start > 0) {
  4395. return false;
  4396. }
  4397. }
  4398. for (isize i = 0; i < gb_count_of(illegal_import_runes); i++) {
  4399. if (r == illegal_import_runes[i]) {
  4400. return false;
  4401. }
  4402. }
  4403. curr += width;
  4404. }
  4405. return true;
  4406. }
  4407. return false;
  4408. }
  4409. bool is_build_flag_path_valid(String path) {
  4410. if (path.len > 0) {
  4411. u8 *start = path.text;
  4412. u8 *end = path.text + path.len;
  4413. u8 *curr = start;
  4414. isize index = 0;
  4415. while (curr < end) {
  4416. isize width = 1;
  4417. Rune r = *curr;
  4418. if (r >= 0x80) {
  4419. width = utf8_decode(curr, end-curr, &r);
  4420. if (r == GB_RUNE_INVALID && width == 1) {
  4421. return false;
  4422. }
  4423. else if (r == GB_RUNE_BOM && curr-start > 0) {
  4424. return false;
  4425. }
  4426. }
  4427. for (isize i = 0; i < gb_count_of(illegal_import_runes); i++) {
  4428. #if defined(GB_SYSTEM_WINDOWS)
  4429. if (r == '\\') {
  4430. break;
  4431. } else if (r == ':') {
  4432. break;
  4433. }
  4434. #endif
  4435. if (r == illegal_import_runes[i]) {
  4436. return false;
  4437. }
  4438. }
  4439. curr += width;
  4440. index += 1;
  4441. }
  4442. return true;
  4443. }
  4444. return false;
  4445. }
  4446. bool is_package_name_reserved(String const &name) {
  4447. if (name == "builtin") {
  4448. return true;
  4449. } else if (name == "intrinsics") {
  4450. return true;
  4451. }
  4452. return false;
  4453. }
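// determine_path_from_string splits an import string of the form
// "collection:path" at the first ':' (a Windows drive prefix such as "C:/..."
// is special-cased when file_mutex == nullptr, i.e. in the semantics stage),
// validates the path, resolves the collection to a base directory, and writes
// the resulting full path. Illustrative inputs (assumed): "core:fmt", "../shared".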
  4454. bool determine_path_from_string(BlockingMutex *file_mutex, Ast *node, String base_dir, String original_string, String *path) {
  4455. GB_ASSERT(path != nullptr);
  4456. // NOTE(bill): if file_mutex == nullptr, this means that the code is used within the semantics stage
  4457. gbAllocator a = heap_allocator();
  4458. String collection_name = {};
  4459. isize colon_pos = -1;
  4460. for (isize j = 0; j < original_string.len; j++) {
  4461. if (original_string[j] == ':') {
  4462. colon_pos = j;
  4463. break;
  4464. }
  4465. }
  4466. bool has_windows_drive = false;
  4467. #if defined(GB_SYSTEM_WINDOWS)
  4468. if (file_mutex == nullptr) {
  4469. if (colon_pos == 1 && original_string.len > 2) {
  4470. if (original_string[2] == '/' || original_string[2] == '\\') {
  4471. colon_pos = -1;
  4472. has_windows_drive = true;
  4473. }
  4474. }
  4475. }
  4476. #endif
  4477. String file_str = {};
  4478. if (colon_pos == 0) {
  4479. syntax_error(node, "Expected a collection name");
  4480. return false;
  4481. }
  4482. if (original_string.len > 0 && colon_pos > 0) {
  4483. collection_name = substring(original_string, 0, colon_pos);
  4484. file_str = substring(original_string, colon_pos+1, original_string.len);
  4485. } else {
  4486. file_str = original_string;
  4487. }
  4488. if (has_windows_drive) {
  4489. String sub_file_path = substring(file_str, 3, file_str.len);
  4490. if (!is_import_path_valid(sub_file_path)) {
  4491. syntax_error(node, "Invalid import path: '%.*s'", LIT(file_str));
  4492. return false;
  4493. }
  4494. } else if (!is_import_path_valid(file_str)) {
  4495. syntax_error(node, "Invalid import path: '%.*s'", LIT(file_str));
  4496. return false;
  4497. }
  4498. if (collection_name.len > 0) {
  4499. if (collection_name == "system") {
  4500. if (node->kind != Ast_ForeignImportDecl) {
  4501. syntax_error(node, "The library collection 'system' is restrict for 'foreign_library'");
  4502. return false;
  4503. } else {
  4504. *path = file_str;
  4505. return true;
  4506. }
  4507. } else if (!find_library_collection_path(collection_name, &base_dir)) {
  4508. // NOTE(bill): It's a naughty name
  4509. syntax_error(node, "Unknown library collection: '%.*s'", LIT(collection_name));
  4510. return false;
  4511. }
  4512. } else {
  4513. #if !defined(GB_SYSTEM_WINDOWS)
  4514. // @NOTE(vassvik): foreign imports of shared libraries that are not in the system collection on
  4515. // linux/mac have to be local to the executable for consistency with shared libraries.
  4516. // Unix does not have a concept of "import library" for shared/dynamic libraries,
  4517. // so we need to pass the relative path to the linker, and add the current
  4518. // working directory of the exe to the library search paths.
  4519. // Static libraries can be linked directly with the full pathname
  4520. //
  4521. if (node->kind == Ast_ForeignImportDecl && string_ends_with(file_str, str_lit(".so"))) {
  4522. *path = file_str;
  4523. return true;
  4524. }
  4525. #endif
  4526. }
  4527. if (is_package_name_reserved(file_str)) {
  4528. *path = file_str;
  4529. if (collection_name == "core") {
  4530. return true;
  4531. } else {
  4532. syntax_error(node, "The package '%.*s' must be imported with the core library collection: 'core:%.*s'", LIT(file_str), LIT(file_str));
  4533. return false;
  4534. }
  4535. }
  4536. if (file_mutex) mutex_lock(file_mutex);
  4537. defer (if (file_mutex) mutex_unlock(file_mutex));
  4538. if (node->kind == Ast_ForeignImportDecl) {
  4539. node->ForeignImportDecl.collection_name = collection_name;
  4540. }
  4541. if (has_windows_drive) {
  4542. *path = file_str;
  4543. } else {
  4544. String fullpath = string_trim_whitespace(get_fullpath_relative(a, base_dir, file_str));
  4545. *path = fullpath;
  4546. }
  4547. return true;
  4548. }
  4549. void parse_setup_file_decls(Parser *p, AstFile *f, String base_dir, Slice<Ast *> &decls);
  4550. void parse_setup_file_when_stmt(Parser *p, AstFile *f, String base_dir, AstWhenStmt *ws) {
  4551. if (ws->body != nullptr) {
  4552. auto stmts = ws->body->BlockStmt.stmts;
  4553. parse_setup_file_decls(p, f, base_dir, stmts);
  4554. }
  4555. if (ws->else_stmt != nullptr) {
  4556. switch (ws->else_stmt->kind) {
  4557. case Ast_BlockStmt: {
  4558. auto stmts = ws->else_stmt->BlockStmt.stmts;
  4559. parse_setup_file_decls(p, f, base_dir, stmts);
  4560. } break;
  4561. case Ast_WhenStmt:
  4562. parse_setup_file_when_stmt(p, f, base_dir, &ws->else_stmt->WhenStmt);
  4563. break;
  4564. }
  4565. }
  4566. }
  4567. void parse_setup_file_decls(Parser *p, AstFile *f, String base_dir, Slice<Ast *> &decls) {
  4568. for_array(i, decls) {
  4569. Ast *node = decls[i];
  4570. if (!is_ast_decl(node) &&
  4571. node->kind != Ast_WhenStmt &&
  4572. node->kind != Ast_BadStmt &&
  4573. node->kind != Ast_EmptyStmt) {
  4574. // NOTE(bill): Sanity check
  4575. if (node->kind == Ast_ExprStmt) {
  4576. Ast *expr = node->ExprStmt.expr;
  4577. if (expr->kind == Ast_CallExpr &&
  4578. expr->CallExpr.proc->kind == Ast_BasicDirective) {
  4579. f->directive_count += 1;
  4580. continue;
  4581. }
  4582. }
  4583. syntax_error(node, "Only declarations are allowed at file scope, got %.*s", LIT(ast_strings[node->kind]));
  4584. } else if (node->kind == Ast_ImportDecl) {
  4585. ast_node(id, ImportDecl, node);
  4586. String original_string = string_trim_whitespace(string_value_from_token(f, id->relpath));
  4587. String import_path = {};
  4588. bool ok = determine_path_from_string(&p->file_decl_mutex, node, base_dir, original_string, &import_path);
  4589. if (!ok) {
  4590. decls[i] = ast_bad_decl(f, id->relpath, id->relpath);
  4591. continue;
  4592. }
  4593. import_path = string_trim_whitespace(import_path);
  4594. id->fullpath = import_path;
  4595. if (is_package_name_reserved(import_path)) {
  4596. continue;
  4597. }
  4598. try_add_import_path(p, import_path, original_string, ast_token(node).pos);
  4599. } else if (node->kind == Ast_ForeignImportDecl) {
  4600. ast_node(fl, ForeignImportDecl, node);
  4601. auto fullpaths = array_make<String>(permanent_allocator(), 0, fl->filepaths.count);
  4602. for_array(fp_idx, fl->filepaths) {
  4603. String file_str = string_trim_whitespace(string_value_from_token(f, fl->filepaths[fp_idx]));
  4604. String fullpath = file_str;
  4605. if (allow_check_foreign_filepath()) {
  4606. String foreign_path = {};
  4607. bool ok = determine_path_from_string(&p->file_decl_mutex, node, base_dir, file_str, &foreign_path);
  4608. if (!ok) {
  4609. decls[i] = ast_bad_decl(f, fl->filepaths[fp_idx], fl->filepaths[fl->filepaths.count-1]);
  4610. goto end;
  4611. }
  4612. fullpath = foreign_path;
  4613. }
  4614. array_add(&fullpaths, fullpath);
  4615. }
  4616. if (fullpaths.count == 0) {
  4617. syntax_error(decls[i], "No foreign paths found");
  4618. decls[i] = ast_bad_decl(f, fl->filepaths[0], fl->filepaths[fl->filepaths.count-1]);
  4619. goto end;
  4620. }
  4621. fl->fullpaths = slice_from_array(fullpaths);
  4622. } else if (node->kind == Ast_WhenStmt) {
  4623. ast_node(ws, WhenStmt, node);
  4624. parse_setup_file_when_stmt(p, f, base_dir, ws);
  4625. }
  4626. end:;
  4627. }
  4628. }
  4629. String build_tag_get_token(String s, String *out) {
  4630. s = string_trim_whitespace(s);
  4631. isize n = 0;
  4632. while (n < s.len) {
  4633. Rune rune = 0;
  4634. isize width = utf8_decode(&s[n], s.len-n, &rune);
  4635. if (n == 0 && rune == '!') {
  4636. } else if (!rune_is_letter(rune) && !rune_is_digit(rune)) {
  4637. isize k = gb_max(gb_max(n, width), 1);
  4638. *out = substring(s, k, s.len);
  4639. return substring(s, 0, k);
  4640. }
  4641. n += width;
  4642. }
  4643. out->len = 0;
  4644. return s;
  4645. }
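// parse_build_tag evaluates a '+build' tag against the current target metrics.
// Comma-separated groups are OR-ed together, whitespace-separated entries within
// a group are AND-ed, '!' negates an OS/architecture name, and 'ignore' makes a
// group never match. Illustrative tags (assumed examples):
//
//     //+build windows, linux
//     //+build !darwin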
  4646. bool parse_build_tag(Token token_for_pos, String s) {
  4647. String const prefix = str_lit("+build");
  4648. GB_ASSERT(string_starts_with(s, prefix));
  4649. s = string_trim_whitespace(substring(s, prefix.len, s.len));
  4650. if (s.len == 0) {
  4651. return true;
  4652. }
  4653. bool any_correct = false;
  4654. while (s.len > 0) {
  4655. bool this_kind_correct = true;
  4656. do {
  4657. String p = string_trim_whitespace(build_tag_get_token(s, &s));
  4658. if (p.len == 0) break;
  4659. if (p == ",") break;
  4660. bool is_notted = false;
  4661. if (p[0] == '!') {
  4662. is_notted = true;
  4663. p = substring(p, 1, p.len);
  4664. if (p.len == 0) {
  4665. syntax_error(token_for_pos, "Expected a build platform after '!'");
  4666. break;
  4667. }
  4668. }
  4669. if (p.len == 0) {
  4670. continue;
  4671. }
  4672. if (p == "ignore") {
  4673. this_kind_correct = false;
  4674. continue;
  4675. }
  4676. TargetOsKind os = get_target_os_from_string(p);
  4677. TargetArchKind arch = get_target_arch_from_string(p);
  4678. if (os != TargetOs_Invalid) {
  4679. GB_ASSERT(arch == TargetArch_Invalid);
  4680. if (is_notted) {
  4681. this_kind_correct = this_kind_correct && (os != build_context.metrics.os);
  4682. } else {
  4683. this_kind_correct = this_kind_correct && (os == build_context.metrics.os);
  4684. }
  4685. } else if (arch != TargetArch_Invalid) {
  4686. if (is_notted) {
  4687. this_kind_correct = this_kind_correct && (arch != build_context.metrics.arch);
  4688. } else {
  4689. this_kind_correct = this_kind_correct && (arch == build_context.metrics.arch);
  4690. }
  4691. }
  4692. if (os == TargetOs_Invalid && arch == TargetArch_Invalid) {
  4693. syntax_error(token_for_pos, "Invalid build tag platform: %.*s", LIT(p));
  4694. break;
  4695. }
  4696. } while (s.len > 0);
  4697. any_correct = any_correct || this_kind_correct;
  4698. }
  4699. return any_correct;
  4700. }
  4701. String dir_from_path(String path) {
  4702. String base_dir = path;
  4703. for (isize i = path.len-1; i >= 0; i--) {
  4704. if (base_dir[i] == '\\' ||
  4705. base_dir[i] == '/') {
  4706. break;
  4707. }
  4708. base_dir.len--;
  4709. }
  4710. return base_dir;
  4711. }
  4712. isize calc_decl_count(Ast *decl) {
  4713. isize count = 0;
  4714. switch (decl->kind) {
  4715. case Ast_BlockStmt:
  4716. for_array(i, decl->BlockStmt.stmts) {
  4717. count += calc_decl_count(decl->BlockStmt.stmts.data[i]);
  4718. }
  4719. break;
  4720. case Ast_WhenStmt:
  4721. {
  4722. isize inner_count = calc_decl_count(decl->WhenStmt.body);
  4723. if (decl->WhenStmt.else_stmt) {
  4724. inner_count = gb_max(inner_count, calc_decl_count(decl->WhenStmt.else_stmt));
  4725. }
  4726. count += inner_count;
  4727. }
  4728. break;
  4729. case Ast_ValueDecl:
  4730. count = decl->ValueDecl.names.count;
  4731. break;
  4732. case Ast_ForeignBlockDecl:
  4733. count = calc_decl_count(decl->ForeignBlockDecl.body);
  4734. break;
  4735. case Ast_ImportDecl:
  4736. case Ast_ForeignImportDecl:
  4737. count = 1;
  4738. break;
  4739. }
  4740. return count;
  4741. }
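// parse_file parses one file: the package declaration (whose doc comment may
// carry '+build', '+private'/'+private file', and '+lazy' tags), the file-scope
// statements, and finally the import/foreign-import path setup. Illustrative
// file header (assumed example):
//
//     //+build linux, darwin
//     //+private file
//     package foo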
  4742. bool parse_file(Parser *p, AstFile *f) {
  4743. if (f->tokens.count == 0) {
  4744. return true;
  4745. }
  4746. if (f->tokens.count > 0 && f->tokens[0].kind == Token_EOF) {
  4747. return true;
  4748. }
  4749. u64 start = time_stamp_time_now();
  4750. String filepath = f->tokenizer.fullpath;
  4751. String base_dir = dir_from_path(filepath);
  4752. if (f->curr_token.kind == Token_Comment) {
  4753. consume_comment_groups(f, f->prev_token);
  4754. }
  4755. CommentGroup *docs = f->lead_comment;
  4756. if (f->curr_token.kind != Token_package) {
  4757. syntax_error(f->curr_token, "Expected a package declaration at the beginning of the file");
  4758. return false;
  4759. }
  4760. f->package_token = expect_token(f, Token_package);
  4761. if (f->package_token.kind != Token_package) {
  4762. return false;
  4763. }
  4764. if (docs != nullptr) {
  4765. TokenPos end = token_pos_end(docs->list[docs->list.count-1]);
  4766. if (end.line == f->package_token.pos.line || end.line+1 == f->package_token.pos.line) {
  4767. // Okay
  4768. } else {
  4769. docs = nullptr;
  4770. }
  4771. }
  4772. Token package_name = expect_token_after(f, Token_Ident, "package");
  4773. if (package_name.kind == Token_Ident) {
  4774. if (package_name.string == "_") {
  4775. syntax_error(package_name, "Invalid package name '_'");
  4776. } else if (f->pkg->kind != Package_Runtime && package_name.string == "runtime") {
  4777. syntax_error(package_name, "Use of reserved package name '%.*s'", LIT(package_name.string));
  4778. } else if (is_package_name_reserved(package_name.string)) {
  4779. syntax_error(package_name, "Use of reserved package name '%.*s'", LIT(package_name.string));
  4780. }
  4781. }
  4782. f->package_name = package_name.string;
  4783. if (!f->pkg->is_single_file && docs != nullptr && docs->list.count > 0) {
  4784. for_array(i, docs->list) {
  4785. Token tok = docs->list[i]; GB_ASSERT(tok.kind == Token_Comment);
  4786. String str = tok.string;
  4787. if (string_starts_with(str, str_lit("//"))) {
  4788. String lc = string_trim_whitespace(substring(str, 2, str.len));
  4789. if (lc.len > 0 && lc[0] == '+') {
  4790. if (string_starts_with(lc, str_lit("+build"))) {
  4791. if (!parse_build_tag(tok, lc)) {
  4792. return false;
  4793. }
  4794. } else if (string_starts_with(lc, str_lit("+private"))) {
  4795. f->flags |= AstFile_IsPrivatePkg;
  4796. String command = string_trim_starts_with(lc, str_lit("+private "));
  4797. command = string_trim_whitespace(command);
  4798. if (lc == "+private") {
  4799. f->flags |= AstFile_IsPrivatePkg;
  4800. } else if (command == "package") {
  4801. f->flags |= AstFile_IsPrivatePkg;
  4802. } else if (command == "file") {
  4803. f->flags |= AstFile_IsPrivateFile;
  4804. }
  4805. } else if (lc == "+lazy") {
  4806. if (build_context.ignore_lazy) {
  4807. // Ignore
  4808. } else if (f->flags & AstFile_IsTest) {
  4809. // Ignore
  4810. } else if (f->pkg->kind == Package_Init && build_context.command_kind == Command_doc) {
  4811. // Ignore
  4812. } else {
  4813. f->flags |= AstFile_IsLazy;
  4814. }
  4815. }
  4816. }
  4817. }
  4818. }
  4819. }
  4820. Ast *pd = ast_package_decl(f, f->package_token, package_name, docs, f->line_comment);
  4821. expect_semicolon(f);
  4822. f->pkg_decl = pd;
  4823. if (f->error_count == 0) {
  4824. auto decls = array_make<Ast *>(heap_allocator());
  4825. while (f->curr_token.kind != Token_EOF) {
  4826. Ast *stmt = parse_stmt(f);
  4827. if (stmt && stmt->kind != Ast_EmptyStmt) {
  4828. array_add(&decls, stmt);
  4829. if (stmt->kind == Ast_ExprStmt &&
  4830. stmt->ExprStmt.expr != nullptr &&
  4831. stmt->ExprStmt.expr->kind == Ast_ProcLit) {
  4832. syntax_error(stmt, "Procedure literal evaluated but not used");
  4833. }
  4834. f->total_file_decl_count += calc_decl_count(stmt);
  4835. if (stmt->kind == Ast_WhenStmt || stmt->kind == Ast_ExprStmt || stmt->kind == Ast_ImportDecl) {
  4836. f->delayed_decl_count += 1;
  4837. }
  4838. }
  4839. }
  4840. f->decls = slice_from_array(decls);
  4841. parse_setup_file_decls(p, f, base_dir, f->decls);
  4842. }
  4843. u64 end = time_stamp_time_now();
  4844. f->time_to_parse = cast(f64)(end-start)/cast(f64)time_stamp__freq();
  4845. for (int i = 0; i < AstDelayQueue_COUNT; i++) {
  4846. mpmc_init(f->delayed_decls_queues+i, heap_allocator(), f->delayed_decl_count);
  4847. }
  4848. return f->error_count == 0;
  4849. }
  4850. ParseFileError process_imported_file(Parser *p, ImportedFile imported_file) {
  4851. AstPackage *pkg = imported_file.pkg;
  4852. FileInfo fi = imported_file.fi;
  4853. TokenPos pos = imported_file.pos;
  4854. AstFile *file = gb_alloc_item(permanent_allocator(), AstFile);
  4855. file->pkg = pkg;
  4856. file->id = cast(i32)(imported_file.index+1);
  4857. TokenPos err_pos = {0};
  4858. ParseFileError err = init_ast_file(file, fi.fullpath, &err_pos);
  4859. err_pos.file_id = file->id;
  4860. file->last_error = err;
  4861. if (err != ParseFile_None) {
  4862. if (err == ParseFile_EmptyFile) {
  4863. if (fi.fullpath == p->init_fullpath) {
  4864. syntax_error(pos, "Initial file is empty - %.*s\n", LIT(p->init_fullpath));
  4865. gb_exit(1);
  4866. }
  4867. } else {
  4868. switch (err) {
  4869. case ParseFile_WrongExtension:
  4870. syntax_error(pos, "Failed to parse file: %.*s; invalid file extension: File must have the extension '.odin'", LIT(fi.name));
  4871. break;
  4872. case ParseFile_InvalidFile:
  4873. syntax_error(pos, "Failed to parse file: %.*s; invalid file or cannot be found", LIT(fi.name));
  4874. break;
  4875. case ParseFile_Permission:
  4876. syntax_error(pos, "Failed to parse file: %.*s; file permissions problem", LIT(fi.name));
  4877. break;
  4878. case ParseFile_NotFound:
  4879. syntax_error(pos, "Failed to parse file: %.*s; file cannot be found ('%.*s')", LIT(fi.name), LIT(fi.fullpath));
  4880. break;
  4881. case ParseFile_InvalidToken:
  4882. syntax_error(err_pos, "Failed to parse file: %.*s; invalid token found in file", LIT(fi.name));
  4883. break;
  4884. case ParseFile_EmptyFile:
  4885. syntax_error(pos, "Failed to parse file: %.*s; file contains no tokens", LIT(fi.name));
  4886. break;
  4887. case ParseFile_FileTooLarge:
  4888. syntax_error(pos, "Failed to parse file: %.*s; file is too large, exceeds maximum file size of 2 GiB", LIT(fi.name));
  4889. break;
  4890. }
  4891. return err;
  4892. }
  4893. }
  4894. if (build_context.command_kind == Command_test) {
  4895. String name = file->fullpath;
  4896. name = remove_extension_from_path(name);
  4897. String test_suffix = str_lit("_test");
  4898. if (string_ends_with(name, test_suffix) && name != test_suffix) {
  4899. file->flags |= AstFile_IsTest;
  4900. }
  4901. }
  4902. if (parse_file(p, file)) {
  4903. mutex_lock(&p->file_add_mutex);
  4904. defer (mutex_unlock(&p->file_add_mutex));
  4905. array_add(&pkg->files, file);
  4906. if (pkg->name.len == 0) {
  4907. pkg->name = file->package_name;
  4908. } else if (pkg->name != file->package_name) {
  4909. if (file->tokens.count > 0 && file->tokens[0].kind != Token_EOF) {
  4910. Token tok = file->package_token;
  4911. tok.pos.file_id = file->id;
  4912. tok.pos.line = gb_max(tok.pos.line, 1);
  4913. tok.pos.column = gb_max(tok.pos.column, 1);
  4914. syntax_error(tok, "Different package name, expected '%.*s', got '%.*s'", LIT(pkg->name), LIT(file->package_name));
  4915. }
  4916. }
  4917. p->total_line_count += file->tokenizer.line_count;
  4918. p->total_token_count += file->tokens.count;
  4919. }
  4920. return ParseFile_None;
  4921. }
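// parse_packages seeds the 'runtime' package, the initial package, 'core:testing'
// for 'odin test', and any extra packages serially, then waits on the thread pool
// while the files are parsed in parallel and returns the first error encountered.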
  4922. ParseFileError parse_packages(Parser *p, String init_filename) {
  4923. GB_ASSERT(init_filename.text[init_filename.len] == 0);
  4924. String init_fullpath = path_to_full_path(heap_allocator(), init_filename);
  4925. if (!path_is_directory(init_fullpath)) {
  4926. String const ext = str_lit(".odin");
  4927. if (!string_ends_with(init_fullpath, ext)) {
  4928. error_line("Expected either a directory or a .odin file, got '%.*s'\n", LIT(init_filename));
  4929. return ParseFile_WrongExtension;
  4930. }
  4931. } else if (init_fullpath.len != 0) {
  4932. String path = init_fullpath;
  4933. if (path[path.len-1] == '/') {
  4934. path.len -= 1;
  4935. }
  4936. if ((build_context.command_kind & Command__does_build) &&
  4937. build_context.build_mode == BuildMode_Executable) {
  4938. String short_path = filename_from_path(path);
  4939. char *cpath = alloc_cstring(heap_allocator(), short_path);
  4940. defer (gb_free(heap_allocator(), cpath));
  4941. if (gb_file_exists(cpath)) {
  4942. error_line("Please specify the executable name with -out:<string> as a directory exists with the same name in the current working directory");
  4943. return ParseFile_DirectoryAlreadyExists;
  4944. }
  4945. }
  4946. }
4947. { // Add these packages serially and then process them in parallel
  4948. mutex_lock(&p->wait_mutex);
  4949. defer (mutex_unlock(&p->wait_mutex));
  4950. TokenPos init_pos = {};
  4951. {
  4952. String s = get_fullpath_core(heap_allocator(), str_lit("runtime"));
  4953. try_add_import_path(p, s, s, init_pos, Package_Runtime);
  4954. }
  4955. try_add_import_path(p, init_fullpath, init_fullpath, init_pos, Package_Init);
  4956. p->init_fullpath = init_fullpath;
  4957. if (build_context.command_kind == Command_test) {
  4958. String s = get_fullpath_core(heap_allocator(), str_lit("testing"));
  4959. try_add_import_path(p, s, s, init_pos, Package_Normal);
  4960. }
  4961. for_array(i, build_context.extra_packages) {
  4962. String path = build_context.extra_packages[i];
  4963. String fullpath = path_to_full_path(heap_allocator(), path); // LEAK?
  4964. if (!path_is_directory(fullpath)) {
  4965. String const ext = str_lit(".odin");
  4966. if (!string_ends_with(fullpath, ext)) {
  4967. error_line("Expected either a directory or a .odin file, got '%.*s'\n", LIT(fullpath));
  4968. return ParseFile_WrongExtension;
  4969. }
  4970. }
  4971. AstPackage *pkg = try_add_import_path(p, fullpath, fullpath, init_pos, Package_Normal);
  4972. if (pkg) {
  4973. pkg->is_extra = true;
  4974. }
  4975. }
  4976. }
  4977. global_thread_pool_wait();
  4978. for (ParseFileError err = ParseFile_None; mpmc_dequeue(&p->file_error_queue, &err); /**/) {
  4979. if (err != ParseFile_None) {
  4980. return err;
  4981. }
  4982. }
  4983. for (isize i = p->packages.count-1; i >= 0; i--) {
  4984. AstPackage *pkg = p->packages[i];
  4985. for (isize j = pkg->files.count-1; j >= 0; j--) {
  4986. AstFile *file = pkg->files[j];
  4987. if (file->error_count != 0) {
  4988. if (file->last_error != ParseFile_None) {
  4989. return file->last_error;
  4990. }
  4991. return ParseFile_GeneralError;
  4992. }
  4993. }
  4994. }
  4995. return ParseFile_None;
  4996. }