- package odin_parser
- import "core:odin/ast"
- import "core:odin/tokenizer"
- import "core:fmt"
- Warning_Handler :: #type proc(pos: tokenizer.Pos, fmt: string, args: ..any)
- Error_Handler :: #type proc(pos: tokenizer.Pos, fmt: string, args: ..any)
- Flag :: enum u32 {
- Optional_Semicolons,
- }
- Flags :: distinct bit_set[Flag; u32]
- Parser :: struct {
- file: ^ast.File,
- tok: tokenizer.Tokenizer,
- // If .Optional_Semicolons is set, semicolons are completely optional as statement terminators.
- // This is different from .Insert_Semicolon in tok.flags.
- flags: Flags,
- warn: Warning_Handler,
- err: Error_Handler,
- prev_tok: tokenizer.Token,
- curr_tok: tokenizer.Token,
- // >= 0: In Expression
- // < 0: In Control Clause
- // NOTE(bill): Used to prevent type literals in control clauses
- expr_level: int,
- allow_range: bool, // NOTE(bill): Ranges are only allowed in certain cases
- allow_in_expr: bool, // NOTE(bill): 'in' expressions are only allowed in certain cases
- in_foreign_block: bool,
- allow_type: bool,
- lead_comment: ^ast.Comment_Group,
- line_comment: ^ast.Comment_Group,
- curr_proc: ^ast.Node,
- error_count: int,
- fix_count: int,
- fix_prev_pos: tokenizer.Pos,
- peeking: bool,
- }
- MAX_FIX_COUNT :: 10
- Stmt_Allow_Flag :: enum {
- In,
- Label,
- }
- Stmt_Allow_Flags :: distinct bit_set[Stmt_Allow_Flag]
- Import_Decl_Kind :: enum {
- Standard,
- Using,
- }
- default_warning_handler :: proc(pos: tokenizer.Pos, msg: string, args: ..any) {
- fmt.eprintf("%s(%d:%d): Warning: ", pos.file, pos.line, pos.column)
- fmt.eprintf(msg, ..args)
- fmt.eprintf("\n")
- }
- default_error_handler :: proc(pos: tokenizer.Pos, msg: string, args: ..any) {
- fmt.eprintf("%s(%d:%d): ", pos.file, pos.line, pos.column)
- fmt.eprintf(msg, ..args)
- fmt.eprintf("\n")
- }
- warn :: proc(p: ^Parser, pos: tokenizer.Pos, msg: string, args: ..any) {
- if p.warn != nil {
- p.warn(pos, msg, ..args)
- }
- p.file.syntax_warning_count += 1
- }
- error :: proc(p: ^Parser, pos: tokenizer.Pos, msg: string, args: ..any) {
- if p.err != nil {
- p.err(pos, msg, ..args)
- }
- p.file.syntax_error_count += 1
- p.error_count += 1
- }
- end_pos :: proc(tok: tokenizer.Token) -> tokenizer.Pos {
- pos := tok.pos
- pos.offset += len(tok.text)
- if tok.kind == .Comment {
- if tok.text[:2] != "/*" {
- pos.column += len(tok.text)
- } else {
- for i := 0; i < len(tok.text); i += 1 {
- c := tok.text[i]
- if c == '\n' {
- pos.line += 1
- pos.column = 1
- } else {
- pos.column += 1
- }
- }
- }
- } else {
- pos.column += len(tok.text)
- }
- return pos
- }
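- // Worked example (illustrative, not part of the original source): end_pos adds the
- // token length to the column for single-line tokens, and walks the text of block
- // comments to account for embedded newlines.
- //
- // tok: tokenizer.Token
- // tok.text = "foo"
- // tok.pos = {line = 1, column = 5, offset = 10}
- // end_pos(tok) // -> {line = 1, column = 8, offset = 13}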
- default_parser :: proc(flags := Flags{.Optional_Semicolons}) -> Parser {
- return Parser {
- flags = flags,
- err = default_error_handler,
- warn = default_warning_handler,
- }
- }
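- // Usage sketch (illustrative; assumes the file's source text is already in `src`,
- // and "example.odin" is a placeholder path):
- //
- // p := default_parser()
- // file := ast.File{src = src, fullpath = "example.odin"}
- // if parse_file(&p, &file) {
- //     fmt.println("top-level decls:", len(file.decls))
- // } else {
- //     fmt.println("syntax errors:", file.syntax_error_count)
- // }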
- is_package_name_reserved :: proc(name: string) -> bool {
- switch name {
- case "builtin", "intrinsics":
- return true
- }
- return false
- }
- parse_file :: proc(p: ^Parser, file: ^ast.File) -> bool {
- zero_parser: {
- p.prev_tok = {}
- p.curr_tok = {}
- p.expr_level = 0
- p.allow_range = false
- p.allow_in_expr = false
- p.in_foreign_block = false
- p.allow_type = false
- p.lead_comment = nil
- p.line_comment = nil
- }
- p.tok.flags += {.Insert_Semicolon}
- p.file = file
- tokenizer.init(&p.tok, file.src, file.fullpath, p.err)
- if p.tok.ch <= 0 {
- return true
- }
- advance_token(p)
- consume_comment_groups(p, p.prev_tok)
- docs := p.lead_comment
- invalid_pre_package_token: Maybe(tokenizer.Token)
- for p.curr_tok.kind != .Package && p.curr_tok.kind != .EOF {
- if p.curr_tok.kind == .Comment {
- consume_comment_groups(p, p.prev_tok)
- } else if p.curr_tok.kind == .File_Tag {
- append(&p.file.tags, p.curr_tok)
- advance_token(p)
- } else {
- if invalid_pre_package_token == nil {
- invalid_pre_package_token = p.curr_tok
- }
- advance_token(p)
- }
- }
- if p.curr_tok.kind != .Package {
- t := invalid_pre_package_token.? or_else p.curr_tok
- error(p, t.pos, "Expected a package declaration at the start of the file")
- return false
- }
-
- p.file.pkg_token = expect_token(p, .Package)
-
- if ippt, ok := invalid_pre_package_token.?; ok {
- error(p, ippt.pos, "Expected only comments or lines starting with '#+' before the package declaration")
- return false
- }
-
- pkg_name := expect_token_after(p, .Ident, "package")
- if pkg_name.kind == .Ident {
- switch name := pkg_name.text; {
- case is_blank_ident(name):
- error(p, pkg_name.pos, "invalid package name '_'")
- case is_package_name_reserved(name), file.pkg != nil && file.pkg.kind != .Runtime && name == "runtime":
- error(p, pkg_name.pos, "use of reserved package name '%s'", name)
- }
- }
- p.file.pkg_name = pkg_name.text
- pd := ast.new(ast.Package_Decl, pkg_name.pos, end_pos(p.prev_tok))
- pd.docs = docs
- pd.token = p.file.pkg_token
- pd.name = pkg_name.text
- pd.comment = p.line_comment
- p.file.pkg_decl = pd
- p.file.docs = docs
- expect_semicolon(p, pd)
- if p.file.syntax_error_count > 0 {
- return false
- }
- p.file.decls = make([dynamic]^ast.Stmt)
- for p.curr_tok.kind != .EOF {
- stmt := parse_stmt(p)
- if stmt != nil {
- if _, ok := stmt.derived.(^ast.Empty_Stmt); !ok {
- append(&p.file.decls, stmt)
- if es, es_ok := stmt.derived.(^ast.Expr_Stmt); es_ok && es.expr != nil {
- if _, pl_ok := es.expr.derived.(^ast.Proc_Lit); pl_ok {
- error(p, stmt.pos, "procedure literal evaluated but not used")
- }
- }
- }
- }
- }
- return true
- }
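- // For reference (illustrative): the smallest input parse_file accepts is a package
- // declaration, optionally preceded only by comments or '#+' file tags, e.g.
- //
- // // a leading comment is fine
- // #+build linux
- // package main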
- peek_token_kind :: proc(p: ^Parser, kind: tokenizer.Token_Kind, lookahead := 0) -> (ok: bool) {
- prev_parser := p^
- p.peeking = true
- defer {
- p^ = prev_parser
- p.peeking = false
- }
- p.tok.err = nil
- for i := 0; i <= lookahead; i += 1 {
- advance_token(p)
- }
- ok = p.curr_tok.kind == kind
- return
- }
- peek_token :: proc(p: ^Parser, lookahead := 0) -> (tok: tokenizer.Token) {
- prev_parser := p^
- p.peeking = true
- defer {
- p^ = prev_parser
- p.peeking = false
- }
- p.tok.err = nil
- for i := 0; i <= lookahead; i += 1 {
- advance_token(p)
- }
- tok = p.curr_tok
- return
- }
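- // Note: both peek procedures copy the entire Parser (tokenizer included) and
- // restore it on exit, so lookahead is side-effect free. Illustrative sketch:
- //
- // next := peek_token(p)    // inspect the next token
- // same := peek_token(p)    // parser state was restored: same == next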
- skip_possible_newline :: proc(p: ^Parser) -> bool {
- if tokenizer.is_newline(p.curr_tok) {
- advance_token(p)
- return true
- }
- return false
- }
- skip_possible_newline_for_literal :: proc(p: ^Parser) -> bool {
- if .Optional_Semicolons not_in p.flags {
- return false
- }
- curr_pos := p.curr_tok.pos
- if tokenizer.is_newline(p.curr_tok) {
- next := peek_token(p)
- if curr_pos.line+1 >= next.pos.line {
- #partial switch next.kind {
- case .Open_Brace, .Else, .Where:
- advance_token(p)
- return true
- }
- }
- }
- return false
- }
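- // Example (illustrative): with .Optional_Semicolons set, this allows '{', 'else',
- // or 'where' to begin on the line after a statement without an explicit terminator:
- //
- // if cond {
- // }
- // else {   // the newline before 'else' is skipped here
- // }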
- next_token0 :: proc(p: ^Parser) -> bool {
- p.curr_tok = tokenizer.scan(&p.tok)
- if p.curr_tok.kind == .EOF {
- // error(p, p.curr_tok.pos, "token is EOF");
- return false
- }
- return true
- }
- consume_comment :: proc(p: ^Parser) -> (tok: tokenizer.Token, end_line: int) {
- tok = p.curr_tok
- assert(tok.kind == .Comment)
- end_line = tok.pos.line
- if tok.text[1] == '*' {
- for c in tok.text {
- if c == '\n' {
- end_line += 1
- }
- }
- }
- _ = next_token0(p)
- if p.curr_tok.pos.line > tok.pos.line {
- end_line += 1
- }
- return
- }
- consume_comment_group :: proc(p: ^Parser, n: int) -> (comments: ^ast.Comment_Group, end_line: int) {
- list: [dynamic]tokenizer.Token
- end_line = p.curr_tok.pos.line
- for p.curr_tok.kind == .Comment &&
- p.curr_tok.pos.line <= end_line+n {
- comment: tokenizer.Token
- comment, end_line = consume_comment(p)
- append(&list, comment)
- }
- if len(list) > 0 && !p.peeking {
- comments = ast.new(ast.Comment_Group, list[0].pos, end_pos(list[len(list)-1]))
- comments.list = list[:]
- append(&p.file.comments, comments)
- }
- return
- }
- consume_comment_groups :: proc(p: ^Parser, prev: tokenizer.Token) {
- if p.curr_tok.kind == .Comment {
- comment: ^ast.Comment_Group
- end_line := 0
- if p.curr_tok.pos.line == prev.pos.line {
- comment, end_line = consume_comment_group(p, 0)
- if p.curr_tok.pos.line != end_line || p.curr_tok.kind == .EOF {
- p.line_comment = comment
- }
- }
- end_line = -1
- for p.curr_tok.kind == .Comment {
- comment, end_line = consume_comment_group(p, 1)
- }
- if end_line+1 >= p.curr_tok.pos.line || end_line < 0 {
- p.lead_comment = comment
- }
- assert(p.curr_tok.kind != .Comment)
- }
- }
- advance_token :: proc(p: ^Parser) -> tokenizer.Token {
- p.lead_comment = nil
- p.line_comment = nil
- p.prev_tok = p.curr_tok
- prev := p.prev_tok
- if next_token0(p) {
- consume_comment_groups(p, prev)
- }
- return prev
- }
- expect_token :: proc(p: ^Parser, kind: tokenizer.Token_Kind) -> tokenizer.Token {
- prev := p.curr_tok
- if prev.kind != kind {
- e := tokenizer.to_string(kind)
- g := tokenizer.token_to_string(prev)
- error(p, prev.pos, "expected '%s', got '%s'", e, g)
- }
- advance_token(p)
- return prev
- }
- expect_token_after :: proc(p: ^Parser, kind: tokenizer.Token_Kind, msg: string) -> tokenizer.Token {
- prev := p.curr_tok
- if prev.kind != kind {
- e := tokenizer.to_string(kind)
- g := tokenizer.token_to_string(prev)
- error(p, prev.pos, "expected '%s' after %s, got '%s'", e, msg, g)
- }
- advance_token(p)
- return prev
- }
- expect_operator :: proc(p: ^Parser) -> tokenizer.Token {
- prev := p.curr_tok
- #partial switch prev.kind {
- case .If, .When, .Or_Else:
- // okay
- case:
- if !tokenizer.is_operator(prev.kind) {
- g := tokenizer.token_to_string(prev)
- error(p, prev.pos, "expected an operator, got '%s'", g)
- }
- }
- advance_token(p)
- return prev
- }
- allow_token :: proc(p: ^Parser, kind: tokenizer.Token_Kind) -> bool {
- if p.curr_tok.kind == kind {
- advance_token(p)
- return true
- }
- return false
- }
- end_of_line_pos :: proc(p: ^Parser, tok: tokenizer.Token) -> tokenizer.Pos {
- offset := clamp(tok.pos.offset, 0, len(p.tok.src)-1)
- s := p.tok.src[offset:]
- pos := tok.pos
- pos.column -= 1
- for len(s) != 0 && s[0] != 0 && s[0] != '\n' {
- s = s[1:]
- pos.column += 1
- }
- return pos
- }
- expect_closing_brace_of_field_list :: proc(p: ^Parser) -> tokenizer.Token {
- return expect_closing_token_of_field_list(p, .Close_Brace, "field list")
- }
- expect_closing_token_of_field_list :: proc(p: ^Parser, closing_kind: tokenizer.Token_Kind, msg: string) -> tokenizer.Token {
- token := p.curr_tok
- if allow_token(p, closing_kind) {
- return token
- }
- if allow_token(p, .Semicolon) && !tokenizer.is_newline(token) {
- str := tokenizer.token_to_string(token)
- error(p, end_of_line_pos(p, p.prev_tok), "expected a comma, got %s", str)
- }
- expect_closing := expect_token_after(p, closing_kind, msg)
- if expect_closing.kind != closing_kind {
- for p.curr_tok.kind != closing_kind && p.curr_tok.kind != .EOF && !is_non_inserted_semicolon(p.curr_tok) {
- advance_token(p)
- }
- return p.curr_tok
- }
- return expect_closing
- }
- expect_closing_parentheses_of_field_list :: proc(p: ^Parser) -> tokenizer.Token {
- token := p.curr_tok
- if allow_token(p, .Close_Paren) {
- return token
- }
- if allow_token(p, .Semicolon) && !tokenizer.is_newline(token) {
- str := tokenizer.token_to_string(token)
- error(p, end_of_line_pos(p, p.prev_tok), "expected a comma, got %s", str)
- }
- for p.curr_tok.kind != .Close_Paren && p.curr_tok.kind != .EOF && !is_non_inserted_semicolon(p.curr_tok) {
- advance_token(p)
- }
- return expect_token(p, .Close_Paren)
- }
- is_non_inserted_semicolon :: proc(tok: tokenizer.Token) -> bool {
- return tok.kind == .Semicolon && tok.text != "\n"
- }
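- // The tokenizer gives inserted terminators the text "\n", while an explicit ';'
- // keeps its own text, e.g. (illustrative):
- //
- // x := 1; y := 2   // explicit semicolon: tok.text == ";"
- // z := 3           // inserted at the newline: tok.text == "\n"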
- is_blank_ident :: proc{
- is_blank_ident_string,
- is_blank_ident_token,
- is_blank_ident_node,
- }
- is_blank_ident_string :: proc(str: string) -> bool {
- return str == "_"
- }
- is_blank_ident_token :: proc(tok: tokenizer.Token) -> bool {
- if tok.kind == .Ident {
- return is_blank_ident_string(tok.text)
- }
- return false
- }
- is_blank_ident_node :: proc(node: ^ast.Node) -> bool {
- if ident, ok := node.derived.(^ast.Ident); ok {
- return is_blank_ident(ident.name)
- }
- return true
- }
- fix_advance_to_next_stmt :: proc(p: ^Parser) {
- for {
- #partial switch t := p.curr_tok; t.kind {
- case .EOF, .Semicolon:
- return
- case .Package, .Foreign, .Import,
- .If, .For, .When, .Return, .Switch,
- .Defer, .Using,
- .Break, .Continue, .Fallthrough,
- .Hash:
- if t.pos == p.fix_prev_pos && p.fix_count < MAX_FIX_COUNT {
- p.fix_count += 1
- return
- }
- if t.pos.offset > p.fix_prev_pos.offset {
- p.fix_prev_pos = t.pos
- p.fix_count = 0
- return
- }
- }
- advance_token(p)
- }
- }
- is_semicolon_optional_for_node :: proc(p: ^Parser, node: ^ast.Node) -> bool {
- if node == nil {
- return false
- }
- if .Optional_Semicolons in p.flags {
- return true
- }
- #partial switch n in node.derived {
- case ^ast.Empty_Stmt, ^ast.Block_Stmt:
- return true
- case ^ast.If_Stmt, ^ast.When_Stmt,
- ^ast.For_Stmt, ^ast.Range_Stmt, ^ast.Inline_Range_Stmt,
- ^ast.Switch_Stmt, ^ast.Type_Switch_Stmt:
- return true
- case ^ast.Helper_Type:
- return is_semicolon_optional_for_node(p, n.type)
- case ^ast.Distinct_Type:
- return is_semicolon_optional_for_node(p, n.type)
- case ^ast.Pointer_Type:
- return is_semicolon_optional_for_node(p, n.elem)
- case ^ast.Struct_Type, ^ast.Union_Type, ^ast.Enum_Type, ^ast.Bit_Set_Type, ^ast.Bit_Field_Type:
- // Require semicolon within a procedure body
- return p.curr_proc == nil
- case ^ast.Proc_Lit:
- return true
- case ^ast.Package_Decl, ^ast.Import_Decl, ^ast.Foreign_Import_Decl:
- return true
- case ^ast.Foreign_Block_Decl:
- return is_semicolon_optional_for_node(p, n.body)
- case ^ast.Value_Decl:
- if n.is_mutable {
- return false
- }
- if len(n.values) > 0 {
- return is_semicolon_optional_for_node(p, n.values[len(n.values)-1])
- }
- }
- return false
- }
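- // Example (illustrative): without .Optional_Semicolons, a struct type needs no
- // terminator at file scope (p.curr_proc == nil), but one is still expected inside
- // a procedure body (the terminator inserted at the newline satisfies it):
- //
- // Foo :: struct {}     // file scope: semicolon optional
- // main :: proc() {
- //     Bar :: struct {} // inside a body: a terminator is required here
- // }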
- expect_semicolon_newline_error :: proc(p: ^Parser, token: tokenizer.Token, s: ^ast.Node) {
- if .Optional_Semicolons not_in p.flags && .Insert_Semicolon in p.tok.flags && token.text == "\n" {
- #partial switch token.kind {
- case .Close_Brace:
- case .Close_Paren:
- case .Else:
- return
- }
- if is_semicolon_optional_for_node(p, s) {
- return
- }
- tok := token
- tok.pos.column -= 1
- error(p, tok.pos, "expected ';', got newline")
- }
- }
- expect_semicolon :: proc(p: ^Parser, node: ^ast.Node) -> bool {
- if allow_token(p, .Semicolon) {
- expect_semicolon_newline_error(p, p.prev_tok, node)
- return true
- }
- prev := p.prev_tok
- if prev.kind == .Semicolon {
- expect_semicolon_newline_error(p, p.prev_tok, node)
- return true
- }
- if p.curr_tok.kind == .EOF {
- return true
- }
- if node != nil {
- if .Insert_Semicolon in p.tok.flags {
- #partial switch p.curr_tok.kind {
- case .Close_Brace, .Close_Paren, .Else, .EOF:
- return true
- }
- if is_semicolon_optional_for_node(p, node) {
- return true
- }
- } else if prev.pos.line != p.curr_tok.pos.line {
- if is_semicolon_optional_for_node(p, node) {
- return true
- }
- } else {
- #partial switch p.curr_tok.kind {
- case .Close_Brace, .Close_Paren, .Else:
- return true
- case .EOF:
- if is_semicolon_optional_for_node(p, node) {
- return true
- }
- }
- }
- } else {
- if p.curr_tok.kind == .EOF {
- return true
- }
- }
- error(p, prev.pos, "expected ';', got %s", tokenizer.token_to_string(p.curr_tok))
- fix_advance_to_next_stmt(p)
- return false
- }
- new_blank_ident :: proc(p: ^Parser, pos: tokenizer.Pos) -> ^ast.Ident {
- tok: tokenizer.Token
- tok.pos = pos
- i := ast.new(ast.Ident, pos, end_pos(tok))
- i.name = "_"
- return i
- }
- parse_ident :: proc(p: ^Parser) -> ^ast.Ident {
- tok := p.curr_tok
- pos := tok.pos
- name := "_"
- if tok.kind == .Ident {
- name = tok.text
- advance_token(p)
- } else {
- expect_token(p, .Ident)
- }
- i := ast.new(ast.Ident, pos, end_pos(tok))
- i.name = name
- return i
- }
- parse_stmt_list :: proc(p: ^Parser) -> []^ast.Stmt {
- list: [dynamic]^ast.Stmt
- for p.curr_tok.kind != .Case &&
- p.curr_tok.kind != .Close_Brace &&
- p.curr_tok.kind != .EOF {
- stmt := parse_stmt(p)
- if stmt != nil {
- if _, ok := stmt.derived.(^ast.Empty_Stmt); !ok {
- append(&list, stmt)
- if es, es_ok := stmt.derived.(^ast.Expr_Stmt); es_ok && es.expr != nil {
- if _, pl_ok := es.expr.derived.(^ast.Proc_Lit); pl_ok {
- error(p, stmt.pos, "procedure literal evaluated but not used")
- }
- }
- }
- }
- }
- return list[:]
- }
- parse_block_stmt :: proc(p: ^Parser, is_when: bool) -> ^ast.Stmt {
- skip_possible_newline_for_literal(p)
- if !is_when && p.curr_proc == nil {
- error(p, p.curr_tok.pos, "you cannot use a block statement in the file scope")
- }
- return parse_body(p)
- }
- parse_when_stmt :: proc(p: ^Parser) -> ^ast.When_Stmt {
- tok := expect_token(p, .When)
- cond: ^ast.Expr
- body: ^ast.Stmt
- else_stmt: ^ast.Stmt
- prev_level := p.expr_level
- p.expr_level = -1
- prev_allow_in_expr := p.allow_in_expr
- p.allow_in_expr = true
- cond = parse_expr(p, false)
- p.allow_in_expr = prev_allow_in_expr
- p.expr_level = prev_level
- if cond == nil {
- error(p, p.curr_tok.pos, "expected a condition for when statement")
- }
- if allow_token(p, .Do) {
- body = convert_stmt_to_body(p, parse_stmt(p))
- if cond.pos.line != body.pos.line {
- error(p, body.pos, "the body of a 'do' must be on the same line as when statement")
- }
- } else {
- body = parse_block_stmt(p, true)
- }
- skip_possible_newline_for_literal(p)
- if p.curr_tok.kind == .Else {
- else_tok := expect_token(p, .Else)
- #partial switch p.curr_tok.kind {
- case .When:
- else_stmt = parse_when_stmt(p)
- case .Open_Brace:
- else_stmt = parse_block_stmt(p, true)
- case .Do:
- expect_token(p, .Do)
- else_stmt = convert_stmt_to_body(p, parse_stmt(p))
- if else_tok.pos.line != else_stmt.pos.line {
- error(p, else_stmt.pos, "the body of a 'do' must be on the same line as 'else'")
- }
- case:
- error(p, p.curr_tok.pos, "expected when statement block statement")
- else_stmt = ast.new(ast.Bad_Stmt, p.curr_tok.pos, end_pos(p.curr_tok))
- }
- }
- end := body.end
- if else_stmt != nil {
- end = else_stmt.end
- }
- when_stmt := ast.new(ast.When_Stmt, tok.pos, end)
- when_stmt.when_pos = tok.pos
- when_stmt.cond = cond
- when_stmt.body = body
- when_stmt.else_stmt = else_stmt
- return when_stmt
- }
- convert_stmt_to_expr :: proc(p: ^Parser, stmt: ^ast.Stmt, kind: string) -> ^ast.Expr {
- if stmt == nil {
- return nil
- }
- if es, ok := stmt.derived.(^ast.Expr_Stmt); ok {
- return es.expr
- }
- error(p, stmt.pos, "expected %s, found a simple statement", kind)
- return ast.new(ast.Bad_Expr, p.curr_tok.pos, end_pos(p.curr_tok))
- }
- parse_if_stmt :: proc(p: ^Parser) -> ^ast.If_Stmt {
- tok := expect_token(p, .If)
- init: ^ast.Stmt
- cond: ^ast.Expr
- body: ^ast.Stmt
- else_stmt: ^ast.Stmt
- prev_level := p.expr_level
- p.expr_level = -1
- prev_allow_in_expr := p.allow_in_expr
- p.allow_in_expr = true
- if allow_token(p, .Semicolon) {
- cond = parse_expr(p, false)
- } else {
- init = parse_simple_stmt(p, nil)
- if parse_control_statement_semicolon_separator(p) {
- cond = parse_expr(p, false)
- } else {
- cond = convert_stmt_to_expr(p, init, "boolean expression")
- init = nil
- }
- }
- p.expr_level = prev_level
- p.allow_in_expr = prev_allow_in_expr
- if cond == nil {
- error(p, p.curr_tok.pos, "expected a condition for if statement")
- }
- if allow_token(p, .Do) {
- body = convert_stmt_to_body(p, parse_stmt(p))
- if cond.pos.line != body.pos.line {
- error(p, body.pos, "the body of a 'do' must be on the same line as the if condition")
- }
- } else {
- body = parse_block_stmt(p, false)
- }
- else_pos := p.curr_tok.pos
- skip_possible_newline_for_literal(p)
- if p.curr_tok.kind == .Else {
- else_tok := expect_token(p, .Else)
- #partial switch p.curr_tok.kind {
- case .If:
- else_stmt = parse_if_stmt(p)
- case .Open_Brace:
- else_stmt = parse_block_stmt(p, false)
- case .Do:
- expect_token(p, .Do)
- else_stmt = convert_stmt_to_body(p, parse_stmt(p))
- if else_tok.pos.line != else_stmt.pos.line {
- error(p, body.pos, "the body of a 'do' must be on the same line as 'else'")
- }
- case:
- error(p, p.curr_tok.pos, "expected if statement block statement")
- else_stmt = ast.new(ast.Bad_Stmt, p.curr_tok.pos, end_pos(p.curr_tok))
- }
- }
-
- end: tokenizer.Pos
- if body != nil {
- end = body.end
- }
- if else_stmt != nil {
- end = else_stmt.end
- }
- if_stmt := ast.new(ast.If_Stmt, tok.pos, end)
- if_stmt.if_pos = tok.pos
- if_stmt.init = init
- if_stmt.cond = cond
- if_stmt.body = body
- if_stmt.else_stmt = else_stmt
- if_stmt.else_pos = else_pos
- return if_stmt
- }
- parse_control_statement_semicolon_separator :: proc(p: ^Parser) -> bool {
- tok := peek_token(p)
- if tok.kind != .Open_Brace {
- return allow_token(p, .Semicolon)
- }
- if p.curr_tok.text == ";" {
- return allow_token(p, .Semicolon)
- }
- return false
- }
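- // Illustrative: when the token after the separator would be '{', only an explicit
- // ';' (not one inserted at a newline) splits an init statement from the condition:
- //
- // if x := foo(); x > 0 { ... }   // explicit ';' keeps init and cond apart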
- parse_for_stmt :: proc(p: ^Parser) -> ^ast.Stmt {
- if p.curr_proc == nil {
- error(p, p.curr_tok.pos, "you cannot use a for statement in the file scope")
- }
- tok := expect_token(p, .For)
- init: ^ast.Stmt
- cond: ^ast.Stmt
- post: ^ast.Stmt
- body: ^ast.Stmt
- is_range := false
- if p.curr_tok.kind != .Open_Brace && p.curr_tok.kind != .Do {
- prev_level := p.expr_level
- defer p.expr_level = prev_level
- p.expr_level = -1
- if p.curr_tok.kind == .In {
- in_tok := expect_token(p, .In)
- rhs: ^ast.Expr
- prev_allow_range := p.allow_range
- p.allow_range = true
- rhs = parse_expr(p, false)
- p.allow_range = prev_allow_range
- if allow_token(p, .Do) {
- body = convert_stmt_to_body(p, parse_stmt(p))
- if tok.pos.line != body.pos.line {
- error(p, body.pos, "the body of a 'do' must be on the same line as 'else'")
- }
- } else {
- body = parse_body(p)
- }
- range_stmt := ast.new(ast.Range_Stmt, tok.pos, body)
- range_stmt.for_pos = tok.pos
- range_stmt.in_pos = in_tok.pos
- range_stmt.expr = rhs
- range_stmt.body = body
- return range_stmt
- }
- if p.curr_tok.kind != .Semicolon {
- cond = parse_simple_stmt(p, {Stmt_Allow_Flag.In})
- if as, ok := cond.derived.(^ast.Assign_Stmt); ok && as.op.kind == .In {
- is_range = true
- }
- }
- if !is_range && parse_control_statement_semicolon_separator(p) {
- init = cond
- cond = nil
- if p.curr_tok.kind == .Open_Brace || p.curr_tok.kind == .Do {
- error(p, p.curr_tok.pos, "Expected ';', followed by a condition expression and post statement, got %s", tokenizer.tokens[p.curr_tok.kind])
- } else {
- if p.curr_tok.kind != .Semicolon {
- cond = parse_simple_stmt(p, nil)
- }
- if p.curr_tok.text != ";" {
- error(p, p.curr_tok.pos, "Expected ';', got %s", tokenizer.token_to_string(p.curr_tok))
- } else {
- expect_semicolon(p, nil)
- }
- if p.curr_tok.kind != .Open_Brace && p.curr_tok.kind != .Do {
- post = parse_simple_stmt(p, nil)
- }
- }
- }
- }
- if allow_token(p, .Do) {
- body = convert_stmt_to_body(p, parse_stmt(p))
- if tok.pos.line != body.pos.line {
- error(p, body.pos, "the body of a 'do' must be on the same line as the 'for' token")
- }
- } else {
- allow_token(p, .Semicolon)
- body = parse_body(p)
- }
- if is_range {
- assign_stmt := cond.derived.(^ast.Assign_Stmt)
- vals := assign_stmt.lhs[:]
- rhs: ^ast.Expr
- if len(assign_stmt.rhs) > 0 {
- rhs = assign_stmt.rhs[0]
- }
- range_stmt := ast.new(ast.Range_Stmt, tok.pos, body)
- range_stmt.for_pos = tok.pos
- range_stmt.vals = vals
- range_stmt.in_pos = assign_stmt.op.pos
- range_stmt.expr = rhs
- range_stmt.body = body
- return range_stmt
- }
- cond_expr := convert_stmt_to_expr(p, cond, "boolean expression")
- for_stmt := ast.new(ast.For_Stmt, tok.pos, body)
- for_stmt.for_pos = tok.pos
- for_stmt.init = init
- for_stmt.cond = cond_expr
- for_stmt.post = post
- for_stmt.body = body
- return for_stmt
- }
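- // The shapes handled above (illustrative):
- //
- // for cond { ... }               // while-style: condition only
- // for init; cond; post { ... }   // C-style three-part header
- // for v, i in elems { ... }      // range form, lowered to Range_Stmt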
- parse_case_clause :: proc(p: ^Parser, is_type_switch: bool) -> ^ast.Case_Clause {
- tok := expect_token(p, .Case)
- list: []^ast.Expr
- if p.curr_tok.kind != .Colon {
- prev_allow_range, prev_allow_in_expr := p.allow_range, p.allow_in_expr
- defer p.allow_range, p.allow_in_expr = prev_allow_range, prev_allow_in_expr
- p.allow_range, p.allow_in_expr = !is_type_switch, !is_type_switch
- list = parse_rhs_expr_list(p)
- }
- terminator := expect_token(p, .Colon)
- stmts := parse_stmt_list(p)
- cc := ast.new(ast.Case_Clause, tok.pos, end_pos(p.prev_tok))
- cc.list = list
- cc.terminator = terminator
- cc.body = stmts
- cc.case_pos = tok.pos
- return cc
- }
- parse_switch_stmt :: proc(p: ^Parser) -> ^ast.Stmt {
- tok := expect_token(p, .Switch)
- init: ^ast.Stmt
- tag: ^ast.Stmt
- is_type_switch := false
- clauses: [dynamic]^ast.Stmt
- if p.curr_tok.kind != .Open_Brace {
- prev_level := p.expr_level
- defer p.expr_level = prev_level
- p.expr_level = -1
- if p.curr_tok.kind == .In {
- in_tok := expect_token(p, .In)
- is_type_switch = true
- lhs := make([]^ast.Expr, 1)
- rhs := make([]^ast.Expr, 1)
- lhs[0] = new_blank_ident(p, tok.pos)
- rhs[0] = parse_expr(p, true)
- as := ast.new(ast.Assign_Stmt, tok.pos, rhs[0])
- as.lhs = lhs
- as.op = in_tok
- as.rhs = rhs
- tag = as
- } else {
- tag = parse_simple_stmt(p, {Stmt_Allow_Flag.In})
- if as, ok := tag.derived.(^ast.Assign_Stmt); ok && as.op.kind == .In {
- is_type_switch = true
- } else if parse_control_statement_semicolon_separator(p) {
- init = tag
- tag = nil
- if p.curr_tok.kind != .Open_Brace {
- tag = parse_simple_stmt(p, nil)
- }
- }
- }
- }
- skip_possible_newline(p)
- open := expect_token(p, .Open_Brace)
- for p.curr_tok.kind == .Case {
- clause := parse_case_clause(p, is_type_switch)
- append(&clauses, clause)
- }
- close := expect_token(p, .Close_Brace)
- body := ast.new(ast.Block_Stmt, open.pos, end_pos(close))
- body.stmts = clauses[:]
- if is_type_switch {
- ts := ast.new(ast.Type_Switch_Stmt, tok.pos, body)
- ts.tag = tag
- ts.body = body
- ts.switch_pos = tok.pos
- return ts
- } else {
- cond := convert_stmt_to_expr(p, tag, "switch expression")
- ts := ast.new(ast.Switch_Stmt, tok.pos, body)
- ts.init = init
- ts.cond = cond
- ts.body = body
- ts.switch_pos = tok.pos
- return ts
- }
- }
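- // Illustrative: a '.In' tag marks a type switch, otherwise the tag is an ordinary
- // expression:
- //
- // switch v in x { case int: ... }   // Type_Switch_Stmt
- // switch y { case 1: ... }          // Switch_Stmt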
- parse_attribute :: proc(p: ^Parser, tok: tokenizer.Token, open_kind, close_kind: tokenizer.Token_Kind, docs: ^ast.Comment_Group) -> ^ast.Stmt {
- elems: [dynamic]^ast.Expr
- open, close: tokenizer.Token
- if p.curr_tok.kind == .Ident {
- elem := parse_ident(p)
- append(&elems, elem)
- } else {
- open = expect_token(p, open_kind)
- p.expr_level += 1
- for p.curr_tok.kind != close_kind &&
- p.curr_tok.kind != .EOF {
- elem: ^ast.Expr
- elem = parse_ident(p)
- if p.curr_tok.kind == .Eq {
- eq := expect_token(p, .Eq)
- value := parse_value(p)
- fv := ast.new(ast.Field_Value, elem.pos, value)
- fv.field = elem
- fv.sep = eq.pos
- fv.value = value
- elem = fv
- }
- append(&elems, elem)
- allow_token(p, .Comma) or_break
- }
- p.expr_level -= 1
- close = expect_token_after(p, close_kind, "attribute")
- }
- attribute := ast.new(ast.Attribute, tok.pos, end_pos(close))
- attribute.tok = tok.kind
- attribute.open = open.pos
- attribute.elems = elems[:]
- attribute.close = close.pos
- skip_possible_newline(p)
- decl := parse_stmt(p)
- #partial switch d in decl.derived_stmt {
- case ^ast.Value_Decl:
- if d.docs == nil { d.docs = docs }
- append(&d.attributes, attribute)
- case ^ast.Foreign_Block_Decl:
- if d.docs == nil { d.docs = docs }
- append(&d.attributes, attribute)
- case ^ast.Foreign_Import_Decl:
- if d.docs == nil { d.docs = docs }
- append(&d.attributes, attribute)
- case ^ast.Import_Decl:
- if d.docs == nil { d.docs = docs }
- append(&d.attributes, attribute)
- case:
- error(p, decl.pos, "expected a value or foreign declaration after an attribute")
- free(attribute)
- delete(elems)
- }
- return decl
- }
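- // Attributes take either a bare identifier or a parenthesised list of identifiers
- // and 'name = value' pairs, and attach to the declaration that follows
- // ('link_name' here is just an example attribute). Illustrative:
- //
- // @private
- // @(export, link_name = "odin_main")
- // main :: proc() {}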
- parse_foreign_block_decl :: proc(p: ^Parser) -> ^ast.Stmt {
- decl := parse_stmt(p)
- #partial switch _ in decl.derived_stmt {
- case ^ast.Empty_Stmt, ^ast.Bad_Stmt, ^ast.Bad_Decl:
- // Ignore
- return nil
- case ^ast.When_Stmt, ^ast.Value_Decl:
- return decl
- }
- error(p, decl.pos, "foreign blocks only allow procedure and variable declarations")
- return nil
- }
- parse_foreign_block :: proc(p: ^Parser, tok: tokenizer.Token) -> ^ast.Foreign_Block_Decl {
- docs := p.lead_comment
- foreign_library: ^ast.Expr
- #partial switch p.curr_tok.kind {
- case .Open_Brace:
- i := ast.new(ast.Ident, tok.pos, end_pos(tok))
- i.name = "_"
- foreign_library = i
- case:
- foreign_library = parse_ident(p)
- }
- decls: [dynamic]^ast.Stmt
- prev_in_foreign_block := p.in_foreign_block
- defer p.in_foreign_block = prev_in_foreign_block
- p.in_foreign_block = true
- skip_possible_newline_for_literal(p)
- open := expect_token(p, .Open_Brace)
- for p.curr_tok.kind != .Close_Brace && p.curr_tok.kind != .EOF {
- decl := parse_foreign_block_decl(p)
- if decl != nil {
- append(&decls, decl)
- }
- }
- close := expect_token(p, .Close_Brace)
- body := ast.new(ast.Block_Stmt, open.pos, end_pos(close))
- body.open = open.pos
- body.stmts = decls[:]
- body.close = close.pos
- decl := ast.new(ast.Foreign_Block_Decl, tok.pos, body)
- decl.docs = docs
- decl.tok = tok
- decl.foreign_library = foreign_library
- decl.body = body
- return decl
- }
- parse_foreign_decl :: proc(p: ^Parser) -> ^ast.Decl {
- docs := p.lead_comment
- tok := expect_token(p, .Foreign)
- #partial switch p.curr_tok.kind {
- case .Ident, .Open_Brace:
- return parse_foreign_block(p, tok)
- case .Import:
- import_tok := expect_token(p, .Import)
- name: ^ast.Ident
- if p.curr_tok.kind == .Ident {
- name = parse_ident(p)
- }
- if name != nil && is_blank_ident(name) {
- error(p, name.pos, "illegal foreign import name: '_'")
- }
- fullpaths: [dynamic]^ast.Expr
- if allow_token(p, .Open_Brace) {
- for p.curr_tok.kind != .Close_Brace &&
- p.curr_tok.kind != .EOF {
- path := parse_expr(p, false)
- append(&fullpaths, path)
- allow_token(p, .Comma) or_break
- }
- expect_token(p, .Close_Brace)
- } else {
- path := expect_token(p, .String)
- reserve(&fullpaths, 1)
- bl := ast.new(ast.Basic_Lit, path.pos, end_pos(path))
- bl.tok = path
- append(&fullpaths, bl)
- }
- if len(fullpaths) == 0 {
- error(p, import_tok.pos, "foreign import without any paths")
- }
- decl := ast.new(ast.Foreign_Import_Decl, tok.pos, end_pos(p.prev_tok))
- decl.docs = docs
- decl.foreign_tok = tok
- decl.import_tok = import_tok
- decl.name = name
- decl.fullpaths = fullpaths[:]
- expect_semicolon(p, decl)
- decl.comment = p.line_comment
- return decl
- }
- error(p, tok.pos, "invalid foreign declaration")
- return ast.new(ast.Bad_Decl, tok.pos, end_pos(tok))
- }
- parse_unrolled_for_loop :: proc(p: ^Parser, inline_tok: tokenizer.Token) -> ^ast.Stmt {
- for_tok := expect_token(p, .For)
- val0, val1: ^ast.Expr
- in_tok: tokenizer.Token
- expr: ^ast.Expr
- body: ^ast.Stmt
- bad_stmt := false
- if p.curr_tok.kind != .In {
- idents := parse_ident_list(p, false)
- switch len(idents) {
- case 1:
- val0 = idents[0]
- case 2:
- val0, val1 = idents[0], idents[1]
- case:
- error(p, for_tok.pos, "expected either 1 or 2 identifiers")
- bad_stmt = true
- }
- }
- in_tok = expect_token(p, .In)
- prev_allow_range := p.allow_range
- prev_level := p.expr_level
- p.allow_range = true
- p.expr_level = -1
- expr = parse_expr(p, false)
- p.expr_level = prev_level
- p.allow_range = prev_allow_range
- if allow_token(p, .Do) {
- body = convert_stmt_to_body(p, parse_stmt(p))
- if for_tok.pos.line != body.pos.line {
- error(p, body.pos, "the body of a 'do' must be on the same line as the 'for' token")
- }
- } else {
- body = parse_block_stmt(p, false)
- }
- if bad_stmt {
- return ast.new(ast.Bad_Stmt, inline_tok.pos, end_pos(p.prev_tok))
- }
- range_stmt := ast.new(ast.Inline_Range_Stmt, inline_tok.pos, body)
- range_stmt.inline_pos = inline_tok.pos
- range_stmt.for_pos = for_tok.pos
- range_stmt.val0 = val0
- range_stmt.val1 = val1
- range_stmt.in_pos = in_tok.pos
- range_stmt.expr = expr
- range_stmt.body = body
- return range_stmt
- }
- parse_stmt :: proc(p: ^Parser) -> ^ast.Stmt {
- #partial switch p.curr_tok.kind {
- case .Inline:
- if peek_token_kind(p, .For) {
- inline_tok := expect_token(p, .Inline)
- return parse_unrolled_for_loop(p, inline_tok)
- }
- fallthrough
- // Operands
- case .No_Inline,
- .Context, // Also allows for 'context = '
- .Proc,
- .Ident,
- .Integer, .Float, .Imag,
- .Rune, .String,
- .Open_Paren,
- .Pointer,
- .Asm, // Inline assembly
- // Unary Expressions
- .Add, .Sub, .Xor, .Not, .And:
- s := parse_simple_stmt(p, {Stmt_Allow_Flag.Label})
- expect_semicolon(p, s)
- return s
- case .Foreign: return parse_foreign_decl(p)
- case .Import: return parse_import_decl(p)
- case .If: return parse_if_stmt(p)
- case .When: return parse_when_stmt(p)
- case .For: return parse_for_stmt(p)
- case .Switch: return parse_switch_stmt(p)
- case .Defer:
- tok := advance_token(p)
- stmt := parse_stmt(p)
- #partial switch s in stmt.derived_stmt {
- case ^ast.Empty_Stmt:
- error(p, s.pos, "empty statement after defer (e.g. ';')")
- case ^ast.Defer_Stmt:
- error(p, s.pos, "you cannot defer a defer statement")
- stmt = s.stmt
- case ^ast.Return_Stmt:
- error(p, s.pos, "you cannot defer a return statement")
- }
- ds := ast.new(ast.Defer_Stmt, tok.pos, stmt)
- ds.stmt = stmt
- return ds
- case .Return:
- tok := advance_token(p)
- if p.expr_level > 0 {
- error(p, tok.pos, "you cannot use a return statement within an expression")
- }
- results: [dynamic]^ast.Expr
- for p.curr_tok.kind != .Semicolon && p.curr_tok.kind != .Close_Brace {
- result := parse_expr(p, false)
- append(&results, result)
- if p.curr_tok.kind != .Comma ||
- p.curr_tok.kind == .EOF {
- break
- }
- advance_token(p)
- }
- end := end_pos(tok)
- if len(results) > 0 {
- end = results[len(results)-1].end
- }
- rs := ast.new(ast.Return_Stmt, tok.pos, end)
- rs.results = results[:]
- expect_semicolon(p, rs)
- return rs
- case .Break, .Continue, .Fallthrough:
- tok := advance_token(p)
- label: ^ast.Ident
- if tok.kind != .Fallthrough && p.curr_tok.kind == .Ident {
- label = parse_ident(p)
- }
- s := ast.new(ast.Branch_Stmt, tok.pos, label)
- s.tok = tok
- s.label = label
- expect_semicolon(p, s)
- return s
- case .Using:
- docs := p.lead_comment
- tok := expect_token(p, .Using)
- if p.curr_tok.kind == .Import {
- return parse_import_decl(p, Import_Decl_Kind.Using)
- }
- list := parse_lhs_expr_list(p)
- if len(list) == 0 {
- error(p, tok.pos, "illegal use of 'using' statement")
- expect_semicolon(p, nil)
- return ast.new(ast.Bad_Stmt, tok.pos, end_pos(p.prev_tok))
- }
- if p.curr_tok.kind != .Colon {
- end := list[len(list)-1]
- expect_semicolon(p, end)
- us := ast.new(ast.Using_Stmt, tok.pos, end)
- us.list = list
- return us
- }
- expect_token_after(p, .Colon, "identifier list")
- decl := parse_value_decl(p, list, docs)
- if decl != nil {
- #partial switch d in decl.derived_stmt {
- case ^ast.Value_Decl:
- d.is_using = true
- return decl
- }
- }
- error(p, tok.pos, "illegal use of 'using' statement")
- return ast.new(ast.Bad_Stmt, tok.pos, end_pos(p.prev_tok))
- case .At:
- docs := p.lead_comment
- tok := advance_token(p)
- return parse_attribute(p, tok, .Open_Paren, .Close_Paren, docs)
- case .Hash:
- tok := expect_token(p, .Hash)
- tag := expect_token(p, .Ident)
- name := tag.text
- switch name {
- case "bounds_check", "no_bounds_check":
- stmt := parse_stmt(p)
- switch name {
- case "bounds_check":
- stmt.state_flags += {.Bounds_Check}
- case "no_bounds_check":
- stmt.state_flags += {.No_Bounds_Check}
- }
- return stmt
- case "type_assert", "no_type_assert":
- stmt := parse_stmt(p)
- switch name {
- case "type_assert":
- stmt.state_flags += {.Type_Assert}
- case "no_type_assert":
- stmt.state_flags += {.No_Type_Assert}
- }
- return stmt
- case "partial":
- stmt := parse_stmt(p)
- #partial switch s in stmt.derived_stmt {
- case ^ast.Switch_Stmt: s.partial = true
- case ^ast.Type_Switch_Stmt: s.partial = true
- case: error(p, stmt.pos, "#partial can only be applied to a switch statement")
- }
- return stmt
- case "assert", "panic":
- bd := ast.new(ast.Basic_Directive, tok.pos, end_pos(tag))
- bd.tok = tok
- bd.name = name
- ce := parse_call_expr(p, bd)
- es := ast.new(ast.Expr_Stmt, ce.pos, ce)
- es.expr = ce
- return es
- case "force_inline", "force_no_inline":
- expr := parse_inlining_operand(p, true, tag)
- es := ast.new(ast.Expr_Stmt, expr.pos, expr)
- es.expr = expr
- return es
- case "unroll":
- return parse_unrolled_for_loop(p, tag)
- case "reverse":
- stmt := parse_stmt(p)
- if range, is_range := stmt.derived.(^ast.Range_Stmt); is_range {
- if range.reverse {
- error(p, range.pos, "#reverse already applied to a 'for in' statement")
- }
- range.reverse = true
- } else {
- error(p, stmt.pos, "#reverse can only be applied to a 'for in' statement")
- }
- return stmt
- case "include":
- error(p, tag.pos, "#include is not a valid import declaration kind. Did you meant 'import'?")
- return ast.new(ast.Bad_Stmt, tok.pos, end_pos(tag))
- case:
- stmt := parse_stmt(p)
- end := stmt.pos if stmt != nil else end_pos(tok)
- te := ast.new(ast.Tag_Stmt, tok.pos, end)
- te.op = tok
- te.name = name
- te.stmt = stmt
- fix_advance_to_next_stmt(p)
- return te
- }
- case .Open_Brace:
- return parse_block_stmt(p, false)
- case .Semicolon:
- tok := advance_token(p)
- s := ast.new(ast.Empty_Stmt, tok.pos, end_pos(tok))
- return s
- }
- #partial switch p.curr_tok.kind {
- case .Else:
- token := expect_token(p, .Else)
- error(p, token.pos, "'else' unattached to an 'if' statement")
- #partial switch p.curr_tok.kind {
- case .If:
- return parse_if_stmt(p)
- case .When:
- return parse_when_stmt(p)
- case .Open_Brace:
- return parse_block_stmt(p, true)
- case .Do:
- expect_token(p, .Do)
- return convert_stmt_to_body(p, parse_stmt(p))
- case:
- fix_advance_to_next_stmt(p)
- return ast.new(ast.Bad_Stmt, token.pos, end_pos(p.curr_tok))
- }
- }
- tok := advance_token(p)
- error(p, tok.pos, "expected a statement, got %s", tokenizer.token_to_string(tok))
- fix_advance_to_next_stmt(p)
- s := ast.new(ast.Bad_Stmt, tok.pos, end_pos(tok))
- return s
- }
- token_precedence :: proc(p: ^Parser, kind: tokenizer.Token_Kind) -> int {
- #partial switch kind {
- case .Question, .If, .When, .Or_Else:
- return 1
- case .Ellipsis, .Range_Half, .Range_Full:
- if !p.allow_range {
- return 0
- }
- return 2
- case .Cmp_Or:
- return 3
- case .Cmp_And:
- return 4
- case .Cmp_Eq, .Not_Eq,
- .Lt, .Gt,
- .Lt_Eq, .Gt_Eq:
- return 5
- case .In, .Not_In:
- if p.expr_level < 0 && !p.allow_in_expr {
- return 0
- }
- fallthrough
- case .Add, .Sub, .Or, .Xor:
- return 6
- case .Mul, .Quo,
- .Mod, .Mod_Mod,
- .And, .And_Not,
- .Shl, .Shr:
- return 7
- }
- return 0
- }
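- // Precedence sketch (illustrative): larger numbers bind tighter, so 'a + b * c'
- // parses as 'a + (b * c)' (.Add -> 6, .Mul -> 7); range and 'in'/'not_in'
- // operators drop to precedence 0 (stopping binary parsing) unless the
- // surrounding context allows them.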
- parse_type_or_ident :: proc(p: ^Parser) -> ^ast.Expr {
- prev_allow_type := p.allow_type
- prev_expr_level := p.expr_level
- defer {
- p.allow_type = prev_allow_type
- p.expr_level = prev_expr_level
- }
- p.allow_type = true
- p.expr_level = -1
- lhs := true
- return parse_atom_expr(p, parse_operand(p, lhs), lhs)
- }
- parse_type :: proc(p: ^Parser) -> ^ast.Expr {
- type := parse_type_or_ident(p)
- if type == nil {
- error(p, p.curr_tok.pos, "expected a type")
- return ast.new(ast.Bad_Expr, p.curr_tok.pos, end_pos(p.curr_tok))
- }
- return type
- }
- parse_body :: proc(p: ^Parser) -> ^ast.Block_Stmt {
- prev_expr_level := p.expr_level
- defer p.expr_level = prev_expr_level
- p.expr_level = 0
- open := expect_token(p, .Open_Brace)
- stmts := parse_stmt_list(p)
- close := expect_token(p, .Close_Brace)
- bs := ast.new(ast.Block_Stmt, open.pos, end_pos(close))
- bs.open = open.pos
- bs.stmts = stmts
- bs.close = close.pos
- return bs
- }
- convert_stmt_to_body :: proc(p: ^Parser, stmt: ^ast.Stmt) -> ^ast.Stmt {
- #partial switch s in stmt.derived_stmt {
- case ^ast.Block_Stmt:
- error(p, stmt.pos, "expected a normal statement rather than a block statement")
- return stmt
- case ^ast.Empty_Stmt:
- error(p, stmt.pos, "expected a non-empty statement")
- }
- bs := ast.new(ast.Block_Stmt, stmt.pos, stmt)
- bs.open = stmt.pos
- bs.stmts = make([]^ast.Stmt, 1)
- bs.stmts[0] = stmt
- bs.close = stmt.end
- bs.uses_do = true
- return bs
- }
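- // Illustrative: 'if cond do foo()' wraps the call in a single-statement block so
- // later stages can treat 'do' bodies like braced ones:
- //
- // if cond do foo()   // body -> Block_Stmt{stmts = {<foo() stmt>}, uses_do = true}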
- new_ast_field :: proc(names: []^ast.Expr, type: ^ast.Expr, default_value: ^ast.Expr) -> ^ast.Field {
- pos, end: tokenizer.Pos
- if len(names) > 0 {
- pos = names[0].pos
- if default_value != nil {
- end = default_value.end
- } else if type != nil {
- end = type.end
- } else {
- end = names[len(names)-1].pos
- }
- } else {
- if type != nil {
- pos = type.pos
- } else if default_value != nil {
- pos = default_value.pos
- }
- if default_value != nil {
- end = default_value.end
- } else if type != nil {
- end = type.end
- }
- }
- field := ast.new(ast.Field, pos, end)
- field.names = names
- field.type = type
- field.default_value = default_value
- return field
- }
- Expr_And_Flags :: struct {
- expr: ^ast.Expr,
- flags: ast.Field_Flags,
- }
- convert_to_ident_list :: proc(p: ^Parser, list: []Expr_And_Flags, ignore_flags, allow_poly_names: bool) -> []^ast.Expr {
- idents := make([dynamic]^ast.Expr, 0, len(list))
- for ident, i in list {
- if !ignore_flags {
- if i != 0 {
- error(p, ident.expr.pos, "illegal use of prefixes in parameter list")
- }
- }
- id: ^ast.Expr = ident.expr
- #partial switch n in ident.expr.derived_expr {
- case ^ast.Ident:
- case ^ast.Bad_Expr:
- case ^ast.Poly_Type:
- if allow_poly_names {
- if n.specialization == nil {
- break
- } else {
- error(p, ident.expr.pos, "expected a polymorphic identifier without an specialization")
- }
- } else {
- error(p, ident.expr.pos, "expected a non-polymorphic identifier")
- }
- case:
- error(p, ident.expr.pos, "expected an identifier")
- id = ast.new(ast.Ident, ident.expr.pos, ident.expr.end)
- }
- append(&idents, id)
- }
- return idents[:]
- }
- is_token_field_prefix :: proc(p: ^Parser) -> ast.Field_Flag {
- #partial switch p.curr_tok.kind {
- case .EOF:
- return .Invalid
- case .Using:
- advance_token(p)
- return .Using
- case .Hash:
- tok: tokenizer.Token
- advance_token(p)
- tok = p.curr_tok
- advance_token(p)
- if tok.kind == .Ident {
- for kf in ast.field_hash_flag_strings {
- if kf.key == tok.text {
- return kf.flag
- }
- }
- }
- return .Unknown
- }
- return .Invalid
- }
- parse_field_prefixes :: proc(p: ^Parser) -> (flags: ast.Field_Flags) {
- counts: [len(ast.Field_Flag)]int
- for {
- kind := is_token_field_prefix(p)
- if kind == .Invalid {
- break
- }
- if kind == .Unknown {
- error(p, p.curr_tok.pos, "unknown prefix kind '#%s'", p.curr_tok.text)
- continue
- }
- counts[kind] += 1
- }
- for kind in ast.Field_Flag {
- count := counts[kind]
- if kind == .Invalid || kind == .Unknown {
- // Ignore
- } else {
- if count > 1 { error(p, p.curr_tok.pos, "multiple '%s' in this field list", ast.field_flag_strings[kind]) }
- if count > 0 { flags += {kind} }
- }
- }
- return
- }
- check_field_flag_prefixes :: proc(p: ^Parser, name_count: int, allowed_flags, set_flags: ast.Field_Flags) -> (flags: ast.Field_Flags) {
- flags = set_flags
- if name_count > 1 && .Using in flags {
- error(p, p.curr_tok.pos, "cannot apply 'using' to more than one of the same type")
- flags -= {.Using}
- }
- for flag in ast.Field_Flag {
- if flag not_in allowed_flags && flag in flags {
- #partial switch flag {
- case .Unknown, .Invalid:
- // ignore
- case .Tags, .Ellipsis, .Results, .Default_Parameters, .Typeid_Token:
- panic("Impossible prefixes")
- case:
- error(p, p.curr_tok.pos, "'%s' is not allowed within this field list", ast.field_flag_strings[flag])
- }
- flags -= {flag}
- }
- }
- return flags
- }
- parse_var_type :: proc(p: ^Parser, flags: ast.Field_Flags) -> ^ast.Expr {
- if .Ellipsis in flags && p.curr_tok.kind == .Ellipsis {
- tok := advance_token(p)
- type := parse_type_or_ident(p)
- if type == nil {
- error(p, tok.pos, "variadic field missing type after '..'")
- type = ast.new(ast.Bad_Expr, tok.pos, end_pos(tok))
- }
- e := ast.new(ast.Ellipsis, type.pos, type)
- e.tok = tok.kind
- e.expr = type
- return e
- }
- type: ^ast.Expr
- if .Typeid_Token in flags && p.curr_tok.kind == .Typeid {
- tok := expect_token(p, .Typeid)
- specialization: ^ast.Expr
- end := end_pos(tok)
- if allow_token(p, .Quo) {
- specialization = parse_type(p)
- end = specialization.end
- }
- ti := ast.new(ast.Typeid_Type, tok.pos, end)
- ti.tok = tok.kind
- ti.specialization = specialization
- type = ti
- } else {
- type = parse_type(p)
- }
- return type
- }
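- // Forms accepted by parse_var_type, depending on the flags (illustrative):
- //
- //     ..int          // variadic element type, when .Ellipsis is set
- //     typeid/T       // typeid with a specialization, when .Typeid_Token is set
- //     map[string]int // any ordinary type otherwise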
- check_procedure_name_list :: proc(p: ^Parser, names: []^ast.Expr) -> bool {
- if len(names) == 0 {
- return false
- }
- _, first_is_polymorphic := names[0].derived.(^ast.Poly_Type)
- any_polymorphic_names := first_is_polymorphic
- for i := 1; i < len(names); i += 1 {
- name := names[i]
- if first_is_polymorphic {
- if _, ok := name.derived.(^ast.Poly_Type); ok {
- any_polymorphic_names = true
- } else {
- error(p, name.pos, "mixture of polymorphic and non-polymorphic identifiers")
- return any_polymorphic_names
- }
- } else {
- if _, ok := name.derived.(^ast.Poly_Type); ok {
- any_polymorphic_names = true
- error(p, name.pos, "mixture of polymorphic and non-polymorphic identifiers")
- return any_polymorphic_names
- } else {
- // Okay
- }
- }
- }
- return any_polymorphic_names
- }
- parse_ident_list :: proc(p: ^Parser, allow_poly_names: bool) -> []^ast.Expr {
- list: [dynamic]^ast.Expr
- for {
- if allow_poly_names && p.curr_tok.kind == .Dollar {
- tok := expect_token(p, .Dollar)
- ident := parse_ident(p)
- if is_blank_ident(ident) {
- error(p, ident.pos, "invalid polymorphic type definition with a blank identifier")
- }
- poly_name := ast.new(ast.Poly_Type, tok.pos, ident)
- poly_name.type = ident
- append(&list, poly_name)
- } else {
- ident := parse_ident(p)
- append(&list, ident)
- }
- if p.curr_tok.kind != .Comma ||
- p.curr_tok.kind == .EOF {
- break
- }
- advance_token(p)
- }
- return list[:]
- }
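- // e.g. "a, b, c", or "$T, $Key" when allow_poly_names permits '$'-prefixed
- // polymorphic names (illustrative).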
- parse_field_list :: proc(p: ^Parser, follow: tokenizer.Token_Kind, allowed_flags: ast.Field_Flags) -> (field_list: ^ast.Field_List, total_name_count: int) {
- handle_field :: proc(p: ^Parser,
- seen_ellipsis: ^bool, fields: ^[dynamic]^ast.Field,
- docs: ^ast.Comment_Group,
- names: []^ast.Expr,
- allowed_flags, set_flags: ast.Field_Flags,
- ) -> bool {
- expect_field_separator :: proc(p: ^Parser, param: ^ast.Expr) -> bool {
- tok := p.curr_tok
- if allow_token(p, .Comma) {
- return true
- }
- if allow_token(p, .Semicolon) {
- if !tokenizer.is_newline(tok) {
- error(p, tok.pos, "expected a comma, got a semicolon")
- }
- return true
- }
- return false
- }
- is_type_ellipsis :: proc(type: ^ast.Expr) -> bool {
- if type == nil {
- return false
- }
- _, ok := type.derived.(^ast.Ellipsis)
- return ok
- }
- is_signature := (allowed_flags & ast.Field_Flags_Signature_Params) == ast.Field_Flags_Signature_Params
- any_polymorphic_names := check_procedure_name_list(p, names)
- flags := check_field_flag_prefixes(p, len(names), allowed_flags, set_flags)
- type: ^ast.Expr
- default_value: ^ast.Expr
- tag: tokenizer.Token
- expect_token_after(p, .Colon, "field list")
- if p.curr_tok.kind != .Eq {
- type = parse_var_type(p, allowed_flags)
- tt := ast.unparen_expr(type)
- if is_signature && !any_polymorphic_names {
- if ti, ok := tt.derived.(^ast.Typeid_Type); ok && ti.specialization != nil {
- error(p, tt.pos, "specialization of typeid is not allowed without polymorphic names")
- }
- }
- }
- if allow_token(p, .Eq) {
- default_value = parse_expr(p, false)
- if .Default_Parameters not_in allowed_flags {
- error(p, p.curr_tok.pos, "default parameters are only allowed for procedures")
- default_value = nil
- }
- }
- if default_value != nil && len(names) > 1 {
- error(p, p.curr_tok.pos, "default parameters can only be applied to single values")
- }
- if allowed_flags == ast.Field_Flags_Struct && default_value != nil {
- error(p, default_value.pos, "default parameters are not allowed for structs")
- default_value = nil
- }
- if is_type_ellipsis(type) {
- if seen_ellipsis^ {
- error(p, type.pos, "extra variadic parameter after ellipsis")
- }
- seen_ellipsis^ = true
- if len(names) != 1 {
- error(p, type.pos, "variadic parameters can only have one field name")
- }
- } else if seen_ellipsis^ && default_value == nil {
- error(p, p.curr_tok.pos, "extra parameter after ellipsis without a default value")
- }
- if type != nil && default_value == nil {
- if p.curr_tok.kind == .String {
- tag = expect_token(p, .String)
- if .Tags not_in allowed_flags {
- error(p, tag.pos, "Field tags are only allowed within structures")
- }
- }
- }
- ok := expect_field_separator(p, type)
- field := new_ast_field(names, type, default_value)
- field.tag = tag
- field.docs = docs
- field.flags = flags
- field.comment = p.line_comment
- append(fields, field)
- return ok
- }
- start_tok := p.curr_tok
- docs := p.lead_comment
- fields: [dynamic]^ast.Field
- list: [dynamic]Expr_And_Flags
- defer delete(list)
- seen_ellipsis := false
- allow_typeid_token := .Typeid_Token in allowed_flags
- allow_poly_names := allow_typeid_token
- for p.curr_tok.kind != follow &&
- p.curr_tok.kind != .Colon &&
- p.curr_tok.kind != .EOF {
- prefix_flags := parse_field_prefixes(p)
- param := parse_var_type(p, allowed_flags & {.Typeid_Token, .Ellipsis})
- if _, ok := param.derived.(^ast.Ellipsis); ok {
- if seen_ellipsis {
- error(p, param.pos, "extra variadic parameter after ellipsis")
- }
- seen_ellipsis = true
- } else if seen_ellipsis {
- error(p, param.pos, "extra parameter after ellipsis")
- }
- eaf := Expr_And_Flags{param, prefix_flags}
- append(&list, eaf)
- allow_token(p, .Comma) or_break
- }
- if p.curr_tok.kind != .Colon {
- for eaf in list {
- type := eaf.expr
- tok: tokenizer.Token
- tok.pos = type.pos
- if .Results not_in allowed_flags {
- tok.text = "_"
- }
- names := make([]^ast.Expr, 1)
- names[0] = ast.new(ast.Ident, tok.pos, end_pos(tok))
- #partial switch ident in names[0].derived_expr {
- case ^ast.Ident:
- ident.name = tok.text
- case:
- unreachable()
- }
- flags := check_field_flag_prefixes(p, len(list), allowed_flags, eaf.flags)
- field := new_ast_field(names, type, nil)
- field.docs = docs
- field.flags = flags
- field.comment = p.line_comment
- append(&fields, field)
- }
- } else {
- names := convert_to_ident_list(p, list[:], true, allow_poly_names)
- if len(names) == 0 {
- error(p, p.curr_tok.pos, "empty field declaration")
- }
- set_flags: ast.Field_Flags
- if len(list) > 0 {
- set_flags = list[0].flags
- }
- total_name_count += len(names)
- handle_field(p, &seen_ellipsis, &fields, docs, names, allowed_flags, set_flags)
- for p.curr_tok.kind != follow && p.curr_tok.kind != .EOF {
- docs = p.lead_comment
- set_flags = parse_field_prefixes(p)
- names = parse_ident_list(p, allow_poly_names)
- total_name_count += len(names)
- handle_field(p, &seen_ellipsis, &fields, docs, names, allowed_flags, set_flags) or_break
- }
- }
- field_list = ast.new(ast.Field_List, start_tok.pos, p.curr_tok.pos)
- field_list.list = fields[:]
- return
- }
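- // The two field-list shapes handled above, sketched on signatures
- // (illustrative):
- //
- //     proc(int, string)          // no ':' reached: anonymous, type-only fields
- //     proc(a, b: int, s: string) // ':' reached: named fields via handle_field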
- parse_results :: proc(p: ^Parser) -> (list: ^ast.Field_List, diverging: bool) {
- if !allow_token(p, .Arrow_Right) {
- return
- }
- if allow_token(p, .Not) {
- diverging = true
- return
- }
- prev_level := p.expr_level
- defer p.expr_level = prev_level
- if p.curr_tok.kind != .Open_Paren {
- type := parse_type(p)
- field := new_ast_field(nil, type, nil)
- list = ast.new(ast.Field_List, field.pos, field.end)
- list.list = make([]^ast.Field, 1)
- list.list[0] = field
- return
- }
- expect_token(p, .Open_Paren)
- list, _ = parse_field_list(p, .Close_Paren, ast.Field_Flags_Signature_Results)
- expect_token_after(p, .Close_Paren, "parameter list")
- return
- }
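- // Result forms covered here (illustrative):
- //
- //     -> int                 // bare type, wrapped in a one-field list
- //     -> (x: int, ok: bool)  // parenthesised field list
- //     -> !                   // diverging procedure: no result list at all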
- string_to_calling_convention :: proc(s: string) -> ast.Proc_Calling_Convention {
- if s[0] != '"' && s[0] != '`' {
- return nil
- }
- if len(s) == 2 {
- return nil
- }
- return s
- }
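- // Note the string still carries its quotes, so len(`"c"`) == 3 and an empty
- // pair of quotes (length 2) yields nil. Typical inputs (illustrative):
- //
- //     proc "c" (...)            // cc == `"c"`
- //     proc "contextless" (...)  // cc == `"contextless"`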
- parse_proc_tags :: proc(p: ^Parser) -> (tags: ast.Proc_Tags) {
- for p.curr_tok.kind == .Hash {
- _ = expect_token(p, .Hash)
- ident := expect_token(p, .Ident)
- switch ident.text {
- case "bounds_check": tags += {.Bounds_Check}
- case "no_bounds_check": tags += {.No_Bounds_Check}
- case "optional_ok": tags += {.Optional_Ok}
- case "optional_allocator_error": tags += {.Optional_Allocator_Error}
- case:
- }
- }
- if .Bounds_Check in tags && .No_Bounds_Check in tags {
- p.err(p.curr_tok.pos, "#bounds_check and #no_bounds_check applied to the same procedure type")
- }
- return
- }
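- // e.g. `proc() -> (int, bool) #optional_ok` sets .Optional_Ok; combining
- // #bounds_check with #no_bounds_check is rejected above (illustrative).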
- parse_proc_type :: proc(p: ^Parser, tok: tokenizer.Token) -> ^ast.Proc_Type {
- cc: ast.Proc_Calling_Convention
- if p.curr_tok.kind == .String {
- str := expect_token(p, .String)
- cc = string_to_calling_convention(str.text)
- if cc == nil {
- error(p, str.pos, "unknown calling convention '%s'", str.text)
- }
- }
- if cc == nil && p.in_foreign_block {
- cc = .Foreign_Block_Default
- }
- expect_token(p, .Open_Paren)
- p.expr_level += 1
- params, _ := parse_field_list(p, .Close_Paren, ast.Field_Flags_Signature_Params)
- p.expr_level -= 1
- expect_closing_parentheses_of_field_list(p)
- results, diverging := parse_results(p)
- is_generic := false
- loop: for param in params.list {
- if param.type != nil {
- if _, ok := param.type.derived.(^ast.Poly_Type); ok {
- is_generic = true
- break loop
- }
- for name in param.names {
- if _, ok := name.derived.(^ast.Poly_Type); ok {
- is_generic = true
- break loop
- }
- }
- }
- }
- end := end_pos(p.prev_tok)
- pt := ast.new(ast.Proc_Type, tok.pos, end)
- pt.tok = tok
- pt.calling_convention = cc
- pt.params = params
- pt.results = results
- pt.diverging = diverging
- pt.generic = is_generic
- return pt
- }
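- // Illustrative signatures and the fields they populate:
- //
- //     proc "c" (x: i32) -> i32   // calling_convention from the string literal
- //     proc(x: $T) -> T           // a $-name anywhere marks the type generic
- //     proc() -> !                // diverging = true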
- parse_inlining_operand :: proc(p: ^Parser, lhs: bool, tok: tokenizer.Token) -> ^ast.Expr {
- expr := parse_unary_expr(p, lhs)
- pi := ast.Proc_Inlining.None
- #partial switch tok.kind {
- case .Inline:
- pi = .Inline
- case .No_Inline:
- pi = .No_Inline
- case .Ident:
- switch tok.text {
- case "force_inline":
- pi = .Inline
- case "force_no_inline":
- pi = .No_Inline
- }
- }
- if expr != nil {
- #partial switch e in ast.strip_or_return_expr(expr).derived_expr {
- case ^ast.Proc_Lit:
- if e.inlining != .None && e.inlining != pi {
- error(p, expr.pos, "both 'inline' and 'no_inline' cannot be applied to a procedure literal")
- }
- e.inlining = pi
- return expr
- case ^ast.Call_Expr:
- if e.inlining != .None && e.inlining != pi {
- error(p, expr.pos, "both 'inline' and 'no_inline' cannot be applied to a procedure call")
- }
- e.inlining = pi
- return expr
- }
- }
- error(p, tok.pos, "'%s' must be followed by a procedure literal or call", tok.text)
- return ast.new(ast.Bad_Expr, tok.pos, expr)
- }
- parse_operand :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr {
- #partial switch p.curr_tok.kind {
- case .Ident:
- return parse_ident(p)
- case .Undef:
- tok := expect_token(p, .Undef)
- undef := ast.new(ast.Undef, tok.pos, end_pos(tok))
- undef.tok = tok.kind
- return undef
- case .Context:
- tok := expect_token(p, .Context)
- ctx := ast.new(ast.Implicit, tok.pos, end_pos(tok))
- ctx.tok = tok
- return ctx
- case .Integer, .Float, .Imag,
- .Rune, .String:
- tok := advance_token(p)
- bl := ast.new(ast.Basic_Lit, tok.pos, end_pos(tok))
- bl.tok = tok
- return bl
- case .Open_Brace:
- if !lhs {
- return parse_literal_value(p, nil)
- }
- case .Open_Paren:
- open := expect_token(p, .Open_Paren)
- p.expr_level += 1
- expr := parse_expr(p, false)
- p.expr_level -= 1
- close := expect_token(p, .Close_Paren)
- pe := ast.new(ast.Paren_Expr, open.pos, end_pos(close))
- pe.open = open.pos
- pe.expr = expr
- pe.close = close.pos
- return pe
- case .Distinct:
- tok := advance_token(p)
- type := parse_type(p)
- dt := ast.new(ast.Distinct_Type, tok.pos, type)
- dt.tok = tok.kind
- dt.type = type
- return dt
- case .Hash:
- tok := expect_token(p, .Hash)
- name := expect_token(p, .Ident)
- switch name.text {
- case "type":
- type := parse_type(p)
- hp := ast.new(ast.Helper_Type, tok.pos, type)
- hp.tok = tok.kind
- hp.type = type
- return hp
- case "file", "directory", "line", "procedure", "caller_location":
- bd := ast.new(ast.Basic_Directive, tok.pos, end_pos(name))
- bd.tok = tok
- bd.name = name.text
- return bd
- case "caller_expression":
- bd := ast.new(ast.Basic_Directive, tok.pos, end_pos(name))
- bd.tok = tok
- bd.name = name.text
- if peek_token_kind(p, .Open_Paren) {
- return parse_call_expr(p, bd)
- }
- return bd
- case "location", "exists", "load", "load_directory", "load_hash", "hash", "assert", "panic", "defined", "config":
- bd := ast.new(ast.Basic_Directive, tok.pos, end_pos(name))
- bd.tok = tok
- bd.name = name.text
- return parse_call_expr(p, bd)
- case "soa":
- bd := ast.new(ast.Basic_Directive, tok.pos, end_pos(name))
- bd.tok = tok
- bd.name = name.text
- original_type := parse_type(p)
- type := ast.unparen_expr(original_type)
- #partial switch t in type.derived_expr {
- case ^ast.Array_Type: t.tag = bd
- case ^ast.Dynamic_Array_Type: t.tag = bd
- case ^ast.Pointer_Type: t.tag = bd
- case:
- error(p, original_type.pos, "expected an array or pointer type after #%s", name.text)
- }
- return original_type
- case "simd":
- bd := ast.new(ast.Basic_Directive, tok.pos, end_pos(name))
- bd.tok = tok
- bd.name = name.text
- original_type := parse_type(p)
- type := ast.unparen_expr(original_type)
- #partial switch t in type.derived_expr {
- case ^ast.Array_Type: t.tag = bd
- case:
- error(p, original_type.pos, "expected an array type after #%s", name.text)
- }
- return original_type
- case "partial":
- tag := ast.new(ast.Basic_Directive, tok.pos, end_pos(name))
- tag.tok = tok
- tag.name = name.text
- original_expr := parse_expr(p, lhs)
- expr := ast.unparen_expr(original_expr)
- #partial switch t in expr.derived_expr {
- case ^ast.Comp_Lit:
- t.tag = tag
- case ^ast.Array_Type:
- t.tag = tag
- error(p, tok.pos, "#%s has been replaced with #sparse for non-contiguous enumerated array types", name.text)
- case:
- error(p, tok.pos, "expected a compound literal after #%s", name.text)
- }
- return original_expr
- case "sparse":
- tag := ast.new(ast.Basic_Directive, tok.pos, end_pos(name))
- tag.tok = tok
- tag.name = name.text
- original_type := parse_type(p)
- type := ast.unparen_expr(original_type)
- #partial switch t in type.derived_expr {
- case ^ast.Array_Type:
- t.tag = tag
- case:
- error(p, tok.pos, "expected an enumerated array type after #%s", name.text)
- }
- return original_type
- case "bounds_check", "no_bounds_check":
- operand := parse_expr(p, lhs)
- switch name.text {
- case "bounds_check":
- operand.state_flags += {.Bounds_Check}
- if .No_Bounds_Check in operand.state_flags {
- error(p, name.pos, "#bounds_check and #no_bounds_check cannot be applied together")
- }
- case "no_bounds_check":
- operand.state_flags += {.No_Bounds_Check}
- if .Bounds_Check in operand.state_flags {
- error(p, name.pos, "#bounds_check and #no_bounds_check cannot be applied together")
- }
- case: unimplemented()
- }
- return operand
- case "relative":
- tag := ast.new(ast.Basic_Directive, tok.pos, end_pos(name))
- tag.tok = tok
- tag.name = name.text
- tag_call := parse_call_expr(p, tag)
- type := parse_type(p)
- rt := ast.new(ast.Relative_Type, tok.pos, type)
- rt.tag = tag_call
- rt.type = type
- return rt
- case "force_inline", "force_no_inline":
- return parse_inlining_operand(p, lhs, name)
- case:
- expr := parse_expr(p, lhs)
- end := expr.end if expr != nil else end_pos(tok)
- te := ast.new(ast.Tag_Expr, tok.pos, end)
- te.op = tok
- te.name = name.text
- te.expr = expr
- return te
- }
- case .Inline, .No_Inline:
- tok := advance_token(p)
- return parse_inlining_operand(p, lhs, tok)
- case .Proc:
- tok := expect_token(p, .Proc)
- if p.curr_tok.kind == .Open_Brace {
- open := expect_token(p, .Open_Brace)
- args: [dynamic]^ast.Expr
- for p.curr_tok.kind != .Close_Brace &&
- p.curr_tok.kind != .EOF {
- elem := parse_expr(p, false)
- append(&args, elem)
- allow_token(p, .Comma) or_break
- }
- close := expect_token(p, .Close_Brace)
- if len(args) == 0 {
- error(p, tok.pos, "expected at least 1 argument in procedure group")
- }
- pg := ast.new(ast.Proc_Group, tok.pos, end_pos(close))
- pg.tok = tok
- pg.open = open.pos
- pg.args = args[:]
- pg.close = close.pos
- return pg
- }
- type := parse_proc_type(p, tok)
- tags: ast.Proc_Tags
- where_token: tokenizer.Token
- where_clauses: []^ast.Expr
- skip_possible_newline_for_literal(p)
- if p.curr_tok.kind == .Where {
- where_token = expect_token(p, .Where)
- prev_level := p.expr_level
- p.expr_level = -1
- where_clauses = parse_rhs_expr_list(p)
- p.expr_level = prev_level
- }
- tags = parse_proc_tags(p)
- type.tags = tags
- if p.allow_type && p.expr_level < 0 {
- if where_token.kind != .Invalid {
- error(p, where_token.pos, "'where' clauses are not allowed on procedure types")
- }
- return type
- }
- body: ^ast.Stmt
- skip_possible_newline_for_literal(p)
- if allow_token(p, .Undef) {
- body = nil
- if where_token.kind != .Invalid {
- error(p, where_token.pos, "'where' clauses are not allowed on procedure literals without a defined body (replaced with ---")
- }
- } else if p.curr_tok.kind == .Open_Brace {
- prev_proc := p.curr_proc
- p.curr_proc = type
- body = parse_body(p)
- p.curr_proc = prev_proc
- } else if allow_token(p, .Do) {
- prev_proc := p.curr_proc
- p.curr_proc = type
- body = convert_stmt_to_body(p, parse_stmt(p))
- p.curr_proc = prev_proc
- if type.pos.line != body.pos.line {
- error(p, body.pos, "the body of a 'do' must be on the same line as the signature")
- }
- } else {
- return type
- }
- pl := ast.new(ast.Proc_Lit, tok.pos, end_pos(p.prev_tok))
- pl.type = type
- pl.body = body
- pl.tags = tags
- pl.where_token = where_token
- pl.where_clauses = where_clauses
- return pl
- case .Dollar:
- tok := advance_token(p)
- type := parse_ident(p)
- end := type.end
- specialization: ^ast.Expr
- if allow_token(p, .Quo) {
- specialization = parse_type(p)
- end = specialization.end
- }
- if is_blank_ident(type) {
- error(p, type.pos, "invalid polymorphic type definition with a blank identifier")
- }
- pt := ast.new(ast.Poly_Type, tok.pos, end)
- pt.dollar = tok.pos
- pt.type = type
- pt.specialization = specialization
- return pt
- case .Typeid:
- tok := advance_token(p)
- ti := ast.new(ast.Typeid_Type, tok.pos, end_pos(tok))
- ti.tok = tok.kind
- ti.specialization = nil
- return ti
- case .Pointer:
- tok := expect_token(p, .Pointer)
- elem := parse_type(p)
- ptr := ast.new(ast.Pointer_Type, tok.pos, elem)
- ptr.pointer = tok.pos
- ptr.elem = elem
- return ptr
- case .Open_Bracket:
- open := expect_token(p, .Open_Bracket)
- count: ^ast.Expr
- #partial switch p.curr_tok.kind {
- case .Pointer:
- tok := expect_token(p, .Pointer)
- close := expect_token(p, .Close_Bracket)
- elem := parse_type(p)
- t := ast.new(ast.Multi_Pointer_Type, open.pos, elem)
- t.open = open.pos
- t.pointer = tok.pos
- t.close = close.pos
- t.elem = elem
- return t
- case .Dynamic:
- tok := expect_token(p, .Dynamic)
- close := expect_token(p, .Close_Bracket)
- elem := parse_type(p)
- da := ast.new(ast.Dynamic_Array_Type, open.pos, elem)
- da.open = open.pos
- da.dynamic_pos = tok.pos
- da.close = close.pos
- da.elem = elem
- return da
- case .Question:
- tok := expect_token(p, .Question)
- q := ast.new(ast.Unary_Expr, tok.pos, end_pos(tok))
- q.op = tok
- count = q
- case .Close_Bracket:
- // handle below
- case:
- p.expr_level += 1
- count = parse_expr(p, false)
- p.expr_level -= 1
- }
- close := expect_token(p, .Close_Bracket)
- elem := parse_type(p)
- at := ast.new(ast.Array_Type, open.pos, elem)
- at.open = open.pos
- at.len = count
- at.close = close.pos
- at.elem = elem
- return at
- case .Map:
- tok := expect_token(p, .Map)
- expect_token(p, .Open_Bracket)
- key := parse_type(p)
- expect_token(p, .Close_Bracket)
- value := parse_type(p)
- mt := ast.new(ast.Map_Type, tok.pos, value)
- mt.tok_pos = tok.pos
- mt.key = key
- mt.value = value
- return mt
- case .Struct:
- tok := expect_token(p, .Struct)
- poly_params: ^ast.Field_List
- align: ^ast.Expr
- min_field_align: ^ast.Expr
- max_field_align: ^ast.Expr
- is_packed: bool
- is_raw_union: bool
- is_no_copy: bool
- fields: ^ast.Field_List
- name_count: int
- if allow_token(p, .Open_Paren) {
- param_count: int
- poly_params, param_count = parse_field_list(p, .Close_Paren, ast.Field_Flags_Record_Poly_Params)
- if param_count == 0 {
- error(p, poly_params.pos, "expected at least 1 polymorphic parameter")
- poly_params = nil
- }
- expect_token_after(p, .Close_Paren, "parameter list")
- }
- prev_level := p.expr_level
- p.expr_level = -1
- for allow_token(p, .Hash) {
- tag := expect_token_after(p, .Ident, "#")
- switch tag.text {
- case "packed":
- if is_packed {
- error(p, tag.pos, "duplicate struct tag '#%s'", tag.text)
- }
- is_packed = true
- case "align":
- if align != nil {
- error(p, tag.pos, "duplicate struct tag '#%s'", tag.text)
- }
- align = parse_expr(p, true)
- case "field_align":
- if min_field_align != nil {
- error(p, tag.pos, "duplicate struct tag '#%s'", tag.text)
- }
- warn(p, tag.pos, "#field_align has been deprecated in favour of #min_field_align")
- min_field_align = parse_expr(p, true)
- case "min_field_align":
- if min_field_align != nil {
- error(p, tag.pos, "duplicate struct tag '#%s'", tag.text)
- }
- min_field_align = parse_expr(p, true)
- case "max_field_align":
- if max_field_align != nil {
- error(p, tag.pos, "duplicate struct tag '#%s'", tag.text)
- }
- max_field_align = parse_expr(p, true)
- case "raw_union":
- if is_raw_union {
- error(p, tag.pos, "duplicate struct tag '#%s'", tag.text)
- }
- is_raw_union = true
- case "no_copy":
- if is_no_copy {
- error(p, tag.pos, "duplicate struct tag '#%s'", tag.text)
- }
- is_no_copy = true
- case:
- error(p, tag.pos, "invalid struct tag '#%s", tag.text)
- }
- }
- p.expr_level = prev_level
- if is_raw_union && is_packed {
- is_packed = false
- error(p, tok.pos, "'#raw_union' cannot also be '#packed")
- }
- where_token: tokenizer.Token
- where_clauses: []^ast.Expr
- skip_possible_newline_for_literal(p)
- if p.curr_tok.kind == .Where {
- where_token = expect_token(p, .Where)
- where_prev_level := p.expr_level
- p.expr_level = -1
- where_clauses = parse_rhs_expr_list(p)
- p.expr_level = where_prev_level
- }
- skip_possible_newline_for_literal(p)
- expect_token(p, .Open_Brace)
- fields, name_count = parse_field_list(p, .Close_Brace, ast.Field_Flags_Struct)
- close := expect_closing_brace_of_field_list(p)
- st := ast.new(ast.Struct_Type, tok.pos, end_pos(close))
- st.poly_params = poly_params
- st.align = align
- st.min_field_align = min_field_align
- st.max_field_align = max_field_align
- st.is_packed = is_packed
- st.is_raw_union = is_raw_union
- st.is_no_copy = is_no_copy
- st.fields = fields
- st.name_count = name_count
- st.where_token = where_token
- st.where_clauses = where_clauses
- return st
- case .Union:
- tok := expect_token(p, .Union)
- poly_params: ^ast.Field_List
- align: ^ast.Expr
- is_no_nil: bool
- is_shared_nil: bool
- if allow_token(p, .Open_Paren) {
- param_count: int
- poly_params, param_count = parse_field_list(p, .Close_Paren, ast.Field_Flags_Record_Poly_Params)
- if param_count == 0 {
- error(p, poly_params.pos, "expected at least 1 polymorphic parameter")
- poly_params = nil
- }
- expect_token_after(p, .Close_Paren, "parameter list")
- }
- prev_level := p.expr_level
- p.expr_level = -1
- for allow_token(p, .Hash) {
- tag := expect_token_after(p, .Ident, "#")
- switch tag.text {
- case "align":
- if align != nil {
- error(p, tag.pos, "duplicate union tag '#%s'", tag.text)
- }
- align = parse_expr(p, true)
- case "maybe":
- error(p, tag.pos, "#%s functionality has now been merged with standard 'union' functionality", tag.text)
- case "no_nil":
- if is_no_nil {
- error(p, tag.pos, "duplicate union tag '#%s'", tag.text)
- }
- is_no_nil = true
- case "shared_nil":
- if is_shared_nil {
- error(p, tag.pos, "duplicate union tag '#%s'", tag.text)
- }
- is_shared_nil = true
- case:
- error(p, tag.pos, "invalid union tag '#%s", tag.text)
- }
- }
- p.expr_level = prev_level
- if is_no_nil && is_shared_nil {
- error(p, p.curr_tok.pos, "#shared_nil and #no_nil cannot be applied together")
- }
- union_kind := ast.Union_Type_Kind.Normal
- switch {
- case is_no_nil: union_kind = .no_nil
- case is_shared_nil: union_kind = .shared_nil
- }
- where_token: tokenizer.Token
- where_clauses: []^ast.Expr
- skip_possible_newline_for_literal(p)
- if p.curr_tok.kind == .Where {
- where_token = expect_token(p, .Where)
- where_prev_level := p.expr_level
- p.expr_level = -1
- where_clauses = parse_rhs_expr_list(p)
- p.expr_level = where_prev_level
- }
- skip_possible_newline_for_literal(p)
- expect_token_after(p, .Open_Brace, "union")
- variants: [dynamic]^ast.Expr
- for p.curr_tok.kind != .Close_Brace && p.curr_tok.kind != .EOF {
- type := parse_type(p)
- if _, ok := type.derived.(^ast.Bad_Expr); !ok {
- append(&variants, type)
- }
- allow_token(p, .Comma) or_break
- }
- close := expect_closing_brace_of_field_list(p)
- ut := ast.new(ast.Union_Type, tok.pos, end_pos(close))
- ut.poly_params = poly_params
- ut.variants = variants[:]
- ut.align = align
- ut.where_token = where_token
- ut.where_clauses = where_clauses
- ut.kind = union_kind
- return ut
- case .Enum:
- tok := expect_token(p, .Enum)
- base_type: ^ast.Expr
- if p.curr_tok.kind != .Open_Brace {
- base_type = parse_type(p)
- }
- skip_possible_newline_for_literal(p)
- open := expect_token(p, .Open_Brace)
- fields := parse_elem_list(p)
- close := expect_closing_brace_of_field_list(p)
- et := ast.new(ast.Enum_Type, tok.pos, end_pos(close))
- et.base_type = base_type
- et.open = open.pos
- et.fields = fields
- et.close = close.pos
- return et
- case .Bit_Set:
- tok := expect_token(p, .Bit_Set)
- open := expect_token(p, .Open_Bracket)
- elem, underlying: ^ast.Expr
- prev_allow_range := p.allow_range
- p.allow_range = true
- elem = parse_expr(p, false)
- p.allow_range = prev_allow_range
- if allow_token(p, .Semicolon) {
- underlying = parse_type(p)
- }
- close := expect_token(p, .Close_Bracket)
- bst := ast.new(ast.Bit_Set_Type, tok.pos, end_pos(close))
- bst.tok_pos = tok.pos
- bst.open = open.pos
- bst.elem = elem
- bst.underlying = underlying
- bst.close = close.pos
- return bst
-
- case .Matrix:
- tok := expect_token(p, .Matrix)
- expect_token(p, .Open_Bracket)
- row_count := parse_expr(p, false)
- expect_token(p, .Comma)
- column_count := parse_expr(p, false)
- expect_token(p, .Close_Bracket)
- elem := parse_type(p)
- mt := ast.new(ast.Matrix_Type, tok.pos, elem)
- mt.tok_pos = tok.pos
- mt.row_count = row_count
- mt.column_count = column_count
- mt.elem = elem
- return mt
-
- case .Bit_Field:
- tok := expect_token(p, .Bit_Field)
- backing_type := parse_type_or_ident(p)
- if backing_type == nil {
- token := advance_token(p)
- error(p, token.pos, "Expected a backing type for a 'bit_field'")
- }
- skip_possible_newline_for_literal(p)
- open := expect_token_after(p, .Open_Brace, "bit_field")
- fields: [dynamic]^ast.Bit_Field_Field
- for p.curr_tok.kind != .Close_Brace && p.curr_tok.kind != .EOF {
- name := parse_ident(p)
- expect_token(p, .Colon)
- type := parse_type(p)
- expect_token(p, .Or)
- bit_size := parse_expr(p, true)
- tag: tokenizer.Token
- if p.curr_tok.kind == .String {
- tag = expect_token(p, .String)
- }
- field := ast.new(ast.Bit_Field_Field, name.pos, bit_size)
- field.name = name
- field.type = type
- field.bit_size = bit_size
- field.tag = tag
- append(&fields, field)
- allow_token(p, .Comma) or_break
- }
- close := expect_closing_brace_of_field_list(p)
- bf := ast.new(ast.Bit_Field_Type, tok.pos, end_pos(close))
- bf.tok_pos = tok.pos
- bf.backing_type = backing_type
- bf.open = open.pos
- bf.fields = fields[:]
- bf.close = close.pos
- return bf
- case .Asm:
- tok := expect_token(p, .Asm)
- param_types: [dynamic]^ast.Expr
- return_type: ^ast.Expr
- if allow_token(p, .Open_Paren) {
- for p.curr_tok.kind != .Close_Paren && p.curr_tok.kind != .EOF {
- t := parse_type(p)
- append(&param_types, t)
- if p.curr_tok.kind != .Comma ||
- p.curr_tok.kind == .EOF {
- break
- }
- advance_token(p)
- }
- expect_token(p, .Close_Paren)
- if allow_token(p, .Arrow_Right) {
- return_type = parse_type(p)
- }
- }
- has_side_effects := false
- is_align_stack := false
- dialect := ast.Inline_Asm_Dialect.Default
- for allow_token(p, .Hash) {
- if p.curr_tok.kind == .Ident {
- name := advance_token(p)
- switch name.text {
- case "side_effects":
- if has_side_effects {
- error(p, tok.pos, "duplicate directive on inline asm expression: '#side_effects'")
- }
- has_side_effects = true
- case "align_stack":
- if is_align_stack {
- error(p, tok.pos, "duplicate directive on inline asm expression: '#align_stack'")
- }
- is_align_stack = true
- case "att":
- if dialect == .ATT {
- error(p, tok.pos, "duplicate directive on inline asm expression: '#att'")
- } else if dialect != .Default {
- error(p, tok.pos, "conflicting asm dialects")
- } else {
- dialect = .ATT
- }
- case "intel":
- if dialect == .Intel {
- error(p, tok.pos, "duplicate directive on inline asm expression: '#intel'")
- } else if dialect != .Default {
- error(p, tok.pos, "conflicting asm dialects")
- } else {
- dialect = .Intel
- }
- }
- } else {
- error(p, p.curr_tok.pos, "expected an identifier after hash")
- }
- }
- skip_possible_newline_for_literal(p)
- open := expect_token(p, .Open_Brace)
- asm_string := parse_expr(p, false)
- expect_token(p, .Comma)
- constraints_string := parse_expr(p, false)
- allow_token(p, .Comma)
- close := expect_closing_brace_of_field_list(p)
- e := ast.new(ast.Inline_Asm_Expr, tok.pos, end_pos(close))
- e.tok = tok
- e.param_types = param_types[:]
- e.return_type = return_type
- e.constraints_string = constraints_string
- e.has_side_effects = has_side_effects
- e.is_align_stack = is_align_stack
- e.dialect = dialect
- e.open = open.pos
- e.asm_string = asm_string
- e.close = close.pos
- return e
- }
- return nil
- }
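- // A sample of the operand forms dispatched in the switch above
- // (illustrative, not exhaustive; Vec3 and Flag stand for user types):
- //
- //     struct #packed { x: u8 }    bit_set[Flag; u8]    matrix[2, 3]f32
- //     #soa[]Vec3                  map[string]int       proc(x: int) -> int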
- is_literal_type :: proc(expr: ^ast.Expr) -> bool {
- val := ast.unparen_expr(expr)
- if val == nil {
- return false
- }
- #partial switch _ in val.derived_expr {
- case ^ast.Bad_Expr,
- ^ast.Ident,
- ^ast.Selector_Expr,
- ^ast.Array_Type,
- ^ast.Struct_Type,
- ^ast.Union_Type,
- ^ast.Enum_Type,
- ^ast.Dynamic_Array_Type,
- ^ast.Map_Type,
- ^ast.Bit_Set_Type,
- ^ast.Matrix_Type,
- ^ast.Call_Expr,
- ^ast.Bit_Field_Type:
- return true
- }
- return false
- }
- parse_value :: proc(p: ^Parser) -> ^ast.Expr {
- if p.curr_tok.kind == .Open_Brace {
- return parse_literal_value(p, nil)
- }
- prev_allow_range := p.allow_range
- defer p.allow_range = prev_allow_range
- p.allow_range = true
- return parse_expr(p, false)
- }
- parse_elem_list :: proc(p: ^Parser) -> []^ast.Expr {
- elems: [dynamic]^ast.Expr
- for p.curr_tok.kind != .Close_Brace && p.curr_tok.kind != .EOF {
- elem := parse_value(p)
- if p.curr_tok.kind == .Eq {
- eq := expect_token(p, .Eq)
- value := parse_value(p)
- fv := ast.new(ast.Field_Value, elem.pos, value)
- fv.field = elem
- fv.sep = eq.pos
- fv.value = value
- elem = fv
- }
- append(&elems, elem)
- allow_token(p, .Comma) or_break
- }
- return elems[:]
- }
- parse_literal_value :: proc(p: ^Parser, type: ^ast.Expr) -> ^ast.Comp_Lit {
- elems: []^ast.Expr
- open := expect_token(p, .Open_Brace)
- p.expr_level += 1
- if p.curr_tok.kind != .Close_Brace {
- elems = parse_elem_list(p)
- }
- p.expr_level -= 1
- skip_possible_newline(p)
- close := expect_closing_brace_of_field_list(p)
- pos := type.pos if type != nil else open.pos
- lit := ast.new(ast.Comp_Lit, pos, end_pos(close))
- lit.type = type
- lit.open = open.pos
- lit.elems = elems
- lit.close = close.pos
- return lit
- }
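- // Compound-literal shapes fed through parse_elem_list (illustrative):
- //
- //     {1, 2, 3}            // positional elements
- //     Point{x = 1, y = 2}  // 'field = value' pairs; the type was parsed earlier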
- parse_call_expr :: proc(p: ^Parser, operand: ^ast.Expr) -> ^ast.Expr {
- args: [dynamic]^ast.Expr
- ellipsis: tokenizer.Token
- p.expr_level += 1
- open := expect_token(p, .Open_Paren)
- seen_ellipsis := false
- for p.curr_tok.kind != .Close_Paren &&
- p.curr_tok.kind != .EOF {
- if p.curr_tok.kind == .Comma {
- error(p, p.curr_tok.pos, "expected an expression not ,")
- } else if p.curr_tok.kind == .Eq {
- error(p, p.curr_tok.pos, "expected an expression not =")
- }
- prefix_ellipsis := false
- if p.curr_tok.kind == .Ellipsis {
- prefix_ellipsis = true
- ellipsis = expect_token(p, .Ellipsis)
- }
- arg := parse_expr(p, false)
- if p.curr_tok.kind == .Eq {
- eq := expect_token(p, .Eq)
- if prefix_ellipsis {
- error(p, ellipsis.pos, "'..' must be applied to a value rather than a field name")
- }
- value := parse_value(p)
- fv := ast.new(ast.Field_Value, arg.pos, value)
- fv.field = arg
- fv.sep = eq.pos
- fv.value = value
- arg = fv
- } else if seen_ellipsis {
- error(p, arg.pos, "Positional arguments are not allowed after '..'")
- }
- append(&args, arg)
- if ellipsis.pos.line != 0 {
- seen_ellipsis = true
- }
- allow_token(p, .Comma) or_break
- }
- close := expect_closing_token_of_field_list(p, .Close_Paren, "argument list")
- p.expr_level -= 1
- ce := ast.new(ast.Call_Expr, operand.pos, end_pos(close))
- ce.expr = operand
- ce.open = open.pos
- ce.args = args[:]
- ce.ellipsis = ellipsis
- ce.close = close.pos
- o := ast.unparen_expr(operand)
- if se, ok := o.derived.(^ast.Selector_Expr); ok && se.op.kind == .Arrow_Right {
- sce := ast.new(ast.Selector_Call_Expr, ce.pos, ce)
- sce.expr = o
- sce.call = ce
- return sce
- }
- return ce
- }
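- // Argument forms handled above (illustrative):
- //
- //     f(1, 2)          // positional
- //     f(x = 1, y = 2)  // named arguments, stored as Field_Value nodes
- //     f(..args)        // '..' spread; later positional arguments are rejected
- //
- // A call through '->' (e.g. obj->method(x)) is wrapped in Selector_Call_Expr.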
- parse_atom_expr :: proc(p: ^Parser, value: ^ast.Expr, lhs: bool) -> (operand: ^ast.Expr) {
- operand = value
- if operand == nil {
- if p.allow_type {
- return nil
- }
- error(p, p.curr_tok.pos, "expected an operand")
- fix_advance_to_next_stmt(p)
- be := ast.new(ast.Bad_Expr, p.curr_tok.pos, end_pos(p.curr_tok))
- operand = be
- }
- loop := true
- is_lhs := lhs
- for loop {
- #partial switch p.curr_tok.kind {
- case:
- loop = false
- case .Open_Paren:
- operand = parse_call_expr(p, operand)
- case .Open_Bracket:
- prev_allow_range := p.allow_range
- defer p.allow_range = prev_allow_range
- p.allow_range = false
- indices: [2]^ast.Expr
- interval: tokenizer.Token
- is_slice_op := false
- p.expr_level += 1
- open := expect_token(p, .Open_Bracket)
- #partial switch p.curr_tok.kind {
- case .Colon, .Ellipsis, .Range_Half, .Range_Full:
- // NOTE(bill): Do not err yet
- break
- case:
- indices[0] = parse_expr(p, false)
- }
- #partial switch p.curr_tok.kind {
- case .Ellipsis, .Range_Half, .Range_Full:
- error(p, p.curr_tok.pos, "expected a colon, not a range")
- fallthrough
- case .Colon, .Comma/*matrix index*/:
- interval = advance_token(p)
- is_slice_op = true
- if p.curr_tok.kind != .Close_Bracket && p.curr_tok.kind != .EOF {
- indices[1] = parse_expr(p, false)
- }
- }
- close := expect_token(p, .Close_Bracket)
- p.expr_level -= 1
- if is_slice_op {
- if interval.kind == .Comma {
- if indices[0] == nil || indices[1] == nil {
- error(p, p.curr_tok.pos, "matrix index expressions require both row and column indices")
- }
- se := ast.new(ast.Matrix_Index_Expr, operand.pos, end_pos(close))
- se.expr = operand
- se.open = open.pos
- se.row_index = indices[0]
- se.column_index = indices[1]
- se.close = close.pos
- operand = se
- } else {
- se := ast.new(ast.Slice_Expr, operand.pos, end_pos(close))
- se.expr = operand
- se.open = open.pos
- se.low = indices[0]
- se.interval = interval
- se.high = indices[1]
- se.close = close.pos
- operand = se
- }
- } else {
- ie := ast.new(ast.Index_Expr, operand.pos, end_pos(close))
- ie.expr = operand
- ie.open = open.pos
- ie.index = indices[0]
- ie.close = close.pos
- operand = ie
- }
- case .Period:
- tok := expect_token(p, .Period)
- #partial switch p.curr_tok.kind {
- case .Ident:
- field := parse_ident(p)
- sel := ast.new(ast.Selector_Expr, operand.pos, field)
- sel.expr = operand
- sel.op = tok
- sel.field = field
- operand = sel
- case .Open_Paren:
- open := expect_token(p, .Open_Paren)
- type := parse_type(p)
- close := expect_token(p, .Close_Paren)
- ta := ast.new(ast.Type_Assertion, operand.pos, end_pos(close))
- ta.expr = operand
- ta.open = open.pos
- ta.type = type
- ta.close = close.pos
- operand = ta
- case .Question:
- question := expect_token(p, .Question)
- type := ast.new(ast.Unary_Expr, question.pos, end_pos(question))
- type.op = question
- type.expr = nil
- ta := ast.new(ast.Type_Assertion, operand.pos, type)
- ta.expr = operand
- ta.type = type
- operand = ta
- case:
- error(p, p.curr_tok.pos, "expected a selector")
- advance_token(p)
- operand = ast.new(ast.Bad_Expr, operand.pos, end_pos(tok))
- }
- case .Arrow_Right:
- tok := expect_token(p, .Arrow_Right)
- #partial switch p.curr_tok.kind {
- case .Ident:
- field := parse_ident(p)
- sel := ast.new(ast.Selector_Expr, operand.pos, field)
- sel.expr = operand
- sel.op = tok
- sel.field = field
- operand = sel
- case:
- error(p, p.curr_tok.pos, "expected a selector")
- advance_token(p)
- operand = ast.new(ast.Bad_Expr, operand.pos, end_pos(tok))
- }
- case .Pointer:
- op := expect_token(p, .Pointer)
- deref := ast.new(ast.Deref_Expr, operand.pos, end_pos(op))
- deref.expr = operand
- deref.op = op
- operand = deref
- case .Or_Return:
- token := expect_token(p, .Or_Return)
- oe := ast.new(ast.Or_Return_Expr, operand.pos, end_pos(token))
- oe.expr = operand
- oe.token = token
- operand = oe
- case .Or_Break, .Or_Continue:
- token := advance_token(p)
- label: ^ast.Ident
- end := end_pos(token)
- if p.curr_tok.kind == .Ident {
- end = end_pos(p.curr_tok)
- label = parse_ident(p)
- }
- oe := ast.new(ast.Or_Branch_Expr, operand.pos, end)
- oe.expr = operand
- oe.token = token
- oe.label = label
- operand = oe
- case .Open_Brace:
- if !is_lhs && is_literal_type(operand) && p.expr_level >= 0 {
- operand = parse_literal_value(p, operand)
- } else {
- loop = false
- }
- case .Increment, .Decrement:
- if !lhs {
- tok := advance_token(p)
- error(p, tok.pos, "postfix '%s' operator is not supported", tok.text)
- } else {
- loop = false
- }
- }
- is_lhs = false
- }
- return operand
- }
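- // Postfix chains consumed by this loop (illustrative):
- //
- //     x.field[i](args)^   // selector, index, call, pointer dereference
- //     m[row, col]         // matrix indexing (comma interval)
- //     s[lo:hi]            // slicing
- //     try() or_return     // or_return / or_break / or_continue suffixes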
- parse_expr :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr {
- return parse_binary_expr(p, lhs, 0+1)
- }
- parse_unary_expr :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr {
- #partial switch p.curr_tok.kind {
- case .Transmute, .Cast:
- tok := advance_token(p)
- open := expect_token(p, .Open_Paren)
- type := parse_type(p)
- close := expect_token(p, .Close_Paren)
- expr := parse_unary_expr(p, lhs)
- tc := ast.new(ast.Type_Cast, tok.pos, expr)
- tc.tok = tok
- tc.open = open.pos
- tc.type = type
- tc.close = close.pos
- tc.expr = expr
- return tc
- case .Auto_Cast:
- op := advance_token(p)
- expr := parse_unary_expr(p, lhs)
- ac := ast.new(ast.Auto_Cast, op.pos, expr)
- ac.op = op
- ac.expr = expr
- return ac
- case .Add, .Sub,
- .Not, .Xor,
- .And:
- op := advance_token(p)
- expr := parse_unary_expr(p, lhs)
-
- ue := ast.new(ast.Unary_Expr, op.pos, expr)
- ue.op = op
- ue.expr = expr
- return ue
- case .Increment, .Decrement:
- op := advance_token(p)
- error(p, op.pos, "unary '%s' operator is not supported", op.text)
- expr := parse_unary_expr(p, lhs)
- ue := ast.new(ast.Unary_Expr, op.pos, expr)
- ue.op = op
- ue.expr = expr
- return ue
- case .Period:
- op := advance_token(p)
- field := parse_ident(p)
- ise := ast.new(ast.Implicit_Selector_Expr, op.pos, field)
- ise.field = field
- return ise
- }
- return parse_atom_expr(p, parse_operand(p, lhs), lhs)
- }
- parse_binary_expr :: proc(p: ^Parser, lhs: bool, prec_in: int) -> ^ast.Expr {
- start_pos := p.curr_tok.pos
- expr := parse_unary_expr(p, lhs)
- if expr == nil {
- return ast.new(ast.Bad_Expr, start_pos, end_pos(p.prev_tok))
- }
- for prec := token_precedence(p, p.curr_tok.kind); prec >= prec_in; prec -= 1 {
- loop: for {
- op := p.curr_tok
- op_prec := token_precedence(p, op.kind)
- if op_prec != prec {
- break loop
- }
- #partial switch op.kind {
- case .If, .When:
- if p.prev_tok.pos.line < op.pos.line {
- // NOTE(bill): Check to see if the `if` or `when` is on the same line of the `lhs` condition
- break loop
- }
- }
- expect_operator(p)
- #partial switch op.kind {
- case .Question:
- cond := expr
- x := parse_expr(p, lhs)
- colon := expect_token(p, .Colon)
- y := parse_expr(p, lhs)
- te := ast.new(ast.Ternary_If_Expr, expr.pos, end_pos(p.prev_tok))
- te.cond = cond
- te.op1 = op
- te.x = x
- te.op2 = colon
- te.y = y
- expr = te
- case .If:
- x := expr
- cond := parse_expr(p, lhs)
- else_tok := expect_token(p, .Else)
- y := parse_expr(p, lhs)
- te := ast.new(ast.Ternary_If_Expr, expr.pos, end_pos(p.prev_tok))
- te.x = x
- te.op1 = op
- te.cond = cond
- te.op2 = else_tok
- te.y = y
- expr = te
- case .When:
- x := expr
- cond := parse_expr(p, lhs)
- else_tok := expect_token(p, .Else)
- y := parse_expr(p, lhs)
- te := ast.new(ast.Ternary_When_Expr, expr.pos, end_pos(p.prev_tok))
- te.x = x
- te.op1 = op
- te.cond = cond
- te.op2 = else_tok
- te.y = y
- expr = te
- case .Or_Else:
- x := expr
- y := parse_expr(p, lhs)
- oe := ast.new(ast.Or_Else_Expr, expr.pos, end_pos(p.prev_tok))
- oe.x = x
- oe.token = op
- oe.y = y
- expr = oe
- case:
- right := parse_binary_expr(p, false, prec+1)
- if right == nil {
- error(p, op.pos, "expected expression on the right-hand side of the binary operator")
- }
- be := ast.new(ast.Binary_Expr, expr.pos, end_pos(p.prev_tok))
- be.left = expr
- be.op = op
- be.right = right
- expr = be
- }
- }
- }
- return expr
- }
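- // Precedence climbing: each outer pass binds operators of exactly 'prec',
- // from the highest precedence down to prec_in (illustrative):
- //
- //     a + b * c           // '*' binds before '+'
- //     x if cond else y    // runtime ternary (Ternary_If_Expr)
- //     v or_else fallback  // Or_Else_Expr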
- parse_expr_list :: proc(p: ^Parser, lhs: bool) -> ([]^ast.Expr) {
- list: [dynamic]^ast.Expr
- for {
- expr := parse_expr(p, lhs)
- append(&list, expr)
- if p.curr_tok.kind != .Comma || p.curr_tok.kind == .EOF {
- break
- }
- advance_token(p)
- }
- return list[:]
- }
- parse_lhs_expr_list :: proc(p: ^Parser) -> []^ast.Expr {
- return parse_expr_list(p, true)
- }
- parse_rhs_expr_list :: proc(p: ^Parser) -> []^ast.Expr {
- return parse_expr_list(p, false)
- }
- parse_simple_stmt :: proc(p: ^Parser, flags: Stmt_Allow_Flags) -> ^ast.Stmt {
- start_tok := p.curr_tok
- docs := p.lead_comment
- lhs := parse_lhs_expr_list(p)
- op := p.curr_tok
- switch {
- case tokenizer.is_assignment_operator(op.kind):
- // if p.curr_proc == nil {
- // error(p, p.curr_tok.pos, "simple statements are not allowed at the file scope");
- // return ast.new(ast.Bad_Stmt, start_tok.pos, end_pos(p.curr_tok));
- // }
- advance_token(p)
- rhs := parse_rhs_expr_list(p)
- if len(rhs) == 0 {
- error(p, p.curr_tok.pos, "no right-hand side in assignment statement")
- return ast.new(ast.Bad_Stmt, start_tok.pos, end_pos(p.curr_tok))
- }
- stmt := ast.new(ast.Assign_Stmt, lhs[0].pos, rhs[len(rhs)-1])
- stmt.lhs = lhs
- stmt.op = op
- stmt.rhs = rhs
- return stmt
- case op.kind == .In:
- if .In in flags {
- allow_token(p, .In)
- prev_allow_range := p.allow_range
- p.allow_range = true
- expr := parse_expr(p, false)
- p.allow_range = prev_allow_range
- rhs := make([]^ast.Expr, 1)
- rhs[0] = expr
- stmt := ast.new(ast.Assign_Stmt, lhs[0].pos, rhs[len(rhs)-1])
- stmt.lhs = lhs
- stmt.op = op
- stmt.rhs = rhs
- return stmt
- }
- case op.kind == .Colon:
- expect_token_after(p, .Colon, "identifier list")
- if .Label in flags && len(lhs) == 1 {
- is_partial := false
- is_reverse := false
- partial_token: tokenizer.Token
- if p.curr_tok.kind == .Hash {
- name := peek_token(p)
- if name.kind == .Ident && name.text == "partial" &&
- peek_token(p, 1).kind == .Switch {
- partial_token = expect_token(p, .Hash)
- expect_token(p, .Ident)
- is_partial = true
- } else if name.kind == .Ident && name.text == "reverse" &&
- peek_token(p, 1).kind == .For {
- partial_token = expect_token(p, .Hash)
- expect_token(p, .Ident)
- is_reverse = true
- }
- }
- #partial switch p.curr_tok.kind {
- case .Open_Brace, .If, .For, .Switch:
- label := lhs[0]
- stmt := parse_stmt(p)
- if stmt != nil {
- #partial switch n in stmt.derived_stmt {
- case ^ast.Block_Stmt: n.label = label
- case ^ast.If_Stmt: n.label = label
- case ^ast.For_Stmt: n.label = label
- case ^ast.Switch_Stmt: n.label = label
- case ^ast.Type_Switch_Stmt: n.label = label
- case ^ast.Range_Stmt: n.label = label
- }
- if is_partial {
- #partial switch n in stmt.derived_stmt {
- case ^ast.Switch_Stmt: n.partial = true
- case ^ast.Type_Switch_Stmt: n.partial = true
- case:
- error(p, partial_token.pos, "incorrect use of directive, use '%s: #partial switch'", partial_token.text)
- }
- }
- if is_reverse {
- #partial switch n in stmt.derived_stmt {
- case ^ast.Range_Stmt: n.reverse = true
- case:
- error(p, partial_token.pos, "incorrect use of directive, use '%s: #reverse for'", partial_token.text)
- }
- }
- }
- return stmt
- }
- }
- return parse_value_decl(p, lhs, docs)
- }
- if len(lhs) > 1 {
- error(p, op.pos, "expected 1 expression, got %d", len(lhs))
- return ast.new(ast.Bad_Stmt, start_tok.pos, end_pos(p.curr_tok))
- }
- #partial switch op.kind {
- case .Increment, .Decrement:
- advance_token(p)
- error(p, op.pos, "postfix '%s' statement is not supported", op.text)
- }
- es := ast.new(ast.Expr_Stmt, lhs[0].pos, lhs[0])
- es.expr = lhs[0]
- return es
- }
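- // Statement shapes dispatched above (illustrative):
- //
- //     x, y = y, x                   // assignment operators
- //     i in 0..<10                   // 'in', when the caller allows it
- //     loop: #reverse for v in xs {} // label, optionally with a directive
- //     x := 1                        // ':' falls through to parse_value_decl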
- parse_value_decl :: proc(p: ^Parser, names: []^ast.Expr, docs: ^ast.Comment_Group) -> ^ast.Decl {
- is_mutable := true
- values: []^ast.Expr
- type := parse_type_or_ident(p)
- #partial switch p.curr_tok.kind {
- case .Eq, .Colon:
- sep := advance_token(p)
- is_mutable = sep.kind != .Colon
- values = parse_rhs_expr_list(p)
- if len(values) > len(names) {
- error(p, p.curr_tok.pos, "too many values on the right-hand side of the declaration")
- } else if len(values) < len(names) && !is_mutable {
- error(p, p.curr_tok.pos, "all constant declarations must be defined")
- } else if len(values) == 0 {
- error(p, p.curr_tok.pos, "expected an expression for this declaration")
- }
- }
- if is_mutable {
- if type == nil && len(values) == 0 {
- error(p, p.curr_tok.pos, "missing variable type or initialization")
- return ast.new(ast.Bad_Decl, names[0].pos, end_pos(p.curr_tok))
- }
- } else {
- if type == nil && len(values) == 0 && len(names) > 0 {
- error(p, p.curr_tok.pos, "missing constant value")
- return ast.new(ast.Bad_Decl, names[0].pos, end_pos(p.curr_tok))
- }
- }
- end := p.prev_tok
- if p.expr_level >= 0 {
- end: ^ast.Expr
- if !is_mutable && len(values) > 0 {
- end = values[len(values)-1]
- }
- if p.curr_tok.kind == .Close_Brace &&
- p.curr_tok.pos.line == p.prev_tok.pos.line {
- } else {
- expect_semicolon(p, end)
- }
- }
- if p.curr_proc == nil {
- if len(values) > 0 && len(names) != len(values) {
- error(p, values[0].pos, "expected %d expressions on the right-hand side, got %d", len(names), len(values))
- }
- }
- decl := ast.new(ast.Value_Decl, names[0].pos, end_pos(end))
- decl.docs = docs
- decl.names = names
- decl.type = type
- decl.values = values
- decl.is_mutable = is_mutable
- return decl
- }
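- // Declaration forms (illustrative; the caller has already parsed the names
- // and consumed the first ':'):
- //
- //     x: int    // type only
- //     x := 1    // '=', mutable
- //     X :: 1    // ':', constant; every constant name must be given a value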
- parse_import_decl :: proc(p: ^Parser, kind := Import_Decl_Kind.Standard) -> ^ast.Import_Decl {
- docs := p.lead_comment
- tok := expect_token(p, .Import)
- import_name: tokenizer.Token
- is_using := kind != Import_Decl_Kind.Standard
- #partial switch p.curr_tok.kind {
- case .Ident:
- import_name = advance_token(p)
- case:
- import_name.pos = p.curr_tok.pos
- }
- if !is_using && is_blank_ident(import_name) {
- error(p, import_name.pos, "illegal import name: '_'")
- }
- path := expect_token_after(p, .String, "import")
- decl := ast.new(ast.Import_Decl, tok.pos, end_pos(path))
- decl.docs = docs
- decl.is_using = is_using
- decl.import_tok = tok
- decl.name = import_name
- decl.relpath = path
- decl.fullpath = path.text
- if p.curr_proc != nil {
- error(p, decl.pos, "import declarations cannot be used within a procedure, it must be done at the file scope")
- } else {
- append(&p.file.imports, decl)
- }
- expect_semicolon(p, decl)
- decl.comment = p.line_comment
- return decl
- }
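- // e.g. (illustrative):
- //
- //     import "core:fmt"          // import name defaults to the package name
- //     import alias "core:fmt"    // explicit name; a blank '_' is rejected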