123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627 |
- package odin_ast
- import "core:odin/tokenizer"
- // Per-procedure tag flags (bounds-checking overrides on a procedure).
- Proc_Tag :: enum {
- Bounds_Check,
- No_Bounds_Check,
- }
- // Set of Proc_Tag values, backed by a u32.
- Proc_Tags :: distinct bit_set[Proc_Tag; u32];
- // Inlining hint carried by procedure literals and call expressions.
- Proc_Inlining :: enum u32 {
- None = 0,
- Inline = 1,
- No_Inline = 2,
- }
- // Calling convention recorded on a procedure type.
- // Foreign_Block_Default (-1) presumably means "take the convention from the
- // enclosing foreign block" — NOTE(review): confirm against the parser.
- Proc_Calling_Convention :: enum i32 {
- Invalid = 0,
- Odin,
- Contextless,
- C_Decl,
- Std_Call,
- Fast_Call,
- Foreign_Block_Default = -1,
- }
- // Per-node state flags; stored on every Node via Node_State_Flags.
- Node_State_Flag :: enum {
- Bounds_Check,
- No_Bounds_Check,
- }
- Node_State_Flags :: distinct bit_set[Node_State_Flag];
- // A run of comment tokens kept together as one group.
- Comment_Group :: struct {
- list: []tokenizer.Token,
- }
- // Base of every AST node: the source span [pos, end), a type-erased
- // reference ('derived') to the concrete variant, and per-node state flags.
- Node :: struct {
- pos: tokenizer.Pos,
- end: tokenizer.Pos,
- derived: any,
- state_flags: Node_State_Flags,
- }
- // Base for all expression nodes.
- Expr :: struct {
- using expr_base: Node,
- }
- // Base for all statement nodes.
- Stmt :: struct {
- using stmt_base: Node,
- }
- // Base for all declaration nodes (a declaration is a statement).
- Decl :: struct {
- using decl_base: Stmt,
- }
- // Expressions
- // Placeholder for an expression the parser could not parse.
- Bad_Expr :: struct {
- using node: Expr,
- }
- // An identifier, e.g. `foo`.
- Ident :: struct {
- using node: Expr,
- name: string,
- }
- // An implicit value denoted by a single token (e.g. `context`) —
- // NOTE(review): exact token set is decided by the parser, not visible here.
- Implicit :: struct {
- using node: Expr,
- tok: tokenizer.Token,
- }
- // An undefined-value expression; only the token kind is recorded.
- Undef :: struct {
- using node: Expr,
- tok: tokenizer.Token_Kind,
- }
- // A basic literal token (number/string/rune etc. — whatever the tokenizer produced).
- Basic_Lit :: struct {
- using node: Expr,
- tok: tokenizer.Token,
- }
- // A `#name`-style directive used in expression position.
- Basic_Directive :: struct {
- using node: Expr,
- tok: tokenizer.Token,
- name: string,
- }
- // An ellipsis expression wrapping an optional operand (expr may be nil).
- Ellipsis :: struct {
- using node: Expr,
- tok: tokenizer.Token_Kind,
- expr: ^Expr,
- }
- // A procedure literal: its type, body, tags/inlining hints, and any
- // `where` clauses.
- Proc_Lit :: struct {
- using node: Expr,
- type: ^Proc_Type,
- body: ^Stmt,
- tags: Proc_Tags,
- inlining: Proc_Inlining,
- where_token: tokenizer.Token,
- where_clauses: []^Expr,
- }
- // A compound literal `T{elems}`; type may be nil for untyped literals.
- Comp_Lit :: struct {
- using node: Expr,
- type: ^Expr,
- open: tokenizer.Pos,
- elems: []^Expr,
- close: tokenizer.Pos,
- }
- // A named tag applied to an expression.
- Tag_Expr :: struct {
- using node: Expr,
- op: tokenizer.Token,
- name: string,
- expr: ^Expr,
- }
- // A prefix unary operation, e.g. `-x`.
- Unary_Expr :: struct {
- using node: Expr,
- op: tokenizer.Token,
- expr: ^Expr,
- }
- // A binary operation, e.g. `left op right`.
- Binary_Expr :: struct {
- using node: Expr,
- left: ^Expr,
- op: tokenizer.Token,
- right: ^Expr,
- }
- // A parenthesised expression `(expr)`.
- Paren_Expr :: struct {
- using node: Expr,
- open: tokenizer.Pos,
- expr: ^Expr,
- close: tokenizer.Pos,
- }
- // A field selection `expr.field`.
- Selector_Expr :: struct {
- using node: Expr,
- expr: ^Expr,
- field: ^Ident,
- }
- // An implicit selector `.field` (operand inferred from context).
- Implicit_Selector_Expr :: struct {
- using node: Expr,
- field: ^Ident,
- }
- // An index operation `expr[index]`.
- Index_Expr :: struct {
- using node: Expr,
- expr: ^Expr,
- open: tokenizer.Pos,
- index: ^Expr,
- close: tokenizer.Pos,
- }
- // A pointer dereference `expr^`.
- Deref_Expr :: struct {
- using node: Expr,
- expr: ^Expr,
- op: tokenizer.Token,
- }
- // A slice operation `expr[low interval high]`; low/high may be nil.
- Slice_Expr :: struct {
- using node: Expr,
- expr: ^Expr,
- open: tokenizer.Pos,
- low: ^Expr,
- interval: tokenizer.Token,
- high: ^Expr,
- close: tokenizer.Pos,
- }
- // A call `expr(args)`, with an optional inlining hint and the position of
- // a trailing variadic ellipsis token if present.
- Call_Expr :: struct {
- using node: Expr,
- inlining: Proc_Inlining,
- expr: ^Expr,
- open: tokenizer.Pos,
- args: []^Expr,
- ellipsis: tokenizer.Token,
- close: tokenizer.Pos,
- }
- // A `field = value` pair inside a compound literal or call.
- Field_Value :: struct {
- using node: Expr,
- field: ^Expr,
- sep: tokenizer.Pos,
- value: ^Expr,
- }
- // A ternary expression: `cond` selecting between `x` and `y`
- // (op1/op2 are the two operator tokens, which fix the surface syntax).
- Ternary_Expr :: struct {
- using node: Expr,
- cond: ^Expr,
- op1: tokenizer.Token,
- x: ^Expr,
- op2: tokenizer.Token,
- y: ^Expr,
- }
- // A type assertion `expr.(type)`.
- Type_Assertion :: struct {
- using node: Expr,
- expr: ^Expr,
- dot: tokenizer.Pos,
- open: tokenizer.Pos,
- type: ^Expr,
- close: tokenizer.Pos,
- }
- // A cast `tok(type)expr`, e.g. `cast(T)x`.
- Type_Cast :: struct {
- using node: Expr,
- tok: tokenizer.Token,
- open: tokenizer.Pos,
- type: ^Expr,
- close: tokenizer.Pos,
- expr: ^Expr,
- }
- // An `auto_cast expr` expression.
- Auto_Cast :: struct {
- using node: Expr,
- op: tokenizer.Token,
- expr: ^Expr,
- }
- // Statements
- // Placeholder for a statement the parser could not parse.
- Bad_Stmt :: struct {
- using node: Stmt,
- }
- // An empty statement (bare ';').
- Empty_Stmt :: struct {
- using node: Stmt,
- semicolon: tokenizer.Pos, // Position of the following ';'
- }
- // An expression used in statement position.
- Expr_Stmt :: struct {
- using node: Stmt,
- expr: ^Expr,
- }
- // A named tag applied to a statement.
- Tag_Stmt :: struct {
- using node: Stmt,
- op: tokenizer.Token,
- name: string,
- stmt: ^Stmt,
- }
- // An assignment `lhs op rhs` (op also covers compound assignments).
- Assign_Stmt :: struct {
- using node: Stmt,
- lhs: []^Expr,
- op: tokenizer.Token,
- rhs: []^Expr,
- }
- // A braced block of statements, optionally labelled.
- Block_Stmt :: struct {
- using node: Stmt,
- label: ^Expr,
- open: tokenizer.Pos,
- stmts: []^Stmt,
- close: tokenizer.Pos,
- }
- // An `if` statement; init and else_stmt may be nil.
- If_Stmt :: struct {
- using node: Stmt,
- label: ^Expr,
- if_pos: tokenizer.Pos,
- init: ^Stmt,
- cond: ^Expr,
- body: ^Stmt,
- else_stmt: ^Stmt,
- }
- // A compile-time `when` statement; else_stmt may be nil.
- When_Stmt :: struct {
- using node: Stmt,
- when_pos: tokenizer.Pos,
- cond: ^Expr,
- body: ^Stmt,
- else_stmt: ^Stmt,
- }
- // A `return` with zero or more result expressions.
- Return_Stmt :: struct {
- using node: Stmt,
- results: []^Expr,
- }
- // A `defer`-red statement.
- Defer_Stmt :: struct {
- using node: Stmt,
- stmt: ^Stmt,
- }
- // A C-style `for`; init/cond/post may each be nil.
- For_Stmt :: struct {
- using node: Stmt,
- label: ^Expr,
- for_pos: tokenizer.Pos,
- init: ^Stmt,
- cond: ^Expr,
- post: ^Stmt,
- body: ^Stmt,
- }
- // A `for val0, val1 in expr` range loop; val1 may be nil.
- Range_Stmt :: struct {
- using node: Stmt,
- label: ^Expr,
- for_pos: tokenizer.Pos,
- val0: ^Expr,
- val1: ^Expr,
- in_pos: tokenizer.Pos,
- expr: ^Expr,
- body: ^Stmt,
- }
- // One `case` clause of a switch; an empty list is the default case.
- Case_Clause :: struct {
- using node: Stmt,
- case_pos: tokenizer.Pos,
- list: []^Expr,
- terminator: tokenizer.Token,
- body: []^Stmt,
- }
- // A value `switch`; `complete` marks an exhaustiveness requirement
- // (presumably `#partial`'s complement — NOTE(review): confirm).
- Switch_Stmt :: struct {
- using node: Stmt,
- label: ^Expr,
- switch_pos: tokenizer.Pos,
- init: ^Stmt,
- cond: ^Expr,
- body: ^Stmt,
- complete: bool,
- }
- // A type `switch` over the variants of `expr`.
- Type_Switch_Stmt :: struct {
- using node: Stmt,
- label: ^Expr,
- switch_pos: tokenizer.Pos,
- tag: ^Stmt,
- expr: ^Expr,
- body: ^Stmt,
- complete: bool,
- }
- // A `break`/`continue`/`fallthrough`-style branch; label may be nil.
- Branch_Stmt :: struct {
- using node: Stmt,
- tok: tokenizer.Token,
- label: ^Ident,
- }
- // A `using` statement bringing entities into scope.
- Using_Stmt :: struct {
- using node: Stmt,
- list: []^Expr,
- }
- // Declarations
- // Placeholder for a declaration the parser could not parse.
- Bad_Decl :: struct {
- using node: Decl,
- }
- // A value declaration `names: type = values` (constant or variable,
- // distinguished by is_mutable), with attached docs/attributes.
- Value_Decl :: struct {
- using node: Decl,
- docs: ^Comment_Group,
- attributes: [dynamic]^Attribute, // dynamic as parsing will add to them lazily
- names: []^Expr,
- type: ^Expr,
- values: []^Expr,
- comment: ^Comment_Group,
- is_using: bool,
- is_mutable: bool,
- }
- // The file's `package name` declaration.
- Package_Decl :: struct {
- using node: Decl,
- docs: ^Comment_Group,
- token: tokenizer.Token,
- name: string,
- comment: ^Comment_Group,
- }
- // An `import` declaration; fullpath is the resolved path string.
- Import_Decl :: struct {
- using node: Decl,
- docs: ^Comment_Group,
- is_using: bool,
- import_tok: tokenizer.Token,
- name: tokenizer.Token,
- relpath: tokenizer.Token,
- fullpath: string,
- comment: ^Comment_Group,
- }
- // A `foreign lib { ... }` block grouping foreign procedure declarations.
- Foreign_Block_Decl :: struct {
- using node: Decl,
- docs: ^Comment_Group,
- attributes: [dynamic]^Attribute, // dynamic as parsing will add to them lazily
- tok: tokenizer.Token,
- foreign_library: ^Expr,
- body: ^Stmt,
- }
- // A `foreign import` declaration naming one or more library paths.
- Foreign_Import_Decl :: struct {
- using node: Decl,
- docs: ^Comment_Group,
- foreign_tok: tokenizer.Token,
- import_tok: tokenizer.Token,
- name: ^Ident,
- collection_name: string,
- fullpaths: []string,
- attributes: [dynamic]^Attribute, // dynamic as parsing will add to them lazily
- comment: ^Comment_Group,
- }
- // Other things
- // unparen_expr strips any number of nested Paren_Expr wrappers from expr
- // and returns the innermost non-paren expression. Returns nil for nil input.
- unparen_expr :: proc(expr: ^Expr) -> (val: ^Expr) {
- val = expr;
- if expr == nil {
- return;
- }
- for {
- e, ok := val.derived.(Paren_Expr);
- // Also stop on a Paren_Expr with a nil inner expression: without this
- // guard, val would become nil and the next iteration would dereference
- // a nil pointer via val.derived.
- if !ok || e.expr == nil do break;
- val = e.expr;
- }
- return;
- }
- // Per-field parse flags for struct fields, parameters, and results.
- Field_Flag :: enum {
- Ellipsis,
- Using,
- No_Alias,
- C_Vararg,
- Auto_Cast,
- In,
- Results,
- Tags,
- Default_Parameters,
- Typeid_Token,
- }
- Field_Flags :: distinct bit_set[Field_Flag];
- // Flags permitted on struct fields.
- Field_Flags_Struct :: Field_Flags{
- .Using,
- .Tags,
- };
- // Flags permitted on record polymorphic parameters.
- Field_Flags_Record_Poly_Params :: Field_Flags{
- .Typeid_Token,
- };
- // Flags common to procedure signatures (both params and results).
- Field_Flags_Signature :: Field_Flags{
- .Ellipsis,
- .Using,
- .No_Alias,
- .C_Vararg,
- .Auto_Cast,
- .Default_Parameters,
- };
- // Parameters additionally allow a `typeid` token; results allow no extras.
- Field_Flags_Signature_Params :: Field_Flags_Signature | {Field_Flag.Typeid_Token};
- Field_Flags_Signature_Results :: Field_Flags_Signature;
- // A procedure group `proc{a, b, ...}` listing overload candidates.
- Proc_Group :: struct {
- using node: Expr,
- tok: tokenizer.Token,
- open: tokenizer.Pos,
- args: []^Expr,
- close: tokenizer.Pos,
- }
- // An attribute `@(elems)` attached to a declaration.
- Attribute :: struct {
- using node: Node,
- tok: tokenizer.Token_Kind,
- open: tokenizer.Pos,
- elems: []^Expr,
- close: tokenizer.Pos,
- }
- // One field entry: names, type, optional default value, tag, and the
- // Field_Flags recorded during parsing, plus surrounding doc comments.
- Field :: struct {
- using node: Node,
- docs: ^Comment_Group,
- names: []^Expr, // Could be polymorphic
- type: ^Expr,
- default_value: ^Expr,
- tag: tokenizer.Token,
- flags: Field_Flags,
- comment: ^Comment_Group,
- }
- // A delimited list of Fields (struct body, parameter list, results).
- Field_List :: struct {
- using node: Node,
- open: tokenizer.Pos,
- list: []^Field,
- close: tokenizer.Pos,
- }
- // Types
- // The `typeid` type, with an optional `/specialization`.
- Typeid_Type :: struct {
- using node: Expr,
- tok: tokenizer.Token_Kind,
- specialization: ^Expr,
- }
- // A `#type`-style helper wrapper around a type expression.
- Helper_Type :: struct {
- using node: Expr,
- tok: tokenizer.Token_Kind,
- type: ^Expr,
- }
- // A `distinct type` expression.
- Distinct_Type :: struct {
- using node: Expr,
- tok: tokenizer.Token_Kind,
- type: ^Expr,
- }
- // An `opaque type` expression.
- Opaque_Type :: struct {
- using node: Expr,
- tok: tokenizer.Token_Kind,
- type: ^Expr,
- }
- // A polymorphic type `$T` with an optional `/specialization`.
- Poly_Type :: struct {
- using node: Expr,
- dollar: tokenizer.Pos,
- type: ^Ident,
- specialization: ^Expr,
- }
- // A procedure type: params `->` results plus convention/tags;
- // `diverging` marks a `-> !`-style no-return procedure —
- // NOTE(review): confirm exact surface syntax against the parser.
- Proc_Type :: struct {
- using node: Expr,
- tok: tokenizer.Token,
- calling_convention: Proc_Calling_Convention,
- params: ^Field_List,
- arrow: tokenizer.Pos,
- results: ^Field_List,
- tags: Proc_Tags,
- generic: bool,
- diverging: bool,
- }
- // A pointer type `^elem`.
- Pointer_Type :: struct {
- using node: Expr,
- pointer: tokenizer.Pos,
- elem: ^Expr,
- }
- // An array or slice type `[len]elem` (see `len` comment below).
- Array_Type :: struct {
- using node: Expr,
- open: tokenizer.Pos,
- len: ^Expr, // Ellipsis node for [?]T arrray types, nil for slice types
- close: tokenizer.Pos,
- elem: ^Expr,
- }
- // A `[dynamic]elem` array type.
- Dynamic_Array_Type :: struct {
- using node: Expr,
- open: tokenizer.Pos,
- dynamic_pos: tokenizer.Pos,
- close: tokenizer.Pos,
- elem: ^Expr,
- }
- // A `struct` type: optional poly params, alignment, fields, and
- // `where` clauses; packed/raw-union are layout modifiers.
- Struct_Type :: struct {
- using node: Expr,
- tok_pos: tokenizer.Pos,
- poly_params: ^Field_List,
- align: ^Expr,
- fields: ^Field_List,
- name_count: int,
- where_token: tokenizer.Token,
- where_clauses: []^Expr,
- is_packed: bool,
- is_raw_union: bool,
- }
- // A `union` type listing its variant type expressions.
- Union_Type :: struct {
- using node: Expr,
- tok_pos: tokenizer.Pos,
- poly_params: ^Field_List,
- align: ^Expr,
- variants: []^Expr,
- where_token: tokenizer.Token,
- where_clauses: []^Expr,
- }
- // An `enum` type with an optional explicit base type.
- Enum_Type :: struct {
- using node: Expr,
- tok_pos: tokenizer.Pos,
- base_type: ^Expr,
- open: tokenizer.Pos,
- fields: []^Expr,
- close: tokenizer.Pos,
- is_using: bool,
- }
- // A `bit_field` type; each field carries a bit width.
- Bit_Field_Type :: struct {
- using node: Expr,
- tok_pos: tokenizer.Pos,
- align: ^Expr,
- open: tokenizer.Pos,
- fields: []^Field_Value, // Field_Value with ':' rather than '='
- close: tokenizer.Pos,
- }
- // A `bit_set[elem; underlying]` type; underlying may be nil.
- Bit_Set_Type :: struct {
- using node: Expr,
- tok_pos: tokenizer.Pos,
- open: tokenizer.Pos,
- elem: ^Expr,
- underlying: ^Expr,
- close: tokenizer.Pos,
- }
- // A `map[key]value` type.
- Map_Type :: struct {
- using node: Expr,
- tok_pos: tokenizer.Pos,
- key: ^Expr,
- value: ^Expr,
- }
|