|
@@ -12,23 +12,24 @@ TOKEN_KIND(Token__LiteralBegin, "_LiteralBegin"), \
|
|
|
TOKEN_KIND(Token__LiteralEnd, "_LiteralEnd"), \
|
|
|
\
|
|
|
TOKEN_KIND(Token__OperatorBegin, "_OperatorBegin"), \
|
|
|
- TOKEN_KIND(Token_Eq, "="), \
|
|
|
- TOKEN_KIND(Token_Not, "!"), \
|
|
|
- TOKEN_KIND(Token_Hash, "#"), \
|
|
|
- TOKEN_KIND(Token_At, "@"), \
|
|
|
- TOKEN_KIND(Token_Pointer, "^"), \
|
|
|
- TOKEN_KIND(Token_Maybe, "?"), \
|
|
|
- TOKEN_KIND(Token_Add, "+"), \
|
|
|
- TOKEN_KIND(Token_Sub, "-"), \
|
|
|
- TOKEN_KIND(Token_Mul, "*"), \
|
|
|
- TOKEN_KIND(Token_Quo, "/"), \
|
|
|
- TOKEN_KIND(Token_Mod, "%"), \
|
|
|
- TOKEN_KIND(Token_And, "&"), \
|
|
|
- TOKEN_KIND(Token_Or, "|"), \
|
|
|
- TOKEN_KIND(Token_Xor, "~"), \
|
|
|
- TOKEN_KIND(Token_AndNot, "&~"), \
|
|
|
- TOKEN_KIND(Token_Shl, "<<"), \
|
|
|
- TOKEN_KIND(Token_Shr, ">>"), \
|
|
|
+ TOKEN_KIND(Token_Eq, "="), \
|
|
|
+ TOKEN_KIND(Token_Not, "!"), \
|
|
|
+ TOKEN_KIND(Token_Hash, "#"), \
|
|
|
+ TOKEN_KIND(Token_At, "@"), \
|
|
|
+ TOKEN_KIND(Token_Dollar, "$"), \
|
|
|
+ TOKEN_KIND(Token_Pointer, "^"), \
|
|
|
+ TOKEN_KIND(Token_Question, "?"), \
|
|
|
+ TOKEN_KIND(Token_Add, "+"), \
|
|
|
+ TOKEN_KIND(Token_Sub, "-"), \
|
|
|
+ TOKEN_KIND(Token_Mul, "*"), \
|
|
|
+ TOKEN_KIND(Token_Quo, "/"), \
|
|
|
+ TOKEN_KIND(Token_Mod, "%"), \
|
|
|
+ TOKEN_KIND(Token_And, "&"), \
|
|
|
+ TOKEN_KIND(Token_Or, "|"), \
|
|
|
+ TOKEN_KIND(Token_Xor, "~"), \
|
|
|
+ TOKEN_KIND(Token_AndNot, "&~"), \
|
|
|
+ TOKEN_KIND(Token_Shl, "<<"), \
|
|
|
+ TOKEN_KIND(Token_Shr, ">>"), \
|
|
|
\
|
|
|
/*TOKEN_KIND(Token_as, "as"), */\
|
|
|
/*TOKEN_KIND(Token_transmute, "transmute"), */\
|
|
@@ -55,6 +56,8 @@ TOKEN_KIND(Token__AssignOpBegin, "_AssignOpBegin"), \
|
|
|
TOKEN_KIND(Token__AssignOpEnd, "_AssignOpEnd"), \
|
|
|
TOKEN_KIND(Token_ArrowRight, "->"), \
|
|
|
TOKEN_KIND(Token_ArrowLeft, "<-"), \
|
|
|
+ TOKEN_KIND(Token_Increment, "++"), \
|
|
|
+ TOKEN_KIND(Token_Decrement, "--"), \
|
|
|
\
|
|
|
TOKEN_KIND(Token__ComparisonBegin, "_ComparisonBegin"), \
|
|
|
TOKEN_KIND(Token_CmpEq, "=="), \
|
|
@@ -80,45 +83,45 @@ TOKEN_KIND(Token__ComparisonEnd, "_ComparisonEnd"), \
|
|
|
TOKEN_KIND(Token__OperatorEnd, "_OperatorEnd"), \
|
|
|
\
|
|
|
TOKEN_KIND(Token__KeywordBegin, "_KeywordBegin"), \
|
|
|
- /* TODO(bill): So of these keywords are not used but "reserved", why not remove them? */ \
|
|
|
- TOKEN_KIND(Token_when, "when"), \
|
|
|
- TOKEN_KIND(Token_if, "if"), \
|
|
|
- TOKEN_KIND(Token_else, "else"), \
|
|
|
- TOKEN_KIND(Token_for, "for"), \
|
|
|
- TOKEN_KIND(Token_in, "in"), \
|
|
|
- TOKEN_KIND(Token_break, "break"), \
|
|
|
- TOKEN_KIND(Token_continue, "continue"), \
|
|
|
- TOKEN_KIND(Token_fallthrough, "fallthrough"), \
|
|
|
- TOKEN_KIND(Token_match, "match"), \
|
|
|
- TOKEN_KIND(Token_type, "type"), \
|
|
|
- TOKEN_KIND(Token_default, "default"), \
|
|
|
- TOKEN_KIND(Token_case, "case"), \
|
|
|
- TOKEN_KIND(Token_defer, "defer"), \
|
|
|
- TOKEN_KIND(Token_return, "return"), \
|
|
|
- TOKEN_KIND(Token_give, "give"), \
|
|
|
- TOKEN_KIND(Token_proc, "proc"), \
|
|
|
- TOKEN_KIND(Token_macro, "macro"), \
|
|
|
- TOKEN_KIND(Token_struct, "struct"), \
|
|
|
- TOKEN_KIND(Token_union, "union"), \
|
|
|
- TOKEN_KIND(Token_raw_union, "raw_union"), \
|
|
|
- TOKEN_KIND(Token_enum, "enum"), \
|
|
|
- TOKEN_KIND(Token_vector, "vector"), \
|
|
|
- TOKEN_KIND(Token_map, "map"), \
|
|
|
- TOKEN_KIND(Token_static, "static"), \
|
|
|
- TOKEN_KIND(Token_dynamic, "dynamic"), \
|
|
|
- TOKEN_KIND(Token_using, "using"), \
|
|
|
- TOKEN_KIND(Token_no_alias, "no_alias"), \
|
|
|
- /* TOKEN_KIND(Token_mutable, "mutable"), */\
|
|
|
+ /* TODO(bill): Some of these keywords are not used but "reserved", why not remove them? */ \
|
|
|
+ TOKEN_KIND(Token_when, "when"), \
|
|
|
+ TOKEN_KIND(Token_if, "if"), \
|
|
|
+ TOKEN_KIND(Token_else, "else"), \
|
|
|
+ TOKEN_KIND(Token_for, "for"), \
|
|
|
+ TOKEN_KIND(Token_in, "in"), \
|
|
|
+ TOKEN_KIND(Token_break, "break"), \
|
|
|
+ TOKEN_KIND(Token_continue, "continue"), \
|
|
|
+ TOKEN_KIND(Token_fallthrough, "fallthrough"), \
|
|
|
+ TOKEN_KIND(Token_match, "match"), \
|
|
|
+ /* TOKEN_KIND(Token_type, "type"), */ \
|
|
|
+ TOKEN_KIND(Token_default, "default"), \
|
|
|
+ TOKEN_KIND(Token_case, "case"), \
|
|
|
+ TOKEN_KIND(Token_defer, "defer"), \
|
|
|
+ TOKEN_KIND(Token_return, "return"), \
|
|
|
+ TOKEN_KIND(Token_give, "give"), \
|
|
|
+ TOKEN_KIND(Token_proc, "proc"), \
|
|
|
+ TOKEN_KIND(Token_macro, "macro"), \
|
|
|
+ TOKEN_KIND(Token_struct, "struct"), \
|
|
|
+ TOKEN_KIND(Token_union, "union"), \
|
|
|
+ TOKEN_KIND(Token_raw_union, "raw_union"), \
|
|
|
+ TOKEN_KIND(Token_enum, "enum"), \
|
|
|
+ TOKEN_KIND(Token_vector, "vector"), \
|
|
|
+ TOKEN_KIND(Token_map, "map"), \
|
|
|
+ TOKEN_KIND(Token_static, "static"), \
|
|
|
+ TOKEN_KIND(Token_dynamic, "dynamic"), \
|
|
|
+ TOKEN_KIND(Token_using, "using"), \
|
|
|
+ TOKEN_KIND(Token_no_alias, "no_alias"), \
|
|
|
+ /* TOKEN_KIND(Token_mutable, "mutable"), */ \
|
|
|
/* TOKEN_KIND(Token_immutable, "immutable"), */\
|
|
|
- TOKEN_KIND(Token_thread_local, "thread_local"), \
|
|
|
- TOKEN_KIND(Token_cast, "cast"), \
|
|
|
- TOKEN_KIND(Token_transmute, "transmute"), \
|
|
|
- TOKEN_KIND(Token_down_cast, "down_cast"), \
|
|
|
- TOKEN_KIND(Token_union_cast, "union_cast"), \
|
|
|
- TOKEN_KIND(Token_context, "context"), \
|
|
|
- TOKEN_KIND(Token_push_context, "push_context"), \
|
|
|
- TOKEN_KIND(Token_push_allocator, "push_allocator"), \
|
|
|
- TOKEN_KIND(Token_asm, "asm"), \
|
|
|
+ TOKEN_KIND(Token_thread_local, "thread_local"), \
|
|
|
+ TOKEN_KIND(Token_cast, "cast"), \
|
|
|
+ TOKEN_KIND(Token_transmute, "transmute"), \
|
|
|
+ TOKEN_KIND(Token_down_cast, "down_cast"), \
|
|
|
+ TOKEN_KIND(Token_union_cast, "union_cast"), \
|
|
|
+ TOKEN_KIND(Token_context, "context"), \
|
|
|
+ TOKEN_KIND(Token_push_context, "push_context"), \
|
|
|
+ TOKEN_KIND(Token_push_allocator, "push_allocator"), \
|
|
|
+ TOKEN_KIND(Token_asm, "asm"), \
|
|
|
TOKEN_KIND(Token__KeywordEnd, "_KeywordEnd"), \
|
|
|
TOKEN_KIND(Token_Count, "")
|
|
|
|
|
@@ -478,7 +481,6 @@ gb_inline void scan_mantissa(Tokenizer *t, i32 base, bool allow_underscore) {
|
|
|
}
|
|
|
}
|
|
|
|
|
|
-
|
|
|
Token scan_number_to_token(Tokenizer *t, bool seen_decimal_point) {
|
|
|
Token token = {0};
|
|
|
token.kind = Token_Integer;
|
|
@@ -734,20 +736,10 @@ Token tokenizer_get_token(Tokenizer *t) {
|
|
|
|
|
|
// NOTE(bill): All keywords are > 1
|
|
|
if (token.string.len > 1) {
|
|
|
- /* if (str_eq(token.string, token_strings[Token_as])) {
|
|
|
- token.kind = Token_as;
|
|
|
- } else if (str_eq(token.string, token_strings[Token_transmute])) {
|
|
|
- token.kind = Token_transmute;
|
|
|
- } else if (str_eq(token.string, token_strings[Token_down_cast])) {
|
|
|
- token.kind = Token_down_cast;
|
|
|
- } else if (str_eq(token.string, token_strings[Token_union_cast])) {
|
|
|
- token.kind = Token_union_cast;
|
|
|
- } else */{
|
|
|
- for (i32 k = Token__KeywordBegin+1; k < Token__KeywordEnd; k++) {
|
|
|
- if (str_eq(token.string, token_strings[k])) {
|
|
|
- token.kind = cast(TokenKind)k;
|
|
|
- break;
|
|
|
- }
|
|
|
+ for (i32 k = Token__KeywordBegin+1; k < Token__KeywordEnd; k++) {
|
|
|
+ if (str_eq(token.string, token_strings[k])) {
|
|
|
+ token.kind = cast(TokenKind)k;
|
|
|
+ break;
|
|
|
}
|
|
|
}
|
|
|
}
|
|
@@ -861,57 +853,28 @@ Token tokenizer_get_token(Tokenizer *t) {
|
|
|
}
|
|
|
break;
|
|
|
|
|
|
- case '#':
|
|
|
- token.kind = Token_Hash;
|
|
|
- break;
|
|
|
- case '@':
|
|
|
- token.kind = Token_At;
|
|
|
- break;
|
|
|
- case '^':
|
|
|
- token.kind = Token_Pointer;
|
|
|
- break;
|
|
|
- case '?':
|
|
|
- token.kind = Token_Maybe;
|
|
|
- break;
|
|
|
- case ';':
|
|
|
- token.kind = Token_Semicolon;
|
|
|
- break;
|
|
|
- case ',':
|
|
|
- token.kind = Token_Comma;
|
|
|
- break;
|
|
|
- case ':':
|
|
|
- token.kind = Token_Colon;
|
|
|
- break;
|
|
|
- case '(':
|
|
|
- token.kind = Token_OpenParen;
|
|
|
- break;
|
|
|
- case ')':
|
|
|
- token.kind = Token_CloseParen;
|
|
|
- break;
|
|
|
- case '[':
|
|
|
- token.kind = Token_OpenBracket;
|
|
|
- break;
|
|
|
- case ']':
|
|
|
- token.kind = Token_CloseBracket;
|
|
|
- break;
|
|
|
- case '{':
|
|
|
- token.kind = Token_OpenBrace;
|
|
|
- break;
|
|
|
- case '}':
|
|
|
- token.kind = Token_CloseBrace;
|
|
|
- break;
|
|
|
-
|
|
|
- case '*': token.kind = token_kind_variant2(t, Token_Mul, Token_MulEq); break;
|
|
|
- case '%': token.kind = token_kind_variant2(t, Token_Mod, Token_ModEq); break;
|
|
|
- case '=': token.kind = token_kind_variant2(t, Token_Eq, Token_CmpEq); break;
|
|
|
- case '~': token.kind = token_kind_variant2(t, Token_Xor, Token_XorEq); break;
|
|
|
- case '!': token.kind = token_kind_variant2(t, Token_Not, Token_NotEq); break;
|
|
|
- case '+':
|
|
|
- token.kind = token_kind_variant2(t, Token_Add, Token_AddEq);
|
|
|
- break;
|
|
|
- case '-':
|
|
|
- token.kind = token_kind_variant3(t, Token_Sub, Token_SubEq, '>', Token_ArrowRight);
|
|
|
- break;
|
|
|
+ case '#': token.kind = Token_Hash; break;
|
|
|
+ case '@': token.kind = Token_At; break;
|
|
|
+ case '$': token.kind = Token_Dollar; break;
|
|
|
+ case '?': token.kind = Token_Question; break;
|
|
|
+ case '^': token.kind = Token_Pointer; break;
|
|
|
+ case ';': token.kind = Token_Semicolon; break;
|
|
|
+ case ',': token.kind = Token_Comma; break;
|
|
|
+ case ':': token.kind = Token_Colon; break;
|
|
|
+ case '(': token.kind = Token_OpenParen; break;
|
|
|
+ case ')': token.kind = Token_CloseParen; break;
|
|
|
+ case '[': token.kind = Token_OpenBracket; break;
|
|
|
+ case ']': token.kind = Token_CloseBracket; break;
|
|
|
+ case '{': token.kind = Token_OpenBrace; break;
|
|
|
+ case '}': token.kind = Token_CloseBrace; break;
|
|
|
+
|
|
|
+ case '*': token.kind = token_kind_variant2(t, Token_Mul, Token_MulEq); break;
|
|
|
+ case '%': token.kind = token_kind_variant2(t, Token_Mod, Token_ModEq); break;
|
|
|
+ case '=': token.kind = token_kind_variant2(t, Token_Eq, Token_CmpEq); break;
|
|
|
+ case '~': token.kind = token_kind_variant2(t, Token_Xor, Token_XorEq); break;
|
|
|
+ case '!': token.kind = token_kind_variant2(t, Token_Not, Token_NotEq); break;
|
|
|
+ case '+': token.kind = token_kind_variant3(t, Token_Add, Token_AddEq, '+', Token_Increment); break;
|
|
|
+ case '-': token.kind = token_kind_variant4(t, Token_Sub, Token_SubEq, '-', Token_Decrement, '>', Token_ArrowRight); break;
|
|
|
case '/': {
|
|
|
if (t->curr_rune == '/') {
|
|
|
while (t->curr_rune != '\n' && t->curr_rune != GB_RUNE_EOF) {
|
|
@@ -951,9 +914,7 @@ Token tokenizer_get_token(Tokenizer *t) {
|
|
|
token.kind = token_kind_dub_eq(t, '<', Token_Lt, Token_LtEq, Token_Shl, Token_ShlEq);
|
|
|
}
|
|
|
break;
|
|
|
- case '>':
|
|
|
- token.kind = token_kind_dub_eq(t, '>', Token_Gt, Token_GtEq, Token_Shr, Token_ShrEq);
|
|
|
- break;
|
|
|
+ case '>': token.kind = token_kind_dub_eq(t, '>', Token_Gt, Token_GtEq, Token_Shr, Token_ShrEq); break;
|
|
|
|
|
|
case '&':
|
|
|
token.kind = Token_And;
|