tokenizer.cpp

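// The token table is an X-macro: TOKEN_KINDS lists every token exactly once,
// and the list is expanded twice below — once with TOKEN_KIND defined to
// yield the enum value, once to yield the display string — so TokenKind and
// token_strings can never drift out of sync. The Token__*Begin/Token__*End
// entries are sentinels that bracket each category for cheap range checks.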
#define TOKEN_KINDS \
    TOKEN_KIND(Token_Invalid, "Invalid"), \
    TOKEN_KIND(Token_EOF, "EOF"), \
    TOKEN_KIND(Token_Comment, "Comment"), \
\
    TOKEN_KIND(Token__LiteralBegin, "_LiteralBegin"), \
    TOKEN_KIND(Token_Identifier, "Identifier"), \
    TOKEN_KIND(Token_Integer, "Integer"), \
    TOKEN_KIND(Token_Float, "Float"), \
    TOKEN_KIND(Token_Rune, "Rune"), \
    TOKEN_KIND(Token_String, "String"), \
    TOKEN_KIND(Token__LiteralEnd, "_LiteralEnd"), \
\
    TOKEN_KIND(Token__OperatorBegin, "_OperatorBegin"), \
    TOKEN_KIND(Token_Eq, "="), \
    TOKEN_KIND(Token_Not, "!"), \
    TOKEN_KIND(Token_Hash, "#"), \
    TOKEN_KIND(Token_At, "@"), \
    TOKEN_KIND(Token_Pointer, "^"), \
    TOKEN_KIND(Token_Maybe, "?"), \
    TOKEN_KIND(Token_Add, "+"), \
    TOKEN_KIND(Token_Sub, "-"), \
    TOKEN_KIND(Token_Mul, "*"), \
    TOKEN_KIND(Token_Quo, "/"), \
    TOKEN_KIND(Token_Mod, "%"), \
    TOKEN_KIND(Token_And, "&"), \
    TOKEN_KIND(Token_Or, "|"), \
    TOKEN_KIND(Token_Xor, "~"), \
    TOKEN_KIND(Token_AndNot, "&~"), \
    TOKEN_KIND(Token_Shl, "<<"), \
    TOKEN_KIND(Token_Shr, ">>"), \
\
    TOKEN_KIND(Token_as, "as"), \
    TOKEN_KIND(Token_transmute, "transmute"), \
    TOKEN_KIND(Token_down_cast, "down_cast"), \
\
    TOKEN_KIND(Token_Prime, "'"), \
    TOKEN_KIND(Token_DoublePrime, "''"), \
\
    TOKEN_KIND(Token__AssignOpBegin, "_AssignOpBegin"), \
    TOKEN_KIND(Token_AddEq, "+="), \
    TOKEN_KIND(Token_SubEq, "-="), \
    TOKEN_KIND(Token_MulEq, "*="), \
    TOKEN_KIND(Token_QuoEq, "/="), \
    TOKEN_KIND(Token_ModEq, "%="), \
    TOKEN_KIND(Token_AndEq, "&="), \
    TOKEN_KIND(Token_OrEq, "|="), \
    TOKEN_KIND(Token_XorEq, "~="), \
    TOKEN_KIND(Token_AndNotEq, "&~="), \
    TOKEN_KIND(Token_ShlEq, "<<="), \
    TOKEN_KIND(Token_ShrEq, ">>="), \
    TOKEN_KIND(Token__AssignOpEnd, "_AssignOpEnd"), \
    TOKEN_KIND(Token_Increment, "++"), \
    TOKEN_KIND(Token_Decrement, "--"), \
    TOKEN_KIND(Token_ArrowRight, "->"), \
    TOKEN_KIND(Token_ArrowLeft, "<-"), \
\
    TOKEN_KIND(Token_CmpAnd, "&&"), \
    TOKEN_KIND(Token_CmpOr, "||"), \
    TOKEN_KIND(Token_CmpAndEq, "&&="), \
    TOKEN_KIND(Token_CmpOrEq, "||="), \
\
    TOKEN_KIND(Token__ComparisonBegin, "_ComparisonBegin"), \
    TOKEN_KIND(Token_CmpEq, "=="), \
    TOKEN_KIND(Token_NotEq, "!="), \
    TOKEN_KIND(Token_Lt, "<"), \
    TOKEN_KIND(Token_Gt, ">"), \
    TOKEN_KIND(Token_LtEq, "<="), \
    TOKEN_KIND(Token_GtEq, ">="), \
    TOKEN_KIND(Token__ComparisonEnd, "_ComparisonEnd"), \
\
    TOKEN_KIND(Token_OpenParen, "("), \
    TOKEN_KIND(Token_CloseParen, ")"), \
    TOKEN_KIND(Token_OpenBracket, "["), \
    TOKEN_KIND(Token_CloseBracket, "]"), \
    TOKEN_KIND(Token_OpenBrace, "{"), \
    TOKEN_KIND(Token_CloseBrace, "}"), \
    TOKEN_KIND(Token_Colon, ":"), \
    TOKEN_KIND(Token_Semicolon, ";"), \
    TOKEN_KIND(Token_Period, "."), \
    TOKEN_KIND(Token_Comma, ","), \
    TOKEN_KIND(Token_Ellipsis, ".."), \
    TOKEN_KIND(Token_RangeExclusive, "..<"), \
    TOKEN_KIND(Token__OperatorEnd, "_OperatorEnd"), \
\
    TOKEN_KIND(Token__KeywordBegin, "_KeywordBegin"), \
    TOKEN_KIND(Token_type, "type"), \
    TOKEN_KIND(Token_proc, "proc"), \
    TOKEN_KIND(Token_match, "match"), \
    TOKEN_KIND(Token_break, "break"), \
    TOKEN_KIND(Token_continue, "continue"), \
    TOKEN_KIND(Token_fallthrough, "fallthrough"), \
    TOKEN_KIND(Token_case, "case"), \
    TOKEN_KIND(Token_default, "default"), \
    TOKEN_KIND(Token_then, "then"), \
    TOKEN_KIND(Token_if, "if"), \
    TOKEN_KIND(Token_else, "else"), \
    TOKEN_KIND(Token_for, "for"), \
    TOKEN_KIND(Token_range, "range"), \
    TOKEN_KIND(Token_defer, "defer"), \
    TOKEN_KIND(Token_return, "return"), \
    TOKEN_KIND(Token_struct, "struct"), \
    TOKEN_KIND(Token_union, "union"), \
    TOKEN_KIND(Token_raw_union, "raw_union"), \
    TOKEN_KIND(Token_enum, "enum"), \
    TOKEN_KIND(Token_using, "using"), \
    TOKEN_KIND(Token_asm, "asm"), \
    TOKEN_KIND(Token_volatile, "volatile"), \
    TOKEN_KIND(Token_atomic, "atomic"), \
    TOKEN_KIND(Token_push_allocator, "push_allocator"), \
    TOKEN_KIND(Token_push_context, "push_context"), \
    TOKEN_KIND(Token__KeywordEnd, "_KeywordEnd"), \
    TOKEN_KIND(Token_Count, "")

enum TokenKind {
#define TOKEN_KIND(e, s) e
    TOKEN_KINDS
#undef TOKEN_KIND
};

String const token_strings[] = {
#define TOKEN_KIND(e, s) {cast(u8 *)s, gb_size_of(s)-1}
    TOKEN_KINDS
#undef TOKEN_KIND
};
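
// A source position is ordered primarily by line, then by column, and only
// then by file name, so token_pos_cmp gives a total order suitable for
// sorting diagnostics. Note that the file comparison truncates to the
// shorter length, so a path and a prefix of it compare as equal here.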
struct TokenPos {
    String file;
    isize line, column;
};

i32 token_pos_cmp(TokenPos a, TokenPos b) {
    if (a.line == b.line) {
        if (a.column == b.column) {
            isize min_len = gb_min(a.file.len, b.file.len);
            return gb_memcompare(a.file.text, b.file.text, min_len);
        }
        return (a.column < b.column) ? -1 : +1;
    }
    return (a.line < b.line) ? -1 : +1;
}

b32 token_pos_are_equal(TokenPos a, TokenPos b) {
    return token_pos_cmp(a, b) == 0;
}
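
// Tokens are slices, not copies: token.string points directly into the file
// buffer owned by the Tokenizer, so tokens stay valid only as long as that
// buffer does (destroy_tokenizer frees it).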
// NOTE(bill): Text is UTF-8, thus why u8 and not char
struct Token {
    TokenKind kind;
    String string;
    TokenPos pos;
};

Token empty_token = {Token_Invalid};
Token blank_token = {Token_Identifier, {cast(u8 *)"_", 1}};

Token make_token_ident(String s) {
    Token t = {Token_Identifier};
    t.string = s;
    return t;
}

struct ErrorCollector {
    TokenPos prev;
    i64 count;
    i64 warning_count;
    gbMutex mutex;
};

gb_global ErrorCollector global_error_collector;

void init_global_error_collector(void) {
    gb_mutex_init(&global_error_collector.mutex);
}

void warning(Token token, char *fmt, ...) {
    gb_mutex_lock(&global_error_collector.mutex);
    defer (gb_mutex_unlock(&global_error_collector.mutex));
    global_error_collector.warning_count++;
    // NOTE(bill): Duplicate warning, skip it
    if (!token_pos_are_equal(global_error_collector.prev, token.pos)) {
        va_list va;
        global_error_collector.prev = token.pos;
        va_start(va, fmt);
        gb_printf_err("%.*s(%td:%td) Warning: %s\n",
                      LIT(token.pos.file), token.pos.line, token.pos.column,
                      gb_bprintf_va(fmt, va));
        va_end(va);
    }
}

void error(Token token, char *fmt, ...) {
    gb_mutex_lock(&global_error_collector.mutex);
    defer (gb_mutex_unlock(&global_error_collector.mutex));
    global_error_collector.count++;
    // NOTE(bill): Duplicate error, skip it
    if (!token_pos_are_equal(global_error_collector.prev, token.pos)) {
        va_list va;
        global_error_collector.prev = token.pos;
        va_start(va, fmt);
        gb_printf_err("%.*s(%td:%td) %s\n",
                      LIT(token.pos.file), token.pos.line, token.pos.column,
                      gb_bprintf_va(fmt, va));
        va_end(va);
    }
}

void syntax_error(Token token, char *fmt, ...) {
    gb_mutex_lock(&global_error_collector.mutex);
    defer (gb_mutex_unlock(&global_error_collector.mutex));
    global_error_collector.count++;
    // NOTE(bill): Duplicate error, skip it
    if (!token_pos_are_equal(global_error_collector.prev, token.pos)) {
        va_list va;
        global_error_collector.prev = token.pos;
        va_start(va, fmt);
        gb_printf_err("%.*s(%td:%td) Syntax Error: %s\n",
                      LIT(token.pos.file), token.pos.line, token.pos.column,
                      gb_bprintf_va(fmt, va));
        va_end(va);
    }
}

void compiler_error(char *fmt, ...) {
    va_list va;
    va_start(va, fmt);
    gb_printf_err("Internal Compiler Error: %s\n",
                  gb_bprintf_va(fmt, va));
    va_end(va);
    gb_exit(1);
}
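
// Because each category of TokenKind is bracketed by _Begin/_End sentinels
// in TOKEN_KINDS, classifying a token is a single range check rather than a
// switch over every kind.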
gb_inline b32 token_is_literal(Token t) {
    return gb_is_between(t.kind, Token__LiteralBegin+1, Token__LiteralEnd-1);
}
gb_inline b32 token_is_operator(Token t) {
    return gb_is_between(t.kind, Token__OperatorBegin+1, Token__OperatorEnd-1);
}
gb_inline b32 token_is_keyword(Token t) {
    return gb_is_between(t.kind, Token__KeywordBegin+1, Token__KeywordEnd-1);
}
gb_inline b32 token_is_comparison(Token t) {
    return gb_is_between(t.kind, Token__ComparisonBegin+1, Token__ComparisonEnd-1);
}
gb_inline b32 token_is_shift(Token t) {
    return t.kind == Token_Shl || t.kind == Token_Shr;
}

gb_inline void print_token(Token t) { gb_printf("%.*s\n", LIT(t.string)); }

enum TokenizerInitError {
    TokenizerInit_None,

    TokenizerInit_Invalid,
    TokenizerInit_NotExists,
    TokenizerInit_Permission,
    TokenizerInit_Empty,

    TokenizerInit_Count,
};
struct Tokenizer {
    String fullpath;
    u8 *start;
    u8 *end;

    Rune curr_rune;   // current character
    u8 *curr;         // character pos
    u8 *read_curr;    // pos from start
    u8 *line;         // current line pos
    isize line_count;

    isize error_count;
    Array<String> allocated_strings;
};

void tokenizer_err(Tokenizer *t, char *msg, ...) {
    va_list va;
    isize column = t->read_curr - t->line + 1;
    if (column < 1)
        column = 1;

    gb_printf_err("%.*s(%td:%td) Syntax error: ", LIT(t->fullpath), t->line_count, column);
    va_start(va, msg);
    gb_printf_err_va(msg, va);
    va_end(va);
    gb_printf_err("\n");

    t->error_count++;
}
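
// Advances the tokenizer by one rune, not one byte: ASCII bytes are taken
// directly, anything >= 0x80 goes through gb_utf8_decode, and curr/read_curr
// are kept one rune apart so curr always points at the first byte of
// curr_rune. Newlines are counted here, which is what keeps line/line_count
// accurate for every column computed as t->curr - t->line + 1.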
void advance_to_next_rune(Tokenizer *t) {
    if (t->read_curr < t->end) {
        Rune rune;
        isize width = 1;

        t->curr = t->read_curr;
        if (t->curr_rune == '\n') {
            t->line = t->curr;
            t->line_count++;
        }
        rune = *t->read_curr;
        if (rune == 0) {
            tokenizer_err(t, "Illegal character NUL");
        } else if (rune >= 0x80) { // not ASCII
            width = gb_utf8_decode(t->read_curr, t->end - t->read_curr, &rune);
            if (rune == GB_RUNE_INVALID && width == 1)
                tokenizer_err(t, "Illegal UTF-8 encoding");
            else if (rune == GB_RUNE_BOM && t->curr - t->start > 0)
                tokenizer_err(t, "Illegal byte order mark");
        }
        t->read_curr += width;
        t->curr_rune = rune;
    } else {
        t->curr = t->end;
        if (t->curr_rune == '\n') {
            t->line = t->curr;
            t->line_count++;
        }
        t->curr_rune = GB_RUNE_EOF;
    }
}
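
// Loads the entire file into one heap buffer (zero-terminated, hence the
// `true` passed to gb_file_read_contents) and primes the first rune. If the
// read fails, the path is opened again purely to classify the failure for
// the caller.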
TokenizerInitError init_tokenizer(Tokenizer *t, String fullpath) {
    PROF_PROC();

    char *c_str = gb_alloc_array(gb_heap_allocator(), char, fullpath.len+1);
    memcpy(c_str, fullpath.text, fullpath.len);
    c_str[fullpath.len] = '\0';
    defer (gb_free(gb_heap_allocator(), c_str));

    gbFileContents fc = gb_file_read_contents(gb_heap_allocator(), true, c_str);
    gb_zero_item(t);
    if (fc.data != NULL) {
        t->start = cast(u8 *)fc.data;
        t->line = t->read_curr = t->curr = t->start;
        t->end = t->start + fc.size;
        t->fullpath = fullpath;
        t->line_count = 1;

        advance_to_next_rune(t);
        if (t->curr_rune == GB_RUNE_BOM)
            advance_to_next_rune(t); // Ignore BOM at file beginning

        array_init(&t->allocated_strings, gb_heap_allocator());
        return TokenizerInit_None;
    }

    gbFile f = {};
    gbFileError err = gb_file_open(&f, c_str);
    defer (gb_file_close(&f));

    switch (err) {
    case gbFileError_Invalid:
        return TokenizerInit_Invalid;
    case gbFileError_NotExists:
        return TokenizerInit_NotExists;
    case gbFileError_Permission:
        return TokenizerInit_Permission;
    }

    if (gb_file_size(&f) == 0)
        return TokenizerInit_Empty;

    return TokenizerInit_None;
}

gb_inline void destroy_tokenizer(Tokenizer *t) {
    if (t->start != NULL) {
        gb_free(gb_heap_allocator(), t->start);
    }
    for_array(i, t->allocated_strings) {
        gb_free(gb_heap_allocator(), t->allocated_strings[i].text);
    }
    array_free(&t->allocated_strings);
}
void tokenizer_skip_whitespace(Tokenizer *t) {
    while (rune_is_whitespace(t->curr_rune)) {
        advance_to_next_rune(t);
    }
}

gb_inline i32 digit_value(Rune r) {
    if (gb_char_is_digit(cast(char)r))
        return r - '0';
    if (gb_is_between(cast(char)r, 'a', 'f'))
        return r - 'a' + 10;
    if (gb_is_between(cast(char)r, 'A', 'F'))
        return r - 'A' + 10;
    return 16; // NOTE(bill): Larger than highest possible
}

gb_inline void scan_mantissa(Tokenizer *t, i32 base) {
    // TODO(bill): Allow for underscores in numbers as a number separator
    // TODO(bill): Is this a good idea?
    // while (digit_value(t->curr_rune) < base || t->curr_rune == '_')
    while (digit_value(t->curr_rune) < base)
        advance_to_next_rune(t);
}
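
// Number scanning: a leading '0' selects a base prefix (0b/0o/0d/0x), and a
// '.' or exponent demotes the token to Token_Float via the fraction/exponent
// labels. When called with seen_decimal_point (the '.' was consumed by the
// caller), token.string.text is backed up one byte so the token text still
// covers the dot. A base-prefixed literal with no digits after the prefix
// (t->curr - prev <= 2) is marked Token_Invalid.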
Token scan_number_to_token(Tokenizer *t, b32 seen_decimal_point) {
    Token token = {};
    u8 *start_curr = t->curr;

    token.kind = Token_Integer;
    token.string = make_string(start_curr, 1);
    token.pos.file = t->fullpath;
    token.pos.line = t->line_count;
    token.pos.column = t->curr - t->line + 1;

    if (seen_decimal_point) {
        start_curr--;
        token.string.text = start_curr; // Include the '.' the caller has already consumed
        token.kind = Token_Float;
        scan_mantissa(t, 10);
        goto exponent;
    }

    if (t->curr_rune == '0') {
        u8 *prev = t->curr;
        advance_to_next_rune(t);
        if (t->curr_rune == 'b') { // Binary
            advance_to_next_rune(t);
            scan_mantissa(t, 2);
            if (t->curr - prev <= 2)
                token.kind = Token_Invalid;
        } else if (t->curr_rune == 'o') { // Octal
            advance_to_next_rune(t);
            scan_mantissa(t, 8);
            if (t->curr - prev <= 2)
                token.kind = Token_Invalid;
        } else if (t->curr_rune == 'd') { // Decimal
            advance_to_next_rune(t);
            scan_mantissa(t, 10);
            if (t->curr - prev <= 2)
                token.kind = Token_Invalid;
        } else if (t->curr_rune == 'x') { // Hexadecimal
            advance_to_next_rune(t);
            scan_mantissa(t, 16);
            if (t->curr - prev <= 2)
                token.kind = Token_Invalid;
        } else {
            seen_decimal_point = false;
            scan_mantissa(t, 10);
            if (t->curr_rune == '.' || t->curr_rune == 'e' || t->curr_rune == 'E') {
                seen_decimal_point = true;
                goto fraction;
            }
        }

        token.string.len = t->curr - token.string.text;
        return token;
    }

    scan_mantissa(t, 10);

fraction:
    if (t->curr_rune == '.') {
        token.kind = Token_Float;
        advance_to_next_rune(t);
        scan_mantissa(t, 10);
    }

exponent:
    if (t->curr_rune == 'e' || t->curr_rune == 'E') {
        token.kind = Token_Float;
        advance_to_next_rune(t);
        if (t->curr_rune == '-' || t->curr_rune == '+')
            advance_to_next_rune(t);
        scan_mantissa(t, 10);
    }

    token.string.len = t->curr - token.string.text;
    return token;
}
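
// Validates one escape sequence after a '\' (the quote parameter makes the
// escaped quote legal inside its own literal). Simple escapes are consumed
// directly; \0..\7, \x, \u, and \U escapes are checked digit by digit
// against their base. The value accumulated in x is not range-checked
// against max here; the literal is decoded for real only later, when
// tokenizer_get_token hands the whole string to unquote_string.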
// Quote == " for string
b32 scan_escape(Tokenizer *t, Rune quote) {
    isize len = 0;
    u32 base = 0, max = 0, x = 0;

    Rune r = t->curr_rune;
    if (r == 'a' ||
        r == 'b' ||
        r == 'f' ||
        r == 'n' ||
        r == 'r' ||
        r == 't' ||
        r == 'v' ||
        r == '\\' ||
        r == quote) {
        advance_to_next_rune(t);
        return true;
    } else if (gb_is_between(r, '0', '7')) {
        len = 3; base = 8; max = 255;
    } else if (r == 'x') {
        advance_to_next_rune(t);
        len = 2; base = 16; max = 255;
    } else if (r == 'u') {
        advance_to_next_rune(t);
        len = 4; base = 16; max = GB_RUNE_MAX;
    } else if (r == 'U') {
        advance_to_next_rune(t);
        len = 8; base = 16; max = GB_RUNE_MAX;
    } else {
        if (t->curr_rune < 0)
            tokenizer_err(t, "Escape sequence was not terminated");
        else
            tokenizer_err(t, "Unknown escape sequence");
        return false;
    }

    while (len-- > 0) {
        u32 d = cast(u32)digit_value(t->curr_rune);
        if (d >= base) {
            if (t->curr_rune < 0)
                tokenizer_err(t, "Escape sequence was not terminated");
            else
                tokenizer_err(t, "Illegal character %d in escape sequence", t->curr_rune);
            return false;
        }
        x = x*base + d;
        advance_to_next_rune(t);
    }

    return true;
}
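
// The token_kind_variant* helpers resolve maximal-munch operators: with the
// first rune already consumed, they peek at the next rune and pick between
// e.g. "*" and "*=" (variant2), or "+", "+=", "++" (variant3). The
// token_kind_dub_eq helper covers a doubled-rune family such as <, <=, <<,
// <<= in a single call.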
gb_inline TokenKind token_kind_variant2(Tokenizer *t, TokenKind a, TokenKind b) {
    if (t->curr_rune == '=') {
        advance_to_next_rune(t);
        return b;
    }
    return a;
}

gb_inline TokenKind token_kind_variant3(Tokenizer *t, TokenKind a, TokenKind b, Rune ch_c, TokenKind c) {
    if (t->curr_rune == '=') {
        advance_to_next_rune(t);
        return b;
    }
    if (t->curr_rune == ch_c) {
        advance_to_next_rune(t);
        return c;
    }
    return a;
}

gb_inline TokenKind token_kind_variant4(Tokenizer *t, TokenKind a, TokenKind b, Rune ch_c, TokenKind c, Rune ch_d, TokenKind d) {
    if (t->curr_rune == '=') {
        advance_to_next_rune(t);
        return b;
    } else if (t->curr_rune == ch_c) {
        advance_to_next_rune(t);
        return c;
    } else if (t->curr_rune == ch_d) {
        advance_to_next_rune(t);
        return d;
    }
    return a;
}

gb_inline TokenKind token_kind_dub_eq(Tokenizer *t, Rune sing_rune, TokenKind sing, TokenKind sing_eq, TokenKind dub, TokenKind dub_eq) {
    if (t->curr_rune == '=') {
        advance_to_next_rune(t);
        return sing_eq;
    } else if (t->curr_rune == sing_rune) {
        advance_to_next_rune(t);
        if (t->curr_rune == '=') {
            advance_to_next_rune(t);
            return dub_eq;
        }
        return dub;
    }
    return sing;
}
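
// The main entry point: skip whitespace, then dispatch on the first rune.
// Identifiers are scanned greedily and compared against the keyword slice of
// token_strings; digits go to scan_number_to_token; everything else falls
// into the operator/punctuation switch. The token's string is always a slice
// of the source buffer, patched to its final length before returning.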
Token tokenizer_get_token(Tokenizer *t) {
    Token token = {};
    Rune curr_rune;

    tokenizer_skip_whitespace(t);

    token.string = make_string(t->curr, 1);
    token.pos.file = t->fullpath;
    token.pos.line = t->line_count;
    token.pos.column = t->curr - t->line + 1;

    curr_rune = t->curr_rune;
    if (rune_is_letter(curr_rune)) {
        token.kind = Token_Identifier;
        while (rune_is_letter(t->curr_rune) || rune_is_digit(t->curr_rune))
            advance_to_next_rune(t);

        token.string.len = t->curr - token.string.text;

        // NOTE(bill): All keywords are longer than 1 character
        if (token.string.len > 1) {
            if (token.string == token_strings[Token_as]) {
                token.kind = Token_as;
            } else if (token.string == token_strings[Token_transmute]) {
                token.kind = Token_transmute;
            } else if (token.string == token_strings[Token_down_cast]) {
                token.kind = Token_down_cast;
            } else {
                for (i32 k = Token__KeywordBegin+1; k < Token__KeywordEnd; k++) {
                    if (token.string == token_strings[k]) {
                        token.kind = cast(TokenKind)k;
                        break;
                    }
                }
            }
        }
    } else if (gb_is_between(curr_rune, '0', '9')) {
        token = scan_number_to_token(t, false);
    } else {
        advance_to_next_rune(t);
        switch (curr_rune) {
        case GB_RUNE_EOF:
            token.kind = Token_EOF;
            break;

        case '\'':
            token.kind = Token_Prime;
            if (t->curr_rune == '\'') {
                advance_to_next_rune(t);
                token.kind = Token_DoublePrime;
            }
            break;

        case '`': // Raw String Literal
        case '"': // String Literal
        {
            Rune quote = curr_rune;
            token.kind = Token_String;
            if (curr_rune == '"') {
                for (;;) {
                    Rune r = t->curr_rune;
                    if (r == '\n' || r < 0) {
                        tokenizer_err(t, "String literal not terminated");
                        break;
                    }
                    advance_to_next_rune(t);
                    if (r == quote)
                        break;
                    if (r == '\\')
                        scan_escape(t, '"');
                }
            } else {
                for (;;) {
                    Rune r = t->curr_rune;
                    if (r < 0) {
                        tokenizer_err(t, "String literal not terminated");
                        break;
                    }
                    advance_to_next_rune(t);
                    if (r == quote)
                        break;
                }
            }

            token.string.len = t->curr - token.string.text;
            i32 success = unquote_string(gb_heap_allocator(), &token.string);
            if (success > 0) {
                if (success == 2) {
                    array_add(&t->allocated_strings, token.string);
                }
                return token;
            } else {
                tokenizer_err(t, "Invalid string literal");
            }
        } break;
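
        // '.' is three-way ambiguous: ".5" begins a float literal (the dot
        // consumed here is handed back via seen_decimal_point), ".." is
        // Token_Ellipsis, and "..<" is Token_RangeExclusive; otherwise a
        // bare '.' stays Token_Period.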
        case '.':
            token.kind = Token_Period; // Default
            if (gb_is_between(t->curr_rune, '0', '9')) { // Might be a number
                token = scan_number_to_token(t, true);
            } else if (t->curr_rune == '.') { // Could be an ellipsis
                advance_to_next_rune(t);
                token.kind = Token_Ellipsis;
                if (t->curr_rune == '<') {
                    advance_to_next_rune(t);
                    token.kind = Token_RangeExclusive;
                }
            }
            break;

        case '#': token.kind = Token_Hash;         break;
        case '@': token.kind = Token_At;           break;
        case '^': token.kind = Token_Pointer;      break;
        case '?': token.kind = Token_Maybe;        break;
        case ';': token.kind = Token_Semicolon;    break;
        case ',': token.kind = Token_Comma;        break;
        case '(': token.kind = Token_OpenParen;    break;
        case ')': token.kind = Token_CloseParen;   break;
        case '[': token.kind = Token_OpenBracket;  break;
        case ']': token.kind = Token_CloseBracket; break;
        case '{': token.kind = Token_OpenBrace;    break;
        case '}': token.kind = Token_CloseBrace;   break;
        case ':': token.kind = Token_Colon;        break;

        case '*': token.kind = token_kind_variant2(t, Token_Mul, Token_MulEq); break;
        case '%': token.kind = token_kind_variant2(t, Token_Mod, Token_ModEq); break;
        case '=': token.kind = token_kind_variant2(t, Token_Eq, Token_CmpEq);  break;
        case '~': token.kind = token_kind_variant2(t, Token_Xor, Token_XorEq); break;
        case '!': token.kind = token_kind_variant2(t, Token_Not, Token_NotEq); break;
        case '+': token.kind = token_kind_variant3(t, Token_Add, Token_AddEq, '+', Token_Increment); break;
        case '-': token.kind = token_kind_variant4(t, Token_Sub, Token_SubEq, '-', Token_Decrement, '>', Token_ArrowRight); break;
        case '/': {
            if (t->curr_rune == '/') { // Line comment
                // Stop at EOF as well as '\n' so a comment on the last line
                // of a file cannot loop forever
                while (t->curr_rune != '\n' && t->curr_rune >= 0) {
                    advance_to_next_rune(t);
                }
                token.kind = Token_Comment;
            } else if (t->curr_rune == '*') { // Block comment (nestable)
                isize comment_scope = 1;
                advance_to_next_rune(t);
                while (comment_scope > 0) {
                    if (t->curr_rune < 0) { // Unterminated comment reached EOF
                        tokenizer_err(t, "Comment not terminated");
                        break;
                    }
                    if (t->curr_rune == '/') {
                        advance_to_next_rune(t);
                        if (t->curr_rune == '*') {
                            advance_to_next_rune(t);
                            comment_scope++;
                        }
                    } else if (t->curr_rune == '*') {
                        advance_to_next_rune(t);
                        if (t->curr_rune == '/') {
                            advance_to_next_rune(t);
                            comment_scope--;
                        }
                    } else {
                        advance_to_next_rune(t);
                    }
                }
                token.kind = Token_Comment;
            } else {
                token.kind = token_kind_variant2(t, Token_Quo, Token_QuoEq);
            }
        } break;
        case '<':
            if (t->curr_rune == '-') {
                advance_to_next_rune(t); // Consume the '-' so "<-" is one token
                token.kind = Token_ArrowLeft;
            } else {
                token.kind = token_kind_dub_eq(t, '<', Token_Lt, Token_LtEq, Token_Shl, Token_ShlEq);
            }
            break;
        case '>':
            token.kind = token_kind_dub_eq(t, '>', Token_Gt, Token_GtEq, Token_Shr, Token_ShrEq);
            break;

        case '&':
            token.kind = Token_And;
            if (t->curr_rune == '~') {
                token.kind = Token_AndNot;
                advance_to_next_rune(t);
                if (t->curr_rune == '=') {
                    token.kind = Token_AndNotEq;
                    advance_to_next_rune(t);
                }
            } else {
                token.kind = token_kind_dub_eq(t, '&', Token_And, Token_AndEq, Token_CmpAnd, Token_CmpAndEq);
            }
            break;

        case '|': token.kind = token_kind_dub_eq(t, '|', Token_Or, Token_OrEq, Token_CmpOr, Token_CmpOrEq); break;

        default:
            if (curr_rune != GB_RUNE_BOM) {
                u8 str[4] = {};
                int len = cast(int)gb_utf8_encode_rune(str, curr_rune);
                tokenizer_err(t, "Illegal character: %.*s (%d) ", len, str, curr_rune);
            }
            token.kind = Token_Invalid;
            break;
        }
    }

    token.string.len = t->curr - token.string.text;
    return token;
}