lexer_test.go

package parser

import (
	"testing"

	"github.com/dop251/goja/file"
	"github.com/dop251/goja/token"
)
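
// TestLexer feeds a series of source snippets to the scanner and checks
// the (token, literal, position) triples it produces, including illegal input.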
func TestLexer(t *testing.T) {
	tt(t, func() {
		setup := func(src string) *_parser {
			parser := newParser("", src)
			return parser
		}

		// test scans src and compares each scanned token against the expected
		// values, given as repeating (token, literal, position) triples.
		test := func(src string, test ...interface{}) {
			parser := setup(src)
			for len(test) > 0 {
				tkn, literal, idx := parser.scan()
				if len(test) > 0 {
					is(tkn, test[0].(token.Token))
					test = test[1:]
				}
				if len(test) > 0 {
					is(literal, test[0].(string))
					test = test[1:]
				}
				if len(test) > 0 {
					// FIXME terst, Fix this so that cast to file.Idx is not necessary?
					is(idx, file.Idx(test[0].(int)))
					test = test[1:]
				}
			}
		}

		test("",
			token.EOF, "", 1,
		)

		test("1",
			token.NUMBER, "1", 1,
			token.EOF, "", 2,
		)

		test(".0",
			token.NUMBER, ".0", 1,
			token.EOF, "", 3,
		)

		test("abc",
			token.IDENTIFIER, "abc", 1,
			token.EOF, "", 4,
		)

		test("abc(1)",
			token.IDENTIFIER, "abc", 1,
			token.LEFT_PARENTHESIS, "", 4,
			token.NUMBER, "1", 5,
			token.RIGHT_PARENTHESIS, "", 6,
			token.EOF, "", 7,
		)

		test(".",
			token.PERIOD, "", 1,
			token.EOF, "", 2,
		)

		test("===.",
			token.STRICT_EQUAL, "", 1,
			token.PERIOD, "", 4,
			token.EOF, "", 5,
		)

		test(">>>=.0",
			token.UNSIGNED_SHIFT_RIGHT_ASSIGN, "", 1,
			token.NUMBER, ".0", 5,
			token.EOF, "", 7,
		)

		test(">>>=0.0.",
			token.UNSIGNED_SHIFT_RIGHT_ASSIGN, "", 1,
			token.NUMBER, "0.0", 5,
			token.PERIOD, "", 8,
			token.EOF, "", 9,
		)

		test("\"abc\"",
			token.STRING, "\"abc\"", 1,
			token.EOF, "", 6,
		)

		test("abc = //",
			token.IDENTIFIER, "abc", 1,
			token.ASSIGN, "", 5,
			token.EOF, "", 9,
		)

		test("abc = 1 / 2",
			token.IDENTIFIER, "abc", 1,
			token.ASSIGN, "", 5,
			token.NUMBER, "1", 7,
			token.SLASH, "", 9,
			token.NUMBER, "2", 11,
			token.EOF, "", 12,
		)

		test("xyzzy = 'Nothing happens.'",
			token.IDENTIFIER, "xyzzy", 1,
			token.ASSIGN, "", 7,
			token.STRING, "'Nothing happens.'", 9,
			token.EOF, "", 27,
		)
		test("abc = !false",
			token.IDENTIFIER, "abc", 1,
			token.ASSIGN, "", 5,
			token.NOT, "", 7,
			token.BOOLEAN, "false", 8,
			token.EOF, "", 13,
		)

		test("abc = !!true",
			token.IDENTIFIER, "abc", 1,
			token.ASSIGN, "", 5,
			token.NOT, "", 7,
			token.NOT, "", 8,
			token.BOOLEAN, "true", 9,
			token.EOF, "", 13,
		)

		test("abc *= 1",
			token.IDENTIFIER, "abc", 1,
			token.MULTIPLY_ASSIGN, "", 5,
			token.NUMBER, "1", 8,
			token.EOF, "", 9,
		)

		test("if 1 else",
			token.IF, "if", 1,
			token.NUMBER, "1", 4,
			token.ELSE, "else", 6,
			token.EOF, "", 10,
		)

		test("null",
			token.NULL, "null", 1,
			token.EOF, "", 5,
		)

		test(`"\u007a\x79\u000a\x78"`,
			token.STRING, "\"\\u007a\\x79\\u000a\\x78\"", 1,
			token.EOF, "", 23,
		)

		test(`"[First line \
Second line \
 Third line\
. ]"
	`,
			token.STRING, "\"[First line \\\nSecond line \\\n Third line\\\n. ]\"", 1,
			token.EOF, "", 53,
		)

		test("/",
			token.SLASH, "", 1,
			token.EOF, "", 2,
		)

		test("var abc = \"abc\uFFFFabc\"",
			token.VAR, "var", 1,
			token.IDENTIFIER, "abc", 5,
			token.ASSIGN, "", 9,
			token.STRING, "\"abc\uFFFFabc\"", 11,
			token.EOF, "", 22,
		)

		test(`'\t' === '\r'`,
			token.STRING, "'\\t'", 1,
			token.STRICT_EQUAL, "", 6,
			token.STRING, "'\\r'", 10,
			token.EOF, "", 14,
		)
		test(`var \u0024 = 1`,
			token.VAR, "var", 1,
			token.IDENTIFIER, "$", 5,
			token.ASSIGN, "", 12,
			token.NUMBER, "1", 14,
			token.EOF, "", 15,
		)

		test("10e10000",
			token.NUMBER, "10e10000", 1,
			token.EOF, "", 9,
		)

		test(`var if var class`,
			token.VAR, "var", 1,
			token.IF, "if", 5,
			token.VAR, "var", 8,
			token.KEYWORD, "class", 12,
			token.EOF, "", 17,
		)

		test(`-0`,
			token.MINUS, "", 1,
			token.NUMBER, "0", 2,
			token.EOF, "", 3,
		)

		test(`.01`,
			token.NUMBER, ".01", 1,
			token.EOF, "", 4,
		)

		test(`.01e+2`,
			token.NUMBER, ".01e+2", 1,
			token.EOF, "", 7,
		)

		test(";",
			token.SEMICOLON, "", 1,
			token.EOF, "", 2,
		)

		test(";;",
			token.SEMICOLON, "", 1,
			token.SEMICOLON, "", 2,
			token.EOF, "", 3,
		)

		test("//",
			token.EOF, "", 3,
		)

		test(";;//",
			token.SEMICOLON, "", 1,
			token.SEMICOLON, "", 2,
			token.EOF, "", 5,
		)

		test("1",
			token.NUMBER, "1", 1,
		)

		test("12 123",
			token.NUMBER, "12", 1,
			token.NUMBER, "123", 4,
		)

		test("1.2 12.3",
			token.NUMBER, "1.2", 1,
			token.NUMBER, "12.3", 5,
		)
		test("/ /=",
			token.SLASH, "", 1,
			token.QUOTIENT_ASSIGN, "", 3,
		)

		test(`"abc"`,
			token.STRING, `"abc"`, 1,
		)

		test(`'abc'`,
			token.STRING, `'abc'`, 1,
		)

		test("++",
			token.INCREMENT, "", 1,
		)

		test(">",
			token.GREATER, "", 1,
		)

		test(">=",
			token.GREATER_OR_EQUAL, "", 1,
		)

		test(">>",
			token.SHIFT_RIGHT, "", 1,
		)

		test(">>=",
			token.SHIFT_RIGHT_ASSIGN, "", 1,
		)

		test(">>>",
			token.UNSIGNED_SHIFT_RIGHT, "", 1,
		)

		test(">>>=",
			token.UNSIGNED_SHIFT_RIGHT_ASSIGN, "", 1,
		)

		test("1 \"abc\"",
			token.NUMBER, "1", 1,
			token.STRING, "\"abc\"", 3,
		)

		test(",",
			token.COMMA, "", 1,
		)

		test("1, \"abc\"",
			token.NUMBER, "1", 1,
			token.COMMA, "", 2,
			token.STRING, "\"abc\"", 4,
		)

		test("new abc(1, 3.14159);",
			token.NEW, "new", 1,
			token.IDENTIFIER, "abc", 5,
			token.LEFT_PARENTHESIS, "", 8,
			token.NUMBER, "1", 9,
			token.COMMA, "", 10,
			token.NUMBER, "3.14159", 12,
			token.RIGHT_PARENTHESIS, "", 19,
			token.SEMICOLON, "", 20,
		)

		test("1 == \"1\"",
			token.NUMBER, "1", 1,
			token.EQUAL, "", 3,
			token.STRING, "\"1\"", 6,
		)

		test("1\n[]\n",
			token.NUMBER, "1", 1,
			token.LEFT_BRACKET, "", 3,
			token.RIGHT_BRACKET, "", 4,
		)

		test("1\ufeff[]\ufeff",
			token.NUMBER, "1", 1,
			token.LEFT_BRACKET, "", 5,
			token.RIGHT_BRACKET, "", 6,
		)
		// ILLEGAL
		test(`3ea`,
			token.ILLEGAL, "3e", 1,
			token.IDENTIFIER, "a", 3,
			token.EOF, "", 4,
		)

		test(`3in`,
			token.ILLEGAL, "3", 1,
			token.IN, "in", 2,
			token.EOF, "", 4,
		)

		test("\"Hello\nWorld\"",
			token.ILLEGAL, "", 1,
			token.IDENTIFIER, "World", 8,
			token.ILLEGAL, "", 13,
			token.EOF, "", 14,
		)

		test("\u203f = 10",
			token.ILLEGAL, "", 1,
			token.ASSIGN, "", 5,
			token.NUMBER, "10", 7,
			token.EOF, "", 9,
		)

		test(`"\x0G"`,
			token.STRING, "\"\\x0G\"", 1,
			token.EOF, "", 7,
		)
	})
}