lexer_test.go

package parser

import (
	"testing"

	"github.com/dop251/goja/file"
	"github.com/dop251/goja/token"
	"github.com/dop251/goja/unistring"
)
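
// TestLexer drives the scanner over small JavaScript snippets and checks the
// token stream it produces: the token type, the raw literal text, and the
// 1-based position of each token.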
func TestLexer(t *testing.T) {
	tt(t, func() {
		setup := func(src string) *_parser {
			parser := newParser("", src)
			return parser
		}
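		// test scans src from the beginning and compares every token returned
		// by parser.scan() against the expected values that follow: for each
		// token, a token.Token, then its raw literal, then its position.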
		test := func(src string, test ...interface{}) {
			parser := setup(src)
			for len(test) > 0 {
				tkn, literal, _, idx := parser.scan()
				if len(test) > 0 {
					is(tkn, test[0].(token.Token))
					test = test[1:]
				}
				if len(test) > 0 {
					is(literal, unistring.String(test[0].(string)))
					test = test[1:]
				}
				if len(test) > 0 {
					// FIXME terst, Fix this so that cast to file.Idx is not necessary?
					is(idx, file.Idx(test[0].(int)))
					test = test[1:]
				}
			}
		}
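		// Positions are 1-based byte offsets into the source; EOF is reported
		// one byte past the end of the input.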
		test("",
			token.EOF, "", 1,
		)
		test("1",
			token.NUMBER, "1", 1,
			token.EOF, "", 2,
		)
		test(".0",
			token.NUMBER, ".0", 1,
			token.EOF, "", 3,
		)
		test("abc",
			token.IDENTIFIER, "abc", 1,
			token.EOF, "", 4,
		)
		test("abc(1)",
			token.IDENTIFIER, "abc", 1,
			token.LEFT_PARENTHESIS, "", 4,
			token.NUMBER, "1", 5,
			token.RIGHT_PARENTHESIS, "", 6,
			token.EOF, "", 7,
		)
		test(".",
			token.PERIOD, "", 1,
			token.EOF, "", 2,
		)
		test("===.",
			token.STRICT_EQUAL, "", 1,
			token.PERIOD, "", 4,
			token.EOF, "", 5,
		)
		test(">>>=.0",
			token.UNSIGNED_SHIFT_RIGHT_ASSIGN, "", 1,
			token.NUMBER, ".0", 5,
			token.EOF, "", 7,
		)
		test(">>>=0.0.",
			token.UNSIGNED_SHIFT_RIGHT_ASSIGN, "", 1,
			token.NUMBER, "0.0", 5,
			token.PERIOD, "", 8,
			token.EOF, "", 9,
		)
		test("\"abc\"",
			token.STRING, "\"abc\"", 1,
			token.EOF, "", 6,
		)
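		// Line comments are skipped entirely; only the tokens before the
		// comment and the trailing EOF are reported.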
		test("abc = //",
			token.IDENTIFIER, "abc", 1,
			token.ASSIGN, "", 5,
			token.EOF, "", 9,
		)
		test("abc = 1 / 2",
			token.IDENTIFIER, "abc", 1,
			token.ASSIGN, "", 5,
			token.NUMBER, "1", 7,
			token.SLASH, "", 9,
			token.NUMBER, "2", 11,
			token.EOF, "", 12,
		)
		test("xyzzy = 'Nothing happens.'",
			token.IDENTIFIER, "xyzzy", 1,
			token.ASSIGN, "", 7,
			token.STRING, "'Nothing happens.'", 9,
			token.EOF, "", 27,
		)
		test("abc = !false",
			token.IDENTIFIER, "abc", 1,
			token.ASSIGN, "", 5,
			token.NOT, "", 7,
			token.BOOLEAN, "false", 8,
			token.EOF, "", 13,
		)
		test("abc = !!true",
			token.IDENTIFIER, "abc", 1,
			token.ASSIGN, "", 5,
			token.NOT, "", 7,
			token.NOT, "", 8,
			token.BOOLEAN, "true", 9,
			token.EOF, "", 13,
		)
		test("abc *= 1",
			token.IDENTIFIER, "abc", 1,
			token.MULTIPLY_ASSIGN, "", 5,
			token.NUMBER, "1", 8,
			token.EOF, "", 9,
		)
		test("if 1 else",
			token.IF, "if", 1,
			token.NUMBER, "1", 4,
			token.ELSE, "else", 6,
			token.EOF, "", 10,
		)
		test("null",
			token.NULL, "null", 1,
			token.EOF, "", 5,
		)
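		// Escape sequences and backslash-newline line continuations are kept
		// verbatim in the raw string literal.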
		test(`"\u007a\x79\u000a\x78"`,
			token.STRING, "\"\\u007a\\x79\\u000a\\x78\"", 1,
			token.EOF, "", 23,
		)
		test(`"[First line \
Second line \
 Third line\
. ]"
`,
			token.STRING, "\"[First line \\\nSecond line \\\n Third line\\\n. ]\"", 1,
			token.EOF, "", 53,
		)
		test("/",
			token.SLASH, "", 1,
			token.EOF, "", 2,
		)
		test("var abc = \"abc\uFFFFabc\"",
			token.VAR, "var", 1,
			token.IDENTIFIER, "abc", 5,
			token.ASSIGN, "", 9,
			token.STRING, "\"abc\uFFFFabc\"", 11,
			token.EOF, "", 22,
		)
		test(`'\t' === '\r'`,
			token.STRING, "'\\t'", 1,
			token.STRICT_EQUAL, "", 6,
			token.STRING, "'\\r'", 10,
			token.EOF, "", 14,
		)
		test(`var \u0024 = 1`,
			token.VAR, "var", 1,
			token.IDENTIFIER, "\\u0024", 5,
			token.ASSIGN, "", 12,
			token.NUMBER, "1", 14,
			token.EOF, "", 15,
		)
		test("10e10000",
			token.NUMBER, "10e10000", 1,
			token.EOF, "", 9,
		)
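		// Reserved words without a dedicated token type (here "class") are
		// reported as token.KEYWORD.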
		test(`var if var class`,
			token.VAR, "var", 1,
			token.IF, "if", 5,
			token.VAR, "var", 8,
			token.KEYWORD, "class", 12,
			token.EOF, "", 17,
		)
		test(`-0`,
			token.MINUS, "", 1,
			token.NUMBER, "0", 2,
			token.EOF, "", 3,
		)
		test(`.01`,
			token.NUMBER, ".01", 1,
			token.EOF, "", 4,
		)
		test(`.01e+2`,
			token.NUMBER, ".01e+2", 1,
			token.EOF, "", 7,
		)
		test(";",
			token.SEMICOLON, "", 1,
			token.EOF, "", 2,
		)
		test(";;",
			token.SEMICOLON, "", 1,
			token.SEMICOLON, "", 2,
			token.EOF, "", 3,
		)
		test("//",
			token.EOF, "", 3,
		)
		test(";;//",
			token.SEMICOLON, "", 1,
			token.SEMICOLON, "", 2,
			token.EOF, "", 5,
		)
		test("1",
			token.NUMBER, "1", 1,
		)
		test("12 123",
			token.NUMBER, "12", 1,
			token.NUMBER, "123", 4,
		)
		test("1.2 12.3",
			token.NUMBER, "1.2", 1,
			token.NUMBER, "12.3", 5,
		)
		test("/ /=",
			token.SLASH, "", 1,
			token.QUOTIENT_ASSIGN, "", 3,
		)
		test(`"abc"`,
			token.STRING, `"abc"`, 1,
		)
		test(`'abc'`,
			token.STRING, `'abc'`, 1,
		)
		test("++",
			token.INCREMENT, "", 1,
		)
		test(">",
			token.GREATER, "", 1,
		)
		test(">=",
			token.GREATER_OR_EQUAL, "", 1,
		)
		test(">>",
			token.SHIFT_RIGHT, "", 1,
		)
		test(">>=",
			token.SHIFT_RIGHT_ASSIGN, "", 1,
		)
		test(">>>",
			token.UNSIGNED_SHIFT_RIGHT, "", 1,
		)
		test(">>>=",
			token.UNSIGNED_SHIFT_RIGHT_ASSIGN, "", 1,
		)
		test("1 \"abc\"",
			token.NUMBER, "1", 1,
			token.STRING, "\"abc\"", 3,
		)
		test(",",
			token.COMMA, "", 1,
		)
		test("1, \"abc\"",
			token.NUMBER, "1", 1,
			token.COMMA, "", 2,
			token.STRING, "\"abc\"", 4,
		)
		test("new abc(1, 3.14159);",
			token.NEW, "new", 1,
			token.IDENTIFIER, "abc", 5,
			token.LEFT_PARENTHESIS, "", 8,
			token.NUMBER, "1", 9,
			token.COMMA, "", 10,
			token.NUMBER, "3.14159", 12,
			token.RIGHT_PARENTHESIS, "", 19,
			token.SEMICOLON, "", 20,
		)
		test("1 == \"1\"",
			token.NUMBER, "1", 1,
			token.EQUAL, "", 3,
			token.STRING, "\"1\"", 6,
		)
		test("1\n[]\n",
			token.NUMBER, "1", 1,
			token.LEFT_BRACKET, "", 3,
			token.RIGHT_BRACKET, "", 4,
		)
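		// A U+FEFF byte order mark between tokens is skipped as whitespace;
		// since positions are byte offsets, the 3-byte BOM pushes the brackets
		// to offsets 5 and 6.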
		test("1\ufeff[]\ufeff",
			token.NUMBER, "1", 1,
			token.LEFT_BRACKET, "", 5,
			token.RIGHT_BRACKET, "", 6,
		)

		// ILLEGAL
		test(`3ea`,
			token.ILLEGAL, "3e", 1,
			token.IDENTIFIER, "a", 3,
			token.EOF, "", 4,
		)
		test(`3in`,
			token.ILLEGAL, "3", 1,
			token.IN, "in", 2,
			token.EOF, "", 4,
		)
		test("\"Hello\nWorld\"",
			token.ILLEGAL, "", 1,
			token.IDENTIFIER, "World", 8,
			token.ILLEGAL, "", 13,
			token.EOF, "", 14,
		)
		test("\u203f = 10",
			token.ILLEGAL, "", 1,
			token.ASSIGN, "", 5,
			token.NUMBER, "10", 7,
			token.EOF, "", 9,
		)
		test(`"\x0G"`,
			token.ILLEGAL, "\"\\x0G\"", 1,
			//token.STRING, "\"\\x0G\"", 1,
			token.EOF, "", 7,
		)
	})
}