-- test.lua
  1. #!/usr/bin/env lua
  2. -- CJSON tests
  3. --
  4. -- Mark Pulford <[email protected]>
  5. --
  6. -- Note: The output of this script is easier to read with "less -S"
  7. require "common"
  8. local json = require "cjson"
  9. local function gen_ascii()
  10. local chars = {}
  11. for i = 0, 255 do chars[i + 1] = string.char(i) end
  12. return table.concat(chars)
  13. end
  14. -- Generate every UTF-16 codepoint, including supplementary codes
  15. local function gen_utf16_escaped()
  16. -- Create raw table escapes
  17. local utf16_escaped = {}
  18. local count = 0
  19. local function append_escape(code)
  20. local esc = string.format('\\u%04X', code)
  21. table.insert(utf16_escaped, esc)
  22. end
  23. table.insert(utf16_escaped, '"')
  24. for i = 0, 0xD7FF do
  25. append_escape(i)
  26. end
  27. -- Skip 0xD800 - 0xDFFF since they are used to encode supplementary
  28. -- codepoints
  29. for i = 0xE000, 0xFFFF do
  30. append_escape(i)
  31. end
  32. -- Append surrogate pair for each supplementary codepoint
  33. for high = 0xD800, 0xDBFF do
  34. for low = 0xDC00, 0xDFFF do
  35. append_escape(high)
  36. append_escape(low)
  37. end
  38. end
  39. table.insert(utf16_escaped, '"')
  40. return table.concat(utf16_escaped)
  41. end
  42. function test_decode_cycle(filename)
  43. local obj1 = json.decode(file_load(filename))
  44. local obj2 = json.decode(json.encode(obj1))
  45. return compare_values(obj1, obj2)
  46. end
  47. local Inf = math.huge;
  48. local NaN = math.huge * 0;
  49. local octets_raw = gen_ascii()
  50. local octets_escaped = file_load("octets-escaped.dat")
  51. local utf8_loaded, utf8_raw = pcall(file_load, "utf8.dat")
  52. if not utf8_loaded then
  53. utf8_raw = "Failed to load utf8.dat"
  54. end
  55. local utf16_escaped = gen_utf16_escaped()
  56. local nested5 = {{{{{ "nested" }}}}}
  57. local table_cycle = {}
  58. local table_cycle2 = { table_cycle }
  59. table_cycle[1] = table_cycle2
  60. local decode_simple_tests = {
  61. { json.decode, { '"test string"' }, true, { "test string" } },
  62. { json.decode, { '-5e3' }, true, { -5000 } },
  63. { json.decode, { 'null' }, true, { json.null } },
  64. { json.decode, { 'true' }, true, { true } },
  65. { json.decode, { 'false' }, true, { false } },
  66. { json.decode, { '{ "1": "one", "3": "three" }' },
  67. true, { { ["1"] = "one", ["3"] = "three" } } },
  68. { json.decode, { '[ "one", null, "three" ]' },
  69. true, { { "one", json.null, "three" } } }
  70. }
  71. local encode_simple_tests = {
  72. { json.encode, { json.null }, true, { 'null' } },
  73. { json.encode, { true }, true, { 'true' } },
  74. { json.encode, { false }, true, { 'false' } },
  75. { json.encode, { { } }, true, { '{}' } },
  76. { json.encode, { 10 }, true, { '10' } },
  77. { json.encode, { NaN },
  78. false, { "Cannot serialise number: must not be NaN or Inf" } },
  79. { json.encode, { Inf },
  80. false, { "Cannot serialise number: must not be NaN or Inf" } },
  81. { json.encode, { "hello" }, true, { '"hello"' } },
  82. }
  83. local decode_numeric_tests = {
  84. { json.decode, { '[ 0.0, -1, 0.3e-3, 1023.2 ]' },
  85. true, { { 0.0, -1, 0.0003, 1023.2 } } },
  86. { json.decode, { '00123' }, true, { 123 } },
  87. { json.decode, { '05.2' }, true, { 5.2 } },
  88. { json.decode, { '0e10' }, true, { 0 } },
  89. { json.decode, { '0x6' }, true, { 6 } },
  90. { json.decode, { '[ +Inf, Inf, -Inf ]' }, true, { { Inf, Inf, -Inf } } },
  91. { json.decode, { '[ +Infinity, Infinity, -Infinity ]' },
  92. true, { { Inf, Inf, -Inf } } },
  93. { json.decode, { '[ +NaN, NaN, -NaN ]' }, true, { { NaN, NaN, NaN } } },
  94. { json.decode, { 'Infrared' },
  95. false, { "Expected the end but found invalid token at character 4" } },
  96. { json.decode, { 'Noodle' },
  97. false, { "Expected value but found invalid token at character 1" } },
  98. }
  99. local encode_table_tests = {
  100. function()
  101. cjson.encode_sparse_array(true, 2, 3)
  102. cjson.encode_max_depth(5)
  103. return "Setting sparse array (true, 2, 3) / max depth (5)"
  104. end,
  105. { json.encode, { { [3] = "sparse test" } },
  106. true, { '[null,null,"sparse test"]' } },
  107. { json.encode, { { [1] = "one", [4] = "sparse test" } },
  108. true, { '["one",null,null,"sparse test"]' } },
  109. { json.encode, { { [1] = "one", [5] = "sparse test" } },
  110. true, { '{"1":"one","5":"sparse test"}' } },
  111. { json.encode, { nested5 }, true, { '[[[[["nested"]]]]]' } },
  112. { json.encode, { { nested5 } },
  113. false, { "Cannot serialise, excessive nesting (6)" } },
  114. { json.encode, { table_cycle },
  115. false, { "Cannot serialise, excessive nesting (6)" } }
  116. }
  117. local encode_error_tests = {
  118. { json.encode, { { [false] = "wrong" } },
  119. false, { "Cannot serialise boolean: table key must be a number or string" } },
  120. { json.encode, { function () end },
  121. false, { "Cannot serialise function: type not supported" } },
  122. function ()
  123. json.refuse_invalid_numbers("encode")
  124. return 'Setting refuse_invalid_numbers("encode")'
  125. end,
  126. { json.encode, { NaN },
  127. false, { "Cannot serialise number: must not be NaN or Inf" } },
  128. { json.encode, { Inf },
  129. false, { "Cannot serialise number: must not be NaN or Inf" } },
  130. function ()
  131. json.refuse_invalid_numbers(false)
  132. return 'Setting refuse_invalid_numbers(false).'
  133. end,
  134. function ()
  135. print('NOTE: receiving "-nan" in the following test is ok..')
  136. return
  137. end,
  138. { json.encode, { NaN }, true, { "nan" } },
  139. { json.encode, { Inf }, true, { "inf" } },
  140. function ()
  141. json.refuse_invalid_numbers("encode")
  142. return 'Setting refuse_invalid_numbers("encode")'
  143. end,
  144. }
  145. local json_nested = string.rep("[", 100000) .. string.rep("]", 100000)
  146. local decode_error_tests = {
  147. { json.decode, { '\0"\0"' },
  148. false, { "JSON parser does not support UTF-16 or UTF-32" } },
  149. { json.decode, { '"\0"\0' },
  150. false, { "JSON parser does not support UTF-16 or UTF-32" } },
  151. { json.decode, { '{ "unexpected eof": ' },
  152. false, { "Expected value but found T_END at character 21" } },
  153. { json.decode, { '{ "extra data": true }, false' },
  154. false, { "Expected the end but found T_COMMA at character 23" } },
  155. { json.decode, { ' { "bad escape \\q code" } ' },
  156. false, { "Expected object key string but found invalid escape code at character 16" } },
  157. { json.decode, { ' { "bad unicode \\u0f6 escape" } ' },
  158. false, { "Expected object key string but found invalid unicode escape code at character 17" } },
  159. { json.decode, { ' [ "bad barewood", test ] ' },
  160. false, { "Expected value but found invalid token at character 20" } },
  161. { json.decode, { '[ -+12 ]' },
  162. false, { "Expected value but found invalid number at character 3" } },
  163. { json.decode, { '-v' },
  164. false, { "Expected value but found invalid number at character 1" } },
  165. { json.decode, { '[ 0.4eg10 ]' },
  166. false, { "Expected comma or array end but found invalid token at character 6" } },
  167. { json.decode, { json_nested },
  168. false, { "Too many nested data structures" } }
  169. }
  170. local escape_tests = {
  171. -- Test 8bit clean
  172. { json.encode, { octets_raw }, true, { octets_escaped } },
  173. { json.decode, { octets_escaped }, true, { octets_raw } },
  174. -- Ensure high bits are removed from surrogate codes
  175. { json.decode, { '"\\uF800"' }, true, { "\239\160\128" } },
  176. -- Test inverted surrogate pairs
  177. { json.decode, { '"\\uDB00\\uD800"' },
  178. false, { "Expected value but found invalid unicode escape code at character 2" } },
  179. -- Test 2x high surrogate code units
  180. { json.decode, { '"\\uDB00\\uDB00"' },
  181. false, { "Expected value but found invalid unicode escape code at character 2" } },
  182. -- Test invalid 2nd escape
  183. { json.decode, { '"\\uDB00\\"' },
  184. false, { "Expected value but found invalid unicode escape code at character 2" } },
  185. { json.decode, { '"\\uDB00\\uD"' },
  186. false, { "Expected value but found invalid unicode escape code at character 2" } },
  187. -- Test decoding of all UTF-16 escapes
  188. { json.decode, { utf16_escaped }, true, { utf8_raw } }
  189. }
  190. print(string.format("Testing CJSON v%s\n", cjson.version))
  191. run_test_group("decode simple value", decode_simple_tests)
  192. run_test_group("encode simple value", encode_simple_tests)
  193. run_test_group("decode numeric", decode_numeric_tests)
  194. -- INCLUDE:
  195. -- - Sparse array exception..
  196. -- - ..
  197. -- cjson.encode_sparse_array(true, 2, 3)
  198. run_test_group("encode table", encode_table_tests)
  199. run_test_group("decode error", decode_error_tests)
  200. run_test_group("encode error", encode_error_tests)
  201. run_test_group("escape", escape_tests)
  202. cjson.refuse_invalid_numbers(false)
  203. cjson.encode_max_depth(20)
  204. for i = 1, #arg do
  205. run_test("decode cycle " .. arg[i], test_decode_cycle, { arg[i] },
  206. true, { true })
  207. end
  208. -- vi:ai et sw=4 ts=4: