diff options
Diffstat (limited to 'tests/test.lua')
-rwxr-xr-x | tests/test.lua | 252 |
1 files changed, 252 insertions, 0 deletions
diff --git a/tests/test.lua b/tests/test.lua new file mode 100755 index 0000000..c860878 --- /dev/null +++ b/tests/test.lua | |||
@@ -0,0 +1,252 @@ | |||
1 | #!/usr/bin/env lua | ||
2 | |||
3 | -- CJSON tests | ||
4 | -- | ||
5 | -- Mark Pulford <mark@kyne.com.au> | ||
6 | -- | ||
7 | -- Note: The output of this script is easier to read with "less -S" | ||
8 | |||
9 | local json = require "cjson" | ||
10 | local misc = require "cjson-misc" | ||
11 | |||
-- Build a 256 byte string containing every octet value 0x00 - 0xFF in
-- ascending order. Used to verify the codec is 8-bit clean.
local function gen_ascii()
    local bytes = {}
    for code = 0, 255 do
        bytes[#bytes + 1] = string.char(code)
    end
    return table.concat(bytes)
end
17 | |||
-- Generate every UTF-16 codepoint, including supplementary codes
--
-- Returns a JSON string literal (quotes included) containing a \uXXXX
-- escape for every UTF-16 code unit in the BMP, followed by the
-- surrogate pair encoding of every supplementary codepoint.
local function gen_utf16_escaped()
    -- Create raw table escapes
    local utf16_escaped = {}

    -- Append the six character escape sequence for one UTF-16 code unit
    local function append_escape(code)
        local esc = string.format('\\u%04X', code)
        table.insert(utf16_escaped, esc)
    end

    table.insert(utf16_escaped, '"')
    -- BMP codepoints below the surrogate range
    for i = 0, 0xD7FF do
        append_escape(i)
    end
    -- Skip 0xD800 - 0xDFFF since they are used to encode supplementary
    -- codepoints
    for i = 0xE000, 0xFFFF do
        append_escape(i)
    end
    -- Append surrogate pair for each supplementary codepoint
    for high = 0xD800, 0xDBFF do
        for low = 0xDC00, 0xDFFF do
            append_escape(high)
            append_escape(low)
        end
    end
    table.insert(utf16_escaped, '"')

    return table.concat(utf16_escaped)
end
49 | |||
-- Decode a JSON file, re-encode and re-decode the result, then verify
-- the two decoded values are structurally identical.
-- Returns true when the decode/encode round trip is lossless.
function test_decode_cycle(filename)
    -- file_load lives in the cjson-misc module (see the other call sites
    -- below); a bare global file_load() does not exist and would raise
    -- "attempt to call a nil value" at runtime.
    local obj1 = json.decode(misc.file_load(filename))
    local obj2 = json.decode(json.encode(obj1))
    return misc.compare_values(obj1, obj2)
end
55 | |||
-- Shared fixtures for the test groups below
local Inf = math.huge
local NaN = math.huge * 0

local octets_raw = gen_ascii()
local octets_escaped = misc.file_load("octets-escaped.dat")

-- utf8.dat is optional; substitute a marker string when it is missing
local utf8_loaded, utf8_raw = pcall(misc.file_load, "utf8.dat")
if not utf8_loaded then
    utf8_raw = "Failed to load utf8.dat"
end

local utf16_escaped = gen_utf16_escaped()

local nested5 = {{{{{ "nested" }}}}}

-- Two tables referencing each other, forming a reference cycle
local table_cycle = {}
local table_cycle2 = { table_cycle }
table_cycle[1] = table_cycle2
69 | |||
-- Each record: { function, { arguments }, expect_success, { expected results } }
local decode_simple_tests = {
    { json.decode, { '"test string"' }, true, { "test string" } },
    { json.decode, { '-5e3' }, true, { -5000 } },
    { json.decode, { 'null' }, true, { json.null } },
    { json.decode, { 'true' }, true, { true } },
    { json.decode, { 'false' }, true, { false } },
    { json.decode, { '{ "1": "one", "3": "three" }' },
      true, { { ["1"] = "one", ["3"] = "three" } } },
    { json.decode, { '[ "one", null, "three" ]' },
      true, { { "one", json.null, "three" } } }
}
81 | |||
-- Each record: { function, { arguments }, expect_success, { expected results } }
local encode_simple_tests = {
    { json.encode, { json.null }, true, { 'null' } },
    { json.encode, { true }, true, { 'true' } },
    { json.encode, { false }, true, { 'false' } },
    { json.encode, { { } }, true, { '{}' } },
    { json.encode, { 10 }, true, { '10' } },
    -- NaN/Inf are refused under the default configuration
    { json.encode, { NaN },
      false, { "Cannot serialise number: must not be NaN or Inf" } },
    { json.encode, { Inf },
      false, { "Cannot serialise number: must not be NaN or Inf" } },
    { json.encode, { "hello" }, true, { '"hello"' } },
}
94 | |||
-- Each record: { function, { arguments }, expect_success, { expected results } }
-- These exercise the decoder's non-standard numeric extensions as well:
-- leading zeros, hex literals, and signed Inf/Infinity/NaN tokens.
local decode_numeric_tests = {
    { json.decode, { '[ 0.0, -1, 0.3e-3, 1023.2 ]' },
      true, { { 0.0, -1, 0.0003, 1023.2 } } },
    { json.decode, { '00123' }, true, { 123 } },
    { json.decode, { '05.2' }, true, { 5.2 } },
    { json.decode, { '0e10' }, true, { 0 } },
    { json.decode, { '0x6' }, true, { 6 } },
    { json.decode, { '[ +Inf, Inf, -Inf ]' }, true, { { Inf, Inf, -Inf } } },
    { json.decode, { '[ +Infinity, Infinity, -Infinity ]' },
      true, { { Inf, Inf, -Inf } } },
    { json.decode, { '[ +NaN, NaN, -NaN ]' }, true, { { NaN, NaN, NaN } } },
    -- "Inf" parses but trailing garbage must be rejected
    { json.decode, { 'Infrared' },
      false, { "Expected the end but found invalid token at character 4" } },
    { json.decode, { 'Noodle' },
      false, { "Expected value but found invalid token at character 1" } },
}
111 | |||
-- Each record: { function, { arguments }, expect_success, { expected results } }
-- A bare function entry is run for its side effects; its return value is
-- used as the description of the configuration change.
local encode_table_tests = {
    function()
        -- Configure sparse array conversion and nesting limit for the
        -- entries below
        json.encode_sparse_array(true, 2, 3)
        json.encode_max_depth(5)
        return "Setting sparse array (true, 2, 3) / max depth (5)"
    end,
    { json.encode, { { [3] = "sparse test" } },
      true, { '[null,null,"sparse test"]' } },
    { json.encode, { { [1] = "one", [4] = "sparse test" } },
      true, { '["one",null,null,"sparse test"]' } },
    -- Too sparse for the (2, 3) settings: encoded as an object instead
    { json.encode, { { [1] = "one", [5] = "sparse test" } },
      true, { '{"1":"one","5":"sparse test"}' } },

    { json.encode, { { ["2"] = "numeric string key test" } },
      true, { '{"2":"numeric string key test"}' } },

    -- nested5 is exactly at the depth limit; one more level fails
    { json.encode, { nested5 }, true, { '[[[[["nested"]]]]]' } },
    { json.encode, { { nested5 } },
      false, { "Cannot serialise, excessive nesting (6)" } },
    -- A reference cycle trips the depth guard rather than looping forever
    { json.encode, { table_cycle },
      false, { "Cannot serialise, excessive nesting (6)" } }
}
134 | |||
-- Each record: { function, { arguments }, expect_success, { expected error } }
-- Function entries toggle invalid number handling between sub-tests.
local encode_error_tests = {
    { json.encode, { { [false] = "wrong" } },
      false, { "Cannot serialise boolean: table key must be a number or string" } },
    { json.encode, { function () end },
      false, { "Cannot serialise function: type not supported" } },
    function ()
        json.refuse_invalid_numbers("encode")
        return 'Setting refuse_invalid_numbers("encode")'
    end,
    { json.encode, { NaN },
      false, { "Cannot serialise number: must not be NaN or Inf" } },
    { json.encode, { Inf },
      false, { "Cannot serialise number: must not be NaN or Inf" } },
    function ()
        json.refuse_invalid_numbers(false)
        return 'Setting refuse_invalid_numbers(false).'
    end,
    -- With checking disabled, NaN/Inf encode to non-standard tokens
    { json.encode, { NaN }, true, { "nan" } },
    { json.encode, { Inf }, true, { "inf" } },
    function ()
        json.refuse_invalid_numbers("encode")
        return 'Setting refuse_invalid_numbers("encode")'
    end,
}
159 | |||
-- 100,000 nested arrays, used to exercise the decoder's depth guard
local json_nested = string.rep("[", 100000) .. string.rep("]", 100000)

-- Each record: { function, { arguments }, expect_success, { expected error } }
local decode_error_tests = {
    -- A leading/trailing NUL byte looks like a UTF-16/32 BOM pattern
    { json.decode, { '\0"\0"' },
      false, { "JSON parser does not support UTF-16 or UTF-32" } },
    { json.decode, { '"\0"\0' },
      false, { "JSON parser does not support UTF-16 or UTF-32" } },
    { json.decode, { '{ "unexpected eof": ' },
      false, { "Expected value but found T_END at character 21" } },
    { json.decode, { '{ "extra data": true }, false' },
      false, { "Expected the end but found T_COMMA at character 23" } },
    { json.decode, { ' { "bad escape \\q code" } ' },
      false, { "Expected object key string but found invalid escape code at character 16" } },
    { json.decode, { ' { "bad unicode \\u0f6 escape" } ' },
      false, { "Expected object key string but found invalid unicode escape code at character 17" } },
    { json.decode, { ' [ "bad barewood", test ] ' },
      false, { "Expected value but found invalid token at character 20" } },
    { json.decode, { '[ -+12 ]' },
      false, { "Expected value but found invalid number at character 3" } },
    { json.decode, { '-v' },
      false, { "Expected value but found invalid number at character 1" } },
    { json.decode, { '[ 0.4eg10 ]' },
      false, { "Expected comma or array end but found invalid token at character 6" } },
    { json.decode, { json_nested },
      false, { "Too many nested data structures" } }
}
186 | |||
-- Each record: { function, { arguments }, expect_success, { expected results } }
local escape_tests = {
    -- Test 8bit clean
    { json.encode, { octets_raw }, true, { octets_escaped } },
    { json.decode, { octets_escaped }, true, { octets_raw } },
    -- Ensure high bits are removed from surrogate codes
    { json.decode, { '"\\uF800"' }, true, { "\239\160\128" } },
    -- Test inverted surrogate pairs
    { json.decode, { '"\\uDB00\\uD800"' },
      false, { "Expected value but found invalid unicode escape code at character 2" } },
    -- Test 2x high surrogate code units
    { json.decode, { '"\\uDB00\\uDB00"' },
      false, { "Expected value but found invalid unicode escape code at character 2" } },
    -- Test invalid 2nd escape
    { json.decode, { '"\\uDB00\\"' },
      false, { "Expected value but found invalid unicode escape code at character 2" } },
    { json.decode, { '"\\uDB00\\uD"' },
      false, { "Expected value but found invalid unicode escape code at character 2" } },
    -- Test decoding of all UTF-16 escapes
    { json.decode, { utf16_escaped }, true, { utf8_raw } }
}
207 | |||
-- The standard Lua interpreter is ANSI C and doesn't support locales
-- by default. Force a known problematic locale to test strtod()/sprintf().
local locale_tests = {
    function ()
        -- cs_CZ uses "," as the decimal separator
        os.setlocale("cs_CZ")
        -- NOTE(review): json.new() presumably refreshes locale dependent
        -- state inside cjson — confirm against the module documentation
        json.new()
        return "Setting locale to cs_CZ (comma separator)"
    end,
    { json.encode, { 1.5 }, true, { '1.5' } },
    { json.decode, { "[ 10, \"test\" ]" }, true, { { 10, "test" } } },
    function ()
        os.setlocale("C")
        json.new()
        return "Reverting locale to POSIX"
    end
}
224 | |||
print(string.format("Testing Lua CJSON version %s\n", json.version))

-- Run each test group in order
local groups = {
    { "decode simple value", decode_simple_tests },
    { "encode simple value", encode_simple_tests },
    { "decode numeric", decode_numeric_tests },
    { "encode table", encode_table_tests },
    { "decode error", decode_error_tests },
    { "encode error", encode_error_tests },
    { "escape", escape_tests },
    { "locale", locale_tests },
}
for _, group in ipairs(groups) do
    misc.run_test_group(group[1], group[2])
end

-- Round trip any JSON files named on the command line
json.refuse_invalid_numbers(false)
json.encode_max_depth(20)
for _, filename in ipairs(arg) do
    misc.run_test("decode cycle " .. filename, test_decode_cycle,
                  { filename }, true, { true })
end

local pass, total = misc.run_test_summary()

if pass == total then
    print("==> Summary: all tests succeeded")
else
    print(string.format("==> Summary: %d/%d tests failed", total - pass, total))
    os.exit(1)
end
251 | |||
252 | -- vi:ai et sw=4 ts=4: | ||