author     Mark Pulford <mark@kyne.com.au>    2012-01-16 19:36:59 +1030
committer  Mark Pulford <mark@kyne.com.au>    2012-03-04 18:54:35 +1030
commit     c9a4e121f18a1c36b3b0065aa7eed7097f12356c (patch)
tree       6fd0996973232c26d2591f080cd1315a561b668a /tests
parent     1c1adf1f43b78ac73617437d97d191546d9d6ab7 (diff)
Tidy tests and improve coverage
- Rearrange test sections to keep more related tests together.
- Test configuration functions for errors.
- Add tests for more parts of the Lua CJSON API:
  - _NAME, _VERSION, version
  - encode_keep_buffer
  - encode_number_precision
  - decode_invalid_numbers
  - decode_max_depth
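For context on the diff below: most entries in the `all_tests` table follow a single table-driven shape consumed by `util.run_test_group()` — a description, the function under test, an argument list, whether the call should succeed, and the expected return values (a few setup entries instead pass just a description and a closure). A minimal illustrative sketch of that shape, built only from entries visible in this patch and assuming the script's usual `local json = require "cjson"` setup:

```lua
-- Sketch of the table-driven entry format used by tests/test.lua (illustrative only).
-- Entry shape: { description, func, { arguments }, expect_success, { expected results } }
local json = require "cjson"        -- assumes the Lua CJSON module is installed

local example_tests = {
    -- A call expected to succeed and return "test string"
    { "Decode string",
      json.decode, { '"test string"' }, true, { "test string" } },

    -- A configuration call: decode_max_depth(5) should succeed and return 5
    { "Set decode_max_depth(5)",
      json.decode_max_depth, { 5 }, true, { 5 } },

    -- A call expected to fail with this exact error (the depth limit is now 5)
    { "Decode array over nested limit",
      json.decode, { '[[[[[[ "nested" ]]]]]]' },
      false, { "Too many nested data structures" } },
}

-- The real suite runs such a table with: util.run_test_group(example_tests)
```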
Diffstat (limited to 'tests')
-rwxr-xr-x  tests/test.lua  196
1 file changed, 112 insertions, 84 deletions
diff --git a/tests/test.lua b/tests/test.lua
index 43f48ad..a411b1e 100755
--- a/tests/test.lua
+++ b/tests/test.lua
@@ -83,11 +83,16 @@ local NaN = math.huge * 0;
 local testdata = load_testdata()
 
 local all_tests = {
+    { "Check module name, version",
+      function () return json._NAME, json._VERSION, json.version end, { },
+      true, { "cjson", "1.0devel", "1.0devel" } },
+
     -- Simple decode tests
     { "Decode string",
       json.decode, { '"test string"' }, true, { "test string" } },
-    { "Decode number with exponent",
-      json.decode, { '-5e3' }, true, { -5000 } },
+    { "Decode numbers",
+      json.decode, { '[ 0.0, -5e3, -1, 0.3e-3, 1023.2, 00123, 05.2, 0e10 ]' },
+      true, { { 0.0, -5000, -1, 0.0003, 1023.2, 123, 5.2, 0 } } },
     { "Decode null",
       json.decode, { 'null' }, true, { json.null } },
     { "Decode true",
@@ -101,32 +106,6 @@ local all_tests = {
       json.decode, { '[ "one", null, "three" ]' },
       true, { { "one", json.null, "three" } } },
 
-    -- Numeric decode tests
-    { "Decode various numbers",
-      json.decode, { '[ 0.0, -1, 0.3e-3, 1023.2 ]' },
-      true, { { 0.0, -1, 0.0003, 1023.2 } } },
-    { "Decode integer with leading zeros",
-      json.decode, { '00123' }, true, { 123 } },
-    { "Decode floating point with leading zero",
-      json.decode, { '05.2' }, true, { 5.2 } },
-    { "Decode zero with exponent",
-      json.decode, { '0e10' }, true, { 0 } },
-    { "Decode hexadecimal",
-      json.decode, { '0x6' }, true, { 6 } },
-    { "Decode +-Inf",
-      json.decode, { '[ +Inf, Inf, -Inf ]' }, true, { { Inf, Inf, -Inf } } },
-    { "Decode +-Infinity",
-      json.decode, { '[ +Infinity, Infinity, -Infinity ]' },
-      true, { { Inf, Inf, -Inf } } },
-    { "Decode +-NaN",
-      json.decode, { '[ +NaN, NaN, -NaN ]' }, true, { { NaN, NaN, NaN } } },
-    { "Decode Infrared (not infinity)",
-      json.decode, { 'Infrared' },
-      false, { "Expected the end but found invalid token at character 4" } },
-    { "Decode Noodle (not NaN)",
-      json.decode, { 'Noodle' },
-      false, { "Expected value but found invalid token at character 1" } },
-
     -- Decode error tests
     { "Decode UTF-16BE",
       json.decode, { '\0"\0"' },
@@ -164,18 +143,43 @@ local all_tests = {
     { "Decode invalid number exponent",
       json.decode, { '[ 0.4eg10 ]' },
       false, { "Expected comma or array end but found invalid token at character 6" } },
-    { "Setting decode_max_depth(5)", function ()
-        json.decode_max_depth(5)
-    end },
+
+    -- Test nested tables / arrays / objects
+    { "Set decode_max_depth(5)",
+      json.decode_max_depth, { 5 }, true, { 5 } },
     { "Decode array at nested limit",
       json.decode, { '[[[[[ "nested" ]]]]]' },
       true, { {{{{{ "nested" }}}}} } },
     { "Decode array over nested limit",
       json.decode, { '[[[[[[ "nested" ]]]]]]' },
       false, { "Too many nested data structures" } },
-    { "Setting decode_max_depth(1000)", function ()
-        json.decode_max_depth(1000)
-    end },
+    { "Decode object at nested limit",
+      json.decode, { '{"a":{"b":{"c":{"d":{"e":"nested"}}}}}' },
+      true, { {a={b={c={d={e="nested"}}}}} } },
+    { "Decode object over nested limit",
+      json.decode, { '{"a":{"b":{"c":{"d":{"e":{"f":"nested"}}}}}}' },
+      false, { "Too many nested data structures" } },
+    { "Set decode_max_depth(1000)",
+      json.decode_max_depth, { 1000 }, true, { 1000 } },
+
+    { "Set encode_max_depth(5)",
+      json.encode_max_depth, { 5 }, true, { 5 } },
+    { "Encode nested table as array at nested limit",
+      json.encode, { {{{{{"nested"}}}}} }, true, { '[[[[["nested"]]]]]' } },
+    { "Encode nested table as array after nested limit",
+      json.encode, { { {{{{{"nested"}}}}} } },
+      false, { "Cannot serialise, excessive nesting (6)" } },
+    { "Encode nested table as object at nested limit",
+      json.encode, { {a={b={c={d={e="nested"}}}}} },
+      true, { '{"a":{"b":{"c":{"d":{"e":"nested"}}}}}' } },
+    { "Encode nested table as object over nested limit",
+      json.encode, { {a={b={c={d={e={f="nested"}}}}}} },
+      false, { "Cannot serialise, excessive nesting (6)" } },
+    { "Encode table with cycle",
+      json.encode, { testdata.table_cycle },
+      false, { "Cannot serialise, excessive nesting (6)" } },
+    { "Set encode_max_depth(1000)",
+      json.encode_max_depth, { 1000 }, true, { 1000 } },
 
     -- Simple encode tests
     { "Encode null",
@@ -188,20 +192,82 @@ local all_tests = {
       json.encode, { { } }, true, { '{}' } },
     { "Encode integer",
       json.encode, { 10 }, true, { '10' } },
+    { "Encode string",
+      json.encode, { "hello" }, true, { '"hello"' } },
+
+    { "Set encode_keep_buffer(false)",
+      json.encode_keep_buffer, { false }, true, { "off" } },
+    { "Set encode_number_precision(3)",
+      json.encode_number_precision, { 3 }, true, { 3 } },
+    { "Encode number with precision 3",
+      json.encode, { 1/3 }, true, { "0.333" } },
+    { "Set encode_number_precision(14)",
+      json.encode_number_precision, { 14 }, true, { 14 } },
+    { "Set encode_keep_buffer(true)",
+      json.encode_keep_buffer, { true }, true, { "on" } },
+
+    -- Test decoding invalid numbers
+    { "Set decode_invalid_numbers(true)",
+      json.decode_invalid_numbers, { true }, true, { "on" } },
+    { "Decode hexadecimal",
+      json.decode, { '0x6' }, true, { 6 } },
+    { "Decode +-Inf",
+      json.decode, { '[ +Inf, Inf, -Inf ]' }, true, { { Inf, Inf, -Inf } } },
+    { "Decode +-Infinity",
+      json.decode, { '[ +Infinity, Infinity, -Infinity ]' },
+      true, { { Inf, Inf, -Inf } } },
+    { "Decode +-NaN",
+      json.decode, { '[ +NaN, NaN, -NaN ]' }, true, { { NaN, NaN, NaN } } },
+    { "Decode Infrared (not infinity)",
+      json.decode, { 'Infrared' },
+      false, { "Expected the end but found invalid token at character 4" } },
+    { "Decode Noodle (not NaN)",
+      json.decode, { 'Noodle' },
+      false, { "Expected value but found invalid token at character 1" } },
+    { "Set decode_invalid_numbers(false)",
+      json.decode_invalid_numbers, { false }, true, { "off" } },
+    { "Decode hexadecimal (throw error)",
+      json.decode, { '0x6' },
+      false, { "Expected value but found invalid number at character 1" } },
+    { "Decode +-Inf (throw error)",
+      json.decode, { '[ +Inf, Inf, -Inf ]' },
+      false, { "Expected value but found invalid token at character 3" } },
+    { "Decode +-Infinity (throw error)",
+      json.decode, { '[ +Infinity, Infinity, -Infinity ]' },
+      false, { "Expected value but found invalid token at character 3" } },
+    { "Decode +-NaN (throw error)",
+      json.decode, { '[ +NaN, NaN, -NaN ]' },
+      false, { "Expected value but found invalid token at character 3" } },
+    { 'Set decode_invalid_numbers("on")',
+      json.decode_invalid_numbers, { "on" }, true, { "on" } },
+
+    -- Test encoding invalid numbers
+    { "Set encode_invalid_numbers(false)",
+      json.encode_invalid_numbers, { false }, true, { "off" } },
     { "Encode NaN (invalid numbers disabled)",
       json.encode, { NaN },
       false, { "Cannot serialise number: must not be NaN or Inf" } },
     { "Encode Infinity (invalid numbers disabled)",
       json.encode, { Inf },
       false, { "Cannot serialise number: must not be NaN or Inf" } },
-    { "Encode string",
-      json.encode, { "hello" }, true, { '"hello"' } },
+    { "Set encode_invalid_numbers(\"null\")",
+      json.encode_invalid_numbers, { "null" }, true, { "null" } },
+    { "Encode NaN as null",
+      json.encode, { NaN }, true, { "null" } },
+    { "Encode Infinity as null",
+      json.encode, { Inf }, true, { "null" } },
+    { "Set encode_invalid_numbers(true)",
+      json.encode_invalid_numbers, { true }, true, { "on" } },
+    { "Encode NaN",
+      json.encode, { NaN }, true, { "nan" } },
+    { "Encode Infinity",
+      json.encode, { Inf }, true, { "inf" } },
+    { 'Set encode_invalid_numbers("off")',
+      json.encode_invalid_numbers, { "off" }, true, { "off" } },
 
     -- Table encode tests
-    { "Setting sparse array (true, 2, 3) / max depth (5)", function()
-        json.encode_sparse_array(true, 2, 3)
-        json.encode_max_depth(5)
-    end },
+    { "Set encode_sparse_array(true, 2, 3)",
+      json.encode_sparse_array, { true, 2, 3 }, true, { true, 2, 3 } },
     { "Encode sparse table as array #1",
       json.encode, { { [3] = "sparse test" } },
       true, { '[null,null,"sparse test"]' } },
@@ -216,15 +282,6 @@ local all_tests = {
       json.encode, { { ["2"] = "numeric string key test" } },
       true, { '{"2":"numeric string key test"}' } },
 
-    { "Encode nested table",
-      json.encode, { {{{{{"nested"}}}}} }, true, { '[[[[["nested"]]]]]' } },
-    { "Encode nested table (throw error)",
-      json.encode, { { {{{{{"nested"}}}}} } },
-      false, { "Cannot serialise, excessive nesting (6)" } },
-    { "Encode table with cycle",
-      json.encode, { testdata.table_cycle },
-      false, { "Cannot serialise, excessive nesting (6)" } },
-
     -- Encode error tests
     { "Encode table with incompatible key",
       json.encode, { { [false] = "wrong" } },
@@ -232,32 +289,6 @@ local all_tests = {
     { "Encode Lua function",
       json.encode, { function () end },
       false, { "Cannot serialise function: type not supported" } },
-    { "Setting encode_invalid_numbers(false)", function ()
-        json.encode_invalid_numbers(false)
-    end },
-    { "Encode NaN (invalid numbers disabled)",
-      json.encode, { NaN },
-      false, { "Cannot serialise number: must not be NaN or Inf" } },
-    { "Encode Infinity (invalid numbers disabled)",
-      json.encode, { Inf },
-      false, { "Cannot serialise number: must not be NaN or Inf" } },
-    { 'Setting encode_invalid_numbers("null").', function ()
-        json.encode_invalid_numbers("null")
-    end },
-    { "Encode NaN as null",
-      json.encode, { NaN }, true, { "null" } },
-    { "Encode Infinity as null",
-      json.encode, { Inf }, true, { "null" } },
-    { 'Setting encode_invalid_numbers(true).', function ()
-        json.encode_invalid_numbers(true)
-    end },
-    { "Encode NaN",
-      json.encode, { NaN }, true, { "nan" } },
-    { "Encode Infinity",
-      json.encode, { Inf }, true, { "inf" } },
-    { 'Setting encode_invalid_numbers(false)', function ()
-        json.encode_invalid_numbers(false)
-    end },
 
     -- Escaping tests
     { "Encode all octets (8-bit clean)",
@@ -266,6 +297,8 @@ local all_tests = {
       json.decode, { testdata.octets_escaped }, true, { testdata.octets_raw } },
     { "Decode single UTF-16 escape",
       json.decode, { [["\uF800"]] }, true, { "\239\160\128" } },
+    { "Decode all UTF-16 escapes (including surrogate combinations)",
+      json.decode, { testdata.utf16_escaped }, true, { testdata.utf8_raw } },
     { "Decode swapped surrogate pair",
       json.decode, { [["\uDC00\uD800"]] },
       false, { "Expected value but found invalid unicode escape code at character 2" } },
@@ -281,14 +314,12 @@ local all_tests = {
     { "Decode invalid low surrogate",
       json.decode, { [["\uDB00\uD"]] },
       false, { "Expected value but found invalid unicode escape code at character 2" } },
-    { "Decode all UTF-16 escapes (including surrogate combinations)",
-      json.decode, { testdata.utf16_escaped }, true, { testdata.utf8_raw } },
 
     -- Locale tests
     --
     -- The standard Lua interpreter is ANSI C online doesn't support locales
     -- by default. Force a known problematic locale to test strtod()/sprintf().
-    { "Setting locale to cs_CZ (comma separator)", function ()
+    { "Set locale to cs_CZ (comma separator)", function ()
         os.setlocale("cs_CZ")
         json.new()
     end },
@@ -296,7 +327,7 @@ local all_tests = {
       json.encode, { 1.5 }, true, { '1.5' } },
     { "Decode number in array under comma locale",
       json.decode, { '[ 10, "test" ]' }, true, { { 10, "test" } } },
-    { "Reverting locale to POSIX", function ()
+    { "Revert locale to POSIX", function ()
         os.setlocale("C")
         json.new()
     end },
@@ -306,11 +337,8 @@ print(string.format("Testing Lua CJSON version %s\n", json.version))
 
 util.run_test_group(all_tests)
 
-json.encode_invalid_numbers(true)
-json.decode_invalid_numbers(true)
-json.encode_max_depth(20)
-for i = 1, #arg do
-    util.run_test("Decode cycle " .. arg[i], test_decode_cycle, { arg[i] },
+for _, filename in ipairs(arg) do
+    util.run_test("Decode cycle " .. filename, test_decode_cycle, { filename },
                   true, { true })
 end
 
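The recurring change in this patch is visible in the decode_max_depth, encode_max_depth, encode_invalid_numbers and encode_sparse_array hunks: wrapper closures that only applied a setting are replaced by direct table-driven calls, so the harness now also checks the value each configuration function returns (5, 1000, "on", "off", and so on). A hedged before/after sketch of that pattern, using only names and values taken from the diff:

```lua
local json = require "cjson"   -- assumed setup, as used throughout tests/test.lua

-- Old style: the setting is applied inside a closure, so there is no
-- expected return value for the harness to compare against.
local old_entry = { "Setting decode_max_depth(5)", function ()
    json.decode_max_depth(5)
end }

-- New style: the configuration function goes through the generic entry
-- format, asserting that the call succeeds and reports the new limit (5).
local new_entry = { "Set decode_max_depth(5)",
                    json.decode_max_depth, { 5 }, true, { 5 } }
```

The same direct-call style is what lets the new error-path entries (for example, decoding `0x6` or `+Inf` after `decode_invalid_numbers(false)`) assert on the exact error strings rather than merely exercising the code.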