author     Mark Pulford <mark@kyne.com.au>    2012-01-15 22:53:07 +1030
committer  Mark Pulford <mark@kyne.com.au>    2012-03-04 18:54:34 +1030
commit     efed00e617c5ff68d672a148190dcb7c0bbacaa7 (patch)
tree       78f8de88bb7bd87a3ae814f8f99dbeaca7d5a2d7 /tests
parent     8e1b49f0dc0819991783d262faf33d5e53d6f621 (diff)
Gather all test data into a table
Move all test data into a single table to tidy the main namespace.
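In short, the change replaces a cluster of file-level locals with one table built by a loader function. A minimal sketch of the resulting pattern, using the names that appear in the diff below (load_testdata, testdata) and only two of the fixtures; it is an illustration, not the full test file:

-- Fixtures are built inside a loader so the main chunk stays tidy.
function load_testdata()
    local data = {}
    data.nested5 = {{{{{ "nested" }}}}}       -- five levels of table nesting
    data.table_cycle = {}
    data.table_cycle[1] = data.table_cycle    -- table that references itself
    return data
end

-- The main chunk now declares a single local instead of one per fixture,
-- and individual tests refer to fields such as testdata.nested5.
local testdata = load_testdata()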
Diffstat (limited to 'tests')
-rwxr-xr-x  tests/test.lua  52
1 file changed, 32 insertions, 20 deletions
diff --git a/tests/test.lua b/tests/test.lua
index b1212af..424a630 100755
--- a/tests/test.lua
+++ b/tests/test.lua
@@ -47,6 +47,31 @@ local function gen_utf16_escaped()
     return table.concat(utf16_escaped)
 end
 
+function load_testdata()
+    local data = {}
+
+    -- Data for 8bit raw <-> escaped octets tests
+    data.octets_raw = gen_raw_octets()
+    data.octets_escaped = util.file_load("octets-escaped.dat")
+
+    -- Data for \uXXXX -> UTF-8 test
+    data.utf16_escaped = gen_utf16_escaped()
+
+    -- Load matching data for utf16_escaped
+    local utf8_loaded
+    utf8_loaded, data.utf8_raw = pcall(util.file_load, "utf8.dat")
+    if not utf8_loaded then
+        data.utf8_raw = "Failed to load utf8.dat"
+    end
+
+    data.nested5 = {{{{{ "nested" }}}}}
+
+    data.table_cycle = {}
+    data.table_cycle[1] = data.table_cycle
+
+    return data
+end
+
 function test_decode_cycle(filename)
     local obj1 = json.decode(util.file_load(filename))
     local obj2 = json.decode(json.encode(obj1))
@@ -57,20 +82,7 @@ end
 local Inf = math.huge;
 local NaN = math.huge * 0;
 
-local octets_raw = gen_raw_octets()
-local octets_escaped = util.file_load("octets-escaped.dat")
-
-local utf8_loaded, utf8_raw = pcall(util.file_load, "utf8.dat")
-if not utf8_loaded then
-    utf8_raw = "Failed to load utf8.dat"
-end
-local utf16_escaped = gen_utf16_escaped()
-
-local nested5 = {{{{{ "nested" }}}}}
-
-local table_cycle = {}
-local table_cycle2 = { table_cycle }
-table_cycle[1] = table_cycle2
+local testdata = load_testdata()
 
 local all_tests = {
     -- Simple decode tests
@@ -207,12 +219,12 @@ local all_tests = {
       true, { '{"2":"numeric string key test"}' } },
 
     { "Encode nested table",
-      json.encode, { nested5 }, true, { '[[[[["nested"]]]]]' } },
+      json.encode, { testdata.nested5 }, true, { '[[[[["nested"]]]]]' } },
     { "Encode nested table (throw error)",
-      json.encode, { { nested5 } },
+      json.encode, { { testdata.nested5 } },
       false, { "Cannot serialise, excessive nesting (6)" } },
     { "Encode table with cycle",
-      json.encode, { table_cycle },
+      json.encode, { testdata.table_cycle },
       false, { "Cannot serialise, excessive nesting (6)" } },
 
     -- Encode error tests
@@ -251,9 +263,9 @@ local all_tests = {
 
     -- Escaping tests
     { "Encode all octets (8-bit clean)",
-      json.encode, { octets_raw }, true, { octets_escaped } },
+      json.encode, { testdata.octets_raw }, true, { testdata.octets_escaped } },
     { "Decode all escaped octets",
-      json.decode, { octets_escaped }, true, { octets_raw } },
+      json.decode, { testdata.octets_escaped }, true, { testdata.octets_raw } },
     { "Decode single UTF-16 escape",
       json.decode, { [["\uF800"]] }, true, { "\239\160\128" } },
     { "Decode swapped surrogate pair",
@@ -272,7 +284,7 @@ local all_tests = {
       json.decode, { [["\uDB00\uD"]] },
       false, { "Expected value but found invalid unicode escape code at character 2" } },
     { "Decode all UTF-16 escapes (including surrogate combinations)",
-      json.decode, { utf16_escaped }, true, { utf8_raw } },
+      json.decode, { testdata.utf16_escaped }, true, { testdata.utf8_raw } },
 
     -- Locale tests
     --
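A side note on the cycle fixture touched above: the removed top-level code built the cycle through an intermediate table (table_cycle -> table_cycle2 -> table_cycle), while load_testdata() makes the table reference itself directly. Both shapes fail the same way, since Lua CJSON does not detect cycles explicitly but aborts once encoding exceeds its nesting limit; the number in the expected message reflects the depth reached when encoding gave up. A rough illustration of that behaviour (assuming the module is loaded as cjson; the depth reported depends on the configured limit):

local cjson = require "cjson"

local cycle = {}
cycle[1] = cycle    -- self-referencing table, as built by load_testdata()

-- encode() descends into the cycle until the nesting limit is exceeded
-- and raises an error of the form "Cannot serialise, excessive nesting (N)"
local ok, err = pcall(cjson.encode, cycle)
print(ok, err)      -- false, followed by the error message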