diff options
author | Mark Pulford <mark@kyne.com.au> | 2011-05-08 21:12:32 +0930 |
---|---|---|
committer | Mark Pulford <mark@kyne.com.au> | 2011-05-08 21:12:32 +0930 |
commit | f59b3b671984d6a3bf7f7dd08512a89dd916a901 (patch) | |
tree | a7745e6eef05f32dc19f488f9e240d4a2416fcaa | |
parent | 75ee6bfcf40b531024fad9a5ebb0908859769816 (diff) | |
download | lua-cjson-f59b3b671984d6a3bf7f7dd08512a89dd916a901.tar.gz lua-cjson-f59b3b671984d6a3bf7f7dd08512a89dd916a901.tar.bz2 lua-cjson-f59b3b671984d6a3bf7f7dd08512a89dd916a901.zip |
Add NaN/Inf encoding tests, rearrange test order
-rwxr-xr-x | tests/test.lua | 145 |
1 file changed, 74 insertions, 71 deletions
diff --git a/tests/test.lua b/tests/test.lua index ab2fe7c..436b20c 100755 --- a/tests/test.lua +++ b/tests/test.lua | |||
@@ -9,7 +9,62 @@ | |||
9 | require "common" | 9 | require "common" |
10 | local json = require "cjson" | 10 | local json = require "cjson" |
11 | 11 | ||
12 | local simple_value_tests = { | 12 | local function gen_ascii() |
13 | local chars = {} | ||
14 | for i = 0, 255 do chars[i + 1] = string.char(i) end | ||
15 | return table.concat(chars) | ||
16 | end | ||
17 | |||
18 | -- Generate every UTF-16 codepoint, including supplementary codes | ||
19 | local function gen_utf16_escaped() | ||
20 | -- Create raw table escapes | ||
21 | local utf16_escaped = {} | ||
22 | local count = 0 | ||
23 | |||
24 | local function append_escape(code) | ||
25 | local esc = string.format('\\u%04X', code) | ||
26 | table.insert(utf16_escaped, esc) | ||
27 | end | ||
28 | |||
29 | table.insert(utf16_escaped, '"') | ||
30 | for i = 0, 0xD7FF do | ||
31 | append_escape(i) | ||
32 | end | ||
33 | -- Skip 0xD800 - 0xDFFF since they are used to encode supplementary | ||
34 | -- codepoints | ||
35 | for i = 0xE000, 0xFFFF do | ||
36 | append_escape(i) | ||
37 | end | ||
38 | -- Append surrogate pair for each supplementary codepoint | ||
39 | for high = 0xD800, 0xDBFF do | ||
40 | for low = 0xDC00, 0xDFFF do | ||
41 | append_escape(high) | ||
42 | append_escape(low) | ||
43 | end | ||
44 | end | ||
45 | table.insert(utf16_escaped, '"') | ||
46 | |||
47 | return table.concat(utf16_escaped) | ||
48 | end | ||
49 | |||
50 | function test_decode_cycle(filename) | ||
51 | local obj1 = json.decode(file_load(filename)) | ||
52 | local obj2 = json.decode(json.encode(obj1)) | ||
53 | return compare_values(obj1, obj2) | ||
54 | end | ||
55 | |||
56 | local Inf = math.huge; | ||
57 | local NaN = math.huge * 0; | ||
58 | local octets_raw = gen_ascii() | ||
59 | local octets_escaped = file_load("octets-escaped.dat") | ||
60 | local utf8_loaded, utf8_raw = pcall(file_load, "utf8.dat") | ||
61 | if not utf8_loaded then | ||
62 | utf8_raw = "Failed to load utf8.dat" | ||
63 | end | ||
64 | local utf16_escaped = gen_utf16_escaped() | ||
65 | local nested5 = {{{{{ "nested" }}}}} | ||
66 | |||
67 | local decode_simple_tests = { | ||
13 | { json.decode, { '"test string"' }, true, { "test string" } }, | 68 | { json.decode, { '"test string"' }, true, { "test string" } }, |
14 | { json.decode, { '-5e3' }, true, { -5000 } }, | 69 | { json.decode, { '-5e3' }, true, { -5000 } }, |
15 | { json.decode, { 'null' }, true, { json.null } }, | 70 | { json.decode, { 'null' }, true, { json.null } }, |
@@ -21,10 +76,21 @@ local simple_value_tests = { | |||
21 | true, { { "one", json.null, "three" } } } | 76 | true, { { "one", json.null, "three" } } } |
22 | } | 77 | } |
23 | 78 | ||
24 | local Inf = math.huge; | 79 | local encode_simple_tests = { |
25 | local NaN = math.huge * 0; | 80 | { json.encode, { json.null }, true, { 'null' } }, |
81 | { json.encode, { true }, true, { 'true' } }, | ||
82 | { json.encode, { false }, true, { 'false' } }, | ||
83 | { json.encode, { { } }, true, { '{ }' } }, | ||
84 | { json.encode, { 10 }, true, { '10' } }, | ||
85 | { json.encode, { NaN }, | ||
86 | false, { "Cannot serialise number: must not be NaN or Inf" } }, | ||
87 | { json.encode, { Inf }, | ||
88 | false, { "Cannot serialise number: must not be NaN or Inf" } }, | ||
89 | { json.encode, { "hello" }, true, { '"hello"' } }, | ||
90 | } | ||
91 | |||
26 | 92 | ||
27 | local numeric_tests = { | 93 | local decode_numeric_tests = { |
28 | { json.decode, { '[ 0.0, -1, 0.3e-3, 1023.2 ]' }, | 94 | { json.decode, { '[ 0.0, -1, 0.3e-3, 1023.2 ]' }, |
29 | true, { { 0.0, -1, 0.0003, 1023.2 } } }, | 95 | true, { { 0.0, -1, 0.0003, 1023.2 } } }, |
30 | { json.decode, { '00123' }, true, { 123 } }, | 96 | { json.decode, { '00123' }, true, { 123 } }, |
@@ -41,13 +107,11 @@ local numeric_tests = { | |||
41 | false, { "Expected value but found invalid token at character 1" } }, | 107 | false, { "Expected value but found invalid token at character 1" } }, |
42 | } | 108 | } |
43 | 109 | ||
44 | local nested5 = {{{{{ "nested" }}}}} | ||
45 | |||
46 | local encode_table_tests = { | 110 | local encode_table_tests = { |
47 | function() | 111 | function() |
48 | cjson.encode_sparse_array(true, 2, 3) | 112 | cjson.encode_sparse_array(true, 2, 3) |
49 | cjson.encode_max_depth(5) | 113 | cjson.encode_max_depth(5) |
50 | return "Setting sparse array / max depth" | 114 | return "Setting sparse array (true, 2, 3) / max depth (5)" |
51 | end, | 115 | end, |
52 | { json.encode, { { [3] = "sparse test" } }, | 116 | { json.encode, { { [3] = "sparse test" } }, |
53 | true, { '[ null, null, "sparse test" ]' } }, | 117 | true, { '[ null, null, "sparse test" ]' } }, |
@@ -86,61 +150,6 @@ local decode_error_tests = { | |||
86 | false, { "Expected comma or array end but found invalid token at character 6" } }, | 150 | false, { "Expected comma or array end but found invalid token at character 6" } }, |
87 | } | 151 | } |
88 | 152 | ||
89 | local encode_simple_tests = { | ||
90 | { json.encode, { json.null }, true, { 'null' } }, | ||
91 | { json.encode, { true }, true, { 'true' } }, | ||
92 | { json.encode, { false }, true, { 'false' } }, | ||
93 | { json.encode, { { } }, true, { '{ }' } }, | ||
94 | { json.encode, { 10 }, true, { '10' } }, | ||
95 | { json.encode, { "hello" }, true, { '"hello"' } }, | ||
96 | } | ||
97 | |||
98 | local function gen_ascii() | ||
99 | local chars = {} | ||
100 | for i = 0, 255 do chars[i + 1] = string.char(i) end | ||
101 | return table.concat(chars) | ||
102 | end | ||
103 | |||
104 | -- Generate every UTF-16 codepoint, including supplementary codes | ||
105 | local function gen_utf16_escaped() | ||
106 | -- Create raw table escapes | ||
107 | local utf16_escaped = {} | ||
108 | local count = 0 | ||
109 | |||
110 | local function append_escape(code) | ||
111 | local esc = string.format('\\u%04X', code) | ||
112 | table.insert(utf16_escaped, esc) | ||
113 | end | ||
114 | |||
115 | table.insert(utf16_escaped, '"') | ||
116 | for i = 0, 0xD7FF do | ||
117 | append_escape(i) | ||
118 | end | ||
119 | -- Skip 0xD800 - 0xDFFF since they are used to encode supplementary | ||
120 | -- codepoints | ||
121 | for i = 0xE000, 0xFFFF do | ||
122 | append_escape(i) | ||
123 | end | ||
124 | -- Append surrogate pair for each supplementary codepoint | ||
125 | for high = 0xD800, 0xDBFF do | ||
126 | for low = 0xDC00, 0xDFFF do | ||
127 | append_escape(high) | ||
128 | append_escape(low) | ||
129 | end | ||
130 | end | ||
131 | table.insert(utf16_escaped, '"') | ||
132 | |||
133 | return table.concat(utf16_escaped) | ||
134 | end | ||
135 | |||
136 | local octets_raw = gen_ascii() | ||
137 | local octets_escaped = file_load("octets-escaped.dat") | ||
138 | local utf8_loaded, utf8_raw = pcall(file_load, "utf8.dat") | ||
139 | if not utf8_loaded then | ||
140 | utf8_raw = "Failed to load utf8.dat" | ||
141 | end | ||
142 | local utf16_escaped = gen_utf16_escaped() | ||
143 | |||
144 | local escape_tests = { | 153 | local escape_tests = { |
145 | -- Test 8bit clean | 154 | -- Test 8bit clean |
146 | { json.encode, { octets_raw }, true, { octets_escaped } }, | 155 | { json.encode, { octets_raw }, true, { octets_escaped } }, |
@@ -162,14 +171,9 @@ local escape_tests = { | |||
162 | { json.decode, { utf16_escaped }, true, { utf8_raw } } | 171 | { json.decode, { utf16_escaped }, true, { utf8_raw } } |
163 | } | 172 | } |
164 | 173 | ||
165 | function test_decode_cycle(filename) | 174 | run_test_group("decode simple value", decode_simple_tests) |
166 | local obj1 = json.decode(file_load(filename)) | 175 | run_test_group("encode simple value", encode_simple_tests) |
167 | local obj2 = json.decode(json.encode(obj1)) | 176 | run_test_group("decode numeric", decode_numeric_tests) |
168 | return compare_values(obj1, obj2) | ||
169 | end | ||
170 | |||
171 | run_test_group("decode simple value", simple_value_tests) | ||
172 | run_test_group("decode numeric", numeric_tests) | ||
173 | 177 | ||
174 | -- INCLUDE: | 178 | -- INCLUDE: |
175 | -- - Sparse array exception.. | 179 | -- - Sparse array exception.. |
@@ -179,7 +183,6 @@ run_test_group("decode numeric", numeric_tests) | |||
179 | 183 | ||
180 | run_test_group("encode table", encode_table_tests) | 184 | run_test_group("encode table", encode_table_tests) |
181 | run_test_group("decode error", decode_error_tests) | 185 | run_test_group("decode error", decode_error_tests) |
182 | run_test_group("encode simple value", encode_simple_tests) | ||
183 | run_test_group("escape", escape_tests) | 186 | run_test_group("escape", escape_tests) |
184 | 187 | ||
185 | cjson.encode_max_depth(20) | 188 | cjson.encode_max_depth(20) |