complex.test.ts
import test from "ava"
import cbor from "cbor"
import { encode, decode, encodeAsyncIterable, decodeAsyncIterable, encodingLength } from "microcbor"
import values from "./JSONDataSetSample.js"
test("complex nested objects", (t) => {
for (const [i, value] of values.entries()) {
const reference = cbor.encodeCanonical(value)
const data = encode(value)
t.deepEqual(Buffer.from(data), reference, `encode complex nested object ${i}`)
t.is(data.length, encodingLength(value))
t.deepEqual(decode(reference), value, `decode complex nested object ${i}`)
}
})
test("encode value stream", async (t) => {
const reference = Buffer.concat(values.map((value) => cbor.encodeCanonical(value)))
async function* streamValues() {
for (const value of values) yield value
}
const chunks = []
for await (const chunk of encodeAsyncIterable(streamValues())) {
chunks.push(chunk)
}
t.deepEqual(Buffer.concat(chunks), reference)
})
test("decode value stream in chunks of 10 bytes", async (t) => {
const reference = Buffer.concat(values.map((value) => cbor.encodeCanonical(value)))
const chunkSize = 10
async function* streamChunks() {
let offset = 0
while (offset < reference.byteLength) {
yield reference.subarray(offset, offset + chunkSize)
offset += chunkSize
}
}
const decodedValues = []
for await (const value of decodeAsyncIterable(streamChunks())) {
decodedValues.push(value)
}
t.deepEqual(decodedValues, values)
})
test("compose encodeStream(decodeStream()) | chunkSize = 16", async (t) => {
const reference = Buffer.concat(values.map((value) => cbor.encodeCanonical(value)))
const chunkSize = 16
async function* streamChunks() {
let offset = 0
while (offset < reference.byteLength) {
yield reference.subarray(offset, offset + chunkSize)
offset += chunkSize
}
}
const chunks = []
for await (const chunk of encodeAsyncIterable(decodeAsyncIterable(streamChunks()))) {
chunks.push(chunk)
}
t.deepEqual(Buffer.concat(chunks), reference)
})
test("compose decodeStream(encodeStream()) | chunkSize = 64", async (t) => {
async function* streamValues() {
for (const value of values) yield value
}
const decodedValues = []
for await (const value of decodeAsyncIterable(encodeAsyncIterable(streamValues(), { chunkSize: 64 }))) {
decodedValues.push(value)
}
t.deepEqual(decodedValues, values)
})
test("encode objects with adversarial unicode keys", (t) => {
const keyA = "fia"
const keyB = "👍"
const value = { [keyA]: 1, [keyB]: 2 }
t.deepEqual(Buffer.from(encode(value)), cbor.encodeCanonical(value))
})
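
// A minimal round-trip sketch: this assumes decode accepts the Uint8Array returned by
// encode, which the Buffer-based assertions above suggest. It follows from the first test,
// where encode matches the canonical reference and decoding that reference yields the value.
test("round-trip decode(encode(value)) for every sample value", (t) => {
	for (const [i, value] of values.entries()) {
		t.deepEqual(decode(encode(value)), value, `round-trip complex nested object ${i}`)
	}
})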