-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathserialization.ts
More file actions
311 lines (278 loc) · 8.24 KB
/
serialization.ts
File metadata and controls
311 lines (278 loc) · 8.24 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
/**
* VecPack Canonical Serialization Format
*
* This module implements the VecPack canonical serialization format used by Amadeus.
* VecPack provides deterministic, canonical encoding of values for use in cryptographic
* operations (e.g., transaction signing). It ensures that equivalent data structures
* always serialize to the same byte sequence through canonical ordering of map keys.
*
* Format specification:
* - Supports: null, booleans, integers (varint), bytes, lists, and maps
* - Maps/objects are sorted by their encoded key bytes for canonical ordering
* - Varints are limited to 16 bytes maximum
* - Zero values are encoded as a single 0x00 byte
*/
// VecPack wire-format type tags: the first byte of every encoded term.
// NOTE(review): tag 0x04 is skipped between INT and BYTES — presumably
// reserved by the wire format; confirm against the reference implementation.
const TYPE_NULL = 0x00
const TYPE_TRUE = 0x01
const TYPE_FALSE = 0x02
const TYPE_INT = 0x03
const TYPE_BYTES = 0x05
const TYPE_LIST = 0x06
const TYPE_MAP = 0x07
import type { DecodedValue, SerializableValue } from './types'
// Mutable read cursor threaded through the decoder so nested decodeTerm
// calls all advance one shared position.
interface DecodeRef {
  offset: number
}
/**
 * Serialize a value into its canonical VecPack byte representation.
 *
 * @param term - Value to serialize (null, boolean, int, string/bytes, list, map)
 * @returns The canonical encoding as a Uint8Array
 * @throws Error if the value contains an unsupported type
 *
 * @example
 * ```ts
 * const encoded = encode({ foo: 'bar', count: 42 })
 * ```
 */
export function encode(term: SerializableValue): Uint8Array {
  const buffer: number[] = []
  encodeTerm(term, buffer)
  return Uint8Array.from(buffer)
}
/**
 * Deserialize a canonical VecPack byte sequence back into a value.
 *
 * @param bytes - Encoded input, as a Uint8Array or plain number array
 * @returns The decoded value
 * @throws Error 'trailing_bytes' when data remains after the first term,
 *         or any error raised while decoding malformed input
 *
 * @example
 * ```ts
 * const decoded = decode(encodedBytes)
 * ```
 */
export function decode(bytes: Uint8Array | number[]): DecodedValue {
  const data = Array.isArray(bytes) ? new Uint8Array(bytes) : bytes
  const cursor: DecodeRef = { offset: 0 }
  const result = decodeTerm(data, cursor)
  // A canonical payload is exactly one term; anything left over is an error.
  if (cursor.offset !== data.length) {
    throw new Error('trailing_bytes')
  }
  return result
}
/**
 * Decode VecPack-encoded contract state into [key, value] byte pairs.
 *
 * Contract-state prefix queries return a VecPack map whose keys and values
 * are both binary (Uint8Array). This helper decodes that map and flattens
 * it into an array of tuples, preserving the map's iteration order.
 *
 * @param bytes - VecPack-encoded data (Uint8Array or ArrayBuffer)
 * @returns Array of [key, value] tuples
 * @throws Error if the payload is not a VecPack map of binary entries
 *
 * @example
 * ```ts
 * const response = await fetch('/contract/state/prefix?prefix=...')
 * const buffer = await response.arrayBuffer()
 * const entries = decodeContractState(buffer)
 * for (const [key, value] of entries) {
 *   console.log(key, value)
 * }
 * ```
 */
export function decodeContractState(
  bytes: Uint8Array | ArrayBuffer
): Array<[Uint8Array, Uint8Array]> {
  const data = bytes instanceof ArrayBuffer ? new Uint8Array(bytes) : bytes
  const decoded = decode(data)
  if (!(decoded instanceof Map)) {
    throw new Error(`Expected MAP type, got ${typeof decoded}`)
  }
  const pairs: Array<[Uint8Array, Uint8Array]> = []
  decoded.forEach((value, key) => {
    if (!(key instanceof Uint8Array) || !(value instanceof Uint8Array)) {
      throw new Error('Expected Uint8Array for key and value')
    }
    pairs.push([key, value])
  })
  return pairs
}
/*
* Helper functions for encoding and decoding
*/
/** Append every byte of `bytes` onto the growing output array, in order. */
function appendBytes(out: number[], bytes: Uint8Array | number[]): void {
  for (let i = 0; i < bytes.length; i++) {
    out.push(bytes[i])
  }
}
/**
 * Lexicographic comparison of two byte sequences.
 *
 * Returns a negative/zero/positive number like a standard comparator.
 * When one sequence is a prefix of the other, the shorter one sorts first.
 */
function compareBytes(a: Uint8Array | number[], b: Uint8Array | number[]): number {
  const limit = Math.min(a.length, b.length)
  let i = 0
  while (i < limit) {
    const diff = a[i] - b[i]
    if (diff !== 0) return diff
    i++
  }
  return a.length - b.length
}
/** Encode a map key as a full term; the bytes are used for canonical sorting. */
function encodeKeyBytes(k: SerializableValue): number[] {
  const encoded: number[] = []
  encodeTerm(k, encoded)
  return encoded
}
/**
 * Encode an integer as a VecPack varint.
 *
 * Layout: zero is the single byte 0x00; otherwise a header byte carrying
 * the sign (bit 7) and magnitude length in bytes (bits 0-6), followed by
 * the big-endian magnitude.
 *
 * @throws Error 'bad_varint_length' when the magnitude exceeds 16 bytes
 * @throws Error 'varint_leading_zero' if a leading zero byte is produced
 */
function encodeVarint(n: number | bigint, out: number[]): void {
  let remaining = typeof n === 'bigint' ? n : BigInt(n)
  if (remaining === 0n) {
    out.push(0)
    return
  }
  const negative = remaining < 0n
  if (negative) remaining = -remaining
  // Build the magnitude big-endian by prepending each low byte.
  const magnitude: number[] = []
  while (remaining > 0n) {
    magnitude.unshift(Number(remaining & 0xffn))
    remaining >>= 8n
  }
  if (magnitude.length === 0 || magnitude.length > 16) {
    throw new Error('bad_varint_length')
  }
  // The decoder treats a leading zero as non-canonical; never emit one.
  if (magnitude[0] === 0) {
    throw new Error('varint_leading_zero')
  }
  out.push((negative ? 0x80 : 0) | magnitude.length, ...magnitude)
}
/**
 * Decode a VecPack varint at `ref.offset`, advancing the offset past it.
 *
 * Layout: a header byte (sign in bit 7, magnitude length in bits 0-6)
 * followed by the big-endian magnitude bytes. Zero is the single byte 0x00.
 * Mirrors the encoder's invariants so only canonical varints round-trip.
 *
 * @throws Error 'noncanonical_zero' for a 0x80 header (negative zero)
 * @throws Error 'bad_varint_length' when the declared length exceeds 16
 * @throws Error 'varint_leading_zero' when the magnitude starts with 0x00
 * @throws Error on a truncated buffer
 */
function decodeVarint(data: Uint8Array, ref: DecodeRef): bigint {
  // Bounds-check up front: indexing past the end yields undefined, which
  // would otherwise surface as an opaque TypeError from BigInt().
  if (ref.offset >= data.length) {
    throw new Error('decodeBytes: Out of bounds read')
  }
  const header = data[ref.offset++]
  if (header === 0) return 0n
  if (header === 0x80) throw new Error('noncanonical_zero')
  const negative = (header & 0x80) !== 0
  const length = header & 0x7f
  // The encoder never emits more than 16 magnitude bytes; reject the rest.
  if (length > 16) {
    throw new Error('bad_varint_length')
  }
  if (ref.offset + length > data.length) {
    throw new Error('decodeBytes: Out of bounds read')
  }
  // A leading zero byte would make the encoding non-canonical.
  if (data[ref.offset] === 0) {
    throw new Error('varint_leading_zero')
  }
  let magnitude = 0n
  for (let i = 0; i < length; i++) {
    magnitude = (magnitude << 8n) | BigInt(data[ref.offset++])
  }
  return negative ? -magnitude : magnitude
}
/**
 * Decode a varint that must be a non-negative, safe-integer length field.
 *
 * @throws Error 'length_is_negative' for negative values
 * @throws Error 'length_overflow' for values above Number.MAX_SAFE_INTEGER
 */
function decodeVarintGteZero(data: Uint8Array, ref: DecodeRef): number {
  const value = decodeVarint(data, ref)
  if (value < 0n) {
    throw new Error('length_is_negative')
  }
  if (value > BigInt(Number.MAX_SAFE_INTEGER)) {
    throw new Error('length_overflow')
  }
  return Number(value)
}
/**
 * Recursively append the canonical encoding of `value` to `out`.
 *
 * Strings are encoded as UTF-8 under the BYTES tag. Map/object entries are
 * keyed by their encoded key bytes and sorted with compareBytes, so
 * logically-equal maps always serialize to identical byte sequences.
 *
 * @throws Error for unsupported values (functions, symbols, undefined)
 */
function encodeTerm(value: SerializableValue, out: number[]): void {
  if (value === null) {
    out.push(TYPE_NULL)
    return
  }
  switch (typeof value) {
    case 'boolean':
      out.push(value ? TYPE_TRUE : TYPE_FALSE)
      return
    case 'number':
    case 'bigint':
      out.push(TYPE_INT)
      encodeVarint(value, out)
      return
    case 'string': {
      const utf8 = new TextEncoder().encode(value)
      out.push(TYPE_BYTES)
      encodeVarint(utf8.length, out)
      appendBytes(out, utf8)
      return
    }
  }
  if (value instanceof Uint8Array) {
    out.push(TYPE_BYTES)
    encodeVarint(value.length, out)
    appendBytes(out, value)
    return
  }
  if (Array.isArray(value)) {
    out.push(TYPE_LIST)
    encodeVarint(value.length, out)
    value.forEach((element) => encodeTerm(element, out))
    return
  }
  // Remaining objects (Map instances and plain objects) encode as maps.
  if (value instanceof Map || typeof value === 'object') {
    const entries: Array<{ k: SerializableValue; v: SerializableValue; bytes: number[] }> =
      value instanceof Map
        ? [...value.entries()].map(([k, v]) => ({ k, v, bytes: encodeKeyBytes(k) }))
        : Object.keys(value).map((k) => ({ k, v: value[k], bytes: encodeKeyBytes(k) }))
    // Canonical ordering: sort entries by their encoded key bytes.
    entries.sort((a, b) => compareBytes(a.bytes, b.bytes))
    out.push(TYPE_MAP)
    encodeVarint(entries.length, out)
    for (const { k, v } of entries) {
      encodeTerm(k, out)
      encodeTerm(v, out)
    }
    return
  }
  throw new Error(`Unsupported type: ${typeof value}`)
}
/**
 * Decode a single term at `ref.offset`, advancing the offset past it.
 *
 * For maps, the raw encoded key bytes of each entry are tracked and must be
 * strictly increasing, which rejects both unsorted and duplicate keys
 * ('map_not_canonical').
 *
 * @throws Error on truncated input, unknown type tags, or non-canonical maps
 */
function decodeTerm(data: Uint8Array, ref: DecodeRef): DecodedValue {
  if (ref.offset >= data.length) {
    throw new Error('decodeBytes: Out of bounds read')
  }
  const type = data[ref.offset++]
  switch (type) {
    case TYPE_NULL:
      return null
    case TYPE_TRUE:
      return true
    case TYPE_FALSE:
      return false
    case TYPE_INT:
      return decodeVarint(data, ref)
    case TYPE_BYTES: {
      const length = decodeVarintGteZero(data, ref)
      // Reject truncated payloads explicitly: Uint8Array.slice clamps an
      // out-of-range end and would silently return fewer bytes than declared.
      if (ref.offset + length > data.length) {
        throw new Error('decodeBytes: Out of bounds read')
      }
      const bytes = data.slice(ref.offset, ref.offset + length)
      ref.offset += length
      return bytes
    }
    case TYPE_LIST: {
      const count = decodeVarintGteZero(data, ref)
      const items: DecodedValue[] = new Array(count)
      for (let i = 0; i < count; i++) {
        items[i] = decodeTerm(data, ref)
      }
      return items
    }
    case TYPE_MAP: {
      const count = decodeVarintGteZero(data, ref)
      let prevKeyBytes: Uint8Array | null = null
      const map = new Map<DecodedValue, DecodedValue>()
      for (let idx = 0; idx < count; idx++) {
        // Canonical check: compare the raw encoded bytes of successive keys.
        const kStart = ref.offset
        const key = decodeTerm(data, ref)
        const keyBytes = data.slice(kStart, ref.offset)
        if (prevKeyBytes !== null && compareBytes(keyBytes, prevKeyBytes) <= 0) {
          throw new Error('map_not_canonical')
        }
        prevKeyBytes = keyBytes
        const value = decodeTerm(data, ref)
        map.set(key, value)
      }
      return map
    }
    default:
      throw new Error('decodeBytes: Unknown type')
  }
}