microcbor 0.4.0 → 1.0.0
- package/README.md +156 -40
- package/lib/CBORDecoderStream.d.ts +5 -0
- package/lib/CBORDecoderStream.js +37 -0
- package/lib/CBOREncoderStream.d.ts +9 -0
- package/lib/CBOREncoderStream.js +24 -0
- package/lib/{decode.d.ts → Decoder.d.ts} +1 -0
- package/lib/{decode.js → Decoder.js} +1 -0
- package/lib/Encoder.d.ts +65 -0
- package/lib/{encode.js → Encoder.js} +58 -42
- package/lib/decodeAsyncIterable.d.ts +37 -0
- package/lib/{decodeStream.js → decodeAsyncIterable.js} +11 -4
- package/lib/decodeIterable.d.ts +3 -0
- package/lib/decodeIterable.js +206 -0
- package/lib/encodeAsyncIterable.d.ts +4 -0
- package/lib/encodeAsyncIterable.js +9 -0
- package/lib/encodeIterable.d.ts +4 -0
- package/lib/encodeIterable.js +9 -0
- package/lib/encodingLength.d.ts +5 -2
- package/lib/encodingLength.js +6 -61
- package/lib/index.d.ts +8 -4
- package/lib/index.js +8 -4
- package/lib/utils.d.ts +1 -0
- package/lib/utils.js +60 -0
- package/package.json +2 -2
- package/lib/decodeStream.d.ts +0 -2
- package/lib/encode.d.ts +0 -37
- package/lib/encodeStream.d.ts +0 -5
- package/lib/encodeStream.js +0 -8
package/README.md
CHANGED
@@ -1,24 +1,38 @@
 # microcbor
 
-[](https://github.com/RichardLitt/standard-readme) [](https://opensource.org/licenses/MIT) [](https://www.npmjs.com/package/microcbor) 
+[](https://github.com/RichardLitt/standard-readme) [](https://opensource.org/licenses/MIT) [](https://www.npmjs.com/package/microcbor) 
 
 Encode JavaScript values as canonical CBOR.
 
 microcbor is a minimal JavaScript [CBOR](https://cbor.io/) implementation featuring
 
--
-- fast performance
-- `Iterable` and `AsyncIterable` streaming
+- small footprint
+- fast performance
+- `Iterable` and `AsyncIterable` streaming APIs with "chunk recycling" encoding option
+- [Web Streams API](https://developer.mozilla.org/en-US/docs/Web/API/Streams_API)-compatible [TransformStream](https://developer.mozilla.org/en-US/docs/Web/API/TransformStream) classes
 
-microcbor follows the [deterministic CBOR encoding requirements](https://www.rfc-editor.org/rfc/rfc8949.html#core-det) - all floating-point numbers are serialized in the smallest possible size without losing precision, and object entries are always sorted by key in byte-wise lexicographic order. `NaN` is always serialized as `0xf97e00`. **microcbor doesn't support tags, bigints, typed arrays, non-string keys, or indefinite-length collections.**
+microcbor follows the [deterministic CBOR encoding requirements](https://www.rfc-editor.org/rfc/rfc8949.html#core-det) - all floating-point numbers are serialized in the smallest possible size without losing precision, and object entries are always sorted by key in byte-wise utf-8 lexicographic order. `NaN` is always serialized as `0xf97e00`. **microcbor doesn't support tags, bigints, typed arrays, non-string keys, or indefinite-length collections.**
 
-This library is TypeScript-native, ESM-only, and has just one dependency [joeltg/fp16](https://github.com/joeltg/fp16) for half-precision floats.
+This library is TypeScript-native, ESM-only, and has just **one dependency** [joeltg/fp16](https://github.com/joeltg/fp16) for half-precision floats.
 
 ## Table of Contents
 
 - [Install](#install)
 - [Usage](#usage)
 - [API](#api)
+  - [CBOR Values](#cbor-values)
+  - [Encoding](#encoding)
+    - [`EncodeOptions`](#encodeoptions)
+    - [`encodingLength`](#encodinglength)
+    - [`encode`](#encode)
+    - [`encodeIterable`](#encodeiterable)
+    - [`encodeAsyncIterable`](#encodeasynciterable)
+    - [`CBOREncoderStream`](#cborencoderstream)
+  - [Decoding](#decoding)
+    - [`decode`](#decode)
+    - [`decodeIterable`](#decodeiterable)
+    - [`decodeAsyncIterable`](#decodeasynciterable)
+    - [`CBORDecoderStream`](#cbordecoderstream)
 - [Value mapping](#value-mapping)
 - [Testing](#testing)
 - [Benchmarks](#benchmarks)
@@ -31,12 +45,6 @@ This library is TypeScript-native, ESM-only, and has just one dependency [joeltg
 npm i microcbor
 ```
 
-Or in Deno:
-
-```typescript
-import { encode, decode } from "https://cdn.skypack.dev/microcbor"
-```
-
 ## Usage
 
 ```typescript
@@ -57,6 +65,8 @@ console.log(decode(data))
 
 ## API
 
+### CBOR Values
+
 ```ts
 declare type CBORValue = undefined | null | boolean | number | string | Uint8Array | CBORArray | CBORMap
 
@@ -64,27 +74,125 @@ interface CBORArray extends Array<CBORValue> {}
 interface CBORMap {
 	[key: string]: CBORValue
 }
+```
 
-
-// It's only a guideline; `encodeStream` won't break up
-// individual CBOR values like strings or byte arrays
-// that are larger than the provided chunk size.
-declare function encode(value: CBORValue, options?: { chunkSize?: number }): Uint8Array
+### Encoding
 
-
-	source: AsyncIterable<CBORValue>,
-	options?: { chunkSize?: number },
-): AsyncIterable<Uint8Array>
+#### `EncodeOptions`
 
-
+```ts
+export interface EncodeOptions {
+	/**
+	 * Re-use the same underlying ArrayBuffer for all yielded chunks.
+	 * If this is enabled, the consumer must copy each chunk content
+	 * themselves to a new buffer if they wish to keep it.
+	 * This mode is useful for efficiently hashing objects without
+	 * ever allocating memory for the entire encoded result.
+	 * @default false
+	 */
+	chunkRecycling?: boolean
+
+	/**
+	 * Maximum chunk size.
+	 * @default 4096
+	 */
+	chunkSize?: number
+
+	/**
+	 * Minimum bitsize for floating-point numbers: 16, 32, or 64.
+	 * @default 16
+	 */
+	minFloatSize?: (typeof FloatSize)[keyof typeof FloatSize]
+}
+```
 
-
+#### `encodingLength`
 
-
-
+```ts
+/**
+ * Calculate the byte length that a value will encode into
+ * without actually allocating anything.
+ */
 declare function encodingLength(value: CBORValue): number
 ```
 
+#### `encode`
+
+```ts
+/**
+ * Encode a single CBOR value.
+ * options.chunkRecycling has no effect here.
+ */
+export function encode(value: CBORValue, options: EncodeOptions = {}): Uint8Array
+```
+
+#### `encodeIterable`
+
+```ts
+/** Encode an iterable of CBOR values into an iterable of Uint8Array chunks */
+export function* encodeIterable(
+	source: Iterable<CBORValue>,
+	options: EncodeOptions = {},
+): IterableIterator<Uint8Array>
+
+```
+
+#### `encodeAsyncIterable`
+
+```ts
+/** Encode an async iterable of CBOR values into an async iterable of Uint8Array chunks */
+export async function* encodeAsyncIterable(
+	source: AsyncIterable<CBORValue>,
+	options: EncodeOptions = {},
+): AsyncIterableIterator<Uint8Array>
+
+```
+
+#### `CBOREncoderStream`
+
+```ts
+/**
+ * Encode a Web Streams API ReadableStream.
+ * options.chunkRecycling has no effect here.
+ */
+export class CBOREncoderStream extends TransformStream<CBORValue, Uint8Array> {
+	public constructor(options: EncodeOptions = {})
+}
+```
+
+### Decoding
+
+#### `decode`
+
+```ts
+/** Decode a single CBOR value. */
+export function decode(data: Uint8Array): CBORValue
+```
+
+#### `decodeIterable`
+
+```ts
+/** Decode an iterable of Uint8Array chunks into an iterable of CBOR values */
+export function* decodeIterable(source: Iterable<Uint8Array>): IterableIterator<CBORValue>
+
+```
+
+#### `decodeAsyncIterable`
+
+```ts
+/** Decode an async iterable of Uint8Array chunks into an async iterable of CBOR values */
+export async function* decodeAsyncIterable(source: AsyncIterable<Uint8Array>): AsyncIterable<CBORValue>
+```
+
+#### `CBORDecoderStream`
+
+```ts
+/** Decode a Web Streams API ReadableStream. */
+export class CBORDecoderStream extends TransformStream<Uint8Array, CBORValue> {
+	public constructor()
+}
+```
+
 ## Unsafe integer handling
 
 - JavaScript integers below `Number.MIN_SAFE_INTEGER` or greater than `Number.MAX_SAFE_INTEGER` will encode as CBOR floating-point numbers, as per the [suggestion in the CBOR spec](https://www.rfc-editor.org/rfc/rfc8949.html#name-converting-from-json-to-cbo).
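The `chunkRecycling` option documented in the README hunk above is aimed at streaming hashes. A minimal sketch of that use, assuming Node.js and its built-in `node:crypto` module (the value being hashed is just a placeholder):

```ts
import { createHash } from "node:crypto"
import { encodeIterable } from "microcbor"
import type { CBORValue } from "microcbor"

// Hash a value's canonical CBOR encoding without materializing it.
// With chunkRecycling enabled, every yielded chunk reuses the same
// buffer, so each chunk must be consumed (here: hashed) before the
// iterator is advanced again.
function hashCBOR(value: CBORValue): string {
	const hash = createHash("sha256")
	for (const chunk of encodeIterable([value], { chunkRecycling: true })) {
		hash.update(chunk)
	}
	return hash.digest("hex")
}

console.log(hashCBOR({ hello: "world" }))
```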
@@ -125,40 +233,48 @@ npm run test
 - microcbor runs isomorphically on the web, in Node, and in Deno. node-cbor ships a separate cbor-web package.
 - microcbor encodes `Uint8Array` values as CBOR byte strings (major type 2). node-cbor encodes `Uint8Array` values as tagged type arrays (major type 6 / RFC 8746), and encodes NodeJS `Buffer` values as CBOR byte strings (major type 2).
 - microcbor uses async iterables for its streaming API. node-cbor uses NodeJS streams.
-- microcbor is about **
+- microcbor is about **4x faster** than node-cbor at canonical encoding, ~2x faster than node-cbor's default non-canonical encoding, and ~1.5x faster than node-cbor at decoding.
 
 ```
 microcbor % npm run test -- test/benchmarks.test.ts
 
-> microcbor@0.
+> microcbor@0.4.0 test
 > ava test/benchmarks.test.ts
 
-
+
+✔ time encode() (237ms)
 ℹ microcbor: {
-avg: 0.
-std: 0.
+avg: 0.2836770999999993,
+std: 0.1553461595001637,
 } (ms)
 ℹ node-cbor: {
-avg:
-std:
+avg: 0.47247252999999945,
+std: 0.6099837601508338,
+} (ms)
+ℹ node-cbor (canonical): {
+avg: 0.9973837600000031,
+std: 1.203792591464195,
 } (ms)
 ℹ JSON.stringify: {
-avg: 0.
-std: 0.
+avg: 0.009709539999999493,
+std: 0.0014329558361671918,
 } (ms)
 ✔ time decode()
 ℹ microcbor: {
-avg: 0.
-std: 0.
+avg: 0.19635871000000235,
+std: 0.35634472331099276,
 } (ms)
 ℹ node-cbor: {
-avg: 0.
-std: 0.
+avg: 0.35364794999999843,
+std: 0.31256985912702206,
 } (ms)
 ℹ JSON.parse: {
-avg: 0.
-std: 0.
+avg: 0.018565019999997504,
+std: 0.004339636959421219,
 } (ms)
+─
+
+2 tests passed
 ```
 
 ## Contributing
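To illustrate the "Unsafe integer handling" behaviour mentioned in the README changes above, here is a small sketch. It assumes `decode` raises `UnsafeIntegerError` the same way the iterable decoders later in this diff do; the byte string is 2^53 encoded as a CBOR uint64:

```ts
import { decode, UnsafeIntegerError } from "microcbor"

// 0x1b introduces a 64-bit unsigned argument; the payload is 2^53,
// one past Number.MAX_SAFE_INTEGER, so it cannot round-trip safely.
const data = new Uint8Array([0x1b, 0x00, 0x20, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00])

try {
	decode(data)
} catch (err) {
	if (err instanceof UnsafeIntegerError) {
		console.log("refused to decode an unsafe integer")
	} else {
		throw err
	}
}
```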
package/lib/CBORDecoderStream.js
ADDED
@@ -0,0 +1,37 @@
+import { Decoder } from "./decodeAsyncIterable.js";
+/** Decode a Web Streams API ReadableStream */
+export class CBORDecoderStream extends TransformStream {
+    constructor() {
+        let readableController;
+        const readable = new ReadableStream({
+            start(controller) {
+                readableController = controller;
+            },
+        });
+        // We need to track which chunks have been "processed" and only resolve each
+        // .transform() promise once all data from each chunk has been enqueued.
+        const chunks = new WeakMap();
+        async function pipe(controller) {
+            const decoder = new Decoder(readable.values(), {
+                onFree: (chunk) => chunks.get(chunk)?.resolve(),
+            });
+            for await (const value of decoder) {
+                controller.enqueue(value);
+            }
+        }
+        super({
+            start(controller) {
+                pipe(controller).catch((err) => controller.error(err));
+            },
+            transform(chunk) {
+                return new Promise((resolve) => {
+                    chunks.set(chunk, { resolve });
+                    readableController.enqueue(chunk);
+                });
+            },
+            flush() {
+                readableController.close();
+            },
+        });
+    }
+}
package/lib/CBOREncoderStream.d.ts
ADDED
@@ -0,0 +1,9 @@
+import { CBORValue } from "./types.js";
+import { EncodeOptions } from "./Encoder.js";
+/**
+ * Encode a Web Streams API ReadableStream.
+ * options.chunkRecycling has no effect here.
+ */
+export declare class CBOREncoderStream extends TransformStream<CBORValue, Uint8Array> {
+    constructor(options?: EncodeOptions);
+}
package/lib/CBOREncoderStream.js
ADDED
@@ -0,0 +1,24 @@
+import { Encoder } from "./Encoder.js";
+/**
+ * Encode a Web Streams API ReadableStream.
+ * options.chunkRecycling has no effect here.
+ */
+export class CBOREncoderStream extends TransformStream {
+    constructor(options = {}) {
+        const encoder = new Encoder({ ...options, chunkRecycling: false });
+        super({
+            transform(value, controller) {
+                // Encode the incoming value and push all resulting chunks
+                for (const chunk of encoder.encodeValue(value)) {
+                    controller.enqueue(chunk);
+                }
+            },
+            flush(controller) {
+                // Push any remaining chunks when the stream is closing
+                for (const chunk of encoder.flush()) {
+                    controller.enqueue(chunk);
+                }
+            },
+        });
+    }
+}
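For context, a sketch of how the two new TransformStream classes might be wired together. It assumes a runtime where `ReadableStream` is async-iterable (Node 18+); the source values and the final consumer are placeholders:

```ts
import { CBOREncoderStream, CBORDecoderStream } from "microcbor"
import type { CBORValue } from "microcbor"

// Round-trip a stream of values: CBORValue -> Uint8Array -> CBORValue.
const source = new ReadableStream<CBORValue>({
	start(controller) {
		controller.enqueue({ id: 1 })
		controller.enqueue({ id: 2 })
		controller.close()
	},
})

const decoded = source
	.pipeThrough(new CBOREncoderStream())
	.pipeThrough(new CBORDecoderStream())

for await (const value of decoded) {
	console.log(value)
}
```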
package/lib/Encoder.d.ts
ADDED
@@ -0,0 +1,65 @@
+import type { CBORValue } from "./types.js";
+export declare const FloatSize: {
+    f16: number;
+    f32: number;
+    f64: number;
+};
+export interface EncodeOptions {
+    /**
+     * Re-use the same underlying ArrayBuffer for all yielded chunks.
+     * If this is enabled, the consumer must copy each chunk content
+     * themselves to a new buffer if they wish to keep it.
+     * This mode is useful for efficiently hashing objects without
+     * ever allocating memory for the entire encoded result.
+     * @default false
+     */
+    chunkRecycling?: boolean;
+    /**
+     * Maximum chunk size
+     * @default 4096
+     */
+    chunkSize?: number;
+    /**
+     * Minimum bitsize for floating-point numbers: 16, 32, or 64
+     * @default 16
+     */
+    minFloatSize?: (typeof FloatSize)[keyof typeof FloatSize];
+}
+export declare class Encoder {
+    #private;
+    static defaultChunkSize: number;
+    readonly chunkRecycling: boolean;
+    readonly chunkSize: number;
+    readonly minFloatSize: (typeof FloatSize)[keyof typeof FloatSize];
+    private readonly encoder;
+    private readonly buffer;
+    private readonly view;
+    private readonly array;
+    private offset;
+    constructor(options?: EncodeOptions);
+    get closed(): boolean;
+    private allocate;
+    private float16;
+    private float32;
+    private float64;
+    private uint8;
+    private uint16;
+    private uint32;
+    private uint64;
+    private encodeTypeAndArgument;
+    private encodeNumber;
+    private encodeInteger;
+    private encodeFloat;
+    private encodeString;
+    private encodeBytes;
+    private writeBytes;
+    encodeValue(value: CBORValue): Iterable<Uint8Array>;
+    flush(): Iterable<Uint8Array>;
+    private static compareEntries;
+    private static getAdditionalInformation;
+}
+/**
+ * Encode a single CBOR value.
+ * options.chunkRecycling has no effect here.
+ */
+export declare function encode(value: CBORValue, options?: EncodeOptions): Uint8Array;
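A sketch of driving the `Encoder` class declared above directly, which is essentially what `encodeIterable` and the stream classes do internally (the values array is a placeholder):

```ts
import { Encoder } from "microcbor"
import type { CBORValue } from "microcbor"

// Encode several values back-to-back, collecting the chunks
// (at most 1024 bytes each, per the chunkSize option) as they come.
const encoder = new Encoder({ chunkSize: 1024 })
const chunks: Uint8Array[] = []

const values: CBORValue[] = [{ a: 1 }, { b: 2 }, "done"]
for (const value of values) {
	for (const chunk of encoder.encodeValue(value)) {
		chunks.push(chunk)
	}
}

// flush() yields whatever is left in the current partially-filled chunk.
for (const chunk of encoder.flush()) {
	chunks.push(chunk)
}
```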
package/lib/{encode.js → Encoder.js}
CHANGED
@@ -10,19 +10,25 @@ var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (
     return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
 };
 var _Encoder_instances, _Encoder_closed, _Encoder_flush;
-import { getFloat16Precision, getFloat32Precision, setFloat16
-import { getByteLength } from "./encodingLength.js";
+import { Precision, getFloat16Precision, getFloat32Precision, setFloat16 } from "fp16";
 import { assert } from "./utils.js";
+export const FloatSize = {
+    f16: 16,
+    f32: 32,
+    f64: 64,
+};
 export class Encoder {
     constructor(options = {}) {
         _Encoder_instances.add(this);
         _Encoder_closed.set(this, void 0);
         this.encoder = new TextEncoder();
-        this.
+        this.minFloatSize = options.minFloatSize ?? 16;
+        this.chunkRecycling = options.chunkRecycling ?? false;
         this.chunkSize = options.chunkSize ?? Encoder.defaultChunkSize;
         assert(this.chunkSize >= 8, "expected chunkSize >= 8");
         this.buffer = new ArrayBuffer(this.chunkSize);
         this.view = new DataView(this.buffer);
+        this.array = new Uint8Array(this.buffer, 0, this.chunkSize);
         this.offset = 0;
         __classPrivateFieldSet(this, _Encoder_closed, false, "f");
     }
@@ -107,11 +113,11 @@ export class Encoder {
         }
     }
     *encodeFloat(value) {
-        if (getFloat16Precision(value) === Precision.Exact) {
+        if (this.minFloatSize === FloatSize.f16 && getFloat16Precision(value) === Precision.Exact) {
             yield* this.uint8(0xe0 | 25);
             yield* this.float16(value);
         }
-        else if (getFloat32Precision(value) === Precision.Exact) {
+        else if (this.minFloatSize <= FloatSize.f32 && getFloat32Precision(value) === Precision.Exact) {
             yield* this.uint8(0xe0 | 26);
             yield* this.float32(value);
         }
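The `minFloatSize` checks added above are what the new option controls. A small sketch of the observable difference; the byte values are worked out by hand from the IEEE 754 encodings of 1.5, so treat them as illustrative:

```ts
import { encode } from "microcbor"

// 1.5 is exactly representable as a half-precision float, so by default
// (minFloatSize: 16) it encodes to three bytes: 0xf9 0x3e 0x00.
console.log(encode(1.5)) // Uint8Array [ 0xf9, 0x3e, 0x00 ]

// Forcing at least 32-bit floats skips the f16 branch and encodes the
// same value as 0xfa followed by the big-endian float32 0x3fc00000.
console.log(encode(1.5, { minFloatSize: 32 })) // Uint8Array [ 0xfa, 0x3f, 0xc0, 0x00, 0x00 ]
```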
@@ -121,23 +127,27 @@ export class Encoder {
         }
     }
     *encodeString(value) {
-        const
-        yield* this.encodeTypeAndArgument(3, byteLength);
-
-
-
-
-
-
-
-
-
-
-
+        const bytes = this.encoder.encode(value);
+        yield* this.encodeTypeAndArgument(3, bytes.byteLength);
+        yield* this.writeBytes(bytes);
+        // const byteLength = getByteLength(value)
+        // let start = 0
+        // while (start < value.length) {
+        //     if (this.offset + 4 > this.buffer.byteLength) {
+        //         yield this.#flush()
+        //     }
+        //     const target = new Uint8Array(this.buffer, this.offset)
+        //     const result = this.encoder.encodeInto(value.slice(start), target)
+        //     start += result.read
+        //     this.offset += result.written
+        //     assert(this.offset <= this.buffer.byteLength, "expected this.offset <= this.buffer.byteLength")
+        // }
     }
     *encodeBytes(value) {
         yield* this.encodeTypeAndArgument(2, value.byteLength);
-
+        yield* this.writeBytes(value);
+    }
+    *writeBytes(value) {
         let start = 0;
         while (start < value.byteLength) {
             if (this.offset >= this.buffer.byteLength) {
@@ -146,7 +156,7 @@ export class Encoder {
             const capacity = this.buffer.byteLength - this.offset;
             const remaining = value.byteLength - start;
             const chunkLength = Math.min(capacity, remaining);
-
+            this.array.set(value.subarray(start, start + chunkLength), this.offset);
             start += chunkLength;
             this.offset += chunkLength;
         }
@@ -183,11 +193,14 @@ export class Encoder {
             }
         }
         else {
-            const
-
-
-
-
+            const entries = Object.entries(value)
+                .map(([key, value]) => [this.encoder.encode(key), value])
+                .sort(Encoder.compareEntries);
+            yield* this.encodeTypeAndArgument(5, entries.length);
+            for (const [key, value] of entries) {
+                yield* this.encodeTypeAndArgument(3, key.byteLength);
+                yield* this.writeBytes(key);
+                yield* this.encodeValue(value);
             }
         }
     }
@@ -195,31 +208,30 @@ export class Encoder {
         if (__classPrivateFieldGet(this, _Encoder_closed, "f")) {
             return;
         }
-        __classPrivateFieldSet(this, _Encoder_closed, true, "f");
         if (this.offset > 0) {
-            yield
+            yield __classPrivateFieldGet(this, _Encoder_instances, "m", _Encoder_flush).call(this);
         }
     }
     // Per the deterministic CBOR spec, we're supposed to sort keys
     // the byte-wise lexicographic order of the key's CBOR encoding
-    // - ie lower major types come before higher major types
-    //
-    //
-    // first. One thing we know for sure about strings is that a
-    // string with a smaller length will sort byte-wise before a string
+    // - ie lower major types come before higher major types.
+    // One thing we know for sure about strings is that a string with
+    // a smaller length will sort byte-wise before a string
     // with a longer length, since strings are encoded with a length
     // prefix (either in the additionalInformation bits, if < 24, or
     // in the next several bytes, but in all cases the order holds).
-    static
-        if (a.
+    static compareEntries([a], [b]) {
+        if (a.byteLength < b.byteLength)
             return -1;
-
-        else if (b.length < a.length) {
+        if (b.byteLength < a.byteLength)
             return 1;
+        for (let i = 0; i < a.byteLength; i++) {
+            if (a[i] < b[i])
+                return -1;
+            if (b[i] < a[i])
+                return 1;
         }
-
-        return a < b ? -1 : 1;
-    }
+        return 0;
     }
     static getAdditionalInformation(argument) {
         if (argument < 24) {
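One consequence of the `compareEntries` sort above: the key order of the input object has no effect on the output. A quick sketch:

```ts
import { encode } from "microcbor"

// Entries are sorted by the byte-wise order of their UTF-8 encoded keys
// (shorter keys first), so these two objects encode identically.
const a = encode({ b: 2, a: 1, aa: 3 })
const b = encode({ aa: 3, a: 1, b: 2 })

console.log(a.length === b.length && a.every((byte, i) => byte === b[i])) // true
```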
@@ -240,7 +252,7 @@
     }
 }
 _Encoder_closed = new WeakMap(), _Encoder_instances = new WeakSet(), _Encoder_flush = function _Encoder_flush() {
-    if (this.
+    if (this.chunkRecycling) {
         const chunk = new Uint8Array(this.buffer, 0, this.offset);
         this.offset = 0;
         return chunk;
@@ -252,9 +264,13 @@ _Encoder_closed = new WeakMap(), _Encoder_instances = new WeakSet(), _Encoder_fl
         return chunk;
     }
 };
-Encoder.defaultChunkSize =
+Encoder.defaultChunkSize = 4096;
+/**
+ * Encode a single CBOR value.
+ * options.chunkRecycling has no effect here.
+ */
 export function encode(value, options = {}) {
-    const encoder = new Encoder(options);
+    const encoder = new Encoder({ ...options, chunkRecycling: false });
     let byteLength = 0;
     const chunks = [];
     for (const chunk of encoder.encodeValue(value)) {
package/lib/decodeAsyncIterable.d.ts
ADDED
@@ -0,0 +1,37 @@
+import type { CBORValue } from "./types.js";
+export declare class Decoder implements AsyncIterableIterator<CBORValue> {
+    private offset;
+    private byteLength;
+    private readonly chunks;
+    private readonly constantBuffer;
+    private readonly constantView;
+    private readonly iter;
+    private readonly onFree?;
+    constructor(source: AsyncIterable<Uint8Array>, options?: {
+        onFree?: (chunk: Uint8Array) => void;
+    });
+    [Symbol.asyncIterator]: () => this;
+    private allocate;
+    private fill;
+    private constant;
+    private float16;
+    private float32;
+    private float64;
+    private uint8;
+    private uint16;
+    private uint32;
+    private uint64;
+    private decodeBytes;
+    private decodeString;
+    private getArgument;
+    next(): Promise<{
+        done: true;
+        value: undefined;
+    } | {
+        done: false;
+        value: CBORValue;
+    }>;
+    private decodeValue;
+}
+/** Decode an async iterable of Uint8Array chunks into an async iterable of CBOR values */
+export declare function decodeAsyncIterable(source: AsyncIterable<Uint8Array>): AsyncIterableIterator<CBORValue>;
package/lib/{decodeStream.js → decodeAsyncIterable.js}
CHANGED
@@ -1,8 +1,8 @@
 var _a;
 import { getFloat16 } from "fp16";
 import { UnsafeIntegerError, maxSafeInteger, minSafeInteger } from "./utils.js";
-class
-    constructor(source) {
+export class Decoder {
+    constructor(source, options = {}) {
         this.offset = 0;
         this.byteLength = 0;
         this.chunks = [];
@@ -25,6 +25,7 @@ class DecoderStream {
         this.uint32 = this.constant(4, (view) => view.getUint32(0));
         this.uint64 = this.constant(8, (view) => view.getBigUint64(0));
         this.iter = source[Symbol.asyncIterator]();
+        this.onFree = options.onFree;
     }
     async allocate(size) {
         while (this.byteLength < size) {
@@ -64,6 +65,11 @@ class DecoderStream {
                 this.byteLength -= capacity;
             }
         }
+        if (this.onFree !== undefined) {
+            for (let i = 0; i < deleteCount; i++) {
+                this.onFree(this.chunks[i]);
+            }
+        }
         this.chunks.splice(0, deleteCount);
     }
     async decodeBytes(length) {
@@ -200,6 +206,7 @@ class DecoderStream {
     }
 }
 _a = Symbol.asyncIterator;
-
-
+/** Decode an async iterable of Uint8Array chunks into an async iterable of CBOR values */
+export async function* decodeAsyncIterable(source) {
+    yield* new Decoder(source);
 }
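A sketch of consuming `decodeAsyncIterable` from a byte stream. It uses a Node.js file stream, which is an async iterable of `Buffer` chunks; the file path is a placeholder and is assumed to contain a concatenated sequence of CBOR values:

```ts
import { createReadStream } from "node:fs"
import { decodeAsyncIterable } from "microcbor"

// Node Readable streams are async iterables of Buffer (a Uint8Array
// subclass), so they can be fed to decodeAsyncIterable directly.
for await (const value of decodeAsyncIterable(createReadStream("values.cbor"))) {
	console.log(value)
}
```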
package/lib/decodeIterable.js
ADDED
@@ -0,0 +1,206 @@
+var _a;
+import { getFloat16 } from "fp16";
+import { UnsafeIntegerError, maxSafeInteger, minSafeInteger } from "./utils.js";
+class Decoder {
+    constructor(source) {
+        this.offset = 0;
+        this.byteLength = 0;
+        this.chunks = [];
+        this.constantBuffer = new ArrayBuffer(8);
+        this.constantView = new DataView(this.constantBuffer);
+        this[_a] = () => this;
+        this.constant = (size, f) => {
+            return () => {
+                this.allocate(size);
+                const array = new Uint8Array(this.constantBuffer, 0, size);
+                this.fill(array);
+                return f(this.constantView);
+            };
+        };
+        this.float16 = this.constant(2, (view) => getFloat16(view, 0));
+        this.float32 = this.constant(4, (view) => view.getFloat32(0));
+        this.float64 = this.constant(8, (view) => view.getFloat64(0));
+        this.uint8 = this.constant(1, (view) => view.getUint8(0));
+        this.uint16 = this.constant(2, (view) => view.getUint16(0));
+        this.uint32 = this.constant(4, (view) => view.getUint32(0));
+        this.uint64 = this.constant(8, (view) => view.getBigUint64(0));
+        this.iter = source[Symbol.iterator]();
+    }
+    allocate(size) {
+        while (this.byteLength < size) {
+            const { done, value } = this.iter.next();
+            if (done) {
+                throw new Error("stream ended prematurely");
+            }
+            else {
+                this.chunks.push(value);
+                this.byteLength += value.byteLength;
+            }
+        }
+    }
+    fill(target) {
+        if (this.byteLength < target.byteLength) {
+            throw new Error("internal error - please file a bug report!");
+        }
+        let byteLength = 0;
+        let deleteCount = 0;
+        for (let i = 0; byteLength < target.byteLength; i++) {
+            const chunk = this.chunks[i];
+            const capacity = target.byteLength - byteLength;
+            const length = chunk.byteLength - this.offset;
+            if (length <= capacity) {
+                // copy the entire remainder of the chunk
+                target.set(chunk.subarray(this.offset), byteLength);
+                byteLength += length;
+                deleteCount += 1;
+                this.offset = 0;
+                this.byteLength -= length;
+            }
+            else {
+                // fill the remainder of the target
+                target.set(chunk.subarray(this.offset, this.offset + capacity), byteLength);
+                byteLength += capacity; // equivalent to break
+                this.offset += capacity;
+                this.byteLength -= capacity;
+            }
+        }
+        this.chunks.splice(0, deleteCount);
+    }
+    decodeBytes(length) {
+        this.allocate(length);
+        const array = new Uint8Array(length);
+        this.fill(array);
+        return array;
+    }
+    decodeString(length) {
+        this.allocate(length);
+        const data = new Uint8Array(length);
+        this.fill(data);
+        return new TextDecoder().decode(data);
+    }
+    getArgument(additionalInformation) {
+        if (additionalInformation < 24) {
+            return { value: additionalInformation };
+        }
+        else if (additionalInformation === 24) {
+            return { value: this.uint8() };
+        }
+        else if (additionalInformation === 25) {
+            return { value: this.uint16() };
+        }
+        else if (additionalInformation === 26) {
+            return { value: this.uint32() };
+        }
+        else if (additionalInformation === 27) {
+            const uint64 = this.uint64();
+            const value = maxSafeInteger < uint64 ? Infinity : Number(uint64);
+            return { value, uint64 };
+        }
+        else if (additionalInformation === 31) {
+            throw new Error("microcbor does not support decoding indefinite-length items");
+        }
+        else {
+            throw new Error("invalid argument encoding");
+        }
+    }
+    next() {
+        while (this.byteLength === 0) {
+            const { done, value } = this.iter.next();
+            if (done) {
+                return { done: true, value: undefined };
+            }
+            else if (value.byteLength > 0) {
+                this.chunks.push(value);
+                this.byteLength += value.byteLength;
+            }
+        }
+        const value = this.decodeValue();
+        return { done: false, value };
+    }
+    decodeValue() {
+        const initialByte = this.uint8();
+        const majorType = initialByte >> 5;
+        const additionalInformation = initialByte & 0x1f;
+        if (majorType === 0) {
+            const { value, uint64 } = this.getArgument(additionalInformation);
+            if (uint64 !== undefined && maxSafeInteger < uint64) {
+                throw new UnsafeIntegerError("cannot decode integers greater than 2^53-1", uint64);
+            }
+            else {
+                return value;
+            }
+        }
+        else if (majorType === 1) {
+            const { value, uint64 } = this.getArgument(additionalInformation);
+            if (uint64 !== undefined && -1n - uint64 < minSafeInteger) {
+                throw new UnsafeIntegerError("cannot decode integers less than -2^53+1", -1n - uint64);
+            }
+            else {
+                return -1 - value;
+            }
+        }
+        else if (majorType === 2) {
+            const { value: length } = this.getArgument(additionalInformation);
+            return this.decodeBytes(length);
+        }
+        else if (majorType === 3) {
+            const { value: length } = this.getArgument(additionalInformation);
+            return this.decodeString(length);
+        }
+        else if (majorType === 4) {
+            const { value: length } = this.getArgument(additionalInformation);
+            const value = new Array(length);
+            for (let i = 0; i < length; i++) {
+                value[i] = this.decodeValue();
+            }
+            return value;
+        }
+        else if (majorType === 5) {
+            const { value: length } = this.getArgument(additionalInformation);
+            const value = {};
+            for (let i = 0; i < length; i++) {
+                const key = this.decodeValue();
+                if (typeof key !== "string") {
+                    throw new Error("microcbor only supports string keys in objects");
+                }
+                value[key] = this.decodeValue();
+            }
+            return value;
+        }
+        else if (majorType === 6) {
+            throw new Error("microcbor does not support tagged data items");
+        }
+        else if (majorType === 7) {
+            switch (additionalInformation) {
+                case 20:
+                    return false;
+                case 21:
+                    return true;
+                case 22:
+                    return null;
+                case 23:
+                    return undefined;
+                case 24:
+                    throw new Error("microcbor does not support decoding unassigned simple values");
+                case 25:
+                    return this.float16();
+                case 26:
+                    return this.float32();
+                case 27:
+                    return this.float64();
+                case 31:
+                    throw new Error("microcbor does not support decoding indefinite-length items");
+                default:
+                    throw new Error("invalid simple value");
+            }
+        }
+        else {
+            throw new Error("invalid major type");
+        }
+    }
+}
+_a = Symbol.iterator;
+/** Decode an iterable of Uint8Array chunks into an iterable of CBOR values */
+export function* decodeIterable(source) {
+    yield* new Decoder(source);
+}
package/lib/encodeAsyncIterable.d.ts
ADDED
@@ -0,0 +1,4 @@
+import type { CBORValue } from "./types.js";
+import { EncodeOptions } from "./Encoder.js";
+/** Encode an async iterable of CBOR values into an async iterable of Uint8Array chunks */
+export declare function encodeAsyncIterable(source: AsyncIterable<CBORValue>, options?: EncodeOptions): AsyncIterableIterator<Uint8Array>;
package/lib/encodeAsyncIterable.js
ADDED
@@ -0,0 +1,9 @@
+import { Encoder } from "./Encoder.js";
+/** Encode an async iterable of CBOR values into an async iterable of Uint8Array chunks */
+export async function* encodeAsyncIterable(source, options = {}) {
+    const encoder = new Encoder(options);
+    for await (const value of source) {
+        yield* encoder.encodeValue(value);
+    }
+    yield* encoder.flush();
+}
package/lib/encodeIterable.d.ts
ADDED
@@ -0,0 +1,4 @@
+import type { CBORValue } from "./types.js";
+import { EncodeOptions } from "./Encoder.js";
+/** Encode an iterable of CBOR values into an iterable of Uint8Array chunks */
+export declare function encodeIterable(source: Iterable<CBORValue>, options?: EncodeOptions): IterableIterator<Uint8Array>;
package/lib/encodeIterable.js
ADDED
@@ -0,0 +1,9 @@
+import { Encoder } from "./Encoder.js";
+/** Encode an iterable of CBOR values into an iterable of Uint8Array chunks */
+export function* encodeIterable(source, options = {}) {
+    const encoder = new Encoder(options);
+    for (const value of source) {
+        yield* encoder.encodeValue(value);
+    }
+    yield* encoder.flush();
+}
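Taken together, the two iterable helpers above round-trip a sequence of values through `Uint8Array` chunks. A small sketch (the values are placeholders):

```ts
import { encodeIterable, decodeIterable } from "microcbor"
import type { CBORValue } from "microcbor"

const values: CBORValue[] = [null, true, 42, "hello", { nested: [1, 2, 3] }]

// encodeIterable yields Uint8Array chunks; decodeIterable turns a
// sequence of chunks back into the original values.
const roundTripped = [...decodeIterable(encodeIterable(values))]
console.log(roundTripped) // [ null, true, 42, "hello", { nested: [ 1, 2, 3 ] } ]
```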
package/lib/encodingLength.d.ts
CHANGED
@@ -1,3 +1,6 @@
-import
+import { CBORValue } from "./types.js";
+/**
+ * Calculate the byte length that a value will encode into
+ * without actually allocating anything.
+ */
 export declare function encodingLength(value: CBORValue): number;
-export declare function getByteLength(string: string): number;
package/lib/encodingLength.js
CHANGED
@@ -1,4 +1,9 @@
-import { getFloat16Precision, getFloat32Precision
+import { Precision, getFloat16Precision, getFloat32Precision } from "fp16";
+import { getByteLength } from "./utils.js";
+/**
+ * Calculate the byte length that a value will encode into
+ * without actually allocating anything.
+ */
 export function encodingLength(value) {
     if (value === false) {
         return 1;
@@ -104,63 +109,3 @@ function bytesEncodingLength(value) {
     const length = value.byteLength;
     return argumentEncodingLength(length) + length;
 }
-// https://github.com/feross/buffer/blob/57caad4450d241207066ca3832fb8e9095ad402f/index.js#L434
-export function getByteLength(string) {
-    let codePoint;
-    const length = string.length;
-    let leadSurrogate = null;
-    let bytes = 0;
-    for (let i = 0; i < length; ++i) {
-        codePoint = string.charCodeAt(i);
-        // is surrogate component
-        if (codePoint > 0xd7ff && codePoint < 0xe000) {
-            // last char was a lead
-            if (!leadSurrogate) {
-                // no lead yet
-                if (codePoint > 0xdbff) {
-                    // unexpected trail
-                    bytes += 3;
-                    continue;
-                }
-                else if (i + 1 === length) {
-                    // unpaired lead
-                    bytes += 3;
-                    continue;
-                }
-                // valid lead
-                leadSurrogate = codePoint;
-                continue;
-            }
-            // 2 leads in a row
-            if (codePoint < 0xdc00) {
-                bytes += 3;
-                leadSurrogate = codePoint;
-                continue;
-            }
-            // valid surrogate pair
-            codePoint = (((leadSurrogate - 0xd800) << 10) | (codePoint - 0xdc00)) + 0x10000;
-        }
-        else if (leadSurrogate) {
-            // valid bmp char, but last char was a lead
-            bytes += 3;
-        }
-        leadSurrogate = null;
-        // encode utf8
-        if (codePoint < 0x80) {
-            bytes += 1;
-        }
-        else if (codePoint < 0x800) {
-            bytes += 2;
-        }
-        else if (codePoint < 0x10000) {
-            bytes += 3;
-        }
-        else if (codePoint < 0x110000) {
-            bytes += 4;
-        }
-        else {
-            throw new Error("Invalid code point");
-        }
-    }
-    return bytes;
-}
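A sketch of the relationship `encodingLength` is meant to preserve, per its doc comment above (the value is a placeholder):

```ts
import { encode, encodingLength } from "microcbor"

const value = { hello: "world", n: 1.5 }

// encodingLength reports the size encode() will produce for the same
// value, without allocating the intermediate buffers.
console.log(encodingLength(value) === encode(value).byteLength) // true
```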
package/lib/index.d.ts
CHANGED
@@ -1,7 +1,11 @@
 export type { CBORValue, CBORMap, CBORArray } from "./types.js";
-export { encode, Encoder } from "./encode.js";
-export { decode, Decoder } from "./decode.js";
-export { encodeStream } from "./encodeStream.js";
-export { decodeStream } from "./decodeStream.js";
 export { encodingLength } from "./encodingLength.js";
+export { Encoder, encode } from "./Encoder.js";
+export { Decoder, decode } from "./Decoder.js";
+export { encodeIterable } from "./encodeIterable.js";
+export { decodeIterable } from "./decodeIterable.js";
+export { encodeAsyncIterable } from "./encodeAsyncIterable.js";
+export { decodeAsyncIterable } from "./decodeAsyncIterable.js";
+export { CBORDecoderStream } from "./CBORDecoderStream.js";
+export { CBOREncoderStream } from "./CBOREncoderStream.js";
 export { UnsafeIntegerError } from "./utils.js";
package/lib/index.js
CHANGED
@@ -1,6 +1,10 @@
-export { encode, Encoder } from "./encode.js";
-export { decode, Decoder } from "./decode.js";
-export { encodeStream } from "./encodeStream.js";
-export { decodeStream } from "./decodeStream.js";
 export { encodingLength } from "./encodingLength.js";
+export { Encoder, encode } from "./Encoder.js";
+export { Decoder, decode } from "./Decoder.js";
+export { encodeIterable } from "./encodeIterable.js";
+export { decodeIterable } from "./decodeIterable.js";
+export { encodeAsyncIterable } from "./encodeAsyncIterable.js";
+export { decodeAsyncIterable } from "./decodeAsyncIterable.js";
+export { CBORDecoderStream } from "./CBORDecoderStream.js";
+export { CBOREncoderStream } from "./CBOREncoderStream.js";
 export { UnsafeIntegerError } from "./utils.js";
package/lib/utils.d.ts
CHANGED
@@ -10,3 +10,4 @@ export declare class AssertError extends Error {
     constructor(message: string, props?: any | undefined);
 }
 export declare function assert(condition: unknown, message?: string, props?: any): asserts condition;
+export declare function getByteLength(string: string): number;
package/lib/utils.js
CHANGED
@@ -18,3 +18,63 @@ export function assert(condition, message = "assertion failed", props) {
         throw new AssertError(message, props);
     }
 }
+// https://github.com/feross/buffer/blob/57caad4450d241207066ca3832fb8e9095ad402f/index.js#L434
+export function getByteLength(string) {
+    let codePoint;
+    const length = string.length;
+    let leadSurrogate = null;
+    let bytes = 0;
+    for (let i = 0; i < length; ++i) {
+        codePoint = string.charCodeAt(i);
+        // is surrogate component
+        if (codePoint > 0xd7ff && codePoint < 0xe000) {
+            // last char was a lead
+            if (!leadSurrogate) {
+                // no lead yet
+                if (codePoint > 0xdbff) {
+                    // unexpected trail
+                    bytes += 3;
+                    continue;
+                }
+                else if (i + 1 === length) {
+                    // unpaired lead
+                    bytes += 3;
+                    continue;
+                }
+                // valid lead
+                leadSurrogate = codePoint;
+                continue;
+            }
+            // 2 leads in a row
+            if (codePoint < 0xdc00) {
+                bytes += 3;
+                leadSurrogate = codePoint;
+                continue;
+            }
+            // valid surrogate pair
+            codePoint = (((leadSurrogate - 0xd800) << 10) | (codePoint - 0xdc00)) + 0x10000;
+        }
+        else if (leadSurrogate) {
+            // valid bmp char, but last char was a lead
+            bytes += 3;
+        }
+        leadSurrogate = null;
+        // encode utf8
+        if (codePoint < 0x80) {
+            bytes += 1;
+        }
+        else if (codePoint < 0x800) {
+            bytes += 2;
+        }
+        else if (codePoint < 0x10000) {
+            bytes += 3;
+        }
+        else if (codePoint < 0x110000) {
+            bytes += 4;
+        }
+        else {
+            throw new Error("Invalid code point");
+        }
+    }
+    return bytes;
+}
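The `getByteLength` helper moved into utils.js above computes a string's UTF-8 byte length without allocating anything. A quick sketch of what it should agree with; it is an internal helper, so the deep import path below is an assumption rather than a documented entry point:

```ts
// Hypothetical deep import; getByteLength is not re-exported from the package root.
import { getByteLength } from "microcbor/lib/utils.js"

// The UTF-8 byte length differs from .length for non-ASCII code points.
console.log("café".length)                               // 4
console.log(getByteLength("café"))                       // 5
console.log(new TextEncoder().encode("café").byteLength) // 5
```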
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "microcbor",
-  "version": "0.
+  "version": "1.0.0",
   "type": "module",
   "files": [
     "lib"
@@ -35,7 +35,7 @@
     "@ava/typescript": "^5.0.0",
     "@types/node": "^22.13.9",
     "ava": "^6.2.0",
-    "cbor": "^
+    "cbor": "^10.0.3",
     "typescript": "^5.6.0"
   },
   "dependencies": {
package/lib/decodeStream.d.ts
DELETED

package/lib/encode.d.ts
DELETED
@@ -1,37 +0,0 @@
-import type { CBORValue } from "./types.js";
-export declare class Encoder {
-    #private;
-    static defaultChunkSize: number;
-    readonly noCopy: boolean;
-    readonly chunkSize: number;
-    private readonly encoder;
-    private readonly buffer;
-    private readonly view;
-    private offset;
-    constructor(options?: {
-        noCopy?: boolean;
-        chunkSize?: number;
-    });
-    get closed(): boolean;
-    private allocate;
-    private float16;
-    private float32;
-    private float64;
-    private uint8;
-    private uint16;
-    private uint32;
-    private uint64;
-    private encodeTypeAndArgument;
-    private encodeNumber;
-    private encodeInteger;
-    private encodeFloat;
-    private encodeString;
-    private encodeBytes;
-    encodeValue(value: CBORValue): Iterable<Uint8Array>;
-    flush(): Iterable<Uint8Array>;
-    private static compareKeys;
-    private static getAdditionalInformation;
-}
-export declare function encode(value: CBORValue, options?: {
-    chunkSize?: number;
-}): Uint8Array;

package/lib/encodeStream.d.ts
DELETED

package/lib/encodeStream.js
DELETED