@bigmistqke/rpc 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/utils.ts ADDED
@@ -0,0 +1,159 @@
1
+ import { BaseSchema, InferOutput, safeParse } from 'valibot'
2
+
3
+ export function createIdAllocator() {
4
+ const freeIds = new Array<number>()
5
+ let id = 0
6
+
7
+ return {
8
+ create() {
9
+ if (freeIds.length) {
10
+ return freeIds.pop()!
11
+ }
12
+ return id++
13
+ },
14
+ free(id: number) {
15
+ freeIds.push(id)
16
+ },
17
+ }
18
+ }
19
+
20
/**
 * Stores values under allocated numeric ids.
 */
export interface IdRegistry<T> {
  /** Stores `value` and returns the id assigned to it. */
  register(value: T): number
  /** Releases `id` for reuse and returns the value it referenced, if any. */
  free(id: number): T | undefined
}
24
+
25
+ export function createIdRegistry<T>(): IdRegistry<T> {
26
+ const map = new Map<number, T>()
27
+ const idFactory = createIdAllocator()
28
+
29
+ return {
30
+ register(value: T) {
31
+ const id = idFactory.create()
32
+ map.set(id, value)
33
+ return id
34
+ },
35
+ free(id: number) {
36
+ idFactory.free(id)
37
+ return map.get(id)
38
+ },
39
+ }
40
+ }
41
+
42
/**
 * Registry of pending promise settlement handlers keyed by numeric id —
 * presumably used to correlate asynchronous responses with the call that
 * produced them (TODO confirm against callers).
 */
export type PromiseRegistry = IdRegistry<{
  resolve(value: any): void
  reject(value: unknown): void
}>
46
+
47
+ export function createPromiseRegistry(): PromiseRegistry {
48
+ return createIdRegistry()
49
+ }
50
+
51
+ export function defer<T = void>() {
52
+ let resolve: (value: T) => void = null!
53
+ let reject: (value: unknown) => void = null!
54
+ return {
55
+ promise: new Promise<T>((_resolve, _reject) => ((resolve = _resolve), (reject = _reject))),
56
+ resolve,
57
+ reject,
58
+ }
59
+ }
60
+
61
+ export function createCommander<T extends object = object>(
62
+ apply: (topics: Array<string>, args: Array<any>) => void,
63
+ ): T {
64
+ function _createCommander(
65
+ topics: Array<string>,
66
+ apply: (topics: Array<string>, args: Array<any>) => void,
67
+ ): T {
68
+ return new Proxy(function () {} as T, {
69
+ get(_, topic) {
70
+ if (typeof topic === 'symbol') return undefined
71
+ return _createCommander([...topics, topic], apply)
72
+ },
73
+ apply(_, __, args) {
74
+ return apply(topics, args)
75
+ },
76
+ })
77
+ }
78
+ return _createCommander([], apply)
79
+ }
80
+
81
+ /**
82
+ * Creates a schema-backed shape definition with a validator and constructor.
83
+ *
84
+ * @param schema - A Valibot schema for the shape
85
+ * @param create - A function that produces valid output for the schema
86
+ */
87
+ export function createShape<
88
+ TSchema extends BaseSchema<any, any, any>,
89
+ TCreate extends (...args: Array<any>) => InferOutput<TSchema>,
90
+ >(schema: TSchema, create: TCreate) {
91
+ return {
92
+ validate: (value: any): value is InferOutput<TSchema> => safeParse(schema, value).success,
93
+ create,
94
+ }
95
+ }
96
+
97
+ // expose-core.ts
98
+ export function callMethod(methods: object, topics: string[], args: unknown[]) {
99
+ const method = topics.reduce((acc, topic) => {
100
+ const result = (acc as any)?.[topic]
101
+ return result
102
+ }, methods)
103
+ if (typeof method !== 'function') {
104
+ throw new Error(`Topics did not resolve to a function: [${topics.join(',')}]`)
105
+ }
106
+ return method(...args)
107
+ }
108
+
109
+ // NOTE: safari does not implement AsyncIterator for ReadableStream
110
+ // see https://caniuse.com/mdn-api_readablestream_--asynciterator
111
+ export function streamToAsyncIterable<T>(stream: ReadableStream<T>): AsyncIterable<T> {
112
+ if (Symbol.asyncIterator in stream) {
113
+ return stream as ReadableStream<T> & AsyncIterable<T>
114
+ }
115
+ const reader = stream.getReader()
116
+ return {
117
+ [Symbol.asyncIterator]() {
118
+ return {
119
+ async next() {
120
+ const result = await reader.read()
121
+ return result as IteratorResult<T>
122
+ },
123
+ async return() {
124
+ reader.releaseLock()
125
+ return { value: undefined, done: true }
126
+ },
127
+ }
128
+ },
129
+ }
130
+ }
131
+
132
+ export function createReadableStream() {
133
+ const closeHandlers = new Set<() => void>()
134
+ let controller: ReadableStreamDefaultController = null!
135
+ let closed = false
136
+
137
+ const stream = new ReadableStream({
138
+ start(_controller) {
139
+ controller = _controller
140
+ },
141
+ cancel() {
142
+ closeHandlers.forEach(handler => handler())
143
+ closed = true
144
+ },
145
+ })
146
+
147
+ return {
148
+ controller,
149
+ stream,
150
+ enqueue: controller.enqueue.bind(controller),
151
+ closed() {
152
+ return closed
153
+ },
154
+ onClose(cb: () => void) {
155
+ closeHandlers.add(cb)
156
+ return () => closeHandlers.delete(cb)
157
+ },
158
+ }
159
+ }
@@ -0,0 +1,413 @@
1
+ import { describe, it, expect, vi } from 'vitest'
2
+ import {
3
+ createStreamCodec,
4
+ PrimitiveCodec,
5
+ StructuralCodec,
6
+ GeneratorCodec,
7
+ } from '../src/stream/encoding'
8
+
9
// Shared UTF-8 text encoder/decoder used by the codec fixtures below.
const encoder = new TextEncoder()
const decoder = new TextDecoder()
11
+
12
+ // Helper to create a stream from chunks
13
+ function createStreamFromChunks(chunks: Uint8Array[]): ReadableStream<Uint8Array> {
14
+ let index = 0
15
+ return new ReadableStream({
16
+ pull(controller) {
17
+ if (index < chunks.length) {
18
+ controller.enqueue(chunks[index++])
19
+ } else {
20
+ controller.close()
21
+ }
22
+ },
23
+ })
24
+ }
25
+
26
+ // Helper to collect serialized chunks
27
+ function collectChunks(serialize: (onChunk: (chunk: Uint8Array) => void) => void): Uint8Array[] {
28
+ const chunks: Uint8Array[] = []
29
+ serialize(chunk => chunks.push(chunk))
30
+ return chunks
31
+ }
32
+
33
// PrimitiveCodec: a codec for values that encode to a single binary payload.
describe('PrimitiveCodec', () => {
  it('should create a primitive codec', () => {
    // String codec round-trips via UTF-8.
    const stringCodec = new PrimitiveCodec({
      test: (value): value is string => typeof value === 'string',
      encode: (value: string) => encoder.encode(value),
      decode: (buffer: Uint8Array) => decoder.decode(buffer),
    })

    expect(stringCodec.type).toBe('primitive')
    expect(stringCodec.test('hello')).toBe(true)
    expect(stringCodec.test(123)).toBe(false)

    const encoded = stringCodec.encode('hello')
    expect(encoded).toBeInstanceOf(Uint8Array)
    expect(stringCodec.decode(encoded)).toBe('hello')
  })
})
50
+
51
// StructuralCodec: a codec that decomposes a container into keyed/indexed
// child values, reassembled on decode via a `set` callback.
describe('StructuralCodec', () => {
  it('should create a structural codec for objects', () => {
    const objectCodec = new StructuralCodec({
      test: (value): value is object =>
        typeof value === 'object' && value !== null && !Array.isArray(value),
      encode: (value: Record<string, any>) => ({
        keys: Object.keys(value),
        values: Object.values(value),
      }),
      decode: () => {
        // Decoder hands back an empty object plus a setter used to fill it in.
        const obj: Record<string, any> = {}
        return {
          value: obj,
          set: (value: any, key: string | number) => {
            obj[key] = value
          },
        }
      },
    })

    expect(objectCodec.type).toBe('structural')
    expect(objectCodec.test({ foo: 'bar' })).toBe(true)
    expect(objectCodec.test([1, 2, 3])).toBe(false)
    expect(objectCodec.test(null)).toBe(false)
  })

  it('should create a structural codec for arrays', () => {
    // Array flavor: `length` instead of `keys`, setter indexes numerically.
    const arrayCodec = new StructuralCodec({
      test: (value): value is any[] => Array.isArray(value),
      encode: (value: any[]) => ({
        length: value.length,
        values: value,
      }),
      decode: () => {
        const arr: any[] = []
        return {
          value: arr,
          set: (value: any, key: string | number) => {
            arr[key as number] = value
          },
        }
      },
    })

    expect(arrayCodec.type).toBe('structural')
    expect(arrayCodec.test([1, 2, 3])).toBe(true)
    expect(arrayCodec.test({ a: 1 })).toBe(false)
  })
})
100
+
101
// GeneratorCodec: a codec whose encode/decode are async generators fed chunk
// by chunk.
describe('GeneratorCodec', () => {
  it('should create a generator codec', () => {
    const generatorCodec = new GeneratorCodec({
      test: (value): value is AsyncGenerator => value?.[Symbol.asyncIterator] !== undefined,
      encode: async function* (value: AsyncGenerator<string>) {
        for await (const item of value) {
          yield encoder.encode(item)
        }
      },
      decode: async function* () {
        // NOTE(review): `decoder.decode(buffer!)` is evaluated before the
        // first chunk is received (buffer is still undefined on the first
        // pass) — TextDecoder treats that as empty input; confirm intended.
        let buffer: Uint8Array
        while (true) {
          buffer = yield decoder.decode(buffer!)
        }
      },
    })

    expect(generatorCodec.type).toBe('generator')
  })
})
121
+
122
// Integration tests for createStreamCodec: end-to-end serialize/deserialize
// round-trips through the binary chunk format, across all codec kinds.
describe('createStreamCodec', () => {
  // With no custom codecs, values fall through to the built-in JSON codec.
  describe('with default JSON codec only', () => {
    it('should serialize and deserialize primitive values', async () => {
      const codec = createStreamCodec([])
      const values: any[] = []

      const chunks = collectChunks(onChunk => codec.serialize({ test: 'value' }, onChunk))
      expect(chunks.length).toBeGreaterThan(0)

      const stream = createStreamFromChunks(chunks)
      await codec.deserialize(stream, value => values.push(value))

      expect(values).toEqual([{ test: 'value' }])
    })

    it('should serialize and deserialize numbers', async () => {
      const codec = createStreamCodec([])
      const values: any[] = []

      const chunks = collectChunks(onChunk => codec.serialize(42, onChunk))
      const stream = createStreamFromChunks(chunks)
      await codec.deserialize(stream, value => values.push(value))

      expect(values).toEqual([42])
    })

    it('should serialize and deserialize strings', async () => {
      const codec = createStreamCodec([])
      const values: any[] = []

      const chunks = collectChunks(onChunk => codec.serialize('hello world', onChunk))
      const stream = createStreamFromChunks(chunks)
      await codec.deserialize(stream, value => values.push(value))

      expect(values).toEqual(['hello world'])
    })

    it('should serialize and deserialize null', async () => {
      const codec = createStreamCodec([])
      const values: any[] = []

      const chunks = collectChunks(onChunk => codec.serialize(null, onChunk))
      const stream = createStreamFromChunks(chunks)
      await codec.deserialize(stream, value => values.push(value))

      expect(values).toEqual([null])
    })

    it('should serialize and deserialize arrays', async () => {
      const codec = createStreamCodec([])
      const values: any[] = []

      const chunks = collectChunks(onChunk => codec.serialize([1, 2, 3], onChunk))
      const stream = createStreamFromChunks(chunks)
      await codec.deserialize(stream, value => values.push(value))

      expect(values).toEqual([[1, 2, 3]])
    })

    it('should handle multiple values', async () => {
      // Several independent serializations concatenated onto one stream must
      // come back out as distinct values, in order.
      const codec = createStreamCodec([])
      const values: any[] = []

      const allChunks: Uint8Array[] = []
      codec.serialize('first', chunk => allChunks.push(chunk))
      codec.serialize({ second: true }, chunk => allChunks.push(chunk))
      codec.serialize([3], chunk => allChunks.push(chunk))

      const stream = createStreamFromChunks(allChunks)
      await codec.deserialize(stream, value => values.push(value))

      expect(values).toEqual(['first', { second: true }, [3]])
    })
  })

  // Custom codecs take precedence over the JSON fallback when `test` matches.
  describe('with custom PrimitiveCodec', () => {
    it('should use custom codec when test matches', async () => {
      const uint8Codec = new PrimitiveCodec({
        test: (value): value is Uint8Array => value instanceof Uint8Array,
        encode: (value: Uint8Array) => value,
        decode: (buffer: Uint8Array) => buffer,
      })

      const codec = createStreamCodec([uint8Codec])
      const values: any[] = []

      const testData = new Uint8Array([1, 2, 3, 4, 5])
      const chunks = collectChunks(onChunk => codec.serialize(testData, onChunk))
      const stream = createStreamFromChunks(chunks)
      await codec.deserialize(stream, value => values.push(value))

      expect(values[0]).toBeInstanceOf(Uint8Array)
      expect(Array.from(values[0])).toEqual([1, 2, 3, 4, 5])
    })

    it('should fall back to JSON codec when custom codec does not match', async () => {
      const uint8Codec = new PrimitiveCodec({
        test: (value): value is Uint8Array => value instanceof Uint8Array,
        encode: (value: Uint8Array) => value,
        decode: (buffer: Uint8Array) => buffer,
      })

      const codec = createStreamCodec([uint8Codec])
      const values: any[] = []

      const chunks = collectChunks(onChunk => codec.serialize({ regular: 'object' }, onChunk))
      const stream = createStreamFromChunks(chunks)
      await codec.deserialize(stream, value => values.push(value))

      expect(values).toEqual([{ regular: 'object' }])
    })
  })

  describe('with StructuralCodec', () => {
    it('should handle structural encoding with keys', async () => {
      // Map round-trip: keys travel alongside the values iterator.
      const mapCodec = new StructuralCodec({
        test: (value): value is Map<string, any> => value instanceof Map,
        encode: (value: Map<string, any>) => ({
          keys: Array.from(value.keys()),
          values: value.values(),
        }),
        decode: () => {
          const map = new Map()
          return {
            value: map,
            set: (value: any, key: string | number) => map.set(key, value),
          }
        },
      })

      const codec = createStreamCodec([mapCodec])
      const values: any[] = []

      const testMap = new Map([
        ['a', 1],
        ['b', 2],
      ])
      const chunks = collectChunks(onChunk => codec.serialize(testMap, onChunk))
      const stream = createStreamFromChunks(chunks)
      await codec.deserialize(stream, value => values.push(value))

      expect(values[0]).toBeInstanceOf(Map)
      expect(values[0].get('a')).toBe(1)
      expect(values[0].get('b')).toBe(2)
    })

    it('should handle structural encoding with length (arrays)', async () => {
      const setCodec = new StructuralCodec({
        test: (value): value is Set<any> => value instanceof Set,
        encode: (value: Set<any>) => ({
          length: value.size,
          values: value.values(),
        }),
        decode: () => {
          // NOTE(review): the Set is constructed from `items` while it is
          // still empty; later `set` calls only push into the array, not the
          // Set — which is why the assertion below checks only instanceof.
          const items: any[] = []
          return {
            value: new Set(items),
            set: (value: any) => items.push(value),
          }
        },
      })

      const codec = createStreamCodec([setCodec])
      const values: any[] = []

      const testSet = new Set([1, 2, 3])
      const chunks = collectChunks(onChunk => codec.serialize(testSet, onChunk))
      const stream = createStreamFromChunks(chunks)
      await codec.deserialize(stream, value => values.push(value))

      // Note: The Set's values are decoded but stored in array order
      expect(values[0]).toBeInstanceOf(Set)
    })
  })

  describe('with GeneratorCodec', () => {
    it('should handle async generator encoding', async () => {
      const asyncIterableCodec = new GeneratorCodec({
        test: (value): value is AsyncIterable<any> =>
          value !== null && typeof value === 'object' && Symbol.asyncIterator in value,
        encode: async function* (value: AsyncIterable<any>) {
          for await (const item of value) {
            yield encoder.encode(JSON.stringify(item))
          }
        },
        decode: async function* () {
          // Accumulates every received chunk; the same array is yielded after
          // each chunk and returned when the stream finishes.
          const items: any[] = []
          try {
            while (true) {
              const chunk: Uint8Array = yield items
              items.push(JSON.parse(decoder.decode(chunk)))
            }
          } finally {
            return items
          }
        },
      })

      const codec = createStreamCodec([asyncIterableCodec])

      async function* testGenerator() {
        yield 'a'
        yield 'b'
        yield 'c'
      }

      const chunks: Uint8Array[] = []
      codec.serialize(testGenerator(), chunk => chunks.push(chunk))

      // Wait for async generator to complete
      await new Promise(resolve => setTimeout(resolve, 50))

      const values: any[] = []
      const stream = createStreamFromChunks(chunks)
      await codec.deserialize(stream, value => values.push(value))

      // The generator codec yields its accumulated value
      expect(values.length).toBeGreaterThan(0)
    })
  })

  describe('codec selection order', () => {
    it('should use first matching codec', async () => {
      // A codec listed before the JSON fallback must win for matching values.
      const specificCodec = new PrimitiveCodec({
        test: (value): value is { special: true } =>
          typeof value === 'object' && value?.special === true,
        encode: () => encoder.encode('SPECIAL'),
        decode: () => ({ special: true, decoded: true }),
      })

      const codec = createStreamCodec([specificCodec])
      const values: any[] = []

      const chunks = collectChunks(onChunk => codec.serialize({ special: true }, onChunk))
      const stream = createStreamFromChunks(chunks)
      await codec.deserialize(stream, value => values.push(value))

      expect(values[0]).toEqual({ special: true, decoded: true })
    })
  })

  describe('binary packing', () => {
    it('should produce valid binary format', () => {
      const codec = createStreamCodec([])
      const chunks: Uint8Array[] = []

      codec.serialize('test', chunk => chunks.push(chunk))

      // First chunk should have proper header
      const chunk = chunks[0]!
      expect(chunk.length).toBeGreaterThan(6) // kind(1) + header(1) + length(4) + payload

      // Kind should be 0x01 (default)
      expect(chunk[0]).toBe(0x01)
    })
  })

  describe('stream handling', () => {
    it('should handle chunked stream input', async () => {
      // Frames split at arbitrary byte boundaries must still reassemble.
      const codec = createStreamCodec([])
      const values: any[] = []

      // Serialize a value
      const chunks = collectChunks(onChunk => codec.serialize({ chunked: 'data' }, onChunk))
      const fullBuffer = concatenate(chunks)

      // Split into smaller chunks to simulate network conditions
      const splitChunks = [
        fullBuffer.slice(0, 3),
        fullBuffer.slice(3, 10),
        fullBuffer.slice(10),
      ].filter(c => c.length > 0)

      const stream = createStreamFromChunks(splitChunks)
      await codec.deserialize(stream, value => values.push(value))

      expect(values).toEqual([{ chunked: 'data' }])
    })
  })
})
402
+
403
+ // Helper function
404
+ function concatenate(arrays: Uint8Array[]): Uint8Array {
405
+ const totalLength = arrays.reduce((sum, arr) => sum + arr.length, 0)
406
+ const result = new Uint8Array(totalLength)
407
+ let offset = 0
408
+ for (const arr of arrays) {
409
+ result.set(arr, offset)
410
+ offset += arr.length
411
+ }
412
+ return result
413
+ }