@peers-app/peers-sdk 0.7.17 → 0.7.19

@@ -230,16 +230,17 @@ describe(connection_1.Connection, () => {
  const result = await clientConnection.doHandshake(serverAddress).catch((err) => String(err));
  expect(result).toMatch(/Inconsistent public keys/);
  });
- it("should handle RPC calls with large arguments through chunking", async () => {
+ it("should handle RPC calls with large arguments and responses through chunking", async () => {
  const { clientSocket, serverSocket } = createTestSocketPair();
  const clientDevice = new device_1.Device();
  const serverDevice = new device_1.Device();
  const clientConnection = new connection_1.Connection(clientSocket, clientDevice);
  const serverConnection = new connection_1.Connection(serverSocket, serverDevice, ['localhost']);
- clientConnection.maxChunkSize = 50;
- serverConnection.maxChunkSize = 50;
- clientConnection.socket.maxChunkSize = 50;
- serverConnection.socket.maxChunkSize = 50;
+ const chunkSize = 256; // 256 minimum chunk size due to encoding overheads
+ clientConnection.maxChunkSize = chunkSize;
+ serverConnection.maxChunkSize = chunkSize;
+ clientConnection.socket.maxChunkSize = chunkSize;
+ serverConnection.socket.maxChunkSize = chunkSize;
  await clientConnection.doHandshake('localhost');
  const clientEmitSpy = jest.spyOn(clientConnection.socket.socket, 'emit');
  const serverEmitSpy = jest.spyOn(serverConnection.socket.socket, 'emit');
@@ -259,15 +260,21 @@ describe(connection_1.Connection, () => {
  expect(result.received.message).toBe("A".repeat(500));
  expect(result.received.metadata).toBe("B".repeat(300));
  expect(result.received.payload).toBe("C".repeat(400));
- expect(result.processedSize).toBeGreaterThan(1000); // Should be large
+ expect(JSON.stringify(largeData).length).toEqual(1241);
  // Count chunk emissions
  const clientChunkCalls = clientEmitSpy.mock.calls.filter((call) => call[0] === clientConnection.socket.safeSocketChunkEventName);
  const serverChunkCalls = serverEmitSpy.mock.calls.filter((call) => call[0] === serverConnection.socket.safeSocketChunkEventName);
- // The data gets wrapped by Connection layer (encryption/signing), making it larger
- // 41 chunks at 50 bytes each = ~2050 bytes (41 * 50)
- // 43 chunks at 50 bytes each = ~2150 bytes (43 * 50)
- expect(clientChunkCalls.length).toBe(41); // ~2050 bytes wrapped data / 50 bytes per chunk
- expect(serverChunkCalls.length).toBe(43); // ~2150 bytes wrapped data / 50 bytes per chunk
+ expect(clientChunkCalls.length).toBe(4);
+ expect(serverChunkCalls.length).toBe(5);
+ // expect calls to never have chunks greater than chunk size
+ for (const call of clientChunkCalls) {
+ const chunkData = call[1];
+ expect(chunkData.length).toBeLessThanOrEqual(chunkSize);
+ }
+ for (const call of serverChunkCalls) {
+ const chunkData = call[1];
+ expect(chunkData.length).toBeLessThanOrEqual(chunkSize);
+ }
  });
  it("should reject handshake if device timestamps are too far apart", async () => {
  const { clientSocket, serverSocket } = createTestSocketPair();
@@ -9,9 +9,21 @@ function getTrustLevelFn(me, serverUrl) {
  const userContext = await (0, context_1.getUserContext)();
  const userDataContext = userContext.userDataContext;
  if (deviceInfo.userId === me.userId && deviceInfo.publicKey === me.publicKey && deviceInfo.publicBoxKey === me.publicBoxKey) {
- if (deviceInfo.deviceId === (0, data_1.thisDeviceId)()) {
- // return TrustLevel.Untrusted;
- }
+ // if (deviceInfo.deviceId === thisDeviceId()) {
+ // return TrustLevel.Untrusted;
+ // }
+ const device = await (0, data_1.Devices)(userDataContext).get(deviceInfo.deviceId) || {
+ deviceId: deviceInfo.deviceId,
+ userId: deviceInfo.userId,
+ firstSeen: new Date(),
+ lastSeen: new Date(),
+ trustLevel: socket_type_1.TrustLevel.Trusted,
+ serverUrl,
+ };
+ device.lastSeen = new Date();
+ device.trustLevel = socket_type_1.TrustLevel.Trusted;
+ console.log(`Updating my own device: ${deviceInfo.deviceId}`);
+ await (0, data_1.Devices)(userDataContext).save(device);
  return socket_type_1.TrustLevel.Trusted;
  }
  // await Devices().delete(deviceInfo.deviceId);
@@ -1,11 +1,4 @@
  import { ISocket, RPCCallback } from "./socket.type";
- export interface IMessageChunk {
- messageId: string;
- chunkIndex: number;
- totalChunks?: number;
- eventName?: string;
- data: string;
- }
  export declare class StreamedSocket implements ISocket {
  readonly socket: ISocket;
  maxChunkSize: number;
@@ -2,9 +2,50 @@
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.StreamedSocket = void 0;
  const lodash_1 = require("lodash");
- const serial_json_1 = require("../serial-json");
  const utils_1 = require("../utils");
  const socket_type_1 = require("./socket.type");
+ const tx_encoding_1 = require("./tx-encoding");
+ const msgpack_1 = require("@msgpack/msgpack");
+ // Encode chunk metadata + data into a single Uint8Array
+ function encodeChunk(chunk) {
+ // Encode metadata using msgpack
+ const metadata = {
+ messageId: chunk.messageId,
+ chunkIndex: chunk.chunkIndex,
+ ...(chunk.totalChunks !== undefined && { totalChunks: chunk.totalChunks }),
+ ...(chunk.eventName !== undefined && { eventName: chunk.eventName }),
+ };
+ const metadataBytes = (0, msgpack_1.encode)(metadata);
+ const metadataLength = metadataBytes.length;
+ // Create combined buffer: [4 bytes length][metadata][data]
+ const combined = new Uint8Array(4 + metadataLength + chunk.data.length);
+ // Write metadata length as 32-bit unsigned integer (big-endian)
+ const view = new DataView(combined.buffer, combined.byteOffset, combined.byteLength);
+ view.setUint32(0, metadataLength, false); // big-endian
+ // Write metadata
+ combined.set(metadataBytes, 4);
+ // Write data
+ combined.set(chunk.data, 4 + metadataLength);
+ return combined;
+ }
+ // Decode binary chunk back into IMessageChunk
+ function decodeChunk(bytes) {
+ // Read metadata length
+ const view = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
+ const metadataLength = view.getUint32(0, false); // big-endian
+ // Extract metadata
+ const metadataBytes = bytes.subarray(4, 4 + metadataLength);
+ const metadata = (0, msgpack_1.decode)(metadataBytes);
+ // Extract data
+ const data = bytes.subarray(4 + metadataLength);
+ return {
+ messageId: metadata.messageId,
+ chunkIndex: metadata.chunkIndex,
+ totalChunks: metadata.totalChunks,
+ eventName: metadata.eventName,
+ data: data,
+ };
+ }
  class StreamedSocket {
  socket;
  maxChunkSize;
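
For context (not part of the published diff): a minimal TypeScript sketch of the wire framing that encodeChunk/decodeChunk implement above, namely a 4-byte big-endian length prefix, then the msgpack-encoded chunk metadata, then the raw payload bytes. The literal values are illustrative only.

import { encode, decode } from "@msgpack/msgpack";

// Sender: frame one chunk as [4-byte BE metadata length][msgpack metadata][payload]
const metadata = { messageId: "m1", chunkIndex: 0, totalChunks: 1, eventName: "rpc" };
const payload = new Uint8Array([1, 2, 3]);
const metadataBytes = encode(metadata);
const framed = new Uint8Array(4 + metadataBytes.length + payload.length);
new DataView(framed.buffer).setUint32(0, metadataBytes.length, false); // big-endian
framed.set(metadataBytes, 4);
framed.set(payload, 4 + metadataBytes.length);

// Receiver: read the length prefix, then split metadata from payload
const len = new DataView(framed.buffer, framed.byteOffset).getUint32(0, false);
const meta = decode(framed.subarray(4, 4 + len)); // { messageId: "m1", chunkIndex: 0, ... }
const data = framed.subarray(4 + len);            // Uint8Array [1, 2, 3]
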
@@ -13,7 +54,10 @@ class StreamedSocket {
  constructor(socket, maxChunkSize = socket_type_1.DEFAULT_MAX_CHUNK_SIZE) {
  this.socket = socket;
  this.maxChunkSize = maxChunkSize;
- socket.on(this.safeSocketChunkEventName, (chunk) => this.handleChunk(chunk));
+ socket.on(this.safeSocketChunkEventName, (encodedChunk) => {
+ const chunk = decodeChunk(encodedChunk);
+ this.handleChunk(chunk);
+ });
  }
  get id() {
  return this.socket.id;
@@ -26,8 +70,8 @@ class StreamedSocket {
  }
  callbacks = {};
  emit(eventName, args, callback) {
- const strArgs = (0, serial_json_1.toJSONString)(args);
- // if (strArgs.length < this.maxChunkSize) {
+ const encoded = (0, tx_encoding_1.txEncode)(args);
+ // if (encoded.length < this.maxChunkSize) {
  // // If the data is small enough, send it directly
  // this.socket.emit(eventName, args, callback);
  // return;
@@ -37,25 +81,34 @@ class StreamedSocket {
  this.callbacks[messageId] = callback;
  }
  const chunks = [];
- for (let i = 0; i < strArgs.length; i += this.maxChunkSize) {
- const chunkData = strArgs.slice(i, i + this.maxChunkSize);
+ // Reserve space for metadata overhead (typically ~50-100 bytes)
+ const metadataOverhead = 128;
+ if (this.maxChunkSize < (metadataOverhead * 2)) {
+ throw new Error(`maxChunkSize ${this.maxChunkSize} is too small to accommodate metadata overhead, must be greater than ${metadataOverhead * 2}`);
+ }
+ const effectiveChunkSize = this.maxChunkSize - metadataOverhead;
+ for (let i = 0; i < encoded.length; i += effectiveChunkSize) {
+ const chunkData = encoded.subarray(i, i + effectiveChunkSize);
  chunks.push({
  messageId,
- chunkIndex: Math.floor(i / this.maxChunkSize),
- data: chunkData
+ chunkIndex: Math.floor(i / effectiveChunkSize),
+ data: chunkData,
  });
  if (i === 0) {
  // The first chunk contains metadata about the chunks
  chunks[0].eventName = eventName;
- chunks[0].totalChunks = Math.ceil(strArgs.length / this.maxChunkSize);
+ chunks[0].totalChunks = Math.ceil(encoded.length / effectiveChunkSize);
  }
  }
  if (chunks.length > 1) {
  console.debug(`Sending ${chunks.length} chunks for event ${eventName} with messageId ${messageId}`);
  }
  for (const chunk of chunks) {
+ // Encode chunk with metadata into single Uint8Array
+ const encodedChunk = encodeChunk(chunk);
  // TODO on error or timeout retry
- this.socket.emit(this.safeSocketChunkEventName, chunk, lodash_1.noop);
+ // TODO respond to backpressure from socket
+ this.socket.emit(this.safeSocketChunkEventName, encodedChunk, lodash_1.noop);
  }
  }
  handlers = {};
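
For context (not part of the published diff): the chunk count follows directly from the constants in this hunk. The sender reserves a fixed 128-byte budget for the framed chunk metadata, so payload bytes are split on the remainder of maxChunkSize. A small illustrative calculation in TypeScript, with a hypothetical encoded payload size:

// Mirrors the splitting logic above; the 500-byte payload size is hypothetical
const maxChunkSize = 256;                                    // as used in the Connection test
const metadataOverhead = 128;                                // reserved for chunk metadata framing
const effectiveChunkSize = maxChunkSize - metadataOverhead;  // 128 payload bytes per chunk

const encodedLength = 500;                                   // example txEncode output size
const totalChunks = Math.ceil(encodedLength / effectiveChunkSize);
console.log(totalChunks);                                    // Math.ceil(500 / 128) === 4
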
@@ -98,10 +151,17 @@ class StreamedSocket {
  async processChunks(chunks) {
  try {
  const chunkZero = chunks[0];
- const strData = chunks.map(c => c.data).join('');
+ // Reassemble Uint8Array chunks (already decoded from binary format)
+ const totalSize = chunks.reduce((sum, chunk) => sum + chunk.data.length, 0);
+ const reassembled = new Uint8Array(totalSize);
+ let offset = 0;
+ for (const chunk of chunks) {
+ reassembled.set(chunk.data, offset);
+ offset += chunk.data.length;
+ }
  delete this.chunkBuffers[chunkZero.messageId];
  const eventName = chunkZero.eventName;
- const args = (0, serial_json_1.fromJSONString)(strData);
+ const args = (0, tx_encoding_1.txDecode)(reassembled);
  if (eventName === this.safeSocketResponseEventName) {
  this.handleResponse(args.messageId, args.error, args.result);
  return;
@@ -2,6 +2,17 @@
  Object.defineProperty(exports, "__esModule", { value: true });
  const streamed_socket_1 = require("./streamed-socket");
  const utils_1 = require("../utils");
+ const tx_encoding_1 = require("./tx-encoding");
+ const msgpack_1 = require("@msgpack/msgpack");
+ // Helper to decode the binary chunk format
+ function decodeChunk(bytes) {
+ const view = new DataView(bytes.buffer, bytes.byteOffset);
+ const metadataLength = view.getUint32(0, false);
+ const metadataBytes = bytes.subarray(4, 4 + metadataLength);
+ const metadata = (0, msgpack_1.decode)(metadataBytes);
+ const data = bytes.subarray(4 + metadataLength);
+ return { ...metadata, data };
+ }
  function createMockSocket() {
  const emittedEvents = [];
  const socket = {
@@ -35,10 +46,171 @@ describe('StreamedSocket', () => {
  expect(emittedEvents).toHaveLength(1);
  const chunkEvent = emittedEvents[0];
  expect(chunkEvent.eventName).toBe('__streamed-socket-chunk');
- expect(chunkEvent.args.messageId).toBeTruthy();
- expect(chunkEvent.args.chunkIndex).toBe(0);
- expect(chunkEvent.args.totalChunks).toBe(1);
- expect(chunkEvent.args.eventName).toBe('test-event');
- expect(JSON.parse(chunkEvent.args.data)).toEqual(testData);
+ // Args should be a Uint8Array (binary encoded chunk)
+ expect(chunkEvent.args).toBeInstanceOf(Uint8Array);
+ // Decode the binary chunk
+ const decodedChunk = decodeChunk(chunkEvent.args);
+ expect(decodedChunk.messageId).toBeTruthy();
+ expect(decodedChunk.chunkIndex).toBe(0);
+ expect(decodedChunk.totalChunks).toBe(1);
+ expect(decodedChunk.eventName).toBe('test-event');
+ // Decode the actual data
+ const decoded = (0, tx_encoding_1.txDecode)(decodedChunk.data);
+ expect(decoded).toEqual(testData);
+ });
+ it('should emit multiple chunks for large messages', () => {
+ const mock = createMockSocket();
+ const streamedSocket = new streamed_socket_1.StreamedSocket(mock.socket, 256); // Small chunk size to force chunking
+ // Create a large test object
+ const testData = {
+ message: 'x'.repeat(200),
+ array: [1, 2, 3, 4, 5],
+ nested: { deep: 'value' }
+ };
+ streamedSocket.emit('test-event', testData, () => { });
+ const emittedEvents = mock.getEmittedEvents();
+ expect(emittedEvents.length).toBeGreaterThan(1);
+ // Decode all chunks
+ const decodedChunks = emittedEvents.map(event => {
+ expect(event.args).toBeInstanceOf(Uint8Array);
+ return decodeChunk(event.args);
+ });
+ // Verify first chunk has metadata
+ const firstChunk = decodedChunks[0];
+ expect(firstChunk.eventName).toBe('test-event');
+ expect(firstChunk.totalChunks).toBe(emittedEvents.length);
+ expect(firstChunk.chunkIndex).toBe(0);
+ // Verify all chunks have the same messageId
+ const messageId = firstChunk.messageId;
+ decodedChunks.forEach(chunk => {
+ expect(chunk.messageId).toBe(messageId);
+ });
+ // Verify chunks are in order
+ decodedChunks.forEach((chunk, index) => {
+ expect(chunk.chunkIndex).toBe(index);
+ });
+ // Verify all chunk data are Uint8Arrays
+ decodedChunks.forEach(chunk => {
+ expect(chunk.data).toBeInstanceOf(Uint8Array);
+ });
+ // Reassemble chunks and verify data integrity
+ const reassembled = new Uint8Array(decodedChunks.reduce((sum, chunk) => sum + chunk.data.length, 0));
+ let offset = 0;
+ for (const chunk of decodedChunks) {
+ reassembled.set(chunk.data, offset);
+ offset += chunk.data.length;
+ }
+ const decoded = (0, tx_encoding_1.txDecode)(reassembled);
+ expect(decoded).toEqual(testData);
+ });
+ it('should handle binary data in messages', () => {
+ const mock = createMockSocket();
+ const streamedSocket = new streamed_socket_1.StreamedSocket(mock.socket, 1000);
+ const testData = {
+ binaryData: new Uint8Array([1, 2, 3, 4, 5]),
+ metadata: { name: 'test' }
+ };
+ streamedSocket.emit('test-event', testData, () => { });
+ const emittedEvents = mock.getEmittedEvents();
+ expect(emittedEvents).toHaveLength(1);
+ const decodedChunk = decodeChunk(emittedEvents[0].args);
+ const decoded = (0, tx_encoding_1.txDecode)(decodedChunk.data);
+ expect(decoded.binaryData).toBeInstanceOf(Uint8Array);
+ expect(decoded.binaryData).toEqual(testData.binaryData);
+ expect(decoded.metadata).toEqual(testData.metadata);
+ });
+ it('should handle special types in messages', () => {
+ const mock = createMockSocket();
+ const streamedSocket = new streamed_socket_1.StreamedSocket(mock.socket, 1000);
+ const testData = {
+ date: new Date('2023-01-01T00:00:00.000Z'),
+ nan: NaN,
+ infinity: Infinity,
+ undef: undefined,
+ nullVal: null
+ };
+ streamedSocket.emit('test-event', testData, () => { });
+ const emittedEvents = mock.getEmittedEvents();
+ expect(emittedEvents).toHaveLength(1);
+ const decodedChunk = decodeChunk(emittedEvents[0].args);
+ const decoded = (0, tx_encoding_1.txDecode)(decodedChunk.data);
+ expect(decoded.date).toBeInstanceOf(Date);
+ expect(decoded.date.toISOString()).toBe('2023-01-01T00:00:00.000Z');
+ expect(Number.isNaN(decoded.nan)).toBe(true);
+ expect(decoded.infinity).toBe(Infinity);
+ expect(decoded.undef).toBeUndefined();
+ expect(decoded.nullVal).toBeNull();
+ });
+ it('should demonstrate msgpack efficiency: structured data vs JSON string (uncompressed)', () => {
+ const mock1 = createMockSocket();
+ const mock2 = createMockSocket();
+ const streamedSocket1 = new streamed_socket_1.StreamedSocket(mock1.socket, 1000000); // Large chunk to avoid splitting
+ const streamedSocket2 = new streamed_socket_1.StreamedSocket(mock2.socket, 1000000);
+ // Create a small structured object that won't trigger compression (< 1024 bytes)
+ const smallObject = {
+ users: Array.from({ length: 5 }, (_, i) => ({
+ id: i + 1000000, // Use larger numbers to make JSON less compressible
+ name: `User${i}_${Math.random().toString(36).substring(7)}`, // Add randomness
+ email: `user${i}@example.com`,
+ score: Math.random() * 1000,
+ active: i % 2 === 0,
+ }))
+ };
+ // Convert to JSON string
+ const jsonString = JSON.stringify(smallObject);
+ console.log(`Original JSON string size: ${jsonString.length} bytes`);
+ // Send as structured object (msgpack can optimize)
+ streamedSocket1.emit('test-object', smallObject, () => { });
+ // Send as JSON string (msgpack treats it as opaque string)
+ streamedSocket2.emit('test-string', { jsonPayload: jsonString }, () => { });
+ const objectEvents = mock1.getEmittedEvents();
+ const stringEvents = mock2.getEmittedEvents();
+ expect(objectEvents).toHaveLength(1);
+ expect(stringEvents).toHaveLength(1);
+ // Decode and measure sizes
+ const objectChunk = decodeChunk(objectEvents[0].args);
+ const stringChunk = decodeChunk(stringEvents[0].args);
+ const objectSize = objectChunk.data.length;
+ const stringSize = stringChunk.data.length;
+ console.log(`Msgpack with structured data: ${objectSize} bytes (${((1 - objectSize / jsonString.length) * 100).toFixed(1)}% reduction from JSON)`);
+ console.log(`Msgpack with JSON string: ${stringSize} bytes (${((1 - stringSize / jsonString.length) * 100).toFixed(1)}% reduction from JSON)`);
+ console.log(`Efficiency gain: structured data is ${((1 - objectSize / stringSize) * 100).toFixed(1)}% smaller than JSON string`);
+ // Msgpack should be more efficient with structured data than with a JSON string
+ expect(objectSize).toBeLessThan(stringSize);
+ // Structured data should be at least 10% more efficient
+ const efficiency = (1 - objectSize / stringSize) * 100;
+ expect(efficiency).toBeGreaterThan(10);
+ // Verify data integrity
+ const decodedObject = (0, tx_encoding_1.txDecode)(objectChunk.data);
+ const decodedString = (0, tx_encoding_1.txDecode)(stringChunk.data);
+ expect(decodedObject.users).toHaveLength(5);
+ expect(decodedString.jsonPayload).toBe(jsonString);
+ });
+ it('should demonstrate compression effectiveness for large repetitive data', () => {
+ const mock = createMockSocket();
+ const streamedSocket = new streamed_socket_1.StreamedSocket(mock.socket, 1000000);
+ // Create large repetitive object that will compress well
+ const largeObject = {
+ data: Array.from({ length: 200 }, () => ({
+ type: 'event',
+ status: 'active',
+ category: 'user_action',
+ timestamp: new Date('2024-01-01'),
+ metadata: { app: 'test', version: '1.0.0' }
+ }))
+ };
+ const jsonString = JSON.stringify(largeObject);
+ console.log(`Large repetitive data - JSON size: ${jsonString.length} bytes`);
+ streamedSocket.emit('test-compressed', largeObject, () => { });
+ const events = mock.getEmittedEvents();
+ const chunk = decodeChunk(events[0].args);
+ console.log(`After msgpack + compression: ${chunk.data.length} bytes`);
+ console.log(`Compression ratio: ${((1 - chunk.data.length / jsonString.length) * 100).toFixed(1)}% reduction`);
+ // Should achieve significant compression (>80% reduction)
+ expect(chunk.data.length).toBeLessThan(jsonString.length * 0.2);
+ // Verify data integrity
+ const decoded = (0, tx_encoding_1.txDecode)(chunk.data);
+ expect(decoded.data).toHaveLength(200);
+ expect(decoded.data[0].type).toBe('event');
  });
  });
@@ -0,0 +1,2 @@
+ export declare function txEncode(data: unknown): Uint8Array;
+ export declare function txDecode(data: Uint8Array): unknown;
@@ -0,0 +1,44 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.txEncode = txEncode;
+ exports.txDecode = txDecode;
+ const msgpack_1 = require("@msgpack/msgpack");
+ const fflate_1 = require("fflate");
+ const serial_json_1 = require("../serial-json");
+ function txEncode(data) {
+ const noCycles = (0, serial_json_1.toJSON)(data); // remove cycles and encode dates, etc.
+ // First encode to msgpack
+ const encoded = (0, msgpack_1.encode)(noCycles);
+ let body;
+ let flag;
+ if (encoded.length > 1024) {
+ // Compress big payloads
+ body = (0, fflate_1.compressSync)(encoded);
+ flag = 1; // compressed
+ }
+ else {
+ body = encoded;
+ flag = 0; // not compressed
+ }
+ const out = new Uint8Array(1 + body.length);
+ out[0] = flag;
+ out.set(body, 1);
+ return out;
+ }
+ function txDecode(data) {
+ if (data.length === 0) {
+ throw new Error("Empty payload");
+ }
+ const flag = data[0];
+ let body = data.subarray(1);
+ if (flag === 1) {
+ body = (0, fflate_1.decompressSync)(body);
+ }
+ else if (flag !== 0) {
+ // Optional: future versioning or legacy handling
+ throw new Error(`Unknown tx flag: ${flag}`);
+ }
+ const unpacked = (0, msgpack_1.decode)(body);
+ const restored = (0, serial_json_1.fromJSON)(unpacked); // restore cycles, dates, etc.
+ return restored;
+ }
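
For context (not part of the published diff): a short usage sketch of the new tx-encoding module. Payloads are passed through serial-json (to handle cycles, dates, and similar values), msgpack-encoded, compressed with fflate when the encoded body exceeds 1024 bytes, and prefixed with a one-byte flag (0 = plain, 1 = compressed). The import path assumes calling code that sits next to the module; the payload values are illustrative.

import { txEncode, txDecode } from "./tx-encoding";

const payload = { hello: "world", when: new Date(), big: "x".repeat(5000) };
const bytes = txEncode(payload);                 // Uint8Array: [flag byte][body]
console.log(bytes[0]);                           // 1 here, since the msgpack body exceeds 1024 bytes
const roundTripped = txDecode(bytes) as typeof payload;
console.log(roundTripped.when instanceof Date);  // true, Dates are restored via serial-json
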
@@ -0,0 +1 @@
+ export {};