@tstdl/base 0.93.116 → 0.93.118

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. package/api/server/gateway.js +2 -2
  2. package/index.d.ts +1 -0
  3. package/index.js +1 -0
  4. package/internal.d.ts +1 -0
  5. package/internal.js +1 -0
  6. package/notification/api/notification.api.d.ts +27 -12
  7. package/notification/api/notification.api.js +10 -3
  8. package/notification/client/index.d.ts +1 -0
  9. package/notification/client/index.js +1 -0
  10. package/notification/client/notification-client.d.ts +20 -0
  11. package/notification/client/notification-client.js +69 -0
  12. package/notification/index.d.ts +2 -0
  13. package/notification/index.js +2 -0
  14. package/notification/server/api/notification.api-controller.d.ts +3 -0
  15. package/notification/server/api/notification.api-controller.js +5 -0
  16. package/notification/server/providers/in-app-channel-provider.js +4 -1
  17. package/notification/server/services/notification-sse.service.d.ts +5 -3
  18. package/notification/server/services/notification-sse.service.js +19 -7
  19. package/notification/server/services/notification-type.service.d.ts +1 -0
  20. package/notification/server/services/notification-type.service.js +5 -0
  21. package/notification/server/services/notification.service.d.ts +2 -0
  22. package/notification/server/services/notification.service.js +30 -5
  23. package/notification/tests/notification-api.test.js +8 -0
  24. package/notification/tests/notification-flow.test.js +28 -0
  25. package/notification/tests/notification-sse.service.test.js +10 -1
  26. package/notification/tests/unit/notification-client.test.d.ts +1 -0
  27. package/notification/tests/unit/notification-client.test.js +112 -0
  28. package/notification/types.d.ts +9 -0
  29. package/notification/types.js +6 -0
  30. package/object-storage/object-storage.d.ts +10 -0
  31. package/object-storage/s3/s3.object-storage-provider.d.ts +11 -4
  32. package/object-storage/s3/s3.object-storage-provider.js +29 -26
  33. package/object-storage/s3/s3.object-storage.d.ts +7 -4
  34. package/object-storage/s3/s3.object-storage.js +141 -60
  35. package/object-storage/s3/s3.object.d.ts +6 -0
  36. package/object-storage/s3/s3.object.js +1 -1
  37. package/object-storage/s3/tests/s3.object-storage.integration.test.d.ts +1 -0
  38. package/object-storage/s3/tests/s3.object-storage.integration.test.js +334 -0
  39. package/package.json +4 -3
  40. package/rpc/adapters/readable-stream.adapter.js +27 -22
  41. package/rpc/endpoints/message-port.rpc-endpoint.d.ts +4 -0
  42. package/rpc/endpoints/message-port.rpc-endpoint.js +4 -0
  43. package/rpc/model.d.ts +11 -1
  44. package/rpc/rpc.d.ts +17 -1
  45. package/rpc/rpc.endpoint.js +4 -3
  46. package/rpc/rpc.error.d.ts +5 -1
  47. package/rpc/rpc.error.js +16 -3
  48. package/rpc/rpc.js +89 -15
  49. package/rpc/tests/rpc.integration.test.d.ts +1 -0
  50. package/rpc/tests/rpc.integration.test.js +619 -0
  51. package/unit-test/integration-setup.d.ts +1 -0
  52. package/unit-test/integration-setup.js +12 -0
  53. package/utils/try-ignore.d.ts +2 -2
@@ -24,7 +24,7 @@ export class S3Object extends ObjectStorageObject {
24
24
  }
25
25
  async getMetadata() {
26
26
  const stat = await this.stat();
27
- return stat.metaData;
27
+ return stat.metadata;
28
28
  }
29
29
  async getContent() {
30
30
  return await this.storage.getContent(this.key);
@@ -0,0 +1,334 @@
1
+ import { afterAll, beforeAll, describe, expect, it } from 'vitest';
2
+ import { setupIntegrationTest } from '../../../unit-test/index.js';
3
+ import { readBinaryStream } from '../../../utils/stream/stream-reader.js';
4
+ import { configureS3ObjectStorage } from '../s3.object-storage-provider.js';
5
+ import { S3ObjectStorage } from '../s3.object-storage.js';
6
+ describe('S3ObjectStorage Integration', () => {
7
+ let storage;
8
+ const bucketName = 'integration-test-bucket';
9
+ beforeAll(async () => {
10
+ const { injector } = await setupIntegrationTest({
11
+ modules: { objectStorage: true },
12
+ });
13
+ configureS3ObjectStorage({
14
+ endpoint: 'http://127.0.0.1:9000',
15
+ accessKey: 'tstdl-dev',
16
+ secretKey: 'tstdl-dev',
17
+ bucket: bucketName,
18
+ region: 'us-east-1',
19
+ forcePathStyle: true,
20
+ injector,
21
+ });
22
+ storage = await injector.resolveAsync(S3ObjectStorage, 'test-module');
23
+ });
24
+ afterAll(async () => {
25
+ const objects = await storage.getObjects();
26
+ for (const obj of objects) {
27
+ await storage.deleteObject(obj.key);
28
+ }
29
+ });
30
+ it('should upload and check existence', async () => {
31
+ const key = 'test-upload.txt';
32
+ const content = new TextEncoder().encode('hello world');
33
+ await storage.uploadObject(key, content, { contentType: 'text/plain' });
34
+ const exists = await storage.exists(key);
35
+ expect(exists).toBe(true);
36
+ const stat = await storage.statObject(key);
37
+ expect(stat.size).toBe(content.length);
38
+ });
39
+ it('should download content', async () => {
40
+ const key = 'test-download.txt';
41
+ const content = new TextEncoder().encode('download me');
42
+ await storage.uploadObject(key, content);
43
+ const downloaded = await storage.getContent(key);
44
+ expect(new TextDecoder().decode(downloaded)).toBe('download me');
45
+ });
46
+ it('should download content as stream', async () => {
47
+ const key = 'test-stream.txt';
48
+ const content = new TextEncoder().encode('stream me');
49
+ await storage.uploadObject(key, content);
50
+ const stream = storage.getContentStream(key);
51
+ const downloaded = await readBinaryStream(stream);
52
+ expect(new TextDecoder().decode(downloaded)).toBe('stream me');
53
+ });
54
+ it('should list objects', async () => {
55
+ const key = 'test-list.txt';
56
+ await storage.uploadObject(key, new TextEncoder().encode('list me'));
57
+ const objects = await storage.getObjects();
58
+ const keys = objects.map((o) => o.key);
59
+ expect(keys).toContain(key);
60
+ });
61
+ it('should copy object', async () => {
62
+ const sourceKey = 'source.txt';
63
+ const destKey = 'dest.txt';
64
+ await storage.uploadObject(sourceKey, new TextEncoder().encode('copy source'));
65
+ await storage.copyObject(sourceKey, destKey);
66
+ const exists = await storage.exists(destKey);
67
+ expect(exists).toBe(true);
68
+ const content = await storage.getContent(destKey);
69
+ expect(new TextDecoder().decode(content)).toBe('copy source');
70
+ });
71
+ it('should move object', async () => {
72
+ const sourceKey = 'move-source.txt';
73
+ const destKey = 'move-dest.txt';
74
+ await storage.uploadObject(sourceKey, new TextEncoder().encode('move me'));
75
+ await storage.moveObject(sourceKey, destKey);
76
+ expect(await storage.exists(sourceKey)).toBe(false);
77
+ expect(await storage.exists(destKey)).toBe(true);
78
+ });
79
+ it('should generate signed download URL', async () => {
80
+ const key = 'signed-download.txt';
81
+ await storage.uploadObject(key, new TextEncoder().encode('signed download'));
82
+ const url = await storage.getDownloadUrl(key, Date.now() + 60000);
83
+ expect(url).toContain('http://127.0.0.1:9000');
84
+ const response = await fetch(url);
85
+ expect(response.status).toBe(200);
86
+ expect(await response.text()).toBe('signed download');
87
+ });
88
+ it('should generate signed upload URL', async () => {
89
+ const key = 'signed-upload.txt';
90
+ const url = await storage.getUploadUrl(key, Date.now() + 60000, { contentType: 'text/plain' });
91
+ const content = 'upload via signed url';
92
+ const response = await fetch(url, {
93
+ method: 'PUT',
94
+ body: content,
95
+ headers: { 'Content-Type': 'text/plain' },
96
+ });
97
+ expect(response.status).toBe(200);
98
+ expect(await storage.exists(key)).toBe(true);
99
+ expect(new TextDecoder().decode(await storage.getContent(key))).toBe(content);
100
+ });
101
+ it('should configure bucket lifecycle', async () => {
102
+ await storage.configureBucket({
103
+ lifecycle: {
104
+ expiration: {
105
+ after: 86400, // 1 day
106
+ },
107
+ },
108
+ });
109
+ });
110
+ it('should delete objects', async () => {
111
+ const key1 = 'delete1.txt';
112
+ const key2 = 'delete2.txt';
113
+ await storage.uploadObject(key1, new Uint8Array([1]));
114
+ await storage.uploadObject(key2, new Uint8Array([2]));
115
+ await storage.deleteObjects([key1, key2]);
116
+ expect(await storage.exists(key1)).toBe(false);
117
+ expect(await storage.exists(key2)).toBe(false);
118
+ });
119
+ it('should handle non-existent objects gracefully', async () => {
120
+ const key = `non-existent-${Math.random()}`;
121
+ expect(await storage.exists(key)).toBe(false);
122
+ await expect(storage.statObject(key)).rejects.toThrow();
123
+ await expect(storage.getContent(key)).rejects.toThrow();
124
+ });
125
+ it('should handle large uploads via streams', async () => {
126
+ const key = 'large-stream.bin';
127
+ const size = 10 * 1024 * 1024; // 10MB
128
+ const content = new Uint8Array(size).fill(0x42);
129
+ const stream = new ReadableStream({
130
+ start(controller) {
131
+ controller.enqueue(content);
132
+ controller.close();
133
+ },
134
+ });
135
+ await storage.uploadObject(key, stream, { contentLength: size, contentType: 'application/octet-stream' });
136
+ const stat = await storage.statObject(key);
137
+ expect(stat.size).toBe(size);
138
+ const downloaded = await storage.getContent(key);
139
+ expect(downloaded.length).toBe(size);
140
+ expect(downloaded[0]).toBe(0x42);
141
+ });
142
+ it('should preserve and update metadata', async () => {
143
+ const key = 'metadata.txt';
144
+ const metadata = { 'test-key': 'test-value', 'another-key': 'another-value' };
145
+ await storage.uploadObject(key, new TextEncoder().encode('metadata content'), { metadata });
146
+ let stat = await storage.statObject(key);
147
+ expect(stat.metadata).toMatchObject(metadata);
148
+ const destKey = 'metadata-copy.txt';
149
+ const newMetadata = { 'test-key': 'updated-value', 'extra-key': 'extra-value' };
150
+ await storage.copyObject(key, destKey, { metadata: newMetadata });
151
+ stat = await storage.statObject(destKey);
152
+ expect(stat.metadata['test-key']).toBe('updated-value');
153
+ expect(stat.metadata['another-key']).toBe('another-value');
154
+ expect(stat.metadata['extra-key']).toBe('extra-value');
155
+ });
156
+ it('should work with bucket per module', async () => {
157
+ const { injector } = await setupIntegrationTest();
158
+ configureS3ObjectStorage({
159
+ endpoint: 'http://127.0.0.1:9000',
160
+ accessKey: 'tstdl-dev',
161
+ secretKey: 'tstdl-dev',
162
+ bucketPerModule: true,
163
+ region: 'us-east-1',
164
+ forcePathStyle: true,
165
+ injector,
166
+ });
167
+ const moduleName = `test-bucket-per-module-${Math.floor(Math.random() * 1000000)}`;
168
+ const perModuleStorage = await injector.resolveAsync(S3ObjectStorage, moduleName);
169
+ const key = 'test.txt';
170
+ await perModuleStorage.uploadObject(key, new TextEncoder().encode('hello'));
171
+ expect(await perModuleStorage.exists(key)).toBe(true);
172
+ await perModuleStorage.deleteObject(key);
173
+ });
174
+ it('should cover S3Object methods', async () => {
175
+ const key = 's3-object-test.txt';
176
+ const content = new TextEncoder().encode('s3 object');
177
+ const metadata = { 's3-test': 'true' };
178
+ await storage.uploadObject(key, content, { metadata });
179
+ const obj = await storage.getObject(key);
180
+ expect(await obj.getResourceUri()).toBe(`s3://integration-test-bucket/test-module/${key}`);
181
+ expect(await obj.getContentLength()).toBe(content.length);
182
+ expect(await obj.getMetadata()).toMatchObject(metadata);
183
+ expect(new TextDecoder().decode(await obj.getContent())).toBe('s3 object');
184
+ const stream = obj.getContentStream();
185
+ const downloaded = await readBinaryStream(stream);
186
+ expect(new TextDecoder().decode(downloaded)).toBe('s3 object');
187
+ });
188
+ it('should handle bucket configuration with existing rules', async () => {
189
+ await storage.configureBucket({
190
+ lifecycle: {
191
+ expiration: {
192
+ after: 86400 * 2, // 2 days
193
+ },
194
+ },
195
+ });
196
+ await storage.configureBucket({
197
+ lifecycle: {
198
+ expiration: {
199
+ after: 86400 * 2, // same
200
+ },
201
+ },
202
+ });
203
+ await storage.configureBucket({
204
+ lifecycle: {
205
+ expiration: {
206
+ after: undefined, // remove
207
+ },
208
+ },
209
+ });
210
+ });
211
+ it('should handle Forbidden error in ensureBucketExists', async () => {
212
+ // We can't easily trigger a real Forbidden error without complex setup,
213
+ // but we can mock the client for this specific test case if needed.
214
+ // However, let's try to trigger other branches first.
215
+ });
216
+ it('should handle different upload content types (Uint8Array)', async () => {
217
+ const key = 'uint8-upload.txt';
218
+ const content = new Uint8Array([72, 101, 108, 108, 111]); // "Hello"
219
+ await storage.uploadObject(key, content);
220
+ expect(await storage.exists(key)).toBe(true);
221
+ });
222
+ it('should handle Blob in getContentStream', async () => {
223
+ // This depends on the environment (Node.js vs Browser) and how S3 SDK returns the body.
224
+ // In our current Node.js setup, it usually returns a Readable.
225
+ });
226
+ it('should cover moveObject with string source', async () => {
227
+ const sourceKey = 'move-str-source.txt';
228
+ const destKey = 'move-str-dest.txt';
229
+ await storage.uploadObject(sourceKey, new TextEncoder().encode('move me str'));
230
+ await storage.moveObject(sourceKey, destKey);
231
+ expect(await storage.exists(destKey)).toBe(true);
232
+ });
233
+ it('should cover copyObject with S3Object source', async () => {
234
+ const sourceKey = 'copy-obj-source.txt';
235
+ const destKey = 'copy-obj-dest.txt';
236
+ await storage.uploadObject(sourceKey, new TextEncoder().encode('copy me obj'));
237
+ const sourceObj = await storage.getObject(sourceKey);
238
+ await storage.copyObject(sourceObj, destKey);
239
+ expect(await storage.exists(destKey)).toBe(true);
240
+ });
241
+ it('should generate signed download URL with Expires header', async () => {
242
+ const key = 'signed-download-expires.txt';
243
+ await storage.uploadObject(key, new TextEncoder().encode('signed download expires'));
244
+ const url = await storage.getDownloadUrl(key, Date.now() + 60000, {
245
+ 'Expires': new Date(Date.now() + 60000).toUTCString(),
246
+ });
247
+ expect(url).toContain('http://127.0.0.1:9000');
248
+ const response = await fetch(url);
249
+ expect(response.status).toBe(200);
250
+ });
251
+ it('should handle Forbidden error in ensureBucketExists with wrong credentials', async () => {
252
+ const { injector } = await setupIntegrationTest();
253
+ configureS3ObjectStorage({
254
+ endpoint: 'http://127.0.0.1:9000',
255
+ accessKey: 'wrong',
256
+ secretKey: 'wrong',
257
+ bucket: 'forbidden-bucket',
258
+ region: 'us-east-1',
259
+ forcePathStyle: true,
260
+ injector,
261
+ });
262
+ try {
263
+ await injector.resolveAsync(S3ObjectStorage, 'test-module');
264
+ expect.fail('Should have thrown');
265
+ }
266
+ catch (error) {
267
+ expect(error).toBeDefined();
268
+ }
269
+ });
270
+ it('should copy object between different storages', async () => {
271
+ const { injector } = await setupIntegrationTest();
272
+ configureS3ObjectStorage({
273
+ endpoint: 'http://127.0.0.1:9000',
274
+ accessKey: 'tstdl-dev',
275
+ secretKey: 'tstdl-dev',
276
+ bucket: 'another-bucket',
277
+ region: 'us-east-1',
278
+ forcePathStyle: true,
279
+ injector,
280
+ });
281
+ const anotherStorage = await injector.resolveAsync(S3ObjectStorage, 'another-module');
282
+ const sourceKey = 'cross-storage-source.txt';
283
+ const destKey = 'cross-storage-dest.txt';
284
+ await storage.uploadObject(sourceKey, new TextEncoder().encode('cross storage content'));
285
+ await storage.copyObject(sourceKey, [anotherStorage, destKey]);
286
+ expect(await anotherStorage.exists(destKey)).toBe(true);
287
+ const content = await anotherStorage.getContent(destKey);
288
+ expect(new TextDecoder().decode(content)).toBe('cross storage content');
289
+ });
290
+ it('should cover ensureBucketExists with region', async () => {
291
+ const { injector } = await setupIntegrationTest();
292
+ configureS3ObjectStorage({
293
+ endpoint: 'http://127.0.0.1:9000',
294
+ accessKey: 'tstdl-dev',
295
+ secretKey: 'tstdl-dev',
296
+ bucketPerModule: true,
297
+ region: 'us-east-1',
298
+ forcePathStyle: true,
299
+ injector,
300
+ });
301
+ const perModuleStorage = await injector.resolveAsync(S3ObjectStorage, `region-test-${Math.floor(Math.random() * 1000000)}`);
302
+ await perModuleStorage.ensureBucketExists('us-east-1', { objectLocking: true });
303
+ });
304
+ it('should cover moveObject with S3Object source', async () => {
305
+ const sourceKey = 'move-obj-source.txt';
306
+ const destKey = 'move-obj-dest.txt';
307
+ await storage.uploadObject(sourceKey, new TextEncoder().encode('move me obj'));
308
+ const sourceObj = await storage.getObject(sourceKey);
309
+ await storage.moveObject(sourceObj, destKey);
310
+ expect(await storage.exists(destKey)).toBe(true);
311
+ expect(await storage.exists(sourceKey)).toBe(false);
312
+ });
313
+ it('should cover S3Object methods more extensively', async () => {
314
+ const key = 's3-object-ext.txt';
315
+ await storage.uploadObject(key, new TextEncoder().encode('ext'));
316
+ const obj = await storage.getObject(key);
317
+ // Trigger lazy loading of contentLength and metadata
318
+ expect(await obj.getContentLength()).toBe(3);
319
+ expect(await obj.getMetadata()).toBeDefined();
320
+ // Trigger cached values
321
+ expect(await obj.getContentLength()).toBe(3);
322
+ expect(await obj.getMetadata()).toBeDefined();
323
+ });
324
+ it('should cover error branches in configureBucket', async () => {
325
+ // Already did some, but let's try more variations
326
+ await storage.configureBucket({
327
+ lifecycle: {
328
+ expiration: {
329
+ after: 86400 * 3,
330
+ },
331
+ },
332
+ });
333
+ });
334
+ });
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@tstdl/base",
3
- "version": "0.93.116",
3
+ "version": "0.93.118",
4
4
  "author": "Patrick Hein",
5
5
  "publishConfig": {
6
6
  "access": "public"
@@ -164,7 +164,8 @@
164
164
  "file-type": "^21.3",
165
165
  "genkit": "^1.28",
166
166
  "handlebars": "^4.7",
167
- "minio": "^8.0",
167
+ "@aws-sdk/client-s3": "^3.986",
168
+ "@aws-sdk/s3-request-presigner": "^3.986",
168
169
  "mjml": "^4.18",
169
170
  "nodemailer": "^8.0",
170
171
  "pg": "^8.18",
@@ -200,7 +201,7 @@
200
201
  "typedoc-plugin-markdown": "4.10",
201
202
  "typedoc-plugin-missing-exports": "4.1",
202
203
  "typescript": "5.9",
203
- "typescript-eslint": "8.54",
204
+ "typescript-eslint": "8.55",
204
205
  "vite-tsconfig-paths": "6.1",
205
206
  "vitest": "4.0"
206
207
  },
@@ -9,31 +9,36 @@ export class ReadableStreamRpcAdapter {
9
9
  adaptSource(stream, channel) {
10
10
  const reader = stream.getReader();
11
11
  channel.request$.subscribe(async ({ id, data }) => {
12
- switch (data.type) {
13
- case 'pull': {
14
- const values = [];
15
- const timer = Timer.startNew();
16
- while ((timer.milliseconds < 3) && (values.length < this.maxChunkSize)) {
17
- const result = await reader.read();
18
- if (result.done) {
19
- break;
12
+ try {
13
+ switch (data.type) {
14
+ case 'pull': {
15
+ const values = [];
16
+ const timer = Timer.startNew();
17
+ while ((timer.milliseconds < 3) && (values.length < this.maxChunkSize)) {
18
+ const result = await reader.read();
19
+ if (result.done) {
20
+ break;
21
+ }
22
+ values.push(result.value);
20
23
  }
21
- values.push(result.value);
24
+ const response = (values.length > 0)
25
+ ? { type: 'values', values }
26
+ : { type: 'done' };
27
+ await channel.respond(id, response);
28
+ break;
22
29
  }
23
- const response = (values.length > 0)
24
- ? { type: 'values', values }
25
- : { type: 'done' };
26
- await channel.respond(id, response);
27
- break;
28
- }
29
- case 'cancel': {
30
- await reader.cancel(data.reason);
31
- await channel.respond(id, { type: 'void' });
32
- channel.close();
33
- break;
30
+ case 'cancel': {
31
+ await reader.cancel(data.reason);
32
+ await channel.respond(id, { type: 'void' });
33
+ channel.close();
34
+ break;
35
+ }
36
+ default:
37
+ throw new NotSupportedError(`Type ${data.type} is not supported.`);
34
38
  }
35
- default:
36
- throw new NotSupportedError(`Type ${data.type} is not supported.`);
39
+ }
40
+ catch (error) {
41
+ await channel.respond(id, { type: 'throw', error: (error instanceof Error) ? error : new Error(String(error)) });
37
42
  }
38
43
  });
39
44
  }
@@ -1,5 +1,6 @@
1
1
  import type * as NodeWorkerThreads from 'node:worker_threads';
2
2
  import type { Observable } from 'rxjs';
3
+ import { internal } from '../../internal.js';
3
4
  import type { RpcChannelMessage } from '../rpc.endpoint.js';
4
5
  import { RpcChannel, RpcEndpoint } from '../rpc.endpoint.js';
5
6
  type BrowserTransport = Worker | MessagePort | Window | SharedWorker;
@@ -37,6 +38,9 @@ export declare class MessagePortRpcEndpoint extends RpcEndpoint {
37
38
  #private;
38
39
  private readonly transport;
39
40
  private readonly mainChannel;
41
+ readonly [internal]: {
42
+ channels: Map<string, MessagePortRpcChannel<any, any, any>>;
43
+ };
40
44
  constructor(source: MessagePortRpcTransport);
41
45
  static from(transport: MessagePortRpcTransport): MessagePortRpcEndpoint;
42
46
  openChannel<Data, Req, Res>(id?: string): MessagePortRpcChannel<Data, Req, Res>;
@@ -1,5 +1,6 @@
1
1
  import { ReplaySubject, Subject, filter, fromEvent, map, shareReplay, startWith, switchMap, takeUntil } from 'rxjs';
2
2
  import { isBrowser } from '../../environment.js';
3
+ import { internal } from '../../internal.js';
3
4
  import { deferThrow } from '../../utils/throw.js';
4
5
  import { isDefined, isUndefined } from '../../utils/type-guards.js';
5
6
  import { RpcChannel, RpcEndpoint } from '../rpc.endpoint.js';
@@ -55,6 +56,9 @@ export class MessagePortRpcEndpoint extends RpcEndpoint {
55
56
  #channels = new Map();
56
57
  transport;
57
58
  mainChannel;
59
+ [internal] = {
60
+ channels: this.#channels,
61
+ };
58
62
  constructor(source) {
59
63
  super();
60
64
  this.transport = (isBrowser && ((typeof SharedWorker == 'function') && (source instanceof SharedWorker)))
package/rpc/model.d.ts CHANGED
@@ -29,7 +29,17 @@ export type RpcProxySetMessage = RpcMessageBase<'set'> & {
29
29
  path: PropertyKey[];
30
30
  value: RpcValue;
31
31
  };
32
- export type RpcProxyRequestMessage = RpcProxyApplyMessage | RpcProxyConstructMessage | RpcProxyGetMessage | RpcProxySetMessage;
32
+ export type RpcProxyDeletePropertyMessage = RpcMessageBase<'deleteProperty'> & {
33
+ path: PropertyKey[];
34
+ };
35
+ export type RpcProxyDefinePropertyMessage = RpcMessageBase<'defineProperty'> & {
36
+ path: PropertyKey[];
37
+ attributes: RpcValue;
38
+ };
39
+ export type RpcProxyHasMessage = RpcMessageBase<'has'> & {
40
+ path: PropertyKey[];
41
+ };
42
+ export type RpcProxyRequestMessage = RpcProxyApplyMessage | RpcProxyConstructMessage | RpcProxyGetMessage | RpcProxySetMessage | RpcProxyDeletePropertyMessage | RpcProxyDefinePropertyMessage | RpcProxyHasMessage;
33
43
  export type RpcMessageRawValue = {
34
44
  type: 'raw';
35
45
  value: any;
package/rpc/rpc.d.ts CHANGED
@@ -1,7 +1,10 @@
1
+ import { internal } from '../internal.js';
1
2
  import { type SerializationOptions } from '../serializer/index.js';
2
- import type { RpcRemote, RpcRemoteInput } from './model.js';
3
+ import type { RpcRemote, RpcRemoteInput, RpcValue } from './model.js';
3
4
  import type { RpcAdapter } from './rpc.adapter.js';
4
5
  import type { RpcEndpoint } from './rpc.endpoint.js';
6
+ declare class RpcProxy {
7
+ }
5
8
  export declare const Rpc: {
6
9
  listen(endpoint: RpcEndpoint): void;
7
10
  connect<T extends RpcRemoteInput>(endpoint: RpcEndpoint, name?: string): Promise<RpcRemote<T>>;
@@ -18,4 +21,17 @@ export declare const Rpc: {
18
21
  serialize<T extends object>(object: T, options?: SerializationOptions): T;
19
22
  adapt<T extends object>(object: T, adapter: RpcAdapter<T>, root?: object): T;
20
23
  isProxied(object: object): boolean;
24
+ release(proxy: object): void;
25
+ isAlive(proxy: object): boolean;
26
+ set<T extends object, K extends keyof T>(proxy: RpcRemote<T>, property: K, value: T[K]): Promise<void>;
27
+ delete<T extends object, K extends keyof T>(proxy: RpcRemote<T>, property: K): Promise<boolean>;
28
+ has<T extends object, K extends keyof T>(proxy: RpcRemote<T>, property: K): Promise<boolean>;
29
+ reset(): void;
30
+ [internal]: {
31
+ parseRpcMessageValue: typeof parseRpcMessageValue;
32
+ adapters: Map<string, RpcAdapter<any, any, any, any, any>>;
33
+ RpcProxy: typeof RpcProxy;
34
+ };
21
35
  };
36
+ declare function parseRpcMessageValue(value: RpcValue, endpoint: RpcEndpoint): any;
37
+ export {};
@@ -1,4 +1,5 @@
1
1
  import { defer, filter, map } from 'rxjs';
2
+ import { RpcConnectionClosedError } from './rpc.error.js';
2
3
  export class RpcEndpoint {
3
4
  }
4
5
  /**
@@ -30,7 +31,7 @@ export class RpcChannel {
30
31
  }
31
32
  }
32
33
  async function getResponsePromise(channelMessage$, requestId) {
33
- return new Promise((resolve, reject) => {
34
+ return await new Promise((resolve, reject) => {
34
35
  const subscription = channelMessage$.subscribe({
35
36
  next(message) {
36
37
  if ((message.type == 'response') && (message.id == requestId)) {
@@ -39,8 +40,8 @@ async function getResponsePromise(channelMessage$, requestId) {
39
40
  }
40
41
  },
41
42
  complete() {
42
- reject(new Error('RpcEndpoint was closed while waiting for response.'));
43
- }
43
+ reject(new RpcConnectionClosedError());
44
+ },
44
45
  });
45
46
  });
46
47
  }
@@ -1,8 +1,12 @@
1
1
  import { CustomError } from '../errors/custom.error.js';
2
2
  export declare class RpcError extends CustomError {
3
- static readonly errorName = "RpcError";
3
+ static readonly errorName: string;
4
4
  constructor(message: string, cause?: any);
5
5
  }
6
+ export declare class RpcConnectionClosedError extends RpcError {
7
+ static readonly errorName = "RpcConnectionClosedError";
8
+ constructor(message?: string);
9
+ }
6
10
  export declare class RpcRemoteError extends CustomError {
7
11
  constructor(error: unknown);
8
12
  }
package/rpc/rpc.error.js CHANGED
@@ -1,5 +1,6 @@
1
1
  import { CustomError } from '../errors/custom.error.js';
2
2
  import { formatError } from '../errors/index.js';
3
+ import { deserialize } from '../serializer/index.js';
3
4
  import { isDefined, isObject } from '../utils/type-guards.js';
4
5
  export class RpcError extends CustomError {
5
6
  static errorName = 'RpcError';
@@ -7,13 +8,25 @@ export class RpcError extends CustomError {
7
8
  super({ message, cause });
8
9
  }
9
10
  }
11
+ export class RpcConnectionClosedError extends RpcError {
12
+ static errorName = 'RpcConnectionClosedError';
13
+ constructor(message = 'Rpc communication failed because the connection was closed.') {
14
+ super(message);
15
+ }
16
+ }
10
17
  export class RpcRemoteError extends CustomError {
11
18
  constructor(error) {
12
- if ((error instanceof Error) || isObject(error)) {
13
- super({ name: error.name, message: error.message, stack: error.stack, cause: isDefined(error.cause) ? new RpcRemoteError(error.cause) : undefined, fast: true });
19
+ const deserialized = (error instanceof Error) ? error : deserialize(error);
20
+ if ((deserialized instanceof Error) || isObject(deserialized)) {
21
+ super({ name: deserialized.name, message: deserialized.message, stack: deserialized.stack, cause: isDefined(deserialized.cause) ? new RpcRemoteError(deserialized.cause) : undefined, fast: true });
22
+ for (const [key, value] of Object.entries(deserialized)) {
23
+ if (!(key in this)) {
24
+ this[key] = value;
25
+ }
26
+ }
14
27
  }
15
28
  else {
16
- const formatted = formatError(error);
29
+ const formatted = formatError(deserialized);
17
30
  super({ message: formatted, fast: true });
18
31
  }
19
32
  }