@peers-app/peers-sdk 0.7.0 → 0.7.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -11,9 +11,6 @@ export declare class UserContext {
     readonly currentlyActiveGroupId: Observable<string | undefined>;
     readonly packagesRootDir: Observable<string>;
     readonly reloadPackagesOnPageRefresh: Observable<boolean>;
-    readonly packageLocalPathsResolved: Observable<{
-        [packageId: string]: string;
-    }>;
     readonly groupIds: Observable<string[]>;
     readonly userDataContext: DataContext;
     readonly groupDataContexts: Map<string, DataContext>;
@@ -16,7 +16,6 @@ class UserContext {
     currentlyActiveGroupId = (0, observable_1.observable)();
     packagesRootDir = (0, observable_1.observable)('');
     reloadPackagesOnPageRefresh = (0, observable_1.observable)(false);
-    packageLocalPathsResolved = (0, observable_1.observable)({});
     groupIds = (0, observable_1.observable)([]);
     userDataContext;
     groupDataContexts = new Map();
@@ -109,7 +108,7 @@ class UserContext {
      */
     async loadUserContextObservablesFromDB() {
         const persistentVars = await this.userDataContext.tableContainer.getTableByName(data_1.persistentVarsMetaData.name);
-        const vars = await persistentVars.list({ name: { $in: ['myUserId', 'thisDeviceId', 'currentlyActiveGroupId', 'packagesRootDir', 'reloadPackagesOnPageRefresh', 'packageLocalPathsResolved'] } });
+        const vars = await persistentVars.list({ name: { $in: ['myUserId', 'thisDeviceId', 'currentlyActiveGroupId', 'packagesRootDir', 'reloadPackagesOnPageRefresh'] } });
         const varDbValues = vars.reduce((acc, curr) => {
             acc[curr.name] = curr.value?.value;
             return acc;
@@ -127,14 +126,10 @@ class UserContext {
         const reloadPackagesOnPageRefreshPVar = (0, data_1.deviceVar)('reloadPackagesOnPageRefresh', { defaultValue: false, dbValue: varDbValues['reloadPackagesOnPageRefresh'], userContext: this });
         this.reloadPackagesOnPageRefresh(reloadPackagesOnPageRefreshPVar());
         (0, observable_1.linkObservables)(reloadPackagesOnPageRefreshPVar, this.reloadPackagesOnPageRefresh);
-        const packageLocalPathsResolvedPVar = (0, data_1.deviceVar)('packageLocalPathsResolved', { defaultValue: {}, dbValue: varDbValues['packageLocalPathsResolved'], userContext: this });
-        this.packageLocalPathsResolved(packageLocalPathsResolvedPVar());
-        (0, observable_1.linkObservables)(packageLocalPathsResolvedPVar, this.packageLocalPathsResolved);
         await Promise.all([
             deviceIdPVar.loadingPromise,
             packagesRootDirPVar.loadingPromise,
             reloadPackagesOnPageRefreshPVar.loadingPromise,
-            packageLocalPathsResolvedPVar.loadingPromise,
        ]);
    }
    async getMe() {
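The `packageLocalPathsResolved` device var is removed outright; the surviving vars keep the same three-step wiring visible above: create the `deviceVar`, seed the in-memory observable, then link the two. A minimal sketch of that pattern, assuming `observable` and `linkObservables` are re-exported from the package root (the `deviceVar` and `varDbValues` declarations are stand-ins for `data_1.deviceVar` and the values read from `persistentVars`):

```ts
import { observable, linkObservables } from '@peers-app/peers-sdk'; // assumed export path
declare const deviceVar: (name: string, opts: object) => any;       // stand-in for data_1.deviceVar
declare const varDbValues: Record<string, unknown>;                 // values previously read from persistentVars

const packagesRootDir = observable('');
const pVar = deviceVar('packagesRootDir', { defaultValue: '~/peers-packages', dbValue: varDbValues['packagesRootDir'] });
packagesRootDir(pVar());                // seed the in-memory observable from the persisted value
linkObservables(pVar, packagesRootDir); // keep the two in sync from here on
await pVar.loadingPromise;              // wait until the var finishes loading from the DB
```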
@@ -4,23 +4,25 @@ import { FileWriteStream } from "./file-write-stream";
 import { IFile, IFileInput } from "./file.types";
 import type { DataContext } from "../../context/data-context";
 export declare class FilesTable extends Table<IFile> {
-    /** @deprecated Direct inserts forbidden; use safeFile() or saveFileRecord() */
+    /** @deprecated Direct inserts forbidden; use saveFile() or saveFileRecord() */
     insert(..._args: Parameters<Table<any>['insert']>): never;
-    /** @deprecated Direct updates forbidden; use safeFile() or saveFileRecord() */
+    /** @deprecated Direct updates forbidden; use saveFile() or saveFileRecord() */
     update(..._args: Parameters<Table<any>['update']>): never;
     /** @deprecated Direct deletes forbidden; use deleteFile() */
     delete(..._args: Parameters<Table<any>['delete']>): never;
+    /** @deprecated Direct save forbidden; use saveFile() or saveFileRecord() */
+    save(..._args: Parameters<Table<any>['save']>): never;
     createWriteStream(metadata: IFileInput): Promise<FileWriteStream>;
     /**
-     * Note: Use `saveFile` instead for direct use. This method is intended for internal use
-     * to insert a file record into the database as part of streaming operations.
-     * @param fileRecord The file record to insert
+     * Note: Use `saveFile` to write a file to disk. This method is for managing
+     * the file metadata in the database which is done automatically by `saveFile`.
+     * @param fileRecord The file record to save to the database
     * @returns The inserted file record
     */
    saveFileRecord(fileRecord: IFile): Promise<IFile>;
    openReadStream(fileId: string, preloadChunksCount?: number): Promise<FileReadStream | null>;
    saveFile(metaData: IFileInput, data: Uint8Array | string): Promise<IFile>;
-    getFile(fileId: string): Promise<Uint8Array | null>;
+    getFileContents(fileId: string): Promise<Uint8Array | null>;
    deleteFile(fileId: string): Promise<void>;
    createIndexFileRecursively(chunkHashes: string[]): Promise<string>;
    loadChunkHashesRecursively(indexFileId: string): Promise<string[]>;
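Two breaking surface changes here: `getFile` is renamed to `getFileContents`, and `save` now throws like `insert`/`update`/`delete` already did. A migration sketch for consumers (the `declare` lines are illustrative scaffolding, not SDK API):

```ts
declare const files: FilesTable;
declare const fileId: string;
declare const record: IFile;

// 0.7.0: const data = await files.getFile(fileId);
const data = await files.getFileContents(fileId); // still resolves to Uint8Array | null

// All direct table mutations now throw, including save():
// files.save(record)            -> throws 'Direct saves forbidden; use saveFile() or saveFileRecord()'
// files.saveFile(meta, bytes)   -> writes chunks to disk and the record to the DB
// files.saveFileRecord(record)  -> writes only the metadata record
```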
@@ -11,25 +11,29 @@ const file_read_stream_1 = require("./file-read-stream");
 const file_write_stream_1 = require("./file-write-stream");
 const file_types_1 = require("./file.types");
 class FilesTable extends orm_1.Table {
-    /** @deprecated Direct inserts forbidden; use safeFile() or saveFileRecord() */
+    /** @deprecated Direct inserts forbidden; use saveFile() or saveFileRecord() */
     insert(..._args) {
-        throw new Error('Direct inserts forbidden; use safeFile() or saveFileRecord()');
+        throw new Error('Direct inserts forbidden; use saveFile() or saveFileRecord()');
     }
-    /** @deprecated Direct updates forbidden; use safeFile() or saveFileRecord() */
+    /** @deprecated Direct updates forbidden; use saveFile() or saveFileRecord() */
     update(..._args) {
-        throw new Error('Direct updates forbidden; use safeFile() or saveFileRecord()');
+        throw new Error('Direct updates forbidden; use saveFile() or saveFileRecord()');
     }
     /** @deprecated Direct deletes forbidden; use deleteFile() */
     delete(..._args) {
         throw new Error('Direct deletes forbidden; use deleteFile()');
     }
+    /** @deprecated Direct save forbidden; use saveFile() or saveFileRecord() */
+    save(..._args) {
+        throw new Error('Direct saves forbidden; use saveFile() or saveFileRecord()');
+    }
     async createWriteStream(metadata) {
         return new file_write_stream_1.FileWriteStream(metadata, this);
     }
     /**
-     * Note: Use `saveFile` instead for direct use. This method is intended for internal use
-     * to insert a file record into the database as part of streaming operations.
-     * @param fileRecord The file record to insert
+     * Note: Use `saveFile` to write a file to disk. This method is for managing
+     * the file metadata in the database which is done automatically by `saveFile`.
+     * @param fileRecord The file record to save to the database
     * @returns The inserted file record
     */
    async saveFileRecord(fileRecord) {
@@ -56,7 +60,7 @@ class FilesTable extends orm_1.Table {
         // Finalize and return the result
         return await writeStream.finalize();
     }
-    async getFile(fileId) {
+    async getFileContents(fileId) {
        // Use FileReadStream internally to ensure consistent chunk reading logic
        const readStream = await this.openReadStream(fileId);
        if (!readStream) {
@@ -76,9 +80,9 @@ class FilesTable extends orm_1.Table {
             await this.deleteFile(fileRecord.indexFileId);
         }
         // Note: We don't delete chunks since they may be shared with other files
-        // Chunk cleanup could be implemented as a separate garbage collection process
+        // TODO Chunk cleanup needs to be implemented as a separate garbage collection process
        // that removes chunks not referenced by any files
-        // Delete from database
+        // Delete file record from database
        await super.delete(fileId);
    }
    // Create an index file recursively for large files
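The strengthened TODO is aspirational: nothing in 0.7.2 reclaims orphaned chunks. A hypothetical sweep might look like the sketch below. Every identifier except `FilesTable`, `loadChunkHashesRecursively`, and the `file_chunks/` directory (all visible in this diff) is invented for illustration:

```ts
// Hypothetical garbage-collection pass; no such API exists in 0.7.2.
async function sweepOrphanedChunks(
  files: FilesTable,
  fileOps: { listFiles(dir: string): Promise<string[]>; deleteFile(path: string): Promise<void> },
) {
  // 1. Gather every chunk hash still referenced by a file record, directly or via an index file.
  const referenced = new Set<string>();
  for (const record of await files.list({})) {
    const hashes = record.chunkHashes
      ?? (record.indexFileId ? await files.loadChunkHashesRecursively(record.indexFileId) : []);
    hashes.forEach(h => referenced.add(h));
  }
  // 2. Delete any on-disk chunk no record references.
  for (const path of await fileOps.listFiles('file_chunks')) {
    const hash = path.split('/').pop()!;
    if (!referenced.has(hash)) await fileOps.deleteFile(path);
  }
}
```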
@@ -103,7 +107,7 @@ class FilesTable extends orm_1.Table {
     // Load chunk hashes recursively from an index file
     async loadChunkHashesRecursively(indexFileId) {
         // Recursively load the index file
-        const indexBuffer = await this.getFile(indexFileId);
+        const indexBuffer = await this.getFileContents(indexFileId);
        if (!indexBuffer) {
            throw new Error(`Index file not found: ${indexFileId}`);
        }
@@ -119,7 +123,7 @@ function Files(dataContext) {
 }
 // TODO implement permissions check for file access
 rpc_types_1.rpcServerCalls.getFileContents = async (fileId, encoding = 'utf8') => {
-    const data = await Files().getFile(fileId);
+    const data = await Files().getFileContents(fileId);
    if (data === null) {
        throw new Error(`File not found: ${fileId}`);
    }
@@ -179,7 +179,7 @@ describe('FileTable', () => {
         // Save file first
         await fileTable.saveFile(metadata, originalData);
         // Retrieve file
-        const retrievedData = await fileTable.getFile(fileId);
+        const retrievedData = await fileTable.getFileContents(fileId);
         expect(new Uint8Array(retrievedData)).toEqual(originalData);
     });
     it('should retrieve a multi-chunk file', async () => {
@@ -194,11 +194,11 @@ describe('FileTable', () => {
         // Save file first
         await fileTable.saveFile(metadata, originalData);
         // Retrieve file
-        const retrievedData = await fileTable.getFile(fileId);
+        const retrievedData = await fileTable.getFileContents(fileId);
         expect(new Uint8Array(retrievedData)).toEqual(originalData);
     });
     it('should return null for non-existent file', async () => {
-        const result = await fileTable.getFile('non-existent');
+        const result = await fileTable.getFileContents('non-existent');
         expect(result).toBeNull();
     });
     it('should return null when chunk is missing', async () => {
@@ -212,11 +212,11 @@ describe('FileTable', () => {
         };
         // Insert metadata directly without saving chunks
         await fileTable.dataSource.insert(metadata);
-        const result = await fileTable.getFile(fileId);
+        const result = await fileTable.getFileContents(fileId);
         expect(result).toBeNull();
     });
     it('should return null when chunk is missing during read', async () => {
-        const result = await fileTable.getFile('non-existent');
+        const result = await fileTable.getFileContents('non-existent');
         expect(result).toBeNull();
     });
 });
@@ -233,14 +233,14 @@ describe('FileTable', () => {
         // Save file first
         const saved = await fileTable.saveFile(metadata, data);
         // Verify file exists
-        const fileData = await fileTable.getFile(fileId);
+        const fileData = await fileTable.getFileContents(fileId);
         expect(new Uint8Array(fileData)).toEqual(data);
         const chunkHash = saved.chunkHashes[0];
         expect(await mockFileOps.fileExists(`file_chunks/${chunkHash}`)).toBe(true);
         // Delete file
         await fileTable.deleteFile(fileId);
         // Verify file metadata is gone but chunk remains (for potential sharing)
-        expect(await fileTable.getFile(fileId)).toBeNull();
+        expect(await fileTable.getFileContents(fileId)).toBeNull();
         expect(await mockFileOps.fileExists(`file_chunks/${chunkHash}`)).toBe(true);
     });
     it('should handle deleting non-existent file gracefully', async () => {
@@ -266,7 +266,7 @@ describe('FileTable', () => {
         // Delete file
         await fileTable.deleteFile(fileId);
         // Verify file metadata is gone but chunks remain (for potential sharing)
-        expect(await fileTable.getFile(fileId)).toBeNull();
+        expect(await fileTable.getFileContents(fileId)).toBeNull();
         expect(await mockFileOps.fileExists(`file_chunks/${chunk0Hash}`)).toBe(true);
         expect(await mockFileOps.fileExists(`file_chunks/${chunk1Hash}`)).toBe(true);
     });
@@ -300,8 +300,8 @@ describe('FileTable', () => {
         expect(await mockFileOps.fileExists(chunkPath)).toBe(true);
         expect(mockFileOps.getStoredFiles().filter(path => path.includes(chunkHash))).toHaveLength(1);
         // Both files should retrieve the same content
-        const file1Data = await fileTable.getFile(fileId1);
-        const file2Data = await fileTable.getFile(fileId2);
+        const file1Data = await fileTable.getFileContents(fileId1);
+        const file2Data = await fileTable.getFileContents(fileId2);
         expect(new Uint8Array(file1Data)).toEqual(data);
         expect(new Uint8Array(file2Data)).toEqual(data);
     });
@@ -324,7 +324,7 @@ describe('FileTable', () => {
         expect(result.chunkHashes).toHaveLength(1);
         expect(result.indexFileId).toBeUndefined();
         // Should be able to retrieve the file
-        const retrievedData = await fileTable.getFile(fileId);
+        const retrievedData = await fileTable.getFileContents(fileId);
         expect(new Uint8Array(retrievedData)).toEqual(smallData);
     });
     it('should use recursive index files for very large files', async () => {
@@ -347,7 +347,7 @@ describe('FileTable', () => {
         expect(result.chunkHashes).toBeUndefined();
         expect(result.indexFileId).toBeTruthy();
         // Should be able to retrieve the file
-        const retrievedData = await fileTable.getFile(fileId);
+        const retrievedData = await fileTable.getFileContents(fileId);
         expect(retrievedData).toBeTruthy();
         expect(new Uint8Array(retrievedData)).toEqual(largeData);
         // The index file should be marked as an index file
@@ -374,7 +374,7 @@ describe('FileTable', () => {
         };
         // Insert metadata directly without creating index file
         await fileTable.dataSource.insert(metadata);
-        await expect(fileTable.getFile(fileId)).rejects.toThrow('Index file not found: missing_index_file_id');
+        await expect(fileTable.getFileContents(fileId)).rejects.toThrow('Index file not found: missing_index_file_id');
     });
 });
 describe('Streaming API', () => {
@@ -399,7 +399,7 @@ describe('FileTable', () => {
         expect(result.fileSize).toBe(23); // Total bytes written: 'Hello, streaming world!'
         expect(result.chunkHashes).toHaveLength(1); // Small file, single chunk
         // Verify we can read the file back
-        const retrievedData = await fileTable.getFile(fileId);
+        const retrievedData = await fileTable.getFileContents(fileId);
         expect(Buffer.from(retrievedData).toString('utf8')).toBe('Hello, streaming world!');
     });
     it('should handle large file streaming across multiple chunks', async () => {
@@ -421,7 +421,7 @@ describe('FileTable', () => {
         expect(result.fileSize).toBe(chunkSize + 500);
         expect(result.chunkHashes).toHaveLength(2); // Two chunks
         // Verify content
-        const retrievedData = await fileTable.getFile(fileId);
+        const retrievedData = await fileTable.getFileContents(fileId);
         expect(retrievedData?.length).toBe(chunkSize + 500);
         expect(new Uint8Array(retrievedData).subarray(0, chunkSize).every(b => b === 65)).toBe(true); // All 'A's
         expect(new Uint8Array(retrievedData).subarray(chunkSize).every(b => b === 66)).toBe(true); // All 'B's
@@ -490,7 +490,7 @@ describe('FileTable', () => {
         expect(result.fileSize).toBe(data.length);
         expect(result.chunkHashes).toHaveLength(3); // 2 complete + 1 partial chunk
         // Verify we can read it back correctly
-        const retrievedData = await fileTable.getFile(fileId);
+        const retrievedData = await fileTable.getFileContents(fileId);
         expect(retrievedData?.length).toBe(data.length);
         expect(new Uint8Array(retrievedData)).toEqual(data);
     });
@@ -686,7 +686,7 @@ describe('FileTable', () => {
         await fileTable.dataSource.insert(metadata);
         // This should return null when the chunk can't be found
         // instead of throwing an error or hanging
-        const result = await fileTable.getFile(fileId);
+        const result = await fileTable.getFileContents(fileId);
         expect(result).toBeNull();
     });
 });
@@ -18,6 +18,7 @@ export * from "./knowledge/peer-types";
 export * from "./knowledge/predicates";
 export * from "./messages";
 export * from "./packages";
+export * from "./packages.utils";
 export * from "./peer-events/peer-event-handlers";
 export * from "./peer-events/peer-event-types";
 export * from "./peer-events/peer-events";
@@ -34,6 +34,7 @@ __exportStar(require("./knowledge/peer-types"), exports);
 __exportStar(require("./knowledge/predicates"), exports);
 __exportStar(require("./messages"), exports);
 __exportStar(require("./packages"), exports);
+__exportStar(require("./packages.utils"), exports);
 __exportStar(require("./peer-events/peer-event-handlers"), exports);
 __exportStar(require("./peer-events/peer-event-types"), exports);
 __exportStar(require("./peer-events/peer-events"), exports);
@@ -144,11 +144,9 @@ class Table {
         this.setLocalOperation(recordId, operation);
         let result;
         try {
-            // if (isInsert) {
-            //     result = await this.dataSource.insert(record);
-            // } else {
-            //     result = await this.dataSource.update(record);
-            // }
+            // NOTE: done this way to allow individual tables to have custom logic in `save`, `insert`, and `update`
+            // but this results in a double-read (this method and the data-source method).
+            // TODO: try to avoid the double-read, probably by passing the just-read result as `opts.dbData`
             result = await this.dataSource.save(record, opts);
         }
         finally {
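The new NOTE/TODO explains the trade-off: delegating to `dataSource.save` lets subclasses like `FilesTable` intercept `save`/`insert`/`update`, at the cost of reading the record twice. A speculative sketch of the fix the TODO hints at; `opts.dbData` is the TODO's own name, everything else here is invented for illustration:

```ts
// Speculative sketch only; NOT how 0.7.2 behaves.
class TableWithDbDataHint<T extends { id: string }> {
  constructor(private dataSource: { get(id: string): Promise<T | undefined>; save(record: T, opts: object): Promise<T> }) {}

  async save(record: T, opts: Record<string, unknown> = {}): Promise<T> {
    const existing = await this.dataSource.get(record.id); // the one read this layer already performs
    // ...determine the operation, setLocalOperation, etc. (elided)...
    // Hand the already-read row down so the data source can skip its own lookup:
    return this.dataSource.save(record, { ...opts, dbData: existing });
  }
}
```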
@@ -3,13 +3,13 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.reloadPackagesOnPageRefresh = exports.packagesRootDir = exports.PackagesTable = void 0;
 exports.Packages = Packages;
 const zod_1 = require("zod");
-const app_nav_1 = require("../types/app-nav");
-const types_1 = require("./orm/types");
-const persistent_vars_1 = require("./persistent-vars");
-const table_definitions_system_1 = require("./orm/table-definitions.system");
 const context_1 = require("../context");
+const app_nav_1 = require("../types/app-nav");
 const orm_1 = require("./orm");
+const table_definitions_system_1 = require("./orm/table-definitions.system");
+const types_1 = require("./orm/types");
 const package_permissions_1 = require("./package-permissions");
+const persistent_vars_1 = require("./persistent-vars");
 const schema = zod_1.z.object({
     packageId: zod_1.z.string(),
     name: zod_1.z.string(),
@@ -87,4 +87,3 @@ function Packages(dataContext) {
 }
 exports.packagesRootDir = (0, persistent_vars_1.deviceVar)('packagesRootDir', { defaultValue: '~/peers-packages' });
 exports.reloadPackagesOnPageRefresh = (0, persistent_vars_1.deviceVar)('reloadPackagesOnPageRefresh', { defaultValue: true, });
-// export const packageLocalPathsResolved = persistentVar<{ [packageId: string]: string}>('packageLocalPathsResolved', { defaultValue: {}, scope: 'device' });
@@ -0,0 +1,3 @@
+import { IPackage } from "./packages";
+import { DataContext } from "../context/data-context";
+export declare function copyPackageToAnotherDataContext(packageId: string, fromDataContext: DataContext, toDataContext: DataContext): Promise<IPackage>;
@@ -0,0 +1,41 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.copyPackageToAnotherDataContext = copyPackageToAnotherDataContext;
+const packages_1 = require("./packages");
+const files_1 = require("./files");
+async function copyPackageToAnotherDataContext(packageId, fromDataContext, toDataContext) {
+    const fromPackages = (0, packages_1.Packages)(fromDataContext);
+    const toPackages = (0, packages_1.Packages)(toDataContext);
+    const pkg = await fromPackages.get(packageId);
+    if (!pkg) {
+        throw new Error(`Package ${packageId} not found in fromDataContext`);
+    }
+    const fromFiles = (0, files_1.Files)(fromDataContext);
+    const toFiles = (0, files_1.Files)(toDataContext);
+    const bundleFile = await fromFiles.get(pkg.packageBundleFileId);
+    if (bundleFile) {
+        await toFiles.saveFileRecord(bundleFile);
+    }
+    else {
+        throw new Error(`Package bundle file ${pkg.packageBundleFileId} not found in fromDataContext`);
+    }
+    if (pkg.routesBundleFileId) {
+        const routesBundleFile = await fromFiles.get(pkg.routesBundleFileId);
+        if (routesBundleFile) {
+            await toFiles.saveFileRecord(routesBundleFile);
+        }
+        else {
+            throw new Error(`Routes bundle file ${pkg.routesBundleFileId} not found in fromDataContext`);
+        }
+    }
+    if (pkg.uiBundleFileId) {
+        const uiBundleFile = await fromFiles.get(pkg.uiBundleFileId);
+        if (uiBundleFile) {
+            await toFiles.saveFileRecord(uiBundleFile);
+        }
+        else {
+            throw new Error(`UI bundle file ${pkg.uiBundleFileId} not found in fromDataContext`);
+        }
+    }
+    return toPackages.signAndSave(pkg);
+}
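This new helper copies a package record plus its bundle-file records between data contexts. A hedged usage sketch; obtaining the `DataContext` instances this way is an assumption based on the `UserContext` fields shown earlier in this diff (`userDataContext`, `groupDataContexts`):

```ts
import { copyPackageToAnotherDataContext } from '@peers-app/peers-sdk';

// Illustrative: promote a package from the user's private context into a group context.
const copied = await copyPackageToAnotherDataContext(
  pkg.packageId,
  userContext.userDataContext,
  userContext.groupDataContexts.get(groupId)!,
);
```

Note it calls `saveFileRecord`, so only the file metadata records are copied; the chunk bytes are content-addressed on disk and are presumably shared locally or moved by sync.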
package/dist/keys.d.ts CHANGED
@@ -26,7 +26,7 @@ export interface IDataBox {
     fromPublicKey: string;
 }
 export declare function newKeys(): IPublicPrivateKeys;
-export declare function loadKeys(secretKey: string): IPublicPrivateKeys;
+export declare function hydrateKeys(secretKey: string): IPublicPrivateKeys;
 export declare function signMessageWithSecretKey(msg: string, secretKey: string): string;
 export declare function openMessageWithPublicKey(msg: string, publicKey: string): string;
 export declare function signObjectWithSecretKey<T>(obj: T, secretKey: string): ISignedObject<T>;
package/dist/keys.js CHANGED
@@ -9,7 +9,7 @@ exports.encodeBase64 = encodeBase64;
 exports.decodeBase64 = decodeBase64;
 exports.newToken = newToken;
 exports.newKeys = newKeys;
-exports.loadKeys = loadKeys;
+exports.hydrateKeys = hydrateKeys;
 exports.signMessageWithSecretKey = signMessageWithSecretKey;
 exports.openMessageWithPublicKey = openMessageWithPublicKey;
 exports.signObjectWithSecretKey = signObjectWithSecretKey;
@@ -80,7 +80,7 @@ function newKeys() {
         publicBoxKey: encodeBase64(boxKeyPair.publicKey),
     };
 }
-function loadKeys(secretKey) {
+function hydrateKeys(secretKey) {
     let _secretKey = decodeBase64(secretKey);
     const boxKeyPair = nacl.box.keyPair.fromSecretKey(_secretKey.slice(0, 32));
     return {
package/dist/keys.test.js CHANGED
@@ -97,7 +97,7 @@ describe('keys', () => {
     describe('loading keys', () => {
         it('should allow loading keys from a secret key', () => {
             const keys = (0, keys_1.newKeys)();
-            const loadedKeys = (0, keys_1.loadKeys)(keys.secretKey);
+            const loadedKeys = (0, keys_1.hydrateKeys)(keys.secretKey);
            expect(loadedKeys.secretKey).toEqual(keys.secretKey);
            expect(loadedKeys.publicKey).toEqual(keys.publicKey);
            expect(loadedKeys.publicBoxKey).toEqual(keys.publicBoxKey);
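`loadKeys` to `hydrateKeys` is a pure rename with no signature change, so migration is mechanical (assuming the keys functions are re-exported from the package root):

```ts
import { newKeys, hydrateKeys } from '@peers-app/peers-sdk';

const keys = newKeys();
// 0.7.0: const restored = loadKeys(keys.secretKey);
const restored = hydrateKeys(keys.secretKey); // same result: full key set rebuilt from the secret key
```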
@@ -11,6 +11,6 @@ export declare class PackageLoader {
     }): Promise<void>;
     loadPackage(pkg: IPackage, opts?: {
         force?: boolean;
-    }): Promise<IPeersPackage | null>;
+    }): Promise<IPeersPackage | undefined>;
 }
 export declare function setDefaultRequire(require: (<T>(module: string) => T)): void;
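`loadPackage` now resolves to `undefined` instead of `null` on failure. Callers that tested with `=== null` will silently break; truthiness checks survive the change (the `declare` lines are illustrative scaffolding):

```ts
declare const packageLoader: PackageLoader;
declare const pkg: IPackage;

const instance = await packageLoader.loadPackage(pkg);
// 0.7.0 callers may have written: if (instance === null) { ... }  -- never true in 0.7.2
if (!instance) { // handles both null and undefined
  console.warn(`Package ${pkg.name} did not load`);
}
```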
@@ -31,13 +31,13 @@ class PackageLoader {
         let bundleCode = '';
         if (pkg.packageBundleFileId) {
             const filesTable = (0, files_1.Files)(this.dataContext);
-            const bundleBuffer = await filesTable.getFile(pkg.packageBundleFileId);
+            const bundleBuffer = await filesTable.getFileContents(pkg.packageBundleFileId);
             if (bundleBuffer) {
                 bundleCode = Buffer.from(bundleBuffer).toString('utf8');
             }
             else {
                 console.warn(`Package bundle file not found for ${pkg.name} (fileId: ${pkg.packageBundleFileId})`);
-                return null;
+                return;
             }
         }
         else {
@@ -87,11 +87,11 @@ class PackageLoader {
                 });
                 return packageInstance;
             }
-            return null;
+            return;
         }
         catch (err) {
             console.debug(`Could not load package bundle for ${pkg.name}`, err);
-            return null;
+            return;
         }
     }
 }
@@ -8,17 +8,14 @@ export declare const rpcServerCalls: {
     openLinkInBrowser: ((url: string) => Promise<void>);
     openPath: ((path: string) => Promise<void>);
     openPackage: ((path: string) => Promise<void>);
-    createLocalPackage: ((packageName: string, packageLocation: string) => Promise<string>);
-    addRemotePackage: ((remoteRepo: string) => Promise<string>);
-    linkRemoteRepo: ((packageId: string) => Promise<void>);
-    installOrUpdatePackage: ((packageId: string) => Promise<void>);
+    addOrUpdatePackage: ((input: string, options?: {
+        dataContextId?: string;
+        packageLocation?: string;
+        update?: boolean;
+        linkRemote?: boolean;
+    }) => Promise<string>);
     setUserIdAndSecretKey: ((userId: string, secretKey: string) => Promise<void>);
     getUserId: () => Promise<string | undefined>;
-    updateUserSettings: ((settings: {
-        id: string;
-        name: string;
-        value: any;
-    }) => Promise<void>);
     encryptData: ((value: string, groupId?: string) => Promise<string>);
     tableGet: (dataContextId: string, tableName: string, id: string, opts?: {
         useCache?: boolean;
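Four package RPCs collapse into one `addOrUpdatePackage(input, options)`. A hedged mapping from the old calls to the new one; the exact semantics of `input` (package name vs. repo URL vs. packageId) are inferred from the removed signatures, not documented in the diff:

```ts
import { rpcServerCalls } from '@peers-app/peers-sdk';

// createLocalPackage('my-pkg', '/path/to/dir') becomes:
await rpcServerCalls.addOrUpdatePackage('my-pkg', { packageLocation: '/path/to/dir' });
// addRemotePackage('https://github.com/org/repo') becomes:
await rpcServerCalls.addOrUpdatePackage('https://github.com/org/repo');
// linkRemoteRepo(packageId) becomes:
await rpcServerCalls.addOrUpdatePackage(packageId, { linkRemote: true });
// installOrUpdatePackage(packageId) becomes:
await rpcServerCalls.addOrUpdatePackage(packageId, { update: true });
```

`updateUserSettings` is dropped from the RPC surface with no replacement shown in this diff.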
package/dist/rpc-types.js CHANGED
@@ -8,17 +8,15 @@ function rpcStub(rpcName) {
 }
 exports.rpcServerCalls = {
     ping: async (msg) => `pong: ${msg}`,
+    // TODO collapse these two down to just openPath
     openLinkInBrowser: rpcStub('openLinkInBrowser'),
     openPath: rpcStub('openPath'),
     openPackage: rpcStub('OpenPackage'),
-    createLocalPackage: rpcStub('createLocalPackage'),
-    addRemotePackage: rpcStub('addRemotePackage'),
-    linkRemoteRepo: rpcStub('linkRemoteRepo'),
-    installOrUpdatePackage: rpcStub('installOrUpdatePackage'),
+    addOrUpdatePackage: rpcStub('addOrUpdatePackage'),
     setUserIdAndSecretKey: rpcStub('setUserIdAndSecretKey'),
     getUserId: rpcStub('getUserId'),
-    updateUserSettings: rpcStub('updateUserSettings'),
     encryptData: rpcStub('encryptData'),
+    // TODO collapse these all down to just tableMethodCall
     tableGet: rpcStub('tableGet'),
     tableList: rpcStub('tableList'),
     tableCount: rpcStub('tableCount'),
@@ -32,6 +30,8 @@ exports.rpcServerCalls = {
     resetAllDeviceSyncInfo: rpcStub('resetAllDeviceSyncInfo'),
     importGroupShare: rpcStub('importGroupShare'),
     // TODO try to get rid of this and rely on the client-side table and server-side table individually emitting events
+    // TODO TODO before deleting this, check if we can stop client-side tables from emitting events and rely solely on server-side tables
+    // propagating events with rpcClientCalls.emitEvent. It's very likely we're currently seeing two events for every one write originating from the UI
     // emitEvent: _na as ((event: IEventData) => Promise<boolean>),
 };
 exports.rpcClientCalls = {
@@ -4,3 +4,5 @@ export declare const defaultAssistantRunnerToolId = "000peers0tool00000runner1";
 export declare const defaultSendMessageToolId = "000peers0tool00000sendmsg";
 export declare const runWorkflowToolId = "000peers0tool0runworkflow";
 export declare const emitEventToolId = "000peers0tool000emitevent";
+export declare const peersCorePackageId = "00mh0wlipjixk2gqmurbwee0o";
+export declare const peersCorePackageRepoUrl = "https://github.com/peers-app/peers-core";
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.emitEventToolId = exports.runWorkflowToolId = exports.defaultSendMessageToolId = exports.defaultAssistantRunnerToolId = exports.defaultAssistantId = exports.peersRootUserId = void 0;
+exports.peersCorePackageRepoUrl = exports.peersCorePackageId = exports.emitEventToolId = exports.runWorkflowToolId = exports.defaultSendMessageToolId = exports.defaultAssistantRunnerToolId = exports.defaultAssistantId = exports.peersRootUserId = void 0;
 exports.peersRootUserId = '000peers0user000000000001';
 exports.defaultAssistantId = '000peers0bot00000000shell';
 // export const defaultAssistantId = '000peers0bot000000000bot1';
@@ -8,3 +8,5 @@ exports.defaultAssistantRunnerToolId = '000peers0tool00000runner1';
 exports.defaultSendMessageToolId = '000peers0tool00000sendmsg';
 exports.runWorkflowToolId = '000peers0tool0runworkflow';
 exports.emitEventToolId = '000peers0tool000emitevent';
+exports.peersCorePackageId = '00mh0wlipjixk2gqmurbwee0o';
+exports.peersCorePackageRepoUrl = 'https://github.com/peers-app/peers-core';
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@peers-app/peers-sdk",
-  "version": "0.7.0",
+  "version": "0.7.2",
   "repository": {
     "type": "git",
     "url": "git+https://github.com/peers-app/peers-sdk.git"