@aj-archipelago/cortex 1.3.58 → 1.3.60

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57) hide show
  1. package/config/default.example.json +15 -1
  2. package/config.js +42 -0
  3. package/helper-apps/cortex-file-handler/INTERFACE.md +20 -9
  4. package/helper-apps/cortex-file-handler/package-lock.json +2 -2
  5. package/helper-apps/cortex-file-handler/package.json +1 -1
  6. package/helper-apps/cortex-file-handler/scripts/setup-azure-container.js +17 -17
  7. package/helper-apps/cortex-file-handler/scripts/setup-test-containers.js +35 -35
  8. package/helper-apps/cortex-file-handler/src/blobHandler.js +1010 -909
  9. package/helper-apps/cortex-file-handler/src/constants.js +98 -98
  10. package/helper-apps/cortex-file-handler/src/docHelper.js +27 -27
  11. package/helper-apps/cortex-file-handler/src/fileChunker.js +224 -214
  12. package/helper-apps/cortex-file-handler/src/helper.js +93 -93
  13. package/helper-apps/cortex-file-handler/src/index.js +584 -550
  14. package/helper-apps/cortex-file-handler/src/localFileHandler.js +86 -86
  15. package/helper-apps/cortex-file-handler/src/redis.js +186 -90
  16. package/helper-apps/cortex-file-handler/src/services/ConversionService.js +301 -273
  17. package/helper-apps/cortex-file-handler/src/services/FileConversionService.js +55 -55
  18. package/helper-apps/cortex-file-handler/src/services/storage/AzureStorageProvider.js +174 -154
  19. package/helper-apps/cortex-file-handler/src/services/storage/GCSStorageProvider.js +239 -223
  20. package/helper-apps/cortex-file-handler/src/services/storage/LocalStorageProvider.js +161 -159
  21. package/helper-apps/cortex-file-handler/src/services/storage/StorageFactory.js +73 -71
  22. package/helper-apps/cortex-file-handler/src/services/storage/StorageProvider.js +46 -45
  23. package/helper-apps/cortex-file-handler/src/services/storage/StorageService.js +256 -213
  24. package/helper-apps/cortex-file-handler/src/start.js +4 -1
  25. package/helper-apps/cortex-file-handler/src/utils/filenameUtils.js +59 -25
  26. package/helper-apps/cortex-file-handler/tests/FileConversionService.test.js +119 -116
  27. package/helper-apps/cortex-file-handler/tests/blobHandler.test.js +257 -257
  28. package/helper-apps/cortex-file-handler/tests/cleanup.test.js +676 -0
  29. package/helper-apps/cortex-file-handler/tests/conversionResilience.test.js +124 -124
  30. package/helper-apps/cortex-file-handler/tests/fileChunker.test.js +249 -208
  31. package/helper-apps/cortex-file-handler/tests/fileUpload.test.js +439 -380
  32. package/helper-apps/cortex-file-handler/tests/getOperations.test.js +299 -263
  33. package/helper-apps/cortex-file-handler/tests/postOperations.test.js +265 -239
  34. package/helper-apps/cortex-file-handler/tests/start.test.js +1230 -1201
  35. package/helper-apps/cortex-file-handler/tests/storage/AzureStorageProvider.test.js +110 -105
  36. package/helper-apps/cortex-file-handler/tests/storage/GCSStorageProvider.test.js +201 -175
  37. package/helper-apps/cortex-file-handler/tests/storage/LocalStorageProvider.test.js +128 -125
  38. package/helper-apps/cortex-file-handler/tests/storage/StorageFactory.test.js +78 -73
  39. package/helper-apps/cortex-file-handler/tests/storage/StorageService.test.js +99 -99
  40. package/helper-apps/cortex-file-handler/tests/testUtils.helper.js +74 -70
  41. package/lib/azureAuthTokenHelper.js +78 -0
  42. package/lib/entityConstants.js +5 -4
  43. package/package.json +1 -1
  44. package/pathways/bing_afagent.js +13 -0
  45. package/pathways/gemini_15_vision.js +4 -0
  46. package/pathways/system/entity/tools/sys_tool_bing_search.js +1 -1
  47. package/pathways/system/entity/tools/sys_tool_bing_search_afagent.js +141 -0
  48. package/pathways/system/entity/tools/sys_tool_browser_jina.js +1 -1
  49. package/pathways/system/entity/tools/sys_tool_readfile.js +4 -0
  50. package/pathways/system/workspaces/workspace_applet_edit.js +4 -0
  51. package/pathways/transcribe_gemini.js +4 -0
  52. package/pathways/translate_subtitle.js +15 -8
  53. package/server/modelExecutor.js +4 -0
  54. package/server/plugins/azureFoundryAgentsPlugin.js +372 -0
  55. package/server/plugins/gemini15ChatPlugin.js +3 -3
  56. package/tests/azureAuthTokenHelper.test.js +150 -0
  57. package/tests/azureFoundryAgents.test.js +212 -0
@@ -1,107 +1,107 @@
1
- import { promises as fs } from 'fs';
2
- import { join, basename } from 'path';
1
+ import { promises as fs } from "fs";
2
+ import { join, basename } from "path";
3
3
 
4
- import { v4 as uuidv4 } from 'uuid';
4
+ import { v4 as uuidv4 } from "uuid";
5
5
 
6
- import { publicFolder, port, ipAddress } from './start.js';
6
+ import { publicFolder, port, ipAddress } from "./start.js";
7
7
 
8
8
  async function moveFileToPublicFolder(chunkPath, requestId) {
9
- // Use the filename with a UUID as the blob name
10
- const filename = `${requestId}/${uuidv4()}_${basename(chunkPath)}`;
9
+ // Use the filename with a UUID as the blob name
10
+ const filename = `${requestId}/${uuidv4()}_${basename(chunkPath)}`;
11
11
 
12
- // Create the target folder if it doesn't exist
13
- const targetFolder = join(publicFolder, requestId);
14
- await fs.mkdir(targetFolder, { recursive: true });
12
+ // Create the target folder if it doesn't exist
13
+ const targetFolder = join(publicFolder, requestId);
14
+ await fs.mkdir(targetFolder, { recursive: true });
15
15
 
16
- // Move the file to the target folder
17
- const targetPath = join(targetFolder, basename(filename));
18
- await fs.rename(chunkPath, targetPath);
16
+ // Move the file to the target folder
17
+ const targetPath = join(targetFolder, basename(filename));
18
+ await fs.rename(chunkPath, targetPath);
19
19
 
20
- // Return the complete URL of the file
21
- const fileUrl = `http://${ipAddress}:${port}/files/${filename}`;
22
- // const fileUrl = `http://localhost:${port}/files/${filename}`;
23
- return fileUrl;
20
+ // Return the complete URL of the file
21
+ const fileUrl = `http://${ipAddress}:${port}/files/${filename}`;
22
+ // const fileUrl = `http://localhost:${port}/files/${filename}`;
23
+ return fileUrl;
24
24
  }
25
25
 
26
26
  async function deleteFolder(requestId) {
27
- if (!requestId) throw new Error('Missing requestId parameter');
28
- const targetFolder = join(publicFolder, requestId);
29
- try {
27
+ if (!requestId) throw new Error("Missing requestId parameter");
28
+ const targetFolder = join(publicFolder, requestId);
29
+ try {
30
30
  // Check if folder exists first
31
- const stats = await fs.stat(targetFolder);
32
- if (stats.isDirectory()) {
33
- // Get list of files before deleting
34
- const files = await fs.readdir(targetFolder);
35
- const deletedFiles = files.map((file) => join(requestId, file));
36
- // Delete the folder
37
- await fs.rm(targetFolder, { recursive: true });
38
- console.log(`Cleaned folder: ${targetFolder}`);
39
- return deletedFiles;
40
- }
41
- return [];
42
- } catch (error) {
43
- if (error.code === 'ENOENT') {
44
- // Folder doesn't exist, return empty array
45
- return [];
46
- }
47
- throw error;
31
+ const stats = await fs.stat(targetFolder);
32
+ if (stats.isDirectory()) {
33
+ // Get list of files before deleting
34
+ const files = await fs.readdir(targetFolder);
35
+ const deletedFiles = files.map((file) => join(requestId, file));
36
+ // Delete the folder
37
+ await fs.rm(targetFolder, { recursive: true });
38
+ console.log(`Cleaned folder: ${targetFolder}`);
39
+ return deletedFiles;
40
+ }
41
+ return [];
42
+ } catch (error) {
43
+ if (error.code === "ENOENT") {
44
+ // Folder doesn't exist, return empty array
45
+ return [];
48
46
  }
47
+ throw error;
48
+ }
49
49
  }
50
50
 
51
51
  async function cleanupLocal(urls = null) {
52
- const cleanedUrls = [];
53
- if (!urls) {
54
- try {
55
- // Read the directory
56
- const items = await fs.readdir(publicFolder);
57
-
58
- // Calculate the date that is x months ago
59
- const monthsAgo = new Date();
60
- monthsAgo.setMonth(monthsAgo.getMonth() - 1);
61
-
62
- // Iterate through the items
63
- for (const item of items) {
64
- const itemPath = join(publicFolder, item);
65
-
66
- // Get the stats of the item
67
- const stats = await fs.stat(itemPath);
68
-
69
- // Check if the item is a file or a directory
70
- const isDirectory = stats.isDirectory();
71
-
72
- // Compare the last modified date with three months ago
73
- if (stats.mtime < monthsAgo) {
74
- if (isDirectory) {
75
- // If it's a directory, delete it recursively
76
- await fs.rm(itemPath, { recursive: true });
77
- console.log(`Cleaned directory: ${item}`);
78
- } else {
79
- // If it's a file, delete it
80
- await fs.unlink(itemPath);
81
- console.log(`Cleaned file: ${item}`);
82
-
83
- // Add the URL of the cleaned file to cleanedUrls array
84
- cleanedUrls.push(`http://${ipAddress}:${port}/files/${item}`);
85
- }
86
- }
87
- }
88
- } catch (error) {
89
- console.error(`Error cleaning up files: ${error}`);
90
- }
91
- } else {
92
- try {
93
- for (const url of urls) {
94
- const filename = url.split('/').pop();
95
- const itemPath = join(publicFolder, filename);
96
- await fs.unlink(itemPath);
97
- }
98
- } catch (error) {
99
- console.error(`Error cleaning up files: ${error}`);
52
+ const cleanedUrls = [];
53
+ if (!urls) {
54
+ try {
55
+ // Read the directory
56
+ const items = await fs.readdir(publicFolder);
57
+
58
+ // Calculate the date that is one month ago
59
+ const monthsAgo = new Date();
60
+ monthsAgo.setMonth(monthsAgo.getMonth() - 1);
61
+
62
+ // Iterate through the items
63
+ for (const item of items) {
64
+ const itemPath = join(publicFolder, item);
65
+
66
+ // Get the stats of the item
67
+ const stats = await fs.stat(itemPath);
68
+
69
+ // Check if the item is a file or a directory
70
+ const isDirectory = stats.isDirectory();
71
+
72
+ // Compare the last modified date with one month ago
73
+ if (stats.mtime < monthsAgo) {
74
+ if (isDirectory) {
75
+ // If it's a directory, delete it recursively
76
+ await fs.rm(itemPath, { recursive: true });
77
+ console.log(`Cleaned directory: ${item}`);
78
+ } else {
79
+ // If it's a file, delete it
80
+ await fs.unlink(itemPath);
81
+ console.log(`Cleaned file: ${item}`);
82
+
83
+ // Add the URL of the cleaned file to cleanedUrls array
84
+ cleanedUrls.push(`http://${ipAddress}:${port}/files/${item}`);
85
+ }
100
86
  }
87
+ }
88
+ } catch (error) {
89
+ console.error(`Error cleaning up files: ${error}`);
90
+ }
91
+ } else {
92
+ try {
93
+ for (const url of urls) {
94
+ const filename = url.split("/").pop();
95
+ const itemPath = join(publicFolder, filename);
96
+ await fs.unlink(itemPath);
97
+ }
98
+ } catch (error) {
99
+ console.error(`Error cleaning up files: ${error}`);
101
100
  }
101
+ }
102
102
 
103
- // Return the array of cleaned file URLs
104
- return cleanedUrls;
103
+ // Return the array of cleaned file URLs
104
+ return cleanedUrls;
105
105
  }
106
106
 
107
107
  export { moveFileToPublicFolder, deleteFolder, cleanupLocal };
@@ -1,129 +1,225 @@
1
- import redis from 'ioredis';
2
- const connectionString = process.env['REDIS_CONNECTION_STRING'];
1
+ import redis from "ioredis";
2
+ const connectionString = process.env["REDIS_CONNECTION_STRING"];
3
3
  const client = redis.createClient(connectionString);
4
- // client.connect();
5
4
 
6
- const channel = 'requestProgress';
5
+ const channel = "requestProgress";
7
6
 
8
7
  const connectClient = async () => {
9
- if (!client.connected) {
10
- try {
11
- await client.connect();
12
- } catch (error) {
13
- console.error(`Error reconnecting to Redis: ${error}`);
14
- return;
15
- }
8
+ if (!client.connected) {
9
+ try {
10
+ await client.connect();
11
+ } catch (error) {
12
+ console.error(`Error reconnecting to Redis: ${error}`);
13
+ return;
16
14
  }
15
+ }
17
16
  };
18
17
 
19
18
  const publishRequestProgress = async (data) => {
20
- // await connectClient();
21
- try {
22
- const message = JSON.stringify(data);
23
- console.log(`Publishing message ${message} to channel ${channel}`);
24
- await client.publish(channel, message);
25
- } catch (error) {
26
- console.error(`Error publishing message: ${error}`);
27
- }
19
+ // await connectClient();
20
+ try {
21
+ const message = JSON.stringify(data);
22
+ console.log(`Publishing message ${message} to channel ${channel}`);
23
+ await client.publish(channel, message);
24
+ } catch (error) {
25
+ console.error(`Error publishing message: ${error}`);
26
+ }
28
27
  };
29
28
 
30
29
  // Function to get all key value pairs in "FileStoreMap" hash map
31
30
  const getAllFileStoreMap = async () => {
32
- try {
33
- const allKeyValuePairs = await client.hgetall('FileStoreMap');
34
- // Parse each JSON value in the returned object
35
- for (const key in allKeyValuePairs) {
36
- try {
37
- // Modify the value directly in the returned object
38
- allKeyValuePairs[key] = JSON.parse(allKeyValuePairs[key]);
39
- } catch (error) {
40
- console.error(`Error parsing JSON for key ${key}: ${error}`);
41
- // keep original value if parsing failed
42
- }
43
- }
44
- return allKeyValuePairs;
45
- } catch (error) {
46
- console.error(
47
- `Error getting all key-value pairs from FileStoreMap: ${error}`,
48
- );
49
- return {}; // Return null or any default value indicating an error occurred
31
+ try {
32
+ const allKeyValuePairs = await client.hgetall("FileStoreMap");
33
+ // Parse each JSON value in the returned object
34
+ for (const key in allKeyValuePairs) {
35
+ try {
36
+ // Modify the value directly in the returned object
37
+ allKeyValuePairs[key] = JSON.parse(allKeyValuePairs[key]);
38
+ } catch (error) {
39
+ console.error(`Error parsing JSON for key ${key}: ${error}`);
40
+ // keep original value if parsing failed
41
+ }
50
42
  }
43
+ return allKeyValuePairs;
44
+ } catch (error) {
45
+ console.error(
46
+ `Error getting all key-value pairs from FileStoreMap: ${error}`,
47
+ );
48
+ return {}; // Return an empty object indicating an error occurred
49
+ }
51
50
  };
52
51
 
53
52
  // Function to set key value in "FileStoreMap" hash map
54
53
  const setFileStoreMap = async (key, value) => {
55
- try {
56
- value.timestamp = new Date().toISOString();
57
- await client.hset('FileStoreMap', key, JSON.stringify(value));
58
- } catch (error) {
59
- console.error(`Error setting key in FileStoreMap: ${error}`);
54
+ try {
55
+ // Only set timestamp if one doesn't already exist
56
+ if (!value.timestamp) {
57
+ value.timestamp = new Date().toISOString();
60
58
  }
59
+ await client.hset("FileStoreMap", key, JSON.stringify(value));
60
+ } catch (error) {
61
+ console.error(`Error setting key in FileStoreMap: ${error}`);
62
+ }
61
63
  };
62
64
 
63
- const getFileStoreMap = async (key) => {
64
- try {
65
- const value = await client.hget('FileStoreMap', key);
66
- if (value) {
67
- try {
68
- // parse the value back to an object before returning
69
- return JSON.parse(value);
70
- } catch (error) {
71
- console.error(`Error parsing JSON: ${error}`);
72
- return value; // return original value if parsing failed
65
+ const getFileStoreMap = async (key, skipLazyCleanup = false) => {
66
+ try {
67
+ const value = await client.hget("FileStoreMap", key);
68
+ if (value) {
69
+ try {
70
+ // parse the value back to an object before returning
71
+ const parsedValue = JSON.parse(value);
72
+
73
+ // Lazy cleanup: check if file still exists when accessed (unless disabled)
74
+ if (!skipLazyCleanup && (parsedValue?.url || parsedValue?.gcs)) {
75
+ try {
76
+ // Import StorageService here to avoid circular dependencies
77
+ const { StorageService } = await import(
78
+ "./services/storage/StorageService.js"
79
+ );
80
+ const storageService = new StorageService();
81
+
82
+ let shouldRemove = false;
83
+
84
+ // Check primary storage
85
+ if (parsedValue?.url) {
86
+ const exists = await storageService.fileExists(parsedValue.url);
87
+ if (!exists) {
88
+ console.log(
89
+ `Lazy cleanup: Primary storage file missing for key ${key}: ${parsedValue.url}`,
90
+ );
91
+ shouldRemove = true;
92
+ }
93
+ }
94
+
95
+ // Check GCS backup if primary is missing
96
+ if (
97
+ shouldRemove &&
98
+ parsedValue?.gcs &&
99
+ storageService.backupProvider
100
+ ) {
101
+ const gcsExists = await storageService.fileExists(
102
+ parsedValue.gcs,
103
+ );
104
+ if (gcsExists) {
105
+ // GCS backup exists, so don't remove the entry
106
+ shouldRemove = false;
107
+ console.log(
108
+ `Lazy cleanup: GCS backup found for key ${key}, keeping entry`,
109
+ );
110
+ }
73
111
  }
112
+
113
+ // Remove stale entry if both primary and backup are missing
114
+ if (shouldRemove) {
115
+ await removeFromFileStoreMap(key);
116
+ console.log(
117
+ `Lazy cleanup: Removed stale cache entry for key ${key}`,
118
+ );
119
+ return null; // Return null since file no longer exists
120
+ }
121
+ } catch (error) {
122
+ console.log(`Lazy cleanup error for key ${key}: ${error.message}`);
123
+ // If cleanup fails, return the original value to avoid breaking functionality
124
+ }
74
125
  }
75
- return value;
76
- } catch (error) {
77
- console.error(`Error getting key from FileStoreMap: ${error}`);
78
- return null; // Return null or any default value indicating an error occurred
126
+
127
+ return parsedValue;
128
+ } catch (error) {
129
+ console.error(`Error parsing JSON: ${error}`);
130
+ return value; // return original value if parsing failed
131
+ }
79
132
  }
133
+ return value;
134
+ } catch (error) {
135
+ console.error(`Error getting key from FileStoreMap: ${error}`);
136
+ return null; // Return null or any default value indicating an error occurred
137
+ }
80
138
  };
81
139
 
82
140
  // Function to remove key from "FileStoreMap" hash map
83
141
  const removeFromFileStoreMap = async (key) => {
84
- try {
142
+ try {
85
143
  // hdel returns the number of keys that were removed.
86
144
  // If the key does not exist, 0 is returned.
87
- const result = await client.hdel('FileStoreMap', key);
88
- if (result === 0) {
89
- console.log(`The key ${key} does not exist`);
90
- } else {
91
- console.log(`The key ${key} was removed successfully`);
92
- }
93
- } catch (error) {
94
- console.error(`Error removing key from FileStoreMap: ${error}`);
145
+ const result = await client.hdel("FileStoreMap", key);
146
+ if (result === 0) {
147
+ console.log(`The key ${key} does not exist`);
148
+ } else {
149
+ console.log(`The key ${key} was removed successfully`);
95
150
  }
151
+ } catch (error) {
152
+ console.error(`Error removing key from FileStoreMap: ${error}`);
153
+ }
96
154
  };
97
155
 
98
156
  const cleanupRedisFileStoreMap = async (nDays = 1) => {
99
- const cleaned = [];
100
- try {
101
- const map = await getAllFileStoreMap();
102
- const nDaysAgo = new Date(Date.now() - nDays * 24 * 60 * 60 * 1000);
103
-
104
- for (const key in map) {
105
- const value = map[key];
106
- const timestamp = value?.timestamp ? new Date(value.timestamp) : null;
107
- if (!timestamp || timestamp.getTime() < nDaysAgo.getTime()) {
108
- // Remove the key from the "FileStoreMap" hash map
109
- await removeFromFileStoreMap(key);
110
- console.log(`Removed key ${key} from FileStoreMap`);
111
- cleaned.push(Object.assign({ hash: key }, value));
112
- }
113
- }
114
- } catch (error) {
115
- console.error(`Error cleaning FileStoreMap: ${error}`);
116
- } finally {
157
+ const cleaned = [];
158
+ try {
159
+ const map = await getAllFileStoreMap();
160
+ const nDaysAgo = new Date(Date.now() - nDays * 24 * 60 * 60 * 1000);
161
+
162
+ for (const key in map) {
163
+ const value = map[key];
164
+ const timestamp = value?.timestamp ? new Date(value.timestamp) : null;
165
+ if (!timestamp || timestamp.getTime() < nDaysAgo.getTime()) {
166
+ // Remove the key from the "FileStoreMap" hash map
167
+ await removeFromFileStoreMap(key);
168
+ console.log(`Removed key ${key} from FileStoreMap`);
169
+ cleaned.push(Object.assign({ hash: key }, value));
170
+ }
171
+ }
172
+ } catch (error) {
173
+ console.error(`Error cleaning FileStoreMap: ${error}`);
174
+ } finally {
117
175
  // Cleanup code if needed
176
+ }
177
+ return cleaned;
178
+ };
179
+
180
+ // Age-based cleanup: removes old entries to prevent cache bloat
181
+ const cleanupRedisFileStoreMapAge = async (
182
+ maxAgeDays = 7,
183
+ maxEntriesToCheck = 10,
184
+ ) => {
185
+ const cleaned = [];
186
+ try {
187
+ const map = await getAllFileStoreMap();
188
+ const maxAgeAgo = new Date(Date.now() - maxAgeDays * 24 * 60 * 60 * 1000);
189
+
190
+ // Convert to array and sort by timestamp (oldest first)
191
+ const entries = Object.entries(map)
192
+ .filter(([_, value]) => value?.timestamp) // Only entries with timestamps
193
+ .sort(([_, a], [__, b]) => {
194
+ const timeA = new Date(a.timestamp).getTime();
195
+ const timeB = new Date(b.timestamp).getTime();
196
+ return timeA - timeB; // Oldest first
197
+ })
198
+ .slice(0, maxEntriesToCheck); // Only check the oldest N entries
199
+
200
+ for (const [key, value] of entries) {
201
+ const timestamp = new Date(value.timestamp);
202
+ if (timestamp.getTime() < maxAgeAgo.getTime()) {
203
+ await removeFromFileStoreMap(key);
204
+ console.log(
205
+ `Age cleanup: Removed old entry ${key} (age: ${Math.round((Date.now() - timestamp.getTime()) / (24 * 60 * 60 * 1000))} days)`,
206
+ );
207
+ cleaned.push(Object.assign({ hash: key }, value));
208
+ }
118
209
  }
119
- return cleaned;
210
+ } catch (error) {
211
+ console.error(`Error during age-based cleanup: ${error}`);
212
+ }
213
+
214
+ return cleaned;
120
215
  };
121
216
 
122
217
  export {
123
- publishRequestProgress,
124
- connectClient,
125
- setFileStoreMap,
126
- getFileStoreMap,
127
- removeFromFileStoreMap,
128
- cleanupRedisFileStoreMap,
218
+ publishRequestProgress,
219
+ connectClient,
220
+ setFileStoreMap,
221
+ getFileStoreMap,
222
+ removeFromFileStoreMap,
223
+ cleanupRedisFileStoreMap,
224
+ cleanupRedisFileStoreMapAge,
129
225
  };