@aj-archipelago/cortex 1.3.11 → 1.3.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (29)
  1. package/helper-apps/cortex-file-handler/.env.test +7 -0
  2. package/helper-apps/cortex-file-handler/.env.test.azure +6 -0
  3. package/helper-apps/cortex-file-handler/.env.test.gcs +9 -0
  4. package/helper-apps/cortex-file-handler/blobHandler.js +313 -204
  5. package/helper-apps/cortex-file-handler/constants.js +107 -0
  6. package/helper-apps/cortex-file-handler/docHelper.js +4 -1
  7. package/helper-apps/cortex-file-handler/fileChunker.js +170 -109
  8. package/helper-apps/cortex-file-handler/helper.js +82 -16
  9. package/helper-apps/cortex-file-handler/index.js +226 -146
  10. package/helper-apps/cortex-file-handler/localFileHandler.js +21 -3
  11. package/helper-apps/cortex-file-handler/package-lock.json +2622 -51
  12. package/helper-apps/cortex-file-handler/package.json +25 -4
  13. package/helper-apps/cortex-file-handler/redis.js +9 -18
  14. package/helper-apps/cortex-file-handler/scripts/setup-azure-container.js +22 -0
  15. package/helper-apps/cortex-file-handler/scripts/setup-test-containers.js +49 -0
  16. package/helper-apps/cortex-file-handler/scripts/test-azure.sh +34 -0
  17. package/helper-apps/cortex-file-handler/scripts/test-gcs.sh +49 -0
  18. package/helper-apps/cortex-file-handler/start.js +39 -4
  19. package/helper-apps/cortex-file-handler/tests/blobHandler.test.js +292 -0
  20. package/helper-apps/cortex-file-handler/tests/docHelper.test.js +148 -0
  21. package/helper-apps/cortex-file-handler/tests/fileChunker.test.js +311 -0
  22. package/helper-apps/cortex-file-handler/tests/start.test.js +930 -0
  23. package/package.json +1 -1
  24. package/pathways/system/entity/sys_entity_continue.js +1 -1
  25. package/pathways/system/entity/sys_entity_start.js +1 -0
  26. package/pathways/system/entity/sys_generator_video_vision.js +2 -1
  27. package/pathways/system/entity/sys_router_tool.js +6 -4
  28. package/server/plugins/openAiWhisperPlugin.js +9 -13
  29. package/server/plugins/replicateApiPlugin.js +54 -2
@@ -1,12 +1,15 @@
1
1
  {
2
- "name": "@aj-archipelago/mediafilechunker",
3
- "version": "1.0.0",
4
- "description": "",
2
+ "name": "@aj-archipelago/cortex-file-handler",
3
+ "version": "1.0.16",
4
+ "description": "File handling service for Cortex - handles file uploads, media chunking, and document processing",
5
5
  "type": "module",
6
6
  "scripts": {
7
7
  "start": "node start.js",
8
8
  "dev": "node -r dotenv/config start.js",
9
- "test": "echo \"No tests yet...\""
9
+ "test": "DOTENV_CONFIG_PATH=.env.test NODE_ENV=test node -r dotenv/config node_modules/ava/entrypoints/cli.mjs",
10
+ "test:azure": "DOTENV_CONFIG_PATH=.env.test.azure NODE_ENV=test ./scripts/test-azure.sh",
11
+ "test:watch": "DOTENV_CONFIG_PATH=.env.test NODE_ENV=test node -r dotenv/config node_modules/ava/entrypoints/cli.mjs --watch",
12
+ "test:gcs": "DOTENV_CONFIG_PATH=.env.test.gcs NODE_ENV=test ./scripts/test-gcs.sh"
10
13
  },
11
14
  "dependencies": {
12
15
  "@azure/storage-blob": "^12.13.0",
@@ -24,5 +27,23 @@
24
27
  "public-ip": "^6.0.1",
25
28
  "uuid": "^9.0.0",
26
29
  "xlsx": "^0.18.5"
30
+ },
31
+ "devDependencies": {
32
+ "ava": "^5.3.1",
33
+ "dotenv": "^16.3.1",
34
+ "nock": "^13.3.0"
35
+ },
36
+ "ava": {
37
+ "files": [
38
+ "tests/**/*",
39
+ "!tests/test-files/**/*",
40
+ "!tests/test-docs/**/*",
41
+ "!tests/mocks/**/*"
42
+ ],
43
+ "timeout": "1m",
44
+ "nodeArguments": [
45
+ "--experimental-modules"
46
+ ],
47
+ "serial": true
27
48
  }
28
49
  }
@@ -96,34 +96,25 @@ const removeFromFileStoreMap = async (key) => {
96
96
  const cleanupRedisFileStoreMap = async (nDays=1) => {
97
97
  let cleaned = [];
98
98
  try {
99
- // Get all key-value pairs from "FileStoreMap"
100
- const fileStoreMap = await getAllFileStoreMap();
99
+ const map = await getAllFileStoreMap();
100
+ const nDaysAgo = new Date(Date.now() - nDays * 24 * 60 * 60 * 1000);
101
101
 
102
- if(!fileStoreMap){
103
- console.log("FileStoreMap is empty");
104
- return;
105
- }
106
-
107
- // Iterate over each key-value pair in the fileStoreMap
108
- for (const [key, value] of Object.entries(fileStoreMap)) {
109
- //check timestamp of each value compare to nDays and remove if older
110
- const timestamp = new Date(value.timestamp);
111
- const now = new Date();
112
- const diffTime = Math.abs(now - timestamp);
113
- const diffDays = Math.ceil(diffTime / (1000 * 60 * 60 * 24));
114
- if (diffDays > nDays) {
102
+ for(const key in map){
103
+ const value = map[key];
104
+ const timestamp = value?.timestamp ? new Date(value.timestamp) : null;
105
+ if(!timestamp || timestamp.getTime() < nDaysAgo.getTime()){
115
106
  // Remove the key from the "FileStoreMap" hash map
116
107
  await removeFromFileStoreMap(key);
117
108
  console.log(`Removed key ${key} from FileStoreMap`);
118
109
  cleaned.push(Object.assign({hash:key}, value));
119
110
  }
120
-
121
111
  }
122
112
  } catch (error) {
123
113
  console.error(`Error cleaning FileStoreMap: ${error}`);
124
- }finally{
125
- return cleaned;
114
+ } finally {
115
+ // Cleanup code if needed
126
116
  }
117
+ return cleaned;
127
118
  };
128
119
 
129
120
 
@@ -0,0 +1,22 @@
1
+ import { BlobServiceClient } from '@azure/storage-blob';
2
+
3
+ async function createContainer() {
4
+ try {
5
+ const blobServiceClient = BlobServiceClient.fromConnectionString("UseDevelopmentStorage=true");
6
+ const containerClient = blobServiceClient.getContainerClient("test-container");
7
+
8
+ console.log("Creating container...");
9
+ await containerClient.create();
10
+ console.log("Container created successfully");
11
+ } catch (error) {
12
+ // Ignore if container already exists
13
+ if (error.statusCode === 409) {
14
+ console.log("Container already exists");
15
+ } else {
16
+ console.error("Error creating container:", error);
17
+ process.exit(1);
18
+ }
19
+ }
20
+ }
21
+
22
+ createContainer();
@@ -0,0 +1,49 @@
1
+ import { BlobServiceClient } from '@azure/storage-blob';
2
+ import { Storage } from '@google-cloud/storage';
3
+
4
+ async function createAzureContainer() {
5
+ try {
6
+ const blobServiceClient = BlobServiceClient.fromConnectionString("UseDevelopmentStorage=true");
7
+ const containerClient = blobServiceClient.getContainerClient("test-container");
8
+
9
+ console.log("Creating Azure container...");
10
+ await containerClient.create();
11
+ console.log("Azure container created successfully");
12
+ } catch (error) {
13
+ // Ignore if container already exists
14
+ if (error.statusCode === 409) {
15
+ console.log("Azure container already exists");
16
+ } else {
17
+ console.error("Error creating Azure container:", error);
18
+ process.exit(1);
19
+ }
20
+ }
21
+ }
22
+
23
+ async function createGCSBucket() {
24
+ try {
25
+ const storage = new Storage({
26
+ projectId: "test-project",
27
+ apiEndpoint: "http://localhost:4443",
28
+ });
29
+
30
+ console.log("Creating GCS bucket...");
31
+ await storage.createBucket("cortextempfiles");
32
+ console.log("GCS bucket created successfully");
33
+ } catch (error) {
34
+ // Ignore if bucket already exists
35
+ if (error.code === 409) {
36
+ console.log("GCS bucket already exists");
37
+ } else {
38
+ console.error("Error creating GCS bucket:", error);
39
+ process.exit(1);
40
+ }
41
+ }
42
+ }
43
+
44
+ async function setup() {
45
+ await createAzureContainer();
46
+ await createGCSBucket();
47
+ }
48
+
49
+ setup();
@@ -0,0 +1,34 @@
1
+ #!/bin/bash
2
+
3
+ # Create temp directory for Azurite
4
+ AZURITE_DIR="/tmp/azurite-test"
5
+ mkdir -p $AZURITE_DIR
6
+
7
+ # Start Azurite in background
8
+ echo "Starting Azurite..."
9
+ azurite --silent --location $AZURITE_DIR &
10
+ AZURITE_PID=$!
11
+
12
+ # Wait for Azurite to start
13
+ sleep 2
14
+
15
+ # Create test container
16
+ echo "Setting up Azure container..."
17
+ node scripts/setup-azure-container.js
18
+
19
+ # Run the tests
20
+ echo "Running tests..."
21
+ node -r dotenv/config node_modules/ava/entrypoints/cli.mjs "$@"
22
+
23
+ # Store test result
24
+ TEST_RESULT=$?
25
+
26
+ # Kill Azurite
27
+ echo "Cleaning up..."
28
+ kill $AZURITE_PID
29
+
30
+ # Clean up Azurite directory
31
+ rm -rf $AZURITE_DIR
32
+
33
+ # Exit with test result
34
+ exit $TEST_RESULT
@@ -0,0 +1,49 @@
1
+ #!/bin/bash
2
+
3
+ # Exit on error
4
+ set -e
5
+
6
+ cleanup() {
7
+ echo "Cleaning up..."
8
+ if [ ! -z "$AZURITE_PID" ]; then
9
+ kill $AZURITE_PID 2>/dev/null || true
10
+ fi
11
+ docker stop fake-gcs-server 2>/dev/null || true
12
+ docker rm fake-gcs-server 2>/dev/null || true
13
+ }
14
+
15
+ # Set up cleanup trap
16
+ trap cleanup EXIT
17
+
18
+ echo "Starting test environment..."
19
+
20
+ # Start Azurite if not running
21
+ if ! nc -z localhost 10000; then
22
+ echo "Starting Azurite..."
23
+ azurite --silent --location .azurite --debug .azurite/debug.log &
24
+ AZURITE_PID=$!
25
+ # Wait for Azurite to be ready
26
+ until nc -z localhost 10000; do
27
+ sleep 1
28
+ done
29
+ fi
30
+
31
+ # Start fake-gcs-server if not running
32
+ if ! nc -z localhost 4443; then
33
+ echo "Starting fake-gcs-server..."
34
+ docker run -d --name fake-gcs-server \
35
+ -p 4443:4443 \
36
+ fsouza/fake-gcs-server -scheme http
37
+ # Wait for fake-gcs-server to be ready
38
+ until nc -z localhost 4443; do
39
+ sleep 1
40
+ done
41
+ fi
42
+
43
+ # Create containers
44
+ echo "Setting up test containers..."
45
+ node scripts/setup-test-containers.js
46
+
47
+ # Run the tests
48
+ echo "Running tests..."
49
+ node -r dotenv/config node_modules/ava/entrypoints/cli.mjs "$@"
@@ -1,8 +1,9 @@
1
- import MediaFileChunker from "./index.js";
1
+ import CortexFileHandler from "./index.js";
2
2
  import express from "express";
3
3
  import { fileURLToPath } from 'url';
4
4
  import { dirname, join } from 'path';
5
5
  import cors from 'cors';
6
+ import { readFileSync } from 'fs';
6
7
 
7
8
  import { publicIpv4 } from 'public-ip';
8
9
  const ipAddress = await publicIpv4();
@@ -11,18 +12,52 @@ const app = express();
11
12
  const port = process.env.PORT || 7071;
12
13
  const publicFolder = join(dirname(fileURLToPath(import.meta.url)), 'files');
13
14
 
15
+ // Get version from package.json
16
+ const packageJson = JSON.parse(readFileSync(join(dirname(fileURLToPath(import.meta.url)), 'package.json'), 'utf8'));
17
+ const version = packageJson.version;
18
+
14
19
  app.use(cors());
15
20
  // Serve static files from the public folder
16
21
  app.use('/files', express.static(publicFolder));
17
22
 
23
+ // Health check endpoint
24
+ app.get('/health', (req, res) => {
25
+ res.status(200).json({
26
+ status: 'healthy',
27
+ version: version
28
+ });
29
+ });
30
+
31
+ // New primary endpoint
32
+ app.all('/api/CortexFileHandler', async (req, res) => {
33
+ const context = { req, res, log: console.log }
34
+ try {
35
+ await CortexFileHandler(context, req);
36
+ context.log(context.res);
37
+ res.status(context.res.status || 200).send(context.res.body);
38
+ } catch (error) {
39
+ const status = error.status || 500;
40
+ const message = error.message || 'Internal server error';
41
+ res.status(status).send(message);
42
+ }
43
+ });
44
+
45
+ // Legacy endpoint for compatibility
18
46
  app.all('/api/MediaFileChunker', async (req, res) => {
19
47
  const context = { req, res, log: console.log }
20
- await MediaFileChunker(context, req);
21
- res.send(context.res.body);
48
+ try {
49
+ await CortexFileHandler(context, req);
50
+ context.log(context.res);
51
+ res.status(context.res.status || 200).send(context.res.body);
52
+ } catch (error) {
53
+ const status = error.status || 500;
54
+ const message = error.message || 'Internal server error';
55
+ res.status(status).send(message);
56
+ }
22
57
  });
23
58
 
24
59
  app.listen(port, () => {
25
- console.log(`MediaFileChunker helper running on port ${port}`);
60
+ console.log(`Cortex File Handler v${version} running on port ${port} (includes legacy MediaFileChunker endpoint)`);
26
61
  });
27
62
 
28
63
  export { port, publicFolder, ipAddress };
@@ -0,0 +1,292 @@
1
+ import test from 'ava';
2
+ import fs from 'fs';
3
+ import path from 'path';
4
+ import { fileURLToPath } from 'url';
5
+ import { uploadBlob, ensureGCSUpload, gcsUrlExists, deleteGCS, getBlobClient } from '../blobHandler.js';
6
+ import axios from 'axios';
7
+ import CortexFileHandler from '../index.js';
8
+ import { setFileStoreMap } from '../redis.js';
9
+ import { urlExists } from '../helper.js';
10
+
11
+ const __filename = fileURLToPath(import.meta.url);
12
+ const __dirname = path.dirname(__filename);
13
+
14
+ // Helper function to determine if GCS is configured
15
+ function isGCSConfigured() {
16
+ return process.env.GCP_SERVICE_ACCOUNT_KEY_BASE64 || process.env.GCP_SERVICE_ACCOUNT_KEY;
17
+ }
18
+
19
+ // Helper function to check file size in GCS
20
+ async function getGCSFileSize(gcsUrl) {
21
+ if (!isGCSConfigured()) return null;
22
+ try {
23
+ const bucket = gcsUrl.split('/')[2];
24
+ const filename = gcsUrl.split('/').slice(3).join('/');
25
+
26
+ if (process.env.STORAGE_EMULATOR_HOST) {
27
+ const response = await axios.get(
28
+ `${process.env.STORAGE_EMULATOR_HOST}/storage/v1/b/${bucket}/o/${encodeURIComponent(filename)}`,
29
+ { validateStatus: status => status === 200 || status === 404 }
30
+ );
31
+ if (response.status === 200) {
32
+ return parseInt(response.data.size);
33
+ }
34
+ return null;
35
+ }
36
+ } catch (error) {
37
+ return null;
38
+ }
39
+ }
40
+
41
+ // Helper function to check file size in Azure/HTTP
42
+ async function getHttpFileSize(url) {
43
+ try {
44
+ const response = await axios.head(url);
45
+ const contentLength = response.headers['content-length'];
46
+ return contentLength ? parseInt(contentLength) : null;
47
+ } catch (error) {
48
+ console.error('Error getting HTTP file size:', error);
49
+ return null;
50
+ }
51
+ }
52
+
53
+ test('test GCS backup during initial upload', async t => {
54
+ if (!isGCSConfigured()) {
55
+ t.pass('Skipping test - GCS not configured');
56
+ return;
57
+ }
58
+
59
+ // Create a test file with known content
60
+ const testContent = 'Hello World!'.repeat(1000); // Create a decent sized file
61
+ const testFile = path.join(__dirname, 'test.txt');
62
+ fs.writeFileSync(testFile, testContent);
63
+
64
+ try {
65
+ // Upload the file - should go to both Azure/local and GCS
66
+ const context = { log: console.log };
67
+ const result = await uploadBlob(context, null, false, testFile);
68
+
69
+ // Verify we got both URLs
70
+ t.truthy(result.url, 'Should have primary storage URL');
71
+ t.truthy(result.gcs, 'Should have GCS backup URL');
72
+
73
+ // Verify GCS file exists
74
+ const gcsExists = await gcsUrlExists(result.gcs);
75
+ t.true(gcsExists, 'File should exist in GCS');
76
+
77
+ // Verify file content size in GCS
78
+ const gcsSize = await getGCSFileSize(result.gcs);
79
+ t.is(gcsSize, testContent.length, 'GCS file size should match original');
80
+ } finally {
81
+ // Cleanup
82
+ if (fs.existsSync(testFile)) {
83
+ fs.unlinkSync(testFile);
84
+ }
85
+ }
86
+ });
87
+
88
+ test('test GCS backup restoration when missing', async t => {
89
+ if (!isGCSConfigured()) {
90
+ t.pass('Skipping test - GCS not configured');
91
+ return;
92
+ }
93
+
94
+ // Create a test file with known content
95
+ const testContent = 'Hello World!'.repeat(1000); // Create a decent sized file
96
+ const testFile = path.join(__dirname, 'test.txt');
97
+ fs.writeFileSync(testFile, testContent);
98
+
99
+ try {
100
+ // First upload normally
101
+ const context = { log: console.log };
102
+ const result = await uploadBlob(context, null, false, testFile);
103
+
104
+ // Verify initial upload worked
105
+ t.truthy(result.gcs, 'Should have GCS backup URL after initial upload');
106
+
107
+ // Delete the GCS file
108
+ const gcsFileName = result.gcs.replace('gs://cortextempfiles/', '');
109
+ await deleteGCS(gcsFileName);
110
+
111
+ // Verify file is gone
112
+ const existsAfterDelete = await gcsUrlExists(result.gcs);
113
+ t.false(existsAfterDelete, 'File should not exist in GCS after deletion');
114
+
115
+ // Remove GCS URL to simulate missing backup
116
+ const { gcs: _, ...fileInfo } = result; // eslint-disable-line no-unused-vars
117
+
118
+ // Try to ensure GCS backup
119
+ const updatedResult = await ensureGCSUpload(context, fileInfo);
120
+
121
+ // Verify GCS URL was added
122
+ t.truthy(updatedResult.gcs, 'Should have GCS backup URL after ensure');
123
+
124
+ // Verify GCS file exists
125
+ const gcsExists = await gcsUrlExists(updatedResult.gcs);
126
+ t.true(gcsExists, 'File should exist in GCS after ensure');
127
+
128
+ // Verify file content size in GCS
129
+ const gcsSize = await getGCSFileSize(updatedResult.gcs);
130
+ t.is(gcsSize, testContent.length, 'GCS file size should match original after ensure');
131
+ } finally {
132
+ // Cleanup
133
+ if (fs.existsSync(testFile)) {
134
+ fs.unlinkSync(testFile);
135
+ }
136
+ }
137
+ });
138
+
139
+ test('test primary storage restoration from GCS backup', async t => {
140
+ if (!isGCSConfigured()) {
141
+ t.pass('Skipping test - GCS not configured');
142
+ return;
143
+ }
144
+
145
+ // Create a test file with known content
146
+ const testContent = 'Hello World!'.repeat(1000);
147
+ const testFile = path.join(__dirname, 'test.txt');
148
+ fs.writeFileSync(testFile, testContent);
149
+
150
+ try {
151
+ // First upload normally
152
+ const context = { log: console.log };
153
+ const initialResult = await uploadBlob(context, null, false, testFile);
154
+
155
+ // Verify initial upload worked
156
+ t.truthy(initialResult.url, 'Should have primary storage URL');
157
+ t.truthy(initialResult.gcs, 'Should have GCS backup URL');
158
+
159
+ // Store the hash and simulate a missing primary file by requesting with a bad URL
160
+ const hash = 'test_primary_restore';
161
+ const modifiedResult = {
162
+ ...initialResult,
163
+ url: initialResult.url.replace('.blob.core.windows.net', '.invalid.url')
164
+ };
165
+
166
+ // Set up Redis state with the bad URL
167
+ await setFileStoreMap(hash, modifiedResult);
168
+
169
+ // Set up request for the handler
170
+ const mockReq = {
171
+ method: 'GET',
172
+ body: { params: { hash, checkHash: true } }
173
+ };
174
+
175
+ // Set up context for the handler
176
+ const handlerContext = {
177
+ log: console.log,
178
+ res: null
179
+ };
180
+
181
+ // Call the handler which should restore from GCS
182
+ await CortexFileHandler(handlerContext, mockReq);
183
+
184
+ // Verify we got a valid response
185
+ t.is(handlerContext.res.status, 200, 'Should get successful response');
186
+ t.truthy(handlerContext.res.body.url, 'Should have restored primary URL');
187
+ t.truthy(handlerContext.res.body.gcs, 'Should still have GCS URL');
188
+
189
+ // Verify the restored URL is accessible
190
+ const { valid } = await urlExists(handlerContext.res.body.url);
191
+ t.true(valid, 'Restored URL should be accessible');
192
+
193
+ // Verify file sizes match in both storages
194
+ const gcsSize = await getGCSFileSize(handlerContext.res.body.gcs);
195
+ const azureSize = await getHttpFileSize(handlerContext.res.body.url);
196
+ t.is(azureSize, testContent.length, 'Azure file size should match original');
197
+ t.is(gcsSize, azureSize, 'Azure and GCS file sizes should match');
198
+
199
+ } finally {
200
+ // Cleanup
201
+ if (fs.existsSync(testFile)) {
202
+ fs.unlinkSync(testFile);
203
+ }
204
+ }
205
+ });
206
+
207
+ test('test hash check returns 404 when both storages are empty', async t => {
208
+ if (!isGCSConfigured()) {
209
+ t.pass('Skipping test - GCS not configured');
210
+ return;
211
+ }
212
+
213
+ // Create a test file with known content
214
+ const testContent = 'Hello World!'.repeat(1000);
215
+ const testFile = path.join(__dirname, 'test.txt');
216
+ fs.writeFileSync(testFile, testContent);
217
+
218
+ try {
219
+ // First upload normally
220
+ const context = { log: console.log };
221
+ const initialResult = await uploadBlob(context, null, false, testFile);
222
+
223
+ // Verify initial upload worked
224
+ t.truthy(initialResult.url, 'Should have primary storage URL');
225
+ t.truthy(initialResult.gcs, 'Should have GCS backup URL');
226
+
227
+ // Store the hash
228
+ const hash = 'test_both_missing';
229
+ await setFileStoreMap(hash, initialResult);
230
+
231
+ // Verify both files exist initially
232
+ const initialPrimaryCheck = await urlExists(initialResult.url);
233
+ const initialGcsCheck = await gcsUrlExists(initialResult.gcs);
234
+ t.true(initialPrimaryCheck.valid, 'Primary file should exist initially');
235
+ t.true(initialGcsCheck, 'GCS file should exist initially');
236
+
237
+ // Delete from Azure/primary storage
238
+ const azureUrl = new URL(initialResult.url);
239
+ console.log('Azure URL:', initialResult.url);
240
+ // Get the path without query parameters and decode it
241
+ const fullPath = decodeURIComponent(azureUrl.pathname);
242
+ console.log('Full path:', fullPath);
243
+ // Get the request ID and filename from the path
244
+ const pathParts = fullPath.split('/');
245
+ const blobName = pathParts[pathParts.length - 1];
246
+ console.log('Attempting to delete Azure blob:', blobName);
247
+
248
+ // Delete the blob using the correct container name
249
+ const { containerClient } = await getBlobClient();
250
+ const blockBlobClient = containerClient.getBlockBlobClient(blobName);
251
+ await blockBlobClient.delete();
252
+ console.log('Azure deletion completed');
253
+
254
+ // Add a small delay to ensure deletion is complete
255
+ await new Promise(resolve => setTimeout(resolve, 1000));
256
+
257
+ // Delete from GCS
258
+ const gcsFileName = initialResult.gcs.replace('gs://cortextempfiles/', '');
259
+ console.log('Attempting to delete GCS file:', gcsFileName);
260
+ await deleteGCS(gcsFileName);
261
+ console.log('GCS deletion completed');
262
+
263
+ // Verify both files are gone
264
+ const primaryExists = await urlExists(initialResult.url);
265
+ console.log('Primary exists after deletion:', primaryExists.valid);
266
+ const gcsExists = await gcsUrlExists(initialResult.gcs);
267
+ console.log('GCS exists after deletion:', gcsExists);
268
+ t.false(primaryExists.valid, 'Primary file should be deleted');
269
+ t.false(gcsExists, 'GCS file should be deleted');
270
+
271
+ // Try to get the file via hash - should fail
272
+ const handlerContext = {
273
+ log: console.log,
274
+ res: null
275
+ };
276
+
277
+ await CortexFileHandler(handlerContext, {
278
+ method: 'GET',
279
+ body: { params: { hash, checkHash: true } }
280
+ });
281
+
282
+ // Verify we got a 404 response
283
+ t.is(handlerContext.res.status, 404, 'Should get 404 when both files are missing');
284
+ t.true(handlerContext.res.body.includes('not found in storage'), 'Should indicate files are missing in storage');
285
+
286
+ } finally {
287
+ // Cleanup
288
+ if (fs.existsSync(testFile)) {
289
+ fs.unlinkSync(testFile);
290
+ }
291
+ }
292
+ });