@aj-archipelago/cortex 1.3.18 → 1.3.20

package/README.md CHANGED
@@ -1,5 +1,5 @@
  # Cortex
- Cortex simplifies and accelerates the process of creating applications that harness the power of modern AI models like GPT-4o (chatGPT), o1, Gemini, the Claude series, Flux, Grok and more by providing a structured interface (GraphQL or REST) to a powerful prompt execution environment. This enables complex augmented prompting and abstracts away most of the complexity of managing model connections like chunking input, rate limiting, formatting output, caching, and handling errors.
+ Cortex simplifies and accelerates the process of creating applications that harness the power of modern AI models like GPT-4o (chatGPT), o1, o3-mini, Gemini, the Claude series, Flux, Grok and more by providing a structured interface (GraphQL or REST) to a powerful prompt execution environment. This enables complex augmented prompting and abstracts away most of the complexity of managing model connections like chunking input, rate limiting, formatting output, caching, and handling errors.
  ## Why build Cortex?
  Modern AI models are transformational, but a number of complexities emerge when developers start using them to deliver application-ready functions. Most models require precisely formatted, carefully engineered and sequenced prompts to produce consistent results, and the responses are typically largely unstructured text without validation or formatting. Additionally, these models are evolving rapidly, are typically costly and slow to query and implement hard request size and rate restrictions that need to be carefully navigated for optimum throughput. Cortex offers a solution to these problems and provides a simple and extensible package for interacting with NL AI models.
  
@@ -20,7 +20,7 @@ Just about anything! It's kind of an LLM swiss army knife. Here are some ideas:
  - OpenAI models:
  - GPT-4 Omni (GPT-4o)
  - GPT-4 Omni Mini (GPT-4o-mini)
- - O1 (including o1-mini and o1-preview) (Advanced reasoning models)
+ - O1 and O3-mini (Advanced reasoning models)
  - Most of the earlier GPT models (GPT-4, 3.5 Turbo, etc.)
  - Google models:
  - Gemini 1.5 Pro
@@ -521,7 +521,7 @@ Models are configured in the `models` section of the config. Each model can have
  
  - `OPENAI-CHAT`: For OpenAI chat models (legacy GPT-3.5)
  - `OPENAI-VISION`: For multimodal models (GPT-4o, GPT-4o-mini) supporting text, images, and other content types
- - `OPENAI-REASONING`: For O1 reasoning model with vision capabilities
+ - `OPENAI-REASONING`: For O1 and O3-mini reasoning models with vision capabilities
  - `OPENAI-COMPLETION`: For OpenAI completion models
  - `OPENAI-WHISPER`: For Whisper transcription
  - `GEMINI-1.5-CHAT`: For Gemini 1.5 Pro chat models
package/config.js CHANGED
@@ -207,7 +207,7 @@ var config = convict({
  "maxReturnTokens": 100000,
  "supportsStreaming": false
  },
- "oai-o1-mini": {
+ "oai-o3-mini": {
  "type": "OPENAI-REASONING",
  "url": "https://api.openai.com/v1/chat/completions",
  "headers": {
@@ -215,26 +215,11 @@ var config = convict({
  "Content-Type": "application/json"
  },
  "params": {
- "model": "o1-mini"
+ "model": "o3-mini"
  },
  "requestsPerSecond": 10,
- "maxTokenLength": 128000,
- "maxReturnTokens": 65536,
- "supportsStreaming": false
- },
- "oai-o1-preview": {
- "type": "OPENAI-REASONING",
- "url": "https://api.openai.com/v1/chat/completions",
- "headers": {
- "Authorization": "Bearer {{OPENAI_API_KEY}}",
- "Content-Type": "application/json"
- },
- "params": {
- "model": "o1-preview"
- },
- "requestsPerSecond": 10,
- "maxTokenLength": 128000,
- "maxReturnTokens": 32768,
+ "maxTokenLength": 200000,
+ "maxReturnTokens": 100000,
  "supportsStreaming": false
  },
  "azure-bing": {
@@ -494,25 +494,37 @@ async function deleteGCS(blobName) {
  if (!gcs) throw new Error("Google Cloud Storage is not initialized");
  
  try {
+ const bucket = gcs.bucket(GCS_BUCKETNAME);
+ const deletedFiles = [];
+
  if (process.env.STORAGE_EMULATOR_HOST) {
  // For fake GCS server, use HTTP API directly
- const response = await axios.delete(
- `http://localhost:4443/storage/v1/b/${GCS_BUCKETNAME}/o/${encodeURIComponent(blobName)}`,
- { validateStatus: status => status === 200 || status === 404 }
+ const response = await axios.get(
+ `http://localhost:4443/storage/v1/b/${GCS_BUCKETNAME}/o`,
+ { params: { prefix: blobName } }
  );
- if (response.status === 200) {
- console.log(`Cleaned GCS file: ${blobName}`);
- return [blobName];
+ if (response.data.items) {
+ for (const item of response.data.items) {
+ await axios.delete(
+ `http://localhost:4443/storage/v1/b/${GCS_BUCKETNAME}/o/${encodeURIComponent(item.name)}`,
+ { validateStatus: status => status === 200 || status === 404 }
+ );
+ deletedFiles.push(item.name);
+ }
  }
- return [];
  } else {
  // For real GCS, use the SDK
- const bucket = gcs.bucket(GCS_BUCKETNAME);
- const file = bucket.file(blobName);
- await file.delete();
- console.log(`Cleaned GCS file: ${blobName}`);
- return [blobName];
+ const [files] = await bucket.getFiles({ prefix: blobName });
+ for (const file of files) {
+ await file.delete();
+ deletedFiles.push(file.name);
+ }
+ }
+
+ if (deletedFiles.length > 0) {
+ console.log(`Cleaned GCS files: ${deletedFiles.join(', ')}`);
  }
+ return deletedFiles;
  } catch (error) {
  if (error.code !== 404) {
  console.error(`Error in deleteGCS: ${error}`);
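Net effect: `deleteGCS` now treats its argument as an object-name prefix and returns every deleted name, instead of deleting one exact blob. A minimal usage sketch (the `requestId` value is illustrative):

```js
// Deletes every object whose name starts with the given prefix,
// e.g. `${requestId}/part-000.mp3`, `${requestId}/part-001.mp3`, ...
const deleted = await deleteGCS(requestId);
console.log(`Removed ${deleted.length} GCS objects`);
```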
@@ -541,4 +553,15 @@ async function ensureGCSUpload(context, existingFile) {
  return existingFile;
  }
  
- export { saveFileToBlob, deleteBlob, deleteGCS, uploadBlob, cleanup, cleanupGCS, gcsUrlExists, ensureGCSUpload, gcs };
+ // Helper function to upload a chunk to GCS
+ async function uploadChunkToGCS(chunkPath, requestId) {
+ if (!gcs) return null;
+
+ const gcsFileName = `${requestId}/${path.basename(chunkPath)}`;
+ await gcs.bucket(GCS_BUCKETNAME).upload(chunkPath, {
+ destination: gcsFileName
+ });
+ return `gs://${GCS_BUCKETNAME}/${gcsFileName}`;
+ }
+
+ export { saveFileToBlob, deleteBlob, deleteGCS, uploadBlob, cleanup, cleanupGCS, gcsUrlExists, ensureGCSUpload, gcs, uploadChunkToGCS };
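Because the helper names objects as `${requestId}/<basename>`, chunks uploaded here line up exactly with the prefix-based `deleteGCS` above. A sketch (the chunk path is a hypothetical example):

```js
// Returns a gs:// URL, or null when GCS is not initialized.
const gcsUrl = await uploadChunkToGCS('/tmp/chunks/part-000.mp3', requestId);
// gcsUrl === `gs://${GCS_BUCKETNAME}/${requestId}/part-000.mp3`
await deleteGCS(requestId); // later removes all chunks sharing the requestId prefix
```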
@@ -77,16 +77,23 @@ export const ACCEPTED_MIME_TYPES = {
  'audio/aac': ['.aac'],
  'audio/ogg': ['.ogg'],
  'audio/flac': ['.flac'],
+ 'audio/m4a': ['.m4a'],
+ 'audio/x-m4a': ['.m4a'],
+ 'audio/mp3': ['.mp3'],
+ 'audio/mp4': ['.mp4'],
  
  // Video types
  'video/mp4': ['.mp4'],
  'video/mpeg': ['.mpeg', '.mpg'],
+ 'video/mov': ['.mov'],
  'video/quicktime': ['.mov'],
  'video/x-msvideo': ['.avi'],
  'video/x-flv': ['.flv'],
+ 'video/mpg': ['.mpeg', '.mpg'],
  'video/webm': ['.webm'],
- 'video/x-ms-wmv': ['.wmv'],
- 'video/3gpp': ['.3gp']
+ 'video/wmv': ['.wmv'],
+ 'video/3gpp': ['.3gp'],
+ 'video/m4v': ['.m4v'],
  };
  
  // Helper function to check if a mime type is accepted
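The helper the trailing comment refers to sits outside this hunk; a plausible lookup against the expanded map might read like this sketch (the name `isAcceptedMimeType` and its body are assumptions, not the package's code):

```js
// Sketch only: case-insensitive membership test against ACCEPTED_MIME_TYPES.
function isAcceptedMimeType(mimeType) {
    if (!mimeType) return false;
    return Object.prototype.hasOwnProperty.call(ACCEPTED_MIME_TYPES, mimeType.toLowerCase());
}
```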
@@ -1,5 +1,5 @@
  import { downloadFile, splitMediaFile } from './fileChunker.js';
- import { saveFileToBlob, deleteBlob, deleteGCS, uploadBlob, cleanup, cleanupGCS, gcsUrlExists, ensureGCSUpload, gcs, AZURE_STORAGE_CONTAINER_NAME } from './blobHandler.js';
+ import { saveFileToBlob, deleteBlob, deleteGCS, uploadBlob, cleanup, cleanupGCS, gcsUrlExists, ensureGCSUpload, gcs, AZURE_STORAGE_CONTAINER_NAME, uploadChunkToGCS } from './blobHandler.js';
  import { cleanupRedisFileStoreMap, getFileStoreMap, publishRequestProgress, removeFromFileStoreMap, setFileStoreMap } from './redis.js';
  import { ensureEncoded, ensureFileExtension, urlExists } from './helper.js';
  import { moveFileToPublicFolder, deleteFolder, cleanupLocal } from './localFileHandler.js';
@@ -104,9 +104,7 @@ async function CortexFileHandler(context, req) {
  const azureResult = useAzure ? await deleteBlob(deleteRequestId) : await deleteFolder(deleteRequestId);
  const gcsResult = [];
  if (gcs) {
- for (const blobName of azureResult) {
- gcsResult.push(...await deleteGCS(blobName));
- }
+ gcsResult.push(...await deleteGCS(deleteRequestId));
  }
  
  context.res = {
@@ -393,10 +391,21 @@ async function CortexFileHandler(context, req) {
  // sequential processing of chunks
  for (let index = 0; index < chunks.length; index++) {
  const chunkPath = chunks[index];
- const blobName = useAzure ? await saveFileToBlob(chunkPath, requestId) : await moveFileToPublicFolder(chunkPath, requestId);
+ let blobName;
+ let gcsUrl;
+
+ if (useAzure) {
+ blobName = await saveFileToBlob(chunkPath, requestId);
+ } else {
+ blobName = await moveFileToPublicFolder(chunkPath, requestId);
+ }
+
+ // If GCS is configured, save to GCS
+ gcsUrl = await uploadChunkToGCS(chunkPath, requestId);
+
  const chunkOffset = chunkOffsets[index];
- result.push({ uri: blobName, offset: chunkOffset });
- console.log(`Saved chunk as: ${blobName}`);
+ result.push({ uri: blobName, offset: chunkOffset, gcs: gcsUrl });
+ console.log(`Saved chunk as: ${blobName}${gcsUrl ? ` and ${gcsUrl}` : ''}`);
  await sendProgress();
  }
  
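After this change each chunk entry carries both storage locations; when GCS is not configured, `uploadChunkToGCS` returns null and `gcs` is simply null. The shape, reconstructed from the `result.push` call above (URL values are illustrative):

```js
// Illustrative entry pushed into `result` for each chunk:
const exampleChunkEntry = {
    uri: 'https://example.net/<requestId>/chunk-0.mp3', // Azure blob or local public-folder URL
    offset: 0,                                          // chunk's start offset within the source media
    gcs: 'gs://<bucket>/<requestId>/chunk-0.mp3'        // null when GCS is not configured
};
```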
@@ -5,6 +5,10 @@ import axios from 'axios';
  import FormData from 'form-data';
  import { port, publicFolder, ipAddress } from '../start.js';
  import { v4 as uuidv4 } from 'uuid';
+ import path from 'path';
+ import os from 'os';
+ import fs from 'fs';
+ import { execSync } from 'child_process';
  
  // Add these helper functions at the top after imports
  const baseUrl = `http://localhost:${port}/api/CortexFileHandler`;
@@ -597,8 +601,7 @@ test.serial('should handle hash reuse with Azure storage', async t => {
  const originalUrl = upload1.data.url;
  
  // Check hash exists and returns the correct URL
- const hashCheck1 = await axios.get(baseUrl, {
- params: { hash: testHash, checkHash: true },
+ const hashCheck1 = await axios.get(baseUrl, { hash: testHash, checkHash: true }, {
  validateStatus: status => true
  });
  t.is(hashCheck1.status, 200, 'Hash should exist after first upload');
@@ -634,8 +637,7 @@ test.serial('should handle hash reuse with Azure storage', async t => {
  await cleanupUploadedFile(t, originalUrl);
  
  // Verify hash is now gone
- const hashCheckAfterDelete = await axios.get(baseUrl, {
- params: { hash: testHash, checkHash: true },
+ const hashCheckAfterDelete = await axios.get(baseUrl, { hash: testHash, checkHash: true }, {
  validateStatus: status => true
  });
  t.is(hashCheckAfterDelete.status, 404, 'Hash should be gone after file deletion');
@@ -762,6 +764,144 @@ test.serial('should handle GCS URL format and accessibility', async t => {
  await cleanupUploadedFile(t, uploadResponse.data.url);
  });
  
+ // Add this helper function after other helper functions
+ async function createAndUploadTestFile() {
+ // Create a temporary file path
+ const tempDir = path.join(os.tmpdir(), uuidv4());
+ fs.mkdirSync(tempDir, { recursive: true });
+ const tempFile = path.join(tempDir, 'test.mp3');
+
+ // Generate a real MP3 file using ffmpeg
+ try {
+ execSync(`ffmpeg -f lavfi -i anullsrc=r=44100:cl=mono -t 10 -q:a 9 -acodec libmp3lame "${tempFile}"`, {
+ stdio: ['ignore', 'pipe', 'pipe']
+ });
+
+ // Upload the real media file
+ const form = new FormData();
+ form.append('file', fs.createReadStream(tempFile));
+
+ const uploadResponse = await axios.post(baseUrl, form, {
+ headers: form.getHeaders(),
+ validateStatus: status => true,
+ timeout: 5000
+ });
+
+ // Wait a short time to ensure file is available
+ await new Promise(resolve => setTimeout(resolve, 1000));
+
+ // Clean up temp file
+ fs.rmSync(tempDir, { recursive: true, force: true });
+
+ return uploadResponse.data.url;
+ } catch (error) {
+ console.error('Error creating test file:', error);
+ throw error;
+ }
+ }
+
+ test.serial('should handle chunking with GCS integration when configured', async t => {
+ if (!isGCSConfigured()) {
+ t.pass('Skipping test - GCS not configured');
+ return;
+ }
+
+ // Create a large test file first
+ const testFileUrl = await createAndUploadTestFile();
+ const requestId = uuidv4();
+
+ // Request chunking via GET
+ const chunkResponse = await axios.get(baseUrl, {
+ params: {
+ uri: testFileUrl,
+ requestId
+ },
+ validateStatus: status => true,
+ timeout: 5000
+ });
+
+ t.is(chunkResponse.status, 200, 'Chunked request should succeed');
+ t.truthy(chunkResponse.data, 'Response should contain data');
+ t.true(Array.isArray(chunkResponse.data), 'Response should be an array');
+ t.true(chunkResponse.data.length > 0, 'Should have created at least one chunk');
+
+ // Verify each chunk exists in both Azure/Local and GCS
+ for (const chunk of chunkResponse.data) {
+ // Verify Azure/Local URL is accessible
+ const azureResponse = await axios.get(convertToLocalUrl(chunk.uri), {
+ validateStatus: status => true,
+ timeout: 5000
+ });
+ t.is(azureResponse.status, 200, `Chunk should be accessible in Azure/Local: ${chunk.uri}`);
+
+ // Verify GCS URL exists and is in correct format
+ t.truthy(chunk.gcs, 'Chunk should contain GCS URL');
+ t.true(chunk.gcs.startsWith('gs://'), 'GCS URL should use gs:// protocol');
+
+ // Check if chunk exists in fake GCS
+ const exists = await checkGCSFile(chunk.gcs);
+ t.true(exists, `Chunk should exist in GCS: ${chunk.gcs}`);
+ }
+
+ // Clean up chunks
+ const deleteResponse = await axios.delete(`${baseUrl}?operation=delete&requestId=${requestId}`);
+ t.is(deleteResponse.status, 200, 'Delete should succeed');
+
+ // Verify all chunks are deleted from both storages
+ for (const chunk of chunkResponse.data) {
+ // Verify Azure/Local chunk is gone
+ const azureResponse = await axios.get(convertToLocalUrl(chunk.uri), {
+ validateStatus: status => true,
+ timeout: 5000
+ });
+ t.is(azureResponse.status, 404, `Chunk should not be accessible in Azure/Local after deletion: ${chunk.uri}`);
+
+ // Verify GCS chunk is gone
+ const exists = await checkGCSFile(chunk.gcs);
+ t.false(exists, `Chunk should not exist in GCS after deletion: ${chunk.gcs}`);
+ }
+ });
+
+ test.serial('should handle chunking errors gracefully with GCS', async t => {
+ if (!isGCSConfigured()) {
+ t.pass('Skipping test - GCS not configured');
+ return;
+ }
+
+ // Create a test file to get a valid URL format
+ const validFileUrl = await createAndUploadTestFile();
+
+ // Test with invalid URL that matches the format of our real URLs
+ const invalidUrl = validFileUrl.replace(/[^/]+$/, 'nonexistent-file.mp3');
+ const invalidResponse = await axios.get(baseUrl, {
+ params: {
+ uri: invalidUrl,
+ requestId: uuidv4()
+ },
+ validateStatus: status => true,
+ timeout: 5000
+ });
+
+ t.is(invalidResponse.status, 500, 'Should reject nonexistent file URL');
+ t.true(invalidResponse.data.includes('Error processing media file'), 'Should indicate error processing media file');
+
+ // Test with missing URI
+ const noUriResponse = await axios.get(baseUrl, {
+ params: {
+ requestId: uuidv4()
+ },
+ validateStatus: status => true,
+ timeout: 5000
+ });
+
+ t.is(noUriResponse.status, 400, 'Should reject request with no URI');
+ t.is(
+ noUriResponse.data,
+ 'Please pass a uri and requestId on the query string or in the request body',
+ 'Should return proper error message'
+ );
+ });
+
  // Legacy MediaFileChunker Tests
  test.serial('should handle file upload through legacy MediaFileChunker endpoint', async t => {
  const form = new FormData();
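These tests call `isGCSConfigured` and `checkGCSFile`, which sit outside this hunk. Minimal sketches of what such helpers could look like against the fake GCS emulator used elsewhere in the diff (only the names come from the tests; the bodies and the `GCP_SERVICE_ACCOUNT_KEY` variable are assumptions):

```js
// Sketch: GCS counts as configured when the emulator (or real credentials) are present.
function isGCSConfigured() {
    return Boolean(process.env.STORAGE_EMULATOR_HOST || process.env.GCP_SERVICE_ACCOUNT_KEY);
}

// Sketch: resolve a gs://bucket/name URL against the fake GCS server's HTTP API.
async function checkGCSFile(gcsUrl) {
    const [, , bucket, ...parts] = gcsUrl.split('/');
    const res = await axios.get(
        `http://localhost:4443/storage/v1/b/${bucket}/o/${encodeURIComponent(parts.join('/'))}`,
        { validateStatus: () => true }
    );
    return res.status === 200;
}
```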
@@ -4,19 +4,22 @@ To install dependencies:
  
  ```bash
  bun install
+ cd client
+ bun install
  ```
  
  To run:
  
+ Set up your .env file with the correct Cortex API key and access to the realtime voice service.
+
  ```bash
- cd client
- bun run build
- cd ..
- bun run start
+ # In the server directory
+ bun run dev
  ```
  
  To run in production:
  
  ```bash
+ # In the server directory
  bun run start:prod
  ```
@@ -566,10 +566,10 @@ export class SocketServer {
  
  // Parallelize memory reads
  const [memorySelf, memoryUser, memoryDirectives, memoryTopics, voiceSample] = await Promise.all([
- readMemory(socket.data.userId, socket.data.aiName, "memorySelf", 1),
- readMemory(socket.data.userId, socket.data.aiName, "memoryUser", 1),
- readMemory(socket.data.userId, socket.data.aiName, "memoryDirectives", 1),
- readMemory(socket.data.userId, socket.data.aiName, "memoryTopics", 0, 0, 10),
+ readMemory(socket.data.userId, "memorySelf", 1, 0, 0, true),
+ readMemory(socket.data.userId, "memoryUser", 1, 0, 0, true),
+ readMemory(socket.data.userId, "memoryDirectives", 1, 0, 0, true),
+ readMemory(socket.data.userId, "memoryTopics", 0, 0, 10, false),
  style(socket.data.userId, socket.data.aiName, socket.data.aiStyle, [], "")
  ]);
  
@@ -24,8 +24,8 @@ query ManageMemory($contextId: String, $chatHistory: [MultiMessage], $aiName: St
  `
  
  const READ_MEMORY = `
- query ReadMemory($contextId: String, $aiName: String, $section: String, $priority: Int, $recentHours: Int, $numResults: Int) {
- sys_read_memory(contextId: $contextId, aiName: $aiName, section: $section, priority: $priority, recentHours: $recentHours, numResults: $numResults) {
+ query ReadMemory($contextId: String, $section: String, $priority: Int, $recentHours: Int, $numResults: Int) {
+ sys_read_memory(contextId: $contextId, section: $section, priority: $priority, recentHours: $recentHours, numResults: $numResults) {
  result
  tool
  warnings
@@ -69,20 +69,20 @@ export async function manageMemory(contextId: string,
  }
  
  export async function readMemory(contextId: string,
- aiName: string,
  section: MemorySection,
  priority: number = 0,
  recentHours: number = 0,
- numResults: number = 0
+ numResults: number = 0,
+ stripMetadata: boolean = false
  ) {
  
  const variables: CortexVariables = {
  section,
  contextId,
- aiName,
  priority,
  recentHours,
- numResults
+ numResults,
+ stripMetadata
  }
  
  const res = await getCortexResponse(variables, READ_MEMORY);
@@ -56,6 +56,7 @@ export type CortexVariables = {
  priority?: number;
  recentHours?: number;
  numResults?: number;
+ stripMetadata?: boolean;
  }
  
  function truncateBody(body: any): string {
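With `aiName` dropped and `stripMetadata` appended, `readMemory` is now called positionally as `(contextId, section, priority, recentHours, numResults, stripMetadata)`, matching the SocketServer call sites above. An illustrative call (the contextId value is an example):

```js
// Fetch up to 10 recent memoryTopics entries, keeping metadata intact.
const topics = await readMemory("user-123", "memoryTopics", 0, 0, 10, false);
```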
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@aj-archipelago/cortex",
3
- "version": "1.3.18",
3
+ "version": "1.3.20",
4
4
  "description": "Cortex is a GraphQL API for AI. It provides a simple, extensible interface for using AI services from OpenAI, Azure and others.",
5
5
  "private": false,
6
6
  "repository": {
@@ -57,7 +57,15 @@ export default {
  
  const result = await callPathway(generatorPathway, newArgs, resolver);
  
- return args.stream ? "" : result;
+ if (args.stream) {
+ return "";
+ }
+
+ if (!result) {
+ result = await callPathway('sys_generator_error', { ...args, text: `Tried to use a tool (${generatorPathway}), but no result was returned`, stream: false }, resolver);
+ }
+
+ return result;
  
  } catch (e) {
  resolver.logError(e.message ?? e);
@@ -15,7 +15,7 @@ export default {
  aiName: "Jarvis",
  language: "English",
  },
- model: 'oai-o1',
+ model: 'oai-o3-mini',
  useInputChunking: false,
  enableDuplicateRequests: false,
  timeout: 600,
1
- // sys_openai_chat_o1_mini.js
1
+ // sys_openai_chat_o3_mini.js
2
2
 
3
3
  import { Prompt } from '../../../server/prompt.js';
4
4
 
@@ -12,8 +12,8 @@ export default {
  inputParameters: {
  messages: [],
  },
- model: 'oai-o1-mini',
+ model: 'oai-o3-mini',
  useInputChunking: false,
- emulateOpenAIChatModel: 'o1-mini',
+ emulateOpenAIChatModel: 'o3-mini',
  enableDuplicateRequests: false,
  }
@@ -56,19 +56,33 @@ class AzureVideoTranslatePlugin extends ModelPlugin {
  throw new Error("File handler URL is not configured");
  }
  
- // Use the file handler's fetch endpoint
- const response = await axios.get(fileHandlerUrl, {
- params: {
+ // Start heartbeat progress updates
+ const heartbeat = setInterval(() => {
+ publishRequestProgress({
  requestId: this.requestId,
- fetch: videoUrl
+ progress: 0,
+ info: 'Uploading and processing video...'
+ });
+ }, 5000);
+
+ try {
+ // Start the fetch request
+ const response = await axios.get(fileHandlerUrl, {
+ params: {
+ requestId: this.requestId,
+ fetch: videoUrl
+ }
+ });
+
+ if (!response.data?.url) {
+ throw new Error("File handler did not return a valid URL")
  }
- });
  
- if (!response.data?.url) {
- throw new Error("File handler did not return a valid URL");
+ return response.data.url;
+ } finally {
+ // Always clear the heartbeat interval
+ clearInterval(heartbeat);
  }
-
- return response.data.url;
  } catch (error) {
  logger.error(`Failed to upload video to file handler: ${error.message}`);
  if (error.response?.data) {