@aj-archipelago/cortex 1.1.21 → 1.1.23

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (88)
  1. package/config/default.example.json +84 -0
  2. package/config.js +5 -4
  3. package/helper-apps/cortex-file-handler/blobHandler.js +115 -98
  4. package/helper-apps/cortex-file-handler/fileChunker.js +15 -10
  5. package/helper-apps/cortex-file-handler/index.js +48 -2
  6. package/helper-apps/cortex-file-handler/package-lock.json +226 -53
  7. package/helper-apps/cortex-file-handler/package.json +3 -3
  8. package/package.json +2 -1
  9. package/pathways/categorize.js +23 -0
  10. package/pathways/chat.js +1 -1
  11. package/pathways/chat_code.js +19 -0
  12. package/pathways/chat_context.js +19 -0
  13. package/pathways/chat_jarvis.js +19 -0
  14. package/pathways/chat_persist.js +23 -0
  15. package/pathways/code_review.js +17 -0
  16. package/pathways/cognitive_delete.js +2 -1
  17. package/pathways/cognitive_insert.js +1 -0
  18. package/pathways/cognitive_search.js +1 -0
  19. package/pathways/embeddings.js +1 -1
  20. package/pathways/expand_story.js +12 -0
  21. package/pathways/format_paragraph_turbo.js +16 -0
  22. package/pathways/format_summarization.js +21 -0
  23. package/pathways/gemini_15_vision.js +20 -0
  24. package/pathways/gemini_vision.js +20 -0
  25. package/pathways/grammar.js +30 -0
  26. package/pathways/hashtags.js +19 -0
  27. package/pathways/headline.js +43 -0
  28. package/pathways/headline_custom.js +169 -0
  29. package/pathways/highlights.js +22 -0
  30. package/pathways/image.js +2 -1
  31. package/pathways/index.js +111 -17
  32. package/pathways/jira_story.js +18 -0
  33. package/pathways/keywords.js +4 -0
  34. package/pathways/language.js +17 -6
  35. package/pathways/locations.js +93 -0
  36. package/pathways/quotes.js +19 -0
  37. package/pathways/rag.js +207 -0
  38. package/pathways/rag_jarvis.js +254 -0
  39. package/pathways/rag_search_helper.js +21 -0
  40. package/pathways/readme.js +18 -0
  41. package/pathways/release_notes.js +16 -0
  42. package/pathways/remove_content.js +31 -0
  43. package/pathways/retrieval.js +23 -0
  44. package/pathways/run_claude35_sonnet.js +21 -0
  45. package/pathways/run_claude3_haiku.js +20 -0
  46. package/pathways/run_gpt35turbo.js +20 -0
  47. package/pathways/run_gpt4.js +20 -0
  48. package/pathways/run_gpt4_32.js +20 -0
  49. package/pathways/select_extension.js +6 -0
  50. package/pathways/select_services.js +10 -0
  51. package/pathways/spelling.js +3 -0
  52. package/pathways/story_angles.js +13 -0
  53. package/pathways/styleguide/styleguide.js +221 -0
  54. package/pathways/styleguidemulti.js +127 -0
  55. package/pathways/subhead.js +48 -0
  56. package/pathways/summarize_turbo.js +98 -0
  57. package/pathways/summary.js +31 -12
  58. package/pathways/sys_claude_35_sonnet.js +19 -0
  59. package/pathways/sys_claude_3_haiku.js +19 -0
  60. package/pathways/sys_google_chat.js +19 -0
  61. package/pathways/sys_google_code_chat.js +19 -0
  62. package/pathways/sys_google_gemini_chat.js +23 -0
  63. package/pathways/sys_openai_chat.js +2 -2
  64. package/pathways/sys_openai_chat_16.js +19 -0
  65. package/pathways/sys_openai_chat_gpt4.js +19 -0
  66. package/pathways/sys_openai_chat_gpt4_32.js +19 -0
  67. package/pathways/sys_openai_chat_gpt4_turbo.js +19 -0
  68. package/pathways/tags.js +25 -0
  69. package/pathways/taxonomy.js +135 -0
  70. package/pathways/timeline.js +51 -0
  71. package/pathways/topics.js +25 -0
  72. package/pathways/topics_sentiment.js +20 -0
  73. package/pathways/transcribe.js +2 -4
  74. package/pathways/translate.js +10 -12
  75. package/pathways/translate_azure.js +13 -0
  76. package/pathways/translate_context.js +21 -0
  77. package/pathways/translate_gpt4.js +19 -0
  78. package/pathways/translate_gpt4_turbo.js +19 -0
  79. package/pathways/translate_subtitle.js +201 -0
  80. package/pathways/translate_subtitle_helper.js +31 -0
  81. package/pathways/translate_turbo.js +19 -0
  82. package/pathways/vision.js +9 -7
  83. package/server/pathwayResolver.js +1 -1
  84. package/server/plugins/azureCognitivePlugin.js +10 -1
  85. package/server/plugins/openAiVisionPlugin.js +14 -6
  86. package/tests/main.test.js +62 -2
  87. package/tests/sublong.srt +4543 -0
  88. package/tests/vision.test.js +0 -34
package/config/default.example.json CHANGED
@@ -12,6 +12,62 @@
         "requestsPerSecond": 10,
         "maxTokenLength": 2000
     },
+
+    "gemini-pro-chat": {
+        "type": "GEMINI-CHAT",
+        "url": "https://us-central1-aiplatform.googleapis.com/v1/projects/project-id/locations/us-central1/publishers/google/models/gemini-pro:streamGenerateContent",
+        "headers": {
+            "Content-Type": "application/json"
+        },
+        "requestsPerSecond": 10,
+        "maxTokenLength": 32768,
+        "maxReturnTokens": 8192,
+        "supportsStreaming": true
+    },
+    "gemini-pro-vision": {
+        "type": "GEMINI-VISION",
+        "url": "https://us-central1-aiplatform.googleapis.com/v1/projects/project-id/locations/us-central1/publishers/google/models/gemini-pro-vision:streamGenerateContent",
+        "headers": {
+            "Content-Type": "application/json"
+        },
+        "requestsPerSecond": 10,
+        "maxTokenLength": 32768,
+        "maxReturnTokens": 2048,
+        "supportsStreaming": true
+    },
+    "gemini-pro-15-vision": {
+        "type": "GEMINI-VISION",
+        "url": "https://us-central1-aiplatform.googleapis.com/v1/projects/project-id/locations/us-central1/publishers/google/models/gemini-1.5-pro-preview-0215:streamGenerateContent",
+        "headers": {
+            "Content-Type": "application/json"
+        },
+        "requestsPerSecond": 10,
+        "maxTokenLength": 1048576,
+        "maxReturnTokens": 2048,
+        "supportsStreaming": true
+    },
+    "claude-3-haiku-vertex": {
+        "type": "CLAUDE-3-VERTEX",
+        "url": "https://us-central1-aiplatform.googleapis.com/v1/projects/project-id/locations/us-central1/publishers/anthropic/models/claude-3-haiku@20240307",
+        "headers": {
+            "Content-Type": "application/json"
+        },
+        "requestsPerSecond": 10,
+        "maxTokenLength": 200000,
+        "maxReturnTokens": 2048,
+        "supportsStreaming": true
+    },
+    "claude-35-sonnet-vertex": {
+        "type": "CLAUDE-3-VERTEX",
+        "url": "https://us-central1-aiplatform.googleapis.com/v1/projects/project-id/locations/us-central1/publishers/anthropic/models/claude-3-5-sonnet@20240229",
+        "headers": {
+            "Content-Type": "application/json"
+        },
+        "requestsPerSecond": 10,
+        "maxTokenLength": 200000,
+        "maxReturnTokens": 2048,
+        "supportsStreaming": true
+    },
     "oai-gpturbo": {
         "type": "OPENAI-CHAT",
         "url": "https://api.openai.com/v1/chat/completions",
@@ -38,6 +94,34 @@
         "requestsPerSecond": 10,
         "maxTokenLength": 8192
     },
+    "oai-gpt4-32": {
+        "type": "OPENAI-CHAT",
+        "url": "https://api.openai.com/v1/chat/completions",
+        "headers": {
+            "Authorization": "Bearer {{OPENAI_API_KEY}}",
+            "Content-Type": "application/json"
+        },
+        "params": {
+            "model": "gpt-4-32"
+        },
+        "requestsPerSecond": 10,
+        "maxTokenLength": 32768
+    },
+    "oai-gpt4o": {
+        "type": "OPENAI-VISION",
+        "url": "https://api.openai.com/v1/chat/completions",
+        "headers": {
+            "Authorization": "Bearer {{OPENAI_API_KEY}}",
+            "Content-Type": "application/json"
+        },
+        "params": {
+            "model": "gpt-4o"
+        },
+        "requestsPerSecond": 10,
+        "maxTokenLength": 131072,
+        "maxReturnTokens": 4096,
+        "supportsStreaming": true
+    },
     "palm-text": {
         "type": "PALM-COMPLETION",
         "url": "https://us-central1-aiplatform.googleapis.com/v1/projects/project-id/locations/us-central1/publishers/google/models/text-bison@001:predict",
package/config.js CHANGED
@@ -139,7 +139,7 @@ var config = convict({
         },
         "maxTokenLength": 8192,
     },
-    "oai-gpt4-vision": {
+    "oai-gpt4o": {
         "type": "OPENAI-VISION",
         "url": "https://api.openai.com/v1/chat/completions",
         "headers": {
@@ -147,10 +147,11 @@ var config = convict({
             "Content-Type": "application/json"
         },
         "params": {
-            "model": "gpt-4-vision-preview"
+            "model": "gpt-4o"
         },
-        "requestsPerSecond": 1,
-        "maxTokenLength": 128000,
+        "requestsPerSecond": 50,
+        "maxTokenLength": 131072,
+        "maxReturnTokens": 4096,
         "supportsStreaming": true
     },
     "azure-bing": {
package/helper-apps/cortex-file-handler/blobHandler.js CHANGED
@@ -36,6 +36,15 @@ const VIDEO_EXTENSIONS = [
     ".mkv",
 ];
 
+const AUDIO_EXTENSIONS = [
+    ".mp3",
+    ".wav",
+    ".ogg",
+    ".flac",
+    ".aac",
+    ".aiff",
+];
+
 function isBase64(str) {
     try {
         return btoa(atob(str)) == str;
@@ -162,131 +171,139 @@ async function deleteBlob(requestId) {
     return result;
 }
 
-async function uploadBlob(
-    context,
-    req,
-    saveToLocal = false,
-    useGoogle = false
-) {
+async function uploadBlob(context, req, saveToLocal = false, useGoogle = false, filePath=null) {
     return new Promise((resolve, reject) => {
         try {
-            const busboy = Busboy({ headers: req.headers });
             let requestId = uuidv4();
             let body = {};
 
-            busboy.on("field", (fieldname, value) => {
-                if (fieldname === "requestId") {
-                    requestId = value;
-                } else if (fieldname === "useGoogle") {
-                    useGoogle = value;
-                }
-            });
-
-            busboy.on("file", async (fieldname, file, info) => {
-                //do not use google if file is not image or video
-                const ext = path.extname(info.filename).toLowerCase();
-                const canUseGoogle = IMAGE_EXTENSIONS.includes(ext) || VIDEO_EXTENSIONS.includes(ext);
-                if(!canUseGoogle) {
-                    useGoogle = false;
-                }
+            // If filePath is given, we are dealing with local file and not form-data
+            if (filePath) {
+                const file = fs.createReadStream(filePath);
+                const filename = path.basename(filePath);
+                uploadFile(context, requestId, body, saveToLocal, useGoogle, file, filename, resolve)
+            } else {
+                // Otherwise, continue working with form-data
+                const busboy = Busboy({ headers: req.headers });
+
+                busboy.on("field", (fieldname, value) => {
+                    if (fieldname === "requestId") {
+                        requestId = value;
+                    } else if (fieldname === "useGoogle") {
+                        useGoogle = value;
+                    }
+                });
+
+                busboy.on("file", async (fieldname, file, filename) => {
+                    uploadFile(context, requestId, body, saveToLocal, useGoogle, file, filename?.filename || filename, resolve)
+                });
+
+                busboy.on("error", (error) => {
+                    context.log.error("Error processing file upload:", error);
+                    context.res = {
+                        status: 500,
+                        body: "Error processing file upload.",
+                    };
+                    reject(error); // Reject the promise
+                });
+
+                req.pipe(busboy);
+            }
+        } catch (error) {
+            context.log.error("Error processing file upload:", error);
+            context.res = {
+                status: 500,
+                body: "Error processing file upload.",
+            };
+            reject(error); // Reject the promise
+        }
+    });
+}
 
-                //check if useGoogle is set but no gcs and warn
-                if(useGoogle && useGoogle !== "false" && !gcs) {
-                    context.log.warn("Google Cloud Storage is not initialized reverting google upload ");
-                    useGoogle = false;
-                }
+async function uploadFile(context, requestId, body, saveToLocal, useGoogle, file, filename, resolve) {
+    // do not use Google if the file is not an image or video
+    const ext = path.extname(filename).toLowerCase();
+    const canUseGoogle = IMAGE_EXTENSIONS.includes(ext) || VIDEO_EXTENSIONS.includes(ext) || AUDIO_EXTENSIONS.includes(ext);
+    if (!canUseGoogle) {
+        useGoogle = false;
+    }
 
-                if (saveToLocal) {
-                    // Create the target folder if it doesn't exist
-                    const localPath = join(publicFolder, requestId);
-                    fs.mkdirSync(localPath, { recursive: true });
+    // check if useGoogle is set but no gcs and warn
+    if (useGoogle && useGoogle !== "false" && !gcs) {
+        context.log.warn("Google Cloud Storage is not initialized reverting google upload ");
+        useGoogle = false;
+    }
 
-                    const filename = encodeURIComponent(`${uuidv4()}_${info.filename}`);
-                    const destinationPath = `${localPath}/${filename}`;
+    const encodedFilename = encodeURIComponent(`${requestId || uuidv4()}_${filename}`);
 
-                    await pipeline(file, fs.createWriteStream(destinationPath));
 
-                    const message = `File '${filename}' saved to folder successfully.`;
-                    context.log(message);
+    if (saveToLocal) {
+        // create the target folder if it doesn't exist
+        const localPath = join(publicFolder, requestId);
+        fs.mkdirSync(localPath, { recursive: true });
 
-                    const url = `http://${ipAddress}:${port}/files/${requestId}/${filename}`;
+        const destinationPath = `${localPath}/${encodedFilename}`;
 
-                    body = { message, url };
+        await pipeline(file, fs.createWriteStream(destinationPath));
 
-                    resolve(body); // Resolve the promise
-                } else {
-                    const filename = encodeURIComponent(`${requestId}/${uuidv4()}_${info.filename}`);
-                    const { containerClient } = await getBlobClient();
+        const message = `File '${encodedFilename}' saved to folder successfully.`;
+        context.log(message);
 
-                    const contentType = mime.lookup(filename); // content type based on file extension
-                    const options = {};
-                    if (contentType) {
-                        options.blobHTTPHeaders = { blobContentType: contentType };
-                    }
+        const url = `http://${ipAddress}:${port}/files/${requestId}/${encodedFilename}`;
 
-                    const blockBlobClient = containerClient.getBlockBlobClient(filename);
+        body = { message, url };
 
-                    const passThroughStream = new PassThrough();
-                    file.pipe(passThroughStream);
+        resolve(body); // Resolve the promise
+    } else {
+        const { containerClient } = await getBlobClient();
 
-                    await blockBlobClient.uploadStream(passThroughStream, undefined, undefined, options);
+        const contentType = mime.lookup(encodedFilename); // content type based on file extension
+        const options = {};
+        if (contentType) {
+            options.blobHTTPHeaders = { blobContentType: contentType };
+        }
 
-                    const message = `File '${filename}' uploaded successfully.`;
-                    const url = blockBlobClient.url;
-                    context.log(message);
-                    body = { message, url };
-                }
+        const blockBlobClient = containerClient.getBlockBlobClient(encodedFilename);
 
-                context.res = {
-                    status: 200,
-                    body,
-                };
+        const passThroughStream = new PassThrough();
+        file.pipe(passThroughStream);
 
-                if (useGoogle && useGoogle !== "false") {
-                    const { url } = body;
-                    const filename = encodeURIComponent(`${requestId}/${uuidv4()}_${info.filename}`);
-                    const gcsFile = gcs.bucket(GCS_BUCKETNAME).file(filename);
-                    const writeStream = gcsFile.createWriteStream();
+        await blockBlobClient.uploadStream(passThroughStream, undefined, undefined, options);
 
-                    const response = await axios({
-                        method: "get",
-                        url: url,
-                        responseType: "stream",
-                    });
+        const message = `File '${encodedFilename}' uploaded successfully.`;
+        const url = blockBlobClient.url;
+        context.log(message);
+        body = { message, url };
+    }
 
-                    // Pipe the Axios response stream directly into the GCS Write Stream
-                    response.data.pipe(writeStream);
+    context.res = {
+        status: 200,
+        body,
+    };
 
-                    await new Promise((resolve, reject) => {
-                        writeStream.on("finish", resolve);
-                        writeStream.on("error", reject);
-                    });
+    if (useGoogle && useGoogle !== "false") {
+        const { url } = body;
+        const gcsFile = gcs.bucket(GCS_BUCKETNAME).file(encodedFilename);
+        const writeStream = gcsFile.createWriteStream();
 
-                    body.gcs = `gs://${GCS_BUCKETNAME}/${filename}`;
-                }
+        const response = await axios({
+            method: "get",
+            url: url,
+            responseType: "stream",
+        });
 
-                resolve(body); // Resolve the promise
-            });
+        // pipe the Axios response stream directly into the GCS Write Stream
+        response.data.pipe(writeStream);
 
-            busboy.on("error", (error) => {
-                context.log.error("Error processing file upload:", error);
-                context.res = {
-                    status: 500,
-                    body: "Error processing file upload.",
-                };
-                reject(error); // Reject the promise
-            });
+        await new Promise((resolve, reject) => {
+            writeStream.on("finish", resolve);
+            writeStream.on("error", reject);
+        });
 
-            req.pipe(busboy);
-        } catch (error) {
-            context.log.error("Error processing file upload:", error);
-            context.res = {
-                status: 500,
-                body: "Error processing file upload.",
-            };
-            reject(error); // Reject the promise
-        }
-    });
+        body.gcs = `gs://${GCS_BUCKETNAME}/${encodedFilename}`;
+    }
+
+    resolve(body); // Resolve the promise
 }
 
 // Function to delete files that haven't been used in more than a month
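The refactor above splits the old inline busboy handler into uploadBlob (transport: multipart form-data or, new in this version, a local filePath) and a shared uploadFile helper, and audio extensions now also qualify for the Google Cloud Storage mirror. A hedged usage sketch of the new local-file path, assuming uploadBlob is exported from blobHandler.js as the call site in index.js suggests (the context object and file path are illustrative stand-ins):

import { uploadBlob } from "./blobHandler.js";

// Stand-in for the Azure Functions context the handler normally receives:
// a log function that also carries warn/error methods.
const context = {
  log: Object.assign((...args) => console.log(...args), {
    warn: console.warn,
    error: console.error,
  }),
};

// saveToLocal=false -> upload to Azure Blob Storage (connection string must be configured);
// useGoogle=true    -> also mirror to GCS when the extension is image/video/audio and gcs is initialized.
const result = await uploadBlob(context, null, false, true, "/tmp/example-clip.mp4");
console.log(result.url, result.gcs); // gcs is only present when the GCS mirror ran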
package/helper-apps/cortex-file-handler/fileChunker.js CHANGED
@@ -6,7 +6,7 @@ import os from 'os';
 import { promisify } from 'util';
 import axios from 'axios';
 import { ensureEncoded } from './helper.js';
-import ytdl from 'ytdl-core';
+import ytdl from '@distube/ytdl-core';
 
 
 const ffmpegProbe = promisify(ffmpeg.ffprobe);
@@ -112,16 +112,20 @@ async function splitMediaFile(inputPath, chunkDurationInSeconds = 500) {
     }
 }
 
-const ytdlDownload = async (url, filename) => {
+const ytdlDownload = async (url, filename, video = false) => {
     return new Promise((resolve, reject) => {
-        const video = ytdl(url, { quality: 'highestaudio' });
+        const videoOptions = video
+            ? { filter: 'audioandvideo' } // audio and video
+            : { quality: 'highestaudio' }; // audio only
+
+        const videoStream = ytdl(url, videoOptions);
         let lastLoggedTime = Date.now();
 
-        video.on('error', (error) => {
+        videoStream.on('error', (error) => {
             reject(error);
         });
 
-        video.on('progress', (chunkLength, downloaded, total) => {
+        videoStream.on('progress', (chunkLength, downloaded, total) => {
            const currentTime = Date.now();
            if (currentTime - lastLoggedTime >= 2000) { // Log every 2 seconds
                const percent = downloaded / total;
@@ -130,7 +134,7 @@ const ytdlDownload = async (url, filename) => {
            }
        });
 
-        video.pipe(fs.createWriteStream(filename))
+        videoStream.pipe(fs.createWriteStream(filename))
            .on('finish', () => {
                resolve();
            })
@@ -140,14 +144,15 @@ const ytdlDownload = async (url, filename) => {
     });
 };
 
-const processYoutubeUrl = async (url) => {
+async function processYoutubeUrl(url, video=false) {
     try {
-        const outputFileName = path.join(os.tmpdir(), `${uuidv4()}.mp3`);
-        await ytdlDownload(url, outputFileName);
+        const outputFormat = video ? '.mp4' : '.mp3';
+        const outputFileName = path.join(os.tmpdir(), `${uuidv4()}${outputFormat}`);
+        await ytdlDownload(url, outputFileName, video);
        return outputFileName;
    } catch (e) {
        console.log(e);
-        throw e;
+        throw new Error(`Error processing YouTube video, YouTube downloader might be outdated or blocked. ${e.message}`);
    }
 }
 
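With the switch to @distube/ytdl-core, processYoutubeUrl also gains a video flag: audio-only .mp3 output by default, full audio+video .mp4 when true, and failures now surface as the friendlier "downloader might be outdated or blocked" error. A small sketch, assuming the function is exported from fileChunker.js (the URL is illustrative):

import { processYoutubeUrl } from "./fileChunker.js";

const url = "https://www.youtube.com/watch?v=dQw4w9WgXcQ";

const audioPath = await processYoutubeUrl(url);       // audio only  -> <tmpdir>/<uuid>.mp3
const videoPath = await processYoutubeUrl(url, true); // audio+video -> <tmpdir>/<uuid>.mp4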
package/helper-apps/cortex-file-handler/index.js CHANGED
@@ -10,6 +10,10 @@ import { v4 as uuidv4 } from 'uuid';
 import fs from 'fs';
 import http from 'http';
 import https from 'https';
+import axios from "axios";
+import { pipeline } from "stream";
+import { promisify } from "util";
+const pipelineUtility = promisify(pipeline); // To pipe streams using async/await
 
 const DOC_EXTENSIONS = [".txt", ".json", ".csv", ".md", ".xml", ".js", ".html", ".css", '.pdf', '.docx', '.xlsx', '.csv'];
 
@@ -116,7 +120,48 @@ async function main(context, req) {
         return;
     }
 
-    const { uri, requestId, save, hash, checkHash } = req.body?.params || req.query;
+    const { uri, requestId, save, hash, checkHash, fetch, load, restore } = req.body?.params || req.query;
+
+    const filepond = fetch || restore || load;
+    if (req.method.toLowerCase() === `get` && filepond) {
+        context.log(`Remote file: ${filepond}`);
+        // Check if file already exists (using hash as the key)
+        const exists = await getFileStoreMap(filepond);
+        if(exists){
+            context.res = {
+                status: 200,
+                body: exists // existing file URL
+            };
+            return;
+        }
+
+        // Check if it's a youtube url
+        let youtubeDownloadedFile = null;
+        if(isValidYoutubeUrl(filepond)){
+            youtubeDownloadedFile = await processYoutubeUrl(filepond, true);
+        }
+        const filename = path.join(os.tmpdir(), path.basename(youtubeDownloadedFile || filepond));
+        // Download the remote file to a local/temporary location keep name & ext
+        if(!youtubeDownloadedFile){
+            const response = await axios.get(filepond, { responseType: "stream" });
+            await pipelineUtility(response.data, fs.createWriteStream(filename));
+        }
+
+
+        const res = await uploadBlob(context, null, !useAzure, true, filename);
+        context.log(`File uploaded: ${JSON.stringify(res)}`);
+
+        //Update Redis (using hash as the key)
+        await setFileStoreMap(filepond, res);
+
+        // Return the file URL
+        context.res = {
+            status: 200,
+            body: res,
+        };
+
+        return;
+    }
 
     if(hash && checkHash){ //check if hash exists
         context.log(`Checking hash: ${hash}`);
@@ -229,7 +274,8 @@ async function main(context, req) {
 
     if (isYoutubeUrl) {
         // totalCount += 1; // extra 1 step for youtube download
-        file = await processYoutubeUrl(file);
+        const processAsVideo = req.body?.params?.processAsVideo || req.query?.processAsVideo;
+        file = await processYoutubeUrl(file, processAsVideo);
     }
 
     const { chunkPromises, chunkOffsets, uniqueOutputPath } = await splitMediaFile(file);
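Taken together, the index.js changes add a FilePond-style GET branch: when the request carries a fetch, load, or restore query parameter, the handler returns the cached entry from the file-store map if one exists, otherwise downloads the remote file (or the full video for YouTube URLs), re-uploads it through uploadBlob, caches the mapping, and returns the result. A hedged client-side sketch; the handler's base URL below is a placeholder, not a documented endpoint:

import axios from "axios";

// Placeholder URL: wherever the cortex-file-handler function is actually hosted.
const CORTEX_FILE_HANDLER_URL = "http://localhost:7071/api/CortexFileHandler";

async function ingestRemoteFile(remoteUrl) {
  const { data } = await axios.get(CORTEX_FILE_HANDLER_URL, {
    params: { fetch: remoteUrl }, // `load` or `restore` are treated the same way
  });
  return data; // either the cached entry or the { message, url, gcs? } result from uploadBlob
}

const result = await ingestRemoteFile("https://example.com/media/talk.mp4");
console.log(result);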