@aj-archipelago/cortex 1.0.24 → 1.1.0-beta.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. package/.eslintrc +1 -1
  2. package/config.js +36 -7
  3. package/helper_apps/{MediaFileChunker → CortexFileHandler}/blobHandler.js +27 -1
  4. package/helper_apps/{MediaFileChunker → CortexFileHandler}/fileChunker.js +0 -1
  5. package/helper_apps/{MediaFileChunker → CortexFileHandler}/index.js +22 -2
  6. package/helper_apps/{MediaFileChunker → CortexFileHandler}/localFileHandler.js +38 -2
  7. package/helper_apps/{MediaFileChunker → CortexFileHandler}/package-lock.json +1 -24
  8. package/lib/keyValueStorageClient.js +2 -5
  9. package/lib/logger.js +29 -0
  10. package/lib/redisSubscription.js +112 -28
  11. package/lib/request.js +62 -27
  12. package/package.json +3 -2
  13. package/pathways/index.js +3 -1
  14. package/pathways/transcribe.js +4 -0
  15. package/pathways/vision.js +18 -0
  16. package/server/chunker.js +46 -6
  17. package/server/graphql.js +12 -4
  18. package/server/pathwayPrompter.js +4 -0
  19. package/server/pathwayResolver.js +23 -28
  20. package/server/plugins/azureCognitivePlugin.js +5 -4
  21. package/server/plugins/azureTranslatePlugin.js +2 -2
  22. package/server/plugins/localModelPlugin.js +3 -3
  23. package/server/plugins/modelPlugin.js +20 -8
  24. package/server/plugins/openAiChatPlugin.js +21 -7
  25. package/server/plugins/openAiCompletionPlugin.js +9 -3
  26. package/server/plugins/openAiDallE3Plugin.js +6 -7
  27. package/server/plugins/openAiImagePlugin.js +4 -7
  28. package/server/plugins/openAiVisionPlugin.js +35 -0
  29. package/server/plugins/openAiWhisperPlugin.js +41 -24
  30. package/server/plugins/palmChatPlugin.js +14 -6
  31. package/server/plugins/palmCompletionPlugin.js +10 -3
  32. package/server/pubsub.js +1 -0
  33. package/server/rest.js +7 -6
  34. package/server/subscriptions.js +3 -15
  35. package/server/typeDef.js +10 -6
  36. package/tests/main.test.js +157 -0
  37. package/tests/modelPlugin.test.js +1 -1
  38. package/tests/palmChatPlugin.test.js +0 -38
  39. package/tests/palmCompletionPlugin.test.js +0 -30
  40. package/helper_apps/HealthCheck/.funcignore +0 -10
  41. package/helper_apps/HealthCheck/host.json +0 -15
  42. package/helper_apps/HealthCheck/package-lock.json +0 -142
  43. package/helper_apps/HealthCheck/package.json +0 -14
  44. package/helper_apps/HealthCheck/src/functions/timerTrigger.js +0 -13
  45. package/helper_apps/HealthCheck/src/transcribeHealthCheck.js +0 -93
  46. package/helper_apps/WhisperX/.dockerignore +0 -27
  47. package/helper_apps/WhisperX/Dockerfile +0 -32
  48. package/helper_apps/WhisperX/app.py +0 -104
  49. package/helper_apps/WhisperX/docker-compose.debug.yml +0 -12
  50. package/helper_apps/WhisperX/docker-compose.yml +0 -10
  51. package/helper_apps/WhisperX/requirements.txt +0 -5
  52. /package/helper_apps/{MediaFileChunker → CortexFileHandler}/Dockerfile +0 -0
  53. /package/helper_apps/{MediaFileChunker → CortexFileHandler}/docHelper.js +0 -0
  54. /package/helper_apps/{MediaFileChunker → CortexFileHandler}/function.json +0 -0
  55. /package/helper_apps/{MediaFileChunker → CortexFileHandler}/helper.js +0 -0
  56. /package/helper_apps/{MediaFileChunker → CortexFileHandler}/package.json +0 -0
  57. /package/helper_apps/{MediaFileChunker → CortexFileHandler}/redis.js +0 -0
  58. /package/helper_apps/{MediaFileChunker → CortexFileHandler}/start.js +0 -0
package/.eslintrc CHANGED
@@ -16,7 +16,7 @@
   ],
   "rules": {
     "import/no-unresolved": "error",
-    "import/no-extraneous-dependencies": ["error", {"devDependencies": true}],
+    "import/no-extraneous-dependencies": ["error", {"devDependencies": true, "dependencies": true}],
     "no-unused-vars": ["error", { "argsIgnorePattern": "^_" }]
   },
   "settings": {
package/config.js CHANGED
@@ -4,11 +4,22 @@ import HandleBars from './lib/handleBars.js';
 import fs from 'fs';
 import { fileURLToPath, pathToFileURL } from 'url';
 import GcpAuthTokenHelper from './lib/gcpAuthTokenHelper.js';
+import logger from './lib/logger.js';
 
 const __dirname = path.dirname(fileURLToPath(import.meta.url));
 
 // Schema for config
 var config = convict({
+    env: {
+        format: String,
+        default: 'development',
+        env: 'NODE_ENV'
+    },
+    cortexId: {
+        format: String,
+        default: 'local',
+        env: 'CORTEX_ID'
+    },
     basePathwayPath: {
         format: String,
         default: path.join(__dirname, 'pathways', 'basePathway.js'),
@@ -109,6 +120,20 @@ var config = convict({
             },
             "maxTokenLength": 8192,
         },
+        "oai-gpt4-vision": {
+            "type": "OPENAI-VISION",
+            "url": "https://api.openai.com/v1/chat/completions",
+            "headers": {
+                "Authorization": "Bearer {{OPENAI_API_KEY}}",
+                "Content-Type": "application/json"
+            },
+            "params": {
+                "model": "gpt-4-vision-preview"
+            },
+            "requestsPerSecond": 1,
+            "maxTokenLength": 128000,
+            "supportsStreaming": true
+        },
     },
     env: 'CORTEX_MODELS'
 },
@@ -176,14 +201,16 @@ const configFile = config.get('cortexConfigFile');
 
 // Load config file
 if (configFile && fs.existsSync(configFile)) {
-    console.log('Loading config from', configFile);
+    logger.info(`Loading config from ${configFile}`);
     config.loadFile(configFile);
 } else {
     const openaiApiKey = config.get('openaiApiKey');
     if (!openaiApiKey) {
-        throw console.log('No config file or api key specified. Please set the OPENAI_API_KEY to use OAI or use CORTEX_CONFIG_FILE environment variable to point at the Cortex configuration for your project.');
+        const errorString = 'No config file or api key specified. Please set the OPENAI_API_KEY to use OAI or use CORTEX_CONFIG_FILE environment variable to point at the Cortex configuration for your project.';
+        logger.error(errorString);
+        throw new Error(errorString);
     } else {
-        console.log(`Using default model with OPENAI_API_KEY environment variable`)
+        logger.info(`Using default model with OPENAI_API_KEY environment variable`)
     }
 }
 
@@ -204,12 +231,12 @@ const buildPathways = async (config) => {
     const basePathway = await import(basePathwayURL).then(module => module.default);
 
     // Load core pathways, default from the Cortex package
-    console.log('Loading core pathways from', corePathwaysPath)
+    logger.info(`Loading core pathways from ${corePathwaysPath}`)
     let loadedPathways = await import(`${corePathwaysURL}/index.js`).then(module => module);
 
     // Load custom pathways and override core pathways if same
     if (pathwaysPath && fs.existsSync(pathwaysPath)) {
-        console.log('Loading custom pathways from', pathwaysPath)
+        logger.info(`Loading custom pathways from ${pathwaysPath}`)
        const customPathways = await import(`${pathwaysURL}/index.js`).then(module => module);
        loadedPathways = { ...loadedPathways, ...customPathways };
     }
@@ -244,12 +271,14 @@ const buildModels = (config) => {
 
     // Check that models are specified, Cortex cannot run without a model
     if (Object.keys(config.get('models')).length <= 0) {
-        throw console.log('No models specified! Please set the models in your config file or via CORTEX_MODELS environment variable to point at the models for your project.');
+        const errorString = 'No models specified! Please set the models in your config file or via CORTEX_MODELS environment variable to point at the models for your project.';
+        logger.error(errorString);
+        throw new Error(errorString);
     }
 
     // Set default model name to the first model in the config in case no default is specified
     if (!config.get('defaultModelName')) {
-        console.log('No default model specified, using first model as default.');
+        logger.warn('No default model specified, using first model as default.');
         config.load({ defaultModelName: Object.keys(config.get('models'))[0] });
     }
 
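
The two new top-level settings map directly to environment variables, and the built-in model list gains an oai-gpt4-vision entry. A minimal usage sketch of the new config keys follows; the values and the launch command are illustrative, not taken from the package:

// Hypothetical check of the new settings, e.g. launched as:
// NODE_ENV=production CORTEX_ID=prod-east node index.js
import { config } from './config.js';

console.log(config.get('env'));      // 'production' (defaults to 'development')
console.log(config.get('cortexId')); // 'prod-east' (defaults to 'local')
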
package/helper_apps/{MediaFileChunker → CortexFileHandler}/blobHandler.js CHANGED
@@ -145,6 +145,32 @@ async function uploadBlob(context, req, saveToLocal = false) {
     });
 }
 
+// Function to delete files that haven't been used in more than a month
+async function cleanup() {
+    const { containerClient } = getBlobClient();
+
+    // List all the blobs in the container
+    const blobs = containerClient.listBlobsFlat();
+
+    // Calculate the date that is x month ago
+    const xMonthAgo = new Date();
+    xMonthAgo.setMonth(xMonthAgo.getMonth() - 1);
+
+    // Iterate through the blobs
+    for await (const blob of blobs) {
+        // Get the last modified date of the blob
+        const lastModified = blob.properties.lastModified;
+
+        // Compare the last modified date with one month ago
+        if (lastModified < xMonthAgo) {
+            // Delete the blob
+            const blockBlobClient = containerClient.getBlockBlobClient(blob.name);
+            await blockBlobClient.delete();
+            console.log(`Cleaned blob: ${blob.name}`);
+        }
+    }
+}
+
 export {
-    saveFileToBlob, deleteBlob, uploadBlob
+    saveFileToBlob, deleteBlob, uploadBlob, cleanup
 }
package/helper_apps/{MediaFileChunker → CortexFileHandler}/fileChunker.js CHANGED
@@ -3,7 +3,6 @@ import path from 'path';
 import ffmpeg from 'fluent-ffmpeg';
 import { v4 as uuidv4 } from 'uuid';
 import os from 'os';
-import ytdl from 'ytdl-core';
 import { promisify } from 'util';
 import axios from 'axios';
 import { ensureEncoded } from './helper.js';
package/helper_apps/{MediaFileChunker → CortexFileHandler}/index.js CHANGED
@@ -1,8 +1,8 @@
 import { downloadFile, processYoutubeUrl, splitMediaFile } from './fileChunker.js';
-import { saveFileToBlob, deleteBlob, uploadBlob } from './blobHandler.js';
+import { saveFileToBlob, deleteBlob, uploadBlob, cleanup } from './blobHandler.js';
 import { publishRequestProgress } from './redis.js';
 import { deleteTempPath, ensureEncoded, isValidYoutubeUrl } from './helper.js';
-import { moveFileToPublicFolder, deleteFolder } from './localFileHandler.js';
+import { moveFileToPublicFolder, deleteFolder, cleanupLocal } from './localFileHandler.js';
 import { documentToText, easyChunker } from './docHelper.js';
 import path from 'path';
 import os from 'os';
@@ -15,9 +15,29 @@ const useAzure = process.env.AZURE_STORAGE_CONNECTION_STRING ? true : false;
 console.log(useAzure ? 'Using Azure Storage' : 'Using local file system');
 
 
+let isCleanupRunning = false;
+async function cleanupInactive(useAzure) {
+    try {
+        if (isCleanupRunning) { return; } //no need to cleanup every call
+        isCleanupRunning = true;
+        if (useAzure) {
+            await cleanup();
+        } else {
+            await cleanupLocal();
+        }
+    } catch (error) {
+        console.log('Error occurred during cleanup:', error);
+    } finally{
+        isCleanupRunning = false;
+    }
+}
+
+
 async function main(context, req) {
     context.log('Starting req processing..');
 
+    cleanupInactive(useAzure); //trigger & no need to wait for it
+
     // Clean up blob when request delete which means processing marked completed
     if (req.method.toLowerCase() === `delete`) {
         const { requestId } = req.query;
package/helper_apps/{MediaFileChunker → CortexFileHandler}/localFileHandler.js CHANGED
@@ -4,7 +4,6 @@ import { v4 as uuidv4 } from 'uuid';
 
 import { publicFolder, port, ipAddress } from "./start.js";
 
-
 async function moveFileToPublicFolder(chunkPath, requestId) {
     // Use the filename with a UUID as the blob name
     const filename = `${requestId}/${uuidv4()}_${basename(chunkPath)}`;
@@ -30,7 +29,44 @@ async function deleteFolder(requestId) {
     console.log(`Cleaned folder: ${targetFolder}`);
 }
 
+async function cleanupLocal() {
+    try {
+        // Read the directory
+        const items = await fs.readdir(publicFolder);
+
+        // Calculate the date that is x months ago
+        const monthsAgo = new Date();
+        monthsAgo.setMonth(monthsAgo.getMonth() - 1);
+
+        // Iterate through the items
+        for (const item of items) {
+            const itemPath = join(publicFolder, item);
+
+            // Get the stats of the item
+            const stats = await fs.stat(itemPath);
+
+            // Check if the item is a file or a directory
+            const isDirectory = stats.isDirectory();
+
+            // Compare the last modified date with three months ago
+            if (stats.mtime < monthsAgo) {
+                if (isDirectory) {
+                    // If it's a directory, delete it recursively
+                    await fs.rm(itemPath, { recursive: true });
+                    console.log(`Cleaned directory: ${item}`);
+                } else {
+                    // If it's a file, delete it
+                    await fs.unlink(itemPath);
+                    console.log(`Cleaned file: ${item}`);
+                }
+            }
+        }
+    } catch (error) {
+        console.error(`Error cleaning up files: ${error}`);
+    }
+}
+
 
 export {
-    moveFileToPublicFolder, deleteFolder
+    moveFileToPublicFolder, deleteFolder, cleanupLocal
 };
package/helper_apps/{MediaFileChunker → CortexFileHandler}/package-lock.json CHANGED
@@ -21,8 +21,7 @@
         "pdfjs-dist": "^3.9.179",
         "public-ip": "^6.0.1",
         "uuid": "^9.0.0",
-        "xlsx": "^0.18.5",
-        "ytdl-core": "git+ssh://git@github.com:khlevon/node-ytdl-core.git#v4.11.4-patch.2"
+        "xlsx": "^0.18.5"
       }
     },
     "node_modules/@azure/abort-controller": {
@@ -2545,19 +2544,6 @@
       "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
       "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==",
       "optional": true
-    },
-    "node_modules/ytdl-core": {
-      "version": "0.0.0-development",
-      "resolved": "git+ssh://git@github.com/khlevon/node-ytdl-core.git#87450450caabb91f81afa6e66758bf2f629664a1",
-      "license": "MIT",
-      "dependencies": {
-        "m3u8stream": "^0.8.6",
-        "miniget": "^4.2.2",
-        "sax": "^1.1.3"
-      },
-      "engines": {
-        "node": ">=12"
-      }
     }
   },
   "dependencies": {
@@ -4452,15 +4438,6 @@
      "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
      "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==",
      "optional": true
-    },
-    "ytdl-core": {
-      "version": "git+ssh://git@github.com/khlevon/node-ytdl-core.git#87450450caabb91f81afa6e66758bf2f629664a1",
-      "from": "ytdl-core@git+ssh://git@github.com:khlevon/node-ytdl-core.git#v4.11.4-patch.2",
-      "requires": {
-        "m3u8stream": "^0.8.6",
-        "miniget": "^4.2.2",
-        "sax": "^1.1.3"
-      }
    }
   }
 }
package/lib/keyValueStorageClient.js CHANGED
@@ -2,10 +2,7 @@ import Keyv from 'keyv';
 import { config } from '../config.js';
 
 const storageConnectionString = config.get('storageConnectionString');
-
-if (!config.get('storageConnectionString')) {
-    console.log('No storageConnectionString specified. Please set the storageConnectionString or STORAGE_CONNECTION_STRING environment variable if you need caching or stored context.')
-}
+const cortexId = config.get('cortexId');
 
 // Create a keyv client to store data
 const keyValueStorageClient = new Keyv(storageConnectionString, {
@@ -13,7 +10,7 @@ const keyValueStorageClient = new Keyv(storageConnectionString, {
     abortConnect: false,
     serialize: JSON.stringify,
     deserialize: JSON.parse,
-    namespace: 'cortex-context'
+    namespace: `${cortexId}-cortex-context`
 });
 
 // Set values to keyv
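
The practical effect of the namespace change is that separate Cortex instances sharing one storage backend no longer read each other's cached context, because Keyv prefixes every key with its namespace. A rough sketch of that isolation, assuming a Redis-backed Keyv store; the connection string and key names are illustrative:

// Two clients with different CORTEX_ID-derived namespaces do not see each other's values
import Keyv from 'keyv';

const a = new Keyv('redis://localhost:6379', { namespace: 'local-cortex-context' });
const b = new Keyv('redis://localhost:6379', { namespace: 'prod-cortex-context' });

await a.set('chatContext', { history: [] });
console.log(await b.get('chatContext')); // undefined: isolated by namespace
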
package/lib/logger.js ADDED
@@ -0,0 +1,29 @@
+// logger.js
+import winston from 'winston';
+
+const format = winston.format.combine(
+    //winston.format.timestamp(),
+    winston.format.colorize({ all: true }),
+    winston.format.simple()
+);
+
+const transports = [
+    new winston.transports.Console({ format })
+];
+
+const logger = winston.createLogger({
+    level: process.env.NODE_ENV === 'production' ? 'info' : 'debug',
+    format: format,
+    transports: transports
+});
+
+winston.addColors({
+    debug: 'green',
+    verbose: 'blue',
+    http: 'gray',
+    info: 'cyan',
+    warn: 'yellow',
+    error: 'red'
+});
+
+export default logger;
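
This Winston-based logger backs the console.log replacements throughout the release, and debug output is suppressed when NODE_ENV is 'production'. A small usage sketch; the messages and path are illustrative:

import logger from './lib/logger.js';

logger.debug('only shown outside production');
logger.info('Loading config from ./cortex-config.json');    // path is illustrative
logger.warn('No default model specified, using first model as default.');
logger.error('Redis connection error: connection refused'); // message is illustrative
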
package/lib/redisSubscription.js CHANGED
@@ -1,51 +1,135 @@
 import Redis from 'ioredis';
 import { config } from '../config.js';
 import pubsub from '../server/pubsub.js';
+import { requestState } from '../server/requestState.js';
+import logger from '../lib/logger.js';
 
 const connectionString = config.get('storageConnectionString');
-const client = new Redis(connectionString);
+const channels = ['requestProgress', 'requestProgressSubscriptions'];
+let client;
 
-const channel = 'requestProgress';
+if (connectionString) {
+    logger.info(`Using Redis subscription for channel(s) ${channels.join(', ')}`);
+    try {
+        client = connectionString && new Redis(connectionString);
+    } catch (error) {
+        logger.error(`Redis connection error: ${JSON.stringify(error)}`);
+    }
 
-client.on('error', (error) => {
-    console.error(`Redis client error: ${error}`);
-});
+    if (client) {
 
-client.on('connect', () => {
-    client.subscribe(channel, (error) => {
-        if (error) {
-            console.error(`Error subscribing to channel ${channel}: ${error}`);
-        } else {
-            console.log(`Subscribed to channel ${channel}`);
-        }
-    });
-});
+        client.on('error', (error) => {
+            logger.error(`Redis client error: ${JSON.stringify(error)}`);
+        });
+
+        client.on('connect', () => {
+            client.subscribe('requestProgress', (error) => {
+                if (error) {
+                    logger.error(`Error subscribing to redis channel requestProgress: ${JSON.stringify(error)}`);
+                } else {
+                    logger.info(`Subscribed to channel requestProgress`);
+                }
+            });
+            client.subscribe('requestProgressSubscriptions', (error) => {
+                if (error) {
+                    logger.error(`Error subscribing to redis channel requestProgressSubscriptions: ${JSON.stringify(error)}`);
+                } else {
+                    logger.info(`Subscribed to channel requestProgressSubscriptions`);
+                }
+            });
+        });
+
+        client.on('message', (channel, message) => {
+            if (channel === 'requestProgress') {
+                logger.debug(`Received message from ${channel}: ${message}`);
+                let parsedMessage;
+
+                try {
+                    parsedMessage = JSON.parse(message);
+                } catch (error) {
+                    parsedMessage = message;
+                }
+
+                pubsubHandleMessage(parsedMessage);
+            } else {
+                if (channel === 'requestProgressSubscriptions') {
+                    logger.debug(`Received message from ${channel}: ${message}`);
+                    let parsedMessage;
+
+                    try {
+                        parsedMessage = JSON.parse(message);
+                    } catch (error) {
+                        parsedMessage = message;
+                    }
+
+                    handleSubscription(parsedMessage);
+                }
+            }
+        });
+    }
+}
 
-client.on('message', (channel, message) => {
-    if (channel === 'requestProgress') {
-        console.log(`Received message from ${channel}: ${message}`);
-        let parsedMessage;
 
+let publisherClient;
+
+if (connectionString) {
+    logger.info(`Using Redis publish for channel(s) ${channels.join(', ')}`);
+    publisherClient = Redis.createClient(connectionString);
+} else {
+    logger.info(`Using pubsub publish for channel ${channels[0]}`);
+}
+
+async function publishRequestProgress(data) {
+    if (publisherClient) {
        try {
-            parsedMessage = JSON.parse(message);
+            const message = JSON.stringify(data);
+            logger.debug(`Publishing message ${message} to channel ${channels[0]}`);
+            await publisherClient.publish(channels[0], message);
        } catch (error) {
-            parsedMessage = message;
+            logger.error(`Error publishing message: ${JSON.stringify(error)}`);
        }
+    } else {
+        pubsubHandleMessage(data);
+    }
+}
 
-        handleMessage(parsedMessage);
+async function publishRequestProgressSubscription(data) {
+    if (publisherClient) {
+        try {
+            const message = JSON.stringify(data);
+            logger.debug(`Publishing message ${message} to channel ${channels[1]}`);
+            await publisherClient.publish(channels[1], message);
+        } catch (error) {
+            logger.error(`Error publishing message: ${JSON.stringify(error)}`);
+        }
+    } else {
+        handleSubscription(data);
    }
-});
+}
 
-const handleMessage = (data) => {
-    // Process the received data
-    console.log('Processing data:', data);
+function pubsubHandleMessage(data){
+    const message = JSON.stringify(data);
+    logger.debug(`Publishing message to pubsub: ${message}`);
    try {
        pubsub.publish('REQUEST_PROGRESS', { requestProgress: data });
    } catch (error) {
-        console.error(`Error publishing data to pubsub: ${error}`);
+        logger.error(`Error publishing data to pubsub: ${JSON.stringify(error)}`);
    }
-};
+}
+
+function handleSubscription(data){
+    const requestIds = data;
+    for (const requestId of requestIds) {
+        if (requestState[requestId] && !requestState[requestId].started) {
+            requestState[requestId].started = true;
+            logger.info(`Subscription starting async requestProgress, requestId: ${requestId}`);
+            const { resolver, args } = requestState[requestId];
+            resolver(args);
+        }
+    }
+}
+
 
 export {
-    client as subscriptionClient,
+    client as subscriptionClient, publishRequestProgress, publishRequestProgressSubscription
 };
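
Overall, the rewrite makes Redis optional: when STORAGE_CONNECTION_STRING is set, progress events flow through the requestProgress and requestProgressSubscriptions channels; when it is not, they are handed straight to the in-process GraphQL pubsub. A rough sketch of the publishing side as a caller might use it; the request id and payload fields are illustrative, not a documented schema:

import { publishRequestProgress, publishRequestProgressSubscription } from './lib/redisSubscription.js';

// Push an incremental result to any subscriber listening on REQUEST_PROGRESS
await publishRequestProgress({ requestId: 'abc-123', progress: 0.5, data: 'partial output' });

// Signal workers holding a queued resolver for these request ids to start them
await publishRequestProgressSubscription(['abc-123']);
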