@aj-archipelago/cortex 1.1.4-0 → 1.1.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -422,7 +422,7 @@ Configuration of Cortex is done via a [convict](https://github.com/mozilla/node-
  - `enableCache`: A boolean flag indicating whether to enable Axios-level request caching. Default is true. The value can be set using the `CORTEX_ENABLE_CACHE` environment variable.
  - `enableGraphqlCache`: A boolean flag indicating whether to enable GraphQL query caching. Default is false. The value can be set using the `CORTEX_ENABLE_GRAPHQL_CACHE` environment variable.
  - `enableRestEndpoints`: A boolean flag indicating whether create REST endpoints for pathways as well as GraphQL queries. Default is false. The value can be set using the `CORTEX_ENABLE_REST` environment variable.
- - `cortexApiKey`: A string containing an API key that the client must pass to Cortex for authorization. Default is null in which case Cortex is unprotected. The value can be set using the `CORTEX_API_KEY` environment variable
+ - `cortexApiKeys`: A string containing one or more comma separated API keys that the client must pass to Cortex for authorization. Default is null in which case Cortex is unprotected. The value can be set using the `CORTEX_API_KEY` environment variable
  - `models`: An object containing the different models used by the project. The value can be set using the `CORTEX_MODELS` environment variable. Cortex is model and vendor agnostic - you can use this config to set up models of any type from any vendor.
  - `openaiApiKey`: The API key used for accessing the OpenAI API. This is sensitive information and has no default value. The value can be set using the `OPENAI_API_KEY` environment variable.
  - `openaiApiUrl`: The URL used for accessing the OpenAI API. Default is https://api.openai.com/v1/completions. The value can be set using the `OPENAI_API_URL` environment variable.
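The rename from `cortexApiKey` to `cortexApiKeys` keeps the same `CORTEX_API_KEY` environment variable but accepts a comma-separated list, and a client may present any one of the configured keys. A minimal sketch of both sides, assuming a local server on port 4000 and placeholder key values; only the `cortex-api-key` header name and the `/graphql` path come from the code changed below:

```js
// Server: two keys in the same env var as before (shell line shown as a comment).
// CORTEX_API_KEY="key-for-app-a,key-for-app-b" npm start

// Client: send any one configured key with each request (Node 18+ global fetch).
const response = await fetch('http://localhost:4000/graphql', {
    method: 'POST',
    headers: {
        'Content-Type': 'application/json',
        'cortex-api-key': 'key-for-app-a', // placeholder key
    },
    body: JSON.stringify({ query: '{ __typename }' }), // schema-agnostic probe query
});
console.log(response.status); // 401 if the key is not in the configured list
```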
package/config.js CHANGED
@@ -8,6 +8,18 @@ import logger from './lib/logger.js';
 
  const __dirname = path.dirname(fileURLToPath(import.meta.url));
 
+ convict.addFormat({
+     name: 'string-array',
+     validate: function(val) {
+         if (!Array.isArray(val)) {
+             throw new Error('must be of type Array');
+         }
+     },
+     coerce: function(val) {
+         return val.split(',');
+     },
+ });
+
  // Schema for config
  var config = convict({
  env: {
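The new `string-array` convict format is what lets one environment variable carry several values: `coerce` splits the raw string on commas before `validate` checks that the result is an array. A standalone sketch of the mechanism, using a hypothetical `DEMO_KEYS` variable rather than Cortex's own schema:

```js
import convict from 'convict';

// Same format as registered in config.js above.
convict.addFormat({
    name: 'string-array',
    validate: function (val) {
        if (!Array.isArray(val)) {
            throw new Error('must be of type Array');
        }
    },
    coerce: function (val) {
        return val.split(',');
    },
});

// Hypothetical schema purely for illustration.
process.env.DEMO_KEYS = 'alpha,beta,gamma';
const demo = convict({
    keys: { format: 'string-array', default: null, env: 'DEMO_KEYS' },
});

console.log(demo.get('keys')); // ['alpha', 'beta', 'gamma']
```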
@@ -30,8 +42,8 @@ var config = convict({
  default: path.join(__dirname, 'pathways'),
  env: 'CORTEX_CORE_PATHWAYS_PATH'
  },
- cortexApiKey: {
- format: String,
+ cortexApiKeys: {
+ format: 'string-array',
  default: null,
  env: 'CORTEX_API_KEY',
  sensitive: true
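With the schema entry switched to the `string-array` format, reading the setting now yields an array instead of a single string. A short, assumed read-out (the key values are placeholders):

```js
// CORTEX_API_KEY="key-for-app-a,key-for-app-b"
const cortexApiKeys = config.get('cortexApiKeys');
console.log(Array.isArray(cortexApiKeys)); // true
console.log(cortexApiKeys);                // ['key-for-app-a', 'key-for-app-b']
```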
@@ -264,7 +276,7 @@ const buildPathways = async (config) => {
 
  // Build and load models to config
  const buildModels = (config) => {
- let { models } = config.getProperties();
+ const { models } = config.getProperties();
 
  // iterate over each model
  for (let [key, model] of Object.entries(models)) {
@@ -113,7 +113,9 @@ async function splitMediaFile(inputPath, chunkDurationInSeconds = 600) {
 
  return { chunkPromises, uniqueOutputPath };
  } catch (err) {
- console.error('Error occurred during the splitting process:', err);
+ const msg = `Error processing media file, check if the file is a valid media file or is accessible`;
+ console.error(msg, err);
+ throw new Error(msg);
  }
  }
 
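Because the catch block now rethrows instead of silently returning `undefined`, callers of `splitMediaFile` have to handle the failure themselves. A hedged sketch of the call-site pattern this implies; the surrounding function is illustrative and not code from the package, and only `splitMediaFile`'s signature and return shape come from the hunk above:

```js
// Illustrative caller; everything except splitMediaFile's signature is assumed.
async function transcribeInChunks(inputPath) {
    let split;
    try {
        split = await splitMediaFile(inputPath, 600);
    } catch (err) {
        // As of this release the helper throws on bad or unreadable media
        // instead of returning undefined, so the failure can be reported here.
        console.error('Could not split media:', err.message);
        return null;
    }
    const { chunkPromises, uniqueOutputPath } = split;
    const chunks = await Promise.all(chunkPromises); // whatever each chunk promise resolves to
    console.log(`Split into ${chunks.length} chunks under ${uniqueOutputPath}`);
    return chunks;
}
```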
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@aj-archipelago/cortex",
- "version": "1.1.4-0",
+ "version": "1.1.4",
  "description": "Cortex is a GraphQL API for AI. It provides a simple, extensible interface for using AI services from OpenAI, Azure and others.",
  "private": false,
  "repository": {
package/server/graphql.js CHANGED
@@ -97,7 +97,7 @@ const getResolvers = (config, pathways) => {
  // add shared state to contextValue
  contextValue.pathway = pathway;
  contextValue.config = config;
- return pathway.rootResolver(parent, args, contextValue, info);
+ return pathway.rootResolver(parent, args, contextValue, info);
  }
  }
 
@@ -176,8 +176,8 @@ const build = async (config) => {
  });
 
  // If CORTEX_API_KEY is set, we roll our own auth middleware - usually not used if you're being fronted by a proxy
- const cortexApiKey = config.get('cortexApiKey');
- if (cortexApiKey) {
+ const cortexApiKeys = config.get('cortexApiKeys');
+ if (cortexApiKeys && Array.isArray(cortexApiKeys)) {
  app.use((req, res, next) => {
  let providedApiKey = req.headers['cortex-api-key'] || req.query['cortex-api-key'];
  if (!providedApiKey) {
@@ -185,7 +185,7 @@ const build = async (config) => {
  providedApiKey = providedApiKey?.startsWith('Bearer ') ? providedApiKey.slice(7) : providedApiKey;
  }
 
- if (cortexApiKey && cortexApiKey !== providedApiKey) {
+ if (!cortexApiKeys.includes(providedApiKey)) {
  if (req.baseUrl === '/graphql' || req.headers['content-type'] === 'application/graphql') {
  res.status(401)
  .set('WWW-Authenticate', 'Cortex-Api-Key')
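Taken together, these two hunks replace the single-key equality check with an array membership test, so any key in the configured list is accepted. A condensed, standalone sketch of that middleware logic, assuming an Express app and placeholder keys; the header name, the query-string fallback, the `includes` test, and the 401/`WWW-Authenticate` response come from the hunks, while the Bearer-token branch and the JSON error body are omitted here:

```js
import express from 'express';

// Stand-in for config.get('cortexApiKeys') after the string-array coercion.
const cortexApiKeys = ['key-for-app-a', 'key-for-app-b'];

const app = express();
app.use((req, res, next) => {
    // Same lookup order as above: header first, then query string.
    const providedApiKey = req.headers['cortex-api-key'] || req.query['cortex-api-key'];
    if (!cortexApiKeys.includes(providedApiKey)) {
        // Reject with the same status and challenge header as the package.
        return res.status(401).set('WWW-Authenticate', 'Cortex-Api-Key').end();
    }
    next();
});

app.get('/healthz', (_req, res) => res.send('ok')); // illustrative route
app.listen(4000);
```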
@@ -289,7 +289,7 @@ class OpenAIWhisperPlugin extends ModelPlugin {
  }
 
  } catch (error) {
- const errMsg = `Transcribe error: ${error?.message || error}`;
+ const errMsg = `Transcribe error: ${error?.response?.data || error?.message || error}`;
  logger.error(errMsg);
  return errMsg;
  }
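Widening the fallback chain to `error?.response?.data` surfaces the API's own error body when the transcription request fails with an HTTP error, rather than only axios's generic message. A tiny illustration with a hypothetical failing request (not the plugin's actual call):

```js
import axios from 'axios';

try {
    // Hypothetical failing request standing in for the Whisper call.
    await axios.post('https://example.invalid/transcribe', {});
} catch (error) {
    // When the API answers with an HTTP error, error.response.data carries its body
    // and is now reported first; for a pure network error (as here) there is no
    // response, so the template falls back to error.message exactly as before.
    const errMsg = `Transcribe error: ${error?.response?.data || error?.message || error}`;
    console.error(errMsg);
}
```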