@qelos/plugins-cli 0.0.19 → 0.0.21

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -47,7 +47,7 @@ qplay create my-app
47
47
 
48
48
  ### Pull
49
49
 
50
- Pull resources from your Qelos instance to your local filesystem. This allows you to work on components, plugins, integrations, and blueprints locally.
50
+ Pull resources from your Qelos instance to your local filesystem. This allows you to work on components, blueprints, configs, plugins, blocks, integrations, and connections locally.
51
51
 
52
52
  **Syntax:**
53
53
  ```bash
@@ -55,7 +55,7 @@ qelos pull <type> <path>
55
55
  ```
56
56
 
57
57
  **Arguments:**
58
- - `type` - Type of resource to pull (e.g., `components`, `plugins`, `integrations`, `blueprints`)
58
+ - `type` - Type of resource to pull (e.g., `components`, `plugins`, `integrations`, `connections`, `blueprints`)
59
59
  - `path` - Local directory path where resources will be saved
60
60
 
61
61
  **Example - Pull Components:**
@@ -82,7 +82,7 @@ All 5 components pulled to ./my-components
82
82
 
83
83
  ### Push
84
84
 
85
- Push local resources to your Qelos instance. This allows you to update or create components, plugins, integrations, and blueprints from your local filesystem.
85
+ Push local resources to your Qelos instance. This allows you to update or create components, blueprints, configs, plugins, blocks, integrations, and connections from your local filesystem.
86
86
 
87
87
  **Syntax:**
88
88
  ```bash
@@ -90,7 +90,7 @@ qelos push <type> <path>
90
90
  ```
91
91
 
92
92
  **Arguments:**
93
- - `type` - Type of resource to push (e.g., `components`, `plugins`, `integrations`, `blueprints`)
93
+ - `type` - Type of resource to push (e.g., `components`, `plugins`, `integrations`, `connections`, `blueprints`)
94
94
  - `path` - Local directory path containing the resources to push
95
95
 
96
96
  **Example - Push Components:**
@@ -129,6 +129,39 @@ qelos pull components ./local-components
129
129
  qelos push components ./local-components
130
130
  ```
131
131
 
132
+ ### Integrations
133
+
134
+ - Pulled integrations are stored as `.integration.json` files that exclude server-only fields such as `tenant`, `user`, `created`, `updated`, and `__v`.
135
+ - When pushing, the CLI automatically recalculates backend-only properties (like `kind`) so you do not need to keep them in local files.
136
+
137
+ ### Connections (Integration Sources)
138
+
139
+ Connections (integration sources) are now fully supported via `qelos pull connections <path>` and `qelos push connections <path>`.
140
+
141
+ - Each connection is stored as `<name>.connection.json` containing `name`, `kind`, `labels`, `metadata`, and an `authentication` placeholder:
142
+
143
+ ```json
144
+ {
145
+ "_id": "64f1...",
146
+ "name": "OpenAI",
147
+ "kind": "openai",
148
+ "labels": ["ai"],
149
+ "metadata": { "defaultModel": "gpt-4o" },
150
+ "authentication": {
151
+ "$var": "INTEGRATION_AUTH_OPENAI"
152
+ }
153
+ }
154
+ ```
155
+
156
+ - At push time, the CLI reads the referenced environment variable (e.g., `INTEGRATION_AUTH_OPENAI`) which must contain a JSON string with the real credentials:
157
+
158
+ ```bash
159
+ export INTEGRATION_AUTH_OPENAI='{"token":"sk-..."}'
160
+ ```
161
+
162
+ - If the env var is missing, the CLI skips updating the secure authentication payload and only syncs metadata.
163
+ - When new connections are created, the CLI persists the returned `_id` so future pushes update the same record.
164
+
132
165
  ## Help
133
166
 
134
167
  View all available commands and options:
package/commands/pull.mjs CHANGED
@@ -8,7 +8,7 @@ export default function pullCommand(program) {
8
8
  .positional('type', {
9
9
  describe: 'Type of the resource to pull. Can be components, blueprints, configurations, plugins, blocks, or all.',
10
10
  type: 'string',
11
- choices: ['components', 'blueprints', 'configs', 'plugins', 'blocks', 'all', '*'],
11
+ choices: ['components', 'blueprints', 'configs', 'plugins', 'blocks', 'integrations', 'connections', 'all', '*'],
12
12
  required: true
13
13
  })
14
14
  .positional('path', {
package/commands/push.mjs CHANGED
@@ -8,7 +8,7 @@ export default function createCommand(program) {
8
8
  .positional('type', {
9
9
  describe: 'Type of the resource to push. Can be components, blueprints, configurations, plugins, blocks, or all.',
10
10
  type: 'string',
11
- choices: ['components', 'blueprints', 'configs', 'plugins', 'blocks', 'all', '*'],
11
+ choices: ['components', 'blueprints', 'configs', 'plugins', 'blocks', 'integrations', 'connections', 'all', '*'],
12
12
  required: true
13
13
  })
14
14
  .positional('path', {
@@ -4,6 +4,8 @@ import { pullBlueprints } from '../services/blueprints.mjs';
4
4
  import { pullConfigurations } from '../services/configurations.mjs';
5
5
  import { pullPlugins } from '../services/plugins.mjs';
6
6
  import { pullBlocks } from '../services/blocks.mjs';
7
+ import { pullIntegrations } from '../services/integrations.mjs';
8
+ import { pullConnections } from '../services/connections.mjs';
7
9
  import { logger } from '../services/logger.mjs';
8
10
  import fs from 'node:fs';
9
11
  import path from 'node:path';
@@ -36,7 +38,9 @@ export default async function pullController({ type, path: targetPath = './' })
36
38
  { name: 'blueprints', fn: pullBlueprints },
37
39
  { name: 'configs', fn: pullConfigurations },
38
40
  { name: 'plugins', fn: pullPlugins },
39
- { name: 'blocks', fn: pullBlocks }
41
+ { name: 'blocks', fn: pullBlocks },
42
+ { name: 'integrations', fn: pullIntegrations },
43
+ { name: 'connections', fn: pullConnections }
40
44
  ];
41
45
 
42
46
  for (const { name, fn } of types) {
@@ -64,11 +68,15 @@ export default async function pullController({ type, path: targetPath = './' })
64
68
  await pullPlugins(sdk, targetPath);
65
69
  } else if (type === 'blocks') {
66
70
  await pullBlocks(sdk, targetPath);
71
+ } else if (type === 'integrations' || type === 'integration') {
72
+ await pullIntegrations(sdk, targetPath);
73
+ } else if (type === 'connections' || type === 'connection') {
74
+ await pullConnections(sdk, targetPath);
67
75
  } else if (type === 'config' || type === 'configs' || type === 'configuration') {
68
76
  await pullConfigurations(sdk, targetPath);
69
77
  } else {
70
78
  logger.error(`Unknown type: ${type}`);
71
- logger.info('Supported types: components, blueprints, plugins, blocks, config, configs, configuration, all');
79
+ logger.info('Supported types: components, blueprints, plugins, blocks, integrations, connections, config, configs, configuration, all');
72
80
  process.exit(1);
73
81
  }
74
82
 
@@ -4,6 +4,8 @@ import { pushBlueprints } from '../services/blueprints.mjs';
4
4
  import { pushConfigurations } from '../services/configurations.mjs';
5
5
  import { pushPlugins } from '../services/plugins.mjs';
6
6
  import { pushBlocks } from '../services/blocks.mjs';
7
+ import { pushIntegrations } from '../services/integrations.mjs';
8
+ import { pushConnections } from '../services/connections.mjs';
7
9
  import { logger } from '../services/logger.mjs';
8
10
  import fs from 'node:fs';
9
11
  import path from 'node:path';
@@ -45,7 +47,9 @@ export default async function pushController({ type, path: sourcePath }) {
45
47
  { name: 'blueprints', fn: pushBlueprints },
46
48
  { name: 'configs', fn: pushConfigurations },
47
49
  { name: 'plugins', fn: pushPlugins },
48
- { name: 'blocks', fn: pushBlocks }
50
+ { name: 'blocks', fn: pushBlocks },
51
+ { name: 'integrations', fn: pushIntegrations },
52
+ { name: 'connections', fn: pushConnections }
49
53
  ];
50
54
 
51
55
  for (const { name, fn } of types) {
@@ -80,11 +84,15 @@ export default async function pushController({ type, path: sourcePath }) {
80
84
  await pushPlugins(sdk, basePath, { targetFile });
81
85
  } else if (type === 'blocks') {
82
86
  await pushBlocks(sdk, basePath, { targetFile });
83
- } else if (type === 'config' || type === 'configs' || type === 'configuration') {
87
+ } else if (type === 'integrations' || type === 'integration') {
88
+ await pushIntegrations(sdk, basePath, { targetFile });
89
+ } else if (type === 'connections' || type === 'connection') {
90
+ await pushConnections(sdk, basePath, { targetFile });
91
+ } else if (type === 'config' || type === 'configs' || type === 'configuration') {
84
92
  await pushConfigurations(sdk, basePath, { targetFile });
85
93
  } else {
86
94
  logger.error(`Unknown type: ${type}`);
87
- logger.info('Supported types: components, blueprints, plugins, blocks, config, configs, configuration, all');
95
+ logger.info('Supported types: components, blueprints, plugins, blocks, integrations, connections, config, configs, configuration, all');
88
96
  process.exit(1);
89
97
  }
90
98
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@qelos/plugins-cli",
3
- "version": "0.0.19",
3
+ "version": "0.0.21",
4
4
  "description": "CLI to manage QELOS plugins",
5
5
  "main": "cli.mjs",
6
6
  "bin": {
@@ -68,12 +68,12 @@ function buildMarkdown({ blueprint, interfaceName, entityVarName, interfaceDefin
68
68
  '',
69
69
  '### Create an Entity',
70
70
  '```ts',
71
- `const created = await ${entityVarName}.create(${exampleLiteral});`,
71
+ `const created = await ${entityVarName}.create({\n metadata: ${indentLiteral(exampleLiteral, 2)},\n});`,
72
72
  '```',
73
73
  '',
74
74
  '### Update an Entity',
75
75
  '```ts',
76
- `const updated = await ${entityVarName}.update('replace-with-entity-id', {\n ...${exampleLiteral.replace(/\n/g, '\n ')},\n});`,
76
+ `const updated = await ${entityVarName}.update('replace-with-entity-id', {\n metadata: {\n ...oldMetadata,\n ...${indentLiteral(exampleLiteral, 4)},\n },\n});`,
77
77
  '```',
78
78
  '',
79
79
  '### Delete an Entity',
@@ -219,6 +219,11 @@ function stringifyObjectLiteral(value, level = 0) {
219
219
  return String(value);
220
220
  }
221
221
 
222
+ function indentLiteral(literal, spaces) {
223
+ const indent = ' '.repeat(spaces);
224
+ return literal.replace(/\n/g, `\n${indent}`);
225
+ }
226
+
222
227
  function toCamelCase(value) {
223
228
  return (value || '')
224
229
  .replace(/[-_\s]+(.)?/g, (_, chr) => (chr ? chr.toUpperCase() : ''))
@@ -50,22 +50,29 @@ export async function pushComponents(sdk, path, options = {}) {
50
50
  component => component.identifier === targetIdentifier || component.componentName === componentName
51
51
  );
52
52
 
53
- if (existingComponent) {
54
- await sdk.components.update(existingComponent._id, {
55
- identifier: targetIdentifier,
56
- componentName: componentName,
57
- content,
58
- description: info.description || existingComponent.description || 'Component description'
59
- });
60
- logger.success(`Updated: ${componentName}`);
61
- } else {
62
- await sdk.components.create({
63
- identifier: targetIdentifier,
64
- componentName: componentName,
65
- content,
66
- description: targetDescription
67
- });
68
- logger.success(`Created: ${componentName}`);
53
+ try {
54
+ if (existingComponent) {
55
+ await sdk.components.update(existingComponent._id, {
56
+ identifier: targetIdentifier,
57
+ componentName: componentName,
58
+ content,
59
+ description: info.description || existingComponent.description || 'Component description'
60
+ });
61
+ logger.success(`Updated: ${componentName}`);
62
+ } else {
63
+ await sdk.components.create({
64
+ identifier: targetIdentifier,
65
+ componentName: componentName,
66
+ content,
67
+ description: targetDescription
68
+ });
69
+ logger.success(`Created: ${componentName}`);
70
+ }
71
+ } catch (error) {
72
+ // Extract reason from error details if available
73
+ const reason = error.details?.reason || error.message;
74
+ logger.error(`Failed to push component: ${componentName} - ${reason}`);
75
+ throw error;
69
76
  }
70
77
  }
71
78
  }));
@@ -2,6 +2,7 @@ import fs from 'node:fs';
2
2
  import { join } from 'node:path';
3
3
  import { logger } from './logger.mjs';
4
4
  import { appUrl } from './sdk.mjs';
5
+ import { extractConfigContent, resolveReferences } from './file-refs.mjs';
5
6
 
6
7
  /**
7
8
  * Push configurations from local directory to remote
@@ -45,6 +46,9 @@ export async function pushConfigurations(sdk, path, options = {}) {
45
46
 
46
47
  logger.step(`Pushing configuration: ${key}`);
47
48
 
49
+ // Resolve any $ref references in the configuration
50
+ configData = await resolveReferences(configData, path);
51
+
48
52
  // Special handling for app-configuration: ensure QELOS_URL hostname is in websiteUrls
49
53
  if (key === 'app-configuration') {
50
54
  try {
@@ -160,7 +164,10 @@ export async function pullConfigurations(sdk, targetPath) {
160
164
  // Remove fields that shouldn't be in the file
161
165
  const { _id, tenant, created, updated, ...relevantFields } = fullConfig;
162
166
 
163
- fs.writeFileSync(filePath, JSON.stringify(relevantFields, null, 2), 'utf-8');
167
+ // Extract content to files for supported config types
168
+ const processedConfig = extractConfigContent(relevantFields, targetPath);
169
+
170
+ fs.writeFileSync(filePath, JSON.stringify(processedConfig, null, 2), 'utf-8');
164
171
  logger.step(`Pulled: ${config.key}`);
165
172
  }));
166
173
 
@@ -0,0 +1,239 @@
1
+ import fs from 'node:fs';
2
+ import path from 'node:path';
3
+ import { logger } from './logger.mjs';
4
+
5
+ const CONNECTION_FILE_EXTENSION = '.connection.json';
6
+ const AUTH_PLACEHOLDER_KEY = '$var';
7
+
8
/**
 * Normalize an arbitrary value into a lowercase, dash-separated slug
 * suitable for use in filenames.
 * @param {*} value - Value to slugify (coerced to string)
 * @returns {string} Slug with runs of non-alphanumerics collapsed to '-'
 */
function slugify(value = '') {
  const normalized = value.toString().trim().toLowerCase();
  const dashed = normalized.replace(/[^a-z0-9]+/g, '-');
  return dashed.replace(/^-+|-+$/g, '');
}
16
+
17
/**
 * Create a directory (recursively) if it does not already exist,
 * logging when a new directory is created.
 * @param {string} dirPath - Directory to ensure
 */
function ensureDirectory(dirPath) {
  if (fs.existsSync(dirPath)) {
    return;
  }
  fs.mkdirSync(dirPath, { recursive: true });
  logger.info(`Created directory: ${dirPath}`);
}
23
+
24
/**
 * Pick the env-var name expected to hold this connection's credentials.
 * Preference order: an existing placeholder already stored on the record,
 * then the record's `_id`, then a suffix derived from the connection name.
 * @param {Object} connection - Connection record (may be partial)
 * @returns {string} Environment variable name, e.g. INTEGRATION_AUTH_OPENAI
 */
function getDefaultAuthEnvVar(connection) {
  const existing = connection?.authentication?.[AUTH_PLACEHOLDER_KEY];
  if (existing) {
    return existing;
  }

  if (connection?._id) {
    return `INTEGRATION_AUTH_${connection._id}`;
  }

  const slug = slugify(connection?.name || 'connection')
    .replace(/-/g, '_')
    .toUpperCase();
  // NOTE(review): the random branch looks reachable only when the slug is
  // empty, which the 'connection' fallback name should prevent — confirm.
  const suffix = slug ? slug.slice(-6) : Math.random().toString(36).slice(-6).toUpperCase();
  return `INTEGRATION_AUTH_${suffix || 'NEW'}`;
}
37
+
38
/**
 * Derive a unique `<name>.connection.json` filename for a connection.
 * Falls back to the record `_id`, then to a positional name; collisions get
 * an incrementing numeric suffix.
 * @param {Object} connection - Connection record
 * @param {number} index - Position in the pulled list (for fallback naming)
 * @param {Set<string>} usedNames - Base names already taken (mutated here)
 * @returns {string} Unique filename ending with `.connection.json`
 */
function buildFileName(connection, index, usedNames) {
  const base =
    slugify(connection?.name) ||
    (connection?._id ? slugify(connection._id) : '') ||
    `connection-${index + 1}`;

  // Bug fix: the previous collision suffix (`usedNames.size + 1`) was
  // constant inside the loop, so a second collision on the same candidate
  // name spun forever. Use a per-call incrementing attempt counter instead.
  let fileName = base;
  let attempt = 1;
  while (usedNames.has(fileName)) {
    attempt += 1;
    fileName = `${base}-${attempt}`;
  }

  usedNames.add(fileName);
  return `${fileName}${CONNECTION_FILE_EXTENSION}`;
}
51
+
52
/**
 * Shape a server connection record into the on-disk file format: identity
 * and metadata fields are kept, while credentials are replaced by an
 * env-var placeholder stored under the `$var` key.
 * @param {Object} connection - Full connection record from the API
 * @returns {Object} Serializable connection file content
 */
function sanitizeConnectionForFile(connection) {
  const { _id, name, kind } = connection;
  return {
    _id,
    name,
    kind,
    labels: connection.labels || [],
    metadata: connection.metadata || {},
    authentication: { [AUTH_PLACEHOLDER_KEY]: getDefaultAuthEnvVar(connection) },
  };
}
64
+
65
/**
 * Serialize a connection object to disk as pretty-printed (2-space) JSON.
 * @param {string} filePath - Destination path
 * @param {Object} connection - Connection payload to persist
 */
function writeConnectionFile(filePath, connection) {
  const serialized = JSON.stringify(connection, null, 2);
  fs.writeFileSync(filePath, serialized, 'utf-8');
}
68
+
69
/**
 * Validate the minimal shape of a parsed `.connection.json` file.
 * @param {Object} data - Parsed connection file content
 * @param {string} fileName - File name used in error messages
 * @throws {Error} When name/kind/metadata are missing or labels is not an array
 */
function validateConnectionPayload(data, fileName) {
  const fail = (detail) => {
    throw new Error(`Connection file ${fileName} ${detail}`);
  };

  if (!data?.name) {
    fail('must include a name');
  }
  if (!data?.kind) {
    fail('must include a kind');
  }
  if (!data?.metadata || typeof data.metadata !== 'object') {
    fail('must include metadata object');
  }
  if (data.labels && !Array.isArray(data.labels)) {
    fail('has invalid labels (must be an array)');
  }
}
86
+
87
/**
 * Resolve the real authentication payload for a connection file.
 * The file stores `authentication: { "$var": "ENV_NAME" }`; the actual
 * credentials are read from `process.env[ENV_NAME]` as a JSON object.
 * @param {Object} connectionFile - Parsed connection file content
 * @param {string} fileName - File name used in error messages
 * @returns {{payload: (Object|undefined), envVar: (string|null)}}
 *   `payload` is undefined when no credentials should be sent
 * @throws {Error} When the placeholder is malformed, the env var is missing
 *   for a brand-new connection, or the env value is not a JSON object
 */
function extractAuthenticationPayload(connectionFile, fileName) {
  const authField = connectionFile.authentication;

  // No placeholder at all: nothing to push, nothing to report.
  if (!authField || typeof authField !== 'object') {
    return { payload: undefined, envVar: null };
  }

  const envVarName = authField[AUTH_PLACEHOLDER_KEY];
  if (!envVarName) {
    throw new Error(
      `Connection file ${fileName} must define authentication as { "${AUTH_PLACEHOLDER_KEY}": "INTEGRATION_AUTH_..." }`
    );
  }

  const raw = process.env[envVarName];
  if (!raw) {
    // A brand-new connection cannot be created without credentials.
    if (!connectionFile._id) {
      throw new Error(
        `Environment variable ${envVarName} is required to create connection defined in ${fileName}`
      );
    }
    logger.info(
      `Skipping authentication update for ${connectionFile.name} (env ${envVarName} not set)`
    );
    return { payload: undefined, envVar: envVarName };
  }

  let parsed;
  try {
    parsed = JSON.parse(raw);
  } catch (error) {
    throw new Error(
      `Failed to parse JSON from env ${envVarName} for connection ${fileName}: ${error.message}`
    );
  }

  if (typeof parsed !== 'object' || !parsed) {
    // Matches the original wrapped-message shape for non-object JSON values.
    throw new Error(
      `Failed to parse JSON from env ${envVarName} for connection ${fileName}: Authentication env must contain a JSON object`
    );
  }

  return { payload: parsed, envVar: envVarName };
}
129
+
130
/**
 * Pull all integration sources ("connections") from the Qelos instance and
 * write each one to `<target>/<name>.connection.json`, with credentials
 * replaced by an env-var placeholder.
 * @param {Object} sdk - Qelos SDK instance (uses `sdk.integrationSources`)
 * @param {string} targetPath - Directory to write connection files into
 * @throws {Error} When the source list cannot be fetched
 */
export async function pullConnections(sdk, targetPath) {
  ensureDirectory(targetPath);

  let connections = [];
  try {
    connections = await sdk.integrationSources.getList();
  } catch (error) {
    logger.error('Failed to fetch integration sources', error);
    throw error;
  }

  const hasConnections = Array.isArray(connections) && connections.length > 0;
  if (!hasConnections) {
    logger.warning('No connections found to pull');
    return;
  }

  logger.info(`Found ${connections.length} connection(s) to pull`);

  const usedNames = new Set();
  let index = 0;
  for (const connection of connections) {
    const fileName = buildFileName(connection, index, usedNames);
    const filePath = path.join(targetPath, fileName);
    writeConnectionFile(filePath, sanitizeConnectionForFile(connection));
    logger.step(`Pulled connection: ${connection.name || connection._id}`);
    index += 1;
  }

  logger.info(`Pulled ${connections.length} connection(s)`);
}
158
+
159
/**
 * Push local `.connection.json` files to the Qelos instance.
 * Creates connections that lack an `_id`, updates those that have one,
 * resolves the authentication payload from the referenced env var, and
 * rewrites each file afterwards so newly-created records keep their
 * server-assigned `_id` for future pushes.
 * @param {Object} sdk - Qelos SDK instance (uses `sdk.integrationSources`)
 * @param {string} sourcePath - Directory containing connection files
 * @param {Object} [options]
 * @param {string} [options.targetFile] - Push only this file (relative to sourcePath)
 * @throws {Error} When any connection fails to push
 */
export async function pushConnections(sdk, sourcePath, options = {}) {
  const { targetFile } = options;
  // Fix: only scan the directory when no explicit file was requested —
  // the previous code called readdirSync unconditionally, doing needless
  // I/O (and failing) even when targetFile fully determined the work.
  const files = targetFile ? [targetFile] : fs.readdirSync(sourcePath);
  const connectionFiles = files.filter((file) => file.endsWith(CONNECTION_FILE_EXTENSION));

  if (connectionFiles.length === 0) {
    if (targetFile) {
      logger.warning(
        `File ${targetFile} is not a ${CONNECTION_FILE_EXTENSION} connection file. Skipping.`
      );
    } else {
      logger.warning(`No connection files (*${CONNECTION_FILE_EXTENSION}) found in ${sourcePath}`);
    }
    return;
  }

  logger.info(`Found ${connectionFiles.length} connection(s) to push`);

  const failures = [];

  for (const file of connectionFiles) {
    const filePath = path.join(sourcePath, file);

    try {
      const connectionData = JSON.parse(fs.readFileSync(filePath, 'utf-8'));
      validateConnectionPayload(connectionData, file);

      const { payload: authentication, envVar } = extractAuthenticationPayload(connectionData, file);

      const requestBody = {
        name: connectionData.name,
        kind: connectionData.kind,
        labels: connectionData.labels || [],
        metadata: connectionData.metadata || {},
        // Only send credentials when the env var actually provided them.
        ...(authentication ? { authentication } : {}),
      };

      logger.step(`Pushing connection: ${connectionData.name}`);
      let response;

      if (connectionData._id) {
        response = await sdk.integrationSources.update(connectionData._id, requestBody);
        logger.success(`Updated connection: ${connectionData.name}`);
      } else {
        response = await sdk.integrationSources.create(requestBody);
        logger.success(`Created connection: ${connectionData.name}`);
      }

      // Keep the existing placeholder if the file already had one;
      // otherwise derive one from the env var or the server response.
      const authPlaceholder = connectionData.authentication?.[AUTH_PLACEHOLDER_KEY]
        ? connectionData.authentication
        : { [AUTH_PLACEHOLDER_KEY]: envVar || getDefaultAuthEnvVar(response) };

      // Persist the server-assigned _id so future pushes update this record.
      writeConnectionFile(filePath, {
        _id: response._id,
        name: response.name,
        kind: response.kind,
        labels: response.labels || [],
        metadata: response.metadata || {},
        authentication: authPlaceholder,
      });
    } catch (error) {
      logger.error(`Failed to push connection file ${file}`, error);
      failures.push(error);
    }
  }

  if (failures.length) {
    logger.error(`\n${failures.length} connection(s) failed to push:`);
    failures.forEach((reason) => {
      logger.error(`  • ${reason?.message || 'Unknown error'}`);
    });
    throw new Error(`Failed to push ${failures.length} connection(s)`);
  }

  logger.info(`Pushed ${connectionFiles.length} connection(s)`);
}
@@ -0,0 +1,256 @@
1
+ import fs from 'node:fs';
2
+ import path from 'node:path';
3
+ import { logger } from './logger.mjs';
4
+
5
+ /**
6
+ * Configuration mapping for which fields should be extracted to files for each config type
7
+ */
8
+ const CONFIG_EXTRACTION_MAP = {
9
+ 'ssr-scripts': {
10
+ fields: ['head', 'body'],
11
+ fileExtension: '.html',
12
+ subdirectory: 'html'
13
+ },
14
+ 'users-header': {
15
+ fields: ['html'],
16
+ fileExtension: '.html',
17
+ subdirectory: 'html'
18
+ }
19
+ };
20
+
21
+ /**
22
+ * Check if an integration is an AI agent that should have its pre_messages extracted
23
+ * @param {Object} integration - Integration object
24
+ * @returns {boolean} True if this is an AI agent with chatCompletion
25
+ */
26
+ function isAiAgent(integration) {
27
+ return (
28
+ integration &&
29
+ Array.isArray(integration.kind) &&
30
+ integration.kind.includes('qelos') &&
31
+ integration.trigger?.operation === 'chatCompletion' &&
32
+ integration.target?.operation === 'chatCompletion' &&
33
+ integration.target?.details?.pre_messages?.length > 0
34
+ );
35
+ }
36
+
37
+ /**
38
+ * Convert a string to kebab-case for filenames
39
+ * @param {string} str - String to convert
40
+ * @returns {string} Kebab-cased string
41
+ */
42
+ function toKebabCase(str) {
43
+ return str
44
+ .replace(/([a-z])([A-Z])/g, '$1-$2')
45
+ .replace(/[\s_]+/g, '-')
46
+ .toLowerCase();
47
+ }
48
+
49
+ /**
50
+ * Load content from a $ref reference
51
+ * @param {string} ref - Reference path (relative, absolute, or URL)
52
+ * @param {string} basePath - Base path for resolving relative references
53
+ * @returns {Promise<string>} Loaded content
54
+ */
55
+ export async function loadReference(ref, basePath) {
56
+ // Handle HTTP/HTTPS URLs
57
+ if (ref.startsWith('http://') || ref.startsWith('https://')) {
58
+ logger.debug(`Loading reference from URL: ${ref}`);
59
+ try {
60
+ const response = await fetch(ref);
61
+ if (!response.ok) {
62
+ throw new Error(`HTTP ${response.status}: ${response.statusText}`);
63
+ }
64
+ return await response.text();
65
+ } catch (error) {
66
+ throw new Error(`Failed to load from URL ${ref}: ${error.message}`);
67
+ }
68
+ }
69
+
70
+ // Handle absolute and relative paths
71
+ const filePath = path.isAbsolute(ref)
72
+ ? ref
73
+ : path.resolve(basePath, ref);
74
+
75
+ logger.debug(`Loading reference from file: ${filePath}`);
76
+
77
+ if (!fs.existsSync(filePath)) {
78
+ throw new Error(`Referenced file does not exist: ${filePath}`);
79
+ }
80
+
81
+ return fs.readFileSync(filePath, 'utf-8');
82
+ }
83
+
84
+ /**
85
+ * Recursively resolve all $ref references in an object
86
+ * @param {any} obj - Object to resolve references in
87
+ * @param {string} basePath - Base path for resolving relative references
88
+ * @returns {Promise<any>} Object with all references resolved
89
+ */
90
+ export async function resolveReferences(obj, basePath) {
91
+ if (Array.isArray(obj)) {
92
+ return await Promise.all(obj.map(item => resolveReferences(item, basePath)));
93
+ }
94
+
95
+ if (obj && typeof obj === 'object') {
96
+ // Check if this is a $ref object
97
+ if (obj.$ref && typeof obj.$ref === 'string') {
98
+ return await loadReference(obj.$ref, basePath);
99
+ }
100
+
101
+ // Otherwise, recursively resolve all properties
102
+ const resolved = {};
103
+ for (const [key, value] of Object.entries(obj)) {
104
+ resolved[key] = await resolveReferences(value, basePath);
105
+ }
106
+ return resolved;
107
+ }
108
+
109
+ // Primitive value, return as-is
110
+ return obj;
111
+ }
112
+
113
+ /**
114
+ * Extract content from integration pre_messages to separate files
115
+ * @param {Object} integration - Integration object
116
+ * @param {string} integrationPath - Path where integration files are stored
117
+ * @param {string} fileName - Name of the integration file (without extension)
118
+ * @returns {Object} Updated integration with $ref objects
119
+ */
120
+ export function extractIntegrationContent(integration, integrationPath, fileName) {
121
+ // Check if this is an AI agent
122
+ if (!isAiAgent(integration)) {
123
+ return integration;
124
+ }
125
+
126
+ const updatedIntegration = JSON.parse(JSON.stringify(integration)); // Deep clone
127
+
128
+ // Create prompts subdirectory if needed
129
+ const extractDir = path.join(integrationPath, 'prompts');
130
+ if (!fs.existsSync(extractDir)) {
131
+ fs.mkdirSync(extractDir, { recursive: true });
132
+ logger.debug(`Created directory: ${extractDir}`);
133
+ }
134
+
135
+ // Extract the first pre_message content
136
+ const preMessage = updatedIntegration.target.details.pre_messages[0];
137
+ if (preMessage && preMessage.content && typeof preMessage.content === 'string') {
138
+ const content = preMessage.content;
139
+
140
+ // Skip if content is empty or just whitespace
141
+ if (!content.trim()) {
142
+ return updatedIntegration;
143
+ }
144
+
145
+ // Generate filename
146
+ const baseName = path.basename(fileName, '.integration.json');
147
+ const mdFileName = `${baseName}.md`;
148
+ const mdFilePath = path.join(extractDir, mdFileName);
149
+ const relativeRef = `./prompts/${mdFileName}`;
150
+
151
+ // Write content to file
152
+ fs.writeFileSync(mdFilePath, content, 'utf-8');
153
+ logger.debug(`Extracted pre_message content to: ${relativeRef}`);
154
+
155
+ // Replace with $ref
156
+ updatedIntegration.target.details.pre_messages[0].content = { $ref: relativeRef };
157
+ }
158
+
159
+ return updatedIntegration;
160
+ }
161
+
162
+ /**
163
+ * Extract all integrations that have content to be externalized
164
+ * @param {Array} integrations - Array of integration objects
165
+ * @param {string} integrationPath - Path where integration files are stored
166
+ * @returns {Array} Updated integrations with $ref objects
167
+ */
168
+ export function extractAllIntegrationContent(integrations, integrationPath) {
169
+ return integrations.map((integration, index) => {
170
+ // Generate a filename for this integration
171
+ const displayName = integration?.trigger?.details?.name ||
172
+ integration?.target?.details?.name ||
173
+ integration?._id ||
174
+ `integration-${index + 1}`;
175
+ const fileName = `${toKebabCase(displayName)}.integration.json`;
176
+
177
+ return extractIntegrationContent(integration, integrationPath, fileName);
178
+ });
179
+ }
180
+ export function extractConfigContent(config, configPath) {
181
+ const extractionConfig = CONFIG_EXTRACTION_MAP[config.key];
182
+
183
+ if (!extractionConfig || !config.metadata) {
184
+ return config;
185
+ }
186
+
187
+ const updatedConfig = JSON.parse(JSON.stringify(config)); // Deep clone
188
+ const { fields, fileExtension, subdirectory } = extractionConfig;
189
+
190
+ // Create subdirectory if needed
191
+ const extractDir = path.join(configPath, subdirectory);
192
+ if (!fs.existsSync(extractDir)) {
193
+ fs.mkdirSync(extractDir, { recursive: true });
194
+ logger.debug(`Created directory: ${extractDir}`);
195
+ }
196
+
197
+ // Extract each field
198
+ for (const field of fields) {
199
+ if (updatedConfig.metadata[field] && typeof updatedConfig.metadata[field] === 'string') {
200
+ const content = updatedConfig.metadata[field];
201
+
202
+ // Skip if content is empty or just whitespace
203
+ if (!content.trim()) {
204
+ continue;
205
+ }
206
+
207
+ // Generate filename - use config key if only one field, otherwise include field name
208
+ let fileName;
209
+ if (fields.length === 1) {
210
+ fileName = `${config.key}${fileExtension}`;
211
+ } else {
212
+ fileName = `${config.key}-${field}${fileExtension}`;
213
+ }
214
+ const filePath = path.join(extractDir, fileName);
215
+ const relativeRef = `./${subdirectory}/${fileName}`;
216
+
217
+ // Write content to file
218
+ fs.writeFileSync(filePath, content, 'utf-8');
219
+ logger.debug(`Extracted ${field} to: ${relativeRef}`);
220
+
221
+ // Replace with $ref
222
+ updatedConfig.metadata[field] = { $ref: relativeRef };
223
+ }
224
+ }
225
+
226
+ return updatedConfig;
227
+ }
228
+
229
+ /**
230
+ * Extract all configurations that have content to be externalized
231
+ * @param {Array} configs - Array of configuration objects
232
+ * @param {string} configPath - Path where config files are stored
233
+ * @returns {Array} Updated configurations with $ref objects
234
+ */
235
+ export function extractAllConfigContent(configs, configPath) {
236
+ return configs.map(config => extractConfigContent(config, configPath));
237
+ }
238
+
239
+ /**
240
+ * Recursively find all $ref objects in an object
241
+ * @param {any} obj - Object to search
242
+ * @param {Array} refs - Array to collect found references (used internally)
243
+ * @returns {Array} Array of found $ref paths
244
+ */
245
+ export function findAllRefs(obj, refs = []) {
246
+ if (Array.isArray(obj)) {
247
+ obj.forEach(item => findAllRefs(item, refs));
248
+ } else if (obj && typeof obj === 'object') {
249
+ if (obj.$ref && typeof obj.$ref === 'string') {
250
+ refs.push(obj.$ref);
251
+ } else {
252
+ Object.values(obj).forEach(value => findAllRefs(value, refs));
253
+ }
254
+ }
255
+ return refs;
256
+ }
@@ -0,0 +1,199 @@
1
+ import fs from 'node:fs';
2
+ import { join } from 'node:path';
3
+ import { logger } from './logger.mjs';
4
+ import { extractIntegrationContent, resolveReferences } from './file-refs.mjs';
5
+
6
// File-name suffix used to recognize integration definition files on disk.
const INTEGRATION_FILE_EXTENSION = '.integration.json';
// Qelos REST endpoint for the integrations collection.
const INTEGRATIONS_API_PATH = '/api/integrations';
// Server-managed fields that must be stripped before writing integrations to local files.
const SERVER_ONLY_FIELDS = ['tenant', 'plugin', 'user', 'created', 'updated', '__v'];
9
+
10
/**
 * Normalize an arbitrary value into a lowercase, dash-separated slug
 * safe for use in file names.
 * @param {*} [value] - Value to slugify (stringified first).
 * @returns {string} Slug containing only [a-z0-9] and single dashes.
 */
function slugify(value = '') {
  const lowered = value.toString().trim().toLowerCase();
  const dashed = lowered.replace(/[^a-z0-9]+/g, '-');
  // Strip any leading/trailing dashes produced by the replacement above.
  return dashed.replace(/^-+|-+$/g, '');
}
18
+
19
/**
 * Derive a human-readable name for an integration, preferring the trigger
 * name, then the target name, then the server id; '' when none exist.
 * @param {object} [integration] - Integration object (may be undefined).
 * @returns {string} Display name or empty string.
 */
function getIntegrationDisplayName(integration) {
  const triggerName = integration?.trigger?.details?.name;
  const targetName = integration?.target?.details?.name;
  return triggerName || targetName || integration?._id || '';
}
27
+
28
/**
 * Build a unique on-disk file name for an integration.
 * Prefers a slug of the display name, then the id, then a positional
 * fallback; appends a suffix until the name is unique within `usedNames`.
 * @param {object} integration - Integration to name.
 * @param {number} index - Position in the pulled list (0-based).
 * @param {Set<string>} usedNames - Base names already taken (mutated).
 * @returns {string} File name ending with the integration extension.
 */
function buildFileName(integration, index, usedNames) {
  let candidate =
    slugify(getIntegrationDisplayName(integration)) ||
    slugify(integration?._id) ||
    `integration-${index + 1}`;

  // Disambiguate collisions; the candidate grows each pass, so this terminates.
  while (usedNames.has(candidate)) {
    const disambiguator = integration?._id ? integration._id.slice(-4) : `${usedNames.size + 1}`;
    candidate = `${candidate}-${disambiguator}`;
  }

  usedNames.add(candidate);
  return `${candidate}${INTEGRATION_FILE_EXTENSION}`;
}
44
+
45
/**
 * Ensure an integration payload has the minimal shape required by the API.
 * @param {object} data - Parsed integration file contents.
 * @param {string} file - File name, used in error messages.
 * @throws {Error} when trigger/target or their source/operation are missing.
 */
function validateIntegrationPayload(data, file) {
  const hasTrigger = Boolean(data?.trigger);
  const hasTarget = Boolean(data?.target);
  if (!hasTrigger || !hasTarget) {
    throw new Error(`Integration file ${file} must include trigger and target`);
  }

  if (!(data.trigger.source && data.trigger.operation)) {
    throw new Error(`Integration ${file}: trigger must include source and operation`);
  }

  if (!(data.target.source && data.target.operation)) {
    throw new Error(`Integration ${file}: target must include source and operation`);
  }
}
58
+
59
/**
 * Reduce a resolved integration to the fields the API accepts,
 * defaulting dataManipulation to [] and active to false.
 * @param {object} data - Resolved integration data.
 * @returns {object} API request payload.
 */
function toRequestPayload(data) {
  const { trigger, target, dataManipulation, active } = data;
  return {
    trigger,
    target,
    dataManipulation: dataManipulation || [],
    active: active ?? false,
  };
}
67
+
68
/**
 * Serialize an integration as pretty-printed JSON and write it to disk.
 * @param {string} filePath - Destination file path.
 * @param {object} integration - Integration object to persist.
 */
function writeIntegrationFile(filePath, integration) {
  const serialized = JSON.stringify(integration, null, 2);
  fs.writeFileSync(filePath, serialized, 'utf-8');
}
71
+
72
/**
 * Deep-copy an integration and remove server-managed fields so the
 * on-disk representation only contains user-editable data.
 * @param {object} integration - Integration as returned by the API.
 * @returns {object} Cleaned copy safe to write to a local file.
 */
function sanitizeIntegrationForFile(integration) {
  // JSON round-trip is sufficient here: the data originates from a JSON API.
  const copy = JSON.parse(JSON.stringify(integration));
  for (const field of SERVER_ONLY_FIELDS) {
    if (field in copy) {
      delete copy[field];
    }
  }
  return copy;
}
81
+
82
/**
 * Retrieve the full list of integrations from the Qelos API.
 * @param {object} sdk - Qelos SDK instance exposing `callJsonApi`.
 * @returns {Promise<Array>} Integrations as returned by the server.
 */
async function fetchIntegrations(sdk) {
  const integrations = await sdk.callJsonApi(INTEGRATIONS_API_PATH);
  return integrations;
}
85
+
86
/**
 * Create a new integration on the server.
 * @param {object} sdk - Qelos SDK instance exposing `callJsonApi`.
 * @param {object} payload - Request body (see toRequestPayload).
 * @returns {Promise<object>} The created integration from the server.
 */
async function createIntegration(sdk, payload) {
  const requestOptions = {
    method: 'post',
    headers: { 'content-type': 'application/json' },
    body: JSON.stringify(payload),
  };
  return sdk.callJsonApi(INTEGRATIONS_API_PATH, requestOptions);
}
93
+
94
/**
 * Update an existing integration on the server by id.
 * @param {object} sdk - Qelos SDK instance exposing `callJsonApi`.
 * @param {string} id - Server-assigned integration id.
 * @param {object} payload - Request body (see toRequestPayload).
 * @returns {Promise<object>} The updated integration from the server.
 */
async function updateIntegration(sdk, id, payload) {
  const requestOptions = {
    method: 'put',
    headers: { 'content-type': 'application/json' },
    body: JSON.stringify(payload),
  };
  return sdk.callJsonApi(`${INTEGRATIONS_API_PATH}/${id}`, requestOptions);
}
101
+
102
/**
 * Pull every integration from the Qelos instance into a local directory.
 *
 * Writes each integration as a `*.integration.json` file, externalizing
 * embedded content via extractIntegrationContent and stripping
 * server-managed fields before writing.
 *
 * @param {object} sdk - Qelos SDK instance exposing `callJsonApi`.
 * @param {string} targetPath - Directory to write integration files into (created if missing).
 * @throws {Error} when the integrations cannot be fetched.
 */
export async function pullIntegrations(sdk, targetPath) {
  if (!fs.existsSync(targetPath)) {
    fs.mkdirSync(targetPath, { recursive: true });
    logger.info(`Created directory: ${targetPath}`);
  }

  let integrations = [];
  try {
    integrations = await fetchIntegrations(sdk);
  } catch (error) {
    logger.error('Failed to fetch integrations', error);
    throw error;
  }

  if (!Array.isArray(integrations) || integrations.length === 0) {
    logger.warning('No integrations found to pull');
    return;
  }

  logger.info(`Found ${integrations.length} integration(s) to pull`);

  const usedNames = new Set();
  for (const [index, integration] of integrations.entries()) {
    const fileName = buildFileName(integration, index, usedNames);
    const filePath = join(targetPath, fileName);

    // Extract content to files for AI agents
    const processed = extractIntegrationContent(integration, targetPath, fileName);

    writeIntegrationFile(filePath, sanitizeIntegrationForFile(processed));
    logger.step(`Pulled: ${getIntegrationDisplayName(integration) || integration._id || fileName}`);
  }

  logger.info(`Pulled ${integrations.length} integration(s)`);
}
137
+
138
/**
 * Push local integration files to the Qelos instance.
 *
 * Reads every `*.integration.json` file in `path` (or only `targetFile`
 * when provided), resolves `$ref` references, validates the payload, then
 * creates (no `_id`) or updates (existing `_id`) each integration via the
 * API. The server response is written back to disk so newly created
 * integrations keep their assigned `_id`.
 *
 * @param {object} sdk - Qelos SDK instance exposing `callJsonApi`.
 * @param {string} path - Directory containing integration files.
 * @param {object} [options]
 * @param {string} [options.targetFile] - Push only this file (name relative to `path`).
 * @throws {Error} when one or more integrations fail to push.
 */
export async function pushIntegrations(sdk, path, options = {}) {
  const { targetFile } = options;
  // Fix: only scan the directory when no explicit target file was requested;
  // previously readdirSync ran unconditionally, doing needless I/O (and
  // failing needlessly) even when a single file was specified.
  const files = targetFile ? [targetFile] : fs.readdirSync(path);
  const integrationFiles = files.filter((f) => f.endsWith(INTEGRATION_FILE_EXTENSION));

  if (integrationFiles.length === 0) {
    if (targetFile) {
      logger.warning(`File ${targetFile} is not an ${INTEGRATION_FILE_EXTENSION} file. Skipping.`);
    } else {
      logger.warning(`No integration files (*${INTEGRATION_FILE_EXTENSION}) found in ${path}`);
    }
    return;
  }

  logger.info(`Found ${integrationFiles.length} integration(s) to push`);

  const results = [];

  // Push sequentially so log output stays ordered and failures are isolated per file.
  for (const file of integrationFiles) {
    const filePath = join(path, file);
    try {
      const integrationData = JSON.parse(fs.readFileSync(filePath, 'utf-8'));
      validateIntegrationPayload(integrationData, file);

      // Resolve any $ref references in the integration
      const resolvedIntegration = await resolveReferences(integrationData, path);

      const payload = toRequestPayload(resolvedIntegration);
      const displayName = getIntegrationDisplayName(resolvedIntegration) || file.replace(INTEGRATION_FILE_EXTENSION, '');

      logger.step(`Pushing integration: ${displayName}`);

      // An existing _id means the integration already exists server-side.
      let response;
      if (integrationData._id) {
        response = await updateIntegration(sdk, integrationData._id, payload);
        logger.success(`Updated: ${displayName}`);
      } else {
        response = await createIntegration(sdk, payload);
        logger.success(`Created: ${displayName}`);
      }

      // Persist returned integration (with _id) back to disk
      writeIntegrationFile(filePath, sanitizeIntegrationForFile(response));
      results.push({ status: 'fulfilled' });
    } catch (error) {
      logger.error(`Failed to push integration file ${file}`, error);
      results.push({ status: 'rejected', reason: error });
    }
  }

  const failures = results.filter((result) => result.status === 'rejected');
  if (failures.length) {
    logger.error(`\n${failures.length} integration(s) failed to push:`);
    failures.forEach((failure) => {
      logger.error(`  • ${failure.reason?.message || 'Unknown error'}`);
    });
    throw new Error(`Failed to push ${failures.length} integration(s)`);
  }

  logger.info(`Pushed ${integrationFiles.length} integration(s)`);
}
@@ -1,6 +1,7 @@
1
1
  import fs from 'node:fs';
2
2
  import path from 'node:path';
3
3
  import { logger } from './logger.mjs';
4
+ import { loadReference } from './file-refs.mjs';
4
5
 
5
6
  /**
6
7
  * Convert a string to kebab-case
@@ -59,41 +60,6 @@ export function extractMicroFrontendStructures(microFrontends, pluginPath) {
59
60
  });
60
61
  }
61
62
 
62
- /**
63
- * Load content from a $ref reference
64
- * @param {string} ref - Reference path (relative, absolute, or URL)
65
- * @param {string} basePath - Base path for resolving relative references
66
- * @returns {Promise<string>} Loaded content
67
- */
68
- async function loadReference(ref, basePath) {
69
- // Handle HTTP/HTTPS URLs
70
- if (ref.startsWith('http://') || ref.startsWith('https://')) {
71
- logger.debug(`Loading structure from URL: ${ref}`);
72
- try {
73
- const response = await fetch(ref);
74
- if (!response.ok) {
75
- throw new Error(`HTTP ${response.status}: ${response.statusText}`);
76
- }
77
- return await response.text();
78
- } catch (error) {
79
- throw new Error(`Failed to load from URL ${ref}: ${error.message}`);
80
- }
81
- }
82
-
83
- // Handle absolute and relative paths
84
- const filePath = path.isAbsolute(ref)
85
- ? ref
86
- : path.resolve(basePath, ref);
87
-
88
- logger.debug(`Loading structure from file: ${filePath}`);
89
-
90
- if (!fs.existsSync(filePath)) {
91
- throw new Error(`Referenced file does not exist: ${filePath}`);
92
- }
93
-
94
- return fs.readFileSync(filePath, 'utf-8');
95
- }
96
-
97
63
  /**
98
64
  * Resolve micro-frontend structures from $ref references
99
65
  * @param {Array} microFrontends - Array of micro-frontend objects