@qelos/plugins-cli 0.0.19 → 0.0.20

package/README.md CHANGED
@@ -47,7 +47,7 @@ qplay create my-app

  ### Pull

- Pull resources from your Qelos instance to your local filesystem. This allows you to work on components, plugins, integrations, and blueprints locally.
+ Pull resources from your Qelos instance to your local filesystem. This allows you to work on components, blueprints, configs, plugins, blocks, integrations, and connections locally.

  **Syntax:**
  ```bash
@@ -55,7 +55,7 @@ qelos pull <type> <path>
  ```

  **Arguments:**
- - `type` - Type of resource to pull (e.g., `components`, `plugins`, `integrations`, `blueprints`)
+ - `type` - Type of resource to pull (e.g., `components`, `plugins`, `integrations`, `connections`, `blueprints`)
  - `path` - Local directory path where resources will be saved

  **Example - Pull Components:**
@@ -82,7 +82,7 @@ All 5 components pulled to ./my-components

  ### Push

- Push local resources to your Qelos instance. This allows you to update or create components, plugins, integrations, and blueprints from your local filesystem.
+ Push local resources to your Qelos instance. This allows you to update or create components, blueprints, configs, plugins, blocks, integrations, and connections from your local filesystem.

  **Syntax:**
  ```bash
@@ -90,7 +90,7 @@ qelos push <type> <path>
  ```

  **Arguments:**
- - `type` - Type of resource to push (e.g., `components`, `plugins`, `integrations`, `blueprints`)
+ - `type` - Type of resource to push (e.g., `components`, `plugins`, `integrations`, `connections`, `blueprints`)
  - `path` - Local directory path containing the resources to push

  **Example - Push Components:**
@@ -129,6 +129,39 @@ qelos pull components ./local-components
  qelos push components ./local-components
  ```

+ ### Integrations
+
+ - Pulled integrations are stored as `.integration.json` files that exclude server-only fields such as `tenant`, `user`, `created`, `updated`, and `__v`.
+ - When pushing, the CLI automatically recalculates backend-only properties (like `kind`) so you do not need to keep them in local files.
+
+ ### Connections (Integration Sources)
+
+ Connections (integration sources) are now fully supported via `qelos pull connections <path>` and `qelos push connections <path>`.
+
+ - Each connection is stored as `<name>.connection.json` containing `name`, `kind`, `labels`, `metadata`, and an `authentication` placeholder:
+
+ ```json
+ {
+   "_id": "64f1...",
+   "name": "OpenAI",
+   "kind": "openai",
+   "labels": ["ai"],
+   "metadata": { "defaultModel": "gpt-4o" },
+   "authentication": {
+     "$var": "INTEGRATION_AUTH_OPENAI"
+   }
+ }
+ ```
+
+ - At push time, the CLI reads the referenced environment variable (e.g., `INTEGRATION_AUTH_OPENAI`) which must contain a JSON string with the real credentials:
+
+ ```bash
+ export INTEGRATION_AUTH_OPENAI='{"token":"sk-..."}'
+ ```
+
+ - If the env var is missing, the CLI skips updating the secure authentication payload and only syncs metadata.
+ - When new connections are created, the CLI persists the returned `_id` so future pushes update the same record.
+
  ## Help

  View all available commands and options:
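
Taken together, the connection workflow documented in the README section above looks roughly like this (a sketch; the `./connections` directory and the `INTEGRATION_AUTH_OPENAI` name simply mirror the README example):

```bash
# Pull every connection into ./connections as <name>.connection.json files
qelos pull connections ./connections

# Edit labels/metadata locally, then expose the real credentials through the
# env var named by the "$var" placeholder in the file
export INTEGRATION_AUTH_OPENAI='{"token":"sk-..."}'

# Push metadata and (when the env var is set) the authentication payload back
qelos push connections ./connections
```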
package/commands/pull.mjs CHANGED
@@ -8,7 +8,7 @@ export default function pullCommand(program) {
  .positional('type', {
  describe: 'Type of the resource to pull. Can be components, blueprints, configurations, plugins, blocks, or all.',
  type: 'string',
- choices: ['components', 'blueprints', 'configs', 'plugins', 'blocks', 'all', '*'],
+ choices: ['components', 'blueprints', 'configs', 'plugins', 'blocks', 'integrations', 'all', '*'],
  required: true
  })
  .positional('path', {
package/commands/push.mjs CHANGED
@@ -8,7 +8,7 @@ export default function createCommand(program) {
  .positional('type', {
  describe: 'Type of the resource to push. Can be components, blueprints, configurations, plugins, blocks, or all.',
  type: 'string',
- choices: ['components', 'blueprints', 'configs', 'plugins', 'blocks', 'all', '*'],
+ choices: ['components', 'blueprints', 'configs', 'plugins', 'blocks', 'integrations', 'all', '*'],
  required: true
  })
  .positional('path', {
@@ -4,6 +4,8 @@ import { pullBlueprints } from '../services/blueprints.mjs';
  import { pullConfigurations } from '../services/configurations.mjs';
  import { pullPlugins } from '../services/plugins.mjs';
  import { pullBlocks } from '../services/blocks.mjs';
+ import { pullIntegrations } from '../services/integrations.mjs';
+ import { pullConnections } from '../services/connections.mjs';
  import { logger } from '../services/logger.mjs';
  import fs from 'node:fs';
  import path from 'node:path';
@@ -36,7 +38,9 @@ export default async function pullController({ type, path: targetPath = './' })
  { name: 'blueprints', fn: pullBlueprints },
  { name: 'configs', fn: pullConfigurations },
  { name: 'plugins', fn: pullPlugins },
- { name: 'blocks', fn: pullBlocks }
+ { name: 'blocks', fn: pullBlocks },
+ { name: 'integrations', fn: pullIntegrations },
+ { name: 'connections', fn: pullConnections }
  ];

  for (const { name, fn } of types) {
@@ -64,11 +68,15 @@ export default async function pullController({ type, path: targetPath = './' })
  await pullPlugins(sdk, targetPath);
  } else if (type === 'blocks') {
  await pullBlocks(sdk, targetPath);
+ } else if (type === 'integrations' || type === 'integration') {
+ await pullIntegrations(sdk, targetPath);
+ } else if (type === 'connections' || type === 'connection') {
+ await pullConnections(sdk, targetPath);
  } else if (type === 'config' || type === 'configs' || type === 'configuration') {
  await pullConfigurations(sdk, targetPath);
  } else {
  logger.error(`Unknown type: ${type}`);
- logger.info('Supported types: components, blueprints, plugins, blocks, config, configs, configuration, all');
+ logger.info('Supported types: components, blueprints, plugins, blocks, integrations, connections, config, configs, configuration, all');
  process.exit(1);
  }

@@ -4,6 +4,8 @@ import { pushBlueprints } from '../services/blueprints.mjs';
  import { pushConfigurations } from '../services/configurations.mjs';
  import { pushPlugins } from '../services/plugins.mjs';
  import { pushBlocks } from '../services/blocks.mjs';
+ import { pushIntegrations } from '../services/integrations.mjs';
+ import { pushConnections } from '../services/connections.mjs';
  import { logger } from '../services/logger.mjs';
  import fs from 'node:fs';
  import path from 'node:path';
@@ -45,7 +47,9 @@ export default async function pushController({ type, path: sourcePath }) {
  { name: 'blueprints', fn: pushBlueprints },
  { name: 'configs', fn: pushConfigurations },
  { name: 'plugins', fn: pushPlugins },
- { name: 'blocks', fn: pushBlocks }
+ { name: 'blocks', fn: pushBlocks },
+ { name: 'integrations', fn: pushIntegrations },
+ { name: 'connections', fn: pushConnections }
  ];

  for (const { name, fn } of types) {
@@ -80,11 +84,15 @@ export default async function pushController({ type, path: sourcePath }) {
  await pushPlugins(sdk, basePath, { targetFile });
  } else if (type === 'blocks') {
  await pushBlocks(sdk, basePath, { targetFile });
- } else if (type === 'config' || type === 'configs' || type === 'configuration') {
+ } else if (type === 'integrations' || type === 'integration') {
+ await pushIntegrations(sdk, basePath, { targetFile });
+ } else if (type === 'connections' || type === 'connection') {
+ await pushConnections(sdk, basePath, { targetFile });
+ } else if (type === 'config' || type === 'configs' || type === 'configuration') {
  await pushConfigurations(sdk, basePath, { targetFile });
  } else {
  logger.error(`Unknown type: ${type}`);
- logger.info('Supported types: components, blueprints, plugins, blocks, config, configs, configuration, all');
+ logger.info('Supported types: components, blueprints, plugins, blocks, integrations, connections, config, configs, configuration, all');
  process.exit(1);
  }

package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@qelos/plugins-cli",
- "version": "0.0.19",
+ "version": "0.0.20",
  "description": "CLI to manage QELOS plugins",
  "main": "cli.mjs",
  "bin": {
@@ -68,12 +68,12 @@ function buildMarkdown({ blueprint, interfaceName, entityVarName, interfaceDefin
  '',
  '### Create an Entity',
  '```ts',
- `const created = await ${entityVarName}.create(${exampleLiteral});`,
+ `const created = await ${entityVarName}.create({\n metadata: ${indentLiteral(exampleLiteral, 2)},\n});`,
  '```',
  '',
  '### Update an Entity',
  '```ts',
- `const updated = await ${entityVarName}.update('replace-with-entity-id', {\n ...${exampleLiteral.replace(/\n/g, '\n ')},\n});`,
+ `const updated = await ${entityVarName}.update('replace-with-entity-id', {\n metadata: {\n ...oldMetadata,\n ...${indentLiteral(exampleLiteral, 4)},\n },\n});`,
  '```',
  '',
  '### Delete an Entity',
@@ -219,6 +219,11 @@ function stringifyObjectLiteral(value, level = 0) {
  return String(value);
  }

+ function indentLiteral(literal, spaces) {
+ const indent = ' '.repeat(spaces);
+ return literal.replace(/\n/g, `\n${indent}`);
+ }
+
  function toCamelCase(value) {
  return (value || '')
  .replace(/[-_\s]+(.)?/g, (_, chr) => (chr ? chr.toUpperCase() : ''))
package/services/connections.mjs ADDED
@@ -0,0 +1,239 @@
+ import fs from 'node:fs';
+ import path from 'node:path';
+ import { logger } from './logger.mjs';
+
+ const CONNECTION_FILE_EXTENSION = '.connection.json';
+ const AUTH_PLACEHOLDER_KEY = '$var';
+
+ function slugify(value = '') {
+ return value
+ .toString()
+ .trim()
+ .toLowerCase()
+ .replace(/[^a-z0-9]+/g, '-')
+ .replace(/^-+|-+$/g, '');
+ }
+
+ function ensureDirectory(dirPath) {
+ if (!fs.existsSync(dirPath)) {
+ fs.mkdirSync(dirPath, { recursive: true });
+ logger.info(`Created directory: ${dirPath}`);
+ }
+ }
+
+ function getDefaultAuthEnvVar(connection) {
+ if (connection?.authentication?.[AUTH_PLACEHOLDER_KEY]) {
+ return connection.authentication[AUTH_PLACEHOLDER_KEY];
+ }
+
+ if (connection?._id) {
+ return `INTEGRATION_AUTH_${connection._id}`;
+ }
+
+ const slug = slugify(connection?.name || 'connection').replace(/-/g, '_').toUpperCase();
+ const suffix = slug ? slug.slice(-6) : Math.random().toString(36).slice(-6).toUpperCase();
+ return `INTEGRATION_AUTH_${suffix || 'NEW'}`;
+ }
+
+ function buildFileName(connection, index, usedNames) {
+ const base =
+ slugify(connection?.name) ||
+ (connection?._id ? slugify(connection._id) : '') ||
+ `connection-${index + 1}`;
+
+ let fileName = base;
+ while (usedNames.has(fileName)) {
+ fileName = `${base}-${usedNames.size + 1}`;
+ }
+ usedNames.add(fileName);
+ return `${fileName}${CONNECTION_FILE_EXTENSION}`;
+ }
+
+ function sanitizeConnectionForFile(connection) {
+ return {
+ _id: connection._id,
+ name: connection.name,
+ kind: connection.kind,
+ labels: connection.labels || [],
+ metadata: connection.metadata || {},
+ authentication: {
+ [AUTH_PLACEHOLDER_KEY]: getDefaultAuthEnvVar(connection),
+ },
+ };
+ }
+
+ function writeConnectionFile(filePath, connection) {
+ fs.writeFileSync(filePath, JSON.stringify(connection, null, 2), 'utf-8');
+ }
+
+ function validateConnectionPayload(data, fileName) {
+ if (!data?.name) {
+ throw new Error(`Connection file ${fileName} must include a name`);
+ }
+
+ if (!data?.kind) {
+ throw new Error(`Connection file ${fileName} must include a kind`);
+ }
+
+ if (!data?.metadata || typeof data.metadata !== 'object') {
+ throw new Error(`Connection file ${fileName} must include metadata object`);
+ }
+
+ if (data.labels && !Array.isArray(data.labels)) {
+ throw new Error(`Connection file ${fileName} has invalid labels (must be an array)`);
+ }
+ }
+
+ function extractAuthenticationPayload(connectionFile, fileName) {
+ const authField = connectionFile.authentication;
+
+ if (!authField || typeof authField !== 'object') {
+ return { payload: undefined, envVar: null };
+ }
+
+ const envVarName = authField[AUTH_PLACEHOLDER_KEY];
+
+ if (!envVarName) {
+ throw new Error(
+ `Connection file ${fileName} must define authentication as { "${AUTH_PLACEHOLDER_KEY}": "INTEGRATION_AUTH_..." }`
+ );
+ }
+
+ const raw = process.env[envVarName];
+
+ if (!raw) {
+ if (!connectionFile._id) {
+ throw new Error(
+ `Environment variable ${envVarName} is required to create connection defined in ${fileName}`
+ );
+ }
+
+ logger.info(
+ `Skipping authentication update for ${connectionFile.name} (env ${envVarName} not set)`
+ );
+ return { payload: undefined, envVar: envVarName };
+ }
+
+ try {
+ const parsed = JSON.parse(raw);
+ if (typeof parsed !== 'object' || !parsed) {
+ throw new Error('Authentication env must contain a JSON object');
+ }
+ return { payload: parsed, envVar: envVarName };
+ } catch (error) {
+ throw new Error(
+ `Failed to parse JSON from env ${envVarName} for connection ${fileName}: ${error.message}`
+ );
+ }
+ }
+
+ export async function pullConnections(sdk, targetPath) {
+ ensureDirectory(targetPath);
+
+ let connections = [];
+ try {
+ connections = await sdk.integrationSources.getList();
+ } catch (error) {
+ logger.error('Failed to fetch integration sources', error);
+ throw error;
+ }
+
+ if (!Array.isArray(connections) || connections.length === 0) {
+ logger.warning('No connections found to pull');
+ return;
+ }
+
+ logger.info(`Found ${connections.length} connection(s) to pull`);
+ const usedNames = new Set();
+
+ connections.forEach((connection, index) => {
+ const fileName = buildFileName(connection, index, usedNames);
+ const filePath = path.join(targetPath, fileName);
+ writeConnectionFile(filePath, sanitizeConnectionForFile(connection));
+ logger.step(`Pulled connection: ${connection.name || connection._id}`);
+ });
+
+ logger.info(`Pulled ${connections.length} connection(s)`);
+ }
+
+ export async function pushConnections(sdk, sourcePath, options = {}) {
+ const { targetFile } = options;
+ const directoryFiles = fs.readdirSync(sourcePath);
+ const files = targetFile ? [targetFile] : directoryFiles;
+ const connectionFiles = files.filter((file) => file.endsWith(CONNECTION_FILE_EXTENSION));
+
+ if (connectionFiles.length === 0) {
+ if (targetFile) {
+ logger.warning(
+ `File ${targetFile} is not a ${CONNECTION_FILE_EXTENSION} connection file. Skipping.`
+ );
+ } else {
+ logger.warning(`No connection files (*${CONNECTION_FILE_EXTENSION}) found in ${sourcePath}`);
+ }
+ return;
+ }
+
+ logger.info(`Found ${connectionFiles.length} connection(s) to push`);
+
+ const results = [];
+
+ for (const file of connectionFiles) {
+ const filePath = path.join(sourcePath, file);
+
+ try {
+ const connectionData = JSON.parse(fs.readFileSync(filePath, 'utf-8'));
+ validateConnectionPayload(connectionData, file);
+
+ const { payload: authentication, envVar } = extractAuthenticationPayload(connectionData, file);
+
+ const requestBody = {
+ name: connectionData.name,
+ kind: connectionData.kind,
+ labels: connectionData.labels || [],
+ metadata: connectionData.metadata || {},
+ ...(authentication ? { authentication } : {}),
+ };
+
+ logger.step(`Pushing connection: ${connectionData.name}`);
+ let response;
+
+ if (connectionData._id) {
+ response = await sdk.integrationSources.update(connectionData._id, requestBody);
+ logger.success(`Updated connection: ${connectionData.name}`);
+ } else {
+ response = await sdk.integrationSources.create(requestBody);
+ logger.success(`Created connection: ${connectionData.name}`);
+ }
+
+ const authPlaceholder = connectionData.authentication?.[AUTH_PLACEHOLDER_KEY]
+ ? connectionData.authentication
+ : { [AUTH_PLACEHOLDER_KEY]: envVar || getDefaultAuthEnvVar(response) };
+
+ const fileContent = {
+ _id: response._id,
+ name: response.name,
+ kind: response.kind,
+ labels: response.labels || [],
+ metadata: response.metadata || {},
+ authentication: authPlaceholder,
+ };
+
+ writeConnectionFile(filePath, fileContent);
+ results.push({ status: 'fulfilled' });
+ } catch (error) {
+ logger.error(`Failed to push connection file ${file}`, error);
+ results.push({ status: 'rejected', reason: error });
+ }
+ }
+
+ const failures = results.filter((result) => result.status === 'rejected');
+ if (failures.length) {
+ logger.error(`\n${failures.length} connection(s) failed to push:`);
+ failures.forEach((failure) => {
+ logger.error(` • ${failure.reason?.message || 'Unknown error'}`);
+ });
+ throw new Error(`Failed to push ${failures.length} connection(s)`);
+ }
+
+ logger.info(`Pushed ${connectionFiles.length} connection(s)`);
+ }
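
A brief sketch of the fallback behavior implemented by `extractAuthenticationPayload` above (file and env var names are illustrative):

```bash
# my-openai.connection.json has "_id" set and
# "authentication": { "$var": "INTEGRATION_AUTH_OPENAI" }

# Env var unset: the existing connection is still pushed, but only
# name/kind/labels/metadata are sent; the stored credentials stay untouched.
qelos push connections ./connections

# Env var set to a JSON object: the parsed authentication payload is included.
export INTEGRATION_AUTH_OPENAI='{"token":"sk-..."}'
qelos push connections ./connections
```

Creating a new connection (a file without `_id`) requires the env var to be set; otherwise the push for that file fails with an error.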
package/services/integrations.mjs ADDED
@@ -0,0 +1,190 @@
+ import fs from 'node:fs';
+ import { join } from 'node:path';
+ import { logger } from './logger.mjs';
+
+ const INTEGRATION_FILE_EXTENSION = '.integration.json';
+ const INTEGRATIONS_API_PATH = '/api/integrations';
+ const SERVER_ONLY_FIELDS = ['tenant', 'plugin', 'user', 'created', 'updated', '__v'];
+
+ function slugify(value = '') {
+ return value
+ .toString()
+ .trim()
+ .toLowerCase()
+ .replace(/[^a-z0-9]+/g, '-')
+ .replace(/^-+|-+$/g, '');
+ }
+
+ function getIntegrationDisplayName(integration) {
+ return (
+ integration?.trigger?.details?.name ||
+ integration?.target?.details?.name ||
+ integration?._id ||
+ ''
+ );
+ }
+
+ function buildFileName(integration, index, usedNames) {
+ const preferred =
+ slugify(getIntegrationDisplayName(integration)) ||
+ slugify(integration?._id) ||
+ '';
+
+ let baseName = preferred || `integration-${index + 1}`;
+
+ while (usedNames.has(baseName)) {
+ const suffix = integration?._id ? integration._id.slice(-4) : `${usedNames.size + 1}`;
+ baseName = `${baseName}-${suffix}`;
+ }
+
+ usedNames.add(baseName);
+ return `${baseName}${INTEGRATION_FILE_EXTENSION}`;
+ }
+
+ function validateIntegrationPayload(data, file) {
+ if (!data?.trigger || !data?.target) {
+ throw new Error(`Integration file ${file} must include trigger and target`);
+ }
+
+ if (!data.trigger.source || !data.trigger.operation) {
+ throw new Error(`Integration ${file}: trigger must include source and operation`);
+ }
+
+ if (!data.target.source || !data.target.operation) {
+ throw new Error(`Integration ${file}: target must include source and operation`);
+ }
+ }
+
+ function toRequestPayload(data) {
+ return {
+ trigger: data.trigger,
+ target: data.target,
+ dataManipulation: data.dataManipulation || [],
+ active: data.active ?? false,
+ };
+ }
+
+ function writeIntegrationFile(filePath, integration) {
+ fs.writeFileSync(filePath, JSON.stringify(integration, null, 2), 'utf-8');
+ }
+
+ function sanitizeIntegrationForFile(integration) {
+ const sanitized = JSON.parse(JSON.stringify(integration));
+ SERVER_ONLY_FIELDS.forEach((field) => {
+ if (field in sanitized) {
+ delete sanitized[field];
+ }
+ });
+ return sanitized;
+ }
+
+ async function fetchIntegrations(sdk) {
+ return sdk.callJsonApi(INTEGRATIONS_API_PATH);
+ }
+
+ async function createIntegration(sdk, payload) {
+ return sdk.callJsonApi(INTEGRATIONS_API_PATH, {
+ method: 'post',
+ headers: { 'content-type': 'application/json' },
+ body: JSON.stringify(payload),
+ });
+ }
+
+ async function updateIntegration(sdk, id, payload) {
+ return sdk.callJsonApi(`${INTEGRATIONS_API_PATH}/${id}`, {
+ method: 'put',
+ headers: { 'content-type': 'application/json' },
+ body: JSON.stringify(payload),
+ });
+ }
+
+ export async function pullIntegrations(sdk, targetPath) {
+ if (!fs.existsSync(targetPath)) {
+ fs.mkdirSync(targetPath, { recursive: true });
+ logger.info(`Created directory: ${targetPath}`);
+ }
+
+ let integrations = [];
+ try {
+ integrations = await fetchIntegrations(sdk);
+ } catch (error) {
+ logger.error('Failed to fetch integrations', error);
+ throw error;
+ }
+
+ if (!Array.isArray(integrations) || integrations.length === 0) {
+ logger.warning('No integrations found to pull');
+ return;
+ }
+
+ logger.info(`Found ${integrations.length} integration(s) to pull`);
+
+ const usedNames = new Set();
+ integrations.forEach((integration, index) => {
+ const fileName = buildFileName(integration, index, usedNames);
+ const filePath = join(targetPath, fileName);
+ writeIntegrationFile(filePath, sanitizeIntegrationForFile(integration));
+ logger.step(`Pulled: ${getIntegrationDisplayName(integration) || integration._id || fileName}`);
+ });
+
+ logger.info(`Pulled ${integrations.length} integration(s)`);
+ }
+
+ export async function pushIntegrations(sdk, path, options = {}) {
+ const { targetFile } = options;
+ const directoryFiles = fs.readdirSync(path);
+ const files = targetFile ? [targetFile] : directoryFiles;
+ const integrationFiles = files.filter((f) => f.endsWith(INTEGRATION_FILE_EXTENSION));
+
+ if (integrationFiles.length === 0) {
+ if (targetFile) {
+ logger.warning(`File ${targetFile} is not an ${INTEGRATION_FILE_EXTENSION} file. Skipping.`);
+ } else {
+ logger.warning(`No integration files (*${INTEGRATION_FILE_EXTENSION}) found in ${path}`);
+ }
+ return;
+ }
+
+ logger.info(`Found ${integrationFiles.length} integration(s) to push`);
+
+ const results = [];
+
+ for (const file of integrationFiles) {
+ const filePath = join(path, file);
+ try {
+ const integrationData = JSON.parse(fs.readFileSync(filePath, 'utf-8'));
+ validateIntegrationPayload(integrationData, file);
+ const payload = toRequestPayload(integrationData);
+ const displayName = getIntegrationDisplayName(integrationData) || file.replace(INTEGRATION_FILE_EXTENSION, '');
+
+ logger.step(`Pushing integration: ${displayName}`);
+
+ let response;
+ if (integrationData._id) {
+ response = await updateIntegration(sdk, integrationData._id, payload);
+ logger.success(`Updated: ${displayName}`);
+ } else {
+ response = await createIntegration(sdk, payload);
+ logger.success(`Created: ${displayName}`);
+ }
+
+ // Persist returned integration (with _id) back to disk
+ writeIntegrationFile(filePath, sanitizeIntegrationForFile(response));
+ results.push({ status: 'fulfilled' });
+ } catch (error) {
+ logger.error(`Failed to push integration file ${file}`, error);
+ results.push({ status: 'rejected', reason: error });
+ }
+ }
+
+ const failures = results.filter((result) => result.status === 'rejected');
+ if (failures.length) {
+ logger.error(`\n${failures.length} integration(s) failed to push:`);
+ failures.forEach((failure) => {
+ logger.error(` • ${failure.reason?.message || 'Unknown error'}`);
+ });
+ throw new Error(`Failed to push ${failures.length} integration(s)`);
+ }
+
+ logger.info(`Pushed ${integrationFiles.length} integration(s)`);
+ }
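
For reference, a minimal `.integration.json` that satisfies `validateIntegrationPayload` above only needs `trigger` and `target` objects, each with `source` and `operation`; every value below is an illustrative placeholder:

```json
{
  "trigger": {
    "source": "<connection-or-source-id>",
    "operation": "<trigger-operation>",
    "details": { "name": "my-integration" }
  },
  "target": {
    "source": "<connection-or-source-id>",
    "operation": "<target-operation>"
  },
  "dataManipulation": [],
  "active": false
}
```

A file without `_id` is created via POST to `/api/integrations`; after a successful push the CLI writes the server response (including the new `_id`, minus the server-only fields) back to the same file, so later pushes update the existing record.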