@qelos/plugins-cli 0.0.21 → 0.0.23

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/commands/push.mjs CHANGED
@@ -1,14 +1,14 @@
  import pushController from "../controllers/push.mjs";

- export default function createCommand(program) {
+ export default function pushCommand(program) {
    program
-     .command('push [type] [path]', 'push to qelos app. Ability to push components, blueprints, configurations, plugins, blocks, and more.',
+     .command('push [type] [path]', 'push to qelos app. Ability to push components, blueprints, configurations, plugins, blocks, committed files, or staged files.',
        (yargs) => {
          return yargs
            .positional('type', {
-             describe: 'Type of the resource to push. Can be components, blueprints, configurations, plugins, blocks, or all.',
+             describe: 'Type of the resource to push. Can be components, blueprints, configurations, plugins, blocks, integrations, connections, committed, staged, or all.',
              type: 'string',
-             choices: ['components', 'blueprints', 'configs', 'plugins', 'blocks', 'integrations', 'connections', 'all', '*'],
+             choices: ['components', 'blueprints', 'configs', 'plugins', 'blocks', 'integrations', 'connections', 'committed', 'staged', 'all', '*'],
              required: true
            })
            .positional('path', {
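The two new positional choices feed straight into the controller shown in the next hunk. As a rough sketch, this is the equivalent programmatic call (only the { type, path } argument shape comes from this diff; calling the controller directly, outside the yargs wiring, is hypothetical):

  import pushController from '../controllers/push.mjs';

  // Hypothetical direct call; the CLI normally builds this object from the yargs positionals.
  await pushController({ type: 'staged', path: '.' });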
@@ -6,12 +6,95 @@ import { pushPlugins } from '../services/plugins.mjs';
  import { pushBlocks } from '../services/blocks.mjs';
  import { pushIntegrations } from '../services/integrations.mjs';
  import { pushConnections } from '../services/connections.mjs';
+ import { getGitFiles, prepareTempDirectories } from '../services/git-files.mjs';
  import { logger } from '../services/logger.mjs';
  import fs from 'node:fs';
  import path from 'node:path';
+ import { mkdtemp } from 'node:fs/promises';
+ import { tmpdir } from 'node:os';

  export default async function pushController({ type, path: sourcePath }) {
+   let tempDir = null;
+
    try {
+     // Handle git-based types (committed and staged)
+     if (type === 'committed' || type === 'staged') {
+       // Validate path exists and is a directory
+       if (!fs.existsSync(sourcePath)) {
+         logger.error(`Path does not exist: ${sourcePath}`);
+         logger.info('Please provide a valid directory path');
+         process.exit(1);
+       }
+
+       const stat = fs.statSync(sourcePath);
+       if (!stat.isDirectory()) {
+         logger.error(`For ${type} files, path must be a directory: ${sourcePath}`);
+         process.exit(1);
+       }
+
+       // Get and classify files from git
+       const classifiedFiles = getGitFiles(type, sourcePath);
+
+       // Check if we have any files to push
+       const hasFiles = Object.values(classifiedFiles).some(files => files.length > 0);
+       if (!hasFiles) {
+         logger.info(`No ${type} files to push`);
+         return;
+       }
+
+       // Create temporary directory
+       tempDir = await mkdtemp(path.join(tmpdir(), 'qelos-push-'));
+
+       // Prepare temporary directories and copy files
+       const tempPaths = prepareTempDirectories(classifiedFiles, tempDir);
+
+       logger.section(`Pushing ${type} files from ${sourcePath}`);
+
+       const sdk = await initializeSdk();
+
+       // Push each type of file
+       const types = [
+         { name: 'components', fn: pushComponents },
+         { name: 'blueprints', fn: pushBlueprints },
+         { name: 'configs', fn: pushConfigurations },
+         { name: 'plugins', fn: pushPlugins },
+         { name: 'blocks', fn: pushBlocks },
+         { name: 'integrations', fn: pushIntegrations },
+         { name: 'connections', fn: pushConnections }
+       ];
+
+       for (const { name, fn } of types) {
+         if (!tempPaths[name]) continue;
+
+         logger.section(`Pushing ${name} (${classifiedFiles[name].length} file(s))`);
+
+         // Show the actual files being pushed
+         classifiedFiles[name].forEach(file => {
+           logger.step(`→ ${path.relative(sourcePath, file)}`);
+         });
+
+         try {
+           await fn(sdk, tempPaths[name]);
+           logger.success(`Successfully pushed ${name}`);
+         } catch (error) {
+           logger.error(`Failed to push ${name}`, error);
+         }
+       }
+
+       // Handle special cases (prompts and micro-frontends)
+       if (classifiedFiles.prompts.length > 0) {
+         logger.info(`Found ${classifiedFiles.prompts.length} prompt file(s) that will be pushed via their parent integrations`);
+       }
+
+       if (classifiedFiles.microFrontends.length > 0) {
+         logger.info(`Found ${classifiedFiles.microFrontends.length} micro-frontend HTML file(s) that will be pushed via their parent plugins`);
+       }
+
+       logger.success(`Successfully pushed ${type} files`);
+       return;
+     }
+
+     // Original logic for other types
      // Validate path exists
      if (!fs.existsSync(sourcePath)) {
        logger.error(`Path does not exist: ${sourcePath}`);
@@ -92,7 +175,7 @@ export default async function pushController({ type, path: sourcePath }) {
        await pushConfigurations(sdk, basePath, { targetFile });
      } else {
        logger.error(`Unknown type: ${type}`);
-       logger.info('Supported types: components, blueprints, plugins, blocks, integrations, connections, config, configs, configuration, all');
+       logger.info('Supported types: components, blueprints, plugins, blocks, integrations, connections, config, configs, configuration, committed, staged, all');
        process.exit(1);
      }

@@ -112,5 +195,15 @@ export default async function pushController({ type, path: sourcePath }) {
      }

      process.exit(1);
+   } finally {
+     // Clean up temporary directory if it was created
+     if (tempDir && fs.existsSync(tempDir)) {
+       try {
+         fs.rmSync(tempDir, { recursive: true, force: true });
+         logger.debug(`Cleaned up temporary directory: ${tempDir}`);
+       } catch (error) {
+         logger.warning(`Failed to clean up temporary directory: ${tempDir}`, error);
+       }
+     }
    }
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@qelos/plugins-cli",
-   "version": "0.0.21",
+   "version": "0.0.23",
    "description": "CLI to manage QELOS plugins",
    "main": "cli.mjs",
    "bin": {
@@ -134,26 +134,33 @@ export function extractIntegrationContent(integration, integrationPath, fileName

    // Extract the first pre_message content
    const preMessage = updatedIntegration.target.details.pre_messages[0];
-   if (preMessage && preMessage.content && typeof preMessage.content === 'string') {
-     const content = preMessage.content;
-
-     // Skip if content is empty or just whitespace
-     if (!content.trim()) {
+   if (preMessage && preMessage.content) {
+     // Check if content is already a $ref
+     if (typeof preMessage.content === 'object' && preMessage.content.$ref) {
        return updatedIntegration;
      }
+
+     // Only extract if content is a string
+     if (typeof preMessage.content === 'string') {
+       const content = preMessage.content;
+
+       // Skip if content is empty or just whitespace
+       if (!content.trim()) {
+         return updatedIntegration;
+       }

-     // Generate filename
-     const baseName = path.basename(fileName, '.integration.json');
-     const mdFileName = `${baseName}.md`;
-     const mdFilePath = path.join(extractDir, mdFileName);
-     const relativeRef = `./prompts/${mdFileName}`;
+       // Generate filename
+       const baseName = path.basename(fileName, '.integration.json');
+       const mdFileName = `${baseName}.md`;
+       const mdFilePath = path.join(extractDir, mdFileName);
+       const relativeRef = `./prompts/${mdFileName}`;

-     // Write content to file
-     fs.writeFileSync(mdFilePath, content, 'utf-8');
-     logger.debug(`Extracted pre_message content to: ${relativeRef}`);
+       // Write content to file
+       fs.writeFileSync(mdFilePath, content, 'utf-8');

-     // Replace with $ref
-     updatedIntegration.target.details.pre_messages[0].content = { $ref: relativeRef };
+       // Replace with $ref
+       updatedIntegration.target.details.pre_messages[0].content = { $ref: relativeRef };
+     }
    }

    return updatedIntegration;
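To make the new guards concrete, here is a loose before/after sketch of the shape this function manages (only target.details.pre_messages[0].content and the $ref replacement are taken from the diff; the file name my-bot.integration.json and the prompt text are made up):

  // my-bot.integration.json — before extraction, content is an inline string
  "target": { "details": { "pre_messages": [ { "content": "You are a helpful assistant…" } ] } }

  // after extraction — the text now lives in ./prompts/my-bot.md, and the new early return
  // skips files whose content is already a { "$ref": … } object
  "target": { "details": { "pre_messages": [ { "content": { "$ref": "./prompts/my-bot.md" } } ] } }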
@@ -0,0 +1,288 @@
+ import { execSync } from 'node:child_process';
+ import { logger } from './logger.mjs';
+ import path from 'node:path';
+ import fs from 'node:fs';
+
+ /**
+  * Get the list of files committed in the last commit
+  * @returns {string[]} Array of file paths
+  */
+ function getCommittedFiles() {
+   try {
+     const output = execSync('git diff-tree --no-commit-id --name-only -r HEAD', { encoding: 'utf-8' });
+     return output.trim().split('\n').filter(file => file);
+   } catch (error) {
+     logger.error('Failed to get committed files', error);
+     throw new Error('Unable to retrieve committed files from git');
+   }
+ }
+
+ /**
+  * Get the list of staged files
+  * @returns {string[]} Array of file paths
+  */
+ function getStagedFiles() {
+   try {
+     const output = execSync('git diff --cached --name-only', { encoding: 'utf-8' });
+     return output.trim().split('\n').filter(file => file);
+   } catch (error) {
+     logger.error('Failed to get staged files', error);
+     throw new Error('Unable to retrieve staged files from git');
+   }
+ }
+
+ /**
+  * Find integration files that reference a specific file via $ref
+  * @param {string} refPath - The referenced file path (relative)
+  * @param {string} basePath - Base path to search for integrations
+  * @returns {string[]} Array of integration file paths that reference the file
+  */
+ function findReferencingIntegrations(refPath, basePath) {
+   const referencingIntegrations = [];
+   const integrationsDir = path.join(basePath, 'integrations');
+
+   if (!fs.existsSync(integrationsDir)) {
+     return referencingIntegrations;
+   }
+
+   const integrationFiles = fs.readdirSync(integrationsDir)
+     .filter(file => file.endsWith('.integration.json'));
+
+   for (const file of integrationFiles) {
+     const filePath = path.join(integrationsDir, file);
+     try {
+       const content = fs.readFileSync(filePath, 'utf-8');
+       const integration = JSON.parse(content);
+
+       // Check all $ref references in the integration
+       const refs = findAllRefs(integration);
+
+       // Check if any ref matches our target path
+       // Normalize paths for comparison (handle ./ and different separators)
+       const normalizedRefPath = refPath.replace(/^\.\//, '').replace(/\\/g, '/');
+
+       if (refs.some(ref => {
+         const normalizedRef = ref.replace(/^\.\//, '').replace(/\\/g, '/');
+         return normalizedRef === normalizedRefPath;
+       })) {
+         referencingIntegrations.push(filePath);
+       }
+     } catch (error) {
+       logger.debug(`Failed to parse integration ${file}: ${error.message}`);
+     }
+   }
+
+   return referencingIntegrations;
+ }
+
+ /**
+  * Recursively find all $ref objects in an object
+  * @param {any} obj - Object to search
+  * @param {Array} refs - Array to collect found references (used internally)
+  * @returns {Array} Array of found $ref paths
+  */
+ function findAllRefs(obj, refs = []) {
+   if (Array.isArray(obj)) {
+     obj.forEach(item => findAllRefs(item, refs));
+   } else if (obj && typeof obj === 'object') {
+     if (obj.$ref && typeof obj.$ref === 'string') {
+       refs.push(obj.$ref);
+     } else {
+       Object.values(obj).forEach(value => findAllRefs(value, refs));
+     }
+   }
+   return refs;
+ }
+
+ /**
+  * Classify files by their type based on their location and extension
+  * @param {string[]} files - Array of file paths
+  * @param {string} basePath - Base path to resolve relative paths from
+  * @returns {Object} Object with file paths grouped by type
+  */
+ function classifyFiles(files, basePath) {
+   const classified = {
+     components: [],
+     blueprints: [],
+     configs: [],
+     plugins: [],
+     blocks: [],
+     integrations: [],
+     connections: [],
+     prompts: [], // For .md files in prompts directories
+     microFrontends: [] // For .html files
+   };
+
+   for (const file of files) {
+     // Make sure the file exists
+     const fullPath = path.resolve(basePath, file);
+     if (!fs.existsSync(fullPath)) {
+       logger.warning(`File not found, skipping: ${file}`);
+       continue;
+     }
+
+     const relativePath = path.relative(basePath, fullPath);
+     const dir = path.dirname(relativePath);
+     const ext = path.extname(fullPath);
+     const basename = path.basename(fullPath, ext);
+
+     // Check for specific file types
+     if (relativePath.includes('components/') && ext === '.vue') {
+       classified.components.push(fullPath);
+     } else if (relativePath.includes('blueprints/') && ext === '.json') {
+       classified.blueprints.push(fullPath);
+     } else if (relativePath.includes('configs/') && ext === '.json') {
+       classified.configs.push(fullPath);
+     } else if (relativePath.includes('plugins/') && ext === '.json') {
+       classified.plugins.push(fullPath);
+     } else if (relativePath.includes('blocks/') && ext === '.json') {
+       classified.blocks.push(fullPath);
+     } else if (relativePath.includes('integrations/') && ext === '.json') {
+       classified.integrations.push(fullPath);
+     } else if (relativePath.includes('connections/') && ext === '.json') {
+       classified.connections.push(fullPath);
+     } else if (dir.includes('prompts') && ext === '.md') {
+       // Find integrations that reference this prompt file
+       classified.prompts.push(fullPath);
+
+       // The ref path should be relative to the integrations directory
+       // If file is integrations/prompts/file.md, ref should be ./prompts/file.md
+       const refPath = './' + path.relative('integrations', relativePath);
+
+       const referencingIntegrations = findReferencingIntegrations(refPath, basePath);
+
+       // Add the referencing integrations to the integrations list
+       for (const integrationPath of referencingIntegrations) {
+         if (!classified.integrations.includes(integrationPath)) {
+           classified.integrations.push(integrationPath);
+           logger.debug(`Found integration referencing ${relativePath}: ${path.basename(integrationPath)}`);
+         }
+       }
+     } else if (ext === '.html') {
+       // Find plugins that contain this HTML file (micro-frontends)
+       classified.microFrontends.push(fullPath);
+
+       // For HTML files, we need to find which plugin contains them
+       // HTML files in plugins are typically part of the plugin structure
+       const pluginDir = path.dirname(fullPath);
+       const pluginJson = path.join(pluginDir, 'plugin.json');
+
+       if (fs.existsSync(pluginJson)) {
+         // This HTML file is part of a plugin
+         if (!classified.plugins.includes(pluginJson)) {
+           classified.plugins.push(pluginJson);
+           logger.debug(`Found plugin containing HTML ${relativePath}: ${path.basename(pluginJson)}`);
+         }
+       }
+     } else {
+       logger.debug(`Unclassified file: ${relativePath}`);
+     }
+   }
+
+   return classified;
+ }
+
+ /**
+  * Get files from git (committed or staged) and classify them
+  * @param {string} type - 'committed' or 'staged'
+  * @param {string} basePath - Base path to resolve files from
+  * @returns {Object} Classified files object
+  */
+ export function getGitFiles(type, basePath) {
+   if (type !== 'committed' && type !== 'staged') {
+     throw new Error('Type must be either "committed" or "staged"');
+   }
+
+   const files = type === 'committed' ? getCommittedFiles() : getStagedFiles();
+
+   if (files.length === 0) {
+     logger.info(`No ${type} files found`);
+     return {};
+   }
+
+   logger.info(`Found ${files.length} ${type} file(s)`);
+   const classified = classifyFiles(files, basePath);
+
+   // Log what we found
+   Object.entries(classified).forEach(([key, value]) => {
+     if (value.length > 0) {
+       if (key === 'prompts' || key === 'microFrontends') {
+         logger.info(` ${key}: ${value.length} file(s) (will be pushed via parent)`);
+       } else {
+         logger.info(` ${key}: ${value.length} file(s)`);
+       }
+     }
+   });
+
+   return classified;
+ }
+
+ /**
+  * Create temporary directories for each type and copy files
+  * @param {Object} classifiedFiles - Object with classified file paths
+  * @param {string} tempDir - Temporary directory base path
+  * @returns {Object} Object with paths to temporary directories
+  */
+ export function prepareTempDirectories(classifiedFiles, tempDir) {
+   const tempPaths = {};
+   const copiedRefs = new Set(); // Track which ref files have been copied
+
+   // Create temp directory structure
+   fs.mkdirSync(tempDir, { recursive: true });
+
+   for (const [type, files] of Object.entries(classifiedFiles)) {
+     if (files.length === 0) continue;
+
+     // Skip prompts and microFrontends as they are handled by their parents
+     if (type === 'prompts' || type === 'microFrontends') continue;
+
+     const typeDir = path.join(tempDir, type);
+     fs.mkdirSync(typeDir, { recursive: true });
+     tempPaths[type] = typeDir;
+
+     // Use a Set to avoid duplicate files
+     const uniqueFiles = [...new Set(files)];
+
+     // Copy files to temp directory
+     for (const file of uniqueFiles) {
+       const dest = path.join(typeDir, path.basename(file));
+       fs.copyFileSync(file, dest);
+       logger.debug(`Copied ${file} to ${dest}`);
+
+       // If this is an integration, check for $ref files and copy them too
+       if (type === 'integrations' && file.endsWith('.integration.json')) {
+         try {
+           const content = fs.readFileSync(dest, 'utf-8');
+           const integration = JSON.parse(content);
+           const refs = findAllRefs(integration);
+
+           for (const ref of refs) {
+             if (copiedRefs.has(ref)) continue;
+
+             // Resolve the ref path relative to the original file location
+             const originalDir = path.dirname(file);
+             const refSourcePath = path.resolve(originalDir, ref);
+
+             if (fs.existsSync(refSourcePath)) {
+               // Create the same directory structure in temp
+               // The ref is relative to the integration file, so we need to copy it to the same relative path
+               const refDestPath = path.join(tempDir, 'integrations', ref);
+               const refDestDir = path.dirname(refDestPath);
+
+               fs.mkdirSync(refDestDir, { recursive: true });
+               fs.copyFileSync(refSourcePath, refDestPath);
+               copiedRefs.add(ref);
+               logger.debug(`Copied referenced file ${ref} from ${refSourcePath} to ${refDestPath}`);
+             } else {
+               logger.debug(`Referenced file not found: ${refSourcePath}`);
+             }
+           }
+         } catch (error) {
+           logger.debug(`Failed to process refs for ${path.basename(file)}: ${error.message}`);
+         }
+       }
+     }
+   }
+
+   return tempPaths;
+ }
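Taken together, the two exports are meant to be consumed the way the controller above does. A minimal usage sketch (the 'staged' type and the qelos-push- temp prefix come from the diff; the standalone-script framing is an assumption):

  import { mkdtemp } from 'node:fs/promises';
  import { tmpdir } from 'node:os';
  import path from 'node:path';
  import { getGitFiles, prepareTempDirectories } from './git-files.mjs';

  // Classify whatever is currently staged in the repo rooted at the current directory
  const classified = getGitFiles('staged', process.cwd());

  // Mirror the classified files (plus any $ref'd prompt files) into per-type temp folders
  const tempDir = await mkdtemp(path.join(tmpdir(), 'qelos-push-'));
  const tempPaths = prepareTempDirectories(classified, tempDir);
  // tempPaths.integrations, tempPaths.plugins, … can now be handed to the existing push* services.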
@@ -76,6 +76,24 @@ function sanitizeIntegrationForFile(integration) {
        delete sanitized[field];
      }
    });
+
+   // Remove _id from internal objects
+   if (sanitized.trigger && sanitized.trigger._id) {
+     delete sanitized.trigger._id;
+   }
+
+   if (sanitized.target && sanitized.target._id) {
+     delete sanitized.target._id;
+   }
+
+   if (Array.isArray(sanitized.dataManipulation)) {
+     sanitized.dataManipulation.forEach(item => {
+       if (item._id) {
+         delete item._id;
+       }
+     });
+   }
+
    return sanitized;
  }

@@ -177,8 +195,13 @@ export async function pushIntegrations(sdk, path, options = {}) {
          logger.success(`Created: ${displayName}`);
        }

+       // Re-extract content to files to maintain $ref structure
+       // This ensures pre_messages are stored as prompt md files after pushing
+       const processedResponse = extractIntegrationContent(response, path, file);
+
        // Persist returned integration (with _id) back to disk
-       writeIntegrationFile(filePath, sanitizeIntegrationForFile(response));
+       writeIntegrationFile(filePath, sanitizeIntegrationForFile(processedResponse));
+
        results.push({ status: 'fulfilled' });
      } catch (error) {
        logger.error(`Failed to push integration file ${file}`, error);
@@ -4,6 +4,48 @@ import { join } from 'node:path';
  import { logger } from './logger.mjs';
  import { extractMicroFrontendStructures, resolveMicroFrontendStructures } from './micro-frontends.mjs';

+ function sanitizePluginForFile(plugin) {
+   const sanitized = JSON.parse(JSON.stringify(plugin));
+
+   // Remove _id from internal objects in arrays
+   if (Array.isArray(sanitized.subscribedEvents)) {
+     sanitized.subscribedEvents.forEach(item => {
+       if (item._id) delete item._id;
+     });
+   }
+
+   if (Array.isArray(sanitized.microFrontends)) {
+     sanitized.microFrontends.forEach(mfe => {
+       if (mfe._id) delete mfe._id;
+       if (Array.isArray(mfe.requires)) {
+         mfe.requires.forEach(item => {
+           if (item._id) delete item._id;
+         });
+       }
+     });
+   }
+
+   if (Array.isArray(sanitized.injectables)) {
+     sanitized.injectables.forEach(item => {
+       if (item._id) delete item._id;
+     });
+   }
+
+   if (Array.isArray(sanitized.navBarGroups)) {
+     sanitized.navBarGroups.forEach(item => {
+       if (item._id) delete item._id;
+     });
+   }
+
+   if (Array.isArray(sanitized.cruds)) {
+     sanitized.cruds.forEach(item => {
+       if (item._id) delete item._id;
+     });
+   }
+
+   return sanitized;
+ }
+
  /**
   * Push plugins from local directory to remote
   * @param {Object} sdk - Initialized SDK instance
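A quick sketch of what the new helper does to a pulled plugin (the field names subscribedEvents, microFrontends, requires, and cruds come from the function above; the sample values are made up):

  const pulled = {
    apiPath: 'my-plugin',
    microFrontends: [{ _id: 'abc123', name: 'dashboard', requires: [{ _id: 'def456', key: 'auth' }] }],
    cruds: [{ _id: 'ghi789', name: 'posts' }]
  };

  const clean = sanitizePluginForFile(pulled);
  // clean is a deep copy: every nested _id above is gone and everything else is untouched,
  // presumably so that files written by pullPlugins diff cleanly between pulls.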
@@ -179,7 +221,10 @@ export async function pullPlugins(sdk, targetPath) {
        cruds: (fullPlugin.cruds || []).map(removeIdFromObject),
      }

-     fs.writeFileSync(filePath, JSON.stringify(relevantFields, null, 2), 'utf-8');
+     // Sanitize the plugin to remove any remaining _id fields from internal objects
+     const sanitizedPlugin = sanitizePluginForFile(relevantFields);
+
+     fs.writeFileSync(filePath, JSON.stringify(sanitizedPlugin, null, 2), 'utf-8');
      logger.step(`Pulled: ${plugin.apiPath}`);
    }));