claude-flow 3.5.7 → 3.5.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "claude-flow",
3
- "version": "3.5.7",
3
+ "version": "3.5.8",
4
4
  "description": "Ruflo - Enterprise AI agent orchestration for Claude Code. Deploy 60+ specialized agents in coordinated swarms with self-learning, fault-tolerant consensus, vector memory, and MCP integration",
5
5
  "main": "dist/index.js",
6
6
  "type": "module",
@@ -2,8 +2,11 @@
2
2
  * V3 CLI Commands Index
3
3
  * Central registry for all CLI commands
4
4
  *
5
- * OPTIMIZATION: Uses lazy loading for commands to reduce CLI startup time by ~200ms
6
- * Commands are loaded on-demand when first accessed, not at module load time.
5
+ * NOTE: All commands are synchronously imported at module load time (lines below).
6
+ * The commandLoaders/loadCommand infrastructure provides an async fallback for
7
+ * commands looked up via getCommandAsync() but does NOT reduce startup time since
8
+ * all modules are already imported synchronously for the commands array and
9
+ * commandsByCategory exports.
7
10
  */
8
11
  import type { Command } from '../types.js';
9
12
  export { initCommand } from './init.js';
@@ -2,8 +2,11 @@
2
2
  * V3 CLI Commands Index
3
3
  * Central registry for all CLI commands
4
4
  *
5
- * OPTIMIZATION: Uses lazy loading for commands to reduce CLI startup time by ~200ms
6
- * Commands are loaded on-demand when first accessed, not at module load time.
5
+ * NOTE: All commands are synchronously imported at module load time (lines below).
6
+ * The commandLoaders/loadCommand infrastructure provides an async fallback for
7
+ * commands looked up via getCommandAsync() but does NOT reduce startup time since
8
+ * all modules are already imported synchronously for the commands array and
9
+ * commandsByCategory exports.
7
10
  */
8
11
  /**
9
12
  * Command loaders - commands are only imported when needed
@@ -2,6 +2,7 @@
2
2
  * Settings.json Generator
3
3
  * Creates .claude/settings.json with V3-optimized hook configurations
4
4
  */
5
+ import { detectPlatform } from './types.js';
5
6
  /**
6
7
  * Generate the complete settings.json content
7
8
  */
@@ -44,10 +45,17 @@ export function generateSettings(options) {
44
45
  CLAUDE_FLOW_V3_ENABLED: 'true',
45
46
  CLAUDE_FLOW_HOOKS_ENABLED: 'true',
46
47
  };
48
+ // Detect platform for platform-aware configuration
49
+ const platform = detectPlatform();
47
50
  // Add V3-specific settings
48
51
  settings.claudeFlow = {
49
52
  version: '3.0.0',
50
53
  enabled: true,
54
+ platform: {
55
+ os: platform.os,
56
+ arch: platform.arch,
57
+ shell: platform.shell,
58
+ },
51
59
  modelPreferences: {
52
60
  default: 'claude-opus-4-6',
53
61
  routing: 'claude-haiku-4-5-20251001',
@@ -142,47 +150,20 @@ export function generateSettings(options) {
142
150
  return settings;
143
151
  }
144
152
  /**
145
- * Build a cross-platform hook command that resolves paths to the project root.
146
- * Uses `git rev-parse --show-toplevel` at runtime so hooks work regardless of CWD.
147
- * Falls back to process.cwd() when not inside a git repo.
148
- *
149
- * The generated command is a `node -e` one-liner that:
150
- * 1. Finds the git root (or falls back to cwd)
151
- * 2. Requires the target script with the resolved absolute path
152
- * 3. Passes through process.argv so the script sees its subcommand in argv[2]
153
+ * Build a cross-platform hook command.
154
+ * Claude Code always runs hooks from the project root, so we invoke scripts
155
+ * directly without git-root resolution. This avoids `node -e` one-liners
156
+ * with shell-quoting issues on Windows cmd.exe and PowerShell.
153
157
  */
154
158
  function hookCmd(script, subcommand) {
155
- // Compact one-liner: resolve project root, then require the script.
156
- // With `node -e "..." arg`, process.argv = ['node', 'arg'] (no -e entry).
157
- // hook-handler.cjs reads argv[2] as its command, so we splice in the resolved
158
- // script path at argv[1] to produce: ['node', '<script>', 'subcommand'].
159
- // Use single quotes for the script path to avoid conflicting with outer double quotes.
160
- const scriptLiteral = `'${script}'`;
161
- const resolver = [
162
- "var c=require('child_process'),p=require('path'),r;",
163
- "try{r=c.execSync('git rev-parse --show-toplevel',{encoding:'utf8'}).trim()}",
164
- 'catch(e){r=process.cwd()}',
165
- `var s=p.join(r,${scriptLiteral});`,
166
- 'process.argv.splice(1,0,s);',
167
- 'require(s)',
168
- ].join('');
169
- return `node -e "${resolver}" ${subcommand}`.trim();
159
+ return `node ${script} ${subcommand}`.trim();
170
160
  }
171
161
  /**
172
162
  * Build a cross-platform hook command for ESM scripts (.mjs).
173
- * Uses dynamic import() with a file:// URL for cross-platform ESM loading.
163
+ * Same direct invocation; Node.js handles .mjs files natively.
174
164
  */
175
165
  function hookCmdEsm(script, subcommand) {
176
- const scriptLiteral = `'${script}'`;
177
- const resolver = [
178
- "var c=require('child_process'),p=require('path'),u=require('url'),r;",
179
- "try{r=c.execSync('git rev-parse --show-toplevel',{encoding:'utf8'}).trim()}",
180
- 'catch(e){r=process.cwd()}',
181
- `var f=p.join(r,${scriptLiteral});`,
182
- 'process.argv.splice(1,0,f);',
183
- 'import(u.pathToFileURL(f).href)',
184
- ].join('');
185
- return `node -e "${resolver}" ${subcommand}`.trim();
166
+ return `node ${script} ${subcommand}`.trim();
186
167
  }
187
168
  /** Shorthand for CJS hook-handler commands */
188
169
  function hookHandlerCmd(subcommand) {
@@ -208,8 +189,8 @@ function generateStatusLineConfig(_options) {
208
189
  /**
209
190
  * Generate hooks configuration
210
191
  * Uses local hook-handler.cjs for cross-platform compatibility.
211
- * All hooks delegate to hook-handler.cjs via resolved absolute paths,
212
- * so they work identically on Windows, macOS, and Linux regardless of CWD.
192
+ * All hooks invoke scripts directly via `node <script> <subcommand>`,
193
+ * working identically on Windows, macOS, and Linux.
213
194
  */
214
195
  function generateHooksConfig(config) {
215
196
  const hooks = {};
@@ -261,10 +261,20 @@ export class MCPServerManager extends EventEmitter {
261
261
  },
262
262
  },
263
263
  }));
264
- // Handle stdin messages
264
+ // Handle stdin messages (S-5: bounded buffer to prevent OOM)
265
+ const MAX_BUFFER_SIZE = 10 * 1024 * 1024; // 10MB
265
266
  let buffer = '';
266
267
  process.stdin.on('data', async (chunk) => {
267
268
  buffer += chunk.toString();
269
+ if (buffer.length > MAX_BUFFER_SIZE) {
270
+ console.error(`[${new Date().toISOString()}] ERROR [claude-flow-mcp] Buffer exceeded ${MAX_BUFFER_SIZE} bytes, rejecting`);
271
+ buffer = '';
272
+ console.log(JSON.stringify({
273
+ jsonrpc: '2.0',
274
+ error: { code: -32600, message: 'Request too large' },
275
+ }));
276
+ return;
277
+ }
268
278
  // Process complete JSON messages
269
279
  let lines = buffer.split('\n');
270
280
  buffer = lines.pop() || ''; // Keep incomplete line in buffer
@@ -69,6 +69,16 @@ function getNestedValue(obj, key) {
69
69
  }
70
70
  return current;
71
71
  }
72
+ const DANGEROUS_KEYS = new Set(['__proto__', 'constructor', 'prototype']);
73
+ function filterDangerousKeys(obj) {
74
+ const filtered = {};
75
+ for (const [key, value] of Object.entries(obj)) {
76
+ if (!DANGEROUS_KEYS.has(key)) {
77
+ filtered[key] = value;
78
+ }
79
+ }
80
+ return filtered;
81
+ }
72
82
  function setNestedValue(obj, key, value) {
73
83
  const parts = key.split('.');
74
84
  let current = obj;
@@ -301,7 +311,7 @@ export const configTools = [
301
311
  },
302
312
  handler: async (input) => {
303
313
  const store = loadConfigStore();
304
- const config = input.config;
314
+ const config = filterDangerousKeys(input.config);
305
315
  const scope = input.scope || 'default';
306
316
  const merge = input.merge !== false;
307
317
  const importedKeys = Object.keys(config);
@@ -30,6 +30,21 @@ function ensureMemoryDir() {
30
30
  mkdirSync(dir, { recursive: true });
31
31
  }
32
32
  }
33
+ // D-2: Input bounds for memory parameters
34
+ const MAX_KEY_LENGTH = 1024;
35
+ const MAX_VALUE_SIZE = 1024 * 1024; // 1MB
36
+ const MAX_QUERY_LENGTH = 4096;
37
+ function validateMemoryInput(key, value, query) {
38
+ if (key && key.length > MAX_KEY_LENGTH) {
39
+ throw new Error(`Key exceeds maximum length of ${MAX_KEY_LENGTH} characters`);
40
+ }
41
+ if (value && value.length > MAX_VALUE_SIZE) {
42
+ throw new Error(`Value exceeds maximum size of ${MAX_VALUE_SIZE} bytes`);
43
+ }
44
+ if (query && query.length > MAX_QUERY_LENGTH) {
45
+ throw new Error(`Query exceeds maximum length of ${MAX_QUERY_LENGTH} characters`);
46
+ }
47
+ }
33
48
  /**
34
49
  * Check if legacy JSON store exists and needs migration
35
50
  */
@@ -146,6 +161,7 @@ export const memoryTools = [
146
161
  const tags = input.tags || [];
147
162
  const ttl = input.ttl;
148
163
  const upsert = input.upsert || false;
164
+ validateMemoryInput(key, value);
149
165
  const startTime = performance.now();
150
166
  try {
151
167
  const result = await storeEntry({
@@ -260,6 +276,7 @@ export const memoryTools = [
260
276
  const namespace = input.namespace || 'default';
261
277
  const limit = input.limit || 10;
262
278
  const threshold = input.threshold || 0.3;
279
+ validateMemoryInput(undefined, undefined, query);
263
280
  const startTime = performance.now();
264
281
  try {
265
282
  const result = await searchEntries({
@@ -5,9 +5,18 @@
5
5
  */
6
6
  import * as fs from 'fs';
7
7
  import * as path from 'path';
8
- import { exec } from 'child_process';
8
+ import { execFile } from 'child_process';
9
9
  import { promisify } from 'util';
10
- const execAsync = promisify(exec);
10
+ const execFileAsync = promisify(execFile);
11
+ /**
12
+ * Validate npm package name to prevent shell injection (S-3)
13
+ */
14
+ const VALID_PACKAGE_RE = /^(@[a-z0-9-~][a-z0-9-._~]*\/)?[a-z0-9-~][a-z0-9-._~]*(@[a-z0-9._\-^~>=<]+)?$/;
15
+ function validatePackageName(spec) {
16
+ if (!VALID_PACKAGE_RE.test(spec)) {
17
+ throw new Error(`Invalid package name: ${spec}`);
18
+ }
19
+ }
11
20
  // ============================================================================
12
21
  // Plugin Manager
13
22
  // ============================================================================
@@ -92,9 +101,11 @@ export class PluginManager {
92
101
  // Install to local plugins directory
93
102
  const installDir = path.join(this.config.pluginsDir, 'node_modules');
94
103
  await this.ensureDirectory(installDir);
95
- // Use npm to install
104
+ // Validate package name to prevent injection (S-3)
105
+ validatePackageName(versionSpec);
106
+ // Use npm to install (array form prevents shell injection)
96
107
  console.log(`[PluginManager] Installing ${versionSpec}...`);
97
- const { stdout, stderr } = await execAsync(`npm install --prefix "${this.config.pluginsDir}" ${versionSpec}`, { timeout: 120000 });
108
+ await execFileAsync('npm', ['install', '--prefix', this.config.pluginsDir, versionSpec], { timeout: 120000 });
98
109
  // Get installed version
99
110
  const packageJsonPath = path.join(installDir, packageName, 'package.json');
100
111
  let installedVersion = version || 'latest';
@@ -198,7 +209,8 @@ export class PluginManager {
198
209
  try {
199
210
  // For npm-installed plugins, remove from node_modules
200
211
  if (plugin.source === 'npm') {
201
- await execAsync(`npm uninstall --prefix "${this.config.pluginsDir}" ${packageName}`, { timeout: 60000 });
212
+ validatePackageName(packageName);
213
+ await execFileAsync('npm', ['uninstall', '--prefix', this.config.pluginsDir, packageName], { timeout: 60000 });
202
214
  }
203
215
  // Remove from manifest
204
216
  delete this.manifest.plugins[packageName];
@@ -316,8 +328,10 @@ export class PluginManager {
316
328
  }
317
329
  try {
318
330
  const versionSpec = version ? `${packageName}@${version}` : `${packageName}@latest`;
319
- // Reinstall with new version
320
- await execAsync(`npm install --prefix "${this.config.pluginsDir}" ${versionSpec}`, { timeout: 120000 });
331
+ // Validate package name to prevent injection (S-3)
332
+ validatePackageName(versionSpec);
333
+ // Reinstall with new version (array form prevents shell injection)
334
+ await execFileAsync('npm', ['install', '--prefix', this.config.pluginsDir, versionSpec], { timeout: 120000 });
321
335
  // Update manifest
322
336
  const installDir = path.join(this.config.pluginsDir, 'node_modules');
323
337
  const packageJsonPath = path.join(installDir, packageName, 'package.json');
@@ -375,6 +389,9 @@ export function getPluginManager(baseDir) {
375
389
  if (!defaultManager) {
376
390
  defaultManager = new PluginManager(baseDir);
377
391
  }
392
+ else if (baseDir && defaultManager.getPluginsDir() !== path.join(baseDir, '.claude-flow', 'plugins')) {
393
+ console.warn(`[PluginManager] Warning: getPluginManager called with different baseDir. Using existing instance. Call resetPluginManager() first to change.`);
394
+ }
378
395
  return defaultManager;
379
396
  }
380
397
  export function resetPluginManager() {
@@ -133,7 +133,7 @@ export class ErrorHandler {
133
133
  const sanitized = {};
134
134
  for (const [key, value] of Object.entries(input)) {
135
135
  const lowerKey = key.toLowerCase();
136
- const isSensitive = SENSITIVE_KEYS.some(sk => lowerKey.includes(sk));
136
+ const isSensitive = SENSITIVE_KEYS.some(sk => lowerKey.includes(sk.toLowerCase()));
137
137
  if (isSensitive) {
138
138
  sanitized[key] = '[REDACTED]';
139
139
  }
@@ -93,6 +93,8 @@ export async function resolveIPNS(ipnsName, preferredGateway) {
93
93
  * @returns Parsed JSON content or null if fetch fails
94
94
  */
95
95
  export async function fetchFromIPFS(cid, preferredGateway) {
96
+ if (!isValidCID(cid))
97
+ return null;
96
98
  const gateways = preferredGateway
97
99
  ? [preferredGateway, ...IPFS_GATEWAYS.filter(g => g !== preferredGateway)]
98
100
  : IPFS_GATEWAYS;
@@ -135,6 +137,8 @@ export async function fetchFromIPFS(cid, preferredGateway) {
135
137
  * Fetch with full result metadata
136
138
  */
137
139
  export async function fetchFromIPFSWithMetadata(cid, preferredGateway) {
140
+ if (!isValidCID(cid))
141
+ return null;
138
142
  const gateways = preferredGateway
139
143
  ? [preferredGateway, ...IPFS_GATEWAYS.filter(g => g !== preferredGateway)]
140
144
  : IPFS_GATEWAYS;
@@ -172,6 +176,8 @@ export async function fetchFromIPFSWithMetadata(cid, preferredGateway) {
172
176
  * Check if CID is pinned/available on a gateway
173
177
  */
174
178
  export async function isPinned(cid, gateway = 'https://ipfs.io') {
179
+ if (!isValidCID(cid))
180
+ return false;
175
181
  try {
176
182
  const response = await fetch(`${gateway}/ipfs/${cid}`, {
177
183
  method: 'HEAD',
@@ -187,6 +193,8 @@ export async function isPinned(cid, gateway = 'https://ipfs.io') {
187
193
  * Check availability across multiple gateways
188
194
  */
189
195
  export async function checkAvailability(cid) {
196
+ if (!isValidCID(cid))
197
+ return { available: false, gateways: [] };
190
198
  const results = await Promise.all(IPFS_GATEWAYS.map(async (gateway) => {
191
199
  const startTime = Date.now();
192
200
  try {
@@ -187,8 +187,6 @@ export async function uploadToIPFS(content, options = {}) {
187
187
  const size = content.length;
188
188
  console.log(`[IPFS] Demo upload: ${size} bytes`);
189
189
  console.log(`[IPFS] Name: ${name}`);
190
- // Simulate upload delay
191
- await new Promise(resolve => setTimeout(resolve, 500));
192
190
  const result = {
193
191
  cid,
194
192
  size,
@@ -102,10 +102,7 @@ export function serializeToBuffer(cfp, format) {
102
102
  case 'cbor.gz':
103
103
  case 'cbor.zstd':
104
104
  case 'msgpack':
105
- // Fallback to JSON for now
106
- // In production: use cbor-x, msgpack-lite, etc.
107
- console.warn(`Format ${format} not implemented, using JSON`);
108
- return Buffer.from(json, 'utf-8');
105
+ throw new Error(`Serialization format '${format}' is not implemented. Use 'json' instead.`);
109
106
  default:
110
107
  return Buffer.from(json, 'utf-8');
111
108
  }
@@ -115,7 +112,13 @@ export function serializeToBuffer(cfp, format) {
115
112
  */
116
113
  export function deserializeCFP(data) {
117
114
  const str = typeof data === 'string' ? data : data.toString('utf-8');
118
- const parsed = JSON.parse(str);
115
+ let parsed;
116
+ try {
117
+ parsed = JSON.parse(str);
118
+ }
119
+ catch (e) {
120
+ throw new Error(`Invalid CFP file: ${e instanceof Error ? e.message : String(e)}`);
121
+ }
119
122
  // Validate magic bytes
120
123
  if (parsed.magic !== 'CFP1') {
121
124
  throw new Error(`Invalid CFP format: expected magic 'CFP1', got '${parsed.magic}'`);
@@ -8,7 +8,7 @@
8
8
  import * as crypto from 'crypto';
9
9
  import * as fs from 'fs';
10
10
  import * as path from 'path';
11
- import { execSync } from 'child_process';
11
+ import { execFileSync } from 'child_process';
12
12
  /**
13
13
  * Get GCS configuration from environment
14
14
  */
@@ -23,12 +23,24 @@ export function getGCSConfig() {
23
23
  prefix: process.env.GCS_PREFIX || 'claude-flow-patterns',
24
24
  };
25
25
  }
26
+ /**
27
+ * Validate GCS bucket name (prevents command injection via bucket names)
28
+ */
29
+ function isValidBucketName(bucket) {
30
+ return /^[a-z0-9][a-z0-9._-]{1,221}[a-z0-9]$/.test(bucket);
31
+ }
32
+ /**
33
+ * Validate GCS object path (no shell metacharacters)
34
+ */
35
+ function isValidObjectPath(objectPath) {
36
+ return /^[a-zA-Z0-9_.\/\-]+$/.test(objectPath);
37
+ }
26
38
  /**
27
39
  * Check if gcloud CLI is available
28
40
  */
29
41
  export function isGCloudAvailable() {
30
42
  try {
31
- execSync('gcloud --version', { stdio: 'pipe' });
43
+ execFileSync('gcloud', ['--version'], { stdio: 'pipe' });
32
44
  return true;
33
45
  }
34
46
  catch {
@@ -40,7 +52,7 @@ export function isGCloudAvailable() {
40
52
  */
41
53
  export async function isGCloudAuthenticated() {
42
54
  try {
43
- execSync('gcloud auth print-access-token', { stdio: 'pipe' });
55
+ execFileSync('gcloud', ['auth', 'print-access-token'], { stdio: 'pipe' });
44
56
  return true;
45
57
  }
46
58
  catch {
@@ -67,27 +79,33 @@ export async function uploadToGCS(content, options = {}) {
67
79
  const checksum = crypto.createHash('sha256').update(content).digest('hex');
68
80
  const fileName = options.name || `${contentId}.cfp.json`;
69
81
  const objectPath = config.prefix ? `${config.prefix}/${fileName}` : fileName;
82
+ // S-1: Validate bucket name and object path to prevent command injection
83
+ if (!isValidBucketName(config.bucket)) {
84
+ throw new Error(`Invalid GCS bucket name: ${config.bucket}`);
85
+ }
86
+ if (!isValidObjectPath(objectPath)) {
87
+ throw new Error(`Invalid GCS object path: ${objectPath}`);
88
+ }
70
89
  console.log(`[GCS] Uploading to gs://${config.bucket}/${objectPath}...`);
71
90
  // Write content to temp file
72
91
  const tempDir = process.env.TMPDIR || '/tmp';
73
92
  const tempFile = path.join(tempDir, `claude-flow-upload-${Date.now()}.json`);
74
93
  fs.writeFileSync(tempFile, content);
75
94
  try {
76
- // Build gcloud command
77
- const metadataArgs = options.metadata
78
- ? Object.entries(options.metadata)
79
- .map(([k, v]) => `--metadata=${k}=${v}`)
80
- .join(' ')
81
- : '';
82
- const projectArg = config.projectId ? `--project=${config.projectId}` : '';
83
- // Upload using gcloud storage cp
84
- const cmd = `gcloud storage cp "${tempFile}" "gs://${config.bucket}/${objectPath}" ${projectArg} --content-type="${options.contentType || 'application/json'}" 2>&1`;
85
- execSync(cmd, { encoding: 'utf-8' });
95
+ // Build gcloud args (array form prevents shell injection)
96
+ const uploadArgs = ['storage', 'cp', tempFile, `gs://${config.bucket}/${objectPath}`];
97
+ if (config.projectId)
98
+ uploadArgs.push(`--project=${config.projectId}`);
99
+ uploadArgs.push(`--content-type=${options.contentType || 'application/json'}`);
100
+ execFileSync('gcloud', uploadArgs, { encoding: 'utf-8', stdio: 'pipe' });
86
101
  // Set metadata if provided
87
102
  if (options.metadata && Object.keys(options.metadata).length > 0) {
88
103
  const metadataJson = JSON.stringify(options.metadata);
89
104
  try {
90
- execSync(`gcloud storage objects update "gs://${config.bucket}/${objectPath}" --custom-metadata='${metadataJson}' ${projectArg} 2>&1`, { encoding: 'utf-8' });
105
+ const metaArgs = ['storage', 'objects', 'update', `gs://${config.bucket}/${objectPath}`, `--custom-metadata=${metadataJson}`];
106
+ if (config.projectId)
107
+ metaArgs.push(`--project=${config.projectId}`);
108
+ execFileSync('gcloud', metaArgs, { encoding: 'utf-8', stdio: 'pipe' });
91
109
  }
92
110
  catch {
93
111
  // Metadata update failed, but upload succeeded
@@ -121,14 +139,16 @@ export async function uploadToGCS(content, options = {}) {
121
139
  */
122
140
  export async function downloadFromGCS(uri, config) {
123
141
  const cfg = config || getGCSConfig();
124
- const projectArg = cfg?.projectId ? `--project=${cfg.projectId}` : '';
125
142
  console.log(`[GCS] Downloading from ${uri}...`);
126
143
  // Write to temp file first
127
144
  const tempDir = process.env.TMPDIR || '/tmp';
128
145
  const tempFile = path.join(tempDir, `claude-flow-download-${Date.now()}.json`);
129
146
  try {
130
- // Download using gcloud storage cp
131
- execSync(`gcloud storage cp "${uri}" "${tempFile}" ${projectArg} 2>&1`, { encoding: 'utf-8' });
147
+ // Download using gcloud storage cp (array form prevents shell injection)
148
+ const downloadArgs = ['storage', 'cp', uri, tempFile];
149
+ if (cfg?.projectId)
150
+ downloadArgs.push(`--project=${cfg.projectId}`);
151
+ execFileSync('gcloud', downloadArgs, { encoding: 'utf-8', stdio: 'pipe' });
132
152
  const content = fs.readFileSync(tempFile);
133
153
  fs.unlinkSync(tempFile);
134
154
  console.log(`[GCS] Downloaded ${content.length} bytes`);
@@ -148,9 +168,11 @@ export async function downloadFromGCS(uri, config) {
148
168
  */
149
169
  export async function existsInGCS(uri, config) {
150
170
  const cfg = config || getGCSConfig();
151
- const projectArg = cfg?.projectId ? `--project=${cfg.projectId}` : '';
152
171
  try {
153
- execSync(`gcloud storage ls "${uri}" ${projectArg} 2>&1`, { encoding: 'utf-8', stdio: 'pipe' });
172
+ const lsArgs = ['storage', 'ls', uri];
173
+ if (cfg?.projectId)
174
+ lsArgs.push(`--project=${cfg.projectId}`);
175
+ execFileSync('gcloud', lsArgs, { encoding: 'utf-8', stdio: 'pipe' });
154
176
  return true;
155
177
  }
156
178
  catch {
@@ -165,10 +187,12 @@ export async function listGCSObjects(prefix, config) {
165
187
  if (!cfg)
166
188
  return [];
167
189
  const objectPrefix = prefix || cfg.prefix || '';
168
- const projectArg = cfg.projectId ? `--project=${cfg.projectId}` : '';
169
190
  const uri = `gs://${cfg.bucket}/${objectPrefix}`;
170
191
  try {
171
- const result = execSync(`gcloud storage ls -l "${uri}" ${projectArg} --format=json 2>&1`, { encoding: 'utf-8' });
192
+ const listArgs = ['storage', 'ls', '-l', uri, '--format=json'];
193
+ if (cfg.projectId)
194
+ listArgs.push(`--project=${cfg.projectId}`);
195
+ const result = execFileSync('gcloud', listArgs, { encoding: 'utf-8', stdio: 'pipe' });
172
196
  const objects = JSON.parse(result);
173
197
  return objects.map((obj) => ({
174
198
  name: obj.name,
@@ -185,9 +209,11 @@ export async function listGCSObjects(prefix, config) {
185
209
  */
186
210
  export async function deleteFromGCS(uri, config) {
187
211
  const cfg = config || getGCSConfig();
188
- const projectArg = cfg?.projectId ? `--project=${cfg.projectId}` : '';
189
212
  try {
190
- execSync(`gcloud storage rm "${uri}" ${projectArg} 2>&1`, { encoding: 'utf-8' });
213
+ const rmArgs = ['storage', 'rm', uri];
214
+ if (cfg?.projectId)
215
+ rmArgs.push(`--project=${cfg.projectId}`);
216
+ execFileSync('gcloud', rmArgs, { encoding: 'utf-8', stdio: 'pipe' });
191
217
  return true;
192
218
  }
193
219
  catch {
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@claude-flow/cli",
3
- "version": "3.5.7",
3
+ "version": "3.5.8",
4
4
  "type": "module",
5
5
  "description": "Ruflo CLI - Enterprise AI agent orchestration with 60+ specialized agents, swarm coordination, MCP server, self-learning hooks, and vector memory for Claude Code",
6
6
  "main": "dist/src/index.js",