@exaudeus/workrail 0.16.0 → 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. package/dist/di/container.js +15 -0
  2. package/dist/di/tokens.d.ts +3 -0
  3. package/dist/di/tokens.js +3 -0
  4. package/dist/infrastructure/session/HttpServer.d.ts +2 -1
  5. package/dist/infrastructure/session/HttpServer.js +34 -10
  6. package/dist/infrastructure/session/SessionManager.js +19 -1
  7. package/dist/infrastructure/storage/enhanced-multi-source-workflow-storage.js +26 -2
  8. package/dist/infrastructure/storage/file-workflow-storage.js +4 -4
  9. package/dist/infrastructure/storage/git-workflow-storage.d.ts +0 -1
  10. package/dist/infrastructure/storage/git-workflow-storage.js +28 -29
  11. package/dist/infrastructure/storage/plugin-workflow-storage.js +11 -5
  12. package/dist/manifest.json +154 -66
  13. package/dist/mcp/handlers/v2-execution-helpers.d.ts +4 -4
  14. package/dist/mcp/handlers/v2-execution-helpers.js +29 -0
  15. package/dist/mcp/handlers/v2-execution.js +167 -120
  16. package/dist/mcp/output-schemas.d.ts +110 -21
  17. package/dist/mcp/output-schemas.js +8 -11
  18. package/dist/mcp/server.js +15 -3
  19. package/dist/mcp/types.d.ts +6 -6
  20. package/dist/utils/storage-security.js +15 -1
  21. package/dist/v2/durable-core/encoding/base32-lower.d.ts +5 -0
  22. package/dist/v2/durable-core/encoding/base32-lower.js +58 -0
  23. package/dist/v2/durable-core/ids/attempt-id-derivation.d.ts +3 -0
  24. package/dist/v2/durable-core/ids/attempt-id-derivation.js +32 -0
  25. package/dist/v2/durable-core/ids/index.d.ts +4 -0
  26. package/dist/v2/durable-core/ids/index.js +7 -0
  27. package/dist/v2/durable-core/ids/workflow-hash-ref.d.ts +7 -0
  28. package/dist/v2/durable-core/ids/workflow-hash-ref.js +23 -0
  29. package/dist/v2/durable-core/tokens/binary-payload.d.ts +35 -0
  30. package/dist/v2/durable-core/tokens/binary-payload.js +279 -0
  31. package/dist/v2/durable-core/tokens/index.d.ts +9 -4
  32. package/dist/v2/durable-core/tokens/index.js +17 -7
  33. package/dist/v2/durable-core/tokens/payloads.d.ts +12 -8
  34. package/dist/v2/durable-core/tokens/payloads.js +10 -7
  35. package/dist/v2/durable-core/tokens/token-codec-capabilities.d.ts +4 -0
  36. package/dist/v2/durable-core/tokens/token-codec-capabilities.js +2 -0
  37. package/dist/v2/durable-core/tokens/token-codec-ports.d.ts +42 -0
  38. package/dist/v2/durable-core/tokens/token-codec-ports.js +27 -0
  39. package/dist/v2/durable-core/tokens/token-codec.d.ts +18 -0
  40. package/dist/v2/durable-core/tokens/token-codec.js +108 -0
  41. package/dist/v2/durable-core/tokens/token-signer.d.ts +13 -1
  42. package/dist/v2/durable-core/tokens/token-signer.js +65 -0
  43. package/dist/v2/infra/local/base32/index.d.ts +6 -0
  44. package/dist/v2/infra/local/base32/index.js +44 -0
  45. package/dist/v2/infra/local/bech32m/index.d.ts +8 -0
  46. package/dist/v2/infra/local/bech32m/index.js +56 -0
  47. package/dist/v2/infra/local/data-dir/index.d.ts +1 -0
  48. package/dist/v2/infra/local/data-dir/index.js +5 -2
  49. package/dist/v2/infra/local/fs/index.js +3 -0
  50. package/dist/v2/infra/local/id-factory/index.d.ts +11 -0
  51. package/dist/v2/infra/local/id-factory/index.js +32 -0
  52. package/dist/v2/infra/local/session-store/index.js +38 -4
  53. package/dist/v2/ports/base32.port.d.ts +16 -0
  54. package/dist/v2/ports/base32.port.js +2 -0
  55. package/dist/v2/ports/bech32m.port.d.ts +11 -0
  56. package/dist/v2/ports/bech32m.port.js +2 -0
  57. package/package.json +20 -2
package/dist/di/container.js CHANGED
@@ -179,8 +179,11 @@ async function registerV2Services() {
     const { NodeCryptoV2 } = await Promise.resolve().then(() => __importStar(require('../v2/infra/local/crypto/index.js')));
     const { NodeHmacSha256V2 } = await Promise.resolve().then(() => __importStar(require('../v2/infra/local/hmac-sha256/index.js')));
     const { NodeBase64UrlV2 } = await Promise.resolve().then(() => __importStar(require('../v2/infra/local/base64url/index.js')));
+    const { Base32AdapterV2 } = await Promise.resolve().then(() => __importStar(require('../v2/infra/local/base32/index.js')));
+    const { Bech32mAdapterV2 } = await Promise.resolve().then(() => __importStar(require('../v2/infra/local/bech32m/index.js')));
     const { NodeRandomEntropyV2 } = await Promise.resolve().then(() => __importStar(require('../v2/infra/local/random-entropy/index.js')));
     const { NodeTimeClockV2 } = await Promise.resolve().then(() => __importStar(require('../v2/infra/local/time-clock/index.js')));
+    const { IdFactoryV2 } = await Promise.resolve().then(() => __importStar(require('../v2/infra/local/id-factory/index.js')));
     tsyringe_1.container.register(tokens_js_1.DI.V2.DataDir, {
         useFactory: (0, tsyringe_1.instanceCachingFactory)(() => new LocalDataDirV2(process.env)),
     });
@@ -199,12 +202,24 @@ async function registerV2Services() {
     tsyringe_1.container.register(tokens_js_1.DI.V2.Base64Url, {
         useFactory: (0, tsyringe_1.instanceCachingFactory)(() => new NodeBase64UrlV2()),
     });
+    tsyringe_1.container.register(tokens_js_1.DI.V2.Base32, {
+        useFactory: (0, tsyringe_1.instanceCachingFactory)(() => new Base32AdapterV2()),
+    });
+    tsyringe_1.container.register(tokens_js_1.DI.V2.Bech32m, {
+        useFactory: (0, tsyringe_1.instanceCachingFactory)(() => new Bech32mAdapterV2()),
+    });
     tsyringe_1.container.register(tokens_js_1.DI.V2.RandomEntropy, {
         useFactory: (0, tsyringe_1.instanceCachingFactory)(() => new NodeRandomEntropyV2()),
     });
     tsyringe_1.container.register(tokens_js_1.DI.V2.TimeClock, {
         useFactory: (0, tsyringe_1.instanceCachingFactory)(() => new NodeTimeClockV2()),
     });
+    tsyringe_1.container.register(tokens_js_1.DI.V2.IdFactory, {
+        useFactory: (0, tsyringe_1.instanceCachingFactory)((c) => {
+            const entropy = c.resolve(tokens_js_1.DI.V2.RandomEntropy);
+            return new IdFactoryV2(entropy);
+        }),
+    });
     const { LocalKeyringV2 } = await Promise.resolve().then(() => __importStar(require('../v2/infra/local/keyring/index.js')));
     const { LocalSessionEventLogStoreV2 } = await Promise.resolve().then(() => __importStar(require('../v2/infra/local/session-store/index.js')));
     const { LocalSnapshotStoreV2 } = await Promise.resolve().then(() => __importStar(require('../v2/infra/local/snapshot-store/index.js')));
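A quick consumer-side sketch of what these registrations enable. The import specifiers and the explicit `unknown` type arguments are illustrative (the real port interfaces live under dist/v2/ports/ and dist/v2/infra/local/); it only assumes registerV2Services() has already run on the same container.

```ts
// Sketch: resolving the newly registered v2 services from the shared tsyringe container.
import 'reflect-metadata';
import { container } from 'tsyringe';
import { DI } from './di/tokens.js'; // illustrative path

const base32 = container.resolve<unknown>(DI.V2.Base32);       // Base32AdapterV2, instance-cached
const bech32m = container.resolve<unknown>(DI.V2.Bech32m);     // Bech32mAdapterV2, instance-cached
const idFactory = container.resolve<unknown>(DI.V2.IdFactory); // IdFactoryV2 built on the RandomEntropy port
```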
package/dist/di/tokens.d.ts CHANGED
@@ -28,8 +28,11 @@ export declare const DI: {
     readonly Crypto: symbol;
     readonly HmacSha256: symbol;
     readonly Base64Url: symbol;
+    readonly Base32: symbol;
+    readonly Bech32m: symbol;
     readonly RandomEntropy: symbol;
     readonly TimeClock: symbol;
+    readonly IdFactory: symbol;
     readonly Keyring: symbol;
     readonly SessionStore: symbol;
     readonly SnapshotStore: symbol;
package/dist/di/tokens.js CHANGED
@@ -31,8 +31,11 @@ exports.DI = {
     Crypto: Symbol('V2.Crypto'),
     HmacSha256: Symbol('V2.HmacSha256'),
     Base64Url: Symbol('V2.Base64Url'),
+    Base32: Symbol('V2.Base32'),
+    Bech32m: Symbol('V2.Bech32m'),
     RandomEntropy: Symbol('V2.RandomEntropy'),
     TimeClock: Symbol('V2.TimeClock'),
+    IdFactory: Symbol('V2.IdFactory'),
     Keyring: Symbol('V2.Keyring'),
     SessionStore: Symbol('V2.SessionStore'),
     SnapshotStore: Symbol('V2.SnapshotStore'),
package/dist/infrastructure/session/HttpServer.d.ts CHANGED
@@ -16,6 +16,7 @@ export interface ServerConfig {
     port?: number;
     browserBehavior?: BrowserBehavior;
     dashboardMode?: DashboardMode;
+    lockFilePath?: string;
 }
 export declare class HttpServer {
     private sessionManager;
@@ -30,7 +31,7 @@ export declare class HttpServer {
     private baseUrl;
     private isPrimary;
     private lockFile;
-    private readonly heartbeat;
+    private heartbeat;
     constructor(sessionManager: SessionManager, processLifecyclePolicy: ProcessLifecyclePolicy, processSignals: ProcessSignals, shutdownEvents: ShutdownEvents, dashboardMode: DashboardMode, browserBehavior: BrowserBehavior);
     private config;
     setConfig(config: ServerConfig): this;
package/dist/infrastructure/session/HttpServer.js CHANGED
@@ -53,6 +53,10 @@ let HttpServer = class HttpServer {
         if (config.port) {
             this.port = config.port;
         }
+        if (config.lockFilePath) {
+            this.lockFile = config.lockFilePath;
+            this.heartbeat = new DashboardHeartbeat_js_1.DashboardHeartbeat(this.lockFile, () => this.isPrimary);
+        }
         return this;
     }
     setupMiddleware() {
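For orientation, a hedged sketch of the ServerConfig shape a caller can now hand to setConfig(); the import path and the concrete values are made up for the example.

```ts
import type { ServerConfig } from './infrastructure/session/HttpServer.js'; // illustrative path

// Hypothetical values: port replaces the previously hard-coded 3456, and lockFilePath
// redirects the primary-instance lock (and the rebuilt DashboardHeartbeat) to a
// caller-controlled location.
const config: ServerConfig = {
    port: 4100,
    lockFilePath: '/tmp/workrail-dashboard.lock',
};
// httpServer.setConfig(config) then listens on config.port and heartbeats against config.lockFilePath.
```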
@@ -350,7 +354,7 @@ let HttpServer = class HttpServer {
         }
         catch (error) {
             if (error.code === 'EADDRINUSE') {
-                console.error('[Dashboard] Port 3456 busy despite lock, falling back to legacy mode');
+                console.error(`[Dashboard] Port ${this.port} busy despite lock, falling back to legacy mode`);
                 await promises_1.default.unlink(this.lockFile).catch(() => { });
                 return await this.startLegacyMode();
             }
@@ -358,7 +362,7 @@ let HttpServer = class HttpServer {
             }
         }
         else {
-            console.error('[Dashboard] ✅ Unified dashboard at http://localhost:3456');
+            console.error(`[Dashboard] ✅ Unified dashboard at http://localhost:${this.port}`);
             return null;
         }
     }
@@ -367,7 +371,7 @@ let HttpServer = class HttpServer {
         await promises_1.default.mkdir(path_1.default.dirname(this.lockFile), { recursive: true });
         const lockData = {
             pid: process.pid,
-            port: 3456,
+            port: this.port,
             startedAt: new Date().toISOString(),
             lastHeartbeat: new Date().toISOString(),
             projectId: this.sessionManager.getProjectId(),
@@ -435,7 +439,7 @@ let HttpServer = class HttpServer {
         const tempPath = `${this.lockFile}.${process.pid}.${Date.now()}`;
         const newLockData = {
             pid: process.pid,
-            port: 3456,
+            port: this.port,
             startedAt: new Date().toISOString(),
             lastHeartbeat: new Date().toISOString(),
             projectId: this.sessionManager.getProjectId(),
@@ -443,7 +447,22 @@ let HttpServer = class HttpServer {
         };
         try {
             await promises_1.default.writeFile(tempPath, JSON.stringify(newLockData, null, 2));
-            await promises_1.default.rename(tempPath, this.lockFile);
+            let retries = 3;
+            let renamed = false;
+            while (retries > 0 && !renamed) {
+                try {
+                    await promises_1.default.rename(tempPath, this.lockFile);
+                    renamed = true;
+                }
+                catch (err) {
+                    if (err.code === 'EPERM' && process.platform === 'win32' && retries > 1) {
+                        await new Promise(resolve => setTimeout(resolve, 10));
+                        retries--;
+                        continue;
+                    }
+                    throw err;
+                }
+            }
             console.error('[Dashboard] Lock reclaimed successfully');
             this.isPrimary = true;
             this.setupPrimaryCleanup();
@@ -532,9 +551,9 @@ let HttpServer = class HttpServer {
             this.server.on('error', (error) => {
                 reject(error);
             });
-            this.server.listen(3456, () => {
-                this.port = 3456;
-                this.baseUrl = 'http://localhost:3456';
+            const listenPort = this.port;
+            this.server.listen(listenPort, () => {
+                this.baseUrl = `http://localhost:${listenPort}`;
                 this.printBanner();
                 resolve();
             });
@@ -688,12 +707,17 @@ let HttpServer = class HttpServer {
                 return [];
             return output.split('\n').filter(Boolean).map(line => {
                 const parts = line.trim().split(/\s+/);
+                const state = parts[3] || '';
                 const address = parts[1] || '';
                 const pid = parseInt(parts[4]);
                 const portMatch = address.match(/:(\d+)$/);
                 const port = portMatch ? parseInt(portMatch[1]) : 0;
-                return { port, pid };
-            }).filter(item => item.port >= 3456 && item.port < 3500 && !isNaN(item.pid));
+                return { port, pid, state };
+            }).filter(item => item.state === 'LISTENING' &&
+                item.port >= 3456 &&
+                item.port < 3500 &&
+                !isNaN(item.pid) &&
+                item.pid > 0).map(({ port, pid }) => ({ port, pid }));
         }
         return [];
     }
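A worked example of the tightened Windows port scan, using a made-up `netstat -ano` row (column order: Proto, Local Address, Foreign Address, State, PID):

```ts
const line = '  TCP    0.0.0.0:3456    0.0.0.0:0    LISTENING    1234';
const parts = line.trim().split(/\s+/);
const state = parts[3] || '';            // 'LISTENING'
const address = parts[1] || '';          // '0.0.0.0:3456'
const pid = parseInt(parts[4], 10);      // 1234
const portMatch = address.match(/:(\d+)$/);
const port = portMatch ? parseInt(portMatch[1], 10) : 0;

// TIME_WAIT/CLOSE_WAIT rows, ports outside 3456-3499, and PID 0 no longer pass the filter.
const keep = state === 'LISTENING' && port >= 3456 && port < 3500 && !isNaN(pid) && pid > 0;
console.log({ port, pid, keep }); // { port: 3456, pid: 1234, keep: true }
```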
package/dist/infrastructure/session/SessionManager.js CHANGED
@@ -345,7 +345,25 @@ let SessionManager = class SessionManager extends events_1.EventEmitter {
         const tempPath = `${filePath}.tmp.${Date.now()}`;
         try {
             await promises_1.default.writeFile(tempPath, JSON.stringify(data, null, 2), 'utf-8');
-            await promises_1.default.rename(tempPath, filePath);
+            let retries = 3;
+            while (retries > 0) {
+                try {
+                    await promises_1.default.rename(tempPath, filePath);
+                    return;
+                }
+                catch (error) {
+                    if (error.code === 'EPERM' && process.platform === 'win32' && retries > 1) {
+                        await new Promise(resolve => setTimeout(resolve, 10));
+                        retries--;
+                        continue;
+                    }
+                    try {
+                        await promises_1.default.unlink(tempPath);
+                    }
+                    catch { }
+                    throw error;
+                }
+            }
         }
         catch (error) {
             try {
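The same rename-with-retry pattern now appears in both HttpServer (lock reclaim) and SessionManager (session persistence). Distilled into a standalone helper it looks roughly like the sketch below; the function name is hypothetical and not something the package exports.

```ts
import fs from 'fs/promises';

// Sketch of an atomic write: stage to a temp file, then rename over the target.
// On Windows, rename() can briefly fail with EPERM while antivirus or the indexer
// holds the destination, so it is retried a few times before giving up.
async function writeFileAtomic(filePath: string, data: string, retries = 3): Promise<void> {
    const tempPath = `${filePath}.tmp.${Date.now()}`;
    await fs.writeFile(tempPath, data, 'utf-8');
    while (retries > 0) {
        try {
            await fs.rename(tempPath, filePath);
            return;
        }
        catch (error: any) {
            if (error.code === 'EPERM' && process.platform === 'win32' && retries > 1) {
                await new Promise(resolve => setTimeout(resolve, 10));
                retries--;
                continue;
            }
            await fs.unlink(tempPath).catch(() => { }); // clean up the orphaned temp file
            throw error;
        }
    }
}
```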
package/dist/infrastructure/storage/enhanced-multi-source-workflow-storage.js CHANGED
@@ -8,6 +8,7 @@ exports.createEnhancedMultiSourceWorkflowStorage = createEnhancedMultiSourceWork
 const fs_1 = require("fs");
 const path_1 = __importDefault(require("path"));
 const os_1 = __importDefault(require("os"));
+const url_1 = require("url");
 const workflow_1 = require("../../types/workflow");
 const file_workflow_storage_1 = require("./file-workflow-storage");
 const git_workflow_storage_1 = require("./git-workflow-storage");
@@ -319,8 +320,16 @@ function createEnhancedMultiSourceWorkflowStorage(overrides = {}, featureFlagPro
     const urls = gitReposJson.split(',').map(url => url.trim());
     const localFileUrls = [];
     const actualGitUrls = [];
+    const isWindowsAbsolutePath = (p) => {
+        if (/^[a-zA-Z]:[\\/]/.test(p))
+            return true;
+        if (p.startsWith('\\\\'))
+            return true;
+        return false;
+    };
     for (const url of urls) {
-        if (url.startsWith('file://') || (!url.includes('://') && url.startsWith('/'))) {
+        const isLocalPath = !url.includes('://') && (url.startsWith('/') || isWindowsAbsolutePath(url));
+        if (url.startsWith('file://') || isLocalPath) {
             localFileUrls.push(url);
         }
         else {
@@ -330,7 +339,22 @@ function createEnhancedMultiSourceWorkflowStorage(overrides = {}, featureFlagPro
     if (localFileUrls.length > 0) {
         config.customPaths = config.customPaths || [];
         for (const url of localFileUrls) {
-            const localPath = url.startsWith('file://') ? url.substring(7) : url;
+            const localPath = (() => {
+                if (!url.startsWith('file://'))
+                    return url;
+                try {
+                    const decoded = decodeURIComponent(url);
+                    return (0, url_1.fileURLToPath)(new URL(decoded));
+                }
+                catch {
+                    try {
+                        return (0, url_1.fileURLToPath)(new URL(url));
+                    }
+                    catch {
+                        return url.substring('file://'.length);
+                    }
+                }
+            })();
             config.customPaths.push(localPath);
             logger.info('Using direct file access for local repository', { localPath });
         }
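Why the old `url.substring(7)` slice had to go, shown with example URLs (platform noted per line; the outputs are what Node's url helpers produce, not package behaviour):

```ts
import { fileURLToPath } from 'url';

fileURLToPath(new URL('file:///C:/repos/workflows'));        // 'C:\\repos\\workflows' on Windows
fileURLToPath(new URL('file:///home/user/my%20workflows'));  // '/home/user/my workflows' on POSIX
// 'file:///C:/repos/workflows'.substring(7) would have produced '/C:/repos/workflows',
// and percent-encoded characters would have been left encoded.
```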
package/dist/infrastructure/storage/file-workflow-storage.js CHANGED
@@ -11,6 +11,7 @@ const path_1 = __importDefault(require("path"));
 const workflow_1 = require("../../types/workflow");
 const error_handler_1 = require("../../core/error-handler");
 const workflow_id_policy_1 = require("../../domain/workflow-id-policy");
+const storage_security_1 = require("../../utils/storage-security");
 function sanitizeId(id) {
     if (id.includes('\u0000')) {
         throw new error_handler_1.SecurityError('Null byte detected in identifier', 'sanitizeId');
@@ -23,9 +24,7 @@ function sanitizeId(id) {
     return normalised;
 }
 function assertWithinBase(resolvedPath, baseDir) {
-    if (!resolvedPath.startsWith(baseDir + path_1.default.sep) && resolvedPath !== baseDir) {
-        throw new error_handler_1.SecurityError('Path escapes storage sandbox', 'file-access');
-    }
+    (0, storage_security_1.assertWithinBase)(resolvedPath, baseDir);
 }
 class FileWorkflowStorage {
     constructor(directory, source, featureFlagProvider, options = {}) {
@@ -69,7 +68,8 @@ class FileWorkflowStorage {
         for (const file of relativeFiles) {
             try {
                 if (!this.featureFlags.isEnabled('agenticRoutines')) {
-                    if (file.includes('routines/') || path_1.default.basename(file).startsWith('routine-')) {
+                    const normalizedFile = file.replace(/\\/g, '/');
+                    if (normalizedFile.includes('routines/') || path_1.default.basename(file).startsWith('routine-')) {
                         continue;
                     }
                 }
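The local prefix check is replaced by the shared helper from utils/storage-security (also updated in this release). As a sketch of the general idea only, not the package's actual implementation, a cross-platform containment check usually looks like this:

```ts
import path from 'path';

// Hypothetical sketch: treat resolvedPath as inside baseDir when the relative path
// neither climbs out with '..' nor lands on another root/drive (path.relative returns
// an absolute path on Windows when the two paths are on different drives).
function isWithinBase(resolvedPath: string, baseDir: string): boolean {
    const rel = path.relative(baseDir, resolvedPath);
    return rel === '' || (!rel.startsWith('..') && !path.isAbsolute(rel));
}
```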
package/dist/infrastructure/storage/git-workflow-storage.d.ts CHANGED
@@ -35,5 +35,4 @@ export declare class GitWorkflowStorage implements IWorkflowStorage {
     private cloneRepository;
     private pullRepository;
     private gitCommitAndPush;
-    private escapeShellArg;
 }
package/dist/infrastructure/storage/git-workflow-storage.js CHANGED
@@ -11,10 +11,12 @@ const path_1 = __importDefault(require("path"));
 const promises_1 = __importDefault(require("fs/promises"));
 const fs_1 = require("fs");
 const os_1 = __importDefault(require("os"));
+const url_1 = require("url");
 const storage_security_1 = require("../../utils/storage-security");
 const error_handler_1 = require("../../core/error-handler");
 const logger_1 = require("../../utils/logger");
 const execAsync = (0, util_1.promisify)(child_process_1.exec);
+const execFileAsync = (0, util_1.promisify)(child_process_1.execFile);
 const logger = (0, logger_1.createLogger)('GitWorkflowStorage');
 class GitWorkflowStorage {
     constructor(config, source) {
@@ -64,6 +66,8 @@ class GitWorkflowStorage {
         };
     }
     isValidGitUrl(url) {
+        if (/^[a-zA-Z]:[\\/]/.test(url) || url.startsWith('\\\\'))
+            return true;
         const sshPattern = /^git@[\w.-]+:[\w\/-]+\.git$/;
         if (sshPattern.test(url))
             return true;
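The new early return means bare Windows paths count as valid repository locations before the ssh/https patterns run; example inputs (the helper name below is only for illustration):

```ts
// Same test the diff adds, factored out for readability.
const looksLikeWindowsPath = (url: string) => /^[a-zA-Z]:[\\/]/.test(url) || url.startsWith('\\\\');

looksLikeWindowsPath('C:\\repos\\workflows');        // true  (drive-letter path)
looksLikeWindowsPath('\\\\fileserver\\share\\wf');   // true  (UNC share)
looksLikeWindowsPath('git@github.com:org/repo.git'); // false (falls through to the ssh pattern)
```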
@@ -226,27 +230,35 @@ class GitWorkflowStorage {
         const parentDir = path_1.default.dirname(this.localPath);
         await promises_1.default.mkdir(parentDir, { recursive: true });
         let cloneUrl = this.config.repositoryUrl;
-        if (cloneUrl.startsWith('/')) {
-            cloneUrl = `file://${cloneUrl}`;
+        if (!cloneUrl.includes('://')) {
+            const abs = path_1.default.resolve(cloneUrl);
+            cloneUrl = (0, url_1.pathToFileURL)(abs).href;
+        }
+        else if (cloneUrl.startsWith('file://') && process.platform === 'win32') {
+            try {
+                const raw = cloneUrl.substring('file://'.length);
+                if (/^[a-zA-Z]:[\\/]/.test(raw)) {
+                    cloneUrl = (0, url_1.pathToFileURL)(path_1.default.resolve(raw)).href;
+                }
+            }
+            catch {
+            }
         }
         if (!this.isSshUrl(this.config.repositoryUrl) && this.config.authToken && cloneUrl.startsWith('https://')) {
             cloneUrl = cloneUrl.replace('https://', `https://${this.config.authToken}@`);
         }
-        const escapedUrl = this.escapeShellArg(cloneUrl);
-        const escapedBranch = this.escapeShellArg(this.config.branch);
-        const escapedPath = this.escapeShellArg(this.localPath);
-        let command = `git clone --branch ${escapedBranch} ${escapedUrl} ${escapedPath}`;
         try {
-            await execAsync(command, { timeout: 60000 });
+            const dest = process.platform === 'win32' ? this.localPath.replace(/\\/g, '/') : this.localPath;
+            await execFileAsync('git', ['clone', '--branch', this.config.branch, cloneUrl, dest], { timeout: 60000 });
             logger.info('Successfully cloned repository', { branch: this.config.branch });
         }
         catch (error) {
             const errorMsg = error.message;
             if (errorMsg.includes('Remote branch') && errorMsg.includes('not found')) {
-                command = `git clone ${escapedUrl} ${escapedPath}`;
                 try {
-                    await execAsync(command, { timeout: 60000 });
-                    const { stdout } = await execAsync('git rev-parse --abbrev-ref HEAD', { cwd: this.localPath });
+                    const dest = process.platform === 'win32' ? this.localPath.replace(/\\/g, '/') : this.localPath;
+                    await execFileAsync('git', ['clone', cloneUrl, dest], { timeout: 60000 });
+                    const { stdout } = await execFileAsync('git', ['rev-parse', '--abbrev-ref', 'HEAD'], { cwd: this.localPath });
                     this.config.branch = stdout.trim();
                 }
                 catch (fallbackError) {
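The normalisation above leans on url.pathToFileURL instead of string concatenation. Illustrative inputs and the URLs Node produces (platform noted per line):

```ts
import path from 'path';
import { pathToFileURL } from 'url';

pathToFileURL(path.resolve('/srv/repos/workflows')).href; // 'file:///srv/repos/workflows' on POSIX
pathToFileURL(path.resolve('C:\\repos\\workflows')).href; // 'file:///C:/repos/workflows' on Windows
// The old branch only wrapped URLs starting with '/', so a bare 'C:\repos\workflows'
// was handed to git clone unchanged.
```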
@@ -259,15 +271,13 @@ class GitWorkflowStorage {
         }
     }
     async pullRepository() {
-        const escapedPath = this.escapeShellArg(this.localPath);
-        const escapedBranch = this.escapeShellArg(this.config.branch);
         try {
-            await execAsync(`cd ${escapedPath} && git fetch origin ${escapedBranch}`, { timeout: 30000 });
-            await execAsync(`cd ${escapedPath} && git reset --hard origin/${escapedBranch}`, { timeout: 30000 });
+            await execFileAsync('git', ['fetch', 'origin', this.config.branch], { cwd: this.localPath, timeout: 30000 });
+            await execFileAsync('git', ['reset', '--hard', `origin/${this.config.branch}`], { cwd: this.localPath, timeout: 30000 });
         }
         catch {
             try {
-                await execAsync(`cd ${escapedPath} && git pull origin ${escapedBranch}`, { timeout: 30000 });
+                await execFileAsync('git', ['pull', 'origin', this.config.branch], { cwd: this.localPath, timeout: 30000 });
             }
             catch (pullError) {
                 logger.warn('Git pull failed, using cached version', pullError);
@@ -275,25 +285,14 @@ class GitWorkflowStorage {
         }
     }
     async gitCommitAndPush(definition) {
-        const escapedPath = this.escapeShellArg(this.localPath);
-        const escapedFilename = this.escapeShellArg(`workflows/${definition.id}.json`);
-        const escapedMessage = this.escapeShellArg(`Add/update workflow: ${definition.name}`);
-        const escapedBranch = this.escapeShellArg(this.config.branch);
-        const command = [
-            `cd ${escapedPath}`,
-            `git add ${escapedFilename}`,
-            `git commit -m ${escapedMessage}`,
-            `git push origin ${escapedBranch}`
-        ].join(' && ');
         try {
-            await execAsync(command, { timeout: 60000 });
+            await execFileAsync('git', ['add', `workflows/${definition.id}.json`], { cwd: this.localPath, timeout: 60000 });
+            await execFileAsync('git', ['commit', '-m', `Add/update workflow: ${definition.name}`], { cwd: this.localPath, timeout: 60000 });
+            await execFileAsync('git', ['push', 'origin', this.config.branch], { cwd: this.localPath, timeout: 60000 });
         }
         catch (error) {
             throw new error_handler_1.StorageError(`Failed to push workflow to repository: ${error.message}`);
         }
     }
-    escapeShellArg(arg) {
-        return `'${arg.replace(/'/g, "'\"'\"'")}'`;
-    }
 }
 exports.GitWorkflowStorage = GitWorkflowStorage;
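Dropping escapeShellArg is possible because execFile passes arguments straight to git without a shell, so branch names, commit messages, and paths are never re-parsed as shell syntax. A minimal sketch of the pattern in isolation (hypothetical wrapper, same call shape as the diff):

```ts
import { execFile } from 'child_process';
import { promisify } from 'util';

const execFileAsync = promisify(execFile);

// With exec(), `origin/${branch}` would have to be shell-quoted; with execFile(),
// a branch like "main; rm -rf ." reaches git as a single literal (and invalid) ref name.
async function hardSync(localPath: string, branch: string): Promise<void> {
    await execFileAsync('git', ['fetch', 'origin', branch], { cwd: localPath, timeout: 30000 });
    await execFileAsync('git', ['reset', '--hard', `origin/${branch}`], { cwd: localPath, timeout: 30000 });
}
```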
package/dist/infrastructure/storage/plugin-workflow-storage.js CHANGED
@@ -28,7 +28,8 @@ class PluginWorkflowStorage {
         for (const pluginPath of pluginPaths) {
             try {
                 if (path_1.default.isAbsolute(pluginPath)) {
-                    (0, storage_security_1.assertWithinBase)(pluginPath, '/');
+                    const baseCheck = process.platform === 'win32' ? path_1.default.parse(pluginPath).root : '/';
+                    (0, storage_security_1.assertWithinBase)(pluginPath, baseCheck);
                 }
             }
             catch (error) {
@@ -46,10 +47,15 @@ class PluginWorkflowStorage {
     getDefaultPluginPaths() {
         const paths = [];
         try {
-            const globalPath = require.resolve('npm').replace(/\/npm\/.*$/, '');
-            const globalNodeModules = path_1.default.join(globalPath, 'node_modules');
-            if ((0, fs_1.existsSync)(globalNodeModules)) {
-                paths.push(globalNodeModules);
+            const npmPath = require.resolve('npm');
+            const npmSegments = npmPath.split(path_1.default.sep);
+            const npmIdx = npmSegments.findIndex((s) => s === 'npm');
+            if (npmIdx > 0) {
+                const globalPath = npmSegments.slice(0, npmIdx).join(path_1.default.sep);
+                const globalNodeModules = path_1.default.join(globalPath, 'node_modules');
+                if ((0, fs_1.existsSync)(globalNodeModules)) {
+                    paths.push(globalNodeModules);
+                }
             }
         }
         catch {
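The hard-coded '/' sandbox base only makes sense on POSIX; on Windows an absolute plugin path is rooted at its drive or UNC share, which is what path.parse(...).root now supplies. Illustrative values via the platform-specific parsers:

```ts
import path from 'path';

path.win32.parse('C:\\plugins\\extra-workflows').root; // 'C:\\'
path.win32.parse('\\\\server\\share\\plugins').root;   // '\\\\server\\share\\'
path.posix.parse('/usr/lib/node_modules').root;        // '/'
```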