@yemi33/minions 0.1.1712 → 0.1.1714

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,12 @@
1
1
  # Changelog
2
2
 
3
+ ## 0.1.1714 (2026-05-04)
4
+
5
+ ### Features
6
+ - loosen work item create dedupe (#2053)
7
+ - shorten Windows worktree paths (#2051)
8
+ - fix ado project metadata discovery (#2048)
9
+
3
10
  ## 0.1.1712 (2026-05-04)
4
11
 
5
12
  ### Fixes
package/README.md CHANGED
@@ -541,7 +541,7 @@ Engine behavior is controlled via `config.json`. Key settings:
541
541
  | `inboxConsolidateThreshold` | 5 | Inbox files needed before consolidation |
542
542
  | `worktreeCreateTimeout` | 300000 (5min) | Timeout for each `git worktree add` attempt |
543
543
  | `worktreeCreateRetries` | 1 | Retry count for transient `git worktree add` failures (0-3) |
544
- | `worktreeRoot` | `../worktrees` | Where git worktrees are created |
544
+ | `worktreeRoot` | `../worktrees` | Where git worktrees are created; may be absolute, and Windows users should prefer a short root such as `C:\wt` |
545
545
  | `idleAlertMinutes` | 15 | Alert after no dispatch for this many minutes |
546
546
  | `restartGracePeriod` | 1200000 (20min) | Grace period for agent re-attachment after engine restart |
547
547
  | `shutdownTimeout` | 300000 (5min) | Max wait for active agents during graceful shutdown (SIGTERM/SIGINT) |
@@ -75,7 +75,7 @@ async function openSettings() {
75
75
  settingsField('Heartbeat Timeout', 'set-heartbeatTimeout', e.heartbeatTimeout || 300000, 'ms', 'No output = dead after this') +
76
76
  settingsField('Worktree Create Timeout', 'set-worktreeCreateTimeout', e.worktreeCreateTimeout || 300000, 'ms', 'Timeout for git worktree add (increase for large repos/Windows)') +
77
77
  settingsField('Worktree Create Retries', 'set-worktreeCreateRetries', e.worktreeCreateRetries || 1, '', 'Retry count for transient worktree add failures (0-3)') +
78
- settingsField('Worktree Root', 'set-worktreeRoot', e.worktreeRoot || '../worktrees', '', 'Relative path for git worktrees') +
78
+ settingsField('Worktree Root', 'set-worktreeRoot', e.worktreeRoot || '../worktrees', '', 'Relative or absolute path for git worktrees; on Windows prefer a short path like C:\\wt') +
79
79
  settingsField('Idle Alert', 'set-idleAlertMinutes', e.idleAlertMinutes || 15, 'min', 'Alert after agent idle this long') +
80
80
  settingsField('Shutdown Timeout', 'set-shutdownTimeout', e.shutdownTimeout || 300000, 'ms', 'Max wait for agents during graceful shutdown') +
81
81
  settingsField('Restart Grace Period', 'set-restartGracePeriod', e.restartGracePeriod || 1200000, 'ms', 'Grace period before orphan detection on restart') +
package/dashboard.js CHANGED
@@ -29,6 +29,7 @@ const routing = require('./engine/routing');
29
29
  const playbook = require('./engine/playbook');
30
30
  const dispatchMod = require('./engine/dispatch');
31
31
  const steering = require('./engine/steering');
32
+ const projectDiscovery = require('./engine/project-discovery');
32
33
  const os = require('os');
33
34
 
34
35
  const { safeRead, safeReadDir, safeWrite, safeJson, safeJsonObj, safeJsonArr, safeUnlink, mutateJsonFileLocked, mutateControl, mutateCooldowns, mutateWorkItems, getProjects: _getProjects, DONE_STATUSES, WI_STATUS, WORK_TYPE, reopenWorkItem } = shared;
@@ -134,6 +135,12 @@ function normalizeWorkItemDedupText(value) {
134
135
  .trim();
135
136
  }
136
137
 
138
+ function normalizeWorkItemDedupTitle(value) {
139
+ return normalizeWorkItemDedupText(value)
140
+ .replace(/\s+/g, ' ')
141
+ .toLowerCase();
142
+ }
143
+
137
144
  function resolveWorkItemDedupProject(item, wiPath = '') {
138
145
  const projectName = normalizeWorkItemDedupText(item?.project || item?._project || item?._source);
139
146
  if (projectName) {
@@ -180,15 +187,30 @@ function normalizeWorkItemDedupPrIdentity(item, project = null) {
180
187
  function workItemCreateFingerprint(item, options = {}) {
181
188
  const project = resolveWorkItemDedupProject(item, options.wiPath);
182
189
  return {
183
- title: normalizeWorkItemDedupText(item?.title),
190
+ title: normalizeWorkItemDedupTitle(item?.title),
184
191
  type: routing.normalizeWorkType(item?.type || item?.workType, WORK_TYPE.IMPLEMENT),
185
- priority: normalizeWorkItemDedupText(item?.priority || 'medium').toLowerCase(),
186
- description: normalizeWorkItemDedupText(item?.description),
192
+ source: normalizeWorkItemDedupText(project?.name || item?.project || item?._project || item?._source).toLowerCase(),
187
193
  scope: normalizeWorkItemDedupText(item?.scope).toLowerCase(),
188
194
  prIdentity: normalizeWorkItemDedupPrIdentity(item, project),
189
195
  };
190
196
  }
191
197
 
198
+ function isCompatibleWorkItemCreateScope(existingFingerprint, candidateFingerprint) {
199
+ const existingFanOut = existingFingerprint.scope === 'fan-out';
200
+ const candidateFanOut = candidateFingerprint.scope === 'fan-out';
201
+ if (existingFanOut || candidateFanOut) {
202
+ return existingFingerprint.scope === candidateFingerprint.scope;
203
+ }
204
+ return true;
205
+ }
206
+
207
+ function isCompatibleWorkItemCreatePrIdentity(existingFingerprint, candidateFingerprint) {
208
+ if (existingFingerprint.prIdentity || candidateFingerprint.prIdentity) {
209
+ return existingFingerprint.prIdentity === candidateFingerprint.prIdentity;
210
+ }
211
+ return true;
212
+ }
213
+
192
214
  function isActiveWorkItemCreateStatus(status) {
193
215
  return status === WI_STATUS.PENDING || status === WI_STATUS.DISPATCHED || status === WI_STATUS.QUEUED;
194
216
  }
@@ -210,10 +232,9 @@ function findDuplicateWorkItemCreate(items, candidate, options = {}) {
210
232
  const existingFingerprint = workItemCreateFingerprint(item, options);
211
233
  return existingFingerprint.title === candidateFingerprint.title &&
212
234
  existingFingerprint.type === candidateFingerprint.type &&
213
- existingFingerprint.priority === candidateFingerprint.priority &&
214
- existingFingerprint.description === candidateFingerprint.description &&
215
- existingFingerprint.scope === candidateFingerprint.scope &&
216
- existingFingerprint.prIdentity === candidateFingerprint.prIdentity;
235
+ existingFingerprint.source === candidateFingerprint.source &&
236
+ isCompatibleWorkItemCreateScope(existingFingerprint, candidateFingerprint) &&
237
+ isCompatibleWorkItemCreatePrIdentity(existingFingerprint, candidateFingerprint);
217
238
  }) || null;
218
239
  }
219
240
 
@@ -4854,17 +4875,6 @@ What would you like to discuss or change? When you're happy, say "approve" and I
4854
4875
  return jsonReply(res, 200, { confirmToken: token, ttlMs: PROJECT_CONFIRM_TOKEN_TTL_MS });
4855
4876
  }
4856
4877
 
4857
- function _execGitInRepo(repoPath, args, timeoutMs) {
4858
- const { execFileSync } = require('child_process');
4859
- return execFileSync('git', args, {
4860
- cwd: repoPath,
4861
- encoding: 'utf8',
4862
- timeout: timeoutMs || 5000,
4863
- stdio: ['ignore', 'pipe', 'pipe'],
4864
- windowsHide: true,
4865
- }).trim();
4866
- }
4867
-
4868
4878
  async function handleProjectsAdd(req, res) {
4869
4879
  try {
4870
4880
  const body = await readBody(req);
@@ -4897,43 +4907,11 @@ What would you like to discuss or change? When you're happy, say "approve" and I
4897
4907
  return jsonReply(res, 400, { error: 'Project already linked at ' + target });
4898
4908
  }
4899
4909
 
4900
- // Auto-discover from git repo
4901
- const detected = { name: path.basename(target), _found: [] };
4902
- try {
4903
- let head = '';
4904
- try { head = _execGitInRepo(target, ['symbolic-ref', 'refs/remotes/origin/HEAD'], 5000); }
4905
- catch { head = _execGitInRepo(target, ['symbolic-ref', 'HEAD'], 5000); }
4906
- if (!head) throw new Error('empty git ref');
4907
- detected.mainBranch = head.replace('refs/remotes/origin/', '').replace('refs/heads/', '');
4908
- } catch { detected.mainBranch = 'main'; }
4909
- try {
4910
- const remoteUrl = _execGitInRepo(target, ['remote', 'get-url', 'origin'], 5000);
4911
- if (remoteUrl.includes('github.com')) {
4912
- detected.repoHost = 'github';
4913
- const m = remoteUrl.match(/github\.com[:/]([^/]+)\/([^/.]+)/);
4914
- if (m) { detected.org = m[1]; detected.repoName = m[2]; }
4915
- } else if (remoteUrl.includes('visualstudio.com') || remoteUrl.includes('dev.azure.com')) {
4916
- detected.repoHost = 'ado';
4917
- const m = remoteUrl.match(/https:\/\/([^.]+)\.visualstudio\.com[^/]*\/([^/]+)\/_git\/([^/\s]+)/) ||
4918
- remoteUrl.match(/https:\/\/dev\.azure\.com\/([^/]+)\/([^/]+)\/_git\/([^/\s]+)/);
4919
- if (m) { detected.org = m[1]; detected.project = m[2]; detected.repoName = m[3]; }
4920
- }
4921
- } catch (e) { console.error('git remote detection:', e.message); }
4922
- try {
4923
- const pkgPath = path.join(target, 'package.json');
4924
- if (fs.existsSync(pkgPath)) {
4925
- const pkg = safeJson(pkgPath);
4926
- if (pkg.name) detected.name = pkg.name.replace(/^@[^/]+\//, '');
4927
- }
4928
- } catch { /* optional */ }
4929
- let description = '';
4930
- try {
4931
- const claudeMd = path.join(target, 'CLAUDE.md');
4932
- if (fs.existsSync(claudeMd)) {
4933
- const lines = (safeRead(claudeMd) || '').split('\n').filter(l => l.trim() && !l.startsWith('#'));
4934
- if (lines[0] && lines[0].length < 200) description = lines[0].trim();
4935
- }
4936
- } catch { /* optional */ }
4910
+ // Auto-discover from git repo. Shared with minions.js so CLI and dashboard
4911
+ // handle ADO URL variants and repository GUID enrichment consistently.
4912
+ const detected = projectDiscovery.discoverProjectMetadata(target);
4913
+ if (!detected.name) detected.name = path.basename(target);
4914
+ const description = detected.description || '';
4937
4915
 
4938
4916
  const rawName = body.name || detected.name;
4939
4917
 
@@ -4949,17 +4927,18 @@ What would you like to discuss or change? When you're happy, say "approve" and I
4949
4927
  return jsonReply(res, e.statusCode || 400, { error: e.message });
4950
4928
  }
4951
4929
 
4952
- const prUrlBase = detected.repoHost === 'github'
4953
- ? (detected.org && detected.repoName ? `https://github.com/${detected.org}/${detected.repoName}/pull/` : '')
4954
- : (detected.org && detected.project && detected.repoName
4955
- ? `https://${detected.org}.visualstudio.com/DefaultCollection/${detected.project}/_git/${detected.repoName}/pullrequest/` : '');
4956
-
4957
4930
  const project = {
4958
4931
  name, description, localPath: target.replace(/\\/g, '/'),
4959
- repoHost: detected.repoHost || 'ado', repositoryId: '',
4932
+ repoHost: detected.repoHost || 'ado', repositoryId: detected.repositoryId || '',
4960
4933
  adoOrg: detected.org || '', adoProject: detected.project || '',
4961
4934
  repoName: detected.repoName || name, mainBranch: detected.mainBranch || 'main',
4962
- prUrlBase,
4935
+ prUrlBase: projectDiscovery.buildPrUrlBase({
4936
+ repoHost: detected.repoHost,
4937
+ org: detected.org,
4938
+ project: detected.project,
4939
+ repoName: detected.repoName,
4940
+ prUrlBase: detected.prUrlBase,
4941
+ }),
4963
4942
  workSources: { pullRequests: { enabled: true, cooldownMinutes: 30 }, workItems: { enabled: true, cooldownMinutes: 0 } }
4964
4943
  };
4965
4944
 
@@ -5028,20 +5007,8 @@ What would you like to discuss or change? When you're happy, say "approve" and I
5028
5007
  // Enrich each repo with metadata
5029
5008
  const existingPaths = new Set(PROJECTS.map(p => path.resolve(p.localPath)));
5030
5009
  const results = repos.map(repoPath => {
5031
- const result = { path: repoPath.replace(/\\/g, '/'), name: path.basename(repoPath), host: 'git', linked: existingPaths.has(path.resolve(repoPath)) };
5032
- try {
5033
- const remoteUrl = _execGitInRepo(repoPath, ['remote', 'get-url', 'origin'], 3000);
5034
- const gh = remoteUrl.match(/github\.com[:/]([^/]+)\/([^/.]+)/);
5035
- const ado = remoteUrl.match(/dev\.azure\.com\/([^/]+)\/([^/]+)\/_git\/([^/\s]+)/) || remoteUrl.match(/([^.]+)\.visualstudio\.com.*?\/([^/]+)\/_git\/([^/\s]+)/);
5036
- if (gh) { result.host = 'GitHub'; result.org = gh[1]; result.name = gh[2]; }
5037
- else if (ado) { result.host = 'ADO'; result.org = ado[1]; result.name = ado[3] || ado[2]; }
5038
- } catch { /* no remote */ }
5039
- try {
5040
- const pkg = JSON.parse(fs.readFileSync(path.join(repoPath, 'package.json'), 'utf8'));
5041
- if (pkg.name) result.name = pkg.name.replace(/@[^/]+\//, '');
5042
- if (pkg.description) result.description = pkg.description.slice(0, 100);
5043
- } catch { /* no package.json */ }
5044
- return result;
5010
+ const detected = projectDiscovery.discoverProjectMetadata(repoPath, { adoLookupTimeoutMs: 5000 });
5011
+ return projectDiscovery.buildScanResult(repoPath, detected, existingPaths.has(path.resolve(repoPath)));
5045
5012
  });
5046
5013
 
5047
5014
  return jsonReply(res, 200, { repos: results });
package/engine/cleanup.js CHANGED
@@ -39,6 +39,23 @@ function worktreeDirMatchesBranch(dirLower, branch) {
39
39
  return dirLower === branchSlug || dirLower.includes(branchSlug + '-') || dirLower.endsWith('-' + branchSlug);
40
40
  }
41
41
 
42
+ function worktreeBranchMatches(actualBranch, branch) {
43
+ if (!actualBranch || !branch) return false;
44
+ return sanitizeBranch(actualBranch).toLowerCase() === sanitizeBranch(branch).toLowerCase();
45
+ }
46
+
47
+ function worktreeMatchesBranch(dirLower, branch, actualBranch = '') {
48
+ return worktreeBranchMatches(actualBranch, branch) || worktreeDirMatchesBranch(dirLower, branch);
49
+ }
50
+
51
+ function getWorktreeBranch(wtPath) {
52
+ try {
53
+ return exec(`git -C "${wtPath}" branch --show-current`, { encoding: 'utf8', stdio: 'pipe', timeout: 5000, windowsHide: true }).trim();
54
+ } catch {
55
+ return '';
56
+ }
57
+ }
58
+
42
59
  let _orphanPidProcessNamesCache = null;
43
60
  function _orphanPidProcessNames() {
44
61
  if (_orphanPidProcessNamesCache) return _orphanPidProcessNamesCache;
@@ -235,12 +252,13 @@ function runCleanup(config, verbose = false) {
235
252
 
236
253
  let shouldClean = false;
237
254
  let isProtected = false;
255
+ const actualBranch = getWorktreeBranch(wtPath);
238
256
 
239
257
  // Check if this worktree's branch is merged/abandoned
240
- // Use sanitized exact match on the branch portion of the dir name (format: {slug}-{branch}-{suffix})
258
+ // Prefer actual git branch metadata; compact Windows dirs intentionally omit branch names.
241
259
  const dirLower = dir.toLowerCase();
242
260
  for (const branch of mergedBranches) {
243
- if (worktreeDirMatchesBranch(dirLower, branch)) {
261
+ if (worktreeMatchesBranch(dirLower, branch, actualBranch)) {
244
262
  shouldClean = true;
245
263
  break;
246
264
  }
@@ -249,8 +267,7 @@ function runCleanup(config, verbose = false) {
249
267
  // Check if referenced by active/pending dispatch (use sanitized branch comparison)
250
268
  const isReferenced = [...dispatch.pending, ...(dispatch.active || [])].some(d => {
251
269
  if (!d.meta?.branch) return false;
252
- const dispBranch = sanitizeBranch(d.meta.branch).toLowerCase();
253
- return dirLower.includes(dispBranch);
270
+ return worktreeMatchesBranch(dirLower, d.meta.branch, actualBranch);
254
271
  });
255
272
  if (isReferenced) isProtected = true;
256
273
 
@@ -275,8 +292,7 @@ function runCleanup(config, verbose = false) {
275
292
  for (const pf of fs.readdirSync(checkDir).filter(f => f.endsWith('.json'))) {
276
293
  const plan = safeJson(path.join(checkDir, pf));
277
294
  if (plan?.branch_strategy === 'shared-branch' && plan?.feature_branch && plan?.status !== 'completed') {
278
- const planBranch = sanitizeBranch(plan.feature_branch).toLowerCase();
279
- if (dirLower.includes(planBranch)) {
295
+ if (worktreeMatchesBranch(dirLower, plan.feature_branch, actualBranch)) {
280
296
  isProtected = true;
281
297
  if (shouldClean) {
282
298
  shouldClean = false;
@@ -291,7 +307,7 @@ function runCleanup(config, verbose = false) {
291
307
  } catch (e) { log('warn', 'check shared-branch protection: ' + e.message); }
292
308
  }
293
309
 
294
- wtEntries.push({ dir, wtPath, mtime, shouldClean, isProtected });
310
+ wtEntries.push({ dir, wtPath, mtime, shouldClean, isProtected, actualBranch });
295
311
  }
296
312
 
297
313
  // Enforce max worktree cap — if over limit, mark oldest unprotected for cleanup
@@ -323,7 +339,7 @@ function runCleanup(config, verbose = false) {
323
339
  const entryDirLower = entry.dir.toLowerCase();
324
340
  let stillMerged = false;
325
341
  for (const branch of freshMergedBranches) {
326
- if (worktreeDirMatchesBranch(entryDirLower, branch)) {
342
+ if (worktreeMatchesBranch(entryDirLower, branch, entry.actualBranch)) {
327
343
  stillMerged = true;
328
344
  break;
329
345
  }
@@ -331,7 +347,7 @@ function runCleanup(config, verbose = false) {
331
347
  // If originally marked due to merged branch but PR was reopened, skip deletion
332
348
  if (!stillMerged) {
333
349
  // Check if it was marked for age/cap cleanup (not branch-based) — those are still valid
334
- const wasMarkedByBranch = [...mergedBranches].some(branch => worktreeDirMatchesBranch(entryDirLower, branch));
350
+ const wasMarkedByBranch = [...mergedBranches].some(branch => worktreeMatchesBranch(entryDirLower, branch, entry.actualBranch));
335
351
  if (wasMarkedByBranch) {
336
352
  if (verbose) console.log(` Skipping worktree ${entry.dir}: PR was reopened since initial check`);
337
353
  log('info', `Worktree deletion skipped — PR reopened: ${entry.dir}`);
@@ -832,4 +848,6 @@ module.exports = {
832
848
  runCleanup,
833
849
  scrubStaleMetrics,
834
850
  worktreeDirMatchesBranch, // exported for testing
851
+ worktreeMatchesBranch, // exported for testing
852
+ getWorktreeBranch, // exported for lifecycle cleanup
835
853
  };
@@ -1,5 +1,5 @@
1
1
  {
2
2
  "runtime": "copilot",
3
3
  "models": null,
4
- "cachedAt": "2026-05-04T17:48:19.949Z"
4
+ "cachedAt": "2026-05-04T18:45:33.215Z"
5
5
  }
@@ -14,7 +14,7 @@ const { trackEngineUsage } = require('./llm');
14
14
  const { resolveRuntime } = require('./runtimes');
15
15
  const queries = require('./queries');
16
16
  const { isBranchActive } = require('./cooldown');
17
- const { worktreeDirMatchesBranch } = require('./cleanup');
17
+ const { worktreeMatchesBranch, getWorktreeBranch } = require('./cleanup');
18
18
  const { getConfig, getInboxFiles, getNotes, getPrs, getDispatch,
19
19
  MINIONS_DIR, ENGINE_DIR, PLANS_DIR, PRD_DIR, INBOX_DIR, AGENTS_DIR } = queries;
20
20
 
@@ -446,10 +446,12 @@ function cleanupPlanWorktrees(planFile, plan, projects, config) {
446
446
  if (!fs.existsSync(wtRoot)) continue;
447
447
  const dirs = fs.readdirSync(wtRoot);
448
448
  for (const dir of dirs) {
449
+ const wtPath = path.join(wtRoot, dir);
449
450
  const dirLower = dir.toLowerCase();
450
- const matches = [...branchSlugs].some(slug => dirLower.includes(slug));
451
+ const actualBranch = getWorktreeBranch(wtPath);
452
+ const actualBranchSlug = actualBranch ? shared.sanitizeBranch(actualBranch).toLowerCase() : '';
453
+ const matches = [...branchSlugs].some(slug => dirLower.includes(slug) || actualBranchSlug === slug);
451
454
  if (matches) {
452
- const wtPath = path.join(wtRoot, dir);
453
455
  if (shared.removeWorktree(wtPath, root, wtRoot)) cleanedWt++;
454
456
  }
455
457
  }
@@ -1587,13 +1589,13 @@ async function handlePostMerge(pr, project, config, newStatus) {
1587
1589
  if (pr.branch && project) {
1588
1590
  const root = path.resolve(project.localPath);
1589
1591
  const wtRoot = path.resolve(root, config.engine?.worktreeRoot || '../worktrees');
1590
- // Find worktrees matching this branch dir format is {slug}-{branch}-{suffix}
1592
+ // Find worktrees matching this branch; compact Windows dirs require branch metadata.
1591
1593
  try {
1592
1594
  const dirs = require('fs').readdirSync(wtRoot);
1593
1595
  for (const dir of dirs) {
1596
+ const wtPath = path.join(wtRoot, dir);
1594
1597
  const dirLower = dir.toLowerCase();
1595
- if (worktreeDirMatchesBranch(dirLower, pr.branch) || dir === pr.branch || dir === `bt-${prNum}`) {
1596
- const wtPath = path.join(wtRoot, dir);
1598
+ if (worktreeMatchesBranch(dirLower, pr.branch, getWorktreeBranch(wtPath)) || dir === pr.branch || dir === `bt-${prNum}`) {
1597
1599
  try {
1598
1600
  if (!require('fs').statSync(wtPath).isDirectory()) continue;
1599
1601
  execSilent(`git worktree remove "${wtPath}" --force`, { cwd: root, stdio: 'pipe', timeout: 15000 });
@@ -2724,7 +2726,9 @@ async function runPostCompletionHooks(dispatchItem, agentId, code, stdout, confi
2724
2726
  // Find the worktree directory for this dispatch's branch
2725
2727
  const branchSlug = shared.sanitizeBranch ? shared.sanitizeBranch(meta.branch) : meta.branch.replace(/[^a-zA-Z0-9._\-\/]/g, '-');
2726
2728
  const dirs = fs.readdirSync(worktreeRoot).filter(d => {
2727
- return worktreeDirMatchesBranch(d.toLowerCase(), meta.branch) && fs.statSync(path.join(worktreeRoot, d)).isDirectory();
2729
+ const wtPath = path.join(worktreeRoot, d);
2730
+ return fs.statSync(wtPath).isDirectory()
2731
+ && worktreeMatchesBranch(d.toLowerCase(), meta.branch, getWorktreeBranch(wtPath));
2728
2732
  });
2729
2733
  // Only remove if no other active dispatch uses this branch
2730
2734
  const dispatch = getDispatch();
@@ -0,0 +1,377 @@
1
+ /**
2
+ * Shared project metadata discovery for CLI and dashboard project linking.
3
+ */
4
+
5
+ const fs = require('fs');
6
+ const path = require('path');
7
+ const { execFileSync: defaultExecFileSync } = require('child_process');
8
+
9
+ function decodeUrlSegment(segment) {
10
+ try { return decodeURIComponent(String(segment || '')); } catch { return String(segment || ''); }
11
+ }
12
+
13
+ function stripGitSuffix(value) {
14
+ return String(value || '').replace(/\.git$/i, '');
15
+ }
16
+
17
+ function encodePathSegment(segment) {
18
+ return encodeURIComponent(String(segment || '')).replace(/%2F/gi, '/');
19
+ }
20
+
21
+ function normalizeRemoteForUrl(remoteUrl) {
22
+ const raw = String(remoteUrl || '').trim();
23
+ if (/^git@ssh\.dev\.azure\.com:/i.test(raw)) {
24
+ return raw.replace(/^git@ssh\.dev\.azure\.com:/i, 'ssh://git@ssh.dev.azure.com/');
25
+ }
26
+ return raw;
27
+ }
28
+
29
+ function urlWithoutCredentials(url) {
30
+ url.username = '';
31
+ url.password = '';
32
+ return url;
33
+ }
34
+
35
+ function sanitizeUrlString(value) {
36
+ try {
37
+ const url = urlWithoutCredentials(new URL(normalizeRemoteForUrl(value)));
38
+ return url.toString().replace(/\/$/, '');
39
+ } catch {
40
+ return String(value || '').trim();
41
+ }
42
+ }
43
+
44
+ function isAdoRemoteUrl(remoteUrl) {
45
+ return /(dev\.azure\.com|visualstudio\.com|ssh\.dev\.azure\.com)/i.test(String(remoteUrl || ''));
46
+ }
47
+
48
+ function adoRemoteFromParts({ url, org, project, repoName, orgUrl, repoPathParts, collection = '' }) {
49
+ const safeRepo = stripGitSuffix(repoName);
50
+ const remoteUrl = sanitizeUrlString(`${url.origin}/${repoPathParts.join('/')}`).replace(/\.git$/i, '');
51
+ return {
52
+ repoHost: 'ado',
53
+ org: decodeUrlSegment(org),
54
+ project: decodeUrlSegment(project),
55
+ repoName: stripGitSuffix(decodeUrlSegment(safeRepo)),
56
+ orgUrl,
57
+ collection,
58
+ remoteUrl,
59
+ prUrlBase: deriveAdoPrUrlBase({ repoUrl: remoteUrl, orgUrl, project, repoName: safeRepo }),
60
+ };
61
+ }
62
+
63
+ function parseAdoRemoteUrl(remoteUrl) {
64
+ const raw = String(remoteUrl || '').trim();
65
+ if (!raw || !isAdoRemoteUrl(raw)) return null;
66
+
67
+ let url;
68
+ try {
69
+ url = urlWithoutCredentials(new URL(normalizeRemoteForUrl(raw)));
70
+ } catch {
71
+ return null;
72
+ }
73
+
74
+ const host = url.hostname.toLowerCase();
75
+ const encodedParts = url.pathname.split('/').filter(Boolean);
76
+ const decodedParts = encodedParts.map(decodeUrlSegment);
77
+
78
+ if (host === 'dev.azure.com') {
79
+ const gitIndex = decodedParts.findIndex(p => p.toLowerCase() === '_git');
80
+ if (gitIndex < 2 || !decodedParts[gitIndex + 1]) return null;
81
+ const org = decodedParts[0];
82
+ const project = decodedParts[1];
83
+ const repoName = decodedParts[gitIndex + 1];
84
+ return adoRemoteFromParts({
85
+ url,
86
+ org,
87
+ project,
88
+ repoName,
89
+ orgUrl: `https://dev.azure.com/${encodePathSegment(org)}`,
90
+ repoPathParts: encodedParts.slice(0, gitIndex + 2),
91
+ });
92
+ }
93
+
94
+ if (host.endsWith('.visualstudio.com')) {
95
+ const org = host.slice(0, -'.visualstudio.com'.length);
96
+ let offset = 0;
97
+ let collection = '';
98
+ if ((decodedParts[0] || '').toLowerCase() === 'defaultcollection') {
99
+ offset = 1;
100
+ collection = 'DefaultCollection';
101
+ }
102
+ const gitIndex = decodedParts.findIndex((p, i) => i >= offset && p.toLowerCase() === '_git');
103
+ if (gitIndex < offset + 1 || !decodedParts[gitIndex + 1]) return null;
104
+ const project = decodedParts[offset];
105
+ const repoName = decodedParts[gitIndex + 1];
106
+ const orgUrl = collection
107
+ ? `https://${org}.visualstudio.com/${collection}`
108
+ : `https://${org}.visualstudio.com`;
109
+ return adoRemoteFromParts({
110
+ url,
111
+ org,
112
+ project,
113
+ repoName,
114
+ orgUrl,
115
+ repoPathParts: encodedParts.slice(0, gitIndex + 2),
116
+ collection,
117
+ });
118
+ }
119
+
120
+ if (host === 'ssh.dev.azure.com') {
121
+ if ((decodedParts[0] || '').toLowerCase() !== 'v3' || decodedParts.length < 4) return null;
122
+ const [, org, project, repoName] = decodedParts;
123
+ const orgUrl = `https://dev.azure.com/${encodePathSegment(org)}`;
124
+ return {
125
+ repoHost: 'ado',
126
+ org,
127
+ project,
128
+ repoName: stripGitSuffix(repoName),
129
+ orgUrl,
130
+ collection: '',
131
+ remoteUrl: `https://dev.azure.com/${encodePathSegment(org)}/${encodePathSegment(project)}/_git/${encodePathSegment(stripGitSuffix(repoName))}`,
132
+ prUrlBase: deriveAdoPrUrlBase({ orgUrl, project, repoName }),
133
+ };
134
+ }
135
+
136
+ return null;
137
+ }
138
+
139
+ function parseGitHubRemoteUrl(remoteUrl) {
140
+ const raw = String(remoteUrl || '').trim();
141
+ const match = raw.match(/github\.com[:/]([^/\s]+)\/([^/\s]+?)(?:\.git)?(?:[#?].*)?$/i);
142
+ if (!match) return null;
143
+ return {
144
+ repoHost: 'github',
145
+ org: decodeUrlSegment(match[1]),
146
+ repoName: stripGitSuffix(decodeUrlSegment(match[2])),
147
+ };
148
+ }
149
+
150
+ function deriveAdoPrUrlBase({ repoUrl, orgUrl, project, repoName }) {
151
+ const candidate = sanitizeUrlString(repoUrl || '');
152
+ if (candidate && /\/_git\//i.test(candidate)) {
153
+ return `${candidate.replace(/\.git$/i, '').replace(/\/$/, '')}/pullrequest/`;
154
+ }
155
+ if (orgUrl && project && repoName) {
156
+ return `${String(orgUrl).replace(/\/$/, '')}/${encodePathSegment(project)}/_git/${encodePathSegment(stripGitSuffix(repoName))}/pullrequest/`;
157
+ }
158
+ return '';
159
+ }
160
+
161
+ function parseJsonOutput(output) {
162
+ const text = String(output || '').trim();
163
+ if (!text) return null;
164
+ return JSON.parse(text);
165
+ }
166
+
167
+ function normalizeAzRepoResult(repo, fallback) {
168
+ if (!repo || typeof repo !== 'object') return null;
169
+ const repoUrl = repo.webUrl || repo.remoteUrl || fallback.remoteUrl || '';
170
+ const parsedUrl = parseAdoRemoteUrl(repoUrl);
171
+ const project = repo.project?.name || parsedUrl?.project || fallback.project || '';
172
+ const repoName = repo.name || parsedUrl?.repoName || fallback.repoName || '';
173
+ const org = parsedUrl?.org || fallback.org || '';
174
+ const orgUrl = parsedUrl?.orgUrl || fallback.orgUrl || '';
175
+ return {
176
+ ...fallback,
177
+ ...(parsedUrl || {}),
178
+ org,
179
+ orgUrl,
180
+ project,
181
+ repoName,
182
+ repositoryId: String(repo.id || fallback.repositoryId || '').trim(),
183
+ remoteUrl: repoUrl || parsedUrl?.remoteUrl || fallback.remoteUrl || '',
184
+ prUrlBase: deriveAdoPrUrlBase({ repoUrl, orgUrl, project, repoName }) || parsedUrl?.prUrlBase || fallback.prUrlBase || '',
185
+ };
186
+ }
187
+
188
+ function runAzJson(execFileSync, args, timeoutMs) {
189
+ return parseJsonOutput(execFileSync('az', args, {
190
+ encoding: 'utf8',
191
+ timeout: timeoutMs,
192
+ stdio: ['ignore', 'pipe', 'ignore'],
193
+ windowsHide: true,
194
+ }));
195
+ }
196
+
197
+ function resolveAdoRemoteMetadata(remote, options = {}) {
198
+ if (!remote) return null;
199
+ const execFileSync = options.execFileSync || defaultExecFileSync;
200
+ const timeoutMs = options.adoLookupTimeoutMs || 10000;
201
+ if (options.resolveAdo !== false && remote.orgUrl && remote.project && remote.repoName) {
202
+ const baseArgs = [
203
+ 'repos', 'show',
204
+ '--repository', remote.repoName,
205
+ '--organization', remote.orgUrl,
206
+ '--project', remote.project,
207
+ '--output', 'json',
208
+ ];
209
+ try {
210
+ const repo = runAzJson(execFileSync, baseArgs, timeoutMs);
211
+ const normalized = normalizeAzRepoResult(repo, remote);
212
+ if (normalized) return normalized;
213
+ } catch { /* fall back to parsed remote metadata */ }
214
+
215
+ try {
216
+ const repos = runAzJson(execFileSync, [
217
+ 'repos', 'list',
218
+ '--organization', remote.orgUrl,
219
+ '--project', remote.project,
220
+ '--output', 'json',
221
+ ], timeoutMs);
222
+ const match = Array.isArray(repos)
223
+ ? repos.find(repo => {
224
+ const name = String(repo?.name || '').toLowerCase();
225
+ const parsed = parseAdoRemoteUrl(repo?.remoteUrl || repo?.webUrl || '');
226
+ return name === String(remote.repoName || '').toLowerCase()
227
+ || parsed?.remoteUrl === remote.remoteUrl
228
+ || parsed?.repoName?.toLowerCase() === String(remote.repoName || '').toLowerCase();
229
+ })
230
+ : null;
231
+ const normalized = normalizeAzRepoResult(match, remote);
232
+ if (normalized) return normalized;
233
+ } catch { /* fall back to parsed remote metadata */ }
234
+ }
235
+ return { ...remote, repositoryId: remote.repositoryId || '', prUrlBase: remote.prUrlBase || deriveAdoPrUrlBase(remote) };
236
+ }
237
+
238
+ function execGit(execFileSync, targetDir, args, timeout = 5000) {
239
+ return String(execFileSync('git', args, {
240
+ cwd: targetDir,
241
+ encoding: 'utf8',
242
+ timeout,
243
+ stdio: ['ignore', 'pipe', 'pipe'],
244
+ windowsHide: true,
245
+ })).trim();
246
+ }
247
+
248
+ function discoverProjectMetadata(targetDir, options = {}) {
249
+ const execFileSync = options.execFileSync || defaultExecFileSync;
250
+ const result = { _found: [] };
251
+
252
+ try {
253
+ let head = '';
254
+ try {
255
+ head = execGit(execFileSync, targetDir, ['symbolic-ref', 'refs/remotes/origin/HEAD']);
256
+ } catch {
257
+ head = execGit(execFileSync, targetDir, ['symbolic-ref', 'HEAD']);
258
+ }
259
+ const branch = head.replace('refs/remotes/origin/', '').replace('refs/heads/', '');
260
+ if (branch) {
261
+ result.mainBranch = branch;
262
+ result._found.push('main branch');
263
+ }
264
+ } catch {}
265
+
266
+ try {
267
+ const remoteUrl = execGit(execFileSync, targetDir, ['remote', 'get-url', 'origin']);
268
+ const github = parseGitHubRemoteUrl(remoteUrl);
269
+ if (github) {
270
+ Object.assign(result, github);
271
+ result._found.push('GitHub remote');
272
+ } else {
273
+ const adoRemote = parseAdoRemoteUrl(remoteUrl);
274
+ if (adoRemote) {
275
+ const ado = resolveAdoRemoteMetadata(adoRemote, options);
276
+ Object.assign(result, ado);
277
+ result._found.push(ado.repositoryId ? 'Azure DevOps remote + repository metadata' : 'Azure DevOps remote');
278
+ }
279
+ }
280
+ } catch {}
281
+
282
+ try {
283
+ const claudeMdPath = path.join(targetDir, 'CLAUDE.md');
284
+ if (fs.existsSync(claudeMdPath)) {
285
+ const content = fs.readFileSync(claudeMdPath, 'utf8');
286
+ const lines = content.split('\n').filter(l => l.trim() && !l.startsWith('#'));
287
+ if (lines[0] && lines[0].length < 200) {
288
+ result.description = lines[0].trim();
289
+ result._found.push('description from CLAUDE.md');
290
+ }
291
+ }
292
+ } catch {}
293
+ if (!result.description) {
294
+ try {
295
+ const readmePath = path.join(targetDir, 'README.md');
296
+ if (fs.existsSync(readmePath)) {
297
+ const content = fs.readFileSync(readmePath, 'utf8').slice(0, 2000);
298
+ const lines = content.split('\n').filter(l => l.trim() && !l.startsWith('#') && !l.startsWith('!'));
299
+ if (lines[0] && lines[0].length < 200) {
300
+ result.description = lines[0].trim();
301
+ result._found.push('description from README.md');
302
+ }
303
+ }
304
+ } catch {}
305
+ }
306
+
307
+ try {
308
+ const pkgPath = path.join(targetDir, 'package.json');
309
+ if (fs.existsSync(pkgPath)) {
310
+ const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf8'));
311
+ if (pkg.name) {
312
+ result.name = pkg.name.replace(/^@[^/]+\//, '');
313
+ result._found.push('name from package.json');
314
+ }
315
+ if (!result.description && pkg.description) result.description = String(pkg.description).slice(0, 200);
316
+ }
317
+ } catch {}
318
+
319
+ return result;
320
+ }
321
+
322
+ function buildPrUrlBase({ repoHost, org, project, repoName, prUrlBase }) {
323
+ if (prUrlBase) return prUrlBase;
324
+ if (repoHost === 'github') {
325
+ return org && repoName ? `https://github.com/${org}/${repoName}/pull/` : '';
326
+ }
327
+ if (repoHost === 'ado' && org && project && repoName) {
328
+ return `https://dev.azure.com/${org}/${encodePathSegment(project)}/_git/${encodePathSegment(repoName)}/pullrequest/`;
329
+ }
330
+ return '';
331
+ }
332
+
333
+ function buildProjectEntry({ name, description, localPath, repoHost, repositoryId, org, project, repoName, mainBranch, prUrlBase }) {
334
+ const safeName = (name || 'project').replace(/[^a-zA-Z0-9._-]/g, '-').replace(/-+/g, '-').replace(/^-|-$/g, '').slice(0, 60) || 'project';
335
+ return {
336
+ name: safeName,
337
+ description: description || '',
338
+ localPath: (localPath || '').replace(/\\/g, '/'),
339
+ repoHost: repoHost || 'github',
340
+ repositoryId: repositoryId || '',
341
+ adoOrg: org || '',
342
+ adoProject: project || '',
343
+ repoName: repoName || name,
344
+ mainBranch: mainBranch || 'main',
345
+ prUrlBase: buildPrUrlBase({ repoHost, org, project, repoName, prUrlBase }),
346
+ workSources: {
347
+ pullRequests: { enabled: true, cooldownMinutes: 30 },
348
+ workItems: { enabled: true, cooldownMinutes: 0 },
349
+ },
350
+ };
351
+ }
352
+
353
+ function buildScanResult(repoPath, detected = {}, linked = false) {
354
+ return {
355
+ path: repoPath.replace(/\\/g, '/'),
356
+ name: detected.name || detected.repoName || path.basename(repoPath),
357
+ host: detected.repoHost || 'git',
358
+ org: detected.org || '',
359
+ project: detected.project || '',
360
+ repoName: detected.repoName || path.basename(repoPath),
361
+ repositoryId: detected.repositoryId || '',
362
+ mainBranch: detected.mainBranch || 'main',
363
+ description: detected.description || '',
364
+ prUrlBase: detected.prUrlBase || '',
365
+ linked,
366
+ };
367
+ }
368
+
369
+ module.exports = {
370
+ parseAdoRemoteUrl,
371
+ parseGitHubRemoteUrl,
372
+ resolveAdoRemoteMetadata,
373
+ discoverProjectMetadata,
374
+ buildPrUrlBase,
375
+ buildProjectEntry,
376
+ buildScanResult,
377
+ };
package/engine/shared.js CHANGED
@@ -1479,6 +1479,8 @@ function nextWorkItemId(items, prefix) {
1479
1479
 
1480
1480
  function getAdoOrgBase(project) {
1481
1481
  if (project.prUrlBase) {
1482
+ const devAzure = project.prUrlBase.match(/^(https?:\/\/dev\.azure\.com\/[^/]+)/i);
1483
+ if (devAzure) return devAzure[1];
1482
1484
  const m = project.prUrlBase.match(/^(https?:\/\/[^/]+(?:\/DefaultCollection)?)/);
1483
1485
  if (m) return m[1];
1484
1486
  }
@@ -1700,6 +1702,28 @@ function sanitizeBranch(name) {
1700
1702
  return String(name).replace(/[^a-zA-Z0-9._\-\/]/g, '-').slice(0, 200);
1701
1703
  }
1702
1704
 
1705
// Derive the short unique suffix for a worktree directory name.
// Prefers the trailing segment of the dispatch id (slugged, max 32 chars);
// when no id is available, falls back to a stable 12-hex-char sha1 of
// project + branch so the same pair always maps to the same directory.
function _worktreeNameSuffix(dispatchId, projectName, branchName) {
  const trailing = String(dispatchId || '').split('-').filter(Boolean).pop();
  if (trailing) {
    return safeSlugComponent(trailing, 32);
  }
  const key = `${projectName || 'default'}\n${branchName || 'worktree'}`;
  return crypto.createHash('sha1').update(key).digest('hex').slice(0, 12);
}
1714
+
1715
// Build the directory name for a new git worktree.
// On win32 the name collapses to `W-<suffix>` so the resulting absolute path
// stays short (per the 0.1.1714 changelog: "shorten Windows worktree paths");
// elsewhere it keeps the readable `<project>-<branch>-<suffix>` form.
// `platform` is injectable so tests can exercise both shapes.
function buildWorktreeDirName({
  dispatchId = '',
  projectName = 'default',
  branchName = 'worktree',
  platform = process.platform,
} = {}) {
  const uniquePart = _worktreeNameSuffix(dispatchId, projectName, branchName);
  if (platform === 'win32') {
    return `W-${uniquePart}`;
  }
  const slug = String(projectName || 'default').replace(/[^a-zA-Z0-9_-]/g, '-');
  const branchPart = sanitizeBranch(branchName || 'worktree');
  return [slug, branchPart, uniquePart].join('-');
}
1726
+
1703
1727
  // ── HTTP Origin Allowlist & Security Headers ─────────────────────────────────
1704
1728
  // Pure helpers used by dashboard.js to gate mutating requests against an
1705
1729
  // explicit allowlist of local origins and to attach uniform security response
@@ -2659,6 +2683,7 @@ module.exports = {
2659
2683
  getAdoOrgBase,
2660
2684
  sanitizePath,
2661
2685
  sanitizeBranch,
2686
+ buildWorktreeDirName, // exported for testing
2662
2687
  isLiveCommandCenterPath,
2663
2688
  describeCcProtectedPaths,
2664
2689
  renderCcSystemPrompt,
package/engine.js CHANGED
@@ -513,9 +513,11 @@ async function spawnAgent(dispatchItem, config) {
513
513
 
514
514
  if (branchName) {
515
515
  updateAgentStatus(id, AGENT_STATUS.WORKTREE_SETUP, `Setting up worktree for branch ${branchName}`);
516
- const wtSuffix = id ? id.split('-').pop() : shared.uid();
517
- const projectSlug = (project.name || 'default').replace(/[^a-zA-Z0-9_-]/g, '-');
518
- const wtDirName = `${projectSlug}-${branchName}-${wtSuffix}`;
516
+ const wtDirName = shared.buildWorktreeDirName({
517
+ dispatchId: id,
518
+ projectName: project.name || 'default',
519
+ branchName,
520
+ });
519
521
  worktreePath = path.resolve(rootDir, engineConfig.worktreeRoot || '../worktrees', wtDirName);
520
522
 
521
523
  // If branch is already checked out in an existing worktree, reuse it
package/minions.js CHANGED
@@ -17,6 +17,7 @@ const path = require('path');
17
17
  const readline = require('readline');
18
18
  const { execSync } = require('child_process');
19
19
  const { ENGINE_DEFAULTS, DEFAULT_AGENTS, DEFAULT_CLAUDE } = require('./engine/shared');
20
+ const projectDiscovery = require('./engine/project-discovery');
20
21
 
21
22
  const MINIONS_HOME = __dirname;
22
23
  const CONFIG_PATH = path.join(MINIONS_HOME, 'config.json');
@@ -50,77 +51,7 @@ function ask(q, def) {
50
51
  }
51
52
 
52
53
  function autoDiscover(targetDir) {
53
- const result = { _found: [] };
54
-
55
- // 1. Detect main branch from git
56
- try {
57
- let head = '';
58
- try {
59
- head = execSync('git symbolic-ref refs/remotes/origin/HEAD', { cwd: targetDir, encoding: 'utf8', timeout: 5000, stdio: ['pipe', 'pipe', 'pipe'] }).trim();
60
- } catch {
61
- head = execSync('git symbolic-ref HEAD', { cwd: targetDir, encoding: 'utf8', timeout: 5000, stdio: ['pipe', 'pipe', 'pipe'] }).trim();
62
- }
63
- const branch = head.replace('refs/remotes/origin/', '').replace('refs/heads/', '');
64
- if (branch) { result.mainBranch = branch; result._found.push('main branch'); }
65
- } catch {}
66
-
67
- // 2. Detect repo host, org, project, repo name from git remote URL
68
- try {
69
- const remoteUrl = execSync('git remote get-url origin', { cwd: targetDir, encoding: 'utf8', timeout: 5000, stdio: ['pipe', 'pipe', 'pipe'] }).trim();
70
- if (remoteUrl.includes('github.com')) {
71
- result.repoHost = 'github';
72
- // https://github.com/org/repo.git or git@github.com:org/repo.git
73
- const m = remoteUrl.match(/github\.com[:/]([^/]+)\/([^/.]+)/);
74
- if (m) { result.org = m[1]; result.repoName = m[2]; }
75
- result._found.push('GitHub remote');
76
- } else if (remoteUrl.includes('visualstudio.com') || remoteUrl.includes('dev.azure.com')) {
77
- result.repoHost = 'ado';
78
- // https://org.visualstudio.com/project/_git/repo or https://dev.azure.com/org/project/_git/repo
79
- const m1 = remoteUrl.match(/https:\/\/([^.]+)\.visualstudio\.com[^/]*\/([^/]+)\/_git\/([^/\s]+)/);
80
- const m2 = remoteUrl.match(/https:\/\/dev\.azure\.com\/([^/]+)\/([^/]+)\/_git\/([^/\s]+)/);
81
- const m = m1 || m2;
82
- if (m) { result.org = m[1]; result.project = m[2]; result.repoName = m[3]; }
83
- result._found.push('Azure DevOps remote');
84
- }
85
- } catch {}
86
-
87
- // 3. Read description from CLAUDE.md first line or README.md first paragraph
88
- try {
89
- const claudeMdPath = path.join(targetDir, 'CLAUDE.md');
90
- if (fs.existsSync(claudeMdPath)) {
91
- const content = fs.readFileSync(claudeMdPath, 'utf8');
92
- // Look for a description-like first line or paragraph (skip headings)
93
- const lines = content.split('\n').filter(l => l.trim() && !l.startsWith('#'));
94
- if (lines[0] && lines[0].length < 200) {
95
- result.description = lines[0].trim();
96
- result._found.push('description from CLAUDE.md');
97
- }
98
- }
99
- } catch {}
100
- if (!result.description) {
101
- try {
102
- const readmePath = path.join(targetDir, 'README.md');
103
- if (fs.existsSync(readmePath)) {
104
- const content = fs.readFileSync(readmePath, 'utf8').slice(0, 2000);
105
- const lines = content.split('\n').filter(l => l.trim() && !l.startsWith('#') && !l.startsWith('!'));
106
- if (lines[0] && lines[0].length < 200) {
107
- result.description = lines[0].trim();
108
- result._found.push('description from README.md');
109
- }
110
- }
111
- } catch {}
112
- }
113
-
114
- // 4. Detect project name
115
- try {
116
- const pkgPath = path.join(targetDir, 'package.json');
117
- if (fs.existsSync(pkgPath)) {
118
- const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf8'));
119
- if (pkg.name) { result.name = pkg.name.replace(/^@[^/]+\//, ''); result._found.push('name from package.json'); }
120
- }
121
- } catch {}
122
-
123
- return result;
54
+ return projectDiscovery.discoverProjectMetadata(targetDir);
124
55
  }
125
56
 
126
57
  // ─── Shared Helpers (used by both addProject and scanAndAdd) ─────────────────
@@ -149,38 +80,11 @@ function _detectAvailableRuntimes() {
149
80
  return found;
150
81
  }
151
82
 
152
- function buildPrUrlBase({ repoHost, org, project, repoName }) {
153
- if (repoHost === 'github') {
154
- return org && repoName ? `https://github.com/${org}/${repoName}/pull/` : '';
155
- }
156
- if (repoHost === 'ado' && org && project && repoName) {
157
- return `https://dev.azure.com/${org}/${project}/_git/${repoName}/pullrequest/`;
158
- }
159
- return '';
160
- }
161
-
162
- function buildProjectEntry({ name, description, localPath, repoHost, repositoryId, org, project, repoName, mainBranch }) {
163
- // Sanitize name for use as directory name in projects/<name>/
164
- const safeName = (name || 'project').replace(/[^a-zA-Z0-9._-]/g, '-').replace(/-+/g, '-').replace(/^-|-$/g, '').slice(0, 60) || 'project';
165
- return {
166
- name: safeName,
167
- description: description || '',
168
- localPath: (localPath || '').replace(/\\/g, '/'),
169
- repoHost: repoHost || 'github',
170
- repositoryId: repositoryId || '',
171
- adoOrg: org || '',
172
- adoProject: project || '',
173
- repoName: repoName || name,
174
- mainBranch: mainBranch || 'main',
175
- prUrlBase: buildPrUrlBase({ repoHost, org, project, repoName }),
176
- // Discovery defaults must mirror dashboard.js POST /api/projects — without
177
- // these, discoverFromWorkItems / discoverFromPrs silently no-op (the engine
178
- // looks healthy but never dispatches anything).
179
- workSources: {
180
- pullRequests: { enabled: true, cooldownMinutes: 30 },
181
- workItems: { enabled: true, cooldownMinutes: 0 },
182
- },
183
- };
83
// Thin wrapper retained for existing call sites; the real entry construction
// lives in engine/project-discovery.
// Discovery defaults must mirror dashboard.js POST /api/projects — without
// these, discoverFromWorkItems / discoverFromPrs silently no-op (the engine
// looks healthy but never dispatches anything).
function buildProjectEntry(opts) {
  return projectDiscovery.buildProjectEntry(opts);
}
185
89
 
186
90
 
@@ -221,12 +125,15 @@ async function addProject(targetDir) {
221
125
  const org = await ask('Organization', detected.org || '');
222
126
  const project = await ask('Project', detected.project || '');
223
127
  const repoName = await ask('Repo name', detected.repoName || name);
224
- const repositoryId = await ask('Repository ID (GUID, optional)', '');
128
+ const repositoryId = await ask('Repository ID (GUID, optional)', detected.repositoryId || '');
225
129
  const mainBranch = await ask('Main branch', detected.mainBranch || 'main');
226
130
 
227
131
  rl.close();
228
132
 
229
- config.projects.push(buildProjectEntry({ name, description, localPath: target, repoHost, repositoryId, org, project, repoName, mainBranch }));
133
+ config.projects.push(buildProjectEntry({
134
+ name, description, localPath: target, repoHost, repositoryId, org, project, repoName, mainBranch,
135
+ prUrlBase: detected.prUrlBase,
136
+ }));
230
137
  saveConfig(config);
231
138
 
232
139
  console.log(`\n Linked "${name}" (${target})`);
@@ -373,17 +280,7 @@ async function scanAndAdd({ root, depth } = {}) {
373
280
  const enriched = repos.map(repoPath => {
374
281
  const detected = autoDiscover(repoPath);
375
282
  const alreadyLinked = linkedPaths.has(path.resolve(repoPath));
376
- return {
377
- path: repoPath,
378
- name: detected.name || detected.repoName || path.basename(repoPath),
379
- host: detected.repoHost || '?',
380
- org: detected.org || '',
381
- project: detected.project || '',
382
- repoName: detected.repoName || path.basename(repoPath),
383
- mainBranch: detected.mainBranch || 'main',
384
- description: detected.description || '',
385
- linked: alreadyLinked,
386
- };
283
+ return projectDiscovery.buildScanResult(repoPath, detected, alreadyLinked);
387
284
  });
388
285
 
389
286
  console.log(` Found ${enriched.length} git repo(s):\n`);
@@ -449,8 +346,8 @@ async function scanAndAdd({ root, depth } = {}) {
449
346
  existingNames.add(name);
450
347
  config.projects.push(buildProjectEntry({
451
348
  name, description: repo.description, localPath: repo.path,
452
- repoHost: repo.host, org: repo.org, project: repo.project,
453
- repoName: repo.repoName, mainBranch: repo.mainBranch,
349
+ repoHost: repo.host, repositoryId: repo.repositoryId, org: repo.org, project: repo.project,
350
+ repoName: repo.repoName, mainBranch: repo.mainBranch, prUrlBase: repo.prUrlBase,
454
351
  }));
455
352
  console.log(` + ${name} (${repo.path})`);
456
353
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@yemi33/minions",
3
- "version": "0.1.1712",
3
+ "version": "0.1.1714",
4
4
  "description": "Multi-agent AI dev team that runs from ~/.minions/ — five autonomous agents share a single engine, dashboard, and knowledge base",
5
5
  "bin": {
6
6
  "minions": "bin/minions.js"