specweave 0.28.61 → 0.28.62
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/src/cli/helpers/init/ado-repo-cloning.d.ts.map +1 -1
- package/dist/src/cli/helpers/init/ado-repo-cloning.js +37 -24
- package/dist/src/cli/helpers/init/ado-repo-cloning.js.map +1 -1
- package/package.json +1 -1
- package/plugins/specweave/commands/specweave-jobs.md +7 -7
- package/plugins/specweave/hooks/v2/detectors/lifecycle-detector.sh +85 -0
- package/plugins/specweave/hooks/v2/detectors/us-completion-detector.sh +148 -0
- package/plugins/specweave/hooks/v2/dispatchers/post-tool-use.sh +58 -16
- package/plugins/specweave/hooks/v2/handlers/ac-validation-handler.sh +4 -0
- package/plugins/specweave/hooks/v2/handlers/github-sync-handler.sh +4 -0
- package/plugins/specweave/hooks/v2/handlers/living-docs-handler.sh +4 -0
- package/plugins/specweave/hooks/v2/handlers/living-specs-handler.sh +179 -0
- package/plugins/specweave/hooks/v2/handlers/status-line-handler.sh +165 -0
- package/plugins/specweave/hooks/v2/handlers/status-update.sh +7 -0
- package/plugins/specweave/hooks/v2/queue/dequeue.sh +4 -0
- package/plugins/specweave/hooks/v2/queue/enqueue.sh +50 -12
- package/plugins/specweave/hooks/v2/queue/processor.sh +74 -12
package/dist/src/cli/helpers/init/ado-repo-cloning.d.ts.map
CHANGED
@@ -1 +1 @@
(single-line source map regenerated for the new build output; the raw "mappings" string is omitted here)

package/dist/src/cli/helpers/init/ado-repo-cloning.js
CHANGED
@@ -13,6 +13,22 @@ import { AzureDevOpsProvider } from '../../../core/repo-structure/providers/azur
 import { filterRepositoriesByPattern } from '../selection-strategy.js';
 import { getJobManager } from '../../../core/background/index.js';
 import { execFileNoThrowSync } from '../../../utils/execFileNoThrow.js';
+/**
+ * Build proper ADO clone URL with PAT authentication
+ *
+ * ADO clone URL format: https://{PAT}@dev.azure.com/{org}/{project}/_git/{repo}
+ *
+ * The remoteUrl from ADO API may come in different formats:
+ * - https://dev.azure.com/org/project/_git/repo
+ * - https://org@dev.azure.com/org/project/_git/repo (with org as placeholder)
+ *
+ * We need to construct a proper URL with PAT authentication.
+ */
+function buildAdoCloneUrl(org, project, repoName, pat) {
+    // Build clean URL with PAT authentication
+    // Format: https://{PAT}@dev.azure.com/{org}/{project}/_git/{repo}
+    return `https://${pat}@dev.azure.com/${org}/${project}/_git/${repoName}`;
+}
 /**
  * Trigger ADO repository cloning during init
  *
@@ -37,6 +53,10 @@ export async function triggerAdoRepoCloning(projectPath, adoProjectSelection, cl
         console.log(chalk.yellow('\n No ADO projects selected for cloning.\n'));
         return;
     }
+    if (!pat) {
+        console.log(chalk.yellow('\n No PAT provided. Cannot clone repositories.\n'));
+        return;
+    }
     console.log(chalk.blue('\n📦 Fetching ADO Repositories\n'));
     const provider = new AzureDevOpsProvider();
     const allRepos = [];
@@ -85,7 +105,8 @@ export async function triggerAdoRepoCloning(projectPath, adoProjectSelection, cl
     const job = jobManager.createJob('clone-repos', jobConfig, filteredRepos.length);
     jobManager.startJob(job.id);
     // Start cloning asynchronously (non-blocking)
-    cloneRepositoriesAsync(projectPath, filteredRepos, org, job.id).catch(error => {
+    // IMPORTANT: Pass PAT directly - don't rely on .env which may not exist yet
+    cloneRepositoriesAsync(projectPath, filteredRepos, org, pat, job.id).catch(error => {
         console.error(chalk.red(`Clone error: ${error instanceof Error ? error.message : String(error)}`));
         jobManager.completeJob(job.id, error instanceof Error ? error.message : String(error));
     });
@@ -99,8 +120,14 @@ export async function triggerAdoRepoCloning(projectPath, adoProjectSelection, cl
  * Clone repositories asynchronously
  *
  * This runs in background and updates job progress as repos are cloned.
+ *
+ * @param projectPath - Target directory
+ * @param repos - Repositories to clone
+ * @param org - ADO organization name
+ * @param pat - Personal Access Token (passed directly, not from .env)
+ * @param jobId - Background job ID for progress tracking
  */
-async function cloneRepositoriesAsync(projectPath, repos, org, jobId) {
+async function cloneRepositoriesAsync(projectPath, repos, org, pat, jobId) {
     const jobManager = getJobManager(projectPath);
     const reposDir = path.join(projectPath, 'repos');
     let completed = 0;
@@ -113,49 +140,35 @@ async function cloneRepositoriesAsync(projectPath, repos, org, jobId) {
             continue;
         }
         try {
-            //
-            //
-
-            const pat = (await getPatFromJobConfig(projectPath, jobId)) || '';
-            const authUrl = repo.remoteUrl.replace('https://', `https://${pat}@`);
+            // Build proper clone URL with PAT authentication
+            // Format: https://{PAT}@dev.azure.com/{org}/{project}/_git/{repo}
+            const cloneUrl = buildAdoCloneUrl(org, repo.project, repo.name, pat);
             // Create parent directory if needed
             if (!existsSync(reposDir)) {
                 mkdirSync(reposDir, { recursive: true });
             }
             // Clone the repository
-            const result = execFileNoThrowSync('git', ['clone',
+            const result = execFileNoThrowSync('git', ['clone', cloneUrl, repo.name], { cwd: reposDir });
             if (result.exitCode === 0) {
                 completed++;
                 jobManager.updateProgress(jobId, completed, repo.name, repo.name);
+                console.log(chalk.green(` ✓ Cloned ${repo.name}`));
             }
             else {
                 // Clone failed - mark as failed but continue
                 jobManager.updateProgress(jobId, completed, repo.name, undefined, repo.name);
-                console.error(chalk.yellow(` Failed to clone ${repo.name}: ${result.stderr || result.stdout}`));
+                console.error(chalk.yellow(` ✗ Failed to clone ${repo.name}: ${result.stderr || result.stdout}`));
             }
         }
         catch (error) {
             // Mark repo as failed but continue with others
             jobManager.updateProgress(jobId, completed, repo.name, undefined, repo.name);
             const errorMsg = error instanceof Error ? error.message : String(error);
-            console.error(chalk.yellow(` Error cloning ${repo.name}: ${errorMsg}`));
+            console.error(chalk.yellow(` ✗ Error cloning ${repo.name}: ${errorMsg}`));
         }
     }
     // Mark job as complete
     jobManager.completeJob(jobId);
-}
-/**
- * Get PAT from job config (stored in background-jobs.json)
- */
-async function getPatFromJobConfig(projectPath, jobId) {
-    // PAT is stored in .env, not in job config (security)
-    // Read from environment or .env file
-    const { readEnvFile, parseEnvFile } = await import('../../../utils/env-file.js');
-    const envContent = readEnvFile(projectPath);
-    if (envContent) {
-        const parsed = parseEnvFile(envContent);
-        return parsed.AZURE_DEVOPS_PAT || null;
-    }
-    return process.env.AZURE_DEVOPS_PAT || null;
+    console.log(chalk.green(`\n✅ Clone job completed: ${completed}/${repos.length} repositories cloned`));
 }
 //# sourceMappingURL=ado-repo-cloning.js.map

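The change above replaces the old remoteUrl rewrite with a URL assembled from its parts and a PAT that is passed straight into the async worker. As a rough shell illustration of the resulting clone flow (the organization, project, repository, and ADO_PAT values below are placeholders, not values from the package):

# Sketch only - mirrors the https://{PAT}@dev.azure.com/{org}/{project}/_git/{repo} shape
ORG="acme"                 # placeholder organization
PROJECT="platform"         # placeholder project
REPO="web-app"             # placeholder repository
CLONE_URL="https://${ADO_PAT}@dev.azure.com/${ORG}/${PROJECT}/_git/${REPO}"

# The compiled helper clones into <project>/repos/<repo> via execFileNoThrowSync('git', ...)
mkdir -p repos
git clone "$CLONE_URL" "repos/${REPO}"
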
package/dist/src/cli/helpers/init/ado-repo-cloning.js.map
CHANGED
@@ -1 +1 @@
(single-line source map regenerated for the new build output; the raw "mappings" string is omitted here)

package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "specweave",
-  "version": "0.28.61",
+  "version": "0.28.62",
   "description": "Spec-driven development framework for Claude Code. AI-native workflow with living documentation, intelligent agents, and multilingual support (9 languages). Enterprise-grade traceability with permanent specs and temporary increments.",
   "type": "module",
   "main": "dist/index.js",

package/plugins/specweave/commands/specweave-jobs.md
CHANGED
@@ -103,7 +103,7 @@ Started: 2024-01-15 10:30:00
 Updated: 2024-01-15 10:32:15

 Progress: 2,500/10,000 (25%)
-Current:
+Current: Acme\Core-Operations
 Rate: 15.2 items/sec
 ETA: ~8 minutes

@@ -122,10 +122,10 @@ Watch job progress in real-time (like `tail -f`):

 📦 Following job abc12345 (Ctrl+C to stop)

-[10:30:15] Progress: 2,500/10,000 (25%) -
-[10:30:16] Progress: 2,520/10,000 (25%) -
-[10:30:17] Progress: 2,545/10,000 (25%) -
-[10:30:18] Progress: 2,570/10,000 (26%) -
+[10:30:15] Progress: 2,500/10,000 (25%) - Acme\Core-Operations
+[10:30:16] Progress: 2,520/10,000 (25%) - Acme\Core-Operations
+[10:30:17] Progress: 2,545/10,000 (25%) - Acme\AI-Platform
+[10:30:18] Progress: 2,570/10,000 (26%) - Acme\AI-Platform
 ...
 ```

@@ -144,8 +144,8 @@ Show detailed worker output:
 [2024-01-15T10:30:00.456Z] Project path: /Users/dev/my-project
 [2024-01-15T10:30:00.789Z] PID: 45678
 [2024-01-15T10:30:01.234Z] Dependencies loaded, starting import...
-[2024-01-15T10:30:02.567Z] Progress: 100/10000 - ado
-[2024-01-15T10:30:03.890Z] Progress: 200/10000 - ado
+[2024-01-15T10:30:02.567Z] Progress: 100/10000 - ado Acme\Core-Operations
+[2024-01-15T10:30:03.890Z] Progress: 200/10000 - ado Acme\Core-Operations
 ...
 ```

package/plugins/specweave/hooks/v2/detectors/lifecycle-detector.sh
ADDED
@@ -0,0 +1,85 @@
+#!/bin/bash
+# lifecycle-detector.sh - Detect increment lifecycle changes
+# Events: increment.created, increment.done, increment.archived, increment.reopened
+#
+# Called from post-tool-use.sh when metadata.json is edited
+# Compares current vs previous status to detect transitions
+#
+# IMPORTANT: This script must be fast (<10ms) and never crash
+set +e
+
+[[ "${SPECWEAVE_DISABLE_HOOKS:-0}" == "1" ]] && exit 0
+
+INC_ID="${1:-}"
+[[ -z "$INC_ID" ]] && exit 0
+
+# Find project root
+PROJECT_ROOT="$PWD"
+while [[ "$PROJECT_ROOT" != "/" ]] && [[ ! -d "$PROJECT_ROOT/.specweave" ]]; do
+  PROJECT_ROOT=$(dirname "$PROJECT_ROOT")
+done
+[[ ! -d "$PROJECT_ROOT/.specweave" ]] && exit 0
+
+STATE_DIR="$PROJECT_ROOT/.specweave/state"
+PREV_STATUS_FILE="$STATE_DIR/.prev-status-$INC_ID"
+META_FILE="$PROJECT_ROOT/.specweave/increments/$INC_ID/metadata.json"
+ARCHIVE_META="$PROJECT_ROOT/.specweave/increments/_archive/$INC_ID/metadata.json"
+
+mkdir -p "$STATE_DIR" 2>/dev/null
+
+# Detect event
+EVENT=""
+EVENT_DATA="$INC_ID"
+
+# Check if archived (folder moved to _archive/)
+if [[ -f "$ARCHIVE_META" ]] && [[ ! -f "$META_FILE" ]]; then
+  # Check if we already detected this
+  PREV=$(cat "$PREV_STATUS_FILE" 2>/dev/null || echo "")
+  if [[ "$PREV" != "archived" ]]; then
+    EVENT="increment.archived"
+    echo "archived" > "$PREV_STATUS_FILE"
+  fi
+
+elif [[ -f "$META_FILE" ]]; then
+  # Get current status (fast grep, no jq)
+  CURRENT_STATUS=$(grep -o '"status"[[:space:]]*:[[:space:]]*"[^"]*"' "$META_FILE" | head -1 | sed 's/.*"\([^"]*\)".*/\1/')
+  [[ -z "$CURRENT_STATUS" ]] && exit 0
+
+  # Get previous status
+  PREV_STATUS=$(cat "$PREV_STATUS_FILE" 2>/dev/null || echo "")
+
+  # Detect transitions
+  if [[ -z "$PREV_STATUS" ]]; then
+    # First time seeing this increment
+    if [[ "$CURRENT_STATUS" == "planning" ]] || [[ "$CURRENT_STATUS" == "active" ]]; then
+      EVENT="increment.created"
+    fi
+  elif [[ "$PREV_STATUS" != "$CURRENT_STATUS" ]]; then
+    # Status changed
+    case "$CURRENT_STATUS" in
+      completed)
+        EVENT="increment.done"
+        ;;
+      active)
+        # Was it completed before? That's a reopen
+        if [[ "$PREV_STATUS" == "completed" ]]; then
+          EVENT="increment.reopened"
+        fi
+        ;;
+      paused|abandoned)
+        # Status changes we don't emit events for
+        ;;
+    esac
+  fi
+
+  # Save current status
+  echo "$CURRENT_STATUS" > "$PREV_STATUS_FILE"
+fi
+
+# Fire event if detected
+if [[ -n "$EVENT" ]]; then
+  HOOK_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
+  bash "$HOOK_DIR/queue/enqueue.sh" "$EVENT" "$EVENT_DATA" 2>/dev/null
+fi
+
+exit 0

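The detector deliberately avoids jq; a minimal sketch of the same status read, assuming a metadata.json with a top-level "status" field (the sample file and value are illustrative):

# Illustrative metadata.json
cat > /tmp/metadata.json << 'EOF'
{ "id": "0042-sample-increment", "status": "completed" }
EOF

# Same grep/sed pipeline the detector uses (first "status" pair, keep the quoted value)
CURRENT_STATUS=$(grep -o '"status"[[:space:]]*:[[:space:]]*"[^"]*"' /tmp/metadata.json | head -1 | sed 's/.*"\([^"]*\)".*/\1/')
echo "$CURRENT_STATUS"   # prints: completed
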
package/plugins/specweave/hooks/v2/detectors/us-completion-detector.sh
ADDED
@@ -0,0 +1,148 @@
+#!/bin/bash
+# us-completion-detector.sh - Detect user story completion
+# Events: user-story.completed, user-story.reopened
+#
+# A user story is complete when:
+# 1. ALL tasks for that US are completed ([x])
+# 2. ALL ACs for that US are checked ([x])
+#
+# Called from post-tool-use.sh when tasks.md or spec.md is edited
+#
+# IMPORTANT: This script must be fast (<50ms) and never crash
+set +e
+
+[[ "${SPECWEAVE_DISABLE_HOOKS:-0}" == "1" ]] && exit 0
+
+INC_ID="${1:-}"
+[[ -z "$INC_ID" ]] && exit 0
+
+# Find project root
+PROJECT_ROOT="$PWD"
+while [[ "$PROJECT_ROOT" != "/" ]] && [[ ! -d "$PROJECT_ROOT/.specweave" ]]; do
+  PROJECT_ROOT=$(dirname "$PROJECT_ROOT")
+done
+[[ ! -d "$PROJECT_ROOT/.specweave" ]] && exit 0
+
+STATE_DIR="$PROJECT_ROOT/.specweave/state"
+TASKS_FILE="$PROJECT_ROOT/.specweave/increments/$INC_ID/tasks.md"
+SPEC_FILE="$PROJECT_ROOT/.specweave/increments/$INC_ID/spec.md"
+US_STATE_FILE="$STATE_DIR/.us-completion-$INC_ID"
+
+mkdir -p "$STATE_DIR" 2>/dev/null
+
+[[ ! -f "$TASKS_FILE" ]] && exit 0
+[[ ! -f "$SPEC_FILE" ]] && exit 0
+
+# Parse tasks.md to find US -> Tasks mapping and completion status
+# Format: ### T-001: Title
+#   **Satisfies ACs**: AC-US1-01, AC-US1-02
+#   **Status**: [x] completed
+
+declare -A US_TASKS_TOTAL
+declare -A US_TASKS_DONE
+
+# Parse task blocks
+CURRENT_TASK=""
+CURRENT_STATUS=""
+CURRENT_US=""
+
+while IFS= read -r line; do
+  # Detect task header
+  if [[ "$line" =~ ^###[[:space:]]+T-[0-9]+ ]]; then
+    # Process previous task
+    if [[ -n "$CURRENT_US" ]] && [[ -n "$CURRENT_TASK" ]]; then
+      US_TASKS_TOTAL["$CURRENT_US"]=$((${US_TASKS_TOTAL["$CURRENT_US"]:-0} + 1))
+      if [[ "$CURRENT_STATUS" == "done" ]]; then
+        US_TASKS_DONE["$CURRENT_US"]=$((${US_TASKS_DONE["$CURRENT_US"]:-0} + 1))
+      fi
+    fi
+    CURRENT_TASK=$(echo "$line" | grep -o 'T-[0-9][0-9][0-9]' | head -1)
+    CURRENT_STATUS=""
+    CURRENT_US=""
+  fi
+
+  # Detect User Story reference
+  if [[ "$line" =~ User[[:space:]]*Story.*:.*US-[0-9]+ ]]; then
+    CURRENT_US=$(echo "$line" | grep -o 'US-[0-9][0-9][0-9]' | head -1)
+  fi
+
+  # Detect completion status
+  if [[ "$line" =~ Status.*\[x\] ]]; then
+    CURRENT_STATUS="done"
+  fi
+done < "$TASKS_FILE"
+
+# Process last task
+if [[ -n "$CURRENT_US" ]] && [[ -n "$CURRENT_TASK" ]]; then
+  US_TASKS_TOTAL["$CURRENT_US"]=$((${US_TASKS_TOTAL["$CURRENT_US"]:-0} + 1))
+  if [[ "$CURRENT_STATUS" == "done" ]]; then
+    US_TASKS_DONE["$CURRENT_US"]=$((${US_TASKS_DONE["$CURRENT_US"]:-0} + 1))
+  fi
+fi
+
+# Parse spec.md for AC completion status
+# Format: - [x] **AC-US1-01**: Description
+declare -A US_ACS_TOTAL
+declare -A US_ACS_DONE
+
+while IFS= read -r line; do
+  # Find AC lines with US reference
+  if [[ "$line" =~ AC-US([0-9]+)-[0-9]+ ]]; then
+    US_NUM="${BASH_REMATCH[1]}"
+    US_ID="US-${US_NUM}"
+
+    US_ACS_TOTAL["$US_ID"]=$((${US_ACS_TOTAL["$US_ID"]:-0} + 1))
+
+    if [[ "$line" =~ \[x\] ]]; then
+      US_ACS_DONE["$US_ID"]=$((${US_ACS_DONE["$US_ID"]:-0} + 1))
+    fi
+  fi
+done < "$SPEC_FILE"
+
+# Load previous completion state
+declare -A PREV_COMPLETE
+if [[ -f "$US_STATE_FILE" ]]; then
+  while IFS='=' read -r us status; do
+    [[ -n "$us" ]] && PREV_COMPLETE["$us"]="$status"
+  done < "$US_STATE_FILE"
+fi
+
+# Check completion for each US
+HOOK_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
+NEW_STATE=""
+
+for US_ID in "${!US_TASKS_TOTAL[@]}"; do
+  TASKS_TOTAL=${US_TASKS_TOTAL["$US_ID"]:-0}
+  TASKS_DONE=${US_TASKS_DONE["$US_ID"]:-0}
+  ACS_TOTAL=${US_ACS_TOTAL["$US_ID"]:-0}
+  ACS_DONE=${US_ACS_DONE["$US_ID"]:-0}
+
+  # US is complete if ALL tasks done AND ALL ACs checked
+  CURRENT_COMPLETE="no"
+  if [[ $TASKS_TOTAL -gt 0 ]] && [[ $TASKS_DONE -eq $TASKS_TOTAL ]]; then
+    if [[ $ACS_TOTAL -gt 0 ]] && [[ $ACS_DONE -eq $ACS_TOTAL ]]; then
+      CURRENT_COMPLETE="yes"
+    elif [[ $ACS_TOTAL -eq 0 ]]; then
+      # No ACs defined, just check tasks
+      CURRENT_COMPLETE="yes"
+    fi
+  fi
+
+  PREV="${PREV_COMPLETE["$US_ID"]:-no}"
+
+  # Detect transitions
+  if [[ "$CURRENT_COMPLETE" == "yes" ]] && [[ "$PREV" == "no" ]]; then
+    # User story just completed
+    bash "$HOOK_DIR/queue/enqueue.sh" "user-story.completed" "$INC_ID:$US_ID" 2>/dev/null
+  elif [[ "$CURRENT_COMPLETE" == "no" ]] && [[ "$PREV" == "yes" ]]; then
+    # User story reopened
+    bash "$HOOK_DIR/queue/enqueue.sh" "user-story.reopened" "$INC_ID:$US_ID" 2>/dev/null
+  fi
+
+  NEW_STATE="${NEW_STATE}${US_ID}=${CURRENT_COMPLETE}\n"
+done
+
+# Save new state
+echo -e "$NEW_STATE" > "$US_STATE_FILE"
+
+exit 0

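For context, the regexes above expect task and AC lines roughly like the sketch below; the IDs, titles, and exact label wording are illustrative assumptions rather than excerpts from the package. The detector persists one US-xxx=yes|no line per story and enqueues events only when that value flips:

# Illustrative tasks.md block the task parser can match:
#   ### T-001: Implement login form
#   **User Story**: US-001
#   **Status**: [x] completed
#
# Illustrative spec.md line the AC parser can match:
#   - [x] **AC-US1-01**: User can log in
#
# Inspect the recorded per-story state for an increment (placeholder id):
INC_ID="0042-sample-increment"
cat ".specweave/state/.us-completion-$INC_ID"     # e.g. US-001=yes
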
package/plugins/specweave/hooks/v2/dispatchers/post-tool-use.sh
CHANGED
@@ -1,7 +1,20 @@
 #!/bin/bash
 # post-tool-use.sh - Single dispatcher for ALL PostToolUse events
 # Replaces: post-task-edit, post-metadata-change, post-increment-planning, etc.
-#
+#
+# Architecture (EDA v2):
+# - Detectors run synchronously (fast, detect state transitions)
+# - Detectors emit events to queue
+# - Handlers process events asynchronously from queue
+#
+# Event flow:
+# 1. metadata.json change -> lifecycle-detector -> increment.* events
+# 2. tasks.md/spec.md change -> us-completion-detector -> user-story.* events
+# 3. Events queued -> processor routes to handlers
+#
+# Goal: <10ms execution, all heavy work through event queue
+#
+# IMPORTANT: Never crash Claude, always exit 0
 set +e

 [[ "${SPECWEAVE_DISABLE_HOOKS:-0}" == "1" ]] && exit 0
@@ -20,25 +33,54 @@ INPUT=$(cat)
 FILE_PATH=$(echo "$INPUT" | grep -o '"file_path"[[:space:]]*:[[:space:]]*"[^"]*"' | head -1 | sed 's/.*"\([^"]*\)".*/\1/')
 [[ -z "$FILE_PATH" ]] && exit 0

-#
-EVENT_TYPE=""
-case "$FILE_PATH" in
-  */.specweave/increments/*/tasks.md) EVENT_TYPE="task.updated" ;;
-  */.specweave/increments/*/spec.md) EVENT_TYPE="spec.updated" ;;
-  */.specweave/increments/*/metadata.json) EVENT_TYPE="metadata.changed" ;;
-  */.specweave/increments/*/plan.md) EVENT_TYPE="plan.updated" ;;
-  *) exit 0 ;; # Not a specweave file, ignore
-esac
-
-# Extract increment ID
+# Extract increment ID from path
 INC_ID=$(echo "$FILE_PATH" | grep -o '[0-9][0-9][0-9][0-9]-[^/]*' | head -1)
+[[ -z "$INC_ID" ]] && exit 0

 HOOK_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
+DETECTOR_DIR="$HOOK_DIR/detectors"
+
+# ============================================================================
+# EDA DISPATCHER: Route to detectors based on file type
+# Detectors detect state transitions and emit events to queue
+# All heavy work is done by handlers processing the queue
+# ============================================================================
+
+case "$FILE_PATH" in
+  */.specweave/increments/*/metadata.json)
+    # Metadata changed -> check for lifecycle transitions
+    # Events: increment.created, increment.done, increment.archived, increment.reopened
+    bash "$DETECTOR_DIR/lifecycle-detector.sh" "$INC_ID" 2>/dev/null &
+    ;;
+
+  */.specweave/increments/*/tasks.md|*/.specweave/increments/*/spec.md)
+    # Tasks or spec changed -> check for US completion
+    # Events: user-story.completed, user-story.reopened
+    bash "$DETECTOR_DIR/us-completion-detector.sh" "$INC_ID" 2>/dev/null &
+
+    # Also queue legacy event for backward compatibility
+    if [[ "$FILE_PATH" == *tasks.md ]]; then
+      bash "$HOOK_DIR/queue/enqueue.sh" "task.updated" "$INC_ID" 2>/dev/null &
+    else
+      bash "$HOOK_DIR/queue/enqueue.sh" "spec.updated" "$INC_ID" 2>/dev/null &
+    fi
+    ;;

-
-
+  */.specweave/increments/*/plan.md)
+    # Plan updated (for future use, currently no special handling)
+    bash "$HOOK_DIR/queue/enqueue.sh" "plan.updated" "$INC_ID" 2>/dev/null &
+    ;;
+
+  *)
+    # Not a specweave increment file, ignore
+    exit 0
+    ;;
+esac

-#
-
+# NOTE: Removed synchronous status-update.sh call
+# Status line updates are now EVENT-DRIVEN:
+# - Updated on user-story.completed/reopened events
+# - Updated on increment lifecycle events
+# - NOT updated on every file edit (reduces flickering, race conditions)

 exit 0

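The dispatcher only ever looks at file_path in the PostToolUse payload; a self-contained sketch of that extraction (the JSON below is hand-written for illustration, not captured from Claude Code):

INPUT='{"tool_name":"Edit","tool_input":{"file_path":"/work/app/.specweave/increments/0042-sample-increment/tasks.md"}}'

FILE_PATH=$(echo "$INPUT" | grep -o '"file_path"[[:space:]]*:[[:space:]]*"[^"]*"' | head -1 | sed 's/.*"\([^"]*\)".*/\1/')
INC_ID=$(echo "$FILE_PATH" | grep -o '[0-9][0-9][0-9][0-9]-[^/]*' | head -1)

echo "$FILE_PATH"   # /work/app/.specweave/increments/0042-sample-increment/tasks.md
echo "$INC_ID"      # 0042-sample-increment
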
package/plugins/specweave/hooks/v2/handlers/ac-validation-handler.sh
CHANGED
@@ -2,8 +2,12 @@
 # ac-validation-handler.sh - Validate AC completion status
 # Checks that completed tasks have their ACs checked in spec.md
 # Non-blocking, logs warnings only
+#
+# IMPORTANT: Never crash Claude, always exit 0
 set +e

+[[ "${SPECWEAVE_DISABLE_HOOKS:-0}" == "1" ]] && exit 0
+
 INC_ID="${1:-}"
 [[ -z "$INC_ID" ]] && exit 0

package/plugins/specweave/hooks/v2/handlers/github-sync-handler.sh
CHANGED
@@ -1,8 +1,12 @@
 #!/bin/bash
 # github-sync-handler.sh - Sync increment status to GitHub issue
 # Called async by processor, non-blocking, error-tolerant
+#
+# IMPORTANT: Never crash Claude, always exit 0
 set +e

+[[ "${SPECWEAVE_DISABLE_HOOKS:-0}" == "1" ]] && exit 0
+
 INC_ID="${1:-}"
 [[ -z "$INC_ID" ]] && exit 0

package/plugins/specweave/hooks/v2/handlers/living-docs-handler.sh
CHANGED
@@ -1,8 +1,12 @@
 #!/bin/bash
 # living-docs-handler.sh - Sync increment to living docs
 # Called async by processor, non-blocking, error-tolerant
+#
+# IMPORTANT: Never crash Claude, always exit 0
 set +e

+[[ "${SPECWEAVE_DISABLE_HOOKS:-0}" == "1" ]] && exit 0
+
 INC_ID="${1:-}"
 [[ -z "$INC_ID" ]] && exit 0

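All of the v2 detectors, handlers, and queue scripts now honor the same guard, so the whole hook pipeline can be silenced with one environment variable:

# Disable every specweave v2 hook for the current shell session
export SPECWEAVE_DISABLE_HOOKS=1

# Re-enable
unset SPECWEAVE_DISABLE_HOOKS
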
package/plugins/specweave/hooks/v2/handlers/living-specs-handler.sh
ADDED
@@ -0,0 +1,179 @@
+#!/bin/bash
+# living-specs-handler.sh - Update living SPECS on lifecycle events
+# Events: increment.created, increment.done, increment.archived, increment.reopened
+#
+# This handler updates the specs/ folder structure when increment lifecycle changes.
+# It is called by the event processor, NOT directly by post-tool-use.
+#
+# IMPORTANT: This script must be fast (<100ms) and never crash Claude
+set +e
+
+[[ "${SPECWEAVE_DISABLE_HOOKS:-0}" == "1" ]] && exit 0
+
+EVENT="${1:-}"
+INC_ID="${2:-}"
+
+[[ -z "$EVENT" ]] && exit 0
+[[ -z "$INC_ID" ]] && exit 0
+
+# Find project root
+PROJECT_ROOT="$PWD"
+while [[ "$PROJECT_ROOT" != "/" ]] && [[ ! -d "$PROJECT_ROOT/.specweave" ]]; do
+  PROJECT_ROOT=$(dirname "$PROJECT_ROOT")
+done
+[[ ! -d "$PROJECT_ROOT/.specweave" ]] && exit 0
+
+# Throttle: max once per 60 seconds per increment
+STATE_DIR="$PROJECT_ROOT/.specweave/state"
+THROTTLE_FILE="$STATE_DIR/.living-specs-$INC_ID"
+mkdir -p "$STATE_DIR" 2>/dev/null
+
+if [[ -f "$THROTTLE_FILE" ]]; then
+  if [[ "$(uname)" == "Darwin" ]]; then
+    AGE=$(($(date +%s) - $(stat -f %m "$THROTTLE_FILE" 2>/dev/null || echo 0)))
+  else
+    AGE=$(($(date +%s) - $(stat -c %Y "$THROTTLE_FILE" 2>/dev/null || echo 0)))
+  fi
+  [[ $AGE -lt 60 ]] && exit 0
+fi
+touch "$THROTTLE_FILE"
+
+# Find the sync script
+SYNC_SCRIPT=""
+for path in \
+  "$PROJECT_ROOT/plugins/specweave/lib/hooks/sync-living-docs.js" \
+  "$PROJECT_ROOT/dist/plugins/specweave/lib/hooks/sync-living-docs.js" \
+  "${CLAUDE_PLUGIN_ROOT:-}/lib/hooks/sync-living-docs.js"; do
+  [[ -f "$path" ]] && { SYNC_SCRIPT="$path"; break; }
+done
+
+# Log event (silent, async)
+LOG_FILE="$PROJECT_ROOT/.specweave/logs/hooks.log"
+mkdir -p "$(dirname "$LOG_FILE")" 2>/dev/null
+echo "[$(date '+%Y-%m-%d %H:%M:%S')] living-specs-handler: $EVENT $INC_ID" >> "$LOG_FILE" 2>/dev/null
+
+# Get increment paths
+INC_DIR="$PROJECT_ROOT/.specweave/increments/$INC_ID"
+ARCHIVE_DIR="$PROJECT_ROOT/.specweave/increments/_archive/$INC_ID"
+SPEC_FILE="$INC_DIR/spec.md"
+ARCHIVE_SPEC="$ARCHIVE_DIR/spec.md"
+
+# Extract feature ID from spec.md (fast grep)
+get_feature_id() {
+  local spec="$1"
+  [[ -f "$spec" ]] && grep -E "^(epic|feature_id):" "$spec" 2>/dev/null | head -1 | sed 's/.*:[[:space:]]*//' | tr -d '"'"'"
+}
+
+case "$EVENT" in
+  increment.created)
+    # Create spec entry in living docs via Node.js script
+    if [[ -n "$SYNC_SCRIPT" ]] && [[ -f "$SPEC_FILE" ]]; then
+      FEATURE_ID=$(get_feature_id "$SPEC_FILE")
+      cd "$PROJECT_ROOT" || exit 0
+      if [[ -n "$FEATURE_ID" ]]; then
+        FEATURE_ID="$FEATURE_ID" timeout 30 node "$SYNC_SCRIPT" "$INC_ID" >/dev/null 2>&1 &
+      else
+        timeout 30 node "$SYNC_SCRIPT" "$INC_ID" >/dev/null 2>&1 &
+      fi
+    fi
+    ;;
+
+  increment.done)
+    # Update status to complete in living docs
+    if [[ -n "$SYNC_SCRIPT" ]] && [[ -f "$SPEC_FILE" ]]; then
+      FEATURE_ID=$(get_feature_id "$SPEC_FILE")
+      cd "$PROJECT_ROOT" || exit 0
+
+      # Mark as complete in FEATURE.md if it exists
+      if [[ -n "$FEATURE_ID" ]]; then
+        SPECS_DIR="$PROJECT_ROOT/.specweave/docs/internal/specs"
+        # Find FEATURE.md for this feature
+        FEATURE_FILE=$(find "$SPECS_DIR" -path "*/$FEATURE_ID/FEATURE.md" 2>/dev/null | head -1)
+
+        if [[ -f "$FEATURE_FILE" ]]; then
+          # Update status in FEATURE.md (in-progress -> complete)
+          sed -i.bak 's/status: in-progress/status: complete/g' "$FEATURE_FILE" 2>/dev/null
+          rm -f "${FEATURE_FILE}.bak" 2>/dev/null
+
+          # Update increment row status
+          sed -i.bak "s/\[$INC_ID\].*in-progress/[$INC_ID](..\/..\/..\/..\/increments\/$INC_ID\/spec.md) | complete/g" "$FEATURE_FILE" 2>/dev/null
+          rm -f "${FEATURE_FILE}.bak" 2>/dev/null
+        fi
+
+        # Also run full sync for completeness
+        FEATURE_ID="$FEATURE_ID" timeout 30 node "$SYNC_SCRIPT" "$INC_ID" >/dev/null 2>&1 &
+      else
+        timeout 30 node "$SYNC_SCRIPT" "$INC_ID" >/dev/null 2>&1 &
+      fi
+    fi
+    ;;
+
+  increment.archived)
+    # Move spec entry to _archive section in living docs
+    if [[ -f "$ARCHIVE_SPEC" ]]; then
+      FEATURE_ID=$(get_feature_id "$ARCHIVE_SPEC")
+
+      if [[ -n "$FEATURE_ID" ]]; then
+        SPECS_DIR="$PROJECT_ROOT/.specweave/docs/internal/specs"
+        ACTIVE_FEATURE=$(find "$SPECS_DIR" -path "*/$FEATURE_ID/FEATURE.md" -not -path "*/_archive/*" 2>/dev/null | head -1)
+        ARCHIVE_SPECS="$SPECS_DIR/specweave/_archive"
+
+        if [[ -f "$ACTIVE_FEATURE" ]]; then
+          # Check if all increments for this feature are archived
+          FEATURE_DIR=$(dirname "$ACTIVE_FEATURE")
+          ACTIVE_INC_COUNT=$(ls -1 "$FEATURE_DIR"/*.md 2>/dev/null | grep -v FEATURE.md | grep -v README.md | wc -l | tr -d ' ')
+
+          if [[ "$ACTIVE_INC_COUNT" -eq 0 ]]; then
+            # Move entire feature folder to archive
+            mkdir -p "$ARCHIVE_SPECS" 2>/dev/null
+            mv "$FEATURE_DIR" "$ARCHIVE_SPECS/" 2>/dev/null
+          else
+            # Just update status in FEATURE.md
+            sed -i.bak "s/\[$INC_ID\].*|.*$/[$INC_ID](..\/..\/..\/..\/increments\/_archive\/$INC_ID\/spec.md) | archived/g" "$ACTIVE_FEATURE" 2>/dev/null
+            rm -f "${ACTIVE_FEATURE}.bak" 2>/dev/null
+          fi
+        fi
+      fi
+    fi
+    ;;
+
+  increment.reopened)
+    # Restore from archive section in living docs
+    if [[ -f "$SPEC_FILE" ]]; then
+      FEATURE_ID=$(get_feature_id "$SPEC_FILE")
+
+      if [[ -n "$FEATURE_ID" ]]; then
+        SPECS_DIR="$PROJECT_ROOT/.specweave/docs/internal/specs"
+        ARCHIVE_FEATURE="$SPECS_DIR/specweave/_archive/$FEATURE_ID"
+        ACTIVE_SPECS="$SPECS_DIR/specweave"
+
+        # Check if feature was archived, restore it
+        if [[ -d "$ARCHIVE_FEATURE" ]]; then
+          mv "$ARCHIVE_FEATURE" "$ACTIVE_SPECS/" 2>/dev/null
+
+          # Update status back to in-progress
+          FEATURE_FILE="$ACTIVE_SPECS/$FEATURE_ID/FEATURE.md"
+          if [[ -f "$FEATURE_FILE" ]]; then
+            sed -i.bak 's/status: archived/status: in-progress/g' "$FEATURE_FILE" 2>/dev/null
+            sed -i.bak 's/status: complete/status: in-progress/g' "$FEATURE_FILE" 2>/dev/null
+            rm -f "${FEATURE_FILE}.bak" 2>/dev/null
+          fi
+        else
+          # Feature still exists, just update increment status
+          FEATURE_FILE=$(find "$SPECS_DIR" -path "*/$FEATURE_ID/FEATURE.md" -not -path "*/_archive/*" 2>/dev/null | head -1)
+          if [[ -f "$FEATURE_FILE" ]]; then
+            sed -i.bak "s/\[$INC_ID\].*archived/[$INC_ID](..\/..\/..\/..\/increments\/$INC_ID\/spec.md) | in-progress/g" "$FEATURE_FILE" 2>/dev/null
+            sed -i.bak "s/\[$INC_ID\].*complete/[$INC_ID](..\/..\/..\/..\/increments\/$INC_ID\/spec.md) | in-progress/g" "$FEATURE_FILE" 2>/dev/null
+            rm -f "${FEATURE_FILE}.bak" 2>/dev/null
+          fi
+        fi
+
+        # Run full sync to restore any missing content
+        cd "$PROJECT_ROOT" || exit 0
+        FEATURE_ID="$FEATURE_ID" timeout 30 node "$SYNC_SCRIPT" "$INC_ID" >/dev/null 2>&1 &
+      fi
+    fi
+    ;;
+esac
+
+exit 0

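The handler locates its sync target through the spec's feature id; a small sketch of that lookup, assuming a front-matter style feature_id: (or epic:) line — the id value is invented:

# Illustrative spec.md front matter line
printf 'feature_id: FS-042\n' > /tmp/spec-sample.md

# Same extraction as get_feature_id() above
grep -E "^(epic|feature_id):" /tmp/spec-sample.md | head -1 | sed 's/.*:[[:space:]]*//' | tr -d '"'"'"
# prints: FS-042
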
package/plugins/specweave/hooks/v2/handlers/status-line-handler.sh
ADDED
@@ -0,0 +1,165 @@
+#!/bin/bash
+# status-line-handler.sh - Event-driven status line updates
+# Events: user-story.completed, user-story.reopened, increment.done, increment.archived, increment.reopened
+#
+# This handler updates the status line ONLY when meaningful events occur:
+# - User story completed (all ACs + tasks done for that US)
+# - User story reopened (US tasks/ACs unchecked)
+# - Increment lifecycle changes (done, archived, reopened)
+#
+# It does NOT update on every task.md edit - that would cause:
+# - Race conditions (multiple rapid updates)
+# - Performance issues (too frequent writes)
+# - Status line flickering
+#
+# IMPORTANT: This script must be fast (<20ms) and never crash Claude
+set +e
+
+[[ "${SPECWEAVE_DISABLE_HOOKS:-0}" == "1" ]] && exit 0
+
+EVENT="${1:-}"
+EVENT_DATA="${2:-}"
+
+[[ -z "$EVENT" ]] && exit 0
+[[ -z "$EVENT_DATA" ]] && exit 0
+
+# Find project root
+PROJECT_ROOT="$PWD"
+while [[ "$PROJECT_ROOT" != "/" ]] && [[ ! -d "$PROJECT_ROOT/.specweave" ]]; do
+  PROJECT_ROOT=$(dirname "$PROJECT_ROOT")
+done
+[[ ! -d "$PROJECT_ROOT/.specweave" ]] && exit 0
+
+STATE_DIR="$PROJECT_ROOT/.specweave/state"
+CACHE_FILE="$STATE_DIR/status-line.json"
+mkdir -p "$STATE_DIR" 2>/dev/null
+
+# Log event (silent, async)
+LOG_FILE="$PROJECT_ROOT/.specweave/logs/hooks.log"
+mkdir -p "$(dirname "$LOG_FILE")" 2>/dev/null
+echo "[$(date '+%Y-%m-%d %H:%M:%S')] status-line-handler: $EVENT $EVENT_DATA" >> "$LOG_FILE" 2>/dev/null
+
+# Parse event data
+# Format: INC_ID:US_ID (for user-story events) or just INC_ID (for lifecycle events)
+if [[ "$EVENT_DATA" == *":"* ]]; then
+  INC_ID="${EVENT_DATA%%:*}"
+  US_ID="${EVENT_DATA##*:}"
+else
+  INC_ID="$EVENT_DATA"
+  US_ID=""
+fi
+
+# Handle events
+case "$EVENT" in
+  user-story.completed)
+    # User story completed - update status line with US progress
+    # This is the RIGHT time to update (not on every checkbox click)
+    ;;
+
+  user-story.reopened)
+    # User story reopened - update status line
+    ;;
+
+  increment.done)
+    # Increment completed - update status line to show completion
+    ;;
+
+  increment.archived)
+    # Increment archived - clear from status line, find next active
+    INC_ID="" # Force finding a new active increment
+    ;;
+
+  increment.reopened)
+    # Increment reopened - show it as active again
+    ;;
+
+  *)
+    # Unknown event - ignore
+    exit 0
+    ;;
+esac
+
+# Find active increment if not determined or was archived
+if [[ -z "$INC_ID" ]] || [[ "$EVENT" == "increment.archived" ]]; then
+  INC_ID=""
+  for meta in "$PROJECT_ROOT/.specweave/increments"/[0-9]*/metadata.json; do
+    [[ -f "$meta" ]] || continue
+    STATUS=$(grep -o '"status"[[:space:]]*:[[:space:]]*"[^"]*"' "$meta" | grep -o '"[^"]*"$' | tr -d '"')
+    [[ "$STATUS" == "active" || "$STATUS" == "planning" ]] && {
+      INC_ID=$(basename "$(dirname "$meta")")
+      break
+    }
+  done
+fi
+
+# If no active increment, write null status
+if [[ -z "$INC_ID" ]]; then
+  cat > "$CACHE_FILE" << 'EOF'
+{"current":null,"event":"increment.archived","ts":"__TS__"}
+EOF
+  sed -i.bak "s/__TS__/$(date +%s)/" "$CACHE_FILE" 2>/dev/null
+  rm -f "${CACHE_FILE}.bak" 2>/dev/null
+  exit 0
+fi
+
+# Get increment paths
+TASKS_FILE="$PROJECT_ROOT/.specweave/increments/$INC_ID/tasks.md"
+SPEC_FILE="$PROJECT_ROOT/.specweave/increments/$INC_ID/spec.md"
+META_FILE="$PROJECT_ROOT/.specweave/increments/$INC_ID/metadata.json"
+
+# Check files exist
+[[ ! -f "$TASKS_FILE" ]] && exit 0
+
+# Count tasks (pure bash, fast)
+TOTAL=$(grep -c "^###\? T-" "$TASKS_FILE" 2>/dev/null || echo 0)
+DONE=$(grep -c "\[x\]" "$TASKS_FILE" 2>/dev/null || echo 0)
+PCT=0; [[ $TOTAL -gt 0 ]] && PCT=$((DONE * 100 / TOTAL))
+
+# Count user stories completed
+US_TOTAL=0
+US_DONE=0
+if [[ -f "$SPEC_FILE" ]]; then
+  # Count user story headers (## US-XXX or similar)
+  US_TOTAL=$(grep -c "^##.*US-" "$SPEC_FILE" 2>/dev/null || echo 0)
+
+  # Count by checking if all ACs for each US are checked
+  # This is a simplified count - the actual detection is done by us-completion-detector.sh
+  US_STATE_FILE="$STATE_DIR/.us-completion-$INC_ID"
+  if [[ -f "$US_STATE_FILE" ]]; then
+    US_DONE=$(grep -c "=yes$" "$US_STATE_FILE" 2>/dev/null || echo 0)
+  fi
+fi
+
+# Get increment status
+INC_STATUS="active"
+if [[ -f "$META_FILE" ]]; then
+  INC_STATUS=$(grep -o '"status"[[:space:]]*:[[:space:]]*"[^"]*"' "$META_FILE" | grep -o '"[^"]*"$' | tr -d '"')
+fi
+
+# Build detailed status JSON with event info
+TMP_FILE="$CACHE_FILE.tmp.$$"
+cat > "$TMP_FILE" << EOF
+{
+  "current": {
+    "id": "$INC_ID",
+    "completed": $DONE,
+    "total": $TOTAL,
+    "percentage": $PCT,
+    "status": "$INC_STATUS",
+    "userStories": {
+      "completed": $US_DONE,
+      "total": $US_TOTAL
+    }
+  },
+  "lastEvent": {
+    "type": "$EVENT",
+    "data": "$EVENT_DATA"
+  },
+  "ts": "$(date +%s)"
+}
+EOF
+
+# Atomic write (mv is atomic on same filesystem)
+mv "$TMP_FILE" "$CACHE_FILE" 2>/dev/null
+
+exit 0

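The cache written above is the single file a status line consumer needs to read; with invented numbers and ids it ends up looking roughly like this:

cat .specweave/state/status-line.json
# {
#   "current": {
#     "id": "0042-sample-increment",
#     "completed": 7,
#     "total": 12,
#     "percentage": 58,
#     "status": "active",
#     "userStories": { "completed": 1, "total": 3 }
#   },
#   "lastEvent": { "type": "user-story.completed", "data": "0042-sample-increment:US-001" },
#   "ts": "1705312800"
# }
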
package/plugins/specweave/hooks/v2/handlers/status-update.sh
CHANGED
@@ -1,8 +1,15 @@
 #!/bin/bash
 # status-update.sh - Fast status line update (synchronous)
 # Goal: <20ms execution, pure bash, no external processes
+#
+# NOTE: This is the LEGACY handler. New EDA architecture uses
+# status-line-handler.sh which is EVENT-DRIVEN.
+#
+# IMPORTANT: Never crash Claude, always exit 0
 set +e

+[[ "${SPECWEAVE_DISABLE_HOOKS:-0}" == "1" ]] && exit 0
+
 INC_ID="${1:-}"

 # Find project root

package/plugins/specweave/hooks/v2/queue/dequeue.sh
CHANGED
@@ -2,8 +2,12 @@
 # dequeue.sh - Get and remove next event from queue
 # Usage: dequeue.sh [--peek]
 # Returns JSON event or empty if queue is empty
+#
+# IMPORTANT: Never crash Claude, always exit 0
 set +e

+[[ "${SPECWEAVE_DISABLE_HOOKS:-0}" == "1" ]] && exit 0
+
 PEEK=false
 [[ "$1" == "--peek" ]] && PEEK=true

package/plugins/specweave/hooks/v2/queue/enqueue.sh
CHANGED
@@ -1,9 +1,15 @@
 #!/bin/bash
-# enqueue.sh - Add event to queue with deduplication
+# enqueue.sh - Add event to queue with deduplication and coalescing
 # Usage: enqueue.sh <event_type> <event_data>
-#
+#
+# Events are coalesced (deduplicated) by type+data hash within 10 second window.
+# Events have priorities: lifecycle=1 (highest), user-story=2, other=3 (lowest)
+#
+# IMPORTANT: This script must be fast (<5ms) and never crash
 set +e

+[[ "${SPECWEAVE_DISABLE_HOOKS:-0}" == "1" ]] && exit 0
+
 EVENT_TYPE="${1:-unknown}"
 EVENT_DATA="${2:-}"

@@ -17,25 +23,57 @@ done
 QUEUE_DIR="$PROJECT_ROOT/.specweave/state/event-queue"
 mkdir -p "$QUEUE_DIR" 2>/dev/null || exit 0

-#
-
+# Assign event priority
+# Priority 1: Lifecycle events (most important)
+# Priority 2: User story events
+# Priority 3: Other events
+PRIORITY=3
+case "$EVENT_TYPE" in
+  increment.created|increment.done|increment.archived|increment.reopened)
+    PRIORITY=1
+    ;;
+  user-story.completed|user-story.reopened)
+    PRIORITY=2
+    ;;
+esac
+
+# Coalescing: hash event type + data for deduplication
+# Cross-platform md5 (works on macOS and Linux)
+if command -v md5 >/dev/null 2>&1; then
+  HASH=$(echo "${EVENT_TYPE}:${EVENT_DATA}" | md5 | cut -c1-8)
+elif command -v md5sum >/dev/null 2>&1; then
+  HASH=$(echo "${EVENT_TYPE}:${EVENT_DATA}" | md5sum | cut -c1-8)
+else
+  # Fallback: simple hash from type and data
+  HASH=$(printf "%s" "${EVENT_TYPE}:${EVENT_DATA}" | cksum | cut -d' ' -f1)
+fi
+
 DEDUP_FILE="$QUEUE_DIR/.dedup-$HASH"
-DEDUP_TTL=5
+DEDUP_TTL=10  # Increased from 5s to 10s for better coalescing

-#
+# Coalescing check: skip if same event within TTL
 if [[ -f "$DEDUP_FILE" ]]; then
-
+  if [[ "$(uname)" == "Darwin" ]]; then
+    AGE=$(($(date +%s) - $(stat -f %m "$DEDUP_FILE" 2>/dev/null || echo 0)))
+  else
+    AGE=$(($(date +%s) - $(stat -c %Y "$DEDUP_FILE" 2>/dev/null || echo 0)))
+  fi
   [[ $AGE -lt $DEDUP_TTL ]] && exit 0
 fi
 touch "$DEDUP_FILE"

 # Enqueue event (atomic write)
-
-
-
-
+# Filename includes priority for priority-ordered processing
+TIMESTAMP=$(date +%s%N 2>/dev/null || date +%s)
+EVENT_FILE="$QUEUE_DIR/${PRIORITY}-${TIMESTAMP}-${EVENT_TYPE}.event"
+
+# Create event file atomically
+TMP_FILE="$EVENT_FILE.tmp.$$"
+cat > "$TMP_FILE" << EOF
+{"type":"$EVENT_TYPE","data":"$EVENT_DATA","priority":$PRIORITY,"ts":"$(date -u +%Y-%m-%dT%H:%M:%SZ 2>/dev/null || date +%Y-%m-%dT%H:%M:%SZ)"}
 EOF
+mv "$TMP_FILE" "$EVENT_FILE" 2>/dev/null

-# Cleanup old dedup files (>30s)
+# Cleanup old dedup files (>30s) - non-blocking
 find "$QUEUE_DIR" -name ".dedup-*" -mmin +1 -delete 2>/dev/null &
 exit 0

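With the priority prefix and coalescing in place, a queued event lands as one small JSON file whose name sorts by priority first; the values below are illustrative:

bash plugins/specweave/hooks/v2/queue/enqueue.sh "user-story.completed" "0042-sample-increment:US-001"

ls .specweave/state/event-queue/
# 2-1705312800123456789-user-story.completed.event    (priority 2, nanosecond timestamp)

cat .specweave/state/event-queue/2-*.event
# {"type":"user-story.completed","data":"0042-sample-increment:US-001","priority":2,"ts":"2024-01-15T10:40:00Z"}
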
package/plugins/specweave/hooks/v2/queue/processor.sh
CHANGED
@@ -1,10 +1,21 @@
 #!/bin/bash
-# processor.sh - Background event processor
-# Processes queued events asynchronously, routes to handlers
+# processor.sh - Background event processor with EDA routing
+# Processes queued events asynchronously, routes to specialized handlers
+#
 # Usage: processor.sh [--daemon]
-#
+#
+# Event routing:
+# - increment.created/done/archived/reopened -> living-specs-handler
+# - user-story.completed/reopened -> status-line-handler
+# - task.updated/spec.updated -> living-docs-handler (legacy)
+#
+# Self-terminates after 60s of idle
+#
+# IMPORTANT: This script uses flock for safe concurrent access
 set +e

+[[ "${SPECWEAVE_DISABLE_HOOKS:-0}" == "1" ]] && exit 0
+
 DAEMON_MODE=false
 [[ "$1" == "--daemon" ]] && DAEMON_MODE=true

@@ -19,23 +30,44 @@ QUEUE_DIR="$PROJECT_ROOT/.specweave/state/event-queue"
 HANDLER_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../handlers" && pwd)"
 LOG_FILE="$PROJECT_ROOT/.specweave/logs/processor.log"
 PID_FILE="$PROJECT_ROOT/.specweave/state/.processor.pid"
-
+LOCK_FILE="$PROJECT_ROOT/.specweave/state/.processor.lock"
+IDLE_TIMEOUT=60  # Increased from 30s to 60s
 IDLE_COUNT=0
+HANDLER_TIMEOUT=30  # Max time per handler call

 mkdir -p "$(dirname "$LOG_FILE")" 2>/dev/null
+mkdir -p "$QUEUE_DIR" 2>/dev/null
+
+# Acquire exclusive lock using flock
+exec 200>"$LOCK_FILE"
+if ! flock -n 200 2>/dev/null; then
+  # Another processor is running - exit silently
+  exit 0
+fi

-#
+# Double-check PID file for extra safety
 if [[ -f "$PID_FILE" ]]; then
   OLD_PID=$(cat "$PID_FILE" 2>/dev/null)
-  if kill -0 "$OLD_PID" 2>/dev/null; then
+  if [[ -n "$OLD_PID" ]] && kill -0 "$OLD_PID" 2>/dev/null; then
     exit 0 # Already running
   fi
 fi
 echo $$ > "$PID_FILE"
-trap 'rm -f "$PID_FILE"' EXIT
+trap 'rm -f "$PID_FILE"; flock -u 200 2>/dev/null' EXIT

 log() { echo "[$(date +%H:%M:%S)] $1" >> "$LOG_FILE" 2>/dev/null; }
-log "Processor started (PID:
+log "Processor started (PID: $$, IDLE_TIMEOUT: ${IDLE_TIMEOUT}s)"
+
+# Run handler with timeout (prevents stuck handlers from blocking queue)
+run_handler() {
+  local handler="$1"
+  local event_type="$2"
+  local event_data="$3"
+
+  if [[ -x "$handler" ]]; then
+    timeout "$HANDLER_TIMEOUT" bash "$handler" "$event_type" "$event_data" 2>/dev/null || true
+  fi
+}

 process_event() {
   local EVENT_JSON="$1"
@@ -45,12 +77,38 @@ process_event() {
   log "Processing: $EVENT_TYPE ($EVENT_DATA)"

   case "$EVENT_TYPE" in
+    # ========================================
+    # EDA Event Routing (new architecture)
+    # ========================================
+
+    # Lifecycle events -> living-specs-handler
+    increment.created|increment.done|increment.archived|increment.reopened)
+      run_handler "$HANDLER_DIR/living-specs-handler.sh" "$EVENT_TYPE" "$EVENT_DATA"
+      # Also update status line on lifecycle changes
+      run_handler "$HANDLER_DIR/status-line-handler.sh" "$EVENT_TYPE" "$EVENT_DATA"
+      ;;
+
+    # User story events -> status-line-handler
+    user-story.completed|user-story.reopened)
+      run_handler "$HANDLER_DIR/status-line-handler.sh" "$EVENT_TYPE" "$EVENT_DATA"
+      ;;
+
+    # ========================================
+    # Legacy event routing (backward compat)
+    # ========================================
     task.updated|spec.updated)
-
-
+      # Legacy: don't update status line on every task edit
+      # That causes race conditions and flickering
+      run_handler "$HANDLER_DIR/living-docs-handler.sh" "" "$EVENT_DATA"
+      run_handler "$HANDLER_DIR/ac-validation-handler.sh" "" "$EVENT_DATA"
       ;;
+
     metadata.changed)
-
+      run_handler "$HANDLER_DIR/github-sync-handler.sh" "" "$EVENT_DATA"
+      ;;
+
+    *)
+      log "Unknown event type: $EVENT_TYPE"
       ;;
   esac
 }
@@ -64,9 +122,13 @@ while true; do
     process_event "$EVENT"
   else
     IDLE_COUNT=$((IDLE_COUNT + 1))
-    [[ $IDLE_COUNT -ge $IDLE_TIMEOUT ]]
+    if [[ $IDLE_COUNT -ge $IDLE_TIMEOUT ]]; then
+      log "Idle timeout (${IDLE_TIMEOUT}s), exiting"
+      exit 0
+    fi
     sleep 1
   fi

+  # In non-daemon mode, exit after 3s of idle (quick processing)
   [[ "$DAEMON_MODE" == "false" ]] && [[ $IDLE_COUNT -ge 3 ]] && exit 0
 done