@joshski/dust 0.1.65 → 0.1.68
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +10 -0
- package/dist/artifacts.js +69 -23
- package/dist/audits/index.d.ts +5 -0
- package/dist/audits/stock-audits.d.ts +2 -1
- package/dist/audits.js +278 -1
- package/dist/bucket/events.d.ts +50 -0
- package/dist/bucket/log-buffer.d.ts +40 -0
- package/dist/bucket/paths.d.ts +26 -0
- package/dist/bucket/repository-git.d.ts +22 -0
- package/dist/bucket/repository-loop.d.ts +45 -0
- package/dist/bucket/repository.d.ts +70 -0
- package/dist/claude/event-parser.d.ts +2 -0
- package/dist/claude/run.d.ts +15 -0
- package/dist/claude/spawn-claude-code.d.ts +9 -0
- package/dist/claude/streamer.d.ts +17 -0
- package/dist/claude/tool-formatters.d.ts +13 -0
- package/dist/claude/types.d.ts +61 -0
- package/dist/cli/colors.d.ts +25 -0
- package/dist/cli/commands/agent-shared.d.ts +44 -0
- package/dist/cli/commands/focus.d.ts +11 -0
- package/dist/cli/commands/loop.d.ts +92 -0
- package/dist/cli/commands/next.d.ts +25 -0
- package/dist/cli/types.d.ts +1 -1
- package/dist/config/settings.d.ts +44 -0
- package/dist/dust.js +546 -66
- package/dist/git/hooks.d.ts +17 -0
- package/dist/session.d.ts +7 -0
- package/dist/types.d.ts +1 -0
- package/dist/version.d.ts +1 -0
- package/package.json +5 -4
package/dist/dust.js
CHANGED
|
@@ -275,7 +275,25 @@ async function loadSettings(cwd, fileSystem) {
|
|
|
275
275
|
}
|
|
276
276
|
|
|
277
277
|
// lib/version.ts
|
|
278
|
-
var DUST_VERSION = "0.1.
|
|
278
|
+
var DUST_VERSION = "0.1.68";
|
|
279
|
+
|
|
280
|
+
// lib/session.ts
|
|
281
|
+
var DUST_UNATTENDED = "DUST_UNATTENDED";
|
|
282
|
+
var DUST_SKIP_AGENT = "DUST_SKIP_AGENT";
|
|
283
|
+
var DUST_REPOSITORY_ID = "DUST_REPOSITORY_ID";
|
|
284
|
+
function isUnattended(env = process.env) {
|
|
285
|
+
return !!env[DUST_UNATTENDED];
|
|
286
|
+
}
|
|
287
|
+
function buildUnattendedEnv(options) {
|
|
288
|
+
const env = {
|
|
289
|
+
[DUST_UNATTENDED]: "1",
|
|
290
|
+
[DUST_SKIP_AGENT]: "1"
|
|
291
|
+
};
|
|
292
|
+
if (options?.repositoryId) {
|
|
293
|
+
env[DUST_REPOSITORY_ID] = options.repositoryId;
|
|
294
|
+
}
|
|
295
|
+
return env;
|
|
296
|
+
}
|
|
279
297
|
|
|
280
298
|
// lib/cli/dedent.ts
|
|
281
299
|
function dedent(strings, ...values) {
|
|
@@ -524,7 +542,7 @@ ${vars.agentInstructions}` : "";
|
|
|
524
542
|
}
|
|
525
543
|
async function agent(dependencies, env = process.env) {
|
|
526
544
|
const { context, fileSystem, settings } = dependencies;
|
|
527
|
-
if (env
|
|
545
|
+
if (env[DUST_SKIP_AGENT] === "1") {
|
|
528
546
|
context.stdout("You're running in an automated loop - proceeding to implement the assigned task.");
|
|
529
547
|
return { exitCode: 0 };
|
|
530
548
|
}
|
|
@@ -591,6 +609,102 @@ function extractOpeningSentence(content) {
|
|
|
591
609
|
|
|
592
610
|
// lib/audits/stock-audits.ts
|
|
593
611
|
var ideasHint = "Review existing ideas in `./.dust/ideas/` to understand what has been proposed or considered historically, then create new idea files in `./.dust/ideas/` for any issues you identify, avoiding duplication.";
|
|
612
|
+
function dataAccessReview() {
|
|
613
|
+
return dedent`
|
|
614
|
+
# Data Access Review
|
|
615
|
+
|
|
616
|
+
Review data access patterns for performance issues and optimization opportunities.
|
|
617
|
+
|
|
618
|
+
${ideasHint}
|
|
619
|
+
|
|
620
|
+
## Scope
|
|
621
|
+
|
|
622
|
+
Focus on these areas:
|
|
623
|
+
|
|
624
|
+
1. **N+1 query patterns** - Loops that make individual data requests instead of batching
|
|
625
|
+
2. **Missing indexes** - Database schema files lacking indexes on frequently queried columns
|
|
626
|
+
3. **Inefficient data loading** - Over-fetching (loading more data than needed) or under-fetching (requiring multiple round trips)
|
|
627
|
+
4. **Caching opportunities** - Repeated lookups that could benefit from memoization or caching
|
|
628
|
+
5. **Batch processing** - Sequential operations that could be parallelized or batched
|
|
629
|
+
6. **Connection management** - Connection pooling configuration and resource cleanup
|
|
630
|
+
|
|
631
|
+
## Analysis Steps
|
|
632
|
+
|
|
633
|
+
1. Search for loops containing data access calls (API requests, database queries, file reads)
|
|
634
|
+
2. Review database schema or migration files for index definitions
|
|
635
|
+
3. Identify functions that make multiple related data requests
|
|
636
|
+
4. Look for repeated identical lookups within the same request lifecycle
|
|
637
|
+
5. Check for proper resource cleanup (connection closing, stream ending)
|
|
638
|
+
|
|
639
|
+
## Applicability
|
|
640
|
+
|
|
641
|
+
This audit applies to codebases that interact with:
|
|
642
|
+
- Databases (SQL, NoSQL, ORM queries)
|
|
643
|
+
- External APIs (REST, GraphQL, gRPC)
|
|
644
|
+
- File systems (reading/writing files)
|
|
645
|
+
- Caches (Redis, Memcached, in-memory)
|
|
646
|
+
|
|
647
|
+
If none of these apply, document that finding and skip the detailed analysis.
|
|
648
|
+
|
|
649
|
+
## Principles
|
|
650
|
+
|
|
651
|
+
- [Decoupled Code](../principles/decoupled-code.md) - Data access should be isolated for testability
|
|
652
|
+
- [Fast Feedback](../principles/fast-feedback.md) - Efficient data access enables faster feedback loops
|
|
653
|
+
- [Maintainable Codebase](../principles/maintainable-codebase.md) - Good data patterns improve maintainability
|
|
654
|
+
|
|
655
|
+
## Blocked By
|
|
656
|
+
|
|
657
|
+
(none)
|
|
658
|
+
|
|
659
|
+
## Definition of Done
|
|
660
|
+
|
|
661
|
+
- [ ] Searched for N+1 query patterns (loops with data access)
|
|
662
|
+
- [ ] Reviewed database schemas for missing indexes (if applicable)
|
|
663
|
+
- [ ] Identified over-fetching or under-fetching patterns
|
|
664
|
+
- [ ] Found repeated lookups that could be cached
|
|
665
|
+
- [ ] Checked for sequential operations that could be batched
|
|
666
|
+
- [ ] Verified connection/resource cleanup is handled properly
|
|
667
|
+
- [ ] Proposed ideas for any data access improvements identified
|
|
668
|
+
`;
|
|
669
|
+
}
|
|
670
|
+
function coverageExclusions() {
|
|
671
|
+
return dedent`
|
|
672
|
+
# Coverage Exclusions
|
|
673
|
+
|
|
674
|
+
Review coverage exclusion configuration to identify opportunities for removal through refactoring.
|
|
675
|
+
|
|
676
|
+
${ideasHint}
|
|
677
|
+
|
|
678
|
+
## Scope
|
|
679
|
+
|
|
680
|
+
Focus on these areas:
|
|
681
|
+
|
|
682
|
+
1. **Current exclusions** - Review all exclusions in \`vitest.config.ts\` or equivalent
|
|
683
|
+
2. **Justification** - Is each exclusion still necessary?
|
|
684
|
+
3. **Tooling limitations** - Can workarounds be found for coverage tool issues?
|
|
685
|
+
4. **Decoupling opportunities** - Can excluded code be restructured to enable testing?
|
|
686
|
+
5. **Entry point patterns** - Can hard-to-test entry points be decoupled from logic?
|
|
687
|
+
|
|
688
|
+
## Principles
|
|
689
|
+
|
|
690
|
+
- [Decoupled Code](../principles/decoupled-code.md)
|
|
691
|
+
- [Unit Test Coverage](../principles/unit-test-coverage.md)
|
|
692
|
+
- [Comprehensive Test Coverage](../principles/comprehensive-test-coverage.md)
|
|
693
|
+
- [Make Changes with Confidence](../principles/make-changes-with-confidence.md)
|
|
694
|
+
|
|
695
|
+
## Blocked By
|
|
696
|
+
|
|
697
|
+
(none)
|
|
698
|
+
|
|
699
|
+
## Definition of Done
|
|
700
|
+
|
|
701
|
+
- [ ] Identified all coverage exclusions in the project
|
|
702
|
+
- [ ] Documented the reason each exclusion exists
|
|
703
|
+
- [ ] Evaluated whether each exclusion is still necessary
|
|
704
|
+
- [ ] Identified exclusions that could be removed through decoupling
|
|
705
|
+
- [ ] Proposed ideas for refactoring where feasible
|
|
706
|
+
`;
|
|
707
|
+
}
|
|
594
708
|
function componentReuse() {
|
|
595
709
|
return dedent`
|
|
596
710
|
# Component Reuse
|
|
@@ -801,6 +915,56 @@ function ideasFromPrinciples() {
|
|
|
801
915
|
- [ ] Proposed new ideas for unmet or underserved principles
|
|
802
916
|
`;
|
|
803
917
|
}
|
|
918
|
+
function refactoringOpportunities() {
|
|
919
|
+
return dedent`
|
|
920
|
+
# Refactoring Opportunities
|
|
921
|
+
|
|
922
|
+
Analyze recent commits to identify code needing structural improvements.
|
|
923
|
+
|
|
924
|
+
${ideasHint}
|
|
925
|
+
|
|
926
|
+
## Scope
|
|
927
|
+
|
|
928
|
+
Analyze commits since the last refactoring-opportunities audit (check \`.dust/done/\` for previous runs). Focus on these signals:
|
|
929
|
+
|
|
930
|
+
1. **File churn** - Files modified frequently across multiple commits may have unclear responsibilities or be accumulating technical debt
|
|
931
|
+
2. **Size growth** - Files that have grown significantly may benefit from decomposition
|
|
932
|
+
3. **Commit message patterns** - Look for messages containing "fix", "workaround", "temporary", "hack", or "TODO" that indicate shortcuts taken
|
|
933
|
+
|
|
934
|
+
## Analysis Steps
|
|
935
|
+
|
|
936
|
+
1. Run \`git log --since="<last-audit-date>" --name-only --pretty=format:"COMMIT:%s"\` to get commits with their messages and changed files
|
|
937
|
+
2. Count file modification frequency to identify high-churn files
|
|
938
|
+
3. Check current sizes of frequently-modified files with \`wc -l\`
|
|
939
|
+
4. Review commit messages for patterns suggesting technical debt
|
|
940
|
+
|
|
941
|
+
## Output
|
|
942
|
+
|
|
943
|
+
For each refactoring opportunity identified, provide:
|
|
944
|
+
- **File path** - The specific file needing attention
|
|
945
|
+
- **Signal** - What triggered this recommendation (churn, size, commit pattern)
|
|
946
|
+
- **Specific suggestion** - A concrete refactoring action (e.g., "Extract the validation logic into a separate module", not just "consider refactoring")
|
|
947
|
+
|
|
948
|
+
## Principles
|
|
949
|
+
|
|
950
|
+
- [Boy Scout Rule](../principles/boy-scout-rule.md) - Leave code better than found, but capture large cleanups as separate tasks
|
|
951
|
+
- [Make the Change Easy](../principles/make-the-change-easy.md) - Refactor until the change becomes straightforward
|
|
952
|
+
- [Make Changes with Confidence](../principles/make-changes-with-confidence.md) - Tests and checks enable safe refactoring
|
|
953
|
+
- [Reasonably DRY](../principles/reasonably-dry.md) - Extract only when duplication represents the same concept
|
|
954
|
+
|
|
955
|
+
## Blocked By
|
|
956
|
+
|
|
957
|
+
(none)
|
|
958
|
+
|
|
959
|
+
## Definition of Done
|
|
960
|
+
|
|
961
|
+
- [ ] Identified high-churn files (modified in 3+ commits since last audit)
|
|
962
|
+
- [ ] Flagged files exceeding 300 lines that grew significantly
|
|
963
|
+
- [ ] Noted commits with concerning message patterns
|
|
964
|
+
- [ ] Provided specific refactoring suggestions for each opportunity
|
|
965
|
+
- [ ] Created ideas for any substantial refactoring work identified
|
|
966
|
+
`;
|
|
967
|
+
}
|
|
804
968
|
function performanceReview() {
|
|
805
969
|
return dedent`
|
|
806
970
|
# Performance Review
|
|
@@ -941,17 +1105,129 @@ function testCoverage() {
|
|
|
941
1105
|
- [ ] Proposed ideas for any test coverage gaps identified
|
|
942
1106
|
`;
|
|
943
1107
|
}
|
|
1108
|
+
function errorHandling() {
|
|
1109
|
+
return dedent`
|
|
1110
|
+
# Error Handling
|
|
1111
|
+
|
|
1112
|
+
Review error handling patterns for consistency and agent-friendliness.
|
|
1113
|
+
|
|
1114
|
+
${ideasHint}
|
|
1115
|
+
|
|
1116
|
+
## Scope
|
|
1117
|
+
|
|
1118
|
+
Focus on these areas:
|
|
1119
|
+
|
|
1120
|
+
1. **Silently swallowed errors** - Empty catch blocks, \`.catch(() => {})\`, errors caught but not logged or re-thrown
|
|
1121
|
+
2. **Missing error context** - Errors converted to booleans or generic messages that lose details
|
|
1122
|
+
3. **Fire-and-forget promises** - Promises without \`.catch()\` or \`await\` that may fail silently
|
|
1123
|
+
4. **Non-actionable error messages** - Error messages that say what went wrong but not how to fix it
|
|
1124
|
+
5. **Inconsistent error recovery** - Similar error scenarios handled differently across the codebase
|
|
1125
|
+
|
|
1126
|
+
## Analysis Steps
|
|
1127
|
+
|
|
1128
|
+
1. Search for empty catch blocks: \`catch {}\`, \`catch () {}\`, \`.catch(() => {})\`
|
|
1129
|
+
2. Look for patterns that discard error details: \`catch { return false }\`, \`catch { return null }\`
|
|
1130
|
+
3. Find promises without error handling: unassigned or not-awaited promises
|
|
1131
|
+
4. Review error messages in \`throw\` statements and \`context.stderr()\` calls for actionability
|
|
1132
|
+
5. Compare error handling patterns across similar operations for consistency
|
|
1133
|
+
|
|
1134
|
+
## Output
|
|
1135
|
+
|
|
1136
|
+
For each error handling issue identified, provide:
|
|
1137
|
+
- **Location** - File path and line number
|
|
1138
|
+
- **Pattern** - Which category of issue (swallowed, missing context, fire-and-forget, etc.)
|
|
1139
|
+
- **Impact** - What failures could go unnoticed or be hard to debug
|
|
1140
|
+
- **Suggestion** - Specific fix (add logging, propagate error, add recovery guidance)
|
|
1141
|
+
|
|
1142
|
+
## Principles
|
|
1143
|
+
|
|
1144
|
+
- [Actionable Errors](../principles/actionable-errors.md) - Error messages should tell you what to do next
|
|
1145
|
+
- [Debugging Tooling](../principles/debugging-tooling.md) - Agents need readable, structured error output
|
|
1146
|
+
- [Stop the Line](../principles/stop-the-line.md) - Problems should be fixed at source, not hidden
|
|
1147
|
+
|
|
1148
|
+
## Blocked By
|
|
1149
|
+
|
|
1150
|
+
(none)
|
|
1151
|
+
|
|
1152
|
+
## Definition of Done
|
|
1153
|
+
|
|
1154
|
+
- [ ] Searched for empty catch blocks and silent error swallowing
|
|
1155
|
+
- [ ] Identified patterns that discard error details
|
|
1156
|
+
- [ ] Found fire-and-forget promises without error handling
|
|
1157
|
+
- [ ] Reviewed error messages for actionability
|
|
1158
|
+
- [ ] Compared error handling consistency across similar operations
|
|
1159
|
+
- [ ] Proposed ideas for any error handling improvements identified
|
|
1160
|
+
`;
|
|
1161
|
+
}
|
|
1162
|
+
function ubiquitousLanguage() {
|
|
1163
|
+
return dedent`
|
|
1164
|
+
# Ubiquitous Language
|
|
1165
|
+
|
|
1166
|
+
Verify terminology consistency across code, documentation, and user interface.
|
|
1167
|
+
|
|
1168
|
+
${ideasHint}
|
|
1169
|
+
|
|
1170
|
+
## Scope
|
|
1171
|
+
|
|
1172
|
+
Focus on these areas:
|
|
1173
|
+
|
|
1174
|
+
1. **Terminology drift** - Do recent changes introduce terms that deviate from established vocabulary?
|
|
1175
|
+
2. **Code-to-docs alignment** - Are variables, functions, and types named consistently with documentation?
|
|
1176
|
+
3. **User interface consistency** - Do UI labels and messages match the terms used in code and docs?
|
|
1177
|
+
4. **Glossary adherence** - If a glossary exists, is it being followed?
|
|
1178
|
+
5. **Acronym and abbreviation usage** - Are shortened forms used consistently?
|
|
1179
|
+
|
|
1180
|
+
## Analysis Steps
|
|
1181
|
+
|
|
1182
|
+
1. Identify key domain terms from documentation, README, or existing glossary
|
|
1183
|
+
2. Review recent commits for new terminology or naming choices
|
|
1184
|
+
3. Compare code identifiers against documented terminology
|
|
1185
|
+
4. Check user-facing strings for consistency with technical naming
|
|
1186
|
+
5. Flag deviations where the same concept uses different names
|
|
1187
|
+
|
|
1188
|
+
## Output
|
|
1189
|
+
|
|
1190
|
+
For each terminology issue identified, provide:
|
|
1191
|
+
- **Term in question** - The inconsistent or unclear term
|
|
1192
|
+
- **Where found** - File paths and locations where the term appears
|
|
1193
|
+
- **Recommended action** - Standardize on existing term, or propose a new canonical name
|
|
1194
|
+
|
|
1195
|
+
## Principles
|
|
1196
|
+
|
|
1197
|
+
- [Naming Matters](../principles/naming-matters.md) - Good naming reduces waste by eliminating confusion
|
|
1198
|
+
- [Consistent Naming](../principles/consistent-naming.md) - Names should follow established conventions
|
|
1199
|
+
- [Clarity Over Brevity](../principles/clarity-over-brevity.md) - Names should be descriptive and self-documenting
|
|
1200
|
+
|
|
1201
|
+
## Blocked By
|
|
1202
|
+
|
|
1203
|
+
(none)
|
|
1204
|
+
|
|
1205
|
+
## Definition of Done
|
|
1206
|
+
|
|
1207
|
+
- [ ] Identified key domain terms from project documentation
|
|
1208
|
+
- [ ] Reviewed recent commits for terminology consistency
|
|
1209
|
+
- [ ] Compared code naming against documentation vocabulary
|
|
1210
|
+
- [ ] Checked user-facing text for alignment with code terms
|
|
1211
|
+
- [ ] Documented any terminology drift or inconsistencies found
|
|
1212
|
+
- [ ] Proposed ideas for standardizing inconsistent terminology
|
|
1213
|
+
`;
|
|
1214
|
+
}
|
|
944
1215
|
var stockAuditFunctions = {
|
|
945
1216
|
"agent-developer-experience": agentDeveloperExperience,
|
|
946
1217
|
"component-reuse": componentReuse,
|
|
1218
|
+
"coverage-exclusions": coverageExclusions,
|
|
1219
|
+
"data-access-review": dataAccessReview,
|
|
947
1220
|
"dead-code": deadCode,
|
|
1221
|
+
"error-handling": errorHandling,
|
|
948
1222
|
"facts-verification": factsVerification,
|
|
949
1223
|
"ideas-from-commits": ideasFromCommits,
|
|
950
1224
|
"ideas-from-principles": ideasFromPrinciples,
|
|
951
1225
|
"performance-review": performanceReview,
|
|
1226
|
+
"refactoring-opportunities": refactoringOpportunities,
|
|
952
1227
|
"security-review": securityReview,
|
|
953
1228
|
"stale-ideas": staleIdeas,
|
|
954
|
-
"test-coverage": testCoverage
|
|
1229
|
+
"test-coverage": testCoverage,
|
|
1230
|
+
"ubiquitous-language": ubiquitousLanguage
|
|
955
1231
|
};
|
|
956
1232
|
function loadStockAudits() {
|
|
957
1233
|
return Object.entries(stockAuditFunctions).sort(([a], [b]) => a.localeCompare(b)).map(([name, render]) => {
|
|
@@ -970,6 +1246,24 @@ function transformAuditContent(content) {
|
|
|
970
1246
|
const originalTitle = titleMatch[1];
|
|
971
1247
|
return content.replace(/^#\s+.+$/m, `# Audit: ${originalTitle}`);
|
|
972
1248
|
}
|
|
1249
|
+
function injectAdHocScope(content, adHocDetails) {
|
|
1250
|
+
const scopeMatch = content.match(/\n## Scope\n/);
|
|
1251
|
+
if (scopeMatch?.index !== undefined) {
|
|
1252
|
+
const insertIndex = scopeMatch.index;
|
|
1253
|
+
const adHocSection = `
|
|
1254
|
+
## Ad-hoc Scope
|
|
1255
|
+
|
|
1256
|
+
${adHocDetails}
|
|
1257
|
+
`;
|
|
1258
|
+
return content.slice(0, insertIndex) + adHocSection + content.slice(insertIndex);
|
|
1259
|
+
}
|
|
1260
|
+
return `${content}
|
|
1261
|
+
|
|
1262
|
+
## Ad-hoc Scope
|
|
1263
|
+
|
|
1264
|
+
${adHocDetails}
|
|
1265
|
+
`;
|
|
1266
|
+
}
|
|
973
1267
|
|
|
974
1268
|
// lib/cli/colors.ts
|
|
975
1269
|
var ANSI_COLORS = {
|
|
@@ -1005,7 +1299,7 @@ function getColors() {
|
|
|
1005
1299
|
}
|
|
1006
1300
|
|
|
1007
1301
|
// lib/cli/commands/audit.ts
|
|
1008
|
-
async function addAudit(auditName, dependencies) {
|
|
1302
|
+
async function addAudit(auditName, adHocDetails, dependencies) {
|
|
1009
1303
|
const { context, fileSystem, settings } = dependencies;
|
|
1010
1304
|
const dustPath = `${context.cwd}/.dust`;
|
|
1011
1305
|
const userAuditsPath = `${dustPath}/config/audits`;
|
|
@@ -1019,7 +1313,10 @@ async function addAudit(auditName, dependencies) {
|
|
|
1019
1313
|
const userAuditPath = `${userAuditsPath}/${auditName}.md`;
|
|
1020
1314
|
if (fileSystem.exists(userAuditPath)) {
|
|
1021
1315
|
const content = await fileSystem.readFile(userAuditPath);
|
|
1022
|
-
|
|
1316
|
+
let transformedContent = transformAuditContent(content);
|
|
1317
|
+
if (adHocDetails) {
|
|
1318
|
+
transformedContent = injectAdHocScope(transformedContent, adHocDetails);
|
|
1319
|
+
}
|
|
1023
1320
|
await fileSystem.mkdir(tasksPath, { recursive: true });
|
|
1024
1321
|
await fileSystem.writeFile(taskFilePath, transformedContent);
|
|
1025
1322
|
context.stdout(`→ ${relativeTaskPath}`);
|
|
@@ -1027,7 +1324,10 @@ async function addAudit(auditName, dependencies) {
|
|
|
1027
1324
|
}
|
|
1028
1325
|
const stockAudit = loadStockAudits().find((a) => a.name === auditName);
|
|
1029
1326
|
if (stockAudit) {
|
|
1030
|
-
|
|
1327
|
+
let transformedContent = transformAuditContent(stockAudit.template);
|
|
1328
|
+
if (adHocDetails) {
|
|
1329
|
+
transformedContent = injectAdHocScope(transformedContent, adHocDetails);
|
|
1330
|
+
}
|
|
1031
1331
|
await fileSystem.mkdir(tasksPath, { recursive: true });
|
|
1032
1332
|
await fileSystem.writeFile(taskFilePath, transformedContent);
|
|
1033
1333
|
context.stdout(`→ ${relativeTaskPath}`);
|
|
@@ -1083,8 +1383,9 @@ async function listAudits(dependencies) {
|
|
|
1083
1383
|
}
|
|
1084
1384
|
async function audit(dependencies) {
|
|
1085
1385
|
const auditName = dependencies.arguments[0];
|
|
1386
|
+
const adHocDetails = dependencies.arguments[1];
|
|
1086
1387
|
if (auditName) {
|
|
1087
|
-
return addAudit(auditName, dependencies);
|
|
1388
|
+
return addAudit(auditName, adHocDetails, dependencies);
|
|
1088
1389
|
}
|
|
1089
1390
|
return listAudits(dependencies);
|
|
1090
1391
|
}
|
|
@@ -2189,13 +2490,7 @@ async function runOneIteration(dependencies, loopDependencies, onLoopEvent, onAg
|
|
|
2189
2490
|
logger = log,
|
|
2190
2491
|
repositoryId
|
|
2191
2492
|
} = options;
|
|
2192
|
-
const baseEnv = {
|
|
2193
|
-
DUST_UNATTENDED: "1",
|
|
2194
|
-
DUST_SKIP_AGENT: "1"
|
|
2195
|
-
};
|
|
2196
|
-
if (repositoryId) {
|
|
2197
|
-
baseEnv.DUST_REPOSITORY_ID = repositoryId;
|
|
2198
|
-
}
|
|
2493
|
+
const baseEnv = buildUnattendedEnv({ repositoryId });
|
|
2199
2494
|
log("syncing with remote");
|
|
2200
2495
|
onLoopEvent({ type: "loop.syncing" });
|
|
2201
2496
|
const pullResult = await gitPull(context.cwd, spawn);
|
|
@@ -2319,6 +2614,10 @@ function parseMaxIterations(commandArguments) {
|
|
|
2319
2614
|
async function loopClaude(dependencies, loopDependencies = createDefaultDependencies()) {
|
|
2320
2615
|
enableFileLogs("loop");
|
|
2321
2616
|
const { context, settings } = dependencies;
|
|
2617
|
+
if (isUnattended()) {
|
|
2618
|
+
context.stderr("dust loop cannot run inside an unattended session (DUST_UNATTENDED is set)");
|
|
2619
|
+
return { exitCode: 1 };
|
|
2620
|
+
}
|
|
2322
2621
|
const { postEvent } = loopDependencies;
|
|
2323
2622
|
const maxIterations = parseMaxIterations(dependencies.arguments);
|
|
2324
2623
|
const eventsUrl = settings.eventsUrl;
|
|
@@ -2673,6 +2972,51 @@ async function handleRepositoryList(repositories, manager, repoDeps, context) {
|
|
|
2673
2972
|
}
|
|
2674
2973
|
}
|
|
2675
2974
|
|
|
2975
|
+
// lib/bucket/server-messages.ts
|
|
2976
|
+
function parseServerMessage(data) {
|
|
2977
|
+
if (typeof data !== "object" || data === null) {
|
|
2978
|
+
return null;
|
|
2979
|
+
}
|
|
2980
|
+
const message = data;
|
|
2981
|
+
if (message.type === "repository-list") {
|
|
2982
|
+
if (!Array.isArray(message.repositories)) {
|
|
2983
|
+
return null;
|
|
2984
|
+
}
|
|
2985
|
+
const repositories = [];
|
|
2986
|
+
for (const r of message.repositories) {
|
|
2987
|
+
if (typeof r !== "object" || r === null) {
|
|
2988
|
+
return null;
|
|
2989
|
+
}
|
|
2990
|
+
const repo = r;
|
|
2991
|
+
if (typeof repo.name !== "string" || typeof repo.gitUrl !== "string") {
|
|
2992
|
+
return null;
|
|
2993
|
+
}
|
|
2994
|
+
const item = {
|
|
2995
|
+
name: repo.name,
|
|
2996
|
+
gitUrl: repo.gitUrl
|
|
2997
|
+
};
|
|
2998
|
+
if (typeof repo.url === "string") {
|
|
2999
|
+
item.url = repo.url;
|
|
3000
|
+
}
|
|
3001
|
+
if (typeof repo.id === "string") {
|
|
3002
|
+
item.id = repo.id;
|
|
3003
|
+
}
|
|
3004
|
+
if (typeof repo.hasTask === "boolean") {
|
|
3005
|
+
item.hasTask = repo.hasTask;
|
|
3006
|
+
}
|
|
3007
|
+
repositories.push(item);
|
|
3008
|
+
}
|
|
3009
|
+
return { type: "repository-list", repositories };
|
|
3010
|
+
}
|
|
3011
|
+
if (message.type === "task-available") {
|
|
3012
|
+
if (typeof message.repository !== "string") {
|
|
3013
|
+
return null;
|
|
3014
|
+
}
|
|
3015
|
+
return { type: "task-available", repository: message.repository };
|
|
3016
|
+
}
|
|
3017
|
+
return null;
|
|
3018
|
+
}
|
|
3019
|
+
|
|
2676
3020
|
// lib/bucket/terminal-ui.ts
|
|
2677
3021
|
var ANSI = {
|
|
2678
3022
|
HIDE_CURSOR: "\x1B[?25l",
|
|
@@ -3259,20 +3603,17 @@ function signalTaskAvailable(repoState, state, repoDeps, context, useTUI) {
|
|
|
3259
3603
|
}
|
|
3260
3604
|
function syncUIWithRepoList(state, repos) {
|
|
3261
3605
|
const incomingNames = new Set;
|
|
3262
|
-
for (const
|
|
3263
|
-
|
|
3264
|
-
if (repo) {
|
|
3265
|
-
|
|
3266
|
-
if (!
|
|
3267
|
-
|
|
3268
|
-
|
|
3269
|
-
buffer = createLogBuffer();
|
|
3270
|
-
state.logBuffers.set(repo.name, buffer);
|
|
3271
|
-
}
|
|
3272
|
-
addRepository2(state.ui, repo.name, buffer, repo.url);
|
|
3273
|
-
} else if (repo.url) {
|
|
3274
|
-
state.ui.repositoryUrls.set(repo.name, repo.url);
|
|
3606
|
+
for (const repo of repos) {
|
|
3607
|
+
incomingNames.add(repo.name);
|
|
3608
|
+
if (!state.ui.repositories.includes(repo.name)) {
|
|
3609
|
+
let buffer = state.logBuffers.get(repo.name);
|
|
3610
|
+
if (!buffer) {
|
|
3611
|
+
buffer = createLogBuffer();
|
|
3612
|
+
state.logBuffers.set(repo.name, buffer);
|
|
3275
3613
|
}
|
|
3614
|
+
addRepository2(state.ui, repo.name, buffer, repo.url);
|
|
3615
|
+
} else if (repo.url) {
|
|
3616
|
+
state.ui.repositoryUrls.set(repo.name, repo.url);
|
|
3276
3617
|
}
|
|
3277
3618
|
}
|
|
3278
3619
|
for (const name of [...state.ui.repositories]) {
|
|
@@ -3378,47 +3719,67 @@ function connectWebSocket(token, state, bucketDependencies, context, fileSystem,
|
|
|
3378
3719
|
logMessage(state, context, useTUI, `WebSocket error: ${error.message}`, "stderr");
|
|
3379
3720
|
};
|
|
3380
3721
|
ws.onmessage = (event) => {
|
|
3722
|
+
let rawData;
|
|
3381
3723
|
try {
|
|
3382
|
-
|
|
3383
|
-
|
|
3384
|
-
|
|
3385
|
-
|
|
3386
|
-
|
|
3387
|
-
|
|
3388
|
-
|
|
3389
|
-
|
|
3390
|
-
|
|
3391
|
-
|
|
3392
|
-
|
|
3393
|
-
|
|
3394
|
-
|
|
3395
|
-
|
|
3396
|
-
|
|
3397
|
-
|
|
3398
|
-
|
|
3399
|
-
|
|
3400
|
-
|
|
3401
|
-
|
|
3402
|
-
|
|
3724
|
+
rawData = JSON.parse(event.data);
|
|
3725
|
+
} catch {
|
|
3726
|
+
logMessage(state, context, useTUI, `Failed to parse WebSocket message: ${event.data}`, "stderr");
|
|
3727
|
+
return;
|
|
3728
|
+
}
|
|
3729
|
+
const message = parseServerMessage(rawData);
|
|
3730
|
+
if (!message) {
|
|
3731
|
+
logMessage(state, context, useTUI, `Invalid WebSocket message format: ${event.data}`, "stderr");
|
|
3732
|
+
return;
|
|
3733
|
+
}
|
|
3734
|
+
log4(`ws message: ${message.type}`);
|
|
3735
|
+
if (message.type === "repository-list") {
|
|
3736
|
+
const repos = message.repositories;
|
|
3737
|
+
logMessage(state, context, useTUI, `Received repository list (${repos.length} repositories):`);
|
|
3738
|
+
if (repos.length === 0) {
|
|
3739
|
+
logMessage(state, context, useTUI, " (empty)");
|
|
3740
|
+
} else {
|
|
3741
|
+
for (const r of repos) {
|
|
3742
|
+
const attrs = [];
|
|
3743
|
+
attrs.push(`name=${r.name}`);
|
|
3744
|
+
if (r.id !== undefined) {
|
|
3745
|
+
attrs.push(`id=${r.id}`);
|
|
3403
3746
|
}
|
|
3404
|
-
|
|
3405
|
-
|
|
3406
|
-
|
|
3407
|
-
|
|
3408
|
-
|
|
3409
|
-
|
|
3410
|
-
const repoDeps = toRepositoryDependencies(bucketDependencies, fileSystem);
|
|
3411
|
-
logMessage(state, context, useTUI, `Received task-available for ${repoName}`);
|
|
3412
|
-
const repoState = state.repositories.get(repoName);
|
|
3413
|
-
if (repoState) {
|
|
3414
|
-
signalTaskAvailable(repoState, state, repoDeps, context, useTUI);
|
|
3415
|
-
} else {
|
|
3416
|
-
logMessage(state, context, useTUI, `No repository state found for ${repoName}`, "stderr");
|
|
3747
|
+
attrs.push(`gitUrl=${r.gitUrl}`);
|
|
3748
|
+
if (r.url !== undefined) {
|
|
3749
|
+
attrs.push(`url=${r.url}`);
|
|
3750
|
+
}
|
|
3751
|
+
if (r.hasTask !== undefined) {
|
|
3752
|
+
attrs.push(`hasTask=${r.hasTask}`);
|
|
3417
3753
|
}
|
|
3754
|
+
logMessage(state, context, useTUI, ` - ${attrs.join(", ")}`);
|
|
3418
3755
|
}
|
|
3419
3756
|
}
|
|
3420
|
-
|
|
3421
|
-
|
|
3757
|
+
syncUIWithRepoList(state, repos);
|
|
3758
|
+
const repoDeps = toRepositoryDependencies(bucketDependencies, fileSystem);
|
|
3759
|
+
const repoContext = createTUIContext(state, context, useTUI);
|
|
3760
|
+
handleRepositoryList(repos, state, repoDeps, repoContext).then(() => {
|
|
3761
|
+
syncTUI(state);
|
|
3762
|
+
for (const repoData of repos) {
|
|
3763
|
+
if (repoData.hasTask) {
|
|
3764
|
+
const repoState = state.repositories.get(repoData.name);
|
|
3765
|
+
if (repoState) {
|
|
3766
|
+
signalTaskAvailable(repoState, state, repoDeps, context, useTUI);
|
|
3767
|
+
}
|
|
3768
|
+
}
|
|
3769
|
+
}
|
|
3770
|
+
}).catch((error) => {
|
|
3771
|
+
logMessage(state, context, useTUI, `Failed to handle repository list: ${error.message}`, "stderr");
|
|
3772
|
+
});
|
|
3773
|
+
} else if (message.type === "task-available") {
|
|
3774
|
+
const repoName = message.repository;
|
|
3775
|
+
const repoDeps = toRepositoryDependencies(bucketDependencies, fileSystem);
|
|
3776
|
+
logMessage(state, context, useTUI, `Received task-available for ${repoName}`);
|
|
3777
|
+
const repoState = state.repositories.get(repoName);
|
|
3778
|
+
if (repoState) {
|
|
3779
|
+
signalTaskAvailable(repoState, state, repoDeps, context, useTUI);
|
|
3780
|
+
} else {
|
|
3781
|
+
logMessage(state, context, useTUI, `No repository state found for ${repoName}`, "stderr");
|
|
3782
|
+
}
|
|
3422
3783
|
}
|
|
3423
3784
|
};
|
|
3424
3785
|
}
|
|
@@ -3502,9 +3863,13 @@ async function resolveToken(authDeps, context) {
|
|
|
3502
3863
|
return null;
|
|
3503
3864
|
}
|
|
3504
3865
|
}
|
|
3505
|
-
async function
|
|
3866
|
+
async function bucketWorker(dependencies, bucketDeps = createDefaultBucketDependencies()) {
|
|
3506
3867
|
enableFileLogs("bucket");
|
|
3507
3868
|
const { context, fileSystem } = dependencies;
|
|
3869
|
+
if (isUnattended()) {
|
|
3870
|
+
context.stderr("dust bucket cannot run inside an unattended session (DUST_UNATTENDED is set)");
|
|
3871
|
+
return { exitCode: 1 };
|
|
3872
|
+
}
|
|
3508
3873
|
const token = await resolveToken(bucketDeps.auth, context);
|
|
3509
3874
|
if (!token) {
|
|
3510
3875
|
return { exitCode: 1 };
|
|
@@ -3964,6 +4329,11 @@ function validateTitleFilenameMatch(filePath, content) {
|
|
|
3964
4329
|
}
|
|
3965
4330
|
|
|
3966
4331
|
// lib/lint/validators/idea-validator.ts
|
|
4332
|
+
var WORKFLOW_PREFIX_TO_SECTION = {
|
|
4333
|
+
"Refine Idea: ": "Refines Idea",
|
|
4334
|
+
"Decompose Idea: ": "Decomposes Idea",
|
|
4335
|
+
"Shelve Idea: ": "Shelves Idea"
|
|
4336
|
+
};
|
|
3967
4337
|
function validateIdeaOpenQuestions(filePath, content) {
|
|
3968
4338
|
const violations = [];
|
|
3969
4339
|
const lines = content.split(`
|
|
@@ -4077,6 +4447,114 @@ function validateIdeaTransitionTitle(filePath, content, ideasPath, fileSystem) {
|
|
|
4077
4447
|
}
|
|
4078
4448
|
return null;
|
|
4079
4449
|
}
|
|
4450
|
+
function extractSectionContent(content, sectionHeading) {
|
|
4451
|
+
const lines = content.split(`
|
|
4452
|
+
`);
|
|
4453
|
+
let inSection = false;
|
|
4454
|
+
let sectionContent = "";
|
|
4455
|
+
let startLine = 0;
|
|
4456
|
+
for (let i = 0;i < lines.length; i++) {
|
|
4457
|
+
const line = lines[i];
|
|
4458
|
+
if (line.startsWith("## ")) {
|
|
4459
|
+
if (inSection)
|
|
4460
|
+
break;
|
|
4461
|
+
if (line.trimEnd() === `## ${sectionHeading}`) {
|
|
4462
|
+
inSection = true;
|
|
4463
|
+
startLine = i + 1;
|
|
4464
|
+
}
|
|
4465
|
+
continue;
|
|
4466
|
+
}
|
|
4467
|
+
if (line.startsWith("# ") && inSection)
|
|
4468
|
+
break;
|
|
4469
|
+
if (inSection) {
|
|
4470
|
+
sectionContent += `${line}
|
|
4471
|
+
`;
|
|
4472
|
+
}
|
|
4473
|
+
}
|
|
4474
|
+
if (!inSection)
|
|
4475
|
+
return null;
|
|
4476
|
+
return { content: sectionContent, startLine };
|
|
4477
|
+
}
|
|
4478
|
+
// Lint check for "workflow" tasks, i.e. tasks whose title starts with one of
// the IDEA_TRANSITION_PREFIXES ("Refine Idea: ", "Decompose Idea: ",
// "Shelve Idea: "). Such a task must contain the matching
// "## <Refines|Decomposes|Shelves> Idea" section, that section must hold at
// least one markdown link pointing into ../ideas/, and each linked idea file
// must exist under ideasPath (checked via fileSystem.exists).
// Returns an array of violation records ({ file, message, line? });
// empty when the file is not a workflow task or passes every check.
function validateWorkflowTaskBodySection(filePath, content, ideasPath, fileSystem) {
  const violations = [];
  const title = extractTitle(content);
  if (!title) return violations;
  // Only titles carrying a known transition prefix are workflow tasks.
  const matchedPrefix = IDEA_TRANSITION_PREFIXES.find((prefix) => title.startsWith(prefix));
  if (!matchedPrefix) return violations;
  const expectedHeading = WORKFLOW_PREFIX_TO_SECTION[matchedPrefix];
  const section = extractSectionContent(content, expectedHeading);
  if (!section) {
    violations.push({
      file: filePath,
      message: `Workflow task with "${matchedPrefix.trim()}" prefix is missing required "## ${expectedHeading}" section. Add a section with a link to the idea file, e.g.:

## ${expectedHeading}

- [Idea Title](../ideas/idea-slug.md)`
    });
    return violations;
  }
  // Collect every markdown link in the section, tagging each with the
  // 1-based file line it sits on so violations can point at it precisely.
  const linkRegex = new RegExp(MARKDOWN_LINK_PATTERN.source, "g");
  const links = [];
  const sectionLines = section.content.split("\n");
  sectionLines.forEach((sectionLine, offset) => {
    for (const found of sectionLine.matchAll(linkRegex)) {
      links.push({
        text: found[1],
        target: found[2],
        line: section.startLine + offset + 1
      });
    }
  });
  if (links.length === 0) {
    violations.push({
      file: filePath,
      message: `"## ${expectedHeading}" section contains no link. Add a markdown link to the idea file, e.g.:

- [Idea Title](../ideas/idea-slug.md)`,
      line: section.startLine
    });
    return violations;
  }
  // Keep only links that point into an ideas directory.
  const ideaLinks = links.filter((candidate) => candidate.target.includes("/ideas/") || candidate.target.startsWith("../ideas/"));
  if (ideaLinks.length === 0) {
    violations.push({
      file: filePath,
      message: `"## ${expectedHeading}" section contains no link to an idea file. Links must point to a file in ../ideas/, e.g.:

- [Idea Title](../ideas/idea-slug.md)`,
      line: section.startLine
    });
    return violations;
  }
  // Every idea link must resolve to an existing file under ideasPath.
  for (const ideaLink of ideaLinks) {
    const slugMatch = ideaLink.target.match(/([^/]+)\.md$/);
    if (!slugMatch) continue;
    const ideaSlug = slugMatch[1];
    const ideaFilePath = `${ideasPath}/${ideaSlug}.md`;
    if (!fileSystem.exists(ideaFilePath)) {
      violations.push({
        file: filePath,
        message: `Link to idea "${ideaLink.text}" points to non-existent file: ${ideaSlug}.md. Either create the idea file at ideas/${ideaSlug}.md or update the link to point to an existing idea.`,
        line: ideaLink.line
      });
    }
  }
  return violations;
}
|
|
4080
4558
|
|
|
4081
4559
|
// lib/lint/validators/link-validator.ts
|
|
4082
4560
|
import { dirname as dirname3, resolve } from "node:path";
|
|
@@ -4480,6 +4958,7 @@ async function lintMarkdown(dependencies) {
|
|
|
4480
4958
|
if (ideaTransitionViolation) {
|
|
4481
4959
|
violations.push(ideaTransitionViolation);
|
|
4482
4960
|
}
|
|
4961
|
+
violations.push(...validateWorkflowTaskBodySection(filePath, content, ideasPath, fileSystem));
|
|
4483
4962
|
}
|
|
4484
4963
|
}
|
|
4485
4964
|
const principlesPath = `${dustPath}/principles`;
|
|
@@ -4706,6 +5185,7 @@ function generateHelpText(settings) {
|
|
|
4706
5185
|
next Show tasks ready to work on (not blocked)
|
|
4707
5186
|
check Run project-defined quality gate hook
|
|
4708
5187
|
agent Agent greeting and routing instructions
|
|
5188
|
+
audit Create tasks from audit templates
|
|
4709
5189
|
focus Declare current objective (for remote session tracking)
|
|
4710
5190
|
pick task Pick the next task to work on
|
|
4711
5191
|
implement task Implement a task
|
|
@@ -5428,7 +5908,7 @@ async function prePush(dependencies, gitRunner = defaultGitRunner, env = process
|
|
|
5428
5908
|
if (agent2.type === "unknown") {
|
|
5429
5909
|
return { exitCode: 0 };
|
|
5430
5910
|
}
|
|
5431
|
-
if (env
|
|
5911
|
+
if (isUnattended(env)) {
|
|
5432
5912
|
const uncommittedFiles = await getUncommittedFiles(context.cwd, gitRunner);
|
|
5433
5913
|
if (uncommittedFiles.length > 0) {
|
|
5434
5914
|
context.stderr("");
|
|
@@ -5498,7 +5978,7 @@ var commandRegistry = {
|
|
|
5498
5978
|
check,
|
|
5499
5979
|
agent,
|
|
5500
5980
|
audit,
|
|
5501
|
-
bucket,
|
|
5981
|
+
"bucket worker": bucketWorker,
|
|
5502
5982
|
"bucket asset upload": bucketAssetUpload,
|
|
5503
5983
|
focus,
|
|
5504
5984
|
"new task": newTask,
|