@grainulation/orchard 1.0.0 → 1.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CODE_OF_CONDUCT.md +25 -0
- package/CONTRIBUTING.md +96 -0
- package/README.md +52 -61
- package/bin/orchard.js +227 -80
- package/lib/assignments.js +19 -17
- package/lib/conflicts.js +177 -29
- package/lib/dashboard.js +100 -47
- package/lib/decompose.js +268 -0
- package/lib/doctor.js +48 -32
- package/lib/export.js +72 -44
- package/lib/farmer.js +54 -38
- package/lib/hackathon.js +349 -0
- package/lib/planner.js +150 -21
- package/lib/server.js +396 -203
- package/lib/sync.js +31 -25
- package/lib/tracker.js +52 -40
- package/package.json +10 -4
package/lib/decompose.js
ADDED
|
@@ -0,0 +1,268 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
const fs = require("node:fs");
|
|
4
|
+
const path = require("node:path");
|
|
5
|
+
|
|
6
|
+
/**
|
|
7
|
+
* Auto-decompose a research question into sub-sprints.
|
|
8
|
+
*
|
|
9
|
+
* Uses heuristic keyword analysis to break a broad question into
|
|
10
|
+
* focused sub-questions, each becoming its own sprint with appropriate
|
|
11
|
+
* dependency relationships.
|
|
12
|
+
*
|
|
13
|
+
* This is a deterministic, offline decomposition — no LLM required.
|
|
14
|
+
* For richer decomposition, pipe through wheat's claim system.
|
|
15
|
+
*/
|
|
16
|
+
|
|
17
|
+
/**
 * Facets that broad research questions typically decompose into.
 * Each facet has trigger keywords and a question template.
 *
 * Order matters: when no triggers match, `decompose` falls back to the
 * first `minFacets` entries, so earlier facets are the default picks.
 * Triggers are matched as lowercase substrings of the question (see
 * scoreFacets); `template` rewrites the original question into a
 * focused sub-question for the facet.
 */
const FACETS = [
  {
    // Implementation/architecture angle of the question.
    name: "technical",
    triggers: [
      "how",
      "implement",
      "build",
      "architecture",
      "system",
      "code",
      "api",
      "stack",
      "platform",
      "tool",
    ],
    template: (q) =>
      `What are the technical requirements and architecture for: ${q}`,
  },
  {
    // Design/interface angle.
    name: "user-experience",
    triggers: [
      "user",
      "ux",
      "design",
      "interface",
      "experience",
      "mobile",
      "web",
      "dashboard",
      "visual",
    ],
    template: (q) => `What should the user experience look like for: ${q}`,
  },
  {
    // Competitive landscape / prior art angle.
    name: "market",
    triggers: [
      "market",
      "competitor",
      "existing",
      "prior",
      "landscape",
      "alternative",
      "compare",
      "industry",
    ],
    template: (q) => `What does the competitive landscape look like for: ${q}`,
  },
  {
    // Cost/risk/constraint angle.
    name: "feasibility",
    triggers: [
      "cost",
      "time",
      "effort",
      "feasible",
      "risk",
      "constraint",
      "limit",
      "budget",
      "resource",
    ],
    template: (q) => `What are the feasibility constraints and risks for: ${q}`,
  },
  {
    // Rollout/organizational-change angle.
    name: "adoption",
    triggers: [
      "adopt",
      "rollout",
      "migration",
      "team",
      "org",
      "enterprise",
      "onboard",
      "training",
      "change",
    ],
    template: (q) => `What does adoption and rollout look like for: ${q}`,
  },
  {
    // Success-metrics / evaluation angle.
    name: "measurement",
    triggers: [
      "measure",
      "metric",
      "success",
      "kpi",
      "track",
      "outcome",
      "impact",
      "evaluate",
      "test",
    ],
    template: (q) => `How do we measure success for: ${q}`,
  },
];
|
|
114
|
+
|
|
115
|
+
/**
 * Score how relevant each facet is to the question.
 *
 * A trigger counts as a hit when it appears anywhere in the lowercased
 * question (substring match, so "markets" also matches the "market"
 * trigger).
 *
 * @param {string} question - Research question to analyze.
 * @returns {Array<object>} One entry per facet, in FACETS order, each a
 *   shallow copy of the facet with an added numeric `score` (hit count).
 */
function scoreFacets(question) {
  // Lowercase once; the previous version recomputed it for every trigger.
  const q = question.toLowerCase();
  // A substring check subsumes the old whole-word Set lookup (every
  // whitespace-split word of `q` is also a substring of `q`), so the
  // Set was redundant work.
  return FACETS.map((f) => ({
    ...f,
    score: f.triggers.filter((t) => q.includes(t)).length,
  }));
}
|
|
127
|
+
|
|
128
|
+
/**
 * Decompose a question into sub-sprints.
 * Returns an array of sprint configs ready to add to orchard.json.
 *
 * Options:
 *   maxSprints: maximum number of sub-sprints (default 5)
 *   prefix: path prefix for sprint directories (default 'sprints')
 *   minFacets: minimum facets even if no keywords match (default 2)
 *
 * @param {string} question - The broad research question.
 * @param {object} [opts] - See options above.
 * @returns {Array<{path: string, name: string, question: string, depends_on: string[]}>}
 */
function decompose(question, opts = {}) {
  const maxSprints = opts.maxSprints || 5;
  const prefix = opts.prefix || "sprints";
  const minFacets = opts.minFacets || 2;

  const scored = scoreFacets(question);

  // Pick facets: all with score > 0, or top minFacets if none match.
  let selected = scored.filter((f) => f.score > 0);
  if (selected.length < minFacets) {
    // Tie-break by declaration order. NOTE: `scored` entries are spread
    // copies, so the old `FACETS.indexOf(a)` identity lookup always
    // returned -1 and the tiebreaker was a no-op (only sort stability
    // saved it). Carry the original index explicitly instead.
    selected = scored
      .map((facet, idx) => ({ facet, idx }))
      .sort((a, b) => b.facet.score - a.facet.score || a.idx - b.idx)
      .slice(0, minFacets)
      .map((entry) => entry.facet);
  }

  // Cap at maxSprints
  selected = selected.slice(0, maxSprints);

  // Generate slug from question. Fall back to a fixed slug when the
  // question has no alphanumeric characters at all, so sprint names
  // never start with a bare "-".
  const slug =
    question
      .toLowerCase()
      .replace(/[^a-z0-9]+/g, "-")
      .replace(/^-|-$/g, "")
      .substring(0, 40) || "question";

  const sprints = selected.map((facet, i) => ({
    path: path.join(prefix, `${slug}-${facet.name}`),
    name: `${slug}-${facet.name}`,
    question: facet.template(question),
    // Every non-root facet depends on the first (highest-priority) one.
    depends_on:
      i === 0 ? [] : [path.join(prefix, `${slug}-${selected[0].name}`)],
  }));

  // Add a synthesis sprint that depends on all others
  if (sprints.length >= 2) {
    sprints.push({
      path: path.join(prefix, `${slug}-synthesis`),
      name: `${slug}-synthesis`,
      question: `Synthesize findings across all sub-sprints for: ${question}`,
      depends_on: sprints.map((s) => s.path),
    });
  }

  return sprints;
}
|
|
184
|
+
|
|
185
|
+
/**
 * Apply decomposition: create directories and update orchard.json.
 *
 * For each generated sub-sprint this creates the sprint directory,
 * seeds a claims.json (only if one does not already exist), then merges
 * the new sprints into orchard.json at `root`, skipping any sprint path
 * already present so repeated application is idempotent.
 *
 * @param {string} root - Orchard root directory.
 * @param {string} question - Question passed through to decompose().
 * @param {object} [opts] - Options forwarded to decompose().
 * @returns {Array<object>} The generated sprint configs.
 */
function applyDecomposition(root, question, opts = {}) {
  const generated = decompose(question, opts);

  // Create sprint directories, each seeded with an initial claims.json.
  for (const entry of generated) {
    const sprintDir = path.join(root, entry.path);
    fs.mkdirSync(sprintDir, { recursive: true });

    const claimsFile = path.join(sprintDir, "claims.json");
    if (fs.existsSync(claimsFile)) continue; // never clobber existing claims

    const seed = {
      schema_version: "1.0",
      meta: {
        question: entry.question,
        // YYYY-MM-DD portion of the ISO timestamp.
        initiated: new Date().toISOString().slice(0, 10),
        audience: [],
        phase: "define",
        connectors: [],
      },
      claims: [],
    };
    fs.writeFileSync(claimsFile, JSON.stringify(seed, null, 2) + "\n", "utf8");
  }

  // Merge into orchard.json, preserving any existing configuration.
  const configFile = path.join(root, "orchard.json");
  const config = fs.existsSync(configFile)
    ? JSON.parse(fs.readFileSync(configFile, "utf8"))
    : { sprints: [] };
  if (!config.sprints) config.sprints = [];

  const known = new Set(config.sprints.map((s) => s.path));
  const additions = generated.filter((entry) => !known.has(entry.path));
  config.sprints.push(...additions);

  fs.writeFileSync(configFile, JSON.stringify(config, null, 2) + "\n", "utf8");

  return generated;
}
|
|
236
|
+
|
|
237
|
+
/**
 * Print decomposition plan without applying it.
 *
 * Dry-run companion to applyDecomposition: renders each generated
 * sub-sprint with its dependency list, then the command to apply.
 *
 * @param {string} question - Question passed through to decompose().
 * @param {object} [opts] - Options forwarded to decompose().
 */
function printDecomposition(question, opts = {}) {
  const plan = decompose(question, opts);
  const say = (line) => console.log(line);

  say("");
  say(` Auto-decompose: "${question}"`);
  say(" " + "=".repeat(50));
  say(` ${plan.length} sub-sprints generated:`);
  say("");

  for (const sprint of plan) {
    let suffix = " (root)";
    if (sprint.depends_on.length > 0) {
      const names = sprint.depends_on.map((d) => path.basename(d)).join(", ");
      suffix = ` (depends on: ${names})`;
    }
    say(` ${path.basename(sprint.path)}${suffix}`);
    say(`   Q: ${sprint.question}`);
  }

  say("");
  say(' Apply with: orchard decompose --apply "<question>"');
  say("");
}
|
|
261
|
+
|
|
262
|
+
// Public API. scoreFacets and FACETS are exported alongside the main
// entry points (decompose / applyDecomposition / printDecomposition)
// so callers and tests can inspect or extend the facet heuristics.
module.exports = {
  decompose,
  applyDecomposition,
  printDecomposition,
  scoreFacets,
  FACETS,
};
|
package/lib/doctor.js
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
|
-
|
|
1
|
+
"use strict";
|
|
2
2
|
|
|
3
|
-
const fs = require(
|
|
4
|
-
const path = require(
|
|
3
|
+
const fs = require("node:fs");
|
|
4
|
+
const path = require("node:path");
|
|
5
5
|
|
|
6
6
|
/**
|
|
7
7
|
* Run all doctor checks against the orchard root directory.
|
|
@@ -11,12 +11,14 @@ function runChecks(root) {
|
|
|
11
11
|
const checks = [];
|
|
12
12
|
|
|
13
13
|
// 1. orchard.json present and parseable
|
|
14
|
-
const configPath = path.join(root,
|
|
14
|
+
const configPath = path.join(root, "orchard.json");
|
|
15
15
|
const configExists = fs.existsSync(configPath);
|
|
16
16
|
checks.push({
|
|
17
|
-
name:
|
|
17
|
+
name: "orchard.json exists",
|
|
18
18
|
ok: configExists,
|
|
19
|
-
detail: configExists
|
|
19
|
+
detail: configExists
|
|
20
|
+
? configPath
|
|
21
|
+
: 'Not found. Run "orchard init" to create one.',
|
|
20
22
|
});
|
|
21
23
|
|
|
22
24
|
if (!configExists) {
|
|
@@ -25,30 +27,36 @@ function runChecks(root) {
|
|
|
25
27
|
|
|
26
28
|
let config;
|
|
27
29
|
try {
|
|
28
|
-
config = JSON.parse(fs.readFileSync(configPath,
|
|
29
|
-
checks.push({ name:
|
|
30
|
+
config = JSON.parse(fs.readFileSync(configPath, "utf8"));
|
|
31
|
+
checks.push({ name: "orchard.json is valid JSON", ok: true, detail: "" });
|
|
30
32
|
} catch (err) {
|
|
31
|
-
checks.push({
|
|
33
|
+
checks.push({
|
|
34
|
+
name: "orchard.json is valid JSON",
|
|
35
|
+
ok: false,
|
|
36
|
+
detail: err.message,
|
|
37
|
+
});
|
|
32
38
|
return { checks, ok: false };
|
|
33
39
|
}
|
|
34
40
|
|
|
35
41
|
const sprints = config.sprints || [];
|
|
36
42
|
checks.push({
|
|
37
|
-
name:
|
|
43
|
+
name: "sprints defined",
|
|
38
44
|
ok: sprints.length > 0,
|
|
39
|
-
detail:
|
|
40
|
-
|
|
41
|
-
|
|
45
|
+
detail:
|
|
46
|
+
sprints.length > 0
|
|
47
|
+
? `${sprints.length} sprint(s) configured`
|
|
48
|
+
: "No sprints in orchard.json",
|
|
42
49
|
});
|
|
43
50
|
|
|
44
51
|
// 2. Sprint directories reachable
|
|
45
52
|
let allReachable = true;
|
|
46
53
|
for (const sprint of sprints) {
|
|
47
|
-
const sprintDir =
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
54
|
+
const sprintDir =
|
|
55
|
+
sprint.path === "."
|
|
56
|
+
? root
|
|
57
|
+
: path.isAbsolute(sprint.path)
|
|
58
|
+
? sprint.path
|
|
59
|
+
: path.join(root, sprint.path);
|
|
52
60
|
const exists = fs.existsSync(sprintDir);
|
|
53
61
|
if (!exists) allReachable = false;
|
|
54
62
|
checks.push({
|
|
@@ -78,9 +86,12 @@ function runChecks(root) {
|
|
|
78
86
|
}
|
|
79
87
|
}
|
|
80
88
|
if (allDepsResolved && sprints.length > 0) {
|
|
81
|
-
const totalDeps = sprints.reduce(
|
|
89
|
+
const totalDeps = sprints.reduce(
|
|
90
|
+
(n, s) => n + (s.depends_on || []).length,
|
|
91
|
+
0,
|
|
92
|
+
);
|
|
82
93
|
checks.push({
|
|
83
|
-
name:
|
|
94
|
+
name: "all dependencies resolve",
|
|
84
95
|
ok: true,
|
|
85
96
|
detail: `${totalDeps} dependency link(s) verified`,
|
|
86
97
|
});
|
|
@@ -89,17 +100,19 @@ function runChecks(root) {
|
|
|
89
100
|
// 4. Cycle detection
|
|
90
101
|
let hasCycles = false;
|
|
91
102
|
try {
|
|
92
|
-
const { detectCycles } = require(
|
|
103
|
+
const { detectCycles } = require("./planner.js");
|
|
93
104
|
const cycles = detectCycles(config);
|
|
94
105
|
hasCycles = cycles.length > 0;
|
|
95
106
|
checks.push({
|
|
96
|
-
name:
|
|
107
|
+
name: "no dependency cycles",
|
|
97
108
|
ok: !hasCycles,
|
|
98
|
-
detail: hasCycles
|
|
109
|
+
detail: hasCycles
|
|
110
|
+
? `Cycle involving: ${cycles.join(", ")}`
|
|
111
|
+
: "Dependency graph is acyclic",
|
|
99
112
|
});
|
|
100
113
|
} catch (err) {
|
|
101
114
|
checks.push({
|
|
102
|
-
name:
|
|
115
|
+
name: "no dependency cycles",
|
|
103
116
|
ok: false,
|
|
104
117
|
detail: `Cycle check failed: ${err.message}`,
|
|
105
118
|
});
|
|
@@ -113,13 +126,13 @@ function runChecks(root) {
|
|
|
113
126
|
* Print doctor results to stdout.
|
|
114
127
|
*/
|
|
115
128
|
function printReport(result) {
|
|
116
|
-
console.log(
|
|
117
|
-
console.log(
|
|
118
|
-
console.log(
|
|
119
|
-
console.log(
|
|
129
|
+
console.log("");
|
|
130
|
+
console.log(" orchard doctor");
|
|
131
|
+
console.log(" " + "=".repeat(40));
|
|
132
|
+
console.log("");
|
|
120
133
|
|
|
121
134
|
for (const check of result.checks) {
|
|
122
|
-
const icon = check.ok ?
|
|
135
|
+
const icon = check.ok ? "ok" : "FAIL";
|
|
123
136
|
const line = ` [${icon.padEnd(4)}] ${check.name}`;
|
|
124
137
|
console.log(line);
|
|
125
138
|
if (check.detail && !check.ok) {
|
|
@@ -127,11 +140,14 @@ function printReport(result) {
|
|
|
127
140
|
}
|
|
128
141
|
}
|
|
129
142
|
|
|
130
|
-
console.log(
|
|
143
|
+
console.log("");
|
|
131
144
|
const passed = result.checks.filter((c) => c.ok).length;
|
|
132
145
|
const total = result.checks.length;
|
|
133
|
-
console.log(
|
|
134
|
-
|
|
146
|
+
console.log(
|
|
147
|
+
` ${passed}/${total} checks passed` +
|
|
148
|
+
(result.ok ? " -- all healthy" : " -- issues found"),
|
|
149
|
+
);
|
|
150
|
+
console.log("");
|
|
135
151
|
}
|
|
136
152
|
|
|
137
153
|
module.exports = { runChecks, printReport };
|
package/lib/export.js
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
|
|
1
|
+
"use strict";
|
|
2
2
|
|
|
3
3
|
/**
|
|
4
4
|
* orchard -> mill edge: export trigger for completed sprints.
|
|
@@ -8,14 +8,14 @@
|
|
|
8
8
|
* or filesystem. Graceful fallback if mill is not available.
|
|
9
9
|
*/
|
|
10
10
|
|
|
11
|
-
const fs = require(
|
|
12
|
-
const path = require(
|
|
13
|
-
const http = require(
|
|
11
|
+
const fs = require("node:fs");
|
|
12
|
+
const path = require("node:path");
|
|
13
|
+
const http = require("node:http");
|
|
14
14
|
|
|
15
15
|
const MILL_PORT = 9094;
|
|
16
16
|
const MILL_SIBLINGS = [
|
|
17
|
-
path.join(__dirname,
|
|
18
|
-
path.join(__dirname,
|
|
17
|
+
path.join(__dirname, "..", "..", "mill"),
|
|
18
|
+
path.join(__dirname, "..", "..", "..", "mill"),
|
|
19
19
|
];
|
|
20
20
|
|
|
21
21
|
/**
|
|
@@ -24,14 +24,16 @@ const MILL_SIBLINGS = [
|
|
|
24
24
|
*/
|
|
25
25
|
function detectMill() {
|
|
26
26
|
for (const dir of MILL_SIBLINGS) {
|
|
27
|
-
const pkg = path.join(dir,
|
|
27
|
+
const pkg = path.join(dir, "package.json");
|
|
28
28
|
if (fs.existsSync(pkg)) {
|
|
29
29
|
try {
|
|
30
|
-
const meta = JSON.parse(fs.readFileSync(pkg,
|
|
31
|
-
if (meta.name ===
|
|
32
|
-
return { available: true, method:
|
|
30
|
+
const meta = JSON.parse(fs.readFileSync(pkg, "utf8"));
|
|
31
|
+
if (meta.name === "@grainulation/mill") {
|
|
32
|
+
return { available: true, method: "filesystem", path: dir };
|
|
33
33
|
}
|
|
34
|
-
} catch {
|
|
34
|
+
} catch {
|
|
35
|
+
continue;
|
|
36
|
+
}
|
|
35
37
|
}
|
|
36
38
|
}
|
|
37
39
|
return { available: false };
|
|
@@ -46,27 +48,44 @@ function detectMill() {
|
|
|
46
48
|
function exportSprint(sprintPath, format) {
|
|
47
49
|
return new Promise((resolve) => {
|
|
48
50
|
const body = JSON.stringify({ format, options: { source: sprintPath } });
|
|
49
|
-
const req = http.request(
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
}
|
|
66
|
-
|
|
51
|
+
const req = http.request(
|
|
52
|
+
{
|
|
53
|
+
hostname: "127.0.0.1",
|
|
54
|
+
port: MILL_PORT,
|
|
55
|
+
path: "/api/export",
|
|
56
|
+
method: "POST",
|
|
57
|
+
headers: {
|
|
58
|
+
"Content-Type": "application/json",
|
|
59
|
+
"Content-Length": Buffer.byteLength(body),
|
|
60
|
+
},
|
|
61
|
+
timeout: 5000,
|
|
62
|
+
},
|
|
63
|
+
(res) => {
|
|
64
|
+
let data = "";
|
|
65
|
+
res.on("data", (chunk) => {
|
|
66
|
+
data += chunk;
|
|
67
|
+
});
|
|
68
|
+
res.on("end", () => {
|
|
69
|
+
try {
|
|
70
|
+
const result = JSON.parse(data);
|
|
71
|
+
resolve(
|
|
72
|
+
result.error
|
|
73
|
+
? { ok: false, error: result.error }
|
|
74
|
+
: { ok: true, job: result.job },
|
|
75
|
+
);
|
|
76
|
+
} catch {
|
|
77
|
+
resolve({ ok: false, error: "Invalid response from mill" });
|
|
78
|
+
}
|
|
79
|
+
});
|
|
80
|
+
},
|
|
81
|
+
);
|
|
82
|
+
req.on("error", () =>
|
|
83
|
+
resolve({ ok: false, error: "mill not reachable on port " + MILL_PORT }),
|
|
84
|
+
);
|
|
85
|
+
req.on("timeout", () => {
|
|
86
|
+
req.destroy();
|
|
87
|
+
resolve({ ok: false, error: "mill request timed out" });
|
|
67
88
|
});
|
|
68
|
-
req.on('error', () => resolve({ ok: false, error: 'mill not reachable on port ' + MILL_PORT }));
|
|
69
|
-
req.on('timeout', () => { req.destroy(); resolve({ ok: false, error: 'mill request timed out' }); });
|
|
70
89
|
req.write(body);
|
|
71
90
|
req.end();
|
|
72
91
|
});
|
|
@@ -78,20 +97,29 @@ function exportSprint(sprintPath, format) {
|
|
|
78
97
|
*/
|
|
79
98
|
function listFormats() {
|
|
80
99
|
return new Promise((resolve) => {
|
|
81
|
-
const req = http.get(
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
res
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
}
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
100
|
+
const req = http.get(
|
|
101
|
+
`http://127.0.0.1:${MILL_PORT}/api/formats`,
|
|
102
|
+
{ timeout: 2000 },
|
|
103
|
+
(res) => {
|
|
104
|
+
let body = "";
|
|
105
|
+
res.on("data", (chunk) => {
|
|
106
|
+
body += chunk;
|
|
107
|
+
});
|
|
108
|
+
res.on("end", () => {
|
|
109
|
+
try {
|
|
110
|
+
const data = JSON.parse(body);
|
|
111
|
+
resolve({ available: true, formats: data.formats || [] });
|
|
112
|
+
} catch {
|
|
113
|
+
resolve({ available: false });
|
|
114
|
+
}
|
|
115
|
+
});
|
|
116
|
+
},
|
|
117
|
+
);
|
|
118
|
+
req.on("error", () => resolve({ available: false }));
|
|
119
|
+
req.on("timeout", () => {
|
|
120
|
+
req.destroy();
|
|
121
|
+
resolve({ available: false });
|
|
92
122
|
});
|
|
93
|
-
req.on('error', () => resolve({ available: false }));
|
|
94
|
-
req.on('timeout', () => { req.destroy(); resolve({ available: false }); });
|
|
95
123
|
});
|
|
96
124
|
}
|
|
97
125
|
|