prjct-cli 1.6.4 → 1.6.6

This diff shows the changes between publicly released versions of this package as they appear in the supported public registries. It is provided for informational purposes only.
package/CHANGELOG.md CHANGED
@@ -1,11 +1,24 @@
  # Changelog
 
- ## [1.6.4] - 2026-02-07
+ ## [1.6.6] - 2026-02-07
 
- ### Bug Fixes
+ ### Refactoring
+
+ - extract hardcoded values to constants (PRJ-71) (#129)
+
+
+ ## [1.6.8] - 2026-02-07
+
+ ### Refactor
+
+ - **Extract hardcoded values to constants (PRJ-71)**: Added `OUTPUT_LIMITS`, `STORAGE_LIMITS`, `EVENT_LIMITS` to `core/constants/index.ts`. Replaced 16 magic numbers across `output.ts` (11 truncation lengths), `bus.ts` (history limit), and `jsonl-helper.ts` (max lines, rotation size, warning threshold). All limits now configurable from one place with `as const` typing.
+
+
+ ## [1.6.7] - 2026-02-07
 
- - add type guards for error handling (PRJ-79) (#127)
+ ### Refactor
 
+ - **Extract common agent-base.md template (PRJ-95)**: Created `templates/subagents/agent-base.md` with shared project context (path resolution, storage locations, rules). Added `{{> partial }}` include resolution in `sync-service.ts` that resolves partials during agent generation. Updated all 9 agent templates (5 domain + 4 workflow) to use `{{> agent-base }}` instead of duplicated content. Saves ~200 tokens per additional agent template.
 
  ## [1.6.6] - 2026-02-07
 
package/core/bus/bus.ts CHANGED
@@ -9,6 +9,7 @@
 
  import fs from 'node:fs/promises'
  import path from 'node:path'
+ import { EVENT_LIMITS } from '../constants'
  import { getErrorMessage } from '../errors'
  import pathManager from '../infrastructure/path-manager'
  import {
@@ -45,7 +46,7 @@ class EventBus {
  this.listeners = new Map()
  this.onceListeners = new Map()
  this.history = []
- this.historyLimit = 100
+ this.historyLimit = EVENT_LIMITS.HISTORY_MAX
  this.projectId = null
  }
 
package/core/constants/index.ts CHANGED
@@ -264,6 +264,65 @@ export function getTimeout(key: TimeoutKey): number {
  return TIMEOUTS[key]
  }
 
+ // =============================================================================
+ // Output Limits (PRJ-71)
+ // =============================================================================
+
+ /**
+ * Truncation lengths for CLI output messages.
+ * Centralizes magic numbers from output.ts.
+ */
+ export const OUTPUT_LIMITS = {
+ /** Spinner message truncation */
+ SPINNER_MSG: 45,
+ /** Done/success message truncation */
+ DONE_MSG: 50,
+ /** Fail message truncation */
+ FAIL_MSG: 65,
+ /** Warn message truncation */
+ WARN_MSG: 65,
+ /** Step counter message truncation */
+ STEP_MSG: 35,
+ /** Progress bar text truncation */
+ PROGRESS_TEXT: 25,
+ /** Issue title truncation in lists */
+ ISSUE_TITLE: 50,
+ /** Fallback truncation when tier config is 0 */
+ FALLBACK_TRUNCATE: 50,
+ /** Terminal clear width */
+ CLEAR_WIDTH: 80,
+ } as const
+
+ // =============================================================================
+ // Storage Limits (PRJ-71)
+ // =============================================================================
+
+ /**
+ * File size and line limits for JSONL/storage operations.
+ * Centralizes magic numbers from jsonl-helper.ts.
+ */
+ export const STORAGE_LIMITS = {
+ /** Default max lines for streaming JSONL reads */
+ JSONL_MAX_LINES: 1000,
+ /** File rotation threshold in MB */
+ ROTATION_SIZE_MB: 10,
+ /** Warning threshold for large files in MB */
+ LARGE_FILE_WARN_MB: 50,
+ } as const
+
+ // =============================================================================
+ // Event Bus Limits (PRJ-71)
+ // =============================================================================
+
+ /**
+ * Event bus configuration limits.
+ * Centralizes magic numbers from bus.ts.
+ */
+ export const EVENT_LIMITS = {
+ /** Max events kept in history */
+ HISTORY_MAX: 100,
+ } as const
+
  // =============================================================================
  // Combined Exports
  // =============================================================================
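
Because the new objects are declared with `as const`, each limit keeps its literal type, so call sites can rely on the exact value or widen to a plain number. A short consumption sketch follows; the import path assumes a module sitting next to core/constants, and truncateTitle/remember are illustrative names, not the package's API:

import { EVENT_LIMITS, OUTPUT_LIMITS, STORAGE_LIMITS } from '../constants'

// Literal types are preserved: 45 and 10 rather than plain number
const spinnerLimit: 45 = OUTPUT_LIMITS.SPINNER_MSG
const rotationMb: 10 = STORAGE_LIMITS.ROTATION_SIZE_MB

// A call site can still accept any number while defaulting to the shared limit
function truncateTitle(title: string, max: number = OUTPUT_LIMITS.ISSUE_TITLE): string {
  return title.length > max ? `${title.slice(0, max - 1)}…` : title
}

// EVENT_LIMITS.HISTORY_MAX bounds the event-bus history; the trimming itself
// is not shown in this diff, so this loop is only a sketch of the idea
const history: unknown[] = []
function remember(event: unknown): void {
  history.push(event)
  if (history.length > EVENT_LIMITS.HISTORY_MAX) history.shift()
}

console.log(truncateTitle('A very long issue title that will be cut down to fifty characters'))
console.log(spinnerLimit, rotationMb)
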
@@ -666,6 +666,39 @@ class SyncService {
  return agents
  }
 
+ /**
+ * Resolve {{> partial-name }} includes in template content.
+ * Loads partials from templates/subagents/.
+ */
+ private async resolveTemplateIncludes(content: string): Promise<string> {
+ const includePattern = /\{\{>\s*([\w-]+)\s*\}\}/g
+ const matches = [...content.matchAll(includePattern)]
+
+ if (matches.length === 0) return content
+
+ let resolved = content
+ for (const match of matches) {
+ const partialName = match[1]
+ const partialPath = path.join(
+ __dirname,
+ '..',
+ '..',
+ 'templates',
+ 'subagents',
+ `${partialName}.md`
+ )
+ try {
+ const partialContent = await fs.readFile(partialPath, 'utf-8')
+ resolved = resolved.replace(match[0], partialContent.trim())
+ } catch {
+ // Partial not found — leave marker for debugging
+ resolved = resolved.replace(match[0], `<!-- partial "${partialName}" not found -->`)
+ }
+ }
+
+ return resolved
+ }
+
  private async generateWorkflowAgent(name: string, agentsPath: string): Promise<void> {
  // Try to read template
  let content = ''
@@ -680,6 +713,7 @@
  `${name}.md`
  )
  content = await fs.readFile(templatePath, 'utf-8')
+ content = await this.resolveTemplateIncludes(content)
  } catch {
  // Generate minimal agent
  content = this.generateMinimalWorkflowAgent(name)
@@ -708,6 +742,9 @@
  )
  content = await fs.readFile(templatePath, 'utf-8')
 
+ // Resolve includes before variable replacement
+ content = await this.resolveTemplateIncludes(content)
+
  // Inject project-specific context
  content = content.replace('{projectName}', stats.name)
  content = content.replace('{frameworks}', stack.frameworks.join(', ') || 'None detected')
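
A standalone sketch of the same include-resolution idea, decoupled from the class above: expand {{> name }} markers from a partials directory, then apply the plain {placeholder} substitution that the agent generators perform afterwards. The names renderAgentTemplate and partialsDir are illustrative, not the package's API:

import fs from 'node:fs/promises'
import path from 'node:path'

async function renderAgentTemplate(
  template: string,
  partialsDir: string,
  vars: Record<string, string>
): Promise<string> {
  let resolved = template
  // Expand {{> partial-name }} includes, falling back to an HTML comment when missing
  for (const match of template.matchAll(/\{\{>\s*([\w-]+)\s*\}\}/g)) {
    const partialPath = path.join(partialsDir, `${match[1]}.md`)
    try {
      resolved = resolved.replace(match[0], (await fs.readFile(partialPath, 'utf-8')).trim())
    } catch {
      resolved = resolved.replace(match[0], `<!-- partial "${match[1]}" not found -->`)
    }
  }
  // Then inject project-specific values such as {projectName}
  for (const [key, value] of Object.entries(vars)) {
    resolved = resolved.replace(`{${key}}`, value)
  }
  return resolved
}
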
package/core/utils/jsonl-helper.ts CHANGED
@@ -2,6 +2,7 @@ import fsSync from 'node:fs'
  import fs from 'node:fs/promises'
  import path from 'node:path'
  import readline from 'node:readline'
+ import { STORAGE_LIMITS } from '../constants'
  import { isNotFoundError } from '../types/fs'
 
  /**
@@ -171,7 +172,7 @@ export async function isJsonLinesEmpty(filePath: string): Promise<boolean> {
  */
  export async function readJsonLinesStreaming<T = Record<string, unknown>>(
  filePath: string,
- maxLines = 1000
+ maxLines: number = STORAGE_LIMITS.JSONL_MAX_LINES
  ): Promise<T[]> {
  try {
  const fileStream = fsSync.createReadStream(filePath)
@@ -225,7 +226,10 @@ export async function getFileSizeMB(filePath: string): Promise<number> {
  * Rotate JSONL file if it exceeds size limit
  * Moves large file to archive with timestamp
  */
- export async function rotateJsonLinesIfNeeded(filePath: string, maxSizeMB = 10): Promise<boolean> {
+ export async function rotateJsonLinesIfNeeded(
+ filePath: string,
+ maxSizeMB: number = STORAGE_LIMITS.ROTATION_SIZE_MB
+ ): Promise<boolean> {
  const sizeMB = await getFileSizeMB(filePath)
 
  if (sizeMB < maxSizeMB) {
@@ -256,7 +260,7 @@ export async function rotateJsonLinesIfNeeded(filePath: string, maxSizeMB = 10):
  export async function appendJsonLineWithRotation(
  filePath: string,
  object: unknown,
- maxSizeMB = 10
+ maxSizeMB: number = STORAGE_LIMITS.ROTATION_SIZE_MB
  ): Promise<void> {
  // Rotate if needed (before appending)
  await rotateJsonLinesIfNeeded(filePath, maxSizeMB)
@@ -271,7 +275,7 @@ export async function appendJsonLineWithRotation(
  */
  export async function checkFileSizeWarning(
  filePath: string,
- warnThresholdMB = 50
+ warnThresholdMB: number = STORAGE_LIMITS.LARGE_FILE_WARN_MB
  ): Promise<FileSizeWarning> {
  const sizeMB = await getFileSizeMB(filePath)
  const isLarge = sizeMB > warnThresholdMB
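
A hedged usage sketch of the rotation helpers with their new defaults: append one record, rotating once the file passes STORAGE_LIMITS.ROTATION_SIZE_MB (10 MB), then surface a warning past STORAGE_LIMITS.LARGE_FILE_WARN_MB (50 MB). It assumes FileSizeWarning exposes the isLarge and sizeMB fields computed in the hunk above, and logEvent is an illustrative wrapper, not part of the package:

import { appendJsonLineWithRotation, checkFileSizeWarning } from './jsonl-helper'

async function logEvent(eventsFile: string, event: Record<string, unknown>): Promise<void> {
  // Rotates the file to the archive before appending if it is already over the limit
  await appendJsonLineWithRotation(eventsFile, event)

  const warning = await checkFileSizeWarning(eventsFile)
  if (warning.isLarge) {
    console.warn(`events file has grown to ${warning.sizeMB.toFixed(1)} MB`)
  }
}
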
package/core/utils/output.ts CHANGED
@@ -10,6 +10,7 @@
  */
 
  import chalk from 'chalk'
+ import { OUTPUT_LIMITS } from '../constants'
  import branding from './branding'
  import type { ErrorCode, ErrorWithHint } from './error-messages'
  import { getError } from './error-messages'
@@ -100,7 +101,7 @@ export function isQuietMode(): boolean {
  * Truncate string to max chars (uses tier config if no max specified)
  */
  const truncate = (s: string | undefined | null, max?: number): string => {
- const limit = max ?? (getTierConfig().maxCharsPerLine || 50)
+ const limit = max ?? (getTierConfig().maxCharsPerLine || OUTPUT_LIMITS.FALLBACK_TRUNCATE)
  return s && s.length > limit ? `${s.slice(0, limit - 1)}…` : s || ''
  }
 
@@ -156,7 +157,7 @@ export function formatForHuman(data: unknown): string {
  const issues = obj.issues as Array<Record<string, unknown>>
  const lines = issues.slice(0, tier.maxLines).map((i) => {
  const priority = i.priority && i.priority !== 'none' ? ` [${i.priority}]` : ''
- return `${i.identifier} ${truncate(String(i.title), 50)}${priority}`
+ return `${i.identifier} ${truncate(String(i.title), OUTPUT_LIMITS.ISSUE_TITLE)}${priority}`
  })
  if (issues.length > tier.maxLines) {
  lines.push(chalk.dim(`...${issues.length - tier.maxLines} more`))
@@ -181,7 +182,7 @@ export function formatForHuman(data: unknown): string {
  }
 
  const clear = (): boolean =>
- process.stdout.isTTY ? process.stdout.write(`\r${' '.repeat(80)}\r`) : true
+ process.stdout.isTTY ? process.stdout.write(`\r${' '.repeat(OUTPUT_LIMITS.CLEAR_WIDTH)}\r`) : true
 
  /**
  * Metrics to display after command completion
@@ -232,11 +233,13 @@ const out: Output = {
  if (quietMode) return this
  this.stop()
  if (!process.stdout.isTTY) {
- process.stdout.write(`${branding.cli.spin(0, truncate(msg, 45))}\n`)
+ process.stdout.write(`${branding.cli.spin(0, truncate(msg, OUTPUT_LIMITS.SPINNER_MSG))}\n`)
  return this
  }
  interval = setInterval(() => {
- process.stdout.write(`\r${branding.cli.spin(frame++, truncate(msg, 45))}`)
+ process.stdout.write(
+ `\r${branding.cli.spin(frame++, truncate(msg, OUTPUT_LIMITS.SPINNER_MSG))}`
+ )
  }, SPEED)
  return this
  },
@@ -255,7 +258,7 @@ const out: Output = {
  suffix = chalk.dim(` [${parts.join(' | ')}]`)
  }
  }
- console.log(`${ICONS.success} ${truncate(msg, 50)}${suffix}`)
+ console.log(`${ICONS.success} ${truncate(msg, OUTPUT_LIMITS.DONE_MSG)}${suffix}`)
  }
  return this
  },
@@ -263,7 +266,7 @@ const out: Output = {
  // Errors go to stderr even in quiet mode
  fail(msg: string) {
  this.stop()
- console.error(`${ICONS.fail} ${truncate(msg, 65)}`)
+ console.error(`${ICONS.fail} ${truncate(msg, OUTPUT_LIMITS.FAIL_MSG)}`)
  return this
  },
 
@@ -288,7 +291,7 @@ const out: Output = {
 
  warn(msg: string) {
  this.stop()
- if (!quietMode) console.log(`${ICONS.warn} ${truncate(msg, 65)}`)
+ if (!quietMode) console.log(`${ICONS.warn} ${truncate(msg, OUTPUT_LIMITS.WARN_MSG)}`)
  return this
  },
 
@@ -400,11 +403,15 @@ const out: Output = {
  this.stop()
  const counter = chalk.dim(`[${current}/${total}]`)
  if (!process.stdout.isTTY) {
- process.stdout.write(`${branding.cli.spin(0, `${counter} ${truncate(msg, 35)}`)}\n`)
+ process.stdout.write(
+ `${branding.cli.spin(0, `${counter} ${truncate(msg, OUTPUT_LIMITS.STEP_MSG)}`)}\n`
+ )
  return this
  }
  interval = setInterval(() => {
- process.stdout.write(`\r${branding.cli.spin(frame++, `${counter} ${truncate(msg, 35)}`)}`)
+ process.stdout.write(
+ `\r${branding.cli.spin(frame++, `${counter} ${truncate(msg, OUTPUT_LIMITS.STEP_MSG)}`)}`
+ )
  }, SPEED)
  return this
  },
@@ -417,7 +424,7 @@ const out: Output = {
  const filled = Math.round(percent / 10)
  const empty = 10 - filled
  const bar = chalk.cyan('█'.repeat(filled)) + chalk.dim('░'.repeat(empty))
- const text = msg ? ` ${truncate(msg, 25)}` : ''
+ const text = msg ? ` ${truncate(msg, OUTPUT_LIMITS.PROGRESS_TEXT)}` : ''
  if (!process.stdout.isTTY) {
  process.stdout.write(`${branding.cli.spin(0, `[${bar}] ${percent}%${text}`)}\n`)
  return this
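
For reference, the truncate helper keeps limit - 1 characters and appends an ellipsis, so output never exceeds the configured limit. A small standalone illustration with the fallback value inlined (not part of the package):

const FALLBACK_TRUNCATE = 50 // mirrors OUTPUT_LIMITS.FALLBACK_TRUNCATE

const truncate = (s: string | undefined | null, max?: number): string => {
  const limit = max ?? FALLBACK_TRUNCATE
  return s && s.length > limit ? `${s.slice(0, limit - 1)}…` : s || ''
}

console.log(truncate('a'.repeat(60), 45).length) // 45: 44 characters plus '…'
console.log(truncate('short', 45))               // 'short' (unchanged)
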
@@ -2148,6 +2148,109 @@ var init_logger = __esm({
  }
  });
 
+ // core/constants/index.ts
+ function getTimeout(key) {
+ const envVar = `PRJCT_TIMEOUT_${key}`;
+ const envValue = process.env[envVar];
+ if (envValue) {
+ const parsed = Number.parseInt(envValue, 10);
+ if (!Number.isNaN(parsed) && parsed > 0) {
+ return parsed;
+ }
+ }
+ return TIMEOUTS[key];
+ }
+ var PLAN_STATUS, PLAN_REQUIRED_COMMANDS, DESTRUCTIVE_COMMANDS, PLANNING_TOOLS, TIMEOUTS, OUTPUT_LIMITS, STORAGE_LIMITS;
+ var init_constants = __esm({
+ "core/constants/index.ts"() {
+ "use strict";
+ PLAN_STATUS = {
+ GATHERING: "gathering",
+ ANALYZING: "analyzing",
+ PROPOSING: "proposing",
+ PENDING_APPROVAL: "awaiting_approval",
+ APPROVED: "approved",
+ REJECTED: "rejected",
+ EXECUTING: "executing",
+ COMPLETED: "completed",
+ ABORTED: "aborted"
+ };
+ PLAN_REQUIRED_COMMANDS = [
+ "feature",
+ // New features need planning
+ "spec",
+ // Specs are planning by definition
+ "design",
+ // Architecture needs planning
+ "refactor",
+ // Refactoring needs impact analysis
+ "migrate"
+ // Migrations are high-risk
+ ];
+ DESTRUCTIVE_COMMANDS = [
+ "ship",
+ // Commits and pushes
+ "cleanup",
+ // Deletes files/code
+ "git",
+ // Git operations
+ "migrate"
+ // Database/schema changes
+ ];
+ PLANNING_TOOLS = [
+ "Read",
+ "Glob",
+ "Grep",
+ "GetTimestamp",
+ "GetDate",
+ "GetDateTime"
+ ];
+ TIMEOUTS = {
+ /** Tool availability checks (git --version, npm --version) */
+ TOOL_CHECK: 5e3,
+ /** Standard git operations (status, add, commit) */
+ GIT_OPERATION: 1e4,
+ /** Git clone with --depth 1 */
+ GIT_CLONE: 6e4,
+ /** HTTP fetch/API requests */
+ API_REQUEST: 3e4,
+ /** npm install -g (CLI installation) - 2 minutes */
+ NPM_INSTALL: 12e4,
+ /** User-defined workflow hooks */
+ WORKFLOW_HOOK: 6e4
+ };
+ __name(getTimeout, "getTimeout");
+ OUTPUT_LIMITS = {
+ /** Spinner message truncation */
+ SPINNER_MSG: 45,
+ /** Done/success message truncation */
+ DONE_MSG: 50,
+ /** Fail message truncation */
+ FAIL_MSG: 65,
+ /** Warn message truncation */
+ WARN_MSG: 65,
+ /** Step counter message truncation */
+ STEP_MSG: 35,
+ /** Progress bar text truncation */
+ PROGRESS_TEXT: 25,
+ /** Issue title truncation in lists */
+ ISSUE_TITLE: 50,
+ /** Fallback truncation when tier config is 0 */
+ FALLBACK_TRUNCATE: 50,
+ /** Terminal clear width */
+ CLEAR_WIDTH: 80
+ };
+ STORAGE_LIMITS = {
+ /** Default max lines for streaming JSONL reads */
+ JSONL_MAX_LINES: 1e3,
+ /** File rotation threshold in MB */
+ ROTATION_SIZE_MB: 10,
+ /** Warning threshold for large files in MB */
+ LARGE_FILE_WARN_MB: 50
+ };
+ }
+ });
+
  // core/utils/branding.ts
  import chalk from "chalk";
  var SPINNER_FRAMES, SPINNER_SPEED, branding, branding_default;
@@ -2387,7 +2490,7 @@ function formatForHuman(data) {
  const issues = obj.issues;
  const lines = issues.slice(0, tier.maxLines).map((i) => {
  const priority = i.priority && i.priority !== "none" ? ` [${i.priority}]` : "";
- return `${i.identifier} ${truncate(String(i.title), 50)}${priority}`;
+ return `${i.identifier} ${truncate(String(i.title), OUTPUT_LIMITS.ISSUE_TITLE)}${priority}`;
  });
  if (issues.length > tier.maxLines) {
  lines.push(chalk2.dim(`...${issues.length - tier.maxLines} more`));
@@ -2408,6 +2511,7 @@ var _FRAMES, SPEED, OUTPUT_TIERS, currentTier, ICONS, interval, frame, quietMode
  var init_output = __esm({
  "core/utils/output.ts"() {
  "use strict";
+ init_constants();
  init_branding();
  init_error_messages();
  init_error_messages();
@@ -2441,12 +2545,12 @@ var init_output = __esm({
  __name(setQuietMode, "setQuietMode");
  __name(isQuietMode, "isQuietMode");
  truncate = /* @__PURE__ */ __name((s, max) => {
- const limit = max ?? (getTierConfig().maxCharsPerLine || 50);
+ const limit = max ?? (getTierConfig().maxCharsPerLine || OUTPUT_LIMITS.FALLBACK_TRUNCATE);
  return s && s.length > limit ? `${s.slice(0, limit - 1)}\u2026` : s || "";
  }, "truncate");
  __name(limitLines, "limitLines");
  __name(formatForHuman, "formatForHuman");
- clear = /* @__PURE__ */ __name(() => process.stdout.isTTY ? process.stdout.write(`\r${" ".repeat(80)}\r`) : true, "clear");
+ clear = /* @__PURE__ */ __name(() => process.stdout.isTTY ? process.stdout.write(`\r${" ".repeat(OUTPUT_LIMITS.CLEAR_WIDTH)}\r`) : true, "clear");
  out = {
  // Branding: Show header at start
  start() {
@@ -2464,12 +2568,14 @@ var init_output = __esm({
  if (quietMode) return this;
  this.stop();
  if (!process.stdout.isTTY) {
- process.stdout.write(`${branding_default.cli.spin(0, truncate(msg, 45))}
+ process.stdout.write(`${branding_default.cli.spin(0, truncate(msg, OUTPUT_LIMITS.SPINNER_MSG))}
  `);
  return this;
  }
  interval = setInterval(() => {
- process.stdout.write(`\r${branding_default.cli.spin(frame++, truncate(msg, 45))}`);
+ process.stdout.write(
+ `\r${branding_default.cli.spin(frame++, truncate(msg, OUTPUT_LIMITS.SPINNER_MSG))}`
+ );
  }, SPEED);
  return this;
  },
@@ -2486,14 +2592,14 @@ var init_output = __esm({
  suffix = chalk2.dim(` [${parts.join(" | ")}]`);
  }
  }
- console.log(`${ICONS.success} ${truncate(msg, 50)}${suffix}`);
+ console.log(`${ICONS.success} ${truncate(msg, OUTPUT_LIMITS.DONE_MSG)}${suffix}`);
  }
  return this;
  },
  // Errors go to stderr even in quiet mode
  fail(msg) {
  this.stop();
- console.error(`${ICONS.fail} ${truncate(msg, 65)}`);
+ console.error(`${ICONS.fail} ${truncate(msg, OUTPUT_LIMITS.FAIL_MSG)}`);
  return this;
  },
  // Rich error with context and recovery hint
@@ -2516,7 +2622,7 @@ var init_output = __esm({
  },
  warn(msg) {
  this.stop();
- if (!quietMode) console.log(`${ICONS.warn} ${truncate(msg, 65)}`);
+ if (!quietMode) console.log(`${ICONS.warn} ${truncate(msg, OUTPUT_LIMITS.WARN_MSG)}`);
  return this;
  },
  // Informational message
@@ -2612,12 +2718,16 @@ ${chalk2.bold(title)}`);
  this.stop();
  const counter = chalk2.dim(`[${current}/${total}]`);
  if (!process.stdout.isTTY) {
- process.stdout.write(`${branding_default.cli.spin(0, `${counter} ${truncate(msg, 35)}`)}
- `);
+ process.stdout.write(
+ `${branding_default.cli.spin(0, `${counter} ${truncate(msg, OUTPUT_LIMITS.STEP_MSG)}`)}
+ `
+ );
  return this;
  }
  interval = setInterval(() => {
- process.stdout.write(`\r${branding_default.cli.spin(frame++, `${counter} ${truncate(msg, 35)}`)}`);
+ process.stdout.write(
+ `\r${branding_default.cli.spin(frame++, `${counter} ${truncate(msg, OUTPUT_LIMITS.STEP_MSG)}`)}`
+ );
  }, SPEED);
  return this;
  },
@@ -2629,7 +2739,7 @@ ${chalk2.bold(title)}`);
  const filled = Math.round(percent / 10);
  const empty = 10 - filled;
  const bar = chalk2.cyan("\u2588".repeat(filled)) + chalk2.dim("\u2591".repeat(empty));
- const text = msg ? ` ${truncate(msg, 25)}` : "";
+ const text = msg ? ` ${truncate(msg, OUTPUT_LIMITS.PROGRESS_TEXT)}` : "";
  if (!process.stdout.isTTY) {
  process.stdout.write(`${branding_default.cli.spin(0, `[${bar}] ${percent}%${text}`)}
  `);
@@ -9589,7 +9699,7 @@ async function isJsonLinesEmpty(filePath) {
  const count = await countJsonLines(filePath);
  return count === 0;
  }
- async function readJsonLinesStreaming(filePath, maxLines = 1e3) {
+ async function readJsonLinesStreaming(filePath, maxLines = STORAGE_LIMITS.JSONL_MAX_LINES) {
  try {
  const fileStream = fsSync.createReadStream(filePath);
  const rl = readline.createInterface({
@@ -9627,7 +9737,7 @@ async function getFileSizeMB(filePath) {
  throw error;
  }
  }
- async function rotateJsonLinesIfNeeded(filePath, maxSizeMB = 10) {
+ async function rotateJsonLinesIfNeeded(filePath, maxSizeMB = STORAGE_LIMITS.ROTATION_SIZE_MB) {
  const sizeMB = await getFileSizeMB(filePath);
  if (sizeMB < maxSizeMB) {
  return false;
@@ -9643,11 +9753,11 @@ async function rotateJsonLinesIfNeeded(filePath, maxSizeMB = 10) {
  );
  return true;
  }
- async function appendJsonLineWithRotation(filePath, object, maxSizeMB = 10) {
+ async function appendJsonLineWithRotation(filePath, object, maxSizeMB = STORAGE_LIMITS.ROTATION_SIZE_MB) {
  await rotateJsonLinesIfNeeded(filePath, maxSizeMB);
  await appendJsonLine(filePath, object);
  }
- async function checkFileSizeWarning(filePath, warnThresholdMB = 50) {
+ async function checkFileSizeWarning(filePath, warnThresholdMB = STORAGE_LIMITS.LARGE_FILE_WARN_MB) {
  const sizeMB = await getFileSizeMB(filePath);
  const isLarge = sizeMB > warnThresholdMB;
  if (isLarge) {
@@ -9661,6 +9771,7 @@ var jsonl_helper_default;
  var init_jsonl_helper = __esm({
  "core/utils/jsonl-helper.ts"() {
  "use strict";
+ init_constants();
  init_fs();
  __name(parseJsonLines, "parseJsonLines");
  __name(stringifyJsonLines, "stringifyJsonLines");
@@ -12843,81 +12954,6 @@ var init_orchestrator_executor = __esm({
  }
  });
 
- // core/constants/index.ts
- function getTimeout(key) {
- const envVar = `PRJCT_TIMEOUT_${key}`;
- const envValue = process.env[envVar];
- if (envValue) {
- const parsed = Number.parseInt(envValue, 10);
- if (!Number.isNaN(parsed) && parsed > 0) {
- return parsed;
- }
- }
- return TIMEOUTS[key];
- }
- var PLAN_STATUS, PLAN_REQUIRED_COMMANDS, DESTRUCTIVE_COMMANDS, PLANNING_TOOLS, TIMEOUTS;
- var init_constants = __esm({
- "core/constants/index.ts"() {
- "use strict";
- PLAN_STATUS = {
- GATHERING: "gathering",
- ANALYZING: "analyzing",
- PROPOSING: "proposing",
- PENDING_APPROVAL: "awaiting_approval",
- APPROVED: "approved",
- REJECTED: "rejected",
- EXECUTING: "executing",
- COMPLETED: "completed",
- ABORTED: "aborted"
- };
- PLAN_REQUIRED_COMMANDS = [
- "feature",
- // New features need planning
- "spec",
- // Specs are planning by definition
- "design",
- // Architecture needs planning
- "refactor",
- // Refactoring needs impact analysis
- "migrate"
- // Migrations are high-risk
- ];
- DESTRUCTIVE_COMMANDS = [
- "ship",
- // Commits and pushes
- "cleanup",
- // Deletes files/code
- "git",
- // Git operations
- "migrate"
- // Database/schema changes
- ];
- PLANNING_TOOLS = [
- "Read",
- "Glob",
- "Grep",
- "GetTimestamp",
- "GetDate",
- "GetDateTime"
- ];
- TIMEOUTS = {
- /** Tool availability checks (git --version, npm --version) */
- TOOL_CHECK: 5e3,
- /** Standard git operations (status, add, commit) */
- GIT_OPERATION: 1e4,
- /** Git clone with --depth 1 */
- GIT_CLONE: 6e4,
- /** HTTP fetch/API requests */
- API_REQUEST: 3e4,
- /** npm install -g (CLI installation) - 2 minutes */
- NPM_INSTALL: 12e4,
- /** User-defined workflow hooks */
- WORKFLOW_HOOK: 6e4
- };
- __name(getTimeout, "getTimeout");
- }
- });
-
  // core/agentic/plan-mode.ts
  function generateApprovalPrompt(commandName, context2) {
  const prompts3 = {
@@ -22761,6 +22797,34 @@ var init_sync_service = __esm({
  }
  return agents;
  }
+ /**
+ * Resolve {{> partial-name }} includes in template content.
+ * Loads partials from templates/subagents/.
+ */
+ async resolveTemplateIncludes(content) {
+ const includePattern = /\{\{>\s*([\w-]+)\s*\}\}/g;
+ const matches = [...content.matchAll(includePattern)];
+ if (matches.length === 0) return content;
+ let resolved = content;
+ for (const match of matches) {
+ const partialName = match[1];
+ const partialPath = path49.join(
+ __dirname,
+ "..",
+ "..",
+ "templates",
+ "subagents",
+ `${partialName}.md`
+ );
+ try {
+ const partialContent = await fs45.readFile(partialPath, "utf-8");
+ resolved = resolved.replace(match[0], partialContent.trim());
+ } catch {
+ resolved = resolved.replace(match[0], `<!-- partial "${partialName}" not found -->`);
+ }
+ }
+ return resolved;
+ }
  async generateWorkflowAgent(name, agentsPath) {
  let content = "";
  try {
@@ -22774,6 +22838,7 @@ var init_sync_service = __esm({
  `${name}.md`
  )
  content = await fs45.readFile(templatePath, "utf-8");
+ content = await this.resolveTemplateIncludes(content);
  } catch {
  content = this.generateMinimalWorkflowAgent(name);
  }
@@ -22792,6 +22857,7 @@ var init_sync_service = __esm({
  `${name}.md`
  )
  content = await fs45.readFile(templatePath, "utf-8");
+ content = await this.resolveTemplateIncludes(content);
  content = content.replace("{projectName}", stats.name);
  content = content.replace("{frameworks}", stack.frameworks.join(", ") || "None detected");
  content = content.replace("{ecosystem}", stats.ecosystem);
@@ -28653,7 +28719,7 @@ var require_package = __commonJS({
  "package.json"(exports, module) {
  module.exports = {
  name: "prjct-cli",
- version: "1.6.4",
+ version: "1.6.6",
  description: "Context layer for AI agents. Project context for Claude Code, Gemini CLI, and more.",
  main: "core/index.ts",
  bin: {
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "prjct-cli",
- "version": "1.6.4",
+ "version": "1.6.6",
  "description": "Context layer for AI agents. Project context for Claude Code, Gemini CLI, and more.",
  "main": "core/index.ts",
  "bin": {
package/templates/subagents/agent-base.md ADDED
@@ -0,0 +1,20 @@
+ ## prjct Project Context
+
+ ### Setup
+ 1. Read `.prjct/prjct.config.json` → extract `projectId`
+ 2. Set `globalPath = ~/.prjct-cli/projects/{projectId}`
+
+ ### Available Storage
+
+ | File | Contents |
+ |------|----------|
+ | `{globalPath}/storage/state.json` | Current task & subtasks |
+ | `{globalPath}/storage/queue.json` | Task queue |
+ | `{globalPath}/storage/shipped.json` | Shipping history |
+ | `{globalPath}/storage/roadmap.json` | Feature roadmap |
+
+ ### Rules
+ - Storage (JSON) is **SOURCE OF TRUTH**
+ - Context (MD) is **GENERATED** from storage
+ - NEVER hardcode timestamps — use system time
+ - Log significant actions to `{globalPath}/memory/events.jsonl`
@@ -16,7 +16,9 @@ You are a backend specialist agent for this project.
  - **APIs**: REST, GraphQL, gRPC, WebSockets
  - **Auth**: JWT, OAuth, Sessions, API Keys
 
- ## Project Context
+ {{> agent-base }}
+
+ ## Domain Analysis
 
  When invoked, analyze the project's backend stack:
  1. Read `package.json`, `go.mod`, `requirements.txt`, or `Cargo.toml`
@@ -15,7 +15,9 @@ You are a database specialist agent for this project.
  - **ORMs**: Prisma, Drizzle, TypeORM, Sequelize, GORM
  - **Migrations**: Schema changes, data migrations
 
- ## Project Context
+ {{> agent-base }}
+
+ ## Domain Analysis
 
  When invoked, analyze the project's database setup:
  1. Check for ORM config (prisma/schema.prisma, drizzle.config.ts)
@@ -16,7 +16,9 @@ You are a DevOps specialist agent for this project.
  - **CI/CD**: GitHub Actions, GitLab CI, Jenkins
  - **Cloud**: AWS, GCP, Azure, Vercel, Railway
 
- ## Project Context
+ {{> agent-base }}
+
+ ## Domain Analysis
 
  When invoked, analyze the project's DevOps setup:
  1. Check for Dockerfile, docker-compose.yml
@@ -16,7 +16,9 @@ You are a frontend specialist agent for this project.
  - **State**: Redux, Zustand, Pinia, Context API
  - **Build**: Vite, webpack, esbuild, Turbopack
 
- ## Project Context
+ {{> agent-base }}
+
+ ## Domain Analysis
 
  When invoked, analyze the project's frontend stack:
  1. Read `package.json` for dependencies
@@ -17,7 +17,9 @@ You are a testing specialist agent for this project.
  - **Go**: testing package, testify
  - **E2E**: Playwright, Cypress, Puppeteer
 
- ## Project Context
+ {{> agent-base }}
+
+ ## Domain Analysis
 
  When invoked, analyze the project's testing setup:
  1. Check for test config (bunfig.toml, jest.config.js, pytest.ini)
@@ -13,13 +13,12 @@ You are the Chief Architect agent, the expert in creating Product Requirement Do
 
  You are responsible for ensuring every significant feature is properly documented BEFORE implementation begins. You follow a formal 8-phase methodology adapted from industry best practices.
 
- ## Project Context
+ {{> agent-base }}
 
- When invoked, FIRST load context:
- 1. Read `.prjct/prjct.config.json` → extract `projectId`
- 2. Read `~/.prjct-cli/projects/{projectId}/storage/roadmap.json` → existing features
- 3. Read `~/.prjct-cli/projects/{projectId}/storage/prds.json` → existing PRDs
- 4. Read `~/.prjct-cli/projects/{projectId}/analysis/repo-analysis.json` → project tech stack
+ When invoked, load these storage files:
+ - `roadmap.json` → existing features
+ - `prds.json` → existing PRDs
+ - `analysis/repo-analysis.json` → project tech stack
 
  ## Commands You Handle
 
@@ -9,13 +9,12 @@ skills: [feature-dev]
 
  You are the prjct planning agent, specializing in feature planning and task breakdown.
 
- ## Project Context
+ {{> agent-base }}
 
- When invoked, FIRST load context:
- 1. Read `.prjct/prjct.config.json` → extract `projectId`
- 2. Read `~/.prjct-cli/projects/{projectId}/storage/state.json` → current state
- 3. Read `~/.prjct-cli/projects/{projectId}/storage/queue.json` → task queue
- 4. Read `~/.prjct-cli/projects/{projectId}/storage/roadmap.json` → feature roadmap
+ When invoked, load these storage files:
+ - `state.json` → current task state
+ - `queue.json` → task queue
+ - `roadmap.json` → feature roadmap
 
  ## Commands You Handle
 
@@ -9,12 +9,11 @@ skills: [code-review]
 
  You are the prjct shipper agent, specializing in shipping features safely.
 
- ## Project Context
+ {{> agent-base }}
 
- When invoked, FIRST load context:
- 1. Read `.prjct/prjct.config.json` → extract `projectId`
- 2. Read `~/.prjct-cli/projects/{projectId}/storage/state.json` → current state
- 3. Read `~/.prjct-cli/projects/{projectId}/storage/shipped.json` → shipping history
+ When invoked, load these storage files:
+ - `state.json` → current task state
+ - `shipped.json` → shipping history
 
  ## Commands You Handle
 
@@ -8,12 +8,11 @@ effort: low
 
  You are the prjct workflow executor, specializing in task lifecycle management.
 
- ## Project Context
+ {{> agent-base }}
 
- When invoked, FIRST load context:
- 1. Read `.prjct/prjct.config.json` → extract `projectId`
- 2. Read `~/.prjct-cli/projects/{projectId}/storage/state.json` → current state
- 3. Read `~/.prjct-cli/projects/{projectId}/storage/queue.json` → task queue
+ When invoked, load these storage files:
+ - `state.json` → current task state
+ - `queue.json` → task queue
 
  ## Commands You Handle
 