clementine-agent 1.18.104 → 1.18.106
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/agent/draft-store.d.ts +57 -0
- package/dist/agent/draft-store.js +113 -0
- package/dist/agent/skill-store.d.ts +60 -0
- package/dist/agent/skill-store.js +313 -0
- package/dist/cli/dashboard.js +691 -0
- package/dist/types.d.ts +112 -0
- package/package.json +1 -1
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
/**
 * PRD §11 Phase 5b / 1.18.105 — Draft store for cron tasks.
 *
 * Drafts live alongside CRON.md (the published source of truth) but in a
 * separate per-task JSON sidecar at ~/.clementine/cron-drafts/<safe>.json.
 * Schedule firing always reads CRON.md, so a draft never accidentally
 * goes live until the user clicks Publish.
 *
 * Why a sidecar instead of editing CRON.md and gating with frontmatter:
 * - One CRON.md edit = many tasks affected. Drafts are per-task by design.
 * - Sidecars survive even if CRON.md gets rewritten (e.g. by an agent).
 * - The published-vs-draft diff is a clean two-document compare.
 *
 * Tradeoff: a draft can become "orphaned" if its base task gets renamed.
 * NOTE(review): this text previously referenced a `basedOnName` field,
 * but DraftRecord only carries `name` — orphan detection presumably keys
 * off `name` no longer matching any published task; confirm. The editor
 * surfaces a banner when the published peer can't be found. Manual
 * cleanup via DELETE /api/cron/:name/draft.
 */
import type { CronJobDefinition } from '../types.js';
/** Stable hash of a job def — used to detect drift between when the draft
 * was created and the current published version. If the published task
 * changed under the draft (someone else edited it), we surface a warning
 * and ask the user if they want to rebase. */
export declare function hashJobDef(def: CronJobDefinition): string;
export interface DraftRecord {
    /** Task name this draft belongs to. Matches the published task's name
     * (renames detach the draft — the user must republish to a new name). */
    name: string;
    /** Full job def the user is staging. Same shape as CronJobDefinition. */
    draft: CronJobDefinition;
    /** ISO timestamp of last save. */
    savedAt: string;
    /** Author marker. 'dashboard' for UI saves; future channels may add their
     * own values. */
    changedBy: string;
    /** Hash of the published def at the time the draft was first created.
     * If the live published def hashes to something different now, the
     * draft needs a rebase — the editor surfaces a banner. */
    basedOnPublishedHash: string | null;
}
export declare function getDraft(name: string): DraftRecord | null;
export declare function saveDraft(record: DraftRecord): void;
export declare function deleteDraft(name: string): boolean;
export declare function listDraftNames(): string[];
/** Compute draft state vs current published def. The badge in the editor
 * reads this directly — five states matching the n8n flow:
 * none = no draft sidecar, task is on its published version
 * draft = draft exists, no published peer (new task being created)
 * ready = draft + published peer; draft != published
 * up_to_date = draft + published peer; draft hashes match published
 * rebase_needed = draft + published peer; published has drifted since draft was created
 */
export type DraftBadgeState = 'none' | 'draft' | 'ready' | 'up_to_date' | 'rebase_needed';
export declare function computeBadgeState(name: string, publishedDef: CronJobDefinition | null): DraftBadgeState;
/** Test-only: where we read/write drafts. Tests use a clean tmpdir. */
export declare function _draftDirForTests(): string;
//# sourceMappingURL=draft-store.d.ts.map
|
|
@@ -0,0 +1,113 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* PRD §11 Phase 5b / 1.18.105 — Draft store for cron tasks.
|
|
3
|
+
*
|
|
4
|
+
* Drafts live alongside CRON.md (the published source of truth) but in a
|
|
5
|
+
* separate per-task JSON sidecar at ~/.clementine/cron-drafts/<safe>.json.
|
|
6
|
+
* Schedule firing always reads CRON.md, so a draft never accidentally
|
|
7
|
+
* goes live until the user clicks Publish.
|
|
8
|
+
*
|
|
9
|
+
* Why a sidecar instead of editing CRON.md and gating with frontmatter:
|
|
10
|
+
* - One CRON.md edit = many tasks affected. Drafts are per-task by design.
|
|
11
|
+
* - Sidecars survive even if CRON.md gets rewritten (e.g. by an agent).
|
|
12
|
+
* - The published-vs-draft diff is a clean two-document compare.
|
|
13
|
+
*
|
|
14
|
+
* Tradeoff: a draft can become "orphaned" if its base task gets renamed.
|
|
15
|
+
* We detect this via basedOnName and surface a banner in the editor when
|
|
16
|
+
* we can't find the published peer. Manual cleanup via DELETE /api/cron/
|
|
17
|
+
* :name/draft.
|
|
18
|
+
*/
|
|
19
|
+
import { existsSync, mkdirSync, readFileSync, writeFileSync, readdirSync, unlinkSync } from 'node:fs';
|
|
20
|
+
import { createHash } from 'node:crypto';
|
|
21
|
+
import os from 'node:os';
|
|
22
|
+
import path from 'node:path';
|
|
23
|
+
/** Resolve the draft directory lazily (not at import time) so tests can
 * repoint CLEMENTINE_HOME between cases without fighting the module cache. */
function draftDir() {
    const override = process.env.CLEMENTINE_HOME;
    const base = override ? override : path.join(os.homedir(), '.clementine');
    return path.join(base, 'cron-drafts');
}
|
29
|
+
/** Sanitise a task name into a filesystem-safe stem. Mirrors the
 * convention used by CronRunLog.runs/<safe>.jsonl so users can grep
 * for related files easily. */
function safeName(name) {
    const cleaned = String(name).replace(/[^a-zA-Z0-9_:-]/g, '_');
    return cleaned.slice(0, 128);
}
|
|
34
|
+
/** Sidecar file for one task: <draftDir>/<safe>.json */
function draftPath(name) {
    return path.join(draftDir(), `${safeName(name)}.json`);
}
|
|
37
|
+
/** Stable hash of a job def — used to detect drift between when the draft
 * was created and the current published version. If the published task
 * changed under the draft (someone else edited it), we surface a warning
 * and ask the user if they want to rebase.
 *
 * Canonicalisation sorts object keys *recursively* via a replacer
 * function. The previous implementation passed `Object.keys(def).sort()`
 * (an array) as the replacer; per the JSON.stringify spec an array
 * replacer is a property-name whitelist applied at EVERY nesting level,
 * so nested fields whose keys were not also top-level keys were silently
 * dropped from the canonical form — edits to them never changed the hash
 * and drift went undetected. */
export function hashJobDef(def) {
    // Function replacer: visits every level, never filters keys, and
    // rebuilds each plain object with sorted key order so semantically
    // equal defs hash identically regardless of key insertion order.
    const sortKeys = (_key, value) => {
        if (value && typeof value === 'object' && !Array.isArray(value)) {
            return Object.keys(value)
                .sort()
                .reduce((acc, k) => {
                    acc[k] = value[k];
                    return acc;
                }, {});
        }
        return value;
    };
    const canonical = JSON.stringify(def, sortKeys);
    // 16 hex chars (64 bits) is plenty for drift detection; keeps the
    // sidecar JSON readable.
    return createHash('sha256').update(canonical).digest('hex').slice(0, 16);
}
|
|
45
|
+
/** Load a task's draft sidecar. A missing file and an unreadable or
 * corrupt sidecar both read as "no draft" — the dashboard treats them
 * identically. */
export function getDraft(name) {
    const file = draftPath(name);
    if (!existsSync(file)) {
        return null;
    }
    try {
        return JSON.parse(readFileSync(file, 'utf-8'));
    }
    catch {
        return null;
    }
}
|
|
57
|
+
/** Persist a draft sidecar. Validates the two fields publish/diff flows
 * depend on before touching disk; creates the drafts dir on first save. */
export function saveDraft(record) {
    if (!record.name) {
        throw new Error('draft.name is required');
    }
    if (!record.draft || typeof record.draft !== 'object') {
        throw new Error('draft.draft (job def) is required');
    }
    mkdirSync(draftDir(), { recursive: true });
    const payload = JSON.stringify(record, null, 2);
    writeFileSync(draftPath(record.name), payload + '\n');
}
|
|
66
|
+
/** Remove a task's draft sidecar. Returns true only when a file was
 * actually removed; a missing file or a failed unlink (e.g. permissions
 * race) reports false rather than throwing. */
export function deleteDraft(name) {
    const file = draftPath(name);
    if (!existsSync(file)) {
        return false;
    }
    try {
        unlinkSync(file);
    }
    catch {
        return false;
    }
    return true;
}
|
|
78
|
+
/** List draft file stems. Note these are the *sanitised* names (see
 * safeName), which may differ from the original task names for tasks
 * containing exotic characters. Missing or unreadable dir → empty list. */
export function listDraftNames() {
    const dir = draftDir();
    if (!existsSync(dir)) {
        return [];
    }
    try {
        const entries = readdirSync(dir);
        return entries
            .filter((f) => f.endsWith('.json'))
            .map((f) => f.slice(0, -'.json'.length));
    }
    catch {
        return [];
    }
}
|
|
91
|
+
/** Compute the editor badge for a task. See DraftBadgeState in the
 * declarations: none / draft / up_to_date / rebase_needed / ready. */
export function computeBadgeState(name, publishedDef) {
    const record = getDraft(name);
    if (!record) {
        return 'none'; // no sidecar — task is on its published version
    }
    if (!publishedDef) {
        return 'draft'; // sidecar only — a brand-new task being authored
    }
    const publishedHash = hashJobDef(publishedDef);
    if (hashJobDef(record.draft) === publishedHash) {
        return 'up_to_date';
    }
    // Drift detection: the draft was based on a published version we no
    // longer recognise. Covers (a) someone else edited the published def,
    // (b) the user published through a different surface and forgot to
    // discard the draft.
    const base = record.basedOnPublishedHash;
    if (base && base !== publishedHash) {
        return 'rebase_needed';
    }
    return 'ready';
}
|
|
109
|
+
/** Test-only accessor: exposes the resolved draft directory so tests can
 * point their assertions at a clean tmpdir. */
export function _draftDirForTests() {
    const dir = draftDir();
    return dir;
}
|
|
113
|
+
//# sourceMappingURL=draft-store.js.map
|
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
/**
 * Skill store — Phase A (read-only) of the Skills-First redesign.
 *
 * Discovers skill .md files from two locations and parses their
 * frontmatter into the Skill type. Phase A surfaces what's already on
 * disk; Phase B adds editing + testing; Phase C wires runtime invocation.
 *
 * Discovery order:
 *   1. ~/.clementine/vault/00-System/skills/<name>.md   (global)
 *   2. <work_dir>/.clementine/skills/<name>.md          (per-project)
 *
 * Per-project files win on name collision — they override global skills
 * for that project. The dashboard surfaces both pools and tags each
 * skill with its scope so the user can see which one will resolve.
 *
 * Schema detection: a file is `v1` when its frontmatter declares any of
 * inputs / tools.allow / tools.deny / dataSources / stateKeys / success.
 * Otherwise (only legacy fields like title / triggers / toolsUsed) it's
 * `legacy` and the dashboard shows a migration badge.
 *
 * Used-by join: Phase A reads the `skills:` array on CronJobDefinition
 * (the existing field) to populate Skill.usedByTriggers. Phase C will
 * extend this to read the new top-level `skill:` field on the trigger.
 */
import type { Skill, SkillScope, CronJobDefinition } from '../types.js';
interface ParseResult {
    skill: Skill;
    /** Set when the file existed but couldn't be parsed (bad YAML, etc.).
     * We still surface the file with a fallback frontmatter so the user
     * can see which one needs fixing. */
    parseError?: string;
}
/** Parse a single skill file. Returns a Skill record even when the
 * frontmatter is malformed — the dashboard renders the parse error
 * in-pane so the user can fix it without leaving the UI. */
export declare function parseSkillFile(filePath: string, scope: SkillScope): ParseResult;
export interface ListSkillsOptions {
    /** Optional per-project work_dir to also scan. Per-project skills
     * override global skills with the same filename. */
    projectWorkDir?: string;
    /** Optional cron jobs list — when provided, the loader populates the
     * usedByTriggers field on each skill via the existing skills[] array
     * on CronJobDefinition (Phase A's join). */
    jobs?: CronJobDefinition[];
}
/** Top-level discovery API. Returns the merged list of skills across
 * global + per-project pools, with per-project taking precedence on
 * name collision. usedByTriggers is populated when jobs are passed in. */
export declare function listSkills(opts?: ListSkillsOptions): Skill[];
/** Get a single skill by name, with the same global/project precedence
 * as listSkills. Returns null if neither pool has the skill. */
export declare function getSkill(name: string, opts?: ListSkillsOptions): Skill | null;
/** Test-only: where the loader looked. Useful in unit tests + the
 * dashboard's diagnostics surface. */
export declare function _skillDirsForDiagnostics(workDir?: string): {
    global: string;
    project: string | null;
};
export {};
//# sourceMappingURL=skill-store.d.ts.map
|
|
@@ -0,0 +1,313 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Skill store — Phase A (read-only) of the Skills-First redesign.
|
|
3
|
+
*
|
|
4
|
+
* Discovers skill .md files from two locations and parses their
|
|
5
|
+
* frontmatter into the Skill type. Phase A surfaces what's already on
|
|
6
|
+
* disk; Phase B adds editing + testing; Phase C wires runtime invocation.
|
|
7
|
+
*
|
|
8
|
+
* Discovery order:
|
|
9
|
+
* 1. ~/.clementine/vault/00-System/skills/<name>.md (global)
|
|
10
|
+
* 2. <work_dir>/.clementine/skills/<name>.md (per-project)
|
|
11
|
+
*
|
|
12
|
+
* Per-project files win on name collision — they override global skills
|
|
13
|
+
* for that project. The dashboard surfaces both pools and tags each
|
|
14
|
+
* skill with its scope so the user can see which one will resolve.
|
|
15
|
+
*
|
|
16
|
+
* Schema detection: a file is `v1` when its frontmatter declares any of
|
|
17
|
+
* inputs / tools.allow / tools.deny / dataSources / stateKeys / success.
|
|
18
|
+
* Otherwise (only legacy fields like title / triggers / toolsUsed) it's
|
|
19
|
+
* `legacy` and the dashboard shows a migration badge.
|
|
20
|
+
*
|
|
21
|
+
* Used-by join: Phase A reads the `skills:` array on CronJobDefinition
|
|
22
|
+
* (the existing field) to populate Skill.usedByTriggers. Phase C will
|
|
23
|
+
* extend this to read the new top-level `skill:` field on the trigger.
|
|
24
|
+
*/
|
|
25
|
+
import { existsSync, readFileSync, readdirSync, statSync } from 'node:fs';
|
|
26
|
+
import os from 'node:os';
|
|
27
|
+
import path from 'node:path';
|
|
28
|
+
import matter from 'gray-matter';
|
|
29
|
+
/** Resolve the global skills directory from CLEMENTINE_HOME (or default). */
function globalSkillsDir() {
    const override = process.env.CLEMENTINE_HOME;
    const base = override ? override : path.join(os.homedir(), '.clementine');
    return path.join(base, 'vault', '00-System', 'skills');
}
|
|
34
|
+
/** Resolve a per-project skills directory. Returns null when work_dir is
 * empty/undefined or has no .clementine/skills/ child on disk. */
function projectSkillsDir(workDir) {
    if (!workDir) {
        return null;
    }
    const candidate = path.join(workDir, '.clementine', 'skills');
    if (existsSync(candidate)) {
        return candidate;
    }
    return null;
}
|
|
42
|
+
/** Accept only visible *.md files that aren't backups (*.bak.md).
 * Directories slip through here and are filtered by the caller's stat
 * check. */
function isSkillFile(name) {
    if (name.startsWith('.'))
        return false; // hidden files / editor swap files
    if (!name.endsWith('.md'))
        return false; // non-markdown — this also rejects bare *.bak, so the
                      // old explicit `.endsWith('.bak')` branch was
                      // unreachable and has been removed
    if (name.endsWith('.bak.md'))
        return false; // backup copies of skills
    return true;
}
|
|
54
|
+
/** Skill name is the filename without extension. We don't trust the
 * frontmatter's `name:` field as the canonical identifier because
 * different files could collide on it; the filename is what the loader
 * joins on. The frontmatter `name:` is preserved as a display alias. */
function nameFromFile(file) {
    const base = path.basename(file);
    // Strip a trailing '.md' only when the stem is non-empty, matching
    // path.basename(file, '.md') semantics (a file named exactly '.md'
    // keeps its name).
    if (base.length > 3 && base.endsWith('.md')) {
        return base.slice(0, -3);
    }
    return base;
}
|
|
61
|
+
/** Detect whether a frontmatter object uses the v1 schema or the
 * pre-redesign legacy shape. Phase A renders this as a badge so users
 * can see which skills need migration in Phase B. */
function detectSchemaVersion(fm) {
    // Any of these top-level keys marks the new schema outright.
    for (const marker of ['inputs', 'dataSources', 'stateKeys', 'success', 'limits']) {
        if (marker in fm) {
            return 'v1';
        }
    }
    // `tools:` counts only when it carries an allow/deny policy array.
    const tools = fm.tools;
    if (tools) {
        if (Array.isArray(tools.allow) || Array.isArray(tools.deny)) {
            return 'v1';
        }
    }
    return 'legacy';
}
|
|
73
|
+
/** Coerce a parsed YAML object into the SkillFrontmatter shape. Accepts
 * both the v1 fields and the legacy fields side-by-side; the caller's
 * schemaVersion check tells the dashboard which is which. Field
 * assignment order is deliberate — it fixes the key order of the
 * resulting object. */
function coerceFrontmatter(raw, fileBasename) {
    const isPlainObject = (v) => !!v && typeof v === 'object' && !Array.isArray(v);
    // Identifier — ALWAYS the filename (without .md). The frontmatter's
    // `name:` field is intentionally ignored to avoid two skills colliding
    // on it. Users wanting a friendly display string can set `title:`
    // instead, which Phase B's editor surfaces as the heading.
    const fm = { name: fileBasename };
    if (typeof raw.description === 'string') {
        fm.description = raw.description;
    }
    // v1 inputs — JSON Schema map keyed by field name.
    if (isPlainObject(raw.inputs)) {
        fm.inputs = raw.inputs;
    }
    // tools.allow / tools.deny — kept only when well-formed arrays.
    if (isPlainObject(raw.tools)) {
        const policy = {};
        if (Array.isArray(raw.tools.allow)) {
            policy.allow = raw.tools.allow.map(String);
        }
        if (Array.isArray(raw.tools.deny)) {
            policy.deny = raw.tools.deny.map(String);
        }
        if (policy.allow || policy.deny) {
            fm.tools = policy;
        }
    }
    if (Array.isArray(raw.dataSources)) {
        const sources = [];
        for (const d of raw.dataSources) {
            if (!!d && typeof d === 'object') {
                sources.push({
                    kind: String(d.kind || 'unknown'),
                    purpose: String(d.purpose || ''),
                });
            }
        }
        fm.dataSources = sources;
    }
    if (Array.isArray(raw.stateKeys)) {
        fm.stateKeys = raw.stateKeys.map(String);
    }
    if (isPlainObject(raw.success)) {
        const success = {};
        if (raw.success.schema && typeof raw.success.schema === 'object') {
            success.schema = raw.success.schema;
        }
        if (typeof raw.success.criterion === 'string') {
            success.criterion = raw.success.criterion;
        }
        if (success.schema || success.criterion) {
            fm.success = success;
        }
    }
    if (isPlainObject(raw.limits)) {
        const limits = {};
        for (const key of ['maxTurns', 'maxBudgetUsd', 'timeoutSeconds']) {
            if (typeof raw.limits[key] === 'number') {
                limits[key] = raw.limits[key];
            }
        }
        if (Object.keys(limits).length > 0) {
            fm.limits = limits;
        }
    }
    if (typeof raw.version === 'number') {
        fm.version = raw.version;
    }
    // Bookkeeping timestamps — copied verbatim when present as strings.
    for (const key of ['createdAt', 'updatedAt', 'lastUsed', 'lastTestPass']) {
        if (typeof raw[key] === 'string') {
            fm[key] = raw[key];
        }
    }
    // Legacy fields (preserved as-is for the migration UI).
    if (typeof raw.title === 'string') {
        fm.title = raw.title;
    }
    if (Array.isArray(raw.triggers)) {
        fm.triggers = raw.triggers.map(String);
    }
    if (typeof raw.source === 'string') {
        fm.source = raw.source;
    }
    if (Array.isArray(raw.toolsUsed)) {
        fm.toolsUsed = raw.toolsUsed.map(String);
    }
    if (typeof raw.useCount === 'number') {
        fm.useCount = raw.useCount;
    }
    return fm;
}
|
|
156
|
+
/** Parse a single skill file. Returns a Skill record even when the
 * frontmatter is malformed — the dashboard renders the parse error
 * in-pane so the user can fix it without leaving the UI. */
export function parseSkillFile(filePath, scope) {
    const basename = nameFromFile(filePath);
    let text;
    try {
        text = readFileSync(filePath, 'utf-8');
    }
    catch (err) {
        // Unreadable file: surface a placeholder skill so the dashboard can
        // show WHICH file failed instead of silently dropping it.
        return {
            skill: emptySkill(filePath, basename, scope),
            parseError: 'failed to read: ' + String(err),
        };
    }
    let parsed;
    try {
        parsed = matter(text);
    }
    catch (err) {
        // Bad YAML: keep the raw text as the body so the user can fix it.
        return {
            skill: { ...emptySkill(filePath, basename, scope), body: text },
            parseError: 'YAML parse error: ' + String(err),
        };
    }
    return {
        skill: {
            frontmatter: coerceFrontmatter(parsed.data, basename),
            body: parsed.content || '',
            filePath,
            scope,
            schemaVersion: detectSchemaVersion(parsed.data),
            usedByTriggers: [],
        },
    };
}
|
|
195
|
+
/** Fallback Skill record for files we couldn't read or parse — carries
 * just the filename-derived identity so the dashboard can still list it. */
function emptySkill(filePath, basename, scope) {
    const skill = {
        frontmatter: { name: basename },
        body: '',
        filePath,
        scope,
        schemaVersion: 'legacy',
        usedByTriggers: [],
    };
    return skill;
}
|
|
205
|
+
/** List skills in a directory, returning Skill records (not just paths)
 * so callers can immediately render them. Tolerates missing dirs and
 * unreadable files — best-effort. */
function listSkillsInDir(dir, scope) {
    if (!existsSync(dir)) {
        return [];
    }
    let entries;
    try {
        entries = readdirSync(dir);
    }
    catch {
        return []; // e.g. permissions — best-effort, not fatal
    }
    const skills = [];
    for (const entry of entries) {
        if (!isSkillFile(entry)) {
            continue;
        }
        const fullPath = path.join(dir, entry);
        let isFile = false;
        try {
            isFile = statSync(fullPath).isFile();
        }
        catch {
            isFile = false; // raced deletion etc. — skip quietly
        }
        if (!isFile) {
            continue;
        }
        skills.push(parseSkillFile(fullPath, scope).skill);
    }
    return skills;
}
|
|
235
|
+
/** Top-level discovery API. Returns the merged list of skills across
 * global + per-project pools, with per-project taking precedence on
 * name collision. usedByTriggers is populated when jobs are passed in. */
export function listSkills(opts = {}) {
    const globalPool = listSkillsInDir(globalSkillsDir(), 'global');
    let projectPool = [];
    if (opts.projectWorkDir) {
        const pdir = projectSkillsDir(opts.projectWorkDir);
        if (pdir) {
            projectPool = listSkillsInDir(pdir, 'project');
        }
    }
    // Map keyed by basename; inserting project entries second makes them
    // override their global namesakes.
    const merged = new Map();
    for (const skill of [...globalPool, ...projectPool]) {
        merged.set(skill.frontmatter.name, skill);
    }
    // Used-by join from cron jobs' skills[] array. A skill referenced by
    // several jobs accumulates them in job order.
    for (const job of opts.jobs ?? []) {
        if (!Array.isArray(job.skills)) {
            continue;
        }
        for (const skillName of job.skills) {
            const hit = merged.get(skillName);
            if (hit) {
                hit.usedByTriggers.push(job.name);
            }
        }
    }
    // Alphabetical — predictable rendering, no need for the dashboard to
    // re-sort; a project override sorts at the same key as the global
    // skill it replaced.
    const result = [...merged.values()];
    result.sort((a, b) => a.frontmatter.name.localeCompare(b.frontmatter.name));
    return result;
}
|
|
270
|
+
/** Get a single skill by name, with the same global/project precedence
 * as listSkills. Returns null if neither pool has the skill. */
export function getSkill(name, opts = {}) {
    // Shared lookup: parse <dir>/<name>.md and run the used-by join when
    // jobs were supplied; null when the file doesn't exist.
    const loadFrom = (dir, scope) => {
        const file = path.join(dir, name + '.md');
        if (!existsSync(file)) {
            return null;
        }
        const { skill } = parseSkillFile(file, scope);
        if (opts.jobs) {
            skill.usedByTriggers = jobsUsing(name, opts.jobs);
        }
        return skill;
    };
    // Per-project first (precedence).
    if (opts.projectWorkDir) {
        const pdir = projectSkillsDir(opts.projectWorkDir);
        if (pdir) {
            const hit = loadFrom(pdir, 'project');
            if (hit) {
                return hit;
            }
        }
    }
    // Global fallback.
    return loadFrom(globalSkillsDir(), 'global');
}
|
|
296
|
+
/** Internal helper for the used-by join: names of jobs whose skills[]
 * array references the given skill, in job order. */
function jobsUsing(skillName, jobs) {
    return jobs
        .filter((job) => Array.isArray(job.skills) && job.skills.includes(skillName))
        .map((job) => job.name);
}
|
|
305
|
+
/** Test-only: where the loader looked. Useful in unit tests + the
 * dashboard's diagnostics surface. */
export function _skillDirsForDiagnostics(workDir) {
    const project = projectSkillsDir(workDir);
    return {
        global: globalSkillsDir(),
        project: project ?? null,
    };
}
|
|
313
|
+
//# sourceMappingURL=skill-store.js.map
|