@elmundi/ship-cli 0.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +60 -0
- package/bin/ship.mjs +62 -0
- package/lib/commands/docs.mjs +114 -0
- package/lib/commands/help.mjs +59 -0
- package/lib/commands/init.mjs +223 -0
- package/lib/commands/manifest-catalog.mjs +157 -0
- package/lib/commands/patterns.mjs +170 -0
- package/lib/config.mjs +50 -0
- package/lib/detect.mjs +59 -0
- package/lib/find-ship-root.mjs +69 -0
- package/lib/http.mjs +46 -0
- package/lib/templates.mjs +113 -0
- package/package.json +36 -0
package/README.md
ADDED
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
# @elmundi/ship-cli
|
|
2
|
+
|
|
3
|
+
Command-line entry to the Ship methodology: **one HTTP API** (FastAPI) for **search, fetch, feedback, patterns, tools, workflows, collections** — or read catalogs from disk inside a Ship clone / `SHIP_REPO` — plus **`ship init`** to inject API usage into agent configs.
|
|
4
|
+
|
|
5
|
+
Published under the npm org **[elmundi](https://www.npmjs.com/org/elmundi)**; the binary name remains **`ship`**.
|
|
6
|
+
|
|
7
|
+
## Requirements
|
|
8
|
+
|
|
9
|
+
- **Node.js 20+** (matches Ship CI and typical adopters).
|
|
10
|
+
|
|
11
|
+
## Install
|
|
12
|
+
|
|
13
|
+
After the package is [published to npm](https://www.npmjs.com/package/@elmundi/ship-cli):
|
|
14
|
+
|
|
15
|
+
```bash
|
|
16
|
+
npm install -g @elmundi/ship-cli
|
|
17
|
+
# or, without a global install:
|
|
18
|
+
npx @elmundi/ship-cli help
|
|
19
|
+
```
|
|
20
|
+
|
|
21
|
+
From a full **Ship** monorepo clone you can still run `npm run ship -- …` from the repo root (workspace).
|
|
22
|
+
|
|
23
|
+
## Adopt without cloning the whole monorepo
|
|
24
|
+
|
|
25
|
+
1. Install the CLI (`npm i -g @elmundi/ship-cli` or use `npx @elmundi/ship-cli`).
|
|
26
|
+
2. From **any** directory, point **`SHIP_API_BASE`** at the **deployed methodology API** and list patterns or catalogs (same server as search):
|
|
27
|
+
|
|
28
|
+
```bash
|
|
29
|
+
SHIP_API_BASE=https://your-ship-api.example.com npx @elmundi/ship-cli patterns list
|
|
30
|
+
SHIP_API_BASE=https://your-ship-api.example.com npx @elmundi/ship-cli tools list
|
|
31
|
+
```
|
|
32
|
+
|
|
33
|
+
3. Optional: work from a **local** Ship checkout (or **`SHIP_REPO`**) to read manifests from disk without calling the API.
|
|
34
|
+
|
|
35
|
+
4. In your **product** repository, wire agents to the methodology API:
|
|
36
|
+
|
|
37
|
+
```bash
|
|
38
|
+
cd /path/to/your-product
|
|
39
|
+
npx @elmundi/ship-cli init --yes
|
|
40
|
+
```
|
|
41
|
+
|
|
42
|
+
Use **`--dry-run`** first to preview; **`--yes`** skips prompts and writes files — see `ship init help`.
|
|
43
|
+
|
|
44
|
+
## Which commands need what
|
|
45
|
+
|
|
46
|
+
| Command | Needs |
|
|
47
|
+
|--------|--------|
|
|
48
|
+
| `ship patterns …`, `ship tools …`, `ship workflows …`, `ship collections …` | Same **`SHIP_API_BASE`** as docs when not on disk. **Local:** cwd inside Ship or **`SHIP_REPO`**. |
|
|
49
|
+
| `ship docs search\|fetch\|feedback` | **`SHIP_API_BASE`** (default `http://127.0.0.1:8100`) or `--base-url`. |
|
|
50
|
+
| `ship init` | Target repo cwd; **`SHIP_API_BASE` / `--base-url`** is the API URL written into snippets. |
|
|
51
|
+
|
|
52
|
+
## Publishing (maintainers)
|
|
53
|
+
|
|
54
|
+
Releases are published via GitHub Actions (**Publish @elmundi/ship-cli to npm**): `npm publish -w @elmundi/ship-cli` from the monorepo root (not `npm publish --prefix cli`, which would try to publish the private root package). Configure the **`NPM_TOKEN`** repository secret. On npmjs.com use either a **Granular Access Token** with **Publish** on **`@elmundi/ship-cli`** (or the **elmundi** org) and **“Bypass two-factor authentication”** enabled for automation, or a classic **Automation** token — classic **Publish** tokens often cannot publish from CI when 2FA is on (`E403` *Two-factor authentication or granular access token with bypass 2fa…*).
|
|
55
|
+
|
|
56
|
+
The root monorepo `package.json` stays **`private`: true**; only **`@elmundi/ship-cli`** is intended for the public registry.
|
|
57
|
+
|
|
58
|
+
## Semver
|
|
59
|
+
|
|
60
|
+
Package version lives in **`cli/package.json`**. Bump it for each npm release following [semver](https://semver.org/).
|
package/bin/ship.mjs
ADDED
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
#!/usr/bin/env node
// CLI entry point: extract global flags, build the shared context, then
// dispatch the first positional argument to its command handler.
import { extractGlobalArgv } from "../lib/config.mjs";
import { docsCommand } from "../lib/commands/docs.mjs";
import { patternsCommand } from "../lib/commands/patterns.mjs";
import { manifestCatalogCommand } from "../lib/commands/manifest-catalog.mjs";
import { printHelp } from "../lib/commands/help.mjs";
import { initCommand } from "../lib/commands/init.mjs";

const argv = process.argv.slice(2);
const { _: positionals, ...globals } = extractGlobalArgv(argv);

// Context threaded through every command handler.
const ctx = {
  baseUrl: globals.baseUrl,
  json: globals.json,
  yes: globals.yes,
  force: globals.force,
  dryRun: globals.dryRun,
};

const [cmd, ...rest] = positionals;

// Command table; the three manifest catalogs share one implementation.
const handlers = {
  docs: () => docsCommand(ctx, rest),
  patterns: () => patternsCommand(ctx, rest),
  tools: () => manifestCatalogCommand("tools", ctx, rest),
  workflows: () => manifestCatalogCommand("workflows", ctx, rest),
  collections: () => manifestCatalogCommand("collections", ctx, rest),
  init: () => initCommand(ctx, rest),
};

try {
  if (!cmd || cmd === "help" || cmd === "-h" || cmd === "--help") {
    printHelp();
    process.exit(0);
  }

  const handler = handlers[cmd];
  if (!handler) {
    console.error(`Unknown command: ${cmd}\nRun: ship help`);
    process.exit(1);
  }

  await handler();
  process.exit(0);
} catch (err) {
  // Print a short message (not a stack trace) for expected CLI failures.
  console.error(err instanceof Error ? err.message : err);
  process.exit(1);
}
|
|
@@ -0,0 +1,114 @@
|
|
|
1
|
+
import { apiPost } from "../http.mjs";
|
|
2
|
+
|
|
3
|
+
/**
 * Handle `ship docs <sub>` commands against the methodology HTTP API:
 *   search   — POST /search with a free-text query
 *   fetch    — POST /fetch for a repo-relative file path
 *   feedback — POST /feedback to open a retro-style issue
 *
 * Prints human-readable text by default, raw JSON with --json.
 *
 * @param {{ baseUrl: string; json: boolean }} ctx global CLI context
 * @param {string[]} args sub-command followed by its own flags/positionals
 */
export async function docsCommand(ctx, args) {
  const [sub, ...rest] = args;
  if (!sub || sub === "help") {
    console.log(`Usage:
  ship docs search <query> [--top-k 8]
  ship docs fetch <repo-relative-path>
  ship docs feedback --title "..." --summary "..." [--recommendation "line"]... [--source-context "..."]

Global flags: --base-url URL --json`);
    return;
  }

  if (sub === "search") {
    const qParts = [];
    let topK = 8;
    // Everything that is not a recognized flag becomes part of the query.
    for (let i = 0; i < rest.length; i++) {
      const a = rest[i];
      if (a === "--top-k" && rest[i + 1]) {
        topK = Number(rest[++i]);
        continue;
      }
      qParts.push(a);
    }
    const query = qParts.join(" ").trim();
    if (query.length < 3) {
      console.error("search: query must be at least 3 characters.");
      process.exit(1);
    }
    // Fix: a non-numeric or non-positive --top-k previously became NaN (or 0)
    // and was sent to the API as-is; reject it locally with a clear message.
    if (!Number.isInteger(topK) || topK < 1) {
      console.error("search: --top-k must be a positive integer.");
      process.exit(1);
    }
    const data = await apiPost(ctx.baseUrl, "/search", { query, top_k: topK });
    if (ctx.json) console.log(JSON.stringify(data, null, 2));
    else {
      console.log(`Query: ${data.query}\n`);
      for (const r of data.results || []) {
        console.log(`- ${r.path} (chunk ${r.chunk_index}, distance ${r.distance ?? "n/a"})`);
        console.log(` ${r.snippet}\n`);
      }
    }
    return;
  }

  if (sub === "fetch") {
    // Join so paths containing spaces survive shell word-splitting.
    const p = rest.join(" ").trim();
    if (!p) {
      console.error("fetch: path required.");
      process.exit(1);
    }
    const data = await apiPost(ctx.baseUrl, "/fetch", { path: p });
    if (ctx.json) console.log(JSON.stringify(data, null, 2));
    else {
      console.log(`# ${data.path}\n`);
      console.log(data.content);
    }
    return;
  }

  if (sub === "feedback") {
    const opts = parseFeedbackArgs(rest);
    // Validate minimum lengths locally before touching the network.
    if (opts.title.length < 5 || opts.summary.length < 10) {
      console.error("feedback: --title (min 5) and --summary (min 10) are required.");
      process.exit(1);
    }
    const body = {
      title: opts.title,
      summary: opts.summary,
      recommendations: opts.recommendations,
      source_context: opts.sourceContext || null,
    };
    const data = await apiPost(ctx.baseUrl, "/feedback", body);
    if (ctx.json) console.log(JSON.stringify(data, null, 2));
    else {
      console.log(`Created: ${data.issue_url} (#${data.issue_number})`);
      if (data.redactions_applied) console.log(`Redactions applied: ${data.redactions_applied}`);
    }
    return;
  }

  console.error(`Unknown docs subcommand: ${sub}`);
  process.exit(1);
}
|
|
86
|
+
|
|
87
|
+
/**
 * Parse the `ship docs feedback` argument tail into a structured options
 * object. Repeated --recommendation flags accumulate; unrecognized tokens
 * (and flags whose next token is missing or empty) are silently ignored.
 * @param {string[]} rest raw arguments after the `feedback` sub-command
 */
function parseFeedbackArgs(rest) {
  const result = {
    title: "",
    summary: "",
    /** @type {string[]} */
    recommendations: [],
    sourceContext: "",
  };
  // Flag → action applied to the flag's value.
  const setters = {
    "--title": (v) => {
      result.title = v;
    },
    "--summary": (v) => {
      result.summary = v;
    },
    "--recommendation": (v) => {
      result.recommendations.push(v);
    },
    "--source-context": (v) => {
      result.sourceContext = v;
    },
  };
  for (let i = 0; i < rest.length; i++) {
    const apply = setters[rest[i]];
    // Only consume the next token when it exists and is truthy, mirroring
    // the original scan (an empty-string value is not consumed).
    if (apply && rest[i + 1]) {
      apply(rest[++i]);
    }
  }
  return result;
}
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
/**
 * Print the top-level CLI help: usage for every sub-command, the global
 * flags, how catalog commands choose disk vs HTTP, init flags, how to start
 * the backend, and a walkthrough for embedding the API in coding agents.
 *
 * NOTE(review): the rendered source strips leading whitespace inside this
 * multi-line template; the indentation below is reconstructed — confirm
 * against the published package before relying on exact column alignment.
 */
export function printHelp() {
  console.log(`Ship CLI — Ship methodology HTTP API (search, fetch, feedback, patterns, tools, workflows, collections) + init.

USAGE
  ship help
  ship docs search <query> [--top-k N]
  ship docs fetch <path>
  ship docs feedback --title "..." --summary "..." [--recommendation "…"]... [--source-context "…"]
  ship patterns list
  ship patterns show <pattern-id>
  ship tools list | ship tools show <id>
  ship workflows list | ship workflows show <id>
  ship collections list | ship collections show <id>
  ship init [--yes] [--force] [--dry-run] [--only <id>] [--cwd <dir>]

GLOBAL FLAGS
  --base-url URL   API root for ALL HTTP commands (default: SHIP_API_BASE or http://127.0.0.1:8100)
  --json           Machine-readable JSON to stdout

CATALOG COMMANDS (patterns, tools, workflows, collections)
  If cwd or SHIP_REPO points at a Ship clone: read manifests from disk.
  Otherwise: same base URL as docs — GET /patterns, /tools, /workflows, /collections (and /{id} for show).

INIT FLAGS
  --yes        Skip confirmation prompts (non-interactive; writes files — review plan with --dry-run first)
  --force      Overwrite / replace existing ship-cli blocks
  --dry-run    Print actions only
  --only <id>  Limit to one target: cursor | agents-md | claude-md | codex | copilot
  --cwd <dir>  Repository root (default: current directory)

BACKEND
  Start from Ship repo: uvicorn backend.app.main:app --reload --host 127.0.0.1 --port 8100
  Env on server: OPENAI_API_KEY (/search), GITHUB_TOKEN (/feedback)

────────────────────────────────────────────────────────
Embedding in an agent (Cursor, Codex, Claude Code, etc.)
────────────────────────────────────────────────────────

1. Run the Ship backend locally (or deploy it) and set SHIP_API_BASE in the agent environment
   if the URL is not the default http://127.0.0.1:8100 .

2. Teach the agent this loop (or mirror it with curl):
   - POST /search with the user question → pick 1–3 paths from results (CLI: ship docs search)
   - POST /fetch for each chosen path → ground answers in those files (CLI: ship docs fetch)
   - Optionally list/show patterns (CLI: ship patterns list | ship patterns show <id> — GET /patterns on the same API, or disk in a Ship clone / SHIP_REPO)
   - POST /feedback only for retro-style notes (no secrets in free text) (CLI: ship docs feedback)

3. Run ship init in the TARGET repository (your product repo, not necessarily Ship).
   It detects .cursor/, AGENTS.md, CLAUDE.md, .codex/, or Copilot instructions and, after
   your confirmation, drops a focused rule or appends a markdown section the agent can read.

4. List patterns/tools/workflows/collections via the same API (or from disk in a clone / SHIP_REPO): ship patterns list , ship tools list , etc.

5. From CI or headless agents, call the same HTTP API with curl or fetch; use ship … --json
   for stable parsing.

For full request/response schemas see documentation/tools/backend-api.md in the Ship repo.
`);
}
|
|
@@ -0,0 +1,223 @@
|
|
|
1
|
+
import fs from "node:fs";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import readline from "node:readline/promises";
|
|
4
|
+
import { stdin as input, stdout as output } from "node:process";
|
|
5
|
+
import { detectAgentTargets } from "../detect.mjs";
|
|
6
|
+
import { MARKER, cursorRuleMdc, markdownSection, standaloneDoc } from "../templates.mjs";
|
|
7
|
+
|
|
8
|
+
const END_MARKER = "<!-- ship-cli:end methodology-api -->";
|
|
9
|
+
|
|
10
|
+
/**
 * Interactive `ship init`: detect agent config targets in a repository and
 * inject Ship methodology API instructions into them.
 *
 * Flow: parse flags → detect targets (detectAgentTargets) → build a plan →
 * print it (or emit JSON and stop) → confirm unless --yes / --dry-run →
 * write each target via the file helpers below.
 *
 * NOTE(review): a bare `ship init` (zero args) prints usage and returns
 * instead of running interactively — confirm this is intended, since the
 * README's adoption steps suggest running `ship init` directly.
 *
 * @param {{ baseUrl: string; yes: boolean; force: boolean; dryRun: boolean; json: boolean }} ctx
 * @param {string[]} args
 */
export async function initCommand(ctx, args) {
  if (!args.length || args[0] === "help" || args[0] === "-h" || args[0] === "--help") {
    console.log(`Usage:
  ship init [--yes] [--force] [--dry-run] [--only <id>] [--cwd <dir>]

Writes Cursor rules and/or markdown sections that point agents at the Ship methodology API
(base URL from SHIP_API_BASE or --base-url, default http://127.0.0.1:8100).

Flags:
  --dry-run   Show the plan only (recommended before first use).
  --yes       Non-interactive: apply immediately. In CI or scripts there is no prompt;
              combine with --dry-run first if you are unsure. --force replaces existing
              ship-cli blocks; without --force, existing injections are skipped.
  --force     Replace existing injected blocks.
  --only      cursor | agents-md | claude-md | codex | copilot
  --cwd       Target repository root (default: current directory).

If stdin is not a TTY and you omit --yes, init exits with an error unless you use --dry-run.`);
    return;
  }

  let cwd = process.cwd();
  /** @type {string[]} */
  let only = [];
  // Positional scan: --cwd and --only consume the following token when present.
  for (let i = 0; i < args.length; i++) {
    const a = args[i];
    if (a === "--cwd" && args[i + 1]) {
      cwd = path.resolve(args[++i]);
      continue;
    }
    if (a === "--only" && args[i + 1]) {
      only.push(args[++i]);
      continue;
    }
  }

  let targets = detectAgentTargets(cwd);
  // `--only cursor` works even when no .cursor/ marker exists yet: synthesize
  // a target so the rules directory can be created from scratch.
  if (only.includes("cursor") && !targets.some((t) => t.id === "cursor")) {
    targets.push({
      id: "cursor",
      label: "Cursor (`.cursor/rules/` will be created if missing)",
      paths: [path.join(cwd, ".cursor", "rules", "ship-methodology-api.mdc")],
    });
  }
  if (only.length) {
    // Reject unknown --only ids up front, then narrow to the requested set.
    const allowed = new Set(["cursor", "agents-md", "claude-md", "codex", "copilot"]);
    for (const o of only) {
      if (!allowed.has(o)) {
        console.error(`init: unknown --only ${o}. Allowed: ${[...allowed].join(", ")}`);
        process.exit(1);
      }
    }
    targets = targets.filter((t) => only.includes(t.id));
    if (!targets.length) {
      console.error("init: no agent targets matched --only (markers missing in this repo).");
      process.exit(1);
    }
  }

  // Human-readable plan, shown before any write and emitted verbatim in --json.
  /** @type {{ id: string; label: string; action: string }[]} */
  const plan = [];

  if (targets.some((t) => t.id === "cursor")) {
    const rulesDir = path.join(cwd, ".cursor", "rules");
    const file = path.join(rulesDir, "ship-methodology-api.mdc");
    plan.push({ id: "cursor", label: "Cursor rule", action: `write ${path.relative(cwd, file)}` });
  }
  for (const t of targets) {
    if (t.id === "agents-md") {
      plan.push({ id: "agents-md", label: t.label, action: `append section → ${t.paths[0]}` });
    }
    if (t.id === "claude-md") {
      plan.push({ id: "claude-md", label: t.label, action: `append section → ${t.paths[0]}` });
    }
    if (t.id === "codex") {
      plan.push({ id: "codex", label: t.label, action: `write ${path.relative(cwd, t.paths[0])}` });
    }
    if (t.id === "copilot") {
      plan.push({ id: "copilot", label: t.label, action: `append section → ${t.paths[0]}` });
    }
  }

  // Fallback: no agent markers at all → write a standalone reference doc.
  const standalonePath = path.join(cwd, "SHIP_AGENT_API.md");
  if (!targets.length) {
    plan.push({
      id: "standalone",
      label: "Standalone reference (no agent markers in repo)",
      action: `write ${path.relative(cwd, standalonePath)}`,
    });
  }

  // --json reports the plan only; it never writes files.
  if (ctx.json) {
    console.log(JSON.stringify({ cwd, baseUrl: ctx.baseUrl, plan }, null, 2));
    return;
  }

  console.log(`Repository: ${cwd}`);
  console.log(`API base URL in injected docs: ${ctx.baseUrl}\n`);
  console.log("Planned changes:");
  for (const p of plan) console.log(` - [${p.id}] ${p.action}`);
  console.log("");

  if (ctx.dryRun) {
    console.log("(dry-run: no files written)");
    return;
  }

  // Without --yes we require an interactive terminal and an explicit "y"/"yes".
  if (!ctx.yes) {
    if (!input.isTTY || !output.isTTY) {
      console.error("init: not a TTY; re-run with --yes or use --dry-run to preview.");
      process.exit(1);
    }
    const rl = readline.createInterface({ input, output });
    const ans = (await rl.question("Apply these changes? [y/N] ")).trim().toLowerCase();
    rl.close();
    if (ans !== "y" && ans !== "yes") {
      console.log("Aborted.");
      return;
    }
  }

  // Apply the plan. Cursor gets a dedicated rule file; markdown targets get
  // an appended section; codex gets a standalone document.
  for (const t of targets) {
    if (t.id === "cursor") {
      await writeCursorRule(cwd, ctx);
    }
    if (t.id === "agents-md") {
      await appendOrWrite(t.paths[0], markdownSection(ctx.baseUrl), ctx);
    }
    if (t.id === "claude-md") {
      await appendOrWrite(t.paths[0], markdownSection(ctx.baseUrl), ctx);
    }
    if (t.id === "codex") {
      await writeNew(t.paths[0], standaloneDoc(ctx.baseUrl), ctx);
    }
    if (t.id === "copilot") {
      await appendOrWrite(t.paths[0], markdownSection(ctx.baseUrl), ctx);
    }
  }

  if (!targets.length) {
    await writeNew(standalonePath, standaloneDoc(ctx.baseUrl), ctx);
  }

  console.log("Done.");
}
|
|
159
|
+
|
|
160
|
+
/**
 * Write the Cursor rule file (.cursor/rules/ship-methodology-api.mdc).
 * A previously injected rule (detected via MARKER) is left untouched unless
 * --force was given; --dry-run performs no writes.
 * @param {string} cwd repository root
 * @param {{ force: boolean; dryRun: boolean; baseUrl: string }} ctx
 */
async function writeCursorRule(cwd, ctx) {
  const dir = path.join(cwd, ".cursor", "rules");
  const target = path.join(dir, "ship-methodology-api.mdc");
  const content = cursorRuleMdc(ctx.baseUrl);

  const alreadyInjected =
    fs.existsSync(target) && fs.readFileSync(target, "utf8").includes(MARKER);
  if (alreadyInjected && !ctx.force) {
    console.log(`skip (exists): ${target}`);
    return;
  }
  if (ctx.dryRun) return;

  fs.mkdirSync(dir, { recursive: true });
  fs.writeFileSync(target, content, "utf8");
  console.log(`wrote ${target}`);
}
|
|
177
|
+
|
|
178
|
+
/**
 * Append the injected ship-cli section (terminated by END_MARKER) to
 * filePath, creating the file when absent. An existing injected block is
 * skipped unless --force, in which case it is stripped and re-appended.
 * --dry-run performs no writes.
 * @param {string} filePath
 * @param {string} section
 * @param {{ force: boolean; dryRun: boolean }} ctx
 */
async function appendOrWrite(filePath, section, ctx) {
  if (ctx.dryRun) return;

  const existing = fs.existsSync(filePath) ? fs.readFileSync(filePath, "utf8") : "";
  const hasBlock = existing.includes(MARKER);
  if (hasBlock && !ctx.force) {
    console.log(`skip (already injected): ${filePath}`);
    return;
  }

  // --force: remove the old injection before appending the fresh one.
  const base = hasBlock ? stripInjectedBlock(existing) : existing;
  const block = `${section}\n${END_MARKER}\n`;
  // Trim trailing whitespace, then separate existing content from the block
  // with a single newline (no separator when the file is effectively empty).
  const next = base.replace(/\s+$/, "") + (base ? "\n" : "") + block;
  fs.writeFileSync(filePath, next, "utf8");
  console.log(`updated ${filePath}`);
}
|
|
199
|
+
|
|
200
|
+
/**
 * Write a standalone document at filePath, creating parent directories as
 * needed. When the file already carries an injected MARKER and --force is
 * absent it is skipped; --dry-run performs no writes.
 * @param {string} filePath
 * @param {string} body
 * @param {{ force: boolean; dryRun: boolean }} ctx
 */
async function writeNew(filePath, body, ctx) {
  if (ctx.dryRun) return;

  const hasInjected =
    fs.existsSync(filePath) && fs.readFileSync(filePath, "utf8").includes(MARKER);
  if (hasInjected && !ctx.force) {
    console.log(`skip (exists): ${filePath}`);
    return;
  }

  fs.mkdirSync(path.dirname(filePath), { recursive: true });
  fs.writeFileSync(filePath, body, "utf8");
  console.log(`wrote ${filePath}`);
}
|
|
215
|
+
|
|
216
|
+
/**
 * Remove a previously injected ship-cli block (MARKER … END_MARKER) from a
 * document and return the remainder with blank-line runs squeezed.
 * @param {string} prev full previous file content
 */
function stripInjectedBlock(prev) {
  const start = prev.indexOf(MARKER);
  if (start < 0) return prev;

  const end = prev.indexOf(END_MARKER, start);
  if (end < 0) {
    // No closing marker: drop everything from the start marker onward and
    // collapse any trailing blank-line run to a single blank line.
    return prev.slice(0, start).replace(/\n{3,}$/, "\n\n");
  }

  const before = prev.slice(0, start);
  const after = prev.slice(end + END_MARKER.length);
  // Note: the squeeze is applied globally, not just at the seam — this
  // mirrors the original behavior.
  return (before + after).replace(/\n{3,}/g, "\n\n");
}
|
|
@@ -0,0 +1,157 @@
|
|
|
1
|
+
import fs from "node:fs";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import { apiGet } from "../http.mjs";
|
|
4
|
+
import { resolveShipRepoRootForCatalog } from "../find-ship-root.mjs";
|
|
5
|
+
|
|
6
|
+
/**
 * Per-catalog metadata: where the manifest lives on disk relative to the
 * Ship repo root, which top-level JSON key holds the entry array, and a
 * human-readable fallback name used when the manifest has no description.
 * @type {Record<string, { manifestRel: string; arrayKey: string; name: string }>}
 */
const CATALOGS = {
  tools: { manifestRel: "tools/manifest.json", arrayKey: "tools", name: "Tools" },
  workflows: {
    manifestRel: "workflows/manifest.json",
    arrayKey: "workflows",
    name: "Workflows",
  },
  collections: {
    manifestRel: "collections/manifest.json",
    arrayKey: "collections",
    name: "Collections",
  },
};
|
|
20
|
+
|
|
21
|
+
/**
 * Entry point for `ship tools|workflows|collections <sub>`.
 * Resolves the catalog spec, prints usage for the help case, then routes to
 * the disk reader when a local Ship tree is available, otherwise to the
 * hosted HTTP API.
 *
 * @param {"tools"|"workflows"|"collections"} kind
 * @param {{ baseUrl: string; json: boolean }} ctx
 * @param {string[]} args subcommand tail (e.g. `list` or `show`, `linear`)
 */
export async function manifestCatalogCommand(kind, ctx, args) {
  const spec = CATALOGS[kind];
  if (!spec) throw new Error(`Unknown catalog kind: ${kind}`);

  const [sub, ...rest] = args;
  const wantsHelp = !sub || sub === "help";
  if (wantsHelp) {
    console.log(`Usage:
  ship ${kind} list
  ship ${kind} show <id>

With a local Ship tree (cwd or SHIP_REPO): reads ${spec.manifestRel} on disk.
Otherwise: methodology HTTP API — GET /${kind} and GET /${kind}/<id> (SHIP_API_BASE / --base-url).

Global flags: --base-url URL --json`);
    return;
  }

  // Prefer a local checkout when one can be located; fall back to HTTP.
  const localRoot = resolveShipRepoRootForCatalog();
  await (localRoot
    ? manifestFromDisk(kind, localRoot, spec, ctx, sub, rest)
    : manifestFromHosted(kind, spec, ctx, sub, rest));
}
|
|
50
|
+
|
|
51
|
+
/**
 * Serve `list` / `show` for a catalog from the hosted methodology HTTP API
 * (GET /<kind> and GET /<kind>/<id>).
 *
 * @param {"tools"|"workflows"|"collections"} kind
 * @param {typeof CATALOGS["tools"]} spec
 * @param {{ baseUrl: string; json: boolean }} ctx
 * @param {string} sub
 * @param {string[]} rest
 */
async function manifestFromHosted(kind, spec, ctx, sub, rest) {
  const base = ctx.baseUrl;

  if (sub === "list") {
    const data = await apiGet(base, `/${kind}`);
    if (ctx.json) {
      console.log(JSON.stringify(data, null, 2));
      return;
    }
    const entries = /** @type {Array<{ id: string; title: string; path: string; tags?: string[] }>} */ (
      data[spec.arrayKey] || []
    );
    console.log(`${data.description || spec.name}\n`);
    for (const entry of entries) {
      console.log(`- ${entry.id}`);
      console.log(` ${entry.title}`);
      console.log(` path: ${entry.path} tags: ${(entry.tags || []).join(", ")}\n`);
    }
    return;
  }

  if (sub === "show") {
    const [id] = rest;
    if (!id) {
      console.error("show: id required.");
      process.exit(1);
    }
    // Encode the id so slashes or spaces cannot mangle the request path.
    const data = await apiGet(base, `/${kind}/${encodeURIComponent(id)}`);
    if (ctx.json) {
      console.log(JSON.stringify(data, null, 2));
      return;
    }
    console.log(`# ${data.title} (${data.id})\n`);
    console.log(data.content);
    return;
  }

  console.error(`Unknown ${kind} subcommand: ${sub}`);
  process.exit(1);
}
|
|
93
|
+
|
|
94
|
+
/**
 * Serve `list` / `show` for a catalog from a manifest file inside a local
 * Ship checkout. `list` prints (or JSON-dumps) the manifest entries; `show`
 * resolves the entry's `path` relative to the repo root, refuses paths that
 * escape the root, and prints the file's content.
 *
 * @param {"tools"|"workflows"|"collections"} kind
 * @param {string} root Ship repository root on disk
 * @param {typeof CATALOGS["tools"]} spec
 * @param {{ json: boolean }} ctx
 * @param {string} sub   subcommand (`list` or `show`)
 * @param {string[]} rest remaining args (`show` expects the id at rest[0])
 */
async function manifestFromDisk(kind, root, spec, ctx, sub, rest) {
  const manifestPath = path.join(root, spec.manifestRel);
  // Throws (and is reported by the top-level CLI handler) when the manifest
  // is missing or not valid JSON.
  const raw = fs.readFileSync(manifestPath, "utf8");
  /** @type {Record<string, unknown>} */
  const data = JSON.parse(raw);
  const entries = /** @type {Array<{ id: string; title: string; summary?: string; path: string; tags?: string[] }>} */ (
    data[spec.arrayKey] || []
  );

  if (sub === "list") {
    if (ctx.json) {
      console.log(JSON.stringify(data, null, 2));
    } else {
      console.log(`${data.description || spec.name}\n`);
      for (const p of entries) {
        console.log(`- ${p.id}`);
        console.log(` ${p.title}`);
        console.log(` path: ${p.path} tags: ${(p.tags || []).join(", ")}\n`);
      }
    }
    return;
  }

  if (sub === "show") {
    const id = rest[0];
    if (!id) {
      console.error("show: id required.");
      process.exit(1);
    }
    const entry = entries.find((e) => e.id === id);
    if (!entry) {
      console.error(`Unknown id: ${id}`);
      process.exit(1);
    }
    // Containment check: the resolved absolute path must be the root itself
    // or live under root; otherwise a crafted manifest `path` (e.g. ../../x)
    // could read files outside the repository.
    const abs = path.resolve(root, entry.path);
    const rootNorm = root.endsWith(path.sep) ? root.slice(0, -1) : root;
    const absNorm = abs.endsWith(path.sep) ? abs.slice(0, -1) : abs;
    if (absNorm !== rootNorm && !abs.startsWith(root + path.sep)) {
      console.error("Manifest path escapes repository root.");
      process.exit(1);
    }
    if (!fs.existsSync(abs) || !fs.statSync(abs).isFile()) {
      console.error(`Missing file: ${entry.path}`);
      process.exit(1);
    }
    const content = fs.readFileSync(abs, "utf8");
    if (ctx.json) {
      console.log(JSON.stringify({ ...entry, content }, null, 2));
    } else {
      console.log(`# ${entry.title} (${entry.id})\n`);
      console.log(content);
    }
    return;
  }

  console.error(`Unknown ${kind} subcommand: ${sub}`);
  process.exit(1);
}
|
|
@@ -0,0 +1,170 @@
|
|
|
1
|
+
import fs from "node:fs";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import { apiGet } from "../http.mjs";
|
|
4
|
+
import { resolveShipRepoRootForCatalog } from "../find-ship-root.mjs";
|
|
5
|
+
|
|
6
|
+
const MANIFEST_REL = "patterns/manifest.json";
|
|
7
|
+
|
|
8
|
+
/**
 * Validate the raw patterns manifest and normalize its top-level fields.
 * `version` defaults to 1 and `description` to "" when absent or non-string.
 * @param {Record<string, unknown>} data parsed manifest JSON
 * @throws {Error} when the "patterns" key is missing or not an array
 */
function parseManifest(data) {
  const rawPatterns = /** @type {unknown} */ (data.patterns);
  if (!Array.isArray(rawPatterns)) {
    throw new Error(`${MANIFEST_REL} must contain a "patterns" array.`);
  }
  const description = typeof data.description === "string" ? data.description : "";
  return {
    version: data.version ?? 1,
    description,
    patterns: /** @type {Array<Record<string, unknown>>} */ (rawPatterns),
  };
}
|
|
22
|
+
|
|
23
|
+
/**
 * Project a manifest pattern entry down to the fields the CLI exposes.
 * All fields pass through unchanged except `tags`, which is normalized to
 * an empty array when missing or not an array.
 * @param {Record<string, unknown>} p raw manifest entry
 */
function slimEntry(p) {
  const { id, title, summary, path: entryPath, group } = p;
  return {
    id,
    title,
    summary,
    path: entryPath,
    tags: Array.isArray(p.tags) ? p.tags : [],
    group,
  };
}
|
|
36
|
+
|
|
37
|
+
/**
 * Serve `patterns list` / `patterns show` from a Ship tree on disk.
 *
 * Reads `patterns/manifest.json` under `root`, then either prints the slim
 * catalog (`list`) or a single pattern's markdown (`show <id>`). Exits the
 * process with code 1 on user errors: missing id, unknown id/subcommand,
 * manifest paths escaping the repository root, or missing files.
 *
 * @param {string} root - Ship repository root (validated by the caller).
 * @param {{ json: boolean }} ctx - Output mode; `json` emits machine-readable JSON.
 * @param {string} sub - Subcommand: "list" or "show".
 * @param {string[]} rest - Remaining positional args (pattern id for "show").
 */
async function patternsFromDisk(root, ctx, sub, rest) {
  const manifestPath = path.join(root, MANIFEST_REL);
  const raw = fs.readFileSync(manifestPath, "utf8");
  /** @type {Record<string, unknown>} */
  const manifest = JSON.parse(raw);
  const { version, description, patterns } = parseManifest(manifest);
  // Skip malformed rows: a usable entry must at least carry a string id.
  const entries = patterns.filter((p) => p && typeof p === "object" && typeof p.id === "string");

  if (sub === "list") {
    const slim = entries.map((p) => slimEntry(p));
    const out = { version, description, patterns: slim };
    if (ctx.json) console.log(JSON.stringify(out, null, 2));
    else {
      console.log(`${description || "Patterns"}\n`);
      for (const p of slim) {
        console.log(`- ${p.id}`);
        console.log(`  ${p.title}`);
        const tags = (p.tags || []).join(", ");
        console.log(`  path: ${p.path}  tags: ${tags}\n`);
      }
    }
    return;
  }

  if (sub === "show") {
    const id = rest[0];
    if (!id) {
      console.error("show: pattern id required.");
      process.exit(1);
    }
    const entry = entries.find((e) => e.id === id);
    if (!entry) {
      console.error(`Unknown id: ${id}`);
      process.exit(1);
    }
    const rel = entry.path;
    if (typeof rel !== "string" || !rel.trim()) {
      console.error("Pattern entry has no path.");
      process.exit(1);
    }
    // Containment check. Normalize the root ONCE and use it for both the
    // resolve and the prefix test: the previous check compared against
    // `root + path.sep`, which doubles the separator when `root` already
    // ends with one and falsely rejected legitimate manifest paths.
    const rootNorm = root.endsWith(path.sep) ? root.slice(0, -1) : root;
    const abs = path.resolve(rootNorm, rel);
    if (abs !== rootNorm && !abs.startsWith(rootNorm + path.sep)) {
      console.error("Manifest path escapes repository root.");
      process.exit(1);
    }
    if (!fs.existsSync(abs) || !fs.statSync(abs).isFile()) {
      console.error(`Missing file: ${rel}`);
      process.exit(1);
    }
    const content = fs.readFileSync(abs, "utf8");
    const full = { ...slimEntry(entry), content };
    if (ctx.json) console.log(JSON.stringify(full, null, 2));
    else {
      console.log(`# ${entry.title} (${entry.id})\n`);
      console.log(content);
    }
    return;
  }

  console.error(`Unknown patterns subcommand: ${sub}`);
  process.exit(1);
}
|
|
107
|
+
|
|
108
|
+
/**
 * Serve `patterns list` / `patterns show` from the hosted HTTP API.
 * Exits the process with code 1 on a missing id or unknown subcommand.
 * @param {{ baseUrl: string; json: boolean }} ctx - API base URL and output mode.
 * @param {string} sub - Subcommand: "list" or "show".
 * @param {string[]} rest - Remaining positional args (pattern id for "show").
 */
async function patternsFromHosted(ctx, sub, rest) {
  const base = ctx.baseUrl;

  switch (sub) {
    case "list": {
      const data = await apiGet(base, "/patterns");
      if (ctx.json) {
        console.log(JSON.stringify(data, null, 2));
        return;
      }
      console.log(`${data.description || "Patterns"}\n`);
      for (const p of data.patterns || []) {
        console.log(`- ${p.id}`);
        console.log(`  ${p.title}`);
        console.log(`  path: ${p.path}  tags: ${(p.tags || []).join(", ")}\n`);
      }
      return;
    }
    case "show": {
      const id = rest[0];
      if (!id) {
        console.error("show: pattern id required.");
        process.exit(1);
      }
      const data = await apiGet(base, `/patterns/${encodeURIComponent(id)}`);
      if (ctx.json) {
        console.log(JSON.stringify(data, null, 2));
        return;
      }
      console.log(`# ${data.title} (${data.id})\n`);
      console.log(data.content);
      return;
    }
    default: {
      console.error(`Unknown patterns subcommand: ${sub}`);
      process.exit(1);
    }
  }
}
|
|
145
|
+
|
|
146
|
+
/**
 * Entry point for `ship patterns …`.
 *
 * With no subcommand (or `help`), prints usage. Otherwise dispatches to the
 * on-disk catalog when cwd/`SHIP_REPO` resolves to a Ship tree, and to the
 * hosted HTTP API otherwise.
 * @param {{ baseUrl: string; json: boolean }} ctx
 * @param {string[]} args - Subcommand followed by its positional args.
 */
export async function patternsCommand(ctx, args) {
  const [sub, ...rest] = args;
  if (!sub || sub === "help") {
    console.log(`Usage:
  ship patterns list
  ship patterns show <pattern-id>

With a local Ship tree (cwd or SHIP_REPO): reads patterns/manifest.json on disk.
Otherwise: same HTTP API as methodology — GET /patterns and GET /patterns/{id} (SHIP_API_BASE / --base-url).

Global flags: --base-url URL  --json`);
    return;
  }

  const root = resolveShipRepoRootForCatalog();
  if (root) {
    await patternsFromDisk(root, ctx, sub, rest);
    return;
  }
  await patternsFromHosted(ctx, sub, rest);
}
|
package/lib/config.mjs
ADDED
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
/**
 * Pull known global flags out of argv; remainder is the subcommand tail.
 *
 * Recognized flags: `--json`, `--yes`/`-y`, `--force`, `--dry-run`, and
 * `--base-url URL` (or `--base-url=URL`). Unrecognized tokens are collected,
 * in order, into `_`. A `--base-url` with no following value is treated as a
 * plain positional.
 * @param {string[]} argv - Raw CLI arguments (after the command name).
 * @returns {{ _: string[], baseUrl: string, json: boolean, yes: boolean, force: boolean, dryRun: boolean }}
 */
export function extractGlobalArgv(argv) {
  const parsed = {
    _: /** @type {string[]} */ ([]),
    // SHIP_API_BASE wins over the built-in default; trailing slash trimmed.
    baseUrl: (process.env.SHIP_API_BASE || "http://127.0.0.1:8100").replace(/\/$/, ""),
    json: false,
    yes: false,
    force: false,
    dryRun: false,
  };

  for (let i = 0; i < argv.length; i += 1) {
    const arg = argv[i];
    if (arg === "--json") {
      parsed.json = true;
    } else if (arg === "--yes" || arg === "-y") {
      parsed.yes = true;
    } else if (arg === "--force") {
      parsed.force = true;
    } else if (arg === "--dry-run") {
      parsed.dryRun = true;
    } else if (arg === "--base-url" && argv[i + 1]) {
      i += 1;
      parsed.baseUrl = String(argv[i]).replace(/\/$/, "");
    } else if (arg.startsWith("--base-url=")) {
      parsed.baseUrl = arg.slice("--base-url=".length).replace(/\/$/, "");
    } else {
      parsed._.push(arg);
    }
  }
  return parsed;
}
|
package/lib/detect.mjs
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
import fs from "node:fs";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
|
|
4
|
+
/**
 * Probe `cwd` for known agent configuration files/directories.
 * Detection order is stable: cursor, agents-md, claude-md, codex, copilot.
 * @param {string} cwd - Project directory to inspect.
 * @returns {{ id: string; label: string; paths: string[] }[]} One entry per detected target.
 */
export function detectAgentTargets(cwd) {
  const cursorDir = path.join(cwd, ".cursor");
  const agentsMd = path.join(cwd, "AGENTS.md");
  const claudeMd = path.join(cwd, "CLAUDE.md");
  const codexDir = path.join(cwd, ".codex");
  const copilotMd = path.join(cwd, ".github", "copilot-instructions.md");

  // Each candidate pairs the filesystem probe with the file(s) `ship init`
  // would write for that agent.
  /** @type {{ id: string; label: string; probe: string; paths: string[] }[]} */
  const candidates = [
    {
      id: "cursor",
      label: "Cursor (`.cursor/` present)",
      probe: cursorDir,
      paths: [path.join(cursorDir, "rules", "ship-methodology-api.mdc")],
    },
    {
      id: "agents-md",
      label: "OpenAI Codex / generic `AGENTS.md`",
      probe: agentsMd,
      paths: [agentsMd],
    },
    {
      id: "claude-md",
      label: "Claude Code `CLAUDE.md`",
      probe: claudeMd,
      paths: [claudeMd],
    },
    {
      id: "codex",
      label: "Codex config dir (`.codex/`)",
      probe: codexDir,
      paths: [path.join(codexDir, "SHIP_API.md")],
    },
    {
      id: "copilot",
      label: "GitHub Copilot instructions",
      probe: copilotMd,
      paths: [copilotMd],
    },
  ];

  return candidates
    .filter(({ probe }) => fs.existsSync(probe))
    .map(({ id, label, paths }) => ({ id, label, paths }));
}
|
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
import fs from "node:fs";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
|
|
4
|
+
// Manifest files whose joint presence identifies a Ship monorepo root.
const MARKERS = [
  "workflows/manifest.json",
  "tools/manifest.json",
  "collections/manifest.json",
  "patterns/manifest.json",
];

/**
 * True when `dir` contains every Ship catalog manifest marker file.
 * @param {string} dir - Candidate repository root.
 * @returns {boolean}
 */
function markersOk(dir) {
  for (const marker of MARKERS) {
    if (!fs.existsSync(path.join(dir, marker))) return false;
  }
  return true;
}
|
|
14
|
+
|
|
15
|
+
/**
 * Walk parents from cwd for a directory containing all Ship manifest markers.
 * Stops at the filesystem root, where `path.dirname` becomes a fixed point.
 * @returns {string | null} The marker-bearing ancestor, or null when none exists.
 */
export function tryFindShipRepoRootFromWalk() {
  let current = path.resolve(process.cwd());
  let previous = null;
  while (current !== previous) {
    if (markersOk(current)) return current;
    previous = current;
    current = path.dirname(current);
  }
  return null;
}
|
|
29
|
+
|
|
30
|
+
/**
 * Root of the Ship monorepo (manifests at repo root).
 * Set `SHIP_REPO` to an absolute path when not running from inside the tree.
 *
 * Unlike `resolveShipRepoRootForCatalog`, a missing root is an error here
 * rather than a signal to fall back to the hosted catalog.
 * @returns {string} Absolute path to the repository root.
 * @throws {Error} When SHIP_REPO is set but invalid, or when no root is found.
 */
export function findShipRepoRoot() {
  // Delegate so the SHIP_REPO validation logic (and its exact error message)
  // lives in exactly one place instead of being duplicated verbatim here.
  const root = resolveShipRepoRootForCatalog();
  if (root) return root;
  throw new Error(
    "Ship repo root not found: run from inside the ship clone, or set SHIP_REPO to the repository root.",
  );
}
|
|
51
|
+
|
|
52
|
+
/**
 * When `SHIP_REPO` is unset, returns repo root only if cwd is inside the tree; otherwise `null` (use hosted catalog).
 * When `SHIP_REPO` is set, validates and returns that path or throws.
 * @returns {string | null}
 * @throws {Error} When SHIP_REPO points at a directory lacking the marker manifests.
 */
export function resolveShipRepoRootForCatalog() {
  const envRepo = process.env.SHIP_REPO?.trim();
  if (!envRepo) return tryFindShipRepoRootFromWalk();

  const resolved = path.resolve(envRepo);
  if (!markersOk(resolved)) {
    throw new Error(
      `SHIP_REPO=${resolved} is not the Ship monorepo (expected tools/, workflows/, collections/, patterns/ manifests at repo root).`,
    );
  }
  return resolved;
}
|
package/lib/http.mjs
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
/**
 * POST a JSON body to the Ship API and return the parsed response.
 * @param {string} baseUrl - API origin, without a trailing slash.
 * @param {string} path - Endpoint path; a leading "/" is added when missing.
 * @param {Record<string, unknown>} body - JSON-serializable request payload.
 * @returns {Promise<unknown>} Parsed JSON, raw text when the body is not JSON, or null for an empty body.
 * @throws {Error} On any non-2xx response, including status and response body.
 */
export async function apiPost(baseUrl, path, body) {
  const suffix = path.startsWith("/") ? path : `/${path}`;
  const url = `${baseUrl}${suffix}`;
  const res = await fetch(url, {
    method: "POST",
    headers: { "Content-Type": "application/json", Accept: "application/json" },
    body: JSON.stringify(body),
  });

  const raw = await res.text();
  let data = null;
  if (raw) {
    try {
      data = JSON.parse(raw);
    } catch {
      // Non-JSON bodies (e.g. proxy error pages) are kept as plain text.
      data = raw;
    }
  }

  if (res.ok) return data;
  const msg = typeof data === "string" ? data : JSON.stringify(data);
  throw new Error(`HTTP ${res.status} ${res.statusText} for POST ${url}\n${msg}`);
}
|
|
26
|
+
|
|
27
|
+
/**
 * GET a Ship API endpoint and return the parsed response.
 * @param {string} baseUrl - API origin, without a trailing slash.
 * @param {string} path - Endpoint path; a leading "/" is added when missing.
 * @returns {Promise<unknown>} Parsed JSON, raw text when the body is not JSON, or null for an empty body.
 * @throws {Error} On any non-2xx response, including status and response body.
 */
export async function apiGet(baseUrl, path) {
  const suffix = path.startsWith("/") ? path : `/${path}`;
  const url = `${baseUrl}${suffix}`;
  const res = await fetch(url, { headers: { Accept: "application/json" } });

  const raw = await res.text();
  let data = null;
  if (raw) {
    try {
      data = JSON.parse(raw);
    } catch {
      // Non-JSON bodies (e.g. proxy error pages) are kept as plain text.
      data = raw;
    }
  }

  if (res.ok) return data;
  const msg = typeof data === "string" ? data : JSON.stringify(data);
  throw new Error(`HTTP ${res.status} ${res.statusText} for GET ${url}\n${msg}`);
}
|
|
@@ -0,0 +1,113 @@
|
|
|
1
|
+
// Sentinel embedded in every generated document so `ship init` can locate
// sections it previously wrote without clobbering user content.
const MARKER = "<!-- ship-cli: methodology-api -->";

/**
 * Build the Cursor rule file (`.mdc`) that teaches the agent to call the
 * Ship methodology HTTP API (search, fetch, feedback, patterns).
 * @param {string} baseUrl - API base URL interpolated into the examples.
 * @returns {string} Complete `.mdc` file contents, front matter included.
 */
export function cursorRuleMdc(baseUrl) {
  return `---
name: ship-methodology-api
description: Call the Ship methodology HTTP API (semantic search, fetch full docs, retro feedback, pattern index) from the agent.
---

${MARKER}

# Ship methodology API (local)

Base URL (override with \`SHIP_API_BASE\` for agents, or \`--base-url\` for CLI): \`${baseUrl}\`

## When to use

1. **Discover** — \`POST /search\` with a natural-language query over Ship docs + prompts.
2. **Read** — \`POST /fetch\` with a repo-relative \`path\` from search hits (markdown/text only).
3. **Retro** — \`POST /feedback\` to open a sanitized GitHub issue (needs \`GITHUB_TOKEN\` on the server).
4. **Patterns** — run \`ship patterns list\` / \`ship patterns show <id>\` (\`GET /patterns\` on the same base URL as search; or disk when cwd/\`SHIP_REPO\` is the Ship tree).

## Examples (CLI from Ship repo)

\`\`\`bash
npm run ship -- docs search "release gates and qa split" --top-k 8
npm run ship -- docs fetch documentation/adoption/delivery-quality-and-release-process.md
npm run ship -- patterns list
\`\`\`

Equivalent curl (when not using the CLI):

\`\`\`bash
curl -sS -X POST "${baseUrl}/search" -H "Content-Type: application/json" \\
  -d '{"query":"release gates and qa split","top_k":8}'
curl -sS -X POST "${baseUrl}/fetch" -H "Content-Type: application/json" \\
  -d '{"path":"documentation/adoption/delivery-quality-and-release-process.md"}'
curl -sS "${baseUrl}/patterns"
\`\`\`

## Agent workflow

Prefer **search → fetch one path → quote** in your reply. Never paste secrets into \`/feedback\`; the server redacts common token shapes.

Run the API locally: \`uvicorn backend.app.main:app --reload --host 127.0.0.1 --port 8100\` (see \`documentation/tools/backend-api.md\` in the Ship repo).
`;
}
|
|
50
|
+
|
|
51
|
+
/**
 * Markdown section appended to an existing agent config file
 * (AGENTS.md / CLAUDE.md / Copilot instructions) by `ship init`.
 * Starts with a horizontal rule plus the MARKER sentinel so re-runs can
 * find and replace the previously injected section.
 * @param {string} baseUrl - API base URL interpolated into the text.
 * @returns {string} Section text, beginning with a blank line and `---`.
 */
export function markdownSection(baseUrl) {
  return `

---

${MARKER}

## Ship methodology API

Base URL: \`${baseUrl}\` (env \`SHIP_API_BASE\`).

- **Search** \`POST /search\` JSON \`{ "query": string, "top_k"?: number }\`
- **Fetch** \`POST /fetch\` JSON \`{ "path": "documentation/...md" }\`
- **Feedback** \`POST /feedback\` JSON \`{ "title", "summary", "recommendations"?: string[], "source_context"?: string }\`
- **Patterns** — \`ship patterns list\` / \`ship patterns show <id>\` (same \`SHIP_API_BASE\` as search, or local tree): \`GET /patterns\`, \`GET /patterns/{id}\`

Use search first, then fetch the best path. Keep tokens out of feedback bodies.
`;
}
|
|
73
|
+
|
|
74
|
+
/**
 * Standalone agent reference document written by `ship init` (e.g. into
 * `.codex/SHIP_API.md`): endpoint table, CLI examples, and a curl example.
 * Contains the MARKER sentinel so re-runs can detect an existing copy.
 * @param {string} baseUrl - API base URL interpolated into the text.
 * @returns {string} Complete markdown document.
 */
export function standaloneDoc(baseUrl) {
  return `# Ship methodology API — agent reference

${MARKER}

Generated by \`ship init\`. Base URL: \`${baseUrl}\`

See the Ship repo \`documentation/tools/backend-api.md\` for full contract.

## Endpoints

| Method | Path | Body / notes |
|--------|------|----------------|
| POST | /search | \`{ "query": "...", "top_k": 8 }\` |
| POST | /fetch | \`{ "path": "documentation/foo.md" }\` |
| POST | /feedback | \`{ "title", "summary", "recommendations": [], "source_context" }\` |
| GET | /patterns | list manifest — **CLI:** \`ship patterns list\` (HTTP or disk in clone) |
| GET | /patterns/{id} | metadata + markdown \`content\` — **CLI:** \`ship patterns show <id>\` |

## CLI (from Ship monorepo)

\`\`\`bash
npm run ship -- patterns list
npm run ship -- patterns show catalog-a1-intake
npm run ship -- docs search "intake labels" --top-k 5
\`\`\`

## curl (direct HTTP)

\`\`\`bash
export SHIP=${baseUrl}
curl -sS -X POST "$SHIP/search" -H "Content-Type: application/json" -d '{"query":"intake labels","top_k":5}'
\`\`\`
`;
}

export { MARKER };
|
package/package.json
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@elmundi/ship-cli",
|
|
3
|
+
"version": "0.7.0",
|
|
4
|
+
"type": "module",
|
|
5
|
+
"description": "Ship CLI — docs API (search/fetch/feedback), on-disk patterns & catalogs, and agent init",
|
|
6
|
+
"license": "Apache-2.0",
|
|
7
|
+
"author": "Denys Kuzin",
|
|
8
|
+
"repository": {
|
|
9
|
+
"type": "git",
|
|
10
|
+
"url": "https://github.com/ElMundiUA/ship.git",
|
|
11
|
+
"directory": "cli"
|
|
12
|
+
},
|
|
13
|
+
"bugs": {
|
|
14
|
+
"url": "https://github.com/ElMundiUA/ship/issues"
|
|
15
|
+
},
|
|
16
|
+
"homepage": "https://github.com/ElMundiUA/ship/tree/main/cli#readme",
|
|
17
|
+
"engines": {
|
|
18
|
+
"node": ">=20"
|
|
19
|
+
},
|
|
20
|
+
"keywords": [
|
|
21
|
+
"ship",
|
|
22
|
+
"cli",
|
|
23
|
+
"agents",
|
|
24
|
+
"methodology"
|
|
25
|
+
],
|
|
26
|
+
"files": [
|
|
27
|
+
"bin",
|
|
28
|
+
"lib"
|
|
29
|
+
],
|
|
30
|
+
"publishConfig": {
|
|
31
|
+
"access": "public"
|
|
32
|
+
},
|
|
33
|
+
"bin": {
|
|
34
|
+
"ship": "./bin/ship.mjs"
|
|
35
|
+
}
|
|
36
|
+
}
|