@elmundi/ship-cli 0.8.1 → 0.11.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +415 -22
- package/bin/shipctl.mjs +165 -0
- package/lib/adapters/_fs.mjs +165 -0
- package/lib/adapters/agents/index.mjs +26 -0
- package/lib/adapters/ci/azure-pipelines.mjs +23 -0
- package/lib/adapters/ci/buildkite.mjs +24 -0
- package/lib/adapters/ci/circleci.mjs +23 -0
- package/lib/adapters/ci/gh-actions.mjs +29 -0
- package/lib/adapters/ci/gitlab-ci.mjs +23 -0
- package/lib/adapters/ci/jenkins.mjs +23 -0
- package/lib/adapters/ci/manual.mjs +18 -0
- package/lib/adapters/index.mjs +122 -0
- package/lib/adapters/language/dart.mjs +23 -0
- package/lib/adapters/language/go.mjs +23 -0
- package/lib/adapters/language/java.mjs +27 -0
- package/lib/adapters/language/js.mjs +32 -0
- package/lib/adapters/language/kotlin.mjs +48 -0
- package/lib/adapters/language/py.mjs +34 -0
- package/lib/adapters/language/rust.mjs +23 -0
- package/lib/adapters/language/swift.mjs +37 -0
- package/lib/adapters/language/ts.mjs +35 -0
- package/lib/adapters/trackers/azure-boards.mjs +49 -0
- package/lib/adapters/trackers/clickup.mjs +43 -0
- package/lib/adapters/trackers/github-issues.mjs +52 -0
- package/lib/adapters/trackers/jira.mjs +72 -0
- package/lib/adapters/trackers/linear.mjs +62 -0
- package/lib/adapters/trackers/none.mjs +18 -0
- package/lib/adapters/trackers/spreadsheet.mjs +28 -0
- package/lib/artifacts/fs-index.mjs +230 -0
- package/lib/bootstrap/render.mjs +373 -0
- package/lib/cache/store.mjs +422 -0
- package/lib/commands/bootstrap.mjs +4 -0
- package/lib/commands/callback.mjs +302 -0
- package/lib/commands/config.mjs +257 -0
- package/lib/commands/docs.mjs +1 -1
- package/lib/commands/doctor.mjs +583 -0
- package/lib/commands/feedback.mjs +355 -0
- package/lib/commands/help.mjs +96 -21
- package/lib/commands/init.mjs +830 -158
- package/lib/commands/kickoff.mjs +192 -0
- package/lib/commands/knowledge.mjs +368 -0
- package/lib/commands/lanes.mjs +502 -0
- package/lib/commands/manifest-catalog.mjs +102 -38
- package/lib/commands/migrate.mjs +204 -0
- package/lib/commands/new.mjs +452 -0
- package/lib/commands/patterns.mjs +9 -43
- package/lib/commands/run.mjs +617 -0
- package/lib/commands/sync.mjs +749 -0
- package/lib/commands/telemetry.mjs +390 -0
- package/lib/commands/verify.mjs +187 -0
- package/lib/config/io.mjs +232 -0
- package/lib/config/migrate.mjs +215 -0
- package/lib/config/schema.mjs +650 -0
- package/lib/detect.mjs +162 -19
- package/lib/feedback/drafts.mjs +129 -0
- package/lib/find-ship-root.mjs +16 -10
- package/lib/http.mjs +237 -11
- package/lib/state/idempotency.mjs +183 -0
- package/lib/state/lockfile.mjs +180 -0
- package/lib/telemetry/outbox.mjs +224 -0
- package/lib/templates.mjs +53 -65
- package/lib/verify/checks/agents-on-disk.mjs +58 -0
- package/lib/verify/checks/api-reachable.mjs +39 -0
- package/lib/verify/checks/artifacts-up-to-date.mjs +78 -0
- package/lib/verify/checks/bootstrap-files.mjs +67 -0
- package/lib/verify/checks/cache-integrity.mjs +51 -0
- package/lib/verify/checks/ci-secrets.mjs +86 -0
- package/lib/verify/checks/config-present.mjs +39 -0
- package/lib/verify/checks/gitignore-cache.mjs +51 -0
- package/lib/verify/checks/rules-markers.mjs +135 -0
- package/lib/verify/checks/stack-enums.mjs +33 -0
- package/lib/verify/checks/tracker-labels.mjs +91 -0
- package/lib/verify/registry.mjs +120 -0
- package/lib/version.mjs +34 -0
- package/package.json +10 -3
- package/bin/ship.mjs +0 -68
|
@@ -0,0 +1,183 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Idempotency markers for `kind=once` lanes (RFC-0007).
|
|
3
|
+
*
|
|
4
|
+
* Markers live under `.ship/state/<key>.json` and are expected to be
|
|
5
|
+
* committed to the repo. They record that a particular one-shot lane
|
|
6
|
+
* has already run for a specific version of its pattern. The agent
|
|
7
|
+
* orchestration layer reads them before deciding whether to execute the
|
|
8
|
+
* lane again and writes them on success.
|
|
9
|
+
*
|
|
10
|
+
* File format (version 1):
|
|
11
|
+
*
|
|
12
|
+
* {
|
|
13
|
+
* "version": 1,
|
|
14
|
+
* "lane": "seed_knowledge_starters",
|
|
15
|
+
* "pattern_id": "seed-knowledge-starters",
|
|
16
|
+
* "pattern_sha256": "…",
|
|
17
|
+
* "pattern_version": "1.0.0",
|
|
18
|
+
* "completed_at": "2026-04-21T14:20:00Z",
|
|
19
|
+
* "by": { "run_id": "…", "host": "github-actions" }
|
|
20
|
+
* }
|
|
21
|
+
*
|
|
22
|
+
* We keep the reader tolerant of extra keys (forward-compat) and the
|
|
23
|
+
* writer strict about the required ones (backward-compat).
|
|
24
|
+
*/
|
|
25
|
+
|
|
26
|
+
import fs from "node:fs";
|
|
27
|
+
import path from "node:path";
|
|
28
|
+
import { createHash } from "node:crypto";
|
|
29
|
+
|
|
30
|
+
import { findShipRoot } from "../config/io.mjs";
|
|
31
|
+
|
|
32
|
+
// Bump when the on-disk marker shape changes incompatibly; readMarker
// rejects files whose `version` field differs from this value.
export const MARKER_SCHEMA_VERSION = 1;
// Directory, relative to the ship root, where marker files live.
export const STATE_SUBDIR = path.join(".ship", "state");

// Keys become file names (`<key>.json`), so restrict them to a safe,
// lowercase slug: 1–128 chars, first char alphanumeric.
const KEY_REGEX = /^[a-z0-9][a-z0-9_.-]{0,127}$/;
|
|
36
|
+
|
|
37
|
+
/**
 * Resolve the marker path for a given idempotency key. Does not check
 * existence — callers use {@link readMarker} for that.
 *
 * @param {string} cwd
 * @param {string} key
 * @returns {{ root: string, markerPath: string }}
 * @throws when the key is malformed or no `.ship/config.yml` is found.
 */
export function resolveMarkerPath(cwd, key) {
  // Validate the key first — it is cheap, and a bad key should fail even
  // outside a Ship-adopted repo.
  const keyIsValid = typeof key === "string" && KEY_REGEX.test(key);
  if (!keyIsValid) {
    const shown = JSON.stringify(key);
    throw new Error(
      `idempotency key must match /^[a-z0-9][a-z0-9_.-]{0,127}$/; got ${shown}`,
    );
  }

  const root = findShipRoot(cwd);
  if (!root) {
    throw new Error(
      ".ship/config.yml not found; idempotency markers require a Ship-adopted repo",
    );
  }

  const markerPath = path.join(root, STATE_SUBDIR, `${key}.json`);
  return { root, markerPath };
}
|
|
62
|
+
|
|
63
|
+
/**
 * Read and validate an idempotency marker. Returns `null` when the
 * marker file doesn't exist; throws on malformed JSON or schema
 * mismatch (callers should decide whether to treat that as fatal —
 * `shipctl run` currently treats it as fatal so a broken marker can't
 * silently cause a re-seed).
 *
 * @param {string} cwd
 * @param {string} key
 * @returns {object | null}
 */
export function readMarker(cwd, key) {
  const { markerPath } = resolveMarkerPath(cwd, key);
  if (!fs.existsSync(markerPath)) return null;

  let raw;
  try {
    raw = fs.readFileSync(markerPath, "utf8");
  } catch (e) {
    throw new Error(`idempotency: failed to read ${markerPath}: ${e.message}`);
  }

  let parsed;
  try {
    parsed = JSON.parse(raw);
  } catch (e) {
    throw new Error(`idempotency: ${markerPath} is not valid JSON: ${e.message}`);
  }

  // Reject non-objects (including arrays) before inspecting fields.
  const isPlainObject =
    parsed !== null && typeof parsed === "object" && !Array.isArray(parsed);
  if (!isPlainObject) {
    throw new Error(`idempotency: ${markerPath} must be a JSON object`);
  }

  if (parsed.version !== MARKER_SCHEMA_VERSION) {
    throw new Error(
      `idempotency: ${markerPath} has marker version ${JSON.stringify(parsed.version)}; expected ${MARKER_SCHEMA_VERSION}`,
    );
  }
  return parsed;
}
|
|
99
|
+
|
|
100
|
+
/**
 * Write an idempotency marker atomically (tmp file + rename). Creates
 * `.ship/state/` if missing.
 *
 * @param {string} cwd
 * @param {string} key
 * @param {{
 *   lane: string,
 *   pattern_id: string,
 *   pattern_sha256: string,
 *   pattern_version?: string | null,
 *   by?: Record<string, unknown>,
 *   completed_at?: string,
 * }} fields
 * @returns {{ markerPath: string, marker: object }}
 * @throws when `key` is invalid, the repo is not Ship-adopted, or a
 *   required field is missing/empty.
 */
export function writeMarker(cwd, key, fields) {
  const { markerPath } = resolveMarkerPath(cwd, key);

  /* Validate BEFORE the String() coercion below. Previously the check ran
   * on the coerced marker, but String(undefined) is the truthy string
   * "undefined", so a missing field could never be detected and a bogus
   * marker would be committed to the repo. */
  const required = ["lane", "pattern_id", "pattern_sha256"];
  for (const k of required) {
    if (!fields || !fields[k]) {
      throw new Error(`idempotency: missing required field '${k}'`);
    }
  }

  const marker = {
    version: MARKER_SCHEMA_VERSION,
    lane: String(fields.lane),
    pattern_id: String(fields.pattern_id),
    pattern_sha256: String(fields.pattern_sha256),
    pattern_version: fields.pattern_version ?? null,
    completed_at: fields.completed_at ?? new Date().toISOString(),
    by: fields.by ?? defaultActorContext(),
  };

  fs.mkdirSync(path.dirname(markerPath), { recursive: true });
  const body = `${JSON.stringify(marker, null, 2)}\n`;
  // tmp-then-rename keeps concurrent readers from ever seeing a torn file.
  const tmp = `${markerPath}.tmp`;
  fs.writeFileSync(tmp, body, "utf8");
  fs.renameSync(tmp, markerPath);
  return { markerPath, marker };
}
|
|
140
|
+
|
|
141
|
+
/**
 * Decide whether a `kind=once` lane should run given its current
 * marker and the current pattern body.
 *
 * @param {object | null} marker result of readMarker()
 * @param {string} patternBody full ARTIFACT.md text (including frontmatter)
 * @param {"version-change" | "manual"} resetOn
 * @returns {{ run: true, reason: "no-marker" | "sha-changed" } | { run: false, reason: "already-done", marker: object }}
 */
export function decideRun(marker, patternBody, resetOn) {
  if (!marker) return { run: true, reason: "no-marker" };
  // Only hash when the answer depends on it: `resetOn === "manual"` lanes
  // never auto-rerun, so skip SHA-256 over the whole pattern body there.
  if (resetOn === "version-change" && marker.pattern_sha256 !== sha256(patternBody)) {
    return { run: true, reason: "sha-changed" };
  }
  return { run: false, reason: "already-done", marker };
}

/**
 * SHA-256 of the full pattern body (frontmatter included). Exposed so
 * callers can recompute it when writing markers without re-parsing.
 *
 * @param {string} text
 * @returns {string} lowercase hex digest
 */
export function sha256(text) {
  return createHash("sha256").update(text, "utf8").digest("hex");
}
|
|
169
|
+
|
|
170
|
+
/**
 * Build the default `by` context for a marker from environment variables.
 * Prefer the richest available CI context but keep it small — this blob
 * ends up committed in the repo, so we don't want noise.
 */
function defaultActorContext() {
  // First truthy env var wins; fall back to "local" outside any CI.
  const hostProbes = [
    ["GITHUB_ACTIONS", "github-actions"],
    ["GITLAB_CI", "gitlab-ci"],
    ["CI", "ci"],
  ];
  const hit = hostProbes.find(([envVar]) => process.env[envVar]);
  const ctx = { host: hit ? hit[1] : "local" };

  const runId = process.env.SHIP_RUN_ID || process.env.GITHUB_RUN_ID;
  if (runId) ctx.run_id = runId;

  return ctx;
}
|
|
@@ -0,0 +1,180 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* `shipctl.lock.json` — reproducible-build anchor for Ship lanes (RFC-0007
|
|
3
|
+
* Phase 4). Records the exact `(kind, id, version, content_sha256)` of
|
|
4
|
+
* every artifact a lane depends on, plus where that body is materialised on
|
|
5
|
+
* disk. Without it, `shipctl run --offline` has no way to decide whether
|
|
6
|
+
* a cached pattern is "the one the lane expects" — the lockfile gives a
|
|
7
|
+
* positive, auditable answer.
|
|
8
|
+
*
|
|
9
|
+
* Only patterns are locked today (lanes only reference patterns via
|
|
10
|
+
* `lane.pattern`). The schema is forward-compatible: tools and collections
|
|
11
|
+
* can be added to the same `artifacts` map with no version bump.
|
|
12
|
+
*
|
|
13
|
+
* Concurrency: the writer does an atomic rename (write to `.tmp` in the
|
|
14
|
+
* same directory, rename over the target). Readers always open a read-only
|
|
15
|
+
* handle so a race doesn't yield a truncated parse.
|
|
16
|
+
*/
|
|
17
|
+
|
|
18
|
+
import fs from "node:fs";
|
|
19
|
+
import path from "node:path";
|
|
20
|
+
|
|
21
|
+
import { artifactSha256 } from "../cache/store.mjs";
|
|
22
|
+
|
|
23
|
+
// Location of the lockfile relative to the ship root.
const LOCKFILE_REL = path.join(".ship", "shipctl.lock.json");
// Bump on incompatible schema changes; readLockfile rejects other versions.
export const LOCKFILE_SCHEMA_VERSION = 1;

/**
 * Absolute path of `shipctl.lock.json` for a given ship root.
 * @param {string} shipRoot
 * @returns {string}
 */
export function lockfilePath(shipRoot) {
  const resolved = path.join(shipRoot, LOCKFILE_REL);
  return resolved;
}
|
|
29
|
+
|
|
30
|
+
/**
|
|
31
|
+
* @typedef {Object} LockfileEntry
|
|
32
|
+
* @property {string} version Resolved version of the artifact.
|
|
33
|
+
* @property {string} content_sha256 Hex digest (RFC-0005 normalisation).
|
|
34
|
+
* @property {string} cached_path Relative path inside the ship root.
|
|
35
|
+
* @property {"http" | "monorepo" | "inline"} source
|
|
36
|
+
* @property {boolean} pinned Whether a config pin constrained this.
|
|
37
|
+
* @property {string} [channel] Manifest channel at time of lock.
|
|
38
|
+
*
|
|
39
|
+
* @typedef {Object} Lockfile
|
|
40
|
+
* @property {1} version
|
|
41
|
+
* @property {string} generated_at ISO-8601 UTC.
|
|
42
|
+
* @property {string} shipctl_version
|
|
43
|
+
* @property {{ base_url: string, channel: string } | null} source
|
|
44
|
+
* @property {Record<string, LockfileEntry>} artifacts Key: "<kind>/<id>".
|
|
45
|
+
* @property {string[]} [notes] Free-form operator hints.
|
|
46
|
+
*/
|
|
47
|
+
|
|
48
|
+
/**
 * Read and validate `shipctl.lock.json`.
 *
 * @param {string} shipRoot
 * @returns {Lockfile | null} `null` when no lockfile exists.
 * @throws on invalid JSON, a non-object root, an unsupported schema
 *   version, or a missing/invalid `artifacts` map.
 */
export function readLockfile(shipRoot) {
  const file = lockfilePath(shipRoot);
  if (!fs.existsSync(file)) return null;
  const raw = fs.readFileSync(file, "utf8");
  let parsed;
  try {
    parsed = JSON.parse(raw);
  } catch (err) {
    throw new Error(`shipctl.lock.json: invalid JSON (${err instanceof Error ? err.message : err})`);
  }
  /* Explicitly reject arrays: `typeof [] === "object"`, so without this
   * check an array root used to fall through to a confusing "unsupported
   * version undefined" error (sibling readMarker already does this). */
  if (!parsed || typeof parsed !== "object" || Array.isArray(parsed)) {
    throw new Error("shipctl.lock.json: root must be an object");
  }
  if (parsed.version !== LOCKFILE_SCHEMA_VERSION) {
    throw new Error(
      `shipctl.lock.json: unsupported version ${parsed.version} (shipctl supports v${LOCKFILE_SCHEMA_VERSION}).`,
    );
  }
  // An array `artifacts` previously passed validation entirely; it must be
  // a plain key → entry map.
  if (!parsed.artifacts || typeof parsed.artifacts !== "object" || Array.isArray(parsed.artifacts)) {
    throw new Error("shipctl.lock.json: missing `artifacts` map");
  }
  return parsed;
}
|
|
75
|
+
|
|
76
|
+
/**
 * Atomically persist `data` as the repo's `shipctl.lock.json`
 * (tmp file + rename; creates `.ship/` if missing).
 *
 * @param {string} shipRoot
 * @param {Lockfile} data
 */
export function writeLockfile(shipRoot, data) {
  const file = lockfilePath(shipRoot);
  fs.mkdirSync(path.dirname(file), { recursive: true });

  /* Emit artifact keys in sorted order so repeated runs produce minimal
   * diffs; JSON.stringify serialises own string keys in insertion order,
   * so rebuilding the map from sorted entries pins the output order. */
  const artifacts = Object.fromEntries(
    Object.entries(data.artifacts || {}).sort(([a], [b]) =>
      a < b ? -1 : a > b ? 1 : 0,
    ),
  );

  const normalised = {
    version: LOCKFILE_SCHEMA_VERSION,
    generated_at: data.generated_at,
    shipctl_version: data.shipctl_version,
    source: data.source || null,
    artifacts,
    notes: Array.isArray(data.notes) ? [...data.notes] : [],
  };

  const tmp = `${file}.tmp`;
  fs.writeFileSync(tmp, `${JSON.stringify(normalised, null, 2)}\n`, "utf8");
  fs.renameSync(tmp, file);
}
|
|
104
|
+
|
|
105
|
+
/**
 * Build the lookup key the rest of shipctl uses.
 * @param {string} kind
 * @param {string} id
 * @returns {string} `"<kind>/<id>"`
 */
export function lockKey(kind, id) {
  return [kind, id].join("/");
}
|
|
113
|
+
|
|
114
|
+
/**
 * Look up the lockfile entry for `<kind>/<id>`.
 *
 * @param {Lockfile} lock
 * @param {string} kind
 * @param {string} id
 * @returns {LockfileEntry | null} `null` when the lock or entry is absent.
 */
export function lookupLock(lock, kind, id) {
  if (!lock) return null;
  // Key format mirrors lockKey(): "<kind>/<id>".
  const entry = lock.artifacts?.[`${kind}/${id}`];
  return entry || null;
}
|
|
126
|
+
|
|
127
|
+
/**
 * Compute the lockfile entry for a freshly-materialised artifact body.
 * Keeping the signature narrow — just `body` and a bit of provenance —
 * means the caller can't accidentally smuggle transient fields into the
 * lock.
 *
 * @param {Object} params
 * @param {string} params.body Artifact text (markdown + frontmatter).
 * @param {string} params.version Resolved version string.
 * @param {string} params.cachedPath Path relative to the ship root.
 * @param {"http" | "monorepo" | "inline"} params.source
 * @param {boolean} [params.pinned=false]
 * @param {string} [params.channel]
 * @returns {LockfileEntry}
 */
export function entryFromBody({
  body,
  version,
  cachedPath,
  source,
  pinned = false,
  channel,
}) {
  const entry = {
    version,
    content_sha256: artifactSha256(body),
    // Normalise to forward slashes so the lock is identical across OSes.
    cached_path: String(cachedPath).replace(/\\/g, "/"),
    source,
    pinned: Boolean(pinned),
  };
  // `channel` is optional; omit the key entirely when absent.
  if (channel) entry.channel = String(channel);
  return entry;
}
|
|
159
|
+
|
|
160
|
+
/**
 * Decide whether a body matches a lockfile entry. Returns a structured
 * `{ ok, reason }` so callers can emit specific diagnostics.
 *
 * @param {LockfileEntry | null} entry
 * @param {string} body
 * @returns {{ ok: boolean, reason?: string, expected?: string, actual?: string }}
 */
export function verifyBody(entry, body) {
  if (!entry) return { ok: false, reason: "missing-entry" };
  const actual = artifactSha256(body);
  if (actual === entry.content_sha256) {
    return { ok: true };
  }
  return {
    ok: false,
    reason: "sha-mismatch",
    expected: entry.content_sha256,
    actual,
  };
}
|
|
@@ -0,0 +1,224 @@
|
|
|
1
|
+
import fs from "node:fs";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import { readConfig } from "../config/io.mjs";
|
|
4
|
+
|
|
5
|
+
// Event types the backend accepts; appendEvent rejects anything else.
export const ALLOWED_EVENT_TYPES = Object.freeze([
  "artifact.fetch",
  "artifact.use",
  "artifact.sync",
  "feedback.submit",
  "doctor.result",
]);

// Payload keys that must never leave the machine (matched case-insensitively,
// at any nesting depth).
export const DENYLIST_KEYS = Object.freeze([
  "path",
  "code",
  "diff",
  "branch",
  "remote",
  "email",
]);

const ALLOWED_SET = new Set(ALLOWED_EVENT_TYPES);
const DENY_SET = new Set(DENYLIST_KEYS.map((k) => k.toLowerCase()));

/** Path of the pending-events file inside a given ship root. */
export function outboxPath(shipRoot) {
  return path.join(shipRoot, ".ship", "telemetry-outbox.jsonl");
}

/** Log to stderr only when SHIP_DEBUG=1; telemetry must stay quiet otherwise. */
function debug(msg) {
  if (process.env.SHIP_DEBUG === "1") {
    console.error(`[ship:telemetry] ${msg}`);
  }
}

/**
 * Recursively strip denylisted keys from a payload object, returning a new object.
 * Returns {stripped, removed: string[]}.
 */
function scrubPayload(payload) {
  const removed = [];
  function walk(node) {
    if (node === null || typeof node !== "object") return node;
    if (Array.isArray(node)) return node.map(walk);
    const out = {};
    // Object.entries only yields own enumerable string keys.
    for (const [k, v] of Object.entries(node)) {
      /* Treat "__proto__" like a denylisted key: assigning
       * out["__proto__"] = … does NOT create an own property, it triggers
       * the Object.prototype.__proto__ setter and replaces the output
       * object's prototype with attacker-controlled data (JSON.parse can
       * produce such a key). Dropping it is both safe and honest. */
      if (k === "__proto__" || DENY_SET.has(k.toLowerCase())) {
        removed.push(k);
        continue;
      }
      out[k] = walk(v);
    }
    return out;
  }
  const stripped = walk(payload ?? {});
  return { stripped, removed };
}
|
|
57
|
+
|
|
58
|
+
/**
 * Normalize an incoming event into the backend-compatible envelope:
 * { type, anonymous_id, timestamp, payload, shipctl_version?, stack_preset? }
 *
 * Accepts both the old shape ({event, ts}) emitted by earlier sync code and the
 * new shape ({type, timestamp}). Only `type` is considered authoritative for
 * whitelist validation.
 */
function normalizeEvent(input) {
  if (!input || typeof input !== "object") {
    throw new Error("appendEvent: event must be an object");
  }
  // New-shape fields win; old-shape fields are accepted as fallbacks.
  const out = {
    type: input.type || input.event,
    anonymous_id: input.anonymous_id,
    timestamp: input.timestamp || input.ts || new Date().toISOString(),
    payload: input.payload || {},
  };
  if (input.shipctl_version) out.shipctl_version = input.shipctl_version;
  // `null` is a meaningful stack_preset value, so test for undefined only.
  if (input.stack_preset !== undefined) out.stack_preset = input.stack_preset;
  return out;
}
|
|
83
|
+
|
|
84
|
+
/* Best-effort config load: any read/parse failure simply means
 * "telemetry off" — it must never crash the calling command. */
function readConfigSafe(shipRoot) {
  try {
    return readConfig(shipRoot).config;
  } catch {
    return null;
  }
}
|
|
92
|
+
|
|
93
|
+
/**
 * Append one event to the outbox. Silently no-ops when telemetry is disabled.
 * Throws on unknown event type. Strips denylisted keys from payload (quietly
 * logging under SHIP_DEBUG=1).
 *
 * Ordering matters here: the type check runs BEFORE any opt-in gating so a
 * programmer error (bad event type) surfaces even on machines where
 * telemetry is off.
 *
 * @param {string} shipRoot
 * @param {object} event
 * @returns {boolean} true if appended, false if skipped (telemetry off).
 * @throws when the event is not an object or its type is not whitelisted.
 */
export function appendEvent(shipRoot, event) {
  const normalized = normalizeEvent(event);
  if (!normalized.type || !ALLOWED_SET.has(normalized.type)) {
    throw new Error(
      `appendEvent: unknown event type ${JSON.stringify(normalized.type)}; allowed=${ALLOWED_EVENT_TYPES.join(",")}`,
    );
  }

  // Opt-in gate: config must explicitly set telemetry.share === true …
  const cfg = readConfigSafe(shipRoot);
  if (!cfg || cfg.telemetry?.share !== true) return false;
  // … and the SHIP_TELEMETRY=false env var acts as a kill switch on top.
  if (String(process.env.SHIP_TELEMETRY || "").toLowerCase() === "false") return false;

  // Fall back to the configured anonymous id; without one the event can't
  // be attributed at all, so skip rather than send an orphan record.
  if (!normalized.anonymous_id) {
    normalized.anonymous_id = cfg.telemetry?.anonymous_id || null;
  }
  if (!normalized.anonymous_id) return false;

  const { stripped, removed } = scrubPayload(normalized.payload);
  if (removed.length) {
    debug(`stripped denylisted keys from ${normalized.type}: ${removed.join(", ")}`);
  }
  normalized.payload = stripped;

  // One JSON envelope per line (JSONL); append-only so concurrent writers
  // at worst interleave whole lines.
  const file = outboxPath(shipRoot);
  fs.mkdirSync(path.dirname(file), { recursive: true });
  fs.appendFileSync(file, `${JSON.stringify(normalized)}\n`, "utf8");
  return true;
}
|
|
130
|
+
|
|
131
|
+
/**
 * Read and parse events from the outbox. Lines that fail to parse are skipped
 * with a debug warning. Old-shape events (`event`/`ts`) are upgraded on read.
 */
export function listEvents(shipRoot) {
  const file = outboxPath(shipRoot);
  if (!fs.existsSync(file)) return [];

  const events = [];
  // Tolerate both LF and CRLF line endings.
  const lines = fs.readFileSync(file, "utf8").split(/\r?\n/);
  for (const line of lines) {
    const trimmed = line.trim();
    if (!trimmed) continue;

    let record;
    try {
      record = JSON.parse(trimmed);
    } catch {
      debug(`skipping malformed line in ${file}`);
      continue;
    }

    // Upgrade old-shape records ({event, ts}) to the current envelope.
    const envelope = {
      type: record.type || record.event,
      anonymous_id: record.anonymous_id,
      timestamp: record.timestamp || record.ts,
      payload: record.payload || {},
    };
    if (record.shipctl_version) envelope.shipctl_version = record.shipctl_version;
    if (record.stack_preset !== undefined) envelope.stack_preset = record.stack_preset;
    events.push(envelope);
  }
  return events;
}
|
|
162
|
+
|
|
163
|
+
/** Number of well-formed events currently pending in the outbox. */
export function countEvents(shipRoot) {
  const pending = listEvents(shipRoot);
  return pending.length;
}
|
|
166
|
+
|
|
167
|
+
/**
 * Remove events older than `before` (ISO string). If `before` is omitted,
 * clears the entire outbox. Returns the number of events removed.
 */
export function clearEvents(shipRoot, { before } = {}) {
  const file = outboxPath(shipRoot);
  if (!fs.existsSync(file)) return 0;

  // Deleting can fail (e.g. permissions on the directory); truncating the
  // file is an acceptable fallback with the same observable effect.
  const removeFile = () => {
    try {
      fs.unlinkSync(file);
    } catch {
      fs.writeFileSync(file, "", "utf8");
    }
  };

  if (!before) {
    const total = countEvents(shipRoot);
    removeFile();
    return total;
  }

  const cutoff = Date.parse(before);
  if (Number.isNaN(cutoff)) {
    throw new Error(`clearEvents: invalid 'before' timestamp ${JSON.stringify(before)}`);
  }

  let removed = 0;
  const kept = [];
  for (const ev of listEvents(shipRoot)) {
    const ts = Date.parse(ev.timestamp || "");
    // Events with unparsable timestamps are kept (safe default).
    if (!Number.isNaN(ts) && ts < cutoff) {
      removed += 1;
    } else {
      kept.push(ev);
    }
  }

  if (kept.length === 0) {
    removeFile();
  } else {
    const text = kept.map((e) => JSON.stringify(e)).join("\n") + "\n";
    fs.writeFileSync(file, text, "utf8");
  }
  return removed;
}
|
|
210
|
+
|
|
211
|
+
/**
 * Overwrite the outbox with the given list of event envelopes. Used by flush
 * to persist the subset of lines that failed to POST.
 */
export function writeAllEvents(shipRoot, events) {
  const file = outboxPath(shipRoot);

  // An empty list means the outbox is fully drained — remove the file.
  if (events.length === 0) {
    if (fs.existsSync(file)) fs.unlinkSync(file);
    return;
  }

  fs.mkdirSync(path.dirname(file), { recursive: true });
  const lines = events.map((e) => JSON.stringify(e));
  fs.writeFileSync(file, `${lines.join("\n")}\n`, "utf8");
}
|