@ryanreh99/skills-sync 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +74 -0
- package/dist/assets/contracts/build/bundle.schema.json +76 -0
- package/dist/assets/contracts/inputs/config.schema.json +13 -0
- package/dist/assets/contracts/inputs/mcp-servers.schema.json +56 -0
- package/dist/assets/contracts/inputs/pack-manifest.schema.json +33 -0
- package/dist/assets/contracts/inputs/pack-sources.schema.json +47 -0
- package/dist/assets/contracts/inputs/profile.schema.json +21 -0
- package/dist/assets/contracts/inputs/upstreams.schema.json +45 -0
- package/dist/assets/contracts/runtime/targets.schema.json +120 -0
- package/dist/assets/contracts/state/upstreams-lock.schema.json +38 -0
- package/dist/assets/manifests/targets.linux.json +27 -0
- package/dist/assets/manifests/targets.macos.json +27 -0
- package/dist/assets/manifests/targets.windows.json +27 -0
- package/dist/assets/seed/config.json +3 -0
- package/dist/assets/seed/packs/personal/mcp/servers.json +20 -0
- package/dist/assets/seed/packs/personal/pack.json +7 -0
- package/dist/assets/seed/packs/personal/sources.json +31 -0
- package/dist/assets/seed/profiles/personal.json +4 -0
- package/dist/assets/seed/upstreams.json +23 -0
- package/dist/cli.js +532 -0
- package/dist/index.js +27 -0
- package/dist/lib/adapters/claude.js +49 -0
- package/dist/lib/adapters/codex.js +239 -0
- package/dist/lib/adapters/common.js +114 -0
- package/dist/lib/adapters/copilot.js +53 -0
- package/dist/lib/adapters/cursor.js +53 -0
- package/dist/lib/adapters/gemini.js +52 -0
- package/dist/lib/agents.js +888 -0
- package/dist/lib/bindings.js +510 -0
- package/dist/lib/build.js +190 -0
- package/dist/lib/bundle.js +165 -0
- package/dist/lib/config.js +324 -0
- package/dist/lib/core.js +447 -0
- package/dist/lib/detect.js +56 -0
- package/dist/lib/doctor.js +504 -0
- package/dist/lib/init.js +292 -0
- package/dist/lib/inventory.js +235 -0
- package/dist/lib/manage.js +463 -0
- package/dist/lib/mcp-config.js +264 -0
- package/dist/lib/profile-transfer.js +221 -0
- package/dist/lib/upstreams.js +782 -0
- package/docs/agent-storage-map.md +153 -0
- package/docs/architecture.md +117 -0
- package/docs/changelog.md +12 -0
- package/docs/commands.md +94 -0
- package/docs/contracts.md +112 -0
- package/docs/homebrew.md +46 -0
- package/docs/quickstart.md +14 -0
- package/docs/roadmap.md +5 -0
- package/docs/security.md +32 -0
- package/docs/user-guide.md +257 -0
- package/package.json +61 -0
|
@@ -0,0 +1,264 @@
|
|
|
1
|
+
import fs from "fs-extra";
|
|
2
|
+
import os from "node:os";
|
|
3
|
+
import path from "node:path";
|
|
4
|
+
import {
|
|
5
|
+
CODEX_MCP_BLOCK_END,
|
|
6
|
+
CODEX_MCP_BLOCK_START,
|
|
7
|
+
MCP_MANAGED_PREFIX,
|
|
8
|
+
fileSha256
|
|
9
|
+
} from "./core.js";
|
|
10
|
+
|
|
11
|
+
// Escape every regex metacharacter in `value` so it can be embedded in a
// RegExp pattern and match only itself.
function escapeForRegExp(value) {
  return value.replace(/[.*+?^${}()|[\]\\]/g, (ch) => `\\${ch}`);
}
|
|
14
|
+
|
|
15
|
+
// Render `value` as a double-quoted, escaped string (JSON string escaping is
// valid for TOML basic strings). Null/undefined become the empty string.
function tomlString(value) {
  const text = String(value ?? "");
  return JSON.stringify(text);
}
|
|
18
|
+
|
|
19
|
+
// Render `values` as a TOML array of quoted strings. Non-array input yields
// an empty array literal.
function tomlArray(values) {
  if (!Array.isArray(values)) {
    return "[]";
  }
  const rendered = values.map((item) => tomlString(item));
  return `[${rendered.join(", ")}]`;
}
|
|
23
|
+
|
|
24
|
+
// Render a plain object as a TOML inline table with deterministically sorted
// keys. Anything that is not a plain object is treated as empty.
function tomlInlineTable(values) {
  const isPlainObject = Boolean(values) && typeof values === "object" && !Array.isArray(values);
  const source = isPlainObject ? values : {};
  const pairs = Object.keys(source)
    .sort((a, b) => a.localeCompare(b))
    .map((key) => `${tomlTableKey(key)} = ${tomlString(source[key])}`);
  return `{ ${pairs.join(", ")} }`;
}
|
|
30
|
+
|
|
31
|
+
// Render a TOML table key as a quoted string so arbitrary characters in
// server names stay valid.
function tomlTableKey(value) {
  const key = value ?? "";
  return JSON.stringify(String(key));
}
|
|
34
|
+
|
|
35
|
+
// Resolve the user's home directory: Windows USERPROFILE wins, then HOME,
// then the OS-reported home as a last resort.
function runtimeHomeDir() {
  const { USERPROFILE, HOME } = process.env;
  return USERPROFILE || HOME || os.homedir();
}
|
|
38
|
+
|
|
39
|
+
/**
 * Expand home-directory placeholders in a config value: a bare "~", a "~/"
 * or "~\" prefix, "${HOME}", "$HOME", and "%USERPROFILE%" (case-insensitive).
 *
 * @param {*} value - Raw value; coerced to a string ("" for null/undefined).
 * @returns {string} The value with placeholders replaced by the home dir.
 */
function expandRuntimeValue(value) {
  const text = String(value ?? "");
  const home = runtimeHomeDir();
  if (text === "~") {
    return home;
  }
  if (text.startsWith("~/") || text.startsWith("~\\")) {
    // Keep the original separator that followed the tilde.
    return `${home}${text.slice(1)}`;
  }

  // Replacement callbacks insert `home` verbatim; a string replacement would
  // misinterpret "$&", "$$", etc. if the home path happened to contain them.
  return text
    .replace(/\$\{HOME\}/g, () => home)
    // The lookahead stops "$HOME" from matching as a prefix of longer
    // variable names such as "$HOMEBREW_PREFIX".
    .replace(/\$HOME(?![A-Za-z0-9_])/g, () => home)
    .replace(/%USERPROFILE%/gi, () => home);
}
|
|
54
|
+
|
|
55
|
+
// Produce a sorted copy of an env map with each value run through
// expandRuntimeValue. Empty keys are dropped; non-object input yields {}.
function normalizeRuntimeEnv(rawEnv) {
  const isPlainObject = Boolean(rawEnv) && typeof rawEnv === "object" && !Array.isArray(rawEnv);
  if (!isPlainObject) {
    return {};
  }
  const result = {};
  const sortedKeys = Object.keys(rawEnv).sort((a, b) => a.localeCompare(b));
  for (const key of sortedKeys) {
    if (key.length > 0) {
      result[key] = expandRuntimeValue(rawEnv[key]);
    }
  }
  return result;
}
|
|
69
|
+
|
|
70
|
+
/**
 * Build the sorted list of managed MCP server entries from the canonical
 * document. Each entry carries the raw name, the prefixed managed name, and
 * a normalized server definition.
 */
export function buildManagedServerEntries(canonicalMcp) {
  const servers = canonicalMcp?.mcpServers ?? {};

  // URL-based servers keep only their expanded, trimmed url; command-based
  // servers get expanded args and a sorted/expanded env (omitted when empty).
  const normalizeServer = (server) => {
    const source = server ?? {};
    if (typeof source.url === "string" && source.url.trim().length > 0) {
      return { url: expandRuntimeValue(source.url.trim()) };
    }
    const args = Array.isArray(source?.args) ? source.args : [];
    const normalized = {
      ...source,
      args: args.map((arg) => expandRuntimeValue(arg))
    };
    const env = normalizeRuntimeEnv(source.env);
    if (Object.keys(env).length > 0) {
      normalized.env = env;
    } else {
      delete normalized.env;
    }
    return normalized;
  };

  return Object.keys(servers)
    .sort((a, b) => a.localeCompare(b))
    .map((name) => ({
      rawName: name,
      managedName: `${MCP_MANAGED_PREFIX}${name}`,
      server: normalizeServer(servers[name])
    }));
}
|
|
97
|
+
|
|
98
|
+
// Rewrite the managed (prefixed) entries inside `document.mcpServers` in
// place: remove every previously-managed entry, then add the current set.
// Throws when `mcpServers` exists but is not a plain object.
function writeManagedJsonServers(document, canonicalMcp, tool) {
  if (!document.mcpServers) {
    document.mcpServers = {};
  }
  const registry = document.mcpServers;
  if (typeof registry !== "object" || Array.isArray(registry)) {
    throw new Error("Target MCP document must contain an object 'mcpServers' field.");
  }

  // Clear stale managed entries; unmanaged (user-owned) keys are untouched.
  for (const name of Object.keys(registry)) {
    if (name.startsWith(MCP_MANAGED_PREFIX)) {
      delete registry[name];
    }
  }

  for (const { managedName, server } of buildManagedServerEntries(canonicalMcp)) {
    const isUrlServer = typeof server.url === "string" && server.url.trim().length > 0;
    if (isUrlServer) {
      registry[managedName] = { url: server.url };
      continue;
    }
    const base = {
      command: server.command,
      args: Array.isArray(server.args) ? server.args : []
    };
    if (server.env && Object.keys(server.env).length > 0) {
      base.env = server.env;
    }
    // Gemini's config has no transport field; other JSON-config tools get an
    // explicit stdio transport marker.
    registry[managedName] = tool === "gemini" ? base : { transport: "stdio", ...base };
  }
}
|
|
135
|
+
|
|
136
|
+
// Render the managed TOML block for Codex: one [mcp_servers."name"] table per
// managed entry, wrapped between the managed-block start/end markers.
function renderCodexManagedBlock(canonicalMcp) {
  const lines = [CODEX_MCP_BLOCK_START];
  for (const { managedName, server } of buildManagedServerEntries(canonicalMcp)) {
    lines.push(`[mcp_servers.${tomlTableKey(managedName)}]`);
    const isUrlServer = typeof server.url === "string" && server.url.trim().length > 0;
    if (isUrlServer) {
      lines.push(`url = ${tomlString(server.url)}`);
    } else {
      lines.push('transport = "stdio"');
      lines.push(`command = ${tomlString(server.command)}`);
      lines.push(`args = ${tomlArray(server.args)}`);
      if (server.env && Object.keys(server.env).length > 0) {
        lines.push(`env = ${tomlInlineTable(server.env)}`);
      }
    }
    // Blank separator between server tables.
    lines.push("");
  }
  lines.push(CODEX_MCP_BLOCK_END);
  return `${lines.join("\n").trimEnd()}\n`;
}
|
|
155
|
+
|
|
156
|
+
// Remove every managed block (start marker through end marker, inclusive)
// from a Codex TOML file, then collapse any resulting runs of blank lines.
function stripCodexManagedBlock(content) {
  const blockPattern = new RegExp(
    `\\n?${escapeForRegExp(CODEX_MCP_BLOCK_START)}[\\s\\S]*?${escapeForRegExp(CODEX_MCP_BLOCK_END)}\\n?`,
    "g"
  );
  const withoutBlocks = content.replace(blockPattern, "\n");
  return withoutBlocks.replace(/\n{3,}/g, "\n\n");
}
|
|
162
|
+
|
|
163
|
+
/**
 * Write the managed MCP server entries into a tool's config file.
 *
 * Codex gets a marker-delimited TOML block appended after the user's own
 * content; every other tool gets prefixed entries merged into the JSON
 * document's `mcpServers` object.
 *
 * @param {object} params
 * @param {string} params.tool - Tool id; "codex" selects the TOML path.
 * @param {string} params.targetPath - Absolute path of the config file.
 * @param {object} params.canonicalMcp - Canonical MCP document ({ mcpServers }).
 * @param {boolean} [params.dryRun=false] - When true, compute but do not write.
 * @returns {Promise<{method: string, hash: string|null, managedNames: string[], wouldWrite: boolean}>}
 * @throws {Error} If an existing JSON config cannot be parsed or is not an object.
 */
export async function applyManagedMcpConfig({ tool, targetPath, canonicalMcp, dryRun = false }) {
  if (!dryRun) {
    await fs.ensureDir(path.dirname(targetPath));
  }
  const exists = await fs.pathExists(targetPath);
  const managedNames = buildManagedServerEntries(canonicalMcp).map((entry) => entry.managedName);

  if (tool === "codex") {
    let existing = "";
    if (exists) {
      existing = await fs.readFile(targetPath, "utf8");
    }
    // Drop any prior managed block, then re-append the freshly rendered one
    // after the user's remaining content.
    const stripped = stripCodexManagedBlock(existing).trimEnd();
    const managedBlock = renderCodexManagedBlock(canonicalMcp).trimEnd();
    const next = `${stripped.length > 0 ? `${stripped}\n\n` : ""}${managedBlock}\n`;
    const wouldWrite = next !== existing;
    // NOTE(review): the file is rewritten even when wouldWrite is false, so
    // the mtime changes on every non-dry run — confirm that is intended.
    if (!dryRun) {
      await fs.writeFile(targetPath, next, "utf8");
    }
    return {
      method: "toml-namespace",
      hash: dryRun ? null : await fileSha256(targetPath),
      managedNames,
      wouldWrite
    };
  }

  // JSON path: load the existing document (if any) and keep a serialized
  // snapshot so wouldWrite can reflect an actual content change.
  let document = {};
  let before = "{}";
  if (exists) {
    try {
      document = await fs.readJson(targetPath);
      before = JSON.stringify(document);
    } catch (error) {
      throw new Error(`Failed to parse JSON config at ${targetPath}: ${error.message}`);
    }
  }
  if (!document || typeof document !== "object" || Array.isArray(document)) {
    throw new Error(`Target MCP document at ${targetPath} must be a JSON object.`);
  }
  writeManagedJsonServers(document, canonicalMcp, tool);
  const after = JSON.stringify(document);
  const wouldWrite = !exists || before !== after;
  if (!dryRun) {
    await fs.writeFile(targetPath, `${JSON.stringify(document, null, 2)}\n`, "utf8");
  }
  return {
    method: "json-namespace",
    hash: dryRun ? null : await fileSha256(targetPath),
    managedNames,
    wouldWrite
  };
}
|
|
216
|
+
|
|
217
|
+
/**
 * Remove all managed MCP entries from a tool's config file, leaving
 * user-owned content untouched.
 *
 * Codex: strips the marker-delimited TOML block. Other tools: deletes every
 * `mcpServers` key that carries the managed prefix from the JSON document.
 *
 * @param {{tool: string, targetPath: string}} binding - Tool id and config path.
 * @param {{dryRun?: boolean}} [options] - When dryRun, report but do not write.
 * @returns {Promise<{removed: boolean}>} Whether anything was (or would be) removed.
 * @throws {Error} If the JSON config cannot be parsed or is not an object.
 */
export async function removeManagedMcpConfig(binding, options = {}) {
  const { dryRun = false } = options;
  const { tool, targetPath } = binding;
  // Nothing to remove if the config file does not exist.
  if (!(await fs.pathExists(targetPath))) {
    return { removed: false };
  }

  if (tool === "codex") {
    const existing = await fs.readFile(targetPath, "utf8");
    const stripped = stripCodexManagedBlock(existing).trimEnd();
    const next = stripped.length > 0 ? `${stripped}\n` : "";
    // Only rewrite when stripping actually changed the content.
    if (next !== existing) {
      if (!dryRun) {
        await fs.writeFile(targetPath, next, "utf8");
      }
      return { removed: true };
    }
    return { removed: false };
  }

  let document;
  try {
    document = await fs.readJson(targetPath);
  } catch (error) {
    throw new Error(`Failed to parse JSON config at ${targetPath}: ${error.message}`);
  }

  if (!document || typeof document !== "object" || Array.isArray(document)) {
    throw new Error(`Target MCP document at ${targetPath} must be a JSON object.`);
  }
  // No mcpServers object means there is nothing managed to remove.
  if (!document.mcpServers || typeof document.mcpServers !== "object" || Array.isArray(document.mcpServers)) {
    return { removed: false };
  }

  let changed = false;
  for (const key of Object.keys(document.mcpServers)) {
    if (key.startsWith(MCP_MANAGED_PREFIX)) {
      delete document.mcpServers[key];
      changed = true;
    }
  }
  if (changed) {
    if (!dryRun) {
      await fs.writeFile(targetPath, `${JSON.stringify(document, null, 2)}\n`, "utf8");
    }
  }
  return { removed: changed };
}
|
|
@@ -0,0 +1,221 @@
|
|
|
1
|
+
import fs from "fs-extra";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import {
|
|
4
|
+
LOCAL_OVERRIDES_ROOT,
|
|
5
|
+
SCHEMAS,
|
|
6
|
+
assertObjectMatchesSchema,
|
|
7
|
+
logInfo,
|
|
8
|
+
writeJsonFile
|
|
9
|
+
} from "./core.js";
|
|
10
|
+
import { readDefaultProfile, resolvePack, resolveProfile } from "./config.js";
|
|
11
|
+
|
|
12
|
+
// Trim a string value; returns null for non-strings and for strings that are
// empty after trimming.
function normalizeOptionalText(value) {
  if (typeof value !== "string") {
    return null;
  }
  const trimmed = value.trim();
  if (trimmed.length === 0) {
    return null;
  }
  return trimmed;
}
|
|
19
|
+
|
|
20
|
+
// Like normalizeOptionalText, but a missing/blank value is an error; `label`
// names the field in the thrown message.
function normalizeRequiredText(value, label) {
  const text = normalizeOptionalText(value);
  if (text === null) {
    throw new Error(`${label} must be a non-empty string.`);
  }
  return text;
}
|
|
27
|
+
|
|
28
|
+
// Normalize a relative path to forward slashes with no leading slash, and
// reject empty, "." and ".." segments (path-traversal guard). `label` names
// the field in error messages.
function normalizeRelativePath(rawPath, label) {
  const cleaned = String(rawPath ?? "")
    .replace(/\\/g, "/")
    .replace(/^\/+/, "")
    .trim();
  if (!cleaned) {
    throw new Error(`${label} cannot be empty.`);
  }
  const parts = cleaned.split("/");
  const isBadSegment = (segment) => segment.length === 0 || segment === "." || segment === "..";
  if (parts.some(isBadSegment)) {
    throw new Error(`Invalid relative path '${rawPath}' for ${label}.`);
  }
  return parts.join("/");
}
|
|
44
|
+
|
|
45
|
+
// Read and parse a JSON file, or return `fallbackValue` when the file is
// absent.
async function readJsonIfExists(filePath, fallbackValue) {
  const exists = await fs.pathExists(filePath);
  return exists ? fs.readJson(filePath) : fallbackValue;
}
|
|
51
|
+
|
|
52
|
+
// Recursively gather every regular file under `rootPath` in deterministic
// (name-sorted, depth-first) order as { path, contentBase64 } records with
// forward-slash relative paths. Returns [] when the root does not exist.
async function collectSkillFiles(rootPath) {
  if (!(await fs.pathExists(rootPath))) {
    return [];
  }

  const collected = [];

  const visit = async (directory) => {
    const entries = await fs.readdir(directory, { withFileTypes: true });
    entries.sort((a, b) => a.name.localeCompare(b.name));
    for (const dirent of entries) {
      const fullPath = path.join(directory, dirent.name);
      if (dirent.isDirectory()) {
        await visit(fullPath);
      } else if (dirent.isFile()) {
        const relative = path.relative(rootPath, fullPath).split(path.sep).join("/");
        const buffer = await fs.readFile(fullPath);
        collected.push({
          path: relative,
          contentBase64: buffer.toString("base64")
        });
      }
      // Symlinks and other special entries are intentionally skipped.
    }
  };

  await visit(rootPath);
  return collected;
}
|
|
82
|
+
|
|
83
|
+
/**
 * Export a profile's pack (manifest, sources, MCP servers, skill files) as a
 * single JSON payload, written to `output` when given, otherwise to stdout.
 *
 * @param {object} [params]
 * @param {string} [params.profile] - Profile name; falls back to the default profile.
 * @param {string} [params.output] - Destination file path; stdout when omitted.
 * @throws {Error} When no profile can be resolved or a pack document fails
 *   schema validation.
 */
export async function cmdProfileExport({ profile, output } = {}) {
  const explicitProfile = normalizeOptionalText(profile);
  const resolvedProfile = explicitProfile ?? (await readDefaultProfile());
  if (!resolvedProfile) {
    throw new Error(
      "Profile is required. Provide profile name or set a default with 'skills-sync use <name>'."
    );
  }

  const { profile: profileDoc } = await resolveProfile(resolvedProfile);
  const packRoot = await resolvePack(profileDoc);
  // Well-known layout inside a pack directory.
  const packManifestPath = path.join(packRoot, "pack.json");
  const sourcesPath = path.join(packRoot, "sources.json");
  const mcpPath = path.join(packRoot, "mcp", "servers.json");
  const skillsRoot = path.join(packRoot, "skills");

  // Missing documents fall back to minimal valid defaults so sparse packs
  // still export cleanly.
  const packManifest = await readJsonIfExists(packManifestPath, {
    name: resolvedProfile,
    version: "0.0.0",
    description: "",
    maintainer: "",
    tags: []
  });
  const sources = await readJsonIfExists(sourcesPath, { imports: [] });
  const mcpServers = await readJsonIfExists(mcpPath, { servers: {} });

  // Validate before exporting so a broken pack is caught here, not on import.
  await assertObjectMatchesSchema(packManifest, SCHEMAS.packManifest, packManifestPath);
  await assertObjectMatchesSchema(sources, SCHEMAS.packSources, sourcesPath);
  await assertObjectMatchesSchema(mcpServers, SCHEMAS.mcpServers, mcpPath);

  const skillFiles = await collectSkillFiles(skillsRoot);

  const payload = {
    schemaVersion: 1,
    exportedAt: new Date().toISOString(),
    profile: {
      name: resolvedProfile,
      pack: {
        manifest: packManifest,
        sources,
        mcpServers,
        skillFiles
      }
    }
  };

  const outputPath = normalizeOptionalText(output);
  if (outputPath) {
    await writeJsonFile(path.resolve(outputPath), payload);
    logInfo(`Exported profile '${resolvedProfile}'.`);
    return;
  }
  // No output path: emit the payload to stdout for piping.
  process.stdout.write(`${JSON.stringify(payload, null, 2)}\n`);
}
|
|
137
|
+
|
|
138
|
+
/**
 * Decode a skill file's base64 payload into a Buffer.
 *
 * @param {string} contentBase64 - Canonical (unpadded-whitespace-free) base64.
 * @param {string} filePath - Relative path, used only in error messages.
 * @returns {Buffer} Decoded file content.
 * @throws {Error} When the content is missing or not valid base64.
 */
function decodeSkillFileContent(contentBase64, filePath) {
  if (typeof contentBase64 !== "string") {
    throw new Error(`Skill file '${filePath}' is missing base64 content.`);
  }
  // Buffer.from(..., "base64") never throws — it silently skips invalid
  // characters — so the original try/catch was dead code and corrupt input
  // decoded to garbage. Validate the canonical base64 shape (as produced by
  // Buffer#toString("base64") in the export path) so bad payloads fail
  // loudly instead.
  const canonicalBase64 = /^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$/;
  if (!canonicalBase64.test(contentBase64)) {
    throw new Error(`Failed to decode skill file '${filePath}': invalid base64 content.`);
  }
  return Buffer.from(contentBase64, "base64");
}
|
|
148
|
+
|
|
149
|
+
// Materialize exported skill files under `skillsRoot`, validating each
// relative path and decoding its base64 content before writing.
async function writeSkillFiles(skillsRoot, skillFiles) {
  for (const entry of skillFiles) {
    const relative = normalizeRelativePath(entry.path, "skill file path");
    const destination = path.join(skillsRoot, ...relative.split("/"));
    await fs.ensureDir(path.dirname(destination));
    await fs.writeFile(destination, decodeSkillFileContent(entry.contentBase64, relative));
  }
}
|
|
157
|
+
|
|
158
|
+
/**
 * Import a previously exported profile payload into the local overrides
 * workspace, writing the profile pointer, pack documents, and skill files.
 *
 * @param {object} [params]
 * @param {string} params.profile - Name to import the profile as (required).
 * @param {string} params.input - Path to the exported JSON payload (required).
 * @param {boolean} [params.replace=false] - Overwrite an existing profile and
 *   wipe its skills directory before writing.
 * @throws {Error} On missing/invalid input, unsupported schemaVersion, schema
 *   validation failure, or an existing profile without --replace.
 */
export async function cmdProfileImport({ profile, input, replace = false } = {}) {
  const profileName = normalizeRequiredText(profile, "Profile name");
  const inputPath = path.resolve(normalizeRequiredText(input, "Input path"));
  if (!(await fs.pathExists(inputPath))) {
    throw new Error("Input file not found.");
  }

  let payload;
  try {
    payload = await fs.readJson(inputPath);
  } catch (error) {
    throw new Error(`Failed to parse import file: ${error.message}`);
  }

  if (!payload || typeof payload !== "object" || Array.isArray(payload)) {
    throw new Error("Import payload must be a JSON object.");
  }
  // Only version 1 payloads (as produced by cmdProfileExport) are supported.
  if (payload.schemaVersion !== 1) {
    throw new Error(`Unsupported import schemaVersion '${payload.schemaVersion}'.`);
  }

  const packDoc = payload.profile?.pack;
  if (!packDoc || typeof packDoc !== "object" || Array.isArray(packDoc)) {
    throw new Error("Import payload is missing profile.pack.");
  }

  // Missing sections default to minimal valid documents, mirroring export.
  const manifest = packDoc.manifest ?? {};
  const sources = packDoc.sources ?? { imports: [] };
  const mcpServers = packDoc.mcpServers ?? { servers: {} };
  const skillFiles = Array.isArray(packDoc.skillFiles) ? packDoc.skillFiles : [];

  // Validate before touching the filesystem so a bad payload leaves no
  // partial state behind.
  await assertObjectMatchesSchema(manifest, SCHEMAS.packManifest, "import.pack.manifest");
  await assertObjectMatchesSchema(sources, SCHEMAS.packSources, "import.pack.sources");
  await assertObjectMatchesSchema(mcpServers, SCHEMAS.mcpServers, "import.pack.mcpServers");

  // Target layout inside the local overrides workspace.
  const profilePath = path.join(LOCAL_OVERRIDES_ROOT, "profiles", `${profileName}.json`);
  const packRoot = path.join(LOCAL_OVERRIDES_ROOT, "packs", profileName);
  const packManifestPath = path.join(packRoot, "pack.json");
  const sourcesPath = path.join(packRoot, "sources.json");
  const mcpPath = path.join(packRoot, "mcp", "servers.json");
  const skillsRoot = path.join(packRoot, "skills");

  if (!replace && (await fs.pathExists(profilePath))) {
    throw new Error(`Profile '${profileName}' already exists. Use --replace to overwrite local files.`);
  }

  await fs.ensureDir(path.dirname(profilePath));
  await fs.ensureDir(path.dirname(mcpPath));
  await writeJsonFile(profilePath, {
    name: profileName,
    packPath: `workspace/packs/${profileName}`
  });
  await writeJsonFile(packManifestPath, manifest);
  await writeJsonFile(sourcesPath, sources);
  await writeJsonFile(mcpPath, mcpServers);

  // With --replace, wipe the skills tree so deleted files do not linger.
  if (replace) {
    await fs.remove(skillsRoot);
  }
  await fs.ensureDir(skillsRoot);
  await writeSkillFiles(skillsRoot, skillFiles);

  logInfo(`Imported profile '${profileName}'.`);
}
|