ctxbin 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +133 -0
- package/agent-addon.md +47 -0
- package/dist/agent-addon.md +47 -0
- package/dist/cli.js +1066 -0
- package/dist/cli.js.map +1 -0
- package/dist/index.js +694 -0
- package/dist/index.js.map +1 -0
- package/dist/skills/ctxbin/SKILL.md +137 -0
- package/package.json +49 -0
- package/skills/ctxbin/SKILL.md +129 -0
package/dist/index.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../src/index.ts","../src/validators.ts","../src/errors.ts","../src/constants.ts","../src/value.ts","../src/skillpack.ts","../src/fs-ops.ts","../src/chmod.ts","../src/perm.ts","../src/tar-utils.ts","../src/skillref.ts","../src/input.ts"],"sourcesContent":["export { normalizeGithubUrl, validateCommitSha, normalizeSkillPath } from \"./validators\";\nexport { detectSkillValueType } from \"./value\";\nexport { SKILLPACK_HEADER, SKILLREF_HEADER } from \"./constants\";\nexport { createSkillpackFromDir } from \"./skillpack\";\nexport { createSkillrefValue, parseSkillrefValue } from \"./skillref\";\nexport { loadSkillrefToDir } from \"./skillref\";\nexport { resolveSaveInput } from \"./input\";\nexport { CtxbinError, formatError } from \"./errors\";\nexport { safeChmod } from \"./chmod\";\n","import path from \"node:path\";\nimport { fail } from \"./errors\";\n\nexport function normalizeGithubUrl(input: string): string {\n let url: URL;\n try {\n url = new URL(input);\n } catch {\n return fail(\"INVALID_URL\", \"invalid URL\");\n }\n\n if (url.protocol !== \"https:\") {\n return fail(\"INVALID_URL\", \"URL must use https\");\n }\n if (url.hostname !== \"github.com\") {\n return fail(\"INVALID_URL\", \"only github.com is supported\");\n }\n if (url.search || url.hash) {\n return fail(\"INVALID_URL\", \"URL must not include query or hash\");\n }\n\n const parts = url.pathname.split(\"/\").filter(Boolean);\n if (parts.length !== 2) {\n return fail(\"INVALID_URL\", \"URL must be https://github.com/<owner>/<repo>\");\n }\n const owner = parts[0];\n let repo = parts[1];\n if (repo.endsWith(\".git\")) {\n repo = repo.slice(0, -4);\n }\n if (!owner || !repo) {\n return fail(\"INVALID_URL\", \"URL must be https://github.com/<owner>/<repo>\");\n }\n\n return `https://github.com/${owner}/${repo}`;\n}\n\nexport function validateCommitSha(ref: string): string {\n if (!/^[0-9a-f]{40}$/.test(ref)) {\n return fail(\"INVALID_REF\", \"ref must be a 40-hex commit SHA\");\n }\n return ref;\n}\n\nexport function normalizeSkillPath(input: string): string {\n const trimmed = input.trim();\n if (!trimmed) {\n return fail(\"INVALID_PATH\", \"path must be a non-empty directory path\");\n }\n const cleaned = trimmed.replace(/\\\\/g, \"/\");\n if (cleaned.startsWith(\"/\")) {\n return fail(\"INVALID_PATH\", \"path must be relative, not absolute\");\n }\n const normalized = path.posix.normalize(cleaned).replace(/^\\.\\//, \"\");\n if (normalized === \".\" || normalized === \"\") {\n return fail(\"INVALID_PATH\", \"path must be a non-empty directory path\");\n }\n if (normalized.startsWith(\"../\") || normalized.includes(\"/../\") || normalized === \"..\") {\n return fail(\"INVALID_PATH\", \"path must not include .. 
segments\");\n }\n if (normalized.endsWith(\"/\")) {\n return normalized.slice(0, -1);\n }\n return normalized;\n}\n\nexport function assertSafeTarPath(entryPath: string): void {\n const cleaned = entryPath.replace(/\\\\/g, \"/\");\n if (cleaned.startsWith(\"/\")) {\n return fail(\"INVALID_PATH\", `tar entry path must be relative: ${entryPath}`);\n }\n const normalized = path.posix.normalize(cleaned);\n if (normalized.startsWith(\"../\") || normalized === \"..\" || normalized.includes(\"/../\")) {\n return fail(\"INVALID_PATH\", `tar entry path contains traversal: ${entryPath}`);\n }\n}\n","export type ErrorCode =\n | \"INVALID_INPUT\"\n | \"MISSING_KEY\"\n | \"INVALID_URL\"\n | \"INVALID_REF\"\n | \"INVALID_PATH\"\n | \"NOT_IN_GIT\"\n | \"NOT_FOUND\"\n | \"TYPE_MISMATCH\"\n | \"SIZE_LIMIT\"\n | \"NETWORK\"\n | \"IO\";\n\nexport class CtxbinError extends Error {\n readonly code: ErrorCode;\n\n constructor(code: ErrorCode, message: string) {\n super(message);\n this.code = code;\n }\n}\n\nexport function fail(code: ErrorCode, message: string): never {\n throw new CtxbinError(code, message);\n}\n\nexport function formatError(err: unknown): string {\n if (err instanceof CtxbinError) {\n return `CTXBIN_ERR ${err.code}: ${err.message}`;\n }\n const message = err instanceof Error ? err.message : String(err);\n return `CTXBIN_ERR IO: ${message}`;\n}\n","export const SKILLPACK_HEADER = \"ctxbin-skillpack@1\\n\";\nexport const SKILLREF_HEADER = \"ctxbin-skillref@1\\n\";\n\nexport const MAX_SKILLPACK_BYTES = 7 * 1024 * 1024;\nexport const MAX_SKILLREF_DOWNLOAD_BYTES = 20 * 1024 * 1024;\nexport const MAX_SKILLREF_EXTRACT_BYTES = 100 * 1024 * 1024;\nexport const MAX_SKILLREF_FILES = 5000;\n\nexport const SKILLREF_CONNECT_TIMEOUT_MS = 5000;\nexport const SKILLREF_DOWNLOAD_TIMEOUT_MS = 30000;\n\nexport const DEFAULT_EXCLUDES = [\".git\", \"node_modules\", \".DS_Store\"];\n","import { SKILLPACK_HEADER, SKILLREF_HEADER } from \"./constants\";\n\nexport type SkillValueType = \"skillpack\" | \"skillref\" | \"string\";\n\nexport function detectSkillValueType(value: string): SkillValueType {\n if (value.startsWith(SKILLPACK_HEADER)) return \"skillpack\";\n if (value.startsWith(SKILLREF_HEADER)) return \"skillref\";\n return \"string\";\n}\n","import fs from \"node:fs/promises\";\nimport { createWriteStream } from \"node:fs\";\nimport path from \"node:path\";\nimport os from \"node:os\";\nimport zlib from \"node:zlib\";\nimport { pipeline } from \"node:stream/promises\";\nimport tar from \"tar\";\nimport { DEFAULT_EXCLUDES, MAX_SKILLPACK_BYTES, SKILLPACK_HEADER } from \"./constants\";\nimport { fail } from \"./errors\";\nimport { ensureDir, copyDirContents } from \"./fs-ops\";\nimport { applyNormalizedPermissions } from \"./perm\";\nimport { assertSafeTarPath } from \"./validators\";\nimport { listTarEntries } from \"./tar-utils\";\n\nconst ALLOWED_TYPES = new Set([\"File\", \"Directory\"]);\n\nexport async function createSkillpackFromDir(dirPath: string): Promise<string> {\n const stats = await fs.stat(dirPath).catch(() => null);\n if (!stats || !stats.isDirectory()) {\n return fail(\"INVALID_INPUT\", `--dir is not a directory: ${dirPath}`);\n }\n\n const entries = await collectEntries(dirPath);\n const tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), \"ctxbin-skillpack-\"));\n const tarPath = path.join(tmpDir, \"skillpack.tar.gz\");\n\n try {\n const tarStream = tar.c(\n {\n cwd: dirPath,\n portable: true,\n mtime: new Date(0),\n },\n entries\n );\n\n const gzip = zlib.createGzip({ mtime: 0 });\n await 
pipeline(tarStream, gzip, createWriteStream(tarPath));\n\n const stat = await fs.stat(tarPath);\n if (stat.size > MAX_SKILLPACK_BYTES) {\n return fail(\n \"SIZE_LIMIT\",\n `skillpack tar.gz size ${stat.size} bytes exceeds ${MAX_SKILLPACK_BYTES} bytes`\n );\n }\n\n const data = await fs.readFile(tarPath);\n const b64 = data.toString(\"base64\");\n return SKILLPACK_HEADER + b64;\n } finally {\n await fs.rm(tmpDir, { recursive: true, force: true });\n }\n}\n\nexport async function extractSkillpackToDir(value: string, targetDir: string): Promise<void> {\n const base64 = value.slice(SKILLPACK_HEADER.length);\n let buffer: Buffer;\n try {\n buffer = Buffer.from(base64, \"base64\");\n } catch {\n return fail(\"IO\", \"invalid skillpack base64 data\");\n }\n\n const tmpRoot = await fs.mkdtemp(path.join(os.tmpdir(), \"ctxbin-skillpack-\"));\n const tarPath = path.join(tmpRoot, \"skillpack.tar.gz\");\n await fs.writeFile(tarPath, buffer);\n\n try {\n const entries = await listTarEntries(tarPath);\n const execSet = validateTarEntries(entries);\n\n const extractDir = path.join(tmpRoot, \"extract\");\n await ensureDir(extractDir);\n await tar.x({\n file: tarPath,\n cwd: extractDir,\n preserveOwner: false,\n noMtime: true,\n });\n\n await applyNormalizedPermissions(extractDir, execSet);\n await ensureDir(targetDir);\n await copyDirContents(extractDir, targetDir);\n } finally {\n await fs.rm(tmpRoot, { recursive: true, force: true });\n }\n}\n\nasync function collectEntries(root: string): Promise<string[]> {\n const results: string[] = [];\n\n async function walk(absDir: string, relDir: string): Promise<void> {\n const entries = await fs.readdir(absDir, { withFileTypes: true });\n entries.sort((a, b) => a.name.localeCompare(b.name));\n\n for (const entry of entries) {\n if (DEFAULT_EXCLUDES.includes(entry.name)) {\n if (entry.isDirectory()) {\n continue;\n }\n if (entry.isFile() && entry.name === \".DS_Store\") {\n continue;\n }\n }\n\n const absPath = path.join(absDir, entry.name);\n const relPath = relDir ? 
path.posix.join(relDir, entry.name) : entry.name;\n const stat = await fs.lstat(absPath);\n if (stat.isSymbolicLink()) {\n return fail(\"IO\", `symlink not allowed in skillpack: ${absPath}`);\n }\n\n if (entry.isDirectory()) {\n results.push(relPath);\n await walk(absPath, relPath);\n continue;\n }\n\n if (entry.isFile()) {\n if (entry.name === \".DS_Store\") {\n continue;\n }\n results.push(relPath);\n continue;\n }\n\n return fail(\"IO\", `unsupported file type in skillpack: ${absPath}`);\n }\n }\n\n await walk(root, \"\");\n results.sort();\n return results;\n}\n\nfunction validateTarEntries(entries: { path: string; type: string; mode: number }[]): Set<string> {\n const execSet = new Set<string>();\n for (const entry of entries) {\n assertSafeTarPath(entry.path);\n if (!ALLOWED_TYPES.has(entry.type)) {\n return fail(\"IO\", `unsupported entry type in skillpack: ${entry.path}`);\n }\n if (entry.type === \"File\" && (entry.mode & 0o111)) {\n execSet.add(entry.path);\n }\n }\n return execSet;\n}\n","import fs from \"node:fs/promises\";\nimport path from \"node:path\";\nimport { fail } from \"./errors\";\nimport { safeChmod } from \"./chmod\";\n\nexport async function ensureDir(dir: string): Promise<void> {\n await fs.mkdir(dir, { recursive: true });\n}\n\nexport function toPosix(p: string): string {\n return p.split(path.sep).join(\"/\");\n}\n\nexport async function copyDirContents(src: string, dest: string): Promise<void> {\n await ensureDir(dest);\n const entries = await fs.readdir(src, { withFileTypes: true });\n for (const entry of entries) {\n const srcPath = path.join(src, entry.name);\n const destPath = path.join(dest, entry.name);\n if (entry.isDirectory()) {\n await copyDirContents(srcPath, destPath);\n const stat = await fs.stat(srcPath);\n await safeChmod(destPath, stat.mode & 0o777);\n continue;\n }\n if (entry.isFile()) {\n await ensureDir(path.dirname(destPath));\n await fs.copyFile(srcPath, destPath);\n const stat = await fs.stat(srcPath);\n await safeChmod(destPath, stat.mode & 0o777);\n continue;\n }\n return fail(\"IO\", `unsupported file type during copy: ${srcPath}`);\n }\n}\n","import fs from \"node:fs/promises\";\n\nexport async function safeChmod(path: string, mode: number): Promise<void> {\n try {\n await fs.chmod(path, mode);\n } catch (err) {\n if (process.platform === \"win32\") {\n return;\n }\n throw err;\n }\n}\n","import fs from \"node:fs/promises\";\nimport path from \"node:path\";\nimport { fail } from \"./errors\";\nimport { toPosix } from \"./fs-ops\";\nimport { safeChmod } from \"./chmod\";\n\nexport async function applyNormalizedPermissions(root: string, execSet: Set<string>): Promise<void> {\n async function walk(absDir: string): Promise<void> {\n const entries = await fs.readdir(absDir, { withFileTypes: true });\n for (const entry of entries) {\n const absPath = path.join(absDir, entry.name);\n if (entry.isDirectory()) {\n await safeChmod(absPath, 0o755);\n await walk(absPath);\n continue;\n }\n if (entry.isFile()) {\n const rel = toPosix(path.relative(root, absPath));\n const mode = execSet.has(rel) ? 
0o755 : 0o644;\n await safeChmod(absPath, mode);\n continue;\n }\n return fail(\"IO\", `unsupported file type after extract: ${absPath}`);\n }\n }\n\n await walk(root);\n}\n","import tar from \"tar\";\n\nexport interface TarEntryInfo {\n path: string;\n type: string;\n size: number;\n mode: number;\n}\n\nexport async function listTarEntries(file: string): Promise<TarEntryInfo[]> {\n const entries: TarEntryInfo[] = [];\n await tar.t({\n file,\n onentry(entry) {\n entries.push({\n path: entry.path,\n type: entry.type,\n size: entry.size ?? 0,\n mode: entry.mode ?? 0,\n });\n },\n });\n return entries;\n}\n","import fs from \"node:fs/promises\";\nimport path from \"node:path\";\nimport os from \"node:os\";\nimport { createWriteStream } from \"node:fs\";\nimport { SKILLREF_HEADER, MAX_SKILLREF_DOWNLOAD_BYTES, MAX_SKILLREF_EXTRACT_BYTES, MAX_SKILLREF_FILES, SKILLREF_CONNECT_TIMEOUT_MS, SKILLREF_DOWNLOAD_TIMEOUT_MS } from \"./constants\";\nimport { fail, CtxbinError } from \"./errors\";\nimport { normalizeGithubUrl, normalizeSkillPath, validateCommitSha, assertSafeTarPath } from \"./validators\";\nimport { listTarEntries, TarEntryInfo } from \"./tar-utils\";\nimport tar from \"tar\";\nimport { ensureDir, copyDirContents } from \"./fs-ops\";\nimport { applyNormalizedPermissions } from \"./perm\";\n\nconst ALLOWED_TYPES = new Set([\"File\", \"Directory\"]);\n\nexport interface Skillref {\n url: string;\n path: string;\n ref?: string;\n track?: \"default\";\n}\n\nexport function createSkillrefValue(url: string, skillPath: string, ref?: string): string {\n const normalizedUrl = normalizeGithubUrl(url);\n const normalizedPath = normalizeSkillPath(skillPath);\n const payload = ref\n ? JSON.stringify({ url: normalizedUrl, path: normalizedPath, ref: validateCommitSha(ref) })\n : JSON.stringify({ url: normalizedUrl, path: normalizedPath, track: \"default\" });\n return SKILLREF_HEADER + payload;\n}\n\nexport function parseSkillrefValue(value: string): Skillref {\n if (!value.startsWith(SKILLREF_HEADER)) {\n return fail(\"TYPE_MISMATCH\", \"value is not a skillref\");\n }\n const raw = value.slice(SKILLREF_HEADER.length);\n let parsed: any;\n try {\n parsed = JSON.parse(raw);\n } catch {\n return fail(\"IO\", \"invalid skillref payload JSON\");\n }\n if (!parsed || typeof parsed.url !== \"string\" || typeof parsed.path !== \"string\") {\n return fail(\"IO\", \"invalid skillref payload fields\");\n }\n const normalized = {\n url: normalizeGithubUrl(parsed.url),\n path: normalizeSkillPath(parsed.path),\n } satisfies Pick<Skillref, \"url\" | \"path\">;\n\n if (typeof parsed.ref === \"string\") {\n return { ...normalized, ref: validateCommitSha(parsed.ref) };\n }\n\n if (parsed.track === \"default\") {\n return { ...normalized, track: \"default\" };\n }\n\n return fail(\"IO\", \"invalid skillref payload fields\");\n}\n\nexport async function loadSkillrefToDir(value: string, targetDir: string): Promise<void> {\n const skillref = parseSkillrefValue(value);\n const resolvedRef = skillref.ref ?? 
(await fetchDefaultBranch(skillref.url));\n const tmpRoot = await fs.mkdtemp(path.join(os.tmpdir(), \"ctxbin-skillref-\"));\n const tarPath = path.join(tmpRoot, \"skillref.tar.gz\");\n\n try {\n await downloadArchive(skillref.url, resolvedRef, tarPath);\n\n const entries = await listTarEntries(tarPath).catch(() => fail(\"IO\", \"failed to parse tar archive\"));\n const analysis = analyzeEntries(entries, skillref.path);\n\n const extractDir = path.join(tmpRoot, \"extract\");\n await ensureDir(extractDir);\n\n const stripCount = 1 + skillref.path.split(\"/\").length;\n await tar.x({\n file: tarPath,\n cwd: extractDir,\n preserveOwner: false,\n noMtime: true,\n strip: stripCount,\n filter: (p, entry) => {\n const entryPath = entry?.path ?? p;\n return isUnderPath(entryPath, analysis.prefix, skillref.path);\n },\n });\n\n await applyNormalizedPermissions(extractDir, analysis.execSet);\n await ensureDir(targetDir);\n await copyDirContents(extractDir, targetDir);\n } finally {\n await fs.rm(tmpRoot, { recursive: true, force: true });\n }\n}\n\nasync function downloadArchive(repoUrl: string, ref: string, outPath: string): Promise<void> {\n const { owner, repo } = splitGithubUrl(repoUrl);\n const url = `https://codeload.github.com/${owner}/${repo}/tar.gz/${ref}`;\n const controller = new AbortController();\n const totalTimer = setTimeout(() => controller.abort(), SKILLREF_DOWNLOAD_TIMEOUT_MS);\n let res: Response;\n try {\n res = await fetchWithRedirect(url, 1, controller, [\"github.com\", \"codeload.github.com\"]);\n } catch (err) {\n clearTimeout(totalTimer);\n return fail(\"NETWORK\", `download failed: ${err instanceof Error ? err.message : String(err)}`);\n }\n\n if (!res.ok) {\n clearTimeout(totalTimer);\n const text = await res.text();\n return fail(\"NETWORK\", `download failed (${res.status}): ${text}`);\n }\n if (!res.body) {\n clearTimeout(totalTimer);\n return fail(\"NETWORK\", \"download failed: empty response body\");\n }\n\n const fileStream = createWriteStream(outPath);\n let total = 0;\n let magic = Buffer.alloc(0);\n\n try {\n for await (const chunk of res.body as AsyncIterable<Buffer>) {\n if (magic.length < 2) {\n const needed = 2 - magic.length;\n magic = Buffer.concat([magic, chunk.subarray(0, needed)]);\n if (magic.length === 2) {\n if (magic[0] !== 0x1f || magic[1] !== 0x8b) {\n fileStream.close();\n controller.abort();\n return fail(\"IO\", \"downloaded file is not gzip data\");\n }\n }\n }\n\n total += chunk.length;\n if (total > MAX_SKILLREF_DOWNLOAD_BYTES) {\n fileStream.close();\n controller.abort();\n return fail(\n \"SIZE_LIMIT\",\n `downloaded archive size ${total} exceeds ${MAX_SKILLREF_DOWNLOAD_BYTES} bytes`\n );\n }\n\n fileStream.write(chunk);\n }\n } catch (err) {\n fileStream.close();\n clearTimeout(totalTimer);\n if (err instanceof CtxbinError) {\n throw err;\n }\n return fail(\"NETWORK\", `download failed: ${err instanceof Error ? 
err.message : String(err)}`);\n } finally {\n clearTimeout(totalTimer);\n }\n\n if (magic.length < 2) {\n fileStream.close();\n return fail(\"IO\", \"downloaded file is incomplete\");\n }\n\n await new Promise<void>((resolve, reject) => {\n fileStream.end(() => resolve());\n fileStream.on(\"error\", reject);\n });\n}\n\nasync function fetchWithRedirect(\n url: string,\n redirectsLeft: number,\n controller: AbortController,\n allowedHosts: string[],\n init?: RequestInit\n): Promise<Response> {\n const connectTimer = setTimeout(() => controller.abort(), SKILLREF_CONNECT_TIMEOUT_MS);\n\n const res = await fetch(url, {\n ...init,\n signal: controller.signal,\n redirect: \"manual\",\n });\n\n clearTimeout(connectTimer);\n\n if (isRedirect(res.status)) {\n if (redirectsLeft <= 0) {\n return fail(\"NETWORK\", \"too many redirects\");\n }\n const location = res.headers.get(\"location\");\n if (!location) {\n return fail(\"NETWORK\", \"redirect without location header\");\n }\n const nextUrl = new URL(location, url).toString();\n const host = new URL(nextUrl).hostname;\n if (!allowedHosts.includes(host)) {\n return fail(\"NETWORK\", `redirected to unsupported host: ${host}`);\n }\n return fetchWithRedirect(nextUrl, redirectsLeft - 1, controller, allowedHosts, init);\n }\n\n return res;\n}\n\nfunction isRedirect(status: number): boolean {\n return [301, 302, 303, 307, 308].includes(status);\n}\n\nfunction splitGithubUrl(repoUrl: string): { owner: string; repo: string } {\n const url = new URL(repoUrl);\n const parts = url.pathname.split(\"/\").filter(Boolean);\n if (parts.length !== 2) {\n return fail(\"INVALID_URL\", \"URL must be https://github.com/<owner>/<repo>\");\n }\n return { owner: parts[0], repo: parts[1] };\n}\n\nasync function fetchDefaultBranch(repoUrl: string): Promise<string> {\n const { owner, repo } = splitGithubUrl(repoUrl);\n const url = `https://api.github.com/repos/${owner}/${repo}`;\n const controller = new AbortController();\n const totalTimer = setTimeout(() => controller.abort(), SKILLREF_DOWNLOAD_TIMEOUT_MS);\n let res: Response;\n try {\n res = await fetchWithRedirect(url, 1, controller, [\"github.com\", \"api.github.com\"], {\n headers: {\n \"User-Agent\": \"ctxbin\",\n Accept: \"application/vnd.github+json\",\n },\n });\n } catch (err) {\n clearTimeout(totalTimer);\n return fail(\"NETWORK\", `default branch lookup failed: ${err instanceof Error ? 
err.message : String(err)}`);\n }\n\n if (!res.ok) {\n clearTimeout(totalTimer);\n const text = await res.text();\n return fail(\"NETWORK\", `default branch lookup failed (${res.status}): ${text}`);\n }\n\n let data: any;\n try {\n data = await res.json();\n } catch {\n clearTimeout(totalTimer);\n return fail(\"NETWORK\", \"default branch lookup returned invalid JSON\");\n }\n\n clearTimeout(totalTimer);\n if (!data || typeof data.default_branch !== \"string\" || data.default_branch.length === 0) {\n return fail(\"NETWORK\", \"default branch lookup returned no default_branch\");\n }\n return data.default_branch;\n}\n\nfunction analyzeEntries(entries: TarEntryInfo[], requestedPath: string): {\n prefix: string;\n execSet: Set<string>;\n} {\n if (entries.length === 0) {\n return fail(\"NOT_FOUND\", \"archive contained no entries\");\n }\n\n const prefix = entries[0].path.split(\"/\")[0];\n if (!prefix) {\n return fail(\"IO\", \"unable to determine archive prefix\");\n }\n\n const execSet = new Set<string>();\n let entryCount = 0;\n let totalSize = 0;\n let matched = false;\n\n for (const entry of entries) {\n assertSafeTarPath(entry.path);\n if (!ALLOWED_TYPES.has(entry.type)) {\n return fail(\"IO\", `unsupported entry type in archive: ${entry.path}`);\n }\n\n if (entry.path === prefix) {\n continue;\n }\n if (!entry.path.startsWith(`${prefix}/`)) {\n return fail(\"IO\", \"archive has unexpected top-level layout\");\n }\n\n const rel = entry.path.slice(prefix.length + 1);\n if (!rel) {\n continue;\n }\n\n const relToReq = stripRequestedPath(rel, requestedPath);\n if (relToReq === null) {\n continue;\n }\n\n matched = true;\n entryCount += 1;\n if (rel === requestedPath && entry.type === \"File\") {\n return fail(\"INVALID_PATH\", \"requested path is not a directory\");\n }\n if (entry.type === \"File\") {\n totalSize += entry.size ?? 
0;\n if (entry.mode & 0o111) {\n execSet.add(relToReq);\n }\n }\n }\n\n if (!matched) {\n return fail(\"NOT_FOUND\", \"requested path not found in archive\");\n }\n if (entryCount > MAX_SKILLREF_FILES) {\n return fail(\"SIZE_LIMIT\", `extracted entry count ${entryCount} exceeds ${MAX_SKILLREF_FILES}`);\n }\n if (totalSize > MAX_SKILLREF_EXTRACT_BYTES) {\n return fail(\"SIZE_LIMIT\", `extracted size ${totalSize} exceeds ${MAX_SKILLREF_EXTRACT_BYTES}`);\n }\n\n return { prefix, execSet };\n}\n\nfunction stripRequestedPath(rel: string, requestedPath: string): string | null {\n if (rel === requestedPath) {\n return \"\";\n }\n const prefix = requestedPath + \"/\";\n if (rel.startsWith(prefix)) {\n return rel.slice(prefix.length);\n }\n return null;\n}\n\nfunction isUnderPath(entryPath: string, prefix: string, requestedPath: string): boolean {\n if (entryPath === prefix) {\n return false;\n }\n if (!entryPath.startsWith(`${prefix}/`)) {\n return false;\n }\n const rel = entryPath.slice(prefix.length + 1);\n if (!rel) {\n return false;\n }\n if (rel === requestedPath || rel.startsWith(requestedPath + \"/\")) {\n return true;\n }\n return false;\n}\n","import fs from \"node:fs/promises\";\nimport process from \"node:process\";\nimport { fail } from \"./errors\";\nimport { createSkillpackFromDir } from \"./skillpack\";\nimport { createSkillrefValue } from \"./skillref\";\n\nexport type SaveInput = { kind: \"string\" | \"skillpack\" | \"skillref\"; value: string };\n\nexport interface SaveOptions {\n append: boolean;\n file?: string;\n value?: string;\n dir?: string;\n url?: string;\n ref?: string;\n path?: string;\n}\n\nexport async function resolveSaveInput(\n resource: string,\n opts: SaveOptions,\n stdinIsTTY: boolean = Boolean(process.stdin.isTTY)\n): Promise<SaveInput> {\n const hasFile = typeof opts.file === \"string\";\n const hasValue = typeof opts.value === \"string\";\n const hasDir = typeof opts.dir === \"string\";\n const urlFlagsUsed = Boolean(opts.url || opts.ref || opts.path);\n const hasUrl = Boolean(opts.url && opts.path);\n const explicitCount = [hasFile, hasValue, hasDir, hasUrl].filter(Boolean).length;\n const hasStdin = !stdinIsTTY && explicitCount === 0;\n\n if (urlFlagsUsed && !hasUrl) {\n return fail(\"INVALID_INPUT\", \"--url and --path must be provided together\");\n }\n\n const methods = explicitCount + (hasStdin ? 
1 : 0);\n if (methods !== 1) {\n return fail(\"INVALID_INPUT\", \"exactly one input method must be used\");\n }\n\n if (hasDir && resource !== \"skill\") {\n return fail(\"INVALID_INPUT\", \"--dir is only valid for skill save\");\n }\n if (hasUrl && resource !== \"skill\") {\n return fail(\"INVALID_INPUT\", \"--url/--ref/--path are only valid for skill save\");\n }\n if (opts.append && (hasDir || hasUrl)) {\n return fail(\"INVALID_INPUT\", \"--append cannot be used with --dir or --url\");\n }\n\n if (hasDir) {\n const value = await createSkillpackFromDir(opts.dir as string);\n return { kind: \"skillpack\", value };\n }\n\n if (hasUrl) {\n const value = createSkillrefValue(opts.url as string, opts.path as string, opts.ref as string | undefined);\n return { kind: \"skillref\", value };\n }\n\n if (hasFile) {\n const content = await fs.readFile(opts.file as string, \"utf8\");\n return { kind: \"string\", value: content };\n }\n\n if (hasValue) {\n return { kind: \"string\", value: opts.value as string };\n }\n\n const stdin = await readStdin();\n return { kind: \"string\", value: stdin };\n}\n\nasync function readStdin(): Promise<string> {\n return new Promise((resolve, reject) => {\n let data = \"\";\n process.stdin.setEncoding(\"utf8\");\n process.stdin.on(\"data\", (chunk) => {\n data += chunk;\n });\n process.stdin.on(\"end\", () => resolve(data));\n process.stdin.on(\"error\", reject);\n });\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,uBAAiB;;;ACaV,IAAM,cAAN,cAA0B,MAAM;AAAA,EAC5B;AAAA,EAET,YAAY,MAAiB,SAAiB;AAC5C,UAAM,OAAO;AACb,SAAK,OAAO;AAAA,EACd;AACF;AAEO,SAAS,KAAK,MAAiB,SAAwB;AAC5D,QAAM,IAAI,YAAY,MAAM,OAAO;AACrC;AAEO,SAAS,YAAY,KAAsB;AAChD,MAAI,eAAe,aAAa;AAC9B,WAAO,cAAc,IAAI,IAAI,KAAK,IAAI,OAAO;AAAA,EAC/C;AACA,QAAM,UAAU,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAC/D,SAAO,kBAAkB,OAAO;AAClC;;;AD7BO,SAAS,mBAAmB,OAAuB;AACxD,MAAI;AACJ,MAAI;AACF,UAAM,IAAI,IAAI,KAAK;AAAA,EACrB,QAAQ;AACN,WAAO,KAAK,eAAe,aAAa;AAAA,EAC1C;AAEA,MAAI,IAAI,aAAa,UAAU;AAC7B,WAAO,KAAK,eAAe,oBAAoB;AAAA,EACjD;AACA,MAAI,IAAI,aAAa,cAAc;AACjC,WAAO,KAAK,eAAe,8BAA8B;AAAA,EAC3D;AACA,MAAI,IAAI,UAAU,IAAI,MAAM;AAC1B,WAAO,KAAK,eAAe,oCAAoC;AAAA,EACjE;AAEA,QAAM,QAAQ,IAAI,SAAS,MAAM,GAAG,EAAE,OAAO,OAAO;AACpD,MAAI,MAAM,WAAW,GAAG;AACtB,WAAO,KAAK,eAAe,+CAA+C;AAAA,EAC5E;AACA,QAAM,QAAQ,MAAM,CAAC;AACrB,MAAI,OAAO,MAAM,CAAC;AAClB,MAAI,KAAK,SAAS,MAAM,GAAG;AACzB,WAAO,KAAK,MAAM,GAAG,EAAE;AAAA,EACzB;AACA,MAAI,CAAC,SAAS,CAAC,MAAM;AACnB,WAAO,KAAK,eAAe,+CAA+C;AAAA,EAC5E;AAEA,SAAO,sBAAsB,KAAK,IAAI,IAAI;AAC5C;AAEO,SAAS,kBAAkB,KAAqB;AACrD,MAAI,CAAC,iBAAiB,KAAK,GAAG,GAAG;AAC/B,WAAO,KAAK,eAAe,iCAAiC;AAAA,EAC9D;AACA,SAAO;AACT;AAEO,SAAS,mBAAmB,OAAuB;AACxD,QAAM,UAAU,MAAM,KAAK;AAC3B,MAAI,CAAC,SAAS;AACZ,WAAO,KAAK,gBAAgB,yCAAyC;AAAA,EACvE;AACA,QAAM,UAAU,QAAQ,QAAQ,OAAO,GAAG;AAC1C,MAAI,QAAQ,WAAW,GAAG,GAAG;AAC3B,WAAO,KAAK,gBAAgB,qCAAqC;AAAA,EACnE;AACA,QAAM,aAAa,iBAAAA,QAAK,MAAM,UAAU,OAAO,EAAE,QAAQ,SAAS,EAAE;AACpE,MAAI,eAAe,OAAO,eAAe,IAAI;AAC3C,WAAO,KAAK,gBAAgB,yCAAyC;AAAA,EACvE;AACA,MAAI,WAAW,WAAW,KAAK,KAAK,WAAW,SAAS,MAAM,KAAK,eAAe,MAAM;AACtF,WAAO,KAAK,gBAAgB,mCAAmC;AAAA,EACjE;AACA,MAAI,WAAW,SAAS,GAAG,GAAG;AAC5B,WAAO,WAAW,MAAM,GAAG,EAAE;AAAA,EAC/B;AACA,SAAO;AACT;AAEO,SAAS,kBAAkB,WAAyB;AACzD,QAAM,UAAU,UAAU,QAAQ,OAAO,GAAG;AAC5C,MAAI,QAAQ,WAAW,GAAG,GAAG;AAC3B,WAAO,KAAK,gBAAgB,oCAAoC,SAAS,EAAE;AAAA,EAC7E;AACA,QAAM,aAAa,iBAAAA,QAAK,MAAM,UAAU,OAAO;AAC/C,MAAI,WAAW,WAAW,KAAK,KAAK,eAAe,QAAQ,WAAW,SAAS,MAAM,GAAG;AACtF,WAAO,KAAK,gBAAgB,sCAAsC,SAAS,EAAE;AAAA,EAC/E;AACF;;;AE3EO,IAAM,mBAAmB
;AACzB,IAAM,kBAAkB;AAExB,IAAM,sBAAsB,IAAI,OAAO;AACvC,IAAM,8BAA8B,KAAK,OAAO;AAChD,IAAM,6BAA6B,MAAM,OAAO;AAChD,IAAM,qBAAqB;AAE3B,IAAM,8BAA8B;AACpC,IAAM,+BAA+B;AAErC,IAAM,mBAAmB,CAAC,QAAQ,gBAAgB,WAAW;;;ACP7D,SAAS,qBAAqB,OAA+B;AAClE,MAAI,MAAM,WAAW,gBAAgB,EAAG,QAAO;AAC/C,MAAI,MAAM,WAAW,eAAe,EAAG,QAAO;AAC9C,SAAO;AACT;;;ACRA,IAAAC,mBAAe;AACf,qBAAkC;AAClC,IAAAC,oBAAiB;AACjB,qBAAe;AACf,uBAAiB;AACjB,IAAAD,mBAAyB;AACzB,IAAAE,cAAgB;;;ACNhB,IAAAC,mBAAe;AACf,IAAAC,oBAAiB;;;ACDjB,sBAAe;AAEf,eAAsB,UAAUC,OAAc,MAA6B;AACzE,MAAI;AACF,UAAM,gBAAAC,QAAG,MAAMD,OAAM,IAAI;AAAA,EAC3B,SAAS,KAAK;AACZ,QAAI,QAAQ,aAAa,SAAS;AAChC;AAAA,IACF;AACA,UAAM;AAAA,EACR;AACF;;;ADNA,eAAsB,UAAU,KAA4B;AAC1D,QAAM,iBAAAE,QAAG,MAAM,KAAK,EAAE,WAAW,KAAK,CAAC;AACzC;AAEO,SAAS,QAAQ,GAAmB;AACzC,SAAO,EAAE,MAAM,kBAAAC,QAAK,GAAG,EAAE,KAAK,GAAG;AACnC;AAEA,eAAsB,gBAAgB,KAAa,MAA6B;AAC9E,QAAM,UAAU,IAAI;AACpB,QAAM,UAAU,MAAM,iBAAAD,QAAG,QAAQ,KAAK,EAAE,eAAe,KAAK,CAAC;AAC7D,aAAW,SAAS,SAAS;AAC3B,UAAM,UAAU,kBAAAC,QAAK,KAAK,KAAK,MAAM,IAAI;AACzC,UAAM,WAAW,kBAAAA,QAAK,KAAK,MAAM,MAAM,IAAI;AAC3C,QAAI,MAAM,YAAY,GAAG;AACvB,YAAM,gBAAgB,SAAS,QAAQ;AACvC,YAAM,OAAO,MAAM,iBAAAD,QAAG,KAAK,OAAO;AAClC,YAAM,UAAU,UAAU,KAAK,OAAO,GAAK;AAC3C;AAAA,IACF;AACA,QAAI,MAAM,OAAO,GAAG;AAClB,YAAM,UAAU,kBAAAC,QAAK,QAAQ,QAAQ,CAAC;AACtC,YAAM,iBAAAD,QAAG,SAAS,SAAS,QAAQ;AACnC,YAAM,OAAO,MAAM,iBAAAA,QAAG,KAAK,OAAO;AAClC,YAAM,UAAU,UAAU,KAAK,OAAO,GAAK;AAC3C;AAAA,IACF;AACA,WAAO,KAAK,MAAM,sCAAsC,OAAO,EAAE;AAAA,EACnE;AACF;;;AElCA,IAAAE,mBAAe;AACf,IAAAC,oBAAiB;AAKjB,eAAsB,2BAA2B,MAAc,SAAqC;AAClG,iBAAe,KAAK,QAA+B;AACjD,UAAM,UAAU,MAAM,iBAAAC,QAAG,QAAQ,QAAQ,EAAE,eAAe,KAAK,CAAC;AAChE,eAAW,SAAS,SAAS;AAC3B,YAAM,UAAU,kBAAAC,QAAK,KAAK,QAAQ,MAAM,IAAI;AAC5C,UAAI,MAAM,YAAY,GAAG;AACvB,cAAM,UAAU,SAAS,GAAK;AAC9B,cAAM,KAAK,OAAO;AAClB;AAAA,MACF;AACA,UAAI,MAAM,OAAO,GAAG;AAClB,cAAM,MAAM,QAAQ,kBAAAA,QAAK,SAAS,MAAM,OAAO,CAAC;AAChD,cAAM,OAAO,QAAQ,IAAI,GAAG,IAAI,MAAQ;AACxC,cAAM,UAAU,SAAS,IAAI;AAC7B;AAAA,MACF;AACA,aAAO,KAAK,MAAM,wCAAwC,OAAO,EAAE;AAAA,IACrE;AAAA,EACF;AAEA,QAAM,KAAK,IAAI;AACjB;;;AC3BA,iBAAgB;AAShB,eAAsB,eAAe,MAAuC;AAC1E,QAAM,UAA0B,CAAC;AACjC,QAAM,WAAAC,QAAI,EAAE;AAAA,IACV;AAAA,IACA,QAAQ,OAAO;AACb,cAAQ,KAAK;AAAA,QACX,MAAM,MAAM;AAAA,QACZ,MAAM,MAAM;AAAA,QACZ,MAAM,MAAM,QAAQ;AAAA,QACpB,MAAM,MAAM,QAAQ;AAAA,MACtB,CAAC;AAAA,IACH;AAAA,EACF,CAAC;AACD,SAAO;AACT;;;AJPA,eAAsB,uBAAuB,SAAkC;AAC7E,QAAM,QAAQ,MAAM,iBAAAC,QAAG,KAAK,OAAO,EAAE,MAAM,MAAM,IAAI;AACrD,MAAI,CAAC,SAAS,CAAC,MAAM,YAAY,GAAG;AAClC,WAAO,KAAK,iBAAiB,6BAA6B,OAAO,EAAE;AAAA,EACrE;AAEA,QAAM,UAAU,MAAM,eAAe,OAAO;AAC5C,QAAM,SAAS,MAAM,iBAAAA,QAAG,QAAQ,kBAAAC,QAAK,KAAK,eAAAC,QAAG,OAAO,GAAG,mBAAmB,CAAC;AAC3E,QAAM,UAAU,kBAAAD,QAAK,KAAK,QAAQ,kBAAkB;AAEpD,MAAI;AACF,UAAM,YAAY,YAAAE,QAAI;AAAA,MACpB;AAAA,QACE,KAAK;AAAA,QACL,UAAU;AAAA,QACV,OAAO,oBAAI,KAAK,CAAC;AAAA,MACnB;AAAA,MACA;AAAA,IACF;AAEA,UAAM,OAAO,iBAAAC,QAAK,WAAW,EAAE,OAAO,EAAE,CAAC;AACzC,cAAM,2BAAS,WAAW,UAAM,kCAAkB,OAAO,CAAC;AAE1D,UAAM,OAAO,MAAM,iBAAAJ,QAAG,KAAK,OAAO;AAClC,QAAI,KAAK,OAAO,qBAAqB;AACnC,aAAO;AAAA,QACL;AAAA,QACA,yBAAyB,KAAK,IAAI,kBAAkB,mBAAmB;AAAA,MACzE;AAAA,IACF;AAEA,UAAM,OAAO,MAAM,iBAAAA,QAAG,SAAS,OAAO;AACtC,UAAM,MAAM,KAAK,SAAS,QAAQ;AAClC,WAAO,mBAAmB;AAAA,EAC5B,UAAE;AACA,UAAM,iBAAAA,QAAG,GAAG,QAAQ,EAAE,WAAW,MAAM,OAAO,KAAK,CAAC;AAAA,EACtD;AACF;AAoCA,eAAe,eAAe,MAAiC;AAC7D,QAAM,UAAoB,CAAC;AAE3B,iBAAe,KAAK,QAAgB,QAA+B;AACjE,UAAM,UAAU,MAAM,iBAAAK,QAAG,QAAQ,QAAQ,EAAE,eAAe,KAAK,CAAC;AAChE,YAAQ,KAAK,CAAC,GAAG,MAAM,EAAE,KAAK,cAAc,EAAE,IAAI,CAAC;AAEnD,eAAW,SAAS,SAAS;AAC3B,UAAI,iBAAiB,SAAS,MAAM,IAAI,GAAG;AACzC,YAAI,MAAM,YAAY,GAAG;AACvB;AAAA,QACF;AACA,YAAI,MAAM,OAAO,KAAK,MAAM,SAAS,aAAa;AAChD;AAAA,QACF;AAAA,MACF;
AAEA,YAAM,UAAU,kBAAAC,QAAK,KAAK,QAAQ,MAAM,IAAI;AAC5C,YAAM,UAAU,SAAS,kBAAAA,QAAK,MAAM,KAAK,QAAQ,MAAM,IAAI,IAAI,MAAM;AACrE,YAAM,OAAO,MAAM,iBAAAD,QAAG,MAAM,OAAO;AACnC,UAAI,KAAK,eAAe,GAAG;AACzB,eAAO,KAAK,MAAM,qCAAqC,OAAO,EAAE;AAAA,MAClE;AAEA,UAAI,MAAM,YAAY,GAAG;AACvB,gBAAQ,KAAK,OAAO;AACpB,cAAM,KAAK,SAAS,OAAO;AAC3B;AAAA,MACF;AAEA,UAAI,MAAM,OAAO,GAAG;AAClB,YAAI,MAAM,SAAS,aAAa;AAC9B;AAAA,QACF;AACA,gBAAQ,KAAK,OAAO;AACpB;AAAA,MACF;AAEA,aAAO,KAAK,MAAM,uCAAuC,OAAO,EAAE;AAAA,IACpE;AAAA,EACF;AAEA,QAAM,KAAK,MAAM,EAAE;AACnB,UAAQ,KAAK;AACb,SAAO;AACT;;;AKtIA,IAAAE,mBAAe;AACf,IAAAC,oBAAiB;AACjB,IAAAC,kBAAe;AACf,IAAAC,kBAAkC;AAKlC,IAAAC,cAAgB;AAIhB,IAAM,gBAAgB,oBAAI,IAAI,CAAC,QAAQ,WAAW,CAAC;AAS5C,SAAS,oBAAoB,KAAa,WAAmB,KAAsB;AACxF,QAAM,gBAAgB,mBAAmB,GAAG;AAC5C,QAAM,iBAAiB,mBAAmB,SAAS;AACnD,QAAM,UAAU,MACZ,KAAK,UAAU,EAAE,KAAK,eAAe,MAAM,gBAAgB,KAAK,kBAAkB,GAAG,EAAE,CAAC,IACxF,KAAK,UAAU,EAAE,KAAK,eAAe,MAAM,gBAAgB,OAAO,UAAU,CAAC;AACjF,SAAO,kBAAkB;AAC3B;AAEO,SAAS,mBAAmB,OAAyB;AAC1D,MAAI,CAAC,MAAM,WAAW,eAAe,GAAG;AACtC,WAAO,KAAK,iBAAiB,yBAAyB;AAAA,EACxD;AACA,QAAM,MAAM,MAAM,MAAM,gBAAgB,MAAM;AAC9C,MAAI;AACJ,MAAI;AACF,aAAS,KAAK,MAAM,GAAG;AAAA,EACzB,QAAQ;AACN,WAAO,KAAK,MAAM,+BAA+B;AAAA,EACnD;AACA,MAAI,CAAC,UAAU,OAAO,OAAO,QAAQ,YAAY,OAAO,OAAO,SAAS,UAAU;AAChF,WAAO,KAAK,MAAM,iCAAiC;AAAA,EACrD;AACA,QAAM,aAAa;AAAA,IACjB,KAAK,mBAAmB,OAAO,GAAG;AAAA,IAClC,MAAM,mBAAmB,OAAO,IAAI;AAAA,EACtC;AAEA,MAAI,OAAO,OAAO,QAAQ,UAAU;AAClC,WAAO,EAAE,GAAG,YAAY,KAAK,kBAAkB,OAAO,GAAG,EAAE;AAAA,EAC7D;AAEA,MAAI,OAAO,UAAU,WAAW;AAC9B,WAAO,EAAE,GAAG,YAAY,OAAO,UAAU;AAAA,EAC3C;AAEA,SAAO,KAAK,MAAM,iCAAiC;AACrD;AAEA,eAAsB,kBAAkB,OAAe,WAAkC;AACvF,QAAM,WAAW,mBAAmB,KAAK;AACzC,QAAM,cAAc,SAAS,OAAQ,MAAM,mBAAmB,SAAS,GAAG;AAC1E,QAAM,UAAU,MAAM,iBAAAC,QAAG,QAAQ,kBAAAC,QAAK,KAAK,gBAAAC,QAAG,OAAO,GAAG,kBAAkB,CAAC;AAC3E,QAAM,UAAU,kBAAAD,QAAK,KAAK,SAAS,iBAAiB;AAEpD,MAAI;AACF,UAAM,gBAAgB,SAAS,KAAK,aAAa,OAAO;AAExD,UAAM,UAAU,MAAM,eAAe,OAAO,EAAE,MAAM,MAAM,KAAK,MAAM,6BAA6B,CAAC;AACnG,UAAM,WAAW,eAAe,SAAS,SAAS,IAAI;AAEtD,UAAM,aAAa,kBAAAA,QAAK,KAAK,SAAS,SAAS;AAC/C,UAAM,UAAU,UAAU;AAE1B,UAAM,aAAa,IAAI,SAAS,KAAK,MAAM,GAAG,EAAE;AAChD,UAAM,YAAAE,QAAI,EAAE;AAAA,MACV,MAAM;AAAA,MACN,KAAK;AAAA,MACL,eAAe;AAAA,MACf,SAAS;AAAA,MACT,OAAO;AAAA,MACP,QAAQ,CAAC,GAAG,UAAU;AACpB,cAAM,YAAY,OAAO,QAAQ;AACjC,eAAO,YAAY,WAAW,SAAS,QAAQ,SAAS,IAAI;AAAA,MAC9D;AAAA,IACF,CAAC;AAED,UAAM,2BAA2B,YAAY,SAAS,OAAO;AAC7D,UAAM,UAAU,SAAS;AACzB,UAAM,gBAAgB,YAAY,SAAS;AAAA,EAC7C,UAAE;AACA,UAAM,iBAAAH,QAAG,GAAG,SAAS,EAAE,WAAW,MAAM,OAAO,KAAK,CAAC;AAAA,EACvD;AACF;AAEA,eAAe,gBAAgB,SAAiB,KAAa,SAAgC;AAC3F,QAAM,EAAE,OAAO,KAAK,IAAI,eAAe,OAAO;AAC9C,QAAM,MAAM,+BAA+B,KAAK,IAAI,IAAI,WAAW,GAAG;AACtE,QAAM,aAAa,IAAI,gBAAgB;AACvC,QAAM,aAAa,WAAW,MAAM,WAAW,MAAM,GAAG,4BAA4B;AACpF,MAAI;AACJ,MAAI;AACF,UAAM,MAAM,kBAAkB,KAAK,GAAG,YAAY,CAAC,cAAc,qBAAqB,CAAC;AAAA,EACzF,SAAS,KAAK;AACZ,iBAAa,UAAU;AACvB,WAAO,KAAK,WAAW,oBAAoB,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC,EAAE;AAAA,EAC/F;AAEA,MAAI,CAAC,IAAI,IAAI;AACX,iBAAa,UAAU;AACvB,UAAM,OAAO,MAAM,IAAI,KAAK;AAC5B,WAAO,KAAK,WAAW,oBAAoB,IAAI,MAAM,MAAM,IAAI,EAAE;AAAA,EACnE;AACA,MAAI,CAAC,IAAI,MAAM;AACb,iBAAa,UAAU;AACvB,WAAO,KAAK,WAAW,sCAAsC;AAAA,EAC/D;AAEA,QAAM,iBAAa,mCAAkB,OAAO;AAC5C,MAAI,QAAQ;AACZ,MAAI,QAAQ,OAAO,MAAM,CAAC;AAE1B,MAAI;AACF,qBAAiB,SAAS,IAAI,MAA+B;AAC3D,UAAI,MAAM,SAAS,GAAG;AACpB,cAAM,SAAS,IAAI,MAAM;AACzB,gBAAQ,OAAO,OAAO,CAAC,OAAO,MAAM,SAAS,GAAG,MAAM,CAAC,CAAC;AACxD,YAAI,MAAM,WAAW,GAAG;AACtB,cAAI,MAAM,CAAC,MAAM,MAAQ,MAAM,CAAC,MAAM,KAAM;AAC1C,uBAAW,MAAM;AACjB,uBAAW,MAAM;AACjB,mBAAO,KAAK,MAAM,kCAAkC;AAAA,UACtD;AAAA,QACF;AAAA,MACF;AAEA,eAAS,MAAM;AACf,UAAI,QAAQ,6BAA6B;AACvC,mBAAW,MAAM;AACjB,mBAAW,MAAM;AACj
B,eAAO;AAAA,UACL;AAAA,UACA,2BAA2B,KAAK,YAAY,2BAA2B;AAAA,QACzE;AAAA,MACF;AAEA,iBAAW,MAAM,KAAK;AAAA,IACxB;AAAA,EACF,SAAS,KAAK;AACZ,eAAW,MAAM;AACjB,iBAAa,UAAU;AACvB,QAAI,eAAe,aAAa;AAC9B,YAAM;AAAA,IACR;AACA,WAAO,KAAK,WAAW,oBAAoB,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC,EAAE;AAAA,EAC/F,UAAE;AACA,iBAAa,UAAU;AAAA,EACzB;AAEA,MAAI,MAAM,SAAS,GAAG;AACpB,eAAW,MAAM;AACjB,WAAO,KAAK,MAAM,+BAA+B;AAAA,EACnD;AAEA,QAAM,IAAI,QAAc,CAAC,SAAS,WAAW;AAC3C,eAAW,IAAI,MAAM,QAAQ,CAAC;AAC9B,eAAW,GAAG,SAAS,MAAM;AAAA,EAC/B,CAAC;AACH;AAEA,eAAe,kBACb,KACA,eACA,YACA,cACA,MACmB;AACnB,QAAM,eAAe,WAAW,MAAM,WAAW,MAAM,GAAG,2BAA2B;AAErF,QAAM,MAAM,MAAM,MAAM,KAAK;AAAA,IAC3B,GAAG;AAAA,IACH,QAAQ,WAAW;AAAA,IACnB,UAAU;AAAA,EACZ,CAAC;AAED,eAAa,YAAY;AAEzB,MAAI,WAAW,IAAI,MAAM,GAAG;AAC1B,QAAI,iBAAiB,GAAG;AACtB,aAAO,KAAK,WAAW,oBAAoB;AAAA,IAC7C;AACA,UAAM,WAAW,IAAI,QAAQ,IAAI,UAAU;AAC3C,QAAI,CAAC,UAAU;AACb,aAAO,KAAK,WAAW,kCAAkC;AAAA,IAC3D;AACA,UAAM,UAAU,IAAI,IAAI,UAAU,GAAG,EAAE,SAAS;AAChD,UAAM,OAAO,IAAI,IAAI,OAAO,EAAE;AAC9B,QAAI,CAAC,aAAa,SAAS,IAAI,GAAG;AAChC,aAAO,KAAK,WAAW,mCAAmC,IAAI,EAAE;AAAA,IAClE;AACA,WAAO,kBAAkB,SAAS,gBAAgB,GAAG,YAAY,cAAc,IAAI;AAAA,EACrF;AAEA,SAAO;AACT;AAEA,SAAS,WAAW,QAAyB;AAC3C,SAAO,CAAC,KAAK,KAAK,KAAK,KAAK,GAAG,EAAE,SAAS,MAAM;AAClD;AAEA,SAAS,eAAe,SAAkD;AACxE,QAAM,MAAM,IAAI,IAAI,OAAO;AAC3B,QAAM,QAAQ,IAAI,SAAS,MAAM,GAAG,EAAE,OAAO,OAAO;AACpD,MAAI,MAAM,WAAW,GAAG;AACtB,WAAO,KAAK,eAAe,+CAA+C;AAAA,EAC5E;AACA,SAAO,EAAE,OAAO,MAAM,CAAC,GAAG,MAAM,MAAM,CAAC,EAAE;AAC3C;AAEA,eAAe,mBAAmB,SAAkC;AAClE,QAAM,EAAE,OAAO,KAAK,IAAI,eAAe,OAAO;AAC9C,QAAM,MAAM,gCAAgC,KAAK,IAAI,IAAI;AACzD,QAAM,aAAa,IAAI,gBAAgB;AACvC,QAAM,aAAa,WAAW,MAAM,WAAW,MAAM,GAAG,4BAA4B;AACpF,MAAI;AACJ,MAAI;AACF,UAAM,MAAM,kBAAkB,KAAK,GAAG,YAAY,CAAC,cAAc,gBAAgB,GAAG;AAAA,MAClF,SAAS;AAAA,QACP,cAAc;AAAA,QACd,QAAQ;AAAA,MACV;AAAA,IACF,CAAC;AAAA,EACH,SAAS,KAAK;AACZ,iBAAa,UAAU;AACvB,WAAO,KAAK,WAAW,iCAAiC,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC,EAAE;AAAA,EAC5G;AAEA,MAAI,CAAC,IAAI,IAAI;AACX,iBAAa,UAAU;AACvB,UAAM,OAAO,MAAM,IAAI,KAAK;AAC5B,WAAO,KAAK,WAAW,iCAAiC,IAAI,MAAM,MAAM,IAAI,EAAE;AAAA,EAChF;AAEA,MAAI;AACJ,MAAI;AACF,WAAO,MAAM,IAAI,KAAK;AAAA,EACxB,QAAQ;AACN,iBAAa,UAAU;AACvB,WAAO,KAAK,WAAW,6CAA6C;AAAA,EACtE;AAEA,eAAa,UAAU;AACvB,MAAI,CAAC,QAAQ,OAAO,KAAK,mBAAmB,YAAY,KAAK,eAAe,WAAW,GAAG;AACxF,WAAO,KAAK,WAAW,kDAAkD;AAAA,EAC3E;AACA,SAAO,KAAK;AACd;AAEA,SAAS,eAAe,SAAyB,eAG/C;AACA,MAAI,QAAQ,WAAW,GAAG;AACxB,WAAO,KAAK,aAAa,8BAA8B;AAAA,EACzD;AAEA,QAAM,SAAS,QAAQ,CAAC,EAAE,KAAK,MAAM,GAAG,EAAE,CAAC;AAC3C,MAAI,CAAC,QAAQ;AACX,WAAO,KAAK,MAAM,oCAAoC;AAAA,EACxD;AAEA,QAAM,UAAU,oBAAI,IAAY;AAChC,MAAI,aAAa;AACjB,MAAI,YAAY;AAChB,MAAI,UAAU;AAEd,aAAW,SAAS,SAAS;AAC3B,sBAAkB,MAAM,IAAI;AAC5B,QAAI,CAAC,cAAc,IAAI,MAAM,IAAI,GAAG;AAClC,aAAO,KAAK,MAAM,sCAAsC,MAAM,IAAI,EAAE;AAAA,IACtE;AAEA,QAAI,MAAM,SAAS,QAAQ;AACzB;AAAA,IACF;AACA,QAAI,CAAC,MAAM,KAAK,WAAW,GAAG,MAAM,GAAG,GAAG;AACxC,aAAO,KAAK,MAAM,yCAAyC;AAAA,IAC7D;AAEA,UAAM,MAAM,MAAM,KAAK,MAAM,OAAO,SAAS,CAAC;AAC9C,QAAI,CAAC,KAAK;AACR;AAAA,IACF;AAEA,UAAM,WAAW,mBAAmB,KAAK,aAAa;AACtD,QAAI,aAAa,MAAM;AACrB;AAAA,IACF;AAEA,cAAU;AACV,kBAAc;AACd,QAAI,QAAQ,iBAAiB,MAAM,SAAS,QAAQ;AAClD,aAAO,KAAK,gBAAgB,mCAAmC;AAAA,IACjE;AACA,QAAI,MAAM,SAAS,QAAQ;AACzB,mBAAa,MAAM,QAAQ;AAC3B,UAAI,MAAM,OAAO,IAAO;AACtB,gBAAQ,IAAI,QAAQ;AAAA,MACtB;AAAA,IACF;AAAA,EACF;AAEA,MAAI,CAAC,SAAS;AACZ,WAAO,KAAK,aAAa,qCAAqC;AAAA,EAChE;AACA,MAAI,aAAa,oBAAoB;AACnC,WAAO,KAAK,cAAc,yBAAyB,UAAU,YAAY,kBAAkB,EAAE;AAAA,EAC/F;AACA,MAAI,YAAY,4BAA4B;AAC1C,WAAO,KAAK,cAAc,kBAAkB,SAAS,YAAY,0BAA0B,EAAE;AAAA,EAC/F;AAEA,SAAO,EAAE,QAAQ,QAAQ;AAC3B;AAEA,SAAS,mBAAmB,KAAa,eAAsC;AAC7E,MAAI,QAAQ,eAAe;AACzB,WAAO;AAAA,EACT;AACA,QAAM,SAAS,gBAAgB;AAC/B,MAA
I,IAAI,WAAW,MAAM,GAAG;AAC1B,WAAO,IAAI,MAAM,OAAO,MAAM;AAAA,EAChC;AACA,SAAO;AACT;AAEA,SAAS,YAAY,WAAmB,QAAgB,eAAgC;AACtF,MAAI,cAAc,QAAQ;AACxB,WAAO;AAAA,EACT;AACA,MAAI,CAAC,UAAU,WAAW,GAAG,MAAM,GAAG,GAAG;AACvC,WAAO;AAAA,EACT;AACA,QAAM,MAAM,UAAU,MAAM,OAAO,SAAS,CAAC;AAC7C,MAAI,CAAC,KAAK;AACR,WAAO;AAAA,EACT;AACA,MAAI,QAAQ,iBAAiB,IAAI,WAAW,gBAAgB,GAAG,GAAG;AAChE,WAAO;AAAA,EACT;AACA,SAAO;AACT;;;AChWA,IAAAI,mBAAe;AACf,0BAAoB;AAiBpB,eAAsB,iBACpB,UACA,MACA,aAAsB,QAAQ,oBAAAC,QAAQ,MAAM,KAAK,GAC7B;AACpB,QAAM,UAAU,OAAO,KAAK,SAAS;AACrC,QAAM,WAAW,OAAO,KAAK,UAAU;AACvC,QAAM,SAAS,OAAO,KAAK,QAAQ;AACnC,QAAM,eAAe,QAAQ,KAAK,OAAO,KAAK,OAAO,KAAK,IAAI;AAC9D,QAAM,SAAS,QAAQ,KAAK,OAAO,KAAK,IAAI;AAC5C,QAAM,gBAAgB,CAAC,SAAS,UAAU,QAAQ,MAAM,EAAE,OAAO,OAAO,EAAE;AAC1E,QAAM,WAAW,CAAC,cAAc,kBAAkB;AAElD,MAAI,gBAAgB,CAAC,QAAQ;AAC3B,WAAO,KAAK,iBAAiB,4CAA4C;AAAA,EAC3E;AAEA,QAAM,UAAU,iBAAiB,WAAW,IAAI;AAChD,MAAI,YAAY,GAAG;AACjB,WAAO,KAAK,iBAAiB,uCAAuC;AAAA,EACtE;AAEA,MAAI,UAAU,aAAa,SAAS;AAClC,WAAO,KAAK,iBAAiB,oCAAoC;AAAA,EACnE;AACA,MAAI,UAAU,aAAa,SAAS;AAClC,WAAO,KAAK,iBAAiB,kDAAkD;AAAA,EACjF;AACA,MAAI,KAAK,WAAW,UAAU,SAAS;AACrC,WAAO,KAAK,iBAAiB,6CAA6C;AAAA,EAC5E;AAEA,MAAI,QAAQ;AACV,UAAM,QAAQ,MAAM,uBAAuB,KAAK,GAAa;AAC7D,WAAO,EAAE,MAAM,aAAa,MAAM;AAAA,EACpC;AAEA,MAAI,QAAQ;AACV,UAAM,QAAQ,oBAAoB,KAAK,KAAe,KAAK,MAAgB,KAAK,GAAyB;AACzG,WAAO,EAAE,MAAM,YAAY,MAAM;AAAA,EACnC;AAEA,MAAI,SAAS;AACX,UAAM,UAAU,MAAM,iBAAAC,QAAG,SAAS,KAAK,MAAgB,MAAM;AAC7D,WAAO,EAAE,MAAM,UAAU,OAAO,QAAQ;AAAA,EAC1C;AAEA,MAAI,UAAU;AACZ,WAAO,EAAE,MAAM,UAAU,OAAO,KAAK,MAAgB;AAAA,EACvD;AAEA,QAAM,QAAQ,MAAM,UAAU;AAC9B,SAAO,EAAE,MAAM,UAAU,OAAO,MAAM;AACxC;AAEA,eAAe,YAA6B;AAC1C,SAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,QAAI,OAAO;AACX,wBAAAD,QAAQ,MAAM,YAAY,MAAM;AAChC,wBAAAA,QAAQ,MAAM,GAAG,QAAQ,CAAC,UAAU;AAClC,cAAQ;AAAA,IACV,CAAC;AACD,wBAAAA,QAAQ,MAAM,GAAG,OAAO,MAAM,QAAQ,IAAI,CAAC;AAC3C,wBAAAA,QAAQ,MAAM,GAAG,SAAS,MAAM;AAAA,EAClC,CAAC;AACH;","names":["path","import_promises","import_node_path","import_tar","import_promises","import_node_path","path","fs","fs","path","import_promises","import_node_path","fs","path","tar","fs","path","os","tar","zlib","fs","path","import_promises","import_node_path","import_node_os","import_node_fs","import_tar","fs","path","os","tar","import_promises","process","fs"]}
package/dist/skills/ctxbin/SKILL.md
ADDED
@@ -0,0 +1,137 @@
---
name: ctxbin
description: Use when working with ctxbin to save and load ctx, agent, and skill context.
metadata:
  short-description: ctxbin workflow
  version: 0.1.0
---

# ctxbin Skill

## Purpose
Help agents preserve and restore **branch-scoped project context** so the next agent can continue work without re-explanation.

## Core Usage (ctx)
`ctx` automatically derives a key from the current Git repo and branch when omitted.

```
key = {project}/{branch}
project = git repository root directory name
branch = git rev-parse --abbrev-ref HEAD
```
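To preview the key that `ctx` will derive for the current checkout, the same two values can be read with plain git commands (a sketch of the derivation above; ctxbin computes this internally):

```bash
# Derive {project}/{branch} by hand: repo root directory name + current branch
project=$(basename "$(git rev-parse --show-toplevel)")
branch=$(git rev-parse --abbrev-ref HEAD)
echo "${project}/${branch}"
```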

### Save (most common)
```bash
ctxbin ctx save --value "summary / next steps / notes"
```
Or via stdin:
```bash
echo "summary" | ctxbin ctx save
```

### Load
```bash
ctxbin ctx load
```
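To capture the restored notes as a file, redirect stdout (a sketch; this assumes `ctx load` prints the stored value to stdout, which is not spelled out in this diff):

```bash
# Assumption: `ctxbin ctx load` writes the stored context to stdout
ctxbin ctx load > ctx-notes.md
```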

### List
```bash
ctxbin ctx list
```

### Delete
```bash
ctxbin ctx delete
```

## agent Save/Load
`agent` requires a key and stores **string values only**.

```bash
ctxbin agent save reviewer --value "# Agent role"
ctxbin agent load reviewer
```

### List/Delete
```bash
ctxbin agent list
ctxbin agent delete reviewer
```

## skill Save/Load
`skill` requires a key.

```bash
ctxbin skill save my-skill --value "# Skill markdown"
ctxbin skill load my-skill
```

### List/Delete
```bash
ctxbin skill list
ctxbin skill delete my-skill
```

## Input Options (`--file`, `--value`, `--dir`, `--url`)
Use **exactly one** input method.

- `--value`: store a literal string
```bash
ctxbin ctx save --value "summary"
ctxbin agent save reviewer --value "# Agent role"
ctxbin skill save my-skill --value "# Skill markdown"
```

- `--file`: store file contents
```bash
ctxbin ctx save --file context.md
ctxbin agent save reviewer --file agent.md
ctxbin skill save my-skill --file SKILL.md
```

- `--dir`: store a directory as a skillpack (skill-only)
```bash
ctxbin skill save my-skill --dir ./skills/my-skill
ctxbin skill load my-skill --dir ./tmp/my-skill
```

- `--url` (+ `--path`, optional `--ref`): GitHub directory reference (skill-only)
```bash
# Pin to a specific commit
ctxbin skill save my-skill \
  --url https://github.com/OWNER/REPO \
  --ref <40-hex-commit-sha> \
  --path skills/my-skill

# Track default branch (omit --ref)
ctxbin skill save my-skill \
  --url https://github.com/OWNER/REPO \
  --path skills/my-skill
```
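`--ref` must be a full 40-hex commit SHA. One way to look it up (OWNER/REPO is a placeholder, as above):

```bash
# Print the full commit SHA of the remote's default branch without cloning
git ls-remote https://github.com/OWNER/REPO HEAD

# Or, inside a local clone, the SHA of the current checkout
git rev-parse HEAD
```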

## `--append` Examples
`--append` works with **string inputs only**.

```bash
ctxbin ctx save --append --value "more notes"
ctxbin agent save reviewer --append --value "extra role details"
ctxbin skill save my-skill --append --value "extra string"
```

## What agents must include in ctx
Ensure the next agent can continue immediately (see the example after this list):

- What changed (summary)
- What remains (next steps)
- Completed vs remaining checklist items
- Important decisions/constraints
- Files touched and why
- Failing tests or warnings
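One way to cover those points in a single save is a stdin here-doc; the layout below is illustrative only, not a format ctxbin requires:

```bash
ctxbin ctx save <<'EOF'
Summary: <what changed>
Next steps: <what remains>
Checklist: <done> / <remaining>
Decisions: <constraints or choices that must hold>
Files touched: <paths and why>
Known issues: <failing tests or warnings>
EOF
```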

## Storage Model (ctx)
Context is stored in Upstash Redis hash `ctx` under field `{project}/{branch}`.
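For debugging, the same field can be read straight from Upstash over its REST API. This is a sketch only: the credential variable names are the usual Upstash conventions rather than anything this package documents, and the `/` in the field must be URL-encoded.

```bash
# Read the ctx entry for project "myproj", branch "main" ("myproj/main" -> "myproj%2Fmain")
curl -s "$UPSTASH_REDIS_REST_URL/hget/ctx/myproj%2Fmain" \
  -H "Authorization: Bearer $UPSTASH_REDIS_REST_TOKEN"
```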

## Do Not
- Don’t store secrets
- Don’t overwrite with trivial messages

package/package.json
ADDED
@@ -0,0 +1,49 @@
{
  "name": "ctxbin",
  "version": "0.1.0",
  "description": "Minimal deterministic CLI to store/load context, agents, and skills via Redis hashes",
  "author": "superlucky84",
  "license": "MIT",
  "type": "commonjs",
  "bin": {
    "ctxbin": "dist/cli.js"
  },
  "repository": {
    "type": "git",
    "url": "https://github.com/superlucky84/ctxbin.git"
  },
  "bugs": {
    "url": "https://github.com/superlucky84/ctxbin/issues"
  },
  "homepage": "https://superlucky84.github.io/ctxbin/",
  "keywords": [
    "cli",
    "redis",
    "upstash",
    "context",
    "skill",
    "agent"
  ],
  "files": [
    "dist",
    "skills",
    "agent-addon.md",
    "README.md",
    "LICENSE"
  ],
  "engines": {
    "node": ">=18"
  },
  "dependencies": {
    "tar": "^6.2.0"
  },
  "devDependencies": {
    "@types/node": "^22.0.0",
    "tsup": "^8.0.0",
    "typescript": "^5.4.0"
  },
  "scripts": {
    "build": "tsup && node scripts/copy-skills.js",
    "test": "pnpm build && node --test \"tests/**/*.test.js\""
  }
}
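The `bin` entry maps a single `ctxbin` executable to `dist/cli.js`, so the CLI can be installed globally or run ad hoc; Redis credential setup is not part of this diff:

```bash
# Install globally and invoke the bin declared in package.json
npm install -g ctxbin
ctxbin ctx list

# Or run without a global install
npx ctxbin ctx list
```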
package/skills/ctxbin/SKILL.md
ADDED
@@ -0,0 +1,129 @@
# ctxbin Skill

## Purpose
Help agents preserve and restore **branch-scoped project context** so the next agent can continue work without re-explanation.

## Core Usage (ctx)
`ctx` automatically derives a key from the current Git repo and branch when omitted.

```
key = {project}/{branch}
project = git repository root directory name
branch = git rev-parse --abbrev-ref HEAD
```

### Save (most common)
```bash
ctxbin ctx save --value "summary / next steps / notes"
```
Or via stdin:
```bash
echo "summary" | ctxbin ctx save
```

### Load
```bash
ctxbin ctx load
```

### List
```bash
ctxbin ctx list
```

### Delete
```bash
ctxbin ctx delete
```

## agent Save/Load
`agent` requires a key and stores **string values only**.

```bash
ctxbin agent save reviewer --value "# Agent role"
ctxbin agent load reviewer
```

### List/Delete
```bash
ctxbin agent list
ctxbin agent delete reviewer
```

## skill Save/Load
`skill` requires a key.

```bash
ctxbin skill save my-skill --value "# Skill markdown"
ctxbin skill load my-skill
```

### List/Delete
```bash
ctxbin skill list
ctxbin skill delete my-skill
```

## Input Options (`--file`, `--value`, `--dir`, `--url`)
Use **exactly one** input method.

- `--value`: store a literal string
```bash
ctxbin ctx save --value "summary"
ctxbin agent save reviewer --value "# Agent role"
ctxbin skill save my-skill --value "# Skill markdown"
```

- `--file`: store file contents
```bash
ctxbin ctx save --file context.md
ctxbin agent save reviewer --file agent.md
ctxbin skill save my-skill --file SKILL.md
```

- `--dir`: store a directory as a skillpack (skill-only)
```bash
ctxbin skill save my-skill --dir ./skills/my-skill
ctxbin skill load my-skill --dir ./tmp/my-skill
```

- `--url` (+ `--path`, optional `--ref`): GitHub directory reference (skill-only)
```bash
# Pin to a specific commit
ctxbin skill save my-skill \
  --url https://github.com/OWNER/REPO \
  --ref <40-hex-commit-sha> \
  --path skills/my-skill

# Track default branch (omit --ref)
ctxbin skill save my-skill \
  --url https://github.com/OWNER/REPO \
  --path skills/my-skill
```

## `--append` Examples
`--append` works with **string inputs only**.

```bash
ctxbin ctx save --append --value "more notes"
ctxbin agent save reviewer --append --value "extra role details"
ctxbin skill save my-skill --append --value "extra string"
```

## What agents must include in ctx
Ensure the next agent can continue immediately:

- What changed (summary)
- What remains (next steps)
- Completed vs remaining checklist items
- Important decisions/constraints
- Files touched and why
- Failing tests or warnings

## Storage Model (ctx)
Context is stored in Upstash Redis hash `ctx` under field `{project}/{branch}`.

## Do Not
- Don’t store secrets
- Don’t overwrite with trivial messages