gitclaw 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58) hide show
  1. package/README.md +440 -0
  2. package/dist/agents.d.ts +8 -0
  3. package/dist/agents.js +82 -0
  4. package/dist/audit.d.ts +27 -0
  5. package/dist/audit.js +55 -0
  6. package/dist/compliance.d.ts +30 -0
  7. package/dist/compliance.js +108 -0
  8. package/dist/config.d.ts +11 -0
  9. package/dist/config.js +43 -0
  10. package/dist/examples.d.ts +6 -0
  11. package/dist/examples.js +40 -0
  12. package/dist/exports.d.ts +13 -0
  13. package/dist/exports.js +6 -0
  14. package/dist/hooks.d.ts +24 -0
  15. package/dist/hooks.js +108 -0
  16. package/dist/index.d.ts +2 -0
  17. package/dist/index.js +542 -0
  18. package/dist/knowledge.d.ts +17 -0
  19. package/dist/knowledge.js +55 -0
  20. package/dist/loader.d.ts +64 -0
  21. package/dist/loader.js +222 -0
  22. package/dist/sandbox.d.ts +28 -0
  23. package/dist/sandbox.js +54 -0
  24. package/dist/sdk-hooks.d.ts +8 -0
  25. package/dist/sdk-hooks.js +31 -0
  26. package/dist/sdk-types.d.ts +127 -0
  27. package/dist/sdk-types.js +1 -0
  28. package/dist/sdk.d.ts +6 -0
  29. package/dist/sdk.js +444 -0
  30. package/dist/session.d.ts +15 -0
  31. package/dist/session.js +127 -0
  32. package/dist/skills.d.ts +18 -0
  33. package/dist/skills.js +104 -0
  34. package/dist/tool-loader.d.ts +3 -0
  35. package/dist/tool-loader.js +138 -0
  36. package/dist/tools/cli.d.ts +3 -0
  37. package/dist/tools/cli.js +86 -0
  38. package/dist/tools/index.d.ts +13 -0
  39. package/dist/tools/index.js +29 -0
  40. package/dist/tools/memory.d.ts +3 -0
  41. package/dist/tools/memory.js +128 -0
  42. package/dist/tools/read.d.ts +3 -0
  43. package/dist/tools/read.js +46 -0
  44. package/dist/tools/sandbox-cli.d.ts +4 -0
  45. package/dist/tools/sandbox-cli.js +48 -0
  46. package/dist/tools/sandbox-memory.d.ts +4 -0
  47. package/dist/tools/sandbox-memory.js +117 -0
  48. package/dist/tools/sandbox-read.d.ts +4 -0
  49. package/dist/tools/sandbox-read.js +25 -0
  50. package/dist/tools/sandbox-write.d.ts +4 -0
  51. package/dist/tools/sandbox-write.js +26 -0
  52. package/dist/tools/shared.d.ts +38 -0
  53. package/dist/tools/shared.js +69 -0
  54. package/dist/tools/write.d.ts +3 -0
  55. package/dist/tools/write.js +28 -0
  56. package/dist/workflows.d.ts +8 -0
  57. package/dist/workflows.js +81 -0
  58. package/package.json +57 -0
@@ -0,0 +1,117 @@
1
+ import { memorySchema, DEFAULT_MEMORY_PATH, resolveSandboxPath } from "./shared.js";
2
+ import yaml from "js-yaml";
3
/**
 * Read and parse `memory/memory.yaml` from the sandbox repo.
 * Returns the parsed config only when it contains a `layers` array; any
 * read/parse failure or malformed document yields null (best-effort).
 */
async function loadMemoryConfig(ctx) {
    const configPath = resolveSandboxPath("memory/memory.yaml", ctx.repoPath);
    let parsed;
    try {
        parsed = yaml.load(await ctx.machine.readFile(configPath));
    }
    catch {
        // Missing or unreadable config is not an error — callers fall back to defaults.
        return null;
    }
    return Array.isArray(parsed?.layers) ? parsed : null;
}
15
/**
 * Pick the "working" memory layer from a parsed memory config.
 * Falls back to the first layer, then to DEFAULT_MEMORY_PATH when the
 * config is null or has no layers.
 */
function getWorkingLayer(config) {
    if (!config) {
        return { path: DEFAULT_MEMORY_PATH };
    }
    let layer = config.layers.find((l) => l.name === "working");
    if (!layer) {
        layer = config.layers[0];
    }
    if (!layer) {
        return { path: DEFAULT_MEMORY_PATH };
    }
    return { path: layer.path, maxLines: layer.max_lines };
}
23
/**
 * Trim `content` down to its last `maxLines` lines, appending the removed
 * head to a month-stamped archive file (memory/archive/YYYY-MM.md) in the
 * sandbox. Returns the content unchanged when it already fits.
 */
async function archiveOverflow(ctx, content, maxLines) {
    const lines = content.split("\n");
    const overflowCount = lines.length - maxLines;
    if (overflowCount <= 0) {
        return content;
    }
    const overflow = lines.slice(0, overflowCount).join("\n");
    const kept = lines.slice(overflowCount).join("\n");
    const now = new Date();
    const month = String(now.getMonth() + 1).padStart(2, "0");
    const archivePath = resolveSandboxPath(`memory/archive/${now.getFullYear()}-${month}.md`, ctx.repoPath);
    const archiveDir = archivePath.substring(0, archivePath.lastIndexOf("/"));
    // Make sure the archive directory exists before appending.
    await ctx.gitMachine.run(`mkdir -p "${archiveDir}"`, { cwd: ctx.repoPath });
    let existing;
    try {
        existing = await ctx.machine.readFile(archivePath);
    }
    catch {
        existing = ""; // First archive entry for this month.
    }
    await ctx.machine.writeFile(archivePath, `${existing}\n---\n_Archived: ${now.toISOString()}_\n\n${overflow}\n`);
    return kept;
}
48
/**
 * Build the sandbox `memory` tool. `load` reads the working memory file;
 * `save` writes it (archiving overflow when the layer defines max_lines)
 * and commits the change to git, giving the agent full history.
 */
export function createSandboxMemoryTool(ctx) {
    const textResult = (text) => ({ content: [{ type: "text", text }], details: undefined });
    // Read the working memory file; missing/empty/template-only files count as "no memories".
    const loadMemory = async (memoryFile) => {
        try {
            const trimmed = (await ctx.machine.readFile(memoryFile)).trim();
            if (trimmed && trimmed !== "# Memory") {
                return textResult(trimmed);
            }
        }
        catch {
            // File does not exist yet — fall through to the empty result.
        }
        return textResult("No memories yet.");
    };
    // Write the memory file (archiving overflow first) and commit it.
    const saveMemory = async (memoryFile, memoryPath, maxLines, content, message) => {
        if (!content) {
            throw new Error("content is required for save action");
        }
        const commitMsg = message || "Update memory";
        const finalContent = maxLines ? await archiveOverflow(ctx, content, maxLines) : content;
        // Ensure the parent directory exists before writing.
        const dir = memoryFile.substring(0, memoryFile.lastIndexOf("/"));
        if (dir) {
            await ctx.gitMachine.run(`mkdir -p "${dir}"`, { cwd: ctx.repoPath });
        }
        await ctx.machine.writeFile(memoryFile, finalContent);
        try {
            await ctx.gitMachine.commit(commitMsg);
        }
        catch (err) {
            // The write succeeded even though the commit did not — report both facts.
            return textResult(`Memory saved to ${memoryPath} but git commit failed: ${err.message || "unknown error"}. The file was still written.`);
        }
        return textResult(`Memory saved and committed: "${commitMsg}"`);
    };
    return {
        name: "memory",
        label: "memory",
        description: "Git-backed memory in the sandbox VM. Use 'load' to read current memory, 'save' to update memory and commit to git. Each save creates a git commit, giving you full history.",
        parameters: memorySchema,
        execute: async (_toolCallId, { action, content, message }, signal) => {
            if (signal?.aborted) {
                throw new Error("Operation aborted");
            }
            const layer = getWorkingLayer(await loadMemoryConfig(ctx));
            const memoryFile = resolveSandboxPath(layer.path, ctx.repoPath);
            if (action === "load") {
                return loadMemory(memoryFile);
            }
            // action === "save"
            return saveMemory(memoryFile, layer.path, layer.maxLines, content, message);
        },
    };
}
@@ -0,0 +1,4 @@
1
// Type declarations for the sandbox `read` tool factory.
import type { AgentTool } from "@mariozechner/pi-agent-core";
import type { SandboxContext } from "../sandbox.js";
import { readSchema } from "./shared.js";
/** Create the sandbox-VM `read` tool bound to the given sandbox context. */
export declare function createSandboxReadTool(ctx: SandboxContext): AgentTool<typeof readSchema>;
@@ -0,0 +1,25 @@
1
+ import { readSchema, paginateLines, resolveSandboxPath } from "./shared.js";
2
/**
 * Build the sandbox `read` tool: reads a file through the sandbox VM and
 * paginates the output, appending a continuation hint when more remains.
 */
export function createSandboxReadTool(ctx) {
    const execute = async (_toolCallId, { path, offset, limit }, signal) => {
        if (signal?.aborted) {
            throw new Error("Operation aborted");
        }
        const raw = await ctx.machine.readFile(resolveSandboxPath(path, ctx.repoPath));
        const page = paginateLines(raw, offset, limit);
        const parts = [page.text];
        if (page.hasMore) {
            const [first, last] = page.shownRange;
            // Tell the model exactly how to fetch the next window.
            parts.push(`\n\n[Showing lines ${first}-${last} of ${page.totalLines}. Use offset=${last + 1} to continue.]`);
        }
        return { content: [{ type: "text", text: parts.join("") }], details: undefined };
    };
    return {
        name: "read",
        label: "read",
        description: "Read the contents of a file in the sandbox VM. Output is limited to 2000 lines or ~100KB. Use offset/limit for large files.",
        parameters: readSchema,
        execute,
    };
}
@@ -0,0 +1,4 @@
1
// Type declarations for the sandbox `write` tool factory.
import type { AgentTool } from "@mariozechner/pi-agent-core";
import type { SandboxContext } from "../sandbox.js";
import { writeSchema } from "./shared.js";
/** Create the sandbox-VM `write` tool bound to the given sandbox context. */
export declare function createSandboxWriteTool(ctx: SandboxContext): AgentTool<typeof writeSchema>;
@@ -0,0 +1,26 @@
1
+ import { writeSchema, resolveSandboxPath } from "./shared.js";
2
/**
 * Build the sandbox `write` tool: writes content to a file in the sandbox VM,
 * creating parent directories first unless the caller opts out.
 */
export function createSandboxWriteTool(ctx) {
    return {
        name: "write",
        label: "write",
        description: "Write content to a file in the sandbox VM. Creates the file if it doesn't exist, overwrites if it does. Parent directories are created automatically.",
        parameters: writeSchema,
        execute: async (_toolCallId, { path, content, createDirs }, signal) => {
            if (signal?.aborted) {
                throw new Error("Operation aborted");
            }
            const sandboxPath = resolveSandboxPath(path, ctx.repoPath);
            const parentDir = sandboxPath.substring(0, sandboxPath.lastIndexOf("/"));
            // createDirs defaults to true; only an explicit false skips mkdir.
            if (createDirs !== false && parentDir) {
                await ctx.gitMachine.run(`mkdir -p "${parentDir}"`, { cwd: ctx.repoPath });
            }
            await ctx.machine.writeFile(sandboxPath, content);
            const byteCount = Buffer.byteLength(content, "utf-8");
            return {
                content: [{ type: "text", text: `Wrote ${byteCount} bytes to ${path}` }],
                details: undefined,
            };
        },
    };
}
@@ -0,0 +1,38 @@
1
// Shared constants, TypeBox parameter schemas, and helper declarations
// used by the cli/read/write/memory tool implementations.
export declare const MAX_OUTPUT = 100000;
export declare const MAX_LINES = 2000;
export declare const MAX_BYTES = 100000;
export declare const DEFAULT_TIMEOUT = 120;
export declare const DEFAULT_MEMORY_PATH = "memory/MEMORY.md";
/** Parameters for the `cli` tool: shell command plus optional timeout. */
export declare const cliSchema: import("@sinclair/typebox").TObject<{
    command: import("@sinclair/typebox").TString;
    timeout: import("@sinclair/typebox").TOptional<import("@sinclair/typebox").TNumber>;
}>;
/** Parameters for the `read` tool: path plus optional line pagination. */
export declare const readSchema: import("@sinclair/typebox").TObject<{
    path: import("@sinclair/typebox").TString;
    offset: import("@sinclair/typebox").TOptional<import("@sinclair/typebox").TNumber>;
    limit: import("@sinclair/typebox").TOptional<import("@sinclair/typebox").TNumber>;
}>;
/** Parameters for the `write` tool. */
export declare const writeSchema: import("@sinclair/typebox").TObject<{
    path: import("@sinclair/typebox").TString;
    content: import("@sinclair/typebox").TString;
    createDirs: import("@sinclair/typebox").TOptional<import("@sinclair/typebox").TBoolean>;
}>;
/** Parameters for the `memory` tool: load/save plus optional content and commit message. */
export declare const memorySchema: import("@sinclair/typebox").TObject<{
    action: import("@sinclair/typebox").TUnsafe<string>;
    content: import("@sinclair/typebox").TOptional<import("@sinclair/typebox").TString>;
    message: import("@sinclair/typebox").TOptional<import("@sinclair/typebox").TString>;
}>;
/** Truncate output to MAX_OUTPUT, keeping the tail. */
export declare function truncateOutput(text: string): string;
/**
 * Paginate text by lines with offset (1-indexed) and limit.
 * Returns { text, hasMore, shownRange, totalLines }.
 */
export declare function paginateLines(text: string, offset?: number, limit?: number): {
    text: string;
    hasMore: boolean;
    shownRange: [number, number];
    totalLines: number;
};
/** Resolve a path relative to a sandbox repo root. */
export declare function resolveSandboxPath(path: string, repoRoot: string): string;
@@ -0,0 +1,69 @@
1
import { Type } from "@sinclair/typebox";
import { StringEnum } from "@mariozechner/pi-ai";
// ── Constants ───────────────────────────────────────────────────────────
// Output/pagination limits shared by the cli/read/write/memory tools.
export const MAX_OUTPUT = 100_000; // ~100KB max output to send to LLM
export const MAX_LINES = 2000; // max lines returned by a single read
export const MAX_BYTES = 100_000; // max bytes returned by a single read
export const DEFAULT_TIMEOUT = 120; // default cli timeout, in seconds
export const DEFAULT_MEMORY_PATH = "memory/MEMORY.md"; // fallback memory file when memory.yaml is absent
// ── Schemas ─────────────────────────────────────────────────────────────
// TypeBox parameter schemas exposed to the LLM for each tool.
/** Parameters for the `cli` tool: a shell command plus optional timeout. */
export const cliSchema = Type.Object({
    command: Type.String({ description: "Shell command to execute" }),
    timeout: Type.Optional(Type.Number({ description: "Timeout in seconds (default: 120)" })),
});
/** Parameters for the `read` tool: path plus optional line pagination. */
export const readSchema = Type.Object({
    path: Type.String({ description: "Path to the file to read (relative or absolute)" }),
    offset: Type.Optional(Type.Number({ description: "Line number to start from (1-indexed)" })),
    limit: Type.Optional(Type.Number({ description: "Maximum number of lines to read" })),
});
/** Parameters for the `write` tool. */
export const writeSchema = Type.Object({
    path: Type.String({ description: "Path to the file to write (relative or absolute)" }),
    content: Type.String({ description: "Content to write to the file" }),
    createDirs: Type.Optional(Type.Boolean({ description: "Create parent directories if needed (default: true)" })),
});
/** Parameters for the `memory` tool: load or save with optional commit message. */
export const memorySchema = Type.Object({
    action: StringEnum(["load", "save"], { description: "Whether to load or save memory" }),
    content: Type.Optional(Type.String({ description: "Memory content to save (required for save)" })),
    message: Type.Optional(Type.String({ description: "Commit message describing why this memory changed (required for save)" })),
});
29
// ── Shared helpers ──────────────────────────────────────────────────────
/**
 * Truncate output to MAX_OUTPUT characters, keeping the tail.
 * Text at or under the limit is returned untouched; otherwise the last
 * MAX_OUTPUT characters are kept behind a truncation notice.
 */
export function truncateOutput(text) {
    if (text.length <= MAX_OUTPUT) {
        return text;
    }
    const tail = text.slice(-MAX_OUTPUT);
    return `[output truncated, showing last ~100KB]\n${tail}`;
}
37
/**
 * Paginate text by lines with offset (1-indexed) and limit.
 *
 * @param text   Full file contents.
 * @param offset Optional 1-indexed first line to show (default: 1).
 * @param limit  Optional maximum number of lines (default: MAX_LINES).
 * @returns { text, hasMore, shownRange, totalLines } where shownRange is
 *          1-indexed and inclusive; hasMore is true when lines remain past
 *          the window or the window was byte-truncated.
 * @throws Error when offset points past the end of the file.
 */
export function paginateLines(text, offset, limit) {
    const allLines = text.split("\n");
    const totalLines = allLines.length;
    const startLine = offset ? Math.max(0, offset - 1) : 0;
    if (startLine >= totalLines) {
        throw new Error(`Offset ${offset} is beyond end of file (${totalLines} lines)`);
    }
    const maxLines = limit ?? MAX_LINES;
    const endLine = Math.min(startLine + maxLines, totalLines);
    let selected = allLines.slice(startLine, endLine).join("\n");
    let truncatedByBytes = false;
    if (Buffer.byteLength(selected, "utf-8") > MAX_BYTES) {
        // Fix: truncate by UTF-8 *bytes*, not UTF-16 code units. The previous
        // `selected.slice(0, MAX_BYTES)` counted characters, so multibyte text
        // could still exceed MAX_BYTES (and the slice was a no-op whenever the
        // string had fewer than MAX_BYTES characters despite more bytes).
        selected = Buffer.from(selected, "utf-8")
            .subarray(0, MAX_BYTES)
            .toString("utf-8")
            .replace(/\uFFFD+$/, ""); // drop a code point split at the cut
        truncatedByBytes = true;
    }
    const hasMore = endLine < totalLines || truncatedByBytes;
    return {
        text: selected,
        hasMore,
        shownRange: [startLine + 1, endLine],
        totalLines,
    };
}
64
/** Resolve a path relative to a sandbox repo root; absolute paths pass through unchanged. */
export function resolveSandboxPath(path, repoRoot) {
    if (path.startsWith("/")) {
        return path;
    }
    const root = repoRoot.endsWith("/") ? repoRoot.slice(0, -1) : repoRoot;
    return `${root}/${path}`;
}
@@ -0,0 +1,3 @@
1
// Type declarations for the local-filesystem `write` tool factory.
import type { AgentTool } from "@mariozechner/pi-agent-core";
import { writeSchema } from "./shared.js";
/** Create the local `write` tool with relative paths resolved against `cwd`. */
export declare function createWriteTool(cwd: string): AgentTool<typeof writeSchema>;
@@ -0,0 +1,28 @@
1
+ import { mkdir, writeFile } from "fs/promises";
2
+ import { dirname, resolve } from "path";
3
+ import { writeSchema } from "./shared.js";
4
/** Resolve `path` against `cwd`, leaving absolute paths untouched. */
function resolvePath(path, cwd) {
    if (path.startsWith("/")) {
        return path;
    }
    return resolve(cwd, path);
}
7
/**
 * Build the local-filesystem `write` tool: writes content to a file under
 * `cwd`, creating parent directories first unless the caller opts out.
 */
export function createWriteTool(cwd) {
    const execute = async (_toolCallId, { path, content, createDirs }, signal) => {
        if (signal?.aborted) {
            throw new Error("Operation aborted");
        }
        const absolutePath = resolvePath(path, cwd);
        // createDirs defaults to true; only an explicit false skips mkdir.
        if (createDirs !== false) {
            await mkdir(dirname(absolutePath), { recursive: true });
        }
        await writeFile(absolutePath, content, "utf-8");
        const byteCount = Buffer.byteLength(content, "utf-8");
        return {
            content: [{ type: "text", text: `Wrote ${byteCount} bytes to ${path}` }],
            details: undefined,
        };
    };
    return {
        name: "write",
        label: "write",
        description: "Write content to a file. Creates the file if it doesn't exist, overwrites if it does. Parent directories are created automatically.",
        parameters: writeSchema,
        execute,
    };
}
@@ -0,0 +1,8 @@
1
/** Metadata for a workflow file discovered under `<agentDir>/workflows`. */
export interface WorkflowMetadata {
    // Workflow identifier: YAML `name`, markdown frontmatter `name`, or the filename.
    name: string;
    // Human-readable summary shown to the model when listing workflows.
    description: string;
    // Path relative to the agent directory, e.g. "workflows/foo.yaml".
    filePath: string;
    // Source format the definition was parsed from.
    format: "yaml" | "markdown";
}
/** Scan `<agentDir>/workflows` for YAML/markdown workflow definitions. */
export declare function discoverWorkflows(agentDir: string): Promise<WorkflowMetadata[]>;
/** Render discovered workflows as a prompt section; empty string when none. */
export declare function formatWorkflowsForPrompt(workflows: WorkflowMetadata[]): string;
@@ -0,0 +1,81 @@
1
+ import { readFile, readdir, stat } from "fs/promises";
2
+ import { join } from "path";
3
+ import yaml from "js-yaml";
4
/**
 * Split a markdown document into YAML frontmatter and body.
 * Returns `{ frontmatter: {}, body: content }` when no `---` fence is present.
 */
function parseFrontmatter(content) {
    const fence = /^---\r?\n([\s\S]*?)\r?\n---\r?\n?([\s\S]*)$/;
    const match = fence.exec(content);
    if (match === null) {
        return { frontmatter: {}, body: content };
    }
    const [, rawYaml, body] = match;
    return { frontmatter: yaml.load(rawYaml), body };
}
12
/**
 * Scan `<agentDir>/workflows` for workflow definitions.
 * YAML files need both `name` and `description`; markdown files need a
 * frontmatter `description` (name falls back to the filename). Invalid or
 * unreadable files are skipped. Returns metadata sorted by name; a missing
 * workflows directory yields [].
 */
export async function discoverWorkflows(agentDir) {
    const workflowsDir = join(agentDir, "workflows");
    let dirStat;
    try {
        dirStat = await stat(workflowsDir);
    }
    catch {
        return []; // no workflows directory at all
    }
    if (!dirStat.isDirectory()) {
        return [];
    }
    const workflows = [];
    for (const entry of await readdir(workflowsDir)) {
        const filePath = join(workflowsDir, entry);
        if (!(await stat(filePath)).isFile()) {
            continue;
        }
        if (entry.endsWith(".yaml") || entry.endsWith(".yml")) {
            try {
                const data = yaml.load(await readFile(filePath, "utf-8"));
                if (data?.name && data?.description) {
                    workflows.push({
                        name: data.name,
                        description: data.description,
                        filePath: `workflows/${entry}`,
                        format: "yaml",
                    });
                }
            }
            catch {
                // Skip invalid YAML
            }
        }
        else if (entry.endsWith(".md")) {
            try {
                const { frontmatter } = parseFrontmatter(await readFile(filePath, "utf-8"));
                const description = frontmatter.description || "";
                if (description) {
                    workflows.push({
                        name: frontmatter.name || entry.replace(/\.md$/, ""),
                        description,
                        filePath: `workflows/${entry}`,
                        format: "markdown",
                    });
                }
            }
            catch {
                // Skip unreadable files
            }
        }
    }
    workflows.sort((a, b) => a.name.localeCompare(b.name));
    return workflows;
}
68
/**
 * Render discovered workflows as a prompt section listing each workflow's
 * name, description, and path. Returns "" when there are no workflows.
 */
export function formatWorkflowsForPrompt(workflows) {
    if (workflows.length === 0) {
        return "";
    }
    const entryXml = (w) => [
        "<workflow>",
        `<name>${w.name}</name>`,
        `<description>${w.description}</description>`,
        `<path>${w.filePath}</path>`,
        "</workflow>",
    ].join("\n");
    const body = workflows.map(entryXml).join("\n");
    return [
        "# Workflows",
        "",
        "<available_workflows>",
        body,
        "</available_workflows>",
        "",
        "Use the `read` tool to load a workflow's full definition when you need to follow it.",
    ].join("\n");
}
package/package.json ADDED
@@ -0,0 +1,57 @@
1
+ {
2
+ "name": "gitclaw",
3
+ "version": "0.3.0",
4
+ "description": "A universal git-native agent powered by pi-agent-core",
5
+ "author": "shreyaskapale",
6
+ "license": "MIT",
7
+ "repository": {
8
+ "type": "git",
9
+ "url": "https://github.com/open-gitagent/gitclaw"
10
+ },
11
+ "homepage": "https://github.com/open-gitagent/gitclaw",
12
+ "keywords": ["ai", "agent", "git", "cli", "sdk", "llm"],
13
+ "type": "module",
14
+ "files": ["dist", "README.md"],
15
+ "main": "./dist/exports.js",
16
+ "types": "./dist/exports.d.ts",
17
+ "bin": {
18
+ "gitclaw": "./dist/index.js"
19
+ },
20
+ "exports": {
21
+ ".": {
22
+ "import": "./dist/exports.js",
23
+ "types": "./dist/exports.d.ts"
24
+ },
25
+ "./cli": {
26
+ "import": "./dist/index.js"
27
+ }
28
+ },
29
+ "scripts": {
30
+ "build": "tsc",
31
+ "dev": "tsc --watch",
32
+ "start": "node dist/index.js",
33
+ "test": "node --test test/*.test.ts --experimental-strip-types"
34
+ },
35
+ "engines": {
36
+ "node": ">=20"
37
+ },
38
+ "dependencies": {
39
+ "@mariozechner/pi-ai": "^0.55.4",
40
+ "@mariozechner/pi-agent-core": "^0.55.4",
41
+ "@sinclair/typebox": "^0.34.41",
42
+ "js-yaml": "^4.1.0"
43
+ },
44
+ "peerDependencies": {
45
+ "gitmachine": ">=0.1.0"
46
+ },
47
+ "peerDependenciesMeta": {
48
+ "gitmachine": {
49
+ "optional": true
50
+ }
51
+ },
52
+ "devDependencies": {
53
+ "@types/js-yaml": "^4.0.9",
54
+ "@types/node": "^22.0.0",
55
+ "typescript": "^5.7.0"
56
+ }
57
+ }