@companion-ai/feynman 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.example +8 -0
- package/.feynman/SYSTEM.md +62 -0
- package/.feynman/agents/researcher.md +63 -0
- package/.feynman/agents/reviewer.md +84 -0
- package/.feynman/agents/verifier.md +38 -0
- package/.feynman/agents/writer.md +51 -0
- package/.feynman/settings.json +20 -0
- package/.feynman/themes/feynman.json +85 -0
- package/AGENTS.md +53 -0
- package/README.md +99 -0
- package/bin/feynman.js +2 -0
- package/dist/bootstrap/sync.js +98 -0
- package/dist/cli.js +297 -0
- package/dist/config/commands.js +71 -0
- package/dist/config/feynman-config.js +42 -0
- package/dist/config/paths.js +32 -0
- package/dist/feynman-prompt.js +63 -0
- package/dist/index.js +5 -0
- package/dist/model/catalog.js +238 -0
- package/dist/model/commands.js +165 -0
- package/dist/pi/launch.js +31 -0
- package/dist/pi/runtime.js +70 -0
- package/dist/pi/settings.js +101 -0
- package/dist/pi/web-access.js +74 -0
- package/dist/search/commands.js +12 -0
- package/dist/setup/doctor.js +126 -0
- package/dist/setup/preview.js +20 -0
- package/dist/setup/prompts.js +29 -0
- package/dist/setup/setup.js +119 -0
- package/dist/system/executables.js +38 -0
- package/dist/system/promise-polyfill.js +12 -0
- package/dist/ui/terminal.js +53 -0
- package/dist/web-search.js +1 -0
- package/extensions/research-tools/alpha.ts +212 -0
- package/extensions/research-tools/header.ts +379 -0
- package/extensions/research-tools/help.ts +93 -0
- package/extensions/research-tools/preview.ts +233 -0
- package/extensions/research-tools/project.ts +116 -0
- package/extensions/research-tools/session-search.ts +223 -0
- package/extensions/research-tools/shared.ts +46 -0
- package/extensions/research-tools.ts +25 -0
- package/metadata/commands.d.mts +46 -0
- package/metadata/commands.mjs +133 -0
- package/package.json +71 -0
- package/prompts/audit.md +15 -0
- package/prompts/autoresearch.md +63 -0
- package/prompts/compare.md +16 -0
- package/prompts/deepresearch.md +167 -0
- package/prompts/delegate.md +21 -0
- package/prompts/draft.md +16 -0
- package/prompts/jobs.md +16 -0
- package/prompts/lit.md +16 -0
- package/prompts/log.md +14 -0
- package/prompts/replicate.md +22 -0
- package/prompts/review.md +15 -0
- package/prompts/watch.md +14 -0
- package/scripts/patch-embedded-pi.mjs +319 -0
- package/skills/agentcomputer/SKILL.md +108 -0
- package/skills/agentcomputer/references/acp-flow.md +23 -0
- package/skills/agentcomputer/references/cli-cheatsheet.md +68 -0
- package/skills/autoresearch/SKILL.md +12 -0
- package/skills/deep-research/SKILL.md +12 -0
- package/skills/docker/SKILL.md +84 -0
- package/skills/jobs/SKILL.md +10 -0
- package/skills/literature-review/SKILL.md +12 -0
- package/skills/paper-code-audit/SKILL.md +12 -0
- package/skills/paper-writing/SKILL.md +12 -0
- package/skills/peer-review/SKILL.md +12 -0
- package/skills/replication/SKILL.md +14 -0
- package/skills/session-log/SKILL.md +10 -0
- package/skills/source-comparison/SKILL.md +12 -0
- package/skills/watch/SKILL.md +12 -0
|
@@ -0,0 +1,116 @@
|
|
|
1
|
+
import { mkdir, stat, writeFile } from "node:fs/promises";
|
|
2
|
+
import { dirname, resolve as resolvePath } from "node:path";
|
|
3
|
+
|
|
4
|
+
import type { ExtensionAPI } from "@mariozechner/pi-coding-agent";
|
|
5
|
+
import { Type } from "@sinclair/typebox";
|
|
6
|
+
|
|
7
|
+
import { getExtensionCommandSpec } from "../../metadata/commands.mjs";
|
|
8
|
+
import { renderHtmlPreview, renderPdfPreview, openWithDefaultApp, pathExists, buildProjectAgentsTemplate, buildSessionLogsReadme } from "./preview.js";
|
|
9
|
+
import { formatToolText } from "./shared.js";
|
|
10
|
+
import { searchSessionTranscripts } from "./session-search.js";
|
|
11
|
+
|
|
12
|
+
export function registerInitCommand(pi: ExtensionAPI): void {
|
|
13
|
+
pi.registerCommand("init", {
|
|
14
|
+
description: getExtensionCommandSpec("init")?.description ?? "Initialize AGENTS.md and session-log folders for a research project.",
|
|
15
|
+
handler: async (_args, ctx) => {
|
|
16
|
+
const agentsPath = resolvePath(ctx.cwd, "AGENTS.md");
|
|
17
|
+
const notesDir = resolvePath(ctx.cwd, "notes");
|
|
18
|
+
const sessionLogsDir = resolvePath(notesDir, "session-logs");
|
|
19
|
+
const sessionLogsReadmePath = resolvePath(sessionLogsDir, "README.md");
|
|
20
|
+
const created: string[] = [];
|
|
21
|
+
const skipped: string[] = [];
|
|
22
|
+
|
|
23
|
+
await mkdir(notesDir, { recursive: true });
|
|
24
|
+
await mkdir(sessionLogsDir, { recursive: true });
|
|
25
|
+
|
|
26
|
+
if (!(await pathExists(agentsPath))) {
|
|
27
|
+
await writeFile(agentsPath, buildProjectAgentsTemplate(), "utf8");
|
|
28
|
+
created.push("AGENTS.md");
|
|
29
|
+
} else {
|
|
30
|
+
skipped.push("AGENTS.md");
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
if (!(await pathExists(sessionLogsReadmePath))) {
|
|
34
|
+
await writeFile(sessionLogsReadmePath, buildSessionLogsReadme(), "utf8");
|
|
35
|
+
created.push("notes/session-logs/README.md");
|
|
36
|
+
} else {
|
|
37
|
+
skipped.push("notes/session-logs/README.md");
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
const createdSummary = created.length > 0 ? `created: ${created.join(", ")}` : "created: nothing";
|
|
41
|
+
const skippedSummary = skipped.length > 0 ? `; kept existing: ${skipped.join(", ")}` : "";
|
|
42
|
+
ctx.ui.notify(`${createdSummary}${skippedSummary}`, "info");
|
|
43
|
+
},
|
|
44
|
+
});
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
export function registerSessionSearchTool(pi: ExtensionAPI): void {
|
|
48
|
+
pi.registerTool({
|
|
49
|
+
name: "session_search",
|
|
50
|
+
label: "Session Search",
|
|
51
|
+
description: "Search prior Feynman session transcripts to recover what was done, said, or written before.",
|
|
52
|
+
parameters: Type.Object({
|
|
53
|
+
query: Type.String({
|
|
54
|
+
description: "Search query to look for in past sessions.",
|
|
55
|
+
}),
|
|
56
|
+
limit: Type.Optional(
|
|
57
|
+
Type.Number({
|
|
58
|
+
description: "Maximum number of sessions to return. Defaults to 3.",
|
|
59
|
+
}),
|
|
60
|
+
),
|
|
61
|
+
}),
|
|
62
|
+
async execute(_toolCallId, params) {
|
|
63
|
+
const result = await searchSessionTranscripts(params.query, Math.max(1, Math.min(params.limit ?? 3, 8)));
|
|
64
|
+
return {
|
|
65
|
+
content: [{ type: "text", text: formatToolText(result) }],
|
|
66
|
+
details: result,
|
|
67
|
+
};
|
|
68
|
+
},
|
|
69
|
+
});
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
export function registerPreviewTool(pi: ExtensionAPI): void {
|
|
73
|
+
pi.registerTool({
|
|
74
|
+
name: "preview_file",
|
|
75
|
+
label: "Preview File",
|
|
76
|
+
description: "Open a markdown, LaTeX, PDF, or code artifact in the browser or a PDF viewer for human review. Rendered HTML/PDF previews are temporary and do not replace the source artifact.",
|
|
77
|
+
parameters: Type.Object({
|
|
78
|
+
path: Type.String({
|
|
79
|
+
description: "Path to the file to preview.",
|
|
80
|
+
}),
|
|
81
|
+
target: Type.Optional(
|
|
82
|
+
Type.String({
|
|
83
|
+
description: "Preview target: browser or pdf. Defaults to browser.",
|
|
84
|
+
}),
|
|
85
|
+
),
|
|
86
|
+
}),
|
|
87
|
+
async execute(_toolCallId, params, _signal, _onUpdate, ctx) {
|
|
88
|
+
const target = (params.target?.trim().toLowerCase() || "browser");
|
|
89
|
+
if (target !== "browser" && target !== "pdf") {
|
|
90
|
+
throw new Error("target must be browser or pdf");
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
const resolvedPath = resolvePath(ctx.cwd, params.path);
|
|
94
|
+
const openedPath =
|
|
95
|
+
resolvePath(resolvedPath).toLowerCase().endsWith(".pdf") && target === "pdf"
|
|
96
|
+
? resolvedPath
|
|
97
|
+
: target === "pdf"
|
|
98
|
+
? await renderPdfPreview(resolvedPath)
|
|
99
|
+
: await renderHtmlPreview(resolvedPath);
|
|
100
|
+
|
|
101
|
+
await mkdir(dirname(openedPath), { recursive: true }).catch(() => {});
|
|
102
|
+
await openWithDefaultApp(openedPath);
|
|
103
|
+
|
|
104
|
+
const result = {
|
|
105
|
+
sourcePath: resolvedPath,
|
|
106
|
+
target,
|
|
107
|
+
openedPath,
|
|
108
|
+
temporaryPreview: openedPath !== resolvedPath,
|
|
109
|
+
};
|
|
110
|
+
return {
|
|
111
|
+
content: [{ type: "text", text: formatToolText(result) }],
|
|
112
|
+
details: result,
|
|
113
|
+
};
|
|
114
|
+
},
|
|
115
|
+
});
|
|
116
|
+
}
|
|
@@ -0,0 +1,223 @@
|
|
|
1
|
+
import { readdir, readFile, stat } from "node:fs/promises";
|
|
2
|
+
import { basename, join } from "node:path";
|
|
3
|
+
import { pathToFileURL } from "node:url";
|
|
4
|
+
|
|
5
|
+
import { getFeynmanHome } from "./shared.js";
|
|
6
|
+
|
|
7
|
+
function extractMessageText(message: unknown): string {
|
|
8
|
+
if (!message || typeof message !== "object") {
|
|
9
|
+
return "";
|
|
10
|
+
}
|
|
11
|
+
|
|
12
|
+
const content = (message as { content?: unknown }).content;
|
|
13
|
+
if (typeof content === "string") {
|
|
14
|
+
return content;
|
|
15
|
+
}
|
|
16
|
+
if (!Array.isArray(content)) {
|
|
17
|
+
return "";
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
return content
|
|
21
|
+
.map((item) => {
|
|
22
|
+
if (!item || typeof item !== "object") {
|
|
23
|
+
return "";
|
|
24
|
+
}
|
|
25
|
+
const record = item as { type?: string; text?: unknown; arguments?: unknown; name?: unknown };
|
|
26
|
+
if (record.type === "text" && typeof record.text === "string") {
|
|
27
|
+
return record.text;
|
|
28
|
+
}
|
|
29
|
+
if (record.type === "toolCall") {
|
|
30
|
+
const name = typeof record.name === "string" ? record.name : "tool";
|
|
31
|
+
const args =
|
|
32
|
+
typeof record.arguments === "string"
|
|
33
|
+
? record.arguments
|
|
34
|
+
: record.arguments
|
|
35
|
+
? JSON.stringify(record.arguments)
|
|
36
|
+
: "";
|
|
37
|
+
return `[tool:${name}] ${args}`;
|
|
38
|
+
}
|
|
39
|
+
return "";
|
|
40
|
+
})
|
|
41
|
+
.filter(Boolean)
|
|
42
|
+
.join("\n");
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
function buildExcerpt(text: string, query: string, radius = 180): string {
|
|
46
|
+
const normalizedText = text.replace(/\s+/g, " ").trim();
|
|
47
|
+
if (!normalizedText) {
|
|
48
|
+
return "";
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
const lower = normalizedText.toLowerCase();
|
|
52
|
+
const q = query.toLowerCase();
|
|
53
|
+
const index = lower.indexOf(q);
|
|
54
|
+
if (index === -1) {
|
|
55
|
+
return normalizedText.slice(0, radius * 2) + (normalizedText.length > radius * 2 ? "..." : "");
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
const start = Math.max(0, index - radius);
|
|
59
|
+
const end = Math.min(normalizedText.length, index + q.length + radius);
|
|
60
|
+
const prefix = start > 0 ? "..." : "";
|
|
61
|
+
const suffix = end < normalizedText.length ? "..." : "";
|
|
62
|
+
return `${prefix}${normalizedText.slice(start, end)}${suffix}`;
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
/**
 * Searches past session transcripts for `query`, returning up to `limit`
 * matching sessions with contextual excerpts.
 *
 * Two strategies, tried in order:
 * 1. Fast path: when FEYNMAN_PI_NPM_ROOT is set, dynamically load the
 *    `pi-session-search` indexer from that package root and use its
 *    FTS-style search. Any failure (missing package, import error, empty
 *    results) silently falls through to the scan below.
 * 2. Fallback: read every `.jsonl` transcript under `<feynmanHome>/sessions`,
 *    match message text against the whole query or any individual term
 *    (>= 2 chars), and rank sessions by match count, then file mtime.
 */
export async function searchSessionTranscripts(query: string, limit: number): Promise<{
  query: string;
  results: Array<{
    sessionId: string;
    sessionFile: string;
    startedAt?: string;
    cwd?: string;
    matchCount: number;
    topMatches: Array<{ role: string; timestamp?: string; excerpt: string }>;
  }>;
}> {
  const packageRoot = process.env.FEYNMAN_PI_NPM_ROOT;
  if (packageRoot) {
    try {
      // Load the optional indexer by file URL; it may not be installed.
      const indexerPath = pathToFileURL(
        join(packageRoot, "@kaiserlich-dev", "pi-session-search", "extensions", "indexer.ts"),
      ).href;
      const indexer = await import(indexerPath) as {
        updateIndex?: (onProgress?: (msg: string) => void) => Promise<number>;
        search?: (query: string, limit?: number) => Array<{
          sessionPath: string;
          project: string;
          timestamp: string;
          snippet: string;
          rank: number;
          title: string | null;
        }>;
        getSessionSnippets?: (sessionPath: string, query: string, limit?: number) => string[];
      };

      await indexer.updateIndex?.();
      const results = indexer.search?.(query, limit) ?? [];
      if (results.length > 0) {
        return {
          query,
          results: results.map((result) => ({
            sessionId: basename(result.sessionPath),
            sessionFile: result.sessionPath,
            startedAt: result.timestamp,
            cwd: result.project,
            // NOTE(review): the indexer does not report a per-session match
            // count, so this is hard-coded to 1 — confirm this is intended.
            matchCount: 1,
            topMatches: (indexer.getSessionSnippets?.(result.sessionPath, query, 4) ?? [result.snippet])
              .filter(Boolean)
              .map((excerpt) => ({
                role: "match",
                excerpt,
              })),
          })),
        };
      }
    } catch {
      // Fall back to direct JSONL scanning below.
    }
  }

  const sessionDir = join(getFeynmanHome(), "sessions");
  // Individual search terms (>= 2 chars) matched in addition to the full query.
  const terms = query
    .toLowerCase()
    .split(/\s+/)
    .map((term) => term.trim())
    .filter((term) => term.length >= 2);
  const needle = query.toLowerCase();

  let files: string[] = [];
  try {
    files = (await readdir(sessionDir))
      .filter((entry) => entry.endsWith(".jsonl"))
      .map((entry) => join(sessionDir, entry));
  } catch {
    // Session directory missing or unreadable: nothing to search.
    return { query, results: [] };
  }

  const sessions = [];
  for (const file of files) {
    const raw = await readFile(file, "utf8").catch(() => "");
    if (!raw) {
      continue;
    }

    // Session metadata defaults; overwritten by the "session" record if present.
    let sessionId = basename(file);
    let startedAt: string | undefined;
    let cwd: string | undefined;
    const matches: Array<{ role: string; timestamp?: string; excerpt: string }> = [];

    for (const line of raw.split("\n")) {
      if (!line.trim()) {
        continue;
      }
      try {
        const record = JSON.parse(line) as {
          type?: string;
          id?: string;
          timestamp?: string;
          cwd?: string;
          message?: { role?: string; content?: unknown };
        };
        if (record.type === "session") {
          sessionId = record.id ?? sessionId;
          startedAt = record.timestamp;
          cwd = record.cwd;
          continue;
        }
        if (record.type !== "message" || !record.message) {
          continue;
        }

        const text = extractMessageText(record.message);
        if (!text) {
          continue;
        }
        const lower = text.toLowerCase();
        // Match the whole query or any individual term.
        const matched = lower.includes(needle) || terms.some((term) => lower.includes(term));
        if (!matched) {
          continue;
        }
        matches.push({
          role: record.message.role ?? "unknown",
          timestamp: record.timestamp,
          excerpt: buildExcerpt(text, query),
        });
      } catch {
        // Skip malformed JSONL lines rather than failing the whole search.
        continue;
      }
    }

    if (matches.length === 0) {
      continue;
    }

    // File mtime is used only as a tie-breaker in the sort below.
    let mtime = 0;
    try {
      mtime = (await stat(file)).mtimeMs;
    } catch {
      mtime = 0;
    }

    sessions.push({
      sessionId,
      sessionFile: file,
      startedAt,
      cwd,
      matchCount: matches.length,
      topMatches: matches.slice(0, 4),
      mtime,
    });
  }

  // Most matches first; newest file wins ties.
  sessions.sort((a, b) => {
    if (b.matchCount !== a.matchCount) {
      return b.matchCount - a.matchCount;
    }
    return b.mtime - a.mtime;
  });

  return {
    query,
    // Strip the internal mtime field before returning.
    results: sessions.slice(0, limit).map(({ mtime: _mtime, ...session }) => session),
  };
}
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
import { readFileSync } from "node:fs";
|
|
2
|
+
import { homedir } from "node:os";
|
|
3
|
+
import { dirname, resolve as resolvePath } from "node:path";
|
|
4
|
+
import { fileURLToPath } from "node:url";
|
|
5
|
+
|
|
6
|
+
// Package root: two directory levels above this file.
export const APP_ROOT = resolvePath(dirname(fileURLToPath(import.meta.url)), "..", "..");

// Version string read from the package manifest; "dev" when the manifest
// cannot be read or parsed.
export const FEYNMAN_VERSION = (() => {
  try {
    const pkg = JSON.parse(readFileSync(resolvePath(APP_ROOT, "package.json"), "utf8")) as { version?: string };
    return pkg.version ?? "dev";
  } catch {
    return "dev";
  }
})();

// ASCII-art "FEYNMAN" banner.
export const FEYNMAN_AGENT_LOGO = [
  "███████╗███████╗██╗   ██╗███╗   ██╗███╗   ███╗ █████╗ ███╗   ██╗",
  "██╔════╝██╔════╝╚██╗ ██╔╝████╗  ██║████╗ ████║██╔══██╗████╗  ██║",
  "█████╗  █████╗   ╚████╔╝ ██╔██╗ ██║██╔████╔██║███████║██╔██╗ ██║",
  "██╔══╝  ██╔══╝    ╚██╔╝  ██║╚██╗██║██║╚██╔╝██║██╔══██║██║╚██╗██║",
  "██║     ███████╗   ██║   ██║ ╚████║██║ ╚═╝ ██║██║  ██║██║ ╚████║",
  "╚═╝     ╚══════╝   ╚═╝   ╚═╝  ╚═══╝╚═╝     ╚═╝╚═╝  ╚═╝╚═╝  ╚═══╝",
];

// Names of the research tools exposed by this package. The alpha_* entries
// presumably correspond to tools registered in alpha.ts — verify against
// registerAlphaTools if this list is extended.
export const FEYNMAN_RESEARCH_TOOLS = [
  "alpha_search",
  "alpha_get_paper",
  "alpha_ask_paper",
  "alpha_annotate_paper",
  "alpha_list_annotations",
  "alpha_read_code",
  "session_search",
  "preview_file",
];
|
|
36
|
+
|
|
37
|
+
export function formatToolText(result: unknown): string {
|
|
38
|
+
return typeof result === "string" ? result : JSON.stringify(result, null, 2);
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
export function getFeynmanHome(): string {
|
|
42
|
+
const agentDir = process.env.FEYNMAN_CODING_AGENT_DIR ??
|
|
43
|
+
process.env.PI_CODING_AGENT_DIR ??
|
|
44
|
+
resolvePath(homedir(), ".feynman", "agent");
|
|
45
|
+
return dirname(agentDir);
|
|
46
|
+
}
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
import type { ExtensionAPI } from "@mariozechner/pi-coding-agent";
|
|
2
|
+
|
|
3
|
+
import { registerAlphaCommands, registerAlphaTools } from "./research-tools/alpha.js";
|
|
4
|
+
import { installFeynmanHeader } from "./research-tools/header.js";
|
|
5
|
+
import { registerHelpCommand } from "./research-tools/help.js";
|
|
6
|
+
import { registerInitCommand, registerPreviewTool, registerSessionSearchTool } from "./research-tools/project.js";
|
|
7
|
+
|
|
8
|
+
/**
 * Extension entry point: wires Feynman's research tooling into the Pi agent.
 * Installs the custom session header on session start/switch and registers
 * the alphaXiv, help, init, session-search, and preview commands/tools.
 */
export default function researchTools(pi: ExtensionAPI): void {
  // Shared across header installs so the agent summary promise is reused
  // rather than recomputed on every session_start/session_switch.
  const cache: { agentSummaryPromise?: Promise<{ agents: string[]; chains: string[] }> } = {};

  pi.on("session_start", async (_event, ctx) => {
    await installFeynmanHeader(pi, ctx, cache);
  });

  // Re-install the header when the user switches to another session.
  pi.on("session_switch", async (_event, ctx) => {
    await installFeynmanHeader(pi, ctx, cache);
  });

  registerAlphaCommands(pi);
  registerHelpCommand(pi);
  registerInitCommand(pi);
  registerSessionSearchTool(pi);
  registerAlphaTools(pi);
  registerPreviewTool(pi);
}
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
/** Metadata for one markdown prompt file, parsed from its frontmatter. */
export type PromptSpec = {
  name: string;
  description: string;
  args: string;
  section: string;
  // True when the prompt is also exposed as a top-level CLI workflow.
  topLevelCli: boolean;
};

/** Metadata for a slash command registered by the extension. */
export type ExtensionCommandSpec = {
  name: string;
  args: string;
  section: string;
  description: string;
  // True when the command appears in public documentation.
  publicDocs: boolean;
};

/** A slash command contributed by a bundled Pi package. */
export type LivePackageCommandSpec = {
  name: string;
  usage: string;
};

/** A titled group of bundled package commands. */
export type LivePackageCommandGroup = {
  title: string;
  commands: LivePackageCommandSpec[];
};

/** One CLI command's usage line and description. */
export type CliCommand = {
  usage: string;
  description: string;
};

/** A titled section of CLI commands for help output. */
export type CliCommandSection = {
  title: string;
  commands: CliCommand[];
};

/** Reads prompt specs from the markdown files under `<appRoot>/prompts`. */
export declare function readPromptSpecs(appRoot: string): PromptSpec[];
export declare const extensionCommandSpecs: ExtensionCommandSpec[];
export declare const livePackageCommandGroups: LivePackageCommandGroup[];
export declare const cliCommandSections: CliCommandSection[];
export declare const legacyFlags: CliCommand[];
export declare const topLevelCommandNames: string[];

/** Formats a command as slash usage, e.g. "/run <agent> <task>". */
export declare function formatSlashUsage(command: { name: string; args?: string }): string;
/** Formats a command as CLI usage, e.g. "feynman audit <item>". */
export declare function formatCliWorkflowUsage(command: { name: string; args?: string }): string;
/** Looks up an extension command spec by name; undefined when unknown. */
export declare function getExtensionCommandSpec(name: string): ExtensionCommandSpec | undefined;
|
|
@@ -0,0 +1,133 @@
|
|
|
1
|
+
import { readFileSync, readdirSync } from "node:fs";
|
|
2
|
+
import { resolve } from "node:path";
|
|
3
|
+
|
|
4
|
+
/**
 * Parses a leading frontmatter block ("---\n...\n---") into a flat map of
 * string keys to string values. Only simple "key: value" lines are
 * understood; lines without a colon or with an empty key are ignored.
 * Returns an empty object when no frontmatter block is present.
 */
function parseFrontmatter(text) {
  const match = text.match(/^---\n([\s\S]*?)\n---\n?/);
  if (!match) {
    return {};
  }

  const result = {};
  for (const rawLine of match[1].split("\n")) {
    const colon = rawLine.indexOf(":");
    if (colon < 0) {
      continue;
    }
    const key = rawLine.slice(0, colon).trim();
    if (key) {
      result[key] = rawLine.slice(colon + 1).trim();
    }
  }
  return result;
}
|
|
19
|
+
|
|
20
|
+
/**
 * Reads every markdown prompt under `<appRoot>/prompts` and returns its
 * spec: name (filename sans .md) plus frontmatter-derived description, args,
 * section (default "Research Workflows"), and topLevelCli flag.
 */
export function readPromptSpecs(appRoot) {
  const promptsDir = resolve(appRoot, "prompts");
  const specs = [];
  for (const file of readdirSync(promptsDir)) {
    if (!file.endsWith(".md")) {
      continue;
    }
    const fm = parseFrontmatter(readFileSync(resolve(promptsDir, file), "utf8"));
    specs.push({
      name: file.replace(/\.md$/, ""),
      description: fm.description ?? "",
      args: fm.args ?? "",
      section: fm.section ?? "Research Workflows",
      // Frontmatter values are strings; only the literal "true" enables this.
      topLevelCli: fm.topLevelCli === "true",
    });
  }
  return specs;
}
|
|
36
|
+
|
|
37
|
+
// Slash commands registered by the research-tools extension.
export const extensionCommandSpecs = [
  { name: "help", args: "", section: "Project & Session", description: "Show grouped Feynman commands and prefill the editor with a selected command.", publicDocs: true },
  { name: "init", args: "", section: "Project & Session", description: "Bootstrap AGENTS.md and session-log folders for a research project.", publicDocs: true },
  { name: "alpha-login", args: "", section: "Setup", description: "Sign in to alphaXiv from inside Feynman.", publicDocs: true },
  { name: "alpha-status", args: "", section: "Setup", description: "Show alphaXiv authentication status.", publicDocs: true },
  { name: "alpha-logout", args: "", section: "Setup", description: "Clear alphaXiv auth from inside Feynman.", publicDocs: true },
];

// Slash commands contributed by bundled Pi packages (not by this extension),
// grouped for help output.
export const livePackageCommandGroups = [
  {
    title: "Agents & Delegation",
    commands: [
      { name: "agents", usage: "/agents" },
      { name: "run", usage: "/run <agent> <task>" },
      { name: "chain", usage: "/chain agent1 -> agent2" },
      { name: "parallel", usage: "/parallel agent1 -> agent2" },
    ],
  },
  {
    title: "Bundled Package Commands",
    commands: [
      { name: "ps", usage: "/ps" },
      { name: "schedule-prompt", usage: "/schedule-prompt" },
      { name: "search", usage: "/search" },
      { name: "preview", usage: "/preview" },
      { name: "new", usage: "/new" },
      { name: "quit", usage: "/quit" },
      { name: "exit", usage: "/exit" },
    ],
  },
];

// Sections of the `feynman` CLI help text.
export const cliCommandSections = [
  {
    title: "Core",
    commands: [
      { usage: "feynman", description: "Launch the interactive REPL." },
      { usage: "feynman chat [prompt]", description: "Start chat explicitly, optionally with an initial prompt." },
      { usage: "feynman help", description: "Show CLI help." },
      { usage: "feynman setup", description: "Run the guided setup wizard." },
      { usage: "feynman doctor", description: "Diagnose config, auth, Pi runtime, and preview dependencies." },
      { usage: "feynman status", description: "Show the current setup summary." },
    ],
  },
  {
    title: "Model Management",
    commands: [
      { usage: "feynman model list", description: "List available models in Pi auth storage." },
      { usage: "feynman model login [id]", description: "Login to a Pi OAuth model provider." },
      { usage: "feynman model logout [id]", description: "Logout from a Pi OAuth model provider." },
      { usage: "feynman model set <provider/model>", description: "Set the default model." },
    ],
  },
  {
    title: "AlphaXiv",
    commands: [
      { usage: "feynman alpha login", description: "Sign in to alphaXiv." },
      { usage: "feynman alpha logout", description: "Clear alphaXiv auth." },
      { usage: "feynman alpha status", description: "Check alphaXiv auth status." },
    ],
  },
  {
    title: "Utilities",
    commands: [
      { usage: "feynman search status", description: "Show Pi web-access status and config path." },
      { usage: "feynman update [package]", description: "Update installed packages, or a specific package." },
    ],
  },
];

// Legacy flag-style invocations kept for backward compatibility.
export const legacyFlags = [
  { usage: '--prompt "<text>"', description: "Run one prompt and exit." },
  { usage: "--alpha-login", description: "Sign in to alphaXiv and exit." },
  { usage: "--alpha-logout", description: "Clear alphaXiv auth and exit." },
  { usage: "--alpha-status", description: "Show alphaXiv auth status and exit." },
  { usage: "--model <provider:model>", description: "Force a specific model." },
  { usage: "--thinking <level>", description: "Set thinking level: off | minimal | low | medium | high | xhigh." },
  { usage: "--cwd <path>", description: "Set the working directory for tools." },
  { usage: "--session-dir <path>", description: "Set the session storage directory." },
  { usage: "--new-session", description: "Start a new persisted session." },
  { usage: "--doctor", description: "Alias for `feynman doctor`." },
  { usage: "--setup-preview", description: "Alias for `feynman setup preview`." },
];

// Names recognized as top-level CLI subcommands.
export const topLevelCommandNames = ["alpha", "chat", "doctor", "help", "model", "search", "setup", "status", "update"];
|
|
122
|
+
|
|
123
|
+
/**
 * Renders a command's slash usage string, e.g. "/run <agent> <task>".
 */
export function formatSlashUsage(command) {
  const argsSuffix = command.args ? ` ${command.args}` : "";
  return `/${command.name}${argsSuffix}`;
}
|
|
126
|
+
|
|
127
|
+
/**
 * Renders a command's CLI workflow usage string, e.g. "feynman audit <item>".
 */
export function formatCliWorkflowUsage(command) {
  const argsSuffix = command.args ? ` ${command.args}` : "";
  return `feynman ${command.name}${argsSuffix}`;
}
|
|
130
|
+
|
|
131
|
+
/**
 * Looks up an extension command spec by name; undefined when unknown.
 */
export function getExtensionCommandSpec(name) {
  for (const spec of extensionCommandSpecs) {
    if (spec.name === name) {
      return spec;
    }
  }
  return undefined;
}
|
package/package.json
ADDED
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@companion-ai/feynman",
|
|
3
|
+
"version": "0.2.0",
|
|
4
|
+
"description": "Research-first CLI agent built on Pi and alphaXiv",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"bin": {
|
|
7
|
+
"feynman": "./bin/feynman.js"
|
|
8
|
+
},
|
|
9
|
+
"files": [
|
|
10
|
+
"bin/",
|
|
11
|
+
"dist/",
|
|
12
|
+
"metadata/",
|
|
13
|
+
".feynman/agents/",
|
|
14
|
+
".feynman/settings.json",
|
|
15
|
+
".feynman/SYSTEM.md",
|
|
16
|
+
".feynman/themes/",
|
|
17
|
+
"extensions/",
|
|
18
|
+
"prompts/",
|
|
19
|
+
"scripts/",
|
|
20
|
+
"skills/",
|
|
21
|
+
"AGENTS.md",
|
|
22
|
+
"README.md",
|
|
23
|
+
".env.example"
|
|
24
|
+
],
|
|
25
|
+
"scripts": {
|
|
26
|
+
"build": "tsc -p tsconfig.build.json",
|
|
27
|
+
"dev": "tsx src/index.ts",
|
|
28
|
+
"postinstall": "node ./scripts/patch-embedded-pi.mjs",
|
|
29
|
+
"start": "tsx src/index.ts",
|
|
30
|
+
"start:dist": "node ./bin/feynman.js",
|
|
31
|
+
"test": "node --import tsx --test --test-concurrency=1 tests/*.test.ts",
|
|
32
|
+
"typecheck": "tsc --noEmit"
|
|
33
|
+
},
|
|
34
|
+
"keywords": [
|
|
35
|
+
"pi-package",
|
|
36
|
+
"research-agent",
|
|
37
|
+
"literature-review",
|
|
38
|
+
"experiments"
|
|
39
|
+
],
|
|
40
|
+
"pi": {
|
|
41
|
+
"extensions": [
|
|
42
|
+
"./extensions"
|
|
43
|
+
],
|
|
44
|
+
"prompts": [
|
|
45
|
+
"./prompts"
|
|
46
|
+
]
|
|
47
|
+
},
|
|
48
|
+
"dependencies": {
|
|
49
|
+
"@companion-ai/alpha-hub": "^0.1.2",
|
|
50
|
+
"@mariozechner/pi-ai": "^0.62.0",
|
|
51
|
+
"@mariozechner/pi-coding-agent": "^0.62.0",
|
|
52
|
+
"@sinclair/typebox": "^0.34.48",
|
|
53
|
+
"dotenv": "^17.3.1"
|
|
54
|
+
},
|
|
55
|
+
"devDependencies": {
|
|
56
|
+
"@types/node": "^25.5.0",
|
|
57
|
+
"tsx": "^4.21.0",
|
|
58
|
+
"typescript": "^5.9.3"
|
|
59
|
+
},
|
|
60
|
+
"engines": {
|
|
61
|
+
"node": ">=20.18.1"
|
|
62
|
+
},
|
|
63
|
+
"repository": {
|
|
64
|
+
"type": "git",
|
|
65
|
+
"url": "git+https://github.com/getcompanion-ai/feynman.git"
|
|
66
|
+
},
|
|
67
|
+
"homepage": "https://github.com/getcompanion-ai/feynman#readme",
|
|
68
|
+
"bugs": {
|
|
69
|
+
"url": "https://github.com/getcompanion-ai/feynman/issues"
|
|
70
|
+
}
|
|
71
|
+
}
|
package/prompts/audit.md
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
---
|
|
2
|
+
description: Compare a paper's claims against its public codebase and identify mismatches, omissions, and reproducibility risks.
|
|
3
|
+
args: <item>
|
|
4
|
+
section: Research Workflows
|
|
5
|
+
topLevelCli: true
|
|
6
|
+
---
|
|
7
|
+
Audit the paper and codebase for: $@
|
|
8
|
+
|
|
9
|
+
Requirements:
|
|
10
|
+
- Before starting, outline the audit plan: which paper, which repo, which claims to check. Present the plan to the user and confirm before proceeding.
|
|
11
|
+
- Use the `researcher` subagent for evidence gathering and the `verifier` subagent to verify sources and add inline citations when the audit is non-trivial.
|
|
12
|
+
- Compare claimed methods, defaults, metrics, and data handling against the actual code.
|
|
13
|
+
- Call out missing code, mismatches, ambiguous defaults, and reproduction risks.
|
|
14
|
+
- Save exactly one audit artifact to `outputs/` as markdown.
|
|
15
|
+
- End with a `Sources` section containing paper and repository URLs.
|