@residue/cli 0.0.2 → 0.0.4

package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@residue/cli",
-  "version": "0.0.2",
+  "version": "0.0.4",
   "repository": {
     "type": "git",
     "url": "https://github.com/butttons/residue",
@@ -0,0 +1,42 @@
+import { ok, type ResultAsync, safeTry } from "neverthrow";
+import {
+  getPendingPath,
+  getProjectRoot,
+  readPending,
+  writePending,
+} from "@/lib/pending";
+import type { CliError } from "@/utils/errors";
+import { createLogger } from "@/utils/logger";
+
+const log = createLogger("clear");
+
+export function clear(opts?: { id?: string }): ResultAsync<void, CliError> {
+  return safeTry(async function* () {
+    const projectRoot = yield* getProjectRoot();
+    const pendingPath = yield* getPendingPath(projectRoot);
+    const sessions = yield* readPending(pendingPath);
+
+    if (sessions.length === 0) {
+      log.info("No pending sessions to clear.");
+      return ok(undefined);
+    }
+
+    if (opts?.id) {
+      const targetId = opts.id;
+      const isFound = sessions.some((s) => s.id === targetId);
+      if (!isFound) {
+        log.info(`Session ${targetId} not found in pending queue.`);
+        return ok(undefined);
+      }
+      const remaining = sessions.filter((s) => s.id !== targetId);
+      yield* writePending({ path: pendingPath, sessions: remaining });
+      log.info(`Cleared session ${targetId}.`);
+      return ok(undefined);
+    }
+
+    const count = sessions.length;
+    yield* writePending({ path: pendingPath, sessions: [] });
+    log.info(`Cleared ${count} pending session(s).`);
+    return ok(undefined);
+  });
+}
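
The clear, status, and sync code in this release all drive the pending-session queue through "@/lib/pending", which is not included in the diff. A minimal sketch of the shape those call sites appear to assume (type and field names here are inferred from usage, not taken from the package):

```ts
// Sketch only: the real "@/lib/pending" module is not part of this diff.
// Field names are inferred from how clear, status, and sync read them.
import type { ResultAsync } from "neverthrow";
import type { CliError } from "@/utils/errors";

type PendingCommit = {
  sha: string;            // full commit SHA (sync shortens it for search metadata)
  branch: string | null;  // branch captured at commit time, if any
};

type PendingSession = {
  id: string;
  agent: string;                 // adapter name, e.g. "claude-code" (assumed)
  status: "open" | "ended";
  commits: PendingCommit[];
};

// Signatures implied by the call sites above (assumptions, not the real API):
declare function getProjectRoot(): ResultAsync<string, CliError>;
declare function getPendingPath(projectRoot: string): ResultAsync<string, CliError>;
declare function readPending(path: string): ResultAsync<PendingSession[], CliError>;
declare function writePending(opts: {
  path: string;
  sessions: PendingSession[];
}): ResultAsync<void, CliError>;
```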
@@ -0,0 +1,266 @@
+import { err, ok, ResultAsync, safeTry } from "neverthrow";
+import { resolveConfig } from "@/lib/config";
+import { CliError, toCliError } from "@/utils/errors";
+import { createLogger } from "@/utils/logger";
+
+const log = createLogger("search");
+
+type SearchContentChunk = {
+  type: string;
+  text: string;
+};
+
+type SearchResultItem = {
+  file_id: string;
+  filename: string;
+  score: number;
+  attributes: Record<string, unknown>;
+  content: SearchContentChunk[];
+};
+
+type SearchResponse = {
+  object: string;
+  search_query: string;
+  data: SearchResultItem[];
+  has_more: boolean;
+  next_page: string | null;
+};
+
+type AiSearchResponse = SearchResponse & {
+  response: string;
+};
+
+type SessionCommit = {
+  commit_sha: string;
+  committed_at: number | null;
+  org: string;
+  repo: string;
+  branch: string | null;
+};
+
+function fetchSearch(opts: {
+  workerUrl: string;
+  token: string;
+  query: string;
+  isAi: boolean;
+}): ResultAsync<SearchResponse | AiSearchResponse, CliError> {
+  const path = opts.isAi ? "/api/search/ai" : "/api/search";
+  const url = `${opts.workerUrl}${path}?q=${encodeURIComponent(opts.query)}`;
+
+  return ResultAsync.fromPromise(
+    fetch(url, {
+      headers: { Authorization: `Bearer ${opts.token}` },
+    }).then(async (response) => {
+      if (!response.ok) {
+        const body = await response.text().catch(() => "");
+        throw new Error(`HTTP ${response.status}: ${body}`);
+      }
+      return response.json() as Promise<SearchResponse | AiSearchResponse>;
+    }),
+    toCliError({ message: "Search request failed", code: "NETWORK_ERROR" }),
+  );
+}
+
+function fetchSessionCommits(opts: {
+  workerUrl: string;
+  token: string;
+  sessionId: string;
+}): ResultAsync<SessionCommit[], CliError> {
+  const url = `${opts.workerUrl}/api/sessions/${opts.sessionId}/commits`;
+
+  return ResultAsync.fromPromise(
+    fetch(url, {
+      headers: { Authorization: `Bearer ${opts.token}` },
+    }).then(async (response) => {
+      if (!response.ok) return [];
+      const data = (await response.json()) as { commits: SessionCommit[] };
+      return data.commits;
+    }),
+    toCliError({
+      message: "Failed to fetch session commits",
+      code: "NETWORK_ERROR",
+    }),
+  ).orElse(() => ok([] as SessionCommit[]));
+}
+
+/**
+ * Extract a session ID from an R2 filename like "sessions/<uuid>.json"
+ * or "search/<uuid>.txt".
+ */
+function extractSessionId(filename: string): string {
+  const match = filename.match(/(?:sessions|search)\/(.+?)\.(?:json|txt)$/);
+  return match ? match[1] : filename;
+}
+
+/**
+ * Truncate text to a max length, appending "..." if truncated.
+ */
+function truncate(opts: { text: string; maxLength: number }): string {
+  if (opts.text.length <= opts.maxLength) return opts.text;
+  return opts.text.slice(0, opts.maxLength) + "...";
+}
+
+/**
+ * Clean up a content chunk for display: collapse whitespace,
+ * strip JSON noise, and truncate.
+ */
+function formatSnippet(text: string): string {
+  const cleaned = text
+    .replace(/\\n/g, " ")
+    .replace(/\\"/g, '"')
+    .replace(/\s+/g, " ")
+    .trim();
+  return truncate({ text: cleaned, maxLength: 200 });
+}
+
+function buildCommitUrl(opts: {
+  workerUrl: string;
+  org: string;
+  repo: string;
+  sha: string;
+}): string {
+  return `${opts.workerUrl}/app/${opts.org}/${opts.repo}/${opts.sha}`;
+}
+
+function renderSearchResults(opts: {
+  results: SearchResponse;
+  commitMap: Map<string, SessionCommit[]>;
+  workerUrl: string;
+}): void {
+  if (opts.results.data.length === 0) {
+    log.info("No results found.");
+    return;
+  }
+
+  log.info(
+    `${opts.results.data.length} result(s) for "${opts.results.search_query}"\n`,
+  );
+
+  for (const item of opts.results.data) {
+    const sessionId = extractSessionId(item.filename);
+    const scorePercent = (item.score * 100).toFixed(1);
+
+    log.info(` ${sessionId} [${scorePercent}%]`);
+
+    const snippet = item.content[0]?.text;
+    if (snippet) {
+      log.info(` ${formatSnippet(snippet)}`);
+    }
+
+    const commits = opts.commitMap.get(sessionId) ?? [];
+    if (commits.length > 0) {
+      for (const commit of commits) {
+        const url = buildCommitUrl({
+          workerUrl: opts.workerUrl,
+          org: commit.org,
+          repo: commit.repo,
+          sha: commit.commit_sha,
+        });
+        log.info(` -> ${url}`);
+      }
+    }
+
+    log.info("");
+  }
+}
+
+function renderAiSearchResults(opts: {
+  results: AiSearchResponse;
+  commitMap: Map<string, SessionCommit[]>;
+  workerUrl: string;
+}): void {
+  if (opts.results.response) {
+    log.info(opts.results.response);
+    log.info("");
+  }
+
+  if (opts.results.data.length > 0) {
+    log.info(`--- Sources (${opts.results.data.length}) ---\n`);
+    for (const item of opts.results.data) {
+      const sessionId = extractSessionId(item.filename);
+      const scorePercent = (item.score * 100).toFixed(1);
+      log.info(` ${sessionId} [${scorePercent}%]`);
+
+      const commits = opts.commitMap.get(sessionId) ?? [];
+      if (commits.length > 0) {
+        for (const commit of commits) {
+          const url = buildCommitUrl({
+            workerUrl: opts.workerUrl,
+            org: commit.org,
+            repo: commit.repo,
+            sha: commit.commit_sha,
+          });
+          log.info(` -> ${url}`);
+        }
+      }
+    }
+    log.info("");
+  }
+}
+
+function isAiSearchResponse(
+  response: SearchResponse | AiSearchResponse,
+): response is AiSearchResponse {
+  return "response" in response;
+}
+
+export function search(opts: {
+  query: string;
+  isAi?: boolean;
+}): ResultAsync<void, CliError> {
+  return safeTry(async function* () {
+    const config = yield* resolveConfig();
+    if (!config) {
+      return err(
+        new CliError({
+          message: "Not configured. Run 'residue login' first.",
+          code: "CONFIG_MISSING",
+        }),
+      );
+    }
+
+    const results = yield* fetchSearch({
+      workerUrl: config.worker_url,
+      token: config.token,
+      query: opts.query,
+      isAi: opts.isAi ?? false,
+    });
+
+    // Fetch commits for each session in parallel
+    const sessionIds = results.data.map((item) =>
+      extractSessionId(item.filename),
+    );
+    const uniqueSessionIds = [...new Set(sessionIds)];
+
+    const commitResults = yield* ResultAsync.combine(
+      uniqueSessionIds.map((sessionId) =>
+        fetchSessionCommits({
+          workerUrl: config.worker_url,
+          token: config.token,
+          sessionId,
+        }).map((commits) => ({ sessionId, commits })),
+      ),
+    );
+
+    const commitMap = new Map<string, SessionCommit[]>();
+    for (const entry of commitResults) {
+      commitMap.set(entry.sessionId, entry.commits);
+    }
+
+    if (opts.isAi && isAiSearchResponse(results)) {
+      renderAiSearchResults({
+        results,
+        commitMap,
+        workerUrl: config.worker_url,
+      });
+    } else {
+      renderSearchResults({
+        results,
+        commitMap,
+        workerUrl: config.worker_url,
+      });
+    }
+
+    return ok(undefined);
+  });
+}
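
search() returns a neverthrow ResultAsync rather than throwing, so a caller outside the Commander wiring would consume it roughly as below. This is a sketch only: the query string is an arbitrary example, and the real CLI routes errors through wrapCommand, whose implementation is not part of this diff.

```ts
// Consumption sketch for the ResultAsync returned by search().
import { search } from "@/commands/search";

const result = await search({ query: "rate limiting", isAi: true });

result.match(
  () => {
    // Success: results were already printed through the command's logger.
  },
  (error) => {
    // CliError carries a message (see sync's use of error.message); exit non-zero.
    console.error(error.message);
    process.exitCode = 1;
  },
);
```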
@@ -0,0 +1,180 @@
+import { readFile, stat } from "fs/promises";
+import { ok, okAsync, ResultAsync, safeTry } from "neverthrow";
+import { join } from "path";
+import { readConfig, readLocalConfig } from "@/lib/config";
+import { isGitRepo } from "@/lib/git";
+import { getPendingPath, getProjectRoot, readPending } from "@/lib/pending";
+import type { CliError } from "@/utils/errors";
+import { toCliError } from "@/utils/errors";
+import { createLogger } from "@/utils/logger";
+
+const log = createLogger("status");
+
+function checkFileExists(path: string): ResultAsync<boolean, CliError> {
+  return ResultAsync.fromPromise(
+    stat(path).then(() => true),
+    toCliError({ message: "Failed to check file", code: "IO_ERROR" }),
+  ).orElse(() => okAsync(false));
+}
+
+function checkHookInstalled(opts: {
+  gitDir: string;
+  hookName: string;
+  needle: string;
+}): ResultAsync<boolean, CliError> {
+  const hookPath = join(opts.gitDir, "hooks", opts.hookName);
+  return ResultAsync.fromPromise(
+    readFile(hookPath, "utf-8").then((content) =>
+      content.includes(opts.needle),
+    ),
+    toCliError({ message: "Failed to read hook", code: "IO_ERROR" }),
+  ).orElse(() => okAsync(false));
+}
+
+function getGitDir(): ResultAsync<string, CliError> {
+  return ResultAsync.fromPromise(
+    (async () => {
+      const proc = Bun.spawn(["git", "rev-parse", "--git-dir"], {
+        stdout: "pipe",
+        stderr: "pipe",
+      });
+      await proc.exited;
+      return (await new Response(proc.stdout).text()).trim();
+    })(),
+    toCliError({ message: "Failed to get git directory", code: "GIT_ERROR" }),
+  );
+}
+
+export function status(): ResultAsync<void, CliError> {
+  return safeTry(async function* () {
+    const isRepo = yield* isGitRepo();
+    if (!isRepo) {
+      log.info("Not a git repository.");
+      return ok(undefined);
+    }
+
+    const projectRoot = yield* getProjectRoot();
+
+    // -- Auth / Login state --
+    log.info("Login");
+
+    const globalConfig = yield* readConfig();
+    if (globalConfig) {
+      log.info(` global: ${globalConfig.worker_url}`);
+    } else {
+      log.info(" global: not configured");
+    }
+
+    const localConfig = yield* readLocalConfig(projectRoot);
+    if (localConfig) {
+      log.info(` local: ${localConfig.worker_url}`);
+    } else {
+      log.info(" local: not configured");
+    }
+
+    const isActiveConfig = localConfig ?? globalConfig;
+    if (isActiveConfig) {
+      log.info(` active: ${isActiveConfig.worker_url}`);
+    } else {
+      log.info(' active: none (run "residue login" to configure)');
+    }
+
+    log.info("");
+
+    // -- Git hooks --
+    log.info("Hooks");
+
+    const gitDir = yield* getGitDir();
+
+    const isPostCommitInstalled = yield* checkHookInstalled({
+      gitDir,
+      hookName: "post-commit",
+      needle: "residue capture",
+    });
+    log.info(
+      ` post-commit: ${isPostCommitInstalled ? "installed" : "not installed"}`,
+    );
+
+    const isPrePushInstalled = yield* checkHookInstalled({
+      gitDir,
+      hookName: "pre-push",
+      needle: "residue sync",
+    });
+    log.info(
+      ` pre-push: ${isPrePushInstalled ? "installed" : "not installed"}`,
+    );
+
+    if (!isPostCommitInstalled || !isPrePushInstalled) {
+      log.info(' run "residue init" to install missing hooks');
+    }
+
+    log.info("");
+
+    // -- Agent adapters --
+    log.info("Adapters");
+
+    const isClaudeSetup = yield* checkFileExists(
+      join(projectRoot, ".claude", "settings.json"),
+    );
+
+    let isClaudeHookConfigured = false;
+    if (isClaudeSetup) {
+      isClaudeHookConfigured = yield* ResultAsync.fromPromise(
+        readFile(join(projectRoot, ".claude", "settings.json"), "utf-8").then(
+          (content) => content.includes("residue hook claude-code"),
+        ),
+        toCliError({
+          message: "Failed to read claude settings",
+          code: "IO_ERROR",
+        }),
+      ).orElse(() => okAsync(false));
+    }
+
+    log.info(
+      ` claude-code: ${isClaudeHookConfigured ? "configured" : "not configured"}`,
+    );
+
+    const isPiSetup = yield* checkFileExists(
+      join(projectRoot, ".pi", "extensions", "residue.ts"),
+    );
+    log.info(` pi: ${isPiSetup ? "configured" : "not configured"}`);
+
+    log.info("");
+
+    // -- Pending sessions --
+    log.info("Sessions");
+
+    const pendingPath = yield* getPendingPath(projectRoot);
+    const sessions = yield* readPending(pendingPath);
+
+    if (sessions.length === 0) {
+      log.info(" no pending sessions");
+    } else {
+      const openSessions = sessions.filter((s) => s.status === "open");
+      const endedSessions = sessions.filter((s) => s.status === "ended");
+      const totalCommits = sessions.reduce(
+        (sum, s) => sum + s.commits.length,
+        0,
+      );
+      const sessionsWithCommits = sessions.filter((s) => s.commits.length > 0);
+
+      log.info(` total: ${sessions.length}`);
+      log.info(` open: ${openSessions.length}`);
+      log.info(` ended: ${endedSessions.length}`);
+      log.info(
+        ` commits: ${totalCommits} across ${sessionsWithCommits.length} session(s)`,
+      );
+
+      const isReadyToSync = sessionsWithCommits.length > 0;
+      if (isReadyToSync) {
+        log.info(
+          ` ${sessionsWithCommits.length} session(s) ready to sync on next push`,
+        );
+      } else {
+        log.info(" no sessions ready to sync (no commits captured yet)");
+      }
+    }
+
+    return ok(undefined);
+  });
+}
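
checkFileExists and checkHookInstalled both use the same wrap-then-recover idiom: ResultAsync.fromPromise maps a rejected promise into a CliError via toCliError, and orElse then downgrades that error to a default value so a missing file or hook never aborts the status report. A stripped-down sketch of the idiom, with stand-in CliError and toCliError definitions since the package's own "@/utils/errors" is not shown in this diff:

```ts
// Sketch of the wrap-then-recover pattern used by checkFileExists/checkHookInstalled.
// CliError and toCliError below are stand-ins; their real shapes are assumptions.
import { okAsync, ResultAsync } from "neverthrow";
import { stat } from "fs/promises";

class CliError extends Error {
  code: string;
  constructor(opts: { message: string; code: string }) {
    super(opts.message);
    this.code = opts.code;
  }
}

// Assumed shape: a curried mapper from an unknown rejection to a CliError.
const toCliError =
  (opts: { message: string; code: string }) =>
  (cause: unknown): CliError =>
    new CliError({ message: `${opts.message}: ${String(cause)}`, code: opts.code });

function fileExists(path: string): ResultAsync<boolean, CliError> {
  return ResultAsync.fromPromise(
    stat(path).then(() => true),                                   // resolves -> true
    toCliError({ message: "Failed to check file", code: "IO_ERROR" }),
  ).orElse(() => okAsync(false));                                  // any error -> false
}
```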
@@ -8,6 +8,7 @@ import {
   readPending,
   writePending,
 } from "@/lib/pending";
+import { buildSearchText, getExtractor } from "@/lib/search-text";
 import { CliError, toCliError } from "@/utils/errors";
 import { createLogger } from "@/utils/logger";
 
@@ -30,6 +31,8 @@ type CommitPayload = {
 type UploadUrlResponse = {
   url: string;
   r2_key: string;
+  search_url: string;
+  search_r2_key: string;
 };
 
 function requestUploadUrl(opts: {
@@ -197,6 +200,62 @@ function closeStaleOpenSessions(opts: {
   return ResultAsync.combine(checks).map(() => opts.sessions);
 }
 
+function generateSearchText(opts: {
+  session: PendingSession;
+  rawData: string;
+  commits: CommitPayload[];
+  org: string;
+  repo: string;
+}): string | null {
+  const extractor = getExtractor(opts.session.agent);
+  if (!extractor) {
+    log.debug(
+      "no search text extractor for agent %s, skipping",
+      opts.session.agent,
+    );
+    return null;
+  }
+
+  const searchLines = extractor(opts.rawData);
+  if (searchLines.length === 0) return null;
+
+  const branches = [
+    ...new Set(opts.session.commits.map((c) => c.branch).filter(Boolean)),
+  ];
+
+  return buildSearchText({
+    metadata: {
+      sessionId: opts.session.id,
+      agent: opts.session.agent,
+      commits: opts.commits.map((c) => c.sha.slice(0, 7)),
+      branch: branches[0] ?? "",
+      repo: `${opts.org}/${opts.repo}`,
+    },
+    lines: searchLines,
+  });
+}
+
+function uploadSearchText(opts: {
+  url: string;
+  data: string;
+}): ResultAsync<void, CliError> {
+  return ResultAsync.fromPromise(
+    fetch(opts.url, {
+      method: "PUT",
+      headers: { "Content-Type": "text/plain" },
+      body: opts.data,
+    }).then((response) => {
+      if (!response.ok) {
+        throw new Error(`R2 search upload failed: HTTP ${response.status}`);
+      }
+    }),
+    toCliError({
+      message: "Search text R2 upload failed",
+      code: "NETWORK_ERROR",
+    }),
+  );
+}
+
 function syncSessions(opts: {
   sessions: PendingSession[];
   workerUrl: string;
@@ -240,7 +299,7 @@ function syncSessions(opts: {
       continue;
    }
 
-    // Step 1: Get a presigned URL from the worker
+    // Step 1: Get presigned URLs from the worker (raw + search)
     const uploadUrlResult = await requestUploadUrl({
       workerUrl: opts.workerUrl,
       token: opts.token,
@@ -271,6 +330,31 @@ function syncSessions(opts: {
 
     log.debug("uploaded session %s data directly to R2", session.id);
 
+    // Step 2b: Generate and upload search text
+    const searchText = generateSearchText({
+      session,
+      rawData: data,
+      commits: commitsResult.value,
+      org: opts.org,
+      repo: opts.repo,
+    });
+
+    if (searchText && uploadUrlResult.value.search_url) {
+      const searchUploadResult = await uploadSearchText({
+        url: uploadUrlResult.value.search_url,
+        data: searchText,
+      });
+
+      if (searchUploadResult.isErr()) {
+        // Non-fatal: search upload failure should not block sync
+        log.warn(
+          `search text upload failed for session ${session.id}: ${searchUploadResult.error.message}`,
+        );
+      } else {
+        log.debug("uploaded search text for session %s", session.id);
+      }
+    }
+
     // Step 3: POST metadata only (no inline data)
     const metadataResult = await postSessionMetadata({
package/src/index.ts CHANGED
@@ -2,22 +2,27 @@
 
 import { Command } from "commander";
 import { capture } from "@/commands/capture";
+import { clear } from "@/commands/clear";
 import { hookClaudeCode } from "@/commands/hook";
 import { init } from "@/commands/init";
 import { login } from "@/commands/login";
 import { push } from "@/commands/push";
+import { search } from "@/commands/search";
 import { sessionEnd } from "@/commands/session-end";
 import { sessionStart } from "@/commands/session-start";
 import { setup } from "@/commands/setup";
+import { status } from "@/commands/status";
 import { sync } from "@/commands/sync";
 import { wrapCommand, wrapHookCommand } from "@/utils/errors";
 
+import packageJson from "../package.json";
+
 const program = new Command();
 
 program
   .name("residue")
   .description("Capture AI agent conversations linked to git commits")
-  .version("0.0.1");
+  .version(packageJson.version);
 
 program
   .command("login")
@@ -97,4 +102,26 @@ program
   .description("Upload pending sessions to worker (manual trigger)")
   .action(wrapCommand(() => push()));
 
+program
+  .command("clear")
+  .description("Remove pending sessions from the local queue")
+  .option("--id <session-id>", "Clear a specific session by ID")
+  .action(wrapCommand((opts: { id?: string }) => clear({ id: opts.id })));
+
+program
+  .command("status")
+  .description("Show current residue state for this project")
+  .action(wrapCommand(() => status()));
+
+program
+  .command("search")
+  .description("Search session history")
+  .argument("<query>", "Search query")
+  .option("--ai", "Use AI-powered search (generates an answer with citations)")
+  .action(
+    wrapCommand((query: string, opts: { ai?: boolean }) =>
+      search({ query, isAi: opts.ai }),
+    ),
+  );
+
 program.parse();
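
All three new subcommands are registered through wrapCommand from "@/utils/errors", which is not shown in this diff. Presumably it adapts a ResultAsync-returning command into an async Commander action that reports CliError failures; a hypothetical adapter along those lines might look like this (names and behavior here are guesses, not the package's actual helper):

```ts
// Hypothetical sketch of a wrapCommand-style adapter; the package's real
// "@/utils/errors" implementation is not part of this diff.
import type { ResultAsync } from "neverthrow";
import type { CliError } from "@/utils/errors";

function wrapResultCommand<Args extends unknown[]>(
  run: (...args: Args) => ResultAsync<void, CliError>,
) {
  return async (...args: Args): Promise<void> => {
    const result = await run(...args);
    if (result.isErr()) {
      // Surface the CliError message and signal failure to the shell.
      console.error(result.error.message);
      process.exitCode = 1;
    }
  };
}

// Example wiring (mirrors the registrations above):
// program.command("status").action(wrapResultCommand(() => status()));
```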