@residue/cli 0.0.3 → 0.0.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@residue/cli",
-   "version": "0.0.3",
+   "version": "0.0.4",
    "repository": {
      "type": "git",
      "url": "https://github.com/butttons/residue",
@@ -0,0 +1,42 @@
+ import { ok, type ResultAsync, safeTry } from "neverthrow";
+ import {
+   getPendingPath,
+   getProjectRoot,
+   readPending,
+   writePending,
+ } from "@/lib/pending";
+ import type { CliError } from "@/utils/errors";
+ import { createLogger } from "@/utils/logger";
+
+ const log = createLogger("clear");
+
+ export function clear(opts?: { id?: string }): ResultAsync<void, CliError> {
+   return safeTry(async function* () {
+     const projectRoot = yield* getProjectRoot();
+     const pendingPath = yield* getPendingPath(projectRoot);
+     const sessions = yield* readPending(pendingPath);
+
+     if (sessions.length === 0) {
+       log.info("No pending sessions to clear.");
+       return ok(undefined);
+     }
+
+     if (opts?.id) {
+       const targetId = opts.id;
+       const isFound = sessions.some((s) => s.id === targetId);
+       if (!isFound) {
+         log.info(`Session ${targetId} not found in pending queue.`);
+         return ok(undefined);
+       }
+       const remaining = sessions.filter((s) => s.id !== targetId);
+       yield* writePending({ path: pendingPath, sessions: remaining });
+       log.info(`Cleared session ${targetId}.`);
+       return ok(undefined);
+     }
+
+     const count = sessions.length;
+     yield* writePending({ path: pendingPath, sessions: [] });
+     log.info(`Cleared ${count} pending session(s).`);
+     return ok(undefined);
+   });
+ }
@@ -0,0 +1,266 @@
+ import { err, ok, ResultAsync, safeTry } from "neverthrow";
+ import { resolveConfig } from "@/lib/config";
+ import { CliError, toCliError } from "@/utils/errors";
+ import { createLogger } from "@/utils/logger";
+
+ const log = createLogger("search");
+
+ type SearchContentChunk = {
+   type: string;
+   text: string;
+ };
+
+ type SearchResultItem = {
+   file_id: string;
+   filename: string;
+   score: number;
+   attributes: Record<string, unknown>;
+   content: SearchContentChunk[];
+ };
+
+ type SearchResponse = {
+   object: string;
+   search_query: string;
+   data: SearchResultItem[];
+   has_more: boolean;
+   next_page: string | null;
+ };
+
+ type AiSearchResponse = SearchResponse & {
+   response: string;
+ };
+
+ type SessionCommit = {
+   commit_sha: string;
+   committed_at: number | null;
+   org: string;
+   repo: string;
+   branch: string | null;
+ };
+
+ function fetchSearch(opts: {
+   workerUrl: string;
+   token: string;
+   query: string;
+   isAi: boolean;
+ }): ResultAsync<SearchResponse | AiSearchResponse, CliError> {
+   const path = opts.isAi ? "/api/search/ai" : "/api/search";
+   const url = `${opts.workerUrl}${path}?q=${encodeURIComponent(opts.query)}`;
+
+   return ResultAsync.fromPromise(
+     fetch(url, {
+       headers: { Authorization: `Bearer ${opts.token}` },
+     }).then(async (response) => {
+       if (!response.ok) {
+         const body = await response.text().catch(() => "");
+         throw new Error(`HTTP ${response.status}: ${body}`);
+       }
+       return response.json() as Promise<SearchResponse | AiSearchResponse>;
+     }),
+     toCliError({ message: "Search request failed", code: "NETWORK_ERROR" }),
+   );
+ }
+
+ function fetchSessionCommits(opts: {
+   workerUrl: string;
+   token: string;
+   sessionId: string;
+ }): ResultAsync<SessionCommit[], CliError> {
+   const url = `${opts.workerUrl}/api/sessions/${opts.sessionId}/commits`;
+
+   return ResultAsync.fromPromise(
+     fetch(url, {
+       headers: { Authorization: `Bearer ${opts.token}` },
+     }).then(async (response) => {
+       if (!response.ok) return [];
+       const data = (await response.json()) as { commits: SessionCommit[] };
+       return data.commits;
+     }),
+     toCliError({
+       message: "Failed to fetch session commits",
+       code: "NETWORK_ERROR",
+     }),
+   ).orElse(() => ok([] as SessionCommit[]));
+ }
+
+ /**
+  * Extract a session ID from an R2 filename like "sessions/<uuid>.json"
+  * or "search/<uuid>.txt".
+  */
+ function extractSessionId(filename: string): string {
+   const match = filename.match(/(?:sessions|search)\/(.+?)\.(?:json|txt)$/);
+   return match ? match[1] : filename;
+ }
+
+ /**
+  * Truncate text to a max length, appending "..." if truncated.
+  */
+ function truncate(opts: { text: string; maxLength: number }): string {
+   if (opts.text.length <= opts.maxLength) return opts.text;
+   return opts.text.slice(0, opts.maxLength) + "...";
+ }
+
+ /**
+  * Clean up a content chunk for display: collapse whitespace,
+  * strip JSON noise, and truncate.
+  */
+ function formatSnippet(text: string): string {
+   const cleaned = text
+     .replace(/\\n/g, " ")
+     .replace(/\\"/g, '"')
+     .replace(/\s+/g, " ")
+     .trim();
+   return truncate({ text: cleaned, maxLength: 200 });
+ }
+
+ function buildCommitUrl(opts: {
+   workerUrl: string;
+   org: string;
+   repo: string;
+   sha: string;
+ }): string {
+   return `${opts.workerUrl}/app/${opts.org}/${opts.repo}/${opts.sha}`;
+ }
+
+ function renderSearchResults(opts: {
+   results: SearchResponse;
+   commitMap: Map<string, SessionCommit[]>;
+   workerUrl: string;
+ }): void {
+   if (opts.results.data.length === 0) {
+     log.info("No results found.");
+     return;
+   }
+
+   log.info(
+     `${opts.results.data.length} result(s) for "${opts.results.search_query}"\n`,
+   );
+
+   for (const item of opts.results.data) {
+     const sessionId = extractSessionId(item.filename);
+     const scorePercent = (item.score * 100).toFixed(1);
+
+     log.info(`  ${sessionId} [${scorePercent}%]`);
+
+     const snippet = item.content[0]?.text;
+     if (snippet) {
+       log.info(`    ${formatSnippet(snippet)}`);
+     }
+
+     const commits = opts.commitMap.get(sessionId) ?? [];
+     if (commits.length > 0) {
+       for (const commit of commits) {
+         const url = buildCommitUrl({
+           workerUrl: opts.workerUrl,
+           org: commit.org,
+           repo: commit.repo,
+           sha: commit.commit_sha,
+         });
+         log.info(`    -> ${url}`);
+       }
+     }
+
+     log.info("");
+   }
+ }
+
+ function renderAiSearchResults(opts: {
+   results: AiSearchResponse;
+   commitMap: Map<string, SessionCommit[]>;
+   workerUrl: string;
+ }): void {
+   if (opts.results.response) {
+     log.info(opts.results.response);
+     log.info("");
+   }
+
+   if (opts.results.data.length > 0) {
+     log.info(`--- Sources (${opts.results.data.length}) ---\n`);
+     for (const item of opts.results.data) {
+       const sessionId = extractSessionId(item.filename);
+       const scorePercent = (item.score * 100).toFixed(1);
+       log.info(`  ${sessionId} [${scorePercent}%]`);
+
+       const commits = opts.commitMap.get(sessionId) ?? [];
+       if (commits.length > 0) {
+         for (const commit of commits) {
+           const url = buildCommitUrl({
+             workerUrl: opts.workerUrl,
+             org: commit.org,
+             repo: commit.repo,
+             sha: commit.commit_sha,
+           });
+           log.info(`    -> ${url}`);
+         }
+       }
+     }
+     log.info("");
+   }
+ }
+
+ function isAiSearchResponse(
+   response: SearchResponse | AiSearchResponse,
+ ): response is AiSearchResponse {
+   return "response" in response;
+ }
+
+ export function search(opts: {
+   query: string;
+   isAi?: boolean;
+ }): ResultAsync<void, CliError> {
+   return safeTry(async function* () {
+     const config = yield* resolveConfig();
+     if (!config) {
+       return err(
+         new CliError({
+           message: "Not configured. Run 'residue login' first.",
+           code: "CONFIG_MISSING",
+         }),
+       );
+     }
+
+     const results = yield* fetchSearch({
+       workerUrl: config.worker_url,
+       token: config.token,
+       query: opts.query,
+       isAi: opts.isAi ?? false,
+     });
+
+     // Fetch commits for each session in parallel
+     const sessionIds = results.data.map((item) =>
+       extractSessionId(item.filename),
+     );
+     const uniqueSessionIds = [...new Set(sessionIds)];
+
+     const commitResults = yield* ResultAsync.combine(
+       uniqueSessionIds.map((sessionId) =>
+         fetchSessionCommits({
+           workerUrl: config.worker_url,
+           token: config.token,
+           sessionId,
+         }).map((commits) => ({ sessionId, commits })),
+       ),
+     );
+
+     const commitMap = new Map<string, SessionCommit[]>();
+     for (const entry of commitResults) {
+       commitMap.set(entry.sessionId, entry.commits);
+     }
+
+     if (opts.isAi && isAiSearchResponse(results)) {
+       renderAiSearchResults({
+         results,
+         commitMap,
+         workerUrl: config.worker_url,
+       });
+     } else {
+       renderSearchResults({
+         results,
+         commitMap,
+         workerUrl: config.worker_url,
+       });
+     }
+
+     return ok(undefined);
+   });
+ }
@@ -8,6 +8,7 @@ import {
    readPending,
    writePending,
  } from "@/lib/pending";
+ import { buildSearchText, getExtractor } from "@/lib/search-text";
  import { CliError, toCliError } from "@/utils/errors";
  import { createLogger } from "@/utils/logger";

@@ -30,6 +31,8 @@ type CommitPayload = {
  type UploadUrlResponse = {
    url: string;
    r2_key: string;
+   search_url: string;
+   search_r2_key: string;
  };

  function requestUploadUrl(opts: {
@@ -197,6 +200,62 @@ function closeStaleOpenSessions(opts: {
    return ResultAsync.combine(checks).map(() => opts.sessions);
  }

+ function generateSearchText(opts: {
+   session: PendingSession;
+   rawData: string;
+   commits: CommitPayload[];
+   org: string;
+   repo: string;
+ }): string | null {
+   const extractor = getExtractor(opts.session.agent);
+   if (!extractor) {
+     log.debug(
+       "no search text extractor for agent %s, skipping",
+       opts.session.agent,
+     );
+     return null;
+   }
+
+   const searchLines = extractor(opts.rawData);
+   if (searchLines.length === 0) return null;
+
+   const branches = [
+     ...new Set(opts.session.commits.map((c) => c.branch).filter(Boolean)),
+   ];
+
+   return buildSearchText({
+     metadata: {
+       sessionId: opts.session.id,
+       agent: opts.session.agent,
+       commits: opts.commits.map((c) => c.sha.slice(0, 7)),
+       branch: branches[0] ?? "",
+       repo: `${opts.org}/${opts.repo}`,
+     },
+     lines: searchLines,
+   });
+ }
+
+ function uploadSearchText(opts: {
+   url: string;
+   data: string;
+ }): ResultAsync<void, CliError> {
+   return ResultAsync.fromPromise(
+     fetch(opts.url, {
+       method: "PUT",
+       headers: { "Content-Type": "text/plain" },
+       body: opts.data,
+     }).then((response) => {
+       if (!response.ok) {
+         throw new Error(`R2 search upload failed: HTTP ${response.status}`);
+       }
+     }),
+     toCliError({
+       message: "Search text R2 upload failed",
+       code: "NETWORK_ERROR",
+     }),
+   );
+ }
+
  function syncSessions(opts: {
    sessions: PendingSession[];
    workerUrl: string;
@@ -240,7 +299,7 @@ function syncSessions(opts: {
        continue;
      }

-     // Step 1: Get a presigned URL from the worker
+     // Step 1: Get presigned URLs from the worker (raw + search)
      const uploadUrlResult = await requestUploadUrl({
        workerUrl: opts.workerUrl,
        token: opts.token,
@@ -271,6 +330,31 @@ function syncSessions(opts: {

      log.debug("uploaded session %s data directly to R2", session.id);

+     // Step 2b: Generate and upload search text
+     const searchText = generateSearchText({
+       session,
+       rawData: data,
+       commits: commitsResult.value,
+       org: opts.org,
+       repo: opts.repo,
+     });
+
+     if (searchText && uploadUrlResult.value.search_url) {
+       const searchUploadResult = await uploadSearchText({
+         url: uploadUrlResult.value.search_url,
+         data: searchText,
+       });
+
+       if (searchUploadResult.isErr()) {
+         // Non-fatal: search upload failure should not block sync
+         log.warn(
+           `search text upload failed for session ${session.id}: ${searchUploadResult.error.message}`,
+         );
+       } else {
+         log.debug("uploaded search text for session %s", session.id);
+       }
+     }
+
      // Step 3: POST metadata only (no inline data)
      const metadataResult = await postSessionMetadata({
        workerUrl: opts.workerUrl,
package/src/index.ts CHANGED
@@ -2,10 +2,12 @@

  import { Command } from "commander";
  import { capture } from "@/commands/capture";
+ import { clear } from "@/commands/clear";
  import { hookClaudeCode } from "@/commands/hook";
  import { init } from "@/commands/init";
  import { login } from "@/commands/login";
  import { push } from "@/commands/push";
+ import { search } from "@/commands/search";
  import { sessionEnd } from "@/commands/session-end";
  import { sessionStart } from "@/commands/session-start";
  import { setup } from "@/commands/setup";
@@ -100,9 +102,26 @@ program
    .description("Upload pending sessions to worker (manual trigger)")
    .action(wrapCommand(() => push()));

+ program
+   .command("clear")
+   .description("Remove pending sessions from the local queue")
+   .option("--id <session-id>", "Clear a specific session by ID")
+   .action(wrapCommand((opts: { id?: string }) => clear({ id: opts.id })));
+
  program
    .command("status")
    .description("Show current residue state for this project")
    .action(wrapCommand(() => status()));

+ program
+   .command("search")
+   .description("Search session history")
+   .argument("<query>", "Search query")
+   .option("--ai", "Use AI-powered search (generates an answer with citations)")
+   .action(
+     wrapCommand((query: string, opts: { ai?: boolean }) =>
+       search({ query, isAi: opts.ai }),
+     ),
+   );
+
  program.parse();
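
The two command modules added in this release, clear and search, both return a neverthrow ResultAsync<void, CliError>, matching the existing commands wired through wrapCommand. A minimal usage sketch, assuming the package's "@/" path alias resolves as it does inside the package itself and using a made-up session ID, could look like the following; this is an illustration only, not part of the published package.

import { clear } from "@/commands/clear";
import { search } from "@/commands/search";

async function main(): Promise<void> {
  // Remove one pending session by its (hypothetical) ID, logging either outcome.
  await clear({ id: "2f6c1a0e-example" }).match(
    () => console.log("clear finished"),
    (error) => console.error(`clear failed: ${error.message}`),
  );

  // Run an AI-assisted search and surface any CliError the same way.
  await search({ query: "why was the retry logic changed", isAi: true }).match(
    () => console.log("search finished"),
    (error) => console.error(`search failed: ${error.message}`),
  );
}

main();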