cc-query 0.2.1 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +13 -1
- package/package.json +1 -1
- package/src/query-session.js +67 -27
- package/src/repl.js +15 -1
package/README.md
CHANGED
|
@@ -42,13 +42,25 @@ echo "SELECT count(*) FROM messages;" | cc-query .
|
|
|
42
42
|
|
|
43
43
|
## Skill (experimental)
|
|
44
44
|
|
|
45
|
-
This [skill](
|
|
45
|
+
This [example skill](examples/skills/reflect/SKILL.md) gives Claude the ability, via the slash command `/reflect`, to work with Claude session history.
|
|
46
|
+
|
|
47
|
+
Why not a plugin? If you copy the skill you can reflect on it to adapt to your own usage.
|
|
46
48
|
|
|
47
49
|
For example you can ask questions like:
|
|
48
50
|
- Across all projects what bash commands return the most errors?
|
|
49
51
|
- Let's analyze the last session and identify how we might improve the CLAUDE.md file
|
|
50
52
|
- Gimme a summary of what we worked on this past week
|
|
51
53
|
- Let's go through our whole session history and identify repeated patterns that we could extract into skills
|
|
54
|
+
- Let's look at our use of cc-query tool calls to see how we might improve the reflect skill
|
|
55
|
+
|
|
56
|
+
### Test drive
|
|
57
|
+
|
|
58
|
+
To test drive this skill do something like this:
|
|
59
|
+
|
|
60
|
+
1. `npm i -g cc-query`
|
|
61
|
+
2. Clone this repo or otherwise fetch the `examples/skills/reflect` dir
|
|
62
|
+
3. `mkdir -p ~/.claude/skills && cp -R examples/skills/reflect ~/.claude/skills/`
|
|
63
|
+
4. Run Claude and use `/reflect [whatever you want]`
|
|
52
64
|
|
|
53
65
|
## License
|
|
54
66
|
|
package/package.json
CHANGED
package/src/query-session.js
CHANGED
|
@@ -8,6 +8,59 @@ import { getSessionFiles } from "./session-loader.js";
|
|
|
8
8
|
* @property {number} projectCount
|
|
9
9
|
*/
|
|
10
10
|
|
|
11
|
+
/**
 * Convert a single query result value into its display string.
 * @param {any} val - Raw cell value from a DuckDB result row
 * @returns {string} Printable representation of the value
 */
function valueToString(val) {
  // Missing values render as SQL-style NULL (covers null and undefined).
  if (val == null) return "NULL";

  switch (typeof val) {
    case "bigint":
      return val.toString();
    case "object":
      break; // structured values handled below
    default:
      return String(val);
  }

  // DuckDB timestamps arrive as { micros: bigint }.
  if ("micros" in val) {
    const millis = Number(val.micros) / 1000;
    const iso = new Date(millis).toISOString();
    return iso.replace("T", " ").replace("Z", "");
  }

  // DuckDB UUIDs arrive as { hugeint: string } — a signed 128-bit decimal.
  if ("hugeint" in val) {
    let unsigned = BigInt(val.hugeint);
    if (unsigned < 0n) unsigned += 1n << 128n; // reinterpret signed as unsigned
    unsigned ^= 1n << 127n; // undo DuckDB's sort-friendly high-bit flip
    const hex = unsigned.toString(16).padStart(32, "0");
    const parts = [
      hex.slice(0, 8),
      hex.slice(8, 12),
      hex.slice(12, 16),
      hex.slice(16, 20),
      hex.slice(20),
    ];
    return parts.join("-");
  }

  // Any other object (lists, structs, ...) is JSON-encoded; the replacer
  // stringifies BigInt values, which JSON.stringify cannot handle natively.
  const bigintSafe = (_, v) => (typeof v === "bigint" ? v.toString() : v);
  return JSON.stringify(val, bigintSafe);
}
|
|
41
|
+
|
|
42
|
+
/**
 * Format query results as TSV with a header row.
 * @param {import("@duckdb/node-api").DuckDBResultReader} result
 * @returns {string} Tab-separated values, one line per row; "" if no columns
 */
function formatResultsTsv(result) {
  const { columnCount } = result;
  // A result with no columns produces no output at all.
  if (columnCount === 0) return "";

  // Header line: tab-joined column names.
  const header = Array.from({ length: columnCount }, (_, i) =>
    result.columnName(i),
  ).join("\t");

  // One tab-joined line per data row, each cell rendered via valueToString.
  const body = result
    .getRows()
    .map((row) => row.map(valueToString).join("\t"));

  return [header, ...body].join("\n");
}
|
|
63
|
+
|
|
11
64
|
/**
|
|
12
65
|
* Format query results as a table string
|
|
13
66
|
* @param {import("@duckdb/node-api").DuckDBResultReader} result
|
|
@@ -29,33 +82,7 @@ function formatResults(result) {
|
|
|
29
82
|
}
|
|
30
83
|
|
|
31
84
|
// Convert all values to strings and calculate column widths
|
|
32
|
-
const stringRows = rows.map((row) =>
|
|
33
|
-
row.map((val) => {
|
|
34
|
-
if (val === null || val === undefined) return "NULL";
|
|
35
|
-
if (typeof val === "bigint") return val.toString();
|
|
36
|
-
if (typeof val === "object") {
|
|
37
|
-
// Handle DuckDB timestamp objects (returned as {micros: bigint})
|
|
38
|
-
if ("micros" in val) {
|
|
39
|
-
const ms = Number(val.micros) / 1000;
|
|
40
|
-
return new Date(ms).toISOString().replace("T", " ").replace("Z", "");
|
|
41
|
-
}
|
|
42
|
-
// Handle DuckDB UUID objects (returned as {hugeint: string})
|
|
43
|
-
if ("hugeint" in val) {
|
|
44
|
-
// Convert 128-bit signed decimal to UUID hex string
|
|
45
|
-
// DuckDB XORs the high bit for sorting, so flip it back
|
|
46
|
-
let n = BigInt(val.hugeint);
|
|
47
|
-
if (n < 0n) n += 1n << 128n; // Convert from signed to unsigned
|
|
48
|
-
n ^= 1n << 127n; // Flip high bit (undo DuckDB's sort optimization)
|
|
49
|
-
const hex = n.toString(16).padStart(32, "0");
|
|
50
|
-
return `${hex.slice(0, 8)}-${hex.slice(8, 12)}-${hex.slice(12, 16)}-${hex.slice(16, 20)}-${hex.slice(20)}`;
|
|
51
|
-
}
|
|
52
|
-
return JSON.stringify(val, (_, v) =>
|
|
53
|
-
typeof v === "bigint" ? v.toString() : v,
|
|
54
|
-
);
|
|
55
|
-
}
|
|
56
|
-
return String(val);
|
|
57
|
-
}),
|
|
58
|
-
);
|
|
85
|
+
const stringRows = rows.map((row) => row.map(valueToString));
|
|
59
86
|
|
|
60
87
|
const widths = columnNames.map((name, i) => {
|
|
61
88
|
const maxDataWidth = Math.max(
|
|
@@ -171,6 +198,19 @@ export class QuerySession {
|
|
|
171
198
|
return formatResults(result);
|
|
172
199
|
}
|
|
173
200
|
|
|
201
|
+
/**
|
|
202
|
+
* Execute a SQL query and return TSV formatted string with header
|
|
203
|
+
* @param {string} sql
|
|
204
|
+
* @returns {Promise<string>} Query result as TSV
|
|
205
|
+
*/
|
|
206
|
+
async queryTsv(sql) {
|
|
207
|
+
if (!this.#connection) {
|
|
208
|
+
throw new Error("Session not initialized - use QuerySession.create()");
|
|
209
|
+
}
|
|
210
|
+
const result = await this.#connection.runAndReadAll(sql);
|
|
211
|
+
return formatResultsTsv(result);
|
|
212
|
+
}
|
|
213
|
+
|
|
174
214
|
/**
|
|
175
215
|
* Execute a SQL query and return raw rows
|
|
176
216
|
* @param {string} sql
|
package/src/repl.js
CHANGED
|
@@ -188,6 +188,7 @@ async function readStdin() {
|
|
|
188
188
|
|
|
189
189
|
/**
|
|
190
190
|
* Run queries from piped input (non-interactive mode)
|
|
191
|
+
* Uses TSV output format with --- separator between queries
|
|
191
192
|
* @param {QuerySession} qs
|
|
192
193
|
* @param {string} input
|
|
193
194
|
*/
|
|
@@ -198,12 +199,25 @@ async function runPipedQueries(qs, input) {
|
|
|
198
199
|
.map((s) => s.trim())
|
|
199
200
|
.filter((s) => s && s !== ";");
|
|
200
201
|
|
|
202
|
+
let isFirstOutput = true;
|
|
203
|
+
|
|
201
204
|
for (const stmt of statements) {
|
|
202
205
|
if (stmt.startsWith(".")) {
|
|
203
206
|
const shouldExit = await handleDotCommand(stmt, qs);
|
|
204
207
|
if (shouldExit) break;
|
|
205
208
|
} else {
|
|
206
|
-
|
|
209
|
+
try {
|
|
210
|
+
const result = await qs.queryTsv(stmt);
|
|
211
|
+
if (result) {
|
|
212
|
+
if (!isFirstOutput) {
|
|
213
|
+
console.log("---");
|
|
214
|
+
}
|
|
215
|
+
console.log(result);
|
|
216
|
+
isFirstOutput = false;
|
|
217
|
+
}
|
|
218
|
+
} catch (err) {
|
|
219
|
+
console.error(`Error: ${err instanceof Error ? err.message : err}`);
|
|
220
|
+
}
|
|
207
221
|
}
|
|
208
222
|
}
|
|
209
223
|
}
|