@nocoo/pika 0.2.1 → 0.2.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bin.js +76 -4
- package/package.json +13 -2
package/dist/bin.js
CHANGED

@@ -30,7 +30,7 @@ var package_default;
 var init_package = __esm(() => {
   package_default = {
     name: "pika",
-    version: "0.2.1",
+    version: "0.2.2",
     private: true,
     workspaces: [
       "packages/*"
@@ -2833,6 +2833,7 @@ async function runSyncPipeline(input, opts) {
     discoverOpts,
     syncCtx
   } = input;
+  const log = opts.logger;
   let cursorState = { ...input.cursorState, files: { ...input.cursorState.files } };
   const allResults = [];
   const parseErrors = [];
@@ -2843,7 +2844,9 @@ async function runSyncPipeline(input, opts) {
   const dbSourcedSessionKeys = new Set;
   let prevDbCursor;
   for (const driver of fileDrivers) {
+    log?.discoverStart(driver.source);
     const files = await driver.discover(discoverOpts);
+    log?.discoverDone(driver.source, files.length);
     for (const filePath of files) {
       totalFiles++;
       let fingerprint;
@@ -2862,6 +2865,7 @@ async function runSyncPipeline(input, opts) {
      const results = await driver.parse(filePath, resume);
      if (results.length > 0) {
        allResults.push(...results);
+       log?.parseDone(driver.source, filePath, results.length);
        prevCursors.set(filePath, cursorState.files[filePath]);
        for (const r of results) {
          sessionKeyToFile.set(r.canonical.sessionKey, filePath);
@@ -2881,6 +2885,7 @@ async function runSyncPipeline(input, opts) {
   }
   if (dbDriver) {
     try {
+      log?.dbDriverStart(dbDriver.source);
       prevDbCursor = cursorState.openCodeSqlite;
       const dbResult = await dbDriver.run(prevDbCursor, syncCtx);
       allResults.push(...dbResult.results);
@@ -2888,6 +2893,7 @@ async function runSyncPipeline(input, opts) {
        dbSourcedSessionKeys.add(r.canonical.sessionKey);
      }
      cursorState.openCodeSqlite = dbResult.cursor;
+     log?.dbDriverDone(dbDriver.source, dbResult.results.length);
    } catch (err) {
      parseErrors.push({
        timestamp: new Date().toISOString(),
@@ -2914,18 +2920,45 @@ async function runSyncPipeline(input, opts) {
      fetch: opts.fetch,
      sleep: opts.sleep
    };
+   log?.uploadMetadataStart(snapshots.length);
    uploadResult = await uploadMetadataBatches(snapshots, uploadOpts);
+   log?.uploadMetadataDone(uploadResult.totalIngested, uploadResult.totalConflicts);
    const contentOpts = {
      apiUrl: opts.apiUrl,
      apiKey: opts.apiKey,
      fetch: opts.fetch,
      sleep: opts.sleep
    };
+   const totalSessions = allResults.length;
+   log?.uploadContentStart(totalSessions);
+   let contentDone = 0;
+   const wrappedContentOpts = {
+     ...contentOpts
+   };
+   const originalFetch = contentOpts.fetch ?? globalThis.fetch;
+   if (log) {
+     const completedSessions = new Set;
+     wrappedContentOpts.fetch = async (input2, init) => {
+       const response = await originalFetch(input2, init);
+       const url = typeof input2 === "string" ? input2 : input2.url;
+       if (url.includes("/api/ingest/content/") && url.endsWith("/canonical")) {
+         const parts = url.split("/");
+         const sessionKey = decodeURIComponent(parts[parts.length - 2]);
+         if (!completedSessions.has(sessionKey)) {
+           completedSessions.add(sessionKey);
+           contentDone++;
+           log.uploadContentProgress(contentDone, totalSessions);
+         }
+       }
+       return response;
+     };
+   }
    contentResult = await uploadContentBatch(allResults.map((r) => ({
      canonical: r.canonical,
      raw: r.raw,
      precomputed: precomputedMap.get(r.canonical.sessionKey)
-   })), contentOpts, opts.contentConcurrency);
+   })), log ? wrappedContentOpts : contentOpts, opts.contentConcurrency);
+   log?.uploadContentDone(contentResult.uploaded, contentResult.skipped, contentResult.errors.length);
    if (contentResult.errors.length > 0) {
      const rolledBackFiles = new Set;
      let rollbackDbCursor = false;
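Note on the hunk above: the 0.2.2 progress reporting does not change the uploadContentBatch call signature. When a logger is present, the fetch implementation passed through the content-upload options is wrapped so that the first response seen for each session's ".../canonical" ingest URL bumps a counter. A minimal standalone TypeScript sketch of the same pattern (the function name and onProgress callback are illustrative, not part of pika; the URL shape is taken from the diff):

// Sketch only: wrap a fetch-compatible function so that each new session key
// observed in a ".../api/ingest/content/<sessionKey>/canonical" URL advances a counter.
type FetchLike = (input: string | Request, init?: RequestInit) => Promise<Response>;

function withUploadProgress(
  baseFetch: FetchLike,
  onProgress: (done: number) => void // illustrative callback, not pika's API
): FetchLike {
  const seen = new Set<string>();
  let done = 0;
  return async (input, init) => {
    const response = await baseFetch(input, init);
    const url = typeof input === "string" ? input : input.url;
    if (url.includes("/api/ingest/content/") && url.endsWith("/canonical")) {
      // The session key is the decoded path segment immediately before "/canonical".
      const parts = url.split("/");
      const sessionKey = decodeURIComponent(parts[parts.length - 2]);
      if (!seen.has(sessionKey)) {
        seen.add(sessionKey);
        onProgress(++done);
      }
    }
    return response;
  };
}

Keying on a Set of already-seen session keys, as the diff does, keeps retried or duplicate requests from inflating the progress count.
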
@@ -2971,7 +3004,7 @@ __export(exports_sync, {
   buildDbDriver: () => buildDbDriver
 });
 import { defineCommand } from "citty";
-import { join as join11 } from "path";
+import { join as join11, basename as basename3 } from "path";
 import consola from "consola";
 async function buildDbDriver(driverSet, openDbOverride) {
   if (!driverSet.dbDriversAvailable || !driverSet.discoverOpts.openCodeDbPath) {
@@ -3049,6 +3082,44 @@ var init_sync = __esm(() => {
       const dbDriver = await buildDbDriver(driverSet);
       const sourceCount = driverSet.fileDrivers.length + (dbDriver ? 1 : 0);
       consola.start(`Syncing ${sourceCount} source(s)...`);
+      const logger = {
+        discoverStart(source) {
+          consola.info(` [${source}] Scanning...`);
+        },
+        discoverDone(source, fileCount) {
+          consola.info(` [${source}] Found ${fileCount} file(s)`);
+        },
+        parseDone(source, filePath, sessionCount) {
+          consola.info(` [${source}] Parsed ${sessionCount} session(s) from ${basename3(filePath)}`);
+        },
+        uploadMetadataStart(sessionCount) {
+          consola.info(`Uploading metadata for ${sessionCount} session(s)...`);
+        },
+        uploadMetadataDone(ingested, conflicts) {
+          const parts = [`${ingested} ingested`];
+          if (conflicts > 0)
+            parts.push(`${conflicts} conflicts`);
+          consola.info(`Metadata upload done: ${parts.join(", ")}`);
+        },
+        uploadContentStart(sessionCount) {
+          consola.info(`Uploading content for ${sessionCount} session(s)...`);
+        },
+        uploadContentProgress(done, total) {
+          consola.info(` Content: ${done}/${total}`);
+        },
+        uploadContentDone(uploaded, skipped, errors) {
+          const parts = [`${uploaded} uploaded`, `${skipped} skipped`];
+          if (errors > 0)
+            parts.push(`${errors} errors`);
+          consola.info(`Content upload done: ${parts.join(", ")}`);
+        },
+        dbDriverStart(source) {
+          consola.info(` [${source}] Querying database...`);
+        },
+        dbDriverDone(source, sessionCount) {
+          consola.info(` [${source}] Found ${sessionCount} session(s) from DB`);
+        }
+      };
       const result = await runSyncPipeline({
         fileDrivers: driverSet.fileDrivers,
         dbDriver,
@@ -3059,7 +3130,8 @@ var init_sync = __esm(() => {
         upload: doUpload,
         apiUrl: config.getApiUrl(),
         apiKey: config.getToken() ?? "",
-        userId: "cli"
+        userId: "cli",
+        logger
       });
       await cursorStore.save(result.cursorState);
       consola.success(`Parsed ${result.totalParsed} session(s) from ${result.totalFiles} file(s) (${result.totalSkipped} unchanged)`);
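Taken together, the bin.js changes thread an optional logger through runSyncPipeline (opts.logger) and have the sync command supply a consola-backed implementation. The hook surface implied by the log?.* call sites in this diff is roughly the following TypeScript sketch (inferred from the diff; pika does not publish this as a named type, and the parameter types are assumptions):

// Inferred from the call sites above; method names match the diff,
// the interface itself and the parameter types are illustrative.
interface SyncLogger {
  discoverStart(source: string): void;
  discoverDone(source: string, fileCount: number): void;
  parseDone(source: string, filePath: string, sessionCount: number): void;
  dbDriverStart(source: string): void;
  dbDriverDone(source: string, sessionCount: number): void;
  uploadMetadataStart(sessionCount: number): void;
  uploadMetadataDone(ingested: number, conflicts: number): void;
  uploadContentStart(sessionCount: number): void;
  uploadContentProgress(done: number, total: number): void;
  uploadContentDone(uploaded: number, skipped: number, errors: number): void;
}

Every call site uses optional chaining (log?.discoverStart(...)), so the pipeline still runs with no logger supplied.
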
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@nocoo/pika",
-  "version": "0.2.1",
+  "version": "0.2.2",
   "description": "Replay and search coding agent sessions",
   "type": "module",
   "bin": {
@@ -23,7 +23,18 @@
   "publishConfig": {
     "access": "public"
   },
-  "keywords": [
+  "keywords": [
+    "cli",
+    "agent",
+    "session",
+    "replay",
+    "search",
+    "claude",
+    "codex",
+    "gemini",
+    "opencode",
+    "copilot"
+  ],
   "license": "MIT",
   "repository": {
     "type": "git",