@nocoo/pika 0.5.4 → 0.5.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/bin.js +171 -124
  2. package/package.json +1 -1
package/dist/bin.js CHANGED
@@ -95,7 +95,7 @@ var package_default;
95
95
  var init_package = __esm(() => {
96
96
  package_default = {
97
97
  name: "pika",
98
- version: "0.5.4",
98
+ version: "0.5.6",
99
99
  private: true,
100
100
  workspaces: [
101
101
  "packages/*"
@@ -2327,7 +2327,7 @@ function buildRawSourceFiles(session, sessionRow, messageRawStrings, messageIds,
2327
2327
  function createOpenCodeSqliteDriver(openDb, dbPath) {
2328
2328
  return {
2329
2329
  source: "opencode",
2330
- async run(prevCursor, ctx) {
2330
+ async run(prevCursor, ctx, onResult) {
2331
2331
  let dbStat;
2332
2332
  try {
2333
2333
  dbStat = await stat10(dbPath);
@@ -2391,7 +2391,11 @@ function createOpenCodeSqliteDriver(openDb, dbPath) {
2391
2391
  if (shouldSkipForJson(sessionKey, result.canonical.lastMessageAt, result.canonical.messages.length, ctx.openCodeSessionState)) {
2392
2392
  continue;
2393
2393
  }
2394
- results.push(result);
2394
+ if (onResult) {
2395
+ await onResult(result);
2396
+ } else {
2397
+ results.push(result);
2398
+ }
2395
2399
  if (!ctx.openCodeSessionState) {
2396
2400
  ctx.openCodeSessionState = new Map;
2397
2401
  }
@@ -2874,14 +2878,121 @@ async function runSyncPipeline(input, opts) {
2874
2878
  ...input.cursorState,
2875
2879
  files: { ...input.cursorState.files }
2876
2880
  };
2877
- const allResults = [];
2878
- const parseErrors = [];
2881
+ let totalParsed = 0;
2882
+ let totalEmpty = 0;
2879
2883
  let totalFiles = 0;
2880
2884
  let totalSkipped = 0;
2881
- const sessionKeyToFile = new Map;
2882
- const prevCursors = new Map;
2885
+ const parseErrors = [];
2886
+ const currentBatch = [];
2887
+ const batchSessionToFile = new Map;
2888
+ const batchPrevCursors = new Map;
2883
2889
  const dbSourcedSessionKeys = new Set;
2884
2890
  let prevDbCursor;
2891
+ let uploadResult;
2892
+ let contentResult;
2893
+ let uploadOpts;
2894
+ let contentOpts;
2895
+ let contentProgressDone = 0;
2896
+ async function flushBatch() {
2897
+ if (currentBatch.length === 0)
2898
+ return;
2899
+ const batchEmpty = currentBatch.filter((r) => r.canonical.messages.length === 0).length;
2900
+ const uploadable = currentBatch.filter((r) => r.canonical.messages.length > 0);
2901
+ totalEmpty += batchEmpty;
2902
+ if (uploadable.length === 0 || !opts.upload) {
2903
+ totalParsed += uploadable.length;
2904
+ currentBatch.length = 0;
2905
+ batchSessionToFile.clear();
2906
+ batchPrevCursors.clear();
2907
+ return;
2908
+ }
2909
+ if (!uploadOpts) {
2910
+ uploadOpts = {
2911
+ apiUrl: opts.apiUrl,
2912
+ apiKey: opts.apiKey,
2913
+ userId: opts.userId,
2914
+ fetch: opts.fetch,
2915
+ sleep: opts.sleep
2916
+ };
2917
+ contentOpts = {
2918
+ apiUrl: opts.apiUrl,
2919
+ apiKey: opts.apiKey,
2920
+ fetch: opts.fetch,
2921
+ sleep: opts.sleep
2922
+ };
2923
+ uploadResult = {
2924
+ totalIngested: 0,
2925
+ totalConflicts: 0,
2926
+ totalBatches: 0,
2927
+ errors: []
2928
+ };
2929
+ contentResult = { uploaded: 0, skipped: 0, errors: [] };
2930
+ }
2931
+ const transformed = uploadable.map((r) => toSessionSnapshot(r.canonical, r.raw));
2932
+ const batchSnapshots = transformed.map((t) => t.snapshot);
2933
+ const batchUploadResult = await uploadMetadataBatches(batchSnapshots, uploadOpts);
2934
+ uploadResult.totalIngested += batchUploadResult.totalIngested;
2935
+ uploadResult.totalConflicts += batchUploadResult.totalConflicts;
2936
+ uploadResult.totalBatches += batchUploadResult.totalBatches;
2937
+ uploadResult.errors.push(...batchUploadResult.errors);
2938
+ const effectiveContentOpts = { ...contentOpts };
2939
+ if (log) {
2940
+ const completedSessions = new Set;
2941
+ const originalFetch = contentOpts.fetch ?? globalThis.fetch;
2942
+ effectiveContentOpts.fetch = async (input2, init) => {
2943
+ const response = await originalFetch(input2, init);
2944
+ const url = typeof input2 === "string" ? input2 : input2.url;
2945
+ if (url.includes("/api/ingest/content/") && url.endsWith("/canonical")) {
2946
+ const parts = url.split("/");
2947
+ const sessionKey = decodeURIComponent(parts[parts.length - 2]);
2948
+ if (!completedSessions.has(sessionKey)) {
2949
+ completedSessions.add(sessionKey);
2950
+ contentProgressDone++;
2951
+ log.uploadContentProgress(contentProgressDone, totalParsed + uploadable.length);
2952
+ }
2953
+ }
2954
+ return response;
2955
+ };
2956
+ }
2957
+ const batchContentResult = await uploadContentBatch(uploadable.map((r, i) => ({
2958
+ canonical: r.canonical,
2959
+ raw: r.raw,
2960
+ precomputed: transformed[i].precomputed
2961
+ })), log ? effectiveContentOpts : contentOpts, opts.contentConcurrency);
2962
+ contentResult.uploaded += batchContentResult.uploaded;
2963
+ contentResult.skipped += batchContentResult.skipped;
2964
+ contentResult.errors.push(...batchContentResult.errors);
2965
+ if (batchContentResult.errors.length > 0) {
2966
+ const rolledBackFiles = new Set;
2967
+ let rollbackDbCursor = false;
2968
+ for (const { sessionKey } of batchContentResult.errors) {
2969
+ const filePath = batchSessionToFile.get(sessionKey);
2970
+ if (filePath && !rolledBackFiles.has(filePath)) {
2971
+ rolledBackFiles.add(filePath);
2972
+ const prev = batchPrevCursors.get(filePath);
2973
+ if (prev === undefined) {
2974
+ delete cursorState.files[filePath];
2975
+ } else {
2976
+ cursorState.files[filePath] = prev;
2977
+ }
2978
+ }
2979
+ if (dbSourcedSessionKeys.has(sessionKey)) {
2980
+ rollbackDbCursor = true;
2981
+ }
2982
+ }
2983
+ if (rollbackDbCursor) {
2984
+ cursorState.openCodeSqlite = prevDbCursor;
2985
+ }
2986
+ }
2987
+ totalParsed += uploadable.length;
2988
+ for (const r of currentBatch) {
2989
+ r.raw = undefined;
2990
+ r.canonical.messages = [];
2991
+ }
2992
+ currentBatch.length = 0;
2993
+ batchSessionToFile.clear();
2994
+ batchPrevCursors.clear();
2995
+ }
2885
2996
  for (const driver of fileDrivers) {
2886
2997
  log?.discoverStart(driver.source);
2887
2998
  const files = await driver.discover(discoverOpts);
@@ -2903,14 +3014,17 @@ async function runSyncPipeline(input, opts) {
2903
3014
  try {
2904
3015
  const results = await driver.parse(filePath, resume);
2905
3016
  if (results.length > 0) {
2906
- allResults.push(...results);
3017
+ currentBatch.push(...results);
2907
3018
  log?.parseDone(driver.source, filePath, results.length);
2908
- prevCursors.set(filePath, cursorState.files[filePath]);
3019
+ batchPrevCursors.set(filePath, cursorState.files[filePath]);
2909
3020
  for (const r of results) {
2910
- sessionKeyToFile.set(r.canonical.sessionKey, filePath);
3021
+ batchSessionToFile.set(r.canonical.sessionKey, filePath);
2911
3022
  }
2912
3023
  const newCursor = driver.buildCursor(fingerprint, results);
2913
3024
  cursorState.files[filePath] = newCursor;
3025
+ if (currentBatch.length >= METADATA_BATCH_SIZE) {
3026
+ await flushBatch();
3027
+ }
2914
3028
  }
2915
3029
  } catch (err) {
2916
3030
  parseErrors.push({
@@ -2926,13 +3040,22 @@ async function runSyncPipeline(input, opts) {
2926
3040
  try {
2927
3041
  log?.dbDriverStart(dbDriver.source);
2928
3042
  prevDbCursor = cursorState.openCodeSqlite;
2929
- const dbResult = await dbDriver.run(prevDbCursor, syncCtx);
2930
- allResults.push(...dbResult.results);
3043
+ const dbResult = await dbDriver.run(prevDbCursor, syncCtx, async (r) => {
3044
+ currentBatch.push(r);
3045
+ dbSourcedSessionKeys.add(r.canonical.sessionKey);
3046
+ if (currentBatch.length >= METADATA_BATCH_SIZE) {
3047
+ await flushBatch();
3048
+ }
3049
+ });
3050
+ cursorState.openCodeSqlite = dbResult.cursor;
3051
+ log?.dbDriverDone(dbDriver.source, dbResult.rowCount);
2931
3052
  for (const r of dbResult.results) {
3053
+ currentBatch.push(r);
2932
3054
  dbSourcedSessionKeys.add(r.canonical.sessionKey);
3055
+ if (currentBatch.length >= METADATA_BATCH_SIZE) {
3056
+ await flushBatch();
3057
+ }
2933
3058
  }
2934
- cursorState.openCodeSqlite = dbResult.cursor;
2935
- log?.dbDriverDone(dbDriver.source, dbResult.results.length);
2936
3059
  } catch (err) {
2937
3060
  parseErrors.push({
2938
3061
  timestamp: new Date().toISOString(),
@@ -2942,112 +3065,19 @@ async function runSyncPipeline(input, opts) {
2942
3065
  });
2943
3066
  }
2944
3067
  }
2945
- cursorState.updatedAt = new Date().toISOString();
2946
- const emptyCount = allResults.filter((r) => r.canonical.messages.length === 0).length;
2947
- const uploadableResults = allResults.filter((r) => r.canonical.messages.length > 0);
2948
- let uploadResult;
2949
- let contentResult;
2950
- if (opts.upload && uploadableResults.length > 0) {
2951
- const uploadOpts = {
2952
- apiUrl: opts.apiUrl,
2953
- apiKey: opts.apiKey,
2954
- userId: opts.userId,
2955
- fetch: opts.fetch,
2956
- sleep: opts.sleep
2957
- };
2958
- const contentOpts = {
2959
- apiUrl: opts.apiUrl,
2960
- apiKey: opts.apiKey,
2961
- fetch: opts.fetch,
2962
- sleep: opts.sleep
2963
- };
2964
- const totalSessions = uploadableResults.length;
2965
- const pipelineBatches = splitBatches(uploadableResults, METADATA_BATCH_SIZE);
2966
- uploadResult = {
2967
- totalIngested: 0,
2968
- totalConflicts: 0,
2969
- totalBatches: 0,
2970
- errors: []
2971
- };
2972
- contentResult = {
2973
- uploaded: 0,
2974
- skipped: 0,
2975
- errors: []
2976
- };
2977
- log?.uploadMetadataStart(totalSessions);
2978
- for (const batch of pipelineBatches) {
2979
- const transformed = batch.map((r) => toSessionSnapshot(r.canonical, r.raw));
2980
- const batchSnapshots = transformed.map((t) => t.snapshot);
2981
- const batchUploadResult = await uploadMetadataBatches(batchSnapshots, uploadOpts);
2982
- uploadResult.totalIngested += batchUploadResult.totalIngested;
2983
- uploadResult.totalConflicts += batchUploadResult.totalConflicts;
2984
- uploadResult.totalBatches += batchUploadResult.totalBatches;
2985
- uploadResult.errors.push(...batchUploadResult.errors);
2986
- }
3068
+ await flushBatch();
3069
+ if (uploadResult) {
3070
+ log?.uploadMetadataStart(totalParsed);
2987
3071
  log?.uploadMetadataDone(uploadResult.totalIngested, uploadResult.totalConflicts);
2988
- let contentDone = 0;
2989
- log?.uploadContentStart(totalSessions);
2990
- for (const batch of pipelineBatches) {
2991
- const batchPrecomputed = new Map;
2992
- for (const r of batch) {
2993
- const t = toSessionSnapshot(r.canonical, r.raw);
2994
- batchPrecomputed.set(t.snapshot.sessionKey, t.precomputed);
2995
- }
2996
- const wrappedContentOpts = { ...contentOpts };
2997
- const originalFetch = contentOpts.fetch ?? globalThis.fetch;
2998
- if (log) {
2999
- const completedSessions = new Set;
3000
- wrappedContentOpts.fetch = async (input2, init) => {
3001
- const response = await originalFetch(input2, init);
3002
- const url = typeof input2 === "string" ? input2 : input2.url;
3003
- if (url.includes("/api/ingest/content/") && url.endsWith("/canonical")) {
3004
- const parts = url.split("/");
3005
- const sessionKey = decodeURIComponent(parts[parts.length - 2]);
3006
- if (!completedSessions.has(sessionKey)) {
3007
- completedSessions.add(sessionKey);
3008
- contentDone++;
3009
- log.uploadContentProgress(contentDone, totalSessions);
3010
- }
3011
- }
3012
- return response;
3013
- };
3014
- }
3015
- const batchContentResult = await uploadContentBatch(batch.map((r) => ({
3016
- canonical: r.canonical,
3017
- raw: r.raw,
3018
- precomputed: batchPrecomputed.get(r.canonical.sessionKey)
3019
- })), log ? wrappedContentOpts : contentOpts, opts.contentConcurrency);
3020
- contentResult.uploaded += batchContentResult.uploaded;
3021
- contentResult.skipped += batchContentResult.skipped;
3022
- contentResult.errors.push(...batchContentResult.errors);
3023
- if (batchContentResult.errors.length > 0) {
3024
- const rolledBackFiles = new Set;
3025
- let rollbackDbCursor = false;
3026
- for (const { sessionKey } of batchContentResult.errors) {
3027
- const filePath = sessionKeyToFile.get(sessionKey);
3028
- if (filePath && !rolledBackFiles.has(filePath)) {
3029
- rolledBackFiles.add(filePath);
3030
- const prev = prevCursors.get(filePath);
3031
- if (prev === undefined) {
3032
- delete cursorState.files[filePath];
3033
- } else {
3034
- cursorState.files[filePath] = prev;
3035
- }
3036
- }
3037
- if (dbSourcedSessionKeys.has(sessionKey)) {
3038
- rollbackDbCursor = true;
3039
- }
3040
- }
3041
- if (rollbackDbCursor) {
3042
- cursorState.openCodeSqlite = prevDbCursor;
3043
- }
3044
- }
3045
- }
3072
+ }
3073
+ if (contentResult) {
3074
+ log?.uploadContentStart(totalParsed);
3046
3075
  log?.uploadContentDone(contentResult.uploaded, contentResult.skipped, contentResult.errors.length);
3047
3076
  }
3077
+ cursorState.updatedAt = new Date().toISOString();
3048
3078
  return {
3049
- totalParsed: uploadableResults.length,
3050
- totalEmpty: emptyCount,
3079
+ totalParsed,
3080
+ totalEmpty,
3051
3081
  totalFiles,
3052
3082
  totalSkipped,
3053
3083
  parseErrors,
@@ -3071,6 +3101,20 @@ __export(exports_sync, {
3071
3101
  import { basename as basename3, join as join11 } from "path";
3072
3102
  import { defineCommand } from "citty";
3073
3103
  import consola from "consola";
3104
+ function spinner() {
3105
+ return SPINNER_FRAMES[spinnerIdx++ % SPINNER_FRAMES.length];
3106
+ }
3107
+ function rewriteLine(msg) {
3108
+ process.stdout.write(`\r\x1B[2K${msg}`);
3109
+ }
3110
+ function finalizeLine(consolaFn, msg) {
3111
+ process.stdout.write("\r\x1B[2K");
3112
+ consolaFn(msg);
3113
+ }
3114
+ function progressBar(pct, width) {
3115
+ const filled = Math.round(pct / 100 * width);
3116
+ return "\u2588".repeat(filled) + "\u2591".repeat(width - filled);
3117
+ }
3074
3118
  async function buildDbDriver(driverSet, openDbOverride) {
3075
3119
  if (!driverSet.dbDriversAvailable || !driverSet.discoverOpts.openCodeDbPath) {
3076
3120
  return;
@@ -3085,7 +3129,7 @@ async function buildDbDriver(driverSet, openDbOverride) {
3085
3129
  }
3086
3130
  return createOpenCodeSqliteDriver(openDb, driverSet.discoverOpts.openCodeDbPath);
3087
3131
  }
3088
- var sync_default;
3132
+ var SPINNER_FRAMES, spinnerIdx = 0, sync_default;
3089
3133
  var init_sync = __esm(() => {
3090
3134
  init_src();
3091
3135
  init_manager();
@@ -3093,6 +3137,7 @@ var init_sync = __esm(() => {
3093
3137
  init_opencode_sqlite();
3094
3138
  init_cursor_store();
3095
3139
  init_sync_pipeline();
3140
+ SPINNER_FRAMES = ["\u280B", "\u2819", "\u2839", "\u2838", "\u283C", "\u2834", "\u2826", "\u2827", "\u2807", "\u280F"];
3096
3141
  sync_default = defineCommand({
3097
3142
  meta: {
3098
3143
  name: "sync",
@@ -3149,13 +3194,13 @@ var init_sync = __esm(() => {
3149
3194
  consola.start(`Syncing ${sourceCount} source(s)...`);
3150
3195
  const logger = {
3151
3196
  discoverStart(source) {
3152
- consola.info(` [${source}] Scanning...`);
3197
+ rewriteLine(`${spinner()} [${source}] Scanning...`);
3153
3198
  },
3154
3199
  discoverDone(source, fileCount) {
3155
- consola.info(` [${source}] Found ${fileCount} file(s)`);
3200
+ finalizeLine(consola.info, ` [${source}] Found ${fileCount} file(s)`);
3156
3201
  },
3157
3202
  parseDone(source, filePath, sessionCount) {
3158
- consola.info(` [${source}] Parsed ${sessionCount} session(s) from ${basename3(filePath)}`);
3203
+ rewriteLine(`${spinner()} [${source}] Parsed ${sessionCount} session(s) from ${basename3(filePath)}`);
3159
3204
  },
3160
3205
  uploadMetadataStart(sessionCount) {
3161
3206
  consola.info(`Uploading metadata for ${sessionCount} session(s)...`);
@@ -3170,19 +3215,21 @@ var init_sync = __esm(() => {
3170
3215
  consola.info(`Uploading content for ${sessionCount} session(s)...`);
3171
3216
  },
3172
3217
  uploadContentProgress(done, total) {
3173
- consola.info(` Content: ${done}/${total}`);
3218
+ const pct = total > 0 ? Math.round(done / total * 100) : 0;
3219
+ const bar = progressBar(pct, 20);
3220
+ rewriteLine(`${spinner()} Content: ${bar} ${done}/${total} (${pct}%)`);
3174
3221
  },
3175
3222
  uploadContentDone(uploaded, skipped, errors) {
3176
3223
  const parts = [`${uploaded} uploaded`, `${skipped} skipped`];
3177
3224
  if (errors > 0)
3178
3225
  parts.push(`${errors} errors`);
3179
- consola.info(`Content upload done: ${parts.join(", ")}`);
3226
+ finalizeLine(consola.info, `Content upload done: ${parts.join(", ")}`);
3180
3227
  },
3181
3228
  dbDriverStart(source) {
3182
- consola.info(` [${source}] Querying database...`);
3229
+ rewriteLine(`${spinner()} [${source}] Querying database...`);
3183
3230
  },
3184
3231
  dbDriverDone(source, sessionCount) {
3185
- consola.info(` [${source}] Found ${sessionCount} session(s) from DB`);
3232
+ finalizeLine(consola.info, ` [${source}] Found ${sessionCount} session(s) from DB`);
3186
3233
  }
3187
3234
  };
3188
3235
  const result = await runSyncPipeline({
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@nocoo/pika",
3
- "version": "0.5.4",
3
+ "version": "0.5.6",
4
4
  "description": "Replay and search coding agent sessions",
5
5
  "type": "module",
6
6
  "bin": {