@skillrecordings/cli 0.1.0 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/skill.mjs +27 -0
- package/dist/chunk-2NCCVTEE.js +22342 -0
- package/dist/chunk-2NCCVTEE.js.map +1 -0
- package/dist/chunk-3E3GYSZR.js +7071 -0
- package/dist/chunk-3E3GYSZR.js.map +1 -0
- package/dist/chunk-F4EM72IH.js +86 -0
- package/dist/chunk-F4EM72IH.js.map +1 -0
- package/dist/chunk-FGP7KUQW.js +432 -0
- package/dist/chunk-FGP7KUQW.js.map +1 -0
- package/dist/chunk-H3D6VCME.js +55 -0
- package/dist/chunk-H3D6VCME.js.map +1 -0
- package/dist/chunk-HK3PEWFD.js +208 -0
- package/dist/chunk-HK3PEWFD.js.map +1 -0
- package/dist/chunk-KEV3QKXP.js +4495 -0
- package/dist/chunk-KEV3QKXP.js.map +1 -0
- package/dist/chunk-MG37YDAK.js +882 -0
- package/dist/chunk-MG37YDAK.js.map +1 -0
- package/dist/chunk-MLNDSBZ4.js +482 -0
- package/dist/chunk-MLNDSBZ4.js.map +1 -0
- package/dist/chunk-N2WIV2JV.js +22 -0
- package/dist/chunk-N2WIV2JV.js.map +1 -0
- package/dist/chunk-PWWRCN5W.js +2067 -0
- package/dist/chunk-PWWRCN5W.js.map +1 -0
- package/dist/chunk-SKHBM3XP.js +7746 -0
- package/dist/chunk-SKHBM3XP.js.map +1 -0
- package/dist/chunk-WFANXVQG.js +64 -0
- package/dist/chunk-WFANXVQG.js.map +1 -0
- package/dist/chunk-WYKL32C3.js +275 -0
- package/dist/chunk-WYKL32C3.js.map +1 -0
- package/dist/chunk-ZNF7XD2S.js +134 -0
- package/dist/chunk-ZNF7XD2S.js.map +1 -0
- package/dist/config-AUAIYDSI.js +20 -0
- package/dist/config-AUAIYDSI.js.map +1 -0
- package/dist/fileFromPath-XN7LXIBI.js +134 -0
- package/dist/fileFromPath-XN7LXIBI.js.map +1 -0
- package/dist/getMachineId-bsd-KW2E7VK3.js +42 -0
- package/dist/getMachineId-bsd-KW2E7VK3.js.map +1 -0
- package/dist/getMachineId-darwin-ROXJUJX5.js +42 -0
- package/dist/getMachineId-darwin-ROXJUJX5.js.map +1 -0
- package/dist/getMachineId-linux-KVZEHQSU.js +34 -0
- package/dist/getMachineId-linux-KVZEHQSU.js.map +1 -0
- package/dist/getMachineId-unsupported-PPRILPPA.js +25 -0
- package/dist/getMachineId-unsupported-PPRILPPA.js.map +1 -0
- package/dist/getMachineId-win-IIF36LEJ.js +44 -0
- package/dist/getMachineId-win-IIF36LEJ.js.map +1 -0
- package/dist/index.js +112703 -0
- package/dist/index.js.map +1 -0
- package/dist/lib-R6DEEJCP.js +7623 -0
- package/dist/lib-R6DEEJCP.js.map +1 -0
- package/dist/pipeline-IAVVAKTU.js +120 -0
- package/dist/pipeline-IAVVAKTU.js.map +1 -0
- package/dist/query-NTP5NVXN.js +25 -0
- package/dist/query-NTP5NVXN.js.map +1 -0
- package/dist/routing-BAEPFB7V.js +390 -0
- package/dist/routing-BAEPFB7V.js.map +1 -0
- package/dist/stripe-lookup-charge-EPRUMZDL.js +56 -0
- package/dist/stripe-lookup-charge-EPRUMZDL.js.map +1 -0
- package/dist/stripe-payment-history-SJPKA63N.js +67 -0
- package/dist/stripe-payment-history-SJPKA63N.js.map +1 -0
- package/dist/stripe-subscription-status-L4Z65GB3.js +58 -0
- package/dist/stripe-subscription-status-L4Z65GB3.js.map +1 -0
- package/dist/stripe-verify-refund-FZDKCIUQ.js +54 -0
- package/dist/stripe-verify-refund-FZDKCIUQ.js.map +1 -0
- package/dist/support-memory-WSG7SDKG.js +10 -0
- package/dist/support-memory-WSG7SDKG.js.map +1 -0
- package/package.json +10 -7
- package/.env.encrypted +0 -0
- package/CHANGELOG.md +0 -35
- package/data/tt-archive-dataset.json +0 -1
- package/data/validate-test-dataset.json +0 -97
- package/docs/CLI-AUTH.md +0 -504
- package/preload.ts +0 -18
- package/src/__tests__/init.test.ts +0 -74
- package/src/alignment-test.ts +0 -64
- package/src/check-apps.ts +0 -16
- package/src/commands/auth/decrypt.ts +0 -123
- package/src/commands/auth/encrypt.ts +0 -81
- package/src/commands/auth/index.ts +0 -50
- package/src/commands/auth/keygen.ts +0 -41
- package/src/commands/auth/status.ts +0 -164
- package/src/commands/axiom/forensic.ts +0 -868
- package/src/commands/axiom/index.ts +0 -697
- package/src/commands/build-dataset.ts +0 -311
- package/src/commands/db-status.ts +0 -47
- package/src/commands/deploys.ts +0 -219
- package/src/commands/eval-local/compare.ts +0 -171
- package/src/commands/eval-local/health.ts +0 -212
- package/src/commands/eval-local/index.ts +0 -76
- package/src/commands/eval-local/real-tools.ts +0 -416
- package/src/commands/eval-local/run.ts +0 -1168
- package/src/commands/eval-local/score-production.ts +0 -256
- package/src/commands/eval-local/seed.ts +0 -276
- package/src/commands/eval-pipeline/index.ts +0 -53
- package/src/commands/eval-pipeline/real-tools.ts +0 -492
- package/src/commands/eval-pipeline/run.ts +0 -1316
- package/src/commands/eval-pipeline/seed.ts +0 -395
- package/src/commands/eval-prompt.ts +0 -496
- package/src/commands/eval.test.ts +0 -253
- package/src/commands/eval.ts +0 -108
- package/src/commands/faq-classify.ts +0 -460
- package/src/commands/faq-cluster.ts +0 -135
- package/src/commands/faq-extract.ts +0 -249
- package/src/commands/faq-mine.ts +0 -432
- package/src/commands/faq-review.ts +0 -426
- package/src/commands/front/index.ts +0 -351
- package/src/commands/front/pull-conversations.ts +0 -275
- package/src/commands/front/tags.ts +0 -825
- package/src/commands/front-cache.ts +0 -1277
- package/src/commands/front-stats.ts +0 -75
- package/src/commands/health.test.ts +0 -82
- package/src/commands/health.ts +0 -362
- package/src/commands/init.test.ts +0 -89
- package/src/commands/init.ts +0 -106
- package/src/commands/inngest/client.ts +0 -294
- package/src/commands/inngest/events.ts +0 -296
- package/src/commands/inngest/investigate.ts +0 -382
- package/src/commands/inngest/runs.ts +0 -149
- package/src/commands/inngest/signal.ts +0 -143
- package/src/commands/kb-sync.ts +0 -498
- package/src/commands/memory/find.ts +0 -135
- package/src/commands/memory/get.ts +0 -87
- package/src/commands/memory/index.ts +0 -97
- package/src/commands/memory/stats.ts +0 -163
- package/src/commands/memory/store.ts +0 -49
- package/src/commands/memory/vote.ts +0 -159
- package/src/commands/pipeline.ts +0 -127
- package/src/commands/responses.ts +0 -856
- package/src/commands/tools.ts +0 -293
- package/src/commands/wizard.ts +0 -319
- package/src/index.ts +0 -172
- package/src/lib/crypto.ts +0 -56
- package/src/lib/env-loader.ts +0 -206
- package/src/lib/onepassword.ts +0 -137
- package/src/test-agent-local.ts +0 -115
- package/tsconfig.json +0 -11
- package/vitest.config.ts +0 -10
|
@@ -1,1277 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* Front → DuckDB Cache System
|
|
3
|
-
*
|
|
4
|
-
* Builds a durable local cache of ALL Front conversation data.
|
|
5
|
-
* This is a long-running import (4-6 hours for full import).
|
|
6
|
-
*
|
|
7
|
-
* Usage:
|
|
8
|
-
* bun src/index.ts front cache --init # Full import all inboxes
|
|
9
|
-
* bun src/index.ts front cache --sync # Incremental sync
|
|
10
|
-
* bun src/index.ts front cache --stats # Show cache stats
|
|
11
|
-
* bun src/index.ts front cache --resume # Resume interrupted import
|
|
12
|
-
*
|
|
13
|
-
* Issue: https://github.com/skillrecordings/support/issues/91
|
|
14
|
-
*/
|
|
15
|
-
|
|
16
|
-
import * as path from 'path'
|
|
17
|
-
import { createInstrumentedFrontClient } from '@skillrecordings/core/front/instrumented-client'
|
|
18
|
-
import { FrontApiError } from '@skillrecordings/front-sdk'
|
|
19
|
-
import type { Command } from 'commander'
|
|
20
|
-
|
|
21
|
-
// DuckDB types - dynamically imported at runtime
|
|
22
|
-
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
23
|
-
type DuckDB = typeof import('duckdb')
|
|
24
|
-
let duckdb: DuckDB | null = null
|
|
25
|
-
|
|
26
|
-
async function loadDuckDB(): Promise<DuckDB> {
|
|
27
|
-
if (duckdb) return duckdb
|
|
28
|
-
try {
|
|
29
|
-
duckdb = await import('duckdb')
|
|
30
|
-
return duckdb
|
|
31
|
-
} catch {
|
|
32
|
-
throw new Error(
|
|
33
|
-
'DuckDB is not installed. Run: bun add duckdb (native module, requires compilation)'
|
|
34
|
-
)
|
|
35
|
-
}
|
|
36
|
-
}
|
|
37
|
-
|
|
38
|
-
// ============================================================================
// Configuration
// ============================================================================

// Local DuckDB cache file. NOTE(review): if HOME is unset this falls back to a
// literal '~' path segment (a directory named "~"), not shell expansion —
// confirm HOME is always set in the environments this runs in.
const DB_PATH = path.join(process.env.HOME || '~', 'skill/data/front-cache.db')
const REQUEST_DELAY_MS = 1000 // Base delay between sequential page fetches
const MAX_RETRIES = 5 // Max attempts per request when Front returns 429

// Rate limiting configuration - VERY CONSERVATIVE.
// NOTE(review): the per-request interval below (700ms ≈ 85 rpm) is looser than
// the sliding-window cap (40 rpm); the 40 req/min window is the binding limit.
// The exact Front API ceiling (the original comments cite both 100 and 120
// req/min) should be confirmed against the current Front plan.
const MAX_CONCURRENT_REQUESTS = 1 // Sequential to avoid burst rate limits
const MIN_REQUEST_INTERVAL_MS = 700 // Minimum gap between request starts
const MIN_429_BACKOFF_MS = 60000 // 60 second minimum backoff on rate limit
const REQUESTS_PER_MINUTE_LIMIT = 40 // Sliding-window cap enforced by RateLimiter
|
|
52
|
-
|
|
53
|
-
// ============================================================================
// Rate Limiter - VERY Conservative Parallel Request Management
// ============================================================================

/**
 * Client-side throttle combining three constraints:
 *  1. a concurrency cap (maxConcurrent),
 *  2. a minimum interval between request starts (minInterval), and
 *  3. a sliding one-minute request-count window (maxPerMinute).
 *
 * NOTE(review): queued waiters' `reject` handlers are never invoked, so a
 * queued acquire can only resolve via release(); there is no timeout path.
 */
class RateLimiter {
  private activeRequests = 0
  private lastRequestTime = 0
  private requestTimestamps: number[] = [] // Track requests in last minute
  // Waiters blocked on the concurrency cap, resolved FIFO by release().
  private readonly queue: Array<{
    resolve: () => void
    reject: (error: Error) => void
  }> = []

  constructor(
    private maxConcurrent: number = MAX_CONCURRENT_REQUESTS,
    private minInterval: number = MIN_REQUEST_INTERVAL_MS,
    private maxPerMinute: number = REQUESTS_PER_MINUTE_LIMIT
  ) {}

  /**
   * Get current number of active requests (for logging)
   */
  getActiveCount(): number {
    return this.activeRequests
  }

  /**
   * Get requests made in the last minute.
   * Side effect: prunes timestamps older than 60s from the window.
   */
  getRequestsLastMinute(): number {
    const oneMinuteAgo = Date.now() - 60000
    this.requestTimestamps = this.requestTimestamps.filter(
      (t) => t > oneMinuteAgo
    )
    return this.requestTimestamps.length
  }

  /**
   * Acquire a slot for making a request. Blocks (in order) on the concurrency
   * cap, then the per-minute window, then the minimum inter-request interval.
   */
  async acquire(): Promise<void> {
    // Wait for available slot; re-check after each wake since another waiter
    // may have taken the freed slot first.
    while (this.activeRequests >= this.maxConcurrent) {
      await new Promise<void>((resolve, reject) => {
        this.queue.push({ resolve, reject })
      })
    }

    // Check requests per minute limit; each iteration re-prunes the window.
    while (this.getRequestsLastMinute() >= this.maxPerMinute) {
      const oldestInWindow = this.requestTimestamps[0]
      if (oldestInWindow === undefined) break // No timestamps to wait on
      const waitTime = oldestInWindow + 60000 - Date.now() + 100 // Wait until oldest expires + buffer
      if (waitTime > 0) {
        console.log(
          `[${new Date().toISOString()}] ⏸️ Rate limit: ${this.getRequestsLastMinute()}/${this.maxPerMinute} req/min. Waiting ${(waitTime / 1000).toFixed(1)}s...`
        )
        await sleep(waitTime)
      }
    }

    // Enforce minimum interval between requests
    const now = Date.now()
    const elapsed = now - this.lastRequestTime
    if (elapsed < this.minInterval) {
      await sleep(this.minInterval - elapsed)
    }

    // Record the request start in both the interval and window trackers.
    this.activeRequests++
    this.lastRequestTime = Date.now()
    this.requestTimestamps.push(Date.now())
  }

  /**
   * Release a slot after request completes and wake the next queued waiter.
   */
  release(): void {
    this.activeRequests--
    const next = this.queue.shift()
    if (next) {
      next.resolve()
    }
  }

  /**
   * Execute a function with rate limiting; the slot is always released,
   * even when `fn` throws.
   */
  async execute<T>(fn: () => Promise<T>): Promise<T> {
    await this.acquire()
    try {
      return await fn()
    } finally {
      this.release()
    }
  }
}

// Global rate limiter instance shared by every Front API call in this module.
const rateLimiter = new RateLimiter()
|
|
152
|
-
|
|
153
|
-
// ============================================================================
// Types
// ============================================================================

// Minimal inbox shape consumed from GET /inboxes.
interface FrontInbox {
  id: string
  name: string
  address?: string // inbox address when provided by the API
}

// Tag attached to a conversation; only the name is persisted to the cache.
interface FrontTag {
  id: string
  name: string
}

// Conversation shape consumed from the Front conversations endpoints.
// Timestamps are epoch seconds (converted with `* 1000` before storage).
interface FrontConversation {
  id: string
  subject: string | null
  status: string
  created_at: number
  last_message_at?: number
  tags: FrontTag[]
  recipient?: { handle: string; name?: string } // mapped to customer_email/name
  assignee?: { email: string }
  // HAL-style links; related.parent.url identifies a thread parent conversation.
  _links?: {
    related?: {
      parent?: { url: string }
    }
  }
}

// Message shape consumed from GET /conversations/{id}/messages.
interface FrontMessage {
  id: string
  type: string
  is_inbound: boolean
  created_at: number // epoch seconds
  subject?: string
  body?: string // HTML body
  text?: string // plain-text body, preferred over stripped HTML when present
  author?: { email?: string; name?: string }
}

// CLI flags for `front cache` (see usage block in the file header).
interface CacheOptions {
  init?: boolean // full import of all inboxes
  sync?: boolean // incremental sync
  stats?: boolean // print cache statistics
  resume?: boolean // resume an interrupted import
  inbox?: string // presumably restricts to one inbox — verify against caller
  limit?: number // cap on conversations processed per inbox
  json?: boolean // machine-readable output
}

// Row shape of the sync_state table (see getSyncState/updateSyncState).
interface SyncState {
  inbox_id: string
  last_sync_at: string | null
  last_conversation_at: string | null
  total_synced: number
}
|
|
211
|
-
|
|
212
|
-
// ============================================================================
|
|
213
|
-
// Logging - TIMESTAMPS ON EVERY LINE
|
|
214
|
-
// ============================================================================
|
|
215
|
-
|
|
216
|
-
function timestamp(): string {
|
|
217
|
-
return new Date().toISOString()
|
|
218
|
-
}
|
|
219
|
-
|
|
220
|
-
function log(message: string, ...args: unknown[]): void {
|
|
221
|
-
console.log(`[${timestamp()}] ${message}`, ...args)
|
|
222
|
-
}
|
|
223
|
-
|
|
224
|
-
function logProgress(
|
|
225
|
-
inboxIndex: number,
|
|
226
|
-
totalInboxes: number,
|
|
227
|
-
inboxName: string,
|
|
228
|
-
page: number,
|
|
229
|
-
totalPages: string,
|
|
230
|
-
conversationCount: number
|
|
231
|
-
): void {
|
|
232
|
-
console.log(
|
|
233
|
-
`[${timestamp()}] [${inboxIndex}/${totalInboxes}] ${inboxName} - page ${page}/${totalPages} (${conversationCount} conversations)`
|
|
234
|
-
)
|
|
235
|
-
}
|
|
236
|
-
|
|
237
|
-
function logError(context: string, error: unknown): void {
|
|
238
|
-
const msg = error instanceof Error ? error.message : String(error)
|
|
239
|
-
console.error(`[${timestamp()}] ❌ ERROR in ${context}: ${msg}`)
|
|
240
|
-
}
|
|
241
|
-
|
|
242
|
-
/**
 * Loud 429 warning showing the backoff duration (seconds) and the retry
 * attempt counter against MAX_RETRIES.
 */
function logRateLimit(waitMs: number, attempt: number): void {
  console.log(
    `[${timestamp()}] ⚠️⚠️⚠️ RATE LIMITED (429)! Waiting ${(waitMs / 1000).toFixed(1)}s before retry (attempt ${attempt}/${MAX_RETRIES}) ⚠️⚠️⚠️`
  )
}
|
|
247
|
-
|
|
248
|
-
/**
 * Concurrency/rate snapshot logged around batched message fetches.
 * Reads the live requests-per-minute figure from the global rateLimiter
 * (which also prunes its sliding window as a side effect).
 */
function logConcurrent(action: string, count: number): void {
  const rpm = rateLimiter.getRequestsLastMinute()
  console.log(
    `[${timestamp()}] 🔄 ${action} [concurrent: ${count}/${MAX_CONCURRENT_REQUESTS}] [rpm: ${rpm}/${REQUESTS_PER_MINUTE_LIMIT}]`
  )
}
|
|
254
|
-
|
|
255
|
-
function logThread(convId: string, parentId: string, depth: number): void {
|
|
256
|
-
console.log(
|
|
257
|
-
`[${timestamp()}] 🧵 Thread: ${convId} → parent: ${parentId} (depth: ${depth})`
|
|
258
|
-
)
|
|
259
|
-
}
|
|
260
|
-
|
|
261
|
-
// ============================================================================
|
|
262
|
-
// Database Helper
|
|
263
|
-
// ============================================================================
|
|
264
|
-
|
|
265
|
-
// Type alias for DuckDB database instance.
// The duckdb module is loaded dynamically, so its types are not available at
// compile time; `any` is deliberate here.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
type DatabaseInstance = any

/**
 * Open (or create) the DuckDB cache database at DB_PATH.
 * @throws Error from loadDuckDB() when the native module is missing.
 */
async function createDb(): Promise<DatabaseInstance> {
  const duck = await loadDuckDB()
  return new duck.Database(DB_PATH)
}
|
|
273
|
-
|
|
274
|
-
function runQuery(
|
|
275
|
-
db: DatabaseInstance,
|
|
276
|
-
sql: string,
|
|
277
|
-
params: unknown[] = []
|
|
278
|
-
): Promise<void> {
|
|
279
|
-
return new Promise((resolve, reject) => {
|
|
280
|
-
db.run(sql, ...params, (err: Error | null) => {
|
|
281
|
-
if (err) reject(err)
|
|
282
|
-
else resolve()
|
|
283
|
-
})
|
|
284
|
-
})
|
|
285
|
-
}
|
|
286
|
-
|
|
287
|
-
function allQuery<T>(
|
|
288
|
-
db: DatabaseInstance,
|
|
289
|
-
sql: string,
|
|
290
|
-
params: unknown[] = []
|
|
291
|
-
): Promise<T[]> {
|
|
292
|
-
return new Promise((resolve, reject) => {
|
|
293
|
-
db.all(sql, ...params, (err: Error | null, rows: T[]) => {
|
|
294
|
-
if (err) reject(err)
|
|
295
|
-
else resolve(rows as T[])
|
|
296
|
-
})
|
|
297
|
-
})
|
|
298
|
-
}
|
|
299
|
-
|
|
300
|
-
/**
 * Run schema migrations for thread tracking.
 * Adds parent_id and thread_depth columns if they don't exist.
 *
 * Idempotent: uses information_schema to detect whether parent_id already
 * exists, and treats that single column as the marker for the whole
 * migration (parent_id, thread_depth, and the index are added together).
 */
async function runMigrations(db: DatabaseInstance): Promise<void> {
  log('🔧 Running schema migrations...')

  // Check if parent_id column exists
  const columns = await allQuery<{ column_name: string }>(
    db,
    `SELECT column_name FROM information_schema.columns
     WHERE table_name = 'conversations' AND column_name = 'parent_id'`
  )

  if (columns.length === 0) {
    log(' Adding parent_id column...')
    await runQuery(db, `ALTER TABLE conversations ADD COLUMN parent_id VARCHAR`)

    log(' Adding thread_depth column...')
    await runQuery(
      db,
      `ALTER TABLE conversations ADD COLUMN thread_depth INTEGER DEFAULT 0`
    )

    log(' Creating index on parent_id...')
    await runQuery(
      db,
      `CREATE INDEX IF NOT EXISTS idx_conv_parent ON conversations(parent_id)`
    )

    log(' ✅ Thread tracking columns added')
  } else {
    log(' ✓ Thread tracking columns already exist')
  }
}
|
|
335
|
-
|
|
336
|
-
/**
|
|
337
|
-
* Extract conversation ID from Front API URL
|
|
338
|
-
* e.g., "https://api2.frontapp.com/conversations/cnv_abc123" → "cnv_abc123"
|
|
339
|
-
*/
|
|
340
|
-
function extractConversationIdFromUrl(url: string): string | null {
|
|
341
|
-
const match = url.match(/\/conversations\/(cnv_[a-zA-Z0-9]+)/)
|
|
342
|
-
return match?.[1] ?? null
|
|
343
|
-
}
|
|
344
|
-
|
|
345
|
-
// ============================================================================
|
|
346
|
-
// API Client with Rate Limiting
|
|
347
|
-
// ============================================================================
|
|
348
|
-
|
|
349
|
-
/**
 * GET `url` through the instrumented Front client with 429-aware retries.
 *
 * The first attempt goes through the global rateLimiter; on a 429 the retry
 * recurses with useRateLimiter=false so the backoff sleep is not counted
 * against (or blocked by) the limiter's own window. Non-429 errors are
 * rethrown unchanged.
 *
 * @param attempt 1-based attempt counter; capped at MAX_RETRIES.
 * @throws Error after MAX_RETRIES consecutive 429 responses.
 */
async function fetchWithRetry<T>(
  front: ReturnType<typeof createInstrumentedFrontClient>,
  url: string,
  attempt = 1,
  useRateLimiter = true
): Promise<T> {
  const doFetch = async (): Promise<T> => {
    try {
      return await front.raw.get<T>(url)
    } catch (error) {
      if (error instanceof FrontApiError && error.status === 429) {
        if (attempt >= MAX_RETRIES) {
          throw new Error(`Rate limited after ${MAX_RETRIES} attempts`)
        }

        // Exponential backoff floored at the minimum 429 backoff.
        const exponentialMs = MIN_429_BACKOFF_MS * Math.pow(1.5, attempt - 1) // 60s, 90s, 135s...
        const waitMs = Math.max(MIN_429_BACKOFF_MS, exponentialMs)

        logRateLimit(waitMs, attempt)
        await sleep(waitMs)
        // Recursive call outside rate limiter for retry
        return fetchWithRetry(front, url, attempt + 1, false)
      }

      throw error
    }
  }

  if (useRateLimiter) {
    return rateLimiter.execute(doFetch)
  }
  return doFetch()
}
|
|
382
|
-
|
|
383
|
-
function sleep(ms: number): Promise<void> {
|
|
384
|
-
return new Promise((resolve) => setTimeout(resolve, ms))
|
|
385
|
-
}
|
|
386
|
-
|
|
387
|
-
function getFrontClient(): ReturnType<typeof createInstrumentedFrontClient> {
|
|
388
|
-
const apiToken = process.env.FRONT_API_TOKEN
|
|
389
|
-
if (!apiToken) {
|
|
390
|
-
throw new Error('FRONT_API_TOKEN environment variable required')
|
|
391
|
-
}
|
|
392
|
-
return createInstrumentedFrontClient({ apiToken })
|
|
393
|
-
}
|
|
394
|
-
|
|
395
|
-
// ============================================================================
|
|
396
|
-
// Core Sync Functions
|
|
397
|
-
// ============================================================================
|
|
398
|
-
|
|
399
|
-
/**
 * Fetch every inbox visible to the API token via GET /inboxes.
 * Returns an empty array when the response has no _results.
 */
async function fetchAllInboxes(
  front: ReturnType<typeof createInstrumentedFrontClient>
): Promise<FrontInbox[]> {
  log('📥 Fetching inboxes...')
  const data = await fetchWithRetry<{ _results: FrontInbox[] }>(
    front,
    '/inboxes'
  )
  const inboxes = data._results || []
  log(` Found ${inboxes.length} inboxes`)
  return inboxes
}
|
|
411
|
-
|
|
412
|
-
/**
 * Upsert inbox rows into the local cache. Names are refreshed on conflict;
 * conversation_count is initialized to 0 and later maintained by
 * updateInboxCount, so it is deliberately not overwritten here.
 */
async function syncInboxes(
  db: DatabaseInstance,
  inboxes: FrontInbox[]
): Promise<void> {
  log('💾 Syncing inboxes to database...')
  for (const inbox of inboxes) {
    await runQuery(
      db,
      `INSERT INTO inboxes (id, name, conversation_count)
       VALUES (?, ?, 0)
       ON CONFLICT (id) DO UPDATE SET name = excluded.name`,
      [inbox.id, inbox.name]
    )
  }
  log(` Synced ${inboxes.length} inboxes`)
}
|
|
428
|
-
|
|
429
|
-
/**
 * Fetch one page of conversations from a (possibly pagination-provided) URL.
 * @returns the page's conversations plus the next-page URL, or null nextUrl
 *          when pagination is exhausted.
 */
async function fetchConversationsPage(
  url: string,
  front: ReturnType<typeof createInstrumentedFrontClient>
): Promise<{
  conversations: FrontConversation[]
  nextUrl: string | null
}> {
  const data = await fetchWithRetry<{
    _results: FrontConversation[]
    _pagination?: { next?: string }
  }>(front, url)

  return {
    conversations: data._results || [],
    nextUrl: data._pagination?.next || null,
  }
}
|
|
446
|
-
|
|
447
|
-
/**
 * Fetch all messages for a conversation via GET /conversations/{id}/messages.
 * NOTE(review): this reads a single response page; confirm the endpoint
 * returns all messages without pagination for the conversation sizes seen here.
 */
async function fetchMessages(
  conversationId: string,
  front: ReturnType<typeof createInstrumentedFrontClient>
): Promise<FrontMessage[]> {
  const data = await fetchWithRetry<{ _results: FrontMessage[] }>(
    front,
    `/conversations/${conversationId}/messages`
  )
  return data._results || []
}
|
|
457
|
-
|
|
458
|
-
function stripHtml(html: string): string {
|
|
459
|
-
return html
|
|
460
|
-
.replace(/<[^>]*>/g, ' ')
|
|
461
|
-
.replace(/\s+/g, ' ')
|
|
462
|
-
.trim()
|
|
463
|
-
}
|
|
464
|
-
|
|
465
|
-
/**
 * Upsert one conversation row, resolving its thread parent when present.
 *
 * Front timestamps are epoch seconds and are converted to ISO strings before
 * the ::TIMESTAMP casts in the SQL. Thread depth is parent's depth + 1 (0 when
 * the parent row is not yet cached, which can happen if the parent is synced
 * later — depth would then be corrected only on a re-sync of this row).
 *
 * @returns the extracted parent conversation id (null for thread roots) and
 *          the computed thread depth.
 */
async function insertConversation(
  db: DatabaseInstance,
  conv: FrontConversation,
  inboxId: string
): Promise<{ parentId: string | null; threadDepth: number }> {
  const tags = conv.tags?.map((t) => t.name) || []
  const tagsJson = JSON.stringify(tags)
  const now = new Date().toISOString()
  const createdAt = new Date(conv.created_at * 1000).toISOString()
  // Fall back to created_at when the API omits last_message_at.
  const lastMessageAt = new Date(
    (conv.last_message_at || conv.created_at) * 1000
  ).toISOString()

  // Extract parent conversation ID from _links if present
  let parentId: string | null = null
  let threadDepth = 0

  if (conv._links?.related?.parent?.url) {
    parentId = extractConversationIdFromUrl(conv._links.related.parent.url)
    if (parentId) {
      // Try to get parent's thread depth to calculate this conversation's depth
      const parentRows = await allQuery<{ thread_depth: number }>(
        db,
        `SELECT thread_depth FROM conversations WHERE id = ?`,
        [parentId]
      )
      threadDepth = (parentRows[0]?.thread_depth ?? 0) + 1
      logThread(conv.id, parentId, threadDepth)
    }
  }

  // Upsert: immutable fields (subject, created_at, customer) keep their
  // original values; mutable sync fields are refreshed from `excluded`.
  await runQuery(
    db,
    `INSERT INTO conversations
     (id, inbox_id, subject, status, customer_email, customer_name, tags,
      assignee_email, created_at, last_message_at, synced_at, parent_id, thread_depth)
     VALUES (?, ?, ?, ?, ?, ?, ?::VARCHAR[], ?,
      ?::TIMESTAMP, ?::TIMESTAMP, ?::TIMESTAMP, ?, ?)
     ON CONFLICT (id) DO UPDATE SET
      status = excluded.status,
      tags = excluded.tags,
      assignee_email = excluded.assignee_email,
      last_message_at = excluded.last_message_at,
      synced_at = excluded.synced_at,
      parent_id = excluded.parent_id,
      thread_depth = excluded.thread_depth`,
    [
      conv.id,
      inboxId,
      conv.subject || null,
      conv.status,
      conv.recipient?.handle || null,
      conv.recipient?.name || null,
      tagsJson, // JSON array string cast to VARCHAR[] by DuckDB
      conv.assignee?.email || null,
      createdAt,
      lastMessageAt,
      now,
      parentId,
      threadDepth,
    ]
  )

  return { parentId, threadDepth }
}
|
|
530
|
-
|
|
531
|
-
/**
 * Insert one message row; existing rows are left untouched (DO NOTHING),
 * since message content is immutable once cached.
 *
 * body_text prefers the API's plain-text field and falls back to tag-stripped
 * HTML; the raw HTML body is kept alongside it in body_html.
 */
async function insertMessage(
  db: DatabaseInstance,
  msg: FrontMessage,
  conversationId: string
): Promise<void> {
  const bodyText = msg.text || (msg.body ? stripHtml(msg.body) : null)
  const createdAt = new Date(msg.created_at * 1000).toISOString() // epoch seconds → ISO

  await runQuery(
    db,
    `INSERT INTO messages
     (id, conversation_id, is_inbound, author_email, author_name,
      body_text, body_html, created_at)
     VALUES (?, ?, ?, ?, ?, ?, ?, ?::TIMESTAMP)
     ON CONFLICT (id) DO NOTHING`,
    [
      msg.id,
      conversationId,
      msg.is_inbound,
      msg.author?.email || null,
      msg.author?.name || null,
      bodyText,
      msg.body || null,
      createdAt,
    ]
  )
}
|
|
558
|
-
|
|
559
|
-
/**
 * Upsert per-inbox sync bookkeeping. last_conversation_at is only advanced
 * when a new value is supplied (COALESCE keeps the previous high-water mark
 * when lastConversationAt is omitted).
 *
 * @param lastConversationAt newest conversation timestamp seen, epoch seconds.
 */
async function updateSyncState(
  db: DatabaseInstance,
  inboxId: string,
  totalSynced: number,
  lastConversationAt?: number
): Promise<void> {
  const now = new Date().toISOString()
  const lastConvTs = lastConversationAt
    ? new Date(lastConversationAt * 1000).toISOString()
    : null

  await runQuery(
    db,
    `INSERT INTO sync_state (inbox_id, last_sync_at, last_conversation_at, total_synced)
     VALUES (?, ?, ?, ?)
     ON CONFLICT (inbox_id) DO UPDATE SET
      last_sync_at = excluded.last_sync_at,
      last_conversation_at = COALESCE(excluded.last_conversation_at, sync_state.last_conversation_at),
      total_synced = excluded.total_synced`,
    [inboxId, now, lastConvTs, totalSynced]
  )
}
|
|
581
|
-
|
|
582
|
-
/**
 * Read the sync bookkeeping row for one inbox, or null when the inbox has
 * never been synced. TIMESTAMP columns are cast to VARCHAR so the row matches
 * SyncState's string fields.
 */
async function getSyncState(
  db: DatabaseInstance,
  inboxId: string
): Promise<SyncState | null> {
  const rows = await allQuery<SyncState>(
    db,
    `SELECT inbox_id,
     last_sync_at::VARCHAR as last_sync_at,
     last_conversation_at::VARCHAR as last_conversation_at,
     total_synced
     FROM sync_state WHERE inbox_id = ?`,
    [inboxId]
  )
  return rows[0] || null
}
|
|
597
|
-
|
|
598
|
-
/**
 * Recompute an inbox's cached conversation count from the conversations table
 * and stamp its last_sync_at. Run after a sync pass completes for the inbox.
 */
async function updateInboxCount(
  db: DatabaseInstance,
  inboxId: string
): Promise<void> {
  const now = new Date().toISOString()
  await runQuery(
    db,
    `UPDATE inboxes
     SET conversation_count = (SELECT COUNT(*) FROM conversations WHERE inbox_id = ?),
     last_sync_at = ?::TIMESTAMP
     WHERE id = ?`,
    [inboxId, now, inboxId]
  )
}
|
|
612
|
-
|
|
613
|
-
// ============================================================================
|
|
614
|
-
// Main Sync Logic
|
|
615
|
-
// ============================================================================
|
|
616
|
-
|
|
617
|
-
async function syncInbox(
|
|
618
|
-
db: DatabaseInstance,
|
|
619
|
-
inbox: FrontInbox,
|
|
620
|
-
front: ReturnType<typeof createInstrumentedFrontClient>,
|
|
621
|
-
inboxIndex: number,
|
|
622
|
-
totalInboxes: number,
|
|
623
|
-
limit?: number,
|
|
624
|
-
resumeFromConversation?: string
|
|
625
|
-
): Promise<{
|
|
626
|
-
conversationCount: number
|
|
627
|
-
messageCount: number
|
|
628
|
-
threadCount: number
|
|
629
|
-
}> {
|
|
630
|
-
log(
|
|
631
|
-
`\n📬 [${inboxIndex}/${totalInboxes}] Starting sync: ${inbox.name} (${inbox.id})`
|
|
632
|
-
)
|
|
633
|
-
|
|
634
|
-
let url: string | null = `/inboxes/${inbox.id}/conversations?limit=50`
|
|
635
|
-
let page = 1
|
|
636
|
-
let totalConversations = 0
|
|
637
|
-
let totalMessages = 0
|
|
638
|
-
let totalThreads = 0
|
|
639
|
-
let latestConversationAt: number | undefined
|
|
640
|
-
let skipUntilFound = !!resumeFromConversation
|
|
641
|
-
let foundResumePoint = false
|
|
642
|
-
|
|
643
|
-
while (url) {
|
|
644
|
-
await sleep(REQUEST_DELAY_MS)
|
|
645
|
-
|
|
646
|
-
try {
|
|
647
|
-
const { conversations, nextUrl } = await fetchConversationsPage(
|
|
648
|
-
url,
|
|
649
|
-
front
|
|
650
|
-
)
|
|
651
|
-
|
|
652
|
-
logProgress(
|
|
653
|
-
inboxIndex,
|
|
654
|
-
totalInboxes,
|
|
655
|
-
inbox.name,
|
|
656
|
-
page,
|
|
657
|
-
nextUrl ? '?' : `${page}`,
|
|
658
|
-
totalConversations
|
|
659
|
-
)
|
|
660
|
-
|
|
661
|
-
// Filter conversations based on limit and resume point
|
|
662
|
-
const toProcess: FrontConversation[] = []
|
|
663
|
-
|
|
664
|
-
for (const conv of conversations) {
|
|
665
|
-
// Check limit FIRST before processing
|
|
666
|
-
if (limit && totalConversations + toProcess.length >= limit) {
|
|
667
|
-
log(` ⏹ Reached limit of ${limit} conversations`)
|
|
668
|
-
url = null
|
|
669
|
-
break
|
|
670
|
-
}
|
|
671
|
-
|
|
672
|
-
// Handle resume: skip until we find the resume point
|
|
673
|
-
if (skipUntilFound) {
|
|
674
|
-
if (conv.id === resumeFromConversation) {
|
|
675
|
-
foundResumePoint = true
|
|
676
|
-
skipUntilFound = false
|
|
677
|
-
log(` ✓ Found resume point: ${conv.id}`)
|
|
678
|
-
}
|
|
679
|
-
continue
|
|
680
|
-
}
|
|
681
|
-
|
|
682
|
-
toProcess.push(conv)
|
|
683
|
-
}
|
|
684
|
-
|
|
685
|
-
// Process conversations: insert first, then parallel message fetch
|
|
686
|
-
const conversationsWithMeta: Array<{
|
|
687
|
-
conv: FrontConversation
|
|
688
|
-
hasThread: boolean
|
|
689
|
-
}> = []
|
|
690
|
-
|
|
691
|
-
for (const conv of toProcess) {
|
|
692
|
-
// Track latest conversation timestamp
|
|
693
|
-
if (!latestConversationAt || conv.created_at > latestConversationAt) {
|
|
694
|
-
latestConversationAt = conv.created_at
|
|
695
|
-
}
|
|
696
|
-
|
|
697
|
-
// Insert conversation (sync - needs parent lookup)
|
|
698
|
-
const { parentId } = await insertConversation(db, conv, inbox.id)
|
|
699
|
-
conversationsWithMeta.push({ conv, hasThread: !!parentId })
|
|
700
|
-
totalConversations++
|
|
701
|
-
if (parentId) totalThreads++
|
|
702
|
-
}
|
|
703
|
-
|
|
704
|
-
// Parallel message fetching with rate limiting
|
|
705
|
-
if (conversationsWithMeta.length > 0) {
|
|
706
|
-
logConcurrent(
|
|
707
|
-
`Fetching messages for ${conversationsWithMeta.length} conversations`,
|
|
708
|
-
rateLimiter.getActiveCount()
|
|
709
|
-
)
|
|
710
|
-
|
|
711
|
-
const messageResults = await Promise.all(
|
|
712
|
-
conversationsWithMeta.map(({ conv }) =>
|
|
713
|
-
fetchMessages(conv.id, front)
|
|
714
|
-
.then((messages) => ({ convId: conv.id, messages, error: null }))
|
|
715
|
-
.catch((err) => ({
|
|
716
|
-
convId: conv.id,
|
|
717
|
-
messages: [] as FrontMessage[],
|
|
718
|
-
error: err,
|
|
719
|
-
}))
|
|
720
|
-
)
|
|
721
|
-
)
|
|
722
|
-
|
|
723
|
-
// Insert messages (sequential DB writes to avoid conflicts)
|
|
724
|
-
for (const result of messageResults) {
|
|
725
|
-
if (result.error) {
|
|
726
|
-
logError(`fetching messages for ${result.convId}`, result.error)
|
|
727
|
-
continue
|
|
728
|
-
}
|
|
729
|
-
for (const msg of result.messages) {
|
|
730
|
-
await insertMessage(db, msg, result.convId)
|
|
731
|
-
totalMessages++
|
|
732
|
-
}
|
|
733
|
-
}
|
|
734
|
-
|
|
735
|
-
logConcurrent(`Completed page ${page}`, rateLimiter.getActiveCount())
|
|
736
|
-
}
|
|
737
|
-
|
|
738
|
-
// Break out of while loop if limit reached
|
|
739
|
-
if (limit && totalConversations >= limit) {
|
|
740
|
-
break
|
|
741
|
-
}
|
|
742
|
-
|
|
743
|
-
// Update sync state periodically (every page)
|
|
744
|
-
await updateSyncState(
|
|
745
|
-
db,
|
|
746
|
-
inbox.id,
|
|
747
|
-
totalConversations,
|
|
748
|
-
latestConversationAt
|
|
749
|
-
)
|
|
750
|
-
|
|
751
|
-
url = nextUrl
|
|
752
|
-
page++
|
|
753
|
-
} catch (err) {
|
|
754
|
-
logError(`page ${page} of ${inbox.name}`, err)
|
|
755
|
-
// Save progress and continue
|
|
756
|
-
await updateSyncState(
|
|
757
|
-
db,
|
|
758
|
-
inbox.id,
|
|
759
|
-
totalConversations,
|
|
760
|
-
latestConversationAt
|
|
761
|
-
)
|
|
762
|
-
break
|
|
763
|
-
}
|
|
764
|
-
}
|
|
765
|
-
|
|
766
|
-
// Final sync state update
|
|
767
|
-
await updateSyncState(db, inbox.id, totalConversations, latestConversationAt)
|
|
768
|
-
await updateInboxCount(db, inbox.id)
|
|
769
|
-
|
|
770
|
-
if (resumeFromConversation && !foundResumePoint) {
|
|
771
|
-
log(
|
|
772
|
-
` ⚠️ Resume point ${resumeFromConversation} not found - may have completed`
|
|
773
|
-
)
|
|
774
|
-
}
|
|
775
|
-
|
|
776
|
-
log(
|
|
777
|
-
` ✅ ${inbox.name} complete: ${totalConversations} conversations, ${totalMessages} messages, ${totalThreads} threads`
|
|
778
|
-
)
|
|
779
|
-
|
|
780
|
-
return {
|
|
781
|
-
conversationCount: totalConversations,
|
|
782
|
-
messageCount: totalMessages,
|
|
783
|
-
threadCount: totalThreads,
|
|
784
|
-
}
|
|
785
|
-
}
|
|
786
|
-
|
|
787
|
-
// ============================================================================
|
|
788
|
-
// Command Handlers
|
|
789
|
-
// ============================================================================
|
|
790
|
-
|
|
791
|
-
async function handleInit(options: CacheOptions): Promise<void> {
|
|
792
|
-
const startTime = Date.now()
|
|
793
|
-
log('🚀 Starting FULL IMPORT of Front conversations')
|
|
794
|
-
log(` Database: ${DB_PATH}`)
|
|
795
|
-
log(` Limit per inbox: ${options.limit || 'unlimited'}`)
|
|
796
|
-
|
|
797
|
-
const db = await createDb()
|
|
798
|
-
const front = getFrontClient()
|
|
799
|
-
|
|
800
|
-
try {
|
|
801
|
-
// Run schema migrations for thread tracking
|
|
802
|
-
await runMigrations(db)
|
|
803
|
-
|
|
804
|
-
// Get inboxes
|
|
805
|
-
let inboxes = await fetchAllInboxes(front)
|
|
806
|
-
|
|
807
|
-
// Filter by inbox if specified
|
|
808
|
-
if (options.inbox) {
|
|
809
|
-
inboxes = inboxes.filter((i) => i.id === options.inbox)
|
|
810
|
-
if (inboxes.length === 0) {
|
|
811
|
-
throw new Error(`Inbox ${options.inbox} not found`)
|
|
812
|
-
}
|
|
813
|
-
log(` Filtering to inbox: ${options.inbox}`)
|
|
814
|
-
}
|
|
815
|
-
|
|
816
|
-
// Sync inboxes table
|
|
817
|
-
await syncInboxes(db, inboxes)
|
|
818
|
-
|
|
819
|
-
// Sync each inbox
|
|
820
|
-
let totalConversations = 0
|
|
821
|
-
let totalMessages = 0
|
|
822
|
-
let totalThreads = 0
|
|
823
|
-
|
|
824
|
-
for (const [i, inbox] of inboxes.entries()) {
|
|
825
|
-
const result = await syncInbox(
|
|
826
|
-
db,
|
|
827
|
-
inbox,
|
|
828
|
-
front,
|
|
829
|
-
i + 1,
|
|
830
|
-
inboxes.length,
|
|
831
|
-
options.limit
|
|
832
|
-
)
|
|
833
|
-
totalConversations += result.conversationCount
|
|
834
|
-
totalMessages += result.messageCount
|
|
835
|
-
totalThreads += result.threadCount
|
|
836
|
-
}
|
|
837
|
-
|
|
838
|
-
// Final summary
|
|
839
|
-
const duration = ((Date.now() - startTime) / 1000 / 60).toFixed(1)
|
|
840
|
-
log('\n' + '='.repeat(60))
|
|
841
|
-
log('📊 IMPORT COMPLETE')
|
|
842
|
-
log('='.repeat(60))
|
|
843
|
-
log(` Duration: ${duration} minutes`)
|
|
844
|
-
log(` Inboxes: ${inboxes.length}`)
|
|
845
|
-
log(` Conversations: ${totalConversations}`)
|
|
846
|
-
log(` Messages: ${totalMessages}`)
|
|
847
|
-
log(` Threads: ${totalThreads}`)
|
|
848
|
-
|
|
849
|
-
// Get DB size
|
|
850
|
-
const dbStats = await allQuery<{ database_size: string }>(
|
|
851
|
-
db,
|
|
852
|
-
`SELECT database_size FROM pragma_database_size()`
|
|
853
|
-
)
|
|
854
|
-
log(` DB Size: ${dbStats[0]?.database_size || 'unknown'}`)
|
|
855
|
-
log('='.repeat(60))
|
|
856
|
-
} finally {
|
|
857
|
-
db.close?.() || db.terminate?.() || true
|
|
858
|
-
}
|
|
859
|
-
}
|
|
860
|
-
|
|
861
|
-
async function handleResume(options: CacheOptions): Promise<void> {
|
|
862
|
-
const startTime = Date.now()
|
|
863
|
-
log('🔄 Resuming interrupted import')
|
|
864
|
-
log(` Database: ${DB_PATH}`)
|
|
865
|
-
|
|
866
|
-
const db = await createDb()
|
|
867
|
-
const front = getFrontClient()
|
|
868
|
-
|
|
869
|
-
try {
|
|
870
|
-
// Run schema migrations for thread tracking
|
|
871
|
-
await runMigrations(db)
|
|
872
|
-
|
|
873
|
-
// Get inboxes that have incomplete sync (or no sync)
|
|
874
|
-
const inboxes = await fetchAllInboxes(front)
|
|
875
|
-
await syncInboxes(db, inboxes)
|
|
876
|
-
|
|
877
|
-
let totalConversations = 0
|
|
878
|
-
let totalMessages = 0
|
|
879
|
-
let totalThreads = 0
|
|
880
|
-
|
|
881
|
-
for (const [i, inbox] of inboxes.entries()) {
|
|
882
|
-
// Check sync state
|
|
883
|
-
const state = await getSyncState(db, inbox.id)
|
|
884
|
-
|
|
885
|
-
if (options.inbox && inbox.id !== options.inbox) {
|
|
886
|
-
continue
|
|
887
|
-
}
|
|
888
|
-
|
|
889
|
-
// Get last conversation ID to resume from
|
|
890
|
-
const lastConv = await allQuery<{ id: string }>(
|
|
891
|
-
db,
|
|
892
|
-
`SELECT id FROM conversations WHERE inbox_id = ? ORDER BY synced_at DESC LIMIT 1`,
|
|
893
|
-
[inbox.id]
|
|
894
|
-
)
|
|
895
|
-
|
|
896
|
-
const resumeFrom = lastConv[0]?.id
|
|
897
|
-
if (resumeFrom) {
|
|
898
|
-
log(
|
|
899
|
-
` Resuming ${inbox.name} from ${resumeFrom} (${state?.total_synced || 0} already synced)`
|
|
900
|
-
)
|
|
901
|
-
}
|
|
902
|
-
|
|
903
|
-
const result = await syncInbox(
|
|
904
|
-
db,
|
|
905
|
-
inbox,
|
|
906
|
-
front,
|
|
907
|
-
i + 1,
|
|
908
|
-
inboxes.length,
|
|
909
|
-
options.limit,
|
|
910
|
-
resumeFrom
|
|
911
|
-
)
|
|
912
|
-
totalConversations += result.conversationCount
|
|
913
|
-
totalMessages += result.messageCount
|
|
914
|
-
totalThreads += result.threadCount
|
|
915
|
-
}
|
|
916
|
-
|
|
917
|
-
const duration = ((Date.now() - startTime) / 1000 / 60).toFixed(1)
|
|
918
|
-
log('\n📊 RESUME COMPLETE')
|
|
919
|
-
log(` Duration: ${duration} minutes`)
|
|
920
|
-
log(` Conversations: ${totalConversations}`)
|
|
921
|
-
log(` Threads: ${totalThreads}`)
|
|
922
|
-
log(` Messages: ${totalMessages}`)
|
|
923
|
-
} finally {
|
|
924
|
-
db.close?.() || db.terminate?.() || true
|
|
925
|
-
}
|
|
926
|
-
}
|
|
927
|
-
|
|
928
|
-
async function handleSync(options: CacheOptions): Promise<void> {
|
|
929
|
-
const startTime = Date.now()
|
|
930
|
-
log('🔄 Starting INCREMENTAL SYNC')
|
|
931
|
-
log(` Database: ${DB_PATH}`)
|
|
932
|
-
log(' Fetching only conversations updated since last sync')
|
|
933
|
-
|
|
934
|
-
const db = await createDb()
|
|
935
|
-
const front = getFrontClient()
|
|
936
|
-
|
|
937
|
-
try {
|
|
938
|
-
// Run schema migrations for thread tracking
|
|
939
|
-
await runMigrations(db)
|
|
940
|
-
|
|
941
|
-
let inboxes = await fetchAllInboxes(front)
|
|
942
|
-
|
|
943
|
-
if (options.inbox) {
|
|
944
|
-
inboxes = inboxes.filter((i) => i.id === options.inbox)
|
|
945
|
-
}
|
|
946
|
-
|
|
947
|
-
await syncInboxes(db, inboxes)
|
|
948
|
-
|
|
949
|
-
let totalConversations = 0
|
|
950
|
-
let totalMessages = 0
|
|
951
|
-
let totalThreads = 0
|
|
952
|
-
|
|
953
|
-
for (const [i, inbox] of inboxes.entries()) {
|
|
954
|
-
const state = await getSyncState(db, inbox.id)
|
|
955
|
-
|
|
956
|
-
log(`\n📬 [${i + 1}/${inboxes.length}] ${inbox.name}`)
|
|
957
|
-
log(` Last sync: ${state?.last_sync_at || 'never'}`)
|
|
958
|
-
|
|
959
|
-
// For incremental sync, we fetch recent conversations and check for updates
|
|
960
|
-
// Front API doesn't support filtering by updated_at, so we fetch pages
|
|
961
|
-
// and stop when we hit conversations older than last sync
|
|
962
|
-
|
|
963
|
-
let url: string | null = `/inboxes/${inbox.id}/conversations?limit=50`
|
|
964
|
-
let page = 1
|
|
965
|
-
let conversationCount = 0
|
|
966
|
-
let messageCount = 0
|
|
967
|
-
let threadCount = 0
|
|
968
|
-
let shouldContinue = true
|
|
969
|
-
|
|
970
|
-
while (url && shouldContinue) {
|
|
971
|
-
await sleep(REQUEST_DELAY_MS)
|
|
972
|
-
|
|
973
|
-
const { conversations, nextUrl } = await fetchConversationsPage(
|
|
974
|
-
url,
|
|
975
|
-
front
|
|
976
|
-
)
|
|
977
|
-
|
|
978
|
-
// Filter conversations that need processing
|
|
979
|
-
const toProcess: FrontConversation[] = []
|
|
980
|
-
|
|
981
|
-
for (const conv of conversations) {
|
|
982
|
-
// Check if conversation was updated since last sync
|
|
983
|
-
const lastSync = state?.last_sync_at
|
|
984
|
-
? new Date(state.last_sync_at)
|
|
985
|
-
: new Date(0)
|
|
986
|
-
|
|
987
|
-
if (
|
|
988
|
-
conv.last_message_at &&
|
|
989
|
-
conv.last_message_at * 1000 < lastSync.getTime()
|
|
990
|
-
) {
|
|
991
|
-
// Conversation is older than last sync, we can stop
|
|
992
|
-
shouldContinue = false
|
|
993
|
-
break
|
|
994
|
-
}
|
|
995
|
-
|
|
996
|
-
toProcess.push(conv)
|
|
997
|
-
|
|
998
|
-
if (
|
|
999
|
-
options.limit &&
|
|
1000
|
-
conversationCount + toProcess.length >= options.limit
|
|
1001
|
-
) {
|
|
1002
|
-
shouldContinue = false
|
|
1003
|
-
break
|
|
1004
|
-
}
|
|
1005
|
-
}
|
|
1006
|
-
|
|
1007
|
-
// Insert conversations first
|
|
1008
|
-
for (const conv of toProcess) {
|
|
1009
|
-
const { parentId } = await insertConversation(db, conv, inbox.id)
|
|
1010
|
-
conversationCount++
|
|
1011
|
-
if (parentId) threadCount++
|
|
1012
|
-
}
|
|
1013
|
-
|
|
1014
|
-
// Parallel message fetching with rate limiting
|
|
1015
|
-
if (toProcess.length > 0) {
|
|
1016
|
-
logConcurrent(
|
|
1017
|
-
`Fetching messages for ${toProcess.length} conversations`,
|
|
1018
|
-
rateLimiter.getActiveCount()
|
|
1019
|
-
)
|
|
1020
|
-
|
|
1021
|
-
const messageResults = await Promise.all(
|
|
1022
|
-
toProcess.map((conv) =>
|
|
1023
|
-
fetchMessages(conv.id, front)
|
|
1024
|
-
.then((messages) => ({
|
|
1025
|
-
convId: conv.id,
|
|
1026
|
-
messages,
|
|
1027
|
-
error: null,
|
|
1028
|
-
}))
|
|
1029
|
-
.catch((err) => ({
|
|
1030
|
-
convId: conv.id,
|
|
1031
|
-
messages: [] as FrontMessage[],
|
|
1032
|
-
error: err,
|
|
1033
|
-
}))
|
|
1034
|
-
)
|
|
1035
|
-
)
|
|
1036
|
-
|
|
1037
|
-
for (const result of messageResults) {
|
|
1038
|
-
if (result.error) {
|
|
1039
|
-
logError(`fetching messages for ${result.convId}`, result.error)
|
|
1040
|
-
continue
|
|
1041
|
-
}
|
|
1042
|
-
for (const msg of result.messages) {
|
|
1043
|
-
await insertMessage(db, msg, result.convId)
|
|
1044
|
-
messageCount++
|
|
1045
|
-
}
|
|
1046
|
-
}
|
|
1047
|
-
}
|
|
1048
|
-
|
|
1049
|
-
logProgress(
|
|
1050
|
-
i + 1,
|
|
1051
|
-
inboxes.length,
|
|
1052
|
-
inbox.name,
|
|
1053
|
-
page,
|
|
1054
|
-
nextUrl ? '?' : `${page}`,
|
|
1055
|
-
conversationCount
|
|
1056
|
-
)
|
|
1057
|
-
|
|
1058
|
-
url = nextUrl
|
|
1059
|
-
page++
|
|
1060
|
-
}
|
|
1061
|
-
|
|
1062
|
-
await updateSyncState(
|
|
1063
|
-
db,
|
|
1064
|
-
inbox.id,
|
|
1065
|
-
(state?.total_synced || 0) + conversationCount
|
|
1066
|
-
)
|
|
1067
|
-
await updateInboxCount(db, inbox.id)
|
|
1068
|
-
|
|
1069
|
-
totalConversations += conversationCount
|
|
1070
|
-
totalMessages += messageCount
|
|
1071
|
-
totalThreads += threadCount
|
|
1072
|
-
|
|
1073
|
-
log(
|
|
1074
|
-
` ✅ Synced ${conversationCount} conversations, ${messageCount} messages, ${threadCount} threads`
|
|
1075
|
-
)
|
|
1076
|
-
}
|
|
1077
|
-
|
|
1078
|
-
const duration = ((Date.now() - startTime) / 1000 / 60).toFixed(1)
|
|
1079
|
-
log('\n📊 SYNC COMPLETE')
|
|
1080
|
-
log(` Duration: ${duration} minutes`)
|
|
1081
|
-
log(` Conversations: ${totalConversations}`)
|
|
1082
|
-
log(` Messages: ${totalMessages}`)
|
|
1083
|
-
log(` Threads: ${totalThreads}`)
|
|
1084
|
-
} finally {
|
|
1085
|
-
db.close?.() || db.terminate?.() || true
|
|
1086
|
-
}
|
|
1087
|
-
}
|
|
1088
|
-
|
|
1089
|
-
async function handleStats(options: CacheOptions): Promise<void> {
|
|
1090
|
-
log('📊 Cache Statistics')
|
|
1091
|
-
log(` Database: ${DB_PATH}`)
|
|
1092
|
-
log('')
|
|
1093
|
-
|
|
1094
|
-
const db = await createDb()
|
|
1095
|
-
|
|
1096
|
-
try {
|
|
1097
|
-
// Get DB size
|
|
1098
|
-
const size = await allQuery<{ database_size: string }>(
|
|
1099
|
-
db,
|
|
1100
|
-
`SELECT database_size FROM pragma_database_size()`
|
|
1101
|
-
)
|
|
1102
|
-
const sizeStr = size[0]?.database_size || 'unknown'
|
|
1103
|
-
|
|
1104
|
-
// Total counts
|
|
1105
|
-
const inboxCount = await allQuery<{ c: number }>(
|
|
1106
|
-
db,
|
|
1107
|
-
`SELECT COUNT(*) as c FROM inboxes`
|
|
1108
|
-
)
|
|
1109
|
-
const convCount = await allQuery<{ c: number }>(
|
|
1110
|
-
db,
|
|
1111
|
-
`SELECT COUNT(*) as c FROM conversations`
|
|
1112
|
-
)
|
|
1113
|
-
const msgCount = await allQuery<{ c: number }>(
|
|
1114
|
-
db,
|
|
1115
|
-
`SELECT COUNT(*) as c FROM messages`
|
|
1116
|
-
)
|
|
1117
|
-
|
|
1118
|
-
console.log('='.repeat(50))
|
|
1119
|
-
console.log('Overall Stats')
|
|
1120
|
-
console.log('='.repeat(50))
|
|
1121
|
-
console.log(`Database Size: ${sizeStr}`)
|
|
1122
|
-
console.log(`Total Inboxes: ${inboxCount[0]?.c || 0}`)
|
|
1123
|
-
console.log(`Total Conversations: ${convCount[0]?.c || 0}`)
|
|
1124
|
-
console.log(`Total Messages: ${msgCount[0]?.c || 0}`)
|
|
1125
|
-
console.log('')
|
|
1126
|
-
|
|
1127
|
-
// Per-inbox breakdown
|
|
1128
|
-
console.log('='.repeat(50))
|
|
1129
|
-
console.log('Per-Inbox Breakdown')
|
|
1130
|
-
console.log('='.repeat(50))
|
|
1131
|
-
|
|
1132
|
-
const inboxStats = await allQuery<{
|
|
1133
|
-
name: string
|
|
1134
|
-
id: string
|
|
1135
|
-
conversation_count: number
|
|
1136
|
-
last_sync_at: string | null
|
|
1137
|
-
}>(
|
|
1138
|
-
db,
|
|
1139
|
-
`SELECT i.name, i.id, i.conversation_count,
|
|
1140
|
-
s.last_sync_at::VARCHAR as last_sync_at
|
|
1141
|
-
FROM inboxes i
|
|
1142
|
-
LEFT JOIN sync_state s ON i.id = s.inbox_id
|
|
1143
|
-
ORDER BY i.conversation_count DESC`
|
|
1144
|
-
)
|
|
1145
|
-
|
|
1146
|
-
for (const inbox of inboxStats) {
|
|
1147
|
-
const syncTime = inbox.last_sync_at
|
|
1148
|
-
? new Date(inbox.last_sync_at).toLocaleString()
|
|
1149
|
-
: 'never'
|
|
1150
|
-
console.log(
|
|
1151
|
-
`${inbox.name.padEnd(30)} ${String(inbox.conversation_count).padStart(6)} convs (last sync: ${syncTime})`
|
|
1152
|
-
)
|
|
1153
|
-
}
|
|
1154
|
-
|
|
1155
|
-
// Status breakdown
|
|
1156
|
-
console.log('')
|
|
1157
|
-
console.log('='.repeat(50))
|
|
1158
|
-
console.log('By Status')
|
|
1159
|
-
console.log('='.repeat(50))
|
|
1160
|
-
|
|
1161
|
-
const statusStats = await allQuery<{ status: string; c: number }>(
|
|
1162
|
-
db,
|
|
1163
|
-
`SELECT status, COUNT(*) as c FROM conversations GROUP BY status ORDER BY c DESC`
|
|
1164
|
-
)
|
|
1165
|
-
|
|
1166
|
-
for (const s of statusStats) {
|
|
1167
|
-
console.log(
|
|
1168
|
-
`${(s.status || 'unknown').padEnd(20)} ${String(s.c).padStart(8)}`
|
|
1169
|
-
)
|
|
1170
|
-
}
|
|
1171
|
-
|
|
1172
|
-
// Thread statistics
|
|
1173
|
-
console.log('')
|
|
1174
|
-
console.log('='.repeat(50))
|
|
1175
|
-
console.log('Thread Statistics')
|
|
1176
|
-
console.log('='.repeat(50))
|
|
1177
|
-
|
|
1178
|
-
const threadStats = await allQuery<{ c: number }>(
|
|
1179
|
-
db,
|
|
1180
|
-
`SELECT COUNT(*) as c FROM conversations WHERE parent_id IS NOT NULL`
|
|
1181
|
-
).catch(() => [{ c: 0 }]) // Handle case where column doesn't exist yet
|
|
1182
|
-
|
|
1183
|
-
const depthStats = await allQuery<{ depth: number; c: number }>(
|
|
1184
|
-
db,
|
|
1185
|
-
`SELECT thread_depth as depth, COUNT(*) as c
|
|
1186
|
-
FROM conversations
|
|
1187
|
-
WHERE thread_depth > 0
|
|
1188
|
-
GROUP BY thread_depth
|
|
1189
|
-
ORDER BY thread_depth`
|
|
1190
|
-
).catch(() => [])
|
|
1191
|
-
|
|
1192
|
-
console.log(`Total Threaded: ${threadStats[0]?.c || 0}`)
|
|
1193
|
-
if (depthStats.length > 0) {
|
|
1194
|
-
for (const d of depthStats) {
|
|
1195
|
-
console.log(` Depth ${d.depth}: ${String(d.c).padStart(6)}`)
|
|
1196
|
-
}
|
|
1197
|
-
}
|
|
1198
|
-
|
|
1199
|
-
if (options.json) {
|
|
1200
|
-
console.log(
|
|
1201
|
-
'\n' +
|
|
1202
|
-
JSON.stringify(
|
|
1203
|
-
{
|
|
1204
|
-
size: sizeStr,
|
|
1205
|
-
inboxes: inboxCount[0]?.c,
|
|
1206
|
-
conversations: convCount[0]?.c,
|
|
1207
|
-
messages: msgCount[0]?.c,
|
|
1208
|
-
inboxBreakdown: inboxStats,
|
|
1209
|
-
statusBreakdown: statusStats,
|
|
1210
|
-
},
|
|
1211
|
-
null,
|
|
1212
|
-
2
|
|
1213
|
-
)
|
|
1214
|
-
)
|
|
1215
|
-
}
|
|
1216
|
-
} finally {
|
|
1217
|
-
db.close?.() || db.terminate?.() || true
|
|
1218
|
-
}
|
|
1219
|
-
}
|
|
1220
|
-
|
|
1221
|
-
function formatBytes(bytes: number): string {
|
|
1222
|
-
if (bytes === 0) return '0 B'
|
|
1223
|
-
const k = 1024
|
|
1224
|
-
const sizes = ['B', 'KB', 'MB', 'GB']
|
|
1225
|
-
const i = Math.floor(Math.log(bytes) / Math.log(k))
|
|
1226
|
-
return `${parseFloat((bytes / Math.pow(k, i)).toFixed(2))} ${sizes[i]}`
|
|
1227
|
-
}
|
|
1228
|
-
|
|
1229
|
-
// ============================================================================
|
|
1230
|
-
// Main Entry Point
|
|
1231
|
-
// ============================================================================
|
|
1232
|
-
|
|
1233
|
-
async function frontCache(options: CacheOptions): Promise<void> {
|
|
1234
|
-
try {
|
|
1235
|
-
if (options.stats) {
|
|
1236
|
-
await handleStats(options)
|
|
1237
|
-
} else if (options.init) {
|
|
1238
|
-
await handleInit(options)
|
|
1239
|
-
} else if (options.resume) {
|
|
1240
|
-
await handleResume(options)
|
|
1241
|
-
} else if (options.sync) {
|
|
1242
|
-
await handleSync(options)
|
|
1243
|
-
} else {
|
|
1244
|
-
console.log('Usage: front cache [--init|--sync|--stats|--resume]')
|
|
1245
|
-
console.log('')
|
|
1246
|
-
console.log('Options:')
|
|
1247
|
-
console.log(' --init Full import all inboxes')
|
|
1248
|
-
console.log(' --sync Incremental sync (new conversations only)')
|
|
1249
|
-
console.log(' --stats Show cache statistics')
|
|
1250
|
-
console.log(' --resume Resume interrupted import')
|
|
1251
|
-
console.log(' --inbox <id> Filter to specific inbox')
|
|
1252
|
-
console.log(' --limit <n> Limit conversations per inbox')
|
|
1253
|
-
console.log(' --json JSON output (stats only)')
|
|
1254
|
-
}
|
|
1255
|
-
} catch (error) {
|
|
1256
|
-
logError('main', error)
|
|
1257
|
-
process.exit(1)
|
|
1258
|
-
}
|
|
1259
|
-
}
|
|
1260
|
-
|
|
1261
|
-
// ============================================================================
|
|
1262
|
-
// Register Command
|
|
1263
|
-
// ============================================================================
|
|
1264
|
-
|
|
1265
|
-
export function registerCacheCommand(parent: Command): void {
|
|
1266
|
-
parent
|
|
1267
|
-
.command('cache')
|
|
1268
|
-
.description('Build and maintain DuckDB cache of Front conversations')
|
|
1269
|
-
.option('--init', 'Full import all inboxes')
|
|
1270
|
-
.option('--sync', 'Incremental sync (new conversations only)')
|
|
1271
|
-
.option('--stats', 'Show cache statistics')
|
|
1272
|
-
.option('--resume', 'Resume interrupted import')
|
|
1273
|
-
.option('-i, --inbox <id>', 'Filter to specific inbox')
|
|
1274
|
-
.option('-l, --limit <n>', 'Limit conversations per inbox', parseInt)
|
|
1275
|
-
.option('--json', 'JSON output')
|
|
1276
|
-
.action(frontCache)
|
|
1277
|
-
}
|