@ainyc/canonry 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +661 -0
- package/assets/assets/index-CkNSldWM.css +1 -0
- package/assets/assets/index-DHoyZdlF.js +63 -0
- package/assets/index.html +17 -0
- package/bin/canonry.mjs +2 -0
- package/dist/chunk-ONZDY6Q4.js +3706 -0
- package/dist/cli.js +1101 -0
- package/dist/index.js +8 -0
- package/package.json +58 -0
- package/src/cli.ts +470 -0
- package/src/client.ts +152 -0
- package/src/commands/apply.ts +25 -0
- package/src/commands/competitor.ts +36 -0
- package/src/commands/evidence.ts +41 -0
- package/src/commands/export-cmd.ts +40 -0
- package/src/commands/history.ts +41 -0
- package/src/commands/init.ts +122 -0
- package/src/commands/keyword.ts +54 -0
- package/src/commands/notify.ts +70 -0
- package/src/commands/project.ts +89 -0
- package/src/commands/run.ts +54 -0
- package/src/commands/schedule.ts +90 -0
- package/src/commands/serve.ts +24 -0
- package/src/commands/settings.ts +45 -0
- package/src/commands/status.ts +52 -0
- package/src/config.ts +90 -0
- package/src/index.ts +2 -0
- package/src/job-runner.ts +368 -0
- package/src/notifier.ts +227 -0
- package/src/provider-registry.ts +55 -0
- package/src/scheduler.ts +161 -0
- package/src/server.ts +249 -0
package/src/notifier.ts
ADDED
|
@@ -0,0 +1,227 @@
|
|
|
1
|
+
import { eq, desc, and, or } from 'drizzle-orm'
|
|
2
|
+
import { deliverWebhook, resolveWebhookTarget } from '@ainyc/canonry-api-routes'
|
|
3
|
+
import type { DatabaseClient } from '@ainyc/canonry-db'
|
|
4
|
+
import { notifications, runs, querySnapshots, keywords, projects, auditLog } from '@ainyc/canonry-db'
|
|
5
|
+
import type { NotificationEvent, WebhookPayload } from '@ainyc/canonry-contracts'
|
|
6
|
+
import crypto from 'node:crypto'
|
|
7
|
+
|
|
8
|
+
export class Notifier {
|
|
9
|
+
private db: DatabaseClient
|
|
10
|
+
private serverUrl: string
|
|
11
|
+
|
|
12
|
+
constructor(db: DatabaseClient, serverUrl: string) {
|
|
13
|
+
this.db = db
|
|
14
|
+
this.serverUrl = serverUrl
|
|
15
|
+
}
|
|
16
|
+
|
|
17
|
+
/** Called after a run completes (success, partial, or failed). */
|
|
18
|
+
async onRunCompleted(runId: string, projectId: string): Promise<void> {
|
|
19
|
+
console.log(`[Notifier] onRunCompleted: runId=${runId} projectId=${projectId}`)
|
|
20
|
+
|
|
21
|
+
// Get project notifications
|
|
22
|
+
const notifs = this.db
|
|
23
|
+
.select()
|
|
24
|
+
.from(notifications)
|
|
25
|
+
.where(eq(notifications.projectId, projectId))
|
|
26
|
+
.all()
|
|
27
|
+
.filter(n => n.enabled === 1)
|
|
28
|
+
|
|
29
|
+
if (notifs.length === 0) {
|
|
30
|
+
console.log(`[Notifier] No enabled notifications for project ${projectId} — skipping`)
|
|
31
|
+
return
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
console.log(`[Notifier] Found ${notifs.length} enabled notification(s) for project ${projectId}`)
|
|
35
|
+
|
|
36
|
+
// Get the completed run
|
|
37
|
+
const run = this.db.select().from(runs).where(eq(runs.id, runId)).get()
|
|
38
|
+
if (!run) {
|
|
39
|
+
console.error(`[Notifier] Run ${runId} not found — skipping notification dispatch`)
|
|
40
|
+
return
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
// Get the project
|
|
44
|
+
const project = this.db.select().from(projects).where(eq(projects.id, projectId)).get()
|
|
45
|
+
if (!project) {
|
|
46
|
+
console.error(`[Notifier] Project ${projectId} not found — skipping notification dispatch`)
|
|
47
|
+
return
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
// Compute transitions by comparing to previous run
|
|
51
|
+
const transitions = this.computeTransitions(runId, projectId)
|
|
52
|
+
|
|
53
|
+
// Determine which events occurred
|
|
54
|
+
const events: NotificationEvent[] = []
|
|
55
|
+
console.log(`[Notifier] Run status: ${run.status}`)
|
|
56
|
+
|
|
57
|
+
if (run.status === 'completed' || run.status === 'partial') {
|
|
58
|
+
events.push('run.completed')
|
|
59
|
+
}
|
|
60
|
+
if (run.status === 'failed') {
|
|
61
|
+
events.push('run.failed')
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
const lostTransitions = transitions.filter(t => t.to === 'not-cited' && t.from === 'cited')
|
|
65
|
+
const gainedTransitions = transitions.filter(t => t.to === 'cited' && t.from === 'not-cited')
|
|
66
|
+
|
|
67
|
+
if (lostTransitions.length > 0) events.push('citation.lost')
|
|
68
|
+
if (gainedTransitions.length > 0) events.push('citation.gained')
|
|
69
|
+
|
|
70
|
+
// Send webhooks for each notification config
|
|
71
|
+
for (const notif of notifs) {
|
|
72
|
+
const config = JSON.parse(notif.config) as { url: string; events: string[] }
|
|
73
|
+
const subscribedEvents = config.events as NotificationEvent[]
|
|
74
|
+
|
|
75
|
+
// Filter to events this notification cares about
|
|
76
|
+
const matchingEvents = events.filter(e => subscribedEvents.includes(e))
|
|
77
|
+
console.log(`[Notifier] Notification ${notif.id}: subscribed=${JSON.stringify(subscribedEvents)} matched=${JSON.stringify(matchingEvents)}`)
|
|
78
|
+
if (matchingEvents.length === 0) continue
|
|
79
|
+
|
|
80
|
+
// Send one webhook per matching event
|
|
81
|
+
for (const event of matchingEvents) {
|
|
82
|
+
const relevantTransitions = event === 'citation.lost' ? lostTransitions
|
|
83
|
+
: event === 'citation.gained' ? gainedTransitions
|
|
84
|
+
: transitions
|
|
85
|
+
|
|
86
|
+
const payload: WebhookPayload = {
|
|
87
|
+
source: 'canonry',
|
|
88
|
+
event,
|
|
89
|
+
project: { name: project.name, canonicalDomain: project.canonicalDomain },
|
|
90
|
+
run: { id: run.id, status: run.status, finishedAt: run.finishedAt },
|
|
91
|
+
transitions: relevantTransitions,
|
|
92
|
+
dashboardUrl: `${this.serverUrl}/projects/${project.name}`,
|
|
93
|
+
}
|
|
94
|
+
|
|
95
|
+
await this.sendWebhook(config.url, payload, notif.id, projectId, notif.webhookSecret ?? null)
|
|
96
|
+
}
|
|
97
|
+
}
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
private computeTransitions(runId: string, projectId: string): Array<{
|
|
101
|
+
keyword: string; from: string; to: string; provider: string
|
|
102
|
+
}> {
|
|
103
|
+
// Get the two most recent completed/partial runs for this project.
|
|
104
|
+
// Status filter is pushed into SQL (not applied in JS) so that a concurrent
|
|
105
|
+
// run completing after this one does not displace it from position [0].
|
|
106
|
+
const recentRuns = this.db
|
|
107
|
+
.select()
|
|
108
|
+
.from(runs)
|
|
109
|
+
.where(
|
|
110
|
+
and(
|
|
111
|
+
eq(runs.projectId, projectId),
|
|
112
|
+
or(eq(runs.status, 'completed'), eq(runs.status, 'partial')),
|
|
113
|
+
),
|
|
114
|
+
)
|
|
115
|
+
.orderBy(desc(runs.createdAt))
|
|
116
|
+
.limit(2)
|
|
117
|
+
.all()
|
|
118
|
+
|
|
119
|
+
if (recentRuns.length < 2) return []
|
|
120
|
+
|
|
121
|
+
const currentRunId = recentRuns[0]!.id
|
|
122
|
+
const previousRunId = recentRuns[1]!.id
|
|
123
|
+
|
|
124
|
+
// Only compute for the run that just finished
|
|
125
|
+
if (currentRunId !== runId) return []
|
|
126
|
+
|
|
127
|
+
const currentSnapshots = this.db
|
|
128
|
+
.select({
|
|
129
|
+
keywordId: querySnapshots.keywordId,
|
|
130
|
+
keyword: keywords.keyword,
|
|
131
|
+
provider: querySnapshots.provider,
|
|
132
|
+
citationState: querySnapshots.citationState,
|
|
133
|
+
})
|
|
134
|
+
.from(querySnapshots)
|
|
135
|
+
.leftJoin(keywords, eq(querySnapshots.keywordId, keywords.id))
|
|
136
|
+
.where(eq(querySnapshots.runId, currentRunId))
|
|
137
|
+
.all()
|
|
138
|
+
|
|
139
|
+
const previousSnapshots = this.db
|
|
140
|
+
.select({
|
|
141
|
+
keywordId: querySnapshots.keywordId,
|
|
142
|
+
provider: querySnapshots.provider,
|
|
143
|
+
citationState: querySnapshots.citationState,
|
|
144
|
+
})
|
|
145
|
+
.from(querySnapshots)
|
|
146
|
+
.where(eq(querySnapshots.runId, previousRunId))
|
|
147
|
+
.all()
|
|
148
|
+
|
|
149
|
+
// Build lookup: key = `${keywordId}:${provider}`
|
|
150
|
+
const prevMap = new Map<string, string>()
|
|
151
|
+
for (const s of previousSnapshots) {
|
|
152
|
+
prevMap.set(`${s.keywordId}:${s.provider}`, s.citationState)
|
|
153
|
+
}
|
|
154
|
+
|
|
155
|
+
const transitions: Array<{ keyword: string; from: string; to: string; provider: string }> = []
|
|
156
|
+
|
|
157
|
+
for (const s of currentSnapshots) {
|
|
158
|
+
const key = `${s.keywordId}:${s.provider}`
|
|
159
|
+
const prevState = prevMap.get(key)
|
|
160
|
+
if (prevState && prevState !== s.citationState) {
|
|
161
|
+
transitions.push({
|
|
162
|
+
keyword: s.keyword ?? s.keywordId,
|
|
163
|
+
from: prevState,
|
|
164
|
+
to: s.citationState,
|
|
165
|
+
provider: s.provider,
|
|
166
|
+
})
|
|
167
|
+
}
|
|
168
|
+
}
|
|
169
|
+
|
|
170
|
+
return transitions
|
|
171
|
+
}
|
|
172
|
+
|
|
173
|
+
private async sendWebhook(url: string, payload: WebhookPayload, notificationId: string, projectId: string, webhookSecret: string | null): Promise<void> {
|
|
174
|
+
const targetCheck = await resolveWebhookTarget(url)
|
|
175
|
+
if (!targetCheck.ok) {
|
|
176
|
+
console.error(`[Notifier] Webhook URL blocked by SSRF check: ${url}`)
|
|
177
|
+
this.logDelivery(projectId, notificationId, payload.event, 'failed', `SSRF: ${targetCheck.message}`)
|
|
178
|
+
return
|
|
179
|
+
}
|
|
180
|
+
|
|
181
|
+
console.log(`[Notifier] Sending webhook event="${payload.event}" to ${url}`)
|
|
182
|
+
|
|
183
|
+
const maxRetries = 3
|
|
184
|
+
const delays = [1000, 4000, 16000]
|
|
185
|
+
|
|
186
|
+
for (let attempt = 0; attempt < maxRetries; attempt++) {
|
|
187
|
+
try {
|
|
188
|
+
const response = await deliverWebhook(targetCheck.target, payload, webhookSecret)
|
|
189
|
+
|
|
190
|
+
if (response.status >= 200 && response.status < 300) {
|
|
191
|
+
console.log(`[Notifier] Webhook delivered: event="${payload.event}" status=${response.status}`)
|
|
192
|
+
this.logDelivery(projectId, notificationId, payload.event, 'sent', null)
|
|
193
|
+
return
|
|
194
|
+
}
|
|
195
|
+
|
|
196
|
+
const errorDetail = response.error ?? `HTTP ${response.status}`
|
|
197
|
+
console.warn(`[Notifier] Webhook attempt ${attempt + 1}/${maxRetries} failed: ${errorDetail}`)
|
|
198
|
+
if (attempt === maxRetries - 1) {
|
|
199
|
+
this.logDelivery(projectId, notificationId, payload.event, 'failed', errorDetail)
|
|
200
|
+
}
|
|
201
|
+
} catch (err: unknown) {
|
|
202
|
+
const errorDetail = err instanceof Error ? err.message : String(err)
|
|
203
|
+
if (attempt === maxRetries - 1) {
|
|
204
|
+
this.logDelivery(projectId, notificationId, payload.event, 'failed', errorDetail)
|
|
205
|
+
console.error(`[Notifier] Failed to deliver webhook after ${maxRetries} attempts: ${errorDetail}`)
|
|
206
|
+
}
|
|
207
|
+
}
|
|
208
|
+
|
|
209
|
+
if (attempt < maxRetries - 1) {
|
|
210
|
+
await new Promise(resolve => setTimeout(resolve, delays[attempt]!))
|
|
211
|
+
}
|
|
212
|
+
}
|
|
213
|
+
}
|
|
214
|
+
|
|
215
|
+
private logDelivery(projectId: string, notificationId: string, event: string, status: string, error: string | null): void {
|
|
216
|
+
this.db.insert(auditLog).values({
|
|
217
|
+
id: crypto.randomUUID(),
|
|
218
|
+
projectId,
|
|
219
|
+
actor: 'scheduler',
|
|
220
|
+
action: `notification.${status}`,
|
|
221
|
+
entityType: 'notification',
|
|
222
|
+
entityId: notificationId,
|
|
223
|
+
diff: JSON.stringify({ event, error }),
|
|
224
|
+
createdAt: new Date().toISOString(),
|
|
225
|
+
}).run()
|
|
226
|
+
}
|
|
227
|
+
}
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
import type { ProviderAdapter, ProviderConfig, ProviderName, ProviderHealthcheckResult } from '@ainyc/canonry-contracts'
|
|
2
|
+
|
|
3
|
+
export interface RegisteredProvider {
|
|
4
|
+
adapter: ProviderAdapter
|
|
5
|
+
config: ProviderConfig
|
|
6
|
+
}
|
|
7
|
+
|
|
8
|
+
export class ProviderRegistry {
|
|
9
|
+
private providers = new Map<ProviderName, RegisteredProvider>()
|
|
10
|
+
|
|
11
|
+
register(adapter: ProviderAdapter, config: ProviderConfig): void {
|
|
12
|
+
this.providers.set(adapter.name, { adapter, config })
|
|
13
|
+
}
|
|
14
|
+
|
|
15
|
+
get(name: ProviderName): RegisteredProvider | undefined {
|
|
16
|
+
return this.providers.get(name)
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
getAll(): RegisteredProvider[] {
|
|
20
|
+
return [...this.providers.values()]
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
getForProject(projectProviders: ProviderName[]): RegisteredProvider[] {
|
|
24
|
+
// Empty array means "use all configured providers"
|
|
25
|
+
if (projectProviders.length === 0) {
|
|
26
|
+
return this.getAll()
|
|
27
|
+
}
|
|
28
|
+
const result: RegisteredProvider[] = []
|
|
29
|
+
const seen = new Set<ProviderName>()
|
|
30
|
+
for (const name of projectProviders) {
|
|
31
|
+
if (seen.has(name)) continue
|
|
32
|
+
seen.add(name)
|
|
33
|
+
const provider = this.providers.get(name)
|
|
34
|
+
if (provider) {
|
|
35
|
+
result.push(provider)
|
|
36
|
+
}
|
|
37
|
+
}
|
|
38
|
+
return result
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
get size(): number {
|
|
42
|
+
return this.providers.size
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
async healthcheckAll(): Promise<Map<ProviderName, ProviderHealthcheckResult>> {
|
|
46
|
+
const results = new Map<ProviderName, ProviderHealthcheckResult>()
|
|
47
|
+
const entries = [...this.providers.entries()]
|
|
48
|
+
const checks = entries.map(async ([name, { adapter, config }]) => {
|
|
49
|
+
const result = await adapter.healthcheck(config)
|
|
50
|
+
results.set(name, result)
|
|
51
|
+
})
|
|
52
|
+
await Promise.all(checks)
|
|
53
|
+
return results
|
|
54
|
+
}
|
|
55
|
+
}
|
package/src/scheduler.ts
ADDED
|
@@ -0,0 +1,161 @@
|
|
|
1
|
+
import cron from 'node-cron'
|
|
2
|
+
import { eq } from 'drizzle-orm'
|
|
3
|
+
import { queueRunIfProjectIdle } from '@ainyc/canonry-api-routes'
|
|
4
|
+
import type { DatabaseClient } from '@ainyc/canonry-db'
|
|
5
|
+
import { schedules, projects } from '@ainyc/canonry-db'
|
|
6
|
+
import type { ProviderName } from '@ainyc/canonry-contracts'
|
|
7
|
+
|
|
8
|
+
/** Callbacks the scheduler invokes when it creates a run. */
export interface SchedulerCallbacks {
  // Invoked after a scheduled run row has been queued; `providers` is
  // undefined when the schedule has no explicit provider list.
  onRunCreated: (runId: string, projectId: string, providers?: ProviderName[]) => void
}

/**
 * Drives scheduled runs via node-cron.
 *
 * One cron task per project (keyed by projectId in `tasks`). On start it
 * also performs a catch-up run for any schedule whose stored `nextRunAt`
 * elapsed while the server was down.
 */
export class Scheduler {
  private db: DatabaseClient
  private callbacks: SchedulerCallbacks
  // projectId -> live cron task. One task per project.
  private tasks = new Map<string, cron.ScheduledTask>()

  constructor(db: DatabaseClient, callbacks: SchedulerCallbacks) {
    this.db = db
    this.callbacks = callbacks
  }

  /** Load all enabled schedules from DB and register cron jobs. */
  start(): void {
    const allSchedules = this.db
      .select()
      .from(schedules)
      .where(eq(schedules.enabled, 1))
      .all()

    for (const schedule of allSchedules) {
      // Capture nextRunAt before registration so the check uses the stored DB
      // value, not a value that registerCronTask might have modified.
      const missedRunAt = schedule.nextRunAt
      this.registerCronTask(schedule)

      // Catch-up: if the scheduled slot was set but the server was down when
      // it was supposed to fire, trigger immediately.
      if (missedRunAt && new Date(missedRunAt) < new Date()) {
        console.log(`[Scheduler] Catch-up run for project ${schedule.projectId} (missed ${missedRunAt})`)
        this.triggerRun(schedule.id, schedule.projectId)
      }
    }

    console.log(`[Scheduler] Started with ${allSchedules.length} schedule(s)`)
  }

  /** Stop all cron tasks for graceful shutdown. */
  stop(): void {
    for (const [projectId, task] of this.tasks) {
      task.stop()
      console.log(`[Scheduler] Stopped task for project ${projectId}`)
    }
    this.tasks.clear()
  }

  /** Add or update a cron registration at runtime (called when schedule API is used). */
  upsert(projectId: string): void {
    // Remove existing task if any
    const existing = this.tasks.get(projectId)
    if (existing) {
      existing.stop()
      this.tasks.delete(projectId)
    }

    // Load fresh from DB
    const schedule = this.db
      .select()
      .from(schedules)
      .where(eq(schedules.projectId, projectId))
      .get()

    // Re-register only if the schedule still exists and is enabled;
    // a disabled schedule leaves the project with no task.
    if (schedule && schedule.enabled === 1) {
      this.registerCronTask(schedule)
    }
  }

  /** Remove a cron registration (called when schedule is deleted). */
  remove(projectId: string): void {
    const existing = this.tasks.get(projectId)
    if (existing) {
      existing.stop()
      this.tasks.delete(projectId)
      console.log(`[Scheduler] Removed task for project ${projectId}`)
    }
  }

  /**
   * Validate the cron expression, start the task, remember it in `tasks`,
   * and persist the computed `nextRunAt` back to the schedule row.
   * Invalid expressions are logged and skipped (no task is created).
   */
  private registerCronTask(schedule: typeof schedules.$inferSelect): void {
    const { id: scheduleId, projectId, cronExpr, timezone } = schedule

    if (!cron.validate(cronExpr)) {
      console.error(`[Scheduler] Invalid cron expression for project ${projectId}: ${cronExpr}`)
      return
    }

    const task = cron.schedule(cronExpr, () => {
      this.triggerRun(scheduleId, projectId)
    }, {
      timezone,
    })

    this.tasks.set(projectId, task)
    // NOTE(review): assumes the installed node-cron version exposes
    // ScheduledTask.getNextRun() — verify against the pinned dependency.
    this.db.update(schedules).set({
      nextRunAt: task.getNextRun()?.toISOString() ?? null,
      updatedAt: new Date().toISOString(),
    }).where(eq(schedules.id, scheduleId)).run()

    const label = schedule.preset ?? cronExpr
    console.log(`[Scheduler] Registered "${label}" (${timezone}) for project ${projectId}`)
  }

  /**
   * One scheduler tick: re-validate the schedule and project, queue a run
   * if the project is idle, and update lastRunAt/nextRunAt bookkeeping.
   * Self-heals by removing the task when the schedule or project is gone.
   */
  private triggerRun(scheduleId: string, projectId: string): void {
    const now = new Date().toISOString()
    // Re-read the schedule so a delete/disable since registration is honored.
    const currentSchedule = this.db.select().from(schedules).where(eq(schedules.id, scheduleId)).get()
    if (!currentSchedule || currentSchedule.enabled !== 1) {
      console.log(`[Scheduler] Schedule ${scheduleId} no longer exists or is disabled, removing task for project ${projectId}`)
      this.remove(projectId)
      return
    }

    const task = this.tasks.get(projectId)
    const nextRunAt = task?.getNextRun()?.toISOString() ?? null

    // Check if project still exists
    const project = this.db.select().from(projects).where(eq(projects.id, projectId)).get()
    if (!project) {
      console.error(`[Scheduler] Project ${projectId} not found, skipping scheduled run`)
      this.remove(projectId)
      return
    }

    // Atomically queue a run unless one is already active for the project.
    const queueResult = queueRunIfProjectIdle(this.db, {
      createdAt: now,
      kind: 'answer-visibility',
      projectId,
      trigger: 'scheduled',
    })

    if (queueResult.conflict) {
      console.log(`[Scheduler] Skipping scheduled run for ${project.name} — run ${queueResult.activeRunId} already active`)
      // Still advance nextRunAt so the stored value stays accurate.
      this.db.update(schedules).set({
        nextRunAt,
        updatedAt: now,
      }).where(eq(schedules.id, currentSchedule.id)).run()
      return
    }

    const runId = queueResult.runId
    this.db.update(schedules).set({
      lastRunAt: now,
      nextRunAt,
      updatedAt: now,
    }).where(eq(schedules.id, currentSchedule.id)).run()

    // Resolve providers
    // `providers` column is a JSON-encoded string array; empty means
    // "use the project/server default" (passed through as undefined).
    const scheduleProviders = JSON.parse(currentSchedule.providers) as string[]
    const providers = scheduleProviders.length > 0 ? scheduleProviders as ProviderName[] : undefined

    console.log(`[Scheduler] Triggered scheduled run ${runId} for project ${project.name}`)
    this.callbacks.onRunCreated(runId, projectId, providers)
  }
}
|
package/src/server.ts
ADDED
|
@@ -0,0 +1,249 @@
|
|
|
1
|
+
import fs from 'node:fs'
|
|
2
|
+
import path from 'node:path'
|
|
3
|
+
import { fileURLToPath } from 'node:url'
|
|
4
|
+
import Fastify from 'fastify'
|
|
5
|
+
import type { FastifyInstance } from 'fastify'
|
|
6
|
+
import { apiRoutes } from '@ainyc/canonry-api-routes'
|
|
7
|
+
import type { DatabaseClient } from '@ainyc/canonry-db'
|
|
8
|
+
import { geminiAdapter } from '@ainyc/canonry-provider-gemini'
|
|
9
|
+
import { openaiAdapter } from '@ainyc/canonry-provider-openai'
|
|
10
|
+
import { claudeAdapter } from '@ainyc/canonry-provider-claude'
|
|
11
|
+
import { localAdapter } from '@ainyc/canonry-provider-local'
|
|
12
|
+
import type { ProviderName } from '@ainyc/canonry-contracts'
|
|
13
|
+
import type { CanonryConfig } from './config.js'
|
|
14
|
+
import { saveConfig } from './config.js'
|
|
15
|
+
import { JobRunner } from './job-runner.js'
|
|
16
|
+
import { ProviderRegistry } from './provider-registry.js'
|
|
17
|
+
import { Scheduler } from './scheduler.js'
|
|
18
|
+
import { Notifier } from './notifier.js'
|
|
19
|
+
|
|
20
|
+
// Fallback quota applied to any provider whose config omits one.
const DEFAULT_QUOTA = {
  maxConcurrency: 2,
  maxRequestsPerMinute: 10,
  maxRequestsPerDay: 1000,
}

/**
 * Build and wire the Fastify server: provider registry (with legacy config
 * migration), job runner, notifier, scheduler, API routes, static SPA
 * serving with config injection, health endpoint, and graceful shutdown.
 * The scheduler is started before this resolves; the caller listens.
 */
export async function createServer(opts: {
  config: CanonryConfig
  db: DatabaseClient
  open?: boolean
}): Promise<FastifyInstance> {
  const app = Fastify({
    logger: {
      transport: {
        target: 'pino-pretty',
        options: {
          colorize: true,
          translateTime: 'HH:MM:ss',
          ignore: 'pid,hostname,reqId',
          messageFormat: '{msg} {req.method} {req.url}',
        },
      },
    },
  })

  // Build provider registry from config (with legacy field migration)
  const registry = new ProviderRegistry()
  const providers = opts.config.providers ?? {}

  // Migrate legacy geminiApiKey if providers.gemini is not set
  if (opts.config.geminiApiKey && !providers.gemini) {
    providers.gemini = {
      apiKey: opts.config.geminiApiKey,
      model: opts.config.geminiModel,
      quota: opts.config.geminiQuota,
    }
  }

  // A provider counts as configured if it has an apiKey (cloud) or a
  // baseUrl (local endpoint).
  console.log('[Server] Configured providers:', Object.keys(providers).filter(k => {
    const p = providers[k as keyof typeof providers]
    return p?.apiKey || p?.baseUrl
  }))

  if (providers.gemini?.apiKey) {
    registry.register(geminiAdapter, {
      provider: 'gemini',
      apiKey: providers.gemini.apiKey,
      model: providers.gemini.model,
      quotaPolicy: providers.gemini.quota ?? DEFAULT_QUOTA,
    })
  }
  if (providers.openai?.apiKey) {
    registry.register(openaiAdapter, {
      provider: 'openai',
      apiKey: providers.openai.apiKey,
      model: providers.openai.model,
      quotaPolicy: providers.openai.quota ?? DEFAULT_QUOTA,
    })
  }
  if (providers.claude?.apiKey) {
    registry.register(claudeAdapter, {
      provider: 'claude',
      apiKey: providers.claude.apiKey,
      model: providers.claude.model,
      quotaPolicy: providers.claude.quota ?? DEFAULT_QUOTA,
    })
  }
  // Local provider is keyed on baseUrl, not apiKey.
  if (providers.local?.baseUrl) {
    registry.register(localAdapter, {
      provider: 'local',
      apiKey: providers.local.apiKey,
      baseUrl: providers.local.baseUrl,
      model: providers.local.model,
      quotaPolicy: providers.local.quota ?? DEFAULT_QUOTA,
    })
  }

  const port = opts.config.port ?? 4100
  // serverUrl is embedded in webhook payloads as the dashboard link.
  const serverUrl = `http://localhost:${port}`

  const jobRunner = new JobRunner(opts.db, registry)
  const notifier = new Notifier(opts.db, serverUrl)
  // Fire-and-forget: notification dispatch runs after each completed run.
  jobRunner.onRunCompleted = (runId, projectId) => notifier.onRunCompleted(runId, projectId)

  const scheduler = new Scheduler(opts.db, {
    onRunCreated: (runId, projectId, providers) => {
      jobRunner.executeRun(runId, projectId, providers).catch((err: unknown) => {
        app.log.error({ runId, err }, 'Scheduled job runner failed')
      })
    },
  })

  // Build provider summary for API routes
  // NOTE: this array is shared with apiRoutes and mutated in-place by
  // onProviderUpdate below, so route handlers always see current state.
  const providerSummary = (['gemini', 'openai', 'claude', 'local'] as const).map(name => ({
    name,
    model: registry.get(name)?.config.model,
    configured: !!registry.get(name),
    quota: registry.get(name)?.config.quotaPolicy,
  }))

  const adapterMap = { gemini: geminiAdapter, openai: openaiAdapter, claude: claudeAdapter, local: localAdapter } as const

  // Register API routes
  await app.register(apiRoutes, {
    db: opts.db,
    skipAuth: false,
    providerSummary,
    onRunCreated: (runId: string, projectId: string, providers?: string[]) => {
      // Fire and forget — run executes in background
      jobRunner.executeRun(runId, projectId, providers as ProviderName[] | undefined).catch((err: unknown) => {
        app.log.error({ runId, err }, 'Job runner failed')
      })
    },
    // Persist a provider credential/model change, hot-swap it in the live
    // registry, and return the updated summary entry (null on failure).
    onProviderUpdate: (providerName: string, apiKey: string, model?: string, baseUrl?: string) => {
      const name = providerName as keyof typeof adapterMap
      if (!(name in adapterMap)) return null

      // Update config and persist
      if (!opts.config.providers) opts.config.providers = {}
      const existing = opts.config.providers[name]
      // Empty-string/undefined fields fall back to the previous values.
      opts.config.providers[name] = {
        apiKey: apiKey || existing?.apiKey,
        baseUrl: baseUrl || existing?.baseUrl,
        model: model || existing?.model,
        quota: existing?.quota,
      }

      try {
        saveConfig(opts.config)
      } catch (err) {
        app.log.error({ err }, 'Failed to save config')
        return null
      }

      // Re-register in the live registry (use preserved model if none was passed)
      const quota = opts.config.providers[name]!.quota ?? DEFAULT_QUOTA
      registry.register(adapterMap[name], {
        provider: name,
        apiKey: apiKey || existing?.apiKey,
        baseUrl: baseUrl || existing?.baseUrl,
        model: model || existing?.model,
        quotaPolicy: quota,
      })

      // Update the providerSummary array in-place
      const entry = providerSummary.find(p => p.name === name)
      if (entry) {
        entry.configured = true
        entry.model = model || registry.get(name)?.config.model
        entry.quota = quota
      }

      return {
        name,
        model: entry?.model,
        configured: true,
        quota,
      }
    },
    // Keep the in-process cron registrations in sync with schedule CRUD.
    onScheduleUpdated: (action: 'upsert' | 'delete', projectId: string) => {
      if (action === 'upsert') scheduler.upsert(projectId)
      if (action === 'delete') scheduler.remove(projectId)
    },
    onProjectDeleted: (projectId: string) => {
      scheduler.remove(projectId)
    },
  })

  // Try to serve static SPA assets
  const dirname = path.dirname(fileURLToPath(import.meta.url))
  const assetsDir = path.join(dirname, '..', 'assets')
  if (fs.existsSync(assetsDir)) {
    const indexPath = path.join(assetsDir, 'index.html')

    // Inject the server API key into the SPA's index.html so the dashboard
    // can authenticate without a login step.
    const injectConfig = (html: string): string => {
      const configScript = `<script>window.__CANONRY_CONFIG__=${JSON.stringify({ apiKey: opts.config.apiKey })}</script>`
      return html.replace('</head>', `${configScript}</head>`)
    }

    // Dynamic import keeps @fastify/static optional at module-load time.
    const fastifyStatic = await import('@fastify/static')
    await app.register(fastifyStatic.default, {
      root: assetsDir,
      prefix: '/',
      wildcard: false,
      // Don't serve index.html automatically — we handle it with config injection
      serve: true,
      index: false,
    })

    // Serve index.html with injected API key for the root route
    app.get('/', (_request, reply) => {
      // Re-check existence per request: the dashboard may be built after boot.
      if (fs.existsSync(indexPath)) {
        const html = fs.readFileSync(indexPath, 'utf-8')
        return reply.type('text/html').send(injectConfig(html))
      }
      return reply.status(404).send({ error: 'Dashboard not built' })
    })

    // SPA fallback: serve index.html for unmatched non-API routes
    app.setNotFoundHandler((request, reply) => {
      // Never serve HTML for API routes — return proper JSON 404
      if (request.url.startsWith('/api/')) {
        return reply.status(404).send({ error: 'Not found', path: request.url })
      }

      if (fs.existsSync(indexPath)) {
        const html = fs.readFileSync(indexPath, 'utf-8')
        return reply.type('text/html').send(injectConfig(html))
      }
      return reply.status(404).send({ error: 'Not found' })
    })
  }

  // Health endpoint
  app.get('/health', async () => ({
    status: 'ok',
    service: 'canonry',
    version: '0.1.0',
  }))

  // Start scheduler after setup
  scheduler.start()

  // Graceful shutdown
  app.addHook('onClose', async () => {
    scheduler.stop()
  })

  return app
}
|