@gravito/zenith 0.1.0-beta.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63)
  1. package/ARCHITECTURE.md +88 -0
  2. package/BATCH_OPERATIONS_IMPLEMENTATION.md +159 -0
  3. package/DEMO.md +156 -0
  4. package/DEPLOYMENT.md +157 -0
  5. package/DOCS_INTERNAL.md +73 -0
  6. package/Dockerfile +46 -0
  7. package/Dockerfile.demo-worker +29 -0
  8. package/EVOLUTION_BLUEPRINT.md +112 -0
  9. package/JOBINSPECTOR_SCROLL_FIX.md +152 -0
  10. package/PULSE_IMPLEMENTATION_PLAN.md +111 -0
  11. package/QUICK_TEST_GUIDE.md +72 -0
  12. package/README.md +33 -0
  13. package/ROADMAP.md +85 -0
  14. package/TESTING_BATCH_OPERATIONS.md +252 -0
  15. package/bin/flux-console.ts +2 -0
  16. package/dist/bin.js +108196 -0
  17. package/dist/client/assets/index-DGYEwTDL.css +1 -0
  18. package/dist/client/assets/index-oyTdySX0.js +421 -0
  19. package/dist/client/index.html +13 -0
  20. package/dist/server/index.js +108191 -0
  21. package/docker-compose.yml +40 -0
  22. package/docs/integrations/LARAVEL.md +207 -0
  23. package/package.json +50 -0
  24. package/postcss.config.js +6 -0
  25. package/scripts/flood-logs.ts +21 -0
  26. package/scripts/seed.ts +213 -0
  27. package/scripts/verify-throttle.ts +45 -0
  28. package/scripts/worker.ts +123 -0
  29. package/src/bin.ts +6 -0
  30. package/src/client/App.tsx +70 -0
  31. package/src/client/Layout.tsx +644 -0
  32. package/src/client/Sidebar.tsx +102 -0
  33. package/src/client/ThroughputChart.tsx +135 -0
  34. package/src/client/WorkerStatus.tsx +170 -0
  35. package/src/client/components/ConfirmDialog.tsx +103 -0
  36. package/src/client/components/JobInspector.tsx +524 -0
  37. package/src/client/components/LogArchiveModal.tsx +383 -0
  38. package/src/client/components/NotificationBell.tsx +203 -0
  39. package/src/client/components/Toaster.tsx +80 -0
  40. package/src/client/components/UserProfileDropdown.tsx +177 -0
  41. package/src/client/contexts/AuthContext.tsx +93 -0
  42. package/src/client/contexts/NotificationContext.tsx +103 -0
  43. package/src/client/index.css +174 -0
  44. package/src/client/index.html +12 -0
  45. package/src/client/main.tsx +15 -0
  46. package/src/client/pages/LoginPage.tsx +153 -0
  47. package/src/client/pages/MetricsPage.tsx +408 -0
  48. package/src/client/pages/OverviewPage.tsx +511 -0
  49. package/src/client/pages/QueuesPage.tsx +372 -0
  50. package/src/client/pages/SchedulesPage.tsx +531 -0
  51. package/src/client/pages/SettingsPage.tsx +449 -0
  52. package/src/client/pages/WorkersPage.tsx +316 -0
  53. package/src/client/pages/index.ts +7 -0
  54. package/src/client/utils.ts +6 -0
  55. package/src/server/index.ts +556 -0
  56. package/src/server/middleware/auth.ts +127 -0
  57. package/src/server/services/AlertService.ts +160 -0
  58. package/src/server/services/QueueService.ts +828 -0
  59. package/tailwind.config.js +73 -0
  60. package/tests/placeholder.test.ts +7 -0
  61. package/tsconfig.json +38 -0
  62. package/tsconfig.node.json +12 -0
  63. package/vite.config.ts +27 -0
package/src/server/index.ts
@@ -0,0 +1,556 @@
+ import { DB } from '@gravito/atlas'
+ import { Photon } from '@gravito/photon'
+ import { MySQLPersistence, SQLitePersistence } from '@gravito/stream'
+ import { serveStatic } from 'hono/bun'
+ import { getCookie } from 'hono/cookie'
+ import { streamSSE } from 'hono/streaming'
+ import {
+   authMiddleware,
+   createSession,
+   destroySession,
+   isAuthEnabled,
+   validateSession,
+   verifyPassword,
+ } from './middleware/auth'
+ import { QueueService } from './services/QueueService'
+
+ const app = new Photon()
+
+ // Configuration
+ const PORT = parseInt(process.env.PORT || '3000', 10)
+ const REDIS_URL = process.env.REDIS_URL || 'redis://localhost:6379'
+ const QUEUE_PREFIX = process.env.QUEUE_PREFIX || 'queue:'
+
+ // Persistence initialization
+ let persistence:
+   | { adapter: any; archiveCompleted: boolean; archiveFailed: boolean; archiveEnqueued: boolean }
+   | undefined
+
+ const dbDriver = process.env.DB_DRIVER || (process.env.DB_HOST ? 'mysql' : 'sqlite')
+
+ if (dbDriver === 'sqlite' || process.env.DB_HOST) {
+   if (dbDriver === 'sqlite') {
+     DB.addConnection('default', {
+       driver: 'sqlite',
+       database: process.env.DB_NAME || 'flux.sqlite',
+     })
+   } else {
+     DB.addConnection('default', {
+       driver: dbDriver as any,
+       host: process.env.DB_HOST,
+       port: parseInt(process.env.DB_PORT || '3306', 10),
+       database: process.env.DB_NAME || 'flux',
+       username: process.env.DB_USER || 'root',
+       password: process.env.DB_PASSWORD || '',
+     })
+   }
+
+   const adapter = dbDriver === 'sqlite' ? new SQLitePersistence(DB) : new MySQLPersistence(DB)
+   adapter.setupTable().catch((err) => console.error('[FluxConsole] SQL Archive Setup Error:', err))
+
+   persistence = {
+     adapter,
+     archiveCompleted: process.env.PERSIST_ARCHIVE_COMPLETED === 'true',
+     archiveFailed: process.env.PERSIST_ARCHIVE_FAILED !== 'false',
+     archiveEnqueued: process.env.PERSIST_ARCHIVE_ENQUEUED === 'true',
+   }
+   console.log(`[FluxConsole] SQL Archive enabled via ${dbDriver}`)
+ }
+
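Note that the persistence block above types its adapter as `any`, and only `setupTable()` is called in this file; the rest of the contract lives in `@gravito/stream` and QueueService, neither of which is fully visible in this hunk. As a rough orientation only, a sketch of the assumed shape (everything besides `setupTable()` is an assumption, not the actual `@gravito/stream` API):

// Hedged sketch: approximate adapter contract. Only setupTable() is confirmed
// by this diff; the archive method is an assumption based on the archive* flags.
interface ArchivePersistenceAdapter {
  // Creates the archive tables if missing (confirmed by the call above)
  setupTable(): Promise<void>
  // Hypothetical write path, presumably gated by archiveCompleted / archiveFailed / archiveEnqueued
  archiveJob?(queue: string, status: 'enqueued' | 'completed' | 'failed', payload: unknown): Promise<void>
}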
+ // Service Initialization
+ const queueService = new QueueService(REDIS_URL, QUEUE_PREFIX, persistence)
+
+ queueService
+   .connect()
+   .then(() => {
+     console.log(`[FluxConsole] Connected to Redis at ${REDIS_URL}`)
+     // Start background metrics recording (Reduced from 5s to 2s for better real-time feel)
+     setInterval(() => {
+       queueService.recordStatusMetrics().catch(console.error)
+     }, 2000)
+
+     // Start Scheduler Tick (Reduced from 10s to 5s)
+     setInterval(() => {
+       queueService.tickScheduler().catch(console.error)
+     }, 5000)
+
+     // Record initial snapshot
+     queueService.recordStatusMetrics().catch(console.error)
+   })
+   .catch((err) => {
+     console.error('[FluxConsole] Failed to connect to Redis', err)
+   })
+
+ const api = new Photon()
+
+ api.get('/health', (c) => c.json({ status: 'ok', time: new Date().toISOString() }))
+
+ // Auth endpoints (no middleware protection)
+ api.get('/auth/status', (c) => {
+   const token = getCookie(c, 'flux_session')
+   const isAuthenticated = !isAuthEnabled() || (token && validateSession(token))
+   return c.json({
+     enabled: isAuthEnabled(),
+     authenticated: !!isAuthenticated,
+   })
+ })
+
+ api.post('/auth/login', async (c) => {
+   try {
+     const { password } = await c.req.json()
+
+     if (!verifyPassword(password)) {
+       return c.json({ success: false, error: 'Invalid password' }, 401)
+     }
+
+     createSession(c)
+     return c.json({ success: true })
+   } catch (_err) {
+     return c.json({ success: false, error: 'Login failed' }, 500)
+   }
+ })
+
+ api.post('/auth/logout', (c) => {
+   destroySession(c)
+   return c.json({ success: true })
+ })
+
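The auth flow above is cookie-based. A hedged sketch of exercising it from a browser client (routes and response shapes are taken from the handlers above; the same-origin base URL and the sample password are assumptions):

// Hedged usage sketch for the /api/auth endpoints defined above
const login = await fetch('/api/auth/login', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ password: 'secret' }), // checked against AUTH_PASSWORD
})
if (login.ok) {
  // The httpOnly flux_session cookie is now set, so this call reports authenticated
  const status = await fetch('/api/auth/status').then((r) => r.json())
  console.log(status) // e.g. { enabled: true, authenticated: true }
}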
+ // Apply auth middleware to all other API routes
+ api.use('/*', authMiddleware)
+
+ api.get('/queues', async (c) => {
+   try {
+     const queues = await queueService.listQueues()
+     return c.json({ queues })
+   } catch (err) {
+     console.error(err)
+     return c.json({ error: 'Failed to list queues' }, 500)
+   }
+ })
+
+ api.get('/search', async (c) => {
+   const query = c.req.query('q') || ''
+   const type = (c.req.query('type') as 'all' | 'waiting' | 'delayed' | 'failed') || 'all'
+   const limit = parseInt(c.req.query('limit') || '20', 10)
+
+   if (!query || query.length < 2) {
+     return c.json({ results: [], message: 'Query must be at least 2 characters' })
+   }
+
+   try {
+     const results = await queueService.searchJobs(query, { type, limit })
+     return c.json({ results, query, count: results.length })
+   } catch (err) {
+     console.error(err)
+     return c.json({ error: 'Search failed' }, 500)
+   }
+ })
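A minimal client-side sketch of the search endpoint above (parameter names and response shape come from the handler; the relative URL assumes the dashboard is served from the same origin):

// Hedged usage sketch for GET /api/search
const res = await fetch('/api/search?q=invoice&type=failed&limit=10')
const { results, query, count } = await res.json()
console.log(`${count} match(es) for "${query}"`, results)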
+
+ api.get('/archive/search', async (c) => {
+   const query = c.req.query('q') || ''
+   const queue = c.req.query('queue')
+   const page = parseInt(c.req.query('page') || '1', 10)
+   const limit = parseInt(c.req.query('limit') || '50', 10)
+
+   if (!query) {
+     return c.json({ results: [] })
+   }
+
+   try {
+     const { jobs, total } = await queueService.searchArchive(query, { queue, page, limit })
+     return c.json({ results: jobs, query, count: total })
+   } catch (err) {
+     console.error(err)
+     return c.json({ error: 'Archive search failed' }, 500)
+   }
+ })
+
+ api.get('/logs/archive', async (c) => {
+   const level = c.req.query('level')
+   const workerId = c.req.query('workerId')
+   const queue = c.req.query('queue')
+   const search = c.req.query('search')
+
+   const startTime = c.req.query('startTime') ? new Date(c.req.query('startTime')!) : undefined
+   const endTime = c.req.query('endTime') ? new Date(c.req.query('endTime')!) : undefined
+   const page = parseInt(c.req.query('page') || '1', 10)
+   const limit = parseInt(c.req.query('limit') || '50', 10)
+
+   try {
+     const results = await queueService.getArchivedLogs({
+       level,
+       workerId,
+       queue,
+       search,
+       startTime,
+       endTime,
+       page,
+       limit,
+     })
+     return c.json(results)
+   } catch (err) {
+     console.error(err)
+     return c.json({ error: 'Failed to fetch archived logs' }, 500)
+   }
+ })
+
+ api.post('/queues/:name/retry-all', async (c) => {
+   const name = c.req.param('name')
+   try {
+     const count = await queueService.retryDelayedJob(name)
+     return c.json({ success: true, count })
+   } catch (_err) {
+     return c.json({ error: 'Failed to retry jobs' }, 500)
+   }
+ })
+
+ api.post('/queues/:name/retry-all-failed', async (c) => {
+   const name = c.req.param('name')
+   try {
+     const count = await queueService.retryAllFailedJobs(name)
+     return c.json({ success: true, count })
+   } catch (_err) {
+     return c.json({ error: 'Failed to retry failed jobs' }, 500)
+   }
+ })
+
+ api.post('/queues/:name/clear-failed', async (c) => {
+   const name = c.req.param('name')
+   try {
+     await queueService.clearFailedJobs(name)
+     return c.json({ success: true })
+   } catch (_err) {
+     return c.json({ error: 'Failed to clear failed jobs' }, 500)
+   }
+ })
+
+ api.post('/queues/:name/pause', async (c) => {
+   const name = c.req.param('name')
+   try {
+     await queueService.pauseQueue(name)
+     return c.json({ success: true, paused: true })
+   } catch (_err) {
+     return c.json({ error: 'Failed to pause queue' }, 500)
+   }
+ })
+
+ api.post('/queues/:name/resume', async (c) => {
+   const name = c.req.param('name')
+   try {
+     await queueService.resumeQueue(name)
+     return c.json({ success: true, paused: false })
+   } catch (_err) {
+     return c.json({ error: 'Failed to resume queue' }, 500)
+   }
+ })
+
+ api.get('/queues/:name/jobs', async (c) => {
+   const name = c.req.param('name')
+   const type = (c.req.query('type') as 'waiting' | 'delayed' | 'failed') || 'waiting'
+   try {
+     const jobs = await queueService.getJobs(name, type)
+     return c.json({ jobs })
+   } catch (err) {
+     console.error(err)
+     return c.json({ error: 'Failed to fetch jobs' }, 500)
+   }
+ })
+
+ api.get('/queues/:name/jobs/count', async (c) => {
+   const name = c.req.param('name')
+   const type = (c.req.query('type') as 'waiting' | 'delayed' | 'failed') || 'waiting'
+   try {
+     const count = await queueService.getJobCount(name, type)
+     return c.json({ count })
+   } catch (err) {
+     console.error(err)
+     return c.json({ error: 'Failed to count jobs' }, 500)
+   }
+ })
+
+ api.get('/queues/:name/archive', async (c) => {
+   const name = c.req.param('name')
+   const page = parseInt(c.req.query('page') || '1', 10)
+   const limit = parseInt(c.req.query('limit') || '50', 10)
+
+   const status = c.req.query('status') as 'completed' | 'failed' | undefined
+   const jobId = c.req.query('jobId')
+   const startTime = c.req.query('startTime') ? new Date(c.req.query('startTime')!) : undefined
+   const endTime = c.req.query('endTime') ? new Date(c.req.query('endTime')!) : undefined
+
+   try {
+     const { jobs, total } = await queueService.getArchiveJobs(name, page, limit, status, {
+       jobId,
+       startTime,
+       endTime,
+     })
+     return c.json({ jobs, total })
+   } catch (err) {
+     console.error(err)
+     return c.json({ error: 'Failed to fetch archived jobs' }, 500)
+   }
+ })
+
+ api.get('/throughput', async (c) => {
+   try {
+     const data = await queueService.getThroughputData()
+     return c.json({ data })
+   } catch (_err) {
+     return c.json({ error: 'Failed to fetch throughput' }, 500)
+   }
+ })
+
+ api.get('/workers', async (c) => {
+   try {
+     const workers = await queueService.listWorkers()
+     return c.json({ workers })
+   } catch (_err) {
+     return c.json({ error: 'Failed to fetch workers' }, 500)
+   }
+ })
+
+ api.get('/metrics/history', async (c) => {
+   try {
+     const metrics = ['waiting', 'delayed', 'failed', 'workers']
+     const history: Record<string, number[]> = {}
+
+     await Promise.all(
+       metrics.map(async (m) => {
+         history[m] = await queueService.getMetricHistory(m)
+       })
+     )
+
+     return c.json({ history })
+   } catch (_err) {
+     return c.json({ error: 'Failed to fetch metrics history' }, 500)
+   }
+ })
+
+ api.get('/system/status', (c) => {
+   const mem = process.memoryUsage()
+   return c.json({
+     node: process.version,
+     memory: {
+       rss: `${(mem.rss / 1024 / 1024).toFixed(2)} MB`,
+       heapUsed: `${(mem.heapUsed / 1024 / 1024).toFixed(2)} MB`,
+       total: '4.00 GB', // Hardcoded limit for demo aesthetic
+     },
+     engine: 'v0.1.0-beta.1',
+     uptime: process.uptime(),
+     env: process.env.NODE_ENV || 'production-east-1',
+   })
+ })
+
+ api.post('/queues/:name/jobs/delete', async (c) => {
+   const queueName = c.req.param('name')
+   const { type, raw } = await c.req.json()
+   try {
+     const success = await queueService.deleteJob(queueName, type, raw)
+     return c.json({ success })
+   } catch (_err) {
+     return c.json({ error: 'Failed to delete job' }, 500)
+   }
+ })
+
+ api.post('/queues/:name/jobs/retry', async (c) => {
+   const queueName = c.req.param('name')
+   const { raw } = await c.req.json()
+   try {
+     const success = await queueService.retryJob(queueName, raw)
+     return c.json({ success })
+   } catch (_err) {
+     return c.json({ error: 'Failed to retry job' }, 500)
+   }
+ })
+
+ api.post('/queues/:name/jobs/bulk-delete', async (c) => {
+   const queueName = c.req.param('name')
+   const { type, raws } = await c.req.json()
+   try {
+     const deleted = await queueService.deleteJobs(queueName, type, raws)
+     return c.json({ success: true, count: deleted })
+   } catch (_err) {
+     return c.json({ error: 'Failed to bulk delete' }, 500)
+   }
+ })
+
+ api.post('/queues/:name/jobs/bulk-retry', async (c) => {
+   const queueName = c.req.param('name')
+   const { type, raws } = await c.req.json()
+   try {
+     const retried = await queueService.retryJobs(queueName, type, raws)
+     return c.json({ success: true, count: retried })
+   } catch (_err) {
+     return c.json({ error: 'Failed to bulk retry' }, 500)
+   }
+ })
+
+ api.post('/queues/:name/jobs/bulk-delete-all', async (c) => {
+   const queueName = c.req.param('name')
+   const { type } = await c.req.json()
+   try {
+     const deleted = await queueService.deleteAllJobs(queueName, type)
+     return c.json({ success: true, count: deleted })
+   } catch (_err) {
+     return c.json({ error: 'Failed to bulk delete all' }, 500)
+   }
+ })
+
+ api.post('/queues/:name/jobs/bulk-retry-all', async (c) => {
+   const queueName = c.req.param('name')
+   const { type } = await c.req.json()
+   try {
+     const retried = await queueService.retryAllJobs(queueName, type)
+     return c.json({ success: true, count: retried })
+   } catch (_err) {
+     return c.json({ error: 'Failed to bulk retry all' }, 500)
+   }
+ })
+
+ api.post('/maintenance/cleanup-archive', async (c) => {
+   const { days = 30 } = await c.req.json()
+   try {
+     const deleted = await queueService.cleanupArchive(days)
+     return c.json({ success: true, deleted })
+   } catch (_err) {
+     return c.json({ error: 'Failed to cleanup archive' }, 500)
+   }
+ })
+
+ api.post('/queues/:name/purge', async (c) => {
+   const name = c.req.param('name')
+   try {
+     await queueService.purgeQueue(name)
+     return c.json({ success: true })
+   } catch (_err) {
+     return c.json({ error: 'Failed to purge queue' }, 500)
+   }
+ })
+
+ api.get('/logs/stream', async (c) => {
+   return streamSSE(c, async (stream) => {
+     // 1. Send history first
+     const history = await queueService.getLogHistory()
+     for (const log of history) {
+       await stream.writeSSE({
+         data: JSON.stringify(log),
+         event: 'log',
+       })
+     }
+
+     // 2. Subscribe to new logs
+     const unsubscribeLogs = queueService.onLog(async (msg) => {
+       await stream.writeSSE({
+         data: JSON.stringify(msg),
+         event: 'log',
+       })
+     })
+
+     // 3. Subscribe to real-time stats
+     const unsubscribeStats = queueService.onStats(async (stats) => {
+       await stream.writeSSE({
+         data: JSON.stringify(stats),
+         event: 'stats',
+       })
+     })
+
+     stream.onAbort(() => {
+       unsubscribeLogs()
+       unsubscribeStats()
+     })
+
+     // Keep alive
+     while (true) {
+       await stream.sleep(5000)
+       await stream.writeSSE({ data: 'heartbeat', event: 'ping' })
+     }
+   })
+ })
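A minimal sketch of consuming this SSE endpoint from the dashboard side. The event names ('log', 'stats', 'ping') come from the handler above; the URL and the logging are illustrative assumptions:

// Hedged usage sketch for GET /api/logs/stream
const source = new EventSource('/api/logs/stream')
source.addEventListener('log', (e) => {
  console.log('log', JSON.parse((e as MessageEvent).data))
})
source.addEventListener('stats', (e) => {
  console.log('stats', JSON.parse((e as MessageEvent).data))
})
source.addEventListener('ping', () => {
  // 5s heartbeat keeps proxies from closing the idle connection
})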
+
+ // --- Schedules ---
+ api.get('/schedules', async (c) => {
+   try {
+     const schedules = await queueService.listSchedules()
+     return c.json({ schedules })
+   } catch (_err) {
+     return c.json({ error: 'Failed to list schedules' }, 500)
+   }
+ })
+
+ api.post('/schedules', async (c) => {
+   const body = await c.req.json()
+   try {
+     await queueService.registerSchedule(body)
+     return c.json({ success: true })
+   } catch (_err) {
+     return c.json({ error: 'Failed to register schedule' }, 500)
+   }
+ })
+
+ api.post('/schedules/run/:id', async (c) => {
+   const id = c.req.param('id')
+   try {
+     await queueService.runScheduleNow(id)
+     return c.json({ success: true })
+   } catch (_err) {
+     return c.json({ error: 'Failed to run schedule' }, 500)
+   }
+ })
+
+ api.delete('/schedules/:id', async (c) => {
+   const id = c.req.param('id')
+   try {
+     await queueService.removeSchedule(id)
+     return c.json({ success: true })
+   } catch (_err) {
+     return c.json({ error: 'Failed to remove schedule' }, 500)
+   }
+ })
+
+ // --- Alerting ---
+ api.get('/alerts/config', (c) => {
+   return c.json({
+     rules: queueService.alerts.getRules(),
+     webhookEnabled: !!process.env.SLACK_WEBHOOK_URL,
+   })
+ })
+
+ api.post('/alerts/test', async (c) => {
+   try {
+     queueService.alerts.check({
+       queues: [],
+       workers: [
+         {
+           id: 'test-node',
+           hostname: 'localhost',
+           pid: 0,
+           uptime: 0,
+           memory: { rss: '0', heapTotal: '0', heapUsed: '0' },
+           queues: [],
+         },
+       ] as any,
+       totals: { waiting: 9999, delayed: 0, failed: 9999 },
+     })
+     return c.json({ success: true, message: 'Test alert dispatched' })
+   } catch (_err) {
+     return c.json({ error: 'Test failed' }, 500)
+   }
+ })
+
+ app.route('/api', api)
+
+ app.use(
+   '/*',
+   serveStatic({
+     root: './dist/client',
+   })
+ )
+
+ app.get('*', serveStatic({ path: './dist/client/index.html' }))
+
+ console.log(`[FluxConsole] Server starting on http://localhost:${PORT}`)
+
+ export default {
+   port: PORT,
+   fetch: app.fetch,
+ }
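The default export of `{ port, fetch }` is the shape Bun picks up automatically when this file is the entrypoint (e.g. `bun run src/server/index.ts`). An explicit equivalent, shown only as a sketch:

// Hedged sketch: starting the same app explicitly with Bun.serve
import server from './index'

Bun.serve({ port: server.port, fetch: server.fetch })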
package/src/server/middleware/auth.ts
@@ -0,0 +1,127 @@
+ import type { Context, Next } from 'hono'
+ import { deleteCookie, getCookie, setCookie } from 'hono/cookie'
+
+ // Session token store (in-memory for simplicity, consider Redis for production)
+ const sessions = new Map<string, { createdAt: number; expiresAt: number }>()
+
+ // Configuration
+ const SESSION_DURATION_MS = 24 * 60 * 60 * 1000 // 24 hours
+ const SESSION_COOKIE_NAME = 'flux_session'
+
+ /**
+  * Generate a secure random session token
+  */
+ function generateSessionToken(): string {
+   const array = new Uint8Array(32)
+   crypto.getRandomValues(array)
+   return Array.from(array, (byte) => byte.toString(16).padStart(2, '0')).join('')
+ }
+
+ /**
+  * Check if authentication is enabled
+  */
+ export function isAuthEnabled(): boolean {
+   return !!process.env.AUTH_PASSWORD
+ }
+
+ /**
+  * Verify the provided password against the environment variable
+  */
+ export function verifyPassword(password: string): boolean {
+   const authPassword = process.env.AUTH_PASSWORD
+   if (!authPassword) {
+     return true // No password set, allow access
+   }
+   return password === authPassword
+ }
+
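One hardening note: the `===` comparison above is not constant-time, so it can in principle leak information through response timing. A hedged sketch of a constant-time variant (not part of this package) using Node's crypto module, which Bun also implements:

import { createHash, timingSafeEqual } from 'node:crypto'

// Hedged sketch: constant-time password check. Hashing first yields
// equal-length buffers, which timingSafeEqual requires.
function verifyPasswordConstantTime(password: string): boolean {
  const authPassword = process.env.AUTH_PASSWORD
  if (!authPassword) return true // same semantics as above: auth disabled
  const a = createHash('sha256').update(password).digest()
  const b = createHash('sha256').update(authPassword).digest()
  return timingSafeEqual(a, b)
}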
+ /**
+  * Create a new session and set the cookie
+  */
+ export function createSession(c: Context): string {
+   const token = generateSessionToken()
+   const now = Date.now()
+
+   sessions.set(token, {
+     createdAt: now,
+     expiresAt: now + SESSION_DURATION_MS,
+   })
+
+   setCookie(c, SESSION_COOKIE_NAME, token, {
+     httpOnly: true,
+     secure: process.env.NODE_ENV === 'production',
+     sameSite: 'Lax',
+     maxAge: SESSION_DURATION_MS / 1000,
+     path: '/',
+   })
+
+   return token
+ }
+
+ /**
+  * Validate a session token
+  */
+ export function validateSession(token: string): boolean {
+   const session = sessions.get(token)
+   if (!session) {
+     return false
+   }
+
+   if (Date.now() > session.expiresAt) {
+     sessions.delete(token)
+     return false
+   }
+
+   return true
+ }
+
+ /**
+  * Destroy a session
+  */
+ export function destroySession(c: Context): void {
+   const token = getCookie(c, SESSION_COOKIE_NAME)
+   if (token) {
+     sessions.delete(token)
+   }
+   deleteCookie(c, SESSION_COOKIE_NAME, { path: '/' })
+ }
+
+ /**
+  * Authentication middleware for API routes
+  */
+ export async function authMiddleware(c: Context, next: Next) {
+   // If no password is set, allow all requests
+   if (!isAuthEnabled()) {
+     return next()
+   }
+
+   // Allow auth endpoints without authentication
+   const path = c.req.path
+   if (path === '/api/auth/login' || path === '/api/auth/status' || path === '/api/auth/logout') {
+     return next()
+   }
+
+   // Check for valid session
+   const token = getCookie(c, SESSION_COOKIE_NAME)
+   if (token && validateSession(token)) {
+     return next()
+   }
+
+   // Unauthorized
+   return c.json({ error: 'Unauthorized', message: 'Please login to access this resource' }, 401)
+ }
+
+ /**
+  * Clean up expired sessions periodically
+  */
+ export function cleanupExpiredSessions(): void {
+   const now = Date.now()
+   for (const [token, session] of sessions) {
+     if (now > session.expiresAt) {
+       sessions.delete(token)
+     }
+   }
+ }
+
+ // Run cleanup every 10 minutes
+ setInterval(cleanupExpiredSessions, 10 * 60 * 1000)
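The comment at the top of this file suggests Redis for production. A hedged sketch of what that swap might look like (client library, key prefix, and function names are all assumptions, not part of this package):

import Redis from 'ioredis'

// Hedged sketch: Redis-backed session store with the same 24h TTL.
const redis = new Redis(process.env.REDIS_URL || 'redis://localhost:6379')
const key = (token: string) => `flux:session:${token}`

export async function createSessionRecord(token: string): Promise<void> {
  await redis.set(key(token), '1', 'EX', SESSION_DURATION_MS / 1000) // EX is in seconds
}

export async function validateSessionRecord(token: string): Promise<boolean> {
  return (await redis.get(key(token))) !== null
}

export async function destroySessionRecord(token: string): Promise<void> {
  await redis.del(key(token))
}

With TTLs, Redis evicts expired sessions itself, so the periodic cleanup interval above would no longer be needed.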