@gravito/zenith 1.1.3 → 1.1.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +28 -10
- package/dist/bin.js +43235 -76691
- package/dist/client/index.html +13 -0
- package/dist/server/index.js +43235 -76691
- package/package.json +16 -7
- package/CHANGELOG.md +0 -62
- package/Dockerfile +0 -46
- package/Dockerfile.demo-worker +0 -29
- package/bin/flux-console.ts +0 -2
- package/doc/ECOSYSTEM_EXPANSION_RFC.md +0 -130
- package/docker-compose.yml +0 -40
- package/docs/ALERTING_GUIDE.md +0 -71
- package/docs/DEPLOYMENT.md +0 -157
- package/docs/DOCS_INTERNAL.md +0 -73
- package/docs/LARAVEL_ZENITH_ROADMAP.md +0 -109
- package/docs/QUASAR_MASTER_PLAN.md +0 -140
- package/docs/QUICK_TEST_GUIDE.md +0 -72
- package/docs/ROADMAP.md +0 -85
- package/docs/integrations/LARAVEL.md +0 -207
- package/postcss.config.js +0 -6
- package/scripts/debug_redis_keys.ts +0 -24
- package/scripts/flood-logs.ts +0 -21
- package/scripts/seed.ts +0 -213
- package/scripts/verify-throttle.ts +0 -49
- package/scripts/worker.ts +0 -124
- package/specs/PULSE_SPEC.md +0 -86
- package/src/bin.ts +0 -6
- package/src/client/App.tsx +0 -72
- package/src/client/Layout.tsx +0 -669
- package/src/client/Sidebar.tsx +0 -112
- package/src/client/ThroughputChart.tsx +0 -158
- package/src/client/WorkerStatus.tsx +0 -202
- package/src/client/components/BrandIcons.tsx +0 -168
- package/src/client/components/ConfirmDialog.tsx +0 -134
- package/src/client/components/JobInspector.tsx +0 -487
- package/src/client/components/LogArchiveModal.tsx +0 -432
- package/src/client/components/NotificationBell.tsx +0 -212
- package/src/client/components/PageHeader.tsx +0 -47
- package/src/client/components/Toaster.tsx +0 -90
- package/src/client/components/UserProfileDropdown.tsx +0 -186
- package/src/client/contexts/AuthContext.tsx +0 -105
- package/src/client/contexts/NotificationContext.tsx +0 -128
- package/src/client/index.css +0 -172
- package/src/client/main.tsx +0 -15
- package/src/client/pages/LoginPage.tsx +0 -164
- package/src/client/pages/MetricsPage.tsx +0 -445
- package/src/client/pages/OverviewPage.tsx +0 -519
- package/src/client/pages/PulsePage.tsx +0 -409
- package/src/client/pages/QueuesPage.tsx +0 -378
- package/src/client/pages/SchedulesPage.tsx +0 -535
- package/src/client/pages/SettingsPage.tsx +0 -1001
- package/src/client/pages/WorkersPage.tsx +0 -380
- package/src/client/pages/index.ts +0 -8
- package/src/client/utils.ts +0 -15
- package/src/server/config/ServerConfigManager.ts +0 -90
- package/src/server/index.ts +0 -860
- package/src/server/middleware/auth.ts +0 -127
- package/src/server/services/AlertService.ts +0 -321
- package/src/server/services/CommandService.ts +0 -136
- package/src/server/services/LogStreamProcessor.ts +0 -93
- package/src/server/services/MaintenanceScheduler.ts +0 -78
- package/src/server/services/PulseService.ts +0 -148
- package/src/server/services/QueueMetricsCollector.ts +0 -138
- package/src/server/services/QueueService.ts +0 -924
- package/src/shared/types.ts +0 -223
- package/tailwind.config.js +0 -80
- package/tests/placeholder.test.ts +0 -7
- package/tsconfig.json +0 -29
- package/tsconfig.node.json +0 -10
- package/vite.config.ts +0 -27
|
@@ -1,924 +0,0 @@
|
|
|
1
|
-
import { EventEmitter } from 'node:events'
|
|
2
|
-
import { type MySQLPersistence, QueueManager } from '@gravito/stream'
|
|
3
|
-
import { Redis } from 'ioredis'
|
|
4
|
-
import { AlertService } from './AlertService'
|
|
5
|
-
import { LogStreamProcessor } from './LogStreamProcessor'
|
|
6
|
-
import { MaintenanceScheduler } from './MaintenanceScheduler'
|
|
7
|
-
import { QueueMetricsCollector } from './QueueMetricsCollector'
|
|
8
|
-
|
|
9
|
-
/**
 * Snapshot of queue statistics for a single queue.
 *
 * All counters reflect the state of the backing Redis structures at the
 * moment the snapshot was taken (see QueueMetricsCollector).
 *
 * @public
 * @since 3.0.0
 */
export interface QueueStats {
  /** Name of the queue. */
  name: string
  /** Number of jobs waiting in the queue. */
  waiting: number
  /** Number of jobs delayed. */
  delayed: number
  /** Number of jobs that failed. */
  failed: number
  /** Number of jobs currently being processed. */
  active: number
  /** Whether the queue is currently paused. */
  paused: boolean
}
|
|
29
|
-
|
|
30
|
-
/**
 * Health report from a worker instance.
 *
 * @public
 * @since 3.0.0
 */
export interface WorkerReport {
  /** Unique identifier of the worker instance. */
  id: string
  /** Hostname of the machine running the worker. */
  hostname: string
  /** Operating-system process id of the worker. */
  pid: number
  /** Worker uptime (unit not visible here — presumably seconds; confirm against the worker reporter). */
  uptime: number
  /** Memory usage figures, pre-formatted as strings (e.g. human-readable sizes). */
  memory: {
    rss: string
    heapTotal: string
    heapUsed: string
  }
  /** Names of the queues this worker consumes. */
  queues: string[]
  /** Maximum number of jobs the worker processes concurrently. */
  concurrency: number
  /** When the report was generated (ISO-8601 string by convention elsewhere in this file). */
  timestamp: string
  /** System load averages as reported by the worker host. */
  loadAvg: number[]
}
|
|
51
|
-
|
|
52
|
-
/**
 * A standard system log message published on the `flux_console:logs` channel.
 *
 * @public
 * @since 3.0.0
 */
export interface SystemLog {
  /** Severity of the entry. */
  level: 'info' | 'warn' | 'error' | 'success'
  /** Human-readable log text. */
  message: string
  /** Identifier of the worker that emitted the log. */
  workerId: string
  /** Queue the log relates to, when applicable. */
  queue?: string
  /** ISO-8601 timestamp assigned at publish time (see publishLog). */
  timestamp: string
}
|
|
65
|
-
|
|
66
|
-
/**
 * Aggregated global statistics broadcast to dashboard subscribers
 * (see onStats / recordStatusMetrics).
 *
 * @public
 * @since 3.0.0
 */
export interface GlobalStats {
  /** Per-queue snapshots. */
  queues: QueueStats[]
  /** Jobs-per-minute series for the last 15 minutes (HH:MM labels). */
  throughput: { timestamp: string; count: number }[]
  /** Reports from all currently known workers. */
  workers: WorkerReport[]
}
|
|
77
|
-
|
|
78
|
-
/**
 * QueueService acts as the central orchestrator for all queue-related operations.
 *
 * It bridges the gap between the raw Redis data, the persistent SQL storage,
 * and the real-time dashboard. It handles:
 * - Direct queue manipulation (pause, resume, purge).
 * - Job lifecycle management (retry, delete).
 * - System-wide metric aggregation and alerting.
 * - Log stream processing and archiving.
 *
 * This service is designed to be the single source of truth for the
 * Zenith Console.
 *
 * @public
 * @since 3.0.0
 */
export class QueueService {
  // Primary Redis connection, used for commands and pipelines.
  private redis: Redis
  // Second connection handed to LogStreamProcessor — presumably dedicated to
  // pub/sub, since a subscribed Redis connection cannot issue regular
  // commands. TODO(review): confirm against LogStreamProcessor.
  private subRedis: Redis
  // Key prefix applied to all queue-related Redis keys.
  private prefix: string
  // In-process fan-out for 'stats' events (and historically 'log').
  private logEmitter = new EventEmitter()
  // Queue abstraction from @gravito/stream; also owns the persistence adapter.
  private manager: QueueManager
  // Exposed publicly so callers can configure alert rules directly.
  public alerts: AlertService
  private logProcessor: LogStreamProcessor
  private metricsCollector: QueueMetricsCollector
  private maintenanceScheduler: MaintenanceScheduler
|
|
104
|
-
|
|
105
|
-
/**
 * Initializes the QueueService.
 *
 * Connections are created with `lazyConnect`, so nothing talks to Redis
 * until {@link QueueService.connect} is called.
 *
 * @param redisUrl - The Redis connection string (e.g., redis://localhost:6379).
 * @param prefix - Key prefix for all Redis keys used by the queues.
 * @param persistence - Optional configuration for MySQL persistence.
 */
constructor(
  redisUrl: string,
  prefix = 'queue:',
  persistence?: {
    adapter: MySQLPersistence
    archiveCompleted?: boolean
    archiveFailed?: boolean
    archiveEnqueued?: boolean
  }
) {
  // lazyConnect defers the actual TCP connection until connect() is awaited.
  this.redis = new Redis(redisUrl, {
    lazyConnect: true,
  })
  this.subRedis = new Redis(redisUrl, {
    lazyConnect: true,
  })
  this.prefix = prefix
  // Many dashboard clients may subscribe simultaneously; raise the default
  // listener cap (10) to avoid spurious MaxListenersExceededWarning.
  this.logEmitter.setMaxListeners(1000)

  this.logProcessor = new LogStreamProcessor(this.redis, this.subRedis)
  this.metricsCollector = new QueueMetricsCollector(this.redis, prefix)
  // The scheduler calls back into cleanupArchive(days) on its own cadence.
  this.maintenanceScheduler = new MaintenanceScheduler(this.redis, (days) =>
    this.cleanupArchive(days)
  )

  this.manager = new QueueManager({
    default: 'redis',
    connections: {
      redis: {
        driver: 'redis',
        // NOTE(review): cast needed because QueueManager's client type differs
        // from ioredis' Redis — confirm the interfaces stay compatible.
        client: this.redis as any,
        prefix,
      },
    },
    persistence,
  })
  // AlertService opens its own Redis connection from the same URL.
  this.alerts = new AlertService(redisUrl)
}
|
|
150
|
-
|
|
151
|
-
/**
|
|
152
|
-
* Connects to all required backing services.
|
|
153
|
-
*
|
|
154
|
-
* Establishes connections to Redis, the AlertService, and the LogStreamProcessor.
|
|
155
|
-
* Also starts the maintenance scheduler.
|
|
156
|
-
*
|
|
157
|
-
* @returns Promise resolving when all connections are ready.
|
|
158
|
-
* @throws {Error} If Redis or AlertService fails to connect.
|
|
159
|
-
*/
|
|
160
|
-
async connect() {
|
|
161
|
-
await Promise.all([
|
|
162
|
-
this.redis.connect(),
|
|
163
|
-
this.subRedis.connect(),
|
|
164
|
-
this.alerts.connect(),
|
|
165
|
-
this.logProcessor.subscribe(),
|
|
166
|
-
])
|
|
167
|
-
|
|
168
|
-
this.maintenanceScheduler.start(30000)
|
|
169
|
-
}
|
|
170
|
-
|
|
171
|
-
/**
|
|
172
|
-
* Subscribes to real-time system logs.
|
|
173
|
-
*
|
|
174
|
-
* @param callback - Function to be called when a new log arrives.
|
|
175
|
-
* @returns Unsubscribe function.
|
|
176
|
-
*
|
|
177
|
-
* @example
|
|
178
|
-
* ```typescript
|
|
179
|
-
* const unsub = queueService.onLog((log) => {
|
|
180
|
-
* console.log('New log:', log.message);
|
|
181
|
-
* });
|
|
182
|
-
* // Later...
|
|
183
|
-
* unsub();
|
|
184
|
-
* ```
|
|
185
|
-
*/
|
|
186
|
-
onLog(callback: (msg: SystemLog) => void): () => void {
|
|
187
|
-
const unsub = this.logProcessor.onLog(callback)
|
|
188
|
-
const emitterUnsub = () => {
|
|
189
|
-
this.logEmitter.off('log', callback)
|
|
190
|
-
}
|
|
191
|
-
return () => {
|
|
192
|
-
unsub()
|
|
193
|
-
emitterUnsub()
|
|
194
|
-
}
|
|
195
|
-
}
|
|
196
|
-
|
|
197
|
-
/**
 * Retrieves current statistics for all known queues.
 *
 * Thin delegation to the metrics collector, which owns the Redis scan logic.
 *
 * @returns List of queue statistics.
 */
async listQueues(): Promise<QueueStats[]> {
  return this.metricsCollector.listQueues()
}
|
|
205
|
-
|
|
206
|
-
/**
|
|
207
|
-
* Pauses a specific queue, preventing it from processing jobs.
|
|
208
|
-
*
|
|
209
|
-
* @param queueName - The name of the queue to pause.
|
|
210
|
-
* @returns True if successful.
|
|
211
|
-
* @throws {Error} If Redis operation fails.
|
|
212
|
-
*/
|
|
213
|
-
async pauseQueue(queueName: string): Promise<boolean> {
|
|
214
|
-
await this.redis.set(`${this.prefix}${queueName}:paused`, '1')
|
|
215
|
-
return true
|
|
216
|
-
}
|
|
217
|
-
|
|
218
|
-
/**
|
|
219
|
-
* Resumes a paused queue.
|
|
220
|
-
*
|
|
221
|
-
* @param queueName - The name of the queue to resume.
|
|
222
|
-
* @returns True if successful.
|
|
223
|
-
* @throws {Error} If Redis operation fails.
|
|
224
|
-
*/
|
|
225
|
-
async resumeQueue(queueName: string): Promise<boolean> {
|
|
226
|
-
await this.redis.del(`${this.prefix}${queueName}:paused`)
|
|
227
|
-
return true
|
|
228
|
-
}
|
|
229
|
-
|
|
230
|
-
/**
|
|
231
|
-
* Checks if a queue is currently paused.
|
|
232
|
-
*
|
|
233
|
-
* @param queueName - The name of the queue.
|
|
234
|
-
* @returns True if paused, false otherwise.
|
|
235
|
-
*/
|
|
236
|
-
async isQueuePaused(queueName: string): Promise<boolean> {
|
|
237
|
-
const paused = await this.redis.get(`${this.prefix}${queueName}:paused`)
|
|
238
|
-
return paused === '1'
|
|
239
|
-
}
|
|
240
|
-
|
|
241
|
-
/**
|
|
242
|
-
* Moves all delayed jobs in a queue back to the waiting list immediately.
|
|
243
|
-
*
|
|
244
|
-
* Useful for manually forcing retries or clearing backlogs.
|
|
245
|
-
*
|
|
246
|
-
* @param queueName - The name of the queue.
|
|
247
|
-
* @returns The number of jobs moved.
|
|
248
|
-
*/
|
|
249
|
-
async retryDelayedJob(queueName: string): Promise<number> {
|
|
250
|
-
const key = `${this.prefix}${queueName}`
|
|
251
|
-
const delayKey = `${key}:delayed`
|
|
252
|
-
|
|
253
|
-
const script = `
|
|
254
|
-
local delayKey = KEYS[1]
|
|
255
|
-
local queueKey = KEYS[2]
|
|
256
|
-
|
|
257
|
-
local jobs = redis.call('ZRANGE', delayKey, 0, -1)
|
|
258
|
-
|
|
259
|
-
if #jobs > 0 then
|
|
260
|
-
redis.call('LPUSH', queueKey, unpack(jobs))
|
|
261
|
-
redis.call('DEL', delayKey)
|
|
262
|
-
end
|
|
263
|
-
return #jobs
|
|
264
|
-
`
|
|
265
|
-
|
|
266
|
-
const movedCount = (await this.redis.eval(script, 2, delayKey, key)) as number
|
|
267
|
-
return movedCount
|
|
268
|
-
}
|
|
269
|
-
|
|
270
|
-
/**
 * Retrieves a paginated list of jobs from a specific queue and state.
 *
 * Delayed jobs live in a sorted set (score = scheduled epoch millis);
 * waiting/failed jobs live in lists. Each returned object carries `_raw`
 * (the original JSON string, used as the handle for delete/retry) and,
 * for delayed jobs, `scheduledAt`. Unparseable entries are returned with
 * `_error` instead of throwing.
 *
 * For 'failed', when Redis yields fewer rows than requested and persistence
 * is configured, the page is topped up with archived jobs (marked
 * `_archived: true`).
 *
 * @param queueName - The queue to query.
 * @param type - The state to filter by (waiting, delayed, failed).
 * @param start - Start index (0-based).
 * @param stop - Stop index (inclusive).
 * @returns List of job objects.
 */
async getJobs(
  queueName: string,
  type: 'waiting' | 'delayed' | 'failed' = 'waiting',
  start = 0,
  stop = 49
): Promise<any[]> {
  const key = `${this.prefix}${queueName}`
  let rawJobs: string[] = []

  if (type === 'delayed') {
    // WITHSCORES interleaves member/score pairs, hence the step-2 walk below.
    const results = await this.redis.zrange(`${key}:delayed`, start, stop, 'WITHSCORES')
    const formatted = []
    for (let i = 0; i < results.length; i += 2) {
      const jobStr = results[i]!
      const score = results[i + 1]!
      try {
        const parsed = JSON.parse(jobStr)
        formatted.push({
          ...parsed,
          _raw: jobStr,
          // Score is the scheduled time in epoch milliseconds.
          scheduledAt: new Date(parseInt(score, 10)).toISOString(),
        })
      } catch (_e) {
        formatted.push({ _raw: jobStr, _error: 'Failed to parse JSON' })
      }
    }
    return formatted
  } else {
    const listKey = type === 'failed' ? `${key}:failed` : key
    rawJobs = await this.redis.lrange(listKey, start, stop)

    const jobs = rawJobs.map((jobStr) => {
      try {
        const parsed = JSON.parse(jobStr)
        return { ...parsed, _raw: jobStr }
      } catch (_e) {
        return { _raw: jobStr, _error: 'Failed to parse JSON' }
      }
    })

    // Top up a short 'failed' page from the SQL archive, if configured.
    const persistence = this.manager.getPersistence()
    if (jobs.length < stop - start + 1 && persistence && type === 'failed') {
      const archived = await persistence.list(queueName, {
        limit: stop - start + 1 - jobs.length,
        status: type as 'failed',
      })
      return [...jobs, ...archived.map((a: any) => ({ ...a, _archived: true }))]
    }

    return jobs
  }
}
|
|
331
|
-
|
|
332
|
-
/**
 * Records a snapshot of system metrics and triggers alerts if needed.
 *
 * Called periodically by the metrics collector. Writes one key per metric
 * per minute (`flux_console:metrics:<name>:<minute>`, 1h TTL), emits a
 * 'stats' event for dashboard subscribers, and kicks off alert evaluation
 * without awaiting it (fire-and-forget; failures are logged only).
 *
 * @param nodes - Current state of nodes (from PulseService).
 * @param injectedWorkers - Optional worker data (for testing).
 */
async recordStatusMetrics(
  nodes: Record<string, any> = {},
  injectedWorkers?: any[]
): Promise<void> {
  const stats = await this.listQueues()
  // Aggregate per-queue counters into system-wide totals.
  const totals = stats.reduce(
    (acc, q) => {
      acc.waiting += q.waiting
      acc.delayed += q.delayed
      acc.failed += q.failed
      return acc
    },
    { waiting: 0, delayed: 0, failed: 0 }
  )

  // Minute-granularity bucket key; matches getMetricHistory's lookup.
  const now = Math.floor(Date.now() / 60000)
  const pipe = this.redis.pipeline()

  pipe.set(`flux_console:metrics:waiting:${now}`, totals.waiting, 'EX', 3600)
  pipe.set(`flux_console:metrics:delayed:${now}`, totals.delayed, 'EX', 3600)
  pipe.set(`flux_console:metrics:failed:${now}`, totals.failed, 'EX', 3600)

  const workers = injectedWorkers || (await this.listWorkers())
  pipe.set(`flux_console:metrics:workers:${now}`, workers.length, 'EX', 3600)

  await pipe.exec()

  // Push the fresh snapshot to onStats() subscribers.
  this.logEmitter.emit('stats', {
    queues: stats,
    throughput: await this.getThroughputData(),
    workers,
  })

  // Alert evaluation is deliberately not awaited: a slow/broken rule must
  // not delay or fail the metrics write path.
  this.alerts
    .check({
      queues: stats,
      nodes: nodes as any,
      workers: workers as any,
      totals,
    })
    .catch((err) => console.error('[AlertService] Rule Evaluation Error:', err))
}
|
|
382
|
-
|
|
383
|
-
/**
|
|
384
|
-
* Subscribes to global stats updates.
|
|
385
|
-
*
|
|
386
|
-
* @param callback - Function called with new stats.
|
|
387
|
-
* @returns Unsubscribe function.
|
|
388
|
-
*/
|
|
389
|
-
onStats(callback: (stats: GlobalStats) => void): () => void {
|
|
390
|
-
this.logEmitter.on('stats', callback)
|
|
391
|
-
return () => {
|
|
392
|
-
this.logEmitter.off('stats', callback)
|
|
393
|
-
}
|
|
394
|
-
}
|
|
395
|
-
|
|
396
|
-
/**
|
|
397
|
-
* Retrieves historical data for a specific metric.
|
|
398
|
-
*
|
|
399
|
-
* @param metric - The metric name (waiting, delayed, failed, workers).
|
|
400
|
-
* @param limit - Number of data points to return (minutes).
|
|
401
|
-
* @returns Array of values.
|
|
402
|
-
*/
|
|
403
|
-
async getMetricHistory(metric: string, limit = 15): Promise<number[]> {
|
|
404
|
-
const now = Math.floor(Date.now() / 60000)
|
|
405
|
-
const keys = []
|
|
406
|
-
for (let i = limit - 1; i >= 0; i--) {
|
|
407
|
-
keys.push(`flux_console:metrics:${metric}:${now - i}`)
|
|
408
|
-
}
|
|
409
|
-
|
|
410
|
-
const values = await this.redis.mget(...keys)
|
|
411
|
-
return values.map((v) => parseInt(v || '0', 10))
|
|
412
|
-
}
|
|
413
|
-
|
|
414
|
-
/**
|
|
415
|
-
* Calculates system throughput (jobs per minute).
|
|
416
|
-
*
|
|
417
|
-
* @returns Array of { timestamp, count } objects for the last 15 minutes.
|
|
418
|
-
*/
|
|
419
|
-
async getThroughputData(): Promise<{ timestamp: string; count: number }[]> {
|
|
420
|
-
const now = Math.floor(Date.now() / 60000)
|
|
421
|
-
const results = []
|
|
422
|
-
|
|
423
|
-
for (let i = 14; i >= 0; i--) {
|
|
424
|
-
const t = now - i
|
|
425
|
-
const count = await this.redis.get(`flux_console:throughput:${t}`)
|
|
426
|
-
const date = new Date(t * 60000)
|
|
427
|
-
results.push({
|
|
428
|
-
timestamp: `${date.getHours().toString().padStart(2, '0')}:${date.getMinutes().toString().padStart(2, '0')}`,
|
|
429
|
-
count: parseInt(count || '0', 10),
|
|
430
|
-
})
|
|
431
|
-
}
|
|
432
|
-
|
|
433
|
-
return results
|
|
434
|
-
}
|
|
435
|
-
|
|
436
|
-
/**
 * Lists all active workers.
 *
 * Thin delegation to the metrics collector.
 *
 * @returns Array of worker reports.
 */
async listWorkers(): Promise<WorkerReport[]> {
  return this.metricsCollector.listWorkers()
}
|
|
444
|
-
|
|
445
|
-
/**
|
|
446
|
-
* Deletes a specific job from a queue.
|
|
447
|
-
*
|
|
448
|
-
* @param queueName - The queue name.
|
|
449
|
-
* @param type - The list to remove from (waiting, delayed, failed).
|
|
450
|
-
* @param jobRaw - The raw JSON string of the job to remove.
|
|
451
|
-
* @returns True if removed, false otherwise.
|
|
452
|
-
*/
|
|
453
|
-
async deleteJob(
|
|
454
|
-
queueName: string,
|
|
455
|
-
type: 'waiting' | 'delayed' | 'failed',
|
|
456
|
-
jobRaw: string
|
|
457
|
-
): Promise<boolean> {
|
|
458
|
-
const key =
|
|
459
|
-
type === 'delayed'
|
|
460
|
-
? `${this.prefix}${queueName}:delayed`
|
|
461
|
-
: type === 'failed'
|
|
462
|
-
? `${this.prefix}${queueName}:failed`
|
|
463
|
-
: `${this.prefix}${queueName}`
|
|
464
|
-
const result =
|
|
465
|
-
type === 'delayed'
|
|
466
|
-
? await this.redis.zrem(key, jobRaw)
|
|
467
|
-
: await this.redis.lrem(key, 0, jobRaw)
|
|
468
|
-
return result > 0
|
|
469
|
-
}
|
|
470
|
-
|
|
471
|
-
/**
 * Retries a specific failed or delayed job immediately.
 *
 * NOTE: despite the doc mentioning "failed", this implementation only moves
 * jobs out of the *delayed* sorted set (ZREM) — a job sitting in the failed
 * list will not be found and the method returns false. TODO(review): confirm
 * whether failed-job retry is handled elsewhere (see retryAllFailedJobs).
 *
 * The remove-and-push is atomic (single Lua script), so the job cannot be
 * duplicated or lost between the two operations.
 *
 * @param queueName - The queue name.
 * @param jobRaw - The raw JSON string of the job.
 * @returns True if successfully moved to waiting list.
 */
async retryJob(queueName: string, jobRaw: string): Promise<boolean> {
  const key = `${this.prefix}${queueName}`
  const delayKey = `${key}:delayed`

  const script = `
    local delayKey = KEYS[1]
    local queueKey = KEYS[2]
    local jobRaw = ARGV[1]

    local removed = redis.call('ZREM', delayKey, jobRaw)
    if removed > 0 then
      redis.call('LPUSH', queueKey, jobRaw)
      return 1
    end
    return 0
  `
  const result = await this.redis.eval(script, 2, delayKey, key, jobRaw)
  return result === 1
}
|
|
497
|
-
|
|
498
|
-
/**
|
|
499
|
-
* Purges all jobs from a queue (waiting, delayed, failed, active).
|
|
500
|
-
*
|
|
501
|
-
* ⚠️ Destructive operation. Irreversible.
|
|
502
|
-
*
|
|
503
|
-
* @param queueName - The queue to purge.
|
|
504
|
-
*/
|
|
505
|
-
async purgeQueue(queueName: string): Promise<void> {
|
|
506
|
-
const pipe = this.redis.pipeline()
|
|
507
|
-
pipe.del(`${this.prefix}${queueName}`)
|
|
508
|
-
pipe.del(`${this.prefix}${queueName}:delayed`)
|
|
509
|
-
pipe.del(`${this.prefix}${queueName}:failed`)
|
|
510
|
-
pipe.del(`${this.prefix}${queueName}:active`)
|
|
511
|
-
await pipe.exec()
|
|
512
|
-
}
|
|
513
|
-
|
|
514
|
-
/**
 * Retries all failed jobs in a queue.
 *
 * Delegates to QueueManager.retryFailed; the literal 10000 is presumably a
 * batch/size cap — TODO(review): confirm its meaning against @gravito/stream.
 *
 * @param queueName - The queue name.
 * @returns Number of jobs retried.
 */
async retryAllFailedJobs(queueName: string): Promise<number> {
  return await this.manager.retryFailed(queueName, 10000)
}
|
|
523
|
-
|
|
524
|
-
/**
 * Clears all failed jobs from a queue.
 *
 * Thin delegation to QueueManager.clearFailed.
 *
 * @param queueName - The queue name.
 */
async clearFailedJobs(queueName: string): Promise<void> {
  await this.manager.clearFailed(queueName)
}
|
|
532
|
-
|
|
533
|
-
/**
|
|
534
|
-
* Gets the count of jobs in a specific state.
|
|
535
|
-
*
|
|
536
|
-
* @param queueName - Queue name.
|
|
537
|
-
* @param type - Job state.
|
|
538
|
-
* @returns Count of jobs.
|
|
539
|
-
*/
|
|
540
|
-
async getJobCount(queueName: string, type: 'waiting' | 'delayed' | 'failed'): Promise<number> {
|
|
541
|
-
const key =
|
|
542
|
-
type === 'delayed'
|
|
543
|
-
? `${this.prefix}${queueName}:delayed`
|
|
544
|
-
: type === 'failed'
|
|
545
|
-
? `${this.prefix}${queueName}:failed`
|
|
546
|
-
: `${this.prefix}${queueName}`
|
|
547
|
-
|
|
548
|
-
return type === 'delayed' ? await this.redis.zcard(key) : await this.redis.llen(key)
|
|
549
|
-
}
|
|
550
|
-
|
|
551
|
-
/**
|
|
552
|
-
* Deletes all jobs in a specific state from a queue.
|
|
553
|
-
*
|
|
554
|
-
* @param queueName - Queue name.
|
|
555
|
-
* @param type - Job state to clear.
|
|
556
|
-
* @returns Number of jobs deleted.
|
|
557
|
-
*/
|
|
558
|
-
async deleteAllJobs(queueName: string, type: 'waiting' | 'delayed' | 'failed'): Promise<number> {
|
|
559
|
-
const key =
|
|
560
|
-
type === 'delayed'
|
|
561
|
-
? `${this.prefix}${queueName}:delayed`
|
|
562
|
-
: type === 'failed'
|
|
563
|
-
? `${this.prefix}${queueName}:failed`
|
|
564
|
-
: `${this.prefix}${queueName}`
|
|
565
|
-
|
|
566
|
-
const count = await this.getJobCount(queueName, type)
|
|
567
|
-
await this.redis.del(key)
|
|
568
|
-
return count
|
|
569
|
-
}
|
|
570
|
-
|
|
571
|
-
/**
|
|
572
|
-
* Retries all jobs in a specific state (delayed or failed).
|
|
573
|
-
*
|
|
574
|
-
* @param queueName - Queue name.
|
|
575
|
-
* @param type - Job state.
|
|
576
|
-
* @returns Number of jobs retried.
|
|
577
|
-
*/
|
|
578
|
-
async retryAllJobs(queueName: string, type: 'delayed' | 'failed'): Promise<number> {
|
|
579
|
-
if (type === 'delayed') {
|
|
580
|
-
return await this.retryDelayedJob(queueName)
|
|
581
|
-
} else {
|
|
582
|
-
return await this.retryAllFailedJobs(queueName)
|
|
583
|
-
}
|
|
584
|
-
}
|
|
585
|
-
|
|
586
|
-
/**
|
|
587
|
-
* Deletes a specific set of jobs.
|
|
588
|
-
*
|
|
589
|
-
* @param queueName - Queue name.
|
|
590
|
-
* @param type - Job state.
|
|
591
|
-
* @param jobRaws - Array of raw job strings.
|
|
592
|
-
* @returns Number of jobs deleted.
|
|
593
|
-
*/
|
|
594
|
-
async deleteJobs(
|
|
595
|
-
queueName: string,
|
|
596
|
-
type: 'waiting' | 'delayed' | 'failed',
|
|
597
|
-
jobRaws: string[]
|
|
598
|
-
): Promise<number> {
|
|
599
|
-
const key =
|
|
600
|
-
type === 'delayed'
|
|
601
|
-
? `${this.prefix}${queueName}:delayed`
|
|
602
|
-
: type === 'failed'
|
|
603
|
-
? `${this.prefix}${queueName}:failed`
|
|
604
|
-
: `${this.prefix}${queueName}`
|
|
605
|
-
|
|
606
|
-
const pipe = this.redis.pipeline()
|
|
607
|
-
for (const raw of jobRaws) {
|
|
608
|
-
if (type === 'delayed') {
|
|
609
|
-
pipe.zrem(key, raw)
|
|
610
|
-
} else {
|
|
611
|
-
pipe.lrem(key, 1, raw)
|
|
612
|
-
}
|
|
613
|
-
}
|
|
614
|
-
const results = await pipe.exec()
|
|
615
|
-
return results?.reduce((acc, [_, res]) => acc + ((res as number) || 0), 0) || 0
|
|
616
|
-
}
|
|
617
|
-
|
|
618
|
-
/**
|
|
619
|
-
* Retries a specific set of jobs.
|
|
620
|
-
*
|
|
621
|
-
* @param queueName - Queue name.
|
|
622
|
-
* @param type - Job state.
|
|
623
|
-
* @param jobRaws - Array of raw job strings.
|
|
624
|
-
* @returns Number of jobs retried.
|
|
625
|
-
*/
|
|
626
|
-
async retryJobs(
|
|
627
|
-
queueName: string,
|
|
628
|
-
type: 'delayed' | 'failed',
|
|
629
|
-
jobRaws: string[]
|
|
630
|
-
): Promise<number> {
|
|
631
|
-
const key = `${this.prefix}${queueName}`
|
|
632
|
-
const sourceKey = type === 'delayed' ? `${key}:delayed` : `${key}:failed`
|
|
633
|
-
|
|
634
|
-
const pipe = this.redis.pipeline()
|
|
635
|
-
for (const raw of jobRaws) {
|
|
636
|
-
if (type === 'delayed') {
|
|
637
|
-
pipe.zrem(sourceKey, raw)
|
|
638
|
-
pipe.lpush(key, raw)
|
|
639
|
-
} else {
|
|
640
|
-
pipe.lrem(sourceKey, 1, raw)
|
|
641
|
-
pipe.lpush(key, raw)
|
|
642
|
-
}
|
|
643
|
-
}
|
|
644
|
-
const results = await pipe.exec()
|
|
645
|
-
let count = 0
|
|
646
|
-
if (results) {
|
|
647
|
-
for (let i = 0; i < results.length; i += 2) {
|
|
648
|
-
const result = results[i]
|
|
649
|
-
if (result && !result[0] && (result[1] as number) > 0) {
|
|
650
|
-
count++
|
|
651
|
-
}
|
|
652
|
-
}
|
|
653
|
-
}
|
|
654
|
-
return count
|
|
655
|
-
}
|
|
656
|
-
|
|
657
|
-
/**
 * Publishes a log message to the stream and archives it.
 *
 * Pipeline of effects, in order:
 * 1. Publish on the `flux_console:logs` pub/sub channel (live subscribers).
 * 2. Prepend to the capped 100-entry history list (see getLogHistory).
 * 3. Bump the per-minute throughput counter (1h TTL, read by
 *    getThroughputData).
 * 4. Fire-and-forget archive to SQL persistence if configured — archive
 *    failures are logged, never propagated to the caller.
 *
 * @param log - Log entry details.
 */
async publishLog(log: { level: string; message: string; workerId: string; queue?: string }) {
  // Timestamp is assigned here, not by the caller.
  const payload = {
    ...log,
    timestamp: new Date().toISOString(),
  }
  await this.redis.publish('flux_console:logs', JSON.stringify(payload))

  const pipe = this.redis.pipeline()
  pipe.lpush('flux_console:logs:history', JSON.stringify(payload))
  // Keep only the newest 100 entries.
  pipe.ltrim('flux_console:logs:history', 0, 99)

  // Minute bucket; each published log counts once toward throughput.
  const now = Math.floor(Date.now() / 60000)
  pipe.incr(`flux_console:throughput:${now}`)
  pipe.expire(`flux_console:throughput:${now}`, 3600)

  await pipe.exec()

  const persistence = this.manager.getPersistence()
  if (persistence) {
    // Deliberately not awaited: archiving must not slow the publish path.
    persistence
      .archiveLog({
        ...log,
        timestamp: new Date(),
      })
      .catch((err: any) => console.error('[QueueService] Log Archive Error:', err))
  }
}
|
|
689
|
-
|
|
690
|
-
/**
|
|
691
|
-
* Retrieves recent log history from Redis.
|
|
692
|
-
*
|
|
693
|
-
* @returns List of recent logs (max 100).
|
|
694
|
-
*/
|
|
695
|
-
async getLogHistory(): Promise<any[]> {
|
|
696
|
-
const logs = await this.redis.lrange('flux_console:logs:history', 0, -1)
|
|
697
|
-
return logs.map((l) => JSON.parse(l)).reverse()
|
|
698
|
-
}
|
|
699
|
-
|
|
700
|
-
/**
 * Searches for jobs across all queues and states.
 *
 * Scans Redis structures in real-time — at most the first 100 jobs of each
 * state per queue are examined, so matches beyond that window are missed.
 * Matching is a case-insensitive substring test against the job id, name,
 * and JSON-serialized data, in that priority order (`_matchType` records
 * which one hit). Note: this can be expensive on large queues.
 *
 * @param query - Search term (ID, name, or data).
 * @param options - Search options (limit, type).
 * @returns List of matching jobs, each tagged with `_queue`, `_type`,
 *          and `_matchType`.
 */
async searchJobs(
  query: string,
  options: { limit?: number; type?: 'all' | 'waiting' | 'delayed' | 'failed' } = {}
): Promise<any[]> {
  const { limit = 20, type = 'all' } = options
  const results: any[] = []
  const queryLower = query.toLowerCase()

  const queues = await this.listQueues()

  // The limit is re-checked at every nesting level so the scan stops as
  // soon as enough matches are collected.
  for (const queue of queues) {
    if (results.length >= limit) {
      break
    }

    const types = type === 'all' ? ['waiting', 'delayed', 'failed'] : [type]

    for (const jobType of types) {
      if (results.length >= limit) {
        break
      }

      // Only the first 100 jobs per queue/state are scanned.
      const jobs = await this.getJobs(queue.name, jobType as any, 0, 99)

      for (const job of jobs) {
        if (results.length >= limit) {
          break
        }

        const idMatch = job.id && String(job.id).toLowerCase().includes(queryLower)
        const nameMatch = job.name && String(job.name).toLowerCase().includes(queryLower)

        let dataMatch = false
        try {
          // Falls back to the whole job when there is no data field.
          const dataStr = JSON.stringify(job.data || job).toLowerCase()
          dataMatch = dataStr.includes(queryLower)
        } catch (_e) {}

        if (idMatch || nameMatch || dataMatch) {
          results.push({
            ...job,
            _queue: queue.name,
            _type: jobType,
            _matchType: idMatch ? 'id' : nameMatch ? 'name' : 'data',
          })
        }
      }
    }
  }

  return results
}
|
|
761
|
-
|
|
762
|
-
/**
|
|
763
|
-
* Retrieves archived jobs from persistent storage (MySQL).
|
|
764
|
-
*
|
|
765
|
-
* @param queue - Queue name.
|
|
766
|
-
* @param page - Page number.
|
|
767
|
-
* @param limit - Page size.
|
|
768
|
-
* @param status - Filter by status.
|
|
769
|
-
* @param filter - Additional filters (jobId, time range).
|
|
770
|
-
* @returns Paginated list of jobs.
|
|
771
|
-
*/
|
|
772
|
-
async getArchiveJobs(
|
|
773
|
-
queue: string,
|
|
774
|
-
page = 1,
|
|
775
|
-
limit = 50,
|
|
776
|
-
status?: 'completed' | 'failed',
|
|
777
|
-
filter: { jobId?: string; startTime?: Date; endTime?: Date } = {}
|
|
778
|
-
): Promise<{ jobs: any[]; total: number }> {
|
|
779
|
-
const persistence = this.manager.getPersistence()
|
|
780
|
-
if (!persistence) {
|
|
781
|
-
return { jobs: [], total: 0 }
|
|
782
|
-
}
|
|
783
|
-
|
|
784
|
-
const offset = (page - 1) * limit
|
|
785
|
-
const [jobs, total] = await Promise.all([
|
|
786
|
-
persistence.list(queue, { limit, offset, status, ...filter }),
|
|
787
|
-
persistence.count(queue, { status, ...filter }),
|
|
788
|
-
])
|
|
789
|
-
|
|
790
|
-
return {
|
|
791
|
-
jobs: jobs.map((j: any) => ({ ...j, _archived: true })),
|
|
792
|
-
total,
|
|
793
|
-
}
|
|
794
|
-
}
|
|
795
|
-
|
|
796
|
-
/**
|
|
797
|
-
* Searches archived jobs in persistent storage.
|
|
798
|
-
*
|
|
799
|
-
* @param query - Search term.
|
|
800
|
-
* @param options - Pagination options.
|
|
801
|
-
* @returns Matching jobs.
|
|
802
|
-
*/
|
|
803
|
-
async searchArchive(
|
|
804
|
-
query: string,
|
|
805
|
-
options: { limit?: number; page?: number; queue?: string } = {}
|
|
806
|
-
): Promise<{ jobs: any[]; total: number }> {
|
|
807
|
-
const persistence = this.manager.getPersistence() as any
|
|
808
|
-
if (!persistence || typeof persistence.search !== 'function') {
|
|
809
|
-
return { jobs: [], total: 0 }
|
|
810
|
-
}
|
|
811
|
-
|
|
812
|
-
const { limit = 50, page = 1, queue } = options
|
|
813
|
-
const offset = (page - 1) * limit
|
|
814
|
-
|
|
815
|
-
const jobs = await persistence.search(query, { limit, offset, queue })
|
|
816
|
-
return {
|
|
817
|
-
jobs: jobs.map((j: any) => ({ ...j, _archived: true })),
|
|
818
|
-
total: jobs.length === limit ? limit * page + 1 : (page - 1) * limit + jobs.length,
|
|
819
|
-
}
|
|
820
|
-
}
|
|
821
|
-
|
|
822
|
-
/**
|
|
823
|
-
* Retrieves archived logs from persistent storage.
|
|
824
|
-
*
|
|
825
|
-
* @param options - Filters and pagination.
|
|
826
|
-
* @returns Paginated logs.
|
|
827
|
-
*/
|
|
828
|
-
async getArchivedLogs(
|
|
829
|
-
options: {
|
|
830
|
-
page?: number
|
|
831
|
-
limit?: number
|
|
832
|
-
level?: string
|
|
833
|
-
workerId?: string
|
|
834
|
-
queue?: string
|
|
835
|
-
search?: string
|
|
836
|
-
startTime?: Date
|
|
837
|
-
endTime?: Date
|
|
838
|
-
} = {}
|
|
839
|
-
): Promise<{ logs: any[]; total: number }> {
|
|
840
|
-
const persistence = this.manager.getPersistence()
|
|
841
|
-
if (!persistence) {
|
|
842
|
-
return { logs: [], total: 0 }
|
|
843
|
-
}
|
|
844
|
-
|
|
845
|
-
const { page = 1, limit = 50, ...filters } = options
|
|
846
|
-
const offset = (page - 1) * limit
|
|
847
|
-
|
|
848
|
-
const [logs, total] = await Promise.all([
|
|
849
|
-
persistence.listLogs({ limit, offset, ...filters }),
|
|
850
|
-
persistence.countLogs(filters),
|
|
851
|
-
])
|
|
852
|
-
|
|
853
|
-
return { logs, total }
|
|
854
|
-
}
|
|
855
|
-
|
|
856
|
-
/**
|
|
857
|
-
* Cleans up old archived data based on retention policy.
|
|
858
|
-
*
|
|
859
|
-
* @param days - Retention period in days.
|
|
860
|
-
* @returns Number of records deleted.
|
|
861
|
-
*/
|
|
862
|
-
async cleanupArchive(days: number): Promise<number> {
|
|
863
|
-
const persistence = this.manager.getPersistence()
|
|
864
|
-
if (!persistence) {
|
|
865
|
-
return 0
|
|
866
|
-
}
|
|
867
|
-
return await persistence.cleanup(days)
|
|
868
|
-
}
|
|
869
|
-
|
|
870
|
-
/**
|
|
871
|
-
* Lists all registered Cron schedules.
|
|
872
|
-
*
|
|
873
|
-
* @returns List of schedules.
|
|
874
|
-
*/
|
|
875
|
-
async listSchedules(): Promise<any[]> {
|
|
876
|
-
const scheduler = this.manager.getScheduler()
|
|
877
|
-
return await scheduler.list()
|
|
878
|
-
}
|
|
879
|
-
|
|
880
|
-
/**
|
|
881
|
-
* Registers a new Cron schedule.
|
|
882
|
-
*
|
|
883
|
-
* @param config - Schedule configuration.
|
|
884
|
-
*/
|
|
885
|
-
async registerSchedule(config: {
|
|
886
|
-
id: string
|
|
887
|
-
cron: string
|
|
888
|
-
queue: string
|
|
889
|
-
job: any
|
|
890
|
-
}): Promise<void> {
|
|
891
|
-
const scheduler = this.manager.getScheduler()
|
|
892
|
-
await scheduler.register(config)
|
|
893
|
-
}
|
|
894
|
-
|
|
895
|
-
/**
|
|
896
|
-
* Removes a Cron schedule.
|
|
897
|
-
*
|
|
898
|
-
* @param id - Schedule ID.
|
|
899
|
-
*/
|
|
900
|
-
async removeSchedule(id: string): Promise<void> {
|
|
901
|
-
const scheduler = this.manager.getScheduler()
|
|
902
|
-
await scheduler.remove(id)
|
|
903
|
-
}
|
|
904
|
-
|
|
905
|
-
/**
|
|
906
|
-
* Manually triggers a scheduled job immediately.
|
|
907
|
-
*
|
|
908
|
-
* @param id - Schedule ID.
|
|
909
|
-
*/
|
|
910
|
-
async runScheduleNow(id: string): Promise<void> {
|
|
911
|
-
const scheduler = this.manager.getScheduler()
|
|
912
|
-
await scheduler.runNow(id)
|
|
913
|
-
}
|
|
914
|
-
|
|
915
|
-
/**
|
|
916
|
-
* Processes schedule ticks.
|
|
917
|
-
*
|
|
918
|
-
* Should be called periodically to check for due schedules.
|
|
919
|
-
*/
|
|
920
|
-
async tickScheduler(): Promise<void> {
|
|
921
|
-
const scheduler = this.manager.getScheduler()
|
|
922
|
-
await scheduler.tick()
|
|
923
|
-
}
|
|
924
|
-
}
|