ai-database 0.1.0 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72)
  1. package/.turbo/turbo-build.log +5 -0
  2. package/.turbo/turbo-test.log +102 -0
  3. package/README.md +381 -68
  4. package/TESTING.md +410 -0
  5. package/TEST_SUMMARY.md +250 -0
  6. package/TODO.md +128 -0
  7. package/dist/ai-promise-db.d.ts +370 -0
  8. package/dist/ai-promise-db.d.ts.map +1 -0
  9. package/dist/ai-promise-db.js +839 -0
  10. package/dist/ai-promise-db.js.map +1 -0
  11. package/dist/authorization.d.ts +531 -0
  12. package/dist/authorization.d.ts.map +1 -0
  13. package/dist/authorization.js +632 -0
  14. package/dist/authorization.js.map +1 -0
  15. package/dist/durable-clickhouse.d.ts +193 -0
  16. package/dist/durable-clickhouse.d.ts.map +1 -0
  17. package/dist/durable-clickhouse.js +422 -0
  18. package/dist/durable-clickhouse.js.map +1 -0
  19. package/dist/durable-promise.d.ts +182 -0
  20. package/dist/durable-promise.d.ts.map +1 -0
  21. package/dist/durable-promise.js +409 -0
  22. package/dist/durable-promise.js.map +1 -0
  23. package/dist/execution-queue.d.ts +239 -0
  24. package/dist/execution-queue.d.ts.map +1 -0
  25. package/dist/execution-queue.js +400 -0
  26. package/dist/execution-queue.js.map +1 -0
  27. package/dist/index.d.ts +50 -191
  28. package/dist/index.d.ts.map +1 -0
  29. package/dist/index.js +79 -462
  30. package/dist/index.js.map +1 -0
  31. package/dist/linguistic.d.ts +115 -0
  32. package/dist/linguistic.d.ts.map +1 -0
  33. package/dist/linguistic.js +379 -0
  34. package/dist/linguistic.js.map +1 -0
  35. package/dist/memory-provider.d.ts +304 -0
  36. package/dist/memory-provider.d.ts.map +1 -0
  37. package/dist/memory-provider.js +785 -0
  38. package/dist/memory-provider.js.map +1 -0
  39. package/dist/schema.d.ts +899 -0
  40. package/dist/schema.d.ts.map +1 -0
  41. package/dist/schema.js +1165 -0
  42. package/dist/schema.js.map +1 -0
  43. package/dist/tests.d.ts +107 -0
  44. package/dist/tests.d.ts.map +1 -0
  45. package/dist/tests.js +568 -0
  46. package/dist/tests.js.map +1 -0
  47. package/dist/types.d.ts +972 -0
  48. package/dist/types.d.ts.map +1 -0
  49. package/dist/types.js +126 -0
  50. package/dist/types.js.map +1 -0
  51. package/package.json +37 -37
  52. package/src/ai-promise-db.ts +1243 -0
  53. package/src/authorization.ts +1102 -0
  54. package/src/durable-clickhouse.ts +596 -0
  55. package/src/durable-promise.ts +582 -0
  56. package/src/execution-queue.ts +608 -0
  57. package/src/index.test.ts +868 -0
  58. package/src/index.ts +337 -0
  59. package/src/linguistic.ts +404 -0
  60. package/src/memory-provider.test.ts +1036 -0
  61. package/src/memory-provider.ts +1119 -0
  62. package/src/schema.test.ts +1254 -0
  63. package/src/schema.ts +2296 -0
  64. package/src/tests.ts +725 -0
  65. package/src/types.ts +1177 -0
  66. package/test/README.md +153 -0
  67. package/test/edge-cases.test.ts +646 -0
  68. package/test/provider-resolution.test.ts +402 -0
  69. package/tsconfig.json +9 -0
  70. package/vitest.config.ts +19 -0
  71. package/dist/index.d.mts +0 -195
  72. package/dist/index.mjs +0 -430
@@ -0,0 +1,608 @@
1
+ /**
2
+ * Execution Queue - Manages execution priority and batching
3
+ *
4
+ * The queue decides WHEN to execute based on priority, concurrency, and batch windows.
5
+ * Operations can be executed immediately, queued, or deferred to batch processing.
6
+ *
7
+ * @packageDocumentation
8
+ */
9
+
10
+ import { Semaphore } from './memory-provider.js'
11
+ import {
12
+ DurablePromise,
13
+ setBatchScheduler,
14
+ type ExecutionPriority,
15
+ type BatchScheduler,
16
+ } from './durable-promise.js'
17
+
18
+ // =============================================================================
19
+ // Types
20
+ // =============================================================================
21
+
22
+ /**
23
+ * Configuration for the execution queue
24
+ */
25
+ export interface ExecutionQueueOptions {
26
+ /** Default execution priority */
27
+ priority?: ExecutionPriority
28
+
29
+ /** Maximum concurrent operations per priority tier */
30
+ concurrency?: {
31
+ priority?: number
32
+ standard?: number
33
+ flex?: number
34
+ batch?: number
35
+ }
36
+
37
+ /** Batch window in milliseconds (how long to accumulate before flush) */
38
+ batchWindow?: number
39
+
40
+ /** Maximum batch size before auto-flush */
41
+ maxBatchSize?: number
42
+
43
+ /** Auto-flush at process exit */
44
+ flushOnExit?: boolean
45
+ }
46
+
47
/**
 * Snapshot of queue state for monitoring, as returned by
 * `ExecutionQueue.getStats()`.
 */
export interface QueueStats {
  /** Counts by priority tier (per-tier `completed` is not yet tracked) */
  byPriority: Record<ExecutionPriority, { pending: number; active: number; completed: number }>
  /** Total counts across all tiers */
  totals: { pending: number; active: number; completed: number; failed: number }
  /** Current batch info; `nextFlush` is null when no batch timer is armed */
  batch: { size: number; nextFlush: Date | null }
}
58
+
59
/**
 * Result of submitting a batch of requests to a provider.
 */
export interface BatchSubmission {
  /** Provider-assigned batch ID, used to poll status and stream results */
  batchId: string
  /** Estimated completion time, if the provider supplies one */
  estimatedCompletion?: Date
  /** Number of requests in the batch */
  count: number
}
70
+
71
/**
 * Provider interface for batch submission.
 *
 * Registered with `ExecutionQueue.registerProvider()`; the queue routes
 * batch items to a provider by the prefix of the operation's method name
 * (e.g. 'openai.chat' -> provider 'openai').
 */
export interface BatchProvider {
  /** Provider name (used as the registry key) */
  readonly name: string
  /** Whether this provider supports a batch API; if false, the queue falls
   * back to standard execution for items routed here */
  readonly supportsBatch: boolean
  /** Whether this provider supports flex tier */
  readonly supportsFlex: boolean

  /** Submit a batch of requests */
  submitBatch(requests: BatchRequest[]): Promise<BatchSubmission>

  /** Get batch status */
  getBatchStatus(batchId: string): Promise<BatchStatus>

  /** Stream results as they complete */
  streamResults(batchId: string): AsyncIterable<BatchResult>
}
91
+
92
/**
 * A single request in a batch submission.
 */
export interface BatchRequest {
  /** Unique ID for matching results back to this request */
  customId: string
  /** The action ID (for updating status) */
  actionId: string
  /** Method being called, e.g. 'openai.chat' */
  method: string
  /** Request parameters (shape is method-specific) */
  params: unknown
}
105
+
106
/**
 * Status of a submitted batch, as reported by the provider.
 */
export interface BatchStatus {
  /** Batch ID */
  batchId: string
  /** Current lifecycle state of the batch */
  status: 'validating' | 'in_progress' | 'completed' | 'failed' | 'expired' | 'cancelled'
  /** Completion counts */
  counts: {
    total: number
    completed: number
    failed: number
  }
  /** Estimated completion time, if known */
  estimatedCompletion?: Date
  /** Error message if the batch as a whole failed */
  error?: string
}
125
+
126
/**
 * A single per-request result streamed from a batch.
 */
export interface BatchResult {
  /** Custom ID matching the originating request */
  customId: string
  /** Action ID for updating status */
  actionId: string
  /** Success or failure of this individual request */
  status: 'success' | 'error'
  /** Result data (present when status is 'success') */
  result?: unknown
  /** Error details (present when status is 'error') */
  error?: {
    code: string
    message: string
  }
}
144
+
145
+ // =============================================================================
146
+ // Queue Item
147
+ // =============================================================================
148
+
149
/** Internal bookkeeping record for a queued DurablePromise. */
interface QueueItem {
  promise: DurablePromise<unknown>
  priority: ExecutionPriority
  enqueuedAt: Date
  // NOTE(review): concurrencyKey is never read in this module — presumably
  // intended for keyed concurrency limits; verify against callers.
  concurrencyKey?: string
}
155
+
156
+ // =============================================================================
157
+ // ExecutionQueue Class
158
+ // =============================================================================
159
+
160
+ /**
161
+ * Manages execution of DurablePromises with priority-based scheduling
162
+ *
163
+ * @example
164
+ * ```ts
165
+ * const queue = new ExecutionQueue({
166
+ * priority: 'standard',
167
+ * concurrency: { standard: 10, batch: 1000 },
168
+ * batchWindow: 60000, // 1 minute
169
+ * })
170
+ *
171
+ * // Register batch providers
172
+ * queue.registerProvider(openaiProvider)
173
+ * queue.registerProvider(claudeProvider)
174
+ *
175
+ * // Queue operations
176
+ * queue.enqueue(durablePromise)
177
+ *
178
+ * // Flush batch at end of workflow
179
+ * await queue.flush()
180
+ * ```
181
+ */
182
+ export class ExecutionQueue implements BatchScheduler {
183
+ private readonly semaphores: Record<ExecutionPriority, Semaphore>
184
+ private readonly queues: Record<ExecutionPriority, QueueItem[]>
185
+ private readonly providers = new Map<string, BatchProvider>()
186
+ private readonly options: Required<ExecutionQueueOptions>
187
+
188
+ private batchTimer: ReturnType<typeof setTimeout> | null = null
189
+ private completedCount = 0
190
+ private failedCount = 0
191
+ private isProcessing = false
192
+
193
+ constructor(options: ExecutionQueueOptions = {}) {
194
+ this.options = {
195
+ priority: options.priority ?? 'standard',
196
+ concurrency: {
197
+ priority: options.concurrency?.priority ?? 50,
198
+ standard: options.concurrency?.standard ?? 20,
199
+ flex: options.concurrency?.flex ?? 10,
200
+ batch: options.concurrency?.batch ?? 1000,
201
+ },
202
+ batchWindow: options.batchWindow ?? 60000, // 1 minute default
203
+ maxBatchSize: options.maxBatchSize ?? 10000,
204
+ flushOnExit: options.flushOnExit ?? true,
205
+ }
206
+
207
+ // Initialize semaphores for each priority tier
208
+ this.semaphores = {
209
+ priority: new Semaphore(this.options.concurrency.priority!),
210
+ standard: new Semaphore(this.options.concurrency.standard!),
211
+ flex: new Semaphore(this.options.concurrency.flex!),
212
+ batch: new Semaphore(this.options.concurrency.batch!),
213
+ }
214
+
215
+ // Initialize queues
216
+ this.queues = {
217
+ priority: [],
218
+ standard: [],
219
+ flex: [],
220
+ batch: [],
221
+ }
222
+
223
+ // Register as global batch scheduler
224
+ setBatchScheduler(this)
225
+
226
+ // Setup exit handler
227
+ if (this.options.flushOnExit && typeof process !== 'undefined') {
228
+ const exitHandler = async () => {
229
+ await this.flush()
230
+ }
231
+
232
+ process.on('beforeExit', exitHandler)
233
+ process.on('SIGINT', async () => {
234
+ await exitHandler()
235
+ process.exit(0)
236
+ })
237
+ process.on('SIGTERM', async () => {
238
+ await exitHandler()
239
+ process.exit(0)
240
+ })
241
+ }
242
+ }
243
+
244
+ // ===========================================================================
245
+ // Provider Management
246
+ // ===========================================================================
247
+
248
+ /**
249
+ * Register a batch provider
250
+ */
251
+ registerProvider(provider: BatchProvider): void {
252
+ this.providers.set(provider.name, provider)
253
+ }
254
+
255
+ /**
256
+ * Get a registered provider
257
+ */
258
+ getProvider(name: string): BatchProvider | undefined {
259
+ return this.providers.get(name)
260
+ }
261
+
262
+ /**
263
+ * List registered providers
264
+ */
265
+ listProviders(): BatchProvider[] {
266
+ return Array.from(this.providers.values())
267
+ }
268
+
269
+ // ===========================================================================
270
+ // Queue Operations
271
+ // ===========================================================================
272
+
273
+ /**
274
+ * Add a promise to the execution queue
275
+ */
276
+ enqueue(promise: DurablePromise<unknown>): void {
277
+ const item: QueueItem = {
278
+ promise,
279
+ priority: promise.priority,
280
+ enqueuedAt: new Date(),
281
+ }
282
+
283
+ this.queues[promise.priority].push(item)
284
+
285
+ // For batch, start the window timer
286
+ if (promise.priority === 'batch') {
287
+ this.startBatchTimer()
288
+
289
+ // Check for auto-flush on size
290
+ if (this.queues.batch.length >= this.options.maxBatchSize) {
291
+ this.flush()
292
+ }
293
+ } else {
294
+ // For other priorities, process immediately
295
+ this.processQueue(promise.priority)
296
+ }
297
+ }
298
+
299
+ private startBatchTimer(): void {
300
+ if (this.batchTimer) return
301
+
302
+ this.batchTimer = setTimeout(async () => {
303
+ this.batchTimer = null
304
+ await this.flush()
305
+ }, this.options.batchWindow)
306
+ }
307
+
308
+ private async processQueue(priority: ExecutionPriority): Promise<void> {
309
+ if (this.isProcessing) return
310
+ this.isProcessing = true
311
+
312
+ const queue = this.queues[priority]
313
+ const semaphore = this.semaphores[priority]
314
+
315
+ try {
316
+ while (queue.length > 0) {
317
+ const item = queue.shift()
318
+ if (!item) break
319
+
320
+ // Run with concurrency control
321
+ // The promise will execute itself; we just track completion
322
+ semaphore.run(async () => {
323
+ try {
324
+ await item.promise
325
+ this.completedCount++
326
+ } catch {
327
+ this.failedCount++
328
+ }
329
+ })
330
+ }
331
+ } finally {
332
+ this.isProcessing = false
333
+ }
334
+ }
335
+
336
+ // ===========================================================================
337
+ // Batch Operations
338
+ // ===========================================================================
339
+
340
+ /**
341
+ * Flush all pending batch operations
342
+ */
343
+ async flush(): Promise<void> {
344
+ // Clear the timer
345
+ if (this.batchTimer) {
346
+ clearTimeout(this.batchTimer)
347
+ this.batchTimer = null
348
+ }
349
+
350
+ const batchItems = [...this.queues.batch]
351
+ this.queues.batch = []
352
+
353
+ if (batchItems.length === 0) return
354
+
355
+ // Group by method/provider
356
+ const groups = this.groupByProvider(batchItems)
357
+
358
+ // Submit each group to its provider
359
+ const submissions = await Promise.all(
360
+ Array.from(groups.entries()).map(async ([providerName, items]) => {
361
+ const provider = this.providers.get(providerName)
362
+ if (!provider) {
363
+ // Fallback to standard execution if no provider
364
+ return this.executeFallback(items)
365
+ }
366
+
367
+ return this.submitToBatchProvider(provider, items)
368
+ })
369
+ )
370
+
371
+ // Wait for all batch results
372
+ await Promise.all(
373
+ submissions.map((submission) => {
374
+ if (submission && 'batchId' in submission) {
375
+ return this.pollBatchCompletion(submission)
376
+ }
377
+ return Promise.resolve()
378
+ })
379
+ )
380
+ }
381
+
382
+ private groupByProvider(items: QueueItem[]): Map<string, QueueItem[]> {
383
+ const groups = new Map<string, QueueItem[]>()
384
+
385
+ for (const item of items) {
386
+ // Determine provider from method prefix
387
+ const providerName = this.getProviderFromMethod(item.promise.method)
388
+ const existing = groups.get(providerName) || []
389
+ existing.push(item)
390
+ groups.set(providerName, existing)
391
+ }
392
+
393
+ return groups
394
+ }
395
+
396
+ private getProviderFromMethod(method: string): string {
397
+ // Extract provider from method like 'openai.chat' -> 'openai'
398
+ const parts = method.split('.')
399
+ return parts[0] || 'default'
400
+ }
401
+
402
+ private async submitToBatchProvider(
403
+ provider: BatchProvider,
404
+ items: QueueItem[]
405
+ ): Promise<BatchSubmission | null> {
406
+ if (!provider.supportsBatch) {
407
+ await this.executeFallback(items)
408
+ return null
409
+ }
410
+
411
+ const requests: BatchRequest[] = items.map((item) => ({
412
+ customId: crypto.randomUUID(),
413
+ actionId: item.promise.actionId,
414
+ method: item.promise.method,
415
+ params: {}, // Would need to extract from promise
416
+ }))
417
+
418
+ try {
419
+ return await provider.submitBatch(requests)
420
+ } catch (error) {
421
+ console.error(`Batch submission failed for ${provider.name}:`, error)
422
+ await this.executeFallback(items)
423
+ return null
424
+ }
425
+ }
426
+
427
+ private async executeFallback(items: QueueItem[]): Promise<void> {
428
+ // Execute as standard priority
429
+ for (const item of items) {
430
+ this.queues.standard.push(item)
431
+ }
432
+ await this.processQueue('standard')
433
+ }
434
+
435
+ private async pollBatchCompletion(submission: BatchSubmission): Promise<void> {
436
+ // This would be implemented by the specific provider
437
+ // For now, just log
438
+ console.log(`Batch ${submission.batchId} submitted with ${submission.count} requests`)
439
+
440
+ // In production, this would poll getBatchStatus and stream results
441
+ }
442
+
443
+ // ===========================================================================
444
+ // Configuration
445
+ // ===========================================================================
446
+
447
+ /**
448
+ * Set the default priority for new operations
449
+ */
450
+ setPriority(priority: ExecutionPriority): void {
451
+ this.options.priority = priority
452
+ }
453
+
454
+ /**
455
+ * Set concurrency limit for a priority tier
456
+ */
457
+ setConcurrency(priority: ExecutionPriority, limit: number): void {
458
+ this.options.concurrency[priority] = limit
459
+ // Re-create the semaphore (existing operations continue with old limit)
460
+ this.semaphores[priority] = new Semaphore(limit)
461
+ }
462
+
463
+ /**
464
+ * Set the batch window (how long to accumulate before auto-flush)
465
+ */
466
+ setBatchWindow(ms: number): void {
467
+ this.options.batchWindow = ms
468
+ }
469
+
470
+ /**
471
+ * Set max batch size before auto-flush
472
+ */
473
+ setMaxBatchSize(size: number): void {
474
+ this.options.maxBatchSize = size
475
+ }
476
+
477
+ // ===========================================================================
478
+ // Stats
479
+ // ===========================================================================
480
+
481
+ /**
482
+ * Get count of pending operations
483
+ */
484
+ get pending(): number {
485
+ return (
486
+ this.queues.priority.length +
487
+ this.queues.standard.length +
488
+ this.queues.flex.length +
489
+ this.queues.batch.length
490
+ )
491
+ }
492
+
493
+ /**
494
+ * Get count of active operations
495
+ */
496
+ get active(): number {
497
+ return (
498
+ this.semaphores.priority.active +
499
+ this.semaphores.standard.active +
500
+ this.semaphores.flex.active +
501
+ this.semaphores.batch.active
502
+ )
503
+ }
504
+
505
+ /**
506
+ * Get count of completed operations
507
+ */
508
+ get completed(): number {
509
+ return this.completedCount
510
+ }
511
+
512
+ /**
513
+ * Get full queue statistics
514
+ */
515
+ getStats(): QueueStats {
516
+ return {
517
+ byPriority: {
518
+ priority: {
519
+ pending: this.queues.priority.length,
520
+ active: this.semaphores.priority.active,
521
+ completed: 0, // Would need per-tier tracking
522
+ },
523
+ standard: {
524
+ pending: this.queues.standard.length,
525
+ active: this.semaphores.standard.active,
526
+ completed: 0,
527
+ },
528
+ flex: {
529
+ pending: this.queues.flex.length,
530
+ active: this.semaphores.flex.active,
531
+ completed: 0,
532
+ },
533
+ batch: {
534
+ pending: this.queues.batch.length,
535
+ active: this.semaphores.batch.active,
536
+ completed: 0,
537
+ },
538
+ },
539
+ totals: {
540
+ pending: this.pending,
541
+ active: this.active,
542
+ completed: this.completedCount,
543
+ failed: this.failedCount,
544
+ },
545
+ batch: {
546
+ size: this.queues.batch.length,
547
+ nextFlush: this.batchTimer
548
+ ? new Date(Date.now() + this.options.batchWindow)
549
+ : null,
550
+ },
551
+ }
552
+ }
553
+
554
+ // ===========================================================================
555
+ // Cleanup
556
+ // ===========================================================================
557
+
558
+ /**
559
+ * Stop the queue and clear all pending operations
560
+ */
561
+ destroy(): void {
562
+ if (this.batchTimer) {
563
+ clearTimeout(this.batchTimer)
564
+ this.batchTimer = null
565
+ }
566
+
567
+ this.queues.priority = []
568
+ this.queues.standard = []
569
+ this.queues.flex = []
570
+ this.queues.batch = []
571
+
572
+ setBatchScheduler(null)
573
+ }
574
+ }
575
+
576
+ // =============================================================================
577
+ // Factory
578
+ // =============================================================================
579
+
580
+ /**
581
+ * Create an execution queue
582
+ */
583
+ export function createExecutionQueue(options?: ExecutionQueueOptions): ExecutionQueue {
584
+ return new ExecutionQueue(options)
585
+ }
586
+
587
+ // =============================================================================
588
+ // Default Instance
589
+ // =============================================================================
590
+
591
+ let defaultQueue: ExecutionQueue | null = null
592
+
593
+ /**
594
+ * Get or create the default execution queue
595
+ */
596
+ export function getDefaultQueue(): ExecutionQueue {
597
+ if (!defaultQueue) {
598
+ defaultQueue = createExecutionQueue()
599
+ }
600
+ return defaultQueue
601
+ }
602
+
603
/**
 * Replace the default execution queue (pass null to clear it so the next
 * getDefaultQueue() call creates a fresh one). The previous queue is NOT
 * destroyed by this call.
 */
export function setDefaultQueue(queue: ExecutionQueue | null): void {
  defaultQueue = queue
}