@elsium-ai/observe 0.7.0 → 0.9.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -19,8 +19,10 @@ npm install @elsium-ai/observe @elsium-ai/core
19
19
  | **Cost Engine** | `createCostEngine`, `registerModelTier`, `CostEngine`, `CostEngineConfig`, `BudgetConfig`, `LoopDetectionConfig`, `CostAlert`, `CostDimension`, `CostIntelligenceReport`, `ModelSuggestion`, `ModelTierEntry` | Budget enforcement, cost projections, loop detection, and model optimization suggestions |
20
20
  | **Tracer** | `observe`, `Tracer`, `TracerConfig`, `TracerOutput`, `TracerExporter`, `CostReport` | High-level tracing with sampling, console output, and custom exporters |
21
21
  | **Metrics** | `createMetrics`, `MetricsCollector`, `MetricEntry` | Counters, gauges, and histograms for application-level metrics |
22
- | **Audit Trail** | `createAuditTrail`, `auditMiddleware`, `AuditEventType`, `AuditEvent`, `AuditStorageAdapter`, `AuditQueryFilter`, `AuditIntegrityResult`, `AuditTrailConfig`, `AuditTrail` | SHA-256 hash-chained audit events with tamper detection and middleware |
22
+ | **Audit Trail** | `createAuditTrail`, `auditMiddleware`, `auditStreamMiddleware`, `AuditEventType`, `AuditEvent`, `AuditStorageAdapter`, `AuditQueryFilter`, `AuditIntegrityResult`, `AuditTrailConfig`, `AuditTrail` | SHA-256 hash-chained audit events with tamper detection, middleware for both completion and streaming calls |
23
+ | **Audit Sinks** | `createSinkManager`, `createWebhookSink`, `createSplunkSink`, `createDatadogSink`, `AuditSink`, `AuditSinkRetryConfig`, `SinkManagerConfig`, `SinkManager`, `WebhookSinkConfig`, `SplunkSinkConfig`, `DatadogSinkConfig` | Export audit events to external systems (webhooks, Splunk, Datadog) with batching, retry, per-sink filtering, and dead letter queue |
23
24
  | **Provenance** | `createProvenanceTracker`, `ProvenanceRecord`, `ProvenanceTracker` | Full lineage tracking per output: prompt, model, config, input, output |
25
+ | **Studio Exporter** | `createStudioExporter`, `StudioExporter`, `StudioExporterConfig` | Write traces, X-Ray, and costs to `.elsium/` for the `elsium studio` dashboard |
24
26
  | **OpenTelemetry** | `toOTelSpan`, `toOTelExportRequest`, `toTraceparent`, `parseTraceparent`, `injectTraceContext`, `extractTraceContext`, `createOTLPExporter`, `OTelSpan`, `OTelSpanKind`, `OTelStatusCode`, `OTelAttribute`, `OTelAttributeValue`, `OTelEvent`, `OTelResource`, `OTelExportRequest`, `TraceContext`, `OTLPExporterConfig` | W3C Trace Context propagation, OTel span conversion, and OTLP JSON export |
25
27
 
26
28
  ---
@@ -646,6 +648,8 @@ interface AuditTrailConfig {
646
648
  hashChain?: boolean
647
649
  maxEvents?: number
648
650
  batch?: AuditBatchConfig
651
+ sinks?: AuditSink[] | SinkManagerConfig
652
+ context?: Record<string, unknown>
649
653
  onError?: (error: unknown) => void
650
654
  }
651
655
  ```
@@ -663,7 +667,7 @@ interface AuditTrail {
663
667
  query(filter: AuditQueryFilter): Promise<AuditEvent[]>
664
668
  verifyIntegrity(): Promise<AuditIntegrityResult>
665
669
  flush(): Promise<void>
666
- dispose(): void
670
+ dispose(): Promise<void>
667
671
  readonly count: number
668
672
  readonly pending: number
669
673
  }
@@ -676,7 +680,7 @@ interface AuditTrail {
676
680
  | `query()` | Query events by type, actor, traceId, or timestamp range. Auto-flushes pending events in batched mode. |
677
681
  | `verifyIntegrity()` | Verify the hash chain has not been tampered with. Auto-flushes pending events in batched mode. |
678
682
  | `flush()` | Drain all pending events — computes hashes and writes to storage. No-op when not in batched mode. |
679
- | `dispose()` | Stop the flush timer and drain remaining events. Call this on shutdown. |
683
+ | `dispose()` | Stop the flush timer, drain remaining events, await pending storage writes, and shut down sinks. Returns a promise — await it on shutdown. |
680
684
  | `count` | Total events (stored + pending). |
681
685
  | `pending` | Number of buffered events not yet flushed (0 when not in batched mode). |
682
686
 
@@ -694,6 +698,8 @@ function createAuditTrail(config?: AuditTrailConfig): AuditTrail
694
698
  | `config.hashChain` | `boolean` | `true` | Enable SHA-256 hash chaining for tamper detection |
695
699
  | `config.maxEvents` | `number` | `10000` | Maximum events retained (ring buffer — O(1) eviction) |
696
700
  | `config.batch` | `AuditBatchConfig` | `undefined` | Enable batched mode for high-volume scenarios |
701
+ | `config.sinks` | `AuditSink[] \| SinkManagerConfig` | `undefined` | Export events to external systems (webhooks, SIEM, etc.) |
702
+ | `config.context` | `Record<string, unknown>` | `undefined` | Global fields merged into every event's `data` (e.g. environment, service name, version) |
697
703
  | `config.onError` | `(error: unknown) => void` | `undefined` | Error handler for async storage failures |
698
704
 
699
705
  **Returns:** `AuditTrail`
@@ -748,12 +754,32 @@ audit.log('llm_call', { model: 'gpt-4o', tokens: 100 })
748
754
  // Force-flush before reading (query/verifyIntegrity auto-flush)
749
755
  await audit.flush()
750
756
 
751
- // Clean up on shutdown
752
- process.on('SIGTERM', () => audit.dispose())
757
+ // Clean up on shutdown (awaits pending writes and sink delivery)
758
+ process.on('SIGTERM', async () => await audit.dispose())
753
759
  ```
754
760
 
755
761
  Hash chain integrity is fully preserved — events are hashed sequentially during flush, not during `log()`.
756
762
 
763
+ #### Context enrichment
764
+
765
+ Add global fields to every event without repeating them at each call site:
766
+
767
+ ```ts
768
+ const audit = createAuditTrail({
769
+ context: {
770
+ env: 'production',
771
+ service: 'my-ai-service',
772
+ version: '1.2.0',
773
+ region: 'us-east-1',
774
+ },
775
+ })
776
+
777
+ audit.log('llm_call', { model: 'gpt-4o', tokens: 100 })
778
+ // event.data → { env: 'production', service: 'my-ai-service', version: '1.2.0', region: 'us-east-1', model: 'gpt-4o', tokens: 100 }
779
+ ```
780
+
781
+ Event-specific data always takes precedence over global context fields.
782
+
757
783
  ### `auditMiddleware()`
758
784
 
759
785
  Creates an ElsiumAI middleware that automatically logs every LLM call (success or failure) to the given audit trail.
@@ -780,6 +806,250 @@ const middleware = auditMiddleware(audit)
780
806
 
781
807
  The middleware automatically records `llm_call` events containing `provider`, `model`, `inputTokens`, `outputTokens`, `totalTokens`, `cost`, `latencyMs`, and `stopReason`. On errors, it records the error message and `success: false`.
782
808
 
809
+ ### `auditStreamMiddleware()`
810
+
811
+ Creates a `StreamMiddleware` that audits streaming LLM calls. Intercepts `message_end` events to capture token usage and latency, and `error` events to capture failures — all without buffering or blocking the stream.
812
+
813
+ ```ts
814
+ function auditStreamMiddleware(auditTrail: AuditTrail): StreamMiddleware
815
+ ```
816
+
817
+ | Parameter | Type | Description |
818
+ |---|---|---|
819
+ | `auditTrail` | `AuditTrail` | The audit trail instance to log events to |
820
+
821
+ **Returns:** `StreamMiddleware` (from `@elsium-ai/core`)
822
+
823
+ ```ts
824
+ import { createAuditTrail, auditStreamMiddleware } from '@elsium-ai/observe'
825
+
826
+ const audit = createAuditTrail({ hashChain: true })
827
+ const streamMw = auditStreamMiddleware(audit)
828
+
829
+ // Use with an ElsiumAI gateway
830
+ // gateway.use({ streamMiddleware: [streamMw] })
831
+ ```
832
+
833
+ The middleware records `llm_call` events with `provider`, `model`, `inputTokens`, `outputTokens`, `totalTokens`, `latencyMs`, `stopReason`, and `streaming: true`. On errors, it records the error message and `success: false`.
834
+
835
+ ---
836
+
837
+ ## Audit Sinks
838
+
839
+ Export audit events to external observability and governance systems. Sinks receive finalized `AuditEvent` objects after they are hashed and stored — a sink failure never blocks the audit trail or other sinks.
840
+
841
+ ### `AuditSink`
842
+
843
+ ```ts
844
+ interface AuditSink {
845
+ name: string
846
+ filter?: (event: AuditEvent) => boolean
847
+ send(events: AuditEvent[]): Promise<void>
848
+ shutdown?(): Promise<void>
849
+ }
850
+ ```
851
+
852
+ The optional `filter` function controls which events a sink receives. When set, only events for which the filter returns `true` are sent to that sink. Sinks without a filter receive all events.
853
+
854
+ ### `SinkManagerConfig`
855
+
856
+ ```ts
857
+ interface SinkManagerConfig {
858
+ sinks: AuditSink[]
859
+ batch?: {
860
+ size?: number // default: 50
861
+ intervalMs?: number // default: 5000
862
+ }
863
+ retry?: AuditSinkRetryConfig
864
+ maxBufferSize?: number // default: 10000
865
+ deadLetterSink?: AuditSink
866
+ onError?: (sinkName: string, error: unknown) => void
867
+ }
868
+ ```
869
+
870
+ The `deadLetterSink` receives events that a sink failed to deliver after all retry attempts. This prevents data loss for compliance-critical audit events.
871
+
872
+ ### `AuditSinkRetryConfig`
873
+
874
+ ```ts
875
+ interface AuditSinkRetryConfig {
876
+ maxRetries?: number // default: 3
877
+ baseDelayMs?: number // default: 1000
878
+ maxDelayMs?: number // default: 30000
879
+ }
880
+ ```
881
+
882
+ ### Quick start
883
+
884
+ Pass sinks directly to `createAuditTrail()`:
885
+
886
+ ```ts
887
+ import { createAuditTrail, createWebhookSink, createSplunkSink } from '@elsium-ai/observe'
888
+
889
+ const audit = createAuditTrail({
890
+ sinks: [
891
+ createWebhookSink({ url: 'https://hooks.example.com/audit' }),
892
+ createSplunkSink({
893
+ url: 'https://splunk:8088/services/collector',
894
+ token: 'your-hec-token',
895
+ }),
896
+ ],
897
+ })
898
+
899
+ audit.log('security_violation', { threat: 'prompt_injection', score: 0.95 })
900
+ ```
901
+
902
+ For advanced configuration, pass a `SinkManagerConfig`:
903
+
904
+ ```ts
905
+ const audit = createAuditTrail({
906
+ sinks: {
907
+ sinks: [createWebhookSink({ url: 'https://hooks.example.com/audit' })],
908
+ batch: { size: 100, intervalMs: 10_000 },
909
+ retry: { maxRetries: 5, baseDelayMs: 2000 },
910
+ maxBufferSize: 50_000,
911
+ onError: (sinkName, error) => {
912
+ console.error(`Sink ${sinkName} failed:`, error)
913
+ },
914
+ },
915
+ })
916
+ ```
917
+
918
+ #### Per-sink filtering
919
+
920
+ Route different event types to different destinations:
921
+
922
+ ```ts
923
+ import { createAuditTrail, createSplunkSink, createDatadogSink } from '@elsium-ai/observe'
924
+
925
+ const audit = createAuditTrail({
926
+ sinks: [
927
+ {
928
+ ...createSplunkSink({ url: 'https://splunk:8088/services/collector', token: 'tok' }),
929
+ filter: (event) => event.type === 'security_violation' || event.type === 'auth_event',
930
+ },
931
+ createDatadogSink({ apiKey: process.env.DD_API_KEY! }),
932
+ ],
933
+ })
934
+ ```
935
+
936
+ The Splunk sink only receives security and auth events; the Datadog sink receives everything.
937
+
938
+ #### Dead letter queue
939
+
940
+ Prevent data loss when sinks fail after all retry attempts:
941
+
942
+ ```ts
943
+ const audit = createAuditTrail({
944
+ sinks: {
945
+ sinks: [createWebhookSink({ url: 'https://hooks.example.com/audit' })],
946
+ deadLetterSink: createWebhookSink({ url: 'https://dlq.example.com/audit-dlq' }),
947
+ retry: { maxRetries: 3 },
948
+ },
949
+ })
950
+ ```
951
+
952
+ Events that cannot be delivered after exhausting retries are sent to the dead letter sink for later replay.
953
+
954
+ ### `createWebhookSink()`
955
+
956
+ Generic HTTP webhook sink. Sends `{ events: AuditEvent[] }` as JSON.
957
+
958
+ ```ts
959
+ function createWebhookSink(config: WebhookSinkConfig): AuditSink
960
+ ```
961
+
962
+ ```ts
963
+ interface WebhookSinkConfig {
964
+ url: string
965
+ headers?: Record<string, string>
966
+ method?: 'POST' | 'PUT'
967
+ timeoutMs?: number // default: 10000
968
+ }
969
+ ```
970
+
971
+ ```ts
972
+ createWebhookSink({
973
+ url: 'https://hooks.example.com/audit',
974
+ headers: { Authorization: 'Bearer token123' },
975
+ })
976
+ ```
977
+
978
+ ### `createSplunkSink()`
979
+
980
+ Sends events to Splunk HTTP Event Collector (HEC) as newline-delimited JSON.
981
+
982
+ ```ts
983
+ function createSplunkSink(config: SplunkSinkConfig): AuditSink
984
+ ```
985
+
986
+ ```ts
987
+ interface SplunkSinkConfig {
988
+ url: string
989
+ token: string
990
+ index?: string
991
+ source?: string // default: 'elsium-ai'
992
+ sourcetype?: string // default: 'elsium:audit'
993
+ timeoutMs?: number // default: 10000
994
+ }
995
+ ```
996
+
997
+ ```ts
998
+ createSplunkSink({
999
+ url: 'https://splunk:8088/services/collector',
1000
+ token: 'your-hec-token',
1001
+ index: 'ai_audit',
1002
+ })
1003
+ ```
1004
+
1005
+ ### `createDatadogSink()`
1006
+
1007
+ Sends events to the Datadog Log Intake API (v2).
1008
+
1009
+ ```ts
1010
+ function createDatadogSink(config: DatadogSinkConfig): AuditSink
1011
+ ```
1012
+
1013
+ ```ts
1014
+ interface DatadogSinkConfig {
1015
+ apiKey: string
1016
+ site?: string // default: 'datadoghq.com'
1017
+ service?: string // default: 'elsium-ai'
1018
+ source?: string // default: 'elsium-ai-audit'
1019
+ tags?: Record<string, string>
1020
+ timeoutMs?: number // default: 10000
1021
+ }
1022
+ ```
1023
+
1024
+ ```ts
1025
+ createDatadogSink({
1026
+ apiKey: process.env.DD_API_KEY!,
1027
+ site: 'datadoghq.eu',
1028
+ tags: { env: 'production', team: 'platform' },
1029
+ })
1030
+ ```
1031
+
1032
+ ### Custom sinks
1033
+
1034
+ Implement the `AuditSink` interface to send events anywhere:
1035
+
1036
+ ```ts
1037
+ import type { AuditSink, AuditEvent } from '@elsium-ai/observe'
1038
+
1039
+ const kafkaSink: AuditSink = {
1040
+ name: 'kafka',
1041
+ async send(events: AuditEvent[]) {
1042
+ await producer.send({
1043
+ topic: 'audit-events',
1044
+ messages: events.map(e => ({ value: JSON.stringify(e) })),
1045
+ })
1046
+ },
1047
+ async shutdown() {
1048
+ await producer.disconnect()
1049
+ },
1050
+ }
1051
+ ```
1052
+
783
1053
  ---
784
1054
 
785
1055
  ## Provenance
@@ -1279,6 +1549,52 @@ const loaded = await store.load(results.id)
1279
1549
 
1280
1550
  ---
1281
1551
 
1552
+ ## Studio Exporter
1553
+
1554
+ Bridges the observe system to the `.elsium/` directory so the `elsium studio` dashboard can display live traces, X-Ray data, and costs.
1555
+
1556
+ ### `createStudioExporter()`
1557
+
1558
+ ```ts
1559
+ function createStudioExporter(config?: StudioExporterConfig): StudioExporter
1560
+ ```
1561
+
1562
+ ```ts
1563
+ interface StudioExporterConfig {
1564
+ dir?: string // default: '.elsium'
1565
+ }
1566
+
1567
+ interface StudioExporter extends TracerExporter {
1568
+ writeXRayEntry(entry: Record<string, unknown>): void
1569
+ writeCostReport(report: CostReport): void
1570
+ }
1571
+ ```
1572
+
1573
+ | Parameter | Type | Default | Description |
1574
+ |---|---|---|---|
1575
+ | `config.dir` | `string` | `'.elsium'` | Directory where trace, X-Ray, and cost files are written |
1576
+
1577
+ **Returns:** `StudioExporter` (implements `TracerExporter` + X-Ray/cost writers)
1578
+
1579
+ ```ts
1580
+ import { observe, createStudioExporter } from '@elsium-ai/observe'
1581
+
1582
+ const studio = createStudioExporter()
1583
+
1584
+ const tracer = observe({ output: [studio] })
1585
+
1586
+ const span = tracer.startSpan('my-operation', 'llm')
1587
+ span.end()
1588
+ await tracer.flush()
1589
+
1590
+ studio.writeXRayEntry({ traceId: 'trc_123', provider: 'anthropic', model: 'claude-sonnet-4-6' })
1591
+ studio.writeCostReport(tracer.getCostReport())
1592
+ ```
1593
+
1594
+ Then run `elsium studio` in another terminal to see the data in the web dashboard.
1595
+
1596
+ ---
1597
+
1282
1598
  ## Auto-Instrumentation
1283
1599
 
1284
1600
  ### `instrumentComplete`
@@ -0,0 +1,11 @@
1
+ import type { AuditSink } from './audit-sink';
2
+ export interface DatadogSinkConfig {
3
+ apiKey: string;
4
+ site?: string;
5
+ service?: string;
6
+ source?: string;
7
+ tags?: Record<string, string>;
8
+ timeoutMs?: number;
9
+ }
10
+ export declare function createDatadogSink(config: DatadogSinkConfig): AuditSink;
11
+ //# sourceMappingURL=audit-sink-datadog.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"audit-sink-datadog.d.ts","sourceRoot":"","sources":["../src/audit-sink-datadog.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAA;AAE7C,MAAM,WAAW,iBAAiB;IACjC,MAAM,EAAE,MAAM,CAAA;IACd,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,OAAO,CAAC,EAAE,MAAM,CAAA;IAChB,MAAM,CAAC,EAAE,MAAM,CAAA;IACf,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;IAC7B,SAAS,CAAC,EAAE,MAAM,CAAA;CAClB;AAmCD,wBAAgB,iBAAiB,CAAC,MAAM,EAAE,iBAAiB,GAAG,SAAS,CA+CtE"}
@@ -0,0 +1,11 @@
1
+ import type { AuditSink } from './audit-sink';
2
+ export interface SplunkSinkConfig {
3
+ url: string;
4
+ token: string;
5
+ index?: string;
6
+ source?: string;
7
+ sourcetype?: string;
8
+ timeoutMs?: number;
9
+ }
10
+ export declare function createSplunkSink(config: SplunkSinkConfig): AuditSink;
11
+ //# sourceMappingURL=audit-sink-splunk.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"audit-sink-splunk.d.ts","sourceRoot":"","sources":["../src/audit-sink-splunk.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAA;AAE7C,MAAM,WAAW,gBAAgB;IAChC,GAAG,EAAE,MAAM,CAAA;IACX,KAAK,EAAE,MAAM,CAAA;IACb,KAAK,CAAC,EAAE,MAAM,CAAA;IACd,MAAM,CAAC,EAAE,MAAM,CAAA;IACf,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,SAAS,CAAC,EAAE,MAAM,CAAA;CAClB;AAiBD,wBAAgB,gBAAgB,CAAC,MAAM,EAAE,gBAAgB,GAAG,SAAS,CAoCpE"}
@@ -0,0 +1,9 @@
1
+ import type { AuditSink } from './audit-sink';
2
+ export interface WebhookSinkConfig {
3
+ url: string;
4
+ headers?: Record<string, string>;
5
+ method?: 'POST' | 'PUT';
6
+ timeoutMs?: number;
7
+ }
8
+ export declare function createWebhookSink(config: WebhookSinkConfig): AuditSink;
9
+ //# sourceMappingURL=audit-sink-webhook.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"audit-sink-webhook.d.ts","sourceRoot":"","sources":["../src/audit-sink-webhook.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAA;AAE7C,MAAM,WAAW,iBAAiB;IACjC,GAAG,EAAE,MAAM,CAAA;IACX,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;IAChC,MAAM,CAAC,EAAE,MAAM,GAAG,KAAK,CAAA;IACvB,SAAS,CAAC,EAAE,MAAM,CAAA;CAClB;AAED,wBAAgB,iBAAiB,CAAC,MAAM,EAAE,iBAAiB,GAAG,SAAS,CAkCtE"}
@@ -0,0 +1,30 @@
1
+ import type { AuditEvent } from './audit';
2
+ export interface AuditSink {
3
+ name: string;
4
+ filter?: (event: AuditEvent) => boolean;
5
+ send(events: AuditEvent[]): Promise<void>;
6
+ shutdown?(): Promise<void>;
7
+ }
8
+ export interface AuditSinkRetryConfig {
9
+ maxRetries?: number;
10
+ baseDelayMs?: number;
11
+ maxDelayMs?: number;
12
+ }
13
+ export interface SinkManagerConfig {
14
+ sinks: AuditSink[];
15
+ batch?: {
16
+ size?: number;
17
+ intervalMs?: number;
18
+ };
19
+ retry?: AuditSinkRetryConfig;
20
+ maxBufferSize?: number;
21
+ deadLetterSink?: AuditSink;
22
+ onError?: (sinkName: string, error: unknown) => void;
23
+ }
24
+ export interface SinkManager {
25
+ dispatch(event: AuditEvent): void;
26
+ flush(): Promise<void>;
27
+ shutdown(): Promise<void>;
28
+ }
29
+ export declare function createSinkManager(config: SinkManagerConfig): SinkManager;
30
+ //# sourceMappingURL=audit-sink.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"audit-sink.d.ts","sourceRoot":"","sources":["../src/audit-sink.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,SAAS,CAAA;AAIzC,MAAM,WAAW,SAAS;IACzB,IAAI,EAAE,MAAM,CAAA;IACZ,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,UAAU,KAAK,OAAO,CAAA;IACvC,IAAI,CAAC,MAAM,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IACzC,QAAQ,CAAC,IAAI,OAAO,CAAC,IAAI,CAAC,CAAA;CAC1B;AAED,MAAM,WAAW,oBAAoB;IACpC,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,WAAW,CAAC,EAAE,MAAM,CAAA;IACpB,UAAU,CAAC,EAAE,MAAM,CAAA;CACnB;AAED,MAAM,WAAW,iBAAiB;IACjC,KAAK,EAAE,SAAS,EAAE,CAAA;IAClB,KAAK,CAAC,EAAE;QACP,IAAI,CAAC,EAAE,MAAM,CAAA;QACb,UAAU,CAAC,EAAE,MAAM,CAAA;KACnB,CAAA;IACD,KAAK,CAAC,EAAE,oBAAoB,CAAA;IAC5B,aAAa,CAAC,EAAE,MAAM,CAAA;IACtB,cAAc,CAAC,EAAE,SAAS,CAAA;IAC1B,OAAO,CAAC,EAAE,CAAC,QAAQ,EAAE,MAAM,EAAE,KAAK,EAAE,OAAO,KAAK,IAAI,CAAA;CACpD;AAED,MAAM,WAAW,WAAW;IAC3B,QAAQ,CAAC,KAAK,EAAE,UAAU,GAAG,IAAI,CAAA;IACjC,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC,CAAA;IACtB,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC,CAAA;CACzB;AAsDD,wBAAgB,iBAAiB,CAAC,MAAM,EAAE,iBAAiB,GAAG,WAAW,CAmExE"}
package/dist/audit.d.ts CHANGED
@@ -1,4 +1,5 @@
1
- import type { Middleware } from '@elsium-ai/core';
1
+ import type { Middleware, StreamMiddleware } from '@elsium-ai/core';
2
+ import type { AuditSink, SinkManagerConfig } from './audit-sink';
2
3
  export type AuditEventType = 'llm_call' | 'tool_execution' | 'security_violation' | 'budget_alert' | 'policy_violation' | 'auth_event' | 'approval_request' | 'approval_decision' | 'config_change' | 'provider_failover' | 'circuit_breaker_state_change';
3
4
  export interface AuditEvent {
4
5
  id: string;
@@ -42,6 +43,8 @@ export interface AuditTrailConfig {
42
43
  hashChain?: boolean;
43
44
  maxEvents?: number;
44
45
  batch?: AuditBatchConfig;
46
+ sinks?: AuditSink[] | SinkManagerConfig;
47
+ context?: Record<string, unknown>;
45
48
  onError?: (error: unknown) => void;
46
49
  }
47
50
  export interface AuditTrail {
@@ -54,10 +57,11 @@ export interface AuditTrail {
54
57
  query(filter: AuditQueryFilter): Promise<AuditEvent[]>;
55
58
  verifyIntegrity(): Promise<AuditIntegrityResult>;
56
59
  flush(): Promise<void>;
57
- dispose(): void;
60
+ dispose(): Promise<void>;
58
61
  readonly count: number;
59
62
  readonly pending: number;
60
63
  }
61
64
  export declare function createAuditTrail(config?: AuditTrailConfig): AuditTrail;
62
65
  export declare function auditMiddleware(auditTrail: AuditTrail): Middleware;
66
+ export declare function auditStreamMiddleware(auditTrail: AuditTrail): StreamMiddleware;
63
67
  //# sourceMappingURL=audit.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"audit.d.ts","sourceRoot":"","sources":["../src/audit.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,UAAU,EAAqC,MAAM,iBAAiB,CAAA;AAEpF,MAAM,MAAM,cAAc,GACvB,UAAU,GACV,gBAAgB,GAChB,oBAAoB,GACpB,cAAc,GACd,kBAAkB,GAClB,YAAY,GACZ,kBAAkB,GAClB,mBAAmB,GACnB,eAAe,GACf,mBAAmB,GACnB,8BAA8B,CAAA;AAEjC,MAAM,WAAW,UAAU;IAC1B,EAAE,EAAE,MAAM,CAAA;IACV,UAAU,EAAE,MAAM,CAAA;IAClB,IAAI,EAAE,cAAc,CAAA;IACpB,SAAS,EAAE,MAAM,CAAA;IACjB,KAAK,CAAC,EAAE,MAAM,CAAA;IACd,OAAO,CAAC,EAAE,MAAM,CAAA;IAChB,IAAI,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA;IAC7B,IAAI,EAAE,MAAM,CAAA;IACZ,YAAY,EAAE,MAAM,CAAA;CACpB;AAED,MAAM,WAAW,mBAAmB;IACnC,MAAM,CAAC,KAAK,EAAE,UAAU,GAAG,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IAC/C,KAAK,CAAC,MAAM,EAAE,gBAAgB,GAAG,UAAU,EAAE,GAAG,OAAO,CAAC,UAAU,EAAE,CAAC,CAAA;IACrE,KAAK,IAAI,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,CAAA;IACjC,eAAe,IAAI,oBAAoB,GAAG,OAAO,CAAC,oBAAoB,CAAC,CAAA;IACvE,WAAW,CAAC,IAAI,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,CAAA;CACxC;AAED,MAAM,WAAW,gBAAgB;IAChC,IAAI,CAAC,EAAE,cAAc,GAAG,cAAc,EAAE,CAAA;IACxC,KAAK,CAAC,EAAE,MAAM,CAAA;IACd,OAAO,CAAC,EAAE,MAAM,CAAA;IAChB,aAAa,CAAC,EAAE,MAAM,CAAA;IACtB,WAAW,CAAC,EAAE,MAAM,CAAA;IACpB,KAAK,CAAC,EAAE,MAAM,CAAA;IACd,MAAM,CAAC,EAAE,MAAM,CAAA;CACf;AAED,MAAM,WAAW,oBAAoB;IACpC,KAAK,EAAE,OAAO,CAAA;IACd,WAAW,EAAE,MAAM,CAAA;IACnB,QAAQ,CAAC,EAAE,MAAM,CAAA;IACjB,aAAa,CAAC,EAAE,OAAO,CAAA;CACvB;AAED,MAAM,WAAW,gBAAgB;IAChC,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,UAAU,CAAC,EAAE,MAAM,CAAA;CACnB;AAED,MAAM,WAAW,gBAAgB;IAChC,OAAO,CAAC,EAAE,mBAAmB,GAAG,QAAQ,CAAA;IACxC,SAAS,CAAC,EAAE,OAAO,CAAA;IACnB,SAAS,CAAC,EAAE,MAAM,CAAA;IAClB,KAAK,CAAC,EAAE,gBAAgB,CAAA;IACxB,OAAO,CAAC,EAAE,CAAC,KAAK,EAAE,OAAO,KAAK,IAAI,CAAA;CAClC;AAED,MAAM,WAAW,UAAU;IAC1B,GAAG,CACF,IAAI,EAAE,cAAc,EACpB,IAAI,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EAC7B,OAAO,CAAC,EAAE;QAAE,KAAK,CAAC,EAAE,MAAM,CAAC;QAAC,OAAO,CAAC,EAAE,MAAM,CAAA;KAAE,GAC5C,IAAI,CAAA;IACP,2EAA2E;IAC3E,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC,CAAA;IACtB,KAAK,CAAC,MAAM,EAAE,gBAAgB,GAAG,OAAO,CAAC,UAAU,EAAE,CAAC,CAAA;IACtD,eAAe,IAAI,OAAO,CAAC
,oBAAoB,CAAC,CAAA;IAChD,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC,CAAA;IACtB,OAAO,IAAI,IAAI,CAAA;IACf,QAAQ,CAAC,KAAK,EAAE,MAAM,CAAA;IACtB,QAAQ,CAAC,OAAO,EAAE,MAAM,CAAA;CACxB;AAuID,wBAAgB,gBAAgB,CAAC,MAAM,CAAC,EAAE,gBAAgB,GAAG,UAAU,CA2JtE;AAED,wBAAgB,eAAe,CAAC,UAAU,EAAE,UAAU,GAAG,UAAU,CA0ClE"}
1
+ {"version":3,"file":"audit.d.ts","sourceRoot":"","sources":["../src/audit.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EACX,UAAU,EAGV,gBAAgB,EAChB,MAAM,iBAAiB,CAAA;AACxB,OAAO,KAAK,EAAE,SAAS,EAAe,iBAAiB,EAAE,MAAM,cAAc,CAAA;AAG7E,MAAM,MAAM,cAAc,GACvB,UAAU,GACV,gBAAgB,GAChB,oBAAoB,GACpB,cAAc,GACd,kBAAkB,GAClB,YAAY,GACZ,kBAAkB,GAClB,mBAAmB,GACnB,eAAe,GACf,mBAAmB,GACnB,8BAA8B,CAAA;AAEjC,MAAM,WAAW,UAAU;IAC1B,EAAE,EAAE,MAAM,CAAA;IACV,UAAU,EAAE,MAAM,CAAA;IAClB,IAAI,EAAE,cAAc,CAAA;IACpB,SAAS,EAAE,MAAM,CAAA;IACjB,KAAK,CAAC,EAAE,MAAM,CAAA;IACd,OAAO,CAAC,EAAE,MAAM,CAAA;IAChB,IAAI,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA;IAC7B,IAAI,EAAE,MAAM,CAAA;IACZ,YAAY,EAAE,MAAM,CAAA;CACpB;AAED,MAAM,WAAW,mBAAmB;IACnC,MAAM,CAAC,KAAK,EAAE,UAAU,GAAG,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IAC/C,KAAK,CAAC,MAAM,EAAE,gBAAgB,GAAG,UAAU,EAAE,GAAG,OAAO,CAAC,UAAU,EAAE,CAAC,CAAA;IACrE,KAAK,IAAI,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,CAAA;IACjC,eAAe,IAAI,oBAAoB,GAAG,OAAO,CAAC,oBAAoB,CAAC,CAAA;IACvE,WAAW,CAAC,IAAI,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,CAAA;CACxC;AAED,MAAM,WAAW,gBAAgB;IAChC,IAAI,CAAC,EAAE,cAAc,GAAG,cAAc,EAAE,CAAA;IACxC,KAAK,CAAC,EAAE,MAAM,CAAA;IACd,OAAO,CAAC,EAAE,MAAM,CAAA;IAChB,aAAa,CAAC,EAAE,MAAM,CAAA;IACtB,WAAW,CAAC,EAAE,MAAM,CAAA;IACpB,KAAK,CAAC,EAAE,MAAM,CAAA;IACd,MAAM,CAAC,EAAE,MAAM,CAAA;CACf;AAED,MAAM,WAAW,oBAAoB;IACpC,KAAK,EAAE,OAAO,CAAA;IACd,WAAW,EAAE,MAAM,CAAA;IACnB,QAAQ,CAAC,EAAE,MAAM,CAAA;IACjB,aAAa,CAAC,EAAE,OAAO,CAAA;CACvB;AAED,MAAM,WAAW,gBAAgB;IAChC,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,UAAU,CAAC,EAAE,MAAM,CAAA;CACnB;AAED,MAAM,WAAW,gBAAgB;IAChC,OAAO,CAAC,EAAE,mBAAmB,GAAG,QAAQ,CAAA;IACxC,SAAS,CAAC,EAAE,OAAO,CAAA;IACnB,SAAS,CAAC,EAAE,MAAM,CAAA;IAClB,KAAK,CAAC,EAAE,gBAAgB,CAAA;IACxB,KAAK,CAAC,EAAE,SAAS,EAAE,GAAG,iBAAiB,CAAA;IACvC,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA;IACjC,OAAO,CAAC,EAAE,CAAC,KAAK,EAAE,OAAO,KAAK,IAAI,CAAA;CAClC;AAED,MAAM,WAAW,UAAU;IAC1B,GAAG,CACF,IAAI,EAAE,cAAc,EACpB,IAAI,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EAC7B,OAAO,CAAC,EAAE;QAAE,KAAK,CAAC,EAAE,MAAM,CAAC;QAAC,OAAO,CAAC,EAAE,MAA
M,CAAA;KAAE,GAC5C,IAAI,CAAA;IACP,2EAA2E;IAC3E,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC,CAAA;IACtB,KAAK,CAAC,MAAM,EAAE,gBAAgB,GAAG,OAAO,CAAC,UAAU,EAAE,CAAC,CAAA;IACtD,eAAe,IAAI,OAAO,CAAC,oBAAoB,CAAC,CAAA;IAChD,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC,CAAA;IACtB,OAAO,IAAI,OAAO,CAAC,IAAI,CAAC,CAAA;IACxB,QAAQ,CAAC,KAAK,EAAE,MAAM,CAAA;IACtB,QAAQ,CAAC,OAAO,EAAE,MAAM,CAAA;CACxB;AAuJD,wBAAgB,gBAAgB,CAAC,MAAM,CAAC,EAAE,gBAAgB,GAAG,UAAU,CAmKtE;AAED,wBAAgB,eAAe,CAAC,UAAU,EAAE,UAAU,GAAG,UAAU,CA0ClE;AAiDD,wBAAgB,qBAAqB,CAAC,UAAU,EAAE,UAAU,GAAG,gBAAgB,CAmC9E"}
package/dist/index.d.ts CHANGED
@@ -6,14 +6,24 @@ export { observe } from './tracer';
6
6
  export type { Tracer, TracerConfig, TracerOutput, TracerExporter, CostReport } from './tracer';
7
7
  export { createMetrics } from './metrics';
8
8
  export type { MetricsCollector, MetricEntry } from './metrics';
9
- export { createAuditTrail, auditMiddleware } from './audit';
9
+ export { createAuditTrail, auditMiddleware, auditStreamMiddleware } from './audit';
10
10
  export type { AuditEventType, AuditEvent, AuditStorageAdapter, AuditQueryFilter, AuditIntegrityResult, AuditTrailConfig, AuditBatchConfig, AuditTrail, } from './audit';
11
+ export { createSinkManager } from './audit-sink';
12
+ export type { AuditSink, AuditSinkRetryConfig, SinkManagerConfig, SinkManager } from './audit-sink';
13
+ export { createWebhookSink } from './audit-sink-webhook';
14
+ export type { WebhookSinkConfig } from './audit-sink-webhook';
15
+ export { createSplunkSink } from './audit-sink-splunk';
16
+ export type { SplunkSinkConfig } from './audit-sink-splunk';
17
+ export { createDatadogSink } from './audit-sink-datadog';
18
+ export type { DatadogSinkConfig } from './audit-sink-datadog';
11
19
  export { createProvenanceTracker } from './provenance';
12
20
  export type { ProvenanceRecord, ProvenanceTracker } from './provenance';
13
21
  export { createExperiment, createFileExperimentStore } from './experiment';
14
22
  export type { Experiment, ExperimentConfig, ExperimentVariant, ExperimentResults, ExperimentStore, } from './experiment';
15
23
  export { instrumentComplete, instrumentAgent } from './instrument';
16
24
  export type { InstrumentableAgent } from './instrument';
25
+ export { createStudioExporter } from './studio-exporter';
26
+ export type { StudioExporter, StudioExporterConfig } from './studio-exporter';
17
27
  export { toOTelSpan, toOTelExportRequest, toTraceparent, parseTraceparent, injectTraceContext, extractTraceContext, createOTLPExporter, } from './otel';
18
28
  export type { OTelSpan, OTelSpanKind, OTelStatusCode, OTelAttribute, OTelAttributeValue, OTelEvent, OTelResource, OTelExportRequest, TraceContext, OTLPExporterConfig, } from './otel';
19
29
  //# sourceMappingURL=index.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,UAAU,EAAE,MAAM,QAAQ,CAAA;AACnC,YAAY,EAAE,IAAI,EAAE,QAAQ,EAAE,SAAS,EAAE,QAAQ,EAAE,UAAU,EAAE,WAAW,EAAE,MAAM,QAAQ,CAAA;AAG1F,OAAO,EAAE,gBAAgB,EAAE,iBAAiB,EAAE,MAAM,eAAe,CAAA;AACnE,YAAY,EACX,UAAU,EACV,gBAAgB,EAChB,YAAY,EACZ,mBAAmB,EACnB,SAAS,EACT,aAAa,EACb,sBAAsB,EACtB,eAAe,EACf,cAAc,GACd,MAAM,eAAe,CAAA;AAGtB,OAAO,EAAE,OAAO,EAAE,MAAM,UAAU,CAAA;AAClC,YAAY,EAAE,MAAM,EAAE,YAAY,EAAE,YAAY,EAAE,cAAc,EAAE,UAAU,EAAE,MAAM,UAAU,CAAA;AAG9F,OAAO,EAAE,aAAa,EAAE,MAAM,WAAW,CAAA;AACzC,YAAY,EAAE,gBAAgB,EAAE,WAAW,EAAE,MAAM,WAAW,CAAA;AAG9D,OAAO,EAAE,gBAAgB,EAAE,eAAe,EAAE,MAAM,SAAS,CAAA;AAC3D,YAAY,EACX,cAAc,EACd,UAAU,EACV,mBAAmB,EACnB,gBAAgB,EAChB,oBAAoB,EACpB,gBAAgB,EAChB,gBAAgB,EAChB,UAAU,GACV,MAAM,SAAS,CAAA;AAGhB,OAAO,EAAE,uBAAuB,EAAE,MAAM,cAAc,CAAA;AACtD,YAAY,EAAE,gBAAgB,EAAE,iBAAiB,EAAE,MAAM,cAAc,CAAA;AAGvE,OAAO,EAAE,gBAAgB,EAAE,yBAAyB,EAAE,MAAM,cAAc,CAAA;AAC1E,YAAY,EACX,UAAU,EACV,gBAAgB,EAChB,iBAAiB,EACjB,iBAAiB,EACjB,eAAe,GACf,MAAM,cAAc,CAAA;AAGrB,OAAO,EAAE,kBAAkB,EAAE,eAAe,EAAE,MAAM,cAAc,CAAA;AAClE,YAAY,EAAE,mBAAmB,EAAE,MAAM,cAAc,CAAA;AAGvD,OAAO,EACN,UAAU,EACV,mBAAmB,EACnB,aAAa,EACb,gBAAgB,EAChB,kBAAkB,EAClB,mBAAmB,EACnB,kBAAkB,GAClB,MAAM,QAAQ,CAAA;AACf,YAAY,EACX,QAAQ,EACR,YAAY,EACZ,cAAc,EACd,aAAa,EACb,kBAAkB,EAClB,SAAS,EACT,YAAY,EACZ,iBAAiB,EACjB,YAAY,EACZ,kBAAkB,GAClB,MAAM,QAAQ,CAAA"}
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,UAAU,EAAE,MAAM,QAAQ,CAAA;AACnC,YAAY,EAAE,IAAI,EAAE,QAAQ,EAAE,SAAS,EAAE,QAAQ,EAAE,UAAU,EAAE,WAAW,EAAE,MAAM,QAAQ,CAAA;AAG1F,OAAO,EAAE,gBAAgB,EAAE,iBAAiB,EAAE,MAAM,eAAe,CAAA;AACnE,YAAY,EACX,UAAU,EACV,gBAAgB,EAChB,YAAY,EACZ,mBAAmB,EACnB,SAAS,EACT,aAAa,EACb,sBAAsB,EACtB,eAAe,EACf,cAAc,GACd,MAAM,eAAe,CAAA;AAGtB,OAAO,EAAE,OAAO,EAAE,MAAM,UAAU,CAAA;AAClC,YAAY,EAAE,MAAM,EAAE,YAAY,EAAE,YAAY,EAAE,cAAc,EAAE,UAAU,EAAE,MAAM,UAAU,CAAA;AAG9F,OAAO,EAAE,aAAa,EAAE,MAAM,WAAW,CAAA;AACzC,YAAY,EAAE,gBAAgB,EAAE,WAAW,EAAE,MAAM,WAAW,CAAA;AAG9D,OAAO,EAAE,gBAAgB,EAAE,eAAe,EAAE,qBAAqB,EAAE,MAAM,SAAS,CAAA;AAClF,YAAY,EACX,cAAc,EACd,UAAU,EACV,mBAAmB,EACnB,gBAAgB,EAChB,oBAAoB,EACpB,gBAAgB,EAChB,gBAAgB,EAChB,UAAU,GACV,MAAM,SAAS,CAAA;AAGhB,OAAO,EAAE,iBAAiB,EAAE,MAAM,cAAc,CAAA;AAChD,YAAY,EAAE,SAAS,EAAE,oBAAoB,EAAE,iBAAiB,EAAE,WAAW,EAAE,MAAM,cAAc,CAAA;AACnG,OAAO,EAAE,iBAAiB,EAAE,MAAM,sBAAsB,CAAA;AACxD,YAAY,EAAE,iBAAiB,EAAE,MAAM,sBAAsB,CAAA;AAC7D,OAAO,EAAE,gBAAgB,EAAE,MAAM,qBAAqB,CAAA;AACtD,YAAY,EAAE,gBAAgB,EAAE,MAAM,qBAAqB,CAAA;AAC3D,OAAO,EAAE,iBAAiB,EAAE,MAAM,sBAAsB,CAAA;AACxD,YAAY,EAAE,iBAAiB,EAAE,MAAM,sBAAsB,CAAA;AAG7D,OAAO,EAAE,uBAAuB,EAAE,MAAM,cAAc,CAAA;AACtD,YAAY,EAAE,gBAAgB,EAAE,iBAAiB,EAAE,MAAM,cAAc,CAAA;AAGvE,OAAO,EAAE,gBAAgB,EAAE,yBAAyB,EAAE,MAAM,cAAc,CAAA;AAC1E,YAAY,EACX,UAAU,EACV,gBAAgB,EAChB,iBAAiB,EACjB,iBAAiB,EACjB,eAAe,GACf,MAAM,cAAc,CAAA;AAGrB,OAAO,EAAE,kBAAkB,EAAE,eAAe,EAAE,MAAM,cAAc,CAAA;AAClE,YAAY,EAAE,mBAAmB,EAAE,MAAM,cAAc,CAAA;AAGvD,OAAO,EAAE,oBAAoB,EAAE,MAAM,mBAAmB,CAAA;AACxD,YAAY,EAAE,cAAc,EAAE,oBAAoB,EAAE,MAAM,mBAAmB,CAAA;AAG7E,OAAO,EACN,UAAU,EACV,mBAAmB,EACnB,aAAa,EACb,gBAAgB,EAChB,kBAAkB,EAClB,mBAAmB,EACnB,kBAAkB,GAClB,MAAM,QAAQ,CAAA;AACf,YAAY,EACX,QAAQ,EACR,YAAY,EACZ,cAAc,EACd,aAAa,EACb,kBAAkB,EAClB,SAAS,EACT,YAAY,EACZ,iBAAiB,EACjB,YAAY,EACZ,kBAAkB,GAClB,MAAM,QAAQ,CAAA"}
package/dist/index.js CHANGED
@@ -695,6 +695,110 @@ function createMetrics(options) {
695
695
  }
696
696
  // src/audit.ts
697
697
  import { createHash } from "node:crypto";
698
+
699
// src/audit-sink.ts
// Module-scoped logger shared by the sink delivery helpers below.
var log4 = createLogger();
701
/**
 * Compute a retry back-off delay in milliseconds.
 * Exponential growth (baseDelayMs * 2^attempt) capped at maxDelayMs,
 * then scaled by a random jitter factor in [0.5, 1.0) to avoid
 * thundering-herd retries.
 */
function getRetryDelay(attempt, baseDelayMs, maxDelayMs) {
  const exponential = baseDelayMs * 2 ** attempt;
  const capped = exponential > maxDelayMs ? maxDelayMs : exponential;
  const jitterFactor = 0.5 + Math.random() / 2;
  return capped * jitterFactor;
}
705
/**
 * Send a batch of events to a sink, retrying with jittered exponential
 * back-off. Makes up to maxRetries + 1 attempts; rethrows the last error
 * when all attempts fail. No delay follows the final failed attempt.
 */
async function sendWithRetry(sink, events, retryConfig) {
  const { maxRetries, baseDelayMs, maxDelayMs } = retryConfig;
  let attempt = 0;
  for (;;) {
    try {
      await sink.send(events);
      return;
    } catch (error) {
      if (attempt >= maxRetries) {
        throw error;
      }
      const waitMs = getRetryDelay(attempt, baseDelayMs, maxDelayMs);
      await new Promise((resolve) => setTimeout(resolve, waitMs));
      attempt++;
    }
  }
}
721
/**
 * Deliver a batch of events to one sink, applying the sink's optional
 * per-event filter first. On delivery failure (after retries) the error
 * is logged and reported via onError, then the filtered batch is handed
 * to the dead-letter sink (if configured) as a best-effort fallback.
 * Never throws: every failure path is absorbed.
 */
async function deliverToSink(sink, events, retryConfig, deadLetterSink, onError) {
  const batch = sink.filter ? events.filter(sink.filter) : events;
  if (batch.length === 0) {
    return;
  }
  try {
    await sendWithRetry(sink, batch, retryConfig);
    return;
  } catch (error) {
    log4.error("Audit sink delivery failed", { sink: sink.name });
    onError?.(sink.name, error);
  }
  // Only reached when the primary delivery failed.
  if (!deadLetterSink) {
    return;
  }
  try {
    await deadLetterSink.send(batch);
  } catch (dlqError) {
    log4.error("Dead letter sink delivery failed", { sink: deadLetterSink.name });
    onError?.(deadLetterSink.name, dlqError);
  }
}
740
/**
 * Create a manager that buffers audit events and fans them out to the
 * configured sinks in batches. Events are flushed when the buffer reaches
 * the batch size or on a periodic timer; delivery is fire-and-forget but
 * tracked so flush()/shutdown() can await completion.
 */
function createSinkManager(config) {
  const { sinks, onError, deadLetterSink } = config;
  // Batching defaults: flush every 50 events or every 5 seconds.
  const batchSize = config.batch?.size ?? 50;
  const batchIntervalMs = config.batch?.intervalMs ?? 5000;
  // Bounded in-memory buffer; 1e4 == 10,000 events.
  const maxBufferSize = config.maxBufferSize ?? 1e4;
  const retryConfig = {
    maxRetries: config.retry?.maxRetries ?? 3,
    baseDelayMs: config.retry?.baseDelayMs ?? 1000,
    maxDelayMs: config.retry?.maxDelayMs ?? 30000
  };
  const buffer = [];
  // Delivery promises still in progress, so flush()/shutdown() can await them.
  const inFlight = new Set;
  let flushTimer = null;
  // Start delivery of one batch to every sink without awaiting it; the
  // combined promise removes itself from inFlight when settled.
  function dispatchBatch(batch) {
    if (batch.length === 0 || sinks.length === 0)
      return;
    const promises = sinks.map((sink) => deliverToSink(sink, batch, retryConfig, deadLetterSink, onError));
    const combined = Promise.allSettled(promises).then(() => {
      inFlight.delete(combined);
    });
    inFlight.add(combined);
  }
  // Empty the buffer in batch-sized chunks, dispatching each chunk.
  function drainBuffer() {
    while (buffer.length > 0) {
      const batch = buffer.splice(0, batchSize);
      dispatchBatch(batch);
    }
  }
  flushTimer = setInterval(() => {
    if (buffer.length > 0)
      drainBuffer();
  }, batchIntervalMs);
  // unref() (Node.js timers only) so the interval does not keep the
  // process alive; guarded because other runtimes may return a number.
  if (typeof flushTimer === "object" && "unref" in flushTimer) {
    flushTimer.unref();
  }
  return {
    // Enqueue one event; drops the oldest event instead of growing
    // unbounded when the buffer is full, and drains early once a full
    // batch has accumulated.
    dispatch(event) {
      if (buffer.length >= maxBufferSize) {
        buffer.shift();
        log4.warn("Audit sink buffer full, dropping oldest event");
      }
      buffer.push(event);
      if (buffer.length >= batchSize)
        drainBuffer();
    },
    // Dispatch everything buffered and wait for in-flight deliveries
    // (snapshot of inFlight at await time) to settle.
    async flush() {
      drainBuffer();
      await Promise.allSettled([...inFlight]);
    },
    // Stop the timer, flush remaining events, then give each sink a
    // chance to shut down; all failures are absorbed via allSettled.
    async shutdown() {
      if (flushTimer) {
        clearInterval(flushTimer);
        flushTimer = null;
      }
      drainBuffer();
      await Promise.allSettled([...inFlight]);
      await Promise.allSettled(sinks.map((sink) => sink.shutdown?.()));
    }
  };
}
800
+
801
+ // src/audit.ts
698
802
  function computeEventHash(event, previousHash) {
699
803
  const content = JSON.stringify({
700
804
  id: event.id,
@@ -803,16 +907,34 @@ class InMemoryAuditStorage {
803
907
  return last ? last.hash : ZERO_HASH;
804
908
  }
805
909
  }
910
/**
 * Pick the audit storage backend: a caller-provided adapter object wins;
 * otherwise (including when `storage` is a string preset) fall back to
 * the bounded in-memory store.
 */
function resolveStorage(config) {
  const provided = config?.storage;
  if (provided && typeof provided !== "string") {
    return provided;
  }
  return new InMemoryAuditStorage(config?.maxEvents);
}
915
/**
 * Build a sink manager from the audit-trail config. Accepts either a bare
 * array of sinks (wrapped into a default manager config) or a full
 * SinkManagerConfig object. Returns null when no sinks are configured.
 */
function resolveSinkManager(config) {
  const sinks = config?.sinks;
  if (!sinks) {
    return null;
  }
  return createSinkManager(Array.isArray(sinks) ? { sinks } : sinks);
}
921
/**
 * Read the tail hash of the stored chain, or the zero hash when the
 * storage adapter does not support getLastHash. May return a promise if
 * the adapter's getLastHash is async.
 */
function resolveLastHash(storage) {
  return storage.getLastHash ? storage.getLastHash() : ZERO_HASH;
}
806
926
  function createAuditTrail(config) {
807
927
  const useHashChain = config?.hashChain !== false;
808
- const storage = config?.storage && typeof config.storage !== "string" ? config.storage : new InMemoryAuditStorage(config?.maxEvents);
928
+ const storage = resolveStorage(config);
929
+ const sinkManager = resolveSinkManager(config);
930
+ const globalContext = config?.context;
809
931
  let sequenceId = 0;
810
932
  let idCounter = 0;
811
933
  let previousHash = ZERO_HASH;
812
934
  let isReady = true;
813
935
  let readyPromise = Promise.resolve();
814
- if (useHashChain && storage.getLastHash) {
815
- const lastHash = storage.getLastHash();
936
+ if (useHashChain) {
937
+ const lastHash = resolveLastHash(storage);
816
938
  if (typeof lastHash === "string") {
817
939
  previousHash = lastHash;
818
940
  } else {
@@ -830,10 +952,11 @@ function createAuditTrail(config) {
830
952
  const isBatched = !!batchConfig;
831
953
  const pendingBuffer = [];
832
954
  let flushTimer = null;
833
- const flushPromise = Promise.resolve();
955
+ let flushPromise = Promise.resolve();
834
956
  function buildAndAppend(entry) {
835
957
  sequenceId++;
836
958
  idCounter++;
959
+ const data = globalContext ? { ...globalContext, ...entry.data } : entry.data;
837
960
  const event = {
838
961
  id: `audit_${idCounter.toString(36)}_${entry.timestamp.toString(36)}`,
839
962
  sequenceId,
@@ -841,7 +964,7 @@ function createAuditTrail(config) {
841
964
  timestamp: entry.timestamp,
842
965
  actor: entry.actor,
843
966
  traceId: entry.traceId,
844
- data: entry.data,
967
+ data,
845
968
  previousHash: useHashChain ? previousHash : ZERO_HASH
846
969
  };
847
970
  const hash = useHashChain ? computeEventHash(event, event.previousHash) : createHash("sha256").update(JSON.stringify(event)).digest("hex");
@@ -850,9 +973,10 @@ function createAuditTrail(config) {
850
973
  previousHash = hash;
851
974
  }
852
975
  const result = storage.append(finalEvent);
853
- if (result && typeof result.catch === "function") {
854
- result.catch((err2) => config?.onError?.(err2));
976
+ if (result && typeof result.then === "function") {
977
+ flushPromise = flushPromise.then(() => result).catch((err2) => config?.onError?.(err2));
855
978
  }
979
+ sinkManager?.dispatch(finalEvent);
856
980
  }
857
981
  function drainBuffer() {
858
982
  let entry = pendingBuffer.shift();
@@ -901,13 +1025,16 @@ function createAuditTrail(config) {
901
1025
  await readyPromise;
902
1026
  drainBuffer();
903
1027
  await flushPromise;
1028
+ await sinkManager?.flush();
904
1029
  },
905
- dispose() {
1030
+ async dispose() {
906
1031
  if (flushTimer) {
907
1032
  clearInterval(flushTimer);
908
1033
  flushTimer = null;
909
1034
  }
910
1035
  drainBuffer();
1036
+ await flushPromise;
1037
+ await sinkManager?.shutdown();
911
1038
  },
912
1039
  async query(filter) {
913
1040
  if (isBatched)
@@ -958,6 +1085,203 @@ function auditMiddleware(auditTrail) {
958
1085
  }
959
1086
  };
960
1087
  }
1088
+ function emitStreamAudit(auditTrail, ctx, state, latencyMs) {
1089
+ if (state.hasError && !state.hasUsage) {
1090
+ auditTrail.log("llm_call", {
1091
+ provider: ctx.provider,
1092
+ model: ctx.model,
1093
+ error: state.errorMessage,
1094
+ latencyMs,
1095
+ success: false,
1096
+ streaming: true
1097
+ }, { traceId: ctx.traceId });
1098
+ } else if (state.hasUsage) {
1099
+ auditTrail.log("llm_call", {
1100
+ provider: ctx.provider,
1101
+ model: ctx.model,
1102
+ inputTokens: state.inputTokens,
1103
+ outputTokens: state.outputTokens,
1104
+ totalTokens: state.totalTokens,
1105
+ latencyMs,
1106
+ stopReason: state.stopReason,
1107
+ streaming: true
1108
+ }, { traceId: ctx.traceId });
1109
+ }
1110
+ }
1111
/**
 * Stream middleware that audits LLM streaming calls. Wraps the downstream
 * event stream, passes every event through unchanged, accumulates usage /
 * error information along the way, and logs a single audit event in a
 * finally block so the call is recorded even when the stream aborts.
 */
function auditStreamMiddleware(auditTrail) {
  return (ctx, source, next) => {
    // Capture the start time and invoke the rest of the chain eagerly,
    // before the returned generator is consumed.
    const startedAt = performance.now();
    const downstream = next(ctx, source);
    async function* instrumented() {
      const state = {
        inputTokens: 0,
        outputTokens: 0,
        totalTokens: 0,
        hasUsage: false,
        hasError: false
      };
      try {
        for await (const event of downstream) {
          if (event.type === "message_end") {
            state.inputTokens = event.usage.inputTokens;
            state.outputTokens = event.usage.outputTokens;
            state.totalTokens = event.usage.totalTokens;
            state.stopReason = event.stopReason;
            state.hasUsage = true;
          } else if (event.type === "error") {
            state.hasError = true;
            state.errorMessage = event.error.message;
          }
          yield event;
        }
      } finally {
        const latencyMs = Math.round(performance.now() - startedAt);
        emitStreamAudit(auditTrail, ctx, state, latencyMs);
      }
    }
    return instrumented();
  };
}
1145
+ // src/audit-sink-webhook.ts
1146
+ function createWebhookSink(config) {
1147
+ const { url, headers = {}, method = "POST", timeoutMs = 1e4 } = config;
1148
+ return {
1149
+ name: "webhook",
1150
+ async send(events) {
1151
+ const controller = new AbortController;
1152
+ const timeout = setTimeout(() => controller.abort(), timeoutMs);
1153
+ try {
1154
+ const response = await fetch(url, {
1155
+ method,
1156
+ headers: {
1157
+ "Content-Type": "application/json",
1158
+ ...headers
1159
+ },
1160
+ body: JSON.stringify({ events }),
1161
+ signal: controller.signal
1162
+ });
1163
+ if (!response.ok) {
1164
+ throw new Error(`Webhook responded with ${response.status} ${response.statusText}`);
1165
+ }
1166
+ } catch (error) {
1167
+ if (error instanceof DOMException && error.name === "AbortError") {
1168
+ throw new Error(`Webhook request timed out after ${timeoutMs}ms`);
1169
+ }
1170
+ throw error;
1171
+ } finally {
1172
+ clearTimeout(timeout);
1173
+ }
1174
+ }
1175
+ };
1176
+ }
1177
+ // src/audit-sink-splunk.ts
1178
+ function toSplunkEvent(event, index, source, sourcetype) {
1179
+ return JSON.stringify({
1180
+ time: event.timestamp / 1000,
1181
+ source: source ?? "elsium-ai",
1182
+ sourcetype: sourcetype ?? "elsium:audit",
1183
+ ...index && { index },
1184
+ event
1185
+ });
1186
+ }
1187
/**
 * Create an audit sink that delivers event batches to a Splunk HTTP Event
 * Collector endpoint. Events are sent as newline-delimited HEC envelopes
 * with `Authorization: Splunk <token>`. Requests abort after timeoutMs
 * (default 10s); non-2xx responses and timeouts throw so the sink
 * manager's retry logic can take over.
 */
function createSplunkSink(config) {
  const { url, token, index, source, sourcetype, timeoutMs = 1e4 } = config;
  return {
    name: "splunk",
    async send(events) {
      // HEC accepts multiple events in one request as concatenated /
      // newline-separated JSON objects.
      const body = events.map((e) => toSplunkEvent(e, index, source, sourcetype)).join("\n");
      const controller = new AbortController();
      const timeout = setTimeout(() => controller.abort(), timeoutMs);
      try {
        const response = await fetch(url, {
          method: "POST",
          headers: {
            Authorization: `Splunk ${token}`,
            "Content-Type": "application/json"
          },
          body,
          signal: controller.signal
        });
        if (!response.ok) {
          throw new Error(`Splunk HEC responded with ${response.status} ${response.statusText}`);
        }
      } catch (error) {
        // Detect aborts by name rather than `instanceof DOMException`:
        // abort rejections are not DOMException in every runtime.
        if (error && error.name === "AbortError") {
          throw new Error(`Splunk HEC request timed out after ${timeoutMs}ms`);
        }
        throw error;
      } finally {
        clearTimeout(timeout);
      }
    }
  };
}
1220
// src/audit-sink-datadog.ts
/**
 * Render a tag map as a Datadog-style comma-separated "key:value" list.
 */
function formatTags(tags) {
  const parts = [];
  for (const [key, value] of Object.entries(tags)) {
    parts.push(`${key}:${value}`);
  }
  return parts.join(",");
}
1224
+ function toDatadogLog(event, service, source, tags) {
1225
+ return {
1226
+ ddsource: source,
1227
+ ddtags: tags ? formatTags(tags) : undefined,
1228
+ service,
1229
+ hostname: "elsium-ai",
1230
+ message: `[${event.type}] ${JSON.stringify(event.data)}`,
1231
+ status: event.type === "security_violation" ? "error" : "info",
1232
+ timestamp: event.timestamp,
1233
+ audit: {
1234
+ id: event.id,
1235
+ sequenceId: event.sequenceId,
1236
+ type: event.type,
1237
+ actor: event.actor,
1238
+ traceId: event.traceId,
1239
+ data: event.data,
1240
+ hash: event.hash,
1241
+ previousHash: event.previousHash
1242
+ }
1243
+ };
1244
+ }
1245
/**
 * Create an audit sink that delivers event batches to the Datadog Logs
 * intake API (v2) for the configured site. Events are converted to log
 * entries via toDatadogLog and posted as a JSON array with the DD-API-KEY
 * header. Requests abort after timeoutMs (default 10s); non-2xx responses
 * and timeouts throw so the sink manager's retry logic can take over.
 */
function createDatadogSink(config) {
  const {
    apiKey,
    site = "datadoghq.com",
    service = "elsium-ai",
    source = "elsium-ai-audit",
    tags,
    timeoutMs = 1e4
  } = config;
  const endpoint = `https://http-intake.logs.${site}/api/v2/logs`;
  return {
    name: "datadog",
    async send(events) {
      const body = events.map((e) => toDatadogLog(e, service, source, tags));
      const controller = new AbortController();
      const timeout = setTimeout(() => controller.abort(), timeoutMs);
      try {
        const response = await fetch(endpoint, {
          method: "POST",
          headers: {
            "DD-API-KEY": apiKey,
            "Content-Type": "application/json"
          },
          body: JSON.stringify(body),
          signal: controller.signal
        });
        if (!response.ok) {
          throw new Error(`Datadog Log Intake responded with ${response.status} ${response.statusText}`);
        }
      } catch (error) {
        // Detect aborts by name rather than `instanceof DOMException`:
        // abort rejections are not DOMException in every runtime.
        if (error && error.name === "AbortError") {
          throw new Error(`Datadog request timed out after ${timeoutMs}ms`);
        }
        throw error;
      } finally {
        clearTimeout(timeout);
      }
    }
  };
}
961
1285
  // src/provenance.ts
962
1286
  import { createHash as createHash2 } from "node:crypto";
963
1287
  function sha256(input) {
@@ -1019,7 +1343,7 @@ function createProvenanceTracker(options) {
1019
1343
  import { createHash as createHash3 } from "node:crypto";
1020
1344
  import { existsSync, mkdirSync, readFileSync, writeFileSync as writeFileSync2 } from "node:fs";
1021
1345
  import { join } from "node:path";
1022
- var log4 = createLogger();
1346
+ var log5 = createLogger();
1023
1347
  function createFileExperimentStore(dir) {
1024
1348
  return {
1025
1349
  save(name, data) {
@@ -1030,7 +1354,7 @@ function createFileExperimentStore(dir) {
1030
1354
  const filePath = join(dir, `${name}.json`);
1031
1355
  writeFileSync2(filePath, JSON.stringify(data, null, 2));
1032
1356
  } catch (err2) {
1033
- log4.error("Failed to save experiment", {
1357
+ log5.error("Failed to save experiment", {
1034
1358
  name,
1035
1359
  error: err2 instanceof Error ? err2.message : String(err2)
1036
1360
  });
@@ -1061,7 +1385,7 @@ function loadFromStore(store, name, stats) {
1061
1385
  stats[vName].metrics[key] = { sum: m.sum, count: m.count };
1062
1386
  }
1063
1387
  }
1064
- log4.debug("Loaded experiment state", { name, totalAssignments: saved.totalAssignments });
1388
+ log5.debug("Loaded experiment state", { name, totalAssignments: saved.totalAssignments });
1065
1389
  }
1066
1390
  function recordMetrics(s, metrics) {
1067
1391
  for (const [key, value] of Object.entries(metrics)) {
@@ -1129,7 +1453,7 @@ function createExperiment(config) {
1129
1453
  const s = stats[variant.name];
1130
1454
  if (s)
1131
1455
  s.assignments++;
1132
- log4.debug("Experiment assignment", {
1456
+ log5.debug("Experiment assignment", {
1133
1457
  experiment: name,
1134
1458
  variant: variant.name,
1135
1459
  userId
@@ -1197,8 +1521,72 @@ function instrumentAgent(agent, tracer) {
1197
1521
  };
1198
1522
  return instrumented;
1199
1523
  }
1524
// src/studio-exporter.ts
// fs/path imports are aliased (…2/…3) by the bundler to avoid clashing with
// identically named imports from other bundled modules.
import { existsSync as existsSync2, mkdirSync as mkdirSync2, readFileSync as readFileSync2, writeFileSync as writeFileSync3 } from "node:fs";
import { join as join2 } from "node:path";
// Module-scoped logger for studio-exporter file I/O failures.
var log6 = createLogger();
1528
/**
 * Create a directory (and any missing parents) if it does not exist yet.
 */
function ensureDir(dirPath) {
  if (existsSync2(dirPath)) {
    return;
  }
  mkdirSync2(dirPath, { recursive: true });
}
1533
/**
 * Write a value to disk as pretty-printed JSON. Failures (serialization
 * or I/O) are logged and swallowed so exporter writes never crash the
 * host application.
 */
function safeWriteJSON(filePath, data) {
  try {
    const serialized = JSON.stringify(data, null, 2);
    writeFileSync3(filePath, serialized);
  } catch (err2) {
    const message = err2 instanceof Error ? err2.message : String(err2);
    log6.error("Studio exporter write failed", {
      file: filePath,
      error: message
    });
  }
}
1543
/**
 * Read and parse a JSON file, returning the fallback when the file is
 * missing, unreadable, or contains invalid JSON.
 */
function safeReadJSON(filePath, fallback) {
  try {
    if (!existsSync2(filePath)) {
      return fallback;
    }
    const raw = readFileSync2(filePath, "utf-8");
    return JSON.parse(raw);
  } catch {
    return fallback;
  }
}
1552
/**
 * Create a tracer exporter that persists observability artifacts to a
 * local studio directory (default ".elsium"): one JSON file per trace
 * under traces/, a rolling x-ray history file capped at 500 entries, and
 * a cost-report snapshot.
 */
function createStudioExporter(config) {
  const baseDir = config?.dir ?? ".elsium";
  const tracesDir = join2(baseDir, "traces");
  const xrayFile = join2(baseDir, "xray-history.json");
  const costFile = join2(baseDir, "cost-report.json");
  // Creating the traces dir also creates baseDir (recursive mkdir).
  ensureDir(tracesDir);
  return {
    name: "studio",
    // Persist each span to <tracesDir>/<traceId>.json; spans without a
    // traceId are skipped.
    export(spans) {
      for (const span of spans) {
        if (span.traceId) {
          safeWriteJSON(join2(tracesDir, `${span.traceId}.json`), span);
        }
      }
    },
    // Prepend the entry to the history file, truncating to 500 entries.
    writeXRayEntry(entry) {
      const history = safeReadJSON(xrayFile, []);
      history.unshift(entry);
      if (history.length > 500) {
        history.length = 500;
      }
      safeWriteJSON(xrayFile, history);
    },
    // Overwrite the cost-report file with a summarized snapshot.
    writeCostReport(report) {
      const byModel = {};
      for (const [model, data] of Object.entries(report.byModel)) {
        byModel[model] = { requests: data.calls, tokens: data.tokens, cost: data.cost };
      }
      safeWriteJSON(costFile, {
        totalRequests: report.callCount,
        totalTokens: report.totalTokens,
        totalCost: report.totalCost,
        byModel
      });
    }
  };
}
1200
1588
  // src/otel.ts
1201
- var log5 = createLogger();
1589
+ var log7 = createLogger();
1202
1590
  var SPAN_KIND_MAP = {
1203
1591
  llm: 3,
1204
1592
  tool: 1,
@@ -1339,10 +1727,10 @@ function createOTLPExporter(config) {
1339
1727
  body: JSON.stringify(payload)
1340
1728
  });
1341
1729
  if (!response.ok) {
1342
- log5.error(`OTLP export failed: ${response.status} ${response.statusText}`);
1730
+ log7.error(`OTLP export failed: ${response.status} ${response.statusText}`);
1343
1731
  }
1344
1732
  } catch (err2) {
1345
- log5.error("OTLP export error", { error: err2 instanceof Error ? err2.message : String(err2) });
1733
+ log7.error("OTLP export error", { error: err2 instanceof Error ? err2.message : String(err2) });
1346
1734
  }
1347
1735
  }
1348
1736
  function startAutoFlush() {
@@ -1389,13 +1777,19 @@ export {
1389
1777
  instrumentAgent,
1390
1778
  injectTraceContext,
1391
1779
  extractTraceContext,
1780
+ createWebhookSink,
1781
+ createStudioExporter,
1782
+ createSplunkSink,
1392
1783
  createSpan,
1784
+ createSinkManager,
1393
1785
  createProvenanceTracker,
1394
1786
  createOTLPExporter,
1395
1787
  createMetrics,
1396
1788
  createFileExperimentStore,
1397
1789
  createExperiment,
1790
+ createDatadogSink,
1398
1791
  createCostEngine,
1399
1792
  createAuditTrail,
1793
+ auditStreamMiddleware,
1400
1794
  auditMiddleware
1401
1795
  };
@@ -0,0 +1,10 @@
1
import type { CostReport, TracerExporter } from './tracer';
/** Options for {@link createStudioExporter}. */
export interface StudioExporterConfig {
  /** Base directory for studio artifacts. Defaults to ".elsium". */
  dir?: string;
}
/**
 * A tracer exporter that also persists x-ray history entries and
 * cost-report snapshots to the studio directory.
 */
export interface StudioExporter extends TracerExporter {
  /** Prepend an entry to the rolling x-ray history file (capped at 500 entries). */
  writeXRayEntry(entry: Record<string, unknown>): void;
  /** Overwrite the cost-report file with a summary of the given report. */
  writeCostReport(report: CostReport): void;
}
/**
 * Create a studio exporter rooted at `config.dir` (default ".elsium");
 * the traces subdirectory is created eagerly.
 */
export declare function createStudioExporter(config?: StudioExporterConfig): StudioExporter;
//# sourceMappingURL=studio-exporter.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"studio-exporter.d.ts","sourceRoot":"","sources":["../src/studio-exporter.ts"],"names":[],"mappings":"AAIA,OAAO,KAAK,EAAE,UAAU,EAAE,cAAc,EAAE,MAAM,UAAU,CAAA;AAI1D,MAAM,WAAW,oBAAoB;IACpC,GAAG,CAAC,EAAE,MAAM,CAAA;CACZ;AAED,MAAM,WAAW,cAAe,SAAQ,cAAc;IACrD,cAAc,CAAC,KAAK,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,IAAI,CAAA;IACpD,eAAe,CAAC,MAAM,EAAE,UAAU,GAAG,IAAI,CAAA;CACzC;AA4BD,wBAAgB,oBAAoB,CAAC,MAAM,CAAC,EAAE,oBAAoB,GAAG,cAAc,CAwClF"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@elsium-ai/observe",
3
- "version": "0.7.0",
3
+ "version": "0.9.0",
4
4
  "description": "Observability, tracing, and cost tracking for ElsiumAI",
5
5
  "license": "MIT",
6
6
  "author": "Eric Utrera <ebutrera9103@gmail.com>",
@@ -26,7 +26,7 @@
26
26
  "dev": "bun --watch src/index.ts"
27
27
  },
28
28
  "dependencies": {
29
- "@elsium-ai/core": "^0.7.0"
29
+ "@elsium-ai/core": "^0.9.0"
30
30
  },
31
31
  "devDependencies": {
32
32
  "typescript": "^5.7.0"