@agentgov/sdk 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,408 @@
1
+ # @agentgov/sdk
2
+
3
+ Official SDK for AgentGov — AI Agent Governance Platform.
4
+
5
+ Automatically trace your AI agent operations with minimal code changes. Supports OpenAI, Vercel AI SDK, streaming, tool calls, and more.
6
+
7
+ ## Features
8
+
9
+ - **OpenAI Integration** — Automatic tracing with `wrapOpenAI()`
10
+ - **Vercel AI SDK** — Support for `generateText`, `streamText`, `generateObject`, `embed`
11
+ - **Streaming Support** — Full tracking of streaming responses
12
+ - **Tool Calls** — Automatic span creation for tool/function calls
13
+ - **Cost Estimation** — Built-in pricing for common models
14
+ - **Batching** — High-throughput mode with `queueTrace()` / `queueSpan()`
15
+ - **Context Management** — `withTrace()` / `withSpan()` helpers
16
+
17
+ ## Installation
18
+
19
+ ```bash
20
+ npm install @agentgov/sdk
21
+ # or
22
+ pnpm add @agentgov/sdk
23
+ ```
24
+
25
+ ## Authentication
26
+
27
+ The SDK uses API keys for authentication. Get your API key from the AgentGov dashboard:
28
+
29
+ 1. Go to **Settings → API Keys**
30
+ 2. Click **Create API Key**
31
+ 3. Copy the key (it's only shown once!)
32
+
33
+ API keys have the format `ag_live_xxxxxxxxxxxx` (production) or `ag_test_xxxxxxxxxxxx` (testing).
34
+
35
+ ```typescript
36
+ import { AgentGov } from "@agentgov/sdk";
37
+
38
+ const ag = new AgentGov({
39
+ apiKey: process.env.AGENTGOV_API_KEY!, // ag_live_xxx or ag_test_xxx
40
+ projectId: process.env.AGENTGOV_PROJECT_ID!,
41
+ });
42
+ ```
43
+
44
+ ### API Key Scopes
45
+
46
+ API keys can be scoped to:
47
+ - **All projects** — Access all projects in your organization
48
+ - **Specific project** — Access only the specified project
49
+
50
+ ### Error Handling for Auth
51
+
52
+ ```typescript
53
+ import { AgentGov, AgentGovAPIError } from "@agentgov/sdk";
54
+
55
+ try {
56
+ const trace = await ag.trace({ name: "My Trace" });
57
+ } catch (error) {
58
+ if (error instanceof AgentGovAPIError) {
59
+ if (error.statusCode === 401) {
60
+ console.error("Invalid API key");
61
+ } else if (error.statusCode === 403) {
62
+ console.error("Access denied - check API key permissions");
63
+ } else if (error.statusCode === 429) {
64
+ console.error("Rate limit exceeded");
65
+ }
66
+ }
67
+ }
68
+ ```
69
+
70
+ ## Quick Start
71
+
72
+ ### OpenAI Integration
73
+
74
+ ```typescript
75
+ import { AgentGov } from "@agentgov/sdk";
76
+ import OpenAI from "openai";
77
+
78
+ const ag = new AgentGov({
79
+ apiKey: process.env.AGENTGOV_API_KEY!,
80
+ projectId: process.env.AGENTGOV_PROJECT_ID!,
81
+ });
82
+
83
+ // Wrap your OpenAI client
84
+ const openai = ag.wrapOpenAI(new OpenAI());
85
+
86
+ // All calls are automatically traced - including streaming!
87
+ const response = await openai.chat.completions.create({
88
+ model: "gpt-4o",
89
+ messages: [{ role: "user", content: "Hello!" }],
90
+ });
91
+
92
+ // Streaming also works
93
+ const stream = await openai.chat.completions.create({
94
+ model: "gpt-4o",
95
+ messages: [{ role: "user", content: "Write a poem" }],
96
+ stream: true,
97
+ });
98
+
99
+ for await (const chunk of stream) {
100
+ process.stdout.write(chunk.choices[0]?.delta?.content || "");
101
+ }
102
+ ```
103
+
104
+ ### Vercel AI SDK Integration
105
+
106
+ ```typescript
107
+ import { AgentGov } from "@agentgov/sdk";
108
+ import { generateText, streamText } from "ai";
109
+ import { openai } from "@ai-sdk/openai";
110
+
111
+ const ag = new AgentGov({
112
+ apiKey: process.env.AGENTGOV_API_KEY!,
113
+ projectId: process.env.AGENTGOV_PROJECT_ID!,
114
+ });
115
+
116
+ // Wrap Vercel AI SDK functions
117
+ const tracedGenerateText = ag.wrapGenerateText(generateText);
118
+ const tracedStreamText = ag.wrapStreamText(streamText);
119
+
120
+ // Use them like normal
121
+ const { text } = await tracedGenerateText({
122
+ model: openai("gpt-4o"),
123
+ prompt: "Hello!",
124
+ });
125
+
126
+ // Streaming
127
+ const { textStream } = await tracedStreamText({
128
+ model: openai("gpt-4o"),
129
+ prompt: "Write a story",
130
+ });
131
+
132
+ for await (const chunk of textStream) {
133
+ process.stdout.write(chunk);
134
+ }
135
+ ```
136
+
137
+ ### Manual Tracing
138
+
139
+ ```typescript
140
+ import { AgentGov } from "@agentgov/sdk";
141
+
142
+ const ag = new AgentGov({
143
+ apiKey: "ag_xxx",
144
+ projectId: "your-project-id",
145
+ });
146
+
147
+ // Using withTrace helper (recommended)
148
+ const result = await ag.withTrace({ name: "My Agent Pipeline" }, async () => {
149
+ // Nested spans
150
+ const docs = await ag.withSpan(
151
+ { name: "Retrieve Documents", type: "RETRIEVAL" },
152
+ async () => {
153
+ return ["doc1", "doc2"];
154
+ }
155
+ );
156
+
157
+ const response = await ag.withSpan(
158
+ { name: "Generate Response", type: "LLM_CALL", model: "gpt-4o" },
159
+ async (span) => {
160
+ // Update span with metrics
161
+ await ag.endSpan(span.id, {
162
+ promptTokens: 150,
163
+ outputTokens: 50,
164
+ cost: 0.01,
165
+ });
166
+ return { content: "Hello!" };
167
+ }
168
+ );
169
+
170
+ return response;
171
+ });
172
+ ```
173
+
174
+ ### High-Throughput Batching
175
+
176
+ ```typescript
177
+ const ag = new AgentGov({
178
+ apiKey: "ag_xxx",
179
+ projectId: "xxx",
180
+ batchSize: 10, // Flush after 10 items
181
+ flushInterval: 5000, // Or after 5 seconds
182
+ });
183
+
184
+ // Queue items (don't await immediately)
185
+ const tracePromise = ag.queueTrace({ name: "Batch Trace" });
186
+ const spanPromise = ag.queueSpan({
187
+ traceId: "...",
188
+ name: "Batch Span",
189
+ type: "CUSTOM",
190
+ });
191
+
192
+ // Force flush when needed
193
+ await ag.flush();
194
+
195
+ // Or shutdown gracefully
196
+ await ag.shutdown();
197
+ ```
198
+
199
+ ## Configuration
200
+
201
+ ```typescript
202
+ interface AgentGovConfig {
203
+ /** API key from AgentGov dashboard (ag_xxx) */
204
+ apiKey: string;
205
+
206
+ /** Project ID */
207
+ projectId: string;
208
+
209
+ /** API base URL (default: https://api.agentgov.co) */
210
+ baseUrl?: string;
211
+
212
+ /** Enable debug logging */
213
+ debug?: boolean;
214
+
215
+ /** Flush interval in ms (default: 5000) */
216
+ flushInterval?: number;
217
+
218
+ /** Max batch size before auto-flush (default: 10) */
219
+ batchSize?: number;
220
+
221
+ /** Max retry attempts for failed API requests (default: 3) */
222
+ maxRetries?: number;
223
+
224
+ /** Base delay in ms for exponential backoff (default: 1000) */
225
+ retryDelay?: number;
226
+
227
+ /** Request timeout in ms (default: 30000) */
228
+ timeout?: number;
229
+
230
+ /** Callback for batch flush errors (optional) */
231
+ onError?: (error: Error, context: { operation: string; itemCount?: number }) => void;
232
+ }
233
+ ```
234
+
235
+ ### Error Callback
236
+
237
+ Handle batch flush errors with the `onError` callback:
238
+
239
+ ```typescript
240
+ const ag = new AgentGov({
241
+ apiKey: "ag_xxx",
242
+ projectId: "xxx",
243
+ onError: (error, context) => {
244
+ console.error(`[AgentGov] ${context.operation} failed:`, error.message);
245
+ // Send to your error tracking service
246
+ Sentry.captureException(error, { extra: context });
247
+ },
248
+ });
249
+ ```
250
+
251
+ By default, errors during batch flush are:
252
+ - Logged to console in `debug` mode
253
+ - Silently dropped in production (so they don't affect your app)
254
+
255
+ ## Error Handling
256
+
257
+ The SDK includes built-in retry logic with exponential backoff:
258
+
259
+ ```typescript
260
+ import { AgentGov, AgentGovAPIError } from "@agentgov/sdk";
261
+
262
+ const ag = new AgentGov({
263
+ apiKey: "ag_xxx",
264
+ projectId: "xxx",
265
+ maxRetries: 3, // Retry up to 3 times
266
+ retryDelay: 1000, // Start with 1s delay
267
+ timeout: 30000, // 30s request timeout
268
+ });
269
+
270
+ try {
271
+ const trace = await ag.trace({ name: "My Trace" });
272
+ } catch (error) {
273
+ if (error instanceof AgentGovAPIError) {
274
+ console.log(`Status: ${error.statusCode}`);
275
+ console.log(`Retryable: ${error.retryable}`);
276
+ }
277
+ }
278
+ ```
279
+
280
+ **Automatic retries for:**
281
+
282
+ - `429` - Rate limited (respects `Retry-After` header)
283
+ - `408` - Request timeout
284
+ - `5xx` - Server errors
285
+
286
+ **No retries for:**
287
+
288
+ - `400` - Bad request
289
+ - `401` - Unauthorized
290
+ - `403` - Forbidden
291
+ - `404` - Not found
292
+
293
+ ## Wrapper Options
294
+
295
+ ### OpenAI Options
296
+
297
+ ```typescript
298
+ const openai = ag.wrapOpenAI(new OpenAI(), {
299
+ traceNamePrefix: "my-agent", // Custom trace name prefix
300
+ autoTrace: true, // Auto-create trace for each call
301
+ captureInput: true, // Include prompts in trace
302
+ captureOutput: true, // Include responses in trace
303
+ traceToolCalls: true, // Create spans for tool calls
304
+ });
305
+ ```
306
+
307
+ ### Vercel AI Options
308
+
309
+ ```typescript
310
+ const tracedFn = ag.wrapGenerateText(generateText, {
311
+ traceNamePrefix: "vercel-ai",
312
+ autoTrace: true,
313
+ captureInput: true,
314
+ captureOutput: true,
315
+ traceToolCalls: true,
316
+ });
317
+ ```
318
+
319
+ ## Span Types
320
+
321
+ | Type | Description |
322
+ | ------------ | ------------------------------------- |
323
+ | `LLM_CALL` | Call to LLM (OpenAI, Anthropic, etc.) |
324
+ | `TOOL_CALL` | Tool/function execution |
325
+ | `AGENT_STEP` | High-level agent step |
326
+ | `RETRIEVAL` | RAG retrieval |
327
+ | `EMBEDDING` | Embedding generation |
328
+ | `CUSTOM` | Custom span type |
329
+
330
+ ## Cost Estimation
331
+
332
+ Built-in pricing for common models:
333
+
334
+ ```typescript
335
+ import { estimateCost } from "@agentgov/sdk";
336
+
337
+ const cost = estimateCost("gpt-4o", 1000, 500); // (model, promptTokens, completionTokens)
338
+ // Returns: 0.0075 (USD)
339
+ ```
340
+
341
+ **Supported models (January 2026):**
342
+
343
+ - OpenAI GPT-5: gpt-5.2, gpt-5.2-pro, gpt-5
344
+ - OpenAI GPT-4: gpt-4.1, gpt-4.1-mini, gpt-4o, gpt-4o-mini
345
+ - OpenAI o-Series: o4-mini, o3-pro, o3, o3-mini, o1, o1-mini
346
+ - OpenAI Legacy: gpt-4-turbo, gpt-4, gpt-3.5-turbo
347
+ - Anthropic: claude-sonnet-4, claude-3.5-sonnet, claude-3.5-haiku, claude-3-opus, claude-3-sonnet, claude-3-haiku
348
+ - Embeddings: text-embedding-3-small, text-embedding-3-large, text-embedding-ada-002
349
+
350
+ ## API Reference
351
+
352
+ ### AgentGov Class
353
+
354
+ | Method | Description |
355
+ | ------------------------- | ---------------------------------------- |
356
+ | `wrapOpenAI(client)` | Wrap OpenAI client for auto-tracing |
357
+ | `wrapGenerateText(fn)` | Wrap Vercel AI generateText |
358
+ | `wrapStreamText(fn)` | Wrap Vercel AI streamText |
359
+ | `wrapGenerateObject(fn)` | Wrap Vercel AI generateObject |
360
+ | `wrapEmbed(fn)` | Wrap Vercel AI embed |
361
+ | `wrapEmbedMany(fn)` | Wrap Vercel AI embedMany |
362
+ | `trace(input)` | Create a new trace |
363
+ | `endTrace(id, update)` | End a trace |
364
+ | `span(input)` | Create a span |
365
+ | `endSpan(id, update)` | End a span |
366
+ | `withTrace(input, fn)` | Execute function within trace context |
367
+ | `withSpan(input, fn)` | Execute function within span context |
368
+ | `queueTrace(input)` | Queue trace creation (batched) |
369
+ | `queueSpan(input)` | Queue span creation (batched) |
370
+ | `flush()` | Force flush queued items |
371
+ | `shutdown()` | Flush and cleanup |
372
+ | `getContext()` | Get current trace context |
373
+ | `setContext(ctx)` | Set trace context (distributed tracing) |
374
+ | `getTrace(id)` | Fetch trace by ID |
375
+ | `getSpan(id)` | Fetch span by ID |
376
+
377
+ ## TypeScript
378
+
379
+ Full TypeScript support:
380
+
381
+ ```typescript
382
+ import type {
383
+ AgentGovConfig,
384
+ Trace,
385
+ Span,
386
+ SpanType,
387
+ TraceContext,
388
+ WrapOpenAIOptions,
389
+ WrapVercelAIOptions,
390
+ } from "@agentgov/sdk";
391
+ ```
392
+
393
+ ## Examples
394
+
395
+ See the [examples](../../examples) directory in the source repository:
396
+
397
+ - `openai-example.ts` — Basic OpenAI integration
398
+ - `streaming-example.ts` — Streaming responses
399
+ - `vercel-ai-example.ts` — Vercel AI SDK integration
400
+ - `manual-tracing.ts` — Manual span creation
401
+
402
+ ## Documentation
403
+
404
+ [docs.agentgov.co](https://docs.agentgov.co)
405
+
406
+ ## License
407
+
408
+ MIT
@@ -0,0 +1,164 @@
1
+ import type { AgentGovConfig, TraceContext, TraceInput, Trace, SpanInput, SpanUpdate, Span } from './types.js';
2
+ import { type WrapOpenAIOptions, type OpenAIClient } from './wrappers/openai.js';
3
+ import { type WrapVercelAIOptions, type GenerateTextResult, type StreamTextResult, type GenerateObjectResult, type EmbedResult, type EmbedManyResult } from './wrappers/vercel-ai.js';
4
+ export declare class AgentGov {
5
+ private fetchClient;
6
+ private config;
7
+ private currentContext;
8
+ private batchQueue;
9
+ private batchTimer;
10
+ private isFlushing;
11
+ constructor(config: AgentGovConfig);
12
+ /**
13
+ * Wrap OpenAI client to automatically trace all calls
14
+ *
15
+ * @example
16
+ * ```typescript
17
+ * const ag = new AgentGov({ apiKey: 'ag_xxx', projectId: 'xxx' })
18
+ * const openai = ag.wrapOpenAI(new OpenAI())
19
+ *
20
+ * // All calls are now automatically traced (including streaming)
21
+ * const response = await openai.chat.completions.create({
22
+ * model: 'gpt-4o',
23
+ * messages: [{ role: 'user', content: 'Hello!' }]
24
+ * })
25
+ * ```
26
+ */
27
+ wrapOpenAI<T extends OpenAIClient>(client: T, options?: WrapOpenAIOptions): T;
28
+ /**
29
+ * Wrap Vercel AI SDK's generateText function
30
+ *
31
+ * @example
32
+ * ```typescript
33
+ * import { generateText } from 'ai'
34
+ * import { openai } from '@ai-sdk/openai'
35
+ *
36
+ * const ag = new AgentGov({ apiKey: 'ag_xxx', projectId: 'xxx' })
37
+ * const tracedGenerateText = ag.wrapGenerateText(generateText)
38
+ *
39
+ * const { text } = await tracedGenerateText({
40
+ * model: openai('gpt-4o'),
41
+ * prompt: 'Hello!'
42
+ * })
43
+ * ```
44
+ */
45
+ wrapGenerateText<TArgs extends Record<string, unknown>, TResult extends GenerateTextResult>(fn: (args: TArgs) => Promise<TResult>, options?: WrapVercelAIOptions): (args: TArgs) => Promise<TResult>;
46
+ /**
47
+ * Wrap Vercel AI SDK's streamText function
48
+ *
49
+ * @example
50
+ * ```typescript
51
+ * import { streamText } from 'ai'
52
+ * import { openai } from '@ai-sdk/openai'
53
+ *
54
+ * const ag = new AgentGov({ apiKey: 'ag_xxx', projectId: 'xxx' })
55
+ * const tracedStreamText = ag.wrapStreamText(streamText)
56
+ *
57
+ * const { textStream } = await tracedStreamText({
58
+ * model: openai('gpt-4o'),
59
+ * prompt: 'Hello!'
60
+ * })
61
+ *
62
+ * for await (const chunk of textStream) {
63
+ * process.stdout.write(chunk)
64
+ * }
65
+ * ```
66
+ */
67
+ wrapStreamText<TArgs extends Record<string, unknown>, TResult extends StreamTextResult>(fn: (args: TArgs) => Promise<TResult>, options?: WrapVercelAIOptions): (args: TArgs) => Promise<TResult>;
68
+ /**
69
+ * Wrap Vercel AI SDK's generateObject function
70
+ */
71
+ wrapGenerateObject<TArgs extends Record<string, unknown>, TResult extends GenerateObjectResult>(fn: (args: TArgs) => Promise<TResult>, options?: WrapVercelAIOptions): (args: TArgs) => Promise<TResult>;
72
+ /**
73
+ * Wrap Vercel AI SDK's embed function
74
+ */
75
+ wrapEmbed<TArgs extends Record<string, unknown>, TResult extends EmbedResult>(fn: (args: TArgs) => Promise<TResult>, options?: WrapVercelAIOptions): (args: TArgs) => Promise<TResult>;
76
+ /**
77
+ * Wrap Vercel AI SDK's embedMany function
78
+ */
79
+ wrapEmbedMany<TArgs extends Record<string, unknown>, TResult extends EmbedManyResult>(fn: (args: TArgs) => Promise<TResult>, options?: WrapVercelAIOptions): (args: TArgs) => Promise<TResult>;
80
+ /**
81
+ * Create a new trace
82
+ *
83
+ * @example
84
+ * ```typescript
85
+ * const trace = await ag.trace({ name: 'My Agent Run' })
86
+ * // ... do work ...
87
+ * await ag.endTrace(trace.id, { status: 'COMPLETED' })
88
+ * ```
89
+ */
90
+ trace(input?: TraceInput): Promise<Trace>;
91
+ /**
92
+ * End a trace
93
+ */
94
+ endTrace(traceId: string, update?: {
95
+ status?: 'COMPLETED' | 'FAILED';
96
+ output?: Record<string, unknown>;
97
+ }): Promise<Trace>;
98
+ /**
99
+ * Create a span within current trace
100
+ */
101
+ span(input: Omit<SpanInput, 'traceId'> & {
102
+ traceId?: string;
103
+ }): Promise<Span>;
104
+ /**
105
+ * End a span
106
+ */
107
+ endSpan(spanId: string, update?: SpanUpdate): Promise<Span>;
108
+ /**
109
+ * Run a function within a trace context
110
+ *
111
+ * @example
112
+ * ```typescript
113
+ * const result = await ag.withTrace({ name: 'My Operation' }, async (ctx) => {
114
+ * // All OpenAI calls within this function are traced
115
+ * const response = await openai.chat.completions.create(...)
116
+ * return response
117
+ * })
118
+ * ```
119
+ */
120
+ withTrace<T>(input: TraceInput, fn: (context: TraceContext) => Promise<T>): Promise<T>;
121
+ /**
122
+ * Run a function within a span context
123
+ */
124
+ withSpan<T>(input: Omit<SpanInput, 'traceId'> & {
125
+ traceId?: string;
126
+ }, fn: (span: Span) => Promise<T>): Promise<T>;
127
+ /**
128
+ * Queue a trace creation (batched)
129
+ * Use this for high-throughput scenarios where you don't need immediate response
130
+ */
131
+ queueTrace(input: TraceInput): Promise<Trace>;
132
+ /**
133
+ * Queue a span creation (batched)
134
+ */
135
+ queueSpan(input: SpanInput): Promise<Span>;
136
+ /**
137
+ * Force flush all queued items
138
+ */
139
+ flush(): Promise<void>;
140
+ private handleFlushError;
141
+ private scheduleBatchFlush;
142
+ private processBatch;
143
+ /**
144
+ * Get current trace context
145
+ */
146
+ getContext(): TraceContext | null;
147
+ /**
148
+ * Set trace context (useful for distributed tracing)
149
+ */
150
+ setContext(context: TraceContext | null): void;
151
+ /**
152
+ * Get a trace by ID
153
+ */
154
+ getTrace(traceId: string): Promise<Trace>;
155
+ /**
156
+ * Get a span by ID
157
+ */
158
+ getSpan(spanId: string): Promise<Span>;
159
+ /**
160
+ * Shutdown the client, flushing any remaining batched items
161
+ */
162
+ shutdown(): Promise<void>;
163
+ }
164
+ //# sourceMappingURL=client.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"client.d.ts","sourceRoot":"","sources":["../src/client.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EACV,cAAc,EACd,YAAY,EACZ,UAAU,EACV,KAAK,EACL,SAAS,EACT,UAAU,EACV,IAAI,EACL,MAAM,YAAY,CAAA;AAEnB,OAAO,EAAc,KAAK,iBAAiB,EAAE,KAAK,YAAY,EAAE,MAAM,sBAAsB,CAAA;AAC5F,OAAO,EAML,KAAK,mBAAmB,EACxB,KAAK,kBAAkB,EACvB,KAAK,gBAAgB,EACrB,KAAK,oBAAoB,EACzB,KAAK,WAAW,EAChB,KAAK,eAAe,EACrB,MAAM,yBAAyB,CAAA;AAehC,qBAAa,QAAQ;IACnB,OAAO,CAAC,WAAW,CAAa;IAChC,OAAO,CAAC,MAAM,CAAgB;IAC9B,OAAO,CAAC,cAAc,CAA4B;IAGlD,OAAO,CAAC,UAAU,CAAkB;IACpC,OAAO,CAAC,UAAU,CAA6C;IAC/D,OAAO,CAAC,UAAU,CAAQ;gBAEd,MAAM,EAAE,cAAc;IAoClC;;;;;;;;;;;;;;OAcG;IACH,UAAU,CAAC,CAAC,SAAS,YAAY,EAAE,MAAM,EAAE,CAAC,EAAE,OAAO,CAAC,EAAE,iBAAiB,GAAG,CAAC;IAW7E;;;;;;;;;;;;;;;;OAgBG;IACH,gBAAgB,CACd,KAAK,SAAS,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EACrC,OAAO,SAAS,kBAAkB,EAElC,EAAE,EAAE,CAAC,IAAI,EAAE,KAAK,KAAK,OAAO,CAAC,OAAO,CAAC,EACrC,OAAO,CAAC,EAAE,mBAAmB,GAC5B,CAAC,IAAI,EAAE,KAAK,KAAK,OAAO,CAAC,OAAO,CAAC;IAOpC;;;;;;;;;;;;;;;;;;;;OAoBG;IACH,cAAc,CACZ,KAAK,SAAS,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EACrC,OAAO,SAAS,gBAAgB,EAEhC,EAAE,EAAE,CAAC,IAAI,EAAE,KAAK,KAAK,OAAO,CAAC,OAAO,CAAC,EACrC,OAAO,CAAC,EAAE,mBAAmB,GAC5B,CAAC,IAAI,EAAE,KAAK,KAAK,OAAO,CAAC,OAAO,CAAC;IAOpC;;OAEG;IACH,kBAAkB,CAChB,KAAK,SAAS,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EACrC,OAAO,SAAS,oBAAoB,EAEpC,EAAE,EAAE,CAAC,IAAI,EAAE,KAAK,KAAK,OAAO,CAAC,OAAO,CAAC,EACrC,OAAO,CAAC,EAAE,mBAAmB,GAC5B,CAAC,IAAI,EAAE,KAAK,KAAK,OAAO,CAAC,OAAO,CAAC;IAOpC;;OAEG;IACH,SAAS,CACP,KAAK,SAAS,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EACrC,OAAO,SAAS,WAAW,EAE3B,EAAE,EAAE,CAAC,IAAI,EAAE,KAAK,KAAK,OAAO,CAAC,OAAO,CAAC,EACrC,OAAO,CAAC,EAAE,mBAAmB,GAC5B,CAAC,IAAI,EAAE,KAAK,KAAK,OAAO,CAAC,OAAO,CAAC;IAOpC;;OAEG;IACH,aAAa,CACX,KAAK,SAAS,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EACrC,OAAO,SAAS,eAAe,EAE/B,EAAE,EAAE,CAAC,IAAI,EAAE,KAAK,KAAK,OAAO,CAAC,OAAO,CAAC,EACrC,OAAO,CAAC,EAAE,mBAAmB,GAC5B,CAAC,IAAI,EAAE,KAAK,KAAK,OAAO,CAAC,OAAO,CAAC;IAWpC;;;;;;;;;OASG;IACG,KAAK,CAAC,KAAK,GAAE,UAAe,GAAG,OAAO,CAAC,KAAK,CAAC;IAMnD;;OAEG;IACG,Q
AAQ,CACZ,OAAO,EAAE,MAAM,EACf,MAAM,GAAE;QAAE,MAAM,CAAC,EAAE,WAAW,GAAG,QAAQ,CAAC;QAAC,MAAM,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA;KAAO,GACjF,OAAO,CAAC,KAAK,CAAC;IAajB;;OAEG;IACG,IAAI,CAAC,KAAK,EAAE,IAAI,CAAC,SAAS,EAAE,SAAS,CAAC,GAAG;QAAE,OAAO,CAAC,EAAE,MAAM,CAAA;KAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAgBnF;;OAEG;IACG,OAAO,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,GAAE,UAAe,GAAG,OAAO,CAAC,IAAI,CAAC;IAWrE;;;;;;;;;;;OAWG;IACG,SAAS,CAAC,CAAC,EACf,KAAK,EAAE,UAAU,EACjB,EAAE,EAAE,CAAC,OAAO,EAAE,YAAY,KAAK,OAAO,CAAC,CAAC,CAAC,GACxC,OAAO,CAAC,CAAC,CAAC;IAab;;OAEG;IACG,QAAQ,CAAC,CAAC,EACd,KAAK,EAAE,IAAI,CAAC,SAAS,EAAE,SAAS,CAAC,GAAG;QAAE,OAAO,CAAC,EAAE,MAAM,CAAA;KAAE,EACxD,EAAE,EAAE,CAAC,IAAI,EAAE,IAAI,KAAK,OAAO,CAAC,CAAC,CAAC,GAC7B,OAAO,CAAC,CAAC,CAAC;IA6Bb;;;OAGG;IACH,UAAU,CAAC,KAAK,EAAE,UAAU,GAAG,OAAO,CAAC,KAAK,CAAC;IAY7C;;OAEG;IACH,SAAS,CAAC,KAAK,EAAE,SAAS,GAAG,OAAO,CAAC,IAAI,CAAC;IAY1C;;OAEG;IACG,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAQ5B,OAAO,CAAC,gBAAgB;IAYxB,OAAO,CAAC,kBAAkB;YAkBZ,YAAY;IAkD1B;;OAEG;IACH,UAAU,IAAI,YAAY,GAAG,IAAI;IAIjC;;OAEG;IACH,UAAU,CAAC,OAAO,EAAE,YAAY,GAAG,IAAI,GAAG,IAAI;IAI9C;;OAEG;IACG,QAAQ,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO,CAAC,KAAK,CAAC;IAI/C;;OAEG;IACG,OAAO,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAI5C;;OAEG;IACG,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;CAGhC"}