@superblocksteam/vite-plugin-file-sync 2.0.42-next.41 → 2.0.42-next.43

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (179)
  1. package/dist/ai-service/agent/apis.d.ts.map +1 -1
  2. package/dist/ai-service/agent/apis.js +34 -26
  3. package/dist/ai-service/agent/apis.js.map +1 -1
  4. package/dist/ai-service/index.d.ts +1 -0
  5. package/dist/ai-service/index.d.ts.map +1 -1
  6. package/dist/ai-service/index.js +7 -0
  7. package/dist/ai-service/index.js.map +1 -1
  8. package/dist/ai-service/llm/context/caching/constants.d.ts +27 -0
  9. package/dist/ai-service/llm/context/caching/constants.d.ts.map +1 -0
  10. package/dist/ai-service/llm/context/caching/constants.js +20 -0
  11. package/dist/ai-service/llm/context/caching/constants.js.map +1 -0
  12. package/dist/ai-service/llm/context/caching/factory.d.ts +12 -0
  13. package/dist/ai-service/llm/context/caching/factory.d.ts.map +1 -0
  14. package/dist/ai-service/llm/context/caching/factory.js +28 -0
  15. package/dist/ai-service/llm/context/caching/factory.js.map +1 -0
  16. package/dist/ai-service/llm/context/caching/index.d.ts +10 -0
  17. package/dist/ai-service/llm/context/caching/index.d.ts.map +1 -0
  18. package/dist/ai-service/llm/context/caching/index.js +9 -0
  19. package/dist/ai-service/llm/context/caching/index.js.map +1 -0
  20. package/dist/ai-service/llm/context/caching/quantized-strategy.d.ts +22 -0
  21. package/dist/ai-service/llm/context/caching/quantized-strategy.d.ts.map +1 -0
  22. package/dist/ai-service/llm/context/caching/quantized-strategy.js +48 -0
  23. package/dist/ai-service/llm/context/caching/quantized-strategy.js.map +1 -0
  24. package/dist/ai-service/llm/context/caching/strategy.d.ts +20 -0
  25. package/dist/ai-service/llm/context/caching/strategy.d.ts.map +1 -0
  26. package/dist/ai-service/llm/context/caching/strategy.js +5 -0
  27. package/dist/ai-service/llm/context/caching/strategy.js.map +1 -0
  28. package/dist/ai-service/llm/context/caching/types.d.ts +34 -0
  29. package/dist/ai-service/llm/context/caching/types.d.ts.map +1 -0
  30. package/dist/ai-service/llm/context/caching/types.js +8 -0
  31. package/dist/ai-service/llm/context/caching/types.js.map +1 -0
  32. package/dist/ai-service/llm/context/config.d.ts +51 -0
  33. package/dist/ai-service/llm/context/config.d.ts.map +1 -0
  34. package/dist/ai-service/llm/context/config.js +58 -0
  35. package/dist/ai-service/llm/context/config.js.map +1 -0
  36. package/dist/ai-service/llm/context/constants.d.ts +106 -0
  37. package/dist/ai-service/llm/context/constants.d.ts.map +1 -0
  38. package/dist/ai-service/llm/context/constants.js +106 -0
  39. package/dist/ai-service/llm/context/constants.js.map +1 -0
  40. package/dist/ai-service/llm/context/context.d.ts +207 -0
  41. package/dist/ai-service/llm/context/context.d.ts.map +1 -0
  42. package/dist/ai-service/llm/context/context.js +742 -0
  43. package/dist/ai-service/llm/context/context.js.map +1 -0
  44. package/dist/ai-service/llm/context/errors.d.ts +45 -0
  45. package/dist/ai-service/llm/context/errors.d.ts.map +1 -0
  46. package/dist/ai-service/llm/context/errors.js +56 -0
  47. package/dist/ai-service/llm/context/errors.js.map +1 -0
  48. package/dist/ai-service/llm/context/index.d.ts +13 -0
  49. package/dist/ai-service/llm/context/index.d.ts.map +1 -0
  50. package/dist/ai-service/llm/context/index.js +17 -0
  51. package/dist/ai-service/llm/context/index.js.map +1 -0
  52. package/dist/ai-service/llm/context/internal-types.d.ts +52 -0
  53. package/dist/ai-service/llm/context/internal-types.d.ts.map +1 -0
  54. package/dist/ai-service/llm/context/internal-types.js +28 -0
  55. package/dist/ai-service/llm/context/internal-types.js.map +1 -0
  56. package/dist/ai-service/llm/context/levels/index.d.ts +12 -0
  57. package/dist/ai-service/llm/context/levels/index.d.ts.map +1 -0
  58. package/dist/ai-service/llm/context/levels/index.js +12 -0
  59. package/dist/ai-service/llm/context/levels/index.js.map +1 -0
  60. package/dist/ai-service/llm/context/levels/l1.d.ts +83 -0
  61. package/dist/ai-service/llm/context/levels/l1.d.ts.map +1 -0
  62. package/dist/ai-service/llm/context/levels/l1.js +143 -0
  63. package/dist/ai-service/llm/context/levels/l1.js.map +1 -0
  64. package/dist/ai-service/llm/context/levels/l2.d.ts +60 -0
  65. package/dist/ai-service/llm/context/levels/l2.d.ts.map +1 -0
  66. package/dist/ai-service/llm/context/levels/l2.js +96 -0
  67. package/dist/ai-service/llm/context/levels/l2.js.map +1 -0
  68. package/dist/ai-service/llm/context/levels/l3.d.ts +46 -0
  69. package/dist/ai-service/llm/context/levels/l3.d.ts.map +1 -0
  70. package/dist/ai-service/llm/context/levels/l3.js +70 -0
  71. package/dist/ai-service/llm/context/levels/l3.js.map +1 -0
  72. package/dist/ai-service/llm/context/logger.d.ts +17 -0
  73. package/dist/ai-service/llm/context/logger.d.ts.map +1 -0
  74. package/dist/ai-service/llm/context/logger.js +26 -0
  75. package/dist/ai-service/llm/context/logger.js.map +1 -0
  76. package/dist/ai-service/llm/context/manager.d.ts +79 -0
  77. package/dist/ai-service/llm/context/manager.d.ts.map +1 -0
  78. package/dist/ai-service/llm/context/manager.js +136 -0
  79. package/dist/ai-service/llm/context/manager.js.map +1 -0
  80. package/dist/ai-service/llm/context/options.d.ts +43 -0
  81. package/dist/ai-service/llm/context/options.d.ts.map +1 -0
  82. package/dist/ai-service/llm/context/options.js +81 -0
  83. package/dist/ai-service/llm/context/options.js.map +1 -0
  84. package/dist/ai-service/llm/context/serialization.d.ts +56 -0
  85. package/dist/ai-service/llm/context/serialization.d.ts.map +1 -0
  86. package/dist/ai-service/llm/context/serialization.js +9 -0
  87. package/dist/ai-service/llm/context/serialization.js.map +1 -0
  88. package/dist/ai-service/llm/context/storage/index.d.ts +10 -0
  89. package/dist/ai-service/llm/context/storage/index.d.ts.map +1 -0
  90. package/dist/ai-service/llm/context/storage/index.js +2 -0
  91. package/dist/ai-service/llm/context/storage/index.js.map +1 -0
  92. package/dist/ai-service/llm/context/storage/local.d.ts +22 -0
  93. package/dist/ai-service/llm/context/storage/local.d.ts.map +1 -0
  94. package/dist/ai-service/llm/context/storage/local.js +42 -0
  95. package/dist/ai-service/llm/context/storage/local.js.map +1 -0
  96. package/dist/ai-service/llm/context/types.d.ts +125 -0
  97. package/dist/ai-service/llm/context/types.d.ts.map +1 -0
  98. package/dist/ai-service/llm/context/types.js +5 -0
  99. package/dist/ai-service/llm/context/types.js.map +1 -0
  100. package/dist/ai-service/llm/context/utils/content-compaction.d.ts +26 -0
  101. package/dist/ai-service/llm/context/utils/content-compaction.d.ts.map +1 -0
  102. package/dist/ai-service/llm/context/utils/content-compaction.js +52 -0
  103. package/dist/ai-service/llm/context/utils/content-compaction.js.map +1 -0
  104. package/dist/ai-service/llm/context/utils/index.d.ts +10 -0
  105. package/dist/ai-service/llm/context/utils/index.d.ts.map +1 -0
  106. package/dist/ai-service/llm/context/utils/index.js +10 -0
  107. package/dist/ai-service/llm/context/utils/index.js.map +1 -0
  108. package/dist/ai-service/llm/context/utils/message-utils.d.ts +34 -0
  109. package/dist/ai-service/llm/context/utils/message-utils.d.ts.map +1 -0
  110. package/dist/ai-service/llm/context/utils/message-utils.js +68 -0
  111. package/dist/ai-service/llm/context/utils/message-utils.js.map +1 -0
  112. package/dist/ai-service/llm/context/utils/token-estimation.d.ts +31 -0
  113. package/dist/ai-service/llm/context/utils/token-estimation.d.ts.map +1 -0
  114. package/dist/ai-service/llm/context/utils/token-estimation.js +52 -0
  115. package/dist/ai-service/llm/context/utils/token-estimation.js.map +1 -0
  116. package/dist/ai-service/llm/context/utils/visualization.d.ts +40 -0
  117. package/dist/ai-service/llm/context/utils/visualization.d.ts.map +1 -0
  118. package/dist/ai-service/llm/context/utils/visualization.js +516 -0
  119. package/dist/ai-service/llm/context/utils/visualization.js.map +1 -0
  120. package/dist/ai-service/llmobs/helpers.d.ts.map +1 -1
  121. package/dist/ai-service/llmobs/helpers.js +10 -19
  122. package/dist/ai-service/llmobs/helpers.js.map +1 -1
  123. package/dist/ai-service/prompt-builder-service/static-fragments/library-components/ButtonPropsDocs.js +1 -1
  124. package/dist/ai-service/prompt-builder-service/static-fragments/library-components/CheckboxPropsDocs.js +1 -1
  125. package/dist/ai-service/prompt-builder-service/static-fragments/library-components/ColumnPropsDocs.js +1 -1
  126. package/dist/ai-service/prompt-builder-service/static-fragments/library-components/ContainerPropsDocs.js +1 -1
  127. package/dist/ai-service/prompt-builder-service/static-fragments/library-components/DatePickerPropsDocs.js +1 -1
  128. package/dist/ai-service/prompt-builder-service/static-fragments/library-components/DropdownPropsDocs.js +1 -1
  129. package/dist/ai-service/prompt-builder-service/static-fragments/library-components/IconPropsDocs.js +1 -1
  130. package/dist/ai-service/prompt-builder-service/static-fragments/library-components/ImagePropsDocs.js +1 -1
  131. package/dist/ai-service/prompt-builder-service/static-fragments/library-components/InputPropsDocs.js +1 -1
  132. package/dist/ai-service/prompt-builder-service/static-fragments/library-components/ModalPropsDocs.js +1 -1
  133. package/dist/ai-service/prompt-builder-service/static-fragments/library-components/PagePropsDocs.js +1 -1
  134. package/dist/ai-service/prompt-builder-service/static-fragments/library-components/SectionPropsDocs.js +1 -1
  135. package/dist/ai-service/prompt-builder-service/static-fragments/library-components/SlideoutPropsDocs.js +1 -1
  136. package/dist/ai-service/prompt-builder-service/static-fragments/library-components/SwitchPropsDocs.js +1 -1
  137. package/dist/ai-service/prompt-builder-service/static-fragments/library-components/TablePropsDocs.js +1 -1
  138. package/dist/ai-service/prompt-builder-service/static-fragments/library-components/TextPropsDocs.js +1 -1
  139. package/dist/ai-service/prompt-builder-service/static-fragments/library-typedefs/Dim.js +1 -1
  140. package/dist/ai-service/prompt-builder-service/static-fragments/library-typedefs/EventFlow.js +1 -1
  141. package/dist/ai-service/prompt-builder-service/static-fragments/library-typedefs/TextStyleWithVariant.js +1 -1
  142. package/dist/ai-service/prompt-builder-service/static-fragments/platform-parts/full-examples.js +1 -1
  143. package/dist/ai-service/prompt-builder-service/static-fragments/platform-parts/superblocks-api.js +1 -1
  144. package/dist/ai-service/prompt-builder-service/static-fragments/platform-parts/superblocks-components-rules.js +1 -1
  145. package/dist/ai-service/prompt-builder-service/static-fragments/platform-parts/superblocks-custom-components.js +1 -1
  146. package/dist/ai-service/prompt-builder-service/static-fragments/platform-parts/superblocks-data-filtering.js +1 -1
  147. package/dist/ai-service/prompt-builder-service/static-fragments/platform-parts/superblocks-event-flow.js +1 -1
  148. package/dist/ai-service/prompt-builder-service/static-fragments/platform-parts/superblocks-forms.js +1 -1
  149. package/dist/ai-service/prompt-builder-service/static-fragments/platform-parts/superblocks-layouts.js +1 -1
  150. package/dist/ai-service/prompt-builder-service/static-fragments/platform-parts/superblocks-page.js +1 -1
  151. package/dist/ai-service/prompt-builder-service/static-fragments/platform-parts/superblocks-rbac.js +1 -1
  152. package/dist/ai-service/prompt-builder-service/static-fragments/platform-parts/superblocks-routes.js +1 -1
  153. package/dist/ai-service/prompt-builder-service/static-fragments/platform-parts/superblocks-state.js +1 -1
  154. package/dist/ai-service/prompt-builder-service/static-fragments/platform-parts/superblocks-theming-chakra-new.js +1 -1
  155. package/dist/ai-service/prompt-builder-service/static-fragments/platform-parts/system-base.js +1 -1
  156. package/dist/ai-service/prompt-builder-service/static-fragments/platform-parts/system-incremental.js +1 -1
  157. package/dist/ai-service/prompt-builder-service/static-fragments/platform-parts/system-specific-edit.js +1 -1
  158. package/dist/ai-service/state-machine/clark-fsm.d.ts +2 -0
  159. package/dist/ai-service/state-machine/clark-fsm.d.ts.map +1 -1
  160. package/dist/ai-service/state-machine/clark-fsm.js.map +1 -1
  161. package/dist/ai-service/state-machine/handlers/llm-generating.d.ts.map +1 -1
  162. package/dist/ai-service/state-machine/handlers/llm-generating.js +50 -71
  163. package/dist/ai-service/state-machine/handlers/llm-generating.js.map +1 -1
  164. package/dist/ai-service/state-machine/helpers/context-id.d.ts +4 -0
  165. package/dist/ai-service/state-machine/helpers/context-id.d.ts.map +1 -0
  166. package/dist/ai-service/state-machine/helpers/context-id.js +16 -0
  167. package/dist/ai-service/state-machine/helpers/context-id.js.map +1 -0
  168. package/dist/ai-service/util/parse-jwt.d.ts +12 -0
  169. package/dist/ai-service/util/parse-jwt.d.ts.map +1 -0
  170. package/dist/ai-service/util/parse-jwt.js +30 -0
  171. package/dist/ai-service/util/parse-jwt.js.map +1 -0
  172. package/dist/ai-service/util/safe-stringify.d.ts +1 -1
  173. package/dist/ai-service/util/safe-stringify.d.ts.map +1 -1
  174. package/dist/ai-service/util/safe-stringify.js +3 -3
  175. package/dist/ai-service/util/safe-stringify.js.map +1 -1
  176. package/dist/component-docs-service/index.d.ts.map +1 -1
  177. package/dist/component-docs-service/index.js +0 -2
  178. package/dist/component-docs-service/index.js.map +1 -1
  179. package/package.json +7 -7
package/dist/ai-service/llm/context/caching/types.d.ts
@@ -0,0 +1,34 @@
+ /**
+ * Type definitions for cache strategy system.
+ */
+ import type { TokenizedMessage } from "../internal-types.js";
+ import type { ResolvedCachingOptions } from "../options.js";
+ /**
+ * Token counts for cache calculation.
+ */
+ export interface TokenCounts {
+ /** Total tokens across all context */
+ total: number;
+ /** System prompt tokens */
+ system: number;
+ }
+ /**
+ * Input snapshot for cache breakpoint calculation.
+ */
+ export interface CacheInput {
+ /** All messages including system prompt */
+ messages: TokenizedMessage[];
+ /** Token counts */
+ tokens: TokenCounts;
+ /** Caching configuration */
+ options: ResolvedCachingOptions;
+ }
+ /**
+ * Available cache strategy implementations.
+ */
+ export type CacheStrategyType = "quantized" | "none";
+ /**
+ * Default cache strategy for new contexts.
+ */
+ export declare const DEFAULT_CACHE_STRATEGY: CacheStrategyType;
+ //# sourceMappingURL=types.d.ts.map
package/dist/ai-service/llm/context/caching/types.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../../../../src/ai-service/llm/context/caching/types.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,sBAAsB,CAAC;AAC7D,OAAO,KAAK,EAAE,sBAAsB,EAAE,MAAM,eAAe,CAAC;AAE5D;;GAEG;AACH,MAAM,WAAW,WAAW;IAC1B,sCAAsC;IACtC,KAAK,EAAE,MAAM,CAAC;IACd,2BAA2B;IAC3B,MAAM,EAAE,MAAM,CAAC;CAChB;AAED;;GAEG;AACH,MAAM,WAAW,UAAU;IACzB,2CAA2C;IAC3C,QAAQ,EAAE,gBAAgB,EAAE,CAAC;IAC7B,mBAAmB;IACnB,MAAM,EAAE,WAAW,CAAC;IACpB,4BAA4B;IAC5B,OAAO,EAAE,sBAAsB,CAAC;CACjC;AAED;;GAEG;AACH,MAAM,MAAM,iBAAiB,GAAG,WAAW,GAAG,MAAM,CAAC;AAErD;;GAEG;AACH,eAAO,MAAM,sBAAsB,EAAE,iBAA+B,CAAC"}
package/dist/ai-service/llm/context/caching/types.js
@@ -0,0 +1,8 @@
+ /**
+ * Type definitions for cache strategy system.
+ */
+ /**
+ * Default cache strategy for new contexts.
+ */
+ export const DEFAULT_CACHE_STRATEGY = "quantized";
+ //# sourceMappingURL=types.js.map
package/dist/ai-service/llm/context/caching/types.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"types.js","sourceRoot":"","sources":["../../../../../src/ai-service/llm/context/caching/types.ts"],"names":[],"mappings":"AAAA;;GAEG;AAgCH;;GAEG;AACH,MAAM,CAAC,MAAM,sBAAsB,GAAsB,WAAW,CAAC"}
package/dist/ai-service/llm/context/config.d.ts
@@ -0,0 +1,51 @@
+ /**
+ * Configuration loader for context management.
+ *
+ * Supports loading context configuration from environment variables.
+ * When set, environment variable config acts as the final override.
+ */
+ import type { CacheStrategyType } from "./caching/index.js";
+ import type { ContextOptions, LoggingOptions } from "./types.js";
+ /**
+ * Configuration schema for context management.
+ */
+ export interface ContextConfig {
+ /** Context options */
+ context?: ContextOptions;
+ /** Cache strategy to use (default: 'quantized') */
+ cacheStrategy?: CacheStrategyType;
+ /** Logging configuration */
+ logging?: LoggingOptions;
+ }
+ /**
+ * Loads context configuration from environment variable.
+ *
+ * Checks the `SUPERBLOCKS_CLARK_CONTEXT_CONFIG` environment variable for a path
+ * to a YAML configuration file. When set, this configuration acts as the final
+ * override, taking precedence over both default constants and programmatic options.
+ *
+ * Configuration hierarchy (lowest to highest priority):
+ * 1. Default constants (see constants.ts)
+ * 2. Programmatic options passed to ContextManager
+ * 3. Environment variable configuration (this function) - **final override**
+ *
+ * @returns Parsed configuration object, or null if env var not set
+ * @throws Error if file path is set but cannot be parsed
+ *
+ * @example
+ * ```typescript
+ * // Set environment variable
+ * // SUPERBLOCKS_CLARK_CONTEXT_CONFIG=/path/to/config.yaml
+ *
+ * const config = loadContextConfigFromEnv();
+ * if (config) {
+ * const manager = new ContextManager({
+ * storage,
+ * cacheStrategy: config.cacheStrategy,
+ * logging: config.logging,
+ * });
+ * }
+ * ```
+ */
+ export declare function loadContextConfigFromEnv(): ContextConfig | null;
+ //# sourceMappingURL=config.d.ts.map
package/dist/ai-service/llm/context/config.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../../../../src/ai-service/llm/context/config.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAKH,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,oBAAoB,CAAC;AAC5D,OAAO,KAAK,EAAE,cAAc,EAAE,cAAc,EAAE,MAAM,YAAY,CAAC;AAEjE;;GAEG;AACH,MAAM,WAAW,aAAa;IAC5B,sBAAsB;IACtB,OAAO,CAAC,EAAE,cAAc,CAAC;IACzB,mDAAmD;IACnD,aAAa,CAAC,EAAE,iBAAiB,CAAC;IAClC,4BAA4B;IAC5B,OAAO,CAAC,EAAE,cAAc,CAAC;CAC1B;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA6BG;AACH,wBAAgB,wBAAwB,IAAI,aAAa,GAAG,IAAI,CAsB/D"}
package/dist/ai-service/llm/context/config.js
@@ -0,0 +1,58 @@
+ /**
+ * Configuration loader for context management.
+ *
+ * Supports loading context configuration from environment variables.
+ * When set, environment variable config acts as the final override.
+ */
+ import { readFileSync, existsSync } from "fs";
+ import { resolve } from "path";
+ import YAML from "yaml";
+ /**
+ * Loads context configuration from environment variable.
+ *
+ * Checks the `SUPERBLOCKS_CLARK_CONTEXT_CONFIG` environment variable for a path
+ * to a YAML configuration file. When set, this configuration acts as the final
+ * override, taking precedence over both default constants and programmatic options.
+ *
+ * Configuration hierarchy (lowest to highest priority):
+ * 1. Default constants (see constants.ts)
+ * 2. Programmatic options passed to ContextManager
+ * 3. Environment variable configuration (this function) - **final override**
+ *
+ * @returns Parsed configuration object, or null if env var not set
+ * @throws Error if file path is set but cannot be parsed
+ *
+ * @example
+ * ```typescript
+ * // Set environment variable
+ * // SUPERBLOCKS_CLARK_CONTEXT_CONFIG=/path/to/config.yaml
+ *
+ * const config = loadContextConfigFromEnv();
+ * if (config) {
+ * const manager = new ContextManager({
+ * storage,
+ * cacheStrategy: config.cacheStrategy,
+ * logging: config.logging,
+ * });
+ * }
+ * ```
+ */
+ export function loadContextConfigFromEnv() {
+ const envPath = process.env.SUPERBLOCKS_CLARK_CONTEXT_CONFIG;
+ if (!envPath) {
+ return null;
+ }
+ const configPath = resolve(envPath);
+ if (!existsSync(configPath)) {
+ throw new Error(`SUPERBLOCKS_CLARK_CONTEXT_CONFIG points to non-existent file: ${configPath}`);
+ }
+ try {
+ const fileContent = readFileSync(configPath, "utf-8");
+ const config = YAML.parse(fileContent);
+ return config;
+ }
+ catch (error) {
+ throw new Error(`Failed to parse context config at ${configPath}: ${error}`);
+ }
+ }
+ //# sourceMappingURL=config.js.map
package/dist/ai-service/llm/context/config.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"config.js","sourceRoot":"","sources":["../../../../src/ai-service/llm/context/config.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,OAAO,EAAE,YAAY,EAAE,UAAU,EAAE,MAAM,IAAI,CAAC;AAC9C,OAAO,EAAE,OAAO,EAAE,MAAM,MAAM,CAAC;AAC/B,OAAO,IAAI,MAAM,MAAM,CAAC;AAgBxB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA6BG;AACH,MAAM,UAAU,wBAAwB;IACtC,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,gCAAgC,CAAC;IAC7D,IAAI,CAAC,OAAO,EAAE,CAAC;QACb,OAAO,IAAI,CAAC;IACd,CAAC;IAED,MAAM,UAAU,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC;IACpC,IAAI,CAAC,UAAU,CAAC,UAAU,CAAC,EAAE,CAAC;QAC5B,MAAM,IAAI,KAAK,CACb,iEAAiE,UAAU,EAAE,CAC9E,CAAC;IACJ,CAAC;IAED,IAAI,CAAC;QACH,MAAM,WAAW,GAAG,YAAY,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QACtD,MAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,WAAW,CAAkB,CAAC;QACxD,OAAO,MAAM,CAAC;IAChB,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,MAAM,IAAI,KAAK,CACb,qCAAqC,UAAU,KAAK,KAAK,EAAE,CAC5D,CAAC;IACJ,CAAC;AACH,CAAC"}
package/dist/ai-service/llm/context/constants.d.ts
@@ -0,0 +1,106 @@
+ /**
+ * Context Management Constants
+ *
+ * These constants configure the token budget management and compaction behavior
+ * for LLM context windows. The system uses a three-level compaction strategy
+ * to maintain conversation history within token limits while preserving
+ * the most relevant information.
+ */
+ /**
+ * Maximum context size in tokens.
+ *
+ * Based on Vertex AI limits (200k tokens max context, 9k tokens max response),
+ * we set a conservative limit of 185k tokens to ensure safety margin for
+ * response generation and overhead.
+ */
+ export declare const DEFAULT_MAX_CONTEXT_TOKENS = 185000;
+ /**
+ * Token count threshold that triggers automatic compaction as a fraction of max context size.
+ *
+ * When context size exceeds this threshold, the compaction algorithm
+ * demotes older turns to more compressed levels and drops the oldest
+ * content if necessary.
+ *
+ * Defined as a fraction of max context size.
+ */
+ export declare const DEFAULT_COMPACTION_TRIGGER_FRACTION = 0.9;
+ /**
+ * Target token count after compaction completes as a fraction of max context size.
+ *
+ * The compaction algorithm aims to reduce context size to this target,
+ * leaving headroom for new content before the next compaction cycle.
+ *
+ * Defined as a fraction of max context size.
+ */
+ export declare const DEFAULT_COMPACTION_TARGET_FRACTION = 0.8;
+ /**
+ * Budget allocation for Level 1 (L1) turns as fraction of compaction target.
+ *
+ * L1 turns represent the most recent conversation turns with full detail:
+ * - All steps preserved
+ * - Complete tool calls and results
+ * - Full message content
+ *
+ * 50% allocation ensures recent context has maximum fidelity.
+ */
+ export declare const DEFAULT_L1_TARGET_FRACTION = 0.5;
+ /**
+ * Budget allocation for Level 2 (L2) turns as fraction of compaction target.
+ *
+ * L2 turns use moderate compression:
+ * - Steps flattened into single response array
+ * - Tool call inputs replaced with empty objects
+ * - Tool results marked as "[tool output trimmed]"
+ *
+ * 35% allocation balances history depth with compression.
+ */
+ export declare const DEFAULT_L2_TARGET_FRACTION = 0.35;
+ /**
+ * Initial estimate for character-to-token conversion ratio.
+ *
+ * This is a rough heuristic that gets refined during usage based on actual
+ * token counts from the LLM provider. Typical values range from 2-5 depending
+ * on the model and text characteristics.
+ *
+ * Starting value of 3 is conservative and works reasonably for most English text.
+ */
+ export declare const DEFAULT_INITIAL_CHARS_PER_TOKEN_ESTIMATE = 3;
+ /**
+ * Minimum token usage required before updating charactersPerToken estimate.
+ *
+ * Small token counts (e.g., from cached responses) can produce wildly
+ * inaccurate estimates. We only update the estimate when we have substantial
+ * token usage to work with.
+ */
+ export declare const DEFAULT_MIN_TOKENS_FOR_ESTIMATE_UPDATE = 1000;
+ /**
+ * Valid range for charactersPerToken estimates [min, max].
+ *
+ * Estimates outside this range are rejected as unreasonable, preventing
+ * bugs or edge cases from corrupting the token counting logic.
+ */
+ export declare const DEFAULT_CHARACTERS_PER_TOKEN_MIN = 2;
+ export declare const DEFAULT_CHARACTERS_PER_TOKEN_MAX = 5;
+ /**
+ * Number of steps to keep in active turn when compacting.
+ *
+ * At compaction time older steps are split into a separate turn to allows
+ * them to be compacted to L2. This helps avoid context overflow for long turns.
+ *
+ * Value of 3 ensures recent tool-use loops stay in high-fidelity L1 while
+ * older interactions move to compressed L2.
+ *
+ * When aggressive compaction is needed, only the last step is kept in L1.
+ */
+ export declare const DEFAULT_PRIORITIZED_STEPS = 3;
+ /**
+ * If false, an error will be thrown if maxTokens is exceeded after compaction.
+ */
+ export declare const DEFAULT_ALLOW_OVERFLOW = true;
+ /**
+ * Default settings for debug/visualization output.
+ */
+ export declare const DEFAULT_VISUALIZATION_WIDTH = 120;
+ export declare const DEFAULT_VISUALIZATION_MODE = "summary";
+ export declare const DEFAULT_VERBOSE_LOGGING_ENABLED = true;
+ //# sourceMappingURL=constants.d.ts.map
package/dist/ai-service/llm/context/constants.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"constants.d.ts","sourceRoot":"","sources":["../../../../src/ai-service/llm/context/constants.ts"],"names":[],"mappings":"AAAA;;;;;;;GAOG;AAEH;;;;;;GAMG;AACH,eAAO,MAAM,0BAA0B,SAAU,CAAC;AAElD;;;;;;;;GAQG;AACH,eAAO,MAAM,mCAAmC,MAAM,CAAC;AAEvD;;;;;;;GAOG;AACH,eAAO,MAAM,kCAAkC,MAAM,CAAC;AAEtD;;;;;;;;;GASG;AACH,eAAO,MAAM,0BAA0B,MAAM,CAAC;AAE9C;;;;;;;;;GASG;AACH,eAAO,MAAM,0BAA0B,OAAO,CAAC;AAE/C;;;;;;;;GAQG;AACH,eAAO,MAAM,wCAAwC,IAAI,CAAC;AAE1D;;;;;;GAMG;AACH,eAAO,MAAM,sCAAsC,OAAO,CAAC;AAE3D;;;;;GAKG;AACH,eAAO,MAAM,gCAAgC,IAAI,CAAC;AAClD,eAAO,MAAM,gCAAgC,IAAI,CAAC;AAElD;;;;;;;;;;GAUG;AACH,eAAO,MAAM,yBAAyB,IAAI,CAAC;AAE3C;;GAEG;AACH,eAAO,MAAM,sBAAsB,OAAO,CAAC;AAE3C;;GAEG;AAEH,eAAO,MAAM,2BAA2B,MAAM,CAAC;AAC/C,eAAO,MAAM,0BAA0B,YAAY,CAAC;AACpD,eAAO,MAAM,+BAA+B,OAAO,CAAC"}
package/dist/ai-service/llm/context/constants.js
@@ -0,0 +1,106 @@
+ /**
+ * Context Management Constants
+ *
+ * These constants configure the token budget management and compaction behavior
+ * for LLM context windows. The system uses a three-level compaction strategy
+ * to maintain conversation history within token limits while preserving
+ * the most relevant information.
+ */
+ /**
+ * Maximum context size in tokens.
+ *
+ * Based on Vertex AI limits (200k tokens max context, 9k tokens max response),
+ * we set a conservative limit of 185k tokens to ensure safety margin for
+ * response generation and overhead.
+ */
+ export const DEFAULT_MAX_CONTEXT_TOKENS = 185_000;
+ /**
+ * Token count threshold that triggers automatic compaction as a fraction of max context size.
+ *
+ * When context size exceeds this threshold, the compaction algorithm
+ * demotes older turns to more compressed levels and drops the oldest
+ * content if necessary.
+ *
+ * Defined as a fraction of max context size.
+ */
+ export const DEFAULT_COMPACTION_TRIGGER_FRACTION = 0.9;
+ /**
+ * Target token count after compaction completes as a fraction of max context size.
+ *
+ * The compaction algorithm aims to reduce context size to this target,
+ * leaving headroom for new content before the next compaction cycle.
+ *
+ * Defined as a fraction of max context size.
+ */
+ export const DEFAULT_COMPACTION_TARGET_FRACTION = 0.8;
+ /**
+ * Budget allocation for Level 1 (L1) turns as fraction of compaction target.
+ *
+ * L1 turns represent the most recent conversation turns with full detail:
+ * - All steps preserved
+ * - Complete tool calls and results
+ * - Full message content
+ *
+ * 50% allocation ensures recent context has maximum fidelity.
+ */
+ export const DEFAULT_L1_TARGET_FRACTION = 0.5;
+ /**
+ * Budget allocation for Level 2 (L2) turns as fraction of compaction target.
+ *
+ * L2 turns use moderate compression:
+ * - Steps flattened into single response array
+ * - Tool call inputs replaced with empty objects
+ * - Tool results marked as "[tool output trimmed]"
+ *
+ * 35% allocation balances history depth with compression.
+ */
+ export const DEFAULT_L2_TARGET_FRACTION = 0.35;
+ /**
+ * Initial estimate for character-to-token conversion ratio.
+ *
+ * This is a rough heuristic that gets refined during usage based on actual
+ * token counts from the LLM provider. Typical values range from 2-5 depending
+ * on the model and text characteristics.
+ *
+ * Starting value of 3 is conservative and works reasonably for most English text.
+ */
+ export const DEFAULT_INITIAL_CHARS_PER_TOKEN_ESTIMATE = 3;
+ /**
+ * Minimum token usage required before updating charactersPerToken estimate.
+ *
+ * Small token counts (e.g., from cached responses) can produce wildly
+ * inaccurate estimates. We only update the estimate when we have substantial
+ * token usage to work with.
+ */
+ export const DEFAULT_MIN_TOKENS_FOR_ESTIMATE_UPDATE = 1000;
+ /**
+ * Valid range for charactersPerToken estimates [min, max].
+ *
+ * Estimates outside this range are rejected as unreasonable, preventing
+ * bugs or edge cases from corrupting the token counting logic.
+ */
+ export const DEFAULT_CHARACTERS_PER_TOKEN_MIN = 2;
+ export const DEFAULT_CHARACTERS_PER_TOKEN_MAX = 5;
+ /**
+ * Number of steps to keep in active turn when compacting.
+ *
+ * At compaction time older steps are split into a separate turn to allows
+ * them to be compacted to L2. This helps avoid context overflow for long turns.
+ *
+ * Value of 3 ensures recent tool-use loops stay in high-fidelity L1 while
+ * older interactions move to compressed L2.
+ *
+ * When aggressive compaction is needed, only the last step is kept in L1.
+ */
+ export const DEFAULT_PRIORITIZED_STEPS = 3;
+ /**
+ * If false, an error will be thrown if maxTokens is exceeded after compaction.
+ */
+ export const DEFAULT_ALLOW_OVERFLOW = true;
+ /**
+ * Default settings for debug/visualization output.
+ */
+ export const DEFAULT_VISUALIZATION_WIDTH = 120;
+ export const DEFAULT_VISUALIZATION_MODE = "summary";
+ export const DEFAULT_VERBOSE_LOGGING_ENABLED = true;
+ //# sourceMappingURL=constants.js.map
package/dist/ai-service/llm/context/constants.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"constants.js","sourceRoot":"","sources":["../../../../src/ai-service/llm/context/constants.ts"],"names":[],"mappings":"AAAA;;;;;;;GAOG;AAEH;;;;;;GAMG;AACH,MAAM,CAAC,MAAM,0BAA0B,GAAG,OAAO,CAAC;AAElD;;;;;;;;GAQG;AACH,MAAM,CAAC,MAAM,mCAAmC,GAAG,GAAG,CAAC;AAEvD;;;;;;;GAOG;AACH,MAAM,CAAC,MAAM,kCAAkC,GAAG,GAAG,CAAC;AAEtD;;;;;;;;;GASG;AACH,MAAM,CAAC,MAAM,0BAA0B,GAAG,GAAG,CAAC;AAE9C;;;;;;;;;GASG;AACH,MAAM,CAAC,MAAM,0BAA0B,GAAG,IAAI,CAAC;AAE/C;;;;;;;;GAQG;AACH,MAAM,CAAC,MAAM,wCAAwC,GAAG,CAAC,CAAC;AAE1D;;;;;;GAMG;AACH,MAAM,CAAC,MAAM,sCAAsC,GAAG,IAAI,CAAC;AAE3D;;;;;GAKG;AACH,MAAM,CAAC,MAAM,gCAAgC,GAAG,CAAC,CAAC;AAClD,MAAM,CAAC,MAAM,gCAAgC,GAAG,CAAC,CAAC;AAElD;;;;;;;;;;GAUG;AACH,MAAM,CAAC,MAAM,yBAAyB,GAAG,CAAC,CAAC;AAE3C;;GAEG;AACH,MAAM,CAAC,MAAM,sBAAsB,GAAG,IAAI,CAAC;AAE3C;;GAEG;AAEH,MAAM,CAAC,MAAM,2BAA2B,GAAG,GAAG,CAAC;AAC/C,MAAM,CAAC,MAAM,0BAA0B,GAAG,SAAS,CAAC;AACpD,MAAM,CAAC,MAAM,+BAA+B,GAAG,IAAI,CAAC"}
package/dist/ai-service/llm/context/context.d.ts
@@ -0,0 +1,207 @@
+ /**
+ * Main Context class for managing LLM conversation context.
+ *
+ * Responsibilities:
+ * - Maintain conversation turns across three compression levels
+ * - Automatically compact context when token limits are approached
+ * - Track and refine character-to-token estimation
+ * - Persist context state for resumption
+ *
+ * The three-level compaction strategy:
+ * - L1 (50%): Recent turns with full detail
+ * - L2 (35%): Older turns with moderate compression
+ * - L3 (15%): Oldest turns with maximum compression
+ */
+ import { type CacheStrategy } from "./caching/index.js";
+ import { L1Turn, L2Turn, L3Turn } from "./levels/index.js";
+ import { type ResolvedContextOptions, type ResolvedLoggingOptions } from "./options.js";
+ import { type RenderContextParams } from "./utils/visualization.js";
+ import type { TokenizedMessage, ResponseMessage, Step, TokenizedSystemMessage } from "./internal-types.js";
+ import type { ContextRecord } from "./serialization.js";
+ import type { ContextOptions, LoggingOptions, ContextId } from "./types.js";
+ import type { ModelMessage, UserModelMessage, LanguageModelUsage, SystemModelMessage } from "ai";
+ /**
+ * Context manages conversation history with automatic compaction.
+ * Note: you generally want to use the ContextManager to obtain a Context instance.
+ */
+ export declare class Context {
+ readonly id?: ContextId;
+ readonly options: ResolvedContextOptions;
+ readonly logging: ResolvedLoggingOptions;
+ systemPrompt?: TokenizedSystemMessage;
+ l3Turns: L3Turn[];
+ l2Turns: L2Turn[];
+ l1Turns: L1Turn[];
+ private onPersist?;
+ private activeTurnNumResponses;
+ private continuations;
+ private turnStartMs;
+ private _logger;
+ private cacheStrategy;
+ constructor(params: {
+ id?: ContextId;
+ options?: ContextOptions;
+ logging?: LoggingOptions;
+ onPersist?: (record: ContextRecord) => Promise<void>;
+ cacheStrategy: CacheStrategy;
+ });
+ get charsPerToken(): number;
+ private logPrefix;
+ private get logger();
+ private persist;
+ /**
+ * Seed empty context with a summary message.
+ */
+ seedIfEmpty(summary: UserModelMessage): void;
+ /**
+ * Returns all messages in order (system + L3 + L2 + L1).
+ *
+ * Messages are returned without token counts (clean ModelMessage format).
+ */
+ getMessages(): ModelMessage[];
+ /**
+ * Total token count across all context (system + all turns).
+ */
+ get tokenCount(): number;
+ /**
+ * Token count for system prompt only.
+ */
+ get systemPromptTokenCount(): number;
+ /**
+ * Currently active turn (last L1 turn if not ended).
+ */
+ get activeTurn(): L1Turn | undefined;
+ /**
+ * Currently active step within active turn.
+ */
+ get activeStep(): Step | undefined;
+ /**
+ * All turn messages (L3 + L2 + L1), excluding continuation markers.
+ */
+ get turnMessages(): TokenizedMessage[];
+ /**
+ * All messages including system prompt.
+ */
+ private get allMessages();
+ getLevelBudgets(): {
+ l1: number;
+ l2: number;
+ l3: number;
+ };
+ /**
+ * Sets the system prompt.
+ */
+ setSystemPrompt(message: SystemModelMessage): void;
+ /**
+ * Retrieves the system prompt
+ */
+ getSystemPrompt(): SystemModelMessage | undefined;
+ /**
+ * Starts a new conversation turn with a user message.
+ *
+ * @throws {UserPromptTooLargeError} If user + system exceeds maxTokens and no options.allowOverflow is false
+ */
+ startTurn(user: UserModelMessage): void;
+ /**
+ * Starts a new step within the active turn.
+ */
+ startStep(): void;
+ /**
+ * Ends the current step, adding response messages.
+ *
+ * @param responses - Assistant and/or tool messages, accumulated over the entire active turn (this is how streamText.onStepFinish provides them)
+ * @param usage - Optional token usage info for refining estimates
+ */
+ endStep(responses: ResponseMessage[], usage?: LanguageModelUsage): void;
+ /**
+ * Ends the active turn.
+ */
+ endTurn(usage?: LanguageModelUsage): void;
+ /**
+ * Updates the character-per-token estimate based on actual LLM usage.
+ */
+ private updateCharsPerTokenEstimate;
+ /**
+ * Recalculates all token counts with current estimate.
+ */
+ private recalculateTokenCounts;
+ /**
+ * Main compaction algorithm.
+ *
+ * Triggers when context exceeds compactionTrigger threshold.
+ * Demotes/drops turns to reach compactionTarget.
+ * TODO(matt): add final AI summary level
+ *
+ * @param maxTokens - Hard token limit
+ * @param lastTurnIsActive - Whether last L1 turn is active (protected from demotion)
+ */
+ private compact;
+ private onContextLimitExceeded;
+ /**
+ * Calculates budget allocations for each compression level.
+ */
+ private calculateLevelBudgets;
+ /**
+ * Compacts L1 turns to fit within target budget.
+ */
+ private compactL1ToTarget;
+ /**
+ * Compacts L2 turns to fit within target budget.
+ */
+ private compactL2ToTarget;
+ /**
+ * Compacts L3 turns to fit within target budget by dropping oldest.
+ */
+ private compactL3ToTarget;
+ private patchContinuationBeforeDrop;
+ /**
+ * Performs aggressive compaction when still over target after level-based compaction.
+ */
+ private performAggressiveCompaction;
+ /**
+ * Splits old steps from active turn into a completed L1 turn when appropriate.
+ *
+ * Strategy:
+ * - Split when turn has more than `prioritizedSteps` steps
+ * - Merge with previous split turn if it exists (via continuations map)
+ *
+ * This keeps recent steps in the active turn while moving older steps
+ * to a completed turn that can be cached and potentially compacted.
+ */
+ private splitActiveTurn;
+ /**
+ * Performs the actual split operation.
+ *
+ * If a previous split turn exists (linked via continuations), merges steps into it.
+ * Otherwise creates a new completed turn.
+ */
+ private performSplit;
+ private withTokenCount;
+ private estimateTokenCount;
+ private toModelMessage;
+ /**
+ * Removes tool-call parts from assistant messages if their results aren't present later in the context.
+ * This can happen if a tool call was made but the tool result message was never added (e.g. due to an error).
+ * Such orphaned tool calls trigger an error in some LLMs.
+ * Example error from Vertex:
+ *{ "type": "invalid_request_error", "message": "messages.2: `tool_use` ids were found without `tool_result` blocks immediately after: toolu_vrtx_014CKoGtt5B94Q4MwZZHZ8LU. Each `tool_use` block must have a corresponding `tool_result` block in the next message."}
+ */
+ private repairBrokenToolCalls;
+ /**
+ * Serializes context for persistence.
+ */
+ toRecord(): ContextRecord;
+ printState(params?: RenderContextParams): void;
+ /**
+ * Returns an ASCII representation of the context.
+ */
+ render(params?: RenderContextParams): string;
+ /**
+ * Deserializes a context from a record.
+ *
+ * @param record - Serialized context data
+ * @param params - Parameters for the context constructor
+ */
+ static fromRecord(record: ContextRecord, params: ConstructorParameters<typeof Context>[0]): Context;
+ }
+ //# sourceMappingURL=context.d.ts.map
package/dist/ai-service/llm/context/context.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"context.d.ts","sourceRoot":"","sources":["../../../../src/ai-service/llm/context/context.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;GAaG;AAIH,OAAO,EACL,KAAK,aAAa,EAEnB,MAAM,oBAAoB,CAAC;AAK5B,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,mBAAmB,CAAC;AAE3D,OAAO,EAGL,KAAK,sBAAsB,EAC3B,KAAK,sBAAsB,EAC5B,MAAM,cAAc,CAAC;AAOtB,OAAO,EAEL,KAAK,mBAAmB,EACzB,MAAM,0BAA0B,CAAC;AAClC,OAAO,KAAK,EACV,gBAAgB,EAGhB,eAAe,EACf,IAAI,EACJ,sBAAsB,EACvB,MAAM,qBAAqB,CAAC;AAC7B,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,oBAAoB,CAAC;AACxD,OAAO,KAAK,EACV,cAAc,EACd,cAAc,EAGd,SAAS,EACV,MAAM,YAAY,CAAC;AAEpB,OAAO,KAAK,EACV,YAAY,EACZ,gBAAgB,EAChB,kBAAkB,EAElB,kBAAkB,EACnB,MAAM,IAAI,CAAC;AAEZ;;;GAGG;AACH,qBAAa,OAAO;IAClB,QAAQ,CAAC,EAAE,CAAC,EAAE,SAAS,CAAC;IACxB,QAAQ,CAAC,OAAO,EAAE,sBAAsB,CAAC;IACzC,QAAQ,CAAC,OAAO,EAAE,sBAAsB,CAAC;IAEzC,YAAY,CAAC,EAAE,sBAAsB,CAAC;IACtC,OAAO,EAAE,MAAM,EAAE,CAAM;IACvB,OAAO,EAAE,MAAM,EAAE,CAAM;IACvB,OAAO,EAAE,MAAM,EAAE,CAAM;IACvB,OAAO,CAAC,SAAS,CAAC,CAA2C;IAG7D,OAAO,CAAC,sBAAsB,CAAK;IAEnC,OAAO,CAAC,aAAa,CAGjB;IAGJ,OAAO,CAAC,WAAW,CAAK;IAExB,OAAO,CAAC,OAAO,CAAuB;IAEtC,OAAO,CAAC,aAAa,CAAgB;gBAEzB,MAAM,EAAE;QAClB,EAAE,CAAC,EAAE,SAAS,CAAC;QACf,OAAO,CAAC,EAAE,cAAc,CAAC;QACzB,OAAO,CAAC,EAAE,cAAc,CAAC;QACzB,SAAS,CAAC,EAAE,CAAC,MAAM,EAAE,aAAa,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;QACrD,aAAa,EAAE,aAAa,CAAC;KAC9B;IASD,IAAI,aAAa,IAAI,MAAM,CAE1B;IAED,OAAO,CAAC,SAAS;IAKjB,OAAO,KAAK,MAAM,GAQjB;YAEa,OAAO;IAMrB;;OAEG;IACI,WAAW,CAAC,OAAO,EAAE,gBAAgB;IAS5C;;;;OAIG;IACI,WAAW,IAAI,YAAY,EAAE;IA4BpC;;OAEG;IACH,IAAI,UAAU,IAAI,MAAM,CAevB;IAED;;OAEG;IACH,IAAI,sBAAsB,IAAI,MAAM,CAGnC;IAED;;OAEG;IACH,IAAI,UAAU,IAAI,MAAM,GAAG,SAAS,CAEnC;IAED;;OAEG;IACH,IAAI,UAAU,IAAI,IAAI,GAAG,SAAS,CAEjC;IAED;;OAEG;IACH,IAAI,YAAY,IAAI,gBAAgB,EAAE,CAWrC;IAED;;OAEG;IACH,OAAO,KAAK,WAAW,GAKtB;IAED,eAAe;YAmST,MAAM;YACN,MAAM;YACN,MAAM;;IAjSZ;;OAEG;IACH,eAAe,CAAC,OAAO,EAAE,kBAAkB;IAY3C;;OAEG;IACH,eAAe,IAAI,kBAAkB,GAAG,SAAS;IAMjD;;;;OAIG;IACH,SAAS,CAAC,IAAI,EAAE,gBAAgB;IA8BhC;;OAEG;IACH,SAAS;IAYT;;;;;OAKG;IACH,OAAO,CAAC,SAAS,EAAE,eAAe,EAAE,EAAE,KAAK,CAAC,EAAE,kBAAkB;IA+BhE;;OAEG;IACH,OAAO,CAAC,KAAK,CAAC,EAAE,kBAAkB;IAalC;;OAEG;IACH,OAAO,CAAC,2BAA2B;IAmCnC;;OAEG;IACH,OAAO,CAAC,sBAAsB;IAU9B;;;;;;;;;OASG;IAEH,OAAO,CAAC,OAAO;IAkEf,OAAO,CAAC,sBAAsB;IA4B9B;;OAEG;IACH,OAAO,CAAC,qBAAqB;IAkB7B;;OAEG;IACH,OAAO,CAAC,iBAAiB;IAoCzB;;OAEG;IACH,OAAO,CAAC,iBAAiB;IAoBzB;;OAEG;IACH,OAAO,CAAC,iBAAiB;IAuBzB,OAAO,CAAC,2BAA2B;IAUnC;;OAEG;IACH,OAAO,CAAC,2BAA2B;IA+CnC;;;;;;;;;OASG;IACH,OAAO,CAAC,eAAe;IAcvB;;;;;OAKG;IACH,OAAO,CAAC,YAAY;IAgDpB,OAAO,CAAC,cAAc;IAOtB,OAAO,CAAC,kBAAkB;IAI1B,OAAO,CAAC,cAAc;IAyCtB;;;;;;OAMG;IACH,OAAO,CAAC,qBAAqB;IA4C7B;;OAEG;IACH,QAAQ,IAAI,aAAa;IASzB,UAAU,CAAC,MAAM,GAAE,mBAAwB;IAK3C;;OAEG;IACH,MAAM,CAAC,MAAM,GAAE,mBAAwB,GAAG,MAAM;IAIhD;;;;;OAKG;IACH,MAAM,CAAC,UAAU,CACf,MAAM,EAAE,aAAa,EACrB,MAAM,EAAE,qBAAqB,CAAC,OAAO,OAAO,CAAC,CAAC,CAAC,CAAC,GAC/C,OAAO;CAoBX"}