@deepagents/text2sql 0.11.0 → 0.12.0

This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
Files changed (49)
  1. package/dist/index.d.ts +3 -1
  2. package/dist/index.d.ts.map +1 -1
  3. package/dist/index.js +1231 -511
  4. package/dist/index.js.map +4 -4
  5. package/dist/lib/adapters/groundings/index.js +1937 -50
  6. package/dist/lib/adapters/groundings/index.js.map +4 -4
  7. package/dist/lib/adapters/groundings/report.grounding.d.ts.map +1 -1
  8. package/dist/lib/adapters/mysql/index.js +1937 -50
  9. package/dist/lib/adapters/mysql/index.js.map +4 -4
  10. package/dist/lib/adapters/postgres/index.js +1937 -50
  11. package/dist/lib/adapters/postgres/index.js.map +4 -4
  12. package/dist/lib/adapters/spreadsheet/index.js +34 -49
  13. package/dist/lib/adapters/spreadsheet/index.js.map +4 -4
  14. package/dist/lib/adapters/sqlite/index.js +1937 -50
  15. package/dist/lib/adapters/sqlite/index.js.map +4 -4
  16. package/dist/lib/adapters/sqlserver/index.js +1937 -50
  17. package/dist/lib/adapters/sqlserver/index.js.map +4 -4
  18. package/dist/lib/agents/developer.agent.d.ts.map +1 -1
  19. package/dist/lib/agents/explainer.agent.d.ts +4 -5
  20. package/dist/lib/agents/explainer.agent.d.ts.map +1 -1
  21. package/dist/lib/agents/question.agent.d.ts.map +1 -1
  22. package/dist/lib/agents/result-tools.d.ts +34 -0
  23. package/dist/lib/agents/result-tools.d.ts.map +1 -0
  24. package/dist/lib/agents/sql.agent.d.ts.map +1 -1
  25. package/dist/lib/agents/teachables.agent.d.ts.map +1 -1
  26. package/dist/lib/agents/text2sql.agent.d.ts +0 -21
  27. package/dist/lib/agents/text2sql.agent.d.ts.map +1 -1
  28. package/dist/lib/checkpoint.d.ts +1 -1
  29. package/dist/lib/checkpoint.d.ts.map +1 -1
  30. package/dist/lib/instructions.d.ts +9 -28
  31. package/dist/lib/instructions.d.ts.map +1 -1
  32. package/dist/lib/sql.d.ts +1 -1
  33. package/dist/lib/sql.d.ts.map +1 -1
  34. package/dist/lib/synthesis/extractors/base-contextual-extractor.d.ts +6 -7
  35. package/dist/lib/synthesis/extractors/base-contextual-extractor.d.ts.map +1 -1
  36. package/dist/lib/synthesis/extractors/last-query-extractor.d.ts.map +1 -1
  37. package/dist/lib/synthesis/extractors/segmented-context-extractor.d.ts +0 -6
  38. package/dist/lib/synthesis/extractors/segmented-context-extractor.d.ts.map +1 -1
  39. package/dist/lib/synthesis/extractors/sql-extractor.d.ts.map +1 -1
  40. package/dist/lib/synthesis/index.js +2478 -2323
  41. package/dist/lib/synthesis/index.js.map +4 -4
  42. package/dist/lib/synthesis/synthesizers/breadth-evolver.d.ts.map +1 -1
  43. package/dist/lib/synthesis/synthesizers/depth-evolver.d.ts.map +1 -1
  44. package/dist/lib/synthesis/synthesizers/persona-generator.d.ts.map +1 -1
  45. package/package.json +9 -15
  46. package/dist/lib/instructions.js +0 -432
  47. package/dist/lib/instructions.js.map +0 -7
  48. package/dist/lib/teach/teachings.d.ts +0 -11
  49. package/dist/lib/teach/teachings.d.ts.map +0 -1
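The hunk below comes from one of the rebuilt source maps; its `sourcesContent` embeds the full TypeScript for the fragment builders and grounding classes. For orientation, this is the usage pattern documented at the top of the embedded `src/lib/fragments/schema.ts` — a minimal sketch assuming the `dialectInfo`, `table`, and `column` exports are unchanged in 0.12.0:

```ts
import { dialectInfo, table, column } from '@deepagents/text2sql';

// Schema fragments carry database metadata for injection into AI prompts;
// per the embedded doc comment, they are formatted by a renderer
// (XML, Markdown, TOML, TOON) rather than rendered directly.
const schemaFragments = [
  dialectInfo({ dialect: 'PostgreSQL', version: '14.5' }),
  table({
    name: 'users',
    columns: [
      column({ name: 'id', type: 'integer', pk: true }),
      column({ name: 'email', type: 'varchar', unique: true }),
    ],
  }),
];
```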
@@ -1,7 +1,7 @@
  {
  "version": 3,
- "sources": ["../../../../src/lib/fragments/schema.ts", "../../../../src/lib/adapters/groundings/context.ts", "../../../../src/lib/adapters/adapter.ts", "../../../../src/lib/adapters/groundings/abstract.grounding.ts", "../../../../src/lib/adapters/groundings/column-stats.grounding.ts", "../../../../src/lib/adapters/groundings/info.grounding.ts", "../../../../src/lib/adapters/groundings/column-values.grounding.ts", "../../../../src/lib/adapters/groundings/report.grounding.ts", "../../../../src/lib/adapters/groundings/row-count.grounding.ts", "../../../../src/lib/adapters/groundings/table.grounding.ts", "../../../../src/lib/adapters/sqlite/column-stats.sqlite.grounding.ts", "../../../../src/lib/adapters/sqlite/info.sqlite.grounding.ts", "../../../../src/lib/adapters/sqlite/column-values.sqlite.grounding.ts", "../../../../src/lib/adapters/sqlite/row-count.sqlite.grounding.ts", "../../../../src/lib/adapters/sqlite/sqlite.ts", "../../../../src/lib/adapters/sqlite/table.sqlite.grounding.ts", "../../../../src/lib/adapters/sqlite/index.ts", "../../../../src/lib/adapters/spreadsheet/spreadsheet.ts", "../../../../src/lib/adapters/spreadsheet/parser.ts"],
- "sourcesContent": ["import type { ContextFragment } from '@deepagents/context';\n\n/**\n * Schema fragment builders.\n *\n * These fragments represent database schema metadata that can be injected\n * into AI prompts. Use with renderers (XML, Markdown, TOML, TOON) to format.\n *\n * @example\n * ```ts\n * import { dialectInfo, table, column, relationship } from '@deepagents/text2sql';\n *\n * const schemaFragments = [\n * dialectInfo({ dialect: 'PostgreSQL', version: '14.5' }),\n * table({\n * name: 'users',\n * columns: [\n * column({ name: 'id', type: 'integer', pk: true }),\n * column({ name: 'email', type: 'varchar', unique: true }),\n * ],\n * }),\n * ];\n * ```\n */\n\n/**\n * Database dialect and version information.\n *\n * @param input.dialect - Database type (PostgreSQL, SQLite, SQL Server, etc.)\n * @param input.version - Database version string\n * @param input.database - Database name\n *\n * @example\n * dialectInfo({ dialect: 'PostgreSQL', version: '14.5', database: 'myapp' })\n */\nexport function dialectInfo(input: {\n dialect: string;\n version?: string;\n database?: string;\n}): ContextFragment {\n return {\n name: 'dialectInfo',\n data: {\n dialect: input.dialect,\n ...(input.version && { version: input.version }),\n ...(input.database && { database: input.database }),\n },\n };\n}\n\n/**\n * Database table with columns and optional metadata.\n *\n * @param input.name - Table name\n * @param input.schema - Schema name (e.g., 'public' for PostgreSQL)\n * @param input.rowCount - Approximate row count\n * @param input.sizeHint - Size category for query optimization hints\n * @param input.columns - Array of column() fragments\n * @param input.indexes - Array of index() fragments\n * @param input.constraints - Array of constraint() fragments\n *\n * @example\n * table({\n * name: 'users',\n * rowCount: 1500,\n * sizeHint: 'medium',\n * columns: [\n * column({ name: 'id', type: 'integer', pk: true }),\n * column({ name: 'email', type: 'varchar', unique: true, indexed: true }),\n * ],\n * indexes: [\n * index({ name: 'idx_email', columns: ['email'], unique: true }),\n * ],\n * })\n */\nexport function table(input: {\n name: string;\n schema?: string;\n rowCount?: number;\n sizeHint?: 'tiny' | 'small' | 'medium' | 'large' | 'huge';\n columns: ContextFragment[];\n indexes?: ContextFragment[];\n constraints?: ContextFragment[];\n}): ContextFragment {\n return {\n name: 'table',\n data: {\n name: input.name,\n ...(input.schema && { schema: input.schema }),\n ...(input.rowCount != null && { rowCount: input.rowCount }),\n ...(input.sizeHint && { sizeHint: input.sizeHint }),\n columns: input.columns,\n ...(input.indexes?.length && { indexes: input.indexes }),\n ...(input.constraints?.length && { constraints: input.constraints }),\n },\n };\n}\n\n/**\n * Table column with type and annotations.\n *\n * @param input.name - Column name\n * @param input.type - Column data type (e.g., 'integer', 'varchar(255)')\n * @param input.pk - Is primary key\n * @param input.fk - Foreign key reference in \"table.column\" format\n * @param input.unique - Has unique constraint\n * @param input.notNull - Has NOT NULL constraint\n * @param input.default - Default value expression\n * @param input.indexed - Has index on this column\n * @param input.values - Enum or low cardinality values\n * @param input.stats - Column statistics (min, max, null fraction)\n *\n * @example\n * column({\n * name: 'status',\n * type: 'varchar',\n * notNull: true,\n * indexed: true,\n * values: ['active', 'inactive', 
'suspended'],\n * })\n */\nexport function column(input: {\n name: string;\n type: string;\n pk?: boolean;\n fk?: string;\n unique?: boolean;\n notNull?: boolean;\n default?: string;\n indexed?: boolean;\n values?: string[];\n stats?: {\n min?: string;\n max?: string;\n nullFraction?: number;\n };\n}): ContextFragment {\n return {\n name: 'column',\n data: {\n name: input.name,\n type: input.type,\n ...(input.pk && { pk: true }),\n ...(input.fk && { fk: input.fk }),\n ...(input.unique && { unique: true }),\n ...(input.notNull && { notNull: true }),\n ...(input.default && { default: input.default }),\n ...(input.indexed && { indexed: true }),\n ...(input.values?.length && { values: input.values }),\n ...(input.stats && { stats: input.stats }),\n },\n };\n}\n\n/**\n * Table index.\n *\n * @param input.name - Index name\n * @param input.columns - Columns included in the index\n * @param input.unique - Is unique index\n * @param input.type - Index type (BTREE, HASH, GIN, etc.)\n *\n * @example\n * index({ name: 'idx_user_email', columns: ['email'], unique: true, type: 'BTREE' })\n */\nexport function index(input: {\n name: string;\n columns: string[];\n unique?: boolean;\n type?: string;\n}): ContextFragment {\n return {\n name: 'index',\n data: {\n name: input.name,\n columns: input.columns,\n ...(input.unique && { unique: true }),\n ...(input.type && { type: input.type }),\n },\n };\n}\n\n/**\n * Table constraint (CHECK, UNIQUE, PRIMARY_KEY, FOREIGN_KEY, etc).\n *\n * @param input.name - Constraint name\n * @param input.type - Constraint type\n * @param input.columns - Columns involved in the constraint\n * @param input.definition - CHECK constraint SQL definition\n * @param input.defaultValue - DEFAULT constraint value\n * @param input.referencedTable - For FK: referenced table name\n * @param input.referencedColumns - For FK: referenced column names\n *\n * @example\n * constraint({\n * name: 'chk_amount_positive',\n * type: 'CHECK',\n * definition: 'amount > 0',\n * })\n *\n * @example\n * constraint({\n * name: 'fk_order_user',\n * type: 'FOREIGN_KEY',\n * columns: ['user_id'],\n * referencedTable: 'users',\n * referencedColumns: ['id'],\n * })\n */\nexport function constraint(input: {\n name: string;\n type:\n | 'CHECK'\n | 'UNIQUE'\n | 'NOT_NULL'\n | 'DEFAULT'\n | 'PRIMARY_KEY'\n | 'FOREIGN_KEY';\n columns?: string[];\n definition?: string;\n defaultValue?: string;\n referencedTable?: string;\n referencedColumns?: string[];\n}): ContextFragment {\n return {\n name: 'constraint',\n data: {\n name: input.name,\n type: input.type,\n ...(input.columns?.length && { columns: input.columns }),\n ...(input.definition && { definition: input.definition }),\n ...(input.defaultValue && { defaultValue: input.defaultValue }),\n ...(input.referencedTable && { referencedTable: input.referencedTable }),\n ...(input.referencedColumns?.length && {\n referencedColumns: input.referencedColumns,\n }),\n },\n };\n}\n\n/**\n * Database view.\n *\n * @param input.name - View name\n * @param input.schema - Schema name\n * @param input.columns - Array of column() fragments\n * @param input.definition - View SQL definition\n *\n * @example\n * view({\n * name: 'active_users',\n * columns: [\n * column({ name: 'id', type: 'integer' }),\n * column({ name: 'email', type: 'varchar' }),\n * ],\n * definition: \"SELECT id, email FROM users WHERE status = 'active'\",\n * })\n */\nexport function view(input: {\n name: string;\n schema?: string;\n columns: ContextFragment[];\n definition?: string;\n}): ContextFragment {\n 
return {\n name: 'view',\n data: {\n name: input.name,\n ...(input.schema && { schema: input.schema }),\n columns: input.columns,\n ...(input.definition && { definition: input.definition }),\n },\n };\n}\n\n/**\n * Relationship between tables (foreign key connection).\n *\n * @param input.from - Source table and columns\n * @param input.to - Referenced table and columns\n * @param input.cardinality - Relationship cardinality\n *\n * @example\n * relationship({\n * from: { table: 'orders', columns: ['user_id'] },\n * to: { table: 'users', columns: ['id'] },\n * cardinality: 'many-to-one',\n * })\n */\nexport function relationship(input: {\n from: { table: string; columns: string[] };\n to: { table: string; columns: string[] };\n cardinality?: 'one-to-one' | 'one-to-many' | 'many-to-one' | 'many-to-many';\n}): ContextFragment {\n return {\n name: 'relationship',\n data: {\n from: input.from,\n to: input.to,\n ...(input.cardinality && { cardinality: input.cardinality }),\n },\n };\n}\n", "import type { AdapterInfo, ColumnStats, Relationship, Table } from '../adapter.ts';\nimport type { View } from './view.grounding.ts';\n\n/**\n * Column type for grounding operations.\n * Common interface between Table.columns and View.columns.\n */\nexport interface Column {\n name: string;\n type: string;\n kind?: 'LowCardinality' | 'Enum';\n values?: string[];\n stats?: ColumnStats;\n}\n\n/**\n * Entity with columns (Table or View).\n */\nexport interface ColumnContainer {\n name: string;\n columns: Column[];\n}\n\n/**\n * Shared context object passed to all groundings.\n * Groundings read from and write to this context.\n */\nexport interface GroundingContext {\n /** Tables discovered by TableGrounding */\n tables: Table[];\n\n /** Views discovered by ViewGrounding */\n views: View[];\n\n /** Relationships discovered by TableGrounding */\n relationships: Relationship[];\n\n /** Database info collected by InfoGrounding */\n info?: AdapterInfo;\n\n /** Business context report generated by ReportGrounding */\n report?: string;\n}\n\n/**\n * Create a new empty grounding context.\n */\nexport function createGroundingContext(): GroundingContext {\n return {\n tables: [],\n views: [],\n relationships: [],\n info: undefined,\n };\n}\n", "import type { ContextFragment } from '@deepagents/context';\n\nimport {\n column,\n constraint,\n dialectInfo,\n index,\n relationship,\n table,\n view,\n} from '../fragments/schema.ts';\nimport type { AbstractGrounding } from './groundings/abstract.grounding.ts';\nimport {\n type GroundingContext,\n createGroundingContext,\n} from './groundings/context.ts';\nimport type { View } from './groundings/view.grounding.ts';\n\n/**\n * Filter type for view/table names.\n * - string[]: explicit list of view names\n * - RegExp: pattern to match view names\n * - function: predicate to filter view names\n */\nexport type Filter = string[] | RegExp | ((viewName: string) => boolean);\n\nexport interface Table {\n name: string;\n schema?: string;\n rawName?: string;\n columns: {\n name: string;\n type: string;\n kind?: 'LowCardinality' | 'Enum';\n values?: string[];\n isIndexed?: boolean;\n stats?: ColumnStats;\n }[];\n rowCount?: number;\n sizeHint?: 'tiny' | 'small' | 'medium' | 'large' | 'huge';\n indexes?: TableIndex[];\n constraints?: TableConstraint[];\n}\n\nexport interface TableIndex {\n name: string;\n columns: string[];\n unique?: boolean;\n type?: string;\n}\n\nexport interface TableConstraint {\n name: string;\n type:\n | 'CHECK'\n | 'UNIQUE'\n | 'NOT_NULL'\n | 'DEFAULT'\n | 
'PRIMARY_KEY'\n | 'FOREIGN_KEY';\n columns?: string[];\n definition?: string;\n defaultValue?: string;\n referencedTable?: string;\n referencedColumns?: string[];\n}\n\nexport interface ColumnStats {\n min?: string;\n max?: string;\n nullFraction?: number;\n}\n\nexport type Relationship = {\n table: string;\n from: string[];\n referenced_table: string;\n to: string[];\n};\n\nexport type TablesFilter = string[] | RegExp;\n\nexport interface Introspection {\n tables: Table[];\n relationships: Relationship[];\n}\n\nexport interface AdapterInfo {\n dialect: string;\n version?: string;\n database?: string;\n details?: Record<string, unknown>;\n}\n\nexport type AdapterInfoProvider =\n | AdapterInfo\n | (() => Promise<AdapterInfo> | AdapterInfo);\n\nexport type IntrospectionPhase =\n | 'tables'\n | 'row_counts'\n | 'primary_keys'\n | 'indexes'\n | 'column_stats'\n | 'low_cardinality'\n | 'relationships';\n\nexport interface IntrospectionProgress {\n phase: IntrospectionPhase;\n message: string;\n current?: number;\n total?: number;\n}\n\nexport type OnProgress = (progress: IntrospectionProgress) => void;\n\nexport interface IntrospectOptions {\n onProgress?: OnProgress;\n}\n\nexport type GroundingFn = (adapter: Adapter) => AbstractGrounding;\n\nexport type ExecuteFunction = (sql: string) => Promise<any> | any;\nexport type ValidateFunction = (\n sql: string,\n) => Promise<string | void> | string | void;\n\nexport abstract class Adapter {\n abstract grounding: GroundingFn[];\n\n /**\n * Default schema name for this database.\n * PostgreSQL: 'public', SQL Server: 'dbo', SQLite: undefined\n */\n abstract readonly defaultSchema: string | undefined;\n\n /**\n * System schemas to exclude from introspection by default.\n */\n abstract readonly systemSchemas: string[];\n\n /**\n * Introspect the database schema and return context fragments.\n *\n * Executes all configured groundings to populate the context, then\n * generates fragments from the complete context data.\n *\n * @param ctx - Optional grounding context for sharing state between groundings\n * @returns Array of context fragments representing the database schema\n */\n async introspect(ctx = createGroundingContext()): Promise<ContextFragment[]> {\n // Phase 1: All groundings populate ctx\n for (const fn of this.grounding) {\n const grounding = fn(this);\n await grounding.execute(ctx);\n }\n\n // Phase 2: Generate fragments from complete ctx\n return this.#toSchemaFragments(ctx);\n }\n\n /**\n * Convert complete grounding context to schema fragments.\n * Called after all groundings have populated ctx with data.\n */\n #toSchemaFragments(ctx: GroundingContext): ContextFragment[] {\n const fragments: ContextFragment[] = [];\n\n // Dialect info\n if (ctx.info) {\n fragments.push(\n dialectInfo({\n dialect: ctx.info.dialect,\n version: ctx.info.version,\n database: ctx.info.database,\n }),\n );\n }\n\n // Tables (with all annotations now included)\n for (const t of ctx.tables) {\n fragments.push(this.#tableToFragment(t));\n }\n\n // Views\n for (const v of ctx.views) {\n fragments.push(this.#viewToFragment(v));\n }\n\n // Relationships\n const tableMap = new Map(ctx.tables.map((t) => [t.name, t]));\n for (const rel of ctx.relationships) {\n const sourceTable = tableMap.get(rel.table);\n const targetTable = tableMap.get(rel.referenced_table);\n fragments.push(\n this.#relationshipToFragment(rel, sourceTable, targetTable),\n );\n }\n\n // Business context\n if (ctx.report) {\n fragments.push({ name: 'businessContext', data: ctx.report });\n }\n\n 
return fragments;\n }\n\n /**\n * Convert a Table to a table fragment with nested column, index, and constraint fragments.\n */\n #tableToFragment(t: Table): ContextFragment {\n // Build constraint lookup maps for column-level annotations\n const pkConstraint = t.constraints?.find((c) => c.type === 'PRIMARY_KEY');\n const pkColumns = new Set(pkConstraint?.columns ?? []);\n\n const notNullColumns = new Set(\n t.constraints\n ?.filter((c) => c.type === 'NOT_NULL')\n .flatMap((c) => c.columns ?? []) ?? [],\n );\n\n const defaultByColumn = new Map<string, string>();\n for (const c of t.constraints?.filter((c) => c.type === 'DEFAULT') ?? []) {\n for (const col of c.columns ?? []) {\n if (c.defaultValue != null) {\n defaultByColumn.set(col, c.defaultValue);\n }\n }\n }\n\n // Single-column UNIQUE constraints\n const uniqueColumns = new Set(\n t.constraints\n ?.filter((c) => c.type === 'UNIQUE' && c.columns?.length === 1)\n .flatMap((c) => c.columns ?? []) ?? [],\n );\n\n // Foreign key lookup: column -> referenced table.column\n const fkByColumn = new Map<string, string>();\n for (const c of t.constraints?.filter((c) => c.type === 'FOREIGN_KEY') ??\n []) {\n const cols = c.columns ?? [];\n const refCols = c.referencedColumns ?? [];\n for (let i = 0; i < cols.length; i++) {\n const refCol = refCols[i] ?? refCols[0] ?? cols[i];\n fkByColumn.set(cols[i], `${c.referencedTable}.${refCol}`);\n }\n }\n\n // Build column fragments\n const columnFragments = t.columns.map((col) =>\n column({\n name: col.name,\n type: col.type,\n pk: pkColumns.has(col.name) || undefined,\n fk: fkByColumn.get(col.name),\n unique: uniqueColumns.has(col.name) || undefined,\n notNull: notNullColumns.has(col.name) || undefined,\n default: defaultByColumn.get(col.name),\n indexed: col.isIndexed || undefined,\n values: col.values,\n stats: col.stats,\n }),\n );\n\n // Build index fragments\n const indexFragments = (t.indexes ?? []).map((idx) =>\n index({\n name: idx.name,\n columns: idx.columns,\n unique: idx.unique,\n type: idx.type,\n }),\n );\n\n // Build constraint fragments for multi-column UNIQUE and CHECK constraints\n const constraintFragments = (t.constraints ?? [])\n .filter(\n (c) =>\n c.type === 'CHECK' ||\n (c.type === 'UNIQUE' && (c.columns?.length ?? 0) > 1),\n )\n .map((c) =>\n constraint({\n name: c.name,\n type: c.type,\n columns: c.columns,\n definition: c.definition,\n }),\n );\n\n return table({\n name: t.name,\n schema: t.schema,\n rowCount: t.rowCount,\n sizeHint: t.sizeHint,\n columns: columnFragments,\n indexes: indexFragments.length > 0 ? indexFragments : undefined,\n constraints:\n constraintFragments.length > 0 ? 
constraintFragments : undefined,\n });\n }\n\n /**\n * Convert a View to a view fragment with nested column fragments.\n */\n #viewToFragment(v: View): ContextFragment {\n const columnFragments = v.columns.map((col) =>\n column({\n name: col.name,\n type: col.type,\n values: col.values,\n stats: col.stats,\n }),\n );\n\n return view({\n name: v.name,\n schema: v.schema,\n columns: columnFragments,\n definition: v.definition,\n });\n }\n\n /**\n * Convert a Relationship to a relationship fragment.\n * Infers cardinality from row counts if available.\n */\n #relationshipToFragment(\n rel: Relationship,\n sourceTable?: Table,\n targetTable?: Table,\n ): ContextFragment {\n const sourceCount = sourceTable?.rowCount;\n const targetCount = targetTable?.rowCount;\n\n let cardinality:\n | 'one-to-one'\n | 'one-to-many'\n | 'many-to-one'\n | 'many-to-many'\n | undefined;\n\n if (sourceCount != null && targetCount != null && targetCount > 0) {\n const ratio = sourceCount / targetCount;\n if (ratio > 5) {\n cardinality = 'many-to-one';\n } else if (ratio < 1.2 && ratio > 0.8) {\n cardinality = 'one-to-one';\n } else if (ratio < 0.2) {\n cardinality = 'one-to-many';\n }\n }\n\n return relationship({\n from: { table: rel.table, columns: rel.from },\n to: { table: rel.referenced_table, columns: rel.to },\n cardinality,\n });\n }\n abstract execute(sql: string): Promise<any[]> | any[];\n abstract validate(sql: string): Promise<string | void> | string | void;\n abstract runQuery<Row>(sql: string): Promise<Row[]> | Row[];\n\n /**\n * Quote an identifier (table/column name) for safe use in SQL.\n * Each database uses different quoting styles.\n */\n abstract quoteIdentifier(name: string): string;\n\n /**\n * Escape a string value for safe use in SQL.\n * Each database escapes different characters.\n */\n abstract escape(value: string): string;\n\n /**\n * Build a SELECT query to sample rows from a table.\n * Each database uses different syntax for limiting rows (LIMIT vs TOP).\n */\n abstract buildSampleRowsQuery(\n tableName: string,\n columns: string[] | undefined,\n limit: number,\n ): string;\n\n /**\n * Convert unknown database value to number.\n * Handles number, bigint, and string types.\n */\n toNumber(value: unknown): number | undefined {\n if (typeof value === 'number' && Number.isFinite(value)) {\n return value;\n }\n if (typeof value === 'bigint') {\n return Number(value);\n }\n if (typeof value === 'string' && value.trim() !== '') {\n const parsed = Number(value);\n return Number.isFinite(parsed) ? parsed : undefined;\n }\n return undefined;\n }\n\n /**\n * Parse a potentially qualified table name into schema and table parts.\n */\n parseTableName(name: string): { schema: string; table: string } {\n if (name.includes('.')) {\n const [schema, ...rest] = name.split('.');\n return { schema, table: rest.join('.') };\n }\n return { schema: this.defaultSchema ?? 
'', table: name };\n }\n\n /**\n * Escape a string value for use in SQL string literals (single quotes).\n * Used in WHERE clauses like: WHERE name = '${escapeString(value)}'\n */\n escapeString(value: string): string {\n return value.replace(/'/g, \"''\");\n }\n\n /**\n * Build a SQL filter clause to include/exclude schemas.\n * @param columnName - The schema column name (e.g., 'TABLE_SCHEMA')\n * @param allowedSchemas - If provided, filter to these schemas only\n */\n buildSchemaFilter(columnName: string, allowedSchemas?: string[]): string {\n if (allowedSchemas && allowedSchemas.length > 0) {\n const values = allowedSchemas\n .map((s) => `'${this.escapeString(s)}'`)\n .join(', ');\n return `AND ${columnName} IN (${values})`;\n }\n if (this.systemSchemas.length > 0) {\n const values = this.systemSchemas\n .map((s) => `'${this.escapeString(s)}'`)\n .join(', ');\n return `AND ${columnName} NOT IN (${values})`;\n }\n return '';\n }\n}\n\nexport function filterTablesByName<T extends { name: string }>(\n tables: T[],\n filter: TablesFilter | undefined,\n): T[] {\n if (!filter) return tables;\n return tables.filter((table) => matchesFilter(table.name, filter));\n}\n\nexport function filterRelationshipsByTables(\n relationships: Relationship[],\n tableNames: Set<string> | undefined,\n): Relationship[] {\n if (tableNames === undefined) {\n return relationships;\n }\n if (tableNames.size === 0) {\n return [];\n }\n return relationships.filter(\n (it) => tableNames.has(it.table) || tableNames.has(it.referenced_table),\n );\n}\n\nexport function applyTablesFilter(\n tables: Table[],\n relationships: Relationship[],\n filter: TablesFilter | undefined,\n): { tables: Table[]; relationships: Relationship[] } {\n if (!filter) {\n return { tables, relationships };\n }\n\n const allowedNames = new Set(\n getTablesWithRelated(tables, relationships, filter),\n );\n\n return {\n tables: tables.filter((table) => allowedNames.has(table.name)),\n relationships: filterRelationshipsByTables(relationships, allowedNames),\n };\n}\n\nexport function matchesFilter(\n tableName: string,\n filter: TablesFilter,\n): boolean {\n if (Array.isArray(filter)) {\n return filter.includes(tableName);\n }\n return filter.test(tableName);\n}\n\nexport function getTablesWithRelated(\n allTables: Table[],\n relationships: Relationship[],\n filter: TablesFilter,\n): string[] {\n const matchedTables = filterTablesByName(allTables, filter).map(\n (it) => it.name,\n );\n\n if (matchedTables.length === 0) {\n return [];\n }\n\n const adjacency = new Map<string, Set<string>>();\n\n for (const rel of relationships) {\n if (!adjacency.has(rel.table)) {\n adjacency.set(rel.table, new Set());\n }\n if (!adjacency.has(rel.referenced_table)) {\n adjacency.set(rel.referenced_table, new Set());\n }\n adjacency.get(rel.table)!.add(rel.referenced_table);\n adjacency.get(rel.referenced_table)!.add(rel.table);\n }\n\n const result = new Set<string>(matchedTables);\n const queue = [...matchedTables];\n\n while (queue.length > 0) {\n const current = queue.shift()!;\n const neighbors = adjacency.get(current);\n\n if (!neighbors) {\n continue;\n }\n\n for (const neighbor of neighbors) {\n if (!result.has(neighbor)) {\n result.add(neighbor);\n queue.push(neighbor);\n }\n }\n }\n\n return Array.from(result);\n}\n", "import type { Adapter } from '../adapter.ts';\nimport type { GroundingContext } from './context.ts';\n\n/**\n * Filter type for table names.\n * - string[]: explicit list of table names\n * - RegExp: pattern to match table names\n * - 
function: predicate to filter table names\n */\nexport type Filter = string[] | RegExp | ((tableName: string) => boolean);\n\nexport interface AdapterInfo {\n dialect: string;\n version?: string;\n database?: string;\n details?: Record<string, unknown>;\n}\nexport type AdapterInfoProvider =\n | AdapterInfo\n | (() => Promise<AdapterInfo> | AdapterInfo);\n\n/**\n * Abstract base class for database schema groundings.\n *\n * Groundings collect schema metadata into the shared GroundingContext.\n * Fragment generation is centralized in Adapter.introspect().\n */\nexport abstract class AbstractGrounding {\n /**\n * Grounding identifier for debugging/logging.\n */\n name: string;\n\n constructor(name: string) {\n this.name = name;\n }\n\n /**\n * Execute grounding to populate the shared context.\n * Groundings mutate ctx to add their collected data (tables, views, indexes, etc).\n * Fragment generation happens centrally in Adapter after all groundings complete.\n *\n * @param ctx - Shared context for accumulating schema data\n */\n abstract execute(ctx: GroundingContext): Promise<void>;\n}\n\nclass SampleDataGrounding {\n // this will fetch sample data for tables matching the filter\n}\n\nclass FunctionGrounding {\n #filter: Filter;\n #adapter: Adapter;\n constructor(adapter: Adapter, filter: Filter) {\n this.#filter = filter;\n this.#adapter = adapter;\n }\n}\n", "import type { ColumnStats } from '../adapter.ts';\nimport { AbstractGrounding } from './abstract.grounding.ts';\nimport type { Column, ColumnContainer, GroundingContext } from './context.ts';\n\n/**\n * Configuration for ColumnStatsGrounding.\n */\nexport interface ColumnStatsGroundingConfig {\n // Future: filter which tables/columns to collect stats for\n}\n\n/**\n * Abstract base class for column statistics grounding.\n *\n * Reads tables and views from the context and annotates their columns\n * with statistics (min, max, nullFraction).\n *\n * Subclasses implement database-specific hooks:\n * - `collectStats()` - collect min/max/nullFraction for a column\n */\nexport abstract class ColumnStatsGrounding extends AbstractGrounding {\n constructor(config: ColumnStatsGroundingConfig = {}) {\n super('columnStats');\n }\n\n /**\n * Collect min/max/nullFraction statistics for a column.\n * Return undefined to skip this column.\n */\n protected abstract collectStats(\n tableName: string,\n column: Column,\n ): Promise<ColumnStats | undefined>;\n\n /**\n * Execute the grounding process.\n * Annotates columns in ctx.tables and ctx.views with statistics.\n */\n async execute(ctx: GroundingContext): Promise<void> {\n // Process both tables and views\n const allContainers: ColumnContainer[] = [...ctx.tables, ...ctx.views];\n for (const container of allContainers) {\n for (const column of container.columns) {\n // Collect min/max/nullFraction\n try {\n const stats = await this.collectStats(container.name, column);\n if (stats) {\n column.stats = stats;\n }\n } catch (error) {\n // Skip on error\n console.warn(\n 'Error collecting stats for',\n container.name,\n column.name,\n error,\n );\n }\n }\n }\n }\n}\n", "import type { AdapterInfo } from '../adapter.ts';\nimport { AbstractGrounding } from './abstract.grounding.ts';\nimport type { GroundingContext } from './context.ts';\n\n/**\n * Configuration for InfoGrounding.\n */\nexport interface InfoGroundingConfig {\n // Future: options to control what info to collect\n}\n\n/**\n * Abstract base class for database info grounding.\n *\n * Collects database dialect, version, and connection info.\n *\n 
* Subclasses implement the database-specific hook:\n * - `collectInfo()` - collect database info\n */\nexport abstract class InfoGrounding extends AbstractGrounding {\n constructor(config: InfoGroundingConfig = {}) {\n super('dialectInfo');\n }\n\n /**\n * Collect database dialect, version, and other info.\n */\n protected abstract collectInfo(): Promise<AdapterInfo>;\n\n /**\n * Execute the grounding process.\n * Writes database info to ctx.info.\n */\n async execute(ctx: GroundingContext): Promise<void> {\n ctx.info = await this.collectInfo();\n }\n}\n", "import type { Table, TableConstraint } from '../adapter.ts';\nimport { AbstractGrounding } from './abstract.grounding.ts';\nimport type { Column, ColumnContainer, GroundingContext } from './context.ts';\n\nexport type { Column, ColumnContainer };\n\n/**\n * Result of column value detection.\n */\nexport type ColumnValuesResult = {\n kind: 'Enum' | 'LowCardinality';\n values: string[];\n};\n\n/**\n * Configuration for ColumnValuesGrounding.\n */\nexport interface ColumnValuesGroundingConfig {\n /** Maximum number of distinct values to consider low cardinality (default: 20) */\n lowCardinalityLimit?: number;\n}\n\n/**\n * Abstract base class for column values grounding.\n *\n * Discovers possible values for columns from three sources (in priority order):\n * 1. Native ENUM types (PostgreSQL, MySQL) \u2192 kind: 'Enum'\n * 2. CHECK constraints with IN clauses \u2192 kind: 'Enum'\n * 3. Low cardinality data scan \u2192 kind: 'LowCardinality'\n *\n * Subclasses implement database-specific hooks:\n * - `collectEnumValues()` - get values for native ENUM columns\n * - `collectLowCardinality()` - collect distinct values via data scan\n */\nexport abstract class ColumnValuesGrounding extends AbstractGrounding {\n protected lowCardinalityLimit: number;\n\n constructor(config: ColumnValuesGroundingConfig = {}) {\n super('columnValues');\n this.lowCardinalityLimit = config.lowCardinalityLimit ?? 
20;\n }\n\n /**\n * Get values for native ENUM type columns.\n * Return undefined if column is not an ENUM type.\n * Default implementation returns undefined (no native ENUM support).\n */\n protected async collectEnumValues(\n _tableName: string,\n _column: Column,\n ): Promise<string[] | undefined> {\n return undefined;\n }\n\n /**\n * Collect distinct values for low cardinality columns via data scan.\n * Return undefined if column has too many distinct values.\n */\n protected abstract collectLowCardinality(\n tableName: string,\n column: Column,\n ): Promise<string[] | undefined>;\n\n /**\n * Parse CHECK constraint for enum-like IN clause.\n * Extracts values from patterns like:\n * - CHECK (status IN ('active', 'inactive'))\n * - CHECK ((status)::text = ANY (ARRAY['a'::text, 'b'::text]))\n * - CHECK (status = 'active' OR status = 'inactive')\n */\n protected parseCheckConstraint(\n constraint: TableConstraint,\n columnName: string,\n ): string[] | undefined {\n if (constraint.type !== 'CHECK' || !constraint.definition) {\n return undefined;\n }\n\n // Check if constraint applies to this column\n if (constraint.columns && !constraint.columns.includes(columnName)) {\n return undefined;\n }\n\n const def = constraint.definition;\n const escapedCol = this.escapeRegex(columnName);\n\n // Column pattern: matches column name with optional parens and type cast\n // e.g., \"status\", \"(status)\", \"((status)::text)\"\n const colPattern = `(?:\\\\(?\\\\(?${escapedCol}\\\\)?(?:::(?:text|varchar|character varying))?\\\\)?)`;\n\n // Pattern 1: column IN ('val1', 'val2', ...)\n const inMatch = def.match(\n new RegExp(`${colPattern}\\\\s+IN\\\\s*\\\\(([^)]+)\\\\)`, 'i'),\n );\n if (inMatch) {\n return this.extractStringValues(inMatch[1]);\n }\n\n // Pattern 2: PostgreSQL ANY(ARRAY[...])\n const anyMatch = def.match(\n new RegExp(\n `${colPattern}\\\\s*=\\\\s*ANY\\\\s*\\\\(\\\\s*(?:ARRAY)?\\\\s*\\\\[([^\\\\]]+)\\\\]`,\n 'i',\n ),\n );\n if (anyMatch) {\n return this.extractStringValues(anyMatch[1]);\n }\n\n // Pattern 3: column = 'val1' OR column = 'val2' ...\n const orPattern = new RegExp(\n `\\\\b${this.escapeRegex(columnName)}\\\\b\\\\s*=\\\\s*'([^']*)'`,\n 'gi',\n );\n const orMatches = [...def.matchAll(orPattern)];\n if (orMatches.length >= 2) {\n return orMatches.map((m) => m[1]);\n }\n\n return undefined;\n }\n\n /**\n * Extract string values from a comma-separated list.\n */\n private extractStringValues(input: string): string[] | undefined {\n const values: string[] = [];\n // Match quoted strings: 'value' or 'value'::type\n const matches = input.matchAll(/'([^']*)'/g);\n for (const match of matches) {\n values.push(match[1]);\n }\n return values.length > 0 ? 
values : undefined;\n }\n\n /**\n * Escape special regex characters in a string.\n */\n private escapeRegex(str: string): string {\n return str.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&');\n }\n\n /**\n * Get the table from context by name.\n */\n private getTable(ctx: GroundingContext, name: string): Table | undefined {\n return ctx.tables.find((t) => t.name === name);\n }\n\n /**\n * Execute the grounding process.\n * Annotates columns in ctx.tables and ctx.views with values.\n */\n async execute(ctx: GroundingContext): Promise<void> {\n // Process both tables and views\n const allContainers: ColumnContainer[] = [...ctx.tables, ...ctx.views];\n\n for (const container of allContainers) {\n const table = this.getTable(ctx, container.name);\n\n for (const column of container.columns) {\n try {\n const result = await this.resolveColumnValues(\n container.name,\n column,\n table?.constraints,\n );\n if (result) {\n column.kind = result.kind;\n column.values = result.values;\n }\n } catch (error) {\n console.warn(\n 'Error collecting column values for',\n container.name,\n column.name,\n error,\n );\n }\n }\n }\n }\n\n /**\n * Resolve column values from all sources in priority order.\n */\n private async resolveColumnValues(\n tableName: string,\n column: Column,\n constraints?: TableConstraint[],\n ): Promise<ColumnValuesResult | undefined> {\n // Priority 1: Native ENUM type\n const enumValues = await this.collectEnumValues(tableName, column);\n if (enumValues?.length) {\n return { kind: 'Enum', values: enumValues };\n }\n\n // Priority 2: CHECK constraint with IN clause\n if (constraints) {\n for (const constraint of constraints) {\n const checkValues = this.parseCheckConstraint(constraint, column.name);\n if (checkValues?.length) {\n return { kind: 'Enum', values: checkValues };\n }\n }\n }\n\n // Priority 3: Low cardinality data scan\n const lowCardValues = await this.collectLowCardinality(tableName, column);\n if (lowCardValues?.length) {\n return { kind: 'LowCardinality', values: lowCardValues };\n }\n\n return undefined;\n }\n}\n", "import { groq } from '@ai-sdk/groq';\nimport { tool } from 'ai';\nimport dedent from 'dedent';\nimport z from 'zod';\n\nimport {\n type AgentModel,\n agent,\n generate,\n toState,\n user,\n} from '@deepagents/agent';\n\nimport type { Adapter } from '../adapter.ts';\nimport { AbstractGrounding } from './abstract.grounding.ts';\nimport type { GroundingContext } from './context.ts';\n\n/**\n * Cache interface for storing generated reports.\n */\nexport interface ReportCache {\n get(): Promise<string | null>;\n set(value: string): Promise<void>;\n}\n\n/**\n * Configuration for ReportGrounding.\n */\nexport interface ReportGroundingConfig {\n /** LLM model to use for generating the report */\n model?: AgentModel;\n /** Optional cache for storing generated reports */\n cache?: ReportCache;\n /** Force regeneration even if cached */\n forceRefresh?: boolean;\n}\n\nconst reportAgent = agent<unknown, { adapter: Adapter }>({\n name: 'db-report-agent',\n model: groq('openai/gpt-oss-20b'),\n prompt: () => dedent`\n <identity>\n You are a database analyst expert. Your job is to understand what\n a database represents and provide business context about it.\n You have READ-ONLY access to the database.\n </identity>\n\n <instructions>\n Write a business context that helps another agent answer questions accurately.\n\n For EACH table, do queries ONE AT A TIME:\n 1. SELECT COUNT(*) to get row count\n 2. 
SELECT * LIMIT 3 to see sample data\n\n Then write a report with:\n - What business this database is for\n - For each table: purpose, row count, and example of what the data looks like\n\n Include concrete examples like \"Track prices are $0.99\",\n \"Customer names like 'Lu\u00EDs Gon\u00E7alves'\", etc.\n\n Keep it 400-600 words, conversational style.\n </instructions>\n `,\n tools: {\n query_database: tool({\n description:\n 'Execute a SELECT query to explore the database and gather insights.',\n inputSchema: z.object({\n sql: z.string().describe('The SELECT query to execute'),\n purpose: z\n .string()\n .describe('What insight you are trying to gather with this query'),\n }),\n execute: ({ sql }, options) => {\n const state = toState<{ adapter: Adapter }>(options);\n return state.adapter.execute(sql);\n },\n }),\n },\n});\n\n/**\n * Grounding that generates a business context report about the database.\n *\n * Uses an LLM agent to:\n * 1. Query COUNT(*) for each table\n * 2. Query SELECT * LIMIT 3 for sample data\n * 3. Generate a 400-600 word business context report\n *\n * The report helps downstream agents understand what the database represents.\n */\nexport class ReportGrounding extends AbstractGrounding {\n #adapter: Adapter;\n #model: AgentModel;\n #cache?: ReportCache;\n #forceRefresh: boolean;\n\n constructor(adapter: Adapter, config: ReportGroundingConfig = {}) {\n super('business_context');\n this.#adapter = adapter;\n this.#model = config.model ?? groq('openai/gpt-oss-20b');\n this.#cache = config.cache;\n this.#forceRefresh = config.forceRefresh ?? false;\n }\n\n async execute(ctx: GroundingContext): Promise<void> {\n // Check cache first (unless forcing refresh)\n if (!this.#forceRefresh && this.#cache) {\n const cached = await this.#cache.get();\n if (cached) {\n ctx.report = cached;\n return;\n }\n }\n\n // Generate report using LLM\n const report = await this.#generateReport();\n ctx.report = report;\n\n // Cache the result\n if (this.#cache) {\n await this.#cache.set(report);\n }\n }\n\n async #generateReport(): Promise<string> {\n const { text } = await generate(\n reportAgent.clone({ model: this.#model }),\n [\n user(\n 'Please analyze the database and write a contextual report about what this database represents.',\n ),\n ],\n { adapter: this.#adapter },\n );\n\n return text;\n }\n}\n", "import type { Table } from '../adapter.ts';\nimport { AbstractGrounding } from './abstract.grounding.ts';\nimport type { GroundingContext } from './context.ts';\n\n/**\n * Configuration for RowCountGrounding.\n */\nexport interface RowCountGroundingConfig {\n // Future: filter which tables to count\n}\n\n/**\n * Abstract base class for row count grounding.\n *\n * Reads tables from the context and annotates them with row counts and size hints.\n * This grounding must run AFTER TableGrounding since it reads from ctx.tables.\n *\n * Subclasses implement the database-specific hook:\n * - `getRowCount()` - get row count for a table\n */\nexport abstract class RowCountGrounding extends AbstractGrounding {\n constructor(config: RowCountGroundingConfig = {}) {\n super('rowCount');\n }\n\n /**\n * Get row count for a specific table.\n */\n protected abstract getRowCount(\n tableName: string,\n ): Promise<number | undefined>;\n\n /**\n * Execute the grounding process.\n * Annotates tables in ctx.tables with row counts and size hints.\n */\n async execute(ctx: GroundingContext): Promise<void> {\n for (const table of ctx.tables) {\n const count = await this.getRowCount(table.name);\n if 
(count != null) {\n table.rowCount = count;\n table.sizeHint = this.#classifyRowCount(count);\n }\n }\n }\n\n /**\n * Classify row count into a size hint category.\n */\n #classifyRowCount(count: number): Table['sizeHint'] {\n if (count < 100) return 'tiny';\n if (count < 1000) return 'small';\n if (count < 10000) return 'medium';\n if (count < 100000) return 'large';\n return 'huge';\n }\n}\n", "import type { Filter, Relationship, Table } from '../adapter.ts';\nimport { AbstractGrounding } from './abstract.grounding.ts';\nimport type { GroundingContext } from './context.ts';\n\n/**\n * Configuration for TableGrounding.\n */\nexport interface TableGroundingConfig {\n /** Filter to select seed tables */\n filter?: Filter;\n /**\n * Traverse forward (child\u2192parent) following FK direction.\n * - true: unlimited depth\n * - number: maximum depth\n * - false/undefined: no forward traversal\n */\n forward?: boolean | number;\n /**\n * Traverse backward (parent\u2192child) finding tables that reference us.\n * - true: unlimited depth\n * - number: maximum depth\n * - false/undefined: no backward traversal\n */\n backward?: boolean | number;\n}\n\n/**\n * Abstract base class for table grounding.\n *\n * The `execute()` method implements a BFS traversal algorithm that discovers\n * tables and relationships. Subclasses implement the database-specific hooks:\n * - `getAllTableNames()` - list all tables\n * - `getTable()` - get table metadata\n * - `findOutgoingRelations()` - find FKs FROM a table\n * - `findIncomingRelations()` - find FKs TO a table\n */\nexport abstract class TableGrounding extends AbstractGrounding {\n #filter?: Filter;\n #forward?: boolean | number;\n #backward?: boolean | number;\n\n constructor(config: TableGroundingConfig = {}) {\n super('table');\n this.#filter = config.filter;\n this.#forward = config.forward;\n this.#backward = config.backward;\n }\n\n /** Get all table names in the database */\n protected abstract getAllTableNames(): Promise<string[]>;\n\n /** Get full table metadata for a single table */\n protected abstract getTable(tableName: string): Promise<Table>;\n\n /** Find FKs FROM this table (outgoing relationships) */\n protected abstract findOutgoingRelations(\n tableName: string,\n ): Promise<Relationship[]>;\n\n /** Find FKs TO this table (incoming relationships) */\n protected abstract findIncomingRelations(\n tableName: string,\n ): Promise<Relationship[]>;\n\n /**\n * Execute the grounding process.\n * Writes discovered tables and relationships to the context.\n */\n async execute(ctx: GroundingContext): Promise<void> {\n const seedTables = await this.applyFilter();\n const forward = this.#forward;\n const backward = this.#backward;\n\n // No traversal at all - just add the seed tables\n if (!forward && !backward) {\n const tables = await Promise.all(\n seedTables.map((name) => this.getTable(name)),\n );\n ctx.tables.push(...tables);\n return;\n }\n\n const tables: Record<string, Table> = {};\n const allRelationships: Relationship[] = [];\n const seenRelationships = new Set<string>();\n\n // Track depth separately for forward/backward using BFS\n const forwardQueue: Array<{ name: string; depth: number }> = [];\n const backwardQueue: Array<{ name: string; depth: number }> = [];\n const forwardVisited = new Set<string>();\n const backwardVisited = new Set<string>();\n\n // Initialize queues with seed tables at depth 0\n for (const name of seedTables) {\n if (forward) forwardQueue.push({ name, depth: 0 });\n if (backward) backwardQueue.push({ name, depth: 
0 });\n }\n\n // Process forward (child\u2192parent)\n const forwardLimit = forward === true ? Infinity : forward || 0;\n while (forwardQueue.length > 0) {\n const item = forwardQueue.shift();\n if (!item) break;\n const { name, depth } = item;\n\n if (forwardVisited.has(name)) continue;\n forwardVisited.add(name);\n\n if (!tables[name]) {\n tables[name] = await this.getTable(name);\n }\n\n if (depth < forwardLimit) {\n const rels = await this.findOutgoingRelations(name);\n for (const rel of rels) {\n this.addRelationship(rel, allRelationships, seenRelationships);\n if (!forwardVisited.has(rel.referenced_table)) {\n forwardQueue.push({ name: rel.referenced_table, depth: depth + 1 });\n }\n }\n }\n }\n\n // Process backward (parent\u2192child)\n const backwardLimit = backward === true ? Infinity : backward || 0;\n while (backwardQueue.length > 0) {\n const item = backwardQueue.shift();\n if (!item) break;\n const { name, depth } = item;\n\n if (backwardVisited.has(name)) continue;\n backwardVisited.add(name);\n\n if (!tables[name]) {\n tables[name] = await this.getTable(name);\n }\n\n if (depth < backwardLimit) {\n const rels = await this.findIncomingRelations(name);\n for (const rel of rels) {\n this.addRelationship(rel, allRelationships, seenRelationships);\n if (!backwardVisited.has(rel.table)) {\n backwardQueue.push({ name: rel.table, depth: depth + 1 });\n }\n }\n }\n }\n\n // Write to context\n const tablesList = Object.values(tables);\n ctx.tables.push(...tablesList);\n ctx.relationships.push(...allRelationships);\n }\n\n /**\n * Apply the filter to get seed table names.\n * If filter is an explicit array, skip querying all table names.\n */\n protected async applyFilter(): Promise<string[]> {\n const filter = this.#filter;\n if (Array.isArray(filter)) {\n return filter;\n }\n const names = await this.getAllTableNames();\n if (!filter) {\n return names;\n }\n if (filter instanceof RegExp) {\n return names.filter((name) => filter.test(name));\n }\n return names.filter(filter);\n }\n\n /**\n * Add a relationship to the collection, deduplicating by key.\n */\n protected addRelationship(\n rel: Relationship,\n all: Relationship[],\n seen: Set<string>,\n ): void {\n const key = `${rel.table}:${rel.from.join(',')}:${rel.referenced_table}:${rel.to.join(',')}`;\n if (!seen.has(key)) {\n seen.add(key);\n all.push(rel);\n }\n }\n}\n", "import type { Adapter, ColumnStats } from '../adapter.ts';\nimport {\n ColumnStatsGrounding,\n type ColumnStatsGroundingConfig,\n} from '../groundings/column-stats.grounding.ts';\nimport type { Column } from '../groundings/context.ts';\n\n/**\n * SQLite implementation of ColumnStatsGrounding.\n */\nexport class SqliteColumnStatsGrounding extends ColumnStatsGrounding {\n #adapter: Adapter;\n\n constructor(adapter: Adapter, config: ColumnStatsGroundingConfig = {}) {\n super(config);\n this.#adapter = adapter;\n }\n\n protected override async collectStats(\n tableName: string,\n column: Column,\n ): Promise<ColumnStats | undefined> {\n if (!this.#shouldCollectStats(column.type)) {\n return undefined;\n }\n\n const tableIdentifier = this.#adapter.quoteIdentifier(tableName);\n const columnIdentifier = this.#adapter.quoteIdentifier(column.name);\n\n const sql = `\n SELECT\n MIN(${columnIdentifier}) AS min_value,\n MAX(${columnIdentifier}) AS max_value,\n AVG(CASE WHEN ${columnIdentifier} IS NULL THEN 1.0 ELSE 0.0 END) AS null_fraction\n FROM ${tableIdentifier}\n `;\n\n const rows = await this.#adapter.runQuery<{\n min_value: unknown;\n max_value: unknown;\n 
null_fraction: number | string | null;\n }>(sql);\n\n if (!rows.length) {\n return undefined;\n }\n\n const min = this.#normalizeValue(rows[0]?.min_value);\n const max = this.#normalizeValue(rows[0]?.max_value);\n const nullFraction = this.#adapter.toNumber(rows[0]?.null_fraction);\n\n if (min == null && max == null && nullFraction == null) {\n return undefined;\n }\n\n return {\n min: min ?? undefined,\n max: max ?? undefined,\n nullFraction:\n nullFraction != null && Number.isFinite(nullFraction)\n ? Math.max(0, Math.min(1, nullFraction))\n : undefined,\n };\n }\n\n #shouldCollectStats(type: string | undefined): boolean {\n if (!type) {\n return false;\n }\n const normalized = type.toLowerCase();\n return /int|real|numeric|double|float|decimal|date|time|bool/.test(\n normalized,\n );\n }\n\n #normalizeValue(value: unknown): string | null {\n if (value === null || value === undefined) {\n return null;\n }\n if (typeof value === 'string') {\n return value;\n }\n if (typeof value === 'number' || typeof value === 'bigint') {\n return String(value);\n }\n if (typeof value === 'boolean') {\n return value ? 'true' : 'false';\n }\n if (value instanceof Date) {\n return value.toISOString();\n }\n if (typeof Buffer !== 'undefined' && Buffer.isBuffer(value)) {\n return value.toString('utf-8');\n }\n return null;\n }\n}\n", "import type { Adapter, AdapterInfo } from '../adapter.ts';\nimport {\n InfoGrounding,\n type InfoGroundingConfig,\n} from '../groundings/info.grounding.ts';\n\n/**\n * SQLite implementation of InfoGrounding.\n */\nexport class SqliteInfoGrounding extends InfoGrounding {\n #adapter: Adapter;\n\n constructor(adapter: Adapter, config: InfoGroundingConfig = {}) {\n super(config);\n this.#adapter = adapter;\n }\n\n protected override async collectInfo(): Promise<AdapterInfo> {\n const rows = await this.#adapter.runQuery<{ version: string }>(\n 'SELECT sqlite_version() AS version',\n );\n\n return {\n dialect: 'sqlite',\n version: rows[0]?.version,\n details: {\n parameterPlaceholder: '?',\n },\n };\n }\n}\n", "import type { Adapter } from '../adapter.ts';\nimport {\n type Column,\n ColumnValuesGrounding,\n type ColumnValuesGroundingConfig,\n} from '../groundings/column-values.grounding.ts';\n\n/**\n * SQLite implementation of ColumnValuesGrounding.\n *\n * Supports:\n * - CHECK constraints with IN clauses (inherited from base)\n * - Low cardinality data scan\n *\n * Note: SQLite does not have native ENUM types.\n */\nexport class SqliteColumnValuesGrounding extends ColumnValuesGrounding {\n #adapter: Adapter;\n\n constructor(adapter: Adapter, config: ColumnValuesGroundingConfig = {}) {\n super(config);\n this.#adapter = adapter;\n }\n\n protected override async collectLowCardinality(\n tableName: string,\n column: Column,\n ): Promise<string[] | undefined> {\n const tableIdentifier = this.#adapter.quoteIdentifier(tableName);\n const columnIdentifier = this.#adapter.quoteIdentifier(column.name);\n const limit = this.lowCardinalityLimit + 1;\n\n const sql = `\n SELECT DISTINCT ${columnIdentifier} AS value\n FROM ${tableIdentifier}\n WHERE ${columnIdentifier} IS NOT NULL\n LIMIT ${limit}\n `;\n\n const rows = await this.#adapter.runQuery<{ value: unknown }>(sql);\n\n if (!rows.length || rows.length > this.lowCardinalityLimit) {\n return undefined;\n }\n\n const values: string[] = [];\n for (const row of rows) {\n const formatted = this.#normalizeValue(row.value);\n if (formatted == null) {\n return undefined;\n }\n values.push(formatted);\n }\n\n return values.length ? 
values : undefined;\n }\n\n #normalizeValue(value: unknown): string | null {\n if (value === null || value === undefined) {\n return null;\n }\n if (typeof value === 'string') {\n return value;\n }\n if (typeof value === 'number' || typeof value === 'bigint') {\n return String(value);\n }\n if (typeof value === 'boolean') {\n return value ? 'true' : 'false';\n }\n if (value instanceof Date) {\n return value.toISOString();\n }\n if (typeof Buffer !== 'undefined' && Buffer.isBuffer(value)) {\n return value.toString('utf-8');\n }\n return null;\n }\n}\n", "import type { Adapter } from '../adapter.ts';\nimport {\n RowCountGrounding,\n type RowCountGroundingConfig,\n} from '../groundings/row-count.grounding.ts';\n\n/**\n * SQLite implementation of RowCountGrounding.\n */\nexport class SqliteRowCountGrounding extends RowCountGrounding {\n #adapter: Adapter;\n\n constructor(adapter: Adapter, config: RowCountGroundingConfig = {}) {\n super(config);\n this.#adapter = adapter;\n }\n\n protected override async getRowCount(tableName: string): Promise<number | undefined> {\n const rows = await this.#adapter.runQuery<{ count: number | string | bigint | null }>(\n `SELECT COUNT(*) as count FROM ${this.#adapter.quoteIdentifier(tableName)}`,\n );\n\n return this.#adapter.toNumber(rows[0]?.count);\n }\n}\n", "import {\n Adapter,\n type ExecuteFunction,\n type GroundingFn,\n type ValidateFunction,\n} from '../adapter.ts';\n\nconst SQL_ERROR_MAP: Array<{\n pattern: RegExp;\n type: string;\n hint: string;\n}> = [\n {\n pattern: /^no such table: .+$/,\n type: 'MISSING_TABLE',\n hint: 'Check the database schema for the correct table name. The table you referenced does not exist.',\n },\n {\n pattern: /^no such column: .+$/,\n type: 'INVALID_COLUMN',\n hint: 'Check the table schema for correct column names. The column may not exist or is ambiguous (exists in multiple joined tables).',\n },\n {\n pattern: /^ambiguous column name: .+$/,\n type: 'INVALID_COLUMN',\n hint: 'Check the table schema for correct column names. The column may not exist or is ambiguous (exists in multiple joined tables).',\n },\n {\n pattern: /^near \".+\": syntax error$/,\n type: 'SYNTAX_ERROR',\n hint: 'There is a SQL syntax error. Review the query structure, keywords, and punctuation.',\n },\n {\n pattern: /^no tables specified$/,\n type: 'SYNTAX_ERROR',\n hint: 'There is a SQL syntax error. Review the query structure, keywords, and punctuation.',\n },\n {\n pattern: /^attempt to write a readonly database$/,\n type: 'CONSTRAINT_ERROR',\n hint: 'A database constraint was violated. This should not happen with read-only queries.',\n },\n];\n\nexport type SqliteAdapterOptions = {\n execute: ExecuteFunction;\n validate?: ValidateFunction;\n grounding: GroundingFn[];\n};\n\ntype ColumnRow = {\n name: string | null | undefined;\n type: string | null | undefined;\n pk?: number | null | undefined;\n};\n\ntype IndexListRow = {\n seq?: number | null | undefined;\n name?: string | null | undefined;\n unique?: number | null | undefined;\n origin?: string | null | undefined;\n};\n\ntype IndexInfoRow = {\n seqno?: number | null | undefined;\n cid?: number | null | undefined;\n name?: string | null | undefined;\n};\ntype ForeignKeyRow = {\n id: number | null | undefined;\n table: string | null | undefined;\n from: string | null | undefined;\n to: string | null | undefined;\n};\n\nconst LOW_CARDINALITY_LIMIT = 20;\n\nexport function formatError(sql: string, error: unknown) {\n const errorMessage =\n error instanceof Error\n ? 
error.message\n : typeof error === 'string'\n ? error\n : 'Unknown error occurred';\n const errorInfo = SQL_ERROR_MAP.find((it) => it.pattern.test(errorMessage));\n\n if (!errorInfo) {\n return {\n error: errorMessage,\n error_type: 'UNKNOWN_ERROR',\n suggestion: 'Review the query and try again',\n sql_attempted: sql,\n };\n }\n\n return {\n error: errorMessage,\n error_type: errorInfo.type,\n suggestion: errorInfo.hint,\n sql_attempted: sql,\n };\n}\n\nexport class Sqlite extends Adapter {\n #options: SqliteAdapterOptions;\n override readonly grounding: GroundingFn[];\n override readonly defaultSchema = undefined;\n override readonly systemSchemas: string[] = [];\n\n constructor(options: SqliteAdapterOptions) {\n super();\n if (!options || typeof options.execute !== 'function') {\n throw new Error('Sqlite adapter requires an execute function.');\n }\n this.#options = options;\n this.grounding = options.grounding;\n }\n\n override async execute(sql: string) {\n return this.#options.execute(sql);\n }\n\n override async validate(sql: string) {\n const validator: ValidateFunction =\n this.#options.validate ??\n (async (text: string) => {\n await this.#options.execute(`EXPLAIN ${text}`);\n });\n\n try {\n return await validator(sql);\n } catch (error) {\n return JSON.stringify(formatError(sql, error));\n }\n }\n\n #quoteIdentifier(name: string) {\n return `'${name.replace(/'/g, \"''\")}'`;\n }\n\n override async runQuery<Row>(sql: string): Promise<Row[]> {\n const result = await this.#options.execute(sql);\n\n if (Array.isArray(result)) {\n return result as Row[];\n }\n\n if (\n result &&\n typeof result === 'object' &&\n 'rows' in result &&\n Array.isArray((result as { rows?: unknown }).rows)\n ) {\n return (result as { rows: Row[] }).rows;\n }\n\n throw new Error(\n 'Sqlite adapter execute() must return an array of rows or an object with a rows array when introspecting.',\n );\n }\n\n override quoteIdentifier(name: string): string {\n return `\"${name.replace(/\"/g, '\"\"')}\"`;\n }\n\n override escape(value: string): string {\n return value.replace(/\"/g, '\"\"');\n }\n\n override buildSampleRowsQuery(\n tableName: string,\n columns: string[] | undefined,\n limit: number,\n ): string {\n const columnList = columns?.length\n ? 
columns.map((c) => this.quoteIdentifier(c)).join(', ')\n : '*';\n return `SELECT ${columnList} FROM ${this.quoteIdentifier(tableName)} LIMIT ${limit}`;\n }\n}\n", "import type { Adapter, Relationship, Table } from '../adapter.ts';\nimport {\n TableGrounding,\n type TableGroundingConfig,\n} from '../groundings/table.grounding.ts';\n\ntype ColumnRow = {\n name: string | null | undefined;\n type: string | null | undefined;\n pk?: number | null | undefined;\n};\n\ntype ForeignKeyRow = {\n id: number | null | undefined;\n table: string | null | undefined;\n from: string | null | undefined;\n to: string | null | undefined;\n};\n\n/**\n * SQLite implementation of TableGrounding.\n *\n * SQLite requires caching all relationships for backward lookups because\n * PRAGMA foreign_key_list only returns outgoing FKs from a specific table.\n */\nexport class SqliteTableGrounding extends TableGrounding {\n #adapter: Adapter;\n #relationshipCache: Relationship[] | null = null;\n\n constructor(adapter: Adapter, config: TableGroundingConfig = {}) {\n super(config);\n this.#adapter = adapter;\n }\n\n protected override async getAllTableNames(): Promise<string[]> {\n const rows = await this.#adapter.runQuery<{\n name: string | null | undefined;\n }>(`SELECT name FROM sqlite_master WHERE type='table' ORDER BY name`);\n\n return rows\n .map((row) => row.name)\n .filter(\n (name): name is string =>\n typeof name === 'string' && !name.startsWith('sqlite_'),\n );\n }\n\n protected override async getTable(tableName: string): Promise<Table> {\n const columns = await this.#adapter.runQuery<ColumnRow>(\n `PRAGMA table_info(${this.#quoteIdentifier(tableName)})`,\n );\n\n return {\n name: tableName,\n rawName: tableName,\n columns: columns.map((col) => ({\n name: col.name ?? 'unknown',\n type: col.type ?? 
'unknown',\n })),\n };\n }\n\n protected override async findOutgoingRelations(\n tableName: string,\n ): Promise<Relationship[]> {\n const rows = await this.#adapter.runQuery<ForeignKeyRow>(\n `PRAGMA foreign_key_list(${this.#quoteIdentifier(tableName)})`,\n );\n\n const groups = new Map<number, Relationship>();\n\n for (const row of rows) {\n if (\n row.id == null ||\n row.table == null ||\n row.from == null ||\n row.to == null\n ) {\n continue;\n }\n\n const id = Number(row.id);\n const existing = groups.get(id);\n if (!existing) {\n groups.set(id, {\n table: tableName,\n from: [String(row.from)],\n referenced_table: String(row.table),\n to: [String(row.to)],\n });\n } else {\n existing.from.push(String(row.from));\n existing.to.push(String(row.to));\n }\n }\n\n return Array.from(groups.values());\n }\n\n protected override async findIncomingRelations(\n tableName: string,\n ): Promise<Relationship[]> {\n // SQLite limitation: PRAGMA only shows outgoing FKs\n // Must scan all tables and cache the results\n if (!this.#relationshipCache) {\n this.#relationshipCache = await this.#loadAllRelationships();\n }\n return this.#relationshipCache.filter(\n (r) => r.referenced_table === tableName,\n );\n }\n\n async #loadAllRelationships(): Promise<Relationship[]> {\n const allNames = await this.getAllTableNames();\n const results: Relationship[] = [];\n for (const name of allNames) {\n results.push(...(await this.findOutgoingRelations(name)));\n }\n return results;\n }\n\n #quoteIdentifier(name: string) {\n return `'${name.replace(/'/g, \"''\")}'`;\n }\n}\n", "import { type Adapter } from '../adapter.ts';\nimport { type ColumnStatsGroundingConfig } from '../groundings/column-stats.grounding.ts';\nimport { type ConstraintGroundingConfig } from '../groundings/constraint.grounding.ts';\nimport { type IndexesGroundingConfig } from '../groundings/indexes.grounding.ts';\nimport { type InfoGroundingConfig } from '../groundings/info.grounding.ts';\nimport { type ColumnValuesGroundingConfig } from '../groundings/column-values.grounding.ts';\nimport {\n ReportGrounding,\n type ReportGroundingConfig,\n} from '../groundings/report.grounding.ts';\nimport { type RowCountGroundingConfig } from '../groundings/row-count.grounding.ts';\nimport { type TableGroundingConfig } from '../groundings/table.grounding.ts';\nimport type { ViewGroundingConfig } from '../groundings/view.grounding.ts';\nimport { SqliteColumnStatsGrounding } from './column-stats.sqlite.grounding.ts';\nimport { SqliteConstraintGrounding } from './constraint.sqlite.grounding.ts';\nimport { SqliteIndexesGrounding } from './indexes.sqlite.grounding.ts';\nimport { SqliteInfoGrounding } from './info.sqlite.grounding.ts';\nimport { SqliteColumnValuesGrounding } from './column-values.sqlite.grounding.ts';\nimport { SqliteRowCountGrounding } from './row-count.sqlite.grounding.ts';\nimport { Sqlite } from './sqlite.ts';\nimport { SqliteTableGrounding } from './table.sqlite.grounding.ts';\nimport { SqliteViewGrounding } from './view.sqlite.grounding.ts';\n\nexport * from './sqlite.ts';\n\nexport function tables(config: TableGroundingConfig = {}) {\n return (adapter: Adapter) => new SqliteTableGrounding(adapter, config);\n}\n\nexport function info(config: InfoGroundingConfig = {}) {\n return (adapter: Adapter) => new SqliteInfoGrounding(adapter, config);\n}\n\nexport function views(config: ViewGroundingConfig = {}) {\n return (adapter: Adapter) => {\n return new SqliteViewGrounding(adapter, config);\n };\n}\n\nexport function columnStats(config: 
ColumnStatsGroundingConfig = {}) {\n return (adapter: Adapter) => {\n return new SqliteColumnStatsGrounding(adapter, config);\n };\n}\n\nexport function columnValues(config: ColumnValuesGroundingConfig = {}) {\n return (adapter: Adapter) => {\n return new SqliteColumnValuesGrounding(adapter, config);\n };\n}\n\nexport function indexes(config: IndexesGroundingConfig = {}) {\n return (adapter: Adapter) => {\n return new SqliteIndexesGrounding(adapter, config);\n };\n}\n\nexport function rowCount(config: RowCountGroundingConfig = {}) {\n return (adapter: Adapter) => {\n return new SqliteRowCountGrounding(adapter, config);\n };\n}\n\nexport function constraints(config: ConstraintGroundingConfig = {}) {\n return (adapter: Adapter) => {\n return new SqliteConstraintGrounding(adapter, config);\n };\n}\n\nexport function report(config: ReportGroundingConfig = {}) {\n return (adapter: Adapter) => new ReportGrounding(adapter, config);\n}\n\nexport default {\n tables,\n info,\n views,\n columnStats,\n columnValues,\n indexes,\n rowCount,\n constraints,\n report,\n Sqlite,\n};\n", "import { DatabaseSync } from 'node:sqlite';\n\nimport type { GroundingFn } from '../adapter.ts';\nimport { Sqlite } from '../sqlite/sqlite.ts';\nimport { type ColumnType, type ParsedSheet, parseFile } from './parser.ts';\n\n/**\n * Options for creating a Spreadsheet adapter.\n */\nexport interface SpreadsheetOptions {\n /**\n * Path to the spreadsheet file (Excel .xlsx/.xls or CSV/TSV).\n */\n file: string;\n\n /**\n * Optional path to persist the SQLite database.\n * If not provided, uses in-memory database (':memory:').\n */\n database?: string;\n\n /**\n * Grounding functions to use for schema introspection.\n */\n grounding: GroundingFn[];\n}\n\n/**\n * Spreadsheet adapter that loads Excel/CSV files into SQLite.\n *\n * This adapter:\n * 1. Parses the spreadsheet file (Excel or CSV/TSV)\n * 2. Creates a SQLite database (in-memory or file-based)\n * 3. Creates tables from sheets and loads data\n * 4. Delegates all SQL operations to the SQLite adapter\n *\n * @example\n * ```typescript\n * import { Spreadsheet, tables, info } from '@deepagents/text2sql/spreadsheet';\n *\n * const adapter = new Spreadsheet({\n * file: './sales.xlsx',\n * grounding: [tables(), info()]\n * });\n *\n * const schema = await adapter.introspect();\n * const results = await adapter.execute('SELECT * FROM Customers');\n * ```\n */\nexport class Spreadsheet extends Sqlite {\n #db: DatabaseSync;\n\n constructor(options: SpreadsheetOptions) {\n // Parse the spreadsheet file\n const sheets = parseFile(options.file);\n\n // Create SQLite database\n const dbPath = options.database ?? 
':memory:';\n const db = new DatabaseSync(dbPath);\n\n // Create tables and load data\n for (const sheet of sheets) {\n const createSQL = createTableSQL(sheet);\n db.exec(createSQL);\n loadData(db, sheet);\n }\n\n // Initialize the SQLite adapter with execute function\n super({\n execute: (sql: string) => db.prepare(sql).all(),\n grounding: options.grounding,\n });\n\n this.#db = db;\n }\n\n /**\n * Close the underlying SQLite database.\n * Call this when done to release resources.\n */\n close(): void {\n this.#db.close();\n }\n}\n\n/**\n * Generate CREATE TABLE SQL for a parsed sheet.\n */\nfunction createTableSQL(sheet: ParsedSheet): string {\n if (sheet.columns.length === 0) {\n throw new Error(`Cannot create table \"${sheet.name}\" with no columns.`);\n }\n\n const columns = sheet.columns\n .map((col) => `\"${escapeIdentifier(col.name)}\" ${col.type}`)\n .join(', ');\n\n return `CREATE TABLE \"${escapeIdentifier(sheet.name)}\" (${columns})`;\n}\n\n/**\n * SQLite input value type.\n */\ntype SQLiteValue = string | number | bigint | null | Uint8Array;\n\n/**\n * Load data from a parsed sheet into the SQLite database.\n * Uses transactions for performance.\n */\nfunction loadData(db: DatabaseSync, sheet: ParsedSheet): void {\n if (sheet.rows.length === 0) {\n return;\n }\n\n const columns = sheet.columns\n .map((c) => `\"${escapeIdentifier(c.name)}\"`)\n .join(', ');\n const placeholders = sheet.columns.map(() => '?').join(', ');\n\n const insertSQL = `INSERT INTO \"${escapeIdentifier(sheet.name)}\" (${columns}) VALUES (${placeholders})`;\n const stmt = db.prepare(insertSQL);\n\n db.exec('BEGIN TRANSACTION');\n\n try {\n for (const row of sheet.rows) {\n const values: SQLiteValue[] = sheet.columns.map((col) => {\n // Use originalKey to access row data (preserves original case)\n const rawValue = row[col.originalKey];\n return convertValue(rawValue, col.type);\n });\n stmt.run(...values);\n }\n db.exec('COMMIT');\n } catch (error) {\n db.exec('ROLLBACK');\n throw error;\n }\n}\n\n/**\n * Convert a JavaScript value to the appropriate SQLite type.\n * Type-aware conversion based on the inferred column type.\n */\nfunction convertValue(value: unknown, type: ColumnType): SQLiteValue {\n // Null/undefined/empty \u2192 NULL\n if (value == null || value === '') {\n return null;\n }\n\n // Handle Date objects - format as YYYY-MM-DD\n if (value instanceof Date) {\n return value.toISOString().split('T')[0];\n }\n\n switch (type) {\n case 'INTEGER': {\n // Convert to integer, floor decimals\n const num = Number(value);\n if (isNaN(num)) {\n return null; // Non-numeric values become NULL\n }\n return Math.floor(num);\n }\n\n case 'REAL': {\n // Convert to float\n const num = Number(value);\n if (isNaN(num)) {\n return null; // Non-numeric values become NULL\n }\n return num;\n }\n\n case 'TEXT':\n default: {\n // Convert to string\n if (typeof value === 'boolean') {\n return value ? 
'true' : 'false';\n }\n if (typeof value === 'object') {\n return JSON.stringify(value);\n }\n return String(value);\n }\n }\n}\n\n/**\n * Escape double quotes in identifiers for SQLite.\n */\nfunction escapeIdentifier(name: string): string {\n return name.replace(/\"/g, '\"\"');\n}\n", "import * as path from 'node:path';\nimport XLSX from 'xlsx';\n\n/**\n * Column type for SQLite.\n */\nexport type ColumnType = 'TEXT' | 'INTEGER' | 'REAL';\n\n/**\n * Column definition with name and inferred type.\n */\nexport interface Column {\n /** Sanitized column name for SQL */\n name: string;\n /** Original column name from spreadsheet (for data access) */\n originalKey: string;\n /** Inferred SQLite type */\n type: ColumnType;\n}\n\n/**\n * Parsed sheet with table name, columns, and row data.\n */\nexport interface ParsedSheet {\n name: string;\n columns: Column[];\n rows: Record<string, unknown>[];\n}\n\n/**\n * Parse an Excel or CSV/TSV file into sheets.\n *\n * - Excel files: each sheet becomes a ParsedSheet\n * - CSV/TSV files: single ParsedSheet with filename as table name\n */\nexport function parseFile(filePath: string): ParsedSheet[] {\n const ext = path.extname(filePath).toLowerCase();\n\n let workbook: XLSX.WorkBook;\n try {\n workbook = XLSX.readFile(filePath, {\n cellDates: true, // Parse dates as Date objects\n });\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error);\n throw new Error(`Failed to read spreadsheet \"${filePath}\": ${message}`);\n }\n\n const sheets: ParsedSheet[] = [];\n\n for (const sheetName of workbook.SheetNames) {\n const sheet = workbook.Sheets[sheetName];\n const rows = XLSX.utils.sheet_to_json<Record<string, unknown>>(sheet);\n\n // Skip empty sheets\n if (rows.length === 0) {\n continue;\n }\n\n // For CSV files, use filename as table name; for Excel, use sheet name\n const tableName =\n ext === '.csv' || ext === '.tsv'\n ? getTableNameFromFile(filePath)\n : sanitizeTableName(sheetName);\n\n const columns = inferColumns(rows);\n\n // Skip sheets with no columns (shouldn't happen if rows exist, but be safe)\n if (columns.length === 0) {\n continue;\n }\n\n sheets.push({\n name: tableName,\n columns,\n rows,\n });\n }\n\n if (sheets.length === 0) {\n throw new Error(\n `No valid sheets found in \"${filePath}\". 
All sheets are empty or have no columns.`,\n );\n }\n\n return sheets;\n}\n\n/**\n * Extract table name from filename.\n * './data/customers.csv' \u2192 'customers'\n */\nfunction getTableNameFromFile(filePath: string): string {\n const basename = path.basename(filePath, path.extname(filePath));\n return sanitizeTableName(basename);\n}\n\n/**\n * Sanitize a name to be a valid SQL table/column identifier.\n * - Lowercase for consistency\n * - Replace invalid chars with underscores\n * - Ensure it doesn't start with a number\n * - Trim and collapse multiple underscores\n * - Truncate to 64 characters\n */\nexport function sanitizeIdentifier(name: string): string {\n // Lowercase for consistent SQL identifiers\n let sanitized = name.toLowerCase();\n\n // Replace any non-alphanumeric (except underscore) with underscore\n sanitized = sanitized.replace(/[^a-z0-9_]/g, '_');\n\n // Collapse multiple underscores\n sanitized = sanitized.replace(/_+/g, '_');\n\n // Trim leading/trailing underscores\n sanitized = sanitized.replace(/^_+|_+$/g, '');\n\n // If starts with number, prefix with underscore\n if (/^[0-9]/.test(sanitized)) {\n sanitized = '_' + sanitized;\n }\n\n // If empty after sanitization, use a default\n if (!sanitized) {\n return 'column';\n }\n\n // Truncate to 64 characters (common SQL identifier limit)\n return sanitized.slice(0, 64);\n}\n\n// Alias for backwards compatibility\nexport const sanitizeTableName = sanitizeIdentifier;\n\n/**\n * Infer column definitions from row data.\n * Uses the first row's keys as column names and samples values for type inference.\n */\nfunction inferColumns(rows: Record<string, unknown>[]): Column[] {\n if (rows.length === 0) {\n return [];\n }\n\n // Get all unique keys from all rows (in case some rows have different keys)\n const keySet = new Set<string>();\n for (const row of rows) {\n for (const key of Object.keys(row)) {\n keySet.add(key);\n }\n }\n\n // Handle empty keys (sheets with no headers)\n if (keySet.size === 0) {\n return [];\n }\n\n const rawNames = Array.from(keySet);\n const dedupedNames = deduplicateColumnNames(rawNames);\n\n return dedupedNames.map((name, idx) => {\n const originalKey = rawNames[idx];\n const values = rows.map((row) => row[originalKey]);\n const type = inferColumnType(values);\n return { name, originalKey, type };\n });\n}\n\n/**\n * Deduplicate column names by appending _2, _3, etc. to duplicates.\n */\nfunction deduplicateColumnNames(names: string[]): string[] {\n const seen = new Map<string, number>();\n const result: string[] = [];\n\n for (const rawName of names) {\n // Sanitize the column name\n let name = sanitizeTableName(rawName);\n\n // Handle empty names (generate column_1, column_2, etc.)\n if (!name) {\n name = 'column';\n }\n\n const count = seen.get(name) ?? 
0;\n if (count > 0) {\n result.push(`${name}_${count + 1}`);\n } else {\n result.push(name);\n }\n seen.set(name, count + 1);\n }\n\n return result;\n}\n\n/**\n * Infer SQLite column type from sample values.\n * Conservative approach: when in doubt, use TEXT.\n */\nfunction inferColumnType(values: unknown[]): ColumnType {\n let hasInteger = false;\n let hasReal = false;\n\n for (const value of values) {\n // Skip nullish or empty values\n if (value == null || value === '') {\n continue;\n }\n\n // Dates are stored as TEXT (ISO format)\n if (value instanceof Date) {\n return 'TEXT';\n }\n\n if (typeof value === 'number') {\n if (Number.isInteger(value)) {\n hasInteger = true;\n } else {\n hasReal = true;\n }\n } else if (typeof value === 'boolean') {\n // Booleans can be stored as INTEGER (0/1)\n hasInteger = true;\n } else {\n // Any non-number type means TEXT\n return 'TEXT';\n }\n }\n\n // If we have any REAL values, use REAL (even if some are integers)\n if (hasReal) {\n return 'REAL';\n }\n\n // If we only have integers, use INTEGER\n if (hasInteger) {\n return 'INTEGER';\n }\n\n // Default to TEXT (all values were null/empty)\n return 'TEXT';\n}\n"],
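The parser source embedded above defines the identifier sanitization and type-inference rules the Spreadsheet adapter relies on. A minimal sketch of the resulting behavior, assuming `sanitizeIdentifier` is re-exported from a spreadsheet entry point (the `dist/lib/adapters/spreadsheet` layout suggests `@deepagents/text2sql/spreadsheet`, but that subpath is an assumption, not confirmed by this diff):

```ts
// Sketch only: the subpath import below is assumed from the dist/ layout.
import { sanitizeIdentifier } from '@deepagents/text2sql/spreadsheet';

sanitizeIdentifier('Order ID');   // 'order_id'    — non-alphanumerics become '_'
sanitizeIdentifier('2023 Sales'); // '_2023_sales' — leading digit gets a '_' prefix
sanitizeIdentifier('___');        // 'column'      — empty after trimming falls back
```

The expected outputs follow directly from the sanitization steps shown in the embedded `parser.ts`: lowercase, replace, collapse, trim, digit-prefix, fallback, truncate.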
5
- "mappings": ";AAmCO,SAAS,YAAY,OAIR;AAClB,SAAO;AAAA,IACL,MAAM;AAAA,IACN,MAAM;AAAA,MACJ,SAAS,MAAM;AAAA,MACf,GAAI,MAAM,WAAW,EAAE,SAAS,MAAM,QAAQ;AAAA,MAC9C,GAAI,MAAM,YAAY,EAAE,UAAU,MAAM,SAAS;AAAA,IACnD;AAAA,EACF;AACF;AA2BO,SAAS,MAAM,OAQF;AAClB,SAAO;AAAA,IACL,MAAM;AAAA,IACN,MAAM;AAAA,MACJ,MAAM,MAAM;AAAA,MACZ,GAAI,MAAM,UAAU,EAAE,QAAQ,MAAM,OAAO;AAAA,MAC3C,GAAI,MAAM,YAAY,QAAQ,EAAE,UAAU,MAAM,SAAS;AAAA,MACzD,GAAI,MAAM,YAAY,EAAE,UAAU,MAAM,SAAS;AAAA,MACjD,SAAS,MAAM;AAAA,MACf,GAAI,MAAM,SAAS,UAAU,EAAE,SAAS,MAAM,QAAQ;AAAA,MACtD,GAAI,MAAM,aAAa,UAAU,EAAE,aAAa,MAAM,YAAY;AAAA,IACpE;AAAA,EACF;AACF;AAyBO,SAAS,OAAO,OAeH;AAClB,SAAO;AAAA,IACL,MAAM;AAAA,IACN,MAAM;AAAA,MACJ,MAAM,MAAM;AAAA,MACZ,MAAM,MAAM;AAAA,MACZ,GAAI,MAAM,MAAM,EAAE,IAAI,KAAK;AAAA,MAC3B,GAAI,MAAM,MAAM,EAAE,IAAI,MAAM,GAAG;AAAA,MAC/B,GAAI,MAAM,UAAU,EAAE,QAAQ,KAAK;AAAA,MACnC,GAAI,MAAM,WAAW,EAAE,SAAS,KAAK;AAAA,MACrC,GAAI,MAAM,WAAW,EAAE,SAAS,MAAM,QAAQ;AAAA,MAC9C,GAAI,MAAM,WAAW,EAAE,SAAS,KAAK;AAAA,MACrC,GAAI,MAAM,QAAQ,UAAU,EAAE,QAAQ,MAAM,OAAO;AAAA,MACnD,GAAI,MAAM,SAAS,EAAE,OAAO,MAAM,MAAM;AAAA,IAC1C;AAAA,EACF;AACF;AAaO,SAAS,MAAM,OAKF;AAClB,SAAO;AAAA,IACL,MAAM;AAAA,IACN,MAAM;AAAA,MACJ,MAAM,MAAM;AAAA,MACZ,SAAS,MAAM;AAAA,MACf,GAAI,MAAM,UAAU,EAAE,QAAQ,KAAK;AAAA,MACnC,GAAI,MAAM,QAAQ,EAAE,MAAM,MAAM,KAAK;AAAA,IACvC;AAAA,EACF;AACF;AA6BO,SAAS,WAAW,OAcP;AAClB,SAAO;AAAA,IACL,MAAM;AAAA,IACN,MAAM;AAAA,MACJ,MAAM,MAAM;AAAA,MACZ,MAAM,MAAM;AAAA,MACZ,GAAI,MAAM,SAAS,UAAU,EAAE,SAAS,MAAM,QAAQ;AAAA,MACtD,GAAI,MAAM,cAAc,EAAE,YAAY,MAAM,WAAW;AAAA,MACvD,GAAI,MAAM,gBAAgB,EAAE,cAAc,MAAM,aAAa;AAAA,MAC7D,GAAI,MAAM,mBAAmB,EAAE,iBAAiB,MAAM,gBAAgB;AAAA,MACtE,GAAI,MAAM,mBAAmB,UAAU;AAAA,QACrC,mBAAmB,MAAM;AAAA,MAC3B;AAAA,IACF;AAAA,EACF;AACF;AAoBO,SAAS,KAAK,OAKD;AAClB,SAAO;AAAA,IACL,MAAM;AAAA,IACN,MAAM;AAAA,MACJ,MAAM,MAAM;AAAA,MACZ,GAAI,MAAM,UAAU,EAAE,QAAQ,MAAM,OAAO;AAAA,MAC3C,SAAS,MAAM;AAAA,MACf,GAAI,MAAM,cAAc,EAAE,YAAY,MAAM,WAAW;AAAA,IACzD;AAAA,EACF;AACF;AAgBO,SAAS,aAAa,OAIT;AAClB,SAAO;AAAA,IACL,MAAM;AAAA,IACN,MAAM;AAAA,MACJ,MAAM,MAAM;AAAA,MACZ,IAAI,MAAM;AAAA,MACV,GAAI,MAAM,eAAe,EAAE,aAAa,MAAM,YAAY;AAAA,IAC5D;AAAA,EACF;AACF;;;AC/PO,SAAS,yBAA2C;AACzD,SAAO;AAAA,IACL,QAAQ,CAAC;AAAA,IACT,OAAO,CAAC;AAAA,IACR,eAAe,CAAC;AAAA,IAChB,MAAM;AAAA,EACR;AACF;;;ACyEO,IAAe,UAAf,MAAuB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAuB5B,MAAM,WAAW,MAAM,uBAAuB,GAA+B;AAE3E,eAAW,MAAM,KAAK,WAAW;AAC/B,YAAM,YAAY,GAAG,IAAI;AACzB,YAAM,UAAU,QAAQ,GAAG;AAAA,IAC7B;AAGA,WAAO,KAAK,mBAAmB,GAAG;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,mBAAmB,KAA0C;AAC3D,UAAM,YAA+B,CAAC;AAGtC,QAAI,IAAI,MAAM;AACZ,gBAAU;AAAA,QACR,YAAY;AAAA,UACV,SAAS,IAAI,KAAK;AAAA,UAClB,SAAS,IAAI,KAAK;AAAA,UAClB,UAAU,IAAI,KAAK;AAAA,QACrB,CAAC;AAAA,MACH;AAAA,IACF;AAGA,eAAW,KAAK,IAAI,QAAQ;AAC1B,gBAAU,KAAK,KAAK,iBAAiB,CAAC,CAAC;AAAA,IACzC;AAGA,eAAW,KAAK,IAAI,OAAO;AACzB,gBAAU,KAAK,KAAK,gBAAgB,CAAC,CAAC;AAAA,IACxC;AAGA,UAAM,WAAW,IAAI,IAAI,IAAI,OAAO,IAAI,CAAC,MAAM,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC;AAC3D,eAAW,OAAO,IAAI,eAAe;AACnC,YAAM,cAAc,SAAS,IAAI,IAAI,KAAK;AAC1C,YAAM,cAAc,SAAS,IAAI,IAAI,gBAAgB;AACrD,gBAAU;AAAA,QACR,KAAK,wBAAwB,KAAK,aAAa,WAAW;AAAA,MAC5D;AAAA,IACF;AAGA,QAAI,IAAI,QAAQ;AACd,gBAAU,KAAK,EAAE,MAAM,mBAAmB,MAAM,IAAI,OAAO,CAAC;AAAA,IAC9D;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,iBAAiB,GAA2B;AAE1C,UAAM,eAAe,EAAE,aAAa,KAAK,CAAC,MAAM,EAAE,SAAS,aAAa;AACxE,UAAM,YAAY,IAAI,IAAI,cAAc,WAAW,CAAC,CAAC;AAErD,UAAM,iBAAiB,IAAI;AAAA,MACzB,EAAE,aACE,OAAO,CAAC,MAAM,EAAE,SAAS,UAAU,EACpC,QAAQ,CAAC,MAAM,EAAE,WAAW,CAAC,CAAC,KAAK,CAAC;AAAA,IACzC;AAEA,UAAM,kBAAkB,oBAAI,IAAoB;AAChD,eAAW,KAAK,EAAE,aAAa,OAAO,CAACA,OAAMA,GAAE,SAAS,SAAS,KAAK,CAAC,GAAG;AACxE,iBAAW,OAAO,EAAE,WAAW,C
AAC,GAAG;AACjC,YAAI,EAAE,gBAAgB,MAAM;AAC1B,0BAAgB,IAAI,KAAK,EAAE,YAAY;AAAA,QACzC;AAAA,MACF;AAAA,IACF;AAGA,UAAM,gBAAgB,IAAI;AAAA,MACxB,EAAE,aACE,OAAO,CAAC,MAAM,EAAE,SAAS,YAAY,EAAE,SAAS,WAAW,CAAC,EAC7D,QAAQ,CAAC,MAAM,EAAE,WAAW,CAAC,CAAC,KAAK,CAAC;AAAA,IACzC;AAGA,UAAM,aAAa,oBAAI,IAAoB;AAC3C,eAAW,KAAK,EAAE,aAAa,OAAO,CAACA,OAAMA,GAAE,SAAS,aAAa,KACnE,CAAC,GAAG;AACJ,YAAM,OAAO,EAAE,WAAW,CAAC;AAC3B,YAAM,UAAU,EAAE,qBAAqB,CAAC;AACxC,eAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,cAAM,SAAS,QAAQ,CAAC,KAAK,QAAQ,CAAC,KAAK,KAAK,CAAC;AACjD,mBAAW,IAAI,KAAK,CAAC,GAAG,GAAG,EAAE,eAAe,IAAI,MAAM,EAAE;AAAA,MAC1D;AAAA,IACF;AAGA,UAAM,kBAAkB,EAAE,QAAQ;AAAA,MAAI,CAAC,QACrC,OAAO;AAAA,QACL,MAAM,IAAI;AAAA,QACV,MAAM,IAAI;AAAA,QACV,IAAI,UAAU,IAAI,IAAI,IAAI,KAAK;AAAA,QAC/B,IAAI,WAAW,IAAI,IAAI,IAAI;AAAA,QAC3B,QAAQ,cAAc,IAAI,IAAI,IAAI,KAAK;AAAA,QACvC,SAAS,eAAe,IAAI,IAAI,IAAI,KAAK;AAAA,QACzC,SAAS,gBAAgB,IAAI,IAAI,IAAI;AAAA,QACrC,SAAS,IAAI,aAAa;AAAA,QAC1B,QAAQ,IAAI;AAAA,QACZ,OAAO,IAAI;AAAA,MACb,CAAC;AAAA,IACH;AAGA,UAAM,kBAAkB,EAAE,WAAW,CAAC,GAAG;AAAA,MAAI,CAAC,QAC5C,MAAM;AAAA,QACJ,MAAM,IAAI;AAAA,QACV,SAAS,IAAI;AAAA,QACb,QAAQ,IAAI;AAAA,QACZ,MAAM,IAAI;AAAA,MACZ,CAAC;AAAA,IACH;AAGA,UAAM,uBAAuB,EAAE,eAAe,CAAC,GAC5C;AAAA,MACC,CAAC,MACC,EAAE,SAAS,WACV,EAAE,SAAS,aAAa,EAAE,SAAS,UAAU,KAAK;AAAA,IACvD,EACC;AAAA,MAAI,CAAC,MACJ,WAAW;AAAA,QACT,MAAM,EAAE;AAAA,QACR,MAAM,EAAE;AAAA,QACR,SAAS,EAAE;AAAA,QACX,YAAY,EAAE;AAAA,MAChB,CAAC;AAAA,IACH;AAEF,WAAO,MAAM;AAAA,MACX,MAAM,EAAE;AAAA,MACR,QAAQ,EAAE;AAAA,MACV,UAAU,EAAE;AAAA,MACZ,UAAU,EAAE;AAAA,MACZ,SAAS;AAAA,MACT,SAAS,eAAe,SAAS,IAAI,iBAAiB;AAAA,MACtD,aACE,oBAAoB,SAAS,IAAI,sBAAsB;AAAA,IAC3D,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,gBAAgB,GAA0B;AACxC,UAAM,kBAAkB,EAAE,QAAQ;AAAA,MAAI,CAAC,QACrC,OAAO;AAAA,QACL,MAAM,IAAI;AAAA,QACV,MAAM,IAAI;AAAA,QACV,QAAQ,IAAI;AAAA,QACZ,OAAO,IAAI;AAAA,MACb,CAAC;AAAA,IACH;AAEA,WAAO,KAAK;AAAA,MACV,MAAM,EAAE;AAAA,MACR,QAAQ,EAAE;AAAA,MACV,SAAS;AAAA,MACT,YAAY,EAAE;AAAA,IAChB,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,wBACE,KACA,aACA,aACiB;AACjB,UAAM,cAAc,aAAa;AACjC,UAAM,cAAc,aAAa;AAEjC,QAAI;AAOJ,QAAI,eAAe,QAAQ,eAAe,QAAQ,cAAc,GAAG;AACjE,YAAM,QAAQ,cAAc;AAC5B,UAAI,QAAQ,GAAG;AACb,sBAAc;AAAA,MAChB,WAAW,QAAQ,OAAO,QAAQ,KAAK;AACrC,sBAAc;AAAA,MAChB,WAAW,QAAQ,KAAK;AACtB,sBAAc;AAAA,MAChB;AAAA,IACF;AAEA,WAAO,aAAa;AAAA,MAClB,MAAM,EAAE,OAAO,IAAI,OAAO,SAAS,IAAI,KAAK;AAAA,MAC5C,IAAI,EAAE,OAAO,IAAI,kBAAkB,SAAS,IAAI,GAAG;AAAA,MACnD;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA,EA+BA,SAAS,OAAoC;AAC3C,QAAI,OAAO,UAAU,YAAY,OAAO,SAAS,KAAK,GAAG;AACvD,aAAO;AAAA,IACT;AACA,QAAI,OAAO,UAAU,UAAU;AAC7B,aAAO,OAAO,KAAK;AAAA,IACrB;AACA,QAAI,OAAO,UAAU,YAAY,MAAM,KAAK,MAAM,IAAI;AACpD,YAAM,SAAS,OAAO,KAAK;AAC3B,aAAO,OAAO,SAAS,MAAM,IAAI,SAAS;AAAA,IAC5C;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,eAAe,MAAiD;AAC9D,QAAI,KAAK,SAAS,GAAG,GAAG;AACtB,YAAM,CAAC,QAAQ,GAAG,IAAI,IAAI,KAAK,MAAM,GAAG;AACxC,aAAO,EAAE,QAAQ,OAAO,KAAK,KAAK,GAAG,EAAE;AAAA,IACzC;AACA,WAAO,EAAE,QAAQ,KAAK,iBAAiB,IAAI,OAAO,KAAK;AAAA,EACzD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,aAAa,OAAuB;AAClC,WAAO,MAAM,QAAQ,MAAM,IAAI;AAAA,EACjC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,kBAAkB,YAAoB,gBAAmC;AACvE,QAAI,kBAAkB,eAAe,SAAS,GAAG;AAC/C,YAAM,SAAS,eACZ,IAAI,CAAC,MAAM,IAAI,KAAK,aAAa,CAAC,CAAC,GAAG,EACtC,KAAK,IAAI;AACZ,aAAO,OAAO,UAAU,QAAQ,MAAM;AAAA,IACxC;AACA,QAAI,KAAK,cAAc,SAAS,GAAG;AACjC,YAAM,SAAS,KAAK,cACjB,IAAI,CAAC,MAAM,IAAI,KAAK,aAAa,CAAC,CAAC,GAAG,EACtC,KAAK,IAAI;AACZ,aAAO,OAAO,UAAU,YAAY,MAAM;AAAA,IAC5C;AACA,WAAO;AAAA,EACT;AACF;;;AChaO,IAAe,oBAAf,MAAiC;AAAA;AAAA;AAAA;AAAA,EAItC;AAAA,EAEA,YAAY,MAAc;AACxB,SAAK,OAAO;AAAA,EACd;AAUF;;;ACzBO,IAAe,uBAAf,cAA4C,kBAAkB;AAAA,EACnE,YAAY,
SAAqC,CAAC,GAAG;AACnD,UAAM,aAAa;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,MAAM,QAAQ,KAAsC;AAElD,UAAM,gBAAmC,CAAC,GAAG,IAAI,QAAQ,GAAG,IAAI,KAAK;AACrE,eAAW,aAAa,eAAe;AACrC,iBAAWC,WAAU,UAAU,SAAS;AAEtC,YAAI;AACF,gBAAM,QAAQ,MAAM,KAAK,aAAa,UAAU,MAAMA,OAAM;AAC5D,cAAI,OAAO;AACT,YAAAA,QAAO,QAAQ;AAAA,UACjB;AAAA,QACF,SAAS,OAAO;AAEd,kBAAQ;AAAA,YACN;AAAA,YACA,UAAU;AAAA,YACVA,QAAO;AAAA,YACP;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;;;AC1CO,IAAe,gBAAf,cAAqC,kBAAkB;AAAA,EAC5D,YAAY,SAA8B,CAAC,GAAG;AAC5C,UAAM,aAAa;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAM,QAAQ,KAAsC;AAClD,QAAI,OAAO,MAAM,KAAK,YAAY;AAAA,EACpC;AACF;;;ACFO,IAAe,wBAAf,cAA6C,kBAAkB;AAAA,EAC1D;AAAA,EAEV,YAAY,SAAsC,CAAC,GAAG;AACpD,UAAM,cAAc;AACpB,SAAK,sBAAsB,OAAO,uBAAuB;AAAA,EAC3D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAgB,kBACd,YACA,SAC+B;AAC/B,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAkBU,qBACRC,aACA,YACsB;AACtB,QAAIA,YAAW,SAAS,WAAW,CAACA,YAAW,YAAY;AACzD,aAAO;AAAA,IACT;AAGA,QAAIA,YAAW,WAAW,CAACA,YAAW,QAAQ,SAAS,UAAU,GAAG;AAClE,aAAO;AAAA,IACT;AAEA,UAAM,MAAMA,YAAW;AACvB,UAAM,aAAa,KAAK,YAAY,UAAU;AAI9C,UAAM,aAAa,cAAc,UAAU;AAG3C,UAAM,UAAU,IAAI;AAAA,MAClB,IAAI,OAAO,GAAG,UAAU,2BAA2B,GAAG;AAAA,IACxD;AACA,QAAI,SAAS;AACX,aAAO,KAAK,oBAAoB,QAAQ,CAAC,CAAC;AAAA,IAC5C;AAGA,UAAM,WAAW,IAAI;AAAA,MACnB,IAAI;AAAA,QACF,GAAG,UAAU;AAAA,QACb;AAAA,MACF;AAAA,IACF;AACA,QAAI,UAAU;AACZ,aAAO,KAAK,oBAAoB,SAAS,CAAC,CAAC;AAAA,IAC7C;AAGA,UAAM,YAAY,IAAI;AAAA,MACpB,MAAM,KAAK,YAAY,UAAU,CAAC;AAAA,MAClC;AAAA,IACF;AACA,UAAM,YAAY,CAAC,GAAG,IAAI,SAAS,SAAS,CAAC;AAC7C,QAAI,UAAU,UAAU,GAAG;AACzB,aAAO,UAAU,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC;AAAA,IAClC;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,oBAAoB,OAAqC;AAC/D,UAAM,SAAmB,CAAC;AAE1B,UAAM,UAAU,MAAM,SAAS,YAAY;AAC3C,eAAW,SAAS,SAAS;AAC3B,aAAO,KAAK,MAAM,CAAC,CAAC;AAAA,IACtB;AACA,WAAO,OAAO,SAAS,IAAI,SAAS;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA,EAKQ,YAAY,KAAqB;AACvC,WAAO,IAAI,QAAQ,uBAAuB,MAAM;AAAA,EAClD;AAAA;AAAA;AAAA;AAAA,EAKQ,SAAS,KAAuB,MAAiC;AACvE,WAAO,IAAI,OAAO,KAAK,CAAC,MAAM,EAAE,SAAS,IAAI;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,QAAQ,KAAsC;AAElD,UAAM,gBAAmC,CAAC,GAAG,IAAI,QAAQ,GAAG,IAAI,KAAK;AAErE,eAAW,aAAa,eAAe;AACrC,YAAMC,SAAQ,KAAK,SAAS,KAAK,UAAU,IAAI;AAE/C,iBAAWC,WAAU,UAAU,SAAS;AACtC,YAAI;AACF,gBAAM,SAAS,MAAM,KAAK;AAAA,YACxB,UAAU;AAAA,YACVA;AAAA,YACAD,QAAO;AAAA,UACT;AACA,cAAI,QAAQ;AACV,YAAAC,QAAO,OAAO,OAAO;AACrB,YAAAA,QAAO,SAAS,OAAO;AAAA,UACzB;AAAA,QACF,SAAS,OAAO;AACd,kBAAQ;AAAA,YACN;AAAA,YACA,UAAU;AAAA,YACVA,QAAO;AAAA,YACP;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,oBACZ,WACAA,SACA,aACyC;AAEzC,UAAM,aAAa,MAAM,KAAK,kBAAkB,WAAWA,OAAM;AACjE,QAAI,YAAY,QAAQ;AACtB,aAAO,EAAE,MAAM,QAAQ,QAAQ,WAAW;AAAA,IAC5C;AAGA,QAAI,aAAa;AACf,iBAAWF,eAAc,aAAa;AACpC,cAAM,cAAc,KAAK,qBAAqBA,aAAYE,QAAO,IAAI;AACrE,YAAI,aAAa,QAAQ;AACvB,iBAAO,EAAE,MAAM,QAAQ,QAAQ,YAAY;AAAA,QAC7C;AAAA,MACF;AAAA,IACF;AAGA,UAAM,gBAAgB,MAAM,KAAK,sBAAsB,WAAWA,OAAM;AACxE,QAAI,eAAe,QAAQ;AACzB,aAAO,EAAE,MAAM,kBAAkB,QAAQ,cAAc;AAAA,IACzD;AAEA,WAAO;AAAA,EACT;AACF;;;ACvNA,SAAS,YAAY;AACrB,SAAS,YAAY;AACrB,OAAO,YAAY;AACnB,OAAO,OAAO;AAEd;AAAA,EAEE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACK;AA0BP,IAAM,cAAc,MAAqC;AAAA,EACvD,MAAM;AAAA,EACN,OAAO,KAAK,oBAAoB;AAAA,EAChC,QAAQ,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAwBd,OAAO;AAAA,IACL,gBAAgB,KAAK;AAAA,MACnB,aACE;AAAA,MACF,aAAa,EAAE,OAAO;AAAA,QACpB,KAAK,EAAE,OAAO,EAAE,SAAS,6BAA6B;AAAA,QACtD,SAAS,EACN,OAAO,EACP,SAAS,uDAAuD;AAAA,MACrE,CAAC;AAAA,MACD,SAAS,CAAC,EAAE,IAAI,GAAG,YAAY;AAC7B,cAAM,QAAQ,QAA8B,OAAO;AACnD,eAAO,MAAM,QAAQ,
QAAQ,GAAG;AAAA,MAClC;AAAA,IACF,CAAC;AAAA,EACH;AACF,CAAC;;;AC5DM,IAAe,oBAAf,cAAyC,kBAAkB;AAAA,EAChE,YAAY,SAAkC,CAAC,GAAG;AAChD,UAAM,UAAU;AAAA,EAClB;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,QAAQ,KAAsC;AAClD,eAAWC,UAAS,IAAI,QAAQ;AAC9B,YAAM,QAAQ,MAAM,KAAK,YAAYA,OAAM,IAAI;AAC/C,UAAI,SAAS,MAAM;AACjB,QAAAA,OAAM,WAAW;AACjB,QAAAA,OAAM,WAAW,KAAK,kBAAkB,KAAK;AAAA,MAC/C;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,kBAAkB,OAAkC;AAClD,QAAI,QAAQ,IAAK,QAAO;AACxB,QAAI,QAAQ,IAAM,QAAO;AACzB,QAAI,QAAQ,IAAO,QAAO;AAC1B,QAAI,QAAQ,IAAQ,QAAO;AAC3B,WAAO;AAAA,EACT;AACF;;;ACpBO,IAAe,iBAAf,cAAsC,kBAAkB;AAAA,EAC7D;AAAA,EACA;AAAA,EACA;AAAA,EAEA,YAAY,SAA+B,CAAC,GAAG;AAC7C,UAAM,OAAO;AACb,SAAK,UAAU,OAAO;AACtB,SAAK,WAAW,OAAO;AACvB,SAAK,YAAY,OAAO;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA;AAAA,EAsBA,MAAM,QAAQ,KAAsC;AAClD,UAAM,aAAa,MAAM,KAAK,YAAY;AAC1C,UAAM,UAAU,KAAK;AACrB,UAAM,WAAW,KAAK;AAGtB,QAAI,CAAC,WAAW,CAAC,UAAU;AACzB,YAAMC,UAAS,MAAM,QAAQ;AAAA,QAC3B,WAAW,IAAI,CAAC,SAAS,KAAK,SAAS,IAAI,CAAC;AAAA,MAC9C;AACA,UAAI,OAAO,KAAK,GAAGA,OAAM;AACzB;AAAA,IACF;AAEA,UAAMA,UAAgC,CAAC;AACvC,UAAM,mBAAmC,CAAC;AAC1C,UAAM,oBAAoB,oBAAI,IAAY;AAG1C,UAAM,eAAuD,CAAC;AAC9D,UAAM,gBAAwD,CAAC;AAC/D,UAAM,iBAAiB,oBAAI,IAAY;AACvC,UAAM,kBAAkB,oBAAI,IAAY;AAGxC,eAAW,QAAQ,YAAY;AAC7B,UAAI,QAAS,cAAa,KAAK,EAAE,MAAM,OAAO,EAAE,CAAC;AACjD,UAAI,SAAU,eAAc,KAAK,EAAE,MAAM,OAAO,EAAE,CAAC;AAAA,IACrD;AAGA,UAAM,eAAe,YAAY,OAAO,WAAW,WAAW;AAC9D,WAAO,aAAa,SAAS,GAAG;AAC9B,YAAM,OAAO,aAAa,MAAM;AAChC,UAAI,CAAC,KAAM;AACX,YAAM,EAAE,MAAM,MAAM,IAAI;AAExB,UAAI,eAAe,IAAI,IAAI,EAAG;AAC9B,qBAAe,IAAI,IAAI;AAEvB,UAAI,CAACA,QAAO,IAAI,GAAG;AACjB,QAAAA,QAAO,IAAI,IAAI,MAAM,KAAK,SAAS,IAAI;AAAA,MACzC;AAEA,UAAI,QAAQ,cAAc;AACxB,cAAM,OAAO,MAAM,KAAK,sBAAsB,IAAI;AAClD,mBAAW,OAAO,MAAM;AACtB,eAAK,gBAAgB,KAAK,kBAAkB,iBAAiB;AAC7D,cAAI,CAAC,eAAe,IAAI,IAAI,gBAAgB,GAAG;AAC7C,yBAAa,KAAK,EAAE,MAAM,IAAI,kBAAkB,OAAO,QAAQ,EAAE,CAAC;AAAA,UACpE;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAGA,UAAM,gBAAgB,aAAa,OAAO,WAAW,YAAY;AACjE,WAAO,cAAc,SAAS,GAAG;AAC/B,YAAM,OAAO,cAAc,MAAM;AACjC,UAAI,CAAC,KAAM;AACX,YAAM,EAAE,MAAM,MAAM,IAAI;AAExB,UAAI,gBAAgB,IAAI,IAAI,EAAG;AAC/B,sBAAgB,IAAI,IAAI;AAExB,UAAI,CAACA,QAAO,IAAI,GAAG;AACjB,QAAAA,QAAO,IAAI,IAAI,MAAM,KAAK,SAAS,IAAI;AAAA,MACzC;AAEA,UAAI,QAAQ,eAAe;AACzB,cAAM,OAAO,MAAM,KAAK,sBAAsB,IAAI;AAClD,mBAAW,OAAO,MAAM;AACtB,eAAK,gBAAgB,KAAK,kBAAkB,iBAAiB;AAC7D,cAAI,CAAC,gBAAgB,IAAI,IAAI,KAAK,GAAG;AACnC,0BAAc,KAAK,EAAE,MAAM,IAAI,OAAO,OAAO,QAAQ,EAAE,CAAC;AAAA,UAC1D;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAGA,UAAM,aAAa,OAAO,OAAOA,OAAM;AACvC,QAAI,OAAO,KAAK,GAAG,UAAU;AAC7B,QAAI,cAAc,KAAK,GAAG,gBAAgB;AAAA,EAC5C;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAgB,cAAiC;AAC/C,UAAM,SAAS,KAAK;AACpB,QAAI,MAAM,QAAQ,MAAM,GAAG;AACzB,aAAO;AAAA,IACT;AACA,UAAM,QAAQ,MAAM,KAAK,iBAAiB;AAC1C,QAAI,CAAC,QAAQ;AACX,aAAO;AAAA,IACT;AACA,QAAI,kBAAkB,QAAQ;AAC5B,aAAO,MAAM,OAAO,CAAC,SAAS,OAAO,KAAK,IAAI,CAAC;AAAA,IACjD;AACA,WAAO,MAAM,OAAO,MAAM;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA,EAKU,gBACR,KACA,KACA,MACM;AACN,UAAM,MAAM,GAAG,IAAI,KAAK,IAAI,IAAI,KAAK,KAAK,GAAG,CAAC,IAAI,IAAI,gBAAgB,IAAI,IAAI,GAAG,KAAK,GAAG,CAAC;AAC1F,QAAI,CAAC,KAAK,IAAI,GAAG,GAAG;AAClB,WAAK,IAAI,GAAG;AACZ,UAAI,KAAK,GAAG;AAAA,IACd;AAAA,EACF;AACF;;;ACjLO,IAAM,6BAAN,cAAyC,qBAAqB;AAAA,EACnE;AAAA,EAEA,YAAY,SAAkB,SAAqC,CAAC,GAAG;AACrE,UAAM,MAAM;AACZ,SAAK,WAAW;AAAA,EAClB;AAAA,EAEA,MAAyB,aACvB,WACAC,SACkC;AAClC,QAAI,CAAC,KAAK,oBAAoBA,QAAO,IAAI,GAAG;AAC1C,aAAO;AAAA,IACT;AAEA,UAAM,kBAAkB,KAAK,SAAS,gBAAgB,SAAS;AAC/D,UAAM,mBAAmB,KAAK,SAAS,gBAAgBA,QAAO,IAAI;AAElE,UAAM,MAAM;AAAA;AAAA,cAEF,gBAAgB;AAAA,cAChB,gBAAgB;AAAA,wBACN,gBAAgB;AAAA,aAC3B,eAAe;AAAA;AAGxB,UAAM,OAAO,MAAM,KAAK,SAAS,SAI9B,GAAG;AAEN,QAAI,CAAC,KAAK,QAAQ;AAChB,aAA
O;AAAA,IACT;AAEA,UAAM,MAAM,KAAK,gBAAgB,KAAK,CAAC,GAAG,SAAS;AACnD,UAAM,MAAM,KAAK,gBAAgB,KAAK,CAAC,GAAG,SAAS;AACnD,UAAM,eAAe,KAAK,SAAS,SAAS,KAAK,CAAC,GAAG,aAAa;AAElE,QAAI,OAAO,QAAQ,OAAO,QAAQ,gBAAgB,MAAM;AACtD,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,MACL,KAAK,OAAO;AAAA,MACZ,KAAK,OAAO;AAAA,MACZ,cACE,gBAAgB,QAAQ,OAAO,SAAS,YAAY,IAChD,KAAK,IAAI,GAAG,KAAK,IAAI,GAAG,YAAY,CAAC,IACrC;AAAA,IACR;AAAA,EACF;AAAA,EAEA,oBAAoB,MAAmC;AACrD,QAAI,CAAC,MAAM;AACT,aAAO;AAAA,IACT;AACA,UAAM,aAAa,KAAK,YAAY;AACpC,WAAO,uDAAuD;AAAA,MAC5D;AAAA,IACF;AAAA,EACF;AAAA,EAEA,gBAAgB,OAA+B;AAC7C,QAAI,UAAU,QAAQ,UAAU,QAAW;AACzC,aAAO;AAAA,IACT;AACA,QAAI,OAAO,UAAU,UAAU;AAC7B,aAAO;AAAA,IACT;AACA,QAAI,OAAO,UAAU,YAAY,OAAO,UAAU,UAAU;AAC1D,aAAO,OAAO,KAAK;AAAA,IACrB;AACA,QAAI,OAAO,UAAU,WAAW;AAC9B,aAAO,QAAQ,SAAS;AAAA,IAC1B;AACA,QAAI,iBAAiB,MAAM;AACzB,aAAO,MAAM,YAAY;AAAA,IAC3B;AACA,QAAI,OAAO,WAAW,eAAe,OAAO,SAAS,KAAK,GAAG;AAC3D,aAAO,MAAM,SAAS,OAAO;AAAA,IAC/B;AACA,WAAO;AAAA,EACT;AACF;;;ACvFO,IAAM,sBAAN,cAAkC,cAAc;AAAA,EACrD;AAAA,EAEA,YAAY,SAAkB,SAA8B,CAAC,GAAG;AAC9D,UAAM,MAAM;AACZ,SAAK,WAAW;AAAA,EAClB;AAAA,EAEA,MAAyB,cAAoC;AAC3D,UAAM,OAAO,MAAM,KAAK,SAAS;AAAA,MAC/B;AAAA,IACF;AAEA,WAAO;AAAA,MACL,SAAS;AAAA,MACT,SAAS,KAAK,CAAC,GAAG;AAAA,MAClB,SAAS;AAAA,QACP,sBAAsB;AAAA,MACxB;AAAA,IACF;AAAA,EACF;AACF;;;ACdO,IAAM,8BAAN,cAA0C,sBAAsB;AAAA,EACrE;AAAA,EAEA,YAAY,SAAkB,SAAsC,CAAC,GAAG;AACtE,UAAM,MAAM;AACZ,SAAK,WAAW;AAAA,EAClB;AAAA,EAEA,MAAyB,sBACvB,WACAC,SAC+B;AAC/B,UAAM,kBAAkB,KAAK,SAAS,gBAAgB,SAAS;AAC/D,UAAM,mBAAmB,KAAK,SAAS,gBAAgBA,QAAO,IAAI;AAClE,UAAM,QAAQ,KAAK,sBAAsB;AAEzC,UAAM,MAAM;AAAA,wBACQ,gBAAgB;AAAA,aAC3B,eAAe;AAAA,cACd,gBAAgB;AAAA,cAChB,KAAK;AAAA;AAGf,UAAM,OAAO,MAAM,KAAK,SAAS,SAA6B,GAAG;AAEjE,QAAI,CAAC,KAAK,UAAU,KAAK,SAAS,KAAK,qBAAqB;AAC1D,aAAO;AAAA,IACT;AAEA,UAAM,SAAmB,CAAC;AAC1B,eAAW,OAAO,MAAM;AACtB,YAAM,YAAY,KAAK,gBAAgB,IAAI,KAAK;AAChD,UAAI,aAAa,MAAM;AACrB,eAAO;AAAA,MACT;AACA,aAAO,KAAK,SAAS;AAAA,IACvB;AAEA,WAAO,OAAO,SAAS,SAAS;AAAA,EAClC;AAAA,EAEA,gBAAgB,OAA+B;AAC7C,QAAI,UAAU,QAAQ,UAAU,QAAW;AACzC,aAAO;AAAA,IACT;AACA,QAAI,OAAO,UAAU,UAAU;AAC7B,aAAO;AAAA,IACT;AACA,QAAI,OAAO,UAAU,YAAY,OAAO,UAAU,UAAU;AAC1D,aAAO,OAAO,KAAK;AAAA,IACrB;AACA,QAAI,OAAO,UAAU,WAAW;AAC9B,aAAO,QAAQ,SAAS;AAAA,IAC1B;AACA,QAAI,iBAAiB,MAAM;AACzB,aAAO,MAAM,YAAY;AAAA,IAC3B;AACA,QAAI,OAAO,WAAW,eAAe,OAAO,SAAS,KAAK,GAAG;AAC3D,aAAO,MAAM,SAAS,OAAO;AAAA,IAC/B;AACA,WAAO;AAAA,EACT;AACF;;;ACrEO,IAAM,0BAAN,cAAsC,kBAAkB;AAAA,EAC7D;AAAA,EAEA,YAAY,SAAkB,SAAkC,CAAC,GAAG;AAClE,UAAM,MAAM;AACZ,SAAK,WAAW;AAAA,EAClB;AAAA,EAEA,MAAyB,YAAY,WAAgD;AACnF,UAAM,OAAO,MAAM,KAAK,SAAS;AAAA,MAC/B,iCAAiC,KAAK,SAAS,gBAAgB,SAAS,CAAC;AAAA,IAC3E;AAEA,WAAO,KAAK,SAAS,SAAS,KAAK,CAAC,GAAG,KAAK;AAAA,EAC9C;AACF;;;ACjBA,IAAM,gBAID;AAAA,EACH;AAAA,IACE,SAAS;AAAA,IACT,MAAM;AAAA,IACN,MAAM;AAAA,EACR;AAAA,EACA;AAAA,IACE,SAAS;AAAA,IACT,MAAM;AAAA,IACN,MAAM;AAAA,EACR;AAAA,EACA;AAAA,IACE,SAAS;AAAA,IACT,MAAM;AAAA,IACN,MAAM;AAAA,EACR;AAAA,EACA;AAAA,IACE,SAAS;AAAA,IACT,MAAM;AAAA,IACN,MAAM;AAAA,EACR;AAAA,EACA;AAAA,IACE,SAAS;AAAA,IACT,MAAM;AAAA,IACN,MAAM;AAAA,EACR;AAAA,EACA;AAAA,IACE,SAAS;AAAA,IACT,MAAM;AAAA,IACN,MAAM;AAAA,EACR;AACF;AAmCO,SAAS,YAAY,KAAa,OAAgB;AACvD,QAAM,eACJ,iBAAiB,QACb,MAAM,UACN,OAAO,UAAU,WACf,QACA;AACR,QAAM,YAAY,cAAc,KAAK,CAAC,OAAO,GAAG,QAAQ,KAAK,YAAY,CAAC;AAE1E,MAAI,CAAC,WAAW;AACd,WAAO;AAAA,MACL,OAAO;AAAA,MACP,YAAY;AAAA,MACZ,YAAY;AAAA,MACZ,eAAe;AAAA,IACjB;AAAA,EACF;AAEA,SAAO;AAAA,IACL,OAAO;AAAA,IACP,YAAY,UAAU;AAAA,IACtB,YAAY,UAAU;AAAA,IACtB,eAAe;AAAA,EACjB;AACF;AAEO,IAAM,SAAN,cAAqB,QAAQ;AAAA,EAClC;AAAA,EACkB;AAAA,EACA,gBAAgB;AAAA,EAChB,gBAA0B,CAAC;AAAA,EAE7C,YAAY,SAA+B;AACzC,UAAM;AACN,QAAI,CAAC,WAAW,OAAO,QAAQ,YAAY,YAAY;AACrD,Y
AAM,IAAI,MAAM,8CAA8C;AAAA,IAChE;AACA,SAAK,WAAW;AAChB,SAAK,YAAY,QAAQ;AAAA,EAC3B;AAAA,EAEA,MAAe,QAAQ,KAAa;AAClC,WAAO,KAAK,SAAS,QAAQ,GAAG;AAAA,EAClC;AAAA,EAEA,MAAe,SAAS,KAAa;AACnC,UAAM,YACJ,KAAK,SAAS,aACb,OAAO,SAAiB;AACvB,YAAM,KAAK,SAAS,QAAQ,WAAW,IAAI,EAAE;AAAA,IAC/C;AAEF,QAAI;AACF,aAAO,MAAM,UAAU,GAAG;AAAA,IAC5B,SAAS,OAAO;AACd,aAAO,KAAK,UAAU,YAAY,KAAK,KAAK,CAAC;AAAA,IAC/C;AAAA,EACF;AAAA,EAEA,iBAAiB,MAAc;AAC7B,WAAO,IAAI,KAAK,QAAQ,MAAM,IAAI,CAAC;AAAA,EACrC;AAAA,EAEA,MAAe,SAAc,KAA6B;AACxD,UAAM,SAAS,MAAM,KAAK,SAAS,QAAQ,GAAG;AAE9C,QAAI,MAAM,QAAQ,MAAM,GAAG;AACzB,aAAO;AAAA,IACT;AAEA,QACE,UACA,OAAO,WAAW,YAClB,UAAU,UACV,MAAM,QAAS,OAA8B,IAAI,GACjD;AACA,aAAQ,OAA2B;AAAA,IACrC;AAEA,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA,EAES,gBAAgB,MAAsB;AAC7C,WAAO,IAAI,KAAK,QAAQ,MAAM,IAAI,CAAC;AAAA,EACrC;AAAA,EAES,OAAO,OAAuB;AACrC,WAAO,MAAM,QAAQ,MAAM,IAAI;AAAA,EACjC;AAAA,EAES,qBACP,WACA,SACA,OACQ;AACR,UAAM,aAAa,SAAS,SACxB,QAAQ,IAAI,CAAC,MAAM,KAAK,gBAAgB,CAAC,CAAC,EAAE,KAAK,IAAI,IACrD;AACJ,WAAO,UAAU,UAAU,SAAS,KAAK,gBAAgB,SAAS,CAAC,UAAU,KAAK;AAAA,EACpF;AACF;;;AC1JO,IAAM,uBAAN,cAAmC,eAAe;AAAA,EACvD;AAAA,EACA,qBAA4C;AAAA,EAE5C,YAAY,SAAkB,SAA+B,CAAC,GAAG;AAC/D,UAAM,MAAM;AACZ,SAAK,WAAW;AAAA,EAClB;AAAA,EAEA,MAAyB,mBAAsC;AAC7D,UAAM,OAAO,MAAM,KAAK,SAAS,SAE9B,iEAAiE;AAEpE,WAAO,KACJ,IAAI,CAAC,QAAQ,IAAI,IAAI,EACrB;AAAA,MACC,CAAC,SACC,OAAO,SAAS,YAAY,CAAC,KAAK,WAAW,SAAS;AAAA,IAC1D;AAAA,EACJ;AAAA,EAEA,MAAyB,SAAS,WAAmC;AACnE,UAAM,UAAU,MAAM,KAAK,SAAS;AAAA,MAClC,qBAAqB,KAAK,iBAAiB,SAAS,CAAC;AAAA,IACvD;AAEA,WAAO;AAAA,MACL,MAAM;AAAA,MACN,SAAS;AAAA,MACT,SAAS,QAAQ,IAAI,CAAC,SAAS;AAAA,QAC7B,MAAM,IAAI,QAAQ;AAAA,QAClB,MAAM,IAAI,QAAQ;AAAA,MACpB,EAAE;AAAA,IACJ;AAAA,EACF;AAAA,EAEA,MAAyB,sBACvB,WACyB;AACzB,UAAM,OAAO,MAAM,KAAK,SAAS;AAAA,MAC/B,2BAA2B,KAAK,iBAAiB,SAAS,CAAC;AAAA,IAC7D;AAEA,UAAM,SAAS,oBAAI,IAA0B;AAE7C,eAAW,OAAO,MAAM;AACtB,UACE,IAAI,MAAM,QACV,IAAI,SAAS,QACb,IAAI,QAAQ,QACZ,IAAI,MAAM,MACV;AACA;AAAA,MACF;AAEA,YAAM,KAAK,OAAO,IAAI,EAAE;AACxB,YAAM,WAAW,OAAO,IAAI,EAAE;AAC9B,UAAI,CAAC,UAAU;AACb,eAAO,IAAI,IAAI;AAAA,UACb,OAAO;AAAA,UACP,MAAM,CAAC,OAAO,IAAI,IAAI,CAAC;AAAA,UACvB,kBAAkB,OAAO,IAAI,KAAK;AAAA,UAClC,IAAI,CAAC,OAAO,IAAI,EAAE,CAAC;AAAA,QACrB,CAAC;AAAA,MACH,OAAO;AACL,iBAAS,KAAK,KAAK,OAAO,IAAI,IAAI,CAAC;AACnC,iBAAS,GAAG,KAAK,OAAO,IAAI,EAAE,CAAC;AAAA,MACjC;AAAA,IACF;AAEA,WAAO,MAAM,KAAK,OAAO,OAAO,CAAC;AAAA,EACnC;AAAA,EAEA,MAAyB,sBACvB,WACyB;AAGzB,QAAI,CAAC,KAAK,oBAAoB;AAC5B,WAAK,qBAAqB,MAAM,KAAK,sBAAsB;AAAA,IAC7D;AACA,WAAO,KAAK,mBAAmB;AAAA,MAC7B,CAAC,MAAM,EAAE,qBAAqB;AAAA,IAChC;AAAA,EACF;AAAA,EAEA,MAAM,wBAAiD;AACrD,UAAM,WAAW,MAAM,KAAK,iBAAiB;AAC7C,UAAM,UAA0B,CAAC;AACjC,eAAW,QAAQ,UAAU;AAC3B,cAAQ,KAAK,GAAI,MAAM,KAAK,sBAAsB,IAAI,CAAE;AAAA,IAC1D;AACA,WAAO;AAAA,EACT;AAAA,EAEA,iBAAiB,MAAc;AAC7B,WAAO,IAAI,KAAK,QAAQ,MAAM,IAAI,CAAC;AAAA,EACrC;AACF;;;ACnGO,SAAS,OAAO,SAA+B,CAAC,GAAG;AACxD,SAAO,CAAC,YAAqB,IAAI,qBAAqB,SAAS,MAAM;AACvE;AAEO,SAAS,KAAK,SAA8B,CAAC,GAAG;AACrD,SAAO,CAAC,YAAqB,IAAI,oBAAoB,SAAS,MAAM;AACtE;AAQO,SAAS,YAAY,SAAqC,CAAC,GAAG;AACnE,SAAO,CAAC,YAAqB;AAC3B,WAAO,IAAI,2BAA2B,SAAS,MAAM;AAAA,EACvD;AACF;AAEO,SAAS,aAAa,SAAsC,CAAC,GAAG;AACrE,SAAO,CAAC,YAAqB;AAC3B,WAAO,IAAI,4BAA4B,SAAS,MAAM;AAAA,EACxD;AACF;AAQO,SAAS,SAAS,SAAkC,CAAC,GAAG;AAC7D,SAAO,CAAC,YAAqB;AAC3B,WAAO,IAAI,wBAAwB,SAAS,MAAM;AAAA,EACpD;AACF;;;AC7DA,SAAS,oBAAoB;;;ACA7B,YAAY,UAAU;AACtB,OAAO,UAAU;AAkCV,SAAS,UAAU,UAAiC;AACzD,QAAM,MAAW,aAAQ,QAAQ,EAAE,YAAY;AAE/C,MAAI;AACJ,MAAI;AACF,eAAW,KAAK,SAAS,UAAU;AAAA,MACjC,WAAW;AAAA;AAAA,IACb,CAAC;AAAA,EACH,SAAS,OAAO;AACd,UAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACrE,UAAM,IAAI,MAAM,+BAA+B,QAAQ,MAAM,OAAO,EAAE;AAAA,EACxE;AAEA,QAAM,SAAwB,CAAC;AAE/B
,aAAW,aAAa,SAAS,YAAY;AAC3C,UAAM,QAAQ,SAAS,OAAO,SAAS;AACvC,UAAM,OAAO,KAAK,MAAM,cAAuC,KAAK;AAGpE,QAAI,KAAK,WAAW,GAAG;AACrB;AAAA,IACF;AAGA,UAAM,YACJ,QAAQ,UAAU,QAAQ,SACtB,qBAAqB,QAAQ,IAC7B,kBAAkB,SAAS;AAEjC,UAAM,UAAU,aAAa,IAAI;AAGjC,QAAI,QAAQ,WAAW,GAAG;AACxB;AAAA,IACF;AAEA,WAAO,KAAK;AAAA,MACV,MAAM;AAAA,MACN;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAEA,MAAI,OAAO,WAAW,GAAG;AACvB,UAAM,IAAI;AAAA,MACR,6BAA6B,QAAQ;AAAA,IACvC;AAAA,EACF;AAEA,SAAO;AACT;AAMA,SAAS,qBAAqB,UAA0B;AACtD,QAAMC,YAAgB,cAAS,UAAe,aAAQ,QAAQ,CAAC;AAC/D,SAAO,kBAAkBA,SAAQ;AACnC;AAUO,SAAS,mBAAmB,MAAsB;AAEvD,MAAI,YAAY,KAAK,YAAY;AAGjC,cAAY,UAAU,QAAQ,eAAe,GAAG;AAGhD,cAAY,UAAU,QAAQ,OAAO,GAAG;AAGxC,cAAY,UAAU,QAAQ,YAAY,EAAE;AAG5C,MAAI,SAAS,KAAK,SAAS,GAAG;AAC5B,gBAAY,MAAM;AAAA,EACpB;AAGA,MAAI,CAAC,WAAW;AACd,WAAO;AAAA,EACT;AAGA,SAAO,UAAU,MAAM,GAAG,EAAE;AAC9B;AAGO,IAAM,oBAAoB;AAMjC,SAAS,aAAa,MAA2C;AAC/D,MAAI,KAAK,WAAW,GAAG;AACrB,WAAO,CAAC;AAAA,EACV;AAGA,QAAM,SAAS,oBAAI,IAAY;AAC/B,aAAW,OAAO,MAAM;AACtB,eAAW,OAAO,OAAO,KAAK,GAAG,GAAG;AAClC,aAAO,IAAI,GAAG;AAAA,IAChB;AAAA,EACF;AAGA,MAAI,OAAO,SAAS,GAAG;AACrB,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,WAAW,MAAM,KAAK,MAAM;AAClC,QAAM,eAAe,uBAAuB,QAAQ;AAEpD,SAAO,aAAa,IAAI,CAAC,MAAM,QAAQ;AACrC,UAAM,cAAc,SAAS,GAAG;AAChC,UAAM,SAAS,KAAK,IAAI,CAAC,QAAQ,IAAI,WAAW,CAAC;AACjD,UAAM,OAAO,gBAAgB,MAAM;AACnC,WAAO,EAAE,MAAM,aAAa,KAAK;AAAA,EACnC,CAAC;AACH;AAKA,SAAS,uBAAuB,OAA2B;AACzD,QAAM,OAAO,oBAAI,IAAoB;AACrC,QAAM,SAAmB,CAAC;AAE1B,aAAW,WAAW,OAAO;AAE3B,QAAI,OAAO,kBAAkB,OAAO;AAGpC,QAAI,CAAC,MAAM;AACT,aAAO;AAAA,IACT;AAEA,UAAM,QAAQ,KAAK,IAAI,IAAI,KAAK;AAChC,QAAI,QAAQ,GAAG;AACb,aAAO,KAAK,GAAG,IAAI,IAAI,QAAQ,CAAC,EAAE;AAAA,IACpC,OAAO;AACL,aAAO,KAAK,IAAI;AAAA,IAClB;AACA,SAAK,IAAI,MAAM,QAAQ,CAAC;AAAA,EAC1B;AAEA,SAAO;AACT;AAMA,SAAS,gBAAgB,QAA+B;AACtD,MAAI,aAAa;AACjB,MAAI,UAAU;AAEd,aAAW,SAAS,QAAQ;AAE1B,QAAI,SAAS,QAAQ,UAAU,IAAI;AACjC;AAAA,IACF;AAGA,QAAI,iBAAiB,MAAM;AACzB,aAAO;AAAA,IACT;AAEA,QAAI,OAAO,UAAU,UAAU;AAC7B,UAAI,OAAO,UAAU,KAAK,GAAG;AAC3B,qBAAa;AAAA,MACf,OAAO;AACL,kBAAU;AAAA,MACZ;AAAA,IACF,WAAW,OAAO,UAAU,WAAW;AAErC,mBAAa;AAAA,IACf,OAAO;AAEL,aAAO;AAAA,IACT;AAAA,EACF;AAGA,MAAI,SAAS;AACX,WAAO;AAAA,EACT;AAGA,MAAI,YAAY;AACd,WAAO;AAAA,EACT;AAGA,SAAO;AACT;;;ADjMO,IAAM,cAAN,cAA0B,OAAO;AAAA,EACtC;AAAA,EAEA,YAAY,SAA6B;AAEvC,UAAM,SAAS,UAAU,QAAQ,IAAI;AAGrC,UAAM,SAAS,QAAQ,YAAY;AACnC,UAAM,KAAK,IAAI,aAAa,MAAM;AAGlC,eAAW,SAAS,QAAQ;AAC1B,YAAM,YAAY,eAAe,KAAK;AACtC,SAAG,KAAK,SAAS;AACjB,eAAS,IAAI,KAAK;AAAA,IACpB;AAGA,UAAM;AAAA,MACJ,SAAS,CAAC,QAAgB,GAAG,QAAQ,GAAG,EAAE,IAAI;AAAA,MAC9C,WAAW,QAAQ;AAAA,IACrB,CAAC;AAED,SAAK,MAAM;AAAA,EACb;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,QAAc;AACZ,SAAK,IAAI,MAAM;AAAA,EACjB;AACF;AAKA,SAAS,eAAe,OAA4B;AAClD,MAAI,MAAM,QAAQ,WAAW,GAAG;AAC9B,UAAM,IAAI,MAAM,wBAAwB,MAAM,IAAI,oBAAoB;AAAA,EACxE;AAEA,QAAM,UAAU,MAAM,QACnB,IAAI,CAAC,QAAQ,IAAI,iBAAiB,IAAI,IAAI,CAAC,KAAK,IAAI,IAAI,EAAE,EAC1D,KAAK,IAAI;AAEZ,SAAO,iBAAiB,iBAAiB,MAAM,IAAI,CAAC,MAAM,OAAO;AACnE;AAWA,SAAS,SAAS,IAAkB,OAA0B;AAC5D,MAAI,MAAM,KAAK,WAAW,GAAG;AAC3B;AAAA,EACF;AAEA,QAAM,UAAU,MAAM,QACnB,IAAI,CAAC,MAAM,IAAI,iBAAiB,EAAE,IAAI,CAAC,GAAG,EAC1C,KAAK,IAAI;AACZ,QAAM,eAAe,MAAM,QAAQ,IAAI,MAAM,GAAG,EAAE,KAAK,IAAI;AAE3D,QAAM,YAAY,gBAAgB,iBAAiB,MAAM,IAAI,CAAC,MAAM,OAAO,aAAa,YAAY;AACpG,QAAM,OAAO,GAAG,QAAQ,SAAS;AAEjC,KAAG,KAAK,mBAAmB;AAE3B,MAAI;AACF,eAAW,OAAO,MAAM,MAAM;AAC5B,YAAM,SAAwB,MAAM,QAAQ,IAAI,CAAC,QAAQ;AAEvD,cAAM,WAAW,IAAI,IAAI,WAAW;AACpC,eAAO,aAAa,UAAU,IAAI,IAAI;AAAA,MACxC,CAAC;AACD,WAAK,IAAI,GAAG,MAAM;AAAA,IACpB;AACA,OAAG,KAAK,QAAQ;AAAA,EAClB,SAAS,OAAO;AACd,OAAG,KAAK,UAAU;AAClB,UAAM;AAAA,EACR;AACF;AAMA,SAAS,aAAa,OAAgB,MAA+B;AAEnE,MAAI,SAAS,QAAQ,UAAU,IAAI;AACjC,WAAO;AAAA,EACT;AAGA,MAAI,iB
AAiB,MAAM;AACzB,WAAO,MAAM,YAAY,EAAE,MAAM,GAAG,EAAE,CAAC;AAAA,EACzC;AAEA,UAAQ,MAAM;AAAA,IACZ,KAAK,WAAW;AAEd,YAAM,MAAM,OAAO,KAAK;AACxB,UAAI,MAAM,GAAG,GAAG;AACd,eAAO;AAAA,MACT;AACA,aAAO,KAAK,MAAM,GAAG;AAAA,IACvB;AAAA,IAEA,KAAK,QAAQ;AAEX,YAAM,MAAM,OAAO,KAAK;AACxB,UAAI,MAAM,GAAG,GAAG;AACd,eAAO;AAAA,MACT;AACA,aAAO;AAAA,IACT;AAAA,IAEA,KAAK;AAAA,IACL,SAAS;AAEP,UAAI,OAAO,UAAU,WAAW;AAC9B,eAAO,QAAQ,SAAS;AAAA,MAC1B;AACA,UAAI,OAAO,UAAU,UAAU;AAC7B,eAAO,KAAK,UAAU,KAAK;AAAA,MAC7B;AACA,aAAO,OAAO,KAAK;AAAA,IACrB;AAAA,EACF;AACF;AAKA,SAAS,iBAAiB,MAAsB;AAC9C,SAAO,KAAK,QAAQ,MAAM,IAAI;AAChC;",
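The removed mappings row above encodes, among other sources, the SQLite adapter module shown earlier, whose exported `formatError` classifies driver errors against `SQL_ERROR_MAP`. A hedged usage sketch (the `@deepagents/text2sql/sqlite` subpath is inferred from the dist paths, not confirmed by this diff):

```ts
import { formatError } from '@deepagents/text2sql/sqlite';

// 'no such table: …' matches the MISSING_TABLE pattern in SQL_ERROR_MAP.
const report = formatError(
  'SELECT * FROM usrs',
  new Error('no such table: usrs'),
);
// report.error_type    === 'MISSING_TABLE'
// report.suggestion    — the hint telling the caller to re-check the schema
// report.sql_attempted === 'SELECT * FROM usrs'
```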
6
- "names": ["c", "column", "constraint", "table", "column", "table", "tables", "column", "column", "basename"]
3
+ "sources": ["../../../../src/lib/fragments/schema.ts", "../../../../src/lib/adapters/groundings/context.ts", "../../../../src/lib/adapters/adapter.ts", "../../../../src/lib/adapters/groundings/abstract.grounding.ts", "../../../../src/lib/adapters/groundings/column-stats.grounding.ts", "../../../../src/lib/adapters/groundings/info.grounding.ts", "../../../../src/lib/adapters/groundings/column-values.grounding.ts", "../../../../src/lib/adapters/groundings/report.grounding.ts", "../../../../../context/src/lib/estimate.ts", "../../../../../context/src/lib/fragments.ts", "../../../../../context/src/lib/renderers/abstract.renderer.ts", "../../../../../context/src/lib/store/store.ts", "../../../../../context/src/lib/engine.ts", "../../../../../context/src/lib/fragments/domain.ts", "../../../../../context/src/lib/fragments/user.ts", "../../../../../context/src/lib/guardrail.ts", "../../../../../context/src/lib/guardrails/error-recovery.guardrail.ts", "../../../../../context/src/lib/sandbox/binary-bridges.ts", "../../../../../context/src/lib/sandbox/docker-sandbox.ts", "../../../../../context/src/lib/sandbox/container-tool.ts", "../../../../../context/src/lib/skills/loader.ts", "../../../../../context/src/lib/skills/fragments.ts", "../../../../../context/src/lib/store/sqlite.store.ts", "../../../../../context/src/lib/store/memory.store.ts", "../../../../../context/src/lib/visualize.ts", "../../../../../context/src/lib/agent.ts", "../../../../../context/src/lib/render.ts", "../../../../src/lib/adapters/groundings/row-count.grounding.ts", "../../../../src/lib/adapters/groundings/table.grounding.ts", "../../../../src/lib/adapters/sqlite/column-stats.sqlite.grounding.ts", "../../../../src/lib/adapters/sqlite/info.sqlite.grounding.ts", "../../../../src/lib/adapters/sqlite/column-values.sqlite.grounding.ts", "../../../../src/lib/adapters/sqlite/row-count.sqlite.grounding.ts", "../../../../src/lib/adapters/sqlite/sqlite.ts", "../../../../src/lib/adapters/sqlite/table.sqlite.grounding.ts", "../../../../src/lib/adapters/sqlite/index.ts", "../../../../src/lib/adapters/spreadsheet/spreadsheet.ts", "../../../../src/lib/adapters/spreadsheet/parser.ts"],
4
+ "sourcesContent": ["import type { ContextFragment } from '@deepagents/context';\n\n/**\n * Schema fragment builders.\n *\n * These fragments represent database schema metadata that can be injected\n * into AI prompts. Use with renderers (XML, Markdown, TOML, TOON) to format.\n *\n * @example\n * ```ts\n * import { dialectInfo, table, column, relationship } from '@deepagents/text2sql';\n *\n * const schemaFragments = [\n * dialectInfo({ dialect: 'PostgreSQL', version: '14.5' }),\n * table({\n * name: 'users',\n * columns: [\n * column({ name: 'id', type: 'integer', pk: true }),\n * column({ name: 'email', type: 'varchar', unique: true }),\n * ],\n * }),\n * ];\n * ```\n */\n\n/**\n * Database dialect and version information.\n *\n * @param input.dialect - Database type (PostgreSQL, SQLite, SQL Server, etc.)\n * @param input.version - Database version string\n * @param input.database - Database name\n *\n * @example\n * dialectInfo({ dialect: 'PostgreSQL', version: '14.5', database: 'myapp' })\n */\nexport function dialectInfo(input: {\n dialect: string;\n version?: string;\n database?: string;\n}): ContextFragment {\n return {\n name: 'dialectInfo',\n data: {\n dialect: input.dialect,\n ...(input.version && { version: input.version }),\n ...(input.database && { database: input.database }),\n },\n };\n}\n\n/**\n * Database table with columns and optional metadata.\n *\n * @param input.name - Table name\n * @param input.schema - Schema name (e.g., 'public' for PostgreSQL)\n * @param input.rowCount - Approximate row count\n * @param input.sizeHint - Size category for query optimization hints\n * @param input.columns - Array of column() fragments\n * @param input.indexes - Array of index() fragments\n * @param input.constraints - Array of constraint() fragments\n *\n * @example\n * table({\n * name: 'users',\n * rowCount: 1500,\n * sizeHint: 'medium',\n * columns: [\n * column({ name: 'id', type: 'integer', pk: true }),\n * column({ name: 'email', type: 'varchar', unique: true, indexed: true }),\n * ],\n * indexes: [\n * index({ name: 'idx_email', columns: ['email'], unique: true }),\n * ],\n * })\n */\nexport function table(input: {\n name: string;\n schema?: string;\n rowCount?: number;\n sizeHint?: 'tiny' | 'small' | 'medium' | 'large' | 'huge';\n columns: ContextFragment[];\n indexes?: ContextFragment[];\n constraints?: ContextFragment[];\n}): ContextFragment {\n return {\n name: 'table',\n data: {\n name: input.name,\n ...(input.schema && { schema: input.schema }),\n ...(input.rowCount != null && { rowCount: input.rowCount }),\n ...(input.sizeHint && { sizeHint: input.sizeHint }),\n columns: input.columns,\n ...(input.indexes?.length && { indexes: input.indexes }),\n ...(input.constraints?.length && { constraints: input.constraints }),\n },\n };\n}\n\n/**\n * Table column with type and annotations.\n *\n * @param input.name - Column name\n * @param input.type - Column data type (e.g., 'integer', 'varchar(255)')\n * @param input.pk - Is primary key\n * @param input.fk - Foreign key reference in \"table.column\" format\n * @param input.unique - Has unique constraint\n * @param input.notNull - Has NOT NULL constraint\n * @param input.default - Default value expression\n * @param input.indexed - Has index on this column\n * @param input.values - Enum or low cardinality values\n * @param input.stats - Column statistics (min, max, null fraction)\n *\n * @example\n * column({\n * name: 'status',\n * type: 'varchar',\n * notNull: true,\n * indexed: true,\n * values: ['active', 'inactive', 
'suspended'],\n * })\n */\nexport function column(input: {\n name: string;\n type: string;\n pk?: boolean;\n fk?: string;\n unique?: boolean;\n notNull?: boolean;\n default?: string;\n indexed?: boolean;\n values?: string[];\n stats?: {\n min?: string;\n max?: string;\n nullFraction?: number;\n };\n}): ContextFragment {\n return {\n name: 'column',\n data: {\n name: input.name,\n type: input.type,\n ...(input.pk && { pk: true }),\n ...(input.fk && { fk: input.fk }),\n ...(input.unique && { unique: true }),\n ...(input.notNull && { notNull: true }),\n ...(input.default && { default: input.default }),\n ...(input.indexed && { indexed: true }),\n ...(input.values?.length && { values: input.values }),\n ...(input.stats && { stats: input.stats }),\n },\n };\n}\n\n/**\n * Table index.\n *\n * @param input.name - Index name\n * @param input.columns - Columns included in the index\n * @param input.unique - Is unique index\n * @param input.type - Index type (BTREE, HASH, GIN, etc.)\n *\n * @example\n * index({ name: 'idx_user_email', columns: ['email'], unique: true, type: 'BTREE' })\n */\nexport function index(input: {\n name: string;\n columns: string[];\n unique?: boolean;\n type?: string;\n}): ContextFragment {\n return {\n name: 'index',\n data: {\n name: input.name,\n columns: input.columns,\n ...(input.unique && { unique: true }),\n ...(input.type && { type: input.type }),\n },\n };\n}\n\n/**\n * Table constraint (CHECK, UNIQUE, PRIMARY_KEY, FOREIGN_KEY, etc).\n *\n * @param input.name - Constraint name\n * @param input.type - Constraint type\n * @param input.columns - Columns involved in the constraint\n * @param input.definition - CHECK constraint SQL definition\n * @param input.defaultValue - DEFAULT constraint value\n * @param input.referencedTable - For FK: referenced table name\n * @param input.referencedColumns - For FK: referenced column names\n *\n * @example\n * constraint({\n * name: 'chk_amount_positive',\n * type: 'CHECK',\n * definition: 'amount > 0',\n * })\n *\n * @example\n * constraint({\n * name: 'fk_order_user',\n * type: 'FOREIGN_KEY',\n * columns: ['user_id'],\n * referencedTable: 'users',\n * referencedColumns: ['id'],\n * })\n */\nexport function constraint(input: {\n name: string;\n type:\n | 'CHECK'\n | 'UNIQUE'\n | 'NOT_NULL'\n | 'DEFAULT'\n | 'PRIMARY_KEY'\n | 'FOREIGN_KEY';\n columns?: string[];\n definition?: string;\n defaultValue?: string;\n referencedTable?: string;\n referencedColumns?: string[];\n}): ContextFragment {\n return {\n name: 'constraint',\n data: {\n name: input.name,\n type: input.type,\n ...(input.columns?.length && { columns: input.columns }),\n ...(input.definition && { definition: input.definition }),\n ...(input.defaultValue && { defaultValue: input.defaultValue }),\n ...(input.referencedTable && { referencedTable: input.referencedTable }),\n ...(input.referencedColumns?.length && {\n referencedColumns: input.referencedColumns,\n }),\n },\n };\n}\n\n/**\n * Database view.\n *\n * @param input.name - View name\n * @param input.schema - Schema name\n * @param input.columns - Array of column() fragments\n * @param input.definition - View SQL definition\n *\n * @example\n * view({\n * name: 'active_users',\n * columns: [\n * column({ name: 'id', type: 'integer' }),\n * column({ name: 'email', type: 'varchar' }),\n * ],\n * definition: \"SELECT id, email FROM users WHERE status = 'active'\",\n * })\n */\nexport function view(input: {\n name: string;\n schema?: string;\n columns: ContextFragment[];\n definition?: string;\n}): ContextFragment {\n 
return {\n name: 'view',\n data: {\n name: input.name,\n ...(input.schema && { schema: input.schema }),\n columns: input.columns,\n ...(input.definition && { definition: input.definition }),\n },\n };\n}\n\n/**\n * Relationship between tables (foreign key connection).\n *\n * @param input.from - Source table and columns\n * @param input.to - Referenced table and columns\n * @param input.cardinality - Relationship cardinality\n *\n * @example\n * relationship({\n * from: { table: 'orders', columns: ['user_id'] },\n * to: { table: 'users', columns: ['id'] },\n * cardinality: 'many-to-one',\n * })\n */\nexport function relationship(input: {\n from: { table: string; columns: string[] };\n to: { table: string; columns: string[] };\n cardinality?: 'one-to-one' | 'one-to-many' | 'many-to-one' | 'many-to-many';\n}): ContextFragment {\n return {\n name: 'relationship',\n data: {\n from: input.from,\n to: input.to,\n ...(input.cardinality && { cardinality: input.cardinality }),\n },\n };\n}\n", "import type { AdapterInfo, ColumnStats, Relationship, Table } from '../adapter.ts';\nimport type { View } from './view.grounding.ts';\n\n/**\n * Column type for grounding operations.\n * Common interface between Table.columns and View.columns.\n */\nexport interface Column {\n name: string;\n type: string;\n kind?: 'LowCardinality' | 'Enum';\n values?: string[];\n stats?: ColumnStats;\n}\n\n/**\n * Entity with columns (Table or View).\n */\nexport interface ColumnContainer {\n name: string;\n columns: Column[];\n}\n\n/**\n * Shared context object passed to all groundings.\n * Groundings read from and write to this context.\n */\nexport interface GroundingContext {\n /** Tables discovered by TableGrounding */\n tables: Table[];\n\n /** Views discovered by ViewGrounding */\n views: View[];\n\n /** Relationships discovered by TableGrounding */\n relationships: Relationship[];\n\n /** Database info collected by InfoGrounding */\n info?: AdapterInfo;\n\n /** Business context report generated by ReportGrounding */\n report?: string;\n}\n\n/**\n * Create a new empty grounding context.\n */\nexport function createGroundingContext(): GroundingContext {\n return {\n tables: [],\n views: [],\n relationships: [],\n info: undefined,\n };\n}\n", "import type { ContextFragment } from '@deepagents/context';\n\nimport {\n column,\n constraint,\n dialectInfo,\n index,\n relationship,\n table,\n view,\n} from '../fragments/schema.ts';\nimport type { AbstractGrounding } from './groundings/abstract.grounding.ts';\nimport {\n type GroundingContext,\n createGroundingContext,\n} from './groundings/context.ts';\nimport type { View } from './groundings/view.grounding.ts';\n\n/**\n * Filter type for view/table names.\n * - string[]: explicit list of view names\n * - RegExp: pattern to match view names\n * - function: predicate to filter view names\n */\nexport type Filter = string[] | RegExp | ((viewName: string) => boolean);\n\nexport interface Table {\n name: string;\n schema?: string;\n rawName?: string;\n columns: {\n name: string;\n type: string;\n kind?: 'LowCardinality' | 'Enum';\n values?: string[];\n isIndexed?: boolean;\n stats?: ColumnStats;\n }[];\n rowCount?: number;\n sizeHint?: 'tiny' | 'small' | 'medium' | 'large' | 'huge';\n indexes?: TableIndex[];\n constraints?: TableConstraint[];\n}\n\nexport interface TableIndex {\n name: string;\n columns: string[];\n unique?: boolean;\n type?: string;\n}\n\nexport interface TableConstraint {\n name: string;\n type:\n | 'CHECK'\n | 'UNIQUE'\n | 'NOT_NULL'\n | 'DEFAULT'\n | 
'PRIMARY_KEY'\n | 'FOREIGN_KEY';\n columns?: string[];\n definition?: string;\n defaultValue?: string;\n referencedTable?: string;\n referencedColumns?: string[];\n}\n\nexport interface ColumnStats {\n min?: string;\n max?: string;\n nullFraction?: number;\n}\n\nexport type Relationship = {\n table: string;\n from: string[];\n referenced_table: string;\n to: string[];\n};\n\nexport type TablesFilter = string[] | RegExp;\n\nexport interface Introspection {\n tables: Table[];\n relationships: Relationship[];\n}\n\nexport interface AdapterInfo {\n dialect: string;\n version?: string;\n database?: string;\n details?: Record<string, unknown>;\n}\n\nexport type AdapterInfoProvider =\n | AdapterInfo\n | (() => Promise<AdapterInfo> | AdapterInfo);\n\nexport type IntrospectionPhase =\n | 'tables'\n | 'row_counts'\n | 'primary_keys'\n | 'indexes'\n | 'column_stats'\n | 'low_cardinality'\n | 'relationships';\n\nexport interface IntrospectionProgress {\n phase: IntrospectionPhase;\n message: string;\n current?: number;\n total?: number;\n}\n\nexport type OnProgress = (progress: IntrospectionProgress) => void;\n\nexport interface IntrospectOptions {\n onProgress?: OnProgress;\n}\n\nexport type GroundingFn = (adapter: Adapter) => AbstractGrounding;\n\nexport type ExecuteFunction = (sql: string) => Promise<any> | any;\nexport type ValidateFunction = (\n sql: string,\n) => Promise<string | void> | string | void;\n\nexport abstract class Adapter {\n abstract grounding: GroundingFn[];\n\n /**\n * Default schema name for this database.\n * PostgreSQL: 'public', SQL Server: 'dbo', SQLite: undefined\n */\n abstract readonly defaultSchema: string | undefined;\n\n /**\n * System schemas to exclude from introspection by default.\n */\n abstract readonly systemSchemas: string[];\n\n /**\n * Introspect the database schema and return context fragments.\n *\n * Executes all configured groundings to populate the context, then\n * generates fragments from the complete context data.\n *\n * @param ctx - Optional grounding context for sharing state between groundings\n * @returns Array of context fragments representing the database schema\n */\n async introspect(ctx = createGroundingContext()): Promise<ContextFragment[]> {\n // Phase 1: All groundings populate ctx\n for (const fn of this.grounding) {\n const grounding = fn(this);\n await grounding.execute(ctx);\n }\n\n // Phase 2: Generate fragments from complete ctx\n return this.#toSchemaFragments(ctx);\n }\n\n /**\n * Convert complete grounding context to schema fragments.\n * Called after all groundings have populated ctx with data.\n */\n #toSchemaFragments(ctx: GroundingContext): ContextFragment[] {\n const fragments: ContextFragment[] = [];\n\n // Dialect info\n if (ctx.info) {\n fragments.push(\n dialectInfo({\n dialect: ctx.info.dialect,\n version: ctx.info.version,\n database: ctx.info.database,\n }),\n );\n }\n\n // Tables (with all annotations now included)\n for (const t of ctx.tables) {\n fragments.push(this.#tableToFragment(t));\n }\n\n // Views\n for (const v of ctx.views) {\n fragments.push(this.#viewToFragment(v));\n }\n\n // Relationships\n const tableMap = new Map(ctx.tables.map((t) => [t.name, t]));\n for (const rel of ctx.relationships) {\n const sourceTable = tableMap.get(rel.table);\n const targetTable = tableMap.get(rel.referenced_table);\n fragments.push(\n this.#relationshipToFragment(rel, sourceTable, targetTable),\n );\n }\n\n // Business context\n if (ctx.report) {\n fragments.push({ name: 'businessContext', data: ctx.report });\n }\n\n 
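// At this point the list typically reads, in push order:\n // dialectInfo?, table*, view*, relationship*, businessContext?.\n 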
return fragments;\n }\n\n /**\n * Convert a Table to a table fragment with nested column, index, and constraint fragments.\n */\n #tableToFragment(t: Table): ContextFragment {\n // Build constraint lookup maps for column-level annotations\n const pkConstraint = t.constraints?.find((c) => c.type === 'PRIMARY_KEY');\n const pkColumns = new Set(pkConstraint?.columns ?? []);\n\n const notNullColumns = new Set(\n t.constraints\n ?.filter((c) => c.type === 'NOT_NULL')\n .flatMap((c) => c.columns ?? []) ?? [],\n );\n\n const defaultByColumn = new Map<string, string>();\n for (const c of t.constraints?.filter((c) => c.type === 'DEFAULT') ?? []) {\n for (const col of c.columns ?? []) {\n if (c.defaultValue != null) {\n defaultByColumn.set(col, c.defaultValue);\n }\n }\n }\n\n // Single-column UNIQUE constraints\n const uniqueColumns = new Set(\n t.constraints\n ?.filter((c) => c.type === 'UNIQUE' && c.columns?.length === 1)\n .flatMap((c) => c.columns ?? []) ?? [],\n );\n\n // Foreign key lookup: column -> referenced table.column\n const fkByColumn = new Map<string, string>();\n for (const c of t.constraints?.filter((c) => c.type === 'FOREIGN_KEY') ??\n []) {\n const cols = c.columns ?? [];\n const refCols = c.referencedColumns ?? [];\n for (let i = 0; i < cols.length; i++) {\n const refCol = refCols[i] ?? refCols[0] ?? cols[i];\n fkByColumn.set(cols[i], `${c.referencedTable}.${refCol}`);\n }\n }\n\n // Build column fragments\n const columnFragments = t.columns.map((col) =>\n column({\n name: col.name,\n type: col.type,\n pk: pkColumns.has(col.name) || undefined,\n fk: fkByColumn.get(col.name),\n unique: uniqueColumns.has(col.name) || undefined,\n notNull: notNullColumns.has(col.name) || undefined,\n default: defaultByColumn.get(col.name),\n indexed: col.isIndexed || undefined,\n values: col.values,\n stats: col.stats,\n }),\n );\n\n // Build index fragments\n const indexFragments = (t.indexes ?? []).map((idx) =>\n index({\n name: idx.name,\n columns: idx.columns,\n unique: idx.unique,\n type: idx.type,\n }),\n );\n\n // Build constraint fragments for multi-column UNIQUE and CHECK constraints\n const constraintFragments = (t.constraints ?? [])\n .filter(\n (c) =>\n c.type === 'CHECK' ||\n (c.type === 'UNIQUE' && (c.columns?.length ?? 0) > 1),\n )\n .map((c) =>\n constraint({\n name: c.name,\n type: c.type,\n columns: c.columns,\n definition: c.definition,\n }),\n );\n\n return table({\n name: t.name,\n schema: t.schema,\n rowCount: t.rowCount,\n sizeHint: t.sizeHint,\n columns: columnFragments,\n indexes: indexFragments.length > 0 ? indexFragments : undefined,\n constraints:\n constraintFragments.length > 0 ? 
constraintFragments : undefined,\n });\n }\n\n /**\n * Convert a View to a view fragment with nested column fragments.\n */\n #viewToFragment(v: View): ContextFragment {\n const columnFragments = v.columns.map((col) =>\n column({\n name: col.name,\n type: col.type,\n values: col.values,\n stats: col.stats,\n }),\n );\n\n return view({\n name: v.name,\n schema: v.schema,\n columns: columnFragments,\n definition: v.definition,\n });\n }\n\n /**\n * Convert a Relationship to a relationship fragment.\n * Infers cardinality from row counts if available.\n */\n #relationshipToFragment(\n rel: Relationship,\n sourceTable?: Table,\n targetTable?: Table,\n ): ContextFragment {\n const sourceCount = sourceTable?.rowCount;\n const targetCount = targetTable?.rowCount;\n\n let cardinality:\n | 'one-to-one'\n | 'one-to-many'\n | 'many-to-one'\n | 'many-to-many'\n | undefined;\n\n if (sourceCount != null && targetCount != null && targetCount > 0) {\n const ratio = sourceCount / targetCount;\n if (ratio > 5) {\n cardinality = 'many-to-one';\n } else if (ratio < 1.2 && ratio > 0.8) {\n cardinality = 'one-to-one';\n } else if (ratio < 0.2) {\n cardinality = 'one-to-many';\n }\n }\n\n return relationship({\n from: { table: rel.table, columns: rel.from },\n to: { table: rel.referenced_table, columns: rel.to },\n cardinality,\n });\n }\n abstract execute(sql: string): Promise<any[]> | any[];\n abstract validate(sql: string): Promise<string | void> | string | void;\n abstract runQuery<Row>(sql: string): Promise<Row[]> | Row[];\n\n /**\n * Quote an identifier (table/column name) for safe use in SQL.\n * Each database uses different quoting styles.\n */\n abstract quoteIdentifier(name: string): string;\n\n /**\n * Escape a string value for safe use in SQL.\n * Each database escapes different characters.\n */\n abstract escape(value: string): string;\n\n /**\n * Build a SELECT query to sample rows from a table.\n * Each database uses different syntax for limiting rows (LIMIT vs TOP).\n */\n abstract buildSampleRowsQuery(\n tableName: string,\n columns: string[] | undefined,\n limit: number,\n ): string;\n\n /**\n * Convert unknown database value to number.\n * Handles number, bigint, and string types.\n */\n toNumber(value: unknown): number | undefined {\n if (typeof value === 'number' && Number.isFinite(value)) {\n return value;\n }\n if (typeof value === 'bigint') {\n return Number(value);\n }\n if (typeof value === 'string' && value.trim() !== '') {\n const parsed = Number(value);\n return Number.isFinite(parsed) ? parsed : undefined;\n }\n return undefined;\n }\n\n /**\n * Parse a potentially qualified table name into schema and table parts.\n */\n parseTableName(name: string): { schema: string; table: string } {\n if (name.includes('.')) {\n const [schema, ...rest] = name.split('.');\n return { schema, table: rest.join('.') };\n }\n return { schema: this.defaultSchema ?? 
'', table: name };\n }\n\n /**\n * Escape a string value for use in SQL string literals (single quotes).\n * Used in WHERE clauses like: WHERE name = '${escapeString(value)}'\n */\n escapeString(value: string): string {\n return value.replace(/'/g, \"''\");\n }\n\n /**\n * Build a SQL filter clause to include/exclude schemas.\n * @param columnName - The schema column name (e.g., 'TABLE_SCHEMA')\n * @param allowedSchemas - If provided, filter to these schemas only\n */\n buildSchemaFilter(columnName: string, allowedSchemas?: string[]): string {\n if (allowedSchemas && allowedSchemas.length > 0) {\n const values = allowedSchemas\n .map((s) => `'${this.escapeString(s)}'`)\n .join(', ');\n return `AND ${columnName} IN (${values})`;\n }\n if (this.systemSchemas.length > 0) {\n const values = this.systemSchemas\n .map((s) => `'${this.escapeString(s)}'`)\n .join(', ');\n return `AND ${columnName} NOT IN (${values})`;\n }\n return '';\n }\n}\n\nexport function filterTablesByName<T extends { name: string }>(\n tables: T[],\n filter: TablesFilter | undefined,\n): T[] {\n if (!filter) return tables;\n return tables.filter((table) => matchesFilter(table.name, filter));\n}\n\nexport function filterRelationshipsByTables(\n relationships: Relationship[],\n tableNames: Set<string> | undefined,\n): Relationship[] {\n if (tableNames === undefined) {\n return relationships;\n }\n if (tableNames.size === 0) {\n return [];\n }\n return relationships.filter(\n (it) => tableNames.has(it.table) || tableNames.has(it.referenced_table),\n );\n}\n\nexport function applyTablesFilter(\n tables: Table[],\n relationships: Relationship[],\n filter: TablesFilter | undefined,\n): { tables: Table[]; relationships: Relationship[] } {\n if (!filter) {\n return { tables, relationships };\n }\n\n const allowedNames = new Set(\n getTablesWithRelated(tables, relationships, filter),\n );\n\n return {\n tables: tables.filter((table) => allowedNames.has(table.name)),\n relationships: filterRelationshipsByTables(relationships, allowedNames),\n };\n}\n\nexport function matchesFilter(\n tableName: string,\n filter: TablesFilter,\n): boolean {\n if (Array.isArray(filter)) {\n return filter.includes(tableName);\n }\n return filter.test(tableName);\n}\n\nexport function getTablesWithRelated(\n allTables: Table[],\n relationships: Relationship[],\n filter: TablesFilter,\n): string[] {\n const matchedTables = filterTablesByName(allTables, filter).map(\n (it) => it.name,\n );\n\n if (matchedTables.length === 0) {\n return [];\n }\n\n const adjacency = new Map<string, Set<string>>();\n\n for (const rel of relationships) {\n if (!adjacency.has(rel.table)) {\n adjacency.set(rel.table, new Set());\n }\n if (!adjacency.has(rel.referenced_table)) {\n adjacency.set(rel.referenced_table, new Set());\n }\n adjacency.get(rel.table)!.add(rel.referenced_table);\n adjacency.get(rel.referenced_table)!.add(rel.table);\n }\n\n const result = new Set<string>(matchedTables);\n const queue = [...matchedTables];\n\n while (queue.length > 0) {\n const current = queue.shift()!;\n const neighbors = adjacency.get(current);\n\n if (!neighbors) {\n continue;\n }\n\n for (const neighbor of neighbors) {\n if (!result.has(neighbor)) {\n result.add(neighbor);\n queue.push(neighbor);\n }\n }\n }\n\n return Array.from(result);\n}\n", "import type { Adapter } from '../adapter.ts';\nimport type { GroundingContext } from './context.ts';\n\n/**\n * Filter type for table names.\n * - string[]: explicit list of table names\n * - RegExp: pattern to match table names\n * - 
function: predicate to filter table names\n */\nexport type Filter = string[] | RegExp | ((tableName: string) => boolean);\n\nexport interface AdapterInfo {\n dialect: string;\n version?: string;\n database?: string;\n details?: Record<string, unknown>;\n}\nexport type AdapterInfoProvider =\n | AdapterInfo\n | (() => Promise<AdapterInfo> | AdapterInfo);\n\n/**\n * Abstract base class for database schema groundings.\n *\n * Groundings collect schema metadata into the shared GroundingContext.\n * Fragment generation is centralized in Adapter.introspect().\n */\nexport abstract class AbstractGrounding {\n /**\n * Grounding identifier for debugging/logging.\n */\n name: string;\n\n constructor(name: string) {\n this.name = name;\n }\n\n /**\n * Execute grounding to populate the shared context.\n * Groundings mutate ctx to add their collected data (tables, views, indexes, etc).\n * Fragment generation happens centrally in Adapter after all groundings complete.\n *\n * @param ctx - Shared context for accumulating schema data\n */\n abstract execute(ctx: GroundingContext): Promise<void>;\n}\n\nclass SampleDataGrounding {\n // this will fetch sample data for tables matching the filter\n}\n\nclass FunctionGrounding {\n #filter: Filter;\n #adapter: Adapter;\n constructor(adapter: Adapter, filter: Filter) {\n this.#filter = filter;\n this.#adapter = adapter;\n }\n}\n", "import type { ColumnStats } from '../adapter.ts';\nimport { AbstractGrounding } from './abstract.grounding.ts';\nimport type { Column, ColumnContainer, GroundingContext } from './context.ts';\n\n/**\n * Configuration for ColumnStatsGrounding.\n */\nexport interface ColumnStatsGroundingConfig {\n // Future: filter which tables/columns to collect stats for\n}\n\n/**\n * Abstract base class for column statistics grounding.\n *\n * Reads tables and views from the context and annotates their columns\n * with statistics (min, max, nullFraction).\n *\n * Subclasses implement database-specific hooks:\n * - `collectStats()` - collect min/max/nullFraction for a column\n */\nexport abstract class ColumnStatsGrounding extends AbstractGrounding {\n constructor(config: ColumnStatsGroundingConfig = {}) {\n super('columnStats');\n }\n\n /**\n * Collect min/max/nullFraction statistics for a column.\n * Return undefined to skip this column.\n */\n protected abstract collectStats(\n tableName: string,\n column: Column,\n ): Promise<ColumnStats | undefined>;\n\n /**\n * Execute the grounding process.\n * Annotates columns in ctx.tables and ctx.views with statistics.\n */\n async execute(ctx: GroundingContext): Promise<void> {\n // Process both tables and views\n const allContainers: ColumnContainer[] = [...ctx.tables, ...ctx.views];\n for (const container of allContainers) {\n for (const column of container.columns) {\n // Collect min/max/nullFraction\n try {\n const stats = await this.collectStats(container.name, column);\n if (stats) {\n column.stats = stats;\n }\n } catch (error) {\n // Skip on error\n console.warn(\n 'Error collecting stats for',\n container.name,\n column.name,\n error,\n );\n }\n }\n }\n }\n}\n", "import type { AdapterInfo } from '../adapter.ts';\nimport { AbstractGrounding } from './abstract.grounding.ts';\nimport type { GroundingContext } from './context.ts';\n\n/**\n * Configuration for InfoGrounding.\n */\nexport interface InfoGroundingConfig {\n // Future: options to control what info to collect\n}\n\n/**\n * Abstract base class for database info grounding.\n *\n * Collects database dialect, version, and connection info.\n *\n 
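* For example, a SQLite adapter might report { dialect: 'sqlite', version: '3.45.0' }.\n *\n 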
* Subclasses implement the database-specific hook:\n * - `collectInfo()` - collect database info\n */\nexport abstract class InfoGrounding extends AbstractGrounding {\n constructor(config: InfoGroundingConfig = {}) {\n super('dialectInfo');\n }\n\n /**\n * Collect database dialect, version, and other info.\n */\n protected abstract collectInfo(): Promise<AdapterInfo>;\n\n /**\n * Execute the grounding process.\n * Writes database info to ctx.info.\n */\n async execute(ctx: GroundingContext): Promise<void> {\n ctx.info = await this.collectInfo();\n }\n}\n", "import type { Table, TableConstraint } from '../adapter.ts';\nimport { AbstractGrounding } from './abstract.grounding.ts';\nimport type { Column, ColumnContainer, GroundingContext } from './context.ts';\n\nexport type { Column, ColumnContainer };\n\n/**\n * Result of column value detection.\n */\nexport type ColumnValuesResult = {\n kind: 'Enum' | 'LowCardinality';\n values: string[];\n};\n\n/**\n * Configuration for ColumnValuesGrounding.\n */\nexport interface ColumnValuesGroundingConfig {\n /** Maximum number of distinct values to consider low cardinality (default: 20) */\n lowCardinalityLimit?: number;\n}\n\n/**\n * Abstract base class for column values grounding.\n *\n * Discovers possible values for columns from three sources (in priority order):\n * 1. Native ENUM types (PostgreSQL, MySQL) \u2192 kind: 'Enum'\n * 2. CHECK constraints with IN clauses \u2192 kind: 'Enum'\n * 3. Low cardinality data scan \u2192 kind: 'LowCardinality'\n *\n * Subclasses implement database-specific hooks:\n * - `collectEnumValues()` - get values for native ENUM columns\n * - `collectLowCardinality()` - collect distinct values via data scan\n */\nexport abstract class ColumnValuesGrounding extends AbstractGrounding {\n protected lowCardinalityLimit: number;\n\n constructor(config: ColumnValuesGroundingConfig = {}) {\n super('columnValues');\n this.lowCardinalityLimit = config.lowCardinalityLimit ?? 
20;\n }\n\n /**\n * Get values for native ENUM type columns.\n * Return undefined if column is not an ENUM type.\n * Default implementation returns undefined (no native ENUM support).\n */\n protected async collectEnumValues(\n _tableName: string,\n _column: Column,\n ): Promise<string[] | undefined> {\n return undefined;\n }\n\n /**\n * Collect distinct values for low cardinality columns via data scan.\n * Return undefined if column has too many distinct values.\n */\n protected abstract collectLowCardinality(\n tableName: string,\n column: Column,\n ): Promise<string[] | undefined>;\n\n /**\n * Parse CHECK constraint for enum-like IN clause.\n * Extracts values from patterns like:\n * - CHECK (status IN ('active', 'inactive'))\n * - CHECK ((status)::text = ANY (ARRAY['a'::text, 'b'::text]))\n * - CHECK (status = 'active' OR status = 'inactive')\n */\n protected parseCheckConstraint(\n constraint: TableConstraint,\n columnName: string,\n ): string[] | undefined {\n if (constraint.type !== 'CHECK' || !constraint.definition) {\n return undefined;\n }\n\n // Check if constraint applies to this column\n if (constraint.columns && !constraint.columns.includes(columnName)) {\n return undefined;\n }\n\n const def = constraint.definition;\n const escapedCol = this.escapeRegex(columnName);\n\n // Column pattern: matches column name with optional parens and type cast\n // e.g., \"status\", \"(status)\", \"((status)::text)\"\n const colPattern = `(?:\\\\(?\\\\(?${escapedCol}\\\\)?(?:::(?:text|varchar|character varying))?\\\\)?)`;\n\n // Pattern 1: column IN ('val1', 'val2', ...)\n const inMatch = def.match(\n new RegExp(`${colPattern}\\\\s+IN\\\\s*\\\\(([^)]+)\\\\)`, 'i'),\n );\n if (inMatch) {\n return this.extractStringValues(inMatch[1]);\n }\n\n // Pattern 2: PostgreSQL ANY(ARRAY[...])\n const anyMatch = def.match(\n new RegExp(\n `${colPattern}\\\\s*=\\\\s*ANY\\\\s*\\\\(\\\\s*(?:ARRAY)?\\\\s*\\\\[([^\\\\]]+)\\\\]`,\n 'i',\n ),\n );\n if (anyMatch) {\n return this.extractStringValues(anyMatch[1]);\n }\n\n // Pattern 3: column = 'val1' OR column = 'val2' ...\n const orPattern = new RegExp(\n `\\\\b${this.escapeRegex(columnName)}\\\\b\\\\s*=\\\\s*'([^']*)'`,\n 'gi',\n );\n const orMatches = [...def.matchAll(orPattern)];\n if (orMatches.length >= 2) {\n return orMatches.map((m) => m[1]);\n }\n\n return undefined;\n }\n\n /**\n * Extract string values from a comma-separated list.\n */\n private extractStringValues(input: string): string[] | undefined {\n const values: string[] = [];\n // Match quoted strings: 'value' or 'value'::type\n const matches = input.matchAll(/'([^']*)'/g);\n for (const match of matches) {\n values.push(match[1]);\n }\n return values.length > 0 ? 
values : undefined;\n }\n\n /**\n * Escape special regex characters in a string.\n */\n private escapeRegex(str: string): string {\n return str.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&');\n }\n\n /**\n * Get the table from context by name.\n */\n private getTable(ctx: GroundingContext, name: string): Table | undefined {\n return ctx.tables.find((t) => t.name === name);\n }\n\n /**\n * Execute the grounding process.\n * Annotates columns in ctx.tables and ctx.views with values.\n */\n async execute(ctx: GroundingContext): Promise<void> {\n // Process both tables and views\n const allContainers: ColumnContainer[] = [...ctx.tables, ...ctx.views];\n\n for (const container of allContainers) {\n const table = this.getTable(ctx, container.name);\n\n for (const column of container.columns) {\n try {\n const result = await this.resolveColumnValues(\n container.name,\n column,\n table?.constraints,\n );\n if (result) {\n column.kind = result.kind;\n column.values = result.values;\n }\n } catch (error) {\n console.warn(\n 'Error collecting column values for',\n container.name,\n column.name,\n error,\n );\n }\n }\n }\n }\n\n /**\n * Resolve column values from all sources in priority order.\n */\n private async resolveColumnValues(\n tableName: string,\n column: Column,\n constraints?: TableConstraint[],\n ): Promise<ColumnValuesResult | undefined> {\n // Priority 1: Native ENUM type\n const enumValues = await this.collectEnumValues(tableName, column);\n if (enumValues?.length) {\n return { kind: 'Enum', values: enumValues };\n }\n\n // Priority 2: CHECK constraint with IN clause\n if (constraints) {\n for (const constraint of constraints) {\n const checkValues = this.parseCheckConstraint(constraint, column.name);\n if (checkValues?.length) {\n return { kind: 'Enum', values: checkValues };\n }\n }\n }\n\n // Priority 3: Low cardinality data scan\n const lowCardValues = await this.collectLowCardinality(tableName, column);\n if (lowCardValues?.length) {\n return { kind: 'LowCardinality', values: lowCardValues };\n }\n\n return undefined;\n }\n}\n", "import { groq } from '@ai-sdk/groq';\nimport { tool } from 'ai';\nimport dedent from 'dedent';\nimport z from 'zod';\n\nimport { type AgentModel } from '@deepagents/agent';\nimport {\n ContextEngine,\n InMemoryContextStore,\n agent,\n fragment,\n persona,\n user,\n} from '@deepagents/context';\n\nimport type { Adapter } from '../adapter.ts';\nimport { AbstractGrounding } from './abstract.grounding.ts';\nimport type { GroundingContext } from './context.ts';\n\n/**\n * Cache interface for storing generated reports.\n */\nexport interface ReportCache {\n get(): Promise<string | null>;\n set(value: string): Promise<void>;\n}\n\n/**\n * Configuration for ReportGrounding.\n */\nexport interface ReportGroundingConfig {\n /** LLM model to use for generating the report */\n model?: AgentModel;\n /** Optional cache for storing generated reports */\n cache?: ReportCache;\n /** Force regeneration even if cached */\n forceRefresh?: boolean;\n}\n\n/**\n * Grounding that generates a business context report about the database.\n *\n * Uses an LLM agent to:\n * 1. Query COUNT(*) for each table\n * 2. Query SELECT * LIMIT 3 for sample data\n * 3. 
Generate a 400-600 word business context report\n *\n * The report helps downstream agents understand what the database represents.\n */\nexport class ReportGrounding extends AbstractGrounding {\n #adapter: Adapter;\n #model: AgentModel;\n #cache?: ReportCache;\n #forceRefresh: boolean;\n\n constructor(adapter: Adapter, config: ReportGroundingConfig = {}) {\n super('business_context');\n this.#adapter = adapter;\n this.#model = config.model ?? groq('openai/gpt-oss-20b');\n this.#cache = config.cache;\n this.#forceRefresh = config.forceRefresh ?? false;\n }\n\n async execute(ctx: GroundingContext): Promise<void> {\n // Check cache first (unless forcing refresh)\n if (!this.#forceRefresh && this.#cache) {\n const cached = await this.#cache.get();\n if (cached) {\n ctx.report = cached;\n return;\n }\n }\n\n // Generate report using LLM\n const report = await this.#generateReport();\n ctx.report = report;\n\n // Cache the result\n if (this.#cache) {\n await this.#cache.set(report);\n }\n }\n\n async #generateReport(): Promise<string> {\n const context = new ContextEngine({\n store: new InMemoryContextStore(),\n chatId: `report-gen-${crypto.randomUUID()}`,\n userId: 'system',\n });\n\n context.set(\n persona({\n name: 'db-report-agent',\n role: 'Database analyst',\n objective:\n 'Analyze the database and write a contextual report about what it represents',\n }),\n fragment(\n 'instructions',\n dedent`\n Write a business context that helps another agent answer questions accurately.\n\n For EACH table, do queries ONE AT A TIME:\n 1. SELECT COUNT(*) to get row count\n 2. SELECT * LIMIT 3 to see sample data\n\n Then write a report with:\n - What business this database is for\n - For each table: purpose, row count, and example of what the data looks like\n\n Include concrete examples like \"Track prices are $0.99\",\n \"Customer names like 'Lu\u00EDs Gon\u00E7alves'\", etc.\n\n Keep it 400-600 words, conversational style.\n `,\n ),\n user(\n 'Please analyze the database and write a contextual report about what this database represents.',\n ),\n );\n\n const adapter = this.#adapter;\n\n const reportAgent = agent({\n name: 'db-report-agent',\n model: this.#model,\n context,\n tools: {\n query_database: tool({\n description:\n 'Execute a SELECT query to explore the database and gather insights.',\n inputSchema: z.object({\n sql: z.string().describe('The SELECT query to execute'),\n purpose: z\n .string()\n .describe(\n 'What insight you are trying to gather with this query',\n ),\n }),\n execute: ({ sql }) => {\n return adapter.execute(sql);\n },\n }),\n },\n });\n\n const result = await reportAgent.generate({});\n return result.text;\n }\n}\n", "import { encode } from 'gpt-tokenizer';\n\nimport type { ContextFragment } from './fragments.ts';\nimport type { Models } from './models.generated.ts';\nimport type { ContextRenderer } from './renderers/abstract.renderer.ts';\n\n/**\n * Cost information for a model (prices per 1M tokens)\n */\nexport interface ModelCost {\n input: number;\n output: number;\n cache_read?: number;\n cache_write?: number;\n reasoning?: number;\n}\n\n/**\n * Model information from models.dev\n */\nexport interface ModelInfo {\n id: string;\n name: string;\n family: string;\n cost: ModelCost;\n limit: {\n context: number;\n output: number;\n };\n provider: string;\n}\n\n/**\n * Estimate for a single fragment\n */\nexport interface FragmentEstimate {\n name: string;\n id?: string;\n tokens: number;\n cost: number;\n}\n\n/**\n * Estimate result returned by the estimate function\n 
*/\nexport interface EstimateResult {\n model: string;\n provider: string;\n tokens: number;\n cost: number;\n limits: {\n context: number;\n output: number;\n exceedsContext: boolean;\n };\n fragments: FragmentEstimate[];\n}\n\n/**\n * Tokenizer interface for counting tokens\n */\nexport interface Tokenizer {\n encode(text: string): number[];\n count(text: string): number;\n}\n\n/**\n * Default tokenizer using gpt-tokenizer\n * Works reasonably well for most models (~5-10% variance)\n */\nexport const defaultTokenizer: Tokenizer = {\n encode(text: string): number[] {\n return encode(text);\n },\n count(text: string): number {\n return encode(text).length;\n },\n};\n\ntype ModelsDevResponse = Record<\n string,\n {\n id: string;\n name: string;\n models: Record<\n string,\n {\n id: string;\n name: string;\n family: string;\n cost: ModelCost;\n limit: { context: number; output: number };\n }\n >;\n }\n>;\n\n/**\n * Registry for AI model information from models.dev\n * Caches data and provides lookup by model ID\n */\nexport class ModelsRegistry {\n #cache: Map<string, ModelInfo> = new Map();\n #loaded = false;\n #tokenizers: Map<string, Tokenizer> = new Map();\n #defaultTokenizer: Tokenizer = defaultTokenizer;\n\n /**\n * Load models data from models.dev API\n */\n async load(): Promise<void> {\n if (this.#loaded) return;\n\n const response = await fetch('https://models.dev/api.json');\n if (!response.ok) {\n throw new Error(`Failed to fetch models: ${response.statusText}`);\n }\n\n const data = (await response.json()) as ModelsDevResponse;\n\n for (const [providerId, provider] of Object.entries(data)) {\n for (const [modelId, model] of Object.entries(provider.models)) {\n const info: ModelInfo = {\n id: model.id,\n name: model.name,\n family: model.family,\n cost: model.cost,\n limit: model.limit,\n provider: providerId,\n };\n\n // Store by full ID (provider:model)\n this.#cache.set(`${providerId}:${modelId}`, info);\n }\n }\n\n this.#loaded = true;\n }\n\n /**\n * Get model info by ID\n * @param modelId - Model ID (e.g., \"openai:gpt-4o\")\n */\n get(modelId: string): ModelInfo | undefined {\n return this.#cache.get(modelId);\n }\n\n /**\n * Check if a model exists in the registry\n */\n has(modelId: string): boolean {\n return this.#cache.has(modelId);\n }\n\n /**\n * List all available model IDs\n */\n list(): string[] {\n return [...this.#cache.keys()];\n }\n\n /**\n * Register a custom tokenizer for specific model families\n * @param family - Model family name (e.g., \"llama\", \"claude\")\n * @param tokenizer - Tokenizer implementation\n */\n registerTokenizer(family: string, tokenizer: Tokenizer): void {\n this.#tokenizers.set(family, tokenizer);\n }\n\n /**\n * Set the default tokenizer used when no family-specific tokenizer is registered\n */\n setDefaultTokenizer(tokenizer: Tokenizer): void {\n this.#defaultTokenizer = tokenizer;\n }\n\n /**\n * Get the appropriate tokenizer for a model\n */\n getTokenizer(modelId: string): Tokenizer {\n const model = this.get(modelId);\n if (model) {\n const familyTokenizer = this.#tokenizers.get(model.family);\n if (familyTokenizer) {\n return familyTokenizer;\n }\n }\n return this.#defaultTokenizer;\n }\n\n /**\n * Estimate token count and cost for given text and model\n * @param modelId - Model ID to use for pricing (e.g., \"openai:gpt-4o\")\n * @param input - Input text (prompt)\n */\n estimate(modelId: Models, input: string): EstimateResult {\n const model = this.get(modelId);\n if (!model) {\n throw new Error(\n `Model \"${modelId}\" not 
found. Call load() first or check model ID.`,\n );\n }\n\n const tokenizer = this.getTokenizer(modelId);\n const tokens = tokenizer.count(input);\n const cost = (tokens / 1_000_000) * model.cost.input;\n\n return {\n model: model.id,\n provider: model.provider,\n tokens,\n cost,\n limits: {\n context: model.limit.context,\n output: model.limit.output,\n exceedsContext: tokens > model.limit.context,\n },\n fragments: [],\n };\n }\n}\n\n// Singleton instance for convenience\nlet _registry: ModelsRegistry | null = null;\n\n/**\n * Get the shared ModelsRegistry instance\n */\nexport function getModelsRegistry(): ModelsRegistry {\n if (!_registry) {\n _registry = new ModelsRegistry();\n }\n return _registry;\n}\n\n/**\n * Convenience function to estimate cost for a model\n * Automatically loads the registry if not already loaded\n *\n * @param modelId - Model ID (e.g., \"openai:gpt-4o\", \"anthropic:claude-3-5-sonnet\")\n * @param renderer - Renderer to use for converting fragments to text\n * @param fragments - Context fragments to estimate\n */\nexport async function estimate(\n modelId: Models,\n renderer: ContextRenderer,\n ...fragments: ContextFragment[]\n): Promise<EstimateResult> {\n const registry = getModelsRegistry();\n await registry.load();\n\n // Calculate total (all fragments rendered together)\n const input = renderer.render(fragments);\n const model = registry.get(modelId);\n if (!model) {\n throw new Error(\n `Model \"${modelId}\" not found. Call load() first or check model ID.`,\n );\n }\n\n const tokenizer = registry.getTokenizer(modelId);\n const totalTokens = tokenizer.count(input);\n const totalCost = (totalTokens / 1_000_000) * model.cost.input;\n\n // Calculate per-fragment estimates\n const fragmentEstimates: FragmentEstimate[] = fragments.map((fragment) => {\n const rendered = renderer.render([fragment]);\n const tokens = tokenizer.count(rendered);\n const cost = (tokens / 1_000_000) * model.cost.input;\n return {\n id: fragment.id,\n name: fragment.name,\n tokens,\n cost,\n };\n });\n\n return {\n model: model.id,\n provider: model.provider,\n tokens: totalTokens,\n cost: totalCost,\n limits: {\n context: model.limit.context,\n output: model.limit.output,\n exceedsContext: totalTokens > model.limit.context,\n },\n fragments: fragmentEstimates,\n };\n}\n", "import { type UIMessage, generateId } from 'ai';\n\nimport type { FragmentCodec } from './codec.ts';\n\n/**\n * Fragment type identifier.\n * - 'fragment': Regular context fragment (default)\n * - 'message': Conversation message (user/assistant)\n */\nexport type FragmentType = 'fragment' | 'message';\n\n/**\n * A context fragment containing a name and associated data.\n */\nexport interface ContextFragment<T extends FragmentData = FragmentData> {\n /**\n * Unique identifier for this fragment.\n * Auto-generated for user/assistant messages, optional for other fragments.\n */\n id?: string;\n name: string;\n data: T;\n /**\n * Fragment type for categorization.\n * Messages use 'message' type and are handled separately during resolve().\n */\n type?: FragmentType;\n /**\n * When true, this fragment will be persisted to the store on save().\n */\n persist?: boolean;\n /**\n * Codec for encoding/decoding this fragment.\n * Used by resolve() to convert to AI SDK format.\n */\n codec?: FragmentCodec;\n}\n\n/**\n * Fragment data can be a primitive, array, object, or nested fragment.\n */\nexport type FragmentData =\n | string\n | number\n | null\n | undefined\n | boolean\n | ContextFragment\n | FragmentData[]\n | { [key: 
string]: FragmentData };\n\n/**\n * Type guard to check if data is a ContextFragment.\n */\nexport function isFragment(data: unknown): data is ContextFragment {\n return (\n typeof data === 'object' &&\n data !== null &&\n 'name' in data &&\n 'data' in data &&\n typeof (data as ContextFragment).name === 'string'\n );\n}\n\n/**\n * A plain object with string keys and FragmentData values.\n */\nexport type FragmentObject = Record<string, FragmentData>;\n\n/**\n * Type guard to check if data is a plain object (not array, not fragment, not primitive).\n */\nexport function isFragmentObject(data: unknown): data is FragmentObject {\n return (\n typeof data === 'object' &&\n data !== null &&\n !Array.isArray(data) &&\n !isFragment(data)\n );\n}\n\n/**\n * Type guard to check if a fragment is a message fragment.\n */\nexport function isMessageFragment(fragment: ContextFragment): boolean {\n return fragment.type === 'message';\n}\n\nexport function fragment(\n name: string,\n ...children: FragmentData[]\n): ContextFragment {\n return {\n name,\n data: children,\n };\n}\n\n/**\n * Create a user message fragment.\n * Message fragments are separated from regular fragments during resolve().\n *\n * @param content - The message content (plain text or a full UIMessage)\n *\n * @example\n * ```ts\n * context.set(user('Hello')); // Auto-generated ID\n * context.set(user({ id: 'msg-1', role: 'user', parts: [{ type: 'text', text: 'Hello' }] })); // Custom ID\n * ```\n */\nexport function user(content: string | UIMessage): ContextFragment {\n const message =\n typeof content === 'string'\n ? {\n id: generateId(),\n role: 'user',\n parts: [{ type: 'text', text: content }],\n }\n : content;\n return {\n id: message.id,\n name: 'user',\n data: 'content',\n type: 'message',\n persist: true,\n codec: {\n decode() {\n return message;\n },\n encode() {\n return message;\n },\n },\n };\n}\n\n/**\n * Create an assistant message fragment.\n * Message fragments are separated from regular fragments during resolve().\n *\n * @param message - The assistant UIMessage (use assistantText() for plain string content)\n *\n * @example\n * ```ts\n * context.set(assistant({ id: 'resp-1', role: 'assistant', parts: [{ type: 'text', text: 'Hi there!' }] }));\n * context.set(assistantText('Hi there!')); // Convenience wrapper for plain text\n * ```\n */\nexport function assistant(message: UIMessage): ContextFragment {\n return {\n id: message.id,\n name: 'assistant',\n data: 'content',\n type: 'message',\n persist: true,\n codec: {\n decode() {\n return message;\n },\n encode() {\n return message;\n },\n },\n };\n}\n\n/**\n * Create a generic message fragment.\n * String content is wrapped in a user-role UIMessage with a generated ID.\n * Message fragments are separated from regular fragments during resolve().\n */\nexport function message(content: string | UIMessage): ContextFragment {\n const message =\n typeof content === 'string'\n ? {\n id: generateId(),\n role: 'user',\n parts: [{ type: 'text', text: content }],\n }\n : content;\n return {\n id: message.id,\n name: 'message',\n data: 'content',\n type: 'message',\n persist: true,\n codec: {\n decode() {\n return message;\n },\n encode() {\n return message;\n },\n },\n };\n}\n\n/**\n * Create an assistant message fragment from text content.\n * Convenience wrapper that creates a UIMessage internally.\n *\n * @param content - The message text content\n * @param options - Optional settings (id)\n *\n * @example\n * ```ts\n * context.set(assistantText('Hi there!')); // Auto-generated ID\n * context.set(assistantText('Hi there!', { id: 'resp-1' })); // Custom ID\n * ```\n */\nexport function assistantText(\n content: string,\n options?: { id?: string },\n): ContextFragment {\n const id = options?.id ?? 
crypto.randomUUID();\n return assistant({\n id,\n role: 'assistant',\n parts: [{ type: 'text', text: content }],\n });\n}\n", "import pluralize from 'pluralize';\nimport { titlecase } from 'stringcase';\n\nimport {\n type ContextFragment,\n type FragmentData,\n type FragmentObject,\n isFragment,\n isFragmentObject,\n} from '../fragments.ts';\n\n/**\n * Render context passed through the template method.\n */\nexport interface RenderContext {\n depth: number;\n path: string[];\n}\n\n/**\n * Options for renderers.\n */\nexport interface RendererOptions {\n /**\n * When true, fragments with the same name are grouped under a pluralized parent tag.\n * e.g., multiple <hint> become <hints><hint>...</hint><hint>...</hint></hints>\n */\n groupFragments?: boolean;\n}\n\n/**\n * Base renderer implementing the Template Method pattern.\n * Subclasses implement the specific formatting hooks.\n */\nexport abstract class ContextRenderer {\n protected options: RendererOptions;\n\n constructor(options: RendererOptions = {}) {\n this.options = options;\n }\n\n abstract render(fragments: ContextFragment[]): string;\n\n /**\n * Check if data is a primitive (string, number, boolean).\n */\n protected isPrimitive(data: FragmentData): data is string | number | boolean {\n return (\n typeof data === 'string' ||\n typeof data === 'number' ||\n typeof data === 'boolean'\n );\n }\n\n /**\n * Group fragments by name for groupFragments option.\n */\n protected groupByName(\n fragments: ContextFragment[],\n ): Map<string, ContextFragment[]> {\n const groups = new Map<string, ContextFragment[]>();\n for (const fragment of fragments) {\n const existing = groups.get(fragment.name) ?? [];\n existing.push(fragment);\n groups.set(fragment.name, existing);\n }\n return groups;\n }\n\n /**\n * Remove null/undefined from fragments and fragment data recursively.\n * This protects renderers from nullish values and ensures they are ignored\n * consistently across all output formats.\n */\n protected sanitizeFragments(fragments: ContextFragment[]): ContextFragment[] {\n const sanitized: ContextFragment[] = [];\n for (const fragment of fragments) {\n const cleaned = this.sanitizeFragment(fragment, new WeakSet<object>());\n if (cleaned) {\n sanitized.push(cleaned);\n }\n }\n return sanitized;\n }\n\n protected sanitizeFragment(\n fragment: ContextFragment,\n seen: WeakSet<object>,\n ): ContextFragment | null {\n const data = this.sanitizeData(fragment.data, seen);\n if (data == null) {\n return null;\n }\n return {\n ...fragment,\n data,\n };\n }\n\n protected sanitizeData(\n data: FragmentData,\n seen: WeakSet<object>,\n ): FragmentData | undefined {\n if (data == null) {\n return undefined;\n }\n\n if (isFragment(data)) {\n return this.sanitizeFragment(data, seen) ?? 
undefined;\n }\n\n if (Array.isArray(data)) {\n if (seen.has(data)) {\n return undefined;\n }\n seen.add(data);\n\n const cleaned: FragmentData[] = [];\n for (const item of data) {\n const sanitizedItem = this.sanitizeData(item, seen);\n if (sanitizedItem != null) {\n cleaned.push(sanitizedItem);\n }\n }\n return cleaned;\n }\n\n if (isFragmentObject(data)) {\n if (seen.has(data)) {\n return undefined;\n }\n seen.add(data);\n\n const cleaned: FragmentObject = {};\n for (const [key, value] of Object.entries(data)) {\n const sanitizedValue = this.sanitizeData(value, seen);\n if (sanitizedValue != null) {\n cleaned[key] = sanitizedValue;\n }\n }\n return cleaned;\n }\n\n return data;\n }\n\n /**\n * Template method - dispatches value to appropriate handler.\n */\n protected renderValue(\n key: string,\n value: unknown,\n ctx: RenderContext,\n ): string {\n if (value == null) {\n return '';\n }\n if (isFragment(value)) {\n return this.renderFragment(value, ctx);\n }\n if (Array.isArray(value)) {\n return this.renderArray(key, value, ctx);\n }\n if (isFragmentObject(value)) {\n return this.renderObject(key, value, ctx);\n }\n return this.renderPrimitive(key, String(value), ctx);\n }\n\n /**\n * Render a nested fragment - subclasses implement this.\n */\n protected abstract renderFragment(\n fragment: ContextFragment,\n ctx: RenderContext,\n ): string;\n\n /**\n * Render all entries of an object.\n */\n protected renderEntries(data: FragmentObject, ctx: RenderContext): string[] {\n return Object.entries(data)\n .map(([key, value]) => this.renderValue(key, value, ctx))\n .filter(Boolean);\n }\n\n // Hooks - subclasses implement these\n protected abstract renderPrimitive(\n key: string,\n value: string,\n ctx: RenderContext,\n ): string;\n protected abstract renderArray(\n key: string,\n items: FragmentData[],\n ctx: RenderContext,\n ): string;\n protected abstract renderObject(\n key: string,\n obj: FragmentObject,\n ctx: RenderContext,\n ): string;\n}\n\n/**\n * Renders context fragments as XML.\n */\nexport class XmlRenderer extends ContextRenderer {\n render(fragments: ContextFragment[]): string {\n const sanitized = this.sanitizeFragments(fragments);\n return sanitized\n .map((f) => this.#renderTopLevel(f))\n .filter(Boolean)\n .join('\\n');\n }\n\n #renderTopLevel(fragment: ContextFragment): string {\n if (this.isPrimitive(fragment.data)) {\n return this.#leafRoot(fragment.name, String(fragment.data));\n }\n if (Array.isArray(fragment.data)) {\n return this.#renderArray(fragment.name, fragment.data, 0);\n }\n if (isFragment(fragment.data)) {\n const child = this.renderFragment(fragment.data, { depth: 1, path: [] });\n return this.#wrap(fragment.name, [child]);\n }\n if (isFragmentObject(fragment.data)) {\n return this.#wrap(\n fragment.name,\n this.renderEntries(fragment.data, { depth: 1, path: [] }),\n );\n }\n return '';\n }\n\n #renderArray(name: string, items: FragmentData[], depth: number): string {\n const fragmentItems = items.filter(isFragment);\n const nonFragmentItems = items.filter((item) => !isFragment(item));\n\n const children: string[] = [];\n\n // Render non-fragment items\n for (const item of nonFragmentItems) {\n if (item != null) {\n if (isFragmentObject(item)) {\n // Recursively render plain objects\n children.push(\n this.#wrapIndented(\n pluralize.singular(name),\n this.renderEntries(item, { depth: depth + 2, path: [] }),\n depth + 1,\n ),\n );\n } else {\n children.push(\n this.#leaf(pluralize.singular(name), String(item), depth + 1),\n );\n }\n }\n }\n\n // Render 
fragment items (possibly grouped)\n if (this.options.groupFragments && fragmentItems.length > 0) {\n const groups = this.groupByName(fragmentItems);\n for (const [groupName, groupFragments] of groups) {\n const groupChildren = groupFragments.map((frag) =>\n this.renderFragment(frag, { depth: depth + 2, path: [] }),\n );\n const pluralName = pluralize.plural(groupName);\n children.push(this.#wrapIndented(pluralName, groupChildren, depth + 1));\n }\n } else {\n for (const frag of fragmentItems) {\n children.push(\n this.renderFragment(frag, { depth: depth + 1, path: [] }),\n );\n }\n }\n\n return this.#wrap(name, children);\n }\n\n #leafRoot(tag: string, value: string): string {\n const safe = this.#escape(value);\n if (safe.includes('\\n')) {\n return `<${tag}>\\n${this.#indent(safe, 2)}\\n</${tag}>`;\n }\n return `<${tag}>${safe}</${tag}>`;\n }\n\n protected renderFragment(\n fragment: ContextFragment,\n ctx: RenderContext,\n ): string {\n const { name, data } = fragment;\n if (this.isPrimitive(data)) {\n return this.#leaf(name, String(data), ctx.depth);\n }\n if (isFragment(data)) {\n const child = this.renderFragment(data, { ...ctx, depth: ctx.depth + 1 });\n return this.#wrapIndented(name, [child], ctx.depth);\n }\n if (Array.isArray(data)) {\n return this.#renderArrayIndented(name, data, ctx.depth);\n }\n if (isFragmentObject(data)) {\n const children = this.renderEntries(data, {\n ...ctx,\n depth: ctx.depth + 1,\n });\n return this.#wrapIndented(name, children, ctx.depth);\n }\n return '';\n }\n\n #renderArrayIndented(\n name: string,\n items: FragmentData[],\n depth: number,\n ): string {\n const fragmentItems = items.filter(isFragment);\n const nonFragmentItems = items.filter((item) => !isFragment(item));\n\n const children: string[] = [];\n\n // Render non-fragment items\n for (const item of nonFragmentItems) {\n if (item != null) {\n if (isFragmentObject(item)) {\n // Recursively render plain objects\n children.push(\n this.#wrapIndented(\n pluralize.singular(name),\n this.renderEntries(item, { depth: depth + 2, path: [] }),\n depth + 1,\n ),\n );\n } else {\n children.push(\n this.#leaf(pluralize.singular(name), String(item), depth + 1),\n );\n }\n }\n }\n\n // Render fragment items (possibly grouped)\n if (this.options.groupFragments && fragmentItems.length > 0) {\n const groups = this.groupByName(fragmentItems);\n for (const [groupName, groupFragments] of groups) {\n const groupChildren = groupFragments.map((frag) =>\n this.renderFragment(frag, { depth: depth + 2, path: [] }),\n );\n const pluralName = pluralize.plural(groupName);\n children.push(this.#wrapIndented(pluralName, groupChildren, depth + 1));\n }\n } else {\n for (const frag of fragmentItems) {\n children.push(\n this.renderFragment(frag, { depth: depth + 1, path: [] }),\n );\n }\n }\n\n return this.#wrapIndented(name, children, depth);\n }\n\n protected renderPrimitive(\n key: string,\n value: string,\n ctx: RenderContext,\n ): string {\n return this.#leaf(key, value, ctx.depth);\n }\n\n protected renderArray(\n key: string,\n items: FragmentData[],\n ctx: RenderContext,\n ): string {\n if (!items.length) {\n return '';\n }\n const itemTag = pluralize.singular(key);\n const children = items\n .filter((item) => item != null)\n .map((item) => {\n // Check for ContextFragment first (has name + data properties)\n if (isFragment(item)) {\n return this.renderFragment(item, { ...ctx, depth: ctx.depth + 1 });\n }\n // Then check for plain objects\n if (isFragmentObject(item)) {\n return this.#wrapIndented(\n itemTag,\n 
this.renderEntries(item, { ...ctx, depth: ctx.depth + 2 }),\n ctx.depth + 1,\n );\n }\n // Primitives\n return this.#leaf(itemTag, String(item), ctx.depth + 1);\n });\n return this.#wrapIndented(key, children, ctx.depth);\n }\n\n protected renderObject(\n key: string,\n obj: FragmentObject,\n ctx: RenderContext,\n ): string {\n const children = this.renderEntries(obj, { ...ctx, depth: ctx.depth + 1 });\n return this.#wrapIndented(key, children, ctx.depth);\n }\n\n #escape(value: string): string {\n if (value == null) {\n return '';\n }\n return value\n .replaceAll(/&/g, '&amp;')\n .replaceAll(/</g, '&lt;')\n .replaceAll(/>/g, '&gt;')\n .replaceAll(/\"/g, '&quot;')\n .replaceAll(/'/g, '&apos;');\n }\n\n #indent(text: string, spaces: number): string {\n if (!text.trim()) {\n return '';\n }\n const padding = ' '.repeat(spaces);\n return text\n .split('\\n')\n .map((line) => (line.length ? padding + line : padding))\n .join('\\n');\n }\n\n #leaf(tag: string, value: string, depth: number): string {\n const safe = this.#escape(value);\n const pad = ' '.repeat(depth);\n if (safe.includes('\\n')) {\n return `${pad}<${tag}>\\n${this.#indent(safe, (depth + 1) * 2)}\\n${pad}</${tag}>`;\n }\n return `${pad}<${tag}>${safe}</${tag}>`;\n }\n\n #wrap(tag: string, children: string[]): string {\n const content = children.filter(Boolean).join('\\n');\n if (!content) {\n return '';\n }\n return `<${tag}>\\n${content}\\n</${tag}>`;\n }\n\n #wrapIndented(tag: string, children: string[], depth: number): string {\n const content = children.filter(Boolean).join('\\n');\n if (!content) {\n return '';\n }\n const pad = ' '.repeat(depth);\n return `${pad}<${tag}>\\n${content}\\n${pad}</${tag}>`;\n }\n}\n\n/**\n * Renders context fragments as Markdown.\n */\nexport class MarkdownRenderer extends ContextRenderer {\n render(fragments: ContextFragment[]): string {\n return this.sanitizeFragments(fragments)\n .map((f) => {\n const title = `## ${titlecase(f.name)}`;\n if (this.isPrimitive(f.data)) {\n return `${title}\\n${String(f.data)}`;\n }\n if (Array.isArray(f.data)) {\n return `${title}\\n${this.#renderArray(f.data, 0)}`;\n }\n if (isFragment(f.data)) {\n return `${title}\\n${this.renderFragment(f.data, { depth: 0, path: [] })}`;\n }\n if (isFragmentObject(f.data)) {\n return `${title}\\n${this.renderEntries(f.data, { depth: 0, path: [] }).join('\\n')}`;\n }\n return `${title}\\n`;\n })\n .join('\\n\\n');\n }\n\n #renderArray(items: FragmentData[], depth: number): string {\n const fragmentItems = items.filter(isFragment);\n const nonFragmentItems = items.filter((item) => !isFragment(item));\n\n const lines: string[] = [];\n\n // Render non-fragment items\n for (const item of nonFragmentItems) {\n if (item != null) {\n lines.push(`${this.#pad(depth)}- ${String(item)}`);\n }\n }\n\n // Render fragment items (possibly grouped)\n if (this.options.groupFragments && fragmentItems.length > 0) {\n const groups = this.groupByName(fragmentItems);\n for (const [groupName, groupFragments] of groups) {\n const pluralName = pluralize.plural(groupName);\n lines.push(`${this.#pad(depth)}- **${titlecase(pluralName)}**:`);\n for (const frag of groupFragments) {\n lines.push(this.renderFragment(frag, { depth: depth + 1, path: [] }));\n }\n }\n } else {\n for (const frag of fragmentItems) {\n lines.push(this.renderFragment(frag, { depth, path: [] }));\n }\n }\n\n return lines.join('\\n');\n }\n\n #pad(depth: number): string {\n return ' '.repeat(depth);\n }\n\n #leaf(key: string, value: string, depth: number): string {\n return 
`${this.#pad(depth)}- **${key}**: ${value}`;\n }\n\n #arrayItem(item: unknown, depth: number): string {\n if (isFragment(item)) {\n return this.renderFragment(item, { depth, path: [] });\n }\n if (isFragmentObject(item)) {\n return this.renderEntries(item, {\n depth,\n path: [],\n }).join('\\n');\n }\n return `${this.#pad(depth)}- ${String(item)}`;\n }\n\n protected renderFragment(\n fragment: ContextFragment,\n ctx: RenderContext,\n ): string {\n const { name, data } = fragment;\n const header = `${this.#pad(ctx.depth)}- **${name}**:`;\n if (this.isPrimitive(data)) {\n return `${this.#pad(ctx.depth)}- **${name}**: ${String(data)}`;\n }\n if (isFragment(data)) {\n const child = this.renderFragment(data, { ...ctx, depth: ctx.depth + 1 });\n return [header, child].join('\\n');\n }\n if (Array.isArray(data)) {\n const children = data\n .filter((item) => item != null)\n .map((item) => this.#arrayItem(item, ctx.depth + 1));\n return [header, ...children].join('\\n');\n }\n if (isFragmentObject(data)) {\n const children = this.renderEntries(data, {\n ...ctx,\n depth: ctx.depth + 1,\n }).join('\\n');\n return [header, children].join('\\n');\n }\n return header;\n }\n\n protected renderPrimitive(\n key: string,\n value: string,\n ctx: RenderContext,\n ): string {\n return this.#leaf(key, value, ctx.depth);\n }\n\n protected renderArray(\n key: string,\n items: FragmentData[],\n ctx: RenderContext,\n ): string {\n const header = `${this.#pad(ctx.depth)}- **${key}**:`;\n const children = items\n .filter((item) => item != null)\n .map((item) => this.#arrayItem(item, ctx.depth + 1));\n return [header, ...children].join('\\n');\n }\n\n protected renderObject(\n key: string,\n obj: FragmentObject,\n ctx: RenderContext,\n ): string {\n const header = `${this.#pad(ctx.depth)}- **${key}**:`;\n const children = this.renderEntries(obj, {\n ...ctx,\n depth: ctx.depth + 1,\n }).join('\\n');\n return [header, children].join('\\n');\n }\n}\n\n/**\n * Renders context fragments as TOML.\n */\nexport class TomlRenderer extends ContextRenderer {\n render(fragments: ContextFragment[]): string {\n const rendered: string[] = [];\n for (const f of this.sanitizeFragments(fragments)) {\n if (this.isPrimitive(f.data)) {\n rendered.push(`${f.name} = ${this.#formatValue(f.data)}`);\n } else if (Array.isArray(f.data)) {\n rendered.push(this.#renderTopLevelArray(f.name, f.data));\n } else if (isFragment(f.data)) {\n rendered.push(\n [\n `[${f.name}]`,\n this.renderFragment(f.data, { depth: 0, path: [f.name] }),\n ].join('\\n'),\n );\n } else if (isFragmentObject(f.data)) {\n const entries = this.#renderObjectEntries(f.data, [f.name]);\n rendered.push([`[${f.name}]`, ...entries].join('\\n'));\n }\n }\n return rendered.join('\\n\\n');\n }\n\n #renderTopLevelArray(name: string, items: FragmentData[]): string {\n const fragmentItems = items.filter(isFragment);\n const nonFragmentItems = items.filter(\n (item) => !isFragment(item) && item != null,\n );\n\n // If array contains fragments, render as sections\n if (fragmentItems.length > 0) {\n const parts: string[] = [`[${name}]`];\n for (const frag of fragmentItems) {\n parts.push(this.renderFragment(frag, { depth: 0, path: [name] }));\n }\n return parts.join('\\n');\n }\n\n // Otherwise render as inline array\n const values = nonFragmentItems.map((item) => this.#formatValue(item));\n return `${name} = [${values.join(', ')}]`;\n }\n\n /**\n * Override renderValue to preserve type information for TOML formatting.\n */\n protected override renderValue(\n key: string,\n value: 
unknown,\n ctx: RenderContext,\n ): string {\n if (value == null) {\n return '';\n }\n if (isFragment(value)) {\n return this.renderFragment(value, ctx);\n }\n if (Array.isArray(value)) {\n return this.renderArray(key, value, ctx);\n }\n if (isFragmentObject(value)) {\n return this.renderObject(key, value, ctx);\n }\n // Preserve original type for TOML formatting\n return `${key} = ${this.#formatValue(value)}`;\n }\n\n protected renderPrimitive(\n key: string,\n value: string,\n ctx: RenderContext,\n ): string {\n void ctx;\n return `${key} = ${this.#formatValue(value)}`;\n }\n\n protected renderArray(\n key: string,\n items: FragmentData[],\n ctx: RenderContext,\n ): string {\n void ctx;\n const values = items\n .filter((item) => item != null)\n .map((item) => this.#formatValue(item));\n return `${key} = [${values.join(', ')}]`;\n }\n\n protected renderObject(\n key: string,\n obj: FragmentObject,\n ctx: RenderContext,\n ): string {\n const newPath = [...ctx.path, key];\n const entries = this.#renderObjectEntries(obj, newPath);\n return ['', `[${newPath.join('.')}]`, ...entries].join('\\n');\n }\n\n #renderObjectEntries(obj: FragmentObject, path: string[]): string[] {\n return Object.entries(obj)\n .map(([key, value]) => {\n if (value == null) {\n return '';\n }\n if (isFragmentObject(value)) {\n const newPath = [...path, key];\n const entries = this.#renderObjectEntries(value, newPath);\n return ['', `[${newPath.join('.')}]`, ...entries].join('\\n');\n }\n if (Array.isArray(value)) {\n const values = value\n .filter((item) => item != null)\n .map((item) => this.#formatValue(item));\n return `${key} = [${values.join(', ')}]`;\n }\n return `${key} = ${this.#formatValue(value)}`;\n })\n .filter(Boolean);\n }\n\n protected renderFragment(\n fragment: ContextFragment,\n ctx: RenderContext,\n ): string {\n const { name, data } = fragment;\n const newPath = [...ctx.path, name];\n if (this.isPrimitive(data)) {\n return `${name} = ${this.#formatValue(data)}`;\n }\n if (isFragment(data)) {\n return [\n '',\n `[${newPath.join('.')}]`,\n this.renderFragment(data, { ...ctx, path: newPath }),\n ].join('\\n');\n }\n if (Array.isArray(data)) {\n const fragmentItems = data.filter(isFragment);\n const nonFragmentItems = data.filter(\n (item) => !isFragment(item) && item != null,\n );\n\n if (fragmentItems.length > 0) {\n const parts: string[] = ['', `[${newPath.join('.')}]`];\n for (const frag of fragmentItems) {\n parts.push(this.renderFragment(frag, { ...ctx, path: newPath }));\n }\n return parts.join('\\n');\n }\n\n const values = nonFragmentItems.map((item) => this.#formatValue(item));\n return `${name} = [${values.join(', ')}]`;\n }\n if (isFragmentObject(data)) {\n const entries = this.#renderObjectEntries(data, newPath);\n return ['', `[${newPath.join('.')}]`, ...entries].join('\\n');\n }\n return '';\n }\n\n #escape(value: string): string {\n return value.replace(/\\\\/g, '\\\\\\\\').replace(/\"/g, '\\\\\"');\n }\n\n #formatValue(value: unknown): string {\n if (typeof value === 'string') {\n return `\"${this.#escape(value)}\"`;\n }\n if (typeof value === 'boolean' || typeof value === 'number') {\n return String(value);\n }\n if (typeof value === 'object' && value !== null) {\n return JSON.stringify(value);\n }\n return `\"${String(value)}\"`;\n }\n}\n\n/**\n * Renders context fragments as TOON (Token-Oriented Object Notation).\n * TOON is a compact, token-efficient format for LLM prompts that combines\n * YAML-like indentation with CSV-like tabular arrays.\n */\nexport class ToonRenderer extends 
ContextRenderer {\n render(fragments: ContextFragment[]): string {\n const sanitized = this.sanitizeFragments(fragments);\n return sanitized\n .map((f) => this.#renderTopLevel(f))\n .filter(Boolean)\n .join('\\n');\n }\n\n #renderTopLevel(fragment: ContextFragment): string {\n const { name, data } = fragment;\n if (this.isPrimitive(data)) {\n return `${name}: ${this.#formatValue(data)}`;\n }\n if (Array.isArray(data)) {\n return this.#renderArrayField(name, data, 0);\n }\n if (isFragment(data)) {\n const child = this.renderFragment(data, { depth: 1, path: [] });\n return `${name}:\\n${child}`;\n }\n if (isFragmentObject(data)) {\n const entries = this.#renderObjectEntries(data, 1);\n if (!entries) {\n return `${name}:`;\n }\n return `${name}:\\n${entries}`;\n }\n return `${name}:`;\n }\n\n #renderArrayField(key: string, items: FragmentData[], depth: number): string {\n const filtered = items.filter((item) => item != null);\n if (filtered.length === 0) {\n return `${this.#pad(depth)}${key}[0]:`;\n }\n\n // Check for ContextFragment items\n const fragmentItems = filtered.filter(isFragment);\n if (fragmentItems.length > 0) {\n return this.#renderMixedArray(key, filtered, depth);\n }\n\n // Check if all items are primitives\n if (filtered.every((item) => this.#isPrimitiveValue(item))) {\n return this.#renderPrimitiveArray(key, filtered, depth);\n }\n\n // Check if tabular (uniform objects with primitive values)\n if (this.#isTabularArray(filtered)) {\n return this.#renderTabularArray(key, filtered, depth);\n }\n\n // Mixed array\n return this.#renderMixedArray(key, filtered, depth);\n }\n\n #isPrimitiveValue(value: unknown): boolean {\n return (\n typeof value === 'string' ||\n typeof value === 'number' ||\n typeof value === 'boolean'\n );\n }\n\n #isTabularArray(items: FragmentData[]): items is FragmentObject[] {\n if (items.length === 0) return false;\n\n // All items must be objects (not arrays, not primitives, not fragments)\n const objects = items.filter(isFragmentObject);\n if (objects.length !== items.length) return false;\n\n // Determine if there is at least one shared field across all rows.\n // We treat null/undefined/missing as \"empty\" cells, but we still require\n // a non-empty key intersection so non-uniform objects are not\n // forced into a tabular shape.\n let intersection = new Set<string>(Object.keys(objects[0]));\n for (const obj of objects) {\n const keys = new Set(Object.keys(obj));\n intersection = new Set([...intersection].filter((k) => keys.has(k)));\n\n for (const value of Object.values(obj)) {\n if (value == null) continue;\n if (!this.#isPrimitiveValue(value)) {\n return false;\n }\n }\n }\n\n return intersection.size > 0;\n }\n\n #renderPrimitiveArray(\n key: string,\n items: FragmentData[],\n depth: number,\n ): string {\n const values = items.map((item) => this.#formatValue(item)).join(',');\n return `${this.#pad(depth)}${key}[${items.length}]: ${values}`;\n }\n\n #renderTabularArray(\n key: string,\n items: FragmentObject[],\n depth: number,\n ): string {\n if (items.length === 0) {\n return `${this.#pad(depth)}${key}[0]:`;\n }\n\n const fields = Array.from(\n new Set(items.flatMap((obj) => Object.keys(obj))),\n );\n const header = `${this.#pad(depth)}${key}[${items.length}]{${fields.join(',')}}:`;\n\n const rows = items.map((obj) => {\n const values = fields.map((f) => {\n const value = obj[f];\n if (value == null) return '';\n return this.#formatValue(value);\n });\n return `${this.#pad(depth + 1)}${values.join(',')}`;\n });\n\n return [header, 
...rows].join('\\n');\n }\n\n #renderMixedArray(key: string, items: FragmentData[], depth: number): string {\n const header = `${this.#pad(depth)}${key}[${items.length}]:`;\n const lines = items.map((item) => this.#renderListItem(item, depth + 1));\n return [header, ...lines].join('\\n');\n }\n\n #renderListItem(item: FragmentData, depth: number): string {\n if (this.#isPrimitiveValue(item)) {\n return `${this.#pad(depth)}- ${this.#formatValue(item)}`;\n }\n if (isFragment(item)) {\n const rendered = this.renderFragment(item, {\n depth: depth + 1,\n path: [],\n });\n // For fragments, render key: value on same line as hyphen if primitive\n if (this.isPrimitive(item.data)) {\n return `${this.#pad(depth)}- ${item.name}: ${this.#formatValue(item.data)}`;\n }\n return `${this.#pad(depth)}- ${item.name}:\\n${rendered.split('\\n').slice(1).join('\\n')}`;\n }\n if (Array.isArray(item)) {\n // Nested array\n const content = this.#renderArrayField('', item, depth + 1);\n return `${this.#pad(depth)}-${content.trimStart()}`;\n }\n if (isFragmentObject(item)) {\n // Object in list\n const entries = this.#renderObjectEntries(item, depth + 1);\n if (!entries) {\n return `${this.#pad(depth)}-`;\n }\n // First line on same line as hyphen\n const lines = entries.split('\\n');\n const first = lines[0].trimStart();\n const rest = lines.slice(1).join('\\n');\n return rest\n ? `${this.#pad(depth)}- ${first}\\n${rest}`\n : `${this.#pad(depth)}- ${first}`;\n }\n return `${this.#pad(depth)}- ${this.#formatValue(item)}`;\n }\n\n #renderObjectEntries(obj: FragmentObject, depth: number): string {\n const lines: string[] = [];\n for (const [key, value] of Object.entries(obj)) {\n if (value == null) continue;\n\n if (this.#isPrimitiveValue(value)) {\n lines.push(`${this.#pad(depth)}${key}: ${this.#formatValue(value)}`);\n } else if (Array.isArray(value)) {\n lines.push(this.#renderArrayField(key, value, depth));\n } else if (isFragmentObject(value)) {\n const nested = this.#renderObjectEntries(value, depth + 1);\n if (nested) {\n lines.push(`${this.#pad(depth)}${key}:\\n${nested}`);\n } else {\n lines.push(`${this.#pad(depth)}${key}:`);\n }\n }\n }\n return lines.join('\\n');\n }\n\n protected renderFragment(\n fragment: ContextFragment,\n ctx: RenderContext,\n ): string {\n const { name, data } = fragment;\n if (this.isPrimitive(data)) {\n return `${this.#pad(ctx.depth)}${name}: ${this.#formatValue(data)}`;\n }\n if (isFragment(data)) {\n const child = this.renderFragment(data, {\n ...ctx,\n depth: ctx.depth + 1,\n });\n return `${this.#pad(ctx.depth)}${name}:\\n${child}`;\n }\n if (Array.isArray(data)) {\n return this.#renderArrayField(name, data, ctx.depth);\n }\n if (isFragmentObject(data)) {\n const entries = this.#renderObjectEntries(data, ctx.depth + 1);\n if (!entries) {\n return `${this.#pad(ctx.depth)}${name}:`;\n }\n return `${this.#pad(ctx.depth)}${name}:\\n${entries}`;\n }\n return `${this.#pad(ctx.depth)}${name}:`;\n }\n\n protected renderPrimitive(\n key: string,\n value: string,\n ctx: RenderContext,\n ): string {\n return `${this.#pad(ctx.depth)}${key}: ${this.#formatValue(value)}`;\n }\n\n protected renderArray(\n key: string,\n items: FragmentData[],\n ctx: RenderContext,\n ): string {\n return this.#renderArrayField(key, items, ctx.depth);\n }\n\n protected renderObject(\n key: string,\n obj: FragmentObject,\n ctx: RenderContext,\n ): string {\n const entries = this.#renderObjectEntries(obj, ctx.depth + 1);\n if (!entries) {\n return `${this.#pad(ctx.depth)}${key}:`;\n }\n return 
`${this.#pad(ctx.depth)}${key}:\\n${entries}`;\n }\n\n #pad(depth: number): string {\n return ' '.repeat(depth);\n }\n\n #needsQuoting(value: string): boolean {\n if (value === '') return true;\n if (value !== value.trim()) return true;\n if (['true', 'false', 'null'].includes(value.toLowerCase())) return true;\n if (/^-?\\d+(?:\\.\\d+)?(?:e[+-]?\\d+)?$/i.test(value)) return true;\n if (/[:\\\\\"'[\\]{}|,\\t\\n\\r]/.test(value)) return true;\n if (value.startsWith('-')) return true;\n return false;\n }\n\n #escape(value: string): string {\n return value\n .replace(/\\\\/g, '\\\\\\\\')\n .replace(/\"/g, '\\\\\"')\n .replace(/\\n/g, '\\\\n')\n .replace(/\\r/g, '\\\\r')\n .replace(/\\t/g, '\\\\t');\n }\n\n #canonicalizeNumber(n: number): string {\n if (!Number.isFinite(n)) return 'null';\n if (Object.is(n, -0)) return '0';\n return String(n);\n }\n\n #formatValue(value: unknown): string {\n if (value === null) return 'null';\n if (typeof value === 'boolean') return String(value);\n if (typeof value === 'number') return this.#canonicalizeNumber(value);\n if (typeof value === 'string') {\n if (this.#needsQuoting(value)) {\n return `\"${this.#escape(value)}\"`;\n }\n return value;\n }\n // Fallback for objects/arrays in primitive context\n return `\"${this.#escape(JSON.stringify(value))}\"`;\n }\n}\n", "/**\n * Graph-based context store types and abstract interface.\n *\n * The storage model uses a DAG (Directed Acyclic Graph) for messages:\n * - Messages are immutable nodes with parentId forming the graph\n * - Branches are pointers to head (tip) messages\n * - Checkpoints are pointers to specific messages\n * - History is preserved through branching (rewind creates new branch)\n */\n\n// ============================================================================\n// Chat Types\n// ============================================================================\n\n/**\n * Data for creating/storing a chat.\n */\nexport interface ChatData {\n id: string;\n userId: string;\n title?: string;\n metadata?: Record<string, unknown>;\n}\n\n/**\n * Stored chat data returned from database (includes timestamps).\n */\nexport interface StoredChatData extends ChatData {\n createdAt: number;\n updatedAt: number;\n}\n\n/**\n * Information about a chat for listing.\n */\nexport interface ChatInfo {\n id: string;\n userId: string;\n title?: string;\n messageCount: number;\n branchCount: number;\n createdAt: number;\n updatedAt: number;\n}\n\n/**\n * Options for listing chats.\n */\nexport interface ListChatsOptions {\n /** Filter by user ID */\n userId?: string;\n /** Maximum number of results to return */\n limit?: number;\n /** Number of results to skip (for pagination) */\n offset?: number;\n}\n\n/**\n * Options for deleting a chat.\n */\nexport interface DeleteChatOptions {\n /** If provided, only delete if chat belongs to this user */\n userId?: string;\n}\n\n// ============================================================================\n// Message Types (Graph Nodes)\n// ============================================================================\n\n/**\n * Data for creating/storing a message (graph node).\n */\nexport interface MessageData {\n id: string;\n chatId: string;\n parentId: string | null; // null for root messages\n name: string; // 'user', 'assistant', 'role', 'hint', etc.\n type?: string; // 'message', 'fragment'\n data: unknown; // JSON-serializable content\n createdAt: number;\n}\n\n/**\n * Message with computed properties for listing.\n */\nexport interface MessageInfo extends MessageData 
{\n hasChildren: boolean;\n}\n\n// ============================================================================\n// Branch Types\n// ============================================================================\n\n/**\n * Data for creating/storing a branch.\n * A branch is a pointer to a head message in the graph.\n */\nexport interface BranchData {\n id: string;\n chatId: string;\n name: string; // 'main', 'alt-1', etc.\n headMessageId: string | null; // null if branch is empty\n isActive: boolean;\n createdAt: number;\n}\n\n/**\n * Information about a branch for listing.\n */\nexport interface BranchInfo {\n id: string;\n name: string;\n headMessageId: string | null;\n isActive: boolean;\n messageCount: number; // count of messages in this branch's chain\n createdAt: number;\n}\n\n// ============================================================================\n// Checkpoint Types\n// ============================================================================\n\n/**\n * Data for creating/storing a checkpoint.\n * A checkpoint is a pointer to a specific message in the graph.\n */\nexport interface CheckpointData {\n id: string;\n chatId: string;\n name: string;\n messageId: string;\n createdAt: number;\n}\n\n/**\n * Information about a checkpoint for listing.\n */\nexport interface CheckpointInfo {\n id: string;\n name: string;\n messageId: string;\n createdAt: number;\n}\n\n// ============================================================================\n// Search Types\n// ============================================================================\n\n/**\n * Options for searching messages.\n */\nexport interface SearchOptions {\n /** Only search in specific roles (e.g., ['user', 'assistant']) */\n roles?: string[];\n /** Maximum results to return (default: 20) */\n limit?: number;\n}\n\n/**\n * Search result with relevance ranking.\n */\nexport interface SearchResult {\n /** The matched message */\n message: MessageData;\n /** BM25 relevance score (lower = more relevant) */\n rank: number;\n /** Highlighted snippet with matched terms */\n snippet?: string;\n}\n\n// ============================================================================\n// Graph Visualization Types\n// ============================================================================\n\n/**\n * A node in the visualization graph.\n */\nexport interface GraphNode {\n id: string;\n parentId: string | null;\n role: string; // 'user', 'assistant', etc.\n content: string; // Truncated preview of message content\n createdAt: number;\n}\n\n/**\n * A branch pointer for visualization.\n */\nexport interface GraphBranch {\n name: string;\n headMessageId: string | null;\n isActive: boolean;\n}\n\n/**\n * A checkpoint pointer for visualization.\n */\nexport interface GraphCheckpoint {\n name: string;\n messageId: string;\n}\n\n/**\n * Complete graph data for visualization.\n */\nexport interface GraphData {\n chatId: string;\n nodes: GraphNode[];\n branches: GraphBranch[];\n checkpoints: GraphCheckpoint[];\n}\n\n// ============================================================================\n// Abstract Store Interface\n// ============================================================================\n\n/**\n * Abstract base class for graph-based context storage.\n *\n * Implementations provide persistence for the message graph, branches,\n * and checkpoints. 
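For example, a rewind does not rewrite history; it is just a new branch pointer at an earlier message (an illustrative sketch, not part of this interface contract; the ids and names are hypothetical):\n *\n * ```ts\n * await store.createBranch({\n * id: crypto.randomUUID(),\n * chatId: 'chat-1',\n * name: 'main-v2',\n * headMessageId: 'msg-3', // an earlier message becomes the new head\n * isActive: false,\n * createdAt: Date.now(),\n * });\n * ```\n *\n * 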
The graph model enables:\n * - Branching: rewind creates a new branch, original stays intact\n * - Checkpoints: pointers to specific messages for easy restore\n * - No data loss: soft delete only, all history preserved\n */\nexport abstract class ContextStore {\n // ==========================================================================\n // Chat Operations\n // ==========================================================================\n\n /**\n * Create a new chat.\n */\n abstract createChat(chat: ChatData): Promise<void>;\n\n /**\n * Create a chat if it doesn't exist, or return existing one.\n * Returns the stored chat data with timestamps.\n */\n abstract upsertChat(chat: ChatData): Promise<StoredChatData>;\n\n /**\n * Get a chat by ID.\n */\n abstract getChat(chatId: string): Promise<StoredChatData | undefined>;\n\n /**\n * Update chat metadata.\n * Note: updatedAt is automatically managed by database triggers.\n * Returns the updated chat data.\n */\n abstract updateChat(\n chatId: string,\n updates: Partial<Pick<ChatData, 'title' | 'metadata'>>,\n ): Promise<StoredChatData>;\n\n /**\n * List chats, sorted by updatedAt descending.\n * @param options - Optional filters for userId, limit, offset\n */\n abstract listChats(options?: ListChatsOptions): Promise<ChatInfo[]>;\n\n /**\n * Delete a chat and all associated data (messages, branches, checkpoints).\n * Returns true if deleted, false if not found or userId mismatch.\n */\n abstract deleteChat(\n chatId: string,\n options?: DeleteChatOptions,\n ): Promise<boolean>;\n\n // ==========================================================================\n // Message Operations (Graph Nodes)\n // ==========================================================================\n\n /**\n * Add a message to the graph.\n */\n abstract addMessage(message: MessageData): Promise<void>;\n\n /**\n * Get a message by ID.\n */\n abstract getMessage(messageId: string): Promise<MessageData | undefined>;\n\n /**\n * Walk up the parent chain from a head message, returning messages in\n * chronological order (root first).\n */\n abstract getMessageChain(headId: string): Promise<MessageData[]>;\n\n /**\n * Get all messages for a chat from the active branch.\n * Returns messages in chronological order (oldest first).\n *\n * @throws Error if chat doesn't exist\n * @returns Empty array if chat has no active branch or branch has no messages\n */\n abstract getMessages(chatId: string): Promise<MessageData[]>;\n\n /**\n * Check if a message has children (is a fork point).\n */\n abstract hasChildren(messageId: string): Promise<boolean>;\n\n // ==========================================================================\n // Branch Operations\n // ==========================================================================\n\n /**\n * Create a new branch.\n */\n abstract createBranch(branch: BranchData): Promise<void>;\n\n /**\n * Get a branch by chat ID and name.\n */\n abstract getBranch(\n chatId: string,\n name: string,\n ): Promise<BranchData | undefined>;\n\n /**\n * Get the active branch for a chat.\n */\n abstract getActiveBranch(chatId: string): Promise<BranchData | undefined>;\n\n /**\n * Set a branch as active (and deactivate others).\n */\n abstract setActiveBranch(chatId: string, branchId: string): Promise<void>;\n\n /**\n * Update a branch's head message.\n */\n abstract updateBranchHead(\n branchId: string,\n messageId: string | null,\n ): Promise<void>;\n\n /**\n * List all branches for a chat.\n */\n abstract listBranches(chatId: string): 
Promise<BranchInfo[]>;\n\n  // ==========================================================================\n  // Checkpoint Operations\n  // ==========================================================================\n\n  /**\n * Create a checkpoint.\n */\n abstract createCheckpoint(checkpoint: CheckpointData): Promise<void>;\n\n /**\n * Get a checkpoint by chat ID and name.\n */\n abstract getCheckpoint(\n chatId: string,\n name: string,\n ): Promise<CheckpointData | undefined>;\n\n /**\n * List all checkpoints for a chat.\n */\n abstract listCheckpoints(chatId: string): Promise<CheckpointInfo[]>;\n\n /**\n * Delete a checkpoint.\n */\n abstract deleteCheckpoint(chatId: string, name: string): Promise<void>;\n\n // ==========================================================================\n // Search Operations\n // ==========================================================================\n\n /**\n * Search messages using full-text search.\n *\n * @param chatId - The chat to search in\n * @param query - FTS5 query string (supports AND, OR, NOT, phrases, prefix*)\n * @param options - Search options\n * @returns Search results ordered by relevance (lower rank = more relevant)\n */\n abstract searchMessages(\n chatId: string,\n query: string,\n options?: SearchOptions,\n ): Promise<SearchResult[]>;\n\n // ==========================================================================\n // Visualization Operations\n // ==========================================================================\n\n /**\n * Get the complete graph data for a chat.\n * Returns all messages, branches, and checkpoints.\n */\n abstract getGraph(chatId: string): Promise<GraphData>;\n}\n", "import {\n type EstimateResult,\n type FragmentEstimate,\n getModelsRegistry,\n} from './estimate.ts';\nimport type { ContextFragment } from './fragments.ts';\nimport { isMessageFragment, message } from './fragments.ts';\nimport type { Models } from './models.generated.ts';\nimport {\n type ContextRenderer,\n XmlRenderer,\n} from './renderers/abstract.renderer.ts';\nimport {\n type BranchData,\n type BranchInfo,\n type ChatData,\n type CheckpointData,\n type CheckpointInfo,\n ContextStore,\n type GraphData,\n type MessageData,\n type StoredChatData,\n} from './store/store.ts';\n\n/**\n * Result of resolving context - ready for AI SDK consumption.\n */\nexport interface ResolveResult {\n /** Rendered non-message fragments for system prompt */\n systemPrompt: string;\n /** Message fragments decoded to AI SDK format */\n messages: unknown[];\n}\n\n/**\n * Options for resolve().\n */\nexport interface ResolveOptions {\n /** Renderer to use for system prompt (required) */\n renderer: ContextRenderer;\n}\n\n/**\n * Options for creating a ContextEngine.\n */\nexport interface ContextEngineOptions {\n /** Store for persisting fragments (required) */\n store: ContextStore;\n /** Unique identifier for this chat (required) */\n chatId: string;\n /** User who owns this chat (required) */\n userId: string;\n}\n\n/**\n * Metadata about a chat.\n */\nexport interface ChatMeta {\n /** Unique chat identifier */\n id: string;\n /** User who owns this chat */\n userId: string;\n /** When the chat was created */\n createdAt: number;\n /** When the chat was last updated */\n updatedAt: number;\n /** Optional user-provided title */\n title?: string;\n /** Optional custom metadata */\n metadata?: Record<string, unknown>;\n}\n\n/**\n * Options for context inspection.\n */\nexport interface InspectOptions {\n /** Model ID for cost estimation (required), e.g. \"openai:gpt-4o\" 
*/\n modelId: Models;\n /** Renderer for estimation (required) */\n renderer: ContextRenderer;\n}\n\n/**\n * Result of inspecting context state.\n * JSON-serializable snapshot for debugging.\n */\nexport interface InspectResult {\n /** Token usage and cost estimation */\n estimate: EstimateResult;\n /** Rendered output using the provided renderer */\n rendered: string;\n /** Fragment structure breakdown */\n fragments: {\n /** Non-message fragments (role, hints, etc.) */\n context: ContextFragment[];\n /** Pending messages not yet saved to store */\n pending: ContextFragment[];\n /** Persisted messages from the store */\n persisted: MessageData[];\n };\n /** Conversation graph with branches and checkpoints */\n graph: GraphData;\n /** Inspection metadata */\n meta: {\n chatId: string;\n branch: string;\n timestamp: number;\n };\n}\n\n/**\n * Context engine for managing AI conversation context with graph-based storage.\n *\n * The engine uses a DAG (Directed Acyclic Graph) model for messages:\n * - Messages are immutable nodes with parentId forming the graph\n * - Branches are pointers to head (tip) messages\n * - Checkpoints are pointers to specific messages\n * - History is preserved through branching (rewind creates new branch)\n */\nexport class ContextEngine {\n /** Non-message fragments (role, hints, etc.) - not persisted in graph */\n #fragments: ContextFragment[] = [];\n /** Pending message fragments to be added to graph */\n #pendingMessages: ContextFragment[] = [];\n #store: ContextStore;\n #chatId: string;\n #userId: string;\n #branchName: string;\n #branch: BranchData | null = null;\n #chatData: StoredChatData | null = null;\n #initialized = false;\n\n constructor(options: ContextEngineOptions) {\n if (!options.chatId) {\n throw new Error('chatId is required');\n }\n if (!options.userId) {\n throw new Error('userId is required');\n }\n this.#store = options.store;\n this.#chatId = options.chatId;\n this.#userId = options.userId;\n this.#branchName = 'main';\n }\n\n /**\n * Initialize the chat and branch if they don't exist.\n */\n async #ensureInitialized(): Promise<void> {\n if (this.#initialized) {\n return;\n }\n\n this.#chatData = await this.#store.upsertChat({\n id: this.#chatId,\n userId: this.#userId,\n });\n\n // \"main\" branch is guaranteed to exist after upsertChat\n this.#branch = (await this.#store.getActiveBranch(this.#chatId))!;\n\n this.#initialized = true;\n }\n\n /**\n * Create a new branch from a specific message.\n * Shared logic between rewind() and btw().\n */\n async #createBranchFrom(\n messageId: string,\n switchTo: boolean,\n ): Promise<BranchInfo> {\n // Generate branch name based on same-prefix count (e.g., main-v2, main-v3)\n const branches = await this.#store.listBranches(this.#chatId);\n const samePrefix = branches.filter(\n (b) =>\n b.name === this.#branchName ||\n b.name.startsWith(`${this.#branchName}-v`),\n );\n const newBranchName = `${this.#branchName}-v${samePrefix.length + 1}`;\n\n // Create new branch pointing to the target message\n const newBranch: BranchData = {\n id: crypto.randomUUID(),\n chatId: this.#chatId,\n name: newBranchName,\n headMessageId: messageId,\n isActive: false,\n createdAt: Date.now(),\n };\n await this.#store.createBranch(newBranch);\n\n if (switchTo) {\n // Switch to the new branch\n await this.#store.setActiveBranch(this.#chatId, newBranch.id);\n this.#branch = { ...newBranch, isActive: true };\n this.#branchName = newBranchName;\n // Clear pending messages (they were for the old branch)\n this.#pendingMessages = 
[];\n }\n\n // Get message count for branch info\n const chain = await this.#store.getMessageChain(messageId);\n\n return {\n id: newBranch.id,\n name: newBranch.name,\n headMessageId: newBranch.headMessageId,\n isActive: switchTo,\n messageCount: chain.length,\n createdAt: newBranch.createdAt,\n };\n }\n\n /**\n * Get the current chat ID.\n */\n public get chatId(): string {\n return this.#chatId;\n }\n\n /**\n * Get the current branch name.\n */\n public get branch(): string {\n return this.#branchName;\n }\n\n /**\n * Get metadata for the current chat.\n * Returns null if the chat hasn't been initialized yet.\n */\n public get chat(): ChatMeta | null {\n if (!this.#chatData) {\n return null;\n }\n return {\n id: this.#chatData.id,\n userId: this.#chatData.userId,\n createdAt: this.#chatData.createdAt,\n updatedAt: this.#chatData.updatedAt,\n title: this.#chatData.title,\n metadata: this.#chatData.metadata,\n };\n }\n\n /**\n * Add fragments to the context.\n *\n * - Message fragments (user/assistant) are queued for persistence\n * - Non-message fragments (role/hint) are kept in memory for system prompt\n */\n public set(...fragments: ContextFragment[]) {\n for (const fragment of fragments) {\n if (isMessageFragment(fragment)) {\n this.#pendingMessages.push(fragment);\n } else {\n this.#fragments.push(fragment);\n }\n }\n return this;\n }\n\n // Unset a fragment by ID (not implemented yet)\n public unset(fragmentId: string) {\n void fragmentId; // placeholder until unset is implemented\n }\n\n /**\n * Render all fragments using the provided renderer.\n * @internal Use resolve() instead for public API.\n */\n public render(renderer: ContextRenderer) {\n return renderer.render(this.#fragments);\n }\n\n /**\n * Resolve context into AI SDK-ready format.\n *\n * - Initializes chat and branch if needed\n * - Loads message history from the graph (walking parent chain)\n * - Separates context fragments for system prompt\n * - Combines with pending messages\n *\n * @example\n * ```ts\n * const context = new ContextEngine({ store, chatId: 'chat-1', userId: 'user-1' })\n * .set(role('You are helpful'), user('Hello'));\n *\n * const { systemPrompt, messages } = await context.resolve({ renderer: new XmlRenderer() });\n * await generateText({ system: systemPrompt, messages });\n * ```\n */\n public async resolve(options: ResolveOptions): Promise<ResolveResult> {\n await this.#ensureInitialized();\n\n const systemPrompt = options.renderer.render(this.#fragments);\n\n // Get persisted messages from graph\n const messages: unknown[] = [];\n if (this.#branch?.headMessageId) {\n const chain = await this.#store.getMessageChain(\n this.#branch.headMessageId,\n );\n\n for (const msg of chain) {\n messages.push(message(msg.data as never).codec?.decode());\n }\n }\n\n // Add pending messages (not yet saved)\n for (const fragment of this.#pendingMessages) {\n const decoded = fragment.codec!.decode();\n messages.push(decoded);\n }\n\n return { systemPrompt, messages };\n }\n\n /**\n * Save pending messages to the graph.\n *\n * Each message is added as a node with parentId pointing to the previous message.\n * The branch head is updated to point to the last message.\n *\n * @example\n * ```ts\n * context.set(user('Hello'));\n * // AI responds...\n * context.set(assistant('Hi there!'));\n * await context.save(); // Persist to graph\n * ```\n */\n public async save(): Promise<void> {\n await this.#ensureInitialized();\n\n if (this.#pendingMessages.length === 0) {\n return;\n }\n\n let parentId = this.#branch!.headMessageId;\n const now = Date.now();\n\n // Add each pending message to the graph\n for 
(const fragment of this.#pendingMessages) {\n const messageData: MessageData = {\n id: fragment.id ?? crypto.randomUUID(),\n chatId: this.#chatId,\n parentId,\n name: fragment.name,\n type: fragment.type,\n data: fragment.codec!.encode(),\n createdAt: now,\n };\n\n await this.#store.addMessage(messageData);\n parentId = messageData.id;\n }\n\n // Update branch head to last message\n await this.#store.updateBranchHead(this.#branch!.id, parentId);\n this.#branch!.headMessageId = parentId;\n\n // Clear pending messages\n this.#pendingMessages = [];\n }\n\n /**\n * Estimate token count and cost for the full context.\n *\n * Includes:\n * - System prompt fragments (role, hints, etc.)\n * - Persisted chat messages (from store)\n * - Pending messages (not yet saved)\n *\n * @param modelId - Model ID (e.g., \"openai:gpt-4o\", \"anthropic:claude-3-5-sonnet\")\n * @param options - Optional settings\n * @returns Estimate result with token counts, costs, and per-fragment breakdown\n */\n public async estimate(\n modelId: Models,\n options: {\n renderer?: ContextRenderer;\n } = {},\n ): Promise<EstimateResult> {\n await this.#ensureInitialized();\n\n const renderer = options.renderer ?? new XmlRenderer();\n const registry = getModelsRegistry();\n await registry.load();\n\n const model = registry.get(modelId);\n if (!model) {\n throw new Error(\n `Model \"${modelId}\" not found. Call load() first or check model ID.`,\n );\n }\n\n const tokenizer = registry.getTokenizer(modelId);\n const fragmentEstimates: FragmentEstimate[] = [];\n\n // 1. Estimate context fragments (system prompt)\n for (const fragment of this.#fragments) {\n const rendered = renderer.render([fragment]);\n const tokens = tokenizer.count(rendered);\n const cost = (tokens / 1_000_000) * model.cost.input;\n fragmentEstimates.push({\n id: fragment.id,\n name: fragment.name,\n tokens,\n cost,\n });\n }\n\n // 2. Estimate persisted messages from store\n if (this.#branch?.headMessageId) {\n const chain = await this.#store.getMessageChain(\n this.#branch.headMessageId,\n );\n for (const msg of chain) {\n const content = String(msg.data);\n const tokens = tokenizer.count(content);\n const cost = (tokens / 1_000_000) * model.cost.input;\n fragmentEstimates.push({\n name: msg.name,\n id: msg.id,\n tokens,\n cost,\n });\n }\n }\n\n // 3. 
Estimate pending messages (not yet saved)\n for (const fragment of this.#pendingMessages) {\n const content = String(fragment.data);\n const tokens = tokenizer.count(content);\n const cost = (tokens / 1_000_000) * model.cost.input;\n fragmentEstimates.push({\n name: fragment.name,\n id: fragment.id,\n tokens,\n cost,\n });\n }\n\n // Calculate totals\n const totalTokens = fragmentEstimates.reduce((sum, f) => sum + f.tokens, 0);\n const totalCost = fragmentEstimates.reduce((sum, f) => sum + f.cost, 0);\n\n return {\n model: model.id,\n provider: model.provider,\n tokens: totalTokens,\n cost: totalCost,\n limits: {\n context: model.limit.context,\n output: model.limit.output,\n exceedsContext: totalTokens > model.limit.context,\n },\n fragments: fragmentEstimates,\n };\n }\n\n /**\n * Rewind to a specific message by ID.\n *\n * Creates a new branch from that message, preserving the original branch.\n * The new branch becomes active.\n *\n * @param messageId - The message ID to rewind to\n * @returns The new branch info\n *\n * @example\n * ```ts\n * context.set(user('What is 2 + 2?', { id: 'q1' }));\n * context.set(assistant('The answer is 5.', { id: 'wrong' })); // Oops!\n * await context.save();\n *\n * // Rewind to the question, creates new branch\n * const newBranch = await context.rewind('q1');\n *\n * // Now add correct answer on new branch\n * context.set(assistant('The answer is 4.'));\n * await context.save();\n * ```\n */\n public async rewind(messageId: string): Promise<BranchInfo> {\n await this.#ensureInitialized();\n\n // Verify the message exists\n const message = await this.#store.getMessage(messageId);\n if (!message) {\n throw new Error(`Message \"${messageId}\" not found`);\n }\n if (message.chatId !== this.#chatId) {\n throw new Error(`Message \"${messageId}\" belongs to a different chat`);\n }\n\n return this.#createBranchFrom(messageId, true);\n }\n\n /**\n * Create a checkpoint at the current position.\n *\n * A checkpoint is a named pointer to the current branch head.\n * Use restore() to return to this point later.\n *\n * @param name - Name for the checkpoint\n * @returns The checkpoint info\n *\n * @example\n * ```ts\n * context.set(user('I want to learn a new skill.'));\n * context.set(assistant('Would you like coding or cooking?'));\n * await context.save();\n *\n * // Save checkpoint before user's choice\n * const cp = await context.checkpoint('before-choice');\n * ```\n */\n public async checkpoint(name: string): Promise<CheckpointInfo> {\n await this.#ensureInitialized();\n\n if (!this.#branch?.headMessageId) {\n throw new Error('Cannot create checkpoint: no messages in conversation');\n }\n\n const checkpoint: CheckpointData = {\n id: crypto.randomUUID(),\n chatId: this.#chatId,\n name,\n messageId: this.#branch.headMessageId,\n createdAt: Date.now(),\n };\n\n await this.#store.createCheckpoint(checkpoint);\n\n return {\n id: checkpoint.id,\n name: checkpoint.name,\n messageId: checkpoint.messageId,\n createdAt: checkpoint.createdAt,\n };\n }\n\n /**\n * Restore to a checkpoint by creating a new branch from that point.\n *\n * @param name - Name of the checkpoint to restore\n * @returns The new branch info\n *\n * @example\n * ```ts\n * // User chose cooking, but wants to try coding path\n * await context.restore('before-choice');\n *\n * context.set(user('I want to learn coding.'));\n * context.set(assistant('Python is a great starting language!'));\n * await context.save();\n * ```\n */\n public async restore(name: string): Promise<BranchInfo> {\n await 
this.#ensureInitialized();\n\n const checkpoint = await this.#store.getCheckpoint(this.#chatId, name);\n if (!checkpoint) {\n throw new Error(\n `Checkpoint \"${name}\" not found in chat \"${this.#chatId}\"`,\n );\n }\n\n // Rewind to the checkpoint's message\n return this.rewind(checkpoint.messageId);\n }\n\n /**\n * Switch to a different branch by name.\n *\n * @param name - Branch name to switch to\n *\n * @example\n * ```ts\n * // List branches (via store)\n * const branches = await store.listBranches(context.chatId);\n * console.log(branches); // [{name: 'main', ...}, {name: 'main-v2', ...}]\n *\n * // Switch to original branch\n * await context.switchBranch('main');\n * ```\n */\n public async switchBranch(name: string): Promise<void> {\n await this.#ensureInitialized();\n\n const branch = await this.#store.getBranch(this.#chatId, name);\n if (!branch) {\n throw new Error(`Branch \"${name}\" not found in chat \"${this.#chatId}\"`);\n }\n\n await this.#store.setActiveBranch(this.#chatId, branch.id);\n this.#branch = { ...branch, isActive: true };\n this.#branchName = name;\n\n // Clear pending messages (they were for the old branch)\n this.#pendingMessages = [];\n }\n\n /**\n * Create a parallel branch from the current position (\"by the way\").\n *\n * Use this when you want to fork the conversation without leaving\n * the current branch. Common use case: user wants to ask another\n * question while waiting for the model to respond.\n *\n * Unlike rewind(), this method:\n * - Uses the current HEAD (no messageId needed)\n * - Does NOT switch to the new branch\n * - Keeps pending messages intact\n *\n * @returns The new branch info (does not switch to it)\n * @throws Error if no messages exist in the conversation\n *\n * @example\n * ```ts\n * // User asked a question, model is generating...\n * context.set(user('What is the weather?'));\n * await context.save();\n *\n * // User wants to ask something else without waiting\n * const newBranch = await context.btw();\n * // newBranch = { name: 'main-v2', ... }\n *\n * // Later, switch to the new branch and add the question\n * await context.switchBranch(newBranch.name);\n * context.set(user('Also, what time is it?'));\n * await context.save();\n * ```\n */\n public async btw(): Promise<BranchInfo> {\n await this.#ensureInitialized();\n\n if (!this.#branch?.headMessageId) {\n throw new Error('Cannot create btw branch: no messages in conversation');\n }\n\n return this.#createBranchFrom(this.#branch.headMessageId, false);\n }\n\n /**\n * Update metadata for the current chat.\n *\n * @param updates - Partial metadata to merge (title, metadata)\n *\n * @example\n * ```ts\n * await context.updateChat({\n * title: 'Coding Help Session',\n * metadata: { tags: ['python', 'debugging'] }\n * });\n * ```\n */\n public async updateChat(\n updates: Partial<Pick<ChatMeta, 'title' | 'metadata'>>,\n ): Promise<void> {\n await this.#ensureInitialized();\n\n const storeUpdates: Partial<Pick<ChatData, 'title' | 'metadata'>> = {};\n\n if (updates.title !== undefined) {\n storeUpdates.title = updates.title;\n }\n if (updates.metadata !== undefined) {\n // Merge with existing metadata\n storeUpdates.metadata = {\n ...this.#chatData?.metadata,\n ...updates.metadata,\n };\n }\n\n this.#chatData = await this.#store.updateChat(this.#chatId, storeUpdates);\n }\n\n /**\n * Consolidate context fragments (no-op for now).\n *\n * This is a placeholder for future functionality that merges context fragments\n * using specific rules. 
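(One possible rule, purely illustrative: merging several hint fragments into a single combined hint to keep the system prompt compact.) 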
Currently, it does nothing.\n *\n * @experimental\n */\n public consolidate(): void {\n return void 0;\n }\n\n /**\n * Inspect the full context state for debugging.\n * Returns a JSON-serializable object with context information.\n *\n * @param options - Inspection options (modelId and renderer required)\n * @returns Complete inspection data including estimates, rendered output, fragments, and graph\n *\n * @example\n * ```ts\n * const inspection = await context.inspect({\n * modelId: 'openai:gpt-4o',\n * renderer: new XmlRenderer(),\n * });\n * console.log(JSON.stringify(inspection, null, 2));\n *\n * // Or write to file for analysis\n * await fs.writeFile('context-debug.json', JSON.stringify(inspection, null, 2));\n * ```\n */\n public async inspect(options: InspectOptions): Promise<InspectResult> {\n await this.#ensureInitialized();\n\n const { renderer } = options;\n\n // Get token/cost estimation\n const estimateResult = await this.estimate(options.modelId, { renderer });\n\n // Render using provided renderer\n const rendered = renderer.render(this.#fragments);\n\n // Get persisted messages from store\n const persistedMessages: MessageData[] = [];\n if (this.#branch?.headMessageId) {\n const chain = await this.#store.getMessageChain(\n this.#branch.headMessageId,\n );\n persistedMessages.push(...chain);\n }\n\n // Get conversation graph\n const graph = await this.#store.getGraph(this.#chatId);\n\n return {\n estimate: estimateResult,\n rendered,\n fragments: {\n context: [...this.#fragments],\n pending: [...this.#pendingMessages],\n persisted: persistedMessages,\n },\n graph,\n meta: {\n chatId: this.#chatId,\n branch: this.#branchName,\n timestamp: Date.now(),\n },\n };\n }\n}\n", "import type { ContextFragment, FragmentData } from '../fragments.ts';\n\n/**\n * Domain knowledge fragment builders.\n *\n * These fragments capture domain-specific knowledge that can be injected\n * into AI prompts. 
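Each builder returns a plain ContextFragment value, which makes fragments easy to inspect and test; for instance (shape as implemented by term() below):\n *\n * ```ts\n * term('NPL', 'non-performing loan');\n * // => { name: 'term', data: { name: 'NPL', definition: 'non-performing loan' } }\n * ```\n *\n * 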
Use with renderers (XML, Markdown, TOML, TOON) to format.\n *\n * @example\n * ```ts\n * import { term, hint, guardrail } from '@deepagents/context';\n *\n * context.set(\n * term('NPL', 'non-performing loan'),\n * hint('Always filter by status'),\n * guardrail({ rule: 'Never expose PII' }),\n * );\n * ```\n */\n\n/**\n * Define domain-specific vocabulary and business terminology.\n *\n * Use this to define simple, direct mappings between business terms and their meanings.\n * The system will understand these terms when users mention them in queries.\n *\n * @param name - The business term or acronym to define\n * @param definition - What the term means in your domain\n *\n * @example\n * // Logistics/Transportation dataset\n * term(\"deadhead miles\", \"distance driven with empty truck between deliveries\")\n * term(\"dwell time\", \"total time a truck spends at a loading dock or warehouse\")\n * term(\"LTL\", \"less than truckload - shipment that doesn't fill entire truck\")\n *\n * @example\n * // Education/University dataset\n * term(\"matriculation\", \"students who completed enrollment and started classes\")\n * term(\"DFW rate\", \"percentage of students receiving D, F, or Withdrawal in a course\")\n * term(\"cohort\", \"group of students who entered the same semester or academic year\")\n *\n * @example\n * // Finance/Banking dataset\n * term(\"NPL\", \"non-performing loan - loan past due 90+ days\")\n * term(\"basis points\", \"one hundredth of a percentage point (1% = 100 bps)\")\n * term(\"AUM\", \"assets under management - total market value of client investments\")\n */\nexport function term(name: string, definition: string): ContextFragment {\n return {\n name: 'term',\n data: { name, definition },\n };\n}\n\n/**\n * Define behavioral rules and constraints that should always apply.\n *\n * Use this for business logic, data quality rules, or query preferences that should\n * be automatically applied to all relevant queries.\n *\n * @param text - The rule or constraint to follow (use imperative language)\n *\n * @example\n * // Manufacturing/Supply Chain dataset\n * hint(\"Always exclude work orders with status = 'simulation' from production metrics\")\n * hint(\"When calculating OEE (overall equipment effectiveness), only count scheduled production time\")\n * hint(\"Defect rates should be calculated per batch, not per individual unit, for consistency\")\n *\n * @example\n * // Real Estate/Property dataset\n * hint(\"Never include properties with listing_status = 'draft' in market analysis\")\n * hint(\"Always filter out duplicate MLS listings - use the earliest listing_date for each property_id\")\n * hint(\"Square footage comparisons must specify if including or excluding basement/garage\")\n *\n * @example\n * // Social Media/Content Platform dataset\n * hint(\"Engagement metrics should exclude bot accounts identified by is_verified_human = false\")\n * hint(\"View counts reset daily - always use cumulative_views for historical analysis\")\n * hint(\"Default content filters to published_status = 'public' unless analyzing drafts\")\n */\nexport function hint(text: string): ContextFragment {\n return {\n name: 'hint',\n data: text,\n };\n}\n\n/**\n * Define hard guardrails, safety rules, and compliance boundaries.\n *\n * Use this for \"never do\" rules, sensitive data handling, and required behaviors when\n * certain conditions occur. 
Guardrails should be explicit and action-oriented.\n *\n * @param input.rule - The guardrail or restriction to enforce\n * @param input.reason - Why this guardrail exists (compliance, security, performance)\n * @param input.action - What to do when this guardrail is triggered\n *\n * @example\n * // Healthcare dataset\n * guardrail({\n * rule: \"Never return PHI like SSN, MRN, or full address in query results\",\n * reason: \"HIPAA compliance\",\n * action: \"If asked, state that identifiable patient data cannot be shared; offer de-identified aggregates instead\"\n * })\n *\n * @example\n * // Finance dataset\n * guardrail({\n * rule: \"Block any query exposing employee-level compensation by name\",\n * reason: \"Confidential payroll data\",\n * action: \"Provide ranges grouped by department or level instead of individual salaries\"\n * })\n *\n * @example\n * // E-commerce dataset\n * guardrail({\n * rule: \"Warn when a query would scan more than 10 million rows; require a narrower date range\",\n * reason: \"Performance and cost control\",\n * action: \"Ask the user to add filters (recent timeframe, specific categories) before proceeding\"\n * })\n */\nexport function guardrail(input: {\n rule: string;\n reason?: string;\n action?: string;\n}): ContextFragment {\n return {\n name: 'guardrail',\n data: {\n rule: input.rule,\n ...(input.reason && { reason: input.reason }),\n ...(input.action && { action: input.action }),\n },\n };\n}\n\n/**\n * Define a rich understanding of a single concept using metaphors and explanations.\n *\n * Use this when a simple term definition isn't enough - when you need to convey deeper\n * understanding about how to think about and calculate a metric or concept.\n *\n * @param input.concept - The concept being explained\n * @param input.explanation - A metaphor or detailed explanation\n * @param input.therefore - Optional actionable instruction based on this understanding\n *\n * @example\n * // Gaming/Entertainment dataset\n * explain({\n * concept: \"daily active users to monthly active users ratio\",\n * explanation: \"like measuring how many club members visit daily vs just once a month - shows stickiness\",\n * therefore: \"Calculate as DAU / MAU, where higher ratio (closer to 1) means more engaged user base\"\n * })\n *\n * @example\n * // HR/Employee Management dataset\n * explain({\n * concept: \"time to fill\",\n * explanation: \"like measuring how long a house sits on the market - from posting job to accepting offer\",\n * therefore: \"Calculate as days between job_posted_date and offer_accepted_date, exclude cancelled requisitions\"\n * })\n *\n * @example\n * // Telecommunications dataset\n * explain({\n * concept: \"network congestion ratio\",\n * explanation: \"like rush hour traffic density - measures actual usage vs total capacity at peak times\",\n * therefore: \"Calculate as (peak_hour_bandwidth_used / total_bandwidth_capacity) during busiest hour of day\"\n * })\n */\nexport function explain(input: {\n concept: string;\n explanation: string;\n therefore?: string;\n}): ContextFragment {\n return {\n name: 'explain',\n data: {\n concept: input.concept,\n explanation: input.explanation,\n ...(input.therefore && { therefore: input.therefore }),\n },\n };\n}\n\n/**\n * Define concrete examples of question \u2192 answer pairs.\n *\n * Use this for few-shot learning - show the system exactly how to translate\n * specific types of questions. 
Great for establishing patterns.\n *\n * @param input.question - The natural language question or request\n * @param input.answer - The correct answer that responds to the question\n * @param input.note - Optional note or explanation about the example\n *\n * @example\n * // Energy/Utilities dataset\n * example({\n * question: \"show me peak demand hours for the last week\",\n * answer: \"SELECT DATE_TRUNC('hour', reading_timestamp) as hour, MAX(consumption_kwh) as peak_demand FROM meter_readings WHERE reading_timestamp >= CURRENT_DATE - INTERVAL '7 days' GROUP BY hour ORDER BY peak_demand DESC LIMIT 10\"\n * })\n *\n * @example\n * // Agriculture/Farm Management dataset\n * example({\n * question: \"what is the average yield per acre by crop type this season\",\n * answer: \"SELECT crop_type, AVG(harvest_quantity / field_acres) as yield_per_acre FROM harvests WHERE harvest_date >= '2024-01-01' GROUP BY crop_type ORDER BY yield_per_acre DESC\"\n * })\n *\n * @example\n * // Travel/Hospitality dataset\n * example({\n * question: \"show me hotel occupancy rate for this month\",\n * answer: \"SELECT hotel_name, (SUM(occupied_rooms) / SUM(total_rooms)) * 100 as occupancy_rate FROM daily_occupancy WHERE date >= DATE_TRUNC('month', CURRENT_DATE) GROUP BY hotel_id, hotel_name ORDER BY occupancy_rate DESC\",\n * note: \"Occupancy rate is a percentage - multiply by 100 for readable output\"\n * })\n */\nexport function example(input: {\n question: string;\n answer: string;\n note?: string;\n}): ContextFragment {\n return {\n name: 'example',\n data: {\n question: input.question,\n answer: input.answer,\n ...(input.note && { note: input.note }),\n },\n };\n}\n\n/**\n * Define when and what to ask for clarification.\n *\n * Use this to handle ambiguous terms or situations where the system should\n * proactively ask the user for more information.\n *\n * @param input.when - The condition or trigger that should prompt clarification\n * @param input.ask - The question to ask the user\n * @param input.reason - Why this clarification is necessary\n *\n * @example\n * // Marketing/Advertising dataset\n * clarification({\n * when: \"user asks for 'conversion rate'\",\n * ask: \"Which conversion: click-to-lead, lead-to-opportunity, or opportunity-to-customer?\",\n * reason: \"Conversion rate means different things at each funnel stage - need to specify which metric\"\n * })\n *\n * @example\n * // Food Delivery dataset\n * clarification({\n * when: \"user asks about 'delivery time'\",\n * ask: \"Do you mean estimated time at order, actual delivery time, or time from kitchen to door?\",\n * reason: \"Multiple time metrics exist - estimated vs actual impacts customer satisfaction differently\"\n * })\n *\n * @example\n * // Fitness/Gym Management dataset\n * clarification({\n * when: \"user mentions 'active members'\",\n * ask: \"Do you mean paid memberships or members who actually visited in last 30 days?\",\n * reason: \"Many paid members don't use facilities - different metrics for revenue vs utilization\"\n * })\n */\nexport function clarification(input: {\n when: string;\n ask: string;\n reason: string;\n}): ContextFragment {\n return {\n name: 'clarification',\n data: {\n when: input.when,\n ask: input.ask,\n reason: input.reason,\n },\n };\n}\n\n/**\n * Define multi-step analytical processes that require sequential logic.\n *\n * Use this for complex analytical tasks that require multiple steps or specific\n * methodologies. 
Workflows teach the system HOW to approach a type of analysis.\n *\n * @param input.task - Name of the analytical task\n * @param input.steps - Sequential steps to execute\n * @param input.triggers - Optional phrases that should activate this workflow\n * @param input.notes - Optional additional context, warnings, or guidance\n *\n * @example\n * // Insurance dataset\n * workflow({\n * task: \"Claims Loss Ratio Analysis\",\n * triggers: [\"loss ratio\", \"claims ratio\", \"underwriting performance\"],\n * steps: [\n * \"Calculate total claims paid for each policy period\",\n * \"Calculate total premiums earned for same period\",\n * \"Compute loss ratio as (claims_paid / premiums_earned) * 100\",\n * \"Segment by policy type, geography, and underwriter\",\n * \"Identify policies with loss ratio > 100% (losing money)\",\n * \"Calculate trend over time using rolling 12-month windows\"\n * ],\n * notes: \"Use incurred date for claims, not paid date. Exclude reinsurance recoveries from claims total.\"\n * })\n *\n * @example\n * // Media/Publishing dataset\n * workflow({\n * task: \"Content Performance Funnel\",\n * triggers: [\"content funnel\", \"engagement funnel\", \"content performance\"],\n * steps: [\n * \"Count total impressions (articles shown) per content piece\",\n * \"Count click-throughs (articles opened)\",\n * \"Count scroll depth > 50% (meaningful engagement)\",\n * \"Count shares, comments, or saves (viral actions)\",\n * \"Calculate conversion rate at each funnel stage\",\n * \"Identify top-performing content by final conversion rate\"\n * ],\n * notes: \"Requires multiple event types. Join events table multiple times or use conditional aggregation.\"\n * })\n *\n * @example\n * // Sports Analytics dataset\n * workflow({\n * task: \"Player Performance Rating Calculation\",\n * triggers: [\"player rating\", \"performance score\", \"player analytics\"],\n * steps: [\n * \"Aggregate per-game stats: points, assists, rebounds, turnovers\",\n * \"Calculate efficiency metrics: shooting percentage, plus/minus\",\n * \"Normalize each metric using z-scores vs league average\",\n * \"Apply position-specific weights to each metric\",\n * \"Combine weighted scores into overall performance rating (0-100)\",\n * \"Rank players within position group and overall\"\n * ],\n * notes: \"Requires league-wide statistics for normalization. 
Update weights each season based on game trends.\"\n * })\n */\nexport function workflow(input: {\n task: string;\n steps: string[];\n triggers?: string[];\n notes?: string;\n}): ContextFragment {\n return {\n name: 'workflow',\n data: {\n task: input.task,\n steps: input.steps,\n ...(input.triggers?.length && { triggers: input.triggers }),\n ...(input.notes && { notes: input.notes }),\n },\n };\n}\n\n/**\n * Define data quirks, edge cases, or database-specific issues and their workarounds.\n *\n * Use this to document weird data patterns, database limitations, or special handling\n * required for specific scenarios.\n *\n * @param input.issue - Description of the quirk, edge case, or problem\n * @param input.workaround - How to handle or work around this issue\n *\n * @example\n * // Government/Public Services dataset\n * quirk({\n * issue: \"Citizen IDs contain leading zeros but are stored as integers, losing the zeros\",\n * workaround: \"Always cast to VARCHAR and use LPAD(citizen_id::VARCHAR, 10, '0') to restore leading zeros\"\n * })\n *\n * @example\n * // Aviation dataset\n * quirk({\n * issue: \"Flight times crossing midnight show as negative duration (landing before takeoff)\",\n * workaround: \"Add 24 hours when calculated duration < 0: CASE WHEN duration < 0 THEN duration + INTERVAL '24 hours' ELSE duration END\"\n * })\n *\n * @example\n * // Automotive/Dealership dataset\n * quirk({\n * issue: \"VIN numbers with letter 'O' were incorrectly entered as zero '0' in legacy data\",\n * workaround: \"When searching by VIN, use REPLACE(vin, '0', 'O') or fuzzy matching to handle both cases\"\n * })\n */\nexport function quirk(input: {\n issue: string;\n workaround: string;\n}): ContextFragment {\n return {\n name: 'quirk',\n data: {\n issue: input.issue,\n workaround: input.workaround,\n },\n };\n}\n\n/**\n * Define style preferences and coding standards.\n *\n * Use this to enforce consistent formatting, naming conventions, and best practices\n * specific to your team or organization.\n *\n * @param input.prefer - Preferred style or pattern\n * @param input.never - Optional anti-pattern to avoid\n * @param input.always - Optional rule that must always be followed\n *\n * @example\n * // Non-profit/Charity dataset\n * styleGuide({\n * prefer: \"Use donor-centric language in column aliases: 'donor_name' not 'customer_name'\",\n * never: \"Never expose internal donor IDs in external reports - use public gift IDs\",\n * always: \"Always include fiscal year in date-based aggregations (FY starts July 1)\"\n * })\n *\n * @example\n * // Legal/Law Firm dataset\n * styleGuide({\n * prefer: \"Use billable_hours with 2 decimal precision for accurate client billing\",\n * never: \"Never include attorney_rate in queries visible to paralegals - confidential data\",\n * always: \"Always filter by matter_status = 'open' unless specifically analyzing closed cases\"\n * })\n *\n * @example\n * // Inventory/Warehouse dataset\n * styleGuide({\n * prefer: \"Use location_id in joins rather than location_name (duplicates exist across warehouses)\",\n * never: \"Never aggregate inventory without grouping by warehouse_id first\",\n * always: \"Always use inventory_on_hand - inventory_reserved for available stock calculations\"\n * })\n */\nexport function styleGuide(input: {\n prefer: string;\n never?: string;\n always?: string;\n}): ContextFragment {\n return {\n name: 'styleGuide',\n data: {\n prefer: input.prefer,\n ...(input.never && { never: input.never }),\n ...(input.always && { always: input.always 
}),\n },\n };\n}\n\n/**\n * Define comparisons between related concepts through real-world analogies.\n *\n * Use this to teach relational understanding between concepts by drawing comparisons\n * to familiar real-world scenarios.\n *\n * @param input.concepts - Array of related concepts to compare\n * @param input.relationship - The comparison/analogy using real-world examples\n * @param input.insight - Optional key insight the analogy reveals\n * @param input.therefore - Optional actionable instruction\n * @param input.pitfall - Optional common mistake to avoid\n *\n * @example\n * // E-commerce dataset\n * analogy({\n * concepts: [\"cart abandonment\", \"browse abandonment\"],\n * relationship: \"Cart abandonment is like leaving items at a checkout counter, browse abandonment is like window shopping without picking anything up\",\n * insight: \"Cart abandonment shows purchase intent (added to cart), browse abandonment shows only interest\",\n * therefore: \"Prioritize cart abandonment recovery campaigns - higher conversion potential than browse\",\n * pitfall: \"Don't combine both into generic 'abandonment rate' - they need different marketing strategies\"\n * })\n *\n * @example\n * // SaaS dataset\n * analogy({\n * concepts: [\"logo churn\", \"revenue churn\"],\n * relationship: \"Logo churn is like counting how many customers left the store, revenue churn is how much money walked out\",\n * insight: \"Losing 10 small customers (high logo churn) might hurt less than losing 1 enterprise customer (high revenue churn)\",\n * therefore: \"Always report both metrics - logo churn for customer satisfaction, revenue churn for financial health\",\n * pitfall: \"Don't use logo churn to predict revenue impact - customer size distribution matters\"\n * })\n *\n * @example\n * // Healthcare dataset\n * analogy({\n * concepts: [\"incidence\", \"prevalence\"],\n * relationship: \"Incidence is like new house sales this month, prevalence is total houses currently occupied\",\n * insight: \"Incidence measures new cases over time, prevalence measures all existing cases at a point in time\",\n * therefore: \"For tracking disease outbreaks use incidence rate, for resource planning use prevalence\",\n * pitfall: \"Don't sum incidence rates across time periods - it's a rate not a count\"\n * })\n */\nexport function analogy(input: {\n concepts: string[];\n relationship: string;\n insight?: string;\n therefore?: string;\n pitfall?: string;\n}): ContextFragment {\n return {\n name: 'analogy',\n data: {\n concepts: input.concepts,\n relationship: input.relationship,\n ...(input.insight && { insight: input.insight }),\n ...(input.therefore && { therefore: input.therefore }),\n ...(input.pitfall && { pitfall: input.pitfall }),\n },\n };\n}\n\n/**\n * Map business terms directly to expressions or fragments.\n *\n * Use this to teach the system how to CALCULATE or QUERY specific business concepts.\n * The system will substitute these patterns when users mention the term.\n *\n * **Glossary vs Alias:**\n * - `alias` = user vocabulary \u2192 table/column name (\"the big table\" \u2192 \"orders table\")\n * - `glossary` = business term \u2192 SQL expression (\"revenue\" \u2192 \"SUM(orders.total_amount)\")\n *\n * In short: alias renames, glossary computes.\n *\n * @param entries - Record mapping business terms to their expressions\n *\n * @example\n * glossary({\n * \"revenue\": \"SUM(orders.total_amount)\",\n * \"average order value\": \"AVG(orders.total_amount)\",\n * \"active user\": \"last_login > NOW() - 
INTERVAL '30 days'\",\n * \"churned\": \"status = 'churned'\",\n * \"power user\": \"order_count > 10\",\n * \"net revenue\": \"SUM(orders.total_amount) - SUM(refunds.amount)\",\n * })\n */\nexport function glossary(entries: Record<string, string>): ContextFragment {\n return {\n name: 'glossary',\n data: Object.entries(entries).map(([term, expression]) => ({\n term,\n expression,\n })),\n };\n}\n\n/**\n * Create a role fragment for system prompt instructions.\n */\nexport function role(content: string): ContextFragment {\n return {\n name: 'role',\n data: content,\n };\n}\n\n/**\n * Define a guiding principle that shapes agent behavior.\n *\n * Use this to establish high-level rules for decision-making, reasoning, or domain behavior.\n * Principles can contain policies (specific rules that implement the principle).\n *\n * @param input.title - Name/title of the principle\n * @param input.description - What this principle means and why it matters\n * @param input.policies - Optional specific rules that implement this principle\n *\n * @example\n * // Logical dependencies principle\n * principle({\n * title: \"Logical dependencies and constraints\",\n * description: \"Analyze intended actions against factors in order of importance\",\n * policies: [\n * \"Policy-based rules, mandatory prerequisites, and constraints\",\n * \"Order of operations: Ensure actions don't prevent subsequent necessary actions\",\n * \"Other prerequisites (information and/or actions needed)\",\n * \"Explicit user constraints or preferences\"\n * ]\n * })\n *\n * @example\n * // Risk assessment principle\n * principle({\n * title: \"Risk assessment\",\n * description: \"Evaluate consequences before taking action\",\n * policies: [\n * \"For exploratory tasks, missing optional parameters is LOW risk\",\n * \"Prefer calling tools with available information over asking the user\"\n * ]\n * })\n *\n * @example\n * // Design principle\n * principle({\n * title: \"Separation of concerns\",\n * description: \"Each module should have a single, well-defined responsibility\",\n * policies: [\n * \"Data access logic stays in repository layer\",\n * \"Business rules stay in service layer\",\n * \"Presentation logic stays in controller/view layer\"\n * ]\n * })\n */\nexport function principle(input: {\n title: string;\n description: string;\n policies?: FragmentData[];\n}): ContextFragment {\n return {\n name: 'principle',\n data: {\n title: input.title,\n description: input.description,\n ...(input.policies?.length && { policies: input.policies }),\n },\n };\n}\n\n/**\n * Define a policy rule, optionally with prerequisites or nested sub-policies.\n *\n * Policies can be used in two ways:\n * 1. Prerequisite rules: \"must do X before Y\" using the `before` parameter\n * 2. 
Sub-policies: nested rules within a principle using the `policies` parameter\n *\n * Policies differ from guardrails: policies are prerequisites (do this first),\n * guardrails are prohibitions (never do this).\n *\n * @param input.rule - The policy rule to enforce\n * @param input.before - What action this is a prerequisite for (optional for sub-policies)\n * @param input.reason - Why this rule matters\n * @param input.policies - Nested sub-policies for hierarchical structure\n *\n * @example\n * // Prerequisite rule with \"before\"\n * policy({\n * rule: \"Validate SQL syntax\",\n * before: \"executing any query against the database\",\n * reason: \"Catches errors early and allows correction before execution\"\n * })\n *\n * @example\n * // Sub-policy within a principle (no \"before\" needed)\n * policy({ rule: \"Policy-based rules, mandatory prerequisites, and constraints.\" })\n *\n * @example\n * // Nested sub-policies (hierarchical structure like 1.2 \u2192 1.2.1)\n * policy({\n * rule: \"Order of operations: Ensure taking an action does not prevent a subsequent necessary action.\",\n * policies: [\n * \"The user may request actions in a random order, but you may need to reorder operations.\",\n * ],\n * })\n */\nexport function policy(input: {\n rule: string;\n before?: string;\n reason?: string;\n policies?: FragmentData[];\n}): ContextFragment {\n return {\n name: 'policy',\n data: {\n rule: input.rule,\n ...(input.before && { before: input.before }),\n ...(input.reason && { reason: input.reason }),\n ...(input.policies?.length && { policies: input.policies }),\n },\n };\n}\n", "import type { ContextFragment } from '../fragments.ts';\n\n/**\n * User-specific fragment builders.\n *\n * These fragments capture user context, preferences, and personalization data\n * that can be injected into AI prompts to tailor responses.\n *\n * @example\n * ```ts\n * import { identity, persona, preference } from '@deepagents/context';\n *\n * context.set(\n * identity({ name: 'John', role: 'VP of Sales' }),\n * persona({ name: 'Freya', role: 'Data Assistant', tone: 'professional' }),\n * preference('date format', 'YYYY-MM-DD'),\n * );\n * ```\n */\n\n/**\n * Define the user's identity including name and/or role.\n *\n * Use this to capture who the user is and what lens they view data through.\n * Helps tailor explanations, terminology, and focus areas.\n *\n * @param input.name - The user's name (optional)\n * @param input.role - The user's role or position (optional)\n *\n * @example\n * identity({ name: \"John\", role: \"VP of Sales\" })\n * identity({ role: \"Data analyst in the marketing team\" })\n * identity({ name: \"Sarah\" })\n * identity({ role: \"Finance manager focused on cost optimization\" })\n */\nexport function identity(input: {\n name?: string;\n role?: string;\n}): ContextFragment {\n return {\n name: 'identity',\n data: {\n ...(input.name && { name: input.name }),\n ...(input.role && { role: input.role }),\n },\n };\n}\n\n/**\n * Define an AI persona with a name, role, objective, and communication tone.\n *\n * Use this to customize the assistant's identity and what it should accomplish.\n *\n * @param input.name - The persona's name\n * @param input.role - The persona's expertise/identity (what they are)\n * @param input.objective - What the persona should accomplish (the goal)\n * @param input.tone - The communication style (e.g., friendly, professional, concise)\n *\n * @example\n * persona({ name: \"DataBot\", role: \"SQL Expert\", objective: \"Generate accurate SQL queries 
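// End-to-end sketch combining role(), principle(), and policy(). Nested
// policies are plain strings, matching the JSDoc examples above; whether
// FragmentData also accepts nested policy() fragments is not shown in this
// file, so treat that as an open question. Import path assumed as elsewhere.
import { role, principle, policy } from '@deepagents/context';

context.set(
  role('You are a careful SQL analyst for the finance team.'),
  principle({
    title: 'Risk assessment',
    description: 'Evaluate consequences before taking action',
    policies: [
      'For exploratory tasks, missing optional parameters is LOW risk',
      'Prefer calling tools with available information over asking the user',
    ],
  }),
  policy({
    rule: 'Validate SQL syntax',
    before: 'executing any query against the database',
    reason: 'Catches errors early and allows correction before execution',
  }),
);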
from natural language\" })\n * persona({ name: \"QueryMaster\", role: \"Database Analyst\", objective: \"Help users explore database schemas\" })\n */\nexport function persona(input: {\n name: string;\n role?: string;\n objective?: string;\n tone?: string;\n}): ContextFragment {\n return {\n name: 'persona',\n data: {\n name: input.name,\n ...(input.role && { role: input.role }),\n ...(input.objective && { objective: input.objective }),\n ...(input.tone && { tone: input.tone }),\n },\n };\n}\n\n/**\n * Define user-specific term meanings and vocabulary.\n *\n * Use this when the user has their own definitions for terms that might\n * differ from standard or domain definitions. Like `term()` but personal.\n *\n * @param term - The term the user uses\n * @param meaning - What the user means by this term\n *\n * @example\n * alias(\"revenue\", \"gross revenue before deductions, not net\")\n * alias(\"active users\", \"users who logged in within the last 30 days\")\n * alias(\"the big table\", \"the orders table\")\n * alias(\"Q4\", \"October through December, not fiscal Q4\")\n */\nexport function alias(term: string, meaning: string): ContextFragment {\n return {\n name: 'alias',\n data: { term, meaning },\n };\n}\n\n/**\n * Define how the user prefers results presented.\n *\n * Use this to capture output formatting, style, and behavioral preferences\n * that should apply to all interactions with this user.\n *\n * @param aspect - What aspect of output this preference applies to\n * @param value - The user's preference\n *\n * @example\n * preference(\"date format\", \"YYYY-MM-DD\")\n * preference(\"output style\", \"tables over charts unless trend data\")\n * preference(\"detail level\", \"always show the SQL query in responses\")\n * preference(\"row limit\", \"default to 50 rows unless I ask for more\")\n * preference(\"explanation style\", \"brief and to the point\")\n */\nexport function preference(aspect: string, value: string): ContextFragment {\n return {\n name: 'preference',\n data: { aspect, value },\n };\n}\n\n/**\n * Define the user's current working focus or project.\n *\n * Use this to capture temporary context that helps inform defaults,\n * assumptions, and suggestions. Should be updated as focus changes.\n *\n * @param description - What the user is currently working on\n *\n * @example\n * userContext(\"Preparing Q4 board presentation\")\n * userContext(\"Investigating drop in signups last week\")\n * userContext(\"Working on EMEA regional analysis for strategy meeting\")\n * userContext(\"Debugging discrepancy in revenue numbers\")\n */\nexport function userContext(description: string): ContextFragment {\n return {\n name: 'userContext',\n data: description,\n };\n}\n\n/**\n * Record a correction the user made to previous understanding.\n *\n * Use this when the user corrects a misunderstanding about data, columns,\n * or business logic. 
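// Contrast sketch: alias() renames (user vocabulary -> schema name), while
// glossary() computes (business term -> SQL expression), per the
// "Glossary vs Alias" note above. Import path assumed as elsewhere.
import { alias, glossary } from '@deepagents/context';

context.set(
  alias('the big table', 'the orders table'), // rename only
  glossary({ revenue: 'SUM(orders.total_amount)' }), // substitutable SQL
);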
Prevents repeating the same mistake.\n *\n * @param subject - What was misunderstood\n * @param clarification - The correct understanding\n *\n * @example\n * correction(\"status column\", \"1 = active, 0 = inactive, not boolean true/false\")\n * correction(\"orders table\", \"Use orders_v2, not the deprecated legacy_orders table\")\n * correction(\"date field\", \"order_date is when order was placed, ship_date is when shipped\")\n * correction(\"revenue calculation\", \"Must exclude refunds and chargebacks\")\n */\nexport function correction(\n subject: string,\n clarification: string,\n): ContextFragment {\n return {\n name: 'correction',\n data: { subject, clarification },\n };\n}\n", "/**\n * Guardrail system for real-time stream interception and self-correction.\n *\n * Guardrails inspect streaming parts and can either:\n * - `pass(part)`: Allow the part through (optionally modified)\n * - `fail(feedback)`: Abort the stream and retry with self-correction feedback\n *\n * When a guardrail fails, the accumulated text is combined with the feedback\n * to create a \"self-correction\" that appears as if the agent caught itself.\n *\n * @example\n * ```typescript\n * const safetyGuardrail: Guardrail = {\n * id: 'safety',\n * name: 'Safety Filter',\n * handle: (part, context) => {\n * if (part.type === 'text-delta' && part.delta.includes('unsafe')) {\n * return fail('I should not provide this information. Let me help differently.');\n * }\n * if (part.type === 'error' && context.availableTools.length > 0) {\n * return fail(`Try using: ${context.availableTools.join(', ')}`);\n * }\n * return pass(part);\n * },\n * };\n *\n * const agent = agent({\n * name: 'safe_assistant',\n * context,\n * model,\n * guardrails: [safetyGuardrail],\n * });\n * ```\n */\nimport type { InferUIMessageChunk, UIDataTypes, UIMessage } from 'ai';\n\n/**\n * Type alias for stream parts from the AI SDK's UI message stream.\n * This is the full chunk type that includes text-delta, error, reasoning-delta, etc.\n */\nexport type StreamPart = InferUIMessageChunk<\n UIMessage<unknown, UIDataTypes, Record<string, never>>\n>;\n\n/**\n * Result of a guardrail check.\n * - `pass`: The part is allowed through (optionally modified)\n * - `fail`: The stream should abort and retry with feedback\n */\nexport type GuardrailResult =\n | { type: 'pass'; part: StreamPart }\n | { type: 'fail'; feedback: string };\n\n/**\n * Context passed to guardrails during stream processing.\n * Provides information about the agent's capabilities.\n */\nexport interface GuardrailContext {\n /** Names of tools available to the agent */\n availableTools: string[];\n}\n\n/**\n * A guardrail that inspects streaming parts.\n */\nexport interface Guardrail {\n /** Unique identifier for this guardrail */\n id: string;\n /** Human-readable name for logging/debugging */\n name: string;\n /**\n * Handle a stream part.\n *\n * @param part - The full stream part to inspect (text-delta, error, etc.)\n * @param context - Context with agent capabilities (available tools, etc.)\n * @returns Either `pass(part)` to allow or `fail(feedback)` to abort and retry\n */\n handle: (part: StreamPart, context: GuardrailContext) => GuardrailResult;\n}\n\n/**\n * Configuration for guardrail behavior.\n */\nexport interface GuardrailConfig {\n /** Maximum number of retry attempts when guardrails fail (default: 3) */\n maxRetries?: number;\n}\n\n/**\n * Allow a part to pass through the guardrail.\n *\n * @param part - The part to pass (can be modified from original)\n * @returns A 
pass result\n *\n * @example\n * ```typescript\n * handle: (part) => {\n * // Pass through unchanged\n * return pass(part);\n *\n * // Or modify text-delta before passing\n * if (part.type === 'text-delta') {\n * return pass({ ...part, delta: part.delta.replace('bad', 'good') });\n * }\n * return pass(part);\n * }\n * ```\n */\nexport function pass(part: StreamPart): GuardrailResult {\n return { type: 'pass', part };\n}\n\n/**\n * Fail the guardrail check and trigger a retry with feedback.\n *\n * The feedback will be appended to the accumulated assistant text,\n * making it appear as if the agent \"caught itself\" and self-corrected.\n *\n * @param feedback - The self-correction feedback to append\n * @returns A fail result\n *\n * @example\n * ```typescript\n * handle: (part) => {\n * if (part.type === 'text-delta' && part.delta.includes('hack')) {\n * return fail('I should not provide hacking instructions. Let me suggest ethical alternatives.');\n * }\n * if (part.type === 'error') {\n * return fail('An error occurred. Let me try a different approach.');\n * }\n * return pass(part);\n * }\n * ```\n */\nexport function fail(feedback: string): GuardrailResult {\n return { type: 'fail', feedback };\n}\n\n/**\n * Run a part through a chain of guardrails sequentially.\n *\n * @param part - The stream part to check\n * @param guardrails - Array of guardrails to run in order\n * @param context - Context with agent capabilities (available tools, etc.)\n * @returns The final result after all guardrails pass, or the first failure\n */\nexport function runGuardrailChain(\n part: StreamPart,\n guardrails: Guardrail[],\n context: GuardrailContext,\n): GuardrailResult {\n let currentPart = part;\n\n for (const guardrail of guardrails) {\n const result = guardrail.handle(currentPart, context);\n\n if (result.type === 'fail') {\n return result;\n }\n\n // Pass the (possibly modified) part to the next guardrail\n currentPart = result.part;\n }\n\n return pass(currentPart);\n}\n", "/**\n * Error Recovery Guardrail\n *\n * Intercepts API-level errors (like tool validation failures) and triggers\n * self-correction retries. 
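// Sketch of a part-modifying guardrail run through runGuardrailChain().
// Only Guardrail, StreamPart, pass/fail, and the 'text-delta' part shape
// come from the definitions above; the redaction rule itself is illustrative.
declare const part: StreamPart; // an incoming chunk from the UI stream

const redactEmails: Guardrail = {
  id: 'redact-emails',
  name: 'Email Redactor',
  handle: (p) => {
    if (p.type === 'text-delta') {
      // pass() may return a modified copy of the part
      const delta = p.delta.replace(/[^\s@]+@[^\s@]+\.[^\s@]+/g, '[redacted]');
      return pass({ ...p, delta });
    }
    return pass(p);
  },
};

// Guardrails run in order; the first fail() short-circuits the chain.
const result = runGuardrailChain(part, [redactEmails], {
  availableTools: ['sql'],
});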
This is essential for models like gpt-oss-20b\n * that may hallucinate tools that don't exist.\n *\n * Catches errors like:\n * - \"Tool choice is none, but model called a tool\"\n * - \"attempted to call tool 'X' which was not in request.tools\"\n * - \"Failed to parse tool call arguments as JSON\" (malformed JSON)\n * - Parsing failures\n *\n * @example\n * ```typescript\n * const myAgent = agent({\n * name: 'my_agent',\n * model: groq('openai/gpt-oss-20b'),\n * tools: { bash, sql },\n * guardrails: [errorRecoveryGuardrail],\n * maxGuardrailRetries: 3,\n * });\n * ```\n */\nimport chalk from 'chalk';\n\nimport type { Guardrail } from '../guardrail.ts';\nimport { fail, pass } from '../guardrail.ts';\n\nexport const errorRecoveryGuardrail: Guardrail = {\n id: 'error-recovery',\n name: 'API Error Recovery',\n handle: (part, context) => {\n // Only handle error parts\n if (part.type !== 'error') {\n return pass(part);\n }\n\n const errorText = (part as { errorText?: string }).errorText || '';\n const prefix = chalk.bold.magenta('[ErrorRecovery]');\n\n console.log(\n `${prefix} ${chalk.red('Caught error:')} ${chalk.dim(errorText.slice(0, 150))}`,\n );\n\n // Helper to log and return fail\n const logAndFail = (pattern: string, feedback: string) => {\n console.log(\n `${prefix} ${chalk.yellow('Pattern:')} ${chalk.cyan(pattern)}`,\n );\n console.log(\n `${prefix} ${chalk.green('Feedback:')} ${chalk.dim(feedback.slice(0, 80))}...`,\n );\n return fail(feedback);\n };\n\n // Pattern: No tools available but model tried to call one\n if (errorText.includes('Tool choice is none')) {\n if (context.availableTools.length > 0) {\n return logAndFail(\n 'Tool choice is none',\n `I tried to call a tool that doesn't exist. Available tools: ${context.availableTools.join(', ')}. Let me use one of these instead.`,\n );\n }\n return logAndFail(\n 'Tool choice is none (no tools)',\n 'I tried to call a tool, but no tools are available. Let me respond with plain text instead.',\n );\n }\n\n // Pattern: Tool not found in request.tools\n if (\n errorText.includes('not in request.tools') ||\n (errorText.includes('tool') && errorText.includes('not found'))\n ) {\n const toolMatch = errorText.match(/tool '([^']+)'/);\n const toolName = toolMatch ? toolMatch[1] : 'unknown';\n if (context.availableTools.length > 0) {\n return logAndFail(\n `Unregistered tool: ${toolName}`,\n `I tried to call \"${toolName}\" but it doesn't exist. Available tools: ${context.availableTools.join(', ')}. Let me use one of these instead.`,\n );\n }\n return logAndFail(\n `Unregistered tool: ${toolName} (no tools)`,\n `I tried to call \"${toolName}\" but no tools are available. Let me respond with plain text instead.`,\n );\n }\n\n // Pattern: Failed to parse tool arguments as JSON\n if (\n errorText.includes('Failed to parse tool call arguments') ||\n errorText.includes('parse tool call') ||\n errorText.includes('invalid JSON')\n ) {\n return logAndFail(\n 'Malformed JSON arguments',\n 'I generated malformed JSON for the tool arguments. Let me format my tool call properly with valid JSON.',\n );\n }\n\n // Pattern: Parsing failed (generic)\n if (errorText.includes('Parsing failed')) {\n return logAndFail(\n 'Parsing failed',\n 'My response format was invalid. Let me try again with a properly formatted response.',\n );\n }\n\n // Unknown error - still try to recover\n return logAndFail(\n 'Unknown error',\n `An error occurred: ${errorText.slice(0, 100)}. 
Let me try a different approach.`,\n );\n },\n};\n", "import { existsSync } from 'fs';\nimport { type CustomCommand, defineCommand } from 'just-bash';\nimport spawn from 'nano-spawn';\nimport * as path from 'path';\n\nexport interface BinaryBridgeConfig {\n /** Command name in the sandbox (what the agent types) */\n name: string;\n /** Actual binary path on the host system (defaults to name) */\n binaryPath?: string;\n /** Optional regex to restrict allowed arguments for security */\n allowedArgs?: RegExp;\n}\n\nexport type BinaryBridgeInput = string | BinaryBridgeConfig;\n\n/**\n * Creates custom commands that bridge to real system binaries.\n *\n * This allows just-bash sandboxed environments to execute specific\n * host system binaries while maintaining control over which binaries\n * are accessible.\n *\n * @example\n * // Simple - just strings (name === binaryPath)\n * createBinaryBridges('presenterm', 'node', 'cargo')\n *\n * @example\n * // Mixed - strings and config objects\n * createBinaryBridges(\n * 'presenterm',\n * { name: 'python', binaryPath: 'python3' },\n * { name: 'git', allowedArgs: /^(status|log|diff)/ }\n * )\n */\nexport function createBinaryBridges(\n ...binaries: BinaryBridgeInput[]\n): CustomCommand[] {\n return binaries.map((input) => {\n const config: BinaryBridgeConfig =\n typeof input === 'string' ? { name: input } : input;\n\n const { name, binaryPath = name, allowedArgs } = config;\n\n return defineCommand(name, async (args, ctx) => {\n // Validate args against pattern if specified\n if (allowedArgs) {\n const invalidArg = args.find((arg) => !allowedArgs.test(arg));\n if (invalidArg) {\n return {\n stdout: '',\n stderr: `${name}: argument '${invalidArg}' not allowed by security policy`,\n exitCode: 1,\n };\n }\n }\n\n try {\n // Resolve the real working directory from the virtual filesystem\n // just-bash uses virtual paths like /home/user, we need the real host path\n const realCwd = resolveRealCwd(ctx);\n\n // Resolve file paths in arguments relative to the real cwd\n const resolvedArgs = args.map((arg) => {\n // Skip flags and options\n if (arg.startsWith('-')) {\n return arg;\n }\n\n // Check if arg looks like a path:\n // 1. Has a file extension (e.g., file.md, script.py)\n // 2. Contains path separator (e.g., src/file, dir\\file)\n // 3. Is a relative path starting with . 
(e.g., ., .., ./foo)\n const hasExtension = path.extname(arg) !== '';\n const hasPathSep = arg.includes(path.sep) || arg.includes('/');\n const isRelative = arg.startsWith('.');\n\n if (hasExtension || hasPathSep || isRelative) {\n // Resolve relative to the real cwd\n return path.resolve(realCwd, arg);\n }\n\n return arg;\n });\n\n // Merge environments but preserve process.env.PATH for binary resolution\n // ctx.env.PATH is the virtual PATH (/bin:/usr/bin) which doesn't include host binaries\n const mergedEnv = {\n ...process.env,\n ...ctx.env,\n PATH: process.env.PATH, // Always use host PATH for binary bridges\n };\n\n const result = await spawn(binaryPath, resolvedArgs, {\n cwd: realCwd,\n env: mergedEnv,\n });\n\n return {\n stdout: result.stdout,\n stderr: result.stderr,\n exitCode: 0,\n };\n } catch (error) {\n // nano-spawn wraps ENOENT (missing binary) into a SubprocessError\n // with exitCode undefined and the real cause on error.cause.\n if (error && typeof error === 'object') {\n const err = error as { cause?: unknown; message?: string };\n const cause = err.cause as\n | { code?: string; path?: string; syscall?: string }\n | undefined;\n\n if (cause?.code === 'ENOENT') {\n return {\n stdout: '',\n stderr: `${name}: ${binaryPath} not found`,\n exitCode: 127,\n };\n }\n }\n\n // nano-spawn throws SubprocessError for non-zero exits\n if (error && typeof error === 'object' && 'exitCode' in error) {\n const subprocessError = error as {\n exitCode?: number;\n stdout: string;\n stderr: string;\n };\n return {\n stdout: subprocessError.stdout ?? '',\n stderr: subprocessError.stderr ?? '',\n exitCode: subprocessError.exitCode ?? 1,\n };\n }\n\n // Unknown error (e.g., binary not found)\n return {\n stdout: '',\n stderr: `${name}: ${error instanceof Error ? error.message : String(error)}`,\n exitCode: 127,\n };\n }\n });\n });\n}\n\n/**\n * Resolves the real filesystem path from a just-bash virtual path.\n *\n * just-bash filesystems (ReadWriteFs, OverlayFs) use virtual paths like /home/user\n * but we need the actual host filesystem path for spawning processes.\n */\nfunction resolveRealCwd(ctx: { cwd: string; fs: unknown }): string {\n const fs = ctx.fs as {\n toRealPath?: (p: string) => string | null;\n root?: string;\n getMountPoint?: () => string;\n };\n\n let realCwd: string;\n\n if (fs.root) {\n // ReadWriteFs - virtual paths are relative to root\n // e.g., root=/Users/x/project, cwd=/ -> /Users/x/project\n realCwd = path.join(fs.root, ctx.cwd);\n } else if (\n typeof fs.getMountPoint === 'function' &&\n typeof fs.toRealPath === 'function'\n ) {\n // OverlayFs - use toRealPath for proper path mapping\n const real = fs.toRealPath(ctx.cwd);\n realCwd = real ?? 
process.cwd();\n } else {\n // Fallback for InMemoryFs or unknown filesystems\n realCwd = process.cwd();\n }\n\n // Verify the path exists, fall back to process.cwd() if not\n if (!existsSync(realCwd)) {\n realCwd = process.cwd();\n }\n\n return realCwd;\n}\n", "import { type CommandResult, type Sandbox } from 'bash-tool';\nimport spawn from 'nano-spawn';\nimport { createHash } from 'node:crypto';\nimport { existsSync, readFileSync } from 'node:fs';\n\n// Re-export types from bash-tool for convenience\nexport type { CommandResult as ExecResult, Sandbox } from 'bash-tool';\n\n// \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n// Error Classes\n// \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n\n/**\n * Base error for all Docker sandbox operations.\n */\nexport class DockerSandboxError extends Error {\n readonly containerId?: string;\n\n constructor(message: string, containerId?: string) {\n super(message);\n this.name = 'DockerSandboxError';\n this.containerId = containerId;\n }\n}\n\n/**\n * Thrown when Docker daemon is not available.\n */\nexport class DockerNotAvailableError extends DockerSandboxError {\n constructor() {\n super('Docker is not available. 
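// allowedArgs sketch: note the bridge tests EVERY argument (flags included)
// against the regex, so anchor it and whitelist any flags you expect.
// Handing the resulting CustomCommand[] to a just-bash environment is
// assumed to follow that library's custom-command configuration.
const bridges = createBinaryBridges(
  'node', // name === binaryPath
  { name: 'python', binaryPath: 'python3' },
  { name: 'git', allowedArgs: /^(status|log|diff|--oneline|-p)$/ },
);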
Ensure Docker daemon is running.');\n this.name = 'DockerNotAvailableError';\n }\n}\n\n/**\n * Thrown when container creation fails.\n */\nexport class ContainerCreationError extends DockerSandboxError {\n readonly image: string;\n override cause?: Error;\n\n constructor(message: string, image: string, cause?: Error) {\n super(`Failed to create container from image \"${image}\": ${message}`);\n this.name = 'ContainerCreationError';\n this.image = image;\n this.cause = cause;\n }\n}\n\n/**\n * Thrown when package installation fails.\n */\nexport class PackageInstallError extends DockerSandboxError {\n readonly packages: string[];\n readonly image: string;\n readonly packageManager: 'apk' | 'apt-get';\n readonly stderr: string;\n\n constructor(\n packages: string[],\n image: string,\n packageManager: 'apk' | 'apt-get',\n stderr: string,\n containerId?: string,\n ) {\n super(\n `Package installation failed for [${packages.join(', ')}] ` +\n `using ${packageManager} on ${image}: ${stderr}`,\n containerId,\n );\n this.name = 'PackageInstallError';\n this.packages = packages;\n this.image = image;\n this.packageManager = packageManager;\n this.stderr = stderr;\n }\n}\n\n/**\n * Thrown when a binary installation from URL fails.\n */\nexport class BinaryInstallError extends DockerSandboxError {\n readonly binaryName: string;\n readonly url: string;\n readonly reason: string;\n\n constructor(\n binaryName: string,\n url: string,\n reason: string,\n containerId?: string,\n ) {\n super(\n `Failed to install binary \"${binaryName}\" from ${url}: ${reason}`,\n containerId,\n );\n this.name = 'BinaryInstallError';\n this.binaryName = binaryName;\n this.url = url;\n this.reason = reason;\n }\n}\n\n/**\n * Thrown when a mount path doesn't exist on the host.\n */\nexport class MountPathError extends DockerSandboxError {\n readonly hostPath: string;\n readonly containerPath: string;\n\n constructor(hostPath: string, containerPath: string) {\n super(\n `Mount path does not exist on host: \"${hostPath}\" -> \"${containerPath}\"`,\n );\n this.name = 'MountPathError';\n this.hostPath = hostPath;\n this.containerPath = containerPath;\n }\n}\n\n/**\n * Thrown when Dockerfile build fails.\n */\nexport class DockerfileBuildError extends DockerSandboxError {\n readonly stderr: string;\n\n constructor(stderr: string) {\n super(`Dockerfile build failed: ${stderr}`);\n this.name = 'DockerfileBuildError';\n this.stderr = stderr;\n }\n}\n\n/**\n * Thrown when docker compose up fails.\n */\nexport class ComposeStartError extends DockerSandboxError {\n readonly composeFile: string;\n readonly stderr: string;\n\n constructor(composeFile: string, stderr: string) {\n super(`Docker Compose failed to start: ${stderr}`);\n this.name = 'ComposeStartError';\n this.composeFile = composeFile;\n this.stderr = stderr;\n }\n}\n\n// \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n// Interfaces\n// 
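// Error-handling sketch for the classes above; createDockerSandbox() is the
// factory defined later in this file, and the option values are illustrative.
try {
  const sandbox = await createDockerSandbox({
    image: 'alpine:latest',
    packages: ['curl'],
  });
  await sandbox.dispose();
} catch (error) {
  if (error instanceof DockerNotAvailableError) {
    // Daemon unreachable: ask the user to start Docker.
  } else if (error instanceof PackageInstallError) {
    // error.packages, error.packageManager, and error.stderr pinpoint it.
  } else if (error instanceof DockerSandboxError) {
    // Remaining sandbox failures (mounts, builds, compose, ...).
  }
  throw error;
}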
\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n\n/**\n * Configuration for mounting a host directory into the container.\n */\nexport interface DockerMount {\n /** Absolute path on the host machine */\n hostPath: string;\n /** Path inside the container */\n containerPath: string;\n /** Whether the mount is read-only (default: true) */\n readOnly?: boolean;\n}\n\n/**\n * Resource limits for the container.\n */\nexport interface DockerResources {\n /** Memory limit (e.g., '1g', '512m') */\n memory?: string;\n /** CPU limit (number of CPUs) */\n cpus?: number;\n}\n\n/**\n * Architecture-specific URL mapping for binary downloads.\n * Maps container architecture (from `uname -m`) to download URLs.\n */\nexport interface ArchitectureUrls {\n /** URL for x86_64 architecture (amd64) */\n x86_64?: string;\n /** URL for ARM64 architecture (aarch64) */\n aarch64?: string;\n /** URL for ARMv7 architecture */\n armv7l?: string;\n}\n\n/**\n * Configuration for installing a binary from a URL.\n *\n * Binaries are downloaded, extracted (if tar.gz), and installed to /usr/local/bin.\n */\nexport interface BinaryInstall {\n /** Name of the binary (used for the final executable name) */\n name: string;\n /**\n * URL or architecture-specific URLs.\n * - If a string, used for all architectures\n * - If ArchitectureUrls, selects based on container architecture\n */\n url: string | ArchitectureUrls;\n /**\n * Optional: The binary filename inside the archive if different from `name`.\n * Useful when the archive contains versioned binaries like \"presenterm-0.15.1\".\n */\n binaryPath?: string;\n}\n\n/**\n * Options for RuntimeStrategy - installs packages/binaries at container runtime.\n */\nexport interface RuntimeSandboxOptions {\n /** Docker image to use (default: 'alpine:latest') */\n image?: string;\n /** Packages to install in the container via package manager (apk/apt) */\n packages?: string[];\n /** Binaries to install from URLs (for tools not in package managers) */\n binaries?: BinaryInstall[];\n /** Directories to mount from host */\n mounts?: DockerMount[];\n /** Resource limits */\n resources?: DockerResources;\n}\n\n/**\n * Options for DockerfileStrategy - builds custom image from Dockerfile.\n */\nexport interface DockerfileSandboxOptions {\n /** Dockerfile content (if contains newlines) or path to Dockerfile */\n dockerfile: string;\n /** Build context directory (default: '.') */\n context?: string;\n /** Directories to mount from host */\n mounts?: DockerMount[];\n /** Resource limits */\n resources?: DockerResources;\n}\n\n/**\n * Options for ComposeStrategy - manages multi-container environments.\n */\nexport interface ComposeSandboxOptions {\n /** Path to docker-compose.yml file */\n compose: string;\n /** Service name to execute commands in (required) */\n service: string;\n /** Resource limits (applied to target service only) */\n resources?: DockerResources;\n // Note: mounts must be defined in compose file, not here\n}\n\n/**\n * Union type for Docker sandbox options.\n * - RuntimeSandboxOptions: Runtime package/binary installation\n * - DockerfileSandboxOptions: Pre-built images 
from Dockerfile\n * - ComposeSandboxOptions: Multi-container environments via Docker Compose\n */\nexport type DockerSandboxOptions =\n | RuntimeSandboxOptions\n | DockerfileSandboxOptions\n | ComposeSandboxOptions;\n\n/**\n * Extended sandbox interface with disposal method.\n */\nexport interface DockerSandbox extends Sandbox {\n /** Stop and remove the container */\n dispose(): Promise<void>;\n}\n\n// \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n// Helper Functions\n// \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n\n/**\n * Detects if the image is Debian-based (uses apt-get) or Alpine-based (uses apk).\n */\nfunction isDebianBased(image: string): boolean {\n const debianPatterns = ['debian', 'ubuntu', 'node', 'python'];\n return debianPatterns.some((pattern) =>\n image.toLowerCase().includes(pattern),\n );\n}\n\n/**\n * Type guard to determine if options are for DockerfileStrategy.\n */\nexport function isDockerfileOptions(\n opts: DockerSandboxOptions,\n): opts is DockerfileSandboxOptions {\n return 'dockerfile' in opts;\n}\n\n/**\n * Type guard to determine if options are for ComposeStrategy.\n */\nexport function isComposeOptions(\n opts: DockerSandboxOptions,\n): opts is ComposeSandboxOptions {\n return 'compose' in opts;\n}\n\n// \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n// Strategy Pattern - Base Class\n// \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n\n/**\n * Internal context shared across strategy methods.\n */\ninterface StrategyContext {\n containerId: string;\n image: string;\n}\n\n/**\n * Abstract base class for Docker sandbox creation strategies.\n *\n * Uses the Template Method pattern to define the skeleton of the sandbox\n * creation algorithm, deferring specific steps to subclasses.\n *\n * @example Extending the strategy\n * ```typescript\n * class CustomStrategy extends DockerSandboxStrategy {\n * protected async 
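// How the factory below discriminates the options union with the exported
// type guards. Order matters: a runtime-style options object is simply
// whatever is neither compose nor dockerfile.
function strategyFor(
  opts: DockerSandboxOptions,
): 'compose' | 'dockerfile' | 'runtime' {
  if (isComposeOptions(opts)) return 'compose';
  if (isDockerfileOptions(opts)) return 'dockerfile';
  return 'runtime';
}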
getImage(): Promise<string> {\n * // Custom image resolution logic\n * return 'my-custom-image:latest';\n * }\n *\n * protected async configure(): Promise<void> {\n * // Custom configuration after container starts\n * }\n * }\n * ```\n */\nexport abstract class DockerSandboxStrategy {\n protected context!: StrategyContext;\n protected mounts: DockerMount[];\n protected resources: DockerResources;\n\n constructor(mounts: DockerMount[] = [], resources: DockerResources = {}) {\n this.mounts = mounts;\n this.resources = resources;\n }\n\n /**\n * Template method - defines the algorithm skeleton for creating a sandbox.\n *\n * Steps:\n * 1. Validate mount paths exist on host\n * 2. Get/build the Docker image (strategy-specific)\n * 3. Start the container\n * 4. Configure the container (strategy-specific)\n * 5. Create and return sandbox methods\n */\n async create(): Promise<DockerSandbox> {\n this.validateMounts();\n const image = await this.getImage();\n const containerId = await this.startContainer(image);\n this.context = { containerId, image };\n\n try {\n await this.configure();\n } catch (error) {\n // Clean up container if configuration fails\n await this.stopContainer(containerId);\n throw error;\n }\n\n return this.createSandboxMethods();\n }\n\n // \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n // Common implementations (shared by all strategies)\n // \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n\n /**\n * Validates that all mount paths exist on the host filesystem.\n */\n protected validateMounts(): void {\n for (const mount of this.mounts) {\n if (!existsSync(mount.hostPath)) {\n throw new MountPathError(mount.hostPath, mount.containerPath);\n }\n }\n }\n\n /**\n * Builds the docker run command arguments.\n */\n protected buildDockerArgs(image: string, containerId: string): string[] {\n const { memory = '1g', cpus = 2 } = this.resources;\n\n const args: string[] = [\n 'run',\n '-d', // Detached mode\n '--rm', // Remove container when stopped\n '--name',\n containerId,\n `--memory=${memory}`,\n `--cpus=${cpus}`,\n '-w',\n '/workspace', // Set working directory\n ];\n\n // Add mounts\n for (const mount of this.mounts) {\n const mode = mount.readOnly !== false ? 
'ro' : 'rw';\n args.push('-v', `${mount.hostPath}:${mount.containerPath}:${mode}`);\n }\n\n // Add image and command to keep container alive\n args.push(image, 'tail', '-f', '/dev/null');\n\n return args;\n }\n\n /**\n * Starts a Docker container with the given image.\n */\n protected async startContainer(image: string): Promise<string> {\n const containerId = `sandbox-${crypto.randomUUID().slice(0, 8)}`;\n const args = this.buildDockerArgs(image, containerId);\n\n try {\n await spawn('docker', args);\n } catch (error) {\n const err = error as Error & { stderr?: string };\n if (\n err.message?.includes('Cannot connect') ||\n err.message?.includes('docker daemon') ||\n err.stderr?.includes('Cannot connect')\n ) {\n throw new DockerNotAvailableError();\n }\n throw new ContainerCreationError(err.message || String(err), image, err);\n }\n\n return containerId;\n }\n\n /**\n * Stops a Docker container.\n */\n protected async stopContainer(containerId: string): Promise<void> {\n try {\n await spawn('docker', ['stop', containerId]);\n } catch {\n // Container may already be stopped, ignore errors\n }\n }\n\n /**\n * Executes a command in the container.\n */\n protected async exec(command: string): Promise<CommandResult> {\n try {\n const result = await spawn('docker', [\n 'exec',\n this.context.containerId,\n 'sh',\n '-c',\n command,\n ]);\n return {\n stdout: result.stdout,\n stderr: result.stderr,\n exitCode: 0,\n };\n } catch (error) {\n const err = error as Error & {\n stdout?: string;\n stderr?: string;\n exitCode?: number;\n };\n return {\n stdout: err.stdout || '',\n stderr: err.stderr || err.message || '',\n exitCode: err.exitCode ?? 1,\n };\n }\n }\n\n /**\n * Creates the DockerSandbox interface with all methods.\n */\n protected createSandboxMethods(): DockerSandbox {\n const { containerId } = this.context;\n\n const sandbox: DockerSandbox = {\n executeCommand: async (command: string): Promise<CommandResult> => {\n return this.exec(command);\n },\n\n readFile: async (path: string): Promise<string> => {\n // Use base64 encoding to preserve exact content (including trailing newlines)\n // nano-spawn strips trailing newlines from stdout, so we encode/decode\n const result = await sandbox.executeCommand(`base64 \"${path}\"`);\n if (result.exitCode !== 0) {\n throw new Error(`Failed to read file \"${path}\": ${result.stderr}`);\n }\n return Buffer.from(result.stdout, 'base64').toString('utf-8');\n },\n\n writeFiles: async (\n files: Array<{ path: string; content: string }>,\n ): Promise<void> => {\n for (const file of files) {\n // Create parent directories\n const dir = file.path.substring(0, file.path.lastIndexOf('/'));\n if (dir) {\n await sandbox.executeCommand(`mkdir -p \"${dir}\"`);\n }\n\n // Use base64 encoding for binary-safe file writes\n const base64Content = Buffer.from(file.content).toString('base64');\n const result = await sandbox.executeCommand(\n `echo \"${base64Content}\" | base64 -d > \"${file.path}\"`,\n );\n\n if (result.exitCode !== 0) {\n throw new Error(\n `Failed to write file \"${file.path}\": ${result.stderr}`,\n );\n }\n }\n },\n\n dispose: async (): Promise<void> => {\n await this.stopContainer(containerId);\n },\n };\n\n return sandbox;\n }\n\n // 
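// Round-trip sketch for the base64-backed helpers above: writeFiles()
// creates parent directories, and readFile() preserves exact content,
// including trailing newlines that nano-spawn's stdout handling would strip.
const sandbox = await createDockerSandbox({ image: 'alpine:latest' });
await sandbox.writeFiles([
  { path: '/workspace/notes/hello.txt', content: 'hello\n' },
]);
const text = await sandbox.readFile('/workspace/notes/hello.txt');
// text === 'hello\n'
await sandbox.dispose();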
\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n // Strategy-specific hooks (to be implemented by subclasses)\n // \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n\n /**\n * Returns the Docker image to use for the container.\n * For RuntimeStrategy: returns the image name directly.\n * For DockerfileStrategy: builds the image and returns the tag.\n */\n protected abstract getImage(): Promise<string>;\n\n /**\n * Configures the container after it starts.\n * For RuntimeStrategy: installs packages and binaries.\n * For DockerfileStrategy: no-op (Dockerfile already configured).\n */\n protected abstract configure(): Promise<void>;\n}\n\n// \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n// RuntimeStrategy - Installs packages/binaries at container runtime\n// \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n\n/**\n * Strategy that uses an existing Docker image and installs packages/binaries\n * at container runtime.\n *\n * This is the \"configure-on-demand\" approach - starts a vanilla image and\n * customizes it by executing installation commands.\n *\n * @example\n * ```typescript\n * const strategy = new RuntimeStrategy(\n * 'alpine:latest',\n * ['curl', 'jq'],\n * [{ name: 'presenterm', url: {...} }],\n * );\n * const sandbox = await strategy.create();\n * ```\n */\nexport class RuntimeStrategy extends DockerSandboxStrategy {\n private image: string;\n private packages: string[];\n private binaries: BinaryInstall[];\n\n constructor(\n image = 'alpine:latest',\n packages: string[] = [],\n binaries: BinaryInstall[] = [],\n mounts?: DockerMount[],\n resources?: DockerResources,\n ) {\n super(mounts, resources);\n this.image = image;\n this.packages = packages;\n this.binaries = binaries;\n }\n\n protected async getImage(): Promise<string> {\n return this.image;\n }\n\n protected async configure(): Promise<void> {\n await this.installPackages();\n await this.installBinaries();\n }\n\n /**\n * Installs 
packages using the appropriate package manager (apk/apt-get).\n */\n private async installPackages(): Promise<void> {\n if (this.packages.length === 0) return;\n\n const useApt = isDebianBased(this.image);\n const installCmd = useApt\n ? `apt-get update && apt-get install -y ${this.packages.join(' ')}`\n : `apk add --no-cache ${this.packages.join(' ')}`;\n\n try {\n await spawn('docker', [\n 'exec',\n this.context.containerId,\n 'sh',\n '-c',\n installCmd,\n ]);\n } catch (error) {\n const err = error as Error & { stderr?: string };\n throw new PackageInstallError(\n this.packages,\n this.image,\n useApt ? 'apt-get' : 'apk',\n err.stderr || err.message,\n this.context.containerId,\n );\n }\n }\n\n /**\n * Installs binaries from URLs.\n */\n private async installBinaries(): Promise<void> {\n if (this.binaries.length === 0) return;\n\n // Ensure curl is available for downloading\n await this.ensureCurl();\n\n // Detect container architecture\n const arch = await this.detectArchitecture();\n\n // Install each binary\n for (const binary of this.binaries) {\n await this.installBinary(binary, arch);\n }\n }\n\n /**\n * Ensures curl is installed in the container.\n */\n private async ensureCurl(): Promise<void> {\n const checkResult = await spawn('docker', [\n 'exec',\n this.context.containerId,\n 'which',\n 'curl',\n ]).catch(() => null);\n\n if (checkResult) return; // curl already installed\n\n const useApt = isDebianBased(this.image);\n const curlInstallCmd = useApt\n ? 'apt-get update && apt-get install -y curl'\n : 'apk add --no-cache curl';\n\n try {\n await spawn('docker', [\n 'exec',\n this.context.containerId,\n 'sh',\n '-c',\n curlInstallCmd,\n ]);\n } catch (error) {\n const err = error as Error & { stderr?: string };\n throw new BinaryInstallError(\n 'curl',\n 'package-manager',\n `Required for binary downloads: ${err.stderr || err.message}`,\n this.context.containerId,\n );\n }\n }\n\n /**\n * Detects the container's CPU architecture.\n */\n private async detectArchitecture(): Promise<string> {\n try {\n const result = await spawn('docker', [\n 'exec',\n this.context.containerId,\n 'uname',\n '-m',\n ]);\n return result.stdout.trim();\n } catch (error) {\n const err = error as Error & { stderr?: string };\n throw new DockerSandboxError(\n `Failed to detect container architecture: ${err.stderr || err.message}`,\n this.context.containerId,\n );\n }\n }\n\n /**\n * Installs a single binary from URL.\n */\n private async installBinary(\n binary: BinaryInstall,\n arch: string,\n ): Promise<void> {\n // Resolve URL based on architecture\n let url: string;\n if (typeof binary.url === 'string') {\n url = binary.url;\n } else {\n const archUrl = binary.url[arch as keyof ArchitectureUrls];\n if (!archUrl) {\n throw new BinaryInstallError(\n binary.name,\n `arch:${arch}`,\n `No URL provided for architecture \"${arch}\". Available: ${Object.keys(binary.url).join(', ')}`,\n this.context.containerId,\n );\n }\n url = archUrl;\n }\n\n // Download and install the binary\n const isTarGz = url.endsWith('.tar.gz') || url.endsWith('.tgz');\n let installCmd: string;\n\n if (isTarGz) {\n const binaryPathInArchive = binary.binaryPath || binary.name;\n installCmd = `\n set -e\n TMPDIR=$(mktemp -d)\n cd \"$TMPDIR\"\n curl -fsSL \"${url}\" -o archive.tar.gz\n tar -xzf archive.tar.gz\n BINARY_FILE=$(find . -name \"${binaryPathInArchive}\" -o -name \"${binary.name}\" | head -1)\n if [ -z \"$BINARY_FILE\" ]; then\n echo \"Binary not found in archive. Contents:\" >&2\n find . 
-type f >&2\n exit 1\n fi\n chmod +x \"$BINARY_FILE\"\n mv \"$BINARY_FILE\" /usr/local/bin/${binary.name}\n cd /\n rm -rf \"$TMPDIR\"\n `;\n } else {\n installCmd = `\n curl -fsSL \"${url}\" -o /usr/local/bin/${binary.name}\n chmod +x /usr/local/bin/${binary.name}\n `;\n }\n\n try {\n await spawn('docker', [\n 'exec',\n this.context.containerId,\n 'sh',\n '-c',\n installCmd,\n ]);\n } catch (error) {\n const err = error as Error & { stderr?: string };\n throw new BinaryInstallError(\n binary.name,\n url,\n err.stderr || err.message,\n this.context.containerId,\n );\n }\n }\n}\n\n// \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n// DockerfileStrategy - Builds image from Dockerfile\n// \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n\n/**\n * Strategy that builds a custom Docker image from a Dockerfile.\n *\n * This is the \"build-once, run-many\" approach - builds the image upfront\n * (with caching) and runs containers from the pre-configured image.\n *\n * Image caching: Uses a deterministic tag based on Dockerfile content hash.\n * If the same Dockerfile is used, the existing image is reused (cache hit).\n *\n * @example Inline Dockerfile\n * ```typescript\n * const strategy = new DockerfileStrategy(`\n * FROM alpine:latest\n * RUN apk add --no-cache curl jq\n * `);\n * const sandbox = await strategy.create();\n * ```\n *\n * @example Dockerfile path\n * ```typescript\n * const strategy = new DockerfileStrategy(\n * './Dockerfile.sandbox',\n * './docker', // build context\n * );\n * const sandbox = await strategy.create();\n * ```\n */\nexport class DockerfileStrategy extends DockerSandboxStrategy {\n private imageTag: string;\n private dockerfile: string;\n private dockerContext: string;\n\n constructor(\n dockerfile: string,\n dockerContext = '.',\n mounts?: DockerMount[],\n resources?: DockerResources,\n ) {\n super(mounts, resources);\n this.dockerfile = dockerfile;\n this.dockerContext = dockerContext;\n this.imageTag = this.computeImageTag();\n }\n\n /**\n * Computes a deterministic image tag based on Dockerfile content.\n * Same Dockerfile \u2192 same tag \u2192 Docker skips rebuild if image exists.\n */\n private computeImageTag(): string {\n const content = this.isInlineDockerfile()\n ? 
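// Filling in the `url: {...}` placeholder from the RuntimeStrategy example:
// ArchitectureUrls keys match `uname -m` output inside the container. The
// URLs below are placeholders, not real release links.
const sandbox = await createDockerSandbox({
  image: 'alpine:latest',
  binaries: [
    {
      name: 'presenterm',
      url: {
        x86_64: 'https://example.com/presenterm-x86_64.tar.gz',
        aarch64: 'https://example.com/presenterm-aarch64.tar.gz',
      },
      // The archive ships a versioned binary, so point at it explicitly.
      binaryPath: 'presenterm-0.15.1',
    },
  ],
});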
this.dockerfile\n : readFileSync(this.dockerfile, 'utf-8');\n const hash = createHash('sha256')\n .update(content)\n .digest('hex')\n .slice(0, 12);\n return `sandbox-${hash}`;\n }\n\n /**\n * Checks if the dockerfile property is inline content or a file path.\n */\n private isInlineDockerfile(): boolean {\n return this.dockerfile.includes('\\n');\n }\n\n protected async getImage(): Promise<string> {\n // Check if image already exists (cache hit)\n const exists = await this.imageExists();\n if (!exists) {\n await this.buildImage();\n }\n return this.imageTag;\n }\n\n protected async configure(): Promise<void> {\n // No-op - Dockerfile already configured the image\n }\n\n /**\n * Checks if the image already exists locally.\n */\n private async imageExists(): Promise<boolean> {\n try {\n await spawn('docker', ['image', 'inspect', this.imageTag]);\n return true;\n } catch {\n return false;\n }\n }\n\n /**\n * Builds the Docker image from the Dockerfile.\n */\n private async buildImage(): Promise<void> {\n try {\n if (this.isInlineDockerfile()) {\n // Inline Dockerfile - use heredoc via shell\n const buildCmd = `echo '${this.dockerfile.replace(/'/g, \"'\\\\''\")}' | docker build -t ${this.imageTag} -f - ${this.dockerContext}`;\n await spawn('sh', ['-c', buildCmd]);\n } else {\n // Path to Dockerfile\n await spawn('docker', [\n 'build',\n '-t',\n this.imageTag,\n '-f',\n this.dockerfile,\n this.dockerContext,\n ]);\n }\n } catch (error) {\n const err = error as Error & { stderr?: string };\n throw new DockerfileBuildError(err.stderr || err.message);\n }\n }\n}\n\n// \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n// ComposeStrategy - Multi-container environments via Docker Compose\n// \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n\n/**\n * Strategy that manages multi-container environments using Docker Compose.\n *\n * Unlike other strategies that manage a single container, ComposeStrategy\n * orchestrates multiple services as a unit using docker compose commands.\n *\n * @example\n * ```typescript\n * const strategy = new ComposeStrategy(\n * './docker-compose.yml',\n * 'app', // Service to execute commands in\n * );\n * const sandbox = await strategy.create();\n *\n * // Commands run in the 'app' service\n * await sandbox.executeCommand('node --version');\n *\n * // Can communicate with other services via service names\n * await sandbox.executeCommand('curl http://api:3000/health');\n *\n * // Stops ALL services\n * await sandbox.dispose();\n * ```\n */\nexport class ComposeStrategy extends DockerSandboxStrategy {\n private projectName: string;\n private composeFile: string;\n private service: string;\n\n constructor(\n composeFile: string,\n service: string,\n resources?: 
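// Caching sketch: the image tag is a truncated sha256 of the Dockerfile
// content, so an identical inline Dockerfile is a cache hit and skips the
// docker build entirely on the second call.
const dockerfile = `
FROM alpine:latest
RUN apk add --no-cache curl jq
`;
const first = await createDockerSandbox({ dockerfile }); // builds sandbox-<hash>
const second = await createDockerSandbox({ dockerfile }); // reuses the image
await first.dispose();
await second.dispose();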
DockerResources,\n ) {\n // Pass empty mounts - compose handles its own volumes\n super([], resources);\n this.composeFile = composeFile;\n this.service = service;\n this.projectName = this.computeProjectName();\n }\n\n /**\n * Deterministic project name based on compose file content for caching.\n * Same compose file \u2192 same project name \u2192 faster subsequent startups.\n */\n private computeProjectName(): string {\n const content = readFileSync(this.composeFile, 'utf-8');\n const hash = createHash('sha256').update(content).digest('hex').slice(0, 8);\n return `sandbox-${hash}`;\n }\n\n /**\n * Override: No image to get - compose manages its own images.\n */\n protected async getImage(): Promise<string> {\n return ''; // Not used for compose\n }\n\n /**\n * Override: Start all services with docker compose up.\n */\n protected override async startContainer(_image: string): Promise<string> {\n try {\n await spawn('docker', [\n 'compose',\n '-f',\n this.composeFile,\n '-p',\n this.projectName,\n 'up',\n '-d',\n ]);\n } catch (error) {\n const err = error as Error & { stderr?: string };\n if (err.stderr?.includes('Cannot connect')) {\n throw new DockerNotAvailableError();\n }\n throw new ComposeStartError(this.composeFile, err.stderr || err.message);\n }\n\n // Return project name as the \"container ID\" for context\n return this.projectName;\n }\n\n protected async configure(): Promise<void> {\n // No additional configuration - compose file defines everything\n }\n\n /**\n * Override: Execute commands in the target service.\n */\n protected override async exec(command: string): Promise<CommandResult> {\n try {\n const result = await spawn('docker', [\n 'compose',\n '-f',\n this.composeFile,\n '-p',\n this.projectName,\n 'exec',\n '-T', // -T disables pseudo-TTY\n this.service,\n 'sh',\n '-c',\n command,\n ]);\n return { stdout: result.stdout, stderr: result.stderr, exitCode: 0 };\n } catch (error) {\n const err = error as Error & {\n stdout?: string;\n stderr?: string;\n exitCode?: number;\n };\n return {\n stdout: err.stdout || '',\n stderr: err.stderr || err.message || '',\n exitCode: err.exitCode ?? 
1,\n };\n }\n }\n\n /**\n * Override: Stop all services with docker compose down.\n */\n protected override async stopContainer(_containerId: string): Promise<void> {\n try {\n await spawn('docker', [\n 'compose',\n '-f',\n this.composeFile,\n '-p',\n this.projectName,\n 'down',\n ]);\n } catch {\n // Ignore cleanup errors\n }\n }\n}\n\n// \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n// Factory Function\n// \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n\n/**\n * Creates a Docker-based sandbox for executing commands in an isolated container.\n *\n * Supports three strategies:\n * - **RuntimeStrategy**: Uses existing image, installs packages/binaries at runtime\n * - **DockerfileStrategy**: Builds custom image from Dockerfile (with caching)\n * - **ComposeStrategy**: Multi-container environments via Docker Compose\n *\n * @example RuntimeStrategy (default)\n * ```typescript\n * const sandbox = await createDockerSandbox({\n * image: 'alpine:latest',\n * packages: ['curl', 'jq'],\n * binaries: [{ name: 'presenterm', url: {...} }],\n * });\n * await sandbox.executeCommand('curl --version');\n * await sandbox.dispose();\n * ```\n *\n * @example DockerfileStrategy\n * ```typescript\n * const sandbox = await createDockerSandbox({\n * dockerfile: `\n * FROM alpine:latest\n * RUN apk add --no-cache curl jq\n * `,\n * context: '.',\n * });\n * await sandbox.executeCommand('curl --version');\n * await sandbox.dispose();\n * ```\n *\n * @example ComposeStrategy\n * ```typescript\n * const sandbox = await createDockerSandbox({\n * compose: './docker-compose.yml',\n * service: 'app',\n * });\n * // Commands run in the 'app' service\n * await sandbox.executeCommand('node --version');\n * // Can reach other services by name\n * await sandbox.executeCommand('curl http://db:5432');\n * await sandbox.dispose(); // Stops ALL services\n * ```\n */\nexport async function createDockerSandbox(\n options: DockerSandboxOptions = {},\n): Promise<DockerSandbox> {\n let strategy: DockerSandboxStrategy;\n\n if (isComposeOptions(options)) {\n strategy = new ComposeStrategy(\n options.compose,\n options.service,\n options.resources,\n );\n } else if (isDockerfileOptions(options)) {\n strategy = new DockerfileStrategy(\n options.dockerfile,\n options.context,\n options.mounts,\n options.resources,\n );\n } else {\n strategy = new RuntimeStrategy(\n options.image,\n options.packages,\n options.binaries,\n options.mounts,\n options.resources,\n );\n }\n\n return strategy.create();\n}\n\n/**\n * Execute a function with a Docker sandbox that auto-disposes on completion.\n * Ensures cleanup even if the function throws.\n *\n * @example\n * ```typescript\n * const output = await useSandbox(\n * { packages: 
['curl', 'jq'] },\n * async (sandbox) => {\n * const result = await sandbox.executeCommand('curl --version');\n * return result.stdout;\n * },\n * );\n * // Container is automatically disposed - no try/finally needed\n * ```\n */\nexport async function useSandbox<T>(\n options: DockerSandboxOptions,\n fn: (sandbox: DockerSandbox) => Promise<T>,\n): Promise<T> {\n const sandbox = await createDockerSandbox(options);\n try {\n return await fn(sandbox);\n } finally {\n await sandbox.dispose();\n }\n}\n", "import {\n createBashTool,\n type CreateBashToolOptions,\n type BashToolkit,\n} from 'bash-tool';\n\nimport {\n createDockerSandbox,\n isComposeOptions,\n isDockerfileOptions,\n type BinaryInstall,\n type DockerMount,\n type DockerResources,\n type DockerSandbox,\n type DockerSandboxOptions,\n} from './docker-sandbox.ts';\n\n/**\n * Base options shared by RuntimeContainerToolOptions and DockerfileContainerToolOptions.\n */\ninterface BaseContainerToolOptions\n extends Omit<CreateBashToolOptions, 'sandbox' | 'uploadDirectory'> {\n /** Directories to mount from host into the container */\n mounts?: DockerMount[];\n /** Resource limits for the container */\n resources?: DockerResources;\n}\n\n/**\n * Options for container tool using RuntimeStrategy.\n * Installs packages/binaries at container runtime.\n */\nexport interface RuntimeContainerToolOptions extends BaseContainerToolOptions {\n /** Docker image to use (default: 'alpine:latest') */\n image?: string;\n /** Packages to install in the container via package manager (apk/apt) */\n packages?: string[];\n /** Binaries to install from URLs (for tools not in package managers) */\n binaries?: BinaryInstall[];\n}\n\n/**\n * Options for container tool using DockerfileStrategy.\n * Builds custom image from Dockerfile (with caching).\n */\nexport interface DockerfileContainerToolOptions extends BaseContainerToolOptions {\n /** Dockerfile content (if contains newlines) or path to Dockerfile */\n dockerfile: string;\n /** Build context directory (default: '.') */\n context?: string;\n}\n\n/**\n * Options for container tool using ComposeStrategy.\n * Manages multi-container environments via Docker Compose.\n */\nexport interface ComposeContainerToolOptions\n extends Omit<CreateBashToolOptions, 'sandbox' | 'uploadDirectory'> {\n /** Path to docker-compose.yml file */\n compose: string;\n /** Service name to execute commands in (required) */\n service: string;\n /** Resource limits for the container */\n resources?: DockerResources;\n // Note: mounts must be defined in compose file, not here\n}\n\n/**\n * Union type for container tool options.\n * - RuntimeContainerToolOptions: Runtime package/binary installation\n * - DockerfileContainerToolOptions: Pre-built images from Dockerfile\n * - ComposeContainerToolOptions: Multi-container environments via Docker Compose\n */\nexport type ContainerToolOptions =\n | RuntimeContainerToolOptions\n | DockerfileContainerToolOptions\n | ComposeContainerToolOptions;\n\n/**\n * Result of creating a container tool.\n * Extends BashToolkit but with DockerSandbox (which has dispose()) instead of base Sandbox.\n */\nexport type ContainerToolResult = Omit<BashToolkit, 'sandbox'> & {\n sandbox: DockerSandbox;\n};\n\n/**\n * Creates a bash tool that runs in a Docker container.\n *\n * This is a high-level wrapper that combines `createDockerSandbox()` and\n * `createBashTool()` into a single call. 
It provides a convenient way to\n * get a bash tool that executes real binaries in an isolated container.\n *\n * Supports three strategies:\n * - **RuntimeStrategy**: Uses existing image, installs packages/binaries at runtime\n * - **DockerfileStrategy**: Builds custom image from Dockerfile (with caching)\n * - **ComposeStrategy**: Multi-container environments via Docker Compose\n *\n * @example RuntimeStrategy (default)\n * ```typescript\n * const { bash, tools, sandbox } = await createContainerTool({\n * packages: ['curl', 'jq'],\n * mounts: [{\n * hostPath: process.cwd(),\n * containerPath: '/workspace',\n * readOnly: false,\n * }],\n * });\n *\n * // Use with AI SDK\n * const response = await generateText({\n * model: yourModel,\n * tools,\n * prompt: 'Fetch the weather data and parse it with jq',\n * });\n *\n * // Clean up when done\n * await sandbox.dispose();\n * ```\n *\n * @example DockerfileStrategy\n * ```typescript\n * const { bash, tools, sandbox } = await createContainerTool({\n * dockerfile: `\n * FROM python:3.11-slim\n * RUN pip install pandas numpy\n * `,\n * context: '.',\n * mounts: [{\n * hostPath: process.cwd(),\n * containerPath: '/workspace',\n * }],\n * });\n * ```\n *\n * @example ComposeStrategy\n * ```typescript\n * const { bash, tools, sandbox } = await createContainerTool({\n * compose: './docker-compose.yml',\n * service: 'app',\n * });\n * // Commands run in the 'app' service, can reach other services by name\n * await sandbox.dispose(); // Stops ALL services\n * ```\n *\n * @example With hooks for logging\n * ```typescript\n * const { bash, sandbox } = await createContainerTool({\n * packages: ['python3'],\n * onBeforeBashCall: ({ command }) => {\n * console.log('Running:', command);\n * },\n * onAfterBashCall: ({ command, result }) => {\n * console.log(`Exit code: ${result.exitCode}`);\n * },\n * });\n * ```\n */\nexport async function createContainerTool(\n options: ContainerToolOptions = {},\n): Promise<ContainerToolResult> {\n // Extract sandbox options from bash tool options\n let sandboxOptions: DockerSandboxOptions;\n let bashOptions: Omit<CreateBashToolOptions, 'sandbox' | 'uploadDirectory'>;\n\n if (isComposeOptions(options)) {\n const { compose, service, resources, ...rest } = options;\n sandboxOptions = { compose, service, resources };\n bashOptions = rest;\n } else if (isDockerfileOptions(options)) {\n const { dockerfile, context, mounts, resources, ...rest } = options;\n sandboxOptions = { dockerfile, context, mounts, resources };\n bashOptions = rest;\n } else {\n const { image, packages, binaries, mounts, resources, ...rest } = options;\n sandboxOptions = { image, packages, binaries, mounts, resources };\n bashOptions = rest;\n }\n\n // Create the Docker sandbox\n const sandbox = await createDockerSandbox(sandboxOptions);\n\n // Create the bash tool with our Docker sandbox\n const toolkit = await createBashTool({\n ...bashOptions,\n sandbox,\n });\n\n return {\n bash: toolkit.bash,\n tools: toolkit.tools,\n sandbox,\n };\n}\n", "import * as fs from 'node:fs';\nimport * as path from 'node:path';\nimport YAML from 'yaml';\n\nimport type { ParsedSkillMd, SkillMetadata } from './types.ts';\n\n/**\n * Parse YAML frontmatter from a SKILL.md file content.\n *\n * Frontmatter format:\n * ```\n * ---\n * name: skill-name\n * description: Skill description here\n * ---\n *\n * # Markdown body\n * ```\n */\nexport function parseFrontmatter(content: string): ParsedSkillMd {\n const frontmatterRegex = 
/^---\\s*\\n([\\s\\S]*?)\\n---\\s*\\n?([\\s\\S]*)$/;\n const match = content.match(frontmatterRegex);\n\n if (!match) {\n throw new Error('Invalid SKILL.md: missing or malformed frontmatter');\n }\n\n const [, yamlContent, body] = match;\n const frontmatter = YAML.parse(yamlContent) as Record<string, unknown>;\n\n if (!frontmatter.name || typeof frontmatter.name !== 'string') {\n throw new Error('Invalid SKILL.md: frontmatter must have a \"name\" field');\n }\n\n if (!frontmatter.description || typeof frontmatter.description !== 'string') {\n throw new Error(\n 'Invalid SKILL.md: frontmatter must have a \"description\" field',\n );\n }\n\n return {\n frontmatter: frontmatter as ParsedSkillMd['frontmatter'],\n body: body.trim(),\n };\n}\n\n/**\n * Load skill metadata from a SKILL.md file.\n * Only parses frontmatter, does not load full body into memory.\n * This is the core of progressive disclosure - metadata only at startup.\n */\nexport function loadSkillMetadata(skillMdPath: string): SkillMetadata {\n const content = fs.readFileSync(skillMdPath, 'utf-8');\n const parsed = parseFrontmatter(content);\n const skillDir = path.dirname(skillMdPath);\n\n return {\n name: parsed.frontmatter.name,\n description: parsed.frontmatter.description,\n path: skillDir,\n skillMdPath,\n };\n}\n\n/**\n * Discover all skills in a directory.\n * Looks for subdirectories containing SKILL.md files.\n * Only loads metadata - full content is read by LLM when needed.\n */\nexport function discoverSkillsInDirectory(directory: string): SkillMetadata[] {\n const skills: SkillMetadata[] = [];\n\n // Expand ~ to home directory\n const expandedDir = directory.startsWith('~')\n ? path.join(process.env.HOME || '', directory.slice(1))\n : directory;\n if (!fs.existsSync(expandedDir)) {\n return skills;\n }\n\n const entries = fs.readdirSync(expandedDir, { withFileTypes: true });\n\n for (const entry of entries) {\n if (!entry.isDirectory()) continue;\n\n const skillMdPath = path.join(expandedDir, entry.name, 'SKILL.md');\n if (!fs.existsSync(skillMdPath)) continue;\n\n try {\n const metadata = loadSkillMetadata(skillMdPath);\n skills.push(metadata);\n } catch (error) {\n // Skip invalid skills, log warning\n console.warn(`Warning: Failed to load skill at ${skillMdPath}:`, error);\n }\n }\n\n return skills;\n}\n", "import type { ContextFragment } from '../fragments.ts';\nimport { discoverSkillsInDirectory } from './loader.ts';\nimport type { SkillMetadata, SkillsFragmentOptions } from './types.ts';\n\n/**\n * Create a context fragment containing available skills metadata.\n *\n * Follows Anthropic's progressive disclosure pattern:\n * - At startup: only skill metadata (name, description, path) is injected\n * - At runtime: LLM reads full SKILL.md using file tools when relevant\n *\n * @param options - Configuration including paths to scan and optional filtering\n *\n * @example\n * ```ts\n * const context = new ContextEngine({ userId: 'demo-user', store, chatId: 'demo' })\n * .set(\n * role('You are a helpful assistant.'),\n * skills({ paths: ['./skills'] }), // Injects skill metadata into system prompt\n * );\n *\n * // LLM now sees skill metadata and can read full SKILL.md when needed\n * ```\n */\nexport function skills(options: SkillsFragmentOptions): ContextFragment {\n // Discover skills from all paths (later paths override earlier ones)\n const skillsMap = new Map<string, SkillMetadata>();\n for (const dir of options.paths) {\n const discovered = discoverSkillsInDirectory(dir);\n for (const skill of discovered) {\n 
skillsMap.set(skill.name, skill);\n }\n }\n const allSkills = Array.from(skillsMap.values());\n\n // Apply filtering\n let filteredSkills = allSkills;\n if (options.include) {\n filteredSkills = allSkills.filter((s) => options.include!.includes(s.name));\n }\n if (options.exclude) {\n filteredSkills = filteredSkills.filter(\n (s) => !options.exclude!.includes(s.name),\n );\n }\n\n // Convert skills to ContextFragments for proper rendering\n const skillFragments: ContextFragment[] = filteredSkills.map((skill) => ({\n name: 'skill',\n data: {\n name: skill.name,\n path: skill.skillMdPath,\n description: skill.description,\n },\n }));\n\n return {\n name: 'available_skills',\n data: [\n {\n name: 'instructions',\n data: SKILLS_INSTRUCTIONS,\n } as ContextFragment,\n ...skillFragments,\n ],\n };\n}\n\n/**\n * Instructions for the LLM on how to use available skills.\n * Follows Anthropic's progressive disclosure - LLM reads files when needed.\n */\nconst SKILLS_INSTRUCTIONS = `When a user's request matches one of the skills listed below, read the skill's SKILL.md file to get detailed instructions before proceeding. Skills provide specialized knowledge and workflows for specific tasks.\n\nTo use a skill:\n1. Identify if the user's request matches a skill's description\n2. Read the SKILL.md file at the skill's path to load full instructions\n3. Follow the skill's guidance to complete the task\n\nSkills are only loaded when relevant - don't read skill files unless needed.`;\n", "import { DatabaseSync, type SQLInputValue } from 'node:sqlite';\n\nimport type {\n BranchData,\n BranchInfo,\n ChatData,\n ChatInfo,\n CheckpointData,\n CheckpointInfo,\n DeleteChatOptions,\n GraphBranch,\n GraphCheckpoint,\n GraphData,\n GraphNode,\n ListChatsOptions,\n MessageData,\n SearchOptions,\n SearchResult,\n StoredChatData,\n} from './store.ts';\nimport { ContextStore } from './store.ts';\n\nconst STORE_DDL = `\n-- Chats table\n-- createdAt/updatedAt: DEFAULT for insert, inline SET for updates\nCREATE TABLE IF NOT EXISTS chats (\n id TEXT PRIMARY KEY,\n userId TEXT NOT NULL,\n title TEXT,\n metadata TEXT,\n createdAt INTEGER NOT NULL DEFAULT (strftime('%s', 'now') * 1000),\n updatedAt INTEGER NOT NULL DEFAULT (strftime('%s', 'now') * 1000)\n);\n\nCREATE INDEX IF NOT EXISTS idx_chats_updatedAt ON chats(updatedAt);\nCREATE INDEX IF NOT EXISTS idx_chats_userId ON chats(userId);\n\n-- Messages table (nodes in the DAG)\nCREATE TABLE IF NOT EXISTS messages (\n id TEXT PRIMARY KEY,\n chatId TEXT NOT NULL,\n parentId TEXT,\n name TEXT NOT NULL,\n type TEXT,\n data TEXT NOT NULL,\n createdAt INTEGER NOT NULL,\n FOREIGN KEY (chatId) REFERENCES chats(id) ON DELETE CASCADE,\n FOREIGN KEY (parentId) REFERENCES messages(id)\n);\n\nCREATE INDEX IF NOT EXISTS idx_messages_chatId ON messages(chatId);\nCREATE INDEX IF NOT EXISTS idx_messages_parentId ON messages(parentId);\n\n-- Branches table (pointers to head messages)\nCREATE TABLE IF NOT EXISTS branches (\n id TEXT PRIMARY KEY,\n chatId TEXT NOT NULL,\n name TEXT NOT NULL,\n headMessageId TEXT,\n isActive INTEGER NOT NULL DEFAULT 0,\n createdAt INTEGER NOT NULL,\n FOREIGN KEY (chatId) REFERENCES chats(id) ON DELETE CASCADE,\n FOREIGN KEY (headMessageId) REFERENCES messages(id),\n UNIQUE(chatId, name)\n);\n\nCREATE INDEX IF NOT EXISTS idx_branches_chatId ON branches(chatId);\n\n-- Checkpoints table (pointers to message nodes)\nCREATE TABLE IF NOT EXISTS checkpoints (\n id TEXT PRIMARY KEY,\n chatId TEXT NOT NULL,\n name TEXT NOT NULL,\n messageId TEXT NOT NULL,\n createdAt 
INTEGER NOT NULL,\n FOREIGN KEY (chatId) REFERENCES chats(id) ON DELETE CASCADE,\n FOREIGN KEY (messageId) REFERENCES messages(id),\n UNIQUE(chatId, name)\n);\n\nCREATE INDEX IF NOT EXISTS idx_checkpoints_chatId ON checkpoints(chatId);\n\n-- FTS5 virtual table for full-text search\n-- messageId/chatId/name are UNINDEXED (stored but not searchable, used for filtering/joining)\n-- Only 'content' is indexed for full-text search\nCREATE VIRTUAL TABLE IF NOT EXISTS messages_fts USING fts5(\n messageId UNINDEXED,\n chatId UNINDEXED,\n name UNINDEXED,\n content,\n tokenize='porter unicode61'\n);\n`;\n\n/**\n * SQLite-based context store using graph model.\n *\n * Uses node:sqlite's synchronous DatabaseSync for persistence.\n * Messages are stored as nodes in a DAG with parentId links.\n */\nexport class SqliteContextStore extends ContextStore {\n #db: DatabaseSync;\n\n constructor(path: string) {\n super();\n this.#db = new DatabaseSync(path);\n this.#db.exec('PRAGMA foreign_keys = ON');\n this.#db.exec(STORE_DDL);\n }\n\n /**\n * Execute a function within a transaction.\n * Automatically commits on success or rolls back on error.\n */\n #useTransaction<T>(fn: () => T): T {\n this.#db.exec('BEGIN TRANSACTION');\n try {\n const result = fn();\n this.#db.exec('COMMIT');\n return result;\n } catch (error) {\n this.#db.exec('ROLLBACK');\n throw error;\n }\n }\n\n // ==========================================================================\n // Chat Operations\n // ==========================================================================\n\n async createChat(chat: ChatData): Promise<void> {\n this.#useTransaction(() => {\n // Create chat (createdAt and updatedAt are auto-set by SQLite DEFAULT)\n this.#db\n .prepare(\n `INSERT INTO chats (id, userId, title, metadata)\n VALUES (?, ?, ?, ?)`,\n )\n .run(\n chat.id,\n chat.userId,\n chat.title ?? null,\n chat.metadata ? JSON.stringify(chat.metadata) : null,\n );\n\n // Create \"main\" branch\n this.#db\n .prepare(\n `INSERT INTO branches (id, chatId, name, headMessageId, isActive, createdAt)\n VALUES (?, ?, 'main', NULL, 1, ?)`,\n )\n .run(crypto.randomUUID(), chat.id, Date.now());\n });\n }\n\n async upsertChat(chat: ChatData): Promise<StoredChatData> {\n return this.#useTransaction(() => {\n // Insert if not exists, no-op update if exists (to trigger RETURNING)\n const row = this.#db\n .prepare(\n `INSERT INTO chats (id, userId, title, metadata)\n VALUES (?, ?, ?, ?)\n ON CONFLICT(id) DO UPDATE SET id = excluded.id\n RETURNING *`,\n )\n .get(\n chat.id,\n chat.userId,\n chat.title ?? null,\n chat.metadata ? JSON.stringify(chat.metadata) : null,\n ) as {\n id: string;\n userId: string;\n title: string | null;\n metadata: string | null;\n createdAt: number;\n updatedAt: number;\n };\n\n // Ensure \"main\" branch exists (INSERT OR IGNORE uses UNIQUE(chatId, name) constraint)\n this.#db\n .prepare(\n `INSERT OR IGNORE INTO branches (id, chatId, name, headMessageId, isActive, createdAt)\n VALUES (?, ?, 'main', NULL, 1, ?)`,\n )\n .run(crypto.randomUUID(), chat.id, Date.now());\n\n return {\n id: row.id,\n userId: row.userId,\n title: row.title ?? undefined,\n metadata: row.metadata ? 
JSON.parse(row.metadata) : undefined,\n createdAt: row.createdAt,\n updatedAt: row.updatedAt,\n };\n });\n }\n\n async getChat(chatId: string): Promise<StoredChatData | undefined> {\n const row = this.#db\n .prepare('SELECT * FROM chats WHERE id = ?')\n .get(chatId) as\n | {\n id: string;\n userId: string;\n title: string | null;\n metadata: string | null;\n createdAt: number;\n updatedAt: number;\n }\n | undefined;\n\n if (!row) {\n return undefined;\n }\n\n return {\n id: row.id,\n userId: row.userId,\n title: row.title ?? undefined,\n metadata: row.metadata ? JSON.parse(row.metadata) : undefined,\n createdAt: row.createdAt,\n updatedAt: row.updatedAt,\n };\n }\n\n async updateChat(\n chatId: string,\n updates: Partial<Pick<ChatData, 'title' | 'metadata'>>,\n ): Promise<StoredChatData> {\n const setClauses: string[] = [\"updatedAt = strftime('%s', 'now') * 1000\"];\n const params: SQLInputValue[] = [];\n\n if (updates.title !== undefined) {\n setClauses.push('title = ?');\n params.push(updates.title ?? null);\n }\n if (updates.metadata !== undefined) {\n setClauses.push('metadata = ?');\n params.push(JSON.stringify(updates.metadata));\n }\n\n params.push(chatId);\n const row = this.#db\n .prepare(\n `UPDATE chats SET ${setClauses.join(', ')} WHERE id = ? RETURNING *`,\n )\n .get(...params) as {\n id: string;\n userId: string;\n title: string | null;\n metadata: string | null;\n createdAt: number;\n updatedAt: number;\n };\n\n return {\n id: row.id,\n userId: row.userId,\n title: row.title ?? undefined,\n metadata: row.metadata ? JSON.parse(row.metadata) : undefined,\n createdAt: row.createdAt,\n updatedAt: row.updatedAt,\n };\n }\n\n async listChats(options?: ListChatsOptions): Promise<ChatInfo[]> {\n const params: SQLInputValue[] = [];\n let whereClause = '';\n let limitClause = '';\n\n // Build WHERE clause for userId filter\n if (options?.userId) {\n whereClause = 'WHERE c.userId = ?';\n params.push(options.userId);\n }\n\n // Build LIMIT/OFFSET clause\n if (options?.limit !== undefined) {\n limitClause = ' LIMIT ?';\n params.push(options.limit);\n if (options.offset !== undefined) {\n limitClause += ' OFFSET ?';\n params.push(options.offset);\n }\n }\n\n const rows = this.#db\n .prepare(\n `SELECT\n c.id,\n c.userId,\n c.title,\n c.createdAt,\n c.updatedAt,\n COUNT(DISTINCT m.id) as messageCount,\n COUNT(DISTINCT b.id) as branchCount\n FROM chats c\n LEFT JOIN messages m ON m.chatId = c.id\n LEFT JOIN branches b ON b.chatId = c.id\n ${whereClause}\n GROUP BY c.id\n ORDER BY c.updatedAt DESC${limitClause}`,\n )\n .all(...params) as {\n id: string;\n userId: string;\n title: string | null;\n createdAt: number;\n updatedAt: number;\n messageCount: number;\n branchCount: number;\n }[];\n\n return rows.map((row) => ({\n id: row.id,\n userId: row.userId,\n title: row.title ?? 
undefined,\n messageCount: row.messageCount,\n branchCount: row.branchCount,\n createdAt: row.createdAt,\n updatedAt: row.updatedAt,\n }));\n }\n\n async deleteChat(\n chatId: string,\n options?: DeleteChatOptions,\n ): Promise<boolean> {\n return this.#useTransaction(() => {\n // Get message IDs before deletion for FTS cleanup\n const messageIds = this.#db\n .prepare('SELECT id FROM messages WHERE chatId = ?')\n .all(chatId) as { id: string }[];\n\n // Build the delete query with optional userId check\n let sql = 'DELETE FROM chats WHERE id = ?';\n const params: SQLInputValue[] = [chatId];\n\n if (options?.userId !== undefined) {\n sql += ' AND userId = ?';\n params.push(options.userId);\n }\n\n const result = this.#db.prepare(sql).run(...params);\n\n // Clean up FTS entries (CASCADE handles messages, branches, checkpoints)\n if (result.changes > 0 && messageIds.length > 0) {\n const placeholders = messageIds.map(() => '?').join(', ');\n this.#db\n .prepare(\n `DELETE FROM messages_fts WHERE messageId IN (${placeholders})`,\n )\n .run(...messageIds.map((m) => m.id));\n }\n\n return result.changes > 0;\n });\n }\n\n // ==========================================================================\n // Message Operations (Graph Nodes)\n // ==========================================================================\n\n async addMessage(message: MessageData): Promise<void> {\n // Upsert the message\n this.#db\n .prepare(\n `INSERT INTO messages (id, chatId, parentId, name, type, data, createdAt)\n VALUES (?, ?, ?, ?, ?, ?, ?)\n ON CONFLICT(id) DO UPDATE SET\n parentId = excluded.parentId,\n name = excluded.name,\n type = excluded.type,\n data = excluded.data`,\n )\n .run(\n message.id,\n message.chatId,\n message.parentId,\n message.name,\n message.type ?? null,\n JSON.stringify(message.data),\n message.createdAt,\n );\n\n // Index in FTS for search\n const content =\n typeof message.data === 'string'\n ? message.data\n : JSON.stringify(message.data);\n\n // Delete existing FTS entry if any (for upsert), then insert new one\n this.#db\n .prepare(`DELETE FROM messages_fts WHERE messageId = ?`)\n .run(message.id);\n this.#db\n .prepare(\n `INSERT INTO messages_fts(messageId, chatId, name, content)\n VALUES (?, ?, ?, ?)`,\n )\n .run(message.id, message.chatId, message.name, content);\n }\n\n async getMessage(messageId: string): Promise<MessageData | undefined> {\n const row = this.#db\n .prepare('SELECT * FROM messages WHERE id = ?')\n .get(messageId) as\n | {\n id: string;\n chatId: string;\n parentId: string | null;\n name: string;\n type: string | null;\n data: string;\n createdAt: number;\n }\n | undefined;\n\n if (!row) {\n return undefined;\n }\n\n return {\n id: row.id,\n chatId: row.chatId,\n parentId: row.parentId,\n name: row.name,\n type: row.type ?? 
undefined,\n data: JSON.parse(row.data),\n createdAt: row.createdAt,\n };\n }\n\n async getMessageChain(headId: string): Promise<MessageData[]> {\n // Walk up the parent chain using recursive CTE with depth tracking\n // The CTE walks from head (newest) to root (oldest), so we track depth\n // and order by depth DESC to get chronological order (root first)\n const rows = this.#db\n .prepare(\n `WITH RECURSIVE chain AS (\n SELECT *, 0 as depth FROM messages WHERE id = ?\n UNION ALL\n SELECT m.*, c.depth + 1 FROM messages m\n INNER JOIN chain c ON m.id = c.parentId\n )\n SELECT * FROM chain\n ORDER BY depth DESC`,\n )\n .all(headId) as {\n id: string;\n chatId: string;\n parentId: string | null;\n name: string;\n type: string | null;\n data: string;\n createdAt: number;\n depth: number;\n }[];\n\n return rows.map((row) => ({\n id: row.id,\n chatId: row.chatId,\n parentId: row.parentId,\n name: row.name,\n type: row.type ?? undefined,\n data: JSON.parse(row.data),\n createdAt: row.createdAt,\n }));\n }\n\n async hasChildren(messageId: string): Promise<boolean> {\n const row = this.#db\n .prepare(\n 'SELECT EXISTS(SELECT 1 FROM messages WHERE parentId = ?) as hasChildren',\n )\n .get(messageId) as { hasChildren: number };\n\n return row.hasChildren === 1;\n }\n\n async getMessages(chatId: string): Promise<MessageData[]> {\n const chat = await this.getChat(chatId);\n if (!chat) {\n throw new Error(`Chat \"${chatId}\" not found`);\n }\n\n const activeBranch = await this.getActiveBranch(chatId);\n if (!activeBranch?.headMessageId) {\n return [];\n }\n\n return this.getMessageChain(activeBranch.headMessageId);\n }\n\n // ==========================================================================\n // Branch Operations\n // ==========================================================================\n\n async createBranch(branch: BranchData): Promise<void> {\n this.#db\n .prepare(\n `INSERT INTO branches (id, chatId, name, headMessageId, isActive, createdAt)\n VALUES (?, ?, ?, ?, ?, ?)`,\n )\n .run(\n branch.id,\n branch.chatId,\n branch.name,\n branch.headMessageId,\n branch.isActive ? 1 : 0,\n branch.createdAt,\n );\n }\n\n async getBranch(\n chatId: string,\n name: string,\n ): Promise<BranchData | undefined> {\n const row = this.#db\n .prepare('SELECT * FROM branches WHERE chatId = ? AND name = ?')\n .get(chatId, name) as\n | {\n id: string;\n chatId: string;\n name: string;\n headMessageId: string | null;\n isActive: number;\n createdAt: number;\n }\n | undefined;\n\n if (!row) {\n return undefined;\n }\n\n return {\n id: row.id,\n chatId: row.chatId,\n name: row.name,\n headMessageId: row.headMessageId,\n isActive: row.isActive === 1,\n createdAt: row.createdAt,\n };\n }\n\n async getActiveBranch(chatId: string): Promise<BranchData | undefined> {\n const row = this.#db\n .prepare('SELECT * FROM branches WHERE chatId = ? 
AND isActive = 1')\n .get(chatId) as\n | {\n id: string;\n chatId: string;\n name: string;\n headMessageId: string | null;\n isActive: number;\n createdAt: number;\n }\n | undefined;\n\n if (!row) {\n return undefined;\n }\n\n return {\n id: row.id,\n chatId: row.chatId,\n name: row.name,\n headMessageId: row.headMessageId,\n isActive: true,\n createdAt: row.createdAt,\n };\n }\n\n async setActiveBranch(chatId: string, branchId: string): Promise<void> {\n // Deactivate all branches for this chat\n this.#db\n .prepare('UPDATE branches SET isActive = 0 WHERE chatId = ?')\n .run(chatId);\n\n // Activate the specified branch\n this.#db\n .prepare('UPDATE branches SET isActive = 1 WHERE id = ?')\n .run(branchId);\n }\n\n async updateBranchHead(\n branchId: string,\n messageId: string | null,\n ): Promise<void> {\n this.#db\n .prepare('UPDATE branches SET headMessageId = ? WHERE id = ?')\n .run(messageId, branchId);\n }\n\n async listBranches(chatId: string): Promise<BranchInfo[]> {\n // Get branches with message count by walking the chain\n const branches = this.#db\n .prepare(\n `SELECT\n b.id,\n b.name,\n b.headMessageId,\n b.isActive,\n b.createdAt\n FROM branches b\n WHERE b.chatId = ?\n ORDER BY b.createdAt ASC`,\n )\n .all(chatId) as {\n id: string;\n name: string;\n headMessageId: string | null;\n isActive: number;\n createdAt: number;\n }[];\n\n // For each branch, count messages in the chain\n const result: BranchInfo[] = [];\n for (const branch of branches) {\n let messageCount = 0;\n if (branch.headMessageId) {\n const countRow = this.#db\n .prepare(\n `WITH RECURSIVE chain AS (\n SELECT id, parentId FROM messages WHERE id = ?\n UNION ALL\n SELECT m.id, m.parentId FROM messages m\n INNER JOIN chain c ON m.id = c.parentId\n )\n SELECT COUNT(*) as count FROM chain`,\n )\n .get(branch.headMessageId) as { count: number };\n messageCount = countRow.count;\n }\n\n result.push({\n id: branch.id,\n name: branch.name,\n headMessageId: branch.headMessageId,\n isActive: branch.isActive === 1,\n messageCount,\n createdAt: branch.createdAt,\n });\n }\n\n return result;\n }\n\n // ==========================================================================\n // Checkpoint Operations\n // ==========================================================================\n\n async createCheckpoint(checkpoint: CheckpointData): Promise<void> {\n this.#db\n .prepare(\n `INSERT INTO checkpoints (id, chatId, name, messageId, createdAt)\n VALUES (?, ?, ?, ?, ?)\n ON CONFLICT(chatId, name) DO UPDATE SET\n messageId = excluded.messageId,\n createdAt = excluded.createdAt`,\n )\n .run(\n checkpoint.id,\n checkpoint.chatId,\n checkpoint.name,\n checkpoint.messageId,\n checkpoint.createdAt,\n );\n }\n\n async getCheckpoint(\n chatId: string,\n name: string,\n ): Promise<CheckpointData | undefined> {\n const row = this.#db\n .prepare('SELECT * FROM checkpoints WHERE chatId = ? 
AND name = ?')\n .get(chatId, name) as\n | {\n id: string;\n chatId: string;\n name: string;\n messageId: string;\n createdAt: number;\n }\n | undefined;\n\n if (!row) {\n return undefined;\n }\n\n return {\n id: row.id,\n chatId: row.chatId,\n name: row.name,\n messageId: row.messageId,\n createdAt: row.createdAt,\n };\n }\n\n async listCheckpoints(chatId: string): Promise<CheckpointInfo[]> {\n const rows = this.#db\n .prepare(\n `SELECT id, name, messageId, createdAt\n FROM checkpoints\n WHERE chatId = ?\n ORDER BY createdAt DESC`,\n )\n .all(chatId) as {\n id: string;\n name: string;\n messageId: string;\n createdAt: number;\n }[];\n\n return rows.map((row) => ({\n id: row.id,\n name: row.name,\n messageId: row.messageId,\n createdAt: row.createdAt,\n }));\n }\n\n async deleteCheckpoint(chatId: string, name: string): Promise<void> {\n this.#db\n .prepare('DELETE FROM checkpoints WHERE chatId = ? AND name = ?')\n .run(chatId, name);\n }\n\n // ==========================================================================\n // Search Operations\n // ==========================================================================\n\n async searchMessages(\n chatId: string,\n query: string,\n options?: SearchOptions,\n ): Promise<SearchResult[]> {\n const limit = options?.limit ?? 20;\n const roles = options?.roles;\n\n // Build the query dynamically based on options\n let sql = `\n SELECT\n m.id,\n m.chatId,\n m.parentId,\n m.name,\n m.type,\n m.data,\n m.createdAt,\n fts.rank,\n snippet(messages_fts, 3, '<mark>', '</mark>', '...', 32) as snippet\n FROM messages_fts fts\n JOIN messages m ON m.id = fts.messageId\n WHERE messages_fts MATCH ?\n AND fts.chatId = ?\n `;\n\n const params: SQLInputValue[] = [query, chatId];\n\n if (roles && roles.length > 0) {\n const placeholders = roles.map(() => '?').join(', ');\n sql += ` AND fts.name IN (${placeholders})`;\n params.push(...roles);\n }\n\n sql += ' ORDER BY fts.rank LIMIT ?';\n params.push(limit);\n\n const rows = this.#db.prepare(sql).all(...params) as {\n id: string;\n chatId: string;\n parentId: string | null;\n name: string;\n type: string | null;\n data: string;\n createdAt: number;\n rank: number;\n snippet: string;\n }[];\n\n return rows.map((row) => ({\n message: {\n id: row.id,\n chatId: row.chatId,\n parentId: row.parentId,\n name: row.name,\n type: row.type ?? undefined,\n data: JSON.parse(row.data),\n createdAt: row.createdAt,\n },\n rank: row.rank,\n snippet: row.snippet,\n }));\n }\n\n // ==========================================================================\n // Visualization Operations\n // ==========================================================================\n\n async getGraph(chatId: string): Promise<GraphData> {\n // Get all messages for complete graph\n const messageRows = this.#db\n .prepare(\n `SELECT id, parentId, name, data, createdAt\n FROM messages\n WHERE chatId = ?\n ORDER BY createdAt ASC`,\n )\n .all(chatId) as {\n id: string;\n parentId: string | null;\n name: string;\n data: string;\n createdAt: number;\n }[];\n\n const nodes: GraphNode[] = messageRows.map((row) => {\n const data = JSON.parse(row.data);\n const content = typeof data === 'string' ? data : JSON.stringify(data);\n return {\n id: row.id,\n parentId: row.parentId,\n role: row.name,\n content: content.length > 50 ? content.slice(0, 50) + '...' 
: content,\n createdAt: row.createdAt,\n };\n });\n\n // Get all branches\n const branchRows = this.#db\n .prepare(\n `SELECT name, headMessageId, isActive\n FROM branches\n WHERE chatId = ?\n ORDER BY createdAt ASC`,\n )\n .all(chatId) as {\n name: string;\n headMessageId: string | null;\n isActive: number;\n }[];\n\n const branches: GraphBranch[] = branchRows.map((row) => ({\n name: row.name,\n headMessageId: row.headMessageId,\n isActive: row.isActive === 1,\n }));\n\n // Get all checkpoints\n const checkpointRows = this.#db\n .prepare(\n `SELECT name, messageId\n FROM checkpoints\n WHERE chatId = ?\n ORDER BY createdAt ASC`,\n )\n .all(chatId) as {\n name: string;\n messageId: string;\n }[];\n\n const checkpoints: GraphCheckpoint[] = checkpointRows.map((row) => ({\n name: row.name,\n messageId: row.messageId,\n }));\n\n return {\n chatId,\n nodes,\n branches,\n checkpoints,\n };\n }\n}\n", "import { SqliteContextStore } from './sqlite.store.ts';\n\n/**\n * In-memory context store.\n *\n * Uses SQLite's :memory: database for non-persistent storage.\n * Useful for testing and short-lived sessions.\n */\nexport class InMemoryContextStore extends SqliteContextStore {\n constructor() {\n super(':memory:');\n }\n}\n", "import type { GraphData, GraphNode } from './store/store.ts';\n\n/**\n * Render a graph as ASCII art.\n *\n * @param data - The graph data to visualize\n * @returns ASCII art representation of the graph\n *\n * @example\n * ```ts\n * const graph = await store.getGraph('my-chat');\n * console.log(visualizeGraph(graph));\n * ```\n */\nexport function visualizeGraph(data: GraphData): string {\n if (data.nodes.length === 0) {\n return `[chat: ${data.chatId}]\\n\\n(empty)`;\n }\n\n // Build lookup maps\n const childrenByParentId = new Map<string | null, GraphNode[]>();\n const branchHeads = new Map<string, string[]>(); // messageId -> branch names\n const checkpointsByMessageId = new Map<string, string[]>(); // messageId -> checkpoint names\n\n for (const node of data.nodes) {\n const children = childrenByParentId.get(node.parentId) ?? [];\n children.push(node);\n childrenByParentId.set(node.parentId, children);\n }\n\n for (const branch of data.branches) {\n if (branch.headMessageId) {\n const heads = branchHeads.get(branch.headMessageId) ?? [];\n heads.push(branch.isActive ? `${branch.name} *` : branch.name);\n branchHeads.set(branch.headMessageId, heads);\n }\n }\n\n for (const checkpoint of data.checkpoints) {\n const cps = checkpointsByMessageId.get(checkpoint.messageId) ?? [];\n cps.push(checkpoint.name);\n checkpointsByMessageId.set(checkpoint.messageId, cps);\n }\n\n // Find root nodes (parentId === null)\n const roots = childrenByParentId.get(null) ?? [];\n\n const lines: string[] = [`[chat: ${data.chatId}]`, ''];\n\n // Recursively render the tree\n function renderNode(\n node: GraphNode,\n prefix: string,\n isLast: boolean,\n isRoot: boolean,\n ): void {\n const connector = isRoot ? '' : isLast ? 
'\u2514\u2500\u2500 ' : '\u251C\u2500\u2500 ';\n const contentPreview = node.content.replace(/\\n/g, ' ');\n\n let line = `${prefix}${connector}${node.id.slice(0, 8)} (${node.role}): \"${contentPreview}\"`;\n\n // Add branch markers\n const branches = branchHeads.get(node.id);\n if (branches) {\n line += ` <- [${branches.join(', ')}]`;\n }\n\n // Add checkpoint markers\n const checkpoints = checkpointsByMessageId.get(node.id);\n if (checkpoints) {\n line += ` {${checkpoints.join(', ')}}`;\n }\n\n lines.push(line);\n\n // Render children\n const children = childrenByParentId.get(node.id) ?? [];\n const childPrefix = isRoot ? '' : prefix + (isLast ? ' ' : '\u2502 ');\n\n for (let i = 0; i < children.length; i++) {\n renderNode(children[i], childPrefix, i === children.length - 1, false);\n }\n }\n\n // Render each root\n for (let i = 0; i < roots.length; i++) {\n renderNode(roots[i], '', i === roots.length - 1, true);\n }\n\n // Add legend\n lines.push('');\n lines.push('Legend: * = active branch, {...} = checkpoint');\n\n return lines.join('\\n');\n}\n", "import { groq } from '@ai-sdk/groq';\nimport {\n type GenerateTextResult,\n NoSuchToolError,\n Output,\n type StreamTextResult,\n type StreamTextTransform,\n type ToolCallRepairFunction,\n type ToolChoice,\n type ToolSet,\n convertToModelMessages,\n createUIMessageStream,\n generateId,\n generateText,\n smoothStream,\n stepCountIs,\n streamText,\n} from 'ai';\nimport chalk from 'chalk';\nimport z from 'zod';\n\nimport { type AgentModel } from '@deepagents/agent';\n\nimport { type ContextEngine, XmlRenderer } from '../index.ts';\nimport { assistantText } from './fragments.ts';\nimport {\n type Guardrail,\n type GuardrailContext,\n type StreamPart,\n runGuardrailChain,\n} from './guardrail.ts';\n\nexport interface CreateAgent<CIn, COut = CIn> {\n name: string;\n context?: ContextEngine;\n tools?: ToolSet;\n model?: AgentModel;\n toolChoice?: ToolChoice<Record<string, COut>>;\n providerOptions?: Parameters<typeof generateText>[0]['providerOptions'];\n logging?: boolean;\n /**\n * Guardrails to apply during streaming.\n * Each guardrail inspects text chunks and can trigger self-correction retries.\n */\n guardrails?: Guardrail[];\n /**\n * Maximum number of retry attempts when guardrails fail (default: 3).\n */\n maxGuardrailRetries?: number;\n}\n\nclass Agent<CIn, COut = CIn> {\n #options: CreateAgent<CIn, COut>;\n #guardrails: Guardrail[] = [];\n readonly tools: ToolSet;\n constructor(options: CreateAgent<CIn, COut>) {\n this.#options = options;\n this.tools = options.tools || {};\n this.#guardrails = options.guardrails || [];\n }\n\n public async generate<COut, CIn = COut>(\n contextVariables: CIn,\n config?: {\n abortSignal?: AbortSignal;\n },\n ): Promise<GenerateTextResult<ToolSet, Output.Output<string, string, any>>> {\n if (!this.#options.context) {\n throw new Error(`Agent ${this.#options.name} is missing a context.`);\n }\n if (!this.#options.model) {\n throw new Error(`Agent ${this.#options.name} is missing a model.`);\n }\n const { messages, systemPrompt } = await this.#options.context.resolve({\n renderer: new XmlRenderer(),\n });\n return generateText({\n abortSignal: config?.abortSignal,\n providerOptions: this.#options.providerOptions,\n model: this.#options.model,\n system: systemPrompt,\n messages: await convertToModelMessages(messages as never),\n stopWhen: stepCountIs(25),\n tools: this.#options.tools,\n experimental_context: contextVariables,\n experimental_repairToolCall: repairToolCall,\n toolChoice: 
this.#options.toolChoice,\n onStepFinish: (step) => {\n const toolCall = step.toolCalls.at(-1);\n if (toolCall) {\n console.log(\n `Debug: ${chalk.yellow('ToolCalled')}: ${toolCall.toolName}(${JSON.stringify(toolCall.input)})`,\n );\n }\n },\n });\n }\n\n /**\n * Stream a response from the agent.\n *\n * When guardrails are configured, `toUIMessageStream()` is wrapped to provide\n * self-correction behavior. Direct access to fullStream/textStream bypasses guardrails.\n *\n * @example\n * ```typescript\n * const stream = await agent.stream({});\n *\n * // With guardrails - use toUIMessageStream for protection\n * await printer.readableStream(stream.toUIMessageStream());\n *\n * // Or use printer.stdout which uses toUIMessageStream internally\n * await printer.stdout(stream);\n * ```\n */\n public async stream<COut, CIn = COut>(\n contextVariables: CIn,\n config?: {\n abortSignal?: AbortSignal;\n transform?: StreamTextTransform<ToolSet> | StreamTextTransform<ToolSet>[];\n maxRetries?: number;\n },\n ): Promise<StreamTextResult<ToolSet, never>> {\n if (!this.#options.context) {\n throw new Error(`Agent ${this.#options.name} is missing a context.`);\n }\n if (!this.#options.model) {\n throw new Error(`Agent ${this.#options.name} is missing a model.`);\n }\n\n const result = await this.#createRawStream(contextVariables, config);\n\n if (this.#guardrails.length === 0) {\n return result;\n }\n\n return this.#wrapWithGuardrails(result, contextVariables, config);\n }\n\n /**\n * Create a raw stream without guardrail processing.\n */\n async #createRawStream<COut, CIn = COut>(\n contextVariables: CIn,\n config?: {\n abortSignal?: AbortSignal;\n transform?: StreamTextTransform<ToolSet> | StreamTextTransform<ToolSet>[];\n },\n ) {\n const { messages, systemPrompt } = await this.#options.context!.resolve({\n renderer: new XmlRenderer(),\n });\n\n const runId = generateId();\n return streamText({\n abortSignal: config?.abortSignal,\n providerOptions: this.#options.providerOptions,\n model: this.#options.model!,\n system: systemPrompt,\n messages: await convertToModelMessages(messages as never),\n experimental_repairToolCall: repairToolCall,\n stopWhen: stepCountIs(50),\n experimental_transform: config?.transform ?? smoothStream(),\n tools: this.#options.tools,\n experimental_context: contextVariables,\n toolChoice: this.#options.toolChoice,\n onStepFinish: (step) => {\n const toolCall = step.toolCalls.at(-1);\n if (toolCall) {\n console.log(\n `Debug: (${runId}) ${chalk.bold.yellow('ToolCalled')}: ${toolCall.toolName}(${JSON.stringify(toolCall.input)})`,\n );\n }\n },\n });\n }\n\n /**\n * Wrap a StreamTextResult with guardrail protection on toUIMessageStream().\n *\n * When a guardrail fails:\n * 1. Accumulated text + feedback is appended as the assistant's self-correction\n * 2. The feedback is written to the output stream (user sees the correction)\n * 3. A new stream is started and the model continues from the correction\n */\n #wrapWithGuardrails<CIn>(\n result: StreamTextResult<ToolSet, never>,\n contextVariables: CIn,\n config?: {\n abortSignal?: AbortSignal;\n transform?: StreamTextTransform<ToolSet> | StreamTextTransform<ToolSet>[];\n maxRetries?: number;\n },\n ): StreamTextResult<ToolSet, never> {\n const maxRetries =\n config?.maxRetries ?? this.#options.maxGuardrailRetries ?? 
3;\n const context = this.#options.context!;\n\n // Save original method BEFORE override (prevents infinite recursion)\n const originalToUIMessageStream = result.toUIMessageStream.bind(result);\n\n // Override toUIMessageStream with guardrail logic\n result.toUIMessageStream = (options) => {\n return createUIMessageStream({\n generateId,\n execute: async ({ writer }) => {\n let currentResult: StreamTextResult<ToolSet, never> = result;\n let attempt = 0;\n\n // Create guardrail context with available tools\n const guardrailContext: GuardrailContext = {\n availableTools: Object.keys(this.tools),\n };\n\n while (attempt < maxRetries) {\n // Check if request was cancelled before starting new attempt\n if (config?.abortSignal?.aborted) {\n writer.write({ type: 'finish' });\n return;\n }\n\n attempt++;\n let accumulatedText = '';\n let guardrailFailed = false;\n let failureFeedback = '';\n\n // Use original method for first result (avoids recursion), new results have their own original\n const uiStream =\n currentResult === result\n ? originalToUIMessageStream(options)\n : currentResult.toUIMessageStream(options);\n\n // Iterate over toUIMessageStream() - run ALL parts through guardrails\n for await (const part of uiStream) {\n // Run through guardrail chain - guardrails can handle any part type\n const checkResult = runGuardrailChain(\n part as StreamPart,\n this.#guardrails,\n guardrailContext,\n );\n\n if (checkResult.type === 'fail') {\n guardrailFailed = true;\n failureFeedback = checkResult.feedback;\n\n console.log(\n chalk.yellow(\n `[${this.#options.name}] Guardrail triggered (attempt ${attempt}/${maxRetries}): ${failureFeedback.slice(0, 50)}...`,\n ),\n );\n\n break; // Exit stream processing\n }\n\n // Guardrail passed - track text for self-correction context\n if (checkResult.part.type === 'text-delta') {\n accumulatedText += (checkResult.part as { delta: string })\n .delta;\n }\n\n // Write the (possibly modified) part to output\n writer.write(checkResult.part as typeof part);\n }\n\n if (!guardrailFailed) {\n // Stream completed successfully\n writer.write({ type: 'finish' });\n return;\n }\n\n // Check if we've exceeded max retries BEFORE writing feedback\n if (attempt >= maxRetries) {\n console.error(\n chalk.red(\n `[${this.#options.name}] Guardrail retry limit (${maxRetries}) exceeded.`,\n ),\n );\n writer.write({ type: 'finish' });\n return;\n }\n\n // Guardrail failed but we have retries left - prepare for retry\n // Write the self-correction feedback to the output stream\n writer.write({\n type: 'text-delta',\n id: generateId(),\n delta: ` ${failureFeedback}`,\n });\n\n // Add the partial assistant message + feedback to context\n const selfCorrectionText = accumulatedText + ' ' + failureFeedback;\n context.set(assistantText(selfCorrectionText));\n\n // Save to persist the self-correction (prevents duplicate messages on next resolve)\n await context.save();\n\n // Create new stream for retry\n currentResult = await this.#createRawStream(\n contextVariables,\n config,\n );\n }\n },\n onError: (error) => {\n const message =\n error instanceof Error ? 
error.message : String(error);\n return `Stream failed: ${message}`;\n },\n });\n };\n\n return result;\n }\n\n clone(overrides?: Partial<CreateAgent<CIn, COut>>): Agent<CIn, COut> {\n return new Agent<CIn, COut>({\n ...this.#options,\n ...overrides,\n });\n }\n}\n\nexport function agent<CIn, COut = CIn>(\n options: CreateAgent<CIn, COut>,\n): Agent<CIn, COut> {\n return new Agent(options);\n}\n\n/**\n * Options for creating a structured output handler.\n */\nexport interface StructuredOutputOptions<TSchema extends z.ZodType> {\n context?: ContextEngine;\n model?: AgentModel;\n schema: TSchema;\n providerOptions?: Parameters<typeof generateText>[0]['providerOptions'];\n tools?: ToolSet;\n}\n\n/**\n * Create a structured output handler that provides simplified access to structured output.\n *\n * @param options - Configuration options including schema\n * @returns Object with generate() and stream() methods\n *\n * @example\n * ```typescript\n * const output = structuredOutput({\n * name: 'extractor',\n * model: groq('...'),\n * context,\n * schema: z.object({\n * name: z.string(),\n * age: z.number(),\n * }),\n * });\n *\n * // Generate - returns only the structured output\n * const result = await output.generate({});\n * // result: { name: string, age: number }\n *\n * // Stream - returns the full stream\n * const stream = await output.stream({});\n * ```\n */\nexport interface StructuredOutputResult<TSchema extends z.ZodType> {\n generate<CIn>(\n contextVariables?: CIn,\n config?: { abortSignal?: AbortSignal },\n ): Promise<z.infer<TSchema>>;\n stream<CIn>(\n contextVariables?: CIn,\n config?: {\n abortSignal?: AbortSignal;\n transform?: StreamTextTransform<ToolSet> | StreamTextTransform<ToolSet>[];\n },\n ): Promise<StreamTextResult<ToolSet, any>>;\n}\n\nexport function structuredOutput<TSchema extends z.ZodType>(\n options: StructuredOutputOptions<TSchema>,\n): StructuredOutputResult<TSchema> {\n return {\n async generate<CIn>(\n contextVariables?: CIn,\n config?: { abortSignal?: AbortSignal },\n ): Promise<z.infer<TSchema>> {\n if (!options.context) {\n throw new Error(`structuredOutput is missing a context.`);\n }\n if (!options.model) {\n throw new Error(`structuredOutput is missing a model.`);\n }\n\n const { messages, systemPrompt } = await options.context.resolve({\n renderer: new XmlRenderer(),\n });\n\n const result = await generateText({\n abortSignal: config?.abortSignal,\n providerOptions: options.providerOptions,\n model: options.model,\n system: systemPrompt,\n messages: await convertToModelMessages(messages as never),\n stopWhen: stepCountIs(25),\n experimental_repairToolCall: repairToolCall,\n experimental_context: contextVariables,\n output: Output.object({ schema: options.schema }),\n tools: options.tools,\n });\n\n return result.output as z.infer<TSchema>;\n },\n\n async stream<CIn>(\n contextVariables?: CIn,\n config?: {\n abortSignal?: AbortSignal;\n transform?:\n | StreamTextTransform<ToolSet>\n | StreamTextTransform<ToolSet>[];\n },\n ) {\n if (!options.context) {\n throw new Error(`structuredOutput is missing a context.`);\n }\n if (!options.model) {\n throw new Error(`structuredOutput is missing a model.`);\n }\n\n const { messages, systemPrompt } = await options.context.resolve({\n renderer: new XmlRenderer(),\n });\n\n return streamText({\n abortSignal: config?.abortSignal,\n providerOptions: options.providerOptions,\n model: options.model,\n system: systemPrompt,\n experimental_repairToolCall: repairToolCall,\n messages: await convertToModelMessages(messages 
as never),\n stopWhen: stepCountIs(50),\n experimental_transform: config?.transform ?? smoothStream(),\n experimental_context: contextVariables,\n output: Output.object({ schema: options.schema }),\n tools: options.tools,\n });\n },\n };\n}\n\nconst repairToolCall: ToolCallRepairFunction<ToolSet> = async ({\n toolCall,\n tools,\n inputSchema,\n error,\n}) => {\n console.log(\n `Debug: ${chalk.yellow('RepairingToolCall')}: ${toolCall.toolName}`,\n error.name,\n );\n if (NoSuchToolError.isInstance(error)) {\n return null; // do not attempt to fix invalid tool names\n }\n\n const tool = tools[toolCall.toolName as keyof typeof tools];\n\n const { output } = await generateText({\n model: groq('openai/gpt-oss-20b'),\n output: Output.object({ schema: tool.inputSchema }),\n prompt: [\n `The model tried to call the tool \"${toolCall.toolName}\"` +\n ` with the following inputs:`,\n JSON.stringify(toolCall.input),\n `The tool accepts the following schema:`,\n JSON.stringify(inputSchema(toolCall)),\n 'Please fix the inputs.',\n ].join('\\n'),\n });\n\n return { ...toolCall, input: JSON.stringify(output) };\n};\n", "import { type ContextFragment, fragment } from './fragments.ts';\nimport { XmlRenderer } from './renderers/abstract.renderer.ts';\n\n/**\n * Render fragments to XML.\n *\n * Wraps fragments in a parent tag and renders using XmlRenderer.\n *\n * @param tag - Parent tag name (e.g., 'instructions')\n * @param fragments - Fragments to render\n * @returns XML string\n *\n * @example\n * ```ts\n * const xml = render(\n * 'instructions',\n * persona({ name: 'Freya', role: 'Data Assistant' }),\n * guardrail({ rule: 'Never expose PII' }),\n * );\n * ```\n */\nexport function render(tag: string, ...fragments: ContextFragment[]): string {\n if (fragments.length === 0) {\n return '';\n }\n\n const renderer = new XmlRenderer();\n const wrapped = fragment(tag, ...fragments);\n return renderer.render([wrapped]);\n}\n", "import type { Table } from '../adapter.ts';\nimport { AbstractGrounding } from './abstract.grounding.ts';\nimport type { GroundingContext } from './context.ts';\n\n/**\n * Configuration for RowCountGrounding.\n */\nexport interface RowCountGroundingConfig {\n // Future: filter which tables to count\n}\n\n/**\n * Abstract base class for row count grounding.\n *\n * Reads tables from the context and annotates them with row counts and size hints.\n * This grounding must run AFTER TableGrounding since it reads from ctx.tables.\n *\n * Subclasses implement the database-specific hook:\n * - `getRowCount()` - get row count for a table\n */\nexport abstract class RowCountGrounding extends AbstractGrounding {\n constructor(config: RowCountGroundingConfig = {}) {\n super('rowCount');\n }\n\n /**\n * Get row count for a specific table.\n */\n protected abstract getRowCount(\n tableName: string,\n ): Promise<number | undefined>;\n\n /**\n * Execute the grounding process.\n * Annotates tables in ctx.tables with row counts and size hints.\n */\n async execute(ctx: GroundingContext): Promise<void> {\n for (const table of ctx.tables) {\n const count = await this.getRowCount(table.name);\n if (count != null) {\n table.rowCount = count;\n table.sizeHint = this.#classifyRowCount(count);\n }\n }\n }\n\n /**\n * Classify row count into a size hint category.\n */\n #classifyRowCount(count: number): Table['sizeHint'] {\n if (count < 100) return 'tiny';\n if (count < 1000) return 'small';\n if (count < 10000) return 'medium';\n if (count < 100000) return 'large';\n return 'huge';\n }\n}\n", "import type { 
Filter, Relationship, Table } from '../adapter.ts';\nimport { AbstractGrounding } from './abstract.grounding.ts';\nimport type { GroundingContext } from './context.ts';\n\n/**\n * Configuration for TableGrounding.\n */\nexport interface TableGroundingConfig {\n /** Filter to select seed tables */\n filter?: Filter;\n /**\n * Traverse forward (child\u2192parent) following FK direction.\n * - true: unlimited depth\n * - number: maximum depth\n * - false/undefined: no forward traversal\n */\n forward?: boolean | number;\n /**\n * Traverse backward (parent\u2192child) finding tables that reference us.\n * - true: unlimited depth\n * - number: maximum depth\n * - false/undefined: no backward traversal\n */\n backward?: boolean | number;\n}\n\n/**\n * Abstract base class for table grounding.\n *\n * The `execute()` method implements a BFS traversal algorithm that discovers\n * tables and relationships. Subclasses implement the database-specific hooks:\n * - `getAllTableNames()` - list all tables\n * - `getTable()` - get table metadata\n * - `findOutgoingRelations()` - find FKs FROM a table\n * - `findIncomingRelations()` - find FKs TO a table\n */\nexport abstract class TableGrounding extends AbstractGrounding {\n #filter?: Filter;\n #forward?: boolean | number;\n #backward?: boolean | number;\n\n constructor(config: TableGroundingConfig = {}) {\n super('table');\n this.#filter = config.filter;\n this.#forward = config.forward;\n this.#backward = config.backward;\n }\n\n /** Get all table names in the database */\n protected abstract getAllTableNames(): Promise<string[]>;\n\n /** Get full table metadata for a single table */\n protected abstract getTable(tableName: string): Promise<Table>;\n\n /** Find FKs FROM this table (outgoing relationships) */\n protected abstract findOutgoingRelations(\n tableName: string,\n ): Promise<Relationship[]>;\n\n /** Find FKs TO this table (incoming relationships) */\n protected abstract findIncomingRelations(\n tableName: string,\n ): Promise<Relationship[]>;\n\n /**\n * Execute the grounding process.\n * Writes discovered tables and relationships to the context.\n */\n async execute(ctx: GroundingContext): Promise<void> {\n const seedTables = await this.applyFilter();\n const forward = this.#forward;\n const backward = this.#backward;\n\n // No traversal at all - just add the seed tables\n if (!forward && !backward) {\n const tables = await Promise.all(\n seedTables.map((name) => this.getTable(name)),\n );\n ctx.tables.push(...tables);\n return;\n }\n\n const tables: Record<string, Table> = {};\n const allRelationships: Relationship[] = [];\n const seenRelationships = new Set<string>();\n\n // Track depth separately for forward/backward using BFS\n const forwardQueue: Array<{ name: string; depth: number }> = [];\n const backwardQueue: Array<{ name: string; depth: number }> = [];\n const forwardVisited = new Set<string>();\n const backwardVisited = new Set<string>();\n\n // Initialize queues with seed tables at depth 0\n for (const name of seedTables) {\n if (forward) forwardQueue.push({ name, depth: 0 });\n if (backward) backwardQueue.push({ name, depth: 0 });\n }\n\n // Process forward (child\u2192parent)\n const forwardLimit = forward === true ? 
Infinity : forward || 0;\n while (forwardQueue.length > 0) {\n const item = forwardQueue.shift();\n if (!item) break;\n const { name, depth } = item;\n\n if (forwardVisited.has(name)) continue;\n forwardVisited.add(name);\n\n if (!tables[name]) {\n tables[name] = await this.getTable(name);\n }\n\n if (depth < forwardLimit) {\n const rels = await this.findOutgoingRelations(name);\n for (const rel of rels) {\n this.addRelationship(rel, allRelationships, seenRelationships);\n if (!forwardVisited.has(rel.referenced_table)) {\n forwardQueue.push({ name: rel.referenced_table, depth: depth + 1 });\n }\n }\n }\n }\n\n // Process backward (parent\u2192child)\n const backwardLimit = backward === true ? Infinity : backward || 0;\n while (backwardQueue.length > 0) {\n const item = backwardQueue.shift();\n if (!item) break;\n const { name, depth } = item;\n\n if (backwardVisited.has(name)) continue;\n backwardVisited.add(name);\n\n if (!tables[name]) {\n tables[name] = await this.getTable(name);\n }\n\n if (depth < backwardLimit) {\n const rels = await this.findIncomingRelations(name);\n for (const rel of rels) {\n this.addRelationship(rel, allRelationships, seenRelationships);\n if (!backwardVisited.has(rel.table)) {\n backwardQueue.push({ name: rel.table, depth: depth + 1 });\n }\n }\n }\n }\n\n // Write to context\n const tablesList = Object.values(tables);\n ctx.tables.push(...tablesList);\n ctx.relationships.push(...allRelationships);\n }\n\n /**\n * Apply the filter to get seed table names.\n * If filter is an explicit array, skip querying all table names.\n */\n protected async applyFilter(): Promise<string[]> {\n const filter = this.#filter;\n if (Array.isArray(filter)) {\n return filter;\n }\n const names = await this.getAllTableNames();\n if (!filter) {\n return names;\n }\n if (filter instanceof RegExp) {\n return names.filter((name) => filter.test(name));\n }\n return names.filter(filter);\n }\n\n /**\n * Add a relationship to the collection, deduplicating by key.\n */\n protected addRelationship(\n rel: Relationship,\n all: Relationship[],\n seen: Set<string>,\n ): void {\n const key = `${rel.table}:${rel.from.join(',')}:${rel.referenced_table}:${rel.to.join(',')}`;\n if (!seen.has(key)) {\n seen.add(key);\n all.push(rel);\n }\n }\n}\n", "import type { Adapter, ColumnStats } from '../adapter.ts';\nimport {\n ColumnStatsGrounding,\n type ColumnStatsGroundingConfig,\n} from '../groundings/column-stats.grounding.ts';\nimport type { Column } from '../groundings/context.ts';\n\n/**\n * SQLite implementation of ColumnStatsGrounding.\n */\nexport class SqliteColumnStatsGrounding extends ColumnStatsGrounding {\n #adapter: Adapter;\n\n constructor(adapter: Adapter, config: ColumnStatsGroundingConfig = {}) {\n super(config);\n this.#adapter = adapter;\n }\n\n protected override async collectStats(\n tableName: string,\n column: Column,\n ): Promise<ColumnStats | undefined> {\n if (!this.#shouldCollectStats(column.type)) {\n return undefined;\n }\n\n const tableIdentifier = this.#adapter.quoteIdentifier(tableName);\n const columnIdentifier = this.#adapter.quoteIdentifier(column.name);\n\n const sql = `\n SELECT\n MIN(${columnIdentifier}) AS min_value,\n MAX(${columnIdentifier}) AS max_value,\n AVG(CASE WHEN ${columnIdentifier} IS NULL THEN 1.0 ELSE 0.0 END) AS null_fraction\n FROM ${tableIdentifier}\n `;\n\n const rows = await this.#adapter.runQuery<{\n min_value: unknown;\n max_value: unknown;\n null_fraction: number | string | null;\n }>(sql);\n\n if (!rows.length) {\n return undefined;\n }\n\n 
const min = this.#normalizeValue(rows[0]?.min_value);\n const max = this.#normalizeValue(rows[0]?.max_value);\n const nullFraction = this.#adapter.toNumber(rows[0]?.null_fraction);\n\n if (min == null && max == null && nullFraction == null) {\n return undefined;\n }\n\n return {\n min: min ?? undefined,\n max: max ?? undefined,\n nullFraction:\n nullFraction != null && Number.isFinite(nullFraction)\n ? Math.max(0, Math.min(1, nullFraction))\n : undefined,\n };\n }\n\n #shouldCollectStats(type: string | undefined): boolean {\n if (!type) {\n return false;\n }\n const normalized = type.toLowerCase();\n return /int|real|numeric|double|float|decimal|date|time|bool/.test(\n normalized,\n );\n }\n\n #normalizeValue(value: unknown): string | null {\n if (value === null || value === undefined) {\n return null;\n }\n if (typeof value === 'string') {\n return value;\n }\n if (typeof value === 'number' || typeof value === 'bigint') {\n return String(value);\n }\n if (typeof value === 'boolean') {\n return value ? 'true' : 'false';\n }\n if (value instanceof Date) {\n return value.toISOString();\n }\n if (typeof Buffer !== 'undefined' && Buffer.isBuffer(value)) {\n return value.toString('utf-8');\n }\n return null;\n }\n}\n", "import type { Adapter, AdapterInfo } from '../adapter.ts';\nimport {\n InfoGrounding,\n type InfoGroundingConfig,\n} from '../groundings/info.grounding.ts';\n\n/**\n * SQLite implementation of InfoGrounding.\n */\nexport class SqliteInfoGrounding extends InfoGrounding {\n #adapter: Adapter;\n\n constructor(adapter: Adapter, config: InfoGroundingConfig = {}) {\n super(config);\n this.#adapter = adapter;\n }\n\n protected override async collectInfo(): Promise<AdapterInfo> {\n const rows = await this.#adapter.runQuery<{ version: string }>(\n 'SELECT sqlite_version() AS version',\n );\n\n return {\n dialect: 'sqlite',\n version: rows[0]?.version,\n details: {\n parameterPlaceholder: '?',\n },\n };\n }\n}\n", "import type { Adapter } from '../adapter.ts';\nimport {\n type Column,\n ColumnValuesGrounding,\n type ColumnValuesGroundingConfig,\n} from '../groundings/column-values.grounding.ts';\n\n/**\n * SQLite implementation of ColumnValuesGrounding.\n *\n * Supports:\n * - CHECK constraints with IN clauses (inherited from base)\n * - Low cardinality data scan\n *\n * Note: SQLite does not have native ENUM types.\n */\nexport class SqliteColumnValuesGrounding extends ColumnValuesGrounding {\n #adapter: Adapter;\n\n constructor(adapter: Adapter, config: ColumnValuesGroundingConfig = {}) {\n super(config);\n this.#adapter = adapter;\n }\n\n protected override async collectLowCardinality(\n tableName: string,\n column: Column,\n ): Promise<string[] | undefined> {\n const tableIdentifier = this.#adapter.quoteIdentifier(tableName);\n const columnIdentifier = this.#adapter.quoteIdentifier(column.name);\n const limit = this.lowCardinalityLimit + 1;\n\n const sql = `\n SELECT DISTINCT ${columnIdentifier} AS value\n FROM ${tableIdentifier}\n WHERE ${columnIdentifier} IS NOT NULL\n LIMIT ${limit}\n `;\n\n const rows = await this.#adapter.runQuery<{ value: unknown }>(sql);\n\n if (!rows.length || rows.length > this.lowCardinalityLimit) {\n return undefined;\n }\n\n const values: string[] = [];\n for (const row of rows) {\n const formatted = this.#normalizeValue(row.value);\n if (formatted == null) {\n return undefined;\n }\n values.push(formatted);\n }\n\n return values.length ? 
values : undefined;\n }\n\n #normalizeValue(value: unknown): string | null {\n if (value === null || value === undefined) {\n return null;\n }\n if (typeof value === 'string') {\n return value;\n }\n if (typeof value === 'number' || typeof value === 'bigint') {\n return String(value);\n }\n if (typeof value === 'boolean') {\n return value ? 'true' : 'false';\n }\n if (value instanceof Date) {\n return value.toISOString();\n }\n if (typeof Buffer !== 'undefined' && Buffer.isBuffer(value)) {\n return value.toString('utf-8');\n }\n return null;\n }\n}\n", "import type { Adapter } from '../adapter.ts';\nimport {\n RowCountGrounding,\n type RowCountGroundingConfig,\n} from '../groundings/row-count.grounding.ts';\n\n/**\n * SQLite implementation of RowCountGrounding.\n */\nexport class SqliteRowCountGrounding extends RowCountGrounding {\n #adapter: Adapter;\n\n constructor(adapter: Adapter, config: RowCountGroundingConfig = {}) {\n super(config);\n this.#adapter = adapter;\n }\n\n protected override async getRowCount(tableName: string): Promise<number | undefined> {\n const rows = await this.#adapter.runQuery<{ count: number | string | bigint | null }>(\n `SELECT COUNT(*) as count FROM ${this.#adapter.quoteIdentifier(tableName)}`,\n );\n\n return this.#adapter.toNumber(rows[0]?.count);\n }\n}\n", "import {\n Adapter,\n type ExecuteFunction,\n type GroundingFn,\n type ValidateFunction,\n} from '../adapter.ts';\n\nconst SQL_ERROR_MAP: Array<{\n pattern: RegExp;\n type: string;\n hint: string;\n}> = [\n {\n pattern: /^no such table: .+$/,\n type: 'MISSING_TABLE',\n hint: 'Check the database schema for the correct table name. The table you referenced does not exist.',\n },\n {\n pattern: /^no such column: .+$/,\n type: 'INVALID_COLUMN',\n hint: 'Check the table schema for correct column names. The column may not exist or is ambiguous (exists in multiple joined tables).',\n },\n {\n pattern: /^ambiguous column name: .+$/,\n type: 'INVALID_COLUMN',\n hint: 'Check the table schema for correct column names. The column may not exist or is ambiguous (exists in multiple joined tables).',\n },\n {\n pattern: /^near \".+\": syntax error$/,\n type: 'SYNTAX_ERROR',\n hint: 'There is a SQL syntax error. Review the query structure, keywords, and punctuation.',\n },\n {\n pattern: /^no tables specified$/,\n type: 'SYNTAX_ERROR',\n hint: 'There is a SQL syntax error. Review the query structure, keywords, and punctuation.',\n },\n {\n pattern: /^attempt to write a readonly database$/,\n type: 'CONSTRAINT_ERROR',\n hint: 'A database constraint was violated. This should not happen with read-only queries.',\n },\n];\n\nexport type SqliteAdapterOptions = {\n execute: ExecuteFunction;\n validate?: ValidateFunction;\n grounding: GroundingFn[];\n};\n\ntype ColumnRow = {\n name: string | null | undefined;\n type: string | null | undefined;\n pk?: number | null | undefined;\n};\n\ntype IndexListRow = {\n seq?: number | null | undefined;\n name?: string | null | undefined;\n unique?: number | null | undefined;\n origin?: string | null | undefined;\n};\n\ntype IndexInfoRow = {\n seqno?: number | null | undefined;\n cid?: number | null | undefined;\n name?: string | null | undefined;\n};\ntype ForeignKeyRow = {\n id: number | null | undefined;\n table: string | null | undefined;\n from: string | null | undefined;\n to: string | null | undefined;\n};\n\nconst LOW_CARDINALITY_LIMIT = 20;\n\nexport function formatError(sql: string, error: unknown) {\n const errorMessage =\n error instanceof Error\n ? 
error.message\n : typeof error === 'string'\n ? error\n : 'Unknown error occurred';\n const errorInfo = SQL_ERROR_MAP.find((it) => it.pattern.test(errorMessage));\n\n if (!errorInfo) {\n return {\n error: errorMessage,\n error_type: 'UNKNOWN_ERROR',\n suggestion: 'Review the query and try again',\n sql_attempted: sql,\n };\n }\n\n return {\n error: errorMessage,\n error_type: errorInfo.type,\n suggestion: errorInfo.hint,\n sql_attempted: sql,\n };\n}\n\nexport class Sqlite extends Adapter {\n #options: SqliteAdapterOptions;\n override readonly grounding: GroundingFn[];\n override readonly defaultSchema = undefined;\n override readonly systemSchemas: string[] = [];\n\n constructor(options: SqliteAdapterOptions) {\n super();\n if (!options || typeof options.execute !== 'function') {\n throw new Error('Sqlite adapter requires an execute function.');\n }\n this.#options = options;\n this.grounding = options.grounding;\n }\n\n override async execute(sql: string) {\n return this.#options.execute(sql);\n }\n\n override async validate(sql: string) {\n const validator: ValidateFunction =\n this.#options.validate ??\n (async (text: string) => {\n await this.#options.execute(`EXPLAIN ${text}`);\n });\n\n try {\n return await validator(sql);\n } catch (error) {\n return JSON.stringify(formatError(sql, error));\n }\n }\n\n #quoteIdentifier(name: string) {\n return `'${name.replace(/'/g, \"''\")}'`;\n }\n\n override async runQuery<Row>(sql: string): Promise<Row[]> {\n const result = await this.#options.execute(sql);\n\n if (Array.isArray(result)) {\n return result as Row[];\n }\n\n if (\n result &&\n typeof result === 'object' &&\n 'rows' in result &&\n Array.isArray((result as { rows?: unknown }).rows)\n ) {\n return (result as { rows: Row[] }).rows;\n }\n\n throw new Error(\n 'Sqlite adapter execute() must return an array of rows or an object with a rows array when introspecting.',\n );\n }\n\n override quoteIdentifier(name: string): string {\n return `\"${name.replace(/\"/g, '\"\"')}\"`;\n }\n\n override escape(value: string): string {\n return value.replace(/\"/g, '\"\"');\n }\n\n override buildSampleRowsQuery(\n tableName: string,\n columns: string[] | undefined,\n limit: number,\n ): string {\n const columnList = columns?.length\n ? 
columns.map((c) => this.quoteIdentifier(c)).join(', ')\n : '*';\n return `SELECT ${columnList} FROM ${this.quoteIdentifier(tableName)} LIMIT ${limit}`;\n }\n}\n", "import type { Adapter, Relationship, Table } from '../adapter.ts';\nimport {\n TableGrounding,\n type TableGroundingConfig,\n} from '../groundings/table.grounding.ts';\n\ntype ColumnRow = {\n name: string | null | undefined;\n type: string | null | undefined;\n pk?: number | null | undefined;\n};\n\ntype ForeignKeyRow = {\n id: number | null | undefined;\n table: string | null | undefined;\n from: string | null | undefined;\n to: string | null | undefined;\n};\n\n/**\n * SQLite implementation of TableGrounding.\n *\n * SQLite requires caching all relationships for backward lookups because\n * PRAGMA foreign_key_list only returns outgoing FKs from a specific table.\n */\nexport class SqliteTableGrounding extends TableGrounding {\n #adapter: Adapter;\n #relationshipCache: Relationship[] | null = null;\n\n constructor(adapter: Adapter, config: TableGroundingConfig = {}) {\n super(config);\n this.#adapter = adapter;\n }\n\n protected override async getAllTableNames(): Promise<string[]> {\n const rows = await this.#adapter.runQuery<{\n name: string | null | undefined;\n }>(`SELECT name FROM sqlite_master WHERE type='table' ORDER BY name`);\n\n return rows\n .map((row) => row.name)\n .filter(\n (name): name is string =>\n typeof name === 'string' && !name.startsWith('sqlite_'),\n );\n }\n\n protected override async getTable(tableName: string): Promise<Table> {\n const columns = await this.#adapter.runQuery<ColumnRow>(\n `PRAGMA table_info(${this.#quoteIdentifier(tableName)})`,\n );\n\n return {\n name: tableName,\n rawName: tableName,\n columns: columns.map((col) => ({\n name: col.name ?? 'unknown',\n type: col.type ?? 
'unknown',\n })),\n };\n }\n\n protected override async findOutgoingRelations(\n tableName: string,\n ): Promise<Relationship[]> {\n const rows = await this.#adapter.runQuery<ForeignKeyRow>(\n `PRAGMA foreign_key_list(${this.#quoteIdentifier(tableName)})`,\n );\n\n const groups = new Map<number, Relationship>();\n\n for (const row of rows) {\n if (\n row.id == null ||\n row.table == null ||\n row.from == null ||\n row.to == null\n ) {\n continue;\n }\n\n const id = Number(row.id);\n const existing = groups.get(id);\n if (!existing) {\n groups.set(id, {\n table: tableName,\n from: [String(row.from)],\n referenced_table: String(row.table),\n to: [String(row.to)],\n });\n } else {\n existing.from.push(String(row.from));\n existing.to.push(String(row.to));\n }\n }\n\n return Array.from(groups.values());\n }\n\n protected override async findIncomingRelations(\n tableName: string,\n ): Promise<Relationship[]> {\n // SQLite limitation: PRAGMA only shows outgoing FKs\n // Must scan all tables and cache the results\n if (!this.#relationshipCache) {\n this.#relationshipCache = await this.#loadAllRelationships();\n }\n return this.#relationshipCache.filter(\n (r) => r.referenced_table === tableName,\n );\n }\n\n async #loadAllRelationships(): Promise<Relationship[]> {\n const allNames = await this.getAllTableNames();\n const results: Relationship[] = [];\n for (const name of allNames) {\n results.push(...(await this.findOutgoingRelations(name)));\n }\n return results;\n }\n\n #quoteIdentifier(name: string) {\n return `'${name.replace(/'/g, \"''\")}'`;\n }\n}\n", "import { type Adapter } from '../adapter.ts';\nimport { type ColumnStatsGroundingConfig } from '../groundings/column-stats.grounding.ts';\nimport { type ConstraintGroundingConfig } from '../groundings/constraint.grounding.ts';\nimport { type IndexesGroundingConfig } from '../groundings/indexes.grounding.ts';\nimport { type InfoGroundingConfig } from '../groundings/info.grounding.ts';\nimport { type ColumnValuesGroundingConfig } from '../groundings/column-values.grounding.ts';\nimport {\n ReportGrounding,\n type ReportGroundingConfig,\n} from '../groundings/report.grounding.ts';\nimport { type RowCountGroundingConfig } from '../groundings/row-count.grounding.ts';\nimport { type TableGroundingConfig } from '../groundings/table.grounding.ts';\nimport type { ViewGroundingConfig } from '../groundings/view.grounding.ts';\nimport { SqliteColumnStatsGrounding } from './column-stats.sqlite.grounding.ts';\nimport { SqliteConstraintGrounding } from './constraint.sqlite.grounding.ts';\nimport { SqliteIndexesGrounding } from './indexes.sqlite.grounding.ts';\nimport { SqliteInfoGrounding } from './info.sqlite.grounding.ts';\nimport { SqliteColumnValuesGrounding } from './column-values.sqlite.grounding.ts';\nimport { SqliteRowCountGrounding } from './row-count.sqlite.grounding.ts';\nimport { Sqlite } from './sqlite.ts';\nimport { SqliteTableGrounding } from './table.sqlite.grounding.ts';\nimport { SqliteViewGrounding } from './view.sqlite.grounding.ts';\n\nexport * from './sqlite.ts';\n\nexport function tables(config: TableGroundingConfig = {}) {\n return (adapter: Adapter) => new SqliteTableGrounding(adapter, config);\n}\n\nexport function info(config: InfoGroundingConfig = {}) {\n return (adapter: Adapter) => new SqliteInfoGrounding(adapter, config);\n}\n\nexport function views(config: ViewGroundingConfig = {}) {\n return (adapter: Adapter) => {\n return new SqliteViewGrounding(adapter, config);\n };\n}\n\nexport function columnStats(config: 
ColumnStatsGroundingConfig = {}) {\n return (adapter: Adapter) => {\n return new SqliteColumnStatsGrounding(adapter, config);\n };\n}\n\nexport function columnValues(config: ColumnValuesGroundingConfig = {}) {\n return (adapter: Adapter) => {\n return new SqliteColumnValuesGrounding(adapter, config);\n };\n}\n\nexport function indexes(config: IndexesGroundingConfig = {}) {\n return (adapter: Adapter) => {\n return new SqliteIndexesGrounding(adapter, config);\n };\n}\n\nexport function rowCount(config: RowCountGroundingConfig = {}) {\n return (adapter: Adapter) => {\n return new SqliteRowCountGrounding(adapter, config);\n };\n}\n\nexport function constraints(config: ConstraintGroundingConfig = {}) {\n return (adapter: Adapter) => {\n return new SqliteConstraintGrounding(adapter, config);\n };\n}\n\nexport function report(config: ReportGroundingConfig = {}) {\n return (adapter: Adapter) => new ReportGrounding(adapter, config);\n}\n\nexport default {\n tables,\n info,\n views,\n columnStats,\n columnValues,\n indexes,\n rowCount,\n constraints,\n report,\n Sqlite,\n};\n", "import { DatabaseSync } from 'node:sqlite';\n\nimport type { GroundingFn } from '../adapter.ts';\nimport { Sqlite } from '../sqlite/sqlite.ts';\nimport { type ColumnType, type ParsedSheet, parseFile } from './parser.ts';\n\n/**\n * Options for creating a Spreadsheet adapter.\n */\nexport interface SpreadsheetOptions {\n /**\n * Path to the spreadsheet file (Excel .xlsx/.xls or CSV/TSV).\n */\n file: string;\n\n /**\n * Optional path to persist the SQLite database.\n * If not provided, uses in-memory database (':memory:').\n */\n database?: string;\n\n /**\n * Grounding functions to use for schema introspection.\n */\n grounding: GroundingFn[];\n}\n\n/**\n * Spreadsheet adapter that loads Excel/CSV files into SQLite.\n *\n * This adapter:\n * 1. Parses the spreadsheet file (Excel or CSV/TSV)\n * 2. Creates a SQLite database (in-memory or file-based)\n * 3. Creates tables from sheets and loads data\n * 4. Delegates all SQL operations to the SQLite adapter\n *\n * @example\n * ```typescript\n * import { Spreadsheet, tables, info } from '@deepagents/text2sql/spreadsheet';\n *\n * const adapter = new Spreadsheet({\n * file: './sales.xlsx',\n * grounding: [tables(), info()]\n * });\n *\n * const schema = await adapter.introspect();\n * const results = await adapter.execute('SELECT * FROM Customers');\n * ```\n */\nexport class Spreadsheet extends Sqlite {\n #db: DatabaseSync;\n\n constructor(options: SpreadsheetOptions) {\n // Parse the spreadsheet file\n const sheets = parseFile(options.file);\n\n // Create SQLite database\n const dbPath = options.database ?? 
':memory:';\n const db = new DatabaseSync(dbPath);\n\n // Create tables and load data\n for (const sheet of sheets) {\n const createSQL = createTableSQL(sheet);\n db.exec(createSQL);\n loadData(db, sheet);\n }\n\n // Initialize the SQLite adapter with execute function\n super({\n execute: (sql: string) => db.prepare(sql).all(),\n grounding: options.grounding,\n });\n\n this.#db = db;\n }\n\n /**\n * Close the underlying SQLite database.\n * Call this when done to release resources.\n */\n close(): void {\n this.#db.close();\n }\n}\n\n/**\n * Generate CREATE TABLE SQL for a parsed sheet.\n */\nfunction createTableSQL(sheet: ParsedSheet): string {\n if (sheet.columns.length === 0) {\n throw new Error(`Cannot create table \"${sheet.name}\" with no columns.`);\n }\n\n const columns = sheet.columns\n .map((col) => `\"${escapeIdentifier(col.name)}\" ${col.type}`)\n .join(', ');\n\n return `CREATE TABLE \"${escapeIdentifier(sheet.name)}\" (${columns})`;\n}\n\n/**\n * SQLite input value type.\n */\ntype SQLiteValue = string | number | bigint | null | Uint8Array;\n\n/**\n * Load data from a parsed sheet into the SQLite database.\n * Uses transactions for performance.\n */\nfunction loadData(db: DatabaseSync, sheet: ParsedSheet): void {\n if (sheet.rows.length === 0) {\n return;\n }\n\n const columns = sheet.columns\n .map((c) => `\"${escapeIdentifier(c.name)}\"`)\n .join(', ');\n const placeholders = sheet.columns.map(() => '?').join(', ');\n\n const insertSQL = `INSERT INTO \"${escapeIdentifier(sheet.name)}\" (${columns}) VALUES (${placeholders})`;\n const stmt = db.prepare(insertSQL);\n\n db.exec('BEGIN TRANSACTION');\n\n try {\n for (const row of sheet.rows) {\n const values: SQLiteValue[] = sheet.columns.map((col) => {\n // Use originalKey to access row data (preserves original case)\n const rawValue = row[col.originalKey];\n return convertValue(rawValue, col.type);\n });\n stmt.run(...values);\n }\n db.exec('COMMIT');\n } catch (error) {\n db.exec('ROLLBACK');\n throw error;\n }\n}\n\n/**\n * Convert a JavaScript value to the appropriate SQLite type.\n * Type-aware conversion based on the inferred column type.\n */\nfunction convertValue(value: unknown, type: ColumnType): SQLiteValue {\n // Null/undefined/empty \u2192 NULL\n if (value == null || value === '') {\n return null;\n }\n\n // Handle Date objects - format as YYYY-MM-DD\n if (value instanceof Date) {\n return value.toISOString().split('T')[0];\n }\n\n switch (type) {\n case 'INTEGER': {\n // Convert to integer, floor decimals\n const num = Number(value);\n if (isNaN(num)) {\n return null; // Non-numeric values become NULL\n }\n return Math.floor(num);\n }\n\n case 'REAL': {\n // Convert to float\n const num = Number(value);\n if (isNaN(num)) {\n return null; // Non-numeric values become NULL\n }\n return num;\n }\n\n case 'TEXT':\n default: {\n // Convert to string\n if (typeof value === 'boolean') {\n return value ? 
'true' : 'false';\n }\n if (typeof value === 'object') {\n return JSON.stringify(value);\n }\n return String(value);\n }\n }\n}\n\n/**\n * Escape double quotes in identifiers for SQLite.\n */\nfunction escapeIdentifier(name: string): string {\n return name.replace(/\"/g, '\"\"');\n}\n", "import * as path from 'node:path';\nimport XLSX from 'xlsx';\n\n/**\n * Column type for SQLite.\n */\nexport type ColumnType = 'TEXT' | 'INTEGER' | 'REAL';\n\n/**\n * Column definition with name and inferred type.\n */\nexport interface Column {\n /** Sanitized column name for SQL */\n name: string;\n /** Original column name from spreadsheet (for data access) */\n originalKey: string;\n /** Inferred SQLite type */\n type: ColumnType;\n}\n\n/**\n * Parsed sheet with table name, columns, and row data.\n */\nexport interface ParsedSheet {\n name: string;\n columns: Column[];\n rows: Record<string, unknown>[];\n}\n\n/**\n * Parse an Excel or CSV/TSV file into sheets.\n *\n * - Excel files: each sheet becomes a ParsedSheet\n * - CSV/TSV files: single ParsedSheet with filename as table name\n */\nexport function parseFile(filePath: string): ParsedSheet[] {\n const ext = path.extname(filePath).toLowerCase();\n\n let workbook: XLSX.WorkBook;\n try {\n workbook = XLSX.readFile(filePath, {\n cellDates: true, // Parse dates as Date objects\n });\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error);\n throw new Error(`Failed to read spreadsheet \"${filePath}\": ${message}`);\n }\n\n const sheets: ParsedSheet[] = [];\n\n for (const sheetName of workbook.SheetNames) {\n const sheet = workbook.Sheets[sheetName];\n const rows = XLSX.utils.sheet_to_json<Record<string, unknown>>(sheet);\n\n // Skip empty sheets\n if (rows.length === 0) {\n continue;\n }\n\n // For CSV files, use filename as table name; for Excel, use sheet name\n const tableName =\n ext === '.csv' || ext === '.tsv'\n ? getTableNameFromFile(filePath)\n : sanitizeTableName(sheetName);\n\n const columns = inferColumns(rows);\n\n // Skip sheets with no columns (shouldn't happen if rows exist, but be safe)\n if (columns.length === 0) {\n continue;\n }\n\n sheets.push({\n name: tableName,\n columns,\n rows,\n });\n }\n\n if (sheets.length === 0) {\n throw new Error(\n `No valid sheets found in \"${filePath}\". 
All sheets are empty or have no columns.`,\n );\n }\n\n return sheets;\n}\n\n/**\n * Extract table name from filename.\n * './data/customers.csv' \u2192 'customers'\n */\nfunction getTableNameFromFile(filePath: string): string {\n const basename = path.basename(filePath, path.extname(filePath));\n return sanitizeTableName(basename);\n}\n\n/**\n * Sanitize a name to be a valid SQL table/column identifier.\n * - Lowercase for consistency\n * - Replace invalid chars with underscores\n * - Ensure it doesn't start with a number\n * - Trim and collapse multiple underscores\n * - Truncate to 64 characters\n */\nexport function sanitizeIdentifier(name: string): string {\n // Lowercase for consistent SQL identifiers\n let sanitized = name.toLowerCase();\n\n // Replace any non-alphanumeric (except underscore) with underscore\n sanitized = sanitized.replace(/[^a-z0-9_]/g, '_');\n\n // Collapse multiple underscores\n sanitized = sanitized.replace(/_+/g, '_');\n\n // Trim leading/trailing underscores\n sanitized = sanitized.replace(/^_+|_+$/g, '');\n\n // If starts with number, prefix with underscore\n if (/^[0-9]/.test(sanitized)) {\n sanitized = '_' + sanitized;\n }\n\n // If empty after sanitization, use a default\n if (!sanitized) {\n return 'column';\n }\n\n // Truncate to 64 characters (common SQL identifier limit)\n return sanitized.slice(0, 64);\n}\n\n// Alias for backwards compatibility\nexport const sanitizeTableName = sanitizeIdentifier;\n\n/**\n * Infer column definitions from row data.\n * Uses the first row's keys as column names and samples values for type inference.\n */\nfunction inferColumns(rows: Record<string, unknown>[]): Column[] {\n if (rows.length === 0) {\n return [];\n }\n\n // Get all unique keys from all rows (in case some rows have different keys)\n const keySet = new Set<string>();\n for (const row of rows) {\n for (const key of Object.keys(row)) {\n keySet.add(key);\n }\n }\n\n // Handle empty keys (sheets with no headers)\n if (keySet.size === 0) {\n return [];\n }\n\n const rawNames = Array.from(keySet);\n const dedupedNames = deduplicateColumnNames(rawNames);\n\n return dedupedNames.map((name, idx) => {\n const originalKey = rawNames[idx];\n const values = rows.map((row) => row[originalKey]);\n const type = inferColumnType(values);\n return { name, originalKey, type };\n });\n}\n\n/**\n * Deduplicate column names by appending _2, _3, etc. to duplicates.\n */\nfunction deduplicateColumnNames(names: string[]): string[] {\n const seen = new Map<string, number>();\n const result: string[] = [];\n\n for (const rawName of names) {\n // Sanitize the column name\n let name = sanitizeTableName(rawName);\n\n // Handle empty names (generate column_1, column_2, etc.)\n if (!name) {\n name = 'column';\n }\n\n const count = seen.get(name) ?? 
0;\n if (count > 0) {\n result.push(`${name}_${count + 1}`);\n } else {\n result.push(name);\n }\n seen.set(name, count + 1);\n }\n\n return result;\n}\n\n/**\n * Infer SQLite column type from sample values.\n * Conservative approach: when in doubt, use TEXT.\n */\nfunction inferColumnType(values: unknown[]): ColumnType {\n let hasInteger = false;\n let hasReal = false;\n\n for (const value of values) {\n // Skip nullish or empty values\n if (value == null || value === '') {\n continue;\n }\n\n // Dates are stored as TEXT (ISO format)\n if (value instanceof Date) {\n return 'TEXT';\n }\n\n if (typeof value === 'number') {\n if (Number.isInteger(value)) {\n hasInteger = true;\n } else {\n hasReal = true;\n }\n } else if (typeof value === 'boolean') {\n // Booleans can be stored as INTEGER (0/1)\n hasInteger = true;\n } else {\n // Any non-number type means TEXT\n return 'TEXT';\n }\n }\n\n // If we have any REAL values, use REAL (even if some are integers)\n if (hasReal) {\n return 'REAL';\n }\n\n // If we only have integers, use INTEGER\n if (hasInteger) {\n return 'INTEGER';\n }\n\n // Default to TEXT (all values were null/empty)\n return 'TEXT';\n}\n"],
+ "mappings": ";AAmCO,SAAS,YAAY,OAIR;AAClB,SAAO;AAAA,IACL,MAAM;AAAA,IACN,MAAM;AAAA,MACJ,SAAS,MAAM;AAAA,MACf,GAAI,MAAM,WAAW,EAAE,SAAS,MAAM,QAAQ;AAAA,MAC9C,GAAI,MAAM,YAAY,EAAE,UAAU,MAAM,SAAS;AAAA,IACnD;AAAA,EACF;AACF;AA2BO,SAAS,MAAM,OAQF;AAClB,SAAO;AAAA,IACL,MAAM;AAAA,IACN,MAAM;AAAA,MACJ,MAAM,MAAM;AAAA,MACZ,GAAI,MAAM,UAAU,EAAE,QAAQ,MAAM,OAAO;AAAA,MAC3C,GAAI,MAAM,YAAY,QAAQ,EAAE,UAAU,MAAM,SAAS;AAAA,MACzD,GAAI,MAAM,YAAY,EAAE,UAAU,MAAM,SAAS;AAAA,MACjD,SAAS,MAAM;AAAA,MACf,GAAI,MAAM,SAAS,UAAU,EAAE,SAAS,MAAM,QAAQ;AAAA,MACtD,GAAI,MAAM,aAAa,UAAU,EAAE,aAAa,MAAM,YAAY;AAAA,IACpE;AAAA,EACF;AACF;AAyBO,SAAS,OAAO,OAeH;AAClB,SAAO;AAAA,IACL,MAAM;AAAA,IACN,MAAM;AAAA,MACJ,MAAM,MAAM;AAAA,MACZ,MAAM,MAAM;AAAA,MACZ,GAAI,MAAM,MAAM,EAAE,IAAI,KAAK;AAAA,MAC3B,GAAI,MAAM,MAAM,EAAE,IAAI,MAAM,GAAG;AAAA,MAC/B,GAAI,MAAM,UAAU,EAAE,QAAQ,KAAK;AAAA,MACnC,GAAI,MAAM,WAAW,EAAE,SAAS,KAAK;AAAA,MACrC,GAAI,MAAM,WAAW,EAAE,SAAS,MAAM,QAAQ;AAAA,MAC9C,GAAI,MAAM,WAAW,EAAE,SAAS,KAAK;AAAA,MACrC,GAAI,MAAM,QAAQ,UAAU,EAAE,QAAQ,MAAM,OAAO;AAAA,MACnD,GAAI,MAAM,SAAS,EAAE,OAAO,MAAM,MAAM;AAAA,IAC1C;AAAA,EACF;AACF;AAaO,SAAS,MAAM,OAKF;AAClB,SAAO;AAAA,IACL,MAAM;AAAA,IACN,MAAM;AAAA,MACJ,MAAM,MAAM;AAAA,MACZ,SAAS,MAAM;AAAA,MACf,GAAI,MAAM,UAAU,EAAE,QAAQ,KAAK;AAAA,MACnC,GAAI,MAAM,QAAQ,EAAE,MAAM,MAAM,KAAK;AAAA,IACvC;AAAA,EACF;AACF;AA6BO,SAAS,WAAW,OAcP;AAClB,SAAO;AAAA,IACL,MAAM;AAAA,IACN,MAAM;AAAA,MACJ,MAAM,MAAM;AAAA,MACZ,MAAM,MAAM;AAAA,MACZ,GAAI,MAAM,SAAS,UAAU,EAAE,SAAS,MAAM,QAAQ;AAAA,MACtD,GAAI,MAAM,cAAc,EAAE,YAAY,MAAM,WAAW;AAAA,MACvD,GAAI,MAAM,gBAAgB,EAAE,cAAc,MAAM,aAAa;AAAA,MAC7D,GAAI,MAAM,mBAAmB,EAAE,iBAAiB,MAAM,gBAAgB;AAAA,MACtE,GAAI,MAAM,mBAAmB,UAAU;AAAA,QACrC,mBAAmB,MAAM;AAAA,MAC3B;AAAA,IACF;AAAA,EACF;AACF;AAoBO,SAAS,KAAK,OAKD;AAClB,SAAO;AAAA,IACL,MAAM;AAAA,IACN,MAAM;AAAA,MACJ,MAAM,MAAM;AAAA,MACZ,GAAI,MAAM,UAAU,EAAE,QAAQ,MAAM,OAAO;AAAA,MAC3C,SAAS,MAAM;AAAA,MACf,GAAI,MAAM,cAAc,EAAE,YAAY,MAAM,WAAW;AAAA,IACzD;AAAA,EACF;AACF;AAgBO,SAAS,aAAa,OAIT;AAClB,SAAO;AAAA,IACL,MAAM;AAAA,IACN,MAAM;AAAA,MACJ,MAAM,MAAM;AAAA,MACZ,IAAI,MAAM;AAAA,MACV,GAAI,MAAM,eAAe,EAAE,aAAa,MAAM,YAAY;AAAA,IAC5D;AAAA,EACF;AACF;;;AC/PO,SAAS,yBAA2C;AACzD,SAAO;AAAA,IACL,QAAQ,CAAC;AAAA,IACT,OAAO,CAAC;AAAA,IACR,eAAe,CAAC;AAAA,IAChB,MAAM;AAAA,EACR;AACF;;;ACyEO,IAAe,UAAf,MAAuB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAuB5B,MAAM,WAAW,MAAM,uBAAuB,GAA+B;AAE3E,eAAW,MAAM,KAAK,WAAW;AAC/B,YAAM,YAAY,GAAG,IAAI;AACzB,YAAM,UAAU,QAAQ,GAAG;AAAA,IAC7B;AAGA,WAAO,KAAK,mBAAmB,GAAG;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,mBAAmB,KAA0C;AAC3D,UAAM,YAA+B,CAAC;AAGtC,QAAI,IAAI,MAAM;AACZ,gBAAU;AAAA,QACR,YAAY;AAAA,UACV,SAAS,IAAI,KAAK;AAAA,UAClB,SAAS,IAAI,KAAK;AAAA,UAClB,UAAU,IAAI,KAAK;AAAA,QACrB,CAAC;AAAA,MACH;AAAA,IACF;AAGA,eAAW,KAAK,IAAI,QAAQ;AAC1B,gBAAU,KAAK,KAAK,iBAAiB,CAAC,CAAC;AAAA,IACzC;AAGA,eAAW,KAAK,IAAI,OAAO;AACzB,gBAAU,KAAK,KAAK,gBAAgB,CAAC,CAAC;AAAA,IACxC;AAGA,UAAM,WAAW,IAAI,IAAI,IAAI,OAAO,IAAI,CAAC,MAAM,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC;AAC3D,eAAW,OAAO,IAAI,eAAe;AACnC,YAAM,cAAc,SAAS,IAAI,IAAI,KAAK;AAC1C,YAAM,cAAc,SAAS,IAAI,IAAI,gBAAgB;AACrD,gBAAU;AAAA,QACR,KAAK,wBAAwB,KAAK,aAAa,WAAW;AAAA,MAC5D;AAAA,IACF;AAGA,QAAI,IAAI,QAAQ;AACd,gBAAU,KAAK,EAAE,MAAM,mBAAmB,MAAM,IAAI,OAAO,CAAC;AAAA,IAC9D;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,iBAAiB,GAA2B;AAE1C,UAAM,eAAe,EAAE,aAAa,KAAK,CAAC,MAAM,EAAE,SAAS,aAAa;AACxE,UAAM,YAAY,IAAI,IAAI,cAAc,WAAW,CAAC,CAAC;AAErD,UAAM,iBAAiB,IAAI;AAAA,MACzB,EAAE,aACE,OAAO,CAAC,MAAM,EAAE,SAAS,UAAU,EACpC,QAAQ,CAAC,MAAM,EAAE,WAAW,CAAC,CAAC,KAAK,CAAC;AAAA,IACzC;AAEA,UAAM,kBAAkB,oBAAI,IAAoB;AAChD,eAAW,KAAK,EAAE,aAAa,OAAO,CAACA,OAAMA,GAAE,SAAS,SAAS,KAAK,CAAC,GAAG;AACxE,iBAAW,OAAO,EAAE,WAAW,C
AAC,GAAG;AACjC,YAAI,EAAE,gBAAgB,MAAM;AAC1B,0BAAgB,IAAI,KAAK,EAAE,YAAY;AAAA,QACzC;AAAA,MACF;AAAA,IACF;AAGA,UAAM,gBAAgB,IAAI;AAAA,MACxB,EAAE,aACE,OAAO,CAAC,MAAM,EAAE,SAAS,YAAY,EAAE,SAAS,WAAW,CAAC,EAC7D,QAAQ,CAAC,MAAM,EAAE,WAAW,CAAC,CAAC,KAAK,CAAC;AAAA,IACzC;AAGA,UAAM,aAAa,oBAAI,IAAoB;AAC3C,eAAW,KAAK,EAAE,aAAa,OAAO,CAACA,OAAMA,GAAE,SAAS,aAAa,KACnE,CAAC,GAAG;AACJ,YAAM,OAAO,EAAE,WAAW,CAAC;AAC3B,YAAM,UAAU,EAAE,qBAAqB,CAAC;AACxC,eAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,cAAM,SAAS,QAAQ,CAAC,KAAK,QAAQ,CAAC,KAAK,KAAK,CAAC;AACjD,mBAAW,IAAI,KAAK,CAAC,GAAG,GAAG,EAAE,eAAe,IAAI,MAAM,EAAE;AAAA,MAC1D;AAAA,IACF;AAGA,UAAM,kBAAkB,EAAE,QAAQ;AAAA,MAAI,CAAC,QACrC,OAAO;AAAA,QACL,MAAM,IAAI;AAAA,QACV,MAAM,IAAI;AAAA,QACV,IAAI,UAAU,IAAI,IAAI,IAAI,KAAK;AAAA,QAC/B,IAAI,WAAW,IAAI,IAAI,IAAI;AAAA,QAC3B,QAAQ,cAAc,IAAI,IAAI,IAAI,KAAK;AAAA,QACvC,SAAS,eAAe,IAAI,IAAI,IAAI,KAAK;AAAA,QACzC,SAAS,gBAAgB,IAAI,IAAI,IAAI;AAAA,QACrC,SAAS,IAAI,aAAa;AAAA,QAC1B,QAAQ,IAAI;AAAA,QACZ,OAAO,IAAI;AAAA,MACb,CAAC;AAAA,IACH;AAGA,UAAM,kBAAkB,EAAE,WAAW,CAAC,GAAG;AAAA,MAAI,CAAC,QAC5C,MAAM;AAAA,QACJ,MAAM,IAAI;AAAA,QACV,SAAS,IAAI;AAAA,QACb,QAAQ,IAAI;AAAA,QACZ,MAAM,IAAI;AAAA,MACZ,CAAC;AAAA,IACH;AAGA,UAAM,uBAAuB,EAAE,eAAe,CAAC,GAC5C;AAAA,MACC,CAAC,MACC,EAAE,SAAS,WACV,EAAE,SAAS,aAAa,EAAE,SAAS,UAAU,KAAK;AAAA,IACvD,EACC;AAAA,MAAI,CAAC,MACJ,WAAW;AAAA,QACT,MAAM,EAAE;AAAA,QACR,MAAM,EAAE;AAAA,QACR,SAAS,EAAE;AAAA,QACX,YAAY,EAAE;AAAA,MAChB,CAAC;AAAA,IACH;AAEF,WAAO,MAAM;AAAA,MACX,MAAM,EAAE;AAAA,MACR,QAAQ,EAAE;AAAA,MACV,UAAU,EAAE;AAAA,MACZ,UAAU,EAAE;AAAA,MACZ,SAAS;AAAA,MACT,SAAS,eAAe,SAAS,IAAI,iBAAiB;AAAA,MACtD,aACE,oBAAoB,SAAS,IAAI,sBAAsB;AAAA,IAC3D,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,gBAAgB,GAA0B;AACxC,UAAM,kBAAkB,EAAE,QAAQ;AAAA,MAAI,CAAC,QACrC,OAAO;AAAA,QACL,MAAM,IAAI;AAAA,QACV,MAAM,IAAI;AAAA,QACV,QAAQ,IAAI;AAAA,QACZ,OAAO,IAAI;AAAA,MACb,CAAC;AAAA,IACH;AAEA,WAAO,KAAK;AAAA,MACV,MAAM,EAAE;AAAA,MACR,QAAQ,EAAE;AAAA,MACV,SAAS;AAAA,MACT,YAAY,EAAE;AAAA,IAChB,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,wBACE,KACA,aACA,aACiB;AACjB,UAAM,cAAc,aAAa;AACjC,UAAM,cAAc,aAAa;AAEjC,QAAI;AAOJ,QAAI,eAAe,QAAQ,eAAe,QAAQ,cAAc,GAAG;AACjE,YAAM,QAAQ,cAAc;AAC5B,UAAI,QAAQ,GAAG;AACb,sBAAc;AAAA,MAChB,WAAW,QAAQ,OAAO,QAAQ,KAAK;AACrC,sBAAc;AAAA,MAChB,WAAW,QAAQ,KAAK;AACtB,sBAAc;AAAA,MAChB;AAAA,IACF;AAEA,WAAO,aAAa;AAAA,MAClB,MAAM,EAAE,OAAO,IAAI,OAAO,SAAS,IAAI,KAAK;AAAA,MAC5C,IAAI,EAAE,OAAO,IAAI,kBAAkB,SAAS,IAAI,GAAG;AAAA,MACnD;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA,EA+BA,SAAS,OAAoC;AAC3C,QAAI,OAAO,UAAU,YAAY,OAAO,SAAS,KAAK,GAAG;AACvD,aAAO;AAAA,IACT;AACA,QAAI,OAAO,UAAU,UAAU;AAC7B,aAAO,OAAO,KAAK;AAAA,IACrB;AACA,QAAI,OAAO,UAAU,YAAY,MAAM,KAAK,MAAM,IAAI;AACpD,YAAM,SAAS,OAAO,KAAK;AAC3B,aAAO,OAAO,SAAS,MAAM,IAAI,SAAS;AAAA,IAC5C;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,eAAe,MAAiD;AAC9D,QAAI,KAAK,SAAS,GAAG,GAAG;AACtB,YAAM,CAAC,QAAQ,GAAG,IAAI,IAAI,KAAK,MAAM,GAAG;AACxC,aAAO,EAAE,QAAQ,OAAO,KAAK,KAAK,GAAG,EAAE;AAAA,IACzC;AACA,WAAO,EAAE,QAAQ,KAAK,iBAAiB,IAAI,OAAO,KAAK;AAAA,EACzD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,aAAa,OAAuB;AAClC,WAAO,MAAM,QAAQ,MAAM,IAAI;AAAA,EACjC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,kBAAkB,YAAoB,gBAAmC;AACvE,QAAI,kBAAkB,eAAe,SAAS,GAAG;AAC/C,YAAM,SAAS,eACZ,IAAI,CAAC,MAAM,IAAI,KAAK,aAAa,CAAC,CAAC,GAAG,EACtC,KAAK,IAAI;AACZ,aAAO,OAAO,UAAU,QAAQ,MAAM;AAAA,IACxC;AACA,QAAI,KAAK,cAAc,SAAS,GAAG;AACjC,YAAM,SAAS,KAAK,cACjB,IAAI,CAAC,MAAM,IAAI,KAAK,aAAa,CAAC,CAAC,GAAG,EACtC,KAAK,IAAI;AACZ,aAAO,OAAO,UAAU,YAAY,MAAM;AAAA,IAC5C;AACA,WAAO;AAAA,EACT;AACF;;;AChaO,IAAe,oBAAf,MAAiC;AAAA;AAAA;AAAA;AAAA,EAItC;AAAA,EAEA,YAAY,MAAc;AACxB,SAAK,OAAO;AAAA,EACd;AAUF;;;ACzBO,IAAe,uBAAf,cAA4C,kBAAkB;AAAA,EACnE,YAAY,
SAAqC,CAAC,GAAG;AACnD,UAAM,aAAa;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,MAAM,QAAQ,KAAsC;AAElD,UAAM,gBAAmC,CAAC,GAAG,IAAI,QAAQ,GAAG,IAAI,KAAK;AACrE,eAAW,aAAa,eAAe;AACrC,iBAAWC,WAAU,UAAU,SAAS;AAEtC,YAAI;AACF,gBAAM,QAAQ,MAAM,KAAK,aAAa,UAAU,MAAMA,OAAM;AAC5D,cAAI,OAAO;AACT,YAAAA,QAAO,QAAQ;AAAA,UACjB;AAAA,QACF,SAAS,OAAO;AAEd,kBAAQ;AAAA,YACN;AAAA,YACA,UAAU;AAAA,YACVA,QAAO;AAAA,YACP;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;;;AC1CO,IAAe,gBAAf,cAAqC,kBAAkB;AAAA,EAC5D,YAAY,SAA8B,CAAC,GAAG;AAC5C,UAAM,aAAa;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAM,QAAQ,KAAsC;AAClD,QAAI,OAAO,MAAM,KAAK,YAAY;AAAA,EACpC;AACF;;;ACFO,IAAe,wBAAf,cAA6C,kBAAkB;AAAA,EAC1D;AAAA,EAEV,YAAY,SAAsC,CAAC,GAAG;AACpD,UAAM,cAAc;AACpB,SAAK,sBAAsB,OAAO,uBAAuB;AAAA,EAC3D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAgB,kBACd,YACA,SAC+B;AAC/B,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAkBU,qBACRC,aACA,YACsB;AACtB,QAAIA,YAAW,SAAS,WAAW,CAACA,YAAW,YAAY;AACzD,aAAO;AAAA,IACT;AAGA,QAAIA,YAAW,WAAW,CAACA,YAAW,QAAQ,SAAS,UAAU,GAAG;AAClE,aAAO;AAAA,IACT;AAEA,UAAM,MAAMA,YAAW;AACvB,UAAM,aAAa,KAAK,YAAY,UAAU;AAI9C,UAAM,aAAa,cAAc,UAAU;AAG3C,UAAM,UAAU,IAAI;AAAA,MAClB,IAAI,OAAO,GAAG,UAAU,2BAA2B,GAAG;AAAA,IACxD;AACA,QAAI,SAAS;AACX,aAAO,KAAK,oBAAoB,QAAQ,CAAC,CAAC;AAAA,IAC5C;AAGA,UAAM,WAAW,IAAI;AAAA,MACnB,IAAI;AAAA,QACF,GAAG,UAAU;AAAA,QACb;AAAA,MACF;AAAA,IACF;AACA,QAAI,UAAU;AACZ,aAAO,KAAK,oBAAoB,SAAS,CAAC,CAAC;AAAA,IAC7C;AAGA,UAAM,YAAY,IAAI;AAAA,MACpB,MAAM,KAAK,YAAY,UAAU,CAAC;AAAA,MAClC;AAAA,IACF;AACA,UAAM,YAAY,CAAC,GAAG,IAAI,SAAS,SAAS,CAAC;AAC7C,QAAI,UAAU,UAAU,GAAG;AACzB,aAAO,UAAU,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC;AAAA,IAClC;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,oBAAoB,OAAqC;AAC/D,UAAM,SAAmB,CAAC;AAE1B,UAAM,UAAU,MAAM,SAAS,YAAY;AAC3C,eAAW,SAAS,SAAS;AAC3B,aAAO,KAAK,MAAM,CAAC,CAAC;AAAA,IACtB;AACA,WAAO,OAAO,SAAS,IAAI,SAAS;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA,EAKQ,YAAY,KAAqB;AACvC,WAAO,IAAI,QAAQ,uBAAuB,MAAM;AAAA,EAClD;AAAA;AAAA;AAAA;AAAA,EAKQ,SAAS,KAAuB,MAAiC;AACvE,WAAO,IAAI,OAAO,KAAK,CAAC,MAAM,EAAE,SAAS,IAAI;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,QAAQ,KAAsC;AAElD,UAAM,gBAAmC,CAAC,GAAG,IAAI,QAAQ,GAAG,IAAI,KAAK;AAErE,eAAW,aAAa,eAAe;AACrC,YAAMC,SAAQ,KAAK,SAAS,KAAK,UAAU,IAAI;AAE/C,iBAAWC,WAAU,UAAU,SAAS;AACtC,YAAI;AACF,gBAAM,SAAS,MAAM,KAAK;AAAA,YACxB,UAAU;AAAA,YACVA;AAAA,YACAD,QAAO;AAAA,UACT;AACA,cAAI,QAAQ;AACV,YAAAC,QAAO,OAAO,OAAO;AACrB,YAAAA,QAAO,SAAS,OAAO;AAAA,UACzB;AAAA,QACF,SAAS,OAAO;AACd,kBAAQ;AAAA,YACN;AAAA,YACA,UAAU;AAAA,YACVA,QAAO;AAAA,YACP;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,oBACZ,WACAA,SACA,aACyC;AAEzC,UAAM,aAAa,MAAM,KAAK,kBAAkB,WAAWA,OAAM;AACjE,QAAI,YAAY,QAAQ;AACtB,aAAO,EAAE,MAAM,QAAQ,QAAQ,WAAW;AAAA,IAC5C;AAGA,QAAI,aAAa;AACf,iBAAWF,eAAc,aAAa;AACpC,cAAM,cAAc,KAAK,qBAAqBA,aAAYE,QAAO,IAAI;AACrE,YAAI,aAAa,QAAQ;AACvB,iBAAO,EAAE,MAAM,QAAQ,QAAQ,YAAY;AAAA,QAC7C;AAAA,MACF;AAAA,IACF;AAGA,UAAM,gBAAgB,MAAM,KAAK,sBAAsB,WAAWA,OAAM;AACxE,QAAI,eAAe,QAAQ;AACzB,aAAO,EAAE,MAAM,kBAAkB,QAAQ,cAAc;AAAA,IACzD;AAEA,WAAO;AAAA,EACT;AACF;;;ACvNA,SAAS,QAAAC,aAAY;AACrB,SAAS,YAAY;AACrB,OAAO,YAAY;AACnB,OAAO,OAAO;AAEd,OAAgC;;;ACLhC,SAAS,cAAc;ACAvB,SAAyB,kBAAkB;ACA3C,OAAO,eAAe;AACtB,SAAS,iBAAiB;AMuB1B,OAAO,WAAW;ACvBlB,SAA6B,qBAAqB;AAClD,OAAO,WAAW;ACFlB,OAAiD;AACjD,OAAOC,YAAW;ACDlB;EACE;OAGK;ACFP,OAAO,UAAU;AEFjB,SAAS,oBAAwC;AGAjD,SAAS,YAAY;AACrB;EAEE;EACA;EAMA;EACA;EACA,cAAAC;EACA;EACA;EACA;EACA;OACK;AACP,OAAOC,YAAW;AAClB,OAAc;AAEd,OAAgC;;;AEDzB,IAAe,oBAAf,cAAyC,kBAAkB;AAAA,EAChE,YAAY,SAAkC,CAAC,GAAG;AAChD,UAAM,UAAU;AAAA,EAClB;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,QAAQ,KAAsC;AAClD,eAAWC,UAAS,IAAI,QAAQ;AAC9B,YAAM,QAAQ,MAAM,KAAK,
YAAYA,OAAM,IAAI;AAC/C,UAAI,SAAS,MAAM;AACjB,QAAAA,OAAM,WAAW;AACjB,QAAAA,OAAM,WAAW,KAAK,kBAAkB,KAAK;AAAA,MAC/C;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,kBAAkB,OAAkC;AAClD,QAAI,QAAQ,IAAK,QAAO;AACxB,QAAI,QAAQ,IAAM,QAAO;AACzB,QAAI,QAAQ,IAAO,QAAO;AAC1B,QAAI,QAAQ,IAAQ,QAAO;AAC3B,WAAO;AAAA,EACT;AACF;;;ACpBO,IAAe,iBAAf,cAAsC,kBAAkB;AAAA,EAC7D;AAAA,EACA;AAAA,EACA;AAAA,EAEA,YAAY,SAA+B,CAAC,GAAG;AAC7C,UAAM,OAAO;AACb,SAAK,UAAU,OAAO;AACtB,SAAK,WAAW,OAAO;AACvB,SAAK,YAAY,OAAO;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA;AAAA,EAsBA,MAAM,QAAQ,KAAsC;AAClD,UAAM,aAAa,MAAM,KAAK,YAAY;AAC1C,UAAM,UAAU,KAAK;AACrB,UAAM,WAAW,KAAK;AAGtB,QAAI,CAAC,WAAW,CAAC,UAAU;AACzB,YAAMC,UAAS,MAAM,QAAQ;AAAA,QAC3B,WAAW,IAAI,CAAC,SAAS,KAAK,SAAS,IAAI,CAAC;AAAA,MAC9C;AACA,UAAI,OAAO,KAAK,GAAGA,OAAM;AACzB;AAAA,IACF;AAEA,UAAMA,UAAgC,CAAC;AACvC,UAAM,mBAAmC,CAAC;AAC1C,UAAM,oBAAoB,oBAAI,IAAY;AAG1C,UAAM,eAAuD,CAAC;AAC9D,UAAM,gBAAwD,CAAC;AAC/D,UAAM,iBAAiB,oBAAI,IAAY;AACvC,UAAM,kBAAkB,oBAAI,IAAY;AAGxC,eAAW,QAAQ,YAAY;AAC7B,UAAI,QAAS,cAAa,KAAK,EAAE,MAAM,OAAO,EAAE,CAAC;AACjD,UAAI,SAAU,eAAc,KAAK,EAAE,MAAM,OAAO,EAAE,CAAC;AAAA,IACrD;AAGA,UAAM,eAAe,YAAY,OAAO,WAAW,WAAW;AAC9D,WAAO,aAAa,SAAS,GAAG;AAC9B,YAAM,OAAO,aAAa,MAAM;AAChC,UAAI,CAAC,KAAM;AACX,YAAM,EAAE,MAAM,MAAM,IAAI;AAExB,UAAI,eAAe,IAAI,IAAI,EAAG;AAC9B,qBAAe,IAAI,IAAI;AAEvB,UAAI,CAACA,QAAO,IAAI,GAAG;AACjB,QAAAA,QAAO,IAAI,IAAI,MAAM,KAAK,SAAS,IAAI;AAAA,MACzC;AAEA,UAAI,QAAQ,cAAc;AACxB,cAAM,OAAO,MAAM,KAAK,sBAAsB,IAAI;AAClD,mBAAW,OAAO,MAAM;AACtB,eAAK,gBAAgB,KAAK,kBAAkB,iBAAiB;AAC7D,cAAI,CAAC,eAAe,IAAI,IAAI,gBAAgB,GAAG;AAC7C,yBAAa,KAAK,EAAE,MAAM,IAAI,kBAAkB,OAAO,QAAQ,EAAE,CAAC;AAAA,UACpE;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAGA,UAAM,gBAAgB,aAAa,OAAO,WAAW,YAAY;AACjE,WAAO,cAAc,SAAS,GAAG;AAC/B,YAAM,OAAO,cAAc,MAAM;AACjC,UAAI,CAAC,KAAM;AACX,YAAM,EAAE,MAAM,MAAM,IAAI;AAExB,UAAI,gBAAgB,IAAI,IAAI,EAAG;AAC/B,sBAAgB,IAAI,IAAI;AAExB,UAAI,CAACA,QAAO,IAAI,GAAG;AACjB,QAAAA,QAAO,IAAI,IAAI,MAAM,KAAK,SAAS,IAAI;AAAA,MACzC;AAEA,UAAI,QAAQ,eAAe;AACzB,cAAM,OAAO,MAAM,KAAK,sBAAsB,IAAI;AAClD,mBAAW,OAAO,MAAM;AACtB,eAAK,gBAAgB,KAAK,kBAAkB,iBAAiB;AAC7D,cAAI,CAAC,gBAAgB,IAAI,IAAI,KAAK,GAAG;AACnC,0BAAc,KAAK,EAAE,MAAM,IAAI,OAAO,OAAO,QAAQ,EAAE,CAAC;AAAA,UAC1D;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAGA,UAAM,aAAa,OAAO,OAAOA,OAAM;AACvC,QAAI,OAAO,KAAK,GAAG,UAAU;AAC7B,QAAI,cAAc,KAAK,GAAG,gBAAgB;AAAA,EAC5C;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAgB,cAAiC;AAC/C,UAAM,SAAS,KAAK;AACpB,QAAI,MAAM,QAAQ,MAAM,GAAG;AACzB,aAAO;AAAA,IACT;AACA,UAAM,QAAQ,MAAM,KAAK,iBAAiB;AAC1C,QAAI,CAAC,QAAQ;AACX,aAAO;AAAA,IACT;AACA,QAAI,kBAAkB,QAAQ;AAC5B,aAAO,MAAM,OAAO,CAAC,SAAS,OAAO,KAAK,IAAI,CAAC;AAAA,IACjD;AACA,WAAO,MAAM,OAAO,MAAM;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA,EAKU,gBACR,KACA,KACA,MACM;AACN,UAAM,MAAM,GAAG,IAAI,KAAK,IAAI,IAAI,KAAK,KAAK,GAAG,CAAC,IAAI,IAAI,gBAAgB,IAAI,IAAI,GAAG,KAAK,GAAG,CAAC;AAC1F,QAAI,CAAC,KAAK,IAAI,GAAG,GAAG;AAClB,WAAK,IAAI,GAAG;AACZ,UAAI,KAAK,GAAG;AAAA,IACd;AAAA,EACF;AACF;;;ACjLO,IAAM,6BAAN,cAAyC,qBAAqB;AAAA,EACnE;AAAA,EAEA,YAAY,SAAkB,SAAqC,CAAC,GAAG;AACrE,UAAM,MAAM;AACZ,SAAK,WAAW;AAAA,EAClB;AAAA,EAEA,MAAyB,aACvB,WACAC,SACkC;AAClC,QAAI,CAAC,KAAK,oBAAoBA,QAAO,IAAI,GAAG;AAC1C,aAAO;AAAA,IACT;AAEA,UAAM,kBAAkB,KAAK,SAAS,gBAAgB,SAAS;AAC/D,UAAM,mBAAmB,KAAK,SAAS,gBAAgBA,QAAO,IAAI;AAElE,UAAM,MAAM;AAAA;AAAA,cAEF,gBAAgB;AAAA,cAChB,gBAAgB;AAAA,wBACN,gBAAgB;AAAA,aAC3B,eAAe;AAAA;AAGxB,UAAM,OAAO,MAAM,KAAK,SAAS,SAI9B,GAAG;AAEN,QAAI,CAAC,KAAK,QAAQ;AAChB,aAAO;AAAA,IACT;AAEA,UAAM,MAAM,KAAK,gBAAgB,KAAK,CAAC,GAAG,SAAS;AACnD,UAAM,MAAM,KAAK,gBAAgB,KAAK,CAAC,GAAG,SAAS;AACnD,UAAM,eAAe,KAAK,SAAS,SAAS,KAAK,CAAC,GAAG,aAAa;AAElE,QAAI,OAAO,QAAQ,OAAO,QAAQ,gBAAgB,MAAM;AACtD,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,MACL,KAAK
,OAAO;AAAA,MACZ,KAAK,OAAO;AAAA,MACZ,cACE,gBAAgB,QAAQ,OAAO,SAAS,YAAY,IAChD,KAAK,IAAI,GAAG,KAAK,IAAI,GAAG,YAAY,CAAC,IACrC;AAAA,IACR;AAAA,EACF;AAAA,EAEA,oBAAoB,MAAmC;AACrD,QAAI,CAAC,MAAM;AACT,aAAO;AAAA,IACT;AACA,UAAM,aAAa,KAAK,YAAY;AACpC,WAAO,uDAAuD;AAAA,MAC5D;AAAA,IACF;AAAA,EACF;AAAA,EAEA,gBAAgB,OAA+B;AAC7C,QAAI,UAAU,QAAQ,UAAU,QAAW;AACzC,aAAO;AAAA,IACT;AACA,QAAI,OAAO,UAAU,UAAU;AAC7B,aAAO;AAAA,IACT;AACA,QAAI,OAAO,UAAU,YAAY,OAAO,UAAU,UAAU;AAC1D,aAAO,OAAO,KAAK;AAAA,IACrB;AACA,QAAI,OAAO,UAAU,WAAW;AAC9B,aAAO,QAAQ,SAAS;AAAA,IAC1B;AACA,QAAI,iBAAiB,MAAM;AACzB,aAAO,MAAM,YAAY;AAAA,IAC3B;AACA,QAAI,OAAO,WAAW,eAAe,OAAO,SAAS,KAAK,GAAG;AAC3D,aAAO,MAAM,SAAS,OAAO;AAAA,IAC/B;AACA,WAAO;AAAA,EACT;AACF;;;ACvFO,IAAM,sBAAN,cAAkC,cAAc;AAAA,EACrD;AAAA,EAEA,YAAY,SAAkB,SAA8B,CAAC,GAAG;AAC9D,UAAM,MAAM;AACZ,SAAK,WAAW;AAAA,EAClB;AAAA,EAEA,MAAyB,cAAoC;AAC3D,UAAM,OAAO,MAAM,KAAK,SAAS;AAAA,MAC/B;AAAA,IACF;AAEA,WAAO;AAAA,MACL,SAAS;AAAA,MACT,SAAS,KAAK,CAAC,GAAG;AAAA,MAClB,SAAS;AAAA,QACP,sBAAsB;AAAA,MACxB;AAAA,IACF;AAAA,EACF;AACF;;;ACdO,IAAM,8BAAN,cAA0C,sBAAsB;AAAA,EACrE;AAAA,EAEA,YAAY,SAAkB,SAAsC,CAAC,GAAG;AACtE,UAAM,MAAM;AACZ,SAAK,WAAW;AAAA,EAClB;AAAA,EAEA,MAAyB,sBACvB,WACAC,SAC+B;AAC/B,UAAM,kBAAkB,KAAK,SAAS,gBAAgB,SAAS;AAC/D,UAAM,mBAAmB,KAAK,SAAS,gBAAgBA,QAAO,IAAI;AAClE,UAAM,QAAQ,KAAK,sBAAsB;AAEzC,UAAM,MAAM;AAAA,wBACQ,gBAAgB;AAAA,aAC3B,eAAe;AAAA,cACd,gBAAgB;AAAA,cAChB,KAAK;AAAA;AAGf,UAAM,OAAO,MAAM,KAAK,SAAS,SAA6B,GAAG;AAEjE,QAAI,CAAC,KAAK,UAAU,KAAK,SAAS,KAAK,qBAAqB;AAC1D,aAAO;AAAA,IACT;AAEA,UAAM,SAAmB,CAAC;AAC1B,eAAW,OAAO,MAAM;AACtB,YAAM,YAAY,KAAK,gBAAgB,IAAI,KAAK;AAChD,UAAI,aAAa,MAAM;AACrB,eAAO;AAAA,MACT;AACA,aAAO,KAAK,SAAS;AAAA,IACvB;AAEA,WAAO,OAAO,SAAS,SAAS;AAAA,EAClC;AAAA,EAEA,gBAAgB,OAA+B;AAC7C,QAAI,UAAU,QAAQ,UAAU,QAAW;AACzC,aAAO;AAAA,IACT;AACA,QAAI,OAAO,UAAU,UAAU;AAC7B,aAAO;AAAA,IACT;AACA,QAAI,OAAO,UAAU,YAAY,OAAO,UAAU,UAAU;AAC1D,aAAO,OAAO,KAAK;AAAA,IACrB;AACA,QAAI,OAAO,UAAU,WAAW;AAC9B,aAAO,QAAQ,SAAS;AAAA,IAC1B;AACA,QAAI,iBAAiB,MAAM;AACzB,aAAO,MAAM,YAAY;AAAA,IAC3B;AACA,QAAI,OAAO,WAAW,eAAe,OAAO,SAAS,KAAK,GAAG;AAC3D,aAAO,MAAM,SAAS,OAAO;AAAA,IAC/B;AACA,WAAO;AAAA,EACT;AACF;;;ACrEO,IAAM,0BAAN,cAAsC,kBAAkB;AAAA,EAC7D;AAAA,EAEA,YAAY,SAAkB,SAAkC,CAAC,GAAG;AAClE,UAAM,MAAM;AACZ,SAAK,WAAW;AAAA,EAClB;AAAA,EAEA,MAAyB,YAAY,WAAgD;AACnF,UAAM,OAAO,MAAM,KAAK,SAAS;AAAA,MAC/B,iCAAiC,KAAK,SAAS,gBAAgB,SAAS,CAAC;AAAA,IAC3E;AAEA,WAAO,KAAK,SAAS,SAAS,KAAK,CAAC,GAAG,KAAK;AAAA,EAC9C;AACF;;;ACjBA,IAAM,gBAID;AAAA,EACH;AAAA,IACE,SAAS;AAAA,IACT,MAAM;AAAA,IACN,MAAM;AAAA,EACR;AAAA,EACA;AAAA,IACE,SAAS;AAAA,IACT,MAAM;AAAA,IACN,MAAM;AAAA,EACR;AAAA,EACA;AAAA,IACE,SAAS;AAAA,IACT,MAAM;AAAA,IACN,MAAM;AAAA,EACR;AAAA,EACA;AAAA,IACE,SAAS;AAAA,IACT,MAAM;AAAA,IACN,MAAM;AAAA,EACR;AAAA,EACA;AAAA,IACE,SAAS;AAAA,IACT,MAAM;AAAA,IACN,MAAM;AAAA,EACR;AAAA,EACA;AAAA,IACE,SAAS;AAAA,IACT,MAAM;AAAA,IACN,MAAM;AAAA,EACR;AACF;AAmCO,SAAS,YAAY,KAAa,OAAgB;AACvD,QAAM,eACJ,iBAAiB,QACb,MAAM,UACN,OAAO,UAAU,WACf,QACA;AACR,QAAM,YAAY,cAAc,KAAK,CAAC,OAAO,GAAG,QAAQ,KAAK,YAAY,CAAC;AAE1E,MAAI,CAAC,WAAW;AACd,WAAO;AAAA,MACL,OAAO;AAAA,MACP,YAAY;AAAA,MACZ,YAAY;AAAA,MACZ,eAAe;AAAA,IACjB;AAAA,EACF;AAEA,SAAO;AAAA,IACL,OAAO;AAAA,IACP,YAAY,UAAU;AAAA,IACtB,YAAY,UAAU;AAAA,IACtB,eAAe;AAAA,EACjB;AACF;AAEO,IAAM,SAAN,cAAqB,QAAQ;AAAA,EAClC;AAAA,EACkB;AAAA,EACA,gBAAgB;AAAA,EAChB,gBAA0B,CAAC;AAAA,EAE7C,YAAY,SAA+B;AACzC,UAAM;AACN,QAAI,CAAC,WAAW,OAAO,QAAQ,YAAY,YAAY;AACrD,YAAM,IAAI,MAAM,8CAA8C;AAAA,IAChE;AACA,SAAK,WAAW;AAChB,SAAK,YAAY,QAAQ;AAAA,EAC3B;AAAA,EAEA,MAAe,QAAQ,KAAa;AAClC,WAAO,KAAK,SAAS,QAAQ,GAAG;AAAA,EAClC;AAAA,EAEA,MAAe,SAAS,KAAa;AACnC,UAAM,YACJ,KAAK,SAAS,aACb,OAAO,SAAiB;AACvB,YAAM,KAAK,SAAS,QAAQ,WAAW,IA
AI,EAAE;AAAA,IAC/C;AAEF,QAAI;AACF,aAAO,MAAM,UAAU,GAAG;AAAA,IAC5B,SAAS,OAAO;AACd,aAAO,KAAK,UAAU,YAAY,KAAK,KAAK,CAAC;AAAA,IAC/C;AAAA,EACF;AAAA,EAEA,iBAAiB,MAAc;AAC7B,WAAO,IAAI,KAAK,QAAQ,MAAM,IAAI,CAAC;AAAA,EACrC;AAAA,EAEA,MAAe,SAAc,KAA6B;AACxD,UAAM,SAAS,MAAM,KAAK,SAAS,QAAQ,GAAG;AAE9C,QAAI,MAAM,QAAQ,MAAM,GAAG;AACzB,aAAO;AAAA,IACT;AAEA,QACE,UACA,OAAO,WAAW,YAClB,UAAU,UACV,MAAM,QAAS,OAA8B,IAAI,GACjD;AACA,aAAQ,OAA2B;AAAA,IACrC;AAEA,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA,EAES,gBAAgB,MAAsB;AAC7C,WAAO,IAAI,KAAK,QAAQ,MAAM,IAAI,CAAC;AAAA,EACrC;AAAA,EAES,OAAO,OAAuB;AACrC,WAAO,MAAM,QAAQ,MAAM,IAAI;AAAA,EACjC;AAAA,EAES,qBACP,WACA,SACA,OACQ;AACR,UAAM,aAAa,SAAS,SACxB,QAAQ,IAAI,CAAC,MAAM,KAAK,gBAAgB,CAAC,CAAC,EAAE,KAAK,IAAI,IACrD;AACJ,WAAO,UAAU,UAAU,SAAS,KAAK,gBAAgB,SAAS,CAAC,UAAU,KAAK;AAAA,EACpF;AACF;;;AC1JO,IAAM,uBAAN,cAAmC,eAAe;AAAA,EACvD;AAAA,EACA,qBAA4C;AAAA,EAE5C,YAAY,SAAkB,SAA+B,CAAC,GAAG;AAC/D,UAAM,MAAM;AACZ,SAAK,WAAW;AAAA,EAClB;AAAA,EAEA,MAAyB,mBAAsC;AAC7D,UAAM,OAAO,MAAM,KAAK,SAAS,SAE9B,iEAAiE;AAEpE,WAAO,KACJ,IAAI,CAAC,QAAQ,IAAI,IAAI,EACrB;AAAA,MACC,CAAC,SACC,OAAO,SAAS,YAAY,CAAC,KAAK,WAAW,SAAS;AAAA,IAC1D;AAAA,EACJ;AAAA,EAEA,MAAyB,SAAS,WAAmC;AACnE,UAAM,UAAU,MAAM,KAAK,SAAS;AAAA,MAClC,qBAAqB,KAAK,iBAAiB,SAAS,CAAC;AAAA,IACvD;AAEA,WAAO;AAAA,MACL,MAAM;AAAA,MACN,SAAS;AAAA,MACT,SAAS,QAAQ,IAAI,CAAC,SAAS;AAAA,QAC7B,MAAM,IAAI,QAAQ;AAAA,QAClB,MAAM,IAAI,QAAQ;AAAA,MACpB,EAAE;AAAA,IACJ;AAAA,EACF;AAAA,EAEA,MAAyB,sBACvB,WACyB;AACzB,UAAM,OAAO,MAAM,KAAK,SAAS;AAAA,MAC/B,2BAA2B,KAAK,iBAAiB,SAAS,CAAC;AAAA,IAC7D;AAEA,UAAM,SAAS,oBAAI,IAA0B;AAE7C,eAAW,OAAO,MAAM;AACtB,UACE,IAAI,MAAM,QACV,IAAI,SAAS,QACb,IAAI,QAAQ,QACZ,IAAI,MAAM,MACV;AACA;AAAA,MACF;AAEA,YAAM,KAAK,OAAO,IAAI,EAAE;AACxB,YAAM,WAAW,OAAO,IAAI,EAAE;AAC9B,UAAI,CAAC,UAAU;AACb,eAAO,IAAI,IAAI;AAAA,UACb,OAAO;AAAA,UACP,MAAM,CAAC,OAAO,IAAI,IAAI,CAAC;AAAA,UACvB,kBAAkB,OAAO,IAAI,KAAK;AAAA,UAClC,IAAI,CAAC,OAAO,IAAI,EAAE,CAAC;AAAA,QACrB,CAAC;AAAA,MACH,OAAO;AACL,iBAAS,KAAK,KAAK,OAAO,IAAI,IAAI,CAAC;AACnC,iBAAS,GAAG,KAAK,OAAO,IAAI,EAAE,CAAC;AAAA,MACjC;AAAA,IACF;AAEA,WAAO,MAAM,KAAK,OAAO,OAAO,CAAC;AAAA,EACnC;AAAA,EAEA,MAAyB,sBACvB,WACyB;AAGzB,QAAI,CAAC,KAAK,oBAAoB;AAC5B,WAAK,qBAAqB,MAAM,KAAK,sBAAsB;AAAA,IAC7D;AACA,WAAO,KAAK,mBAAmB;AAAA,MAC7B,CAAC,MAAM,EAAE,qBAAqB;AAAA,IAChC;AAAA,EACF;AAAA,EAEA,MAAM,wBAAiD;AACrD,UAAM,WAAW,MAAM,KAAK,iBAAiB;AAC7C,UAAM,UAA0B,CAAC;AACjC,eAAW,QAAQ,UAAU;AAC3B,cAAQ,KAAK,GAAI,MAAM,KAAK,sBAAsB,IAAI,CAAE;AAAA,IAC1D;AACA,WAAO;AAAA,EACT;AAAA,EAEA,iBAAiB,MAAc;AAC7B,WAAO,IAAI,KAAK,QAAQ,MAAM,IAAI,CAAC;AAAA,EACrC;AACF;;;ACnGO,SAAS,OAAO,SAA+B,CAAC,GAAG;AACxD,SAAO,CAAC,YAAqB,IAAI,qBAAqB,SAAS,MAAM;AACvE;AAEO,SAAS,KAAK,SAA8B,CAAC,GAAG;AACrD,SAAO,CAAC,YAAqB,IAAI,oBAAoB,SAAS,MAAM;AACtE;AAQO,SAAS,YAAY,SAAqC,CAAC,GAAG;AACnE,SAAO,CAAC,YAAqB;AAC3B,WAAO,IAAI,2BAA2B,SAAS,MAAM;AAAA,EACvD;AACF;AAEO,SAAS,aAAa,SAAsC,CAAC,GAAG;AACrE,SAAO,CAAC,YAAqB;AAC3B,WAAO,IAAI,4BAA4B,SAAS,MAAM;AAAA,EACxD;AACF;AAQO,SAAS,SAAS,SAAkC,CAAC,GAAG;AAC7D,SAAO,CAAC,YAAqB;AAC3B,WAAO,IAAI,wBAAwB,SAAS,MAAM;AAAA,EACpD;AACF;;;AC7DA,SAAS,gBAAAC,qBAAoB;;;ACA7B,YAAY,UAAU;AACtB,OAAO,UAAU;AAkCV,SAAS,UAAU,UAAiC;AACzD,QAAM,MAAW,aAAQ,QAAQ,EAAE,YAAY;AAE/C,MAAI;AACJ,MAAI;AACF,eAAW,KAAK,SAAS,UAAU;AAAA,MACjC,WAAW;AAAA;AAAA,IACb,CAAC;AAAA,EACH,SAAS,OAAO;AACd,UAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACrE,UAAM,IAAI,MAAM,+BAA+B,QAAQ,MAAM,OAAO,EAAE;AAAA,EACxE;AAEA,QAAM,SAAwB,CAAC;AAE/B,aAAW,aAAa,SAAS,YAAY;AAC3C,UAAM,QAAQ,SAAS,OAAO,SAAS;AACvC,UAAM,OAAO,KAAK,MAAM,cAAuC,KAAK;AAGpE,QAAI,KAAK,WAAW,GAAG;AACrB;AAAA,IACF;AAGA,UAAM,YACJ,QAAQ,UAAU,QAAQ,SACtB,qBAAqB,QAAQ,IAC7B,kBAAkB,SAAS;AAEjC,UAAM,UAAU,aAAa,IAAI;AAGjC,QAAI,QAAQ,
WAAW,GAAG;AACxB;AAAA,IACF;AAEA,WAAO,KAAK;AAAA,MACV,MAAM;AAAA,MACN;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAEA,MAAI,OAAO,WAAW,GAAG;AACvB,UAAM,IAAI;AAAA,MACR,6BAA6B,QAAQ;AAAA,IACvC;AAAA,EACF;AAEA,SAAO;AACT;AAMA,SAAS,qBAAqB,UAA0B;AACtD,QAAMC,YAAgB,cAAS,UAAe,aAAQ,QAAQ,CAAC;AAC/D,SAAO,kBAAkBA,SAAQ;AACnC;AAUO,SAAS,mBAAmB,MAAsB;AAEvD,MAAI,YAAY,KAAK,YAAY;AAGjC,cAAY,UAAU,QAAQ,eAAe,GAAG;AAGhD,cAAY,UAAU,QAAQ,OAAO,GAAG;AAGxC,cAAY,UAAU,QAAQ,YAAY,EAAE;AAG5C,MAAI,SAAS,KAAK,SAAS,GAAG;AAC5B,gBAAY,MAAM;AAAA,EACpB;AAGA,MAAI,CAAC,WAAW;AACd,WAAO;AAAA,EACT;AAGA,SAAO,UAAU,MAAM,GAAG,EAAE;AAC9B;AAGO,IAAM,oBAAoB;AAMjC,SAAS,aAAa,MAA2C;AAC/D,MAAI,KAAK,WAAW,GAAG;AACrB,WAAO,CAAC;AAAA,EACV;AAGA,QAAM,SAAS,oBAAI,IAAY;AAC/B,aAAW,OAAO,MAAM;AACtB,eAAW,OAAO,OAAO,KAAK,GAAG,GAAG;AAClC,aAAO,IAAI,GAAG;AAAA,IAChB;AAAA,EACF;AAGA,MAAI,OAAO,SAAS,GAAG;AACrB,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,WAAW,MAAM,KAAK,MAAM;AAClC,QAAM,eAAe,uBAAuB,QAAQ;AAEpD,SAAO,aAAa,IAAI,CAAC,MAAM,QAAQ;AACrC,UAAM,cAAc,SAAS,GAAG;AAChC,UAAM,SAAS,KAAK,IAAI,CAAC,QAAQ,IAAI,WAAW,CAAC;AACjD,UAAM,OAAO,gBAAgB,MAAM;AACnC,WAAO,EAAE,MAAM,aAAa,KAAK;AAAA,EACnC,CAAC;AACH;AAKA,SAAS,uBAAuB,OAA2B;AACzD,QAAM,OAAO,oBAAI,IAAoB;AACrC,QAAM,SAAmB,CAAC;AAE1B,aAAW,WAAW,OAAO;AAE3B,QAAI,OAAO,kBAAkB,OAAO;AAGpC,QAAI,CAAC,MAAM;AACT,aAAO;AAAA,IACT;AAEA,UAAM,QAAQ,KAAK,IAAI,IAAI,KAAK;AAChC,QAAI,QAAQ,GAAG;AACb,aAAO,KAAK,GAAG,IAAI,IAAI,QAAQ,CAAC,EAAE;AAAA,IACpC,OAAO;AACL,aAAO,KAAK,IAAI;AAAA,IAClB;AACA,SAAK,IAAI,MAAM,QAAQ,CAAC;AAAA,EAC1B;AAEA,SAAO;AACT;AAMA,SAAS,gBAAgB,QAA+B;AACtD,MAAI,aAAa;AACjB,MAAI,UAAU;AAEd,aAAW,SAAS,QAAQ;AAE1B,QAAI,SAAS,QAAQ,UAAU,IAAI;AACjC;AAAA,IACF;AAGA,QAAI,iBAAiB,MAAM;AACzB,aAAO;AAAA,IACT;AAEA,QAAI,OAAO,UAAU,UAAU;AAC7B,UAAI,OAAO,UAAU,KAAK,GAAG;AAC3B,qBAAa;AAAA,MACf,OAAO;AACL,kBAAU;AAAA,MACZ;AAAA,IACF,WAAW,OAAO,UAAU,WAAW;AAErC,mBAAa;AAAA,IACf,OAAO;AAEL,aAAO;AAAA,IACT;AAAA,EACF;AAGA,MAAI,SAAS;AACX,WAAO;AAAA,EACT;AAGA,MAAI,YAAY;AACd,WAAO;AAAA,EACT;AAGA,SAAO;AACT;;;ADjMO,IAAM,cAAN,cAA0B,OAAO;AAAA,EACtC;AAAA,EAEA,YAAY,SAA6B;AAEvC,UAAM,SAAS,UAAU,QAAQ,IAAI;AAGrC,UAAM,SAAS,QAAQ,YAAY;AACnC,UAAM,KAAK,IAAIC,cAAa,MAAM;AAGlC,eAAW,SAAS,QAAQ;AAC1B,YAAM,YAAY,eAAe,KAAK;AACtC,SAAG,KAAK,SAAS;AACjB,eAAS,IAAI,KAAK;AAAA,IACpB;AAGA,UAAM;AAAA,MACJ,SAAS,CAAC,QAAgB,GAAG,QAAQ,GAAG,EAAE,IAAI;AAAA,MAC9C,WAAW,QAAQ;AAAA,IACrB,CAAC;AAED,SAAK,MAAM;AAAA,EACb;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,QAAc;AACZ,SAAK,IAAI,MAAM;AAAA,EACjB;AACF;AAKA,SAAS,eAAe,OAA4B;AAClD,MAAI,MAAM,QAAQ,WAAW,GAAG;AAC9B,UAAM,IAAI,MAAM,wBAAwB,MAAM,IAAI,oBAAoB;AAAA,EACxE;AAEA,QAAM,UAAU,MAAM,QACnB,IAAI,CAAC,QAAQ,IAAI,iBAAiB,IAAI,IAAI,CAAC,KAAK,IAAI,IAAI,EAAE,EAC1D,KAAK,IAAI;AAEZ,SAAO,iBAAiB,iBAAiB,MAAM,IAAI,CAAC,MAAM,OAAO;AACnE;AAWA,SAAS,SAAS,IAAkB,OAA0B;AAC5D,MAAI,MAAM,KAAK,WAAW,GAAG;AAC3B;AAAA,EACF;AAEA,QAAM,UAAU,MAAM,QACnB,IAAI,CAAC,MAAM,IAAI,iBAAiB,EAAE,IAAI,CAAC,GAAG,EAC1C,KAAK,IAAI;AACZ,QAAM,eAAe,MAAM,QAAQ,IAAI,MAAM,GAAG,EAAE,KAAK,IAAI;AAE3D,QAAM,YAAY,gBAAgB,iBAAiB,MAAM,IAAI,CAAC,MAAM,OAAO,aAAa,YAAY;AACpG,QAAM,OAAO,GAAG,QAAQ,SAAS;AAEjC,KAAG,KAAK,mBAAmB;AAE3B,MAAI;AACF,eAAW,OAAO,MAAM,MAAM;AAC5B,YAAM,SAAwB,MAAM,QAAQ,IAAI,CAAC,QAAQ;AAEvD,cAAM,WAAW,IAAI,IAAI,WAAW;AACpC,eAAO,aAAa,UAAU,IAAI,IAAI;AAAA,MACxC,CAAC;AACD,WAAK,IAAI,GAAG,MAAM;AAAA,IACpB;AACA,OAAG,KAAK,QAAQ;AAAA,EAClB,SAAS,OAAO;AACd,OAAG,KAAK,UAAU;AAClB,UAAM;AAAA,EACR;AACF;AAMA,SAAS,aAAa,OAAgB,MAA+B;AAEnE,MAAI,SAAS,QAAQ,UAAU,IAAI;AACjC,WAAO;AAAA,EACT;AAGA,MAAI,iBAAiB,MAAM;AACzB,WAAO,MAAM,YAAY,EAAE,MAAM,GAAG,EAAE,CAAC;AAAA,EACzC;AAEA,UAAQ,MAAM;AAAA,IACZ,KAAK,WAAW;AAEd,YAAM,MAAM,OAAO,KAAK;AACxB,UAAI,MAAM,GAAG,GAAG;AACd,eAAO;AAAA,MACT;AACA,aAAO,KAAK,MAAM,GAAG;AAAA,IACvB;AAAA,IAEA,KAAK,QAAQ;AAEX,YAAM
,MAAM,OAAO,KAAK;AACxB,UAAI,MAAM,GAAG,GAAG;AACd,eAAO;AAAA,MACT;AACA,aAAO;AAAA,IACT;AAAA,IAEA,KAAK;AAAA,IACL,SAAS;AAEP,UAAI,OAAO,UAAU,WAAW;AAC9B,eAAO,QAAQ,SAAS;AAAA,MAC1B;AACA,UAAI,OAAO,UAAU,UAAU;AAC7B,eAAO,KAAK,UAAU,KAAK;AAAA,MAC7B;AACA,aAAO,OAAO,KAAK;AAAA,IACrB;AAAA,EACF;AACF;AAKA,SAAS,iBAAiB,MAAsB;AAC9C,SAAO,KAAK,QAAQ,MAAM,IAAI;AAChC;",
+ "names": ["c", "column", "constraint", "table", "column", "groq", "spawn", "generateId", "chalk", "table", "tables", "column", "column", "DatabaseSync", "basename", "DatabaseSync"]
  }
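Taken together, the sources embedded in this map show the intended wiring: an adapter supplies `execute`/`runQuery`, and grounding factories (`tables()`, `info()`, `rowCount()`, …) are composed per adapter and run during introspection. A minimal sketch of that flow, assuming the public `@deepagents/text2sql/spreadsheet` entry point matches the JSDoc embedded above (the file path and table name are hypothetical):

```typescript
// Minimal sketch based on the JSDoc example embedded in the sources above.
// './sales.xlsx' and the Customers table are hypothetical.
import { Spreadsheet, tables, info } from '@deepagents/text2sql/spreadsheet';

// Parses the workbook and loads each sheet into an in-memory SQLite table.
const adapter = new Spreadsheet({
  file: './sales.xlsx',
  grounding: [tables(), info()], // grounding passes used by introspect()
});

const schema = await adapter.introspect(); // tables + dialect info
const rows = await adapter.execute('SELECT * FROM Customers');

adapter.close(); // releases the underlying node:sqlite database
```

Per the `Spreadsheet` constructor shown above, each sheet is bulk-loaded inside a transaction and all SQL is then delegated to the SQLite adapter, so with no `database` option the sketch runs entirely in memory (`':memory:'`).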