@workglow/dataset 0.0.86
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects changes between package versions as they appear in their public registries.
- package/LICENSE +201 -0
- package/README.md +1134 -0
- package/dist/browser.js +1053 -0
- package/dist/browser.js.map +20 -0
- package/dist/bun.js +1054 -0
- package/dist/bun.js.map +20 -0
- package/dist/common-server.d.ts +7 -0
- package/dist/common-server.d.ts.map +1 -0
- package/dist/common.d.ts +17 -0
- package/dist/common.d.ts.map +1 -0
- package/dist/document/Document.d.ts +50 -0
- package/dist/document/Document.d.ts.map +1 -0
- package/dist/document/DocumentDataset.d.ts +79 -0
- package/dist/document/DocumentDataset.d.ts.map +1 -0
- package/dist/document/DocumentDatasetRegistry.d.ts +29 -0
- package/dist/document/DocumentDatasetRegistry.d.ts.map +1 -0
- package/dist/document/DocumentNode.d.ts +31 -0
- package/dist/document/DocumentNode.d.ts.map +1 -0
- package/dist/document/DocumentSchema.d.ts +1668 -0
- package/dist/document/DocumentSchema.d.ts.map +1 -0
- package/dist/document/DocumentStorageSchema.d.ts +43 -0
- package/dist/document/DocumentStorageSchema.d.ts.map +1 -0
- package/dist/document/StructuralParser.d.ts +30 -0
- package/dist/document/StructuralParser.d.ts.map +1 -0
- package/dist/document-chunk/DocumentChunkDataset.d.ts +79 -0
- package/dist/document-chunk/DocumentChunkDataset.d.ts.map +1 -0
- package/dist/document-chunk/DocumentChunkDatasetRegistry.d.ts +29 -0
- package/dist/document-chunk/DocumentChunkDatasetRegistry.d.ts.map +1 -0
- package/dist/document-chunk/DocumentChunkSchema.d.ts +55 -0
- package/dist/document-chunk/DocumentChunkSchema.d.ts.map +1 -0
- package/dist/node.js +1053 -0
- package/dist/node.js.map +20 -0
- package/dist/types.d.ts +7 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/util/DatasetSchema.d.ts +85 -0
- package/dist/util/DatasetSchema.d.ts.map +1 -0
- package/package.json +54 -0
- package/src/document-chunk/README.md +362 -0
@@ -0,0 +1,20 @@
{
"version": 3,
"sources": ["../src/util/DatasetSchema.ts", "../src/document/Document.ts", "../src/document/DocumentDataset.ts", "../src/document/DocumentDatasetRegistry.ts", "../src/document/DocumentSchema.ts", "../src/document/DocumentNode.ts", "../src/document/DocumentStorageSchema.ts", "../src/document/StructuralParser.ts", "../src/document-chunk/DocumentChunkDataset.ts", "../src/document-chunk/DocumentChunkDatasetRegistry.ts", "../src/document-chunk/DocumentChunkSchema.ts"],
"sourcesContent": [
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { JsonSchema } from \"@workglow/util\";\n\n/**\n * Semantic format types for dataset schema annotations.\n * These are used by the InputResolver to determine how to resolve string IDs.\n */\nexport type DatasetSemantic = \"dataset:tabular\" | \"dataset:document-chunk\" | \"dataset:document\";\n\n/**\n * Creates a JSON schema for a tabular dataset input.\n * The schema accepts either a string ID (resolved from registry) or a direct dataset instance.\n *\n * @param options Additional schema options to merge\n * @returns JSON schema for tabular dataset input\n *\n * @example\n * ```typescript\n * const inputSchema = {\n * type: \"object\",\n * properties: {\n * dataSource: TypeTabularStorage({\n * title: \"User Database\",\n * description: \"Dataset containing user records\",\n * }),\n * },\n * required: [\"dataSource\"],\n * } as const;\n * ```\n */\nexport function TypeTabularStorage<O extends Record<string, unknown> = {}>(options: O = {} as O) {\n return {\n title: \"Tabular Storage\",\n description: \"Storage ID or instance for tabular data storage\",\n ...options,\n format: \"storage:tabular\" as const,\n oneOf: [\n { type: \"string\" as const, title: \"Storage ID\" },\n { title: \"Storage Instance\", additionalProperties: true },\n ],\n } as const satisfies JsonSchema;\n}\n\n/**\n * Creates a JSON schema for a document chunk dataset input.\n * The schema accepts either a string ID (resolved from registry) or a direct dataset instance.\n *\n * @param options Additional schema options to merge\n * @returns JSON schema for document chunk dataset input\n */\nexport function TypeDocumentChunkDataset<O extends Record<string, unknown> = {}>(\n options: O = {} as O\n) {\n return {\n title: \"Document Chunk Dataset\",\n description: \"Dataset ID or instance for document chunk data storage\",\n ...options,\n format: \"dataset:document-chunk\" as const,\n anyOf: [\n { type: \"string\" as const, title: \"Dataset ID\" },\n { title: \"Dataset Instance\", additionalProperties: true },\n ],\n } as const satisfies JsonSchema;\n}\n\n/**\n * Creates a JSON schema for a document dataset input.\n * The schema accepts either a string ID (resolved from registry) or a direct dataset instance.\n *\n * @param options Additional schema options to merge\n * @returns JSON schema for document dataset input\n */\nexport function TypeDocumentDataset<O extends Record<string, unknown> = {}>(options: O = {} as O) {\n return {\n title: \"Document Dataset\",\n description: \"Dataset ID or instance for document data storage\",\n ...options,\n format: \"dataset:document\" as const,\n anyOf: [\n { type: \"string\" as const, title: \"Dataset ID\" },\n { title: \"Dataset Instance\", additionalProperties: true },\n ],\n } as const satisfies JsonSchema;\n}\n",
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { ChunkNode, DocumentMetadata, DocumentNode } from \"./DocumentSchema\";\n\n/**\n * Document represents a hierarchical document with chunks\n *\n * Key features:\n * - Single source-of-truth tree structure (root node)\n * - Single set of chunks\n * - Separate persistence for document structure vs vectors\n */\nexport class Document {\n public doc_id: string | undefined;\n public readonly metadata: DocumentMetadata;\n public readonly root: DocumentNode;\n private chunks: ChunkNode[];\n\n constructor(\n root: DocumentNode,\n metadata: DocumentMetadata,\n chunks: ChunkNode[] = [],\n doc_id?: string\n ) {\n this.doc_id = doc_id;\n this.root = root;\n this.metadata = metadata;\n this.chunks = chunks || [];\n }\n\n /**\n * Set chunks for the document\n */\n setChunks(chunks: ChunkNode[]): void {\n this.chunks = chunks;\n }\n\n /**\n * Get all chunks\n */\n getChunks(): ChunkNode[] {\n return this.chunks;\n }\n\n /**\n * Set the document ID\n */\n setDocId(doc_id: string): void {\n this.doc_id = doc_id;\n }\n\n /**\n * Find chunks by nodeId\n */\n findChunksByNodeId(nodeId: string): ChunkNode[] {\n return this.chunks.filter((chunk) => chunk.nodePath.includes(nodeId));\n }\n\n /**\n * Serialize to JSON\n */\n toJSON(): {\n metadata: DocumentMetadata;\n root: DocumentNode;\n chunks: ChunkNode[];\n } {\n return {\n metadata: this.metadata,\n root: this.root,\n chunks: this.chunks,\n };\n }\n\n /**\n * Deserialize from JSON\n */\n static fromJSON(json: string, doc_id?: string): Document {\n const obj = JSON.parse(json);\n return new Document(obj.root, obj.metadata, obj.chunks, doc_id);\n }\n\n}\n",
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { VectorSearchOptions } from \"@workglow/storage\";\nimport type { TypedArray } from \"@workglow/util\";\nimport type { DocumentChunk, DocumentChunkStorage } from \"../document-chunk/DocumentChunkSchema\";\nimport { Document } from \"./Document\";\nimport { ChunkNode, DocumentNode } from \"./DocumentSchema\";\nimport {\n DocumentStorageEntity,\n DocumentTabularStorage,\n InsertDocumentStorageEntity,\n} from \"./DocumentStorageSchema\";\n\n/**\n * Document dataset that uses TabularStorage for document persistence and VectorStorage for chunk persistence and similarity search.\n * This is a unified implementation that composes storage backends rather than using\n * inheritance/interface patterns.\n */\nexport class DocumentDataset {\n private tabularStorage: DocumentTabularStorage;\n private vectorStorage?: DocumentChunkStorage;\n\n /**\n * Creates a new DocumentDataset instance.\n *\n * @param tabularStorage - Pre-initialized tabular storage for document persistence\n * @param vectorStorage - Pre-initialized vector storage for chunk similarity search\n *\n * @example\n * ```typescript\n * const tabularStorage = new InMemoryTabularStorage(DocumentStorageSchema, [\"doc_id\"]);\n * await tabularStorage.setupDatabase();\n *\n * const vectorStorage = new InMemoryVectorStorage();\n * await vectorStorage.setupDatabase();\n *\n * const docDataset = new DocumentDataset(tabularStorage, vectorStorage);\n * ```\n */\n constructor(tabularStorage: DocumentTabularStorage, vectorStorage?: DocumentChunkStorage) {\n this.tabularStorage = tabularStorage;\n this.vectorStorage = vectorStorage;\n }\n\n /**\n * Upsert a document\n * @returns The document with the generated doc_id if it was auto-generated\n */\n async upsert(document: Document): Promise<Document> {\n const serialized = JSON.stringify(document.toJSON());\n \n const insertEntity: InsertDocumentStorageEntity = {\n doc_id: document.doc_id,\n data: serialized,\n };\n const entity = await this.tabularStorage.put(insertEntity);\n\n // If doc_id was auto-generated, return document with the generated ID\n if (document.doc_id !== entity.doc_id) {\n document.setDocId(entity.doc_id);\n }\n return document;\n }\n\n /**\n * Get a document by ID\n */\n async get(doc_id: string): Promise<Document | undefined> {\n const entity = await this.tabularStorage.get({ doc_id: doc_id });\n if (!entity) {\n return undefined;\n }\n return Document.fromJSON(entity.data, entity.doc_id);\n }\n\n /**\n * Delete a document\n */\n async delete(doc_id: string): Promise<void> {\n await this.tabularStorage.delete({ doc_id: doc_id });\n }\n\n /**\n * Get a specific node by ID\n */\n async getNode(doc_id: string, nodeId: string): Promise<DocumentNode | undefined> {\n const doc = await this.get(doc_id);\n if (!doc) {\n return undefined;\n }\n\n // Traverse tree to find node\n const traverse = (node: any): any => {\n if (node.nodeId === nodeId) {\n return node;\n }\n if (node.children && Array.isArray(node.children)) {\n for (const child of node.children) {\n const found = traverse(child);\n if (found) return found;\n }\n }\n return undefined;\n };\n\n return traverse(doc.root);\n }\n\n /**\n * Get ancestors of a node (from root to node)\n */\n async getAncestors(doc_id: string, nodeId: string): Promise<DocumentNode[]> {\n const doc = await this.get(doc_id);\n if (!doc) {\n return [];\n }\n\n // Get path from root to target node\n const path: string[] = 
[];\n const findPath = (node: any): boolean => {\n path.push(node.nodeId);\n if (node.nodeId === nodeId) {\n return true;\n }\n if (node.children && Array.isArray(node.children)) {\n for (const child of node.children) {\n if (findPath(child)) {\n return true;\n }\n }\n }\n path.pop();\n return false;\n };\n\n if (!findPath(doc.root)) {\n return [];\n }\n\n // Collect nodes along the path\n const ancestors: any[] = [];\n let currentNode: any = doc.root;\n ancestors.push(currentNode);\n\n for (let i = 1; i < path.length; i++) {\n const targetId = path[i];\n if (currentNode.children && Array.isArray(currentNode.children)) {\n const found = currentNode.children.find((child: any) => child.nodeId === targetId);\n if (found) {\n currentNode = found;\n ancestors.push(currentNode);\n } else {\n break;\n }\n } else {\n break;\n }\n }\n\n return ancestors;\n }\n\n /**\n * Get chunks for a document\n */\n async getChunks(doc_id: string): Promise<ChunkNode[]> {\n const doc = await this.get(doc_id);\n if (!doc) {\n return [];\n }\n return doc.getChunks();\n }\n\n /**\n * Find chunks that contain a specific nodeId in their path\n */\n async findChunksByNodeId(doc_id: string, nodeId: string): Promise<ChunkNode[]> {\n const doc = await this.get(doc_id);\n if (!doc) {\n return [];\n }\n if (doc.findChunksByNodeId) {\n return doc.findChunksByNodeId(nodeId);\n }\n // Fallback implementation\n const chunks = doc.getChunks();\n return chunks.filter((chunk) => chunk.nodePath && chunk.nodePath.includes(nodeId));\n }\n\n /**\n * List all document IDs\n */\n async list(): Promise<string[]> {\n const entities = await this.tabularStorage.getAll();\n if (!entities) {\n return [];\n }\n return entities.map((e: DocumentStorageEntity) => e.doc_id);\n }\n\n /**\n * Search for similar vectors using the vector storage\n * @param query - Query vector to search for\n * @param options - Search options (topK, filter, scoreThreshold)\n * @returns Array of search results sorted by similarity\n */\n async search(\n query: TypedArray,\n options?: VectorSearchOptions<Record<string, unknown>>\n ): Promise<Array<DocumentChunk<Record<string, unknown>, TypedArray>>> {\n return this.vectorStorage?.similaritySearch(query, options) || [];\n }\n}\n",
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport {\n createServiceToken,\n globalServiceRegistry,\n registerInputResolver,\n ServiceRegistry,\n} from \"@workglow/util\";\nimport type { DocumentDataset } from \"./DocumentDataset\";\n\n/**\n * Service token for the document dataset registry\n * Maps dataset IDs to DocumentDataset instances\n */\nexport const DOCUMENT_DATASETS =\n createServiceToken<Map<string, DocumentDataset>>(\"dataset.documents\");\n\n// Register default factory if not already registered\nif (!globalServiceRegistry.has(DOCUMENT_DATASETS)) {\n globalServiceRegistry.register(\n DOCUMENT_DATASETS,\n (): Map<string, DocumentDataset> => new Map(),\n true\n );\n}\n\n/**\n * Gets the global document dataset registry\n * @returns Map of document dataset ID to instance\n */\nexport function getGlobalDocumentDatasets(): Map<string, DocumentDataset> {\n return globalServiceRegistry.get(DOCUMENT_DATASETS);\n}\n\n/**\n * Registers a document dataset globally by ID\n * @param id The unique identifier for this dataset\n * @param dataset The dataset instance to register\n */\nexport function registerDocumentDataset(id: string, dataset: DocumentDataset): void {\n const datasets = getGlobalDocumentDatasets();\n datasets.set(id, dataset);\n}\n\n/**\n * Gets a document dataset by ID from the global registry\n * @param id The dataset identifier\n * @returns The dataset instance or undefined if not found\n */\nexport function getDocumentDataset(id: string): DocumentDataset | undefined {\n return getGlobalDocumentDatasets().get(id);\n}\n\n/**\n * Resolves a dataset ID to a DocumentDataset from the registry.\n * Used by the input resolver system.\n */\nasync function resolveDocumentDatasetFromRegistry(\n id: string,\n format: string,\n registry: ServiceRegistry\n): Promise<DocumentDataset> {\n const datasets = registry.has(DOCUMENT_DATASETS)\n ? registry.get<Map<string, DocumentDataset>>(DOCUMENT_DATASETS)\n : getGlobalDocumentDatasets();\n\n const dataset = datasets.get(id);\n if (!dataset) {\n throw new Error(`Document dataset \"${id}\" not found in registry`);\n }\n return dataset;\n}\n\n// Register the dataset resolver for format: \"dataset:document\"\nregisterInputResolver(\"dataset:document\", resolveDocumentDatasetFromRegistry);\n",
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { DataPortSchema, FromSchema, JsonSchema } from \"@workglow/util\";\n\n/**\n * Node kind discriminator for hierarchical document structure\n */\nexport const NodeKind = {\n DOCUMENT: \"document\",\n SECTION: \"section\",\n PARAGRAPH: \"paragraph\",\n SENTENCE: \"sentence\",\n TOPIC: \"topic\",\n} as const;\n\nexport type NodeKind = (typeof NodeKind)[keyof typeof NodeKind];\n\n// =============================================================================\n// Schema Definitions\n// =============================================================================\n\n/**\n * Schema for source range of a node (character offsets)\n */\nexport const NodeRangeSchema = {\n type: \"object\",\n properties: {\n startOffset: {\n type: \"integer\",\n title: \"Start Offset\",\n description: \"Starting character offset\",\n },\n endOffset: {\n type: \"integer\",\n title: \"End Offset\",\n description: \"Ending character offset\",\n },\n },\n required: [\"startOffset\", \"endOffset\"],\n additionalProperties: false,\n} as const satisfies DataPortSchema;\n\nexport type NodeRange = FromSchema<typeof NodeRangeSchema>;\n\n/**\n * Schema for named entity extracted from text\n */\nexport const EntitySchema = {\n type: \"object\",\n properties: {\n text: {\n type: \"string\",\n title: \"Text\",\n description: \"Entity text\",\n },\n type: {\n type: \"string\",\n title: \"Type\",\n description: \"Entity type (e.g., PERSON, ORG, LOC)\",\n },\n score: {\n type: \"number\",\n title: \"Score\",\n description: \"Confidence score\",\n },\n },\n required: [\"text\", \"type\", \"score\"],\n additionalProperties: false,\n} as const satisfies DataPortSchema;\n\nexport type Entity = FromSchema<typeof EntitySchema>;\n\n/**\n * Schema for enrichment data attached to a node\n */\nexport const NodeEnrichmentSchema = {\n type: \"object\",\n properties: {\n summary: {\n type: \"string\",\n title: \"Summary\",\n description: \"Summary of the node content\",\n },\n entities: {\n type: \"array\",\n items: EntitySchema,\n title: \"Entities\",\n description: \"Named entities extracted from the node\",\n },\n keywords: {\n type: \"array\",\n items: { type: \"string\" },\n title: \"Keywords\",\n description: \"Keywords associated with the node\",\n },\n },\n additionalProperties: false,\n} as const satisfies DataPortSchema;\n\nexport type NodeEnrichment = FromSchema<typeof NodeEnrichmentSchema>;\n\n/**\n * Schema for base document node fields (used for runtime validation)\n * Note: Individual node types and DocumentNode union are defined as interfaces\n * below because FromSchema cannot properly infer recursive discriminated unions.\n */\nexport const DocumentNodeBaseSchema = {\n type: \"object\",\n properties: {\n nodeId: {\n type: \"string\",\n title: \"Node ID\",\n description: \"Unique identifier for this node\",\n },\n kind: {\n type: \"string\",\n enum: Object.values(NodeKind),\n title: \"Kind\",\n description: \"Node type discriminator\",\n },\n range: NodeRangeSchema,\n text: {\n type: \"string\",\n title: \"Text\",\n description: \"Text content of the node\",\n },\n enrichment: NodeEnrichmentSchema,\n },\n required: [\"nodeId\", \"kind\", \"range\", \"text\"],\n additionalProperties: true,\n} as const satisfies DataPortSchema;\n\n/**\n * Schema for document node (generic, for runtime validation)\n * This is a simplified schema for task input/output validation.\n * The actual TypeScript types use a 
proper discriminated union.\n */\nexport const DocumentNodeSchema = {\n type: \"object\",\n title: \"Document Node\",\n description: \"A node in the hierarchical document tree\",\n properties: {\n ...DocumentNodeBaseSchema.properties,\n level: {\n type: \"integer\",\n title: \"Level\",\n description: \"Header level for section nodes\",\n },\n title: {\n type: \"string\",\n title: \"Title\",\n description: \"Section title\",\n },\n children: {\n type: \"array\",\n title: \"Children\",\n description: \"Child nodes\",\n },\n },\n required: [...DocumentNodeBaseSchema.required],\n additionalProperties: false,\n} as const satisfies DataPortSchema;\n\n/**\n * Schema for paragraph node\n */\nexport const ParagraphNodeSchema = {\n type: \"object\",\n properties: {\n ...DocumentNodeBaseSchema.properties,\n kind: {\n type: \"string\",\n const: NodeKind.PARAGRAPH,\n title: \"Kind\",\n description: \"Node type discriminator\",\n },\n },\n required: [...DocumentNodeBaseSchema.required],\n additionalProperties: false,\n} as const satisfies DataPortSchema;\n\n/**\n * Schema for sentence node\n */\nexport const SentenceNodeSchema = {\n type: \"object\",\n properties: {\n ...DocumentNodeBaseSchema.properties,\n kind: {\n type: \"string\",\n const: NodeKind.SENTENCE,\n title: \"Kind\",\n description: \"Node type discriminator\",\n },\n },\n required: [...DocumentNodeBaseSchema.required],\n additionalProperties: false,\n} as const satisfies DataPortSchema;\n\n/**\n * Schema for section node\n */\nexport const SectionNodeSchema = {\n type: \"object\",\n properties: {\n ...DocumentNodeBaseSchema.properties,\n kind: {\n type: \"string\",\n const: NodeKind.SECTION,\n title: \"Kind\",\n description: \"Node type discriminator\",\n },\n level: {\n type: \"integer\",\n minimum: 1,\n maximum: 6,\n title: \"Level\",\n description: \"Header level (1-6 for markdown)\",\n },\n title: {\n type: \"string\",\n title: \"Title\",\n description: \"Section title\",\n },\n children: {\n type: \"array\",\n items: DocumentNodeSchema,\n title: \"Children\",\n description: \"Child nodes\",\n },\n },\n required: [...DocumentNodeBaseSchema.required, \"level\", \"title\", \"children\"],\n additionalProperties: false,\n} as const satisfies DataPortSchema;\n\n/**\n * Schema for topic node\n */\nexport const TopicNodeSchema = {\n type: \"object\",\n properties: {\n ...DocumentNodeBaseSchema.properties,\n kind: {\n type: \"string\",\n const: NodeKind.TOPIC,\n title: \"Kind\",\n description: \"Node type discriminator\",\n },\n children: {\n type: \"array\",\n items: DocumentNodeSchema,\n title: \"Children\",\n description: \"Child nodes\",\n },\n },\n required: [...DocumentNodeBaseSchema.required, \"children\"],\n additionalProperties: false,\n} as const satisfies DataPortSchema;\n\n/**\n * Schema for document root node\n */\nexport const DocumentRootNodeSchema = {\n type: \"object\",\n properties: {\n ...DocumentNodeBaseSchema.properties,\n kind: {\n type: \"string\",\n const: NodeKind.DOCUMENT,\n title: \"Kind\",\n description: \"Node type discriminator\",\n },\n title: {\n type: \"string\",\n title: \"Title\",\n description: \"Document title\",\n },\n children: {\n type: \"array\",\n items: DocumentNodeSchema,\n title: \"Children\",\n description: \"Child nodes\",\n },\n },\n required: [...DocumentNodeBaseSchema.required, \"title\", \"children\"],\n additionalProperties: false,\n} as const satisfies DataPortSchema;\n\n// =============================================================================\n// Manually-defined interfaces for 
recursive discriminated union types\n// These provide better TypeScript inference than FromSchema for recursive types\n// =============================================================================\n\n/**\n * Base document node fields\n */\ninterface DocumentNodeBase {\n readonly nodeId: string;\n readonly kind: NodeKind;\n readonly range: NodeRange;\n readonly text: string;\n readonly enrichment?: NodeEnrichment;\n}\n\n/**\n * Document root node\n */\nexport interface DocumentRootNode extends DocumentNodeBase {\n readonly kind: typeof NodeKind.DOCUMENT;\n readonly title: string;\n readonly children: DocumentNode[];\n}\n\n/**\n * Section node (from markdown headers or structural divisions)\n */\nexport interface SectionNode extends DocumentNodeBase {\n readonly kind: typeof NodeKind.SECTION;\n readonly level: number;\n readonly title: string;\n readonly children: DocumentNode[];\n}\n\n/**\n * Paragraph node\n */\nexport interface ParagraphNode extends DocumentNodeBase {\n readonly kind: typeof NodeKind.PARAGRAPH;\n}\n\n/**\n * Sentence node (optional fine-grained segmentation)\n */\nexport interface SentenceNode extends DocumentNodeBase {\n readonly kind: typeof NodeKind.SENTENCE;\n}\n\n/**\n * Topic segment node (from TopicSegmenter)\n */\nexport interface TopicNode extends DocumentNodeBase {\n readonly kind: typeof NodeKind.TOPIC;\n readonly children: DocumentNode[];\n}\n\n/**\n * Discriminated union of all document node types\n */\nexport type DocumentNode =\n | DocumentRootNode\n | SectionNode\n | ParagraphNode\n | SentenceNode\n | TopicNode;\n\n// =============================================================================\n// Token Budget and Chunk Schemas\n// =============================================================================\n\n/**\n * Schema for token budget configuration\n */\nexport const TokenBudgetSchema = {\n type: \"object\",\n properties: {\n maxTokensPerChunk: {\n type: \"integer\",\n title: \"Max Tokens Per Chunk\",\n description: \"Maximum tokens allowed per chunk\",\n },\n overlapTokens: {\n type: \"integer\",\n title: \"Overlap Tokens\",\n description: \"Number of tokens to overlap between chunks\",\n },\n reservedTokens: {\n type: \"integer\",\n title: \"Reserved Tokens\",\n description: \"Tokens reserved for metadata or context\",\n },\n },\n required: [\"maxTokensPerChunk\", \"overlapTokens\", \"reservedTokens\"],\n additionalProperties: false,\n} as const satisfies DataPortSchema;\n\nexport type TokenBudget = FromSchema<typeof TokenBudgetSchema>;\n\n/**\n * Schema for chunk enrichment\n */\nexport const ChunkEnrichmentSchema = {\n type: \"object\",\n properties: {\n summary: {\n type: \"string\",\n title: \"Summary\",\n description: \"Summary of the chunk content\",\n },\n entities: {\n type: \"array\",\n items: EntitySchema,\n title: \"Entities\",\n description: \"Named entities extracted from the chunk\",\n },\n },\n additionalProperties: false,\n} as const satisfies DataPortSchema;\n\nexport type ChunkEnrichment = FromSchema<typeof ChunkEnrichmentSchema>;\n\n/**\n * Schema for chunk node (output of HierarchicalChunker)\n */\nexport const ChunkNodeSchema = () =>\n ({\n type: \"object\",\n properties: {\n chunkId: {\n type: \"string\",\n title: \"Chunk ID\",\n description: \"Unique identifier for this chunk\",\n },\n doc_id: {\n type: \"string\",\n title: \"Document ID\",\n description: \"ID of the parent document\",\n },\n text: {\n type: \"string\",\n title: \"Text\",\n description: \"Text content of the chunk\",\n },\n nodePath: {\n type: 
\"array\",\n items: { type: \"string\" },\n title: \"Node Path\",\n description: \"Node IDs from root to leaf\",\n },\n depth: {\n type: \"integer\",\n title: \"Depth\",\n description: \"Depth in the document tree\",\n },\n enrichment: ChunkEnrichmentSchema,\n },\n required: [\"chunkId\", \"doc_id\", \"text\", \"nodePath\", \"depth\"],\n additionalProperties: false,\n }) as const satisfies DataPortSchema;\n\nexport type ChunkNode = FromSchema<ReturnType<typeof ChunkNodeSchema>>;\n\n// =============================================================================\n// Chunk Metadata Schemas (for vector store)\n// =============================================================================\n\n/**\n * Schema for chunk metadata stored in vector database\n * This is the metadata output from ChunkToVectorTask\n */\nexport const ChunkMetadataSchema = {\n type: \"object\",\n properties: {\n doc_id: {\n type: \"string\",\n title: \"Document ID\",\n description: \"ID of the parent document\",\n },\n chunkId: {\n type: \"string\",\n title: \"Chunk ID\",\n description: \"Unique identifier for this chunk\",\n },\n leafNodeId: {\n type: \"string\",\n title: \"Leaf Node ID\",\n description: \"ID of the leaf node this chunk belongs to\",\n },\n depth: {\n type: \"integer\",\n title: \"Depth\",\n description: \"Depth in the document tree\",\n },\n text: {\n type: \"string\",\n title: \"Text\",\n description: \"Text content of the chunk\",\n },\n nodePath: {\n type: \"array\",\n items: { type: \"string\" },\n title: \"Node Path\",\n description: \"Node IDs from root to leaf\",\n },\n summary: {\n type: \"string\",\n title: \"Summary\",\n description: \"Summary of the chunk content\",\n },\n entities: {\n type: \"array\",\n items: EntitySchema,\n title: \"Entities\",\n description: \"Named entities extracted from the chunk\",\n },\n },\n required: [\"doc_id\", \"chunkId\", \"leafNodeId\", \"depth\", \"text\", \"nodePath\"],\n additionalProperties: true,\n} as const satisfies DataPortSchema;\n\nexport type ChunkMetadata = FromSchema<typeof ChunkMetadataSchema>;\n\n/**\n * Schema for chunk metadata array (for use in task schemas)\n */\nexport const ChunkMetadataArraySchema = {\n type: \"array\",\n items: ChunkMetadataSchema,\n title: \"Chunk Metadata\",\n description: \"Metadata for each chunk\",\n} as const satisfies JsonSchema;\n\n/**\n * Schema for enriched chunk metadata (after HierarchyJoinTask)\n * Extends ChunkMetadata with hierarchy information from document repository\n */\nexport const EnrichedChunkMetadataSchema = {\n type: \"object\",\n properties: {\n doc_id: {\n type: \"string\",\n title: \"Document ID\",\n description: \"ID of the parent document\",\n },\n chunkId: {\n type: \"string\",\n title: \"Chunk ID\",\n description: \"Unique identifier for this chunk\",\n },\n leafNodeId: {\n type: \"string\",\n title: \"Leaf Node ID\",\n description: \"ID of the leaf node this chunk belongs to\",\n },\n depth: {\n type: \"integer\",\n title: \"Depth\",\n description: \"Depth in the document tree\",\n },\n text: {\n type: \"string\",\n title: \"Text\",\n description: \"Text content of the chunk\",\n },\n nodePath: {\n type: \"array\",\n items: { type: \"string\" },\n title: \"Node Path\",\n description: \"Node IDs from root to leaf\",\n },\n summary: {\n type: \"string\",\n title: \"Summary\",\n description: \"Summary of the chunk content\",\n },\n entities: {\n type: \"array\",\n items: EntitySchema,\n title: \"Entities\",\n description: \"Named entities (rolled up from hierarchy)\",\n },\n 
parentSummaries: {\n type: \"array\",\n items: { type: \"string\" },\n title: \"Parent Summaries\",\n description: \"Summaries from ancestor nodes\",\n },\n sectionTitles: {\n type: \"array\",\n items: { type: \"string\" },\n title: \"Section Titles\",\n description: \"Titles of ancestor section nodes\",\n },\n },\n required: [\"doc_id\", \"chunkId\", \"leafNodeId\", \"depth\", \"text\", \"nodePath\"],\n additionalProperties: true,\n} as const satisfies DataPortSchema;\n\nexport type EnrichedChunkMetadata = FromSchema<typeof EnrichedChunkMetadataSchema>;\n\n/**\n * Schema for enriched chunk metadata array (for use in task schemas)\n */\nexport const EnrichedChunkMetadataArraySchema = {\n type: \"array\",\n items: EnrichedChunkMetadataSchema,\n title: \"Enriched Metadata\",\n description: \"Metadata enriched with hierarchy information\",\n} as const satisfies JsonSchema;\n\n/**\n * Schema for document metadata\n */\nexport const DocumentMetadataSchema = {\n type: \"object\",\n properties: {\n title: {\n type: \"string\",\n title: \"Title\",\n description: \"Document title\",\n },\n sourceUri: {\n type: \"string\",\n title: \"Source URI\",\n description: \"Original source URI of the document\",\n },\n createdAt: {\n type: \"string\",\n title: \"Created At\",\n description: \"ISO timestamp of creation\",\n },\n },\n required: [\"title\"],\n additionalProperties: true,\n} as const satisfies DataPortSchema;\n\nexport type DocumentMetadata = FromSchema<typeof DocumentMetadataSchema>;\n",
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport {\n NodeKind,\n type DocumentNode,\n type DocumentRootNode,\n type NodeRange,\n type SectionNode,\n type TopicNode,\n} from \"./DocumentSchema\";\n\n/**\n * Approximate token counting (v1)\n */\nexport function estimateTokens(text: string): number {\n return Math.ceil(text.length / 4);\n}\n\n/**\n * Helper to check if a node has children\n */\nexport function hasChildren(\n node: DocumentNode\n): node is DocumentRootNode | SectionNode | TopicNode {\n return (\n node.kind === NodeKind.DOCUMENT ||\n node.kind === NodeKind.SECTION ||\n node.kind === NodeKind.TOPIC\n );\n}\n\n/**\n * Helper to get all children of a node\n */\nexport function getChildren(node: DocumentNode): DocumentNode[] {\n if (hasChildren(node)) {\n return node.children;\n }\n return [];\n}\n\n/**\n * Traverse document tree depth-first\n */\nexport function* traverseDepthFirst(node: DocumentNode): Generator<DocumentNode> {\n yield node;\n if (hasChildren(node)) {\n for (const child of node.children) {\n yield* traverseDepthFirst(child);\n }\n }\n}\n\n/**\n * Get node path from root to target node\n */\nexport function getNodePath(root: DocumentNode, targetNodeId: string): string[] | undefined {\n const path: string[] = [];\n\n function search(node: DocumentNode): boolean {\n path.push(node.nodeId);\n if (node.nodeId === targetNodeId) {\n return true;\n }\n if (hasChildren(node)) {\n for (const child of node.children) {\n if (search(child)) {\n return true;\n }\n }\n }\n path.pop();\n return false;\n }\n\n return search(root) ? path : undefined;\n}\n\n/**\n * Get document range for a node path\n */\nexport function getDocumentRange(root: DocumentNode, nodePath: string[]): NodeRange {\n let currentNode = root as DocumentRootNode | SectionNode | TopicNode;\n\n // Start from index 1 since nodePath[0] is the root\n for (let i = 1; i < nodePath.length; i++) {\n const targetId = nodePath[i];\n const children = currentNode.children;\n let found: DocumentNode | undefined;\n\n for (let j = 0; j < children.length; j++) {\n if (children[j].nodeId === targetId) {\n found = children[j];\n break;\n }\n }\n\n if (!found) {\n throw new Error(`Node with id ${targetId} not found in path`);\n }\n\n currentNode = found as DocumentRootNode | SectionNode | TopicNode;\n }\n\n return currentNode.range;\n}\n",
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { ITabularStorage } from \"@workglow/storage\";\nimport {\n TypedArraySchemaOptions,\n type DataPortSchemaObject,\n type FromSchema,\n} from \"@workglow/util\";\n\n/**\n * Schema for storing documents in tabular storage\n */\nexport const DocumentStorageSchema = {\n type: \"object\",\n properties: {\n doc_id: {\n type: \"string\",\n \"x-auto-generated\": true,\n title: \"Document ID\",\n description: \"Unique identifier for the document\",\n },\n data: {\n type: \"string\",\n title: \"Document Data\",\n description: \"JSON-serialized document\",\n },\n metadata: {\n type: \"object\",\n title: \"Metadata\",\n description: \"Metadata of the document\",\n },\n },\n required: [\"doc_id\", \"data\"],\n additionalProperties: true,\n} as const satisfies DataPortSchemaObject;\nexport type DocumentStorageSchema = typeof DocumentStorageSchema;\n\nexport const DocumentStorageKey = [\"doc_id\"] as const;\nexport type DocumentStorageKey = typeof DocumentStorageKey;\n\nexport type DocumentStorageEntity = FromSchema<DocumentStorageSchema, TypedArraySchemaOptions>;\n\n/**\n * Type for inserting documents - doc_id is optional (auto-generated)\n */\nexport type InsertDocumentStorageEntity = Omit<DocumentStorageEntity, \"doc_id\"> &\n Partial<Pick<DocumentStorageEntity, \"doc_id\">>;\n\nexport type DocumentTabularStorage = ITabularStorage<\n typeof DocumentStorageSchema,\n DocumentStorageKey,\n DocumentStorageEntity\n>;\n",
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { uuid4 } from \"@workglow/util\";\nimport {\n type DocumentRootNode,\n NodeKind,\n type ParagraphNode,\n type SectionNode,\n} from \"./DocumentSchema\";\n\n/**\n * Parse markdown into a hierarchical DocumentNode tree\n */\nexport class StructuralParser {\n /**\n * Parse markdown text into a hierarchical document tree\n */\n static async parseMarkdown(\n doc_id: string,\n text: string,\n title: string\n ): Promise<DocumentRootNode> {\n const lines = text.split(\"\\n\");\n let currentOffset = 0;\n\n const root: DocumentRootNode = {\n nodeId: uuid4(),\n kind: NodeKind.DOCUMENT,\n range: { startOffset: 0, endOffset: text.length },\n text: title,\n title,\n children: [],\n };\n\n let currentParentStack: Array<DocumentRootNode | SectionNode> = [root];\n let textBuffer: string[] = [];\n let textBufferStartOffset = 0;\n\n const flushTextBuffer = async () => {\n if (textBuffer.length > 0) {\n const content = textBuffer.join(\"\\n\").trim();\n if (content) {\n const paragraphStartOffset = textBufferStartOffset;\n const paragraphEndOffset = currentOffset;\n\n const paragraph: ParagraphNode = {\n nodeId: uuid4(),\n kind: NodeKind.PARAGRAPH,\n range: {\n startOffset: paragraphStartOffset,\n endOffset: paragraphEndOffset,\n },\n text: content,\n };\n\n currentParentStack[currentParentStack.length - 1].children.push(paragraph);\n }\n textBuffer = [];\n }\n };\n\n for (const line of lines) {\n const lineLength = line.length + 1; // +1 for newline\n\n // Check if line is a header\n const headerMatch = line.match(/^(#{1,6})\\s+(.*)$/);\n if (headerMatch) {\n await flushTextBuffer();\n\n const level = headerMatch[1].length;\n const headerTitle = headerMatch[2];\n\n // Pop stack until we find appropriate parent\n while (\n currentParentStack.length > 1 &&\n currentParentStack[currentParentStack.length - 1].kind === NodeKind.SECTION &&\n (currentParentStack[currentParentStack.length - 1] as SectionNode).level >= level\n ) {\n const poppedSection = currentParentStack.pop() as SectionNode;\n // Update endOffset of popped section\n const updatedSection: SectionNode = {\n ...poppedSection,\n range: {\n ...poppedSection.range,\n endOffset: currentOffset,\n },\n };\n // Replace in parent's children\n const parent = currentParentStack[currentParentStack.length - 1];\n parent.children[parent.children.length - 1] = updatedSection;\n }\n\n const sectionStartOffset = currentOffset;\n const section: SectionNode = {\n nodeId: uuid4(),\n kind: NodeKind.SECTION,\n level,\n title: headerTitle,\n range: {\n startOffset: sectionStartOffset,\n endOffset: text.length,\n },\n text: headerTitle,\n children: [],\n };\n\n currentParentStack[currentParentStack.length - 1].children.push(section);\n currentParentStack.push(section);\n } else {\n // Accumulate text\n if (textBuffer.length === 0) {\n textBufferStartOffset = currentOffset;\n }\n textBuffer.push(line);\n }\n\n currentOffset += lineLength;\n }\n\n await flushTextBuffer();\n\n // Close any remaining sections\n while (currentParentStack.length > 1) {\n const section = currentParentStack.pop() as SectionNode;\n const updatedSection: SectionNode = {\n ...section,\n range: {\n ...section.range,\n endOffset: text.length,\n },\n };\n const parent = currentParentStack[currentParentStack.length - 1];\n parent.children[parent.children.length - 1] = updatedSection;\n }\n\n return root;\n }\n\n /**\n * Parse plain text into a hierarchical document tree\n * 
Splits by double newlines to create paragraphs\n */\n static async parsePlainText(\n doc_id: string,\n text: string,\n title: string\n ): Promise<DocumentRootNode> {\n const root: DocumentRootNode = {\n nodeId: uuid4(),\n kind: NodeKind.DOCUMENT,\n range: { startOffset: 0, endOffset: text.length },\n text: title,\n title,\n children: [],\n };\n\n // Split by double newlines to get paragraphs while tracking offsets\n const paragraphRegex = /\\n\\s*\\n/g;\n let lastIndex = 0;\n let paragraphIndex = 0;\n let match: RegExpExecArray | null;\n\n while ((match = paragraphRegex.exec(text)) !== null) {\n const rawParagraph = text.slice(lastIndex, match.index);\n const paragraphText = rawParagraph.trim();\n\n if (paragraphText.length > 0) {\n const trimmedRelativeStart = rawParagraph.indexOf(paragraphText);\n const startOffset = lastIndex + trimmedRelativeStart;\n const endOffset = startOffset + paragraphText.length;\n\n const paragraph: ParagraphNode = {\n nodeId: uuid4(),\n kind: NodeKind.PARAGRAPH,\n range: {\n startOffset,\n endOffset,\n },\n text: paragraphText,\n };\n\n root.children.push(paragraph);\n paragraphIndex++;\n }\n\n lastIndex = paragraphRegex.lastIndex;\n }\n\n // Handle trailing paragraph after the last double newline, if any\n if (lastIndex < text.length) {\n const rawParagraph = text.slice(lastIndex);\n const paragraphText = rawParagraph.trim();\n\n if (paragraphText.length > 0) {\n const trimmedRelativeStart = rawParagraph.indexOf(paragraphText);\n const startOffset = lastIndex + trimmedRelativeStart;\n const endOffset = startOffset + paragraphText.length;\n\n const paragraph: ParagraphNode = {\n nodeId: uuid4(),\n kind: NodeKind.PARAGRAPH,\n range: {\n startOffset,\n endOffset,\n },\n text: paragraphText,\n };\n\n root.children.push(paragraph);\n }\n }\n return root;\n }\n\n /**\n * Auto-detect format and parse\n */\n static parse(\n doc_id: string,\n text: string,\n title: string,\n format?: \"markdown\" | \"text\"\n ): Promise<DocumentRootNode> {\n if (format === \"markdown\" || (!format && this.looksLikeMarkdown(text))) {\n return this.parseMarkdown(doc_id, text, title);\n }\n return this.parsePlainText(doc_id, text, title);\n }\n\n /**\n * Check if text contains markdown header patterns\n * Looks for lines starting with 1-6 hash symbols followed by whitespace\n */\n private static looksLikeMarkdown(text: string): boolean {\n // Check for markdown header patterns: line starting with # followed by space\n return /^#{1,6}\\s/m.test(text);\n }\n}\n",
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { VectorSearchOptions } from \"@workglow/storage\";\nimport type { TypedArray } from \"@workglow/util\";\nimport type {\n DocumentChunk,\n DocumentChunkKey,\n DocumentChunkStorage,\n InsertDocumentChunk,\n} from \"./DocumentChunkSchema\";\n\n/**\n * Document Chunk Dataset\n *\n * A dataset-specific wrapper around vector storage for document chunks.\n * This provides a domain-specific API for working with document chunk embeddings\n * in RAG pipelines.\n */\nexport class DocumentChunkDataset {\n private storage: DocumentChunkStorage;\n\n constructor(storage: DocumentChunkStorage) {\n this.storage = storage;\n }\n\n /**\n * Get the underlying storage instance\n */\n getStorage(): DocumentChunkStorage {\n return this.storage;\n }\n\n /**\n * Store a document chunk\n */\n async put(chunk: InsertDocumentChunk): Promise<DocumentChunk> {\n return this.storage.put(chunk);\n }\n\n /**\n * Store multiple document chunks\n */\n async putBulk(chunks: InsertDocumentChunk[]): Promise<DocumentChunk[]> {\n return this.storage.putBulk(chunks);\n }\n\n /**\n * Get a document chunk by ID\n */\n async get(chunk_id: string): Promise<DocumentChunk | undefined> {\n const key: DocumentChunkKey = { chunk_id };\n return this.storage.get(key);\n }\n\n /**\n * Delete a document chunk\n */\n async delete(chunk_id: string): Promise<void> {\n const key: DocumentChunkKey = { chunk_id };\n return this.storage.delete(key);\n }\n\n /**\n * Search for similar chunks using vector similarity\n */\n async similaritySearch(\n query: TypedArray,\n options?: VectorSearchOptions<Record<string, unknown>>\n ): Promise<Array<DocumentChunk & { score: number }>> {\n return this.storage.similaritySearch(query, options);\n }\n\n /**\n * Hybrid search (vector + full-text)\n */\n async hybridSearch(\n query: TypedArray,\n options: VectorSearchOptions<Record<string, unknown>> & {\n textQuery: string;\n vectorWeight?: number;\n }\n ): Promise<Array<DocumentChunk & { score: number }>> {\n if (this.storage.hybridSearch) {\n return this.storage.hybridSearch(query, options);\n }\n throw new Error(\"Hybrid search not supported by this storage backend\");\n }\n\n /**\n * Get all chunks\n */\n async getAll(): Promise<DocumentChunk[] | undefined> {\n return this.storage.getAll();\n }\n\n /**\n * Get the count of stored chunks\n */\n async size(): Promise<number> {\n return this.storage.size();\n }\n\n /**\n * Clear all chunks\n */\n async clear(): Promise<void> {\n return (this.storage as any).clear();\n }\n\n /**\n * Destroy the storage\n */\n destroy(): void {\n return this.storage.destroy();\n }\n\n /**\n * Setup the database/storage\n */\n async setupDatabase(): Promise<void> {\n return this.storage.setupDatabase();\n }\n\n /**\n * Get the vector dimensions\n */\n getVectorDimensions(): number {\n return this.storage.getVectorDimensions();\n }\n}\n",
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport {\n createServiceToken,\n globalServiceRegistry,\n registerInputResolver,\n ServiceRegistry,\n} from \"@workglow/util\";\nimport type { DocumentChunkDataset } from \"./DocumentChunkDataset\";\n\n/**\n * Service token for the document chunk dataset registry\n * Maps dataset IDs to DocumentChunkDataset instances\n */\nexport const DOCUMENT_CHUNK_DATASET =\n createServiceToken<Map<string, DocumentChunkDataset>>(\"dataset.document-chunk\");\n\n// Register default factory if not already registered\nif (!globalServiceRegistry.has(DOCUMENT_CHUNK_DATASET)) {\n globalServiceRegistry.register(\n DOCUMENT_CHUNK_DATASET,\n (): Map<string, DocumentChunkDataset> => new Map(),\n true\n );\n}\n\n/**\n * Gets the global document chunk dataset registry\n * @returns Map of document chunk dataset ID to instance\n */\nexport function getGlobalDocumentChunkDataset(): Map<string, DocumentChunkDataset> {\n return globalServiceRegistry.get(DOCUMENT_CHUNK_DATASET);\n}\n\n/**\n * Registers a document chunk dataset globally by ID\n * @param id The unique identifier for this dataset\n * @param dataset The dataset instance to register\n */\nexport function registerDocumentChunkDataset(id: string, dataset: DocumentChunkDataset): void {\n const datasets = getGlobalDocumentChunkDataset();\n datasets.set(id, dataset);\n}\n\n/**\n * Gets a document chunk dataset by ID from the global registry\n * @param id The dataset identifier\n * @returns The dataset instance or undefined if not found\n */\nexport function getDocumentChunkDataset(id: string): DocumentChunkDataset | undefined {\n return getGlobalDocumentChunkDataset().get(id);\n}\n\n/**\n * Resolves a dataset ID to a DocumentChunkDataset from the registry.\n * Used by the input resolver system.\n */\nasync function resolveDocumentChunkDatasetFromRegistry(\n id: string,\n format: string,\n registry: ServiceRegistry\n): Promise<DocumentChunkDataset> {\n const datasets = registry.has(DOCUMENT_CHUNK_DATASET)\n ? registry.get<Map<string, DocumentChunkDataset>>(DOCUMENT_CHUNK_DATASET)\n : getGlobalDocumentChunkDataset();\n\n const dataset = datasets.get(id);\n if (!dataset) {\n throw new Error(`Document chunk dataset \"${id}\" not found in registry`);\n }\n return dataset;\n}\n\n// Register the dataset resolver for format: \"dataset:document-chunk\"\nregisterInputResolver(\"dataset:document-chunk\", resolveDocumentChunkDatasetFromRegistry);\n",
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { IVectorStorage } from \"@workglow/storage\";\nimport { TypedArraySchema, type DataPortSchemaObject, type TypedArray } from \"@workglow/util\";\n\n/**\n * Default schema for document chunk storage with vector embeddings\n */\nexport const DocumentChunkSchema = {\n type: \"object\",\n properties: {\n chunk_id: { type: \"string\", \"x-auto-generated\": true },\n doc_id: { type: \"string\" },\n vector: TypedArraySchema(),\n metadata: { type: \"object\", format: \"metadata\", additionalProperties: true },\n },\n additionalProperties: false,\n} as const satisfies DataPortSchemaObject;\nexport type DocumentChunkSchema = typeof DocumentChunkSchema;\n\nexport const DocumentChunkPrimaryKey = [\"chunk_id\"] as const;\nexport type DocumentChunkPrimaryKey = typeof DocumentChunkPrimaryKey;\n\nexport interface DocumentChunk<\n Metadata extends Record<string, unknown> = Record<string, unknown>,\n Vector extends TypedArray = TypedArray,\n> {\n chunk_id: string;\n doc_id: string;\n vector: Vector;\n metadata: Metadata;\n}\n\n/**\n * Type for inserting document chunks - chunk_id is optional (auto-generated)\n */\nexport type InsertDocumentChunk<\n Metadata extends Record<string, unknown> = Record<string, unknown>,\n Vector extends TypedArray = TypedArray,\n> = Omit<DocumentChunk<Metadata, Vector>, \"chunk_id\"> &\n Partial<Pick<DocumentChunk<Metadata, Vector>, \"chunk_id\">>;\n\n/**\n * Type for the primary key of document chunks\n */\nexport type DocumentChunkKey = { chunk_id: string };\n\nexport type DocumentChunkStorage = IVectorStorage<\n Record<string, unknown>,\n typeof DocumentChunkSchema,\n DocumentChunk,\n DocumentChunkPrimaryKey\n>;\n"
],
"mappings": ";AAmCO,SAAS,kBAA0D,CAAC,UAAa,CAAC,GAAQ;AAAA,EAC/F,OAAO;AAAA,IACL,OAAO;AAAA,IACP,aAAa;AAAA,OACV;AAAA,IACH,QAAQ;AAAA,IACR,OAAO;AAAA,MACL,EAAE,MAAM,UAAmB,OAAO,aAAa;AAAA,MAC/C,EAAE,OAAO,oBAAoB,sBAAsB,KAAK;AAAA,IAC1D;AAAA,EACF;AAAA;AAUK,SAAS,wBAAgE,CAC9E,UAAa,CAAC,GACd;AAAA,EACA,OAAO;AAAA,IACL,OAAO;AAAA,IACP,aAAa;AAAA,OACV;AAAA,IACH,QAAQ;AAAA,IACR,OAAO;AAAA,MACL,EAAE,MAAM,UAAmB,OAAO,aAAa;AAAA,MAC/C,EAAE,OAAO,oBAAoB,sBAAsB,KAAK;AAAA,IAC1D;AAAA,EACF;AAAA;AAUK,SAAS,mBAA2D,CAAC,UAAa,CAAC,GAAQ;AAAA,EAChG,OAAO;AAAA,IACL,OAAO;AAAA,IACP,aAAa;AAAA,OACV;AAAA,IACH,QAAQ;AAAA,IACR,OAAO;AAAA,MACL,EAAE,MAAM,UAAmB,OAAO,aAAa;AAAA,MAC/C,EAAE,OAAO,oBAAoB,sBAAsB,KAAK;AAAA,IAC1D;AAAA,EACF;AAAA;;ACvEK,MAAM,SAAS;AAAA,EACb;AAAA,EACS;AAAA,EACA;AAAA,EACR;AAAA,EAER,WAAW,CACT,MACA,UACA,SAAsB,CAAC,GACvB,QACA;AAAA,IACA,KAAK,SAAS;AAAA,IACd,KAAK,OAAO;AAAA,IACZ,KAAK,WAAW;AAAA,IAChB,KAAK,SAAS,UAAU,CAAC;AAAA;AAAA,EAM3B,SAAS,CAAC,QAA2B;AAAA,IACnC,KAAK,SAAS;AAAA;AAAA,EAMhB,SAAS,GAAgB;AAAA,IACvB,OAAO,KAAK;AAAA;AAAA,EAMd,QAAQ,CAAC,QAAsB;AAAA,IAC7B,KAAK,SAAS;AAAA;AAAA,EAMhB,kBAAkB,CAAC,QAA6B;AAAA,IAC9C,OAAO,KAAK,OAAO,OAAO,CAAC,UAAU,MAAM,SAAS,SAAS,MAAM,CAAC;AAAA;AAAA,EAMtE,MAAM,GAIJ;AAAA,IACA,OAAO;AAAA,MACL,UAAU,KAAK;AAAA,MACf,MAAM,KAAK;AAAA,MACX,QAAQ,KAAK;AAAA,IACf;AAAA;AAAA,SAMK,QAAQ,CAAC,MAAc,QAA2B;AAAA,IACvD,MAAM,MAAM,KAAK,MAAM,IAAI;AAAA,IAC3B,OAAO,IAAI,SAAS,IAAI,MAAM,IAAI,UAAU,IAAI,QAAQ,MAAM;AAAA;AAGlE;;AC/DO,MAAM,gBAAgB;AAAA,EACnB;AAAA,EACA;AAAA,EAmBR,WAAW,CAAC,gBAAwC,eAAsC;AAAA,IACxF,KAAK,iBAAiB;AAAA,IACtB,KAAK,gBAAgB;AAAA;AAAA,OAOjB,OAAM,CAAC,UAAuC;AAAA,IAClD,MAAM,aAAa,KAAK,UAAU,SAAS,OAAO,CAAC;AAAA,IAEnD,MAAM,eAA4C;AAAA,MAChD,QAAQ,SAAS;AAAA,MACjB,MAAM;AAAA,IACR;AAAA,IACA,MAAM,SAAS,MAAM,KAAK,eAAe,IAAI,YAAY;AAAA,IAGzD,IAAI,SAAS,WAAW,OAAO,QAAQ;AAAA,MACrC,SAAS,SAAS,OAAO,MAAM;AAAA,IACjC;AAAA,IACA,OAAO;AAAA;AAAA,OAMH,IAAG,CAAC,QAA+C;AAAA,IACvD,MAAM,SAAS,MAAM,KAAK,eAAe,IAAI,EAAE,OAAe,CAAC;AAAA,IAC/D,IAAI,CAAC,QAAQ;AAAA,MACX;AAAA,IACF;AAAA,IACA,OAAO,SAAS,SAAS,OAAO,MAAM,OAAO,MAAM;AAAA;AAAA,OAM/C,OAAM,CAAC,QAA+B;AAAA,IAC1C,MAAM,KAAK,eAAe,OAAO,EAAE,OAAe,CAAC;AAAA;AAAA,OAM/C,QAAO,CAAC,QAAgB,QAAmD;AAAA,IAC/E,MAAM,MAAM,MAAM,KAAK,IAAI,MAAM;AAAA,IACjC,IAAI,CAAC,KAAK;AAAA,MACR;AAAA,IACF;AAAA,IAGA,MAAM,WAAW,CAAC,SAAmB;AAAA,MACnC,IAAI,KAAK,WAAW,QAAQ;AAAA,QAC1B,OAAO;AAAA,MACT;AAAA,MACA,IAAI,KAAK,YAAY,MAAM,QAAQ,KAAK,QAAQ,GAAG;AAAA,QACjD,WAAW,SAAS,KAAK,UAAU;AAAA,UACjC,MAAM,QAAQ,SAAS,KAAK;AAAA,UAC5B,IAAI;AAAA,YAAO,OAAO;AAAA,QACpB;AAAA,MACF;AAAA,MACA;AAAA;AAAA,IAGF,OAAO,SAAS,IAAI,IAAI;AAAA;AAAA,OAMpB,aAAY,CAAC,QAAgB,QAAyC;AAAA,IAC1E,MAAM,MAAM,MAAM,KAAK,IAAI,MAAM;AAAA,IACjC,IAAI,CAAC,KAAK;AAAA,MACR,OAAO,CAAC;AAAA,IACV;AAAA,IAGA,MAAM,OAAiB,CAAC;AAAA,IACxB,MAAM,WAAW,CAAC,SAAuB;AAAA,MACvC,KAAK,KAAK,KAAK,MAAM;AAAA,MACrB,IAAI,KAAK,WAAW,QAAQ;AAAA,QAC1B,OAAO;AAAA,MACT;AAAA,MACA,IAAI,KAAK,YAAY,MAAM,QAAQ,KAAK,QAAQ,GAAG;AAAA,QACjD,WAAW,SAAS,KAAK,UAAU;AAAA,UACjC,IAAI,SAAS,KAAK,GAAG;AAAA,YACnB,OAAO;AAAA,UACT;AAAA,QACF;AAAA,MACF;AAAA,MACA,KAAK,IAAI;AAAA,MACT,OAAO;AAAA;AAAA,IAGT,IAAI,CAAC,SAAS,IAAI,IAAI,GAAG;AAAA,MACvB,OAAO,CAAC;AAAA,IACV;AAAA,IAGA,MAAM,YAAmB,CAAC;AAAA,IAC1B,IAAI,cAAmB,IAAI;AAAA,IAC3B,UAAU,KAAK,WAAW;AAAA,IAE1B,SAAS,IAAI,EAAG,IAAI,KAAK,QAAQ,KAAK;AAAA,MACpC,MAAM,WAAW,KAAK;AAAA,MACtB,IAAI,YAAY,YAAY,MAAM,QAAQ,YAAY,QAAQ,GAAG;AAAA,QAC/D,MAAM,QAAQ,YAAY,SAAS,KAAK,CAAC,UAAe,MAAM,WAAW,QAAQ;AAAA,QACjF,IAAI,OAAO;AAAA,UACT,cAAc;AAAA,UACd,UAAU,KAAK,WAAW;AAAA,QAC5B,EAAO;AAAA,UACL;AAAA;AAAA,MAEJ,EAAO;AAAA,QACL;AAAA;AAAA,IAEJ;AAAA,IAEA,OAAO;AAAA;AAAA,OAMH,UAAS,CAAC,QAAsC;AAAA,IACpD,MAAM,MAAM,MAAM,KAAK,IAAI,MAAM;AAAA,IACjC,IAAI,CAAC,KAAK;AAAA,MACR,OAAO,C
AAC;AAAA,IACV;AAAA,IACA,OAAO,IAAI,UAAU;AAAA;AAAA,OAMjB,mBAAkB,CAAC,QAAgB,QAAsC;AAAA,IAC7E,MAAM,MAAM,MAAM,KAAK,IAAI,MAAM;AAAA,IACjC,IAAI,CAAC,KAAK;AAAA,MACR,OAAO,CAAC;AAAA,IACV;AAAA,IACA,IAAI,IAAI,oBAAoB;AAAA,MAC1B,OAAO,IAAI,mBAAmB,MAAM;AAAA,IACtC;AAAA,IAEA,MAAM,SAAS,IAAI,UAAU;AAAA,IAC7B,OAAO,OAAO,OAAO,CAAC,UAAU,MAAM,YAAY,MAAM,SAAS,SAAS,MAAM,CAAC;AAAA;AAAA,OAM7E,KAAI,GAAsB;AAAA,IAC9B,MAAM,WAAW,MAAM,KAAK,eAAe,OAAO;AAAA,IAClD,IAAI,CAAC,UAAU;AAAA,MACb,OAAO,CAAC;AAAA,IACV;AAAA,IACA,OAAO,SAAS,IAAI,CAAC,MAA6B,EAAE,MAAM;AAAA;AAAA,OAStD,OAAM,CACV,OACA,SACoE;AAAA,IACpE,OAAO,KAAK,eAAe,iBAAiB,OAAO,OAAO,KAAK,CAAC;AAAA;AAEpE;;AClNA;AAAA;AAAA;AAAA;AAAA;AAYO,IAAM,oBACX,mBAAiD,mBAAmB;AAGtE,IAAI,CAAC,sBAAsB,IAAI,iBAAiB,GAAG;AAAA,EACjD,sBAAsB,SACpB,mBACA,MAAoC,IAAI,KACxC,IACF;AACF;AAMO,SAAS,yBAAyB,GAAiC;AAAA,EACxE,OAAO,sBAAsB,IAAI,iBAAiB;AAAA;AAQ7C,SAAS,uBAAuB,CAAC,IAAY,SAAgC;AAAA,EAClF,MAAM,WAAW,0BAA0B;AAAA,EAC3C,SAAS,IAAI,IAAI,OAAO;AAAA;AAQnB,SAAS,kBAAkB,CAAC,IAAyC;AAAA,EAC1E,OAAO,0BAA0B,EAAE,IAAI,EAAE;AAAA;AAO3C,eAAe,kCAAkC,CAC/C,IACA,QACA,UAC0B;AAAA,EAC1B,MAAM,WAAW,SAAS,IAAI,iBAAiB,IAC3C,SAAS,IAAkC,iBAAiB,IAC5D,0BAA0B;AAAA,EAE9B,MAAM,UAAU,SAAS,IAAI,EAAE;AAAA,EAC/B,IAAI,CAAC,SAAS;AAAA,IACZ,MAAM,IAAI,MAAM,qBAAqB,2BAA2B;AAAA,EAClE;AAAA,EACA,OAAO;AAAA;AAIT,sBAAsB,oBAAoB,kCAAkC;;ACnErE,IAAM,WAAW;AAAA,EACtB,UAAU;AAAA,EACV,SAAS;AAAA,EACT,WAAW;AAAA,EACX,UAAU;AAAA,EACV,OAAO;AACT;AAWO,IAAM,kBAAkB;AAAA,EAC7B,MAAM;AAAA,EACN,YAAY;AAAA,IACV,aAAa;AAAA,MACX,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,WAAW;AAAA,MACT,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,EACF;AAAA,EACA,UAAU,CAAC,eAAe,WAAW;AAAA,EACrC,sBAAsB;AACxB;AAOO,IAAM,eAAe;AAAA,EAC1B,MAAM;AAAA,EACN,YAAY;AAAA,IACV,MAAM;AAAA,MACJ,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,MAAM;AAAA,MACJ,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,OAAO;AAAA,MACL,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,EACF;AAAA,EACA,UAAU,CAAC,QAAQ,QAAQ,OAAO;AAAA,EAClC,sBAAsB;AACxB;AAOO,IAAM,uBAAuB;AAAA,EAClC,MAAM;AAAA,EACN,YAAY;AAAA,IACV,SAAS;AAAA,MACP,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,UAAU;AAAA,MACR,MAAM;AAAA,MACN,OAAO;AAAA,MACP,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,UAAU;AAAA,MACR,MAAM;AAAA,MACN,OAAO,EAAE,MAAM,SAAS;AAAA,MACxB,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,EACF;AAAA,EACA,sBAAsB;AACxB;AASO,IAAM,yBAAyB;AAAA,EACpC,MAAM;AAAA,EACN,YAAY;AAAA,IACV,QAAQ;AAAA,MACN,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,MAAM;AAAA,MACJ,MAAM;AAAA,MACN,MAAM,OAAO,OAAO,QAAQ;AAAA,MAC5B,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,OAAO;AAAA,IACP,MAAM;AAAA,MACJ,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,YAAY;AAAA,EACd;AAAA,EACA,UAAU,CAAC,UAAU,QAAQ,SAAS,MAAM;AAAA,EAC5C,sBAAsB;AACxB;AAOO,IAAM,qBAAqB;AAAA,EAChC,MAAM;AAAA,EACN,OAAO;AAAA,EACP,aAAa;AAAA,EACb,YAAY;AAAA,OACP,uBAAuB;AAAA,IAC1B,OAAO;AAAA,MACL,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,OAAO;AAAA,MACL,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,UAAU;AAAA,MACR,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,EACF;AAAA,EACA,UAAU,CAAC,GAAG,uBAAuB,QAAQ;AAAA,EAC7C,sBAAsB;AACxB;AAKO,IAAM,sBAAsB;AAAA,EACjC,MAAM;AAAA,EACN,YAAY;AAAA,OACP,uBAAuB;AAAA,IAC1B,MAAM;AAAA,MACJ,MAAM;AAAA,MACN,OAAO,SAAS;AAAA,MAChB,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,EACF;AAAA,EACA,UAAU,CAAC,GAAG,uBAAuB,QAAQ;AAAA,EAC7C,sBAAsB;AACxB;AAKO,IAAM,qBAAqB;AAAA,EAChC,MAAM;AAAA,EACN,YAAY;AAAA,OACP,uBAAuB;AAAA,IAC1B,MAAM;AAAA,MACJ,MAAM;AAAA,MACN,OAAO,SAAS;AAAA,MAChB,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,EACF;AAAA,EACA,UAAU,CAAC,GAAG,uBAAuB,QAAQ;AAAA,EAC7C,sBAAsB;AACxB;AAKO,IAAM,oBAAoB;AAAA,EAC/B,MAAM;AAAA
,EACN,YAAY;AAAA,OACP,uBAAuB;AAAA,IAC1B,MAAM;AAAA,MACJ,MAAM;AAAA,MACN,OAAO,SAAS;AAAA,MAChB,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,OAAO;AAAA,MACL,MAAM;AAAA,MACN,SAAS;AAAA,MACT,SAAS;AAAA,MACT,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,OAAO;AAAA,MACL,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,UAAU;AAAA,MACR,MAAM;AAAA,MACN,OAAO;AAAA,MACP,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,EACF;AAAA,EACA,UAAU,CAAC,GAAG,uBAAuB,UAAU,SAAS,SAAS,UAAU;AAAA,EAC3E,sBAAsB;AACxB;AAKO,IAAM,kBAAkB;AAAA,EAC7B,MAAM;AAAA,EACN,YAAY;AAAA,OACP,uBAAuB;AAAA,IAC1B,MAAM;AAAA,MACJ,MAAM;AAAA,MACN,OAAO,SAAS;AAAA,MAChB,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,UAAU;AAAA,MACR,MAAM;AAAA,MACN,OAAO;AAAA,MACP,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,EACF;AAAA,EACA,UAAU,CAAC,GAAG,uBAAuB,UAAU,UAAU;AAAA,EACzD,sBAAsB;AACxB;AAKO,IAAM,yBAAyB;AAAA,EACpC,MAAM;AAAA,EACN,YAAY;AAAA,OACP,uBAAuB;AAAA,IAC1B,MAAM;AAAA,MACJ,MAAM;AAAA,MACN,OAAO,SAAS;AAAA,MAChB,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,OAAO;AAAA,MACL,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,UAAU;AAAA,MACR,MAAM;AAAA,MACN,OAAO;AAAA,MACP,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,EACF;AAAA,EACA,UAAU,CAAC,GAAG,uBAAuB,UAAU,SAAS,UAAU;AAAA,EAClE,sBAAsB;AACxB;AA4EO,IAAM,oBAAoB;AAAA,EAC/B,MAAM;AAAA,EACN,YAAY;AAAA,IACV,mBAAmB;AAAA,MACjB,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,eAAe;AAAA,MACb,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,gBAAgB;AAAA,MACd,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,EACF;AAAA,EACA,UAAU,CAAC,qBAAqB,iBAAiB,gBAAgB;AAAA,EACjE,sBAAsB;AACxB;AAOO,IAAM,wBAAwB;AAAA,EACnC,MAAM;AAAA,EACN,YAAY;AAAA,IACV,SAAS;AAAA,MACP,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,UAAU;AAAA,MACR,MAAM;AAAA,MACN,OAAO;AAAA,MACP,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,EACF;AAAA,EACA,sBAAsB;AACxB;AAOO,IAAM,kBAAkB,OAC5B;AAAA,EACC,MAAM;AAAA,EACN,YAAY;AAAA,IACV,SAAS;AAAA,MACP,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,QAAQ;AAAA,MACN,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,MAAM;AAAA,MACJ,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,UAAU;AAAA,MACR,MAAM;AAAA,MACN,OAAO,EAAE,MAAM,SAAS;AAAA,MACxB,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,OAAO;AAAA,MACL,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,YAAY;AAAA,EACd;AAAA,EACA,UAAU,CAAC,WAAW,UAAU,QAAQ,YAAY,OAAO;AAAA,EAC3D,sBAAsB;AACxB;AAYK,IAAM,sBAAsB;AAAA,EACjC,MAAM;AAAA,EACN,YAAY;AAAA,IACV,QAAQ;AAAA,MACN,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,SAAS;AAAA,MACP,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,YAAY;AAAA,MACV,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,OAAO;AAAA,MACL,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,MAAM;AAAA,MACJ,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,UAAU;AAAA,MACR,MAAM;AAAA,MACN,OAAO,EAAE,MAAM,SAAS;AAAA,MACxB,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,SAAS;AAAA,MACP,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,UAAU;AAAA,MACR,MAAM;AAAA,MACN,OAAO;AAAA,MACP,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,EACF;AAAA,EACA,UAAU,CAAC,UAAU,WAAW,cAAc,SAAS,QAAQ,UAAU;AAAA,EACzE,sBAAsB;AACxB;AAOO,IAAM,2BAA2B;AAAA,EACtC,MAAM;AAAA,EACN,OAAO;AAAA,EACP,OAAO;AAAA,EACP,aAAa;AACf;AAMO,IAAM,8BAA8B;AAAA,EACzC,MAAM;AAAA,EACN,YAAY;AAAA,IACV,QAAQ;AAAA,MACN,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,SAAS;AAAA,MACP,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,YAAY;AAAA,MACV,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,OAAO;AAAA,MACL,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,MAAM;AAAA,MACJ,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,UAAU;AAAA,MACR,MAAM;AAAA,MACN,OAAO,EAAE,MAAM,SAAS;A
AAA,MACxB,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,SAAS;AAAA,MACP,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,UAAU;AAAA,MACR,MAAM;AAAA,MACN,OAAO;AAAA,MACP,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,iBAAiB;AAAA,MACf,MAAM;AAAA,MACN,OAAO,EAAE,MAAM,SAAS;AAAA,MACxB,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,eAAe;AAAA,MACb,MAAM;AAAA,MACN,OAAO,EAAE,MAAM,SAAS;AAAA,MACxB,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,EACF;AAAA,EACA,UAAU,CAAC,UAAU,WAAW,cAAc,SAAS,QAAQ,UAAU;AAAA,EACzE,sBAAsB;AACxB;AAOO,IAAM,mCAAmC;AAAA,EAC9C,MAAM;AAAA,EACN,OAAO;AAAA,EACP,OAAO;AAAA,EACP,aAAa;AACf;AAKO,IAAM,yBAAyB;AAAA,EACpC,MAAM;AAAA,EACN,YAAY;AAAA,IACV,OAAO;AAAA,MACL,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,WAAW;AAAA,MACT,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,WAAW;AAAA,MACT,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,EACF;AAAA,EACA,UAAU,CAAC,OAAO;AAAA,EAClB,sBAAsB;AACxB;;;ACjmBO,SAAS,cAAc,CAAC,MAAsB;AAAA,EACnD,OAAO,KAAK,KAAK,KAAK,SAAS,CAAC;AAAA;AAM3B,SAAS,WAAW,CACzB,MACoD;AAAA,EACpD,OACE,KAAK,SAAS,SAAS,YACvB,KAAK,SAAS,SAAS,WACvB,KAAK,SAAS,SAAS;AAAA;AAOpB,SAAS,WAAW,CAAC,MAAoC;AAAA,EAC9D,IAAI,YAAY,IAAI,GAAG;AAAA,IACrB,OAAO,KAAK;AAAA,EACd;AAAA,EACA,OAAO,CAAC;AAAA;AAMH,UAAU,kBAAkB,CAAC,MAA6C;AAAA,EAC/E,MAAM;AAAA,EACN,IAAI,YAAY,IAAI,GAAG;AAAA,IACrB,WAAW,SAAS,KAAK,UAAU;AAAA,MACjC,OAAO,mBAAmB,KAAK;AAAA,IACjC;AAAA,EACF;AAAA;AAMK,SAAS,WAAW,CAAC,MAAoB,cAA4C;AAAA,EAC1F,MAAM,OAAiB,CAAC;AAAA,EAExB,SAAS,MAAM,CAAC,MAA6B;AAAA,IAC3C,KAAK,KAAK,KAAK,MAAM;AAAA,IACrB,IAAI,KAAK,WAAW,cAAc;AAAA,MAChC,OAAO;AAAA,IACT;AAAA,IACA,IAAI,YAAY,IAAI,GAAG;AAAA,MACrB,WAAW,SAAS,KAAK,UAAU;AAAA,QACjC,IAAI,OAAO,KAAK,GAAG;AAAA,UACjB,OAAO;AAAA,QACT;AAAA,MACF;AAAA,IACF;AAAA,IACA,KAAK,IAAI;AAAA,IACT,OAAO;AAAA;AAAA,EAGT,OAAO,OAAO,IAAI,IAAI,OAAO;AAAA;AAMxB,SAAS,gBAAgB,CAAC,MAAoB,UAA+B;AAAA,EAClF,IAAI,cAAc;AAAA,EAGlB,SAAS,IAAI,EAAG,IAAI,SAAS,QAAQ,KAAK;AAAA,IACxC,MAAM,WAAW,SAAS;AAAA,IAC1B,MAAM,WAAW,YAAY;AAAA,IAC7B,IAAI;AAAA,IAEJ,SAAS,IAAI,EAAG,IAAI,SAAS,QAAQ,KAAK;AAAA,MACxC,IAAI,SAAS,GAAG,WAAW,UAAU;AAAA,QACnC,QAAQ,SAAS;AAAA,QACjB;AAAA,MACF;AAAA,IACF;AAAA,IAEA,IAAI,CAAC,OAAO;AAAA,MACV,MAAM,IAAI,MAAM,gBAAgB,4BAA4B;AAAA,IAC9D;AAAA,IAEA,cAAc;AAAA,EAChB;AAAA,EAEA,OAAO,YAAY;AAAA;;AC5Fd,IAAM,wBAAwB;AAAA,EACnC,MAAM;AAAA,EACN,YAAY;AAAA,IACV,QAAQ;AAAA,MACN,MAAM;AAAA,MACN,oBAAoB;AAAA,MACpB,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,MAAM;AAAA,MACJ,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,UAAU;AAAA,MACR,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,EACF;AAAA,EACA,UAAU,CAAC,UAAU,MAAM;AAAA,EAC3B,sBAAsB;AACxB;AAGO,IAAM,qBAAqB,CAAC,QAAQ;;ACnC3C;AAWO,MAAM,iBAAiB;AAAA,cAIf,cAAa,CACxB,QACA,MACA,OAC2B;AAAA,IAC3B,MAAM,QAAQ,KAAK,MAAM;AAAA,CAAI;AAAA,IAC7B,IAAI,gBAAgB;AAAA,IAEpB,MAAM,OAAyB;AAAA,MAC7B,QAAQ,MAAM;AAAA,MACd,MAAM,SAAS;AAAA,MACf,OAAO,EAAE,aAAa,GAAG,WAAW,KAAK,OAAO;AAAA,MAChD,MAAM;AAAA,MACN;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IAEA,IAAI,qBAA4D,CAAC,IAAI;AAAA,IACrE,IAAI,aAAuB,CAAC;AAAA,IAC5B,IAAI,wBAAwB;AAAA,IAE5B,MAAM,kBAAkB,YAAY;AAAA,MAClC,IAAI,WAAW,SAAS,GAAG;AAAA,QACzB,MAAM,UAAU,WAAW,KAAK;AAAA,CAAI,EAAE,KAAK;AAAA,QAC3C,IAAI,SAAS;AAAA,UACX,MAAM,uBAAuB;AAAA,UAC7B,MAAM,qBAAqB;AAAA,UAE3B,MAAM,YAA2B;AAAA,YAC/B,QAAQ,MAAM;AAAA,YACd,MAAM,SAAS;AAAA,YACf,OAAO;AAAA,cACL,aAAa;AAAA,cACb,WAAW;AAAA,YACb;AAAA,YACA,MAAM;AAAA,UACR;AAAA,UAEA,mBAAmB,mBAAmB,SAAS,GAAG,SAAS,KAAK,SAAS;AAAA,QAC3E;AAAA,QACA,aAAa,CAAC;AAAA,MAChB;AAAA;AAAA,IAGF,WAAW,QAAQ,OAAO;AAAA,MACxB,MAAM,aAAa,KAAK,SAAS;AAAA,MAGjC,MAAM,cAAc,KAAK,MAAM,mBAAmB;AAAA,MAClD,IAAI,aAAa;AAAA,QACf,MAAM,gBAAgB;AAAA,QAEtB,MAAM,QAAQ,YAAY,GAAG;AAAA,QAC7B,MAAM,cAAc,YAAY;AAAA,QAGhC,OACE,mBAAmB,SAAS,KAC5B,mBAAmB,
mBAAmB,SAAS,GAAG,SAAS,SAAS,WACnE,mBAAmB,mBAAmB,SAAS,GAAmB,SAAS,OAC5E;AAAA,UACA,MAAM,gBAAgB,mBAAmB,IAAI;AAAA,UAE7C,MAAM,iBAA8B;AAAA,eAC/B;AAAA,YACH,OAAO;AAAA,iBACF,cAAc;AAAA,cACjB,WAAW;AAAA,YACb;AAAA,UACF;AAAA,UAEA,MAAM,SAAS,mBAAmB,mBAAmB,SAAS;AAAA,UAC9D,OAAO,SAAS,OAAO,SAAS,SAAS,KAAK;AAAA,QAChD;AAAA,QAEA,MAAM,qBAAqB;AAAA,QAC3B,MAAM,UAAuB;AAAA,UAC3B,QAAQ,MAAM;AAAA,UACd,MAAM,SAAS;AAAA,UACf;AAAA,UACA,OAAO;AAAA,UACP,OAAO;AAAA,YACL,aAAa;AAAA,YACb,WAAW,KAAK;AAAA,UAClB;AAAA,UACA,MAAM;AAAA,UACN,UAAU,CAAC;AAAA,QACb;AAAA,QAEA,mBAAmB,mBAAmB,SAAS,GAAG,SAAS,KAAK,OAAO;AAAA,QACvE,mBAAmB,KAAK,OAAO;AAAA,MACjC,EAAO;AAAA,QAEL,IAAI,WAAW,WAAW,GAAG;AAAA,UAC3B,wBAAwB;AAAA,QAC1B;AAAA,QACA,WAAW,KAAK,IAAI;AAAA;AAAA,MAGtB,iBAAiB;AAAA,IACnB;AAAA,IAEA,MAAM,gBAAgB;AAAA,IAGtB,OAAO,mBAAmB,SAAS,GAAG;AAAA,MACpC,MAAM,UAAU,mBAAmB,IAAI;AAAA,MACvC,MAAM,iBAA8B;AAAA,WAC/B;AAAA,QACH,OAAO;AAAA,aACF,QAAQ;AAAA,UACX,WAAW,KAAK;AAAA,QAClB;AAAA,MACF;AAAA,MACA,MAAM,SAAS,mBAAmB,mBAAmB,SAAS;AAAA,MAC9D,OAAO,SAAS,OAAO,SAAS,SAAS,KAAK;AAAA,IAChD;AAAA,IAEA,OAAO;AAAA;AAAA,cAOI,eAAc,CACzB,QACA,MACA,OAC2B;AAAA,IAC3B,MAAM,OAAyB;AAAA,MAC7B,QAAQ,MAAM;AAAA,MACd,MAAM,SAAS;AAAA,MACf,OAAO,EAAE,aAAa,GAAG,WAAW,KAAK,OAAO;AAAA,MAChD,MAAM;AAAA,MACN;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IAGA,MAAM,iBAAiB;AAAA,IACvB,IAAI,YAAY;AAAA,IAChB,IAAI,iBAAiB;AAAA,IACrB,IAAI;AAAA,IAEJ,QAAQ,QAAQ,eAAe,KAAK,IAAI,OAAO,MAAM;AAAA,MACnD,MAAM,eAAe,KAAK,MAAM,WAAW,MAAM,KAAK;AAAA,MACtD,MAAM,gBAAgB,aAAa,KAAK;AAAA,MAExC,IAAI,cAAc,SAAS,GAAG;AAAA,QAC5B,MAAM,uBAAuB,aAAa,QAAQ,aAAa;AAAA,QAC/D,MAAM,cAAc,YAAY;AAAA,QAChC,MAAM,YAAY,cAAc,cAAc;AAAA,QAE9C,MAAM,YAA2B;AAAA,UAC/B,QAAQ,MAAM;AAAA,UACd,MAAM,SAAS;AAAA,UACf,OAAO;AAAA,YACL;AAAA,YACA;AAAA,UACF;AAAA,UACA,MAAM;AAAA,QACR;AAAA,QAEA,KAAK,SAAS,KAAK,SAAS;AAAA,QAC5B;AAAA,MACF;AAAA,MAEA,YAAY,eAAe;AAAA,IAC7B;AAAA,IAGA,IAAI,YAAY,KAAK,QAAQ;AAAA,MAC3B,MAAM,eAAe,KAAK,MAAM,SAAS;AAAA,MACzC,MAAM,gBAAgB,aAAa,KAAK;AAAA,MAExC,IAAI,cAAc,SAAS,GAAG;AAAA,QAC5B,MAAM,uBAAuB,aAAa,QAAQ,aAAa;AAAA,QAC/D,MAAM,cAAc,YAAY;AAAA,QAChC,MAAM,YAAY,cAAc,cAAc;AAAA,QAE9C,MAAM,YAA2B;AAAA,UAC/B,QAAQ,MAAM;AAAA,UACd,MAAM,SAAS;AAAA,UACf,OAAO;AAAA,YACL;AAAA,YACA;AAAA,UACF;AAAA,UACA,MAAM;AAAA,QACR;AAAA,QAEA,KAAK,SAAS,KAAK,SAAS;AAAA,MAC9B;AAAA,IACF;AAAA,IACA,OAAO;AAAA;AAAA,SAMF,KAAK,CACV,QACA,MACA,OACA,QAC2B;AAAA,IAC3B,IAAI,WAAW,cAAe,CAAC,UAAU,KAAK,kBAAkB,IAAI,GAAI;AAAA,MACtE,OAAO,KAAK,cAAc,QAAQ,MAAM,KAAK;AAAA,IAC/C;AAAA,IACA,OAAO,KAAK,eAAe,QAAQ,MAAM,KAAK;AAAA;AAAA,SAOjC,iBAAiB,CAAC,MAAuB;AAAA,IAEtD,OAAO,aAAa,KAAK,IAAI;AAAA;AAEjC;;AC3NO,MAAM,qBAAqB;AAAA,EACxB;AAAA,EAER,WAAW,CAAC,SAA+B;AAAA,IACzC,KAAK,UAAU;AAAA;AAAA,EAMjB,UAAU,GAAyB;AAAA,IACjC,OAAO,KAAK;AAAA;AAAA,OAMR,IAAG,CAAC,OAAoD;AAAA,IAC5D,OAAO,KAAK,QAAQ,IAAI,KAAK;AAAA;AAAA,OAMzB,QAAO,CAAC,QAAyD;AAAA,IACrE,OAAO,KAAK,QAAQ,QAAQ,MAAM;AAAA;AAAA,OAM9B,IAAG,CAAC,UAAsD;AAAA,IAC9D,MAAM,MAAwB,EAAE,SAAS;AAAA,IACzC,OAAO,KAAK,QAAQ,IAAI,GAAG;AAAA;AAAA,OAMvB,OAAM,CAAC,UAAiC;AAAA,IAC5C,MAAM,MAAwB,EAAE,SAAS;AAAA,IACzC,OAAO,KAAK,QAAQ,OAAO,GAAG;AAAA;AAAA,OAM1B,iBAAgB,CACpB,OACA,SACmD;AAAA,IACnD,OAAO,KAAK,QAAQ,iBAAiB,OAAO,OAAO;AAAA;AAAA,OAM/C,aAAY,CAChB,OACA,SAImD;AAAA,IACnD,IAAI,KAAK,QAAQ,cAAc;AAAA,MAC7B,OAAO,KAAK,QAAQ,aAAa,OAAO,OAAO;AAAA,IACjD;AAAA,IACA,MAAM,IAAI,MAAM,qDAAqD;AAAA;AAAA,OAMjE,OAAM,GAAyC;AAAA,IACnD,OAAO,KAAK,QAAQ,OAAO;AAAA;AAAA,OAMvB,KAAI,GAAoB;AAAA,IAC5B,OAAO,KAAK,QAAQ,KAAK;AAAA;AAAA,OAMrB,MAAK,GAAkB;AAAA,IAC3B,OAAQ,KAAK,QAAgB,MAAM;AAAA;AAAA,EAMrC,OAAO,GAAS;AAAA,IACd,OAAO,KAAK,QAAQ,QAAQ;AAAA;AAAA,OAMxB,cAAa,GAAkB;AAAA,IACnC,OAAO,KAAK,QAAQ,cAAc;AAAA;AAAA,EAMpC,mBAAmB,GAAW;AAAA,IAC5B,OAAO,KAAK,QAAQ,oBAAoB;AAAA;AAE5C;;AC/HA;AAAA,wBACE;AAAA,2BACA
;AAAA,2BACA;AAAA;AASK,IAAM,yBACX,oBAAsD,wBAAwB;AAGhF,IAAI,CAAC,uBAAsB,IAAI,sBAAsB,GAAG;AAAA,EACtD,uBAAsB,SACpB,wBACA,MAAyC,IAAI,KAC7C,IACF;AACF;AAMO,SAAS,6BAA6B,GAAsC;AAAA,EACjF,OAAO,uBAAsB,IAAI,sBAAsB;AAAA;AAQlD,SAAS,4BAA4B,CAAC,IAAY,SAAqC;AAAA,EAC5F,MAAM,WAAW,8BAA8B;AAAA,EAC/C,SAAS,IAAI,IAAI,OAAO;AAAA;AAQnB,SAAS,uBAAuB,CAAC,IAA8C;AAAA,EACpF,OAAO,8BAA8B,EAAE,IAAI,EAAE;AAAA;AAO/C,eAAe,uCAAuC,CACpD,IACA,QACA,UAC+B;AAAA,EAC/B,MAAM,WAAW,SAAS,IAAI,sBAAsB,IAChD,SAAS,IAAuC,sBAAsB,IACtE,8BAA8B;AAAA,EAElC,MAAM,UAAU,SAAS,IAAI,EAAE;AAAA,EAC/B,IAAI,CAAC,SAAS;AAAA,IACZ,MAAM,IAAI,MAAM,2BAA2B,2BAA2B;AAAA,EACxE;AAAA,EACA,OAAO;AAAA;AAIT,uBAAsB,0BAA0B,uCAAuC;;ACvEvF;AAKO,IAAM,sBAAsB;AAAA,EACjC,MAAM;AAAA,EACN,YAAY;AAAA,IACV,UAAU,EAAE,MAAM,UAAU,oBAAoB,KAAK;AAAA,IACrD,QAAQ,EAAE,MAAM,SAAS;AAAA,IACzB,QAAQ,iBAAiB;AAAA,IACzB,UAAU,EAAE,MAAM,UAAU,QAAQ,YAAY,sBAAsB,KAAK;AAAA,EAC7E;AAAA,EACA,sBAAsB;AACxB;AAGO,IAAM,0BAA0B,CAAC,UAAU;",
18 | +   "debugId": "10C8E3D0DA933AA664756E2164756E21",
19 | +   "names": []
20 | + }