@rejot-dev/thalo 0.0.0
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +396 -0
- package/dist/ast/ast-types.d.ts +469 -0
- package/dist/ast/ast-types.d.ts.map +1 -0
- package/dist/ast/ast-types.js +11 -0
- package/dist/ast/ast-types.js.map +1 -0
- package/dist/ast/builder.js +158 -0
- package/dist/ast/builder.js.map +1 -0
- package/dist/ast/extract.js +748 -0
- package/dist/ast/extract.js.map +1 -0
- package/dist/ast/node-at-position.d.ts +147 -0
- package/dist/ast/node-at-position.d.ts.map +1 -0
- package/dist/ast/node-at-position.js +382 -0
- package/dist/ast/node-at-position.js.map +1 -0
- package/dist/ast/visitor.js +232 -0
- package/dist/ast/visitor.js.map +1 -0
- package/dist/checker/check.d.ts +53 -0
- package/dist/checker/check.d.ts.map +1 -0
- package/dist/checker/check.js +105 -0
- package/dist/checker/check.js.map +1 -0
- package/dist/checker/rules/actualize-missing-updated.js +34 -0
- package/dist/checker/rules/actualize-missing-updated.js.map +1 -0
- package/dist/checker/rules/actualize-unresolved-target.js +42 -0
- package/dist/checker/rules/actualize-unresolved-target.js.map +1 -0
- package/dist/checker/rules/alter-before-define.js +53 -0
- package/dist/checker/rules/alter-before-define.js.map +1 -0
- package/dist/checker/rules/alter-undefined-entity.js +32 -0
- package/dist/checker/rules/alter-undefined-entity.js.map +1 -0
- package/dist/checker/rules/create-requires-section.js +34 -0
- package/dist/checker/rules/create-requires-section.js.map +1 -0
- package/dist/checker/rules/define-entity-requires-section.js +31 -0
- package/dist/checker/rules/define-entity-requires-section.js.map +1 -0
- package/dist/checker/rules/duplicate-entity-definition.js +37 -0
- package/dist/checker/rules/duplicate-entity-definition.js.map +1 -0
- package/dist/checker/rules/duplicate-field-in-schema.js +38 -0
- package/dist/checker/rules/duplicate-field-in-schema.js.map +1 -0
- package/dist/checker/rules/duplicate-link-id.js +52 -0
- package/dist/checker/rules/duplicate-link-id.js.map +1 -0
- package/dist/checker/rules/duplicate-metadata-key.js +21 -0
- package/dist/checker/rules/duplicate-metadata-key.js.map +1 -0
- package/dist/checker/rules/duplicate-section-heading.js +41 -0
- package/dist/checker/rules/duplicate-section-heading.js.map +1 -0
- package/dist/checker/rules/duplicate-section-in-schema.js +38 -0
- package/dist/checker/rules/duplicate-section-in-schema.js.map +1 -0
- package/dist/checker/rules/duplicate-timestamp.js +104 -0
- package/dist/checker/rules/duplicate-timestamp.js.map +1 -0
- package/dist/checker/rules/empty-required-value.js +45 -0
- package/dist/checker/rules/empty-required-value.js.map +1 -0
- package/dist/checker/rules/empty-section.js +21 -0
- package/dist/checker/rules/empty-section.js.map +1 -0
- package/dist/checker/rules/invalid-date-range-value.js +56 -0
- package/dist/checker/rules/invalid-date-range-value.js.map +1 -0
- package/dist/checker/rules/invalid-default-value.js +86 -0
- package/dist/checker/rules/invalid-default-value.js.map +1 -0
- package/dist/checker/rules/invalid-field-type.js +45 -0
- package/dist/checker/rules/invalid-field-type.js.map +1 -0
- package/dist/checker/rules/missing-required-field.js +48 -0
- package/dist/checker/rules/missing-required-field.js.map +1 -0
- package/dist/checker/rules/missing-required-section.js +51 -0
- package/dist/checker/rules/missing-required-section.js.map +1 -0
- package/dist/checker/rules/missing-title.js +56 -0
- package/dist/checker/rules/missing-title.js.map +1 -0
- package/dist/checker/rules/remove-undefined-field.js +42 -0
- package/dist/checker/rules/remove-undefined-field.js.map +1 -0
- package/dist/checker/rules/remove-undefined-section.js +42 -0
- package/dist/checker/rules/remove-undefined-section.js.map +1 -0
- package/dist/checker/rules/rules.d.ts +71 -0
- package/dist/checker/rules/rules.d.ts.map +1 -0
- package/dist/checker/rules/rules.js +102 -0
- package/dist/checker/rules/rules.js.map +1 -0
- package/dist/checker/rules/synthesis-empty-query.js +35 -0
- package/dist/checker/rules/synthesis-empty-query.js.map +1 -0
- package/dist/checker/rules/synthesis-missing-prompt.js +42 -0
- package/dist/checker/rules/synthesis-missing-prompt.js.map +1 -0
- package/dist/checker/rules/synthesis-missing-sources.js +32 -0
- package/dist/checker/rules/synthesis-missing-sources.js.map +1 -0
- package/dist/checker/rules/synthesis-unknown-query-entity.js +39 -0
- package/dist/checker/rules/synthesis-unknown-query-entity.js.map +1 -0
- package/dist/checker/rules/timestamp-out-of-order.js +55 -0
- package/dist/checker/rules/timestamp-out-of-order.js.map +1 -0
- package/dist/checker/rules/unknown-entity.js +32 -0
- package/dist/checker/rules/unknown-entity.js.map +1 -0
- package/dist/checker/rules/unknown-field.js +40 -0
- package/dist/checker/rules/unknown-field.js.map +1 -0
- package/dist/checker/rules/unknown-section.js +47 -0
- package/dist/checker/rules/unknown-section.js.map +1 -0
- package/dist/checker/rules/unresolved-link.js +34 -0
- package/dist/checker/rules/unresolved-link.js.map +1 -0
- package/dist/checker/rules/update-without-create.js +65 -0
- package/dist/checker/rules/update-without-create.js.map +1 -0
- package/dist/checker/visitor.d.ts +69 -0
- package/dist/checker/visitor.d.ts.map +1 -0
- package/dist/checker/visitor.js +67 -0
- package/dist/checker/visitor.js.map +1 -0
- package/dist/checker/workspace-index.d.ts +50 -0
- package/dist/checker/workspace-index.d.ts.map +1 -0
- package/dist/checker/workspace-index.js +108 -0
- package/dist/checker/workspace-index.js.map +1 -0
- package/dist/commands/actualize.d.ts +113 -0
- package/dist/commands/actualize.d.ts.map +1 -0
- package/dist/commands/actualize.js +111 -0
- package/dist/commands/actualize.js.map +1 -0
- package/dist/commands/check.d.ts +65 -0
- package/dist/commands/check.d.ts.map +1 -0
- package/dist/commands/check.js +61 -0
- package/dist/commands/check.js.map +1 -0
- package/dist/commands/format.d.ts +90 -0
- package/dist/commands/format.d.ts.map +1 -0
- package/dist/commands/format.js +80 -0
- package/dist/commands/format.js.map +1 -0
- package/dist/commands/query.d.ts +152 -0
- package/dist/commands/query.d.ts.map +1 -0
- package/dist/commands/query.js +151 -0
- package/dist/commands/query.js.map +1 -0
- package/dist/constants.d.ts +31 -0
- package/dist/constants.d.ts.map +1 -0
- package/dist/constants.js +51 -0
- package/dist/constants.js.map +1 -0
- package/dist/files.d.ts +58 -0
- package/dist/files.d.ts.map +1 -0
- package/dist/files.js +103 -0
- package/dist/files.js.map +1 -0
- package/dist/formatters.d.ts +39 -0
- package/dist/formatters.d.ts.map +1 -0
- package/dist/formatters.js +200 -0
- package/dist/formatters.js.map +1 -0
- package/dist/fragment.d.ts +22 -0
- package/dist/fragment.d.ts.map +1 -0
- package/dist/git/git.js +240 -0
- package/dist/git/git.js.map +1 -0
- package/dist/merge/conflict-detector.d.ts +89 -0
- package/dist/merge/conflict-detector.d.ts.map +1 -0
- package/dist/merge/conflict-detector.js +352 -0
- package/dist/merge/conflict-detector.js.map +1 -0
- package/dist/merge/conflict-formatter.js +143 -0
- package/dist/merge/conflict-formatter.js.map +1 -0
- package/dist/merge/driver.d.ts +54 -0
- package/dist/merge/driver.d.ts.map +1 -0
- package/dist/merge/driver.js +112 -0
- package/dist/merge/driver.js.map +1 -0
- package/dist/merge/entry-matcher.d.ts +50 -0
- package/dist/merge/entry-matcher.d.ts.map +1 -0
- package/dist/merge/entry-matcher.js +141 -0
- package/dist/merge/entry-matcher.js.map +1 -0
- package/dist/merge/entry-merger.js +194 -0
- package/dist/merge/entry-merger.js.map +1 -0
- package/dist/merge/merge-result-builder.d.ts +62 -0
- package/dist/merge/merge-result-builder.d.ts.map +1 -0
- package/dist/merge/merge-result-builder.js +89 -0
- package/dist/merge/merge-result-builder.js.map +1 -0
- package/dist/mod.d.ts +31 -0
- package/dist/mod.js +23 -0
- package/dist/model/document.d.ts +134 -0
- package/dist/model/document.d.ts.map +1 -0
- package/dist/model/document.js +275 -0
- package/dist/model/document.js.map +1 -0
- package/dist/model/line-index.d.ts +85 -0
- package/dist/model/line-index.d.ts.map +1 -0
- package/dist/model/line-index.js +159 -0
- package/dist/model/line-index.js.map +1 -0
- package/dist/model/workspace.d.ts +296 -0
- package/dist/model/workspace.d.ts.map +1 -0
- package/dist/model/workspace.js +562 -0
- package/dist/model/workspace.js.map +1 -0
- package/dist/parser.js +27 -0
- package/dist/parser.js.map +1 -0
- package/dist/parser.native.d.ts +51 -0
- package/dist/parser.native.d.ts.map +1 -0
- package/dist/parser.native.js +62 -0
- package/dist/parser.native.js.map +1 -0
- package/dist/parser.shared.d.ts +99 -0
- package/dist/parser.shared.d.ts.map +1 -0
- package/dist/parser.shared.js +124 -0
- package/dist/parser.shared.js.map +1 -0
- package/dist/parser.web.d.ts +67 -0
- package/dist/parser.web.d.ts.map +1 -0
- package/dist/parser.web.js +49 -0
- package/dist/parser.web.js.map +1 -0
- package/dist/schema/registry.d.ts +108 -0
- package/dist/schema/registry.d.ts.map +1 -0
- package/dist/schema/registry.js +281 -0
- package/dist/schema/registry.js.map +1 -0
- package/dist/semantic/analyzer.d.ts +107 -0
- package/dist/semantic/analyzer.d.ts.map +1 -0
- package/dist/semantic/analyzer.js +261 -0
- package/dist/semantic/analyzer.js.map +1 -0
- package/dist/services/change-tracker/change-tracker.d.ts +111 -0
- package/dist/services/change-tracker/change-tracker.d.ts.map +1 -0
- package/dist/services/change-tracker/change-tracker.js +62 -0
- package/dist/services/change-tracker/change-tracker.js.map +1 -0
- package/dist/services/change-tracker/create-tracker.d.ts +42 -0
- package/dist/services/change-tracker/create-tracker.d.ts.map +1 -0
- package/dist/services/change-tracker/create-tracker.js +53 -0
- package/dist/services/change-tracker/create-tracker.js.map +1 -0
- package/dist/services/change-tracker/git-tracker.d.ts +59 -0
- package/dist/services/change-tracker/git-tracker.d.ts.map +1 -0
- package/dist/services/change-tracker/git-tracker.js +218 -0
- package/dist/services/change-tracker/git-tracker.js.map +1 -0
- package/dist/services/change-tracker/timestamp-tracker.d.ts +22 -0
- package/dist/services/change-tracker/timestamp-tracker.d.ts.map +1 -0
- package/dist/services/change-tracker/timestamp-tracker.js +74 -0
- package/dist/services/change-tracker/timestamp-tracker.js.map +1 -0
- package/dist/services/definition.d.ts +37 -0
- package/dist/services/definition.d.ts.map +1 -0
- package/dist/services/definition.js +43 -0
- package/dist/services/definition.js.map +1 -0
- package/dist/services/entity-navigation.d.ts +200 -0
- package/dist/services/entity-navigation.d.ts.map +1 -0
- package/dist/services/entity-navigation.js +211 -0
- package/dist/services/entity-navigation.js.map +1 -0
- package/dist/services/hover.d.ts +81 -0
- package/dist/services/hover.d.ts.map +1 -0
- package/dist/services/hover.js +669 -0
- package/dist/services/hover.js.map +1 -0
- package/dist/services/query.d.ts +116 -0
- package/dist/services/query.d.ts.map +1 -0
- package/dist/services/query.js +225 -0
- package/dist/services/query.js.map +1 -0
- package/dist/services/references.d.ts +52 -0
- package/dist/services/references.d.ts.map +1 -0
- package/dist/services/references.js +66 -0
- package/dist/services/references.js.map +1 -0
- package/dist/services/semantic-tokens.d.ts +54 -0
- package/dist/services/semantic-tokens.d.ts.map +1 -0
- package/dist/services/semantic-tokens.js +213 -0
- package/dist/services/semantic-tokens.js.map +1 -0
- package/dist/services/synthesis.d.ts +90 -0
- package/dist/services/synthesis.d.ts.map +1 -0
- package/dist/services/synthesis.js +113 -0
- package/dist/services/synthesis.js.map +1 -0
- package/dist/source-map.d.ts +42 -0
- package/dist/source-map.d.ts.map +1 -0
- package/dist/source-map.js +170 -0
- package/dist/source-map.js.map +1 -0
- package/package.json +128 -0
- package/tree-sitter-thalo.wasm +0 -0
- package/web-tree-sitter.wasm +0 -0
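The dist layout above ships two parser entry points (`parser.native.js` for Node.js and `parser.web.js` for browsers) together with the `tree-sitter-thalo.wasm` and `web-tree-sitter.wasm` grammar binaries. A minimal sketch of how the two entry points are chosen, based on the `@example` JSDoc in `dist/model/workspace.js` further down in this diff; how the host application loads the wasm binaries is an assumption here, not something this package prescribes.

```typescript
// Sketch only: parser selection as documented in the Workspace JSDoc below.
// In Node.js the native tree-sitter binding is used.
import { Workspace } from "@rejot-dev/thalo";
import { createParser } from "@rejot-dev/thalo/native";

const workspace = new Workspace(createParser());

// In the browser the WASM build is used instead. The two .wasm files shipped
// in this package are loaded by the host app (fetch() here is an assumption)
// and handed to the async factory, mirroring the documented example.
// import { createParser } from "@rejot-dev/thalo/web";
// const treeSitterWasm = await fetch("/web-tree-sitter.wasm").then((r) => r.arrayBuffer());
// const languageWasm = await fetch("/tree-sitter-thalo.wasm").then((r) => r.arrayBuffer());
// const webWorkspace = new Workspace(await createParser({ treeSitterWasm, languageWasm }));
```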

package/dist/model/workspace.js
@@ -0,0 +1,562 @@
import { identitySourceMap } from "../source-map.js";
import { formatTimestamp } from "../formatters.js";
import { extractSourceFile } from "../ast/extract.js";
import { analyze, updateSemanticModel } from "../semantic/analyzer.js";
import { SchemaRegistry } from "../schema/registry.js";
import { LineIndex, computeEdit } from "./line-index.js";
import { Document } from "./document.js";

//#region src/model/workspace.ts
/**
 * A workspace containing multiple thalo documents.
 * Provides cross-file link resolution and schema management.
 *
 * The workspace is parser-agnostic - it accepts any parser that implements
 * the ThaloParser interface, allowing it to work with both native (Node.js)
 * and web (WASM) tree-sitter implementations.
 *
 * @example
 * ```typescript
 * // With native parser (Node.js) - parser is optional, defaults to native
 * import { Workspace } from "@rejot-dev/thalo";
 * const workspace = new Workspace();
 *
 * // With explicit parser
 * import { createParser } from "@rejot-dev/thalo/native";
 * const parser = createParser();
 * const workspace = new Workspace(parser);
 *
 * // With web parser (browser)
 * import { createParser } from "@rejot-dev/thalo/web";
 * const parser = await createParser({ treeSitterWasm, languageWasm });
 * const workspace = new Workspace(parser);
 * ```
 */
var Workspace = class {
  parser;
  models = /* @__PURE__ */ new Map();
  documents = /* @__PURE__ */ new Map();
  _schemaRegistry = new SchemaRegistry();
  _linkIndex = {
    definitions: /* @__PURE__ */ new Map(),
    references: /* @__PURE__ */ new Map()
  };
  linkDependencies = /* @__PURE__ */ new Map();
  entityDependencies = /* @__PURE__ */ new Map();
  /**
   * Create a new Workspace.
   *
   * @param parser - A ThaloParser instance. For Node.js, use createParser() from "@rejot-dev/thalo".
   * For browser, use createParser() from "@rejot-dev/thalo/web".
   */
  constructor(parser) {
    this.parser = parser;
  }
  /**
   * Get the schema registry for this workspace
   */
  get schemaRegistry() {
    return this._schemaRegistry;
  }
  /**
   * Get the combined link index for all documents.
   */
  get linkIndex() {
    return this._linkIndex;
  }
  /**
   * Add a document to the workspace.
   */
  addDocument(source, options) {
    const { filename, fileType } = options;
    this.removeDocument(filename);
    const parsed = this.parser.parseDocument(source, {
      fileType,
      filename
    });
    if (parsed.blocks.length === 0) {
      const model$1 = {
        ast: {
          type: "source_file",
          entries: [],
          syntaxErrors: [],
          location: {
            startIndex: 0,
            endIndex: 0,
            startPosition: {
              row: 0,
              column: 0
            },
            endPosition: {
              row: 0,
              column: 0
            }
          },
          syntaxNode: null
        },
        file: filename,
        source,
        sourceMap: identitySourceMap(),
        blocks: [],
        linkIndex: {
          definitions: /* @__PURE__ */ new Map(),
          references: /* @__PURE__ */ new Map()
        },
        schemaEntries: []
      };
      this.models.set(filename, model$1);
      return model$1;
    }
    const block = parsed.blocks[0];
    const model = analyze(extractSourceFile(block.tree.rootNode), {
      file: filename,
      source,
      sourceMap: block.sourceMap,
      blocks: parsed.blocks
    });
    this.models.set(filename, model);
    for (const entry of model.schemaEntries) {
      const modelEntry = convertToModelSchemaEntry(entry, filename, model.sourceMap);
      if (modelEntry) this._schemaRegistry.add(modelEntry);
    }
    this.mergeLinks(model);
    return model;
  }
  /**
   * Remove a document from the workspace
   */
  removeDocument(file) {
    if (!this.models.has(file)) return;
    this.models.delete(file);
    this.rebuild();
  }
  /**
   * Get a SemanticModel by file path.
   */
  getModel(file) {
    return this.models.get(file);
  }
  /**
   * Check if a document exists
   */
  hasDocument(file) {
    return this.models.has(file);
  }
  /**
   * Get all document file paths
   */
  files() {
    return Array.from(this.models.keys());
  }
  /**
   * Get all SemanticModels.
   */
  allModels() {
    return Array.from(this.models.values());
  }
  /**
   * Get all AST entries across all SemanticModels.
   */
  allEntries() {
    const entries = [];
    for (const model of this.models.values()) entries.push(...model.ast.entries);
    return entries;
  }
  /**
   * Get all instance entries (create/update) across all SemanticModels.
   */
  allInstanceEntries() {
    return this.allEntries().filter((e) => e.type === "instance_entry");
  }
  /**
   * Get all schema entries (define-entity/alter-entity) across all SemanticModels.
   */
  allSchemaEntries() {
    return this.allEntries().filter((e) => e.type === "schema_entry");
  }
  /**
   * Get all synthesis entries (define-synthesis) across all SemanticModels.
   */
  allSynthesisEntries() {
    return this.allEntries().filter((e) => e.type === "synthesis_entry");
  }
  /**
   * Get all actualize entries (actualize-synthesis) across all SemanticModels.
   */
  allActualizeEntries() {
    return this.allEntries().filter((e) => e.type === "actualize_entry");
  }
  /**
   * Get the definition for a link ID
   */
  getLinkDefinition(id) {
    return this._linkIndex.definitions.get(id);
  }
  /**
   * Get all references for a link ID
   */
  getLinkReferences(id) {
    return this._linkIndex.references.get(id) ?? [];
  }
  /**
   * Get the Document instance for incremental editing.
   * Returns undefined if the document hasn't been added with incremental support.
   */
  getDocument(file) {
    return this.documents.get(file);
  }
  /**
   * Apply an incremental edit to a document.
   * This is more efficient than addDocument() for small edits.
   *
   * @param filename - The file to edit
   * @param startLine - 0-based start line
   * @param startColumn - 0-based start column
   * @param endLine - 0-based end line
   * @param endColumn - 0-based end column
   * @param newText - The replacement text
   * @returns Information about what was invalidated
   */
  applyEdit(filename, startLine, startColumn, endLine, endColumn, newText) {
    const doc = this.documents.get(filename);
    if (!doc) {
      const model = this.models.get(filename);
      if (!model) return {
        affectedFiles: [],
        schemasChanged: false,
        linksChanged: false,
        changedEntityNames: [],
        changedLinkIds: []
      };
      const edit = computeEdit(new LineIndex(model.source), startLine, startColumn, endLine, endColumn, newText);
      const newSource = model.source.slice(0, edit.startIndex) + newText + model.source.slice(edit.oldEndIndex);
      return this.updateDocument(filename, newSource);
    }
    const editResult = doc.applyEdit(startLine, startColumn, endLine, endColumn, newText);
    return this.updateModelFromDocument(filename, doc, editResult);
  }
  /**
   * Update a document with new content.
   * This replaces the entire document and recalculates all dependencies.
   *
   * @param filename - The file to update
   * @param newSource - The new source content
   * @returns Information about what was invalidated
   */
  updateDocument(filename, newSource) {
    let doc = this.documents.get(filename);
    if (doc) doc.replaceContent(newSource);
    else {
      doc = new Document(this.parser, filename, newSource);
      this.documents.set(filename, doc);
    }
    return this.updateModelFromDocument(filename, doc, {
      blockBoundariesChanged: true,
      modifiedBlockIndices: doc.blocks.map((_, i) => i),
      fullReparse: true
    });
  }
  /**
   * Get files that would be affected by changes in a specific file.
   * Useful for targeted diagnostics refresh.
   */
  getAffectedFiles(filename) {
    const model = this.models.get(filename);
    if (!model) return [filename];
    const affected = new Set([filename]);
    for (const [linkId] of model.linkIndex.definitions) {
      const dependents = this.linkDependencies.get(linkId);
      if (dependents) for (const dep of dependents) affected.add(dep);
    }
    for (const entry of model.schemaEntries) {
      const entityName = entry.header.entityName.value;
      const dependents = this.entityDependencies.get(entityName);
      if (dependents) for (const dep of dependents) affected.add(dep);
    }
    return Array.from(affected);
  }
  /**
   * Clear all documents
   */
  clear() {
    this.models.clear();
    this.documents.clear();
    this._schemaRegistry.clear();
    this._linkIndex = {
      definitions: /* @__PURE__ */ new Map(),
      references: /* @__PURE__ */ new Map()
    };
    this.linkDependencies.clear();
    this.entityDependencies.clear();
  }
  /**
   * Rebuild schema registry and link index from all models
   */
  rebuild() {
    this._schemaRegistry.clear();
    this._linkIndex = {
      definitions: /* @__PURE__ */ new Map(),
      references: /* @__PURE__ */ new Map()
    };
    this.linkDependencies.clear();
    this.entityDependencies.clear();
    for (const model of this.models.values()) {
      for (const entry of model.schemaEntries) {
        const modelEntry = convertToModelSchemaEntry(entry, model.file, model.sourceMap);
        if (modelEntry) this._schemaRegistry.add(modelEntry);
      }
      this.mergeLinks(model);
      this.updateEntityDependencies(model);
    }
  }
  /**
   * Merge a SemanticModel's links into the workspace link index
   */
  mergeLinks(model) {
    for (const [id, def] of model.linkIndex.definitions) this._linkIndex.definitions.set(id, def);
    for (const [id, refs] of model.linkIndex.references) {
      const existing = this._linkIndex.references.get(id) ?? [];
      existing.push(...refs);
      this._linkIndex.references.set(id, existing);
      let deps = this.linkDependencies.get(id);
      if (!deps) {
        deps = /* @__PURE__ */ new Set();
        this.linkDependencies.set(id, deps);
      }
      deps.add(model.file);
    }
  }
  /**
   * Update entity dependencies for a model
   */
  updateEntityDependencies(model) {
    for (const entry of model.ast.entries) if (entry.type === "instance_entry") {
      const entityName = entry.header.entity;
      let deps = this.entityDependencies.get(entityName);
      if (!deps) {
        deps = /* @__PURE__ */ new Set();
        this.entityDependencies.set(entityName, deps);
      }
      deps.add(model.file);
    }
  }
  /**
   * Remove a file's dependencies from the tracking maps
   */
  removeDependencies(file) {
    for (const deps of this.linkDependencies.values()) deps.delete(file);
    for (const deps of this.entityDependencies.values()) deps.delete(file);
  }
  /**
   * Update the semantic model from a Document's parse results.
   */
  updateModelFromDocument(filename, doc, editResult) {
    const result = {
      affectedFiles: [filename],
      schemasChanged: false,
      linksChanged: false,
      changedEntityNames: [],
      changedLinkIds: []
    };
    if (doc.blocks.length === 0) {
      const oldModel$1 = this.models.get(filename);
      if (oldModel$1) {
        for (const [linkId] of oldModel$1.linkIndex.definitions) {
          result.changedLinkIds.push(linkId);
          result.linksChanged = true;
        }
        for (const entry of oldModel$1.schemaEntries) {
          result.changedEntityNames.push(entry.header.entityName.value);
          result.schemasChanged = true;
        }
        this.removeDependencies(filename);
      }
      const model = {
        ast: {
          type: "source_file",
          entries: [],
          syntaxErrors: [],
          location: {
            startIndex: 0,
            endIndex: 0,
            startPosition: {
              row: 0,
              column: 0
            },
            endPosition: {
              row: 0,
              column: 0
            }
          },
          syntaxNode: null
        },
        file: filename,
        source: doc.source,
        sourceMap: identitySourceMap(),
        blocks: [],
        linkIndex: {
          definitions: /* @__PURE__ */ new Map(),
          references: /* @__PURE__ */ new Map()
        },
        schemaEntries: []
      };
      this.models.set(filename, model);
      this.rebuild();
      result.affectedFiles = this.getAffectedFiles(filename);
      return result;
    }
    const block = doc.blocks[0];
    const newAst = extractSourceFile(block.tree.rootNode);
    const newSourceMap = block.sourceMap;
    const newBlocks = doc.blocks.map((b) => ({
      source: b.source,
      sourceMap: b.sourceMap,
      tree: b.tree
    }));
    const oldModel = this.models.get(filename);
    if (oldModel && !editResult.fullReparse) {
      const updateResult = updateSemanticModel(oldModel, newAst, doc.source, newSourceMap, newBlocks);
      result.linksChanged = updateResult.addedLinkDefinitions.length > 0 || updateResult.removedLinkDefinitions.length > 0;
      result.changedLinkIds = [...updateResult.addedLinkDefinitions, ...updateResult.removedLinkDefinitions];
      result.schemasChanged = updateResult.schemaEntriesChanged;
      result.changedEntityNames = updateResult.changedEntityNames;
      if (result.schemasChanged || result.linksChanged) this.rebuild();
    } else {
      const model = analyze(newAst, {
        file: filename,
        source: doc.source,
        sourceMap: newSourceMap,
        blocks: newBlocks
      });
      if (oldModel) {
        for (const [linkId] of oldModel.linkIndex.definitions) if (!model.linkIndex.definitions.has(linkId)) {
          result.changedLinkIds.push(linkId);
          result.linksChanged = true;
        }
        for (const [linkId] of model.linkIndex.definitions) if (!oldModel.linkIndex.definitions.has(linkId)) {
          result.changedLinkIds.push(linkId);
          result.linksChanged = true;
        }
        const oldEntityNames = new Set(oldModel.schemaEntries.map((e) => e.header.entityName.value));
        const newEntityNames = new Set(model.schemaEntries.map((e) => e.header.entityName.value));
        for (const name of oldEntityNames) if (!newEntityNames.has(name)) {
          result.changedEntityNames.push(name);
          result.schemasChanged = true;
        }
        for (const name of newEntityNames) if (!oldEntityNames.has(name)) {
          result.changedEntityNames.push(name);
          result.schemasChanged = true;
        }
      } else {
        for (const [linkId] of model.linkIndex.definitions) result.changedLinkIds.push(linkId);
        for (const entry of model.schemaEntries) result.changedEntityNames.push(entry.header.entityName.value);
        result.linksChanged = result.changedLinkIds.length > 0;
        result.schemasChanged = result.changedEntityNames.length > 0;
      }
      this.models.set(filename, model);
      this.rebuild();
    }
    result.affectedFiles = this.getAffectedFiles(filename);
    return result;
  }
};
/**
 * Convert AST SchemaEntry to ModelSchemaEntry for SchemaRegistry compatibility.
 * Note: This is a temporary conversion layer until SchemaRegistry is updated to use AST types.
 */
function convertToModelSchemaEntry(entry, file, sourceMap) {
  const directive = entry.header.directive;
  if (directive !== "define-entity" && directive !== "alter-entity") return null;
  const timestamp = formatTimestamp(entry.header.timestamp);
  const fields = entry.metadataBlock?.fields ?? [];
  const sections = entry.sectionsBlock?.sections ?? [];
  const removeFields = entry.removeMetadataBlock?.fields ?? [];
  const removeSections = entry.removeSectionsBlock?.sections ?? [];
  return {
    kind: "schema",
    timestamp,
    directive,
    entityName: entry.header.entityName.value,
    title: entry.header.title?.value ?? "",
    linkId: entry.header.link?.id ?? null,
    tags: entry.header.tags.map((t) => t.name),
    fields: fields.map(convertFieldDefinition),
    sections: sections.map(convertSectionDefinition),
    removeFields: removeFields.map((f) => f.name.value),
    removeSections: removeSections.map((s) => s.name.value),
    location: entry.location,
    file,
    sourceMap
  };
}
function convertFieldDefinition(field) {
  return {
    name: field.name.value,
    optional: field.optional,
    type: convertTypeExpression(field.typeExpr),
    defaultValue: field.defaultValue ? convertDefaultValue(field.defaultValue) : null,
    description: field.description?.value ?? null,
    location: field.location
  };
}
function convertSectionDefinition(section) {
  return {
    name: section.name.value,
    optional: section.optional,
    description: section.description?.value ?? null,
    location: section.location
  };
}
function convertTypeExpression(expr) {
  switch (expr.type) {
    case "primitive_type": return {
      kind: "primitive",
      name: expr.name
    };
    case "literal_type": return {
      kind: "literal",
      value: expr.value
    };
    case "array_type": return {
      kind: "array",
      elementType: convertTypeExpression(expr.elementType)
    };
    case "union_type": return {
      kind: "union",
      members: expr.members.map((m) => convertTypeExpression(m))
    };
    case "syntax_error": return {
      kind: "unknown",
      name: expr.text
    };
  }
}
function convertDefaultValue(defaultValue) {
  const raw = defaultValue.raw;
  switch (defaultValue.content.type) {
    case "quoted_value": return {
      kind: "quoted",
      value: defaultValue.content.value,
      raw
    };
    case "link": return {
      kind: "link",
      id: defaultValue.content.id,
      raw
    };
    case "datetime_value": return {
      kind: "datetime",
      value: defaultValue.content.value,
      raw
    };
    case "number_value": return {
      kind: "number",
      value: defaultValue.content.value,
      raw
    };
  }
}

//#endregion
export { Workspace };
//# sourceMappingURL=workspace.js.map
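The Workspace class in the hunk above keeps one SemanticModel per file plus workspace-wide schema and link indices, and its applyEdit/updateDocument methods return an InvalidationResult (affectedFiles, schemasChanged, linksChanged, changedEntityNames, changedLinkIds) so callers can re-check only what changed. A minimal usage sketch against the methods shown in this hunk; the file names and document contents are placeholders, not real thalo syntax.

```typescript
import { Workspace } from "@rejot-dev/thalo";
import { createParser } from "@rejot-dev/thalo/native";

const workspace = new Workspace(createParser());

// Index two (placeholder) documents; addDocument parses and returns a SemanticModel.
workspace.addDocument("/* thalo source */", { filename: "notes.thalo" });
workspace.addDocument("/* thalo source */", { filename: "schema.thalo" });

// Full replacement of one file: the result describes what was invalidated.
const full = workspace.updateDocument("schema.thalo", "/* new thalo source */");
console.log(full.schemasChanged, full.changedEntityNames);

// Incremental edit: 0-based line/column range plus replacement text.
const incremental = workspace.applyEdit("notes.thalo", 0, 0, 0, 0, "x");
for (const file of incremental.affectedFiles) {
  // Refresh diagnostics only for the files the edit could have affected.
  console.log("re-check", file, workspace.getModel(file) !== undefined);
}

// Workspace-wide queries served from the merged indices.
console.log(workspace.files(), workspace.getLinkReferences("some-link-id"));
```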

package/dist/model/workspace.js.map
@@ -0,0 +1 @@
{"version":3,"file":"workspace.js","names":["model: SemanticModel","model","entries: Entry[]","result: InvalidationResult","oldModel"],"sources":["../../src/model/workspace.ts"],"sourcesContent":["// ===================\n// Schema Entry Types (used by SchemaRegistry)\n// ===================\n\nimport type { Location } from \"../ast/ast-types.js\";\nimport type { SourceMap } from \"../source-map.js\";\n\n/**\n * A schema entry (define-entity/alter-entity)\n */\nexport interface ModelSchemaEntry {\n kind: \"schema\";\n /** The timestamp from the header */\n timestamp: string;\n /** The directive (define-entity or alter-entity) */\n directive: \"define-entity\" | \"alter-entity\";\n /** The entity name being defined/altered */\n entityName: string;\n /** The title/description */\n title: string;\n /** Explicit link ID from header (if any) */\n linkId: string | null;\n /** Tags from the header */\n tags: string[];\n /** Field definitions (for define/alter) */\n fields: ModelFieldDefinition[];\n /** Section definitions (for define/alter) */\n sections: ModelSectionDefinition[];\n /** Field removals (for alter) */\n removeFields: string[];\n /** Section removals (for alter) */\n removeSections: string[];\n /** Location in source (block-relative) */\n location: Location;\n /** The file path containing this entry */\n file: string;\n /** Source map for translating block-relative to file-absolute positions */\n sourceMap: SourceMap;\n}\n\n/**\n * A field definition from a schema entry\n */\nexport interface ModelFieldDefinition {\n name: string;\n optional: boolean;\n type: ModelTypeExpression;\n /** The typed default value content (quoted, link, or datetime) */\n defaultValue: ModelDefaultValue | null;\n description: string | null;\n location: Location;\n}\n\n/**\n * A typed default value from a field definition.\n * Can be a quoted string, link reference, datetime, or number.\n */\nexport type ModelDefaultValue =\n | { kind: \"quoted\"; value: string; raw: string }\n | { kind: \"link\"; id: string; raw: string }\n | { kind: \"datetime\"; value: string; raw: string }\n | { kind: \"number\"; value: number; raw: string };\n\n/**\n * A section definition from a schema entry\n */\nexport interface ModelSectionDefinition {\n name: string;\n optional: boolean;\n description: string | null;\n location: Location;\n}\n\n/**\n * Type expressions in field definitions\n */\nexport type ModelTypeExpression =\n | ModelPrimitiveType\n | ModelLiteralType\n | ModelArrayType\n | ModelUnionType\n | ModelUnknownType;\n\nexport interface ModelPrimitiveType {\n kind: \"primitive\";\n name: \"string\" | \"datetime\" | \"daterange\" | \"link\" | \"number\";\n}\n\n/**\n * An unknown/invalid type (e.g., typo like \"date-time\" instead of \"datetime\").\n * Fields with this type are included in the schema but type validation is skipped.\n */\nexport interface ModelUnknownType {\n kind: \"unknown\";\n /** The unrecognized type name as written */\n name: string;\n}\n\nexport interface ModelLiteralType {\n kind: \"literal\";\n value: string;\n}\n\nexport interface ModelArrayType {\n kind: \"array\";\n elementType: ModelPrimitiveType | ModelLiteralType | ModelUnionType | ModelUnknownType;\n}\n\nexport interface ModelUnionType {\n kind: \"union\";\n members: (ModelPrimitiveType | ModelLiteralType | ModelArrayType | ModelUnknownType)[];\n}\nimport type {\n Entry,\n SchemaEntry,\n InstanceEntry,\n SynthesisEntry,\n ActualizeEntry,\n FieldDefinition,\n SectionDefinition,\n TypeExpression,\n DefaultValue as AstDefaultValue,\n} from 
\"../ast/ast-types.js\";\nimport type {\n SemanticModel,\n LinkIndex,\n LinkDefinition,\n LinkReference,\n} from \"../semantic/analyzer.js\";\nimport type { ThaloParser, GenericTree, FileType } from \"../parser.shared.js\";\nimport { extractSourceFile } from \"../ast/extract.js\";\nimport { analyze, updateSemanticModel } from \"../semantic/analyzer.js\";\nimport { SchemaRegistry } from \"../schema/registry.js\";\nimport { identitySourceMap } from \"../source-map.js\";\nimport { Document, type EditResult } from \"./document.js\";\nimport { LineIndex, computeEdit } from \"./line-index.js\";\nimport { formatTimestamp } from \"../formatters.js\";\n\n/**\n * Options for adding a document to the workspace\n */\nexport interface AddDocumentOptions {\n /** The file type. If not provided, uses heuristics based on filename or content. */\n fileType?: FileType;\n /** The filename/path for the document (required for workspace indexing) */\n filename: string;\n}\n\n/**\n * Result of applying an edit or update to the workspace.\n * Used for incremental diagnostics and targeted invalidation.\n */\nexport interface InvalidationResult {\n /** Files whose diagnostics may have changed */\n affectedFiles: string[];\n /** Whether schema definitions changed (affects type checking) */\n schemasChanged: boolean;\n /** Whether link definitions changed (affects link resolution) */\n linksChanged: boolean;\n /** Entity names whose schemas changed */\n changedEntityNames: string[];\n /** Link IDs that were added or removed */\n changedLinkIds: string[];\n}\n\n/**\n * A workspace containing multiple thalo documents.\n * Provides cross-file link resolution and schema management.\n *\n * The workspace is parser-agnostic - it accepts any parser that implements\n * the ThaloParser interface, allowing it to work with both native (Node.js)\n * and web (WASM) tree-sitter implementations.\n *\n * @example\n * ```typescript\n * // With native parser (Node.js) - parser is optional, defaults to native\n * import { Workspace } from \"@rejot-dev/thalo\";\n * const workspace = new Workspace();\n *\n * // With explicit parser\n * import { createParser } from \"@rejot-dev/thalo/native\";\n * const parser = createParser();\n * const workspace = new Workspace(parser);\n *\n * // With web parser (browser)\n * import { createParser } from \"@rejot-dev/thalo/web\";\n * const parser = await createParser({ treeSitterWasm, languageWasm });\n * const workspace = new Workspace(parser);\n * ```\n */\nexport class Workspace {\n private parser: ThaloParser<GenericTree>;\n private models = new Map<string, SemanticModel>();\n private documents = new Map<string, Document<GenericTree>>();\n private _schemaRegistry = new SchemaRegistry();\n private _linkIndex: LinkIndex = {\n definitions: new Map(),\n references: new Map(),\n };\n\n // Dependency tracking for targeted invalidation\n // linkId -> Set of files that reference this link\n private linkDependencies = new Map<string, Set<string>>();\n // entityName -> Set of files that use this entity (as instances)\n private entityDependencies = new Map<string, Set<string>>();\n\n /**\n * Create a new Workspace.\n *\n * @param parser - A ThaloParser instance. 
For Node.js, use createParser() from \"@rejot-dev/thalo\".\n * For browser, use createParser() from \"@rejot-dev/thalo/web\".\n */\n constructor(parser: ThaloParser<GenericTree>) {\n this.parser = parser;\n }\n\n /**\n * Get the schema registry for this workspace\n */\n get schemaRegistry(): SchemaRegistry {\n return this._schemaRegistry;\n }\n\n /**\n * Get the combined link index for all documents.\n */\n get linkIndex(): LinkIndex {\n return this._linkIndex;\n }\n\n /**\n * Add a document to the workspace.\n */\n addDocument(source: string, options: AddDocumentOptions): SemanticModel {\n const { filename, fileType } = options;\n\n // Remove existing document if present\n this.removeDocument(filename);\n\n // Parse and create SemanticModel\n const parsed = this.parser.parseDocument(source, { fileType, filename });\n if (parsed.blocks.length === 0) {\n // Empty document - create minimal model\n const emptyLocation = {\n startIndex: 0,\n endIndex: 0,\n startPosition: { row: 0, column: 0 },\n endPosition: { row: 0, column: 0 },\n };\n // Empty document has no syntax tree, but SemanticModel still needs a valid structure\n const model: SemanticModel = {\n ast: {\n type: \"source_file\",\n entries: [],\n syntaxErrors: [],\n location: emptyLocation,\n // Safe: empty documents have no syntax tree; this is only used for empty file edge case\n syntaxNode: null as unknown as import(\"tree-sitter\").SyntaxNode,\n },\n file: filename,\n source,\n sourceMap: identitySourceMap(),\n blocks: [],\n linkIndex: { definitions: new Map(), references: new Map() },\n schemaEntries: [],\n };\n this.models.set(filename, model);\n return model;\n }\n\n // For now, only process the first block (standard for .thalo files)\n const block = parsed.blocks[0];\n // Type assertion: both native and web tree-sitter rootNode have compatible interfaces\n const ast = extractSourceFile(block.tree.rootNode as import(\"tree-sitter\").SyntaxNode);\n const model = analyze(ast, {\n file: filename,\n source,\n sourceMap: block.sourceMap,\n blocks: parsed.blocks,\n });\n this.models.set(filename, model);\n\n // Update schema registry with converted schema entries\n for (const entry of model.schemaEntries) {\n const modelEntry = convertToModelSchemaEntry(entry, filename, model.sourceMap);\n if (modelEntry) {\n this._schemaRegistry.add(modelEntry);\n }\n }\n\n // Merge link index\n this.mergeLinks(model);\n\n return model;\n }\n\n /**\n * Remove a document from the workspace\n */\n removeDocument(file: string): void {\n if (!this.models.has(file)) {\n return;\n }\n\n this.models.delete(file);\n\n // Rebuild schema registry and link index\n this.rebuild();\n }\n\n /**\n * Get a SemanticModel by file path.\n */\n getModel(file: string): SemanticModel | undefined {\n return this.models.get(file);\n }\n\n /**\n * Check if a document exists\n */\n hasDocument(file: string): boolean {\n return this.models.has(file);\n }\n\n /**\n * Get all document file paths\n */\n files(): string[] {\n return Array.from(this.models.keys());\n }\n\n /**\n * Get all SemanticModels.\n */\n allModels(): SemanticModel[] {\n return Array.from(this.models.values());\n }\n\n /**\n * Get all AST entries across all SemanticModels.\n */\n allEntries(): Entry[] {\n const entries: Entry[] = [];\n for (const model of this.models.values()) {\n entries.push(...model.ast.entries);\n }\n return entries;\n }\n\n /**\n * Get all instance entries (create/update) across all SemanticModels.\n */\n allInstanceEntries(): InstanceEntry[] {\n return this.allEntries().filter((e): e is 
InstanceEntry => e.type === \"instance_entry\");\n }\n\n /**\n * Get all schema entries (define-entity/alter-entity) across all SemanticModels.\n */\n allSchemaEntries(): SchemaEntry[] {\n return this.allEntries().filter((e): e is SchemaEntry => e.type === \"schema_entry\");\n }\n\n /**\n * Get all synthesis entries (define-synthesis) across all SemanticModels.\n */\n allSynthesisEntries(): SynthesisEntry[] {\n return this.allEntries().filter((e): e is SynthesisEntry => e.type === \"synthesis_entry\");\n }\n\n /**\n * Get all actualize entries (actualize-synthesis) across all SemanticModels.\n */\n allActualizeEntries(): ActualizeEntry[] {\n return this.allEntries().filter((e): e is ActualizeEntry => e.type === \"actualize_entry\");\n }\n\n /**\n * Get the definition for a link ID\n */\n getLinkDefinition(id: string): LinkDefinition | undefined {\n return this._linkIndex.definitions.get(id);\n }\n\n /**\n * Get all references for a link ID\n */\n getLinkReferences(id: string): LinkReference[] {\n return this._linkIndex.references.get(id) ?? [];\n }\n\n /**\n * Get the Document instance for incremental editing.\n * Returns undefined if the document hasn't been added with incremental support.\n */\n getDocument(file: string): Document<GenericTree> | undefined {\n return this.documents.get(file);\n }\n\n /**\n * Apply an incremental edit to a document.\n * This is more efficient than addDocument() for small edits.\n *\n * @param filename - The file to edit\n * @param startLine - 0-based start line\n * @param startColumn - 0-based start column\n * @param endLine - 0-based end line\n * @param endColumn - 0-based end column\n * @param newText - The replacement text\n * @returns Information about what was invalidated\n */\n applyEdit(\n filename: string,\n startLine: number,\n startColumn: number,\n endLine: number,\n endColumn: number,\n newText: string,\n ): InvalidationResult {\n const doc = this.documents.get(filename);\n if (!doc) {\n // Fall back to full re-parse if document wasn't initialized with incremental support\n const model = this.models.get(filename);\n if (!model) {\n return {\n affectedFiles: [],\n schemasChanged: false,\n linksChanged: false,\n changedEntityNames: [],\n changedLinkIds: [],\n };\n }\n\n // Apply the edit to get new source and do full re-parse\n const lineIndex = new LineIndex(model.source);\n const edit = computeEdit(lineIndex, startLine, startColumn, endLine, endColumn, newText);\n const newSource =\n model.source.slice(0, edit.startIndex) + newText + model.source.slice(edit.oldEndIndex);\n return this.updateDocument(filename, newSource);\n }\n\n // Apply incremental edit to the Document\n const editResult = doc.applyEdit(startLine, startColumn, endLine, endColumn, newText);\n\n // Update the semantic model\n return this.updateModelFromDocument(filename, doc, editResult);\n }\n\n /**\n * Update a document with new content.\n * This replaces the entire document and recalculates all dependencies.\n *\n * @param filename - The file to update\n * @param newSource - The new source content\n * @returns Information about what was invalidated\n */\n updateDocument(filename: string, newSource: string): InvalidationResult {\n // Get or create the Document\n let doc = this.documents.get(filename);\n if (doc) {\n doc.replaceContent(newSource);\n } else {\n doc = new Document(this.parser, filename, newSource);\n this.documents.set(filename, doc);\n }\n\n // Update the semantic model\n return this.updateModelFromDocument(filename, doc, {\n blockBoundariesChanged: true,\n 
modifiedBlockIndices: doc.blocks.map((_, i) => i),\n fullReparse: true,\n });\n }\n\n /**\n * Get files that would be affected by changes in a specific file.\n * Useful for targeted diagnostics refresh.\n */\n getAffectedFiles(filename: string): string[] {\n const model = this.models.get(filename);\n if (!model) {\n return [filename];\n }\n\n const affected = new Set<string>([filename]);\n\n // Files that reference links defined in this file\n for (const [linkId] of model.linkIndex.definitions) {\n const dependents = this.linkDependencies.get(linkId);\n if (dependents) {\n for (const dep of dependents) {\n affected.add(dep);\n }\n }\n }\n\n // Files that use entities defined in this file\n for (const entry of model.schemaEntries) {\n const entityName = entry.header.entityName.value;\n const dependents = this.entityDependencies.get(entityName);\n if (dependents) {\n for (const dep of dependents) {\n affected.add(dep);\n }\n }\n }\n\n return Array.from(affected);\n }\n\n /**\n * Clear all documents\n */\n clear(): void {\n this.models.clear();\n this.documents.clear();\n this._schemaRegistry.clear();\n this._linkIndex = {\n definitions: new Map(),\n references: new Map(),\n };\n this.linkDependencies.clear();\n this.entityDependencies.clear();\n }\n\n /**\n * Rebuild schema registry and link index from all models\n */\n private rebuild(): void {\n this._schemaRegistry.clear();\n this._linkIndex = {\n definitions: new Map(),\n references: new Map(),\n };\n this.linkDependencies.clear();\n this.entityDependencies.clear();\n\n for (const model of this.models.values()) {\n // Add schema entries\n for (const entry of model.schemaEntries) {\n const modelEntry = convertToModelSchemaEntry(entry, model.file, model.sourceMap);\n if (modelEntry) {\n this._schemaRegistry.add(modelEntry);\n }\n }\n\n // Merge links and track dependencies\n this.mergeLinks(model);\n\n // Track entity dependencies\n this.updateEntityDependencies(model);\n }\n }\n\n /**\n * Merge a SemanticModel's links into the workspace link index\n */\n private mergeLinks(model: SemanticModel): void {\n // Merge definitions\n for (const [id, def] of model.linkIndex.definitions) {\n this._linkIndex.definitions.set(id, def);\n }\n\n // Merge references and track dependencies\n for (const [id, refs] of model.linkIndex.references) {\n const existing = this._linkIndex.references.get(id) ?? 
[];\n existing.push(...refs);\n this._linkIndex.references.set(id, existing);\n\n // Track that this file depends on this link\n let deps = this.linkDependencies.get(id);\n if (!deps) {\n deps = new Set();\n this.linkDependencies.set(id, deps);\n }\n deps.add(model.file);\n }\n }\n\n /**\n * Update entity dependencies for a model\n */\n private updateEntityDependencies(model: SemanticModel): void {\n // Track which entities this file uses\n for (const entry of model.ast.entries) {\n if (entry.type === \"instance_entry\") {\n const entityName = entry.header.entity;\n let deps = this.entityDependencies.get(entityName);\n if (!deps) {\n deps = new Set();\n this.entityDependencies.set(entityName, deps);\n }\n deps.add(model.file);\n }\n }\n }\n\n /**\n * Remove a file's dependencies from the tracking maps\n */\n private removeDependencies(file: string): void {\n // Remove from link dependencies\n for (const deps of this.linkDependencies.values()) {\n deps.delete(file);\n }\n\n // Remove from entity dependencies\n for (const deps of this.entityDependencies.values()) {\n deps.delete(file);\n }\n }\n\n /**\n * Update the semantic model from a Document's parse results.\n */\n private updateModelFromDocument(\n filename: string,\n doc: Document<GenericTree>,\n editResult: EditResult,\n ): InvalidationResult {\n const result: InvalidationResult = {\n affectedFiles: [filename],\n schemasChanged: false,\n linksChanged: false,\n changedEntityNames: [],\n changedLinkIds: [],\n };\n\n if (doc.blocks.length === 0) {\n // Empty document - remove existing model\n const oldModel = this.models.get(filename);\n if (oldModel) {\n // Track removed links\n for (const [linkId] of oldModel.linkIndex.definitions) {\n result.changedLinkIds.push(linkId);\n result.linksChanged = true;\n }\n // Track removed schemas\n for (const entry of oldModel.schemaEntries) {\n result.changedEntityNames.push(entry.header.entityName.value);\n result.schemasChanged = true;\n }\n // Remove dependencies\n this.removeDependencies(filename);\n }\n\n // Create empty model\n const emptyLocation = {\n startIndex: 0,\n endIndex: 0,\n startPosition: { row: 0, column: 0 },\n endPosition: { row: 0, column: 0 },\n };\n const model: SemanticModel = {\n ast: {\n type: \"source_file\",\n entries: [],\n syntaxErrors: [],\n location: emptyLocation,\n // Safe: empty documents have no syntax tree; this is only used for empty file edge case\n syntaxNode: null as unknown as import(\"tree-sitter\").SyntaxNode,\n },\n file: filename,\n source: doc.source,\n sourceMap: identitySourceMap(),\n blocks: [],\n linkIndex: { definitions: new Map(), references: new Map() },\n schemaEntries: [],\n };\n this.models.set(filename, model);\n this.rebuild();\n\n result.affectedFiles = this.getAffectedFiles(filename);\n return result;\n }\n\n // Parse the first block\n const block = doc.blocks[0];\n // Type assertion: both native and web tree-sitter rootNode have compatible interfaces\n const newAst = extractSourceFile(block.tree.rootNode as import(\"tree-sitter\").SyntaxNode);\n const newSourceMap = block.sourceMap;\n const newBlocks = doc.blocks.map((b) => ({\n source: b.source,\n sourceMap: b.sourceMap,\n tree: b.tree,\n }));\n\n const oldModel = this.models.get(filename);\n if (oldModel && !editResult.fullReparse) {\n // Use incremental update\n const updateResult = updateSemanticModel(\n oldModel,\n newAst,\n doc.source,\n newSourceMap,\n newBlocks,\n );\n\n result.linksChanged =\n updateResult.addedLinkDefinitions.length > 0 ||\n 
updateResult.removedLinkDefinitions.length > 0;\n result.changedLinkIds = [\n ...updateResult.addedLinkDefinitions,\n ...updateResult.removedLinkDefinitions,\n ];\n result.schemasChanged = updateResult.schemaEntriesChanged;\n result.changedEntityNames = updateResult.changedEntityNames;\n\n // Rebuild workspace indices if needed\n if (result.schemasChanged || result.linksChanged) {\n this.rebuild();\n }\n } else {\n // Full re-analyze\n const model = analyze(newAst, {\n file: filename,\n source: doc.source,\n sourceMap: newSourceMap,\n blocks: newBlocks,\n });\n\n // Track what changed compared to old model\n if (oldModel) {\n // Check for link changes\n for (const [linkId] of oldModel.linkIndex.definitions) {\n if (!model.linkIndex.definitions.has(linkId)) {\n result.changedLinkIds.push(linkId);\n result.linksChanged = true;\n }\n }\n for (const [linkId] of model.linkIndex.definitions) {\n if (!oldModel.linkIndex.definitions.has(linkId)) {\n result.changedLinkIds.push(linkId);\n result.linksChanged = true;\n }\n }\n\n // Check for schema changes\n const oldEntityNames = new Set(\n oldModel.schemaEntries.map((e) => e.header.entityName.value),\n );\n const newEntityNames = new Set(model.schemaEntries.map((e) => e.header.entityName.value));\n\n for (const name of oldEntityNames) {\n if (!newEntityNames.has(name)) {\n result.changedEntityNames.push(name);\n result.schemasChanged = true;\n }\n }\n for (const name of newEntityNames) {\n if (!oldEntityNames.has(name)) {\n result.changedEntityNames.push(name);\n result.schemasChanged = true;\n }\n }\n } else {\n // New document - everything is new\n for (const [linkId] of model.linkIndex.definitions) {\n result.changedLinkIds.push(linkId);\n }\n for (const entry of model.schemaEntries) {\n result.changedEntityNames.push(entry.header.entityName.value);\n }\n result.linksChanged = result.changedLinkIds.length > 0;\n result.schemasChanged = result.changedEntityNames.length > 0;\n }\n\n this.models.set(filename, model);\n this.rebuild();\n }\n\n // Calculate affected files\n result.affectedFiles = this.getAffectedFiles(filename);\n\n return result;\n }\n}\n\n/**\n * Convert AST SchemaEntry to ModelSchemaEntry for SchemaRegistry compatibility.\n * Note: This is a temporary conversion layer until SchemaRegistry is updated to use AST types.\n */\nfunction convertToModelSchemaEntry(\n entry: SchemaEntry,\n file: string,\n sourceMap: SourceMap,\n): ModelSchemaEntry | null {\n // Only handle entity schema entries (define-entity or alter-entity)\n const directive = entry.header.directive;\n if (directive !== \"define-entity\" && directive !== \"alter-entity\") {\n return null;\n }\n\n const timestamp = formatTimestamp(entry.header.timestamp);\n const fields = entry.metadataBlock?.fields ?? [];\n const sections = entry.sectionsBlock?.sections ?? [];\n const removeFields = entry.removeMetadataBlock?.fields ?? [];\n const removeSections = entry.removeSectionsBlock?.sections ?? [];\n\n return {\n kind: \"schema\",\n timestamp,\n directive,\n entityName: entry.header.entityName.value,\n title: entry.header.title?.value ?? \"\",\n linkId: entry.header.link?.id ?? 
null,\n tags: entry.header.tags.map((t) => t.name),\n fields: fields.map(convertFieldDefinition),\n sections: sections.map(convertSectionDefinition),\n removeFields: removeFields.map((f) => f.name.value),\n removeSections: removeSections.map((s) => s.name.value),\n location: entry.location,\n file,\n sourceMap,\n };\n}\n\nfunction convertFieldDefinition(field: FieldDefinition): ModelFieldDefinition {\n return {\n name: field.name.value,\n optional: field.optional,\n type: convertTypeExpression(field.typeExpr),\n defaultValue: field.defaultValue ? convertDefaultValue(field.defaultValue) : null,\n description: field.description?.value ?? null,\n location: field.location,\n };\n}\n\nfunction convertSectionDefinition(section: SectionDefinition): ModelSectionDefinition {\n return {\n name: section.name.value,\n optional: section.optional,\n description: section.description?.value ?? null,\n location: section.location,\n };\n}\n\nfunction convertTypeExpression(\n expr: TypeExpression | import(\"../ast/ast-types.js\").SyntaxErrorNode<\"unknown_type\">,\n): ModelTypeExpression {\n switch (expr.type) {\n case \"primitive_type\":\n return { kind: \"primitive\", name: expr.name };\n case \"literal_type\":\n return { kind: \"literal\", value: expr.value };\n case \"array_type\":\n // Safe: array element types cannot be arrays or unions per grammar\n return {\n kind: \"array\",\n elementType: convertTypeExpression(expr.elementType) as Exclude<\n ModelTypeExpression,\n { kind: \"array\" | \"union\" | \"unknown\" }\n >,\n };\n case \"union_type\":\n // Safe: union members cannot be unions per grammar\n return {\n kind: \"union\",\n members: expr.members.map(\n (m) => convertTypeExpression(m) as Exclude<ModelTypeExpression, { kind: \"union\" }>,\n ),\n };\n case \"syntax_error\":\n // Unknown type - preserve the name for error messages\n return { kind: \"unknown\", name: expr.text };\n }\n}\n\nfunction convertDefaultValue(defaultValue: AstDefaultValue): ModelDefaultValue {\n const raw = defaultValue.raw;\n switch (defaultValue.content.type) {\n case \"quoted_value\":\n return { kind: \"quoted\", value: defaultValue.content.value, raw };\n case \"link\":\n return { kind: \"link\", id: defaultValue.content.id, raw };\n case \"datetime_value\":\n return { kind: \"datetime\", value: defaultValue.content.value, raw };\n case \"number_value\":\n return { kind: \"number\", value: defaultValue.content.value, raw };\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA8LA,IAAa,YAAb,MAAuB;CACrB,AAAQ;CACR,AAAQ,yBAAS,IAAI,KAA4B;CACjD,AAAQ,4BAAY,IAAI,KAAoC;CAC5D,AAAQ,kBAAkB,IAAI,gBAAgB;CAC9C,AAAQ,aAAwB;EAC9B,6BAAa,IAAI,KAAK;EACtB,4BAAY,IAAI,KAAK;EACtB;CAID,AAAQ,mCAAmB,IAAI,KAA0B;CAEzD,AAAQ,qCAAqB,IAAI,KAA0B;;;;;;;CAQ3D,YAAY,QAAkC;AAC5C,OAAK,SAAS;;;;;CAMhB,IAAI,iBAAiC;AACnC,SAAO,KAAK;;;;;CAMd,IAAI,YAAuB;AACzB,SAAO,KAAK;;;;;CAMd,YAAY,QAAgB,SAA4C;EACtE,MAAM,EAAE,UAAU,aAAa;AAG/B,OAAK,eAAe,SAAS;EAG7B,MAAM,SAAS,KAAK,OAAO,cAAc,QAAQ;GAAE;GAAU;GAAU,CAAC;AACxE,MAAI,OAAO,OAAO,WAAW,GAAG;GAS9B,MAAMA,UAAuB;IAC3B,KAAK;KACH,MAAM;KACN,SAAS,EAAE;KACX,cAAc,EAAE;KAChB,UAZkB;MACpB,YAAY;MACZ,UAAU;MACV,eAAe;OAAE,KAAK;OAAG,QAAQ;OAAG;MACpC,aAAa;OAAE,KAAK;OAAG,QAAQ;OAAG;MACnC;KASG,YAAY;KACb;IACD,MAAM;IACN;IACA,WAAW,mBAAmB;IAC9B,QAAQ,EAAE;IACV,WAAW;KAAE,6BAAa,IAAI,KAAK;KAAE,4BAAY,IAAI,KAAK;KAAE;IAC5D,eAAe,EAAE;IAClB;AACD,QAAK,OAAO,IAAI,UAAUC,QAAM;AAChC,UAAOA;;EAIT,MAAM,QAAQ,OAAO,OAAO;EAG5B,MAAM,QAAQ,QADF,kBAAkB,MAAM,KAAK,SAA6C,EAC3D;GACzB,MAAM;GACN;GACA,WAAW,MAAM;GACjB,QAAQ,OAAO;GAChB,CAAC;AACF,OAAK,OAAO,IAAI,UAAU,MAAM;AAGhC,OAAK,MAAM,SAAS,MAAM,eAAe;GACvC,MAAM,aAAa,0BAA0B,OAAO,UAAU,MAAM,UAAU;AAC9E,OAAI,WACF,MAAK,gBAAgB,IAAI,WAAW;;AAKxC,OAAK,WAAW,MAAM;AAEtB,SAAO;;;;;CAMT,eAAe,MAAoB;AACjC,MAAI,CAAC,KAAK,OAAO,IAAI,KAAK,CACxB;AAGF,OAAK,OAAO,OAAO,KAAK;AAGxB,OAAK,SAAS;;;;;CAMhB,SAAS,MAAyC;AAChD,SAAO,KAAK,OAAO,IAAI,KAAK;;;;;CAM9B,YAAY,MAAuB;AACjC,SAAO,KAAK,OAAO,IAAI,KAAK;;;;;CAM9B,QAAkB;AAChB,SAAO,MAAM,KAAK,KAAK,OAAO,MAAM,CAAC;;;;;CAMvC,YAA6B;AAC3B,SAAO,MAAM,KAAK,KAAK,OAAO,QAAQ,CAAC;;;;;CAMzC,aAAsB;EACpB,MAAMC,UAAmB,EAAE;AAC3B,OAAK,MAAM,SAAS,KAAK,OAAO,QAAQ,CACtC,SAAQ,KAAK,GAAG,MAAM,IAAI,QAAQ;AAEpC,SAAO;;;;;CAMT,qBAAsC;AACpC,SAAO,KAAK,YAAY,CAAC,QAAQ,MAA0B,EAAE,SAAS,iBAAiB;;;;;CAMzF,mBAAkC;AAChC,SAAO,KAAK,YAAY,CAAC,QAAQ,MAAwB,EAAE,SAAS,eAAe;;;;;CAMrF,sBAAwC;AACtC,SAAO,KAAK,YAAY,CAAC,QAAQ,MAA2B,EAAE,SAAS,kBAAkB;;;;;CAM3F,sBAAwC;AACtC,SAAO,KAAK,YAAY,CAAC,QAAQ,MAA2B,EAAE,SAAS,kBAAkB;;;;;CAM3F,kBAAkB,IAAwC;AACxD,SAAO,KAAK,WAAW,YAAY,IAAI,GAAG;;;;;CAM5C,kBAAkB,IAA6B;AAC7C,SAAO,KAAK,WAAW,WAAW,IAAI,GAAG,IAAI,EAAE;;;;;;CAOjD,YAAY,MAAiD;AAC3D,SAAO,KAAK,UAAU,IAAI,KAAK;;;;;;;;;;;;;;CAejC,UACE,UACA,WACA,aACA,SACA,WACA,SACoB;EACpB,MAAM,MAAM,KAAK,UAAU,IAAI,SAAS;AACxC,MAAI,CAAC,KAAK;GAER,MAAM,QAAQ,KAAK,OAAO,IAAI,SAAS;AACvC,OAAI,CAAC,MACH,QAAO;IACL,eAAe,EAAE;IACjB,gBAAgB;IAChB,cAAc;IACd,oBAAoB,EAAE;IACtB,gBAAgB,EAAE;IACnB;GAKH,MAAM,OAAO,YADK,IAAI,UAAU,MAAM,OAAO,EACT,WAAW,aAAa,SAAS,WAAW,QAAQ;GACxF,MAAM,YACJ,MAAM,OAAO,MAAM,GAAG,KAAK,WAAW,GAAG,UAAU,MAAM,OAAO,MAAM,KAAK,YAAY;AACzF,UAAO,KAAK,eAAe,UAAU,UAAU;;EAIjD,MAAM,aAAa,IAAI,UAAU,WAAW,aAAa,SAAS,WAAW,QAAQ;AAGrF,SAAO,KAAK,wBAAwB,UAAU,KAAK,WAAW;;;;;;;;;;CAWhE,eAAe,UAAkB,WAAuC;EAEtE,IAAI,MAAM,KAAK,UAAU,IAAI,SAAS;AACtC,MAAI,IACF,KAAI,eAAe,UAAU;OACxB;AACL,SAAM,IAAI,SAAS,KAAK,QAAQ,UAAU,UAAU;AACpD,QAAK,UAAU,IAAI,UAAU,IAAI;;AAInC,SAAO,KAAK,wBAAwB,UAAU,KAAK;GACjD,wBAAwB;GACxB,sBAAsB,IAAI,OAAO,KAAK,GAAG,MAAM,EAAE;GACjD,aAAa;GACd,CAAC;;;;;;CAOJ,iBAAiB,UAA4B;EAC3C,MAAM,QAAQ,KAAK,OAAO,IAAI,SAAS;AACvC,MAAI,CAAC,MACH,QAAO,CAAC,SAAS;EAGnB,MAAM,WAAW,IAAI,IAAY,CAAC,SAAS,CAAC;AAG5C,OAAK,MAAM,CAAC,WAAW,MAAM,UAAU,aAAa;GAClD,MAAM,aAAa,KAAK,iBAAiB,IAAI,OAAO;AACpD,OAAI,WACF,MAAK,MAAM,OAAO,WAChB,UAAS,IAAI,IAAI;;AAMvB,OAAK,MAAM,SAAS,MAAM,eAAe;GACvC,MAAM,aAAa,MAAM,OAAO,WAAW;GAC3C,MAAM,aAAa,KAAK,mBAAmB,IAAI,WAAW;AAC1D,OAAI,WACF,MAAK,MAAM,OAAO,WAChB,UAAS,IAAI,IAAI;;AAKvB,SAAO,MAAM,KAAK,SAAS;;;;;CAM7B,QAAc;AACZ,OAAK,OAAO,OAAO;AACnB,OAAK,UAAU,OAAO;AACtB,OAAK,gBAAgB,OAAO;AAC5B,OAAK,aAAa;GAChB,6BAAa,IAAI,KAAK;GACtB,4BAAY,IAAI,KAAK;GACtB;AAC
D,OAAK,iBAAiB,OAAO;AAC7B,OAAK,mBAAmB,OAAO;;;;;CAMjC,AAAQ,UAAgB;AACtB,OAAK,gBAAgB,OAAO;AAC5B,OAAK,aAAa;GAChB,6BAAa,IAAI,KAAK;GACtB,4BAAY,IAAI,KAAK;GACtB;AACD,OAAK,iBAAiB,OAAO;AAC7B,OAAK,mBAAmB,OAAO;AAE/B,OAAK,MAAM,SAAS,KAAK,OAAO,QAAQ,EAAE;AAExC,QAAK,MAAM,SAAS,MAAM,eAAe;IACvC,MAAM,aAAa,0BAA0B,OAAO,MAAM,MAAM,MAAM,UAAU;AAChF,QAAI,WACF,MAAK,gBAAgB,IAAI,WAAW;;AAKxC,QAAK,WAAW,MAAM;AAGtB,QAAK,yBAAyB,MAAM;;;;;;CAOxC,AAAQ,WAAW,OAA4B;AAE7C,OAAK,MAAM,CAAC,IAAI,QAAQ,MAAM,UAAU,YACtC,MAAK,WAAW,YAAY,IAAI,IAAI,IAAI;AAI1C,OAAK,MAAM,CAAC,IAAI,SAAS,MAAM,UAAU,YAAY;GACnD,MAAM,WAAW,KAAK,WAAW,WAAW,IAAI,GAAG,IAAI,EAAE;AACzD,YAAS,KAAK,GAAG,KAAK;AACtB,QAAK,WAAW,WAAW,IAAI,IAAI,SAAS;GAG5C,IAAI,OAAO,KAAK,iBAAiB,IAAI,GAAG;AACxC,OAAI,CAAC,MAAM;AACT,2BAAO,IAAI,KAAK;AAChB,SAAK,iBAAiB,IAAI,IAAI,KAAK;;AAErC,QAAK,IAAI,MAAM,KAAK;;;;;;CAOxB,AAAQ,yBAAyB,OAA4B;AAE3D,OAAK,MAAM,SAAS,MAAM,IAAI,QAC5B,KAAI,MAAM,SAAS,kBAAkB;GACnC,MAAM,aAAa,MAAM,OAAO;GAChC,IAAI,OAAO,KAAK,mBAAmB,IAAI,WAAW;AAClD,OAAI,CAAC,MAAM;AACT,2BAAO,IAAI,KAAK;AAChB,SAAK,mBAAmB,IAAI,YAAY,KAAK;;AAE/C,QAAK,IAAI,MAAM,KAAK;;;;;;CAQ1B,AAAQ,mBAAmB,MAAoB;AAE7C,OAAK,MAAM,QAAQ,KAAK,iBAAiB,QAAQ,CAC/C,MAAK,OAAO,KAAK;AAInB,OAAK,MAAM,QAAQ,KAAK,mBAAmB,QAAQ,CACjD,MAAK,OAAO,KAAK;;;;;CAOrB,AAAQ,wBACN,UACA,KACA,YACoB;EACpB,MAAMC,SAA6B;GACjC,eAAe,CAAC,SAAS;GACzB,gBAAgB;GAChB,cAAc;GACd,oBAAoB,EAAE;GACtB,gBAAgB,EAAE;GACnB;AAED,MAAI,IAAI,OAAO,WAAW,GAAG;GAE3B,MAAMC,aAAW,KAAK,OAAO,IAAI,SAAS;AAC1C,OAAIA,YAAU;AAEZ,SAAK,MAAM,CAAC,WAAWA,WAAS,UAAU,aAAa;AACrD,YAAO,eAAe,KAAK,OAAO;AAClC,YAAO,eAAe;;AAGxB,SAAK,MAAM,SAASA,WAAS,eAAe;AAC1C,YAAO,mBAAmB,KAAK,MAAM,OAAO,WAAW,MAAM;AAC7D,YAAO,iBAAiB;;AAG1B,SAAK,mBAAmB,SAAS;;GAUnC,MAAMJ,QAAuB;IAC3B,KAAK;KACH,MAAM;KACN,SAAS,EAAE;KACX,cAAc,EAAE;KAChB,UAXkB;MACpB,YAAY;MACZ,UAAU;MACV,eAAe;OAAE,KAAK;OAAG,QAAQ;OAAG;MACpC,aAAa;OAAE,KAAK;OAAG,QAAQ;OAAG;MACnC;KAQG,YAAY;KACb;IACD,MAAM;IACN,QAAQ,IAAI;IACZ,WAAW,mBAAmB;IAC9B,QAAQ,EAAE;IACV,WAAW;KAAE,6BAAa,IAAI,KAAK;KAAE,4BAAY,IAAI,KAAK;KAAE;IAC5D,eAAe,EAAE;IAClB;AACD,QAAK,OAAO,IAAI,UAAU,MAAM;AAChC,QAAK,SAAS;AAEd,UAAO,gBAAgB,KAAK,iBAAiB,SAAS;AACtD,UAAO;;EAIT,MAAM,QAAQ,IAAI,OAAO;EAEzB,MAAM,SAAS,kBAAkB,MAAM,KAAK,SAA6C;EACzF,MAAM,eAAe,MAAM;EAC3B,MAAM,YAAY,IAAI,OAAO,KAAK,OAAO;GACvC,QAAQ,EAAE;GACV,WAAW,EAAE;GACb,MAAM,EAAE;GACT,EAAE;EAEH,MAAM,WAAW,KAAK,OAAO,IAAI,SAAS;AAC1C,MAAI,YAAY,CAAC,WAAW,aAAa;GAEvC,MAAM,eAAe,oBACnB,UACA,QACA,IAAI,QACJ,cACA,UACD;AAED,UAAO,eACL,aAAa,qBAAqB,SAAS,KAC3C,aAAa,uBAAuB,SAAS;AAC/C,UAAO,iBAAiB,CACtB,GAAG,aAAa,sBAChB,GAAG,aAAa,uBACjB;AACD,UAAO,iBAAiB,aAAa;AACrC,UAAO,qBAAqB,aAAa;AAGzC,OAAI,OAAO,kBAAkB,OAAO,aAClC,MAAK,SAAS;SAEX;GAEL,MAAM,QAAQ,QAAQ,QAAQ;IAC5B,MAAM;IACN,QAAQ,IAAI;IACZ,WAAW;IACX,QAAQ;IACT,CAAC;AAGF,OAAI,UAAU;AAEZ,SAAK,MAAM,CAAC,WAAW,SAAS,UAAU,YACxC,KAAI,CAAC,MAAM,UAAU,YAAY,IAAI,OAAO,EAAE;AAC5C,YAAO,eAAe,KAAK,OAAO;AAClC,YAAO,eAAe;;AAG1B,SAAK,MAAM,CAAC,WAAW,MAAM,UAAU,YACrC,KAAI,CAAC,SAAS,UAAU,YAAY,IAAI,OAAO,EAAE;AAC/C,YAAO,eAAe,KAAK,OAAO;AAClC,YAAO,eAAe;;IAK1B,MAAM,iBAAiB,IAAI,IACzB,SAAS,cAAc,KAAK,MAAM,EAAE,OAAO,WAAW,MAAM,CAC7D;IACD,MAAM,iBAAiB,IAAI,IAAI,MAAM,cAAc,KAAK,MAAM,EAAE,OAAO,WAAW,MAAM,CAAC;AAEzF,SAAK,MAAM,QAAQ,eACjB,KAAI,CAAC,eAAe,IAAI,KAAK,EAAE;AAC7B,YAAO,mBAAmB,KAAK,KAAK;AACpC,YAAO,iBAAiB;;AAG5B,SAAK,MAAM,QAAQ,eACjB,KAAI,CAAC,eAAe,IAAI,KAAK,EAAE;AAC7B,YAAO,mBAAmB,KAAK,KAAK;AACpC,YAAO,iBAAiB;;UAGvB;AAEL,SAAK,MAAM,CAAC,WAAW,MAAM,UAAU,YACrC,QAAO,eAAe,KAAK,OAAO;AAEpC,SAAK,MAAM,SAAS,MAAM,cACxB,QAAO,mBAAmB,KAAK,MAAM,OAAO,WAAW,MAAM;AAE/D,WAAO,eAAe,OAAO,eAAe,SAAS;AACrD,WAAO,iBAAiB,OAAO,mBAAmB,SAAS;;AAG7D,QAAK,OAAO,IAAI,UAAU,MAAM;AAChC,QAAK,SAAS;;AAIhB,SAAO,gBAAgB,KAAK,iBAAiB,SAAS;AAEtD,SAAO;;
;;;;;AAQX,SAAS,0BACP,OACA,MACA,WACyB;CAEzB,MAAM,YAAY,MAAM,OAAO;AAC/B,KAAI,cAAc,mBAAmB,cAAc,eACjD,QAAO;CAGT,MAAM,YAAY,gBAAgB,MAAM,OAAO,UAAU;CACzD,MAAM,SAAS,MAAM,eAAe,UAAU,EAAE;CAChD,MAAM,WAAW,MAAM,eAAe,YAAY,EAAE;CACpD,MAAM,eAAe,MAAM,qBAAqB,UAAU,EAAE;CAC5D,MAAM,iBAAiB,MAAM,qBAAqB,YAAY,EAAE;AAEhE,QAAO;EACL,MAAM;EACN;EACA;EACA,YAAY,MAAM,OAAO,WAAW;EACpC,OAAO,MAAM,OAAO,OAAO,SAAS;EACpC,QAAQ,MAAM,OAAO,MAAM,MAAM;EACjC,MAAM,MAAM,OAAO,KAAK,KAAK,MAAM,EAAE,KAAK;EAC1C,QAAQ,OAAO,IAAI,uBAAuB;EAC1C,UAAU,SAAS,IAAI,yBAAyB;EAChD,cAAc,aAAa,KAAK,MAAM,EAAE,KAAK,MAAM;EACnD,gBAAgB,eAAe,KAAK,MAAM,EAAE,KAAK,MAAM;EACvD,UAAU,MAAM;EAChB;EACA;EACD;;AAGH,SAAS,uBAAuB,OAA8C;AAC5E,QAAO;EACL,MAAM,MAAM,KAAK;EACjB,UAAU,MAAM;EAChB,MAAM,sBAAsB,MAAM,SAAS;EAC3C,cAAc,MAAM,eAAe,oBAAoB,MAAM,aAAa,GAAG;EAC7E,aAAa,MAAM,aAAa,SAAS;EACzC,UAAU,MAAM;EACjB;;AAGH,SAAS,yBAAyB,SAAoD;AACpF,QAAO;EACL,MAAM,QAAQ,KAAK;EACnB,UAAU,QAAQ;EAClB,aAAa,QAAQ,aAAa,SAAS;EAC3C,UAAU,QAAQ;EACnB;;AAGH,SAAS,sBACP,MACqB;AACrB,SAAQ,KAAK,MAAb;EACE,KAAK,iBACH,QAAO;GAAE,MAAM;GAAa,MAAM,KAAK;GAAM;EAC/C,KAAK,eACH,QAAO;GAAE,MAAM;GAAW,OAAO,KAAK;GAAO;EAC/C,KAAK,aAEH,QAAO;GACL,MAAM;GACN,aAAa,sBAAsB,KAAK,YAAY;GAIrD;EACH,KAAK,aAEH,QAAO;GACL,MAAM;GACN,SAAS,KAAK,QAAQ,KACnB,MAAM,sBAAsB,EAAE,CAChC;GACF;EACH,KAAK,eAEH,QAAO;GAAE,MAAM;GAAW,MAAM,KAAK;GAAM;;;AAIjD,SAAS,oBAAoB,cAAkD;CAC7E,MAAM,MAAM,aAAa;AACzB,SAAQ,aAAa,QAAQ,MAA7B;EACE,KAAK,eACH,QAAO;GAAE,MAAM;GAAU,OAAO,aAAa,QAAQ;GAAO;GAAK;EACnE,KAAK,OACH,QAAO;GAAE,MAAM;GAAQ,IAAI,aAAa,QAAQ;GAAI;GAAK;EAC3D,KAAK,iBACH,QAAO;GAAE,MAAM;GAAY,OAAO,aAAa,QAAQ;GAAO;GAAK;EACrE,KAAK,eACH,QAAO;GAAE,MAAM;GAAU,OAAO,aAAa,QAAQ;GAAO;GAAK"}
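Note on the workspace source embedded in the map above: it keeps, per link id and per entity name, the set of files that reference them (linkDependencies / entityDependencies), so that after a file changes the workspace can report which other files are affected. Reduced to a standalone TypeScript sketch for readability (the names and the reduced API here are illustrative, not the package's public surface):

// Illustrative sketch of the dependency-index pattern used by the workspace code above.
const entityDependencies = new Map<string, Set<string>>();

function recordEntityUse(entityName: string, file: string): void {
  // Same shape as updateEntityDependencies: lazily create the per-entity set, then add the file.
  let deps = entityDependencies.get(entityName);
  if (!deps) {
    deps = new Set();
    entityDependencies.set(entityName, deps);
  }
  deps.add(file);
}

function removeFile(file: string): void {
  // Mirror of removeDependencies: drop the file from every entity's dependent set.
  for (const deps of entityDependencies.values()) deps.delete(file);
}

function filesAffectedBy(entityName: string): string[] {
  return Array.from(entityDependencies.get(entityName) ?? new Set<string>());
}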
package/dist/parser.js
ADDED
@@ -0,0 +1,27 @@
+import { createParser } from "./parser.native.js";
+
+//#region src/parser.ts
+let parserInstance;
+/**
+* Get or create the singleton parser instance.
+*
+* @returns The singleton ThaloParser instance
+*/
+function getParser() {
+if (!parserInstance) parserInstance = createParser();
+return parserInstance;
+}
+/**
+* Parse a document, automatically detecting if it's a .thalo file or markdown with embedded thalo blocks.
+*
+* @param source - The source code to parse
+* @param options - Parse options including fileType and filename
+* @returns A ParsedDocument containing one or more parsed blocks
+*/
+const parseDocument = (source, options) => {
+return getParser().parseDocument(source, options);
+};
+
+//#endregion
+export { parseDocument };
+//# sourceMappingURL=parser.js.map
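For orientation, the dist/parser.js chunk above is the convenience entry point its own comments describe: a lazily created singleton parser behind parseDocument. A minimal usage sketch follows; the import specifier and the fileType value are assumptions, while the option keys fileType/filename and the doc.blocks shape are taken from the sources in this diff:

// Sketch only: assumes parseDocument is re-exported from the package root.
import { readFileSync } from "node:fs";
import { parseDocument } from "@rejot-dev/thalo";

const source = readFileSync("tasks.thalo", "utf8"); // hypothetical input file
const doc = parseDocument(source, {
  fileType: "thalo", // assumed value; the doc comment only names the option keys
  filename: "tasks.thalo",
});

// Each parsed block carries its own tree and source slice; the workspace code
// earlier in this diff reads block.tree.rootNode, block.source, and block.sourceMap.
for (const block of doc.blocks) {
  console.log(block.tree.rootNode.type);
}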
package/dist/parser.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"parser.js","names":["parserInstance: ThaloParser<Tree> | undefined","parseDocument: ThaloParser<Tree>[\"parseDocument\"]"],"sources":["../src/parser.ts"],"sourcesContent":["/**\n * Default parser entry point using native tree-sitter bindings.\n *\n * This module provides a singleton parser instance for convenience.\n * For more control, use `@rejot-dev/thalo/native` directly.\n */\n\nimport type { Tree } from \"tree-sitter\";\nimport { createParser, type ThaloParser } from \"./parser.native.js\";\n\n// Re-export types\nexport type {\n ParsedBlock,\n ParsedDocument,\n FileType,\n ParseOptions,\n ThaloParser,\n} from \"./parser.native.js\";\n\n// Lazy singleton parser instance\nlet parserInstance: ThaloParser<Tree> | undefined;\n\n/**\n * Get or create the singleton parser instance.\n *\n * @returns The singleton ThaloParser instance\n */\nfunction getParser(): ThaloParser<Tree> {\n if (!parserInstance) {\n parserInstance = createParser();\n }\n return parserInstance;\n}\n\n/**\n * Parse a thalo source string into a tree-sitter Tree.\n *\n * @param source - The thalo source code to parse\n * @returns The parsed tree-sitter Tree\n */\nexport const parseThalo = (source: string): Tree => {\n return getParser().parse(source);\n};\n\n/**\n * Parse a thalo source string with optional incremental parsing.\n *\n * When an oldTree is provided, tree-sitter can reuse unchanged parts of the\n * parse tree, making parsing much faster for small edits.\n *\n * Note: Before calling this with an oldTree, you must call oldTree.edit()\n * to inform tree-sitter about the changes made to the source.\n *\n * @param source - The thalo source code to parse\n * @param oldTree - Optional previous tree for incremental parsing\n * @returns The parsed tree-sitter Tree\n */\nexport const parseThaloIncremental = (source: string, oldTree?: Tree): Tree => {\n return getParser().parseIncremental(source, oldTree);\n};\n\n/**\n * Parse a document, automatically detecting if it's a .thalo file or markdown with embedded thalo blocks.\n *\n * @param source - The source code to parse\n * @param options - Parse options including fileType and filename\n * @returns A ParsedDocument containing one or more parsed blocks\n */\nexport const parseDocument: ThaloParser<Tree>[\"parseDocument\"] = (source, options) => {\n return getParser().parseDocument(source, options);\n};\n"],"mappings":";;;AAoBA,IAAIA;;;;;;AAOJ,SAAS,YAA+B;AACtC,KAAI,CAAC,eACH,kBAAiB,cAAc;AAEjC,QAAO;;;;;;;;;AAqCT,MAAaC,iBAAqD,QAAQ,YAAY;AACpF,QAAO,WAAW,CAAC,cAAc,QAAQ,QAAQ"}
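The source map above also shows parseThalo and parseThaloIncremental in src/parser.ts, together with the note that an old tree must be told about edits before it can be reused. A sketch of that incremental flow (the import specifier is an assumption; the tree.edit argument shape is tree-sitter's standard Edit object, not something defined by this package):

// Sketch of the incremental-parse contract documented in src/parser.ts above.
import { parseThalo, parseThaloIncremental } from "@rejot-dev/thalo";

let source = ""; // placeholder document text
let tree = parseThalo(source);

// Suppose one character is inserted at the very start of the document.
source = "x" + source;

// Inform tree-sitter about the change so unchanged subtrees can be reused.
tree.edit({
  startIndex: 0,
  oldEndIndex: 0,
  newEndIndex: 1,
  startPosition: { row: 0, column: 0 },
  oldEndPosition: { row: 0, column: 0 },
  newEndPosition: { row: 0, column: 1 },
});

tree = parseThaloIncremental(source, tree);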