@rejot-dev/thalo 0.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +396 -0
- package/dist/ast/ast-types.d.ts +469 -0
- package/dist/ast/ast-types.d.ts.map +1 -0
- package/dist/ast/ast-types.js +11 -0
- package/dist/ast/ast-types.js.map +1 -0
- package/dist/ast/builder.js +158 -0
- package/dist/ast/builder.js.map +1 -0
- package/dist/ast/extract.js +748 -0
- package/dist/ast/extract.js.map +1 -0
- package/dist/ast/node-at-position.d.ts +147 -0
- package/dist/ast/node-at-position.d.ts.map +1 -0
- package/dist/ast/node-at-position.js +382 -0
- package/dist/ast/node-at-position.js.map +1 -0
- package/dist/ast/visitor.js +232 -0
- package/dist/ast/visitor.js.map +1 -0
- package/dist/checker/check.d.ts +53 -0
- package/dist/checker/check.d.ts.map +1 -0
- package/dist/checker/check.js +105 -0
- package/dist/checker/check.js.map +1 -0
- package/dist/checker/rules/actualize-missing-updated.js +34 -0
- package/dist/checker/rules/actualize-missing-updated.js.map +1 -0
- package/dist/checker/rules/actualize-unresolved-target.js +42 -0
- package/dist/checker/rules/actualize-unresolved-target.js.map +1 -0
- package/dist/checker/rules/alter-before-define.js +53 -0
- package/dist/checker/rules/alter-before-define.js.map +1 -0
- package/dist/checker/rules/alter-undefined-entity.js +32 -0
- package/dist/checker/rules/alter-undefined-entity.js.map +1 -0
- package/dist/checker/rules/create-requires-section.js +34 -0
- package/dist/checker/rules/create-requires-section.js.map +1 -0
- package/dist/checker/rules/define-entity-requires-section.js +31 -0
- package/dist/checker/rules/define-entity-requires-section.js.map +1 -0
- package/dist/checker/rules/duplicate-entity-definition.js +37 -0
- package/dist/checker/rules/duplicate-entity-definition.js.map +1 -0
- package/dist/checker/rules/duplicate-field-in-schema.js +38 -0
- package/dist/checker/rules/duplicate-field-in-schema.js.map +1 -0
- package/dist/checker/rules/duplicate-link-id.js +52 -0
- package/dist/checker/rules/duplicate-link-id.js.map +1 -0
- package/dist/checker/rules/duplicate-metadata-key.js +21 -0
- package/dist/checker/rules/duplicate-metadata-key.js.map +1 -0
- package/dist/checker/rules/duplicate-section-heading.js +41 -0
- package/dist/checker/rules/duplicate-section-heading.js.map +1 -0
- package/dist/checker/rules/duplicate-section-in-schema.js +38 -0
- package/dist/checker/rules/duplicate-section-in-schema.js.map +1 -0
- package/dist/checker/rules/duplicate-timestamp.js +104 -0
- package/dist/checker/rules/duplicate-timestamp.js.map +1 -0
- package/dist/checker/rules/empty-required-value.js +45 -0
- package/dist/checker/rules/empty-required-value.js.map +1 -0
- package/dist/checker/rules/empty-section.js +21 -0
- package/dist/checker/rules/empty-section.js.map +1 -0
- package/dist/checker/rules/invalid-date-range-value.js +56 -0
- package/dist/checker/rules/invalid-date-range-value.js.map +1 -0
- package/dist/checker/rules/invalid-default-value.js +86 -0
- package/dist/checker/rules/invalid-default-value.js.map +1 -0
- package/dist/checker/rules/invalid-field-type.js +45 -0
- package/dist/checker/rules/invalid-field-type.js.map +1 -0
- package/dist/checker/rules/missing-required-field.js +48 -0
- package/dist/checker/rules/missing-required-field.js.map +1 -0
- package/dist/checker/rules/missing-required-section.js +51 -0
- package/dist/checker/rules/missing-required-section.js.map +1 -0
- package/dist/checker/rules/missing-title.js +56 -0
- package/dist/checker/rules/missing-title.js.map +1 -0
- package/dist/checker/rules/remove-undefined-field.js +42 -0
- package/dist/checker/rules/remove-undefined-field.js.map +1 -0
- package/dist/checker/rules/remove-undefined-section.js +42 -0
- package/dist/checker/rules/remove-undefined-section.js.map +1 -0
- package/dist/checker/rules/rules.d.ts +71 -0
- package/dist/checker/rules/rules.d.ts.map +1 -0
- package/dist/checker/rules/rules.js +102 -0
- package/dist/checker/rules/rules.js.map +1 -0
- package/dist/checker/rules/synthesis-empty-query.js +35 -0
- package/dist/checker/rules/synthesis-empty-query.js.map +1 -0
- package/dist/checker/rules/synthesis-missing-prompt.js +42 -0
- package/dist/checker/rules/synthesis-missing-prompt.js.map +1 -0
- package/dist/checker/rules/synthesis-missing-sources.js +32 -0
- package/dist/checker/rules/synthesis-missing-sources.js.map +1 -0
- package/dist/checker/rules/synthesis-unknown-query-entity.js +39 -0
- package/dist/checker/rules/synthesis-unknown-query-entity.js.map +1 -0
- package/dist/checker/rules/timestamp-out-of-order.js +55 -0
- package/dist/checker/rules/timestamp-out-of-order.js.map +1 -0
- package/dist/checker/rules/unknown-entity.js +32 -0
- package/dist/checker/rules/unknown-entity.js.map +1 -0
- package/dist/checker/rules/unknown-field.js +40 -0
- package/dist/checker/rules/unknown-field.js.map +1 -0
- package/dist/checker/rules/unknown-section.js +47 -0
- package/dist/checker/rules/unknown-section.js.map +1 -0
- package/dist/checker/rules/unresolved-link.js +34 -0
- package/dist/checker/rules/unresolved-link.js.map +1 -0
- package/dist/checker/rules/update-without-create.js +65 -0
- package/dist/checker/rules/update-without-create.js.map +1 -0
- package/dist/checker/visitor.d.ts +69 -0
- package/dist/checker/visitor.d.ts.map +1 -0
- package/dist/checker/visitor.js +67 -0
- package/dist/checker/visitor.js.map +1 -0
- package/dist/checker/workspace-index.d.ts +50 -0
- package/dist/checker/workspace-index.d.ts.map +1 -0
- package/dist/checker/workspace-index.js +108 -0
- package/dist/checker/workspace-index.js.map +1 -0
- package/dist/commands/actualize.d.ts +113 -0
- package/dist/commands/actualize.d.ts.map +1 -0
- package/dist/commands/actualize.js +111 -0
- package/dist/commands/actualize.js.map +1 -0
- package/dist/commands/check.d.ts +65 -0
- package/dist/commands/check.d.ts.map +1 -0
- package/dist/commands/check.js +61 -0
- package/dist/commands/check.js.map +1 -0
- package/dist/commands/format.d.ts +90 -0
- package/dist/commands/format.d.ts.map +1 -0
- package/dist/commands/format.js +80 -0
- package/dist/commands/format.js.map +1 -0
- package/dist/commands/query.d.ts +152 -0
- package/dist/commands/query.d.ts.map +1 -0
- package/dist/commands/query.js +151 -0
- package/dist/commands/query.js.map +1 -0
- package/dist/constants.d.ts +31 -0
- package/dist/constants.d.ts.map +1 -0
- package/dist/constants.js +51 -0
- package/dist/constants.js.map +1 -0
- package/dist/files.d.ts +58 -0
- package/dist/files.d.ts.map +1 -0
- package/dist/files.js +103 -0
- package/dist/files.js.map +1 -0
- package/dist/formatters.d.ts +39 -0
- package/dist/formatters.d.ts.map +1 -0
- package/dist/formatters.js +200 -0
- package/dist/formatters.js.map +1 -0
- package/dist/fragment.d.ts +22 -0
- package/dist/fragment.d.ts.map +1 -0
- package/dist/git/git.js +240 -0
- package/dist/git/git.js.map +1 -0
- package/dist/merge/conflict-detector.d.ts +89 -0
- package/dist/merge/conflict-detector.d.ts.map +1 -0
- package/dist/merge/conflict-detector.js +352 -0
- package/dist/merge/conflict-detector.js.map +1 -0
- package/dist/merge/conflict-formatter.js +143 -0
- package/dist/merge/conflict-formatter.js.map +1 -0
- package/dist/merge/driver.d.ts +54 -0
- package/dist/merge/driver.d.ts.map +1 -0
- package/dist/merge/driver.js +112 -0
- package/dist/merge/driver.js.map +1 -0
- package/dist/merge/entry-matcher.d.ts +50 -0
- package/dist/merge/entry-matcher.d.ts.map +1 -0
- package/dist/merge/entry-matcher.js +141 -0
- package/dist/merge/entry-matcher.js.map +1 -0
- package/dist/merge/entry-merger.js +194 -0
- package/dist/merge/entry-merger.js.map +1 -0
- package/dist/merge/merge-result-builder.d.ts +62 -0
- package/dist/merge/merge-result-builder.d.ts.map +1 -0
- package/dist/merge/merge-result-builder.js +89 -0
- package/dist/merge/merge-result-builder.js.map +1 -0
- package/dist/mod.d.ts +31 -0
- package/dist/mod.js +23 -0
- package/dist/model/document.d.ts +134 -0
- package/dist/model/document.d.ts.map +1 -0
- package/dist/model/document.js +275 -0
- package/dist/model/document.js.map +1 -0
- package/dist/model/line-index.d.ts +85 -0
- package/dist/model/line-index.d.ts.map +1 -0
- package/dist/model/line-index.js +159 -0
- package/dist/model/line-index.js.map +1 -0
- package/dist/model/workspace.d.ts +296 -0
- package/dist/model/workspace.d.ts.map +1 -0
- package/dist/model/workspace.js +562 -0
- package/dist/model/workspace.js.map +1 -0
- package/dist/parser.js +27 -0
- package/dist/parser.js.map +1 -0
- package/dist/parser.native.d.ts +51 -0
- package/dist/parser.native.d.ts.map +1 -0
- package/dist/parser.native.js +62 -0
- package/dist/parser.native.js.map +1 -0
- package/dist/parser.shared.d.ts +99 -0
- package/dist/parser.shared.d.ts.map +1 -0
- package/dist/parser.shared.js +124 -0
- package/dist/parser.shared.js.map +1 -0
- package/dist/parser.web.d.ts +67 -0
- package/dist/parser.web.d.ts.map +1 -0
- package/dist/parser.web.js +49 -0
- package/dist/parser.web.js.map +1 -0
- package/dist/schema/registry.d.ts +108 -0
- package/dist/schema/registry.d.ts.map +1 -0
- package/dist/schema/registry.js +281 -0
- package/dist/schema/registry.js.map +1 -0
- package/dist/semantic/analyzer.d.ts +107 -0
- package/dist/semantic/analyzer.d.ts.map +1 -0
- package/dist/semantic/analyzer.js +261 -0
- package/dist/semantic/analyzer.js.map +1 -0
- package/dist/services/change-tracker/change-tracker.d.ts +111 -0
- package/dist/services/change-tracker/change-tracker.d.ts.map +1 -0
- package/dist/services/change-tracker/change-tracker.js +62 -0
- package/dist/services/change-tracker/change-tracker.js.map +1 -0
- package/dist/services/change-tracker/create-tracker.d.ts +42 -0
- package/dist/services/change-tracker/create-tracker.d.ts.map +1 -0
- package/dist/services/change-tracker/create-tracker.js +53 -0
- package/dist/services/change-tracker/create-tracker.js.map +1 -0
- package/dist/services/change-tracker/git-tracker.d.ts +59 -0
- package/dist/services/change-tracker/git-tracker.d.ts.map +1 -0
- package/dist/services/change-tracker/git-tracker.js +218 -0
- package/dist/services/change-tracker/git-tracker.js.map +1 -0
- package/dist/services/change-tracker/timestamp-tracker.d.ts +22 -0
- package/dist/services/change-tracker/timestamp-tracker.d.ts.map +1 -0
- package/dist/services/change-tracker/timestamp-tracker.js +74 -0
- package/dist/services/change-tracker/timestamp-tracker.js.map +1 -0
- package/dist/services/definition.d.ts +37 -0
- package/dist/services/definition.d.ts.map +1 -0
- package/dist/services/definition.js +43 -0
- package/dist/services/definition.js.map +1 -0
- package/dist/services/entity-navigation.d.ts +200 -0
- package/dist/services/entity-navigation.d.ts.map +1 -0
- package/dist/services/entity-navigation.js +211 -0
- package/dist/services/entity-navigation.js.map +1 -0
- package/dist/services/hover.d.ts +81 -0
- package/dist/services/hover.d.ts.map +1 -0
- package/dist/services/hover.js +669 -0
- package/dist/services/hover.js.map +1 -0
- package/dist/services/query.d.ts +116 -0
- package/dist/services/query.d.ts.map +1 -0
- package/dist/services/query.js +225 -0
- package/dist/services/query.js.map +1 -0
- package/dist/services/references.d.ts +52 -0
- package/dist/services/references.d.ts.map +1 -0
- package/dist/services/references.js +66 -0
- package/dist/services/references.js.map +1 -0
- package/dist/services/semantic-tokens.d.ts +54 -0
- package/dist/services/semantic-tokens.d.ts.map +1 -0
- package/dist/services/semantic-tokens.js +213 -0
- package/dist/services/semantic-tokens.js.map +1 -0
- package/dist/services/synthesis.d.ts +90 -0
- package/dist/services/synthesis.d.ts.map +1 -0
- package/dist/services/synthesis.js +113 -0
- package/dist/services/synthesis.js.map +1 -0
- package/dist/source-map.d.ts +42 -0
- package/dist/source-map.d.ts.map +1 -0
- package/dist/source-map.js +170 -0
- package/dist/source-map.js.map +1 -0
- package/package.json +128 -0
- package/tree-sitter-thalo.wasm +0 -0
- package/web-tree-sitter.wasm +0 -0
package/dist/merge/merge-result-builder.js
ADDED
@@ -0,0 +1,89 @@
import { serializeIdentity } from "./entry-matcher.js";
import { formatConflict, formatEntry } from "./conflict-formatter.js";
import { entriesEqual, mergeEntry } from "./entry-merger.js";

//#region src/merge/merge-result-builder.ts
/**
* Build the final merged result from matches and conflicts
*
* @param matches - Array of matched entry triplets
* @param conflicts - Array of detected conflicts
* @param options - Merge options
* @returns Complete merge result with content and statistics
*/
function buildMergedResult(matches, conflicts, options = {}) {
	const conflictMap = /* @__PURE__ */ new Map();
	for (const conflict of conflicts) {
		const key = serializeIdentity(conflict.identity);
		conflictMap.set(key, conflict);
	}
	const lines = [];
	const stats = {
		totalEntries: 0,
		oursOnly: 0,
		theirsOnly: 0,
		common: 0,
		autoMerged: 0,
		conflicts: conflicts.length
	};
	const sortedMatches = sortMatchesByTimestamp(matches);
	for (const match of sortedMatches) {
		const matchKey = serializeIdentity(match.identity);
		const conflict = conflictMap.get(matchKey);
		if (conflict) {
			const conflictLines = formatConflict(conflict, options);
			conflict.location = lines.length + 1;
			lines.push(...conflictLines);
			lines.push("");
		} else {
			const merged = mergeEntry(match);
			if (merged) {
				lines.push(...formatEntry(merged));
				lines.push("");
				stats.totalEntries++;
				if (!match.base && match.ours && !match.theirs) stats.oursOnly++;
				else if (!match.base && !match.ours && match.theirs) stats.theirsOnly++;
				else if (match.base && match.ours && match.theirs) if (entriesEqual(match.base, match.ours) && entriesEqual(match.base, match.theirs)) stats.common++;
				else stats.autoMerged++;
			}
		}
	}
	if (lines.length > 0 && lines[lines.length - 1] === "") lines.pop();
	return {
		success: conflicts.length === 0,
		content: lines.join("\n"),
		conflicts,
		stats
	};
}
/**
* Sort matches by timestamp for chronological output
*/
function sortMatchesByTimestamp(matches) {
	return [...matches].sort((a, b) => {
		const tsA = getMatchTimestamp(a);
		const tsB = getMatchTimestamp(b);
		if (!tsA && !tsB) return 0;
		if (!tsA) return 1;
		if (!tsB) return -1;
		return tsA.localeCompare(tsB);
	});
}
/**
* Get timestamp from a match (prefer ours, then theirs, then base)
*/
function getMatchTimestamp(match) {
	const entry = match.ours || match.theirs || match.base;
	if (!entry) return null;
	switch (entry.type) {
		case "instance_entry":
		case "schema_entry":
		case "synthesis_entry":
		case "actualize_entry": return entry.header.timestamp.value;
		default: return null;
	}
}

//#endregion
export { buildMergedResult };
//# sourceMappingURL=merge-result-builder.js.map
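The stats object returned above classifies every merged entry as ours-only, theirs-only, unchanged (common), or auto-merged, and success simply means no conflicts were detected. A minimal consumption sketch in TypeScript, assuming the package entry resolves to dist/mod.js (which re-exports the MergeResult type, see mod.d.ts below); the reportMerge helper is hypothetical and not part of the package:

import type { MergeResult } from "@rejot-dev/thalo";

// Hypothetical helper: summarize a MergeResult produced upstream by the merge pipeline.
function reportMerge(result: MergeResult): string {
  if (!result.success) {
    // When success is false, result.content includes conflict markers.
    return `merge stopped with ${result.stats.conflicts} conflict(s)`;
  }
  const s = result.stats;
  return `merged ${s.totalEntries} entries: ${s.oursOnly} ours-only, ${s.theirsOnly} theirs-only, ${s.common} unchanged, ${s.autoMerged} auto-merged`;
}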
package/dist/merge/merge-result-builder.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"merge-result-builder.js","names":["lines: string[]","stats: MergeStats"],"sources":["../../src/merge/merge-result-builder.ts"],"sourcesContent":["import type { EntryMatch } from \"./entry-matcher.js\";\nimport type { MergeConflict } from \"./conflict-detector.js\";\nimport type { MergeOptions } from \"./driver.js\";\nimport { formatConflict, formatEntry } from \"./conflict-formatter.js\";\nimport { mergeEntry, entriesEqual } from \"./entry-merger.js\";\nimport { serializeIdentity } from \"./entry-matcher.js\";\n\n/**\n * Statistics about a merge operation\n */\nexport interface MergeStats {\n /**\n * Total entries in merged result\n */\n totalEntries: number;\n\n /**\n * Entries present only in ours (additions)\n */\n oursOnly: number;\n\n /**\n * Entries present only in theirs (additions)\n */\n theirsOnly: number;\n\n /**\n * Entries present in all three versions unchanged\n */\n common: number;\n\n /**\n * Entries successfully auto-merged\n */\n autoMerged: number;\n\n /**\n * Number of conflicts detected\n */\n conflicts: number;\n}\n\n/**\n * Result of a three-way merge operation\n */\nexport interface MergeResult {\n /**\n * Whether the merge completed without conflicts\n * - `true`: Clean merge, all changes reconciled\n * - `false`: Conflicts detected, manual resolution required\n */\n success: boolean;\n\n /**\n * The merged content as a string\n * - On success: Clean merged entries\n * - On failure: Includes conflict markers\n */\n content: string;\n\n /**\n * List of conflicts detected during merge\n * Empty array if success is true\n */\n conflicts: MergeConflict[];\n\n /**\n * Statistics about the merge operation\n */\n stats: MergeStats;\n}\n\n/**\n * Build the final merged result from matches and conflicts\n *\n * @param matches - Array of matched entry triplets\n * @param conflicts - Array of detected conflicts\n * @param options - Merge options\n * @returns Complete merge result with content and statistics\n */\nexport function buildMergedResult(\n matches: EntryMatch[],\n conflicts: MergeConflict[],\n options: MergeOptions = {},\n): MergeResult {\n const conflictMap = new Map<string, MergeConflict>();\n for (const conflict of conflicts) {\n const key = serializeIdentity(conflict.identity);\n conflictMap.set(key, conflict);\n }\n\n const lines: string[] = [];\n const stats: MergeStats = {\n totalEntries: 0,\n oursOnly: 0,\n theirsOnly: 0,\n common: 0,\n autoMerged: 0,\n conflicts: conflicts.length,\n };\n\n const sortedMatches = sortMatchesByTimestamp(matches);\n\n for (const match of sortedMatches) {\n const matchKey = serializeIdentity(match.identity);\n const conflict = conflictMap.get(matchKey);\n\n if (conflict) {\n const conflictLines = formatConflict(conflict, options);\n conflict.location = lines.length + 1;\n lines.push(...conflictLines);\n lines.push(\"\");\n } else {\n const merged = mergeEntry(match);\n if (merged) {\n lines.push(...formatEntry(merged));\n lines.push(\"\");\n\n stats.totalEntries++;\n if (!match.base && match.ours && !match.theirs) {\n stats.oursOnly++;\n } else if (!match.base && !match.ours && match.theirs) {\n stats.theirsOnly++;\n } else if (match.base && match.ours && match.theirs) {\n if (entriesEqual(match.base, match.ours) && entriesEqual(match.base, match.theirs)) {\n stats.common++;\n } else {\n stats.autoMerged++;\n }\n }\n }\n }\n }\n\n if (lines.length > 0 && lines[lines.length - 1] === \"\") {\n lines.pop();\n }\n\n return {\n success: conflicts.length === 0,\n content: lines.join(\"\\n\"),\n conflicts,\n 
stats,\n };\n}\n\n/**\n * Sort matches by timestamp for chronological output\n */\nfunction sortMatchesByTimestamp(matches: EntryMatch[]): EntryMatch[] {\n return [...matches].sort((a, b) => {\n const tsA = getMatchTimestamp(a);\n const tsB = getMatchTimestamp(b);\n\n if (!tsA && !tsB) {\n return 0;\n }\n if (!tsA) {\n return 1;\n }\n if (!tsB) {\n return -1;\n }\n\n return tsA.localeCompare(tsB);\n });\n}\n\n/**\n * Get timestamp from a match (prefer ours, then theirs, then base)\n */\nfunction getMatchTimestamp(match: EntryMatch): string | null {\n const entry = match.ours || match.theirs || match.base;\n if (!entry) {\n return null;\n }\n\n switch (entry.type) {\n case \"instance_entry\":\n case \"schema_entry\":\n case \"synthesis_entry\":\n case \"actualize_entry\":\n return entry.header.timestamp.value;\n default:\n return null;\n }\n}\n"],"mappings":";;;;;;;;;;;;;AAgFA,SAAgB,kBACd,SACA,WACA,UAAwB,EAAE,EACb;CACb,MAAM,8BAAc,IAAI,KAA4B;AACpD,MAAK,MAAM,YAAY,WAAW;EAChC,MAAM,MAAM,kBAAkB,SAAS,SAAS;AAChD,cAAY,IAAI,KAAK,SAAS;;CAGhC,MAAMA,QAAkB,EAAE;CAC1B,MAAMC,QAAoB;EACxB,cAAc;EACd,UAAU;EACV,YAAY;EACZ,QAAQ;EACR,YAAY;EACZ,WAAW,UAAU;EACtB;CAED,MAAM,gBAAgB,uBAAuB,QAAQ;AAErD,MAAK,MAAM,SAAS,eAAe;EACjC,MAAM,WAAW,kBAAkB,MAAM,SAAS;EAClD,MAAM,WAAW,YAAY,IAAI,SAAS;AAE1C,MAAI,UAAU;GACZ,MAAM,gBAAgB,eAAe,UAAU,QAAQ;AACvD,YAAS,WAAW,MAAM,SAAS;AACnC,SAAM,KAAK,GAAG,cAAc;AAC5B,SAAM,KAAK,GAAG;SACT;GACL,MAAM,SAAS,WAAW,MAAM;AAChC,OAAI,QAAQ;AACV,UAAM,KAAK,GAAG,YAAY,OAAO,CAAC;AAClC,UAAM,KAAK,GAAG;AAEd,UAAM;AACN,QAAI,CAAC,MAAM,QAAQ,MAAM,QAAQ,CAAC,MAAM,OACtC,OAAM;aACG,CAAC,MAAM,QAAQ,CAAC,MAAM,QAAQ,MAAM,OAC7C,OAAM;aACG,MAAM,QAAQ,MAAM,QAAQ,MAAM,OAC3C,KAAI,aAAa,MAAM,MAAM,MAAM,KAAK,IAAI,aAAa,MAAM,MAAM,MAAM,OAAO,CAChF,OAAM;QAEN,OAAM;;;;AAOhB,KAAI,MAAM,SAAS,KAAK,MAAM,MAAM,SAAS,OAAO,GAClD,OAAM,KAAK;AAGb,QAAO;EACL,SAAS,UAAU,WAAW;EAC9B,SAAS,MAAM,KAAK,KAAK;EACzB;EACA;EACD;;;;;AAMH,SAAS,uBAAuB,SAAqC;AACnE,QAAO,CAAC,GAAG,QAAQ,CAAC,MAAM,GAAG,MAAM;EACjC,MAAM,MAAM,kBAAkB,EAAE;EAChC,MAAM,MAAM,kBAAkB,EAAE;AAEhC,MAAI,CAAC,OAAO,CAAC,IACX,QAAO;AAET,MAAI,CAAC,IACH,QAAO;AAET,MAAI,CAAC,IACH,QAAO;AAGT,SAAO,IAAI,cAAc,IAAI;GAC7B;;;;;AAMJ,SAAS,kBAAkB,OAAkC;CAC3D,MAAM,QAAQ,MAAM,QAAQ,MAAM,UAAU,MAAM;AAClD,KAAI,CAAC,MACH,QAAO;AAGT,SAAQ,MAAM,MAAd;EACE,KAAK;EACL,KAAK;EACL,KAAK;EACL,KAAK,kBACH,QAAO,MAAM,OAAO,UAAU;EAChC,QACE,QAAO"}
package/dist/mod.d.ts
ADDED
@@ -0,0 +1,31 @@
import { ActualizeEntry, Entry, InstanceEntry, Location, Query, QueryCondition, SchemaEntry, SynthesisEntry, Timestamp, isSyntaxError } from "./ast/ast-types.js";
import { BlockMatch, Position as Position$1, SourceMap } from "./source-map.js";
import { FileType, GenericTree, ParseOptions, ParsedBlock, ParsedDocument, ThaloParser } from "./parser.shared.js";
import { LinkDefinition, LinkIndex, LinkReference, SemanticModel, SemanticModelDirtyFlags, SemanticUpdateResult } from "./semantic/analyzer.js";
import { EntitySchema, FieldSchema, SectionSchema, TypeExpr } from "./schema/registry.js";
import { Position } from "./model/line-index.js";
import { DocumentBlock, EditRange, EditResult } from "./model/document.js";
import { InvalidationResult, ModelDefaultValue, ModelFieldDefinition, ModelSchemaEntry, ModelSectionDefinition, ModelTypeExpression, Workspace } from "./model/workspace.js";
import { FieldCondition, LinkCondition, Query as Query$1, QueryCondition as QueryCondition$1, QueryOptions, TagCondition, astQueryToModelQuery, entryMatchesQuery, executeQueries, executeQuery, formatQuery, parseQueryString, validateQueryEntities } from "./services/query.js";
import { ChangeMarker, ChangeTracker, parseCheckpoint } from "./services/change-tracker/change-tracker.js";
import { ActualizeEntryInfo, ActualizeResult, DEFAULT_INSTRUCTIONS_TEMPLATE, InstructionsParams, RunActualizeOptions, SynthesisOutputInfo, generateInstructions, generateTimestamp, runActualize } from "./commands/actualize.js";
import { RULE_CATEGORIES, Rule, RuleCategory, Severity, allRules } from "./checker/rules/rules.js";
import { CheckConfig, Diagnostic, checkDocument } from "./checker/check.js";
import { CheckResult, DiagnosticInfo, DiagnosticSeverity, RunCheckOptions, runCheck } from "./commands/check.js";
import { FormatFileInput, FormatFileResult, FormatResult, Formatter, RunFormatOptions, SyntaxErrorInfo, runFormat } from "./commands/format.js";
import { CheckpointError, QueriesResult, QueryConditionInfo, QueryEntryInfo, QueryResult, QueryValidationError, RunQueryOptions, isCheckpointError, isQueriesSuccess, isQueryError, isQuerySuccess, isQueryValidationError, runQueries, runQuery } from "./commands/query.js";
import { DiagnosticFormat, formatDiagnostic, formatQueryResultRaw, formatTimestamp } from "./formatters.js";
import { FragmentType, ParsedFragment } from "./fragment.js";
import { DirectiveContext, EntityContext, FieldNameContext, LinkContext, MetadataKeyContext, NodeContext, SchemaEntityContext, SectionHeaderContext, SectionNameContext, TagContext, TimestampContext, TitleContext, TypeContext, UnknownContext, findNodeAtPosition } from "./ast/node-at-position.js";
import { DefinitionResult, findDefinition, findDefinitionAtPosition } from "./services/definition.js";
import { ReferenceLocation, ReferencesResult, findReferences, findReferencesAtPosition } from "./services/references.js";
import { SemanticToken, TokenModifier, TokenType, encodeSemanticTokens, extractSemanticTokens, tokenModifiers, tokenTypes } from "./services/semantic-tokens.js";
import { EntityDefinitionResult, EntityReferenceLocation, EntityReferencesResult, FieldDefinitionResult, FieldReferenceLocation, FieldReferencesResult, SectionDefinitionResult, SectionReferenceLocation, SectionReferencesResult, TagReferenceLocation, TagReferencesResult, findEntityDefinition, findEntityReferences, findFieldDefinition, findFieldReferences, findSectionDefinition, findSectionReferences, findTagReferences } from "./services/entity-navigation.js";
import { HoverResult, formatActualizeEntry, formatEntitySchema, formatEntryHover, formatFieldHover, formatInstanceEntry, formatSchemaEntry, formatSectionHover, formatSynthesisEntry, formatTagHover, formatTimestampHover, getDirectiveDocumentation, getHoverInfo, getPrimitiveTypeDocumentation } from "./services/hover.js";
import { ActualizeInfo, SynthesisInfo, findAllSyntheses, findEntryFile, findLatestActualize, getActualizeUpdatedTimestamp, getEntrySourceText, getSynthesisPrompt, getSynthesisSources } from "./services/synthesis.js";
import { EntryIdentity, EntryMatch } from "./merge/entry-matcher.js";
import { ConflictContext, ConflictRule, ConflictType, MergeConflict } from "./merge/conflict-detector.js";
import { MergeResult, MergeStats } from "./merge/merge-result-builder.js";
import { MergeOptions, mergeThaloFiles } from "./merge/driver.js";
import { ALL_DIRECTIVES, Directive, INSTANCE_DIRECTIVES, InstanceDirective, PRIMITIVE_TYPES, PrimitiveType, SCHEMA_BLOCK_HEADERS, SCHEMA_DIRECTIVES, SYNTHESIS_DIRECTIVES, SchemaBlockHeader, SchemaDirective, SynthesisDirective, isDirective, isInstanceDirective, isPrimitiveType, isSchemaDirective, isSynthesisDirective } from "./constants.js";
export { ALL_DIRECTIVES, type ActualizeEntry, type ActualizeEntryInfo, ActualizeInfo, type ActualizeResult, type Query as AstQuery, type QueryCondition as AstQueryCondition, type BlockMatch, type ChangeMarker, type ChangeTracker, type CheckConfig, type CheckResult, type CheckpointError, type ConflictContext, type ConflictRule, type ConflictType, DEFAULT_INSTRUCTIONS_TEMPLATE, DefinitionResult, type Diagnostic, type DiagnosticFormat, type DiagnosticInfo, type DiagnosticSeverity, Directive, type DirectiveContext, type DocumentBlock, type EditRange, type EditResult, type EntityContext, EntityDefinitionResult, EntityReferenceLocation, EntityReferencesResult, type EntitySchema, type Entry, type EntryIdentity, type EntryMatch, type FieldCondition, FieldDefinitionResult, type FieldNameContext, FieldReferenceLocation, FieldReferencesResult, type FieldSchema, type FileType, type FormatFileInput, type FormatFileResult, type FormatResult, type Formatter, type FragmentType, type GenericTree, HoverResult, INSTANCE_DIRECTIVES, InstanceDirective, type InstanceEntry, type InstructionsParams, type InvalidationResult, type Position as LinePosition, type LinkCondition, type LinkContext, type LinkDefinition, type LinkIndex, type LinkReference, type Location, type MergeConflict, type MergeOptions, type MergeResult, type MergeStats, type MetadataKeyContext, type ModelDefaultValue, type ModelFieldDefinition, type ModelSchemaEntry, type ModelSectionDefinition, type ModelTypeExpression, type NodeContext, PRIMITIVE_TYPES, type ParseOptions, type ParsedBlock, type ParsedDocument, type ParsedFragment, type Position$1 as Position, PrimitiveType, type QueriesResult, type Query$1 as Query, type QueryCondition$1 as QueryCondition, type QueryConditionInfo, type QueryEntryInfo, QueryOptions, type QueryResult, type QueryValidationError, RULE_CATEGORIES, ReferenceLocation, ReferencesResult, type Rule, type RuleCategory, type RunActualizeOptions, type RunCheckOptions, type RunFormatOptions, type RunQueryOptions, SCHEMA_BLOCK_HEADERS, SCHEMA_DIRECTIVES, SYNTHESIS_DIRECTIVES, SchemaBlockHeader, SchemaDirective, type SchemaEntityContext, type SchemaEntry, SectionDefinitionResult, type SectionHeaderContext, type SectionNameContext, SectionReferenceLocation, SectionReferencesResult, type SectionSchema, type SemanticModel, type SemanticModelDirtyFlags, type SemanticToken, type SemanticUpdateResult, type Severity, type SourceMap, type SyntaxErrorInfo, SynthesisDirective, type SynthesisEntry, SynthesisInfo, type SynthesisOutputInfo, type TagCondition, type TagContext, TagReferenceLocation, TagReferencesResult, type ThaloParser, type Timestamp, type TimestampContext, type TitleContext, type TokenModifier, type TokenType, type TypeContext, TypeExpr, type UnknownContext, Workspace, allRules, astQueryToModelQuery, checkDocument, encodeSemanticTokens, entryMatchesQuery, executeQueries, executeQuery, extractSemanticTokens, findAllSyntheses, findDefinition, findDefinitionAtPosition, findEntityDefinition, findEntityReferences, findEntryFile, findFieldDefinition, findFieldReferences, findLatestActualize, findNodeAtPosition, findReferences, findReferencesAtPosition, findSectionDefinition, findSectionReferences, findTagReferences, formatActualizeEntry, formatDiagnostic, formatEntitySchema, formatEntryHover, formatFieldHover, formatInstanceEntry, formatQuery, formatQueryResultRaw, formatSchemaEntry, formatSectionHover, formatSynthesisEntry, formatTagHover, formatTimestamp, formatTimestampHover, generateInstructions, generateTimestamp, 
getActualizeUpdatedTimestamp, getDirectiveDocumentation, getEntrySourceText, getHoverInfo, getPrimitiveTypeDocumentation, getSynthesisPrompt, getSynthesisSources, isCheckpointError, isDirective, isInstanceDirective, isPrimitiveType, isQueriesSuccess, isQueryError, isQuerySuccess, isQueryValidationError, isSchemaDirective, isSyntaxError, isSynthesisDirective, mergeThaloFiles, parseCheckpoint, parseQueryString, runActualize, runCheck, runFormat, runQueries, runQuery, tokenModifiers, tokenTypes, validateQueryEntities };
package/dist/mod.js
ADDED
@@ -0,0 +1,23 @@
import { astQueryToModelQuery, entryMatchesQuery, executeQueries, executeQuery, formatQuery, parseQueryString, validateQueryEntities } from "./services/query.js";
import { parseCheckpoint } from "./services/change-tracker/change-tracker.js";
import { isSyntaxError } from "./ast/ast-types.js";
import { formatDiagnostic, formatQueryResultRaw, formatTimestamp } from "./formatters.js";
import { TypeExpr } from "./schema/registry.js";
import { Workspace } from "./model/workspace.js";
import { findNodeAtPosition } from "./ast/node-at-position.js";
import { RULE_CATEGORIES, allRules } from "./checker/rules/rules.js";
import { checkDocument } from "./checker/check.js";
import { findDefinition, findDefinitionAtPosition } from "./services/definition.js";
import { findReferences, findReferencesAtPosition } from "./services/references.js";
import { encodeSemanticTokens, extractSemanticTokens, tokenModifiers, tokenTypes } from "./services/semantic-tokens.js";
import { findEntityDefinition, findEntityReferences, findFieldDefinition, findFieldReferences, findSectionDefinition, findSectionReferences, findTagReferences } from "./services/entity-navigation.js";
import { formatActualizeEntry, formatEntitySchema, formatEntryHover, formatFieldHover, formatInstanceEntry, formatSchemaEntry, formatSectionHover, formatSynthesisEntry, formatTagHover, formatTimestampHover, getDirectiveDocumentation, getHoverInfo, getPrimitiveTypeDocumentation } from "./services/hover.js";
import { findAllSyntheses, findEntryFile, findLatestActualize, getActualizeUpdatedTimestamp, getEntrySourceText, getSynthesisPrompt, getSynthesisSources } from "./services/synthesis.js";
import { mergeThaloFiles } from "./merge/driver.js";
import { ALL_DIRECTIVES, INSTANCE_DIRECTIVES, PRIMITIVE_TYPES, SCHEMA_BLOCK_HEADERS, SCHEMA_DIRECTIVES, SYNTHESIS_DIRECTIVES, isDirective, isInstanceDirective, isPrimitiveType, isSchemaDirective, isSynthesisDirective } from "./constants.js";
import { runCheck } from "./commands/check.js";
import { runFormat } from "./commands/format.js";
import { isCheckpointError, isQueriesSuccess, isQueryError, isQuerySuccess, isQueryValidationError, runQueries, runQuery } from "./commands/query.js";
import { DEFAULT_INSTRUCTIONS_TEMPLATE, generateInstructions, generateTimestamp, runActualize } from "./commands/actualize.js";

export { ALL_DIRECTIVES, DEFAULT_INSTRUCTIONS_TEMPLATE, INSTANCE_DIRECTIVES, PRIMITIVE_TYPES, RULE_CATEGORIES, SCHEMA_BLOCK_HEADERS, SCHEMA_DIRECTIVES, SYNTHESIS_DIRECTIVES, TypeExpr, Workspace, allRules, astQueryToModelQuery, checkDocument, encodeSemanticTokens, entryMatchesQuery, executeQueries, executeQuery, extractSemanticTokens, findAllSyntheses, findDefinition, findDefinitionAtPosition, findEntityDefinition, findEntityReferences, findEntryFile, findFieldDefinition, findFieldReferences, findLatestActualize, findNodeAtPosition, findReferences, findReferencesAtPosition, findSectionDefinition, findSectionReferences, findTagReferences, formatActualizeEntry, formatDiagnostic, formatEntitySchema, formatEntryHover, formatFieldHover, formatInstanceEntry, formatQuery, formatQueryResultRaw, formatSchemaEntry, formatSectionHover, formatSynthesisEntry, formatTagHover, formatTimestamp, formatTimestampHover, generateInstructions, generateTimestamp, getActualizeUpdatedTimestamp, getDirectiveDocumentation, getEntrySourceText, getHoverInfo, getPrimitiveTypeDocumentation, getSynthesisPrompt, getSynthesisSources, isCheckpointError, isDirective, isInstanceDirective, isPrimitiveType, isQueriesSuccess, isQueryError, isQuerySuccess, isQueryValidationError, isSchemaDirective, isSyntaxError, isSynthesisDirective, mergeThaloFiles, parseCheckpoint, parseQueryString, runActualize, runCheck, runFormat, runQueries, runQuery, tokenModifiers, tokenTypes, validateQueryEntities };
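mod.js is the runtime entry point: it re-exports the public values from the service, command, checker, model, and merge modules, while the matching type-only exports live in mod.d.ts above. A hypothetical consumer import, assuming the main entry in package.json (not reproduced in this diff) points at dist/mod.js; call signatures are not shown here, so no invocations are illustrated:

// Hypothetical consumer; every name below is re-exported by dist/mod.js or dist/mod.d.ts.
import { Workspace, runCheck, runFormat, parseQueryString, mergeThaloFiles } from "@rejot-dev/thalo";
import type { Diagnostic, MergeResult, EntitySchema } from "@rejot-dev/thalo";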
package/dist/model/document.d.ts
ADDED
@@ -0,0 +1,134 @@
import { SourceMap } from "../source-map.js";
import { FileType, GenericTree, ThaloParser } from "../parser.shared.js";
import { LineIndex } from "./line-index.js";

//#region src/model/document.d.ts
/** Position type compatible with tree-sitter Point */
interface Point {
  row: number;
  column: number;
}
/**
 * Edit range for applying incremental edits to a document.
 * Uses tree-sitter's edit format.
 */
interface EditRange {
  startIndex: number;
  startPosition: Point;
  oldEndIndex: number;
  oldEndPosition: Point;
  newEndIndex: number;
  newEndPosition: Point;
}
/**
 * A parsed thalo block within a document.
 */
interface DocumentBlock<T extends GenericTree = GenericTree> {
  /** The thalo source code for this block */
  source: string;
  /** Source map for translating block-relative positions to file-absolute positions */
  sourceMap: SourceMap;
  /** The parsed tree-sitter tree */
  tree: T;
  /** Character offset where this block starts in the full document */
  startOffset: number;
  /** Character offset where this block ends in the full document */
  endOffset: number;
}
/**
 * Result of applying an edit to a document.
 */
interface EditResult {
  /** Whether the edit affected thalo block boundaries (```thalo fences) */
  blockBoundariesChanged: boolean;
  /** Indices of blocks that were modified */
  modifiedBlockIndices: number[];
  /** Whether a full reparse was required */
  fullReparse: boolean;
}
/**
 * A Document owns the source text and Tree-sitter tree(s) for a file,
 * providing efficient incremental edit operations.
 *
 * The Document is parser-agnostic - it accepts any parser that implements
 * the ThaloParser interface.
 */
declare class Document<T extends GenericTree = GenericTree> {
  readonly filename: string;
  readonly fileType: FileType;
  private parser;
  private _source;
  private _blocks;
  private _lineIndex;
  /**
   * Create a Document with a parser.
   *
   * @param parser - A ThaloParser instance
   * @param filename - The filename for this document
   * @param source - The source code
   * @param fileType - Optional explicit file type
   */
  constructor(parser: ThaloParser<T>, filename: string, source: string, fileType?: FileType);
  /**
   * Get the current source text.
   */
  get source(): string;
  /**
   * Get the line index for position conversions.
   */
  get lineIndex(): LineIndex;
  /**
   * Get the current parsed blocks.
   */
  get blocks(): readonly DocumentBlock<T>[];
  /**
   * Apply an incremental edit to the document.
   *
   * @param startLine - 0-based start line of the edit
   * @param startColumn - 0-based start column of the edit
   * @param endLine - 0-based end line of the edit (exclusive)
   * @param endColumn - 0-based end column of the edit
   * @param newText - The replacement text
   * @returns Information about what changed
   */
  applyEdit(startLine: number, startColumn: number, endLine: number, endColumn: number, newText: string): EditResult;
  /**
   * Apply an edit using raw edit parameters (tree-sitter format).
   */
  applyEditRange(edit: EditRange, newText: string): EditResult;
  /**
   * Replace the entire document content (for full sync).
   */
  replaceContent(newSource: string): void;
  /**
   * Detect file type from filename extension or content.
   */
  private static detectFileType;
  /**
   * Parse all thalo blocks from the source.
   */
  private parseBlocks;
  /**
   * Check if an edit might affect thalo block boundaries (```thalo fences).
   */
  private mightAffectBlockBoundaries;
  /**
   * Apply incremental edit to a thalo file (single block).
   */
  private applyIncrementalEdit;
  /**
   * Apply edit to a markdown file, updating affected blocks incrementally.
   */
  private applyMarkdownEdit;
  /**
   * Update block offsets after an edit outside all blocks.
   */
  private updateBlockOffsets;
  /**
   * Update a single block using incremental parsing.
   */
  private updateSingleBlockIncremental;
}
//#endregion
export { Document, DocumentBlock, EditRange, EditResult };
//# sourceMappingURL=document.d.ts.map
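Document is generic over the tree type produced by the parser it is given. The authoritative ThaloParser and GenericTree declarations live in dist/parser.shared.d.ts (+99 lines, not reproduced in this section); the TypeScript sketch below is only an inferred approximation of the surface Document actually relies on, based on the calls made in document.js further down (parser.parse, parser.parseIncremental, and tree.edit):

import type { EditRange } from "@rejot-dev/thalo";

// Inferred approximation only; not the real declarations from parser.shared.d.ts.
interface MinimalTree {
  edit(edit: EditRange): void;
}
interface MinimalThaloParser<T extends MinimalTree> {
  parse(source: string): T;
  parseIncremental(source: string, oldTree: T): T;
}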
package/dist/model/document.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"document.d.ts","names":[],"sources":["../../src/model/document.ts"],"sourcesContent":[],"mappings":";;;;;;UAMU,KAAA;EAAA,GAAA,EAAA,MAAK;EASE,MAAA,EAAA,MAAS;;;;;AAY1B;AAAyC,UAZxB,SAAA,CAYwB;EAAc,UAAA,EAAA,MAAA;EAI1C,aAAA,EAdI,KAcJ;EAEL,WAAA,EAAA,MAAA;EAAC,cAAA,EAdS,KAcT;EAUQ,WAAA,EAAU,MAAA;EAsBd,cAAQ,EA5CH,KA4CG;;;;;AAiBC,UAvDL,aAuDK,CAAA,UAvDmB,WAuDnB,GAvDiC,WAuDjC,CAAA,CAAA;EAA6D;EAmBhE,MAAA,EAAA,MAAA;EAOoB;EAAd,SAAA,EA7EZ,SA6EY;EAoBpB;EAwCkB,IAAA,EAvIf,CAuIe;EAA6B;EAAU,WAAA,EAAA,MAAA;;;;;;;UA7H7C,UAAA;;;;;;;;;;;;;;;cAsBJ,mBAAmB,cAAc;;qBAEzB;;;;;;;;;;;;;sBAeC,YAAY,iDAAiD;;;;;;;;mBAmBhE;;;;yBAOM,cAAc;;;;;;;;;;;0GAoBlC;;;;uBAwCkB,6BAA6B"}
package/dist/model/document.js
ADDED
@@ -0,0 +1,275 @@
import { createSourceMap, identitySourceMap } from "../source-map.js";
import { LineIndex, computeEdit } from "./line-index.js";

//#region src/model/document.ts
/**
* Regex to match fenced thalo code blocks in markdown.
* Captures the content between ```thalo and ```
*/
const THALO_FENCE_REGEX = /^```thalo\s*\n([\s\S]*?)^```/gm;
/**
* A Document owns the source text and Tree-sitter tree(s) for a file,
* providing efficient incremental edit operations.
*
* The Document is parser-agnostic - it accepts any parser that implements
* the ThaloParser interface.
*/
var Document = class Document {
	filename;
	fileType;
	parser;
	_source;
	_blocks;
	_lineIndex;
	/**
	* Create a Document with a parser.
	*
	* @param parser - A ThaloParser instance
	* @param filename - The filename for this document
	* @param source - The source code
	* @param fileType - Optional explicit file type
	*/
	constructor(parser, filename, source, fileType) {
		this.parser = parser;
		this.filename = filename;
		this._source = source;
		this.fileType = fileType ?? Document.detectFileType(filename, source);
		this._lineIndex = new LineIndex(this._source);
		this._blocks = this.parseBlocks(this._source);
	}
	/**
	* Get the current source text.
	*/
	get source() {
		return this._source;
	}
	/**
	* Get the line index for position conversions.
	*/
	get lineIndex() {
		return this._lineIndex;
	}
	/**
	* Get the current parsed blocks.
	*/
	get blocks() {
		return this._blocks;
	}
	/**
	* Apply an incremental edit to the document.
	*
	* @param startLine - 0-based start line of the edit
	* @param startColumn - 0-based start column of the edit
	* @param endLine - 0-based end line of the edit (exclusive)
	* @param endColumn - 0-based end column of the edit
	* @param newText - The replacement text
	* @returns Information about what changed
	*/
	applyEdit(startLine, startColumn, endLine, endColumn, newText) {
		const edit = computeEdit(this._lineIndex, startLine, startColumn, endLine, endColumn, newText);
		const newSource = this._source.slice(0, edit.startIndex) + newText + this._source.slice(edit.oldEndIndex);
		const blockBoundariesChanged = this.fileType === "markdown" && this.mightAffectBlockBoundaries(edit, newText);
		const oldSource = this._source;
		this._source = newSource;
		this._lineIndex = new LineIndex(newSource);
		if (blockBoundariesChanged) {
			this._blocks = this.parseBlocks(newSource);
			return {
				blockBoundariesChanged: true,
				modifiedBlockIndices: this._blocks.map((_, i) => i),
				fullReparse: true
			};
		}
		if (this.fileType === "thalo") return this.applyIncrementalEdit(edit, oldSource, newSource, newText);
		else return this.applyMarkdownEdit(edit, oldSource, newSource, newText);
	}
	/**
	* Apply an edit using raw edit parameters (tree-sitter format).
	*/
	applyEditRange(edit, newText) {
		const startPos = this._lineIndex.offsetToPosition(edit.startIndex);
		const endPos = this._lineIndex.offsetToPosition(edit.oldEndIndex);
		return this.applyEdit(startPos.line, startPos.column, endPos.line, endPos.column, newText);
	}
	/**
	* Replace the entire document content (for full sync).
	*/
	replaceContent(newSource) {
		this._source = newSource;
		this._lineIndex = new LineIndex(newSource);
		this._blocks = this.parseBlocks(newSource);
	}
	/**
	* Detect file type from filename extension or content.
	*/
	static detectFileType(filename, source) {
		if (filename.endsWith(".thalo")) return "thalo";
		if (filename.endsWith(".md")) return "markdown";
		if (source.includes("```thalo")) return "markdown";
		return "thalo";
	}
	/**
	* Parse all thalo blocks from the source.
	*/
	parseBlocks(source) {
		if (this.fileType === "thalo") {
			const tree = this.parser.parse(source);
			return [{
				source,
				sourceMap: identitySourceMap(),
				tree,
				startOffset: 0,
				endOffset: source.length
			}];
		}
		const blocks = [];
		let match;
		while ((match = THALO_FENCE_REGEX.exec(source)) !== null) {
			const content = match[1];
			const contentStart = match.index + match[0].indexOf(content);
			const sourceMap = createSourceMap(source, contentStart, content);
			const tree = this.parser.parse(content);
			blocks.push({
				source: content,
				sourceMap,
				tree,
				startOffset: contentStart,
				endOffset: contentStart + content.length
			});
		}
		THALO_FENCE_REGEX.lastIndex = 0;
		return blocks;
	}
	/**
	* Check if an edit might affect thalo block boundaries (```thalo fences).
	*/
	mightAffectBlockBoundaries(edit, newText) {
		const hasFenceInOld = this._source.slice(edit.startIndex, edit.oldEndIndex).includes("```");
		const hasFenceInNew = newText.includes("```");
		if (hasFenceInOld || hasFenceInNew) return true;
		for (const block of this._blocks) {
			const fenceStartOffset = block.startOffset - 9;
			if (fenceStartOffset >= edit.startIndex && fenceStartOffset < edit.oldEndIndex) return true;
			const fenceEndOffset = block.endOffset;
			if (fenceEndOffset >= edit.startIndex && fenceEndOffset < edit.oldEndIndex) return true;
		}
		return false;
	}
	/**
	* Apply incremental edit to a thalo file (single block).
	*/
	applyIncrementalEdit(edit, _oldSource, newSource, _newText) {
		const block = this._blocks[0];
		block.tree.edit({
			startIndex: edit.startIndex,
			oldEndIndex: edit.oldEndIndex,
			newEndIndex: edit.newEndIndex,
			startPosition: edit.startPosition,
			oldEndPosition: edit.oldEndPosition,
			newEndPosition: edit.newEndPosition
		});
		const newTree = this.parser.parseIncremental(newSource, block.tree);
		this._blocks = [{
			source: newSource,
			sourceMap: identitySourceMap(),
			tree: newTree,
			startOffset: 0,
			endOffset: newSource.length
		}];
		return {
			blockBoundariesChanged: false,
			modifiedBlockIndices: [0],
			fullReparse: false
		};
	}
	/**
	* Apply edit to a markdown file, updating affected blocks incrementally.
	*/
	applyMarkdownEdit(edit, _oldSource, newSource, _newText) {
		const affectedBlockIndices = [];
		for (let i = 0; i < this._blocks.length; i++) {
			const block = this._blocks[i];
			if (edit.startIndex < block.endOffset && edit.oldEndIndex > block.startOffset) affectedBlockIndices.push(i);
		}
		if (affectedBlockIndices.length === 0) {
			this.updateBlockOffsets(edit, newSource);
			return {
				blockBoundariesChanged: false,
				modifiedBlockIndices: [],
				fullReparse: false
			};
		}
		if (affectedBlockIndices.length === 1) {
			const blockIndex = affectedBlockIndices[0];
			this.updateSingleBlockIncremental(blockIndex, edit, newSource);
			return {
				blockBoundariesChanged: false,
				modifiedBlockIndices: affectedBlockIndices,
				fullReparse: false
			};
		}
		this._blocks = this.parseBlocks(newSource);
		return {
			blockBoundariesChanged: false,
			modifiedBlockIndices: this._blocks.map((_, i) => i),
			fullReparse: true
		};
	}
	/**
	* Update block offsets after an edit outside all blocks.
	*/
	updateBlockOffsets(edit, newSource) {
		const delta = edit.newEndIndex - edit.oldEndIndex;
		this._blocks = this._blocks.map((block) => {
			if (block.startOffset > edit.oldEndIndex) return {
				...block,
				startOffset: block.startOffset + delta,
				endOffset: block.endOffset + delta,
				sourceMap: createSourceMap(newSource, block.startOffset + delta, block.source)
			};
			return block;
		});
	}
	/**
	* Update a single block using incremental parsing.
	*/
	updateSingleBlockIncremental(blockIndex, edit, newSource) {
		const oldBlock = this._blocks[blockIndex];
		const blockRelativeStartIndex = edit.startIndex - oldBlock.startOffset;
		const blockRelativeOldEndIndex = edit.oldEndIndex - oldBlock.startOffset;
		const delta = edit.newEndIndex - edit.oldEndIndex;
		const newBlockEndOffset = oldBlock.endOffset + delta;
		const newBlockSource = newSource.slice(oldBlock.startOffset, newBlockEndOffset);
		const blockRelativeNewEndIndex = blockRelativeStartIndex + (edit.newEndIndex - edit.startIndex);
		const blockRelativeEdit = computeEdit(new LineIndex(oldBlock.source), edit.startPosition.row - oldBlock.sourceMap.lineOffset, edit.startPosition.row === oldBlock.sourceMap.lineOffset ? edit.startPosition.column - oldBlock.sourceMap.columnOffset : edit.startPosition.column, edit.oldEndPosition.row - oldBlock.sourceMap.lineOffset, edit.oldEndPosition.row === oldBlock.sourceMap.lineOffset ? edit.oldEndPosition.column - oldBlock.sourceMap.columnOffset : edit.oldEndPosition.column, newSource.slice(edit.startIndex, edit.newEndIndex));
		oldBlock.tree.edit({
			startIndex: blockRelativeStartIndex,
			oldEndIndex: blockRelativeOldEndIndex,
			newEndIndex: blockRelativeNewEndIndex,
			startPosition: blockRelativeEdit.startPosition,
			oldEndPosition: blockRelativeEdit.oldEndPosition,
			newEndPosition: blockRelativeEdit.newEndPosition
		});
		const newTree = this.parser.parseIncremental(newBlockSource, oldBlock.tree);
		this._blocks = this._blocks.map((block, i) => {
			if (i === blockIndex) return {
				source: newBlockSource,
				sourceMap: createSourceMap(newSource, oldBlock.startOffset, newBlockSource),
				tree: newTree,
				startOffset: oldBlock.startOffset,
				endOffset: newBlockEndOffset
			};
			else if (i > blockIndex) return {
				...block,
				startOffset: block.startOffset + delta,
				endOffset: block.endOffset + delta,
				sourceMap: createSourceMap(newSource, block.startOffset + delta, block.source)
			};
			return block;
		});
	}
};

//#endregion
export { Document };
//# sourceMappingURL=document.js.map
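The markdown path above reparses only the fenced block an edit touches and shifts the offsets of every block that follows it; a full reparse happens only when a fence may have moved or the edit spans multiple blocks. A small usage sketch in TypeScript; the edit coordinates are arbitrary illustration values, and the parser the Document was constructed with (dist/parser.native.js or dist/parser.web.js) is not shown in this diff:

// As if written inside the package's dist tree; from outside the package this would
// need a deep import (the public exports map in package.json is not reproduced here).
import { Document } from "./model/document.js";

declare const doc: Document; // constructed elsewhere with a real ThaloParser
// Replace columns 0-5 on line 2 (0-based) with "title".
const result = doc.applyEdit(2, 0, 2, 5, "title");
if (result.fullReparse) {
  console.log("all blocks were reparsed");
} else {
  console.log("incrementally reparsed block indices:", result.modifiedBlockIndices);
}
console.log("block start offsets:", doc.blocks.map((b) => b.startOffset));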
package/dist/model/document.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"document.js","names":["blocks: DocumentBlock<T>[]","match: RegExpExecArray | null","affectedBlockIndices: number[]"],"sources":["../../src/model/document.ts"],"sourcesContent":["import type { SourceMap } from \"../source-map.js\";\nimport { createSourceMap, identitySourceMap } from \"../source-map.js\";\nimport type { ThaloParser, GenericTree, FileType } from \"../parser.shared.js\";\nimport { LineIndex, computeEdit } from \"./line-index.js\";\n\n/** Position type compatible with tree-sitter Point */\ninterface Point {\n row: number;\n column: number;\n}\n\n/**\n * Edit range for applying incremental edits to a document.\n * Uses tree-sitter's edit format.\n */\nexport interface EditRange {\n startIndex: number;\n startPosition: Point;\n oldEndIndex: number;\n oldEndPosition: Point;\n newEndIndex: number;\n newEndPosition: Point;\n}\n\n/**\n * A parsed thalo block within a document.\n */\nexport interface DocumentBlock<T extends GenericTree = GenericTree> {\n /** The thalo source code for this block */\n source: string;\n /** Source map for translating block-relative positions to file-absolute positions */\n sourceMap: SourceMap;\n /** The parsed tree-sitter tree */\n tree: T;\n /** Character offset where this block starts in the full document */\n startOffset: number;\n /** Character offset where this block ends in the full document */\n endOffset: number;\n}\n\n/**\n * Result of applying an edit to a document.\n */\nexport interface EditResult {\n /** Whether the edit affected thalo block boundaries (```thalo fences) */\n blockBoundariesChanged: boolean;\n /** Indices of blocks that were modified */\n modifiedBlockIndices: number[];\n /** Whether a full reparse was required */\n fullReparse: boolean;\n}\n\n/**\n * Regex to match fenced thalo code blocks in markdown.\n * Captures the content between ```thalo and ```\n */\nconst THALO_FENCE_REGEX = /^```thalo\\s*\\n([\\s\\S]*?)^```/gm;\n\n/**\n * A Document owns the source text and Tree-sitter tree(s) for a file,\n * providing efficient incremental edit operations.\n *\n * The Document is parser-agnostic - it accepts any parser that implements\n * the ThaloParser interface.\n */\nexport class Document<T extends GenericTree = GenericTree> {\n readonly filename: string;\n readonly fileType: FileType;\n\n private parser: ThaloParser<T>;\n private _source: string;\n private _blocks: DocumentBlock<T>[];\n private _lineIndex: LineIndex;\n\n /**\n * Create a Document with a parser.\n *\n * @param parser - A ThaloParser instance\n * @param filename - The filename for this document\n * @param source - The source code\n * @param fileType - Optional explicit file type\n */\n constructor(parser: ThaloParser<T>, filename: string, source: string, fileType?: FileType) {\n this.parser = parser;\n this.filename = filename;\n this._source = source;\n this.fileType = fileType ?? 
Document.detectFileType(filename, source);\n this._lineIndex = new LineIndex(this._source);\n this._blocks = this.parseBlocks(this._source);\n }\n\n /**\n * Get the current source text.\n */\n get source(): string {\n return this._source;\n }\n\n /**\n * Get the line index for position conversions.\n */\n get lineIndex(): LineIndex {\n return this._lineIndex;\n }\n\n /**\n * Get the current parsed blocks.\n */\n get blocks(): readonly DocumentBlock<T>[] {\n return this._blocks;\n }\n\n /**\n * Apply an incremental edit to the document.\n *\n * @param startLine - 0-based start line of the edit\n * @param startColumn - 0-based start column of the edit\n * @param endLine - 0-based end line of the edit (exclusive)\n * @param endColumn - 0-based end column of the edit\n * @param newText - The replacement text\n * @returns Information about what changed\n */\n applyEdit(\n startLine: number,\n startColumn: number,\n endLine: number,\n endColumn: number,\n newText: string,\n ): EditResult {\n // Compute edit parameters\n const edit = computeEdit(this._lineIndex, startLine, startColumn, endLine, endColumn, newText);\n\n // Apply the edit to get new source\n const newSource =\n this._source.slice(0, edit.startIndex) + newText + this._source.slice(edit.oldEndIndex);\n\n // Check if this is a markdown file and if block boundaries might have changed\n const blockBoundariesChanged =\n this.fileType === \"markdown\" && this.mightAffectBlockBoundaries(edit, newText);\n\n // Update source and line index\n const oldSource = this._source;\n this._source = newSource;\n this._lineIndex = new LineIndex(newSource);\n\n // For markdown files with potential boundary changes, do a full reparse\n if (blockBoundariesChanged) {\n this._blocks = this.parseBlocks(newSource);\n return {\n blockBoundariesChanged: true,\n modifiedBlockIndices: this._blocks.map((_, i) => i),\n fullReparse: true,\n };\n }\n\n // For thalo files or markdown edits within a single block, use incremental parsing\n if (this.fileType === \"thalo\") {\n // Single block, always incremental\n return this.applyIncrementalEdit(edit, oldSource, newSource, newText);\n } else {\n // Markdown: find affected block(s)\n return this.applyMarkdownEdit(edit, oldSource, newSource, newText);\n }\n }\n\n /**\n * Apply an edit using raw edit parameters (tree-sitter format).\n */\n applyEditRange(edit: EditRange, newText: string): EditResult {\n const startPos = this._lineIndex.offsetToPosition(edit.startIndex);\n const endPos = this._lineIndex.offsetToPosition(edit.oldEndIndex);\n return this.applyEdit(startPos.line, startPos.column, endPos.line, endPos.column, newText);\n }\n\n /**\n * Replace the entire document content (for full sync).\n */\n replaceContent(newSource: string): void {\n this._source = newSource;\n this._lineIndex = new LineIndex(newSource);\n this._blocks = this.parseBlocks(newSource);\n }\n\n /**\n * Detect file type from filename extension or content.\n */\n private static detectFileType(filename: string, source: string): FileType {\n if (filename.endsWith(\".thalo\")) {\n return \"thalo\";\n }\n if (filename.endsWith(\".md\")) {\n return \"markdown\";\n }\n // Use heuristics: if it contains markdown thalo fences, treat as markdown\n if (source.includes(\"```thalo\")) {\n return \"markdown\";\n }\n return \"thalo\";\n }\n\n /**\n * Parse all thalo blocks from the source.\n */\n private parseBlocks(source: string): DocumentBlock<T>[] {\n if (this.fileType === \"thalo\") {\n // Single block for pure thalo files\n const tree = 
this.parser.parse(source);\n return [\n {\n source,\n sourceMap: identitySourceMap(),\n tree,\n startOffset: 0,\n endOffset: source.length,\n },\n ];\n }\n\n // Extract thalo blocks from markdown\n const blocks: DocumentBlock<T>[] = [];\n let match: RegExpExecArray | null;\n\n while ((match = THALO_FENCE_REGEX.exec(source)) !== null) {\n const content = match[1];\n const contentStart = match.index + match[0].indexOf(content);\n const sourceMap = createSourceMap(source, contentStart, content);\n const tree = this.parser.parse(content);\n\n blocks.push({\n source: content,\n sourceMap,\n tree,\n startOffset: contentStart,\n endOffset: contentStart + content.length,\n });\n }\n\n // Reset regex state\n THALO_FENCE_REGEX.lastIndex = 0;\n\n return blocks;\n }\n\n /**\n * Check if an edit might affect thalo block boundaries (```thalo fences).\n */\n private mightAffectBlockBoundaries(edit: EditRange, newText: string): boolean {\n // Check if the edit region or new text contains fence markers\n const oldText = this._source.slice(edit.startIndex, edit.oldEndIndex);\n const hasFenceInOld = oldText.includes(\"```\");\n const hasFenceInNew = newText.includes(\"```\");\n\n if (hasFenceInOld || hasFenceInNew) {\n return true;\n }\n\n // Check if any block boundary falls within the edit range\n for (const block of this._blocks) {\n // Check if fence start (```thalo) is in edit range\n const fenceStartOffset = block.startOffset - \"```thalo\\n\".length;\n if (fenceStartOffset >= edit.startIndex && fenceStartOffset < edit.oldEndIndex) {\n return true;\n }\n // Check if fence end (```) is in edit range\n const fenceEndOffset = block.endOffset;\n if (fenceEndOffset >= edit.startIndex && fenceEndOffset < edit.oldEndIndex) {\n return true;\n }\n }\n\n return false;\n }\n\n /**\n * Apply incremental edit to a thalo file (single block).\n */\n private applyIncrementalEdit(\n edit: EditRange,\n _oldSource: string,\n newSource: string,\n _newText: string,\n ): EditResult {\n const block = this._blocks[0];\n\n // Tell tree-sitter about the edit\n block.tree.edit({\n startIndex: edit.startIndex,\n oldEndIndex: edit.oldEndIndex,\n newEndIndex: edit.newEndIndex,\n startPosition: edit.startPosition,\n oldEndPosition: edit.oldEndPosition,\n newEndPosition: edit.newEndPosition,\n });\n\n // Reparse with the old tree for incremental parsing\n const newTree = this.parser.parseIncremental(newSource, block.tree);\n\n // Update the block\n this._blocks = [\n {\n source: newSource,\n sourceMap: identitySourceMap(),\n tree: newTree,\n startOffset: 0,\n endOffset: newSource.length,\n },\n ];\n\n return {\n blockBoundariesChanged: false,\n modifiedBlockIndices: [0],\n fullReparse: false,\n };\n }\n\n /**\n * Apply edit to a markdown file, updating affected blocks incrementally.\n */\n private applyMarkdownEdit(\n edit: EditRange,\n _oldSource: string,\n newSource: string,\n _newText: string,\n ): EditResult {\n // Find which block(s) contain the edit\n const affectedBlockIndices: number[] = [];\n\n for (let i = 0; i < this._blocks.length; i++) {\n const block = this._blocks[i];\n // Check if edit overlaps with this block\n if (edit.startIndex < block.endOffset && edit.oldEndIndex > block.startOffset) {\n affectedBlockIndices.push(i);\n }\n }\n\n if (affectedBlockIndices.length === 0) {\n // Edit is outside all thalo blocks (in markdown content)\n // Just need to update block offsets\n this.updateBlockOffsets(edit, newSource);\n return {\n blockBoundariesChanged: false,\n modifiedBlockIndices: [],\n fullReparse: false,\n };\n 
}\n\n if (affectedBlockIndices.length === 1) {\n // Edit is within a single block - can use incremental parsing\n const blockIndex = affectedBlockIndices[0];\n this.updateSingleBlockIncremental(blockIndex, edit, newSource);\n return {\n blockBoundariesChanged: false,\n modifiedBlockIndices: affectedBlockIndices,\n fullReparse: false,\n };\n }\n\n // Edit spans multiple blocks - do a full reparse\n this._blocks = this.parseBlocks(newSource);\n return {\n blockBoundariesChanged: false,\n modifiedBlockIndices: this._blocks.map((_, i) => i),\n fullReparse: true,\n };\n }\n\n /**\n * Update block offsets after an edit outside all blocks.\n */\n private updateBlockOffsets(edit: EditRange, newSource: string): void {\n const delta = edit.newEndIndex - edit.oldEndIndex;\n\n this._blocks = this._blocks.map((block) => {\n if (block.startOffset > edit.oldEndIndex) {\n // Block is after the edit - shift offsets\n return {\n ...block,\n startOffset: block.startOffset + delta,\n endOffset: block.endOffset + delta,\n sourceMap: createSourceMap(newSource, block.startOffset + delta, block.source),\n };\n }\n return block;\n });\n }\n\n /**\n * Update a single block using incremental parsing.\n */\n private updateSingleBlockIncremental(\n blockIndex: number,\n edit: EditRange,\n newSource: string,\n ): void {\n const oldBlock = this._blocks[blockIndex];\n\n // Convert edit to block-relative coordinates\n const blockRelativeStartIndex = edit.startIndex - oldBlock.startOffset;\n const blockRelativeOldEndIndex = edit.oldEndIndex - oldBlock.startOffset;\n\n // Compute the new block source\n const delta = edit.newEndIndex - edit.oldEndIndex;\n const newBlockEndOffset = oldBlock.endOffset + delta;\n\n // Extract the new block content\n // Need to find the new block boundaries in the edited source\n const newBlockSource = newSource.slice(oldBlock.startOffset, newBlockEndOffset);\n\n // Compute block-relative edit\n const blockRelativeNewEndIndex = blockRelativeStartIndex + (edit.newEndIndex - edit.startIndex);\n const blockRelativeEdit = computeEdit(\n new LineIndex(oldBlock.source),\n edit.startPosition.row - oldBlock.sourceMap.lineOffset,\n edit.startPosition.row === oldBlock.sourceMap.lineOffset\n ? edit.startPosition.column - oldBlock.sourceMap.columnOffset\n : edit.startPosition.column,\n edit.oldEndPosition.row - oldBlock.sourceMap.lineOffset,\n edit.oldEndPosition.row === oldBlock.sourceMap.lineOffset\n ? 
edit.oldEndPosition.column - oldBlock.sourceMap.columnOffset\n : edit.oldEndPosition.column,\n newSource.slice(edit.startIndex, edit.newEndIndex),\n );\n\n // Tell tree-sitter about the edit\n oldBlock.tree.edit({\n startIndex: blockRelativeStartIndex,\n oldEndIndex: blockRelativeOldEndIndex,\n newEndIndex: blockRelativeNewEndIndex,\n startPosition: blockRelativeEdit.startPosition,\n oldEndPosition: blockRelativeEdit.oldEndPosition,\n newEndPosition: blockRelativeEdit.newEndPosition,\n });\n\n // Reparse with the old tree\n const newTree = this.parser.parseIncremental(newBlockSource, oldBlock.tree);\n\n // Update blocks array\n this._blocks = this._blocks.map((block, i) => {\n if (i === blockIndex) {\n return {\n source: newBlockSource,\n sourceMap: createSourceMap(newSource, oldBlock.startOffset, newBlockSource),\n tree: newTree,\n startOffset: oldBlock.startOffset,\n endOffset: newBlockEndOffset,\n };\n } else if (i > blockIndex) {\n // Shift subsequent blocks\n return {\n ...block,\n startOffset: block.startOffset + delta,\n endOffset: block.endOffset + delta,\n sourceMap: createSourceMap(newSource, block.startOffset + delta, block.source),\n };\n }\n return block;\n });\n }\n}\n"],"mappings":";;;;;;;;AAwDA,MAAM,oBAAoB;;;;;;;;AAS1B,IAAa,WAAb,MAAa,SAA8C;CACzD,AAAS;CACT,AAAS;CAET,AAAQ;CACR,AAAQ;CACR,AAAQ;CACR,AAAQ;;;;;;;;;CAUR,YAAY,QAAwB,UAAkB,QAAgB,UAAqB;AACzF,OAAK,SAAS;AACd,OAAK,WAAW;AAChB,OAAK,UAAU;AACf,OAAK,WAAW,YAAY,SAAS,eAAe,UAAU,OAAO;AACrE,OAAK,aAAa,IAAI,UAAU,KAAK,QAAQ;AAC7C,OAAK,UAAU,KAAK,YAAY,KAAK,QAAQ;;;;;CAM/C,IAAI,SAAiB;AACnB,SAAO,KAAK;;;;;CAMd,IAAI,YAAuB;AACzB,SAAO,KAAK;;;;;CAMd,IAAI,SAAsC;AACxC,SAAO,KAAK;;;;;;;;;;;;CAad,UACE,WACA,aACA,SACA,WACA,SACY;EAEZ,MAAM,OAAO,YAAY,KAAK,YAAY,WAAW,aAAa,SAAS,WAAW,QAAQ;EAG9F,MAAM,YACJ,KAAK,QAAQ,MAAM,GAAG,KAAK,WAAW,GAAG,UAAU,KAAK,QAAQ,MAAM,KAAK,YAAY;EAGzF,MAAM,yBACJ,KAAK,aAAa,cAAc,KAAK,2BAA2B,MAAM,QAAQ;EAGhF,MAAM,YAAY,KAAK;AACvB,OAAK,UAAU;AACf,OAAK,aAAa,IAAI,UAAU,UAAU;AAG1C,MAAI,wBAAwB;AAC1B,QAAK,UAAU,KAAK,YAAY,UAAU;AAC1C,UAAO;IACL,wBAAwB;IACxB,sBAAsB,KAAK,QAAQ,KAAK,GAAG,MAAM,EAAE;IACnD,aAAa;IACd;;AAIH,MAAI,KAAK,aAAa,QAEpB,QAAO,KAAK,qBAAqB,MAAM,WAAW,WAAW,QAAQ;MAGrE,QAAO,KAAK,kBAAkB,MAAM,WAAW,WAAW,QAAQ;;;;;CAOtE,eAAe,MAAiB,SAA6B;EAC3D,MAAM,WAAW,KAAK,WAAW,iBAAiB,KAAK,WAAW;EAClE,MAAM,SAAS,KAAK,WAAW,iBAAiB,KAAK,YAAY;AACjE,SAAO,KAAK,UAAU,SAAS,MAAM,SAAS,QAAQ,OAAO,MAAM,OAAO,QAAQ,QAAQ;;;;;CAM5F,eAAe,WAAyB;AACtC,OAAK,UAAU;AACf,OAAK,aAAa,IAAI,UAAU,UAAU;AAC1C,OAAK,UAAU,KAAK,YAAY,UAAU;;;;;CAM5C,OAAe,eAAe,UAAkB,QAA0B;AACxE,MAAI,SAAS,SAAS,SAAS,CAC7B,QAAO;AAET,MAAI,SAAS,SAAS,MAAM,CAC1B,QAAO;AAGT,MAAI,OAAO,SAAS,WAAW,CAC7B,QAAO;AAET,SAAO;;;;;CAMT,AAAQ,YAAY,QAAoC;AACtD,MAAI,KAAK,aAAa,SAAS;GAE7B,MAAM,OAAO,KAAK,OAAO,MAAM,OAAO;AACtC,UAAO,CACL;IACE;IACA,WAAW,mBAAmB;IAC9B;IACA,aAAa;IACb,WAAW,OAAO;IACnB,CACF;;EAIH,MAAMA,SAA6B,EAAE;EACrC,IAAIC;AAEJ,UAAQ,QAAQ,kBAAkB,KAAK,OAAO,MAAM,MAAM;GACxD,MAAM,UAAU,MAAM;GACtB,MAAM,eAAe,MAAM,QAAQ,MAAM,GAAG,QAAQ,QAAQ;GAC5D,MAAM,YAAY,gBAAgB,QAAQ,cAAc,QAAQ;GAChE,MAAM,OAAO,KAAK,OAAO,MAAM,QAAQ;AAEvC,UAAO,KAAK;IACV,QAAQ;IACR;IACA;IACA,aAAa;IACb,WAAW,eAAe,QAAQ;IACnC,CAAC;;AAIJ,oBAAkB,YAAY;AAE9B,SAAO;;;;;CAMT,AAAQ,2BAA2B,MAAiB,SAA0B;EAG5E,MAAM,gBADU,KAAK,QAAQ,MAAM,KAAK,YAAY,KAAK,YAAY,CACvC,SAAS,MAAM;EAC7C,MAAM,gBAAgB,QAAQ,SAAS,MAAM;AAE7C,MAAI,iBAAiB,cACnB,QAAO;AAIT,OAAK,MAAM,SAAS,KAAK,SAAS;GAEhC,MAAM,mBAAmB,MAAM,cAAc;AAC7C,OAAI,oBAAoB,KAAK,cAAc,mBAAmB,KAAK,YACjE,QAAO;GAGT,MAAM,iBAAiB,MAAM;AAC7B,OAAI,kBAAkB,KAAK,cAAc,iBAAiB,KAAK,YAC7D,QAAO;;AAIX,SAAO;;;;;CAMT,AAAQ,qBACN,MACA,YACA,WACA,UACY;EACZ,MAAM,QAAQ,KAAK,QAAQ;AAG3B,QAAM,KAAK,KAAK;GACd,Y
AAY,KAAK;GACjB,aAAa,KAAK;GAClB,aAAa,KAAK;GAClB,eAAe,KAAK;GACpB,gBAAgB,KAAK;GACrB,gBAAgB,KAAK;GACtB,CAAC;EAGF,MAAM,UAAU,KAAK,OAAO,iBAAiB,WAAW,MAAM,KAAK;AAGnE,OAAK,UAAU,CACb;GACE,QAAQ;GACR,WAAW,mBAAmB;GAC9B,MAAM;GACN,aAAa;GACb,WAAW,UAAU;GACtB,CACF;AAED,SAAO;GACL,wBAAwB;GACxB,sBAAsB,CAAC,EAAE;GACzB,aAAa;GACd;;;;;CAMH,AAAQ,kBACN,MACA,YACA,WACA,UACY;EAEZ,MAAMC,uBAAiC,EAAE;AAEzC,OAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,QAAQ,KAAK;GAC5C,MAAM,QAAQ,KAAK,QAAQ;AAE3B,OAAI,KAAK,aAAa,MAAM,aAAa,KAAK,cAAc,MAAM,YAChE,sBAAqB,KAAK,EAAE;;AAIhC,MAAI,qBAAqB,WAAW,GAAG;AAGrC,QAAK,mBAAmB,MAAM,UAAU;AACxC,UAAO;IACL,wBAAwB;IACxB,sBAAsB,EAAE;IACxB,aAAa;IACd;;AAGH,MAAI,qBAAqB,WAAW,GAAG;GAErC,MAAM,aAAa,qBAAqB;AACxC,QAAK,6BAA6B,YAAY,MAAM,UAAU;AAC9D,UAAO;IACL,wBAAwB;IACxB,sBAAsB;IACtB,aAAa;IACd;;AAIH,OAAK,UAAU,KAAK,YAAY,UAAU;AAC1C,SAAO;GACL,wBAAwB;GACxB,sBAAsB,KAAK,QAAQ,KAAK,GAAG,MAAM,EAAE;GACnD,aAAa;GACd;;;;;CAMH,AAAQ,mBAAmB,MAAiB,WAAyB;EACnE,MAAM,QAAQ,KAAK,cAAc,KAAK;AAEtC,OAAK,UAAU,KAAK,QAAQ,KAAK,UAAU;AACzC,OAAI,MAAM,cAAc,KAAK,YAE3B,QAAO;IACL,GAAG;IACH,aAAa,MAAM,cAAc;IACjC,WAAW,MAAM,YAAY;IAC7B,WAAW,gBAAgB,WAAW,MAAM,cAAc,OAAO,MAAM,OAAO;IAC/E;AAEH,UAAO;IACP;;;;;CAMJ,AAAQ,6BACN,YACA,MACA,WACM;EACN,MAAM,WAAW,KAAK,QAAQ;EAG9B,MAAM,0BAA0B,KAAK,aAAa,SAAS;EAC3D,MAAM,2BAA2B,KAAK,cAAc,SAAS;EAG7D,MAAM,QAAQ,KAAK,cAAc,KAAK;EACtC,MAAM,oBAAoB,SAAS,YAAY;EAI/C,MAAM,iBAAiB,UAAU,MAAM,SAAS,aAAa,kBAAkB;EAG/E,MAAM,2BAA2B,2BAA2B,KAAK,cAAc,KAAK;EACpF,MAAM,oBAAoB,YACxB,IAAI,UAAU,SAAS,OAAO,EAC9B,KAAK,cAAc,MAAM,SAAS,UAAU,YAC5C,KAAK,cAAc,QAAQ,SAAS,UAAU,aAC1C,KAAK,cAAc,SAAS,SAAS,UAAU,eAC/C,KAAK,cAAc,QACvB,KAAK,eAAe,MAAM,SAAS,UAAU,YAC7C,KAAK,eAAe,QAAQ,SAAS,UAAU,aAC3C,KAAK,eAAe,SAAS,SAAS,UAAU,eAChD,KAAK,eAAe,QACxB,UAAU,MAAM,KAAK,YAAY,KAAK,YAAY,CACnD;AAGD,WAAS,KAAK,KAAK;GACjB,YAAY;GACZ,aAAa;GACb,aAAa;GACb,eAAe,kBAAkB;GACjC,gBAAgB,kBAAkB;GAClC,gBAAgB,kBAAkB;GACnC,CAAC;EAGF,MAAM,UAAU,KAAK,OAAO,iBAAiB,gBAAgB,SAAS,KAAK;AAG3E,OAAK,UAAU,KAAK,QAAQ,KAAK,OAAO,MAAM;AAC5C,OAAI,MAAM,WACR,QAAO;IACL,QAAQ;IACR,WAAW,gBAAgB,WAAW,SAAS,aAAa,eAAe;IAC3E,MAAM;IACN,aAAa,SAAS;IACtB,WAAW;IACZ;YACQ,IAAI,WAEb,QAAO;IACL,GAAG;IACH,aAAa,MAAM,cAAc;IACjC,WAAW,MAAM,YAAY;IAC7B,WAAW,gBAAgB,WAAW,MAAM,cAAc,OAAO,MAAM,OAAO;IAC/E;AAEH,UAAO;IACP"}