@rejot-dev/thalo 0.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +396 -0
- package/dist/ast/ast-types.d.ts +469 -0
- package/dist/ast/ast-types.d.ts.map +1 -0
- package/dist/ast/ast-types.js +11 -0
- package/dist/ast/ast-types.js.map +1 -0
- package/dist/ast/builder.js +158 -0
- package/dist/ast/builder.js.map +1 -0
- package/dist/ast/extract.js +748 -0
- package/dist/ast/extract.js.map +1 -0
- package/dist/ast/node-at-position.d.ts +147 -0
- package/dist/ast/node-at-position.d.ts.map +1 -0
- package/dist/ast/node-at-position.js +382 -0
- package/dist/ast/node-at-position.js.map +1 -0
- package/dist/ast/visitor.js +232 -0
- package/dist/ast/visitor.js.map +1 -0
- package/dist/checker/check.d.ts +53 -0
- package/dist/checker/check.d.ts.map +1 -0
- package/dist/checker/check.js +105 -0
- package/dist/checker/check.js.map +1 -0
- package/dist/checker/rules/actualize-missing-updated.js +34 -0
- package/dist/checker/rules/actualize-missing-updated.js.map +1 -0
- package/dist/checker/rules/actualize-unresolved-target.js +42 -0
- package/dist/checker/rules/actualize-unresolved-target.js.map +1 -0
- package/dist/checker/rules/alter-before-define.js +53 -0
- package/dist/checker/rules/alter-before-define.js.map +1 -0
- package/dist/checker/rules/alter-undefined-entity.js +32 -0
- package/dist/checker/rules/alter-undefined-entity.js.map +1 -0
- package/dist/checker/rules/create-requires-section.js +34 -0
- package/dist/checker/rules/create-requires-section.js.map +1 -0
- package/dist/checker/rules/define-entity-requires-section.js +31 -0
- package/dist/checker/rules/define-entity-requires-section.js.map +1 -0
- package/dist/checker/rules/duplicate-entity-definition.js +37 -0
- package/dist/checker/rules/duplicate-entity-definition.js.map +1 -0
- package/dist/checker/rules/duplicate-field-in-schema.js +38 -0
- package/dist/checker/rules/duplicate-field-in-schema.js.map +1 -0
- package/dist/checker/rules/duplicate-link-id.js +52 -0
- package/dist/checker/rules/duplicate-link-id.js.map +1 -0
- package/dist/checker/rules/duplicate-metadata-key.js +21 -0
- package/dist/checker/rules/duplicate-metadata-key.js.map +1 -0
- package/dist/checker/rules/duplicate-section-heading.js +41 -0
- package/dist/checker/rules/duplicate-section-heading.js.map +1 -0
- package/dist/checker/rules/duplicate-section-in-schema.js +38 -0
- package/dist/checker/rules/duplicate-section-in-schema.js.map +1 -0
- package/dist/checker/rules/duplicate-timestamp.js +104 -0
- package/dist/checker/rules/duplicate-timestamp.js.map +1 -0
- package/dist/checker/rules/empty-required-value.js +45 -0
- package/dist/checker/rules/empty-required-value.js.map +1 -0
- package/dist/checker/rules/empty-section.js +21 -0
- package/dist/checker/rules/empty-section.js.map +1 -0
- package/dist/checker/rules/invalid-date-range-value.js +56 -0
- package/dist/checker/rules/invalid-date-range-value.js.map +1 -0
- package/dist/checker/rules/invalid-default-value.js +86 -0
- package/dist/checker/rules/invalid-default-value.js.map +1 -0
- package/dist/checker/rules/invalid-field-type.js +45 -0
- package/dist/checker/rules/invalid-field-type.js.map +1 -0
- package/dist/checker/rules/missing-required-field.js +48 -0
- package/dist/checker/rules/missing-required-field.js.map +1 -0
- package/dist/checker/rules/missing-required-section.js +51 -0
- package/dist/checker/rules/missing-required-section.js.map +1 -0
- package/dist/checker/rules/missing-title.js +56 -0
- package/dist/checker/rules/missing-title.js.map +1 -0
- package/dist/checker/rules/remove-undefined-field.js +42 -0
- package/dist/checker/rules/remove-undefined-field.js.map +1 -0
- package/dist/checker/rules/remove-undefined-section.js +42 -0
- package/dist/checker/rules/remove-undefined-section.js.map +1 -0
- package/dist/checker/rules/rules.d.ts +71 -0
- package/dist/checker/rules/rules.d.ts.map +1 -0
- package/dist/checker/rules/rules.js +102 -0
- package/dist/checker/rules/rules.js.map +1 -0
- package/dist/checker/rules/synthesis-empty-query.js +35 -0
- package/dist/checker/rules/synthesis-empty-query.js.map +1 -0
- package/dist/checker/rules/synthesis-missing-prompt.js +42 -0
- package/dist/checker/rules/synthesis-missing-prompt.js.map +1 -0
- package/dist/checker/rules/synthesis-missing-sources.js +32 -0
- package/dist/checker/rules/synthesis-missing-sources.js.map +1 -0
- package/dist/checker/rules/synthesis-unknown-query-entity.js +39 -0
- package/dist/checker/rules/synthesis-unknown-query-entity.js.map +1 -0
- package/dist/checker/rules/timestamp-out-of-order.js +55 -0
- package/dist/checker/rules/timestamp-out-of-order.js.map +1 -0
- package/dist/checker/rules/unknown-entity.js +32 -0
- package/dist/checker/rules/unknown-entity.js.map +1 -0
- package/dist/checker/rules/unknown-field.js +40 -0
- package/dist/checker/rules/unknown-field.js.map +1 -0
- package/dist/checker/rules/unknown-section.js +47 -0
- package/dist/checker/rules/unknown-section.js.map +1 -0
- package/dist/checker/rules/unresolved-link.js +34 -0
- package/dist/checker/rules/unresolved-link.js.map +1 -0
- package/dist/checker/rules/update-without-create.js +65 -0
- package/dist/checker/rules/update-without-create.js.map +1 -0
- package/dist/checker/visitor.d.ts +69 -0
- package/dist/checker/visitor.d.ts.map +1 -0
- package/dist/checker/visitor.js +67 -0
- package/dist/checker/visitor.js.map +1 -0
- package/dist/checker/workspace-index.d.ts +50 -0
- package/dist/checker/workspace-index.d.ts.map +1 -0
- package/dist/checker/workspace-index.js +108 -0
- package/dist/checker/workspace-index.js.map +1 -0
- package/dist/commands/actualize.d.ts +113 -0
- package/dist/commands/actualize.d.ts.map +1 -0
- package/dist/commands/actualize.js +111 -0
- package/dist/commands/actualize.js.map +1 -0
- package/dist/commands/check.d.ts +65 -0
- package/dist/commands/check.d.ts.map +1 -0
- package/dist/commands/check.js +61 -0
- package/dist/commands/check.js.map +1 -0
- package/dist/commands/format.d.ts +90 -0
- package/dist/commands/format.d.ts.map +1 -0
- package/dist/commands/format.js +80 -0
- package/dist/commands/format.js.map +1 -0
- package/dist/commands/query.d.ts +152 -0
- package/dist/commands/query.d.ts.map +1 -0
- package/dist/commands/query.js +151 -0
- package/dist/commands/query.js.map +1 -0
- package/dist/constants.d.ts +31 -0
- package/dist/constants.d.ts.map +1 -0
- package/dist/constants.js +51 -0
- package/dist/constants.js.map +1 -0
- package/dist/files.d.ts +58 -0
- package/dist/files.d.ts.map +1 -0
- package/dist/files.js +103 -0
- package/dist/files.js.map +1 -0
- package/dist/formatters.d.ts +39 -0
- package/dist/formatters.d.ts.map +1 -0
- package/dist/formatters.js +200 -0
- package/dist/formatters.js.map +1 -0
- package/dist/fragment.d.ts +22 -0
- package/dist/fragment.d.ts.map +1 -0
- package/dist/git/git.js +240 -0
- package/dist/git/git.js.map +1 -0
- package/dist/merge/conflict-detector.d.ts +89 -0
- package/dist/merge/conflict-detector.d.ts.map +1 -0
- package/dist/merge/conflict-detector.js +352 -0
- package/dist/merge/conflict-detector.js.map +1 -0
- package/dist/merge/conflict-formatter.js +143 -0
- package/dist/merge/conflict-formatter.js.map +1 -0
- package/dist/merge/driver.d.ts +54 -0
- package/dist/merge/driver.d.ts.map +1 -0
- package/dist/merge/driver.js +112 -0
- package/dist/merge/driver.js.map +1 -0
- package/dist/merge/entry-matcher.d.ts +50 -0
- package/dist/merge/entry-matcher.d.ts.map +1 -0
- package/dist/merge/entry-matcher.js +141 -0
- package/dist/merge/entry-matcher.js.map +1 -0
- package/dist/merge/entry-merger.js +194 -0
- package/dist/merge/entry-merger.js.map +1 -0
- package/dist/merge/merge-result-builder.d.ts +62 -0
- package/dist/merge/merge-result-builder.d.ts.map +1 -0
- package/dist/merge/merge-result-builder.js +89 -0
- package/dist/merge/merge-result-builder.js.map +1 -0
- package/dist/mod.d.ts +31 -0
- package/dist/mod.js +23 -0
- package/dist/model/document.d.ts +134 -0
- package/dist/model/document.d.ts.map +1 -0
- package/dist/model/document.js +275 -0
- package/dist/model/document.js.map +1 -0
- package/dist/model/line-index.d.ts +85 -0
- package/dist/model/line-index.d.ts.map +1 -0
- package/dist/model/line-index.js +159 -0
- package/dist/model/line-index.js.map +1 -0
- package/dist/model/workspace.d.ts +296 -0
- package/dist/model/workspace.d.ts.map +1 -0
- package/dist/model/workspace.js +562 -0
- package/dist/model/workspace.js.map +1 -0
- package/dist/parser.js +27 -0
- package/dist/parser.js.map +1 -0
- package/dist/parser.native.d.ts +51 -0
- package/dist/parser.native.d.ts.map +1 -0
- package/dist/parser.native.js +62 -0
- package/dist/parser.native.js.map +1 -0
- package/dist/parser.shared.d.ts +99 -0
- package/dist/parser.shared.d.ts.map +1 -0
- package/dist/parser.shared.js +124 -0
- package/dist/parser.shared.js.map +1 -0
- package/dist/parser.web.d.ts +67 -0
- package/dist/parser.web.d.ts.map +1 -0
- package/dist/parser.web.js +49 -0
- package/dist/parser.web.js.map +1 -0
- package/dist/schema/registry.d.ts +108 -0
- package/dist/schema/registry.d.ts.map +1 -0
- package/dist/schema/registry.js +281 -0
- package/dist/schema/registry.js.map +1 -0
- package/dist/semantic/analyzer.d.ts +107 -0
- package/dist/semantic/analyzer.d.ts.map +1 -0
- package/dist/semantic/analyzer.js +261 -0
- package/dist/semantic/analyzer.js.map +1 -0
- package/dist/services/change-tracker/change-tracker.d.ts +111 -0
- package/dist/services/change-tracker/change-tracker.d.ts.map +1 -0
- package/dist/services/change-tracker/change-tracker.js +62 -0
- package/dist/services/change-tracker/change-tracker.js.map +1 -0
- package/dist/services/change-tracker/create-tracker.d.ts +42 -0
- package/dist/services/change-tracker/create-tracker.d.ts.map +1 -0
- package/dist/services/change-tracker/create-tracker.js +53 -0
- package/dist/services/change-tracker/create-tracker.js.map +1 -0
- package/dist/services/change-tracker/git-tracker.d.ts +59 -0
- package/dist/services/change-tracker/git-tracker.d.ts.map +1 -0
- package/dist/services/change-tracker/git-tracker.js +218 -0
- package/dist/services/change-tracker/git-tracker.js.map +1 -0
- package/dist/services/change-tracker/timestamp-tracker.d.ts +22 -0
- package/dist/services/change-tracker/timestamp-tracker.d.ts.map +1 -0
- package/dist/services/change-tracker/timestamp-tracker.js +74 -0
- package/dist/services/change-tracker/timestamp-tracker.js.map +1 -0
- package/dist/services/definition.d.ts +37 -0
- package/dist/services/definition.d.ts.map +1 -0
- package/dist/services/definition.js +43 -0
- package/dist/services/definition.js.map +1 -0
- package/dist/services/entity-navigation.d.ts +200 -0
- package/dist/services/entity-navigation.d.ts.map +1 -0
- package/dist/services/entity-navigation.js +211 -0
- package/dist/services/entity-navigation.js.map +1 -0
- package/dist/services/hover.d.ts +81 -0
- package/dist/services/hover.d.ts.map +1 -0
- package/dist/services/hover.js +669 -0
- package/dist/services/hover.js.map +1 -0
- package/dist/services/query.d.ts +116 -0
- package/dist/services/query.d.ts.map +1 -0
- package/dist/services/query.js +225 -0
- package/dist/services/query.js.map +1 -0
- package/dist/services/references.d.ts +52 -0
- package/dist/services/references.d.ts.map +1 -0
- package/dist/services/references.js +66 -0
- package/dist/services/references.js.map +1 -0
- package/dist/services/semantic-tokens.d.ts +54 -0
- package/dist/services/semantic-tokens.d.ts.map +1 -0
- package/dist/services/semantic-tokens.js +213 -0
- package/dist/services/semantic-tokens.js.map +1 -0
- package/dist/services/synthesis.d.ts +90 -0
- package/dist/services/synthesis.d.ts.map +1 -0
- package/dist/services/synthesis.js +113 -0
- package/dist/services/synthesis.js.map +1 -0
- package/dist/source-map.d.ts +42 -0
- package/dist/source-map.d.ts.map +1 -0
- package/dist/source-map.js +170 -0
- package/dist/source-map.js.map +1 -0
- package/package.json +128 -0
- package/tree-sitter-thalo.wasm +0 -0
- package/web-tree-sitter.wasm +0 -0
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
import { ParsedDocument } from "../parser.shared.js";

//#region src/services/semantic-tokens.d.ts

/**
 * Semantic token types - these map to LSP's SemanticTokenTypes
 *
 * See: https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#semanticTokenTypes
 */
declare const tokenTypes: readonly ["namespace", "type", "class", "function", "property", "string", "keyword", "comment", "variable", "number", "operator", "macro"];
/** One of the supported semantic token type names (element type of `tokenTypes`). */
type TokenType = (typeof tokenTypes)[number];
/**
 * Semantic token modifiers
 */
declare const tokenModifiers: readonly ["declaration", "definition", "documentation", "readonly"];
/** One of the supported semantic token modifier names (element type of `tokenModifiers`). */
type TokenModifier = (typeof tokenModifiers)[number];
/**
 * A semantic token with position and type information
 */
interface SemanticToken {
  /** Line number (0-based) */
  line: number;
  /** Character offset on the line (0-based) */
  startChar: number;
  /** Length of the token */
  length: number;
  /** Token type index (into tokenTypes array) */
  tokenType: number;
  /** Token modifiers as a bitmask (one bit per entry in tokenModifiers) */
  tokenModifiers: number;
}
/**
 * Get the index of a token type
 */
declare function getTokenTypeIndex(type: TokenType): number;
/**
 * Get the modifier bitmask for a set of modifiers
 */
declare function getTokenModifiersMask(modifiers: TokenModifier[]): number;
/**
 * Extract semantic tokens from a parsed document.
 * Returns tokens with file-absolute positions (sourceMap applied).
 */
declare function extractSemanticTokens(document: ParsedDocument): SemanticToken[];
/**
 * Encode semantic tokens into the LSP delta format
 *
 * LSP expects tokens in a flattened array where each token is:
 * [deltaLine, deltaStartChar, length, tokenType, tokenModifiers]
 */
declare function encodeSemanticTokens(tokens: SemanticToken[]): number[];
//#endregion
export { SemanticToken, TokenModifier, TokenType, encodeSemanticTokens, extractSemanticTokens, getTokenModifiersMask, getTokenTypeIndex, tokenModifiers, tokenTypes };
//# sourceMappingURL=semantic-tokens.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"semantic-tokens.d.ts","names":[],"sources":["../../src/services/semantic-tokens.ts"],"sourcesContent":[],"mappings":";;;;;;AASA;AAeA;AAKA;AAOY,cA3BC,UA2BY,EAAW,SAAA,CAAA,WAAc,EAAA,MAAA,EAAA,OAAA,EAAA,UAAA,EAAA,UAAA,EAAA,QAAA,EAAA,SAAA,EAAA,SAAA,EAAA,UAAA,EAAA,QAAA,EAAA,UAAA,EAAA,OAAA,CAAA;AAKjC,KAjBL,SAAA,GAiBkB,CAAA,OAjBE,UAiBF,CAAA,CAAA,MAAA,CAAA;AAgB9B;AAOA;AAeA;AA+OgB,cAjSH,cAiSuB,EAAA,SAAS,CAAA,aAAa,EAAA,YAAA,EAAA,eAAA,EAAA,UAAA,CAAA;KA1R9C,aAAA,WAAwB;;;;UAKnB,aAAA;;;;;;;;;;;;;;;iBAgBD,iBAAA,OAAwB;;;;iBAOxB,qBAAA,YAAiC;;;;;iBAejC,qBAAA,WAAgC,iBAAiB;;;;;;;iBA+OjD,oBAAA,SAA6B"}
|
|
@@ -0,0 +1,213 @@
|
|
|
1
|
+
//#region src/services/semantic-tokens.ts
/**
 * Semantic token types - these map to LSP's SemanticTokenTypes
 *
 * The array ORDER is part of the public contract: clients receive indices
 * into this array, so entries must never be reordered or removed.
 *
 * See: https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#semanticTokenTypes
 */
const tokenTypes = [
  "namespace", // timestamp
  "type", // entity type, primitive types
  "class", // (unused, kept for index stability)
  "function", // link (^link-id)
  "property", // metadata key, field name, section name
  "string", // title, description, quoted strings, literal types
  "keyword", // where, and, block headers
  "comment", // comments
  "variable", // tag names
  "number", // datetime values, date ranges
  "operator", // punctuation
  "macro" // markdown header indicator (#, ##, ...)
];
/**
 * Semantic token modifiers
 *
 * Bit i of a modifier mask corresponds to tokenModifiers[i].
 */
const tokenModifiers = [
  "declaration", // link definition
  "definition", // entity definition
  "documentation", // description text
  "readonly" // optional marker (?)
];
/**
 * Get the index of a token type
 *
 * Returns -1 for a name that is not in `tokenTypes`.
 */
function getTokenTypeIndex(type) {
  return tokenTypes.findIndex((candidate) => candidate === type);
}
/**
 * Get the modifier bitmask for a set of modifiers
 *
 * Unknown modifier names are ignored rather than contributing a bit.
 */
function getTokenModifiersMask(modifiers) {
  return modifiers.reduce((mask, modifier) => {
    const bit = tokenModifiers.indexOf(modifier);
    return bit < 0 ? mask : mask | (1 << bit);
  }, 0);
}
|
|
47
|
+
/**
 * Extract semantic tokens from a parsed document.
 * Returns tokens with file-absolute positions (sourceMap applied),
 * sorted by line and then by start character.
 */
function extractSemanticTokens(document) {
  const collected = [];
  document.blocks.forEach((block) => {
    extractTokensFromTree(block.tree, block.sourceMap, collected);
  });
  // LSP consumers expect tokens in document order.
  const byPosition = (a, b) => (a.line !== b.line ? a.line - b.line : a.startChar - b.startChar);
  collected.sort(byPosition);
  return collected;
}
|
|
60
|
+
/**
 * Extract tokens from a tree-sitter tree.
 *
 * Walks the tree depth-first using a single shared cursor and appends a
 * semantic token for every node that getTokenForNode recognizes.
 *
 * @param tree      Parsed tree for one block (must expose `walk()`).
 * @param sourceMap Offsets used to translate block-relative positions.
 * @param tokens    Output array; mutated in place (tokens are pushed).
 */
function extractTokensFromTree(tree, sourceMap, tokens) {
  const cursor = tree.walk();
  // Recursive DFS. NOTE: one cursor is shared across every recursive call,
  // so each gotoFirstChild() must be balanced by the gotoParent() below —
  // unbalancing them would corrupt the traversal.
  const visitNode = (node) => {
    const token = getTokenForNode(node, sourceMap);
    if (token) tokens.push(token);
    if (cursor.gotoFirstChild()) {
      do
        visitNode(cursor.currentNode);
      while (cursor.gotoNextSibling());
      cursor.gotoParent();
    }
  };
  visitNode(cursor.currentNode);
}
|
|
77
|
+
/**
 * Get a semantic token for a tree-sitter node (if applicable).
 * Returns token with file-absolute positions (sourceMap applied).
 *
 * @param node      Syntax node to classify (reads type, positions, text, parent).
 * @param sourceMap Provides lineOffset/columnOffset for the enclosing block.
 * @returns A SemanticToken, or null when the node type is not highlighted.
 */
function getTokenForNode(node, sourceMap) {
  // `let` on purpose: the field_name/section_name case rebinds these locals.
  let { type, startPosition, endPosition } = node;
  let nodeText = node.text;
  // Parse-error nodes are never highlighted.
  if (type === "ERROR") return null;
  let tokenType = null;
  let modifiers = [];
  switch (type) {
    // --- Header elements ---------------------------------------------------
    case "timestamp":
      // Same type as datetime_value so dates are colored consistently.
      tokenType = "number";
      break;
    case "instance_directive":
    case "schema_directive":
    case "define-synthesis":
    case "actualize-synthesis":
      tokenType = "keyword";
      break;
    case "entity":
    case "query_entity":
      tokenType = "type";
      break;
    case "identifier":
      // Entity name in schema entries (define-entity <name>).
      tokenType = "type";
      modifiers = ["definition"];
      break;
    case "title":
      tokenType = "string";
      break;
    case "link":
      tokenType = "function";
      // Link directly on an entry is a definition; elsewhere it is a reference.
      if (node.parent?.type === "data_entry" || node.parent?.type === "schema_entry") modifiers = ["declaration"];
      break;
    case "tag":
      tokenType = "variable";
      break;
    // --- Metadata ----------------------------------------------------------
    case "key":
      tokenType = "property";
      break;
    case "quoted_value":
      tokenType = "string";
      break;
    case "datetime_value":
    case "date_range":
      tokenType = "number";
      break;
    // --- Schema definitions ------------------------------------------------
    case "field_name":
    case "section_name": {
      tokenType = "property";
      // These tokens include leading whitespace (newline + indent); strip it
      // and recompute the start position by working backwards from the end.
      const trimmed = nodeText.trimStart();
      if (nodeText.length - trimmed.length > 0) {
        startPosition = {
          row: endPosition.row,
          column: endPosition.column - trimmed.length
        };
        nodeText = trimmed;
      }
      break;
    }
    case "optional_marker":
      tokenType = "operator";
      modifiers = ["readonly"];
      break;
    case "primitive_type":
      tokenType = "type";
      break;
    case "literal_type":
      tokenType = "string";
      break;
    case "description":
      tokenType = "string";
      modifiers = ["documentation"];
      break;
    // --- Operators and punctuation -----------------------------------------
    case "|":
    case "[]":
    case ":":
    case "=":
    case ";":
    case "(":
    case ")":
    case ",":
      tokenType = "operator";
      break;
    // --- Query expressions -------------------------------------------------
    case "where":
    case "and":
      tokenType = "keyword";
      break;
    case "condition_field":
      tokenType = "property";
      break;
    // --- Comments ----------------------------------------------------------
    case "comment":
      tokenType = "comment";
      break;
    // --- Content sections --------------------------------------------------
    case "md_indicator":
      tokenType = "macro";
      break;
    case "md_heading_text":
      tokenType = "keyword";
      break;
    default: return null;
  }
  if (!tokenType) return null;
  // For multiline tokens, only highlight the first line (up to the first \n).
  const length = startPosition.row === endPosition.row ? endPosition.column - startPosition.column : nodeText.indexOf("\n");
  // Apply sourceMap: columnOffset applies only to the block's first line;
  // subsequent lines already start at file column 0.
  const isFirstBlockLine = startPosition.row === 0;
  return {
    line: sourceMap.lineOffset + startPosition.row,
    startChar: isFirstBlockLine ? sourceMap.columnOffset + startPosition.column : startPosition.column,
    length: length > 0 ? length : nodeText.length,
    tokenType: getTokenTypeIndex(tokenType),
    tokenModifiers: getTokenModifiersMask(modifiers)
  };
}
|
|
191
|
+
/**
 * Encode semantic tokens into the LSP delta format
 *
 * LSP expects tokens in a flattened array where each token is:
 * [deltaLine, deltaStartChar, length, tokenType, tokenModifiers]
 *
 * Input must already be sorted by (line, startChar); deltas are computed
 * against the previous token in the array.
 */
function encodeSemanticTokens(tokens) {
  const encoded = [];
  let lastLine = 0;
  let lastChar = 0;
  tokens.forEach((token) => {
    const lineDelta = token.line - lastLine;
    // Character offsets restart from column 0 whenever the line changes.
    const charDelta = lineDelta !== 0 ? token.startChar : token.startChar - lastChar;
    encoded.push(lineDelta, charDelta, token.length, token.tokenType, token.tokenModifiers);
    lastLine = token.line;
    lastChar = token.startChar;
  });
  return encoded;
}
|
|
210
|
+
|
|
211
|
+
//#endregion
|
|
212
|
+
export { encodeSemanticTokens, extractSemanticTokens, getTokenModifiersMask, getTokenTypeIndex, tokenModifiers, tokenTypes };
|
|
213
|
+
//# sourceMappingURL=semantic-tokens.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"semantic-tokens.js","names":["tokens: SemanticToken[]","tokenType: TokenType | null","modifiers: TokenModifier[]","data: number[]"],"sources":["../../src/services/semantic-tokens.ts"],"sourcesContent":["import type { SyntaxNode } from \"../ast/ast-types.js\";\nimport type { ParsedDocument, GenericTree } from \"../parser.shared.js\";\nimport type { SourceMap } from \"../source-map.js\";\n\n/**\n * Semantic token types - these map to LSP's SemanticTokenTypes\n *\n * See: https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#semanticTokenTypes\n */\nexport const tokenTypes = [\n \"namespace\", // timestamp\n \"type\", // entity type (lore, opinion, etc.), primitive types\n \"class\", // (unused, kept for index stability)\n \"function\", // link (^link-id)\n \"property\", // metadata key, field name, section name\n \"string\", // title, description, quoted strings, literal types\n \"keyword\", // where, and, block headers (# Metadata, etc.)\n \"comment\", // comments\n \"variable\", // tag names\n \"number\", // datetime values, date ranges\n \"operator\", // |, [], =, :, ;, (, ), ,\n \"macro\", // markdown header indicator (#, ##, etc.)\n] as const;\n\nexport type TokenType = (typeof tokenTypes)[number];\n\n/**\n * Semantic token modifiers\n */\nexport const tokenModifiers = [\n \"declaration\", // link definition\n \"definition\", // entity definition\n \"documentation\", // description text\n \"readonly\", // optional marker (?)\n] as const;\n\nexport type TokenModifier = (typeof tokenModifiers)[number];\n\n/**\n * A semantic token with position and type information\n */\nexport interface SemanticToken {\n /** Line number (0-based) */\n line: number;\n /** Character offset on the line (0-based) */\n startChar: number;\n /** Length of the token */\n length: number;\n /** Token type index (into tokenTypes array) */\n tokenType: number;\n /** Token modifiers as a bitmask */\n tokenModifiers: number;\n}\n\n/**\n 
* Get the index of a token type\n */\nexport function getTokenTypeIndex(type: TokenType): number {\n return tokenTypes.indexOf(type);\n}\n\n/**\n * Get the modifier bitmask for a set of modifiers\n */\nexport function getTokenModifiersMask(modifiers: TokenModifier[]): number {\n let mask = 0;\n for (const mod of modifiers) {\n const index = tokenModifiers.indexOf(mod);\n if (index >= 0) {\n mask |= 1 << index;\n }\n }\n return mask;\n}\n\n/**\n * Extract semantic tokens from a parsed document.\n * Returns tokens with file-absolute positions (sourceMap applied).\n */\nexport function extractSemanticTokens(document: ParsedDocument): SemanticToken[] {\n const tokens: SemanticToken[] = [];\n\n for (const block of document.blocks) {\n extractTokensFromTree(block.tree, block.sourceMap, tokens);\n }\n\n // Sort by position (line, then character)\n tokens.sort((a, b) => {\n if (a.line !== b.line) {\n return a.line - b.line;\n }\n return a.startChar - b.startChar;\n });\n\n return tokens;\n}\n\n/**\n * Extract tokens from a tree-sitter tree\n */\nfunction extractTokensFromTree(\n tree: GenericTree,\n sourceMap: SourceMap,\n tokens: SemanticToken[],\n): void {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const cursor = (tree as any).walk();\n\n // DFS traversal\n const visitNode = (node: SyntaxNode): void => {\n const token = getTokenForNode(node, sourceMap);\n if (token) {\n tokens.push(token);\n }\n\n // Visit children\n if (cursor.gotoFirstChild()) {\n do {\n visitNode(cursor.currentNode);\n } while (cursor.gotoNextSibling());\n cursor.gotoParent();\n }\n };\n\n visitNode(cursor.currentNode);\n}\n\n/**\n * Get a semantic token for a tree-sitter node (if applicable).\n * Returns token with file-absolute positions (sourceMap applied).\n */\nfunction getTokenForNode(node: SyntaxNode, sourceMap: SourceMap): SemanticToken | null {\n let { type, startPosition, endPosition } = node;\n let nodeText = node.text;\n\n // Skip ERROR nodes and certain wrapper 
nodes\n if (type === \"ERROR\") {\n return null;\n }\n\n let tokenType: TokenType | null = null;\n let modifiers: TokenModifier[] = [];\n\n switch (type) {\n // =========================================================================\n // Header elements\n // =========================================================================\n case \"timestamp\":\n tokenType = \"number\"; // same as datetime_value for consistent date coloring\n break;\n\n case \"instance_directive\":\n case \"schema_directive\":\n case \"define-synthesis\":\n case \"actualize-synthesis\":\n tokenType = \"keyword\";\n break;\n\n case \"entity\":\n case \"query_entity\":\n tokenType = \"type\";\n break;\n\n case \"identifier\":\n // Entity name in schema entries (define-entity custom)\n tokenType = \"type\";\n modifiers = [\"definition\"];\n break;\n\n case \"title\":\n tokenType = \"string\";\n break;\n\n case \"link\":\n tokenType = \"function\";\n // Check if this is a definition (directly on entry) or reference (in metadata)\n if (node.parent?.type === \"data_entry\" || node.parent?.type === \"schema_entry\") {\n modifiers = [\"declaration\"];\n }\n break;\n\n case \"tag\":\n tokenType = \"variable\";\n break;\n\n // =========================================================================\n // Metadata\n // =========================================================================\n case \"key\":\n tokenType = \"property\";\n break;\n\n case \"quoted_value\":\n tokenType = \"string\";\n break;\n\n case \"datetime_value\":\n case \"date_range\":\n tokenType = \"number\";\n break;\n\n // =========================================================================\n // Schema definitions\n // =========================================================================\n case \"field_name\":\n case \"section_name\": {\n tokenType = \"property\";\n // These tokens include leading whitespace (\\n + indent), strip it\n const trimmed = nodeText.trimStart();\n const leadingLen = nodeText.length - 
trimmed.length;\n if (leadingLen > 0) {\n // Adjust position to after the whitespace (endPosition is correct, work backwards)\n startPosition = { row: endPosition.row, column: endPosition.column - trimmed.length };\n nodeText = trimmed;\n }\n break;\n }\n\n case \"optional_marker\":\n tokenType = \"operator\";\n modifiers = [\"readonly\"];\n break;\n\n case \"primitive_type\":\n tokenType = \"type\";\n break;\n\n case \"literal_type\":\n tokenType = \"string\";\n break;\n\n case \"description\":\n tokenType = \"string\";\n modifiers = [\"documentation\"];\n break;\n\n // =========================================================================\n // Operators and punctuation\n // =========================================================================\n case \"|\":\n case \"[]\":\n case \":\":\n case \"=\":\n case \";\":\n case \"(\":\n case \")\":\n case \",\":\n tokenType = \"operator\";\n break;\n\n // =========================================================================\n // Query expressions\n // =========================================================================\n case \"where\":\n case \"and\":\n tokenType = \"keyword\";\n break;\n\n case \"condition_field\":\n tokenType = \"property\";\n break;\n\n // =========================================================================\n // Comments\n // =========================================================================\n case \"comment\":\n tokenType = \"comment\";\n break;\n\n // =========================================================================\n // Content sections\n // =========================================================================\n case \"md_indicator\":\n tokenType = \"macro\";\n break;\n\n case \"md_heading_text\":\n tokenType = \"keyword\";\n break;\n\n default:\n return null;\n }\n\n if (!tokenType) {\n return null;\n }\n\n // For multiline tokens, only highlight the first line\n const length =\n startPosition.row === endPosition.row\n ? 
endPosition.column - startPosition.column\n : nodeText.indexOf(\"\\n\");\n\n // Apply sourceMap to convert block-relative positions to file-absolute\n // For the first line of the block, add both line and column offset\n // For subsequent lines, only add line offset\n const isFirstBlockLine = startPosition.row === 0;\n const fileLine = sourceMap.lineOffset + startPosition.row;\n const fileChar = isFirstBlockLine\n ? sourceMap.columnOffset + startPosition.column\n : startPosition.column;\n\n return {\n line: fileLine,\n startChar: fileChar,\n length: length > 0 ? length : nodeText.length,\n tokenType: getTokenTypeIndex(tokenType),\n tokenModifiers: getTokenModifiersMask(modifiers),\n };\n}\n\n/**\n * Encode semantic tokens into the LSP delta format\n *\n * LSP expects tokens in a flattened array where each token is:\n * [deltaLine, deltaStartChar, length, tokenType, tokenModifiers]\n */\nexport function encodeSemanticTokens(tokens: SemanticToken[]): number[] {\n const data: number[] = [];\n let prevLine = 0;\n let prevChar = 0;\n\n for (const token of tokens) {\n const deltaLine = token.line - prevLine;\n const deltaChar = deltaLine === 0 ? 
token.startChar - prevChar : token.startChar;\n\n data.push(deltaLine, deltaChar, token.length, token.tokenType, token.tokenModifiers);\n\n prevLine = token.line;\n prevChar = token.startChar;\n }\n\n return data;\n}\n"],"mappings":";;;;;;AASA,MAAa,aAAa;CACxB;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD;;;;AAOD,MAAa,iBAAiB;CAC5B;CACA;CACA;CACA;CACD;;;;AAuBD,SAAgB,kBAAkB,MAAyB;AACzD,QAAO,WAAW,QAAQ,KAAK;;;;;AAMjC,SAAgB,sBAAsB,WAAoC;CACxE,IAAI,OAAO;AACX,MAAK,MAAM,OAAO,WAAW;EAC3B,MAAM,QAAQ,eAAe,QAAQ,IAAI;AACzC,MAAI,SAAS,EACX,SAAQ,KAAK;;AAGjB,QAAO;;;;;;AAOT,SAAgB,sBAAsB,UAA2C;CAC/E,MAAMA,SAA0B,EAAE;AAElC,MAAK,MAAM,SAAS,SAAS,OAC3B,uBAAsB,MAAM,MAAM,MAAM,WAAW,OAAO;AAI5D,QAAO,MAAM,GAAG,MAAM;AACpB,MAAI,EAAE,SAAS,EAAE,KACf,QAAO,EAAE,OAAO,EAAE;AAEpB,SAAO,EAAE,YAAY,EAAE;GACvB;AAEF,QAAO;;;;;AAMT,SAAS,sBACP,MACA,WACA,QACM;CAEN,MAAM,SAAU,KAAa,MAAM;CAGnC,MAAM,aAAa,SAA2B;EAC5C,MAAM,QAAQ,gBAAgB,MAAM,UAAU;AAC9C,MAAI,MACF,QAAO,KAAK,MAAM;AAIpB,MAAI,OAAO,gBAAgB,EAAE;AAC3B;AACE,cAAU,OAAO,YAAY;UACtB,OAAO,iBAAiB;AACjC,UAAO,YAAY;;;AAIvB,WAAU,OAAO,YAAY;;;;;;AAO/B,SAAS,gBAAgB,MAAkB,WAA4C;CACrF,IAAI,EAAE,MAAM,eAAe,gBAAgB;CAC3C,IAAI,WAAW,KAAK;AAGpB,KAAI,SAAS,QACX,QAAO;CAGT,IAAIC,YAA8B;CAClC,IAAIC,YAA6B,EAAE;AAEnC,SAAQ,MAAR;EAIE,KAAK;AACH,eAAY;AACZ;EAEF,KAAK;EACL,KAAK;EACL,KAAK;EACL,KAAK;AACH,eAAY;AACZ;EAEF,KAAK;EACL,KAAK;AACH,eAAY;AACZ;EAEF,KAAK;AAEH,eAAY;AACZ,eAAY,CAAC,aAAa;AAC1B;EAEF,KAAK;AACH,eAAY;AACZ;EAEF,KAAK;AACH,eAAY;AAEZ,OAAI,KAAK,QAAQ,SAAS,gBAAgB,KAAK,QAAQ,SAAS,eAC9D,aAAY,CAAC,cAAc;AAE7B;EAEF,KAAK;AACH,eAAY;AACZ;EAKF,KAAK;AACH,eAAY;AACZ;EAEF,KAAK;AACH,eAAY;AACZ;EAEF,KAAK;EACL,KAAK;AACH,eAAY;AACZ;EAKF,KAAK;EACL,KAAK,gBAAgB;AACnB,eAAY;GAEZ,MAAM,UAAU,SAAS,WAAW;AAEpC,OADmB,SAAS,SAAS,QAAQ,SAC5B,GAAG;AAElB,oBAAgB;KAAE,KAAK,YAAY;KAAK,QAAQ,YAAY,SAAS,QAAQ;KAAQ;AACrF,eAAW;;AAEb;;EAGF,KAAK;AACH,eAAY;AACZ,eAAY,CAAC,WAAW;AACxB;EAEF,KAAK;AACH,eAAY;AACZ;EAEF,KAAK;AACH,eAAY;AACZ;EAEF,KAAK;AACH,eAAY;AACZ,eAAY,CAAC,gBAAgB;AAC7B;EAKF,KAAK;EACL,KAAK;EACL,KAAK;EACL,KAAK;EACL,KAAK;EACL,KAAK;EA
CL,KAAK;EACL,KAAK;AACH,eAAY;AACZ;EAKF,KAAK;EACL,KAAK;AACH,eAAY;AACZ;EAEF,KAAK;AACH,eAAY;AACZ;EAKF,KAAK;AACH,eAAY;AACZ;EAKF,KAAK;AACH,eAAY;AACZ;EAEF,KAAK;AACH,eAAY;AACZ;EAEF,QACE,QAAO;;AAGX,KAAI,CAAC,UACH,QAAO;CAIT,MAAM,SACJ,cAAc,QAAQ,YAAY,MAC9B,YAAY,SAAS,cAAc,SACnC,SAAS,QAAQ,KAAK;CAK5B,MAAM,mBAAmB,cAAc,QAAQ;AAM/C,QAAO;EACL,MANe,UAAU,aAAa,cAAc;EAOpD,WANe,mBACb,UAAU,eAAe,cAAc,SACvC,cAAc;EAKhB,QAAQ,SAAS,IAAI,SAAS,SAAS;EACvC,WAAW,kBAAkB,UAAU;EACvC,gBAAgB,sBAAsB,UAAU;EACjD;;;;;;;;AASH,SAAgB,qBAAqB,QAAmC;CACtE,MAAMC,OAAiB,EAAE;CACzB,IAAI,WAAW;CACf,IAAI,WAAW;AAEf,MAAK,MAAM,SAAS,QAAQ;EAC1B,MAAM,YAAY,MAAM,OAAO;EAC/B,MAAM,YAAY,cAAc,IAAI,MAAM,YAAY,WAAW,MAAM;AAEvE,OAAK,KAAK,WAAW,WAAW,MAAM,QAAQ,MAAM,WAAW,MAAM,eAAe;AAEpF,aAAW,MAAM;AACjB,aAAW,MAAM;;AAGnB,QAAO"}
|
|
@@ -0,0 +1,90 @@
|
|
|
1
|
+
import { ActualizeEntry, InstanceEntry, SynthesisEntry } from "../ast/ast-types.js";
|
|
2
|
+
import { Workspace } from "../model/workspace.js";
|
|
3
|
+
import { Query, astQueryToModelQuery } from "./query.js";
|
|
4
|
+
import { formatTimestamp } from "../formatters.js";
|
|
5
|
+
|
|
6
|
+
//#region src/services/synthesis.d.ts
/**
 * Information about a synthesis definition including file context
 */
interface SynthesisInfo {
  /** The synthesis entry */
  entry: SynthesisEntry;
  /** File containing this synthesis */
  file: string;
  /** The link ID for this synthesis */
  linkId: string;
  /** Title of the synthesis (falls back to a placeholder when the header has no title) */
  title: string;
  /** Source queries for this synthesis */
  sources: Query[];
  /** The prompt text (if defined) */
  prompt: string | null;
}
/**
 * Information about an actualize entry including file context
 */
interface ActualizeInfo {
  /** The actualize entry */
  entry: ActualizeEntry;
  /** File containing this actualize */
  file: string;
  /** Target synthesis link ID */
  target: string;
  /** Formatted timestamp of the actualize entry */
  timestamp: string;
}
/**
 * Extract source queries from a synthesis entry's metadata.
 * Sources can be a single query or an array of queries.
 */
declare function getSynthesisSources(synthesis: SynthesisEntry): Query[];
/**
 * Extract the prompt text from a synthesis entry's content.
 * Looks for content under a "# Prompt" header.
 */
declare function getSynthesisPrompt(synthesis: SynthesisEntry): string | null;
/**
 * Find all synthesis definitions in a workspace.
 *
 * @param workspace - The workspace to search
 * @returns Array of synthesis info objects with file context
 */
declare function findAllSyntheses(workspace: Workspace): SynthesisInfo[];
/**
 * Find the latest actualize entry for a given synthesis by link ID.
 * Searches across all files in the workspace. "Latest" is determined by
 * comparing formatted timestamp strings.
 *
 * @param workspace - The workspace to search
 * @param synthesisLinkId - The link ID of the synthesis to find actualizes for
 * @returns The latest actualize info, or null if none found
 */
declare function findLatestActualize(workspace: Workspace, synthesisLinkId: string): ActualizeInfo | null;
/**
 * Get the 'updated' timestamp from an actualize entry's metadata.
 *
 * @param actualize - The actualize info (or null)
 * @returns The updated timestamp string, or null if not found
 */
declare function getActualizeUpdatedTimestamp(actualize: ActualizeInfo | null): string | null;
/**
 * Find which file and model contain a specific entry.
 * Uses object identity first, then falls back to location matching.
 *
 * @param workspace - The workspace to search
 * @param entry - The entry to find
 * @returns The file path, or undefined if not found
 */
declare function findEntryFile(workspace: Workspace, entry: InstanceEntry): string | undefined;
/**
 * Get the raw source text for an entry from its file.
 * The caller supplies the containing file's source text directly
 * (avoids a filesystem dependency in this module).
 *
 * @param entry - The entry to get text for
 * @param source - The source text of the file containing the entry
 * @returns The raw entry text
 */
declare function getEntrySourceText(entry: InstanceEntry, source: string): string;
//#endregion
export { ActualizeInfo, SynthesisInfo, findAllSyntheses, findEntryFile, findLatestActualize, getActualizeUpdatedTimestamp, getEntrySourceText, getSynthesisPrompt, getSynthesisSources };
//# sourceMappingURL=synthesis.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"synthesis.d.ts","names":[],"sources":["../../src/services/synthesis.ts"],"sourcesContent":[],"mappings":";;;;;;;;;AAciB,UAAA,aAAA,CAAa;EAkBb;EAqBD,KAAA,EArCP,cAqCO;EAsBA;EA8BA,IAAA,EAAA,MAAA;EA6BA;EAoCA,MAAA,EAAA,MAAA;EAgBA;EAwCA,KAAA,EAAA,MAAA;;WA1ML;;;;;;;UAQM,aAAA;;SAER;;;;;;;;;;;;iBAmBO,mBAAA,YAA+B,iBAAiB;;;;;iBAsBhD,kBAAA,YAA8B;;;;;;;iBA8B9B,gBAAA,YAA4B,YAAY;;;;;;;;;iBA6BxC,mBAAA,YACH,qCAEV;;;;;;;iBAiCa,4BAAA,YAAwC;;;;;;;;;iBAgBxC,aAAA,YAAyB,kBAAkB;;;;;;;;;iBAwC3C,kBAAA,QAA0B"}
|
|
@@ -0,0 +1,113 @@
|
|
|
1
|
+
import { astQueryToModelQuery } from "./query.js";
|
|
2
|
+
import { formatTimestamp } from "../formatters.js";
|
|
3
|
+
|
|
4
|
+
//#region src/services/synthesis.ts
|
|
5
|
+
/**
 * Extract source queries from a synthesis entry's metadata.
 * The "sources" metadata may hold a single query or an array of
 * queries; either form is normalized into a flat Query[].
 */
function getSynthesisSources(synthesis) {
  const meta = synthesis.metadata.find((entry) => entry.key.value === "sources");
  if (meta === undefined) return [];
  const { content } = meta.value;
  switch (content.type) {
    case "query_value":
      return [astQueryToModelQuery(content.query)];
    case "value_array": {
      const queries = [];
      for (const element of content.elements) {
        if (element.type === "query") queries.push(astQueryToModelQuery(element));
      }
      return queries;
    }
    default:
      // Unknown value shape — treat as having no sources.
      return [];
  }
}
|
|
17
|
+
/**
 * Extract the prompt text from a synthesis entry's content.
 * Content lines following a markdown header whose text contains
 * "prompt" (case-insensitive) are collected; any other header
 * closes the prompt section.
 */
function getSynthesisPrompt(synthesis) {
  if (!synthesis.content) return null;
  const collected = [];
  let capturing = false;
  for (const node of synthesis.content.children) {
    if (node.type === "markdown_header") {
      // A "prompt" header opens capture; any other header closes it.
      capturing = node.text.toLowerCase().includes("prompt");
    } else if (node.type === "content_line" && capturing) {
      collected.push(node.text);
    }
  }
  if (collected.length === 0) return null;
  return collected.join("\n").trim();
}
|
|
30
|
+
/**
 * Find all synthesis definitions in a workspace.
 *
 * @param workspace - The workspace to search
 * @returns Array of synthesis info objects with file context
 */
function findAllSyntheses(workspace) {
  const results = [];
  for (const model of workspace.allModels()) {
    for (const candidate of model.ast.entries) {
      if (candidate.type !== "synthesis_entry") continue;
      results.push({
        entry: candidate,
        file: model.file,
        linkId: candidate.header.linkId.id,
        title: candidate.header.title?.value ?? "(no title)",
        sources: getSynthesisSources(candidate),
        prompt: getSynthesisPrompt(candidate),
      });
    }
  }
  return results;
}
|
|
48
|
+
/**
 * Find the latest actualize entry for a given synthesis by link ID.
 * Searches across all files in the workspace; "latest" is decided by
 * comparing formatted timestamp strings.
 *
 * @param workspace - The workspace to search
 * @param synthesisLinkId - The link ID of the synthesis to find actualizes for
 * @returns The latest actualize info, or null if none found
 */
function findLatestActualize(workspace, synthesisLinkId) {
  let best = null;
  for (const model of workspace.allModels()) {
    for (const candidate of model.ast.entries) {
      // Skip anything that is not an actualize aimed at this synthesis.
      if (candidate.type !== "actualize_entry" || candidate.header.target.id !== synthesisLinkId) {
        continue;
      }
      const stamp = formatTimestamp(candidate.header.timestamp);
      if (best === null || stamp > best.timestamp) {
        best = {
          entry: candidate,
          file: model.file,
          target: candidate.header.target.id,
          timestamp: stamp,
        };
      }
    }
  }
  return best;
}
|
|
71
|
+
/**
 * Get the 'updated' timestamp from an actualize entry's metadata.
 *
 * @param actualize - The actualize info (or null)
 * @returns The updated timestamp string, or null if not found
 */
function getActualizeUpdatedTimestamp(actualize) {
  if (actualize == null) return null;
  const updatedMeta = actualize.entry.metadata.find((m) => m.key.value === "updated");
  if (updatedMeta === undefined) return null;
  return updatedMeta.value.raw ?? null;
}
|
|
81
|
+
/**
|
|
82
|
+
* Find which file and model contain a specific entry.
|
|
83
|
+
* Uses object identity first, then falls back to location matching.
|
|
84
|
+
*
|
|
85
|
+
* @param workspace - The workspace to search
|
|
86
|
+
* @param entry - The entry to find
|
|
87
|
+
* @returns The file path, or undefined if not found
|
|
88
|
+
*/
|
|
89
|
+
function findEntryFile(workspace, entry) {
|
|
90
|
+
for (const model of workspace.allModels()) for (const e of model.ast.entries) if (e === entry) return model.file;
|
|
91
|
+
const entryTs = formatTimestamp(entry.header.timestamp);
|
|
92
|
+
for (const model of workspace.allModels()) for (const e of model.ast.entries) {
|
|
93
|
+
if (e.type !== "instance_entry") continue;
|
|
94
|
+
if (formatTimestamp(e.header.timestamp) === entryTs && e.location.startIndex === entry.location.startIndex && e.location.endIndex === entry.location.endIndex) return model.file;
|
|
95
|
+
}
|
|
96
|
+
}
|
|
97
|
+
/**
 * Get the raw source text for an entry from its file.
 * The caller passes the containing file's source text directly, so
 * this module stays free of filesystem dependencies.
 *
 * @param entry - The entry to get text for
 * @param source - The source text of the file containing the entry
 * @returns The raw entry text, trimmed of surrounding whitespace
 */
function getEntrySourceText(entry, source) {
  const { startIndex, endIndex } = entry.location;
  return source.slice(startIndex, endIndex).trim();
}
|
|
110
|
+
|
|
111
|
+
//#endregion
|
|
112
|
+
export { findAllSyntheses, findEntryFile, findLatestActualize, getActualizeUpdatedTimestamp, getEntrySourceText, getSynthesisPrompt, getSynthesisSources };
|
|
113
|
+
//# sourceMappingURL=synthesis.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"synthesis.js","names":["promptLines: string[]","syntheses: SynthesisInfo[]","latest: ActualizeInfo | null"],"sources":["../../src/services/synthesis.ts"],"sourcesContent":["import type { Workspace } from \"../model/workspace.js\";\nimport type {\n SynthesisEntry,\n ActualizeEntry,\n InstanceEntry,\n Query as AstQuery,\n} from \"../ast/ast-types.js\";\nimport type { Query } from \"./query.js\";\nimport { formatTimestamp } from \"../formatters.js\";\nimport { astQueryToModelQuery } from \"./query.js\";\n\n/**\n * Information about a synthesis definition including file context\n */\nexport interface SynthesisInfo {\n /** The synthesis entry */\n entry: SynthesisEntry;\n /** File containing this synthesis */\n file: string;\n /** The link ID for this synthesis */\n linkId: string;\n /** Title of the synthesis */\n title: string;\n /** Source queries for this synthesis */\n sources: Query[];\n /** The prompt text (if defined) */\n prompt: string | null;\n}\n\n/**\n * Information about an actualize entry including file context\n */\nexport interface ActualizeInfo {\n /** The actualize entry */\n entry: ActualizeEntry;\n /** File containing this actualize */\n file: string;\n /** Target synthesis link ID */\n target: string;\n /** Formatted timestamp of the actualize entry */\n timestamp: string;\n}\n\n// Re-export formatTimestamp from formatters for backward compatibility\nexport { formatTimestamp } from \"../formatters.js\";\n\n// Re-export astQueryToModelQuery for backward compatibility\nexport { astQueryToModelQuery } from \"./query.js\";\n\n/**\n * Extract source queries from a synthesis entry's metadata.\n * Sources can be a single query or an array of queries.\n */\nexport function getSynthesisSources(synthesis: SynthesisEntry): Query[] {\n const sourcesMeta = synthesis.metadata.find((m) => m.key.value === \"sources\");\n if (!sourcesMeta) {\n return [];\n }\n\n const content = sourcesMeta.value.content;\n if (content.type === \"query_value\") 
{\n return [astQueryToModelQuery(content.query)];\n }\n if (content.type === \"value_array\") {\n return content.elements\n .filter((e): e is AstQuery => e.type === \"query\")\n .map(astQueryToModelQuery);\n }\n return [];\n}\n\n/**\n * Extract the prompt text from a synthesis entry's content.\n * Looks for content under a \"# Prompt\" header.\n */\nexport function getSynthesisPrompt(synthesis: SynthesisEntry): string | null {\n if (!synthesis.content) {\n return null;\n }\n\n let inPrompt = false;\n const promptLines: string[] = [];\n\n for (const child of synthesis.content.children) {\n if (child.type === \"markdown_header\") {\n const headerText = child.text.toLowerCase();\n if (headerText.includes(\"prompt\")) {\n inPrompt = true;\n } else {\n inPrompt = false;\n }\n } else if (child.type === \"content_line\" && inPrompt) {\n promptLines.push(child.text);\n }\n }\n\n return promptLines.length > 0 ? promptLines.join(\"\\n\").trim() : null;\n}\n\n/**\n * Find all synthesis definitions in a workspace.\n *\n * @param workspace - The workspace to search\n * @returns Array of synthesis info objects with file context\n */\nexport function findAllSyntheses(workspace: Workspace): SynthesisInfo[] {\n const syntheses: SynthesisInfo[] = [];\n\n for (const model of workspace.allModels()) {\n for (const entry of model.ast.entries) {\n if (entry.type === \"synthesis_entry\") {\n syntheses.push({\n entry,\n file: model.file,\n linkId: entry.header.linkId.id,\n title: entry.header.title?.value ?? 
\"(no title)\",\n sources: getSynthesisSources(entry),\n prompt: getSynthesisPrompt(entry),\n });\n }\n }\n }\n\n return syntheses;\n}\n\n/**\n * Find the latest actualize entry for a given synthesis by link ID.\n * Searches across all files in the workspace.\n *\n * @param workspace - The workspace to search\n * @param synthesisLinkId - The link ID of the synthesis to find actualizes for\n * @returns The latest actualize info, or null if none found\n */\nexport function findLatestActualize(\n workspace: Workspace,\n synthesisLinkId: string,\n): ActualizeInfo | null {\n let latest: ActualizeInfo | null = null;\n\n for (const model of workspace.allModels()) {\n for (const entry of model.ast.entries) {\n if (entry.type !== \"actualize_entry\") {\n continue;\n }\n if (entry.header.target.id !== synthesisLinkId) {\n continue;\n }\n\n const ts = formatTimestamp(entry.header.timestamp);\n if (!latest || ts > latest.timestamp) {\n latest = {\n entry,\n file: model.file,\n target: entry.header.target.id,\n timestamp: ts,\n };\n }\n }\n }\n\n return latest;\n}\n\n/**\n * Get the 'updated' timestamp from an actualize entry's metadata.\n *\n * @param actualize - The actualize info (or null)\n * @returns The updated timestamp string, or null if not found\n */\nexport function getActualizeUpdatedTimestamp(actualize: ActualizeInfo | null): string | null {\n if (!actualize) {\n return null;\n }\n const updated = actualize.entry.metadata.find((m) => m.key.value === \"updated\");\n return updated?.value.raw ?? 
null;\n}\n\n/**\n * Find which file and model contain a specific entry.\n * Uses object identity first, then falls back to location matching.\n *\n * @param workspace - The workspace to search\n * @param entry - The entry to find\n * @returns The file path, or undefined if not found\n */\nexport function findEntryFile(workspace: Workspace, entry: InstanceEntry): string | undefined {\n // First try object identity (fast path when entry came from this workspace)\n for (const model of workspace.allModels()) {\n for (const e of model.ast.entries) {\n if (e === entry) {\n return model.file;\n }\n }\n }\n\n // Fallback: match by location and timestamp (for entries that may have been cloned)\n const entryTs = formatTimestamp(entry.header.timestamp);\n for (const model of workspace.allModels()) {\n for (const e of model.ast.entries) {\n if (e.type !== \"instance_entry\") {\n continue;\n }\n // Match by timestamp and location for uniqueness\n const eTs = formatTimestamp(e.header.timestamp);\n if (\n eTs === entryTs &&\n e.location.startIndex === entry.location.startIndex &&\n e.location.endIndex === entry.location.endIndex\n ) {\n return model.file;\n }\n }\n }\n\n return undefined;\n}\n\n/**\n * Get the raw source text for an entry from its file.\n * Requires providing a function to read file contents (to avoid filesystem dependency).\n *\n * @param entry - The entry to get text for\n * @param source - The source text of the file containing the entry\n * @returns The raw entry text\n */\nexport function getEntrySourceText(entry: InstanceEntry, source: string): string {\n const start = entry.location.startIndex;\n const end = entry.location.endIndex;\n return source.slice(start, 
end).trim();\n}\n"],"mappings":";;;;;;;;AAqDA,SAAgB,oBAAoB,WAAoC;CACtE,MAAM,cAAc,UAAU,SAAS,MAAM,MAAM,EAAE,IAAI,UAAU,UAAU;AAC7E,KAAI,CAAC,YACH,QAAO,EAAE;CAGX,MAAM,UAAU,YAAY,MAAM;AAClC,KAAI,QAAQ,SAAS,cACnB,QAAO,CAAC,qBAAqB,QAAQ,MAAM,CAAC;AAE9C,KAAI,QAAQ,SAAS,cACnB,QAAO,QAAQ,SACZ,QAAQ,MAAqB,EAAE,SAAS,QAAQ,CAChD,IAAI,qBAAqB;AAE9B,QAAO,EAAE;;;;;;AAOX,SAAgB,mBAAmB,WAA0C;AAC3E,KAAI,CAAC,UAAU,QACb,QAAO;CAGT,IAAI,WAAW;CACf,MAAMA,cAAwB,EAAE;AAEhC,MAAK,MAAM,SAAS,UAAU,QAAQ,SACpC,KAAI,MAAM,SAAS,kBAEjB,KADmB,MAAM,KAAK,aAAa,CAC5B,SAAS,SAAS,CAC/B,YAAW;KAEX,YAAW;UAEJ,MAAM,SAAS,kBAAkB,SAC1C,aAAY,KAAK,MAAM,KAAK;AAIhC,QAAO,YAAY,SAAS,IAAI,YAAY,KAAK,KAAK,CAAC,MAAM,GAAG;;;;;;;;AASlE,SAAgB,iBAAiB,WAAuC;CACtE,MAAMC,YAA6B,EAAE;AAErC,MAAK,MAAM,SAAS,UAAU,WAAW,CACvC,MAAK,MAAM,SAAS,MAAM,IAAI,QAC5B,KAAI,MAAM,SAAS,kBACjB,WAAU,KAAK;EACb;EACA,MAAM,MAAM;EACZ,QAAQ,MAAM,OAAO,OAAO;EAC5B,OAAO,MAAM,OAAO,OAAO,SAAS;EACpC,SAAS,oBAAoB,MAAM;EACnC,QAAQ,mBAAmB,MAAM;EAClC,CAAC;AAKR,QAAO;;;;;;;;;;AAWT,SAAgB,oBACd,WACA,iBACsB;CACtB,IAAIC,SAA+B;AAEnC,MAAK,MAAM,SAAS,UAAU,WAAW,CACvC,MAAK,MAAM,SAAS,MAAM,IAAI,SAAS;AACrC,MAAI,MAAM,SAAS,kBACjB;AAEF,MAAI,MAAM,OAAO,OAAO,OAAO,gBAC7B;EAGF,MAAM,KAAK,gBAAgB,MAAM,OAAO,UAAU;AAClD,MAAI,CAAC,UAAU,KAAK,OAAO,UACzB,UAAS;GACP;GACA,MAAM,MAAM;GACZ,QAAQ,MAAM,OAAO,OAAO;GAC5B,WAAW;GACZ;;AAKP,QAAO;;;;;;;;AAST,SAAgB,6BAA6B,WAAgD;AAC3F,KAAI,CAAC,UACH,QAAO;AAGT,QADgB,UAAU,MAAM,SAAS,MAAM,MAAM,EAAE,IAAI,UAAU,UAAU,EAC/D,MAAM,OAAO;;;;;;;;;;AAW/B,SAAgB,cAAc,WAAsB,OAA0C;AAE5F,MAAK,MAAM,SAAS,UAAU,WAAW,CACvC,MAAK,MAAM,KAAK,MAAM,IAAI,QACxB,KAAI,MAAM,MACR,QAAO,MAAM;CAMnB,MAAM,UAAU,gBAAgB,MAAM,OAAO,UAAU;AACvD,MAAK,MAAM,SAAS,UAAU,WAAW,CACvC,MAAK,MAAM,KAAK,MAAM,IAAI,SAAS;AACjC,MAAI,EAAE,SAAS,iBACb;AAIF,MADY,gBAAgB,EAAE,OAAO,UAAU,KAErC,WACR,EAAE,SAAS,eAAe,MAAM,SAAS,cACzC,EAAE,SAAS,aAAa,MAAM,SAAS,SAEvC,QAAO,MAAM;;;;;;;;;;;AAgBrB,SAAgB,mBAAmB,OAAsB,QAAwB;CAC/E,MAAM,QAAQ,MAAM,SAAS;CAC7B,MAAM,MAAM,MAAM,SAAS;AAC3B,QAAO,OAAO,MAAM,OAAO,IAAI,CAAC,MAAM"}
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
//#region src/source-map.d.ts
/**
 * A simple position (line and column).
 * Both are 0-based to match tree-sitter's Point type.
 */
interface Position {
  /** 0-based line number */
  line: number;
  /** 0-based column number */
  column: number;
}
/**
 * SourceMap tracks the relationship between block-relative positions
 * (as returned by tree-sitter) and file-absolute positions.
 *
 * For standalone .thalo files, this is an identity map (all offsets are 0).
 * For embedded blocks in markdown, this contains the offset to the block start.
 */
interface SourceMap {
  /** Character offset where block content starts in the file */
  readonly charOffset: number;
  /** Line number where block content starts (0-based) */
  readonly lineOffset: number;
  /** Column offset on the starting line (usually 0 for fenced code blocks) */
  readonly columnOffset: number;
  /** Number of lines in the block content */
  readonly lineCount: number;
}
/**
 * Result of finding a block at a position.
 * The generic block type must carry its own SourceMap (see the
 * constraint on T) so the match can relate block and file positions.
 */
interface BlockMatch<T extends {
  sourceMap: SourceMap;
}> {
  /** The matched block */
  block: T;
  /** Position relative to the block start (0-based line and column) */
  blockPosition: Position;
}
//#endregion
export { BlockMatch, Position, SourceMap };
//# sourceMappingURL=source-map.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"source-map.d.ts","names":[],"sources":["../src/source-map.ts"],"sourcesContent":[],"mappings":";AAMA;AAuBA;AA8OA;;AAES,UAvQQ,QAAA,CAuQR;EAEQ;EAAQ,IAAA,EAAA,MAAA;;;;;;;;;;;UAlPR,SAAA;;;;;;;;;;;;;UA8OA;aAAkC;;;SAE1C;;iBAEQ"}
|