@xano/xanoscript-language-server 11.10.0 → 11.10.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -11,6 +11,7 @@ import { onDidChangeContent } from "./onDidChangeContent/onDidChangeContent.js";
11
11
  import { onHover } from "./onHover/onHover.js";
12
12
  import { onSemanticCheck } from "./onSemanticCheck/onSemanticCheck.js";
13
13
  import { TOKEN_TYPES } from "./onSemanticCheck/tokens.js";
14
+ import pkg from "./package.json" with { type: "json" };
14
15
 
15
16
  const messageReader = new BrowserMessageReader(self);
16
17
  const messageWriter = new BrowserMessageWriter(self);
@@ -47,6 +48,6 @@ connection.onRequest("textDocument/semanticTokens/full", (params) =>
47
48
  documents.onDidChangeContent((params) =>
48
49
  onDidChangeContent(params, connection)
49
50
  );
50
- connection.onInitialized(() => console.log("lang server init"));
51
+ connection.onInitialized(() => console.log(`XanoScript Language Server v${pkg.version}`));
51
52
  documents.listen(connection);
52
53
  connection.listen();
@@ -1,4 +1,6 @@
1
+ import { documentCache } from "../cache/documentCache.js";
1
2
  import { mapToVirtualJS } from "../embedded/embeddedContent.js";
3
+ import { findSegmentAtOffset } from "../parser/multidoc.js";
2
4
  import { getSchemeFromContent } from "../utils.js";
3
5
  import { workspaceIndex } from "../workspace/workspaceIndex.js";
4
6
  import { getContentAssistSuggestions } from "./contentAssist.js";
@@ -32,9 +34,28 @@ export function onCompletion(params, documents) {
32
34
  return null;
33
35
  }
34
36
 
35
- // Otherwise, handle as regular XanoScript
36
- const scheme = getSchemeFromContent(text);
37
- const prefix = text.slice(0, offset);
37
+ // For multidoc, resolve to the correct segment
38
+ const { parser } = documentCache.getOrParse(
39
+ params.textDocument.uri,
40
+ document.version,
41
+ text,
42
+ );
43
+
44
+ let scheme;
45
+ let prefix;
46
+
47
+ if (parser.isMultidoc) {
48
+ const match = findSegmentAtOffset(parser, offset);
49
+ if (!match) return null;
50
+ scheme = getSchemeFromContent(match.segment.text);
51
+ // Prefix is the segment's text up to the cursor's local offset — contentAssist
52
+ // parses backwards from the cursor to find context, so it must see only this
53
+ // segment's text, not the full multidoc text
54
+ prefix = match.segment.text.slice(0, match.localOffset);
55
+ } else {
56
+ scheme = getSchemeFromContent(text);
57
+ prefix = text.slice(0, offset);
58
+ }
38
59
 
39
60
  const suggestions = getContentAssistSuggestions(prefix, scheme);
40
61
 
@@ -10,10 +10,51 @@ import { encodeTokenType } from "./tokens.js";
10
10
  * @returns {SemanticTokensBuilder} Returns null if the scheme is not supported
11
11
  */
12
12
  export function higlightText(scheme, text, SemanticTokensBuilder) {
13
- return higlightDefault(text, SemanticTokensBuilder);
13
+ return higlightDefault(text, 0, SemanticTokensBuilder);
14
14
  }
15
15
 
16
- function higlightDefault(text, SemanticTokensBuilder) {
16
+ /**
17
+ * Highlight a multidoc by highlighting each segment independently.
18
+ * @param {Array<{text: string, globalOffset: number}>} segments - The segments from multidocParser
19
+ * @param {SemanticTokensBuilder} SemanticTokensBuilder - The semantic tokens builder constructor
20
+ */
26
+ export function higlightSegments(segments, SemanticTokensBuilder) {
27
+ const builder = new SemanticTokensBuilder();
28
+
29
+ // Running line offset: newlines contributed by each previous segment plus its separator
30
+ let lineOffset = 0;
31
+
32
+ for (const segment of segments) {
33
+ const lexResult = lexDocument(segment.text, true);
34
+
35
+ for (const token of lexResult.tokens) {
36
+ const tokenType = mapTokenToType(token.tokenType.name);
37
+ if (tokenType) {
38
+ builder.push(
39
+ token.startLine - 1 + lineOffset,
40
+ token.startColumn - 1,
41
+ token.image.length,
42
+ encodeTokenType(tokenType),
43
+ 0,
44
+ );
45
+ }
46
+ }
47
+
48
+ // Advance lineOffset: newlines in this segment's text + 2 for the \n---\n separator
49
+ // (separator has 2 newlines: one before --- and one after)
50
+ const segmentLines = segment.text.split("\n").length - 1;
51
+ lineOffset += segmentLines + 2;
52
+ }
53
+
54
+ return builder.build();
55
+ }
56
+
57
+ function higlightDefault(text, lineOffset, SemanticTokensBuilder) {
17
58
  const builder = new SemanticTokensBuilder();
18
59
 
19
60
  // Map Chevrotain tokens to semantic token types
@@ -23,8 +64,8 @@ function higlightDefault(text, SemanticTokensBuilder) {
23
64
  lexResult.tokens.forEach((token) => {
24
65
  const tokenType = mapTokenToType(token.tokenType.name);
25
66
  if (tokenType) {
26
- const line = token.startLine - 1; // Convert to 0-based for LSP
27
- const character = token.startColumn - 1; // Convert to 0-based for LSP
67
+ const line = token.startLine - 1 + lineOffset;
68
+ const character = token.startColumn - 1;
28
69
  builder.push(
29
70
  line,
30
71
  character,
@@ -1,5 +1,6 @@
1
- import { getSchemeFromContent } from "../utils";
2
- import { higlightText } from "./highlight";
1
+ import { documentCache } from "../cache/documentCache.js";
2
+ import { getSchemeFromContent } from "../utils.js";
3
+ import { higlightSegments,higlightText } from "./highlight.js";
3
4
 
4
5
  /**
5
6
  * Handles a semantic tokens request for the full document.
@@ -19,7 +20,17 @@ export function onSemanticCheck(params, documents, SemanticTokensBuilder) {
19
20
  }
20
21
 
21
22
  const text = document.getText();
22
- const scheme = getSchemeFromContent(text);
23
23
 
24
+ const { parser } = documentCache.getOrParse(
25
+ params.textDocument.uri,
26
+ document.version,
27
+ text,
28
+ );
29
+
30
+ if (parser.isMultidoc) {
31
+ return higlightSegments(parser.segments, SemanticTokensBuilder);
32
+ }
33
+
34
+ const scheme = getSchemeFromContent(text);
24
35
  return higlightText(scheme, text, SemanticTokensBuilder);
25
36
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@xano/xanoscript-language-server",
3
- "version": "11.10.0",
3
+ "version": "11.10.1",
4
4
  "description": "Language Server Protocol implementation for XanoScript",
5
5
  "type": "module",
6
6
  "main": "server.js",
@@ -860,6 +860,64 @@ function foo {
860
860
  // Both segments are empty strings — should not crash
861
861
  });
862
862
 
863
+ it("should parse workspace + table multidoc without errors on separator", () => {
864
+ const text = `workspace "car-coverage" {
865
+ acceptance = {ai_terms: false}
866
+ preferences = {
867
+ internal_docs : false
868
+ track_performance: true
869
+ sql_names : false
870
+ sql_columns : true
871
+ }
872
+
873
+ realtime = {canonical: "UJCEVaoiEJN-wjghNzMxj324-8w"}
874
+ env = {api_key: "12345"}
875
+ }
876
+ ---
877
+ table user {
878
+ auth = true
879
+
880
+ schema {
881
+ int id
882
+ timestamp created_at?=now
883
+ text name filters=trim
884
+ email? email filters=trim|lower
885
+ password? password filters=min:8|minAlpha:1|minDigit:1
886
+ text? refresh_token {
887
+ sensitive = true
888
+ visibility = "private"
889
+ }
890
+
891
+ timestamp? refresh_token_expires
892
+
893
+ // some description
894
+ enum my_enum {
895
+ values = ["true", "false", "2"]
896
+ sensitive = true
897
+ visibility = "private"
898
+ }
899
+
900
+ object test_visibility? {
901
+ schema {
902
+ text internal? filters=trim {
903
+ visibility = "internal"
904
+ }
905
+ }
906
+ }
907
+ }
908
+
909
+ index = [
910
+ {type: "primary", field: [{name: "id"}]}
911
+ {type: "btree", field: [{name: "created_at", op: "desc"}]}
912
+ {type: "btree|unique", field: [{name: "email", op: "asc"}]}
913
+ ]
914
+ }`;
915
+ const result = multidocParser(text);
916
+ expect(result.isMultidoc).to.be.true;
917
+ expect(result.segmentCount).to.equal(2);
918
+ expect(result.errors).to.be.empty;
919
+ });
920
+
863
921
  it("should not treat --- inside a string literal as separator", () => {
864
922
  // This is a known limitation — but test current behavior
865
923
  const text = `function foo {