@danielblomma/cortex-mcp 1.0.3 → 1.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@danielblomma/cortex-mcp",
3
3
  "mcpName": "io.github.DanielBlomma/cortex",
4
- "version": "1.0.3",
4
+ "version": "1.2.0",
5
5
  "description": "Local, repo-scoped context platform for coding assistants. Semantic search, graph relationships, and architectural rule context.",
6
6
  "type": "module",
7
7
  "author": "Daniel Blomma",
@@ -3,7 +3,7 @@ set -euo pipefail
3
3
 
4
4
  REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
5
5
  MCP_DIR="$REPO_ROOT/mcp"
6
- TOTAL_STEPS=5
6
+ TOTAL_STEPS=6
7
7
  STEP_INDEX=0
8
8
 
9
9
  print_logo() {
@@ -38,6 +38,8 @@ info "note: upstream RyuGraph dependencies may print deprecation warnings during
38
38
  NPM_CONFIG_CACHE="$MCP_DIR/.npm-cache" npm --prefix "$MCP_DIR" install --no-fund --no-update-notifier --loglevel=warn
39
39
  NPM_CONFIG_CACHE="$REPO_ROOT/scripts/parsers/.npm-cache" npm --prefix "$REPO_ROOT/scripts/parsers" install --no-fund --no-update-notifier --loglevel=warn
40
40
 
41
+ source "$REPO_ROOT/scripts/lib/enterprise-check.sh"
42
+
41
43
  step "Indexing repository context"
42
44
  "$REPO_ROOT/scripts/ingest.sh"
43
45
 
@@ -403,26 +403,7 @@ function computeTopConnected() {
403
403
 
404
404
  // ── Data: estimate tokens per task (realistic comparison) ────
405
405
  function estimatePerTaskTokens(baseline) {
406
- // Read all entity types for accurate excerpt averaging
407
- const entityFiles = [
408
- "entities.file.jsonl",
409
- "entities.chunk.jsonl",
410
- "entities.rule.jsonl",
411
- "entities.adr.jsonl"
412
- ];
413
-
414
- let totalExcerptChars = 0;
415
- let entityCount = 0;
416
-
417
- for (const file of entityFiles) {
418
- const entities = readJsonlSafe(path.join(CACHE_DIR, file));
419
- for (const e of entities) {
420
- totalExcerptChars += (e.excerpt || e.body || "").slice(0, 500).length;
421
- entityCount++;
422
- }
423
- }
424
-
425
- if (entityCount === 0 || baseline.files === 0) {
406
+ if (baseline.files === 0) {
426
407
  return { codebase: baseline.tokens, baselinePerTask: 0, cortexPerTask: 0,
427
408
  filesPerTask: 0, queriesPerTask: 0, ratio: 0, reduction: 0 };
428
409
  }
@@ -434,14 +415,13 @@ function estimatePerTaskTokens(baseline) {
434
415
  const baselinePerTask = filesPerTask * avgFileTokens;
435
416
 
436
417
  // --- With Cortex: ~3 search queries per task, top 5 results each ---
418
+ // Fixed cost per result: search returns truncated snippets + metadata.
419
+ // More entities in the index = better precision, NOT more tokens per query.
437
420
  const TYPICAL_SEARCHES_PER_TASK = 3;
438
421
  const topK = 5;
439
- const avgExcerptChars = totalExcerptChars / entityCount;
440
- // Per result: excerpt + JSON metadata (~350 chars for id, type, title, path, scores, etc.)
441
- const perResultChars = avgExcerptChars + 350;
442
- // Per query: top-K results + response wrapper (~300 chars for query, ranking, counts)
443
- const perQueryChars = topK * perResultChars + 300;
444
- const perQueryTokens = Math.round(perQueryChars / 4);
422
+ const PER_RESULT_CHARS = 850; // ~500 char snippet + ~350 char metadata
423
+ const PER_QUERY_OVERHEAD = 300; // query wrapper, ranking, counts
424
+ const perQueryTokens = Math.round((topK * PER_RESULT_CHARS + PER_QUERY_OVERHEAD) / 4);
445
425
  const cortexPerTask = TYPICAL_SEARCHES_PER_TASK * perQueryTokens;
446
426
 
447
427
  const ratio = cortexPerTask > 0 ? Math.round(baselinePerTask / cortexPerTask) : 0;
@@ -9,6 +9,10 @@ import {
9
9
  isVbNetParserAvailable,
10
10
  parseCode as parseVbNetCode
11
11
  } from "./parsers/vbnet.mjs";
12
+ import {
13
+ isCSharpParserAvailable,
14
+ parseCode as parseCSharpCode
15
+ } from "./parsers/csharp.mjs";
12
16
  import {
13
17
  isCppParserAvailable,
14
18
  parseCode as parseCppCode
@@ -199,6 +203,14 @@ const CHUNK_PARSERS = new Map([
199
203
  isAvailable: isVbNetParserAvailable
200
204
  }
201
205
  ],
206
+ [
207
+ ".cs",
208
+ {
209
+ language: "csharp",
210
+ parse: parseCSharpCode,
211
+ isAvailable: isCSharpParserAvailable
212
+ }
213
+ ],
202
214
  [
203
215
  ".sql",
204
216
  {
@@ -0,0 +1,143 @@
1
+ #!/usr/bin/env node
2
+ /**
3
+ * Conditional C# parser bridge for Cortex.
4
+ *
5
+ * Uses a Roslyn sidecar via `dotnet run` when a .NET runtime is available.
6
+ * If no runtime exists, callers should skip structured chunk extraction and
7
+ * fall back to plain file-level indexing.
8
+ */
9
+
10
+ import path from "node:path";
11
+ import { fileURLToPath } from "node:url";
12
+ import { spawnSync } from "node:child_process";
13
+
14
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

// Defaults; both can be overridden through environment variables (see below).
const DEFAULT_DOTNET_COMMAND = "dotnet";
const DEFAULT_PROJECT_PATH = path.join(__dirname, "dotnet", "CSharpParser", "CSharpParser.csproj");

// Memoized runtime probe result; cleared via resetCSharpParserRuntimeCache().
let runtimeCache = null;

/** Resolve the `dotnet` executable, honoring the CORTEX_DOTNET_CMD override. */
function getDotnetCommand() {
  const override = process.env.CORTEX_DOTNET_CMD;
  return override && override.trim().length > 0 ? override.trim() : DEFAULT_DOTNET_COMMAND;
}

/** Resolve the Roslyn sidecar project path, honoring CORTEX_CSHARP_PARSER_PROJECT. */
function getProjectPath() {
  const override = process.env.CORTEX_CSHARP_PARSER_PROJECT;
  return override && override.trim().length > 0 ? override.trim() : DEFAULT_PROJECT_PATH;
}

/** Drop the cached probe result (e.g. after changing env overrides in tests). */
export function resetCSharpParserRuntimeCache() {
  runtimeCache = null;
}

/**
 * Probe for a usable .NET runtime by running `<command> --version`.
 * The result is cached for the lifetime of the process.
 *
 * @returns {{available: boolean, command: string, projectPath: string,
 *            version?: string, reason?: string}} probe outcome; `reason`
 *          is set only when `available` is false.
 */
export function getCSharpParserRuntime() {
  if (runtimeCache) {
    return runtimeCache;
  }

  const command = getDotnetCommand();
  const versionProbe = spawnSync(command, ["--version"], {
    encoding: "utf8",
    timeout: 5000
  });

  if (versionProbe.error || versionProbe.status !== 0) {
    runtimeCache = {
      available: false,
      command,
      projectPath: getProjectPath(),
      reason:
        versionProbe.error?.message ||
        versionProbe.stderr?.trim() ||
        "dotnet runtime not available"
    };
    return runtimeCache;
  }

  runtimeCache = {
    available: true,
    command,
    projectPath: getProjectPath(),
    version: versionProbe.stdout.trim()
  };
  return runtimeCache;
}

/** @returns {boolean} true when a .NET runtime answered the version probe. */
export function isCSharpParserAvailable() {
  return getCSharpParserRuntime().available;
}

/**
 * Parse C# source via the Roslyn sidecar (`dotnet run` against the project).
 *
 * @param {string} code - the source text (piped to the sidecar over stdin).
 * @param {string} filePath - logical path, forwarded for diagnostics/spans.
 * @param {string} [language="csharp"] - language tag echoed into each chunk.
 * @returns {{chunks: Array, errors: Array}} parsed chunks, or empty arrays
 *          (no error entries) when no .NET runtime is available so callers
 *          can fall back to plain file-level indexing.
 */
export function parseCode(code, filePath, language = "csharp") {
  const runtime = getCSharpParserRuntime();
  if (!runtime.available) {
    // Graceful degradation: structured chunking is skipped, not fatal.
    return { chunks: [], errors: [] };
  }

  const args = [
    "run",
    "--project",
    runtime.projectPath,
    "--configuration",
    "Release",
    "--",
    "--stdin",
    "--file",
    filePath,
    "--language",
    language
  ];

  const result = spawnSync(runtime.command, args, {
    input: code,
    encoding: "utf8",
    timeout: 30000,
    maxBuffer: 10 * 1024 * 1024
  });

  if (result.error || result.status !== 0) {
    return {
      chunks: [],
      errors: [
        {
          message:
            result.error?.message ||
            result.stderr?.trim() ||
            `C# parser failed with exit code ${result.status ?? "unknown"}`
        }
      ]
    };
  }

  // `dotnet run` may emit MSBuild/restore progress on stdout ahead of the
  // program's own output (notably on the first, cold build). The sidecar
  // prints its JSON payload as one final line (WriteIndented=false), so
  // parse the last non-empty line rather than the raw stream.
  const stdoutLines = (result.stdout ?? "")
    .split("\n")
    .map((line) => line.trim())
    .filter((line) => line.length > 0);
  const payload = stdoutLines.length > 0 ? stdoutLines[stdoutLines.length - 1] : "";

  try {
    const parsed = JSON.parse(payload);
    return {
      chunks: Array.isArray(parsed.chunks) ? parsed.chunks : [],
      errors: Array.isArray(parsed.errors) ? parsed.errors : []
    };
  } catch (error) {
    return {
      chunks: [],
      errors: [
        {
          message: `C# parser returned invalid JSON: ${error instanceof Error ? error.message : String(error)}`
        }
      ]
    };
  }
}
130
+
131
// CLI entry point: `node csharp.mjs <file.cs>` parses one file and prints the
// result as JSON. Detect direct invocation by comparing filesystem paths via
// fileURLToPath rather than a hand-built `file://${argv[1]}` string, which
// breaks on Windows (drive letters, backslashes) and on paths that need
// percent-encoding in URLs.
const invokedPath = process.argv[1] ? path.resolve(process.argv[1]) : "";
if (invokedPath && fileURLToPath(import.meta.url) === invokedPath) {
  const fs = await import("node:fs");
  const filePath = process.argv[2];

  if (!filePath) {
    console.error("Usage: csharp.mjs <file.cs>");
    process.exit(1);
  }

  const code = fs.readFileSync(filePath, "utf8");
  const result = parseCode(code, filePath, "csharp");
  console.log(JSON.stringify(result, null, 2));
}
@@ -0,0 +1,13 @@
1
<Project Sdk="Microsoft.NET.Sdk">
  <!-- Roslyn sidecar invoked by mcp parsers/csharp.mjs via `dotnet run`. -->
  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <!-- Requires a .NET 10 SDK on the host; the JS bridge probes
         `dotnet --version` before use and degrades gracefully without it. -->
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <LangVersion>latest</LangVersion>
  </PropertyGroup>

  <ItemGroup>
    <!-- Roslyn C# syntax API used by Program.cs to chunk source files. -->
    <PackageReference Include="Microsoft.CodeAnalysis.CSharp" Version="4.11.0" />
  </ItemGroup>
</Project>
@@ -0,0 +1,406 @@
1
+ using System.Text.Json;
2
+ using Microsoft.CodeAnalysis;
3
+ using Microsoft.CodeAnalysis.CSharp;
4
+ using Microsoft.CodeAnalysis.CSharp.Syntax;
5
+
6
// Entry point for the Roslyn sidecar: read C# source (stdin or --file),
// chunk it, and print one JSON document (camelCase, single line) on stdout.
var cliOptions = ParseArgs(args);
if (string.IsNullOrWhiteSpace(cliOptions.FilePath))
{
    Console.Error.WriteLine("Missing required --file argument.");
    Environment.Exit(1);
}

var sourceText = cliOptions.UseStdin
    ? Console.In.ReadToEnd()
    : File.ReadAllText(cliOptions.FilePath);

var output = ParseCSharp(sourceText, cliOptions.FilePath, cliOptions.Language);

var serializerOptions = new JsonSerializerOptions
{
    PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
    WriteIndented = false
};

Console.WriteLine(JsonSerializer.Serialize(output, serializerOptions));

return;
27
+
28
// Minimal flag parser for the sidecar CLI.
// Recognized: --stdin, --file <path>, --language <id>. Unknown arguments are
// ignored, as is a value-taking flag that appears last with no value after it.
static ParseOptions ParseArgs(string[] args)
{
    var parsed = new ParseOptions();
    var cursor = 0;

    while (cursor < args.Length)
    {
        var current = args[cursor];

        if (current == "--stdin")
        {
            parsed.UseStdin = true;
        }
        else if (current == "--file" && cursor + 1 < args.Length)
        {
            cursor += 1;
            parsed.FilePath = args[cursor];
        }
        else if (current == "--language" && cursor + 1 < args.Length)
        {
            cursor += 1;
            parsed.Language = args[cursor];
        }

        cursor += 1;
    }

    return parsed;
}
56
+
57
// Parse one C# source text. On syntax errors, return the diagnostics and no
// chunks so the caller can fall back to plain file-level indexing.
static ParserOutput ParseCSharp(string source, string filePath, string language)
{
    var tree = CSharpSyntaxTree.ParseText(source, path: filePath);
    var root = tree.GetCompilationUnitRoot();

    var errors = new List<ParserError>();
    foreach (var diagnostic in tree.GetDiagnostics())
    {
        if (diagnostic.Severity != DiagnosticSeverity.Error)
        {
            continue;
        }

        // Roslyn positions are 0-based; report 1-based line/column.
        var position = diagnostic.Location.GetLineSpan().StartLinePosition;
        errors.Add(new ParserError(diagnostic.GetMessage(), position.Line + 1, position.Character + 1));
    }

    if (errors.Count > 0)
    {
        return new ParserOutput(new List<ChunkOutput>(), errors);
    }

    var collector = new CSharpChunkCollector(tree, root, source, language);
    return new ParserOutput(collector.Collect(), errors);
}
78
+
79
/// <summary>
/// Flattens a parsed C# compilation unit into <see cref="ChunkOutput"/> records:
/// one chunk per namespace-level type and per type member (methods, constructors,
/// properties, events, fields, nested types).
/// </summary>
sealed class CSharpChunkCollector
{
    private readonly CompilationUnitSyntax _root;
    private readonly string _language;
    private readonly string[] _imports;

    // `tree` and `source` are accepted to keep the call-site contract stable,
    // but all text and spans are derived from the syntax nodes themselves.
    public CSharpChunkCollector(SyntaxTree tree, CompilationUnitSyntax root, string source, string language)
    {
        _root = root;
        _language = language;
        _imports = CollectUsings(root);
    }

    /// <summary>Distinct using-directive names: top-level (incl. global) plus
    /// directives scoped to first-level namespace declarations.</summary>
    private static string[] CollectUsings(CompilationUnitSyntax root)
    {
        var usings = new List<string>();

        // Top-level using directives (including global using)
        foreach (var directive in root.Usings)
        {
            var name = directive.Name?.ToString();
            if (!string.IsNullOrWhiteSpace(name))
            {
                usings.Add(name);
            }
        }

        // Namespace-scoped using directives
        foreach (var member in root.Members)
        {
            if (member is BaseNamespaceDeclarationSyntax ns)
            {
                foreach (var directive in ns.Usings)
                {
                    var name = directive.Name?.ToString();
                    if (!string.IsNullOrWhiteSpace(name))
                    {
                        usings.Add(name);
                    }
                }
            }
        }

        return usings.Distinct(StringComparer.Ordinal).ToArray();
    }

    /// <summary>Entry point: walk every top-level member and return the chunks.</summary>
    public List<ChunkOutput> Collect()
    {
        var chunks = new List<ChunkOutput>();

        foreach (var member in _root.Members)
        {
            CollectMember(chunks, member, null);
        }

        return chunks;
    }

    // Namespace-level members. `parentName` prefixes the chunk name; namespaces
    // themselves are transparent (their members are recursed into directly).
    private void CollectMember(List<ChunkOutput> chunks, MemberDeclarationSyntax member, string? parentName)
    {
        switch (member)
        {
            case BaseNamespaceDeclarationSyntax namespaceDecl:
                foreach (var nested in namespaceDecl.Members)
                {
                    CollectMember(chunks, nested, parentName);
                }
                break;

            case ClassDeclarationSyntax classDecl:
                AddTypeChunk(chunks, classDecl.Identifier.Text, "class", classDecl, parentName);
                foreach (var nested in classDecl.Members)
                {
                    CollectTypeMember(chunks, nested, classDecl.Identifier.Text);
                }
                break;

            case StructDeclarationSyntax structDecl:
                AddTypeChunk(chunks, structDecl.Identifier.Text, "struct", structDecl, parentName);
                foreach (var nested in structDecl.Members)
                {
                    CollectTypeMember(chunks, nested, structDecl.Identifier.Text);
                }
                break;

            case InterfaceDeclarationSyntax interfaceDecl:
                AddTypeChunk(chunks, interfaceDecl.Identifier.Text, "interface", interfaceDecl, parentName);
                foreach (var nested in interfaceDecl.Members)
                {
                    CollectTypeMember(chunks, nested, interfaceDecl.Identifier.Text);
                }
                break;

            case EnumDeclarationSyntax enumDecl:
                AddTypeChunk(chunks, enumDecl.Identifier.Text, "enum", enumDecl, parentName);
                break;

            case RecordDeclarationSyntax recordDecl:
                AddTypeChunk(chunks, recordDecl.Identifier.Text, "record", recordDecl, parentName);
                foreach (var nested in recordDecl.Members)
                {
                    CollectTypeMember(chunks, nested, recordDecl.Identifier.Text);
                }
                break;
        }
    }

    // Members declared inside a type; chunk names are "<Type>.<Member>".
    private void CollectTypeMember(List<ChunkOutput> chunks, MemberDeclarationSyntax member, string parentTypeName)
    {
        switch (member)
        {
            case MethodDeclarationSyntax methodDecl:
                AddMethodChunk(chunks, methodDecl, parentTypeName);
                break;
            case ConstructorDeclarationSyntax ctorDecl:
                AddConstructorChunk(chunks, ctorDecl, parentTypeName);
                break;
            case PropertyDeclarationSyntax propDecl:
                AddPropertyChunk(chunks, propDecl, parentTypeName);
                break;
            case EventDeclarationSyntax eventDecl:
                // Accessor-style events (`event E X { add {...} remove {...} }`).
                AddTypeChunk(chunks, $"{parentTypeName}.{eventDecl.Identifier.Text}", "event", eventDecl, null);
                break;
            case EventFieldDeclarationSyntax eventFieldDecl:
                // Field-style events (`public event EventHandler Changed;`) are
                // the common form and use a different syntax node than the
                // accessor form above; without this case they were dropped.
                foreach (var variable in eventFieldDecl.Declaration.Variables)
                {
                    AddTypeChunk(chunks, $"{parentTypeName}.{variable.Identifier.Text}", "event", eventFieldDecl, null);
                }
                break;
            case FieldDeclarationSyntax fieldDecl:
                // One chunk per declarator (`int a, b;` yields two).
                foreach (var variable in fieldDecl.Declaration.Variables)
                {
                    AddTypeChunk(chunks, $"{parentTypeName}.{variable.Identifier.Text}", "field", fieldDecl, null);
                }
                break;
            case ClassDeclarationSyntax nestedClass:
                AddTypeChunk(chunks, $"{parentTypeName}.{nestedClass.Identifier.Text}", "class", nestedClass, null);
                foreach (var nested in nestedClass.Members)
                {
                    CollectTypeMember(chunks, nested, $"{parentTypeName}.{nestedClass.Identifier.Text}");
                }
                break;
            case StructDeclarationSyntax nestedStruct:
                AddTypeChunk(chunks, $"{parentTypeName}.{nestedStruct.Identifier.Text}", "struct", nestedStruct, null);
                foreach (var nested in nestedStruct.Members)
                {
                    CollectTypeMember(chunks, nested, $"{parentTypeName}.{nestedStruct.Identifier.Text}");
                }
                break;
            case RecordDeclarationSyntax nestedRecord:
                AddTypeChunk(chunks, $"{parentTypeName}.{nestedRecord.Identifier.Text}", "record", nestedRecord, null);
                foreach (var nested in nestedRecord.Members)
                {
                    CollectTypeMember(chunks, nested, $"{parentTypeName}.{nestedRecord.Identifier.Text}");
                }
                break;
            case EnumDeclarationSyntax nestedEnum:
                AddTypeChunk(chunks, $"{parentTypeName}.{nestedEnum.Identifier.Text}", "enum", nestedEnum, null);
                break;
        }
    }

    // Generic chunk for types, fields and events; `parentName` (when given)
    // is prepended to produce a qualified name.
    private void AddTypeChunk(List<ChunkOutput> chunks, string name, string kind, SyntaxNode node, string? parentName)
    {
        chunks.Add(BuildChunk(
            parentName is null ? name : $"{parentName}.{name}",
            kind,
            BuildSignature(kind, name, node),
            node,
            GetCalls(node),
            _imports,
            IsExported(node)
        ));
    }

    private void AddMethodChunk(List<ChunkOutput> chunks, MethodDeclarationSyntax node, string parentTypeName)
    {
        var name = $"{parentTypeName}.{node.Identifier.Text}";
        chunks.Add(BuildChunk(
            name,
            "method",
            $"{node.ReturnType} {node.Identifier.Text}{node.ParameterList}",
            node,
            GetCalls(node),
            _imports,
            IsExported(node)
        ));
    }

    private void AddConstructorChunk(List<ChunkOutput> chunks, ConstructorDeclarationSyntax node, string parentTypeName)
    {
        chunks.Add(BuildChunk(
            $"{parentTypeName}.ctor",
            "constructor",
            $"{node.Identifier.Text}{node.ParameterList}",
            node,
            GetCalls(node),
            _imports,
            IsExported(node)
        ));
    }

    private void AddPropertyChunk(List<ChunkOutput> chunks, PropertyDeclarationSyntax node, string parentTypeName)
    {
        chunks.Add(BuildChunk(
            $"{parentTypeName}.{node.Identifier.Text}",
            "property",
            $"{node.Type} {node.Identifier.Text}",
            node,
            GetCalls(node),
            _imports,
            IsExported(node)
        ));
    }

    // Assembles the serializable record; line numbers are converted to 1-based.
    private ChunkOutput BuildChunk(
        string name,
        string kind,
        string signature,
        SyntaxNode node,
        IReadOnlyCollection<string> calls,
        IReadOnlyCollection<string> imports,
        bool exported)
    {
        var span = node.GetLocation().GetLineSpan();
        return new ChunkOutput(
            name,
            kind,
            signature,
            node.ToFullString(),
            span.StartLinePosition.Line + 1,
            span.EndLinePosition.Line + 1,
            _language,
            exported,
            calls.ToArray(),
            imports.ToArray()
        );
    }

    // Human-readable one-line signature for type declarations; other kinds
    // fall through to "<kind> <name>".
    private static string BuildSignature(string kind, string name, SyntaxNode node)
    {
        return node switch
        {
            ClassDeclarationSyntax c => $"{ModifiersOf(c.Modifiers)} class {c.Identifier}{c.TypeParameterList}{BaseListOf(c.BaseList)}".Trim(),
            StructDeclarationSyntax s => $"{ModifiersOf(s.Modifiers)} struct {s.Identifier}{s.TypeParameterList}{BaseListOf(s.BaseList)}".Trim(),
            InterfaceDeclarationSyntax i => $"{ModifiersOf(i.Modifiers)} interface {i.Identifier}{i.TypeParameterList}{BaseListOf(i.BaseList)}".Trim(),
            EnumDeclarationSyntax e => $"{ModifiersOf(e.Modifiers)} enum {e.Identifier}{BaseListOf(e.BaseList)}".Trim(),
            RecordDeclarationSyntax r => $"{ModifiersOf(r.Modifiers)} record {r.Identifier}{r.TypeParameterList}{r.ParameterList}{BaseListOf(r.BaseList)}".Trim(),
            _ => $"{kind} {name}"
        };
    }

    private static string ModifiersOf(SyntaxTokenList modifiers)
    {
        return modifiers.Count > 0 ? modifiers.ToString() : "";
    }

    private static string BaseListOf(BaseListSyntax? baseList)
    {
        return baseList is not null ? $" : {baseList.Types}" : "";
    }

    // "Exported" means the declaration carries an explicit `public` modifier.
    private static bool IsExported(SyntaxNode node)
    {
        SyntaxTokenList modifiers = node switch
        {
            TypeDeclarationSyntax typeDecl => typeDecl.Modifiers,
            MethodDeclarationSyntax methodDecl => methodDecl.Modifiers,
            ConstructorDeclarationSyntax ctorDecl => ctorDecl.Modifiers,
            PropertyDeclarationSyntax propDecl => propDecl.Modifiers,
            EventDeclarationSyntax eventDecl => eventDecl.Modifiers,
            EventFieldDeclarationSyntax eventFieldDecl => eventFieldDecl.Modifiers,
            FieldDeclarationSyntax fieldDecl => fieldDecl.Modifiers,
            _ => default
        };

        if (modifiers.Count == 0)
        {
            return false;
        }

        return modifiers.Any(modifier => modifier.IsKind(SyntaxKind.PublicKeyword));
    }

    // Distinct simple names of invocation targets inside the node
    // (e.g. `Foo()`, `x.Bar()`, `Baz<T>()` yield "Foo", "Bar", "Baz").
    private static IReadOnlyCollection<string> GetCalls(SyntaxNode node)
    {
        return node.DescendantNodes()
            .OfType<InvocationExpressionSyntax>()
            .Select(invocation => invocation.Expression)
            .Select(GetInvocationName)
            .Where(name => !string.IsNullOrWhiteSpace(name))
            .Distinct(StringComparer.Ordinal)
            .ToArray();
    }

    private static string? GetInvocationName(ExpressionSyntax expression)
    {
        return expression switch
        {
            IdentifierNameSyntax identifier => identifier.Identifier.Text,
            GenericNameSyntax genericName => genericName.Identifier.Text,
            MemberAccessExpressionSyntax memberAccess => memberAccess.Name.Identifier.Text,
            InvocationExpressionSyntax nestedInvocation => GetInvocationName(nestedInvocation.Expression),
            _ => null
        };
    }
}
383
+
384
/// <summary>CLI options for the sidecar (populated by ParseArgs).</summary>
sealed record ParseOptions
{
    // When true, source is read from stdin; FilePath is still used for spans.
    public bool UseStdin { get; set; }
    // Logical path of the file being parsed (required by the entry point).
    public string FilePath { get; set; } = "";
    // Language tag echoed into every emitted chunk.
    public string Language { get; set; } = "csharp";
}

/// <summary>
/// One extracted declaration. Serialized with camelCase property names for
/// consumption by the JavaScript bridge (parsers/csharp.mjs).
/// </summary>
sealed record ChunkOutput(
    string Name,
    string Kind,
    string Signature,
    string Body,
    int StartLine,
    int EndLine,
    string Language,
    bool Exported,
    string[] Calls,
    string[] Imports
);

/// <summary>A syntax error with 1-based line and column.</summary>
sealed record ParserError(string Message, int Line, int Column);

/// <summary>Top-level result written as JSON to stdout.</summary>
sealed record ParserOutput(List<ChunkOutput> Chunks, List<ParserError> Errors);