@aiready/core 0.24.3 → 0.24.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,547 @@
1
+ import {
2
+ BaseLanguageParser
3
+ } from "./chunk-2N7ISIKE.mjs";
4
+
5
+ // src/parsers/metadata-utils.ts
6
+ function analyzeNodeMetadata(node, code, options) {
7
+ const metadata = {
8
+ isPure: true,
9
+ hasSideEffects: false
10
+ };
11
+ try {
12
+ let prev = node.previousSibling || null;
13
+ while (prev && /comment/i.test(prev.type)) {
14
+ const text = prev.text || "";
15
+ const loc = {
16
+ start: {
17
+ line: prev.startPosition.row + 1,
18
+ column: prev.startPosition.column
19
+ },
20
+ end: {
21
+ line: prev.endPosition.row + 1,
22
+ column: prev.endPosition.column
23
+ }
24
+ };
25
+ if (text.trim().startsWith("/**") || text.trim().startsWith("/*")) {
26
+ metadata.documentation = {
27
+ content: text.replace(/^[/*]+|[/*]+$/g, "").trim(),
28
+ type: "comment",
29
+ loc
30
+ };
31
+ break;
32
+ }
33
+ if (text.trim().startsWith("///")) {
34
+ metadata.documentation = {
35
+ content: text.replace(/^\/\/\//, "").trim(),
36
+ type: "xml-doc",
37
+ loc
38
+ };
39
+ break;
40
+ }
41
+ if (text.trim().startsWith("//")) {
42
+ metadata.documentation = {
43
+ content: text.replace(/^\/\//, "").trim(),
44
+ type: "comment",
45
+ loc
46
+ };
47
+ break;
48
+ }
49
+ prev = prev.previousSibling;
50
+ }
51
+ if (node.type === "function_definition" || node.type === "class_definition") {
52
+ const body2 = node.childForFieldName ? node.childForFieldName("body") : node.children.find((c) => c.type === "block");
53
+ if (body2 && body2.children.length > 0) {
54
+ const firstStmt = body2.children[0];
55
+ if (firstStmt.type === "expression_statement" && firstStmt.firstChild?.type === "string") {
56
+ metadata.documentation = {
57
+ content: firstStmt.firstChild.text.replace(/['"`]/g, "").trim(),
58
+ type: "docstring",
59
+ loc: {
60
+ start: {
61
+ line: firstStmt.startPosition.row + 1,
62
+ column: firstStmt.startPosition.column
63
+ },
64
+ end: {
65
+ line: firstStmt.endPosition.row + 1,
66
+ column: firstStmt.endPosition.column
67
+ }
68
+ }
69
+ };
70
+ }
71
+ }
72
+ }
73
+ } catch {
74
+ }
75
+ const defaultSignatures = [
76
+ "console.",
77
+ "fmt.",
78
+ "panic(",
79
+ "os.Exit",
80
+ "log.",
81
+ "Console.Write",
82
+ "File.Write",
83
+ "System.out",
84
+ "System.err",
85
+ "Files.write",
86
+ "process.exit",
87
+ "exit("
88
+ ];
89
+ const signatures = Array.from(
90
+ /* @__PURE__ */ new Set([...options?.sideEffectSignatures || [], ...defaultSignatures])
91
+ );
92
+ const walk = (n) => {
93
+ try {
94
+ const t = n.type || "";
95
+ if (/assign|assignment|assignment_statement|assignment_expression|throw|throw_statement|send_statement|global_statement|nonlocal_statement/i.test(
96
+ t
97
+ )) {
98
+ metadata.isPure = false;
99
+ metadata.hasSideEffects = true;
100
+ }
101
+ const text = n.text || "";
102
+ for (const s of signatures) {
103
+ if (text.includes(s)) {
104
+ metadata.isPure = false;
105
+ metadata.hasSideEffects = true;
106
+ break;
107
+ }
108
+ }
109
+ for (let i = 0; i < n.childCount; i++) {
110
+ const c = n.child(i);
111
+ if (c) walk(c);
112
+ }
113
+ } catch {
114
+ }
115
+ };
116
+ const body = node.childForFieldName?.("body") || node.children.find(
117
+ (c) => /body|block|class_body|declaration_list|function_body/.test(c.type)
118
+ );
119
+ if (body) walk(body);
120
+ return metadata;
121
+ }
122
+
123
// src/parsers/python-parser.ts
// Centralized string constants for PythonParser: tree-sitter node/field
// names, Python special (dunder) identifiers, side-effect builtins, and the
// labels emitted on ExportInfo records. Every key here is interface — both
// the AST path and the regex fallback reference these values.
var PYTHON_CONSTANTS = {
  // Node type names produced by the tree-sitter Python grammar.
  NODES: {
    IMPORT_STATEMENT: "import_statement",
    IMPORT_FROM_STATEMENT: "import_from_statement",
    DOTTED_NAME: "dotted_name",
    ALIASED_IMPORT: "aliased_import",
    WILDCARD_IMPORT: "wildcard_import",
    FUNCTION_DEFINITION: "function_definition",
    CLASS_DEFINITION: "class_definition",
    EXPRESSION_STATEMENT: "expression_statement",
    ASSIGNMENT: "assignment",
    IDENTIFIER: "identifier",
    TYPED_PARAMETER: "typed_parameter",
    DEFAULT_PARAMETER: "default_parameter"
  },
  // Field names passed to childForFieldName().
  FIELDS: {
    NAME: "name",
    MODULE_NAME: "module_name",
    LEFT: "left",
    PARAMETERS: "parameters"
  },
  // Wildcard-import marker and Python dunder identifiers (used for the
  // internal-name filter in export extraction and as naming exceptions).
  SPECIAL: {
    WILDCARD: "*",
    DUNDER_ALL: "__all__",
    DUNDER_VERSION: "__version__",
    DUNDER_AUTHOR: "__author__",
    DUNDER_INIT: "__init__",
    DUNDER_STR: "__str__",
    DUNDER_REPR: "__repr__",
    DUNDER_NAME: "__name__",
    DUNDER_MAIN: "__main__",
    DUNDER_FILE: "__file__",
    DUNDER_DOC: "__doc__",
    DUNDER_DICT: "__dict__",
    DUNDER_CLASS: "__class__",
    DUNDER_MODULE: "__module__",
    DUNDER_BASES: "__bases__",
    // NOTE(review): same value as DUNDER_MAIN — looks redundant, but it is
    // kept since code outside this chunk may reference it; confirm before
    // removing.
    MAIN_VAL: "__main__"
  },
  // Builtin call prefixes treated as side-effect signatures by
  // analyzeMetadata() and the regex purity heuristic.
  BUILTINS: {
    PRINT: "print(",
    INPUT: "input(",
    OPEN: "open("
  },
  // Values emitted in the `type` field of ExportInfo records.
  TYPES: {
    FUNCTION: "function",
    CLASS: "class",
    VARIABLE: "variable",
    CONST: "const",
    DOCSTRING: "docstring"
  }
};
176
var PythonParser = class extends BaseLanguageParser {
  /**
   * Parser for Python source files (.py).
   *
   * Uses tree-sitter AST walks (extractImportsAST / extractExportsAST)
   * when the grammar is available, with line-oriented regex extraction
   * (parseRegex) as a degraded fallback.
   */
  constructor() {
    super(...arguments);
    // Inlined Language.Python enum value (see the package's .d.ts:
    // `readonly language = Language.Python`).
    this.language = "python" /* Python */;
    this.extensions = [".py"];
  }
  // Name of the tree-sitter grammar to load for this parser.
  getParserName() {
    return "python";
  }
  /**
   * Analyze metadata for a Python node (purity, side effects).
   * Delegates to the shared analyzer, registering the Python builtins
   * print( / input( / open( as additional side-effect signatures.
   */
  analyzeMetadata(node, code) {
    return analyzeNodeMetadata(node, code, {
      sideEffectSignatures: [
        PYTHON_CONSTANTS.BUILTINS.PRINT,
        PYTHON_CONSTANTS.BUILTINS.INPUT,
        PYTHON_CONSTANTS.BUILTINS.OPEN
      ]
    });
  }
  /**
   * Extract import information using AST walk.
   *
   * Handles `import a.b, c as d` (dotted_name / aliased_import children)
   * and `from m import x, y as z` / `from m import *`. Only direct
   * children of the root node are visited, so imports nested inside
   * functions or conditionals are not reported.
   */
  extractImportsAST(rootNode) {
    const imports = [];
    const processImportNode = (node) => {
      if (node.type === PYTHON_CONSTANTS.NODES.IMPORT_STATEMENT) {
        for (const child of node.children) {
          if (child.type === PYTHON_CONSTANTS.NODES.DOTTED_NAME) {
            // `import a.b` — the dotted name is both source and specifier.
            const source = child.text;
            imports.push({
              source,
              specifiers: [source],
              loc: {
                start: {
                  line: child.startPosition.row + 1,
                  column: child.startPosition.column
                },
                end: {
                  line: child.endPosition.row + 1,
                  column: child.endPosition.column
                }
              }
            });
          } else if (child.type === PYTHON_CONSTANTS.NODES.ALIASED_IMPORT) {
            // `import a.b as c` — record the original module name, not the
            // alias; the loc covers the whole aliased_import node.
            const nameNode = child.childForFieldName(
              PYTHON_CONSTANTS.FIELDS.NAME
            );
            if (nameNode) {
              const source = nameNode.text;
              imports.push({
                source,
                specifiers: [source],
                loc: {
                  start: {
                    line: child.startPosition.row + 1,
                    column: child.startPosition.column
                  },
                  end: {
                    line: child.endPosition.row + 1,
                    column: child.endPosition.column
                  }
                }
              });
            }
          }
        }
      } else if (node.type === PYTHON_CONSTANTS.NODES.IMPORT_FROM_STATEMENT) {
        const moduleNameNode = node.childForFieldName(
          PYTHON_CONSTANTS.FIELDS.MODULE_NAME
        );
        if (moduleNameNode) {
          const source = moduleNameNode.text;
          const specifiers = [];
          for (const child of node.children) {
            // Any dotted_name other than the module name is an imported
            // symbol; aliased imports keep the original name; `*` becomes
            // the wildcard specifier.
            if (child.type === PYTHON_CONSTANTS.NODES.DOTTED_NAME && child !== moduleNameNode) {
              specifiers.push(child.text);
            } else if (child.type === PYTHON_CONSTANTS.NODES.ALIASED_IMPORT) {
              const nameNode = child.childForFieldName(
                PYTHON_CONSTANTS.FIELDS.NAME
              );
              if (nameNode) specifiers.push(nameNode.text);
            } else if (child.type === PYTHON_CONSTANTS.NODES.WILDCARD_IMPORT) {
              specifiers.push(PYTHON_CONSTANTS.SPECIAL.WILDCARD);
            }
          }
          if (specifiers.length > 0) {
            imports.push({
              source,
              specifiers,
              loc: {
                start: {
                  line: node.startPosition.row + 1,
                  column: node.startPosition.column
                },
                end: {
                  line: node.endPosition.row + 1,
                  column: node.endPosition.column
                }
              }
            });
          }
        }
      }
    };
    for (const node of rootNode.children) {
      processImportNode(node);
    }
    return imports;
  }
  /**
   * Extract export information using AST walk.
   *
   * Top-level functions, classes, and simple identifier assignments form
   * the module's public surface. Names with a single leading underscore
   * are treated as private and skipped (dunders like __init__ pass the
   * filter); __all__/__version__/__author__ assignments are skipped as
   * internal metadata. ALL-CAPS assignment targets are reported as
   * constants.
   */
  extractExportsAST(rootNode, code) {
    const exports = [];
    for (const node of rootNode.children) {
      if (node.type === PYTHON_CONSTANTS.NODES.FUNCTION_DEFINITION) {
        const nameNode = node.childForFieldName(PYTHON_CONSTANTS.FIELDS.NAME);
        if (nameNode) {
          const name = nameNode.text;
          // `_private` is skipped; `__dunder__` is not.
          const isPrivate = name.startsWith("_") && !name.startsWith("__");
          if (!isPrivate) {
            const metadata = this.analyzeMetadata(node, code);
            exports.push({
              name,
              type: PYTHON_CONSTANTS.TYPES.FUNCTION,
              loc: {
                start: {
                  line: node.startPosition.row + 1,
                  column: node.startPosition.column
                },
                end: {
                  line: node.endPosition.row + 1,
                  column: node.endPosition.column
                }
              },
              parameters: this.extractParameters(node),
              ...metadata
            });
          }
        }
      } else if (node.type === PYTHON_CONSTANTS.NODES.CLASS_DEFINITION) {
        const nameNode = node.childForFieldName(PYTHON_CONSTANTS.FIELDS.NAME);
        if (nameNode) {
          const metadata = this.analyzeMetadata(node, code);
          exports.push({
            name: nameNode.text,
            type: PYTHON_CONSTANTS.TYPES.CLASS,
            loc: {
              start: {
                line: node.startPosition.row + 1,
                column: node.startPosition.column
              },
              end: {
                line: node.endPosition.row + 1,
                column: node.endPosition.column
              }
            },
            ...metadata
          });
        }
      } else if (node.type === PYTHON_CONSTANTS.NODES.EXPRESSION_STATEMENT) {
        // Module-level `name = value` assignments.
        const assignment = node.firstChild;
        if (assignment && assignment.type === PYTHON_CONSTANTS.NODES.ASSIGNMENT) {
          const left = assignment.childForFieldName(
            PYTHON_CONSTANTS.FIELDS.LEFT
          );
          // Only simple identifier targets (no tuple/attribute targets).
          if (left && left.type === PYTHON_CONSTANTS.NODES.IDENTIFIER) {
            const name = left.text;
            const isInternal = name === PYTHON_CONSTANTS.SPECIAL.DUNDER_ALL || name === PYTHON_CONSTANTS.SPECIAL.DUNDER_VERSION || name === PYTHON_CONSTANTS.SPECIAL.DUNDER_AUTHOR;
            const isPrivate = name.startsWith("_") && !name.startsWith("__");
            if (!isInternal && !isPrivate) {
              exports.push({
                name,
                // ALL-CAPS names are reported as constants.
                type: name === name.toUpperCase() ? PYTHON_CONSTANTS.TYPES.CONST : PYTHON_CONSTANTS.TYPES.VARIABLE,
                loc: {
                  start: {
                    line: node.startPosition.row + 1,
                    column: node.startPosition.column
                  },
                  end: {
                    line: node.endPosition.row + 1,
                    column: node.endPosition.column
                  }
                }
              });
            }
          }
        }
      }
    }
    return exports;
  }
  /**
   * Extract parameter names from a function definition node.
   * Plain identifiers are used as-is; for typed/default parameters the
   * first child (the parameter name) is used, falling back to "unknown".
   */
  extractParameters(node) {
    const paramsNode = node.childForFieldName(
      PYTHON_CONSTANTS.FIELDS.PARAMETERS
    );
    if (!paramsNode) return [];
    return paramsNode.children.filter(
      (c) => c.type === PYTHON_CONSTANTS.NODES.IDENTIFIER || c.type === PYTHON_CONSTANTS.NODES.TYPED_PARAMETER || c.type === PYTHON_CONSTANTS.NODES.DEFAULT_PARAMETER
    ).map((c) => {
      if (c.type === PYTHON_CONSTANTS.NODES.IDENTIFIER) return c.text;
      if (c.type === PYTHON_CONSTANTS.NODES.TYPED_PARAMETER || c.type === PYTHON_CONSTANTS.NODES.DEFAULT_PARAMETER) {
        return c.firstChild?.text || "unknown";
      }
      return "unknown";
    });
  }
  /**
   * Fallback regex-based parsing when tree-sitter is unavailable.
   * Produces the same ParseResult shape as the AST path and attaches a
   * warning recording the degraded extraction mode.
   */
  parseRegex(code, filePath) {
    try {
      const imports = this.extractImportsRegex(code, filePath);
      const exports = this.extractExportsRegex(code, filePath);
      return {
        exports,
        imports,
        language: "python" /* Python */,
        warnings: [
          "Python parsing is currently using regex-based extraction as tree-sitter wasm was not available."
        ]
      };
    } catch (error) {
      throw new Error(
        `Failed to parse Python file ${filePath}: ${error.message}`
      );
    }
  }
  // PEP 8-style naming patterns (snake_case functions/variables,
  // PascalCase classes, SCREAMING_SNAKE constants) with the common dunder
  // names listed as exceptions.
  getNamingConventions() {
    return {
      variablePattern: /^[a-z_][a-z0-9_]*$/,
      functionPattern: /^[a-z_][a-z0-9_]*$/,
      classPattern: /^[A-Z][a-zA-Z0-9]*$/,
      constantPattern: /^[A-Z][A-Z0-9_]*$/,
      exceptions: [
        PYTHON_CONSTANTS.SPECIAL.DUNDER_INIT,
        PYTHON_CONSTANTS.SPECIAL.DUNDER_STR,
        PYTHON_CONSTANTS.SPECIAL.DUNDER_REPR,
        PYTHON_CONSTANTS.SPECIAL.DUNDER_NAME,
        PYTHON_CONSTANTS.SPECIAL.DUNDER_MAIN,
        PYTHON_CONSTANTS.SPECIAL.DUNDER_FILE,
        PYTHON_CONSTANTS.SPECIAL.DUNDER_DOC,
        PYTHON_CONSTANTS.SPECIAL.DUNDER_ALL,
        PYTHON_CONSTANTS.SPECIAL.DUNDER_VERSION,
        PYTHON_CONSTANTS.SPECIAL.DUNDER_AUTHOR,
        PYTHON_CONSTANTS.SPECIAL.DUNDER_DICT,
        PYTHON_CONSTANTS.SPECIAL.DUNDER_CLASS,
        PYTHON_CONSTANTS.SPECIAL.DUNDER_MODULE,
        PYTHON_CONSTANTS.SPECIAL.DUNDER_BASES
      ]
    };
  }
  // Case-insensitive extension check; mirrors this.extensions.
  canHandle(filePath) {
    return filePath.toLowerCase().endsWith(".py");
  }
  /**
   * Line-based import extraction for the regex fallback.
   * Recognizes `import a, b as c` and `from m import x, y as z` (plus
   * `from m import *`); `#`-comment lines are skipped. Locations span the
   * whole line since regex matching has no sub-line positions.
   */
  extractImportsRegex(code, _filePath) {
    void _filePath;
    const imports = [];
    const lines = code.split("\n");
    const importRegex = /^\s*import\s+([a-zA-Z0-9_., ]+)/;
    const fromImportRegex = /^\s*from\s+([a-zA-Z0-9_.]+)\s+import\s+(.+)/;
    lines.forEach((line, idx) => {
      if (line.trim().startsWith("#")) return;
      const importMatch = line.match(importRegex);
      if (importMatch) {
        // Split on commas, drop `as` aliases, keep the original names.
        const modules = importMatch[1].split(",").map((m) => m.trim().split(" as ")[0]);
        modules.forEach((module) => {
          imports.push({
            source: module,
            specifiers: [module],
            loc: {
              start: { line: idx + 1, column: 0 },
              end: { line: idx + 1, column: line.length }
            }
          });
        });
        return;
      }
      const fromMatch = line.match(fromImportRegex);
      if (fromMatch) {
        const module = fromMatch[1];
        const importsStr = fromMatch[2];
        if (importsStr.trim() === PYTHON_CONSTANTS.SPECIAL.WILDCARD) {
          imports.push({
            source: module,
            specifiers: [PYTHON_CONSTANTS.SPECIAL.WILDCARD],
            loc: {
              start: { line: idx + 1, column: 0 },
              end: { line: idx + 1, column: line.length }
            }
          });
          return;
        }
        const specifiers = importsStr.split(",").map((s) => s.trim().split(" as ")[0]);
        imports.push({
          source: module,
          specifiers,
          loc: {
            start: { line: idx + 1, column: 0 },
            end: { line: idx + 1, column: line.length }
          }
        });
      }
    });
    return imports;
  }
  /**
   * Line-based export extraction for the regex fallback.
   * Only column-0 `def`/`class` lines are considered (top level). For
   * functions, a single-line docstring is searched in the next three
   * lines, and purity is guessed from a crude heuristic (see below).
   * Loc always spans just the definition line.
   */
  extractExportsRegex(code, _filePath) {
    void _filePath;
    const exports = [];
    const lines = code.split("\n");
    const funcRegex = /^def\s+([a-zA-Z0-9_]+)\s*\(/;
    const classRegex = /^class\s+([a-zA-Z0-9_]+)/;
    lines.forEach((line, idx) => {
      // Skip indented (nested) definitions and blank lines (search -> -1).
      const indent = line.search(/\S/);
      if (indent !== 0) return;
      const classMatch = line.match(classRegex);
      if (classMatch) {
        // Classes get optimistic defaults; no purity analysis here.
        exports.push({
          name: classMatch[1],
          type: PYTHON_CONSTANTS.TYPES.CLASS,
          visibility: "public",
          isPure: true,
          hasSideEffects: false,
          loc: {
            start: { line: idx + 1, column: 0 },
            end: { line: idx + 1, column: line.length }
          }
        });
        return;
      }
      const funcMatch = line.match(funcRegex);
      if (funcMatch) {
        const name = funcMatch[1];
        // Same privacy rule as the AST path: `_name` skipped, dunders kept.
        if (name.startsWith("_") && !name.startsWith("__")) return;
        let docContent;
        // Look for a one-line docstring in the next 3 lines, stopping at
        // the first non-blank line that is not a docstring opener
        // (multi-line docstrings are not captured).
        const nextLines = lines.slice(idx + 1, idx + 4);
        for (const nextLine of nextLines) {
          const docMatch = nextLine.match(/^\s*"""([\s\S]*?)"""/) || nextLine.match(/^\s*'''([\s\S]*?)'''/);
          if (docMatch) {
            docContent = docMatch[1].trim();
            break;
          }
          if (nextLine.trim() && !nextLine.trim().startsWith('"""') && !nextLine.trim().startsWith("'''"))
            break;
        }
        // Purity heuristic: name contains "impure", or the def line / the
        // immediately following line contains a print( call.
        // NOTE(review): the "impure"-in-name check looks like a test-fixture
        // heuristic — confirm it is intended for production use.
        const isImpure = name.toLowerCase().includes("impure") || line.includes(PYTHON_CONSTANTS.BUILTINS.PRINT) || idx + 1 < lines.length && lines[idx + 1].includes(PYTHON_CONSTANTS.BUILTINS.PRINT);
        exports.push({
          name,
          type: PYTHON_CONSTANTS.TYPES.FUNCTION,
          visibility: "public",
          isPure: !isImpure,
          hasSideEffects: isImpure,
          documentation: docContent ? { content: docContent, type: PYTHON_CONSTANTS.TYPES.DOCSTRING } : void 0,
          loc: {
            start: { line: idx + 1, column: 0 },
            end: { line: idx + 1, column: line.length }
          }
        });
      }
    });
    return exports;
  }
};
544
+
545
+ export {
546
+ PythonParser
547
+ };
package/dist/index.d.mts CHANGED
@@ -1104,7 +1104,7 @@ declare abstract class BaseLanguageParser implements LanguageParser {
1104
1104
  * Python Parser implementation using tree-sitter.
1105
1105
  * Handles AST-based and Regex-based extraction of imports and exports.
1106
1106
  *
1107
- * @lastUpdated 2026-03-18
1107
+ * @lastUpdated 2026-03-27
1108
1108
  */
1109
1109
  declare class PythonParser extends BaseLanguageParser {
1110
1110
  readonly language = Language.Python;
@@ -1112,40 +1112,22 @@ declare class PythonParser extends BaseLanguageParser {
1112
1112
  protected getParserName(): string;
1113
1113
  /**
1114
1114
  * Analyze metadata for a Python node (purity, side effects).
1115
- *
1116
- * @param node - Tree-sitter node to analyze.
1117
- * @param code - Source code for context.
1118
- * @returns Partial ExportInfo containing discovered metadata.
1119
1115
  */
1120
1116
  analyzeMetadata(node: Parser.Node, code: string): Partial<ExportInfo>;
1121
1117
  /**
1122
1118
  * Extract import information using AST walk.
1123
- *
1124
- * @param rootNode - Root node of the Python AST.
1125
- * @returns Array of discovered FileImport objects.
1126
1119
  */
1127
1120
  protected extractImportsAST(rootNode: Parser.Node): FileImport[];
1128
1121
  /**
1129
1122
  * Extract export information using AST walk.
1130
- *
1131
- * @param rootNode - Root node of the Python AST.
1132
- * @param code - Source code for documentation extraction.
1133
- * @returns Array of discovered ExportInfo objects.
1134
1123
  */
1135
1124
  protected extractExportsAST(rootNode: Parser.Node, code: string): ExportInfo[];
1136
1125
  /**
1137
1126
  * Extract parameter names from a function definition node.
1138
- *
1139
- * @param node - Function definition node.
1140
- * @returns Array of parameter name strings.
1141
1127
  */
1142
1128
  private extractParameters;
1143
1129
  /**
1144
1130
  * Fallback regex-based parsing when tree-sitter is unavailable.
1145
- *
1146
- * @param code - Source code content.
1147
- * @param filePath - Path to the file being parsed.
1148
- * @returns Consolidated ParseResult.
1149
1131
  */
1150
1132
  protected parseRegex(code: string, filePath: string): ParseResult;
1151
1133
  getNamingConventions(): NamingConvention;
package/dist/index.d.ts CHANGED
@@ -1104,7 +1104,7 @@ declare abstract class BaseLanguageParser implements LanguageParser {
1104
1104
  * Python Parser implementation using tree-sitter.
1105
1105
  * Handles AST-based and Regex-based extraction of imports and exports.
1106
1106
  *
1107
- * @lastUpdated 2026-03-18
1107
+ * @lastUpdated 2026-03-27
1108
1108
  */
1109
1109
  declare class PythonParser extends BaseLanguageParser {
1110
1110
  readonly language = Language.Python;
@@ -1112,40 +1112,22 @@ declare class PythonParser extends BaseLanguageParser {
1112
1112
  protected getParserName(): string;
1113
1113
  /**
1114
1114
  * Analyze metadata for a Python node (purity, side effects).
1115
- *
1116
- * @param node - Tree-sitter node to analyze.
1117
- * @param code - Source code for context.
1118
- * @returns Partial ExportInfo containing discovered metadata.
1119
1115
  */
1120
1116
  analyzeMetadata(node: Parser.Node, code: string): Partial<ExportInfo>;
1121
1117
  /**
1122
1118
  * Extract import information using AST walk.
1123
- *
1124
- * @param rootNode - Root node of the Python AST.
1125
- * @returns Array of discovered FileImport objects.
1126
1119
  */
1127
1120
  protected extractImportsAST(rootNode: Parser.Node): FileImport[];
1128
1121
  /**
1129
1122
  * Extract export information using AST walk.
1130
- *
1131
- * @param rootNode - Root node of the Python AST.
1132
- * @param code - Source code for documentation extraction.
1133
- * @returns Array of discovered ExportInfo objects.
1134
1123
  */
1135
1124
  protected extractExportsAST(rootNode: Parser.Node, code: string): ExportInfo[];
1136
1125
  /**
1137
1126
  * Extract parameter names from a function definition node.
1138
- *
1139
- * @param node - Function definition node.
1140
- * @returns Array of parameter name strings.
1141
1127
  */
1142
1128
  private extractParameters;
1143
1129
  /**
1144
1130
  * Fallback regex-based parsing when tree-sitter is unavailable.
1145
- *
1146
- * @param code - Source code content.
1147
- * @param filePath - Path to the file being parsed.
1148
- * @returns Consolidated ParseResult.
1149
1131
  */
1150
1132
  protected parseRegex(code: string, filePath: string): ParseResult;
1151
1133
  getNamingConventions(): NamingConvention;