google-docstring-parser 0.0.9__tar.gz → 0.0.10__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (16)
  1. {google_docstring_parser-0.0.9 → google_docstring_parser-0.0.10}/PKG-INFO +3 -1
  2. {google_docstring_parser-0.0.9 → google_docstring_parser-0.0.10}/README.md +2 -0
  3. {google_docstring_parser-0.0.9 → google_docstring_parser-0.0.10}/google_docstring_parser/google_docstring_parser.py +12 -12
  4. {google_docstring_parser-0.0.9 → google_docstring_parser-0.0.10}/google_docstring_parser/type_validation.py +11 -11
  5. {google_docstring_parser-0.0.9 → google_docstring_parser-0.0.10}/google_docstring_parser.egg-info/PKG-INFO +3 -1
  6. {google_docstring_parser-0.0.9 → google_docstring_parser-0.0.10}/pyproject.toml +3 -1
  7. {google_docstring_parser-0.0.9 → google_docstring_parser-0.0.10}/tools/check_docstrings.py +314 -46
  8. {google_docstring_parser-0.0.9 → google_docstring_parser-0.0.10}/LICENSE +0 -0
  9. {google_docstring_parser-0.0.9 → google_docstring_parser-0.0.10}/google_docstring_parser/__init__.py +0 -0
  10. {google_docstring_parser-0.0.9 → google_docstring_parser-0.0.10}/google_docstring_parser/py.typed +0 -0
  11. {google_docstring_parser-0.0.9 → google_docstring_parser-0.0.10}/google_docstring_parser.egg-info/SOURCES.txt +0 -0
  12. {google_docstring_parser-0.0.9 → google_docstring_parser-0.0.10}/google_docstring_parser.egg-info/dependency_links.txt +0 -0
  13. {google_docstring_parser-0.0.9 → google_docstring_parser-0.0.10}/google_docstring_parser.egg-info/requires.txt +0 -0
  14. {google_docstring_parser-0.0.9 → google_docstring_parser-0.0.10}/google_docstring_parser.egg-info/top_level.txt +0 -0
  15. {google_docstring_parser-0.0.9 → google_docstring_parser-0.0.10}/setup.cfg +0 -0
  16. {google_docstring_parser-0.0.9 → google_docstring_parser-0.0.10}/tools/__init__.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: google-docstring-parser
3
- Version: 0.0.9
3
+ Version: 0.0.10
4
4
  Summary: A lightweight, efficient parser for Google-style Python docstrings that converts them into structured dictionaries.
5
5
  Author: Vladimir Iglovikov
6
6
  Maintainer: Vladimir Iglovikov
@@ -180,6 +180,8 @@ Add a `[tool.docstring_checker]` section to your pyproject.toml:
180
180
  paths = ["src", "tests"] # Directories or files to scan
181
181
  require_param_types = true # Require parameter types in docstrings
182
182
  check_references = true # Check references for proper format
183
+ check_type_consistency = true # Compare docstring types with annotations
183
184
  exclude_files = ["conftest.py", "__init__.py"] # Files to exclude from checks
185
+ min_short_description_length = 10 # Minimum summary length; set to 0 to disable
184
186
  verbose = false # Enable verbose output
185
187
  ```
@@ -146,6 +146,8 @@ Add a `[tool.docstring_checker]` section to your pyproject.toml:
146
146
  paths = ["src", "tests"] # Directories or files to scan
147
147
  require_param_types = true # Require parameter types in docstrings
148
148
  check_references = true # Check references for proper format
149
+ check_type_consistency = true # Compare docstring types with annotations
149
150
  exclude_files = ["conftest.py", "__init__.py"] # Files to exclude from checks
151
+ min_short_description_length = 10 # Minimum summary length; set to 0 to disable
150
152
  verbose = false # Enable verbose output
151
153
  ```
@@ -40,7 +40,7 @@ __all__ = [
40
40
 
41
41
 
42
42
  class ReferenceFormatError(ValueError):
43
- """Error raised when a reference format is invalid.
43
+ """Error raised when a reference format is invalid or malformed.
44
44
 
45
45
  Args:
46
46
  code (str): Error code identifying the specific format issue
@@ -88,7 +88,7 @@ class EmptyDescriptionError(ReferenceFormatError):
88
88
 
89
89
 
90
90
  def _extract_sections(docstring: str) -> dict[str, str]:
91
- """Extract sections from a docstring.
91
+ """Extract named sections from a Google-style docstring.
92
92
 
93
93
  Args:
94
94
  docstring (str): The docstring to extract sections from
@@ -170,7 +170,7 @@ def _find_separator_colon(content: str) -> int:
170
170
 
171
171
 
172
172
  def _parse_reference_line(line: str, *, is_single: bool = False) -> dict[str, str]:
173
- """Parse a single reference line.
173
+ """Parse a single reference line into description and source.
174
174
 
175
175
  Args:
176
176
  line (str): The line to parse
@@ -252,7 +252,7 @@ def _identify_main_reference_lines(lines: list[str]) -> list[str]:
252
252
 
253
253
 
254
254
  def _process_single_reference(main_line: str, all_lines: list[str]) -> dict[str, str]:
255
- """Process a single reference entry.
255
+ """Process a single reference entry from the References section.
256
256
 
257
257
  Args:
258
258
  main_line (str): The main reference line
@@ -285,7 +285,7 @@ def _process_single_reference(main_line: str, all_lines: list[str]) -> dict[str,
285
285
 
286
286
 
287
287
  def _process_multiple_references(lines: list[str]) -> list[dict[str, str]]:
288
- """Process multiple reference entries.
288
+ """Process multiple reference entries from the References section.
289
289
 
290
290
  Args:
291
291
  lines (list[str]): Lines containing multiple references
@@ -338,7 +338,7 @@ def _process_multiple_references(lines: list[str]) -> list[dict[str, str]]:
338
338
 
339
339
 
340
340
  def _parse_references(reference_content: str) -> list[dict[str, str]]:
341
- """Parse references section content.
341
+ """Parse references section content into structured reference entries.
342
342
 
343
343
  Args:
344
344
  reference_content (str): Content of the references section
@@ -373,7 +373,7 @@ def _parse_references(reference_content: str) -> list[dict[str, str]]:
373
373
 
374
374
 
375
375
  def _validate_type_with_error_handling(type_str: str, result: dict[str, Any], collect_errors: bool) -> None:
376
- """Validate a type annotation and handle any errors.
376
+ """Validate a type annotation and handle any validation errors.
377
377
 
378
378
  This function validates type annotations and handles errors differently based on the collect_errors flag:
379
379
  - When collect_errors is True: Errors are added to result["errors"] list instead of being raised
@@ -408,7 +408,7 @@ def _process_args_with_validation(
408
408
  validate_types: bool,
409
409
  collect_errors: bool,
410
410
  ) -> None:
411
- """Process the Args section with type validation.
411
+ """Process the Args section with type validation and error collection.
412
412
 
413
413
  Args:
414
414
  sections (dict[str, str]): The sections dictionary
@@ -439,7 +439,7 @@ def _process_args_with_validation(
439
439
 
440
440
 
441
441
  def _parse_returns_section(sections: dict[str, str], *, validate_types: bool) -> dict[str, str] | str:
442
- """Process the Returns section of a docstring.
442
+ """Process the Returns section of a docstring into type and description.
443
443
 
444
444
  Args:
445
445
  sections (dict[str, str]): The sections dictionary
@@ -482,7 +482,7 @@ def _process_returns_with_validation(
482
482
  validate_types: bool,
483
483
  collect_errors: bool,
484
484
  ) -> None:
485
- """Process the Returns section with type validation.
485
+ """Process the Returns section with type validation and error handling.
486
486
 
487
487
  Args:
488
488
  sections (dict[str, str]): The sections dictionary
@@ -506,7 +506,7 @@ def _process_returns_with_validation(
506
506
 
507
507
 
508
508
  def _process_references_section(sections: dict[str, str], result: dict[str, Any]) -> None:
509
- """Process the References section.
509
+ """Process the References section into structured reference entries.
510
510
 
511
511
  Args:
512
512
  sections (dict[str, str]): The sections dictionary
@@ -527,7 +527,7 @@ def parse_google_docstring(
527
527
  validate_types: bool = True,
528
528
  collect_errors: bool = True,
529
529
  ) -> dict[str, Any]:
530
- """Parse a Google-style docstring.
530
+ """Parse a Google-style docstring into a structured dictionary.
531
531
 
532
532
  Args:
533
533
  docstring (str): The docstring to parse
@@ -89,7 +89,7 @@ NESTING_KEYWORD = "with"
89
89
 
90
90
 
91
91
  class InvalidTypeAnnotationError(ValueError):
92
- """Error raised when a type annotation is invalid.
92
+ """Error raised when a type annotation is invalid or malformed.
93
93
 
94
94
  Args:
95
95
  message (str): The error message.
@@ -100,7 +100,7 @@ class InvalidTypeAnnotationError(ValueError):
100
100
  INVALID_NESTED_TYPE = "Invalid nested type: {}"
101
101
 
102
102
  def __init__(self, message: str) -> None:
103
- """Initialize the error with a message.
103
+ """Initialize the error instance with a descriptive message.
104
104
 
105
105
  Args:
106
106
  message (str): The error message.
@@ -125,7 +125,7 @@ class BracketValidationError(ValueError):
125
125
  WRONG_BRACKET_TYPE = "Collection '{}' must use square brackets for type arguments, not '{}'"
126
126
 
127
127
  def __init__(self, error_type: str) -> None:
128
- """Initialize with a specific error type.
128
+ """Initialize the error with a specific bracket validation type.
129
129
 
130
130
  Args:
131
131
  error_type (str): One of the predefined error types.
@@ -137,7 +137,7 @@ class BracketValidationError(ValueError):
137
137
 
138
138
 
139
139
  def is_collection_type(type_name: str) -> bool:
140
- """Check if a type name is a known collection type.
140
+ """Check if a type name is a known collection type (list, dict, etc).
141
141
 
142
142
  Args:
143
143
  type_name (str): The type name to check.
@@ -261,7 +261,7 @@ def _is_within_string_literal(text: str, position: int) -> bool:
261
261
 
262
262
 
263
263
  def _looks_like_type_annotation(text: str) -> bool:
264
- """Check if text looks like a type annotation.
264
+ """Check if text looks like a type annotation using heuristics.
265
265
 
266
266
  Args:
267
267
  text (str): The text to check
@@ -277,7 +277,7 @@ def _looks_like_type_annotation(text: str) -> bool:
277
277
 
278
278
 
279
279
  def _process_string_literals(text: str) -> tuple[str, list[str]]:
280
- """Process string literals in text.
280
+ """Process string literals in text by replacing them with placeholders.
281
281
 
282
282
  Args:
283
283
  text (str): The text to process
@@ -386,7 +386,7 @@ def _check_for_opening_bracket(
386
386
  bracket_stack: list[str],
387
387
  collection_stack: list[tuple[str, str]],
388
388
  ) -> None:
389
- """Check for opening bracket in type declaration.
389
+ """Check for opening bracket in type declaration and update stacks.
390
390
 
391
391
  Args:
392
392
  tokens (list[str]): List of tokens
@@ -409,7 +409,7 @@ def _check_for_opening_bracket(
409
409
 
410
410
 
411
411
  def _check_for_closing_bracket(token: str, bracket_stack: list[str], collection_stack: list[tuple[str, str]]) -> None:
412
- """Check for closing bracket in type declaration.
412
+ """Check for closing bracket in type declaration and validate pairing.
413
413
 
414
414
  Args:
415
415
  token (str): Current token
@@ -440,7 +440,7 @@ def _check_for_closing_bracket(token: str, bracket_stack: list[str], collection_
440
440
 
441
441
 
442
442
  def _check_for_bare_collection(tokens: list[str], i: int, token: str) -> None:
443
- """Check for bare collection type usage.
443
+ """Check for bare collection type usage without type arguments.
444
444
 
445
445
  Args:
446
446
  tokens (list[str]): List of tokens
@@ -487,7 +487,7 @@ def _is_bare_collection_in_nested_type(token: str, tokens: list[str], i: int, br
487
487
 
488
488
 
489
489
  def _check_tokens_for_collection_type_usage(tokens: list[str]) -> None:
490
- """Check tokens for proper collection type usage.
490
+ """Check tokens for proper collection type usage and brackets.
491
491
 
492
492
  Args:
493
493
  tokens (list[str]): List of tokens to check
@@ -539,7 +539,7 @@ def _check_tokens_for_collection_type_usage(tokens: list[str]) -> None:
539
539
 
540
540
 
541
541
  def _validate_type_declaration(declaration: str) -> None:
542
- """Validate a type declaration.
542
+ """Validate a type declaration for syntax and collection usage.
543
543
 
544
544
  Args:
545
545
  declaration (str): The type declaration to validate
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: google-docstring-parser
3
- Version: 0.0.9
3
+ Version: 0.0.10
4
4
  Summary: A lightweight, efficient parser for Google-style Python docstrings that converts them into structured dictionaries.
5
5
  Author: Vladimir Iglovikov
6
6
  Maintainer: Vladimir Iglovikov
@@ -180,6 +180,8 @@ Add a `[tool.docstring_checker]` section to your pyproject.toml:
180
180
  paths = ["src", "tests"] # Directories or files to scan
181
181
  require_param_types = true # Require parameter types in docstrings
182
182
  check_references = true # Check references for proper format
183
+ check_type_consistency = true # Compare docstring types with annotations
183
184
  exclude_files = ["conftest.py", "__init__.py"] # Files to exclude from checks
185
+ min_short_description_length = 10 # Minimum summary length; set to 0 to disable
184
186
  verbose = false # Enable verbose output
185
187
  ```
@@ -5,7 +5,7 @@ requires = [ "setuptools>=45", "wheel" ]
5
5
 
6
6
  [project]
7
7
  name = "google-docstring-parser"
8
- version = "0.0.9"
8
+ version = "0.0.10"
9
9
 
10
10
  description = "A lightweight, efficient parser for Google-style Python docstrings that converts them into structured dictionaries."
11
11
  readme = "README.md"
@@ -119,6 +119,7 @@ lint.per-file-ignores = { "__init__.py" = [
119
119
  "BLE001",
120
120
  "FBT002",
121
121
  "ANN201",
122
+ "PLR0913",
122
123
  ] }
123
124
 
124
125
  lint.fixable = [ "ALL" ]
@@ -146,5 +147,6 @@ paths = [ "google_docstring_parser", "tools" ]
146
147
  require_param_types = true
147
148
  check_references = true
148
149
  check_type_consistency = true
150
+ min_short_description_length = 50
149
151
  exclude_files = [ "test_malformed_docstrings.py" ]
150
152
  verbose = false
@@ -20,18 +20,23 @@ from google_docstring_parser.google_docstring_parser import (
20
20
  parse_google_docstring,
21
21
  )
22
22
 
23
+ # Preview length for short description error messages
24
+ SHORT_DESC_PREVIEW_LENGTH = 50
25
+
23
26
  # Default configuration
24
27
  DEFAULT_CONFIG = {
25
28
  "paths": [], # Empty by default, so no directories are scanned unless explicitly specified
26
29
  "require_param_types": False,
27
30
  "check_references": True,
31
+ "check_type_consistency": False,
32
+ "min_short_description_length": 0,
28
33
  "exclude_files": [],
29
34
  "verbose": False,
30
35
  }
31
36
 
32
37
 
33
38
  class DocstringContext(NamedTuple):
34
- """Context for docstring processing.
39
+ """Context for docstring processing, validation, and error reporting.
35
40
 
36
41
  Args:
37
42
  file_path (Path): Path to the file
@@ -40,6 +45,9 @@ class DocstringContext(NamedTuple):
40
45
  verbose (bool): Whether to print verbose output
41
46
  require_param_types (bool): Whether parameter types are required
42
47
  check_references (bool): Whether to check references for errors
48
+ check_type_consistency (bool): Whether to compare docstring types with annotations
49
+ min_short_description_length (int): Minimum length for short description
50
+ node (ast.AST | None): AST node for the function or class
43
51
 
44
52
  Returns:
45
53
  DocstringContext: A named tuple containing docstring processing context
@@ -51,6 +59,33 @@ class DocstringContext(NamedTuple):
51
59
  verbose: bool
52
60
  require_param_types: bool = False
53
61
  check_references: bool = True
62
+ check_type_consistency: bool = False
63
+ min_short_description_length: int = 0
64
+ node: ast.AST | None = None
65
+
66
+
67
+ _CONFIG_KEYS: dict[str, tuple[str, type]] = {
68
+ "paths": ("paths", list),
69
+ "require_param_types": ("require_param_types", bool),
70
+ "check_references": ("check_references", bool),
71
+ "check_type_consistency": ("check_type_consistency", bool),
72
+ "min_short_description_length": ("min_short_description_length", int),
73
+ "exclude_files": ("exclude_files", list),
74
+ "verbose": ("verbose", bool),
75
+ }
76
+
77
+
78
+ def _config_keys_match() -> bool:
79
+ """Return True if DEFAULT_CONFIG and _CONFIG_KEYS have the same keys."""
80
+ return set(DEFAULT_CONFIG.keys()) == set(_CONFIG_KEYS.keys())
81
+
82
+
83
+ def _apply_tool_config(config: dict[str, Any], tool_config: dict[str, Any]) -> None:
84
+ """Apply tool_config values to config. Modifies config in place."""
85
+ for key, (config_key, converter) in _CONFIG_KEYS.items():
86
+ if key in tool_config:
87
+ raw = tool_config[key]
88
+ config[config_key] = raw if converter is list else converter(raw)
54
89
 
55
90
 
56
91
  def load_pyproject_config() -> dict[str, Any]:
@@ -60,8 +95,6 @@ def load_pyproject_config() -> dict[str, Any]:
60
95
  dict[str, Any]: Dictionary with configuration values
61
96
  """
62
97
  config = DEFAULT_CONFIG.copy()
63
-
64
- # Look for pyproject.toml in the current directory
65
98
  pyproject_path = Path("pyproject.toml")
66
99
  if not pyproject_path.is_file():
67
100
  return config
@@ -69,24 +102,10 @@ def load_pyproject_config() -> dict[str, Any]:
69
102
  try:
70
103
  with pyproject_path.open("rb") as f:
71
104
  pyproject_data = tomli.load(f)
72
-
73
- # Check if our tool is configured
74
105
  tool_config = pyproject_data.get("tool", {}).get("docstring_checker", {})
75
106
  if not tool_config:
76
107
  return config
77
-
78
- # Update config with values from pyproject.toml
79
- if "paths" in tool_config:
80
- config["paths"] = tool_config["paths"]
81
- if "require_param_types" in tool_config:
82
- config["require_param_types"] = bool(tool_config["require_param_types"])
83
- if "check_references" in tool_config:
84
- config["check_references"] = bool(tool_config["check_references"])
85
- if "exclude_files" in tool_config:
86
- config["exclude_files"] = tool_config["exclude_files"]
87
- if "verbose" in tool_config:
88
- config["verbose"] = bool(tool_config["verbose"])
89
-
108
+ _apply_tool_config(config, tool_config)
90
109
  except Exception as e:
91
110
  print(f"Warning: Failed to load configuration from pyproject.toml: {e}")
92
111
 
@@ -94,7 +113,7 @@ def load_pyproject_config() -> dict[str, Any]:
94
113
 
95
114
 
96
115
  def get_docstrings(file_path: Path) -> list[tuple[str, int, str | None, ast.AST | None]]:
97
- """Extract docstrings from a Python file.
116
+ """Extract docstrings from a Python file using AST parsing.
98
117
 
99
118
  Args:
100
119
  file_path (Path): Path to the Python file
@@ -128,7 +147,7 @@ def get_docstrings(file_path: Path) -> list[tuple[str, int, str | None, ast.AST
128
147
 
129
148
 
130
149
  def check_param_types(docstring_dict: dict[str, Any], require_types: bool) -> list[str]:
131
- """Check if all parameters have types if required.
150
+ """Check if all parameters have types when types are required.
132
151
 
133
152
  Args:
134
153
  docstring_dict (dict[str, Any]): Parsed docstring dictionary
@@ -150,6 +169,111 @@ def check_param_types(docstring_dict: dict[str, Any], require_types: bool) -> li
150
169
  return errors
151
170
 
152
171
 
172
+ def _normalize_type(type_str: str) -> str:
173
+ """Normalize type string for comparison (quotes and whitespace only).
174
+
175
+ Python 3.10+ typing uses list, dict, tuple, X|Y - no List, Dict, Tuple, Union.
176
+ We do not normalize those; mismatches will be reported.
177
+ Internal whitespace differences (e.g., around commas, |, or brackets) are ignored.
178
+
179
+ Args:
180
+ type_str (str): Type string to normalize
181
+
182
+ Returns:
183
+ str: Normalized type string
184
+ """
185
+ normalized = type_str.strip().strip("'\"")
186
+ return re.sub(r"\s+", "", normalized)
187
+
188
+
189
+ def _annotation_to_str(annotation: ast.expr | None) -> str | None:
190
+ """Extract the type string from an AST annotation node.
191
+
192
+ Args:
193
+ annotation (ast.expr | None): AST annotation node
194
+
195
+ Returns:
196
+ str | None: Type string or None if no annotation
197
+ """
198
+ if annotation is None:
199
+ return None
200
+ if isinstance(annotation, ast.Constant) and isinstance(annotation.value, str):
201
+ return annotation.value
202
+ return ast.unparse(annotation)
203
+
204
+
205
+ def _get_ast_param_types(node: ast.FunctionDef | ast.AsyncFunctionDef) -> dict[str, str]:
206
+ """Extract parameter names and type strings from function AST.
207
+
208
+ Args:
209
+ node (ast.FunctionDef | ast.AsyncFunctionDef): Function AST node
210
+
211
+ Returns:
212
+ dict[str, str]: Map of param name to annotation string (skips self/cls)
213
+ """
214
+ ast_params: dict[str, str] = {}
215
+ all_args: list[ast.arg] = []
216
+ all_args.extend(node.args.posonlyargs)
217
+ all_args.extend(node.args.args)
218
+ all_args.extend(node.args.kwonlyargs)
219
+ if node.args.vararg is not None:
220
+ all_args.append(node.args.vararg)
221
+ if node.args.kwarg is not None:
222
+ all_args.append(node.args.kwarg)
223
+ for arg in all_args:
224
+ if arg.arg in ("self", "cls"):
225
+ continue
226
+ if ann_str := _annotation_to_str(arg.annotation):
227
+ ast_params[arg.arg] = ann_str
228
+ return ast_params
229
+
230
+
231
+ def check_type_consistency(
232
+ parsed: dict[str, Any],
233
+ node: ast.FunctionDef | ast.AsyncFunctionDef,
234
+ ) -> list[str]:
235
+ """Compare docstring types with function annotations.
236
+
237
+ Args:
238
+ parsed (dict[str, Any]): Parsed docstring dictionary
239
+ node (ast.FunctionDef | ast.AsyncFunctionDef): Function AST node
240
+
241
+ Returns:
242
+ list[str]: List of error messages for type mismatches
243
+ """
244
+ errors = []
245
+ ast_params = _get_ast_param_types(node)
246
+
247
+ # Compare Args
248
+ for arg in parsed.get("Args", []):
249
+ doc_type = arg.get("type")
250
+ if not doc_type:
251
+ continue
252
+ param_name = arg.get("name")
253
+ if not param_name or param_name not in ast_params:
254
+ continue
255
+ ast_type = ast_params[param_name]
256
+ if _normalize_type(doc_type) != _normalize_type(ast_type):
257
+ errors.append(
258
+ f"Parameter '{param_name}': docstring says '{doc_type}' but annotation says '{ast_type}'",
259
+ )
260
+
261
+ # Compare Returns (handle both dict and string "None" from parse_google_docstring)
262
+ returns = parsed.get("Returns")
263
+ doc_ret: str | None = None
264
+ if isinstance(returns, dict):
265
+ doc_ret = returns.get("type")
266
+ elif isinstance(returns, str):
267
+ doc_ret = returns
268
+ ast_ret = _annotation_to_str(node.returns)
269
+ if doc_ret and ast_ret and _normalize_type(doc_ret) != _normalize_type(ast_ret):
270
+ errors.append(
271
+ f"Returns: docstring says '{doc_ret}' but annotation says '{ast_ret}'",
272
+ )
273
+
274
+ return errors
275
+
276
+
153
277
  def _check_reference_fields(reference: dict[str, Any], index: int) -> list[str]:
154
278
  """Check a single reference for missing or empty fields.
155
279
 
@@ -177,7 +301,7 @@ def _check_reference_fields(reference: dict[str, Any], index: int) -> list[str]:
177
301
 
178
302
 
179
303
  def check_references(docstring_dict: dict[str, Any]) -> list[str]:
180
- """Check references section for common errors.
304
+ """Check references section for common formatting errors.
181
305
 
182
306
  Args:
183
307
  docstring_dict (dict[str, Any]): Parsed docstring dictionary
@@ -216,7 +340,7 @@ def check_references(docstring_dict: dict[str, Any]) -> list[str]:
216
340
 
217
341
 
218
342
  def validate_docstring(docstring: str) -> list[str]:
219
- """Perform additional validation on a docstring.
343
+ """Perform additional validation on docstring format and structure.
220
344
 
221
345
  Args:
222
346
  docstring (str): The docstring to validate
@@ -250,7 +374,7 @@ def validate_docstring(docstring: str) -> list[str]:
250
374
 
251
375
 
252
376
  def check_returns_section_name(docstring: str) -> list[str]:
253
- """Check for incorrect Returns section names.
377
+ """Check for incorrect Returns section names (e.g. return vs Returns).
254
378
 
255
379
  Args:
256
380
  docstring (str): The docstring to check
@@ -267,8 +391,37 @@ def check_returns_section_name(docstring: str) -> list[str]:
267
391
  return errors
268
392
 
269
393
 
394
+ def check_short_description_length(parsed: dict[str, Any], min_length: int) -> list[str]:
395
+ """Check that the short description meets the minimum length requirement.
396
+
397
+ Args:
398
+ parsed (dict[str, Any]): Parsed docstring dictionary
399
+ min_length (int): Minimum length for short description (0 to disable)
400
+
401
+ Returns:
402
+ list[str]: List of error messages for short descriptions that are too short
403
+ """
404
+ if min_length <= 0:
405
+ return []
406
+
407
+ short_desc = (parsed.get("Description") or "").split("\n")[0].strip()
408
+ if not short_desc:
409
+ return []
410
+
411
+ if len(short_desc) < min_length:
412
+ preview = (
413
+ short_desc[:SHORT_DESC_PREVIEW_LENGTH] + "..."
414
+ if len(short_desc) > SHORT_DESC_PREVIEW_LENGTH
415
+ else short_desc
416
+ )
417
+ return [
418
+ f"Short description too short ({len(short_desc)} chars, min {min_length}): '{preview}'",
419
+ ]
420
+ return []
421
+
422
+
270
423
  def check_returns_type(docstring_dict: dict[str, Any]) -> list[str]:
271
- """Check Returns type in a docstring."""
424
+ """Check that the Returns section has proper type annotation."""
272
425
  errors = []
273
426
  if returns := docstring_dict.get("Returns"):
274
427
  # Special case: Returns section just contains "None"
@@ -286,7 +439,7 @@ def check_returns_type(docstring_dict: dict[str, Any]) -> list[str]:
286
439
 
287
440
 
288
441
  def _format_error(context: DocstringContext, error: str) -> str:
289
- """Format an error message consistently.
442
+ """Format an error message consistently with file, line, and name.
290
443
 
291
444
  Args:
292
445
  context (DocstringContext): Docstring context
@@ -333,7 +486,7 @@ def safe_execute(
333
486
 
334
487
 
335
488
  def _check_returns_section(context: DocstringContext, docstring: str) -> list[str]:
336
- """Check the Returns section name.
489
+ """Check the Returns section name for correct spelling.
337
490
 
338
491
  Args:
339
492
  context (DocstringContext): Docstring context
@@ -352,7 +505,7 @@ def _check_returns_section(context: DocstringContext, docstring: str) -> list[st
352
505
 
353
506
 
354
507
  def _validate_docstring_format(context: DocstringContext, docstring: str) -> list[str]:
355
- """Validate docstring format.
508
+ """Validate docstring format for common structural issues.
356
509
 
357
510
  Args:
358
511
  context (DocstringContext): Docstring context
@@ -371,7 +524,7 @@ def _validate_docstring_format(context: DocstringContext, docstring: str) -> lis
371
524
 
372
525
 
373
526
  def _parse_and_check_returns(context: DocstringContext, docstring: str) -> tuple[list[str], dict[str, Any] | None]:
374
- """Parse docstring and check returns type.
527
+ """Parse docstring and check that the Returns section has proper type.
375
528
 
376
529
  Args:
377
530
  context (DocstringContext): Docstring context
@@ -408,7 +561,7 @@ def _parse_and_check_returns(context: DocstringContext, docstring: str) -> tuple
408
561
 
409
562
 
410
563
  def _check_additional_validations(context: DocstringContext, parsed: dict[str, Any]) -> list[str]:
411
- """Run additional validations on parsed docstring.
564
+ """Run additional validations on the parsed docstring dictionary.
412
565
 
413
566
  Args:
414
567
  context (DocstringContext): Docstring context
@@ -438,11 +591,35 @@ def _check_additional_validations(context: DocstringContext, parsed: dict[str, A
438
591
  )
439
592
  errors.extend(ref_errors)
440
593
 
594
+ if context.min_short_description_length > 0:
595
+ length_errors, _ = safe_execute(
596
+ context,
597
+ check_short_description_length,
598
+ parsed,
599
+ context.min_short_description_length,
600
+ error_prefix="Error checking short description length",
601
+ )
602
+ errors.extend(length_errors)
603
+
604
+ if (
605
+ context.check_type_consistency
606
+ and context.node is not None
607
+ and isinstance(context.node, (ast.FunctionDef, ast.AsyncFunctionDef))
608
+ ):
609
+ consistency_errors, _ = safe_execute(
610
+ context,
611
+ check_type_consistency,
612
+ parsed,
613
+ context.node,
614
+ error_prefix="Error checking type consistency",
615
+ )
616
+ errors.extend(consistency_errors)
617
+
441
618
  return errors
442
619
 
443
620
 
444
621
  def _process_docstring(context: DocstringContext, docstring: str) -> list[str]:
445
- """Process a single docstring.
622
+ """Process a single docstring and collect all validation errors.
446
623
 
447
624
  Args:
448
625
  context (DocstringContext): Docstring context
@@ -482,14 +659,18 @@ def check_file(
482
659
  require_param_types: bool = False,
483
660
  verbose: bool = False,
484
661
  check_references: bool = True,
662
+ check_type_consistency: bool = False,
663
+ min_short_description_length: int = 0,
485
664
  ) -> list[str]:
486
- """Check docstrings in a file.
665
+ """Check docstrings in a Python file for parsing and validation errors.
487
666
 
488
667
  Args:
489
668
  file_path (Path): Path to the Python file
490
669
  require_param_types (bool): Whether parameter types are required
491
670
  verbose (bool): Whether to print verbose output
492
671
  check_references (bool): Whether to check references for errors
672
+ check_type_consistency (bool): Whether to compare docstring types with annotations
673
+ min_short_description_length (int): Minimum length for short description (0 to disable)
493
674
 
494
675
  Returns:
495
676
  list[str]: List of error messages
@@ -508,7 +689,7 @@ def check_file(
508
689
  print(error_msg)
509
690
  return errors
510
691
 
511
- for name, line_no, docstring, _ in docstrings:
692
+ for name, line_no, docstring, node in docstrings:
512
693
  context = DocstringContext(
513
694
  file_path=file_path,
514
695
  line_no=line_no,
@@ -516,6 +697,9 @@ def check_file(
516
697
  verbose=verbose,
517
698
  require_param_types=require_param_types,
518
699
  check_references=check_references,
700
+ check_type_consistency=check_type_consistency,
701
+ min_short_description_length=min_short_description_length,
702
+ node=node,
519
703
  )
520
704
  errors.extend(_process_docstring(context, docstring))
521
705
 
@@ -528,6 +712,8 @@ def scan_directory(
528
712
  require_param_types: bool = False,
529
713
  verbose: bool = False,
530
714
  check_references: bool = True,
715
+ check_type_consistency: bool = False,
716
+ min_short_description_length: int = 0,
531
717
  ) -> list[str]:
532
718
  """Scan a directory for Python files and check their docstrings.
533
719
 
@@ -537,6 +723,8 @@ def scan_directory(
537
723
  require_param_types (bool): Whether parameter types are required
538
724
  verbose (bool): Whether to print verbose output
539
725
  check_references (bool): Whether to check references for errors
726
+ check_type_consistency (bool): Whether to compare docstring types with annotations
727
+ min_short_description_length (int): Minimum length for short description (0 to disable)
540
728
 
541
729
  Returns:
542
730
  list[str]: List of error messages
@@ -559,12 +747,21 @@ def scan_directory(
559
747
  break
560
748
 
561
749
  if not should_exclude:
562
- errors.extend(check_file(py_file, require_param_types, verbose, check_references))
750
+ errors.extend(
751
+ check_file(
752
+ py_file,
753
+ require_param_types,
754
+ verbose,
755
+ check_references,
756
+ check_type_consistency,
757
+ min_short_description_length,
758
+ ),
759
+ )
563
760
  return errors
564
761
 
565
762
 
566
763
  def _parse_args() -> argparse.Namespace:
567
- """Parse command line arguments.
764
+ """Parse command line arguments for the docstring checker.
568
765
 
569
766
  Returns:
570
767
  argparse.Namespace: Parsed command line arguments
@@ -582,29 +779,47 @@ def _parse_args() -> argparse.Namespace:
582
779
  action="store_true",
583
780
  help="Require parameter types in docstrings",
584
781
  )
585
- parser.add_argument(
782
+ ref_group = parser.add_mutually_exclusive_group()
783
+ ref_group.add_argument(
586
784
  "--check-references",
587
785
  action="store_true",
588
786
  help="Check references for errors",
589
787
  )
590
- parser.add_argument(
788
+ ref_group.add_argument(
591
789
  "--no-check-references",
592
790
  action="store_true",
593
791
  help="Skip reference checking",
594
792
  )
793
+ type_consistency_group = parser.add_mutually_exclusive_group()
794
+ type_consistency_group.add_argument(
795
+ "--check-type-consistency",
796
+ action="store_true",
797
+ help="Compare docstring types with function annotations",
798
+ )
799
+ type_consistency_group.add_argument(
800
+ "--no-check-type-consistency",
801
+ action="store_true",
802
+ help="Skip type consistency checking",
803
+ )
595
804
  parser.add_argument(
596
805
  "--exclude-files",
597
806
  help="Comma-separated list of filenames to exclude",
598
807
  default="",
599
808
  )
600
809
  parser.add_argument("-v", "--verbose", action="store_true", help="Verbose output")
810
+ parser.add_argument(
811
+ "--min-short-description-length",
812
+ type=int,
813
+ metavar="N",
814
+ help="Minimum length for short description (0 to disable)",
815
+ )
601
816
  return parser.parse_args()
602
817
 
603
818
 
604
819
  def _get_config_values(
605
820
  args: argparse.Namespace,
606
821
  config: dict[str, Any],
607
- ) -> tuple[list[str], bool, bool, bool, list[str]]:
822
+ ) -> tuple[list[str], bool, bool, bool, bool, int, list[str]]:
608
823
  """Get configuration values from command line arguments and config file.
609
824
 
610
825
  Args:
@@ -612,11 +827,13 @@ def _get_config_values(
612
827
  config (dict[str, Any]): Configuration dictionary
613
828
 
614
829
  Returns:
615
- tuple[list[str], bool, bool, bool, list[str]]: Tuple containing:
830
+ tuple[list[str], bool, bool, bool, bool, int, list[str]]: Tuple containing:
616
831
  - List of paths to check
617
832
  - Whether to require parameter types
618
- - Whether to check references
619
833
  - Whether to enable verbose output
834
+ - Whether to check references
835
+ - Whether to check type consistency
836
+ - Minimum short description length
620
837
  - List of files to exclude
621
838
  """
622
839
  # Get paths
@@ -628,13 +845,20 @@ def _get_config_values(
628
845
  # Get verbose
629
846
  verbose = args.verbose or config["verbose"]
630
847
 
631
- # Get check_references - handle both positive and negative flags
848
+ # Get check_references - handle both positive and negative flags (mutually exclusive)
632
849
  check_references = config["check_references"]
633
850
  if args.check_references:
634
851
  check_references = True
635
852
  if args.no_check_references:
636
853
  check_references = False
637
854
 
855
+ # Get check_type_consistency - handle both positive and negative flags (mutually exclusive)
856
+ check_type_consistency = config.get("check_type_consistency", False)
857
+ if args.check_type_consistency:
858
+ check_type_consistency = True
859
+ if args.no_check_type_consistency:
860
+ check_type_consistency = False
861
+
638
862
  # Get exclude_files
639
863
  exclude_files = []
640
864
  if args.exclude_files:
@@ -644,7 +868,20 @@ def _get_config_values(
644
868
  if not exclude_files:
645
869
  exclude_files = config["exclude_files"]
646
870
 
647
- return paths, require_param_types, verbose, check_references, exclude_files
871
+ # Get min_short_description_length - CLI overrides config
872
+ min_short_description_length = config.get("min_short_description_length", 0)
873
+ if args.min_short_description_length is not None:
874
+ min_short_description_length = args.min_short_description_length
875
+
876
+ return (
877
+ paths,
878
+ require_param_types,
879
+ verbose,
880
+ check_references,
881
+ check_type_consistency,
882
+ min_short_description_length,
883
+ exclude_files,
884
+ )
648
885
 
649
886
 
650
887
  def _process_paths(
@@ -653,8 +890,10 @@ def _process_paths(
653
890
  require_param_types: bool,
654
891
  verbose: bool,
655
892
  check_references: bool,
893
+ check_type_consistency: bool,
894
+ min_short_description_length: int,
656
895
  ) -> list[str]:
657
- """Process paths and check docstrings.
896
+ """Process paths and check docstrings in each file or directory.
658
897
 
659
898
  Args:
660
899
  paths (list[str]): List of paths to check
@@ -662,6 +901,8 @@ def _process_paths(
662
901
  require_param_types (bool): Whether parameter types are required
663
902
  verbose (bool): Whether to print verbose output
664
903
  check_references (bool): Whether to check references for errors
904
+ check_type_consistency (bool): Whether to compare docstring types with annotations
905
+ min_short_description_length (int): Minimum length for short description (0 to disable)
665
906
 
666
907
  Returns:
667
908
  list[str]: List of error messages
@@ -670,10 +911,25 @@ def _process_paths(
670
911
  for path_str in paths:
671
912
  path = Path(path_str)
672
913
  if path.is_dir():
673
- errors = scan_directory(path, exclude_files, require_param_types, verbose, check_references)
914
+ errors = scan_directory(
915
+ path,
916
+ exclude_files,
917
+ require_param_types,
918
+ verbose,
919
+ check_references,
920
+ check_type_consistency,
921
+ min_short_description_length,
922
+ )
674
923
  all_errors.extend(errors)
675
924
  elif path.is_file() and path.suffix == ".py":
676
- errors = check_file(path, require_param_types, verbose, check_references)
925
+ errors = check_file(
926
+ path,
927
+ require_param_types,
928
+ verbose,
929
+ check_references,
930
+ check_type_consistency,
931
+ min_short_description_length,
932
+ )
677
933
  all_errors.extend(errors)
678
934
  else:
679
935
  print(f"Error: {path} is not a directory or Python file")
@@ -681,7 +937,7 @@ def _process_paths(
681
937
 
682
938
 
683
939
  def main() -> None:
684
- """Run the docstring checker.
940
+ """Run the docstring checker and exit with appropriate status code.
685
941
 
686
942
  Returns:
687
943
  None
@@ -693,7 +949,15 @@ def main() -> None:
693
949
  args = _parse_args()
694
950
 
695
951
  # Get configuration values
696
- paths, require_param_types, verbose, check_references, exclude_files = _get_config_values(args, config)
952
+ (
953
+ paths,
954
+ require_param_types,
955
+ verbose,
956
+ check_references,
957
+ check_type_consistency,
958
+ min_short_description_length,
959
+ exclude_files,
960
+ ) = _get_config_values(args, config)
697
961
 
698
962
  # Print configuration if verbose
699
963
  if verbose:
@@ -701,6 +965,8 @@ def main() -> None:
701
965
  print(f" Paths: {paths}")
702
966
  print(f" Require parameter types: {require_param_types}")
703
967
  print(f" Check references: {check_references}")
968
+ print(f" Check type consistency: {check_type_consistency}")
969
+ print(f" Min short description length: {min_short_description_length}")
704
970
  print(f" Exclude files: {exclude_files}")
705
971
 
706
972
  # Check if paths is empty
@@ -717,6 +983,8 @@ def main() -> None:
717
983
  require_param_types,
718
984
  verbose,
719
985
  check_references,
986
+ check_type_consistency,
987
+ min_short_description_length,
720
988
  ):
721
989
  for error in all_errors:
722
990
  print(error)