troubadix 25.12.4__py3-none-any.whl → 26.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (91)
  1. troubadix/__version__.py +1 -1
  2. troubadix/argparser.py +4 -14
  3. troubadix/helper/date_format.py +3 -8
  4. troubadix/helper/helper.py +1 -3
  5. troubadix/helper/if_block_parser.py +14 -37
  6. troubadix/helper/linguistic_exception_handler.py +6 -18
  7. troubadix/helper/patterns.py +7 -19
  8. troubadix/helper/remove_comments.py +1 -4
  9. troubadix/helper/text_utils.py +1 -3
  10. troubadix/plugin.py +3 -9
  11. troubadix/plugins/__init__.py +6 -20
  12. troubadix/plugins/badwords.py +3 -9
  13. troubadix/plugins/copyright_text.py +2 -5
  14. troubadix/plugins/copyright_year.py +4 -12
  15. troubadix/plugins/creation_date.py +3 -9
  16. troubadix/plugins/cvss_format.py +1 -3
  17. troubadix/plugins/dependencies.py +2 -5
  18. troubadix/plugins/dependency_category_order.py +7 -20
  19. troubadix/plugins/deprecated_dependency.py +6 -16
  20. troubadix/plugins/deprecated_functions.py +2 -4
  21. troubadix/plugins/double_end_points.py +2 -7
  22. troubadix/plugins/duplicate_oid.py +1 -3
  23. troubadix/plugins/duplicated_script_tags.py +1 -2
  24. troubadix/plugins/forking_nasl_functions.py +1 -4
  25. troubadix/plugins/get_kb_on_services.py +2 -4
  26. troubadix/plugins/grammar.py +8 -20
  27. troubadix/plugins/http_links_in_tags.py +3 -6
  28. troubadix/plugins/illegal_characters.py +6 -15
  29. troubadix/plugins/log_messages.py +1 -2
  30. troubadix/plugins/malformed_dependencies.py +2 -6
  31. troubadix/plugins/missing_desc_exit.py +1 -3
  32. troubadix/plugins/multiple_re_parameters.py +2 -7
  33. troubadix/plugins/newlines.py +1 -2
  34. troubadix/plugins/overlong_description_lines.py +2 -6
  35. troubadix/plugins/overlong_script_tags.py +9 -14
  36. troubadix/plugins/prod_svc_detect_in_vulnvt.py +1 -4
  37. troubadix/plugins/qod.py +0 -1
  38. troubadix/plugins/script_add_preference_id.py +3 -10
  39. troubadix/plugins/script_add_preference_type.py +2 -7
  40. troubadix/plugins/script_calls_empty_values.py +3 -3
  41. troubadix/plugins/script_calls_recommended.py +5 -8
  42. troubadix/plugins/script_copyright.py +1 -3
  43. troubadix/plugins/script_family.py +1 -2
  44. troubadix/plugins/script_tag_form.py +1 -3
  45. troubadix/plugins/script_tag_whitespaces.py +5 -10
  46. troubadix/plugins/script_tags_mandatory.py +2 -5
  47. troubadix/plugins/script_version_and_last_modification_tags.py +6 -17
  48. troubadix/plugins/script_xref_form.py +1 -3
  49. troubadix/plugins/script_xref_url.py +3 -7
  50. troubadix/plugins/security_messages.py +8 -21
  51. troubadix/plugins/severity_date.py +3 -10
  52. troubadix/plugins/severity_format.py +1 -4
  53. troubadix/plugins/severity_origin.py +1 -4
  54. troubadix/plugins/solution_text.py +6 -10
  55. troubadix/plugins/solution_type.py +1 -2
  56. troubadix/plugins/spaces_before_dots.py +2 -9
  57. troubadix/plugins/spaces_in_filename.py +1 -2
  58. troubadix/plugins/spelling.py +5 -14
  59. troubadix/plugins/trailing_spaces_tabs.py +2 -5
  60. troubadix/plugins/using_display.py +2 -6
  61. troubadix/plugins/valid_oid.py +51 -60
  62. troubadix/plugins/valid_script_tag_names.py +2 -5
  63. troubadix/plugins/variable_assigned_in_if.py +2 -7
  64. troubadix/plugins/variable_redefinition_in_foreach.py +2 -6
  65. troubadix/plugins/vt_placement.py +2 -8
  66. troubadix/reporter.py +7 -21
  67. troubadix/results.py +2 -8
  68. troubadix/runner.py +5 -14
  69. troubadix/standalone_plugins/allowed_rev_diff.py +8 -25
  70. troubadix/standalone_plugins/changed_creation_date.py +4 -14
  71. troubadix/standalone_plugins/changed_cves.py +4 -12
  72. troubadix/standalone_plugins/changed_oid.py +3 -8
  73. troubadix/standalone_plugins/changed_packages/changed_packages.py +3 -8
  74. troubadix/standalone_plugins/changed_packages/marker/changed_update.py +1 -3
  75. troubadix/standalone_plugins/changed_packages/marker/dropped_architecture.py +1 -3
  76. troubadix/standalone_plugins/changed_packages/package.py +13 -7
  77. troubadix/standalone_plugins/dependency_graph/checks.py +5 -15
  78. troubadix/standalone_plugins/dependency_graph/dependency_graph.py +5 -13
  79. troubadix/standalone_plugins/deprecate_vts.py +3 -9
  80. troubadix/standalone_plugins/file_extensions.py +3 -10
  81. troubadix/standalone_plugins/last_modification.py +3 -9
  82. troubadix/standalone_plugins/no_solution.py +12 -34
  83. troubadix/standalone_plugins/util.py +0 -2
  84. troubadix/standalone_plugins/version_updated.py +5 -14
  85. troubadix/troubadix.py +1 -4
  86. {troubadix-25.12.4.dist-info → troubadix-26.2.0.dist-info}/METADATA +1 -1
  87. troubadix-26.2.0.dist-info/RECORD +116 -0
  88. {troubadix-25.12.4.dist-info → troubadix-26.2.0.dist-info}/WHEEL +1 -1
  89. troubadix-25.12.4.dist-info/RECORD +0 -116
  90. {troubadix-25.12.4.dist-info → troubadix-26.2.0.dist-info}/entry_points.txt +0 -0
  91. {troubadix-25.12.4.dist-info → troubadix-26.2.0.dist-info}/licenses/LICENSE +0 -0
@@ -12,9 +12,7 @@ from troubadix.standalone_plugins.common import get_merge_base, git
 CVE_PATTERN = re.compile(r"CVE-\d{4}-\d{4,}")


-def compare(
-    old_content: str, current_content: str
-) -> Tuple[List[str], List[str]]:
+def compare(old_content: str, current_content: str) -> Tuple[List[str], List[str]]:
     old_cves = get_cves_from_content(old_content)
     current_cves = get_cves_from_content(current_content)

@@ -25,9 +23,7 @@ def compare(


 def get_cves_from_content(content: str) -> Set[str]:
-    pattern = _get_special_script_tag_pattern(
-        name="cve_id", flags=re.MULTILINE | re.DOTALL
-    )
+    pattern = _get_special_script_tag_pattern(name="cve_id", flags=re.MULTILINE | re.DOTALL)
     match = pattern.search(content)
     if not match:
         return set()
@@ -72,18 +68,14 @@ def main():
     args = parse_args()
     terminal = ConsoleTerminal()

-    terminal.info(
-        f"Checking {len(args.files)} file(s) from {args.start_commit} to HEAD"
-    )
+    terminal.info(f"Checking {len(args.files)} file(s) from {args.start_commit} to HEAD")

     for file in args.files:
         try:
             old_content = git("show", f"{args.start_commit}:{file}")
             current_content = git("show", f"HEAD:{file}")
         except CalledProcessError:
-            terminal.error(
-                f"Could not find {file} at {args.start_commit} or HEAD"
-            )
+            terminal.error(f"Could not find {file} at {args.start_commit} or HEAD")
             continue

         missing_cves, added_cves = compare(old_content, current_content)
@@ -49,8 +49,7 @@ def parse_args(args: Iterable[str]) -> Namespace:
         type=file_type_existing,
         default=[],
         help=(
-            "List of files to diff. "
-            "If empty use all files added or modified in the commit range."
+            "List of files to diff. If empty use all files added or modified in the commit range."
         ),
     )
     return parser.parse_args(args=args)
@@ -69,9 +68,7 @@ def check_oid(args: Namespace) -> bool:
     if not args.files:
         args.files += [
             Path(f)
-            for f in git(
-                "diff", "--name-only", "--diff-filter=d", args.commit_range
-            ).splitlines()
+            for f in git("diff", "--name-only", "--diff-filter=d", args.commit_range).splitlines()
         ]

     rcode = False
@@ -125,9 +122,7 @@ def main() -> int:
         git_base = git("rev-parse", "--show-toplevel")
         os.chdir(git_base.rstrip("\n"))
     except subprocess.SubprocessError:
-        print(
-            "Your current working directory doesn't belong to a git repository"
-        )
+        print("Your current working directory doesn't belong to a git repository")
         return 1

     if check_oid(parse_args(args)):
@@ -64,8 +64,7 @@ def filter_reasons(packages: List[Package], reasons: Iterable[Reasons]):
     return [
         package
         for package in packages
-        if not package.reasons
-        or any([reason not in reasons for reason in package.reasons])
+        if not package.reasons or any([reason not in reasons for reason in package.reasons])
     ]


@@ -154,9 +153,7 @@ def main():
     hide_reasons = set(args.hide_reasons)
     terminal = ConsoleTerminal()

-    terminal.info(
-        f"Checking {len(args.files)} file(s) from {args.start_commit} to HEAD"
-    )
+    terminal.info(f"Checking {len(args.files)} file(s) from {args.start_commit} to HEAD")

     for file in args.files:
         try:
@@ -164,9 +161,7 @@ def main():
             content = git("show", f"HEAD:{file}")
             missing_packages, new_packages = compare(old_content, content)
         except CalledProcessError:
-            terminal.error(
-                f"Could not find {file} at {args.start_commit} or HEAD"
-            )
+            terminal.error(f"Could not find {file} at {args.start_commit} or HEAD")
             continue
         except ValueError as e:
             terminal.error(f"Error while handling {file}: {e}")
@@ -43,9 +43,7 @@ class ChangedUpdate(Marker):
                 old_package
                 for old_package in missing_packages
                 if package.name == old_package.name
-                and old_package.version.startswith(
-                    package.version.replace(suffix, "")
-                )
+                and old_package.version.startswith(package.version.replace(suffix, ""))
                 and package.release == old_package.release
             ),
             None,
@@ -46,6 +46,4 @@ class DroppedArchitecture(Marker):
                 continue

             package.reasons[Reasons.DROPPED_ARCHITECTURE] = Direction.PASSIVE
-            other_package.reasons[Reasons.DROPPED_ARCHITECTURE] = (
-                Direction.ACTIVE
-            )
+            other_package.reasons[Reasons.DROPPED_ARCHITECTURE] = Direction.ACTIVE
@@ -42,9 +42,7 @@ class Reasons(str, Enum):
         try:
             return cls[cli_argument.upper().replace("-", "_")]
         except KeyError as error:
-            raise ArgumentError(
-                None, f"Invalid reason '{cli_argument}'"
-            ) from error
+            raise ArgumentError(None, f"Invalid reason '{cli_argument}'") from error


 @dataclass()
@@ -76,14 +74,22 @@ class Package:

         return False

+    def __le__(self, other: "Package") -> bool:
+        # Sort by release first, then the other fields
+        if self.release != other.release:
+            return self.release < other.release
+        if self.name != other.name:
+            return self.name < other.name
+
+        return self.version <= other.version
+
     def __str__(self) -> str:
-        result = f"{self.name : <50} {self.version : <40} {self.release : <10}"
+        result = f"{self.name: <50} {self.version: <40} {self.release: <10}"

         reasons = ", ".join(
-            f"{change}"
-            f"{' in new package' if direction == Direction.PASSIVE else ''}"
+            f"{change}{' in new package' if direction == Direction.PASSIVE else ''}"
             for change, direction in self.reasons.items()
         )
-        result += f"{reasons : <10}"
+        result += f"{reasons: <10}"

         return result
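
The new __le__ above makes Package instances directly comparable, ordering by release first, then name, then version. A minimal sketch of what that enables, with made-up package values; the real dataclass carries more fields (such as the reasons mapping), so this is only an illustration:

    from dataclasses import dataclass

    @dataclass
    class Package:
        name: str
        version: str
        release: str

        def __le__(self, other: "Package") -> bool:
            # Same ordering as in the diff: release first, then name, then version
            if self.release != other.release:
                return self.release < other.release
            if self.name != other.name:
                return self.name < other.name
            return self.version <= other.version

    old = Package("bash", "5.1", "20.04")
    new = Package("bash", "5.2", "22.04")
    print(old <= new)  # True: releases differ, so "20.04" < "22.04" decides
    print(new <= old)  # False

Note that the fields are compared as plain strings, so the ordering is lexicographic rather than version-aware, and sorted() still needs an explicit key (or __lt__), since it does not fall back to __le__.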
@@ -23,28 +23,21 @@ def check_duplicates(scripts: list[Script]) -> Result:
     return Result(name="duplicate dependency", warnings=warnings)


-def check_missing_dependencies(
-    scripts: list[Script], graph: nx.DiGraph
-) -> Result:
+def check_missing_dependencies(scripts: list[Script], graph: nx.DiGraph) -> Result:
     """
     Checks if any scripts that are depended on are missing from
     the list of scripts created from the local file system,
     logs the scripts dependending on the missing script
     """
     errors = []
-    dependency_names = {
-        dep.name for script in scripts for dep in script.dependencies
-    }
+    dependency_names = {dep.name for script in scripts for dep in script.dependencies}
     script_names = {script.name for script in scripts}
    missing_dependencies = dependency_names - script_names

     for missing in missing_dependencies:
         depending_scripts = graph.predecessors(missing)
         errors.append(
-            f"{missing}:"
-            + "".join(
-                f"\n - used by: {script}" for script in depending_scripts
-            )
+            f"{missing}:" + "".join(f"\n - used by: {script}" for script in depending_scripts)
         )

     return Result(name="missing dependency", errors=errors)
@@ -63,9 +56,7 @@ def check_cycles(graph) -> Result:
     return Result(name="cyclic dependency", errors=errors)


-def cross_feed_dependencies(
-    graph, is_enterprise_checked: bool
-) -> list[tuple[str, str]]:
+def cross_feed_dependencies(graph, is_enterprise_checked: bool) -> list[tuple[str, str]]:
     """
     creates a list of script and dependency for scripts
     in community feed that depend on scripts in enterprise folders
@@ -105,8 +96,7 @@ def check_category_order(graph) -> Result:
     problematic_edges = [
         (dependent, dependency)
         for dependent, dependency in graph.edges()
-        if graph.nodes[dependent]["category"]
-        < graph.nodes[dependency].get("category", -1)
+        if graph.nodes[dependent]["category"] < graph.nodes[dependency].get("category", -1)
     ]

     errors = [
@@ -34,9 +34,7 @@ from .checks import (
 from .cli import Feed, parse_args
 from .models import Dependency, Result, Script

-DEPENDENCY_PATTERN = _get_special_script_tag_pattern(
-    "dependencies", flags=re.DOTALL | re.MULTILINE
-)
+DEPENDENCY_PATTERN = _get_special_script_tag_pattern("dependencies", flags=re.DOTALL | re.MULTILINE)
 CATEGORY_PATTERN = get_special_script_tag_pattern(SpecialScriptTag.CATEGORY)
 DEPRECATED_PATTERN = get_script_tag_pattern(ScriptTag.DEPRECATED)
 ENTERPRISE_FEED_CHECK_PATTERN = re.compile(
@@ -84,9 +82,7 @@ def get_scripts(directory: Path) -> list[Script]:
             dependencies = extract_dependencies(content)
             category = extract_category(content)
             deprecated = bool(DEPRECATED_PATTERN.search(content))
-            scripts.append(
-                Script(name, feed, dependencies, category, deprecated)
-            )
+            scripts.append(Script(name, feed, dependencies, category, deprecated))
         except Exception as e:
             logger.error(f"Error processing {path}: {e}")

@@ -105,20 +101,16 @@ def extract_dependencies(content: str) -> list[Dependency]:
     dependencies = []

     if_blocks = [
-        (match.start(), match.end())
-        for match in ENTERPRISE_FEED_CHECK_PATTERN.finditer(content)
+        (match.start(), match.end()) for match in ENTERPRISE_FEED_CHECK_PATTERN.finditer(content)
     ]

     for match in DEPENDENCY_PATTERN.finditer(content):
         start, end = match.span()
         is_enterprise_feed = any(
-            start >= block_start and end <= block_end
-            for block_start, block_end in if_blocks
+            start >= block_start and end <= block_end for block_start, block_end in if_blocks
         )
         dep_list = split_dependencies(match.group("value"))
-        dependencies.extend(
-            Dependency(dep, is_enterprise_feed) for dep in dep_list
-        )
+        dependencies.extend(Dependency(dep, is_enterprise_feed) for dep in dep_list)

     return dependencies

@@ -42,9 +42,7 @@ KB_ITEMS_PATTERN = re.compile(r"set_kb_item\(.+\);")


 def load_transition_oid_mapping(transition_file: Path) -> dict[str, str]:
-    spec = importlib.util.spec_from_file_location(
-        "transition_layer", transition_file
-    )
+    spec = importlib.util.spec_from_file_location("transition_layer", transition_file)
     transition_layer = importlib.util.module_from_spec(spec)
     spec.loader.exec_module(transition_layer)

@@ -151,9 +149,7 @@ def find_replacement_oid(
         file.content,
     )
     if not oid_match:
-        raise ValueError(
-            f"No OID found in {file.name}. Cannot map to replacement OID."
-        )
+        raise ValueError(f"No OID found in {file.name}. Cannot map to replacement OID.")
     oid = oid_match.group("value")
     replacement_oid = oid_mapping.get(oid)
     if not replacement_oid:
@@ -181,9 +177,7 @@ def deprecate(
     output_path.mkdir(parents=True, exist_ok=True)
     for file in to_deprecate:
         if re.findall(KB_ITEMS_PATTERN, file.content):
-            logger.warning(
-                f"Unable to deprecate {file.name}. There are still KB keys remaining."
-            )
+            logger.warning(f"Unable to deprecate {file.name}. There are still KB keys remaining.")
             continue

         replacement_oid = find_replacement_oid(file, oid_mapping)
@@ -19,9 +19,7 @@ def parse_args() -> Namespace:
         type=directory_type_existing,
         help="directory that should be linted",
     )
-    parser.add_argument(
-        "--ignore-file", type=file_type_existing, help="path to ignore file"
-    )
+    parser.add_argument("--ignore-file", type=file_type_existing, help="path to ignore file")
     parser.add_argument(
         "--gen-ignore-entries",
         action="store_true",
@@ -35,9 +33,7 @@ def create_exclusions(ignore_file: Path) -> set[Path]:
         return set()

     with open(ignore_file, "r", encoding="utf-8") as file:
-        return {
-            Path(line.strip()) for line in file if not re.match(r"^\s*#", line)
-        }
+        return {Path(line.strip()) for line in file if not re.match(r"^\s*#", line)}


 def check_extensions(args: Namespace) -> List[Path]:
@@ -77,10 +73,7 @@ def main() -> int:
             print(file.relative_to(args.dir))
         return 0

-    print(
-        f"{len(unwanted_files)} "
-        "Files with unwanted file extension were found:"
-    )
+    print(f"{len(unwanted_files)} Files with unwanted file extension were found:")
     for file in unwanted_files:
         print(file)
     return 1
@@ -48,9 +48,7 @@ def update(nasl_file: Path, terminal: Terminal):
     )

     if not match_last_modification_any_value:
-        terminal.warning(
-            f'Ignoring "{nasl_file}" because it is missing a last_modification tag.'
-        )
+        terminal.warning(f'Ignoring "{nasl_file}" because it is missing a last_modification tag.')
         return

     now = datetime.datetime.now(datetime.timezone.utc)
@@ -71,9 +69,7 @@ def update(nasl_file: Path, terminal: Terminal):
         string=file_content,
     )
     if not match_script_version:
-        terminal.warning(
-            f'Ignoring "{nasl_file}" because it is missing a script_version.'
-        )
+        terminal.warning(f'Ignoring "{nasl_file}" because it is missing a script_version.')
         return

     # get that date formatted correctly:
@@ -89,9 +85,7 @@ def update(nasl_file: Path, terminal: Terminal):


 def parse_args(args: Sequence[str] = None) -> Namespace:
-    parser = ArgumentParser(
-        description="Update script_version and last_modification tags"
-    )
+    parser = ArgumentParser(description="Update script_version and last_modification tags")
     what_group = parser.add_mutually_exclusive_group(required=True)
     what_group.add_argument(
         "--files",
@@ -53,9 +53,7 @@ MONTH_AS_DAYS = 365 / 12
 def parse_solution_date(date_string: str) -> datetime:
     """Convert date string to date trying different formats"""

-    date_string = re.sub(
-        r"(?P<date>.\d{1,2})(st|nd|rd|th)", r"\g<date>", date_string
-    )
+    date_string = re.sub(r"(?P<date>.\d{1,2})(st|nd|rd|th)", r"\g<date>", date_string)

     for strptime in SOLUTION_DATE_FORMATS:
         try:
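
The re.sub call shown above strips English ordinal suffixes from day numbers so that the following strptime attempts only have to handle plain dates. A quick illustration with made-up date strings (not taken from the feed):

    import re

    for raw in ("March 21st, 2023", "May 2nd 2024", "August 3rd, 2022"):
        print(re.sub(r"(?P<date>.\d{1,2})(st|nd|rd|th)", r"\g<date>", raw))
    # March 21, 2023
    # May 2 2024
    # August 3, 2022

The leading "." in the named group keeps the character in front of the day (usually a space), so only the suffix itself is dropped.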
@@ -99,8 +97,7 @@ def parse_args() -> Namespace:
         dest="threshold",
         type=int,
         default=12,
-        help="The threshold after which to assume no solution "
-        "will be provided anymore",
+        help="The threshold after which to assume no solution will be provided anymore",
     )

     parser.add_argument(
@@ -118,10 +115,7 @@ def parse_args() -> Namespace:

 def check_skip_script(file_content: str) -> bool:
     solution_type = SOLUTION_TYPE_PATTERN.search(file_content)
-    if (
-        solution_type
-        and solution_type.group("value") != SOLUTION_TYPE_NONE_AVAILABLE
-    ):
+    if solution_type and solution_type.group("value") != SOLUTION_TYPE_NONE_AVAILABLE:
         return True

     cvss = CVSS_PATTERN.search(file_content)
@@ -148,9 +142,7 @@ def extract_tags(content: str) -> Optional[Tuple[str, datetime, datetime]]:
     if not creation_match:
         return None

-    creation_date = datetime.strptime(
-        creation_match.group("value")[:10], CREATION_DATE_FORMAT
-    )
+    creation_date = datetime.strptime(creation_match.group("value")[:10], CREATION_DATE_FORMAT)

     oid_match = OID_PATTERN.search(content)
     if not oid_match:
@@ -164,9 +156,7 @@ def extract_tags(content: str) -> Optional[Tuple[str, datetime, datetime]]:
 def get_no_solution_vts(
     files: Iterable[Path],
 ) -> Iterable[Tuple[Path, str, datetime, datetime]]:
-    file_contents = (
-        (file, file.read_text(encoding=CURRENT_ENCODING)) for file in files
-    )
+    file_contents = ((file, file.read_text(encoding=CURRENT_ENCODING)) for file in files)
     return (
         (file, *extract_tags(content))
         for file, content in file_contents
@@ -191,22 +181,17 @@ def check_no_solutions(
             (
                 milestone
                 for milestone in milestones
-                if solution_date
-                < creation_date + timedelta(days=milestone * MONTH_AS_DAYS)
-                and milestone * MONTH_AS_DAYS
-                <= (datetime.now() - creation_date).days
+                if solution_date < creation_date + timedelta(days=milestone * MONTH_AS_DAYS)
+                and milestone * MONTH_AS_DAYS <= (datetime.now() - creation_date).days
             ),
             None,
         )

-        if solution_date > creation_date + timedelta(
-            days=last_milestone * MONTH_AS_DAYS
-        ):
+        if solution_date > creation_date + timedelta(days=last_milestone * MONTH_AS_DAYS):
             milestone = last_milestone

         if not milestone or (
-            milestone == last_milestone
-            and (datetime.now() - solution_date) < snooze_duration
+            milestone == last_milestone and (datetime.now() - solution_date) < snooze_duration
         ):
             continue

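The reflowed condition above still selects the first milestone whose window has fully elapsed since creation while the solution tag was last updated before that window closed. A small worked example using the same selection expression, with MONTH_AS_DAYS = 365 / 12 and made-up dates and milestones (the real milestone list and "now" come from the CLI arguments and datetime.now()):

    from datetime import datetime, timedelta

    MONTH_AS_DAYS = 365 / 12  # roughly 30.4 days per month

    creation_date = datetime(2023, 1, 1)
    solution_date = datetime(2023, 1, 1)  # solution tag never updated
    now = datetime(2024, 1, 1)            # 365 days after creation
    milestones = [1, 3, 6, 12]            # hypothetical milestone values

    milestone = next(
        (
            m
            for m in milestones
            if solution_date < creation_date + timedelta(days=m * MONTH_AS_DAYS)
            and m * MONTH_AS_DAYS <= (now - creation_date).days
        ),
        None,
    )
    print(milestone)  # 1: older than one month and no solution update since
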
@@ -254,10 +239,7 @@ def print_report(
                "No solution should be expected at this point. "
            )
        else:
-            term.bold_info(
-                f"{len(vts)} VTs with no solution for "
-                f"more than {milestone} month(s)"
-            )
+            term.bold_info(f"{len(vts)} VTs with no solution for more than {milestone} month(s)")

        for vt, oid, creation, solution in vts:
            term.info(str(vt.relative_to(root)))
@@ -265,9 +247,7 @@ def print_report(
            with term.indent():
                term.print(f"OID: {oid}")
                term.print(f"Created: {creation.strftime('%Y-%m-%d')}")
-                term.print(
-                    f"Last solution update: {solution.strftime('%Y-%m-%d')}"
-                )
+                term.print(f"Last solution update: {solution.strftime('%Y-%m-%d')}")

                term.print()

@@ -284,9 +264,7 @@ def main():

     term = ConsoleTerminal()

-    print_info(
-        term, milestones, arguments.threshold, arguments.snooze, root
-    )
+    print_info(term, milestones, arguments.threshold, arguments.snooze, root)

     summary = check_no_solutions(files, milestones, arguments.snooze)

@@ -11,11 +11,9 @@ from troubadix.standalone_plugins.changed_creation_date import git

 @contextmanager
 def temporary_git_directory():
-
     cwd = Path.cwd()

     with TemporaryDirectory() as tempdir:
-
         try:
             chdir(tempdir)
             git("init", "-b", "main")
@@ -31,9 +31,7 @@ from troubadix.helper.patterns import (
 )
 from troubadix.standalone_plugins.common import git

-SCRIPT_VERSION_PATTERN = re.compile(
-    r"^\+\s*" + SCRIPT_VERSION_ANY_VALUE_PATTERN, re.MULTILINE
-)
+SCRIPT_VERSION_PATTERN = re.compile(r"^\+\s*" + SCRIPT_VERSION_ANY_VALUE_PATTERN, re.MULTILINE)
 SCRIPT_LAST_MODIFICATION_PATTERN = re.compile(
     r"^\+\s*" + LAST_MODIFICATION_ANY_VALUE_PATTERN, re.MULTILINE
 )
@@ -70,8 +68,7 @@ def parse_args(args: Iterable[str]) -> Namespace:
         type=file_type_existing,
         default=[],
         help=(
-            "List of files to diff. "
-            "If empty use all files added or modified in the commit range."
+            "List of files to diff. If empty use all files added or modified in the commit range."
         ),
     )
     return parser.parse_args(args=args)
@@ -88,9 +85,7 @@ def check_version_updated(files: List[Path], commit_range: str) -> bool:
     if not files:
         files = [
             Path(f)
-            for f in git(
-                "diff", "--name-only", "--diff-filter=d", commit_range
-            ).splitlines()
+            for f in git("diff", "--name-only", "--diff-filter=d", commit_range).splitlines()
         ]

     rcode = True
@@ -113,9 +108,7 @@ def check_version_updated(files: List[Path], commit_range: str) -> bool:
         )

         if not SCRIPT_VERSION_PATTERN.search(text):
-            print(
-                f"{nasl_file}: Missing updated script_version", file=sys.stderr
-            )
+            print(f"{nasl_file}: Missing updated script_version", file=sys.stderr)
             rcode = False

         if not SCRIPT_LAST_MODIFICATION_PATTERN.search(text):
@@ -135,9 +128,7 @@ def main() -> int:
         git_base = git("rev-parse", "--show-toplevel")
         os.chdir(git_base.rstrip("\n"))
     except subprocess.SubprocessError:
-        print(
-            "Your current working directory doesn't belong to a git repository"
-        )
+        print("Your current working directory doesn't belong to a git repository")
         return 1

     parsed_args = parse_args(args)
troubadix/troubadix.py CHANGED
@@ -98,10 +98,7 @@ def generate_patterns(
 def from_file(include_file: Path, term: Terminal) -> Iterable[Path]:
     """Parse the given file containing a list of files into"""
     try:
-        return [
-            Path(f)
-            for f in include_file.read_text(encoding="utf-8").splitlines()
-        ]
+        return [Path(f) for f in include_file.read_text(encoding="utf-8").splitlines()]
     except FileNotFoundError:
         term.error(f"File {include_file} containing the file list not found.")
         sys.exit(1)
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: troubadix
-Version: 25.12.4
+Version: 26.2.0
 Summary: A linting and QA check tool for NASL files
 License: GPL-3.0-or-later
 License-File: LICENSE