skylos 1.0.10__py3-none-any.whl → 2.5.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. skylos/__init__.py +9 -3
  2. skylos/analyzer.py +674 -168
  3. skylos/cfg_visitor.py +60 -0
  4. skylos/cli.py +719 -235
  5. skylos/codemods.py +277 -0
  6. skylos/config.py +50 -0
  7. skylos/constants.py +78 -0
  8. skylos/gatekeeper.py +147 -0
  9. skylos/linter.py +18 -0
  10. skylos/rules/base.py +20 -0
  11. skylos/rules/danger/calls.py +119 -0
  12. skylos/rules/danger/danger.py +157 -0
  13. skylos/rules/danger/danger_cmd/cmd_flow.py +75 -0
  14. skylos/rules/danger/danger_fs/__init__.py +0 -0
  15. skylos/rules/danger/danger_fs/path_flow.py +79 -0
  16. skylos/rules/danger/danger_net/__init__.py +0 -0
  17. skylos/rules/danger/danger_net/ssrf_flow.py +80 -0
  18. skylos/rules/danger/danger_sql/__init__.py +0 -0
  19. skylos/rules/danger/danger_sql/sql_flow.py +245 -0
  20. skylos/rules/danger/danger_sql/sql_raw_flow.py +96 -0
  21. skylos/rules/danger/danger_web/__init__.py +0 -0
  22. skylos/rules/danger/danger_web/xss_flow.py +170 -0
  23. skylos/rules/danger/taint.py +110 -0
  24. skylos/rules/quality/__init__.py +0 -0
  25. skylos/rules/quality/complexity.py +95 -0
  26. skylos/rules/quality/logic.py +96 -0
  27. skylos/rules/quality/nesting.py +101 -0
  28. skylos/rules/quality/structure.py +99 -0
  29. skylos/rules/secrets.py +325 -0
  30. skylos/server.py +554 -0
  31. skylos/visitor.py +502 -90
  32. skylos/visitors/__init__.py +0 -0
  33. skylos/visitors/framework_aware.py +437 -0
  34. skylos/visitors/test_aware.py +74 -0
  35. skylos-2.5.2.dist-info/METADATA +21 -0
  36. skylos-2.5.2.dist-info/RECORD +42 -0
  37. {skylos-1.0.10.dist-info → skylos-2.5.2.dist-info}/WHEEL +1 -1
  38. {skylos-1.0.10.dist-info → skylos-2.5.2.dist-info}/top_level.txt +0 -1
  39. skylos-1.0.10.dist-info/METADATA +0 -8
  40. skylos-1.0.10.dist-info/RECORD +0 -21
  41. test/compare_tools.py +0 -604
  42. test/diagnostics.py +0 -364
  43. test/sample_repo/app.py +0 -13
  44. test/sample_repo/sample_repo/commands.py +0 -81
  45. test/sample_repo/sample_repo/models.py +0 -122
  46. test/sample_repo/sample_repo/routes.py +0 -89
  47. test/sample_repo/sample_repo/utils.py +0 -36
  48. test/test_skylos.py +0 -456
  49. test/test_visitor.py +0 -220
  50. {test → skylos/rules}/__init__.py +0 -0
  51. {test/sample_repo → skylos/rules/danger}/__init__.py +0 -0
  52. {test/sample_repo/sample_repo → skylos/rules/danger/danger_cmd}/__init__.py +0 -0
  53. {skylos-1.0.10.dist-info → skylos-2.5.2.dist-info}/entry_points.txt +0 -0
skylos/analyzer.py CHANGED
@@ -1,240 +1,746 @@
  #!/usr/bin/env python3
- import ast,sys,json,logging,re
+ import ast
+ import sys
+ import json
+ import logging
  from pathlib import Path
  from collections import defaultdict
  from skylos.visitor import Visitor
+ from skylos.constants import PENALTIES, AUTO_CALLED
+ from skylos.visitors.test_aware import TestAwareVisitor
+ from skylos.rules.secrets import scan_ctx as _secrets_scan_ctx
+ from skylos.rules.danger.danger import scan_ctx as scan_danger
+ import os
+ import traceback
+ from skylos.visitors.framework_aware import (
+     FrameworkAwareVisitor,
+     detect_framework_usage,
+ )

- logging.basicConfig(level=logging.INFO,format='%(asctime)s - %(levelname)s - %(message)s')
- logger=logging.getLogger('Skylos')
+ from skylos.config import load_config
+ from skylos.linter import LinterVisitor
+ from skylos.rules.quality.complexity import ComplexityRule
+ from skylos.rules.quality.nesting import NestingRule
+ from skylos.rules.danger.calls import DangerousCallsRule
+ from skylos.rules.quality.structure import ArgCountRule, FunctionLengthRule
+ from skylos.rules.quality.logic import MutableDefaultRule, BareExceptRule, DangerousComparisonRule
+
+ logging.basicConfig(
+     level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s"
+ )
+ logger = logging.getLogger("Skylos")

- AUTO_CALLED={"__init__","__enter__","__exit__"}
- TEST_BASE_CLASSES = {"TestCase", "AsyncioTestCase", "unittest.TestCase", "unittest.AsyncioTestCase"}
- TEST_METHOD_PATTERN = re.compile(r"^test_\w+$")
- MAGIC_METHODS={f"__{n}__"for n in["init","new","call","getattr","getattribute","enter","exit","str","repr","hash","eq","ne","lt","gt","le","ge","iter","next","contains","len","getitem","setitem","delitem","iadd","isub","imul","itruediv","ifloordiv","imod","ipow","ilshift","irshift","iand","ixor","ior","round","format","dir","abs","complex","int","float","bool","bytes","reduce","await","aiter","anext","add","sub","mul","truediv","floordiv","mod","divmod","pow","lshift","rshift","and","or","xor","radd","rsub","rmul","rtruediv","rfloordiv","rmod","rdivmod","rpow","rlshift","rrshift","rand","ror","rxor"]}

  class Skylos:
      def __init__(self):
-         self.defs={}
-         self.refs=[]
-         self.dynamic=set()
-         self.exports=defaultdict(set)
-
-     def _module(self,root,f):
-         p=list(f.relative_to(root).parts)
-         if p[-1].endswith(".py"):p[-1]=p[-1][:-3]
-         if p[-1]=="__init__":p.pop()
-         return".".join(p)
-
+         self.defs = {}
+         self.refs = []
+         self.dynamic = set()
+         self.exports = defaultdict(set)
+
+     def _module(self, root, f):
+         p = list(f.relative_to(root).parts)
+         if p[-1].endswith(".py"):
+             p[-1] = p[-1][:-3]
+         if p[-1] == "__init__":
+             p.pop()
+         return ".".join(p)
+
+     def _should_exclude_file(self, file_path, root_path, exclude_folders):
+         if not exclude_folders:
+             return False
+
+         try:
+             rel_path = file_path.relative_to(root_path)
+         except ValueError:
+             return False
+
+         path_parts = rel_path.parts
+
+         for exclude_folder in exclude_folders:
+             if "*" in exclude_folder:
+                 for part in path_parts:
+                     if part.endswith(exclude_folder.replace("*", "")):
+                         return True
+             else:
+                 if exclude_folder in path_parts:
+                     return True
+
+         return False
+
+     def _get_python_files(self, path, exclude_folders=None):
+         p = Path(path).resolve()
+
+         if p.is_file():
+             return [p], p.parent
+
+         root = p
+         all_files = list(p.glob("**/*.py"))
+
+         if exclude_folders:
+             filtered_files = []
+             excluded_count = 0
+
+             for file_path in all_files:
+                 if self._should_exclude_file(file_path, root, exclude_folders):
+                     excluded_count += 1
+                     continue
+                 filtered_files.append(file_path)
+
+             if excluded_count > 0:
+                 logger.info(f"Excluded {excluded_count} files from analysis")
+
+             return filtered_files, root
+
+         return all_files, root
+
      def _mark_exports(self):
-         for name, d in self.defs.items():
-             if d.in_init and not d.simple_name.startswith('_'):
-                 d.is_exported = True
-
+         for name, definition in self.defs.items():
+             if definition.in_init and not definition.simple_name.startswith("_"):
+                 definition.is_exported = True
+
          for mod, export_names in self.exports.items():
              for name in export_names:
                  for def_name, def_obj in self.defs.items():
-                     if (def_name.startswith(f"{mod}.") and
-                         def_obj.simple_name == name and
-                         def_obj.type != "import"):
+                     if (
+                         def_name.startswith(f"{mod}.")
+                         and def_obj.simple_name == name
+                         and def_obj.type != "import"
+                     ):
                          def_obj.is_exported = True

      def _mark_refs(self):
          import_to_original = {}
          for name, def_obj in self.defs.items():
              if def_obj.type == "import":
-                 import_name = name.split('.')[-1]
-
+                 import_name = name.split(".")[-1]
+
                  for def_name, orig_def in self.defs.items():
-                     if (orig_def.type != "import" and
-                         orig_def.simple_name == import_name and
-                         def_name != name):
+                     if (
+                         orig_def.type != "import"
+                         and orig_def.simple_name == import_name
+                         and def_name != name
+                     ):
                          import_to_original[name] = def_name
                          break

          simple_name_lookup = defaultdict(list)
-         for d in self.defs.values():
-             simple_name_lookup[d.simple_name].append(d)
-
-         for ref, file in self.refs:
+         for definition in self.defs.values():
+             simple_name_lookup[definition.simple_name].append(definition)
+
+         for ref, _ in self.refs:
              if ref in self.defs:
                  self.defs[ref].references += 1
-
              if ref in import_to_original:
                  original = import_to_original[ref]
                  self.defs[original].references += 1
                  continue
-
-             simple = ref.split('.')[-1]
-             matches = simple_name_lookup.get(simple, [])
-             for d in matches:
-                 d.references += 1
-
+
+             simple = ref.split(".")[-1]
+             ref_mod = ref.rsplit(".", 1)[0]
+             candidates = simple_name_lookup.get(simple, [])
+
+             if ref_mod:
+                 if ref_mod in ("cls", "self"):
+                     cls_candidates = []
+                     for d in candidates:
+                         if d.type == "variable" and "." in d.name:
+                             cls_candidates.append(d)
+
+                     if cls_candidates:
+                         for d in cls_candidates:
+                             d.references += 1
+                         continue
+
+                 else:
+                     filtered = []
+                     for d in candidates:
+                         if d.name.startswith(ref_mod + ".") and d.type != "import":
+                             filtered.append(d)
+                     candidates = filtered
+             else:
+                 filtered = []
+                 for d in candidates:
+                     if d.type != "import":
+                         filtered.append(d)
+                 candidates = filtered
+
+             if len(candidates) == 1:
+                 candidates[0].references += 1
+                 continue
+
+             non_import_defs = []
+             for d in simple_name_lookup.get(simple, []):
+                 if d.type != "import":
+                     non_import_defs.append(d)
+
+             if len(non_import_defs) == 1:
+                 non_import_defs[0].references += 1
+                 continue
+
+         for module_name in self.dynamic:
+             for def_name, def_obj in self.defs.items():
+                 if def_obj.name.startswith(f"{module_name}."):
+                     if def_obj.type in (
+                         "function",
+                         "method",
+                     ) and not def_obj.simple_name.startswith("_"):
+                         def_obj.references += 1
+
      def _get_base_classes(self, class_name):
          if class_name not in self.defs:
              return []
-
+
          class_def = self.defs[class_name]
-
-         if hasattr(class_def, 'base_classes'):
+
+         if hasattr(class_def, "base_classes"):
              return class_def.base_classes
-
+
          return []
-
+
+     def _apply_penalties(self, def_obj, visitor, framework):
+         confidence = 100
+
+         if (
+             getattr(visitor, "ignore_lines", None)
+             and def_obj.line in visitor.ignore_lines
+         ):
+             def_obj.confidence = 0
+             return
+
+         if "." in def_obj.name:
+             owner, attr = def_obj.name.rsplit(".", 1)
+             owner_simple = owner.split(".")[-1]
+
+             if (
+                 owner_simple == "Settings"
+                 or owner_simple == "Config"
+                 or owner_simple.endswith("Settings")
+                 or owner_simple.endswith("Config")
+             ):
+                 if attr.isupper() or not attr.startswith("_"):
+                     def_obj.confidence = 0
+                     return
+
+         if def_obj.type == "variable" and def_obj.simple_name == "_":
+             def_obj.confidence = 0
+             return
+
+         if def_obj.simple_name.startswith("_") and not def_obj.simple_name.startswith(
+             "__"
+         ):
+             confidence -= PENALTIES["private_name"]
+
+         if def_obj.simple_name.startswith("__") and def_obj.simple_name.endswith("__"):
+             confidence -= PENALTIES["dunder_or_magic"]
+
+         if def_obj.in_init and def_obj.type in ("function", "class"):
+             confidence -= PENALTIES["in_init_file"]
+
+         if def_obj.name.split(".")[0] in self.dynamic:
+             confidence -= PENALTIES["dynamic_module"]
+
+         if visitor.is_test_file or def_obj.line in visitor.test_decorated_lines:
+             confidence -= PENALTIES["test_related"]
+
+         if def_obj.type == "variable" and getattr(framework, "dataclass_fields", None):
+             if def_obj.name in framework.dataclass_fields:
+                 def_obj.confidence = 0
+                 return
+
+         if def_obj.type == "variable" and "." in def_obj.name:
+             prefix, _ = def_obj.name.rsplit(".", 1)
+
+             cls_def = self.defs.get(prefix)
+             if cls_def and cls_def.type == "class":
+                 cls_simple = cls_def.simple_name
+
+                 if (
+                     getattr(framework, "pydantic_models", None)
+                     and cls_simple in framework.pydantic_models
+                 ):
+                     def_obj.confidence = 0
+                     return
+
+                 cls_node = getattr(framework, "class_defs", {}).get(cls_simple)
+                 if cls_node is not None:
+                     schema_like = False
+
+                     for base in cls_node.bases:
+                         if isinstance(base, ast.Name) and base.id.lower().endswith(
+                             ("schema", "model")
+                         ):
+                             schema_like = True
+                             break
+
+                         if isinstance(
+                             base, ast.Attribute
+                         ) and base.attr.lower().endswith(("schema", "model")):
+                             schema_like = True
+                             break
+
+                     if schema_like:
+                         def_obj.confidence = 0
+                         return
+
+         if def_obj.type == "variable":
+             fr = getattr(framework, "first_read_lineno", {}).get(def_obj.name)
+             if fr is not None and fr >= def_obj.line:
+                 def_obj.confidence = 0
+                 return
+
+         if def_obj.type == "variable" and "." in def_obj.name:
+             _, attr = def_obj.name.rsplit(".", 1)
+
+             for other in self.defs.values():
+                 if other is def_obj:
+                     continue
+                 if other.type != "variable":
+                     continue
+                 if "." not in other.name:
+                     continue
+                 if other.simple_name != attr:
+                     continue
+
+                 def_obj.confidence = 0
+                 return
+
+         framework_confidence = detect_framework_usage(def_obj, visitor=framework)
+         if framework_confidence is not None:
+             confidence = min(confidence, framework_confidence)
+
+         if def_obj.simple_name.startswith("__") and def_obj.simple_name.endswith("__"):
+             confidence = 0
+
+         if def_obj.type == "parameter":
+             if def_obj.simple_name in ("self", "cls"):
+                 confidence = 0
+             elif "." in def_obj.name:
+                 method_name = def_obj.name.split(".")[-2]
+                 if method_name.startswith("__") and method_name.endswith("__"):
+                     confidence = 0
+
+         if visitor.is_test_file or def_obj.line in visitor.test_decorated_lines:
+             confidence = 0
+
+         if (
+             def_obj.type == "import"
+             and def_obj.name.startswith("__future__.")
+             and def_obj.simple_name
+             in (
+                 "annotations",
+                 "absolute_import",
+                 "division",
+                 "print_function",
+                 "unicode_literals",
+                 "generator_stop",
+             )
+         ):
+             confidence = 0
+
+         def_obj.confidence = max(confidence, 0)
+
      def _apply_heuristics(self):
-         class_methods=defaultdict(list)
-         for d in self.defs.values():
-             if d.type in("method","function") and"." in d.name:
-                 cls=d.name.rsplit(".",1)[0]
-                 if cls in self.defs and self.defs[cls].type=="class":
-                     class_methods[cls].append(d)
-
-         for cls,methods in class_methods.items():
-             if self.defs[cls].references>0:
-                 for m in methods:
-                     if m.simple_name in AUTO_CALLED:m.references+=1
-
-         for d in self.defs.values():
-             if d.simple_name in MAGIC_METHODS or (d.simple_name.startswith("__") and d.simple_name.endswith("__")):
-                 d.confidence = 0
-
-             if not d.simple_name.startswith("_") and d.type in ("function", "method", "class"):
-                 d.confidence = min(d.confidence, 90)
-
-             if d.in_init and d.type in ("function", "class"):
-                 d.confidence = min(d.confidence, 85)
-
-             if d.name.split(".")[0] in self.dynamic:
-                 d.confidence = min(d.confidence, 60)
-
-             if d.type == "method" and TEST_METHOD_PATTERN.match(d.simple_name):
-                 class_name = d.name.rsplit(".", 1)[0]
-                 class_simple_name = class_name.split(".")[-1]
-                 if "Test" in class_simple_name or class_simple_name.endswith("TestCase"):
-                     d.confidence = 0
-
-     def analyze(self, path, thr=60):
-         p = Path(path).resolve()
-         files = [p] if p.is_file() else list(p.glob("**/*.py"))
-         root = p.parent if p.is_file() else p
-
+         class_methods = defaultdict(list)
+         for definition in self.defs.values():
+             if definition.type in ("method", "function") and "." in definition.name:
+                 cls = definition.name.rsplit(".", 1)[0]
+                 if cls in self.defs and self.defs[cls].type == "class":
+                     class_methods[cls].append(definition)
+
+         for cls, methods in class_methods.items():
+             if self.defs[cls].references > 0:
+                 for method in methods:
+                     if method.simple_name in AUTO_CALLED:
+                         method.references += 1
+
+                     if (
+                         method.simple_name.startswith("visit_")
+                         or method.simple_name.startswith("leave_")
+                         or method.simple_name.startswith("transform_")
+                     ):
+                         method.references += 1
+
+                     if method.simple_name == "format" and cls.endswith("Formatter"):
+                         method.references += 1
+
+     def analyze(
+         self,
+         path,
+         thr=60,
+         exclude_folders=None,
+         enable_secrets=False,
+         enable_danger=False,
+         enable_quality=False,
+         extra_visitors=None,
+     ):
+         files, root = self._get_python_files(path, exclude_folders)
+
+         if not files:
+             logger.warning(f"No Python files found in {path}")
+             return json.dumps(
+                 {
+                     "unused_functions": [],
+                     "unused_imports": [],
+                     "unused_classes": [],
+                     "unused_variables": [],
+                     "unused_parameters": [],
+                     "analysis_summary": {
+                         "total_files": 0,
+                         "excluded_folders": exclude_folders if exclude_folders else [],
+                     },
+                 }
+             )
+
+         logger.info(f"Analyzing {len(files)} Python files...")
+
          modmap = {}
          for f in files:
              modmap[f] = self._module(root, f)
-
+
+         all_secrets = []
+         all_dangers = []
+         all_quality = []
+         file_contexts = []
+
          for file in files:
              mod = modmap[file]
-             defs, refs, dyn, exports = proc_file(file, mod)
-
-             for d in defs:
-                 self.defs[d.name] = d
+             (
+                 defs,
+                 refs,
+                 dyn,
+                 exports,
+                 test_flags,
+                 framework_flags,
+                 q_finds,
+                 d_finds,
+                 pro_finds,
+             ) = proc_file(file, mod, extra_visitors)
+
+             for definition in defs:
+                 self.defs[definition.name] = definition
+
              self.refs.extend(refs)
              self.dynamic.update(dyn)
              self.exports[mod].update(exports)
-
+
+             file_contexts.append((defs, test_flags, framework_flags, file, mod))
+
+             if enable_quality and q_finds:
+                 all_quality.extend(q_finds)
+
+             if enable_danger and d_finds:
+                 all_dangers.extend(d_finds)
+
+             # --- CHANGED: Collect Pro Findings ---
+             if pro_finds:
+                 all_dangers.extend(pro_finds)
+
+             if enable_secrets and _secrets_scan_ctx is not None:
+                 try:
+                     src = Path(file).read_text(encoding="utf-8", errors="ignore")
+                     src_lines = src.splitlines(True)
+                     rel = str(Path(file).relative_to(root))
+                     ctx = {"relpath": rel, "lines": src_lines, "tree": None}
+                     findings = list(_secrets_scan_ctx(ctx))
+                     if findings:
+                         all_secrets.extend(findings)
+                 except Exception:
+                     pass
+
+         for defs, test_flags, framework_flags, file, mod in file_contexts:
+             for definition in defs:
+                 self._apply_penalties(definition, test_flags, framework_flags)
+
+             if enable_danger and scan_danger is not None:
+                 try:
+                     findings = scan_danger(root, [file])
+                     if findings:
+                         all_dangers.extend(findings)
+                 except Exception as e:
+                     logger.error(f"Error scanning {file} for dangerous code: {e}")
+                     if os.getenv("SKYLOS_DEBUG"):
+                         logger.error(traceback.format_exc())
+
          self._mark_refs()
          self._apply_heuristics()
          self._mark_exports()
-
-         thr = max(0, thr)
+
+         shown = 0
+
+         def def_sort_key(d):
+             return (d.type, d.name)
+
+         for d in sorted(self.defs.values(), key=def_sort_key):
+             if shown >= 50:
+                 break
+             shown += 1

          unused = []
-         for d in self.defs.values():
-             if d.references == 0 and not d.is_exported and d.confidence >= thr:
-                 unused.append(d.to_dict())
-
+         for definition in self.defs.values():
+             if (
+                 definition.references == 0
+                 and not definition.is_exported
+                 and definition.confidence > 0
+                 and definition.confidence >= thr
+             ):
+                 unused.append(definition.to_dict())
+
          result = {
-             "unused_functions": [],
-             "unused_imports": [],
+             "unused_functions": [],
+             "unused_imports": [],
              "unused_classes": [],
-             "unused_variables": []
+             "unused_variables": [],
+             "unused_parameters": [],
+             "analysis_summary": {
+                 "total_files": len(files),
+                 "excluded_folders": exclude_folders or [],
+             },
          }
-
+
+         if enable_secrets and all_secrets:
+             result["secrets"] = all_secrets
+             result["analysis_summary"]["secrets_count"] = len(all_secrets)
+
+         if enable_danger and all_dangers:
+             result["danger"] = all_dangers
+             result["analysis_summary"]["danger_count"] = len(all_dangers)
+
+         if enable_quality and all_quality:
+             result["quality"] = all_quality
+             result["analysis_summary"]["quality_count"] = len(all_quality)
+
          for u in unused:
              if u["type"] in ("function", "method"):
                  result["unused_functions"].append(u)
              elif u["type"] == "import":
                  result["unused_imports"].append(u)
-             elif u["type"] == "class":
+             elif u["type"] == "class":
                  result["unused_classes"].append(u)
              elif u["type"] == "variable":
                  result["unused_variables"].append(u)
-
+             elif u["type"] == "parameter":
+                 result["unused_parameters"].append(u)
+
          return json.dumps(result, indent=2)

- def proc_file(file_or_args, mod=None):
+
+ def proc_file(file_or_args, mod=None, extra_visitors=None):
      if mod is None and isinstance(file_or_args, tuple):
-         file, mod = file_or_args
+         file, mod = file_or_args
      else:
-         file = file_or_args
+         file = file_or_args

      try:
-         tree = ast.parse(Path(file).read_text(encoding="utf-8"))
+         source = Path(file).read_text(encoding="utf-8")
+         ignore_lines = {
+             i
+             for i, line in enumerate(source.splitlines(), start=1)
+             if "pragma: no skylos" in line
+         }
+         tree = ast.parse(source)
+
+         cfg = load_config(file)
+
+         q_rules = []
+         if "SKY-Q301" not in cfg["ignore"]:
+             q_rules.append(ComplexityRule(threshold=cfg["complexity"]))
+         if "SKY-Q302" not in cfg["ignore"]:
+             q_rules.append(NestingRule(threshold=cfg["nesting"]))
+         if "SKY-C303" not in cfg["ignore"]:
+             q_rules.append(ArgCountRule(max_args=cfg["max_args"]))
+         if "SKY-C304" not in cfg["ignore"]:
+             q_rules.append(FunctionLengthRule(max_lines=cfg["max_lines"]))
+         if "SKY-Q305" not in cfg["ignore"]:
+             q_rules.append(MutableDefaultRule())
+         if "SKY-Q306" not in cfg["ignore"]:
+             q_rules.append(BareExceptRule())
+         if "SKY-Q307" not in cfg["ignore"]:
+             q_rules.append(DangerousComparisonRule())
+
+         linter_q = LinterVisitor(q_rules, str(file))
+         linter_q.visit(tree)
+         quality_findings = linter_q.findings
+
+         d_rules = [DangerousCallsRule()]
+         linter_d = LinterVisitor(d_rules, str(file))
+         linter_d.visit(tree)
+         danger_findings = linter_d.findings
+
+         pro_findings = []
+         if extra_visitors:
+             for VisitorClass in extra_visitors:
+                 checker = VisitorClass(file, pro_findings)
+                 checker.visit(tree)
+
+         tv = TestAwareVisitor(filename=file)
+         tv.visit(tree)
+         tv.ignore_lines = ignore_lines
+
+         fv = FrameworkAwareVisitor(filename=file)
+         fv.visit(tree)
+         fv.finalize()
          v = Visitor(mod, file)
          v.visit(tree)
-         return v.defs, v.refs, v.dyn, v.exports
+
+         fv.dataclass_fields = getattr(v, "dataclass_fields", set())
+         fv.first_read_lineno = getattr(v, "first_read_lineno", {})
+
+         return (
+             v.defs,
+             v.refs,
+             v.dyn,
+             v.exports,
+             tv,
+             fv,
+             quality_findings,
+             danger_findings,
+             pro_findings,
+         )
+
      except Exception as e:
          logger.error(f"{file}: {e}")
-         return [], [], set(), set()
-
- def analyze(path,conf=60):return Skylos().analyze(path,conf)
-
- if __name__=="__main__":
-     if len(sys.argv)>1:
-         p=sys.argv[1];c=int(sys.argv[2])if len(sys.argv)>2 else 60
-         result = analyze(p,c)
-
-         data = json.loads(result)
-         print("\n🔍 Python Static Analysis Results")
-         print("===================================\n")
-
-         total_items = sum(len(items) for items in data.values())
-
-         print("Summary:")
-         if data["unused_functions"]:
-             print(f" • Unreachable functions: {len(data['unused_functions'])}")
-         if data["unused_imports"]:
-             print(f" • Unused imports: {len(data['unused_imports'])}")
-         if data["unused_classes"]:
-             print(f" • Unused classes: {len(data['unused_classes'])}")
-         if data["unused_variables"]:
-             print(f" • Unused variables: {len(data['unused_variables'])}")
-
-         if data["unused_functions"]:
-             print("\n📦 Unreachable Functions")
-             print("=======================")
-             for i, func in enumerate(data["unused_functions"], 1):
-                 print(f" {i}. {func['name']}")
-                 print(f" └─ {func['file']}:{func['line']}")
-
-         if data["unused_imports"]:
-             print("\n📥 Unused Imports")
-             print("================")
-             for i, imp in enumerate(data["unused_imports"], 1):
-                 print(f" {i}. {imp['simple_name']}")
-                 print(f" └─ {imp['file']}:{imp['line']}")
-
-         if data["unused_classes"]:
-             print("\n📋 Unused Classes")
-             print("=================")
-             for i, cls in enumerate(data["unused_classes"], 1):
-                 print(f" {i}. {cls['name']}")
-                 print(f" └─ {cls['file']}:{cls['line']}")
-
-         if data["unused_variables"]:
-             print("\n📊 Unused Variables")
-             print("==================")
-             for i, var in enumerate(data["unused_variables"], 1):
-                 print(f" {i}. {var['name']}")
-                 print(f" └─ {var['file']}:{var['line']}")
-
-         print("\n" + "─" * 50)
-         print(f"Found {total_items} dead code items. Add this badge to your README:")
-         print(f"```markdown")
-         print(f"![Dead Code: {total_items}](https://img.shields.io/badge/Dead_Code-{total_items}_detected-orange?logo=codacy&logoColor=red)")
-         print(f"```")
-
-         print("\nNext steps:")
-         print(" • Use --interactive to select specific items to remove")
-         print(" • Use --dry-run to preview changes before applying them")
+         if os.getenv("SKYLOS_DEBUG"):
+             logger.error(traceback.format_exc())
+         dummy_visitor = TestAwareVisitor(filename=file)
+         dummy_visitor.ignore_lines = set()
+         dummy_framework_visitor = FrameworkAwareVisitor(filename=file)
+         return [], [], set(), set(), dummy_visitor, dummy_framework_visitor, [], [], []
+
+
+ def analyze(
+     path,
+     conf=60,
+     exclude_folders=None,
+     enable_secrets=False,
+     enable_danger=False,
+     enable_quality=False,
+     extra_visitors=None,
+ ):
+     return Skylos().analyze(
+         path,
+         conf,
+         exclude_folders,
+         enable_secrets,
+         enable_danger,
+         enable_quality,
+         extra_visitors,
+     )
+
+
+ if __name__ == "__main__":
+     enable_secrets = "--secrets" in sys.argv
+     enable_danger = "--danger" in sys.argv
+     enable_quality = "--quality" in sys.argv
+
+     positional = [a for a in sys.argv[1:] if not a.startswith("--")]
+     if not positional:
+         print(
+             "Usage: python Skylos.py <path> [confidence_threshold] [--secrets] [--danger] [--quality]"
+         )
+         sys.exit(2)
+     p = positional[0]
+     confidence = int(positional[1]) if len(positional) > 1 else 60
+
+     result = analyze(
+         p,
+         confidence,
+         enable_secrets=enable_secrets,
+         enable_danger=enable_danger,
+         enable_quality=enable_quality,
+     )
+     data = json.loads(result)
+     print("\n Python Static Analysis Results")
+     print("===================================\n")
+
+     total_dead = 0
+     for key, items in data.items():
+         if key.startswith("unused_") and isinstance(items, list):
+             total_dead += len(items)
+
+     danger_count = (
+         data.get("analysis_summary", {}).get("danger_count", 0) if enable_danger else 0
+     )
+     secrets_count = (
+         data.get("analysis_summary", {}).get("secrets_count", 0)
+         if enable_secrets
+         else 0
+     )
+
+     print("Summary:")
+     if data["unused_functions"]:
+         print(f" * Unreachable functions: {len(data['unused_functions'])}")
+     if data["unused_imports"]:
+         print(f" * Unused imports: {len(data['unused_imports'])}")
+     if data["unused_classes"]:
+         print(f" * Unused classes: {len(data['unused_classes'])}")
+     if data["unused_variables"]:
+         print(f" * Unused variables: {len(data['unused_variables'])}")
+     if enable_danger:
+         print(f" * Security issues: {danger_count}")
+     if enable_secrets:
+         print(f" * Secrets found: {secrets_count}")
+
+     if data["unused_functions"]:
+         print("\n - Unreachable Functions")
+         print("=======================")
+         for i, func in enumerate(data["unused_functions"], 1):
+             print(f" {i}. {func['name']}")
+             print(f" └─ {func['file']}:{func['line']}")
+
+     if data["unused_imports"]:
+         print("\n - Unused Imports")
+         print("================")
+         for i, imp in enumerate(data["unused_imports"], 1):
+             print(f" {i}. {imp['simple_name']}")
+             print(f" └─ {imp['file']}:{imp['line']}")
+
+     if data["unused_classes"]:
+         print("\n - Unused Classes")
+         print("=================")
+         for i, cls in enumerate(data["unused_classes"], 1):
+             print(f" {i}. {cls['name']}")
+             print(f" └─ {cls['file']}:{cls['line']}")
+
+     if data["unused_variables"]:
+         print("\n - Unused Variables")
+         print("==================")
+         for i, var in enumerate(data["unused_variables"], 1):
+             print(f" {i}. {var['name']}")
+             print(f" └─ {var['file']}:{var['line']}")
+
+     if enable_danger and data.get("danger"):
+         print("\n - Security Issues")
+         print("================")
+         for i, f in enumerate(data["danger"], 1):
+             print(
+                 f" {i}. {f['message']} [{f['rule_id']}] ({f['file']}:{f['line']}) Severity: {f['severity']}"
+             )
+
+     if enable_secrets and data.get("secrets"):
+         print("\n - Secrets")
+         print("==========")
+         for i, s in enumerate(data["secrets"], 1):
+             rid = s.get("rule_id", "SECRET")
+             msg = s.get("message", "Potential secret")
+             file = s.get("file")
+             line = s.get("line", 1)
+             sev = s.get("severity", "HIGH")
+             print(f" {i}. {msg} [{rid}] ({file}:{line}) Severity: {sev}")
+
+     print("\n" + "─" * 50)
+     if enable_danger:
+         print(
+             f"Found {total_dead} dead code items and {danger_count} security flaws. Add this badge to your README:"
+         )
      else:
-         print("Usage: python Skylos.py <path> [confidence_threshold]")
+         print(f"Found {total_dead} dead code items. Add this badge to your README:")
+     print("```markdown")
+     print(
+         f"![Dead Code: {total_dead}](https://img.shields.io/badge/Dead_Code-{total_dead}_detected-orange?logo=codacy&logoColor=red)"
+     )
+     print("```")
+
+     print("\nNext steps:")
+     print(" * Use --interactive to select specific items to remove")
+     print(" * Use --dry-run to preview changes before applying them")