troubadix 25.2.4-py3-none-any.whl → 25.3.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- troubadix/__version__.py +1 -1
- troubadix/argparser.py +18 -0
- troubadix/codespell/codespell.exclude +26 -2
- troubadix/codespell/codespell.ignore +6 -2
- troubadix/plugins/creation_date.py +49 -35
- troubadix/plugins/dependencies.py +15 -11
- troubadix/plugins/spaces_before_dots.py +2 -2
- troubadix/plugins/valid_oid.py +2 -2
- troubadix/standalone_plugins/changed_oid.py +2 -8
- troubadix/standalone_plugins/dependency_graph/__init__.py +2 -0
- troubadix/standalone_plugins/dependency_graph/checks.py +128 -0
- troubadix/standalone_plugins/dependency_graph/cli.py +58 -0
- troubadix/standalone_plugins/dependency_graph/dependency_graph.py +212 -0
- troubadix/standalone_plugins/dependency_graph/models.py +40 -0
- troubadix/standalone_plugins/deprecate_vts.py +3 -12
- troubadix/standalone_plugins/file_extensions.py +3 -15
- troubadix/standalone_plugins/last_modification.py +4 -12
- troubadix/standalone_plugins/no_solution.py +2 -8
- troubadix/standalone_plugins/version_updated.py +2 -8
- {troubadix-25.2.4.dist-info → troubadix-25.3.1.dist-info}/METADATA +2 -1
- {troubadix-25.2.4.dist-info → troubadix-25.3.1.dist-info}/RECORD +24 -19
- {troubadix-25.2.4.dist-info → troubadix-25.3.1.dist-info}/entry_points.txt +1 -0
- {troubadix-25.2.4.dist-info → troubadix-25.3.1.dist-info}/LICENSE +0 -0
- {troubadix-25.2.4.dist-info → troubadix-25.3.1.dist-info}/WHEEL +0 -0
troubadix/__version__.py CHANGED
troubadix/argparser.py CHANGED

@@ -26,6 +26,7 @@ from pathlib import Path
 from pontos.terminal import Terminal
 
 
+# allows non existent paths and directory paths
 def directory_type(string: str) -> Path:
     directory_path = Path(string)
     if directory_path.exists() and not directory_path.is_dir():
@@ -33,6 +34,15 @@ def directory_type(string: str) -> Path:
     return directory_path
 
 
+# allows only existing directory paths
+def directory_type_existing(string: str) -> Path:
+    directory_path = Path(string)
+    if not directory_path.is_dir():
+        raise ValueError(f"{string} is not a directory.")
+    return directory_path
+
+
+# allows non existent paths and file paths
 def file_type(string: str) -> Path:
     file_path = Path(string)
     if file_path.exists() and not file_path.is_file():
@@ -40,6 +50,14 @@ def file_type(string: str) -> Path:
     return file_path
 
 
+# allows only existing file paths
+def file_type_existing(string: str) -> Path:
+    file_path = Path(string)
+    if not file_path.is_file():
+        raise ValueError(f"{string} is not a file.")
+    return file_path
+
+
 def check_cpu_count(number: str) -> int:
     """Make sure this value is valid
     Default: use half of the available cores to not block the machine"""
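
The two new validators reject non-existing paths, so the standalone plugins can share one implementation instead of redefining it locally. A minimal sketch of how such a validator plugs into argparse; the parser, flag name, and temporary file below are illustrative and not taken from troubadix:

```python
import tempfile
from argparse import ArgumentParser
from pathlib import Path


def file_type_existing(string: str) -> Path:
    # Same check as the new helper in troubadix/argparser.py.
    file_path = Path(string)
    if not file_path.is_file():
        raise ValueError(f"{string} is not a file.")
    return file_path


parser = ArgumentParser(description="illustrative parser, not part of troubadix")
parser.add_argument("-f", "--files", nargs="+", type=file_type_existing, default=[])

with tempfile.NamedTemporaryFile(suffix=".nasl") as tmp:
    # argparse runs the validator once per value and turns the ValueError
    # into a clean usage error for paths that do not exist.
    args = parser.parse_args(["--files", tmp.name])
    print(args.files)  # e.g. [PosixPath('/tmp/tmpabc.nasl')]
```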

troubadix/codespell/codespell.exclude CHANGED

@@ -34,6 +34,7 @@
 0x20: C8 E6 AB 65 3B A9 5A 0E 14 00 00 05 41 44 4D 49 ...e;.Z.....ADMI
 0x20: D5 A6 22 5D 33 E4 C6 0E 14 00 00 05 61 64 6D 69 .."]3.......admi
 0x40: 61 6C 64 6F 6D 61 69 6E 00 07 64 65 66 61 75 6C aldomain..defaul
+0x40: 66 6C 61 74 65 3D 67 7A 69 70 0D 0A 66 69 6C 65 flate=gzip..file
 # 0x50: 72 6F 2E 70 72 6F 64 75 63 74 2E 64 65 76 69 63 ro.product.devic
 # 0x50: 74 72 69 6E 67 20 6D 69 73 73 69 6E 67 20 6F 72 tring missing or
 # 0x50: 75 65 73 74 3A 20 47 45 54 20 2F 20 uest: GET / # nb: Trailing space
@@ -231,6 +232,7 @@ Claus Wahlers reported that random images from GPU memory
 clen +
 clen = "567";
 clen = data_len( data:_ciphers );
+- Cleo Streem
 "cliente",
 cmd = 'for usr in $(cut -d: -f1 /etc/shadow); do [[ $(chage --list $usr | grep \'^Last password change\' | cut -d: -f2) > $(date) ]] && echo "$usr :$(chage --list $usr | grep \'^Last password change\' | cut -d: -f2)"; done';
 cmd = "mount | grep -w ro";
@@ -262,6 +264,9 @@ Corrected a badly constracted file which could have allowed treating of
 could lead to shared memory segments of other users beeing freed
 cpe = build_cpe(value:appVer, exp:"^([0-9]\.[0-9]+\.[0-9]+)", base:"cpe:/a:shemes:grabit:");
 cpe = build_cpe(value:ver, exp:"^([0-9.]+)", base:"cpe:/a:mitre:ovaldi:");
+cpe = build_cpe(value: vers, exp: "^([0-9.]+)", base: "cpe:/a:aprox:aproxengine:");
+cpe = "cpe:/a:aprox:aproxengine";
+CPE = "cpe:/a:aprox:aproxengine";
 CPE = "cpe:/a:mapp:webtrekk:";
 cpe = "cpe:/a:mitre:ovaldi";
 CPE = "cpe:/a:netsparker:wass";
@@ -270,6 +275,7 @@ CPE: cpe:/a:tawk:tawk.to_live_chat:0.8.0
 cpe =~ "^cpe:/o:hp:laserjet_pro_420[1-3](cdn|dn|dw|dne|dwe)_firmware") {
 CPU' could have occured because a retry loop continually finds the same
 crafted IFF ILBM file. NOTE: some of these details are obtained from
+crafted wLongsPerEntry or nEntriesInUse value in the indx chunk, which
 crapData = string("ALLO ", crap(length: 25000),"\r\n");
 crash) via a malformed file with UPack encoding (CVE-2009-1371).
 Create all system-defined macros defore processing command-line given
@@ -340,6 +346,7 @@ CVE-2020-36158: Fixed an issue wich might have allowed a remote
 CVE-2020-5208: Fixed multiple remote code executtion vulnerabilities
 CVE-2020-8492: Fixed a regular expression in urrlib that was prone to
 CVE-2021-30004: Fixed an issue where forging attacks might have occured
+* CVE-2021-47311: net: qcom/emac: fix UAF in emac_remove (bsc#1225010).
 dass eine geeignete Windows 8.1 Version, vorzugsweise eine 64-Bit Variante, eingesetzt werden muss.");
 "DataArchivingService/webcontent/aas",
 "/DataArchivingService/webcontent/aas/aas_store.jsp");
@@ -398,7 +405,7 @@ Enable log information of starting/stoping services. (bsc#1144923,
 <!-- Ende Message Box -->
 Engineering (TE) database and then a subsequent operation attempts to process these, rpd will
 Engineering (TE) tunnel's physical source interface is not propagated to hardware after the adjacency is lost.
-- ERRO[0000] Error creating docker key file: CreateKeyFile write root.key file failed. open /root/.docker/root.key: permission denied
+- ERRO[0000] Error creating docker key file: CreateKeyFile write root.key file failed. open /root/.docker/root.key: permission denied
 eSpace IAD, eSpace U1981 and eSpace USM.");
 establishment of the Cisco Nexus 9000 Series Application Centric Infrastructure (ACI) Mode Switch
 exact-width integer types int{N}_t and uint{N}_t.
@@ -410,10 +417,13 @@ EXP=expext.dll
 expressions that are not properly handled by a stap script that
 * extended EAP-SIM/AKA fast re-authentication to allow use with FILS
 extended EAP-SIM/AKA fast re-authentication to allow use with FILS
+- Extended HTTP methods: ACL, BASELINE-CONTROL, BIND, CHECKIN, CHECKOUT, COPY, LABEL, LINK, LOCK,
+extended_methods = make_list("ACL", "BASELINE-CONTROL", "BIND", "CHECKIN", "CHECKOUT", "COPY",
 external emulators, which may have cuased Xen to crash, resulting in a
 'facsimiletelephonenumber' to the Access Control Instruction (ACI) for user
 family_id = 'oval:org.mitre.oval:obj:99';
 family_xml = '\t\t<family_item xmlns="http://oval.mitre.org/XMLSchema/oval-sys' +
+"Faroe Islands" : [ "FO", "English", "PAL", "(UTC+00:00) Dublin", 0 ],
 (FATE#312793, bnc#782369). The userland utilities were published seperately to support this feature.
 (FATE#313309) The ipset userland utility will be published seperately to support this feature.
 (FATE#314441). A seperate hyper-v package will be published to support this feature.
@@ -546,6 +556,7 @@ if (http_vuln_check(port: port, url: url, pattern: "<title>WAN Setup", check_hea
 if(!isnull(res = isdpkgvuln(pkg:"nd", ver:"0.5.0-1woody1", rls:"DEB3.0"))) {
 if(!isnull(res = isrpmvuln(pkg:"libell", rpm:"libell~0.26~1.fc31", rls:"FC31"))) {
 if( "Login successed" >< recv ) {
+if (methods = egrep(pattern: "^([Aa]llow|[Pp]ublic)\s*:", string: res, icase: FALSE)) {
 if( model =~ "^(RICOH|LANIER|SAVIN|NRG)" && "Network Printer" >< model ) {
 if( ( model =~ '^RP200' || model =~ '^TE[3456]0' ) && revcomp( a: version, b: "v600r006c00spc500" ) < 0) {
 if( model =~ '^TE[3456]0' && revcomp( a: version, b: "v600r006c00spc500" ) < 0 ) {
@@ -565,7 +576,9 @@ if (prod =~ "^BMX\s*NOE\s*0110$") {
 if (prod =~ "^BMX\s*NOE\s*0110$" || prod =~ "^BMX\s*NOE\s*0110H$") {
 if (prod =~ "^BMX\s*NOE\s*0110" && version_is_less(version: version, test_version: "6.5")) {
 if (prod =~ "^BMX\s*NOE\s*0110" && version_is_less(version: version, test_version: "6.70")) {
+if (prod =~ "^BMX\s*NOE\s*0200") {
 if (!prod || (prod !~ "^BMX\s*P34" && prod !~ "^BMX\s*NOE\s*01[01]0"))
+if (!prod || (prod !~ "^BMX\s*P34" && prod !~ "^BMX\s*NOE\s*0(1[01]|20)0"))
 if (!prod || (prod !~ "^BMX\s*P34" && prod !~ "^BMX\s*NOR\s*0200H" && prod !~ "^BMX\s*NOE\s*0100" &&
 if( r =~ " (A\. A\. Milne|Albert Einstein|Anonimo|Antico proverbio cinese|Autor desconocido|Charles Dickens|Francisco de Quevedo y Villegas|George Bernard Shaw|Jaime Balmes|Johann Wolfgang von Goethe|Jil Sander|Juana de Asbaje|Konfucius|Lord Philip Chesterfield|Montaigne|Petrarca|Ralph Waldo Emerson|Seneca|Syrus|Werner von Siemens)" ||
 if(rcvRes && '>iAm[i]nE<' >< rcvRes)
@@ -585,6 +598,7 @@ if( "Server: Boa" >!< banner || ( "AirLive" >!< banner && banner !~ "(WL|MD|BU|P
 if (sysdesc =~ "^(RICOH|LANIER|SAVIN|NRG)" && (sysdesc =~ "(RICOH|LANIER|SAVIN|NRG) Network Printer" ||
 if ("<title>Cisco NFVIS</title>" >< res && 'content="Xenon Boostrap Admin Panel"' >< res) {
 if ("<title>COMfortel</title>" >< res && "/statics/script/pageChallenge.js" >< res) {
+if( "<title>Login to Axis2:: Administartion page</title>" >< buf8 ||
 if( "[Xx]-[Aa]dobe-[Cc]ontent" >< pattern )
 if( "[Xx]-[Aa]dobe-[Cc]ontent" >< pattern )
 If you disable this policy setting, transcripting of PowerShell-based applications is disabled by
@@ -678,7 +692,9 @@ kJtP0F6mv/Afe/5s7yd3ZJ/72yT73NjLg0vWbmLkop6eOR+CKw4nxorWxpocAj0p
 # Kubernetes Dashboard Public WAN (Internet) Accessible
 L3: conring size for XEN HV's with huge memory to small. Inital Xen logs
 LAST_PATCH_UPDATE UpToDate
+"leadin/readme.txt", "HubSpot#---#=== HubSpot -#---#Stable tag: ([0-9.]+)#---#cpe:/a:hubspot:hubspot",
 leaks because of a missing check when transfering pages via
+<li><a href="Status_Router.asp"><strong><script type="text/javascript">Capture(bmenu.statu)</script></strong></a></li>
 library: Increment to 7:0:1 No changes, no removals New fuctions:
 [link moved to references] has more informations.
 <link rel="stylesheet" href="/bui/base.css?v=GWAY-8.3.1-0086" />
@@ -718,6 +734,7 @@ Mark Shepard discovered a double free in the TCP listener cleanup which could re
 "messasges",
 "Metastasio (Ipermestra)" >< banner || '"\r\nAnonimo' >< banner || banner =~ '^"[^"]+" *Autor desconocido[ \t\r\n]*$' || "/usr/games/fortune: not found" >< banner ||
 "Metastasio (Ipermestra)" >< r || '"\r\nAnonimo' >< r || r =~ '^"[^"]+" *Autor desconocido[ \t\r\n]*$' ) {
+methods = eregmatch(pattern: "^([Aa]llow|[Pp]ublic)\s*:\s*([A-Z,]+\s*([A-Z ,]+)?)", string: methods,
 MFSA 2012-27 / CVE-2012-0474: Security researchers Jordi Chancel and Eddy Bordi reported that they could short-circuit page loads to show the address of a different site than what is ... [Please see the references for more information on the vulnerabilities]");
 MFSA 2012-75 / CVE-2012-3984: Security researcher David Bloom of Cue discovered that 'select' elements are always-on-top chromeless windows and that navigation away from a page with an active 'select' menu does not remove this window.When another menu is opened programmatically on a new page, the original 'select' menu can be retained and arbitrary HTML content within it rendered, allowing an attacker to cover arbitrary portions of the new page through absolute positioning/scrolling, leading to spoofing attacks. Security researcher Jordi Chancel found a variation that would allow for click-jacking attacks was well.
 MFSA 2013-23 / CVE-2013-0765: Mozilla developer Boris Zbarsky reported that in some circumstances a wrapped WebIDL object can be wrapped multiple times, overwriting the existing wrapp... [Please see the references for more information on the vulnerabilities]");
@@ -863,6 +880,7 @@ reenable php7-dba support of Berkeley DB (bsc#1108554)");
 # Ref : http://www.hsc.fr/ressources/articles/win_net_srv/index.html.en by Jean-Baptiste Marchand
 register_and_report_cpe( app:"Netsparker - Web Application Security Scanner", ver:netVer, base:"cpe:/a:netsparker:wass:", expr:"^([0-9.]+)", insloc:netPath );
 register_and_report_cpe(app:"Wiesemann & Theis GmbH " + appName, ver:version, concluded:concluded,
+* [REGRESSION] 'call into AER handling regardless of severity' triggers
 reg_xml = '\t\t<registry_item' + status + ' xmlns="http://oval.mitre.org/' +
 Reject invalid eliptic curve point coordinates (bsc#1131291)");
 rejection for EXTRAVERSION = -xfs, but likely little else will be
@@ -891,6 +909,7 @@ req = string("POST /UE/ProcessForm HTTP/1.1\r\n",
 res = http_get_cache(port: port, item: dir + "/product.comparision.php");
 - Restrict envrionment variable expansion to `ENV`, `ADD`, `COPY`,
 result = "Diese Vorgabe muss manuell ueberprueft werden.";
+"resutls": {
 return -1, "The following script_xref of VT '" + str(file) + "' is pointing to Mitre/NVD which is already covered by the script_cve_id. This is a redundant info and the script_xref needs to be removed:" + nvd_mitre_link_tags
 return("Diese Vorgabe muss manuell ueberprueft werden.");
 return make_list( "error", text_response + 'Ueberpruefung fehlgeschlagen. Die Verwendung der benoetigten win_cmd_exec Funktion wurde in "Options for Local Security Checks (OID: 1.3.6.1.4.1.25623.1.0.100509)" manuell deaktiviert.\n' );
@@ -939,6 +958,7 @@ SAML/CAS tokens in the session database, an attacker can open an anonymous
 script_mandatory_keys("Jasig CAS server/Installed");
 script_mandatory_keys("shttp/detected");
 script_mandatory_keys("telnet/huawei/te/detected");
+script_mandatory_keys("wordpress/plugin/leadin/detected");
 script_mandatory_keys("wordpress/plugin/mailin/detected");
 script_name("Acronis Cyber Infrastructure (ACI) Detection (HTTP)");
 script_name("Acronis Cyber Infrastructure (ACI) RCE Vulnerability (SEC-6452)");
@@ -1002,6 +1022,7 @@ SAML/CAS tokens in the session database, an attacker can open an anonymous
 script_tag(name:"affected", value:"Petite Annonce version 1.0 is known to be affected. Other
 script_tag(name:"affected", value:"RV320 Dual Gigabit WAN VPN Router and RV325 Dual Gigabit WAN
 script_tag(name:"affected", value:"RV320 Dual Gigabit WAN VPN Router and RV325 Dual Gigabit WAN VPN Router.");
+script_tag(name:"affected", value:"Symantec Encryption Management Server (SEMS)
 script_tag(name:"affected", value:"tre on Fedora 23");
 script_tag(name:"affected", value:"tre on Fedora 24");
 script_tag(name:"affected", value:"tre on Fedora 25");
@@ -1042,6 +1063,7 @@ SAML/CAS tokens in the session database, an attacker can open an anonymous
 script_tag(name:"insight", value:"Federico Manuel Bento discovered that the Linux kernel did not properly
 script_tag(name:"insight", value:"FreeS/WAN, Openswan, strongSwan and Super-FreeS/WAN contain two bugs when
 script_tag(name:"insight", value:"In Apache::Session::Browseable before 1.3.6, validity of the X.509 certificate is not checked by default when connecting to remote LDAP backends, because the default configuration of the Net::LDAPS module for Perl is used.
+script_tag(name:"insight", value:"It was discovered that a race condition existed in the ARC EMAC ethernet
 script_tag(name:"insight", value:"It was discovered that a specially-crafted packet sent to the racoon ipsec key exchange server could cause a tunnel to crash, resulting in a denial of service.
 script_tag(name:"insight", value:"It was discovered that the VLC CAF demuxer incorrectly handled certain
 script_tag(name:"insight", value:"James Troup discovered that MAAS stored RabbitMQ
@@ -1053,6 +1075,7 @@ SAML/CAS tokens in the session database, an attacker can open an anonymous
 script_tag(name:"insight", value:"Juraj Somorovsky, Robert Merget and Nimrod Aviram discovered a padding oracle attack in OpenSSL.
 script_tag(name:"insight", value:"Juraj Somorovsky, Robert Merget, and Nimrod Aviram discovered that certain
 script_tag(name:"insight", value:"libsoup without ca path added, accepted all SSL certificats as trusted. This has been fixed. CVE-2012-2132 has been assigned to this issue.
+script_tag(name:"insight", value:"Local ABL Client bypass of the required PASOE security checks
 script_tag(name:"insight", value:"LSAT insecurely creates temporary files which can lead to symlink attacks
 script_tag(name:"insight", value:"Manuel Nickschas discovered that Konversation did not properly perform
 script_tag(name:"insight", value:"Meh Chang discovered that Exim incorrectly
@@ -1126,6 +1149,7 @@ SAML/CAS tokens in the session database, an attacker can open an anonymous
 script_tag(name:"summary", value:"Palo Alto PAN-OS is prone to a vulnerability in Panorama SD WAN.");
 script_tag(name:"summary", value:"Petite Annonce is prone to a cross-site scripting (XSS)
 script_tag(name:"summary", value:"Telnet based detection of Huawei TE (Telepresence and
+script_tag(name:"summary", value:"The management console for Symantec Encryption Management Server (SEMS) is susceptible to potential OS command execution,
 script_tag(name:"summary", value:"The remote host is a SIP Express Router (SER).
 script_tag(name:"summary", value:"The remote host is missing an update for the 'Recommended udpate for SUSE Manager Client Tools' package(s) announced via the SUSE-SU-2016:1366-1 advisory.");
 script_tag(name:"summary", value:"The remote host is missing an update for the 'tre'
@@ -1471,6 +1495,7 @@ url = "/statics/html/index.htm";
 url = "/statics/pageChallenge.html";
 url = "/status/infomation.htm";
 url = string(dir, "/config.xml.sav");
+url = string( dir, "/index.php?file=Liens&op=", raw_string( 0x22 ), "><script>window.alert('test');</script>" );
 url = string(openVer[2], "/obj/autorisation.class.php?path_om=../../../../../../../../vt-rfi.txt");
 url = "/UE/advanced.html";
 url = "/UE/welcome_login.html";
@@ -1578,4 +1603,3 @@ xml += string( '<oval_system_characteristics xmlns="http://oval.mitre.org/XMLSch
 - XSS via a crafted WAN name on the General Setup screen (CVE-2019-16534)");
 "^[Xx]-[Aa]dobe-[Cc]ontent\s*:\s*AEM" );
 Zhongling Wen discovered that the h323 conntrack handler did not correctly
-* CVE-2021-47311: net: qcom/emac: fix UAF in emac_remove (bsc#1225010).

troubadix/codespell/codespell.ignore CHANGED

@@ -22,10 +22,14 @@ complies
 # mentioned here this correction is fully ingored.
 racoon
 # re-use vs. reuse currently unclear, e.g. there is the following from https://dict.leo.org/forum/viewGeneraldiscussion.php?idForum=4&idThread=5586&lp=ende&lang=en:
-# Not even the wise and educated English native speakers seem to have a rule on hyphenation and there are differences between BE and AE.
-#
+# > Not even the wise and educated English native speakers seem to have a rule on hyphenation and there are differences between BE and AE.
+# https://en.wiktionary.org/wiki/re-uses lists and https://en.wiktionary.org/wiki/re-use lists these as valid as well.
+# As e.g. the spelling correction in Mozilla Firefox is accepting all we're excluding this for now
 re-use
+re-used
+re-uses
 re-usable
+re-using
 # Bootup is commonly used in e.g. example output and seems to be also generally valid like seen on:
 # https://en.wiktionary.org/wiki/bootup
 # so it is ignored (at least for now)

troubadix/plugins/creation_date.py CHANGED

@@ -52,45 +52,59 @@ class CheckCreationDate(FileContentPlugin):
         # Example: "2017-11-29 13:56:41 +0100 (Wed, 29 Nov 2017)"
         match = tag_pattern.search(file_content)
 
-        if match:
-            try:
-                date_left = datetime.strptime(
-                    match.group("value")[:25], "%Y-%m-%d %H:%M:%S %z"
-                )
-                # 2017-11-29 13:56:41 +0100 (error if no timezone)
-                date_right = datetime.strptime(
-                    match.group("value")[27:43], "%a, %d %b %Y"
-                )
-                week_day_parsed = date_right.strftime("%a")
-            except ValueError:
-                yield LinterError(
-                    "False or incorrectly formatted creation_date.",
-                    file=nasl_file,
-                    plugin=self.name,
-                )
-                return
+        if not match:
+            yield LinterError(
+                "False or incorrectly formatted creation_date.",
+                file=nasl_file,
+                plugin=self.name,
+            )
+            return
 
-            week_day_str = match.group("value")[27:30]
-            # Wed, 29 Nov 2017
-            if date_left.date() != date_right.date():
-                yield LinterError(
-                    "The creation_date consists of two different dates.",
-                    file=nasl_file,
-                    plugin=self.name,
-                )
-            # Check correct weekday
-            elif week_day_str != week_day_parsed:
-                formatted_date = week_day_parsed
-                yield LinterError(
-                    f"Wrong day of week. Please change it from '{week_day_str}"
-                    f"' to '{formatted_date}'.",
-                    file=nasl_file,
-                    plugin=self.name,
-                )
-        else:
+        try:
+            date_left = datetime.strptime(
+                match.group("value")[:25], "%Y-%m-%d %H:%M:%S %z"
+            )
+            # 2017-11-29 13:56:41 +0100 (error if no timezone)
+            date_right = datetime.strptime(
+                match.group("value")[27:43], "%a, %d %b %Y"
+            )
+            week_day_parsed = date_right.strftime("%a")
+        except ValueError:
             yield LinterError(
                 "False or incorrectly formatted creation_date.",
                 file=nasl_file,
                 plugin=self.name,
             )
             return
+
+        week_day_str = match.group("value")[27:30]
+        # Wed, 29 Nov 2017
+        if date_left.date() != date_right.date():
+            yield LinterError(
+                "The creation_date consists of two different dates.",
+                file=nasl_file,
+                plugin=self.name,
+            )
+        # Check correct weekday
+        elif week_day_str != week_day_parsed:
+            formatted_date = week_day_parsed
+            yield LinterError(
+                f"Wrong day of week. Please change it from '{week_day_str}"
+                f"' to '{formatted_date}'.",
+                file=nasl_file,
+                plugin=self.name,
+            )
+
+        last_modification_pattern = get_script_tag_pattern(
+            ScriptTag.LAST_MODIFICATION
+        )
+        if match := last_modification_pattern.search(file_content):
+            last_modification = datetime.strptime(
+                match.group("value")[:25], "%Y-%m-%d %H:%M:%S %z"
+            )
+            if date_left > last_modification:
+                yield LinterError(
+                    "The creation_date must not be greater than the last modification date.",
+                    file=nasl_file,
+                    plugin=self.name,
+                )

(Note: the content of the unchanged-logic lines removed by this refactoring was not rendered by the diff viewer; it has been restored here from the equivalent lines added below the early return.)
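
The refactored check flips the early return, compares the two halves of the tag value, and newly rejects creation dates that are later than the last modification date. A small standalone sketch of the same date handling on the example value quoted in the plugin's comment, using only the stdlib calls the plugin uses:

```python
from datetime import datetime

value = "2017-11-29 13:56:41 +0100 (Wed, 29 Nov 2017)"

# Left half: full timestamp including timezone; right half: the bracketed date.
date_left = datetime.strptime(value[:25], "%Y-%m-%d %H:%M:%S %z")
date_right = datetime.strptime(value[27:43], "%a, %d %b %Y")

week_day_str = value[27:30]              # "Wed"
week_day_parsed = date_right.strftime("%a")

assert date_left.date() == date_right.date()
assert week_day_str == week_day_parsed
print(date_left.isoformat(), week_day_parsed)
```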

troubadix/plugins/dependencies.py CHANGED

@@ -31,6 +31,20 @@ from troubadix.plugin import (
 )
 
 
+def split_dependencies(value: str) -> list[str]:
+    """
+    Remove single and/or double quotes, spaces
+    and create a list by using the comma as a separator
+    additionally, check and filter for inline comments
+    """
+    dependencies = []
+    for line in value.splitlines():
+        subject = line[: line.index("#")] if "#" in line else line
+        _dependencies = re.sub(r'[\'"\s]', "", subject).split(",")
+        dependencies += [dep for dep in _dependencies if dep != ""]
+    return dependencies
+
+
 class CheckDependencies(FilePlugin):
     name = "check_dependencies"
 
@@ -60,17 +74,7 @@ class CheckDependencies(FilePlugin):
 
         for match in matches:
             if match:
-                # Remove single and/or double quotes, spaces
-                # and create a list by using the comma as a separator
-                # additionally, check and filter for inline comments
-                dependencies = []
-
-                for line in match.group("value").splitlines():
-                    subject = line[: line.index("#")] if "#" in line else line
-                    _dependencies = re.sub(r'[\'"\s]', "", subject).split(",")
-                    dependencies += [dep for dep in _dependencies if dep != ""]
-
-                for dep in dependencies:
+                for dep in split_dependencies(match.group("value")):
                     if not any(
                         (root / vers / dep).exists() for vers in FEED_VERSIONS
                     ):
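
Extracting the parsing into a module-level `split_dependencies` lets the new dependency-graph tool reuse it. A quick illustration of what it accepts, using an invented `script_dependencies` value:

```python
import re


def split_dependencies(value: str) -> list[str]:
    # Same logic as the helper added in troubadix/plugins/dependencies.py:
    # strip quotes and whitespace, drop inline comments, split on commas.
    dependencies = []
    for line in value.splitlines():
        subject = line[: line.index("#")] if "#" in line else line
        _dependencies = re.sub(r'[\'"\s]', "", subject).split(",")
        dependencies += [dep for dep in _dependencies if dep != ""]
    return dependencies


value = '"gb_ssh_detect.nasl",\n  "os_detection.nasl",  # inline comment\n  \'http_version.nasl\''
print(split_dependencies(value))
# ['gb_ssh_detect.nasl', 'os_detection.nasl', 'http_version.nasl']
```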

troubadix/plugins/spaces_before_dots.py CHANGED

@@ -41,8 +41,8 @@ IGNORE = [
     "common/2008/freebsd/freebsdsa_cpio.nasl",
     "common/2008/freebsd/freebsdsa_cvs2.nasl",
     "common/2009/osc_photoGallery_sql_injection.nasl",
-    "common/2009/
-    "common/2009/
+    "common/2009/gb_novell_edir_mult_vuln_jul09_lin.nasl",
+    "common/2009/gb_novell_edir_mult_vuln_jul09_win.nasl",
     "common/2010/freebsd/freebsd_3a7c5fc4.nasl",
     "common/2012/freebsd/freebsd_a4a809d8.nasl",
     "common/2015/amazon/alas-2014-455.nasl",

troubadix/plugins/valid_oid.py CHANGED

@@ -434,8 +434,8 @@ class CheckValidOID(FileContentPlugin):
         "2008/asterisk_pbx_guest_access_enabled.nasl",
         "2008/asterisk_null_pointer_dereference.nasl",
         "2008/goaheadwebserver_source_disclosure.nasl",
-        "2011/
-        "2011/
+        "2011/gb_ibm_lotus_domino_rpc_auth_dos_vuln.nasl",
+        "2011/gb_cubecart_mult_xss_and_sql_inj_vuln.nasl",
         "2016/gb_adobe_air_mult_vuln_feb16_macosx.nasl",
         "attic/gb_cybozu_garoon_mult_vuln_aug16.nasl",
         "2017/gb_openssh_mult_vuln_jan17_lin.nasl",

troubadix/standalone_plugins/changed_oid.py CHANGED

@@ -23,16 +23,10 @@ from argparse import ArgumentParser, Namespace
 from pathlib import Path
 from typing import Iterable
 
+from troubadix.argparser import file_type_existing
 from troubadix.standalone_plugins.common import git
 
 
-def file_type(string: str) -> Path:
-    file_path = Path(string)
-    if not file_path.is_file():
-        raise ValueError(f"{string} is not a file.")
-    return file_path
-
-
 def parse_args(args: Iterable[str]) -> Namespace:
     parser = ArgumentParser(
         description="Check for changed oid",
@@ -52,7 +46,7 @@ def parse_args(args: Iterable[str]) -> Namespace:
         "-f",
         "--files",
         nargs="+",
-        type=
+        type=file_type_existing,
         default=[],
         help=(
             "List of files to diff. "

troubadix/standalone_plugins/dependency_graph/checks.py ADDED

@@ -0,0 +1,128 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+# SPDX-FileCopyrightText: 2025 Greenbone AG
+
+
+from collections import Counter
+
+import networkx as nx
+
+from .models import Result, Script
+
+
+def check_duplicates(scripts: list[Script]) -> Result:
+    """
+    checks for a script depending on a script multiple times
+    """
+    warnings = []
+    for script in scripts:
+        counter = Counter(dep.name for dep in script.dependencies)
+        duplicates = [dep for dep, count in counter.items() if count > 1]
+
+        if duplicates:
+            msg = f"Duplicate dependencies in {script.name}: {', '.join(duplicates)}"
+            warnings.append(msg)
+
+    return Result(name="check_duplicates", warnings=warnings)
+
+
+def check_missing_dependencies(
+    scripts: list[Script], graph: nx.DiGraph
+) -> Result:
+    """
+    Checks if any scripts that are depended on are missing from
+    the list of scripts created from the local file system,
+    logs the scripts dependending on the missing script
+    """
+    errors = []
+    dependencies = {
+        dep.name for script in scripts for dep in script.dependencies
+    }
+    script_names = {script.name for script in scripts}
+    missing_dependencies = dependencies - script_names
+
+    for missing in missing_dependencies:
+        depending_scripts = graph.predecessors(missing)
+        msg = f"missing dependency file: {missing}:"
+        for script in depending_scripts:
+            msg += f"\n - used by: {script}"
+        errors.append(msg)
+
+    return Result(name="missing_dependencies", errors=errors)
+
+
+def check_cycles(graph) -> Result:
+    """
+    checks for cyclic dependencies
+    """
+    if nx.is_directed_acyclic_graph(graph):
+        return Result(name="check_cycles")
+
+    cycles = nx.simple_cycles(graph)
+
+    errors = [f"cyclic dependency: {cycle}" for cycle in cycles]
+    return Result(name="check_cycles", errors=errors)
+
+
+def cross_feed_dependencies(
+    graph, is_enterprise_checked: bool
+) -> list[tuple[str, str]]:
+    """
+    creates a list of script and dependency for scripts
+    in community feed that depend on scripts in enterprise folders
+    """
+    cross_feed_dependencies = [
+        (u, v)
+        for u, v, is_enterprise_feed in graph.edges.data("is_enterprise_feed")
+        if graph.nodes[u]["feed"] == "community"
+        and graph.nodes[v].get("feed", "unknown") == "enterprise"
+        and is_enterprise_feed == is_enterprise_checked
+    ]  # unknown as standard value due to non existent nodes not having a feed value
+    return cross_feed_dependencies
+
+
+def check_cross_feed_dependencies(graph) -> Result:
+    """
+    Checks if scripts in the community feed have dependencies to enterprise scripts,
+    and if they are correctly contained within a is_enterprise_feed check.
+    """
+    gated_cfd = cross_feed_dependencies(graph, is_enterprise_checked=True)
+    warnings = [
+        f"cross-feed-dependency: {dependent}(community feed) "
+        f"depends on {dependency}(enterprise feed)"
+        for dependent, dependency in gated_cfd
+    ]
+
+    ungated_cfd = cross_feed_dependencies(graph, is_enterprise_checked=False)
+    errors = [
+        f"unchecked cross-feed-dependency: {dependent}(community feed) "
+        f"depends on {dependency}(enterprise feed), but the current feed is not properly checked"
+        for dependent, dependency in ungated_cfd
+    ]
+
+    return Result(
+        name="check_cross_feed_dependencies", warnings=warnings, errors=errors
+    )
+
+
+def check_category_order(graph) -> Result:
+    problematic_edges = [
+        (dependent, dependency)
+        for dependent, dependency in graph.edges()
+        if graph.nodes[dependent]["category"]
+        < graph.nodes[dependency].get("category", -1)
+    ]
+
+    errors = [
+        f"{dependent} depends on {dependency} which has a lower category order"
+        for dependent, dependency in problematic_edges
+    ]
+    return Result(name="check_category_order", errors=errors)
+
+
+def check_deprecated_dependencies(graph) -> Result:
+    errors = [
+        f"{dependent} depends on deprecated script {dependency}"
+        for dependent, dependency in graph.edges()
+        if graph.nodes[dependency].get("deprecated", False)
+    ]
+    return Result(name="check_deprecated_dependencies", errors=errors)
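
The checks operate on a `networkx.DiGraph` whose nodes are script names and whose edges point from a script to each of its dependencies. A self-contained sketch of the cycle check on a toy graph; the node names below are made up:

```python
import networkx as nx

graph = nx.DiGraph()
# dependent -> dependency, mirroring how the dependency graph is built
graph.add_edge("a.nasl", "b.nasl")
graph.add_edge("b.nasl", "c.nasl")
graph.add_edge("c.nasl", "a.nasl")  # closes a cycle

if nx.is_directed_acyclic_graph(graph):
    print("no cyclic dependencies")
else:
    for cycle in nx.simple_cycles(graph):
        print(f"cyclic dependency: {cycle}")
```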

troubadix/standalone_plugins/dependency_graph/cli.py ADDED

@@ -0,0 +1,58 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+# SPDX-FileCopyrightText: 2025 Greenbone AG
+
+
+import os
+from argparse import ArgumentParser, ArgumentTypeError, Namespace
+from pathlib import Path
+
+from troubadix.argparser import directory_type_existing
+
+from .models import Feed
+
+
+def feed_type(value: str) -> Feed:
+    try:
+        return Feed[value.upper()]
+    except KeyError:
+        raise ArgumentTypeError(f"Invalid Feed value: '{value}'")
+
+
+def parse_args() -> Namespace:
+    parser = ArgumentParser(
+        description="Tool for analysing the dependencies in the NASL repository.",
+    )
+    parser.add_argument(
+        "-r",
+        "--root",
+        type=directory_type_existing,
+        help="root for nasl directory that should be linted, uses $VTDIR if no path is given",
+    )
+    parser.add_argument(
+        "-f",
+        "--feed",
+        type=feed_type,
+        choices=Feed,
+        nargs="+",
+        default=[Feed.FULL],
+        help="feed",
+    )
+    parser.add_argument(
+        "--log",
+        default="WARNING",
+        help="Set the logging level (INFO, WARNING, ERROR)",
+    )
+    parser.add_argument("-v", "--verbose", action="count", default=0)
+
+    args = parser.parse_args()
+
+    if not args.root:
+        vtdir = os.environ.get("VTDIR")
+        if not vtdir:
+            raise ValueError(
+                "The environment variable 'VTDIR' is not set,"
+                " and no root path with '--root' was provided."
+            )
+        args.root = Path(vtdir)
+
+    return args
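
`--feed` accepts one or more feed names and maps them onto the `Feed` flag from `models.py`, so multiple selections can later be OR-ed together. A short sketch of that mapping; the Flag and converter are redeclared here so the snippet runs without troubadix installed:

```python
from argparse import ArgumentTypeError
from enum import Flag, auto


class Feed(Flag):
    COMMON = auto()
    FEED_21_04 = auto()
    FEED_22_04 = auto()
    FULL = COMMON | FEED_21_04 | FEED_22_04


def feed_type(value: str) -> Feed:
    try:
        return Feed[value.upper()]
    except KeyError:
        raise ArgumentTypeError(f"Invalid Feed value: '{value}'")


selected = [feed_type("common"), feed_type("feed_22_04")]
combined = selected[0] | selected[1]
print(combined, bool(combined & Feed.FEED_21_04))  # e.g. Feed.COMMON|FEED_22_04 False
```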

troubadix/standalone_plugins/dependency_graph/dependency_graph.py ADDED

@@ -0,0 +1,212 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+# SPDX-FileCopyrightText: 2025 Greenbone AG
+
+
+import logging
+import re
+import sys
+from functools import reduce
+from pathlib import Path
+
+import networkx as nx
+
+from troubadix.helper import CURRENT_ENCODING
+from troubadix.helper.helper import is_enterprise_folder
+from troubadix.helper.patterns import (
+    ScriptTag,
+    SpecialScriptTag,
+    _get_special_script_tag_pattern,
+    get_script_tag_pattern,
+    get_special_script_tag_pattern,
+)
+from troubadix.plugins.dependencies import split_dependencies
+from troubadix.plugins.dependency_category_order import (
+    VTCategory,
+)
+
+from .checks import (
+    check_category_order,
+    check_cross_feed_dependencies,
+    check_cycles,
+    check_deprecated_dependencies,
+    check_duplicates,
+    check_missing_dependencies,
+)
+from .cli import Feed, parse_args
+from .models import Dependency, Result, Script
+
+DEPENDENCY_PATTERN = _get_special_script_tag_pattern(
+    "dependencies", flags=re.DOTALL | re.MULTILINE
+)
+CATEGORY_PATTERN = get_special_script_tag_pattern(SpecialScriptTag.CATEGORY)
+DEPRECATED_PATTERN = get_script_tag_pattern(ScriptTag.DEPRECATED)
+ENTERPRISE_FEED_CHECK_PATTERN = re.compile(
+    r'if\s*\(FEED_NAME\s*==\s*"GSF"\s*\|\|\s*FEED_NAME\s*==\s*"GEF"\s*\|\|\s*FEED_NAME\s*==\s*"SCM"\)\s*'
+    r"(?:\{[^}]*\}\s*|[^\{;]*;)"
+)  # Matches specific if blocks used to gate code to run only for enterprise feeds
+
+
+class Reporter:
+    def __init__(self, verbosity) -> None:
+        self.verbosity = verbosity
+
+    def report(self, results: list[Result]):
+        for result in results:
+            if self.verbosity >= 2:
+                self.print_statistic(result)
+                self.print_divider()
+            if self.verbosity >= 1:
+                self.print_warnings(result)
+            self.print_errors(result)
+            if self.verbosity >= 2:
+                self.print_divider("=")
+
+    def print_divider(self, char="-", length=40):
+        print(char * length)
+
+    def print_statistic(self, result: Result):
+        print(
+            f"{result.name} - warnings: {len(result.warnings)}, errors: {len(result.errors)}"
+        )
+
+    def print_warnings(self, result: Result):
+        for warning in result.warnings:
+            print(f"warning: {warning}")
+
+    def print_errors(self, result: Result):
+        for error in result.errors:
+            print(f"error: {error}")
+
+
+def get_feed(root, feeds: list[Feed]) -> list[Script]:
+    feed = reduce((lambda x, y: x | y), feeds)
+    scripts = []
+    if feed & Feed.COMMON:
+        scripts.extend(get_scripts(root / "common"))
+    if feed & Feed.FEED_21_04:
+        scripts.extend(get_scripts(root / "21.04"))
+    if feed & Feed.FEED_22_04:
+        scripts.extend(get_scripts(root / "22.04"))
+
+    return scripts
+
+
+def get_scripts(directory: Path) -> list[Script]:
+    scripts = []
+
+    for path in directory.rglob("*.nasl"):
+        try:
+            content = path.read_text(encoding=CURRENT_ENCODING)
+        except Exception as e:
+            logging.error(f"Error reading file {path}: {e}")
+            continue
+
+        try:
+            relative_path = path.relative_to(directory)  # used as identifier
+            name = str(relative_path)
+            feed = determine_feed(relative_path)
+            dependencies = extract_dependencies(content)
+            category = extract_category(content)
+            deprecated = bool(DEPRECATED_PATTERN.search(content))
+            scripts.append(
+                Script(name, feed, dependencies, category, deprecated)
+            )
+        except Exception as e:
+            logging.error(f"Error processing {path}: {e}")
+
+    return scripts
+
+
+def determine_feed(script_relative_path: Path) -> str:
+    parts = script_relative_path.parts
+    if is_enterprise_folder(parts[0]):
+        return "enterprise"
+    else:
+        return "community"
+
+
+def extract_dependencies(content: str) -> list[Dependency]:
+    dependencies = []
+
+    if_blocks = [
+        (match.start(), match.end())
+        for match in ENTERPRISE_FEED_CHECK_PATTERN.finditer(content)
+    ]
+
+    for match in DEPENDENCY_PATTERN.finditer(content):
+        start, end = match.span()
+        is_enterprise_feed = any(
+            start >= block_start and end <= block_end
+            for block_start, block_end in if_blocks
+        )
+        dep_list = split_dependencies(match.group("value"))
+        dependencies.extend(
+            Dependency(dep, is_enterprise_feed) for dep in dep_list
+        )
+
+    return dependencies
+
+
+def extract_category(content) -> int:
+    match = CATEGORY_PATTERN.search(content)
+    category_value = match.group("value")
+    return VTCategory[category_value]
+
+
+def create_graph(scripts: list[Script]):
+    graph = nx.DiGraph()
+
+    # Add nodes and edges based on dependencies
+    for script in scripts:
+        # explicit add incase the script has no dependencies
+        graph.add_node(
+            script.name,
+            feed=script.feed,
+            category=script.category,
+            deprecated=script.deprecated,
+        )
+        for dependency in script.dependencies:
+            graph.add_edge(
+                script.name,
+                dependency.name,
+                is_enterprise_feed=dependency.is_enterprise_feed,
+            )
+    return graph
+
+
+def main():
+    args = parse_args()
+
+    logging.basicConfig(
+        level=args.log.upper(), format="%(levelname)s: %(message)s"
+    )
+
+    logging.info("starting troubadix dependency analysis")
+
+    scripts = get_feed(args.root, args.feed)
+    graph = create_graph(scripts)
+
+    logging.info(f"nodes (scripts) in graph: {graph.number_of_nodes()}")
+    logging.info(f"edges (dependencies) in graph: {graph.number_of_edges()}")
+
+    results = [
+        check_duplicates(scripts),
+        check_missing_dependencies(scripts, graph),
+        check_cycles(graph),
+        check_cross_feed_dependencies(graph),
+        check_category_order(graph),
+        check_deprecated_dependencies(graph),
+    ]
+    reporter = Reporter(args.verbose)
+    reporter.report(results)
+
+    if any(result.errors for result in results):
+        return 1
+    elif any(result.warnings for result in results):
+        return 2
+    else:
+        return 0
+
+
+if __name__ == "__main__":
+    sys.exit(main())
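
`ENTERPRISE_FEED_CHECK_PATTERN` decides whether a `script_dependencies` call sits inside an enterprise-feed guard; `extract_dependencies` marks a dependency as gated when its match span falls inside one of those if-blocks. A reduced, self-contained illustration of that span containment; the NASL snippet and the simplified dependencies regex are invented for the example, while troubadix derives the real pattern from its tag helpers:

```python
import re

ENTERPRISE_FEED_CHECK_PATTERN = re.compile(
    r'if\s*\(FEED_NAME\s*==\s*"GSF"\s*\|\|\s*FEED_NAME\s*==\s*"GEF"\s*\|\|\s*FEED_NAME\s*==\s*"SCM"\)\s*'
    r"(?:\{[^}]*\}\s*|[^\{;]*;)"
)
# Simplified stand-in for the script_dependencies tag pattern.
DEPENDENCY_PATTERN = re.compile(r'script_dependencies\("(?P<value>[^"]+)"\);')

content = '''
script_dependencies("find_service.nasl");
if (FEED_NAME == "GSF" || FEED_NAME == "GEF" || FEED_NAME == "SCM")
  script_dependencies("gsf/enterprise_only.nasl");
'''

if_blocks = [m.span() for m in ENTERPRISE_FEED_CHECK_PATTERN.finditer(content)]

for match in DEPENDENCY_PATTERN.finditer(content):
    start, end = match.span()
    gated = any(bs <= start and end <= be for bs, be in if_blocks)
    print(match.group("value"), "enterprise-gated" if gated else "ungated")
```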

troubadix/standalone_plugins/dependency_graph/models.py ADDED

@@ -0,0 +1,40 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+# SPDX-FileCopyrightText: 2025 Greenbone AG
+
+from dataclasses import dataclass, field
+from enum import Flag, auto
+
+
+class Feed(Flag):
+    COMMON = auto()
+    FEED_21_04 = auto()
+    FEED_22_04 = auto()
+    FULL = COMMON | FEED_21_04 | FEED_22_04
+
+    def __str__(self):
+        # Make enum values user-friendly for argparse help
+        return self.name.lower()
+
+
+@dataclass
+class Dependency:
+    name: str
+    # Indicates whether the dependency will only run if an enterprise feed is used.
+    # Controlled by a specific if check. Does not indicate the script's feed.
+    is_enterprise_feed: bool
+
+
+@dataclass
+class Script:
+    name: str
+    feed: str
+    dependencies: list[Dependency]
+    category: int
+    deprecated: bool
+
+
+@dataclass
+class Result:
+    name: str
+    warnings: list[str] = field(default_factory=list)
+    errors: list[str] = field(default_factory=list)
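
Each check returns a `Result`, and `main()` maps the aggregate onto an exit status: 1 if any check produced errors, 2 for warnings only, 0 otherwise. A tiny sketch of that aggregation with the dataclass redeclared locally; the sample results are invented:

```python
from dataclasses import dataclass, field


@dataclass
class Result:
    name: str
    warnings: list[str] = field(default_factory=list)
    errors: list[str] = field(default_factory=list)


results = [
    Result(name="check_cycles"),
    Result(name="check_duplicates", warnings=["Duplicate dependencies in a.nasl: b.nasl"]),
]

if any(result.errors for result in results):
    exit_code = 1
elif any(result.warnings for result in results):
    exit_code = 2
else:
    exit_code = 0
print(exit_code)  # 2: warnings only
```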

troubadix/standalone_plugins/deprecate_vts.py CHANGED

@@ -2,7 +2,7 @@
 # SPDX-FileCopyrightText: 2024 Greenbone AG
 
 import re
-from argparse import ArgumentParser,
+from argparse import ArgumentParser, Namespace
 from dataclasses import dataclass
 from enum import Enum
 from pathlib import Path
@@ -10,7 +10,7 @@ from typing import Iterable, Optional
 
 from pontos.terminal.terminal import ConsoleTerminal
 
-from troubadix.argparser import directory_type, file_type
+from troubadix.argparser import directory_type, file_type, file_type_existing
 from troubadix.helper.patterns import (
     ScriptTag,
     SpecialScriptTag,
@@ -37,15 +37,6 @@ class DeprecatedFile:
 KB_ITEMS_PATTERN = re.compile(r"set_kb_item\(.+\);")
 
 
-def existing_file_type(string: str) -> Path:
-    file_path = Path(string)
-    if not file_path.exists():
-        raise ArgumentTypeError(f'File "{string}" does not exist.')
-    if not file_path.is_file():
-        raise ArgumentTypeError(f'"{string}" is not a file.')
-    return file_path
-
-
 def update_summary(file: DeprecatedFile, deprecation_reason: str) -> str:
     """Update the summary of the nasl script by adding the information
     that the script has been deprecated, and if possible, the oid of
@@ -232,7 +223,7 @@ def parse_args(args: Iterable[str] = None) -> Namespace:
         "--from-file",
         metavar="<from_file>",
         default=None,
-        type=
+        type=file_type_existing,
         help=(
             "Path to a single file that contains a list of files "
             "to be deprecated, separated by new lines."

troubadix/standalone_plugins/file_extensions.py CHANGED

@@ -7,19 +7,7 @@ from argparse import ArgumentParser, Namespace
 from pathlib import Path
 from typing import List
 
-
-def directory_type(string: str) -> Path:
-    directory_path = Path(string)
-    if not directory_path.is_dir():
-        raise ValueError(f"{string} is not a directory.")
-    return directory_path
-
-
-def file_type(string: str) -> Path:
-    file_path = Path(string)
-    if not file_path.is_file():
-        raise ValueError(f"{string} is not a file.")
-    return file_path
+from troubadix.argparser import directory_type_existing, file_type_existing
 
 
 def parse_args() -> Namespace:
@@ -28,11 +16,11 @@ def parse_args() -> Namespace:
     )
     parser.add_argument(
         "dir",
-        type=
+        type=directory_type_existing,
         help="directory that should be linted",
     )
     parser.add_argument(
-        "--ignore-file", type=
+        "--ignore-file", type=file_type_existing, help="path to ignore file"
     )
     parser.add_argument(
         "--gen-ignore-entries",

troubadix/standalone_plugins/last_modification.py CHANGED

@@ -20,13 +20,14 @@
 import datetime
 import re
 import sys
-from argparse import ArgumentParser,
+from argparse import ArgumentParser, Namespace
 from pathlib import Path
 from typing import Iterable, Sequence
 
 from pontos.terminal import Terminal
 from pontos.terminal.terminal import ConsoleTerminal
 
+from troubadix.argparser import file_type_existing
 from troubadix.helper import CURRENT_ENCODING
 from troubadix.helper.patterns import (
     LAST_MODIFICATION_ANY_VALUE_PATTERN,
@@ -35,15 +36,6 @@ from troubadix.helper.patterns import (
 from troubadix.troubadix import from_file
 
 
-def existing_file_type(string: str) -> Path:
-    file_path = Path(string)
-    if not file_path.exists():
-        raise ArgumentTypeError(f'File "{string}" does not exist.')
-    if not file_path.is_file():
-        raise ArgumentTypeError(f'"{string}" is not a file.')
-    return file_path
-
-
 def update(nasl_file: Path, terminal: Terminal):
     file_content = nasl_file.read_text(encoding=CURRENT_ENCODING)
 
@@ -104,12 +96,12 @@ def parse_args(args: Sequence[str] = None) -> Namespace:
     what_group.add_argument(
         "--files",
         nargs="+",
-        type=
+        type=file_type_existing,
         help="List of files that should be updated",
     )
     what_group.add_argument(
         "--from-file",
-        type=
+        type=file_type_existing,
         help=(
             "Pass a file that contains a List of files "
             "containing paths to files, that should be "

troubadix/standalone_plugins/no_solution.py CHANGED

@@ -25,6 +25,7 @@ from typing import Iterable, Optional, Tuple
 
 from pontos.terminal.terminal import ConsoleTerminal
 
+from troubadix.argparser import directory_type_existing
 from troubadix.helper import CURRENT_ENCODING
 from troubadix.helper.patterns import (
     ScriptTag,
@@ -49,13 +50,6 @@ CREATION_DATE_FORMAT = "%Y-%m-%d"
 MONTH_AS_DAYS = 365 / 12
 
 
-def directory_type(string: str) -> Path:
-    file_path = Path(string)
-    if not file_path.is_dir():
-        raise ValueError(f"{string} is not a directory.")
-    return file_path
-
-
 def parse_solution_date(date_string: str) -> datetime:
     """Convert date string to date trying different formats"""
 
@@ -83,7 +77,7 @@ def parse_args() -> Namespace:
         "-d",
         "--directory",
         dest="directory",
-        type=
+        type=directory_type_existing,
         help="Specify the directory to scan for nasl scripts",
     )
 

troubadix/standalone_plugins/version_updated.py CHANGED

@@ -23,6 +23,7 @@ from argparse import ArgumentParser, Namespace
 from pathlib import Path
 from typing import Iterable, List
 
+from troubadix.argparser import file_type_existing
 from troubadix.helper import is_ignore_file
 from troubadix.helper.patterns import (
     LAST_MODIFICATION_ANY_VALUE_PATTERN,
@@ -46,13 +47,6 @@ _IGNORE_FILES = [
 ]
 
 
-def file_type(string: str) -> Path:
-    file_path = Path(string)
-    if not file_path.is_file():
-        raise ValueError(f"{string} is not a file.")
-    return file_path
-
-
 def parse_args(args: Iterable[str]) -> Namespace:
     parser = ArgumentParser(
         description="Check for changed files that did not alter "
@@ -73,7 +67,7 @@ def parse_args(args: Iterable[str]) -> Namespace:
         "-f",
         "--files",
         nargs="+",
-        type=
+        type=file_type_existing,
         default=[],
         help=(
             "List of files to diff. "

{troubadix-25.2.4.dist-info → troubadix-25.3.1.dist-info}/METADATA CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: troubadix
-Version: 25.2.4
+Version: 25.3.1
 Summary: A linting and QA check tool for NASL files
 License: GPL-3.0-or-later
 Author: Greenbone
@@ -21,6 +21,7 @@ Requires-Dist: chardet (>=4,<6)
 Requires-Dist: charset-normalizer (>=3.2.0,<4.0.0)
 Requires-Dist: codespell (==2.4.1)
 Requires-Dist: gitpython (>=3.1.31,<4.0.0)
+Requires-Dist: networkx (>=3.4.2,<4.0.0)
 Requires-Dist: pontos (>=22.7,<26.0)
 Requires-Dist: python-magic (>=0.4.25,<0.5.0)
 Requires-Dist: validators (>=0.34.0,<0.35.0)

{troubadix-25.2.4.dist-info → troubadix-25.3.1.dist-info}/RECORD CHANGED

@@ -1,9 +1,9 @@
 troubadix/__init__.py,sha256=K7sIXXDrC7YRb7BvIpdQ6ZfG_QkT0qUH_wAlHROVRfM,716
-troubadix/__version__.py,sha256=
-troubadix/argparser.py,sha256
+troubadix/__version__.py,sha256=6NCjh_0ChAOoHShs27iMDdEnzibZHYvZIc4km_AUJTA,103
+troubadix/argparser.py,sha256=-H07Jhqh68_M4Mbjq9qJjTr3MShy_N2pxl2qHA6cfRU,7481
 troubadix/codespell/codespell.additions,sha256=NAYnQF79kdk4YhU_h8fpjAVVkqBm778aPHPPP7FEkZY,504
-troubadix/codespell/codespell.exclude,sha256=
-troubadix/codespell/codespell.ignore,sha256=
+troubadix/codespell/codespell.exclude,sha256=RS0PH7Px2NRg40UpreuXfnInaC4veaeeDxP1FNac4ms,147431
+troubadix/codespell/codespell.ignore,sha256=2CP8u6O2VENcDpt2FfEDNmfa1Eh3D80yeYHT54GM1X4,1512
 troubadix/helper/__init__.py,sha256=tp2fPLzwGEA_2eiJbvuePiY6rjYSFxx7VUsCV4fSwvw,1110
 troubadix/helper/helper.py,sha256=GXapYLii2rLKwkX2ok31YoAdUSizBnyPjWz-aPP6HM8,3105
 troubadix/helper/linguistic_exception_handler.py,sha256=Bq7ULjDdWTKUpFNTUX6XMPdD4s4v8eIjZPyqBe8VLws,6811
@@ -13,10 +13,10 @@ troubadix/plugins/__init__.py,sha256=V5fHMg2qVWIYKVZJqHKpzgrQ5x87Pz5u-h-CxOx7Dls
 troubadix/plugins/badwords.py,sha256=k1A1d2pdXzie87FGGXrykP2BgdZbY5QtmQItupHtNyw,4701
 troubadix/plugins/copyright_text.py,sha256=jYsLWmTbT_A78XQQxQFK-5kMMHkh3xdvlh7mEF2dZGU,3583
 troubadix/plugins/copyright_year.py,sha256=XzM9MHVzOXwNLwHpfuaWj8PUOmswr56SBVOLBdvxjd4,5478
-troubadix/plugins/creation_date.py,sha256=
+troubadix/plugins/creation_date.py,sha256=TyYnbCdmuBuPYNE79Y5hnoQUr5BIOnvJwpHovu8xE98,3793
 troubadix/plugins/cve_format.py,sha256=Ue6b9RzuQZWOdBd6y5ruqdEq2zyRb2_FSUQScnjHOUQ,3400
 troubadix/plugins/cvss_format.py,sha256=GrZfZxkxSh9OVixmBBnHQ4NzF9nN2tCK1vepz_-U60g,2309
-troubadix/plugins/dependencies.py,sha256=
+troubadix/plugins/dependencies.py,sha256=zLCioIiHlBYO6ViO3vXOIQboESyHaIQ9y6pSuoYP9aU,4118
 troubadix/plugins/dependency_category_order.py,sha256=CrEWJHDeuD4zdSnKghsjf_D5EICQDN2Dv72OsVrYgnU,7010
 troubadix/plugins/deprecated_dependency.py,sha256=TFTe9fO4A0S3JTXpxdHaVnQ9ZSHR3lLQjggS_FjGY-s,3831
 troubadix/plugins/deprecated_functions.py,sha256=6e46woXd-3hUBnnuIXLlaqlcP6F7c99Y0ZGeXfsbQYc,2568
@@ -57,14 +57,14 @@ troubadix/plugins/security_messages.py,sha256=EPu3-YB0iP5_hfbQjrfvvTrQRiPgDjC85G
 troubadix/plugins/set_get_kb_calls.py,sha256=WGu1CKLjn3VhbDo33IJ4TtWQ-kz9gInkJskTqOSMM6k,3415
 troubadix/plugins/solution_text.py,sha256=4Gs84Qsyg-1iTDP7Y7o8Bo5AH4hKlwGPW0NfwMpx2fU,5852
 troubadix/plugins/solution_type.py,sha256=6wq7lj0bCL6tTaZ-d_aGKq6a_jVlCD73GltHJsJhmm8,2602
-troubadix/plugins/spaces_before_dots.py,sha256=
+troubadix/plugins/spaces_before_dots.py,sha256=8metqLHLN3y259CzZckrglatwA0Uwn4mvV_bkFZZbT4,4844
 troubadix/plugins/spaces_in_filename.py,sha256=v8OqzzZSeI4_iXATHYzkf-HoAMxc_xb2Nj0HPAVevpk,611
 troubadix/plugins/spelling.py,sha256=3AW5sNtLL67OthKLaCH_y2HCVJ5YH_eyF9xjTJMIDG4,9593
 troubadix/plugins/tabs.py,sha256=7zXaTZe4cZoZvrLyqntVfTeNN_W3D8dfQl67QevXxtc,1319
 troubadix/plugins/todo_tbd.py,sha256=MN5fFwBhPmt3JDQ2Hx20B8yUy1vz7LIZC3rDIOzfW9M,1758
 troubadix/plugins/trailing_spaces_tabs.py,sha256=nMly8ZsmGprxHvwCDclKBDRB0eq6JEkjERYKvtStkY4,1873
 troubadix/plugins/using_display.py,sha256=Hd-eysbXlkQb4M-ywzSd784k3aBSiG_sO6Ou0JdbyJA,4046
-troubadix/plugins/valid_oid.py,sha256=
+troubadix/plugins/valid_oid.py,sha256=LteBZOW35Ml8awjVShaPrmPwpsIkCX7GfiwWKLk09Ts,17929
 troubadix/plugins/valid_script_tag_names.py,sha256=6uMJsBdV-Zx-k1F2_MWmQPHXNo1u0ifuosbftbg-27E,3447
 troubadix/plugins/variable_assigned_in_if.py,sha256=NNz8iuzyQ4rSM6My4WYC1s5TABQqgs7us15PkDA-VV0,3285
 troubadix/plugins/variable_redefinition_in_foreach.py,sha256=SfaA70TkpD9dsvNbhwJEA3eLAHWvj4YwksN-qeBMowg,2470
@@ -76,7 +76,7 @@ troubadix/runner.py,sha256=RHyZe7YMBzJpCGBVfBUg2BMkVi13CmI9u0DVG2z4518,5195
 troubadix/standalone_plugins/__init__.py,sha256=kUR5RAFc7HCeiqdlX36dZOHkUI5wI6V_43RpEcD8b-0,22
 troubadix/standalone_plugins/allowed_rev_diff.py,sha256=5Zc8xTZlkMOPVNRdxNkosFFtwaQ6J8RiJYzaYXEuN40,4145
 troubadix/standalone_plugins/changed_cves.py,sha256=nEWwDa33QXekvpwcmnGMrdPHrJISz9p9j6lX09teDlk,2921
-troubadix/standalone_plugins/changed_oid.py,sha256=
+troubadix/standalone_plugins/changed_oid.py,sha256=9eLvuBuPgZYnHHst-Y-J0TgCcgwmlOWjszWJ57kG9cg,3934
 troubadix/standalone_plugins/changed_packages/changed_packages.py,sha256=tyNwpJgaZS2o0X6xywXAQ_i7LB9HsEQYbDZ3Tcvtsdo,5742
 troubadix/standalone_plugins/changed_packages/marker/__init__.py,sha256=Le59j2KcaXez1MIPjZ8GDJmSuLGkOVI3k2-BWO30Bc0,993
 troubadix/standalone_plugins/changed_packages/marker/added_epoch.py,sha256=PfnG5B1v8SwOJw3ez-eb794PT7k-O4hJKDHMSd9lwNo,1797
@@ -87,14 +87,19 @@ troubadix/standalone_plugins/changed_packages/marker/dropped_architecture.py,sha
 troubadix/standalone_plugins/changed_packages/marker/marker.py,sha256=7uZXR2Ds_8soB_2wugCkOSz_3hoX03KMh2NAW0G5Dzg,1278
 troubadix/standalone_plugins/changed_packages/package.py,sha256=Pcr2tcwiPTzD3jB0iteqA7-TajL-dl5Onh1dvC_H9xk,2743
 troubadix/standalone_plugins/common.py,sha256=PkScV-lisNY4WyrzwjV3dK1DF26hJv5JXTcREblJ0v0,1028
-troubadix/standalone_plugins/
-troubadix/standalone_plugins/
-troubadix/standalone_plugins/
-troubadix/standalone_plugins/
-troubadix/standalone_plugins/
+troubadix/standalone_plugins/dependency_graph/__init__.py,sha256=SQSaQXWmpq5-5ozpqMgvnvoYTK8oj64A5kie1m_5bWQ,88
+troubadix/standalone_plugins/dependency_graph/checks.py,sha256=nrpY2cyQlRb-A6gbVq-1bQEYlanNdOJwfX4liwjiN6Q,4249
+troubadix/standalone_plugins/dependency_graph/cli.py,sha256=osTVMWknVo_jWmlaYtRM-qcE88c_YVcT1my-jnFdxiM,1510
+troubadix/standalone_plugins/dependency_graph/dependency_graph.py,sha256=TB54FTBFlt2iNDqLrlYj9AO04ntS7DN44ASMwfbJWy8,6257
+troubadix/standalone_plugins/dependency_graph/models.py,sha256=bEvj71inolH-_NLv1fEHMYF8tH9ck-T0jIlThR91uPM,918
+troubadix/standalone_plugins/deprecate_vts.py,sha256=mLt2DV9Y1YAEuh6c4nFweZYIOprsBzO7115dihEn4lA,7602
+troubadix/standalone_plugins/file_extensions.py,sha256=fqswrhCcQqygIszcnobS9hFQmSpv3gDkvlufoaTckBg,2355
+troubadix/standalone_plugins/last_modification.py,sha256=ROzwVzzYilXJ0llVt4Lv0w8b9BJKoahl6YxPDiub614,4338
+troubadix/standalone_plugins/no_solution.py,sha256=p_-az9Igl4GH6HnhLLYbYlWIiEP64OTQLpX-z3JAshs,8760
+troubadix/standalone_plugins/version_updated.py,sha256=6YHF0OjL5NWszQdsSh7XzlSji1e6Uaqwu_Y6m3R0mvI,4203
 troubadix/troubadix.py,sha256=5__Jz3bYSrya4aG6RCBWxqnsDepXfwXZ3v0bjCzEFi0,6039
-troubadix-25.
-troubadix-25.
-troubadix-25.
-troubadix-25.
-troubadix-25.
+troubadix-25.3.1.dist-info/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+troubadix-25.3.1.dist-info/METADATA,sha256=-j187jjk4eNelTKbSS-JY83GusMFSSko5FZavym8RUM,4462
+troubadix-25.3.1.dist-info/WHEEL,sha256=XbeZDeTWKc1w7CSIyre5aMDU_-PohRwTQceYnisIYYY,88
+troubadix-25.3.1.dist-info/entry_points.txt,sha256=SnhEUe4W76P-ADmO9J355gRztTyHU_PTxRewKy3-e5o,832
+troubadix-25.3.1.dist-info/RECORD,,

{troubadix-25.2.4.dist-info → troubadix-25.3.1.dist-info}/entry_points.txt CHANGED

@@ -4,6 +4,7 @@ troubadix-allowed-rev-diff=troubadix.standalone_plugins.allowed_rev_diff:main
 troubadix-changed-cves=troubadix.standalone_plugins.changed_cves:main
 troubadix-changed-oid=troubadix.standalone_plugins.changed_oid:main
 troubadix-changed-packages=troubadix.standalone_plugins.changed_packages.changed_packages:main
+troubadix-dependency-graph=troubadix.standalone_plugins.dependency_graph.dependency_graph:main
 troubadix-deprecate-vts=troubadix.standalone_plugins.deprecate_vts:main
 troubadix-file-extensions=troubadix.standalone_plugins.file_extensions:main
 troubadix-last-modification=troubadix.standalone_plugins.last_modification:main

{troubadix-25.2.4.dist-info → troubadix-25.3.1.dist-info}/LICENSE
File without changes

{troubadix-25.2.4.dist-info → troubadix-25.3.1.dist-info}/WHEEL
File without changes