mail-ops-scripts 2.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mail_ops_scripts-2.2.0.dist-info/METADATA +147 -0
- mail_ops_scripts-2.2.0.dist-info/RECORD +15 -0
- mail_ops_scripts-2.2.0.dist-info/WHEEL +5 -0
- mail_ops_scripts-2.2.0.dist-info/entry_points.txt +2 -0
- mail_ops_scripts-2.2.0.dist-info/licenses/LICENSE +21 -0
- mail_ops_scripts-2.2.0.dist-info/top_level.txt +1 -0
- mailops/__init__.py +0 -0
- mailops/__main__.py +12 -0
- mailops/blacklist_monitor.py +97 -0
- mailops/cli.py +95 -0
- mailops/dkim_gen.py +67 -0
- mailops/dmarc_parser.py +189 -0
- mailops/imap_fetcher.py +190 -0
- mailops/spf_check.py +108 -0
- mailops/ui.py +45 -0
|
@@ -0,0 +1,147 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: mail-ops-scripts
|
|
3
|
+
Version: 2.2.0
|
|
4
|
+
Summary: A unified operational toolkit for email server administration and DMARC analysis.
|
|
5
|
+
Author-email: Beau Bremer <beau.bremer@tutamial.com>
|
|
6
|
+
License: MIT
|
|
7
|
+
Project-URL: Homepage, https://github.com/KnowOneActual/mail-ops-scripts
|
|
8
|
+
Project-URL: Repository, https://github.com/KnowOneActual/mail-ops-scripts
|
|
9
|
+
Classifier: Programming Language :: Python :: 3
|
|
10
|
+
Classifier: Operating System :: OS Independent
|
|
11
|
+
Requires-Python: >=3.8
|
|
12
|
+
Description-Content-Type: text/markdown
|
|
13
|
+
License-File: LICENSE
|
|
14
|
+
Provides-Extra: dev
|
|
15
|
+
Requires-Dist: pytest>=7.0; extra == "dev"
|
|
16
|
+
Requires-Dist: black>=23.0; extra == "dev"
|
|
17
|
+
Requires-Dist: isort>=5.12; extra == "dev"
|
|
18
|
+
Requires-Dist: mypy>=1.5; extra == "dev"
|
|
19
|
+
Requires-Dist: build; extra == "dev"
|
|
20
|
+
Requires-Dist: twine; extra == "dev"
|
|
21
|
+
Dynamic: license-file
|
|
22
|
+
|
|
23
|
+
<div align="center">
|
|
24
|
+
<img src="assets/img/mail-ops-scripts.webp" alt="mail ops scripts project logo" width="200">
|
|
25
|
+
|
|
26
|
+
# Mail Ops Scripts
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
[](https://github.com/KnowOneActual/mail-ops-scripts/actions/workflows/ci.yml)
|
|
30
|
+
[](https://github.com/psf/black)
|
|
31
|
+
[](https://pycqa.github.io/isort/)
|
|
32
|
+
[](https://opensource.org/licenses/MIT)
|
|
33
|
+
[](https://www.python.org/downloads/)
|
|
34
|
+
|
|
35
|
+
</div>
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
A unified operational toolkit for email server administration, security analysis, and reporting.
|
|
39
|
+
|
|
40
|
+
## 🚀 COMMANDS STATUS
|
|
41
|
+
|
|
42
|
+
| Command | Status | Tech |
|
|
43
|
+
|---------|--------|------|
|
|
44
|
+
| `mailops dkim example.com` | ✅ **FULLY LIVE** | OpenSSL key generation |
|
|
45
|
+
| `mailops spf google.com` | ✅ **FULLY LIVE** | Google DNS-over-HTTPS |
|
|
46
|
+
| `mailops report --alerts` | ✅ **FILE READY** | DMARC XML parsing |
|
|
47
|
+
| `mailops fetch --user...` | ✅ **CREDS READY** | IMAPlib + Gmail/Outlook |
|
|
48
|
+
|
|
49
|
+
✅ LIVE = Real code executing (DKIM keys generated, SPF DNS lookups, XML parsing)
|
|
50
|
+
⏳ TODO = Structure ready but needs real implementation
|
|
51
|
+
❌ BROKEN = Import errors or crashes
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
## 🎯 PRODUCTION WORKFLOW
|
|
55
|
+
|
|
56
|
+
```
|
|
57
|
+
📥 1. Fetch reports → mailops fetch --user you@gmail.com --password app-pass --days 7
|
|
58
|
+
📊 2. Analyze + alerts → mailops report --alerts
|
|
59
|
+
🔍 3. SPF validation → mailops spf yourdomain.com
|
|
60
|
+
🔑 4. DKIM key generation → mailops dkim yourdomain.com --selector=mail
|
|
61
|
+
```
|
|
62
|
+
|
|
63
|
+
## 💾 Quick Start
|
|
64
|
+
|
|
65
|
+
```
|
|
66
|
+
# Clone + setup
|
|
67
|
+
git clone https://github.com/knowoneactual/mail-ops-scripts
|
|
68
|
+
cd mail-ops-scripts
|
|
69
|
+
|
|
70
|
+
# Virtual environment
|
|
71
|
+
python -m venv .venv
|
|
72
|
+
source .venv/bin/activate # Linux/Mac
|
|
73
|
+
# .venv/bin/Activate.ps1 # Windows PowerShell
|
|
74
|
+
|
|
75
|
+
# Install
|
|
76
|
+
pip install -e .
|
|
77
|
+
|
|
78
|
+
# Test
|
|
79
|
+
mailops --help
|
|
80
|
+
```
|
|
81
|
+
|
|
82
|
+
## 📋 Commands Reference
|
|
83
|
+
|
|
84
|
+
```
|
|
85
|
+
# DKIM Key Generation
|
|
86
|
+
mailops dkim example.com # default selector
|
|
87
|
+
mailops dkim example.com --selector=mail # custom selector
|
|
88
|
+
|
|
89
|
+
# SPF Checking
|
|
90
|
+
mailops spf google.com
|
|
91
|
+
mailops spf yourdomain.com
|
|
92
|
+
|
|
93
|
+
# DMARC Reports
|
|
94
|
+
mailops report # All XML files
|
|
95
|
+
mailops report --alerts # Failures only
|
|
96
|
+
mailops report --csv output.csv # Export CSV
|
|
97
|
+
|
|
98
|
+
# IMAP Fetching
|
|
99
|
+
mailops fetch --user you@gmail.com --password app-password --days 7
|
|
100
|
+
mailops fetch --user user@domain.com --server imap.domain.com --days 30
|
|
101
|
+
```
|
|
102
|
+
|
|
103
|
+
## 🎉 Features
|
|
104
|
+
|
|
105
|
+
- ✅ **Real OpenSSL DKIM generation** → `selector.private` files
|
|
106
|
+
- ✅ **Google DNS-over-HTTPS SPF** → Production DNS lookups
|
|
107
|
+
- ✅ **DMARC XML parsing** → Console + CSV output
|
|
108
|
+
- ✅ **IMAP report fetching** → Gmail/Outlook/Exchange ready
|
|
109
|
+
- ✅ **Global CLI install** → `~/.local/bin/mailops`
|
|
110
|
+
- ✅ **VS Code workflow** → Python/HTML/Bash integration
|
|
111
|
+
- ✅ **Production ready** → Error handling + help text
|
|
112
|
+
|
|
113
|
+
## 🛠 Development
|
|
114
|
+
|
|
115
|
+
```
|
|
116
|
+
# Dev dependencies
|
|
117
|
+
pip install -e '.[dev]'
|
|
118
|
+
|
|
119
|
+
# Code quality
|
|
120
|
+
black .
|
|
121
|
+
isort .
|
|
122
|
+
mypy .
|
|
123
|
+
pytest
|
|
124
|
+
```
|
|
125
|
+
|
|
126
|
+
## 📦 Build & Publish
|
|
127
|
+
|
|
128
|
+
```
|
|
129
|
+
pip install build twine
|
|
130
|
+
python -m build
|
|
131
|
+
twine upload dist/*
|
|
132
|
+
```
|
|
133
|
+
|
|
134
|
+
## 📖 Changelog
|
|
135
|
+
[CHANGELOG.md](CHANGELOG.md)
|
|
136
|
+
|
|
137
|
+
## 🤝 Contributing
|
|
138
|
+
[CONTRIBUTING.md](CONTRIBUTING.md)
|
|
139
|
+
|
|
140
|
+
## 📄 License
|
|
141
|
+
[MIT](LICENSE)
|
|
142
|
+
|
|
143
|
+
---
|
|
144
|
+
**Made with ❤️ for email operations**
|
|
145
|
+
[knowoneactual/mail-ops-scripts](https://github.com/knowoneactual/mail-ops-scripts)
|
|
146
|
+
|
|
147
|
+
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
mail_ops_scripts-2.2.0.dist-info/licenses/LICENSE,sha256=Q-LO70O6FUf1h3JjATf0DjyzuuAFuFAAU4gOF6Zv0Yw,1068
|
|
2
|
+
mailops/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
3
|
+
mailops/__main__.py,sha256=8hxkD7SyvJIPDbJWYj_f5pA0N8WLjl6aiU0GLEmCnHU,242
|
|
4
|
+
mailops/blacklist_monitor.py,sha256=zOmzgiSicDPkRNt_WmoW2jbrnbpptxanbayq-OnsbQg,2814
|
|
5
|
+
mailops/cli.py,sha256=jHqXIg_6OHrA78U1Xuj0mJ_B_T8Feqpn_ftLnf5-kak,3291
|
|
6
|
+
mailops/dkim_gen.py,sha256=8n6M4DIQMBo9T8-BTSMHAe_fDkzR-1CX942S5vGOO2E,1821
|
|
7
|
+
mailops/dmarc_parser.py,sha256=PkdJw7EzyyezAv9aZgK9V69Mp26yBA1PSh6gJngMXtI,5387
|
|
8
|
+
mailops/imap_fetcher.py,sha256=SnBUvUy7IQF5d_mlkHq7jULTDTUYnfx1EVpDck2wZ3I,6366
|
|
9
|
+
mailops/spf_check.py,sha256=yp54JIDXI6DoqnRxErqDulvKq-6_Jpp0q34s-eeZvDg,3334
|
|
10
|
+
mailops/ui.py,sha256=y4HEUS62tMg4S1sPGcHSgvGyoGbSqW-P4kEgnbx69h4,1093
|
|
11
|
+
mail_ops_scripts-2.2.0.dist-info/METADATA,sha256=jnIciF1p6JV49dVXgVWd_yQpLPsvfqU7tmf_hL5QP40,4433
|
|
12
|
+
mail_ops_scripts-2.2.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
|
|
13
|
+
mail_ops_scripts-2.2.0.dist-info/entry_points.txt,sha256=jthzvlg7kxSDzelTo5SuZoCiXr6A82iY0t9FqwiZbhA,45
|
|
14
|
+
mail_ops_scripts-2.2.0.dist-info/top_level.txt,sha256=iVIjt6hatGQ-tO1krcLfXvCfRo6ECGIpWYCFQJvLw_M,8
|
|
15
|
+
mail_ops_scripts-2.2.0.dist-info/RECORD,,
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2025 Beau Bremer
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
mailops
|
mailops/__init__.py
ADDED
|
File without changes
|
mailops/__main__.py
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
"""Package entry point so the toolkit can run as `python -m mailops`."""
import os
import sys

# Make the package importable when executed directly from a source checkout.
# (Was duplicated twice in the original file — inserting the same path twice
# is harmless but pointless; deduplicated here.)
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from mailops.cli import main

main()
|
@@ -0,0 +1,97 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import sys
|
|
3
|
+
|
|
4
|
+
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
|
5
|
+
# mailops/blacklist_monitor.py
|
|
6
|
+
import argparse
|
|
7
|
+
import ipaddress
|
|
8
|
+
import json
|
|
9
|
+
import urllib.request
|
|
10
|
+
|
|
11
|
+
from . import ui # Import the new UI module
|
|
12
|
+
|
|
13
|
+
# DNS blacklists queried by run_check(); each is consulted by resolving
# <reversed-ipv4>.<provider> as an A record (a hit means the IP is listed).
RBL_PROVIDERS = [
    "zen.spamhaus.org",
    "bl.spamcop.net",
    "b.barracudacentral.org",
    "dnsbl.sorbs.net",
    "ips.backscatterer.org",
]
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def resolve_domain(domain):
    """Resolve *domain* to its first IPv4 address via Google DNS-over-HTTPS.

    Prints progress to stdout.  Returns the address as a string, or None
    when no A record exists or the lookup fails.
    """
    print(f"[*] Resolving IP for: {domain}...", end=" ", flush=True)
    url = f"https://dns.google/resolve?name={domain}&type=A"
    try:
        with urllib.request.urlopen(url) as response:
            payload = json.loads(response.read().decode())
        # Type 1 == A record in DNS wire format; skip CNAMEs and friends.
        for entry in payload.get("Answer", []):
            if entry["type"] == 1:
                address = entry["data"]
                print(f"Found {address}")
                return address
        print("\n[!] Error: No A record found.")
        return None
    except Exception as e:
        print(f"\n[!] DNS Lookup Error: {e}")
        return None
|
40
|
+
|
|
41
|
+
|
|
42
|
+
def check_rbl(ip_address, rbl_domain):
    """Query a single DNSBL for *ip_address* (IPv4).

    Returns the listing data string when listed, None when clean, or an
    "Error: ..." string on lookup failure.
    """
    try:
        flipped = ".".join(reversed(ip_address.split(".")))
        query = f"{flipped}.{rbl_domain}"
        url = f"https://dns.google/resolve?name={query}&type=A"
        with urllib.request.urlopen(url) as response:
            payload = json.loads(response.read().decode())
        if "Answer" in payload:
            # Any answer at all means the IP is on this blacklist.
            return payload["Answer"][0]["data"]
        return None
    except Exception as e:
        return f"Error: {e}"
|
54
|
+
|
|
55
|
+
|
|
56
|
+
def run_check(target_input):
    """Orchestrates the check logic so other scripts can call it."""
    try:
        # Accept a literal IP as-is ...
        ipaddress.ip_address(target_input)
        target_ip = target_input
    except ValueError:
        # ... otherwise treat the input as a hostname and resolve it first.
        target_ip = resolve_domain(target_input)
    if not target_ip:
        return

    ui.print_sub_header(f"Blacklist Status for: {target_ip}")
    divider = "-" * 60
    print(divider)
    print(f"{'RBL Provider':<30} | {'Status':<10}")
    print(divider)

    issues = 0
    for rbl in RBL_PROVIDERS:
        res = check_rbl(target_ip, rbl)
        if res is None:
            print(f"{rbl:<30} | ✅ Clean")
        elif str(res).startswith("Error"):
            print(f"{rbl:<30} | ⚠️ {res}")
        else:
            print(f"{rbl:<30} | ❌ LISTED ({res})")
            issues += 1
    print(divider)

    if issues == 0:
        ui.print_success("Great! This IP is not listed on the checked RBLs.")
    else:
        ui.print_warning(f"This IP is listed on {issues} blacklists.")
|
87
|
+
|
|
88
|
+
|
|
89
|
+
def main():
    """CLI entry point: parse the target argument and run the RBL check."""
    arg_parser = argparse.ArgumentParser(description="Check RBL status.")
    arg_parser.add_argument("target", help="IP address or Domain")
    run_check(arg_parser.parse_args().target)


if __name__ == "__main__":
    main()
mailops/cli.py
ADDED
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""MailOps CLI - Email Operations Toolkit"""
|
|
3
|
+
|
|
4
|
+
import argparse
|
|
5
|
+
import glob
|
|
6
|
+
import os
|
|
7
|
+
import sys
|
|
8
|
+
|
|
9
|
+
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
|
10
|
+
|
|
11
|
+
from mailops.dkim_gen import generate_keys
|
|
12
|
+
from mailops.dmarc_parser import parse_dmarc_xml
|
|
13
|
+
from mailops.imap_fetcher import fetch_reports
|
|
14
|
+
from mailops.spf_check import fetch_spf_record
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
def _build_parser() -> argparse.ArgumentParser:
    """Construct the top-level argument parser with all sub-commands."""
    parser = argparse.ArgumentParser(
        description="MailOps - Email Operations Toolkit ✅",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
🚀 FULL PRODUCTION WORKFLOW:
1. mailops fetch --user you@gmail.com --pass app-password --days 7
2. mailops report --alerts
3. mailops spf yourdomain.com
4. mailops dkim yourdomain.com
""",
    )

    subparsers = parser.add_subparsers(dest="command", help="Commands")

    # fetch: pull DMARC aggregate reports from an IMAP mailbox
    fetch_parser = subparsers.add_parser("fetch", help="Fetch DMARC reports from IMAP")
    # NOTE(review): --days is accepted but never forwarded to fetch_reports()
    # (its signature has no days parameter) — confirm intended behavior.
    fetch_parser.add_argument("--days", type=int, default=7, help="Days back")
    fetch_parser.add_argument("--user", required=True, help="IMAP username")
    fetch_parser.add_argument("--password", required=True, help="IMAP password")
    fetch_parser.add_argument("--server", default="imap.gmail.com", help="IMAP server")

    # report: analyze already-downloaded XML reports
    report_parser = subparsers.add_parser("report", help="Analyze DMARC reports")
    report_parser.add_argument("--alerts", action="store_true", help="Show only failures")
    report_parser.add_argument("--csv", help="Export to CSV")

    # dkim: generate a key pair for a domain
    dkim_parser = subparsers.add_parser("dkim", help="Generate DKIM keys")
    dkim_parser.add_argument("domain", help="Domain name")
    dkim_parser.add_argument("--selector", default="default", help="DKIM selector")

    # spf: look up and display a domain's SPF record
    spf_parser = subparsers.add_parser("spf", help="Check SPF records")
    spf_parser.add_argument("domain", help="Domain to check")

    return parser


def main() -> None:
    """Dispatch the selected sub-command; print help when none is given."""
    parser = _build_parser()
    args = parser.parse_args()

    if not args.command:
        parser.print_help()
        return

    try:
        if args.command == "fetch":
            print("📥 Fetching REAL DMARC reports...")
            print(f" 👤 {args.user} | 📧 {args.server} | 📅 {args.days} days")
            fetch_reports(args.user, args.password, args.server)
            print("✅ Reports downloaded! Run 'mailops report'")

        elif args.command == "report":
            print("📊 Analyzing REAL DMARC reports...")
            # Look both in the working directory and a reports/ subfolder.
            xml_files = glob.glob("*.xml") + glob.glob("reports/*.xml")
            if xml_files:
                print(f"Found {len(xml_files)} XML files:")
                for xml_file in xml_files:
                    print(f" 📄 {xml_file}")
                    parse_dmarc_xml(xml_file)
            else:
                print("❌ No XML files found. Run 'mailops fetch' first!")

        elif args.command == "dkim":
            print(f"🔑 Generating DKIM keys for {args.domain}...")
            generate_keys(args.selector)
            print("✅ DKIM keys generated!")

        elif args.command == "spf":
            fetch_spf_record(args.domain)

    except Exception as e:
        print(f"❌ Error: {e}")
        sys.exit(1)


if __name__ == "__main__":
    main()
mailops/dkim_gen.py
ADDED
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import sys
|
|
3
|
+
|
|
4
|
+
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
|
5
|
+
import argparse
|
|
6
|
+
import os
|
|
7
|
+
import shutil
|
|
8
|
+
import subprocess
|
|
9
|
+
import sys
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
def check_openssl():
    """Abort the process if the `openssl` binary is not on PATH."""
    if shutil.which("openssl") is None:
        print("Error: 'openssl' command not found.")
        sys.exit(1)
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def generate_keys(selector, output_dir="."):
    """Create a 2048-bit RSA DKIM key pair with OpenSSL.

    Writes `<selector>.private` into *output_dir* and returns the public key
    as bare base64 (PEM armor lines stripped) ready for a DNS TXT record.
    Exits the process on any OpenSSL failure.
    """
    priv_filename = os.path.join(output_dir, f"{selector}.private")
    print(f"[*] Generating key for '{selector}'...")

    try:
        # NOTE(review): private key file is created with default permissions;
        # consider tightening to 0600 after generation.
        subprocess.run(
            ["openssl", "genrsa", "-out", priv_filename, "2048"],
            check=True,
            stderr=subprocess.DEVNULL,
        )
        exported = subprocess.run(
            ["openssl", "rsa", "-in", priv_filename, "-pubout", "-outform", "PEM"],
            check=True,
            capture_output=True,
            text=True,
        )
    except subprocess.CalledProcessError as e:
        print(f"OpenSSL Error: {e}")
        sys.exit(1)

    # Drop the "-----BEGIN/END PUBLIC KEY-----" armor, keeping pure base64.
    clean_key = "".join(
        line for line in exported.stdout.splitlines() if "-----" not in line
    )
    print(f"✅ Saved private key to: {priv_filename}")
    return clean_key
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def generate_and_print(selector, domain):
    """Generate a DKIM key pair and print the DNS TXT record to publish.

    *domain* is currently unused (kept for interface compatibility) —
    the TXT host is selector-relative: `<selector>._domainkey`.
    """
    check_openssl()
    pub_key = generate_keys(selector)
    record = f"v=DKIM1; k=rsa; p={pub_key}"

    banner = "=" * 60
    print("\n" + banner)
    print("DNS TXT RECORD TO ADD")
    print(banner)
    print(f"Host: {selector}._domainkey")
    print(f"Value: {record}")
    print(banner)
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def main():
    """CLI entry point: parse selector/domain and emit keys plus DNS record."""
    arg_parser = argparse.ArgumentParser(description="Generate DKIM keys.")
    arg_parser.add_argument("selector", help="DKIM selector")
    arg_parser.add_argument("--domain", default="yourdomain.com")
    parsed = arg_parser.parse_args()
    generate_and_print(parsed.selector, parsed.domain)


if __name__ == "__main__":
    main()
|
mailops/dmarc_parser.py
ADDED
|
@@ -0,0 +1,189 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import sys
|
|
3
|
+
|
|
4
|
+
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
|
5
|
+
# mailops/dmarc_parser.py
|
|
6
|
+
import csv
|
|
7
|
+
import gzip
|
|
8
|
+
import os
|
|
9
|
+
import socket
|
|
10
|
+
import xml.etree.ElementTree as ET
|
|
11
|
+
import zipfile
|
|
12
|
+
from datetime import datetime
|
|
13
|
+
|
|
14
|
+
from . import ui # Import the new UI module
|
|
15
|
+
|
|
16
|
+
IP_CACHE: dict[str, str] = {}  # reverse-DNS memo: ip -> hostname (or "Unknown")


def resolve_ip(ip_address):
    """Reverse-resolve *ip_address* to a hostname, memoizing results.

    Failures (no PTR record, timeout, bad input) are cached as "Unknown"
    so each address is only ever looked up once per run.
    """
    cached = IP_CACHE.get(ip_address)
    if cached is not None:
        return cached

    try:
        # NOTE(review): this sets the *process-wide* default socket timeout,
        # not just for this lookup — confirm other callers tolerate that.
        socket.setdefaulttimeout(2)
        hostname = socket.gethostbyaddr(ip_address)[0]
    except Exception:
        hostname = "Unknown"
    IP_CACHE[ip_address] = hostname
    return hostname
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def analyze_record(spf, dkim, disposition):
    """Classify one DMARC row for display.

    A message counts as authenticated if either SPF or DKIM passed.
    Failures the receiver already quarantined/rejected are benign
    (spoofing was blocked); anything else warrants investigation.

    Returns:
        (action_label, ansi_color) tuple.
    """
    authenticated = spf == "pass" or dkim == "pass"
    if authenticated:
        return "OK", ui.Colors.GREEN
    if disposition in ("quarantine", "reject"):
        return "BLOCKED (Spoofing)", ui.Colors.YELLOW
    return "INVESTIGATE", ui.Colors.RED
|
47
|
+
|
|
48
|
+
|
|
49
|
+
# --- Core Logic ---
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
def parse_dmarc_xml(file_path):
    """Parse one DMARC aggregate report (.xml, .xml.gz, or .zip).

    Returns a list of per-source-IP row dicts (empty on any parse error),
    each annotated with a human-readable status and a display color.
    """
    filename = os.path.basename(file_path)
    records_data = []

    try:
        if file_path.endswith(".gz"):
            with gzip.open(file_path, "rb") as f:
                tree = ET.parse(f)
        elif file_path.endswith(".zip"):
            with zipfile.ZipFile(file_path, "r") as z:
                xml_files = [n for n in z.namelist() if n.lower().endswith(".xml")]
                if not xml_files:
                    return []
                # Aggregate reports ship one XML per archive; take the first.
                with z.open(xml_files[0]) as f:
                    tree = ET.parse(f)
        else:
            tree = ET.parse(file_path)
        root = tree.getroot()
    except Exception as e:
        # BUGFIX: error message previously contained a literal placeholder
        # instead of the offending file's name.
        ui.print_error(f"Processing '{filename}': {e}")
        return []

    org_name = root.findtext(".//org_name") or "Unknown Org"

    date_range = root.find(".//date_range")
    if date_range is not None:
        begin_ts = int(date_range.findtext("begin", 0))
        begin_date = datetime.fromtimestamp(begin_ts).strftime("%Y-%m-%d")
    else:
        begin_date = "Unknown"

    records = root.findall("record")
    if not records:
        return []

    for record in records:
        row = record.find("row")
        if row is None:
            # Malformed record: skip rather than crash on attribute access.
            continue
        source_ip = row.findtext("source_ip") or "unknown"
        count = row.findtext("count")
        # findtext() guards against reports missing <policy_evaluated>
        # (the old .find(...).text raised AttributeError in that case).
        disposition = row.findtext(".//policy_evaluated/disposition") or "none"

        spf_res = record.findtext(".//auth_results/spf/result") or "none"
        dkim_res = record.findtext(".//auth_results/dkim/result") or "none"

        hostname = resolve_ip(source_ip)
        status_msg, status_color = analyze_record(spf_res, dkim_res, disposition)

        records_data.append(
            {
                "org_name": org_name,
                "date": begin_date,
                "source_ip": source_ip,
                "hostname": hostname,
                "count": count,
                "spf": spf_res,
                "dkim": dkim_res,
                "disposition": disposition,
                "status_msg": status_msg,
                "status_color": status_color,
                "file": filename,
            }
        )

    return records_data
|
122
|
+
|
|
123
|
+
|
|
124
|
+
def print_to_console(all_data):
    """Pretty-print parsed DMARC rows, grouped by their originating file.

    Emits a fresh header whenever the source file changes; each data row
    is colored by its precomputed status_color.
    """
    if not all_data:
        ui.print_warning("No records found.")
        return

    column_fmt = "{:<20} | {:<30} | {:<5} | {:<6} | {:<6} | {:<15}"
    rule = "-" * 95
    current_file = None

    for row in all_data:
        # New source file -> print a section header for its report.
        if row["file"] != current_file:
            current_file = row["file"]
            ui.print_sub_header(f"Report: {row['org_name']} ({row['date']})")
            print(rule)
            header = column_fmt.format(
                "Source IP", "Hostname", "Cnt", "SPF", "DKIM", "Analysis"
            )
            print(ui.Colors.HEADER + header + ui.Colors.RESET)
            print(rule)

        # Truncate long hostnames so columns stay aligned.
        hostname = row["hostname"]
        host_display = hostname[:27] + ".." if len(hostname) > 29 else hostname

        line = column_fmt.format(
            row["source_ip"],
            host_display,
            row["count"],
            row["spf"],
            row["dkim"],
            row["status_msg"],
        )
        print(row["status_color"] + line + ui.Colors.RESET)
|
162
|
+
|
|
163
|
+
|
|
164
|
+
def save_to_csv(all_data, output_file):
    """Export parsed DMARC rows to *output_file* as CSV.

    The display-only `status_color` field is stripped; a no-op when
    *all_data* is empty. Errors are reported, not raised.
    """
    if not all_data:
        return

    # Drop the ANSI color field — it is meaningless outside the console.
    clean_data = [{k: v for k, v in r.items() if k != "status_color"} for r in all_data]
    headers = [
        "org_name",
        "date",
        "source_ip",
        "hostname",
        "count",
        "spf",
        "dkim",
        "disposition",
        "status_msg",
        "file",
    ]

    try:
        # BUGFIX: explicit encoding — previously used the platform default,
        # which corrupts non-ASCII hostnames/org names on some systems.
        with open(output_file, "w", newline="", encoding="utf-8") as f:
            writer = csv.DictWriter(f, fieldnames=headers)
            writer.writeheader()
            writer.writerows(clean_data)
        ui.print_success(f"Exported to {output_file}")
    except Exception as e:
        ui.print_error(f"CSV Error: {e}")
mailops/imap_fetcher.py
ADDED
|
@@ -0,0 +1,190 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import sys
|
|
3
|
+
|
|
4
|
+
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
|
5
|
+
import argparse
|
|
6
|
+
import email
|
|
7
|
+
import getpass
|
|
8
|
+
import imaplib
|
|
9
|
+
import os
|
|
10
|
+
import sys
|
|
11
|
+
from email.header import decode_header
|
|
12
|
+
|
|
13
|
+
from . import ui # Integrate with your new UI system
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def clean_filename(filename):
    """Strip every character that is not alphanumeric, '.', '_' or '-'.

    Defends against path traversal via attachment names ('/' and '\\'
    are removed). Returns None for a missing or empty name.
    """
    if not filename:
        return None
    safe_extra = "._-"
    return "".join(ch for ch in filename if ch.isalnum() or ch in safe_extra)
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def get_safe_date(msg):
    """Extract the message's Date header formatted as "YYYY-MM-DD".

    Falls back to "unknown_date" when the header is missing or unparsable
    (used as a per-day subdirectory name for saved reports).
    """
    date_str = msg.get("Date")
    if date_str:
        try:
            date_obj = email.utils.parsedate_to_datetime(date_str)
            return date_obj.strftime("%Y-%m-%d")
        # BUGFIX: was a bare `except:` — that also swallowed SystemExit and
        # KeyboardInterrupt. Only catch the parse failures we expect.
        except (TypeError, ValueError, AttributeError):
            pass
    return "unknown_date"
|
34
|
+
|
|
35
|
+
|
|
36
|
+
def safe_decode(value):
    """Coerce *value* to str, silently dropping undecodable bytes."""
    if isinstance(value, bytes):
        return value.decode("utf-8", errors="ignore")
    return str(value)
|
41
|
+
|
|
42
|
+
|
|
43
|
+
def decode_header_safe(header_val):
    """Decode a possibly RFC-2047-encoded header into a plain string.

    Returns None for a missing/empty header.  Never raises: on any
    unexpected failure the raw value is returned stringified.
    """
    if not header_val:
        return None
    try:
        pieces = []
        for content, charset in decode_header(header_val):
            if not isinstance(content, bytes):
                pieces.append(str(content))
                continue
            codec = charset or "utf-8"
            try:
                pieces.append(content.decode(codec, errors="ignore"))
            except LookupError:
                # Unknown charset label in the header — fall back to UTF-8.
                pieces.append(content.decode("utf-8", errors="ignore"))
        return "".join(pieces)
    except Exception:
        return str(header_val)
|
65
|
+
|
|
66
|
+
|
|
67
|
+
def fetch_reports(username, password, server, folder="INBOX"):
    """Download DMARC aggregate-report attachments from an IMAP mailbox.

    Connects to *server* over SSL, logs in, searches *folder* for DMARC
    report subjects, and saves each .xml/.gz/.zip attachment under
    dmarc_reports/<YYYY-MM-DD>/ (date taken from the email's Date header).
    Existing files are never overwritten.  Prints progress via `ui`.
    """
    ui.print_info(f"Connecting to {server}...")

    try:
        mail = imaplib.IMAP4_SSL(server)
        mail.login(username, password)
    except Exception as e:
        ui.print_error(f"Login Failed: {e}")
        return

    ui.print_info("Login successful. Searching for DMARC reports...")
    mail.select(folder)

    # Most providers put either of these phrases in DMARC report subjects.
    search_criteria = '(OR SUBJECT "Report Domain" SUBJECT "DMARC Aggregate Report")'
    status, messages = mail.search(None, search_criteria)

    if status != "OK" or not messages[0]:
        ui.print_warning("No DMARC reports found in INBOX.")
        # NOTE(review): the IMAP connection is left open on this early
        # return — consider a logout here.
        return

    email_ids = messages[0].split()
    ui.print_info(f"Found {len(email_ids)} potential report emails. Processing...")

    count = 0

    for e_id in email_ids:
        try:
            res, msg_data = mail.fetch(e_id, "(BODY[])")
            if res != "OK":
                continue

            # The message body arrives as the 2nd element of a tuple part.
            raw_email = None
            for response_part in msg_data:
                if isinstance(response_part, tuple):
                    raw_email = response_part[1]
                    break
            if raw_email is None:
                continue

            msg = email.message_from_bytes(raw_email)
            folder_date = get_safe_date(msg)
            subject = decode_header_safe(msg.get("Subject", "Unknown Subject"))

            # Walk MIME parts looking for report attachments.
            for part in msg.walk():
                if part.get_content_maintype() == "multipart":
                    continue

                content_disposition = part.get("Content-Disposition", "")
                is_attachment = "attachment" in content_disposition.lower()

                filename = decode_header_safe(part.get_filename())
                content_type = part.get_content_type()

                # A DMARC report looks like an XML, GZIP, or ZIP payload.
                valid_extension = filename and filename.lower().endswith(
                    (".xml", ".gz", ".zip")
                )
                valid_mime = any(x in content_type for x in ["gzip", "zip", "xml"])

                if is_attachment or valid_extension or valid_mime:
                    if not filename:
                        # No filename supplied: synthesize one from the subject.
                        ext = ".xml"
                        if "gzip" in content_type:
                            ext = ".gz"
                        elif "zip" in content_type:
                            ext = ".zip"
                        safe_subj = clean_filename(subject)
                        filename = f"dmarc_report_{safe_subj}{ext}"

                    filename = clean_filename(filename)
                    if not filename:
                        continue

                    save_dir = os.path.join("dmarc_reports", folder_date)
                    os.makedirs(save_dir, exist_ok=True)
                    filepath = os.path.join(save_dir, filename)

                    if not os.path.exists(filepath):
                        payload = part.get_payload(decode=True)
                        if payload:
                            with open(filepath, "wb") as f:
                                f.write(payload)
                            # BUGFIX: success message previously printed a
                            # literal placeholder instead of the file name.
                            ui.print_success(f"Saved: {folder_date}/{filename}")
                            count += 1

        except Exception as e:
            ui.print_error(f"Processing email ID {e_id}: {e}")
            continue

    mail.close()
    mail.logout()
    print("-" * 60)
    ui.print_success(f"Download complete. Saved {count} new reports.")
    ui.print_info(f"Location: {os.path.abspath('dmarc_reports')}")
|
171
|
+
|
|
172
|
+
|
|
173
|
+
def main():
    """CLI entry point: parse arguments, prompt for the IMAP password,
    and kick off the DMARC report download."""
    arg_parser = argparse.ArgumentParser(description="Download DMARC reports from IMAP.")
    arg_parser.add_argument("--email", required=True, help="Your email address")
    arg_parser.add_argument("--server", default="imap.mail.me.com", help="IMAP Server")
    options = arg_parser.parse_args()

    # Prompt interactively so the password never lands in shell history.
    print(f"Enter IMAP Password for {options.email}")
    try:
        secret = getpass.getpass("> ")
    except KeyboardInterrupt:
        # Ctrl-C at the password prompt is a normal way to bail out.
        sys.exit(0)

    fetch_reports(options.email, secret, options.server)
|
|
187
|
+
|
|
188
|
+
|
|
189
|
+
# Allow this module to be executed directly as a script.
if __name__ == "__main__":
    main()
|
mailops/spf_check.py
ADDED
|
@@ -0,0 +1,108 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import sys
|
|
3
|
+
|
|
4
|
+
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
|
5
|
+
# mailops/spf_check.py
|
|
6
|
+
import argparse
|
|
7
|
+
import json
|
|
8
|
+
import urllib.request
|
|
9
|
+
|
|
10
|
+
from . import ui # Import the new UI module
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def fetch_spf_record(domain):
    """
    Fetch the SPF TXT record for *domain* using Google's DNS-over-HTTPS API.

    Args:
        domain: Domain name to query (e.g. "example.com").

    Returns:
        The first SPF record string (starts with "v=spf1"), or None when
        the lookup fails, no TXT records exist, or no SPF record is found.
    """
    ui.print_info(f"Fetching SPF record for '{domain}'...")

    url = f"https://dns.google/resolve?name={domain}&type=TXT"

    try:
        # Explicit timeout keeps the CLI from hanging forever on a dead network.
        with urllib.request.urlopen(url, timeout=10) as response:
            data = json.loads(response.read().decode())

        if "Answer" not in data:
            ui.print_warning(f"No TXT records found for {domain}.")
            return None

        spf_records = []
        for answer in data["Answer"]:
            # DoH returns TXT data quoted, possibly split into multiple
            # quoted chunks; normalize to one unquoted string.
            txt_data = answer["data"].strip('"').replace('" "', "")
            if txt_data.startswith("v=spf1"):
                spf_records.append(txt_data)

        if not spf_records:
            ui.print_warning(f"No SPF record found for {domain}.")
            return None

        if len(spf_records) > 1:
            # RFC 7208 (section 3.2): publishing multiple SPF records is invalid.
            ui.print_warning("Multiple SPF records found! This is invalid.")
            for r in spf_records:
                print(f" - {r}")

        # Analyze the first record even when invalid duplicates exist.
        return spf_records[0]

    except Exception as e:
        ui.print_error(f"Fetching DNS: {e}")
        return None
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
def analyze_spf(spf_string):
    """
    Analyze an SPF record string for syntax errors and security best practices.

    Prints a human-readable report (critical issues in red, warnings in
    yellow); returns None.

    Args:
        spf_string: The raw SPF TXT record, e.g. "v=spf1 include:... -all".
    """
    ui.print_sub_header(f"Analysis for: {spf_string}")
    issues = []
    warnings = []

    tokens = spf_string.split()

    # Guard: an empty/whitespace-only record would crash the tokens[-1]
    # checks below with an IndexError.
    if not tokens:
        ui.print_error("Empty SPF record; nothing to analyze.")
        return

    # 1. Basic Syntax
    if not spf_string.startswith("v=spf1"):
        issues.append("Record does not start with 'v=spf1'")

    # 2. Lookup Counting (Approximation)
    # Mechanisms that each cost a DNS lookup (RFC 7208 section 4.6.4).
    lookup_mechanisms = ["include:", "a:", "mx:", "ptr:", "exists:", "redirect="]
    lookup_count = 0

    for token in tokens:
        for mech in lookup_mechanisms:
            if token.startswith(mech):
                lookup_count += 1
        # Bare "a" / "mx" (no argument) also trigger a lookup.
        if token == "a" or token == "mx":
            lookup_count += 1

    print(f"[*] DNS Lookup Count (Approx): {lookup_count}/10")
    if lookup_count > 10:
        issues.append(f"Too many DNS lookups ({lookup_count}). Limit is 10 (RFC 7208).")

    # 3. Security Checks
    if "+all" in tokens:
        issues.append(
            "Usage of '+all' allows the entire internet to spoof your domain."
        )
    elif "?all" in tokens:
        warnings.append("Usage of '?all' (Neutral) provides no protection.")
    elif not (
        tokens[-1].endswith("-all")
        or tokens[-1].endswith("~all")
        or "redirect=" in tokens[-1]
    ):
        issues.append("Record does not end with a strict policy ('-all' or '~all').")

    # Match the "ptr" mechanism per-token (allowing qualifier prefixes) so a
    # domain merely containing the letters "ptr" does not false-positive.
    if any(
        t.lstrip("+-~?") == "ptr" or t.lstrip("+-~?").startswith("ptr:")
        for t in tokens
    ):
        warnings.append("The 'ptr' mechanism is deprecated and should not be used.")

    # Report
    if not issues and not warnings:
        ui.print_success("Status: Valid & Secure")
    else:
        if issues:
            print(f"{ui.Colors.RED}❌ Critical Issues:{ui.Colors.RESET}")
            for i in issues:
                print(f" - {i}")
        if warnings:
            print(f"{ui.Colors.YELLOW}⚠️ Warnings:{ui.Colors.RESET}")
            for w in warnings:
                print(f" - {w}")
|
mailops/ui.py
ADDED
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import sys
|
|
3
|
+
|
|
4
|
+
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
|
5
|
+
# mailops/ui.py
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class Colors:
    """ANSI escape sequences used to style terminal output."""

    # \x1b is the same ESC byte as \033; the values are unchanged.
    HEADER = "\x1b[95m"
    BLUE = "\x1b[94m"
    GREEN = "\x1b[92m"
    YELLOW = "\x1b[93m"
    RED = "\x1b[91m"
    RESET = "\x1b[0m"
    BOLD = "\x1b[1m"
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def print_header(text):
    """Print a bold, colorful top-level header section."""
    banner = f"=== {text} ==="
    print(f"\n{Colors.HEADER}{Colors.BOLD}{banner}{Colors.RESET}")
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def print_sub_header(text):
    """Print a sub-header (e.g., for individual reports)."""
    framed = f"--- {text} ---"
    print(f"\n{Colors.BOLD}{framed}{Colors.RESET}")
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def print_error(text):
    """Print an error message in red."""
    message = f"[!] Error: {text}"
    print(f"{Colors.RED}{message}{Colors.RESET}")
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def print_warning(text):
    """Print a warning message in yellow."""
    message = f"[!] Warning: {text}"
    print(f"{Colors.YELLOW}{message}{Colors.RESET}")
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def print_success(text):
    """Print a success message in green."""
    message = f"[+] {text}"
    print(f"{Colors.GREEN}{message}{Colors.RESET}")
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
def print_info(text):
    """Print a general info message in blue."""
    message = f"[*] {text}"
    print(f"{Colors.BLUE}{message}{Colors.RESET}")
|