bbot 2.3.0.5546rc0__py3-none-any.whl → 2.3.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of bbot might be problematic.
Files changed (116)
  1. bbot/__init__.py +1 -1
  2. bbot/cli.py +1 -1
  3. bbot/core/engine.py +1 -1
  4. bbot/core/event/base.py +7 -5
  5. bbot/core/helpers/async_helpers.py +7 -1
  6. bbot/core/helpers/depsinstaller/installer.py +7 -2
  7. bbot/core/helpers/diff.py +13 -4
  8. bbot/core/helpers/dns/brute.py +8 -2
  9. bbot/core/helpers/dns/engine.py +3 -2
  10. bbot/core/helpers/ratelimiter.py +8 -2
  11. bbot/core/helpers/regexes.py +5 -2
  12. bbot/core/helpers/web/engine.py +1 -1
  13. bbot/core/helpers/web/web.py +1 -1
  14. bbot/core/shared_deps.py +14 -0
  15. bbot/defaults.yml +44 -0
  16. bbot/modules/ajaxpro.py +64 -37
  17. bbot/modules/baddns.py +23 -15
  18. bbot/modules/baddns_direct.py +2 -2
  19. bbot/modules/badsecrets.py +2 -2
  20. bbot/modules/base.py +49 -15
  21. bbot/modules/censys.py +1 -1
  22. bbot/modules/deadly/dastardly.py +3 -3
  23. bbot/modules/deadly/nuclei.py +1 -1
  24. bbot/modules/dehashed.py +2 -2
  25. bbot/modules/dnsbrute_mutations.py +3 -1
  26. bbot/modules/docker_pull.py +1 -1
  27. bbot/modules/dockerhub.py +2 -2
  28. bbot/modules/dotnetnuke.py +12 -12
  29. bbot/modules/extractous.py +1 -1
  30. bbot/modules/ffuf_shortnames.py +107 -48
  31. bbot/modules/filedownload.py +6 -0
  32. bbot/modules/generic_ssrf.py +54 -40
  33. bbot/modules/github_codesearch.py +2 -2
  34. bbot/modules/github_org.py +16 -20
  35. bbot/modules/github_workflows.py +6 -2
  36. bbot/modules/gowitness.py +6 -0
  37. bbot/modules/hunt.py +1 -1
  38. bbot/modules/hunterio.py +1 -1
  39. bbot/modules/iis_shortnames.py +23 -7
  40. bbot/modules/internal/excavate.py +5 -3
  41. bbot/modules/internal/unarchive.py +82 -0
  42. bbot/modules/jadx.py +2 -2
  43. bbot/modules/output/asset_inventory.py +1 -1
  44. bbot/modules/output/base.py +1 -1
  45. bbot/modules/output/discord.py +2 -1
  46. bbot/modules/output/slack.py +2 -1
  47. bbot/modules/output/teams.py +10 -25
  48. bbot/modules/output/web_parameters.py +55 -0
  49. bbot/modules/paramminer_headers.py +15 -10
  50. bbot/modules/portfilter.py +41 -0
  51. bbot/modules/portscan.py +1 -22
  52. bbot/modules/postman.py +61 -43
  53. bbot/modules/postman_download.py +10 -147
  54. bbot/modules/sitedossier.py +1 -1
  55. bbot/modules/skymem.py +1 -1
  56. bbot/modules/templates/postman.py +163 -1
  57. bbot/modules/templates/subdomain_enum.py +1 -1
  58. bbot/modules/templates/webhook.py +17 -26
  59. bbot/modules/trufflehog.py +3 -3
  60. bbot/modules/wappalyzer.py +1 -1
  61. bbot/modules/zoomeye.py +1 -1
  62. bbot/presets/kitchen-sink.yml +1 -1
  63. bbot/presets/nuclei/nuclei-budget.yml +19 -0
  64. bbot/presets/nuclei/nuclei-intense.yml +28 -0
  65. bbot/presets/nuclei/nuclei-technology.yml +23 -0
  66. bbot/presets/nuclei/nuclei.yml +34 -0
  67. bbot/presets/spider-intense.yml +13 -0
  68. bbot/scanner/preset/args.py +29 -3
  69. bbot/scanner/preset/preset.py +43 -24
  70. bbot/scanner/scanner.py +17 -7
  71. bbot/test/bbot_fixtures.py +7 -7
  72. bbot/test/test_step_1/test_bloom_filter.py +2 -2
  73. bbot/test/test_step_1/test_cli.py +5 -5
  74. bbot/test/test_step_1/test_dns.py +33 -0
  75. bbot/test/test_step_1/test_events.py +15 -5
  76. bbot/test/test_step_1/test_modules_basic.py +21 -21
  77. bbot/test/test_step_1/test_presets.py +94 -4
  78. bbot/test/test_step_1/test_regexes.py +13 -13
  79. bbot/test/test_step_1/test_scan.py +78 -0
  80. bbot/test/test_step_1/test_web.py +4 -4
  81. bbot/test/test_step_2/module_tests/test_module_ajaxpro.py +43 -23
  82. bbot/test/test_step_2/module_tests/test_module_azure_realm.py +3 -3
  83. bbot/test/test_step_2/module_tests/test_module_baddns.py +3 -3
  84. bbot/test/test_step_2/module_tests/test_module_bucket_amazon.py +6 -6
  85. bbot/test/test_step_2/module_tests/test_module_bufferoverrun.py +3 -3
  86. bbot/test/test_step_2/module_tests/test_module_cloudcheck.py +3 -3
  87. bbot/test/test_step_2/module_tests/test_module_dnsbimi.py +3 -3
  88. bbot/test/test_step_2/module_tests/test_module_dnscaa.py +6 -6
  89. bbot/test/test_step_2/module_tests/test_module_dnscommonsrv.py +9 -9
  90. bbot/test/test_step_2/module_tests/test_module_dnstlsrpt.py +12 -12
  91. bbot/test/test_step_2/module_tests/test_module_excavate.py +15 -15
  92. bbot/test/test_step_2/module_tests/test_module_extractous.py +3 -3
  93. bbot/test/test_step_2/module_tests/test_module_ffuf_shortnames.py +8 -8
  94. bbot/test/test_step_2/module_tests/test_module_generic_ssrf.py +3 -1
  95. bbot/test/test_step_2/module_tests/test_module_github_codesearch.py +3 -3
  96. bbot/test/test_step_2/module_tests/test_module_gowitness.py +9 -9
  97. bbot/test/test_step_2/module_tests/test_module_iis_shortnames.py +1 -1
  98. bbot/test/test_step_2/module_tests/test_module_paramminer_getparams.py +35 -1
  99. bbot/test/test_step_2/module_tests/test_module_paramminer_headers.py +3 -3
  100. bbot/test/test_step_2/module_tests/test_module_portfilter.py +48 -0
  101. bbot/test/test_step_2/module_tests/test_module_postman.py +338 -3
  102. bbot/test/test_step_2/module_tests/test_module_postman_download.py +4 -161
  103. bbot/test/test_step_2/module_tests/test_module_securitytxt.py +12 -12
  104. bbot/test/test_step_2/module_tests/test_module_teams.py +10 -1
  105. bbot/test/test_step_2/module_tests/test_module_trufflehog.py +1 -1
  106. bbot/test/test_step_2/module_tests/test_module_unarchive.py +229 -0
  107. bbot/test/test_step_2/module_tests/test_module_viewdns.py +3 -3
  108. bbot/test/test_step_2/module_tests/test_module_web_parameters.py +59 -0
  109. bbot/test/test_step_2/module_tests/test_module_websocket.py +5 -4
  110. {bbot-2.3.0.5546rc0.dist-info → bbot-2.3.1.dist-info}/METADATA +7 -7
  111. {bbot-2.3.0.5546rc0.dist-info → bbot-2.3.1.dist-info}/RECORD +115 -105
  112. {bbot-2.3.0.5546rc0.dist-info → bbot-2.3.1.dist-info}/WHEEL +1 -1
  113. bbot/wordlists/ffuf_shortname_candidates.txt +0 -107982
  114. /bbot/presets/{baddns-thorough.yml → baddns-intense.yml} +0 -0
  115. {bbot-2.3.0.5546rc0.dist-info → bbot-2.3.1.dist-info}/LICENSE +0 -0
  116. {bbot-2.3.0.5546rc0.dist-info → bbot-2.3.1.dist-info}/entry_points.txt +0 -0
bbot/test/test_step_2/module_tests/test_module_teams.py
@@ -7,7 +7,7 @@ class TestTeams(DiscordBase):
     modules_overrides = ["teams", "excavate", "badsecrets", "httpx"]
 
     webhook_url = "https://evilcorp.webhook.office.com/webhookb2/deadbeef@deadbeef/IncomingWebhook/deadbeef/deadbeef"
-    config_overrides = {"modules": {"teams": {"webhook_url": webhook_url}}}
+    config_overrides = {"modules": {"teams": {"webhook_url": webhook_url, "retries": 5}}}
 
     async def setup_after_prep(self, module_test):
         self.custom_setup(module_test)
@@ -15,6 +15,8 @@ class TestTeams(DiscordBase):
         def custom_response(request: httpx.Request):
             module_test.request_count += 1
             if module_test.request_count == 2:
+                return httpx.Response(status_code=429, headers={"Retry-After": "0.01"})
+            elif module_test.request_count == 3:
                 return httpx.Response(
                     status_code=400,
                     json={
@@ -28,3 +30,10 @@ class TestTeams(DiscordBase):
            return httpx.Response(status_code=200)
 
         module_test.httpx_mock.add_callback(custom_response, url=self.webhook_url)
+
+    def check(self, module_test, events):
+        vulns = [e for e in events if e.type == "VULNERABILITY"]
+        findings = [e for e in events if e.type == "FINDING"]
+        assert len(findings) == 1
+        assert len(vulns) == 2
+        assert module_test.request_count == 5
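The updated teams test above now feeds the webhook a 429 response with a Retry-After header and asserts that five requests were made in total, i.e. rate-limited deliveries are expected to be retried (the shared webhook template, bbot/modules/templates/webhook.py, changed +17 -26 in this release). Below is a minimal, illustrative sketch of that retry-on-429 pattern; the function name deliver_webhook and the retries default are assumptions, not the actual template code:

# Illustrative retry-on-429 sketch, not the actual bbot webhook template logic.
import asyncio
import httpx


async def deliver_webhook(client: httpx.AsyncClient, url: str, payload: dict, retries: int = 5) -> bool:
    for _attempt in range(retries + 1):
        response = await client.post(url, json=payload)
        if response.status_code == 429:
            # Honor the server's Retry-After hint (in seconds) before retrying
            delay = float(response.headers.get("Retry-After", "1"))
            await asyncio.sleep(delay)
            continue
        return response.status_code < 400
    return False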
bbot/test/test_step_2/module_tests/test_module_trufflehog.py
@@ -848,7 +848,7 @@ class TestTrufflehog(ModuleTestBase):
     async def setup_after_prep(self, module_test):
         module_test.httpx_mock.add_response(
             url="https://www.postman.com/_api/ws/proxy",
-            match_content=b'{"service": "search", "method": "POST", "path": "/search-all", "body": {"queryIndices": ["collaboration.workspace"], "queryText": "blacklanternsecurity", "size": 100, "from": 0, "clientTraceId": "", "requestOrigin": "srp", "mergeEntities": "true", "nonNestedRequests": "true", "domain": "public"}}',
+            match_content=b'{"service": "search", "method": "POST", "path": "/search-all", "body": {"queryIndices": ["collaboration.workspace"], "queryText": "blacklanternsecurity", "size": 25, "from": 0, "clientTraceId": "", "requestOrigin": "srp", "mergeEntities": "true", "nonNestedRequests": "true", "domain": "public"}}',
             json={
                 "data": [
                     {
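The mocked Postman proxy request above now matches a page size of 25 instead of 100, likely reflecting paginated workspace search in the reworked postman template (bbot/modules/templates/postman.py, +163 -1 in this release). A minimal sketch of building such a request body is shown below; only the URL and body fields mirror the mocked request, while the helper name and the page-based "from" calculation are assumptions:

# Illustrative builder for the Postman "search-all" proxy payload; the helper
# name and pagination arithmetic are assumptions for demonstration only.
def postman_search_payload(query: str, page: int = 0, page_size: int = 25) -> dict:
    return {
        "service": "search",
        "method": "POST",
        "path": "/search-all",
        "body": {
            "queryIndices": ["collaboration.workspace"],
            "queryText": query,
            "size": page_size,
            "from": page * page_size,
            "clientTraceId": "",
            "requestOrigin": "srp",
            "mergeEntities": "true",
            "nonNestedRequests": "true",
            "domain": "public",
        },
    }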
bbot/test/test_step_2/module_tests/test_module_unarchive.py
@@ -0,0 +1,229 @@
+import asyncio
+
+from pathlib import Path
+from .base import ModuleTestBase
+
+
+class TestUnarchive(ModuleTestBase):
+    targets = ["http://127.0.0.1:8888"]
+    modules_overrides = ["filedownload", "httpx", "excavate", "speculate", "unarchive"]
+
+    async def setup_after_prep(self, module_test):
+        temp_path = Path("/tmp/.bbot_test")
+
+        # Create a text file to compress
+        text_file = temp_path / "test.txt"
+        with open(text_file, "w") as f:
+            f.write("This is a test file")
+        zip_file = temp_path / "test.zip"
+        zip_zip_file = temp_path / "test_zip.zip"
+        bz2_file = temp_path / "test.bz2"
+        xz_file = temp_path / "test.xz"
+        zip7_file = temp_path / "test.7z"
+        # lzma_file = temp_path / "test.lzma"
+        tar_file = temp_path / "test.tar"
+        tgz_file = temp_path / "test.tgz"
+        commands = [
+            ("7z", "a", '-p""', "-aoa", f"{zip_file}", f"{text_file}"),
+            ("7z", "a", '-p""', "-aoa", f"{zip_zip_file}", f"{zip_file}"),
+            ("tar", "-C", f"{temp_path}", "-cvjf", f"{bz2_file}", f"{text_file.name}"),
+            ("tar", "-C", f"{temp_path}", "-cvJf", f"{xz_file}", f"{text_file.name}"),
+            ("7z", "a", '-p""', "-aoa", f"{zip7_file}", f"{text_file}"),
+            # ("tar", "-C", f"{temp_path}", "--lzma", "-cvf", f"{lzma_file}", f"{text_file.name}"),
+            ("tar", "-C", f"{temp_path}", "-cvf", f"{tar_file}", f"{text_file.name}"),
+            ("tar", "-C", f"{temp_path}", "-cvzf", f"{tgz_file}", f"{text_file.name}"),
+        ]
+
+        for command in commands:
+            process = await asyncio.create_subprocess_exec(
+                *command, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
+            )
+            stdout, stderr = await process.communicate()
+            assert process.returncode == 0, f"Command {command} failed with error: {stderr.decode()}"
+
+        module_test.set_expect_requests(
+            dict(uri="/"),
+            dict(
+                response_data="""<a href="/test.zip">
+                <a href="/test-zip.zip">
+                <a href="/test.bz2">
+                <a href="/test.xz">
+                <a href="/test.7z">
+                <a href="/test.tar">
+                <a href="/test.tgz">""",
+            ),
+        )
+        (
+            module_test.set_expect_requests(
+                dict(uri="/test.zip"),
+                dict(
+                    response_data=zip_file.read_bytes(),
+                    headers={"Content-Type": "application/zip"},
+                ),
+            ),
+        )
+        (
+            module_test.set_expect_requests(
+                dict(uri="/test-zip.zip"),
+                dict(
+                    response_data=zip_zip_file.read_bytes(),
+                    headers={"Content-Type": "application/zip"},
+                ),
+            ),
+        )
+        (
+            module_test.set_expect_requests(
+                dict(uri="/test.bz2"),
+                dict(
+                    response_data=bz2_file.read_bytes(),
+                    headers={"Content-Type": "application/x-bzip2"},
+                ),
+            ),
+        )
+        (
+            module_test.set_expect_requests(
+                dict(uri="/test.xz"),
+                dict(
+                    response_data=xz_file.read_bytes(),
+                    headers={"Content-Type": "application/x-xz"},
+                ),
+            ),
+        )
+        (
+            module_test.set_expect_requests(
+                dict(uri="/test.7z"),
+                dict(
+                    response_data=zip7_file.read_bytes(),
+                    headers={"Content-Type": "application/x-7z-compressed"},
+                ),
+            ),
+        )
+        # (
+        #     module_test.set_expect_requests(
+        #         dict(uri="/test.rar"),
+        #         dict(
+        #             response_data=b"Rar!\x1a\x07\x01\x003\x92\xb5\xe5\n\x01\x05\x06\x00\x05\x01\x01\x80\x80\x00\xa2N\x8ec&\x02\x03\x0b\x93\x00\x04\x93\x00\xa4\x83\x02\xc9\x11f\x06\x80\x00\x01\x08test.txt\n\x03\x13S\x96ug\x96\xf3\x1b\x06This is a test file\x1dwVQ\x03\x05\x04\x00",
+        #             headers={"Content-Type": "application/vnd.rar"},
+        #         ),
+        #     ),
+        # )
+        # (
+        #     module_test.set_expect_requests(
+        #         dict(uri="/test.lzma"),
+        #         dict(
+        #             response_data=lzma_file.read_bytes(),
+        #             headers={"Content-Type": "application/x-lzma"},
+        #         ),
+        #     ),
+        # )
+        (
+            module_test.set_expect_requests(
+                dict(uri="/test.tar"),
+                dict(
+                    response_data=tar_file.read_bytes(),
+                    headers={"Content-Type": "application/x-tar"},
+                ),
+            ),
+        )
+        (
+            module_test.set_expect_requests(
+                dict(uri="/test.tgz"),
+                dict(
+                    response_data=tgz_file.read_bytes(),
+                    headers={"Content-Type": "application/x-tgz"},
+                ),
+            ),
+        )
+
+    def check(self, module_test, events):
+        filesystem_events = [e for e in events if e.type == "FILESYSTEM"]
+
+        # ZIP
+        zip_file_event = [e for e in filesystem_events if "test.zip" in e.data["path"]]
+        assert 1 == len(zip_file_event), "No zip file found"
+        file = Path(zip_file_event[0].data["path"])
+        assert file.is_file(), f"File not found at {file}"
+        extract_event = [e for e in filesystem_events if "test_zip" in e.data["path"] and "folder" in e.tags]
+        assert 1 == len(extract_event), "Failed to extract zip"
+        extract_path = Path(extract_event[0].data["path"]) / "test.txt"
+        assert extract_path.is_file(), "Failed to extract the test file"
+
+        # Recursive ZIP
+        zip_zip_file_event = [e for e in filesystem_events if "test-zip.zip" in e.data["path"]]
+        assert 1 == len(zip_zip_file_event), "No recursive file found"
+        file = Path(zip_zip_file_event[0].data["path"])
+        assert file.is_file(), f"File not found at {file}"
+        extract_event = [e for e in filesystem_events if "test-zip_zip" in e.data["path"] and "folder" in e.tags]
+        assert 1 == len(extract_event), "Failed to extract zip"
+        extract_path = Path(extract_event[0].data["path"]) / "test" / "test.txt"
+        assert extract_path.is_file(), "Failed to extract the test file"
+
+        # BZ2
+        bz2_file_event = [e for e in filesystem_events if "test.bz2" in e.data["path"]]
+        assert 1 == len(bz2_file_event), "No bz2 file found"
+        file = Path(bz2_file_event[0].data["path"])
+        assert file.is_file(), f"File not found at {file}"
+        extract_event = [e for e in filesystem_events if "test_bz2" in e.data["path"] and "folder" in e.tags]
+        assert 1 == len(extract_event), "Failed to extract bz2"
+        extract_path = Path(extract_event[0].data["path"]) / "test.txt"
+        assert extract_path.is_file(), "Failed to extract the test file"
+
+        # XZ
+        xz_file_event = [e for e in filesystem_events if "test.xz" in e.data["path"]]
+        assert 1 == len(xz_file_event), "No xz file found"
+        file = Path(xz_file_event[0].data["path"])
+        assert file.is_file(), f"File not found at {file}"
+        extract_event = [e for e in filesystem_events if "test_xz" in e.data["path"] and "folder" in e.tags]
+        assert 1 == len(extract_event), "Failed to extract xz"
+        extract_path = Path(extract_event[0].data["path"]) / "test.txt"
+        assert extract_path.is_file(), "Failed to extract the test file"
+
+        # 7z
+        zip7_file_event = [e for e in filesystem_events if "test.7z" in e.data["path"]]
+        assert 1 == len(zip7_file_event), "No 7z file found"
+        file = Path(zip7_file_event[0].data["path"])
+        assert file.is_file(), f"File not found at {file}"
+        extract_event = [e for e in filesystem_events if "test_7z" in e.data["path"] and "folder" in e.tags]
+        assert 1 == len(extract_event), "Failed to extract 7z"
+        extract_path = Path(extract_event[0].data["path"]) / "test.txt"
+        assert extract_path.is_file(), "Failed to extract the test file"
+
+        # RAR
+        # rar_file_event = [e for e in filesystem_events if "test.rar" in e.data["path"]]
+        # assert 1 == len(rar_file_event), "No rar file found"
+        # file = Path(rar_file_event[0].data["path"])
+        # assert file.is_file(), f"File not found at {file}"
+        # extract_event = [e for e in filesystem_events if "test_rar" in e.data["path"] and "folder" in e.tags]
+        # assert 1 == len(extract_event), "Failed to extract rar"
+        # extract_path = Path(extract_event[0].data["path"]) / "test.txt"
+        # assert extract_path.is_file(), list(extract_path.parent.iterdir())
+
+        # LZMA
+        # lzma_file_event = [e for e in filesystem_events if "test.lzma" in e.data["path"]]
+        # assert 1 == len(lzma_file_event), "No lzma file found"
+        # file = Path(lzma_file_event[0].data["path"])
+        # assert file.is_file(), f"File not found at {file}"
+        # extract_event = [e for e in filesystem_events if "test_lzma" in e.data["path"] and "folder" in e.tags]
+        # assert 1 == len(extract_event), "Failed to extract lzma"
+        # extract_path = Path(extract_event[0].data["path"]) / "test.txt"
+        # assert extract_path.is_file(), "Failed to extract the test file"
+
+        # TAR
+        tar_file_event = [e for e in filesystem_events if "test.tar" in e.data["path"]]
+        assert 1 == len(tar_file_event), "No tar file found"
+        file = Path(tar_file_event[0].data["path"])
+        assert file.is_file(), f"File not found at {file}"
+        extract_event = [e for e in filesystem_events if "test_tar" in e.data["path"] and "folder" in e.tags]
+        assert 1 == len(extract_event), "Failed to extract tar"
+        extract_path = Path(extract_event[0].data["path"]) / "test.txt"
+        assert extract_path.is_file(), "Failed to extract the test file"
+
+        # TGZ
+        tgz_file_event = [e for e in filesystem_events if "test.tgz" in e.data["path"]]
+        assert 1 == len(tgz_file_event), "No tgz file found"
+        file = Path(tgz_file_event[0].data["path"])
+        assert file.is_file(), f"File not found at {file}"
+        extract_event = [e for e in filesystem_events if "test_tgz" in e.data["path"] and "folder" in e.tags]
+        assert 1 == len(extract_event), "Failed to extract tgz"
+        extract_path = Path(extract_event[0].data["path"]) / "test.txt"
+        assert extract_path.is_file(), "Failed to extract the test file"
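The new test above builds its fixture archives with 7z and tar via asyncio.create_subprocess_exec, then asserts that each downloaded archive yields a FILESYSTEM event tagged "folder" containing the extracted test.txt, covering the new unarchive module (bbot/modules/internal/unarchive.py, +82 lines). Below is a minimal extraction sketch in the same subprocess style, assuming 7z is on PATH; the function name, flags, and directory handling are illustrative, not the module's actual implementation:

# Illustrative archive-extraction sketch mirroring the subprocess pattern used
# in the test above; not the actual unarchive module code.
import asyncio
from pathlib import Path


async def extract_archive(archive: Path, output_dir: Path) -> bool:
    output_dir.mkdir(parents=True, exist_ok=True)
    process = await asyncio.create_subprocess_exec(
        "7z", "x", "-aoa", f"-o{output_dir}", str(archive),
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE,
    )
    stdout, stderr = await process.communicate()
    # A zero return code means 7z extracted the archive successfully
    return process.returncode == 0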
bbot/test/test_step_2/module_tests/test_module_viewdns.py
@@ -9,9 +9,9 @@ class TestViewDNS(ModuleTestBase):
     )
 
     def check(self, module_test, events):
-        assert any(
-            e.data == "hyperloop.com" and "affiliate" in e.tags for e in events
-        ), "Failed to detect affiliate domain"
+        assert any(e.data == "hyperloop.com" and "affiliate" in e.tags for e in events), (
+            "Failed to detect affiliate domain"
+        )
 
 
 web_body = """<html>
bbot/test/test_step_2/module_tests/test_module_web_parameters.py
@@ -0,0 +1,59 @@
+from .test_module_excavate import TestExcavateParameterExtraction
+
+
+class TestWebParameters(TestExcavateParameterExtraction):
+    modules_overrides = ["excavate", "httpx", "web_parameters"]
+
+    def check(self, module_test, events):
+        parameters_file = module_test.scan.home / "web_parameters.txt"
+        with open(parameters_file) as f:
+            data = f.read()
+
+        assert "age" in data
+        assert "fit" in data
+        assert "id" in data
+        assert "jqueryget" in data
+        assert "jquerypost" in data
+        assert "size" in data
+
+        # after lightfuzz is merged uncomment these additional parameters
+        # assert "blog-post-author-display" in data
+        # assert "csrf" in data
+        # assert "q1" in data
+        # assert "q2" in data
+        # assert "q3" in data
+        # assert "test" in data
+
+
+class TestWebParameters_include_count(TestWebParameters):
+    config_overrides = {
+        "web": {"spider_distance": 1, "spider_depth": 1},
+        "modules": {"web_parameters": {"include_count": True}},
+    }
+
+    def check(self, module_test, events):
+        parameters_file = module_test.scan.home / "web_parameters.txt"
+        with open(parameters_file) as f:
+            data = f.read()
+        assert "2\tq" in data
+        assert "1\tage" in data
+        assert "1\tfit" in data
+        assert "1\tid" in data
+        assert "1\tjqueryget" in data
+        assert "1\tjquerypost" in data
+        assert "1\tsize" in data
+
+        # after lightfuzz is merged, these will be the correct parameters to check
+
+        # assert "3\ttest" in data
+        # assert "2\tblog-post-author-display" in data
+        # assert "2\tcsrf" in data
+        # assert "2\tq2" in data
+        # assert "1\tage" in data
+        # assert "1\tfit" in data
+        # assert "1\tid" in data
+        # assert "1\tjqueryget" in data
+        # assert "1\tjquerypost" in data
+        # assert "1\tq1" in data
+        # assert "1\tq3" in data
+        # assert "1\tsize" in data
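The new web_parameters output module (bbot/modules/output/web_parameters.py, +55 lines) writes discovered parameter names to web_parameters.txt in the scan directory, and with include_count enabled the test above expects tab-separated "count<TAB>parameter" lines. A minimal sketch of producing that layout is shown below; the writer function is an assumption for illustration, only the output format is taken from the asserted strings:

# Illustrative writer for the "count<TAB>parameter" format asserted above;
# not the actual web_parameters module.
from collections import Counter
from pathlib import Path


def write_parameters(parameters: list[str], outfile: Path, include_count: bool = False) -> None:
    counts = Counter(parameters)
    with open(outfile, "w") as f:
        # most_common() orders parameters by descending frequency
        for param, count in counts.most_common():
            if include_count:
                f.write(f"{count}\t{param}\n")
            else:
                f.write(f"{param}\n")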
bbot/test/test_step_2/module_tests/test_module_websocket.py
@@ -2,6 +2,7 @@ import json
 import asyncio
 import logging
 import websockets
+from websockets.asyncio.server import serve
 
 from .base import ModuleTestBase
 
@@ -10,16 +11,16 @@ log = logging.getLogger("bbot.testing")
 results = {"events": []}
 
 
-async def websocket_handler(websocket, path):
-    results["path"] = path
+async def websocket_handler(websocket):
+    results["path"] = websocket.request.path
     async for message in websocket:
         results["events"].append(message)
 
 
 # Define a coroutine for the server
 async def server_coroutine():
-    async with websockets.serve(websocket_handler, "127.0.0.1", 8765):
-        await asyncio.Future()  # run forever
+    async with serve(websocket_handler, "127.0.0.1", 8765) as server:
+        await server.serve_forever()
 
 
 class TestWebsocket(ModuleTestBase):
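The handler and server changes above track the newer websockets asyncio API, which the loosened "websockets (>=11.0.2,<15.0.0)" pin below permits: handlers take a single connection argument, the request path moves to connection.request.path, and serve() comes from websockets.asyncio.server. A standalone sketch of the same pattern; the echo handler and the host/port values are example choices:

# Standalone sketch of the websockets asyncio server API used in the test above.
import asyncio
from websockets.asyncio.server import serve


async def echo(connection):
    # The request path now lives on the connection object
    print(connection.request.path)
    async for message in connection:
        await connection.send(message)


async def main():
    async with serve(echo, "127.0.0.1", 8765) as server:
        await server.serve_forever()


# asyncio.run(main())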
{bbot-2.3.0.5546rc0.dist-info → bbot-2.3.1.dist-info}/METADATA
@@ -1,8 +1,7 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.3
 Name: bbot
-Version: 2.3.0.5546rc0
+Version: 2.3.1
 Summary: OSINT automation for hackers.
-Home-page: https://github.com/blacklanternsecurity/bbot
 License: GPL-3.0
 Keywords: python,cli,automation,osint,threat-intel,intelligence,neo4j,scanner,python-library,hacking,recursion,pentesting,recon,command-line-tool,bugbounty,subdomains,security-tools,subdomain-scanner,osint-framework,attack-surface,subdomain-enumeration,osint-tool
 Author: TheTechromancer
@@ -21,7 +20,7 @@ Requires-Dist: ansible-runner (>=2.3.2,<3.0.0)
 Requires-Dist: beautifulsoup4 (>=4.12.2,<5.0.0)
 Requires-Dist: cachetools (>=5.3.2,<6.0.0)
 Requires-Dist: cloudcheck (>=7.0.12,<8.0.0)
-Requires-Dist: deepdiff (>=6.2.3,<8.0.0)
+Requires-Dist: deepdiff (>=8.0.0,<9.0.0)
 Requires-Dist: dnspython (>=2.4.2,<3.0.0)
 Requires-Dist: httpx (>=0.27.0,<0.28.0)
 Requires-Dist: idna (>=3.4,<4.0)
@@ -33,7 +32,7 @@ Requires-Dist: orjson (>=3.10.12,<4.0.0)
 Requires-Dist: psutil (>=5.9.4,<7.0.0)
 Requires-Dist: puremagic (>=1.28,<2.0)
 Requires-Dist: pycryptodome (>=3.17,<4.0)
-Requires-Dist: pydantic (>=2.4.2,<3.0.0)
+Requires-Dist: pydantic (>=2.9.2,<3.0.0)
 Requires-Dist: pyjwt (>=2.7.0,<3.0.0)
 Requires-Dist: pyzmq (>=26.0.3,<27.0.0)
 Requires-Dist: radixtarget (>=3.0.13,<4.0.0)
@@ -43,13 +42,14 @@ Requires-Dist: socksio (>=1.0.0,<2.0.0)
 Requires-Dist: tabulate (==0.8.10)
 Requires-Dist: tldextract (>=5.1.1,<6.0.0)
 Requires-Dist: unidecode (>=1.3.8,<2.0.0)
-Requires-Dist: websockets (>=11.0.2,<13.0.0)
+Requires-Dist: websockets (>=11.0.2,<15.0.0)
 Requires-Dist: wordninja (>=2.0.0,<3.0.0)
 Requires-Dist: xmltojson (>=2.0.2,<3.0.0)
 Requires-Dist: yara-python (>=4.5.1,<5.0.0)
 Project-URL: Documentation, https://www.blacklanternsecurity.com/bbot/
 Project-URL: Discord, https://discord.com/invite/PZqkgxu5SA
 Project-URL: Docker Hub, https://hub.docker.com/r/blacklanternsecurity/bbot
+Project-URL: Homepage, https://github.com/blacklanternsecurity/bbot
 Project-URL: Repository, https://github.com/blacklanternsecurity/bbot
 Description-Content-Type: text/markdown
 
@@ -274,7 +274,7 @@ include:
   - paramminer
   - dirbust-light
   - web-screenshots
-  - baddns-thorough
+  - baddns-intense
 
 config:
   modules:
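The preset include list above now references baddns-intense, matching the renamed preset file (baddns-thorough.yml → baddns-intense.yml, file 114 in the list). For context, a minimal sketch of running a scan with a named preset through bbot's Python API; the target is an example value, and Scanner(presets=...) with async_start() follows bbot's documented usage but should be treated as an assumption for this specific release:

# Minimal sketch of starting a scan with a named preset; example target,
# API usage assumed from bbot's documented Python interface.
import asyncio
from bbot.scanner import Scanner


async def main():
    scan = Scanner("evilcorp.com", presets=["kitchen-sink"])
    async for event in scan.async_start():
        print(event)


# asyncio.run(main())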