man-spider 1.1.0__tar.gz → 1.1.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
@@ -0,0 +1,213 @@
1
+ Metadata-Version: 2.3
2
+ Name: man-spider
3
+ Version: 1.1.2
4
+ Summary: Full-featured SMB spider capable of searching file content
5
+ License: GPL-3.0
6
+ Author: TheTechromancer
7
+ Requires-Python: >=3.8,<3.14
8
+ Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3)
9
+ Classifier: Programming Language :: Python :: 3
10
+ Classifier: Programming Language :: Python :: 3.8
11
+ Classifier: Programming Language :: Python :: 3.9
12
+ Classifier: Programming Language :: Python :: 3.10
13
+ Classifier: Programming Language :: Python :: 3.11
14
+ Classifier: Programming Language :: Python :: 3.12
15
+ Classifier: Programming Language :: Python :: 3.13
16
+ Requires-Dist: extractous (>=0.3.0,<0.4.0)
17
+ Requires-Dist: impacket (>=0.12.0,<0.13.0)
18
+ Requires-Dist: python-magic (>=0.4.27,<0.5.0)
19
+ Description-Content-Type: text/markdown
20
+
21
+ # MANSPIDER
22
+ ### Crawl SMB shares for juicy information. File content searching + regex is supported!
23
+
24
+ ### UPDATE 2025-05-26
25
+
26
+ **Manspider has been updated to support Kerberos!** Also, the textract library has been replaced with [Extractous](https://github.com/yobix-ai/extractous), so you can now feel free to run it outside Docker. 🎉
27
+
28
+ ![manspider](https://user-images.githubusercontent.com/20261699/74963251-6a08de80-53df-11ea-88f4-60c39665dfa2.gif)
29
+
30
+ ### File types supported:
31
+ - `PDF`
32
+ - `DOCX`
33
+ - `XLSX`
34
+ - `PPTX`
35
+ - any text-based format
36
+ - and many more!!
37
+
38
+ ### MANSPIDER will crawl every share on every target system. If provided creds don't work, it will fall back to "guest", then to a null session.
39
+ ![manspider](https://user-images.githubusercontent.com/20261699/80316979-f9ab7e80-87ce-11ea-9628-3c22a07e8378.png)
40
+
41
+ ### Installation:
42
+ (Optional) Install these dependencies to add additional file parsing capability:
43
+ ~~~
44
+ # for images (png, jpeg)
45
+ $ sudo apt install tesseract-ocr
46
+
47
+ # for legacy document support (.doc)
48
+ $ sudo apt install antiword
49
+ ~~~
50
+ Install manspider (please be patient, this can take a while):
51
+ ~~~
52
+ $ pip install pipx
53
+ $ pipx install git+https://github.com/blacklanternsecurity/MANSPIDER
54
+ ~~~
55
+
56
+ ## Installation (Docker)
57
+
58
+ ```bash
59
+ docker run --rm -v ./manspider:/root/.manspider blacklanternsecurity/manspider --help
60
+ ```
61
+
62
+ Note there is also a helper script `manspider.sh` which will automatically mount volumes for manspider's `loot` and `logs` directories, making it a bit more convenient to run:
63
+
64
+ ```bash
65
+ ./manspider.sh --help
66
+ ```
67
+
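As a rough sketch (assuming the container accepts the same arguments as a native install, which the `--help` invocation above suggests), an actual scan through Docker just swaps `--help` for the usual targets and filters, reusing the same volume mount so loot and logs persist on the host:

```bash
# hypothetical scan via the published image; the targets and credentials are
# placeholders, and ./manspider on the host receives the loot and log directories
docker run --rm -v ./manspider:/root/.manspider blacklanternsecurity/manspider \
    192.168.0.0/24 -f passw cred -d evilcorp -u bob -p Passw0rd
```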
68
+ ### Example #1: Search the network for filenames that may contain creds
69
+ NOTE: matching files are automatically downloaded into `$HOME/.manspider/loot`! (`-n` to disable)
70
+ ~~~
71
+ $ manspider 192.168.0.0/24 -f passw user admin account network login logon cred -d evilcorp -u bob -p Passw0rd
72
+ ~~~
73
+
74
+ ### Example #2: Search for spreadsheets with "password" in the filename
75
+ ~~~
76
+ $ manspider share.evilcorp.local -f passw -e xlsx csv -d evilcorp -u bob -p Passw0rd
77
+ ~~~
78
+
79
+ ### Example #3: Search for documents containing passwords
80
+ ~~~
81
+ $ manspider share.evilcorp.local -c passw -e xlsx csv docx pdf -d evilcorp -u bob -p Passw0rd
82
+ ~~~
83
+
84
+ ### Example #4: Search for interesting file extensions
85
+ ~~~
86
+ $ manspider share.evilcorp.local -e bat com vbs ps1 psd1 psm1 pem key rsa pub reg pfx cfg conf config vmdk vhd vdi dit -d evilcorp -u bob -p Passw0rd
87
+ ~~~
88
+
89
+ ### Example #5: Search for finance-related files
90
+ This example searches financy-sounding directories for filenames containing 5 or more consecutive digits (e.g. `000202006.EFT`).
91
+ ~~~
92
+ $ manspider share.evilcorp.local --dirnames bank financ payable payment reconcil remit voucher vendor eft swift -f '[0-9]{5,}' -d evilcorp -u bob -p Passw0rd
93
+ ~~~
94
+
95
+ ### Example #6: Search for SSH keys by filename
96
+ ~~~
97
+ $ manspider share.evilcorp.local -e ppk rsa pem ssh -o -f id_rsa id_dsa id_ed25519 -d evilcorp -u bob -p Passw0rd
98
+ ~~~
99
+
100
+ ### Example #7: Search for SSH keys by content
101
+ ~~~
102
+ $ manspider share.evilcorp.local -e '' -c 'BEGIN .{1,10} PRIVATE KEY' -d evilcorp -u bob -p Passw0rd
103
+ ~~~
104
+
105
+ ### Example #8: Search for password manager files
106
+ ~~~bash
107
+ # .kdbx - KeePass Password Database (KeePass, KeePassXC)
108
+ # .kdb - KeePass Classic Database (KeePass 1.x)
109
+ # .1pif - 1Password Interchange Format (1Password)
110
+ # .agilekeychain - Agile Keychain Format (1Password, deprecated)
111
+ # .opvault - OPVault Format (1Password)
112
+ # .lpd - LastPass Data File (LastPass)
113
+ # .dashlane - Dashlane Data File (Dashlane)
114
+ # .psafe3 - Password Safe Database (Password Safe)
115
+ # .enpass - Enpass Password Manager Data File (Enpass)
116
+ # .bwdb - Bitwarden Database (Bitwarden)
117
+ # .msecure - mSecure Password Manager Data File (mSecure)
118
+ # .stickypass - Sticky Password Data File (Sticky Password)
119
+ # .pwm - Password Memory Data File (Password Memory)
120
+ # .rdb - RoboForm Data File (RoboForm)
121
+ # .safe - SafeInCloud Password Manager Data File (SafeInCloud)
122
+ # .zps - Zoho Vault Encrypted Data File (Zoho Vault)
123
+ # .pmvault - SplashID Safe Data File (SplashID Safe)
124
+ # .mywallet - MyWallet Password Manager Data File (MyWallet)
125
+ # .jpass - JPass Password Manager Data File (JPass)
126
+ # .pwmdb - Universal Password Manager Database (Universal Password Manager)
127
+ $ manspider share.evilcorp.local -e kdbx kdb 1pif agilekeychain opvault lpd dashlane psafe3 enpass bwdb msecure stickypass pwm rdb safe zps pmvault mywallet jpass pwmdb -d evilcorp -u bob -p Passw0rd
128
+ ~~~
129
+
130
+ ### Example #9: Search for certificates
131
+ ~~~
132
+ $ manspider share.evilcorp.local -e pfx p12 pkcs12 pem key crt cer csr jks keystore keys der -d evilcorp -u bob -p Passw0rd
133
+ ~~~
134
+
135
+ ### Usage Tip #1:
136
+ You can run multiple instances of manspider at one time. This is useful when one instance is already running, and you want to search what it's downloaded (similar to `grep -R`). To do this, specify the keyword `loot` as the target, which will search the downloaded files in `$HOME/.manspider/loot`.
137
+
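For instance (a hypothetical second terminal, assuming a network scan like Example #1 is already filling `$HOME/.manspider/loot`), a second instance can grep the downloaded files by content while the first keeps spidering:

```bash
# second instance: the keyword "loot" targets $HOME/.manspider/loot,
# so this searches already-downloaded files for private keys by content
manspider loot -c 'BEGIN .{1,10} PRIVATE KEY'
```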
138
+ ### Usage Tip #2:
139
+ Reasonable defaults help prevent unwanted scenarios like getting stuck on a single target. All of these can be overridden (see the example after this list):
140
+ - **default spider depth: 10** (override with `-m`)
141
+ - **default max filesize: 10MB** (override with `-s`)
142
+ - **default threads: 5** (override with `-t`)
143
+ - **shares excluded: `C$`, `IPC$`, `ADMIN$`, `PRINT$`** (override with `--exclude-sharenames`)
144
+
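A sketch of overriding those defaults in one run (the target and credentials are the same placeholders used in the examples above):

```bash
# spider 20 levels deep, allow files up to 50MB, use 10 threads, and
# search ADMIN$ by excluding only C$, IPC$ and PRINT$
# (assumes the supplied list replaces the default exclusions)
manspider share.evilcorp.local -m 20 -s 50M -t 10 \
    --exclude-sharenames 'C$' 'IPC$' 'PRINT$' \
    -f passw -d evilcorp -u bob -p Passw0rd
```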
145
+ ### Usage Tip #3:
146
+ Manspider accepts any combination of the following as targets:
147
+ - IPs
148
+ - hostnames
149
+ - subnets (CIDR format)
150
+ - files containing any of the above
151
+ - local folders containing files
152
+
153
+ For example, you could specify any or all of these (a combined command is sketched after the list):
154
+ - **`192.168.1.250`**
155
+ - **`share.evilcorp.local`**
156
+ - **`192.168.1.0/24`**
157
+ - **`smb_hosts.txt`**
158
+ - **`loot`** (to search already-downloaded files)
159
+ - **`/mnt/share`** (to recursively search a directory)
160
+ - NOTE: when searching local files, you must specify a directory, not an individual file
161
+
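Putting those target types together (a sketch; the subnet, hostname, targets file, and `loot` keyword are the placeholders listed above), a single invocation can mix remote and local targets:

```bash
# one run that spiders a subnet, a single host, every entry in smb_hosts.txt,
# and also re-searches previously downloaded files under $HOME/.manspider/loot
manspider 192.168.1.0/24 share.evilcorp.local smb_hosts.txt loot \
    -f passw -e xlsx docx -d evilcorp -u bob -p Passw0rd
```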
162
+ ## Usage:
163
+ ~~~
164
+ usage: manspider [-h] [-u USERNAME] [-p PASSWORD] [-d DOMAIN] [-m MAXDEPTH] [-H HASH] [-t THREADS] [-f REGEX [REGEX ...]] [-e EXT [EXT ...]] [--exclude-extensions EXT [EXT ...]]
165
+ [-c REGEX [REGEX ...]] [--sharenames SHARE [SHARE ...]] [--exclude-sharenames [SHARE ...]] [--dirnames DIR [DIR ...]] [--exclude-dirnames DIR [DIR ...]] [-q] [-n]
166
+ [-mfail INT] [-o] [-s SIZE] [-v]
167
+ targets [targets ...]
168
+
169
+ Scan for juicy data on SMB shares. Matching files and logs are stored in $HOME/.manspider. All filters are case-insensitive.
170
+
171
+ positional arguments:
172
+ targets IPs, Hostnames, CIDR ranges, or files containing targets to spider (NOTE: local searching also supported, specify directory name or keyword "loot" to search
173
+ downloaded files)
174
+
175
+ optional arguments:
176
+ -h, --help show this help message and exit
177
+ -u USERNAME, --username USERNAME
178
+ username for authentication
179
+ -p PASSWORD, --password PASSWORD
180
+ password for authentication
181
+ -d DOMAIN, --domain DOMAIN
182
+ domain for authentication
183
+ -m MAXDEPTH, --maxdepth MAXDEPTH
184
+ maximum depth to spider (default: 10)
185
+ -H HASH, --hash HASH NTLM hash for authentication
186
+ -t THREADS, --threads THREADS
187
+ concurrent threads (default: 5)
188
+ -f REGEX [REGEX ...], --filenames REGEX [REGEX ...]
189
+ filter filenames using regex (space-separated)
190
+ -e EXT [EXT ...], --extensions EXT [EXT ...]
191
+ only show filenames with these extensions (space-separated, e.g. `docx xlsx` for only word & excel docs)
192
+ --exclude-extensions EXT [EXT ...]
193
+ ignore files with these extensions
194
+ -c REGEX [REGEX ...], --content REGEX [REGEX ...]
195
+ search for file content using regex (multiple supported)
196
+ --sharenames SHARE [SHARE ...]
197
+ only search shares with these names (multiple supported)
198
+ --exclude-sharenames [SHARE ...]
199
+ don't search shares with these names (multiple supported)
200
+ --dirnames DIR [DIR ...]
201
+ only search directories containing these strings (multiple supported)
202
+ --exclude-dirnames DIR [DIR ...]
203
+ don't search directories containing these strings (multiple supported)
204
+ -q, --quiet don't display matching file content
205
+ -n, --no-download don't download matching files
206
+ -mfail INT, --max-failed-logons INT
207
+ limit failed logons
208
+ -o, --or-logic use OR logic instead of AND (files are downloaded if filename OR extension OR content match)
209
+ -s SIZE, --max-filesize SIZE
210
+ don't retrieve files over this size, e.g. "500K" or ".5M" (default: 10M)
211
+ -v, --verbose show debugging messages
212
+ ~~~
213
+
@@ -0,0 +1,192 @@
1
+ # MANSPIDER
2
+ ### Crawl SMB shares for juicy information. File content searching + regex is supported!
3
+
4
+ ### UPDATE 2025-05-26
5
+
6
+ **Manspider has been updated to support Kerberos!** Also, the textract library has been replaced with [Extractous](https://github.com/yobix-ai/extractous), so you can now feel free to run it outside Docker. 🎉
7
+
8
+ ![manspider](https://user-images.githubusercontent.com/20261699/74963251-6a08de80-53df-11ea-88f4-60c39665dfa2.gif)
9
+
10
+ ### File types supported:
11
+ - `PDF`
12
+ - `DOCX`
13
+ - `XLSX`
14
+ - `PPTX`
15
+ - any text-based format
16
+ - and many more!!
17
+
18
+ ### MANSPIDER will crawl every share on every target system. If provided creds don't work, it will fall back to "guest", then to a null session.
19
+ ![manspider](https://user-images.githubusercontent.com/20261699/80316979-f9ab7e80-87ce-11ea-9628-3c22a07e8378.png)
20
+
21
+ ### Installation:
22
+ (Optional) Install these dependencies to add additional file parsing capability:
23
+ ~~~
24
+ # for images (png, jpeg)
25
+ $ sudo apt install tesseract-ocr
26
+
27
+ # for legacy document support (.doc)
28
+ $ sudo apt install antiword
29
+ ~~~
30
+ Install manspider (please be patient, this can take a while):
31
+ ~~~
32
+ $ pip install pipx
33
+ $ pipx install git+https://github.com/blacklanternsecurity/MANSPIDER
34
+ ~~~
35
+
36
+ ## Installation (Docker)
37
+
38
+ ```bash
39
+ docker run --rm -v ./manspider:/root/.manspider blacklanternsecurity/manspider --help
40
+ ```
41
+
42
+ Note there is also a helper script `manspider.sh` which will automatically mount volumes for manspider's `loot` and `logs` directories, making it a bit more convenient to run:
43
+
44
+ ```bash
45
+ ./manspider.sh --help
46
+ ```
47
+
48
+ ### Example #1: Search the network for filenames that may contain creds
49
+ NOTE: matching files are automatically downloaded into `$HOME/.manspider/loot`! (`-n` to disable)
50
+ ~~~
51
+ $ manspider 192.168.0.0/24 -f passw user admin account network login logon cred -d evilcorp -u bob -p Passw0rd
52
+ ~~~
53
+
54
+ ### Example #2: Search for spreadsheets with "password" in the filename
55
+ ~~~
56
+ $ manspider share.evilcorp.local -f passw -e xlsx csv -d evilcorp -u bob -p Passw0rd
57
+ ~~~
58
+
59
+ ### Example #3: Search for documents containing passwords
60
+ ~~~
61
+ $ manspider share.evilcorp.local -c passw -e xlsx csv docx pdf -d evilcorp -u bob -p Passw0rd
62
+ ~~~
63
+
64
+ ### Example #4: Search for interesting file extensions
65
+ ~~~
66
+ $ manspider share.evilcorp.local -e bat com vbs ps1 psd1 psm1 pem key rsa pub reg pfx cfg conf config vmdk vhd vdi dit -d evilcorp -u bob -p Passw0rd
67
+ ~~~
68
+
69
+ ### Example #5: Search for finance-related files
70
+ This example searches financy-sounding directories for filenames containing 5 or more consecutive digits (e.g. `000202006.EFT`).
71
+ ~~~
72
+ $ manspider share.evilcorp.local --dirnames bank financ payable payment reconcil remit voucher vendor eft swift -f '[0-9]{5,}' -d evilcorp -u bob -p Passw0rd
73
+ ~~~
74
+
75
+ ### Example #6: Search for SSH keys by filename
76
+ ~~~
77
+ $ manspider share.evilcorp.local -e ppk rsa pem ssh -o -f id_rsa id_dsa id_ed25519 -d evilcorp -u bob -p Passw0rd
78
+ ~~~
79
+
80
+ ### Example #7: Search for SSH keys by content
81
+ ~~~
82
+ $ manspider share.evilcorp.local -e '' -c 'BEGIN .{1,10} PRIVATE KEY' -d evilcorp -u bob -p Passw0rd
83
+ ~~~
84
+
85
+ ### Example #8: Search for password manager files
86
+ ~~~bash
87
+ # .kdbx - KeePass Password Database (KeePass, KeePassXC)
88
+ # .kdb - KeePass Classic Database (KeePass 1.x)
89
+ # .1pif - 1Password Interchange Format (1Password)
90
+ # .agilekeychain - Agile Keychain Format (1Password, deprecated)
91
+ # .opvault - OPVault Format (1Password)
92
+ # .lpd - LastPass Data File (LastPass)
93
+ # .dashlane - Dashlane Data File (Dashlane)
94
+ # .psafe3 - Password Safe Database (Password Safe)
95
+ # .enpass - Enpass Password Manager Data File (Enpass)
96
+ # .bwdb - Bitwarden Database (Bitwarden)
97
+ # .msecure - mSecure Password Manager Data File (mSecure)
98
+ # .stickypass - Sticky Password Data File (Sticky Password)
99
+ # .pwm - Password Memory Data File (Password Memory)
100
+ # .rdb - RoboForm Data File (RoboForm)
101
+ # .safe - SafeInCloud Password Manager Data File (SafeInCloud)
102
+ # .zps - Zoho Vault Encrypted Data File (Zoho Vault)
103
+ # .pmvault - SplashID Safe Data File (SplashID Safe)
104
+ # .mywallet - MyWallet Password Manager Data File (MyWallet)
105
+ # .jpass - JPass Password Manager Data File (JPass)
106
+ # .pwmdb - Universal Password Manager Database (Universal Password Manager)
107
+ $ manspider share.evilcorp.local -e kdbx kdb 1pif agilekeychain opvault lpd dashlane psafe3 enpass bwdb msecure stickypass pwm rdb safe zps pmvault mywallet jpass pwmdb -d evilcorp -u bob -p Passw0rd
108
+ ~~~
109
+
110
+ ### Example #9: Search for certificates
111
+ ~~~
112
+ $ manspider share.evilcorp.local -e pfx p12 pkcs12 pem key crt cer csr jks keystore keys der -d evilcorp -u bob -p Passw0rd
113
+ ~~~
114
+
115
+ ### Usage Tip #1:
116
+ You can run multiple instances of manspider at one time. This is useful when one instance is already running, and you want to search what it's downloaded (similar to `grep -R`). To do this, specify the keyword `loot` as the target, which will search the downloaded files in `$HOME/.manspider/loot`.
117
+
118
+ ### Usage Tip #2:
119
+ Reasonable defaults help prevent unwanted scenarios like getting stuck on a single target. All of these can be overridden:
120
+ - **default spider depth: 10** (override with `-m`)
121
+ - **default max filesize: 10MB** (override with `-s`)
122
+ - **default threads: 5** (override with `-t`)
123
+ - **shares excluded: `C$`, `IPC$`, `ADMIN$`, `PRINT$`** (override with `--exclude-sharenames`)
124
+
125
+ ### Usage Tip #3:
126
+ Manspider accepts any combination of the following as targets:
127
+ - IPs
128
+ - hostnames
129
+ - subnets (CIDR format)
130
+ - files containing any of the above
131
+ - local folders containing files
132
+
133
+ For example, you could specify any or all of these:
134
+ - **`192.168.1.250`**
135
+ - **`share.evilcorp.local`**
136
+ - **`192.168.1.0/24`**
137
+ - **`smb_hosts.txt`**
138
+ - **`loot`** (to search already-downloaded files)
139
+ - **`/mnt/share`** (to recursively search a directory)
140
+ - NOTE: when searching local files, you must specify a directory, not an individual file
141
+
142
+ ## Usage:
143
+ ~~~
144
+ usage: manspider [-h] [-u USERNAME] [-p PASSWORD] [-d DOMAIN] [-m MAXDEPTH] [-H HASH] [-t THREADS] [-f REGEX [REGEX ...]] [-e EXT [EXT ...]] [--exclude-extensions EXT [EXT ...]]
145
+ [-c REGEX [REGEX ...]] [--sharenames SHARE [SHARE ...]] [--exclude-sharenames [SHARE ...]] [--dirnames DIR [DIR ...]] [--exclude-dirnames DIR [DIR ...]] [-q] [-n]
146
+ [-mfail INT] [-o] [-s SIZE] [-v]
147
+ targets [targets ...]
148
+
149
+ Scan for juicy data on SMB shares. Matching files and logs are stored in $HOME/.manspider. All filters are case-insensitive.
150
+
151
+ positional arguments:
152
+ targets IPs, Hostnames, CIDR ranges, or files containing targets to spider (NOTE: local searching also supported, specify directory name or keyword "loot" to search
153
+ downloaded files)
154
+
155
+ optional arguments:
156
+ -h, --help show this help message and exit
157
+ -u USERNAME, --username USERNAME
158
+ username for authentication
159
+ -p PASSWORD, --password PASSWORD
160
+ password for authentication
161
+ -d DOMAIN, --domain DOMAIN
162
+ domain for authentication
163
+ -m MAXDEPTH, --maxdepth MAXDEPTH
164
+ maximum depth to spider (default: 10)
165
+ -H HASH, --hash HASH NTLM hash for authentication
166
+ -t THREADS, --threads THREADS
167
+ concurrent threads (default: 5)
168
+ -f REGEX [REGEX ...], --filenames REGEX [REGEX ...]
169
+ filter filenames using regex (space-separated)
170
+ -e EXT [EXT ...], --extensions EXT [EXT ...]
171
+ only show filenames with these extensions (space-separated, e.g. `docx xlsx` for only word & excel docs)
172
+ --exclude-extensions EXT [EXT ...]
173
+ ignore files with these extensions
174
+ -c REGEX [REGEX ...], --content REGEX [REGEX ...]
175
+ search for file content using regex (multiple supported)
176
+ --sharenames SHARE [SHARE ...]
177
+ only search shares with these names (multiple supported)
178
+ --exclude-sharenames [SHARE ...]
179
+ don't search shares with these names (multiple supported)
180
+ --dirnames DIR [DIR ...]
181
+ only search directories containing these strings (multiple supported)
182
+ --exclude-dirnames DIR [DIR ...]
183
+ don't search directories containing these strings (multiple supported)
184
+ -q, --quiet don't display matching file content
185
+ -n, --no-download don't download matching files
186
+ -mfail INT, --max-failed-logons INT
187
+ limit failed logons
188
+ -o, --or-logic use OR logic instead of AND (files are downloaded if filename OR extension OR content match)
189
+ -s SIZE, --max-filesize SIZE
190
+ don't retrieve files over this size, e.g. "500K" or ".5M" (default: 10M)
191
+ -v, --verbose show debugging messages
192
+ ~~~
@@ -1,8 +1,8 @@
1
- import io
2
- from .util import *
3
- from .errors import *
4
1
  from pathlib import Path
5
2
 
3
+ from man_spider.lib.util import *
4
+ from man_spider.lib.errors import *
5
+
6
6
 
7
7
  class RemoteFile():
8
8
  '''
@@ -117,10 +117,9 @@ class FileParser:
117
117
 
118
118
  try:
119
119
 
120
- matches = self.textract(file, pretty_filename=pretty_filename)
120
+ matches = self.extractous(file, pretty_filename=pretty_filename)
121
121
 
122
122
  except Exception as e:
123
- #except (BadZipFile, textract.exceptions.CommandLineError) as e:
124
123
  if log.level <= logging.DEBUG:
125
124
  log.warning(f'Error extracting text from {pretty_filename}: {e}')
126
125
  else:
@@ -129,10 +128,9 @@ class FileParser:
129
128
  return matches
130
129
 
131
130
 
132
- def textract(self, file, pretty_filename):
131
+ def extractous(self, file, pretty_filename):
133
132
  '''
134
- Extracts text from a file
135
- Uses the textract library on specific extensions
133
+ Extracts text from a file using the extractous library
136
134
  '''
137
135
 
138
136
  matches = dict()
@@ -1,10 +1,11 @@
1
1
  import ntpath
2
2
  import logging
3
- from .errors import *
4
3
  from contextlib import suppress
5
4
  from impacket.nmb import NetBIOSError, NetBIOSTimeout
6
5
  from impacket.smbconnection import SessionError, SMBConnection
7
6
 
7
+ from man_spider.lib.errors import *
8
+
8
9
  # set up logging
9
10
  log = logging.getLogger('manspider.smb')
10
11
 
@@ -1,12 +1,11 @@
1
1
  import re
2
- import sys
3
2
  import queue
4
- import threading
5
3
  from time import sleep
6
4
  import multiprocessing
7
5
  from pathlib import Path
8
- from .spiderling import *
9
- from .parser import FileParser
6
+
7
+ from man_spider.lib.spiderling import *
8
+ from man_spider.lib.parser import FileParser
10
9
 
11
10
  # set up logging
12
11
  log = logging.getLogger('manspider')
@@ -1,15 +1,16 @@
1
1
  import string
2
2
  import logging
3
3
  import pathlib
4
- from .smb import *
5
- from .file import *
6
- from .util import *
7
- from .errors import *
8
4
  import multiprocessing
9
5
  from shutil import move
10
- from .processpool import *
11
6
  from traceback import format_exc
12
7
 
8
+ from man_spider.lib.smb import *
9
+ from man_spider.lib.file import *
10
+ from man_spider.lib.util import *
11
+ from man_spider.lib.errors import *
12
+ from man_spider.lib.processpool import *
13
+
13
14
 
14
15
  log = logging.getLogger('manspider.spiderling')
15
16
 
@@ -5,10 +5,11 @@ import pathlib
5
5
  import logging
6
6
  import argparse
7
7
  import traceback
8
- from .lib import *
9
8
  from time import sleep
10
9
  import multiprocessing
11
10
 
11
+ from man_spider.lib import *
12
+
12
13
 
13
14
  # set up logging
14
15
  log = logging.getLogger('manspider')
@@ -1,23 +1,24 @@
1
- [tool.poetry]
1
+ [project]
2
2
  name = "man-spider"
3
- version = "1.1.0"
3
+ version = "1.1.2"
4
4
  description = "Full-featured SMB spider capable of searching file content"
5
- authors = ["TheTechromancer"]
6
- license = "GPL-3.0"
5
+ authors = [
6
+ {name = "TheTechromancer"}
7
+ ]
8
+ license = {text = "GPL-3.0"}
9
+ readme = "README.md"
10
+ requires-python = ">=3.8,<3.14"
11
+ dependencies = [
12
+ "extractous (>=0.3.0,<0.4.0)",
13
+ "impacket (>=0.12.0,<0.13.0)",
14
+ "python-magic (>=0.4.27,<0.5.0)"
15
+ ]
7
16
  repository = "https://github.com/blacklanternsecurity/MANSPIDER"
8
17
  homepage = "https://github.com/blacklanternsecurity/MANSPIDER"
9
18
 
10
19
  [tool.poetry.scripts]
11
20
  manspider = 'man_spider.manspider:main'
12
21
 
13
- [tool.poetry.dependencies]
14
- python = ">=3.8,<3.14"
15
- impacket = "^0.12.0"
16
- extractous = "^0.3.0"
17
- python-magic = "^0.4.27"
18
-
19
- [tool.poetry.dev-dependencies]
20
-
21
22
  [build-system]
22
- requires = ["poetry-core>=1.0.0"]
23
+ requires = ["poetry-core>=2.0.0,<3.0.0"]
23
24
  build-backend = "poetry.core.masonry.api"
man_spider-1.1.0/PKG-INFO DELETED
@@ -1,20 +0,0 @@
1
- Metadata-Version: 2.3
2
- Name: man-spider
3
- Version: 1.1.0
4
- Summary: Full-featured SMB spider capable of searching file content
5
- License: GPL-3.0
6
- Author: TheTechromancer
7
- Requires-Python: >=3.8,<3.14
8
- Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3)
9
- Classifier: Programming Language :: Python :: 3
10
- Classifier: Programming Language :: Python :: 3.8
11
- Classifier: Programming Language :: Python :: 3.9
12
- Classifier: Programming Language :: Python :: 3.10
13
- Classifier: Programming Language :: Python :: 3.11
14
- Classifier: Programming Language :: Python :: 3.12
15
- Classifier: Programming Language :: Python :: 3.13
16
- Requires-Dist: extractous (>=0.3.0,<0.4.0)
17
- Requires-Dist: impacket (>=0.12.0,<0.13.0)
18
- Requires-Dist: python-magic (>=0.4.27,<0.5.0)
19
- Project-URL: Homepage, https://github.com/blacklanternsecurity/MANSPIDER
20
- Project-URL: Repository, https://github.com/blacklanternsecurity/MANSPIDER