lanscape 1.4.4-py3-none-any.whl → 1.5.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of lanscape might be problematic.

lanscape/__init__.py CHANGED
@@ -3,6 +3,7 @@ Local network scanner
 """
 from lanscape.libraries.subnet_scan import (
     SubnetScanner,
+    ScannerResults,
     ScanManager
 )
 
@@ -12,6 +13,9 @@ from lanscape.libraries.scan_config import (
     PingConfig,
     PokeConfig,
     ArpCacheConfig,
+    PortScanConfig,
+    ServiceScanConfig,
+    ServiceScanStrategy,
     ScanType
 )
 
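The new top-level exports make the 1.5.0 configuration models importable directly from the package. A minimal usage sketch, assuming nothing about the public API beyond the re-exports shown above:

    # Sketch only: these names are re-exported by lanscape/__init__.py in 1.5.0.
    from lanscape import (
        SubnetScanner,
        ScannerResults,
        PortScanConfig,
        ServiceScanConfig,
        ServiceScanStrategy,
    )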
lanscape/libraries/app_scope.py CHANGED
@@ -5,7 +5,6 @@ Resource and environment management utilities for Lanscape.
 
 from pathlib import Path
 import json
-import re
 
 
 class ResourceManager:
@@ -32,9 +31,28 @@ class ResourceManager:
         return json.loads(self.get(asset_name))
 
     def get_jsonc(self, asset_name: str):
-        """Get JSON content with comments removed."""
+        """AI Slop to get JSONC (JSON with comments) content of an asset as a JSON object."""
         content = self.get(asset_name)
-        cleaned_content = re.sub(r'//.*', '', content)
+
+        def strip_jsonc_lines(text):
+            result = []
+            in_string = False
+            escape = False
+            for line in text.splitlines():
+                new_line = []
+                i = 0
+                while i < len(line):
+                    char = line[i]
+                    if char == '"' and not escape:
+                        in_string = not in_string
+                    if not in_string and line[i:i + 2] == "//":
+                        break  # Ignore rest of line (comment)
+                    new_line.append(char)
+                    escape = (char == '\\' and not escape)
+                    i += 1
+                result.append(''.join(new_line))
+            return '\n'.join(result)
+        cleaned_content = strip_jsonc_lines(content)
         return json.loads(cleaned_content)
 
     def update(self, asset_name: str, content: str):
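The replacement matters for JSONC values that legitimately contain "//": the 1.4.4 regex also truncated URLs inside strings. A small illustrative sketch (the sample document below is made up):

    import json, re

    jsonc = '{"homepage": "https://example.com", "ports": [80, 443]}  // comment'

    broken = re.sub(r'//.*', '', jsonc)   # 1.4.4 behaviour: cuts the string at "https:"
    # json.loads(broken)                  # would raise json.JSONDecodeError
    # The 1.5.0 strip_jsonc_lines() only drops "//" that occurs outside a quoted
    # string, so the URL value survives and the trailing comment is removed.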
lanscape/libraries/decorators.py CHANGED
@@ -179,7 +179,9 @@ def timeout_enforcer(timeout: int, raise_on_timeout: bool = True):
         @functools.wraps(func)
         def wrapper(*args, **kwargs):
             """Wrap the function to enforce a timeout on its execution."""
-            with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
+            with concurrent.futures.ThreadPoolExecutor(
+                    max_workers=1,
+                    thread_name_prefix="TimeoutEnforcer") as executor:
                 future = executor.submit(func, *args, **kwargs)
                 try:
                     return future.result(
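For context, this is the decorator that the new Device.test_port relies on (see @timeout_enforcer(enforcer_timeout, False) further down). A hedged usage sketch; the decorated function below is hypothetical:

    from lanscape.libraries.decorators import timeout_enforcer

    @timeout_enforcer(2.0, False)   # raise_on_timeout=False: the diff treats a timed-out
    def probe_host():               # call as falsy ("do_test() or False"), so no exception
        ...                         # hypothetical slow operation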
lanscape/libraries/net_tools.py CHANGED
@@ -1,44 +1,97 @@
 """Network tools for scanning and managing devices on a network."""
 
-import logging
 import ipaddress
-import traceback
-import subprocess
-from typing import List, Dict
+import logging
+import re
 import socket
 import struct
-import re
-import psutil
+import subprocess
+import traceback
+from time import sleep
+from typing import List, Dict, Optional
 
+import psutil
 from scapy.sendrecv import srp
 from scapy.layers.l2 import ARP, Ether
 from scapy.error import Scapy_Exception
 
+from pydantic import BaseModel, PrivateAttr
+try:
+    from pydantic import ConfigDict, computed_field, model_serializer  # pydantic v2
+    _PYD_V2 = True
+except Exception:  # pragma: no cover
+    CONFIG_DICT = None  # type: ignore # pylint: disable=invalid-name
+    COMPUTED_FIELD = None  # type: ignore # pylint: disable=invalid-name
+    MODEL_SERIALIZER = None  # type: ignore # pylint: disable=invalid-name
+    _PYD_V2 = False
+else:
+    CONFIG_DICT = ConfigDict  # pylint: disable=invalid-name
+    COMPUTED_FIELD = computed_field  # pylint: disable=invalid-name
+    MODEL_SERIALIZER = model_serializer  # pylint: disable=invalid-name
+
 from lanscape.libraries.service_scan import scan_service
 from lanscape.libraries.mac_lookup import MacLookup, get_macs
 from lanscape.libraries.ip_parser import get_address_count, MAX_IPS_ALLOWED
 from lanscape.libraries.errors import DeviceError
-from lanscape.libraries.decorators import job_tracker, run_once
+from lanscape.libraries.decorators import job_tracker, run_once, timeout_enforcer
+from lanscape.libraries.scan_config import ServiceScanConfig, PortScanConfig
 
 log = logging.getLogger('NetTools')
 mac_lookup = MacLookup()
 
 
-class Device:
+class Device(BaseModel):
     """Represents a network device with metadata and scanning capabilities."""
 
-    def __init__(self, ip: str):
-        super().__init__()
-        self.ip: str = ip
-        self.alive: bool = None
-        self.hostname: str = None
-        self.macs: List[str] = []
-        self.manufacturer: str = None
-        self.ports: List[int] = []
-        self.stage: str = 'found'
-        self.services: Dict[str, List[int]] = {}
-        self.caught_errors: List[DeviceError] = []
-        self.log = logging.getLogger('Device')
+    ip: str
+    alive: Optional[bool] = None
+    hostname: Optional[str] = None
+    macs: List[str] = []
+    manufacturer: Optional[str] = None
+    ports: List[int] = []
+    stage: str = 'found'
+    ports_scanned: int = 0
+    services: Dict[str, List[int]] = {}
+    caught_errors: List[DeviceError] = []
+    job_stats: Optional[Dict] = None
+
+    _log: logging.Logger = PrivateAttr(default_factory=lambda: logging.getLogger('Device'))
+    # Support pydantic v1 and v2 configs
+    if _PYD_V2 and CONFIG_DICT:
+        model_config = CONFIG_DICT(arbitrary_types_allowed=True)  # type: ignore[assignment]
+    else:  # pragma: no cover
+        class Config:  # pylint: disable=too-few-public-methods
+            """Pydantic v1 configuration."""
+            arbitrary_types_allowed = True
+            extra = 'allow'
+
+    @property
+    def log(self) -> logging.Logger:
+        """Get the logger instance for this device."""
+        return self._log
+
+    # Computed fields for pydantic v2 (included in model_dump)
+    if _PYD_V2 and COMPUTED_FIELD:
+        @COMPUTED_FIELD(return_type=str)  # type: ignore[misc]
+        @property
+        def mac_addr(self) -> str:
+            """Get the primary MAC address for this device."""
+            return self.get_mac() or ""
+
+        @MODEL_SERIALIZER(mode='wrap')  # type: ignore[misc]
+        def _serialize(self, serializer):
+            """Serialize device data for output."""
+            data = serializer(self)
+            # Remove internals
+            data.pop('job_stats', None)
+            # Ensure mac_addr present (computed_field already adds it)
+            data['mac_addr'] = data.get('mac_addr') or (self.get_mac() or '')
+            # Ensure manufacturer present; prefer explicit model value
+            manuf = data.get('manufacturer')
+            if not manuf:
+                data['manufacturer'] = self._get_manufacturer(
+                    data['mac_addr']) if data['mac_addr'] else None
+            return data
 
     def get_metadata(self):
         """Retrieve metadata such as hostname and MAC addresses."""
@@ -46,32 +99,72 @@ class Device:
         self.hostname = self._get_hostname()
         self._get_mac_addresses()
 
-    def dict(self) -> dict:
-        """Convert the device object to a dictionary."""
-        obj = vars(self).copy()
-        obj.pop('log')
-        obj.pop('job_stats', None)  # Remove job_stats if it exists
-        primary_mac = self.get_mac()
-        obj['mac_addr'] = primary_mac
-        obj['manufacturer'] = self._get_manufacturer(primary_mac)
-
-        return obj
-
-    def test_port(self, port: int) -> bool:
+    # Fallback for pydantic v1: use dict() and enrich output
+    if not _PYD_V2:
+        def dict(self, *args, **kwargs) -> dict:  # type: ignore[override]
+            """Generate dictionary representation for pydantic v1."""
+            data = super().dict(*args, **kwargs)
+            data.pop('job_stats', None)
+            mac_addr = self.get_mac() or ''
+            data['mac_addr'] = mac_addr
+            if not data.get('manufacturer'):
+                data['manufacturer'] = self._get_manufacturer(mac_addr) if mac_addr else None
+            return data
+    else:
+        # In v2, route dict() to model_dump() so callers get the serialized enrichment
+        def dict(self, *args, **kwargs) -> dict:  # type: ignore[override]
+            """Generate dictionary representation for pydantic v2."""
+            try:
+                return self.model_dump(*args, **kwargs)  # type: ignore[attr-defined]
+            except Exception:
+                # Safety fallback (shouldn't normally hit)
+                data = self.__dict__.copy()
+                data.pop('_log', None)
+                data.pop('job_stats', None)
+                mac_addr = self.get_mac() or ''
+                data['mac_addr'] = mac_addr
+                if not data.get('manufacturer'):
+                    data['manufacturer'] = self._get_manufacturer(mac_addr) if mac_addr else None
+                return data
+
+    def test_port(self, port: int, port_config: Optional[PortScanConfig] = None) -> bool:
         """Test if a specific port is open on the device."""
-        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-        sock.settimeout(1)
-        result = sock.connect_ex((self.ip, port))
-        sock.close()
-        if result == 0:
-            self.ports.append(port)
-            return True
-        return False
+        if port_config is None:
+            port_config = PortScanConfig()  # Use defaults
+
+        # Calculate timeout enforcer: (timeout * (retries+1) * 1.5)
+        enforcer_timeout = port_config.timeout * (port_config.retries + 1) * 1.5
+
+        @timeout_enforcer(enforcer_timeout, False)
+        def do_test():
+            for attempt in range(port_config.retries + 1):
+                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+                sock.settimeout(port_config.timeout)
+                try:
+                    result = sock.connect_ex((self.ip, port))
+                    if result == 0:
+                        if port not in self.ports:
+                            self.ports.append(port)
+                        return True
+                except Exception:
+                    pass  # Connection failed, try again if retries remain
+                finally:
+                    sock.close()
+
+                # Wait before retry (except on last attempt)
+                if attempt < port_config.retries:
+                    sleep(port_config.retry_delay)
+
+            return False
+
+        ans = do_test() or False
+        self.ports_scanned += 1
+        return ans
 
     @job_tracker
-    def scan_service(self, port: int):
+    def scan_service(self, port: int, cfg: ServiceScanConfig):
         """Scan a specific port for services."""
-        service = scan_service(self.ip, port)
+        service = scan_service(self.ip, port, cfg)
         service_ports = self.services.get(service, [])
         service_ports.append(port)
         self.services[service] = service_ports
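The new per-port retry logic gives each call a bounded budget. A worked sketch of the arithmetic, using the PortScanConfig values that the first preset further down in this diff sets (the Device calls are shown only as a hypothetical):

    from lanscape.libraries.scan_config import PortScanConfig

    cfg = PortScanConfig(timeout=2.5, retries=1, retry_delay=0.2)
    # timeout_enforcer budget wraps all attempts: 2.5 s * (1 + 1) * 1.5 = 7.5 s
    assert cfg.timeout * (cfg.retries + 1) * 1.5 == 7.5

    # device = Device(ip='192.168.1.10')        # Device is now a pydantic model
    # device.test_port(443, port_config=cfg)    # True only if the TCP connect succeeds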
lanscape/libraries/scan_config.py CHANGED
@@ -154,6 +154,87 @@ class PokeConfig(BaseModel):
         return self.model_dump()
 
 
+class ServiceScanStrategy(Enum):
+    """
+    Enumeration of strategies for service scanning on open ports.
+
+    LAZY: Several common probes to see if we can identify the service.
+    BASIC: Common probes plus probes correlated to the port number.
+    AGGRESSIVE: All known probes in parallel to try to elicit a response.
+    """
+    LAZY = 'LAZY'
+    BASIC = 'BASIC'
+    AGGRESSIVE = 'AGGRESSIVE'
+
+
+class ServiceScanConfig(BaseModel):
+    """
+    Configuration for service scanning on open ports.
+    """
+    timeout: float = 5.0
+    lookup_type: ServiceScanStrategy = ServiceScanStrategy.BASIC
+    max_concurrent_probes: int = 10
+
+    @classmethod
+    def from_dict(cls, data: dict) -> 'ServiceScanConfig':
+        """
+        Create a ServiceScanConfig instance from a dictionary.
+
+        Args:
+            data: Dictionary containing ServiceScanConfig parameters
+
+        Returns:
+            A new ServiceScanConfig instance with the provided settings
+        """
+        return cls.model_validate(data)
+
+    def to_dict(self) -> dict:
+        """
+        Convert the ServiceScanConfig instance to a dictionary.
+
+        Returns:
+            Dictionary representation of the ServiceScanConfig
+        """
+        return self.model_dump()
+
+    def __str__(self):
+        return f'ServiceScanCfg(timeout={self.timeout})'
+
+
+class PortScanConfig(BaseModel):
+    """
+    Configuration for port scanning.
+    """
+    timeout: float = 1.0
+    retries: int = 0
+    retry_delay: float = 0.1
+
+    @classmethod
+    def from_dict(cls, data: dict) -> 'PortScanConfig':
+        """
+        Create a PortScanConfig instance from a dictionary.
+
+        Args:
+            data: Dictionary containing PortScanConfig parameters
+
+        Returns:
+            A new PortScanConfig instance with the provided settings
+        """
+        return cls.model_validate(data)
+
+    def to_dict(self) -> dict:
+        """
+        Convert the PortScanConfig instance to a dictionary.
+
+        Returns:
+            Dictionary representation of the PortScanConfig
+        """
+        return self.model_dump()
+
+    def __str__(self):
+        return f'PortScanCfg(timeout={self.timeout}, retry_delay={self.retry_delay})'
+
+
 class ScanType(Enum):
     """
     Enumeration of supported network scan types.
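A round-trip sketch for the new config models using the from_dict/to_dict helpers defined above (the dictionary values are illustrative; enum members are accepted by value):

    from lanscape.libraries.scan_config import ServiceScanConfig, ServiceScanStrategy

    cfg = ServiceScanConfig.from_dict({
        'timeout': 2.0,
        'lookup_type': 'LAZY',          # coerced to ServiceScanStrategy.LAZY
        'max_concurrent_probes': 15,
    })
    assert cfg.lookup_type is ServiceScanStrategy.LAZY
    assert cfg.to_dict()['timeout'] == 2.0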
@@ -184,7 +265,7 @@ class ScanConfig(BaseModel):
 
     task_scan_ports: bool = True
     # below wont run if above false
-    task_scan_port_services: bool = False  # disabling until more stable
+    task_scan_port_services: bool = True
 
     lookup_type: List[ScanType] = [ScanType.ICMP_THEN_ARP]
 
@@ -192,6 +273,8 @@
     arp_config: ArpConfig = Field(default_factory=ArpConfig)
     poke_config: PokeConfig = Field(default_factory=PokeConfig)
     arp_cache_config: ArpCacheConfig = Field(default_factory=ArpCacheConfig)
+    port_scan_config: PortScanConfig = Field(default_factory=PortScanConfig)
+    service_scan_config: ServiceScanConfig = Field(default_factory=ServiceScanConfig)
 
     def t_cnt(self, thread_id: str) -> int:
         """
@@ -259,7 +342,7 @@ DEFAULT_CONFIGS: Dict[str, ScanConfig] = {
         t_cnt_port_test=64,
         t_cnt_isalive=64,
         task_scan_ports=True,
-        task_scan_port_services=False,
+        task_scan_port_services=True,
         lookup_type=[ScanType.ICMP_THEN_ARP, ScanType.ARP_LOOKUP],
         arp_config=ArpConfig(
             attempts=3,
@@ -274,6 +357,16 @@ DEFAULT_CONFIGS: Dict[str, ScanConfig] = {
         arp_cache_config=ArpCacheConfig(
             attempts=2,
             wait_before=0.3
+        ),
+        port_scan_config=PortScanConfig(
+            timeout=2.5,
+            retries=1,
+            retry_delay=0.2
+        ),
+        service_scan_config=ServiceScanConfig(
+            timeout=8.0,
+            lookup_type=ServiceScanStrategy.AGGRESSIVE,
+            max_concurrent_probes=5
         )
     ),
     'fast': ScanConfig(
@@ -283,7 +376,7 @@ DEFAULT_CONFIGS: Dict[str, ScanConfig] = {
         t_cnt_port_test=256,
         t_cnt_isalive=512,
         task_scan_ports=True,
-        task_scan_port_services=False,
+        task_scan_port_services=True,
         lookup_type=[ScanType.POKE_THEN_ARP],
         arp_config=ArpConfig(
             attempts=1,
@@ -294,6 +387,11 @@ DEFAULT_CONFIGS: Dict[str, ScanConfig] = {
             ping_count=1,
             timeout=0.5,
             retry_delay=0.25
+        ),
+        service_scan_config=ServiceScanConfig(
+            timeout=2.0,
+            lookup_type=ServiceScanStrategy.LAZY,
+            max_concurrent_probes=15
         )
     )
 }
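Both built-in presets now enable service scanning by default and carry their own port/service scan settings. A sketch of tweaking a preset at runtime (plain attribute assignment on the pydantic models is assumed to be permitted):

    from lanscape.libraries.scan_config import DEFAULT_CONFIGS, ServiceScanStrategy

    fast = DEFAULT_CONFIGS['fast']
    fast.service_scan_config.lookup_type = ServiceScanStrategy.BASIC   # more probes than LAZY
    fast.port_scan_config.retries = 1   # 'fast' keeps the PortScanConfig defaults (retries=0)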
lanscape/libraries/service_scan.py CHANGED
@@ -1,8 +1,14 @@
-"""Service scanning module for identifying services running on network ports."""
+"""Service scanning module for identifying services running on network ports.
+"""
+
+from typing import Optional, Union
+import sys
 import asyncio
 import logging
 import traceback
+
 from lanscape.libraries.app_scope import ResourceManager
+from lanscape.libraries.scan_config import ServiceScanConfig, ServiceScanStrategy
 
 log = logging.getLogger('ServiceScan')
 SERVICES = ResourceManager('services').get_jsonc('definitions.jsonc')
@@ -11,33 +17,162 @@ SERVICES = ResourceManager('services').get_jsonc('definitions.jsonc')
 PRINTER_PORTS = [9100, 631]
 
 
-def scan_service(ip: str, port: int, timeout=10) -> str:
+async def _try_probe(
+    ip: str,
+    port: int,
+    payload: Optional[Union[str, bytes]] = None,
+    *,
+    timeout: float = 5.0,
+    read_len: int = 1024,
+) -> Optional[str]:
+    """
+    Open a connection, optionally send a payload, and read a single response chunk.
+    Returns the decoded response string or None.
+    """
+    try:
+        reader, writer = await asyncio.wait_for(
+            asyncio.open_connection(ip, port), timeout=timeout
+        )
+        try:
+            if payload is not None:
+                data = payload if isinstance(
+                    payload, (bytes, bytearray)) else str(payload).encode(
+                    "utf-8", errors="ignore")
+                writer.write(data)
+                await writer.drain()
+            try:
+                response = await asyncio.wait_for(reader.read(read_len), timeout=timeout / 2)
+            except asyncio.TimeoutError:
+                response = b""
+            resp_str = response.decode("utf-8", errors="ignore") if response else ""
+            return resp_str if resp_str else None
+        finally:
+            # Guarded close to avoid surfacing connection-lost noise
+            try:
+                writer.close()
+            except Exception:
+                pass
+            try:
+                await asyncio.wait_for(writer.wait_closed(), timeout=0.5)
+            except Exception:
+                pass
+    except Exception as e:
+        # Suppress common/expected network errors that simply indicate no useful banner
+        expected_types = (ConnectionResetError, ConnectionRefusedError, TimeoutError, OSError)
+        expected_errnos = {10054, 10061, 10060}  # reset, refused, timeout (Win specific)
+        eno = getattr(e, 'errno', None)
+        if isinstance(e, expected_types) and (eno in expected_errnos or eno is None):
+            return None
+        log.debug(f"Probe error on {ip}:{port} - {repr(e)}")
+        return None
+
+
+async def _multi_probe_generic(
+    ip: str, port: int, cfg: ServiceScanConfig
+) -> Optional[str]:
+    """
+    Run a small set of generic probes in parallel and return the first non-empty response.
+    """
+    probes = get_port_probes(port, cfg.lookup_type)
+
+    semaphore = asyncio.Semaphore(cfg.max_concurrent_probes)
+
+    async def limited_probe(ip, port, payload, timeout_val):
+        async with semaphore:
+            return await _try_probe(
+                ip, port, payload,
+                timeout=timeout_val
+            )
+
+    tasks = [
+        asyncio.create_task(
+            limited_probe(ip, port, p, cfg.timeout)
+        )
+        for p in probes
+    ]
+
+    try:
+        for fut in asyncio.as_completed(tasks, timeout=cfg.timeout):
+            try:
+                resp = await fut
+            except Exception:
+                resp = None
+            if resp and resp.strip():
+                # Cancel remaining tasks
+                for t in tasks:
+                    if not t.done():
+                        t.cancel()
+                return resp
+    except asyncio.TimeoutError:
+        pass
+    finally:
+        # Ensure remaining tasks are cancelled and awaited to suppress warnings
+        for t in tasks:
+            if not t.done():
+                t.cancel()
+        await asyncio.gather(*tasks, return_exceptions=True)
+
+    return None
+
+
+def get_port_probes(port: int, strategy: ServiceScanStrategy):
+    """
+    Return a list of probe payloads based on the port and strategy.
+    """
+    # For now, we use generic probes for all ports.
+    # This can be extended to use specific probes per port/service.
+
+    probes = [
+        None,  # banner-first protocols (SSH/FTP/SMTP/etc.)
+        b"\r\n",  # nudge for many line-oriented services
+        b"HELP\r\n",  # sometimes yields usage/help (SMTP/POP/IMAP-ish)
+        b"OPTIONS * HTTP/1.0\r\n\r\n",  # elicit Server header without path
+        b"HEAD / HTTP/1.0\r\n\r\n",  # basic HTTP
+        b"QUIT\r\n",  # graceful close if understood
+    ]
+
+    if strategy == ServiceScanStrategy.LAZY:
+        return probes
+
+    if strategy == ServiceScanStrategy.BASIC:
+        for _, detail in SERVICES.items():
+            if port in detail.get("ports", []):
+                if probe := detail.get("probe", ''):
+                    probes.append(probe)
+        return probes
+
+    if strategy == ServiceScanStrategy.AGGRESSIVE:
+        for _, detail in SERVICES.items():
+            if probe := detail.get("probe", ''):
+                probes.append(probe)
+        return probes
+
+    return [None]  # Default to banner grab only
+
+
+def scan_service(ip: str, port: int, cfg: ServiceScanConfig) -> str:
     """
     Synchronous function that attempts to identify the service running on a given port.
     """
 
-    async def _async_scan_service(ip: str, port: int, timeout) -> str:
+    async def _async_scan_service(
+        ip: str, port: int,
+        cfg: ServiceScanConfig
+    ) -> str:
         if port in PRINTER_PORTS:
             return "Printer"
 
         try:
-            # Add a timeout to prevent hanging
-            reader, writer = await asyncio.wait_for(asyncio.open_connection(ip, port), timeout=5)
-
-            # Send a probe appropriate for common services
-            probe = f"GET / HTTP/1.1\r\nHost: {ip}\r\n\r\n".encode("utf-8")
-            writer.write(probe)
-            await writer.drain()
+            # Run multiple generic probes concurrently and take first useful response
+            response_str = await _multi_probe_generic(ip, port, cfg)
+            if not response_str:
+                return "Unknown"
 
-            # Receive the response with a timeout
-            response = await asyncio.wait_for(reader.read(1024), timeout=timeout)
-            writer.close()
-            await writer.wait_closed()
+            log.debug(f"Service scan response from {ip}:{port} - {response_str}")
 
             # Analyze the response to identify the service
-            response_str = response.decode("utf-8", errors="ignore")
-            for service, hints in SERVICES.items():
-                if any(hint.lower() in response_str.lower() for hint in hints):
+            for service, config in SERVICES.items():
+                if any(hint.lower() in response_str.lower() for hint in config.get("hints", [])):
                     return service
         except asyncio.TimeoutError:
             log.warning(f"Timeout scanning {ip}:{port}")
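get_port_probes and the hint matching above imply a specific shape for entries in services/definitions.jsonc: each service maps to an object with "hints" plus optional "ports" and "probe" keys. A sketch of that implied structure as a Python literal (the entries are invented, not taken from the real definitions file):

    SERVICES_EXAMPLE = {
        "HTTP": {
            "hints": ["HTTP/1.", "Server:"],
            "ports": [80, 8080, 8443],
            "probe": "HEAD / HTTP/1.0\r\n\r\n",
        },
        "SSH": {
            "hints": ["SSH-"],
            "ports": [22],
            # no "probe": the banner arrives without any payload being sent
        },
    }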
@@ -47,4 +182,23 @@ def scan_service(ip: str, port: int, timeout=10) -> str:
         return "Unknown"
 
     # Use asyncio.run to execute the asynchronous logic synchronously
-    return asyncio.run(_async_scan_service(ip, port, timeout=timeout))
+    return asyncio.run(_async_scan_service(ip, port, cfg=cfg))
+
+
+def asyncio_logger_suppression():
+    """Suppress the noisy asyncio transport errors since they are expected in service scanning."""
+
+    # Reduce noisy asyncio transport errors on Windows by switching to Selector policy
+    if sys.platform.startswith("win"):
+        try:
+            asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
+        except Exception:
+            pass
+    # Also tone down asyncio logger noise from transport callbacks
+    try:
+        logging.getLogger("asyncio").setLevel(logging.WARNING)
+    except Exception:
+        pass
+
+
+asyncio_logger_suppression()
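End to end, the public entry point now takes a ServiceScanConfig instead of a bare timeout. A final usage sketch (host and port are placeholders; the returned name is whichever key in definitions.jsonc has a matching hint, otherwise "Unknown"):

    from lanscape.libraries.service_scan import scan_service
    from lanscape.libraries.scan_config import ServiceScanConfig, ServiceScanStrategy

    cfg = ServiceScanConfig(timeout=3.0, lookup_type=ServiceScanStrategy.BASIC)
    service_name = scan_service('192.168.1.10', 80, cfg)
    print(service_name)   # a service key from definitions.jsonc, or "Unknown"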