scrapli 2024.1.30__py3-none-any.whl → 2024.7.30.post1__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between the package versions as they appear in the public registry.
Files changed (64)
  1. scrapli/__init__.py +2 -1
  2. scrapli/channel/__init__.py +1 -0
  3. scrapli/channel/async_channel.py +16 -7
  4. scrapli/channel/base_channel.py +12 -7
  5. scrapli/channel/sync_channel.py +16 -7
  6. scrapli/decorators.py +2 -1
  7. scrapli/driver/__init__.py +1 -0
  8. scrapli/driver/base/__init__.py +1 -0
  9. scrapli/driver/base/async_driver.py +19 -13
  10. scrapli/driver/base/base_driver.py +35 -38
  11. scrapli/driver/base/sync_driver.py +19 -13
  12. scrapli/driver/core/__init__.py +1 -0
  13. scrapli/driver/core/arista_eos/__init__.py +1 -0
  14. scrapli/driver/core/arista_eos/async_driver.py +2 -1
  15. scrapli/driver/core/arista_eos/base_driver.py +4 -2
  16. scrapli/driver/core/arista_eos/sync_driver.py +2 -1
  17. scrapli/driver/core/cisco_iosxe/__init__.py +1 -0
  18. scrapli/driver/core/cisco_iosxe/async_driver.py +2 -1
  19. scrapli/driver/core/cisco_iosxe/base_driver.py +1 -0
  20. scrapli/driver/core/cisco_iosxe/sync_driver.py +2 -1
  21. scrapli/driver/core/cisco_iosxr/__init__.py +1 -0
  22. scrapli/driver/core/cisco_iosxr/async_driver.py +2 -1
  23. scrapli/driver/core/cisco_iosxr/base_driver.py +1 -0
  24. scrapli/driver/core/cisco_iosxr/sync_driver.py +2 -1
  25. scrapli/driver/core/cisco_nxos/__init__.py +1 -0
  26. scrapli/driver/core/cisco_nxos/async_driver.py +2 -1
  27. scrapli/driver/core/cisco_nxos/base_driver.py +9 -4
  28. scrapli/driver/core/cisco_nxos/sync_driver.py +2 -1
  29. scrapli/driver/core/juniper_junos/__init__.py +1 -0
  30. scrapli/driver/core/juniper_junos/async_driver.py +2 -1
  31. scrapli/driver/core/juniper_junos/base_driver.py +1 -0
  32. scrapli/driver/core/juniper_junos/sync_driver.py +2 -1
  33. scrapli/driver/generic/__init__.py +1 -0
  34. scrapli/driver/generic/async_driver.py +24 -5
  35. scrapli/driver/generic/base_driver.py +6 -1
  36. scrapli/driver/generic/sync_driver.py +25 -6
  37. scrapli/driver/network/__init__.py +1 -0
  38. scrapli/driver/network/async_driver.py +2 -1
  39. scrapli/driver/network/base_driver.py +2 -1
  40. scrapli/driver/network/sync_driver.py +2 -1
  41. scrapli/exceptions.py +1 -0
  42. scrapli/factory.py +7 -6
  43. scrapli/helper.py +21 -7
  44. scrapli/logging.py +2 -3
  45. scrapli/response.py +13 -3
  46. scrapli/ssh_config.py +1 -0
  47. scrapli/transport/base/__init__.py +1 -0
  48. scrapli/transport/base/async_transport.py +1 -0
  49. scrapli/transport/base/base_socket.py +1 -0
  50. scrapli/transport/base/base_transport.py +1 -0
  51. scrapli/transport/base/sync_transport.py +1 -0
  52. scrapli/transport/plugins/asyncssh/transport.py +4 -0
  53. scrapli/transport/plugins/asynctelnet/transport.py +10 -8
  54. scrapli/transport/plugins/paramiko/transport.py +1 -0
  55. scrapli/transport/plugins/ssh2/transport.py +6 -3
  56. scrapli/transport/plugins/system/ptyprocess.py +37 -0
  57. scrapli/transport/plugins/system/transport.py +27 -6
  58. scrapli/transport/plugins/telnet/transport.py +10 -9
  59. {scrapli-2024.1.30.dist-info → scrapli-2024.7.30.post1.dist-info}/METADATA +70 -49
  60. scrapli-2024.7.30.post1.dist-info/RECORD +74 -0
  61. {scrapli-2024.1.30.dist-info → scrapli-2024.7.30.post1.dist-info}/WHEEL +1 -1
  62. scrapli-2024.1.30.dist-info/RECORD +0 -74
  63. {scrapli-2024.1.30.dist-info → scrapli-2024.7.30.post1.dist-info}/LICENSE +0 -0
  64. {scrapli-2024.1.30.dist-info → scrapli-2024.7.30.post1.dist-info}/top_level.txt +0 -0
scrapli/driver/core/cisco_nxos/async_driver.py CHANGED
@@ -1,4 +1,5 @@
  """scrapli.driver.core.cisco_nxos.async_driver"""
+
  from copy import deepcopy
  from io import BytesIO
  from typing import Any, Callable, Dict, List, Optional, Union
@@ -47,7 +48,7 @@ async def nxos_on_close(conn: AsyncNetworkDriver) -> None:


  class AsyncNXOSDriver(AsyncNetworkDriver, NXOSDriverBase):
-     def __init__(
+     def __init__(  # pylint: disable=R0917
          self,
          host: str,
          privilege_levels: Optional[Dict[str, PrivilegeLevel]] = None,
scrapli/driver/core/cisco_nxos/base_driver.py CHANGED
@@ -1,4 +1,5 @@
  """scrapli.driver.core.cisco_nxos.base_driver"""
+
  from typing import Dict

  from scrapli.driver.network.base_driver import PrivilegeLevel
@@ -7,7 +8,7 @@ from scrapli.exceptions import ScrapliValueError
  PRIVS = {
      "exec": (
          PrivilegeLevel(
-             pattern=r"^[\w.\-]{1,63}>\s?$",
+             pattern=r"^[\w.\-]{1,63}(\(maint\-mode\))?>\s?$",
              name="exec",
              previous_priv="",
              deescalate="",
@@ -18,7 +19,7 @@ PRIVS = {
      ),
      "privilege_exec": (
          PrivilegeLevel(
-             pattern=r"^[\w.\-]{1,63}#\s?$",
+             pattern=r"^[\w.\-]{1,63}(\(maint\-mode\))?#\s?$",
              name="privilege_exec",
              previous_priv="exec",
              deescalate="disable",
@@ -30,7 +31,7 @@ PRIVS = {
      ),
      "configuration": (
          PrivilegeLevel(
-             pattern=r"^[\w.\-]{1,63}\(config[\w.\-@/:\+]{0,32}\)#\s?$",
+             pattern=r"^[\w.\-]{1,63}(\(maint\-mode\))?\(config[\w.\-@/:\+]{0,32}\)#\s?$",
              name="configuration",
              previous_priv="privilege_exec",
              deescalate="end",
@@ -46,7 +47,11 @@ PRIVS = {
              # for now doesnt seem to be a reason to differentiate between them, so just have one
              # giant pattern
              pattern=(
-                 r"(^[\w.\-]{1,63}\-tcl#\s?$)|" r"(^[\w.\-]{1,63}\(config\-tcl\)#\s?$)|" r"(^>\s?$)"
+                 r"(^[\w.\-]{1,63}\-tcl#\s?$)|"
+                 r"(^[\w.\-]{1,63}\(config\-tcl\)#\s?$)|"
+                 r"(^>\s?$)|"
+                 r"(^[\w.\-]{1,63}\(maint\-mode\-tcl\)#\s?$)|"
+                 r"(^[\w.\-]{1,63}\(maint\-mode\)\(config\-tcl\)#\s?$)"
              ),
              name="tclsh",
              previous_priv="privilege_exec",
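Note: the pattern changes above widen the NX-OS exec, privilege_exec, configuration, and tclsh prompt patterns so that prompts carrying a "(maint-mode)" marker (devices in maintenance/GIR mode) still resolve to a known privilege level. A minimal standalone sketch, not from the package, showing what the updated privilege_exec pattern now accepts; the prompt strings are made up for illustration:

import re

# pattern copied from the new PRIVS["privilege_exec"] entry above
priv_exec_pattern = re.compile(r"^[\w.\-]{1,63}(\(maint\-mode\))?#\s?$")

# "switch#" and "switch(maint-mode)#" are hypothetical example prompts
for prompt in ("switch#", "switch(maint-mode)#"):
    print(prompt, "->", bool(priv_exec_pattern.search(prompt)))
# both prompts now match, so privilege resolution keeps working while the
# device is in maintenance mode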
scrapli/driver/core/cisco_nxos/sync_driver.py CHANGED
@@ -1,4 +1,5 @@
  """scrapli.driver.core.cisco_nxos.sync_driver"""
+
  from copy import deepcopy
  from io import BytesIO
  from typing import Any, Callable, Dict, List, Optional, Union
@@ -47,7 +48,7 @@ def nxos_on_close(conn: NetworkDriver) -> None:


  class NXOSDriver(NetworkDriver, NXOSDriverBase):
-     def __init__(
+     def __init__(  # pylint: disable=R0917
          self,
          host: str,
          privilege_levels: Optional[Dict[str, PrivilegeLevel]] = None,
scrapli/driver/core/juniper_junos/__init__.py CHANGED
@@ -1,4 +1,5 @@
  """scrapli.driver.core.juniper_junos"""
+
  from scrapli.driver.core.juniper_junos.async_driver import AsyncJunosDriver
  from scrapli.driver.core.juniper_junos.sync_driver import JunosDriver

scrapli/driver/core/juniper_junos/async_driver.py CHANGED
@@ -1,4 +1,5 @@
  """scrapli.driver.core.juniper_junos.async_driver"""
+
  from copy import deepcopy
  from io import BytesIO
  from typing import Any, Callable, Dict, List, Optional, Union
@@ -48,7 +49,7 @@ async def junos_on_close(conn: AsyncNetworkDriver) -> None:


  class AsyncJunosDriver(AsyncNetworkDriver):
-     def __init__(
+     def __init__(  # pylint: disable=R0917
          self,
          host: str,
          privilege_levels: Optional[Dict[str, PrivilegeLevel]] = None,
scrapli/driver/core/juniper_junos/base_driver.py CHANGED
@@ -1,4 +1,5 @@
  """scrapli.driver.core.juniper_junos.base_driver"""
+
  from scrapli.driver.network.base_driver import PrivilegeLevel

  PRIVS = {
scrapli/driver/core/juniper_junos/sync_driver.py CHANGED
@@ -1,4 +1,5 @@
  """scrapli.driver.core.juniper_junos.driver"""
+
  from copy import deepcopy
  from io import BytesIO
  from typing import Any, Callable, Dict, List, Optional, Union
@@ -48,7 +49,7 @@ def junos_on_close(conn: NetworkDriver) -> None:


  class JunosDriver(NetworkDriver):
-     def __init__(
+     def __init__(  # pylint: disable=R0917
          self,
          host: str,
          privilege_levels: Optional[Dict[str, PrivilegeLevel]] = None,
scrapli/driver/generic/__init__.py CHANGED
@@ -1,4 +1,5 @@
  """scrapli.driver.generic"""
+
  from scrapli.driver.generic.async_driver import AsyncGenericDriver
  from scrapli.driver.generic.base_driver import ReadCallback
  from scrapli.driver.generic.sync_driver import GenericDriver
scrapli/driver/generic/async_driver.py CHANGED
@@ -1,4 +1,5 @@
  """scrapli.driver.generic.async_driver"""
+
  import asyncio
  from io import BytesIO
  from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Union
@@ -16,8 +17,25 @@ if TYPE_CHECKING:
      )


+ async def generic_on_open(conn: "AsyncGenericDriver") -> None:
+     """
+     GenericDriver default on-open -- drains initial login by running a simple get_prompt
+
+     Args:
+         conn: GenericDriver object
+
+     Returns:
+         None
+
+     Raises:
+         N/A
+
+     """
+     await conn.get_prompt()
+
+
  class AsyncGenericDriver(AsyncDriver, BaseGenericDriver):
-     def __init__(
+     def __init__(  # pylint: disable=R0917
          self,
          host: str,
          port: Optional[int] = None,
@@ -39,7 +57,7 @@ class AsyncGenericDriver(AsyncDriver, BaseGenericDriver):
          ssh_config_file: Union[str, bool] = False,
          ssh_known_hosts_file: Union[str, bool] = False,
          on_init: Optional[Callable[..., Any]] = None,
-         on_open: Optional[Callable[..., Any]] = None,
+         on_open: Optional[Callable[..., Any]] = generic_on_open,
          on_close: Optional[Callable[..., Any]] = None,
          transport: str = "system",
          transport_options: Optional[Dict[str, Any]] = None,
@@ -97,7 +115,7 @@ class AsyncGenericDriver(AsyncDriver, BaseGenericDriver):
          return prompt

      @timeout_modifier
-     async def _send_command(
+     async def _send_command(  # pylint: disable=R0917
          self,
          command: str,
          strip_prompt: bool = True,
@@ -466,7 +484,7 @@ class AsyncGenericDriver(AsyncDriver, BaseGenericDriver):
              raw_response=raw_response, processed_response=processed_response, response=response
          )

-     async def read_callback(  # noqa: C901
+     async def read_callback(  # pylint: disable=R0917
          self,
          callbacks: List["ReadCallback"],
          initial_input: Optional[str] = None,
@@ -576,7 +594,8 @@ class AsyncGenericDriver(AsyncDriver, BaseGenericDriver):
              _run_callback = callback.check(read_output=read_output)

              if (
-                 callback.only_once is True
+                 _run_callback is True
+                 and callback.only_once is True
                  and callback._triggered is True  # pylint: disable=W0212
              ):
                  self.logger.warning(
scrapli/driver/generic/base_driver.py CHANGED
@@ -1,4 +1,5 @@
  """scrapli.driver.generic.base_driver"""
+
  import re
  from typing import (
      TYPE_CHECKING,
@@ -23,7 +24,7 @@ if TYPE_CHECKING:


  class ReadCallback:
-     def __init__(
+     def __init__(  # pylint: disable=R0917
          self,
          callback: Callable[
              [Union["GenericDriver", "AsyncGenericDriver"], str],
@@ -122,6 +123,8 @@ class ReadCallback:
          """
          if self.contains and not self._contains_bytes:
              self._contains_bytes = self.contains.encode()
+             if self.case_insensitive:
+                 self._contains_bytes = self._contains_bytes.lower()

          return self._contains_bytes

@@ -142,6 +145,8 @@ class ReadCallback:
          """
          if self.not_contains and not self._not_contains_bytes:
              self._not_contains_bytes = self.not_contains.encode()
+             if self.case_insensitive:
+                 self._not_contains_bytes = self._not_contains_bytes.lower()

          return self._not_contains_bytes

scrapli/driver/generic/sync_driver.py CHANGED
@@ -1,4 +1,5 @@
  """scrapli.driver.generic.sync_driver"""
+
  import time
  from io import BytesIO
  from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Union
@@ -16,8 +17,25 @@ if TYPE_CHECKING:
      )


+ def generic_on_open(conn: "GenericDriver") -> None:
+     """
+     GenericDriver default on-open -- drains initial login by running a simple get_prompt
+
+     Args:
+         conn: GenericDriver object
+
+     Returns:
+         None
+
+     Raises:
+         N/A
+
+     """
+     conn.get_prompt()
+
+
  class GenericDriver(Driver, BaseGenericDriver):
-     def __init__(
+     def __init__(  # pylint: disable=R0917
          self,
          host: str,
          port: Optional[int] = None,
@@ -39,7 +57,7 @@ class GenericDriver(Driver, BaseGenericDriver):
          ssh_config_file: Union[str, bool] = False,
          ssh_known_hosts_file: Union[str, bool] = False,
          on_init: Optional[Callable[..., Any]] = None,
-         on_open: Optional[Callable[..., Any]] = None,
+         on_open: Optional[Callable[..., Any]] = generic_on_open,
          on_close: Optional[Callable[..., Any]] = None,
          transport: str = "system",
          transport_options: Optional[Dict[str, Any]] = None,
@@ -98,7 +116,7 @@ class GenericDriver(Driver, BaseGenericDriver):
          return prompt

      @timeout_modifier
-     def _send_command(
+     def _send_command(  # pylint: disable=R0917
          self,
          command: str,
          strip_prompt: bool = True,
@@ -191,7 +209,7 @@ class GenericDriver(Driver, BaseGenericDriver):
          )
          return response

-     def send_commands(
+     def send_commands(  # pylint: disable=R0917
          self,
          commands: List[str],
          *,
@@ -467,7 +485,7 @@ class GenericDriver(Driver, BaseGenericDriver):
              raw_response=raw_response, processed_response=processed_response, response=response
          )

-     def read_callback(
+     def read_callback(  # pylint: disable=R0917
          self,
          callbacks: List["ReadCallback"],
          initial_input: Optional[str] = None,
@@ -577,7 +595,8 @@ class GenericDriver(Driver, BaseGenericDriver):
              _run_callback = callback.check(read_output=read_output)

              if (
-                 callback.only_once is True
+                 _run_callback is True
+                 and callback.only_once is True
                  and callback._triggered is True  # pylint: disable=W0212
              ):
                  self.logger.warning(
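Note: both GenericDriver and AsyncGenericDriver now default on_open to the new generic_on_open shown above, which simply calls get_prompt() once to drain the initial login/banner output; passing your own callable (or None) overrides the default. A brief sketch of the sync driver with the new default in place; the host and credentials are placeholders:

from scrapli.driver.generic import GenericDriver

conn = GenericDriver(
    host="192.0.2.1",            # placeholder address
    auth_username="user",
    auth_password="pass",
    auth_strict_key=False,
    # on_open=my_on_open,        # optional: override the new generic_on_open default
)
conn.open()                      # initial banner is drained via get_prompt()
print(conn.get_prompt())
conn.close()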
scrapli/driver/network/__init__.py CHANGED
@@ -1,4 +1,5 @@
  """scrapli.driver.network"""
+
  from scrapli.driver.network.async_driver import AsyncNetworkDriver
  from scrapli.driver.network.sync_driver import NetworkDriver

scrapli/driver/network/async_driver.py CHANGED
@@ -1,4 +1,5 @@
  """scrapli.driver.network.async_driver"""
+
  from collections import defaultdict
  from io import BytesIO
  from typing import Any, Callable, Dict, List, Optional, Tuple, Union
@@ -10,7 +11,7 @@ from scrapli.response import MultiResponse, Response


  class AsyncNetworkDriver(AsyncGenericDriver, BaseNetworkDriver):
-     def __init__(
+     def __init__(  # pylint: disable=R0917
          self,
          host: str,
          privilege_levels: Dict[str, PrivilegeLevel],
scrapli/driver/network/base_driver.py CHANGED
@@ -1,4 +1,5 @@
  """scrapli.driver.network.base_driver"""
+
  import re
  from collections import defaultdict
  from datetime import datetime
@@ -29,7 +30,7 @@ class PrivilegeLevel:
          "not_contains",
      )

-     def __init__(
+     def __init__(  # pylint: disable=R0917
          self,
          pattern: str,
          name: str,
scrapli/driver/network/sync_driver.py CHANGED
@@ -1,4 +1,5 @@
  """scrapli.driver.network.sync_driver"""
+
  from collections import defaultdict
  from io import BytesIO
  from typing import Any, Callable, Dict, List, Optional, Tuple, Union
@@ -10,7 +11,7 @@ from scrapli.response import MultiResponse, Response


  class NetworkDriver(GenericDriver, BaseNetworkDriver):
-     def __init__(
+     def __init__(  # pylint: disable=R0917
          self,
          host: str,
          privilege_levels: Dict[str, PrivilegeLevel],
scrapli/exceptions.py CHANGED
@@ -1,4 +1,5 @@
  """scrapli.exceptions"""
+
  from typing import Optional

scrapli/factory.py CHANGED
@@ -1,4 +1,5 @@
  """scrapli.factory"""
+
  import importlib
  from copy import deepcopy
  from io import BytesIO
@@ -26,10 +27,10 @@ from scrapli.exceptions import (
  )
  from scrapli.helper import format_user_warning
  from scrapli.logging import logger
- from scrapli.transport import ASYNCIO_TRANSPORTS
+ from scrapli.transport import ASYNCIO_TRANSPORTS, CORE_TRANSPORTS


- def _build_provided_kwargs_dict(  # pylint: disable=R0914
+ def _build_provided_kwargs_dict(  # pylint: disable=R0914,R0917
      host: str,
      privilege_levels: Optional[Dict[str, PrivilegeLevel]],
      default_desired_privilege_level: Optional[str],
@@ -336,7 +337,7 @@ class Scrapli(NetworkDriver):
          logger.info(msg)
          return final_driver, additional_kwargs

-     def __new__(  # pylint: disable=R0914
+     def __new__(  # pylint: disable=R0914,R0917
          cls,
          platform: str,
          host: str,
@@ -464,7 +465,7 @@ class Scrapli(NetworkDriver):
          """
          logger.debug("Scrapli factory initialized")

-         if transport in ASYNCIO_TRANSPORTS:
+         if transport in CORE_TRANSPORTS and transport in ASYNCIO_TRANSPORTS:
              raise ScrapliValueError("Use 'AsyncScrapli' if using an async transport!")

          if not isinstance(platform, str):
@@ -635,7 +636,7 @@ class AsyncScrapli(AsyncNetworkDriver):
          logger.info(msg)
          return final_driver, additional_kwargs

-     def __new__(  # pylint: disable=R0914
+     def __new__(  # pylint: disable=R0914,R0917
          cls,
          platform: str,
          host: str,
@@ -763,7 +764,7 @@ class AsyncScrapli(AsyncNetworkDriver):
          """
          logger.debug("AsyncScrapli factory initialized")

-         if transport not in ASYNCIO_TRANSPORTS:
+         if transport in CORE_TRANSPORTS and transport not in ASYNCIO_TRANSPORTS:
              raise ScrapliValueError("Use 'Scrapli' if using a synchronous transport!")

          if not isinstance(platform, str):
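Note: the factory changes above import CORE_TRANSPORTS alongside ASYNCIO_TRANSPORTS and only apply the sync/async guard when the requested transport is one of scrapli's core transports, so custom or third-party transport plugins are no longer rejected outright by Scrapli/AsyncScrapli. A hedged sketch of the resulting behavior; the hostname and credentials are placeholders:

from scrapli import AsyncScrapli, Scrapli

# still rejected: "asyncssh" is a core async transport used with the sync factory
# Scrapli(platform="cisco_iosxe", host="192.0.2.1", transport="asyncssh")  # ScrapliValueError

# still rejected: "system" is a core sync transport used with the async factory
# AsyncScrapli(platform="cisco_iosxe", host="192.0.2.1", transport="system")  # ScrapliValueError

# a transport name outside CORE_TRANSPORTS (e.g. a third-party plugin) is no longer
# blocked by this check; it still has to be installed and resolvable to actually open
conn = Scrapli(
    platform="cisco_iosxe",
    host="192.0.2.1",
    auth_username="user",
    auth_password="pass",
    auth_strict_key=False,
    transport="system",
)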
scrapli/helper.py CHANGED
@@ -1,15 +1,16 @@
  """scrapli.helper"""
+
  import importlib
  import importlib.resources
  import sys
  import urllib.request
- from io import BytesIO, TextIOWrapper
+ from io import BufferedReader, BytesIO, TextIOWrapper
  from pathlib import Path
  from shutil import get_terminal_size
  from typing import Any, Dict, List, Optional, TextIO, Tuple, Union
  from warnings import warn

- from scrapli.exceptions import ScrapliValueError
+ from scrapli.exceptions import ScrapliException, ScrapliValueError
  from scrapli.logging import logger
  from scrapli.settings import Settings

@@ -18,7 +19,14 @@ def _textfsm_get_template_directory() -> str:
      if sys.version_info >= (3, 9):
          return f"{importlib.resources.files('ntc_templates')}/templates"

-     with importlib.resources.path("ntc_templates", "templates") as path:
+     if sys.version_info >= (3, 11):
+         # https://docs.python.org/3/library/importlib.resources.html#importlib.resources.path
+         with importlib.resources.as_file(
+             importlib.resources.files("ntc_templates").joinpath("templates")
+         ) as path:
+             return str(path)
+
+     with importlib.resources.path("ntc_templates", "templates") as path:  # pylint: disable=W4902
          return str(path)


@@ -89,7 +97,7 @@ def _textfsm_to_dict(


  def textfsm_parse(
-     template: Union[str, TextIOWrapper], output: str, to_dict: bool = True
+     template: Union[str, TextIOWrapper], output: str, to_dict: bool = True, raise_err: bool = False
  ) -> Union[List[Any], Dict[str, Any]]:
      """
      Parse output with TextFSM and ntc-templates, try to return structured output
@@ -99,16 +107,20 @@ def textfsm_parse(
          output: unstructured output from device to parse
          to_dict: convert textfsm output from list of lists to list of dicts -- basically create dict
              from header and row data so it is easier to read/parse the output
+         raise_err: exceptions in the textfsm parser will raised for the caller to handle

      Returns:
          output: structured data

      Raises:
-         N/A
+         ScrapliException: If raise_err is set and a textfsm parsing error occurs, raises from the
+             originating textfsm.parser.TextFSMError exception.

      """
      import textfsm  # pylint: disable=C0415

+     template_file: Union[BufferedReader, TextIOWrapper]
+
      if not isinstance(template, TextIOWrapper):
          if template.startswith("http://") or template.startswith("https://"):
              with urllib.request.urlopen(template) as response:
@@ -117,7 +129,7 @@ def textfsm_parse(
                  encoding=response.headers.get_content_charset(),
              )
          else:
-             template_file = TextIOWrapper(open(template, mode="rb"))  # pylint: disable=R1732
+             template_file = open(template, mode="rb")  # pylint: disable=R1732
      else:
          template_file = template
      re_table = textfsm.TextFSM(template_file)
@@ -128,8 +140,10 @@ def textfsm_parse(
              structured_output=structured_output, header=re_table.header
          )
          return structured_output
-     except textfsm.parser.TextFSMError:
+     except textfsm.parser.TextFSMError as exc:
          logger.warning("failed to parse data with textfsm")
+         if raise_err:
+             raise ScrapliException(exc) from exc
      return []

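Note: textfsm_parse gains a raise_err flag; parse failures are still logged, but with raise_err=True the underlying TextFSMError is re-raised as a ScrapliException instead of being swallowed into an empty list. A small sketch; the template path and captured output are placeholders:

from scrapli.exceptions import ScrapliException
from scrapli.helper import textfsm_parse

raw_output = "..."  # previously captured CLI output (placeholder)

try:
    structured = textfsm_parse(
        template="templates/cisco_nxos_show_version.textfsm",  # placeholder path
        output=raw_output,
        to_dict=True,
        raise_err=True,  # new: surface TextFSMError as ScrapliException
    )
except ScrapliException as exc:
    print(f"textfsm parsing failed: {exc}")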
scrapli/logging.py CHANGED
@@ -1,5 +1,4 @@
  """scrapli.logging"""
- from ast import literal_eval

  # slightly irritating renaming to prevent a cyclic lookup in griffe for mkdocstrings
  from logging import FileHandler as FileHandler_
@@ -222,13 +221,13 @@ class ScrapliFileHandler(FileHandler_):
              # no message in the buffer, set the current record to the _record_buf
              self._record_buf = record
              # get the payload of the message after "read: " and re-convert it to bytes
-             self._record_msg_buf = literal_eval(record.msg[self._read_msg_prefix_len :])  # noqa
+             self._record_msg_buf = record.msg[self._read_msg_prefix_len :].encode()
              return

          # if we get here we know we are getting subsequent read messages we want to buffer -- the
          # log record data will all be the same, its just the payload that will be new, so add that
          # current payload to the _record_msg_buf buffer
-         self._record_msg_buf += literal_eval(record.msg[self._read_msg_prefix_len :])  # noqa
+         self._record_msg_buf += record.msg[self._read_msg_prefix_len :].encode()


  def get_instance_logger(
scrapli/response.py CHANGED
@@ -1,4 +1,5 @@
  """scrapli.response"""
+
  from collections import UserList
  from datetime import datetime
  from io import TextIOWrapper
@@ -9,7 +10,7 @@ from scrapli.helper import _textfsm_get_template, genie_parse, textfsm_parse, tt


  class Response:
-     def __init__(
+     def __init__(  # pylint: disable=R0917
          self,
          host: str,
          channel_input: str,
@@ -141,7 +142,10 @@ class Response:
          self.failed = False

      def textfsm_parse_output(
-         self, template: Union[str, TextIO, None] = None, to_dict: bool = True
+         self,
+         template: Union[str, TextIO, None] = None,
+         to_dict: bool = True,
+         raise_err: bool = False,
      ) -> Union[Dict[str, Any], List[Any]]:
          """
          Parse results with textfsm, always return structured data
@@ -152,6 +156,7 @@ class Response:
              template: string path to textfsm template or opened textfsm template file
              to_dict: convert textfsm output from list of lists to list of dicts -- basically create
                  dict from header and row data so it is easier to read/parse the output
+             raise_err: exceptions in the textfsm parser will raised for the caller to handle

          Returns:
              structured_result: empty list or parsed data from textfsm
@@ -169,7 +174,12 @@ class Response:
              return []

          template = cast(Union[str, TextIOWrapper], template)
-         return textfsm_parse(template=template, output=self.result, to_dict=to_dict) or []
+         return (
+             textfsm_parse(
+                 template=template, output=self.result, to_dict=to_dict, raise_err=raise_err
+             )
+             or []
+         )

      def genie_parse_output(self) -> Union[Dict[str, Any], List[Any]]:
          """
scrapli/ssh_config.py CHANGED
@@ -1,4 +1,5 @@
  """scrapli.ssh_config"""
+
  import base64
  import hmac
  import os
scrapli/transport/base/__init__.py CHANGED
@@ -1,4 +1,5 @@
  """scrapli.transport.base"""
+
  from scrapli.transport.base.async_transport import AsyncTransport
  from scrapli.transport.base.base_transport import BasePluginTransportArgs, BaseTransportArgs
  from scrapli.transport.base.sync_transport import Transport
scrapli/transport/base/async_transport.py CHANGED
@@ -1,4 +1,5 @@
  """scrapli.transport.async_transport"""
+
  from abc import ABC, abstractmethod

  from scrapli.transport.base.base_transport import BaseTransport
scrapli/transport/base/base_socket.py CHANGED
@@ -1,4 +1,5 @@
  """scrapli.transport.base.base_socket"""
+
  import socket
  from contextlib import suppress
  from typing import Optional, Set
scrapli/transport/base/base_transport.py CHANGED
@@ -1,4 +1,5 @@
  """scrapli.transport.base_transport"""
+
  from abc import ABC, abstractmethod
  from dataclasses import dataclass
  from typing import Any, Dict
scrapli/transport/base/sync_transport.py CHANGED
@@ -1,4 +1,5 @@
  """scrapli.transport.base_transport"""
+
  from abc import ABC, abstractmethod

  from scrapli.transport.base.base_transport import BaseTransport
scrapli/transport/plugins/asyncssh/transport.py CHANGED
@@ -1,4 +1,5 @@
  """scrapli.transport.plugins.asyncssh.transport"""
+
  import asyncio
  from contextlib import suppress
  from dataclasses import dataclass
@@ -259,6 +260,9 @@ class AsyncsshTransport(AsyncTransport):
          if not self.stdout:
              raise ScrapliConnectionNotOpened

+         if self.stdout.at_eof():
+             raise ScrapliConnectionError("transport at EOF; no more data to be read")
+
          try:
              buf: bytes = await self.stdout.read(65535)
          except ConnectionLost as exc: