dissect.target 3.16.dev44__py3-none-any.whl → 3.17__py3-none-any.whl

Files changed (63)
  1. dissect/target/container.py +1 -0
  2. dissect/target/containers/fortifw.py +190 -0
  3. dissect/target/filesystem.py +192 -67
  4. dissect/target/filesystems/dir.py +14 -1
  5. dissect/target/filesystems/overlay.py +103 -0
  6. dissect/target/helpers/compat/path_common.py +19 -5
  7. dissect/target/helpers/configutil.py +30 -7
  8. dissect/target/helpers/network_managers.py +101 -73
  9. dissect/target/helpers/record_modifier.py +4 -1
  10. dissect/target/loader.py +3 -1
  11. dissect/target/loaders/dir.py +23 -5
  12. dissect/target/loaders/itunes.py +3 -3
  13. dissect/target/loaders/mqtt.py +309 -0
  14. dissect/target/loaders/overlay.py +31 -0
  15. dissect/target/loaders/target.py +12 -9
  16. dissect/target/loaders/vb.py +2 -2
  17. dissect/target/loaders/velociraptor.py +5 -4
  18. dissect/target/plugin.py +1 -1
  19. dissect/target/plugins/apps/browser/brave.py +10 -0
  20. dissect/target/plugins/apps/browser/browser.py +43 -0
  21. dissect/target/plugins/apps/browser/chrome.py +10 -0
  22. dissect/target/plugins/apps/browser/chromium.py +234 -12
  23. dissect/target/plugins/apps/browser/edge.py +10 -0
  24. dissect/target/plugins/apps/browser/firefox.py +512 -19
  25. dissect/target/plugins/apps/browser/iexplore.py +2 -2
  26. dissect/target/plugins/apps/container/docker.py +24 -4
  27. dissect/target/plugins/apps/ssh/openssh.py +4 -0
  28. dissect/target/plugins/apps/ssh/putty.py +45 -14
  29. dissect/target/plugins/apps/ssh/ssh.py +40 -0
  30. dissect/target/plugins/apps/vpn/openvpn.py +115 -93
  31. dissect/target/plugins/child/docker.py +24 -0
  32. dissect/target/plugins/filesystem/ntfs/mft.py +1 -1
  33. dissect/target/plugins/filesystem/walkfs.py +2 -2
  34. dissect/target/plugins/general/users.py +6 -0
  35. dissect/target/plugins/os/unix/bsd/__init__.py +0 -0
  36. dissect/target/plugins/os/unix/esxi/_os.py +2 -2
  37. dissect/target/plugins/os/unix/linux/debian/vyos/_os.py +1 -1
  38. dissect/target/plugins/os/unix/linux/fortios/_os.py +9 -9
  39. dissect/target/plugins/os/unix/linux/services.py +1 -0
  40. dissect/target/plugins/os/unix/linux/sockets.py +2 -2
  41. dissect/target/plugins/os/unix/log/messages.py +53 -8
  42. dissect/target/plugins/os/windows/_os.py +10 -1
  43. dissect/target/plugins/os/windows/catroot.py +178 -63
  44. dissect/target/plugins/os/windows/credhist.py +210 -0
  45. dissect/target/plugins/os/windows/dpapi/crypto.py +12 -1
  46. dissect/target/plugins/os/windows/dpapi/dpapi.py +62 -7
  47. dissect/target/plugins/os/windows/dpapi/master_key.py +22 -2
  48. dissect/target/plugins/os/windows/regf/runkeys.py +6 -4
  49. dissect/target/plugins/os/windows/sam.py +10 -1
  50. dissect/target/target.py +1 -1
  51. dissect/target/tools/dump/run.py +23 -28
  52. dissect/target/tools/dump/state.py +11 -8
  53. dissect/target/tools/dump/utils.py +5 -4
  54. dissect/target/tools/query.py +3 -15
  55. dissect/target/tools/shell.py +48 -8
  56. dissect/target/tools/utils.py +23 -0
  57. {dissect.target-3.16.dev44.dist-info → dissect.target-3.17.dist-info}/METADATA +7 -3
  58. {dissect.target-3.16.dev44.dist-info → dissect.target-3.17.dist-info}/RECORD +63 -56
  59. {dissect.target-3.16.dev44.dist-info → dissect.target-3.17.dist-info}/WHEEL +1 -1
  60. {dissect.target-3.16.dev44.dist-info → dissect.target-3.17.dist-info}/COPYRIGHT +0 -0
  61. {dissect.target-3.16.dev44.dist-info → dissect.target-3.17.dist-info}/LICENSE +0 -0
  62. {dissect.target-3.16.dev44.dist-info → dissect.target-3.17.dist-info}/entry_points.txt +0 -0
  63. {dissect.target-3.16.dev44.dist-info → dissect.target-3.17.dist-info}/top_level.txt +0 -0
dissect/target/plugins/os/windows/dpapi/crypto.py CHANGED
@@ -4,7 +4,12 @@ import hashlib
 import hmac
 from typing import Optional, Union
 
-from Crypto.Cipher import AES, ARC4
+try:
+    from Crypto.Cipher import AES, ARC4
+
+    HAS_CRYPTO = True
+except ImportError:
+    HAS_CRYPTO = False
 
 CIPHER_ALGORITHMS: dict[Union[int, str], CipherAlgorithm] = {}
 HASH_ALGORITHMS: dict[Union[int, str], HashAlgorithm] = {}
@@ -62,6 +67,9 @@ class _AES(CipherAlgorithm):
     block_length = 128 // 8
 
     def decrypt(self, data: bytes, key: bytes, iv: Optional[bytes] = None) -> bytes:
+        if not HAS_CRYPTO:
+            raise RuntimeError("Missing pycryptodome dependency")
+
         cipher = AES.new(
             key[: self.key_length], mode=AES.MODE_CBC, IV=iv[: self.iv_length] if iv else b"\x00" * self.iv_length
         )
@@ -93,6 +101,9 @@ class _RC4(CipherAlgorithm):
     block_length = 1 // 8
 
     def decrypt(self, data: bytes, key: bytes, iv: Optional[bytes] = None) -> bytes:
+        if not HAS_CRYPTO:
+            raise RuntimeError("Missing pycryptodome dependency")
+
         cipher = ARC4.new(key[: self.key_length])
         return cipher.decrypt(data)
 
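The recurring pattern in this release is making pycryptodome an optional dependency: the import is wrapped in try/except, a HAS_CRYPTO flag records the outcome, and the error is only raised at the point of use. A minimal standalone sketch of that guard (the helper name decrypt_aes_cbc is illustrative, not a dissect.target API):

    # Minimal sketch of the optional-dependency guard used throughout this release.
    try:
        from Crypto.Cipher import AES  # provided by pycryptodome

        HAS_CRYPTO = True
    except ImportError:
        HAS_CRYPTO = False


    def decrypt_aes_cbc(data: bytes, key: bytes, iv: bytes) -> bytes:
        if not HAS_CRYPTO:
            # Fail only when the capability is actually needed,
            # so the module itself can still be imported without pycryptodome.
            raise RuntimeError("Missing pycryptodome dependency")
        return AES.new(key, mode=AES.MODE_CBC, IV=iv).decrypt(data)

The advantage over a bare top-level import is that plugins can still be discovered and report a clear UnsupportedPluginError instead of crashing at import time.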
dissect/target/plugins/os/windows/dpapi/dpapi.py CHANGED
@@ -1,21 +1,27 @@
 import hashlib
 import re
-from functools import cached_property
+from functools import cache, cached_property
 from pathlib import Path
 
-from Crypto.Cipher import AES
+try:
+    from Crypto.Cipher import AES
+
+    HAS_CRYPTO = True
+except ImportError:
+    HAS_CRYPTO = False
+
 
-from dissect.target import Target
 from dissect.target.exceptions import UnsupportedPluginError
+from dissect.target.helpers import keychain
 from dissect.target.plugin import InternalPlugin
 from dissect.target.plugins.os.windows.dpapi.blob import Blob as DPAPIBlob
 from dissect.target.plugins.os.windows.dpapi.master_key import CredSystem, MasterKeyFile
+from dissect.target.target import Target
 
 
 class DPAPIPlugin(InternalPlugin):
     __namespace__ = "dpapi"
 
-    # This matches master key file names
     MASTER_KEY_REGEX = re.compile("^[0-9a-f]{8}(?:-[0-9a-f]{4}){3}-[0-9a-f]{12}$")
 
     SECURITY_POLICY_KEY = "HKEY_LOCAL_MACHINE\\SECURITY\\Policy"
@@ -25,11 +31,26 @@ class DPAPIPlugin(InternalPlugin):
 
     def __init__(self, target: Target):
         super().__init__(target)
+        self.keychain = cache(self.keychain)
 
     def check_compatible(self) -> None:
+        if not HAS_CRYPTO:
+            raise UnsupportedPluginError("Missing pycryptodome dependency")
+
         if not list(self.target.registry.keys(self.SYSTEM_KEY)):
             raise UnsupportedPluginError(f"Registry key not found: {self.SYSTEM_KEY}")
 
+    def keychain(self) -> set:
+        passwords = set()
+
+        for key in keychain.get_keys_for_provider("user") + keychain.get_keys_without_provider():
+            if key.key_type == keychain.KeyType.PASSPHRASE:
+                passwords.add(key.value)
+
+        # It is possible to encrypt using an empty passphrase.
+        passwords.add("")
+        return passwords
+
     @cached_property
     def syskey(self) -> bytes:
         lsa = self.target.registry.key(self.SYSTEM_KEY)
@@ -84,6 +105,10 @@ class DPAPIPlugin(InternalPlugin):
 
         return result
 
+    @cached_property
+    def _users(self) -> dict[str, dict[str, str]]:
+        return {u.name: {"sid": u.sid} for u in self.target.users()}
+
    def _load_master_keys_from_path(self, username: str, path: Path) -> dict[str, MasterKeyFile]:
        if not path.exists():
            return {}
@@ -104,21 +129,51 @@
                     if not mkf.decrypted:
                         raise Exception("Failed to decrypt System master key")
 
+                if user := self._users.get(username):
+                    for mk_pass in self.keychain():
+                        if mkf.decrypt_with_password(user["sid"], mk_pass):
+                            break
+
+                        try:
+                            if mkf.decrypt_with_hash(user["sid"], bytes.fromhex(mk_pass)) is True:
+                                break
+                        except ValueError:
+                            pass
+
+                    if not mkf.decrypted:
+                        self.target.log.warning("Could not decrypt DPAPI master key for username '%s'", username)
+
                 result[file.name] = mkf
 
         return result
 
     def decrypt_system_blob(self, data: bytes) -> bytes:
+        """Decrypt the given bytes using the System master key."""
+        return self.decrypt_user_blob(data, self.SYSTEM_USERNAME)
+
+    def decrypt_user_blob(self, data: bytes, username: str) -> bytes:
+        """Decrypt the given bytes using the master key of the given user."""
         blob = DPAPIBlob(data)
 
-        if not (mk := self.master_keys.get(self.SYSTEM_USERNAME, {}).get(blob.guid)):
-            raise ValueError("Blob UUID is unknown to system master keys")
+        if not (mk := self.master_keys.get(username, {}).get(blob.guid)):
+            raise ValueError(f"Blob UUID is unknown to {username} master keys")
 
         if not blob.decrypt(mk.key):
-            raise ValueError("Failed to decrypt system blob")
+            raise ValueError(f"Failed to decrypt blob for user {username}")
 
         return blob.clear_text
 
+    def decrypt_blob(self, data: bytes) -> bytes:
+        """Attempt to decrypt the given bytes using any of the available master keys."""
+        blob = DPAPIBlob(data)
+
+        for user in self.master_keys:
+            for mk in self.master_keys[user].values():
+                if blob.decrypt(mk.key):
+                    return blob.clear_text
+
+        raise ValueError("Failed to decrypt blob")
+
 
 def _decrypt_aes(data: bytes, key: bytes) -> bytes:
     ctx = hashlib.sha256()
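With these changes the DPAPI plugin can decrypt blobs for regular users (using passphrases or hashes registered in the keychain) as well as SYSTEM, and can brute-force a blob against every loaded master key. A rough usage sketch, assuming the internal plugin is reachable as target.dpapi on an opened target; the image path and username are illustrative and blob_bytes stands for a raw DPAPI blob obtained from some other artefact:

    from dissect.target import Target

    t = Target.open("evidence/windows.vmdk")  # hypothetical image path
    blob_bytes = b"..."  # raw DPAPI blob, elided here

    # New in this release: per-user decryption and a "try every master key" fallback.
    system_secret = t.dpapi.decrypt_system_blob(blob_bytes)
    user_secret = t.dpapi.decrypt_user_blob(blob_bytes, "alice")
    any_secret = t.dpapi.decrypt_blob(blob_bytes)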
dissect/target/plugins/os/windows/dpapi/master_key.py CHANGED
@@ -1,4 +1,5 @@
 import hashlib
+import logging
 from io import BytesIO
 from typing import BinaryIO
 
@@ -11,6 +12,16 @@ from dissect.target.plugins.os.windows.dpapi.crypto import (
     dpapi_hmac,
 )
 
+try:
+    from Crypto.Hash import MD4
+
+    HAS_CRYPTO = True
+except ImportError:
+    HAS_CRYPTO = False
+
+log = logging.getLogger(__name__)
+
+
 master_key_def = """
 struct DomainKey {
     DWORD dwVersion;
@@ -85,9 +96,18 @@ class MasterKey:
 
     def decrypt_with_password(self, user_sid: str, pwd: str) -> bool:
         """Decrypts the master key with the given user's password and SID."""
+        pwd = pwd.encode("utf-16-le")
+
         for algo in ["sha1", "md4"]:
-            pwd_hash = hashlib.new(algo, pwd.encode("utf-16-le")).digest()
-            self.decrypt_with_key(derive_password_hash(pwd_hash, user_sid))
+            if algo in hashlib.algorithms_available:
+                pwd_hash = hashlib.new(algo, pwd)
+            elif HAS_CRYPTO and algo == "md4":
+                pwd_hash = MD4.new(pwd)
+            else:
+                log.warning("No cryptography capabilities for algorithm %s", algo)
+                continue
+
+            self.decrypt_with_key(derive_password_hash(pwd_hash.digest(), user_sid))
             if self.decrypted:
                 break
 
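The rewritten decrypt_with_password exists because OpenSSL 3 builds often ship a hashlib without MD4, which the NT-hash derivation needs; the code now prefers hashlib and falls back to pycryptodome's MD4, logging when neither is available. The same selection logic in isolation (the helper name password_digest is illustrative):

    import hashlib

    try:
        from Crypto.Hash import MD4  # pycryptodome fallback for OpenSSL builds without MD4

        HAS_CRYPTO = True
    except ImportError:
        HAS_CRYPTO = False


    def password_digest(password: str, algo: str) -> bytes:
        """Hash a UTF-16-LE encoded password with sha1 or md4, whichever backend is available."""
        data = password.encode("utf-16-le")
        if algo in hashlib.algorithms_available:
            return hashlib.new(algo, data).digest()
        if HAS_CRYPTO and algo == "md4":
            return MD4.new(data).digest()
        raise RuntimeError(f"No cryptography capabilities for algorithm {algo}")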
dissect/target/plugins/os/windows/regf/runkeys.py CHANGED
@@ -1,3 +1,5 @@
+from typing import Iterator
+
 from dissect.target.exceptions import UnsupportedPluginError
 from dissect.target.helpers.descriptor_extensions import (
     RegistryRecordDescriptorExtension,
@@ -11,7 +13,7 @@ RunKeyRecord = create_extended_descriptor([RegistryRecordDescriptorExtension, Us
     [
         ("datetime", "ts"),
         ("wstring", "name"),
-        ("string", "path"),
+        ("command", "command"),
         ("string", "key"),
     ],
 )
@@ -48,7 +50,7 @@ class RunKeysPlugin(Plugin):
             raise UnsupportedPluginError("No registry run key found")
 
     @export(record=RunKeyRecord)
-    def runkeys(self):
+    def runkeys(self) -> Iterator[RunKeyRecord]:
         """Iterate various run key locations. See source for all locations.
 
         Run keys (Run and RunOnce) are registry keys that make a program run when a user logs on. a Run key runs every
@@ -63,7 +65,7 @@ class RunKeysPlugin(Plugin):
            domain (string): The target domain.
            ts (datetime): The registry key last modified timestamp.
            name (string): The run key name.
-           path (string): The run key path.
+           command (command): The run key command.
            key (string): The source key for this run key.
        """
        for key in self.KEYS:
@@ -73,7 +75,7 @@ class RunKeysPlugin(Plugin):
                    yield RunKeyRecord(
                        ts=r.ts,
                        name=entry.name,
-                       path=entry.value,
+                       command=entry.value,
                        key=key,
                        _target=self.target,
                        _key=r,
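The record field rename from path to command (now typed as a command field) affects anything consuming runkeys records. A small consumption sketch, assuming an already opened target; the image path is illustrative:

    from dissect.target import Target

    t = Target.open("evidence/windows.vmdk")  # hypothetical image path

    # Records now carry a `command` field instead of `path`.
    for record in t.runkeys():
        print(record.ts, record.name, record.command, record.key)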
dissect/target/plugins/os/windows/sam.py CHANGED
@@ -2,7 +2,13 @@ from hashlib import md5, sha256
 from struct import pack
 from typing import Iterator
 
-from Crypto.Cipher import AES, ARC4, DES
+try:
+    from Crypto.Cipher import AES, ARC4, DES
+
+    HAS_CRYPTO = True
+except ImportError:
+    HAS_CRYPTO = False
+
 from dissect import cstruct
 from dissect.util import ts
 
@@ -295,6 +301,9 @@ class SamPlugin(Plugin):
     SAM_KEY = "HKEY_LOCAL_MACHINE\\SAM\\SAM\\Domains\\Account"
 
     def check_compatible(self) -> None:
+        if not HAS_CRYPTO:
+            raise UnsupportedPluginError("Missing pycryptodome dependency")
+
         if not len(list(self.target.registry.keys(self.SAM_KEY))) > 0:
             raise UnsupportedPluginError(f"Registry key not found: {self.SAM_KEY}")
 
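The SAM plugin now degrades gracefully when pycryptodome is absent: check_compatible reports the missing dependency instead of the module failing at import. A rough usage sketch, assuming the plugin's exported sam function on an opened target (image path illustrative):

    from dissect.target import Target

    t = Target.open("evidence/windows.vmdk")  # hypothetical image path

    # Yields SAM account records when the registry hives are present and
    # pycryptodome is installed; otherwise check_compatible raises.
    for record in t.sam():
        print(record)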
dissect/target/target.py CHANGED
@@ -280,7 +280,7 @@ class Target:
                 continue
 
             getlogger(entry).debug("Attempting to use loader: %s", loader_cls)
-            for sub_entry in loader_cls.find_all(entry):
+            for sub_entry in loader_cls.find_all(entry, parsed_path=parsed_path):
                 try:
                     ldr = loader_cls(sub_entry, parsed_path=parsed_path)
                 except Exception as e:
dissect/target/tools/dump/run.py CHANGED
@@ -7,7 +7,7 @@ import sys
 from collections import deque
 from dataclasses import dataclass
 from pathlib import Path
-from typing import Any, Generator, Iterable, List, Optional, Tuple
+from typing import Any, Iterable, Iterator, Optional
 
 import structlog
 from flow.record import Record
@@ -25,10 +25,12 @@ from dissect.target.tools.dump.utils import (
     Compression,
     Serialization,
     cached_sink_writers,
-    get_nested_attr,
 )
 from dissect.target.tools.utils import (
+    PluginFunction,
     configure_generic_arguments,
+    execute_function_on_target,
+    find_and_filter_plugins,
     process_generic_arguments,
 )
 
@@ -44,13 +46,13 @@
     sink_path: Optional[Path] = None
 
 
-def get_targets(targets: List[str]) -> Generator[Target, None, None]:
+def get_targets(targets: list[str]) -> Iterator[Target]:
     """Return a generator with `Target` objects for provided paths"""
     for target in Target.open_all(targets):
         yield target
 
 
-def execute_function(target: Target, function: str) -> Generator[TargetRecordDescriptor, None, None]:
+def execute_function(target: Target, function: PluginFunction) -> TargetRecordDescriptor:
     """
     Execute function `function` on provided target `target` and return a generator
     with the records produced.
@@ -62,7 +64,7 @@ def execute_function(target: Target, function: str) -> Generator[TargetRecordDes
     local_log.debug("Function execution")
 
     try:
-        target_attr = get_nested_attr(target, function)
+        output_type, target_attr, _ = execute_function_on_target(target, function)
     except UnsupportedPluginError:
         local_log.error("Function is not supported for target", exc_info=True)
         return
@@ -70,15 +72,8 @@
         local_log.error("Plugin error while executing function for target", exc_info=True)
         return
 
-    # skip non-record outputs
-    try:
-        output = getattr(target_attr, "__output__", "default") if hasattr(target_attr, "__output__") else None
-    except PluginError as e:
-        local_log.error("Plugin error while fetching an attribute", exc_info=e)
-        return
-
-    if output != "record":
-        local_log.warn("Output format is not supported", output=output)
+    if output_type != "record":
+        local_log.warn("Output format is not supported", output=output_type)
         return
 
     # no support for function-specific arguments
@@ -94,9 +89,9 @@
 
 def produce_target_func_pairs(
     targets: Iterable[Target],
-    functions: List[str],
+    functions: str,
     state: DumpState,
-) -> Generator[Tuple[Target, str], None, None]:
+) -> Iterator[tuple[Target, PluginFunction]]:
     """
     Return a generator with target and function pairs for execution.
 
@@ -107,20 +102,20 @@
     pairs_to_skip.update((str(sink.target_path), sink.func) for sink in state.finished_sinks)
 
     for target in targets:
-        for func in functions:
-            if state and (target.path, func) in pairs_to_skip:
+        for func_def in find_and_filter_plugins(target, functions):
+            if state and (target.path, func_def.name) in pairs_to_skip:
                 log.info(
                     "Skipping target/func pair since its marked as done in provided state",
                     target=target.path,
-                    func=func,
+                    func=func_def.name,
                     state=state.path,
                 )
                 continue
-            yield (target, func)
-            state.mark_as_finished(target, func)
+            yield (target, func_def)
+            state.mark_as_finished(target, func_def.name)
 
 
-def execute_functions(target_func_stream: Iterable[Tuple[Target, str]]) -> Generator[RecordStreamElement, None, None]:
+def execute_functions(target_func_stream: Iterable[tuple[Target, str]]) -> Iterable[RecordStreamElement]:
     """
     Execute a function on a target for target / function pairs in the stream.
 
@@ -131,7 +126,7 @@ def execute_functions(target_func_stream: Iterable[Tuple[Target, str]]) -> Gener
         yield RecordStreamElement(target=target, func=func, record=record)
 
 
-def log_progress(stream: Iterable[Any], step_size: int = 1000) -> Generator[Any, None, None]:
+def log_progress(stream: Iterable[Any], step_size: int = 1000) -> Iterable[Any]:
     """
     Log a number of items that went though the generator stream
     after every N element (N is configured in `step_size`).
@@ -155,7 +150,7 @@ def log_progress(stream: Iterable[Any], step_size: int = 1000) -> Generator[Any,
 def sink_records(
     record_stream: Iterable[RecordStreamElement],
     state: DumpState,
-) -> Generator[RecordStreamElement, None, None]:
+) -> Iterator[RecordStreamElement]:
     """
     Persist records from the stream into appropriate sinks, per serialization, compression and record type.
     """
@@ -168,7 +163,7 @@
 def persist_processing_state(
     record_stream: Iterable[RecordStreamElement],
     state: DumpState,
-) -> Generator[RecordStreamElement, None, None]:
+) -> Iterator[RecordStreamElement]:
     """
     Keep track of the pipeline state in a persistent state object.
     """
@@ -179,8 +174,8 @@
 
 
 def execute_pipeline(
-    targets: List[str],
-    functions: List[str],
+    targets: list[str],
+    functions: str,
     output_dir: Path,
     serialization: Serialization,
     compression: Optional[Compression] = None,
@@ -297,7 +292,7 @@ def main():
     try:
         execute_pipeline(
             targets=args.targets,
-            functions=args.function.split(","),
+            functions=args.function,
             output_dir=args.output,
             serialization=Serialization(args.serialization),
             compression=Compression(args.compression),
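Beyond routing function selection through find_and_filter_plugins, most of the churn in this file is typing modernisation: Generator[X, None, None] becomes Iterator[X] and List/Tuple become the built-in generics. A quick illustration of why the two annotations are interchangeable for a plain generator:

    from typing import Iterator


    def numbers(limit: int) -> Iterator[int]:
        # Equivalent to Generator[int, None, None]: this generator neither
        # receives sent values nor returns one, so Iterator[int] is enough.
        for i in range(limit):
            yield i


    print(list(numbers(3)))  # [0, 1, 2]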
dissect/target/tools/dump/state.py CHANGED
@@ -6,7 +6,7 @@ import json
 from contextlib import contextmanager
 from dataclasses import dataclass
 from pathlib import Path
-from typing import Any, Callable, Iterator, List, Optional, TextIO
+from typing import Any, Callable, Iterator, Optional, TextIO
 
 import structlog
 
@@ -35,17 +35,20 @@ class Sink:
     record_count: int = 0
     size_bytes: int = 0
 
+    def __post_init__(self):
+        self.func = getattr(self.func, "name", self.func)
+
 
 @dataclass
 class DumpState:
-    target_paths: List[str]
-    functions: List[str]
+    target_paths: list[str]
+    functions: list[str]
     serialization: str
     compression: str
     start_time: datetime.datetime
     last_update_time: datetime.datetime
 
-    sinks: List[Sink] = dataclasses.field(default_factory=list)
+    sinks: list[Sink] = dataclasses.field(default_factory=list)
 
     # Volatile properties
     output_dir: Optional[Path] = None
@@ -56,7 +59,7 @@ class DumpState:
         return sum(s.record_count for s in self.sinks)
 
     @property
-    def finished_sinks(self) -> List[Sink]:
+    def finished_sinks(self) -> list[Sink]:
         return [sink for sink in self.sinks if not sink.is_dirty]
 
     @property
@@ -178,7 +181,7 @@
         state.output_dir = output_dir
         return state
 
-    def get_invalid_sinks(self) -> List[Sink]:
+    def get_invalid_sinks(self) -> list[Sink]:
         """Return sinks that have a mismatch between recorded size and a real file size"""
         invalid_sinks = []
         for sink in self.sinks:
@@ -214,8 +217,8 @@
 def create_state(
     *,
     output_dir: Path,
-    target_paths: List[str],
-    functions: List[str],
+    target_paths: list[str],
+    functions: list[str],
     serialization: Serialization,
     compression: Compression = None,
 ) -> DumpState:
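Sink.func may now be handed a PluginFunction rather than a plain string, and __post_init__ normalises it to the function name so persisted state stays comparable across runs. The same dataclass normalisation trick in isolation (types here are illustrative, not the dissect.target classes):

    from dataclasses import dataclass


    @dataclass
    class NamedThing:
        name: str


    @dataclass
    class Sink:
        func: object  # may be a plain string or an object with a .name attribute

        def __post_init__(self):
            # Normalise right after field assignment so downstream code only sees strings.
            self.func = getattr(self.func, "name", self.func)


    print(Sink("users").func)              # users
    print(Sink(NamedThing("users")).func)  # users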
dissect/target/tools/dump/utils.py CHANGED
@@ -32,6 +32,7 @@ from flow.record.adapter.jsonfile import JsonfileWriter
 from flow.record.jsonpacker import JsonRecordPacker
 
 from dissect.target import Target
+from dissect.target.plugin import PluginFunction
 
 log = structlog.get_logger(__name__)
 
@@ -69,14 +70,14 @@ def get_nested_attr(obj: Any, nested_attr: str) -> Any:
 
 
 @lru_cache(maxsize=DEST_DIR_CACHE_SIZE)
-def get_sink_dir_by_target(target: Target, function: str) -> Path:
-    func_first_name, _, _ = function.partition(".")
+def get_sink_dir_by_target(target: Target, function: PluginFunction) -> Path:
+    func_first_name, _, _ = function.name.partition(".")
     return Path(target.name) / func_first_name
 
 
 @functools.lru_cache(maxsize=DEST_DIR_CACHE_SIZE)
-def get_sink_dir_by_func(target: Target, function: str) -> Path:
-    func_first_name, _, _ = function.partition(".")
+def get_sink_dir_by_func(target: Target, function: PluginFunction) -> Path:
+    func_first_name, _, _ = function.name.partition(".")
     return Path(func_first_name) / target.name
 
 
dissect/target/tools/query.py CHANGED
@@ -26,6 +26,7 @@ from dissect.target.tools.utils import (
     catch_sigpipe,
     configure_generic_arguments,
     execute_function_on_target,
+    find_and_filter_plugins,
     generate_argparse_for_bound_method,
     generate_argparse_for_plugin_class,
     generate_argparse_for_unbound_method,
@@ -172,8 +173,7 @@ def main():
     collected_plugins = {}
 
     if targets:
-        for target in targets:
-            plugin_target = Target.open(target)
+        for plugin_target in Target.open_all(targets, args.children):
             if isinstance(plugin_target._loader, ProxyLoader):
                 parser.error("can't list compatible plugins for remote targets.")
             funcs, _ = find_plugin_functions(plugin_target, args.list, compatibility=True, show_hidden=True)
@@ -270,25 +270,13 @@
         basic_entries = []
         yield_entries = []
 
-        # Keep a set of plugins that were already executed on the target.
-        executed_plugins = set()
-
         first_seen_output_type = default_output_type
         cli_params_unparsed = rest
 
-        func_defs, _ = find_plugin_functions(target, args.function, compatibility=False)
         excluded_funcs, _ = find_plugin_functions(target, args.excluded_functions, compatibility=False)
         excluded_func_paths = {excluded_func.path for excluded_func in excluded_funcs}
 
-        for func_def in func_defs:
-            if func_def.path in excluded_func_paths:
-                continue
-
-            # Avoid executing same plugin for multiple OSes (like hostname)
-            if func_def.name in executed_plugins:
-                continue
-            executed_plugins.add(func_def.name)
-
+        for func_def in find_and_filter_plugins(target, args.function, excluded_func_paths):
             # If the default type is record (meaning we skip everything else)
             # and actual output type is not record, continue.
             # We perform this check here because plugins that require output files/dirs
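target-query now walks targets with Target.open_all for --list as well, which also picks up child targets when requested, and the per-function exclusion/deduplication logic moved into the shared find_and_filter_plugins helper. A minimal sketch of iterating targets the same way; the paths are illustrative and the second argument mirrors the args.children flag seen above:

    from dissect.target import Target

    # Open several evidence containers and, when requested, their child targets
    # (for example VMs found inside a hypervisor image).
    for t in Target.open_all(["evidence/host1.vmx", "evidence/host2.E01"], True):
        print(t.path, t.os)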
dissect/target/tools/shell.py CHANGED
@@ -31,12 +31,13 @@ from dissect.target.exceptions import (
     RegistryValueNotFoundError,
     TargetError,
 )
-from dissect.target.filesystem import FilesystemEntry, RootFilesystemEntry
+from dissect.target.filesystem import FilesystemEntry, LayerFilesystemEntry
 from dissect.target.helpers import cyber, fsutil, regutil
 from dissect.target.plugin import arg
 from dissect.target.target import Target
 from dissect.target.tools.info import print_target_info
 from dissect.target.tools.utils import (
+    args_to_uri,
     catch_sigpipe,
     configure_generic_arguments,
     generate_argparse_for_bound_method,
@@ -468,7 +469,7 @@ class TargetCli(TargetCmd):
             # If we happen to scan an NTFS filesystem see if any of the
             # entries has an alternative data stream and also list them.
             entry = file_.get()
-            if isinstance(entry, RootFilesystemEntry):
+            if isinstance(entry, LayerFilesystemEntry):
                 if entry.entries.fs.__type__ == "ntfs":
                     attrs = entry.lattr()
                     for data_stream in attrs.DATA:
@@ -511,34 +512,66 @@
     @arg("-l", action="store_true")
     @arg("-a", "--all", action="store_true")  # ignored but included for proper argument parsing
     @arg("-h", "--human-readable", action="store_true")
+    @arg("-R", "--recursive", action="store_true", help="recursively list subdirectories encountered")
+    @arg("-c", action="store_true", dest="use_ctime", help="show time when file status was last changed")
+    @arg("-u", action="store_true", dest="use_atime", help="show time of last access")
     def cmd_ls(self, args: argparse.Namespace, stdout: TextIO) -> Optional[bool]:
         """list directory contents"""
 
         path = self.resolve_path(args.path)
 
+        if args.use_ctime and args.use_atime:
+            print("can't specify -c and -u at the same time")
+            return
+
         if not path or not path.exists():
             return
 
+        self._print_ls(args, path, 0, stdout)
+
+    def _print_ls(self, args: argparse.Namespace, path: fsutil.TargetPath, depth: int, stdout: TextIO) -> None:
+        path = self.resolve_path(path)
+        subdirs = []
+
         if path.is_dir():
             contents = self.scandir(path, color=True)
         elif path.is_file():
             contents = [(path, path.name)]
 
+        if depth > 0:
+            print(f"\n{str(path)}:", file=stdout)
+
         if not args.l:
-            print("\n".join([name for _, name in contents]), file=stdout)
+            for target_path, name in contents:
+                print(name, file=stdout)
+                if target_path.is_dir():
+                    subdirs.append(target_path)
         else:
             if len(contents) > 1:
                 print(f"total {len(contents)}", file=stdout)
             for target_path, name in contents:
-                self.print_extensive_file_stat(stdout=stdout, target_path=target_path, name=name)
+                self.print_extensive_file_stat(args=args, stdout=stdout, target_path=target_path, name=name)
+                if target_path.is_dir():
+                    subdirs.append(target_path)
+
+        if args.recursive and subdirs:
+            for subdir in subdirs:
+                self._print_ls(args, subdir, depth + 1, stdout)
 
-    def print_extensive_file_stat(self, stdout: TextIO, target_path: fsutil.TargetPath, name: str) -> None:
+    def print_extensive_file_stat(
+        self, args: argparse.Namespace, stdout: TextIO, target_path: fsutil.TargetPath, name: str
+    ) -> None:
         """Print the file status."""
         try:
             entry = target_path.get()
             stat = entry.lstat()
             symlink = f" -> {entry.readlink()}" if entry.is_symlink() else ""
-            utc_time = datetime.datetime.utcfromtimestamp(stat.st_mtime).isoformat()
+            show_time = stat.st_mtime
+            if args.use_ctime:
+                show_time = stat.st_ctime
+            elif args.use_atime:
+                show_time = stat.st_atime
+            utc_time = datetime.datetime.utcfromtimestamp(show_time).isoformat()
 
             print(
                 f"{stat_modestr(stat)} {stat.st_uid:4d} {stat.st_gid:4d} {stat.st_size:6d} {utc_time} {name}{symlink}",
@@ -1223,10 +1256,17 @@ def main() -> None:
     parser.add_argument("targets", metavar="TARGETS", nargs="*", help="targets to load")
     parser.add_argument("-p", "--python", action="store_true", help="(I)Python shell")
    parser.add_argument("-r", "--registry", action="store_true", help="registry shell")
+    parser.add_argument(
+        "-L",
+        "--loader",
+        action="store",
+        default=None,
+        help="select a specific loader (i.e. vmx, raw)",
+    )
 
     configure_generic_arguments(parser)
-    args = parser.parse_args()
-
+    args, rest = parser.parse_known_args()
+    args.targets = args_to_uri(args.targets, args.loader, rest) if args.loader else args.targets
     process_generic_arguments(args)
 
     # For the shell tool we want -q to log slightly more then just CRITICAL
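The new -c and -u flags for the interactive ls switch the timestamp column to st_ctime or st_atime, defaulting to st_mtime, mirroring coreutils ls. The selection logic in isolation (a minimal sketch; stat_result here comes from a regular os.stat rather than a target filesystem entry):

    import datetime
    import os


    def pick_timestamp(stat: os.stat_result, use_ctime: bool = False, use_atime: bool = False) -> str:
        """Return the ISO timestamp ls would show: mtime by default, ctime with -c, atime with -u."""
        show_time = stat.st_mtime
        if use_ctime:
            show_time = stat.st_ctime
        elif use_atime:
            show_time = stat.st_atime
        return datetime.datetime.utcfromtimestamp(show_time).isoformat()


    print(pick_timestamp(os.stat("."), use_atime=True))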