dissect.target-3.14.dev29-py3-none-any.whl → dissect.target-3.15-py3-none-any.whl

Files changed (86)
  1. dissect/target/containers/ewf.py +1 -1
  2. dissect/target/containers/vhd.py +5 -2
  3. dissect/target/filesystem.py +36 -18
  4. dissect/target/filesystems/dir.py +10 -4
  5. dissect/target/filesystems/jffs.py +122 -0
  6. dissect/target/helpers/compat/path_310.py +506 -0
  7. dissect/target/helpers/compat/path_311.py +539 -0
  8. dissect/target/helpers/compat/path_312.py +443 -0
  9. dissect/target/helpers/compat/path_39.py +545 -0
  10. dissect/target/helpers/compat/path_common.py +223 -0
  11. dissect/target/helpers/cyber.py +512 -0
  12. dissect/target/helpers/fsutil.py +128 -666
  13. dissect/target/helpers/hashutil.py +17 -57
  14. dissect/target/helpers/keychain.py +9 -3
  15. dissect/target/helpers/loaderutil.py +1 -1
  16. dissect/target/helpers/mount.py +47 -4
  17. dissect/target/helpers/polypath.py +73 -0
  18. dissect/target/helpers/record_modifier.py +100 -0
  19. dissect/target/loader.py +2 -1
  20. dissect/target/loaders/asdf.py +2 -0
  21. dissect/target/loaders/cyber.py +37 -0
  22. dissect/target/loaders/log.py +14 -3
  23. dissect/target/loaders/raw.py +2 -0
  24. dissect/target/loaders/remote.py +12 -0
  25. dissect/target/loaders/tar.py +13 -0
  26. dissect/target/loaders/targetd.py +2 -0
  27. dissect/target/loaders/velociraptor.py +12 -3
  28. dissect/target/loaders/vmwarevm.py +2 -0
  29. dissect/target/plugin.py +272 -143
  30. dissect/target/plugins/apps/ssh/openssh.py +11 -54
  31. dissect/target/plugins/apps/ssh/opensshd.py +4 -3
  32. dissect/target/plugins/apps/ssh/putty.py +236 -0
  33. dissect/target/plugins/apps/ssh/ssh.py +58 -0
  34. dissect/target/plugins/apps/vpn/openvpn.py +6 -0
  35. dissect/target/plugins/apps/webserver/apache.py +309 -95
  36. dissect/target/plugins/apps/webserver/caddy.py +5 -2
  37. dissect/target/plugins/apps/webserver/citrix.py +82 -0
  38. dissect/target/plugins/apps/webserver/iis.py +9 -12
  39. dissect/target/plugins/apps/webserver/nginx.py +5 -2
  40. dissect/target/plugins/apps/webserver/webserver.py +25 -41
  41. dissect/target/plugins/child/wsl.py +1 -1
  42. dissect/target/plugins/filesystem/ntfs/mft.py +10 -0
  43. dissect/target/plugins/filesystem/ntfs/mft_timeline.py +10 -0
  44. dissect/target/plugins/filesystem/ntfs/usnjrnl.py +10 -0
  45. dissect/target/plugins/filesystem/ntfs/utils.py +28 -5
  46. dissect/target/plugins/filesystem/resolver.py +6 -4
  47. dissect/target/plugins/general/default.py +0 -2
  48. dissect/target/plugins/general/example.py +0 -1
  49. dissect/target/plugins/general/loaders.py +3 -5
  50. dissect/target/plugins/os/unix/_os.py +3 -3
  51. dissect/target/plugins/os/unix/bsd/citrix/_os.py +68 -28
  52. dissect/target/plugins/os/unix/bsd/citrix/history.py +130 -0
  53. dissect/target/plugins/os/unix/generic.py +17 -12
  54. dissect/target/plugins/os/unix/linux/fortios/__init__.py +0 -0
  55. dissect/target/plugins/os/unix/linux/fortios/_os.py +534 -0
  56. dissect/target/plugins/os/unix/linux/fortios/generic.py +30 -0
  57. dissect/target/plugins/os/unix/linux/fortios/locale.py +109 -0
  58. dissect/target/plugins/os/windows/log/evt.py +1 -1
  59. dissect/target/plugins/os/windows/log/schedlgu.py +155 -0
  60. dissect/target/plugins/os/windows/regf/firewall.py +1 -1
  61. dissect/target/plugins/os/windows/regf/shimcache.py +1 -1
  62. dissect/target/plugins/os/windows/regf/trusteddocs.py +1 -1
  63. dissect/target/plugins/os/windows/registry.py +1 -1
  64. dissect/target/plugins/os/windows/sam.py +3 -0
  65. dissect/target/plugins/os/windows/sru.py +41 -28
  66. dissect/target/plugins/os/windows/tasks.py +5 -2
  67. dissect/target/target.py +7 -3
  68. dissect/target/tools/dd.py +7 -1
  69. dissect/target/tools/fs.py +8 -1
  70. dissect/target/tools/info.py +22 -16
  71. dissect/target/tools/mount.py +28 -3
  72. dissect/target/tools/query.py +146 -117
  73. dissect/target/tools/reg.py +21 -16
  74. dissect/target/tools/shell.py +30 -6
  75. dissect/target/tools/utils.py +28 -0
  76. dissect/target/volumes/bde.py +14 -10
  77. dissect/target/volumes/luks.py +18 -10
  78. {dissect.target-3.14.dev29.dist-info → dissect.target-3.15.dist-info}/METADATA +4 -3
  79. {dissect.target-3.14.dev29.dist-info → dissect.target-3.15.dist-info}/RECORD +85 -67
  80. dissect/target/plugins/os/unix/linux/fortigate/_os.py +0 -175
  81. /dissect/target/{plugins/os/unix/linux/fortigate → helpers/compat}/__init__.py +0 -0
  82. {dissect.target-3.14.dev29.dist-info → dissect.target-3.15.dist-info}/COPYRIGHT +0 -0
  83. {dissect.target-3.14.dev29.dist-info → dissect.target-3.15.dist-info}/LICENSE +0 -0
  84. {dissect.target-3.14.dev29.dist-info → dissect.target-3.15.dist-info}/WHEEL +0 -0
  85. {dissect.target-3.14.dev29.dist-info → dissect.target-3.15.dist-info}/entry_points.txt +0 -0
  86. {dissect.target-3.14.dev29.dist-info → dissect.target-3.15.dist-info}/top_level.txt +0 -0
dissect/target/helpers/hashutil.py CHANGED
@@ -4,9 +4,9 @@ import hashlib
 import warnings
 from typing import TYPE_CHECKING, BinaryIO, Union
 
-from flow.record import GroupedRecord, Record, RecordDescriptor, fieldtypes
+from flow.record import Record
 
-from dissect.target.exceptions import FileNotFoundError, IsADirectoryError
+from dissect.target.exceptions import FileNotFoundError
 
 if TYPE_CHECKING:
     from hashlib._hashlib import HASH
@@ -15,10 +15,6 @@ if TYPE_CHECKING:
 
 BUFFER_SIZE = 32768
 
-RECORD_NAME = "filesystem/file/digest"
-NAME_SUFFIXES = ["_resolved", "_digest"]
-RECORD_TYPES = ["path", "digest"]
-
 
 def _hash(fh: BinaryIO, ctx: Union[HASH, list[HASH]]) -> tuple[str]:
     if not isinstance(ctx, list):
@@ -58,70 +54,34 @@ def custom(fh: BinaryIO, algos: list[Union[str, HASH]]) -> tuple[str]:
     return _hash(fh, ctx)
 
 
-def hash_uri_records(target: Target, record: Record) -> Record:
-    """Hash uri paths inside the record."""
+def hash_uri(target: Target, path: str) -> tuple[str, str]:
+    """Hash the target path."""
     warnings.warn(
         (
-            "The hash_uri_records() function is deprecated, and will be removed in dissect.target 3.15. "
-            "Use hash_path_records() instead"
+            "The hash_uri() function is deprecated, and will be removed in dissect.target 3.15. "
+            "Use target.fs.hash() instead"
         ),
         DeprecationWarning,
     )
-    return hash_path_records(target, record)
-
-
-def hash_path_records(target: Target, record: Record) -> Record:
-    """Hash files from path fields inside the record."""
-
-    hash_records = []
-
-    for field_name, field_type in record._field_types.items():
-        if not issubclass(field_type, fieldtypes.path):
-            continue
-
-        path = getattr(record, field_name, None)
-        if path is None:
-            continue
-
-        try:
-            resolved_path = target.resolve(str(path))
-            path_hash = target.fs.hash(resolved_path)
-        except (FileNotFoundError, IsADirectoryError):
-            pass
-        else:
-            resolved_path = target.fs.path(resolved_path)
-            record_kwargs = dict()
-            record_def = list()
-
-            fields = [resolved_path, path_hash]
-
-            for type, name, field in zip(RECORD_TYPES, NAME_SUFFIXES, fields):
-                hashed_field_name = f"{field_name}{name}"
-                record_kwargs.update({hashed_field_name: field})
-                record_def.append((type, hashed_field_name))
 
-            _record = RecordDescriptor(RECORD_NAME, record_def)
+    if path is None:
+        raise FileNotFoundError()
 
-            hash_records.append(_record(**record_kwargs))
+    path = str(target.resolve(path))
+    return (path, target.fs.hash(path))
 
-    if not hash_records:
-        return record
 
-    return GroupedRecord(record._desc.name, [record] + hash_records)
+def hash_uri_records(target: Target, record: Record) -> Record:
+    """Hash uri paths inside the record."""
 
+    from dissect.target.helpers.record_modifier import Modifier, get_modifier_function
 
-def hash_uri(target: Target, path: str) -> tuple[str, str]:
-    """Hash the target path."""
     warnings.warn(
         (
-            "The hash_uri() function is deprecated, and will be removed in dissect.target 3.15."
-            "Use target.fs.hash() instead"
+            "The hash_uri_records() function is deprecated, and will be removed in dissect.target 3.15. "
+            "Use hash_path_records() instead"
         ),
         DeprecationWarning,
     )
-
-    if path is None:
-        raise FileNotFoundError()
-
-    path = target.resolve(path)
-    return (path, target.fs.hash(path))
+    func = get_modifier_function(Modifier.HASH)
+    return func(target, record)
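Editor's note on the deprecation above: callers are now expected to resolve a path themselves and hash it through the filesystem layer. A minimal sketch, assuming an already opened target (the image name and path are made up):

    from dissect.target import Target

    t = Target.open("example.img")           # hypothetical image
    resolved = str(t.resolve("/etc/hosts"))  # resolve links/aliases first
    print(resolved, t.fs.hash(resolved))     # the path and its digests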
dissect/target/helpers/keychain.py CHANGED
@@ -19,12 +19,15 @@ class Key(NamedTuple):
     value: Union[str, bytes]
     provider: str = None
     identifier: str = None
+    is_wildcard: bool = False
 
 
 KEYCHAIN: list[Key] = []
 
 
-def register_key(key_type: KeyType, value: str, identifier: str = None, provider: str = None) -> None:
+def register_key(
+    key_type: KeyType, value: str, identifier: str = None, provider: str = None, is_wildcard: bool = False
+) -> None:
     if key_type == KeyType.RAW:
         try:
             value = bytes.fromhex(value)
@@ -32,7 +35,10 @@ def register_key(key_type: KeyType, value: str, identifier: str = None, provider
             log.warning("Failed to decode raw key as hex, ignoring: %s", value)
             return
 
-    key = Key(key_type, value, provider, identifier)
+    if key_type in (KeyType.RECOVERY_KEY, KeyType.FILE):
+        value = value.strip("\"'")
+
+    key = Key(key_type, value, provider, identifier, is_wildcard)
     KEYCHAIN.append(key)
     log.info("Registered key %s", key)
 
@@ -58,7 +64,7 @@ def parse_key_type(key_type_name: str) -> KeyType:
 
 def register_wildcard_value(value: str) -> None:
     for key_type in KeyType:
-        register_key(key_type, value, is_wildcard=True)
+        register_key(key_type, value, is_wildcard=True)
 
 
 def register_keychain_file(keychain_path: Path) -> None:
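A short sketch of the new behaviour, using only the functions shown in this diff (the key values are made up):

    from dissect.target.helpers import keychain

    # Surrounding quotes on recovery keys and key-file paths are now stripped on registration.
    keychain.register_key(keychain.KeyType.RECOVERY_KEY, '"111111-222222-333333-444444-555555-666666-777777-888888"')

    # A wildcard value is registered once per KeyType and flagged with is_wildcard=True.
    keychain.register_wildcard_value("hunter2")
    print([key for key in keychain.KEYCHAIN if key.is_wildcard])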
dissect/target/helpers/loaderutil.py CHANGED
@@ -80,4 +80,4 @@ def extract_path_info(path: Union[str, Path]) -> tuple[Path, Optional[urllib.par
     if parsed_path.scheme == "" or re.match("^[A-Za-z]$", parsed_path.scheme):
         return Path(path), None
     else:
-        return Path(parsed_path.path), parsed_path
+        return Path(parsed_path.netloc + parsed_path.path), parsed_path
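The one-line change above matters for URIs that carry a host component; previously the netloc part was dropped. A standard-library illustration (the URI is made up):

    from urllib.parse import urlparse

    parsed = urlparse("remote://10.13.37.1:9001")
    print(parsed.path)                  # "" - the old return value lost the host part
    print(parsed.netloc + parsed.path)  # "10.13.37.1:9001" - what the loader now receives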
dissect/target/helpers/mount.py CHANGED
@@ -1,12 +1,26 @@
 import errno
 import logging
+from ctypes import c_void_p
 from functools import lru_cache
-from typing import BinaryIO, Optional
+from typing import BinaryIO, Iterator, Optional
 
-from fuse import FuseOSError, Operations
+from dissect.util.feature import Feature, feature_enabled
 
 from dissect.target.filesystem import Filesystem, FilesystemEntry
 
+HAS_FUSE3 = False
+if feature_enabled(Feature.BETA):
+    from fuse3 import FuseOSError, Operations
+    from fuse3.c_fuse import fuse_config_p, fuse_conn_info_p
+
+    HAS_FUSE3 = True
+else:
+    from fuse import FuseOSError, Operations
+
+    fuse_config_p = c_void_p
+    fuse_conn_info_p = c_void_p
+
+
 log = logging.getLogger(__name__)
 
 CACHE_SIZE = 1024 * 1024
@@ -27,15 +41,32 @@ class DissectMount(Operations):
         except Exception:
             raise FuseOSError(errno.ENOENT)
 
+    def init(self, path: str, conn: Optional[fuse_conn_info_p] = None, cfg: Optional[fuse_config_p] = None) -> None:
+        if cfg:
+            # Enables the use of inodes in getattr
+            cfg.contents.use_ino = 1
+
     def getattr(self, path: str, fh: Optional[int] = None) -> dict:
         fe = self._get(path)
 
         try:
             st = fe.lstat()
+
             return dict(
                 (key, getattr(st, key))
-                for key in ("st_atime", "st_ctime", "st_gid", "st_mode", "st_mtime", "st_nlink", "st_size", "st_uid")
+                for key in (
+                    "st_atime",
+                    "st_ctime",
+                    "st_ino",
+                    "st_gid",
+                    "st_mode",
+                    "st_mtime",
+                    "st_nlink",
+                    "st_size",
+                    "st_uid",
+                )
             )
+
         except Exception:
             raise FuseOSError(errno.EIO)
 
@@ -75,7 +106,7 @@ class DissectMount(Operations):
             log.exception("Exception in fuse::read")
             raise FuseOSError(errno.EIO)
 
-    def readdir(self, path: str, fh: int):
+    def readdir(self, path: str, fh: int, flags: int = 0) -> Iterator[str]:
         if fh not in self.dir_handles:
             raise FuseOSError(errno.EBADFD)
 
@@ -100,7 +131,19 @@ class DissectMount(Operations):
             raise FuseOSError(errno.EIO)
 
     def release(self, path: str, fh: int) -> int:
+        if file := self.file_handles.get(fh):
+            file.close()
+
         del self.file_handles[fh]
+        return 0
 
     def releasedir(self, path: str, fh: int) -> int:
         del self.dir_handles[fh]
+        return 0
+
+    if HAS_FUSE3:
+        # Define the fuse3 bindings here
+
+        def lseek(self, path: str, off: int, whence: int, fh: int) -> int:
+            if file := self.file_handles.get(fh):
+                return file.seek(off, whence)
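The fuse3 code path above is selected at import time through dissect.util's feature flags. A minimal check of which backend would be active, reusing only names that appear in this diff (how the BETA flag itself is toggled depends on your dissect.util version):

    from dissect.util.feature import Feature, feature_enabled

    if feature_enabled(Feature.BETA):
        print("beta features enabled: mounting goes through the fuse3 bindings")
    else:
        print("beta features disabled: falling back to the classic fuse bindings")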
dissect/target/helpers/polypath.py ADDED
@@ -0,0 +1,73 @@
+"""Filesystem path manipulation functions.
+
+Similar to posixpath and ntpath, but with support for alternative separators.
+"""
+
+from __future__ import annotations
+
+import posixpath
+import re
+
+re_normalize_path = re.compile(r"[/]+")
+re_normalize_sbs_path = re.compile(r"[\\/]+")
+
+
+def normalize(path: str, alt_separator: str = "") -> str:
+    if alt_separator == "\\":
+        return re_normalize_sbs_path.sub("/", path)
+    else:
+        return re_normalize_path.sub("/", path)
+
+
+def isabs(path: str, alt_separator: str = "") -> bool:
+    return posixpath.isabs(normalize(path, alt_separator=alt_separator))
+
+
+def join(*args, alt_separator: str = "") -> str:
+    return posixpath.join(*[normalize(part, alt_separator=alt_separator) for part in args])
+
+
+def split(path: str, alt_separator: str = "") -> str:
+    return posixpath.split(normalize(path, alt_separator=alt_separator))
+
+
+splitext = posixpath.splitext
+
+
+splitdrive = posixpath.splitdrive
+
+
+def splitroot(path: str, alt_separator: str = "") -> tuple[str, str]:
+    return posixpath.splitroot(normalize(path, alt_separator=alt_separator))
+
+
+def basename(path: str, alt_separator: str = "") -> str:
+    return posixpath.basename(normalize(path, alt_separator=alt_separator))
+
+
+def dirname(path: str, alt_separator: str = "") -> str:
+    return posixpath.dirname(normalize(path, alt_separator=alt_separator))
+
+
+def normpath(path: str, alt_separator: str = "") -> str:
+    return posixpath.normpath(normalize(path, alt_separator=alt_separator))
+
+
+def abspath(path: str, cwd: str = "", alt_separator: str = "") -> str:
+    cwd = cwd or "/"
+    cwd = normalize(cwd, alt_separator=alt_separator)
+    path = normalize(path, alt_separator=alt_separator)
+    if not isabs(path):
+        path = join(cwd, path)
+    return posixpath.normpath(path)
+
+
+def relpath(path: str, start: str, alt_separator: str = "") -> str:
+    return posixpath.relpath(
+        normalize(path, alt_separator=alt_separator),
+        normalize(start, alt_separator=alt_separator),
+    )
+
+
+def commonpath(paths: list[str], alt_separator: str = "") -> str:
+    return posixpath.commonpath([normalize(path, alt_separator=alt_separator) for path in paths])
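A quick sketch of what the alternative-separator support does; the results follow directly from the functions defined above:

    from dissect.target.helpers import polypath

    # With a Windows-style alt separator, backslash runs collapse into forward slashes.
    print(polypath.normalize(r"sysvol\Windows\System32", alt_separator="\\"))  # sysvol/Windows/System32
    print(polypath.join("sysvol", r"Windows\Temp", alt_separator="\\"))        # sysvol/Windows/Temp
    print(polypath.isabs(r"\Windows", alt_separator="\\"))                     # True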
dissect/target/helpers/record_modifier.py ADDED
@@ -0,0 +1,100 @@
+from functools import partial
+from typing import Callable, Iterable, Iterator
+
+from flow.record import GroupedRecord, Record, RecordDescriptor, fieldtypes
+
+from dissect.target import Target
+from dissect.target.exceptions import FilesystemError
+from dissect.target.helpers.fsutil import TargetPath
+from dissect.target.helpers.hashutil import common
+from dissect.target.helpers.utils import StrEnum
+
+__all__ = ("get_modifier_function", "Modifier", "ModifierFunc")
+
+RECORD_NAME = "filesystem/file/digest"
+NAME_SUFFIXES = ["_resolved", "_digest"]
+RECORD_TYPES = ["path", "digest"]
+
+ModifierFunc = Callable[[Target, Record], GroupedRecord]
+
+
+class Modifier(StrEnum):
+    RESOLVE = "resolve"
+    HASH = "hash"
+
+
+def _create_modified_record(
+    record_name: str, field_name: str, field_info: Iterable[tuple[str, str, TargetPath]]
+) -> Record:
+    record_kwargs = dict()
+    record_def = list()
+    for type, name_suffix, data in field_info:
+        extended_field_name = f"{field_name}{name_suffix}"
+        record_kwargs.update({extended_field_name: data})
+        record_def.append((type, extended_field_name))
+
+    _record = RecordDescriptor(record_name, record_def)
+    return _record(**record_kwargs)
+
+
+def _resolve_path_records(field_name: str, resolved_path: TargetPath) -> Record:
+    """Resolve files from path fields inside the record."""
+    type_info = [("path", "_resolved", resolved_path)]
+    return _create_modified_record("filesystem/file/resolved", field_name, type_info)
+
+
+def _hash_path_records(field_name: str, resolved_path: TargetPath) -> Record:
+    """Hash files from path fields inside the record."""
+
+    with resolved_path.open() as fh:
+        path_hash = common(fh)
+
+    type_info = zip(RECORD_TYPES, NAME_SUFFIXES, [resolved_path, path_hash])
+
+    return _create_modified_record("filesystem/file/digest", field_name, type_info)
+
+
+MODIFIER_MAPPING = {
+    Modifier.RESOLVE: _resolve_path_records,
+    Modifier.HASH: _hash_path_records,
+}
+
+
+def _resolve_path_types(target: Target, record: Record) -> Iterator[tuple[str, TargetPath]]:
+    for field_name, field_type in record._field_types.items():
+        if not issubclass(field_type, fieldtypes.path):
+            continue
+
+        path = getattr(record, field_name, None)
+        if path is None:
+            continue
+
+        yield field_name, target.resolve(str(path))
+
+
+def modify_record(target: Target, record: Record, modifier_function: ModifierFunc) -> GroupedRecord:
+    additional_records = []
+
+    for field_name, resolved_path in _resolve_path_types(target, record):
+        try:
+            _record = modifier_function(field_name, resolved_path)
+        except FilesystemError:
+            pass
+        else:
+            additional_records.append(_record)
+
+    if not additional_records:
+        return record
+
+    return GroupedRecord(record._desc.name, [record] + additional_records)
+
+
+def _noop(_target: Target, record: Record):
+    return record
+
+
+def get_modifier_function(modifier_type: Modifier) -> ModifierFunc:
+    if func := MODIFIER_MAPPING.get(modifier_type):
+        return partial(modify_record, modifier_function=func)
+
+    return _noop
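A sketch of how the new helper can enrich records, assuming an opened target and any flow.record whose descriptor contains path-typed fields (the image name and plugin call below are illustrative):

    from dissect.target import Target
    from dissect.target.helpers.record_modifier import Modifier, get_modifier_function

    t = Target.open("example.img")                  # hypothetical image
    modifier = get_modifier_function(Modifier.HASH)

    for record in t.walkfs():                       # any records carrying path fields will do
        print(modifier(t, record))                  # original record grouped with filesystem/file/digest records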
dissect/target/loader.py CHANGED
@@ -154,7 +154,7 @@ def find_loader(
             log.debug("", exc_info=exception)
 
 
-def open(item: Union[str, Path], *args, **kwargs):
+def open(item: Union[str, Path], *args, **kwargs) -> Loader:
     """Opens a :class:`Loader` for a specific ``item``.
 
     This instantiates a :class:`Loader` for a specific ``item``.
@@ -202,4 +202,5 @@ register("phobos", "PhobosLoader")
 register("velociraptor", "VelociraptorLoader")
 register("smb", "SmbLoader")
 register("cb", "CbLoader")
+register("cyber", "CyberLoader")
 register("multiraw", "MultiRawLoader")  # Should be last
dissect/target/loaders/asdf.py CHANGED
@@ -15,6 +15,8 @@ if TYPE_CHECKING:
 
 
 class AsdfLoader(Loader):
+    """Load an ASDF target."""
+
     METADATA_PREFIX = "$asdf$"
 
     def __init__(self, path: Path, **kwargs):
dissect/target/loaders/cyber.py ADDED
@@ -0,0 +1,37 @@
+from pathlib import Path
+
+from dissect.target import Target
+from dissect.target.helpers.cyber import cyber
+from dissect.target.loader import Loader
+from dissect.target.loader import open as loader_open
+from dissect.target.loaders.raw import RawLoader
+
+HEADER = r"""
+┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓
+┃ _______ ______ ______ _____ ┃
+┃ / ____\ \ / / _ \| ____| __ \ ┃
+┃ | | \ \_/ /| |_) | |__ | |__) | ┃
+┃ | | \ / | _ <| __| | _ / ┃
+┃ | |____ | | | |_) | |____| | \ \ ┃
+┃ \_____| |_| |____/|______|_| \_\ ┃
+┃ ┃
+┗━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┛
+
+"""
+
+
+class CyberLoader(Loader):
+    def __init__(self, path: Path, **kwargs):
+        super().__init__(path, **kwargs)
+        self._real = loader_open(path) or RawLoader(path)
+
+    @staticmethod
+    def detect(path: Path) -> bool:
+        return False
+
+    def map(self, target: Target) -> None:
+        with cyber(mask_space=True):
+            print(HEADER)
+
+        target.props["cyber"] = True
+        return self._real.map(target)
dissect/target/loaders/log.py CHANGED
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import urllib
 from pathlib import Path
 from typing import Union
@@ -5,9 +7,19 @@ from typing import Union
 from dissect.target import Target
 from dissect.target.filesystem import VirtualFilesystem
 from dissect.target.loader import Loader
+from dissect.target.plugin import arg
 
 
+@arg("--log-hint", dest="hint", help="hint for file type")
 class LogLoader(Loader):
+    """Load separate log files without a target.
+
+    Usage:
+
+    ``target-query /evtx/* -L log -f evtx``
+
+    """
+
     LOGS_DIRS = {
         "evtx": "sysvol/windows/system32/winevt/logs",
         "evt": "sysvol/windows/system32/config",
@@ -25,13 +37,12 @@ class LogLoader(Loader):
         return False
 
     def map(self, target: Target) -> None:
-        self.target = target
         vfs = VirtualFilesystem(case_sensitive=False, alt_separator=target.fs.alt_separator)
+        target.filesystems.add(vfs)
+        target.fs.mount("/", vfs)
         for entry in self.path.parent.glob(self.path.name):
             ext = self.options.get("hint", entry.suffix.lower()).strip(".")
             if (mapping := self.LOGS_DIRS.get(ext, None)) is None:
                 continue
             mapping = str(vfs.path(mapping).joinpath(entry.name))
             vfs.map_file(mapping, str(entry))
-        target.filesystems.add(vfs)
-        target.fs = vfs
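The new ``--log-hint`` argument ends up in ``self.options["hint"]`` and takes precedence over the extension-based lookup, so files whose suffix does not match their format can still be mapped. The lookup, condensed (values are made up):

    options = {"hint": "evtx"}   # filled by --log-hint on the command line
    entry_suffix = ".bak"        # actual suffix of the collected file
    ext = options.get("hint", entry_suffix.lower()).strip(".")
    print(ext)                   # "evtx" -> mapped under sysvol/windows/system32/winevt/logs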
dissect/target/loaders/raw.py CHANGED
@@ -6,6 +6,8 @@ from dissect.target.target import Target
 
 
 class RawLoader(Loader):
+    """Load raw container files such as disk images."""
+
     @staticmethod
     def detect(path: Path) -> bool:
         return not path.is_dir()
dissect/target/loaders/remote.py CHANGED
@@ -15,6 +15,7 @@ from dissect.util.stream import AlignedStream
 from dissect.target.containers.raw import RawContainer
 from dissect.target.exceptions import LoaderError
 from dissect.target.loader import Loader
+from dissect.target.plugin import arg
 from dissect.target.target import Target
 
 log = logging.getLogger(__name__)
@@ -86,6 +87,7 @@ class RemoteStreamConnection:
         client_crt = options.get("crt")
         server_ca = options.get("ca")
         noverify = options.get("noverify")
+
         if client_key and client_crt:
             self._context.load_cert_chain(certfile=client_crt, keyfile=client_key)
             flag_cert_chain_loaded = True
@@ -203,7 +205,17 @@ class RemoteStreamConnection:
         return disks
 
 
+@arg("--remote-key", dest="key", help="private key")
+@arg("--remote-crt", dest="crt", help="client certificate")
+@arg("--remote-ca", dest="ca", help="certificate Authority")
+@arg("--remote-noverify", dest="noverify", help="no certificate verification")
+@arg("--remote-reconnects", dest="reconnects", help="max number of reconnects")
+@arg("--remote-shortreads", dest="shortreads", help="max limit shortreads")
+@arg("--remote-reconnectwait", dest="reconnectwait", help="max time before reconnection attempt")
+@arg("--remote-sockettimeout", dest="sockettimeout", help="socket timeout")
 class RemoteLoader(Loader):
+    """Load a remote target that runs a compatible Dissect agent."""
+
     def __init__(self, path: Union[Path, str], **kwargs):
         super().__init__(path)
         uri = kwargs.get("parsed_path")
dissect/target/loaders/tar.py CHANGED
@@ -1,4 +1,5 @@
 import logging
+import re
 import tarfile
 from pathlib import Path
 from typing import Union
@@ -14,6 +15,9 @@ from dissect.target.loader import Loader
 log = logging.getLogger(__name__)
 
 
+ANON_FS_RE = re.compile(r"^fs[0-9]+$")
+
+
 class TarLoader(Loader):
     """Load tar files."""
 
@@ -63,6 +67,15 @@ class TarLoader(Loader):
             if volume_name.lower() == "c:":
                 volume_name = "sysvol"
 
+            if volume_name == "$fs$":
+                if len(parts) == 1:
+                    # The fs/$fs$ entry is ignored, only the directories below it are processed.
+                    continue
+                fs_name = parts[1]
+                if ANON_FS_RE.match(fs_name):
+                    parts.pop(0)
+                    volume_name = f"{volume_name}/{fs_name}"
+
             if volume_name not in volumes:
                 vol = filesystem.VirtualFilesystem(case_sensitive=False)
                 vol.tar = self.tar
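A rough illustration of the anonymous-filesystem branch above, run outside the loader; the member path is an assumption based on the in-code comment:

    import re

    ANON_FS_RE = re.compile(r"^fs[0-9]+$")

    parts = ["$fs$", "fs0", "etc", "passwd"]   # hypothetical tar member below fs/$fs$/
    volume_name = parts[0]

    if volume_name == "$fs$" and len(parts) > 1 and ANON_FS_RE.match(parts[1]):
        fs_name = parts[1]
        parts.pop(0)
        volume_name = f"{volume_name}/{fs_name}"   # volumes end up keyed as "$fs$/fs0"

    print(volume_name, parts)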
dissect/target/loaders/targetd.py CHANGED
@@ -33,6 +33,8 @@ except Exception:
 
 
 class TargetdLoader(ProxyLoader):
+    """Load remote targets through a broker."""
+
     instance = None
 
     def __init__(self, path: Union[Path, str], **kwargs):
dissect/target/loaders/velociraptor.py CHANGED
@@ -22,7 +22,8 @@ def find_fs_directories(path: Path) -> tuple[Optional[OperatingSystem], Optional
     # As of Velociraptor version 0.7.0 the structure of the Velociraptor Offline Collector varies by operating system.
     # Generic.Collectors.File (Unix) uses the accessors file and auto.
     # Generic.Collectors.File (Windows) and Windows.KapeFiles.Targets (Windows) uses the accessors
-    # mft, ntfs, lazy_ntfs, ntfs_vss and auto.
+    # mft, ntfs, lazy_ntfs, ntfs_vss and auto. The loader only supports a collection where a single accessor is used.
+    # For Windows usage of the ntfs_vss accessor can be forced by configuring VSSAnalysisAge to be greater than 0.
 
     fs_root = path.joinpath(FILESYSTEMS_ROOT)
 
@@ -36,14 +37,22 @@ def find_fs_directories(path: Path) -> tuple[Optional[OperatingSystem], Optional
 
     # Windows
     volumes = set()
+    vss_volumes = set()
     for accessor in WINDOWS_ACCESSORS:
         accessor_root = fs_root.joinpath(accessor)
         if accessor_root.exists():
             # If the accessor directory exists, assume all the subdirectories are volumes
-            volumes.update(accessor_root.iterdir())
+            for volume in accessor_root.iterdir():
+                # https://github.com/Velocidex/velociraptor/blob/87368e7cc678144592a1614bb3bbd0a0f900ded9/accessors/ntfs/vss.go#L82
+                if "HarddiskVolumeShadowCopy" in volume.name:
+                    vss_volumes.add(volume)
+                else:
+                    volumes.add(volume)
 
     if volumes:
-        return OperatingSystem.WINDOWS, list(volumes)
+        # The volumes that represent drives (C, D) are mounted first,
+        # otherwise one of the volume shadow copies could be detected as the root filesystem which results in errors.
+        return OperatingSystem.WINDOWS, list(volumes) + list(vss_volumes)
 
     return None, None
 
dissect/target/loaders/vmwarevm.py CHANGED
@@ -4,6 +4,8 @@ from dissect.target.loaders.vmx import VmxLoader
 
 
 class VmwarevmLoader(VmxLoader):
+    """Load ``*.vmwarevm`` folders from VMware Fusion."""
+
     def __init__(self, path: Path, **kwargs):
         super().__init__(next(path.glob("*.vmx")))
 