dissect.target 3.20.1__py3-none-any.whl → 3.20.2.dev12__py3-none-any.whl

Files changed (29)
  1. dissect/target/filesystems/dir.py +9 -6
  2. dissect/target/filesystems/zip.py +4 -1
  3. dissect/target/helpers/configutil.py +3 -3
  4. dissect/target/loaders/dir.py +13 -3
  5. dissect/target/loaders/itunes.py +5 -3
  6. dissect/target/loaders/velociraptor.py +35 -15
  7. dissect/target/plugins/apps/browser/iexplore.py +7 -3
  8. dissect/target/plugins/general/plugins.py +1 -1
  9. dissect/target/plugins/os/unix/_os.py +1 -1
  10. dissect/target/plugins/os/unix/esxi/_os.py +34 -32
  11. dissect/target/plugins/os/unix/linux/fortios/_keys.py +7919 -1951
  12. dissect/target/plugins/os/unix/linux/fortios/_os.py +109 -22
  13. dissect/target/plugins/os/unix/linux/network_managers.py +1 -1
  14. dissect/target/plugins/os/unix/log/auth.py +6 -37
  15. dissect/target/plugins/os/unix/log/helpers.py +46 -0
  16. dissect/target/plugins/os/unix/log/messages.py +24 -15
  17. dissect/target/plugins/os/windows/activitiescache.py +32 -30
  18. dissect/target/plugins/os/windows/catroot.py +14 -5
  19. dissect/target/plugins/os/windows/lnk.py +13 -7
  20. dissect/target/plugins/os/windows/notifications.py +40 -38
  21. dissect/target/plugins/os/windows/regf/cit.py +20 -7
  22. dissect/target/tools/diff.py +990 -0
  23. {dissect.target-3.20.1.dist-info → dissect.target-3.20.2.dev12.dist-info}/METADATA +2 -2
  24. {dissect.target-3.20.1.dist-info → dissect.target-3.20.2.dev12.dist-info}/RECORD +29 -27
  25. {dissect.target-3.20.1.dist-info → dissect.target-3.20.2.dev12.dist-info}/entry_points.txt +1 -0
  26. {dissect.target-3.20.1.dist-info → dissect.target-3.20.2.dev12.dist-info}/COPYRIGHT +0 -0
  27. {dissect.target-3.20.1.dist-info → dissect.target-3.20.2.dev12.dist-info}/LICENSE +0 -0
  28. {dissect.target-3.20.1.dist-info → dissect.target-3.20.2.dev12.dist-info}/WHEEL +0 -0
  29. {dissect.target-3.20.1.dist-info → dissect.target-3.20.2.dev12.dist-info}/top_level.txt +0 -0
dissect/target/filesystems/dir.py
@@ -27,12 +27,7 @@ class DirectoryFilesystem(Filesystem):
     def _detect(fh: BinaryIO) -> bool:
         raise TypeError("Detect is not allowed on DirectoryFilesystem class")
 
-    def get(self, path: str) -> FilesystemEntry:
-        path = path.strip("/")
-
-        if not path:
-            return DirectoryFilesystemEntry(self, "/", self.base_path)
-
+    def _resolve_path(self, path: str) -> Path:
         if not self.case_sensitive:
             searchpath = self.base_path
 
@@ -48,6 +43,14 @@ class DirectoryFilesystem(Filesystem):
         else:
             entry = self.base_path.joinpath(path.strip("/"))
 
+        return entry
+
+    def get(self, path: str) -> FilesystemEntry:
+        if not (path := path.strip("/")):
+            return DirectoryFilesystemEntry(self, "/", self.base_path)
+
+        entry = self._resolve_path(path)
+
         try:
             entry.lstat()
             return DirectoryFilesystemEntry(self, path, entry)
dissect/target/filesystems/zip.py
@@ -56,7 +56,7 @@ class ZipFilesystem(Filesystem):
             if not mname.startswith(self.base) or mname == ".":
                 continue
 
-            rel_name = fsutil.normpath(mname[len(self.base) :], alt_separator=self.alt_separator)
+            rel_name = self._resolve_path(mname)
             self._fs.map_file_entry(rel_name, ZipFilesystemEntry(self, rel_name, member))
 
     @staticmethod
@@ -64,6 +64,9 @@ class ZipFilesystem(Filesystem):
         """Detect a zip file on a given file-like object."""
         return zipfile.is_zipfile(fh)
 
+    def _resolve_path(self, path: str) -> str:
+        return fsutil.normpath(path[len(self.base) :], alt_separator=self.alt_separator)
+
     def get(self, path: str, relentry: FilesystemEntry = None) -> FilesystemEntry:
         """Returns a ZipFilesystemEntry object corresponding to the given path."""
         return self._fs.get(path, relentry=relentry)
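The two files above now expose path resolution through a ``_resolve_path()`` hook, so lookup behaviour can be changed without reimplementing ``get()``. A minimal sketch of a subclass using that hook (the class name and the lowercasing rule are illustrative, not part of this release):

from pathlib import Path

from dissect.target.filesystems.dir import DirectoryFilesystem


class LowercaseDirectoryFilesystem(DirectoryFilesystem):
    # Hypothetical subclass: normalise the requested path before the base class resolves it.
    def _resolve_path(self, path: str) -> Path:
        return super()._resolve_path(path.lower())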
dissect/target/helpers/configutil.py
@@ -470,9 +470,9 @@ class Toml(ConfigurationParser):
 class Env(ConfigurationParser):
     """Parses ``.env`` file contents according to Docker and bash specification.
 
-    Does not apply interpolation of substituted values, eg. ``foo=${bar}`` and does not attempt
-    to parse list or dict strings. Does not support dynamic env files, eg. `` foo=`bar` ``. Also
-    does not support multi-line key/value assignments (yet).
+    Does not apply interpolation of substituted values, e.g. ``foo=${bar}`` and does not attempt to parse list or dict
+    strings. Does not support dynamic env files, e.g. ``foo=`bar```. Also does not support multi-line key/value
+    assignments (yet).
 
     Resources:
         - https://docs.docker.com/compose/environment-variables/variable-interpolation/#env-file-syntax
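The hunk above rewraps the ``Env`` docstring (and fixes ``eg.`` to ``e.g.``); behaviour is unchanged. To make the "no interpolation" point concrete, a naive stand-in reader (illustrative only, not the library's parser) keeps ``${bar}`` literal:

def read_env(text: str) -> dict[str, str]:
    # Illustrative .env reader: no interpolation, no list/dict parsing, no multi-line values.
    result = {}
    for line in text.splitlines():
        line = line.strip()
        if not line or line.startswith("#") or "=" not in line:
            continue
        key, _, value = line.partition("=")
        result[key.strip()] = value.strip().strip("'\"")
    return result


print(read_env("bar=1\nfoo=${bar}"))  # {'bar': '1', 'foo': '${bar}'} -- no substitution happens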
dissect/target/loaders/dir.py
@@ -36,13 +36,23 @@ def find_entry_path(path: Path) -> str | None:
     return prefix
 
 
-def map_dirs(target: Target, dirs: list[Path | tuple[str, Path]], os_type: str, **kwargs) -> None:
+def map_dirs(
+    target: Target,
+    dirs: list[Path | tuple[str, Path]],
+    os_type: str,
+    *,
+    dirfs: type[DirectoryFilesystem] = DirectoryFilesystem,
+    zipfs: type[ZipFilesystem] = ZipFilesystem,
+    **kwargs,
+) -> None:
     """Map directories as filesystems into the given target.
 
     Args:
         target: The target to map into.
         dirs: The directories to map as filesystems. If a list member is a tuple, the first element is the drive letter.
         os_type: The operating system type, used to determine how the filesystem should be mounted.
+        dirfs: The filesystem class to use for directory filesystems.
+        zipfs: The filesystem class to use for ZIP filesystems.
     """
     alt_separator = ""
     case_sensitive = True
@@ -59,9 +69,9 @@ def map_dirs(target: Target, dirs: list[Path | tuple[str, Path]], os_type: str,
             drive_letter = path.name[0]
 
         if isinstance(path, zipfile.Path):
-            dfs = ZipFilesystem(path.root.fp, path.at, alt_separator=alt_separator, case_sensitive=case_sensitive)
+            dfs = zipfs(path.root.fp, path.at, alt_separator=alt_separator, case_sensitive=case_sensitive)
         else:
-            dfs = DirectoryFilesystem(path, alt_separator=alt_separator, case_sensitive=case_sensitive)
+            dfs = dirfs(path, alt_separator=alt_separator, case_sensitive=case_sensitive)
 
         drive_letter_map[drive_letter].append(dfs)
 
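``map_dirs()`` now takes keyword-only ``dirfs`` and ``zipfs`` parameters so a loader can swap in its own filesystem classes; the Velociraptor loader below is the first user. A hedged usage sketch, with a placeholder path and a placeholder subclass:

from pathlib import Path

from dissect.target import Target
from dissect.target.filesystems.dir import DirectoryFilesystem
from dissect.target.loaders.dir import map_dirs
from dissect.target.plugin import OperatingSystem


class MyDirectoryFilesystem(DirectoryFilesystem):
    """Placeholder subclass that could override _resolve_path()."""


target = Target()                           # assumption: an empty target to map into
dirs = [Path("/evidence/collection/C")]     # assumption: one collected drive directory

map_dirs(target, dirs, OperatingSystem.WINDOWS, dirfs=MyDirectoryFilesystem)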
dissect/target/loaders/itunes.py
@@ -163,8 +163,10 @@ class ITunesBackup:
 
     def files(self) -> Iterator[FileInfo]:
         """Iterate all the files in this backup."""
-        for row in self.manifest_db.table("Files").rows():
-            yield FileInfo(self, row.fileID, row.domain, row.relativePath, row.flags, row.file)
+
+        if table := self.manifest_db.table("Files"):
+            for row in table.rows():
+                yield FileInfo(self, row.fileID, row.domain, row.relativePath, row.flags, row.file)
 
 
 class FileInfo:
@@ -288,7 +290,7 @@ def translate_file_path(domain: str, relative_path: str) -> str:
         package_name = ""
 
     domain_path = fsutil.join(DOMAIN_TRANSLATION.get(domain, domain), package_name)
-    return fsutil.join(domain_path, relative_path)
+    return fsutil.join(domain_path, relative_path).rstrip("/")
 
 
 def parse_key_bag(buf: bytes) -> tuple[dict[str, bytes, int], dict[str, ClassKey]]:
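The added ``rstrip("/")`` matters for domain-only records: when ``relative_path`` is empty, the join leaves a trailing separator. A small illustration with ``posixpath.join``, assuming ``fsutil.join`` behaves the same way for forward-slash paths:

from posixpath import join

domain_path = "HomeDomain"                # hypothetical iTunes backup domain
print(join(domain_path, ""))              # 'HomeDomain/' -- trailing slash breaks lookups
print(join(domain_path, "").rstrip("/"))  # 'HomeDomain'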
dissect/target/loaders/velociraptor.py
@@ -4,7 +4,11 @@ import logging
 import zipfile
 from pathlib import Path
 from typing import TYPE_CHECKING
+from urllib.parse import quote, unquote
 
+from dissect.target.filesystems.dir import DirectoryFilesystem
+from dissect.target.filesystems.zip import ZipFilesystem
+from dissect.target.helpers.fsutil import basename, dirname, join
 from dissect.target.loaders.dir import DirLoader, find_dirs, map_dirs
 from dissect.target.plugin import OperatingSystem
 
@@ -87,11 +91,13 @@ class VelociraptorLoader(DirLoader):
         super().__init__(path)
 
         if path.suffix == ".zip":
-            log.warning(
-                f"Velociraptor target {path!r} is compressed, which will slightly affect performance. "
-                "Consider uncompressing the archive and passing the uncompressed folder to Dissect."
-            )
             self.root = zipfile.Path(path.open("rb"))
+            if self.root.root.getinfo("uploads.json").compress_type > 0:
+                log.warning(
+                    "Velociraptor target '%s' is compressed, which will slightly affect performance. "
+                    "Consider uncompressing the archive and passing the uncompressed folder to Dissect.",
+                    path,
+                )
         else:
             self.root = path
 
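The performance warning is now emitted only when the archive members are actually compressed (a stored member has ``compress_type == 0``). A sketch of that check using only the standard library; the archive name is hypothetical:

import zipfile

zf = zipfile.ZipFile("collection.zip")           # hypothetical Velociraptor collection archive
info = zf.getinfo("uploads.json")                # metadata file expected in Velociraptor output
print(info.compress_type == zipfile.ZIP_STORED)  # True means stored, so no decompression overhead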
@@ -116,14 +122,28 @@ class VelociraptorLoader(DirLoader):
 
     def map(self, target: Target) -> None:
         os_type, dirs = find_fs_directories(self.root)
-        if os_type == OperatingSystem.WINDOWS:
-            # Velociraptor doesn't have the correct filenames for the paths "$J" and "$Secure:$SDS"
-            map_dirs(
-                target,
-                dirs,
-                os_type,
-                usnjrnl_path="$Extend/$UsnJrnl%3A$J",
-                sds_path="$Secure%3A$SDS",
-            )
-        else:
-            map_dirs(target, dirs, os_type)
+
+        # Velociraptor URL encodes paths before storing them in a collection, which leads to plugins not being able
+        # to find these paths. To circumvent this issue, for a zip file the path names are URL decoded before mapping
+        # into the VFS and for a directory the paths are URL encoded at lookup time.
+        map_dirs(
+            target,
+            dirs,
+            os_type,
+            dirfs=VelociraptorDirectoryFilesystem,
+            zipfs=VelociraptorZipFilesystem,
+        )
+
+
+class VelociraptorDirectoryFilesystem(DirectoryFilesystem):
+    def _resolve_path(self, path: str) -> Path:
+        path = quote(path, safe="$/% ")
+        if (fname := basename(path)).startswith("."):
+            path = join(dirname(path), fname.replace(".", "%2E", 1))
+
+        return super()._resolve_path(path)
+
+
+class VelociraptorZipFilesystem(ZipFilesystem):
+    def _resolve_path(self, path: str) -> str:
+        return unquote(super()._resolve_path(path))
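The two new filesystem subclasses reverse (or reapply) Velociraptor's percent-encoding of special characters in collected paths. A short round-trip illustration with ``urllib.parse``; the example paths are hypothetical:

from urllib.parse import quote, unquote

# Directory collections: encode the requested path at lookup time
# (mirrors VelociraptorDirectoryFilesystem._resolve_path).
print(quote("$Extend/$UsnJrnl:$J", safe="$/% "))  # '$Extend/$UsnJrnl%3A$J'

# quote() never encodes dots, hence the explicit '%2E' handling for leading-dot names:
print(".bashrc".replace(".", "%2E", 1))           # '%2Ebashrc'

# ZIP collections: member names are decoded once while mapping them into the VFS
# (mirrors VelociraptorZipFilesystem._resolve_path).
print(unquote("C%3A/Users/Admin/NTUSER.DAT"))     # 'C:/Users/Admin/NTUSER.DAT'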
dissect/target/plugins/apps/browser/iexplore.py
@@ -36,14 +36,18 @@ class WebCache:
             All ``ContainerId`` values for the requested container name.
         """
         try:
-            for container_record in self.db.table("Containers").records():
+            table = self.db.table("Containers")
+
+            for container_record in table.records():
                 if record_name := container_record.get("Name"):
                     record_name = record_name.rstrip("\00").lower()
                     if record_name == name.lower():
                         container_id = container_record.get("ContainerId")
                         yield self.db.table(f"Container_{container_id}")
-        except KeyError:
-            pass
+
+        except KeyError as e:
+            self.target.log.warning("Exception while parsing EseDB Containers table")
+            self.target.log.debug("", exc_info=e)
 
     def _iter_records(self, name: str) -> Iterator[record.Record]:
         """Yield records from a Webcache container.
dissect/target/plugins/general/plugins.py
@@ -169,7 +169,7 @@ class PluginListPlugin(Plugin):
 
 
 def generate_plugins_json(plugins: list[Plugin]) -> Iterator[dict]:
-    """Generates JSON output of a list of :class:`Plugin`s."""
+    """Generates JSON output of a list of :class:`Plugin`."""
 
     for p in plugins:
         func = getattr(p.class_object, p.method_name)
dissect/target/plugins/os/unix/_os.py
@@ -182,7 +182,7 @@ class UnixPlugin(OSPlugin):
             paths (list): list of paths
         """
         redhat_legacy_path = "/etc/sysconfig/network"
-        paths = paths or ["/etc/hostname", "/etc/HOSTNAME", redhat_legacy_path]
+        paths = paths or ["/etc/hostname", "/etc/HOSTNAME", "/proc/sys/kernel/hostname", redhat_legacy_path]
         hostname_dict = {"hostname": None, "domain": None}
 
         for path in paths:
dissect/target/plugins/os/unix/esxi/_os.py
@@ -472,37 +472,39 @@ def parse_config_store(fh: BinaryIO) -> dict[str, Any]:
     db = sqlite3.SQLite3(fh)
 
     store = {}
-    for row in db.table("Config").rows():
-        component_name = row.Component
-        config_group_name = row.ConfigGroup
-        value_group_name = row.Name
-        identifier_name = row.Identifier
-
-        if component_name not in store:
-            store[component_name] = {}
-        component = store[component_name]
-
-        if config_group_name not in component:
-            component[config_group_name] = {}
-        config_group = component[config_group_name]
-
-        if value_group_name not in config_group:
-            config_group[value_group_name] = {}
-        value_group = config_group[value_group_name]
-
-        if identifier_name not in value_group:
-            value_group[identifier_name] = {}
-        identifier = value_group[identifier_name]
-
-        identifier["modified_time"] = row.ModifiedTime
-        identifier["creation_time"] = row.CreationTime
-        identifier["version"] = row.Version
-        identifier["success"] = row.Success
-        identifier["auto_conf_value"] = json.loads(row.AutoConfValue) if row.AutoConfValue else None
-        identifier["user_value"] = json.loads(row.UserValue) if row.UserValue else None
-        identifier["vital_value"] = json.loads(row.VitalValue) if row.VitalValue else None
-        identifier["cached_value"] = json.loads(row.CachedValue) if row.CachedValue else None
-        identifier["desired_value"] = json.loads(row.DesiredValue) if row.DesiredValue else None
-        identifier["revision"] = row.Revision
+
+    if table := db.table("Config"):
+        for row in table.rows():
+            component_name = row.Component
+            config_group_name = row.ConfigGroup
+            value_group_name = row.Name
+            identifier_name = row.Identifier
+
+            if component_name not in store:
+                store[component_name] = {}
+            component = store[component_name]
+
+            if config_group_name not in component:
+                component[config_group_name] = {}
+            config_group = component[config_group_name]
+
+            if value_group_name not in config_group:
+                config_group[value_group_name] = {}
+            value_group = config_group[value_group_name]
+
+            if identifier_name not in value_group:
+                value_group[identifier_name] = {}
+            identifier = value_group[identifier_name]
+
+            identifier["modified_time"] = row.ModifiedTime
+            identifier["creation_time"] = row.CreationTime
+            identifier["version"] = row.Version
+            identifier["success"] = row.Success
+            identifier["auto_conf_value"] = json.loads(row.AutoConfValue) if row.AutoConfValue else None
+            identifier["user_value"] = json.loads(row.UserValue) if row.UserValue else None
+            identifier["vital_value"] = json.loads(row.VitalValue) if row.VitalValue else None
+            identifier["cached_value"] = json.loads(row.CachedValue) if row.CachedValue else None
+            identifier["desired_value"] = json.loads(row.DesiredValue) if row.DesiredValue else None
+            identifier["revision"] = row.Revision
 
     return store
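``parse_config_store()`` now tolerates a missing ``Config`` table and still returns a mapping nested by component, config group, name and identifier. A hedged access example; the database path and the key names are made up:

from dissect.target.plugins.os.unix.esxi._os import parse_config_store

with open("configstore.db", "rb") as fh:      # hypothetical ESXi config store database
    store = parse_config_store(fh)

# store[Component][ConfigGroup][Name][Identifier] -> values parsed from that Config row
entry = store.get("esx", {}).get("system", {}).get("ntp", {}).get("default", {})
print(entry.get("user_value"))                # JSON-decoded UserValue, or None if absent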