dissect.target 3.16.dev21__py3-none-any.whl → 3.16.dev23__py3-none-any.whl

Sign up to get free protection for your applications and to get access to all the features.
@@ -0,0 +1,60 @@
1
+ from __future__ import annotations
2
+
3
+ from typing import Any, BinaryIO
4
+
5
+ from dissect.cstruct.types.base import BaseType
6
+ from dissect.cstruct.types.bytesinteger import BytesInteger
7
+
8
+
9
class ProtobufVarint(BytesInteger):
    """Implements a protobuf integer type for dissect.cstruct that can span a variable amount of bytes.

    Mainly follows the cstruct BytesInteger implementation with minor tweaks
    to support protobuf's msb varint implementation.

    Resources:
        - https://protobuf.dev/programming-guides/encoding/
        - https://github.com/protocolbuffers/protobuf/blob/main/python/google/protobuf/internal/decoder.py
    """

    def _read(self, stream: BinaryIO, context: dict[str, Any] | None = None) -> int:
        """Read a single varint from ``stream``.

        ``context`` is part of the cstruct read API but is unused here.
        """
        return decode_varint(stream)

    def _write(self, stream: BinaryIO, data: int) -> int:
        """Write ``data`` to ``stream`` as a varint, returning the number of bytes written."""
        return stream.write(encode_varint(data))

    # Varints have no fixed element size, so use the generic per-element
    # array implementations from BaseType rather than the fixed-width
    # BytesInteger ones.
    _read_array = BaseType._read_array

    _write_array = BaseType._write_array
29
+
30
+
31
def decode_varint(stream: BinaryIO) -> int:
    """Read a varint from the provided buffer stream.

    If we have not reached the end of a varint, the msb will be 1.
    We read every byte from our current position until the msb is 0.

    Args:
        stream: Binary stream positioned at the start of a varint.

    Returns:
        The decoded unsigned integer value.

    Raises:
        EOFError: If the stream ends in the middle of a varint. The original
            implementation raised ``IndexError`` (``b""[0]``) here, which
            callers expecting ``EOFError`` at end-of-stream did not catch.
    """
    result = 0
    shift = 0
    while True:
        byte = stream.read(1)
        if not byte:
            # read() returned b"": the stream is exhausted mid-varint.
            raise EOFError("Reached end of stream while decoding varint")
        result |= (byte[0] & 0x7F) << shift
        shift += 7
        if byte[0] & 0x80 == 0:
            break

    return result
47
+
48
+
49
def encode_varint(number: int) -> bytes:
    """Encode a decoded protobuf varint to its original bytes.

    Args:
        number: The unsigned integer value to encode.

    Returns:
        The varint encoding of ``number`` (7 bits per byte, msb as the
        continuation flag).

    Raises:
        ValueError: If ``number`` is negative. In Python ``number >>= 7``
            never reaches zero for negative values, so the original loop
            would never terminate. (Protobuf encodes negative values as
            10-byte two's complement, which this helper does not implement.)
    """
    if number < 0:
        raise ValueError("Cannot encode a negative number as an unsigned varint")

    buf = []
    while True:
        towrite = number & 0x7F
        number >>= 7
        if number:
            buf.append(towrite | 0x80)
        else:
            buf.append(towrite)
            break
    return bytes(buf)
@@ -0,0 +1,68 @@
1
+ from typing import Iterator
2
+
3
+ from dissect.target.helpers.descriptor_extensions import UserRecordDescriptorExtension
4
+ from dissect.target.helpers.record import create_extended_descriptor
5
+ from dissect.target.plugin import export
6
+ from dissect.target.plugins.apps.browser.browser import (
7
+ GENERIC_COOKIE_FIELDS,
8
+ GENERIC_DOWNLOAD_RECORD_FIELDS,
9
+ GENERIC_EXTENSION_RECORD_FIELDS,
10
+ GENERIC_HISTORY_RECORD_FIELDS,
11
+ BrowserPlugin,
12
+ )
13
+ from dissect.target.plugins.apps.browser.chromium import (
14
+ CHROMIUM_DOWNLOAD_RECORD_FIELDS,
15
+ ChromiumMixin,
16
+ )
17
+
18
+
19
class BravePlugin(ChromiumMixin, BrowserPlugin):
    """Brave browser plugin.

    Brave is Chromium-based, so all parsing is delegated to ChromiumMixin;
    this class only supplies the Brave-specific profile directories and
    record descriptors.
    """

    __namespace__ = "brave"

    # Profile directories (relative to a user's home) that ChromiumMixin
    # searches for the browser's SQLite databases.
    DIRS = [
        # Windows
        "AppData/Local/BraveSoftware/Brave-Browser/User Data/Default",
        "AppData/Roaming/BraveSoftware/Brave-Browser/User Data/Default",
        # Linux
        ".config/BraveSoftware/Default",
        # Macos
        "Library/Application Support/BraveSoftware/Default",
    ]

    # Record descriptors, extended with the owning user's details.
    BrowserHistoryRecord = create_extended_descriptor([UserRecordDescriptorExtension])(
        "browser/brave/history", GENERIC_HISTORY_RECORD_FIELDS
    )

    BrowserCookieRecord = create_extended_descriptor([UserRecordDescriptorExtension])(
        "browser/brave/cookie", GENERIC_COOKIE_FIELDS
    )

    # Downloads carry the Chromium-specific fields on top of the generic ones.
    BrowserDownloadRecord = create_extended_descriptor([UserRecordDescriptorExtension])(
        "browser/brave/download", GENERIC_DOWNLOAD_RECORD_FIELDS + CHROMIUM_DOWNLOAD_RECORD_FIELDS
    )

    BrowserExtensionRecord = create_extended_descriptor([UserRecordDescriptorExtension])(
        "browser/brave/extension", GENERIC_EXTENSION_RECORD_FIELDS
    )

    @export(record=BrowserHistoryRecord)
    def history(self) -> Iterator[BrowserHistoryRecord]:
        """Return browser history records for Brave."""
        yield from super().history("brave")

    @export(record=BrowserCookieRecord)
    def cookies(self) -> Iterator[BrowserCookieRecord]:
        """Return browser cookie records for Brave."""
        yield from super().cookies("brave")

    @export(record=BrowserDownloadRecord)
    def downloads(self) -> Iterator[BrowserDownloadRecord]:
        """Return browser download records for Brave."""
        yield from super().downloads("brave")

    @export(record=BrowserExtensionRecord)
    def extensions(self) -> Iterator[BrowserExtensionRecord]:
        """Return browser extension records for Brave."""
        yield from super().extensions("brave")
@@ -1,3 +1,4 @@
1
+ import itertools
1
2
  import json
2
3
  from collections import defaultdict
3
4
  from typing import Iterator, Optional
@@ -9,7 +10,7 @@ from dissect.util.ts import webkittimestamp
9
10
 
10
11
  from dissect.target.exceptions import FileNotFoundError, UnsupportedPluginError
11
12
  from dissect.target.helpers.descriptor_extensions import UserRecordDescriptorExtension
12
- from dissect.target.helpers.fsutil import TargetPath
13
+ from dissect.target.helpers.fsutil import TargetPath, join
13
14
  from dissect.target.helpers.record import create_extended_descriptor
14
15
  from dissect.target.plugin import export
15
16
  from dissect.target.plugins.apps.browser.browser import (
@@ -69,11 +70,12 @@ class ChromiumMixin:
69
70
  users_dirs.append((user_details.user, cur_dir))
70
71
  return users_dirs
71
72
 
72
- def _iter_db(self, filename: str) -> Iterator[SQLite3]:
73
+ def _iter_db(self, filename: str, subdirs: Optional[list[str]] = None) -> Iterator[SQLite3]:
73
74
  """Generate a connection to a sqlite database file.
74
75
 
75
76
  Args:
76
77
  filename: The filename as string of the database where the data is stored.
78
+ subdirs: Subdirectories to also try for every configured directory.
77
79
 
78
80
  Yields:
79
81
  opened db_file (SQLite3)
@@ -83,7 +85,11 @@ class ChromiumMixin:
83
85
  SQLError: If the history file could not be opened.
84
86
  """
85
87
 
86
- for user, cur_dir in self._build_userdirs(self.DIRS):
88
+ dirs = self.DIRS
89
+ if subdirs:
90
+ dirs.extend([join(dir, subdir) for dir, subdir in itertools.product(self.DIRS, subdirs)])
91
+
92
+ for user, cur_dir in self._build_userdirs(dirs):
87
93
  db_file = cur_dir.joinpath(filename)
88
94
  try:
89
95
  yield user, db_file, sqlite3.SQLite3(db_file.open())
@@ -198,7 +204,7 @@ class ChromiumMixin:
198
204
  is_http_only (bool): Cookie http only flag.
199
205
  same_site (bool): Cookie same site flag.
200
206
  """
201
- for user, db_file, db in self._iter_db("Cookies"):
207
+ for user, db_file, db in self._iter_db("Cookies", subdirs=["Network"]):
202
208
  try:
203
209
  for cookie in db.table("cookies").rows():
204
210
  yield self.BrowserCookieRecord(
@@ -1,10 +1,22 @@
1
+ from __future__ import annotations
2
+
1
3
  import json
4
+ import logging
2
5
  import re
3
- from typing import Iterator
6
+ from pathlib import Path
7
+ from typing import Iterator, Optional
8
+
9
+ from dissect.cstruct import cstruct
10
+ from dissect.util import ts
4
11
 
5
12
  from dissect.target.exceptions import UnsupportedPluginError
13
+ from dissect.target.helpers.fsutil import open_decompress
14
+ from dissect.target.helpers.protobuf import ProtobufVarint
6
15
  from dissect.target.helpers.record import TargetRecordDescriptor
7
- from dissect.target.plugin import Plugin, export
16
+ from dissect.target.plugin import Plugin, arg, export
17
+ from dissect.target.target import Target
18
+
19
+ log = logging.getLogger(__name__)
8
20
 
9
21
  DockerContainerRecord = TargetRecordDescriptor(
10
22
  "apps/containers/docker/container",
@@ -35,11 +47,62 @@ DockerImageRecord = TargetRecordDescriptor(
35
47
  ],
36
48
  )
37
49
 
38
- DOCKER_NS_REGEX = re.compile(r"\.(?P<nanoseconds>\d{7,})(?P<postfix>Z|\+\d{2}:\d{2})")
50
+ DockerLogRecord = TargetRecordDescriptor(
51
+ "apps/containers/docker/log",
52
+ [
53
+ ("datetime", "ts"),
54
+ ("string", "container"),
55
+ ("string", "stream"),
56
+ ("string", "message"),
57
+ ],
58
+ )
59
+
60
+ # Resources:
61
+ # - https://github.com/moby/moby/pull/37092
62
+ # - https://github.com/cpuguy83/docker/blob/master/daemon/logger/local/doc.go
63
+ # - https://github.com/moby/moby/blob/master/api/types/plugins/logdriver/entry.proto
64
+ local_def = """
65
+ struct entry {
66
+ uint32 header;
67
+
68
+ // source
69
+ uint8 s_type; // 0x0a
70
+ varint s_len; // 0x06
71
+ char source[s_len]; // stdout or stderr
72
+
73
+ // timestamp
74
+ uint8 t_type; // 0x10
75
+ varint ts; // timestamp in ums
76
+
77
+ // message
78
+ uint8 m_type; // 0x1a
79
+ varint m_len; // message length
80
+ char message[m_len];
81
+
82
+ // partial_log_metadata not implemented
83
+
84
+ uint32 footer;
85
+ };
86
+ """
87
+
88
+ c_local = cstruct(endian=">")
89
+ c_local.addtype("varint", ProtobufVarint(c_local, "varint", size=None, signed=False, alignment=1))
90
+ c_local.load(local_def, compiled=False)
91
+
92
+ RE_DOCKER_NS = re.compile(r"\.(?P<nanoseconds>\d{7,})(?P<postfix>Z|\+\d{2}:\d{2})")
93
+ RE_ANSI_ESCAPE = re.compile(r"\x1b(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])")
94
+
95
+ ASCII_MAP = {
96
+ "\x08": "[BS]",
97
+ "\x09": "[TAB]",
98
+ "\x0A": "", # \n
99
+ "\x0D": "", # \r
100
+ }
39
101
 
40
102
 
41
103
  class DockerPlugin(Plugin):
42
- """
104
+ """Parse Docker Daemon artefacts.
105
+
43
106
  References:
44
107
  - https://didactic-security.com/resources/docker-forensics.pdf
45
108
  - https://didactic-security.com/resources/docker-forensics-cheatsheet.pdf
@@ -48,84 +111,218 @@ class DockerPlugin(Plugin):
48
111
 
49
112
  __namespace__ = "docker"
50
113
 
51
- DOCKER_PATH = "/var/lib/docker"
114
    def __init__(self, target: Target):
        super().__init__(target)
        # All discovered Docker data-root folders on the target,
        # deduplicated (find_installs may yield the same root twice).
        self.installs = set(find_installs(target))

    def check_compatible(self) -> None:
        # Plugin applies only if at least one data-root was discovered.
        if not self.installs:
            raise UnsupportedPluginError("No Docker install(s) found")
56
121
 
57
122
    @export(record=DockerImageRecord)
    def images(self) -> Iterator[DockerImageRecord]:
        """Returns any pulled docker images on the target system.

        Reads ``image/overlay2/repositories.json`` in every discovered
        data-root and enriches each image with its creation timestamp from
        the per-image metadata file, when present.
        """

        for data_root in self.installs:
            images_path = data_root.joinpath("image/overlay2/repositories.json")

            if images_path.exists():
                repositories = json.loads(images_path.read_text()).get("Repositories")
            else:
                self.target.log.debug("No docker images found, file %s does not exist.", images_path)
                continue

            for name, tags in repositories.items():
                for tag, hash in tags.items():
                    # Metadata file is named after the bare digest (without the "sha256:" prefix).
                    image_metadata_path = data_root.joinpath(
                        "image/overlay2/imagedb/content/sha256/", hash.split(":")[-1]
                    )
                    created = None

                    if image_metadata_path.exists():
                        image_metadata = json.loads(image_metadata_path.read_text())
                        created = convert_timestamp(image_metadata.get("created"))

                    yield DockerImageRecord(
                        name=name,
                        tag=tag,
                        image_id=hash_to_image_id(hash),
                        created=created,
                        hash=hash,
                        _target=self.target,
                    )
86
154
 
87
155
    @export(record=DockerContainerRecord)
    def containers(self) -> Iterator[DockerContainerRecord]:
        """Returns any docker containers present on the target system.

        Parses every ``config.v2.json`` under ``containers/`` in each
        discovered data-root.
        """

        for data_root in self.installs:
            for config_path in data_root.joinpath("containers").glob("**/config.v2.json"):
                config = json.loads(config_path.read_text())
                running = config.get("State").get("Running")
                # Where port and pid information lives depends on whether the
                # container is currently running (see convert_ports).
                if running:
                    ports = config.get("NetworkSettings").get("Ports", {})
                    pid = config.get("Pid")
                else:
                    ports = config.get("Config").get("ExposedPorts", {})
                    pid = None
                volumes = []
                if mount_points := config.get("MountPoints"):
                    for mp in mount_points:
                        mount_point = mount_points[mp]
                        volumes.append(f"{mount_point.get('Source')}:{mount_point.get('Destination')}")
                yield DockerContainerRecord(
                    container_id=config.get("ID"),
                    image=config.get("Config").get("Image"),
                    command=config.get("Config").get("Cmd"),
                    created=convert_timestamp(config.get("Created")),
                    running=running,
                    pid=pid,
                    started=convert_timestamp(config.get("State").get("StartedAt")),
                    finished=convert_timestamp(config.get("State").get("FinishedAt")),
                    ports=convert_ports(ports),
                    # Docker stores the name with a leading slash; strip it once.
                    names=config.get("Name").replace("/", "", 1),
                    volumes=volumes,
                    source=config_path,
                    _target=self.target,
                )
124
189
 
190
    @export(record=DockerLogRecord)
    @arg(
        "--raw-messages",
        action="store_true",
        help="preserve ANSI escape sequences and trailing newlines from log messages",
    )
    @arg(
        "--remove-backspaces",
        action="store_true",
        help="alter messages by removing ASCII backspaces and the corresponding characters",
    )
    def logs(self, raw_messages: bool = False, remove_backspaces: bool = False) -> Iterator[DockerLogRecord]:
        """Returns log files (stdout/stderr) from Docker containers.

        The default Docker Daemon log driver is ``json-file``, which
        performs no log rotation. Another log driver is ``local`` and
        performs log rotation and compresses log files more efficiently.

        Eventually ``local`` will likely replace ``json-file`` as the
        default log driver.

        Resources:
            - https://docs.docker.com/config/containers/logging/configure/
            - https://docs.docker.com/config/containers/logging/json-file/
            - https://docs.docker.com/config/containers/logging/local/
        """

        for data_root in self.installs:
            containers_path = data_root.joinpath("containers")

            for log_file in containers_path.glob(("**/*.log*")):
                container = log_file.parent

                # json log driver: file name ends in "<id>-json.log"
                if "-json.log" in log_file.name:
                    for log_entry in self._parse_json_log(log_file):
                        yield DockerLogRecord(
                            ts=log_entry.get("time"),
                            container=container.name,  # container hash
                            stream=log_entry.get("stream"),
                            message=log_entry.get("log")
                            if raw_messages
                            else strip_log(log_entry.get("log"), remove_backspaces),
                            _target=self.target,
                        )

                # local log driver
                else:
                    for log_entry in self._parse_local_log(log_file):
                        yield DockerLogRecord(
                            # entry.ts // 1000 fed to from_unix_us — presumably
                            # nanoseconds converted to microseconds; TODO confirm
                            # against moby's entry.proto.
                            ts=ts.from_unix_us(log_entry.ts // 1000),
                            # local-driver logs sit one directory deeper than
                            # json logs, hence the extra .parent here.
                            container=container.parent.name,  # container hash
                            stream=log_entry.source,
                            message=log_entry.message
                            if raw_messages
                            else strip_log(log_entry.message, remove_backspaces),
                            _target=self.target,
                        )
248
+
249
    def _parse_local_log(self, path: Path) -> Iterator[c_local.entry]:
        """Parse a Docker ``local`` log driver file, yielding protobuf entries until EOF."""
        fh = open_decompress(path, "rb")  # can be a .gz file

        while True:
            try:
                entry = c_local.entry(fh)
                # header and footer are presumably both the frame size;
                # a mismatch suggests the entry layout drifted from local_def.
                if entry.header != entry.footer:
                    # NOTE(review): implicit string concatenation drops the
                    # space after "%s." in the emitted warning message.
                    self.target.log.warning(
                        "Could not reliably parse log entry at offset %i in file %s."
                        "Entry could be parsed incorrectly. Please report this "
                        "issue as Docker's protobuf could have changed.",
                        fh.tell(),
                        path,
                    )
                yield entry
            except EOFError:
                # decode_varint / cstruct raise EOFError at end of stream.
                break
266
+
267
+ def _parse_json_log(self, path: Path) -> Iterator[dict]:
268
+ for line in open_decompress(path, "rt"):
269
+ try:
270
+ entry = json.loads(line)
271
+ except json.JSONDecodeError as e:
272
+ self.target.log.warning("Could not decode JSON line in file %s", path)
273
+ self.target.log.debug("", exc_info=e)
274
+ continue
275
+ yield entry
276
+
277
+
278
def get_data_path(path: Path) -> Optional[str]:
    """Returns the configured Docker daemon data-root path from a ``daemon.json`` file.

    Args:
        path: Path to a Docker ``daemon.json`` configuration file.

    Returns:
        The configured ``data-root`` value, or ``None`` when the file is not
        valid JSON or has no ``data-root`` key.
    """
    try:
        config = json.loads(path.open("rt").read())
    except json.JSONDecodeError as e:
        log.warning("Could not read JSON file '%s'", path)
        # Original bugs fixed here: log.debug() was called without the
        # required msg argument (TypeError), and execution fell through to
        # config.get() with config unbound (UnboundLocalError).
        log.debug("", exc_info=e)
        return None

    return config.get("data-root")
287
+
288
+
289
def find_installs(target: Target) -> Iterator[Path]:
    """Attempt to find additional configured and existing Docker daemon data-root folders.

    Yields the default ``/var/lib/docker`` root when present, then any
    ``data-root`` configured in system-wide or per-user ``daemon.json`` files.

    References:
        - https://docs.docker.com/config/daemon/
    """

    def _data_root_from_config(config_file: Path) -> Iterator[Path]:
        # get_data_path returns None for unparsable configs or configs
        # without a "data-root" key; the original passed that None straight
        # into target.fs.path(), raising TypeError.
        if data_root := get_data_path(config_file):
            if (data_root_path := target.fs.path(data_root)).exists():
                yield data_root_path

    default_config_paths = [
        # Linux
        "/etc/docker/daemon.json",
        "/var/snap/docker/current/config/daemon.json",
        # Windows
        "sysvol/ProgramData/docker/config/daemon.json",
    ]

    user_config_paths = [
        # Docker Desktop (macOS/Windows/Linux)
        ".docker/daemon.json",
    ]

    if (default_root := target.fs.path("/var/lib/docker")).exists():
        yield default_root

    for path in default_config_paths:
        if (config_file := target.fs.path(path)).exists():
            yield from _data_root_from_config(config_file)

    for path in user_config_paths:
        for user_details in target.user_details.all_with_home():
            if (config_file := user_details.home_path.joinpath(path)).exists():
                yield from _data_root_from_config(config_file)
322
+
323
+
324
+ def convert_timestamp(timestamp: str) -> str:
325
+ """Docker sometimes uses (unpadded) 9 digit nanosecond precision
129
326
  in their timestamp logs, eg. "2022-12-19T13:37:00.123456789Z".
130
327
 
131
328
  Python has no native %n nanosecond strptime directive, so we
@@ -134,7 +331,7 @@ def _convert_timestamp(timestamp: str) -> str:
134
331
  """
135
332
 
136
333
  timestamp_nanoseconds_plus_postfix = timestamp[19:]
137
- match = DOCKER_NS_REGEX.match(timestamp_nanoseconds_plus_postfix)
334
+ match = RE_DOCKER_NS.match(timestamp_nanoseconds_plus_postfix)
138
335
 
139
336
  # Timestamp does not have nanoseconds if there is no match.
140
337
  if not match:
@@ -146,9 +343,8 @@ def _convert_timestamp(timestamp: str) -> str:
146
343
  return f"{timestamp[:19]}.{microseconds}{match['postfix']}"
147
344
 
148
345
 
149
- def _convert_ports(ports: dict) -> dict:
150
- """
151
- Depending on the state of the container (turned on or off) we
346
+ def convert_ports(ports: dict[str, list | dict]) -> dict:
347
+ """Depending on the state of the container (turned on or off) we
152
348
  can salvage forwarded ports for the container in different
153
349
  parts of the config.v2.json file.
154
350
 
@@ -171,5 +367,43 @@ def _convert_ports(ports: dict) -> dict:
171
367
  return fports
172
368
 
173
369
 
174
- def _hash_to_image_id(hash: str) -> str:
370
def hash_to_image_id(hash: str) -> str:
    """Convert a full image digest (e.g. ``sha256:<digest>``) to the abbreviated 12-character docker image id."""
    return hash.rpartition(":")[2][:12]
373
+
374
+
375
def strip_log(input: str | bytes, exc_backspace: bool = False) -> str:
    """Remove ANSI escape sequences from a given input string.

    Also translates ASCII codes such as backspaces to readable format.

    Resources:
        - https://gist.github.com/fnky/458719343aabd01cfb17a3a4f7296797#general-ascii-codes
    """

    text = input.decode("utf-8", errors="backslashreplace") if isinstance(input, bytes) else input

    cleaned = RE_ANSI_ESCAPE.sub("", text)

    if exc_backspace:
        cleaned = _replace_backspace(cleaned)

    for code, replacement in ASCII_MAP.items():
        cleaned = cleaned.replace(code, replacement)

    return cleaned
396
+
397
+
398
+ def _replace_backspace(input: str) -> str:
399
+ """Remove ANSI backspace characters (``\x08``) and 'replay' their effect on the rest of the string.
400
+
401
+ For example, with the input ``123\x084``, the output would be ``124``.
402
+ """
403
+ out = ""
404
+ for char in input:
405
+ if char == "\x08":
406
+ out = out[:-1]
407
+ else:
408
+ out += char
409
+ return out
dissect/target/target.py CHANGED
@@ -87,8 +87,8 @@ class Target:
87
87
 
88
88
  try:
89
89
  self._config = config.load(self.path)
90
- except Exception:
91
- self.log.exception("Error loading config file")
90
+ except Exception as e:
91
+ self.log.debug("Error loading config file", exc_info=e)
92
92
  self._config = config.load(None) # This loads an empty config.
93
93
 
94
94
  # Fill the disks and/or volumes and/or filesystems and apply() will
@@ -372,7 +372,7 @@ class Target:
372
372
  recursive: Whether to check the child ``Target`` for more ``Targets``.
373
373
 
374
374
  Returns:
375
- An interator of ``Targets``.
375
+ An iterator of ``Targets``.
376
376
  """
377
377
  for child in self.list_children():
378
378
  try:
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: dissect.target
3
- Version: 3.16.dev21
3
+ Version: 3.16.dev23
4
4
  Summary: This module ties all other Dissect modules together, it provides a programming API and command line tools which allow easy access to various data sources inside disk images or file collections (a.k.a. targets)
5
5
  Author-email: Dissect Team <dissect@fox-it.com>
6
6
  License: Affero General Public License v3
@@ -5,7 +5,7 @@ dissect/target/filesystem.py,sha256=aLkvZMgeah39Nhlscawh77cm2mzFYI9J5h3uT3Rigtc,
5
5
  dissect/target/loader.py,sha256=0-LcZNi7S0qsXR7XGtrzxpuCh9BsLcqNR1T15O7SnBM,7257
6
6
  dissect/target/plugin.py,sha256=ndqz4RpbBCN6wagCBvfHzHkL0l0-gnbHjc7c8Blite4,48473
7
7
  dissect/target/report.py,sha256=06uiP4MbNI8cWMVrC1SasNS-Yg6ptjVjckwj8Yhe0Js,7958
8
- dissect/target/target.py,sha256=HxqqnGW0i0Y4a6Q4DjgNmqkJmJ-_IrkvksNgSPwa7LI,32143
8
+ dissect/target/target.py,sha256=xNJdecZSt2oHcZwf775kOSTFRA-c_hKoScXaDuK-8FI,32155
9
9
  dissect/target/volume.py,sha256=aQZAJiny8jjwkc9UtwIRwy7nINXjCxwpO-_UDfh6-BA,15801
10
10
  dissect/target/containers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
11
11
  dissect/target/containers/asdf.py,sha256=DJp0QEFwUjy2MFwKYcYqIR_BS1fQT1Yi9Kcmqt0aChM,1366
@@ -56,6 +56,7 @@ dissect/target/helpers/mount.py,sha256=JxhUYyEbDnHfzPpfuWy4nV9OwCJPoDSGdHHNiyvd_
56
56
  dissect/target/helpers/mui.py,sha256=i-7XoHbu4WO2fYapK9yGAMW04rFlgRispknc1KQIS5Q,22258
57
57
  dissect/target/helpers/network_managers.py,sha256=tjqkVWn7i3PpBPkYnKUU0XxhqTTJlIjOc7Y2jpzdzA4,24525
58
58
  dissect/target/helpers/polypath.py,sha256=h8p7m_OCNiQljGwoZh5Aflr9H2ot6CZr6WKq1OSw58o,2175
59
+ dissect/target/helpers/protobuf.py,sha256=NwKrZD4q9v7J8GnZX9gbzMUMV5pR78eAV17jgWOz_EY,1730
59
60
  dissect/target/helpers/record.py,sha256=lWl7k2Mp9Axllm0tXzPGJx2zj2zONsyY_p5g424T0Lc,4826
60
61
  dissect/target/helpers/record_modifier.py,sha256=BiZ_gtqVxuByLWrga1lfglk3X-TcMrJC0quxPpXoIRo,3138
61
62
  dissect/target/helpers/regutil.py,sha256=kX-sSZbW8Qkg29Dn_9zYbaQrwLumrr4Y8zJ1EhHXIAM,27337
@@ -112,14 +113,15 @@ dissect/target/plugins/apps/av/sophos.py,sha256=gSfTvjBZMuT0hsL-p4oYxuYmakbqApoO
112
113
  dissect/target/plugins/apps/av/symantec.py,sha256=RFLyNW6FyuoGcirJ4xHbQM8oGjua9W4zXmC7YDF-H20,14109
113
114
  dissect/target/plugins/apps/av/trendmicro.py,sha256=jloy_N4hHAqF1sVIEeD5Q7LRYal3_os14Umk-hGaAR4,4613
114
115
  dissect/target/plugins/apps/browser/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
116
+ dissect/target/plugins/apps/browser/brave.py,sha256=Fid4P5sUuRQsn7YKwHJodj_Jnfp1H7fAvNs_rL53QCI,2462
115
117
  dissect/target/plugins/apps/browser/browser.py,sha256=_QP1u57-wOSiLvpTUotWDpqBdRn-WEWpBDzCMqZTYO0,2682
116
118
  dissect/target/plugins/apps/browser/chrome.py,sha256=XMDq3v-fA0W16gm5jXryP73PEtF7bRw5Pfqy5JQd-U8,2635
117
- dissect/target/plugins/apps/browser/chromium.py,sha256=Y1sS0EqF5F5abpLXNog2HwI5QV5d3qnBvZMnE0MPdyU,17774
119
+ dissect/target/plugins/apps/browser/chromium.py,sha256=QswqB1sSc6i1wpRbZnTvvq-UeEz0bN7pefc_gf5w4Wc,18078
118
120
  dissect/target/plugins/apps/browser/edge.py,sha256=cjMbAGtlTVyJLuha3D0uNbai0mJnkXmp6d0gBfceWB4,2473
119
121
  dissect/target/plugins/apps/browser/firefox.py,sha256=6dUTNfclNTsqB_GA-4q38tyHPuiw8lgNEmmtfIWbMUY,11373
120
122
  dissect/target/plugins/apps/browser/iexplore.py,sha256=LUXXCjMBBFcFN2ceBpks8qM1PyOvrBPn1guA4WM4oSU,8706
121
123
  dissect/target/plugins/apps/container/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
122
- dissect/target/plugins/apps/container/docker.py,sha256=guFPqRLbeP4p8R6lDIZVKWnva5_S7rQUVKG21QDz-B4,6416
124
+ dissect/target/plugins/apps/container/docker.py,sha256=0HWheazdh9arri0hFZgEUximHO_IaF_Dg_kJ7sq59Jw,14487
123
125
  dissect/target/plugins/apps/remoteaccess/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
124
126
  dissect/target/plugins/apps/remoteaccess/anydesk.py,sha256=lHtgINWXfVpPuCTRyQmT2ZO-1vkoqiXZ7coj8cZ8p4c,3185
125
127
  dissect/target/plugins/apps/remoteaccess/remoteaccess.py,sha256=UQDmDC4Y-KxYl_8kaAh6SG_BLJZ6SeGnxG0gyD8tzaE,833
@@ -323,10 +325,10 @@ dissect/target/volumes/luks.py,sha256=OmCMsw6rCUXG1_plnLVLTpsvE1n_6WtoRUGQbpmu1z
323
325
  dissect/target/volumes/lvm.py,sha256=wwQVR9I3G9YzmY6UxFsH2Y4MXGBcKL9aayWGCDTiWMU,2269
324
326
  dissect/target/volumes/md.py,sha256=j1K1iKmspl0C_OJFc7-Q1BMWN2OCC5EVANIgVlJ_fIE,1673
325
327
  dissect/target/volumes/vmfs.py,sha256=-LoUbn9WNwTtLi_4K34uV_-wDw2W5hgaqxZNj4UmqAQ,1730
326
- dissect.target-3.16.dev21.dist-info/COPYRIGHT,sha256=m-9ih2RVhMiXHI2bf_oNSSgHgkeIvaYRVfKTwFbnJPA,301
327
- dissect.target-3.16.dev21.dist-info/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
328
- dissect.target-3.16.dev21.dist-info/METADATA,sha256=P5IeEWD-JvFiTMQeMulDT6W2H5J0Dvo8DbB5T1Cwyjg,11113
329
- dissect.target-3.16.dev21.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
330
- dissect.target-3.16.dev21.dist-info/entry_points.txt,sha256=tvFPa-Ap-gakjaPwRc6Fl6mxHzxEZ_arAVU-IUYeo_s,447
331
- dissect.target-3.16.dev21.dist-info/top_level.txt,sha256=Mn-CQzEYsAbkxrUI0TnplHuXnGVKzxpDw_po_sXpvv4,8
332
- dissect.target-3.16.dev21.dist-info/RECORD,,
328
+ dissect.target-3.16.dev23.dist-info/COPYRIGHT,sha256=m-9ih2RVhMiXHI2bf_oNSSgHgkeIvaYRVfKTwFbnJPA,301
329
+ dissect.target-3.16.dev23.dist-info/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
330
+ dissect.target-3.16.dev23.dist-info/METADATA,sha256=0sRgs6_clcf3PUsMUj5HEJkVmN398nl1HOMUtvwTe48,11113
331
+ dissect.target-3.16.dev23.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
332
+ dissect.target-3.16.dev23.dist-info/entry_points.txt,sha256=tvFPa-Ap-gakjaPwRc6Fl6mxHzxEZ_arAVU-IUYeo_s,447
333
+ dissect.target-3.16.dev23.dist-info/top_level.txt,sha256=Mn-CQzEYsAbkxrUI0TnplHuXnGVKzxpDw_po_sXpvv4,8
334
+ dissect.target-3.16.dev23.dist-info/RECORD,,