dissect.target-3.20.1-py3-none-any.whl → dissect.target-3.20.2.dev12-py3-none-any.whl

Files changed (29):
  1. dissect/target/filesystems/dir.py +9 -6
  2. dissect/target/filesystems/zip.py +4 -1
  3. dissect/target/helpers/configutil.py +3 -3
  4. dissect/target/loaders/dir.py +13 -3
  5. dissect/target/loaders/itunes.py +5 -3
  6. dissect/target/loaders/velociraptor.py +35 -15
  7. dissect/target/plugins/apps/browser/iexplore.py +7 -3
  8. dissect/target/plugins/general/plugins.py +1 -1
  9. dissect/target/plugins/os/unix/_os.py +1 -1
  10. dissect/target/plugins/os/unix/esxi/_os.py +34 -32
  11. dissect/target/plugins/os/unix/linux/fortios/_keys.py +7919 -1951
  12. dissect/target/plugins/os/unix/linux/fortios/_os.py +109 -22
  13. dissect/target/plugins/os/unix/linux/network_managers.py +1 -1
  14. dissect/target/plugins/os/unix/log/auth.py +6 -37
  15. dissect/target/plugins/os/unix/log/helpers.py +46 -0
  16. dissect/target/plugins/os/unix/log/messages.py +24 -15
  17. dissect/target/plugins/os/windows/activitiescache.py +32 -30
  18. dissect/target/plugins/os/windows/catroot.py +14 -5
  19. dissect/target/plugins/os/windows/lnk.py +13 -7
  20. dissect/target/plugins/os/windows/notifications.py +40 -38
  21. dissect/target/plugins/os/windows/regf/cit.py +20 -7
  22. dissect/target/tools/diff.py +990 -0
  23. {dissect.target-3.20.1.dist-info → dissect.target-3.20.2.dev12.dist-info}/METADATA +2 -2
  24. {dissect.target-3.20.1.dist-info → dissect.target-3.20.2.dev12.dist-info}/RECORD +29 -27
  25. {dissect.target-3.20.1.dist-info → dissect.target-3.20.2.dev12.dist-info}/entry_points.txt +1 -0
  26. {dissect.target-3.20.1.dist-info → dissect.target-3.20.2.dev12.dist-info}/COPYRIGHT +0 -0
  27. {dissect.target-3.20.1.dist-info → dissect.target-3.20.2.dev12.dist-info}/LICENSE +0 -0
  28. {dissect.target-3.20.1.dist-info → dissect.target-3.20.2.dev12.dist-info}/WHEEL +0 -0
  29. {dissect.target-3.20.1.dist-info → dissect.target-3.20.2.dev12.dist-info}/top_level.txt +0 -0
dissect/target/plugins/os/unix/linux/fortios/_os.py

@@ -2,6 +2,7 @@ from __future__ import annotations
 
 import gzip
 import hashlib
+import struct
 from base64 import b64decode
 from datetime import datetime
 from io import BytesIO
@@ -17,11 +18,17 @@ from dissect.target.helpers.fsutil import open_decompress
 from dissect.target.helpers.record import TargetRecordDescriptor, UnixUserRecord
 from dissect.target.plugin import OperatingSystem, export
 from dissect.target.plugins.os.unix.linux._os import LinuxPlugin
-from dissect.target.plugins.os.unix.linux.fortios._keys import KERNEL_KEY_MAP
+from dissect.target.plugins.os.unix.linux.fortios._keys import (
+    KERNEL_KEY_MAP,
+    AesKey,
+    ChaCha20Key,
+    ChaCha20Seed,
+)
 from dissect.target.target import Target
 
 try:
     from Crypto.Cipher import AES, ChaCha20
+    from Crypto.Util import Counter
 
     HAS_CRYPTO = True
 except ImportError:
@@ -95,8 +102,11 @@ class FortiOSPlugin(LinuxPlugin):
             # The rootfs.gz file could be encrypted.
             try:
                 kernel_hash = get_kernel_hash(sysvol)
-                key, iv = key_iv_for_kernel_hash(kernel_hash)
-                rfs_fh = decrypt_rootfs(rootfs.open(), key, iv)
+                target.log.info("Kernel hash: %s", kernel_hash)
+                key = key_iv_for_kernel_hash(kernel_hash)
+                target.log.info("Trying to decrypt_rootfs using key: %r", key)
+                rfs_fh = decrypt_rootfs(rootfs.open(), key)
+                target.log.info("Decrypted fh: %r", rfs_fh)
                 vfs = TarFilesystem(rfs_fh, tarinfo=cpio.CpioInfo)
             except RuntimeError:
                 target.log.warning("Could not decrypt rootfs.gz. Missing `pycryptodome` dependency.")
@@ -471,7 +481,7 @@ def decrypt_password(input: str) -> str:
     return "ENC:" + input
 
 
-def key_iv_for_kernel_hash(kernel_hash: str) -> tuple[bytes, bytes]:
+def key_iv_for_kernel_hash(kernel_hash: str) -> AesKey | ChaCha20Key:
     """Return decryption key and IV for a specific sha256 kernel hash.
 
     The decryption key and IV are used to decrypt the ``rootfs.gz`` file.
@@ -486,17 +496,96 @@ def key_iv_for_kernel_hash(kernel_hash: str) -> tuple[bytes, bytes]:
         ValueError: When no decryption keys are available for the given kernel hash.
     """
 
-    key = bytes.fromhex(KERNEL_KEY_MAP.get(kernel_hash, ""))
-    if len(key) == 32:
+    key = KERNEL_KEY_MAP.get(kernel_hash)
+    if isinstance(key, ChaCha20Seed):
         # FortiOS 7.4.x uses a KDF to derive the key and IV
-        return _kdf_7_4_x(key)
-    elif len(key) == 48:
+        key, iv = _kdf_7_4_x(key.key)
+        return ChaCha20Key(key, iv)
+    elif isinstance(key, ChaCha20Key):
         # FortiOS 7.0.13 and 7.0.14 uses a static key and IV
-        return key[:32], key[32:]
+        return key
+    elif isinstance(key, AesKey):
+        # FortiOS 7.0.16, 7.2.9, 7.4.4, 7.6.0 and higher uses AES-CTR with a custom CTR increment
+        return key
     raise ValueError(f"No known decryption keys for kernel hash: {kernel_hash}")
 
 
-def decrypt_rootfs(fh: BinaryIO, key: bytes, iv: bytes) -> BinaryIO:
+def chacha20_decrypt(fh: BinaryIO, key: ChaCha20Key) -> bytes:
+    """Decrypt file using ChaCha20 with given ChaCha20Key.
+
+    Args:
+        fh: File-like object to the encrypted rootfs.gz file.
+        key: ChaCha20Key.
+
+    Returns:
+        Decrypted bytes.
+    """
+
+    # First 8 bytes = counter, last 8 bytes = nonce
+    # PyCryptodome interally divides this seek by 64 to get a (position, offset) tuple
+    # We're interested in updating the position in the ChaCha20 internal state, so to make
+    # PyCryptodome "OpenSSL-compatible" we have to multiply the counter by 64
+    cipher = ChaCha20.new(key=key.key, nonce=key.iv[8:])
+    cipher.seek(int.from_bytes(key.iv[:8], "little") * 64)
+    return cipher.decrypt(fh.read())
+
+
+def calculate_counter_increment(iv: bytes) -> int:
+    """Calculate the custom FortiGate CTR increment from IV.
+
+    Args:
+        iv: 16 bytes IV.
+
+    Returns:
+        Custom CTR increment.
+    """
+    increment = 0
+    for i in range(16):
+        increment ^= (iv[i] & 15) ^ ((iv[i] >> 4) & 0xFF)
+    return max(increment, 1)
+
+
+def aes_decrypt(fh: BinaryIO, key: AesKey) -> bytes:
+    """Decrypt file using a custom AES CTR increment with given AesKey.
+
+    Args:
+        fh: File-like object to the encrypted rootfs.gz file.
+        key: AesKey.
+
+    Returns:
+        Decrypted bytes.
+    """
+
+    data = bytearray(fh.read())
+
+    # Calculate custom CTR increment from IV
+    increment = calculate_counter_increment(key.iv)
+    advance_block = (b"\x69" * 16) * (increment - 1)
+
+    # AES counter is little-endian and has a prefix
+    prefix, counter = struct.unpack("<8sQ", key.iv)
+    ctr = Counter.new(
+        64,
+        prefix=prefix,
+        initial_value=counter,
+        little_endian=True,
+        allow_wraparound=True,
+    )
+    cipher = AES.new(key.key, mode=AES.MODE_CTR, counter=ctr)
+
+    nblocks, nleft = divmod(len(data), 16)
+    for i in range(nblocks):
+        offset = i * 16
+        data[offset : offset + 16] = cipher.decrypt(data[offset : offset + 16])
+        cipher.decrypt(advance_block)  # custom advance the counter
+
+    if nleft:
+        data[nblocks * 16 :] = cipher.decrypt(data[nblocks * 16 :])
+
+    return data
+
+
+def decrypt_rootfs(fh: BinaryIO, key: ChaCha20Key | AesKey) -> BinaryIO:
     """Attempt to decrypt an encrypted ``rootfs.gz`` file with given key and IV.
 
     FortiOS releases as of 7.4.1 / 2023-08-31, have ChaCha20 encrypted ``rootfs.gz`` files.
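
Note: the new calculate_counter_increment above folds both nibbles of every IV byte into a single XOR accumulator and clamps the result to at least 1. A quick standalone sanity check of that derivation (the IVs below are made-up illustration values, not real FortiOS key material):

    # Mirrors calculate_counter_increment() from the hunk above.
    def calculate_counter_increment(iv: bytes) -> int:
        increment = 0
        for i in range(16):
            increment ^= (iv[i] & 15) ^ ((iv[i] >> 4) & 0xFF)
        return max(increment, 1)

    print(calculate_counter_increment(bytes(16)))            # all zeros XOR to 0, clamped to 1
    print(calculate_counter_increment(b"\x12" + bytes(15)))  # (0x12 & 15) ^ (0x12 >> 4) = 2 ^ 1 = 3
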
@@ -511,8 +600,7 @@ def decrypt_rootfs(fh: BinaryIO, key: bytes, iv: bytes) -> BinaryIO:
 
     Args:
         fh: File-like object to the encrypted rootfs.gz file.
-        key: ChaCha20 key.
-        iv: ChaCha20 iv.
+        key: ChaCha20Key or AesKey.
 
     Returns:
         File-like object to the decrypted rootfs.gz file.
@@ -525,13 +613,12 @@ def decrypt_rootfs(fh: BinaryIO, key: bytes, iv: bytes) -> BinaryIO:
     if not HAS_CRYPTO:
         raise RuntimeError("Missing pycryptodome dependency")
 
-    # First 8 bytes = counter, last 8 bytes = nonce
-    # PyCryptodome interally divides this seek by 64 to get a (position, offset) tuple
-    # We're interested in updating the position in the ChaCha20 internal state, so to make
-    # PyCryptodome "OpenSSL-compatible" we have to multiply the counter by 64
-    cipher = ChaCha20.new(key=key, nonce=iv[8:])
-    cipher.seek(int.from_bytes(iv[:8], "little") * 64)
-    result = cipher.decrypt(fh.read())
+    result = b""
+    if isinstance(key, ChaCha20Key):
+        result = chacha20_decrypt(fh, key)
+    elif isinstance(key, AesKey):
+        result = aes_decrypt(fh, key)
+    result = result[:-256]  # strip off the 256 byte footer
 
     if result[0:2] != b"\x1f\x8b":
         raise ValueError("Failed to decrypt: No gzip magic header found.")
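
Note: the "multiply by 64" trick from the removed comment (now in chacha20_decrypt) works because PyCryptodome's ChaCha20.seek() takes a byte offset while the IV encodes a 64-byte block counter. A minimal sketch of the equivalence, assuming pycryptodome is installed and using dummy all-zero key material for illustration only:

    from Crypto.Cipher import ChaCha20

    key = bytes(32)   # dummy key, illustration only
    nonce = bytes(8)  # an 8-byte nonce selects the original, OpenSSL-compatible ChaCha20 variant

    n = 3  # pretend the first 8 IV bytes decoded to block counter 3

    a = ChaCha20.new(key=key, nonce=nonce)
    a.seek(n * 64)  # seek() is byte-based; one ChaCha20 keystream block is 64 bytes

    b = ChaCha20.new(key=key, nonce=nonce)
    b.encrypt(bytes(n * 64))  # burn n keystream blocks by hand instead

    assert a.encrypt(bytes(32)) == b.encrypt(bytes(32))
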
@@ -539,7 +626,7 @@ def decrypt_rootfs(fh: BinaryIO, key: bytes, iv: bytes) -> BinaryIO:
     return BytesIO(result)
 
 
-def _kdf_7_4_x(key_data: str | bytes) -> tuple[bytes, bytes]:
+def _kdf_7_4_x(key_data: str | bytes, offset_key: int = 4, offset_iv: int = 5) -> tuple[bytes, bytes]:
    """Derive 32 byte key and 16 byte IV from 32 byte seed.
 
     As the IV needs to be 16 bytes, we return the first 16 bytes of the sha256 hash.
@@ -548,8 +635,8 @@ def _kdf_7_4_x(key_data: str | bytes) -> tuple[bytes, bytes]:
     if isinstance(key_data, str):
         key_data = bytes.fromhex(key_data)
 
-    key = hashlib.sha256(key_data[4:32] + key_data[:4]).digest()
-    iv = hashlib.sha256(key_data[5:32] + key_data[:5]).digest()[:16]
+    key = hashlib.sha256(key_data[offset_key:32] + key_data[:offset_key]).digest()
+    iv = hashlib.sha256(key_data[offset_iv:32] + key_data[:offset_iv]).digest()[:16]
     return key, iv
 
 
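Note: with the new offset_key/offset_iv parameters defaulting to 4 and 5, the KDF is just sha256 over a rotated seed. A self-contained sketch with a dummy 32-byte seed (not a real KERNEL_KEY_MAP entry):

    import hashlib

    seed = bytes(range(32))  # dummy seed, illustration only

    key = hashlib.sha256(seed[4:32] + seed[:4]).digest()      # offset_key = 4
    iv = hashlib.sha256(seed[5:32] + seed[:5]).digest()[:16]  # offset_iv = 5

    assert len(key) == 32 and len(iv) == 16
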
dissect/target/plugins/os/unix/linux/network_managers.py

@@ -567,7 +567,7 @@ def parse_unix_dhcp_log_messages(target: Target, iter_all: bool = False) -> set[
                 continue
 
             # Debian and CentOS dhclient
-            if hasattr(record, "daemon") and record.daemon == "dhclient" and "bound to" in line:
+            if hasattr(record, "service") and record.service == "dhclient" and "bound to" in line:
                 ip = line.split("bound to")[1].split(" ")[1].strip()
                 ips.add(ip)
                 continue
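
Note: only the record field name changes here (daemon becomes service, matching the shared RE_LINE regex introduced below); the IP extraction itself is untouched. For reference, on a hypothetical dhclient message it behaves like this:

    line = "dhclient[1234]: bound to 10.13.37.2 -- renewal in 1800 seconds."  # made-up example

    ip = line.split("bound to")[1].split(" ")[1].strip()
    assert ip == "10.13.37.2"
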
dissect/target/plugins/os/unix/log/auth.py

@@ -1,6 +1,5 @@
 from __future__ import annotations
 
-import itertools
 import logging
 import re
 from abc import ABC, abstractmethod
@@ -12,24 +11,18 @@ from typing import Any, Iterator
 
 from dissect.target import Target
 from dissect.target.exceptions import UnsupportedPluginError
-from dissect.target.helpers.fsutil import open_decompress
 from dissect.target.helpers.record import DynamicDescriptor, TargetRecordDescriptor
 from dissect.target.helpers.utils import year_rollover_helper
 from dissect.target.plugin import Plugin, alias, export
+from dissect.target.plugins.os.unix.log.helpers import (
+    RE_LINE,
+    RE_TS,
+    is_iso_fmt,
+    iso_readlines,
+)
 
 log = logging.getLogger(__name__)
 
-RE_TS = re.compile(r"^[A-Za-z]{3}\s*\d{1,2}\s\d{1,2}:\d{2}:\d{2}")
-RE_TS_ISO = re.compile(r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{6}\+\d{2}:\d{2}")
-RE_LINE = re.compile(
-    r"""
-    \d{2}:\d{2}\s                          # First match on the similar ending of the different timestamps
-    (?P<hostname>\S+)\s                    # The hostname
-    (?P<service>\S+?)(\[(?P<pid>\d+)\])?:  # The service with optionally the PID between brackets
-    \s*(?P<message>.+?)\s*$                # The log message stripped from spaces left and right
-    """,
-    re.VERBOSE,
-)
 
 # Generic regular expressions
 RE_IPV4_ADDRESS = re.compile(
@@ -347,27 +340,3 @@ class AuthPlugin(Plugin):
 
         for ts, line in iterable:
             yield self._auth_log_builder.build_record(ts, auth_file, line)
-
-
-def iso_readlines(file: Path) -> Iterator[tuple[datetime, str]]:
-    """Iterator reading the provided auth log file in ISO format. Mimics ``year_rollover_helper`` behaviour."""
-    with open_decompress(file, "rt") as fh:
-        for line in fh:
-            if not (match := RE_TS_ISO.match(line)):
-                log.warning("No timestamp found in one of the lines in %s!", file)
-                log.debug("Skipping line: %s", line)
-                continue
-
-            try:
-                ts = datetime.strptime(match[0], "%Y-%m-%dT%H:%M:%S.%f%z")
-            except ValueError as e:
-                log.warning("Unable to parse ISO timestamp in line: %s", line)
-                log.debug("", exc_info=e)
-                continue
-
-            yield ts, line
-
-
-def is_iso_fmt(file: Path) -> bool:
-    """Determine if the provided auth log file uses new ISO format logging or not."""
-    return any(itertools.islice(iso_readlines(file), 0, 2))
dissect/target/plugins/os/unix/log/helpers.py (new file)

@@ -0,0 +1,46 @@
+import itertools
+import logging
+import re
+from datetime import datetime
+from pathlib import Path
+from typing import Iterator
+
+from dissect.target.helpers.fsutil import open_decompress
+
+log = logging.getLogger(__name__)
+
+RE_TS = re.compile(r"^[A-Za-z]{3}\s*\d{1,2}\s\d{1,2}:\d{2}:\d{2}")
+RE_TS_ISO = re.compile(r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{6}\+\d{2}:\d{2}")
+RE_LINE = re.compile(
+    r"""
+    \d{2}:\d{2}\s                          # First match on the similar ending of the different timestamps
+    (?:\S+)\s                              # The hostname, but do not capture it
+    (?P<service>\S+?)(\[(?P<pid>\d+)\])?:  # The service / daemon with optionally the PID between brackets
+    \s*(?P<message>.+?)\s*$                # The log message stripped from spaces left and right
+    """,
+    re.VERBOSE,
+)
+
+
+def iso_readlines(file: Path) -> Iterator[tuple[datetime, str]]:
+    """Iterator reading the provided log file in ISO format. Mimics ``year_rollover_helper`` behaviour."""
+    with open_decompress(file, "rt") as fh:
+        for line in fh:
+            if not (match := RE_TS_ISO.match(line)):
+                log.warning("No timestamp found in one of the lines in %s!", file)
+                log.debug("Skipping line: %s", line)
+                continue
+
+            try:
+                ts = datetime.strptime(match[0], "%Y-%m-%dT%H:%M:%S.%f%z")
+            except ValueError as e:
+                log.warning("Unable to parse ISO timestamp in line: %s", line)
+                log.debug("", exc_info=e)
+                continue
+
+            yield ts, line
+
+
+def is_iso_fmt(file: Path) -> bool:
+    """Determine if the provided log file uses ISO 8601 timestamp format logging or not."""
+    return any(itertools.islice(iso_readlines(file), 0, 2))
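
Note: RE_LINE anchors on the final HH:MM of either timestamp flavour, skips the hostname without capturing it, and names only service, pid and message, so its groupdict() maps one-to-one onto the record fields. A quick demo against a hypothetical syslog line, assuming this wheel is installed:

    from dissect.target.plugins.os.unix.log.helpers import RE_LINE

    line = "Jun  4 13:37:00 myhost sshd[1234]: Accepted password for root"  # made-up example
    match = RE_LINE.search(line)
    print(match.groupdict())
    # {'service': 'sshd', 'pid': '1234', 'message': 'Accepted password for root'}
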
dissect/target/plugins/os/unix/log/messages.py

@@ -11,12 +11,18 @@ from dissect.target.helpers.fsutil import open_decompress
 from dissect.target.helpers.record import TargetRecordDescriptor
 from dissect.target.helpers.utils import year_rollover_helper
 from dissect.target.plugin import Plugin, alias, export
+from dissect.target.plugins.os.unix.log.helpers import (
+    RE_LINE,
+    RE_TS,
+    is_iso_fmt,
+    iso_readlines,
+)
 
 MessagesRecord = TargetRecordDescriptor(
     "linux/log/messages",
     [
         ("datetime", "ts"),
-        ("string", "daemon"),
+        ("string", "service"),
         ("varint", "pid"),
         ("string", "message"),
         ("path", "source"),
@@ -24,12 +30,8 @@ MessagesRecord = TargetRecordDescriptor(
 )
 
 DEFAULT_TS_LOG_FORMAT = "%b %d %H:%M:%S"
-RE_TS = re.compile(r"(\w+\s{1,2}\d+\s\d{2}:\d{2}:\d{2})")
-RE_DAEMON = re.compile(r"^[^:]+:\d+:\d+[^\[\]:]+\s([^\[:]+)[\[|:]{1}")
-RE_PID = re.compile(r"\w\[(\d+)\]")
-RE_MSG = re.compile(r"[^:]+:\d+:\d+[^:]+:\s(.*)$")
 RE_CLOUD_INIT_LINE = re.compile(
-    r"^(?P<ts>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d{3}) - (?P<daemon>.*)\[(?P<log_level>\w+)\]\: (?P<message>.*)$"
+    r"^(?P<ts>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d{3}) - (?P<service>.*)\[(?P<log_level>\w+)\]\: (?P<message>.*)$"
 )
 
 
@@ -56,7 +58,7 @@ class MessagesPlugin(Plugin):
     def messages(self) -> Iterator[MessagesRecord]:
         """Return contents of /var/log/messages*, /var/log/syslog* and cloud-init logs.
 
-        Due to year rollover detection, the contents of the files are returned in reverse.
+        Due to year rollover detection, the log contents could be returned in reversed or mixed chronological order.
 
         The messages log file holds information about a variety of events such as the system error messages, system
         startups and shutdowns, change in the network configuration, etc. Aims to store valuable, non-debug and
@@ -75,16 +77,23 @@ class MessagesPlugin(Plugin):
                 yield from self._parse_cloud_init_log(log_file, tzinfo)
                 continue
 
-            for ts, line in year_rollover_helper(log_file, RE_TS, DEFAULT_TS_LOG_FORMAT, tzinfo):
-                daemon = dict(enumerate(RE_DAEMON.findall(line))).get(0)
-                pid = dict(enumerate(RE_PID.findall(line))).get(0)
-                message = dict(enumerate(RE_MSG.findall(line))).get(0, line)
+            if is_iso_fmt(log_file):
+                iterable = iso_readlines(log_file)
+
+            else:
+                iterable = year_rollover_helper(log_file, RE_TS, DEFAULT_TS_LOG_FORMAT, tzinfo)
+
+            for ts, line in iterable:
+                match = RE_LINE.search(line)
+
+                if not match:
+                    self.target.log.warning("Unable to parse message line in %s", log_file)
+                    self.target.log.debug("Line %s", line)
+                    continue
 
                 yield MessagesRecord(
                     ts=ts,
-                    daemon=daemon,
-                    pid=pid,
-                    message=message,
+                    **match.groupdict(),
                     source=log_file,
                     _target=self.target,
                 )
@@ -134,7 +143,7 @@ class MessagesPlugin(Plugin):
 
             yield MessagesRecord(
                 ts=ts,
-                daemon=values["daemon"],
+                service=values["service"],
                 pid=None,
                 message=values["message"],
                 source=log_file,
dissect/target/plugins/os/windows/activitiescache.py

@@ -116,36 +116,38 @@ class ActivitiesCachePlugin(Plugin):
         for user, cache_file in self.cachefiles:
             fh = cache_file.open()
             db = sqlite3.SQLite3(fh)
-            for r in db.table("Activity").rows():
-                yield ActivitiesCacheRecord(
-                    start_time=mkts(r["[StartTime]"]),
-                    end_time=mkts(r["[EndTime]"]),
-                    last_modified_time=mkts(r["[LastModifiedTime]"]),
-                    last_modified_on_client=mkts(r["[LastModifiedOnClient]"]),
-                    original_last_modified_on_client=mkts(r["[OriginalLastModifiedOnClient]"]),
-                    expiration_time=mkts(r["[ExpirationTime]"]),
-                    app_id=r["[AppId]"],
-                    enterprise_id=r["[EnterpriseId]"],
-                    app_activity_id=r["[AppActivityId]"],
-                    group_app_activity_id=r["[GroupAppActivityId]"],
-                    group=r["[Group]"],
-                    activity_type=r["[ActivityType]"],
-                    activity_status=r["[ActivityStatus]"],
-                    priority=r["[Priority]"],
-                    match_id=r["[MatchId]"],
-                    etag=r["[ETag]"],
-                    tag=r["[Tag]"],
-                    is_local_only=r["[IsLocalOnly]"],
-                    created_in_cloud=r["[CreatedInCloud]"],
-                    platform_device_id=r["[PlatformDeviceId]"],
-                    package_id_hash=r["[PackageIdHash]"],
-                    id=r["[Id]"],
-                    payload=r["[Payload]"],
-                    original_payload=r["[OriginalPayload]"],
-                    clipboard_payload=r["[ClipboardPayload]"],
-                    _target=self.target,
-                    _user=user,
-                )
+
+            if table := db.table("Activity"):
+                for r in table.rows():
+                    yield ActivitiesCacheRecord(
+                        start_time=mkts(r["[StartTime]"]),
+                        end_time=mkts(r["[EndTime]"]),
+                        last_modified_time=mkts(r["[LastModifiedTime]"]),
+                        last_modified_on_client=mkts(r["[LastModifiedOnClient]"]),
+                        original_last_modified_on_client=mkts(r["[OriginalLastModifiedOnClient]"]),
+                        expiration_time=mkts(r["[ExpirationTime]"]),
+                        app_id=r["[AppId]"],
+                        enterprise_id=r["[EnterpriseId]"],
+                        app_activity_id=r["[AppActivityId]"],
+                        group_app_activity_id=r["[GroupAppActivityId]"],
+                        group=r["[Group]"],
+                        activity_type=r["[ActivityType]"],
+                        activity_status=r["[ActivityStatus]"],
+                        priority=r["[Priority]"],
+                        match_id=r["[MatchId]"],
+                        etag=r["[ETag]"],
+                        tag=r["[Tag]"],
+                        is_local_only=r["[IsLocalOnly]"],
+                        created_in_cloud=r["[CreatedInCloud]"],
+                        platform_device_id=r["[PlatformDeviceId]"],
+                        package_id_hash=r["[PackageIdHash]"],
+                        id=r["[Id]"],
+                        payload=r["[Payload]"],
+                        original_payload=r["[OriginalPayload]"],
+                        clipboard_payload=r["[ClipboardPayload]"],
+                        _target=self.target,
+                        _user=user,
+                    )
 
 
 def mkts(ts: int) -> datetime | None:
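
Note: the walrus guard assumes db.table() returns a falsy value (presumably None) when the Activity table is absent, so a malformed ActivitiesCache.db no longer raises before row iteration even starts. The shape of the pattern, with a stand-in object rather than the real dissect.sqlite3 API:

    class FakeDB:
        """Stand-in for dissect.sqlite3.SQLite3, illustration only."""

        def __init__(self, tables: dict):
            self._tables = tables

        def table(self, name: str):
            return self._tables.get(name)  # None when the table is missing

    db = FakeDB({})  # a database without an Activity table

    if table := db.table("Activity"):  # old code: db.table("Activity").rows() -> AttributeError
        for r in table:
            print(r)
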
dissect/target/plugins/os/windows/catroot.py

@@ -217,15 +217,24 @@ class CatrootPlugin(Plugin):
        with ese_file.open("rb") as fh:
            ese_db = EseDB(fh)
 
-            tables = [table.name for table in ese_db.tables()]
            for hash_type, table_name in [("sha256", "HashCatNameTableSHA256"), ("sha1", "HashCatNameTableSHA1")]:
-                if table_name not in tables:
+                try:
+                    table = ese_db.table(table_name)
+                except KeyError as e:
+                    self.target.log.warning("EseDB %s has no table %s", ese_file, table_name)
+                    self.target.log.debug("", exc_info=e)
                    continue
 
-                for record in ese_db.table(table_name).records():
+                for record in table.records():
                    file_digest = digest()
-                    setattr(file_digest, hash_type, record.get("HashCatNameTable_HashCol").hex())
-                    catroot_names = record.get("HashCatNameTable_CatNameCol").decode().rstrip("|").split("|")
+
+                    try:
+                        setattr(file_digest, hash_type, record.get("HashCatNameTable_HashCol").hex())
+                        catroot_names = record.get("HashCatNameTable_CatNameCol").decode().rstrip("|").split("|")
+                    except Exception as e:
+                        self.target.log.warning("Unable to parse catroot names for %s in %s", record, ese_file)
+                        self.target.log.debug("", exc_info=e)
+                        continue
 
                    for catroot_name in catroot_names:
                        yield CatrootRecord(
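
Note: HashCatNameTable_CatNameCol holds pipe-separated catalog names with a trailing pipe, hence the rstrip before the split. On a hypothetical column value:

    raw = b"Microsoft-Windows-Foo.cat|oem42.cat|"  # made-up column value

    catroot_names = raw.decode().rstrip("|").split("|")
    assert catroot_names == ["Microsoft-Windows-Foo.cat", "oem42.cat"]
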
dissect/target/plugins/os/windows/lnk.py

@@ -1,4 +1,6 @@
-from typing import Iterator, Optional
+from __future__ import annotations
+
+from typing import Iterator
 
 from dissect.shellitem.lnk import Lnk
 from dissect.util import ts
@@ -34,7 +36,7 @@ LnkRecord = TargetRecordDescriptor(
 )
 
 
-def parse_lnk_file(target: Target, lnk_file: Lnk, lnk_path: TargetPath) -> Iterator[LnkRecord]:
+def parse_lnk_file(target: Target, lnk_file: Lnk, lnk_path: TargetPath) -> LnkRecord:
     # we need to get the active codepage from the system to properly decode some values
     codepage = target.codepage or "ascii"
 
@@ -132,7 +134,7 @@ class LnkPlugin(Plugin):
 
     @arg("--path", "-p", dest="path", default=None, help="Path to directory or .lnk file in target")
     @export(record=LnkRecord)
-    def lnk(self, path: Optional[str] = None) -> Iterator[LnkRecord]:
+    def lnk(self, path: str | None = None) -> Iterator[LnkRecord]:
         """Parse all .lnk files in /ProgramData, /Users, and /Windows or from a specified path in record format.
 
         Yields a LnkRecord record with the following fields:
@@ -160,10 +162,14 @@ class LnkPlugin(Plugin):
         """
 
         for entry in self.lnk_entries(path):
-            lnk_file = Lnk(entry.open())
-            yield parse_lnk_file(self.target, lnk_file, entry)
-
-    def lnk_entries(self, path: Optional[str] = None) -> Iterator[TargetPath]:
+            try:
+                lnk_file = Lnk(entry.open())
+                yield parse_lnk_file(self.target, lnk_file, entry)
+            except Exception as e:
+                self.target.log.warning("Failed to parse link file %s", lnk_file)
+                self.target.log.debug("", exc_info=e)
+
+    def lnk_entries(self, path: str | None = None) -> Iterator[TargetPath]:
         if path:
             target_path = self.target.fs.path(path)
             if not target_path.exists():
dissect/target/plugins/os/windows/notifications.py

@@ -442,43 +442,45 @@ class NotificationsPlugin(Plugin):
         """
         for user, wpndatabase in self.wpndb_files:
             db = sqlite3.SQLite3(wpndatabase.open())
-
             handlers = {}
-            for row in db.table("NotificationHandler").rows():
-                handlers[row["[RecordId]"]] = WpnDatabaseNotificationHandlerRecord(
-                    created_time=datetime.datetime.strptime(row["[CreatedTime]"], "%Y-%m-%d %H:%M:%S"),
-                    modified_time=datetime.datetime.strptime(row["[ModifiedTime]"], "%Y-%m-%d %H:%M:%S"),
-                    id=row["[RecordId]"],
-                    primary_id=row["[PrimaryId]"],
-                    wns_id=row["[WNSId]"],
-                    handler_type=row["[HandlerType]"],
-                    wnf_event_name=row["[WNFEventName]"],
-                    system_data_property_set=row["[SystemDataPropertySet]"],
-                    _target=self.target,
-                    _user=user,
-                )
-
-            for row in db.table("Notification").rows():
-                record = WpnDatabaseNotificationRecord(
-                    arrival_time=wintimestamp(row["[ArrivalTime]"]),
-                    expiry_time=wintimestamp(row["[ExpiryTime]"]),
-                    order=row["[Order]"],
-                    id=row["[Id]"],
-                    handler_id=row["[HandlerId]"],
-                    activity_id=UUID(bytes=row["[ActivityId]"]),
-                    type=row["[Type]"],
-                    payload=row["[Payload]"],
-                    payload_type=row["[PayloadType]"],
-                    tag=row["[Tag]"],
-                    group=row["[Group]"],
-                    boot_id=row["[BootId]"],
-                    expires_on_reboot=row["[ExpiresOnReboot]"] != "FALSE",
-                    _target=self.target,
-                    _user=user,
-                )
-                handler = handlers.get(row["[HandlerId]"])
 
-                if handler:
-                    yield GroupedRecord("windows/notification/wpndatabase/grouped", [record, handler])
-                else:
-                    yield record
+            if table := db.table("NotificationHandler"):
+                for row in table.rows():
+                    handlers[row["[RecordId]"]] = WpnDatabaseNotificationHandlerRecord(
+                        created_time=datetime.datetime.strptime(row["[CreatedTime]"], "%Y-%m-%d %H:%M:%S"),
+                        modified_time=datetime.datetime.strptime(row["[ModifiedTime]"], "%Y-%m-%d %H:%M:%S"),
+                        id=row["[RecordId]"],
+                        primary_id=row["[PrimaryId]"],
+                        wns_id=row["[WNSId]"],
+                        handler_type=row["[HandlerType]"],
+                        wnf_event_name=row["[WNFEventName]"],
+                        system_data_property_set=row["[SystemDataPropertySet]"],
+                        _target=self.target,
+                        _user=user,
+                    )
+
+            if table := db.table("Notification"):
+                for row in table.rows():
+                    record = WpnDatabaseNotificationRecord(
+                        arrival_time=wintimestamp(row["[ArrivalTime]"]),
+                        expiry_time=wintimestamp(row["[ExpiryTime]"]),
+                        order=row["[Order]"],
+                        id=row["[Id]"],
+                        handler_id=row["[HandlerId]"],
+                        activity_id=UUID(bytes=row["[ActivityId]"]),
+                        type=row["[Type]"],
+                        payload=row["[Payload]"],
+                        payload_type=row["[PayloadType]"],
+                        tag=row["[Tag]"],
+                        group=row["[Group]"],
+                        boot_id=row["[BootId]"],
+                        expires_on_reboot=row["[ExpiresOnReboot]"] != "FALSE",
+                        _target=self.target,
+                        _user=user,
+                    )
+                    handler = handlers.get(row["[HandlerId]"])
+
+                    if handler:
+                        yield GroupedRecord("windows/notification/wpndatabase/grouped", [record, handler])
+                    else:
+                        yield record