dissect.target 3.19.dev40__py3-none-any.whl → 3.19.dev41__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -134,6 +134,10 @@ class ExtFilesystemEntry(FilesystemEntry):
  st_info.st_mtime_ns = self.entry.mtime_ns
  st_info.st_ctime_ns = self.entry.ctime_ns

+ # Set blocks
+ st_info.st_blocks = self.entry.inode.i_blocks_lo
+ st_info.st_blksize = self.entry.extfs.block_size
+
  return st_info

  def attr(self) -> Any:
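For context on the new fields: with st_blocks and st_blksize now populated from the ext inode, callers can estimate on-disk allocation the way du does. A minimal sketch, with an illustrative image name and path that are not taken from this diff; st_blocks follows the stat(2) convention of 512-byte units:

from dissect.target import Target

t = Target.open("disk.img")                # hypothetical ext-backed target
st = t.fs.path("/var/log/syslog").lstat()  # hypothetical path

apparent = st.st_size           # logical size in bytes
allocated = st.st_blocks * 512  # st_blocks is counted in 512-byte units
print(apparent, allocated, st.st_blksize)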
@@ -1,8 +1,9 @@
+ from __future__ import annotations
+
  import logging
  import re
  import tarfile
  from pathlib import Path
- from typing import Union

  from dissect.target import filesystem, target
  from dissect.target.filesystems.tar import (
@@ -21,22 +22,25 @@ ANON_FS_RE = re.compile(r"^fs[0-9]+$")
  class TarLoader(Loader):
  """Load tar files."""

- def __init__(self, path: Union[Path, str], **kwargs):
+ def __init__(self, path: Path | str, **kwargs):
  super().__init__(path)

+ if isinstance(path, str):
+ path = Path(path)
+
  if self.is_compressed(path):
  log.warning(
  f"Tar file {path!r} is compressed, which will affect performance. "
  "Consider uncompressing the archive before passing the tar file to Dissect."
  )

- self.tar = tarfile.open(path)
+ self.tar = tarfile.open(fileobj=path.open("rb"))

  @staticmethod
  def detect(path: Path) -> bool:
  return path.name.lower().endswith((".tar", ".tar.gz", ".tgz"))

- def is_compressed(self, path: Union[Path, str]) -> bool:
+ def is_compressed(self, path: Path | str) -> bool:
  return str(path).lower().endswith((".tar.gz", ".tgz"))

  def map(self, target: target.Target) -> None:
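A note on the tarfile.open change above: passing a file object instead of an OS path means the archive only needs to expose a binary read interface, it no longer has to live on the host filesystem. A minimal sketch of the same pattern, with an illustrative archive name:

import tarfile
from pathlib import Path

path = Path("collection.tar")  # illustrative archive name
with path.open("rb") as fh:    # any binary file-like object works here
    with tarfile.open(fileobj=fh) as tar:
        for member in tar.getmembers()[:5]:
            print(member.name)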
@@ -3,7 +3,7 @@ from __future__ import annotations
  import logging
  import zipfile
  from pathlib import Path
- from typing import TYPE_CHECKING, Optional
+ from typing import TYPE_CHECKING

  from dissect.target.loaders.dir import DirLoader, find_dirs, map_dirs
  from dissect.target.plugin import OperatingSystem
@@ -18,7 +18,7 @@ UNIX_ACCESSORS = ["file", "auto"]
  WINDOWS_ACCESSORS = ["mft", "ntfs", "lazy_ntfs", "ntfs_vss", "auto"]


- def find_fs_directories(path: Path) -> tuple[Optional[OperatingSystem], Optional[list[Path]]]:
+ def find_fs_directories(path: Path) -> tuple[OperatingSystem | None, list[Path] | None]:
  fs_root = path.joinpath(FILESYSTEMS_ROOT)

  # Unix
@@ -56,7 +56,7 @@ def find_fs_directories(path: Path) -> tuple[Optional[OperatingSystem], Optional
  return None, None


- def extract_drive_letter(name: str) -> Optional[str]:
+ def extract_drive_letter(name: str) -> str | None:
  # \\.\X: in URL encoding
  if len(name) == 14 and name.startswith("%5C%5C.%5C") and name.endswith("%3A"):
  return name[10].lower()
@@ -91,7 +91,7 @@ class VelociraptorLoader(DirLoader):
  f"Velociraptor target {path!r} is compressed, which will slightly affect performance. "
  "Consider uncompressing the archive and passing the uncompressed folder to Dissect."
  )
- self.root = zipfile.Path(path)
+ self.root = zipfile.Path(path.open("rb"))
  else:
  self.root = path

@@ -105,8 +105,8 @@ class VelociraptorLoader(DirLoader):
  # results/
  # uploads.json
  # [...] other files related to the collection
- if path.suffix == ".zip": # novermin
- path = zipfile.Path(path)
+ if path.exists() and path.suffix == ".zip": # novermin
+ path = zipfile.Path(path.open("rb"))

  if path.joinpath(FILESYSTEMS_ROOT).exists() and path.joinpath("uploads.json").exists():
  _, dirs = find_fs_directories(path)
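The VelociraptorLoader changes follow the same idea: zipfile.Path accepts anything the ZipFile constructor accepts, including an already-open binary file object, so the collection archive does not have to be an OS-level path either. A minimal sketch with an illustrative archive name:

import zipfile
from pathlib import Path

root = zipfile.Path(Path("collection.zip").open("rb"))  # ZipFile reads from the file object
for entry in root.iterdir():
    print(entry.name)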
dissect/target/plugin.py CHANGED
@@ -2,9 +2,11 @@

  See dissect/target/plugins/general/example.py for an example plugin.
  """
+
  from __future__ import annotations

  import fnmatch
+ import functools
  import importlib
  import importlib.util
  import inspect
@@ -196,6 +198,8 @@ class Plugin:
  The :func:`internal` decorator and :class:`InternalPlugin` set the ``__internal__`` attribute.
  Finally. :func:`args` decorator sets the ``__args__`` attribute.

+ The :func:`alias` decorator populates the ``__aliases__`` private attribute of :class:`Plugin` methods.
+
  Args:
  target: The :class:`~dissect.target.target.Target` object to load the plugin for.
  """
@@ -448,6 +452,11 @@ def register(plugincls: Type[Plugin]) -> None:
  exports = []
  functions = []

+ # First pass to resolve aliases
+ for attr in get_nonprivate_attributes(plugincls):
+ for alias in getattr(attr, "__aliases__", []):
+ clone_alias(plugincls, attr, alias)
+
  for attr in get_nonprivate_attributes(plugincls):
  if isinstance(attr, property):
  attr = attr.fget
@@ -542,6 +551,47 @@ def arg(*args, **kwargs) -> Callable:
  return decorator


+ def alias(*args, **kwargs: dict[str, Any]) -> Callable:
+ """Decorator to be used on :class:`Plugin` functions to register an alias of that function."""
+
+ if not kwargs.get("name") and not args:
+ raise ValueError("Missing argument 'name'")
+
+ def decorator(obj: Callable) -> Callable:
+ if not hasattr(obj, "__aliases__"):
+ obj.__aliases__ = []
+
+ if name := (kwargs.get("name") or args[0]):
+ obj.__aliases__.append(name)
+
+ return obj
+
+ return decorator
+
+
+ def clone_alias(cls: type, attr: Callable, alias: str) -> None:
+ """Clone the given attribute to an alias in the provided class."""
+
+ # Clone the function object
+ clone = type(attr)(attr.__code__, attr.__globals__, alias, attr.__defaults__, attr.__closure__)
+ clone.__kwdefaults__ = attr.__kwdefaults__
+
+ # Copy some attributes
+ functools.update_wrapper(clone, attr)
+ if wrapped := getattr(attr, "__wrapped__", None):
+ # update_wrapper sets a new wrapper, we want the original
+ clone.__wrapped__ = wrapped
+
+ # Update module path so we can fool inspect.getmodule with subclassed Plugin classes
+ clone.__module__ = cls.__module__
+
+ # Update the names
+ clone.__name__ = alias
+ clone.__qualname__ = f"{cls.__name__}.{alias}"
+
+ setattr(cls, alias, clone)
+
+
  def plugins(
  osfilter: Optional[type[OSPlugin]] = None,
  special_keys: set[str] = set(),
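To make the new alias machinery easier to follow outside the diff: the decorator only records extra names in __aliases__, and register() later uses clone_alias to attach a cloned function object under each recorded name, so both names resolve to the same implementation. A simplified, self-contained sketch of that idea (not the actual dissect.target classes; the decorator below takes a single positional name for brevity):

import functools


def alias(name: str):
    # Record an extra name on the decorated function (simplified form).
    def decorator(obj):
        obj.__aliases__ = getattr(obj, "__aliases__", []) + [name]
        return obj
    return decorator


def clone_alias(cls: type, attr, alias_name: str) -> None:
    # Attach a clone of the function to the class under the alias name.
    clone = type(attr)(attr.__code__, attr.__globals__, alias_name, attr.__defaults__, attr.__closure__)
    functools.update_wrapper(clone, attr)
    clone.__name__ = alias_name
    clone.__qualname__ = f"{cls.__name__}.{alias_name}"
    setattr(cls, alias_name, clone)


class History:
    @alias("bashhistory")
    def commandhistory(self):
        return ["history record"]


# What register() effectively does in its first pass:
for attr in list(vars(History).values()):
    for extra_name in getattr(attr, "__aliases__", []):
        clone_alias(History, attr, extra_name)

assert History().bashhistory() == History().commandhistory()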
@@ -8,7 +8,7 @@ from dissect.target.exceptions import UnsupportedPluginError
  from dissect.target.helpers.descriptor_extensions import UserRecordDescriptorExtension
  from dissect.target.helpers.fsutil import TargetPath
  from dissect.target.helpers.record import UnixUserRecord, create_extended_descriptor
- from dissect.target.plugin import Plugin, export, internal
+ from dissect.target.plugin import Plugin, alias, export, internal

  CommandHistoryRecord = create_extended_descriptor([UserRecordDescriptorExtension])(
  "unix/history",
@@ -36,6 +36,7 @@ class CommandHistoryPlugin(Plugin):
  ("sqlite", ".sqlite_history"),
  ("zsh", ".zsh_history"),
  ("ash", ".ash_history"),
+ ("dissect", ".dissect_history"), # wow so meta
  )

  def __init__(self, target: Target):
@@ -56,12 +57,7 @@ class CommandHistoryPlugin(Plugin):
  history_files.append((shell, history_path, user_details.user))
  return history_files

- @export(record=CommandHistoryRecord)
- def bashhistory(self):
- """Deprecated, use commandhistory function."""
- self.target.log.warn("Function 'bashhistory' is deprecated, use the 'commandhistory' function instead.")
- return self.commandhistory()
-
+ @alias("bashhistory")
  @export(record=CommandHistoryRecord)
  def commandhistory(self):
  """Return shell history for all users.
dissect/target/target.py CHANGED
@@ -87,7 +87,7 @@ class Target:
  self._applied = False

  try:
- self._config = config.load([self.path, os.getcwd()])
+ self._config = config.load([self.path, Path.cwd(), Path.home()])
  except Exception as e:
  self.log.warning("Error loading config file: %s", self.path)
  self.log.debug("", exc_info=e)
@@ -2,9 +2,7 @@
  # -*- coding: utf-8 -*-

  import argparse
- import datetime
  import logging
- import operator
  import os
  import pathlib
  import shutil
@@ -13,7 +11,7 @@ import sys
  from dissect.target import Target
  from dissect.target.exceptions import TargetError
  from dissect.target.helpers.fsutil import TargetPath
- from dissect.target.tools.shell import stat_modestr
+ from dissect.target.tools.fsutils import print_ls, print_stat
  from dissect.target.tools.utils import (
  catch_sigpipe,
  configure_generic_arguments,
@@ -25,11 +23,6 @@ logging.lastResort = None
  logging.raiseExceptions = False


- def human_size(bytes: int, units: list[str] = ["", "K", "M", "G", "T", "P", "E"]) -> str:
- """Helper function to return the human readable string representation of bytes."""
- return str(bytes) + units[0] if bytes < 1024 else human_size(bytes >> 10, units[1:])
-
-
  def ls(t: Target, path: TargetPath, args: argparse.Namespace) -> None:
  if args.use_ctime and args.use_atime:
  log.error("Can't specify -c and -u at the same time")
@@ -37,63 +30,20 @@ def ls(t: Target, path: TargetPath, args: argparse.Namespace) -> None:
  if not path or not path.exists():
  return

- _print_ls(args, path, 0)
-
-
- def _print_ls(args: argparse.Namespace, path: TargetPath, depth: int) -> None:
- subdirs = []
-
- if path.is_dir():
- contents = sorted(path.iterdir(), key=operator.attrgetter("name"))
- elif path.is_file():
- contents = [path]
-
- if depth > 0:
- print(f"\n{str(path)}:")
-
- if not args.l:
- for entry in contents:
- print(entry.name)
-
- if entry.is_dir():
- subdirs.append(entry)
- else:
- if len(contents) > 1:
- print(f"total {len(contents)}")
-
- for entry in contents:
- _print_extensive_file_stat(args, entry, entry.name)
-
- if entry.is_dir():
- subdirs.append(entry)
-
- if args.recursive and subdirs:
- for subdir in subdirs:
- _print_ls(args, subdir, depth + 1)
-
-
- def _print_extensive_file_stat(args: argparse.Namespace, path: TargetPath, name: str) -> None:
- try:
- entry = path.get()
- stat = entry.lstat()
- symlink = f" -> {entry.readlink()}" if entry.is_symlink() else ""
- show_time = stat.st_mtime
-
- if args.use_ctime:
- show_time = stat.st_ctime
- elif args.use_atime:
- show_time = stat.st_atime
-
- utc_time = datetime.datetime.utcfromtimestamp(show_time).isoformat()
-
- if args.human_readable:
- size = human_size(stat.st_size)
- else:
- size = stat.st_size
-
- print(f"{stat_modestr(stat)} {stat.st_uid:4d} {stat.st_gid:4d} {size:>6s} {utc_time} {name}{symlink}")
- except FileNotFoundError:
- print(f"?????????? ? ? ? ????-??-??T??:??:??.?????? {name}")
+ # Only output with colors if stdout is a tty
+ use_colors = sys.stdout.buffer.isatty()
+
+ print_ls(
+ path,
+ 0,
+ sys.stdout,
+ args.l,
+ args.human_readable,
+ args.recursive,
+ args.use_ctime,
+ args.use_atime,
+ use_colors,
+ )


  def cat(t: Target, path: TargetPath, args: argparse.Namespace) -> None:
@@ -120,6 +70,12 @@ def cp(t: Target, path: TargetPath, args: argparse.Namespace) -> None:
  print("[!] Failed, unsuported file type: %s" % path)


+ def stat(t: Target, path: TargetPath, args: argparse.Namespace) -> None:
+ if not path or not path.exists():
+ return
+ print_stat(path, sys.stdout, args.dereference)
+
+
  def _extract_path(path: TargetPath, output_path: str) -> None:
  print("%s -> %s" % (path, output_path))

@@ -172,6 +128,10 @@ def main() -> None:
  parser_cat = subparsers.add_parser("cat", help="dump file contents", parents=[baseparser])
  parser_cat.set_defaults(handler=cat)

+ parser_stat = subparsers.add_parser("stat", help="display file status", parents=[baseparser])
+ parser_stat.add_argument("-L", "--dereference", action="store_true")
+ parser_stat.set_defaults(handler=stat)
+
  parser_find = subparsers.add_parser("walk", help="perform a walk", parents=[baseparser])
  parser_find.set_defaults(handler=walk)

@@ -0,0 +1,243 @@
+ from __future__ import annotations
+
+ import os
+ import stat
+ from datetime import datetime, timezone
+ from typing import TextIO
+
+ from dissect.target.exceptions import FileNotFoundError
+ from dissect.target.filesystem import FilesystemEntry, LayerFilesystemEntry
+ from dissect.target.helpers import fsutil
+ from dissect.target.helpers.fsutil import TargetPath
+
+ # ['mode', 'addr', 'dev', 'nlink', 'uid', 'gid', 'size', 'atime', 'mtime', 'ctime']
+ STAT_TEMPLATE = """ File: {path} {symlink}
+ Size: {size} Blocks: {blocks} IO Block: {blksize} {filetype}
+ Device: {device} Inode: {inode} Links: {nlink}
+ Access: ({modeord}/{modestr}) Uid: ( {uid} ) Gid: ( {gid} )
+ Access: {atime}
+ Modify: {mtime}
+ Change: {ctime}
+ Birth: {btime}"""
+
+ FALLBACK_LS_COLORS = "rs=0:di=01;34:ln=01;36:mh=00:pi=40;33:so=01;35:do=01;35:bd=40;33;01:cd=40;33;01:or=40;31;01:mi=00:su=37;41:sg=30;43:ca=30;41:tw=30;42:ow=34;42:st=37;44:ex=01;32" # noqa: E501
+
+
+ def prepare_ls_colors() -> dict[str, str]:
+ """Parse the LS_COLORS environment variable so we can use it later."""
+ d = {}
+ ls_colors = os.environ.get("LS_COLORS", FALLBACK_LS_COLORS)
+ for line in ls_colors.split(":"):
+ if not line:
+ continue
+
+ ft, _, value = line.partition("=")
+ if ft.startswith("*"):
+ ft = ft[1:]
+
+ d[ft] = f"\x1b[{value}m{{}}\x1b[0m"
+
+ return d
+
+
+ LS_COLORS = prepare_ls_colors()
+
+
+ def fmt_ls_colors(ft: str, name: str) -> str:
+ """Helper method to colorize strings according to LS_COLORS."""
+ try:
+ return LS_COLORS[ft].format(name)
+ except KeyError:
+ pass
+
+ try:
+ return LS_COLORS[fsutil.splitext(name)[1]].format(name)
+ except KeyError:
+ pass
+
+ return name
+
+
+ def human_size(bytes: int, units: list[str] = ["", "K", "M", "G", "T", "P", "E"]) -> str:
+ """Helper function to return the human readable string representation of bytes."""
+ return str(bytes) + units[0] if bytes < 1024 else human_size(bytes >> 10, units[1:])
+
+
+ def stat_modestr(st: fsutil.stat_result) -> str:
+ """Helper method for generating a mode string from a numerical mode value."""
+ return stat.filemode(st.st_mode)
+
+
+ def print_extensive_file_stat_listing(
+ stdout: TextIO,
+ name: str,
+ entry: FilesystemEntry | None = None,
+ timestamp: datetime | None = None,
+ human_readable: bool = False,
+ ) -> None:
+ """Print the file status as a single line."""
+ if entry is not None:
+ try:
+ entry_stat = entry.lstat()
+ if timestamp is None:
+ timestamp = entry_stat.st_mtime
+ symlink = f" -> {entry.readlink()}" if entry.is_symlink() else ""
+ utc_time = datetime.fromtimestamp(timestamp, tz=timezone.utc).isoformat(timespec="microseconds")
+ size = f"{human_size(entry_stat.st_size):5s}" if human_readable else f"{entry_stat.st_size:10d}"
+
+ print(
+ (
+ f"{stat_modestr(entry_stat)} {entry_stat.st_uid:4d} {entry_stat.st_gid:4d} {size} "
+ f"{utc_time} {name}{symlink}"
+ ),
+ file=stdout,
+ )
+ return
+ except FileNotFoundError:
+ pass
+
+ hr_spaces = f"{'':5s}" if human_readable else " "
+ regular_spaces = f"{'':10s}" if not human_readable else " "
+
+ print(f"?????????? ? ?{regular_spaces}?{hr_spaces}????-??-??T??:??:??.??????+??:?? {name}", file=stdout)
+
+
+ def ls_scandir(path: fsutil.TargetPath, color: bool = False) -> list[tuple[fsutil.TargetPath, str]]:
+ """List a directory for the given path."""
+ result = []
+ if not path.exists() or not path.is_dir():
+ return []
+
+ for file_ in path.iterdir():
+ file_type = None
+ if color:
+ if file_.is_symlink():
+ file_type = "ln"
+ elif file_.is_dir():
+ file_type = "di"
+ elif file_.is_file():
+ file_type = "fi"
+
+ result.append((file_, fmt_ls_colors(file_type, file_.name) if color else file_.name))
+
+ # If we happen to scan an NTFS filesystem see if any of the
+ # entries has an alternative data stream and also list them.
+ entry = file_.get()
+ if isinstance(entry, LayerFilesystemEntry):
+ if entry.entries.fs.__type__ == "ntfs":
+ attrs = entry.lattr()
+ for data_stream in attrs.DATA:
+ if data_stream.name != "":
+ name = f"{file_.name}:{data_stream.name}"
+ result.append((file_, fmt_ls_colors(file_type, name) if color else name))
+
+ result.sort(key=lambda e: e[0].name)
+
+ return result
+
+
+ def print_ls(
+ path: fsutil.TargetPath,
+ depth: int,
+ stdout: TextIO,
+ long_listing: bool = False,
+ human_readable: bool = False,
+ recursive: bool = False,
+ use_ctime: bool = False,
+ use_atime: bool = False,
+ color: bool = True,
+ ) -> None:
+ """Print ls output"""
+ subdirs = []
+
+ if path.is_dir():
+ contents = ls_scandir(path, color)
+ elif path.is_file():
+ contents = [(path, path.name)]
+
+ if depth > 0:
+ print(f"\n{str(path)}:", file=stdout)
+
+ if not long_listing:
+ for target_path, name in contents:
+ print(name, file=stdout)
+ if target_path.is_dir():
+ subdirs.append(target_path)
+ else:
+ if len(contents) > 1:
+ print(f"total {len(contents)}", file=stdout)
+ for target_path, name in contents:
+ try:
+ entry = target_path.get()
+ entry_stat = entry.lstat()
+ show_time = entry_stat.st_mtime
+ if use_ctime:
+ show_time = entry_stat.st_ctime
+ elif use_atime:
+ show_time = entry_stat.st_atime
+ except FileNotFoundError:
+ entry = None
+ show_time = None
+ print_extensive_file_stat_listing(stdout, name, entry, show_time, human_readable)
+ if target_path.is_dir():
+ subdirs.append(target_path)
+
+ if recursive and subdirs:
+ for subdir in subdirs:
+ print_ls(subdir, depth + 1, stdout, long_listing, human_readable, recursive, use_ctime, use_atime, color)
+
+
+ def print_stat(path: fsutil.TargetPath, stdout: TextIO, dereference: bool = False) -> None:
+ """Print file status."""
+ symlink = f"-> {path.readlink()}" if path.is_symlink() else ""
+ s = path.stat() if dereference else path.lstat()
+
+ def filetype(path: TargetPath) -> str:
+ if path.is_dir():
+ return "directory"
+ elif path.is_symlink():
+ return "symbolic link"
+ elif path.is_file():
+ return "regular file"
+
+ res = STAT_TEMPLATE.format(
+ path=path,
+ symlink=symlink,
+ size=s.st_size,
+ filetype=filetype(path),
+ device="?",
+ inode=s.st_ino,
+ blocks=s.st_blocks or "?",
+ blksize=s.st_blksize or "?",
+ nlink=s.st_nlink,
+ modeord=oct(stat.S_IMODE(s.st_mode)),
+ modestr=stat_modestr(s),
+ uid=s.st_uid,
+ gid=s.st_gid,
+ atime=datetime.fromtimestamp(s.st_atime, tz=timezone.utc).isoformat(timespec="microseconds"),
+ mtime=datetime.fromtimestamp(s.st_mtime, tz=timezone.utc).isoformat(timespec="microseconds"),
+ ctime=datetime.fromtimestamp(s.st_ctime, tz=timezone.utc).isoformat(timespec="microseconds"),
+ btime=datetime.fromtimestamp(s.st_birthtime, tz=timezone.utc).isoformat(timespec="microseconds")
+ if hasattr(s, "st_birthtime") and s.st_birthtime
+ else "?",
+ )
+ print(res, file=stdout)
+
+ try:
+ if (xattr := path.get().attr()) and isinstance(xattr, list) and hasattr(xattr[0], "name"):
+ print(" Attr:")
+ print_xattr(path.name, xattr, stdout)
+ except Exception:
+ pass
+
+
+ def print_xattr(basename: str, xattr: list, stdout: TextIO) -> None:
+ """Mimics getfattr -d {file} behaviour."""
+ if not hasattr(xattr[0], "name"):
+ return
+
+ XATTR_TEMPLATE = "# file: {basename}\n{attrs}"
+ res = XATTR_TEMPLATE.format(
+ basename=basename, attrs="\n".join([f'{attr.name}="{attr.value.decode()}"' for attr in xattr])
+ )
+ print(res, file=stdout)
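For orientation, the helpers in this new module are importable on their own and back both the `target-fs` tool and the shell. A small usage sketch of the pure helpers (values illustrative; the module path is taken from the changed import earlier in this diff):

from dissect.target.tools.fsutils import fmt_ls_colors, human_size

# human_size shifts right by 10 bits per step until the value drops below 1024,
# consuming one unit suffix per shift.
print(human_size(512))              # "512"
print(human_size(2048))             # "2K"
print(human_size(3 * 1024 * 1024))  # "3M"

# fmt_ls_colors wraps a name in the ANSI escape configured for that file type in LS_COLORS.
print(fmt_ls_colors("di", "uploads"))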
@@ -4,6 +4,7 @@
  import argparse
  import json
  import logging
+ from datetime import datetime
  from pathlib import Path
  from typing import Union

@@ -138,10 +139,13 @@ def print_target_info(target: Target) -> None:
  if isinstance(value, list):
  value = ", ".join(value)

+ if isinstance(value, datetime):
+ value = value.isoformat(timespec="microseconds")
+
  if name == "hostname":
  print()

- print(f"{name.capitalize().replace('_', ' ')}" + (14 - len(name)) * " " + f" : {value}")
+ print(f"{name.capitalize().replace('_', ' '):14s} : {value}")


  def get_disks_info(target: Target) -> list[dict[str, Union[str, int]]]:
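A quick illustration of the two formatting behaviours touched in this last hunk: the width format spec pads the label to at least 14 characters, matching the old manual padding, and datetime values are now rendered with microsecond precision (the field name and timestamp below are illustrative):

from datetime import datetime, timezone

name = "install_date"
value = datetime(2024, 3, 1, 12, 30, 45, 123456, tzinfo=timezone.utc)
print(f"{name.capitalize().replace('_', ' '):14s} : {value.isoformat(timespec='microseconds')}")
# Install date   : 2024-03-01T12:30:45.123456+00:00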