dfindexeddb 20240417__py3-none-any.whl → 20240519__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
dfindexeddb/leveldb/record.py CHANGED
@@ -14,11 +14,12 @@
  # limitations under the License.
  """A module for records from LevelDB files."""
  from __future__ import annotations
+ from collections import defaultdict
  import dataclasses
  import pathlib
  import re
  import sys
- from typing import Any, Generator, Optional, Union
+ from typing import Generator, Optional, Union

  from dfindexeddb import errors
  from dfindexeddb.leveldb import definitions
@@ -37,7 +38,8 @@ class LevelDBRecord:
    Attributes:
      path: the file path where the record was parsed from.
      record: the leveldb record.
-     level: the leveldb level, None indicates the record came from a log file.
+     level: the leveldb level, None indicates the record came from a log file or
+       a file not part of the active file set (determined by a MANIFEST file).
      recovered: True if the record is a recovered record.
    """
    path: str
@@ -51,7 +53,7 @@ class LevelDBRecord:
    def FromFile(
        cls,
        file_path: pathlib.Path
-   ) -> Generator[LevelDBRecord, Any, Any]:
+   ) -> Generator[LevelDBRecord, None, None]:
      """Yields leveldb records from the given path.

      Yields:
@@ -74,45 +76,47 @@ class LevelDBRecord:
      else:
        print(f'Unsupported file type {file_path.as_posix()}', file=sys.stderr)

-   @classmethod
-   def FromDir(
-       cls,
-       path: pathlib.Path
-   ) -> Generator[LevelDBRecord, Any, Any]:
-     """Yields LevelDBRecords from the given directory.
+
+ class FolderReader:
+   """A LevelDB folder reader.
+
+   Attributes:
+     foldername (str): the source LevelDB folder.
+   """
+
+   def __init__(self, foldername: pathlib.Path):
+     """Initializes the FolderReader.

      Args:
-       path: the file path.
+       foldername: the source LevelDB folder.

-     Yields:
-       LevelDBRecords
+     Raises:
+       ValueError: if foldername is None or not a directory.
      """
-     if not path or not path.is_dir():
-       raise ValueError(f'{path} is not a directory')
-     for file_path in path.iterdir():
-       yield from cls.FromFile(file_path=file_path)
+     if not foldername or not foldername.is_dir():
+       raise ValueError(f'{foldername} is None or not a directory')
+     self.foldername = foldername

-   @classmethod
-   def FromManifest(
-       cls,
-       path: pathlib.Path
-   ) -> Generator[LevelDBRecord, Any, Any]:
-     """Yields LevelDBRecords from the given directory using the manifest.
+   def LogFiles(self) -> Generator[pathlib.Path, None, None]:
+     """Returns the log filenames."""
+     yield from self.foldername.glob('*.log')

-     Args:
-       path: the file path.
+   def LdbFiles(self) -> Generator[pathlib.Path, None, None]:
+     """Returns the ldb filenames."""
+     yield from self.foldername.glob('*.ldb')

-     Yields:
-       LevelDBRecords
+   def Manifest(self) -> Generator[pathlib.Path, None, None]:
+     """Returns the Manifest filenames."""
+     yield from self.foldername.glob('MANIFEST-*')
+
+   def GetCurrentManifestPath(self) -> pathlib.Path:
+     """Returns the path of the current manifest file.

      Raises:
-       ParserError: if the CURRENT or MANIFEST-* file does not exist.
-       ValueError: if path is not a directory.
+       ParserError: when the CURRENT file does not exist/contain the expected
+         content or when the expected MANIFEST file does not exist.
      """
-     if not path or not path.is_dir():
-       raise ValueError(f'{path} is not a directory')
-
-     current_path = path / 'CURRENT'
+     current_path = self.foldername / 'CURRENT'
      if not current_path.exists():
        raise errors.ParserError(f'{current_path!s} does not exist.')

@@ -122,34 +126,113 @@ class LevelDBRecord:
        raise errors.ParserError(
            f'{current_path!s} does not contain the expected content')

-     manifest_path = path / current_manifest
+     manifest_path = self.foldername / current_manifest
      if not manifest_path.exists():
        raise errors.ParserError(f'{manifest_path!s} does not exist.')
+     return manifest_path

+   def GetLatestVersion(self) -> descriptor.LevelDBVersion:
+     """Returns the latest LevelDBVersion.
+
+     Raises:
+       ParserError: when the leveldb version could not be parsed.
+     """
+     current_manifest_path = self.GetCurrentManifestPath()
      latest_version = descriptor.FileReader(
-         str(manifest_path)).GetLatestVersion()
+         str(current_manifest_path)).GetLatestVersion()
      if not latest_version:
        raise errors.ParserError(
-           f'Could not parse a leveldb version from {manifest_path!s}')
+           f'Could not parse a leveldb version from {current_manifest_path!s}')
+     return latest_version
+
+   def _GetRecordsByFile(
+       self, filename: pathlib.Path) -> Generator[LevelDBRecord, None, None]:
+     """Yields the LevelDBRecords from a file.
+
+     Non-log/ldb files are ignored.
+
+     Args:
+       filename: the source LevelDB file.
+
+     Yields:
+       LevelDBRecords
+     """
+     if filename.name.endswith('.log'):
+       yield from self._GetLogRecords(filename)
+     elif filename.name.endswith('.ldb'):
+       yield from self._GetLdbRecords(filename)
+     elif filename.name.startswith('MANIFEST'):
+       print(f'Ignoring descriptor file {filename.as_posix()}', file=sys.stderr)
+     elif filename.name in ('LOCK', 'CURRENT', 'LOG', 'LOG.old'):
+       print(f'Ignoring {filename.as_posix()}', file=sys.stderr)
+     else:
+       print(f'Unsupported file type {filename.as_posix()}', file=sys.stderr)
+
+   def _GetLogRecords(
+       self,
+       filename: pathlib.Path
+   ) -> Generator[LevelDBRecord, None, None]:
+     """Yields the LevelDBRecords from a log file.
+
+     Args:
+       filename: the source LevelDB file.
+
+     Yields:
+       LevelDBRecords
+     """
+     for record in log.FileReader(filename.as_posix()).GetParsedInternalKeys():
+       yield LevelDBRecord(path=filename.as_posix(), record=record)

-     # read log records
+   def _GetLdbRecords(
+       self,
+       filename: pathlib.Path
+   ) -> Generator[LevelDBRecord, None, None]:
+     """Yields the LevelDBRecords from a log file.
+
+     Args:
+       filename: the source LevelDB file.
+
+     Yields:
+       LevelDBRecords
+     """
+     for record in ldb.FileReader(filename.as_posix()).GetKeyValueRecords():
+       yield LevelDBRecord(path=filename.as_posix(), record=record)
+
+   def _RecordsByManifest(self) -> Generator[LevelDBRecord, None, None]:
+     """Yields LevelDBRecords using active files determined by the MANIFEST file.
+
+     Yields:
+       LevelDBRecords.
+     """
+     latest_version = self.GetLatestVersion()
+
+     processed_files = set()
+
+     # read and cache the log records
      log_records = []
      if latest_version.current_log:
-       current_log = path / latest_version.current_log
-       if current_log.exists():
-         for log_record in cls.FromFile(file_path=current_log):
-           log_records.append(log_record)
+       current_log_filename = self.foldername / latest_version.current_log
+       if current_log_filename.exists():
+         log_records = list(self._GetLogRecords(filename=current_log_filename))
+         processed_files.add(current_log_filename)
      else:
        print('No current log file.', file=sys.stderr)

-     # read records from the "young" or 0-level
+     # read and cache the records from the "young" or 0-level
      young_records = []
      for active_file in latest_version.active_files.get(0, {}).keys():
-       current_young = path / active_file
-       if current_young.exists():
-         for young_record in cls.FromFile(current_young):
-           young_records.append(young_record)
+       current_young_filename = self.foldername / active_file
+       if current_young_filename.exists():
+         young_records = list(self._GetLdbRecords(current_young_filename))
+         processed_files.add(current_young_filename)
+       else:
+         print(
+             f'Could not find {current_young_filename} for level 0.',
+             file=sys.stderr)

+     # sort the log records by the leveldb sequence number in reverse
+     # order and update the recovered attribute based on the highest sequence
+     # number for a key.
      active_records = {}
      for record in sorted(
          log_records,
@@ -161,6 +244,10 @@ class LevelDBRecord:
        else:
          record.recovered = True

+     # sort the young records by the leveldb sequence number in reverse
+     # order and update:
+     # * the recovered attribute based on the highest sequence number for a key
+     # * the level attribute to 0
      for record in sorted(
          young_records,
          key=lambda record: record.record.sequence_number,
@@ -177,14 +264,86 @@ class LevelDBRecord:
          key=lambda record: record.record.sequence_number,
          reverse=False)

+     # read records from the active files in each level (except the 0 level)
+     # and update the recovered and level attribute.
      if latest_version.active_files.keys():
        for level in range(1, max(latest_version.active_files.keys()) + 1):
          for filename in latest_version.active_files.get(level, []):
-           current_filename = path / filename
-           for record in cls.FromFile(file_path=current_filename):
-             if record.record.key in active_records:
-               record.recovered = True
-             else:
-               record.recovered = False
-             record.level = level
-             yield record
+           current_filename = self.foldername / filename
+           if current_filename.exists():
+             processed_files.add(current_filename)
+             for record in self._GetLdbRecords(filename=current_filename):
+               record.recovered = record.record.key in active_records
+               record.level = level
+               yield record
+           else:
+             print(
+                 f'Could not find {current_filename} for level {level}.',
+                 file=sys.stderr)
+
+     # as a final step, parse any other log/ldb files which we will consider
+     # any records as recovered since they are not listed in the the active file
+     # set.
+     for log_file in self.LogFiles():
+       if log_file in processed_files:
+         continue
+       for record in self._GetLogRecords(filename=log_file):
+         record.recovered = True
+         yield record
+
+     for ldb_file in self.LdbFiles():
+       if ldb_file in processed_files:
+         continue
+       for record in self._GetLdbRecords(filename=ldb_file):
+         record.recovered = True
+         yield record
+
+   def _RecordsBySequenceNumber(self) -> Generator[LevelDBRecord, None, None]:
+     """Yields LevelDBRecords using the sequence number and file offset.
+
+     Yields:
+       LevelDBRecords.
+     """
+     unsorted_records = defaultdict(list)
+
+     for filename in self.foldername.iterdir():
+       for leveldb_record in LevelDBRecord.FromFile(filename):
+         if leveldb_record:
+           unsorted_records[leveldb_record.record.key].append(leveldb_record)
+     for _, unsorted_records in unsorted_records.items():
+       num_unsorted_records = len(unsorted_records)
+       if num_unsorted_records == 1:
+         unsorted_records[0].recovered = False
+         yield unsorted_records[0]
+       else:
+         for i, record in enumerate(sorted(
+             unsorted_records, key=lambda x: (
+                 x.record.sequence_number, x.record.offset)),
+             start=1):
+           if i == num_unsorted_records:
+             record.recovered = False
+           else:
+             record.recovered = True
+           yield record
+
+   def GetRecords(
+       self,
+       use_manifest: bool = False,
+       use_sequence_number: bool = False
+   ) -> Generator[LevelDBRecord, None, None]:
+     """Yield LevelDBRecords.
+
+     Args:
+       use_manifest: True to use the current manifest in the folder as a means to
+         find the active file set.
+
+     Yields:
+       LevelDBRecords.
+     """
+     if use_manifest:
+       yield from self._RecordsByManifest()
+     elif use_sequence_number:
+       yield from self._RecordsBySequenceNumber()
+     else:
+       for filename in self.foldername.iterdir():
+         yield from LevelDBRecord.FromFile(filename)
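
The removed `FromDir` and `FromManifest` classmethods are replaced by the `FolderReader` class added above. A minimal usage sketch, based only on the names visible in this diff (the import path and folder path are illustrative):

```python
import pathlib

from dfindexeddb.leveldb import record

# Point the reader at a LevelDB folder (illustrative path).
reader = record.FolderReader(pathlib.Path('/path/to/leveldb_folder'))

# Use the current MANIFEST to determine the active file set; records from
# files outside that set are yielded with recovered=True.
for db_record in reader.GetRecords(use_manifest=True):
  print(db_record.path, db_record.level, db_record.recovered)

# Alternatively, group records by key and use the sequence number/offset to
# mark superseded values as recovered.
for db_record in reader.GetRecords(use_sequence_number=True):
  print(db_record.record.key, db_record.recovered)
```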
dfindexeddb/utils.py CHANGED
@@ -14,6 +14,8 @@
  # limitations under the License.
  """Utilities for dfindexeddb."""
  from __future__ import annotations
+ import copy
+ import dataclasses
  import io
  import os
  import struct
@@ -259,3 +261,35 @@ class FromDecoderMixin:
      """
      stream = io.BytesIO(raw_data)
      return cls.FromStream(stream=stream, base_offset=base_offset)
+
+
+ def asdict(obj, *, dict_factory=dict):  # pylint: disable=invalid-name
+   """Custom implementation of the asdict dataclasses method to include the
+   class name under the __type__ attribute name.
+   """
+   if not dataclasses.is_dataclass(obj):
+     raise TypeError("asdict() should be called on dataclass instances")
+   return _asdict_inner(obj, dict_factory)
+
+
+ def _asdict_inner(obj, dict_factory):
+   """Custom implementation of the _asdict_inner dataclasses method."""
+   if dataclasses.is_dataclass(obj):
+     result = [('__type__', obj.__class__.__name__)]
+     for f in dataclasses.fields(obj):
+       value = _asdict_inner(getattr(obj, f.name), dict_factory)
+       result.append((f.name, value))
+     return dict_factory(result)
+
+   if isinstance(obj, tuple) and hasattr(obj, '_fields'):
+     return type(obj)(*[_asdict_inner(v, dict_factory) for v in obj])
+
+   if isinstance(obj, (list, tuple)):
+     return type(obj)(_asdict_inner(v, dict_factory) for v in obj)
+
+   if isinstance(obj, dict):
+     return type(obj)((_asdict_inner(k, dict_factory),
+                       _asdict_inner(v, dict_factory))
+                      for k, v in obj.items())
+
+   return copy.deepcopy(obj)
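
Since the new `asdict` helper is shown in full above, a short sketch of its behaviour: it mirrors `dataclasses.asdict` but records the class name under a `__type__` key. The dataclass below is purely illustrative and not part of dfindexeddb:

```python
import dataclasses

from dfindexeddb import utils


@dataclasses.dataclass
class Example:
  """An illustrative dataclass (not part of the package)."""
  key: bytes
  sequence_number: int


print(utils.asdict(Example(key=b'k', sequence_number=1)))
# Expected shape: {'__type__': 'Example', 'key': b'k', 'sequence_number': 1}
```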
dfindexeddb/version.py CHANGED
@@ -15,7 +15,7 @@
  """Version information for dfIndexeddb."""


- __version__ = "20240417"
+ __version__ = "20240519"


  def GetVersion():
dfindexeddb-20240417.dist-info/METADATA → dfindexeddb-20240519.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: dfindexeddb
- Version: 20240417
+ Version: 20240519
  Summary: dfindexeddb is an experimental Python tool for performing digital forensic analysis of IndexedDB and leveldb files.
  Author-email: Syd Pleno <sydp@google.com>
  Maintainer-email: dfIndexeddb Developers <dfindexeddb-dev@googlegroups.com>
@@ -219,16 +219,19 @@ License-File: LICENSE
  License-File: AUTHORS
  Requires-Dist: python-snappy ==0.6.1
  Requires-Dist: zstd ==1.5.5.1
+ Provides-Extra: plugins
+ Requires-Dist: protobuf ; extra == 'plugins'
+ Requires-Dist: dfdatetime ; extra == 'plugins'

  # dfIndexeddb

  dfindexeddb is an experimental Python tool for performing digital forensic
- analysis of IndexedDB and leveldb files.
+ analysis of IndexedDB and LevelDB files.

- It parses leveldb, IndexedDB and javascript structures from these files without
+ It parses LevelDB, IndexedDB and JavaScript structures from these files without
  requiring native libraries. (Note: only a subset of IndexedDB key types and
- Javascript types for Chromium-based browsers are currently supported. Safari
- and Firefox are under development).
+ JavaScript types for Safari and Chromium-based browsers are currently supported.
+ Firefox is under development).

  The content of IndexedDB files is dependent on what a web application stores
  locally/offline using the web browser's
@@ -255,6 +258,12 @@ include:
  $ pip install dfindexeddb
  ```

+ To also install the dependencies for leveldb/indexeddb plugins, run
+ ```
+ $ pip install 'dfindexeddb[plugins]'
+ ```
+
+
  ## Installation from source

  1. [Linux] Install the snappy compression development package
@@ -273,9 +282,14 @@ include:
  $ pip install .
  ```

+ To also install the dependencies for leveldb/indexeddb plugins, run
+ ```
+ $ pip install '.[plugins]'
+ ```
+
  ## Usage

- Two CLI tools for parsing IndexedDB/leveldb files are available after
+ Two CLI tools for parsing IndexedDB/LevelDB files are available after
  installation:


@@ -297,49 +311,42 @@ options:
  -h, --help show this help message and exit
  ```

- To parse Indexeddb records from a LevelDB folder, use the following command:
+ #### Examples:

- ```
- dfindexeddb db -h
- usage: dfindexeddb db [-h] -s SOURCE [--use_manifest] [-o {json,jsonl,repr}]
+ To parse IndexedDB records from an sqlite file for Safari and output the
+ results as JSON-L, use the following command:

- options:
- -h, --help show this help message and exit
- -s SOURCE, --source SOURCE
- The source leveldb folder
- --use_manifest Use manifest file to determine active/recovered records.
- -o {json,jsonl,repr}, --output {json,jsonl,repr}
- Output format. Default is json
+ ```
+ dfindexeddb db -s SOURCE --format safari -o jsonl
  ```

- To parse Indexeddb records from a LevelDB ldb (.ldb) file, use the following
- command:
+ To parse IndexedDB records from a LevelDB folder for Chrome/Chromium, using the
+ manifest file to determine recovered records and output as JSON, use the
+ following command:

  ```
- dfindexeddb ldb -h
- usage: dfindexeddb ldb [-h] -s SOURCE [-o {json,jsonl,repr}]
+ dfindexeddb db -s SOURCE --format chrome --use_manifest
+ ```
+
+ To parse IndexedDB records from a LevelDB ldb (.ldb) file and output the
+ results as JSON-L, use the following command:

- options:
- -h, --help show this help message and exit
- -s SOURCE, --source SOURCE
- The source .ldb file.
- -o {json,jsonl,repr}, --output {json,jsonl,repr}
- Output format. Default is json
+ ```
+ dfindexeddb ldb -s SOURCE -o jsonl
  ```

- To parse Indexeddb records from a LevelDB log (.log) file, use the following
- command:
+ To parse IndexedDB records from a LevelDB log (.log) file and output the
+ results as the Python printable representation, use the following command:

  ```
- dfindexeddb log -h
- usage: dfindexeddb log [-h] -s SOURCE [-o {json,jsonl,repr}]
+ dfindexeddb log -s SOURCE -o repr
+ ```
+
+ To parse a file as a Chrome/Chromium IndexedDB blink value and output the
+ results as JSON:

- options:
- -h, --help show this help message and exit
- -s SOURCE, --source SOURCE
- The source .log file.
- -o {json,jsonl,repr}, --output {json,jsonl,repr}
- Output format. Default is json
+ ```
+ dfindexeddb blink -s SOURCE
  ```

  ### LevelDB
@@ -361,64 +368,51 @@ options:
  -h, --help show this help message and exit
  ```

+ #### Examples
+
  To parse records from a LevelDB folder, use the following command:

  ```
- dfindexeddb db -h
- usage: dfindexeddb db [-h] -s SOURCE [--use_manifest] [-o {json,jsonl,repr}]
-
- options:
- -h, --help show this help message and exit
- -s SOURCE, --source SOURCE
- The source leveldb folder
- --use_manifest Use manifest file to determine active/recovered records.
- -o {json,jsonl,repr}, --output {json,jsonl,repr}
- Output format. Default is json
+ dfleveldb db -s SOURCE
  ```

- To parse records from a LevelDB log (.log) file, use the following command:
+ To parse records from a LevelDB folder, and use the sequence number to
+ determine recovered records and output as JSON, use the
+ following command:

  ```
- $ dfleveldb log -s SOURCE [-o {json,jsonl,repr}] [-t {blocks,physical_records,write_batches,parsed_internal_key}]
-
- options:
- -h, --help show this help message and exit
- -s SOURCE, --source SOURCE
- The source leveldb file
- -o {json,jsonl,repr}, --output {json,jsonl,repr}
- Output format. Default is json
- -t {blocks,physical_records,write_batches,parsed_internal_key}, --structure_type {blocks,physical_records,write_batches,parsed_internal_key}
- Parses the specified structure. Default is parsed_internal_key.
+ dfleveldb db -s SOURCE --use_sequence_number
  ```

- To parse records from a LevelDB table (.ldb) file, use the following command:
+ To parse blocks / physical records/ write batches / internal key records from a
+ LevelDB log (.log) file, use the following command, specifying the type (block,
+ physical_records, etc) via the `-t` option. By default, internal key records are parsed:

  ```
- $ dfleveldb ldb -s SOURCE [-o {json,jsonl,repr}] [-t {blocks,records}]
+ $ dfleveldb log -s SOURCE [-t {blocks,physical_records,write_batches,parsed_internal_key}]
+ ```

- options:
- -h, --help show this help message and exit
- -s SOURCE, --source SOURCE
- The source leveldb file
- -o {json,jsonl,repr}, --output {json,jsonl,repr}
- Output format. Default is json
- -t {blocks,records}, --structure_type {blocks,records}
- Parses the specified structure. Default is records.
+ To parse blocks / records from a LevelDB table (.ldb) file, use the following
+ command, specifying the type (blocks, records) via the `-t` option. By
+ default, records are parsed:
+
+ ```
+ $ dfleveldb ldb -s SOURCE [-t {blocks,records}]
  ```

- To parse version edit records from a Descriptor (MANIFEST) file:
+ To parse version edit records from a Descriptor (MANIFEST) file, use the
+ following command:

  ```
  $ dfleveldb descriptor -s SOURCE [-o {json,jsonl,repr}] [-t {blocks,physical_records,versionedit} | -v]
-
- options:
- -h, --help show this help message and exit
- -s SOURCE, --source SOURCE
- The source leveldb file
- -o {json,jsonl,repr}, --output {json,jsonl,repr}
- Output format. Default is json
- -t {blocks,physical_records,versionedit}, --structure_type {blocks,physical_records,versionedit}
- Parses the specified structure. Default is versionedit.
- -v, --version_history
- Parses the leveldb version history.
  ```
+
+ #### Plugins
+
+ To apply a plugin parser for a leveldb file/folder, add the
+ `--plugin [Plugin Name]` argument. Currently, there is support for the
+ following artifacts:
+
+ | Plugin Name | Artifact Name |
+ | -------- | ------- |
+ | `ChromeNotificationRecord` | Chrome/Chromium Notifications |
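
A hypothetical invocation, assuming the `--plugin` flag is accepted by the `dfleveldb db` subcommand shown earlier (the source path is a placeholder):

```
$ dfleveldb db -s SOURCE --plugin ChromeNotificationRecord
```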
dfindexeddb-20240519.dist-info/RECORD ADDED
@@ -0,0 +1,37 @@
+ dfindexeddb/__init__.py,sha256=KPYL9__l8od6_OyDfGRTgaJ6iy_fqIgZ-dS2S-e3Rac,599
+ dfindexeddb/errors.py,sha256=PNpwyf_lrPc4TE77oAakX3mu5D_YcP3f80wq8Y1LkvY,749
+ dfindexeddb/utils.py,sha256=NEqnBNgAmSrZ7iwhEZx8PYlDrT8c-NnTLbm86ZLsOsc,9998
+ dfindexeddb/version.py,sha256=PzaqD9kWSx3q6RDQLgClQ7dmnrp2h8nqJcq5jprK2uY,751
+ dfindexeddb/indexeddb/__init__.py,sha256=kExXSVBCTKCD5BZJkdMfUMqGksH-DMJxP2_lI0gq-BE,575
+ dfindexeddb/indexeddb/cli.py,sha256=WeOaQXFZglkI9Mwc97rCnVsCX32Dw0CgMQX3RKutwIA,6339
+ dfindexeddb/indexeddb/utils.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ dfindexeddb/indexeddb/chromium/__init__.py,sha256=kExXSVBCTKCD5BZJkdMfUMqGksH-DMJxP2_lI0gq-BE,575
+ dfindexeddb/indexeddb/chromium/blink.py,sha256=kwhPfzcWOOxYyXUWfV6f4grQwXzS2ABFaNVMIVhol3c,32268
+ dfindexeddb/indexeddb/chromium/definitions.py,sha256=1a-AmHVZ95uDB6se_fdarwJR8q0tFMQNh2xrZ2-VxN8,8739
+ dfindexeddb/indexeddb/chromium/record.py,sha256=LIuTwwQeQbn6CBXdo0AZZHounOWcnXRg6W082yxmNBo,47578
+ dfindexeddb/indexeddb/chromium/v8.py,sha256=NsbMgA6nRcAfdLg6CFwWadwsDS6TJ95-4MrgphaTuLw,22102
+ dfindexeddb/indexeddb/firefox/__init__.py,sha256=kExXSVBCTKCD5BZJkdMfUMqGksH-DMJxP2_lI0gq-BE,575
+ dfindexeddb/indexeddb/safari/__init__.py,sha256=kExXSVBCTKCD5BZJkdMfUMqGksH-DMJxP2_lI0gq-BE,575
+ dfindexeddb/indexeddb/safari/definitions.py,sha256=nW8MmYx9Ob86W4pxm4QD4Xvr5QjoV34-U7wDhm2GIr0,2779
+ dfindexeddb/indexeddb/safari/record.py,sha256=bzoMSgpXs2SsEOKHjVh9tkJDZtzGkQByq3G5dK_Yd7Q,8010
+ dfindexeddb/indexeddb/safari/webkit.py,sha256=SmmwuJKF8NfHCiAaz1Zc1LsVm6IF3bTkaEzi6M0-HdM,21969
+ dfindexeddb/leveldb/__init__.py,sha256=KPYL9__l8od6_OyDfGRTgaJ6iy_fqIgZ-dS2S-e3Rac,599
+ dfindexeddb/leveldb/cli.py,sha256=e2C94FSP28dh83FWQXD5N44ymUDwkfFeX0Tfk9YLCTo,9913
+ dfindexeddb/leveldb/definitions.py,sha256=lPW_kjc47vyoGOoEWfgWvKcpGbN-0h7XXwCeMoFmYKk,1486
+ dfindexeddb/leveldb/descriptor.py,sha256=WR3irG16oIE6VbaP9UPnzOD3KlHR8GYFnoeG6ySJUzU,12211
+ dfindexeddb/leveldb/ldb.py,sha256=mN-M7PLtE_VLZCbCbzRgjkSezbMUhgDjgWgPgIxJ1jM,8087
+ dfindexeddb/leveldb/log.py,sha256=ofw0r2f_3Ll5oHzssvp61nmjhIPdt3tmb9UeNiGLHXk,9401
+ dfindexeddb/leveldb/record.py,sha256=j7ZnU6VDVcYVpJRGFRb5Sr2edhC3aGp3U0kPNcoZgng,11912
+ dfindexeddb/leveldb/utils.py,sha256=RgEEZ7Z35m3CcOUypAiViQSzKjBgSXZ3aeJhQjY3H9w,3748
+ dfindexeddb/leveldb/plugins/__init__.py,sha256=RoC6tRkq8FhqIaFs6jwu1fao_qaSvlSfIFxQVjWregI,690
+ dfindexeddb/leveldb/plugins/chrome_notifications.py,sha256=-dyb_AJbUPE2wPJg_Y1Ns5CMtg4udi9Fqo5WKh6f3Z4,5354
+ dfindexeddb/leveldb/plugins/interface.py,sha256=QlNEvVvU8K9ChE2kblM97cOvXwvmCh9NuSf2b6WwezQ,1257
+ dfindexeddb/leveldb/plugins/manager.py,sha256=jisYyks3OQQQUVACoGcWN81UCGQEa537YvYL7v3CiFs,2139
+ dfindexeddb/leveldb/plugins/notification_database_data_pb2.py,sha256=DCPZHbyq2szLgrBprOKrJKycKJma8Z_SnAQM6Jx9bZg,4389
+ dfindexeddb-20240519.dist-info/AUTHORS,sha256=QbvjbAom57fpEkekkCVFUj0B9KUMGraR510aUMBC-PE,286
+ dfindexeddb-20240519.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
+ dfindexeddb-20240519.dist-info/METADATA,sha256=aDy87_6iqVwvkSI4teKm-Sq8DKb6GHpuydePrZXsydA,18818
+ dfindexeddb-20240519.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+ dfindexeddb-20240519.dist-info/entry_points.txt,sha256=WG9YNLZ9lBx4Q9QF6wS4dZdZfADT3Zs4_-MV5TcA0ls,102
+ dfindexeddb-20240519.dist-info/top_level.txt,sha256=X9OTaub1c8S_JJ7g-f8JdkhhdiZ4x1j4eni1hdUCwE4,12
+ dfindexeddb-20240519.dist-info/RECORD,,