dfindexeddb 20240305.tar.gz → 20240324.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {dfindexeddb-20240305/dfindexeddb.egg-info → dfindexeddb-20240324}/PKG-INFO +46 -32
- dfindexeddb-20240324/README.md +110 -0
- {dfindexeddb-20240305 → dfindexeddb-20240324}/dfindexeddb/indexeddb/blink.py +2 -1
- {dfindexeddb-20240305 → dfindexeddb-20240324}/dfindexeddb/indexeddb/chromium.py +4 -4
- dfindexeddb-20240324/dfindexeddb/indexeddb/cli.py +101 -0
- dfindexeddb-20240324/dfindexeddb/indexeddb/utils.py +0 -0
- dfindexeddb-20240324/dfindexeddb/leveldb/cli.py +217 -0
- {dfindexeddb-20240305 → dfindexeddb-20240324}/dfindexeddb/leveldb/definitions.py +16 -0
- {dfindexeddb-20240305 → dfindexeddb-20240324}/dfindexeddb/leveldb/descriptor.py +10 -11
- {dfindexeddb-20240305 → dfindexeddb-20240324}/dfindexeddb/leveldb/ldb.py +20 -24
- {dfindexeddb-20240305 → dfindexeddb-20240324}/dfindexeddb/leveldb/log.py +25 -18
- dfindexeddb-20240324/dfindexeddb/leveldb/record.py +102 -0
- dfindexeddb-20240324/dfindexeddb/leveldb/utils.py +116 -0
- {dfindexeddb-20240305 → dfindexeddb-20240324}/dfindexeddb/utils.py +5 -46
- {dfindexeddb-20240305 → dfindexeddb-20240324}/dfindexeddb/version.py +1 -1
- {dfindexeddb-20240305 → dfindexeddb-20240324/dfindexeddb.egg-info}/PKG-INFO +46 -32
- {dfindexeddb-20240305 → dfindexeddb-20240324}/dfindexeddb.egg-info/SOURCES.txt +6 -2
- dfindexeddb-20240324/dfindexeddb.egg-info/entry_points.txt +3 -0
- {dfindexeddb-20240305 → dfindexeddb-20240324}/pyproject.toml +3 -2
- dfindexeddb-20240305/README.md +0 -96
- dfindexeddb-20240305/dfindexeddb/cli.py +0 -180
- dfindexeddb-20240305/dfindexeddb.egg-info/entry_points.txt +0 -2
- {dfindexeddb-20240305 → dfindexeddb-20240324}/AUTHORS +0 -0
- {dfindexeddb-20240305 → dfindexeddb-20240324}/LICENSE +0 -0
- {dfindexeddb-20240305 → dfindexeddb-20240324}/dfindexeddb/__init__.py +0 -0
- {dfindexeddb-20240305 → dfindexeddb-20240324}/dfindexeddb/errors.py +0 -0
- {dfindexeddb-20240305 → dfindexeddb-20240324}/dfindexeddb/indexeddb/__init__.py +0 -0
- {dfindexeddb-20240305 → dfindexeddb-20240324}/dfindexeddb/indexeddb/definitions.py +0 -0
- {dfindexeddb-20240305 → dfindexeddb-20240324}/dfindexeddb/indexeddb/v8.py +0 -0
- {dfindexeddb-20240305 → dfindexeddb-20240324}/dfindexeddb/leveldb/__init__.py +0 -0
- {dfindexeddb-20240305 → dfindexeddb-20240324}/dfindexeddb.egg-info/dependency_links.txt +0 -0
- {dfindexeddb-20240305 → dfindexeddb-20240324}/dfindexeddb.egg-info/requires.txt +0 -0
- {dfindexeddb-20240305 → dfindexeddb-20240324}/dfindexeddb.egg-info/top_level.txt +0 -0
- {dfindexeddb-20240305 → dfindexeddb-20240324}/setup.cfg +0 -0
- {dfindexeddb-20240305 → dfindexeddb-20240324}/setup.py +0 -0

{dfindexeddb-20240305/dfindexeddb.egg-info → dfindexeddb-20240324}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dfindexeddb
-Version: 20240305
+Version: 20240324
 Summary: dfindexeddb is an experimental Python tool for performing digital forensic analysis of IndexedDB and leveldb files.
 Author-email: Syd Pleno <sydp@google.com>
 Maintainer-email: dfIndexeddb Developers <dfindexeddb-dev@googlegroups.com>
@@ -226,7 +226,9 @@ dfindexeddb is an experimental Python tool for performing digital forensic
 analysis of IndexedDB and leveldb files.
 
 It parses leveldb, IndexedDB and javascript structures from these files without
-requiring native libraries.
+requiring native libraries. (Note: only a subset of IndexedDB key types and
+Javascript types for Chromium-based browsers are currently supported. Safari
+and Firefox are under development).
 
 The content of IndexedDB files is dependent on what a web application stores
 locally/offline using the web browser's
@@ -236,25 +238,34 @@ include:
 * emails and contact information from an e-mail application,
 * images and metadata from a photo gallery application
 
+
 ## Installation
 
+1. [Linux] Install the snappy compression development package
+
 ```
-$
+$ sudo apt install libsnappy-dev
 ```
 
-
+2. Create a virtual environment and install the package
 
-
+```
+$ python3 -m venv .venv
+$ source .venv/bin/activate
+$ pip install dfindexeddb
+```
 
-
+## Installation from source
+
+1. [Linux] Install the snappy compression development package
 
 ```
 $ sudo apt install libsnappy-dev
 ```
 
-2. Clone or download the repository to your local machine.
+2. Clone or download/unzip the repository to your local machine.
 
-3. Create a
+3. Create a virtual environment and install the package
 
 ```
 $ python3 -m venv .venv
@@ -264,55 +275,58 @@ $ pip install dfindexeddb
 
 ## Usage
 
-
+Two CLI tools for parsing IndexedDB/leveldb files are available after
+installation:
+
+
+### IndexedDB
 
 ```
 $ dfindexeddb -h
-usage: dfindexeddb [-h] -s SOURCE [--json]
-
-A cli tool for the dfindexeddb package
+usage: dfindexeddb [-h] -s SOURCE [--json]
 
-
-{log,ldb,indexeddb}
+A cli tool for parsing indexeddb files
 
 options:
+  -h, --help            show this help message and exit
  -s SOURCE, --source SOURCE
-                        The source leveldb
+                        The source leveldb folder
  --json                Output as JSON
 ```
 
-
+### LevelDB
 
 ```
-$
-usage:
+$ dfleveldb -h
+usage: dfleveldb [-h] {db,log,ldb,descriptor} ...
+
+A cli tool for parsing leveldb files
 
 positional arguments:
-  {
+  {db,log,ldb,descriptor}
+    db                  Parse a directory as leveldb.
+    log                 Parse a leveldb log file.
+    ldb                 Parse a leveldb table (.ldb) file.
+    descriptor          Parse a leveldb descriptor (MANIFEST) file.
 
 options:
   -h, --help            show this help message and exit
 ```
 
-To parse a LevelDB .
+To parse records from a LevelDB log (.log) file, use the following command:
 
 ```
-$
-
+$ dfleveldb log -s <SOURCE> [--json]
+```
 
-
-{blocks,records}
+To parse records from a LevelDB table (.ldb) file, use the following command:
 
-
-
+```
+$ dfleveldb ldb -s <SOURCE> [--json]
 ```
 
-To parse
+To parse version edit records from a Descriptor (MANIFEST) file:
 
 ```
-$
-usage: dfindexeddb indexeddb [-h]
-
-options:
-  -h, --help            show this help message and exit
+$ dfleveldb descriptor -s <SOURCE> [--json]
 ```
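Note: the usage text above only spells out the log, ldb and descriptor invocations. The db subcommand that appears in the dfleveldb help output takes the same options, so an invocation such as `dfleveldb db -s <SOURCE> [--json]` should work as well; this is inferred from the option pattern above and from the argument parser added in dfindexeddb/leveldb/cli.py later in this diff, not from the README itself.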

dfindexeddb-20240324/README.md

@@ -0,0 +1,110 @@
+# dfIndexeddb
+
+dfindexeddb is an experimental Python tool for performing digital forensic
+analysis of IndexedDB and leveldb files.
+
+It parses leveldb, IndexedDB and javascript structures from these files without
+requiring native libraries. (Note: only a subset of IndexedDB key types and
+Javascript types for Chromium-based browsers are currently supported. Safari
+and Firefox are under development).
+
+The content of IndexedDB files is dependent on what a web application stores
+locally/offline using the web browser's
+[IndexedDB API](https://www.w3.org/TR/IndexedDB/). Examples of content might
+include:
+* text from a text/source-code editor application,
+* emails and contact information from an e-mail application,
+* images and metadata from a photo gallery application
+
+
+## Installation
+
+1. [Linux] Install the snappy compression development package
+
+```
+$ sudo apt install libsnappy-dev
+```
+
+2. Create a virtual environment and install the package
+
+```
+$ python3 -m venv .venv
+$ source .venv/bin/activate
+$ pip install dfindexeddb
+```
+
+## Installation from source
+
+1. [Linux] Install the snappy compression development package
+
+```
+$ sudo apt install libsnappy-dev
+```
+
+2. Clone or download/unzip the repository to your local machine.
+
+3. Create a virtual environment and install the package
+
+```
+$ python3 -m venv .venv
+$ source .venv/bin/activate
+$ pip install .
+```
+
+## Usage
+
+Two CLI tools for parsing IndexedDB/leveldb files are available after
+installation:
+
+
+### IndexedDB
+
+```
+$ dfindexeddb -h
+usage: dfindexeddb [-h] -s SOURCE [--json]
+
+A cli tool for parsing indexeddb files
+
+options:
+  -h, --help            show this help message and exit
+  -s SOURCE, --source SOURCE
+                        The source leveldb folder
+  --json                Output as JSON
+```
+
+### LevelDB
+
+```
+$ dfleveldb -h
+usage: dfleveldb [-h] {db,log,ldb,descriptor} ...
+
+A cli tool for parsing leveldb files
+
+positional arguments:
+  {db,log,ldb,descriptor}
+    db                  Parse a directory as leveldb.
+    log                 Parse a leveldb log file.
+    ldb                 Parse a leveldb table (.ldb) file.
+    descriptor          Parse a leveldb descriptor (MANIFEST) file.
+
+options:
+  -h, --help            show this help message and exit
+```
+
+To parse records from a LevelDB log (.log) file, use the following command:
+
+```
+$ dfleveldb log -s <SOURCE> [--json]
+```
+
+To parse records from a LevelDB table (.ldb) file, use the following command:
+
+```
+$ dfleveldb ldb -s <SOURCE> [--json]
+```
+
+To parse version edit records from a Descriptor (MANIFEST) file:
+
+```
+$ dfleveldb descriptor -s <SOURCE> [--json]
+```
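The same parsing can also be driven from Python. The sketch below is illustrative only: it mirrors how the new dfindexeddb/indexeddb/cli.py (added later in this diff) uses the package, and the source path is a placeholder, not a documented example.

```python
import pathlib

from dfindexeddb import errors
from dfindexeddb.indexeddb import chromium
from dfindexeddb.leveldb import record as leveldb_record

# Placeholder path to a Chromium IndexedDB leveldb folder.
source = pathlib.Path('path/to/IndexedDB.leveldb')

for db_record in leveldb_record.LevelDBRecord.FromDir(source):
  try:
    # Re-interpret the raw leveldb record as an IndexedDB record.
    db_record.record = chromium.IndexedDBRecord.FromLevelDBRecord(
        db_record.record)
  except (errors.ParserError, errors.DecoderError, NotImplementedError):
    pass  # Unsupported key/Javascript type; see the note above.
  print(db_record)
```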

{dfindexeddb-20240305 → dfindexeddb-20240324}/dfindexeddb/indexeddb/blink.py

@@ -86,7 +86,8 @@ class V8ScriptValueDecoder:
       NotImplementedError: when called.
     """
     tag = self.ReadTag()
-    raise NotImplementedError(
+    raise NotImplementedError(
+        f'V8ScriptValueDecoder.ReadHostObject - {tag.name}')
 
   def Deserialize(self) -> Any:
     """Deserializes a Blink SSV.

{dfindexeddb-20240305 → dfindexeddb-20240324}/dfindexeddb/indexeddb/chromium.py

@@ -20,11 +20,11 @@ import io
 from typing import Any, BinaryIO, Optional, Tuple, Type, TypeVar, Union
 
 from dfindexeddb import errors
-from dfindexeddb import utils
 from dfindexeddb.indexeddb import blink
 from dfindexeddb.indexeddb import definitions
 from dfindexeddb.leveldb import ldb
 from dfindexeddb.leveldb import log
+from dfindexeddb.leveldb import utils
 
 
 T = TypeVar('T')
@@ -570,7 +570,7 @@ class EarlistCompactionTimeKey(BaseIndexedDBKey):
 class ScopesPrefixKey(BaseIndexedDBKey):
   """A scopes prefix IndexedDB key."""
 
-  def DecodeValue(self, decoder: utils.
+  def DecodeValue(self, decoder: utils.LevelDBDecoder) -> Optional[bytes]:
     """Decodes the scopes prefix value."""
     if decoder.NumRemainingBytes:
       return decoder.ReadBytes()[1]
@@ -578,7 +578,7 @@ class ScopesPrefixKey(BaseIndexedDBKey):
 
   @classmethod
   def FromDecoder(
-      cls, decoder: utils.
+      cls, decoder: utils.LevelDBDecoder, key_prefix: KeyPrefix,
       base_offset: int = 0
   ) -> ScopesPrefixKey:
     """Decodes the scopes prefix key."""
@@ -1357,4 +1357,4 @@ class IndexedDBRecord:
         value=idb_value,
         sequence_number=record.sequence_number if hasattr(
             record, 'sequence_number') else None,
-        type=record.
+        type=record.record_type)

dfindexeddb-20240324/dfindexeddb/indexeddb/cli.py

@@ -0,0 +1,101 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""A CLI tool for dfindexeddb."""
+import argparse
+import dataclasses
+from datetime import datetime
+import json
+import pathlib
+import sys
+import traceback
+
+from dfindexeddb import errors
+from dfindexeddb import version
+from dfindexeddb.leveldb import record as leveldb_record
+from dfindexeddb.indexeddb import chromium
+from dfindexeddb.indexeddb import v8
+
+
+_VALID_PRINTABLE_CHARACTERS = (
+    ' abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789' +
+    '!"#$%&\'()*+,-./:;<=>?@[\\]^_`{|}~.')
+
+
+class Encoder(json.JSONEncoder):
+  """A JSON encoder class for dfindexeddb fields."""
+  def default(self, o):
+    if dataclasses.is_dataclass(o):
+      o_dict = dataclasses.asdict(o)
+      return o_dict
+    if isinstance(o, bytes):
+      out = []
+      for x in o:
+        if chr(x) not in _VALID_PRINTABLE_CHARACTERS:
+          out.append(f'\\x{x:02X}')
+        else:
+          out.append(chr(x))
+      return ''.join(out)
+    if isinstance(o, datetime):
+      return o.isoformat()
+    if isinstance(o, v8.Undefined):
+      return "<undefined>"
+    if isinstance(o, v8.Null):
+      return "<null>"
+    if isinstance(o, set):
+      return list(o)
+    if isinstance(o, v8.RegExp):
+      return str(o)
+    return json.JSONEncoder.default(self, o)
+
+
+def _Output(structure, to_json=False):
+  """Helper method to output parsed structure to stdout."""
+  if to_json:
+    print(json.dumps(structure, indent=2, cls=Encoder))
+  else:
+    print(structure)
+
+
+def IndexeddbCommand(args):
+  """The CLI for processing a log/ldb file as indexeddb."""
+  for db_record in leveldb_record.LevelDBRecord.FromDir(args.source):
+    record = db_record.record
+    try:
+      db_record.record = chromium.IndexedDBRecord.FromLevelDBRecord(record)
+    except(
+        errors.ParserError,
+        errors.DecoderError,
+        NotImplementedError) as err:
+      print(
+          (f'Error parsing blink value: {err} for {record.__class__.__name__} '
+           f'at offset {record.offset} in {db_record.path}'), file=sys.stderr)
+      print(f'Traceback: {traceback.format_exc()}', file=sys.stderr)
+    _Output(db_record, to_json=args.json)
+
+
+def App():
+  """The CLI app entrypoint for dfindexeddb."""
+  parser = argparse.ArgumentParser(
+      prog='dfindexeddb',
+      description='A cli tool for parsing indexeddb files',
+      epilog=f'Version {version.GetVersion()}')
+  parser.add_argument(
+      '-s', '--source', required=True, type=pathlib.Path,
+      help='The source leveldb folder')
+  parser.add_argument('--json', action='store_true', help='Output as JSON')
+  parser.set_defaults(func=IndexeddbCommand)
+
+  args = parser.parse_args()
+  args.func(args)
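The Encoder class added above is what turns parsed dataclasses, bytes and v8 sentinel values into JSON output for the --json flag. A small standalone illustration of the same idea, assuming the package is installed as released; the Example dataclass here is made up for the demonstration and is not a dfindexeddb type:

```python
import dataclasses
import json

from dfindexeddb.indexeddb.cli import Encoder  # the class added above


@dataclasses.dataclass
class Example:
  """Hypothetical record used only to exercise the encoder."""
  name: str
  raw: bytes


# Dataclasses become dicts; non-printable bytes are escaped as \xNN.
print(json.dumps(Example(name='key', raw=b'\x00\x01abc'), indent=2, cls=Encoder))
```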

dfindexeddb-20240324/dfindexeddb/indexeddb/utils.py

File without changes

dfindexeddb-20240324/dfindexeddb/leveldb/cli.py

@@ -0,0 +1,217 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""A CLI tool for leveldb files."""
+import argparse
+import dataclasses
+from datetime import datetime
+import json
+import pathlib
+
+from dfindexeddb import version
+from dfindexeddb.leveldb import descriptor
+from dfindexeddb.leveldb import ldb
+from dfindexeddb.leveldb import log
+from dfindexeddb.leveldb import record
+
+
+_VALID_PRINTABLE_CHARACTERS = (
+    ' abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789' +
+    '!"#$%&\'()*+,-./:;<=>?@[\\]^_`{|}~.')
+
+
+class Encoder(json.JSONEncoder):
+  """A JSON encoder class for dfleveldb fields."""
+
+  def default(self, o):
+    """Returns a serializable object for o."""
+    if dataclasses.is_dataclass(o):
+      o_dict = dataclasses.asdict(o)
+      return o_dict
+    if isinstance(o, bytes):
+      out = []
+      for x in o:
+        if chr(x) not in _VALID_PRINTABLE_CHARACTERS:
+          out.append(f'\\x{x:02X}')
+        else:
+          out.append(chr(x))
+      return ''.join(out)
+    if isinstance(o, datetime):
+      return o.isoformat()
+    if isinstance(o, set):
+      return list(o)
+    return json.JSONEncoder.default(self, o)
+
+
+def _Output(structure, to_json=False):
+  """Helper method to output parsed structure to stdout."""
+  if to_json:
+    print(json.dumps(structure, indent=2, cls=Encoder))
+  else:
+    print(structure)
+
+
+def DbCommand(args):
+  """The CLI for processing leveldb folders."""
+  for rec in record.LevelDBRecord.FromDir(args.source):
+    _Output(rec, to_json=args.json)
+
+
+def LdbCommand(args):
+  """The CLI for processing ldb files."""
+  ldb_file = ldb.FileReader(args.source)
+
+  if args.structure_type == 'blocks':
+    # Prints block information.
+    for block in ldb_file.GetBlocks():
+      _Output(block, to_json=args.json)
+
+  elif args.structure_type == 'records' or not args.structure_type:
+    # Prints key value record information.
+    for key_value_record in ldb_file.GetKeyValueRecords():
+      _Output(key_value_record, to_json=args.json)
+
+  else:
+    print(f'{args.structure_type} is not supported for ldb files.')
+
+
+def LogCommand(args):
+  """The CLI for processing log files."""
+  log_file = log.FileReader(args.source)
+
+  if args.structure_type == 'blocks':
+    # Prints block information.
+    for block in log_file.GetBlocks():
+      _Output(block, to_json=args.json)
+
+  elif args.structure_type == 'physical_records':
+    # Prints log file physical record information.
+    for log_file_record in log_file.GetPhysicalRecords():
+      _Output(log_file_record, to_json=args.json)
+
+  elif args.structure_type == 'write_batches':
+    # Prints log file batch information.
+    for batch in log_file.GetWriteBatches():
+      _Output(batch, to_json=args.json)
+
+  elif (args.structure_type in ('parsed_internal_key', 'records')
+        or not args.structure_type):
+    # Prints key value record information.
+    for internal_key_record in log_file.GetParsedInternalKeys():
+      _Output(internal_key_record, to_json=args.json)
+
+  else:
+    print(f'{args.structure_type} is not supported for log files.')
+
+
+def DescriptorCommand(args):
+  """The CLI for processing descriptor (MANIFEST) files."""
+  manifest_file = descriptor.FileReader(args.source)
+
+  if args.structure_type == 'blocks':
+    # Prints block information.
+    for block in manifest_file.GetBlocks():
+      _Output(block, to_json=args.json)
+
+  elif args.structure_type == 'physical_records':
+    # Prints log file physical record information.
+    for log_file_record in manifest_file.GetPhysicalRecords():
+      _Output(log_file_record, to_json=args.json)
+
+  elif (args.structure_type == 'versionedit'
+        or not args.structure_type):
+    for version_edit in manifest_file.GetVersionEdits():
+      _Output(version_edit, to_json=args.json)
+
+  else:
+    print(f'{args.structure_type} is not supported for descriptor files.')
+
+def App():
+  """The CLI app entrypoint for parsing leveldb files."""
+  parser = argparse.ArgumentParser(
+      prog='dfleveldb',
+      description='A cli tool for parsing leveldb files',
+      epilog=f'Version {version.GetVersion()}')
+
+  subparsers = parser.add_subparsers()
+
+  parser_db = subparsers.add_parser(
+      'db', help='Parse a directory as leveldb.')
+  parser_db.add_argument(
+      '-s', '--source',
+      required=True,
+      type=pathlib.Path,
+      help='The source leveldb directory')
+  parser_db.add_argument(
+      '--json', action='store_true', help='Output as JSON')
+  parser_db.set_defaults(func=DbCommand)
+
+  parser_log = subparsers.add_parser(
+      'log', help='Parse a leveldb log file.')
+  parser_log.add_argument(
+      '-s', '--source',
+      required=True,
+      type=pathlib.Path,
+      help='The source leveldb file')
+  parser_log.add_argument(
+      '--json', action='store_true', help='Output as JSON')
+  parser_log.add_argument(
+      '-t',
+      '--structure_type',
+      choices=[
+          'blocks',
+          'physical_records',
+          'write_batches',
+          'parsed_internal_key'])
+  parser_log.set_defaults(func=LogCommand)
+
+  parser_ldb = subparsers.add_parser(
+      'ldb', help='Parse a leveldb table (.ldb) file.')
+  parser_ldb.add_argument(
+      '-s', '--source',
+      required=True,
+      type=pathlib.Path,
+      help='The source leveldb file')
+  parser_ldb.add_argument(
+      '--json', action='store_true', help='Output as JSON')
+  parser_ldb.add_argument(
+      '-t',
+      '--structure_type',
+      choices=[
+          'blocks',
+          'records'])
+  parser_ldb.set_defaults(func=LdbCommand)
+
+  parser_descriptor = subparsers.add_parser(
+      'descriptor', help='Parse a leveldb descriptor (MANIFEST) file.')
+  parser_descriptor.add_argument(
+      '-s', '--source',
+      required=True,
+      type=pathlib.Path,
+      help='The source leveldb file')
+  parser_descriptor.add_argument(
+      '--json', action='store_true', help='Output as JSON')
+  parser_descriptor.add_argument(
+      '-t',
+      '--structure_type',
+      choices=[
+          'blocks', 'physical_records', 'versionedit'])
+  parser_descriptor.set_defaults(func=DescriptorCommand)
+
+  args = parser.parse_args()
+
+  if not hasattr(args, 'func'):
+    parser.print_usage()
+  else:
+    args.func(args)
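Each subcommand handler above takes the argparse namespace directly, so the same code paths can be exercised from Python without going through the console script. A rough sketch, assuming the package is installed as released; the log file path is hypothetical:

```python
import argparse
import pathlib

from dfindexeddb.leveldb import cli as leveldb_cli  # module added above

# Build the same namespace that `dfleveldb log -s ... -t write_batches --json`
# would produce and hand it to the handler.
args = argparse.Namespace(
    source=pathlib.Path('path/to/leveldb/000003.log'),  # hypothetical file
    structure_type='write_batches',
    json=True)
leveldb_cli.LogCommand(args)  # prints each write batch as JSON
```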

{dfindexeddb-20240305 → dfindexeddb-20240324}/dfindexeddb/leveldb/definitions.py

@@ -16,12 +16,22 @@
 
 import enum
 
+BLOCK_RESTART_ENTRY_LENGTH = 4
+BLOCK_TRAILER_SIZE = 5
+TABLE_FOOTER_SIZE = 48
+TABLE_MAGIC = b'\x57\xfb\x80\x8b\x24\x75\x47\xdb'
 
 PACKED_SEQUENCE_AND_TYPE_LENGTH = 8
 SEQUENCE_LENGTH = 7
 TYPE_LENGTH = 1
 
 
+class BlockCompressionType(enum.IntEnum):
+  """Block compression types."""
+  SNAPPY = 1
+  ZSTD = 2
+
+
 class VersionEditTags(enum.IntEnum):
   """VersionEdit tags."""
   COMPARATOR = 1
@@ -41,3 +51,9 @@ class LogFilePhysicalRecordType(enum.IntEnum):
   FIRST = 2
   MIDDLE = 3
   LAST = 4
+
+
+class InternalRecordType(enum.IntEnum):
+  """Internal record types."""
+  DELETED = 0
+  VALUE = 1
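The new table constants also make it easy to sanity-check that a file really is a leveldb table before parsing it. A minimal sketch, assuming only the standard leveldb table layout (a 48-byte footer at the end of the file whose last 8 bytes are the magic value); the file name is hypothetical and this helper is not part of the package:

```python
import pathlib

from dfindexeddb.leveldb import definitions


def looks_like_leveldb_table(path: pathlib.Path) -> bool:
  """Returns True if the file ends with the leveldb table footer magic."""
  data = path.read_bytes()
  if len(data) < definitions.TABLE_FOOTER_SIZE:
    return False
  # The footer occupies the last TABLE_FOOTER_SIZE bytes; the magic is at its end.
  footer = data[-definitions.TABLE_FOOTER_SIZE:]
  return footer.endswith(definitions.TABLE_MAGIC)


print(looks_like_leveldb_table(pathlib.Path('000005.ldb')))  # hypothetical file
```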