dfindexeddb 20251109__py3-none-any.whl → 20260205__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,362 @@
1
+ # -*- coding: utf-8 -*-
2
+ # Copyright 2026 Google LLC
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # https://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ """Chromium IndexedDB records encoded in sqlite3 databases."""
16
+
17
+ import os
18
+ import sqlite3
19
+ from typing import Any, Generator, Optional
20
+ from dataclasses import dataclass
21
+
22
+ import snappy
23
+ import zstd
24
+
25
+ from dfindexeddb.indexeddb.chromium import blink
26
+ from dfindexeddb.indexeddb.chromium import definitions
27
+ from dfindexeddb.indexeddb.chromium import record
28
+
29
+
30
@dataclass
class ChromiumIndexedDBRecord:
  """Chromium IndexedDB record parsed from a sqlite3 database.

  Attributes:
    row_id: the row ID of the record.
    object_store_id: the ID of the object store the record belongs to.
    compression_type: the compression type of the raw value (populated by
        DatabaseReader with a definitions.DatabaseCompressionType value).
    key: the parsed key, or None when key parsing is disabled or there is
        no raw key.
    value: the parsed value; when the raw value lives in blobs this is a
        list of ChromiumBlobInfo, or None when value parsing is disabled.
    has_blobs: whether the record has associated blob data.
    raw_key: the raw (encoded) key bytes, or None when raw data was not
        requested.
    raw_value: the raw (possibly compressed) value bytes, or None when raw
        data was not requested.
  """

  row_id: int
  object_store_id: int
  compression_type: int
  key: Any
  value: Any
  has_blobs: bool
  raw_key: Optional[bytes]
  raw_value: Optional[bytes]
53
+
54
+
55
@dataclass
class ChromiumObjectStoreInfo:
  """Chromium IndexedDB object store info parsed from a sqlite3 database.

  Attributes:
    id: the object store ID.
    name: the object store name (decoded from UTF-16-LE bytes).
    key_path: the object store key path.
    auto_increment: whether the object store auto increments keys.
    key_generator_current_number: the current number of the key generator.
  """

  id: int
  name: str
  key_path: str
  auto_increment: int
  key_generator_current_number: int
72
+
73
+
74
@dataclass
class ChromiumBlobInfo:
  """Chromium IndexedDB blob info parsed from a sqlite3 database.

  Attributes:
    row_id: the blob row ID.
    object_type: the object type.
    mime_type: the mime type, if any.
    size_bytes: the total size in bytes.
    file_name: the file name (only set for files).
    number_of_chunks: the number of chunks including the initial one.
    blob_data: the concatenated blob data from all chunks.  NOTE: callers
        may replace this with the decoded script value (see
        DatabaseReader consumers), so the attribute is not always bytes.
  """

  row_id: int
  object_type: int
  mime_type: Optional[str]
  size_bytes: int
  file_name: Optional[str]
  number_of_chunks: int
  blob_data: bytes
95
+
96
+
97
class DatabaseReader:
  """A reader for Chromium IndexedDB sqlite3 files."""

  def __init__(self, filename: str):
    """Initializes the reader.

    Args:
      filename: the path to the sqlite3 file.
    """
    self._filename = filename

  def _OpenDatabase(self) -> sqlite3.Connection:
    """Opens the sqlite3 database in read-only mode.

    Returns:
      A sqlite3 connection.  The caller is responsible for closing it:
      sqlite3 connections used as context managers only manage
      transactions and are never closed automatically.
    """
    return sqlite3.connect(f"file:{self._filename}?mode=ro", uri=True)

  def ObjectStores(self) -> Generator[ChromiumObjectStoreInfo, None, None]:
    """Yields object stores.

    Yields:
      ChromiumObjectStoreInfo objects.
    """
    conn = self._OpenDatabase()
    try:
      cursor = conn.cursor()
      cursor.execute(definitions.SQL_OBJECT_STORES_QUERY)
      for row in cursor:
        yield ChromiumObjectStoreInfo(
            id=row[0],
            # Object store names are stored as UTF-16-LE encoded bytes.
            name=row[1].decode("utf-16-le"),
            key_path=row[2],
            auto_increment=row[3],
            key_generator_current_number=row[4],
        )
    finally:
      conn.close()

  def _GetLegacyBlobPath(self, blob_id: int) -> str:
    """Gets the path to a legacy blob file.

    Args:
      blob_id: the blob ID.

    Returns:
      The path to the legacy blob file (the blob ID in lowercase hex,
      inside the database's blob directory).
    """
    base, ext = os.path.splitext(self._filename)
    # NOTE(review): ext retains the leading dot, so this yields a
    # directory like "name_.sqlite3" — confirm this matches the on-disk
    # Chromium blob directory layout.
    db_dir = f"{base}_{ext}"
    return os.path.join(db_dir, f"{blob_id:x}")

  def LoadLegacyBlobData(self, blob_id: int) -> bytes:
    """Loads legacy blob data from disk.

    Args:
      blob_id: the blob ID.

    Returns:
      The blob data.

    Raises:
      FileNotFoundError: if the legacy blob file is not found.
    """
    blob_path = self._GetLegacyBlobPath(blob_id)
    if not os.path.exists(blob_path):
      raise FileNotFoundError(f"Legacy blob file not found: {blob_path}")
    with open(blob_path, "rb") as file_object:
      return file_object.read()

  @staticmethod
  def _MakeBlobInfo(
      blob_row: sqlite3.Row, number_of_chunks: int, blob_data: bytearray
  ) -> ChromiumBlobInfo:
    """Builds a ChromiumBlobInfo from an initial blob row and chunk data.

    Args:
      blob_row: the sqlite3 row describing the blob.
      number_of_chunks: the number of chunks accumulated for the blob.
      blob_data: the accumulated chunk bytes.

    Returns:
      The assembled ChromiumBlobInfo.
    """
    return ChromiumBlobInfo(
        row_id=blob_row["row_id"],
        object_type=blob_row["object_type"],
        mime_type=blob_row["mime_type"],
        size_bytes=blob_row["size_bytes"],
        file_name=blob_row["file_name"],
        number_of_chunks=number_of_chunks,
        blob_data=bytes(blob_data),
    )

  def LoadBlobDataForRecordId(
      self, row_id: int
  ) -> Generator[ChromiumBlobInfo, None, None]:
    """Loads blob data for a given record row ID.

    Args:
      row_id: the record row ID.

    Yields:
      ChromiumBlobInfo objects, one per blob, with all chunks
      concatenated.
    """
    conn = self._OpenDatabase()
    try:
      conn.row_factory = sqlite3.Row
      cursor = conn.cursor()

      # Note this is a UNION query between the blob and
      # overflow_blob_chunks table.  The chunk_index = 0 for the row from
      # the 'blobs' table.
      cursor.execute(definitions.SQL_BLOB_DATA_QUERY, (row_id, row_id))

      current_blob_id = None
      current_blob_data = bytearray()
      current_record: Optional[sqlite3.Row] = None
      total_number_of_chunks = 0

      for blob_row in cursor:
        blob_id = blob_row["row_id"]

        if blob_id != current_blob_id:
          # A new blob starts: flush the previous one, if any.
          if current_record is not None:
            yield self._MakeBlobInfo(
                current_record, total_number_of_chunks, current_blob_data
            )
          current_blob_id = blob_id
          current_blob_data = bytearray()
          current_record = blob_row
          total_number_of_chunks = 0

        if blob_row["chunk_index"] == 0 and blob_row["bytes"] is None:
          # Initial row without inline bytes: the data lives in a legacy
          # on-disk blob file.
          current_blob_data.extend(self.LoadLegacyBlobData(blob_id))
          total_number_of_chunks += 1
          continue

        if blob_row["bytes"]:
          current_blob_data.extend(blob_row["bytes"])
          total_number_of_chunks += 1

      # Flush the trailing blob.
      if current_record is not None:
        yield self._MakeBlobInfo(
            current_record, total_number_of_chunks, current_blob_data
        )
    finally:
      conn.close()

  def _EnumerateCursor(
      self,
      cursor: sqlite3.Cursor,
      include_raw_data: bool = False,
      parse_key: bool = True,
      parse_value: bool = True,
      load_blobs: bool = True,
  ) -> Generator[ChromiumIndexedDBRecord, None, None]:
    """Yields ChromiumIndexedDBRecord records from a sqlite3 cursor.

    Args:
      cursor: the sqlite3 cursor.
      include_raw_data: whether to include the raw data.
      parse_key: whether to parse the key.
      parse_value: whether to parse the value.
      load_blobs: whether to load the record blobs.

    Yields:
      ChromiumIndexedDBRecord records.

    Raises:
      ValueError: if a record has no raw value but is not marked as
          having blobs.
    """
    for row in cursor:
      row_id = row[0]
      object_store_id = row[1]
      compression_type = definitions.DatabaseCompressionType(row[2])
      raw_key = row[3]
      raw_value = row[4]
      has_blobs = bool(row[5])

      key, value = None, None
      if parse_key and raw_key:
        key = record.SortableIDBKey.FromBytes(raw_data=raw_key, base_offset=0)

      if parse_value and raw_value:
        # Decompress first (if needed), then decode the V8 script value.
        if compression_type == definitions.DatabaseCompressionType.UNCOMPRESSED:
          decompressed = raw_value
        elif compression_type == definitions.DatabaseCompressionType.ZSTD:
          decompressed = zstd.decompress(raw_value)
        elif compression_type == definitions.DatabaseCompressionType.SNAPPY:
          decompressed = snappy.decompress(raw_value)
        else:
          # Unknown compression type: leave the value unparsed.
          decompressed = None
        if decompressed is not None:
          value = blink.V8ScriptValueDecoder.FromBytes(decompressed)

      if load_blobs and raw_value is None:
        if not has_blobs:
          raise ValueError("Raw value is None but has_blobs is not set")
        value = []
        for blob in self.LoadBlobDataForRecordId(row_id):
          # Blob payloads are themselves V8-serialized script values.
          blob.blob_data = blink.V8ScriptValueDecoder.FromBytes(blob.blob_data)
          value.append(blob)

      yield ChromiumIndexedDBRecord(
          row_id=row_id,
          object_store_id=object_store_id,
          compression_type=compression_type,
          key=key,
          value=value,
          has_blobs=has_blobs,
          raw_key=raw_key if include_raw_data else None,
          raw_value=raw_value if include_raw_data else None,
      )

  def RecordsByObjectStoreId(
      self,
      object_store_id: int,
      include_raw_data: bool = False,
      parse_key: bool = True,
      parse_value: bool = True,
      load_blobs: bool = True,
  ) -> Generator[ChromiumIndexedDBRecord, None, None]:
    """Yields ChromiumIndexedDBRecord records for a given object store ID.

    Args:
      object_store_id: the object store ID.
      include_raw_data: whether to include the raw data.
      parse_key: whether to parse the key.
      parse_value: whether to parse the value.
      load_blobs: whether to load the record blobs.

    Yields:
      ChromiumIndexedDBRecord records.
    """
    conn = self._OpenDatabase()
    try:
      conn.row_factory = sqlite3.Row
      cursor = conn.cursor()
      cursor.execute(definitions.SQL_RECORDS_BY_ID_QUERY, (object_store_id,))
      yield from self._EnumerateCursor(
          cursor, include_raw_data, parse_key, parse_value, load_blobs
      )
    finally:
      conn.close()

  def RecordsByObjectStoreName(
      self,
      object_store_name: str,
      include_raw_data: bool = False,
      parse_key: bool = True,
      parse_value: bool = True,
      load_blobs: bool = True,
  ) -> Generator[ChromiumIndexedDBRecord, None, None]:
    """Yields ChromiumIndexedDBRecord records for a given object store name.

    Args:
      object_store_name: the object store name.
      include_raw_data: whether to include the raw data.
      parse_key: whether to parse the key.
      parse_value: whether to parse the value.
      load_blobs: whether to load the record blobs.

    Yields:
      ChromiumIndexedDBRecord records.
    """
    conn = self._OpenDatabase()
    try:
      conn.row_factory = sqlite3.Row
      cursor = conn.cursor()
      # Stored names are UTF-16-LE encoded, so encode the query argument
      # the same way.
      cursor.execute(
          definitions.SQL_RECORDS_BY_NAME_QUERY,
          (object_store_name.encode("utf-16-le"),),
      )
      yield from self._EnumerateCursor(
          cursor, include_raw_data, parse_key, parse_value, load_blobs
      )
    finally:
      conn.close()

  def Records(
      self,
      include_raw_data: bool = False,
      parse_key: bool = True,
      parse_value: bool = True,
      load_blobs: bool = True,
  ) -> Generator[ChromiumIndexedDBRecord, None, None]:
    """Yields ChromiumIndexedDBRecord records from all object stores.

    Args:
      include_raw_data: whether to include the raw data.
      parse_key: whether to parse the key.
      parse_value: whether to parse the value.
      load_blobs: whether to load the record blobs.

    Yields:
      ChromiumIndexedDBRecord records.
    """
    conn = self._OpenDatabase()
    try:
      conn.row_factory = sqlite3.Row
      cursor = conn.cursor()
      cursor.execute(definitions.SQL_RECORDS_QUERY)
      yield from self._EnumerateCursor(
          cursor, include_raw_data, parse_key, parse_value, load_blobs
      )
    finally:
      conn.close()
@@ -24,6 +24,7 @@ from typing import Any
24
24
  from dfindexeddb import utils, version
25
25
  from dfindexeddb.indexeddb import types
26
26
  from dfindexeddb.indexeddb.chromium import blink
27
+ from dfindexeddb.indexeddb.chromium import sqlite
27
28
  from dfindexeddb.indexeddb.chromium import record as chromium_record
28
29
  from dfindexeddb.indexeddb.firefox import gecko
29
30
  from dfindexeddb.indexeddb.firefox import record as firefox_record
@@ -68,7 +69,12 @@ class Encoder(json.JSONEncoder):
68
69
 
69
70
 
70
71
  def _Output(structure: Any, output: str) -> None:
71
- """Helper method to output parsed structure to stdout."""
72
+ """Helper method to output parsed structure to stdout.
73
+
74
+ Args:
75
+ structure: The structure to output.
76
+ output: The output format.
77
+ """
72
78
  if output == "json":
73
79
  print(json.dumps(structure, indent=2, cls=Encoder))
74
80
  elif output == "jsonl":
@@ -96,30 +102,123 @@ def GeckoCommand(args: argparse.Namespace) -> None:
96
102
def DbCommand(args: argparse.Namespace) -> None:
  """The CLI for processing a directory or sqlite3 file as IndexedDB.

  Args:
    args: the parsed command line arguments.
  """

  def _MatchesFilters(record_value: Any, record_key: Any) -> bool:
    """Returns True if the record passes --filter_value / --filter_key.

    Values and keys are normalized to strings before comparison.
    """
    if args.filter_value is not None and args.filter_value not in str(
        record_value
    ):
      return False
    if args.filter_key is not None and args.filter_key not in str(record_key):
      return False
    return True

  if args.format in ("chrome", "chromium"):
    if args.source.is_file():
      # A single file is assumed to be a sqlite3-backed IndexedDB.
      reader = sqlite.DatabaseReader(str(args.source))
      if args.object_store_id is not None:
        records = reader.RecordsByObjectStoreId(
            args.object_store_id, include_raw_data=args.include_raw_data
        )
      else:
        records = reader.Records(include_raw_data=args.include_raw_data)
      for chromium_db_record in records:
        if not _MatchesFilters(
            chromium_db_record.value, chromium_db_record.key.value
        ):
          continue
        _Output(chromium_db_record, output=args.output)
    else:
      # A directory is assumed to be a LevelDB-backed IndexedDB.
      for chromium_leveldb_record in chromium_record.FolderReader(
          args.source
      ).GetRecords(
          use_manifest=args.use_manifest,
          use_sequence_number=args.use_sequence_number,
      ):
        if (
            args.object_store_id is not None
            and chromium_leveldb_record.object_store_id != args.object_store_id
        ):
          continue
        if not _MatchesFilters(
            chromium_leveldb_record.value, chromium_leveldb_record.key.value
        ):
          continue
        _Output(chromium_leveldb_record, output=args.output)
  elif args.format == "firefox":
    reader = firefox_record.FileReader(str(args.source))
    if args.object_store_id is not None:
      firefox_db_records = reader.RecordsByObjectStoreId(
          args.object_store_id, include_raw_data=args.include_raw_data
      )
    else:
      firefox_db_records = reader.Records(
          include_raw_data=args.include_raw_data
      )

    for firefox_db_record in firefox_db_records:
      if not _MatchesFilters(
          firefox_db_record.value, firefox_db_record.key.value
      ):
        continue
      _Output(firefox_db_record, output=args.output)
  elif args.format == "safari":
    reader = safari_record.FileReader(str(args.source))
    if args.object_store_id is not None:
      safari_db_records = reader.RecordsByObjectStoreId(
          args.object_store_id, include_raw_data=args.include_raw_data
      )
    else:
      safari_db_records = reader.Records(
          include_raw_data=args.include_raw_data
      )

    for safari_db_record in safari_db_records:
      # Safari keys are compared directly, not via a .value attribute.
      if not _MatchesFilters(safari_db_record.value, safari_db_record.key):
        continue
      _Output(safari_db_record, output=args.output)
112
191
 
113
192
 
114
193
def LdbCommand(args: argparse.Namespace) -> None:
  """The CLI for processing a LevelDB table (.ldb) file as IndexedDB.

  Args:
    args: the parsed command line arguments.
  """
  records = chromium_record.ChromiumIndexedDBRecord.FromFile(args.source)
  for db_record in records:
    # Apply the optional substring filters; records are normalized to
    # strings before comparison.
    if args.filter_value is not None:
      if args.filter_value not in str(db_record.value):
        continue
    if args.filter_key is not None:
      if args.filter_key not in str(db_record.key):
        continue
    _Output(db_record, output=args.output)
118
207
 
119
208
 
120
209
def LogCommand(args: argparse.Namespace) -> None:
  """The CLI for processing a LevelDB log file as IndexedDB.

  Args:
    args: the parsed command line arguments.
  """
  for db_record in chromium_record.ChromiumIndexedDBRecord.FromFile(
      args.source
  ):
    # Records are normalized to strings before the substring filters
    # are applied.
    matches = True
    if args.filter_value is not None and args.filter_value not in str(
        db_record.value
    ):
      matches = False
    if matches and args.filter_key is not None and args.filter_key not in str(
        db_record.key
    ):
      matches = False
    if matches:
      _Output(db_record, output=args.output)
124
223
 
125
224
 
@@ -204,6 +303,16 @@ def App() -> None:
204
303
  choices=["chromium", "chrome", "firefox", "safari"],
205
304
  help="The type of IndexedDB to parse.",
206
305
  )
306
+ parser_db.add_argument(
307
+ "--object_store_id",
308
+ type=int,
309
+ help="The object store ID to filter by.",
310
+ )
311
+ parser_db.add_argument(
312
+ "--include_raw_data",
313
+ action="store_true",
314
+ help="Include raw key and value in the output.",
315
+ )
207
316
  parser_db.add_argument(
208
317
  "-o",
209
318
  "--output",
@@ -211,6 +320,22 @@ def App() -> None:
211
320
  default="json",
212
321
  help="Output format. Default is json.",
213
322
  )
323
+ parser_db.add_argument(
324
+ "--filter_value",
325
+ type=str,
326
+ help=(
327
+ "Only output records where the value contains this string. "
328
+ "Values are normalized to strings before comparison."
329
+ ),
330
+ )
331
+ parser_db.add_argument(
332
+ "--filter_key",
333
+ type=str,
334
+ help=(
335
+ "Only output records where the key contains this string. "
336
+ "Keys are normalized to strings before comparison."
337
+ ),
338
+ )
214
339
  parser_db.set_defaults(func=DbCommand)
215
340
 
216
341
  parser_ldb = subparsers.add_parser(
@@ -230,6 +355,22 @@ def App() -> None:
230
355
  default="json",
231
356
  help="Output format. Default is json.",
232
357
  )
358
+ parser_ldb.add_argument(
359
+ "--filter_value",
360
+ type=str,
361
+ help=(
362
+ "Only output records where the value contains this string. "
363
+ "Values are normalized to strings before comparison."
364
+ ),
365
+ )
366
+ parser_ldb.add_argument(
367
+ "--filter_key",
368
+ type=str,
369
+ help=(
370
+ "Only output records where the key contains this string. "
371
+ "Keys are normalized to strings before comparison."
372
+ ),
373
+ )
233
374
  parser_ldb.set_defaults(func=LdbCommand)
234
375
 
235
376
  parser_log = subparsers.add_parser(
@@ -249,6 +390,22 @@ def App() -> None:
249
390
  default="json",
250
391
  help="Output format. Default is json.",
251
392
  )
393
+ parser_log.add_argument(
394
+ "--filter_value",
395
+ type=str,
396
+ help=(
397
+ "Only output records where the value contains this string. "
398
+ "Values are normalized to strings before comparison."
399
+ ),
400
+ )
401
+ parser_log.add_argument(
402
+ "--filter_key",
403
+ type=str,
404
+ help=(
405
+ "Only output records where the key contains this string. "
406
+ "Keys are normalized to strings before comparison."
407
+ ),
408
+ )
252
409
  parser_log.set_defaults(func=LogCommand)
253
410
 
254
411
  args: argparse.Namespace = parser.parse_args()
@@ -54,6 +54,8 @@ class FirefoxIndexedDBRecord:
54
54
  object_store_id: the object store id.
55
55
  object_store_name: the object store name from the object_store table.
56
56
  database_name: the IndexedDB database name from the database table.
57
+ raw_key: the raw key.
58
+ raw_value: the raw value.
57
59
  """
58
60
 
59
61
  key: Any
@@ -62,6 +64,8 @@ class FirefoxIndexedDBRecord:
62
64
  object_store_id: int
63
65
  object_store_name: str
64
66
  database_name: str
67
+ raw_key: Optional[bytes] = None
68
+ raw_value: Optional[bytes] = None
65
69
 
66
70
 
67
71
  class FileReader:
@@ -134,7 +138,7 @@ class FileReader:
134
138
  )
135
139
 
136
140
  def RecordsByObjectStoreId(
137
- self, object_store_id: int
141
+ self, object_store_id: int, include_raw_data: bool = False
138
142
  ) -> Generator[FirefoxIndexedDBRecord, None, None]:
139
143
  """Returns FirefoxIndexedDBRecords by a given object store id.
140
144
 
@@ -163,9 +167,13 @@ class FileReader:
163
167
  file_ids=row[3],
164
168
  object_store_name=row[4].decode("utf-8"),
165
169
  database_name=self.database_name,
170
+ raw_key=row[0] if include_raw_data else None,
171
+ raw_value=row[1] if include_raw_data else None,
166
172
  )
167
173
 
168
- def Records(self) -> Generator[FirefoxIndexedDBRecord, None, None]:
174
+ def Records(
175
+ self, include_raw_data: bool = False
176
+ ) -> Generator[FirefoxIndexedDBRecord, None, None]:
169
177
  """Returns FirefoxIndexedDBRecords from the database."""
170
178
  with sqlite3.connect(f"file:{self.filename}?mode=ro", uri=True) as conn:
171
179
  conn.text_factory = bytes
@@ -187,6 +195,8 @@ class FileReader:
187
195
  file_ids=row[3],
188
196
  object_store_name=row[4].decode("utf-8"),
189
197
  database_name=self.database_name,
198
+ raw_key=row[0] if include_raw_data else None,
199
+ raw_value=row[1] if include_raw_data else None,
190
200
  )
191
201
 
192
202