dfindexeddb 20241105-py3-none-any.whl → 20260205-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. dfindexeddb/indexeddb/chromium/blink.py +116 -74
  2. dfindexeddb/indexeddb/chromium/definitions.py +240 -125
  3. dfindexeddb/indexeddb/chromium/record.py +651 -346
  4. dfindexeddb/indexeddb/chromium/sqlite.py +362 -0
  5. dfindexeddb/indexeddb/chromium/v8.py +100 -78
  6. dfindexeddb/indexeddb/cli.py +282 -121
  7. dfindexeddb/indexeddb/firefox/definitions.py +7 -4
  8. dfindexeddb/indexeddb/firefox/gecko.py +98 -74
  9. dfindexeddb/indexeddb/firefox/record.py +78 -26
  10. dfindexeddb/indexeddb/safari/definitions.py +5 -3
  11. dfindexeddb/indexeddb/safari/record.py +86 -53
  12. dfindexeddb/indexeddb/safari/webkit.py +85 -71
  13. dfindexeddb/indexeddb/types.py +4 -1
  14. dfindexeddb/leveldb/cli.py +146 -138
  15. dfindexeddb/leveldb/definitions.py +6 -2
  16. dfindexeddb/leveldb/descriptor.py +70 -56
  17. dfindexeddb/leveldb/ldb.py +39 -33
  18. dfindexeddb/leveldb/log.py +41 -30
  19. dfindexeddb/leveldb/plugins/chrome_notifications.py +30 -18
  20. dfindexeddb/leveldb/plugins/interface.py +5 -6
  21. dfindexeddb/leveldb/plugins/manager.py +10 -9
  22. dfindexeddb/leveldb/record.py +71 -62
  23. dfindexeddb/leveldb/utils.py +105 -13
  24. dfindexeddb/utils.py +36 -31
  25. dfindexeddb/version.py +2 -2
  26. dfindexeddb-20260205.dist-info/METADATA +171 -0
  27. dfindexeddb-20260205.dist-info/RECORD +41 -0
  28. {dfindexeddb-20241105.dist-info → dfindexeddb-20260205.dist-info}/WHEEL +1 -1
  29. dfindexeddb-20241105.dist-info/AUTHORS +0 -12
  30. dfindexeddb-20241105.dist-info/METADATA +0 -424
  31. dfindexeddb-20241105.dist-info/RECORD +0 -41
  32. {dfindexeddb-20241105.dist-info → dfindexeddb-20260205.dist-info}/entry_points.txt +0 -0
  33. {dfindexeddb-20241105.dist-info → dfindexeddb-20260205.dist-info/licenses}/LICENSE +0 -0
  34. {dfindexeddb-20241105.dist-info → dfindexeddb-20260205.dist-info}/top_level.txt +0 -0
dfindexeddb/utils.py CHANGED
@@ -14,13 +14,13 @@
 # limitations under the License.
 """Utilities for dfindexeddb."""
 from __future__ import annotations
+
 import copy
 import dataclasses
 import io
 import os
 import struct
-from typing import BinaryIO, Tuple, Type, TypeVar
-
+from typing import Any, BinaryIO, Literal, Tuple, Type, TypeVar
 
 from dfindexeddb import errors
 
@@ -28,8 +28,8 @@ from dfindexeddb import errors
 class StreamDecoder:
   """A helper class to decode primitive data types from BinaryIO streams.
 
-  Attributes:
-    stream (BinaryIO): the binary stream.
+  Attributes:
+    stream (BinaryIO): the binary stream.
   """
 
   def __init__(self, stream: BinaryIO):
@@ -51,7 +51,7 @@ class StreamDecoder:
     self.stream.seek(current_offset, os.SEEK_SET)
     num_rem_bytes = end_offset - current_offset
     if num_rem_bytes < 0:
-      raise errors.DecoderError('Negative number of remaining bytes.')
+      raise errors.DecoderError("Negative number of remaining bytes.")
     return num_rem_bytes
 
   def ReadBytes(self, count: int = -1) -> Tuple[int, bytes]:
@@ -71,10 +71,11 @@
     offset = self.stream.tell()
     buffer = self.stream.read(count)
     if count == -1 and not buffer:
-      raise errors.DecoderError(f'No bytes available at offset {offset}')
+      raise errors.DecoderError(f"No bytes available at offset {offset}")
     if count != -1 and len(buffer) != count:
       raise errors.DecoderError(
-          f'Read {len(buffer)} bytes, but wanted {count} at offset {offset}')
+          f"Read {len(buffer)} bytes, but wanted {count} at offset {offset}"
+      )
     return offset, buffer
 
   def PeekBytes(self, count: int) -> Tuple[int, bytes]:
@@ -94,8 +95,8 @@
   def DecodeInt(
       self,
       byte_count: int = -1,
-      byte_order: str = 'little',
-      signed: bool = True
+      byte_order: Literal["big", "little"] = "little",
+      signed: bool = True,
   ) -> Tuple[int, int]:
     """Decodes an integer from the binary stream.
 
@@ -162,27 +163,27 @@
     """Returns a Tuple of the offset and a double-precision float."""
     offset, blob = self.ReadBytes(8)
     if little_endian:
-      value = struct.unpack('<d', blob)[0]
+      value = struct.unpack("<d", blob)[0]
     else:
-      value = struct.unpack('>d', blob)[0]
+      value = struct.unpack(">d", blob)[0]
     return offset, value
 
   def DecodeFloat(self, little_endian: bool = True) -> Tuple[int, float]:
     """Returns a Tuple of the offset and a single-precision float."""
     offset, blob = self.ReadBytes(4)
     if little_endian:
-      value = struct.unpack('<f', blob)[0]
+      value = struct.unpack("<f", blob)[0]
     else:
-      value = struct.unpack('>f', blob)[0]
+      value = struct.unpack(">f", blob)[0]
     return offset, value
 
   def DecodeVarint(self, max_bytes: int = 10) -> Tuple[int, int]:
     """Returns a Tuple of the offset and the decoded base128 varint."""
     offset = self.stream.tell()
     varint = 0
-    for i in range(0, max_bytes*7, 7):
+    for i in range(0, max_bytes * 7, 7):
       _, varint_part = self.ReadBytes(1)
-      varint |= (varint_part[0] & 0x7f) << i
+      varint |= (varint_part[0] & 0x7F) << i
       if not varint_part[0] >> 7:
         break
     return offset, varint
@@ -209,7 +210,7 @@
     return self.DecodeZigzagVarint(max_bytes=10)
 
 
-T = TypeVar('T')
+T = TypeVar("T")
 
 
 class FromDecoderMixin:
@@ -217,7 +218,8 @@ class FromDecoderMixin:
 
   @classmethod
   def FromDecoder(
-      cls: Type[T], decoder: StreamDecoder, base_offset: int = 0) -> T:
+      cls: Type[T], decoder: StreamDecoder, base_offset: int = 0
+  ) -> T:
     """Decodes a class type from the current position of a StreamDecoder.
 
     Args:
@@ -233,8 +235,7 @@
     raise NotImplementedError
 
   @classmethod
-  def FromStream(
-      cls: Type[T], stream: BinaryIO, base_offset: int = 0) -> T:
+  def FromStream(cls: Type[T], stream: BinaryIO, base_offset: int = 0) -> T:
     """Decodes a class type from the current position of a binary stream.
 
     Args:
@@ -245,11 +246,12 @@
       The class instance.
     """
     decoder = StreamDecoder(stream)
-    return cls.FromDecoder(decoder=decoder, base_offset=base_offset)
+    return cls.FromDecoder(  # type: ignore[attr-defined,no-any-return]
+        decoder=decoder, base_offset=base_offset
+    )
 
   @classmethod
-  def FromBytes(
-      cls: Type[T], raw_data: bytes, base_offset: int = 0) -> T:
+  def FromBytes(cls: Type[T], raw_data: bytes, base_offset: int = 0) -> T:
     """Parses a class type from raw bytes.
 
     Args:
@@ -260,36 +262,39 @@
       The class instance.
     """
     stream = io.BytesIO(raw_data)
-    return cls.FromStream(stream=stream, base_offset=base_offset)
+    return cls.FromStream(  # type: ignore[attr-defined,no-any-return]
+        stream=stream, base_offset=base_offset
+    )
 
 
-def asdict(obj, *, dict_factory=dict):  # pylint: disable=invalid-name
+def asdict(obj, *, dict_factory=dict) -> Any:  # type: ignore[no-untyped-def] # pylint: disable=invalid-name
   """Custom implementation of the asdict dataclasses method to include the
   class name under the __type__ attribute name.
   """
   if not dataclasses.is_dataclass(obj):
-    raise TypeError('asdict() should be called on dataclass instances')
+    raise TypeError("asdict() should be called on dataclass instances")
   return _asdict_inner(obj, dict_factory)
 
 
-def _asdict_inner(obj, dict_factory):
+def _asdict_inner(obj, dict_factory):  # type: ignore[no-untyped-def]
   """Custom implementation of the _asdict_inner dataclasses method."""
   if dataclasses.is_dataclass(obj):
-    result = [('__type__', obj.__class__.__name__)]
+    result = [("__type__", obj.__class__.__name__)]
     for f in dataclasses.fields(obj):
       value = _asdict_inner(getattr(obj, f.name), dict_factory)
      result.append((f.name, value))
    return dict_factory(result)
 
-  if isinstance(obj, tuple) and hasattr(obj, '_fields'):
+  if isinstance(obj, tuple) and hasattr(obj, "_fields"):
    return type(obj)(*[_asdict_inner(v, dict_factory) for v in obj])
 
  if isinstance(obj, (list, tuple)):
    return type(obj)(_asdict_inner(v, dict_factory) for v in obj)
 
  if isinstance(obj, dict):
-    return type(obj)((_asdict_inner(k, dict_factory),
-                      _asdict_inner(v, dict_factory))
-                     for k, v in obj.items())
+    return type(obj)(
+        (_asdict_inner(k, dict_factory), _asdict_inner(v, dict_factory))
+        for k, v in obj.items()
+    )
 
  return copy.deepcopy(obj)
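For context on what the reformatted `utils.py` API looks like in use, the sketch below decodes a base-128 varint with `StreamDecoder` and builds a tiny `FromDecoderMixin` subclass. It is a hedged example based only on the signatures visible in the diff above; the `Point` dataclass is hypothetical and not part of the package, and the snippet assumes dfindexeddb is installed.

```python
import dataclasses
import io

from dfindexeddb import utils  # assumes the dfindexeddb package is installed


@dataclasses.dataclass
class Point(utils.FromDecoderMixin):
  """Hypothetical structure used only to illustrate FromDecoderMixin."""
  x: int
  y: int

  @classmethod
  def FromDecoder(cls, decoder, base_offset=0):
    # Each coordinate is assumed stored as a 2-byte little-endian signed int.
    _, x = decoder.DecodeInt(byte_count=2, byte_order="little", signed=True)
    _, y = decoder.DecodeInt(byte_count=2, byte_order="little", signed=True)
    return cls(x=x, y=y)


# DecodeVarint reads a base-128 varint; 0xAC 0x02 encodes the value 300.
decoder = utils.StreamDecoder(io.BytesIO(b"\xac\x02"))
offset, value = decoder.DecodeVarint()
print(offset, value)  # 0 300

# FromBytes wraps the bytes in a stream and dispatches to FromDecoder.
point = Point.FromBytes(b"\x01\x00\x02\x00")

# asdict mirrors dataclasses.asdict but records the class name under __type__.
print(utils.asdict(point))  # {'__type__': 'Point', 'x': 1, 'y': 2}
```

The `# type: ignore[attr-defined,no-any-return]` comments added in this release are presumably needed because `FromStream` and `FromBytes` call `FromDecoder` on a generic `Type[T]`, which a static type checker cannot resolve to the concrete subclass.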
dfindexeddb/version.py CHANGED
@@ -15,9 +15,9 @@
 """Version information for dfIndexeddb."""
 
 
-__version__ = "20241105"
+__version__ = "20260205"
 
 
-def GetVersion():
+def GetVersion() -> str:
   """Returns the version information."""
   return __version__
dfindexeddb-20260205.dist-info/METADATA ADDED
@@ -0,0 +1,171 @@
+Metadata-Version: 2.4
+Name: dfindexeddb
+Version: 20260205
+Summary: dfindexeddb is an experimental Python tool for performing digital forensic analysis of IndexedDB and leveldb files.
+Author-email: Syd Pleno <sydp@google.com>
+Maintainer-email: dfIndexeddb Developers <dfindexeddb-dev@googlegroups.com>
+License-Expression: Apache-2.0
+Project-URL: Homepage, https://github.com/google/dfindexeddb
+Project-URL: Documentation, https://github.com/google/dfindexeddb/tree/main/docs
+Project-URL: Repository, https://github.com/google/dfindexeddb
+Project-URL: Bug Tracker, https://github.com/google/dfindexeddb/issues
+Classifier: Development Status :: 3 - Alpha
+Classifier: Programming Language :: Python
+Requires-Python: >=3.9
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Requires-Dist: python-snappy==0.6.1
+Requires-Dist: zstd==1.5.5.1
+Provides-Extra: plugins
+Requires-Dist: protobuf; extra == "plugins"
+Requires-Dist: dfdatetime; extra == "plugins"
+Dynamic: license-file
+
+# dfIndexeddb
+
+dfindexeddb is an experimental Python tool for performing digital forensic
+analysis of IndexedDB and LevelDB files.
+
+It parses LevelDB, IndexedDB and JavaScript structures from these files without
+requiring native libraries. (Note: only a subset of IndexedDB key types and
+JavaScript types for Firefox, Safari and Chromium-based browsers are currently supported).
+
+The content of IndexedDB files is dependent on what a web application stores
+locally/offline using the web browser's
+[IndexedDB API](https://www.w3.org/TR/IndexedDB/). Examples of content might
+include:
+* text from a text/source-code editor application,
+* emails and contact information from an e-mail application,
+* images and metadata from a photo gallery application
+
+
+## Installation
+
+1. [Linux] Install the snappy compression development package
+
+```
+$ sudo apt install libsnappy-dev
+```
+
+2. Create a virtual environment and install the package
+
+```
+$ python3 -m venv .venv
+$ source .venv/bin/activate
+$ pip install dfindexeddb
+```
+
+### Optional plugins
+
+To also install the dependencies for leveldb/indexeddb plugins, run
+```
+$ pip install 'dfindexeddb[plugins]'
+```
+
+
+## Installation from source
+
+1. [Linux] Install the snappy compression development package
+
+```
+$ sudo apt install libsnappy-dev
+```
+
+2. Clone or download/unzip the repository to your local machine.
+
+3. Create a virtual environment and install the package
+
+```
+$ python3 -m venv .venv
+$ source .venv/bin/activate
+$ pip install .
+```
+
+### Optional plugins
+
+To also install the dependencies for leveldb/indexeddb plugins, run
+```
+$ pip install '.[plugins]'
+```
+
+## Usage
+
+Two CLI tools for parsing IndexedDB/LevelDB files are available after
+installation:
+
+
+### IndexedDB
+
+```
+$ dfindexeddb -h
+usage: dfindexeddb [-h] {blink,gecko,db,ldb,log} ...
+
+A cli tool for parsing IndexedDB files
+
+positional arguments:
+  {blink,gecko,db,ldb,log}
+    blink               Parse a file as a blink-encoded value.
+    gecko               Parse a file as a gecko-encoded value.
+    db                  Parse a directory/file as IndexedDB.
+    ldb                 Parse a ldb file as IndexedDB.
+    log                 Parse a log file as IndexedDB.
+
+options:
+  -h, --help            show this help message and exit
+```
+
+#### Examples:
+
+| Platform / Source | Format | Command |
+| :--- | :--- | :--- |
+| **Firefox** (sqlite) | JSON | `dfindexeddb db -s SOURCE --format firefox -o json` |
+| **Safari** (sqlite) | JSON-L | `dfindexeddb db -s SOURCE --format safari -o jsonl` |
+| **Chrome** (LevelDB/sqlite) | JSON | `dfindexeddb db -s SOURCE --format chrome` |
+| **Chrome** (.ldb) | JSON-L | `dfindexeddb ldb -s SOURCE -o jsonl` |
+| **Chrome** (.log) | Python repr | `dfindexeddb log -s SOURCE -o repr` |
+| **Chrome** (Blink) | JSON | `dfindexeddb blink -s SOURCE` |
+| **Filter Records by key** | JSON | `dfindexeddb db -s SOURCE --format chrome --filter_key search_term` |
+| **Filter Records by value** | JSON | `dfindexeddb db -s SOURCE --format chrome --filter_value "search_term"` |
+
+
+### LevelDB
+
+```
+$ dfleveldb -h
+usage: dfleveldb [-h] {db,log,ldb,descriptor} ...
+
+A cli tool for parsing leveldb files
+
+positional arguments:
+  {db,log,ldb,descriptor}
+    db                  Parse a directory as leveldb.
+    log                 Parse a leveldb log file.
+    ldb                 Parse a leveldb table (.ldb) file.
+    descriptor          Parse a leveldb descriptor (MANIFEST) file.
+
+options:
+  -h, --help            show this help message and exit
+```
+
+#### Examples
+
+| Source | Type | Command |
+| :--- | :--- | :--- |
+| **LevelDB Folder** | Records | `dfleveldb db -s SOURCE` |
+| **Log file** (.log) | Physical Records | `dfleveldb log -s SOURCE -t physical_records` |
+| **Log file** (.log) | Blocks | `dfleveldb log -s SOURCE -t blocks` |
+| **Log file** (.log) | Write Batches | `dfleveldb log -s SOURCE -t write_batches` |
+| **Log file** (.log) | Internal Key Records | `dfleveldb log -s SOURCE -t parsed_internal_key` |
+| **Table file** (.ldb) | Records | `dfleveldb ldb -s SOURCE -t record` |
+| **Table file** (.ldb) | Blocks | `dfleveldb ldb -s SOURCE -t blocks` |
+| **Descriptor** (MANIFEST) | Version Edits | `dfleveldb descriptor -s SOURCE -t versionedit` |
+
+#### Optional Plugins
+
+To apply a plugin parser for a leveldb file/folder, add the
+`--plugin [Plugin Name]` argument. Currently, there is support for the
+following artifacts:
+
+| Plugin Name | Artifact Name |
+| -------- | ------- |
+| `ChromeNotificationRecord` | Chrome/Chromium Notifications |
dfindexeddb-20260205.dist-info/RECORD ADDED
@@ -0,0 +1,41 @@
+dfindexeddb/__init__.py,sha256=KPYL9__l8od6_OyDfGRTgaJ6iy_fqIgZ-dS2S-e3Rac,599
+dfindexeddb/errors.py,sha256=PNpwyf_lrPc4TE77oAakX3mu5D_YcP3f80wq8Y1LkvY,749
+dfindexeddb/utils.py,sha256=gep43m3LpAZNnW6JFOQTDn8MRrEXzGlc1NZ5yMuBsvI,10194
+dfindexeddb/version.py,sha256=Cc6Drn2uY77dYocXLvSTJsbyUur_ujj6hChTvvNhnnY,758
+dfindexeddb/indexeddb/__init__.py,sha256=kExXSVBCTKCD5BZJkdMfUMqGksH-DMJxP2_lI0gq-BE,575
+dfindexeddb/indexeddb/cli.py,sha256=EbPmRloa95xoDlBhwH9ShgMGb8z118CZN_oarFtbZDE,12419
+dfindexeddb/indexeddb/types.py,sha256=m9llt7PTVMSvhsQ_uDIlZJUkbM_5k3gUx5CtMc3TvC8,1893
+dfindexeddb/indexeddb/utils.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+dfindexeddb/indexeddb/chromium/__init__.py,sha256=kExXSVBCTKCD5BZJkdMfUMqGksH-DMJxP2_lI0gq-BE,575
+dfindexeddb/indexeddb/chromium/blink.py,sha256=oQdbgzn4w1dUs3jAqtCF0078o4ccupHSjH0hB42vJSc,32978
+dfindexeddb/indexeddb/chromium/definitions.py,sha256=HFe2aiAMCOrGkncZX80wzQVvaka8MzDpDZ8I1vE0HMs,10732
+dfindexeddb/indexeddb/chromium/record.py,sha256=3U_NzidU-pAIFo1bHZucupi44-QmYXitYU9f9VA3yjI,56224
+dfindexeddb/indexeddb/chromium/sqlite.py,sha256=fT3fmDuM3KgQD6OKhRrgYkg3ranfFN3BQZmr8DEt5PE,11476
+dfindexeddb/indexeddb/chromium/v8.py,sha256=cbaFQmhZoJMWGphLdvz1ZcvTge9MSh6GLH4K1yQNiWk,21888
+dfindexeddb/indexeddb/firefox/__init__.py,sha256=kExXSVBCTKCD5BZJkdMfUMqGksH-DMJxP2_lI0gq-BE,575
+dfindexeddb/indexeddb/firefox/definitions.py,sha256=C9qziLvNMmWFwVrt6K_lw4I_1YbiWJ9nxb65x0TefvU,4306
+dfindexeddb/indexeddb/firefox/gecko.py,sha256=rcph8PrX0IL_67cCvx9EsbEK_8r8lkXvW-nU7_2IsxA,19932
+dfindexeddb/indexeddb/firefox/record.py,sha256=8T2s9e66qNUugCEqOLG3mtpq309njGkY20yrdek_Kio,7314
+dfindexeddb/indexeddb/safari/__init__.py,sha256=kExXSVBCTKCD5BZJkdMfUMqGksH-DMJxP2_lI0gq-BE,575
+dfindexeddb/indexeddb/safari/definitions.py,sha256=_st96KuCjgzAeltDGylmoojlCtVvTGQPWwqzYCuX3s8,2793
+dfindexeddb/indexeddb/safari/record.py,sha256=vwAud6yxlGmhT4UGvojBPjJ7NuIIsNkr7liJjY1yBj0,8874
+dfindexeddb/indexeddb/safari/webkit.py,sha256=16Vj7f3Pbts2ebdFg1dXmqQ3ixAKKv4_3znvsP21b4U,20816
+dfindexeddb/leveldb/__init__.py,sha256=KPYL9__l8od6_OyDfGRTgaJ6iy_fqIgZ-dS2S-e3Rac,599
+dfindexeddb/leveldb/cli.py,sha256=7CkM9P0bk83uj1cJrGFai82FIPBF0sul1tI2KUTMokA,10291
+dfindexeddb/leveldb/definitions.py,sha256=cvwO3BkbEh5ISlPzlM_TL2xs-1lRyqepiyy7_ZZ8-_A,1490
+dfindexeddb/leveldb/descriptor.py,sha256=Cmy9y73cBeb586pryoagZ0-Zb2iA_8MCRGu6SN5dKbo,12479
+dfindexeddb/leveldb/ldb.py,sha256=7sjpqMXrU6iXz5d4GAx58BuEknpjPM3qlrzNv6jdLIk,8140
+dfindexeddb/leveldb/log.py,sha256=oCMpLH8STOSK0qM-u44wnf781ehy5-JPe1e_osMlgL4,9510
+dfindexeddb/leveldb/record.py,sha256=Fnz7i446IO3IjJZAlB9olM_VSiEaM2ONHzG7BqrBzak,11978
+dfindexeddb/leveldb/utils.py,sha256=Nl4JT_rnQZnHA6APhM_c8tPS9U6R2EAM4KxnclbRqTg,6476
+dfindexeddb/leveldb/plugins/__init__.py,sha256=RoC6tRkq8FhqIaFs6jwu1fao_qaSvlSfIFxQVjWregI,690
+dfindexeddb/leveldb/plugins/chrome_notifications.py,sha256=H2TgriK_mre1kflTh2Zd2aIvcoHXa_qJum5f5Sbwp88,5485
+dfindexeddb/leveldb/plugins/interface.py,sha256=1kFbhzgiRLQ18gdOc-pzbizQ7dp_Li-ut4ZHjXSxKIU,1193
+dfindexeddb/leveldb/plugins/manager.py,sha256=UV8dtdeu6xPCNzvx3fSef1WrZxGPzkPBD8mD2qpoVzY,2317
+dfindexeddb/leveldb/plugins/notification_database_data_pb2.py,sha256=DCPZHbyq2szLgrBprOKrJKycKJma8Z_SnAQM6Jx9bZg,4389
+dfindexeddb-20260205.dist-info/licenses/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
+dfindexeddb-20260205.dist-info/METADATA,sha256=q0VwkHYGOmrEp7Up_dPYGwXPdULB4MxI1y_dWH5IGAE,5520
+dfindexeddb-20260205.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+dfindexeddb-20260205.dist-info/entry_points.txt,sha256=WG9YNLZ9lBx4Q9QF6wS4dZdZfADT3Zs4_-MV5TcA0ls,102
+dfindexeddb-20260205.dist-info/top_level.txt,sha256=X9OTaub1c8S_JJ7g-f8JdkhhdiZ4x1j4eni1hdUCwE4,12
+dfindexeddb-20260205.dist-info/RECORD,,
{dfindexeddb-20241105.dist-info → dfindexeddb-20260205.dist-info}/WHEEL CHANGED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (75.3.0)
+Generator: setuptools (80.10.2)
 Root-Is-Purelib: true
 Tag: py3-none-any
 
dfindexeddb-20241105.dist-info/AUTHORS DELETED
@@ -1,12 +0,0 @@
-# Names should be added to this file with this pattern:
-#
-# For individuals:
-#   Name (email address)
-#
-# For organizations:
-#   Organization (fnmatch pattern)
-#
-# See python fnmatch module documentation for more information.
-
-Google Inc. (*@google.com)
-Syd Pleno (syd.pleno@gmail.com)