linear_mcp_fast-0.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. ccl_chromium_reader/__init__.py +2 -0
  2. ccl_chromium_reader/ccl_chromium_cache.py +1335 -0
  3. ccl_chromium_reader/ccl_chromium_filesystem.py +302 -0
  4. ccl_chromium_reader/ccl_chromium_history.py +357 -0
  5. ccl_chromium_reader/ccl_chromium_indexeddb.py +1060 -0
  6. ccl_chromium_reader/ccl_chromium_localstorage.py +454 -0
  7. ccl_chromium_reader/ccl_chromium_notifications.py +268 -0
  8. ccl_chromium_reader/ccl_chromium_profile_folder.py +568 -0
  9. ccl_chromium_reader/ccl_chromium_sessionstorage.py +368 -0
  10. ccl_chromium_reader/ccl_chromium_snss2.py +332 -0
  11. ccl_chromium_reader/ccl_shared_proto_db_downloads.py +189 -0
  12. ccl_chromium_reader/common.py +19 -0
  13. ccl_chromium_reader/download_common.py +78 -0
  14. ccl_chromium_reader/profile_folder_protocols.py +276 -0
  15. ccl_chromium_reader/serialization_formats/__init__.py +0 -0
  16. ccl_chromium_reader/serialization_formats/ccl_blink_value_deserializer.py +401 -0
  17. ccl_chromium_reader/serialization_formats/ccl_easy_chromium_pickle.py +133 -0
  18. ccl_chromium_reader/serialization_formats/ccl_protobuff.py +276 -0
  19. ccl_chromium_reader/serialization_formats/ccl_v8_value_deserializer.py +627 -0
  20. ccl_chromium_reader/storage_formats/__init__.py +0 -0
  21. ccl_chromium_reader/storage_formats/ccl_leveldb.py +582 -0
  22. ccl_simplesnappy/__init__.py +1 -0
  23. ccl_simplesnappy/ccl_simplesnappy.py +306 -0
  24. linear_mcp_fast/__init__.py +8 -0
  25. linear_mcp_fast/__main__.py +6 -0
  26. linear_mcp_fast/reader.py +433 -0
  27. linear_mcp_fast/server.py +367 -0
  28. linear_mcp_fast/store_detector.py +117 -0
  29. linear_mcp_fast-0.1.0.dist-info/METADATA +160 -0
  30. linear_mcp_fast-0.1.0.dist-info/RECORD +39 -0
  31. linear_mcp_fast-0.1.0.dist-info/WHEEL +5 -0
  32. linear_mcp_fast-0.1.0.dist-info/entry_points.txt +2 -0
  33. linear_mcp_fast-0.1.0.dist-info/top_level.txt +4 -0
  34. tools_and_utilities/Chromium_dump_local_storage.py +111 -0
  35. tools_and_utilities/Chromium_dump_session_storage.py +92 -0
  36. tools_and_utilities/benchmark.py +35 -0
  37. tools_and_utilities/ccl_chrome_audit.py +651 -0
  38. tools_and_utilities/dump_indexeddb_details.py +59 -0
  39. tools_and_utilities/dump_leveldb.py +53 -0
ccl_chromium_reader/ccl_shared_proto_db_downloads.py
@@ -0,0 +1,189 @@
+ """
+ Copyright 2022, CCL Forensics
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy of
+ this software and associated documentation files (the "Software"), to deal in
+ the Software without restriction, including without limitation the rights to
+ use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ of the Software, and to permit persons to whom the Software is furnished to do
+ so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
+ """
+
+ __version__ = "0.3"
+ __description__ = "A module for reading downloads from the Chrome/Chromium shared_proto_db leveldb data store"
+ __contact__ = "Alex Caithness"
+
+ import csv
+ import datetime
+ import io
+ import os
+ import pathlib
+ import sys
+ import typing
+
+ from .storage_formats import ccl_leveldb
+ from .serialization_formats import ccl_protobuff as pb
+ from .download_common import Download
+
+ CHROME_EPOCH = datetime.datetime(1601, 1, 1, 0, 0, 0)
+
+
+ def chrome_milli_time(milliseconds: typing.Optional[int], allow_none=True) -> typing.Optional[datetime.datetime]:
+     if milliseconds is not None:
+         if milliseconds == 0xffffffffffffffff:
+             return CHROME_EPOCH
+         else:
+             return CHROME_EPOCH + datetime.timedelta(milliseconds=milliseconds)
+     elif allow_none:
+         return None
+     raise ValueError("milliseconds cannot be None")
+
+
+ def read_datetime(stream) -> typing.Optional[datetime.datetime]:
+     return chrome_milli_time(pb.read_le_varint(stream))
+
+
+ # https://source.chromium.org/chromium/chromium/src/+/main:components/download/database/proto/download_entry.proto;l=86
+
+ HttpRequestHeader_Structure = {
+     1: pb.ProtoDecoder("key", pb.read_string),
+     2: pb.ProtoDecoder("value", pb.read_string)
+ }
+
+ ReceivedSlice_Structure = {
+     1: pb.ProtoDecoder("offset", pb.read_le_varint),
+     2: pb.ProtoDecoder("received_bytes", pb.read_le_varint),
+     3: pb.ProtoDecoder("finished", lambda x: pb.read_le_varint(x) != 0)
+ }
+
+ InProgressInfo_Structure = {
+     1: pb.ProtoDecoder("url_chain", pb.read_string),  # string
+     2: pb.ProtoDecoder("referrer_url", pb.read_string),  # string
+     3: pb.ProtoDecoder("site_url", pb.read_string),  # string // deprecated
+     4: pb.ProtoDecoder("tab_url", pb.read_string),  # string
+     5: pb.ProtoDecoder("tab_referrer_url", pb.read_string),  # string
+     6: pb.ProtoDecoder("fetch_error_body", lambda x: pb.read_le_varint(x) != 0),  # bool
+     7: pb.ProtoDecoder("request_headers", lambda x: pb.read_embedded_protobuf(x, HttpRequestHeader_Structure, True)),  # HttpRequestHeader
+     8: pb.ProtoDecoder("etag", pb.read_string),  # string
+     9: pb.ProtoDecoder("last_modified", pb.read_string),  # string
+     10: pb.ProtoDecoder("total_bytes", pb.read_le_varint),  # int64
+     11: pb.ProtoDecoder("mime_type", pb.read_string),  # string
+     12: pb.ProtoDecoder("original_mime_type", pb.read_string),  # string
+     13: pb.ProtoDecoder("current_path", pb.read_blob),  # bytes // Serialized pickles to support string16: TODO
+     14: pb.ProtoDecoder("target_path", pb.read_blob),  # bytes // Serialized pickles to support string16: TODO
+     15: pb.ProtoDecoder("received_bytes", pb.read_le_varint),  # int64
+     16: pb.ProtoDecoder("start_time", read_datetime),  # int64
+     17: pb.ProtoDecoder("end_time", read_datetime),  # int64
+     18: pb.ProtoDecoder("received_slices", lambda x: pb.read_embedded_protobuf(x, ReceivedSlice_Structure, True)),  # ReceivedSlice
+     19: pb.ProtoDecoder("hash", pb.read_blob),  # string
+     20: pb.ProtoDecoder("transient", lambda x: pb.read_le_varint(x) != 0),  # bool
+     21: pb.ProtoDecoder("state", pb.read_le_varint32),  # int32
+     22: pb.ProtoDecoder("danger_type", pb.read_le_varint32),  # int32
+     23: pb.ProtoDecoder("interrupt_reason", pb.read_le_varint32),  # int32
+     24: pb.ProtoDecoder("paused", lambda x: pb.read_le_varint(x) != 0),  # bool
+     25: pb.ProtoDecoder("metered", lambda x: pb.read_le_varint(x) != 0),  # bool
+     26: pb.ProtoDecoder("bytes_wasted", pb.read_le_varint),  # int64
+     27: pb.ProtoDecoder("auto_resume_count", pb.read_le_varint32),  # int32
+     # 28: pb.ProtoDecoder("download_schedule", None)  # DownloadSchedule // Deprecated
+     # 29: pb.ProtoDecoder("reroute_info", pb),  # enterprise_connectors.DownloadItemRerouteInfo TODO
+     30: pb.ProtoDecoder("credentials_mode", pb.read_le_varint32),  # int32 // network::mojom::CredentialsMode
+     31: pb.ProtoDecoder("range_request_from", pb.read_le_varint),  # int64
+     32: pb.ProtoDecoder("range_request_to", pb.read_le_varint),  # int64
+     33: pb.ProtoDecoder("serialized_embedder_download_data", pb.read_string)  # string
+ }
+
+ DownloadInfo_structure = {
+     1: pb.ProtoDecoder("guid", pb.read_string),
+     2: pb.ProtoDecoder("id", pb.read_le_varint32),
+     # 3 UkmInfo
+     4: pb.ProtoDecoder("in_progress_info", lambda x: pb.read_embedded_protobuf(x, InProgressInfo_Structure, True))
+ }
+
+ DownloadDbEntry_structure = {
+     1: pb.ProtoDecoder("download_info", lambda x: pb.read_embedded_protobuf(x, DownloadInfo_structure, True))
+ }
+
+
+ def read_downloads(
+         shared_proto_db_folder: typing.Union[str, os.PathLike],
+         *, handle_errors=False, utf16_paths=True) -> typing.Iterator[Download]:
+     ldb_path = pathlib.Path(shared_proto_db_folder)
+     with ccl_leveldb.RawLevelDb(ldb_path) as ldb:
+         for rec in ldb.iterate_records_raw():
+             if rec.state != ccl_leveldb.KeyState.Live:
+                 continue
+
+             key = rec.user_key
+             record_type, specific_key = key.split(b"_", 1)
+             if record_type == b"21":
+                 with io.BytesIO(rec.value) as f:
+                     obj = pb.ProtoObject(
+                         0xa, "root", pb.read_protobuff(f, DownloadDbEntry_structure, use_friendly_tag=True))
+                 try:
+                     download = Download.from_pb(rec.seq, obj, target_path_is_utf_16=utf16_paths)
+                 except ValueError as ex:
+                     print(f"Error reading a download: {ex}", file=sys.stderr)
+                     if handle_errors:
+                         continue
+                     else:
+                         raise
+
+                 yield download
+
+
+ def report_downloads(
+         shared_proto_db_folder: typing.Union[str, os.PathLike],
+         out_csv_path: typing.Union[str, os.PathLike], utf16_paths=True):
+
+     with pathlib.Path(out_csv_path).open("tx", encoding="utf-8", newline="") as out:
+         writer = csv.writer(out, csv.excel, quoting=csv.QUOTE_ALL, quotechar="\"", escapechar="\\")
+         writer.writerow([
+             "seq no",
+             "guid",
+             "start time",
+             "end time",
+             "tab url",
+             "tab referrer url",
+             "download url chain",
+             "target path",
+             "hash",
+             "total bytes",
+             "mime type",
+             "original mime type"
+         ])
+         for download in read_downloads(shared_proto_db_folder, handle_errors=True, utf16_paths=utf16_paths):
+             writer.writerow([
+                 str(download.level_db_seq_no),
+                 str(download.guid),
+                 download.start_time,
+                 download.end_time,
+                 download.tab_url,
+                 download.tab_referrer_url,
+                 " -> ".join(download.url_chain),
+                 download.target_path,
+                 download.hash,
+                 download.total_bytes,
+                 download.mime_type,
+                 download.original_mime_type
+             ])
+
+
+ if __name__ == '__main__':
+     if len(sys.argv) < 3:
+         print(f"USAGE: {pathlib.Path(sys.argv[0]).name} <shared_proto_db folder> <out.csv> [-u8]")
+         print()
+         print("-u8\tutf-8 target paths (use this if target paths appear garbled in the output)")
+         print()
+         exit(1)
+     report_downloads(sys.argv[1], sys.argv[2], "-u8" not in sys.argv[3:])
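
The module above can be driven from the command line (as in its `__main__` block) or imported as a library. A minimal sketch of library use, assuming the package layout shown in the file list; the profile path below is a hypothetical example:

    from ccl_chromium_reader import ccl_shared_proto_db_downloads

    # "Profile/shared_proto_db" is a placeholder: point this at the
    # shared_proto_db folder inside a real Chrome/Chromium profile.
    for dl in ccl_shared_proto_db_downloads.read_downloads(
            "Profile/shared_proto_db", handle_errors=True):
        print(dl.record_location, dl.url, dl.target_path, dl.file_size)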
ccl_chromium_reader/common.py
@@ -0,0 +1,19 @@
+ import re
+ import typing
+ import collections.abc as col_abc
+
+
+ KeySearch = typing.Union[str, re.Pattern, col_abc.Collection[str], col_abc.Callable[[str], bool]]
+
+
+ def is_keysearch_hit(search: KeySearch, value: str):
+     if isinstance(search, str):
+         return value == search
+     elif isinstance(search, re.Pattern):
+         return search.search(value) is not None
+     elif isinstance(search, col_abc.Collection):
+         return value in set(search)
+     elif isinstance(search, col_abc.Callable):
+         return search(value)
+     else:
+         raise TypeError(f"Unexpected type: {type(search)} (expects: {KeySearch})")
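
`KeySearch` is the union type the reader APIs accept wherever a key or host filter is expected, and `is_keysearch_hit` dispatches on the concrete type (the string check must come first, since `str` is itself a `Collection`). An illustrative sketch; the values are made up:

    import re
    from ccl_chromium_reader.common import is_keysearch_hit

    assert is_keysearch_hit("example.com", "example.com")                   # exact string match
    assert is_keysearch_hit(re.compile(r"\.com$"), "example.com")           # regex search
    assert is_keysearch_hit({"a.com", "example.com"}, "example.com")        # collection membership
    assert is_keysearch_hit(lambda v: v.startswith("exam"), "example.com")  # predicate function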
ccl_chromium_reader/download_common.py
@@ -0,0 +1,78 @@
+ import dataclasses
+ import datetime
+ import struct
+ import enum
+
+ from .serialization_formats import ccl_protobuff as pb
+
+
+ class DownloadSource(enum.Enum):
+     shared_proto_db = 1
+     history_db = 2
+
+
+ @dataclasses.dataclass(frozen=True)
+ class Download:  # TODO: all of the parameters
+     record_source: DownloadSource
+     record_id: int
+     guid: str
+     hash: str
+     url_chain: tuple[str, ...]
+     tab_url: str
+     tab_referrer_url: str
+     target_path: str
+     mime_type: str
+     original_mime_type: str
+     total_bytes: str
+     start_time: datetime.datetime
+     end_time: datetime.datetime
+
+     @property
+     def level_db_seq_no(self):
+         if self.record_source == DownloadSource.shared_proto_db:
+             return self.record_id
+
+     @property
+     def record_location(self) -> str:
+         if self.record_source == DownloadSource.shared_proto_db:
+             return f"Leveldb Seq: {self.record_id}"
+         elif self.record_source == DownloadSource.history_db:
+             return f"SQLite Rowid: {self.record_id}"
+         raise NotImplementedError()
+
+     @property
+     def url(self) -> str:
+         return self.url_chain[-1]
+
+     @property
+     def file_size(self) -> int:
+         return int(self.total_bytes)
+
+     @classmethod
+     def from_pb(cls, seq: int, proto: pb.ProtoObject, *, target_path_is_utf_16=True):
+         if not proto.only("download_info").value:
+             raise ValueError("download_info is empty")
+         target_path_raw = proto.only("download_info").only("in_progress_info").only("target_path").value
+         path_proto_length, path_char_count = struct.unpack("<II", target_path_raw[0:8])
+         if path_proto_length != len(target_path_raw) - 4:
+             raise ValueError("Invalid pickle for target path")
+         if target_path_is_utf_16:
+             target_path = target_path_raw[8: 8 + (path_char_count * 2)].decode("utf-16-le")
+         else:
+             target_path = target_path_raw[8: 8 + path_char_count].decode("utf-8")
+
+         return cls(
+             DownloadSource.shared_proto_db,
+             seq,
+             proto.only("download_info").only("guid").value,
+             proto.only("download_info").only("in_progress_info").only("hash").value.hex(),
+             tuple(x.value for x in proto.only("download_info").only("in_progress_info")["url_chain"]),
+             proto.only("download_info").only("in_progress_info").only("tab_url").value,
+             proto.only("download_info").only("in_progress_info").only("tab_referrer_url").value,
+             target_path,
+             proto.only("download_info").only("in_progress_info").only("mime_type").value,
+             proto.only("download_info").only("in_progress_info").only("original_mime_type").value,
+             proto.only("download_info").only("in_progress_info").only("total_bytes").value,
+             proto.only("download_info").only("in_progress_info").only("start_time").value,
+             proto.only("download_info").only("in_progress_info").only("end_time").value,
+         )
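
The `target_path` blob parsed by `from_pb` is a Chromium pickle: a 4-byte payload length (which counts everything after itself, hence the `len(raw) - 4` check), a 4-byte character count, then the path characters (UTF-16-LE by default). A self-contained sketch of that layout using synthetic data; note that real pickles may additionally pad the payload to a 4-byte boundary:

    import struct

    path = "C:\\Users\\test\\Downloads\\file.zip"
    encoded = path.encode("utf-16-le")
    payload = struct.pack("<I", len(path)) + encoded   # char count + character data
    raw = struct.pack("<I", len(payload)) + payload    # leading length prefix

    proto_length, char_count = struct.unpack("<II", raw[0:8])
    assert proto_length == len(raw) - 4                # the same check from_pb performs
    assert raw[8: 8 + char_count * 2].decode("utf-16-le") == path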
ccl_chromium_reader/profile_folder_protocols.py
@@ -0,0 +1,276 @@
+ import datetime
+ import pathlib
+ import typing
+ import collections.abc as col_abc
+
+ from .common import KeySearch, is_keysearch_hit
+
+
+ class HasRecordLocationProtocol(typing.Protocol):
+     @property
+     def record_location(self) -> str:
+         raise NotImplementedError()
+
+
+ @typing.runtime_checkable
+ class LocalStorageRecordProtocol(HasRecordLocationProtocol, typing.Protocol):
+     @property
+     def storage_key(self) -> str:
+         raise NotImplementedError()
+
+     @property
+     def script_key(self) -> str:
+         raise NotImplementedError()
+
+     @property
+     def value(self) -> str:
+         raise NotImplementedError()
+
+
+ @typing.runtime_checkable
+ class SessionStorageRecordProtocol(HasRecordLocationProtocol, typing.Protocol):
+     host: typing.Optional[str]
+     key: str
+     value: str
+
+
+ @typing.runtime_checkable
+ class HistoryRecordProtocol(HasRecordLocationProtocol, typing.Protocol):
+     url: str
+     title: str
+     visit_time: datetime.datetime
+     # TODO: Assess whether the parent/child visits can be part of the protocol
+
+
+ @typing.runtime_checkable
+ class IdbKeyProtocol(typing.Protocol):
+     raw_key: bytes
+     value: typing.Any
+
+
+ @typing.runtime_checkable
+ class IndexedDbRecordProtocol(HasRecordLocationProtocol, typing.Protocol):
+     key: IdbKeyProtocol
+     value: typing.Any
+
+
+ class CacheMetadataProtocol(typing.Protocol):
+     request_time: datetime.datetime
+     http_header_attributes: typing.Iterable[tuple[str, str]]
+
+     def get_attribute(self, attribute: str) -> list[str]:
+         raise NotImplementedError()
+
+
+ class CacheKeyProtocol(typing.Protocol):
+     raw_key: str
+     url: str
+
+
+ class CacheRecordProtocol(typing.Protocol):
+     key: CacheKeyProtocol
+     metadata: CacheMetadataProtocol
+     data: bytes
+     metadata_location: typing.Any
+     data_location: typing.Any
+     was_decompressed: bool
+
+
+ class DownloadRecordProtocol(HasRecordLocationProtocol, typing.Protocol):
+     url: str
+     start_time: typing.Optional[datetime.datetime]
+     end_time: typing.Optional[datetime.datetime]
+     target_path: typing.Optional[str]
+     file_size: int
+
+
+ @typing.runtime_checkable
+ class BrowserProfileProtocol(typing.Protocol):
+     def close(self):
+         raise NotImplementedError()
+
+     def iter_local_storage_hosts(self) -> col_abc.Iterable[str]:
+         """
+         Iterates the hosts in this profile's local storage
+         """
+         raise NotImplementedError()
+
+     def iter_local_storage(
+             self, storage_key: typing.Optional[KeySearch] = None, script_key: typing.Optional[KeySearch] = None, *,
+             include_deletions=False, raise_on_no_result=False) -> col_abc.Iterable[LocalStorageRecordProtocol]:
+         """
+         Iterates this profile's local storage records
+
+         :param storage_key: storage key (host) for the records. This can be one of: a single string;
+             a collection of strings; a regex pattern; a function that takes a string and returns a bool;
+             or None (the default) in which case all storage keys are considered.
+         :param script_key: script defined key for the records. This can be one of: a single string;
+             a collection of strings; a regex pattern; a function that takes a string and returns a bool;
+             or None (the default) in which case all script keys are considered.
+         :param include_deletions: if True, records related to deletions will be included (these will
+             have None as values).
+         :param raise_on_no_result: if True, raise a KeyError if no matching storage keys are found.
+         :return: iterable of local storage records
+         """
+         raise NotImplementedError()
+
+     def iter_session_storage_hosts(self) -> col_abc.Iterable[str]:
+         """
+         Iterates this profile's session storage hosts
+         """
+         raise NotImplementedError()
+
+     def iter_session_storage(
+             self, host: typing.Optional[KeySearch] = None, key: typing.Optional[KeySearch] = None, *,
+             include_deletions=False, raise_on_no_result=False) -> col_abc.Iterable[SessionStorageRecordProtocol]:
+         """
+         Iterates this profile's session storage records
+
+         :param host: storage key (host) for the records. This can be one of: a single string;
+             a collection of strings; a regex pattern; a function that takes a string (each host) and
+             returns a bool; or None (the default) in which case all hosts are considered.
+         :param key: script defined key for the records. This can be one of: a single string;
+             a collection of strings; a regex pattern; a function that takes a string and returns a bool; or
+             None (the default) in which case all keys are considered.
+         :param include_deletions: if True, records related to deletions will be included (these will have
+             None as values).
+         :param raise_on_no_result: if True, raise a KeyError if no matching storage keys are found.
+
+         :return: iterable of SessionStoreValue
+         """
+         raise NotImplementedError()
+
+     def iter_indexeddb_hosts(self) -> col_abc.Iterable[str]:
+         """
+         Iterates the hosts present in the IndexedDB folder. These values are what should be used to load the
+         databases directly.
+         """
+         raise NotImplementedError()
+
+     def get_indexeddb(self, host: str):
+         """
+         Returns the database with the host provided. The host should be one of the values returned by
+         :func:`~iter_indexeddb_hosts`. The database will be opened on-demand if it hasn't previously been opened.
+
+         :param host: the host to get
+         """
+         # TODO typehint return type once it's also abstracted
+         raise NotImplementedError()
+
+     def iter_indexeddb_records(
+             self, host_id: typing.Optional[KeySearch], database_name: typing.Optional[KeySearch] = None,
+             object_store_name: typing.Optional[KeySearch] = None, *,
+             raise_on_no_result=False, include_deletions=False,
+             bad_deserializer_data_handler=None) -> col_abc.Iterable[IndexedDbRecordProtocol]:
+         """
+         Iterates IndexedDB records in this profile.
+
+         :param host_id: the host for the records; relates to the host-named folder in the IndexedDB folder. The
+             possible values for this profile are returned by :func:`~iter_indexeddb_hosts`. This can be one of:
+             a single string; a collection of strings; a regex pattern; a function that takes a string (each host)
+             and returns a bool; or None, in which case all hosts are considered. Be cautious about supplying a
+             parameter which leads to unnecessary databases being opened, as each database incurs a set-up cost
+             the first time it is opened.
+         :param database_name: the database name for the records. This can be one of: a single string; a collection
+             of strings; a regex pattern; a function that takes a string (each database name) and returns a bool;
+             or None (the default) in which case all databases are considered.
+         :param object_store_name: the object store name of the records. This can be one of: a single string;
+             a collection of strings; a regex pattern; a function that takes a string (each object store name) and
+             returns a bool; or None (the default) in which case all object stores are considered.
+         :param raise_on_no_result: if True, raise a KeyError if no matching records are found.
+         :param include_deletions: if True, records related to deletions will be included (these will have None as
+             values).
+         :param bad_deserializer_data_handler: a callback function which will be executed by the underlying
+             IndexedDB reader if invalid data is encountered while reading a record, rather than raising an
+             exception. The function should take two arguments: an IdbKey object (the key of the bad record) and
+             a bytes object (the raw data). The return value of the callback is ignored by the calling code. If
+             this is None (the default) then any bad data will cause an exception to be raised.
+         """
+         raise NotImplementedError()
+
+     def iterate_history_records(
+             self, url: typing.Optional[KeySearch] = None, *,
+             earliest: typing.Optional[datetime.datetime] = None,
+             latest: typing.Optional[datetime.datetime] = None) -> col_abc.Iterable[HistoryRecordProtocol]:
+         """
+         Iterates history records for this profile.
+
+         :param url: a URL to search for. This can be one of: a single string; a collection of strings;
+             a regex pattern; a function that takes a string (each URL) and returns a bool; or None (the
+             default) in which case all records are considered.
+         :param earliest: an optional datetime which will be used to exclude records before this date.
+             NB the date should be UTC to match the database. If None, no lower limit will be placed on
+             timestamps.
+         :param latest: an optional datetime which will be used to exclude records after this date.
+             NB the date should be UTC to match the database. If None, no upper limit will be placed on
+             timestamps.
+         """
+         # TODO typehint return type once it's also abstracted
+         raise NotImplementedError()
+
+     def iterate_cache(
+             self,
+             url: typing.Optional[KeySearch] = None, *, decompress=True, omit_cached_data=False,
+             **kwargs: typing.Union[bool, KeySearch]) -> col_abc.Iterable[CacheRecordProtocol]:
+         """
+         Iterates cache records for this profile.
+
+         :param url: a URL to search for. This can be one of: a single string; a collection of strings;
+             a regex pattern; a function that takes a string (each URL) and returns a bool; or None (the
+             default) in which case all records are considered.
+         :param decompress: if True (the default), data from the cache which is compressed (as per the
+             content-encoding header field) will be decompressed when read if the compression format is
+             supported (currently deflate, gzip and brotli are supported).
+         :param omit_cached_data: does not collect the cached data and omits it from each `CacheResult`
+             object. Should be faster in cases where only metadata recovery is required.
+         :param kwargs: further keyword arguments are used to search based upon header fields. The
+             keyword should be the header field name, with underscores replacing hyphens (e.g.,
+             content-encoding becomes content_encoding). The value should be one of: a Boolean (in which
+             case only records with this field present will be included if True, and vice versa); a single
+             string; a collection of strings; a regex pattern; a function that takes a string (the value)
+             and returns a bool.
+         """
+         raise NotImplementedError()
+
+     def iter_downloads(
+             self, *, download_url: typing.Optional[KeySearch] = None,
+             tab_url: typing.Optional[KeySearch] = None) -> col_abc.Iterable[DownloadRecordProtocol]:
+         """
+         Iterates download records for this profile
+
+         :param download_url: a URL related to the downloaded resource. This can be one of: a single string;
+             a collection of strings; a regex pattern; a function that takes a string (each URL) and returns
+             a bool; or None (the default) in which case all records are considered.
+         :param tab_url: a URL related to the page the user was accessing when this download was started.
+             This can be one of: a single string; a collection of strings; a regex pattern; a function that
+             takes a string (each URL) and returns a bool; or None (the default) in which case all records
+             are considered.
+         """
+         raise NotImplementedError()
+
+     @property
+     def path(self) -> pathlib.Path:
+         """The input path of this browser profile"""
+         raise NotImplementedError()
+
+     @property
+     def local_storage(self):
+         """The local storage object for this browser profile"""
+         raise NotImplementedError()
+
+     @property
+     def session_storage(self):
+         """The session storage object for this browser profile"""
+         raise NotImplementedError()
+
+     @property
+     def cache(self):
+         """The cache for this browser profile"""
+         raise NotImplementedError()
+
+     @property
+     def history(self):
+         """The history for this browser profile"""
+         raise NotImplementedError()
+
+     @property
+     def browser_type(self) -> str:
+         """The name of the browser type for this profile"""
+         raise NotImplementedError()
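
Because several of these protocols are decorated with `@typing.runtime_checkable`, record classes from any reader can be verified structurally with `isinstance` without inheriting from the protocol. A minimal sketch with a hypothetical record class:

    from ccl_chromium_reader.profile_folder_protocols import LocalStorageRecordProtocol

    class ExampleRecord:  # hypothetical stand-in for a concrete reader's record type
        record_location = "Leveldb Seq: 1"
        storage_key = "https://example.com"
        script_key = "theme"
        value = "dark"

    # runtime_checkable protocols only check that the attributes exist, not their types
    assert isinstance(ExampleRecord(), LocalStorageRecordProtocol)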