linear_mcp_fast-0.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ccl_chromium_reader/__init__.py +2 -0
- ccl_chromium_reader/ccl_chromium_cache.py +1335 -0
- ccl_chromium_reader/ccl_chromium_filesystem.py +302 -0
- ccl_chromium_reader/ccl_chromium_history.py +357 -0
- ccl_chromium_reader/ccl_chromium_indexeddb.py +1060 -0
- ccl_chromium_reader/ccl_chromium_localstorage.py +454 -0
- ccl_chromium_reader/ccl_chromium_notifications.py +268 -0
- ccl_chromium_reader/ccl_chromium_profile_folder.py +568 -0
- ccl_chromium_reader/ccl_chromium_sessionstorage.py +368 -0
- ccl_chromium_reader/ccl_chromium_snss2.py +332 -0
- ccl_chromium_reader/ccl_shared_proto_db_downloads.py +189 -0
- ccl_chromium_reader/common.py +19 -0
- ccl_chromium_reader/download_common.py +78 -0
- ccl_chromium_reader/profile_folder_protocols.py +276 -0
- ccl_chromium_reader/serialization_formats/__init__.py +0 -0
- ccl_chromium_reader/serialization_formats/ccl_blink_value_deserializer.py +401 -0
- ccl_chromium_reader/serialization_formats/ccl_easy_chromium_pickle.py +133 -0
- ccl_chromium_reader/serialization_formats/ccl_protobuff.py +276 -0
- ccl_chromium_reader/serialization_formats/ccl_v8_value_deserializer.py +627 -0
- ccl_chromium_reader/storage_formats/__init__.py +0 -0
- ccl_chromium_reader/storage_formats/ccl_leveldb.py +582 -0
- ccl_simplesnappy/__init__.py +1 -0
- ccl_simplesnappy/ccl_simplesnappy.py +306 -0
- linear_mcp_fast/__init__.py +8 -0
- linear_mcp_fast/__main__.py +6 -0
- linear_mcp_fast/reader.py +433 -0
- linear_mcp_fast/server.py +367 -0
- linear_mcp_fast/store_detector.py +117 -0
- linear_mcp_fast-0.1.0.dist-info/METADATA +160 -0
- linear_mcp_fast-0.1.0.dist-info/RECORD +39 -0
- linear_mcp_fast-0.1.0.dist-info/WHEEL +5 -0
- linear_mcp_fast-0.1.0.dist-info/entry_points.txt +2 -0
- linear_mcp_fast-0.1.0.dist-info/top_level.txt +4 -0
- tools_and_utilities/Chromium_dump_local_storage.py +111 -0
- tools_and_utilities/Chromium_dump_session_storage.py +92 -0
- tools_and_utilities/benchmark.py +35 -0
- tools_and_utilities/ccl_chrome_audit.py +651 -0
- tools_and_utilities/dump_indexeddb_details.py +59 -0
- tools_and_utilities/dump_leveldb.py +53 -0
tools_and_utilities/ccl_chrome_audit.py
@@ -0,0 +1,651 @@
+"""
+Copyright 2022, CCL Forensics
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+"""
+
+import base64
+import json
+import pathlib
+import re
+import sys
+import os
+import typing
+import abc
+import sqlite3
+import datetime
+import win32crypt
+import Crypto.Cipher.AES
+from ccl_chromium_reader import ccl_chromium_localstorage
+from ccl_chromium_reader import ccl_chromium_sessionstorage
+from ccl_chromium_reader import ccl_chromium_indexeddb
+from ccl_chromium_reader import ccl_chromium_filesystem
+from ccl_chromium_reader import ccl_shared_proto_db_downloads
+from ccl_chromium_reader import ccl_chromium_cache
+from ccl_chromium_reader import ccl_chromium_notifications
+from ccl_chromium_reader import ccl_chromium_snss2
+
+sys.stdout.reconfigure(encoding="utf-8")
+
+__version__ = "0.3"
+__description__ = "Audits multiple Chromium data stores"
+__contact__ = "Alex Caithness"
+
+
+WINDOWS = False
+
+CHROME_EPOCH = datetime.datetime(1601, 1, 1, 0, 0, 0)
+
+
+def chrome_time(microseconds: typing.Optional[int], allow_none=True):
+    if microseconds is not None:
+        return CHROME_EPOCH + datetime.timedelta(microseconds=microseconds)
+    elif allow_none:
+        return None
+    raise ValueError("microseconds cannot be None")
+
+
+class AbstractAuditor(abc.ABC):
+    def __init__(self, name: str):
+        self.name = name
+
+    @property
+    @abc.abstractmethod
+    def headers(self) -> tuple[str, ...]:
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    def audit(self, profile_root: typing.Union[os.PathLike, str], domain_re: re.Pattern) -> typing.Iterator[tuple]:
+        raise NotImplementedError
+
+
+class BookmarksAuditor(AbstractAuditor):
+    @property
+    def headers(self) -> tuple[str, ...]:
+        return "id", "guid", "path", "url", "added time",
+
+    def audit(self, profile_root: typing.Union[os.PathLike, str], domain_re: re.Pattern) -> typing.Iterator[tuple]:
+        bookmarks_path = pathlib.Path(profile_root, "Bookmarks")
+        if not bookmarks_path.exists():
+            return
+
+        with bookmarks_path.open("rt", encoding="utf-8") as f:
+            bookmarks = json.load(f)
+
+        def walk_bookmarks(obj: dict, parts: list):
+            print(obj)
+            for inner in obj["children"]:
+                if inner["type"] == "folder":
+                    yield from walk_bookmarks(inner, parts + [inner["name"]])
+                elif inner["type"] == "url":
+                    if domain_re.search(inner["url"]) is not None:
+                        yield (
+                            inner["id"],
+                            inner["guid"],
+                            "/".join(parts + [inner["name"]]),
+                            inner["url"],
+                            chrome_time(int(inner["date_added"]))
+                        )
+                else:
+                    raise ValueError("unexpected bookmark type")
+
+        for key, root in bookmarks["roots"].items():
+            if key == "sync_transaction_version":
+                continue
+
+            yield from walk_bookmarks(root, [key])
+
+    def __init__(self):
+        super().__init__("Bookmarks")
+
+
+class HistoryAuditor(AbstractAuditor):
+    def __init__(self):
+        super().__init__("History")
+
+    @property
+    def headers(self) -> tuple[str, ...]:
+        return "id", "url", "title", "timestamp"
+
+    def audit(self, profile_root: typing.Union[os.PathLike, str], domain_re: re.Pattern) -> typing.Iterator[tuple]:
+        profile_folder = pathlib.Path(profile_root)
+        history_path = profile_folder / "History"
+        if not history_path.exists():
+            return
+        conn = sqlite3.connect(history_path)
+        conn.row_factory = sqlite3.Row
+        cur = conn.cursor()
+        cur.execute("""
+            SELECT
+                visits.id AS "id",
+                urls.url AS "url",
+                urls.title AS "title",
+                visits.visit_time AS "timestamp",
+                visits.visit_duration AS "duration",
+                visits.from_visit AS "from_visit",
+                visits.transition AS "transition"
+            FROM visits
+                INNER JOIN urls ON visits.url = urls.id;
+        """)
+
+        for row in cur:
+            if domain_re.search(row["url"]) is not None:
+                yield (
+                    row["id"],
+                    row["url"],
+                    row["title"],
+                    chrome_time(row["timestamp"]),
+                )
+
+        conn.close()
+
+
+class DownloadsHistoryAuditor(AbstractAuditor):
+    def __init__(self):
+        super().__init__("Downloads (History)")
+
+    @property
+    def headers(self) -> tuple[str, ...]:
+        return (
+            "id", "guid", "tab url", "tab referrer url", "target path", "total bytes",
+            "mime-type", "original mime-type", "start time"
+        )
+
+    def audit(self, profile_root: typing.Union[os.PathLike, str], domain_re: re.Pattern) -> typing.Iterator[tuple]:
+        profile_folder = pathlib.Path(profile_root)
+
+        history_path = profile_folder / "History"
+        if not history_path.exists():
+            return
+
+        conn = sqlite3.connect(history_path)
+        conn.row_factory = sqlite3.Row
+        cur = conn.cursor()
+        cur.execute("""
+            SELECT
+                downloads.id,
+                downloads.guid,
+                downloads.tab_url,
+                downloads.tab_referrer_url,
+                downloads.target_path,
+                downloads.total_bytes,
+                downloads.mime_type,
+                downloads.original_mime_type,
+                downloads.start_time
+            FROM downloads
+        """)
+
+        for row in cur:
+            if domain_re.search(row["tab_url"]) is not None or domain_re.search(row["tab_referrer_url"]) is not None:
+                yield (
+                    row["id"],
+                    row["guid"],
+                    row["tab_url"],
+                    row["tab_referrer_url"],
+                    row["target_path"],
+                    row["total_bytes"],
+                    row["mime_type"],
+                    row["original_mime_type"],
+                    chrome_time(row["start_time"])
+                )
+        conn.close()
+
+
+class DownloadsSharedProtoDb(AbstractAuditor):
+    def __init__(self):
+        super().__init__("Downloads (Shard Proto Db)")
+
+    @property
+    def headers(self) -> tuple[str, ...]:
+        return (
+            "id", "guid", "tab url", "tab referrer url", "target path", "total bytes",
+            "mime-type", "original mime-type", "start time"
+        )
+
+    def audit(self, profile_root: typing.Union[os.PathLike, str], domain_re: re.Pattern) -> typing.Iterator[tuple]:
+        shared_proto_db_folder = pathlib.Path(profile_root) / "shared_proto_db"
+        if not shared_proto_db_folder.exists():
+            return
+        for download in ccl_shared_proto_db_downloads.read_downloads(shared_proto_db_folder):
+            if (domain_re.search(download.tab_url or "") is not None or
+                    domain_re.search(download.tab_referrer_url or "") is not None):
+                yield (
+                    download.level_db_seq_no,
+                    download.guid,
+                    download.tab_url,
+                    download.tab_referrer_url,
+                    download.target_path,
+                    download.total_bytes,
+                    download.mime_type,
+                    download.original_mime_type,
+                    download.start_time
+                )
+
+
+class FaviconAuditor(AbstractAuditor):
+    def __init__(self):
+        super().__init__("Favicons")
+
+    @property
+    def headers(self) -> tuple[str, ...]:
+        return "id", "page_url", "favicon url"
+
+    def audit(self, profile_root: typing.Union[os.PathLike, str], domain_re: re.Pattern) -> typing.Iterator[tuple]:
+        favicons_path = pathlib.Path(profile_root) / "Favicons"
+        if not favicons_path.exists():
+            return
+
+        conn = sqlite3.connect(favicons_path)
+        conn.row_factory = sqlite3.Row
+        cur = conn.cursor()
+
+        cur.execute("""
+            SELECT
+                icon_mapping.id,
+                icon_mapping.page_url,
+                favicons.url
+            FROM icon_mapping
+                LEFT JOIN favicons ON icon_mapping.icon_id = favicons.id
+        """)
+
+        for row in cur:
+            if domain_re.search(row["page_url"]) is not None or domain_re.search(row["url"]) is not None:
+                yield (
+                    row["id"],
+                    row["page_url"],
+                    row["url"]
+                )
+
+        conn.close()
+
+
+class CacheAuditor(AbstractAuditor):
+    def __init__(self):
+        super().__init__("Cache")
+
+    @property
+    def headers(self) -> tuple[str, ...]:
+        return (
+            "key", "request time", "response time", "data stream file type", "data stream file selector",
+            "block number", "block count", "data stream external file number")
+
+    def audit(self, profile_root: typing.Union[os.PathLike, str], domain_re: re.Pattern,
+              override_path: typing.Optional[typing.Union[os.PathLike, str]] = None) -> typing.Iterator[tuple]:
+        cache_folder = override_path or (pathlib.Path(profile_root) / "Cache" / "Cache_Data")
+        cache_type = ccl_chromium_cache.guess_cache_class(cache_folder)
+        if cache_type == ccl_chromium_cache.ChromiumBlockFileCache:
+            with ccl_chromium_cache.ChromiumBlockFileCache(cache_folder) as cache:
+                for key, es in cache.items():
+                    metas = cache.get_metadata(key)
+                    for meta in metas:
+                        if domain_re.search(key) is not None:
+                            yield (
+                                key,
+                                meta.request_time,
+                                meta.response_time,
+                                es.data_addrs[1].file_type,
+                                es.data_addrs[1].file_selector,
+                                es.data_addrs[1].block_number,
+                                es.data_addrs[1].contiguous_blocks,
+                                es.data_addrs[1].external_file_number
+                            )
+        else:
+            with ccl_chromium_cache.ChromiumSimpleFileCache(cache_folder) as cache:
+                for key in cache.keys():
+                    if domain_re.search(key) is not None:
+                        metas = zip(cache.get_metadata(key), cache.get_file_for_key(key))
+                        for meta, file_name in metas:
+                            yield (
+                                key,
+                                meta.request_time,
+                                meta.response_time,
+                                file_name
+                            )
+
+
+class CookieAuditor(AbstractAuditor):
+    def __init__(self):
+        super().__init__("Cookies")
+
+    @property
+    def headers(self) -> tuple[str, ...]:
+        return "ID", "Host", "Name", "Value", "Creation Time"
+
+    @staticmethod
+    def decrypt_windows_cookie(encryption_key: bytes, cipher_text: bytes):
+        cipher = Crypto.Cipher.AES.new(encryption_key, nonce=cipher_text[3:3+12], mode=Crypto.Cipher.AES.MODE_GCM)
+        value = cipher.decrypt_and_verify(cipher_text[3+12:-16], cipher_text[-16:])
+        return value
+
+    @staticmethod
+    def nop(*args):
+        return None
+
+    def audit(self, profile_root: typing.Union[os.PathLike, str], domain_re: re.Pattern) -> typing.Iterator[tuple]:
+        cookie_path = pathlib.Path(profile_root) / "Cookies"  # old location on windows, still here on some platforms
+        if not cookie_path.exists():
+            cookie_path = pathlib.Path(profile_root) / "Network" / "Cookies"
+            if not cookie_path.exists():
+                return
+
+        local_state_path = pathlib.Path(profile_root).parent / "Local State"
+
+        decrypter = self.nop
+
+        if local_state_path.exists():
+            if WINDOWS:
+                with local_state_path.open("rt", encoding="utf-8") as f:
+                    local_state = json.load(f)
+                encryption_key = base64.b64decode(local_state['os_crypt']['encrypted_key'])
+                encryption_key = encryption_key[5:]
+                encryption_key = win32crypt.CryptUnprotectData(encryption_key, None, None, None, 0)[1]
+                decrypter = lambda x: self.decrypt_windows_cookie(encryption_key, x)
+        else:
+            print("Cannot get Local State file to decode cookie values")
+
+        conn = sqlite3.connect(cookie_path)
+        conn.row_factory = sqlite3.Row
+        cur = conn.cursor()
+        cur.execute("""SELECT
+                rowid,
+                cookies.host_key,
+                cookies.name,
+                cookies.value,
+                cookies.encrypted_value,
+                cookies.creation_utc
+            FROM cookies""")
+
+        for row in cur:
+            if domain_re.search(row["host_key"]):
+                value = row["value"] if row["value"] is not None else decrypter(row["encrypted_value"]).decode("utf-8")
+                rowid = row["rowid"] if "rowid" in row else row["creation_utc"]
+                yield (
+                    rowid,
+                    row["host_key"],
+                    row["name"],
+                    value,
+                    chrome_time(row["creation_utc"])
+                )
+
+        conn.close()
+
+
+class LocalStorageAuditor(AbstractAuditor):
+    def __init__(self):
+        super().__init__("Local Storage")
+
+    @property
+    def headers(self) -> tuple[str, ...]:
+        return "id", "storage key", "script key", "value"
+
+    def audit(self, profile_root: typing.Union[os.PathLike, str], domain_re: re.Pattern) -> typing.Iterator[tuple]:
+        ldb_path = pathlib.Path(profile_root) / "Local Storage" / "leveldb"
+        if not ldb_path.exists():
+            return
+
+        local_storage = ccl_chromium_localstorage.LocalStoreDb(ldb_path)
+        for storage_key in local_storage.iter_storage_keys():
+            if domain_re.search(storage_key) is not None:
+                for rec in local_storage.iter_records_for_storage_key(storage_key):
+                    yield (
+                        rec.leveldb_seq_number,
+                        rec.storage_key,
+                        rec.script_key,
+                        rec.value
+                    )
+
+        local_storage.close()
+
+
+class SessionStorageAuditor(AbstractAuditor):
+    def __init__(self):
+        super().__init__("Session Storage")
+
+    @property
+    def headers(self) -> tuple[str, ...]:
+        return "id", "host", "key", "value"
+
+    def audit(self, profile_root: typing.Union[os.PathLike, str], domain_re: re.Pattern) -> typing.Iterator[tuple]:
+        ldb_path = pathlib.Path(profile_root) / "Session Storage"
+        if not ldb_path.exists():
+            return
+
+        session_storage = ccl_chromium_sessionstorage.SessionStoreDb(ldb_path)
+        value: typing.Optional[ccl_chromium_sessionstorage.SessionStoreValue] = None
+        for host in session_storage.iter_hosts():
+            if domain_re.search(host) is not None:
+                for ss_key, values in session_storage.get_all_for_host(host).items():
+                    for value in values:
+                        yield (
+                            value.leveldb_sequence_number,
+                            host,
+                            ss_key,
+                            value.value
+                        )
+
+        session_storage.close()
+
+
+class IndexedDbAuditor(AbstractAuditor):
+    def __init__(self):
+        super().__init__("IndexedDb")
+
+    @property
+    def headers(self) -> tuple[str, ...]:
+        return "id", "origin", "database", "object store", "key", "value"
+
+    def audit(self, profile_root: typing.Union[os.PathLike, str], domain_re: re.Pattern) -> typing.Iterator[tuple]:
+        idb_root = pathlib.Path(profile_root) / "IndexedDB"
+        if not idb_root.exists():
+            return
+
+        def bad_deserializer_data_handler(key: ccl_chromium_indexeddb.IdbKey, buffer: bytes):
+            print(f"Error reading IndexedDb record {key}", file=sys.stderr)
+
+        for ldb_folder in idb_root.glob("*.leveldb"):
+            if domain_re.search(ldb_folder.stem) is not None:
+                idb = ccl_chromium_indexeddb.WrappedIndexDB(ldb_folder)
+                for database_id in idb.database_ids:
+                    database = idb[database_id.dbid_no]
+                    for obj_store_name in database.object_store_names:
+                        obj_store = database.get_object_store_by_name(obj_store_name)
+                        for rec in obj_store.iterate_records(
+                                bad_deserializer_data_handler=bad_deserializer_data_handler):
+                            yield (
+                                rec.sequence_number,
+                                database.origin,
+                                database.name,
+                                obj_store.name,
+                                rec.key,
+                                rec.value
+                            )
+
+
+class FileSystemAuditor(AbstractAuditor):
+    def __init__(self):
+        super().__init__("FileSystem")
+
+    @property
+    def headers(self) -> tuple[str, ...]:
+        return "Folder ID", "Sequence Number", "Name", "Data Path", "Storage Type"
+
+    def audit(self, profile_root: typing.Union[os.PathLike, str], domain_re: re.Pattern) -> typing.Iterator[tuple]:
+        file_system_path = pathlib.Path(profile_root) / "File System"
+        if not file_system_path.exists():
+            return
+
+        file_system = ccl_chromium_filesystem.FileSystem(file_system_path)
+        for origin in file_system.get_origins():
+            if domain_re.search(origin):
+                for folder in file_system.get_folders_for_origin(origin):
+                    origin_storage = file_system.get_storage_for_folder(folder)
+                    for file_id, file_info in origin_storage.get_file_listing():
+                        yield (
+                            file_info.folder_id,
+                            file_info.seq_no,
+                            file_info.name,
+                            file_info.data_path,
+                            "Persistent" if file_info.is_persistent else "Temporary"
+                        )
+
+
+class NotificationAuditor(AbstractAuditor):
+    def __init__(self):
+        super().__init__("Notifications")
+
+    @property
+    def headers(self) -> tuple[str, ...]:
+        return "ID", "Origin", "Title", "Body", "Data", "Timestamp"
+
+    def audit(self, profile_root: typing.Union[os.PathLike, str], domain_re: re.Pattern) -> typing.Iterator[tuple]:
+        notification_path = pathlib.Path(profile_root) / "Platform Notifications"
+
+        if not notification_path.exists():
+            return
+
+        with ccl_chromium_notifications.NotificationReader(notification_path) as reader:
+            for notification in reader.read_notifications():
+                if domain_re.search(notification.origin) is not None:
+                    yield (
+                        notification.level_db_info.seq_no,
+                        notification.origin,
+                        json.dumps(notification.title),
+                        json.dumps(notification.body),
+                        json.dumps(notification.data),
+                        notification.timestamp
+                    )
+
+
+class LoginAuditor(AbstractAuditor):
+    def __init__(self):
+        super().__init__("Logins")
+
+    @property
+    def headers(self) -> tuple[str, ...]:
+        return "ID", "database", "origin url", "action url", "username_value"
+
+    def audit(self, profile_root: typing.Union[os.PathLike, str], domain_re: re.Pattern) -> typing.Iterator[tuple]:
+        for db_name in ("Login Data", "Login Data For Account"):
+            db_path = pathlib.Path(profile_root) / db_name
+            if not db_path.exists():
+                continue
+            conn = sqlite3.connect(db_path)
+            conn.row_factory = sqlite3.Row
+            cur = conn.cursor()
+            cur.execute("""SELECT
+                    rowid,
+                    logins.origin_url,
+                    logins.action_url,
+                    logins.username_value
+                FROM logins""")
+
+            for row in cur:
+                if domain_re.search(row["origin_url"]) is not None or domain_re.search(row["action_url"]):
+                    yield (
+                        row["id"],
+                        db_name,
+                        row["origin_url"],
+                        row["action_url"],
+                        row["username_value"]
+                    )
+            conn.close()
+
+
+class SnssAuditor(AbstractAuditor):
+    def __init__(self):
+        super().__init__("Snss")
+
+    @property
+    def headers(self) -> tuple[str, ...]:
+        return "file", "offset", "index", "timestamp", "title", "url", "original_request_url", "referrer_url",
+
+    def audit(self, profile_root: typing.Union[os.PathLike, str], domain_re: re.Pattern) -> typing.Iterator[tuple]:
+        session_folder = pathlib.Path(profile_root) / "Sessions"
+        if not session_folder.exists():
+            return
+        for snss_file in session_folder.iterdir():
+            if not snss_file.is_file():
+                continue
+            if not (snss_file.name.startswith("Session_") or snss_file.name.startswith("Tabs_")):
+                continue
+
+            with snss_file.open("rb") as f:
+                snss = ccl_chromium_snss2.SnssFile(
+                    ccl_chromium_snss2.SnssFileType.Session if snss_file.name.startswith("Session_")
+                    else ccl_chromium_snss2.SnssFileType.Tab, f)
+                for navigation_entry in snss.iter_session_commands():
+                    if not isinstance(navigation_entry, ccl_chromium_snss2.NavigationEntry):
+                        continue  # TODO: There may well be other useful session commands to look into later
+
+                    # TODO: add PageState stuff once it's in place in ccl_chromium_snss2
+                    yield (
+                        snss_file.name,
+                        navigation_entry.offset,
+                        navigation_entry.index,
+                        navigation_entry.timestamp,
+                        navigation_entry.title,
+                        navigation_entry.url,
+                        navigation_entry.original_request_url,
+                        navigation_entry.referrer_url,
+                    )
+
+
+AUDITORS: typing.Collection[AbstractAuditor] = (
+    BookmarksAuditor(),
+    HistoryAuditor(),
+    DownloadsHistoryAuditor(),
+    DownloadsSharedProtoDb(),
+    FaviconAuditor(),
+    CacheAuditor(),
+    CookieAuditor(),
+    LocalStorageAuditor(),
+    SessionStorageAuditor(),
+    IndexedDbAuditor(),
+    FileSystemAuditor(),
+    NotificationAuditor(),
+    LoginAuditor(),
+    SnssAuditor()
+)
+
+
+def main(args):
+    profile_folder = pathlib.Path(args[0])
+    domain_re = re.compile(args[1])
+    cache_folder = None if len(args) < 3 else args[2]
+
+    for auditor in AUDITORS:
+        print("-" * 72)
+        print(auditor.name)
+        print("-" * 72)
+        print("\t".join(auditor.headers))
+
+        if isinstance(auditor, CacheAuditor) and cache_folder is not None:
+            results = auditor.audit(profile_folder, domain_re, cache_folder)
+        else:
+            results = auditor.audit(profile_folder, domain_re)
+
+        for result in results:
+            print("\t".join(str(x) for x in result))
+
+
+if __name__ == '__main__':
+    if len(sys.argv) < 2:
+        print(f"{pathlib.Path(sys.argv[0]).name} <chrome profile folder> <pattern for url matching> [cache folder (for mobile)]")
+        exit(1)
+    main(sys.argv[1:])