claude-backup 0.1.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- claude_backup/__init__.py +1039 -0
- claude_backup/__main__.py +4 -0
- claude_backup-0.1.9.dist-info/METADATA +160 -0
- claude_backup-0.1.9.dist-info/RECORD +7 -0
- claude_backup-0.1.9.dist-info/WHEEL +4 -0
- claude_backup-0.1.9.dist-info/entry_points.txt +3 -0
- claude_backup-0.1.9.dist-info/licenses/LICENSE +121 -0
|
@@ -0,0 +1,1039 @@
|
|
|
1
|
+
"""Unofficial, unsanctioned tool to backup Claude.ai chats to local files."""
|
|
2
|
+
|
|
3
|
+
# pyright: reportAny=false, reportExplicitAny=false
|
|
4
|
+
# pyright: reportImplicitOverride=false, reportUnusedCallResult=false
|
|
5
|
+
# pyright: reportPrivateUsage=false, reportIncompatibleVariableOverride=false
|
|
6
|
+
|
|
7
|
+
from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser
|
|
8
|
+
from asyncio import Task
|
|
9
|
+
from collections import defaultdict
|
|
10
|
+
from collections.abc import (
|
|
11
|
+
AsyncGenerator,
|
|
12
|
+
AsyncIterator,
|
|
13
|
+
Awaitable,
|
|
14
|
+
Callable,
|
|
15
|
+
Iterable,
|
|
16
|
+
Iterator,
|
|
17
|
+
)
|
|
18
|
+
from contextlib import aclosing, suppress
|
|
19
|
+
from dataclasses import dataclass, field
|
|
20
|
+
from datetime import datetime
|
|
21
|
+
from pathlib import Path
|
|
22
|
+
from tempfile import NamedTemporaryFile
|
|
23
|
+
from types import TracebackType
|
|
24
|
+
from typing import Any, ClassVar, TypeAlias, TypeVar, cast, final
|
|
25
|
+
from uuid import UUID
|
|
26
|
+
import asyncio
|
|
27
|
+
import json
|
|
28
|
+
import os
|
|
29
|
+
import shutil
|
|
30
|
+
import sys
|
|
31
|
+
|
|
32
|
+
from fake_useragent import UserAgent
|
|
33
|
+
from platformdirs import user_data_dir
|
|
34
|
+
from aiohttp import (
|
|
35
|
+
ClientResponseError,
|
|
36
|
+
ClientSession,
|
|
37
|
+
ClientTimeout,
|
|
38
|
+
CookieJar,
|
|
39
|
+
TCPConnector,
|
|
40
|
+
)
|
|
41
|
+
from yarl import URL
|
|
42
|
+
import browser_cookie3 # pyright: ignore[reportMissingTypeStubs]
|
|
43
|
+
|
|
44
|
+
__version__ = "0.1.9"

# the public API of the module, in rough dependency order
__all__ = (
    "__version__",
    "main",
    "get_session_key",
    "Client",
    "Store",
    "Syncer",
    "APIObject",
    "Immutable",
    "Nameable",
    "Timestamped",
    "Loadable",
    "Account",
    "Membership",
    "Organization",
    "Chats",
    "ChatsEntry",
    "Chat",
    "Json",
    "JsonD",
)

# any JSON value (recursively), and a JSON object (dict) specifically
Json: TypeAlias = dict[str, "Json"] | list["Json"] | str | int | float | bool | None
JsonD: TypeAlias = dict[str, Json]

T = TypeVar("T")  # general-purpose type variable for the async helpers below
T_APIObject = TypeVar("T_APIObject", bound="APIObject")  # fluent APIObject returns
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
@dataclass(slots=True)
class Client:
    """Authenticated HTTP client for the (unofficial) claude.ai JSON API.

    Authentication is a single ``sessionKey`` cookie lifted from a logged-in
    browser; requests otherwise look like a plain Chrome GET.
    """

    session_key: str  # value of the claude.ai "sessionKey" browser cookie
    retries: int = 10  # total attempts per request, including the first
    min_retry_delay: float = 1.0  # initial backoff, doubled per failure...
    max_retry_delay: float = 60.0  # ...capped at this many seconds
    session: ClientSession = field(init=False)  # built in __post_init__

    def __post_init__(self):
        # a random, realistic Chrome User-Agent to accompany the cookie
        headers = {"User-Agent": UserAgent().chrome}

        jar = CookieJar()
        jar.update_cookies({"sessionKey": self.session_key}, URL("https://claude.ai/"))

        self.session = ClientSession(
            headers=headers,
            cookie_jar=jar,
            # limit=0 disables aiohttp's connection cap; concurrency is
            # throttled by the caller instead
            connector=TCPConnector(limit=0),
            # no overall timeout: large chat fetches can be slow
            timeout=ClientTimeout(total=None),
        )

    async def __aenter__(self):
        await self.session.__aenter__()

        return self

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> bool | None:
        return await self.session.__aexit__(exc_type, exc_val, exc_tb)

    async def _refresh(self, path: str) -> Json:
        """Single-attempt GET of /api/{path}; returns the decoded JSON body."""
        # i have never seen a 429 or in fact a 4xx error of any kind from this
        # api, nor ratelimit headers or fields on the returned stuff (i've seen
        # 403 Forbidden from cloudflare, in front of claude.ai, but only behind
        # vpn i.e. totally blocked, and even then no ratelimit headers), so we
        # will have to cross our fingers and hope our rate and conn limiting is
        # enough not to break anything...
        async with self.session.get(
            f"https://claude.ai/api/{path}",
            allow_redirects=False,  # csrf defense for very silly threat model
        ) as r:
            # explicitly check r.status instead of using r.raise_for_status
            # because we want to raise for r.status >= 300 not just >= 400
            if r.status in range(200, 300):
                return cast(Json, await r.json())

            raise ClientResponseError(
                r.request_info,
                r.history,
                status=r.status,
                message=r.reason or "",
                headers=r.headers,
            )

    async def refresh(self, path: str) -> Json:
        """GET /api/{path} with exponential-backoff retries.

        Retries any exception up to ``self.retries`` total attempts; the last
        attempt is made outside the loop so its exception propagates.
        """
        retry_delay = self.min_retry_delay
        for retry in range(self.retries - 1):
            try:
                return await self._refresh(path)
            except Exception as e:
                print(
                    f"Error fetching {path} (try {retry+1} of {self.retries}, "
                    + f"waiting {round(retry_delay)}s): {e}",
                    file=sys.stderr,
                )
                await asyncio.sleep(retry_delay)
                retry_delay = min(retry_delay * 2, self.max_retry_delay)

        return await self._refresh(path)
|
|
148
|
+
|
|
149
|
+
|
|
150
|
+
@dataclass(slots=True)
class Store:
    """Filesystem-backed cache of API JSON, rooted at ``store_dir``.

    The store carries a ``version`` marker file; on startup, a store written
    by an older version is migrated one step at a time via MIGRATIONS.
    """

    def _wipe(self) -> None:
        # migration of last resort: drop everything and start over
        for entry in self.store_dir.iterdir():
            if entry.is_dir():
                shutil.rmtree(entry)
            else:
                entry.unlink()

    def _set_chat_mtimes(self) -> None:
        # 0.1.8 -> 0.1.9: re-save every cached chat so its cache file's mtime
        # reflects the chat's own timestamp. the client is never used when
        # reading purely from cache, hence the None.
        if account := Account.load(None, self):  # pyright: ignore[reportArgumentType]
            for membership in account.memberships():
                org = membership.organization()
                if chats := org.chat_list():
                    for entry in chats.cached_entries():
                        if chat := entry.load_chat():
                            self.save(chat.store_path(), chat.get_data(), chat.get_mtime())

    # maps a found on-disk version to (next version, migration step)
    MIGRATIONS: ClassVar[
        defaultdict[str | None, tuple[str, Callable[["Store"], None]]]
    ] = defaultdict(
        # migrate from unknown versions by not migrating anything/starting over
        lambda: (__version__, Store._wipe),
        {
            "0.1.7": ("0.1.8", lambda _: None),  # if format unchanged, migrate whole store
            "0.1.8": ("0.1.9", _set_chat_mtimes),
        },
    )

    store_dir: Path  # root directory of the cache
    ignore_cache: bool = False  # when True, load() always reports a miss

    def __post_init__(self):
        if not self.store_dir.exists():
            self.store_dir.mkdir(mode=0o700, parents=True, exist_ok=True)
            (self.store_dir / "version").write_text(f"{__version__}\n")
            return

        try:
            version = (self.store_dir / "version").read_text().strip()
        except FileNotFoundError:
            (self.store_dir / "version").write_text(f"{__version__}\n")
            return

        # walk the migration chain one step at a time, recording progress
        # after each step so an interrupted migration can resume
        while version != __version__:
            version, migrate_step = self.MIGRATIONS[version]
            migrate_step(self)
            (self.store_dir / "version").write_text(f"{version}\n")

    def save(
        self, path: Path, data: Json, mtime: datetime | float | None = None
    ) -> None:
        """Atomically write ``data`` as JSON at ``path`` (``.json`` appended),
        optionally stamping the file's mtime."""
        cache_file = self.store_dir / path.with_suffix(".json")
        cache_file.parent.mkdir(mode=0o700, parents=True, exist_ok=True)

        # write to a sibling temp file, then move it into place, so readers
        # never observe a partially written cache entry. delete=False with
        # explicit cleanup (rather than delete=True) because the temp file is
        # moved away before the context closes it; and replace() (unlike
        # rename()) atomically overwrites an existing cache file on Windows
        # as well as POSIX.
        tmp: Path | None = None
        try:
            with NamedTemporaryFile(
                "w",
                prefix=f"{cache_file.name}-",
                dir=cache_file.parent,
                delete=False,
            ) as f:
                tmp = Path(f.name)
                json.dump(
                    data, f, ensure_ascii=False, check_circular=False, separators=(",", ":")
                )
                f.flush()
            tmp.replace(cache_file)
        except BaseException:
            if tmp is not None:
                tmp.unlink(missing_ok=True)
            raise

        # `is not None` (not truthiness): an mtime of 0.0 (the epoch) is a
        # legitimate timestamp and must still be applied
        if mtime is not None:
            mtime = mtime.timestamp() if isinstance(mtime, datetime) else mtime
            os.utime(cache_file, (mtime, mtime))

    def load(self, path: Path) -> Json | None:
        """Return cached JSON for ``path``, or None on a cache miss."""
        if self.ignore_cache:
            return None

        cache_file = self.store_dir / path.with_suffix(".json")
        with suppress(FileNotFoundError, NotADirectoryError), cache_file.open() as f:
            return cast(Json, json.load(f))

    def delete(self, path: Path) -> None:
        """Remove ``path``'s cache file and prune now-empty parent directories."""
        file = self.store_dir / path.with_suffix(".json")
        with suppress(FileNotFoundError):
            file.unlink()

        parent = file.parent
        while parent != self.store_dir and parent.exists():
            try:
                parent.rmdir()
                parent = parent.parent
            except OSError:
                # directory not empty (or otherwise unremovable): stop pruning
                break
|
|
240
|
+
|
|
241
|
+
|
|
242
|
+
class APIObject:
    """Base for objects backed by claude.ai API JSON and cached in a Store.

    Subclasses either assign _client/_store directly or override the client/
    store properties to delegate to an owning object.
    """

    _client: Client  # pyright: ignore[reportUninitializedInstanceVariable]
    _store: Store  # pyright: ignore[reportUninitializedInstanceVariable]
    _data: Json  # pyright: ignore[reportUninitializedInstanceVariable]

    @property
    def client(self) -> Client:
        return self._client

    @property
    def store(self) -> Store:
        return self._store

    def get_data(self) -> Json:
        """The raw JSON payload backing this object."""
        return self._data

    def get_mtime(self) -> datetime | float | None:
        """Modification time to stamp on the cache file; None means don't."""
        return None

    def set_data(self: T_APIObject, data: Json) -> T_APIObject:
        """Replace the backing payload; returns self for chaining."""
        self._data = data
        return self

    def api_path(self) -> str:
        """API path for this object, relative to https://claude.ai/api/."""
        raise NotImplementedError

    def store_path(self) -> Path:
        """Cache path relative to the store root (without a suffix)."""
        raise NotImplementedError

    @classmethod
    def _load(
        cls: type[T_APIObject],
        *args: Any,
        store_path: Path | None = None,
    ) -> T_APIObject | None:
        """Construct cls(*args) populated from the cache, or None on a miss."""
        obj = cls(*args)
        if data := obj.store.load(store_path or obj.store_path()):
            return obj.set_data(data)
        else:
            return None

    @classmethod
    async def _fetch(
        cls: type[T_APIObject],
        *args: Any,
        api_path: str | None = None,
    ) -> T_APIObject:
        """Construct cls(*args), populate it from the API, and cache it."""
        obj = cls(*args)
        data = await obj.client.refresh(api_path or obj.api_path())
        return obj.set_data(data).save()

    async def refresh(self: T_APIObject) -> T_APIObject:
        """Re-fetch this object's payload from the API and save it."""
        return self.set_data(await self.client.refresh(self.api_path())).save()

    def save(self: T_APIObject) -> T_APIObject:
        """Write the current payload (and mtime) to the cache; returns self."""
        self.store.save(self.store_path(), self.get_data(), self.get_mtime())
        return self

    def delete_cached(self) -> None:
        """Remove this object's cache file, if any."""
        self.store.delete(self.store_path())
|
|
302
|
+
|
|
303
|
+
|
|
304
|
+
class Immutable(APIObject):
    """API objects hashed and compared by their canonical JSON payload."""

    def __hash__(self) -> int:
        # canonical serialization: sorted keys, compact separators, raw unicode
        canonical = json.dumps(
            self._data,
            ensure_ascii=False,
            sort_keys=True,
            check_circular=False,
            separators=(",", ":"),
        )
        return hash(canonical)

    def __eq__(self, other: object) -> bool:
        # only objects of the exact same class are comparable
        if type(self) is not type(other):
            return NotImplemented
        assert isinstance(other, APIObject)  # appease pyright
        return self._data == other._data
|
|
322
|
+
|
|
323
|
+
|
|
324
|
+
class Nameable(APIObject):
    """API objects carrying a uuid and an optional human-readable name."""

    _data: JsonD

    # translation table mapping filesystem-hostile characters and whitespace
    # to "_" for use in filenames
    FILENAME_XLAT: ClassVar[dict[int, int]] = dict.fromkeys(
        map(ord, '<>:"|?*/\\ \t\n\r'), ord("_")
    )

    @property
    def uuid(self) -> str:
        # round-trip through UUID to validate and normalize the value
        return str(UUID(cast(str, self._data["uuid"])))

    @property
    def name(self) -> str | None:
        # api seems to use {"name": ""} to mean {"name": null}
        return cast(str, self._data.get("name")) or None

    def slug(self) -> str:
        """Filesystem-safe identifier: sanitized name plus uuid, or bare uuid."""
        name = self.name
        return f"{name.translate(self.FILENAME_XLAT)}-{self.uuid}" if name else self.uuid

    def __str__(self) -> str:
        name = self.name
        return f"{name} ({self.uuid})" if name else self.uuid
|
|
349
|
+
|
|
350
|
+
|
|
351
|
+
class Timestamped(APIObject):
    """API objects exposing created_at/updated_at ISO-8601 timestamps."""

    _data: JsonD

    def _parse_time(self, key: str) -> datetime | None:
        # missing key, non-string value, or malformed timestamp all yield None
        try:
            return datetime.fromisoformat(cast(str, self._data[key]))
        except (KeyError, TypeError, ValueError):
            return None

    @property
    def created_at(self) -> datetime | None:
        return self._parse_time("created_at")

    @property
    def updated_at(self) -> datetime | None:
        return self._parse_time("updated_at")

    def get_mtime(self) -> datetime | float | None:
        # cache files are stamped with the object's last-updated time
        return self.updated_at
|
|
370
|
+
|
|
371
|
+
|
|
372
|
+
T_Loadable = TypeVar("T_Loadable", bound="Loadable")


class Loadable(APIObject):
    """API objects loadable/fetchable directly from a Client and Store.

    Thin, precisely-typed public wrappers over APIObject._load/_fetch for
    subclasses whose constructor takes exactly (client, store).
    """

    @classmethod
    def load(cls: type[T_Loadable], client: Client, store: Store) -> T_Loadable | None:
        """Return the cached instance, or None on a cache miss."""
        return cls._load(client, store)

    @classmethod
    async def fetch(cls: type[T_Loadable], client: Client, store: Store) -> T_Loadable:
        """Fetch from the API, save to the store, and return the instance."""
        return await cls._fetch(client, store)
|
|
383
|
+
|
|
384
|
+
|
|
385
|
+
@final
class Chat(Timestamped, Nameable):
    """A full chat conversation (message tree included) in an organization."""

    __slots__ = ("chat_list", "_data")

    chat_list: "Chats"
    _data: JsonD

    def __init__(
        self,
        chat_list: "Chats",
    ):
        self.chat_list = chat_list

    @property
    def client(self) -> Client:
        # delegate to the owning chat list
        return self.chat_list.client

    @property
    def store(self) -> Store:
        return self.chat_list.store

    def api_path(self) -> str:
        # tree/rendering flags request the complete message tree
        query = "?tree=True&rendering_mode=messages&render_all_tools=true"
        return f"{self.chat_list.api_path()}/{self.uuid}{query}"

    def store_path(self) -> Path:
        return self.chat_list.store_path() / self.slug()
|
|
417
|
+
|
|
418
|
+
|
|
419
|
+
@final
class ChatsEntry(Timestamped, Nameable, Immutable):
    """One row of an organization's chat listing; can load/fetch its Chat."""

    __slots__ = ("chat_list", "_data")

    chat_list: "Chats"
    _data: JsonD

    def __init__(
        self,
        chat_list: "Chats",
    ):
        self.chat_list = chat_list

    @property
    def client(self) -> Client:
        # delegate to the owning chat list
        return self.chat_list.client

    @property
    def store(self) -> Store:
        return self.chat_list.store

    def chat_api_path(self) -> str:
        # tree/rendering flags request the complete message tree
        query = "?tree=True&rendering_mode=messages&render_all_tools=true"
        return f"{self.chat_list.api_path()}/{self.uuid}{query}"

    def chat_store_path(self) -> Path:
        return self.chat_list.store_path() / self.slug()

    def load_chat(self) -> Chat | None:
        """The cached full Chat for this entry, or None on a miss."""
        return Chat._load(self.chat_list, store_path=self.chat_store_path())

    async def fetch_chat(self) -> Chat:
        """Fetch, cache, and return the full Chat for this entry."""
        return await Chat._fetch(self.chat_list, api_path=self.chat_api_path())
|
|
457
|
+
|
|
458
|
+
|
|
459
|
+
@final
class Chats(APIObject):
    """The chat-conversations listing of one organization.

    _data maps chat uuid -> ChatsEntry in *forward* chronological order even
    though the API returns reverse-chronological lists (see get_data below).
    """

    __slots__ = ("organization", "unseen", "_data")

    organization: "Organization"
    _data: dict[str, ChatsEntry]
    # hint left by a previous new_entries() run: the likely number of new or
    # changed entries at the head of the listing on the next refresh
    unseen: int

    def __init__(self, organization: "Organization"):
        self.organization = organization
        self.unseen = 0
        self._data = {}

    @property
    def client(self) -> Client:
        return self.organization.client

    @property
    def store(self) -> Store:
        return self.organization.store

    def api_path(self) -> str:
        return f"{self.organization.api_path()}/chat_conversations"

    def store_path(self) -> Path:
        return self.organization.store_path() / "chat_conversations"

    def set_data(self, data: Json) -> "Chats":
        # convert list (reverse chronological from API) to dict (forward chronological)
        entries = (ChatsEntry(self).set_data(raw) for raw in reversed(cast(list[JsonD], data)))
        self._data = {entry.uuid: entry for entry in entries}
        return self

    @classmethod
    def _load(
        cls,
        *args: Any,
        store_path: Path | None = None,
    ) -> "Chats | None":
        # same as APIObject._load; restated to narrow the return type
        obj = cls(*args)
        if data := obj.store.load(store_path or obj.store_path()):
            return obj.set_data(data)
        else:
            return None

    def get_data(self) -> Json:
        # convert dict (forward chronological) back to list (reverse chronological)
        return [entry._data for entry in reversed(self._data.values())]

    def entry(self, uuid: str) -> ChatsEntry | None:
        """The known entry with this uuid, or None."""
        return self._data.get(uuid)

    def cached_entries(self) -> Iterator[ChatsEntry]:
        # yield in reverse chronological order (newest first)
        yield from reversed(self._data.values())

    async def new_entries(
        self, page_size: int = 20, save: bool = True
    ) -> AsyncGenerator[ChatsEntry, None]:
        """Yield entries that are new or changed since the last sync, newest first.

        With save=True the refreshed listing is written back to the store when
        the generator finishes.
        """
        # if no data/first fetch, fetch all entries---they're all new
        if not self._data:
            self.set_data(await self.client.refresh(self.api_path()))

            for entry in self.cached_entries():
                yield entry

            if save:
                self.save()
            return

        # "sliding window" sync (chat_conversations is recently-modified-first)
        new: dict[str, ChatsEntry] = {}

        offset = 0
        limit = self.unseen + 1 if self.unseen else page_size
        self.unseen = 0

        assert limit

        while True:
            page = cast(
                list[JsonD],
                await self.client.refresh(
                    f"{self.api_path()}?limit={limit}&offset={offset}"
                ),
            )

            done = False
            for raw in page:
                entry = ChatsEntry(self).set_data(raw)
                uuid = entry.uuid

                if new_entry := new.get(uuid):
                    # this api doesn't have cursors or snapshots or anything :(
                    # so if an entry's created or updated *between our fetching
                    # one page and the next*, there's a *new* most recent entry
                    # which bumps all the others down and causes the last entry
                    # of the prior page also to be the first entry of the next:
                    #
                    # page 1 sees [A B]: [A B] C D E
                    # entry D is updated: D A B C E
                    # page 2 sees [B C]: D A [B C] E
                    #
                    # of course this can happen multiple times, and in fact the
                    # number of times it happens is the count of new entries we
                    # would expect at offsets 0 through N on our next fetch! so
                    # update self.unseen as a hint to the next new_entries call
                    # that there are likely exactly self.unseen new or changed.
                    self.unseen += 1

                    # ...but if the above hypothesis is wrong for some perverse
                    # reason like all the chats up until now being rewritten in
                    # a specific order behind our backs, still yield the entry:
                    #
                    # page 1 sees [A B]: [A B] C D E
                    # B, D, & A updated: A' D' B' C E
                    # page 2 sees [B' C]: A' D' [B' C] E
                    #
                    # so in case the cartesian daemon of claude chats is out to
                    # get us, we need to yield B' even if we wouldn't yield B.
                    if entry.updated_at == new_entry.updated_at:
                        continue
                elif (stored := self._data.get(uuid)) and entry.updated_at == stored.updated_at:
                    # entry was in a chronological list of ones we already had,
                    # so we must also have all entries before it, so we're done
                    done = True
                    break

                new[uuid] = entry
                yield entry

            # we *ought* to break from this loop by seeing something from prior
            # refreshes, but just in case e.g. all seen entries were deleted on
            # claude.ai (or more likely moved/counted in self.unseen)... we are
            # also definitely done if they ran out of items for this page
            if done or len(page) < limit:
                break

            # if not, double it and give it to the next person
            offset += limit
            limit *= 2

        # dicts preserve order, but because there is no dict.prepend(), we need
        # self._data to be in "forward-chronological" order so newly discovered
        # chats belong at the "end" w/r/t iteration order. thus we insert items
        # from necessarily reverse-chronological new in reverse so self._data's
        # still entirely in chronological order. this may be a bit galaxy brain
        self._data.update(reversed(new.items()))
        if save:
            self.save()

    async def refresh(self) -> "Chats":
        """Drain new_entries() (saving the result); returns self."""
        async for _ in self.new_entries():
            pass

        return self

    def __len__(self) -> int:
        return len(self._data)
|
|
618
|
+
|
|
619
|
+
|
|
620
|
+
@final
class Organization(Nameable, Loadable):
    """An organization the account belongs to; chats live under organizations."""

    __slots__ = ("_client", "_store", "_data")

    _client: Client
    _store: Store
    _data: JsonD

    def __init__(self, client: Client, store: Store):
        self._client = client
        self._store = store

    def api_path(self) -> str:
        return f"organizations/{self.uuid}"

    def store_path(self) -> Path:
        return Path("organizations") / self.slug()

    @property
    def capabilities(self) -> list[str]:
        """Capability strings the API reports for this organization."""
        return cast(list[str], self._data["capabilities"])

    def chat_list(self) -> Chats:
        """This organization's cached chat listing, or a fresh empty one."""
        return Chats._load(self) or Chats(self)
|
|
648
|
+
|
|
649
|
+
|
|
650
|
+
@final
class Membership(Immutable):
    """One entry of an account's organization-membership list."""

    __slots__ = ("account", "_data")

    account: "Account"
    _data: JsonD

    def __init__(self, account: "Account"):
        self.account = account

    @property
    def client(self) -> Client:
        # delegate to the owning account
        return self.account.client

    @property
    def store(self) -> Store:
        return self.account.store

    def organization(self) -> Organization:
        """Build an Organization from this membership's embedded payload."""
        org_data = cast(JsonD, self._data["organization"])
        return Organization(self.client, self.store).set_data(org_data)
|
|
675
|
+
|
|
676
|
+
|
|
677
|
+
@final
class Account(Nameable, Loadable):
    """The logged-in claude.ai account, backed by the ``account`` endpoint."""

    __slots__ = ("_client", "_store", "_data")

    _client: Client
    _store: Store
    _data: JsonD

    def __init__(self, client: Client, store: Store):
        self._client = client
        self._store = store

    def api_path(self) -> str:
        return "account"

    def store_path(self) -> Path:
        return Path("account")

    def memberships(self) -> Iterator[Membership]:
        """Yield a Membership for each entry of the account's membership list."""
        for raw in cast(list[JsonD], self._data["memberships"]):
            yield Membership(self).set_data(raw)

    def organization(self, uuid: str) -> Organization | None:
        """The member organization with this uuid, or None if not a member."""
        return next(
            (
                org
                for membership in self.memberships()
                if (org := membership.organization()).uuid == uuid
            ),
            None,
        )
|
|
710
|
+
|
|
711
|
+
|
|
712
|
+
def truncate(s: str, max_len: int) -> str:
    """Clip *s* to at most *max_len* characters, ending in "…" when clipped."""
    return s if len(s) <= max_len else s[: max_len - 1] + "…"
|
|
716
|
+
|
|
717
|
+
|
|
718
|
+
async def aroundrobin(*iterators: AsyncGenerator[T, None]) -> AsyncGenerator[T, None]:
    """Interleave the given async generators round-robin until all are exhausted.

    Exhausted generators are skipped on later passes; on exit (normal or not)
    every input generator is closed, ignoring errors raised while closing.
    """
    try:
        progressed = True
        while progressed:
            progressed = False
            for gen in iterators:
                with suppress(StopAsyncIteration):
                    yield await anext(gen)
                    progressed = True
    finally:
        for gen in iterators:
            with suppress(BaseException):
                await gen.aclose()
|
|
731
|
+
|
|
732
|
+
|
|
733
|
+
@dataclass(slots=True)
|
|
734
|
+
class Syncer:
|
|
735
|
+
client: Client
|
|
736
|
+
store: Store
|
|
737
|
+
connections: int = 6
|
|
738
|
+
success_delay: float = 0.25
|
|
739
|
+
tty: bool = field(default_factory=sys.stdout.isatty)
|
|
740
|
+
|
|
741
|
+
async def _as_completed(
|
|
742
|
+
self, awaitables: AsyncIterator[Awaitable[T]]
|
|
743
|
+
) -> AsyncGenerator[Task[T], None]:
|
|
744
|
+
pending: set[Task[T]] = set()
|
|
745
|
+
|
|
746
|
+
try:
|
|
747
|
+
for _ in range(self.connections):
|
|
748
|
+
try:
|
|
749
|
+
awaitable = await anext(awaitables)
|
|
750
|
+
pending.add(asyncio.ensure_future(awaitable))
|
|
751
|
+
except StopAsyncIteration:
|
|
752
|
+
break
|
|
753
|
+
|
|
754
|
+
while pending:
|
|
755
|
+
done, pending = await asyncio.wait(
|
|
756
|
+
pending, return_when=asyncio.FIRST_COMPLETED
|
|
757
|
+
)
|
|
758
|
+
|
|
759
|
+
for task in done:
|
|
760
|
+
yield task
|
|
761
|
+
|
|
762
|
+
await asyncio.sleep(self.success_delay)
|
|
763
|
+
|
|
764
|
+
try:
|
|
765
|
+
awaitable = await anext(awaitables)
|
|
766
|
+
pending.add(asyncio.ensure_future(awaitable))
|
|
767
|
+
except StopAsyncIteration:
|
|
768
|
+
pass
|
|
769
|
+
except BaseException:
|
|
770
|
+
for task in pending:
|
|
771
|
+
task.cancel()
|
|
772
|
+
for task in pending:
|
|
773
|
+
with suppress(BaseException):
|
|
774
|
+
await task
|
|
775
|
+
raise
|
|
776
|
+
finally:
|
|
777
|
+
if isinstance(awaitables, AsyncGenerator):
|
|
778
|
+
await awaitables.aclose()
|
|
779
|
+
|
|
780
|
+
def as_completed(
|
|
781
|
+
self, awaitables: Iterable[Awaitable[T]] | AsyncIterator[Awaitable[T]]
|
|
782
|
+
) -> AsyncGenerator[Task[T], None]:
|
|
783
|
+
async def asyncify(
|
|
784
|
+
iterable: Iterable[Awaitable[T]],
|
|
785
|
+
) -> AsyncGenerator[Awaitable[T], None]:
|
|
786
|
+
for item in iterable:
|
|
787
|
+
yield item
|
|
788
|
+
|
|
789
|
+
if isinstance(awaitables, AsyncIterator):
|
|
790
|
+
return self._as_completed(awaitables)
|
|
791
|
+
else:
|
|
792
|
+
return self._as_completed(asyncify(awaitables))
|
|
793
|
+
|
|
794
|
+
async def gather(self, *awaitables: Awaitable[T]) -> list[T]:
|
|
795
|
+
tasks_list = [asyncio.ensure_future(a) for a in awaitables]
|
|
796
|
+
|
|
797
|
+
async with aclosing(self.as_completed(tasks_list)) as gen:
|
|
798
|
+
async for task in gen:
|
|
799
|
+
await task
|
|
800
|
+
|
|
801
|
+
return [task.result() for task in tasks_list]
|
|
802
|
+
|
|
803
|
+
async def get_organizations(self) -> AsyncGenerator[Organization]:
|
|
804
|
+
old_account = Account.load(self.client, self.store)
|
|
805
|
+
account = await Account.fetch(self.client, self.store)
|
|
806
|
+
|
|
807
|
+
for membership in account.memberships():
|
|
808
|
+
organization = membership.organization()
|
|
809
|
+
|
|
810
|
+
if (
|
|
811
|
+
old_account
|
|
812
|
+
and (old_organization := old_account.organization(organization.uuid))
|
|
813
|
+
and old_organization.store_path() != organization.store_path()
|
|
814
|
+
):
|
|
815
|
+
print(
|
|
816
|
+
f"Renaming organization {old_organization} to {organization.name or organization.uuid}",
|
|
817
|
+
file=sys.stderr,
|
|
818
|
+
)
|
|
819
|
+
old_dir = self.store.store_dir / old_organization.store_path()
|
|
820
|
+
new_dir = self.store.store_dir / organization.store_path()
|
|
821
|
+
with suppress(FileNotFoundError):
|
|
822
|
+
old_dir.rename(new_dir)
|
|
823
|
+
|
|
824
|
+
if "chat" not in organization.capabilities:
|
|
825
|
+
print(
|
|
826
|
+
f'Skipping organization {organization} without "chat" capability',
|
|
827
|
+
file=sys.stderr,
|
|
828
|
+
)
|
|
829
|
+
continue
|
|
830
|
+
|
|
831
|
+
print(f"Fetching chats for organization {organization}", file=sys.stderr)
|
|
832
|
+
yield organization
|
|
833
|
+
|
|
834
|
+
def print_entry(self, entry: ChatsEntry) -> None:
|
|
835
|
+
name = entry.name or ""
|
|
836
|
+
if self.tty:
|
|
837
|
+
try:
|
|
838
|
+
width = os.get_terminal_size().columns
|
|
839
|
+
except OSError:
|
|
840
|
+
width = 80
|
|
841
|
+
name = truncate(name, width - 36 - 4)
|
|
842
|
+
print(f"{entry.uuid}\t{name}")
|
|
843
|
+
|
|
844
|
+
@staticmethod
|
|
845
|
+
def fetch_new_chat(entry: ChatsEntry) -> Task[Chat]:
|
|
846
|
+
# we must get old_entry *now* and not in the async function below since
|
|
847
|
+
# Chats.new_entries might finish and save the new entry over old_entry!
|
|
848
|
+
old_entry = entry.chat_list.entry(entry.uuid)
|
|
849
|
+
old_chat = old_entry.load_chat() if old_entry else None
|
|
850
|
+
|
|
851
|
+
async def _fetch_new_chat() -> Chat:
|
|
852
|
+
chat = await entry.fetch_chat()
|
|
853
|
+
|
|
854
|
+
if old_chat and old_chat.store_path() != chat.store_path():
|
|
855
|
+
old_chat.delete_cached()
|
|
856
|
+
|
|
857
|
+
return chat
|
|
858
|
+
|
|
859
|
+
return asyncio.create_task(_fetch_new_chat())
|
|
860
|
+
|
|
861
|
+
async def new_chat_fetches(self) -> AsyncGenerator[Task[Chat], None]:
    """Yield a fetch ``Task[Chat]`` for every new entry in every organization.

    Entries from all organizations' chat lists are interleaved via
    ``aroundrobin``; each round re-lists only the chat lists that still
    report unseen entries, and each list is saved by a background task
    once all of that round's fetches have completed.
    """
    # One slot per organization: (chat list, this round's fetch tasks,
    # the pending save task — seeded with a no-op sleep(0) task so the
    # first round's cancel/replace logic needs no special case).
    needs_refresh: list[tuple[Chats, list[Task[Chat]], Task[None]]] = [
        (organization.chat_list(), [], asyncio.create_task(asyncio.sleep(0)))
        async for organization in self.get_organizations()
    ]

    try:
        while needs_refresh:
            # Interleave new entries from all chat lists fairly.
            async with aclosing(
                aroundrobin(
                    *(
                        (
                            (entry, fetches)
                            async for entry in chats.new_entries(save=False)
                        )
                        for chats, fetches, _ in needs_refresh
                    )
                )
            ) as items:
                async for entry, fetches in items:
                    fetch = self.fetch_new_chat(entry)
                    fetches.append(fetch)
                    self.print_entry(entry)
                    yield fetch

            # Cancel the previous round's save tasks before replacing them.
            for _, _, old_save in needs_refresh:
                if old_save:
                    old_save.cancel()

            async def save_after_fetches(
                chats: Chats, fetches: list[Task[Chat]]
            ) -> None:
                # Persist the chat list only after every fetch of this
                # round has finished.
                for fetch in fetches:
                    await fetch

                chats.save()

            # Keep only the chat lists that still have unseen entries,
            # each with a fresh fetch list and a new save task.
            needs_refresh = [
                (
                    chats,
                    [],
                    asyncio.create_task(save_after_fetches(chats, fetches)),
                )
                for chats, fetches, _ in needs_refresh
                if chats.unseen
            ]

            for _, _, save in needs_refresh:
                await save
    except BaseException:
        # On error or cancellation, tear down the pending save tasks and
        # swallow whatever their cancellation raises before propagating.
        for _, _, save in needs_refresh:
            save.cancel()

            with suppress(BaseException):
                await save

        raise
|
|
918
|
+
|
|
919
|
+
async def sync_all(self) -> None:
    """Drive a full backup: await every chat fetch task to completion."""
    completed = self.as_completed(self.new_chat_fetches())
    async with aclosing(completed) as pending:
        async for finished in pending:
            await finished
|
|
923
|
+
|
|
924
|
+
|
|
925
|
+
@dataclass(slots=True)
|
|
926
|
+
class DefaultPath:
|
|
927
|
+
path: str
|
|
928
|
+
|
|
929
|
+
def __str__(self):
|
|
930
|
+
try:
|
|
931
|
+
return f"~/{Path(self.path).relative_to(Path.home())}"
|
|
932
|
+
except ValueError:
|
|
933
|
+
return self.path
|
|
934
|
+
|
|
935
|
+
|
|
936
|
+
def get_session_key() -> str:
    """Extract the claude.ai ``sessionKey`` cookie from a local browser.

    Returns the cookie value. Raises RuntimeError when the browser cookie
    store cannot be read, or when it loads fine but contains no sessionKey.
    """
    try:
        for cookie in browser_cookie3.load(domain_name=".claude.ai"):
            if cookie.name == "sessionKey" and cookie.value:
                return cookie.value
    except Exception as e:
        raise RuntimeError(
            "Failed to load browser cookies. "
            + "Set CLAUDE_SESSION_KEY to your claude.ai sessionKey cookie."
        ) from e
    # Raise outside the try so this specific error isn't swallowed by the
    # except above and replaced with the misleading "failed to load" message
    # (the original code always masked it).
    raise RuntimeError(
        "sessionKey cookie not found in browser. "
        + "Set CLAUDE_SESSION_KEY to your claude.ai sessionKey cookie."
    )
|
|
947
|
+
|
|
948
|
+
|
|
949
|
+
async def _main() -> None:
    """Parse CLI arguments, build the client and store, and run a full sync."""

    def default(cls: type[Any], key: str) -> Any:
        # Read a dataclass field's default so --help stays in sync with code.
        return cls.__dataclass_fields__[key].default

    parser = ArgumentParser(
        description="Backup Claude.ai chats",
        formatter_class=ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument(
        "-v",
        "--version",
        action="version",
        version=f"%(prog)s {__version__}",
    )
    parser.add_argument(
        "backup_dir",
        nargs="?",
        default=DefaultPath(user_data_dir("claude-backup")),
        help="Directory to save backups",
    )
    parser.add_argument(
        "-c",
        "--connections",
        type=int,
        default=default(Syncer, "connections"),
        help="Maximum concurrent connections",
    )
    parser.add_argument(
        "-d",
        "--success-delay",
        type=float,
        metavar="DELAY",
        default=default(Syncer, "success_delay"),
        help="Delay after successful request in seconds",
    )
    parser.add_argument(
        "-r",
        "--retries",
        type=int,
        default=default(Client, "retries"),
        help="Number of retries for API requests",
    )
    parser.add_argument(
        "--min-retry-delay",
        type=float,
        metavar="DELAY",
        default=default(Client, "min_retry_delay"),
        help="Minimum retry delay in seconds",
    )
    parser.add_argument(
        "--max-retry-delay",
        type=float,
        metavar="DELAY",
        default=default(Client, "max_retry_delay"),
        help="Maximum retry delay in seconds",
    )
    parser.add_argument(
        "--ignore-cache",
        action="store_true",
        help="Ignore local cache and re-fetch everything from API",
    )

    args = parser.parse_args()
    # Unwrap the DefaultPath used only for pretty --help output.
    if isinstance(args.backup_dir, DefaultPath):
        args.backup_dir = args.backup_dir.path

    store = Store(store_dir=Path(args.backup_dir), ignore_cache=args.ignore_cache)

    # Prefer an explicit CLAUDE_SESSION_KEY; otherwise read it from the browser.
    session_key = os.environ.get("CLAUDE_SESSION_KEY") or get_session_key()
    async with Client(
        session_key=session_key,
        retries=args.retries,
        min_retry_delay=args.min_retry_delay,
        max_retry_delay=args.max_retry_delay,
    ) as client:
        syncer = Syncer(
            client=client,
            store=store,
            connections=args.connections,
            success_delay=args.success_delay,
        )
        await syncer.sync_all()
|
|
1031
|
+
|
|
1032
|
+
|
|
1033
|
+
def main() -> None:
    """Synchronous console entry point; Ctrl-C exits quietly."""
    try:
        asyncio.run(_main())
    except KeyboardInterrupt:
        pass
|
|
1036
|
+
|
|
1037
|
+
|
|
1038
|
+
# Support direct execution (python path/to/__init__.py) as well as the
# console-script entry point.
if __name__ == "__main__":
    main()
|
|
@@ -0,0 +1,160 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: claude-backup
|
|
3
|
+
Version: 0.1.9
|
|
4
|
+
Summary: Unofficial, unsanctioned tool to backup Claude.ai chats to local files.
|
|
5
|
+
Keywords: claude,backup,ai,chat,export,archive
|
|
6
|
+
Author-email: Ultrathinkers Anonymous <ultrathink@twilligon.com>
|
|
7
|
+
Requires-Python: >=3.10
|
|
8
|
+
Description-Content-Type: text/markdown
|
|
9
|
+
License-Expression: CC0-1.0
|
|
10
|
+
Classifier: Development Status :: 4 - Beta
|
|
11
|
+
Classifier: Environment :: Console
|
|
12
|
+
Classifier: Programming Language :: Python :: 3
|
|
13
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
14
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
15
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
16
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
17
|
+
Classifier: Topic :: Internet :: WWW/HTTP
|
|
18
|
+
Classifier: Topic :: System :: Archiving :: Backup
|
|
19
|
+
Classifier: Topic :: Utilities
|
|
20
|
+
License-File: LICENSE
|
|
21
|
+
Requires-Dist: aiohttp
|
|
22
|
+
Requires-Dist: browser-cookie3
|
|
23
|
+
Requires-Dist: fake-useragent
|
|
24
|
+
Requires-Dist: platformdirs
|
|
25
|
+
Requires-Dist: yarl
|
|
26
|
+
Project-URL: Repository, https://github.com/twilligon/claude-backup
|
|
27
|
+
|
|
28
|
+
# `claude-backup`
|
|
29
|
+
|
|
30
|
+
Unofficial, unsanctioned tool to backup [Claude.ai](https://claude.ai) chats to local files.
|
|
31
|
+
|
|
32
|
+
uvx claude-backup
|
|
33
|
+
|
|
34
|
+
## Features
|
|
35
|
+
|
|
36
|
+
`claude-backup` creates a full local copy of all (text) content of all branches of all chats accessible to your [Claude.ai](https://claude.ai) account. If you are a member of multiple "organizations", it fetches chats from all of them. We preserve all metadata on chats and their provenance, including that of their parent organization and user. We automatically rename our local copies of chats and organizations to match their current names on `claude.ai`.
|
|
37
|
+
|
|
38
|
+
We use an incremental sync algorithm to fetch only chats created or updated since the last backup. The fetch is done in parallel, with a typical user agent and polite rate and connection limits such that it's less traffic than manually scrolling through your chats and opening each in a new browser tab. I of course can't guarantee Big Claude won't be after you if you run this unofficial tool, but empirically they don't seem to mind.
|
|
39
|
+
|
|
40
|
+
Chats are stored as their original API JSON with nice `find`able names and `grep`able contents:
|
|
41
|
+
|
|
42
|
+
$ CLAUDE_SESSION_KEY="$(wl-paste)" uvx claude-backup
|
|
43
|
+
Skipping organization twilligon (c8eaca6b-eddb-4bbc-9fe3-637a0574565f) without "chat" capability
|
|
44
|
+
Fetching chats for organization claude@twilligon.com's Organization (9e9a56fc-6d1c-4d62-a96d-0cff3a473cf0)
|
|
45
|
+
30ceebd6-afcc-4796-bb77-631069cd0696 Cadre versus posse comparison
|
|
46
|
+
d15ac7d6-f167-43d3-afac-fcfaaafb64ec GraphQL database backends and data sources
|
|
47
|
+
3e553492-89a8-45e8-b51a-b2faf9dfde64 Japanese sword quality despite poor iron sources
|
|
48
|
+
040343e8-db5a-4031-a21a-be237a6c661a Balisong pin and screw maintenance
|
|
49
|
+
08217feb-c912-40c2-9d14-f89784d61ab5 Fixing dry falafel centers
|
|
50
|
+
d41bf95e-8da1-47b3-af89-aecacd7770a6 Sleep Token's artistic merits
|
|
51
|
+
b10e02dd-479b-4714-9d99-264d6ef4ba75 Annual human steel consumption
|
|
52
|
+
608f7e3d-fbcc-4891-bf67-ebc9f15c7a01 Calling payphones from mobile devices
|
|
53
|
+
c56ad0c5-feae-4dae-bafc-784d1f46de29 Postfix security assessment
|
|
54
|
+
4553fae5-61a6-40fc-b95c-36c53e706614 App store review power dynamics for major companies
|
|
55
|
+
0b7f4ae8-ef57-4cac-8d47-7e4c5b0e5564 Exercise timing and sleep quality
|
|
56
|
+
6e5385c4-6ad4-4d46-ac20-dda2105a3bea Scented neural networks with odor emissions
|
|
57
|
+
$ # ...and so on... time passes... then later:
|
|
58
|
+
$ CLAUDE_SESSION_KEY="$(wl-paste)" uvx claude-backup
|
|
59
|
+
Skipping organization twilligon (c8eaca6b-eddb-4bbc-9fe3-637a0574565f) without "chat" capability
|
|
60
|
+
Fetching chats for organization claude@twilligon.com's Organization (9e9a56fc-6d1c-4d62-a96d-0cff3a473cf0)
|
|
61
|
+
4ddbcc12-5cd8-4611-813a-befdedeb4b16 Smithsonian funding and government ownership
|
|
62
|
+
$ tree ~/.local/share/claude-backup | head -n15
|
|
63
|
+
~/.local/share/claude-backup
|
|
64
|
+
├── account.json
|
|
65
|
+
├── organizations
|
|
66
|
+
│ └── claude@twilligon.com's_Organization-9e9a56fc-6d1c-4d62-a96d-0cff3a473cf0
|
|
67
|
+
│ ├── chat_conversations
|
|
68
|
+
│ │ ├── Free_Indirect_Discourse_Analysis-48d70be1-f23d-4757-9bcf-9d9d9711a3f6.json
|
|
69
|
+
│ │ ├── Fetty_Wap_name_origin-dc45880a-8c60-4366-b610-e4fe6bb9a65c.json
|
|
70
|
+
│ │ ├── Ryzen_Motherboard_PS_2_Port_Hunt-012583f0-83cc-4c18-8f8b-312c8cb856ac.json
|
|
71
|
+
│ │ ├── Tailscale_versus_wireguard_comparison-24ed7155-8d9e-410f-bf7c-cd3fb6cb4379.json
|
|
72
|
+
│ │ ├── Credit_card_companies_in_Europe-67ad5f75-80ba-4754-bc6c-7d21e98f948c.json
|
|
73
|
+
│ │ ├── OpenGL_and_Vulkan_Package_Compatibility-63859641-5ffe-459b-99b0-4759acdc8235.json
|
|
74
|
+
│ │ ├── Government_tech_capabilities_and_bureaucracy-1c0743f0-a124-456b-888f-d08c5b83923b.json
|
|
75
|
+
│ │ ├── Reverse_Engineering_Minified_JavaScript-8cf7cab1-9576-45d7-929a-ea34997b0061.json
|
|
76
|
+
│ │ ├── Wire_and_String_Mysteries-9ac747b7-f36e-4e86-9f24-377ab16fb5ec.json
|
|
77
|
+
│ │ ├── US_Senate_parliamentarian's_role_and_power-5fa34eb3-ac74-4683-8850-c90761e5f3d8.json
|
|
78
|
+
$ rg -0l falafel ~/.local/share/claude-backup/organizations/*/chat_conversations | xargs -0n1 basename
|
|
79
|
+
Fixing_dry_falafel_centers-08217feb-c912-40c2-9d14-f89784d61ab5.json
|
|
80
|
+
Best_falafel_restaurants_in_San_Francisco-76aa159a-e41a-4935-a2e5-d53465041e13.json
|
|
81
|
+
Rainy_day_food_delivery_dilemma-db4992e8-c69c-4085-a571-9a22dfff68da.json
|
|
82
|
+
Pita_Chip_Conversation_Search-059dca9c-bac3-4cd5-b405-34ddbbdd5fa8.json
|
|
83
|
+
$ rg -0l falafel ~/.local/share/claude-backup/organizations/*/chat_conversations | xargs -0 jq -r '"https://claude.ai/chat/\(.uuid)"'
|
|
84
|
+
https://claude.ai/chat/08217feb-c912-40c2-9d14-f89784d61ab5
|
|
85
|
+
https://claude.ai/chat/76aa159a-e41a-4935-a2e5-d53465041e13
|
|
86
|
+
https://claude.ai/chat/db4992e8-c69c-4085-a571-9a22dfff68da
|
|
87
|
+
https://claude.ai/chat/059dca9c-bac3-4cd5-b405-34ddbbdd5fa8
|
|
88
|
+
|
|
89
|
+
## Limitations
|
|
90
|
+
|
|
91
|
+
This read-only tool is the product of reverse-engineering Claude.ai's internal API (for Good, not Evil---please don't ban me Anthropic 🙏) so I can't make any guarantees `claude-backup` will continue to work. That said, we make very few assumptions about the API schema, and everything works as of 2025-10-31. I'll likely update this best-effort when things break. Barring that, PRs welcome ;)
|
|
92
|
+
|
|
93
|
+
For reliable and comprehensive backups even through minor API changes, we save raw JSON responses from the API instead of normalizing them to some fixed schema. This should be a bit more resilient than forcing everything into some internal data model that could diverge from that of Claude.ai, but it means we don't download resources referenced by API objects other than what's necessary to list and fetch chat text. In practice, this preserves all textual message content and text/markdown attachments, but not images, PDFs, container uploads, "advanced research" reports, etc.
|
|
94
|
+
|
|
95
|
+
As a backup tool, we **retain deleted chats** (and old branches of extant chats) by default. To delete local copies, manually delete the corresponding `.json` from `backup_dir` or start from scratch by deleting `backup_dir` or running `claude-backup --ignore-cache`.
|
|
96
|
+
|
|
97
|
+
By default, `claude-backup` attempts to authenticate to `claude.ai` by extracting a session cookie from your browser. If this doesn't work (and frankly if this does work you should be sandboxing things better!) you must manually do the same. For Chrome et al., go to https://claude.ai in your browser, open **Developer tools** with F12 or Ctrl+Shift+I, navigate to the **Application** tab (it may be hidden under **⋮** > **More tools** > **Application**), and copy the value of the `sessionKey` cookie. (Firefox should be [similar](https://firefox-source-docs.mozilla.org/devtools-user/storage_inspector/index.html).) Then set the `CLAUDE_SESSION_KEY` environment variable to this cookie when running `claude-backup`:
|
|
98
|
+
|
|
99
|
+
$ CLAUDE_SESSION_KEY="sk-ant-sid01-..." claude-backup
|
|
100
|
+
|
|
101
|
+
As this tool demonstrates **anyone with this cookie is authenticated as you on `claude.ai`** so be careful and never give this to anyone or anything you do not trust! It might even be worth keeping out of `history` by loading it straight from your clipboard with e.g. `CLAUDE_SESSION_KEY="$(wl-paste)"`, though the exact command varies by platform. I'm sure Claude knows which you should use ;)
|
|
102
|
+
|
|
103
|
+
As of recently(?) `claude.ai` is behind Cloudflare, and this no longer works great without VPNs. I swore to myself I was off the scrape-things-that-don't-want-to-be-scraped grind, so I recommend running `claude-backup` from a non-sus IP, bumping `--success-delay`, and lowering `--connections`. Any further workarounds are left as an exercise to the reader...
|
|
104
|
+
|
|
105
|
+
## Install
|
|
106
|
+
|
|
107
|
+
The blazing fast and memory safe way:
|
|
108
|
+
|
|
109
|
+
$ uvx claude-backup # installed on demand
|
|
110
|
+
|
|
111
|
+
The traditional way:
|
|
112
|
+
|
|
113
|
+
$ pip install claude-backup
|
|
114
|
+
$ claude-backup
|
|
115
|
+
|
|
116
|
+
The bleeding-edge way:
|
|
117
|
+
|
|
118
|
+
$ git clone https://github.com/twilligon/claude-backup
|
|
119
|
+
$ cd claude-backup
|
|
120
|
+
$ python3 -m venv venv; . venv/bin/activate # you probably want a venv
|
|
121
|
+
$ pip install -e .
|
|
122
|
+
$ claude-backup
|
|
123
|
+
|
|
124
|
+
## Usage
|
|
125
|
+
|
|
126
|
+
$ claude-backup --help
|
|
127
|
+
usage: claude-backup [-h] [-v] [-c CONNECTIONS] [-d DELAY] [-r RETRIES]
|
|
128
|
+
[--min-retry-delay DELAY] [--max-retry-delay DELAY]
|
|
129
|
+
[--ignore-cache]
|
|
130
|
+
[backup_dir]
|
|
131
|
+
|
|
132
|
+
Backup Claude.ai chats
|
|
133
|
+
|
|
134
|
+
positional arguments:
|
|
135
|
+
backup_dir Directory to save backups (default:
|
|
136
|
+
~/.local/share/claude-backup)
|
|
137
|
+
|
|
138
|
+
options:
|
|
139
|
+
-h, --help show this help message and exit
|
|
140
|
+
-v, --version show program's version number and exit
|
|
141
|
+
-c, --connections CONNECTIONS
|
|
142
|
+
Maximum concurrent connections (default: 6)
|
|
143
|
+
-d, --success-delay DELAY
|
|
144
|
+
Delay after successful request in seconds (default:
|
|
145
|
+
0.25)
|
|
146
|
+
-r, --retries RETRIES
|
|
147
|
+
Number of retries for API requests (default: 10)
|
|
148
|
+
--min-retry-delay DELAY
|
|
149
|
+
Minimum retry delay in seconds (default: 1.0)
|
|
150
|
+
--max-retry-delay DELAY
|
|
151
|
+
Maximum retry delay in seconds (default: 60.0)
|
|
152
|
+
--ignore-cache Ignore local cache and re-fetch everything from API
|
|
153
|
+
(default: False)
|
|
154
|
+
|
|
155
|
+
## License
|
|
156
|
+
|
|
157
|
+
`claude-backup` is dedicated to the public domain where possible via CC0-1.0.
|
|
158
|
+
|
|
159
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
|
160
|
+
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
claude_backup/__init__.py,sha256=MxtkapWnGhHrZ-tVu1JcCwoxrSFM27WS9e4wuutfKag,32040
|
|
2
|
+
claude_backup/__main__.py,sha256=Fe20UD3aUiVzAXRjBrN-KNCrD7ZoX2BfTZbLymLPCMQ,70
|
|
3
|
+
claude_backup-0.1.9.dist-info/entry_points.txt,sha256=j0WwAOxft9FrMp3E_A99dU5mxfPLS036JdRekd9_xm0,52
|
|
4
|
+
claude_backup-0.1.9.dist-info/licenses/LICENSE,sha256=ogEPNDSH0_dhiv_lT3ifVIdgIzHAqNA_SemnxUfPBJk,7048
|
|
5
|
+
claude_backup-0.1.9.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
|
|
6
|
+
claude_backup-0.1.9.dist-info/METADATA,sha256=uXlv_HWCLkUEdWzINcs4h_rzoMk4Z29Aca8OV66th0o,11049
|
|
7
|
+
claude_backup-0.1.9.dist-info/RECORD,,
|
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
Creative Commons Legal Code
|
|
2
|
+
|
|
3
|
+
CC0 1.0 Universal
|
|
4
|
+
|
|
5
|
+
CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE
|
|
6
|
+
LEGAL SERVICES. DISTRIBUTION OF THIS DOCUMENT DOES NOT CREATE AN
|
|
7
|
+
ATTORNEY-CLIENT RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS
|
|
8
|
+
INFORMATION ON AN "AS-IS" BASIS. CREATIVE COMMONS MAKES NO WARRANTIES
|
|
9
|
+
REGARDING THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS
|
|
10
|
+
PROVIDED HEREUNDER, AND DISCLAIMS LIABILITY FOR DAMAGES RESULTING FROM
|
|
11
|
+
THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS PROVIDED
|
|
12
|
+
HEREUNDER.
|
|
13
|
+
|
|
14
|
+
Statement of Purpose
|
|
15
|
+
|
|
16
|
+
The laws of most jurisdictions throughout the world automatically confer
|
|
17
|
+
exclusive Copyright and Related Rights (defined below) upon the creator
|
|
18
|
+
and subsequent owner(s) (each and all, an "owner") of an original work of
|
|
19
|
+
authorship and/or a database (each, a "Work").
|
|
20
|
+
|
|
21
|
+
Certain owners wish to permanently relinquish those rights to a Work for
|
|
22
|
+
the purpose of contributing to a commons of creative, cultural and
|
|
23
|
+
scientific works ("Commons") that the public can reliably and without fear
|
|
24
|
+
of later claims of infringement build upon, modify, incorporate in other
|
|
25
|
+
works, reuse and redistribute as freely as possible in any form whatsoever
|
|
26
|
+
and for any purposes, including without limitation commercial purposes.
|
|
27
|
+
These owners may contribute to the Commons to promote the ideal of a free
|
|
28
|
+
culture and the further production of creative, cultural and scientific
|
|
29
|
+
works, or to gain reputation or greater distribution for their Work in
|
|
30
|
+
part through the use and efforts of others.
|
|
31
|
+
|
|
32
|
+
For these and/or other purposes and motivations, and without any
|
|
33
|
+
expectation of additional consideration or compensation, the person
|
|
34
|
+
associating CC0 with a Work (the "Affirmer"), to the extent that he or she
|
|
35
|
+
is an owner of Copyright and Related Rights in the Work, voluntarily
|
|
36
|
+
elects to apply CC0 to the Work and publicly distribute the Work under its
|
|
37
|
+
terms, with knowledge of his or her Copyright and Related Rights in the
|
|
38
|
+
Work and the meaning and intended legal effect of CC0 on those rights.
|
|
39
|
+
|
|
40
|
+
1. Copyright and Related Rights. A Work made available under CC0 may be
|
|
41
|
+
protected by copyright and related or neighboring rights ("Copyright and
|
|
42
|
+
Related Rights"). Copyright and Related Rights include, but are not
|
|
43
|
+
limited to, the following:
|
|
44
|
+
|
|
45
|
+
i. the right to reproduce, adapt, distribute, perform, display,
|
|
46
|
+
communicate, and translate a Work;
|
|
47
|
+
ii. moral rights retained by the original author(s) and/or performer(s);
|
|
48
|
+
iii. publicity and privacy rights pertaining to a person's image or
|
|
49
|
+
likeness depicted in a Work;
|
|
50
|
+
iv. rights protecting against unfair competition in regards to a Work,
|
|
51
|
+
subject to the limitations in paragraph 4(a), below;
|
|
52
|
+
v. rights protecting the extraction, dissemination, use and reuse of data
|
|
53
|
+
in a Work;
|
|
54
|
+
vi. database rights (such as those arising under Directive 96/9/EC of the
|
|
55
|
+
European Parliament and of the Council of 11 March 1996 on the legal
|
|
56
|
+
protection of databases, and under any national implementation
|
|
57
|
+
thereof, including any amended or successor version of such
|
|
58
|
+
directive); and
|
|
59
|
+
vii. other similar, equivalent or corresponding rights throughout the
|
|
60
|
+
world based on applicable law or treaty, and any national
|
|
61
|
+
implementations thereof.
|
|
62
|
+
|
|
63
|
+
2. Waiver. To the greatest extent permitted by, but not in contravention
|
|
64
|
+
of, applicable law, Affirmer hereby overtly, fully, permanently,
|
|
65
|
+
irrevocably and unconditionally waives, abandons, and surrenders all of
|
|
66
|
+
Affirmer's Copyright and Related Rights and associated claims and causes
|
|
67
|
+
of action, whether now known or unknown (including existing as well as
|
|
68
|
+
future claims and causes of action), in the Work (i) in all territories
|
|
69
|
+
worldwide, (ii) for the maximum duration provided by applicable law or
|
|
70
|
+
treaty (including future time extensions), (iii) in any current or future
|
|
71
|
+
medium and for any number of copies, and (iv) for any purpose whatsoever,
|
|
72
|
+
including without limitation commercial, advertising or promotional
|
|
73
|
+
purposes (the "Waiver"). Affirmer makes the Waiver for the benefit of each
|
|
74
|
+
member of the public at large and to the detriment of Affirmer's heirs and
|
|
75
|
+
successors, fully intending that such Waiver shall not be subject to
|
|
76
|
+
revocation, rescission, cancellation, termination, or any other legal or
|
|
77
|
+
equitable action to disrupt the quiet enjoyment of the Work by the public
|
|
78
|
+
as contemplated by Affirmer's express Statement of Purpose.
|
|
79
|
+
|
|
80
|
+
3. Public License Fallback. Should any part of the Waiver for any reason
|
|
81
|
+
be judged legally invalid or ineffective under applicable law, then the
|
|
82
|
+
Waiver shall be preserved to the maximum extent permitted taking into
|
|
83
|
+
account Affirmer's express Statement of Purpose. In addition, to the
|
|
84
|
+
extent the Waiver is so judged Affirmer hereby grants to each affected
|
|
85
|
+
person a royalty-free, non transferable, non sublicensable, non exclusive,
|
|
86
|
+
irrevocable and unconditional license to exercise Affirmer's Copyright and
|
|
87
|
+
Related Rights in the Work (i) in all territories worldwide, (ii) for the
|
|
88
|
+
maximum duration provided by applicable law or treaty (including future
|
|
89
|
+
time extensions), (iii) in any current or future medium and for any number
|
|
90
|
+
of copies, and (iv) for any purpose whatsoever, including without
|
|
91
|
+
limitation commercial, advertising or promotional purposes (the
|
|
92
|
+
"License"). The License shall be deemed effective as of the date CC0 was
|
|
93
|
+
applied by Affirmer to the Work. Should any part of the License for any
|
|
94
|
+
reason be judged legally invalid or ineffective under applicable law, such
|
|
95
|
+
partial invalidity or ineffectiveness shall not invalidate the remainder
|
|
96
|
+
of the License, and in such case Affirmer hereby affirms that he or she
|
|
97
|
+
will not (i) exercise any of his or her remaining Copyright and Related
|
|
98
|
+
Rights in the Work or (ii) assert any associated claims and causes of
|
|
99
|
+
action with respect to the Work, in either case contrary to Affirmer's
|
|
100
|
+
express Statement of Purpose.
|
|
101
|
+
|
|
102
|
+
4. Limitations and Disclaimers.
|
|
103
|
+
|
|
104
|
+
a. No trademark or patent rights held by Affirmer are waived, abandoned,
|
|
105
|
+
surrendered, licensed or otherwise affected by this document.
|
|
106
|
+
b. Affirmer offers the Work as-is and makes no representations or
|
|
107
|
+
warranties of any kind concerning the Work, express, implied,
|
|
108
|
+
statutory or otherwise, including without limitation warranties of
|
|
109
|
+
title, merchantability, fitness for a particular purpose, non
|
|
110
|
+
infringement, or the absence of latent or other defects, accuracy, or
|
|
111
|
+
the present or absence of errors, whether or not discoverable, all to
|
|
112
|
+
the greatest extent permissible under applicable law.
|
|
113
|
+
c. Affirmer disclaims responsibility for clearing rights of other persons
|
|
114
|
+
that may apply to the Work or any use thereof, including without
|
|
115
|
+
limitation any person's Copyright and Related Rights in the Work.
|
|
116
|
+
Further, Affirmer disclaims responsibility for obtaining any necessary
|
|
117
|
+
consents, permissions or other rights required for any use of the
|
|
118
|
+
Work.
|
|
119
|
+
d. Affirmer understands and acknowledges that Creative Commons is not a
|
|
120
|
+
party to this document and has no duty or obligation with respect to
|
|
121
|
+
this CC0 or use of the Work.
|