cognee 0.3.4.dev1__py3-none-any.whl → 0.3.4.dev3__py3-none-any.whl
This diff shows the changes between two publicly released versions of this package as they appear in their public registry. It is provided for informational purposes only.
- cognee/api/v1/cloud/routers/get_checks_router.py +1 -1
- cognee/api/v1/prune/prune.py +2 -2
- cognee/api/v1/sync/sync.py +16 -5
- cognee/base_config.py +15 -0
- cognee/infrastructure/databases/graph/kuzu/remote_kuzu_adapter.py +4 -1
- cognee/infrastructure/databases/vector/embeddings/OllamaEmbeddingEngine.py +4 -1
- cognee/infrastructure/files/storage/LocalFileStorage.py +50 -0
- cognee/infrastructure/files/storage/S3FileStorage.py +56 -9
- cognee/infrastructure/files/storage/StorageManager.py +18 -0
- cognee/modules/cloud/operations/check_api_key.py +4 -1
- cognee/modules/data/deletion/prune_system.py +5 -1
- cognee/modules/notebooks/methods/create_notebook.py +32 -0
- cognee/modules/notebooks/models/Notebook.py +206 -1
- cognee/modules/retrieval/temporal_retriever.py +2 -2
- cognee/modules/users/methods/create_user.py +5 -23
- cognee/root_dir.py +5 -0
- cognee/shared/cache.py +346 -0
- cognee/shared/utils.py +12 -0
- cognee/tasks/ingestion/save_data_item_to_storage.py +1 -0
- cognee/tests/unit/modules/users/test_tutorial_notebook_creation.py +399 -0
- {cognee-0.3.4.dev1.dist-info → cognee-0.3.4.dev3.dist-info}/METADATA +2 -1
- {cognee-0.3.4.dev1.dist-info → cognee-0.3.4.dev3.dist-info}/RECORD +26 -24
- {cognee-0.3.4.dev1.dist-info → cognee-0.3.4.dev3.dist-info}/WHEEL +0 -0
- {cognee-0.3.4.dev1.dist-info → cognee-0.3.4.dev3.dist-info}/entry_points.txt +0 -0
- {cognee-0.3.4.dev1.dist-info → cognee-0.3.4.dev3.dist-info}/licenses/LICENSE +0 -0
- {cognee-0.3.4.dev1.dist-info → cognee-0.3.4.dev3.dist-info}/licenses/NOTICE.md +0 -0
cognee/modules/users/methods/create_user.py
CHANGED

@@ -1,9 +1,10 @@
-from uuid import uuid4
+from uuid import UUID, uuid4
 from fastapi_users.exceptions import UserAlreadyExists
+from sqlalchemy.ext.asyncio import AsyncSession

 from cognee.infrastructure.databases.relational import get_relational_engine
-from cognee.modules.notebooks.
-from cognee.modules.notebooks.
+from cognee.modules.notebooks.models.Notebook import Notebook
+from cognee.modules.notebooks.methods.create_notebook import _create_tutorial_notebook
 from cognee.modules.users.exceptions import TenantNotFoundError
 from cognee.modules.users.get_user_manager import get_user_manager_context
 from cognee.modules.users.get_user_db import get_user_db_context

@@ -60,26 +61,7 @@ async def create_user(
         if auto_login:
             await session.refresh(user)

-            await
-                user_id=user.id,
-                notebook_name="Welcome to cognee 🧠",
-                cells=[
-                    NotebookCell(
-                        id=uuid4(),
-                        name="Welcome",
-                        content="Cognee is your toolkit for turning text into a structured knowledge graph, optionally enhanced by ontologies, and then querying it with advanced retrieval techniques. This notebook will guide you through a simple example.",
-                        type="markdown",
-                    ),
-                    NotebookCell(
-                        id=uuid4(),
-                        name="Example",
-                        content="",
-                        type="code",
-                    ),
-                ],
-                deletable=False,
-                session=session,
-            )
+            await _create_tutorial_notebook(user.id, session)

         return user
     except UserAlreadyExists as error:
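The inline notebook construction above now lives in cognee/modules/notebooks/methods/create_notebook.py (+32 lines in this release). That helper's body is not shown in this diff; judging from the call site and the removed code, its shape is roughly the following hypothetical stub (signature inferred, implementation elided):

from uuid import UUID

from sqlalchemy.ext.asyncio import AsyncSession


async def _create_tutorial_notebook(user_id: UUID, session: AsyncSession) -> None:
    # Hypothetical stub: builds the non-deletable "Welcome to cognee 🧠" notebook
    # (one markdown welcome cell, one empty code cell, as in the code removed above)
    # and persists it on the session passed in by create_user.
    ...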
cognee/root_dir.py
CHANGED

@@ -20,6 +20,11 @@ def ensure_absolute_path(path: str) -> str:
     """
     if path is None:
         raise ValueError("Path cannot be None")
+
+    # Check if it's an S3 URL - S3 URLs are absolute by definition
+    if path.startswith("s3://"):
+        return path
+
     path_obj = Path(path).expanduser()
     if path_obj.is_absolute():
         return str(path_obj.resolve())
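With this early return, ensure_absolute_path treats S3 URLs as already absolute instead of running them through Path handling. A quick illustration, not part of the diff (the bucket name and home directory are just examples):

from cognee.root_dir import ensure_absolute_path

# S3 URLs come back verbatim; local paths are still expanded and resolved.
print(ensure_absolute_path("s3://example-bucket/cognee/cache"))  # s3://example-bucket/cognee/cache
print(ensure_absolute_path("~/cognee_data"))                     # e.g. /home/alice/cognee_data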
cognee/shared/cache.py
ADDED

@@ -0,0 +1,346 @@
+"""
+Storage-aware cache management utilities for Cognee.
+
+This module provides cache functionality that works with both local and cloud storage
+backends (like S3) through the StorageManager abstraction.
+"""
+
+import hashlib
+import zipfile
+import asyncio
+from typing import Optional, Tuple
+import aiohttp
+import logging
+from io import BytesIO
+
+from cognee.base_config import get_base_config
+from cognee.infrastructure.files.storage.get_file_storage import get_file_storage
+from cognee.infrastructure.files.storage.StorageManager import StorageManager
+from cognee.shared.utils import create_secure_ssl_context
+
+logger = logging.getLogger(__name__)
+
+
+class StorageAwareCache:
+    """
+    A cache manager that works with different storage backends (local, S3, etc.)
+    """
+
+    def __init__(self, cache_subdir: str = "cache"):
+        """
+        Initialize the cache manager.
+
+        Args:
+            cache_subdir: Subdirectory name within the system root for caching
+        """
+        self.base_config = get_base_config()
+        # Since we're using cache_root_directory, don't add extra cache prefix
+        self.cache_base_path = ""
+        self.storage_manager: StorageManager = get_file_storage(
+            self.base_config.cache_root_directory
+        )
+
+        # Print absolute path
+        storage_path = self.storage_manager.storage.storage_path
+        if storage_path.startswith("s3://"):
+            absolute_path = storage_path  # S3 paths are already absolute
+        else:
+            import os
+
+            absolute_path = os.path.abspath(storage_path)
+        logger.info(f"Storage manager absolute path: {absolute_path}")
+
+    async def get_cache_dir(self) -> str:
+        """Get the base cache directory path."""
+        cache_path = self.cache_base_path or "."  # Use "." for root when cache_base_path is empty
+        await self.storage_manager.ensure_directory_exists(cache_path)
+        return cache_path
+
+    async def get_cache_subdir(self, name: str) -> str:
+        """Get a specific cache subdirectory."""
+        if self.cache_base_path:
+            cache_path = f"{self.cache_base_path}/{name}"
+        else:
+            cache_path = name
+        await self.storage_manager.ensure_directory_exists(cache_path)
+
+        # Return the absolute path based on storage system
+        if self.storage_manager.storage.storage_path.startswith("s3://"):
+            return cache_path
+        elif hasattr(self.storage_manager.storage, "storage_path"):
+            return f"{self.storage_manager.storage.storage_path}/{cache_path}"
+        else:
+            # Fallback for other storage types
+            return cache_path
+
+    async def delete_cache(self):
+        """Delete the entire cache directory."""
+        logger.info("Deleting cache...")
+        try:
+            await self.storage_manager.remove_all(self.cache_base_path)
+            logger.info("✓ Cache deleted successfully!")
+        except Exception as e:
+            logger.error(f"Error deleting cache: {e}")
+            raise
+
+    async def _is_cache_valid(self, cache_dir: str, version_or_hash: str) -> bool:
+        """Check if cached content is valid for the given version/hash."""
+        version_file = f"{cache_dir}/version.txt"
+
+        if not await self.storage_manager.file_exists(version_file):
+            return False
+
+        try:
+            async with self.storage_manager.open(version_file, "r") as f:
+                cached_version = (await asyncio.to_thread(f.read)).strip()
+                return cached_version == version_or_hash
+        except Exception as e:
+            logger.debug(f"Error checking cache validity: {e}")
+            return False
+
+    async def _clear_cache(self, cache_dir: str) -> None:
+        """Clear a cache directory."""
+        try:
+            await self.storage_manager.remove_all(cache_dir)
+        except Exception as e:
+            logger.debug(f"Error clearing cache directory {cache_dir}: {e}")
+
+    async def _check_remote_content_freshness(
+        self, url: str, cache_dir: str
+    ) -> Tuple[bool, Optional[str]]:
+        """
+        Check if remote content is fresher than cached version using HTTP headers.
+
+        Returns:
+            Tuple of (is_fresh: bool, new_identifier: Optional[str])
+        """
+        try:
+            # Make a HEAD request to check headers without downloading
+            ssl_context = create_secure_ssl_context()
+            connector = aiohttp.TCPConnector(ssl=ssl_context)
+            async with aiohttp.ClientSession(connector=connector) as session:
+                async with session.head(url, timeout=aiohttp.ClientTimeout(total=30)) as response:
+                    response.raise_for_status()
+
+                    # Try ETag first (most reliable)
+                    etag = response.headers.get("ETag", "").strip('"')
+                    last_modified = response.headers.get("Last-Modified", "")
+
+                    # Use ETag if available, otherwise Last-Modified
+                    remote_identifier = etag if etag else last_modified
+
+                    if not remote_identifier:
+                        logger.debug("No freshness headers available, cannot check for updates")
+                        return True, None  # Assume fresh if no headers
+
+                    # Check cached identifier
+                    identifier_file = f"{cache_dir}/content_id.txt"
+                    if await self.storage_manager.file_exists(identifier_file):
+                        async with self.storage_manager.open(identifier_file, "r") as f:
+                            cached_identifier = (await asyncio.to_thread(f.read)).strip()
+                        if cached_identifier == remote_identifier:
+                            logger.debug(f"Content is fresh (identifier: {remote_identifier[:20]}...)")
+                            return True, None
+                        else:
+                            logger.info(
+                                f"Content has changed (old: {cached_identifier[:20]}..., new: {remote_identifier[:20]}...)"
+                            )
+                            return False, remote_identifier
+                    else:
+                        # No cached identifier, treat as stale
+                        return False, remote_identifier
+
+        except Exception as e:
+            logger.debug(f"Could not check remote freshness: {e}")
+            return True, None  # Assume fresh if we can't check
+
+    async def download_and_extract_zip(
+        self, url: str, cache_subdir_name: str, version_or_hash: str, force: bool = False
+    ) -> str:
+        """
+        Download a zip file and extract it to cache directory with content freshness checking.
+
+        Args:
+            url: URL to download zip file from
+            cache_subdir_name: Name of the cache subdirectory
+            version_or_hash: Version string or content hash for cache validation
+            force: If True, re-download even if already cached
+
+        Returns:
+            Path to the cached directory
+        """
+        cache_dir = await self.get_cache_subdir(cache_subdir_name)
+
+        # Check if already cached and valid
+        if not force and await self._is_cache_valid(cache_dir, version_or_hash):
+            # Also check if remote content has changed
+            is_fresh, new_identifier = await self._check_remote_content_freshness(url, cache_dir)
+            if is_fresh:
+                logger.debug(f"Content already cached and fresh for version {version_or_hash}")
+                return cache_dir
+            else:
+                logger.info("Cached content is stale, updating...")
+
+        # Clear old cache if it exists
+        await self._clear_cache(cache_dir)
+
+        logger.info(f"Downloading content from {url}...")
+
+        # Download the zip file
+        zip_content = BytesIO()
+        etag = ""
+        last_modified = ""
+        ssl_context = create_secure_ssl_context()
+        connector = aiohttp.TCPConnector(ssl=ssl_context)
+        async with aiohttp.ClientSession(connector=connector) as session:
+            async with session.get(url, timeout=aiohttp.ClientTimeout(total=60)) as response:
+                response.raise_for_status()
+
+                # Extract headers before consuming response
+                etag = response.headers.get("ETag", "").strip('"')
+                last_modified = response.headers.get("Last-Modified", "")
+
+                # Read the response content
+                async for chunk in response.content.iter_chunked(8192):
+                    zip_content.write(chunk)
+        zip_content.seek(0)
+
+        # Extract the archive
+        await self.storage_manager.ensure_directory_exists(cache_dir)
+
+        # Extract files and store them using StorageManager
+        with zipfile.ZipFile(zip_content, "r") as zip_file:
+            for file_info in zip_file.infolist():
+                if file_info.is_dir():
+                    # Create directory
+                    dir_path = f"{cache_dir}/{file_info.filename}"
+                    await self.storage_manager.ensure_directory_exists(dir_path)
+                else:
+                    # Extract and store file
+                    file_data = zip_file.read(file_info.filename)
+                    file_path = f"{cache_dir}/{file_info.filename}"
+                    await self.storage_manager.store(file_path, BytesIO(file_data), overwrite=True)
+
+        # Write version info for future cache validation
+        version_file = f"{cache_dir}/version.txt"
+        await self.storage_manager.store(version_file, version_or_hash, overwrite=True)
+
+        # Store content identifier from response headers for freshness checking
+        content_identifier = etag if etag else last_modified
+
+        if content_identifier:
+            identifier_file = f"{cache_dir}/content_id.txt"
+            await self.storage_manager.store(identifier_file, content_identifier, overwrite=True)
+            logger.debug(f"Stored content identifier: {content_identifier[:20]}...")
+
+        logger.info("✓ Content downloaded and cached successfully!")
+        return cache_dir
+
+    async def file_exists(self, file_path: str) -> bool:
+        """Check if a file exists in cache storage."""
+        return await self.storage_manager.file_exists(file_path)
+
+    async def read_file(self, file_path: str, encoding: str = "utf-8"):
+        """Read a file from cache storage."""
+        return self.storage_manager.open(file_path, encoding=encoding)
+
+    async def list_files(self, directory_path: str):
+        """List files in a cache directory."""
+        try:
+            file_list = await self.storage_manager.list_files(directory_path)
+
+            # For S3 storage, convert relative paths to full S3 URLs
+            if self.storage_manager.storage.storage_path.startswith("s3://"):
+                full_paths = []
+                for file_path in file_list:
+                    full_s3_path = f"{self.storage_manager.storage.storage_path}/{file_path}"
+                    full_paths.append(full_s3_path)
+                return full_paths
+            else:
+                # For local storage, return absolute paths
+                storage_path = self.storage_manager.storage.storage_path
+                if not storage_path.startswith("/"):
+                    import os
+
+                    storage_path = os.path.abspath(storage_path)
+
+                full_paths = []
+                for file_path in file_list:
+                    if file_path.startswith("/"):
+                        full_paths.append(file_path)  # Already absolute
+                    else:
+                        full_paths.append(f"{storage_path}/{file_path}")
+                return full_paths
+
+        except Exception as e:
+            logger.debug(f"Error listing files in {directory_path}: {e}")
+            return []
+
+
+# Convenience functions that maintain API compatibility
+_cache_manager = None
+
+
+def get_cache_manager() -> StorageAwareCache:
+    """Get a singleton cache manager instance."""
+    global _cache_manager
+    if _cache_manager is None:
+        _cache_manager = StorageAwareCache()
+    return _cache_manager
+
+
+def generate_content_hash(url: str, additional_data: str = "") -> str:
+    """Generate a content hash from URL and optional additional data."""
+    content = f"{url}:{additional_data}"
+    return hashlib.md5(content.encode()).hexdigest()[:12]  # Short hash for readability
+
+
+# Async wrapper functions for backward compatibility
+async def delete_cache():
+    """Delete the Cognee cache directory."""
+    cache_manager = get_cache_manager()
+    await cache_manager.delete_cache()
+
+
+async def get_cognee_cache_dir() -> str:
+    """Get the base Cognee cache directory."""
+    cache_manager = get_cache_manager()
+    return await cache_manager.get_cache_dir()
+
+
+async def get_cache_subdir(name: str) -> str:
+    """Get a specific cache subdirectory."""
+    cache_manager = get_cache_manager()
+    return await cache_manager.get_cache_subdir(name)
+
+
+async def download_and_extract_zip(
+    url: str, cache_dir_name: str, version_or_hash: str, force: bool = False
+) -> str:
+    """Download a zip file and extract it to cache directory."""
+    cache_manager = get_cache_manager()
+    return await cache_manager.download_and_extract_zip(url, cache_dir_name, version_or_hash, force)
+
+
+async def get_tutorial_data_dir() -> str:
+    """Get the tutorial data cache directory."""
+    return await get_cache_subdir("tutorial_data")
+
+
+# Cache file operations
+async def cache_file_exists(file_path: str) -> bool:
+    """Check if a file exists in cache storage."""
+    cache_manager = get_cache_manager()
+    return await cache_manager.file_exists(file_path)
+
+
+async def read_cache_file(file_path: str, encoding: str = "utf-8"):
+    """Read a file from cache storage."""
+    cache_manager = get_cache_manager()
+    return await cache_manager.read_file(file_path, encoding)
+
+
+async def list_cache_files(directory_path: str):
+    """List files in a cache directory."""
+    cache_manager = get_cache_manager()
+    return await cache_manager.list_files(directory_path)
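The module-level wrappers delegate to a singleton StorageAwareCache, so callers never construct one themselves. A minimal usage sketch of the download-and-cache flow, not part of the diff (the archive URL below is a placeholder):

import asyncio

from cognee.shared.cache import (
    download_and_extract_zip,
    generate_content_hash,
    list_cache_files,
)


async def fetch_tutorial_data() -> None:
    url = "https://example.com/tutorial_data.zip"  # placeholder URL, not from this diff

    # Short, stable identifier derived from the URL, used for cache validation.
    content_hash = generate_content_hash(url)

    # Downloads on the first call, then serves from cache until the remote
    # ETag/Last-Modified changes (or force=True is passed).
    cache_dir = await download_and_extract_zip(url, "tutorial_data", content_hash)

    print("Cached under:", cache_dir)
    print("Files:", await list_cache_files(cache_dir))


asyncio.run(fetch_tutorial_data())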
cognee/shared/utils.py
CHANGED

@@ -1,6 +1,7 @@
 """This module contains utility functions for the cognee."""

 import os
+import ssl
 import requests
 from datetime import datetime, timezone
 import matplotlib.pyplot as plt

@@ -18,6 +19,17 @@ from cognee.infrastructure.databases.graph import get_graph_engine
 proxy_url = "https://test.prometh.ai"


+def create_secure_ssl_context() -> ssl.SSLContext:
+    """
+    Create a secure SSL context.
+
+    By default, use the system's certificate store.
+    If users report SSL issues, I'm keeping this open in case we need to switch to:
+    ssl.create_default_context(cafile=certifi.where())
+    """
+    return ssl.create_default_context()
+
+
 def get_entities(tagged_tokens):
     import nltk

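create_secure_ssl_context is the helper the new cache module feeds into aiohttp's TCPConnector; a minimal sketch of the same pattern, not part of the diff (the URL is a placeholder):

import asyncio

import aiohttp

from cognee.shared.utils import create_secure_ssl_context


async def head_status(url: str) -> int:
    # System trust store by default; swap in certifi's bundle if users hit SSL errors.
    connector = aiohttp.TCPConnector(ssl=create_secure_ssl_context())
    async with aiohttp.ClientSession(connector=connector) as session:
        async with session.head(url, timeout=aiohttp.ClientTimeout(total=30)) as response:
            return response.status


print(asyncio.run(head_status("https://example.com")))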
cognee/tasks/ingestion/save_data_item_to_storage.py
CHANGED

@@ -41,6 +41,7 @@ async def save_data_item_to_storage(data_item: Union[BinaryIO, str, Any]) -> str
         abs_path.is_file()
     except (OSError, ValueError):
         # In case file path is too long it's most likely not a relative path
+        abs_path = data_item
         logger.debug(f"Data item was too long to be a possible file path: {abs_path}")
         abs_path = Path("")
