cognee 0.3.4.dev3__py3-none-any.whl → 0.3.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cognee/api/client.py +16 -7
- cognee/api/health.py +5 -9
- cognee/api/v1/add/add.py +3 -1
- cognee/api/v1/cognify/cognify.py +44 -7
- cognee/api/v1/permissions/routers/get_permissions_router.py +8 -4
- cognee/api/v1/search/search.py +3 -0
- cognee/api/v1/ui/__init__.py +1 -1
- cognee/api/v1/ui/ui.py +215 -150
- cognee/api/v1/update/__init__.py +1 -0
- cognee/api/v1/update/routers/__init__.py +1 -0
- cognee/api/v1/update/routers/get_update_router.py +90 -0
- cognee/api/v1/update/update.py +100 -0
- cognee/base_config.py +5 -2
- cognee/cli/_cognee.py +28 -10
- cognee/cli/commands/delete_command.py +34 -2
- cognee/eval_framework/corpus_builder/task_getters/get_default_tasks_by_indices.py +2 -2
- cognee/eval_framework/evaluation/direct_llm_eval_adapter.py +3 -2
- cognee/eval_framework/modal_eval_dashboard.py +9 -1
- cognee/infrastructure/databases/graph/config.py +9 -9
- cognee/infrastructure/databases/graph/get_graph_engine.py +4 -21
- cognee/infrastructure/databases/graph/kuzu/adapter.py +60 -9
- cognee/infrastructure/databases/hybrid/neptune_analytics/NeptuneAnalyticsAdapter.py +3 -3
- cognee/infrastructure/databases/relational/config.py +4 -4
- cognee/infrastructure/databases/relational/create_relational_engine.py +11 -3
- cognee/infrastructure/databases/vector/chromadb/ChromaDBAdapter.py +7 -3
- cognee/infrastructure/databases/vector/config.py +7 -7
- cognee/infrastructure/databases/vector/create_vector_engine.py +7 -15
- cognee/infrastructure/databases/vector/embeddings/EmbeddingEngine.py +9 -0
- cognee/infrastructure/databases/vector/embeddings/FastembedEmbeddingEngine.py +11 -0
- cognee/infrastructure/databases/vector/embeddings/LiteLLMEmbeddingEngine.py +19 -2
- cognee/infrastructure/databases/vector/embeddings/OllamaEmbeddingEngine.py +11 -0
- cognee/infrastructure/databases/vector/embeddings/config.py +8 -0
- cognee/infrastructure/databases/vector/embeddings/get_embedding_engine.py +5 -0
- cognee/infrastructure/databases/vector/lancedb/LanceDBAdapter.py +11 -10
- cognee/infrastructure/databases/vector/pgvector/PGVectorAdapter.py +48 -38
- cognee/infrastructure/databases/vector/vector_db_interface.py +8 -4
- cognee/infrastructure/files/storage/S3FileStorage.py +15 -5
- cognee/infrastructure/files/storage/s3_config.py +1 -0
- cognee/infrastructure/files/utils/open_data_file.py +7 -14
- cognee/infrastructure/llm/LLMGateway.py +19 -117
- cognee/infrastructure/llm/config.py +28 -13
- cognee/infrastructure/llm/{structured_output_framework/litellm_instructor/extraction → extraction}/extract_categories.py +2 -1
- cognee/infrastructure/llm/{structured_output_framework/litellm_instructor/extraction → extraction}/extract_event_entities.py +3 -2
- cognee/infrastructure/llm/{structured_output_framework/litellm_instructor/extraction → extraction}/extract_summary.py +3 -2
- cognee/infrastructure/llm/{structured_output_framework/litellm_instructor/extraction → extraction}/knowledge_graph/extract_content_graph.py +2 -1
- cognee/infrastructure/llm/{structured_output_framework/litellm_instructor/extraction → extraction}/knowledge_graph/extract_event_graph.py +3 -2
- cognee/infrastructure/llm/prompts/read_query_prompt.py +3 -2
- cognee/infrastructure/llm/prompts/show_prompt.py +35 -0
- cognee/infrastructure/llm/prompts/test.txt +1 -0
- cognee/infrastructure/llm/structured_output_framework/baml/baml_client/__init__.py +2 -2
- cognee/infrastructure/llm/structured_output_framework/baml/baml_client/async_client.py +50 -397
- cognee/infrastructure/llm/structured_output_framework/baml/baml_client/inlinedbaml.py +2 -3
- cognee/infrastructure/llm/structured_output_framework/baml/baml_client/parser.py +8 -88
- cognee/infrastructure/llm/structured_output_framework/baml/baml_client/runtime.py +78 -0
- cognee/infrastructure/llm/structured_output_framework/baml/baml_client/stream_types.py +2 -99
- cognee/infrastructure/llm/structured_output_framework/baml/baml_client/sync_client.py +49 -401
- cognee/infrastructure/llm/structured_output_framework/baml/baml_client/type_builder.py +19 -882
- cognee/infrastructure/llm/structured_output_framework/baml/baml_client/type_map.py +2 -34
- cognee/infrastructure/llm/structured_output_framework/baml/baml_client/types.py +2 -107
- cognee/infrastructure/llm/structured_output_framework/baml/baml_src/acreate_structured_output.baml +26 -0
- cognee/infrastructure/llm/structured_output_framework/baml/baml_src/extraction/__init__.py +1 -2
- cognee/infrastructure/llm/structured_output_framework/baml/baml_src/extraction/acreate_structured_output.py +76 -0
- cognee/infrastructure/llm/structured_output_framework/baml/baml_src/extraction/create_dynamic_baml_type.py +122 -0
- cognee/infrastructure/llm/structured_output_framework/baml/baml_src/generators.baml +3 -3
- cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/anthropic/adapter.py +0 -32
- cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/gemini/adapter.py +107 -98
- cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/generic_llm_api/adapter.py +5 -6
- cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/get_llm_client.py +5 -6
- cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/llm_interface.py +0 -26
- cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/openai/adapter.py +17 -67
- cognee/infrastructure/llm/tokenizer/Gemini/adapter.py +8 -7
- cognee/infrastructure/llm/utils.py +4 -4
- cognee/infrastructure/loaders/LoaderEngine.py +5 -2
- cognee/infrastructure/loaders/external/__init__.py +7 -0
- cognee/infrastructure/loaders/external/advanced_pdf_loader.py +244 -0
- cognee/infrastructure/loaders/supported_loaders.py +7 -0
- cognee/modules/data/methods/create_authorized_dataset.py +9 -0
- cognee/modules/data/methods/get_authorized_dataset.py +1 -1
- cognee/modules/data/methods/get_authorized_dataset_by_name.py +11 -0
- cognee/modules/data/methods/get_deletion_counts.py +92 -0
- cognee/modules/graph/cognee_graph/CogneeGraph.py +1 -1
- cognee/modules/graph/utils/expand_with_nodes_and_edges.py +22 -8
- cognee/modules/graph/utils/retrieve_existing_edges.py +0 -2
- cognee/modules/ingestion/data_types/TextData.py +0 -1
- cognee/modules/notebooks/methods/create_notebook.py +3 -1
- cognee/modules/notebooks/methods/get_notebooks.py +27 -1
- cognee/modules/observability/get_observe.py +14 -0
- cognee/modules/observability/observers.py +1 -0
- cognee/modules/ontology/base_ontology_resolver.py +42 -0
- cognee/modules/ontology/get_default_ontology_resolver.py +41 -0
- cognee/modules/ontology/matching_strategies.py +53 -0
- cognee/modules/ontology/models.py +20 -0
- cognee/modules/ontology/ontology_config.py +24 -0
- cognee/modules/ontology/ontology_env_config.py +45 -0
- cognee/modules/ontology/rdf_xml/{OntologyResolver.py → RDFLibOntologyResolver.py} +20 -28
- cognee/modules/pipelines/layers/resolve_authorized_user_dataset.py +21 -24
- cognee/modules/pipelines/layers/resolve_authorized_user_datasets.py +3 -3
- cognee/modules/retrieval/code_retriever.py +2 -1
- cognee/modules/retrieval/context_providers/TripletSearchContextProvider.py +1 -4
- cognee/modules/retrieval/graph_completion_cot_retriever.py +6 -5
- cognee/modules/retrieval/graph_completion_retriever.py +0 -3
- cognee/modules/retrieval/insights_retriever.py +1 -1
- cognee/modules/retrieval/jaccard_retrival.py +60 -0
- cognee/modules/retrieval/lexical_retriever.py +123 -0
- cognee/modules/retrieval/natural_language_retriever.py +2 -1
- cognee/modules/retrieval/temporal_retriever.py +3 -2
- cognee/modules/retrieval/utils/brute_force_triplet_search.py +2 -12
- cognee/modules/retrieval/utils/completion.py +4 -7
- cognee/modules/search/methods/get_search_type_tools.py +7 -0
- cognee/modules/search/methods/no_access_control_search.py +1 -1
- cognee/modules/search/methods/search.py +32 -13
- cognee/modules/search/types/SearchType.py +1 -0
- cognee/modules/users/methods/create_user.py +0 -2
- cognee/modules/users/permissions/methods/authorized_give_permission_on_datasets.py +12 -0
- cognee/modules/users/permissions/methods/check_permission_on_dataset.py +11 -0
- cognee/modules/users/permissions/methods/get_all_user_permission_datasets.py +10 -0
- cognee/modules/users/permissions/methods/get_document_ids_for_user.py +10 -0
- cognee/modules/users/permissions/methods/get_principal.py +9 -0
- cognee/modules/users/permissions/methods/get_principal_datasets.py +11 -0
- cognee/modules/users/permissions/methods/get_role.py +10 -0
- cognee/modules/users/permissions/methods/get_specific_user_permission_datasets.py +3 -3
- cognee/modules/users/permissions/methods/get_tenant.py +9 -0
- cognee/modules/users/permissions/methods/give_default_permission_to_role.py +9 -0
- cognee/modules/users/permissions/methods/give_default_permission_to_tenant.py +9 -0
- cognee/modules/users/permissions/methods/give_default_permission_to_user.py +9 -0
- cognee/modules/users/permissions/methods/give_permission_on_dataset.py +10 -0
- cognee/modules/users/roles/methods/add_user_to_role.py +11 -0
- cognee/modules/users/roles/methods/create_role.py +12 -1
- cognee/modules/users/tenants/methods/add_user_to_tenant.py +12 -0
- cognee/modules/users/tenants/methods/create_tenant.py +12 -1
- cognee/modules/visualization/cognee_network_visualization.py +13 -9
- cognee/shared/data_models.py +0 -1
- cognee/shared/utils.py +0 -32
- cognee/tasks/chunk_naive_llm_classifier/chunk_naive_llm_classifier.py +2 -2
- cognee/tasks/codingagents/coding_rule_associations.py +3 -2
- cognee/tasks/entity_completion/entity_extractors/llm_entity_extractor.py +3 -2
- cognee/tasks/graph/cascade_extract/utils/extract_content_nodes_and_relationship_names.py +3 -2
- cognee/tasks/graph/cascade_extract/utils/extract_edge_triplets.py +3 -2
- cognee/tasks/graph/cascade_extract/utils/extract_nodes.py +3 -2
- cognee/tasks/graph/extract_graph_from_code.py +2 -2
- cognee/tasks/graph/extract_graph_from_data.py +55 -12
- cognee/tasks/graph/extract_graph_from_data_v2.py +16 -4
- cognee/tasks/ingestion/migrate_relational_database.py +132 -41
- cognee/tasks/ingestion/resolve_data_directories.py +4 -1
- cognee/tasks/schema/ingest_database_schema.py +134 -0
- cognee/tasks/schema/models.py +40 -0
- cognee/tasks/storage/index_data_points.py +1 -1
- cognee/tasks/storage/index_graph_edges.py +3 -1
- cognee/tasks/summarization/summarize_code.py +2 -2
- cognee/tasks/summarization/summarize_text.py +2 -2
- cognee/tasks/temporal_graph/enrich_events.py +2 -2
- cognee/tasks/temporal_graph/extract_events_and_entities.py +2 -2
- cognee/tests/cli_tests/cli_unit_tests/test_cli_commands.py +13 -4
- cognee/tests/cli_tests/cli_unit_tests/test_cli_edge_cases.py +13 -3
- cognee/tests/test_advanced_pdf_loader.py +141 -0
- cognee/tests/test_chromadb.py +40 -0
- cognee/tests/test_cognee_server_start.py +6 -1
- cognee/tests/test_data/Quantum_computers.txt +9 -0
- cognee/tests/test_lancedb.py +211 -0
- cognee/tests/test_pgvector.py +40 -0
- cognee/tests/test_relational_db_migration.py +76 -0
- cognee/tests/unit/infrastructure/databases/test_index_graph_edges.py +2 -1
- cognee/tests/unit/modules/ontology/test_ontology_adapter.py +330 -13
- cognee/tests/unit/modules/retrieval/graph_completion_retriever_context_extension_test.py +0 -4
- cognee/tests/unit/modules/retrieval/graph_completion_retriever_cot_test.py +0 -4
- cognee/tests/unit/modules/retrieval/graph_completion_retriever_test.py +0 -4
- {cognee-0.3.4.dev3.dist-info → cognee-0.3.5.dist-info}/METADATA +92 -96
- {cognee-0.3.4.dev3.dist-info → cognee-0.3.5.dist-info}/RECORD +176 -162
- distributed/pyproject.toml +0 -1
- cognee/infrastructure/data/utils/extract_keywords.py +0 -48
- cognee/infrastructure/databases/hybrid/falkordb/FalkorDBAdapter.py +0 -1227
- cognee/infrastructure/llm/structured_output_framework/baml/baml_src/extract_categories.baml +0 -109
- cognee/infrastructure/llm/structured_output_framework/baml/baml_src/extract_content_graph.baml +0 -343
- cognee/infrastructure/llm/structured_output_framework/baml/baml_src/extraction/extract_categories.py +0 -0
- cognee/infrastructure/llm/structured_output_framework/baml/baml_src/extraction/extract_summary.py +0 -89
- cognee/infrastructure/llm/structured_output_framework/baml/baml_src/extraction/knowledge_graph/__init__.py +0 -0
- cognee/infrastructure/llm/structured_output_framework/baml/baml_src/extraction/knowledge_graph/extract_content_graph.py +0 -44
- cognee/tasks/graph/infer_data_ontology.py +0 -309
- cognee/tests/test_falkordb.py +0 -174
- /cognee/infrastructure/llm/{structured_output_framework/litellm_instructor/extraction → extraction}/__init__.py +0 -0
- /cognee/infrastructure/llm/{structured_output_framework/litellm_instructor/extraction → extraction}/knowledge_graph/__init__.py +0 -0
- /cognee/infrastructure/llm/{structured_output_framework/litellm_instructor/extraction → extraction}/texts.json +0 -0
- {cognee-0.3.4.dev3.dist-info → cognee-0.3.5.dist-info}/WHEEL +0 -0
- {cognee-0.3.4.dev3.dist-info → cognee-0.3.5.dist-info}/entry_points.txt +0 -0
- {cognee-0.3.4.dev3.dist-info → cognee-0.3.5.dist-info}/licenses/LICENSE +0 -0
- {cognee-0.3.4.dev3.dist-info → cognee-0.3.5.dist-info}/licenses/NOTICE.md +0 -0
cognee/api/v1/ui/ui.py
CHANGED
```diff
@@ -1,5 +1,7 @@
 import os
+import platform
 import signal
+import socket
 import subprocess
 import threading
 import time
@@ -7,7 +9,7 @@ import webbrowser
 import zipfile
 import requests
 from pathlib import Path
-from typing import Callable, Optional, Tuple
+from typing import Callable, Optional, Tuple, List
 import tempfile
 import shutil

@@ -17,6 +19,80 @@ from cognee.version import get_cognee_version
 logger = get_logger()


+def _stream_process_output(
+    process: subprocess.Popen, stream_name: str, prefix: str, color_code: str = ""
+) -> threading.Thread:
+    """
+    Stream output from a process with a prefix to identify the source.
+
+    Args:
+        process: The subprocess to monitor
+        stream_name: 'stdout' or 'stderr'
+        prefix: Text prefix for each line (e.g., '[BACKEND]', '[FRONTEND]')
+        color_code: ANSI color code for the prefix (optional)
+
+    Returns:
+        Thread that handles the streaming
+    """
+
+    def stream_reader():
+        stream = getattr(process, stream_name)
+        if stream is None:
+            return
+
+        reset_code = "\033[0m" if color_code else ""
+
+        try:
+            for line in iter(stream.readline, b""):
+                if line:
+                    line_text = line.decode("utf-8").rstrip()
+                    if line_text:
+                        print(f"{color_code}{prefix}{reset_code} {line_text}", flush=True)
+        except Exception:
+            pass
+        finally:
+            if stream:
+                stream.close()
+
+    thread = threading.Thread(target=stream_reader, daemon=True)
+    thread.start()
+    return thread
+
+
+def _is_port_available(port: int) -> bool:
+    """
+    Check if a port is available on localhost.
+    Returns True if the port is available, False otherwise.
+    """
+    try:
+        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
+            sock.settimeout(1)  # 1 second timeout
+            result = sock.connect_ex(("localhost", port))
+            return result != 0  # Port is available if connection fails
+    except Exception:
+        return False
+
+
+def _check_required_ports(ports_to_check: List[Tuple[int, str]]) -> Tuple[bool, List[str]]:
+    """
+    Check if all required ports are available on localhost.
+
+    Args:
+        ports_to_check: List of (port, service_name) tuples
+
+    Returns:
+        Tuple of (all_available: bool, unavailable_services: List[str])
+    """
+    unavailable = []
+
+    for port, service_name in ports_to_check:
+        if not _is_port_available(port):
+            unavailable.append(f"{service_name} (port {port})")
+            logger.error(f"Port {port} is already in use for {service_name}")
+
+    return len(unavailable) == 0, unavailable
+
+
 def normalize_version_for_comparison(version: str) -> str:
     """
     Normalize version string for comparison.
@@ -214,6 +290,7 @@ def check_node_npm() -> tuple[bool, str]:
     Check if Node.js and npm are available.
     Returns (is_available, error_message)
     """
+
     try:
         # Check Node.js
         result = subprocess.run(["node", "--version"], capture_output=True, text=True, timeout=10)
@@ -223,8 +300,17 @@ def check_node_npm() -> tuple[bool, str]:
         node_version = result.stdout.strip()
         logger.debug(f"Found Node.js version: {node_version}")

-        # Check npm
-        …
+        # Check npm - handle Windows PowerShell scripts
+        if platform.system() == "Windows":
+            # On Windows, npm might be a PowerShell script, so we need to use shell=True
+            result = subprocess.run(
+                ["npm", "--version"], capture_output=True, text=True, timeout=10, shell=True
+            )
+        else:
+            result = subprocess.run(
+                ["npm", "--version"], capture_output=True, text=True, timeout=10
+            )
+
         if result.returncode != 0:
             return False, "npm is not installed or not in PATH"

@@ -246,6 +332,7 @@ def install_frontend_dependencies(frontend_path: Path) -> bool:
     Install frontend dependencies if node_modules doesn't exist.
     This is needed for both development and downloaded frontends since both use npm run dev.
     """
+
     node_modules = frontend_path / "node_modules"
     if node_modules.exists():
         logger.debug("Frontend dependencies already installed")
@@ -254,13 +341,24 @@ def install_frontend_dependencies(frontend_path: Path) -> bool:
     logger.info("Installing frontend dependencies (this may take a few minutes)...")

     try:
-        …
+        # Use shell=True on Windows for npm commands
+        if platform.system() == "Windows":
+            result = subprocess.run(
+                ["npm", "install"],
+                cwd=frontend_path,
+                capture_output=True,
+                text=True,
+                timeout=300,  # 5 minutes timeout
+                shell=True,
+            )
+        else:
+            result = subprocess.run(
+                ["npm", "install"],
+                cwd=frontend_path,
+                capture_output=True,
+                text=True,
+                timeout=300,  # 5 minutes timeout
+            )

         if result.returncode == 0:
             logger.info("Frontend dependencies installed successfully")
@@ -327,55 +425,111 @@ def prompt_user_for_download() -> bool:

 def start_ui(
     pid_callback: Callable[[int], None],
-    host: str = "localhost",
     port: int = 3000,
     open_browser: bool = True,
     auto_download: bool = False,
     start_backend: bool = False,
-    backend_host: str = "localhost",
     backend_port: int = 8000,
+    start_mcp: bool = False,
+    mcp_port: int = 8001,
 ) -> Optional[subprocess.Popen]:
     """
-    Start the cognee frontend UI server, optionally with the backend API server.
+    Start the cognee frontend UI server, optionally with the backend API server and MCP server.

     This function will:
     1. Optionally start the cognee backend API server
-    2. …
-    3. …
-    4. …
-    5. …
-    6. …
+    2. Optionally start the cognee MCP server
+    3. Find the cognee-frontend directory (development) or download it (pip install)
+    4. Check if Node.js and npm are available (for development mode)
+    5. Install dependencies if needed (development mode)
+    6. Start the frontend server
+    7. Optionally open the browser

     Args:
         pid_callback: Callback to notify with PID of each spawned process
-        host: Host to bind the frontend server to (default: localhost)
         port: Port to run the frontend server on (default: 3000)
         open_browser: Whether to open the browser automatically (default: True)
         auto_download: If True, download frontend without prompting (default: False)
         start_backend: If True, also start the cognee API backend server (default: False)
-        backend_host: Host to bind the backend server to (default: localhost)
         backend_port: Port to run the backend server on (default: 8000)
+        start_mcp: If True, also start the cognee MCP server (default: False)
+        mcp_port: Port to run the MCP server on (default: 8001)

     Returns:
         subprocess.Popen object representing the running frontend server, or None if failed
-        Note: If backend …
-        when the frontend process is terminated.
+        Note: If backend and/or MCP server are started, they run in separate processes
+        that will be cleaned up when the frontend process is terminated.

     Example:
         >>> import cognee
+        >>> def dummy_callback(pid): pass
         >>> # Start just the frontend
-        >>> server = cognee.start_ui()
+        >>> server = cognee.start_ui(dummy_callback)
         >>>
         >>> # Start both frontend and backend
-        >>> server = cognee.start_ui(start_backend=True)
+        >>> server = cognee.start_ui(dummy_callback, start_backend=True)
         >>> # UI will be available at http://localhost:3000
         >>> # API will be available at http://localhost:8000
-        >>> …
+        >>>
+        >>> # Start frontend with MCP server
+        >>> server = cognee.start_ui(dummy_callback, start_mcp=True)
+        >>> # UI will be available at http://localhost:3000
+        >>> # MCP server will be available at http://127.0.0.1:8001/sse
+        >>> # To stop all servers later:
         >>> server.terminate()
     """
     logger.info("Starting cognee UI...")
+
+    ports_to_check = [(port, "Frontend UI")]
+
+    if start_backend:
+        ports_to_check.append((backend_port, "Backend API"))
+
+    if start_mcp:
+        ports_to_check.append((mcp_port, "MCP Server"))
+
+    logger.info("Checking port availability...")
+    all_ports_available, unavailable_services = _check_required_ports(ports_to_check)
+
+    if not all_ports_available:
+        error_msg = f"Cannot start cognee UI: The following services have ports already in use: {', '.join(unavailable_services)}"
+        logger.error(error_msg)
+        logger.error("Please stop the conflicting services or change the port configuration.")
+        return None
+
+    logger.info("✓ All required ports are available")
     backend_process = None

+    if start_mcp:
+        logger.info("Starting Cognee MCP server with Docker...")
+        cwd = os.getcwd()
+        env_file = os.path.join(cwd, ".env")
+        try:
+            mcp_process = subprocess.Popen(
+                [
+                    "docker",
+                    "run",
+                    "-p",
+                    f"{mcp_port}:8000",
+                    "--rm",
+                    "--env-file",
+                    env_file,
+                    "-e",
+                    "TRANSPORT_MODE=sse",
+                    "cognee/cognee-mcp:main",
+                ],
+                stdout=subprocess.PIPE,
+                stderr=subprocess.PIPE,
+                preexec_fn=os.setsid if hasattr(os, "setsid") else None,
+            )
+
+            _stream_process_output(mcp_process, "stdout", "[MCP]", "\033[34m")  # Blue
+            _stream_process_output(mcp_process, "stderr", "[MCP]", "\033[34m")  # Blue
+
+            pid_callback(mcp_process.pid)
+            logger.info(f"✓ Cognee MCP server starting on http://127.0.0.1:{mcp_port}/sse")
+        except Exception as e:
+            logger.error(f"Failed to start MCP server with Docker: {str(e)}")
     # Start backend server if requested
     if start_backend:
         logger.info("Starting cognee backend API server...")
@@ -389,16 +543,19 @@ def start_ui(
                 "uvicorn",
                 "cognee.api.client:app",
                 "--host",
-                …
+                "localhost",
                 "--port",
                 str(backend_port),
             ],
-            …
-            stderr=None,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.PIPE,
             preexec_fn=os.setsid if hasattr(os, "setsid") else None,
         )

+        # Start threads to stream backend output with prefix
+        _stream_process_output(backend_process, "stdout", "[BACKEND]", "\033[32m")  # Green
+        _stream_process_output(backend_process, "stderr", "[BACKEND]", "\033[32m")  # Green
+
         pid_callback(backend_process.pid)

         # Give the backend a moment to start
@@ -408,7 +565,7 @@ def start_ui(
             logger.error("Backend server failed to start - process exited early")
             return None

-        logger.info(f"✓ Backend API started at http://…
+        logger.info(f"✓ Backend API started at http://localhost:{backend_port}")

     except Exception as e:
         logger.error(f"Failed to start backend server: {str(e)}")
@@ -453,24 +610,38 @@ def start_ui(

     # Prepare environment variables
     env = os.environ.copy()
-    env["HOST"] = …
+    env["HOST"] = "localhost"
     env["PORT"] = str(port)

     # Start the development server
-    logger.info(f"Starting frontend server at http://…
+    logger.info(f"Starting frontend server at http://localhost:{port}")
     logger.info("This may take a moment to compile and start...")

     try:
         # Create frontend in its own process group for clean termination
-        …
+        # Use shell=True on Windows for npm commands
+        if platform.system() == "Windows":
+            process = subprocess.Popen(
+                ["npm", "run", "dev"],
+                cwd=frontend_path,
+                env=env,
+                stdout=subprocess.PIPE,
+                stderr=subprocess.PIPE,
+                shell=True,
+            )
+        else:
+            process = subprocess.Popen(
+                ["npm", "run", "dev"],
+                cwd=frontend_path,
+                env=env,
+                stdout=subprocess.PIPE,
+                stderr=subprocess.PIPE,
+                preexec_fn=os.setsid if hasattr(os, "setsid") else None,
+            )
+
+        # Start threads to stream frontend output with prefix
+        _stream_process_output(process, "stdout", "[FRONTEND]", "\033[33m")  # Yellow
+        _stream_process_output(process, "stderr", "[FRONTEND]", "\033[33m")  # Yellow

         pid_callback(process.pid)

@@ -479,10 +650,7 @@ def start_ui(

         # Check if process is still running
         if process.poll() is not None:
-            …
-            logger.error("Frontend server failed to start:")
-            logger.error(f"stdout: {stdout}")
-            logger.error(f"stderr: {stderr}")
+            logger.error("Frontend server failed to start - check the logs above for details")
             return None

         # Open browser if requested
@@ -491,7 +659,7 @@ def start_ui(
             def open_browser_delayed():
                 time.sleep(5)  # Give Next.js time to fully start
                 try:
-                    webbrowser.open(f"http://…
+                    webbrowser.open(f"http://localhost:{port}")
                 except Exception as e:
                     logger.warning(f"Could not open browser automatically: {e}")

@@ -499,13 +667,9 @@ def start_ui(
             browser_thread.start()

         logger.info("✓ Cognee UI is starting up...")
-        logger.info(f"✓ Open your browser to: http://…
+        logger.info(f"✓ Open your browser to: http://localhost:{port}")
         logger.info("✓ The UI will be available once Next.js finishes compiling")

-        # Store backend process reference in the frontend process for cleanup
-        if backend_process:
-            process._cognee_backend_process = backend_process
-
         return process

     except Exception as e:
@@ -523,102 +687,3 @@ def start_ui(
         except (OSError, ProcessLookupError):
             pass
         return None
-
-
-def stop_ui(process: subprocess.Popen) -> bool:
-    """
-    Stop a running UI server process and backend process (if started), along with all their children.
-
-    Args:
-        process: The subprocess.Popen object returned by start_ui()
-
-    Returns:
-        bool: True if stopped successfully, False otherwise
-    """
-    if not process:
-        return False
-
-    success = True
-
-    try:
-        # First, stop the backend process if it exists
-        backend_process = getattr(process, "_cognee_backend_process", None)
-        if backend_process:
-            logger.info("Stopping backend server...")
-            try:
-                backend_process.terminate()
-                try:
-                    backend_process.wait(timeout=5)
-                    logger.info("Backend server stopped gracefully")
-                except subprocess.TimeoutExpired:
-                    logger.warning("Backend didn't terminate gracefully, forcing kill")
-                    backend_process.kill()
-                    backend_process.wait()
-                    logger.info("Backend server stopped")
-            except Exception as e:
-                logger.error(f"Error stopping backend server: {str(e)}")
-                success = False
-
-        # Now stop the frontend process
-        logger.info("Stopping frontend server...")
-        # Try to terminate the process group (includes child processes like Next.js)
-        if hasattr(os, "killpg"):
-            try:
-                # Kill the entire process group
-                os.killpg(os.getpgid(process.pid), signal.SIGTERM)
-                logger.debug("Sent SIGTERM to process group")
-            except (OSError, ProcessLookupError):
-                # Fall back to terminating just the main process
-                process.terminate()
-                logger.debug("Terminated main process only")
-        else:
-            process.terminate()
-            logger.debug("Terminated main process (Windows)")
-
-        try:
-            process.wait(timeout=10)
-            logger.info("Frontend server stopped gracefully")
-        except subprocess.TimeoutExpired:
-            logger.warning("Frontend didn't terminate gracefully, forcing kill")
-
-            # Force kill the process group
-            if hasattr(os, "killpg"):
-                try:
-                    os.killpg(os.getpgid(process.pid), signal.SIGKILL)
-                    logger.debug("Sent SIGKILL to process group")
-                except (OSError, ProcessLookupError):
-                    process.kill()
-                    logger.debug("Force killed main process only")
-            else:
-                process.kill()
-                logger.debug("Force killed main process (Windows)")
-
-            process.wait()
-
-        if success:
-            logger.info("UI servers stopped successfully")
-
-        return success
-
-    except Exception as e:
-        logger.error(f"Error stopping UI servers: {str(e)}")
-        return False
-
-
-# Convenience function similar to DuckDB's approach
-def ui() -> Optional[subprocess.Popen]:
-    """
-    Convenient alias for start_ui() with default parameters.
-    Similar to how DuckDB provides simple ui() function.
-    """
-    return start_ui()
-
-
-if __name__ == "__main__":
-    # Test the UI startup
-    server = start_ui()
-    if server:
-        try:
-            input("Press Enter to stop the server...")
-        finally:
-            stop_ui(server)
```
cognee/api/v1/update/__init__.py
ADDED

```diff
@@ -0,0 +1 @@
+from .update import update
```
cognee/api/v1/update/routers/__init__.py
ADDED

```diff
@@ -0,0 +1 @@
+from .get_update_router import get_update_router
```
cognee/api/v1/update/routers/get_update_router.py
ADDED

```diff
@@ -0,0 +1,90 @@
+from fastapi.responses import JSONResponse
+from fastapi import File, UploadFile, Depends, Form
+from typing import Optional
+from fastapi import APIRouter
+from fastapi.encoders import jsonable_encoder
+from typing import List
+from uuid import UUID
+from cognee.shared.logging_utils import get_logger
+from cognee.modules.users.models import User
+from cognee.modules.users.methods import get_authenticated_user
+from cognee.shared.utils import send_telemetry
+from cognee.modules.pipelines.models.PipelineRunInfo import (
+    PipelineRunErrored,
+)
+
+logger = get_logger()
+
+
+def get_update_router() -> APIRouter:
+    router = APIRouter()
+
+    @router.patch("", response_model=None)
+    async def update(
+        data_id: UUID,
+        dataset_id: UUID,
+        data: List[UploadFile] = File(default=None),
+        node_set: Optional[List[str]] = Form(default=[""], example=[""]),
+        user: User = Depends(get_authenticated_user),
+    ):
+        """
+        Update data in a dataset.
+
+        This endpoint updates existing documents in a specified dataset by providing the data_id of the existing document
+        to update and the new document with the changes as the data.
+        The document is updated, analyzed, and the changes are integrated into the knowledge graph.
+
+        ## Request Parameters
+        - **data_id** (UUID): UUID of the document to update in Cognee memory
+        - **data** (List[UploadFile]): List of files to upload.
+        - **datasetId** (Optional[UUID]): UUID of an already existing dataset
+        - **node_set** Optional[list[str]]: List of node identifiers for graph organization and access control.
+          Used for grouping related data points in the knowledge graph.
+
+        ## Response
+        Returns information about the add operation containing:
+        - Status of the operation
+        - Details about the processed data
+        - Any relevant metadata from the ingestion process
+
+        ## Error Codes
+        - **400 Bad Request**: Neither datasetId nor datasetName provided
+        - **409 Conflict**: Error during add operation
+        - **403 Forbidden**: User doesn't have permission to add to dataset
+
+        ## Notes
+        - To add data to datasets not owned by the user, use dataset_id (when ENABLE_BACKEND_ACCESS_CONTROL is set to True)
+        - datasetId value can only be the UUID of an already existing dataset
+        """
+        send_telemetry(
+            "Update API Endpoint Invoked",
+            user.id,
+            additional_properties={
+                "endpoint": "PATCH /v1/update",
+                "dataset_id": str(dataset_id),
+                "data_id": str(data_id),
+                "node_set": str(node_set),
+            },
+        )
+
+        from cognee.api.v1.update import update as cognee_update
+
+        try:
+            update_run = await cognee_update(
+                data_id=data_id,
+                data=data,
+                dataset_id=dataset_id,
+                user=user,
+                node_set=node_set,
+            )
+
+            # If any cognify run errored return JSONResponse with proper error status code
+            if any(isinstance(v, PipelineRunErrored) for v in update_run.values()):
+                return JSONResponse(status_code=420, content=jsonable_encoder(update_run))
+            return update_run
+
+        except Exception as error:
+            logger.error(f"Error during deletion by data_id: {str(error)}")
+            return JSONResponse(status_code=409, content={"error": str(error)})
+
+    return router
```