codegraphcontext 0.4.5__py3-none-any.whl → 0.4.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35) hide show
  1. codegraphcontext/cli/config_manager.py +33 -3
  2. codegraphcontext/cli/setup_wizard.py +20 -12
  3. codegraphcontext/core/__init__.py +25 -3
  4. codegraphcontext/core/cgc_bundle.py +3 -8
  5. codegraphcontext/core/database_falkordb.py +64 -19
  6. codegraphcontext/core/database_kuzu.py +31 -2
  7. codegraphcontext/core/database_nornic.py +204 -0
  8. codegraphcontext/server.py +116 -3
  9. codegraphcontext/tools/code_finder.py +86 -66
  10. codegraphcontext/tools/graph_builder.py +37 -7
  11. codegraphcontext/tools/handlers/analysis_handlers.py +54 -17
  12. codegraphcontext/tools/handlers/management_handlers.py +14 -3
  13. codegraphcontext/tools/handlers/query_handlers.py +14 -4
  14. codegraphcontext/tools/indexing/persistence/writer.py +24 -4
  15. codegraphcontext/tools/indexing/pre_scan.py +12 -2
  16. codegraphcontext/tools/indexing/resolution/calls.py +51 -1
  17. codegraphcontext/tools/indexing/schema.py +3 -3
  18. codegraphcontext/tools/languages/css.py +84 -0
  19. codegraphcontext/tools/languages/html.py +123 -0
  20. codegraphcontext/tools/languages/java.py +58 -29
  21. codegraphcontext/tools/languages/javascript.py +38 -0
  22. codegraphcontext/tools/languages/lua.py +410 -0
  23. codegraphcontext/tools/languages/php.py +26 -18
  24. codegraphcontext/tools/languages/swift.py +51 -4
  25. codegraphcontext/tools/languages/typescript.py +1 -1
  26. codegraphcontext/tools/tree_sitter_parser.py +12 -0
  27. codegraphcontext/utils/git_utils.py +25 -0
  28. codegraphcontext/utils/tool_limits.py +85 -0
  29. codegraphcontext/utils/tree_sitter_manager.py +3 -0
  30. {codegraphcontext-0.4.5.dist-info → codegraphcontext-0.4.6.dist-info}/METADATA +60 -33
  31. {codegraphcontext-0.4.5.dist-info → codegraphcontext-0.4.6.dist-info}/RECORD +35 -29
  32. {codegraphcontext-0.4.5.dist-info → codegraphcontext-0.4.6.dist-info}/WHEEL +0 -0
  33. {codegraphcontext-0.4.5.dist-info → codegraphcontext-0.4.6.dist-info}/entry_points.txt +0 -0
  34. {codegraphcontext-0.4.5.dist-info → codegraphcontext-0.4.6.dist-info}/licenses/LICENSE +0 -0
  35. {codegraphcontext-0.4.5.dist-info → codegraphcontext-0.4.6.dist-info}/top_level.txt +0 -0
@@ -19,7 +19,10 @@ CONFIG_DIR = Path.home() / ".codegraphcontext"
19
19
  CONFIG_FILE = CONFIG_DIR / ".env"
20
20
 
21
21
  # Database credential keys (stored in same .env file but not managed as config)
22
- DATABASE_CREDENTIAL_KEYS = {"NEO4J_URI", "NEO4J_USERNAME", "NEO4J_PASSWORD", "NEO4J_DATABASE"}
22
+ DATABASE_CREDENTIAL_KEYS = {
23
+ "NEO4J_URI", "NEO4J_USERNAME", "NEO4J_PASSWORD", "NEO4J_DATABASE",
24
+ "NORNIC_URI", "NORNIC_USERNAME", "NORNIC_PASSWORD", "NORNIC_DATABASE"
25
+ }
23
26
 
24
27
  # Default configuration values
25
28
  DEFAULT_CONFIG = {
@@ -47,11 +50,16 @@ DEFAULT_CONFIG = {
47
50
  "SCIP_INDEXER": "false",
48
51
  "SCIP_LANGUAGES": "python,typescript,go,rust,java",
49
52
  "SKIP_EXTERNAL_RESOLUTION": "false",
53
+ # 0 = unlimited; any positive integer caps MCP tool response size.
54
+ "MAX_TOOL_RESPONSE_TOKENS": "0",
55
+ # JSON object mapping tool names to integer result-count limits.
56
+ # Example: {"find_code": 20, "analyze_code_relationships": 10, "find_dead_code": 30}
57
+ "TOOL_RESULT_LIMITS": "{}",
50
58
  }
51
59
 
52
60
  # Configuration key descriptions
53
61
  CONFIG_DESCRIPTIONS = {
54
- "DEFAULT_DATABASE": "Default database backend (neo4j|falkordb|kuzudb)",
62
+ "DEFAULT_DATABASE": "Default database backend (neo4j|falkordb|kuzudb|nornic)",
55
63
  "FALKORDB_PATH": "Path to FalkorDB database file",
56
64
  "FALKORDB_SOCKET_PATH": "Path to FalkorDB Unix socket",
57
65
  "INDEX_VARIABLES": "Index variable nodes in the graph (lighter graph if false)",
@@ -74,11 +82,13 @@ CONFIG_DESCRIPTIONS = {
74
82
  "SCIP_INDEXER": "Use SCIP-based indexing for higher accuracy call/inheritance resolution (requires scip-<lang> tools installed)",
75
83
  "SCIP_LANGUAGES": "Comma-separated languages to index via SCIP when SCIP_INDEXER=true (python,typescript,go,rust,java)",
76
84
  "SKIP_EXTERNAL_RESOLUTION": "Skip resolution attempts for external library method calls (recommended for enterprise large Java/Spring codebases)",
85
+ "MAX_TOOL_RESPONSE_TOKENS": "Maximum tokens per MCP tool response (0 = unlimited). Truncates oversized payloads and appends a notice.",
86
+ "TOOL_RESULT_LIMITS": "JSON object mapping tool names to max result counts, e.g. {\"find_code\": 20, \"analyze_code_relationships\": 10}. Missing keys use built-in defaults.",
77
87
  }
78
88
 
79
89
  # Valid values for each config key
80
90
  CONFIG_VALIDATORS = {
81
- "DEFAULT_DATABASE": ["neo4j", "falkordb", "falkordb-remote", "kuzudb"],
91
+ "DEFAULT_DATABASE": ["neo4j", "falkordb", "falkordb-remote", "kuzudb", "nornic"],
82
92
  "INDEX_VARIABLES": ["true", "false"],
83
93
  "ALLOW_DB_DELETION": ["true", "false"],
84
94
  "DEBUG_LOGS": ["true", "false"],
@@ -342,6 +352,26 @@ def validate_config_value(key: str, value: str) -> tuple[bool, Optional[str]]:
342
352
  return False, "PARALLEL_WORKERS must be between 1 and 32"
343
353
  except ValueError:
344
354
  return False, "PARALLEL_WORKERS must be a number"
355
+
356
+ if key == "MAX_TOOL_RESPONSE_TOKENS":
357
+ try:
358
+ limit = int(value)
359
+ if limit < 0:
360
+ return False, "MAX_TOOL_RESPONSE_TOKENS must be 0 (unlimited) or a positive integer"
361
+ except ValueError:
362
+ return False, "MAX_TOOL_RESPONSE_TOKENS must be an integer (0 = unlimited)"
363
+
364
+ if key == "TOOL_RESULT_LIMITS":
365
+ import json as _json
366
+ try:
367
+ parsed = _json.loads(value)
368
+ if not isinstance(parsed, dict):
369
+ return False, "TOOL_RESULT_LIMITS must be a JSON object, e.g. {\"find_code\": 20}"
370
+ for k, v in parsed.items():
371
+ if not isinstance(v, int) or v < 1:
372
+ return False, f"TOOL_RESULT_LIMITS: value for '{k}' must be a positive integer"
373
+ except _json.JSONDecodeError:
374
+ return False, "TOOL_RESULT_LIMITS must be valid JSON, e.g. {\"find_code\": 20, \"find_dead_code\": 30}"
345
375
 
346
376
  if key == "MAX_DEPTH":
347
377
  if value.lower() != "unlimited":
@@ -54,15 +54,18 @@ def _save_neo4j_credentials(creds):
54
54
 
55
55
  def _generate_mcp_json(creds):
56
56
  """Generates and prints the MCP JSON configuration."""
57
- cgc_path = shutil.which("cgc") or sys.executable
57
+ cgc_path = shutil.which("cgc")
58
+ pipx_path = shutil.which("pipx")
58
59
 
59
- if "python" in Path(cgc_path).name:
60
- # fallback to running as module if no cgc binary is found
60
+ if cgc_path:
61
61
  command = cgc_path
62
- args = ["-m", "cgc", "mcp", "start"]
62
+ args = ["mcp", "start"]
63
+ elif pipx_path:
64
+ command = pipx_path
65
+ args = ["run", "codegraphcontext", "mcp", "start"]
63
66
  else:
64
- command = cgc_path
65
- args = ["mcp","start"]
67
+ command = sys.executable
68
+ args = ["-m", "codegraphcontext", "mcp", "start"]
66
69
 
67
70
  mcp_config = {
68
71
  "mcpServers": {
@@ -84,6 +87,7 @@ def _generate_mcp_json(creds):
84
87
  "list_indexed_repositories", "delete_repository", "list_watched_paths",
85
88
  "unwatch_directory", "visualize_graph_query"
86
89
  ],
90
+ "disabledTools": [],
87
91
  "disabled": False
88
92
  },
89
93
  "disabled": False,
@@ -424,15 +428,18 @@ def configure_mcp_client():
424
428
  env_vars[key] = value
425
429
 
426
430
  # Generate MCP configuration
427
- cgc_path = shutil.which("cgc") or sys.executable
431
+ cgc_path = shutil.which("cgc")
432
+ pipx_path = shutil.which("pipx")
428
433
 
429
- if "python" in Path(cgc_path).name:
430
- # fallback to running as module if no cgc binary is found
431
- command = cgc_path
432
- args = ["-m", "cgc", "mcp", "start"]
433
- else:
434
+ if cgc_path:
434
435
  command = cgc_path
435
436
  args = ["mcp", "start"]
437
+ elif pipx_path:
438
+ command = pipx_path
439
+ args = ["run", "codegraphcontext", "mcp", "start"]
440
+ else:
441
+ command = sys.executable
442
+ args = ["-m", "codegraphcontext", "mcp", "start"]
436
443
 
437
444
  # Create MCP config with complete env section
438
445
  mcp_config = {
@@ -451,6 +458,7 @@ def configure_mcp_client():
451
458
  "list_indexed_repositories", "delete_repository", "list_watched_paths",
452
459
  "unwatch_directory", "visualize_graph_query"
453
460
  ],
461
+ "disabledTools": [],
454
462
  "disabled": False
455
463
  },
456
464
  "disabled": False,
@@ -52,7 +52,15 @@ def _is_neo4j_configured() -> bool:
52
52
  os.getenv('NEO4J_PASSWORD')
53
53
  ])
54
54
 
55
- def get_database_manager(db_path: Optional[str] = None) -> Union['DatabaseManager', 'FalkorDBManager', 'FalkorDBRemoteManager', 'KuzuDBManager']:
55
+ def _is_nornic_configured() -> bool:
56
+ """Check if Nornic is configured with credentials."""
57
+ return all([
58
+ os.getenv('NORNIC_URI'),
59
+ os.getenv('NORNIC_USERNAME'),
60
+ os.getenv('NORNIC_PASSWORD')
61
+ ])
62
+
63
+ def get_database_manager(db_path: Optional[str] = None) -> Union['DatabaseManager', 'FalkorDBManager', 'FalkorDBRemoteManager', 'KuzuDBManager', 'NornicDBManager']:
56
64
  """
57
65
  Factory function to get the appropriate database manager based on configuration.
58
66
 
@@ -111,8 +119,15 @@ def get_database_manager(db_path: Optional[str] = None) -> Union['DatabaseManage
111
119
  from .database import DatabaseManager
112
120
  info_logger("Using Neo4j Server (explicit)")
113
121
  return DatabaseManager()
122
+
123
+ elif db_type == 'nornic':
124
+ if not _is_nornic_configured():
125
+ raise ValueError("Database set to 'nornic' but it is not configured.")
126
+ from .database_nornic import NornicDBManager
127
+ info_logger("Using Nornic DB (explicit)")
128
+ return NornicDBManager()
114
129
  else:
115
- raise ValueError(f"Unknown database type: '{db_type}'. Use 'kuzudb', 'falkordb', 'falkordb-remote', or 'neo4j'.")
130
+ raise ValueError(f"Unknown database type: '{db_type}'. Use 'kuzudb', 'falkordb', 'falkordb-remote', 'neo4j', or 'nornic'.")
116
131
 
117
132
  # Implicit: remote FalkorDB when FALKORDB_HOST is set (explicit infra signal)
118
133
  if _is_falkordb_remote_configured():
@@ -146,6 +161,12 @@ def get_database_manager(db_path: Optional[str] = None) -> Union['DatabaseManage
146
161
  info_logger("Using Neo4j Server (auto-detected)")
147
162
  return DatabaseManager()
148
163
 
164
+ # Implicit: Nornic when configured
165
+ if _is_nornic_configured():
166
+ from .database_nornic import NornicDBManager
167
+ info_logger("Using Nornic DB (auto-detected)")
168
+ return NornicDBManager()
169
+
149
170
  error_msg = "No database backend available.\n"
150
171
  error_msg += "Recommended: Install KùzuDB for zero-config ('pip install real_ladybug')\n"
151
172
 
@@ -161,5 +182,6 @@ from .database import DatabaseManager
161
182
  from .database_falkordb import FalkorDBManager
162
183
  from .database_falkordb_remote import FalkorDBRemoteManager
163
184
  from .database_kuzu import KuzuDBManager
185
+ from .database_nornic import NornicDBManager
164
186
 
165
- __all__ = ['DatabaseManager', 'FalkorDBManager', 'FalkorDBRemoteManager', 'KuzuDBManager', 'get_database_manager']
187
+ __all__ = ['DatabaseManager', 'FalkorDBManager', 'FalkorDBRemoteManager', 'KuzuDBManager', 'NornicDBManager', 'get_database_manager']
@@ -28,6 +28,7 @@ from datetime import datetime, date
28
28
  import subprocess
29
29
 
30
30
  from codegraphcontext.utils.debug_log import debug_log, info_logger, error_logger, warning_logger
31
+ from codegraphcontext.utils.git_utils import get_repo_commit_hash
31
32
 
32
33
 
33
34
  class _BundleEncoder(json.JSONEncoder):
@@ -293,15 +294,9 @@ class CGCBundle:
293
294
 
294
295
  # Try to get git information if available
295
296
  if repo_path and repo_path.exists():
296
- try:
297
- commit = subprocess.check_output(
298
- ['git', 'rev-parse', 'HEAD'],
299
- cwd=repo_path,
300
- stderr=subprocess.DEVNULL
301
- ).decode().strip()
297
+ commit = get_repo_commit_hash(repo_path)
298
+ if commit:
302
299
  metadata["commit"] = commit[:8]
303
- except (subprocess.CalledProcessError, FileNotFoundError):
304
- pass
305
300
 
306
301
  try:
307
302
  result = session.run("""
@@ -356,6 +356,18 @@ class FalkorDBSessionWrapper:
356
356
  """
357
357
  Execute a Cypher query on FalkorDB.
358
358
  """
359
+ constraint_command = self._translate_constraint_command(query)
360
+ if constraint_command is not None:
361
+ try:
362
+ self.graph.execute_command(*constraint_command)
363
+ return FalkorDBResultWrapper(None)
364
+ except Exception as e:
365
+ error_msg = str(e).lower()
366
+ if "already exists" in error_msg or "already created" in error_msg:
367
+ return FalkorDBResultWrapper(None)
368
+ error_logger(f"FalkorDB constraint failed: {constraint_command!r} Error: {e}")
369
+ raise
370
+
359
371
  # Translate Neo4j schema queries to FalkorDB syntax
360
372
  query = self._translate_schema_query(query)
361
373
 
@@ -371,6 +383,56 @@ class FalkorDBSessionWrapper:
371
383
  error_logger(f"FalkorDB query failed: {query[:100]}... Error: {e}")
372
384
  raise
373
385
 
386
+ def _translate_constraint_command(self, query: str):
387
+ """
388
+ Translate Neo4j-style CREATE CONSTRAINT queries to GRAPH.CONSTRAINT CREATE.
389
+ FalkorDB 4.16.x expects this command path instead of GRAPH.QUERY.
390
+ """
391
+ q_upper = query.upper()
392
+ if "CREATE CONSTRAINT" not in q_upper:
393
+ return None
394
+
395
+ normalized = re.sub(r"\s+IF NOT EXISTS", "", query, flags=re.IGNORECASE)
396
+ normalized = re.sub(r"\s+", " ", normalized).strip()
397
+
398
+ entity_match = re.search(r"FOR\s*\((\w+):([^)]+)\)", normalized, flags=re.IGNORECASE)
399
+ if not entity_match:
400
+ return None
401
+ entity_type = "NODE"
402
+ label = entity_match.group(2).strip()
403
+
404
+ composite_match = re.search(
405
+ r"REQUIRE\s*\(([^)]+)\)\s*IS\s+UNIQUE",
406
+ normalized,
407
+ flags=re.IGNORECASE,
408
+ )
409
+ single_match = re.search(
410
+ r"REQUIRE\s+\w+\.([A-Za-z_][A-Za-z0-9_]*)\s+IS\s+UNIQUE",
411
+ normalized,
412
+ flags=re.IGNORECASE,
413
+ )
414
+
415
+ if composite_match:
416
+ props = [part.split(".")[-1].strip() for part in composite_match.group(1).split(",") if part.strip()]
417
+ constraint_type = "UNIQUE"
418
+ elif single_match:
419
+ props = [single_match.group(1).strip()]
420
+ constraint_type = "UNIQUE"
421
+ else:
422
+ return None
423
+
424
+ return [
425
+ "GRAPH.CONSTRAINT",
426
+ "CREATE",
427
+ self.graph.name,
428
+ constraint_type,
429
+ entity_type,
430
+ label,
431
+ "PROPERTIES",
432
+ len(props),
433
+ *props,
434
+ ]
435
+
374
436
  def _translate_schema_query(self, query: str) -> str:
375
437
  """Translate Neo4j schema queries to FalkorDB/RedisGraph syntax."""
376
438
  q_upper = query.upper()
@@ -379,26 +441,9 @@ class FalkorDBSessionWrapper:
379
441
  if "CREATE FULLTEXT INDEX" in q_upper:
380
442
  return "RETURN 1"
381
443
 
382
- # Handle Constraints
444
+ # Handle Constraints through GRAPH.CONSTRAINT in run()
383
445
  if "CREATE CONSTRAINT" in q_upper:
384
- # Remove "IF NOT EXISTS"
385
- query = re.sub(r'\s+IF NOT EXISTS', '', query, flags=re.IGNORECASE)
386
-
387
- # Handle composite keys: (n.p1, n.p2) -> downgrade to INDEX
388
- if "," in query:
389
- match_node = re.search(r'FOR\s+(\([^)]+\))', query, flags=re.IGNORECASE)
390
- match_props = re.search(r'REQUIRE\s+(\([^)]+\))\s+IS UNIQUE', query, flags=re.IGNORECASE)
391
-
392
- if match_node and match_props:
393
- return f"CREATE INDEX FOR {match_node.group(1)} ON {match_props.group(1)}"
394
-
395
- # Handle simple uniqueness: CREATE CONSTRAINT name FOR (n:Label) REQUIRE n.prop IS UNIQUE
396
- # TO: CREATE CONSTRAINT ON (n:Label) ASSERT n.prop IS UNIQUE
397
-
398
- # Remove constraint name
399
- query = re.sub(r'CREATE CONSTRAINT\s+\w+\s+', 'CREATE CONSTRAINT ', query, flags=re.IGNORECASE)
400
- query = re.sub(r'\s+FOR\s+', ' ON ', query, flags=re.IGNORECASE)
401
- query = re.sub(r'\s+REQUIRE\s+', ' ASSERT ', query, flags=re.IGNORECASE)
446
+ return "RETURN 1"
402
447
 
403
448
  # Handle Regular Indexes
404
449
  elif "CREATE INDEX" in q_upper:
@@ -99,7 +99,7 @@ class KuzuDBManager:
99
99
  # but we can wrap in try-except or check metadata.
100
100
 
101
101
  node_tables = [
102
- ("Repository", "path STRING, name STRING, is_dependency BOOLEAN, PRIMARY KEY (path)"),
102
+ ("Repository", "path STRING, name STRING, is_dependency BOOLEAN, indexed_at STRING, commit_hash STRING, PRIMARY KEY (path)"),
103
103
  ("File", "path STRING, name STRING, relative_path STRING, is_dependency BOOLEAN, PRIMARY KEY (path)"),
104
104
  ("Directory", "path STRING, name STRING, PRIMARY KEY (path)"),
105
105
  ("Module", "name STRING, lang STRING, full_import_name STRING, PRIMARY KEY (name)"),
@@ -164,6 +164,9 @@ class KuzuDBManager:
164
164
  ("Module", "full_import_name", "STRING"),
165
165
  ("IMPORTS", "full_import_name", "STRING"),
166
166
  ("IMPORTS", "imported_name", "STRING"),
167
+ # Freshness properties added to Repository in 0.4.6
168
+ ("Repository", "indexed_at", "STRING"),
169
+ ("Repository", "commit_hash", "STRING"),
167
170
  ]
168
171
 
169
172
  for table_name, column_name, column_type in migrations:
@@ -171,7 +174,7 @@ class KuzuDBManager:
171
174
  self._conn.execute(f"ALTER TABLE `{table_name}` ADD {column_name} {column_type}")
172
175
  except Exception as e:
173
176
  err = str(e).lower()
174
- if "already exists" in err or "duplicate" in err:
177
+ if "already exists" in err or "duplicate" in err or "already has property" in err:
175
178
  continue
176
179
  warning_logger(f"Kuzu Schema Migration Error ({table_name}.{column_name}): {e}")
177
180
  debug_log(f"Kuzu Schema Migration Error ({table_name}.{column_name}): {e}")
@@ -277,6 +280,32 @@ class KuzuSessionWrapper:
277
280
  err_str = str(e).lower()
278
281
  if "already exists" in err_str:
279
282
  return KuzuResultWrapper(None)
283
+
284
+ # Fallback for KuzuDB UNWIND bug (unordered_map::at)
285
+ if "unordered_map::at" in err_str and "UNWIND" in query:
286
+ unwind_m = re.search(r'UNWIND\s+\$(\w+)\s+AS\s+(\w+)', query)
287
+ if unwind_m:
288
+ batch_param = unwind_m.group(1)
289
+ row_var = unwind_m.group(2)
290
+ batch_data = parameters.get(batch_param)
291
+ if isinstance(batch_data, list):
292
+ loop_query = re.sub(r'UNWIND\s+\$\w+\s+AS\s+\w+', '', query, count=1)
293
+ # Find all row.prop usages and replace with $row_prop
294
+ props_used = set(re.findall(rf'{row_var}\.(\w+)', loop_query))
295
+ for p in props_used:
296
+ loop_query = loop_query.replace(f"{row_var}.{p}", f"${row_var}_{p}")
297
+
298
+ last_result = None
299
+ for item in batch_data:
300
+ loop_params = parameters.copy()
301
+ loop_params.pop(batch_param, None)
302
+ for p in props_used:
303
+ loop_params[f"{row_var}_{p}"] = item.get(p)
304
+ if "uid" in item:
305
+ loop_params[f"{row_var}_uid"] = item["uid"]
306
+ last_result = self.run(loop_query, **loop_params)
307
+ return last_result or KuzuResultWrapper(None)
308
+
280
309
  error_logger(f"Kuzu Query failed: {query[:100]}... Error: {e}")
281
310
  debug_log(f"Kuzu Query failed: {query[:100]}... Error: {e}")
282
311
  raise
@@ -0,0 +1,204 @@
1
+ # src/codegraphcontext/core/database_nornic.py
2
+ """
3
+ This module provides a thread-safe singleton manager for the Nornic DB connection.
4
+ Nornic DB is compatible with Neo4j APIs and drivers.
5
+ """
6
+ import os
7
+ import re
8
+ import threading
9
+ from typing import Optional, Tuple
10
+ from neo4j import GraphDatabase, Driver
11
+
12
+ from codegraphcontext.utils.debug_log import debug_log, info_logger, error_logger, warning_logger
13
+
14
+ class NornicDriverWrapper:
15
+ """
16
+ A simple wrapper around the Nornic (Neo4j) Driver to inject a database name into session() calls.
17
+ """
18
+ def __init__(self, driver: Driver, database: str = None):
19
+ self._driver = driver
20
+ self._database = database
21
+
22
+ def session(self, **kwargs):
23
+ """Proxy method to get a session from the underlying driver."""
24
+ if self._database and 'database' not in kwargs:
25
+ kwargs["database"] = self._database
26
+ return self._driver.session(**kwargs)
27
+
28
+ def close(self):
29
+ """Proxy method to close the underlying driver."""
30
+ self._driver.close()
31
+
32
+ class NornicDBManager:
33
+ """
34
+ Manages the Nornic database driver as a singleton to ensure only one
35
+ connection pool is created and shared across the application.
36
+ """
37
+ _instance = None
38
+ _driver: Optional[Driver] = None
39
+ _lock = threading.Lock()
40
+
41
+ def __new__(cls):
42
+ """Standard singleton pattern implementation."""
43
+ if cls._instance is None:
44
+ with cls._lock:
45
+ if cls._instance is None:
46
+ cls._instance = super(NornicDBManager, cls).__new__(cls)
47
+ return cls._instance
48
+
49
+ def __init__(self):
50
+ """
51
+ Initializes the manager by reading credentials from environment variables.
52
+ The `_initialized` flag prevents re-initialization on subsequent calls.
53
+ """
54
+ if hasattr(self, '_initialized'):
55
+ return
56
+
57
+ self.nornic_uri = os.getenv('NORNIC_URI')
58
+ self.nornic_username = os.getenv('NORNIC_USERNAME', 'nornic')
59
+ self.nornic_password = os.getenv('NORNIC_PASSWORD')
60
+ self.nornic_database = os.getenv('NORNIC_DATABASE')
61
+ self._initialized = True
62
+
63
+ def get_driver(self) -> Driver:
64
+ """
65
+ Gets the Nornic driver instance, creating it if it doesn't exist.
66
+ This method is thread-safe.
67
+
68
+ Returns:
69
+ A wrapper for the Nornic Driver instance.
70
+ """
71
+ if self._driver is None:
72
+ with self._lock:
73
+ if self._driver is None:
74
+ if not all([self.nornic_uri, self.nornic_username, self.nornic_password]):
75
+ raise ValueError(
76
+ "Nornic credentials must be set via environment variables:\n"
77
+ "- NORNIC_URI\n"
78
+ "- NORNIC_USERNAME\n"
79
+ "- NORNIC_PASSWORD"
80
+ )
81
+
82
+ is_valid, validation_error = self.validate_config(
83
+ self.nornic_uri,
84
+ self.nornic_username,
85
+ self.nornic_password
86
+ )
87
+
88
+ if not is_valid:
89
+ error_logger(f"Nornic configuration validation failed: {validation_error}")
90
+ raise ValueError(validation_error)
91
+
92
+ info_logger(f"Creating Nornic driver connection to {self.nornic_uri}")
93
+ self._driver = GraphDatabase.driver(
94
+ self.nornic_uri,
95
+ auth=(self.nornic_username, self.nornic_password)
96
+ )
97
+ try:
98
+ with self._driver.session() as session:
99
+ session.run("RETURN 1").consume()
100
+ info_logger("Nornic connection established successfully")
101
+ except Exception as e:
102
+ _, detailed_error = self.test_connection(
103
+ self.nornic_uri,
104
+ self.nornic_username,
105
+ self.nornic_password
106
+ )
107
+ error_logger(f"Failed to connect to Nornic: {e}")
108
+ if self._driver:
109
+ self._driver.close()
110
+ self._driver = None
111
+ raise
112
+ return NornicDriverWrapper(self._driver, database=self.nornic_database)
113
+
114
+ def close_driver(self):
115
+ """Closes the Nornic driver connection if it exists."""
116
+ if self._driver is not None:
117
+ with self._lock:
118
+ if self._driver is not None:
119
+ info_logger("Closing Nornic driver")
120
+ self._driver.close()
121
+ self._driver = None
122
+
123
+ def is_connected(self) -> bool:
124
+ """Checks if the database connection is currently active."""
125
+ if self._driver is None:
126
+ return False
127
+ try:
128
+ session_kwargs = {}
129
+ if self.nornic_database:
130
+ session_kwargs['database'] = self.nornic_database
131
+ with self._driver.session(**session_kwargs) as session:
132
+ session.run("RETURN 1").consume()
133
+ return True
134
+ except Exception:
135
+ return False
136
+
137
+ def get_backend_type(self) -> str:
138
+ """Returns the database backend type."""
139
+ return 'nornic'
140
+
141
+ @staticmethod
142
+ def validate_config(uri: str, username: str, password: str) -> Tuple[bool, Optional[str]]:
143
+ """
144
+ Validates Nornic configuration parameters.
145
+ """
146
+ # Nornic likely uses similar URI formats to Neo4j/Bolt
147
+ uri_pattern = r'^(nornic|nornic\+s|nornic\+ssc|bolt|bolt\+s|bolt\+ssc|neo4j|neo4j\+s|neo4j\+ssc)://[^:]+(:\d+)?$'
148
+ if not re.match(uri_pattern, uri):
149
+ return False, (
150
+ "Invalid Nornic URI format.\n"
151
+ "Expected format: nornic://host:port or bolt://host:port\n"
152
+ "Example: nornic://localhost:7687"
153
+ )
154
+
155
+ if not username or len(username.strip()) == 0:
156
+ return False, "Username cannot be empty."
157
+
158
+ if not password or len(password.strip()) == 0:
159
+ return False, "Password cannot be empty."
160
+
161
+ return True, None
162
+
163
+ @staticmethod
164
+ def test_connection(uri: str, username: str, password: str, database: str=None) -> Tuple[bool, Optional[str]]:
165
+ """
166
+ Tests the Nornic database connection.
167
+ """
168
+ try:
169
+ from neo4j import GraphDatabase
170
+ import socket
171
+
172
+ try:
173
+ host_port = uri.split('://')[1]
174
+ if ':' in host_port:
175
+ host = host_port.split(':')[0]
176
+ port = int(host_port.split(':')[1])
177
+ else:
178
+ host = host_port
179
+ port = 7687
180
+
181
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
182
+ sock.settimeout(5)
183
+ result = sock.connect_ex((host, port))
184
+ sock.close()
185
+
186
+ if result != 0:
187
+ return False, f"Cannot reach Nornic server at {host}:{port}"
188
+ except Exception as e:
189
+ return False, f"Error parsing URI or checking connectivity: {str(e)}"
190
+
191
+ driver = GraphDatabase.driver(uri, auth=(username, password))
192
+
193
+ session_kwargs = {}
194
+ if database:
195
+ session_kwargs['database'] = database
196
+ with driver.session(**session_kwargs) as session:
197
+ result = session.run("RETURN 'Connection successful' as status")
198
+ result.single()
199
+
200
+ driver.close()
201
+ return True, None
202
+
203
+ except Exception as e:
204
+ return False, f"Connection failed: {str(e)}"