noesium 0.1.0__py3-none-any.whl → 0.2.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. noesium/agents/askura_agent/__init__.py +22 -0
  2. noesium/agents/askura_agent/askura_agent.py +480 -0
  3. noesium/agents/askura_agent/conversation.py +164 -0
  4. noesium/agents/askura_agent/extractor.py +175 -0
  5. noesium/agents/askura_agent/memory.py +14 -0
  6. noesium/agents/askura_agent/models.py +239 -0
  7. noesium/agents/askura_agent/prompts.py +202 -0
  8. noesium/agents/askura_agent/reflection.py +234 -0
  9. noesium/agents/askura_agent/summarizer.py +30 -0
  10. noesium/agents/askura_agent/utils.py +6 -0
  11. noesium/agents/deep_research/__init__.py +13 -0
  12. noesium/agents/deep_research/agent.py +398 -0
  13. noesium/agents/deep_research/prompts.py +84 -0
  14. noesium/agents/deep_research/schemas.py +42 -0
  15. noesium/agents/deep_research/state.py +54 -0
  16. noesium/agents/search/__init__.py +5 -0
  17. noesium/agents/search/agent.py +474 -0
  18. noesium/agents/search/state.py +28 -0
  19. noesium/core/__init__.py +1 -1
  20. noesium/core/agent/base.py +10 -2
  21. noesium/core/goalith/decomposer/llm_decomposer.py +1 -1
  22. noesium/core/llm/__init__.py +1 -1
  23. noesium/core/llm/base.py +2 -2
  24. noesium/core/llm/litellm.py +42 -21
  25. noesium/core/llm/llamacpp.py +25 -4
  26. noesium/core/llm/ollama.py +43 -22
  27. noesium/core/llm/openai.py +25 -5
  28. noesium/core/llm/openrouter.py +1 -1
  29. noesium/core/toolify/base.py +9 -2
  30. noesium/core/toolify/config.py +2 -2
  31. noesium/core/toolify/registry.py +21 -5
  32. noesium/core/tracing/opik_tracing.py +7 -7
  33. noesium/core/vector_store/__init__.py +2 -2
  34. noesium/core/vector_store/base.py +1 -1
  35. noesium/core/vector_store/pgvector.py +10 -13
  36. noesium/core/vector_store/weaviate.py +2 -1
  37. noesium/toolkits/__init__.py +1 -0
  38. noesium/toolkits/arxiv_toolkit.py +310 -0
  39. noesium/toolkits/audio_aliyun_toolkit.py +441 -0
  40. noesium/toolkits/audio_toolkit.py +370 -0
  41. noesium/toolkits/bash_toolkit.py +332 -0
  42. noesium/toolkits/document_toolkit.py +454 -0
  43. noesium/toolkits/file_edit_toolkit.py +552 -0
  44. noesium/toolkits/github_toolkit.py +395 -0
  45. noesium/toolkits/gmail_toolkit.py +575 -0
  46. noesium/toolkits/image_toolkit.py +425 -0
  47. noesium/toolkits/memory_toolkit.py +398 -0
  48. noesium/toolkits/python_executor_toolkit.py +334 -0
  49. noesium/toolkits/search_toolkit.py +451 -0
  50. noesium/toolkits/serper_toolkit.py +623 -0
  51. noesium/toolkits/tabular_data_toolkit.py +537 -0
  52. noesium/toolkits/user_interaction_toolkit.py +365 -0
  53. noesium/toolkits/video_toolkit.py +168 -0
  54. noesium/toolkits/wikipedia_toolkit.py +420 -0
  55. noesium-0.2.1.dist-info/METADATA +253 -0
  56. {noesium-0.1.0.dist-info → noesium-0.2.1.dist-info}/RECORD +59 -23
  57. {noesium-0.1.0.dist-info → noesium-0.2.1.dist-info}/licenses/LICENSE +1 -1
  58. noesium-0.1.0.dist-info/METADATA +0 -525
  59. {noesium-0.1.0.dist-info → noesium-0.2.1.dist-info}/WHEEL +0 -0
  60. {noesium-0.1.0.dist-info → noesium-0.2.1.dist-info}/top_level.txt +0 -0
noesium/core/__init__.py CHANGED
@@ -1,4 +1,4 @@
 from noesium.core.utils.logging import setup_logging
 
-# Enable colorful logging by default for cogents
+# Enable colorful logging by default for noesium
 setup_logging(level="INFO", enable_colors=True)
noesium/core/agent/base.py CHANGED
@@ -3,10 +3,18 @@ from abc import ABC, abstractmethod
 from datetime import datetime
 from typing import Any, Dict, List, Optional, Type
 
-from langchain_core.runnables import RunnableConfig
-from langgraph.graph import StateGraph
 from pydantic import BaseModel, Field
 
+try:
+    from langchain_core.runnables import RunnableConfig
+    from langgraph.graph import StateGraph
+
+    LANGCHAIN_AVAILABLE = True
+except ImportError:
+    RunnableConfig = None
+    StateGraph = None
+    LANGCHAIN_AVAILABLE = False
+
 from noesium.core.llm import get_llm_client
 from noesium.core.tracing import get_token_tracker
 from noesium.core.utils.logging import get_logger
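The guarded imports keep `noesium.core.agent.base` importable when langgraph and langchain-core are absent, deferring the failure to the call site. A minimal sketch of how downstream code can consume this pattern; `build_state_graph` and its error message are illustrative, not part of the package:

```python
from noesium.core.agent.base import LANGCHAIN_AVAILABLE, StateGraph

def build_state_graph(state_type):
    """Construct a LangGraph StateGraph, failing fast if the extra is missing."""
    if not LANGCHAIN_AVAILABLE:
        # Without this check, StateGraph is None and calling it would raise a
        # confusing TypeError instead of an actionable ImportError.
        raise ImportError("langgraph is not installed; install it to use graph-based agents.")
    return StateGraph(state_type)
```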
noesium/core/goalith/decomposer/llm_decomposer.py CHANGED
@@ -77,7 +77,7 @@ class LLMDecomposer(GoalDecomposer):
 
     def __init__(
         self,
-        provider: str = os.getenv("COGENTS_LLM_PROVIDER", "openrouter"),
+        provider: str = os.getenv("NOESIUM_LLM_PROVIDER", "openrouter"),
         model_name: Optional[str] = None,
         temperature: float = 0.3,
         max_tokens: int = 2000,
noesium/core/llm/__init__.py CHANGED
@@ -55,7 +55,7 @@ __all__ = [
 
 
 def get_llm_client(
-    provider: str = os.getenv("COGENTS_LLM_PROVIDER", "openai"),
+    provider: str = os.getenv("NOESIUM_LLM_PROVIDER", "openai"),
     base_url: Optional[str] = None,
     api_key: Optional[str] = None,
     structured_output: bool = True,
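Note that this default is bound when the function is defined, i.e. at first import of `noesium.core.llm`, so the environment variable must be set before the import. A hedged usage sketch, using only the keyword arguments visible in this hunk:

```python
import os

# Set before importing: the default for `provider` is evaluated in the def line
# at import time, not per call.
os.environ["NOESIUM_LLM_PROVIDER"] = "ollama"  # replaces COGENTS_LLM_PROVIDER

from noesium.core.llm import get_llm_client

client = get_llm_client(structured_output=True)  # provider falls back to the env default
```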
noesium/core/llm/base.py CHANGED
@@ -18,7 +18,7 @@ class BaseLLMClient(ABC):
         Args:
             **kwargs: Additional arguments to pass to the LLM client
         """
-        self.debug = os.getenv("COGENTS_DEBUG", "false").lower() == "true"
+        self.debug = os.getenv("NOESIUM_DEBUG", "false").lower() == "true"
 
     @abstractmethod
     def completion(
@@ -149,4 +149,4 @@ class BaseLLMClient(ABC):
         Returns:
             int: Expected embedding dimensions
         """
-        return int(os.getenv("COGENTS_EMBEDDING_DIMS", str(DEFAULT_EMBEDDING_DIMS)))
+        return int(os.getenv("NOESIUM_EMBEDDING_DIMS", str(DEFAULT_EMBEDDING_DIMS)))
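These renames are part of a wholesale COGENTS_* → NOESIUM_* environment-variable migration that recurs throughout this diff. A hypothetical compatibility shim for deployments still exporting the old names; the suffix list is taken from variables visible in this diff, and the shim itself is not shipped by the package:

```python
import os

# Old COGENTS_* names seen in this diff, mirrored onto the new NOESIUM_* names.
_RENAMED_SUFFIXES = [
    "DEBUG",
    "LLM_PROVIDER",
    "EMBEDDING_DIMS",
    "OPIK_TRACING",
    "ENABLE_TRACING",
    "VECTOR_STORE_PROVIDER",
]

for _suffix in _RENAMED_SUFFIXES:
    _old, _new = f"COGENTS_{_suffix}", f"NOESIUM_{_suffix}"
    if _old in os.environ and _new not in os.environ:
        os.environ[_new] = os.environ[_old]
```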
noesium/core/llm/litellm.py CHANGED
@@ -1,5 +1,5 @@
 """
-LiteLLM provider for Cogents.
+LiteLLM provider for Noesium.
 
 This module provides:
 - Unified interface to multiple LLM providers via LiteLLM
@@ -16,8 +16,23 @@ import time
 from pathlib import Path
 from typing import Any, Dict, List, Optional, Tuple, Type, TypeVar, Union
 
-import litellm
-from instructor import Instructor, Mode, patch
+try:
+    import litellm
+
+    LITELLM_AVAILABLE = True
+except ImportError:
+    litellm = None
+    LITELLM_AVAILABLE = False
+
+try:
+    from instructor import Instructor, Mode, patch
+
+    INSTRUCTOR_AVAILABLE = True
+except ImportError:
+    Instructor = None
+    Mode = None
+    patch = None
+    INSTRUCTOR_AVAILABLE = False
 
 from noesium.core.llm.base import BaseLLMClient
 from noesium.core.tracing import configure_opik, estimate_token_usage, get_token_tracker, is_opik_enabled
@@ -26,7 +41,7 @@ from noesium.core.utils.logging import get_logger
 # Only import OPIK if tracing is enabled
 OPIK_AVAILABLE = False
 track = lambda func: func  # Default no-op decorator
-if os.getenv("COGENTS_OPIK_TRACING", "false").lower() == "true":
+if os.getenv("NOESIUM_OPIK_TRACING", "false").lower() == "true":
     try:
         from opik import track
 
@@ -64,6 +79,9 @@ class LLMClient(BaseLLMClient):
             vision_model: Model to use for vision tasks (e.g., "gpt-4-vision-preview", "claude-3-sonnet")
             **kwargs: Additional arguments
         """
+        if not LITELLM_AVAILABLE:
+            raise ImportError("LiteLLM package is not installed. Install it with: pip install 'noesium[litellm]'")
+
         super().__init__(**kwargs)
         # Configure Opik tracing for observability only if enabled
         if OPIK_AVAILABLE:
@@ -90,22 +108,25 @@ class LLMClient(BaseLLMClient):
         # Initialize instructor if requested
         self.instructor = None
         if instructor:
-            try:
-                from openai import OpenAI
+            if not INSTRUCTOR_AVAILABLE:
+                logger.warning("Instructor package not available, structured completion will not work")
+            else:
+                try:
+                    from openai import OpenAI
 
-                # Create a mock client for instructor
-                mock_client = OpenAI(
-                    api_key="litellm",
-                    base_url="http://localhost:8000",  # LiteLLM proxy default
-                )
-                patched_client = patch(mock_client, mode=Mode.JSON)
-                self.instructor = Instructor(
-                    client=patched_client,
-                    create=patched_client.chat.completions.create,
-                    mode=Mode.JSON,
-                )
-            except ImportError:
-                logger.warning("OpenAI package not available, structured completion will not work")
+                    # Create a mock client for instructor
+                    mock_client = OpenAI(
+                        api_key="litellm",
+                        base_url="http://localhost:8000",  # LiteLLM proxy default
+                    )
+                    patched_client = patch(mock_client, mode=Mode.JSON)
+                    self.instructor = Instructor(
+                        client=patched_client,
+                        create=patched_client.chat.completions.create,
+                        mode=Mode.JSON,
+                    )
+                except ImportError:
+                    logger.warning("OpenAI package not available, structured completion will not work")
 
         # Configure LiteLLM settings
 litellm.drop_params = True  # Drop unsupported parameters
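Aside on the block above: once a client is patched via instructor, its `create()` accepts a `response_model` and returns a validated pydantic instance. A sketch of the general instructor pattern, not code from this package; the API key, model name, and schema are placeholders:

```python
from instructor import Mode, patch
from openai import OpenAI
from pydantic import BaseModel

class City(BaseModel):
    name: str
    country: str

# Same patching call as in the hunk above; api_key is a placeholder.
client = patch(OpenAI(api_key="sk-placeholder"), mode=Mode.JSON)

# The patched create() gains `response_model` and returns a City, not a raw completion.
city = client.chat.completions.create(
    model="gpt-4o-mini",  # placeholder model name
    response_model=City,
    messages=[{"role": "user", "content": "Name one city in France."}],
)
print(city.name, city.country)
```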
@@ -438,7 +459,7 @@ class LLMClient(BaseLLMClient):
         if len(embedding) != expected_dims:
             logger.warning(
                 f"Embedding has {len(embedding)} dimensions, expected {expected_dims}. "
-                f"Consider setting COGENTS_EMBEDDING_DIMS={len(embedding)} or "
+                f"Consider setting NOESIUM_EMBEDDING_DIMS={len(embedding)} or "
                 f"using a different embedding model."
             )
 
@@ -473,7 +494,7 @@ class LLMClient(BaseLLMClient):
         if len(embedding) != expected_dims:
             logger.warning(
                 f"Embedding at index {i} has {len(embedding)} dimensions, expected {expected_dims}. "
-                f"Consider setting COGENTS_EMBEDDING_DIMS={len(embedding)} or "
+                f"Consider setting NOESIUM_EMBEDDING_DIMS={len(embedding)} or "
                 f"using a different embedding model."
             )
 
noesium/core/llm/llamacpp.py CHANGED
@@ -3,8 +3,21 @@ import os
 from pathlib import Path
 from typing import Any, Dict, List, Optional, Tuple, Type, TypeVar, Union
 
-from huggingface_hub import snapshot_download
-from llama_cpp import Llama
+try:
+    from huggingface_hub import snapshot_download
+
+    HUGGINGFACE_HUB_AVAILABLE = True
+except ImportError:
+    snapshot_download = None
+    HUGGINGFACE_HUB_AVAILABLE = False
+
+try:
+    from llama_cpp import Llama
+
+    LLAMA_CPP_AVAILABLE = True
+except ImportError:
+    Llama = None
+    LLAMA_CPP_AVAILABLE = False
 
 from noesium.core.llm.base import BaseLLMClient
 from noesium.core.tracing import estimate_token_usage, get_token_tracker
@@ -14,7 +27,7 @@ from noesium.core.utils.logging import get_logger
 # Only import OPIK if tracing is enabled
 OPIK_AVAILABLE = False
 track = lambda func: func  # Default no-op decorator
-if os.getenv("COGENTS_OPIK_TRACING", "false").lower() == "true":
+if os.getenv("NOESIUM_OPIK_TRACING", "false").lower() == "true":
     try:
         from opik import track
 
@@ -39,6 +52,9 @@ def _download_default_model() -> str:
     Returns:
         Path to the downloaded model file
     """
+    if not HUGGINGFACE_HUB_AVAILABLE:
+        raise ImportError("huggingface-hub package is not installed. Install it with: pip install 'noesium[local-llm]'")
+
     try:
         logger.info(f"No model path provided, downloading default model: {DEFAULT_MODEL_REPO}")
 
@@ -101,6 +117,11 @@ class LLMClient(BaseLLMClient):
             n_gpu_layers: Number of layers to offload to GPU (-1 for all)
             **kwargs: Additional arguments to pass to Llama constructor
         """
+        if not LLAMA_CPP_AVAILABLE:
+            raise ImportError(
+                "llama-cpp-python package is not installed. Install it with: pip install 'noesium[local-llm]'"
+            )
+
         super().__init__(**kwargs)
         # Configure Opik tracing for observability only if enabled
         if OPIK_AVAILABLE:
@@ -411,7 +432,7 @@ class LLMClient(BaseLLMClient):
         if len(embedding_vector) != expected_dims:
             logger.warning(
                 f"Embedding has {len(embedding_vector)} dimensions, expected {expected_dims}. "
-                f"Consider setting COGENTS_EMBEDDING_DIMS={len(embedding_vector)} or "
+                f"Consider setting NOESIUM_EMBEDDING_DIMS={len(embedding_vector)} or "
                 f"using a different embedding model."
             )
 
noesium/core/llm/ollama.py CHANGED
@@ -1,5 +1,5 @@
 """
-Ollama LLM provider for Cogents.
+Ollama LLM provider for Noesium.
 
 This module provides:
 - Chat completion using Ollama models
@@ -13,8 +13,23 @@ import time
 from pathlib import Path
 from typing import Any, Dict, List, Optional, Tuple, Type, TypeVar, Union
 
-import ollama
-from instructor import Instructor, Mode, patch
+try:
+    import ollama
+
+    OLLAMA_AVAILABLE = True
+except ImportError:
+    ollama = None
+    OLLAMA_AVAILABLE = False
+
+try:
+    from instructor import Instructor, Mode, patch
+
+    INSTRUCTOR_AVAILABLE = True
+except ImportError:
+    Instructor = None
+    Mode = None
+    patch = None
+    INSTRUCTOR_AVAILABLE = False
 
 from noesium.core.llm.base import BaseLLMClient
 from noesium.core.tracing import estimate_token_usage, get_token_tracker
@@ -24,7 +39,7 @@ from noesium.core.utils.logging import get_logger
 # Only import OPIK if tracing is enabled
 OPIK_AVAILABLE = False
 track = lambda func: func  # Default no-op decorator
-if os.getenv("COGENTS_OPIK_TRACING", "false").lower() == "true":
+if os.getenv("NOESIUM_OPIK_TRACING", "false").lower() == "true":
     try:
         from opik import track
 
@@ -62,6 +77,9 @@ class LLMClient(BaseLLMClient):
             vision_model: Model to use for vision tasks (defaults to gemma3:4b)
             **kwargs: Additional arguments
         """
+        if not OLLAMA_AVAILABLE:
+            raise ImportError("Ollama package is not installed. Install it with: pip install 'noesium[local-llm]'")
+
         super().__init__(**kwargs)
         # Configure Opik tracing for observability only if enabled
         if OPIK_AVAILABLE:
@@ -84,23 +102,26 @@ class LLMClient(BaseLLMClient):
         # Initialize instructor if requested
         self.instructor = None
         if instructor:
-            # Create a mock OpenAI-compatible client for instructor
-            try:
-                from openai import OpenAI
-
-                # Create a mock client that uses Ollama through OpenAI-compatible API
-                mock_client = OpenAI(
-                    base_url=f"{self.base_url}/v1",
-                    api_key="ollama",  # Ollama doesn't require real API key
-                )
-                patched_client = patch(mock_client, mode=Mode.JSON)
-                self.instructor = Instructor(
-                    client=patched_client,
-                    create=patched_client.chat.completions.create,
-                    mode=Mode.JSON,
-                )
-            except ImportError:
-                logger.warning("OpenAI package not available, structured completion will not work")
+            if not INSTRUCTOR_AVAILABLE:
+                logger.warning("Instructor package not available, structured completion will not work")
+            else:
+                # Create a mock OpenAI-compatible client for instructor
+                try:
+                    from openai import OpenAI
+
+                    # Create a mock client that uses Ollama through OpenAI-compatible API
+                    mock_client = OpenAI(
+                        base_url=f"{self.base_url}/v1",
+                        api_key="ollama",  # Ollama doesn't require real API key
+                    )
+                    patched_client = patch(mock_client, mode=Mode.JSON)
+                    self.instructor = Instructor(
+                        client=patched_client,
+                        create=patched_client.chat.completions.create,
+                        mode=Mode.JSON,
+                    )
+                except ImportError:
+                    logger.warning("OpenAI package not available, structured completion will not work")
 
     @track
     def completion(
@@ -394,7 +415,7 @@ class LLMClient(BaseLLMClient):
         if len(embedding) != expected_dims:
             logger.warning(
                 f"Embedding has {len(embedding)} dimensions, expected {expected_dims}. "
-                f"Consider setting COGENTS_EMBEDDING_DIMS={len(embedding)} or "
+                f"Consider setting NOESIUM_EMBEDDING_DIMS={len(embedding)} or "
                 f"using a different embedding model."
             )
 
noesium/core/llm/openai.py CHANGED
@@ -16,8 +16,23 @@ from pathlib import Path
 from typing import Any, Dict, List, Optional, Tuple, Type, TypeVar, Union
 
 # Import instructor for structured output
-from instructor import Instructor, Mode, patch
-from openai import OpenAI
+try:
+    from instructor import Instructor, Mode, patch
+
+    INSTRUCTOR_AVAILABLE = True
+except ImportError:
+    Instructor = None
+    Mode = None
+    patch = None
+    INSTRUCTOR_AVAILABLE = False
+
+try:
+    from openai import OpenAI
+
+    OPENAI_AVAILABLE = True
+except ImportError:
+    OpenAI = None
+    OPENAI_AVAILABLE = False
 
 from noesium.core.llm.base import BaseLLMClient
 from noesium.core.tracing import (
@@ -33,7 +48,7 @@ from noesium.core.utils.logging import get_logger
 OPIK_AVAILABLE = False
 track = lambda func: func  # Default no-op decorator
 track_openai = lambda client: client  # Default no-op function
-if os.getenv("COGENTS_OPIK_TRACING", "false").lower() == "true":
+if os.getenv("NOESIUM_OPIK_TRACING", "false").lower() == "true":
     try:
         from opik import track
         from opik.integrations.openai import track_openai
@@ -73,6 +88,9 @@ class LLMClient(BaseLLMClient):
             embed_model: Model to use for embeddings (defaults to text-embedding-3-small)
             **kwargs: Additional arguments to pass to the LLM client
         """
+        if not OPENAI_AVAILABLE:
+            raise ImportError("OpenAI package is not installed. Install it with: pip install 'noesium[openai]'")
+
         super().__init__(**kwargs)
         # Configure Opik tracing for observability only if enabled
         if OPIK_AVAILABLE:
@@ -109,6 +127,8 @@ class LLMClient(BaseLLMClient):
         # Initialize instructor if requested
         self.instructor = None
         if instructor:
+            if not INSTRUCTOR_AVAILABLE:
+                raise ImportError("Instructor package is not installed. Install it with: pip install 'noesium[openai]'")
             # Create instructor instance for structured output
             patched_client = patch(self.client, mode=Mode.JSON)
             self.instructor = Instructor(
@@ -412,7 +432,7 @@ class LLMClient(BaseLLMClient):
         if len(embedding) != expected_dims:
             logger.warning(
                 f"Embedding has {len(embedding)} dimensions, expected {expected_dims}. "
-                f"Consider setting COGENTS_EMBEDDING_DIMS={len(embedding)} or "
+                f"Consider setting NOESIUM_EMBEDDING_DIMS={len(embedding)} or "
                 f"using a different embedding model."
             )
 
@@ -465,7 +485,7 @@ class LLMClient(BaseLLMClient):
         if len(embedding) != expected_dims:
             logger.warning(
                 f"Embedding at index {i} has {len(embedding)} dimensions, expected {expected_dims}. "
-                f"Consider setting COGENTS_EMBEDDING_DIMS={len(embedding)} or "
+                f"Consider setting NOESIUM_EMBEDDING_DIMS={len(embedding)} or "
                 f"using a different embedding model."
             )
 
noesium/core/llm/openrouter.py CHANGED
@@ -20,7 +20,7 @@ from noesium.core.utils.logging import get_logger
 # Only import OPIK if tracing is enabled
 OPIK_AVAILABLE = False
 track = lambda func: func  # Default no-op decorator
-if os.getenv("COGENTS_OPIK_TRACING", "false").lower() == "true":
+if os.getenv("NOESIUM_OPIK_TRACING", "false").lower() == "true":
     try:
         pass
 
noesium/core/toolify/base.py CHANGED
@@ -9,13 +9,20 @@ import abc
 import asyncio
 from typing import Any, Callable, Dict, List, Optional, Union
 
-from langchain_core.tools import BaseTool, tool
-
 from noesium.core.llm import BaseLLMClient
 from noesium.core.utils.logging import get_logger
 
 from .config import ToolkitConfig
 
+try:
+    from langchain_core.tools import BaseTool, tool
+
+    LANGCHAIN_AVAILABLE = True
+except ImportError:
+    BaseTool = None
+    tool = None
+    LANGCHAIN_AVAILABLE = False
+
 try:
     import mcp.types as mcp_types
 
noesium/core/toolify/config.py CHANGED
@@ -32,7 +32,7 @@ class ToolkitConfig(BaseModel):
     """Toolkit-specific configuration parameters"""
 
     # LLM Integration
-    llm_provider: str = Field(default_factory=lambda: os.getenv("COGENTS_LLM_PROVIDER", "openai"))
+    llm_provider: str = Field(default_factory=lambda: os.getenv("NOESIUM_LLM_PROVIDER", "openai"))
     """LLM provider to use (openrouter, openai, ollama, llamacpp, litellm)"""
 
     llm_model: Optional[str] = None
@@ -55,7 +55,7 @@ class ToolkitConfig(BaseModel):
     log_level: str = Field(default_factory=lambda: os.getenv("LOG_LEVEL", "INFO"))
     """Logging level for this toolkit"""
 
-    enable_tracing: bool = Field(default_factory=lambda: os.getenv("COGENTS_ENABLE_TRACING", "false").lower() == "true")
+    enable_tracing: bool = Field(default_factory=lambda: os.getenv("NOESIUM_ENABLE_TRACING", "false").lower() == "true")
     """Enable detailed tracing for debugging"""
 
     class Config:
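Unlike the function-signature defaults elsewhere in this diff (evaluated once at import), `Field(default_factory=...)` re-reads the environment on every ToolkitConfig instantiation. A small illustrative sketch of that difference; the `Cfg` model is generic, not the package's class:

```python
import os
from pydantic import BaseModel, Field

class Cfg(BaseModel):
    # The lambda runs each time Cfg() is constructed, so later env changes apply.
    provider: str = Field(default_factory=lambda: os.getenv("NOESIUM_LLM_PROVIDER", "openai"))

print(Cfg().provider)                           # "openai" if the variable is unset
os.environ["NOESIUM_LLM_PROVIDER"] = "ollama"
print(Cfg().provider)                           # "ollama" — picked up at instantiation
```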
noesium/core/toolify/registry.py CHANGED
@@ -188,10 +188,21 @@ def _discover_builtin_toolkits():
     import pkgutil
     from pathlib import Path
 
-    # Get the toolkits directory
-    toolkits_dir = Path(__file__).parent / "toolkits"
-
-    if not toolkits_dir.exists():
+    # Get the toolkits directory - look in noesium.toolkits
+    # Try multiple possible locations
+    possible_toolkits_dirs = [
+        Path(__file__).parent.parent.parent / "toolkits",  # noesium/toolkits
+        Path(__file__).parent / "toolkits",  # noesium/core/toolify/toolkits
+    ]
+
+    toolkits_dir = None
+    for dir_path in possible_toolkits_dirs:
+        if dir_path.exists() and (dir_path / "__init__.py").exists():
+            toolkits_dir = dir_path
+            break
+
+    if not toolkits_dir:
+        logger.warning("No toolkits directory found")
         return
 
     # Import all toolkit modules
@@ -199,8 +210,13 @@ def _discover_builtin_toolkits():
         if module_info.name.startswith("_"):
             continue
 
-        try:
+        # Determine the correct module name based on the location
+        if "core/toolify/toolkits" in str(toolkits_dir):
             module_name = f"noesium.core.toolify.toolkits.{module_info.name}"
+        else:
+            module_name = f"noesium.toolkits.{module_info.name}"
+
+        try:
             importlib.import_module(module_name)
             logger.debug(f"Discovered toolkit module: {module_name}")
         except ImportError as e:
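The discovery loop is pkgutil-based: it resolves a toolkits directory, then imports each non-underscore module by its dotted name, so the modules can register their toolkits as a side effect of import. The mechanism in miniature, as a hedged standalone sketch with paths assumed from the hunk above:

```python
import importlib
import pkgutil
from pathlib import Path

toolkits_dir = Path("noesium/toolkits")  # assumed resolved location, per the hunk

# iter_modules takes a list of filesystem paths and yields submodule info.
for module_info in pkgutil.iter_modules([str(toolkits_dir)]):
    if module_info.name.startswith("_"):
        continue  # skip private modules, mirroring the registry code
    # Importing for side effects: each toolkit module registers itself on import.
    importlib.import_module(f"noesium.toolkits.{module_info.name}")
```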
noesium/core/tracing/opik_tracing.py CHANGED
@@ -17,7 +17,7 @@ def configure_opik() -> bool:
     Configure Opik tracing based on environment variables.
 
     Environment variables:
-        COGENTS_OPIK_TRACING: Global toggle for Opik tracing (default: false)
+        NOESIUM_OPIK_TRACING: Global toggle for Opik tracing (default: false)
         OPIK_USE_LOCAL: Use local Opik deployment (default: true)
         OPIK_LOCAL_URL: Local Opik URL (default: http://localhost:5173)
         OPIK_API_KEY: API key for Comet ML/Opik (only needed for cloud)
@@ -31,10 +31,10 @@ def configure_opik() -> bool:
     """
     try:
         # Check global Noesium Opik tracing toggle first
-        cogents_opik_enabled = os.getenv("COGENTS_OPIK_TRACING", "false").lower() == "true"
+        noesium_opik_enabled = os.getenv("NOESIUM_OPIK_TRACING", "false").lower() == "true"
 
-        if not cogents_opik_enabled:
-            logger.debug("Opik tracing disabled via COGENTS_OPIK_TRACING=false")
+        if not noesium_opik_enabled:
+            logger.debug("Opik tracing disabled via NOESIUM_OPIK_TRACING=false")
             return False
 
         # Check if using local deployment
@@ -43,7 +43,7 @@ def configure_opik() -> bool:
         # Configuration variables
         opik_api_key = os.getenv("OPIK_API_KEY")
         os.getenv("OPIK_WORKSPACE")
-        opik_project = os.getenv("OPIK_PROJECT_NAME", "cogents-llm")
+        opik_project = os.getenv("OPIK_PROJECT_NAME", "noesium-llm")
         opik_tracing = os.getenv("OPIK_TRACING", "true").lower() == "true"
 
         if not opik_tracing:
@@ -88,9 +88,9 @@ def is_opik_enabled() -> bool:
     """
     try:
         # Check global Noesium Opik tracing toggle first
-        cogents_opik_enabled = os.getenv("COGENTS_OPIK_TRACING", "false").lower() == "true"
+        noesium_opik_enabled = os.getenv("NOESIUM_OPIK_TRACING", "false").lower() == "true"
 
-        if not cogents_opik_enabled:
+        if not noesium_opik_enabled:
             return False
 
         import opik
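Putting the documented variables together, a hedged sketch of enabling tracing against a local Opik instance; the toggles must be set before importing the LLM clients, since those modules check NOESIUM_OPIK_TRACING at import time:

```python
import os

# Variables documented in the configure_opik() docstring above.
os.environ["NOESIUM_OPIK_TRACING"] = "true"            # global toggle (default: false)
os.environ["OPIK_USE_LOCAL"] = "true"                  # local deployment (default: true)
os.environ["OPIK_LOCAL_URL"] = "http://localhost:5173"

from noesium.core.tracing import configure_opik, is_opik_enabled

configured = configure_opik()  # True when Opik was configured successfully
print(configured, is_opik_enabled())
```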
noesium/core/vector_store/__init__.py CHANGED
@@ -33,9 +33,9 @@ __all__ = [
 
 
 def get_vector_store(
-    provider: str = os.getenv("COGENTS_VECTOR_STORE_PROVIDER", "weaviate"),
+    provider: str = os.getenv("NOESIUM_VECTOR_STORE_PROVIDER", "weaviate"),
     collection_name: str = "default_collection",
-    embedding_model_dims: int = int(os.getenv("COGENTS_EMBEDDING_DIMS", "768")),
+    embedding_model_dims: int = int(os.getenv("NOESIUM_EMBEDDING_DIMS", "768")),
     **kwargs,
 ):
     """
noesium/core/vector_store/base.py CHANGED
@@ -37,7 +37,7 @@ class BaseVectorStore(ABC):
                 raise ValueError(
                     f"Vector at index {i} has {len(vector)} dimensions, "
                     f"expected {self.embedding_model_dims}. "
-                    f"Check that your embedding model matches COGENTS_EMBEDDING_DIMS={self.embedding_model_dims}"
+                    f"Check that your embedding model matches NOESIUM_EMBEDDING_DIMS={self.embedding_model_dims}"
                 )
 
     @abstractmethod
noesium/core/vector_store/pgvector.py CHANGED
@@ -6,7 +6,10 @@ try:
     import psycopg2
     from psycopg2.extras import execute_values
 except ImportError:
-    raise ImportError("The 'psycopg2' library is required. Please install it using 'pip install psycopg2'.")
+    raise ImportError(
+        "The 'psycopg2' library is required for PGVector support. "
+        "Install it with 'pip install noesium[postgres]' or 'pip install psycopg2-binary'."
+    )
 
 from .base import BaseVectorStore, OutputData
 
@@ -63,36 +66,30 @@ class PGVectorStore(BaseVectorStore):
             distance (str): Distance metric (not used in PGVector, for compatibility).
         """
         self.cur.execute("CREATE EXTENSION IF NOT EXISTS vector")
-        self.cur.execute(
-            f"""
+        self.cur.execute(f"""
             CREATE TABLE IF NOT EXISTS {self.collection_name} (
                 id UUID PRIMARY KEY,
                 vector vector({vector_size}),
                 payload JSONB
             );
-        """
-        )
+        """)
 
         if self.use_diskann and vector_size < 2000:
             # Check if vectorscale extension is installed
             self.cur.execute("SELECT * FROM pg_extension WHERE extname = 'vectorscale'")
             if self.cur.fetchone():
                 # Create DiskANN index if extension is installed for faster search
-                self.cur.execute(
-                    f"""
+                self.cur.execute(f"""
                     CREATE INDEX IF NOT EXISTS {self.collection_name}_diskann_idx
                     ON {self.collection_name}
                     USING diskann (vector);
-                """
-                )
+                """)
         elif self.use_hnsw:
-            self.cur.execute(
-                f"""
+            self.cur.execute(f"""
                 CREATE INDEX IF NOT EXISTS {self.collection_name}_hnsw_idx
                 ON {self.collection_name}
                 USING hnsw (vector vector_cosine_ops)
-            """
-            )
+            """)
 
         self.conn.commit()
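The statements above interpolate `self.collection_name` into DDL with f-strings. For comparison, psycopg2's `sql` module can compose the same statement with quoted identifiers; this is an alternative formulation, not the package's code, and `cur` is an assumed open cursor:

```python
from psycopg2 import sql

create_stmt = sql.SQL(
    "CREATE TABLE IF NOT EXISTS {table} ("
    "  id UUID PRIMARY KEY,"
    "  vector vector({dims}),"
    "  payload JSONB"
    ");"
).format(
    table=sql.Identifier("my_collection"),  # quoted, so odd names cannot break the statement
    dims=sql.Literal(768),                  # embedding dimensionality
)
cur.execute(create_stmt)  # `cur` is assumed to be an open psycopg2 cursor
```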
noesium/core/vector_store/weaviate.py CHANGED
@@ -6,7 +6,8 @@ try:
     import weaviate
 except ImportError:
     raise ImportError(
-        "The 'weaviate' library is required. Please install it using 'pip install weaviate-client weaviate'."
+        "The 'weaviate' library is required for Weaviate support. "
+        "Install it with 'pip install noesium[weaviate]' or 'pip install weaviate-client'."
     )
 
 import weaviate.classes.config as wvcc
noesium/toolkits/__init__.py ADDED
@@ -0,0 +1 @@
+# A collection of toolkits inheriting `noesium.core.toolify` module.