mem0ai_azure_mysql-0.1.115-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (116)
  1. mem0/__init__.py +6 -0
  2. mem0/client/__init__.py +0 -0
  3. mem0/client/main.py +1535 -0
  4. mem0/client/project.py +860 -0
  5. mem0/client/utils.py +29 -0
  6. mem0/configs/__init__.py +0 -0
  7. mem0/configs/base.py +90 -0
  8. mem0/configs/dbs/__init__.py +4 -0
  9. mem0/configs/dbs/base.py +41 -0
  10. mem0/configs/dbs/mysql.py +25 -0
  11. mem0/configs/embeddings/__init__.py +0 -0
  12. mem0/configs/embeddings/base.py +108 -0
  13. mem0/configs/enums.py +7 -0
  14. mem0/configs/llms/__init__.py +0 -0
  15. mem0/configs/llms/base.py +152 -0
  16. mem0/configs/prompts.py +333 -0
  17. mem0/configs/vector_stores/__init__.py +0 -0
  18. mem0/configs/vector_stores/azure_ai_search.py +59 -0
  19. mem0/configs/vector_stores/baidu.py +29 -0
  20. mem0/configs/vector_stores/chroma.py +40 -0
  21. mem0/configs/vector_stores/elasticsearch.py +47 -0
  22. mem0/configs/vector_stores/faiss.py +39 -0
  23. mem0/configs/vector_stores/langchain.py +32 -0
  24. mem0/configs/vector_stores/milvus.py +43 -0
  25. mem0/configs/vector_stores/mongodb.py +25 -0
  26. mem0/configs/vector_stores/opensearch.py +41 -0
  27. mem0/configs/vector_stores/pgvector.py +37 -0
  28. mem0/configs/vector_stores/pinecone.py +56 -0
  29. mem0/configs/vector_stores/qdrant.py +49 -0
  30. mem0/configs/vector_stores/redis.py +26 -0
  31. mem0/configs/vector_stores/supabase.py +44 -0
  32. mem0/configs/vector_stores/upstash_vector.py +36 -0
  33. mem0/configs/vector_stores/vertex_ai_vector_search.py +27 -0
  34. mem0/configs/vector_stores/weaviate.py +43 -0
  35. mem0/dbs/__init__.py +4 -0
  36. mem0/dbs/base.py +68 -0
  37. mem0/dbs/configs.py +21 -0
  38. mem0/dbs/mysql.py +321 -0
  39. mem0/embeddings/__init__.py +0 -0
  40. mem0/embeddings/aws_bedrock.py +100 -0
  41. mem0/embeddings/azure_openai.py +43 -0
  42. mem0/embeddings/base.py +31 -0
  43. mem0/embeddings/configs.py +30 -0
  44. mem0/embeddings/gemini.py +39 -0
  45. mem0/embeddings/huggingface.py +41 -0
  46. mem0/embeddings/langchain.py +35 -0
  47. mem0/embeddings/lmstudio.py +29 -0
  48. mem0/embeddings/mock.py +11 -0
  49. mem0/embeddings/ollama.py +53 -0
  50. mem0/embeddings/openai.py +49 -0
  51. mem0/embeddings/together.py +31 -0
  52. mem0/embeddings/vertexai.py +54 -0
  53. mem0/graphs/__init__.py +0 -0
  54. mem0/graphs/configs.py +96 -0
  55. mem0/graphs/neptune/__init__.py +0 -0
  56. mem0/graphs/neptune/base.py +410 -0
  57. mem0/graphs/neptune/main.py +372 -0
  58. mem0/graphs/tools.py +371 -0
  59. mem0/graphs/utils.py +97 -0
  60. mem0/llms/__init__.py +0 -0
  61. mem0/llms/anthropic.py +64 -0
  62. mem0/llms/aws_bedrock.py +270 -0
  63. mem0/llms/azure_openai.py +114 -0
  64. mem0/llms/azure_openai_structured.py +76 -0
  65. mem0/llms/base.py +32 -0
  66. mem0/llms/configs.py +34 -0
  67. mem0/llms/deepseek.py +85 -0
  68. mem0/llms/gemini.py +201 -0
  69. mem0/llms/groq.py +88 -0
  70. mem0/llms/langchain.py +65 -0
  71. mem0/llms/litellm.py +87 -0
  72. mem0/llms/lmstudio.py +53 -0
  73. mem0/llms/ollama.py +94 -0
  74. mem0/llms/openai.py +124 -0
  75. mem0/llms/openai_structured.py +52 -0
  76. mem0/llms/sarvam.py +89 -0
  77. mem0/llms/together.py +88 -0
  78. mem0/llms/vllm.py +89 -0
  79. mem0/llms/xai.py +52 -0
  80. mem0/memory/__init__.py +0 -0
  81. mem0/memory/base.py +63 -0
  82. mem0/memory/graph_memory.py +632 -0
  83. mem0/memory/main.py +1843 -0
  84. mem0/memory/memgraph_memory.py +630 -0
  85. mem0/memory/setup.py +56 -0
  86. mem0/memory/storage.py +218 -0
  87. mem0/memory/telemetry.py +90 -0
  88. mem0/memory/utils.py +133 -0
  89. mem0/proxy/__init__.py +0 -0
  90. mem0/proxy/main.py +194 -0
  91. mem0/utils/factory.py +132 -0
  92. mem0/vector_stores/__init__.py +0 -0
  93. mem0/vector_stores/azure_ai_search.py +383 -0
  94. mem0/vector_stores/baidu.py +368 -0
  95. mem0/vector_stores/base.py +58 -0
  96. mem0/vector_stores/chroma.py +229 -0
  97. mem0/vector_stores/configs.py +60 -0
  98. mem0/vector_stores/elasticsearch.py +235 -0
  99. mem0/vector_stores/faiss.py +473 -0
  100. mem0/vector_stores/langchain.py +179 -0
  101. mem0/vector_stores/milvus.py +245 -0
  102. mem0/vector_stores/mongodb.py +293 -0
  103. mem0/vector_stores/opensearch.py +281 -0
  104. mem0/vector_stores/pgvector.py +294 -0
  105. mem0/vector_stores/pinecone.py +373 -0
  106. mem0/vector_stores/qdrant.py +240 -0
  107. mem0/vector_stores/redis.py +295 -0
  108. mem0/vector_stores/supabase.py +237 -0
  109. mem0/vector_stores/upstash_vector.py +293 -0
  110. mem0/vector_stores/vertex_ai_vector_search.py +629 -0
  111. mem0/vector_stores/weaviate.py +316 -0
  112. mem0ai_azure_mysql-0.1.115.data/data/README.md +169 -0
  113. mem0ai_azure_mysql-0.1.115.dist-info/METADATA +224 -0
  114. mem0ai_azure_mysql-0.1.115.dist-info/RECORD +116 -0
  115. mem0ai_azure_mysql-0.1.115.dist-info/WHEEL +4 -0
  116. mem0ai_azure_mysql-0.1.115.dist-info/licenses/LICENSE +201 -0
mem0/client/utils.py ADDED
@@ -0,0 +1,29 @@
+ import logging
+
+ import httpx
+
+ logger = logging.getLogger(__name__)
+
+
+ class APIError(Exception):
+     """Exception raised for errors in the API."""
+
+     pass
+
+
+ def api_error_handler(func):
+     """Decorator to handle API errors consistently."""
+     from functools import wraps
+
+     @wraps(func)
+     def wrapper(*args, **kwargs):
+         try:
+             return func(*args, **kwargs)
+         except httpx.HTTPStatusError as e:
+             logger.error(f"HTTP error occurred: {e}")
+             raise APIError(f"API request failed: {e.response.text}")
+         except httpx.RequestError as e:
+             logger.error(f"Request error occurred: {e}")
+             raise APIError(f"Request failed: {str(e)}")
+
+     return wrapper
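A brief usage sketch (the client method and URL path below are hypothetical, not taken from the package): any function that issues httpx calls can be wrapped so HTTP and transport failures surface as APIError.

import httpx

from mem0.client.utils import APIError, api_error_handler


@api_error_handler
def get_memory(client: httpx.Client, memory_id: str) -> dict:
    # raise_for_status() raises httpx.HTTPStatusError, which the decorator
    # converts into APIError carrying the response body.
    response = client.get(f"/v1/memories/{memory_id}/")
    response.raise_for_status()
    return response.json()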
mem0/configs/__init__.py ADDED
File without changes
mem0/configs/base.py ADDED
@@ -0,0 +1,90 @@
+ import os
+ from typing import Any, Dict, Optional
+
+ from pydantic import BaseModel, Field
+
+ from mem0.embeddings.configs import EmbedderConfig
+ from mem0.graphs.configs import GraphStoreConfig
+ from mem0.llms.configs import LlmConfig
+ from mem0.vector_stores.configs import VectorStoreConfig
+ from mem0.dbs.configs import DBConfig
+
+ # Set up the directory path
+ home_dir = os.path.expanduser("~")
+ mem0_dir = os.environ.get("MEM0_DIR") or os.path.join(home_dir, ".mem0")
+
+
+ class MemoryItem(BaseModel):
+     id: str = Field(..., description="The unique identifier for the text data")
+     memory: str = Field(
+         ..., description="The memory deduced from the text data"
+     )  # TODO After prompt changes from platform, update this
+     hash: Optional[str] = Field(None, description="The hash of the memory")
+     # The metadata value can be anything and not just string. Fix it
+     metadata: Optional[Dict[str, Any]] = Field(None, description="Additional metadata for the text data")
+     score: Optional[float] = Field(None, description="The score associated with the text data")
+     created_at: Optional[str] = Field(None, description="The timestamp when the memory was created")
+     updated_at: Optional[str] = Field(None, description="The timestamp when the memory was updated")
+
+
+ class MemoryConfig(BaseModel):
+     vector_store: VectorStoreConfig = Field(
+         description="Configuration for the vector store",
+         default_factory=VectorStoreConfig,
+     )
+     llm: LlmConfig = Field(
+         description="Configuration for the language model",
+         default_factory=LlmConfig,
+     )
+     embedder: EmbedderConfig = Field(
+         description="Configuration for the embedding model",
+         default_factory=EmbedderConfig,
+     )
+     history_db_path: str = Field(
+         description="Path to the history database",
+         default=os.path.join(mem0_dir, "history.db"),
+     )
+     db: DBConfig = Field(
+         description="Configuration for the database",
+         default_factory=DBConfig,
+     )
+     graph_store: GraphStoreConfig = Field(
+         description="Configuration for the graph",
+         default_factory=GraphStoreConfig,
+     )
+     version: str = Field(
+         description="The version of the API",
+         default="v1.1",
+     )
+     custom_fact_extraction_prompt: Optional[str] = Field(
+         description="Custom prompt for the fact extraction",
+         default=None,
+     )
+     custom_update_memory_prompt: Optional[str] = Field(
+         description="Custom prompt for the update memory",
+         default=None,
+     )
+
+
+ class AzureConfig(BaseModel):
+     """
+     Configuration settings for Azure.
+
+     Args:
+         api_key (str): The API key used for authenticating with the Azure service.
+         azure_deployment (str): The name of the Azure deployment.
+         azure_endpoint (str): The endpoint URL for the Azure service.
+         api_version (str): The version of the Azure API being used.
+         default_headers (Dict[str, str]): Headers to include in requests to the Azure API.
+     """
+
+     api_key: str = Field(
+         description="The API key used for authenticating with the Azure service.",
+         default=None,
+     )
+     azure_deployment: str = Field(description="The name of the Azure deployment.", default=None)
+     azure_endpoint: str = Field(description="The endpoint URL for the Azure service.", default=None)
+     api_version: str = Field(description="The version of the Azure API being used.", default=None)
+     default_headers: Optional[Dict[str, str]] = Field(
+         description="Headers to include in requests to the Azure API.", default=None
+     )
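For orientation, a minimal usage sketch (assuming the nested config classes instantiate with their own defaults, as the default_factory fields imply; the Azure values are placeholders):

from mem0.configs.base import AzureConfig, MemoryConfig

# Every field has a default or default_factory, so an empty call is valid.
config = MemoryConfig()
print(config.version)          # "v1.1"
print(config.history_db_path)  # <MEM0_DIR or ~/.mem0>/history.db

# AzureConfig is a plain pydantic model reused as azure_kwargs elsewhere.
azure = AzureConfig(
    api_key="...",
    azure_deployment="my-deployment",          # placeholder
    azure_endpoint="https://example.openai.azure.com/",  # placeholder
    api_version="2024-02-01",                  # placeholder
)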
mem0/configs/dbs/__init__.py ADDED
@@ -0,0 +1,4 @@
+ from mem0.configs.dbs.base import BaseDBConfig
+ from mem0.configs.dbs.mysql import MySQLConfig
+
+ __all__ = ["BaseDBConfig", "MySQLConfig"]
mem0/configs/dbs/base.py ADDED
@@ -0,0 +1,41 @@
+ from abc import ABC
+ from typing import Optional
+
+
+ class BaseDBConfig(ABC):
+     """
+     Config for Database.
+     """
+
+     def __init__(
+         self,
+         host: Optional[str] = None,
+         port: Optional[int] = None,
+         user: Optional[str] = None,
+         password: Optional[str] = None,
+         database: Optional[str] = None,
+         ssl_enabled: bool = False,
+     ):
+         """
+         Initializes a configuration class instance for the Database.
+
+         :param host: Database host, defaults to None
+         :type host: Optional[str], optional
+         :param port: Database port, defaults to None
+         :type port: Optional[int], optional
+         :param user: Database user, defaults to None
+         :type user: Optional[str], optional
+         :param password: Database password, defaults to None
+         :type password: Optional[str], optional
+         :param database: Database name, defaults to None
+         :type database: Optional[str], optional
+         :param ssl_enabled: Whether to use SSL for the connection, defaults to False
+         :type ssl_enabled: bool, optional
+         """
+
+         self.host = host
+         self.port = port
+         self.user = user
+         self.password = password
+         self.database = database
+         self.ssl_enabled = ssl_enabled
mem0/configs/dbs/mysql.py ADDED
@@ -0,0 +1,25 @@
+ from typing import Optional
+
+ from mem0.configs.dbs.base import BaseDBConfig
+
+
+ class MySQLConfig(BaseDBConfig):
+     """Configuration for MySQL database."""
+
+     def __init__(
+         self,
+         host: Optional[str] = "localhost",
+         port: Optional[int] = 3306,
+         user: Optional[str] = None,
+         password: Optional[str] = None,
+         database: Optional[str] = None,
+         ssl_enabled: bool = False,
+         **kwargs
+     ):
+         self.host = host
+         self.port = port
+         self.user = user
+         self.password = password
+         self.database = database
+         self.ssl_enabled = ssl_enabled
+         self.connection_params = kwargs
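A hedged usage sketch (host and credentials are placeholders): unrecognized keyword arguments are not rejected but collected into connection_params.

from mem0.configs.dbs.mysql import MySQLConfig

config = MySQLConfig(
    host="mydb.mysql.database.azure.com",  # placeholder host
    port=3306,
    user="mem0_user",      # placeholder
    password="...",        # placeholder
    database="mem0",
    ssl_enabled=True,
    connect_timeout=10,    # extra kwargs land in connection_params
)
print(config.connection_params)  # {'connect_timeout': 10}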
mem0/configs/embeddings/__init__.py ADDED
File without changes
mem0/configs/embeddings/base.py ADDED
@@ -0,0 +1,108 @@
+ from abc import ABC
+ from typing import Dict, Optional, Union
+
+ import httpx
+
+ from mem0.configs.base import AzureConfig
+
+
+ class BaseEmbedderConfig(ABC):
+     """
+     Config for Embeddings.
+     """
+
+     def __init__(
+         self,
+         model: Optional[str] = None,
+         api_key: Optional[str] = None,
+         embedding_dims: Optional[int] = None,
+         # Ollama specific
+         ollama_base_url: Optional[str] = None,
+         # Openai specific
+         openai_base_url: Optional[str] = None,
+         # Huggingface specific
+         model_kwargs: Optional[dict] = None,
+         huggingface_base_url: Optional[str] = None,
+         # AzureOpenAI specific
+         azure_kwargs: Optional[AzureConfig] = {},
+         http_client_proxies: Optional[Union[Dict, str]] = None,
+         # VertexAI specific
+         vertex_credentials_json: Optional[str] = None,
+         memory_add_embedding_type: Optional[str] = None,
+         memory_update_embedding_type: Optional[str] = None,
+         memory_search_embedding_type: Optional[str] = None,
+         # Gemini specific
+         output_dimensionality: Optional[str] = None,
+         # LM Studio specific
+         lmstudio_base_url: Optional[str] = "http://localhost:1234/v1",
+         # AWS Bedrock specific
+         aws_access_key_id: Optional[str] = None,
+         aws_secret_access_key: Optional[str] = None,
+         aws_region: Optional[str] = "us-west-2",
+     ):
+         """
+         Initializes a configuration class instance for the Embeddings.
+
+         :param model: Embedding model to use, defaults to None
+         :type model: Optional[str], optional
+         :param api_key: API key to be use, defaults to None
+         :type api_key: Optional[str], optional
+         :param embedding_dims: The number of dimensions in the embedding, defaults to None
+         :type embedding_dims: Optional[int], optional
+         :param ollama_base_url: Base URL for the Ollama API, defaults to None
+         :type ollama_base_url: Optional[str], optional
+         :param model_kwargs: key-value arguments for the huggingface embedding model, defaults a dict inside init
+         :type model_kwargs: Optional[Dict[str, Any]], defaults a dict inside init
+         :param huggingface_base_url: Huggingface base URL to be use, defaults to None
+         :type huggingface_base_url: Optional[str], optional
+         :param openai_base_url: Openai base URL to be use, defaults to "https://api.openai.com/v1"
+         :type openai_base_url: Optional[str], optional
+         :param azure_kwargs: key-value arguments for the AzureOpenAI embedding model, defaults a dict inside init
+         :type azure_kwargs: Optional[Dict[str, Any]], defaults a dict inside init
+         :param http_client_proxies: The proxy server settings used to create self.http_client, defaults to None
+         :type http_client_proxies: Optional[Dict | str], optional
+         :param vertex_credentials_json: The path to the Vertex AI credentials JSON file, defaults to None
+         :type vertex_credentials_json: Optional[str], optional
+         :param memory_add_embedding_type: The type of embedding to use for the add memory action, defaults to None
+         :type memory_add_embedding_type: Optional[str], optional
+         :param memory_update_embedding_type: The type of embedding to use for the update memory action, defaults to None
+         :type memory_update_embedding_type: Optional[str], optional
+         :param memory_search_embedding_type: The type of embedding to use for the search memory action, defaults to None
+         :type memory_search_embedding_type: Optional[str], optional
+         :param lmstudio_base_url: LM Studio base URL to be use, defaults to "http://localhost:1234/v1"
+         :type lmstudio_base_url: Optional[str], optional
+         """
+
+         self.model = model
+         self.api_key = api_key
+         self.openai_base_url = openai_base_url
+         self.embedding_dims = embedding_dims
+
+         # AzureOpenAI specific
+         self.http_client = httpx.Client(proxies=http_client_proxies) if http_client_proxies else None
+
+         # Ollama specific
+         self.ollama_base_url = ollama_base_url
+
+         # Huggingface specific
+         self.model_kwargs = model_kwargs or {}
+         self.huggingface_base_url = huggingface_base_url
+         # AzureOpenAI specific
+         self.azure_kwargs = AzureConfig(**azure_kwargs) or {}
+
+         # VertexAI specific
+         self.vertex_credentials_json = vertex_credentials_json
+         self.memory_add_embedding_type = memory_add_embedding_type
+         self.memory_update_embedding_type = memory_update_embedding_type
+         self.memory_search_embedding_type = memory_search_embedding_type
+
+         # Gemini specific
+         self.output_dimensionality = output_dimensionality
+
+         # LM Studio specific
+         self.lmstudio_base_url = lmstudio_base_url
+
+         # AWS Bedrock specific
+         self.aws_access_key_id = aws_access_key_id
+         self.aws_secret_access_key = aws_secret_access_key
+         self.aws_region = aws_region
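A small usage sketch (model name, dimensions, and Azure values are placeholders): the azure_kwargs dict is unpacked into an AzureConfig pydantic model during initialization.

from mem0.configs.embeddings.base import BaseEmbedderConfig

config = BaseEmbedderConfig(
    model="text-embedding-3-small",  # placeholder model identifier
    embedding_dims=1536,             # placeholder dimension count
    azure_kwargs={
        "api_key": "...",
        "azure_deployment": "embeddings",
        "azure_endpoint": "https://example.openai.azure.com/",
        "api_version": "2024-02-01",
    },
)
# The dict above has been converted into an AzureConfig instance.
print(type(config.azure_kwargs).__name__)  # AzureConfig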
mem0/configs/enums.py ADDED
@@ -0,0 +1,7 @@
+ from enum import Enum
+
+
+ class MemoryType(Enum):
+     SEMANTIC = "semantic_memory"
+     EPISODIC = "episodic_memory"
+     PROCEDURAL = "procedural_memory"
mem0/configs/llms/__init__.py ADDED
File without changes
mem0/configs/llms/base.py ADDED
@@ -0,0 +1,152 @@
+ from abc import ABC
+ from typing import Dict, Optional, Union
+
+ import httpx
+
+ from mem0.configs.base import AzureConfig
+
+
+ class BaseLlmConfig(ABC):
+     """
+     Config for LLMs.
+     """
+
+     def __init__(
+         self,
+         model: Optional[Union[str, Dict]] = None,
+         temperature: float = 0.1,
+         api_key: Optional[str] = None,
+         max_tokens: int = 2000,
+         top_p: float = 0.1,
+         top_k: int = 1,
+         enable_vision: bool = False,
+         vision_details: Optional[str] = "auto",
+         # Openrouter specific
+         models: Optional[list[str]] = None,
+         route: Optional[str] = "fallback",
+         openrouter_base_url: Optional[str] = None,
+         # Openai specific
+         openai_base_url: Optional[str] = None,
+         site_url: Optional[str] = None,
+         app_name: Optional[str] = None,
+         # Ollama specific
+         ollama_base_url: Optional[str] = None,
+         # AzureOpenAI specific
+         azure_kwargs: Optional[AzureConfig] = {},
+         # AzureOpenAI specific
+         http_client_proxies: Optional[Union[Dict, str]] = None,
+         # DeepSeek specific
+         deepseek_base_url: Optional[str] = None,
+         # XAI specific
+         xai_base_url: Optional[str] = None,
+         # Sarvam specific
+         sarvam_base_url: Optional[str] = "https://api.sarvam.ai/v1",
+         # LM Studio specific
+         lmstudio_base_url: Optional[str] = "http://localhost:1234/v1",
+         lmstudio_response_format: dict = None,
+         # vLLM specific
+         vllm_base_url: Optional[str] = "http://localhost:8000/v1",
+         # AWS Bedrock specific
+         aws_access_key_id: Optional[str] = None,
+         aws_secret_access_key: Optional[str] = None,
+         aws_region: Optional[str] = "us-west-2",
+     ):
+         """
+         Initializes a configuration class instance for the LLM.
+
+         :param model: Controls the OpenAI model used, defaults to None
+         :type model: Optional[str], optional
+         :param temperature: Controls the randomness of the model's output.
+             Higher values (closer to 1) make output more random, lower values make it more deterministic, defaults to 0
+         :type temperature: float, optional
+         :param api_key: OpenAI API key to be use, defaults to None
+         :type api_key: Optional[str], optional
+         :param max_tokens: Controls how many tokens are generated, defaults to 2000
+         :type max_tokens: int, optional
+         :param top_p: Controls the diversity of words. Higher values (closer to 1) make word selection more diverse,
+             defaults to 1
+         :type top_p: float, optional
+         :param top_k: Controls the diversity of words. Higher values make word selection more diverse, defaults to 0
+         :type top_k: int, optional
+         :param enable_vision: Enable vision for the LLM, defaults to False
+         :type enable_vision: bool, optional
+         :param vision_details: Details of the vision to be used [low, high, auto], defaults to "auto"
+         :type vision_details: Optional[str], optional
+         :param models: Openrouter models to use, defaults to None
+         :type models: Optional[list[str]], optional
+         :param route: Openrouter route to be used, defaults to "fallback"
+         :type route: Optional[str], optional
+         :param openrouter_base_url: Openrouter base URL to be use, defaults to "https://openrouter.ai/api/v1"
+         :type openrouter_base_url: Optional[str], optional
+         :param site_url: Openrouter site URL to use, defaults to None
+         :type site_url: Optional[str], optional
+         :param app_name: Openrouter app name to use, defaults to None
+         :type app_name: Optional[str], optional
+         :param ollama_base_url: The base URL of the LLM, defaults to None
+         :type ollama_base_url: Optional[str], optional
+         :param openai_base_url: Openai base URL to be use, defaults to "https://api.openai.com/v1"
+         :type openai_base_url: Optional[str], optional
+         :param azure_kwargs: key-value arguments for the AzureOpenAI LLM model, defaults a dict inside init
+         :type azure_kwargs: Optional[Dict[str, Any]], defaults a dict inside init
+         :param http_client_proxies: The proxy server(s) settings used to create self.http_client, defaults to None
+         :type http_client_proxies: Optional[Dict | str], optional
+         :param deepseek_base_url: DeepSeek base URL to be use, defaults to None
+         :type deepseek_base_url: Optional[str], optional
+         :param xai_base_url: XAI base URL to be use, defaults to None
+         :type xai_base_url: Optional[str], optional
+         :param sarvam_base_url: Sarvam base URL to be use, defaults to "https://api.sarvam.ai/v1"
+         :type sarvam_base_url: Optional[str], optional
+         :param lmstudio_base_url: LM Studio base URL to be use, defaults to "http://localhost:1234/v1"
+         :type lmstudio_base_url: Optional[str], optional
+         :param lmstudio_response_format: LM Studio response format to be use, defaults to None
+         :type lmstudio_response_format: Optional[Dict], optional
+         :param vllm_base_url: vLLM base URL to be use, defaults to "http://localhost:8000/v1"
+         :type vllm_base_url: Optional[str], optional
+         """
+
+         self.model = model
+         self.temperature = temperature
+         self.api_key = api_key
+         self.max_tokens = max_tokens
+         self.top_p = top_p
+         self.top_k = top_k
+         self.enable_vision = enable_vision
+         self.vision_details = vision_details
+
+         # AzureOpenAI specific
+         self.http_client = httpx.Client(proxies=http_client_proxies) if http_client_proxies else None
+
+         # Openrouter specific
+         self.models = models
+         self.route = route
+         self.openrouter_base_url = openrouter_base_url
+         self.openai_base_url = openai_base_url
+         self.site_url = site_url
+         self.app_name = app_name
+
+         # Ollama specific
+         self.ollama_base_url = ollama_base_url
+
+         # DeepSeek specific
+         self.deepseek_base_url = deepseek_base_url
+
+         # AzureOpenAI specific
+         self.azure_kwargs = AzureConfig(**azure_kwargs) or {}
+
+         # XAI specific
+         self.xai_base_url = xai_base_url
+
+         # Sarvam specific
+         self.sarvam_base_url = sarvam_base_url
+
+         # LM Studio specific
+         self.lmstudio_base_url = lmstudio_base_url
+         self.lmstudio_response_format = lmstudio_response_format
+
+         # vLLM specific
+         self.vllm_base_url = vllm_base_url
+
+         # AWS Bedrock specific
+         self.aws_access_key_id = aws_access_key_id
+         self.aws_secret_access_key = aws_secret_access_key
+         self.aws_region = aws_region
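Finally, a hedged construction sketch (the model identifier is a placeholder; which of these knobs a given provider actually honors depends on the corresponding mem0.llms implementation, not shown here):

from mem0.configs.llms.base import BaseLlmConfig

config = BaseLlmConfig(
    model="gpt-4o-mini",   # placeholder model identifier
    temperature=0.2,
    max_tokens=1000,
    top_p=0.9,
)
print(config.model, config.temperature)
# Provider-specific defaults are populated even when unused:
print(config.sarvam_base_url)  # "https://api.sarvam.ai/v1"
print(config.vllm_base_url)    # "http://localhost:8000/v1"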