langchain-timbr 1.5.3__py3-none-any.whl → 1.5.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
28
28
  commit_id: COMMIT_ID
29
29
  __commit_id__: COMMIT_ID
30
30
 
31
- __version__ = version = '1.5.3'
32
- __version_tuple__ = version_tuple = (1, 5, 3)
31
+ __version__ = version = '1.5.5'
32
+ __version_tuple__ = version_tuple = (1, 5, 5)
33
33
 
34
34
  __commit_id__ = commit_id = None
langchain_timbr/config.py CHANGED
@@ -4,7 +4,7 @@ from .utils.general import to_boolean, to_integer, parse_list
4
4
  # MUST HAVE VARIABLES
5
5
  url = os.environ.get('TIMBR_URL')
6
6
  token = os.environ.get('TIMBR_TOKEN')
7
- ontology = os.environ.get('ONTOLOGY', 'system_db')
7
+ ontology = os.environ.get('TIMBR_ONTOLOGY', os.environ.get('ONTOLOGY', 'system_db'))
8
8
 
9
9
  # OPTIONAL VARIABLES
10
10
  is_jwt = to_boolean(os.environ.get('IS_JWT', 'false'))
@@ -18,4 +18,5 @@ llm_type = os.environ.get('LLM_TYPE')
18
18
  llm_model = os.environ.get('LLM_MODEL')
19
19
  llm_api_key = os.environ.get('LLM_API_KEY')
20
20
  llm_temperature = os.environ.get('LLM_TEMPERATURE', 0.0)
21
+ llm_additional_params = os.environ.get('LLM_ADDITIONAL_PARAMS', '')
21
22
  llm_timeout = to_integer(os.environ.get('LLM_TIMEOUT', 60)) # Default 60 seconds timeout
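For orientation, the two config.py hunks above change how the environment is read: the ontology now comes from TIMBR_ONTOLOGY first, with the older ONTOLOGY name kept as a fallback, and extra LLM keyword arguments can be supplied as a single LLM_ADDITIONAL_PARAMS string. A minimal setup sketch; the host, token, and parameter values below are placeholders, and the variables must be set before the package (and therefore config.py) is first imported:

```
import os

# Required Timbr connection settings (placeholders, not real credentials)
os.environ["TIMBR_URL"] = "https://your-timbr-host/"
os.environ["TIMBR_TOKEN"] = "tk_placeholder"
# Read first; the legacy ONTOLOGY variable is still honored when this is unset
os.environ["TIMBR_ONTOLOGY"] = "my_ontology"

# Optional LLM settings picked up by config.py
os.environ["LLM_TYPE"] = "openai-chat"
os.environ["LLM_API_KEY"] = "sk-placeholder"
os.environ["LLM_ADDITIONAL_PARAMS"] = "temperature=0.2,max_tokens=1024"

from langchain_timbr import config  # values are read at import time

print(config.ontology)               # -> "my_ontology"
print(config.llm_additional_params)  # -> "temperature=0.2,max_tokens=1024"
```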
@@ -2,9 +2,11 @@ from typing import Optional, Union, Dict, Any
2
2
  from langchain.chains.base import Chain
3
3
  from langchain.llms.base import LLM
4
4
 
5
- from ..utils.general import parse_list, to_boolean, to_integer
5
+ from ..utils.general import parse_list, to_boolean, to_integer, validate_timbr_connection_params
6
6
  from ..utils.timbr_utils import run_query, validate_sql
7
7
  from ..utils.timbr_llm_utils import generate_sql
8
+ from ..llm_wrapper.llm_wrapper import LlmWrapper
9
+ from .. import config
8
10
 
9
11
  class ExecuteTimbrQueryChain(Chain):
10
12
  """
@@ -17,10 +19,10 @@ class ExecuteTimbrQueryChain(Chain):
17
19
 
18
20
  def __init__(
19
21
  self,
20
- llm: LLM,
21
- url: str,
22
- token: str,
23
- ontology: str,
22
+ llm: Optional[LLM] = None,
23
+ url: Optional[str] = None,
24
+ token: Optional[str] = None,
25
+ ontology: Optional[str] = None,
24
26
  schema: Optional[str] = 'dtimbr',
25
27
  concept: Optional[str] = None,
26
28
  concepts_list: Optional[Union[list[str], str]] = None,
@@ -44,10 +46,10 @@ class ExecuteTimbrQueryChain(Chain):
44
46
  **kwargs,
45
47
  ):
46
48
  """
47
- :param llm: An LLM instance or a function that takes a prompt string and returns the LLM's response
48
- :param url: Timbr server url
49
- :param token: Timbr password or token value
50
- :param ontology: The name of the ontology/knowledge graph
49
+ :param llm: An LLM instance or a function that takes a prompt string and returns the LLM's response (optional, will use LlmWrapper with env variables if not provided)
50
+ :param url: Timbr server url (optional, defaults to TIMBR_URL environment variable)
51
+ :param token: Timbr password or token value (optional, defaults to TIMBR_TOKEN environment variable)
52
+ :param ontology: The name of the ontology/knowledge graph (optional, defaults to ONTOLOGY/TIMBR_ONTOLOGY environment variable)
51
53
  :param schema: The name of the schema to query
52
54
  :param concept: The name of the concept to query
53
55
  :param concepts_list: Optional specific concept options to query
@@ -72,6 +74,7 @@ class ExecuteTimbrQueryChain(Chain):
72
74
 
73
75
  ## Example
74
76
  ```
77
+ # Using explicit parameters
75
78
  execute_timbr_query_chain = ExecuteTimbrQueryChain(
76
79
  url=<url>,
77
80
  token=<token>,
@@ -85,14 +88,35 @@ class ExecuteTimbrQueryChain(Chain):
85
88
  note=<note>,
86
89
  )
87
90
 
91
+ # Using environment variables for timbr environment (TIMBR_URL, TIMBR_TOKEN, TIMBR_ONTOLOGY)
92
+ execute_timbr_query_chain = ExecuteTimbrQueryChain(
93
+ llm=<llm or timbr_llm_wrapper instance>,
94
+ )
95
+
96
+ # Using environment variables for both timbr environment & llm (TIMBR_URL, TIMBR_TOKEN, TIMBR_ONTOLOGY, LLM_TYPE, LLM_API_KEY, etc.)
97
+ execute_timbr_query_chain = ExecuteTimbrQueryChain()
98
+
88
99
  return execute_timbr_query_chain.invoke({ "prompt": question }).get("rows", [])
89
100
  ```
90
101
  """
91
102
  super().__init__(**kwargs)
92
- self._llm = llm
93
- self._url = url
94
- self._token = token
95
- self._ontology = ontology
103
+
104
+ # Initialize LLM - use provided one or create with LlmWrapper from env variables
105
+ if llm is not None:
106
+ self._llm = llm
107
+ else:
108
+ try:
109
+ self._llm = LlmWrapper()
110
+ except Exception as e:
111
+ raise ValueError(f"Failed to initialize LLM from environment variables. Either provide an llm parameter or ensure LLM_TYPE and LLM_API_KEY environment variables are set. Error: {e}")
112
+
113
+ self._url = url if url is not None else config.url
114
+ self._token = token if token is not None else config.token
115
+ self._ontology = ontology if ontology is not None else config.ontology
116
+
117
+ # Validate required parameters
118
+ validate_timbr_connection_params(self._url, self._token)
119
+
96
120
  self._schema = schema
97
121
  self._concept = concept
98
122
  self._concepts_list = parse_list(concepts_list)
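The constructor hunk above is the heart of this release: llm, url, token, and ontology are now optional, a missing llm is replaced by an LlmWrapper built from the LLM_* environment variables, the connection values fall back to config, and validate_timbr_connection_params fails fast if the URL or token are still missing. A usage sketch assuming the environment variables from the config.py example are exported (the question string is only a placeholder):

```
from langchain_timbr.langchain.execute_timbr_query_chain import ExecuteTimbrQueryChain

# No arguments: TIMBR_URL, TIMBR_TOKEN, TIMBR_ONTOLOGY, LLM_TYPE and LLM_API_KEY
# supply everything; a ValueError is raised if the URL or token cannot be resolved.
chain = ExecuteTimbrQueryChain()

rows = chain.invoke({"prompt": "What are the total sales for last month?"}).get("rows", [])
print(rows)
```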
@@ -181,7 +205,7 @@ class ExecuteTimbrQueryChain(Chain):
181
205
  should_validate_sql=self._should_validate_sql,
182
206
  retries=self._retries,
183
207
  max_limit=self._max_limit,
184
- note=self._note + err_txt,
208
+ note=(self._note or '') + err_txt,
185
209
  db_is_case_sensitive=self._db_is_case_sensitive,
186
210
  graph_depth=self._graph_depth,
187
211
  debug=self._debug,
@@ -251,7 +275,7 @@ class ExecuteTimbrQueryChain(Chain):
251
275
  generated.append(sql)
252
276
  # If the SQL is valid but no rows are returned, create an error message to be sent to the LLM
253
277
  if is_sql_valid:
254
- error = f"No rows returned. Please revise the SQL considering if the question was ambiguous (e.g., which ID or name to use), try use alternative columns in the WHERE clause part in a way that could match the user's intent, without adding new columns with new filters."
278
+ error = "No rows returned. Please revise the SQL considering if the question was ambiguous (e.g., which ID or name to use), try use alternative columns in the WHERE clause part in a way that could match the user's intent, without adding new columns with new filters."
255
279
  error += "\nConsider that this queries already generated and returned 0 rows:\n" + "\n".join(generated)
256
280
  is_sql_valid = False
257
281
  else:
@@ -2,8 +2,10 @@ from typing import Optional, Dict, Any
2
2
  from langchain.chains.base import Chain
3
3
  from langchain.llms.base import LLM
4
4
 
5
- from ..utils.general import to_boolean
5
+ from ..utils.general import to_boolean, validate_timbr_connection_params
6
6
  from ..utils.timbr_llm_utils import answer_question
7
+ from ..llm_wrapper.llm_wrapper import LlmWrapper
8
+ from .. import config
7
9
 
8
10
  class GenerateAnswerChain(Chain):
9
11
  """
@@ -14,9 +16,9 @@ class GenerateAnswerChain(Chain):
14
16
  """
15
17
  def __init__(
16
18
  self,
17
- llm: LLM,
18
- url: str,
19
- token: str,
19
+ llm: Optional[LLM] = None,
20
+ url: Optional[str] = None,
21
+ token: Optional[str] = None,
20
22
  verify_ssl: Optional[bool] = True,
21
23
  is_jwt: Optional[bool] = False,
22
24
  jwt_tenant_id: Optional[str] = None,
@@ -25,9 +27,9 @@ class GenerateAnswerChain(Chain):
25
27
  **kwargs,
26
28
  ):
27
29
  """
28
- :param llm: An LLM instance or a function that takes a prompt string and returns the LLM’s response
29
- :param url: Timbr server url
30
- :param token: Timbr password or token value
30
+ :param llm: An LLM instance or a function that takes a prompt string and returns the LLM’s response (optional, will use LlmWrapper with env variables if not provided)
31
+ :param url: Timbr server url (optional, defaults to TIMBR_URL environment variable)
32
+ :param token: Timbr password or token value (optional, defaults to TIMBR_TOKEN environment variable)
31
33
  :param verify_ssl: Whether to verify SSL certificates (default is True).
32
34
  :param is_jwt: Whether to use JWT authentication (default is False).
33
35
  :param jwt_tenant_id: JWT tenant ID for multi-tenant environments (required when is_jwt=True).
@@ -35,17 +37,41 @@ class GenerateAnswerChain(Chain):
35
37
 
36
38
  ## Example
37
39
  ```
40
+ # Using explicit parameters
41
+ generate_answer_chain = GenerateAnswerChain(
42
+ llm=<llm or timbr_llm_wrapper instance>,
43
+ url=<url>,
44
+ token=<token>
45
+ )
46
+
47
+ # Using environment variables for timbr environment (TIMBR_URL, TIMBR_TOKEN)
38
48
  generate_answer_chain = GenerateAnswerChain(
39
49
  llm=<llm or timbr_llm_wrapper instance>
40
50
  )
51
+
52
+ # Using environment variables for both timbr environment & llm (TIMBR_URL, TIMBR_TOKEN, LLM_TYPE, LLM_API_KEY, etc.)
53
+ generate_answer_chain = GenerateAnswerChain()
41
54
 
42
55
  return generate_answer_chain.invoke({ "prompt": prompt, "rows": rows }).get("answer", [])
43
56
  ```
44
57
  """
45
58
  super().__init__(**kwargs)
46
- self._llm = llm
47
- self._url = url
48
- self._token = token
59
+
60
+ # Initialize LLM - use provided one or create with LlmWrapper from env variables
61
+ if llm is not None:
62
+ self._llm = llm
63
+ else:
64
+ try:
65
+ self._llm = LlmWrapper()
66
+ except Exception as e:
67
+ raise ValueError(f"Failed to initialize LLM from environment variables. Either provide an llm parameter or ensure LLM_TYPE and LLM_API_KEY environment variables are set. Error: {e}")
68
+
69
+ self._url = url if url is not None else config.url
70
+ self._token = token if token is not None else config.token
71
+
72
+ # Validate required parameters
73
+ validate_timbr_connection_params(self._url, self._token)
74
+
49
75
  self._verify_ssl = to_boolean(verify_ssl)
50
76
  self._is_jwt = to_boolean(is_jwt)
51
77
  self._jwt_tenant_id = jwt_tenant_id
@@ -2,8 +2,10 @@ from typing import Optional, Union, Dict, Any
2
2
  from langchain.chains.base import Chain
3
3
  from langchain.llms.base import LLM
4
4
 
5
- from ..utils.general import parse_list, to_boolean, to_integer
5
+ from ..utils.general import parse_list, to_boolean, to_integer, validate_timbr_connection_params
6
6
  from ..utils.timbr_llm_utils import generate_sql
7
+ from ..llm_wrapper.llm_wrapper import LlmWrapper
8
+ from .. import config
7
9
 
8
10
  class GenerateTimbrSqlChain(Chain):
9
11
  """
@@ -16,10 +18,10 @@ class GenerateTimbrSqlChain(Chain):
16
18
 
17
19
  def __init__(
18
20
  self,
19
- llm: LLM,
20
- url: str,
21
- token: str,
22
- ontology: str,
21
+ llm: Optional[LLM] = None,
22
+ url: Optional[str] = None,
23
+ token: Optional[str] = None,
24
+ ontology: Optional[str] = None,
23
25
  schema: Optional[str] = 'dtimbr',
24
26
  concept: Optional[str] = None,
25
27
  concepts_list: Optional[Union[list[str], str]] = None,
@@ -41,10 +43,10 @@ class GenerateTimbrSqlChain(Chain):
41
43
  **kwargs,
42
44
  ):
43
45
  """
44
- :param llm: An LLM instance or a function that takes a prompt string and returns the LLMs response
45
- :param url: Timbr server url
46
- :param token: Timbr password or token value
47
- :param ontology: The name of the ontology/knowledge graph
46
+ :param llm: An LLM instance or a function that takes a prompt string and returns the LLM's response (optional, will use LlmWrapper with env variables if not provided)
47
+ :param url: Timbr server url (optional, defaults to TIMBR_URL environment variable)
48
+ :param token: Timbr password or token value (optional, defaults to TIMBR_TOKEN environment variable)
49
+ :param ontology: The name of the ontology/knowledge graph (optional, defaults to ONTOLOGY/TIMBR_ONTOLOGY environment variable)
48
50
  :param schema: The name of the schema to query
49
51
  :param concept: The name of the concept to query
50
52
  :param concepts_list: Optional specific concept options to query
@@ -66,6 +68,7 @@ class GenerateTimbrSqlChain(Chain):
66
68
 
67
69
  ## Example
68
70
  ```
71
+ # Using explicit parameters
69
72
  generate_timbr_sql_chain = GenerateTimbrSqlChain(
70
73
  url=<url>,
71
74
  token=<token>,
@@ -79,14 +82,35 @@ class GenerateTimbrSqlChain(Chain):
79
82
  note=<note>,
80
83
  )
81
84
 
85
+ # Using environment variables for timbr environment (TIMBR_URL, TIMBR_TOKEN, TIMBR_ONTOLOGY)
86
+ generate_timbr_sql_chain = GenerateTimbrSqlChain(
87
+ llm=<llm or timbr_llm_wrapper instance>,
88
+ )
89
+
90
+ # Using environment variables for both timbr environment & llm (TIMBR_URL, TIMBR_TOKEN, TIMBR_ONTOLOGY, LLM_TYPE, LLM_API_KEY, etc.)
91
+ generate_timbr_sql_chain = GenerateTimbrSqlChain()
92
+
82
93
  return generate_timbr_sql_chain.invoke({ "prompt": question }).get("sql", [])
83
94
  ```
84
95
  """
85
96
  super().__init__(**kwargs)
86
- self._llm = llm
87
- self._url = url
88
- self._token = token
89
- self._ontology = ontology
97
+
98
+ # Initialize LLM - use provided one or create with LlmWrapper from env variables
99
+ if llm is not None:
100
+ self._llm = llm
101
+ else:
102
+ try:
103
+ self._llm = LlmWrapper()
104
+ except Exception as e:
105
+ raise ValueError(f"Failed to initialize LLM from environment variables. Either provide an llm parameter or ensure LLM_TYPE and LLM_API_KEY environment variables are set. Error: {e}")
106
+
107
+ self._url = url if url is not None else config.url
108
+ self._token = token if token is not None else config.token
109
+ self._ontology = ontology if ontology is not None else config.ontology
110
+
111
+ # Validate required parameters
112
+ validate_timbr_connection_params(self._url, self._token)
113
+
90
114
  self._schema = schema
91
115
  self._concept = concept
92
116
  self._concepts_list = parse_list(concepts_list)
@@ -2,8 +2,10 @@ from typing import Optional, Union, Dict, Any
2
2
  from langchain.chains.base import Chain
3
3
  from langchain.llms.base import LLM
4
4
 
5
- from ..utils.general import parse_list, to_boolean, to_integer
5
+ from ..utils.general import parse_list, to_boolean, to_integer, validate_timbr_connection_params
6
6
  from ..utils.timbr_llm_utils import determine_concept
7
+ from ..llm_wrapper.llm_wrapper import LlmWrapper
8
+ from .. import config
7
9
 
8
10
 
9
11
  class IdentifyTimbrConceptChain(Chain):
@@ -17,10 +19,10 @@ class IdentifyTimbrConceptChain(Chain):
17
19
 
18
20
  def __init__(
19
21
  self,
20
- llm: LLM,
21
- url: str,
22
- token: str,
23
- ontology: str,
22
+ llm: Optional[LLM] = None,
23
+ url: Optional[str] = None,
24
+ token: Optional[str] = None,
25
+ ontology: Optional[str] = None,
24
26
  concepts_list: Optional[Union[list[str], str]] = None,
25
27
  views_list: Optional[Union[list[str], str]] = None,
26
28
  include_logic_concepts: Optional[bool] = False,
@@ -36,10 +38,10 @@ class IdentifyTimbrConceptChain(Chain):
36
38
  **kwargs,
37
39
  ):
38
40
  """
39
- :param llm: An LLM instance or a function that takes a prompt string and returns the LLMs response
40
- :param url: Timbr server url
41
- :param token: Timbr password or token value
42
- :param ontology: The name of the ontology/knowledge graph
41
+ :param llm: An LLM instance or a function that takes a prompt string and returns the LLM's response (optional, will use LlmWrapper with env variables if not provided)
42
+ :param url: Timbr server url (optional, defaults to TIMBR_URL environment variable)
43
+ :param token: Timbr password or token value (optional, defaults to TIMBR_TOKEN environment variable)
44
+ :param ontology: The name of the ontology/knowledge graph (optional, defaults to ONTOLOGY/TIMBR_ONTOLOGY environment variable)
43
45
  :param concepts_list: Optional specific concept options to query
44
46
  :param views_list: Optional specific view options to query
45
47
  :param include_logic_concepts: Optional boolean to include logic concepts (concepts without unique properties which only inherits from an upper level concept with filter logic) in the query.
@@ -55,10 +57,11 @@ class IdentifyTimbrConceptChain(Chain):
55
57
 
56
58
  ## Example
57
59
  ```
60
+ # Using explicit parameters
58
61
  identify_timbr_concept_chain = IdentifyTimbrConceptChain(
62
+ llm=<llm or timbr_llm_wrapper instance>,
59
63
  url=<url>,
60
64
  token=<token>,
61
- llm=<llm or timbr_llm_wrapper instance>,
62
65
  ontology=<ontology_name>,
63
66
  concepts_list=<concepts>,
64
67
  views_list=<views>,
@@ -66,14 +69,35 @@ class IdentifyTimbrConceptChain(Chain):
66
69
  note=<note>,
67
70
  )
68
71
 
72
+ # Using environment variables for timbr environment (TIMBR_URL, TIMBR_TOKEN, TIMBR_ONTOLOGY)
73
+ identify_timbr_concept_chain = IdentifyTimbrConceptChain(
74
+ llm=<llm or timbr_llm_wrapper instance>,
75
+ )
76
+
77
+ # Using environment variables for both timbr environment & llm (TIMBR_URL, TIMBR_TOKEN, TIMBR_ONTOLOGY, LLM_TYPE, LLM_API_KEY, etc.)
78
+ identify_timbr_concept_chain = IdentifyTimbrConceptChain()
79
+
69
80
  return identify_timbr_concept_chain.invoke({ "prompt": question }).get("concept", None)
70
81
  ```
71
82
  """
72
83
  super().__init__(**kwargs)
73
- self._llm = llm
74
- self._url = url
75
- self._token = token
76
- self._ontology = ontology
84
+
85
+ # Initialize LLM - use provided one or create with LlmWrapper from env variables
86
+ if llm is not None:
87
+ self._llm = llm
88
+ else:
89
+ try:
90
+ self._llm = LlmWrapper()
91
+ except Exception as e:
92
+ raise ValueError(f"Failed to initialize LLM from environment variables. Either provide an llm parameter or ensure LLM_TYPE and LLM_API_KEY environment variables are set. Error: {e}")
93
+
94
+ self._url = url if url is not None else config.url
95
+ self._token = token if token is not None else config.token
96
+ self._ontology = ontology if ontology is not None else config.ontology
97
+
98
+ # Validate required parameters
99
+ validate_timbr_connection_params(self._url, self._token)
100
+
77
101
  self._concepts_list = parse_list(concepts_list)
78
102
  self._views_list = parse_list(views_list)
79
103
  self._include_logic_concepts = to_boolean(include_logic_concepts)
@@ -10,10 +10,10 @@ from .generate_answer_chain import GenerateAnswerChain
10
10
  class TimbrSqlAgent(BaseSingleActionAgent):
11
11
  def __init__(
12
12
  self,
13
- llm: LLM,
14
- url: str,
15
- token: str,
16
- ontology: str,
13
+ llm: Optional[LLM] = None,
14
+ url: Optional[str] = None,
15
+ token: Optional[str] = None,
16
+ ontology: Optional[str] = None,
17
17
  schema: Optional[str] = 'dtimbr',
18
18
  concept: Optional[str] = None,
19
19
  concepts_list: Optional[Union[list[str], str]] = None,
@@ -37,10 +37,10 @@ class TimbrSqlAgent(BaseSingleActionAgent):
37
37
  debug: Optional[bool] = False
38
38
  ):
39
39
  """
40
- :param llm: Language model to use
41
- :param url: Timbr server URL
42
- :param token: Timbr authentication token
43
- :param ontology: Name of the ontology/knowledge graph
40
+ :param llm: An LLM instance or a function that takes a prompt string and returns the LLM's response (optional, will use LlmWrapper with env variables if not provided)
41
+ :param url: Timbr server URL (optional, defaults to TIMBR_URL environment variable)
42
+ :param token: Timbr authentication token (optional, defaults to TIMBR_TOKEN environment variable)
43
+ :param ontology: Name of the ontology/knowledge graph (optional, defaults to ONTOLOGY/TIMBR_ONTOLOGY environment variable)
44
44
  :param schema: Optional specific schema name to query
45
45
  :param concept: Optional specific concept name to query
46
46
  :param concepts_list: Optional specific concept options to query
@@ -64,6 +64,7 @@ class TimbrSqlAgent(BaseSingleActionAgent):
64
64
 
65
65
  ## Example
66
66
  ```
67
+ # Using explicit parameters
67
68
  agent = TimbrSqlAgent(
68
69
  llm=<llm>,
69
70
  url=<url>,
@@ -77,6 +78,14 @@ class TimbrSqlAgent(BaseSingleActionAgent):
77
78
  retries=<retries>,
78
79
  note=<note>,
79
80
  )
81
+
82
+ # Using environment variables for timbr environment (TIMBR_URL, TIMBR_TOKEN, TIMBR_ONTOLOGY)
83
+ agent = TimbrSqlAgent(
84
+ llm=<llm>,
85
+ )
86
+
87
+ # Using environment variables for both timbr environment & llm (TIMBR_URL, TIMBR_TOKEN, TIMBR_ONTOLOGY, LLM_TYPE, LLM_API_KEY, etc.)
88
+ agent = TimbrSqlAgent()
80
89
  ```
81
90
  """
82
91
  super().__init__()
@@ -298,10 +307,10 @@ class TimbrSqlAgent(BaseSingleActionAgent):
298
307
 
299
308
 
300
309
  def create_timbr_sql_agent(
301
- llm: LLM,
302
- url: str,
303
- token: str,
304
- ontology: str,
310
+ llm: Optional[LLM] = None,
311
+ url: Optional[str] = None,
312
+ token: Optional[str] = None,
313
+ ontology: Optional[str] = None,
305
314
  schema: Optional[str] = 'dtimbr',
306
315
  concept: Optional[str] = None,
307
316
  concepts_list: Optional[Union[list[str], str]] = None,
@@ -327,10 +336,10 @@ def create_timbr_sql_agent(
327
336
  """
328
337
  Create and configure a Timbr agent with its executor.
329
338
 
330
- :param llm: Language model to use
331
- :param url: Timbr server URL
332
- :param token: Timbr authentication token
333
- :param ontology: Name of the ontology/knowledge graph
339
+ :param llm: An LLM instance or a function that takes a prompt string and returns the LLM's response (optional, will use LlmWrapper with env variables if not provided)
340
+ :param url: Timbr server URL (optional, defaults to TIMBR_URL environment variable)
341
+ :param token: Timbr authentication token (optional, defaults to TIMBR_TOKEN environment variable)
342
+ :param ontology: Name of the ontology/knowledge graph (optional, defaults to ONTOLOGY/TIMBR_ONTOLOGY environment variable)
334
343
  :param schema: Optional specific schema name to query
335
344
  :param concept: Optional specific concept name to query
336
345
  :param concepts_list: Optional specific concept options to query
@@ -357,6 +366,7 @@ def create_timbr_sql_agent(
357
366
 
358
367
  ## Example
359
368
  ```
369
+ # Using explicit parameters
360
370
  agent = create_timbr_sql_agent(
361
371
  llm=<llm>,
362
372
  url=<url>,
@@ -372,7 +382,15 @@ def create_timbr_sql_agent(
372
382
  retries=<retries>,
373
383
  note=<note>,
374
384
  )
375
-
385
+
386
+ # Using environment variables for timbr environment (TIMBR_URL, TIMBR_TOKEN, TIMBR_ONTOLOGY)
387
+ agent = create_timbr_sql_agent(
388
+ llm=<llm>,
389
+ )
390
+
391
+ # Using environment variables for both timbr environment & llm (TIMBR_URL, TIMBR_TOKEN, TIMBR_ONTOLOGY, LLM_TYPE, LLM_API_KEY, etc.)
392
+ agent = create_timbr_sql_agent()
393
+
376
394
  result = agent.invoke("What are the total sales for last month?")
377
395
 
378
396
  # Access the components of the result:
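The same environment-driven defaults apply to the agent factory; a short sketch assuming the TIMBR_* and LLM_* variables are exported (the question string is taken from the docstring above):

```
from langchain_timbr.langchain.timbr_sql_agent import create_timbr_sql_agent

# All connection and LLM settings come from the environment
agent = create_timbr_sql_agent()

result = agent.invoke("What are the total sales for last month?")
print(result)
```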
@@ -2,9 +2,11 @@ from typing import Optional, Union, Dict, Any
2
2
  from langchain.chains.base import Chain
3
3
  from langchain.llms.base import LLM
4
4
 
5
- from ..utils.general import parse_list, to_integer, to_boolean
5
+ from ..utils.general import parse_list, to_integer, to_boolean, validate_timbr_connection_params
6
6
  from ..utils.timbr_llm_utils import generate_sql
7
7
  from ..utils.timbr_utils import validate_sql
8
+ from ..llm_wrapper.llm_wrapper import LlmWrapper
9
+ from .. import config
8
10
 
9
11
 
10
12
  class ValidateTimbrSqlChain(Chain):
@@ -18,10 +20,10 @@ class ValidateTimbrSqlChain(Chain):
18
20
 
19
21
  def __init__(
20
22
  self,
21
- llm: LLM,
22
- url: str,
23
- token: str,
24
- ontology: str,
23
+ llm: Optional[LLM] = None,
24
+ url: Optional[str] = None,
25
+ token: Optional[str] = None,
26
+ ontology: Optional[str] = None,
25
27
  schema: Optional[str] = 'dtimbr',
26
28
  concept: Optional[str] = None,
27
29
  retries: Optional[int] = 3,
@@ -42,10 +44,10 @@ class ValidateTimbrSqlChain(Chain):
42
44
  **kwargs,
43
45
  ):
44
46
  """
45
- :param llm: An LLM instance or a function that takes a prompt string and returns the LLMs response
46
- :param url: Timbr server url
47
- :param token: Timbr password or token value
48
- :param ontology: The name of the ontology/knowledge graph
47
+ :param llm: An LLM instance or a function that takes a prompt string and returns the LLM's response (optional, will use LlmWrapper with env variables if not provided)
48
+ :param url: Timbr server url (optional, defaults to TIMBR_URL environment variable)
49
+ :param token: Timbr password or token value (optional, defaults to TIMBR_TOKEN environment variable)
50
+ :param ontology: The name of the ontology/knowledge graph (optional, defaults to ONTOLOGY/TIMBR_ONTOLOGY environment variable)
49
51
  :param schema: The name of the schema to query
50
52
  :param concept: The name of the concept to query
51
53
  :param retries: The maximum number of retries to attempt
@@ -66,6 +68,7 @@ class ValidateTimbrSqlChain(Chain):
66
68
 
67
69
  ## Example
68
70
  ```
71
+ # Using explicit parameters
69
72
  validate_timbr_sql_chain = ValidateTimbrSqlChain(
70
73
  url=<url>,
71
74
  token=<token>,
@@ -80,14 +83,35 @@ class ValidateTimbrSqlChain(Chain):
80
83
  note=<note>,
81
84
  )
82
85
 
86
+ # Using environment variables for timbr environment (TIMBR_URL, TIMBR_TOKEN, TIMBR_ONTOLOGY)
87
+ validate_timbr_sql_chain = ValidateTimbrSqlChain(
88
+ llm=<llm or timbr_llm_wrapper instance>,
89
+ )
90
+
91
+ # Using environment variables for both timbr environment & llm (TIMBR_URL, TIMBR_TOKEN, TIMBR_ONTOLOGY, LLM_TYPE, LLM_API_KEY, etc.)
92
+ validate_timbr_sql_chain = ValidateTimbrSqlChain()
93
+
83
94
  return validate_timbr_sql_chain.invoke({ "prompt": question, "sql": <latest_query_to_validate> }).get("sql", [])
84
95
  ```
85
96
  """
86
97
  super().__init__(**kwargs)
87
- self._llm = llm
88
- self._url = url
89
- self._token = token
90
- self._ontology = ontology
98
+
99
+ # Initialize LLM - use provided one or create with LlmWrapper from env variables
100
+ if llm is not None:
101
+ self._llm = llm
102
+ else:
103
+ try:
104
+ self._llm = LlmWrapper()
105
+ except Exception as e:
106
+ raise ValueError(f"Failed to initialize LLM from environment variables. Either provide an llm parameter or ensure LLM_TYPE and LLM_API_KEY environment variables are set. Error: {e}")
107
+
108
+ self._url = url if url is not None else config.url
109
+ self._token = token if token is not None else config.token
110
+ self._ontology = ontology if ontology is not None else config.ontology
111
+
112
+ # Validate required parameters
113
+ validate_timbr_connection_params(self._url, self._token)
114
+
91
115
  self._schema = schema
92
116
  self._concept = concept
93
117
  self._retries = retries
@@ -13,7 +13,7 @@ class ExecuteSemanticQueryNode:
13
13
  """
14
14
  def __init__(
15
15
  self,
16
- llm: LLM,
16
+ llm: Optional[LLM] = None,
17
17
  url: Optional[str] = None,
18
18
  token: Optional[str] = None,
19
19
  ontology: Optional[str] = None,
@@ -40,10 +40,10 @@ class ExecuteSemanticQueryNode:
40
40
  **kwargs,
41
41
  ):
42
42
  """
43
- :param llm: An LLM instance or a function that takes a prompt string and returns the LLMs response
44
- :param url: Timbr server url
45
- :param token: Timbr password or token value
46
- :param ontology: The name of the ontology/knowledge graph
43
+ :param llm: An LLM instance or a function that takes a prompt string and returns the LLM's response (optional, will use LlmWrapper with env variables if not provided)
44
+ :param url: Timbr server url (optional, defaults to TIMBR_URL environment variable)
45
+ :param token: Timbr password or token value (optional, defaults to TIMBR_TOKEN environment variable)
46
+ :param ontology: The name of the ontology/knowledge graph (optional, defaults to ONTOLOGY/TIMBR_ONTOLOGY environment variable)
47
47
  :param schema: The name of the schema to query
48
48
  :param concept: The name of the concept to query
49
49
  :param concepts_list: Optional specific concept options to query
@@ -97,7 +97,7 @@ class ExecuteSemanticQueryNode:
97
97
  def run(self, state: StateGraph) -> dict:
98
98
  try:
99
99
  prompt = state.messages[-1].content if state.messages[-1] else None
100
- except:
100
+ except Exception:
101
101
  prompt = state.get('prompt', None)
102
102
 
103
103
  return self.chain.invoke({ "prompt": prompt })
@@ -1,6 +1,5 @@
1
- from typing import Optional, Union
1
+ from typing import Optional
2
2
  from langchain.llms.base import LLM
3
- from langgraph.graph import StateGraph
4
3
 
5
4
  from ..langchain import GenerateAnswerChain
6
5
 
@@ -14,9 +13,9 @@ class GenerateResponseNode:
14
13
  """
15
14
  def __init__(
16
15
  self,
17
- llm: LLM,
18
- url: str,
19
- token: str,
16
+ llm: Optional[LLM] = None,
17
+ url: Optional[str] = None,
18
+ token: Optional[str] = None,
20
19
  verify_ssl: Optional[bool] = True,
21
20
  is_jwt: Optional[bool] = False,
22
21
  jwt_tenant_id: Optional[str] = None,
@@ -25,9 +24,9 @@ class GenerateResponseNode:
25
24
  **kwargs,
26
25
  ):
27
26
  """
28
- :param llm: An LLM instance or a function that takes a prompt string and returns the LLMs response
29
- :param url: Timbr server url
30
- :param token: Timbr password or token value
27
+ :param llm: An LLM instance or a function that takes a prompt string and returns the LLM's response (optional, will use LlmWrapper with env variables if not provided)
28
+ :param url: Timbr server url (optional, defaults to TIMBR_URL environment variable)
29
+ :param token: Timbr password or token value (optional, defaults to TIMBR_TOKEN environment variable)
31
30
  :param verify_ssl: Whether to verify SSL certificates (default is True).
32
31
  :param is_jwt: Whether to use JWT authentication (default is False).
33
32
  :param jwt_tenant_id: JWT tenant ID for multi-tenant environments (required when is_jwt=True).
@@ -11,7 +11,7 @@ class GenerateTimbrSqlNode:
11
11
  """
12
12
  def __init__(
13
13
  self,
14
- llm: LLM,
14
+ llm: Optional[LLM] = None,
15
15
  url: Optional[str] = None,
16
16
  token: Optional[str] = None,
17
17
  ontology: Optional[str] = None,
@@ -36,10 +36,10 @@ class GenerateTimbrSqlNode:
36
36
  **kwargs,
37
37
  ):
38
38
  """
39
- :param llm: An LLM instance or a function that takes a prompt string and returns the LLMs response
40
- :param url: Timbr server url
41
- :param token: Timbr password or token value
42
- :param ontology: The name of the ontology/knowledge graph
39
+ :param llm: An LLM instance or a function that takes a prompt string and returns the LLM's response (optional, will use LlmWrapper with env variables if not provided)
40
+ :param url: Timbr server url (optional, defaults to TIMBR_URL environment variable)
41
+ :param token: Timbr password or token value (optional, defaults to TIMBR_TOKEN environment variable)
42
+ :param ontology: The name of the ontology/knowledge graph (optional, defaults to ONTOLOGY/TIMBR_ONTOLOGY environment variable)
43
43
  :param schema: The name of the schema to query
44
44
  :param concept: The name of the concept to query
45
45
  :param concepts_list: Optional specific concept options to query
@@ -88,7 +88,7 @@ class GenerateTimbrSqlNode:
88
88
  def run(self, state: StateGraph) -> dict:
89
89
  try:
90
90
  prompt = state.messages[-1].content if (state.messages and state.messages[-1]) else None
91
- except:
91
+ except Exception:
92
92
  prompt = state.get('prompt', None)
93
93
 
94
94
  return self.chain.invoke({ "prompt": prompt })
@@ -8,7 +8,7 @@ from ..langchain.identify_concept_chain import IdentifyTimbrConceptChain
8
8
  class IdentifyConceptNode:
9
9
  def __init__(
10
10
  self,
11
- llm: LLM,
11
+ llm: Optional[LLM] = None,
12
12
  url: Optional[str] = None,
13
13
  token: Optional[str] = None,
14
14
  ontology: Optional[str] = None,
@@ -27,10 +27,10 @@ class IdentifyConceptNode:
27
27
  **kwargs,
28
28
  ):
29
29
  """
30
- :param llm: An LLM instance or a function that takes a prompt string and returns the LLMs response
31
- :param url: Timbr server url
32
- :param token: Timbr password or token value
33
- :param ontology: The name of the ontology/knowledge graph
30
+ :param llm: An LLM instance or a function that takes a prompt string and returns the LLM's response (optional, will use LlmWrapper with env variables if not provided)
31
+ :param url: Timbr server url (optional, defaults to TIMBR_URL environment variable)
32
+ :param token: Timbr password or token value (optional, defaults to TIMBR_TOKEN environment variable)
33
+ :param ontology: The name of the ontology/knowledge graph (optional, defaults to ONTOLOGY/TIMBR_ONTOLOGY environment variable)
34
34
  :param concepts_list: Optional specific concept options to query
35
35
  :param views_list: Optional specific view options to query
36
36
  :param include_logic_concepts: Optional boolean to include logic concepts (concepts without unique properties which only inherits from an upper level concept with filter logic) in the query.
@@ -67,7 +67,7 @@ class IdentifyConceptNode:
67
67
  def run(self, state: StateGraph) -> dict:
68
68
  try:
69
69
  prompt = state.messages[-1].content if state.messages[-1] else None
70
- except:
70
+ except Exception:
71
71
  prompt = state.get('prompt', None)
72
72
 
73
73
  return self.chain.invoke({ "prompt": prompt })
@@ -13,10 +13,10 @@ class ValidateSemanticSqlNode:
13
13
  """
14
14
  def __init__(
15
15
  self,
16
- llm: LLM,
17
- url: str,
18
- token: str,
19
- ontology: str,
16
+ llm: Optional[LLM] = None,
17
+ url: Optional[str] = None,
18
+ token: Optional[str] = None,
19
+ ontology: Optional[str] = None,
20
20
  schema: Optional[str] = None,
21
21
  concept: Optional[str] = None,
22
22
  retries: Optional[int] = 3,
@@ -37,10 +37,10 @@ class ValidateSemanticSqlNode:
37
37
  **kwargs,
38
38
  ):
39
39
  """
40
- :param llm: An LLM instance or a function that takes a prompt string and returns the LLMs response
41
- :param url: Timbr server url
42
- :param token: Timbr password or token value
43
- :param ontology: The name of the ontology/knowledge graph
40
+ :param llm: An LLM instance or a function that takes a prompt string and returns the LLM's response (optional, will use LlmWrapper with env variables if not provided)
41
+ :param url: Timbr server url (optional, defaults to TIMBR_URL environment variable)
42
+ :param token: Timbr password or token value (optional, defaults to TIMBR_TOKEN environment variable)
43
+ :param ontology: The name of the ontology/knowledge graph (optional, defaults to ONTOLOGY/TIMBR_ONTOLOGY environment variable)
44
44
  :param schema: The name of the schema to query
45
45
  :param concept: The name of the concept to query
46
46
  :param retries: The maximum number of retries to attempt
@@ -88,7 +88,7 @@ class ValidateSemanticSqlNode:
88
88
  try:
89
89
  sql = state.sql
90
90
  prompt = state.prompt
91
- except:
91
+ except Exception:
92
92
  sql = state.get('sql', None)
93
93
  prompt = state.get('prompt', None)
94
94
 
@@ -1,10 +1,11 @@
1
1
  from enum import Enum
2
+ from typing import Optional
2
3
  from langchain.llms.base import LLM
3
4
  from pydantic import Field
4
5
 
5
6
  from .timbr_llm_wrapper import TimbrLlmWrapper
6
- from ..utils.general import is_llm_type, is_support_temperature
7
- from ..config import llm_temperature
7
+ from ..utils.general import is_llm_type, is_support_temperature, get_supported_models, parse_additional_params
8
+ from ..config import llm_temperature, llm_type as default_llm_type, llm_model as default_llm_model, llm_api_key as default_llm_api_key, llm_additional_params as default_llm_additional_params
8
9
 
9
10
  class LlmTypes(Enum):
10
11
  OpenAI = 'openai-chat'
@@ -21,29 +22,47 @@ class LlmWrapper(LLM):
21
22
  LlmWrapper is a unified interface for connecting to various Large Language Model (LLM) providers
22
23
  (OpenAI, Anthropic, Google, Azure OpenAI, Snowflake Cortex, Databricks, etc.) using LangChain. It abstracts
23
24
  the initialization and connection logic for each provider, allowing you to switch between them
24
- with a consistent API.
25
25
  """
26
- client: LLM = Field(default=None, exclude=True)
26
+ client: Optional[LLM] = Field(default=None, exclude=True)
27
27
 
28
28
  def __init__(
29
29
  self,
30
- llm_type: str,
31
- api_key: str,
32
- model: str = None,
30
+ llm_type: Optional[str] = None,
31
+ api_key: Optional[str] = None,
32
+ model: Optional[str] = None,
33
33
  **llm_params,
34
34
  ):
35
35
  """
36
- :param llm_type (str): The type of LLM provider (e.g., 'openai-chat', 'anthropic-chat').
37
- :param api_key (str): The API key for authenticating with the LLM provider.
38
- :param model (str): The model name or deployment to use. Defaults to provider-specific values (Optional).
36
+ :param llm_type (str, optional): The type of LLM provider (e.g., 'openai-chat', 'anthropic-chat').
37
+ If not provided, will try to get from LLM_TYPE environment variable.
38
+ :param api_key (str, optional): The API key for authenticating with the LLM provider.
39
+ If not provided, will try to get from LLM_API_KEY environment variable.
40
+ :param model (str, optional): The model name or deployment to use. If not provided, will try to get from LLM_MODEL environment variable.
39
41
  :param **llm_params: Additional parameters for the LLM (e.g., temperature, endpoint, etc.).
40
42
  """
41
43
  super().__init__()
44
+
45
+ selected_llm_type = llm_type or default_llm_type
46
+ selected_api_key = api_key or default_llm_api_key
47
+ selected_model = model or default_llm_model
48
+ selected_additional_params = llm_params.pop('additional_params', None)
49
+
50
+ # Parse additional parameters from init params or config and merge with provided params
51
+ default_additional_params = parse_additional_params(selected_additional_params or default_llm_additional_params or {})
52
+ additional_llm_params = {**default_additional_params, **llm_params}
53
+
54
+ # Validation: Ensure we have the required parameters
55
+ if not selected_llm_type:
56
+ raise ValueError("llm_type must be provided either as parameter or in config (LLM_TYPE environment variable)")
57
+
58
+ if not selected_api_key:
59
+ raise ValueError("api_key must be provided either as parameter or in config (LLM_API_KEY environment variable)")
60
+
42
61
  self.client = self._connect_to_llm(
43
- llm_type,
44
- api_key,
45
- model,
46
- **llm_params,
62
+ selected_llm_type,
63
+ selected_api_key,
64
+ selected_model,
65
+ **additional_llm_params,
47
66
  )
48
67
 
49
68
 
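As the hunk above shows, LlmWrapper now resolves llm_type, api_key, and model from LLM_TYPE, LLM_API_KEY, and LLM_MODEL when they are not passed, and merges an additional_params argument (or LLM_ADDITIONAL_PARAMS) into the provider keyword arguments before connecting. A sketch of both construction styles; the key and model below are placeholders:

```
from langchain_timbr.llm_wrapper.llm_wrapper import LlmWrapper

# Explicit configuration; additional_params may be a dict or an "a=1,b=2" string
llm = LlmWrapper(
    llm_type="openai-chat",
    api_key="sk-placeholder",
    model="gpt-4o",
    additional_params={"temperature": 0.2},
)

# Environment-driven configuration: raises ValueError when LLM_TYPE or LLM_API_KEY
# is missing from both the arguments and the environment.
llm_from_env = LlmWrapper()
```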
@@ -168,7 +187,7 @@ class LlmWrapper(LLM):
168
187
  # For Azure, get the deployments instead of models
169
188
  try:
170
189
  models = [model.id for model in client.models.list()]
171
- except:
190
+ except Exception:
172
191
  # If listing models fails, provide some common deployment names
173
192
  models = ["gpt-4o", "Other (Custom)"]
174
193
  elif is_llm_type(self._llm_type, LlmTypes.Snowflake):
@@ -185,8 +204,20 @@ class LlmWrapper(LLM):
185
204
 
186
205
  # elif self._is_llm_type(self._llm_type, LlmTypes.Timbr):
187
206
 
188
- except Exception as e:
189
- models = []
207
+ except Exception:
208
+ # If model list fetching throws an exception, return default value using get_supported_models
209
+ llm_type_name = None
210
+ if hasattr(self, '_llm_type'):
211
+ # Try to extract the LLM type name from the _llm_type
212
+ for llm_enum in LlmTypes:
213
+ if is_llm_type(self._llm_type, llm_enum):
214
+ llm_type_name = llm_enum.name
215
+ break
216
+
217
+ if llm_type_name:
218
+ models = get_supported_models(llm_type_name)
219
+ else:
220
+ models = []
190
221
 
191
222
  return sorted(models)
192
223
 
@@ -1,4 +1,5 @@
1
1
  import os
2
+ from typing import Optional
2
3
  import json
3
4
 
4
5
  ### A global helper functions to use across the project
@@ -30,6 +31,43 @@ def to_integer(value) -> int:
30
31
  raise ValueError(f"Failed to parse integer value: {e}")
31
32
 
32
33
 
34
+ def parse_additional_params(value) -> dict:
35
+ """
36
+ Parse additional parameters from string format 'a=1,b=2' or return dict as-is.
37
+
38
+ Args:
39
+ value: String in format 'key=value,key2=value2', JSON string, or dict
40
+
41
+ Returns:
42
+ Dictionary of parsed parameters
43
+ """
44
+ try:
45
+ if isinstance(value, dict):
46
+ return value
47
+ elif isinstance(value, str) and value.strip():
48
+ # Try to parse as JSON first
49
+ stripped_value = value.strip()
50
+ if stripped_value.startswith('{') and stripped_value.endswith('}'):
51
+ try:
52
+ return json.loads(stripped_value)
53
+ except json.JSONDecodeError:
54
+ pass
55
+
56
+ # Fall back to key=value parsing
57
+ params = {}
58
+ for pair in (value.split('&') if '&' in value else value.split(',')):
59
+ if '=' in pair:
60
+ key, val = pair.split('=', 1)
61
+ params[key.strip()] = val.strip()
62
+ elif ':' in pair:
63
+ key, val = pair.split(':', 1)
64
+ params[key.strip()] = val.strip()
65
+ return params
66
+ return {}
67
+ except Exception as e:
68
+ raise ValueError(f"Failed to parse additional parameters: {e}")
69
+
70
+
33
71
  def is_llm_type(llm_type, enum_value):
34
72
  """Check if llm_type equals the enum value or its name, case-insensitive."""
35
73
  if llm_type == enum_value:
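The new parse_additional_params helper above accepts a dict, a JSON object string, or delimited key=value / key:value pairs (split on '&' when the string contains one, otherwise on ','), and an empty string yields an empty dict. A few illustrative calls with their expected results:

```
from langchain_timbr.utils.general import parse_additional_params

parse_additional_params({"temperature": 0.2})
# -> {"temperature": 0.2}                          (dicts pass through unchanged)

parse_additional_params('{"temperature": 0.2, "max_tokens": 1024}')
# -> {"temperature": 0.2, "max_tokens": 1024}      (JSON objects are decoded)

parse_additional_params("temperature=0.2,max_tokens=1024")
# -> {"temperature": "0.2", "max_tokens": "1024"}  (key=value pairs stay strings)

parse_additional_params("endpoint:https://example.host&timeout:30")
# -> {"endpoint": "https://example.host", "timeout": "30"}

parse_additional_params("")
# -> {}
```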
@@ -51,10 +89,42 @@ def is_llm_type(llm_type, enum_value):
51
89
  return False
52
90
 
53
91
 
92
+ def validate_timbr_connection_params(url: Optional[str] = None, token: Optional[str] = None) -> None:
93
+ """
94
+ Validate that required Timbr connection parameters are provided.
95
+
96
+ Args:
97
+ url: Timbr server URL
98
+ token: Timbr authentication token
99
+
100
+ Raises:
101
+ ValueError: If URL or token are not provided with clear instructions
102
+ """
103
+ if not url:
104
+ raise ValueError("URL must be provided either through the 'url' parameter or by setting the 'TIMBR_URL' environment variable")
105
+ if not token:
106
+ raise ValueError("Token must be provided either through the 'token' parameter or by setting the 'TIMBR_TOKEN' environment variable")
107
+
108
+
54
109
  def is_support_temperature(llm_type: str, llm_model: str) -> bool:
55
110
  """
56
111
  Check if the LLM model supports temperature setting.
57
112
  """
113
+ supported_models = get_supported_models(llm_type)
114
+ return llm_model in supported_models
115
+
116
+
117
+ def get_supported_models(llm_type: str) -> list[str]:
118
+ """
119
+ Get the list of supported models for a given LLM type.
120
+
121
+ Args:
122
+ llm_type (str): The LLM type to get supported models for
123
+
124
+ Returns:
125
+ list[str]: List of supported model names for the given LLM type.
126
+ Returns empty list if llm_type is not found in the JSON file.
127
+ """
58
128
  current_dir = os.path.dirname(os.path.abspath(__file__))
59
129
  json_file_path = os.path.join(current_dir, 'temperature_supported_models.json')
60
130
 
@@ -62,10 +132,9 @@ def is_support_temperature(llm_type: str, llm_model: str) -> bool:
62
132
  with open(json_file_path, 'r') as f:
63
133
  temperature_supported_models = json.load(f)
64
134
 
65
- # Check if llm_type exists and llm_model is in its list
66
- if llm_type in temperature_supported_models:
67
- return llm_model in temperature_supported_models[llm_type]
135
+ # Return the list of models for the given llm_type, or empty list if not found
136
+ return temperature_supported_models.get(llm_type, [])
68
137
 
69
- return False
70
- except (FileNotFoundError, json.JSONDecodeError, KeyError):
71
- return False
138
+ except (FileNotFoundError, json.JSONDecodeError):
139
+ return []
140
+
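Two behavioral notes on the hunks above: validate_timbr_connection_params is the single failure point the chains call after resolving their defaults, and is_support_temperature is now a membership check against get_supported_models, which also backs the model-list fallback in LlmWrapper. A small sketch; the type key and model name depend on the bundled temperature_supported_models.json, so treat them as illustrative:

```
from langchain_timbr.utils.general import (
    get_supported_models,
    is_support_temperature,
    validate_timbr_connection_params,
)

models = get_supported_models("OpenAI")            # [] when the type key is unknown
print(is_support_temperature("OpenAI", "gpt-4o"))  # True only if listed under that key

try:
    validate_timbr_connection_params(url=None, token="tk_placeholder")
except ValueError as err:
    print(err)  # points at the missing 'url' parameter / TIMBR_URL variable
```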
@@ -1,32 +1,45 @@
1
1
  import requests
2
2
  from typing import Dict, Any, Optional, List, Union
3
- from langchain.schema import SystemMessage, HumanMessage
4
3
  from langchain.prompts.chat import ChatPromptTemplate
5
4
  from langchain.prompts import SystemMessagePromptTemplate, HumanMessagePromptTemplate
6
5
  import json
7
6
  import logging
8
-
9
- from ..config import url, token as default_token, is_jwt, jwt_tenant_id as default_jwt_tenant_id, llm_timeout
7
+ from ..config import url as default_url, token as default_token, is_jwt, jwt_tenant_id as default_jwt_tenant_id, llm_timeout
10
8
 
11
9
  logger = logging.getLogger(__name__)
12
10
 
13
11
  # Global template cache shared across all PromptService instances
14
- _global_template_cache = {}
12
+ _global_template_cache: dict[Any, Any] = {}
15
13
 
16
14
  class PromptService:
17
15
  def __init__(
18
16
  self,
19
- base_url: Optional[str] = url,
20
- token: Optional[str] = default_token,
21
- is_jwt: Optional[bool] = is_jwt,
22
- jwt_tenant_id: Optional[str] = default_jwt_tenant_id,
23
- timeout: Optional[int] = llm_timeout,
17
+ conn_params: Optional[Dict[str, Any]] = None,
18
+ **kwargs
24
19
  ):
25
- self.base_url = base_url.rstrip('/') if base_url else ''
26
- self.token = token
27
- self.is_jwt = is_jwt
28
- self.jwt_tenant_id = jwt_tenant_id
29
- self.timeout = timeout
20
+ """
21
+ Initialize PromptService with connection parameters.
22
+
23
+ Args:
24
+ conn_params: Dictionary containing connection parameters
25
+ **kwargs: Additional parameters for backward compatibility
26
+ """
27
+ # Extract relevant parameters from conn_params or use defaults
28
+ if conn_params:
29
+ url_value = conn_params.get('url') or default_url
30
+ self.base_url = url_value.rstrip('/') if url_value else ''
31
+ self.token = conn_params.get('token') or default_token
32
+ self.is_jwt = conn_params.get('is_jwt', is_jwt)
33
+ self.jwt_tenant_id = conn_params.get('jwt_tenant_id') or default_jwt_tenant_id
34
+ self.timeout = conn_params.get('timeout') or llm_timeout
35
+ else:
36
+ # Fallback to kwargs for backward compatibility
37
+ url_value = kwargs.get('url') or default_url
38
+ self.base_url = url_value.rstrip('/') if url_value else ''
39
+ self.token = str(kwargs.get('token') or default_token)
40
+ self.is_jwt = kwargs.get('is_jwt', is_jwt)
41
+ self.jwt_tenant_id = kwargs.get('jwt_tenant_id') or default_jwt_tenant_id
42
+ self.timeout = kwargs.get('timeout') or llm_timeout
30
43
 
31
44
 
32
45
  def _get_headers(self) -> Dict[str, str]:
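PromptService now takes a single conn_params dict, matching the shape the chains and timbr_llm_utils already pass around, while plain keyword arguments remain as a backward-compatible path. A sketch with placeholder connection values:

```
from langchain_timbr.utils.prompt_service import PromptService

conn_params = {
    "url": "https://your-timbr-host/",  # placeholder
    "token": "tk_placeholder",
    "is_jwt": False,
    "jwt_tenant_id": None,
    "timeout": 60,
}

service = PromptService(conn_params=conn_params)

# Older call sites can keep passing the same values as keyword arguments
legacy_service = PromptService(url="https://your-timbr-host/", token="tk_placeholder")
```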
@@ -220,99 +233,67 @@ class PromptTemplateWrapper:
220
233
 
221
234
  # Individual prompt template getter functions
222
235
  def get_determine_concept_prompt_template(
223
- token: Optional[str] = None,
224
- is_jwt: Optional[bool] = None,
225
- jwt_tenant_id: Optional[str] = None
236
+ conn_params: Optional[dict] = None
226
237
  ) -> PromptTemplateWrapper:
227
238
  """
228
239
  Get determine concept prompt template wrapper
229
240
 
230
241
  Args:
231
- token: Authentication token
232
- is_jwt: Whether the token is a JWT
233
- jwt_tenant_id: JWT tenant ID
234
-
242
+ conn_params: Connection parameters including url, token, is_jwt, and jwt_tenant_id
243
+
235
244
  Returns:
236
245
  PromptTemplateWrapper for determine concept
237
246
  """
238
- prompt_service = PromptService(
239
- token=token,
240
- is_jwt=is_jwt,
241
- jwt_tenant_id=jwt_tenant_id
242
- )
247
+ prompt_service = PromptService(conn_params=conn_params)
243
248
  return PromptTemplateWrapper(prompt_service, "get_identify_concept_template")
244
249
 
245
250
 
246
251
  def get_generate_sql_prompt_template(
247
- token: Optional[str] = None,
248
- is_jwt: Optional[bool] = None,
249
- jwt_tenant_id: Optional[str] = None
252
+ conn_params: Optional[dict] = None
250
253
  ) -> PromptTemplateWrapper:
251
254
  """
252
255
  Get generate SQL prompt template wrapper
253
256
 
254
257
  Args:
255
- token: Authentication token
256
- is_jwt: Whether the token is a JWT
257
- jwt_tenant_id: JWT tenant ID
258
+ conn_params: Connection parameters including url, token, is_jwt, and jwt_tenant_id
258
259
 
259
260
  Returns:
260
261
  PromptTemplateWrapper for generate SQL
261
262
  """
262
- prompt_service = PromptService(
263
- token=token,
264
- is_jwt=is_jwt,
265
- jwt_tenant_id=jwt_tenant_id
266
- )
263
+ prompt_service = PromptService(conn_params=conn_params)
267
264
  return PromptTemplateWrapper(prompt_service, "get_generate_sql_template")
268
265
 
269
266
 
270
267
  def get_qa_prompt_template(
271
- token: Optional[str] = None,
272
- is_jwt: Optional[bool] = None,
273
- jwt_tenant_id: Optional[str] = None
268
+ conn_params: Optional[dict] = None
274
269
  ) -> PromptTemplateWrapper:
275
270
  """
276
271
  Get QA prompt template wrapper
277
272
 
278
273
  Args:
279
- token: Authentication token
280
- is_jwt: Whether the token is a JWT
281
- jwt_tenant_id: JWT tenant ID
282
-
274
+ conn_params: Connection parameters including url, token, is_jwt, and jwt_tenant_id
275
+
283
276
  Returns:
284
277
  PromptTemplateWrapper for QA
285
278
  """
286
- prompt_service = PromptService(
287
- token=token,
288
- is_jwt=is_jwt,
289
- jwt_tenant_id=jwt_tenant_id
290
- )
279
+ prompt_service = PromptService(conn_params=conn_params)
291
280
  return PromptTemplateWrapper(prompt_service, "get_generate_answer_template")
292
281
 
293
282
 
294
283
  # Global prompt service instance (updated signature)
295
284
  def get_prompt_service(
296
- token: str = None,
297
- is_jwt: bool = None,
298
- jwt_tenant_id: str = None
285
+ conn_params: Optional[dict] = None
299
286
  ) -> PromptService:
300
287
  """
301
288
  Get or create a prompt service instance
302
289
 
303
290
  Args:
304
- token: Authentication token (API key or JWT token)
305
- is_jwt: Whether the token is a JWT
306
- jwt_tenant_id: JWT tenant ID
307
-
291
+ conn_params: Connection parameters including url, token, is_jwt, and jwt_tenant_id
292
+
308
293
  Returns:
309
294
  PromptService instance
310
295
  """
311
- return PromptService(
312
- token=token,
313
- is_jwt=is_jwt,
314
- jwt_tenant_id=jwt_tenant_id
315
- )
296
+ return PromptService(conn_params=conn_params)
316
297
 
317
298
 
318
299
  # Global cache management functions
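The template getters and get_prompt_service follow the same pattern, taking one conn_params dict instead of separate token/is_jwt/jwt_tenant_id arguments; the timbr_llm_utils hunks below switch their call sites accordingly. A sketch reusing the conn_params shape from the PromptService example:

```
from langchain_timbr.utils.prompt_service import (
    get_determine_concept_prompt_template,
    get_generate_sql_prompt_template,
    get_qa_prompt_template,
)

conn_params = {"url": "https://your-timbr-host/", "token": "tk_placeholder",
               "is_jwt": False, "jwt_tenant_id": None}

# Each getter builds its own PromptService from the same dictionary
concept_prompt = get_determine_concept_prompt_template(conn_params=conn_params)
sql_prompt = get_generate_sql_prompt_template(conn_params=conn_params)
qa_prompt = get_qa_prompt_template(conn_params=conn_params)
```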
@@ -212,7 +212,7 @@ def determine_concept(
212
212
  note: Optional[str] = '',
213
213
  debug: Optional[bool] = False,
214
214
  timeout: Optional[int] = None,
215
- ) -> dict[str, any]:
215
+ ) -> dict[str, Any]:
216
216
  usage_metadata = {}
217
217
  determined_concept_name = None
218
218
  schema = 'dtimbr'
@@ -221,7 +221,7 @@ def determine_concept(
221
221
  if timeout is None:
222
222
  timeout = llm_timeout
223
223
 
224
- determine_concept_prompt = get_determine_concept_prompt_template(conn_params["token"], conn_params["is_jwt"], conn_params["jwt_tenant_id"])
224
+ determine_concept_prompt = get_determine_concept_prompt_template(conn_params)
225
225
  tags = get_tags(conn_params=conn_params, include_tags=include_tags)
226
226
  concepts = get_concepts(
227
227
  conn_params=conn_params,
@@ -418,7 +418,7 @@ def generate_sql(
418
418
  if timeout is None:
419
419
  timeout = llm_timeout
420
420
 
421
- generate_sql_prompt = get_generate_sql_prompt_template(conn_params["token"], conn_params["is_jwt"], conn_params["jwt_tenant_id"])
421
+ generate_sql_prompt = get_generate_sql_prompt_template(conn_params)
422
422
 
423
423
  if concept and concept != "" and (schema is None or schema != "vtimbr"):
424
424
  concepts_list = [concept]
@@ -550,7 +550,7 @@ def answer_question(
550
550
  if timeout is None:
551
551
  timeout = llm_timeout
552
552
 
553
- qa_prompt = get_qa_prompt_template(conn_params["token"], conn_params["is_jwt"], conn_params["jwt_tenant_id"])
553
+ qa_prompt = get_qa_prompt_template(conn_params)
554
554
 
555
555
  prompt = qa_prompt.format_messages(
556
556
  question=question,
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: langchain-timbr
3
- Version: 1.5.3
3
+ Version: 1.5.5
4
4
  Summary: LangChain & LangGraph extensions that parse LLM prompts into Timbr semantic SQL and execute them.
5
5
  Project-URL: Homepage, https://github.com/WPSemantix/langchain-timbr
6
6
  Project-URL: Documentation, https://docs.timbr.ai/doc/docs/integration/langchain-sdk/
@@ -0,0 +1,28 @@
1
+ langchain_timbr/__init__.py,sha256=gxd6Y6QDmYZtPlYVdXtPIy501hMOZXHjWh2qq4qzt_s,828
2
+ langchain_timbr/_version.py,sha256=-6ligW2dI8j2jxE-2b0-iN0QH0LgHVSboUTjBV5nIrc,704
3
+ langchain_timbr/config.py,sha256=DT2__mjaFCh8Eynm4czQM8vKRTxYVcNX3UJNU3epJwo,934
4
+ langchain_timbr/timbr_llm_connector.py,sha256=OXRttlEOJf-dTyilnXR6b6Cgl_cWDYrXGXQfmDV6vc8,13206
5
+ langchain_timbr/langchain/__init__.py,sha256=ejcsZKP9PK0j4WrrCCcvBXpDpP-TeRiVb21OIUJqix8,580
6
+ langchain_timbr/langchain/execute_timbr_query_chain.py,sha256=g7VzCuywo7xlWIldjtGOV4dYsQB3KsO8VARCfKrZFHM,15461
7
+ langchain_timbr/langchain/generate_answer_chain.py,sha256=euy8GcMOziMKpiJrmpiH-De-qKTaVV48TqTduDa5-PE,4711
8
+ langchain_timbr/langchain/generate_timbr_sql_chain.py,sha256=6NPvidW8pwmaX3w7p7yNFwQTZ8Cnmb40ZznRQJ2dTFM,9023
9
+ langchain_timbr/langchain/identify_concept_chain.py,sha256=kuzg0jJQpFGIiaxtNhdQ5K4HXveLVwONFNsoipPCteE,7169
10
+ langchain_timbr/langchain/timbr_sql_agent.py,sha256=7S7USe3wnwxdBduBycHYVbXnywnvog-Bg67Tk8-2X_s,19551
11
+ langchain_timbr/langchain/validate_timbr_sql_chain.py,sha256=sxT72asK-Cms4ZTiThWv58PgEBY1p_9hnUQwU35kPIQ,9492
12
+ langchain_timbr/langgraph/__init__.py,sha256=mKBFd0x01jWpRujUWe-suX3FFhenPoDxrvzs8I0mum0,457
13
+ langchain_timbr/langgraph/execute_timbr_query_node.py,sha256=rPx_V3OOh-JTGOwrEopHmOmFuM-ngBZdswkW9oZ43hU,5536
14
+ langchain_timbr/langgraph/generate_response_node.py,sha256=BLmsDZfbhncRpO7PEfDpy7CnPE7a55j8QV5jfg30heQ,2247
15
+ langchain_timbr/langgraph/generate_timbr_sql_node.py,sha256=wkau-NajblSVzNIro9IyqawULvz3XaCYSEdYW95vWco,4911
16
+ langchain_timbr/langgraph/identify_concept_node.py,sha256=aiLDFEcz_vM4zZ_ULe1SvJKmI-e4Fb2SibZQaEPz_eY,3649
17
+ langchain_timbr/langgraph/validate_timbr_query_node.py,sha256=-2fuieCz1hv6ua-17zfonme8LQ_OoPnoOBTdGSXkJgs,4793
18
+ langchain_timbr/llm_wrapper/llm_wrapper.py,sha256=XK4tcm6GGX4fZfciVN1BgwAFk0YI5FdkmtMwFuUL-Po,9701
19
+ langchain_timbr/llm_wrapper/timbr_llm_wrapper.py,sha256=sDqDOz0qu8b4WWlagjNceswMVyvEJ8yBWZq2etBh-T0,1362
20
+ langchain_timbr/utils/general.py,sha256=UDKumpar8gPbfhBBf7-7xzuTDj5NCk4JJhctfePhOjU,4960
21
+ langchain_timbr/utils/prompt_service.py,sha256=QT7kiq72rQno77z1-tvGGD7HlH_wdTQAl_1teSoKEv4,11373
22
+ langchain_timbr/utils/temperature_supported_models.json,sha256=d3UmBUpG38zDjjB42IoGpHTUaf0pHMBRSPY99ao1a3g,1832
23
+ langchain_timbr/utils/timbr_llm_utils.py,sha256=7-nnTa1T9XOcgIb-aJP3Pgon_gOrCMnDPiIPiAT3UCg,23016
24
+ langchain_timbr/utils/timbr_utils.py,sha256=p21DwTGhF4iKTLDQBkeBaJDFcXt-Hpu1ij8xzQt00Ng,16958
25
+ langchain_timbr-1.5.5.dist-info/METADATA,sha256=NcjtNkm167oxPOvd9CjGqzPm5hoONnPnYHPORQpo6rA,6129
26
+ langchain_timbr-1.5.5.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
27
+ langchain_timbr-1.5.5.dist-info/licenses/LICENSE,sha256=0ITGFk2alkC7-e--bRGtuzDrv62USIiVyV2Crf3_L_0,1065
28
+ langchain_timbr-1.5.5.dist-info/RECORD,,
@@ -1,28 +0,0 @@
1
- langchain_timbr/__init__.py,sha256=gxd6Y6QDmYZtPlYVdXtPIy501hMOZXHjWh2qq4qzt_s,828
2
- langchain_timbr/_version.py,sha256=YmPxci9z5OdhwvwXu2qOrZdFU4K4N2lTtMkI0KAJCh0,704
3
- langchain_timbr/config.py,sha256=NOMjSpo0TVkWT8BdbiGSADU08iknF2bRltFLwQRhpwk,832
4
- langchain_timbr/timbr_llm_connector.py,sha256=OXRttlEOJf-dTyilnXR6b6Cgl_cWDYrXGXQfmDV6vc8,13206
5
- langchain_timbr/langchain/__init__.py,sha256=ejcsZKP9PK0j4WrrCCcvBXpDpP-TeRiVb21OIUJqix8,580
6
- langchain_timbr/langchain/execute_timbr_query_chain.py,sha256=we1qjMS2JdBPRkGMJTzyHkpY3yW62wlh3PF3bTkEe8U,13883
7
- langchain_timbr/langchain/generate_answer_chain.py,sha256=3q_Fe2tclZsdH0PFkNdZtO4Xe2WQQoxg4sekFic5zt4,3260
8
- langchain_timbr/langchain/generate_timbr_sql_chain.py,sha256=-RZcdJ1lsivOE6zAm_hyg9txaINMGHWnGmkZepCg2Dk,7458
9
- langchain_timbr/langchain/identify_concept_chain.py,sha256=m3Lzb0PVeSeE7YpfhjB1OY0x9jcR6a_lTYg5YTTDhIw,5588
10
- langchain_timbr/langchain/timbr_sql_agent.py,sha256=F-HVqziHS7bVxWTkmvrkseRP5uZTuHQ2JoZNorQR4J8,18025
11
- langchain_timbr/langchain/validate_timbr_sql_chain.py,sha256=SPW7zimqunZZLRD5d-trglL9RkqrWOy2vjkBc19CWhE,7919
12
- langchain_timbr/langgraph/__init__.py,sha256=mKBFd0x01jWpRujUWe-suX3FFhenPoDxrvzs8I0mum0,457
13
- langchain_timbr/langgraph/execute_timbr_query_node.py,sha256=ZL-HsBer073VmkJv59qFCNYJyKOgB8-Ziij4EEBD39c,5263
14
- langchain_timbr/langgraph/generate_response_node.py,sha256=gChNFSPjK9lKwblgWTia6ETxhY5aIbgGsEykyIlGd90,2065
15
- langchain_timbr/langgraph/generate_timbr_sql_node.py,sha256=qyL7uqB5k-Bv8rE12f2Ub7wlcAw-pQibEPP1SvFKLu0,4638
16
- langchain_timbr/langgraph/identify_concept_node.py,sha256=ot9TFdRg8FA9JYVrtHLVi5k0vmUHUfL4ptQDFYYqOoA,3376
17
- langchain_timbr/langgraph/validate_timbr_query_node.py,sha256=TypUs60OaBhOx9Ceq-15qNVuuAvfrFBjQsPRjWK1StQ,4469
18
- langchain_timbr/llm_wrapper/llm_wrapper.py,sha256=qd6MHbmJsU02no7bAReec3Ps4oYc_m0WPrDFqRY2Dlc,7735
19
- langchain_timbr/llm_wrapper/timbr_llm_wrapper.py,sha256=sDqDOz0qu8b4WWlagjNceswMVyvEJ8yBWZq2etBh-T0,1362
20
- langchain_timbr/utils/general.py,sha256=Psb9F9ylI0z-1Ddw0Hi74nKl03_aLanIV9YC-MJUdsw,2522
21
- langchain_timbr/utils/prompt_service.py,sha256=f-L2w-wRCF9GEMLC7GZy_gZ0pcnEQj0em-lDJvg2onY,11164
22
- langchain_timbr/utils/temperature_supported_models.json,sha256=d3UmBUpG38zDjjB42IoGpHTUaf0pHMBRSPY99ao1a3g,1832
23
- langchain_timbr/utils/timbr_llm_utils.py,sha256=Mn9Q9Wj97WuOZO3xKxZLjTQx31H22EACAPppz3B0JlE,23202
24
- langchain_timbr/utils/timbr_utils.py,sha256=p21DwTGhF4iKTLDQBkeBaJDFcXt-Hpu1ij8xzQt00Ng,16958
25
- langchain_timbr-1.5.3.dist-info/METADATA,sha256=jt8vCwK1a1hwjgTPxG2DzhNF0KO0CkqY-3kPTftejTc,6129
26
- langchain_timbr-1.5.3.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
27
- langchain_timbr-1.5.3.dist-info/licenses/LICENSE,sha256=0ITGFk2alkC7-e--bRGtuzDrv62USIiVyV2Crf3_L_0,1065
28
- langchain_timbr-1.5.3.dist-info/RECORD,,