euriai 1.0.18__py3-none-any.whl → 1.0.19__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
euriai/__init__.py CHANGED
@@ -4,7 +4,7 @@ Euri AI Python SDK
  A comprehensive Python SDK for the Euri AI API with integrations for popular frameworks.
  """

- __version__ = "1.0.18"
+ __version__ = "1.0.19"

  # Core imports that should always work
  try:
euriai/langgraph.py CHANGED
@@ -112,7 +112,9 @@ class EuriaiAINode:
          max_tokens: int = 1000,
          system_message: Optional[str] = None,
          output_parser: Optional[Callable[[str], Any]] = None,
-         error_handler: Optional[Callable[[Exception], Any]] = None
+         error_handler: Optional[Callable[[Exception], Any]] = None,
+         max_retries: int = 0,
+         retry_delay: float = 1.0
      ):
          """
          Initialize an AI node.
@@ -127,6 +129,8 @@ class EuriaiAINode:
              system_message: Optional system message
              output_parser: Function to parse AI output
              error_handler: Function to handle errors
+             max_retries: Maximum number of retries on error (default: 0)
+             retry_delay: Delay between retries in seconds (default: 1.0)
          """
          self.name = name
          self.prompt_template = prompt_template
@@ -136,6 +140,8 @@ class EuriaiAINode:
          self.system_message = system_message
          self.output_parser = output_parser
          self.error_handler = error_handler
+         self.max_retries = max_retries
+         self.retry_delay = retry_delay

          # Initialize client
          self.client = EuriaiClient(api_key=api_key, model=model)
@@ -149,58 +155,79 @@ class EuriaiAINode:
          }

      def __call__(self, state: Dict[str, Any]) -> Dict[str, Any]:
-         """Execute the AI node."""
+         """Execute the AI node with retry logic."""
          start_time = time.time()
+         last_exception = None

-         try:
-             # Format prompt with state variables
-             formatted_prompt = self.prompt_template.format(**state)
-
-             # Prepare user prompt (combine system message if provided)
-             user_prompt = formatted_prompt
-             if self.system_message:
-                 # If there's a system message, combine it with the user prompt
-                 user_prompt = f"System: {self.system_message}\n\nUser: {formatted_prompt}"
-
-             # Make API call
-             response = self.client.generate_completion(
-                 prompt=user_prompt,
-                 temperature=self.temperature,
-                 max_tokens=self.max_tokens
-             )
-
-             # Extract content
-             content = response.get("choices", [{}])[0].get("message", {}).get("content", "")
-
-             # Parse output if parser provided
-             if self.output_parser:
-                 parsed_output = self.output_parser(content)
-             else:
-                 parsed_output = content
-
-             # Update usage stats
-             self.usage_stats["total_calls"] += 1
-             response_time = time.time() - start_time
-             self.usage_stats["avg_response_time"] = (
-                 (self.usage_stats["avg_response_time"] * (self.usage_stats["total_calls"] - 1) + response_time)
-                 / self.usage_stats["total_calls"]
-             )
-
-             # Update state
-             state[f"{self.name}_output"] = parsed_output
-             state[f"{self.name}_raw_response"] = content
-
-             return state
-
-         except Exception as e:
-             self.usage_stats["errors"] += 1
-
-             if self.error_handler:
-                 return self.error_handler(e)
-             else:
-                 logging.error(f"Error in AI node {self.name}: {e}")
-                 state[f"{self.name}_error"] = str(e)
+         for attempt in range(self.max_retries + 1):  # +1 for initial attempt
+             try:
+                 # Format prompt with state variables
+                 formatted_prompt = self.prompt_template.format(**state)
+
+                 # Prepare user prompt (combine system message if provided)
+                 user_prompt = formatted_prompt
+                 if self.system_message:
+                     # If there's a system message, combine it with the user prompt
+                     user_prompt = f"System: {self.system_message}\n\nUser: {formatted_prompt}"
+
+                 # Make API call
+                 response = self.client.generate_completion(
+                     prompt=user_prompt,
+                     temperature=self.temperature,
+                     max_tokens=self.max_tokens
+                 )
+
+                 # Extract content
+                 content = response.get("choices", [{}])[0].get("message", {}).get("content", "")
+
+                 # Parse output if parser provided
+                 if self.output_parser:
+                     parsed_output = self.output_parser(content)
+                 else:
+                     parsed_output = content
+
+                 # Update usage stats
+                 self.usage_stats["total_calls"] += 1
+                 response_time = time.time() - start_time
+                 self.usage_stats["avg_response_time"] = (
+                     (self.usage_stats["avg_response_time"] * (self.usage_stats["total_calls"] - 1) + response_time)
+                     / self.usage_stats["total_calls"]
+                 )
+
+                 # Update state
+                 state[f"{self.name}_output"] = parsed_output
+                 state[f"{self.name}_raw_response"] = content
+
                  return state
+
+             except Exception as e:
+                 last_exception = e
+                 self.usage_stats["errors"] += 1
+
+                 # If we haven't exhausted retries, wait and try again
+                 if attempt < self.max_retries:
+                     logging.warning(f"AI node {self.name} failed (attempt {attempt + 1}/{self.max_retries + 1}): {e}")
+                     time.sleep(self.retry_delay)
+                     continue
+
+                 # All retries exhausted, handle error
+                 if self.error_handler:
+                     error_result = self.error_handler(e)
+                     if isinstance(error_result, dict):
+                         # Merge error handler result with state
+                         state.update(error_result)
+                         return state
+                     else:
+                         state[f"{self.name}_error_handler_result"] = error_result
+                         return state
+                 else:
+                     logging.error(f"Error in AI node {self.name} after {self.max_retries + 1} attempts: {e}")
+                     state[f"{self.name}_error"] = str(e)
+                     state["error_handled"] = False
+                     return state
+
+         # Should never reach here, but just in case
+         return state

      async def acall(self, state: Dict[str, Any]) -> Dict[str, Any]:
          """Async version of the AI node execution."""
@@ -318,7 +345,9 @@ class EuriaiLangGraph:
          max_tokens: Optional[int] = None,
          system_message: Optional[str] = None,
          output_parser: Optional[Callable[[str], Any]] = None,
-         error_handler: Optional[Callable[[Exception], Any]] = None
+         error_handler: Optional[Callable[[Exception], Any]] = None,
+         max_retries: int = 0,
+         retry_delay: float = 1.0
      ) -> None:
          """
          Add an AI-powered node to the graph.
@@ -332,6 +361,8 @@ class EuriaiLangGraph:
              system_message: System message for the node
              output_parser: Function to parse AI output
              error_handler: Function to handle errors
+             max_retries: Maximum number of retries on error (default: 0)
+             retry_delay: Delay between retries in seconds (default: 1.0)
          """
          ai_node = EuriaiAINode(
              name=name,
@@ -342,7 +373,9 @@ class EuriaiLangGraph:
              max_tokens=max_tokens or self.default_max_tokens,
              system_message=system_message,
              output_parser=output_parser,
-             error_handler=error_handler
+             error_handler=error_handler,
+             max_retries=max_retries,
+             retry_delay=retry_delay
          )

          self.ai_nodes[name] = ai_node
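
The same knobs are exposed through EuriaiLangGraph.add_ai_node. A rough sketch; the EuriaiLangGraph constructor arguments and node name are assumed, only the max_retries/retry_delay keywords come from this release:

```python
from euriai.langgraph import EuriaiLangGraph

# Constructor arguments are assumed, not taken from this diff.
workflow = EuriaiLangGraph(api_key="YOUR_EURI_API_KEY", name="support_flow")

workflow.add_ai_node(
    name="classifier",
    prompt_template="Classify this support request: {request}",
    max_retries=3,       # new in 1.0.19: retried inside the node's __call__
    retry_delay=0.5,     # new in 1.0.19: seconds between attempts
)
```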
@@ -511,6 +544,40 @@ class EuriaiLangGraph:
          if self.verbose:
              print(f"Set finish point: {node_name}")

+     def add_conditional_edge(
+         self,
+         from_node: str,
+         condition_func: Callable[[Dict[str, Any]], str],
+         condition_map: Optional[Dict[str, str]] = None
+     ) -> None:
+         """
+         Add a conditional edge that routes based on state.
+
+         Args:
+             from_node: Source node name
+             condition_func: Function that evaluates state and returns next node name
+             condition_map: Optional mapping of condition results to node names
+         """
+         if condition_map:
+             # Use condition map for routing
+             def router(state: Dict[str, Any]) -> str:
+                 result = condition_func(state)
+                 return condition_map.get(result, END)
+
+             self.graph.add_conditional_edges(from_node, router, condition_map)
+         else:
+             # Direct function routing
+             self.graph.add_conditional_edges(from_node, condition_func)
+
+         self.conditional_edges.append({
+             "source": from_node,
+             "condition": condition_func,
+             "condition_map": condition_map
+         })
+
+         if self.verbose:
+             print(f"Added conditional edge from: {from_node}")
+
      def compile_graph(self) -> CompiledStateGraph:
          """
          Compile the graph for execution.
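
A sketch of routing with the new add_conditional_edge helper, continuing the workflow above; the node names, state key, and condition are illustrative. Per the router in this hunk, condition results missing from condition_map fall through to END:

```python
def route_on_sentiment(state: dict) -> str:
    # Inspect the classifier node's output written into the state.
    text = state.get("classifier_output", "")
    return "positive" if "positive" in text.lower() else "negative"

workflow.add_conditional_edge(
    from_node="classifier",
    condition_func=route_on_sentiment,
    condition_map={"positive": "thank_you", "negative": "escalate"},
)
```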
@@ -1036,6 +1103,37 @@ class EuriaiLangGraph:

          return stats

+     def get_error_stats(self) -> Dict[str, Any]:
+         """Get error statistics for the workflow."""
+         error_stats = {
+             "total_errors": 0,
+             "ai_node_errors": {},
+             "error_rate": 0.0
+         }
+
+         total_calls = 0
+         total_errors = 0
+
+         # Collect error stats from AI nodes
+         for name, node in self.ai_nodes.items():
+             node_errors = node.usage_stats.get("errors", 0)
+             node_calls = node.usage_stats.get("total_calls", 0)
+
+             error_stats["ai_node_errors"][name] = {
+                 "errors": node_errors,
+                 "total_calls": node_calls,
+                 "error_rate": node_errors / max(node_calls, 1)
+             }
+
+             total_errors += node_errors
+             total_calls += node_calls
+
+         error_stats["total_errors"] = total_errors
+         error_stats["total_calls"] = total_calls
+         error_stats["error_rate"] = total_errors / max(total_calls, 1)
+
+         return error_stats
+
      def get_graph_structure(self) -> Dict[str, Any]:
          """Get the structure of the graph."""
          return {
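
Continuing the sketch above, the new get_error_stats method aggregates each node's usage_stats counters into a single report:

```python
stats = workflow.get_error_stats()
print(stats["total_errors"], "errors across", stats["total_calls"], "calls")
print(stats["error_rate"])                    # total_errors / max(total_calls, 1)
print(stats["ai_node_errors"]["classifier"])  # {"errors": ..., "total_calls": ..., "error_rate": ...}
```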
{euriai-1.0.18.dist-info → euriai-1.0.19.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: euriai
- Version: 1.0.18
+ Version: 1.0.19
  Summary: Python client for Euri API (euron.one) with CLI, LangChain, and LlamaIndex integration
  Author: Euri
  Author-email: tech@euron.one
{euriai-1.0.18.dist-info → euriai-1.0.19.dist-info}/RECORD RENAMED
@@ -1,4 +1,4 @@
- euriai/__init__.py,sha256=7HAeI4LEAaq9UPyxXxt5iV3HBOIn4lGBnJ3VFCGfbBU,6427
+ euriai/__init__.py,sha256=claTvNZ_iqGvMHoPLa7Lqj2O4Ac-SYAZuGBeFwVpep8,6427
  euriai/autogen.py,sha256=z1WHftUgu3_Sn8zDXmf31onikS0p8TwH5JE4llL7ogk,21144
  euriai/cli.py,sha256=hF1wiiL2QQSfWf8WlLQyNVDBd4YkbiwmMSoPxVbyPTM,3290
  euriai/client.py,sha256=L-o6hv9N3md-l-hz-kz5nYVaaZqnrREZlo_0jguhF7E,4066
@@ -8,12 +8,12 @@ euriai/embedding.py,sha256=uP66Ph1k9Ou6J5RAkztJxlfyj0S0MESOvZ4ulhnVo-o,1270
  euriai/euri_chat.py,sha256=DEAiet1ReRwB4ljkPYaTl1Nb5uc20-JF-3PQjGQZXk4,3567
  euriai/euri_embed.py,sha256=g7zs1G-ZBDJjOGJtkkfIcV4LPtRcm9wpVWmrfMGn5EM,2919
  euriai/langchain.py,sha256=gVF9eh21RC1WtDn7SQoEREUDqOObm5IRx6BFZtB5xcc,34968
- euriai/langgraph.py,sha256=XfOLj5J5KXIPG_BnXV2MzjbloLNOl0wIf0MbnovoznY,40503
+ euriai/langgraph.py,sha256=_D89ugUcVQevjZ1GK-IkJQdbi7JFxJGtqFJ0CyO89bk,44623
  euriai/llamaindex.py,sha256=c-ujod2bjL6QIvfAyuIxm1SvSCS00URFElYybKQ5Ew0,26551
  euriai/n8n.py,sha256=hjkckqyW_hZNL78UkBCof1WvKCKCIjwdvZdAgx6NrB8,3764
  euriai/smolagents.py,sha256=xlixGx2IWzAPTpSJGsYIK2L-SHGY9Mw1-8GbwVsEYtU,28507
- euriai-1.0.18.dist-info/METADATA,sha256=iDxnmLXiPmv3BaMVQlGCThABooMqKAQd13e_xUv4SJo,6807
- euriai-1.0.18.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- euriai-1.0.18.dist-info/entry_points.txt,sha256=9OkET8KIGcsjQn8UlnpPKRT75s2KW34jq1__1SXtpMA,43
- euriai-1.0.18.dist-info/top_level.txt,sha256=TG1htJ8cuD62MXn-NJ7DVF21QHY16w6M_QgfF_Er_EQ,7
- euriai-1.0.18.dist-info/RECORD,,
+ euriai-1.0.19.dist-info/METADATA,sha256=PySHaQOnXL358TJQsK9NN8gLDe5Vee43T7RmoxN7CWQ,6807
+ euriai-1.0.19.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ euriai-1.0.19.dist-info/entry_points.txt,sha256=9OkET8KIGcsjQn8UlnpPKRT75s2KW34jq1__1SXtpMA,43
+ euriai-1.0.19.dist-info/top_level.txt,sha256=TG1htJ8cuD62MXn-NJ7DVF21QHY16w6M_QgfF_Er_EQ,7
+ euriai-1.0.19.dist-info/RECORD,,