euriai 1.0.16__tar.gz → 1.0.17__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {euriai-1.0.16 → euriai-1.0.17}/PKG-INFO +1 -1
- {euriai-1.0.16 → euriai-1.0.17}/euriai/__init__.py +1 -1
- {euriai-1.0.16 → euriai-1.0.17}/euriai/langgraph.py +80 -1
- {euriai-1.0.16 → euriai-1.0.17}/euriai.egg-info/PKG-INFO +1 -1
- {euriai-1.0.16 → euriai-1.0.17}/setup.py +1 -1
- {euriai-1.0.16 → euriai-1.0.17}/README.md +0 -0
- {euriai-1.0.16 → euriai-1.0.17}/euriai/autogen.py +0 -0
- {euriai-1.0.16 → euriai-1.0.17}/euriai/cli.py +0 -0
- {euriai-1.0.16 → euriai-1.0.17}/euriai/client.py +0 -0
- {euriai-1.0.16 → euriai-1.0.17}/euriai/crewai.py +0 -0
- {euriai-1.0.16 → euriai-1.0.17}/euriai/direct.py +0 -0
- {euriai-1.0.16 → euriai-1.0.17}/euriai/embedding.py +0 -0
- {euriai-1.0.16 → euriai-1.0.17}/euriai/euri_chat.py +0 -0
- {euriai-1.0.16 → euriai-1.0.17}/euriai/euri_embed.py +0 -0
- {euriai-1.0.16 → euriai-1.0.17}/euriai/langchain.py +0 -0
- {euriai-1.0.16 → euriai-1.0.17}/euriai/llamaindex.py +0 -0
- {euriai-1.0.16 → euriai-1.0.17}/euriai/n8n.py +0 -0
- {euriai-1.0.16 → euriai-1.0.17}/euriai/smolagents.py +0 -0
- {euriai-1.0.16 → euriai-1.0.17}/euriai.egg-info/SOURCES.txt +0 -0
- {euriai-1.0.16 → euriai-1.0.17}/euriai.egg-info/dependency_links.txt +0 -0
- {euriai-1.0.16 → euriai-1.0.17}/euriai.egg-info/entry_points.txt +0 -0
- {euriai-1.0.16 → euriai-1.0.17}/euriai.egg-info/requires.txt +0 -0
- {euriai-1.0.16 → euriai-1.0.17}/euriai.egg-info/top_level.txt +0 -0
- {euriai-1.0.16 → euriai-1.0.17}/setup.cfg +0 -0
--- euriai-1.0.16/euriai/langgraph.py
+++ euriai-1.0.17/euriai/langgraph.py
@@ -248,6 +248,7 @@ class EuriaiLangGraph:
         default_temperature: float = 0.7,
         default_max_tokens: int = 1000,
         enable_checkpointing: bool = True,
+        enable_usage_tracking: bool = True,
         verbose: bool = True
     ):
         """
@@ -260,6 +261,7 @@ class EuriaiLangGraph:
             default_temperature: Default temperature
             default_max_tokens: Default max tokens
             enable_checkpointing: Enable workflow checkpointing
+            enable_usage_tracking: Enable usage statistics tracking
             verbose: Enable verbose logging
         """
         if not LANGGRAPH_AVAILABLE:
@@ -273,6 +275,7 @@ class EuriaiLangGraph:
         self.default_model = default_model
         self.default_temperature = default_temperature
         self.default_max_tokens = default_max_tokens
+        self.enable_usage_tracking = enable_usage_tracking
         self.verbose = verbose
 
         # Initialize graph
@@ -297,8 +300,10 @@ class EuriaiLangGraph:
             "total_runs": 0,
             "total_nodes_executed": 0,
             "avg_execution_time": 0.0,
+            "total_api_calls": 0,
             "errors": 0,
-            "successful_runs": 0
+            "successful_runs": 0,
+            "ai_nodes": {}  # Node-specific statistics
         }
 
         # Thread pool for async operations
@@ -344,6 +349,15 @@ class EuriaiLangGraph:
         self.nodes[name] = ai_node
         self.graph.add_node(name, ai_node)
 
+        # Initialize node-specific statistics
+        if self.enable_usage_tracking:
+            self.usage_stats["ai_nodes"][name] = {
+                "total_calls": 0,
+                "total_tokens": 0,
+                "errors": 0,
+                "avg_response_time": 0.0
+            }
+
         if self.verbose:
             print(f"Added AI node: {name} (model: {ai_node.model})")
 
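These hunks add an `enable_usage_tracking` flag (default `True`) to the `EuriaiLangGraph` constructor, extend the workflow-level `usage_stats` dictionary with `total_api_calls` and an `ai_nodes` map, and seed a per-node statistics entry whenever an AI node is registered. As a hedged construction sketch, the example below relies only on the parameters visible in the hunks above; the `api_key` argument is an assumption about the unchanged part of the signature, not something this diff confirms.

```python
# Hedged sketch: building an EuriaiLangGraph with the flag added in 1.0.17.
# Only parameters visible in the diff hunks are relied on; api_key is an
# assumed, hypothetical argument from the unchanged part of the signature.
from euriai.langgraph import EuriaiLangGraph

workflow = EuriaiLangGraph(
    api_key="YOUR_EURI_API_KEY",   # assumption, not shown in this diff
    default_temperature=0.7,
    default_max_tokens=1000,
    enable_checkpointing=True,
    enable_usage_tracking=True,    # new in 1.0.17, enabled by default
    verbose=True,
)
```

The remaining hunk in euriai/langgraph.py adds the methods that expose and reset these statistics: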
@@ -704,6 +718,71 @@ class EuriaiLangGraph:
 
         return results
 
+    def get_usage_stats(self) -> Dict[str, Any]:
+        """
+        Get comprehensive usage statistics for the workflow.
+
+        Returns:
+            Dictionary containing usage statistics including:
+            - total_runs: Total number of workflow runs
+            - successful_runs: Number of successful runs
+            - errors: Number of errors
+            - avg_execution_time: Average execution time per run
+            - total_api_calls: Total API calls made
+            - ai_nodes: Node-specific statistics
+        """
+        if not self.enable_usage_tracking:
+            return {"message": "Usage tracking is disabled"}
+
+        # Update AI node statistics from individual nodes
+        for node_name, ai_node in self.ai_nodes.items():
+            if node_name in self.usage_stats["ai_nodes"]:
+                node_stats = ai_node.usage_stats
+                self.usage_stats["ai_nodes"][node_name].update({
+                    "total_calls": node_stats["total_calls"],
+                    "total_tokens": node_stats["total_tokens"],
+                    "errors": node_stats["errors"],
+                    "avg_response_time": node_stats["avg_response_time"]
+                })
+
+                # Add to total API calls
+                self.usage_stats["total_api_calls"] += node_stats["total_calls"]
+
+        return self.usage_stats.copy()
+
+    def reset_usage_stats(self) -> None:
+        """Reset all usage statistics."""
+        if not self.enable_usage_tracking:
+            return
+
+        self.usage_stats = {
+            "total_runs": 0,
+            "total_nodes_executed": 0,
+            "avg_execution_time": 0.0,
+            "total_api_calls": 0,
+            "errors": 0,
+            "successful_runs": 0,
+            "ai_nodes": {}
+        }
+
+        # Reset individual AI node stats
+        for ai_node in self.ai_nodes.values():
+            ai_node.usage_stats = {
+                "total_calls": 0,
+                "total_tokens": 0,
+                "errors": 0,
+                "avg_response_time": 0.0
+            }
+
+        # Reinitialize AI node stats
+        for name in self.ai_nodes.keys():
+            self.usage_stats["ai_nodes"][name] = {
+                "total_calls": 0,
+                "total_tokens": 0,
+                "errors": 0,
+                "avg_response_time": 0.0
+            }
+
     def create_workflow_pattern(self, pattern_type: WorkflowType, **kwargs) -> None:
         """
         Create a pre-defined workflow pattern.
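Together with the constructor changes, this gives the workflow object a small usage-tracking API. The sketch below shows how it would typically be exercised, assuming `workflow` is the instance from the earlier example and at least one workflow run has completed (the run method itself is not part of this diff).

```python
# Hedged usage sketch for the accessors added in 1.0.17.
# Assumes `workflow` was constructed with enable_usage_tracking=True (the default).

stats = workflow.get_usage_stats()            # returns a copy of usage_stats
print(stats["total_runs"], stats["total_api_calls"])

# Per-node counters are keyed by the name used when the AI node was added.
for node_name, node_stats in stats["ai_nodes"].items():
    print(node_name, node_stats["total_calls"], node_stats["avg_response_time"])

workflow.reset_usage_stats()                  # zeroes workflow- and node-level counters
```

When tracking is disabled at construction time, `get_usage_stats()` returns `{"message": "Usage tracking is disabled"}` and `reset_usage_stats()` is a no-op.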
--- euriai-1.0.16/setup.py
+++ euriai-1.0.17/setup.py
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages
 
 setup(
     name="euriai",
-    version="1.0.16",
+    version="1.0.17",
     description="Python client for Euri API (euron.one) with CLI, LangChain, and LlamaIndex integration",
     long_description=open("README.md", encoding="utf-8").read(),
     long_description_content_type="text/markdown",
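The setup.py hunk is the version bump itself; the matching `+1 -1` changes to PKG-INFO and euriai/__init__.py (not shown above) are presumably the same version-string update. The release can be pinned with `pip install euriai==1.0.17`.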