versionhq 1.2.1.22__py3-none-any.whl → 1.2.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -6,13 +6,14 @@ import uuid
  import networkx as nx
  import matplotlib.pyplot as plt
  from abc import ABC
+ from concurrent.futures import Future
  from typing import List, Any, Optional, Callable, Dict, Type, Tuple

  from pydantic import BaseModel, InstanceOf, Field, UUID4, field_validator
  from pydantic_core import PydanticCustomError

- from versionhq.task.model import Task, TaskOutput
  from versionhq.agent.model import Agent
+ from versionhq.task.model import Task, TaskOutput
  from versionhq._utils.logger import Logger


@@ -62,20 +63,23 @@ class Node(BaseModel):
      def is_independent(self) -> bool:
          return not self.in_degree_nodes and not self.out_degree_nodes

-     def handle_task_execution(self, agent: Agent = None, context: str = None, format: Type[BaseModel] = None) -> TaskOutput | None:
-         """
-         Start task execution and update status accordingly.
-         """
+     def handle_task_execution(self, agent: Agent = None, context: str = None, response_format: Type[BaseModel] = None) -> TaskOutput | None:
+         """Executes the task and updates its status"""

          self.status = TaskStatus.IN_PROGRESS

          if not self.task:
-             Logger(verbose=True).log(level="error", message="Missing a task to execute. We'll return None.", color="red")
+             Logger().log(level="error", message="Missing a task to execute. We'll return None.", color="red")
              self.status = TaskStatus.ERROR
              return None

-         self.task.pydantic_output = self.task.pydantic_output if self.task.pydantic_output else format if type(format) == BaseModel else None
+         agent = agent if agent else self.assigned_to
+         self.task.pydantic_output = self.task.pydantic_output if self.task.pydantic_output else response_format if type(response_format) == BaseModel else None
          res = self.task.execute(agent=agent, context=context)
+
+         if isinstance(res, Future): # activate async
+             res = res.result()
+
          self.status = TaskStatus.COMPLETED if res else TaskStatus.ERROR
          return res

@@ -179,7 +183,7 @@ class Edge(BaseModel):
          return False


-     def activate(self, format: Type[BaseModel] = None) -> TaskOutput | None:
+     def activate(self, response_format: Type[BaseModel] = None) -> TaskOutput | None:
          """
          Activates the edge to initiate task execution of the target node.
          """
@@ -197,7 +201,7 @@ class Edge(BaseModel):
              time.sleep(self.lag)

          context = self.source.task.output.raw if self.data_transfer else None
-         res = self.target.handle_task_execution(context=context, format=format)
+         res = self.target.handle_task_execution(context=context, response_format=response_format)
          return res


@@ -207,14 +211,13 @@ class Graph(ABC, BaseModel):
      """
      directed: bool = Field(default=False, description="Whether the graph is directed")
      graph: Type[nx.Graph] = Field(default=None)
-     nodes: Dict[str, Node] = Field(default_factory=dict, description="identifier: Node - for the sake of ")
+     nodes: Dict[str, Node] = Field(default_factory=dict, description="{node_identifier: Node}")
      edges: Dict[str, Edge] = Field(default_factory=dict)

      def __init__(self, directed: bool = False, **kwargs):
          super().__init__(directed=directed, **kwargs)
          self.graph = nx.DiGraph(directed=True) if self.directed else nx.Graph()

-
      def _return_node_object(self, node_identifier) -> Node | None:
          match = [v for k, v in self.nodes.items() if k == node_identifier]

@@ -262,9 +265,7 @@ class Graph(ABC, BaseModel):
          return [v for v in self.nodes.values() if v.out_degrees == 0 and v.in_degrees > 0]

      def find_critical_end_node(self) -> Node | None:
-         """
-         Find a critical end node from all the end nodes to lead a conclusion of the entire graph.
-         """
+         """Finds a critical end node from all the end nodes to lead a conclusion of the entire graph."""
          end_nodes = self.find_end_nodes()
          if not end_nodes:
              return None
@@ -276,7 +277,6 @@ class Graph(ABC, BaseModel):
          critical_edge = max(edges, key=lambda item: item['weight']) if edges else None
          return critical_edge.target if critical_edge else None

-
      def find_path(self, source: Optional[str] | None, target: str, weight: Optional[Any] | None) -> Any:
          try:
              return nx.shortest_path(self.graph, source=source, target=target, weight=weight)
@@ -309,8 +309,7 @@ class Graph(ABC, BaseModel):
          critical_path = max(all_paths, key=all_paths.get)
          critical_duration = all_paths[critical_path]

-         return list(critical_path), critical_duration, all_paths
-
+         return list(set(critical_path)), critical_duration, all_paths

      def is_circled(self, node: Node) -> bool:
          """Check if there's a path from the node to itself and return bool."""
@@ -325,8 +324,8 @@ class TaskGraph(Graph):
      id: UUID4 = Field(default_factory=uuid.uuid4, frozen=True)
      should_reform: bool = Field(default=False)
      outputs: Dict[str, TaskOutput] = Field(default_factory=dict, description="stores node identifier and TaskOutput")
-     concl_template: Optional[Dict[str, Any] | Type[BaseModel]] = Field(default=None, description="stores a format of `concl` either in Pydantic model or JSON dict")
-     concl: Any = Field(default=None, description="store the final result of the entire task graph")
+     concl_template: Optional[Dict[str, Any] | Type[BaseModel]] = Field(default=None, description="stores final response format in Pydantic class or JSON dict")
+     concl: Optional[TaskOutput] = Field(default=None, description="stores the final or latest conclusion of the entire task graph")


      def _save(self, title: str, abs_file_path: str = None) -> None:
@@ -373,7 +372,7 @@ class TaskGraph(Graph):
          return task_node


-     def add_dependency(self, source_node_identifier: str, target_node_identifier: str, **edge_attributes) -> None:
+     def add_dependency(self, source: str, target: str, **edge_attributes) -> None:
          """
          Add an edge that connect task 1 (source) and task 2 (target) using task_node.name as an identifier
          """
@@ -389,7 +388,7 @@ class TaskGraph(Graph):
          else:
              pass

-         self.add_edge(source_node_identifier, target_node_identifier, edge)
+         self.add_edge(source, target, edge)


      def get_task_status(self, identifier: str) -> TaskStatus | None:
@@ -484,7 +483,7 @@ class TaskGraph(Graph):
          plt.show(block=False)


-     def activate(self, target_node_identifier: Optional[str] = None) -> Tuple[TaskOutput | None, Dict[str, TaskOutput]]:
+     def activate(self, target: Optional[str] = None) -> Tuple[TaskOutput | None, Dict[str, TaskOutput]]:
          """
          Starts to execute all nodes in the graph or a specific node if the target is given, following the given conditons of the edge obeject.
          Then returns tuple of the last task output and all task outputs (self.outputs)
@@ -492,24 +491,24 @@

          Logger().log(color="blue", message=f"Start to activate the graph: {str(self.id)}", level="info")

-         if target_node_identifier:
-             if not [k for k in self.nodes.keys() if k == target_node_identifier]:
-                 Logger().log(level="error", message=f"The node {str(target_node_identifier)} is not in the graph.", color="red")
-                 return None
+         if target:
+             if not [k for k in self.nodes.keys() if k == target]:
+                 Logger().log(level="error", message=f"The node {str(target)} is not in the graph.", color="red")
+                 return None, None

              # find a shortest path to each in-degree node of the node and see if dependency met.
-             node = self._return_node_object(target_node_identifier)
+             node = self._return_node_object(target)
              sources = node.in_degrees
              edge_status = []
              res = None

              for item in sources:
-                 edge = self.find_path(source=item, target=target_node_identifier)
+                 edge = self.find_path(source=item, target=target)
                  edge_status.append(dict(edge=edge if edge else None, dep_met=edge.dependency_met() if edge else False))

              if len([item for item in edge_status if item["dep_met"] == True]) == len(sources):
                  res = node.handle_task_execution()
-                 self.outputs.update({ target_node_identifier: res })
+                 self.outputs.update({ target: res })

              return res, self.outputs

@@ -524,40 +523,35 @@ class TaskGraph(Graph):
          critical_path, _, _ = self.find_critical_path()
          res = None

-         # When all nodes are completed, return the output of the critical end node or end node.
          if end_nodes and len([node for node in end_nodes if node.status == TaskStatus.COMPLETED]) == len(end_nodes):
-             if critical_end_node:
-                 return critical_end_node.task.output, self.outputs
-             else:
-                 return [v.task.output for k, v in end_nodes.items()][0], self.outputs
+             res = self.concl if self.concl else critical_end_node.task.output if critical_end_node else [v.task.output for v in end_nodes.values()][0]

-         # Else, execute nodes connected with the critical_path
          elif critical_path:
-             for item in critical_path:
-                 edge = [v for k, v in self.edges.items() if item in k]
-                 if edge:
-                     edge = edge[0]
-
-                     if edge.target.status == TaskStatus.COMPLETED:
-                         res = edge.target.output
+             nodes = [v for k, v in self.nodes.items() if k in critical_path]
+             if nodes:
+                 for node in nodes:
+                     if node.status == TaskStatus.COMPLETED:
+                         res = node.task.output

                      else:
-                         res = edge.activate()
-                         node_identifier = edge.target.identifier
-                         self.outputs.update({ node_identifier: res })
+                         res = node.handle_task_execution()
+                         self.outputs.update({ node.identifier: res })

                  if not res and start_nodes:
                      for node in start_nodes:
                          res = node.handle_task_execution()
                          self.outputs.update({ node.identifier: res })
+             else:
+                 for k, edge in self.edges.items():
+                     res = edge.activate()
+                     node_identifier = edge.target.identifier
+                     self.outputs.update({ node_identifier: res })

-         # If no critical paths in the graph, simply start from the start nodes.
          elif start_nodes:
              for node in start_nodes:
                  res = node.handle_task_execution()
                  self.outputs.update({ node.identifier: res })

-         # If none of above is applicable, try to activate all the edges.
          else:
              for k, edge in self.edges.items():
                  res = edge.activate()
@@ -566,5 +560,36 @@ class TaskGraph(Graph):

          self.concl = res
          self.concl_template = self.concl_template if self.concl_template else res.pydantic.__class__ if res.pydantic else None
-         # last_task_output = [v for v in self.outputs.values()][len([v for v in self.outputs.values()]) - 1] if [v for v in self.outputs.values()] else None
+         # last_task_output = [v for v in self.outputs.values()][len([v for v in self.outputs.values()]) - 1] if [v for v in self.outputs.values()] else None
          return res, self.outputs
+
+
+     def evaluate(self, eval_criteria: List[str] = None):
+         """Evaluates the conclusion based on the given eval criteria."""
+
+         if not isinstance(self.concl, TaskOutput):
+             return None
+
+         tasks = [v.task for v in self.nodes.values() if v.task and v.task.id == self.concl.task_id]
+         task = tasks[0] if tasks else None
+
+         if not task:
+             return None
+
+         if not task.eval_criteria:
+             task.eval_criteria = eval_criteria
+
+         eval = self.concl.evaluate(task=task)
+         return eval
+
+
+     @property
+     def usage(self) -> Tuple[int, float]:
+         """Returns aggregate number of consumed tokens and job latency in ms during the activation"""
+
+         tokens, latency = 0, 0
+         for v in self.outputs.values():
+             tokens += v._tokens
+             latency += v.latency
+
+         return tokens, latency
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: versionhq
- Version: 1.2.1.22
+ Version: 1.2.2.0
  Summary: An agentic orchestration framework for building agent networks that handle task automation.
  Author-email: Kuriko Iwai <kuriko@versi0n.io>
  License: MIT License
@@ -259,7 +259,7 @@ agent.update(
      task="YOUR AMAZING TASK OVERVIEW",
      expected_outcome="YOUR OUTCOME EXPECTATION",
  )
- res = network.launch()
+ res, _ = network.launch()
  ```

  This will form a network with multiple agents on `Formation` and return `TaskOutput` object with output in JSON, plane text, Pydantic model format with evaluation.
@@ -290,15 +290,16 @@ task = vhq.Task(
      callback_kwargs=dict(message="Hi! Here is the result: ")
  )

- res = task.execute(context="amazing context to consider.")
- print(res)
+ res = task.execute(context="context to consider")
+
+ assert isinstance(res, vhq.TaskOutput)
  ```


  This will return a `TaskOutput` object that stores response in plane text, JSON, and Pydantic model: `CustomOutput` formats with a callback result, tool output (if given), and evaluation results (if given).

  ```python
- res == TaskOutput(
+ res == vhq.TaskOutput(
      task_id=UUID('<TASK UUID>'),
      raw='{\"test1\":\"random str\", \"test2\":[\"str item 1\", \"str item 2\", \"str item 3\"]}',
      json_dict={'test1': 'random str', 'test2': ['str item 1', 'str item 2', 'str item 3']},
@@ -337,9 +338,9 @@ network =vhq.AgentNetwork(
          vhq.Member(agent=agent_b, is_manager=True, tasks=[task_2]), # Agent B as a manager
      ],
  )
- res = network.launch()
+ res, _ = network.launch()

- assert isinstance(res, vhq.NetworkOutput)
+ assert isinstance(res, vhq.TaskOutput)
  assert not [item for item in task_1.processed_agents if "vhq-Delegated-Agent" == item]
  assert [item for item in task_1.processed_agents if "agent b" == item]
  ```
@@ -1,20 +1,20 @@
- versionhq/__init__.py,sha256=xiTq9nfM7wVDAPecl79IUIeaU4m48he7azr1WZ962W4,2892
- versionhq/_utils/__init__.py,sha256=dzoZr4cBlh-2QZuPzTdehPUCe9lP1dmRtauD7qTjUaA,158
+ versionhq/__init__.py,sha256=uh8uc3vSkOPeiz6MBNYdjJeCzH_KPMN-x5Xp5MWyOPo,2857
+ versionhq/_utils/__init__.py,sha256=d-vYVcORZKG-kkLe_fzE8VbViDpAk9DDOKe2fVK25ew,178
  versionhq/_utils/i18n.py,sha256=TwA_PnYfDLA6VqlUDPuybdV9lgi3Frh_ASsb_X8jJo8,1483
  versionhq/_utils/logger.py,sha256=iHxGjm3BvUo5dHKLU88_pc0Z45wzSHOjyJGQkb7OADk,3255
  versionhq/_utils/process_config.py,sha256=YTGY_erW335RfceQfzS18YAqq-AAb-iSvKSjN7noD2E,782
- versionhq/_utils/usage_metrics.py,sha256=NXF18dn5NNvGK7EsQ4AAghpR8ppYOjMx6ABenLLHnmM,1066
+ versionhq/_utils/usage_metrics.py,sha256=xgYGRW3OTuK9EJyi3QYJeYcJl7dL27olcWaLo_7B3JE,2246
  versionhq/_utils/vars.py,sha256=bZ5Dx_bFKlt3hi4-NNGXqdk7B23If_WaTIju2fiTyPQ,57
  versionhq/agent/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- versionhq/agent/inhouse_agents.py,sha256=vupO1viYqVb7sKohIE1zThu6JArhh5JLo5LBeSnh0kM,2534
- versionhq/agent/model.py,sha256=2GzZW9io5yHKXHvJJMq53ZjIP9pCK4_ubH-KdMYmsKw,26542
+ versionhq/agent/inhouse_agents.py,sha256=WAbyog-6pKwa8ru9u_KJgD_ViTLv4ZRECks1Znch47E,2638
+ versionhq/agent/model.py,sha256=cYplMaQXHocFeZGZ1WIZ5npZ_Edokj3qFPBE9PiBxHw,25285
  versionhq/agent/parser.py,sha256=riG0dkdQCxH7uJ0AbdVdg7WvL0BXhUgJht0VtQvxJBc,4082
  versionhq/agent/rpm_controller.py,sha256=grezIxyBci_lDlwAlgWFRyR5KOocXeOhYkgN02dNFNE,2360
  versionhq/agent/TEMPLATES/Backstory.py,sha256=IAhGnnt6VUMe3wO6IzeyZPDNu7XE7Uiu3VEXUreOcKs,532
  versionhq/agent/TEMPLATES/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  versionhq/agent_network/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  versionhq/agent_network/formation.py,sha256=7iGw20Dj2sFLAho6yfrdmonAwFcxINBDGFr2RU-Qz3s,7505
- versionhq/agent_network/model.py,sha256=hjtYIopAN52nStcM6TlV0b6ulRMrmzKH7jIkzNmZHDE,19265
+ versionhq/agent_network/model.py,sha256=93VVOtON6_TEren4FnWfLtUGl_T6DLVA0BhZgY9yhIA,19059
  versionhq/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  versionhq/clients/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  versionhq/clients/customer/__init__.py,sha256=-YXh1FQfvpfLacK8SUC7bD7Wx_eIEi4yrkCC_cUasFg,217
@@ -32,28 +32,27 @@ versionhq/knowledge/source_docling.py,sha256=mg7bgvKePHn2LlA_XzSFCbS0zOo9xfu_aNO
  versionhq/knowledge/storage.py,sha256=7oxCg3W9mFjYH1YmuH9kFtTbNxquzYFjuUjd_TlsB9E,8170
  versionhq/llm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  versionhq/llm/llm_vars.py,sha256=wjQK20cKvph6Vq1v71o4d16zBGcHlwq0bzOT_zWno7w,7041
- versionhq/llm/model.py,sha256=wlzDUMEyIOm808d1vzqu9gmbB4ch-s_EUvwFR60gR80,17177
+ versionhq/llm/model.py,sha256=HIBmf8FYV6-cDbZK1ZBu6z3dmF0ZUbKbCelfwxMlgyY,17177
  versionhq/memory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  versionhq/memory/contextual_memory.py,sha256=WeDujcEp4oud30OusXSPPNrMEQP-vGrt1mcfYopQruU,3483
- versionhq/memory/model.py,sha256=MPO8dDP5eAuk9td6bMOq5j2huLzCADFJDrTujHhcWQY,8150
+ versionhq/memory/model.py,sha256=VQR1229t7GQPMItlGAHLtJrb6LrZfSoRA1DRW4z0SOU,8234
  versionhq/storage/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  versionhq/storage/base.py,sha256=p-Jas0fXQan_qotnRD6seQxrT2lj-uw9-SmHQhdppcs,355
  versionhq/storage/ltm_sqlite_storage.py,sha256=wdUiuwHfJocdk0UGqyrdU4S5Nae1rgsoRNu3LWmGFcI,3951
  versionhq/storage/mem0_storage.py,sha256=ZY8MELBWaINRv9YuRW5MxH7dj2cII-L0i3xSD6o1-2M,3781
  versionhq/storage/rag_storage.py,sha256=bS2eE874obarYl-4hT6ZWYWTRsqtfuGpKgKzERmM6Uo,7433
- versionhq/storage/task_output_storage.py,sha256=E1t_Fkt78dPYIOl3MP7LfQ8oGtjlzxBuSNq_8ZXKho8,4573
- versionhq/storage/utils.py,sha256=ByYXPoEIGJYLUqz-DWjbCAnneNrH1otiYbp12SCILpM,747
+ versionhq/storage/task_output_storage.py,sha256=M8vInLJ5idGAq17w1juHKXtyPyF-B-rK_P8UcqD-Px8,5357
+ versionhq/storage/utils.py,sha256=r5ghA_ktdR2IuzlzKqZYCjsNxztEMzyhWLneA4cFuWY,748
  versionhq/task/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- versionhq/task/evaluate.py,sha256=WdUgjbZL62XrxyWe5MTz29scfzwmuAHGxJ7GvAB8Fmk,3954
+ versionhq/task/evaluation.py,sha256=mHLNabuvSbi9FGfblomr41vlc3quTHsugH3-3qQmcDw,4461
  versionhq/task/formatter.py,sha256=N8Kmk9vtrMtBdgJ8J7RmlKNMdZWSmV8O1bDexmCWgU0,643
- versionhq/task/log_handler.py,sha256=LT7YnO7gcPR9IZS7eRvMjnHh8crMBFtqduxd8dxIbkk,1680
- versionhq/task/model.py,sha256=KshCysteol3ggfotZMfFn192dMYALg8lvjiGpyLUVQA,28948
+ versionhq/task/model.py,sha256=8qBxRgqMs_b8IZA45_gAU1uc5S5w9hdnyRG-Vwm8bVM,28901
  versionhq/task/structured_response.py,sha256=4q-hQPu7oMMHHXEzh9YW4SJ7N5eCZ7OfZ65juyl_jCI,5000
- versionhq/task/TEMPLATES/Description.py,sha256=V-4kh8xpQTKOcDMi2xnuP-fcNk6kuoz1_5tYBlDLQWQ,420
+ versionhq/task/TEMPLATES/Description.py,sha256=EkwJHc65G32MjWyn3rcp0ATmMaVPHuYKaykyByU5r4g,751
  versionhq/task_graph/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  versionhq/task_graph/colors.py,sha256=naJCx4Vho4iuJtbW8USUXb-M5uYvd5ds2p8qbjUfRus,669
- versionhq/task_graph/draft.py,sha256=AuQ2X-T5xuQ2ipMiAqeh9Pjm6I2fIf952pBQRYqdaog,5131
- versionhq/task_graph/model.py,sha256=njyHQyHrVTZP46iVkC6YvuMnGcS40vOy1wszRtf7DHY,23971
+ versionhq/task_graph/draft.py,sha256=l18XacRsbDhAv6CvKMnUMI26IDuizA1UNWHbL1q5gn4,5099
+ versionhq/task_graph/model.py,sha256=53D_Bd-Uz6fv5Y1iVVDX1wzqcabE1BmLzqEO0oL8wrs,24628
  versionhq/tool/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  versionhq/tool/cache_handler.py,sha256=iL8FH7X0G-cdT0uhJwzuhLDaadTXOdfybZcDy151-es,1085
  versionhq/tool/composio_tool.py,sha256=IATfsEnF_1RPJyGtPBmAtEJh5XPcgDHpyG3SUR461Og,8572
@@ -61,8 +60,8 @@ versionhq/tool/composio_tool_vars.py,sha256=FvBuEXsOQUYnN7RTFxT20kAkiEYkxWKkiVtg
  versionhq/tool/decorator.py,sha256=C4ZM7Xi2gwtEMaSeRo-geo_g_MAkY77WkSLkAuY0AyI,1205
  versionhq/tool/model.py,sha256=PO4zNWBZcJhYVur381YL1dy6zqurio2jWjtbxOxZMGI,12194
  versionhq/tool/tool_handler.py,sha256=2m41K8qo5bGCCbwMFferEjT-XZ-mE9F0mDUOBkgivOI,1416
- versionhq-1.2.1.22.dist-info/LICENSE,sha256=cRoGGdM73IiDs6nDWKqPlgSv7aR4n-qBXYnJlCMHCeE,1082
- versionhq-1.2.1.22.dist-info/METADATA,sha256=2NuRqQKWkFVJpQVfjjMLLUmKeIH2FqK9UWHt0KsC8Wk,22033
- versionhq-1.2.1.22.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
- versionhq-1.2.1.22.dist-info/top_level.txt,sha256=DClQwxDWqIUGeRJkA8vBlgeNsYZs4_nJWMonzFt5Wj0,10
- versionhq-1.2.1.22.dist-info/RECORD,,
+ versionhq-1.2.2.0.dist-info/LICENSE,sha256=cRoGGdM73IiDs6nDWKqPlgSv7aR4n-qBXYnJlCMHCeE,1082
+ versionhq-1.2.2.0.dist-info/METADATA,sha256=dEEdYYA25P_WeaIrGwXDKdXmlWSgWcl-pMGEleVuaU0,22059
+ versionhq-1.2.2.0.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+ versionhq-1.2.2.0.dist-info/top_level.txt,sha256=DClQwxDWqIUGeRJkA8vBlgeNsYZs4_nJWMonzFt5Wj0,10
+ versionhq-1.2.2.0.dist-info/RECORD,,
@@ -1,59 +0,0 @@
- from datetime import datetime
- from typing import Any, Dict, List, Optional
-
- from pydantic import BaseModel, Field
-
- from versionhq.storage.task_output_storage import TaskOutputSQLiteStorage
-
-
- class ExecutionLog(BaseModel):
-     task_id: str
-     output: Dict[str, Any]
-     timestamp: datetime = Field(default_factory=datetime.now)
-     task_index: int
-     inputs: Dict[str, Any] = Field(default_factory=dict)
-     was_replayed: bool = False
-
-     def __getitem__(self, key: str) -> Any:
-         return getattr(self, key)
-
-
-
- class TaskOutputStorageHandler:
-
-     def __init__(self):
-         self.storage = TaskOutputSQLiteStorage()
-
-
-     def update(self, task, task_index: int, was_replayed: bool = False, inputs: Dict[str, Any] = {}) -> None:
-         """
-         task: task instance
-         """
-         saved_outputs = self.load()
-         if saved_outputs is None:
-             raise ValueError("Logs cannot be None")
-
-         self.add(task, task_index, was_replayed, inputs)
-
-
-     def add(self, task, task_index: int, was_replayed: bool = False, inputs: Dict[str, Any] = {}) -> None:
-         from versionhq.task.model import Task
-
-         output_to_store = dict()
-
-         if isinstance(task, Task):
-             output_to_store = dict(
-                 description=str(task.description),
-                 raw=str(task.output.raw),
-                 responsible_agent=str(task.processed_agents),
-             )
-
-         self.storage.add(task=task, output=output_to_store, task_index=task_index, was_replayed=was_replayed, inputs=inputs)
-
-
-     def reset(self) -> None:
-         self.storage.delete_all()
-
-
-     def load(self) -> Optional[List[Dict[str, Any]]]:
-         return self.storage.load()