goose-py 0.3.2__tar.gz → 0.3.4__tar.gz

This diff shows the changes between publicly released versions of the package as they appear in their respective registries. It is provided for informational purposes only.
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: goose-py
-Version: 0.3.2
+Version: 0.3.4
 Summary: A tool for AI workflows based on human-computer collaboration and structured output.
 Home-page: https://github.com/chelle-ai/goose
 Keywords: ai,yaml,configuration,llm
@@ -1,4 +1,5 @@
 import base64
+import json
 import logging
 from datetime import datetime
 from enum import StrEnum
@@ -97,6 +98,24 @@ class SystemMessage(BaseModel):
         }
 
 
+class AgentResponseDump(TypedDict):
+    run_id: str
+    flow_name: str
+    task_name: str
+    model: str
+    system_message: str
+    input_messages: str
+    output_message: str
+    input_cost: float
+    output_cost: float
+    total_cost: float
+    input_tokens: int
+    output_tokens: int
+    start_time: datetime
+    end_time: datetime
+    duration_ms: int
+
+
 class AgentResponse[R: BaseModel](BaseModel):
     INPUT_CENTS_PER_MILLION_TOKENS: ClassVar[dict[GeminiModel, float]] = {
         GeminiModel.FLASH_8B: 30,
@@ -143,6 +162,40 @@ class AgentResponse[R: BaseModel](BaseModel):
     def total_cost(self) -> float:
         return self.input_cost + self.output_cost
 
+    def minimized_dump(self) -> AgentResponseDump:
+        if self.system is None:
+            minimized_system_message = ""
+        else:
+            minimized_system_message = self.system.model_dump()
+            for part in minimized_system_message["parts"]:
+                if part["type"] == "image_url":
+                    part["content"] = b"__MEDIA__"
+
+        minimized_input_messages = [
+            message.model_dump() for message in self.input_messages
+        ]
+        for message in minimized_input_messages:
+            if message["type"] == "image_url":
+                message["content"] = b"__MEDIA__"
+
+        return {
+            "run_id": self.run_id,
+            "flow_name": self.flow_name,
+            "task_name": self.task_name,
+            "model": self.model.value,
+            "system_message": json.dumps(minimized_system_message),
+            "input_messages": json.dumps(minimized_input_messages),
+            "output_message": self.response.model_dump_json(),
+            "input_tokens": self.input_tokens,
+            "output_tokens": self.output_tokens,
+            "input_cost": self.input_cost,
+            "output_cost": self.output_cost,
+            "total_cost": self.total_cost,
+            "start_time": self.start_time,
+            "end_time": self.end_time,
+            "duration_ms": self.duration_ms,
+        }
+
 
 class IAgentLogger(Protocol):
     async def __call__(self, *, response: AgentResponse[Any]) -> None: ...
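A minimal sketch of how a custom agent logger might use the new minimized_dump(): the JsonlAgentLogger name, the file path handling, and the default=str fallback (start_time/end_time are datetimes) are illustrative assumptions, not part of goose; the import location follows the goose.agent imports seen in the flow module.

import json
from typing import Any

from goose.agent import AgentResponse


class JsonlAgentLogger:
    """Structurally satisfies IAgentLogger; appends one JSON record per agent call."""

    def __init__(self, path: str) -> None:
        self._path = path

    async def __call__(self, *, response: AgentResponse[Any]) -> None:
        # minimized_dump() swaps image payloads for a "__MEDIA__" placeholder
        # and pre-serializes the message fields, keeping each record small.
        record = response.minimized_dump()
        with open(self._path, "a") as f:
            f.write(json.dumps(record, default=str) + "\n")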
@@ -1,11 +1,11 @@
 import json
-from contextlib import contextmanager
+from contextlib import asynccontextmanager
 from contextvars import ContextVar
 from typing import (
     Any,
+    AsyncIterator,
     Awaitable,
     Callable,
-    Iterator,
     NewType,
     Protocol,
     Self,
@@ -23,6 +23,7 @@ from goose.agent import (
23
23
  UserMessage,
24
24
  )
25
25
  from goose.errors import Honk
26
+ from goose.store import IFlowRunStore, InMemoryFlowRunStore
26
27
 
27
28
  SerializedFlowRun = NewType("SerializedFlowRun", str)
28
29
 
@@ -236,11 +237,13 @@ class Flow[**P]:
         /,
         *,
         name: str | None = None,
+        store: IFlowRunStore | None = None,
         agent_logger: IAgentLogger | None = None,
     ) -> None:
         self._fn = fn
         self._name = name
         self._agent_logger = agent_logger
+        self._store = store or InMemoryFlowRunStore(flow_name=self.name)
 
     @property
     def name(self) -> str:
@@ -253,20 +256,20 @@ class Flow[**P]:
             raise Honk("No current flow run")
         return run
 
-    @contextmanager
-    def start_run(
-        self, *, run_id: str, preload: FlowRun | None = None
-    ) -> Iterator[FlowRun]:
-        if preload is None:
+    @asynccontextmanager
+    async def start_run(self, *, run_id: str) -> AsyncIterator[FlowRun]:
+        existing_run = await self._store.get(run_id=run_id)
+        if existing_run is None:
             run = FlowRun()
         else:
-            run = preload
+            run = existing_run
 
         old_run = _current_flow_run.get()
         _current_flow_run.set(run)
 
         run.start(flow_name=self.name, run_id=run_id, agent_logger=self._agent_logger)
         yield run
+        await self._store.save(run=run)
         run.end()
 
         _current_flow_run.set(old_run)
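start_run() is now an async context manager backed by the flow's store, replacing the old preload argument: on entry the run is fetched by run_id (or freshly created), and it is saved back to the store before run.end(). A minimal usage sketch; the flow name, body, and run id are hypothetical.

import asyncio

from goose.flow import flow


@flow
async def my_flow(*, topic: str) -> None:
    ...  # call @task-decorated steps here


async def main() -> None:
    # "async with" is now required; reusing the same run_id later resumes
    # the stored run instead of starting from scratch.
    async with my_flow.start_run(run_id="run-1") as run:
        ...  # execute the flow's tasks within this run context


asyncio.run(main())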
@@ -386,20 +389,24 @@ def task[**P, R: Result](
 def flow[**P](fn: Callable[P, Awaitable[None]], /) -> Flow[P]: ...
 @overload
 def flow[**P](
-    *, name: str | None = None, agent_logger: IAgentLogger | None = None
+    *,
+    name: str | None = None,
+    store: IFlowRunStore | None = None,
+    agent_logger: IAgentLogger | None = None,
 ) -> Callable[[Callable[P, Awaitable[None]]], Flow[P]]: ...
 def flow[**P](
     fn: Callable[P, Awaitable[None]] | None = None,
     /,
     *,
     name: str | None = None,
+    store: IFlowRunStore | None = None,
     agent_logger: IAgentLogger | None = None,
 ) -> Flow[P] | Callable[[Callable[P, Awaitable[None]]], Flow[P]]:
     if fn is None:
 
         def decorator(fn: Callable[P, Awaitable[None]]) -> Flow[P]:
-            return Flow(fn, name=name, agent_logger=agent_logger)
+            return Flow(fn, name=name, store=store, agent_logger=agent_logger)
 
         return decorator
 
-    return Flow(fn, name=name, agent_logger=agent_logger)
+    return Flow(fn, name=name, store=store, agent_logger=agent_logger)
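Both flow() overloads now accept the store keyword, so a persistence backend can be chosen per flow at decoration time. A hedged sketch; the flow name and body are hypothetical, and omitting store falls back to InMemoryFlowRunStore(flow_name=...) inside Flow.__init__.

from goose.flow import flow
from goose.store import InMemoryFlowRunStore


# Passing the store explicitly; any IFlowRunStore implementation works here.
@flow(name="research", store=InMemoryFlowRunStore(flow_name="research"))
async def research(*, question: str) -> None:
    ...  # task calls go here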
@@ -0,0 +1,24 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Protocol
+
+if TYPE_CHECKING:
+    from goose.flow import FlowRun
+
+
+class IFlowRunStore(Protocol):
+    def __init__(self, *, flow_name: str) -> None: ...
+    async def get(self, *, run_id: str) -> FlowRun | None: ...
+    async def save(self, *, run: FlowRun) -> None: ...
+
+
+class InMemoryFlowRunStore(IFlowRunStore):
+    def __init__(self, *, flow_name: str) -> None:
+        self._flow_name = flow_name
+        self._runs: dict[str, FlowRun] = {}
+
+    async def get(self, *, run_id: str) -> FlowRun | None:
+        return self._runs.get(run_id)
+
+    async def save(self, *, run: FlowRun) -> None:
+        self._runs[run.id] = run
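Because IFlowRunStore is a small async Protocol, a durable backend can be dropped in without touching Flow. A hedged sketch of a disk-backed store: it assumes FlowRun instances are picklable and reuses the run.id attribute that InMemoryFlowRunStore keys on; a real implementation would more likely round-trip through the SerializedFlowRun string form, and the directory name is an arbitrary choice for this sketch.

import pickle
from pathlib import Path

from goose.flow import FlowRun
from goose.store import IFlowRunStore


class PickleFlowRunStore(IFlowRunStore):
    def __init__(self, *, flow_name: str) -> None:
        # One directory per flow, one pickle file per run.
        self._dir = Path(".goose-runs") / flow_name
        self._dir.mkdir(parents=True, exist_ok=True)

    async def get(self, *, run_id: str) -> FlowRun | None:
        path = self._dir / f"{run_id}.pickle"
        if not path.exists():
            return None
        return pickle.loads(path.read_bytes())

    async def save(self, *, run: FlowRun) -> None:
        (self._dir / f"{run.id}.pickle").write_bytes(pickle.dumps(run))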
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "goose-py"
-version = "0.3.2"
+version = "0.3.4"
 description = "A tool for AI workflows based on human-computer collaboration and structured output."
 authors = [
     "Nash Taylor <nash@chelle.ai>",
@@ -49,7 +49,7 @@ pythonVersion = "3.12"
 typeCheckingMode = "strict"
 reportMissingModuleSource = false
 useLibraryCodeForTypes = false
-reportImportCycles = true
+reportImportCycles = false
 reportUnknownMemberType = false
 reportUnknownVariableType = false
 stubPath = ".stubs"