futurehouse-client 0.4.5.dev119__tar.gz → 0.4.5.dev160__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. {futurehouse_client-0.4.5.dev119/src/futurehouse_client.egg-info → futurehouse_client-0.4.5.dev160}/PKG-INFO +1 -1
  2. {futurehouse_client-0.4.5.dev119 → futurehouse_client-0.4.5.dev160}/src/futurehouse_client/__init__.py +12 -0
  3. {futurehouse_client-0.4.5.dev119 → futurehouse_client-0.4.5.dev160}/src/futurehouse_client/clients/rest_client.py +187 -0
  4. {futurehouse_client-0.4.5.dev119 → futurehouse_client-0.4.5.dev160}/src/futurehouse_client/models/__init__.py +12 -0
  5. futurehouse_client-0.4.5.dev160/src/futurehouse_client/models/client.py +72 -0
  6. futurehouse_client-0.4.5.dev160/src/futurehouse_client/models/job_event.py +75 -0
  7. {futurehouse_client-0.4.5.dev119 → futurehouse_client-0.4.5.dev160}/src/futurehouse_client/version.py +3 -3
  8. {futurehouse_client-0.4.5.dev119 → futurehouse_client-0.4.5.dev160/src/futurehouse_client.egg-info}/PKG-INFO +1 -1
  9. {futurehouse_client-0.4.5.dev119 → futurehouse_client-0.4.5.dev160}/src/futurehouse_client.egg-info/SOURCES.txt +1 -0
  10. {futurehouse_client-0.4.5.dev119 → futurehouse_client-0.4.5.dev160}/tests/test_rest.py +149 -0
  11. futurehouse_client-0.4.5.dev119/src/futurehouse_client/models/client.py +0 -128
  12. {futurehouse_client-0.4.5.dev119 → futurehouse_client-0.4.5.dev160}/LICENSE +0 -0
  13. {futurehouse_client-0.4.5.dev119 → futurehouse_client-0.4.5.dev160}/README.md +0 -0
  14. {futurehouse_client-0.4.5.dev119 → futurehouse_client-0.4.5.dev160}/data_storage.md +0 -0
  15. {futurehouse_client-0.4.5.dev119 → futurehouse_client-0.4.5.dev160}/docs/__init__.py +0 -0
  16. {futurehouse_client-0.4.5.dev119 → futurehouse_client-0.4.5.dev160}/docs/client_notebook.ipynb +0 -0
  17. {futurehouse_client-0.4.5.dev119 → futurehouse_client-0.4.5.dev160}/pyproject.toml +0 -0
  18. {futurehouse_client-0.4.5.dev119 → futurehouse_client-0.4.5.dev160}/setup.cfg +0 -0
  19. {futurehouse_client-0.4.5.dev119 → futurehouse_client-0.4.5.dev160}/src/futurehouse_client/clients/__init__.py +0 -0
  20. {futurehouse_client-0.4.5.dev119 → futurehouse_client-0.4.5.dev160}/src/futurehouse_client/clients/data_storage_methods.py +0 -0
  21. {futurehouse_client-0.4.5.dev119 → futurehouse_client-0.4.5.dev160}/src/futurehouse_client/clients/job_client.py +0 -0
  22. {futurehouse_client-0.4.5.dev119 → futurehouse_client-0.4.5.dev160}/src/futurehouse_client/models/app.py +0 -0
  23. {futurehouse_client-0.4.5.dev119 → futurehouse_client-0.4.5.dev160}/src/futurehouse_client/models/data_storage_methods.py +0 -0
  24. {futurehouse_client-0.4.5.dev119 → futurehouse_client-0.4.5.dev160}/src/futurehouse_client/models/rest.py +0 -0
  25. {futurehouse_client-0.4.5.dev119 → futurehouse_client-0.4.5.dev160}/src/futurehouse_client/py.typed +0 -0
  26. {futurehouse_client-0.4.5.dev119 → futurehouse_client-0.4.5.dev160}/src/futurehouse_client/utils/__init__.py +0 -0
  27. {futurehouse_client-0.4.5.dev119 → futurehouse_client-0.4.5.dev160}/src/futurehouse_client/utils/auth.py +0 -0
  28. {futurehouse_client-0.4.5.dev119 → futurehouse_client-0.4.5.dev160}/src/futurehouse_client/utils/general.py +0 -0
  29. {futurehouse_client-0.4.5.dev119 → futurehouse_client-0.4.5.dev160}/src/futurehouse_client/utils/module_utils.py +0 -0
  30. {futurehouse_client-0.4.5.dev119 → futurehouse_client-0.4.5.dev160}/src/futurehouse_client/utils/monitoring.py +0 -0
  31. {futurehouse_client-0.4.5.dev119 → futurehouse_client-0.4.5.dev160}/src/futurehouse_client/utils/world_model_tools.py +0 -0
  32. {futurehouse_client-0.4.5.dev119 → futurehouse_client-0.4.5.dev160}/src/futurehouse_client.egg-info/dependency_links.txt +0 -0
  33. {futurehouse_client-0.4.5.dev119 → futurehouse_client-0.4.5.dev160}/src/futurehouse_client.egg-info/requires.txt +0 -0
  34. {futurehouse_client-0.4.5.dev119 → futurehouse_client-0.4.5.dev160}/src/futurehouse_client.egg-info/top_level.txt +0 -0
  35. {futurehouse_client-0.4.5.dev119 → futurehouse_client-0.4.5.dev160}/tests/test_client.py +0 -0
  36. {futurehouse_client-0.4.5.dev119 → futurehouse_client-0.4.5.dev160}/tests/test_data/test_file.txt +0 -0
  37. {futurehouse_client-0.4.5.dev119 → futurehouse_client-0.4.5.dev160}/tests/test_data/test_information.txt +0 -0
  38. {futurehouse_client-0.4.5.dev119 → futurehouse_client-0.4.5.dev160}/tests/test_data/test_manifest.yaml +0 -0
  39. {futurehouse_client-0.4.5.dev119 → futurehouse_client-0.4.5.dev160}/tests/test_data_storage_e2e.py +0 -0
  40. {futurehouse_client-0.4.5.dev119 → futurehouse_client-0.4.5.dev160}/tests/test_data_storage_methods.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: futurehouse-client
- Version: 0.4.5.dev119
+ Version: 0.4.5.dev160
  Summary: A client for interacting with endpoints of the FutureHouse service.
  Author-email: FutureHouse technical staff <hello@futurehouse.org>
  License: Apache License
@@ -8,6 +8,13 @@ from .models.app import (
      TaskResponse,
      TaskResponseVerbose,
  )
+ from .models.job_event import (
+     CostComponent,
+     ExecutionType,
+     JobEventCreateRequest,
+     JobEventCreateResponse,
+     JobEventUpdateRequest,
+ )
  from .utils.world_model_tools import (
      create_world_model_tool,
      make_world_model_tools,
@@ -15,9 +22,14 @@ from .utils.world_model_tools import (
  )

  __all__ = [
+     "CostComponent",
+     "ExecutionType",
      "FinchTaskResponse",
      "FutureHouseClient",
      "JobClient",
+     "JobEventCreateRequest",
+     "JobEventCreateResponse",
+     "JobEventUpdateRequest",
      "JobNames",
      "PQATaskResponse",
      "PhoenixTaskResponse",
@@ -52,6 +52,11 @@ from futurehouse_client.models.app import (
      TaskResponseVerbose,
      TrajectoryQueryParams,
  )
+ from futurehouse_client.models.job_event import (
+     JobEventCreateRequest,
+     JobEventCreateResponse,
+     JobEventUpdateRequest,
+ )
  from futurehouse_client.models.rest import (
      DiscoveryResponse,
      ExecutionStatus,
@@ -160,6 +165,18 @@ class FileUploadError(RestClientError):
      """Raised when there's an error uploading a file."""


+ class JobEventClientError(RestClientError):
+     """Raised when there's an error with job event operations."""
+
+
+ class JobEventCreationError(JobEventClientError):
+     """Raised when there's an error creating a job event."""
+
+
+ class JobEventUpdateError(JobEventClientError):
+     """Raised when there's an error updating a job event."""
+
+
  retry_if_connection_error = create_retry_if_connection_error(FileUploadError)

  DEFAULT_AGENT_TIMEOUT: int = 2400  # seconds
@@ -2609,6 +2626,176 @@ class RestClient(DataStorageMethods):
                  f"Error fetching discoveries for project: {e!r}"
              ) from e

+     @retry(
+         stop=stop_after_attempt(MAX_RETRY_ATTEMPTS),
+         wait=wait_exponential(multiplier=RETRY_MULTIPLIER, max=MAX_RETRY_WAIT),
+         retry=retry_if_connection_error,
+         before_sleep=before_sleep_log(logger, logging.WARNING),
+     )
+     def create_job_event(
+         self, request: JobEventCreateRequest
+     ) -> JobEventCreateResponse:
+         """Create a new job event.
+
+         Args:
+             request: Job event creation request
+
+         Returns:
+             Job event creation response
+
+         Raises:
+             JobEventCreationError: If the API call fails
+         """
+         try:
+             response = self.client.post(
+                 "/v0.1/job-events",
+                 json=request.model_dump(exclude_none=True, mode="json"),
+             )
+             response.raise_for_status()
+             return JobEventCreateResponse(**response.json())
+         except HTTPStatusError as e:
+             if e.response.status_code == codes.BAD_REQUEST:
+                 raise JobEventCreationError(
+                     f"Invalid job event creation request: {e.response.text}."
+                 ) from e
+             if e.response.status_code == codes.NOT_FOUND:
+                 raise JobEventCreationError(
+                     f"Execution not found for job event creation: {e.response.text}."
+                 ) from e
+             raise JobEventCreationError(
+                 f"Error creating job event: {e.response.status_code} - {e.response.text}."
+             ) from e
+         except Exception as e:
+             raise JobEventCreationError(
+                 f"An unexpected error occurred during job event creation: {e!r}."
+             ) from e
+
+     @retry(
+         stop=stop_after_attempt(MAX_RETRY_ATTEMPTS),
+         wait=wait_exponential(multiplier=RETRY_MULTIPLIER, max=MAX_RETRY_WAIT),
+         retry=retry_if_connection_error,
+         before_sleep=before_sleep_log(logger, logging.WARNING),
+     )
+     async def acreate_job_event(
+         self, request: JobEventCreateRequest
+     ) -> JobEventCreateResponse:
+         """Asynchronously create a new job event.
+
+         Args:
+             request: Job event creation request
+
+         Returns:
+             Job event creation response
+
+         Raises:
+             JobEventCreationError: If the API call fails
+         """
+         try:
+             response = await self.async_client.post(
+                 "/v0.1/job-events",
+                 json=request.model_dump(exclude_none=True, mode="json"),
+             )
+             response.raise_for_status()
+             return JobEventCreateResponse(**response.json())
+         except HTTPStatusError as e:
+             if e.response.status_code == codes.BAD_REQUEST:
+                 raise JobEventCreationError(
+                     f"Invalid job event creation request: {e.response.text}."
+                 ) from e
+             if e.response.status_code == codes.NOT_FOUND:
+                 raise JobEventCreationError(
+                     f"Execution not found for job event creation: {e.response.text}."
+                 ) from e
+             raise JobEventCreationError(
+                 f"Error creating job event: {e.response.status_code} - {e.response.text}."
+             ) from e
+         except Exception as e:
+             raise JobEventCreationError(
+                 f"An unexpected error occurred during job event creation: {e!r}."
+             ) from e
+
+     @retry(
+         stop=stop_after_attempt(MAX_RETRY_ATTEMPTS),
+         wait=wait_exponential(multiplier=RETRY_MULTIPLIER, max=MAX_RETRY_WAIT),
+         retry=retry_if_connection_error,
+         before_sleep=before_sleep_log(logger, logging.WARNING),
+     )
+     def update_job_event(
+         self, job_event_id: UUID, request: JobEventUpdateRequest
+     ) -> None:
+         """Update an existing job event.
+
+         Args:
+             job_event_id: ID of the job event to update
+             request: Job event update request
+
+         Raises:
+             JobEventUpdateError: If the API call fails
+         """
+         try:
+             response = self.client.patch(
+                 f"/v0.1/job-events/{job_event_id}",
+                 json=request.model_dump(exclude_none=True, mode="json"),
+             )
+             response.raise_for_status()
+         except HTTPStatusError as e:
+             if e.response.status_code == codes.NOT_FOUND:
+                 raise JobEventUpdateError(
+                     f"Job event with ID {job_event_id} not found."
+                 ) from e
+             if e.response.status_code == codes.BAD_REQUEST:
+                 raise JobEventUpdateError(
+                     f"Invalid job event update request: {e.response.text}."
+                 ) from e
+             raise JobEventUpdateError(
+                 f"Error updating job event: {e.response.status_code} - {e.response.text}."
+             ) from e
+         except Exception as e:
+             raise JobEventUpdateError(
+                 f"An unexpected error occurred during job event update: {e!r}."
+             ) from e
+
+     @retry(
+         stop=stop_after_attempt(MAX_RETRY_ATTEMPTS),
+         wait=wait_exponential(multiplier=RETRY_MULTIPLIER, max=MAX_RETRY_WAIT),
+         retry=retry_if_connection_error,
+         before_sleep=before_sleep_log(logger, logging.WARNING),
+     )
+     async def aupdate_job_event(
+         self, job_event_id: UUID, request: JobEventUpdateRequest
+     ) -> None:
+         """Asynchronously update an existing job event.
+
+         Args:
+             job_event_id: ID of the job event to update
+             request: Job event update request
+
+         Raises:
+             JobEventUpdateError: If the API call fails
+         """
+         try:
+             response = await self.async_client.patch(
+                 f"/v0.1/job-events/{job_event_id}",
+                 json=request.model_dump(exclude_none=True, mode="json"),
+             )
+             response.raise_for_status()
+         except HTTPStatusError as e:
+             if e.response.status_code == codes.NOT_FOUND:
+                 raise JobEventUpdateError(
+                     f"Job event with ID {job_event_id} not found."
+                 ) from e
+             if e.response.status_code == codes.BAD_REQUEST:
+                 raise JobEventUpdateError(
+                     f"Invalid job event update request: {e.response.text}."
+                 ) from e
+             raise JobEventUpdateError(
+                 f"Error updating job event: {e.response.status_code} - {e.response.text}."
+             ) from e
+         except Exception as e:
+             raise JobEventUpdateError(
+                 f"An unexpected error occurred during job event update: {e!r}."
+             ) from e
+

  def get_installed_packages() -> dict[str, str]:
      """Returns a dictionary of installed packages and their versions."""
@@ -13,13 +13,25 @@ from .app import (
      TaskResponse,
      TaskResponseVerbose,
  )
+ from .job_event import (
+     CostComponent,
+     ExecutionType,
+     JobEventCreateRequest,
+     JobEventCreateResponse,
+     JobEventUpdateRequest,
+ )
  from .rest import TrajectoryPatchRequest, WorldModel, WorldModelResponse

  __all__ = [
      "AuthType",
+     "CostComponent",
      "DockerContainerConfiguration",
+     "ExecutionType",
      "FramePath",
      "JobDeploymentConfig",
+     "JobEventCreateRequest",
+     "JobEventCreateResponse",
+     "JobEventUpdateRequest",
      "PQATaskResponse",
      "RuntimeConfig",
      "Stage",
@@ -0,0 +1,72 @@
+ from typing import Any, Generic, TypeAlias, TypeVar
+
+ from aviary.message import Message
+ from aviary.tools.base import Tool
+ from ldp.data_structures import Transition
+ from ldp.graph.ops import OpResult
+ from pydantic import BaseModel, ConfigDict, Field, field_serializer
+
+ T = TypeVar("T")
+
+
+ # TODO: revisit this
+ # unsure what crow states will return
+ # need to revisit after we get more crows deployed
+ class BaseState(BaseModel):
+     model_config = ConfigDict(arbitrary_types_allowed=True, extra="allow")
+
+
+ class BeforeTransitionState(BaseState):
+     current_state: Any = Field()
+     observations: list[Message] = Field()
+
+
+ class InitialState(BaseState):
+     initial_state: Any = Field()
+
+
+ class ASVState(BaseState, Generic[T]):
+     action: OpResult[T] = Field()
+     next_state: Any = Field()
+     value: float = Field()
+
+     @field_serializer("action")
+     def serialize_action(self, action: OpResult[T]) -> dict:
+         return action.to_dict()
+
+     @field_serializer("next_state")
+     def serialize_next_state(self, state: Any) -> str:
+         return str(state)
+
+
+ class EnvResetState(BaseState):
+     observations: list[Message] = Field()
+     tools: list[Tool] = Field()
+
+
+ class EnvStepState(BaseState):
+     observations: list[Message] = Field()
+     reward: float = Field()
+     done: bool = Field()
+     trunc: bool = Field()
+
+
+ class TransitionState(BaseState):
+     transition: Transition = Field()
+
+     @field_serializer("transition")
+     def serialize_transition(self, transition: Transition) -> dict:
+         transition_data = transition.model_dump()
+         return transition_data | {
+             "action": transition.action.to_dict() if transition.action else None,
+         }
+
+
+ StateType: TypeAlias = (
+     BeforeTransitionState
+     | InitialState
+     | ASVState
+     | EnvResetState
+     | EnvStepState
+     | TransitionState
+ )
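As a quick illustration of these state models (a sketch, not code from the package), the permissive `BaseState` config means a state is built from its declared fields and serialized with standard pydantic calls; the `Message` content below is an arbitrary placeholder:

```python
from aviary.message import Message

from futurehouse_client.models.client import EnvStepState

# Placeholder observation purely for illustration.
step = EnvStepState(
    observations=[Message(content="tool output goes here")],
    reward=0.0,
    done=False,
    trunc=False,
)

# Standard pydantic serialization; extra fields are allowed by the model config.
print(step.model_dump())
```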
@@ -0,0 +1,75 @@
+ """Job event models for cost and usage tracking."""
+
+ from datetime import datetime
+ from enum import StrEnum, auto
+ from typing import Any
+ from uuid import UUID
+
+ from pydantic import BaseModel, Field
+
+
+ class ExecutionType(StrEnum):
+     """Type of execution for job events."""
+
+     TRAJECTORY = auto()
+     SESSION = auto()
+
+
+ class CostComponent(StrEnum):
+     """Cost component types for job events."""
+
+     LLM_USAGE = auto()
+     EXTERNAL_SERVICE = auto()
+     STEP = auto()
+
+
+ class JobEventCreateRequest(BaseModel):
+     """Request model for creating a job event matching crow-service schema."""
+
+     execution_id: UUID = Field(description="UUID for trajectory_id or session_id")
+     execution_type: ExecutionType = Field(
+         description="Either 'TRAJECTORY' or 'SESSION'"
+     )
+     cost_component: CostComponent = Field(
+         description="Cost component: 'LLM_USAGE', 'EXTERNAL_SERVICE', or 'STEP'"
+     )
+     started_at: datetime = Field(description="Start time of the job event")
+     ended_at: datetime = Field(description="End time of the job event")
+     crow: str | None = Field(default=None, description="unique identifier for the crow")
+     amount_acu: float | None = Field(default=None, description="Cost amount in ACUs")
+     amount_usd: float | None = Field(default=None, description="Cost amount in USD")
+     rate: float | None = Field(default=None, description="Rate per token/call in USD")
+     input_token_count: int | None = Field(
+         default=None, description="Input token count for LLM calls"
+     )
+     completion_token_count: int | None = Field(
+         default=None, description="Completion token count for LLM calls"
+     )
+     metadata: dict[str, Any] | None = Field(default=None)
+
+
+ class JobEventUpdateRequest(BaseModel):
+     """Request model for updating a job event matching crow-service schema."""
+
+     amount_acu: float | None = Field(default=None, description="Cost amount in ACUs")
+     amount_usd: float | None = Field(default=None, description="Cost amount in USD")
+     rate: float | None = Field(default=None, description="Rate per token/call in USD")
+     input_token_count: int | None = Field(
+         default=None, description="Input token count for LLM calls"
+     )
+     completion_token_count: int | None = Field(
+         default=None, description="Completion token count for LLM calls"
+     )
+     metadata: dict[str, Any] | None = Field(default=None)
+     started_at: datetime | None = Field(
+         default=None, description="Start time of the job event"
+     )
+     ended_at: datetime | None = Field(
+         default=None, description="End time of the job event"
+     )
+
+
+ class JobEventCreateResponse(BaseModel):
+     """Response model for job event creation."""
+
+     id: UUID = Field(description="UUID of the created job event")
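A small sketch of what these request models serialize to; the dump below mirrors the `model_dump(exclude_none=True, mode="json")` call the client uses when posting to `/v0.1/job-events` (the id and amounts are placeholders):

```python
from datetime import datetime, timezone
from uuid import uuid4

from futurehouse_client.models.job_event import (
    CostComponent,
    ExecutionType,
    JobEventCreateRequest,
)

request = JobEventCreateRequest(
    execution_id=uuid4(),  # placeholder; use a real trajectory or session id
    execution_type=ExecutionType.SESSION,
    cost_component=CostComponent.STEP,
    started_at=datetime.now(timezone.utc),
    ended_at=datetime.now(timezone.utc),
    amount_acu=1.0,
)

# Unset optional fields (amount_usd, rate, token counts, metadata, crow) are dropped;
# StrEnum members serialize to their lowercase string values ("session", "step").
print(request.model_dump(exclude_none=True, mode="json"))
```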
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
  commit_id: COMMIT_ID
  __commit_id__: COMMIT_ID

- __version__ = version = '0.4.5.dev119'
- __version_tuple__ = version_tuple = (0, 4, 5, 'dev119')
+ __version__ = version = '0.4.5.dev160'
+ __version_tuple__ = version_tuple = (0, 4, 5, 'dev160')

- __commit_id__ = commit_id = 'g1d92eec8f'
+ __commit_id__ = commit_id = 'g7b8da3bce'
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: futurehouse-client
- Version: 0.4.5.dev119
+ Version: 0.4.5.dev160
  Summary: A client for interacting with endpoints of the FutureHouse service.
  Author-email: FutureHouse technical staff <hello@futurehouse.org>
  License: Apache License
@@ -20,6 +20,7 @@ src/futurehouse_client/models/__init__.py
  src/futurehouse_client/models/app.py
  src/futurehouse_client/models/client.py
  src/futurehouse_client/models/data_storage_methods.py
+ src/futurehouse_client/models/job_event.py
  src/futurehouse_client/models/rest.py
  src/futurehouse_client/utils/__init__.py
  src/futurehouse_client/utils/auth.py
@@ -30,6 +30,8 @@ from futurehouse_client.clients import (
  )
  from futurehouse_client.clients.rest_client import (
      FileUploadError,
+     JobEventCreationError,
+     JobEventUpdateError,
      ProjectError,
      RestClient,
      RestClientError,
@@ -43,6 +45,12 @@ from futurehouse_client.models.app import (
      TaskRequest,
      TaskResponseVerbose,
  )
+ from futurehouse_client.models.job_event import (
+     CostComponent,
+     ExecutionType,
+     JobEventCreateRequest,
+     JobEventUpdateRequest,
+ )
  from futurehouse_client.models.rest import (
      ExecutionStatus,
      UserAgentRequestPostPayload,
@@ -1229,3 +1237,144 @@ def test_retry_logic_conditions(exception, should_retry, test_description):
      assert result == should_retry, (
          f"Expected {should_retry} for {test_description}, got {result}"
      )
+
+
+ class TestJobEventOperations:
+     @pytest.fixture
+     def test_trajectory_id(
+         self, admin_client: RestClient, task_req: TaskRequest
+     ) -> str:
+         """Create a real trajectory for job event testing."""
+         return admin_client.create_task(task_req)
+
+     @pytest.fixture
+     def job_event_create_request(self, test_trajectory_id: str):
+         from datetime import datetime
+
+         return JobEventCreateRequest(
+             execution_id=UUID(test_trajectory_id),
+             execution_type=ExecutionType.TRAJECTORY,
+             cost_component=CostComponent.LLM_USAGE,
+             started_at=datetime.now(),
+             ended_at=datetime.now(),
+             crow=None,
+             amount_usd=0.005,
+             rate=0.0001,
+             input_token_count=100,
+             completion_token_count=50,
+             metadata={"model": "gpt-4o", "temperature": 0.7},
+         )
+
+     @pytest.fixture
+     def job_event_update_request(self):
+         return JobEventUpdateRequest(
+             amount_usd=0.007,
+             rate=0.00015,
+             input_token_count=120,
+             completion_token_count=60,
+             metadata={"model": "gpt-4o", "temperature": 0.5, "updated": True},
+         )
+
+     @pytest.mark.timeout(300)
+     def test_create_job_event_success(
+         self, admin_client: RestClient, job_event_create_request
+     ):
+         response = admin_client.create_job_event(job_event_create_request)
+         assert response.id is not None
+         assert isinstance(response.id, UUID)
+
+     @pytest.mark.asyncio
+     @pytest.mark.timeout(300)
+     async def test_acreate_job_event_success(
+         self, admin_client: RestClient, job_event_create_request
+     ):
+         response = await admin_client.acreate_job_event(job_event_create_request)
+         assert response.id is not None
+         assert isinstance(response.id, UUID)
+
+     def test_create_job_event_execution_not_found(
+         self, admin_client: RestClient, job_event_create_request
+     ):
+         # Create a new request with a non-existent execution ID
+         invalid_request = JobEventCreateRequest(
+             execution_id=uuid4(),  # Non-existent ID
+             execution_type=job_event_create_request.execution_type,
+             cost_component=job_event_create_request.cost_component,
+             started_at=job_event_create_request.started_at,
+             ended_at=job_event_create_request.ended_at,
+             crow=job_event_create_request.crow,
+             amount_usd=job_event_create_request.amount_usd,
+             rate=job_event_create_request.rate,
+             input_token_count=job_event_create_request.input_token_count,
+             completion_token_count=job_event_create_request.completion_token_count,
+             metadata=job_event_create_request.metadata,
+         )
+         with pytest.raises(JobEventCreationError, match="Execution not found"):
+             admin_client.create_job_event(invalid_request)
+
+     @pytest.mark.asyncio
+     async def test_acreate_job_event_execution_not_found(
+         self, admin_client: RestClient, job_event_create_request
+     ):
+         # Create a new request with a non-existent execution ID
+         invalid_request = JobEventCreateRequest(
+             execution_id=uuid4(),  # Non-existent ID
+             execution_type=job_event_create_request.execution_type,
+             cost_component=job_event_create_request.cost_component,
+             started_at=job_event_create_request.started_at,
+             ended_at=job_event_create_request.ended_at,
+             crow=job_event_create_request.crow,
+             amount_usd=job_event_create_request.amount_usd,
+             rate=job_event_create_request.rate,
+             input_token_count=job_event_create_request.input_token_count,
+             completion_token_count=job_event_create_request.completion_token_count,
+             metadata=job_event_create_request.metadata,
+         )
+         with pytest.raises(JobEventCreationError, match="Execution not found"):
+             await admin_client.acreate_job_event(invalid_request)
+
+     @pytest.mark.timeout(300)
+     def test_update_job_event_success(
+         self,
+         admin_client: RestClient,
+         job_event_create_request,
+         job_event_update_request,
+     ):
+         create_response = admin_client.create_job_event(job_event_create_request)
+         job_event_id = create_response.id
+
+         # Should not raise an exception and return None
+         result = admin_client.update_job_event(job_event_id, job_event_update_request)
+         assert result is None
+
+     @pytest.mark.asyncio
+     @pytest.mark.timeout(300)
+     async def test_aupdate_job_event_success(
+         self,
+         admin_client: RestClient,
+         job_event_create_request,
+         job_event_update_request,
+     ):
+         create_response = await admin_client.acreate_job_event(job_event_create_request)
+         job_event_id = create_response.id
+
+         await admin_client.aupdate_job_event(job_event_id, job_event_update_request)
+
+     def test_update_job_event_not_found(
+         self, admin_client: RestClient, job_event_update_request
+     ):
+         job_event_id = uuid4()
+         with pytest.raises(
+             JobEventUpdateError, match=r"Job event with ID .* not found"
+         ):
+             admin_client.update_job_event(job_event_id, job_event_update_request)
+
+     @pytest.mark.asyncio
+     async def test_aupdate_job_event_not_found(
+         self, admin_client: RestClient, job_event_update_request
+     ):
+         job_event_id = uuid4()
+         with pytest.raises(
+             JobEventUpdateError, match=r"Job event with ID .* not found"
+         ):
+             await admin_client.aupdate_job_event(job_event_id, job_event_update_request)
@@ -1,128 +0,0 @@
- from typing import Any, Generic, TypeAlias, TypeVar
-
- from aviary.message import Message
- from aviary.tools.base import Tool
- from ldp.agent import Agent
- from ldp.data_structures import Transition
- from ldp.graph.ops import OpResult
- from pydantic import BaseModel, ConfigDict, Field, field_serializer
-
- T = TypeVar("T")
-
-
- # TODO: revisit this
- # unsure what crow states will return
- # need to revisit after we get more crows deployed
- class BaseState(BaseModel):
-     model_config = ConfigDict(arbitrary_types_allowed=True, extra="allow")
-
-
- class BeforeTransitionState(BaseState):
-     current_state: Any = Field()
-     observations: list[Message] = Field()
-
-
- class InitialState(BaseState):
-     initial_state: Any = Field()
-
-
- class ASVState(BaseState, Generic[T]):
-     action: OpResult[T] = Field()
-     next_state: Any = Field()
-     value: float = Field()
-
-     @field_serializer("action")
-     def serialize_action(self, action: OpResult[T]) -> dict:
-         return action.to_dict()
-
-
- class EnvResetState(BaseState):
-     observations: list[Message] = Field()
-     tools: list[Tool] = Field()
-
-
- class EnvStepState(BaseState):
-     observations: list[Message] = Field()
-     reward: float = Field()
-     done: bool = Field()
-     trunc: bool = Field()
-
-
- class TransitionState(BaseState):
-     transition: Transition = Field()
-
-     @field_serializer("transition")
-     def serialize_transition(self, transition: Transition) -> dict:
-         transition_data = transition.model_dump()
-         return transition_data | {
-             "action": transition.action.to_dict() if transition.action else None,
-         }
-
-
- class GlobalState(BaseState):
-     agent: Agent | None = None
-     env: Any | None = None
-     agent_state: Any | None = None
-     next_agent_state: Any | None = None
-     observations: list = []
-     action: Any | None = None
-     value: float = 0.0
-     last_step_state: Transition | None = None
-
-     def update_observations(self, obs: list[Message]) -> list[Message]:
-         previous_observations = self.observations or []
-         self.observations = obs
-         return previous_observations
-
-     def store_step_state(self, step_state: Transition) -> None:
-         self.last_step_state = step_state
-
-     def update_trajectory_data(self, **kwargs) -> None:
-         for key, value in kwargs.items():
-             setattr(self, key, value)
-
-     def _get_safe_previous_observations(
-         self, current_obs: list[Message] | None = None
-     ) -> list[Message]:
-         if self.last_step_state:
-             last_step_state = self.last_step_state
-             if last_step_state.next_observation:
-                 return last_step_state.next_observation
-         if self.observations:
-             return self.observations
-         return current_obs or []
-
-     def create_step_state(self, callback_type: str, **kwargs) -> Transition:
-         defaults = {
-             "timestep": getattr(self.agent, "_timestep", 0) if self.agent else 0,
-             "agent_state": self.agent_state,
-             "next_agent_state": self.next_agent_state or self.agent_state,
-             "observation": self._get_safe_previous_observations(),
-             "next_observation": self.observations or [],
-             "action": self.action,
-             "reward": 0.0,
-             "truncated": False,
-             "done": False,
-             "value": self.value or 0.0,
-             "metadata": {"callback_type": callback_type},
-         }
-
-         for key, value in kwargs.items():
-             if key == "metadata" and isinstance(value, dict):
-                 if isinstance(defaults["metadata"], dict):
-                     defaults["metadata"].update(value)
-             else:
-                 defaults[key] = value
-
-         return Transition(**defaults)
-
-
- StateType: TypeAlias = (
-     BeforeTransitionState
-     | InitialState
-     | ASVState
-     | EnvResetState
-     | EnvStepState
-     | TransitionState
-     | GlobalState
- )