futurehouse-client 0.0.1__tar.gz → 0.0.3__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- futurehouse_client-0.0.3/PKG-INFO +225 -0
- futurehouse_client-0.0.3/README.md +187 -0
- {futurehouse_client-0.0.1 → futurehouse_client-0.0.3}/docs/client_notebook.ipynb +13 -18
- {futurehouse_client-0.0.1 → futurehouse_client-0.0.3}/docs/client_notebook.md +1 -2
- {futurehouse_client-0.0.1 → futurehouse_client-0.0.3}/docs/crow_client_notebook_w_deployment.ipynb +10 -10
- futurehouse_client-0.0.3/futurehouse_client/__init__.py +12 -0
- futurehouse_client-0.0.3/futurehouse_client/clients/__init__.py +12 -0
- {futurehouse_client-0.0.1 → futurehouse_client-0.0.3}/futurehouse_client/clients/job_client.py +1 -1
- {futurehouse_client-0.0.1 → futurehouse_client-0.0.3}/futurehouse_client/clients/rest_client.py +52 -43
- {futurehouse_client-0.0.1 → futurehouse_client-0.0.3}/futurehouse_client/models/__init__.py +4 -4
- {futurehouse_client-0.0.1 → futurehouse_client-0.0.3}/futurehouse_client/models/app.py +5 -5
- futurehouse_client-0.0.3/futurehouse_client.egg-info/PKG-INFO +225 -0
- {futurehouse_client-0.0.1 → futurehouse_client-0.0.3}/futurehouse_client.egg-info/SOURCES.txt +0 -6
- {futurehouse_client-0.0.1 → futurehouse_client-0.0.3}/pyproject.toml +1 -1
- futurehouse_client-0.0.3/tests/test_rest.py +96 -0
- futurehouse_client-0.0.1/PKG-INFO +0 -151
- futurehouse_client-0.0.1/README.md +0 -113
- futurehouse_client-0.0.1/docs/envs/dummy_env/__init__.py +0 -3
- futurehouse_client-0.0.1/docs/envs/dummy_env/env.py +0 -127
- futurehouse_client-0.0.1/docs/envs/dummy_env/requirements.txt +0 -1
- futurehouse_client-0.0.1/docs/envs/testEnv/env.py +0 -46
- futurehouse_client-0.0.1/futurehouse_client/__init__.py +0 -12
- futurehouse_client-0.0.1/futurehouse_client/clients/__init__.py +0 -12
- futurehouse_client-0.0.1/futurehouse_client/utils/__init__.py +0 -0
- futurehouse_client-0.0.1/futurehouse_client.egg-info/PKG-INFO +0 -151
- futurehouse_client-0.0.1/tests/__init__.py +0 -0
- futurehouse_client-0.0.1/tests/test_rest.py +0 -95
- {futurehouse_client-0.0.1 → futurehouse_client-0.0.3}/LICENSE +0 -0
- {futurehouse_client-0.0.1 → futurehouse_client-0.0.3}/docs/__init__.py +0 -0
- {futurehouse_client-0.0.1 → futurehouse_client-0.0.3}/futurehouse_client/models/client.py +0 -0
- {futurehouse_client-0.0.1 → futurehouse_client-0.0.3}/futurehouse_client/models/rest.py +0 -0
- {futurehouse_client-0.0.1/docs/envs/testEnv → futurehouse_client-0.0.3/futurehouse_client/utils}/__init__.py +0 -0
- {futurehouse_client-0.0.1 → futurehouse_client-0.0.3}/futurehouse_client/utils/module_utils.py +0 -0
- {futurehouse_client-0.0.1 → futurehouse_client-0.0.3}/futurehouse_client.egg-info/dependency_links.txt +0 -0
- {futurehouse_client-0.0.1 → futurehouse_client-0.0.3}/futurehouse_client.egg-info/requires.txt +0 -0
- {futurehouse_client-0.0.1 → futurehouse_client-0.0.3}/futurehouse_client.egg-info/top_level.txt +0 -0
- {futurehouse_client-0.0.1 → futurehouse_client-0.0.3}/setup.cfg +0 -0
- {futurehouse_client-0.0.1 → futurehouse_client-0.0.3}/uv.lock +0 -0
futurehouse_client-0.0.3/PKG-INFO
ADDED
@@ -0,0 +1,225 @@
Metadata-Version: 2.4
Name: futurehouse-client
Version: 0.0.3
Summary: A client for interacting with endpoints of the FutureHouse service.
Author-email: FutureHouse technical staff <hello@futurehouse.org>
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python
Requires-Python: <3.13,>=3.11
Description-Content-Type: text/markdown
Requires-Dist: cloudpickle
Requires-Dist: dm-tree<0.1.9
Requires-Dist: fhaviary
Requires-Dist: httpx
Requires-Dist: ldp>=0.22.0
Requires-Dist: pydantic
Requires-Dist: python-dotenv
Requires-Dist: tenacity
Provides-Extra: dev
Requires-Dist: black; extra == "dev"
Requires-Dist: jupyter; extra == "dev"
Requires-Dist: jupyterlab; extra == "dev"
Requires-Dist: mypy; extra == "dev"
Requires-Dist: notebook; extra == "dev"
Requires-Dist: pre-commit; extra == "dev"
Requires-Dist: pylint; extra == "dev"
Requires-Dist: pylint-per-file-ignores; extra == "dev"
Requires-Dist: pylint-pydantic; extra == "dev"
Requires-Dist: pytest; extra == "dev"
Requires-Dist: pytest-rerunfailures; extra == "dev"
Requires-Dist: pytest-subtests; extra == "dev"
Requires-Dist: pytest-timeout; extra == "dev"
Requires-Dist: pytest-xdist; extra == "dev"
Requires-Dist: ruff; extra == "dev"
Requires-Dist: setuptools_scm; extra == "dev"

# FutureHouse Platform API Documentation

Documentation and tutorials for crow-client, a client for interacting with endpoints of the FutureHouse crow service.

> FutureHouse's mascot is the crow. Therefore, some objects are named after the crow as a homage.

<!--TOC-->

- [Installation](#installation)
- [Quickstart](#quickstart)
- [Functionalities](#functionalities)
  - [Stages](#stages)
- [Authentication](#authentication)
- [Job submission](#job-submission)
- [Job Continuation](#job-continuation)
- [Job retrieval](#job-retrieval)

<!--TOC-->

## Installation

```bash
uv pip install crow-client
```

## Quickstart

```python
from crow_client import CrowClient, JobNames
from crow_client.models import AuthType, Stage

client = CrowClient(
    stage=Stage.PROD,
    auth_type=AuthType.API_KEY,
    api_key="your_api_key",
)

job_data = {
    "name": JobNames.CROW,
    "query": "Has anyone tested therapeutic exerkines in humans or NHPs?",
}

job_run_id = client.create_job(job_data)

job_status = client.get_job(job_run_id)
```

A quickstart example can be found in the [crow_client_notebook.ipynb](./docs/crow_client_notebook.ipynb) file, where we show how to submit and retrieve a job task, pass runtime configuration to the agent, and ask follow-up questions to the previous job.

## Functionalities

Crow-client implements a RestClient (called `CrowClient`) with the following functionalities:

- [Authentication](#authentication): `auth_client`
- [Job submission](#job-submission): `create_job(JobRequest)`
- [Job status](#job-retrieval): `get_job(job_id)`

To create a `CrowClient`, you need to pass the following parameters:

| Parameter | Type | Default | Description |
| --- | --- | --- | --- |
| stage | Stage | Stage.DEV | Where the job will be submitted. |
| organization | str \| None | None | Which organization to use. |
| auth_type | AuthType | AuthType.API_KEY | Which authentication method to use. |
| api_key | str \| None | None | The API key to use for authentication, if using auth_type=AuthType.API_KEY. |

To instantiate a client, we can use the following code:

```python
from crow_client import CrowClient
from crow_client.models import Stage, AuthType

client = CrowClient(
    stage=Stage.PROD,
    organization="your_organization",
    auth_type=AuthType.API_KEY,
    api_key="your_api_key",
)
```

### Stages

The stage is where your job will be submitted. This parameter can be one of the following:

| Name | Description |
| --- | --- |
| Stage.DEV | Development environment at https://dev.api.platform.futurehouse.org |
| Stage.PROD | Production environment at https://api.platform.futurehouse.org |

## Authentication

In order to use the `CrowClient`, you need to authenticate yourself. Authentication is done by providing an API key, which can be obtained directly from your [profile page in the FutureHouse platform](https://platform.futurehouse.org/profile).
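
Since `python-dotenv` is already a dependency of this package, one way to keep the key out of source code is to load it from a `.env` file. This is only a sketch, not part of the documented API, and the environment variable name `FUTUREHOUSE_API_KEY` below is a hypothetical choice:

```python
import os

from dotenv import load_dotenv

from crow_client import CrowClient
from crow_client.models import AuthType, Stage

# Reads key=value pairs from a local .env file into the process environment.
load_dotenv()

client = CrowClient(
    stage=Stage.PROD,
    auth_type=AuthType.API_KEY,
    api_key=os.environ["FUTUREHOUSE_API_KEY"],  # hypothetical variable name
)
```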

## Job submission

`CrowClient` can be used to submit jobs to the FutureHouse platform. Using a `CrowClient` instance, you can submit jobs to the platform by calling the `create_job` method, which receives a `JobRequest` (or a dictionary with `kwargs`) and returns the job id.
Aiming to make the submission of jobs as simple as possible, we have created a `JobNames` enum that contains the available job types.

The available supported jobs are:

| Alias | Job Name | Task type | Description |
| --- | --- | --- | --- |
| `JobNames.CROW` | `job-futurehouse-paperqa2` | Fast Search | Ask a question of scientific data sources, and receive a high-accuracy, cited response. Built with [PaperQA2](https://github.com/Future-House/paper-qa). |
| `JobNames.FALCON` | `job-futurehouse-paperqa2-deep` | Deep Search | Search a plethora of sources for deep research, and receive a detailed, structured report as a response. |
| `JobNames.OWL` | `job-futurehouse-hasanyone` | Precedent Search | Formerly known as HasAnyone, ask whether anyone has ever done something in science. |
| `JobNames.DUMMY` | `job-futurehouse-dummy` | Dummy Task | A dummy task, mainly for testing purposes. |

Using `JobNames`, the client automatically adapts the job name to the current stage.
A job submission looks like this:

```python
from crow_client import CrowClient, JobNames
from crow_client.models import AuthType, Stage

client = CrowClient(
    stage=Stage.PROD,
    auth_type=AuthType.API_KEY,
    api_key="your_api_key",
)

job_data = {
    "name": JobNames.CROW,
    "query": "Has anyone tested therapeutic exerkines in humans or NHPs?",
}

job_id = client.create_job(job_data)
```

`JobRequest` has the following fields:

| Field | Type | Description |
| --- | --- | --- |
| id | UUID | Optional job identifier. A UUID will be generated if not provided. |
| name | str | Name of the job to execute, e.g. `job-futurehouse-paperqa2`, or a `JobNames` member for convenience, e.g. `JobNames.CROW`. |
| query | str | Query or task to be executed by the job. |
| runtime_config | RuntimeConfig | Optional runtime parameters for the job. |

`runtime_config` can receive an `AgentConfig` object with the desired kwargs. Check the available `AgentConfig` fields in the [LDP documentation](https://github.com/Future-House/ldp/blob/main/src/ldp/agent/agent.py#L87). Besides the `AgentConfig` object, we can also pass `timeout` and `max_steps` to limit the execution time and the number of steps the agent can take.
Other specialised configurations are also available but are outside the scope of this documentation.
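
As a rough sketch of what such a configuration might look like: `timeout` and `max_steps` are the fields named above, while the `agent` key and the `"SimpleAgent"` agent type are assumptions taken from the LDP `AgentConfig` model, not confirmed by this README, so check the client's models module before relying on them:

```python
from crow_client import CrowClient, JobNames
from crow_client.models import AuthType, Stage
from ldp.agent import AgentConfig

client = CrowClient(
    stage=Stage.PROD,
    auth_type=AuthType.API_KEY,
    api_key="your_api_key",
)

# Hypothetical agent settings; see the AgentConfig fields in the LDP docs.
agent = AgentConfig(agent_type="SimpleAgent")

job_data = {
    "name": JobNames.CROW,
    "query": "Has anyone tested therapeutic exerkines in humans or NHPs?",
    "runtime_config": {
        "agent": agent,      # assumed key name for the AgentConfig
        "timeout": 600,      # seconds before the job is stopped
        "max_steps": 10,     # cap on the number of agent steps
    },
}

job_id = client.create_job(job_data)
```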

## Job Continuation

Once a job is submitted and the answer is returned, the FutureHouse platform allows you to ask follow-up questions to the previous job.
This is also possible through the platform API.
To accomplish that, we can use the `runtime_config` we discussed in the [Job submission](#job-submission) section.

```python
from crow_client import CrowClient, JobNames
from crow_client.models import AuthType, Stage

client = CrowClient(
    stage=Stage.PROD,
    auth_type=AuthType.API_KEY,
    api_key="your_api_key",
)

job_data = {"name": JobNames.CROW, "query": "How many species of birds are there?"}

job_id = client.create_job(job_data)

continued_job_data = {
    "name": JobNames.CROW,
    "query": "From the previous answer, specifically, how many species of crows are there?",
    "runtime_config": {"continued_job_id": job_id},
}

continued_job_id = client.create_job(continued_job_data)
```

## Job retrieval

Once a job is submitted, you can retrieve it by calling the `get_job` method, which receives a job id and returns a `JobResponse` object.

```python
from crow_client import CrowClient
from crow_client.models import AuthType

client = CrowClient(
    auth_type=AuthType.API_KEY,
    api_key="your_api_key",
)

job_id = "job_id"

job_status = client.get_job(job_id)
```

`job_status` contains information about the job, for instance its `status`, `task`, `environment_name`, and `agent_name`, along with other fields specific to the job.
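
Because jobs run asynchronously, a simple way to wait for a result is to poll `get_job` until the status reaches a terminal value. The bundled notebook polls for `"success"`, which is what this sketch assumes; other terminal states (e.g. failures) would need their own handling:

```python
import time

from crow_client import CrowClient, JobNames
from crow_client.models import AuthType, Stage

client = CrowClient(
    stage=Stage.PROD,
    auth_type=AuthType.API_KEY,
    api_key="your_api_key",
)

job_id = client.create_job(
    {"name": JobNames.CROW, "query": "How many species of birds are there?"}
)

# Poll until the job reports success; "success" is the terminal status the
# bundled notebook checks for.
while client.get_job(job_id).status != "success":
    time.sleep(5)

print(client.get_job(job_id).status)
```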
futurehouse_client-0.0.3/README.md
ADDED
@@ -0,0 +1,187 @@
(The new README.md is identical, line for line, to the README body embedded in the PKG-INFO above.)
{futurehouse_client-0.0.1 → futurehouse_client-0.0.3}/docs/client_notebook.ipynb
RENAMED
@@ -9,18 +9,19 @@
   },
   {
    "cell_type": "code",
-   "execution_count":
+   "execution_count": null,
    "metadata": {},
    "outputs": [],
    "source": [
     "import sys\n",
     "from pathlib import Path\n",
-    "
+    "\n",
+    "sys.path.insert(0, str(Path(\"..\").resolve()))"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count":
+   "execution_count": null,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -48,14 +49,14 @@
   },
   {
    "cell_type": "code",
-   "execution_count":
+   "execution_count": null,
    "metadata": {},
    "outputs": [],
    "source": [
     "client = Client(\n",
     "    stage=Stage.PROD,\n",
     "    auth_type=AuthType.API_KEY,\n",
-    "    api_key=\"
+    "    api_key=\"your-api-key\",\n",
     ")"
    ]
   },
@@ -75,7 +76,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count":
+   "execution_count": null,
    "metadata": {},
    "outputs": [
    {
@@ -115,7 +116,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count":
+   "execution_count": null,
    "metadata": {},
    "outputs": [
    {
@@ -216,7 +217,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count":
+   "execution_count": null,
    "metadata": {},
    "outputs": [
    {
@@ -267,10 +268,7 @@
    }
   ],
   "source": [
-   "job_data = JobRequest(\n",
-   "    name=JobNames.CROW,\n",
-   "    query=\"How many species of birds are there?\"\n",
-   ")\n",
+   "job_data = JobRequest(name=JobNames.CROW, query=\"How many species of birds are there?\")\n",
    "\n",
    "job_id = client.create_job(job_data)\n",
    "while client.get_job().status != \"success\":\n",
@@ -281,7 +279,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count":
+   "execution_count": null,
    "metadata": {},
    "outputs": [
    {
@@ -317,9 +315,7 @@
    "continued_job_data = {\n",
    "    \"name\": JobNames.CROW,\n",
    "    \"query\": \"From the previous answer, specifically,how many species of crows are there?\",\n",
-   "    \"runtime_config\": {\n",
-   "        \"continued_job_id\": job_id\n",
-   "    }\n",
+   "    \"runtime_config\": {\"continued_job_id\": job_id},\n",
    "}\n",
    "\n",
    "continued_job_id = client.create_job(continued_job_data)\n",
@@ -345,8 +341,7 @@
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
-  "pygments_lexer": "ipython3"
-  "version": "3.12.9"
+  "pygments_lexer": "ipython3"
  }
 },
 "nbformat": 4,
{futurehouse_client-0.0.1 → futurehouse_client-0.0.3}/docs/client_notebook.md
RENAMED
@@ -4,7 +4,7 @@ jupyter:
   text_representation:
    extension: .md
    format_name: markdown
-   format_version:
+   format_version: "1.3"
    jupytext_version: 1.16.7
  kernelspec:
   display_name: .venv
@@ -42,7 +42,6 @@ client = CrowClient(

 ## Submit a job

-
 Submitting jobs is done by calling the `create_job` method, which receives a `JobRequest` object.

 ```python
{futurehouse_client-0.0.1 → futurehouse_client-0.0.3}/docs/crow_client_notebook_w_deployment.ipynb
RENAMED
@@ -2,7 +2,7 @@
  "cells": [
   {
    "cell_type": "code",
-   "execution_count":
+   "execution_count": null,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -12,9 +12,9 @@
    "from futurehouse_client import CrowClient, JobNames\n",
    "from futurehouse_client.models import (\n",
    "    AuthType,\n",
-   "    CrowDeploymentConfig,\n",
    "    DockerContainerConfiguration,\n",
    "    FramePath,\n",
+   "    JobDeploymentConfig,\n",
    "    JobRequest,\n",
    "    RuntimeConfig,\n",
    "    Stage,\n",
@@ -24,7 +24,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count":
+   "execution_count": null,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -46,13 +46,13 @@
    "job_data = JobRequest(\n",
    "    name=JobNames.from_string(\"crow\"),\n",
    "    query=\"How many species of insects are there in the world?\",\n",
-   "    n_replicates=2
+   "    n_replicates=2,\n",
    ")\n",
    "\n",
    "job_data = {\n",
    "    \"name\": JobNames.from_string(\"crow\"),\n",
    "    \"query\": \"What does the fox say?\",\n",
-   "    \"n_replicates\": 2
+   "    \"n_replicates\": 2,\n",
    "}\n",
    "\n",
    "\n",
@@ -65,7 +65,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count":
+   "execution_count": null,
    "metadata": {},
    "outputs": [
    {
@@ -121,13 +121,14 @@
     " 'mayr1946thenumberof pages 7-7'}"
    ]
   },
-  "execution_count":
+  "execution_count": null,
   "metadata": {},
   "output_type": "execute_result"
  }
 ],
 "source": [
  "from paperqa.utils import get_citenames\n",
+ "\n",
  "bug_id = \"d398d963-1e4c-4191-b9f2-c8df55376cb0\"\n",
  "job = client.get_job(bug_id)\n",
  "\n",
@@ -176,7 +177,7 @@
  " FramePath(path=\"state.single_pdb\", type=\"pdb\"),\n",
  "]\n",
  "\n",
- "crow =
+ "crow = JobDeploymentConfig(\n",
  "    path=Path(\"../../envs/dummy_env\"),\n",
  "    environment=\"dummy_env.env.DummyEnv\",\n",
  "    requires_aviary_internal=False,\n",
@@ -217,8 +218,7 @@
  "mimetype": "text/x-python",
  "name": "python",
  "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3"
- "version": "3.12.9"
+ "pygments_lexer": "ipython3"
 }
},
"nbformat": 4,
futurehouse_client-0.0.3/futurehouse_client/__init__.py
ADDED
@@ -0,0 +1,12 @@
from .clients.job_client import JobClient, JobNames
from .clients.rest_client import PQATaskResponse, TaskResponse, TaskResponseVerbose
from .clients.rest_client import RestClient as FutureHouseClient

__all__ = [
    "FutureHouseClient",
    "JobClient",
    "JobNames",
    "PQATaskResponse",
    "TaskResponse",
    "TaskResponseVerbose",
]
futurehouse_client-0.0.3/futurehouse_client/clients/__init__.py
ADDED
@@ -0,0 +1,12 @@
from .job_client import JobClient, JobNames
from .rest_client import PQATaskResponse, TaskResponse, TaskResponseVerbose
from .rest_client import RestClient as FutureHouseClient

__all__ = [
    "FutureHouseClient",
    "JobClient",
    "JobNames",
    "PQATaskResponse",
    "TaskResponse",
    "TaskResponseVerbose",
]
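
Given the re-exports added in these `__init__.py` files, the 0.0.3 package is imported under the `futurehouse_client` name rather than `crow_client`. A minimal sketch of the new import surface follows; the constructor arguments are assumed to mirror the REST client documented in the README above, so check `rest_client.py` for the actual signature:

```python
from futurehouse_client import FutureHouseClient, JobNames

# FutureHouseClient is RestClient re-exported at the package top level.
# Keyword arguments are assumed to match the CrowClient examples above.
client = FutureHouseClient(api_key="your_api_key")

job_id = client.create_job({"name": JobNames.CROW, "query": "example query"})
```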