raccoonai 0.1.0a6__py3-none-any.whl → 0.1.0a7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- raccoonai/_base_client.py +7 -1
- raccoonai/_client.py +2 -2
- raccoonai/_version.py +1 -1
- raccoonai/resources/fleet.py +3 -12
- raccoonai/resources/lam.py +73 -422
- raccoonai/types/__init__.py +0 -2
- raccoonai/types/fleet_create_params.py +26 -12
- raccoonai/types/fleet_create_response.py +3 -5
- raccoonai/types/fleet_status_response.py +3 -5
- raccoonai/types/fleet_terminate_response.py +3 -5
- raccoonai/types/lam_integration_run_params.py +24 -3
- raccoonai/types/lam_integration_run_response.py +3 -3
- raccoonai/types/lam_run_params.py +36 -3
- raccoonai/types/lam_run_response.py +9 -1
- {raccoonai-0.1.0a6.dist-info → raccoonai-0.1.0a7.dist-info}/METADATA +16 -16
- {raccoonai-0.1.0a6.dist-info → raccoonai-0.1.0a7.dist-info}/RECORD +18 -20
- raccoonai/types/lam_extract_params.py +0 -68
- raccoonai/types/lam_extract_response.py +0 -31
- {raccoonai-0.1.0a6.dist-info → raccoonai-0.1.0a7.dist-info}/WHEEL +0 -0
- {raccoonai-0.1.0a6.dist-info → raccoonai-0.1.0a7.dist-info}/licenses/LICENSE +0 -0
raccoonai/types/fleet_create_params.py
@@ -2,10 +2,10 @@
 
 from __future__ import annotations
 
-from typing import Dict, List, Optional
-from typing_extensions import TypedDict
+from typing import Dict, List, Iterable, Optional
+from typing_extensions import Literal, TypedDict
 
-__all__ = ["FleetCreateParams", "Advanced", "Settings"]
+__all__ = ["FleetCreateParams", "Advanced", "AdvancedProxy", "Settings"]
 
 
 class FleetCreateParams(TypedDict, total=False):
@@ -15,13 +15,7 @@ class FleetCreateParams(TypedDict, total=False):
     solving.
     """
 
-
-    """
-    The name of the app for which the session is going to run for, used for
-    streamlining authentication.
-    """
-
-    browser_type: Optional[str]
+    browser_type: Optional[Literal["chromium", "firefox", "webkit"]]
     """The type of browser to use.
 
     Supported values include 'chromium', 'firefox', and 'webkit'.
@@ -49,12 +43,32 @@
     """The entrypoint url for the session."""
 
 
+class AdvancedProxy(TypedDict, total=False):
+    city: Optional[str]
+    """Target city."""
+
+    country: Optional[str]
+    """Target country (2-letter ISO code)."""
+
+    enable: bool
+    """Whether to use a proxy for the browser session."""
+
+    state: Optional[str]
+    """Target state (2-letter code)."""
+
+    zip: Optional[int]
+    """Target postal code."""
+
+
 class Advanced(TypedDict, total=False):
     block_ads: Optional[bool]
     """Whether to block advertisements during the browser session."""
 
-
-    """
+    extension_ids: Optional[Iterable[object]]
+    """list of extension ids"""
+
+    proxy: Optional[AdvancedProxy]
+    """Proxy details for the browser session."""
 
     solve_captchas: Optional[bool]
     """Whether to attempt automatic CAPTCHA solving."""
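The boolean proxy flag on `Advanced` is replaced by the richer `AdvancedProxy` shape above, and `browser_type` is now constrained to a `Literal`. A minimal sketch of how the new options might be passed when creating a fleet session, assuming the generated fleet resource exposes a `create` method returning the `FleetCreateResponse` model; the geo values and ad-blocking choice are illustrative:

```python
from raccoonai import RaccoonAI
from raccoonai.types.fleet_create_params import AdvancedProxy

client = RaccoonAI()

# Geo-targeted proxy settings replace the old boolean toggle; values are placeholders.
proxy: AdvancedProxy = {
    "enable": True,
    "country": "US",
    "state": "CA",
    "city": "San Francisco",
}

# `client.fleet.create` is assumed from the generated fleet resource in this release.
session = client.fleet.create(
    browser_type="chromium",  # must now be one of "chromium", "firefox", "webkit"
    advanced={"block_ads": True, "proxy": proxy},
)
print(session.session_id, session.status)
```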
raccoonai/types/fleet_create_response.py
@@ -1,5 +1,6 @@
 # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
 
+from typing_extensions import Literal
 
 from .._models import BaseModel
 
@@ -13,11 +14,8 @@ class FleetCreateResponse(BaseModel):
     session_id: str
     """A unique identifier for the created session."""
 
-    status: str
-    """The current status of the session.
-
-    Possible values include 'running', 'unknown', or 'terminated'.
-    """
+    status: Literal["starting", "running", "terminated", "completed", "unknown"]
+    """The current status of the session."""
 
     websocket_url: str
     """The WebSocket URL for interacting with the session."""
raccoonai/types/fleet_status_response.py
@@ -1,5 +1,6 @@
 # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
 
+from typing_extensions import Literal
 
 from .._models import BaseModel
 
@@ -10,8 +11,5 @@ class FleetStatusResponse(BaseModel):
     session_id: str
     """A unique identifier for the session."""
 
-    status: str
-    """The current status of the session.
-
-    Possible values include 'running', 'unknown', or 'terminated'.
-    """
+    status: Literal["starting", "running", "terminated", "completed", "unknown"]
+    """The current status of the session."""
raccoonai/types/fleet_terminate_response.py
@@ -1,5 +1,6 @@
 # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
 
+from typing_extensions import Literal
 
 from .._models import BaseModel
 
@@ -10,8 +11,5 @@ class FleetTerminateResponse(BaseModel):
     session_id: str
     """A unique identifier for the session."""
 
-    status: str
-    """The current status of the session.
-
-    Possible values include 'running', 'unknown', or 'terminated'.
-    """
+    status: Literal["starting", "running", "terminated", "completed", "unknown"]
+    """The current status of the session."""
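All three fleet response models above now type `status` as a `Literal` instead of a bare string, so downstream checks can be written against the documented set of states. A small sketch; the helper and the choice of which states count as "active" are assumptions, not part of the SDK:

```python
from raccoonai.types.fleet_status_response import FleetStatusResponse


def is_session_active(resp: FleetStatusResponse) -> bool:
    # "starting" and "running" are the pre-terminal states in the new Literal;
    # "terminated", "completed", and "unknown" are treated as inactive here.
    return resp.status in ("starting", "running")
```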
raccoonai/types/lam_integration_run_params.py
@@ -2,12 +2,13 @@
 
 from __future__ import annotations
 
-from typing import Union, Optional
+from typing import Union, Iterable, Optional
 from typing_extensions import Literal, Required, TypedDict
 
 __all__ = [
     "LamIntegrationRunParamsBase",
     "Advanced",
+    "AdvancedProxy",
     "LamIntegrationRunParamsNonStreaming",
     "LamIntegrationRunParamsStreaming",
 ]
@@ -33,12 +34,32 @@ class LamIntegrationRunParamsBase(TypedDict, total=False):
     """Additional properties or data related to the particular integration."""
 
 
+class AdvancedProxy(TypedDict, total=False):
+    city: Optional[str]
+    """Target city."""
+
+    country: Optional[str]
+    """Target country (2-letter ISO code)."""
+
+    enable: bool
+    """Whether to use a proxy for the browser session."""
+
+    state: Optional[str]
+    """Target state (2-letter code)."""
+
+    zip: Optional[int]
+    """Target postal code."""
+
+
 class Advanced(TypedDict, total=False):
     block_ads: Optional[bool]
     """Whether to block advertisements during the browser session."""
 
-
-    """
+    extension_ids: Optional[Iterable[object]]
+    """list of extension ids"""
+
+    proxy: Optional[AdvancedProxy]
+    """Proxy details for the browser session."""
 
     solve_captchas: Optional[bool]
     """Whether to attempt automatic CAPTCHA solving."""
raccoonai/types/lam_integration_run_response.py
@@ -1,7 +1,7 @@
 # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
 
 from typing import List, Union
-from typing_extensions import TypeAlias
+from typing_extensions import Literal, TypeAlias
 
 from .._models import BaseModel
 
@@ -21,7 +21,7 @@ class UnionMember0(BaseModel):
     properties: object
     """Additional metadata or details related to the integration task."""
 
-    task_status: str
+    task_status: Literal["STARTING", "PROCESSING", "DONE", "HUMAN_INTERACTION", "FAILURE"]
     """The current status of the extraction task.
 
     For example: 'STARTING', 'PROCESSING', 'DONE', 'HUMAN_INTERACTION', or
@@ -42,7 +42,7 @@ class IntegrationResponse(BaseModel):
     properties: object
     """Additional metadata or details related to the integration task."""
 
-    task_status: str
+    task_status: Literal["STARTING", "PROCESSING", "DONE", "HUMAN_INTERACTION", "FAILURE"]
     """The current status of the extraction task.
 
     For example: 'STARTING', 'PROCESSING', 'DONE', 'HUMAN_INTERACTION', or
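`task_status` on both integration response models is likewise narrowed from a plain string to a `Literal`. A small sketch of mapping those states to a human-readable summary; the grouping of states is an assumption for illustration:

```python
from typing import Literal

# Mirrors the Literal now used by the integration (and lam run) response models.
TaskStatus = Literal["STARTING", "PROCESSING", "DONE", "HUMAN_INTERACTION", "FAILURE"]


def describe(task_status: TaskStatus) -> str:
    if task_status in ("STARTING", "PROCESSING"):
        return "still working"
    if task_status == "HUMAN_INTERACTION":
        return "paused, waiting on the end user"
    return "finished successfully" if task_status == "DONE" else "failed"
```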
raccoonai/types/lam_run_params.py
@@ -5,7 +5,7 @@ from __future__ import annotations
 from typing import Union, Iterable, Optional
 from typing_extensions import Literal, Required, TypedDict
 
-__all__ = ["LamRunParamsBase", "Advanced", "LamRunParamsNonStreaming", "LamRunParamsStreaming"]
+__all__ = ["LamRunParamsBase", "Advanced", "AdvancedProxy", "LamRunParamsNonStreaming", "LamRunParamsStreaming"]
 
 
 class LamRunParamsBase(TypedDict, total=False):
@@ -33,13 +33,46 @@ class LamRunParamsBase(TypedDict, total=False):
     while building a chat app to give the model context of the past conversation.
     """
 
+    max_count: Optional[int]
+    """The maximum number of results to extract."""
+
+    mode: Optional[Literal["deepsearch", "default"]]
+    """Mode of execution."""
+
+    schema: object
+    """The expected schema for the response.
+
+    This is a dictionary where the keys describe the fields and the values describe
+    their purposes.
+    """
+
+
+class AdvancedProxy(TypedDict, total=False):
+    city: Optional[str]
+    """Target city."""
+
+    country: Optional[str]
+    """Target country (2-letter ISO code)."""
+
+    enable: bool
+    """Whether to use a proxy for the browser session."""
+
+    state: Optional[str]
+    """Target state (2-letter code)."""
+
+    zip: Optional[int]
+    """Target postal code."""
+
 
 class Advanced(TypedDict, total=False):
     block_ads: Optional[bool]
     """Whether to block advertisements during the browser session."""
 
-
-    """
+    extension_ids: Optional[Iterable[object]]
+    """list of extension ids"""
+
+    proxy: Optional[AdvancedProxy]
+    """Proxy details for the browser session."""
 
     solve_captchas: Optional[bool]
     """Whether to attempt automatic CAPTCHA solving."""
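`lam.run` now accepts the extraction-oriented parameters (`schema`, `max_count`, `mode`) that previously lived on the removed extract types, alongside the new proxy and extension options. A sketch of a non-streaming call using them, based on the `client.lam.run` usage shown in the README diff below; the query and schema contents are placeholders:

```python
from raccoonai import RaccoonAI

client = RaccoonAI()

response = client.lam.run(
    query="Find YCombinator startups who got funded in W24.",
    raccoon_passcode="<end-user-raccoon-passcode>",
    schema={
        "name": "Name of the company",
        "funding_round": "The round in which they were funded, e.g. W24",
    },
    max_count=10,        # cap on the number of extracted results
    mode="deepsearch",   # or "default"
)
print(response.task_status, response.data)
```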
raccoonai/types/lam_run_response.py
@@ -1,5 +1,7 @@
 # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
 
+from typing import List
+from typing_extensions import Literal
 
 from .._models import BaseModel
 
@@ -7,6 +9,12 @@ __all__ = ["LamRunResponse"]
 
 
 class LamRunResponse(BaseModel):
+    data: List[object]
+    """The extracted data as a list of objects when the status is DONE.
+
+    Each object represents an extracted entity.
+    """
+
     livestream_url: str
     """The Livestream URL"""
 
@@ -16,7 +24,7 @@ class LamRunResponse(BaseModel):
     properties: object
     """Additional metadata or details related to the run task."""
 
-    task_status: str
+    task_status: Literal["STARTING", "PROCESSING", "DONE", "HUMAN_INTERACTION", "FAILURE"]
     """The current status of the extraction task.
 
     For example: 'STARTING', 'PROCESSING', 'DONE', 'HUMAN_INTERACTION', or
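With `data` and the `Literal` `task_status` now on `LamRunResponse`, a streaming consumer can key off the terminal states instead of matching arbitrary strings. A sketch following the streaming pattern from the README diff below; the failure handling is an assumption:

```python
from raccoonai import RaccoonAI

client = RaccoonAI()

stream = client.lam.run(
    query="Find YCombinator startups who got funded in W24.",
    raccoon_passcode="<end-user-raccoon-passcode>",
    stream=True,
)
for event in stream:
    if event.task_status == "DONE":
        # `data` is populated once the run reaches DONE.
        for item in event.data:
            print(item)
    elif event.task_status == "FAILURE":
        raise RuntimeError("LAM run failed")
```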
{raccoonai-0.1.0a6.dist-info → raccoonai-0.1.0a7.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: raccoonai
-Version: 0.1.0a6
+Version: 0.1.0a7
 Summary: The official Python library for the raccoonAI API
 Project-URL: Homepage, https://github.com/raccoonaihq/raccoonai-python
 Project-URL: Repository, https://github.com/raccoonaihq/raccoonai-python
@@ -38,7 +38,7 @@ The Raccoon AI Python library provides convenient access to the Raccoon AI REST
 application. The library includes type definitions for all request params and response fields,
 and offers both synchronous and asynchronous clients powered by [httpx](https://github.com/encode/httpx).
 
-It is generated with [Stainless](https://www.
+It is generated with [Stainless](https://www.stainless.com/).
 
 ## Documentation
 
@@ -66,10 +66,10 @@ client = RaccoonAI(
 )
 
 response = client.lam.run(
-    query="Find
+    query="Find YCombinator startups who got funded in W24.",
     raccoon_passcode="<end-user-raccoon-passcode>",
 )
-print(response.
+print(response.data)
 ```
 
 While you can provide a `secret_key` keyword argument,
@@ -95,10 +95,10 @@ client = AsyncRaccoonAI(
 
 async def main() -> None:
     response = await client.lam.run(
-        query="Find
+        query="Find YCombinator startups who got funded in W24.",
         raccoon_passcode="<end-user-raccoon-passcode>",
     )
-    print(response.
+    print(response.data)
 
 
 asyncio.run(main())
@@ -116,12 +116,12 @@ from raccoonai import RaccoonAI
 client = RaccoonAI()
 
 stream = client.lam.run(
-    query="Find
+    query="Find YCombinator startups who got funded in W24.",
     raccoon_passcode="<end-user-raccoon-passcode>",
     stream=True,
 )
 for response in stream:
-    print(response.
+    print(response.data)
 ```
 
 The async client uses the exact same interface.
@@ -132,12 +132,12 @@ from raccoonai import AsyncRaccoonAI
 client = AsyncRaccoonAI()
 
 stream = await client.lam.run(
-    query="Find
+    query="Find YCombinator startups who got funded in W24.",
     raccoon_passcode="<end-user-raccoon-passcode>",
     stream=True,
 )
 async for response in stream:
-    print(response.
+    print(response.data)
 ```
 
 ## Using types
@@ -166,7 +166,7 @@ client = RaccoonAI()
 
 try:
     client.lam.run(
-        query="Find
+        query="Find YCombinator startups who got funded in W24.",
         raccoon_passcode="<end-user-raccoon-passcode>",
     )
 except raccoonai.APIConnectionError as e:
@@ -212,7 +212,7 @@ client = RaccoonAI(
 
 # Or, configure per-request:
 client.with_options(max_retries=5).lam.run(
-    query="Find
+    query="Find YCombinator startups who got funded in W24.",
     raccoon_passcode="<end-user-raccoon-passcode>",
 )
 ```
@@ -238,7 +238,7 @@ client = RaccoonAI(
 
 # Override per-request:
 client.with_options(timeout=5.0).lam.run(
-    query="Find
+    query="Find YCombinator startups who got funded in W24.",
     raccoon_passcode="<end-user-raccoon-passcode>",
 )
 ```
@@ -282,13 +282,13 @@ from raccoonai import RaccoonAI
 
 client = RaccoonAI()
 response = client.lam.with_raw_response.run(
-    query="Find
+    query="Find YCombinator startups who got funded in W24.",
    raccoon_passcode="<end-user-raccoon-passcode>",
 )
 print(response.headers.get('X-My-Header'))
 
 lam = response.parse()  # get the object that `lam.run()` would have returned
-print(lam.
+print(lam.data)
 ```
 
 These methods return an [`APIResponse`](https://github.com/raccoonaihq/raccoonai-python/tree/main/src/raccoonai/_response.py) object.
@@ -303,7 +303,7 @@ To stream the response body, use `.with_streaming_response` instead, which requi
 
 ```python
 with client.lam.with_streaming_response.run(
-    query="Find
+    query="Find YCombinator startups who got funded in W24.",
     raccoon_passcode="<end-user-raccoon-passcode>",
 ) as response:
     print(response.headers.get("X-My-Header"))
{raccoonai-0.1.0a6.dist-info → raccoonai-0.1.0a7.dist-info}/RECORD
@@ -1,6 +1,6 @@
 raccoonai/__init__.py,sha256=ut5jCAExi9IzT9pzrjpsJoVhn6bzWI6jIXNXB-fXBY4,2523
-raccoonai/_base_client.py,sha256=
-raccoonai/_client.py,sha256=
+raccoonai/_base_client.py,sha256=u8s3PM-sDfScKytBJRlCxNiVuOz_Dbhwm-At4yS3DoQ,68824
+raccoonai/_client.py,sha256=0uI6X9zJX0m0Ld5hBzTPkRIBoFv2l3cgAh8S6gcHgSw,18402
 raccoonai/_compat.py,sha256=VWemUKbj6DDkQ-O4baSpHVLJafotzeXmCQGJugfVTIw,6580
 raccoonai/_constants.py,sha256=FkmVVcfVS3gR69v_fTrqA_qjakyxJHOWJcw3jpEck8Y,465
 raccoonai/_exceptions.py,sha256=Y-DcD2M8xkSw8IEkk4KHj73O8GQxCtWm4HWYQ02j7z8,3226
@@ -11,7 +11,7 @@ raccoonai/_resource.py,sha256=zfxyYCvzutc1dvCP-j9UPc1sn9U8F-X9gGyqleOvCxY,1118
 raccoonai/_response.py,sha256=q3bfYfS84vvIRPz_wL8djh6ir9UHGDzzF2l3gKDOWX8,28807
 raccoonai/_streaming.py,sha256=zHnkREZO5v33YJ7P0YZ7KhJET4ZzevGw1JzRY2-Mls4,10112
 raccoonai/_types.py,sha256=sN2zE-vBl9KBlBKL8fkN2DNZnItdjDl-3fTpP9cg69w,6146
-raccoonai/_version.py,sha256=
+raccoonai/_version.py,sha256=nLhygAgY4C0ZzwzE5XNmow6kK-dxMqylKS4tVi0zWZg,169
 raccoonai/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 raccoonai/_utils/__init__.py,sha256=PNZ_QJuzZEgyYXqkO1HVhGkj5IU9bglVUcw7H-Knjzw,2062
 raccoonai/_utils/_logs.py,sha256=Af3FKkE-LAPzYTl8bnFD4yPvPBIO-QyCra-r9_dSmOM,784
@@ -24,21 +24,19 @@ raccoonai/_utils/_typing.py,sha256=nTJz0jcrQbEgxwy4TtAkNxuU0QHHlmc6mQtA6vIR8tg,4
 raccoonai/_utils/_utils.py,sha256=8UmbPOy_AAr2uUjjFui-VZSrVBHRj6bfNEKRp5YZP2A,12004
 raccoonai/lib/.keep,sha256=wuNrz-5SXo3jJaJOJgz4vFHM41YH_g20F5cRQo0vLes,224
 raccoonai/resources/__init__.py,sha256=kSwRlkcSl9bYen9uCJOA5g0Fn0Z0TmVl_5oxvGjoTwU,950
-raccoonai/resources/fleet.py,sha256=
-raccoonai/resources/lam.py,sha256=
-raccoonai/types/__init__.py,sha256=
-raccoonai/types/fleet_create_params.py,sha256=
-raccoonai/types/fleet_create_response.py,sha256=
+raccoonai/resources/fleet.py,sha256=rkYwAS5SCmnmQ12Ena6j3kAKbWl-K85_EKfr5ABKh7E,18207
+raccoonai/resources/lam.py,sha256=9p_on4ukYau7SX7ato6xq_2LZDsxo8K4RJn5dtv4JOc,36494
+raccoonai/types/__init__.py,sha256=kvaRJdPzzx8pLMJ6F_PQtsd59cjSoYxBZTQU8LibUVQ,818
+raccoonai/types/fleet_create_params.py,sha256=mqUxuv01Y3uSI-DM5EMzLIEzSu-rQBBRkcpRxXvc3nE,2453
+raccoonai/types/fleet_create_response.py,sha256=eJ6GPZY7hqQ2XktCgXq4ui-pKWxhsVz1otFsos_l-b4,575
 raccoonai/types/fleet_logs_response.py,sha256=Lzamw3eqPNXzArU0ujxnXav2AwmIM8OZd7McMzheVNA,360
-raccoonai/types/fleet_status_response.py,sha256=
-raccoonai/types/fleet_terminate_response.py,sha256=
-raccoonai/types/
-raccoonai/types/
-raccoonai/types/
-raccoonai/types/
-raccoonai/
-raccoonai/
-raccoonai-0.1.
-raccoonai-0.1.
-raccoonai-0.1.0a6.dist-info/licenses/LICENSE,sha256=enGvZ2fGU7wGgMPWkgyWhnsFhCpxwdeG_selO_ovoTM,11340
-raccoonai-0.1.0a6.dist-info/RECORD,,
+raccoonai/types/fleet_status_response.py,sha256=TPbSlxg2ldPuTnH575D48AnKaMn6Aq7uBHK-264wceE,427
+raccoonai/types/fleet_terminate_response.py,sha256=AYadrUN5RGkT6RGVohNZg3c63Hf0JIZ4W6Drgw6rh6I,433
+raccoonai/types/lam_integration_run_params.py,sha256=YZS7sTbwUgqH5gS-UP1uDtAR9E-AlJEzzrUpqN0vgS0,2196
+raccoonai/types/lam_integration_run_response.py,sha256=ZBsHaaml-EbFgkjiicQqmn3ZVPbwf5bm2H3gfGXUlFU,1605
+raccoonai/types/lam_run_params.py,sha256=jwWRy1P8iV6_nsGhMUPv4BMI2IfDDsLO0W7ZmCHvH5Q,2603
+raccoonai/types/lam_run_response.py,sha256=pOBB0xmGZou7vMG-dmhUk6v5pMyJF4dXWnNWXAHvfW0,891
+raccoonai-0.1.0a7.dist-info/METADATA,sha256=mK16RZKgOJraoj6vvC71BMKsKa48JcGQmKkovTAMe4o,14355
+raccoonai-0.1.0a7.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+raccoonai-0.1.0a7.dist-info/licenses/LICENSE,sha256=enGvZ2fGU7wGgMPWkgyWhnsFhCpxwdeG_selO_ovoTM,11340
+raccoonai-0.1.0a7.dist-info/RECORD,,
raccoonai/types/lam_extract_params.py (deleted)
@@ -1,68 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-from typing import Union, Iterable, Optional
-from typing_extensions import Literal, Required, TypedDict
-
-__all__ = ["LamExtractParamsBase", "Advanced", "LamExtractParamsNonStreaming", "LamExtractParamsStreaming"]
-
-
-class LamExtractParamsBase(TypedDict, total=False):
-    query: Required[str]
-    """The input query string for the request. This is typically the main prompt."""
-
-    raccoon_passcode: Required[str]
-    """
-    The raccoon passcode associated with the end user on behalf of which the call is
-    being made.
-    """
-
-    advanced: Optional[Advanced]
-    """
-    Advanced configuration options for the session, such as ad-blocking and CAPTCHA
-    solving.
-    """
-
-    app_url: Optional[str]
-    """This is the entrypoint URL for the web agent."""
-
-    chat_history: Optional[Iterable[object]]
-    """
-    The history of the conversation as a list of messages or objects you might use
-    while building a chat app to give the model context of the past conversation.
-    """
-
-    max_count: Optional[int]
-    """The maximum number of results to extract."""
-
-    schema: object
-    """The expected schema for the response.
-
-    This is a dictionary where the keys describe the fields and the values describe
-    their purposes.
-    """
-
-
-class Advanced(TypedDict, total=False):
-    block_ads: Optional[bool]
-    """Whether to block advertisements during the browser session."""
-
-    proxy: Optional[bool]
-    """Whether to use a proxy for the browser session."""
-
-    solve_captchas: Optional[bool]
-    """Whether to attempt automatic CAPTCHA solving."""
-
-
-class LamExtractParamsNonStreaming(LamExtractParamsBase, total=False):
-    stream: Optional[Literal[False]]
-    """Whether the response should be streamed back or not."""
-
-
-class LamExtractParamsStreaming(LamExtractParamsBase):
-    stream: Required[Literal[True]]
-    """Whether the response should be streamed back or not."""
-
-
-LamExtractParams = Union[LamExtractParamsNonStreaming, LamExtractParamsStreaming]
raccoonai/types/lam_extract_response.py (deleted)
@@ -1,31 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from typing import List
-
-from .._models import BaseModel
-
-__all__ = ["LamExtractResponse"]
-
-
-class LamExtractResponse(BaseModel):
-    data: List[object]
-    """The extracted data as a list of objects when the status is DONE.
-
-    Each object represents an extracted entity.
-    """
-
-    livestream_url: str
-    """The Livestream URL"""
-
-    message: str
-    """A message providing the thought summary if the status is processing currently."""
-
-    properties: object
-    """Additional metadata or information related to the extraction task."""
-
-    task_status: str
-    """The current status of the extraction task.
-
-    For example: 'STARTING', 'PROCESSING', 'DONE', 'HUMAN_INTERACTION', or
-    'FAILURE'.
-    """
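The two deleted modules above belonged to the standalone extract endpoint; their extraction-specific fields (`schema`, `max_count`, `data`) now appear on `lam_run_params.py` and `lam_run_response.py`, which matches the large reduction in `resources/lam.py`. A hedged migration sketch, assuming the old client exposed the extract call as `client.lam.extract` (that method name is inferred from the deleted types, not shown in this diff):

```python
from raccoonai import RaccoonAI

client = RaccoonAI()

# Before (0.1.0a6), assuming an extract method matching the deleted params:
# result = client.lam.extract(
#     query="List the speakers on the conference page.",
#     raccoon_passcode="<end-user-raccoon-passcode>",
#     schema={"name": "Speaker name", "talk": "Talk title"},
#     max_count=25,
# )

# After (0.1.0a7), the same fields are accepted by lam.run:
result = client.lam.run(
    query="List the speakers on the conference page.",
    raccoon_passcode="<end-user-raccoon-passcode>",
    schema={"name": "Speaker name", "talk": "Talk title"},
    max_count=25,
)
print(result.data)
```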
{raccoonai-0.1.0a6.dist-info → raccoonai-0.1.0a7.dist-info}/WHEEL: file without changes
{raccoonai-0.1.0a6.dist-info → raccoonai-0.1.0a7.dist-info}/licenses/LICENSE: file without changes