simplai-sdk 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- billing/__init__.py +6 -0
- billing/api.py +55 -0
- billing/client.py +14 -0
- billing/schema.py +15 -0
- constants/__init__.py +90 -0
- core/__init__.py +53 -0
- core/agents/__init__.py +42 -0
- core/agents/execution/__init__.py +49 -0
- core/agents/execution/api.py +283 -0
- core/agents/execution/client.py +1139 -0
- core/agents/models.py +99 -0
- core/workflows/WORKFLOW_ARCHITECTURE.md +417 -0
- core/workflows/__init__.py +31 -0
- core/workflows/bulk/__init__.py +14 -0
- core/workflows/bulk/api.py +202 -0
- core/workflows/bulk/client.py +115 -0
- core/workflows/bulk/schema.py +58 -0
- core/workflows/models.py +49 -0
- core/workflows/scheduling/__init__.py +9 -0
- core/workflows/scheduling/api.py +179 -0
- core/workflows/scheduling/client.py +128 -0
- core/workflows/scheduling/schema.py +74 -0
- core/workflows/tool_execution/__init__.py +16 -0
- core/workflows/tool_execution/api.py +172 -0
- core/workflows/tool_execution/client.py +195 -0
- core/workflows/tool_execution/schema.py +40 -0
- exceptions/__init__.py +21 -0
- simplai_sdk/__init__.py +7 -0
- simplai_sdk/simplai.py +239 -0
- simplai_sdk-0.1.0.dist-info/METADATA +728 -0
- simplai_sdk-0.1.0.dist-info/RECORD +42 -0
- simplai_sdk-0.1.0.dist-info/WHEEL +5 -0
- simplai_sdk-0.1.0.dist-info/licenses/LICENSE +21 -0
- simplai_sdk-0.1.0.dist-info/top_level.txt +7 -0
- traces/__init__.py +1 -0
- traces/agents/__init__.py +55 -0
- traces/agents/api.py +350 -0
- traces/agents/client.py +697 -0
- traces/agents/models.py +249 -0
- traces/workflows/__init__.py +0 -0
- utils/__init__.py +0 -0
- utils/config.py +117 -0
|
@@ -0,0 +1,202 @@
|
|
|
1
|
+
"""Low-level HTTP API layer for bulk workflow execution."""
|
|
2
|
+
|
|
3
|
+
from typing import Any, Dict, List
|
|
4
|
+
import httpx
|
|
5
|
+
|
|
6
|
+
from .schema import (
|
|
7
|
+
BulkRunRequest,
|
|
8
|
+
BulkRunResponse,
|
|
9
|
+
InputFieldMapping,
|
|
10
|
+
BulkRunStatusResponse,
|
|
11
|
+
BulkRunCancelResponse,
|
|
12
|
+
)
|
|
13
|
+
from exceptions import APIException
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
from constants import WORKFLOW_BASE_URL as BASE_URL
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
async def trigger_bulk_run_api(
    workflow_id: str,
    file_link: str,
    batch_size: int,
    input_mapping: List[Dict[str, str]],
    api_key: str,
) -> BulkRunResponse:
    """
    Trigger a bulk workflow run via the REST API.

    Args:
        workflow_id: The workflow ID to execute
        file_link: URL/link to the input file
        batch_size: Number of records to process per batch
        input_mapping: List of dictionaries mapping app_input_field to file_input_field
            Example: [{"app_input_field": "num_1", "file_input_field": "A"}]
        api_key: API key for authentication

    Returns:
        BulkRunResponse with execution_id and status

    Raises:
        APIException: If the API request fails
    """
    endpoint = f"{BASE_URL}/bulk/run"
    request_headers = {
        "Accept": "application/json",
        "Content-Type": "application/json",
        "PIM-SID": api_key,
    }

    # Promote the raw mapping dictionaries to typed schema objects.
    mappings = []
    for entry in input_mapping:
        mappings.append(
            InputFieldMapping(
                app_input_field=entry["app_input_field"],
                file_input_field=entry["file_input_field"],
            )
        )

    payload = BulkRunRequest(
        file_url=file_link,
        tool_id=workflow_id,  # API calls the workflow id "tool_id"
        batch_size=batch_size,
        input_fields_mapping=mappings,
    )

    async with httpx.AsyncClient() as http:
        try:
            resp = await http.post(
                endpoint,
                headers=request_headers,
                json=payload.model_dump(),
                timeout=30.0,
            )
            resp.raise_for_status()
        except httpx.HTTPStatusError as err:
            raise APIException(
                f"API request failed with status {err.response.status_code}: {err.response.text}",
                status_code=err.response.status_code,
            )
        except httpx.RequestError as err:
            raise APIException(f"Request failed: {str(err)}")

        body = resp.json()
        return BulkRunResponse(
            execution_id=body.get("execution_id", ""),
            status=body.get("status", ""),
        )
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
async def get_bulk_run_status_api(
    bulk_run_id: str,
    api_key: str,
) -> BulkRunStatusResponse:
    """
    Get the bulk run status by bulk run ID.

    Args:
        bulk_run_id: The bulk run execution ID to look up
        api_key: API key for authentication

    Returns:
        BulkRunStatusResponse with status and batch progress counters

    Raises:
        APIException: If the API request fails
    """
    # NOTE: Endpoint path uses singular 'run' (not 'runs'), e.g.:
    # /bulk/run/{execution_id}/status
    endpoint = f"{BASE_URL}/bulk/run/{bulk_run_id}/status"
    request_headers = {
        "Accept": "application/json",
        "Content-Type": "application/json",
        "PIM-SID": api_key,
    }

    async with httpx.AsyncClient() as http:
        try:
            resp = await http.get(
                endpoint,
                headers=request_headers,
                timeout=30.0,
            )
            resp.raise_for_status()
        except httpx.HTTPStatusError as err:
            raise APIException(
                f"API request failed with status {err.response.status_code}: {err.response.text}",
                status_code=err.response.status_code,
            )
        except httpx.RequestError as err:
            raise APIException(f"Request failed: {str(err)}")

        body = resp.json()
        return BulkRunStatusResponse(
            execution_id=body.get("execution_id", ""),
            status=body.get("status", ""),
            batch_completed=body.get("batch_completed", 0),
            total_batches=body.get("total_batches", 0),
            trace_id=body.get("trace_id"),
        )
|
|
132
|
+
|
|
133
|
+
|
|
134
|
+
async def cancel_bulk_run_api(
    bulk_run_id: str,
    api_key: str,
) -> BulkRunCancelResponse:
    """
    Cancel a bulk run via the REST API.

    Args:
        bulk_run_id: The bulk run execution ID to cancel
        api_key: API key for authentication

    Returns:
        BulkRunCancelResponse with the post-cancel status

    Raises:
        APIException: If the API request fails
    """
    # Cancellation is exposed under /bulk/execution (not /bulk/run) and
    # uses PUT, matching the server's route definition.
    endpoint = f"{BASE_URL}/bulk/execution/{bulk_run_id}/cancel"
    request_headers = {
        "Accept": "application/json",
        "Content-Type": "application/json",
        "PIM-SID": api_key,
    }

    async with httpx.AsyncClient() as http:
        try:
            resp = await http.put(
                endpoint,
                headers=request_headers,
                timeout=30.0,
            )
            resp.raise_for_status()
        except httpx.HTTPStatusError as err:
            raise APIException(
                f"API request failed with status {err.response.status_code}: {err.response.text}",
                status_code=err.response.status_code,
            )
        except httpx.RequestError as err:
            raise APIException(f"Request failed: {str(err)}")

        body = resp.json()
        return BulkRunCancelResponse(
            execution_id=body.get("execution_id", ""),
            status=body.get("status", ""),
            trace_id=body.get("trace_id"),
        )
|
|
170
|
+
|
|
171
|
+
|
|
172
|
+
async def download_bulk_run_result_api(
    bulk_run_id: str,
    api_key: str,
) -> str:
    """
    Download the bulk run result as raw CSV text.

    Args:
        bulk_run_id: The bulk run execution ID whose result to fetch
        api_key: API key for authentication

    Returns:
        The CSV payload as a string (the API serves text/csv).

    Raises:
        APIException: If the API request fails
    """
    endpoint = f"{BASE_URL}/bulk/run/{bulk_run_id}/download"
    request_headers = {
        "Accept": "text/csv",
        "Content-Type": "application/json",
        "PIM-SID": api_key,
    }

    async with httpx.AsyncClient() as http:
        try:
            # Longer timeout than the other endpoints: result files can be large.
            resp = await http.get(
                endpoint,
                headers=request_headers,
                timeout=60.0,
            )
            resp.raise_for_status()
        except httpx.HTTPStatusError as err:
            raise APIException(
                f"API request failed with status {err.response.status_code}: {err.response.text}",
                status_code=err.response.status_code,
            )
        except httpx.RequestError as err:
            raise APIException(f"Request failed: {str(err)}")

        # The API returns raw CSV (text/csv)
        return resp.text
|
|
@@ -0,0 +1,115 @@
|
|
|
1
|
+
"""Public SDK interface for bulk workflow execution."""
|
|
2
|
+
|
|
3
|
+
from typing import Any, Dict, List

from .api import trigger_bulk_run_api
from .api import get_bulk_run_status_api
from .api import cancel_bulk_run_api
from .api import download_bulk_run_result_api
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
async def trigger_bulk_run(
    workflow_id: str,
    file_link: str,
    batch_size: int,
    input_mapping: List[Dict[str, str]],
    api_key: str,
) -> Dict[str, str]:
    """
    Trigger a bulk run of workflows.

    Args:
        workflow_id: The workflow ID to execute
        file_link: URL/link to the input file containing the data
        batch_size: Number of records to process per batch
        input_mapping: List of dictionaries mapping workflow input fields
            to file columns. Each dictionary should have:
            - "app_input_field": The workflow input field name
            - "file_input_field": The file column/field name
            Example: [
                {"app_input_field": "num_1", "file_input_field": "A"},
                {"app_input_field": "num_2", "file_input_field": "B"}
            ]
        api_key: API key for authentication

    Returns:
        Dictionary with "bulk_run_id" and "status" keys.
    """
    api_response = await trigger_bulk_run_api(
        workflow_id=workflow_id,
        file_link=file_link,
        batch_size=batch_size,
        input_mapping=input_mapping,
        api_key=api_key,
    )
    # The SDK surface renames execution_id to bulk_run_id.
    return {
        "bulk_run_id": api_response.execution_id,
        "status": api_response.status,
    }
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
async def get_bulk_run_status(
    bulk_run_id: str,
    api_key: str,
) -> Dict[str, Any]:
    """
    Get the bulk run status by bulk run ID.

    Args:
        bulk_run_id: The bulk run ID to check
        api_key: API key for authentication

    Returns:
        Dictionary describing the run's progress:
        - "bulk_run_id" (str)
        - "status" (str)
        - "batch_completed" (int)
        - "total_batches" (int)
        - "trace_id" (str or None)

        Note: the value type is Any (not str) because the batch counters
        are ints and trace_id may be None.
    """
    response = await get_bulk_run_status_api(
        bulk_run_id=bulk_run_id,
        api_key=api_key,
    )

    return {
        "bulk_run_id": response.execution_id,
        "status": response.status,
        "batch_completed": response.batch_completed,
        "total_batches": response.total_batches,
        "trace_id": response.trace_id,
    }
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
async def cancel_bulk_run(
    bulk_run_id: str,
    api_key: str,
) -> Dict[str, str]:
    """
    Cancel a bulk run by bulk run ID.

    Args:
        bulk_run_id: The bulk run ID to cancel
        api_key: API key for authentication

    Returns:
        Dictionary with "bulk_run_id" and "status"; "trace_id" is included
        only when the API returned one.
    """
    api_response = await cancel_bulk_run_api(
        bulk_run_id=bulk_run_id,
        api_key=api_key,
    )

    payload: Dict[str, str] = {
        "bulk_run_id": api_response.execution_id,
        "status": api_response.status,
    }
    if api_response.trace_id:
        payload["trace_id"] = api_response.trace_id
    return payload
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
async def download_bulk_run_result(
    bulk_run_id: str,
    api_key: str,
) -> str:
    """
    Download the bulk run result as CSV text for the given bulk_run_id.

    Args:
        bulk_run_id: The bulk run ID whose result file to download
        api_key: API key for authentication

    Returns:
        The result as raw CSV text.
    """
    csv_text = await download_bulk_run_result_api(
        bulk_run_id=bulk_run_id,
        api_key=api_key,
    )
    return csv_text
|
|
115
|
+
|
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
"""Pydantic schemas for bulk workflow execution API requests and responses."""
|
|
2
|
+
|
|
3
|
+
from typing import Any, Dict, List, Optional
|
|
4
|
+
from pydantic import BaseModel, Field
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class InputFieldMapping(BaseModel):
    """Schema for input field mapping.

    Pairs a workflow input field with the file column that supplies
    its value in a bulk run.
    """
    app_input_field: str = Field(..., description="The workflow input field name")
    file_input_field: str = Field(..., description="The file column/field name")
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class BulkRunRequest(BaseModel):
    """Request schema for triggering a bulk workflow run.

    Note: the wire field is named ``tool_id`` while the SDK-level
    parameter is called ``workflow_id`` (see trigger_bulk_run_api).
    """
    file_url: str = Field(..., description="URL/link to the input file")
    tool_id: str = Field(..., description="The workflow ID")
    batch_size: int = Field(..., description="Number of records to process per batch")
    input_fields_mapping: List[InputFieldMapping] = Field(
        ...,
        description="Mapping between workflow input fields and file columns"
    )
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class BulkRunResponse(BaseModel):
    """Response schema from bulk run API."""
    # NOTE(review): the alias is identical to the field name, so it is
    # redundant; kept for explicit parity with the wire format.
    execution_id: str = Field(..., alias="execution_id", description="The bulk run execution ID")
    status: str = Field(..., description="Execution status (e.g., ACCEPTED)")

    class Config:
        # Allow constructing by field name as well as by alias.
        populate_by_name = True
|
|
31
|
+
|
|
32
|
+
class BulkRunStatusRequest(BaseModel):
    """Request schema for getting the bulk run status.

    NOTE(review): the bulk API layer in this version passes the id in the
    URL path and does not appear to use this model — confirm before removing.
    """
    execution_id: str = Field(..., description="The bulk run ID")

    class Config:
        # Allow constructing by field name as well as by alias.
        populate_by_name = True
|
|
38
|
+
|
|
39
|
+
class BulkRunStatusResponse(BaseModel):
    """Response schema from bulk run client."""
    execution_id: str = Field(..., alias="execution_id", description="The bulk run execution ID")
    status: str = Field(..., description="Execution status (e.g., ACCEPTED, COMPLETED, FAILED, CANCELLED)")
    # Progress counters: batch_completed out of total_batches.
    batch_completed: int = Field(..., description="Number of completed batches")
    total_batches: int = Field(..., description="Total number of batches")
    trace_id: Optional[str] = Field(None, description="Trace identifier for the bulk run")

    class Config:
        # Allow constructing by field name as well as by alias.
        populate_by_name = True
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
class BulkRunCancelResponse(BaseModel):
    """Response schema from bulk run cancel API."""
    execution_id: str = Field(..., alias="execution_id", description="The bulk run execution ID")
    status: str = Field(..., description="Execution status after cancel (e.g., CANCELLED)")
    trace_id: Optional[str] = Field(None, description="Trace identifier for the bulk run")

    class Config:
        # Allow constructing by field name as well as by alias.
        populate_by_name = True
|
core/workflows/models.py
ADDED
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from dataclasses import dataclass
|
|
4
|
+
from enum import Enum
|
|
5
|
+
from typing import Any, Dict
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class ExecutionStatus(str, Enum):
    """Known execution status values returned by the Simplai workflow API."""

    PENDING = "PENDING"
    RUNNING = "RUNNING"
    SUCCEEDED = "SUCCEEDED"
    FAILED = "FAILED"
    CANCELED = "CANCELED"
    TIMEOUT = "TIMEOUT"
    UNKNOWN = "UNKNOWN"

    @classmethod
    def from_raw(cls, value: Any) -> "ExecutionStatus":
        """Convert a raw status value from the API into an ExecutionStatus.

        Non-string inputs and unrecognized strings map to UNKNOWN;
        matching is case-insensitive.
        """
        if isinstance(value, str):
            try:
                # Enum value lookup: resolves the upper-cased string to a member.
                return cls(value.upper())
            except ValueError:
                pass
        return cls.UNKNOWN
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
@dataclass
class WorkflowResult:
    """Final result of a workflow execution in synchronous mode."""

    # Identifier of the execution this result belongs to.
    execution_id: str
    # Normalized status (typically produced via ExecutionStatus.from_raw).
    status: ExecutionStatus
    # Result data for the run — presumably the raw API response body;
    # TODO confirm against the caller that populates it.
    payload: Dict[str, Any]

    @property
    def succeeded(self) -> bool:
        """True if the workflow completed successfully."""
        return self.status == ExecutionStatus.SUCCEEDED
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
class WorkflowExecutionError(Exception):
    """Raised when a workflow fails, times out, or cannot be executed.

    Plain Exception subclass: carries only the message given at raise
    time, no extra attributes.
    """
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
|
|
@@ -0,0 +1,179 @@
|
|
|
1
|
+
"""Low-level HTTP API layer for scheduled workflow execution."""
|
|
2
|
+
|
|
3
|
+
from math import log
|
|
4
|
+
from typing import Any, Dict, List, Optional, Union
|
|
5
|
+
import httpx
|
|
6
|
+
|
|
7
|
+
from exceptions import APIException
|
|
8
|
+
|
|
9
|
+
from .schema import (
|
|
10
|
+
ScheduleRunRequest,
|
|
11
|
+
ScheduleRunResponse,
|
|
12
|
+
ScheduleRunResult,
|
|
13
|
+
ScheduleCancelRequest,
|
|
14
|
+
ScheduleCancelResponse,
|
|
15
|
+
BulkInputFieldMapping,
|
|
16
|
+
ManualInputFieldMapping,
|
|
17
|
+
)
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
from constants import WORKFLOW_BASE_URL as BASE_URL
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
async def schedule_run_api(
    workflow_id: str,
    scheduled_run_type: str,
    cron_expression: str,
    time_zone: str,
    input_mapping: List[Dict[str, str]],
    api_key: str,
    file_url: Optional[str] = None,
    batch_size: Optional[int] = None,
) -> ScheduleRunResponse:
    """
    Schedule a workflow run via the REST API (supports both BULK and MANUAL types).

    Args:
        workflow_id: The workflow ID to execute
        scheduled_run_type: Type of scheduled run - 'BULK' or 'MANUAL'
        cron_expression: Cron expression for scheduling (e.g., "47 22 11 2 1 ?")
        time_zone: Time zone for the schedule (e.g., "Asia/Kolkata")
        input_mapping: List of dictionaries with input field mappings.
            For BULK: [{"app_input_field": "x", "file_input_field": "A"}]
            For MANUAL: [{"app_input_field": "x", "value": "y"}]
        api_key: API key for authentication
        file_url: URL/link to the input file (required for BULK, ignored for MANUAL)
        batch_size: Number of records to process per batch (required for BULK, optional for MANUAL)

    Returns:
        ScheduleRunResponse with execution_id and status

    Raises:
        APIException: If the API request fails
        ValueError: If required fields are missing for the scheduled_run_type
    """
    # Validate required fields based on scheduled_run_type
    scheduled_run_type_upper = scheduled_run_type.upper()
    if scheduled_run_type_upper not in ["BULK", "MANUAL"]:
        raise ValueError(f"scheduled_run_type must be 'BULK' or 'MANUAL', got '{scheduled_run_type}'")

    if scheduled_run_type_upper == "BULK":
        if not file_url:
            raise ValueError("file_url is required when scheduled_run_type is 'BULK'")

    # Build the request body. batch_size is always forwarded (it may be
    # None for MANUAL runs) — presumably the server ignores it when
    # unused; TODO confirm against the API contract.
    request_data: Dict[str, Any] = {
        "tool_id": workflow_id,
        "scheduled_run_type": scheduled_run_type_upper,
        "cron_expression": cron_expression,
        "time_zone": time_zone,
        "input_fields_mapping": input_mapping,
        "batch_size": batch_size,
    }

    # Add BULK-specific / shared fields if applicable
    if scheduled_run_type_upper == "BULK":
        request_data["file_url"] = file_url

    # (Removed a leftover debug print of request_data: it leaked request
    # payloads, including user input values, to stdout in library code.)

    url = f"{BASE_URL}/schedule/run"
    headers = {
        "Accept": "application/json",
        "Content-Type": "application/json",
        "PIM-SID": api_key,
    }

    async with httpx.AsyncClient() as client:
        try:
            response = await client.post(
                url,
                headers=headers,
                json=request_data,
                timeout=30.0,
            )
            response.raise_for_status()
            data = response.json()

            # Parse nested result object if present
            result_obj = None
            if "result" in data and data["result"]:
                result_data = data["result"]
                result_obj = ScheduleRunResult(
                    unique_id=result_data.get("unique_id", ""),
                    job_master_id=result_data.get("job_master_id", ""),
                )

            return ScheduleRunResponse(
                app_id=data.get("app_id", ""),
                status=data.get("status", ""),
                execution_mode=data.get("execution_mode"),
                result=result_obj,
                trace_id=data.get("trace_id"),
            )
        except httpx.HTTPStatusError as e:
            raise APIException(
                f"API request failed with status {e.response.status_code}: {e.response.text}",
                status_code=e.response.status_code,
            )
        except httpx.RequestError as e:
            raise APIException(f"Request failed: {str(e)}")
|
|
124
|
+
|
|
125
|
+
|
|
126
|
+
async def cancel_schedule_run_api(
    job_master_id: Union[int, str],
    unique_id: str,
    api_key: str,
) -> ScheduleCancelResponse:
    """
    Cancel a scheduled run via the REST API.

    Args:
        job_master_id: Job master identifier (can be int or str)
        unique_id: Unique identifier for the scheduled run
        api_key: API key for authentication

    Returns:
        ScheduleCancelResponse with status and trace_id

    Raises:
        APIException: If the API request fails
    """
    endpoint = f"{BASE_URL}/schedule/cancel"
    request_headers = {
        "Accept": "application/json",
        "Content-Type": "application/json",
        "PIM-SID": api_key,
    }
    # Both identifiers travel in the JSON body (not the URL path).
    body = {
        "job_master_id": job_master_id,
        "unique_id": unique_id,
    }

    async with httpx.AsyncClient() as http:
        try:
            resp = await http.put(
                endpoint,
                headers=request_headers,
                json=body,
                timeout=30.0,
            )
            resp.raise_for_status()
        except httpx.HTTPStatusError as err:
            raise APIException(
                f"API request failed with status {err.response.status_code}: {err.response.text}",
                status_code=err.response.status_code,
            )
        except httpx.RequestError as err:
            raise APIException(f"Request failed: {str(err)}")

        payload = resp.json()
        return ScheduleCancelResponse(
            status=payload.get("status", ""),
            trace_id=payload.get("trace_id"),
        )
|
|
179
|
+
|