dbos 1.5.0a4__py3-none-any.whl → 1.5.0a5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dbos/_admin_server.py +87 -2
- {dbos-1.5.0a4.dist-info → dbos-1.5.0a5.dist-info}/METADATA +1 -1
- {dbos-1.5.0a4.dist-info → dbos-1.5.0a5.dist-info}/RECORD +6 -6
- {dbos-1.5.0a4.dist-info → dbos-1.5.0a5.dist-info}/WHEEL +0 -0
- {dbos-1.5.0a4.dist-info → dbos-1.5.0a5.dist-info}/entry_points.txt +0 -0
- {dbos-1.5.0a4.dist-info → dbos-1.5.0a5.dist-info}/licenses/LICENSE +0 -0
dbos/_admin_server.py
CHANGED
@@ -5,7 +5,7 @@ import re
 import threading
 from functools import partial
 from http.server import BaseHTTPRequestHandler, ThreadingHTTPServer
-from typing import TYPE_CHECKING, Any, List, Optional, TypedDict
+from typing import TYPE_CHECKING, Any, List, Optional, TypedDict, Dict

 from dbos._workflow_commands import garbage_collect, global_timeout

@@ -24,6 +24,8 @@ _deactivate_path = "/deactivate"
 _workflow_queues_metadata_path = "/dbos-workflow-queues-metadata"
 _garbage_collect_path = "/dbos-garbage-collect"
 _global_timeout_path = "/dbos-global-timeout"
+_queued_workflows_path = "/queues"
+_workflows_path = "/workflows"
 # /workflows/:workflow_id/cancel
 # /workflows/:workflow_id/resume
 # /workflows/:workflow_id/restart
@@ -104,10 +106,24 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
             steps_match = re.match(
                 r"^/workflows/(?P<workflow_id>[^/]+)/steps$", self.path
             )
+            workflow_match = re.match(r"^/workflows/(?P<workflow_id>[^/]+)$", self.path)

             if steps_match:
                 workflow_id = steps_match.group("workflow_id")
                 self._handle_steps(workflow_id)
+            elif workflow_match:
+                workflow_id = workflow_match.group("workflow_id")
+                workflows = self.dbos.list_workflows(workflow_ids=[workflow_id])
+                if not workflows:
+                    self.send_response(404)
+                    self._end_headers()
+                    return
+                response_body = json.dumps(workflows[0].__dict__).encode("utf-8")
+                self.send_response(200)
+                self.send_header("Content-Type", "application/json")
+                self.send_header("Content-Length", str(len(response_body)))
+                self._end_headers()
+                self.wfile.write(response_body)
             else:
                 self.send_response(404)
                 self._end_headers()
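The new GET route serves a single workflow's status by ID. A minimal sketch of calling it, assuming the admin server is running locally on its default port (3001 here is an assumption) and using a hypothetical workflow ID:

    # Sketch: fetch a single workflow's status from the DBOS admin server.
    # The port (3001) and the workflow ID are illustrative assumptions.
    import requests

    resp = requests.get("http://localhost:3001/workflows/my-workflow-id")
    if resp.status_code == 200:
        print(resp.json())  # the workflow's serialized status fields
    else:
        print("workflow not found")  # the handler answers 404 for unknown IDs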
@@ -126,6 +142,32 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
             self.send_response(200)
             self._end_headers()
             self.wfile.write(json.dumps(workflow_ids).encode("utf-8"))
+        elif self.path == _workflows_path:
+            try:
+                filters = json.loads(post_data.decode("utf-8")) if post_data else {}
+                self._handle_workflows(filters)
+            except (json.JSONDecodeError, AttributeError) as e:
+                self.send_response(400)
+                self.send_header("Content-Type", "application/json")
+                self.end_headers()
+                self.wfile.write(
+                    json.dumps({"error": f"Invalid JSON input: {str(e)}"}).encode(
+                        "utf-8"
+                    )
+                )
+        elif self.path == _queued_workflows_path:
+            try:
+                filters = json.loads(post_data.decode("utf-8")) if post_data else {}
+                self._handle_queued_workflows(filters)
+            except (json.JSONDecodeError, AttributeError) as e:
+                self.send_response(400)
+                self.send_header("Content-Type", "application/json")
+                self.end_headers()
+                self.wfile.write(
+                    json.dumps({"error": f"Invalid JSON input: {str(e)}"}).encode(
+                        "utf-8"
+                    )
+                )
         elif self.path == _garbage_collect_path:
             inputs = json.loads(post_data.decode("utf-8"))
             cutoff_epoch_timestamp_ms = inputs.get("cutoff_epoch_timestamp_ms", None)
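Both new POST routes read an optional JSON filter body. A sketch of querying them, again assuming port 3001 and illustrative filter values:

    # Sketch: query the new listing endpoints with JSON filter bodies.
    # The port and filter values are illustrative assumptions.
    import requests

    filters = {"status": "PENDING", "limit": 10, "sort_desc": True}
    workflows = requests.post("http://localhost:3001/workflows", json=filters)
    print(workflows.json())  # list of serialized workflow statuses

    queued = requests.post("http://localhost:3001/queues", json={"queue_name": "example-queue"})
    print(queued.json())  # queued workflows on the (hypothetical) queue

A body that is not valid JSON is answered with a 400 and a JSON error message, per the except branches above.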
@@ -144,7 +186,6 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
             self.send_response(204)
             self._end_headers()
         else:
-
             restart_match = re.match(
                 r"^/workflows/(?P<workflow_id>[^/]+)/restart$", self.path
             )
@@ -283,6 +324,50 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
         self._end_headers()
         self.wfile.write(json_steps)

+    def _handle_workflows(self, filters: Dict[str, Any]) -> None:
+        workflows = self.dbos.list_workflows(
+            workflow_ids=filters.get("workflow_ids"),
+            name=filters.get("name"),
+            start_time=filters.get("start_time"),
+            end_time=filters.get("end_time"),
+            status=filters.get("status"),
+            app_version=filters.get("application_version"),
+            limit=filters.get("limit"),
+            offset=filters.get("offset"),
+            sort_desc=filters.get("sort_desc", False),
+            workflow_id_prefix=filters.get("workflow_id_prefix"),
+        )
+
+        response_body = json.dumps(
+            [workflow.__dict__ for workflow in workflows]
+        ).encode("utf-8")
+        self.send_response(200)
+        self.send_header("Content-Type", "application/json")
+        self.send_header("Content-Length", str(len(response_body)))
+        self._end_headers()
+        self.wfile.write(response_body)
+
+    def _handle_queued_workflows(self, filters: Dict[str, Any]) -> None:
+        workflows = self.dbos.list_queued_workflows(
+            queue_name=filters.get("queue_name"),
+            name=filters.get("name"),
+            start_time=filters.get("start_time"),
+            end_time=filters.get("end_time"),
+            status=filters.get("status"),
+            limit=filters.get("limit"),
+            offset=filters.get("offset"),
+            sort_desc=filters.get("sort_desc", False),
+        )
+
+        response_body = json.dumps(
+            [workflow.__dict__ for workflow in workflows]
+        ).encode("utf-8")
+        self.send_response(200)
+        self.send_header("Content-Type", "application/json")
+        self.send_header("Content-Length", str(len(response_body)))
+        self._end_headers()
+        self.wfile.write(response_body)
+

 # Be consistent with DBOS-TS response.
 class PerfUtilization(TypedDict):
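Read off the keyword arguments above, the filter payloads accepted by the two endpoints can be sketched as plain dictionaries (values are illustrative; keys omitted from the body simply fall back to None, and sort_desc defaults to False):

    # Sketch: the full set of filter keys that POST /workflows forwards to
    # list_workflows. All values below are illustrative assumptions.
    workflow_filters = {
        "workflow_ids": ["wf-1", "wf-2"],
        "name": "my_workflow",
        "start_time": "2024-01-01T00:00:00Z",  # timestamp format is an assumption
        "end_time": "2024-06-30T23:59:59Z",
        "status": "SUCCESS",
        "application_version": "1.0.0",
        "limit": 50,
        "offset": 0,
        "sort_desc": True,
        "workflow_id_prefix": "batch-",
    }

    # POST /queues accepts the same keys except workflow_ids, application_version,
    # and workflow_id_prefix, and adds queue_name.
    queue_filters = {"queue_name": "example-queue", "status": "PENDING", "limit": 20}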
{dbos-1.5.0a4.dist-info → dbos-1.5.0a5.dist-info}/RECORD
CHANGED

@@ -1,10 +1,10 @@
-dbos-1.5.…
-dbos-1.5.…
-dbos-1.5.…
-dbos-1.5.…
+dbos-1.5.0a5.dist-info/METADATA,sha256=PmP2OiVSSpeLfQI7zia2CeglXOeQXDi2QLxWK88OlxM,13267
+dbos-1.5.0a5.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
+dbos-1.5.0a5.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
+dbos-1.5.0a5.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
 dbos/__init__.py,sha256=NssPCubaBxdiKarOWa-wViz1hdJSkmBGcpLX_gQ4NeA,891
 dbos/__main__.py,sha256=G7Exn-MhGrVJVDbgNlpzhfh8WMX_72t3_oJaFT9Lmt8,653
-dbos/_admin_server.py,sha256=…
+dbos/_admin_server.py,sha256=t0GU32Z9FZEPBQSzVXDrhj0lvVjkDhH_CN6p0qScaB8,15465
 dbos/_app_db.py,sha256=htblDPfqrpb_uZoFcvaud7cgQ-PDyn6Bn-cBidxdCTA,10603
 dbos/_classproperty.py,sha256=f0X-_BySzn3yFDRKB2JpCbLYQ9tLwt1XftfshvY7CBs,626
 dbos/_client.py,sha256=cQxw1Nbh_vKZ03lONt0EmUhwXBk3B3NczZrmfXXeefY,14667

@@ -69,4 +69,4 @@ dbos/cli/cli.py,sha256=EemOMqNpzSU2BQhAxV_e59pBRITDLwt49HF6W3uWBZg,20775
 dbos/dbos-config.schema.json,sha256=CjaspeYmOkx6Ip_pcxtmfXJTn_YGdSx_0pcPBF7KZmo,6060
 dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
 version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
-dbos-1.5.…
+dbos-1.5.0a5.dist-info/RECORD,,
{dbos-1.5.0a4.dist-info → dbos-1.5.0a5.dist-info}/WHEEL
File without changes

{dbos-1.5.0a4.dist-info → dbos-1.5.0a5.dist-info}/entry_points.txt
File without changes

{dbos-1.5.0a4.dist-info → dbos-1.5.0a5.dist-info}/licenses/LICENSE
File without changes