dbos 0.27.0a2__py3-none-any.whl → 0.27.0a6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of dbos might be problematic.
- dbos/__init__.py +2 -1
- dbos/_admin_server.py +1 -1
- dbos/_app_db.py +1 -1
- dbos/_client.py +7 -5
- dbos/_dbos.py +21 -5
- dbos/_sys_db.py +2 -14
- dbos/cli/cli.py +166 -117
- {dbos-0.27.0a2.dist-info → dbos-0.27.0a6.dist-info}/METADATA +1 -1
- {dbos-0.27.0a2.dist-info → dbos-0.27.0a6.dist-info}/RECORD +12 -12
- {dbos-0.27.0a2.dist-info → dbos-0.27.0a6.dist-info}/WHEEL +0 -0
- {dbos-0.27.0a2.dist-info → dbos-0.27.0a6.dist-info}/entry_points.txt +0 -0
- {dbos-0.27.0a2.dist-info → dbos-0.27.0a6.dist-info}/licenses/LICENSE +0 -0
dbos/__init__.py
CHANGED
@@ -1,7 +1,7 @@
 from . import _error as error
 from ._client import DBOSClient, EnqueueOptions
 from ._context import DBOSContextEnsure, DBOSContextSetAuth, SetWorkflowID
-from ._dbos import DBOS, DBOSConfiguredInstance, WorkflowHandle
+from ._dbos import DBOS, DBOSConfiguredInstance, WorkflowHandle, WorkflowHandleAsync
 from ._dbos_config import ConfigFile, DBOSConfig, get_dbos_database_url, load_config
 from ._kafka_message import KafkaMessage
 from ._queue import Queue
@@ -20,6 +20,7 @@ __all__ = [
     "KafkaMessage",
     "SetWorkflowID",
     "WorkflowHandle",
+    "WorkflowHandleAsync",
     "WorkflowStatus",
     "WorkflowStatusString",
     "load_config",
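Version 0.27.0a6 re-exports WorkflowHandleAsync from the package root alongside WorkflowHandle. A minimal sketch of the new export used as a type annotation; the connection string and workflow id are placeholders, not part of the diff:

```python
import asyncio

from dbos import DBOSClient, WorkflowHandleAsync


async def print_status(workflow_id: str) -> None:
    # Placeholder connection string; use your application's database URL.
    client = DBOSClient(database_url="postgresql://postgres:dbos@localhost:5432/my_app_db")
    # retrieve_workflow_async returns a WorkflowHandleAsync, per the dbos/_client.py diff below.
    handle: WorkflowHandleAsync[None] = await client.retrieve_workflow_async(workflow_id)
    print(await handle.get_status())
    client.destroy()


# asyncio.run(print_status("my-workflow-id"))
```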
dbos/_admin_server.py
CHANGED
@@ -209,7 +209,7 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
         self._end_headers()

     def _handle_steps(self, workflow_id: str) -> None:
-        steps = self.dbos.
+        steps = self.dbos.list_workflow_steps(workflow_id)

         updated_steps = [
             {
dbos/_app_db.py
CHANGED
@@ -7,7 +7,7 @@ from sqlalchemy.exc import DBAPIError
 from sqlalchemy.orm import Session, sessionmaker

 from . import _serialization
-from ._dbos_config import
+from ._dbos_config import DatabaseConfig
 from ._error import DBOSUnexpectedStepError, DBOSWorkflowConflictIDError
 from ._schemas.application_database import ApplicationSchema
 from ._sys_db import StepInfo
dbos/_client.py
CHANGED
@@ -1,9 +1,10 @@
 import asyncio
 import sys
-import time
 import uuid
 from typing import Any, Generic, List, Optional, TypedDict, TypeVar

+from sqlalchemy import URL
+
 from dbos._app_db import ApplicationDatabase

 if sys.version_info < (3, 11):
@@ -57,7 +58,7 @@ class WorkflowHandleClientPolling(Generic[R]):
         return res

     def get_status(self) -> WorkflowStatus:
-        status = get_workflow(self._sys_db, self.workflow_id,
+        status = get_workflow(self._sys_db, self.workflow_id, False)
         if status is None:
             raise DBOSNonExistentWorkflowError(self.workflow_id)
         return status
@@ -80,7 +81,7 @@ class WorkflowHandleClientAsyncPolling(Generic[R]):

     async def get_status(self) -> WorkflowStatus:
         status = await asyncio.to_thread(
-            get_workflow, self._sys_db, self.workflow_id,
+            get_workflow, self._sys_db, self.workflow_id, False
         )
         if status is None:
             raise DBOSNonExistentWorkflowError(self.workflow_id)
@@ -94,6 +95,7 @@ class DBOSClient:
         db_config["sys_db_name"] = system_database
         self._sys_db = SystemDatabase(db_config)
         self._app_db = ApplicationDatabase(db_config)
+        self._db_url = database_url

     def destroy(self) -> None:
         self._sys_db.destroy()
@@ -159,13 +161,13 @@ class DBOSClient:
         return WorkflowHandleClientAsyncPolling[R](workflow_id, self._sys_db)

     def retrieve_workflow(self, workflow_id: str) -> WorkflowHandle[R]:
-        status = get_workflow(self._sys_db, workflow_id,
+        status = get_workflow(self._sys_db, workflow_id, False)
         if status is None:
             raise DBOSNonExistentWorkflowError(workflow_id)
         return WorkflowHandleClientPolling[R](workflow_id, self._sys_db)

     async def retrieve_workflow_async(self, workflow_id: str) -> WorkflowHandleAsync[R]:
-        status = asyncio.to_thread(get_workflow, self._sys_db, workflow_id,
+        status = asyncio.to_thread(get_workflow, self._sys_db, workflow_id, False)
         if status is None:
             raise DBOSNonExistentWorkflowError(workflow_id)
         return WorkflowHandleClientAsyncPolling[R](workflow_id, self._sys_db)
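DBOSClient now records the database URL it was constructed with (self._db_url), which the CLI later reuses when resetting the system database. Construction and synchronous handle retrieval are unchanged; a brief sketch with placeholder values:

```python
from dbos import DBOSClient

# Placeholder connection string for illustration only.
client = DBOSClient(database_url="postgresql://postgres:dbos@localhost:5432/my_app_db")

# Synchronous retrieval; raises DBOSNonExistentWorkflowError if the id is unknown.
handle = client.retrieve_workflow(workflow_id="my-workflow-id")
print(handle.get_status())

client.destroy()
```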
dbos/_dbos.py
CHANGED
@@ -64,7 +64,8 @@ from ._registrations import (
 )
 from ._roles import default_required_roles, required_roles
 from ._scheduler import ScheduledWorkflow, scheduled
-from .
+from ._schemas.system_database import SystemSchema
+from ._sys_db import StepInfo, SystemDatabase, WorkflowStatus, reset_system_database
 from ._tracer import DBOSTracer, dbos_tracer

 if TYPE_CHECKING:
@@ -73,14 +74,15 @@ if TYPE_CHECKING:
     from ._request import Request
     from flask import Flask

+    from sqlalchemy import URL
     from sqlalchemy.orm import Session

     from ._request import Request

 if sys.version_info < (3, 10):
-    from typing_extensions import ParamSpec
+    from typing_extensions import ParamSpec
 else:
-    from typing import ParamSpec
+    from typing import ParamSpec

 from ._admin_server import AdminServer
 from ._app_db import ApplicationDatabase
@@ -109,7 +111,6 @@ from ._error import (
 )
 from ._event_loop import BackgroundEventLoop
 from ._logger import add_otlp_to_all_loggers, config_logger, dbos_logger, init_logger
-from ._sys_db import SystemDatabase
 from ._workflow_commands import get_workflow, list_workflow_steps

 # Most DBOS functions are just any callable F, so decorators / wrappers work on F
@@ -563,7 +564,22 @@ class DBOS:
         assert (
             not self._launched
         ), "The system database cannot be reset after DBOS is launched. Resetting the system database is a destructive operation that should only be used in a test environment."
-
+
+        sysdb_name = (
+            self._config["database"]["sys_db_name"]
+            if "sys_db_name" in self._config["database"]
+            and self._config["database"]["sys_db_name"]
+            else self._config["database"]["app_db_name"] + SystemSchema.sysdb_suffix
+        )
+        postgres_db_url = URL.create(
+            "postgresql+psycopg",
+            username=self._config["database"]["username"],
+            password=self._config["database"]["password"],
+            host=self._config["database"]["hostname"],
+            port=self._config["database"]["port"],
+            database="postgres",
+        )
+        reset_system_database(postgres_db_url, sysdb_name)

     def _destroy(self) -> None:
         self._initialized = False
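DBOS.reset_system_database now derives the system database name itself and builds a SQLAlchemy URL pointing at the postgres maintenance database before calling the standalone reset_system_database helper. A minimal sketch of that call shape, using placeholder connection values and the internal modules the diff imports (dbos._sys_db and dbos._schemas.system_database are private; they are shown here only to illustrate the new signature):

```python
from sqlalchemy import URL

from dbos._schemas.system_database import SystemSchema
from dbos._sys_db import reset_system_database

# Hypothetical connection settings; normally taken from the DBOS database config.
app_db_name = "my_app_db"
sysdb_name = app_db_name + SystemSchema.sysdb_suffix  # append the DBOS system-database suffix

postgres_db_url = URL.create(
    "postgresql+psycopg",
    username="postgres",
    password="dbos",
    host="localhost",
    port=5432,
    database="postgres",  # connect to the maintenance DB, not the app DB
)
reset_system_database(postgres_db_url, sysdb_name)
```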
dbos/_sys_db.py
CHANGED
@@ -248,6 +248,7 @@ class SystemDatabase:
            sa.text("SELECT 1 FROM pg_database WHERE datname=:db_name"),
            parameters={"db_name": sysdb_name},
        ).scalar():
+            dbos_logger.info(f"Creating system database {sysdb_name}")
            conn.execute(sa.text(f"CREATE DATABASE {sysdb_name}"))
        engine.dispose()

@@ -1897,20 +1898,7 @@
     return wf_status, workflow_deadline_epoch_ms


-def reset_system_database(
-    sysdb_name = (
-        config["database"]["sys_db_name"]
-        if "sys_db_name" in config["database"] and config["database"]["sys_db_name"]
-        else config["database"]["app_db_name"] + SystemSchema.sysdb_suffix
-    )
-    postgres_db_url = sa.URL.create(
-        "postgresql+psycopg",
-        username=config["database"]["username"],
-        password=config["database"]["password"],
-        host=config["database"]["hostname"],
-        port=config["database"]["port"],
-        database="postgres",
-    )
+def reset_system_database(postgres_db_url: sa.URL, sysdb_name: str) -> None:
     try:
         # Connect to postgres default database
         engine = sa.create_engine(postgres_db_url)
dbos/cli/cli.py
CHANGED
@@ -5,10 +5,10 @@ import subprocess
 import time
 import typing
 from os import path
-from typing import Any
+from typing import Any, Optional
+from urllib.parse import quote

 import jsonpickle  # type: ignore
-import requests
 import sqlalchemy as sa
 import typer
 from rich import print
@@ -19,18 +19,28 @@ from dbos._debug import debug_workflow, parse_start_command

 from .. import load_config
 from .._app_db import ApplicationDatabase
+from .._client import DBOSClient
 from .._dbos_config import _is_valid_app_name
 from .._docker_pg_helper import start_docker_pg, stop_docker_pg
+from .._schemas.system_database import SystemSchema
 from .._sys_db import SystemDatabase, reset_system_database
-from .._workflow_commands import (
-    get_workflow,
-    list_queued_workflows,
-    list_workflow_steps,
-    list_workflows,
-)
 from ..cli._github_init import create_template_from_github
 from ._template_init import copy_template, get_project_name, get_templates_directory

+
+def start_client(db_url: Optional[str] = None) -> DBOSClient:
+    database_url = db_url
+    if database_url is None:
+        database_url = os.getenv("DBOS_DATABASE_URL")
+    if database_url is None:
+        config = load_config(silent=True)
+        database = config["database"]
+        username = quote(database["username"])
+        password = quote(database["password"])
+        database_url = f"postgresql://{username}:{password}@{database['hostname']}:{database['port']}/{database['app_db_name']}"
+    return DBOSClient(database_url=database_url)
+
+
 app = typer.Typer()
 workflow = typer.Typer()
 queue = typer.Typer()
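The CLI commands now build a DBOSClient through the new start_client helper, which resolves the application database URL from the --db-url option first, then the DBOS_DATABASE_URL environment variable, then dbos-config.yaml. A rough equivalent of that resolution outside the CLI; the helper name resolve_database_url and the connection values are illustrative, not part of the package:

```python
import os
from typing import Optional
from urllib.parse import quote

from dbos import DBOSClient, load_config


def resolve_database_url(db_url: Optional[str] = None) -> str:
    # 1) explicit argument, 2) environment variable, 3) dbos-config.yaml
    if db_url:
        return db_url
    env_url = os.getenv("DBOS_DATABASE_URL")
    if env_url:
        return env_url
    database = load_config(silent=True)["database"]
    # quote() protects credentials that contain special characters
    return (
        f"postgresql://{quote(database['username'])}:{quote(database['password'])}"
        f"@{database['hostname']}:{database['port']}/{database['app_db_name']}"
    )


client = DBOSClient(database_url=resolve_database_url())
```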
@@ -241,7 +251,23 @@ def migrate() -> None:

 @app.command(help="Reset the DBOS system database")
 def reset(
-    yes: bool = typer.Option(False, "-y", "--yes", help="Skip confirmation prompt")
+    yes: bool = typer.Option(False, "-y", "--yes", help="Skip confirmation prompt"),
+    sys_db_name: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--sys-db-name",
+            "-s",
+            help="Specify the name of the system database to reset",
+        ),
+    ] = None,
+    db_url: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--db-url",
+            "-D",
+            help="Your DBOS application database URL",
+        ),
+    ] = None,
 ) -> None:
     if not yes:
         confirm = typer.confirm(
@@ -250,9 +276,18 @@ def reset(
         if not confirm:
             typer.echo("Operation cancelled.")
             raise typer.Exit()
-    config = load_config()
     try:
-
+        client = start_client(db_url=db_url)
+        pg_db_url = sa.make_url(client._db_url).set(drivername="postgresql+psycopg")
+        assert (
+            pg_db_url.database is not None
+        ), f"Database name is required in URL: {pg_db_url.render_as_string(hide_password=True)}"
+        sysdb_name = (
+            sys_db_name
+            if sys_db_name
+            else (pg_db_url.database + SystemSchema.sysdb_suffix)
+        )
+        reset_system_database(pg_db_url.set(database="postgres"), sysdb_name)
     except sa.exc.SQLAlchemyError as e:
         typer.echo(f"Error resetting system database: {str(e)}")
         return
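`dbos reset` now derives everything from the application database URL rather than the full config file: it reparses the URL with SQLAlchemy, swaps in the psycopg driver, and targets the postgres maintenance database, with an optional --sys-db-name override. A condensed sketch of that derivation; the URL is a placeholder and dbos._sys_db / dbos._schemas.system_database are private modules used here only for illustration:

```python
import sqlalchemy as sa

from dbos._schemas.system_database import SystemSchema
from dbos._sys_db import reset_system_database

app_db_url = "postgresql://postgres:dbos@localhost:5432/my_app_db"  # placeholder

pg_db_url = sa.make_url(app_db_url).set(drivername="postgresql+psycopg")
assert pg_db_url.database is not None
sysdb_name = pg_db_url.database + SystemSchema.sysdb_suffix

# Reset the system database by connecting to the "postgres" maintenance DB.
reset_system_database(pg_db_url.set(database="postgres"), sysdb_name)
```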
@@ -276,6 +311,14 @@ def debug(

 @workflow.command(help="List workflows for your application")
 def list(
+    db_url: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--db-url",
+            "-D",
+            help="Your DBOS application database URL",
+        ),
+    ] = None,
     limit: Annotated[
         int,
         typer.Option("--limit", "-l", help="Limit the results returned"),
@@ -324,21 +367,31 @@ def list(
            help="Retrieve workflows with this name",
        ),
    ] = None,
-
+    sort_desc: Annotated[
        bool,
-        typer.Option(
+        typer.Option(
+            "--sort-desc",
+            "-d",
+            help="Sort the results in descending order (older first)",
+        ),
    ] = False,
+    offset: Annotated[
+        typing.Optional[int],
+        typer.Option(
+            "--offset",
+            "-o",
+            help="Offset for pagination",
+        ),
+    ] = None,
 ) -> None:
-
-    sys_db = SystemDatabase(config["database"])
-    workflows = list_workflows(
-        sys_db,
+    workflows = start_client(db_url=db_url).list_workflows(
         limit=limit,
+        offset=offset,
+        sort_desc=sort_desc,
         user=user,
         start_time=starttime,
         end_time=endtime,
         status=status,
-        request=request,
         app_version=appversion,
         name=name,
     )
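`dbos workflow list` now goes through DBOSClient.list_workflows and gains --offset and --sort-desc for pagination and ordering. A small sketch of the equivalent client call, with placeholder connection and filter values:

```python
from dbos import DBOSClient

# Placeholder connection string for illustration.
client = DBOSClient(database_url="postgresql://postgres:dbos@localhost:5432/my_app_db")

# Page through workflows ten at a time; sort_desc toggles descending order.
workflows = client.list_workflows(
    limit=10,
    offset=0,
    sort_desc=True,
)
for wf in workflows:
    print(wf)

client.destroy()
```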
@@ -348,28 +401,39 @@ def list(

 @workflow.command(help="Retrieve the status of a workflow")
 def get(
     workflow_id: Annotated[str, typer.Argument()],
-
-
-        typer.Option(
-
+    db_url: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--db-url",
+            "-D",
+            help="Your DBOS application database URL",
+        ),
+    ] = None,
 ) -> None:
-
-
-
-
+    status = (
+        start_client(db_url=db_url)
+        .retrieve_workflow(workflow_id=workflow_id)
+        .get_status()
     )
+    print(jsonpickle.encode(status, unpicklable=False))


 @workflow.command(help="List the steps of a workflow")
 def steps(
     workflow_id: Annotated[str, typer.Argument()],
+    db_url: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--db-url",
+            "-D",
+            help="Your DBOS application database URL",
+        ),
+    ] = None,
 ) -> None:
-    config = load_config(silent=True)
-    sys_db = SystemDatabase(config["database"])
-    app_db = ApplicationDatabase(config["database"])
     print(
         jsonpickle.encode(
-            list_workflow_steps(
+            start_client(db_url=db_url).list_workflow_steps(workflow_id=workflow_id),
+            unpicklable=False,
         )
     )
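Both `dbos workflow get` and `dbos workflow steps` now read through the client and print jsonpickle-encoded output instead of wiring up SystemDatabase and ApplicationDatabase directly. A sketch of the steps path; the workflow id and URL are placeholders:

```python
import jsonpickle  # type: ignore

from dbos import DBOSClient

client = DBOSClient(database_url="postgresql://postgres:dbos@localhost:5432/my_app_db")

# Same data `dbos workflow steps <id>` prints: the recorded steps of one workflow.
steps = client.list_workflow_steps(workflow_id="my-workflow-id")
print(jsonpickle.encode(steps, unpicklable=False))

client.destroy()
```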
@@ -378,119 +442,94 @@ def steps(
     help="Cancel a workflow so it is no longer automatically retried or restarted"
 )
 def cancel(
-
-
+    workflow_id: Annotated[str, typer.Argument()],
+    db_url: Annotated[
         typing.Optional[str],
-        typer.Option(
-
-
-
-
-    ] =
+        typer.Option(
+            "--db-url",
+            "-D",
+            help="Your DBOS application database URL",
+        ),
+    ] = None,
 ) -> None:
-
-        f"http://{host}:{port}/workflows/{uuid}/cancel", json=[], timeout=5
-    )
-
-    if response.status_code == 204:
-        print(f"Workflow {uuid} has been cancelled")
-    else:
-        print(f"Failed to cancel workflow {uuid}. Status code: {response.status_code}")
+    start_client(db_url=db_url).cancel_workflow(workflow_id=workflow_id)


 @workflow.command(help="Resume a workflow that has been cancelled")
 def resume(
-
-
+    workflow_id: Annotated[str, typer.Argument()],
+    db_url: Annotated[
         typing.Optional[str],
-        typer.Option(
-
-
-
-
-    ] =
+        typer.Option(
+            "--db-url",
+            "-D",
+            help="Your DBOS application database URL",
+        ),
+    ] = None,
 ) -> None:
-
-        f"http://{host}:{port}/workflows/{uuid}/resume", json=[], timeout=5
-    )
-
-    if response.status_code == 204:
-        print(f"Workflow {uuid} has been resumed")
-    else:
-        print(f"Failed to resume workflow {uuid}. Status code: {response.status_code}")
+    start_client(db_url=db_url).resume_workflow(workflow_id=workflow_id)


 @workflow.command(help="Restart a workflow from the beginning with a new id")
 def restart(
-
-
+    workflow_id: Annotated[str, typer.Argument()],
+    db_url: Annotated[
         typing.Optional[str],
-        typer.Option(
-
-
-
-
-    ] =
+        typer.Option(
+            "--db-url",
+            "-D",
+            help="Your DBOS application database URL",
+        ),
+    ] = None,
 ) -> None:
-
-
-
-
+    status = (
+        start_client(db_url=db_url)
+        .fork_workflow(workflow_id=workflow_id, start_step=1)
+        .get_status()
     )
-
-    if response.status_code == 204:
-        print(f"Workflow {uuid} has been restarted")
-    else:
-        error_message = response.json().get("error", "Unknown error")
-        print(
-            f"Failed to restart workflow {uuid}. "
-            f"Status code: {response.status_code}. "
-            f"Error: {error_message}"
-        )
+    print(jsonpickle.encode(status, unpicklable=False))


 @workflow.command(
     help="fork a workflow from the beginning with a new id and from a step"
 )
 def fork(
-
-    host: Annotated[
-        typing.Optional[str],
-        typer.Option("--host", "-H", help="Specify the admin host"),
-    ] = "localhost",
-    port: Annotated[
-        typing.Optional[int],
-        typer.Option("--port", "-p", help="Specify the admin port"),
-    ] = 3001,
+    workflow_id: Annotated[str, typer.Argument()],
     step: Annotated[
-
+        int,
         typer.Option(
             "--step",
             "-s",
-            help="Restart from this step
+            help="Restart from this step",
         ),
     ] = 1,
+    db_url: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--db-url",
+            "-D",
+            help="Your DBOS application database URL",
+        ),
+    ] = None,
 ) -> None:
-
-
-
-
-        timeout=5,
+    status = (
+        start_client(db_url=db_url)
+        .fork_workflow(workflow_id=workflow_id, start_step=step)
+        .get_status()
     )
-
-    if response.status_code == 204:
-        print(f"Workflow {uuid} has been forked")
-    else:
-        error_message = response.json().get("error", "Unknown error")
-        print(
-            f"Failed to fork workflow {uuid}. "
-            f"Status code: {response.status_code}. "
-            f"Error: {error_message}"
-        )
+    print(jsonpickle.encode(status, unpicklable=False))


 @queue.command(name="list", help="List enqueued functions for your application")
 def list_queue(
+    db_url: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--db-url",
+            "-D",
+            help="Your DBOS application database URL",
+        ),
+    ] = None,
     limit: Annotated[
         typing.Optional[int],
         typer.Option("--limit", "-l", help="Limit the results returned"),
@@ -535,21 +574,31 @@ def list_queue(
            help="Retrieve functions on this queue",
        ),
    ] = None,
-
+    sort_desc: Annotated[
        bool,
-        typer.Option(
+        typer.Option(
+            "--sort-desc",
+            "-d",
+            help="Sort the results in descending order (older first)",
+        ),
    ] = False,
+    offset: Annotated[
+        typing.Optional[int],
+        typer.Option(
+            "--offset",
+            "-o",
+            help="Offset for pagination",
+        ),
+    ] = None,
 ) -> None:
-
-    sys_db = SystemDatabase(config["database"])
-    workflows = list_queued_workflows(
-        sys_db=sys_db,
+    workflows = start_client(db_url=db_url).list_queued_workflows(
         limit=limit,
+        offset=offset,
+        sort_desc=sort_desc,
         start_time=start_time,
         end_time=end_time,
         queue_name=queue_name,
         status=status,
-        request=request,
         name=name,
     )
     print(jsonpickle.encode(workflows, unpicklable=False))
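The cancel, resume, restart, and fork commands, along with `dbos queue list`, no longer call the admin server over HTTP with requests; they all act through DBOSClient against the database directly. A rough sketch of the same operations via the client, with placeholder identifiers and URL:

```python
from dbos import DBOSClient

client = DBOSClient(database_url="postgresql://postgres:dbos@localhost:5432/my_app_db")
workflow_id = "my-workflow-id"  # placeholder

# Equivalent of `dbos workflow cancel` and `dbos workflow resume`.
client.cancel_workflow(workflow_id=workflow_id)
client.resume_workflow(workflow_id=workflow_id)

# `dbos workflow restart` is fork_workflow from step 1; `dbos workflow fork -s N` starts later.
handle = client.fork_workflow(workflow_id=workflow_id, start_step=1)
print(handle.get_status())

# Equivalent of `dbos queue list`, with the new offset/sort_desc parameters.
queued = client.list_queued_workflows(limit=10, offset=0, sort_desc=False)
print(queued)

client.destroy()
```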
{dbos-0.27.0a2.dist-info → dbos-0.27.0a6.dist-info}/RECORD
CHANGED
@@ -1,19 +1,19 @@
-dbos-0.27.
-dbos-0.27.
-dbos-0.27.
-dbos-0.27.
-dbos/__init__.py,sha256=
+dbos-0.27.0a6.dist-info/METADATA,sha256=zkUFvsOjiEivL10Uz8W8LqkCI_c98CmbwvcAk5r98rY,5553
+dbos-0.27.0a6.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
+dbos-0.27.0a6.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
+dbos-0.27.0a6.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
+dbos/__init__.py,sha256=HgYmqo90vIabiROcK5LaKXXT7KfqDARiI9dUUK9sww8,890
 dbos/__main__.py,sha256=G7Exn-MhGrVJVDbgNlpzhfh8WMX_72t3_oJaFT9Lmt8,653
-dbos/_admin_server.py,sha256=
-dbos/_app_db.py,sha256=
+dbos/_admin_server.py,sha256=bR7hO8WS5hUzxjbDS3X0hXWuW8k3AQQSAvaynnthhtc,9031
+dbos/_app_db.py,sha256=3j8_5-MlSDY0otLRszFE-GfenU6JC20fcfSL-drSNYk,11800
 dbos/_classproperty.py,sha256=f0X-_BySzn3yFDRKB2JpCbLYQ9tLwt1XftfshvY7CBs,626
-dbos/_client.py,sha256=
+dbos/_client.py,sha256=jMY73ymYKGr_nnjXfOgCB6adZhd4cQw7UWZmxt-iq6c,12574
 dbos/_conductor/conductor.py,sha256=HYzVL29IMMrs2Mnms_7cHJynCnmmEN5SDQOMjzn3UoU,16840
 dbos/_conductor/protocol.py,sha256=zEKIuOQdIaSduNqfZKpo8PSD9_1oNpKIPnBNCu3RUyE,6681
 dbos/_context.py,sha256=aHzJxO7LLAz9w3G2dkZnOcFW_GG-Yaxd02AaoLu4Et8,21861
 dbos/_core.py,sha256=ylTVSv02h2M5SmDgYEJAZmNiKX35zPq0z-9WA-f4byY,47900
 dbos/_croniter.py,sha256=XHAyUyibs_59sJQfSNWkP7rqQY6_XrlfuuCxk4jYqek,47559
-dbos/_dbos.py,sha256=
+dbos/_dbos.py,sha256=t76_SVyPpyScCfotGthae27h6XxznpphVm5zIRxfnpY,48164
 dbos/_dbos_config.py,sha256=L0Z0OOB5FoPM9g-joZqXGeJnlxWQsEUtgPtgtg9Uf48,21732
 dbos/_debug.py,sha256=MNlQVZ6TscGCRQeEEL0VE8Uignvr6dPeDDDefS3xgIE,1823
 dbos/_docker_pg_helper.py,sha256=NmcgqmR5rQA_4igfeqh8ugNT2z3YmoOvuep_MEtxTiY,5854
@@ -46,7 +46,7 @@ dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_schemas/application_database.py,sha256=SypAS9l9EsaBHFn9FR8jmnqt01M74d9AF1AMa4m2hhI,1040
 dbos/_schemas/system_database.py,sha256=aChSK7uLECD-v-7BZeOfuZFbtWayllaS3PaowaKDHwY,5490
 dbos/_serialization.py,sha256=YCYv0qKAwAZ1djZisBC7khvKqG-5OcIv9t9EC5PFIog,1743
-dbos/_sys_db.py,sha256=
+dbos/_sys_db.py,sha256=HBXmOL6AvVC8WjIDxCuOoodw2xtni8SgaU3mMft84Ts,80697
 dbos/_templates/dbos-db-starter/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
 dbos/_templates/dbos-db-starter/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_templates/dbos-db-starter/__package/main.py,sha256=nJMN3ZD2lmwg4Dcgmiwqc-tQGuCJuJal2Xl85iA277U,2453
@@ -62,8 +62,8 @@ dbos/_utils.py,sha256=nFRUHzVjXG5AusF85AlYHikj63Tzi-kQm992ihsrAxA,201
 dbos/_workflow_commands.py,sha256=7wyxTfIyh2IVIqlkaTr8CMBq8yxWP3Hhddyv1YJY8zE,3576
 dbos/cli/_github_init.py,sha256=Y_bDF9gfO2jB1id4FV5h1oIxEJRWyqVjhb7bNEa5nQ0,3224
 dbos/cli/_template_init.py,sha256=-WW3kbq0W_Tq4WbMqb1UGJG3xvJb3woEY5VspG95Srk,2857
-dbos/cli/cli.py,sha256=
+dbos/cli/cli.py,sha256=a3rUrHog5-e22KjjUPOuTjH20PmUgSP0amRpMd6LVJE,18882
 dbos/dbos-config.schema.json,sha256=8KcwJb_sQc4-6tQG2TLmjE_nratfrQa0qVLl9XPsvWE,6367
 dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
 version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
-dbos-0.27.
+dbos-0.27.0a6.dist-info/RECORD,,
{dbos-0.27.0a2.dist-info → dbos-0.27.0a6.dist-info}/WHEEL: File without changes
{dbos-0.27.0a2.dist-info → dbos-0.27.0a6.dist-info}/entry_points.txt: File without changes
{dbos-0.27.0a2.dist-info → dbos-0.27.0a6.dist-info}/licenses/LICENSE: File without changes