dbos 0.26.0a25__py3-none-any.whl → 0.27.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
This version of dbos has been flagged as a potentially problematic release.
- dbos/__init__.py +11 -2
- dbos/_admin_server.py +5 -4
- dbos/_app_db.py +17 -5
- dbos/_client.py +46 -15
- dbos/_context.py +50 -0
- dbos/_core.py +13 -0
- dbos/_dbos.py +35 -7
- dbos/_dbos_config.py +6 -0
- dbos/_error.py +28 -0
- dbos/_migrations/versions/27ac6900c6ad_add_queue_dedup.py +45 -0
- dbos/_queue.py +5 -3
- dbos/_schemas/system_database.py +9 -0
- dbos/_serialization.py +54 -3
- dbos/_sys_db.py +98 -48
- dbos/_tracer.py +9 -1
- dbos/_workflow_commands.py +15 -2
- dbos/cli/cli.py +166 -117
- {dbos-0.26.0a25.dist-info → dbos-0.27.0.dist-info}/METADATA +1 -1
- {dbos-0.26.0a25.dist-info → dbos-0.27.0.dist-info}/RECORD +22 -21
- {dbos-0.26.0a25.dist-info → dbos-0.27.0.dist-info}/WHEEL +0 -0
- {dbos-0.26.0a25.dist-info → dbos-0.27.0.dist-info}/entry_points.txt +0 -0
- {dbos-0.26.0a25.dist-info → dbos-0.27.0.dist-info}/licenses/LICENSE +0 -0
dbos/cli/cli.py
CHANGED
@@ -5,10 +5,10 @@ import subprocess
 import time
 import typing
 from os import path
-from typing import Any
+from typing import Any, Optional
+from urllib.parse import quote
 
 import jsonpickle  # type: ignore
-import requests
 import sqlalchemy as sa
 import typer
 from rich import print
@@ -19,18 +19,28 @@ from dbos._debug import debug_workflow, parse_start_command
 
 from .. import load_config
 from .._app_db import ApplicationDatabase
+from .._client import DBOSClient
 from .._dbos_config import _is_valid_app_name
 from .._docker_pg_helper import start_docker_pg, stop_docker_pg
+from .._schemas.system_database import SystemSchema
 from .._sys_db import SystemDatabase, reset_system_database
-from .._workflow_commands import (
-    get_workflow,
-    list_queued_workflows,
-    list_workflow_steps,
-    list_workflows,
-)
 from ..cli._github_init import create_template_from_github
 from ._template_init import copy_template, get_project_name, get_templates_directory
 
+
+def start_client(db_url: Optional[str] = None) -> DBOSClient:
+    database_url = db_url
+    if database_url is None:
+        database_url = os.getenv("DBOS_DATABASE_URL")
+    if database_url is None:
+        config = load_config(silent=True)
+        database = config["database"]
+        username = quote(database["username"])
+        password = quote(database["password"])
+        database_url = f"postgresql://{username}:{password}@{database['hostname']}:{database['port']}/{database['app_db_name']}"
+    return DBOSClient(database_url=database_url)
+
+
 app = typer.Typer()
 workflow = typer.Typer()
 queue = typer.Typer()
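The new `start_client` helper above is the single entry point the CLI now uses to reach the database: an explicit `--db-url` wins, then the `DBOS_DATABASE_URL` environment variable, then a URL assembled from the loaded config. Below is a minimal sketch of the same resolution order used programmatically, assuming only the `DBOSClient(database_url=...)` constructor shown in this diff; the fallback URL is a placeholder.

```python
# Sketch: resolve a database URL the way start_client does and hand it to
# DBOSClient. Only DBOSClient(database_url=...) is taken from this diff; the
# fallback URL below is a placeholder, not a real deployment value.
import os

from dbos._client import DBOSClient

database_url = os.getenv(
    "DBOS_DATABASE_URL",
    "postgresql://postgres:dbos@localhost:5432/my_app",  # placeholder fallback
)
client = DBOSClient(database_url=database_url)
```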
@@ -241,7 +251,23 @@ def migrate() -> None:
 
 @app.command(help="Reset the DBOS system database")
 def reset(
-    yes: bool = typer.Option(False, "-y", "--yes", help="Skip confirmation prompt")
+    yes: bool = typer.Option(False, "-y", "--yes", help="Skip confirmation prompt"),
+    sys_db_name: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--sys-db-name",
+            "-s",
+            help="Specify the name of the system database to reset",
+        ),
+    ] = None,
+    db_url: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--db-url",
+            "-D",
+            help="Your DBOS application database URL",
+        ),
+    ] = None,
 ) -> None:
     if not yes:
         confirm = typer.confirm(
@@ -250,9 +276,18 @@ def reset(
         if not confirm:
             typer.echo("Operation cancelled.")
             raise typer.Exit()
-    config = load_config()
     try:
-
+        client = start_client(db_url=db_url)
+        pg_db_url = sa.make_url(client._db_url).set(drivername="postgresql+psycopg")
+        assert (
+            pg_db_url.database is not None
+        ), f"Database name is required in URL: {pg_db_url.render_as_string(hide_password=True)}"
+        sysdb_name = (
+            sys_db_name
+            if sys_db_name
+            else (pg_db_url.database + SystemSchema.sysdb_suffix)
+        )
+        reset_system_database(pg_db_url.set(database="postgres"), sysdb_name)
     except sa.exc.SQLAlchemyError as e:
         typer.echo(f"Error resetting system database: {str(e)}")
         return
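The reset hunk above drops the old `load_config()` path and instead derives everything from the client's connection string. Below is a rough sketch of that derivation using the same SQLAlchemy URL helpers; the connection string is a placeholder and the literal `"_dbos_sys"` suffix is an assumption about `SystemSchema.sysdb_suffix`.

```python
# Sketch of the URL handling in the new reset command. The connection string
# is a placeholder; "_dbos_sys" is an assumed value of SystemSchema.sysdb_suffix.
import sqlalchemy as sa

app_url = sa.make_url("postgresql://postgres:dbos@localhost:5432/my_app").set(
    drivername="postgresql+psycopg"
)
assert app_url.database is not None

# Default system database name: application database name plus the suffix,
# unless --sys-db-name overrides it.
sysdb_name = app_url.database + "_dbos_sys"  # assumed suffix value

# reset_system_database() then receives a server-level URL (database "postgres")
# together with the system database name to reset.
admin_url = app_url.set(database="postgres")
print(admin_url.render_as_string(hide_password=True), sysdb_name)
```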
@@ -276,6 +311,14 @@ def debug(
 
 @workflow.command(help="List workflows for your application")
 def list(
+    db_url: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--db-url",
+            "-D",
+            help="Your DBOS application database URL",
+        ),
+    ] = None,
     limit: Annotated[
         int,
         typer.Option("--limit", "-l", help="Limit the results returned"),
@@ -324,21 +367,31 @@ def list(
             help="Retrieve workflows with this name",
         ),
     ] = None,
-
+    sort_desc: Annotated[
         bool,
-        typer.Option(
+        typer.Option(
+            "--sort-desc",
+            "-d",
+            help="Sort the results in descending order (older first)",
+        ),
     ] = False,
+    offset: Annotated[
+        typing.Optional[int],
+        typer.Option(
+            "--offset",
+            "-o",
+            help="Offset for pagination",
+        ),
+    ] = None,
 ) -> None:
-
-    sys_db = SystemDatabase(config["database"])
-    workflows = list_workflows(
-        sys_db,
+    workflows = start_client(db_url=db_url).list_workflows(
         limit=limit,
+        offset=offset,
+        sort_desc=sort_desc,
         user=user,
         start_time=starttime,
        end_time=endtime,
         status=status,
-        request=request,
         app_version=appversion,
         name=name,
     )
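`dbos workflow list` now forwards `--offset` and `--sort-desc` to `DBOSClient.list_workflows`, which makes paging through history possible. Below is a hedged sketch of the equivalent client call; the `workflow_id` and `status` attribute names on the returned objects are assumptions, not something this diff shows.

```python
# Page through workflow history with the new offset/sort_desc arguments.
# The workflow_id/status attribute names below are assumptions about the
# objects list_workflows returns; the rest mirrors the CLI call in this diff.
import os

from dbos._client import DBOSClient

client = DBOSClient(database_url=os.environ["DBOS_DATABASE_URL"])

page_size = 20
for page in range(3):
    rows = client.list_workflows(
        limit=page_size,
        offset=page * page_size,
        sort_desc=True,
    )
    if not rows:
        break
    for wf in rows:
        print(wf.workflow_id, wf.status)  # assumed attribute names
```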
@@ -348,28 +401,39 @@
 @workflow.command(help="Retrieve the status of a workflow")
 def get(
     workflow_id: Annotated[str, typer.Argument()],
-
-
-        typer.Option(
-
+    db_url: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--db-url",
+            "-D",
+            help="Your DBOS application database URL",
+        ),
+    ] = None,
 ) -> None:
-
-
-
-
+    status = (
+        start_client(db_url=db_url)
+        .retrieve_workflow(workflow_id=workflow_id)
+        .get_status()
     )
+    print(jsonpickle.encode(status, unpicklable=False))
 
 
 @workflow.command(help="List the steps of a workflow")
 def steps(
     workflow_id: Annotated[str, typer.Argument()],
+    db_url: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--db-url",
+            "-D",
+            help="Your DBOS application database URL",
+        ),
+    ] = None,
 ) -> None:
-    config = load_config(silent=True)
-    sys_db = SystemDatabase(config["database"])
-    app_db = ApplicationDatabase(config["database"])
     print(
         jsonpickle.encode(
-            list_workflow_steps(
+            start_client(db_url=db_url).list_workflow_steps(workflow_id=workflow_id),
+            unpicklable=False,
         )
     )
 
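The `get` and `steps` commands above reduce to two client calls plus `jsonpickle` for plain-JSON output. A small self-contained sketch of both, with a placeholder workflow ID:

```python
# Sketch of the client calls behind `dbos workflow get` and `dbos workflow steps`.
# The workflow ID is a placeholder; output encoding mirrors the CLI's jsonpickle use.
import os

import jsonpickle  # type: ignore

from dbos._client import DBOSClient

client = DBOSClient(database_url=os.environ["DBOS_DATABASE_URL"])
workflow_id = "example-workflow-id"  # placeholder

# `get`: fetch a handle for the workflow, then dump its status as plain JSON.
status = client.retrieve_workflow(workflow_id=workflow_id).get_status()
print(jsonpickle.encode(status, unpicklable=False))

# `steps`: list the recorded steps of the same workflow.
steps = client.list_workflow_steps(workflow_id=workflow_id)
print(jsonpickle.encode(steps, unpicklable=False))
```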
@@ -378,119 +442,94 @@ def steps(
     help="Cancel a workflow so it is no longer automatically retried or restarted"
 )
 def cancel(
-
-
+    workflow_id: Annotated[str, typer.Argument()],
+    db_url: Annotated[
         typing.Optional[str],
-        typer.Option(
-
-
-
-
-    ] =
+        typer.Option(
+            "--db-url",
+            "-D",
+            help="Your DBOS application database URL",
+        ),
+    ] = None,
 ) -> None:
-
-        f"http://{host}:{port}/workflows/{uuid}/cancel", json=[], timeout=5
-    )
-
-    if response.status_code == 204:
-        print(f"Workflow {uuid} has been cancelled")
-    else:
-        print(f"Failed to cancel workflow {uuid}. Status code: {response.status_code}")
+    start_client(db_url=db_url).cancel_workflow(workflow_id=workflow_id)
 
 
 @workflow.command(help="Resume a workflow that has been cancelled")
 def resume(
-
-
+    workflow_id: Annotated[str, typer.Argument()],
+    db_url: Annotated[
         typing.Optional[str],
-        typer.Option(
-
-
-
-
-    ] =
+        typer.Option(
+            "--db-url",
+            "-D",
+            help="Your DBOS application database URL",
+        ),
+    ] = None,
 ) -> None:
-
-        f"http://{host}:{port}/workflows/{uuid}/resume", json=[], timeout=5
-    )
-
-    if response.status_code == 204:
-        print(f"Workflow {uuid} has been resumed")
-    else:
-        print(f"Failed to resume workflow {uuid}. Status code: {response.status_code}")
+    start_client(db_url=db_url).resume_workflow(workflow_id=workflow_id)
 
 
 @workflow.command(help="Restart a workflow from the beginning with a new id")
 def restart(
-
-
+    workflow_id: Annotated[str, typer.Argument()],
+    db_url: Annotated[
         typing.Optional[str],
-        typer.Option(
-
-
-
-
-    ] =
+        typer.Option(
+            "--db-url",
+            "-D",
+            help="Your DBOS application database URL",
+        ),
+    ] = None,
 ) -> None:
-
-
-
-
+    status = (
+        start_client(db_url=db_url)
+        .fork_workflow(workflow_id=workflow_id, start_step=1)
+        .get_status()
     )
-
-    if response.status_code == 204:
-        print(f"Workflow {uuid} has been restarted")
-    else:
-        error_message = response.json().get("error", "Unknown error")
-        print(
-            f"Failed to restart workflow {uuid}. "
-            f"Status code: {response.status_code}. "
-            f"Error: {error_message}"
-        )
+    print(jsonpickle.encode(status, unpicklable=False))
 
 
 @workflow.command(
     help="fork a workflow from the beginning with a new id and from a step"
 )
 def fork(
-
-    host: Annotated[
-        typing.Optional[str],
-        typer.Option("--host", "-H", help="Specify the admin host"),
-    ] = "localhost",
-    port: Annotated[
-        typing.Optional[int],
-        typer.Option("--port", "-p", help="Specify the admin port"),
-    ] = 3001,
+    workflow_id: Annotated[str, typer.Argument()],
     step: Annotated[
-
+        int,
         typer.Option(
             "--step",
             "-s",
-            help="Restart from this step
+            help="Restart from this step",
         ),
     ] = 1,
+    db_url: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--db-url",
+            "-D",
+            help="Your DBOS application database URL",
+        ),
+    ] = None,
 ) -> None:
-
-
-
-
-        timeout=5,
+    status = (
+        start_client(db_url=db_url)
+        .fork_workflow(workflow_id=workflow_id, start_step=step)
+        .get_status()
     )
-
-    if response.status_code == 204:
-        print(f"Workflow {uuid} has been forked")
-    else:
-        error_message = response.json().get("error", "Unknown error")
-        print(
-            f"Failed to fork workflow {uuid}. "
-            f"Status code: {response.status_code}. "
-            f"Error: {error_message}"
-        )
+    print(jsonpickle.encode(status, unpicklable=False))
 
 
 @queue.command(name="list", help="List enqueued functions for your application")
 def list_queue(
+    db_url: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--db-url",
+            "-D",
+            help="Your DBOS application database URL",
+        ),
+    ] = None,
     limit: Annotated[
         typing.Optional[int],
         typer.Option("--limit", "-l", help="Limit the results returned"),
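The hunk above is the largest behavioral change in the file: `cancel`, `resume`, `restart`, and `fork` no longer POST to the admin server on `--host`/`--port` via `requests`; they call the client directly, and `restart` becomes a fork from step 1. A sketch of the equivalent calls, with a placeholder workflow ID:

```python
# Sketch of the direct client calls that replace the old admin-server HTTP
# requests for cancel/resume/restart/fork. The workflow ID is a placeholder.
import os

from dbos._client import DBOSClient

client = DBOSClient(database_url=os.environ["DBOS_DATABASE_URL"])
workflow_id = "example-workflow-id"  # placeholder

client.cancel_workflow(workflow_id=workflow_id)   # dbos workflow cancel
client.resume_workflow(workflow_id=workflow_id)   # dbos workflow resume

# `restart` is now fork_workflow from step 1; `fork` exposes --step to pick it.
handle = client.fork_workflow(workflow_id=workflow_id, start_step=1)
print(handle.get_status())
```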
@@ -535,21 +574,31 @@ def list_queue(
             help="Retrieve functions on this queue",
         ),
     ] = None,
-
+    sort_desc: Annotated[
         bool,
-        typer.Option(
+        typer.Option(
+            "--sort-desc",
+            "-d",
+            help="Sort the results in descending order (older first)",
+        ),
     ] = False,
+    offset: Annotated[
+        typing.Optional[int],
+        typer.Option(
+            "--offset",
+            "-o",
+            help="Offset for pagination",
+        ),
+    ] = None,
 ) -> None:
-
-    sys_db = SystemDatabase(config["database"])
-    workflows = list_queued_workflows(
-        sys_db=sys_db,
+    workflows = start_client(db_url=db_url).list_queued_workflows(
         limit=limit,
+        offset=offset,
+        sort_desc=sort_desc,
         start_time=start_time,
         end_time=end_time,
         queue_name=queue_name,
         status=status,
-        request=request,
         name=name,
     )
     print(jsonpickle.encode(workflows, unpicklable=False))
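`dbos queue list` follows the same pattern, passing the new `offset` and `sort_desc` arguments to `list_queued_workflows`. A short sketch, with a placeholder queue name:

```python
# Sketch of the call behind `dbos queue list`, with the new offset and
# sort_desc arguments. The queue name is a placeholder.
import os

import jsonpickle  # type: ignore

from dbos._client import DBOSClient

client = DBOSClient(database_url=os.environ["DBOS_DATABASE_URL"])
queued = client.list_queued_workflows(
    limit=10,
    offset=0,
    sort_desc=True,
    queue_name="example_queue",  # placeholder
)
print(jsonpickle.encode(queued, unpicklable=False))
```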
{dbos-0.26.0a25.dist-info → dbos-0.27.0.dist-info}/RECORD
CHANGED

@@ -1,23 +1,23 @@
-dbos-0.
-dbos-0.
-dbos-0.
-dbos-0.
-dbos/__init__.py,sha256
+dbos-0.27.0.dist-info/METADATA,sha256=8OJw5wlcLe6mDes-u8QqQo9A7vVfWDhn-Q1KKBQ5g2w,5551
+dbos-0.27.0.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
+dbos-0.27.0.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
+dbos-0.27.0.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
+dbos/__init__.py,sha256=-FdBlOlr-f2tY__C23J4v22MoCAXqcDN_-zXsJXdoZ0,1005
 dbos/__main__.py,sha256=G7Exn-MhGrVJVDbgNlpzhfh8WMX_72t3_oJaFT9Lmt8,653
-dbos/_admin_server.py,sha256=
-dbos/_app_db.py,sha256=
+dbos/_admin_server.py,sha256=NG0JWQQer9kEslPNAA0dBv-O262sjarz7ZSlv8yird0,9053
+dbos/_app_db.py,sha256=3j8_5-MlSDY0otLRszFE-GfenU6JC20fcfSL-drSNYk,11800
 dbos/_classproperty.py,sha256=f0X-_BySzn3yFDRKB2JpCbLYQ9tLwt1XftfshvY7CBs,626
-dbos/_client.py,sha256=
+dbos/_client.py,sha256=Id-jzAUH6JMN-9WmAGyo0vm-nc0URjNIVwA2iKnCN5Q,13418
 dbos/_conductor/conductor.py,sha256=HYzVL29IMMrs2Mnms_7cHJynCnmmEN5SDQOMjzn3UoU,16840
 dbos/_conductor/protocol.py,sha256=zEKIuOQdIaSduNqfZKpo8PSD9_1oNpKIPnBNCu3RUyE,6681
-dbos/_context.py,sha256=
-dbos/_core.py,sha256=
+dbos/_context.py,sha256=5aJHOjh6-2Zc7Fwzw924Vg0utLEkaR-oBMRdz3cE95k,23680
+dbos/_core.py,sha256=7zhdO-VfZe84wgOzBVsliqO-BI20OzcLTFqvrGyxttw,48425
 dbos/_croniter.py,sha256=XHAyUyibs_59sJQfSNWkP7rqQY6_XrlfuuCxk4jYqek,47559
-dbos/_dbos.py,sha256=
-dbos/_dbos_config.py,sha256=
+dbos/_dbos.py,sha256=ENDQ6Xi4MoKrjXoCRlk1B64yZP7D-MyDUjUlOTRsw9I,48314
+dbos/_dbos_config.py,sha256=L0Z0OOB5FoPM9g-joZqXGeJnlxWQsEUtgPtgtg9Uf48,21732
 dbos/_debug.py,sha256=MNlQVZ6TscGCRQeEEL0VE8Uignvr6dPeDDDefS3xgIE,1823
 dbos/_docker_pg_helper.py,sha256=NmcgqmR5rQA_4igfeqh8ugNT2z3YmoOvuep_MEtxTiY,5854
-dbos/_error.py,sha256=
+dbos/_error.py,sha256=EN4eVBjMT3k7O7hfqJl6mIf4sxWPsiAOM086yhcGH_g,8012
 dbos/_event_loop.py,sha256=NmaLbEQFfEK36S_0KhVD39YdYrGce3qSKCTJ-5RqKQ0,2136
 dbos/_fastapi.py,sha256=PhaKftbApHnjtYEOw0EYna_3K0cmz__J9of7mRJWzu4,3704
 dbos/_flask.py,sha256=DZKUZR5-xOzPI7tYZ53r2PvvHVoAb8SYwLzMVFsVfjI,2608
@@ -27,6 +27,7 @@ dbos/_logger.py,sha256=qv2srteCF2rSRjCK1VGOck3ieIkwUe9Lvbv60mJc16E,4069
 dbos/_migrations/env.py,sha256=38SIGVbmn_VV2x2u1aHLcPOoWgZ84eCymf3g_NljmbU,1626
 dbos/_migrations/script.py.mako,sha256=MEqL-2qATlST9TAOeYgscMn1uy6HUS9NFvDgl93dMj8,635
 dbos/_migrations/versions/04ca4f231047_workflow_queues_executor_id.py,sha256=ICLPl8CN9tQXMsLDsAj8z1TsL831-Z3F8jSBvrR-wyw,736
+dbos/_migrations/versions/27ac6900c6ad_add_queue_dedup.py,sha256=56w1v6TdofW3V18iwm0MP0SAeSaAUPSS40HIcn6qYIE,1072
 dbos/_migrations/versions/50f3227f0b4b_fix_job_queue.py,sha256=ZBYrtTdxy64HxIAlOes89fVIk2P1gNaJack7wuC_epg,873
 dbos/_migrations/versions/5c361fc04708_added_system_tables.py,sha256=Xr9hBDJjkAtymlauOmAy00yUHj0VVUaEz7kNwEM9IwE,6403
 dbos/_migrations/versions/83f3732ae8e7_workflow_timeout.py,sha256=Q_R35pb8AfVI3sg5mzKwyoPfYB88Ychcc8gwxpM9R7A,1035
@@ -36,7 +37,7 @@ dbos/_migrations/versions/d76646551a6c_workflow_queue.py,sha256=G942nophZ2uC2vc4
 dbos/_migrations/versions/eab0cc1d9a14_job_queue.py,sha256=uvhFOtqbBreCePhAxZfIT0qCAI7BiZTou9wt6QnbY7c,1412
 dbos/_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py,sha256=m90Lc5YH0ZISSq1MyxND6oq3RZrZKrIqEsZtwJ1jWxA,1049
 dbos/_outcome.py,sha256=EXxBg4jXCVJsByDQ1VOCIedmbeq_03S6d-p1vqQrLFU,6810
-dbos/_queue.py,sha256=
+dbos/_queue.py,sha256=aKCGahWBGJOLOv5PCOOId96Va3YQ4ICuHWXy-eQXohE,3526
 dbos/_recovery.py,sha256=98Py7icfytyIELJ54gIsdvmURBvTb0HmWaxEAuYL0dc,2546
 dbos/_registrations.py,sha256=EZzG3ZfYmWA2bHX2hpnSIQ3PTi3-cXsvbcmXjyOusMk,7302
 dbos/_request.py,sha256=cX1B3Atlh160phgS35gF1VEEV4pD126c9F3BDgBmxZU,929
@@ -44,9 +45,9 @@ dbos/_roles.py,sha256=iOsgmIAf1XVzxs3gYWdGRe1B880YfOw5fpU7Jwx8_A8,2271
 dbos/_scheduler.py,sha256=SR1oRZRcVzYsj-JauV2LA8JtwTkt8mru7qf6H1AzQ1U,2027
 dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_schemas/application_database.py,sha256=SypAS9l9EsaBHFn9FR8jmnqt01M74d9AF1AMa4m2hhI,1040
-dbos/_schemas/system_database.py,sha256=
-dbos/_serialization.py,sha256=
-dbos/_sys_db.py,sha256=
+dbos/_schemas/system_database.py,sha256=wLqrhApNqrwZC1SdUxi_ca0y_66WzKaaBOxvND4_bdg,5738
+dbos/_serialization.py,sha256=bWuwhXSQcGmiazvhJHA5gwhrRWxtmFmcCFQSDJnqqkU,3666
+dbos/_sys_db.py,sha256=caIbhOwAnfugGzhnJ5rOG2V_bXphD9tJ4Un37gnG47A,82281
 dbos/_templates/dbos-db-starter/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
 dbos/_templates/dbos-db-starter/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_templates/dbos-db-starter/__package/main.py,sha256=nJMN3ZD2lmwg4Dcgmiwqc-tQGuCJuJal2Xl85iA277U,2453
@@ -57,13 +58,13 @@ dbos/_templates/dbos-db-starter/migrations/env.py.dbos,sha256=GUV6sjkDzf9Vl6wkGE
 dbos/_templates/dbos-db-starter/migrations/script.py.mako,sha256=MEqL-2qATlST9TAOeYgscMn1uy6HUS9NFvDgl93dMj8,635
 dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py,sha256=MpS7LGaJS0CpvsjhfDkp9EJqvMvVCjRPfUp4c0aE2ys,941
 dbos/_templates/dbos-db-starter/start_postgres_docker.py,sha256=lQVLlYO5YkhGPEgPqwGc7Y8uDKse9HsWv5fynJEFJHM,1681
-dbos/_tracer.py,sha256=
+dbos/_tracer.py,sha256=yN6GRDKu_1p-EqtQLNarMocPfga2ZuqpzStzzSPYhzo,2732
 dbos/_utils.py,sha256=nFRUHzVjXG5AusF85AlYHikj63Tzi-kQm992ihsrAxA,201
-dbos/_workflow_commands.py,sha256=
+dbos/_workflow_commands.py,sha256=7_f8-w0MbS1gqC5v68EwzbUtomVM0lLebozpHxXmRYg,3982
 dbos/cli/_github_init.py,sha256=Y_bDF9gfO2jB1id4FV5h1oIxEJRWyqVjhb7bNEa5nQ0,3224
 dbos/cli/_template_init.py,sha256=-WW3kbq0W_Tq4WbMqb1UGJG3xvJb3woEY5VspG95Srk,2857
-dbos/cli/cli.py,sha256=
+dbos/cli/cli.py,sha256=a3rUrHog5-e22KjjUPOuTjH20PmUgSP0amRpMd6LVJE,18882
 dbos/dbos-config.schema.json,sha256=8KcwJb_sQc4-6tQG2TLmjE_nratfrQa0qVLl9XPsvWE,6367
 dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
 version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
-dbos-0.
+dbos-0.27.0.dist-info/RECORD,,
File without changes
File without changes
File without changes