dbos 1.14.0a9__py3-none-any.whl → 1.15.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dbos has been flagged as potentially problematic; see the registry's advisory page for details.

Files changed (45):
  1. dbos/_client.py +13 -14
  2. dbos/_context.py +12 -6
  3. dbos/_core.py +2 -7
  4. dbos/_dbos.py +5 -13
  5. dbos/_dbos_config.py +17 -29
  6. dbos/_debouncer.py +1 -7
  7. dbos/_debug.py +0 -8
  8. dbos/_docker_pg_helper.py +93 -51
  9. dbos/_fastapi.py +5 -1
  10. dbos/_logger.py +18 -21
  11. dbos/_migration.py +4 -41
  12. dbos/_serialization.py +19 -30
  13. dbos/_sys_db_postgres.py +2 -9
  14. dbos/_templates/dbos-db-starter/migrations/create_table.py.dbos +34 -0
  15. dbos/_tracer.py +42 -31
  16. dbos/cli/_github_init.py +22 -16
  17. dbos/cli/_template_init.py +5 -16
  18. dbos/cli/cli.py +20 -28
  19. {dbos-1.14.0a9.dist-info → dbos-1.15.0a1.dist-info}/METADATA +8 -16
  20. dbos-1.15.0a1.dist-info/RECORD +59 -0
  21. dbos/_alembic_migrations/env.py +0 -62
  22. dbos/_alembic_migrations/script.py.mako +0 -26
  23. dbos/_alembic_migrations/versions/01ce9f07bd10_streaming.py +0 -42
  24. dbos/_alembic_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -34
  25. dbos/_alembic_migrations/versions/27ac6900c6ad_add_queue_dedup.py +0 -45
  26. dbos/_alembic_migrations/versions/471b60d64126_dbos_migrations.py +0 -35
  27. dbos/_alembic_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -35
  28. dbos/_alembic_migrations/versions/5c361fc04708_added_system_tables.py +0 -193
  29. dbos/_alembic_migrations/versions/66478e1b95e5_consolidate_queues.py +0 -71
  30. dbos/_alembic_migrations/versions/83f3732ae8e7_workflow_timeout.py +0 -44
  31. dbos/_alembic_migrations/versions/933e86bdac6a_add_queue_priority.py +0 -35
  32. dbos/_alembic_migrations/versions/a3b18ad34abe_added_triggers.py +0 -72
  33. dbos/_alembic_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -43
  34. dbos/_alembic_migrations/versions/d76646551a6c_workflow_queue.py +0 -28
  35. dbos/_alembic_migrations/versions/d994145b47b6_consolidate_inputs.py +0 -30
  36. dbos/_alembic_migrations/versions/eab0cc1d9a14_job_queue.py +0 -56
  37. dbos/_alembic_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py +0 -46
  38. dbos/_templates/dbos-db-starter/alembic.ini +0 -116
  39. dbos/_templates/dbos-db-starter/migrations/env.py.dbos +0 -85
  40. dbos/_templates/dbos-db-starter/migrations/script.py.mako +0 -26
  41. dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py +0 -35
  42. dbos-1.14.0a9.dist-info/RECORD +0 -79
  43. {dbos-1.14.0a9.dist-info → dbos-1.15.0a1.dist-info}/WHEEL +0 -0
  44. {dbos-1.14.0a9.dist-info → dbos-1.15.0a1.dist-info}/entry_points.txt +0 -0
  45. {dbos-1.14.0a9.dist-info → dbos-1.15.0a1.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,59 @@
1
+ dbos-1.15.0a1.dist-info/METADATA,sha256=554w5m_z7LQ0LxzPVltRg_qEfahaA1NPkLDLv9g6GMY,13021
2
+ dbos-1.15.0a1.dist-info/WHEEL,sha256=9P2ygRxDrTJz3gsagc0Z96ukrxjr-LFBGOgv3AuKlCA,90
3
+ dbos-1.15.0a1.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
4
+ dbos-1.15.0a1.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
5
+ dbos/__init__.py,sha256=pT4BuNLDCrIQX27vQG8NlfxX6PZRU7r9miq4thJTszU,982
6
+ dbos/__main__.py,sha256=G7Exn-MhGrVJVDbgNlpzhfh8WMX_72t3_oJaFT9Lmt8,653
7
+ dbos/_admin_server.py,sha256=e8ELhcDWqR3_PNobnNgUvLGh5lzZq0yFSF6dvtzoQRI,16267
8
+ dbos/_app_db.py,sha256=GsV-uYU0QsChWwQDxnrh8_iiZ_zMQB-bsP2jPGIe2aM,16094
9
+ dbos/_classproperty.py,sha256=f0X-_BySzn3yFDRKB2JpCbLYQ9tLwt1XftfshvY7CBs,626
10
+ dbos/_client.py,sha256=XW5EQltqkwRgTnHq3avhuBuOTYSZkERrk9GMpqX8kfM,18859
11
+ dbos/_conductor/conductor.py,sha256=3E_hL3c9g9yWqKZkvI6KA0-ZzPMPRo06TOzT1esMiek,24114
12
+ dbos/_conductor/protocol.py,sha256=q3rgLxINFtWFigdOONc-4gX4vn66UmMlJQD6Kj8LnL4,7420
13
+ dbos/_context.py,sha256=cJDxVbswTLXKE5MV4Hmg6gpIX3Dd5mBTG-4lmofWP9E,27668
14
+ dbos/_core.py,sha256=Eec_XWwewuwyaJiQwJMv7bWBWQoC3QWXDJsm2hzUVdI,50342
15
+ dbos/_croniter.py,sha256=XHAyUyibs_59sJQfSNWkP7rqQY6_XrlfuuCxk4jYqek,47559
16
+ dbos/_dbos.py,sha256=dMJvLCCPqF31oqxgVF7ydcOgFmy4jRI5ky8AdMReUCQ,57987
17
+ dbos/_dbos_config.py,sha256=B1y1hrWJc8UIoWbCqqBRGnc3bainLPp-TC6x1phInk0,25320
18
+ dbos/_debouncer.py,sha256=VmGq1_ZIQ79fnH14LEhdoqxKWp6rlEwzsUwumwAMgTQ,15095
19
+ dbos/_debug.py,sha256=0MfgNqutCUhI4PEmmra9x7f3DiFE_0nscfUCHdLimEY,1415
20
+ dbos/_docker_pg_helper.py,sha256=xySum4hTA8TVMBODoG19u4cXQAB1vCock-jwM2pnmSI,7791
21
+ dbos/_error.py,sha256=GwO0Ng4d4iB52brY09-Ss6Cz_V28Xc0D0cRCzZ6XmNM,8688
22
+ dbos/_event_loop.py,sha256=cvaFN9-II3MsHEOq8QoICc_8qSKrjikMlLfuhC3Y8Dk,2923
23
+ dbos/_fastapi.py,sha256=toYYfbe2aui2aHw0021PoXi2dKlI6NzO3M3pHB0dHOk,3421
24
+ dbos/_flask.py,sha256=Npnakt-a3W5OykONFRkDRnumaDhTQmA0NPdUCGRYKXE,1652
25
+ dbos/_kafka.py,sha256=Gm4fHWl7gYb-i5BMvwNwm5Km3z8zQpseqdMgqgFjlGI,4252
26
+ dbos/_kafka_message.py,sha256=NYvOXNG3Qn7bghn1pv3fg4Pbs86ILZGcK4IB-MLUNu0,409
27
+ dbos/_logger.py,sha256=djnCp147QoQ1iG9Bt3Uz8RyGaXGmi6gebccXsrA6Cps,4660
28
+ dbos/_migration.py,sha256=LgxWPtXqRRwjvS5CrSvQ81B_UzLvRNWd4fnQ_Wo-gek,9507
29
+ dbos/_outcome.py,sha256=7HvosMfEHTh1U5P6xok7kFTGLwa2lPaul0YApb3UnN4,8191
30
+ dbos/_queue.py,sha256=0kJTPwXy3nZ4Epzt-lHky9M9S4L31645drPGFR8fIJY,4854
31
+ dbos/_recovery.py,sha256=K-wlFhdf4yGRm6cUzyhcTjQUS0xp2T5rdNMLiiBErYg,2882
32
+ dbos/_registrations.py,sha256=bEOntObnWaBylnebr5ZpcX2hk7OVLDd1z4BvW4_y3zA,7380
33
+ dbos/_roles.py,sha256=kCuhhg8XLtrHCgKgm44I0abIRTGHltf88OwjEKAUggk,2317
34
+ dbos/_scheduler.py,sha256=CWeGVfl9h51VXfxt80y5Da_5pE8SPty_AYkfpJkkMxQ,2117
35
+ dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
36
+ dbos/_schemas/application_database.py,sha256=SypAS9l9EsaBHFn9FR8jmnqt01M74d9AF1AMa4m2hhI,1040
37
+ dbos/_schemas/system_database.py,sha256=-dAKk-_Y3vzbpLT4ei-sIrBQgFyQiwPj1enZb1TYc8I,4943
38
+ dbos/_serialization.py,sha256=GLgWLtHpvk7nSHyXukVQLE1ASNA3CJBtfF8w6iflBDw,3590
39
+ dbos/_sys_db.py,sha256=SspVk-wYmE6xZLuyYQUclwh_AMjnkDXcog5g5WmYn7c,83036
40
+ dbos/_sys_db_postgres.py,sha256=CcvxWzoByEvCZ2P_P-KNBRcyJ_8vSpCjtHBRmc7l5hI,7324
41
+ dbos/_sys_db_sqlite.py,sha256=xT9l-czMhLmfuu5UcnBzAyUxSFgzt3XtEWx9t_D8mZs,7361
42
+ dbos/_templates/dbos-db-starter/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
43
+ dbos/_templates/dbos-db-starter/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
44
+ dbos/_templates/dbos-db-starter/__package/main.py.dbos,sha256=aQnBPSSQpkB8ERfhf7gB7P9tsU6OPKhZscfeh0yiaD8,2702
45
+ dbos/_templates/dbos-db-starter/__package/schema.py,sha256=7Z27JGC8yy7Z44cbVXIREYxtUhU4JVkLCp5Q7UahVQ0,260
46
+ dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos,sha256=0wPktElM7kMB3OPHTXw4xBk9bgGKMqOHrrr7x_R23Z8,446
47
+ dbos/_templates/dbos-db-starter/migrations/create_table.py.dbos,sha256=pVm2Q0AsxS8pg85llbrXFD6jMccMqGjhGRjTEvS-hXk,942
48
+ dbos/_templates/dbos-db-starter/start_postgres_docker.py,sha256=lQVLlYO5YkhGPEgPqwGc7Y8uDKse9HsWv5fynJEFJHM,1681
49
+ dbos/_tracer.py,sha256=PHbD7iTEkHk7z4B9hc-wPgi2dPTeI1rhZgLI33TQEeM,3786
50
+ dbos/_utils.py,sha256=ZdoM1MDbHnlJrh31zfhp3iX62bAxK1kyvMwXnltC_84,1779
51
+ dbos/_workflow_commands.py,sha256=EmmAaQfRWeOZm_WPTznuU-O3he3jiSzzT9VpYrhxugE,4835
52
+ dbos/cli/_github_init.py,sha256=R_94Fnn40CAmPy-zM00lwHi0ndyfv57TmIooADjmag4,3378
53
+ dbos/cli/_template_init.py,sha256=AltKk256VocgvxLpuTxpjJyACrdHFjbGoqYhHzeLae4,2649
54
+ dbos/cli/cli.py,sha256=-yoFUGzwPiAJlDfCOWr2TfnVw5LvMGqDKDK-ri-HPzw,26631
55
+ dbos/cli/migration.py,sha256=5GiyagLZkyVvDz3StYxtFdkFoKFCmh6eSXjzsIGhZ_A,3330
56
+ dbos/dbos-config.schema.json,sha256=LyUT1DOTaAwOP6suxQGS5KemVIqXGPyu_q7Hbo0neA8,6192
57
+ dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
58
+ version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
59
+ dbos-1.15.0a1.dist-info/RECORD,,
@@ -1,62 +0,0 @@
1
- from alembic import context
2
- from sqlalchemy import engine_from_config, pool
3
-
4
- # this is the Alembic Config object, which provides
5
- # access to the values within the .ini file in use.
6
- config = context.config
7
-
8
- # add your model's MetaData object here
9
- # for 'autogenerate' support
10
- target_metadata = None
11
-
12
-
13
- def run_migrations_offline() -> None:
14
- """
15
- Run migrations in 'offline' mode.
16
-
17
- This configures the context with just a URL
18
- and not an Engine, though an Engine is acceptable
19
- here as well. By skipping the Engine creation
20
- we don't even need a DBAPI to be available.
21
-
22
- Calls to context.execute() here emit the given string to the
23
- script output.
24
- """
25
-
26
- url = config.get_main_option("sqlalchemy.url")
27
- context.configure(
28
- url=url,
29
- target_metadata=target_metadata,
30
- literal_binds=True,
31
- dialect_opts={"paramstyle": "named"},
32
- )
33
-
34
- with context.begin_transaction():
35
- context.run_migrations()
36
-
37
-
38
- def run_migrations_online() -> None:
39
- """
40
- Run migrations in 'online' mode.
41
-
42
- In this scenario we need to create an Engine
43
- and associate a connection with the context.
44
- """
45
-
46
- connectable = engine_from_config(
47
- config.get_section(config.config_ini_section, {}),
48
- prefix="sqlalchemy.",
49
- poolclass=pool.NullPool,
50
- )
51
-
52
- with connectable.connect() as connection:
53
- context.configure(connection=connection, target_metadata=target_metadata)
54
-
55
- with context.begin_transaction():
56
- context.run_migrations()
57
-
58
-
59
- if context.is_offline_mode():
60
- run_migrations_offline()
61
- else:
62
- run_migrations_online()
@@ -1,26 +0,0 @@
1
- """${message}
2
-
3
- Revision ID: ${up_revision}
4
- Revises: ${down_revision | comma,n}
5
- Create Date: ${create_date}
6
-
7
- """
8
- from typing import Sequence, Union
9
-
10
- from alembic import op
11
- import sqlalchemy as sa
12
- ${imports if imports else ""}
13
-
14
- # revision identifiers, used by Alembic.
15
- revision: str = ${repr(up_revision)}
16
- down_revision: Union[str, None] = ${repr(down_revision)}
17
- branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
18
- depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
19
-
20
-
21
- def upgrade() -> None:
22
- ${upgrades if upgrades else "pass"}
23
-
24
-
25
- def downgrade() -> None:
26
- ${downgrades if downgrades else "pass"}
@@ -1,42 +0,0 @@
1
- """streaming
2
-
3
- Revision ID: 01ce9f07bd10
4
- Revises: d994145b47b6
5
- Create Date: 2025-08-05 10:20:46.424975
6
-
7
- """
8
-
9
- from typing import Sequence, Union
10
-
11
- import sqlalchemy as sa
12
- from alembic import op
13
-
14
- # revision identifiers, used by Alembic.
15
- revision: str = "01ce9f07bd10"
16
- down_revision: Union[str, None] = "d994145b47b6"
17
- branch_labels: Union[str, Sequence[str], None] = None
18
- depends_on: Union[str, Sequence[str], None] = None
19
-
20
-
21
- def upgrade() -> None:
22
- # Create streams table
23
- op.create_table(
24
- "streams",
25
- sa.Column("workflow_uuid", sa.Text(), nullable=False),
26
- sa.Column("key", sa.Text(), nullable=False),
27
- sa.Column("value", sa.Text(), nullable=False),
28
- sa.Column("offset", sa.Integer(), nullable=False),
29
- sa.ForeignKeyConstraint(
30
- ["workflow_uuid"],
31
- ["dbos.workflow_status.workflow_uuid"],
32
- onupdate="CASCADE",
33
- ondelete="CASCADE",
34
- ),
35
- sa.PrimaryKeyConstraint("workflow_uuid", "key", "offset"),
36
- schema="dbos",
37
- )
38
-
39
-
40
- def downgrade() -> None:
41
- # Drop streams table
42
- op.drop_table("streams", schema="dbos")
@@ -1,34 +0,0 @@
1
- """workflow_queues_executor_id
2
-
3
- Revision ID: 04ca4f231047
4
- Revises: d76646551a6c
5
- Create Date: 2025-01-15 15:05:08.043190
6
-
7
- """
8
-
9
- from typing import Sequence, Union
10
-
11
- import sqlalchemy as sa
12
- from alembic import op
13
-
14
- # revision identifiers, used by Alembic.
15
- revision: str = "04ca4f231047"
16
- down_revision: Union[str, None] = "d76646551a6c"
17
- branch_labels: Union[str, Sequence[str], None] = None
18
- depends_on: Union[str, Sequence[str], None] = None
19
-
20
-
21
- def upgrade() -> None:
22
- op.add_column(
23
- "workflow_queue",
24
- sa.Column(
25
- "executor_id",
26
- sa.Text(),
27
- nullable=True,
28
- ),
29
- schema="dbos",
30
- )
31
-
32
-
33
- def downgrade() -> None:
34
- op.drop_column("workflow_queue", "executor_id", schema="dbos")
@@ -1,45 +0,0 @@
1
- """add queue dedup
2
-
3
- Revision ID: 27ac6900c6ad
4
- Revises: 83f3732ae8e7
5
- Create Date: 2025-04-23 16:18:48.530047
6
-
7
- """
8
-
9
- from typing import Sequence, Union
10
-
11
- import sqlalchemy as sa
12
- from alembic import op
13
-
14
- # revision identifiers, used by Alembic.
15
- revision: str = "27ac6900c6ad"
16
- down_revision: Union[str, None] = "83f3732ae8e7"
17
- branch_labels: Union[str, Sequence[str], None] = None
18
- depends_on: Union[str, Sequence[str], None] = None
19
-
20
-
21
- def upgrade() -> None:
22
- op.add_column(
23
- "workflow_queue",
24
- sa.Column(
25
- "deduplication_id",
26
- sa.Text(),
27
- nullable=True,
28
- ),
29
- schema="dbos",
30
- )
31
-
32
- # Unique constraint for queue_name, deduplication_id
33
- op.create_unique_constraint(
34
- "uq_workflow_queue_name_dedup_id",
35
- "workflow_queue",
36
- ["queue_name", "deduplication_id"],
37
- schema="dbos",
38
- )
39
-
40
-
41
- def downgrade() -> None:
42
- op.drop_constraint(
43
- "uq_workflow_queue_name_dedup_id", "workflow_queue", schema="dbos"
44
- )
45
- op.drop_column("workflow_queue", "deduplication_id", schema="dbos")
@@ -1,35 +0,0 @@
1
- """dbos_migrations
2
-
3
- Revision ID: 471b60d64126
4
- Revises: 01ce9f07bd10
5
- Create Date: 2025-08-21 14:22:31.455266
6
-
7
- """
8
-
9
- from typing import Sequence, Union
10
-
11
- import sqlalchemy as sa
12
- from alembic import op
13
-
14
- # revision identifiers, used by Alembic.
15
- revision: str = "471b60d64126"
16
- down_revision: Union[str, None] = "01ce9f07bd10"
17
- branch_labels: Union[str, Sequence[str], None] = None
18
- depends_on: Union[str, Sequence[str], None] = None
19
-
20
-
21
- def upgrade() -> None:
22
- # Create dbos_migrations table
23
- op.create_table(
24
- "dbos_migrations",
25
- sa.Column("version", sa.BigInteger(), nullable=False),
26
- sa.PrimaryKeyConstraint("version"),
27
- schema="dbos",
28
- )
29
-
30
- # Insert initial version 1
31
- op.execute("INSERT INTO dbos.dbos_migrations (version) VALUES (1)")
32
-
33
-
34
- def downgrade() -> None:
35
- op.drop_table("dbos_migrations", schema="dbos")
@@ -1,35 +0,0 @@
1
- """
2
- Fix job queue PK.
3
-
4
- Revision ID: 50f3227f0b4b
5
- Revises: eab0cc1d9a14
6
- Create Date: 2024-09-25 14:03:53.308068
7
-
8
- """
9
-
10
- from typing import Sequence, Union
11
-
12
- from alembic import op
13
-
14
- # revision identifiers, used by Alembic.
15
- revision: str = "50f3227f0b4b"
16
- down_revision: Union[str, None] = "eab0cc1d9a14"
17
- branch_labels: Union[str, Sequence[str], None] = None
18
- depends_on: Union[str, Sequence[str], None] = None
19
-
20
-
21
- def upgrade() -> None:
22
- op.drop_constraint("job_queue_pkey", "job_queue", schema="dbos", type_="primary")
23
-
24
- op.create_primary_key(
25
- "job_queue_pkey", "job_queue", ["workflow_uuid"], schema="dbos"
26
- )
27
-
28
-
29
- def downgrade() -> None:
30
- # Reverting the changes
31
- op.drop_constraint("job_queue_pkey", "job_queue", schema="dbos", type_="primary")
32
-
33
- op.create_primary_key(
34
- "job_queue_pkey", "job_queue", ["created_at_epoch_ms"], schema="dbos"
35
- )
@@ -1,193 +0,0 @@
1
- """
2
- Add system tables.
3
-
4
- Revision ID: 5c361fc04708
5
- Revises:
6
- Create Date: 2024-07-21 13:06:13.724602
7
- # mypy: allow-untyped-defs, allow-untyped-calls
8
- """
9
-
10
- from typing import Sequence, Union
11
-
12
- import sqlalchemy as sa
13
- from alembic import op
14
-
15
- # revision identifiers, used by Alembic.
16
- revision: str = "5c361fc04708"
17
- down_revision: Union[str, None] = None
18
- branch_labels: Union[str, Sequence[str], None] = None
19
- depends_on: Union[str, Sequence[str], None] = None
20
-
21
-
22
- def upgrade() -> None:
23
- op.execute(sa.schema.CreateSchema(name="dbos", if_not_exists=True))
24
- op.execute('CREATE EXTENSION IF NOT EXISTS "uuid-ossp";')
25
-
26
- # ### commands auto generated by Alembic - please adjust! ###
27
- op.create_table(
28
- "scheduler_state",
29
- sa.Column("workflow_fn_name", sa.Text(), nullable=False),
30
- sa.Column("last_run_time", sa.BigInteger(), nullable=False),
31
- sa.PrimaryKeyConstraint("workflow_fn_name"),
32
- schema="dbos",
33
- )
34
- op.create_table(
35
- "workflow_status",
36
- sa.Column("workflow_uuid", sa.Text(), nullable=False),
37
- sa.Column("status", sa.Text(), nullable=True),
38
- sa.Column("name", sa.Text(), nullable=True),
39
- sa.Column("authenticated_user", sa.Text(), nullable=True),
40
- sa.Column("assumed_role", sa.Text(), nullable=True),
41
- sa.Column("authenticated_roles", sa.Text(), nullable=True),
42
- sa.Column("request", sa.Text(), nullable=True),
43
- sa.Column("output", sa.Text(), nullable=True),
44
- sa.Column("error", sa.Text(), nullable=True),
45
- sa.Column("executor_id", sa.Text(), nullable=True),
46
- sa.Column(
47
- "created_at",
48
- sa.BigInteger(),
49
- server_default=sa.text(
50
- "(EXTRACT(epoch FROM now()) * 1000::numeric)::bigint"
51
- ),
52
- nullable=False,
53
- ),
54
- sa.Column(
55
- "updated_at",
56
- sa.BigInteger(),
57
- server_default=sa.text(
58
- "(EXTRACT(epoch FROM now()) * 1000::numeric)::bigint"
59
- ),
60
- nullable=False,
61
- ),
62
- sa.Column("application_version", sa.Text(), nullable=True),
63
- sa.Column("application_id", sa.Text(), nullable=True),
64
- sa.Column(
65
- "class_name",
66
- sa.String(length=255),
67
- server_default=sa.text("NULL"),
68
- nullable=True,
69
- ),
70
- sa.Column(
71
- "config_name",
72
- sa.String(length=255),
73
- server_default=sa.text("NULL"),
74
- nullable=True,
75
- ),
76
- sa.Column(
77
- "recovery_attempts",
78
- sa.BigInteger(),
79
- server_default=sa.text("'0'::bigint"),
80
- nullable=True,
81
- ),
82
- sa.PrimaryKeyConstraint("workflow_uuid"),
83
- schema="dbos",
84
- )
85
- op.create_index(
86
- "workflow_status_created_at_index",
87
- "workflow_status",
88
- ["created_at"],
89
- unique=False,
90
- schema="dbos",
91
- )
92
- op.create_index(
93
- "workflow_status_executor_id_index",
94
- "workflow_status",
95
- ["executor_id"],
96
- unique=False,
97
- schema="dbos",
98
- )
99
- op.create_table(
100
- "notifications",
101
- sa.Column("destination_uuid", sa.Text(), nullable=False),
102
- sa.Column("topic", sa.Text(), nullable=True),
103
- sa.Column("message", sa.Text(), nullable=False),
104
- sa.Column(
105
- "created_at_epoch_ms",
106
- sa.BigInteger(),
107
- server_default=sa.text(
108
- "(EXTRACT(epoch FROM now()) * 1000::numeric)::bigint"
109
- ),
110
- nullable=False,
111
- ),
112
- sa.Column(
113
- "message_uuid",
114
- sa.Text(),
115
- server_default=sa.text("uuid_generate_v4()"),
116
- nullable=False,
117
- ),
118
- sa.ForeignKeyConstraint(
119
- ["destination_uuid"],
120
- ["dbos.workflow_status.workflow_uuid"],
121
- onupdate="CASCADE",
122
- ondelete="CASCADE",
123
- ),
124
- schema="dbos",
125
- )
126
- op.create_index(
127
- "idx_workflow_topic",
128
- "notifications",
129
- ["destination_uuid", "topic"],
130
- unique=False,
131
- schema="dbos",
132
- )
133
- op.create_table(
134
- "operation_outputs",
135
- sa.Column("workflow_uuid", sa.Text(), nullable=False),
136
- sa.Column("function_id", sa.Integer(), nullable=False),
137
- sa.Column("output", sa.Text(), nullable=True),
138
- sa.Column("error", sa.Text(), nullable=True),
139
- sa.ForeignKeyConstraint(
140
- ["workflow_uuid"],
141
- ["dbos.workflow_status.workflow_uuid"],
142
- onupdate="CASCADE",
143
- ondelete="CASCADE",
144
- ),
145
- sa.PrimaryKeyConstraint("workflow_uuid", "function_id"),
146
- schema="dbos",
147
- )
148
- op.create_table(
149
- "workflow_events",
150
- sa.Column("workflow_uuid", sa.Text(), nullable=False),
151
- sa.Column("key", sa.Text(), nullable=False),
152
- sa.Column("value", sa.Text(), nullable=False),
153
- sa.ForeignKeyConstraint(
154
- ["workflow_uuid"],
155
- ["dbos.workflow_status.workflow_uuid"],
156
- onupdate="CASCADE",
157
- ondelete="CASCADE",
158
- ),
159
- sa.PrimaryKeyConstraint("workflow_uuid", "key"),
160
- schema="dbos",
161
- )
162
- op.create_table(
163
- "workflow_inputs",
164
- sa.Column("workflow_uuid", sa.Text(), nullable=False),
165
- sa.Column("inputs", sa.Text(), nullable=False),
166
- sa.ForeignKeyConstraint(
167
- ["workflow_uuid"],
168
- ["dbos.workflow_status.workflow_uuid"],
169
- onupdate="CASCADE",
170
- ondelete="CASCADE",
171
- ),
172
- sa.PrimaryKeyConstraint("workflow_uuid"),
173
- schema="dbos",
174
- )
175
- # ### end Alembic commands ###
176
-
177
-
178
- def downgrade() -> None:
179
- # ### commands auto generated by Alembic - please adjust! ###
180
- op.drop_table("workflow_inputs", schema="dbos")
181
- op.drop_table("workflow_events", schema="dbos")
182
- op.drop_table("operation_outputs", schema="dbos")
183
- op.drop_index("idx_workflow_topic", table_name="notifications", schema="dbos")
184
- op.drop_table("notifications", schema="dbos")
185
- op.drop_index(
186
- "workflow_status_executor_id_index", table_name="workflow_status", schema="dbos"
187
- )
188
- op.drop_index(
189
- "workflow_status_created_at_index", table_name="workflow_status", schema="dbos"
190
- )
191
- op.drop_table("workflow_status", schema="dbos")
192
- op.drop_table("scheduler_state", schema="dbos")
193
- # ### end Alembic commands ###
@@ -1,71 +0,0 @@
1
- """consolidate_queues
2
-
3
- Revision ID: 66478e1b95e5
4
- Revises: 933e86bdac6a
5
- Create Date: 2025-05-21 10:14:25.674613
6
-
7
- """
8
-
9
- from typing import Sequence, Union
10
-
11
- import sqlalchemy as sa
12
- from alembic import op
13
-
14
- # revision identifiers, used by Alembic.
15
- revision: str = "66478e1b95e5"
16
- down_revision: Union[str, None] = "933e86bdac6a"
17
- branch_labels: Union[str, Sequence[str], None] = None
18
- depends_on: Union[str, Sequence[str], None] = None
19
-
20
-
21
- def upgrade() -> None:
22
- # Add new columns to workflow_status table
23
- op.add_column(
24
- "workflow_status",
25
- sa.Column("started_at_epoch_ms", sa.BigInteger(), nullable=True),
26
- schema="dbos",
27
- )
28
-
29
- op.add_column(
30
- "workflow_status",
31
- sa.Column("deduplication_id", sa.Text(), nullable=True),
32
- schema="dbos",
33
- )
34
-
35
- op.add_column(
36
- "workflow_status",
37
- sa.Column(
38
- "priority", sa.Integer(), nullable=False, server_default=sa.text("'0'::int")
39
- ),
40
- schema="dbos",
41
- )
42
-
43
- # Add unique constraint for deduplication_id
44
- op.create_unique_constraint(
45
- "uq_workflow_status_queue_name_dedup_id",
46
- "workflow_status",
47
- ["queue_name", "deduplication_id"],
48
- schema="dbos",
49
- )
50
-
51
- # Add index on status field
52
- op.create_index(
53
- "workflow_status_status_index", "workflow_status", ["status"], schema="dbos"
54
- )
55
-
56
-
57
- def downgrade() -> None:
58
- # Drop indexes
59
- op.drop_index(
60
- "workflow_status_status_index", table_name="workflow_status", schema="dbos"
61
- )
62
-
63
- # Drop unique constraint
64
- op.drop_constraint(
65
- "uq_workflow_status_queue_name_dedup_id", "workflow_status", schema="dbos"
66
- )
67
-
68
- # Drop columns
69
- op.drop_column("workflow_status", "priority", schema="dbos")
70
- op.drop_column("workflow_status", "deduplication_id", schema="dbos")
71
- op.drop_column("workflow_status", "started_at_epoch_ms", schema="dbos")
@@ -1,44 +0,0 @@
1
- """workflow_timeout
2
-
3
- Revision ID: 83f3732ae8e7
4
- Revises: f4b9b32ba814
5
- Create Date: 2025-04-16 17:05:36.642395
6
-
7
- """
8
-
9
- from typing import Sequence, Union
10
-
11
- import sqlalchemy as sa
12
- from alembic import op
13
-
14
- # revision identifiers, used by Alembic.
15
- revision: str = "83f3732ae8e7"
16
- down_revision: Union[str, None] = "f4b9b32ba814"
17
- branch_labels: Union[str, Sequence[str], None] = None
18
- depends_on: Union[str, Sequence[str], None] = None
19
-
20
-
21
- def upgrade() -> None:
22
- op.add_column(
23
- "workflow_status",
24
- sa.Column(
25
- "workflow_timeout_ms",
26
- sa.BigInteger(),
27
- nullable=True,
28
- ),
29
- schema="dbos",
30
- )
31
- op.add_column(
32
- "workflow_status",
33
- sa.Column(
34
- "workflow_deadline_epoch_ms",
35
- sa.BigInteger(),
36
- nullable=True,
37
- ),
38
- schema="dbos",
39
- )
40
-
41
-
42
- def downgrade() -> None:
43
- op.drop_column("workflow_status", "workflow_deadline_epoch_ms", schema="dbos")
44
- op.drop_column("workflow_status", "workflow_timeout_ms", schema="dbos")
@@ -1,35 +0,0 @@
1
- """add queue priority
2
-
3
- Revision ID: 933e86bdac6a
4
- Revises: 27ac6900c6ad
5
- Create Date: 2025-04-25 18:17:40.462737
6
-
7
- """
8
-
9
- from typing import Sequence, Union
10
-
11
- import sqlalchemy as sa
12
- from alembic import op
13
-
14
- # revision identifiers, used by Alembic.
15
- revision: str = "933e86bdac6a"
16
- down_revision: Union[str, None] = "27ac6900c6ad"
17
- branch_labels: Union[str, Sequence[str], None] = None
18
- depends_on: Union[str, Sequence[str], None] = None
19
-
20
-
21
- def upgrade() -> None:
22
- op.add_column(
23
- "workflow_queue",
24
- sa.Column(
25
- "priority",
26
- sa.Integer(),
27
- nullable=False,
28
- server_default=sa.text("'0'::int"),
29
- ),
30
- schema="dbos",
31
- )
32
-
33
-
34
- def downgrade() -> None:
35
- op.drop_column("workflow_queue", "priority", schema="dbos")