dbos 1.12.0a2__py3-none-any.whl → 1.13.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dbos/_alembic_migrations/versions/471b60d64126_dbos_migrations.py +35 -0
- dbos/_app_db.py +215 -80
- dbos/_client.py +30 -15
- dbos/_context.py +4 -0
- dbos/_core.py +7 -8
- dbos/_dbos.py +28 -18
- dbos/_dbos_config.py +124 -50
- dbos/_fastapi.py +3 -1
- dbos/_logger.py +3 -1
- dbos/_migration.py +322 -0
- dbos/_sys_db.py +122 -200
- dbos/_sys_db_postgres.py +173 -0
- dbos/_sys_db_sqlite.py +182 -0
- dbos/_tracer.py +5 -1
- dbos/_utils.py +10 -1
- dbos/cli/cli.py +238 -100
- dbos/cli/migration.py +2 -2
- dbos/dbos-config.schema.json +4 -0
- {dbos-1.12.0a2.dist-info → dbos-1.13.0.dist-info}/METADATA +1 -1
- dbos-1.13.0.dist-info/RECORD +78 -0
- dbos-1.12.0a2.dist-info/RECORD +0 -74
- /dbos/{_migrations → _alembic_migrations}/env.py +0 -0
- /dbos/{_migrations → _alembic_migrations}/script.py.mako +0 -0
- /dbos/{_migrations → _alembic_migrations}/versions/01ce9f07bd10_streaming.py +0 -0
- /dbos/{_migrations → _alembic_migrations}/versions/04ca4f231047_workflow_queues_executor_id.py +0 -0
- /dbos/{_migrations → _alembic_migrations}/versions/27ac6900c6ad_add_queue_dedup.py +0 -0
- /dbos/{_migrations → _alembic_migrations}/versions/50f3227f0b4b_fix_job_queue.py +0 -0
- /dbos/{_migrations → _alembic_migrations}/versions/5c361fc04708_added_system_tables.py +0 -0
- /dbos/{_migrations → _alembic_migrations}/versions/66478e1b95e5_consolidate_queues.py +0 -0
- /dbos/{_migrations → _alembic_migrations}/versions/83f3732ae8e7_workflow_timeout.py +0 -0
- /dbos/{_migrations → _alembic_migrations}/versions/933e86bdac6a_add_queue_priority.py +0 -0
- /dbos/{_migrations → _alembic_migrations}/versions/a3b18ad34abe_added_triggers.py +0 -0
- /dbos/{_migrations → _alembic_migrations}/versions/d76646551a6b_job_queue_limiter.py +0 -0
- /dbos/{_migrations → _alembic_migrations}/versions/d76646551a6c_workflow_queue.py +0 -0
- /dbos/{_migrations → _alembic_migrations}/versions/d994145b47b6_consolidate_inputs.py +0 -0
- /dbos/{_migrations → _alembic_migrations}/versions/eab0cc1d9a14_job_queue.py +0 -0
- /dbos/{_migrations → _alembic_migrations}/versions/f4b9b32ba814_functionname_childid_op_outputs.py +0 -0
- {dbos-1.12.0a2.dist-info → dbos-1.13.0.dist-info}/WHEEL +0 -0
- {dbos-1.12.0a2.dist-info → dbos-1.13.0.dist-info}/entry_points.txt +0 -0
- {dbos-1.12.0a2.dist-info → dbos-1.13.0.dist-info}/licenses/LICENSE +0 -0
dbos/_dbos_config.py
CHANGED
@@ -22,9 +22,10 @@ class DBOSConfig(TypedDict, total=False):
 
     Attributes:
         name (str): Application name
-
-
-
+        system_database_url (str): Connection string for the DBOS system database. Defaults to sqlite:///{name} if not provided.
+        application_database_url (str): Connection string for the DBOS application database, in which DBOS @Transaction functions run. Optional. Should be the same type of database (SQLite or Postgres) as the system database.
+        database_url (str): (DEPRECATED) Database connection string
+        sys_db_name (str): (DEPRECATED) System database name
         sys_db_pool_size (int): System database pool size
         db_engine_kwargs (Dict[str, Any]): SQLAlchemy engine kwargs (See https://docs.sqlalchemy.org/en/20/core/engines.html#sqlalchemy.create_engine)
         log_level (str): Log level
@@ -33,11 +34,15 @@ class DBOSConfig(TypedDict, total=False):
         admin_port (int): Admin port
         run_admin_server (bool): Whether to run the DBOS admin server
         otlp_attributes (dict[str, str]): A set of custom attributes to apply OTLP-exported logs and traces
+        application_version (str): Application version
+        executor_id (str): Executor ID, used to identify the application instance in distributed environments
+        disable_otlp (bool): If True, disables OTLP tracing and logging. Defaults to False.
     """
 
     name: str
-    database_url: Optional[str]
     system_database_url: Optional[str]
+    application_database_url: Optional[str]
+    database_url: Optional[str]
     sys_db_name: Optional[str]
     sys_db_pool_size: Optional[int]
     db_engine_kwargs: Optional[Dict[str, Any]]
@@ -49,6 +54,7 @@ class DBOSConfig(TypedDict, total=False):
     otlp_attributes: Optional[dict[str, str]]
     application_version: Optional[str]
    executor_id: Optional[str]
+    disable_otlp: Optional[bool]
 
 
 class RuntimeConfig(TypedDict, total=False):
@@ -91,6 +97,7 @@ class TelemetryConfig(TypedDict, total=False):
     logs: Optional[LoggerConfig]
     OTLPExporter: Optional[OTLPExporterConfig]
     otlp_attributes: Optional[dict[str, str]]
+    disable_otlp: Optional[bool]
 
 
 class ConfigFile(TypedDict, total=False):
@@ -102,12 +109,12 @@ class ConfigFile(TypedDict, total=False):
 
     Attributes:
         name (str): Application name
-        runtimeConfig (RuntimeConfig): Configuration for
+        runtimeConfig (RuntimeConfig): Configuration for DBOS Cloud
         database (DatabaseConfig): Configure pool sizes, migrate commands
-        database_url (str):
+        database_url (str): Application database URL
+        system_database_url (str): System database URL
         telemetry (TelemetryConfig): Configuration for tracing / logging
-        env (Dict[str,str]): Environment
-        application (Dict[str, Any]): Application-specific configuration section
+        env (Dict[str,str]): Environment variables
 
     """
 
@@ -139,8 +146,12 @@ def translate_dbos_config_to_config_file(config: DBOSConfig) -> ConfigFile:
     if db_config:
         translated_config["database"] = db_config
 
+    # Use application_database_url instead of the deprecated database_url if provided
     if "database_url" in config:
         translated_config["database_url"] = config.get("database_url")
+    elif "application_database_url" in config:
+        translated_config["database_url"] = config.get("application_database_url")
+
     if "system_database_url" in config:
         translated_config["system_database_url"] = config.get("system_database_url")
 
@@ -157,6 +168,7 @@ def translate_dbos_config_to_config_file(config: DBOSConfig) -> ConfigFile:
     telemetry: TelemetryConfig = {
         "OTLPExporter": {"tracesEndpoint": [], "logsEndpoint": []},
         "otlp_attributes": config.get("otlp_attributes", {}),
+        "disable_otlp": config.get("disable_otlp", False),
     }
     # For mypy
     assert telemetry["OTLPExporter"] is not None
@@ -227,7 +239,6 @@ def _substitute_env_vars(content: str, silent: bool = False) -> str:
 def load_config(
     config_file_path: str = DBOS_CONFIG_PATH,
     *,
-    run_process_config: bool = True,
     silent: bool = False,
 ) -> ConfigFile:
     """
@@ -277,8 +288,6 @@ def load_config(
     ]
 
     data = cast(ConfigFile, data)
-    if run_process_config:
-        data = process_config(data=data, silent=silent)
     return data  # type: ignore
 
 
@@ -290,19 +299,12 @@ def process_config(
     """
     If a database_url is provided, pass it as is in the config.
 
-    Else,
+    Else, default to SQLite.
 
     Also build SQL Alchemy "kwargs" base on user input + defaults.
     Specifically, db_engine_kwargs takes precedence over app_db_pool_size
 
     In debug mode, apply overrides from DBOS_DBHOST, DBOS_DBPORT, DBOS_DBUSER, and DBOS_DBPASSWORD.
-
-    Default configuration:
-    - Hostname: localhost
-    - Port: 5432
-    - Username: postgres
-    - Password: $PGPASSWORD
-    - Database name: transformed application name.
     """
 
     if "name" not in data:
@@ -334,20 +336,16 @@ def process_config(
 
     # Ensure database dict exists
     data.setdefault("database", {})
-
-    # Database URL resolution
     connect_timeout = None
-
+
+    # Process the application database URL, if provided
+    if data.get("database_url"):
         # Parse the db string and check required fields
         assert data["database_url"] is not None
         assert is_valid_database_url(data["database_url"])
 
         url = make_url(data["database_url"])
 
-        if not data["database"].get("sys_db_name"):
-            assert url.database is not None
-            data["database"]["sys_db_name"] = url.database + SystemSchema.sysdb_suffix
-
         # Gather connect_timeout from the URL if provided. It should be used in engine kwargs if not provided there (instead of our default)
         connect_timeout_str = url.query.get("connect_timeout")
         if connect_timeout_str is not None:
@@ -372,25 +370,88 @@ def process_config(
                 host=os.getenv("DBOS_DBHOST", url.host),
                 port=port,
             ).render_as_string(hide_password=False)
-
+
+    # Process the system database URL, if provided
+    if data.get("system_database_url"):
+        # Parse the db string and check required fields
+        assert data["system_database_url"]
+        assert is_valid_database_url(data["system_database_url"])
+
+        url = make_url(data["system_database_url"])
+
+        # Gather connect_timeout from the URL if provided. It should be used in engine kwargs if not provided there (instead of our default). This overrides a timeout from the application database, if any.
+        connect_timeout_str = url.query.get("connect_timeout")
+        if connect_timeout_str is not None:
+            assert isinstance(
+                connect_timeout_str, str
+            ), "connect_timeout must be a string and defined once in the URL"
+            if connect_timeout_str.isdigit():
+                connect_timeout = int(connect_timeout_str)
+
+        # In debug mode perform env vars overrides
+        if isDebugMode:
+            # Override the username, password, host, and port
+            port_str = os.getenv("DBOS_DBPORT")
+            port = (
+                int(port_str)
+                if port_str is not None and port_str.isdigit()
+                else url.port
+            )
+            data["system_database_url"] = url.set(
+                username=os.getenv("DBOS_DBUSER", url.username),
+                password=os.getenv("DBOS_DBPASSWORD", url.password),
+                host=os.getenv("DBOS_DBHOST", url.host),
+                port=port,
+            ).render_as_string(hide_password=False)
+
+    # If an application database URL is provided but not the system database URL,
+    # construct the system database URL.
+    if data.get("database_url") and not data.get("system_database_url"):
+        assert data["database_url"]
+        if data["database_url"].startswith("sqlite"):
+            data["system_database_url"] = data["database_url"]
+        else:
+            url = make_url(data["database_url"])
+            assert url.database
+            if data["database"].get("sys_db_name"):
+                url = url.set(database=data["database"]["sys_db_name"])
+            else:
+                url = url.set(database=f"{url.database}{SystemSchema.sysdb_suffix}")
+            data["system_database_url"] = url.render_as_string(hide_password=False)
+
+    # If a system database URL is provided but not an application database URL, set the
+    # application database URL to the system database URL.
+    if data.get("system_database_url") and not data.get("database_url"):
+        assert data["system_database_url"]
+        data["database_url"] = data["system_database_url"]
+
+    # If neither URL is provided, use a default SQLite database URL.
+    if not data.get("database_url") and not data.get("system_database_url"):
         _app_db_name = _app_name_to_db_name(data["name"])
-
-
-            f"postgres://postgres:{_password}@localhost:5432/{_app_db_name}?connect_timeout=10&sslmode=prefer"
+        data["system_database_url"] = data["database_url"] = (
+            f"sqlite:///{_app_db_name}.sqlite"
         )
-    if not data["database"].get("sys_db_name"):
-        data["database"]["sys_db_name"] = _app_db_name + SystemSchema.sysdb_suffix
-    assert data["database_url"] is not None
 
     configure_db_engine_parameters(data["database"], connect_timeout=connect_timeout)
 
-
+    assert data["database_url"] is not None
+    assert data["system_database_url"] is not None
+    # Pretty-print connection information, respecting log level
     if not silent and logs["logLevel"] == "INFO" or logs["logLevel"] == "DEBUG":
-
-
+        printable_sys_db_url = make_url(data["system_database_url"]).render_as_string(
+            hide_password=True
+        )
         print(
-            f"[bold blue]
+            f"[bold blue]DBOS system database URL: {printable_sys_db_url}[/bold blue]"
         )
+        if data["database_url"].startswith("sqlite"):
+            print(
+                f"[bold blue]Using SQLite as a system database. The SQLite system database is for development and testing. PostgreSQL is recommended for production use.[/bold blue]"
+            )
+        else:
+            print(
+                f"[bold blue]Database engine parameters: {data['database']['db_engine_kwargs']}[/bold blue]"
            )
 
     # Return data as ConfigFile type
     return data
@@ -439,6 +500,8 @@ def configure_db_engine_parameters(
 
 
 def is_valid_database_url(database_url: str) -> bool:
+    if database_url.startswith("sqlite"):
+        return True
     url = make_url(database_url)
     required_fields = [
         ("username", "Username must be specified in the connection URL"),
@@ -467,36 +530,34 @@ def _app_name_to_db_name(app_name: str) -> str:
 
 def overwrite_config(provided_config: ConfigFile) -> ConfigFile:
     # Load the DBOS configuration file and force the use of:
-    # 1. The database url provided by DBOS_DATABASE_URL
+    # 1. The application and system database url provided by DBOS_DATABASE_URL and DBOS_SYSTEM_DATABASE_URL
     # 2. OTLP traces endpoints (add the config data to the provided config)
     # 3. Use the application name from the file. This is a defensive measure to ensure the application name is whatever it was registered with in the cloud
     # 4. Remove admin_port is provided in code
     # 5. Remove env vars if provided in code
     # Optimistically assume that expected fields in config_from_file are present
 
-    config_from_file = load_config(
+    config_from_file = load_config()
     # Be defensive
     if config_from_file is None:
         return provided_config
 
-    #
+    # Set the application name to the cloud app name
     provided_config["name"] = config_from_file["name"]
 
-    #
-    if "database" not in provided_config:
-        provided_config["database"] = {}
-    provided_config["database"]["sys_db_name"] = config_from_file["database"][
-        "sys_db_name"
-    ]
-
+    # Use the DBOS Cloud application and system database URLs
    db_url = os.environ.get("DBOS_DATABASE_URL")
     if db_url is None:
         raise DBOSInitializationError(
             "DBOS_DATABASE_URL environment variable is not set. This is required to connect to the database."
         )
     provided_config["database_url"] = db_url
-
-
+    system_db_url = os.environ.get("DBOS_SYSTEM_DATABASE_URL")
+    if system_db_url is None:
+        raise DBOSInitializationError(
+            "DBOS_SYSTEM_DATABASE_URL environment variable is not set. This is required to connect to the database."
+        )
+    provided_config["system_database_url"] = system_db_url
 
     # Telemetry config
     if "telemetry" not in provided_config or provided_config["telemetry"] is None:
@@ -553,8 +614,10 @@ def get_system_database_url(config: ConfigFile) -> str:
         return config["system_database_url"]
     else:
         assert config["database_url"] is not None
+        if config["database_url"].startswith("sqlite"):
+            return config["database_url"]
         app_db_url = make_url(config["database_url"])
-        if config["database"].get("sys_db_name") is not None:
+        if config.get("database") and config["database"].get("sys_db_name") is not None:
            sys_db_name = config["database"]["sys_db_name"]
         else:
             assert app_db_url.database is not None
@@ -562,3 +625,14 @@ def get_system_database_url(config: ConfigFile) -> str:
         return app_db_url.set(database=sys_db_name).render_as_string(
             hide_password=False
         )
+
+
+def get_application_database_url(config: ConfigFile) -> str:
+    # For backwards compatibility, the application database URL is "database_url"
+    if config.get("database_url"):
+        assert config["database_url"]
+        return config["database_url"]
+    else:
+        # If the application database URL is not specified, set it to the system database URL
+        assert config["system_database_url"]
+        return config["system_database_url"]
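
The net effect of the _dbos_config.py changes: process_config now resolves an application database URL and a system database URL from whichever is given, defaulting to SQLite when neither is. A minimal sketch of the resulting behavior (illustrative values; it assumes SystemSchema.sysdb_suffix renders as "_dbos_sys" and that _app_name_to_db_name maps "my-app" to "my_app", neither of which is shown in this diff):

# Sketch of the 1.13.0 URL-resolution rules; values and name transforms are assumptions.
from dbos import DBOS, DBOSConfig

# Case 1: no URL at all -> both URLs default to a local SQLite file named after the app,
# e.g. "sqlite:///my_app.sqlite".
config: DBOSConfig = {"name": "my-app"}

# Case 2: only a Postgres application database URL -> the system database URL is derived
# by appending the system-database suffix to the database name (".../my_app_dbos_sys").
config = {
    "name": "my-app",
    "application_database_url": "postgresql://postgres:dbos@localhost:5432/my_app",
}

# Case 3: only a system database URL -> the application database URL falls back to it.
config = {
    "name": "my-app",
    "system_database_url": "postgresql://postgres:dbos@localhost:5432/my_app_dbos_sys",
}

DBOS(config=config)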
dbos/_fastapi.py
CHANGED
@@ -49,7 +49,7 @@ class LifespanMiddleware:
             if not self.dbos._launched:
                 self.dbos._launch()
         elif message["type"] == "lifespan.shutdown.complete":
-            self.dbos.
+            self.dbos.destroy()
         await send(message)
 
     # Call the original app with our wrapped functions
@@ -83,4 +83,6 @@ def setup_fastapi_middleware(app: FastAPI, dbos: DBOS) -> None:
             response = await call_next(request)
         else:
             response = await call_next(request)
+    if hasattr(response, "status_code"):
+        DBOS.span.set_attribute("responseCode", response.status_code)
     return response
dbos/_logger.py
CHANGED
@@ -77,7 +77,9 @@ def config_logger(config: "ConfigFile") -> None:
     otlp_logs_endpoints = (
         config.get("telemetry", {}).get("OTLPExporter", {}).get("logsEndpoint")  # type: ignore
     )
-
+    disable_otlp = config.get("telemetry", {}).get("disable_otlp", False)  # type: ignore
+
+    if not disable_otlp and otlp_logs_endpoints:
         log_provider = PatchedOTLPLoggerProvider(
             Resource.create(
                 attributes={
dbos/_migration.py
ADDED
@@ -0,0 +1,322 @@
+import logging
+import os
+import re
+import sys
+
+import sqlalchemy as sa
+from alembic import command
+from alembic.config import Config
+
+from ._logger import dbos_logger
+
+
+def ensure_dbos_schema(engine: sa.Engine) -> bool:
+    """
+    True if using DBOS migrations (DBOS schema and migrations table already exist or were created)
+    False if using Alembic migrations (DBOS schema exists, but dbos_migrations table doesn't)
+    """
+    with engine.begin() as conn:
+        # Check if dbos schema exists
+        schema_result = conn.execute(
+            sa.text(
+                "SELECT schema_name FROM information_schema.schemata WHERE schema_name = 'dbos'"
+            )
+        )
+        schema_existed = schema_result.fetchone() is not None
+
+        # Create schema if it doesn't exist
+        if not schema_existed:
+            conn.execute(sa.text("CREATE SCHEMA dbos"))
+
+        # Check if dbos_migrations table exists
+        table_result = conn.execute(
+            sa.text(
+                "SELECT table_name FROM information_schema.tables WHERE table_schema = 'dbos' AND table_name = 'dbos_migrations'"
+            )
+        )
+        table_exists = table_result.fetchone() is not None
+
+        if table_exists:
+            return True
+        elif schema_existed:
+            return False
+        else:
+            conn.execute(
+                sa.text(
+                    "CREATE TABLE dbos.dbos_migrations (version BIGINT NOT NULL PRIMARY KEY)"
+                )
+            )
+            return True
+
+
+def run_alembic_migrations(engine: sa.Engine) -> None:
+    """Run system database schema migrations with Alembic.
+    This is DEPRECATED in favor of DBOS-managed migrations.
+    It is retained only for backwards compatibility and
+    will be removed in the next major version."""
+    # Run a schema migration for the system database
+    migration_dir = os.path.join(
+        os.path.dirname(os.path.realpath(__file__)), "_alembic_migrations"
+    )
+    alembic_cfg = Config()
+    alembic_cfg.set_main_option("script_location", migration_dir)
+    logging.getLogger("alembic").setLevel(logging.WARNING)
+    # Alembic requires the % in URL-escaped parameters to itself be escaped to %%.
+    escaped_conn_string = re.sub(
+        r"%(?=[0-9A-Fa-f]{2})",
+        "%%",
+        engine.url.render_as_string(hide_password=False),
+    )
+    alembic_cfg.set_main_option("sqlalchemy.url", escaped_conn_string)
+    try:
+        command.upgrade(alembic_cfg, "head")
+    except Exception as e:
+        dbos_logger.warning(
+            f"Exception during system database construction. This is most likely because the system database was configured using a later version of DBOS: {e}"
+        )
+
+
+def run_dbos_migrations(engine: sa.Engine) -> None:
+    """Run DBOS-managed migrations by executing each SQL command in dbos_migrations."""
+    with engine.begin() as conn:
+        # Get current migration version
+        result = conn.execute(sa.text("SELECT version FROM dbos.dbos_migrations"))
+        current_version = result.fetchone()
+        last_applied = current_version[0] if current_version else 0
+
+        # Apply migrations starting from the next version
+        for i, migration_sql in enumerate(dbos_migrations, 1):
+            if i <= last_applied:
+                continue
+
+            # Execute the migration
+            dbos_logger.info(f"Applying DBOS system database schema migration {i}")
+            conn.execute(sa.text(migration_sql))
+
+            # Update the single row with the new version
+            if last_applied == 0:
+                conn.execute(
+                    sa.text(
+                        "INSERT INTO dbos.dbos_migrations (version) VALUES (:version)"
+                    ),
+                    {"version": i},
+                )
+            else:
+                conn.execute(
+                    sa.text("UPDATE dbos.dbos_migrations SET version = :version"),
+                    {"version": i},
+                )
+            last_applied = i
+
+
+dbos_migration_one = """
+-- Enable uuid extension for generating UUIDs
+CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
+
+CREATE TABLE dbos.workflow_status (
+    workflow_uuid TEXT PRIMARY KEY,
+    status TEXT,
+    name TEXT,
+    authenticated_user TEXT,
+    assumed_role TEXT,
+    authenticated_roles TEXT,
+    request TEXT,
+    output TEXT,
+    error TEXT,
+    executor_id TEXT,
+    created_at BIGINT NOT NULL DEFAULT (EXTRACT(epoch FROM now()) * 1000::numeric)::bigint,
+    updated_at BIGINT NOT NULL DEFAULT (EXTRACT(epoch FROM now()) * 1000::numeric)::bigint,
+    application_version TEXT,
+    application_id TEXT,
+    class_name VARCHAR(255) DEFAULT NULL,
+    config_name VARCHAR(255) DEFAULT NULL,
+    recovery_attempts BIGINT DEFAULT 0,
+    queue_name TEXT,
+    workflow_timeout_ms BIGINT,
+    workflow_deadline_epoch_ms BIGINT,
+    inputs TEXT,
+    started_at_epoch_ms BIGINT,
+    deduplication_id TEXT,
+    priority INTEGER NOT NULL DEFAULT 0
+);
+
+CREATE INDEX workflow_status_created_at_index ON dbos.workflow_status (created_at);
+CREATE INDEX workflow_status_executor_id_index ON dbos.workflow_status (executor_id);
+CREATE INDEX workflow_status_status_index ON dbos.workflow_status (status);
+
+ALTER TABLE dbos.workflow_status
+    ADD CONSTRAINT uq_workflow_status_queue_name_dedup_id
+    UNIQUE (queue_name, deduplication_id);
+
+CREATE TABLE dbos.operation_outputs (
+    workflow_uuid TEXT NOT NULL,
+    function_id INTEGER NOT NULL,
+    function_name TEXT NOT NULL DEFAULT '',
+    output TEXT,
+    error TEXT,
+    child_workflow_id TEXT,
+    PRIMARY KEY (workflow_uuid, function_id),
+    FOREIGN KEY (workflow_uuid) REFERENCES dbos.workflow_status(workflow_uuid)
+        ON UPDATE CASCADE ON DELETE CASCADE
+);
+
+CREATE TABLE dbos.notifications (
+    destination_uuid TEXT NOT NULL,
+    topic TEXT,
+    message TEXT NOT NULL,
+    created_at_epoch_ms BIGINT NOT NULL DEFAULT (EXTRACT(epoch FROM now()) * 1000::numeric)::bigint,
+    message_uuid TEXT NOT NULL DEFAULT gen_random_uuid(), -- Built-in function
+    FOREIGN KEY (destination_uuid) REFERENCES dbos.workflow_status(workflow_uuid)
+        ON UPDATE CASCADE ON DELETE CASCADE
+);
+CREATE INDEX idx_workflow_topic ON dbos.notifications (destination_uuid, topic);
+
+-- Create notification function
+CREATE OR REPLACE FUNCTION dbos.notifications_function() RETURNS TRIGGER AS $$
+DECLARE
+    payload text := NEW.destination_uuid || '::' || NEW.topic;
+BEGIN
+    PERFORM pg_notify('dbos_notifications_channel', payload);
+    RETURN NEW;
+END;
+$$ LANGUAGE plpgsql;
+
+-- Create notification trigger
+CREATE TRIGGER dbos_notifications_trigger
+    AFTER INSERT ON dbos.notifications
+    FOR EACH ROW EXECUTE FUNCTION dbos.notifications_function();
+
+CREATE TABLE dbos.workflow_events (
+    workflow_uuid TEXT NOT NULL,
+    key TEXT NOT NULL,
+    value TEXT NOT NULL,
+    PRIMARY KEY (workflow_uuid, key),
+    FOREIGN KEY (workflow_uuid) REFERENCES dbos.workflow_status(workflow_uuid)
+        ON UPDATE CASCADE ON DELETE CASCADE
+);
+
+-- Create events function
+CREATE OR REPLACE FUNCTION dbos.workflow_events_function() RETURNS TRIGGER AS $$
+DECLARE
+    payload text := NEW.workflow_uuid || '::' || NEW.key;
+BEGIN
+    PERFORM pg_notify('dbos_workflow_events_channel', payload);
+    RETURN NEW;
+END;
+$$ LANGUAGE plpgsql;
+
+-- Create events trigger
+CREATE TRIGGER dbos_workflow_events_trigger
+    AFTER INSERT ON dbos.workflow_events
+    FOR EACH ROW EXECUTE FUNCTION dbos.workflow_events_function();
+
+CREATE TABLE dbos.streams (
+    workflow_uuid TEXT NOT NULL,
+    key TEXT NOT NULL,
+    value TEXT NOT NULL,
+    "offset" INTEGER NOT NULL,
+    PRIMARY KEY (workflow_uuid, key, "offset"),
+    FOREIGN KEY (workflow_uuid) REFERENCES dbos.workflow_status(workflow_uuid)
+        ON UPDATE CASCADE ON DELETE CASCADE
+);
+
+CREATE TABLE dbos.event_dispatch_kv (
+    service_name TEXT NOT NULL,
+    workflow_fn_name TEXT NOT NULL,
+    key TEXT NOT NULL,
+    value TEXT,
+    update_seq NUMERIC(38,0),
+    update_time NUMERIC(38,15),
+    PRIMARY KEY (service_name, workflow_fn_name, key)
+);
+"""
+
+
+def get_sqlite_timestamp_expr() -> str:
+    """Get SQLite timestamp expression with millisecond precision for Python >= 3.12."""
+    if sys.version_info >= (3, 12):
+        return "(unixepoch('subsec') * 1000)"
+    else:
+        return "(strftime('%s','now') * 1000)"
+
+
+sqlite_migration_one = f"""
+CREATE TABLE workflow_status (
+    workflow_uuid TEXT PRIMARY KEY,
+    status TEXT,
+    name TEXT,
+    authenticated_user TEXT,
+    assumed_role TEXT,
+    authenticated_roles TEXT,
+    request TEXT,
+    output TEXT,
+    error TEXT,
+    executor_id TEXT,
+    created_at INTEGER NOT NULL DEFAULT {get_sqlite_timestamp_expr()},
+    updated_at INTEGER NOT NULL DEFAULT {get_sqlite_timestamp_expr()},
+    application_version TEXT,
+    application_id TEXT,
+    class_name TEXT DEFAULT NULL,
+    config_name TEXT DEFAULT NULL,
+    recovery_attempts INTEGER DEFAULT 0,
+    queue_name TEXT,
+    workflow_timeout_ms INTEGER,
+    workflow_deadline_epoch_ms INTEGER,
+    inputs TEXT,
+    started_at_epoch_ms INTEGER,
+    deduplication_id TEXT,
+    priority INTEGER NOT NULL DEFAULT 0
+);
+
+CREATE INDEX workflow_status_created_at_index ON workflow_status (created_at);
+CREATE INDEX workflow_status_executor_id_index ON workflow_status (executor_id);
+CREATE INDEX workflow_status_status_index ON workflow_status (status);
+
+CREATE UNIQUE INDEX uq_workflow_status_queue_name_dedup_id
+    ON workflow_status (queue_name, deduplication_id);
+
+CREATE TABLE operation_outputs (
+    workflow_uuid TEXT NOT NULL,
+    function_id INTEGER NOT NULL,
+    function_name TEXT NOT NULL DEFAULT '',
+    output TEXT,
+    error TEXT,
+    child_workflow_id TEXT,
+    PRIMARY KEY (workflow_uuid, function_id),
+    FOREIGN KEY (workflow_uuid) REFERENCES workflow_status(workflow_uuid)
+        ON UPDATE CASCADE ON DELETE CASCADE
+);
+
+CREATE TABLE notifications (
+    destination_uuid TEXT NOT NULL,
+    topic TEXT,
+    message TEXT NOT NULL,
+    created_at_epoch_ms INTEGER NOT NULL DEFAULT {get_sqlite_timestamp_expr()},
+    message_uuid TEXT NOT NULL DEFAULT (hex(randomblob(16))),
+    FOREIGN KEY (destination_uuid) REFERENCES workflow_status(workflow_uuid)
+        ON UPDATE CASCADE ON DELETE CASCADE
+);
+CREATE INDEX idx_workflow_topic ON notifications (destination_uuid, topic);
+
+CREATE TABLE workflow_events (
+    workflow_uuid TEXT NOT NULL,
+    key TEXT NOT NULL,
+    value TEXT NOT NULL,
+    PRIMARY KEY (workflow_uuid, key),
+    FOREIGN KEY (workflow_uuid) REFERENCES workflow_status(workflow_uuid)
+        ON UPDATE CASCADE ON DELETE CASCADE
+);
+
+CREATE TABLE streams (
+    workflow_uuid TEXT NOT NULL,
+    key TEXT NOT NULL,
+    value TEXT NOT NULL,
+    "offset" INTEGER NOT NULL,
+    PRIMARY KEY (workflow_uuid, key, "offset"),
+    FOREIGN KEY (workflow_uuid) REFERENCES workflow_status(workflow_uuid)
+        ON UPDATE CASCADE ON DELETE CASCADE
+);
+"""
+
+dbos_migrations = [dbos_migration_one]
+sqlite_migrations = [sqlite_migration_one]