dbos 1.0.0__py3-none-any.whl → 1.1.0a3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of dbos might be problematic.
- dbos/_app_db.py +48 -41
- dbos/_client.py +1 -0
- dbos/_dbos.py +4 -0
- dbos/_sys_db.py +111 -54
- dbos/cli/cli.py +71 -47
- {dbos-1.0.0.dist-info → dbos-1.1.0a3.dist-info}/METADATA +1 -1
- {dbos-1.0.0.dist-info → dbos-1.1.0a3.dist-info}/RECORD +10 -10
- {dbos-1.0.0.dist-info → dbos-1.1.0a3.dist-info}/WHEEL +0 -0
- {dbos-1.0.0.dist-info → dbos-1.1.0a3.dist-info}/entry_points.txt +0 -0
- {dbos-1.0.0.dist-info → dbos-1.1.0a3.dist-info}/licenses/LICENSE +0 -0
dbos/_app_db.py
CHANGED
@@ -8,6 +8,7 @@ from sqlalchemy.orm import Session, sessionmaker
 
 from . import _serialization
 from ._error import DBOSUnexpectedStepError, DBOSWorkflowConflictIDError
+from ._logger import dbos_logger
 from ._schemas.application_database import ApplicationSchema
 from ._sys_db import StepInfo
 
@@ -39,21 +40,6 @@ class ApplicationDatabase:
     ):
         app_db_url = sa.make_url(database_url).set(drivername="postgresql+psycopg")
 
-        # If the application database does not already exist, create it
-        if not debug_mode:
-            postgres_db_engine = sa.create_engine(
-                app_db_url.set(database="postgres"),
-                **engine_kwargs,
-            )
-            with postgres_db_engine.connect() as conn:
-                conn.execution_options(isolation_level="AUTOCOMMIT")
-                if not conn.execute(
-                    sa.text("SELECT 1 FROM pg_database WHERE datname=:db_name"),
-                    parameters={"db_name": app_db_url.database},
-                ).scalar():
-                    conn.execute(sa.text(f"CREATE DATABASE {app_db_url.database}"))
-            postgres_db_engine.dispose()
-
         if engine_kwargs is None:
             engine_kwargs = {}
 
@@ -61,40 +47,61 @@ class ApplicationDatabase:
             app_db_url,
             **engine_kwargs,
         )
+        self._engine_kwargs = engine_kwargs
         self.sessionmaker = sessionmaker(bind=self.engine)
         self.debug_mode = debug_mode
 
+    def run_migrations(self) -> None:
+        if self.debug_mode:
+            dbos_logger.warning(
+                "Application database migrations are skipped in debug mode."
+            )
+            return
+        # Check if the database exists
+        app_db_url = self.engine.url
+        postgres_db_engine = sa.create_engine(
+            app_db_url.set(database="postgres"),
+            **self._engine_kwargs,
+        )
+        with postgres_db_engine.connect() as conn:
+            conn.execution_options(isolation_level="AUTOCOMMIT")
+            if not conn.execute(
+                sa.text("SELECT 1 FROM pg_database WHERE datname=:db_name"),
+                parameters={"db_name": app_db_url.database},
+            ).scalar():
+                conn.execute(sa.text(f"CREATE DATABASE {app_db_url.database}"))
+        postgres_db_engine.dispose()
+
         # Create the dbos schema and transaction_outputs table in the application database
-        if not debug_mode:
-            with self.engine.begin() as conn:
-                schema_creation_query = sa.text(
-                    f"CREATE SCHEMA IF NOT EXISTS {ApplicationSchema.schema}"
-                )
-                conn.execute(schema_creation_query)
+        with self.engine.begin() as conn:
+            schema_creation_query = sa.text(
+                f"CREATE SCHEMA IF NOT EXISTS {ApplicationSchema.schema}"
+            )
+            conn.execute(schema_creation_query)
 
-            inspector = inspect(self.engine)
-            if not inspector.has_table(
+        inspector = inspect(self.engine)
+        if not inspector.has_table(
+            "transaction_outputs", schema=ApplicationSchema.schema
+        ):
+            ApplicationSchema.metadata_obj.create_all(self.engine)
+        else:
+            columns = inspector.get_columns(
                 "transaction_outputs", schema=ApplicationSchema.schema
-            ):
-                ApplicationSchema.metadata_obj.create_all(self.engine)
-            else:
-                columns = inspector.get_columns(
-                    "transaction_outputs", schema=ApplicationSchema.schema
-                )
-                column_names = [col["name"] for col in columns]
+            )
+            column_names = [col["name"] for col in columns]
 
-                if "function_name" not in column_names:
-                    # Column missing, alter table to add it
-                    with self.engine.connect() as conn:
-                        conn.execute(
-                            text(
-                                f"""
-                                ALTER TABLE {ApplicationSchema.schema}.transaction_outputs
-                                ADD COLUMN function_name TEXT NOT NULL DEFAULT '';
-                                """
-                            )
+            if "function_name" not in column_names:
+                # Column missing, alter table to add it
+                with self.engine.connect() as conn:
+                    conn.execute(
+                        text(
+                            f"""
+                            ALTER TABLE {ApplicationSchema.schema}.transaction_outputs
+                            ADD COLUMN function_name TEXT NOT NULL DEFAULT '';
+                            """
                         )
-                        conn.commit()
+                    )
+                    conn.commit()
 
     def destroy(self) -> None:
         self.engine.dispose()
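Taken together, the hunks above move database creation and schema setup out of ApplicationDatabase's constructor and into the new run_migrations() method, which callers must now invoke explicitly. A minimal sketch of the new calling pattern, assuming keyword arguments as shown in the hunks and an illustrative connection URL:

    from dbos._app_db import ApplicationDatabase

    # Constructing the object now only builds a SQLAlchemy engine; nothing is
    # created in Postgres until migrations are run explicitly.
    app_db = ApplicationDatabase(
        database_url="postgresql://postgres:dbos@localhost:5432/example_app",
        engine_kwargs={"pool_size": 2},
    )
    # Creates the database if missing, the dbos schema, and transaction_outputs.
    app_db.run_migrations()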
dbos/_client.py
CHANGED
@@ -99,6 +99,7 @@ class WorkflowHandleClientAsyncPolling(Generic[R]):
 
 class DBOSClient:
     def __init__(self, database_url: str, *, system_database: Optional[str] = None):
+        # We only create database connections but do not run migrations
        self._sys_db = SystemDatabase(
            database_url=database_url,
            engine_kwargs={
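The added comment makes the client's contract explicit: DBOSClient only opens connections and never migrates either database. A short usage sketch under that assumption (the connection string is illustrative):

    from dbos import DBOSClient

    # Connects to existing system/application databases; schema creation is left
    # to the application process (DBOS.launch() or the dbos migrate command).
    client = DBOSClient("postgresql://postgres:dbos@localhost:5432/example_app")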
dbos/_dbos.py
CHANGED
@@ -433,6 +433,10 @@ class DBOS:
         if debug_mode:
             return
 
+        # Run migrations for the system and application databases
+        self._sys_db.run_migrations()
+        self._app_db.run_migrations()
+
         admin_port = self._config.get("runtimeConfig", {}).get("admin_port")
         if admin_port is None:
             admin_port = 3001
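These added calls sit in DBOS's launch path (the admin-server port is read immediately afterwards), so both databases are now migrated when the application launches rather than when its database handles are constructed. A hedged sketch of the resulting lifecycle; the configuration keys and values here are illustrative, not taken from this diff:

    from dbos import DBOS

    config = {
        "name": "example-app",
        "database_url": "postgresql://postgres:dbos@localhost:5432/example_app",
    }
    DBOS(config=config)
    DBOS.launch()  # per this diff, launch() now runs system- and application-database migrations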
dbos/_sys_db.py
CHANGED
@@ -222,6 +222,47 @@ class StepInfo(TypedDict):
 _dbos_null_topic = "__null__topic__"
 
 
+class ConditionCount(TypedDict):
+    condition: threading.Condition
+    count: int
+
+
+class ThreadSafeConditionDict:
+    def __init__(self) -> None:
+        self._dict: Dict[str, ConditionCount] = {}
+        self._lock = threading.Lock()
+
+    def get(self, key: str) -> Optional[threading.Condition]:
+        with self._lock:
+            if key not in self._dict:
+                # Key does not exist, return None
+                return None
+            return self._dict[key]["condition"]
+
+    def set(
+        self, key: str, value: threading.Condition
+    ) -> tuple[bool, threading.Condition]:
+        with self._lock:
+            if key in self._dict:
+                # Key already exists, do not overwrite. Increment the wait count.
+                cc = self._dict[key]
+                cc["count"] += 1
+                return False, cc["condition"]
+            self._dict[key] = ConditionCount(condition=value, count=1)
+            return True, value
+
+    def pop(self, key: str) -> None:
+        with self._lock:
+            if key in self._dict:
+                cc = self._dict[key]
+                cc["count"] -= 1
+                if cc["count"] == 0:
+                    # No more threads waiting on this condition, remove it
+                    del self._dict[key]
+            else:
+                dbos_logger.warning(f"Key {key} not found in condition dictionary.")
+
+
 class SystemDatabase:
 
     def __init__(
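The new ThreadSafeConditionDict reference-counts the threads waiting on a given payload: set() registers a condition only if the key is new and otherwise returns the existing one, and pop() deletes the entry only once every registrant has unregistered. A standalone sketch of that contract, assuming the class is importable from dbos._sys_db as added above (the payload string is made up):

    import threading

    from dbos._sys_db import ThreadSafeConditionDict

    waiters = ThreadSafeConditionDict()
    payload = "workflow-123::greeting"

    created, cond = waiters.set(payload, threading.Condition())
    assert created                                  # first waiter registers its condition

    created_again, shared = waiters.set(payload, threading.Condition())
    assert not created_again and shared is cond     # second waiter reuses it, count -> 2

    waiters.pop(payload)                            # count 2 -> 1, entry kept
    waiters.pop(payload)                            # count 1 -> 0, entry removed
    assert waiters.get(payload) is None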
@@ -241,34 +282,63 @@ class SystemDatabase:
         sysdb_name = system_db_url.database + SystemSchema.sysdb_suffix
         system_db_url = system_db_url.set(database=sysdb_name)
 
-        if not debug_mode:
-            # If the system database does not already exist, create it
-            engine = sa.create_engine(
-                system_db_url.set(database="postgres"), **engine_kwargs
-            )
-            with engine.connect() as conn:
-                conn.execution_options(isolation_level="AUTOCOMMIT")
-                if not conn.execute(
-                    sa.text("SELECT 1 FROM pg_database WHERE datname=:db_name"),
-                    parameters={"db_name": sysdb_name},
-                ).scalar():
-                    dbos_logger.info(f"Creating system database {sysdb_name}")
-                    conn.execute(sa.text(f"CREATE DATABASE {sysdb_name}"))
-            engine.dispose()
-
         self.engine = sa.create_engine(
             system_db_url,
             **engine_kwargs,
         )
+        self._engine_kwargs = engine_kwargs
+
+        self.notification_conn: Optional[psycopg.connection.Connection] = None
+        self.notifications_map = ThreadSafeConditionDict()
+        self.workflow_events_map = ThreadSafeConditionDict()
+
+        # Now we can run background processes
+        self._run_background_processes = True
+        self._debug_mode = debug_mode
+
+    # Run migrations
+    def run_migrations(self) -> None:
+        if self._debug_mode:
+            dbos_logger.warning("System database migrations are skipped in debug mode.")
+            return
+        system_db_url = self.engine.url
+        sysdb_name = system_db_url.database
+        # If the system database does not already exist, create it
+        engine = sa.create_engine(
+            system_db_url.set(database="postgres"), **self._engine_kwargs
+        )
+        with engine.connect() as conn:
+            conn.execution_options(isolation_level="AUTOCOMMIT")
+            if not conn.execute(
+                sa.text("SELECT 1 FROM pg_database WHERE datname=:db_name"),
+                parameters={"db_name": sysdb_name},
+            ).scalar():
+                dbos_logger.info(f"Creating system database {sysdb_name}")
+                conn.execute(sa.text(f"CREATE DATABASE {sysdb_name}"))
+        engine.dispose()
 
         # Run a schema migration for the system database
-        if not debug_mode:
-            migration_dir = os.path.join(
-                os.path.dirname(os.path.realpath(__file__)), "_migrations"
+        migration_dir = os.path.join(
+            os.path.dirname(os.path.realpath(__file__)), "_migrations"
+        )
+        alembic_cfg = Config()
+        alembic_cfg.set_main_option("script_location", migration_dir)
+        logging.getLogger("alembic").setLevel(logging.WARNING)
+        # Alembic requires the % in URL-escaped parameters to itself be escaped to %%.
+        escaped_conn_string = re.sub(
+            r"%(?=[0-9A-Fa-f]{2})",
+            "%%",
+            self.engine.url.render_as_string(hide_password=False),
+        )
+        alembic_cfg.set_main_option("sqlalchemy.url", escaped_conn_string)
+        try:
+            command.upgrade(alembic_cfg, "head")
+        except Exception as e:
+            dbos_logger.warning(
+                f"Exception during system database construction. This is most likely because the system database was configured using a later version of DBOS: {e}"
             )
             alembic_cfg = Config()
             alembic_cfg.set_main_option("script_location", migration_dir)
-            logging.getLogger("alembic").setLevel(logging.WARNING)
             # Alembic requires the % in URL-escaped parameters to itself be escaped to %%.
             escaped_conn_string = re.sub(
                 r"%(?=[0-9A-Fa-f]{2})",
@@ -282,29 +352,6 @@
                 dbos_logger.warning(
                     f"Exception during system database construction. This is most likely because the system database was configured using a later version of DBOS: {e}"
                 )
-            alembic_cfg = Config()
-            alembic_cfg.set_main_option("script_location", migration_dir)
-            # Alembic requires the % in URL-escaped parameters to itself be escaped to %%.
-            escaped_conn_string = re.sub(
-                r"%(?=[0-9A-Fa-f]{2})",
-                "%%",
-                self.engine.url.render_as_string(hide_password=False),
-            )
-            alembic_cfg.set_main_option("sqlalchemy.url", escaped_conn_string)
-            try:
-                command.upgrade(alembic_cfg, "head")
-            except Exception as e:
-                dbos_logger.warning(
-                    f"Exception during system database construction. This is most likely because the system database was configured using a later version of DBOS: {e}"
-                )
-
-        self.notification_conn: Optional[psycopg.connection.Connection] = None
-        self.notifications_map: Dict[str, threading.Condition] = {}
-        self.workflow_events_map: Dict[str, threading.Condition] = {}
-
-        # Now we can run background processes
-        self._run_background_processes = True
-        self._debug_mode = debug_mode
 
     # Destroy the pool when finished
     def destroy(self) -> None:
@@ -1282,7 +1329,12 @@
         condition = threading.Condition()
         # Must acquire first before adding to the map. Otherwise, the notification listener may notify it before the condition is acquired and waited.
         condition.acquire()
-        self.notifications_map[payload] = condition
+        success, _ = self.notifications_map.set(payload, condition)
+        if not success:
+            # This should not happen, but if it does, it means the workflow is executed concurrently.
+            condition.release()
+            self.notifications_map.pop(payload)
+            raise DBOSWorkflowConflictIDError(workflow_uuid)
 
         # Check if the key is already in the database. If not, wait for the notification.
         init_recv: Sequence[Any]
@@ -1375,11 +1427,11 @@
                             f"Received notification on channel: {channel}, payload: {notify.payload}"
                         )
                         if channel == "dbos_notifications_channel":
-                            if (
-                                notify.payload
-                                and notify.payload in self.notifications_map
-                            ):
-                                condition = self.notifications_map[notify.payload]
+                            if notify.payload:
+                                condition = self.notifications_map.get(notify.payload)
+                                if condition is None:
+                                    # No condition found for this payload
+                                    continue
                                 condition.acquire()
                                 condition.notify_all()
                                 condition.release()
@@ -1387,11 +1439,11 @@
                                     f"Signaled notifications condition for {notify.payload}"
                                 )
                         elif channel == "dbos_workflow_events_channel":
-                            if (
-                                notify.payload
-                                and notify.payload in self.workflow_events_map
-                            ):
-                                condition = self.workflow_events_map[notify.payload]
+                            if notify.payload:
+                                condition = self.workflow_events_map.get(notify.payload)
+                                if condition is None:
+                                    # No condition found for this payload
+                                    continue
                                 condition.acquire()
                                 condition.notify_all()
                                 condition.release()
@@ -1529,8 +1581,13 @@
 
         payload = f"{target_uuid}::{key}"
         condition = threading.Condition()
-        self.workflow_events_map[payload] = condition
         condition.acquire()
+        success, existing_condition = self.workflow_events_map.set(payload, condition)
+        if not success:
+            # Wait on the existing condition
+            condition.release()
+            condition = existing_condition
+            condition.acquire()
 
         # Check if the key is already in the database. If not, wait for the notification.
         init_recv: Sequence[Any]
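The remaining hunks wire the map into both sides of the notification protocol: a waiter acquires its condition before registering it, and the listener looks the condition up by payload and calls notify_all(). A compressed two-thread sketch of that handshake, reusing ThreadSafeConditionDict from the first hunk; the payload, timeout, and sleep values are illustrative:

    import threading
    import time

    from dbos._sys_db import ThreadSafeConditionDict

    events = ThreadSafeConditionDict()
    payload = "workflow-123::status"

    def waiter() -> None:
        condition = threading.Condition()
        condition.acquire()  # acquire before registering, as the diff's comment explains
        success, existing = events.set(payload, condition)
        if not success:      # another thread already waits on this payload
            condition.release()
            condition = existing
            condition.acquire()
        condition.wait(timeout=5)  # woken by the notifier below
        condition.release()
        events.pop(payload)

    def notifier() -> None:
        condition = events.get(payload)
        if condition is not None:  # mirrors the listener's None check
            condition.acquire()
            condition.notify_all()
            condition.release()

    t = threading.Thread(target=waiter)
    t.start()
    time.sleep(0.1)  # give the waiter time to register
    notifier()
    t.join()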
dbos/cli/cli.py
CHANGED
@@ -12,7 +12,7 @@ import sqlalchemy as sa
 import typer
 from rich import print
 from rich.prompt import IntPrompt
-from typing_extensions import Annotated
+from typing_extensions import Annotated, List
 
 from dbos._debug import debug_workflow, parse_start_command
 
@@ -147,55 +147,16 @@ def init(
     ] = False,
 ) -> None:
     try:
-
         git_templates = ["dbos-toolbox", "dbos-app-starter", "dbos-cron-starter"]
         templates_dir = get_templates_directory()
-        templates = git_templates + [
-            x.name for x in os.scandir(templates_dir) if x.is_dir()
-        ]
-
-        if config and template is None:
-            template = templates[-1]
 
-        if template:
-            if template not in templates:
-                raise Exception(f"Template {template} not found in {templates_dir}")
-        else:
-            print("\n[bold]Available templates:[/bold]")
-            for idx, template_name in enumerate(templates, 1):
-                print(f"  {idx}. {template_name}")
-            while True:
-                try:
-                    choice = IntPrompt.ask(
-                        "\nSelect template number",
-                        show_choices=False,
-                        show_default=False,
-                    )
-                    if 1 <= choice <= len(templates):
-                        template = templates[choice - 1]
-                        break
-                    else:
-                        print(
-                            "[red]Invalid selection. Please choose a number from the list.[/red]"
-                        )
-                except (KeyboardInterrupt, EOFError):
-                    raise typer.Abort()
-                except ValueError:
-                    print("[red]Please enter a valid number.[/red]")
-
-        if template in git_templates:
-            project_name = template
-        else:
-            if project_name is None:
-                project_name = typing.cast(
-                    str,
-                    typer.prompt("What is your project's name?", get_project_name()),
-                )
-
-            if not _is_valid_app_name(project_name):
-                raise Exception(
-                    f"{project_name} is an invalid DBOS app name. App names must be between 3 and 30 characters long and contain only lowercase letters, numbers, dashes, and underscores."
-                )
+        project_name, template = _resolve_project_name_and_template(
+            project_name=project_name,
+            template=template,
+            config=config,
+            git_templates=git_templates,
+            templates_dir=templates_dir,
+        )
 
         if template in git_templates:
             create_template_from_github(app_name=project_name, template_name=template)
@@ -207,6 +168,67 @@
         print(f"[red]{e}[/red]")
 
 
+def _resolve_project_name_and_template(
+    project_name: Optional[str],
+    template: Optional[str],
+    config: bool,
+    git_templates: List[str],
+    templates_dir: str,
+) -> tuple[str, str]:
+    templates = git_templates + [
+        x.name for x in os.scandir(templates_dir) if x.is_dir()
+    ]
+
+    if config and template is None:
+        template = templates[-1]
+
+    if template:
+        if template not in templates:
+            raise Exception(f"Template {template} not found in {templates_dir}")
+    else:
+        print("\n[bold]Available templates:[/bold]")
+        for idx, template_name in enumerate(templates, 1):
+            print(f"  {idx}. {template_name}")
+        while True:
+            try:
+                choice = IntPrompt.ask(
+                    "\nSelect template number",
+                    show_choices=False,
+                    show_default=False,
+                )
+                if 1 <= choice <= len(templates):
+                    template = templates[choice - 1]
+                    break
+                else:
+                    print(
+                        "[red]Invalid selection. Please choose a number from the list.[/red]"
+                    )
+            except (KeyboardInterrupt, EOFError):
+                raise typer.Abort()
+            except ValueError:
+                print("[red]Please enter a valid number.[/red]")
+
+    if template in git_templates:
+        if project_name is None:
+            project_name = template
+    else:
+        if project_name is None:
+            project_name = typing.cast(
+                str,
+                typer.prompt("What is your project's name?", get_project_name()),
+            )
+
+        if not _is_valid_app_name(project_name):
+            raise Exception(
+                f"{project_name} is an invalid DBOS app name. App names must be between 3 and 30 characters long and contain only lowercase letters, numbers, dashes, and underscores."
+            )
+
+    assert project_name is not None, "Project name cannot be None"
+    assert template is not None, "Template name cannot be None"
+
+    return project_name, template
+
+
 @app.command(
     help="Run your database schema migrations using the migration commands in 'dbos-config.yaml'"
 )
@@ -258,6 +280,8 @@ def migrate(
                 "pool_size": 2,
             },
         )
+        sys_db.run_migrations()
+        app_db.run_migrations()
     except Exception as e:
         typer.echo(f"DBOS system schema migration failed: {e}")
     finally:
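With the helper factored out, init's template and project-name resolution is a single call, and migrate now triggers both schema migrations explicitly after constructing the database objects. A sketch of how the helper behaves for a git template when no project name is supplied, run inside dbos/cli/cli.py's namespace (argument values are illustrative):

    project_name, template = _resolve_project_name_and_template(
        project_name=None,
        template="dbos-app-starter",  # one of the git templates listed in init
        config=False,
        git_templates=["dbos-toolbox", "dbos-app-starter", "dbos-cron-starter"],
        templates_dir=get_templates_directory(),
    )
    # For a git template with no explicit name, the project is named after the template.
    assert (project_name, template) == ("dbos-app-starter", "dbos-app-starter")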
{dbos-1.0.0.dist-info → dbos-1.1.0a3.dist-info}/RECORD
CHANGED
@@ -1,19 +1,19 @@
-dbos-1.
-dbos-1.
-dbos-1.
-dbos-1.
+dbos-1.1.0a3.dist-info/METADATA,sha256=wwmIbv3PGphti-I6d0es4O3hZMHu7m7hXZbMfrko2WM,13267
+dbos-1.1.0a3.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
+dbos-1.1.0a3.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
+dbos-1.1.0a3.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
 dbos/__init__.py,sha256=NssPCubaBxdiKarOWa-wViz1hdJSkmBGcpLX_gQ4NeA,891
 dbos/__main__.py,sha256=G7Exn-MhGrVJVDbgNlpzhfh8WMX_72t3_oJaFT9Lmt8,653
 dbos/_admin_server.py,sha256=A_28_nJ1nBBYDmCxtklJR9O2v14JRMtD1rAo_D4y8Kc,9764
-dbos/_app_db.py,sha256=
+dbos/_app_db.py,sha256=wxZz3ja9QgVuyp5YLsAqa_MpuyD5tl0C5GHTLl8fwF0,10514
 dbos/_classproperty.py,sha256=f0X-_BySzn3yFDRKB2JpCbLYQ9tLwt1XftfshvY7CBs,626
-dbos/_client.py,sha256
+dbos/_client.py,sha256=-nK2GjS9D0qnD2DkRDs7gKxNECwYlsvW6hFCjADlnv0,14186
 dbos/_conductor/conductor.py,sha256=o0IaZjwnZ2TOyHeP2H4iSX6UnXLXQ4uODvWAKD9hHMs,21703
 dbos/_conductor/protocol.py,sha256=wgOFZxmS81bv0WCB9dAyg0s6QzldpzVKQDoSPeaX0Ws,6967
 dbos/_context.py,sha256=Ly1CXF1nWxICQgIpDZSaONGlz1yERBs63gqmR-yqCzM,24476
 dbos/_core.py,sha256=UDpSgRA9m_YuViNXR9tVgNFLC-zxKZPxjlkj2a-Kj00,48317
 dbos/_croniter.py,sha256=XHAyUyibs_59sJQfSNWkP7rqQY6_XrlfuuCxk4jYqek,47559
-dbos/_dbos.py,sha256=
+dbos/_dbos.py,sha256=f5s9cVgsiMkAkpvctLHE6sjVAEuC-eFEpRddYBIKxiA,46430
 dbos/_dbos_config.py,sha256=IufNrIC-M2xSNTXyT_KXlEdfB3j03pPLv_nE0fEq4_U,20955
 dbos/_debug.py,sha256=MNlQVZ6TscGCRQeEEL0VE8Uignvr6dPeDDDefS3xgIE,1823
 dbos/_docker_pg_helper.py,sha256=tLJXWqZ4S-ExcaPnxg_i6cVxL6ZxrYlZjaGsklY-s2I,6115
@@ -47,7 +47,7 @@ dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_schemas/application_database.py,sha256=SypAS9l9EsaBHFn9FR8jmnqt01M74d9AF1AMa4m2hhI,1040
 dbos/_schemas/system_database.py,sha256=3Z0L72bOgHnusK1hBaETWU9RfiLBP0QnS-fdu41i0yY,5835
 dbos/_serialization.py,sha256=bWuwhXSQcGmiazvhJHA5gwhrRWxtmFmcCFQSDJnqqkU,3666
-dbos/_sys_db.py,sha256=
+dbos/_sys_db.py,sha256=cgUpkgVF8jz_dcseShiJl4jFqoPlF24UadjVZ7LQ2qc,83235
 dbos/_templates/dbos-db-starter/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
 dbos/_templates/dbos-db-starter/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_templates/dbos-db-starter/__package/main.py.dbos,sha256=aQnBPSSQpkB8ERfhf7gB7P9tsU6OPKhZscfeh0yiaD8,2702
@@ -63,8 +63,8 @@ dbos/_utils.py,sha256=UbpMYRBSyvJqdXeWAnfSw8xXM1R1mfnyl1oTunhEjJM,513
 dbos/_workflow_commands.py,sha256=2E8FRUv_nLYkpBTwfhh_ELhySYpMrm8qGB9J44g6DSE,3872
 dbos/cli/_github_init.py,sha256=Y_bDF9gfO2jB1id4FV5h1oIxEJRWyqVjhb7bNEa5nQ0,3224
 dbos/cli/_template_init.py,sha256=7JBcpMqP1r2mfCnvWatu33z8ctEGHJarlZYKgB83cXE,2972
-dbos/cli/cli.py,sha256=
+dbos/cli/cli.py,sha256=HinoCGrAUTiSeq7AAoCFfhdiE0uDw7vLMuDMN1_YTLI,20705
 dbos/dbos-config.schema.json,sha256=CjaspeYmOkx6Ip_pcxtmfXJTn_YGdSx_0pcPBF7KZmo,6060
 dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
 version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
-dbos-1.
+dbos-1.1.0a3.dist-info/RECORD,,
{dbos-1.0.0.dist-info → dbos-1.1.0a3.dist-info}/WHEEL
File without changes
{dbos-1.0.0.dist-info → dbos-1.1.0a3.dist-info}/entry_points.txt
File without changes
{dbos-1.0.0.dist-info → dbos-1.1.0a3.dist-info}/licenses/LICENSE
File without changes