dbos 0.19.0a4__py3-none-any.whl → 0.20.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dbos/_admin_server.py +45 -2
- dbos/_context.py +11 -2
- dbos/_core.py +66 -6
- dbos/_dbos.py +36 -1
- dbos/_error.py +11 -0
- dbos/_fastapi.py +16 -11
- dbos/_flask.py +6 -2
- dbos/_kafka.py +17 -1
- dbos/_queue.py +1 -0
- dbos/_sys_db.py +113 -38
- dbos/_workflow_commands.py +171 -0
- dbos/cli/_github_init.py +107 -0
- dbos/cli/_template_init.py +98 -0
- dbos/cli/cli.py +367 -0
- {dbos-0.19.0a4.dist-info → dbos-0.20.0.dist-info}/METADATA +21 -16
- {dbos-0.19.0a4.dist-info → dbos-0.20.0.dist-info}/RECORD +29 -26
- {dbos-0.19.0a4.dist-info → dbos-0.20.0.dist-info}/entry_points.txt +1 -1
- dbos/cli.py +0 -337
- /dbos/_templates/{hello → dbos-db-starter}/README.md +0 -0
- /dbos/_templates/{hello → dbos-db-starter}/__package/__init__.py +0 -0
- /dbos/_templates/{hello → dbos-db-starter}/__package/main.py +0 -0
- /dbos/_templates/{hello → dbos-db-starter}/__package/schema.py +0 -0
- /dbos/_templates/{hello → dbos-db-starter}/alembic.ini +0 -0
- /dbos/_templates/{hello → dbos-db-starter}/dbos-config.yaml.dbos +0 -0
- /dbos/_templates/{hello → dbos-db-starter}/migrations/env.py.dbos +0 -0
- /dbos/_templates/{hello → dbos-db-starter}/migrations/script.py.mako +0 -0
- /dbos/_templates/{hello → dbos-db-starter}/migrations/versions/2024_07_31_180642_init.py +0 -0
- /dbos/_templates/{hello → dbos-db-starter}/start_postgres_docker.py +0 -0
- {dbos-0.19.0a4.dist-info → dbos-0.20.0.dist-info}/WHEEL +0 -0
- {dbos-0.19.0a4.dist-info → dbos-0.20.0.dist-info}/licenses/LICENSE +0 -0
dbos/_sys_db.py
CHANGED
```diff
@@ -28,6 +28,7 @@ from sqlalchemy.exc import DBAPIError
 from . import _serialization
 from ._dbos_config import ConfigFile
 from ._error import (
+    DBOSConflictingWorkflowError,
     DBOSDeadLetterQueueError,
     DBOSException,
     DBOSNonExistentWorkflowError,
```
```diff
@@ -288,8 +289,14 @@ class SystemDatabase:
                 ),
             )
         else:
-
-
+            # A blank update so that we can return the existing status
+            cmd = cmd.on_conflict_do_update(
+                index_elements=["workflow_uuid"],
+                set_=dict(
+                    recovery_attempts=SystemSchema.workflow_status.c.recovery_attempts
+                ),
+            )
+            cmd = cmd.returning(SystemSchema.workflow_status.c.recovery_attempts, SystemSchema.workflow_status.c.status, SystemSchema.workflow_status.c.name, SystemSchema.workflow_status.c.class_name, SystemSchema.workflow_status.c.config_name, SystemSchema.workflow_status.c.queue_name)  # type: ignore

         if conn is not None:
             results = conn.execute(cmd)
```
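The new `else` branch is a deliberate no-op upsert: `on_conflict_do_update` assigns `recovery_attempts` to its own current value, which lets the statement carry a `RETURNING` clause that reports the existing row when the workflow UUID already exists (with `on_conflict_do_nothing`, `RETURNING` yields no row on conflict). A minimal sketch of the same pattern on a toy table (the `jobs` table here is illustrative, not part of dbos):

```python
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql import insert as pg_insert

metadata = sa.MetaData()
# Toy stand-in for the workflow_status table.
jobs = sa.Table(
    "jobs",
    metadata,
    sa.Column("job_id", sa.Text, primary_key=True),
    sa.Column("status", sa.Text),
)

cmd = pg_insert(jobs).values(job_id="abc", status="PENDING")
# "Blank" update: assign the column to itself so a conflicting INSERT
# changes nothing, yet RETURNING still produces the existing row.
cmd = cmd.on_conflict_do_update(
    index_elements=["job_id"],
    set_=dict(status=jobs.c.status),
)
cmd = cmd.returning(jobs.c.status)

# Render the ON CONFLICT ... RETURNING statement without a database.
print(cmd.compile(dialect=postgresql.dialect()))
```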
```diff
@@ -297,37 +304,53 @@
             with self.engine.begin() as c:
                 results = c.execute(cmd)

-
-
-
-
-
-
-
-
-
-
-
-
+        row = results.fetchone()
+        if row is not None:
+            # Check the started workflow matches the expected name, class_name, config_name, and queue_name
+            # A mismatch indicates a workflow starting with the same UUID but different functions, which would throw an exception.
+            recovery_attempts: int = row[0]
+            wf_status = row[1]
+            err_msg: Optional[str] = None
+            if row[2] != status["name"]:
+                err_msg = f"Workflow already exists with a different function name: {row[2]}, but the provided function name is: {status['name']}"
+            elif row[3] != status["class_name"]:
+                err_msg = f"Workflow already exists with a different class name: {row[3]}, but the provided class name is: {status['class_name']}"
+            elif row[4] != status["config_name"]:
+                err_msg = f"Workflow already exists with a different config name: {row[4]}, but the provided config name is: {status['config_name']}"
+            elif row[5] != status["queue_name"]:
+                # This is a warning because a different queue name is not necessarily an error.
+                dbos_logger.warning(
+                    f"Workflow already exists in queue: {row[5]}, but the provided queue name is: {status['queue_name']}. The queue is not updated."
+                )
+            if err_msg is not None:
+                raise DBOSConflictingWorkflowError(status["workflow_uuid"], err_msg)
+
+        if in_recovery and recovery_attempts > max_recovery_attempts:
+            with self.engine.begin() as c:
+                c.execute(
+                    sa.delete(SystemSchema.workflow_queue).where(
+                        SystemSchema.workflow_queue.c.workflow_uuid
+                        == status["workflow_uuid"]
+                    )
+                )
+                c.execute(
+                    sa.update(SystemSchema.workflow_status)
+                    .where(
+                        SystemSchema.workflow_status.c.workflow_uuid
+                        == status["workflow_uuid"]
+                    )
+                    .where(
+                        SystemSchema.workflow_status.c.status
+                        == WorkflowStatusString.PENDING.value
                     )
-
-
-
-                    SystemSchema.workflow_status.c.workflow_uuid
-                    == status["workflow_uuid"]
-                )
-                .where(
-                    SystemSchema.workflow_status.c.status
-                    == WorkflowStatusString.PENDING.value
-                )
-                .values(
-                    status=WorkflowStatusString.RETRIES_EXCEEDED.value,
-                    queue_name=None,
-                )
+                    .values(
+                        status=WorkflowStatusString.RETRIES_EXCEEDED.value,
+                        queue_name=None,
+                    )
                 )
-                raise DBOSDeadLetterQueueError(
-                    status["workflow_uuid"], max_recovery_attempts
                 )
+            raise DBOSDeadLetterQueueError(
+                status["workflow_uuid"], max_recovery_attempts
+            )

         # Record we have exported status for this single-transaction workflow
         if status["workflow_uuid"] in self._temp_txn_wf_ids:
```
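Together with the `RETURNING` columns above, this gives `update_workflow_status` enough information to reject a workflow ID that is reused by a different function. A minimal sketch of the behavior from application code, assuming a configured and launched DBOS app (the workflows and ID here are illustrative):

```python
from dbos import DBOS, SetWorkflowID
from dbos._error import DBOSConflictingWorkflowError

@DBOS.workflow()
def send_report() -> str:
    return "report sent"

@DBOS.workflow()
def delete_report() -> str:
    return "report deleted"

# Assumes DBOS() was constructed and DBOS.launch() called elsewhere.
with SetWorkflowID("my-unique-id"):
    send_report()      # First use of the ID: runs normally.

with SetWorkflowID("my-unique-id"):
    send_report()      # Same ID, same function: idempotent, no error.

try:
    with SetWorkflowID("my-unique-id"):
        delete_report()  # Same ID, different function: conflict.
except DBOSConflictingWorkflowError as e:
    print(f"Rejected: {e}")
```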
```diff
@@ -344,7 +367,7 @@
         with self.engine.begin() as c:
             stmt = (
                 sa.update(SystemSchema.workflow_status)
-                .where(SystemSchema.
+                .where(SystemSchema.workflow_status.c.workflow_uuid == workflow_uuid)
                 .values(
                     status=status,
                 )
@@ -356,7 +379,7 @@
         stmt = (
             sa.update(SystemSchema.workflow_status)
             .where(
-                SystemSchema.
+                SystemSchema.workflow_status.c.workflow_uuid == workflow_uuid
             )
             .values(recovery_attempts=reset_recovery_attempts)
         )
```
```diff
@@ -538,18 +561,27 @@
                 workflow_uuid=workflow_uuid,
                 inputs=inputs,
             )
-            .
+            .on_conflict_do_update(
+                index_elements=["workflow_uuid"],
+                set_=dict(workflow_uuid=SystemSchema.workflow_inputs.c.workflow_uuid),
+            )
+            .returning(SystemSchema.workflow_inputs.c.inputs)
         )
         if conn is not None:
-            conn.execute(cmd)
+            row = conn.execute(cmd).fetchone()
         else:
             with self.engine.begin() as c:
-                c.execute(cmd)
-
+                row = c.execute(cmd).fetchone()
+        if row is not None and row[0] != inputs:
+            dbos_logger.warning(
+                f"Workflow inputs for {workflow_uuid} changed since the first call! Use the original inputs."
+            )
+            # TODO: actually changing the input
         if workflow_uuid in self._temp_txn_wf_ids:
             # Clean up the single-transaction tracking sets
             self._exported_temp_txn_wf_status.discard(workflow_uuid)
             self._temp_txn_wf_ids.discard(workflow_uuid)
+        return

     def get_workflow_inputs(
         self, workflow_uuid: str
```
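`update_workflow_inputs` gets the same upsert-plus-`RETURNING` treatment: the statement hands back the previously recorded serialized inputs, and a mismatch with the current call only logs a warning — the original inputs win, per the TODO. A simplified, self-contained illustration of that compare-on-record idea, using `json` in place of dbos's serialization and a dict in place of the table:

```python
import json
import logging
from typing import Any

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger("example")

# Stand-in for the workflow_inputs table: workflow_uuid -> serialized inputs.
recorded_inputs: dict[str, str] = {}

def record_inputs(workflow_uuid: str, args: Any) -> None:
    serialized = json.dumps(args, sort_keys=True)
    # Like the upsert, this keeps the first value and returns what is stored.
    existing = recorded_inputs.setdefault(workflow_uuid, serialized)
    if existing != serialized:
        logger.warning(
            f"Workflow inputs for {workflow_uuid} changed since the first call! "
            "Use the original inputs."
        )

record_inputs("wf-1", {"x": 1})
record_inputs("wf-1", {"x": 1})  # Same inputs: silent.
record_inputs("wf-1", {"x": 2})  # Different inputs: warning.
```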
```diff
@@ -582,12 +614,12 @@
         if input.start_time:
             query = query.where(
                 SystemSchema.workflow_status.c.created_at
-                >= datetime.datetime.fromisoformat(input.start_time).timestamp()
+                >= datetime.datetime.fromisoformat(input.start_time).timestamp() * 1000
             )
         if input.end_time:
             query = query.where(
                 SystemSchema.workflow_status.c.created_at
-                <= datetime.datetime.fromisoformat(input.end_time).timestamp()
+                <= datetime.datetime.fromisoformat(input.end_time).timestamp() * 1000
             )
         if input.status:
             query = query.where(SystemSchema.workflow_status.c.status == input.status)
```
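`created_at` is stored as epoch milliseconds (compare `completed_at_epoch_ms` below), while `datetime.timestamp()` returns epoch seconds, so the old comparisons were off by a factor of 1000: a start-time bound effectively matched every row and an end-time bound almost none. A quick check of the scale difference:

```python
import datetime

t = datetime.datetime.fromisoformat("2024-07-31T18:06:42+00:00")
print(t.timestamp())         # epoch seconds, e.g. 1722449202.0
print(t.timestamp() * 1000)  # epoch milliseconds, the unit created_at uses
```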
```diff
@@ -1233,3 +1265,46 @@
             .where(SystemSchema.workflow_queue.c.workflow_uuid == workflow_id)
             .values(completed_at_epoch_ms=int(time.time() * 1000))
         )
+
+
+def reset_system_database(config: ConfigFile) -> None:
+    sysdb_name = (
+        config["database"]["sys_db_name"]
+        if "sys_db_name" in config["database"] and config["database"]["sys_db_name"]
+        else config["database"]["app_db_name"] + SystemSchema.sysdb_suffix
+    )
+    postgres_db_url = sa.URL.create(
+        "postgresql+psycopg",
+        username=config["database"]["username"],
+        password=config["database"]["password"],
+        host=config["database"]["hostname"],
+        port=config["database"]["port"],
+        database="postgres",
+    )
+    try:
+        # Connect to postgres default database
+        engine = sa.create_engine(postgres_db_url)
+
+        with engine.connect() as conn:
+            # Set autocommit required for database dropping
+            conn.execution_options(isolation_level="AUTOCOMMIT")
+
+            # Terminate existing connections
+            conn.execute(
+                sa.text(
+                    """
+                SELECT pg_terminate_backend(pg_stat_activity.pid)
+                FROM pg_stat_activity
+                WHERE pg_stat_activity.datname = :db_name
+                AND pid <> pg_backend_pid()
+                """
+                ),
+                {"db_name": sysdb_name},
+            )
+
+            # Drop the database
+            conn.execute(sa.text(f"DROP DATABASE IF EXISTS {sysdb_name}"))
+
+    except sa.exc.SQLAlchemyError as e:
+        dbos_logger.error(f"Error resetting system database: {str(e)}")
+        raise e
```
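The new module-level `reset_system_database` terminates open connections to the DBOS system database and drops it. A minimal sketch of invoking it directly, assuming a valid `dbos-config.yaml` in the working directory — this is destructive, so only run it against a database you can afford to lose:

```python
from dbos import load_config
from dbos._sys_db import reset_system_database

# Drops the configured system database (app_db_name + SystemSchema.sysdb_suffix
# by default), erasing all stored workflow state.
config = load_config()  # reads dbos-config.yaml from the current directory
reset_system_database(config)
```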
dbos/_workflow_commands.py
ADDED

```diff
@@ -0,0 +1,171 @@
+import importlib
+import os
+import sys
+from typing import Any, List, Optional, cast
+
+import typer
+from rich import print
+
+from dbos import DBOS
+
+from . import _serialization, load_config
+from ._core import execute_workflow_by_id
+from ._dbos_config import ConfigFile, _is_valid_app_name
+from ._sys_db import (
+    GetWorkflowsInput,
+    GetWorkflowsOutput,
+    SystemDatabase,
+    WorkflowStatuses,
+    WorkflowStatusInternal,
+    WorkflowStatusString,
+)
+
+
+class WorkflowInformation:
+    workflowUUID: str
+    status: WorkflowStatuses
+    workflowName: str
+    workflowClassName: Optional[str]
+    workflowConfigName: Optional[str]
+    input: Optional[_serialization.WorkflowInputs]  # JSON (jsonpickle)
+    output: Optional[str]  # JSON (jsonpickle)
+    error: Optional[str]  # JSON (jsonpickle)
+    executor_id: Optional[str]
+    app_version: Optional[str]
+    app_id: Optional[str]
+    request: Optional[str]  # JSON (jsonpickle)
+    recovery_attempts: Optional[int]
+    authenticated_user: Optional[str]
+    assumed_role: Optional[str]
+    authenticated_roles: Optional[str]  # JSON list of roles.
+    queue_name: Optional[str]
+
+
+def _list_workflows(
+    config: ConfigFile,
+    li: int,
+    user: Optional[str],
+    starttime: Optional[str],
+    endtime: Optional[str],
+    status: Optional[str],
+    request: bool,
+    appversion: Optional[str],
+) -> List[WorkflowInformation]:
+
+    sys_db = None
+
+    try:
+        sys_db = SystemDatabase(config)
+
+        input = GetWorkflowsInput()
+        input.authenticated_user = user
+        input.start_time = starttime
+        input.end_time = endtime
+        if status is not None:
+            input.status = cast(WorkflowStatuses, status)
+        input.application_version = appversion
+        input.limit = li
+
+        output: GetWorkflowsOutput = sys_db.get_workflows(input)
+
+        infos: List[WorkflowInformation] = []
+
+        if output.workflow_uuids is None:
+            typer.echo("No workflows found")
+            return {}
+
+        for workflow_id in output.workflow_uuids:
+            info = _get_workflow_info(
+                sys_db, workflow_id, request
+            )  # Call the method for each ID
+
+            if info is not None:
+                infos.append(info)
+
+        return infos
+    except Exception as e:
+        typer.echo(f"Error listing workflows: {e}")
+        return []
+    finally:
+        if sys_db:
+            sys_db.destroy()
+
+
+def _get_workflow(
+    config: ConfigFile, uuid: str, request: bool
+) -> Optional[WorkflowInformation]:
+    sys_db = None
+
+    try:
+        sys_db = SystemDatabase(config)
+
+        info = _get_workflow_info(sys_db, uuid, request)
+        return info
+
+    except Exception as e:
+        typer.echo(f"Error getting workflow: {e}")
+        return None
+    finally:
+        if sys_db:
+            sys_db.destroy()
+
+
+def _cancel_workflow(config: ConfigFile, uuid: str) -> None:
+    # config = load_config()
+    sys_db = None
+
+    try:
+        sys_db = SystemDatabase(config)
+        sys_db.set_workflow_status(uuid, WorkflowStatusString.CANCELLED, False)
+        return
+
+    except Exception as e:
+        typer.echo(f"Failed to connect to DBOS system database: {e}")
+        return None
+    finally:
+        if sys_db:
+            sys_db.destroy()
+
+
+def _get_workflow_info(
+    sys_db: SystemDatabase, workflowUUID: str, getRequest: bool
+) -> Optional[WorkflowInformation]:
+
+    info = sys_db.get_workflow_status(workflowUUID)
+    if info is None:
+        return None
+
+    winfo = WorkflowInformation()
+
+    winfo.workflowUUID = workflowUUID
+    winfo.status = info["status"]
+    winfo.workflowName = info["name"]
+    winfo.workflowClassName = info["class_name"]
+    winfo.workflowConfigName = info["config_name"]
+    winfo.executor_id = info["executor_id"]
+    winfo.app_version = info["app_version"]
+    winfo.app_id = info["app_id"]
+    winfo.recovery_attempts = info["recovery_attempts"]
+    winfo.authenticated_user = info["authenticated_user"]
+    winfo.assumed_role = info["assumed_role"]
+    winfo.authenticated_roles = info["authenticated_roles"]
+    winfo.queue_name = info["queue_name"]
+
+    # no input field
+    input_data = sys_db.get_workflow_inputs(workflowUUID)
+    if input_data is not None:
+        winfo.input = input_data
+
+    if info.get("status") == "SUCCESS":
+        result = sys_db.await_workflow_result(workflowUUID)
+        winfo.output = result
+    elif info.get("status") == "ERROR":
+        try:
+            sys_db.await_workflow_result(workflowUUID)
+        except Exception as e:
+            winfo.error = str(e)
+
+    if not getRequest:
+        winfo.request = None
+
+    return winfo
```
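These helpers appear to back the new workflow-management commands in `dbos/cli/cli.py`. A sketch of driving them from a script, assuming a valid `dbos-config.yaml`; the leading underscores mark them as internal, so the CLI is the supported entry point:

```python
from dbos import load_config
from dbos._workflow_commands import _cancel_workflow, _get_workflow, _list_workflows

config = load_config()

infos = _list_workflows(
    config,
    li=10,            # limit on the number of workflows returned
    user=None,        # filter by authenticated user
    starttime=None,   # ISO-8601 lower bound on creation time
    endtime=None,     # ISO-8601 upper bound on creation time
    status=None,      # e.g. "PENDING", "SUCCESS", "ERROR"
    request=False,    # include the serialized request?
    appversion=None,  # filter by application version
)
for info in infos:
    print(info.workflowUUID, info.status, info.workflowName)

# Inspect or cancel a single workflow (the UUID here is illustrative).
detail = _get_workflow(config, "some-workflow-uuid", request=False)
_cancel_workflow(config, "some-workflow-uuid")
```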
dbos/cli/_github_init.py
ADDED
```diff
@@ -0,0 +1,107 @@
+import os
+from base64 import b64decode
+from typing import List, TypedDict
+
+import requests
+
+DEMO_REPO_API = "https://api.github.com/repos/dbos-inc/dbos-demo-apps"
+PY_DEMO_PATH = "python/"
+BRANCH = "main"
+
+
+class GitHubTreeItem(TypedDict):
+    path: str
+    mode: str
+    type: str
+    sha: str
+    url: str
+    size: int
+
+
+class GitHubTree(TypedDict):
+    sha: str
+    url: str
+    tree: List[GitHubTreeItem]
+    truncated: bool
+
+
+class GitHubItem(TypedDict):
+    sha: str
+    node_id: str
+    url: str
+    content: str
+    encoding: str
+    size: int
+
+
+def _fetch_github(url: str) -> requests.Response:
+    headers = {}
+    github_token = os.getenv("GITHUB_TOKEN")
+    if github_token:
+        headers["Authorization"] = f"Bearer {github_token}"
+
+    response = requests.get(url, headers=headers)
+
+    if not response.ok:
+        if response.headers.get("x-ratelimit-remaining") == "0":
+            raise Exception(
+                "Error fetching from GitHub API: rate limit exceeded.\n"
+                "Please wait a few minutes and try again.\n"
+                "To increase the limit, you can create a personal access token and set it in the GITHUB_TOKEN environment variable.\n"
+                "Details: https://docs.github.com/en/rest/using-the-rest-api/rate-limits-for-the-rest-api"
+            )
+        elif response.status_code == 401:
+            raise Exception(
+                f"Error fetching content from GitHub {url}: {response.status_code} {response.reason}.\n"
+                "Please ensure your GITHUB_TOKEN environment variable is set to a valid personal access token."
+            )
+        raise Exception(
+            f"Error fetching content from GitHub {url}: {response.status_code} {response.reason}"
+        )
+
+    return response
+
+
+def _fetch_github_tree(tag: str) -> List[GitHubTreeItem]:
+    response = _fetch_github(f"{DEMO_REPO_API}/git/trees/{tag}?recursive=1")
+    tree_data: GitHubTree = response.json()
+    return tree_data["tree"]
+
+
+def _fetch_github_item(url: str) -> str:
+    response = _fetch_github(url)
+    item: GitHubItem = response.json()
+    return b64decode(item["content"]).decode("utf-8")
+
+
+def create_template_from_github(app_name: str, template_name: str) -> None:
+    print(
+        f"Creating a new application named {app_name} from the template {template_name}"
+    )
+
+    tree = _fetch_github_tree(BRANCH)
+    template_path = f"{PY_DEMO_PATH}{template_name}/"
+
+    files_to_download = [
+        item
+        for item in tree
+        if item["path"].startswith(template_path) and item["type"] == "blob"
+    ]
+
+    # Download every file from the template
+    for item in files_to_download:
+        raw_content = _fetch_github_item(item["url"])
+        file_path = item["path"].replace(template_path, "")
+        target_path = os.path.join(".", file_path)
+
+        # Create directory if it doesn't exist
+        os.makedirs(os.path.dirname(target_path), exist_ok=True)
+
+        # Write file with proper permissions
+        with open(target_path, "w", encoding="utf-8") as f:
+            f.write(raw_content)
+        os.chmod(target_path, int(item["mode"], 8))
+
+    print(
+        f"Downloaded {len(files_to_download)} files from the template GitHub repository"
+    )
```
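`create_template_from_github` scaffolds a project by downloading one of the Python demo apps from `dbos-inc/dbos-demo-apps` into the current directory. A sketch, run from an empty scratch directory; `widget-store` is assumed here to be one of the templates under the repository's `python/` folder, and setting `GITHUB_TOKEN` is optional but raises the API rate limit:

```python
import os
import tempfile

from dbos.cli._github_init import create_template_from_github

# Files land in the current working directory, so switch to a scratch dir.
os.chdir(tempfile.mkdtemp(prefix="dbos-demo-"))
create_template_from_github(app_name="my-app", template_name="widget-store")
```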
dbos/cli/_template_init.py
ADDED

```diff
@@ -0,0 +1,98 @@
+import os
+import shutil
+import typing
+from os import path
+from typing import Any
+
+import tomlkit
+from rich import print
+
+
+def get_templates_directory() -> str:
+    import dbos
+
+    package_dir = path.abspath(path.dirname(dbos.__file__))
+    return path.join(package_dir, "_templates")
+
+
+def _copy_dbos_template(src: str, dst: str, ctx: dict[str, str]) -> None:
+    with open(src, "r") as f:
+        content = f.read()
+
+    for key, value in ctx.items():
+        content = content.replace(f"${{{key}}}", value)
+
+    with open(dst, "w") as f:
+        f.write(content)
+
+
+def _copy_template_dir(src_dir: str, dst_dir: str, ctx: dict[str, str]) -> None:
+
+    for root, dirs, files in os.walk(src_dir, topdown=True):
+        dirs[:] = [d for d in dirs if d != "__package"]
+
+        dst_root = path.join(dst_dir, path.relpath(root, src_dir))
+        if len(dirs) == 0:
+            os.makedirs(dst_root, exist_ok=True)
+        else:
+            for dir in dirs:
+                os.makedirs(path.join(dst_root, dir), exist_ok=True)
+
+        for file in files:
+            src = path.join(root, file)
+            base, ext = path.splitext(file)
+
+            dst = path.join(dst_root, base if ext == ".dbos" else file)
+            if path.exists(dst):
+                print(f"[yellow]File {dst} already exists, skipping[/yellow]")
+                continue
+
+            if ext == ".dbos":
+                _copy_dbos_template(src, dst, ctx)
+            else:
+                shutil.copy(src, dst)
+
+
+def copy_template(src_dir: str, project_name: str, config_mode: bool) -> None:
+
+    dst_dir = path.abspath(".")
+
+    package_name = project_name.replace("-", "_")
+    ctx = {
+        "project_name": project_name,
+        "package_name": package_name,
+        "migration_command": "alembic upgrade head",
+    }
+
+    if config_mode:
+        ctx["package_name"] = "."
+        ctx["migration_command"] = "echo 'No migrations specified'"
+        _copy_dbos_template(
+            os.path.join(src_dir, "dbos-config.yaml.dbos"),
+            os.path.join(dst_dir, "dbos-config.yaml"),
+            ctx,
+        )
+    else:
+        _copy_template_dir(src_dir, dst_dir, ctx)
+        _copy_template_dir(
+            path.join(src_dir, "__package"), path.join(dst_dir, package_name), ctx
+        )
+
+
+def get_project_name() -> typing.Union[str, None]:
+    name = None
+    try:
+        with open("pyproject.toml", "rb") as file:
+            pyproj = typing.cast(dict[str, Any], tomlkit.load(file))
+            name = typing.cast(str, pyproj["project"]["name"])
+    except:
+        pass
+
+    if name == None:
+        try:
+            _, parent = path.split(path.abspath("."))
+            name = parent
+        except:
+            pass
+
+    return name
```