dbos 0.19.0a9__tar.gz → 0.20.0a3__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dbos might be problematic.

Files changed (90)
  1. {dbos-0.19.0a9 → dbos-0.20.0a3}/PKG-INFO +1 -1
  2. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_admin_server.py +45 -2
  3. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_core.py +40 -4
  4. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_dbos.py +19 -0
  5. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_fastapi.py +6 -2
  6. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_flask.py +6 -2
  7. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_workflow_commands.py +4 -5
  8. dbos-0.20.0a3/dbos/cli/_github_init.py +107 -0
  9. dbos-0.20.0a3/dbos/cli/_template_init.py +98 -0
  10. {dbos-0.19.0a9/dbos → dbos-0.20.0a3/dbos/cli}/cli.py +101 -127
  11. {dbos-0.19.0a9 → dbos-0.20.0a3}/pyproject.toml +2 -2
  12. {dbos-0.19.0a9 → dbos-0.20.0a3}/tests/test_admin_server.py +119 -1
  13. {dbos-0.19.0a9 → dbos-0.20.0a3}/tests/test_fastapi.py +20 -1
  14. {dbos-0.19.0a9 → dbos-0.20.0a3}/tests/test_flask.py +20 -1
  15. dbos-0.20.0a3/tests/test_package.py +92 -0
  16. dbos-0.19.0a9/tests/test_package.py +0 -84
  17. {dbos-0.19.0a9 → dbos-0.20.0a3}/LICENSE +0 -0
  18. {dbos-0.19.0a9 → dbos-0.20.0a3}/README.md +0 -0
  19. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/__init__.py +0 -0
  20. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_app_db.py +0 -0
  21. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_classproperty.py +0 -0
  22. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_cloudutils/authentication.py +0 -0
  23. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_cloudutils/cloudutils.py +0 -0
  24. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_cloudutils/databases.py +0 -0
  25. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_context.py +0 -0
  26. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_croniter.py +0 -0
  27. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_db_wizard.py +0 -0
  28. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_dbos_config.py +0 -0
  29. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_error.py +0 -0
  30. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_kafka.py +0 -0
  31. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_kafka_message.py +0 -0
  32. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_logger.py +0 -0
  33. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_migrations/env.py +0 -0
  34. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_migrations/script.py.mako +0 -0
  35. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -0
  36. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -0
  37. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_migrations/versions/5c361fc04708_added_system_tables.py +0 -0
  38. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_migrations/versions/a3b18ad34abe_added_triggers.py +0 -0
  39. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -0
  40. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_migrations/versions/d76646551a6c_workflow_queue.py +0 -0
  41. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_migrations/versions/eab0cc1d9a14_job_queue.py +0 -0
  42. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_outcome.py +0 -0
  43. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_queue.py +0 -0
  44. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_recovery.py +0 -0
  45. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_registrations.py +0 -0
  46. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_request.py +0 -0
  47. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_roles.py +0 -0
  48. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_scheduler.py +0 -0
  49. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_schemas/__init__.py +0 -0
  50. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_schemas/application_database.py +0 -0
  51. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_schemas/system_database.py +0 -0
  52. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_serialization.py +0 -0
  53. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_sys_db.py +0 -0
  54. {dbos-0.19.0a9/dbos/_templates/hello → dbos-0.20.0a3/dbos/_templates/dbos-db-starter}/README.md +0 -0
  55. {dbos-0.19.0a9/dbos/_templates/hello → dbos-0.20.0a3/dbos/_templates/dbos-db-starter}/__package/__init__.py +0 -0
  56. {dbos-0.19.0a9/dbos/_templates/hello → dbos-0.20.0a3/dbos/_templates/dbos-db-starter}/__package/main.py +0 -0
  57. {dbos-0.19.0a9/dbos/_templates/hello → dbos-0.20.0a3/dbos/_templates/dbos-db-starter}/__package/schema.py +0 -0
  58. {dbos-0.19.0a9/dbos/_templates/hello → dbos-0.20.0a3/dbos/_templates/dbos-db-starter}/alembic.ini +0 -0
  59. {dbos-0.19.0a9/dbos/_templates/hello → dbos-0.20.0a3/dbos/_templates/dbos-db-starter}/dbos-config.yaml.dbos +0 -0
  60. {dbos-0.19.0a9/dbos/_templates/hello → dbos-0.20.0a3/dbos/_templates/dbos-db-starter}/migrations/env.py.dbos +0 -0
  61. {dbos-0.19.0a9/dbos/_templates/hello → dbos-0.20.0a3/dbos/_templates/dbos-db-starter}/migrations/script.py.mako +0 -0
  62. {dbos-0.19.0a9/dbos/_templates/hello → dbos-0.20.0a3/dbos/_templates/dbos-db-starter}/migrations/versions/2024_07_31_180642_init.py +0 -0
  63. {dbos-0.19.0a9/dbos/_templates/hello → dbos-0.20.0a3/dbos/_templates/dbos-db-starter}/start_postgres_docker.py +0 -0
  64. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/_tracer.py +0 -0
  65. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/dbos-config.schema.json +0 -0
  66. {dbos-0.19.0a9 → dbos-0.20.0a3}/dbos/py.typed +0 -0
  67. {dbos-0.19.0a9 → dbos-0.20.0a3}/tests/__init__.py +0 -0
  68. {dbos-0.19.0a9 → dbos-0.20.0a3}/tests/atexit_no_ctor.py +0 -0
  69. {dbos-0.19.0a9 → dbos-0.20.0a3}/tests/atexit_no_launch.py +0 -0
  70. {dbos-0.19.0a9 → dbos-0.20.0a3}/tests/classdefs.py +0 -0
  71. {dbos-0.19.0a9 → dbos-0.20.0a3}/tests/conftest.py +0 -0
  72. {dbos-0.19.0a9 → dbos-0.20.0a3}/tests/more_classdefs.py +0 -0
  73. {dbos-0.19.0a9 → dbos-0.20.0a3}/tests/queuedworkflow.py +0 -0
  74. {dbos-0.19.0a9 → dbos-0.20.0a3}/tests/test_async.py +0 -0
  75. {dbos-0.19.0a9 → dbos-0.20.0a3}/tests/test_classdecorators.py +0 -0
  76. {dbos-0.19.0a9 → dbos-0.20.0a3}/tests/test_concurrency.py +0 -0
  77. {dbos-0.19.0a9 → dbos-0.20.0a3}/tests/test_config.py +0 -0
  78. {dbos-0.19.0a9 → dbos-0.20.0a3}/tests/test_croniter.py +0 -0
  79. {dbos-0.19.0a9 → dbos-0.20.0a3}/tests/test_dbos.py +0 -0
  80. {dbos-0.19.0a9 → dbos-0.20.0a3}/tests/test_failures.py +0 -0
  81. {dbos-0.19.0a9 → dbos-0.20.0a3}/tests/test_fastapi_roles.py +0 -0
  82. {dbos-0.19.0a9 → dbos-0.20.0a3}/tests/test_kafka.py +0 -0
  83. {dbos-0.19.0a9 → dbos-0.20.0a3}/tests/test_outcome.py +0 -0
  84. {dbos-0.19.0a9 → dbos-0.20.0a3}/tests/test_queue.py +0 -0
  85. {dbos-0.19.0a9 → dbos-0.20.0a3}/tests/test_scheduler.py +0 -0
  86. {dbos-0.19.0a9 → dbos-0.20.0a3}/tests/test_schema_migration.py +0 -0
  87. {dbos-0.19.0a9 → dbos-0.20.0a3}/tests/test_singleton.py +0 -0
  88. {dbos-0.19.0a9 → dbos-0.20.0a3}/tests/test_spans.py +0 -0
  89. {dbos-0.19.0a9 → dbos-0.20.0a3}/tests/test_workflow_cmds.py +0 -0
  90. {dbos-0.19.0a9 → dbos-0.20.0a3}/version/__init__.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: dbos
- Version: 0.19.0a9
+ Version: 0.20.0a3
  Summary: Ultra-lightweight durable execution in Python
  Author-Email: "DBOS, Inc." <contact@dbos.dev>
  License: MIT
dbos/_admin_server.py
@@ -1,6 +1,7 @@
  from __future__ import annotations

  import json
+ import re
  import threading
  from functools import partial
  from http.server import BaseHTTPRequestHandler, ThreadingHTTPServer
@@ -15,6 +16,9 @@ if TYPE_CHECKING:
  _health_check_path = "/dbos-healthz"
  _workflow_recovery_path = "/dbos-workflow-recovery"
  _deactivate_path = "/deactivate"
+ # /workflows/:workflow_id/cancel
+ # /workflows/:workflow_id/resume
+ # /workflows/:workflow_id/restart


  class AdminServer:
@@ -79,12 +83,51 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
              self._end_headers()
              self.wfile.write(json.dumps(workflow_ids).encode("utf-8"))
          else:
-             self.send_response(404)
-             self._end_headers()
+
+             restart_match = re.match(
+                 r"^/workflows/(?P<workflow_id>[^/]+)/restart$", self.path
+             )
+             resume_match = re.match(
+                 r"^/workflows/(?P<workflow_id>[^/]+)/resume$", self.path
+             )
+             cancel_match = re.match(
+                 r"^/workflows/(?P<workflow_id>[^/]+)/cancel$", self.path
+             )
+
+             if restart_match:
+                 workflow_id = restart_match.group("workflow_id")
+                 self._handle_restart(workflow_id)
+             elif resume_match:
+                 workflow_id = resume_match.group("workflow_id")
+                 self._handle_resume(workflow_id)
+             elif cancel_match:
+                 workflow_id = cancel_match.group("workflow_id")
+                 self._handle_cancel(workflow_id)
+             else:
+                 self.send_response(404)
+                 self._end_headers()

      def log_message(self, format: str, *args: Any) -> None:
          return  # Disable admin server request logging

+     def _handle_restart(self, workflow_id: str) -> None:
+         self.dbos.restart_workflow(workflow_id)
+         print("Restarting workflow", workflow_id)
+         self.send_response(204)
+         self._end_headers()
+
+     def _handle_resume(self, workflow_id: str) -> None:
+         print("Resuming workflow", workflow_id)
+         self.dbos.resume_workflow(workflow_id)
+         self.send_response(204)
+         self._end_headers()
+
+     def _handle_cancel(self, workflow_id: str) -> None:
+         print("Cancelling workflow", workflow_id)
+         self.dbos.cancel_workflow(workflow_id)
+         self.send_response(204)
+         self._end_headers()
+

  # Be consistent with DBOS-TS response.
  class PerfUtilization(TypedDict):
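
Taken together, these hunks add three management routes to the admin server. A minimal client-side sketch, assuming the routes are served by the handler's POST path (the surrounding recovery-endpoint context suggests this) and that the admin server listens on its usual local port, assumed here to be 3001; the workflow ID is hypothetical:

import requests

ADMIN = "http://localhost:3001"          # assumed admin server address and port
workflow_id = "example-workflow-id"      # hypothetical ID of an existing workflow

# Each route returns 204 No Content on success, per the handlers above.
for action in ("cancel", "resume", "restart"):
    resp = requests.post(f"{ADMIN}/workflows/{workflow_id}/{action}")
    print(action, resp.status_code)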
dbos/_core.py
@@ -266,7 +266,9 @@ def _execute_workflow_wthread(
          raise


- def execute_workflow_by_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[Any]":
+ def execute_workflow_by_id(
+     dbos: "DBOS", workflow_id: str, startNew: bool = False
+ ) -> "WorkflowHandle[Any]":
      status = dbos._sys_db.get_workflow_status(workflow_id)
      if not status:
          raise DBOSRecoveryError(workflow_id, "Workflow status not found")
@@ -293,7 +295,8 @@ def execute_workflow_by_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[An
                  workflow_id,
                  f"Cannot execute workflow because instance '{iname}' is not registered",
              )
-         with SetWorkflowID(workflow_id):
+
+         if startNew:
              return start_workflow(
                  dbos,
                  wf_func,
@@ -303,6 +306,17 @@ def execute_workflow_by_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[An
                  *inputs["args"],
                  **inputs["kwargs"],
              )
+         else:
+             with SetWorkflowID(workflow_id):
+                 return start_workflow(
+                     dbos,
+                     wf_func,
+                     status["queue_name"],
+                     True,
+                     dbos._registry.instance_info_map[iname],
+                     *inputs["args"],
+                     **inputs["kwargs"],
+                 )
      elif status["class_name"] is not None:
          class_name = status["class_name"]
          if class_name not in dbos._registry.class_info_map:
@@ -310,7 +324,8 @@ def execute_workflow_by_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[An
                  workflow_id,
                  f"Cannot execute workflow because class '{class_name}' is not registered",
              )
-         with SetWorkflowID(workflow_id):
+
+         if startNew:
              return start_workflow(
                  dbos,
                  wf_func,
@@ -320,8 +335,19 @@ def execute_workflow_by_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[An
                  *inputs["args"],
                  **inputs["kwargs"],
              )
+         else:
+             with SetWorkflowID(workflow_id):
+                 return start_workflow(
+                     dbos,
+                     wf_func,
+                     status["queue_name"],
+                     True,
+                     dbos._registry.class_info_map[class_name],
+                     *inputs["args"],
+                     **inputs["kwargs"],
+                 )
      else:
-         with SetWorkflowID(workflow_id):
+         if startNew:
              return start_workflow(
                  dbos,
                  wf_func,
@@ -330,6 +356,16 @@ def execute_workflow_by_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[An
                  *inputs["args"],
                  **inputs["kwargs"],
              )
+         else:
+             with SetWorkflowID(workflow_id):
+                 return start_workflow(
+                     dbos,
+                     wf_func,
+                     status["queue_name"],
+                     True,
+                     *inputs["args"],
+                     **inputs["kwargs"],
+                 )


  @overload
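
The thread running through these _core.py hunks is the new startNew flag: with startNew=False (the default) the workflow is re-dispatched under its original ID inside SetWorkflowID, so durable execution resumes where it left off, while startNew=True calls start_workflow without pinning the ID, so the stored inputs run again under a freshly generated workflow ID. An illustrative sketch, assuming dbos is a launched DBOS instance and wf_id names a previously recorded workflow (note the function lives in an internal module):

from dbos._core import execute_workflow_by_id

# Resume semantics: reuse the stored inputs and the original workflow ID.
resume_handle = execute_workflow_by_id(dbos, wf_id)

# Restart semantics: reuse the stored inputs but run under a new workflow ID.
restart_handle = execute_workflow_by_id(dbos, wf_id, True)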
dbos/_dbos.py
@@ -56,6 +56,7 @@ from ._registrations import (
  )
  from ._roles import default_required_roles, required_roles
  from ._scheduler import ScheduledWorkflow, scheduled
+ from ._sys_db import WorkflowStatusString
  from ._tracer import dbos_tracer

  if TYPE_CHECKING:
@@ -231,6 +232,7 @@ class DBOS:
                      f"DBOS configured multiple times with conflicting information"
                  )
              config = _dbos_global_registry.config
+
          _dbos_global_instance = super().__new__(cls)
          _dbos_global_instance.__init__(fastapi=fastapi, config=config, flask=flask)  # type: ignore
      else:
@@ -767,6 +769,11 @@
          """Execute a workflow by ID (for recovery)."""
          return execute_workflow_by_id(_get_dbos_instance(), workflow_id)

+     @classmethod
+     def restart_workflow(cls, workflow_id: str) -> None:
+         """Execute a workflow by ID (for recovery)."""
+         execute_workflow_by_id(_get_dbos_instance(), workflow_id, True)
+
      @classmethod
      def recover_pending_workflows(
          cls, executor_ids: List[str] = ["local"]
@@ -774,6 +781,18 @@
          """Find all PENDING workflows and execute them."""
          return recover_pending_workflows(_get_dbos_instance(), executor_ids)

+     @classmethod
+     def cancel_workflow(cls, workflow_id: str) -> None:
+         """Cancel a workflow by ID."""
+         _get_dbos_instance()._sys_db.set_workflow_status(
+             workflow_id, WorkflowStatusString.CANCELLED, False
+         )
+
+     @classmethod
+     def resume_workflow(cls, workflow_id: str) -> None:
+         """Resume a workflow by ID."""
+         execute_workflow_by_id(_get_dbos_instance(), workflow_id, False)
+
      @classproperty
      def logger(cls) -> Logger:
          """Return the DBOS `Logger` for the current context."""
dbos/_fastapi.py
@@ -94,7 +94,11 @@ def setup_fastapi_middleware(app: FastAPI, dbos: DBOS) -> None:
          with EnterDBOSHandler(attributes):
              ctx = assert_current_dbos_context()
              ctx.request = _make_request(request)
-             workflow_id = request.headers.get("dbos-idempotency-key", "")
-             with SetWorkflowID(workflow_id):
+             workflow_id = request.headers.get("dbos-idempotency-key")
+             if workflow_id is not None:
+                 # Set the workflow ID for the handler
+                 with SetWorkflowID(workflow_id):
+                     response = await call_next(request)
+             else:
                  response = await call_next(request)
          return response
dbos/_flask.py
@@ -34,8 +34,12 @@ class FlaskMiddleware:
          with EnterDBOSHandler(attributes):
              ctx = assert_current_dbos_context()
              ctx.request = _make_request(request)
-             workflow_id = request.headers.get("dbos-idempotency-key", "")
-             with SetWorkflowID(workflow_id):
+             workflow_id = request.headers.get("dbos-idempotency-key")
+             if workflow_id is not None:
+                 # Set the workflow ID for the handler
+                 with SetWorkflowID(workflow_id):
+                     response = self.app(environ, start_response)
+             else:
                  response = self.app(environ, start_response)
          return response

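
Both middleware changes have the same effect: the handler's workflow ID is pinned only when the client actually sends a dbos-idempotency-key header, instead of always entering SetWorkflowID with an empty string. A client-side sketch (the endpoint URL and key value are made up):

import requests

headers = {"dbos-idempotency-key": "order-42"}  # hypothetical idempotency key

# Two requests with the same key map to the same workflow ID, so the
# handler's workflow executes only once.
requests.post("http://localhost:8000/checkout", headers=headers)
requests.post("http://localhost:8000/checkout", headers=headers)

# Without the header, each request gets a generated workflow ID as before.
requests.post("http://localhost:8000/checkout")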
dbos/_workflow_commands.py
@@ -1,3 +1,6 @@
+ import importlib
+ import os
+ import sys
  from typing import Any, List, Optional, cast

  import typer
@@ -6,6 +9,7 @@ from rich import print
  from dbos import DBOS

  from . import _serialization, load_config
+ from ._core import execute_workflow_by_id
  from ._dbos_config import ConfigFile, _is_valid_app_name
  from ._sys_db import (
      GetWorkflowsInput,
@@ -123,11 +127,6 @@ def _cancel_workflow(config: ConfigFile, uuid: str) -> None:
      sys_db.destroy()


- def _reattempt_workflow(uuid: str, startNewWorkflow: bool) -> None:
-     print(f"Reattempt workflow info for {uuid} not implemented")
-     return
-
-
  def _get_workflow_info(
      sys_db: SystemDatabase, workflowUUID: str, getRequest: bool
  ) -> Optional[WorkflowInformation]:
dbos/cli/_github_init.py (new file)
@@ -0,0 +1,107 @@
+ import os
+ from base64 import b64decode
+ from typing import List, TypedDict
+
+ import requests
+
+ DEMO_REPO_API = "https://api.github.com/repos/dbos-inc/dbos-demo-apps"
+ PY_DEMO_PATH = "python/"
+ BRANCH = "main"
+
+
+ class GitHubTreeItem(TypedDict):
+     path: str
+     mode: str
+     type: str
+     sha: str
+     url: str
+     size: int
+
+
+ class GitHubTree(TypedDict):
+     sha: str
+     url: str
+     tree: List[GitHubTreeItem]
+     truncated: bool
+
+
+ class GitHubItem(TypedDict):
+     sha: str
+     node_id: str
+     url: str
+     content: str
+     encoding: str
+     size: int
+
+
+ def _fetch_github(url: str) -> requests.Response:
+     headers = {}
+     github_token = os.getenv("GITHUB_TOKEN")
+     if github_token:
+         headers["Authorization"] = f"Bearer {github_token}"
+
+     response = requests.get(url, headers=headers)
+
+     if not response.ok:
+         if response.headers.get("x-ratelimit-remaining") == "0":
+             raise Exception(
+                 "Error fetching from GitHub API: rate limit exceeded.\n"
+                 "Please wait a few minutes and try again.\n"
+                 "To increase the limit, you can create a personal access token and set it in the GITHUB_TOKEN environment variable.\n"
+                 "Details: https://docs.github.com/en/rest/using-the-rest-api/rate-limits-for-the-rest-api"
+             )
+         elif response.status_code == 401:
+             raise Exception(
+                 f"Error fetching content from GitHub {url}: {response.status_code} {response.reason}.\n"
+                 "Please ensure your GITHUB_TOKEN environment variable is set to a valid personal access token."
+             )
+         raise Exception(
+             f"Error fetching content from GitHub {url}: {response.status_code} {response.reason}"
+         )
+
+     return response
+
+
+ def _fetch_github_tree(tag: str) -> List[GitHubTreeItem]:
+     response = _fetch_github(f"{DEMO_REPO_API}/git/trees/{tag}?recursive=1")
+     tree_data: GitHubTree = response.json()
+     return tree_data["tree"]
+
+
+ def _fetch_github_item(url: str) -> str:
+     response = _fetch_github(url)
+     item: GitHubItem = response.json()
+     return b64decode(item["content"]).decode("utf-8")
+
+
+ def create_template_from_github(app_name: str, template_name: str) -> None:
+     print(
+         f"Creating a new application named {app_name} from the template {template_name}"
+     )
+
+     tree = _fetch_github_tree(BRANCH)
+     template_path = f"{PY_DEMO_PATH}{template_name}/"
+
+     files_to_download = [
+         item
+         for item in tree
+         if item["path"].startswith(template_path) and item["type"] == "blob"
+     ]
+
+     # Download every file from the template
+     for item in files_to_download:
+         raw_content = _fetch_github_item(item["url"])
+         file_path = item["path"].replace(template_path, "")
+         target_path = os.path.join(".", file_path)
+
+         # Create directory if it doesn't exist
+         os.makedirs(os.path.dirname(target_path), exist_ok=True)
+
+         # Write file with proper permissions
+         with open(target_path, "w", encoding="utf-8") as f:
+             f.write(raw_content)
+         os.chmod(target_path, int(item["mode"], 8))
+
+     print(
+         f"Downloaded {len(files_to_download)} files from the template GitHub repository"
+     )
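
As a usage sketch, the new module downloads one of the Python demo apps from dbos-inc/dbos-demo-apps into the current working directory. The template name below is an assumption (any directory under python/ in that repository should work), and GITHUB_TOKEN is optional but raises the unauthenticated rate limit:

import os

from dbos.cli._github_init import create_template_from_github

os.makedirs("my-app", exist_ok=True)
os.chdir("my-app")  # files are written relative to the current directory

# "widget-store" is a hypothetical template name taken from the demo repository.
create_template_from_github(app_name="my-app", template_name="widget-store")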
dbos/cli/_template_init.py (new file)
@@ -0,0 +1,98 @@
+ import os
+ import shutil
+ import typing
+ from os import path
+ from typing import Any
+
+ import tomlkit
+ from rich import print
+
+
+ def get_templates_directory() -> str:
+     import dbos
+
+     package_dir = path.abspath(path.dirname(dbos.__file__))
+     return path.join(package_dir, "_templates")
+
+
+ def _copy_dbos_template(src: str, dst: str, ctx: dict[str, str]) -> None:
+     with open(src, "r") as f:
+         content = f.read()
+
+     for key, value in ctx.items():
+         content = content.replace(f"${{{key}}}", value)
+
+     with open(dst, "w") as f:
+         f.write(content)
+
+
+ def _copy_template_dir(src_dir: str, dst_dir: str, ctx: dict[str, str]) -> None:
+
+     for root, dirs, files in os.walk(src_dir, topdown=True):
+         dirs[:] = [d for d in dirs if d != "__package"]
+
+         dst_root = path.join(dst_dir, path.relpath(root, src_dir))
+         if len(dirs) == 0:
+             os.makedirs(dst_root, exist_ok=True)
+         else:
+             for dir in dirs:
+                 os.makedirs(path.join(dst_root, dir), exist_ok=True)
+
+         for file in files:
+             src = path.join(root, file)
+             base, ext = path.splitext(file)
+
+             dst = path.join(dst_root, base if ext == ".dbos" else file)
+             if path.exists(dst):
+                 print(f"[yellow]File {dst} already exists, skipping[/yellow]")
+                 continue
+
+             if ext == ".dbos":
+                 _copy_dbos_template(src, dst, ctx)
+             else:
+                 shutil.copy(src, dst)
+
+
+ def copy_template(src_dir: str, project_name: str, config_mode: bool) -> None:
+
+     dst_dir = path.abspath(".")
+
+     package_name = project_name.replace("-", "_")
+     ctx = {
+         "project_name": project_name,
+         "package_name": package_name,
+         "migration_command": "alembic upgrade head",
+     }
+
+     if config_mode:
+         ctx["package_name"] = "."
+         ctx["migration_command"] = "echo 'No migrations specified'"
+         _copy_dbos_template(
+             os.path.join(src_dir, "dbos-config.yaml.dbos"),
+             os.path.join(dst_dir, "dbos-config.yaml"),
+             ctx,
+         )
+     else:
+         _copy_template_dir(src_dir, dst_dir, ctx)
+         _copy_template_dir(
+             path.join(src_dir, "__package"), path.join(dst_dir, package_name), ctx
+         )
+
+
+ def get_project_name() -> typing.Union[str, None]:
+     name = None
+     try:
+         with open("pyproject.toml", "rb") as file:
+             pyproj = typing.cast(dict[str, Any], tomlkit.load(file))
+             name = typing.cast(str, pyproj["project"]["name"])
+     except:
+         pass
+
+     if name == None:
+         try:
+             _, parent = path.split(path.abspath("."))
+             name = parent
+         except:
+             pass
+
+     return name
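
A sketch of how these helpers fit together when scaffolding the bundled dbos-db-starter template (renamed from hello in this release, per the file list) into the current directory; the exact wiring in dbos/cli/cli.py is not shown in this diff, so this only approximates what the init command presumably does:

from os import path

from dbos.cli._template_init import (
    copy_template,
    get_project_name,
    get_templates_directory,
)

project_name = get_project_name() or "my-dbos-app"  # fallback name is made up
src = path.join(get_templates_directory(), "dbos-db-starter")

# config_mode=True writes only dbos-config.yaml; False copies the full template
# plus a package directory derived from the project name.
copy_template(src, project_name, config_mode=False)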