dbos 2.2.0a3__py3-none-any.whl → 2.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: the registry flags this version of dbos as possibly problematic.
- dbos/_client.py +12 -2
- dbos/_core.py +0 -21
- dbos/_dbos_config.py +1 -2
- dbos/_kafka.py +6 -4
- dbos/_logger.py +23 -16
- dbos/_scheduler.py +5 -2
- dbos/_serialization.py +7 -3
- dbos/_sys_db_postgres.py +1 -1
- dbos/_tracer.py +24 -19
- dbos/cli/cli.py +1 -15
- {dbos-2.2.0a3.dist-info → dbos-2.3.0.dist-info}/METADATA +1 -1
- {dbos-2.2.0a3.dist-info → dbos-2.3.0.dist-info}/RECORD +15 -15
- {dbos-2.2.0a3.dist-info → dbos-2.3.0.dist-info}/WHEEL +0 -0
- {dbos-2.2.0a3.dist-info → dbos-2.3.0.dist-info}/entry_points.txt +0 -0
- {dbos-2.2.0a3.dist-info → dbos-2.3.0.dist-info}/licenses/LICENSE +0 -0
dbos/_client.py
CHANGED

@@ -1,4 +1,5 @@
 import asyncio
+import json
 import time
 import uuid
 from typing import (
@@ -63,6 +64,8 @@ class EnqueueOptions(_EnqueueOptionsRequired, total=False):
     priority: int
     max_recovery_attempts: int
     queue_partition_key: str
+    authenticated_user: str
+    authenticated_roles: list[str]


 def validate_enqueue_options(options: EnqueueOptions) -> None:
@@ -189,6 +192,13 @@ class DBOSClient:
             "queue_partition_key": options.get("queue_partition_key"),
         }

+        authenticated_user = options.get("authenticated_user")
+        authenticated_roles = (
+            json.dumps(options.get("authenticated_roles"))
+            if options.get("authenticated_roles")
+            else None
+        )
+
         inputs: WorkflowInputs = {
             "args": args,
             "kwargs": kwargs,
@@ -202,9 +212,9 @@ class DBOSClient:
             "queue_name": queue_name,
             "app_version": enqueue_options_internal["app_version"],
             "config_name": None,
-            "authenticated_user":
+            "authenticated_user": authenticated_user,
             "assumed_role": None,
-            "authenticated_roles":
+            "authenticated_roles": authenticated_roles,
             "output": None,
             "error": None,
             "created_at": None,
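The new EnqueueOptions fields let a client attach an authenticated user and roles to a workflow it enqueues. A minimal sketch of how this might look, assuming DBOSClient.enqueue still accepts an EnqueueOptions dict followed by the workflow's arguments; the connection URL, workflow name, and queue name below are placeholders:

from dbos import DBOSClient

client = DBOSClient("postgresql://postgres:dbos@localhost:5432/app_db")  # placeholder URL

handle = client.enqueue(
    {
        "workflow_name": "process_order",  # hypothetical registered workflow
        "queue_name": "order_queue",       # hypothetical queue
        # New in 2.3.0: identity context propagated to the enqueued workflow.
        "authenticated_user": "alice",
        "authenticated_roles": ["admin", "operator"],
    },
    "order-123",
)
print(handle.get_result())

Note that the roles list is JSON-serialized before being written to the workflow status row, matching how the server stores authenticated_roles.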
dbos/_core.py
CHANGED

@@ -93,14 +93,6 @@ TEMP_SEND_WF_NAME = "<temp>.temp_send_workflow"
 DEBOUNCER_WORKFLOW_NAME = "_dbos_debouncer_workflow"


-def check_is_in_coroutine() -> bool:
-    try:
-        asyncio.get_running_loop()
-        return True
-    except RuntimeError:
-        return False
-
-
 class WorkflowHandleFuture(Generic[R]):

     def __init__(self, workflow_id: str, future: Future[R], dbos: "DBOS"):
@@ -856,11 +848,6 @@ def workflow_wrapper(
             dbos._sys_db.record_get_result(workflow_id, serialized_r, None)
             return r

-        if check_is_in_coroutine() and not inspect.iscoroutinefunction(func):
-            dbos_logger.warning(
-                f"Sync workflow ({get_dbos_func_name(func)}) shouldn't be invoked from within another async function. Define it as async or use asyncio.to_thread instead."
-            )
-
         outcome = (
             wfOutcome.wrap(init_wf, dbos=dbos)
             .also(DBOSAssumeRole(rr))
@@ -1046,10 +1033,6 @@ def decorate_transaction(
             assert (
                 ctx.is_workflow()
             ), "Transactions must be called from within workflows"
-            if check_is_in_coroutine():
-                dbos_logger.warning(
-                    f"Transaction function ({get_dbos_func_name(func)}) shouldn't be invoked from within another async function. Use asyncio.to_thread instead."
-                )
             with DBOSAssumeRole(rr):
                 return invoke_tx(*args, **kwargs)
         else:
@@ -1194,10 +1177,6 @@ def decorate_step(

     @wraps(func)
     def wrapper(*args: Any, **kwargs: Any) -> Any:
-        if check_is_in_coroutine() and not inspect.iscoroutinefunction(func):
-            dbos_logger.warning(
-                f"Sync step ({get_dbos_func_name(func)}) shouldn't be invoked from within another async function. Define it as async or use asyncio.to_thread instead."
-            )
         # If the step is called from a workflow, run it as a step.
         # Otherwise, run it as a normal function.
         ctx = get_local_dbos_context()
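All four removals delete the check_is_in_coroutine guard and the warnings it drove; those warnings pointed users at asyncio.to_thread when calling sync workflows, steps, or transactions from async code. A self-contained sketch of that pattern (blocking_step is illustrative, not a DBOS API):

import asyncio
import time

def blocking_step(order_id: str) -> str:
    # Stand-in for a sync function that does blocking work.
    time.sleep(1)
    return f"processed {order_id}"

async def main() -> None:
    # Run the sync function on a worker thread so the event loop stays free,
    # as the removed warning messages recommended.
    result = await asyncio.to_thread(blocking_step, "order-123")
    print(result)

asyncio.run(main())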
dbos/_dbos_config.py
CHANGED

@@ -444,6 +444,7 @@ def configure_db_engine_parameters(

     # Configure user database engine parameters
     app_engine_kwargs: dict[str, Any] = {
+        "connect_args": {"application_name": "dbos_transact"},
         "pool_timeout": 30,
         "max_overflow": 0,
         "pool_size": 20,
@@ -477,8 +478,6 @@ def is_valid_database_url(database_url: str) -> bool:
         return True
     url = make_url(database_url)
     required_fields = [
-        ("username", "Username must be specified in the connection URL"),
-        ("host", "Host must be specified in the connection URL"),
         ("database", "Database name must be specified in the connection URL"),
     ]
     for field_name, error_message in required_fields:
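The added connect_args tags every pooled connection with an application_name, so DBOS connections are identifiable in pg_stat_activity, and the relaxed validation now requires only a database name in the URL. A sketch of the equivalent SQLAlchemy setup, assuming a PostgreSQL driver such as psycopg2 is installed; the URLs are placeholders:

from sqlalchemy import create_engine
from sqlalchemy.engine import make_url

# With the username/host checks dropped, a URL that relies on driver
# defaults only needs a database name to pass validation.
url = make_url("postgresql:///app_db")
assert url.database == "app_db"

# Roughly the engine parameters DBOS now configures for the app database.
engine = create_engine(
    "postgresql://postgres:dbos@localhost:5432/app_db",  # placeholder URL
    connect_args={"application_name": "dbos_transact"},
    pool_timeout=30,
    max_overflow=0,
    pool_size=20,
)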
dbos/_kafka.py
CHANGED

@@ -1,6 +1,6 @@
 import re
 import threading
-from typing import TYPE_CHECKING, Any, Callable, NoReturn
+from typing import TYPE_CHECKING, Any, Callable, Coroutine, NoReturn

 from confluent_kafka import Consumer, KafkaError, KafkaException

@@ -15,7 +15,9 @@ from ._kafka_message import KafkaMessage
 from ._logger import dbos_logger
 from ._registrations import get_dbos_func_name

-_KafkaConsumerWorkflow =
+_KafkaConsumerWorkflow = (
+    Callable[[KafkaMessage], None] | Callable[[KafkaMessage], Coroutine[Any, Any, None]]
+)

 _kafka_queue: Queue
 _in_order_kafka_queues: dict[str, Queue] = {}
@@ -37,8 +39,8 @@ def _kafka_consumer_loop(
     in_order: bool,
 ) -> None:

-    def on_error(err: KafkaError) ->
-
+    def on_error(err: KafkaError) -> None:
+        dbos_logger.error(f"Exception in Kafka consumer: {err}")

     config["error_cb"] = on_error
     if "auto.offset.reset" not in config:
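The widened _KafkaConsumerWorkflow alias admits coroutines, so a Kafka consumer workflow can now be declared async. A sketch, assuming DBOS.kafka_consumer keeps its usual (config, topics) signature; the broker address, group id, and topic are placeholders:

from dbos import DBOS, KafkaMessage

config = {
    "bootstrap.servers": "localhost:9092",  # placeholder broker
    "group.id": "dbos-example-group",       # placeholder group
}

@DBOS.kafka_consumer(config, ["example-topic"])
@DBOS.workflow()
async def consume(msg: KafkaMessage) -> None:
    # The coroutine form is what the new Coroutine[Any, Any, None] arm allows.
    DBOS.logger.info(f"Got message on {msg.topic}: {msg.value!r}")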
dbos/_logger.py
CHANGED

@@ -68,30 +68,37 @@ def config_logger(config: "ConfigFile") -> None:
     )
     disable_otlp = config.get("telemetry", {}).get("disable_otlp", False)  # type: ignore

-    if not disable_otlp
+    if not disable_otlp:

-        from opentelemetry._logs import set_logger_provider
+        from opentelemetry._logs import get_logger_provider, set_logger_provider
         from opentelemetry.exporter.otlp.proto.http._log_exporter import OTLPLogExporter
         from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler
         from opentelemetry.sdk._logs.export import BatchLogRecordProcessor
         from opentelemetry.sdk.resources import Resource
         from opentelemetry.semconv.attributes.service_attributes import SERVICE_NAME

-
-
-
-
-
-
-
-
-
-
-
-
-
+        # Only set up OTLP provider and exporter if endpoints are provided
+        log_provider = get_logger_provider()
+        if otlp_logs_endpoints is not None:
+            if not isinstance(log_provider, LoggerProvider):
+                log_provider = LoggerProvider(
+                    Resource.create(
+                        attributes={
+                            SERVICE_NAME: config["name"],
+                        }
+                    )
+                )
+                set_logger_provider(log_provider)
+
+            for e in otlp_logs_endpoints:
+                log_provider.add_log_record_processor(
+                    BatchLogRecordProcessor(
+                        OTLPLogExporter(endpoint=e),
+                        export_timeout_millis=5000,
+                    )
                 )
-
+
+        # Even if no endpoints are provided, we still need a LoggerProvider to create the LoggingHandler
        global _otlp_handler
        _otlp_handler = LoggingHandler(logger_provider=log_provider)

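In practice the endpoints reach config_logger from the application's DBOS configuration. A sketch, assuming the public DBOSConfig still accepts otlp_logs_endpoints as in earlier releases; the name, URL, and endpoint are placeholders. With no endpoints set, 2.3.0 no longer installs a global OTLP LoggerProvider but still builds the local LoggingHandler:

from dbos import DBOS, DBOSConfig

config: DBOSConfig = {
    "name": "my-app",  # placeholder application name
    "database_url": "postgresql://postgres:dbos@localhost:5432/app_db",  # placeholder
    # Placeholder endpoint; omit this key and 2.3.0 skips the global
    # OTLP LoggerProvider entirely.
    "otlp_logs_endpoints": ["http://localhost:4318/v1/logs"],
}
DBOS(config=config)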
dbos/_scheduler.py
CHANGED

@@ -2,7 +2,7 @@ import random
 import threading
 import traceback
 from datetime import datetime, timezone
-from typing import TYPE_CHECKING, Callable
+from typing import TYPE_CHECKING, Any, Callable, Coroutine

 from ._logger import dbos_logger
 from ._queue import Queue
@@ -14,7 +14,10 @@ from ._context import SetWorkflowID
 from ._croniter import croniter  # type: ignore
 from ._registrations import get_dbos_func_name

-ScheduledWorkflow =
+ScheduledWorkflow = (
+    Callable[[datetime, datetime], None]
+    | Callable[[datetime, datetime], Coroutine[Any, Any, None]]
+)


 def scheduler_loop(
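As with the Kafka change, the ScheduledWorkflow alias now accepts a coroutine, so a scheduled workflow can be async. A sketch assuming DBOS.scheduled keeps its cron-string signature:

from datetime import datetime

from dbos import DBOS

@DBOS.scheduled("* * * * *")  # every minute
@DBOS.workflow()
async def run_every_minute(scheduled_time: datetime, actual_time: datetime) -> None:
    # The coroutine return type is what the new Coroutine arm of
    # ScheduledWorkflow permits.
    DBOS.logger.info(f"Scheduled for {scheduled_time}, ran at {actual_time}")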
dbos/_serialization.py
CHANGED

@@ -25,9 +25,13 @@ class Serializer(ABC):
 class DefaultSerializer(Serializer):

     def serialize(self, data: Any) -> str:
-
-
-
+        try:
+            pickled_data: bytes = pickle.dumps(data)
+            encoded_data: str = base64.b64encode(pickled_data).decode("utf-8")
+            return encoded_data
+        except Exception as e:
+            dbos_logger.error(f"Error serializing object: {data}", exc_info=e)
+            raise

     def deserialize(cls, serialized_data: str) -> Any:
         pickled_data: bytes = base64.b64decode(serialized_data)
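The default serializer is plain pickle plus base64; the change only wraps it so failures are logged before the exception propagates. A self-contained sketch of the same round trip, with an unpicklable value to exercise the error path:

import base64
import logging
import pickle
from typing import Any

logger = logging.getLogger("serialization-demo")

def serialize(data: Any) -> str:
    try:
        return base64.b64encode(pickle.dumps(data)).decode("utf-8")
    except Exception as e:
        # Mirrors the new behavior: log the offending object, then re-raise.
        logger.error(f"Error serializing object: {data}", exc_info=e)
        raise

def deserialize(serialized: str) -> Any:
    return pickle.loads(base64.b64decode(serialized))

assert deserialize(serialize({"order": 123})) == {"order": 123}
serialize(lambda x: x)  # lambdas don't pickle: the failure is logged, then raised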
dbos/_sys_db_postgres.py
CHANGED

@@ -41,7 +41,7 @@ class PostgresSystemDatabase(SystemDatabase):
                 parameters={"db_name": sysdb_name},
             ).scalar():
                 dbos_logger.info(f"Creating system database {sysdb_name}")
-                conn.execute(sa.text(f
+                conn.execute(sa.text(f'CREATE DATABASE "{sysdb_name}"'))
             engine.dispose()
         else:
             # If we were provided an engine, validate it can connect
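Quoting the identifier preserves case and special characters that an unquoted PostgreSQL identifier would fold or reject. A short sketch of the difference; the database name is a placeholder and connection setup is omitted:

import sqlalchemy as sa

sysdb_name = "MyApp_dbos_sys"  # placeholder; mixed case matters here

# Unquoted, PostgreSQL folds the identifier to lowercase ("myapp_dbos_sys").
unquoted = sa.text(f"CREATE DATABASE {sysdb_name}")

# Quoted, as in 2.3.0: the database is created with the name exactly as given.
quoted = sa.text(f'CREATE DATABASE "{sysdb_name}"')
print(unquoted, quoted, sep="\n")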
dbos/_tracer.py
CHANGED

@@ -25,6 +25,10 @@ class DBOSTracer:
     def config(self, config: ConfigFile) -> None:
         self.otlp_attributes = config.get("telemetry", {}).get("otlp_attributes", {})  # type: ignore
         self.disable_otlp = config.get("telemetry", {}).get("disable_otlp", False)  # type: ignore
+        otlp_traces_endpoints = (
+            config.get("telemetry", {}).get("OTLPExporter", {}).get("tracesEndpoint")  # type: ignore
+        )
+
         if not self.disable_otlp:
             from opentelemetry import trace
             from opentelemetry.exporter.otlp.proto.http.trace_exporter import (
@@ -38,25 +42,26 @@ class DBOSTracer:
             )
             from opentelemetry.semconv.attributes.service_attributes import SERVICE_NAME

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            tracer_provider = trace.get_tracer_provider()
+
+            # Only set up OTLP provider and exporter if endpoints are provided
+            if otlp_traces_endpoints is not None:
+                if not isinstance(tracer_provider, TracerProvider):
+                    resource = Resource(
+                        attributes={
+                            SERVICE_NAME: config["name"],
+                        }
+                    )
+
+                    tracer_provider = TracerProvider(resource=resource)
+                    if os.environ.get("DBOS__CONSOLE_TRACES", None) is not None:
+                        processor = BatchSpanProcessor(ConsoleSpanExporter())
+                        tracer_provider.add_span_processor(processor)
+                    trace.set_tracer_provider(tracer_provider)
+
+                for e in otlp_traces_endpoints:
+                    processor = BatchSpanProcessor(OTLPSpanExporter(endpoint=e))
+                    tracer_provider.add_span_processor(processor)

     def set_provider(self, provider: "Optional[TracerProvider]") -> None:
         self.provider = provider
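Because config now consults trace.get_tracer_provider() and only builds a provider when a real one is absent, an application can install its own TracerProvider before DBOS starts and the OTLP span processors attach to it instead of replacing it. A sketch using the standard OpenTelemetry SDK; the service name is a placeholder:

from opentelemetry import trace
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace import TracerProvider

# Install an application-owned provider before DBOS is configured.
provider = TracerProvider(resource=Resource.create({"service.name": "my-app"}))
trace.set_tracer_provider(provider)

# DBOS 2.3.0's tracer setup calls trace.get_tracer_provider() and, finding an
# existing TracerProvider, adds its exporters to it rather than overwriting it.
assert trace.get_tracer_provider() is provider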
dbos/cli/cli.py
CHANGED

@@ -140,26 +140,12 @@ def start() -> None:
         Forward kill signals to children.

         When we receive a signal, send it to the entire process group of the child.
-        If that doesn't work, SIGKILL them then exit.
         """
         # Send the signal to the child's entire process group
         if process.poll() is None:
             os.killpg(os.getpgid(process.pid), signum)

-        #
-        for _ in range(10):  # Wait up to 1 second
-            if process.poll() is not None:
-                break
-            time.sleep(0.1)
-
-        # If the child is still running, force kill it
-        if process.poll() is None:
-            try:
-                os.killpg(os.getpgid(process.pid), signal.SIGKILL)
-            except Exception:
-                pass
-
-        # Exit immediately
+        # Exit
         os._exit(process.returncode if process.returncode is not None else 1)

     # Configure the single handler only on Unix-like systems.
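The handler that remains simply forwards the signal to the child's process group and exits, without the wait-and-SIGKILL escalation. A self-contained sketch of that pattern on a Unix-like system; the child command is a placeholder:

import os
import signal
import subprocess
import sys

# Start the child in its own process group so signals can target the whole group.
process = subprocess.Popen(
    [sys.executable, "-c", "import time; time.sleep(60)"],  # placeholder child
    start_new_session=True,
)

def forward_signal(signum: int, frame: object) -> None:
    # Forward the signal to the child's entire process group, then exit:
    # the 2.3.0 handler no longer waits for the child or escalates to SIGKILL.
    if process.poll() is None:
        os.killpg(os.getpgid(process.pid), signum)
    os._exit(process.returncode if process.returncode is not None else 1)

signal.signal(signal.SIGINT, forward_signal)
signal.signal(signal.SIGTERM, forward_signal)
process.wait()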
{dbos-2.2.0a3.dist-info → dbos-2.3.0.dist-info}/RECORD
CHANGED

@@ -1,20 +1,20 @@
-dbos-2.
-dbos-2.
-dbos-2.
-dbos-2.
+dbos-2.3.0.dist-info/METADATA,sha256=k_8xl4krhDI4OSnc_iNZknaDutWwlIbHVoDa8MKuEEE,14530
+dbos-2.3.0.dist-info/WHEEL,sha256=9P2ygRxDrTJz3gsagc0Z96ukrxjr-LFBGOgv3AuKlCA,90
+dbos-2.3.0.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
+dbos-2.3.0.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
 dbos/__init__.py,sha256=M7FdFSBGhcvaLIXrNw_0eR68ijwMWV7_UEyimHMP_F4,1039
 dbos/__main__.py,sha256=G7Exn-MhGrVJVDbgNlpzhfh8WMX_72t3_oJaFT9Lmt8,653
 dbos/_admin_server.py,sha256=hubQJw5T8zGKCPNS6FQTXy8jQ8GTJxoYQaDTMlICl9k,16267
 dbos/_app_db.py,sha256=mvWQ66ebdbiD9fpGKHZBWNVEza6Ulo1D-3UoTB_LwRc,16378
 dbos/_classproperty.py,sha256=f0X-_BySzn3yFDRKB2JpCbLYQ9tLwt1XftfshvY7CBs,626
-dbos/_client.py,sha256=
+dbos/_client.py,sha256=0VR9oWBn0i-34jNWHqkgeImKdg5aBefMWu2jaqRLH8Q,19658
 dbos/_conductor/conductor.py,sha256=3E_hL3c9g9yWqKZkvI6KA0-ZzPMPRo06TOzT1esMiek,24114
 dbos/_conductor/protocol.py,sha256=q3rgLxINFtWFigdOONc-4gX4vn66UmMlJQD6Kj8LnL4,7420
 dbos/_context.py,sha256=XKllmsDR_oMcWOuZnoe1X4yv2JeOi_vsAuyWC-mWs_o,28164
-dbos/_core.py,sha256=
+dbos/_core.py,sha256=e-pKDbrvpN6BzcfyIZx4Nsb8wnMiGxLNzdpgtlRI-0I,50096
 dbos/_croniter.py,sha256=XHAyUyibs_59sJQfSNWkP7rqQY6_XrlfuuCxk4jYqek,47559
 dbos/_dbos.py,sha256=dr32Z_NT36JkUxWGyYVX7xkl3bYJmgsxVMOX8H9_mpM,59394
-dbos/_dbos_config.py,sha256=
+dbos/_dbos_config.py,sha256=mfajyeyeV1ZHaAg2GU3dxwvp_19wZtY2prNdVrXgPb8,24846
 dbos/_debouncer.py,sha256=qNjIVmWqTPp64M2cEbLnpgGmlKVdCaAKysD1BPJgWh4,15297
 dbos/_debug.py,sha256=0MfgNqutCUhI4PEmmra9x7f3DiFE_0nscfUCHdLimEY,1415
 dbos/_docker_pg_helper.py,sha256=xySum4hTA8TVMBODoG19u4cXQAB1vCock-jwM2pnmSI,7791
@@ -22,22 +22,22 @@ dbos/_error.py,sha256=GwO0Ng4d4iB52brY09-Ss6Cz_V28Xc0D0cRCzZ6XmNM,8688
 dbos/_event_loop.py,sha256=cvaFN9-II3MsHEOq8QoICc_8qSKrjikMlLfuhC3Y8Dk,2923
 dbos/_fastapi.py,sha256=toYYfbe2aui2aHw0021PoXi2dKlI6NzO3M3pHB0dHOk,3421
 dbos/_flask.py,sha256=Npnakt-a3W5OykONFRkDRnumaDhTQmA0NPdUCGRYKXE,1652
-dbos/_kafka.py,sha256=
+dbos/_kafka.py,sha256=cA3hXyT-FR4LQZnaBMVLTZn7oko76rcTUC_kOo6aSis,4352
 dbos/_kafka_message.py,sha256=NYvOXNG3Qn7bghn1pv3fg4Pbs86ILZGcK4IB-MLUNu0,409
-dbos/_logger.py,sha256=
+dbos/_logger.py,sha256=ByGkkGwEWaqE9z6E2VNDFOgu_z4LNe7_SxsVgAXzoT0,5081
 dbos/_migration.py,sha256=Fvc3m4dC4oDpjPMHX-tUZVnXklVB9OMMojSLuVyV9ak,10312
 dbos/_outcome.py,sha256=7HvosMfEHTh1U5P6xok7kFTGLwa2lPaul0YApb3UnN4,8191
 dbos/_queue.py,sha256=GmqZHl9smES1KSmpauhSdsnZFJHDyfvRArmC-jBibhw,6228
 dbos/_recovery.py,sha256=K-wlFhdf4yGRm6cUzyhcTjQUS0xp2T5rdNMLiiBErYg,2882
 dbos/_registrations.py,sha256=bEOntObnWaBylnebr5ZpcX2hk7OVLDd1z4BvW4_y3zA,7380
 dbos/_roles.py,sha256=kCuhhg8XLtrHCgKgm44I0abIRTGHltf88OwjEKAUggk,2317
-dbos/_scheduler.py,sha256=
+dbos/_scheduler.py,sha256=PLiCSUujlfEfojTnHwzY-P_AEOVEx7bvWvU5BuMgLPY,2708
 dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_schemas/application_database.py,sha256=SypAS9l9EsaBHFn9FR8jmnqt01M74d9AF1AMa4m2hhI,1040
 dbos/_schemas/system_database.py,sha256=mNsBV0ttlqJArvOqGPY60WvtuiWrHCpYnVxtvMfe2LI,5544
-dbos/_serialization.py,sha256=
+dbos/_serialization.py,sha256=ZGrkN5UclSLOqMVZgYpT72pw1l888ZXRoYuu3pIg3PA,2957
 dbos/_sys_db.py,sha256=FDboSk58CyQCAFjOF_KMLnRtIw05OL3IpJHT1qwKEKo,87596
-dbos/_sys_db_postgres.py,sha256=
+dbos/_sys_db_postgres.py,sha256=_3m3hF6Pc23iZfUlIFYtDuC1Tw6KsjYqnDQE0HZpjt4,6965
 dbos/_sys_db_sqlite.py,sha256=ifjKdy-Z9vlVIBf5L6XnSaNjiBdvqPE73asVHim4A5Q,6998
 dbos/_templates/dbos-db-starter/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
 dbos/_templates/dbos-db-starter/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -46,14 +46,14 @@ dbos/_templates/dbos-db-starter/__package/schema.py,sha256=7Z27JGC8yy7Z44cbVXIRE
 dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos,sha256=0wPktElM7kMB3OPHTXw4xBk9bgGKMqOHrrr7x_R23Z8,446
 dbos/_templates/dbos-db-starter/migrations/create_table.py.dbos,sha256=pVm2Q0AsxS8pg85llbrXFD6jMccMqGjhGRjTEvS-hXk,942
 dbos/_templates/dbos-db-starter/start_postgres_docker.py,sha256=lQVLlYO5YkhGPEgPqwGc7Y8uDKse9HsWv5fynJEFJHM,1681
-dbos/_tracer.py,sha256=
+dbos/_tracer.py,sha256=jTlTkb5vUr_Ai5W9JIJf6FpYjAL0IWL52EWM_HXsi54,3958
 dbos/_utils.py,sha256=ZdoM1MDbHnlJrh31zfhp3iX62bAxK1kyvMwXnltC_84,1779
 dbos/_workflow_commands.py,sha256=k-i1bCfNrux43BHLT8wQ-l-MVZX3D6LGZLH7-uuiDRo,4951
 dbos/cli/_github_init.py,sha256=R_94Fnn40CAmPy-zM00lwHi0ndyfv57TmIooADjmag4,3378
 dbos/cli/_template_init.py,sha256=AltKk256VocgvxLpuTxpjJyACrdHFjbGoqYhHzeLae4,2649
-dbos/cli/cli.py,sha256=
+dbos/cli/cli.py,sha256=hPZJmrQZWn8mcXou7DHaHl8luSEQTEWaYlnIsLw8WY4,27150
 dbos/cli/migration.py,sha256=I0_0ngWTuCPQf6Symbpd0lizaxWUKe3uTYEmuCmsrdU,3775
 dbos/dbos-config.schema.json,sha256=47wofTZ5jlFynec7bG0L369tAXbRQQ2euBxBXvg4m9c,1730
 dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
 version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
-dbos-2.
+dbos-2.3.0.dist-info/RECORD,,
{dbos-2.2.0a3.dist-info → dbos-2.3.0.dist-info}/WHEEL
File without changes

{dbos-2.2.0a3.dist-info → dbos-2.3.0.dist-info}/entry_points.txt
File without changes

{dbos-2.2.0a3.dist-info → dbos-2.3.0.dist-info}/licenses/LICENSE
File without changes