durabletask 0.1.1a1__py3-none-any.whl → 0.2.0b1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


durabletask/internal/shared.py CHANGED
@@ -5,39 +5,67 @@ import dataclasses
 import json
 import logging
 from types import SimpleNamespace
-from typing import Any, Dict, List, Tuple, Union
+from typing import Any, Optional, Sequence, Union
 
 import grpc
 
-from durabletask.internal.grpc_interceptor import DefaultClientInterceptorImpl
+ClientInterceptor = Union[
+    grpc.UnaryUnaryClientInterceptor,
+    grpc.UnaryStreamClientInterceptor,
+    grpc.StreamUnaryClientInterceptor,
+    grpc.StreamStreamClientInterceptor
+]
 
 # Field name used to indicate that an object was automatically serialized
 # and should be deserialized as a SimpleNamespace
 AUTO_SERIALIZED = "__durabletask_autoobject__"
 
+SECURE_PROTOCOLS = ["https://", "grpcs://"]
+INSECURE_PROTOCOLS = ["http://", "grpc://"]
+
 
 def get_default_host_address() -> str:
     return "localhost:4001"
 
 
-def get_grpc_channel(host_address: Union[str, None], metadata: Union[List[Tuple[str, str]], None], secure_channel: bool = False) -> grpc.Channel:
+def get_grpc_channel(
+        host_address: Optional[str],
+        secure_channel: bool = False,
+        interceptors: Optional[Sequence[ClientInterceptor]] = None) -> grpc.Channel:
+
     if host_address is None:
         host_address = get_default_host_address()
 
+    for protocol in SECURE_PROTOCOLS:
+        if host_address.lower().startswith(protocol):
+            secure_channel = True
+            # remove the protocol from the host name
+            host_address = host_address[len(protocol):]
+            break
+
+    for protocol in INSECURE_PROTOCOLS:
+        if host_address.lower().startswith(protocol):
+            secure_channel = False
+            # remove the protocol from the host name
+            host_address = host_address[len(protocol):]
+            break
+
+    # Create the base channel
     if secure_channel:
         channel = grpc.secure_channel(host_address, grpc.ssl_channel_credentials())
     else:
         channel = grpc.insecure_channel(host_address)
 
-    if metadata is not None and len(metadata) > 0:
-        interceptors = [DefaultClientInterceptorImpl(metadata)]
+    # Apply interceptors ONLY if they exist
+    if interceptors:
         channel = grpc.intercept_channel(channel, *interceptors)
     return channel
 
+
 def get_logger(
         name_suffix: str,
-        log_handler: Union[logging.Handler, None] = None,
-        log_formatter: Union[logging.Formatter, None] = None) -> logging.Logger:
+        log_handler: Optional[logging.Handler] = None,
+        log_formatter: Optional[logging.Formatter] = None) -> logging.Logger:
     logger = logging.Logger(f"durabletask-{name_suffix}")
 
     # Add a default log handler if none is provided
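For orientation, a minimal sketch of how the reworked helper can be called. The host addresses and the logging interceptor below are illustrative, not part of the package:

```python
import grpc

import durabletask.internal.shared as shared

# A "grpcs://" prefix forces a TLS channel even though secure_channel
# defaults to False; the prefix is stripped before the channel is created.
channel = shared.get_grpc_channel("grpcs://my-sidecar.example.com:443")


# Callers now pass gRPC client interceptors directly instead of a metadata
# list. This logging interceptor is a hypothetical example.
class LoggingInterceptor(grpc.UnaryUnaryClientInterceptor):
    def intercept_unary_unary(self, continuation, client_call_details, request):
        print(f"calling {client_call_details.method}")
        return continuation(client_call_details, request)


channel = shared.get_grpc_channel(
    "grpc://localhost:4001", interceptors=[LoggingInterceptor()])
```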
@@ -78,7 +106,7 @@ class InternalJSONEncoder(json.JSONEncoder):
         if dataclasses.is_dataclass(obj):
             # Dataclasses are not serializable by default, so we convert them to a dict and mark them for
             # automatic deserialization by the receiver
-            d = dataclasses.asdict(obj)
+            d = dataclasses.asdict(obj)  # type: ignore
             d[AUTO_SERIALIZED] = True
             return d
         elif isinstance(obj, SimpleNamespace):
@@ -94,7 +122,7 @@ class InternalJSONDecoder(json.JSONDecoder):
     def __init__(self, *args, **kwargs):
         super().__init__(object_hook=self.dict_to_object, *args, **kwargs)
 
-    def dict_to_object(self, d: Dict[str, Any]):
+    def dict_to_object(self, d: dict[str, Any]):
         # If the object was serialized by the InternalJSONEncoder, deserialize it as a SimpleNamespace
         if d.pop(AUTO_SERIALIZED, False):
             return SimpleNamespace(**d)
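The marker-based round trip these two classes implement looks like the following sketch, where `Order` is a made-up dataclass:

```python
import dataclasses
import json
from types import SimpleNamespace

from durabletask.internal.shared import InternalJSONDecoder, InternalJSONEncoder


@dataclasses.dataclass
class Order:
    sku: str
    quantity: int


# The encoder converts the dataclass to a dict and tags it with
# "__durabletask_autoobject__": true ...
encoded = json.dumps(Order(sku="abc", quantity=2), cls=InternalJSONEncoder)

# ... and the decoder pops the tag and rebuilds the value as a SimpleNamespace.
decoded = json.loads(encoded, cls=InternalJSONDecoder)
assert isinstance(decoded, SimpleNamespace) and decoded.sku == "abc"
```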
durabletask/task.py CHANGED
@@ -7,8 +7,7 @@ from __future__ import annotations
 import math
 from abc import ABC, abstractmethod
 from datetime import datetime, timedelta
-from typing import (Any, Callable, Generator, Generic, List, Optional, TypeVar,
-                    Union)
+from typing import Any, Callable, Generator, Generic, Optional, TypeVar, Union
 
 import durabletask.internal.helpers as pbh
 import durabletask.internal.orchestrator_service_pb2 as pb
@@ -71,6 +70,17 @@ class OrchestrationContext(ABC):
         """
         pass
 
+    @abstractmethod
+    def set_custom_status(self, custom_status: Any) -> None:
+        """Set the orchestration instance's custom status.
+
+        Parameters
+        ----------
+        custom_status: Any
+            A JSON-serializable custom status value to set.
+        """
+        pass
+
     @abstractmethod
     def create_timer(self, fire_at: Union[datetime, timedelta]) -> Task:
         """Create a Timer Task to fire after at the specified deadline.
@@ -248,9 +258,9 @@
 
 class CompositeTask(Task[T]):
     """A task that is composed of other tasks."""
-    _tasks: List[Task]
+    _tasks: list[Task]
 
-    def __init__(self, tasks: List[Task]):
+    def __init__(self, tasks: list[Task]):
         super().__init__()
         self._tasks = tasks
         self._completed_tasks = 0
@@ -260,17 +270,18 @@ class CompositeTask(Task[T]):
         if task.is_complete:
             self.on_child_completed(task)
 
-    def get_tasks(self) -> List[Task]:
+    def get_tasks(self) -> list[Task]:
         return self._tasks
 
     @abstractmethod
     def on_child_completed(self, task: Task[T]):
         pass
 
-class WhenAllTask(CompositeTask[List[T]]):
+
+class WhenAllTask(CompositeTask[list[T]]):
     """A task that completes when all of its child tasks complete."""
 
-    def __init__(self, tasks: List[Task[T]]):
+    def __init__(self, tasks: list[Task[T]]):
         super().__init__(tasks)
         self._completed_tasks = 0
         self._failed_tasks = 0
@@ -323,7 +334,7 @@ class RetryableTask(CompletableTask[T]):
     """A task that can be retried according to a retry policy."""
 
     def __init__(self, retry_policy: RetryPolicy, action: pb.OrchestratorAction,
-                 start_time:datetime, is_sub_orch: bool) -> None:
+                 start_time: datetime, is_sub_orch: bool) -> None:
         super().__init__()
         self._action = action
         self._retry_policy = retry_policy
@@ -333,15 +344,15 @@ class RetryableTask(CompletableTask[T]):
 
     def increment_attempt_count(self) -> None:
         self._attempt_count += 1
-
-    def compute_next_delay(self) -> Union[timedelta, None]:
+
+    def compute_next_delay(self) -> Optional[timedelta]:
         if self._attempt_count >= self._retry_policy.max_number_of_attempts:
             return None
 
         retry_expiration: datetime = datetime.max
         if self._retry_policy.retry_timeout is not None and self._retry_policy.retry_timeout != datetime.max:
             retry_expiration = self._start_time + self._retry_policy.retry_timeout
-
+
         if self._retry_policy.backoff_coefficient is None:
             backoff_coefficient = 1.0
         else:
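For reference, the delay computed here follows the usual capped-exponential-backoff shape. A standalone sketch of that arithmetic under the retry-policy fields visible in this hunk, not the SDK's exact code:

```python
from datetime import timedelta
from typing import Optional


def next_delay(attempt: int, first_retry_interval: timedelta,
               backoff_coefficient: float = 1.0,
               max_retry_interval: Optional[timedelta] = None) -> timedelta:
    # attempt is 1-based: the first retry waits first_retry_interval, and
    # each later retry multiplies the base interval by the coefficient again.
    seconds = first_retry_interval.total_seconds() * (backoff_coefficient ** (attempt - 1))
    if max_retry_interval is not None:
        seconds = min(seconds, max_retry_interval.total_seconds())
    return timedelta(seconds=seconds)


# With a coefficient of 2.0 this yields 1s, 2s, 4s, 8s.
delays = [next_delay(n, timedelta(seconds=1), 2.0) for n in range(1, 5)]
```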
@@ -369,7 +380,7 @@ class TimerTask(CompletableTask[T]):
 class WhenAnyTask(CompositeTask[Task]):
     """A task that completes when any of its child tasks complete."""
 
-    def __init__(self, tasks: List[Task]):
+    def __init__(self, tasks: list[Task]):
         super().__init__(tasks)
 
     def on_child_completed(self, task: Task):
@@ -379,12 +390,12 @@ class WhenAnyTask(CompositeTask[Task]):
             self._result = task
 
 
-def when_all(tasks: List[Task[T]]) -> WhenAllTask[T]:
+def when_all(tasks: list[Task[T]]) -> WhenAllTask[T]:
     """Returns a task that completes when all of the provided tasks complete or when one of the tasks fail."""
     return WhenAllTask(tasks)
 
 
-def when_any(tasks: List[Task]) -> WhenAnyTask:
+def when_any(tasks: list[Task]) -> WhenAnyTask:
     """Returns a task that completes when any of the provided tasks complete or fail."""
     return WhenAnyTask(tasks)
 
durabletask/worker.py CHANGED
@@ -6,8 +6,7 @@ import logging
 from datetime import datetime, timedelta
 from threading import Event, Thread
 from types import GeneratorType
-from typing import (Any, Dict, Generator, List, Optional, Sequence, Tuple,
-                    TypeVar, Union)
+from typing import Any, Generator, Optional, Sequence, TypeVar, Union
 
 import grpc
 from google.protobuf import empty_pb2
@@ -18,6 +17,7 @@ import durabletask.internal.orchestrator_service_pb2 as pb
 import durabletask.internal.orchestrator_service_pb2_grpc as stubs
 import durabletask.internal.shared as shared
 from durabletask import task
+from durabletask.internal.grpc_interceptor import DefaultClientInterceptorImpl
 
 TInput = TypeVar('TInput')
 TOutput = TypeVar('TOutput')
@@ -25,8 +25,8 @@ TOutput = TypeVar('TOutput')
 
 class _Registry:
 
-    orchestrators: Dict[str, task.Orchestrator]
-    activities: Dict[str, task.Activity]
+    orchestrators: dict[str, task.Orchestrator]
+    activities: dict[str, task.Activity]
 
     def __init__(self):
         self.orchestrators = {}
@@ -83,21 +83,32 @@ class ActivityNotRegisteredError(ValueError):
 
 class TaskHubGrpcWorker:
     _response_stream: Optional[grpc.Future] = None
+    _interceptors: Optional[list[shared.ClientInterceptor]] = None
 
     def __init__(self, *,
                  host_address: Optional[str] = None,
-                 metadata: Optional[List[Tuple[str, str]]] = None,
+                 metadata: Optional[list[tuple[str, str]]] = None,
                  log_handler=None,
                  log_formatter: Optional[logging.Formatter] = None,
-                 secure_channel: bool = False):
+                 secure_channel: bool = False,
+                 interceptors: Optional[Sequence[shared.ClientInterceptor]] = None):
         self._registry = _Registry()
         self._host_address = host_address if host_address else shared.get_default_host_address()
-        self._metadata = metadata
         self._logger = shared.get_logger("worker", log_handler, log_formatter)
         self._shutdown = Event()
         self._is_running = False
         self._secure_channel = secure_channel
 
+        # Determine the interceptors to use
+        if interceptors is not None:
+            self._interceptors = list(interceptors)
+            if metadata:
+                self._interceptors.append(DefaultClientInterceptorImpl(metadata))
+        elif metadata:
+            self._interceptors = [DefaultClientInterceptorImpl(metadata)]
+        else:
+            self._interceptors = None
+
     def __enter__(self):
         return self
 
@@ -118,7 +129,7 @@ class TaskHubGrpcWorker:
 
     def start(self):
         """Starts the worker on a background thread and begins listening for work items."""
-        channel = shared.get_grpc_channel(self._host_address, self._metadata, self._secure_channel)
+        channel = shared.get_grpc_channel(self._host_address, self._secure_channel, self._interceptors)
         stub = stubs.TaskHubSidecarServiceStub(channel)
 
         if self._is_running:
@@ -140,13 +151,15 @@
 
                 # The stream blocks until either a work item is received or the stream is canceled
                 # by another thread (see the stop() method).
-                for work_item in self._response_stream:
+                for work_item in self._response_stream:  # type: ignore
                     request_type = work_item.WhichOneof('request')
                     self._logger.debug(f'Received "{request_type}" work item')
                     if work_item.HasField('orchestratorRequest'):
-                        executor.submit(self._execute_orchestrator, work_item.orchestratorRequest, stub)
+                        executor.submit(self._execute_orchestrator, work_item.orchestratorRequest, stub, work_item.completionToken)
                     elif work_item.HasField('activityRequest'):
-                        executor.submit(self._execute_activity, work_item.activityRequest, stub)
+                        executor.submit(self._execute_activity, work_item.activityRequest, stub, work_item.completionToken)
+                    elif work_item.HasField('healthPing'):
+                        pass  # no-op
                     else:
                         self._logger.warning(f'Unexpected work item type: {request_type}')
 
@@ -185,23 +198,27 @@ class TaskHubGrpcWorker:
         self._logger.info("Worker shutdown completed")
         self._is_running = False
 
-    def _execute_orchestrator(self, req: pb.OrchestratorRequest, stub: stubs.TaskHubSidecarServiceStub):
+    def _execute_orchestrator(self, req: pb.OrchestratorRequest, stub: stubs.TaskHubSidecarServiceStub, completionToken):
         try:
             executor = _OrchestrationExecutor(self._registry, self._logger)
-            actions = executor.execute(req.instanceId, req.pastEvents, req.newEvents)
-            res = pb.OrchestratorResponse(instanceId=req.instanceId, actions=actions)
+            result = executor.execute(req.instanceId, req.pastEvents, req.newEvents)
+            res = pb.OrchestratorResponse(
+                instanceId=req.instanceId,
+                actions=result.actions,
+                customStatus=pbh.get_string_value(result.encoded_custom_status),
+                completionToken=completionToken)
         except Exception as ex:
             self._logger.exception(f"An error occurred while trying to execute instance '{req.instanceId}': {ex}")
             failure_details = pbh.new_failure_details(ex)
             actions = [pbh.new_complete_orchestration_action(-1, pb.ORCHESTRATION_STATUS_FAILED, "", failure_details)]
-            res = pb.OrchestratorResponse(instanceId=req.instanceId, actions=actions)
+            res = pb.OrchestratorResponse(instanceId=req.instanceId, actions=actions, completionToken=completionToken)
 
         try:
             stub.CompleteOrchestratorTask(res)
         except Exception as ex:
             self._logger.exception(f"Failed to deliver orchestrator response for '{req.instanceId}' to sidecar: {ex}")
 
-    def _execute_activity(self, req: pb.ActivityRequest, stub: stubs.TaskHubSidecarServiceStub):
+    def _execute_activity(self, req: pb.ActivityRequest, stub: stubs.TaskHubSidecarServiceStub, completionToken):
         instance_id = req.orchestrationInstance.instanceId
         try:
             executor = _ActivityExecutor(self._registry, self._logger)
@@ -209,12 +226,14 @@ class TaskHubGrpcWorker:
             res = pb.ActivityResponse(
                 instanceId=instance_id,
                 taskId=req.taskId,
-                result=pbh.get_string_value(result))
+                result=pbh.get_string_value(result),
+                completionToken=completionToken)
         except Exception as ex:
             res = pb.ActivityResponse(
                 instanceId=instance_id,
                 taskId=req.taskId,
-                failureDetails=pbh.new_failure_details(ex))
+                failureDetails=pbh.new_failure_details(ex),
+                completionToken=completionToken)
 
         try:
             stub.CompleteActivityTask(res)
@@ -232,16 +251,17 @@ class _RuntimeOrchestrationContext(task.OrchestrationContext):
         self._is_replaying = True
         self._is_complete = False
         self._result = None
-        self._pending_actions: Dict[int, pb.OrchestratorAction] = {}
-        self._pending_tasks: Dict[int, task.CompletableTask] = {}
+        self._pending_actions: dict[int, pb.OrchestratorAction] = {}
+        self._pending_tasks: dict[int, task.CompletableTask] = {}
         self._sequence_number = 0
         self._current_utc_datetime = datetime(1000, 1, 1)
         self._instance_id = instance_id
         self._completion_status: Optional[pb.OrchestrationStatus] = None
-        self._received_events: Dict[str, List[Any]] = {}
-        self._pending_events: Dict[str, List[task.CompletableTask]] = {}
+        self._received_events: dict[str, list[Any]] = {}
+        self._pending_events: dict[str, list[task.CompletableTask]] = {}
         self._new_input: Optional[Any] = None
         self._save_events = False
+        self._encoded_custom_status: Optional[str] = None
 
     def run(self, generator: Generator[task.Task, Any, Any]):
         self._generator = generator
@@ -313,10 +333,10 @@ class _RuntimeOrchestrationContext(task.OrchestrationContext):
         self._new_input = new_input
         self._save_events = save_events
 
-    def get_actions(self) -> List[pb.OrchestratorAction]:
+    def get_actions(self) -> list[pb.OrchestratorAction]:
         if self._completion_status == pb.ORCHESTRATION_STATUS_CONTINUED_AS_NEW:
             # When continuing-as-new, we only return a single completion action.
-            carryover_events: Optional[List[pb.HistoryEvent]] = None
+            carryover_events: Optional[list[pb.HistoryEvent]] = None
             if self._save_events:
                 carryover_events = []
                 # We need to save the current set of pending events so that they can be
@@ -355,6 +375,9 @@ class _RuntimeOrchestrationContext(task.OrchestrationContext):
     def current_utc_datetime(self, value: datetime):
         self._current_utc_datetime = value
 
+    def set_custom_status(self, custom_status: Any) -> None:
+        self._encoded_custom_status = shared.to_json(custom_status) if custom_status is not None else None
+
     def create_timer(self, fire_at: Union[datetime, timedelta]) -> task.Task:
         return self.create_timer_internal(fire_at)
 
@@ -457,6 +480,15 @@ class _RuntimeOrchestrationContext(task.OrchestrationContext):
         self.set_continued_as_new(new_input, save_events)
 
 
+class ExecutionResults:
+    actions: list[pb.OrchestratorAction]
+    encoded_custom_status: Optional[str]
+
+    def __init__(self, actions: list[pb.OrchestratorAction], encoded_custom_status: Optional[str]):
+        self.actions = actions
+        self.encoded_custom_status = encoded_custom_status
+
+
 class _OrchestrationExecutor:
     _generator: Optional[task.Orchestrator] = None
 
@@ -464,9 +496,9 @@
         self._registry = registry
         self._logger = logger
         self._is_suspended = False
-        self._suspended_events: List[pb.HistoryEvent] = []
+        self._suspended_events: list[pb.HistoryEvent] = []
 
-    def execute(self, instance_id: str, old_events: Sequence[pb.HistoryEvent], new_events: Sequence[pb.HistoryEvent]) -> List[pb.OrchestratorAction]:
+    def execute(self, instance_id: str, old_events: Sequence[pb.HistoryEvent], new_events: Sequence[pb.HistoryEvent]) -> ExecutionResults:
         if not new_events:
             raise task.OrchestrationStateError("The new history event list must have at least one event in it.")
 
@@ -501,7 +533,7 @@
         actions = ctx.get_actions()
         if self._logger.level <= logging.DEBUG:
             self._logger.debug(f"{instance_id}: Returning {len(actions)} action(s): {_get_action_summary(actions)}")
-        return actions
+        return ExecutionResults(actions=actions, encoded_custom_status=ctx._encoded_custom_status)
 
     def process_event(self, ctx: _RuntimeOrchestrationContext, event: pb.HistoryEvent) -> None:
         if self._is_suspended and _is_suspendable(event):
@@ -817,7 +849,7 @@ def _get_new_event_summary(new_events: Sequence[pb.HistoryEvent]) -> str:
     elif len(new_events) == 1:
         return f"[{new_events[0].WhichOneof('eventType')}]"
     else:
-        counts: Dict[str, int] = {}
+        counts: dict[str, int] = {}
         for event in new_events:
             event_type = event.WhichOneof('eventType')
             counts[event_type] = counts.get(event_type, 0) + 1
@@ -831,7 +863,7 @@ def _get_action_summary(new_actions: Sequence[pb.OrchestratorAction]) -> str:
     elif len(new_actions) == 1:
         return f"[{new_actions[0].WhichOneof('orchestratorActionType')}]"
     else:
-        counts: Dict[str, int] = {}
+        counts: dict[str, int] = {}
         for action in new_actions:
             action_type = action.WhichOneof('orchestratorActionType')
             counts[action_type] = counts.get(action_type, 0) + 1
durabletask-0.2.0b1.dist-info/METADATA CHANGED
@@ -1,8 +1,8 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.2
 Name: durabletask
-Version: 0.1.1a1
+Version: 0.2.0b1
 Summary: A Durable Task Client SDK for Python
-License: MIT License
+License: MIT License
 
         Copyright (c) Microsoft Corporation.
 
@@ -30,23 +30,21 @@ Keywords: durable,task,workflow
 Classifier: Development Status :: 3 - Alpha
 Classifier: Programming Language :: Python :: 3
 Classifier: License :: OSI Approved :: MIT License
-Requires-Python: >=3.8
+Requires-Python: >=3.9
 Description-Content-Type: text/markdown
-License-File: LICENSE
 Requires-Dist: grpcio
 
-# Durable Task Client SDK for Python
+# Durable Task SDK for Python
 
 [![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](https://opensource.org/licenses/MIT)
 [![Build Validation](https://github.com/microsoft/durabletask-python/actions/workflows/pr-validation.yml/badge.svg)](https://github.com/microsoft/durabletask-python/actions/workflows/pr-validation.yml)
 [![PyPI version](https://badge.fury.io/py/durabletask.svg)](https://badge.fury.io/py/durabletask)
 
-This repo contains a Python client SDK for use with the [Durable Task Framework for Go](https://github.com/microsoft/durabletask-go) and [Dapr Workflow](https://docs.dapr.io/developing-applications/building-blocks/workflow/workflow-overview/). With this SDK, you can define, schedule, and manage durable orchestrations using ordinary Python code.
+This repo contains a Python SDK for use with the [Azure Durable Task Scheduler](https://techcommunity.microsoft.com/blog/appsonazureblog/announcing-limited-early-access-of-the-durable-task-scheduler-for-azure-durable-/4286526) and the [Durable Task Framework for Go](https://github.com/microsoft/durabletask-go). With this SDK, you can define, schedule, and manage durable orchestrations using ordinary Python code.
 
 ⚠️ **This SDK is currently under active development and is not yet ready for production use.** ⚠️
 
-> Note that this project is **not** currently affiliated with the [Durable Functions](https://docs.microsoft.com/azure/azure-functions/durable/durable-functions-overview) project for Azure Functions. If you are looking for a Python SDK for Durable Functions, please see [this repo](https://github.com/Azure/azure-functions-durable-python).
-
+> Note that this SDK is **not** currently compatible with [Azure Durable Functions](https://docs.microsoft.com/azure/azure-functions/durable/durable-functions-overview). If you are looking for a Python SDK for Azure Durable Functions, please see [this repo](https://github.com/Azure/azure-functions-durable-python).
 
 ## Supported patterns
 
@@ -171,7 +169,7 @@ Orchestrations can specify retry policies for activities and sub-orchestrations.
 
 ### Prerequisites
 
-- Python 3.8
+- Python 3.9
 - A Durable Task-compatible sidecar, like [Dapr Workflow](https://docs.dapr.io/developing-applications/building-blocks/workflow/workflow-overview/)
 
 ### Installing the Durable Task Python client SDK
@@ -198,18 +196,13 @@ The following is more information about how to develop this project. Note that d
 
 ### Generating protobufs
 
-Protobuf definitions are stored in the [./submodules/durabletask-proto](./submodules/durabletask-proto) directory, which is a submodule. To update the submodule, run the following command from the project root:
-
-```sh
-git submodule update --init
-```
-
-Once the submodule is available, the corresponding source code can be regenerated using the following command from the project root:
-
 ```sh
+pip3 install -r dev-requirements.txt
 make gen-proto
 ```
 
+This will download the `orchestrator_service.proto` from the `microsoft/durabletask-protobuf` repo and compile it using `grpcio-tools`. The version of the source proto file that was downloaded can be found in the file `durabletask/internal/PROTO_SOURCE_COMMIT_HASH`.
+
 ### Running unit tests
 
 Unit tests can be run using the following command from the project root. Unit tests _don't_ require a sidecar process to be running.
durabletask-0.2.0b1.dist-info/RECORD ADDED
@@ -0,0 +1,14 @@
+durabletask/__init__.py,sha256=4gNZ89cYaJTCvWQ1wWu3-BwRD98fWWt9k7hgHSETXU4,139
+durabletask/client.py,sha256=vAm7BtVHeeWFVoiwvOGcrhrkand43oBCCVNnzbNfH6I,10011
+durabletask/task.py,sha256=Brxt-cFqFaIjU07UFLCQoRv7ioycOLcJQbRh9Je_UW4,17722
+durabletask/worker.py,sha256=YPW0XIp0z7UPh9afFaygcS3uR4jgD4FcI2KCXPp0_6M,44583
+durabletask/internal/grpc_interceptor.py,sha256=KGl8GGIbNdiEnWVLwQwkOemWvIlcEO0dh-_Tg20h5XA,2834
+durabletask/internal/helpers.py,sha256=G4nEhLnRUE1VbFHkOMX277_6LSsMH9lTh9sXUD0GdHM,7289
+durabletask/internal/orchestrator_service_pb2.py,sha256=nkADgSglhimtNjAuISJdBz1bwA8xYm1cEQdL9ZifsmU,33993
+durabletask/internal/orchestrator_service_pb2.pyi,sha256=99AIPzz4AdXrkQrN2MHkHkW9zKqmH4puSwvg9ze5IjA,50517
+durabletask/internal/orchestrator_service_pb2_grpc.py,sha256=mZXK0QtvaRr6cjm8gi9y-DjMNR2Xg2Adu79WsR22pQc,41146
+durabletask/internal/shared.py,sha256=dKRGU8z1EQM4_YA6zkKeKfiaWbiZ6-B8lP-wHy7Q_jI,4379
+durabletask-0.2.0b1.dist-info/METADATA,sha256=Mh020ccP2tO01EibjaX_Y3jg4wUAAvqBUpnPqqC41Ek,12869
+durabletask-0.2.0b1.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
+durabletask-0.2.0b1.dist-info/top_level.txt,sha256=EBVyuKWnjOwq8bJI1Uvb9U3c4fzQxACWj9p83he6fik,12
+durabletask-0.2.0b1.dist-info/RECORD,,
durabletask-0.2.0b1.dist-info/WHEEL CHANGED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: bdist_wheel (0.42.0)
+Generator: setuptools (75.8.2)
 Root-Is-Purelib: true
 Tag: py3-none-any
 
File without changes
durabletask-0.1.1a1.dist-info/LICENSE REMOVED
@@ -1,21 +0,0 @@
-MIT License
-
-Copyright (c) Microsoft Corporation.
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE
durabletask-0.1.1a1.dist-info/RECORD REMOVED
@@ -1,16 +0,0 @@
-durabletask/__init__.py,sha256=4gNZ89cYaJTCvWQ1wWu3-BwRD98fWWt9k7hgHSETXU4,139
-durabletask/client.py,sha256=UmQv4fYjHmJRju6teJ-7HM0tVjd_Ugty044Dg2s19tI,8872
-durabletask/task.py,sha256=zwnlLLNCeFJmFT7wvpK0MEx-tMxu1_JCYq_OhXYW0S0,17471
-durabletask/worker.py,sha256=FoqWcgP35lzqbbOuQ1pdJ3jMNtvtd7HO-s22OOcRHyA,42926
-durabletask/internal/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-durabletask/internal/grpc_interceptor.py,sha256=bixPtGQ5KPN2CWAzrfR8z9drwEdM_ZaTZ-tn97DC3LU,2878
-durabletask/internal/helpers.py,sha256=m9lnmQcx5zx3s0DC_Lddugr__O_dgWgHAAAByOmvL_c,7340
-durabletask/internal/orchestrator_service_pb2.py,sha256=N4R75_gT7P05fUsPgLpL130wjws49-SI0w6_BaleWOI,29754
-durabletask/internal/orchestrator_service_pb2.pyi,sha256=HRtK9PTpLudEoFPD08-oWom26quwWMkjpkmjIiWC-bQ,46324
-durabletask/internal/orchestrator_service_pb2_grpc.py,sha256=r0UbaHS2EacF3SdiY5QoWEZgp21XPcSje0lFEG23HcI,37544
-durabletask/internal/shared.py,sha256=1IU_sTwrntyJqMUB63yOtjQd0IqKARiTR_0L-AiM-N8,3645
-durabletask-0.1.1a1.dist-info/LICENSE,sha256=ws_MuBL-SCEBqPBFl9_FqZkaaydIJmxHrJG2parhU4M,1141
-durabletask-0.1.1a1.dist-info/METADATA,sha256=t9YiZ6gJHW3I36X99Zx6iWRuNhDPYrHoT-VxVx1FxqQ,12930
-durabletask-0.1.1a1.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
-durabletask-0.1.1a1.dist-info/top_level.txt,sha256=EBVyuKWnjOwq8bJI1Uvb9U3c4fzQxACWj9p83he6fik,12
-durabletask-0.1.1a1.dist-info/RECORD,,