dotflow 0.12.0.dev1__tar.gz → 0.12.0.dev2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/PKG-INFO +1 -1
  2. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/__init__.py +1 -1
  3. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/abc/flow.py +1 -1
  4. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/cli/commands/start.py +18 -11
  5. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/cli/setup.py +10 -8
  6. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/core/dotflow.py +3 -0
  7. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/core/execution.py +4 -4
  8. dotflow-0.12.0.dev2/dotflow/core/serializers/task.py +47 -0
  9. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/core/serializers/workflow.py +4 -0
  10. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/core/task.py +19 -0
  11. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/core/types/__init__.py +3 -1
  12. dotflow-0.12.0.dev2/dotflow/core/types/storage.py +15 -0
  13. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/core/workflow.py +70 -54
  14. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/pyproject.toml +2 -2
  15. dotflow-0.12.0.dev1/dotflow/core/serializers/task.py +0 -25
  16. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/LICENSE +0 -0
  17. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/README.md +0 -0
  18. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/abc/__init__.py +0 -0
  19. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/abc/file.py +0 -0
  20. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/abc/http.py +0 -0
  21. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/abc/storage.py +0 -0
  22. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/abc/tcp.py +0 -0
  23. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/cli/__init__.py +0 -0
  24. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/cli/command.py +0 -0
  25. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/cli/commands/__init__.py +0 -0
  26. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/cli/commands/init.py +0 -0
  27. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/cli/commands/log.py +0 -0
  28. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/cli/validators/__init__.py +0 -0
  29. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/cli/validators/start.py +0 -0
  30. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/core/__init__.py +0 -0
  31. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/core/action.py +0 -0
  32. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/core/config.py +0 -0
  33. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/core/context.py +0 -0
  34. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/core/decorators/__init__.py +0 -0
  35. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/core/decorators/time.py +0 -0
  36. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/core/exception.py +0 -0
  37. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/core/module.py +0 -0
  38. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/core/serializers/__init__.py +0 -0
  39. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/core/serializers/transport.py +0 -0
  40. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/core/types/execution.py +0 -0
  41. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/core/types/status.py +0 -0
  42. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/core/types/worflow.py +0 -0
  43. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/logging.py +0 -0
  44. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/main.py +0 -0
  45. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/providers/__init__.py +0 -0
  46. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/providers/storage_default.py +0 -0
  47. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/providers/storage_file.py +0 -0
  48. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/settings.py +0 -0
  49. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/storage.py +0 -0
  50. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/utils/__init__.py +0 -0
  51. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/utils/basic_functions.py +0 -0
  52. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/utils/error_handler.py +0 -0
  53. {dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/utils/tools.py +0 -0
{dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: dotflow
- Version: 0.12.0.dev1
+ Version: 0.12.0.dev2
  Summary: 🎲 Dotflow turns an idea into flow!
  License: MIT License
 
{dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/__init__.py
@@ -1,6 +1,6 @@
  """Dotflow __init__ module."""
 
- __version__ = "0.12.0.dev1"
+ __version__ = "0.12.0.dev2"
  __description__ = "🎲 Dotflow turns an idea into flow!"
 
  from .core.action import Action as action
{dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/abc/flow.py
@@ -34,7 +34,7 @@ class Flow(ABC):
          return self.queue
 
      @abstractmethod
-     def internal_callback(self, task: Task) -> None:
+     def _internal_callback(self, task: Task) -> None:
          self.queue.append(task)
 
      @abstractmethod
{dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/cli/commands/start.py
@@ -11,17 +11,7 @@ from dotflow.cli.command import Command
  class StartCommand(Command):
 
      def setup(self):
-         workflow = DotFlow()
-
-         if self.params.storage:
-             storage = {"default": StorageDefault, "file": StorageFile}
-
-             config = Config(
-                 storage=storage.get(self.params.storage)(
-                     path=self.params.path,
-                 )
-             )
-             workflow = DotFlow(config=config)
+         workflow = self._new_workflow()
 
          workflow.task.add(
              step=self.params.step,
@@ -33,3 +23,20 @@ class StartCommand(Command):
 
          if self.params.mode == TypeExecution.BACKGROUND:
              system("/bin/bash")
+
+     def _new_workflow(self):
+         if not self.params.storage:
+             return DotFlow()
+
+         storage_classes = {
+             "default": StorageDefault,
+             "file": StorageFile
+         }
+
+         config = Config(
+             storage=storage_classes.get(self.params.storage)(
+                 path=self.params.path,
+             )
+         )
+
+         return DotFlow(config=config)
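
Note: the `_new_workflow` refactor is behavior-preserving — without `--storage` the command builds a bare `DotFlow()`, otherwise it wires the selected backend in through `Config`. A minimal sketch of the equivalent programmatic setup, assuming `StorageFile` lives at `dotflow.providers.storage_file` (the provider module in the file list above) and that `DotFlow` is exported from the package root:

    from dotflow import DotFlow
    from dotflow.core.config import Config
    from dotflow.providers.storage_file import StorageFile  # assumed import path

    # Equivalent of `--storage file --path .output`: persist task
    # contexts to disk instead of the in-memory default backend.
    config = Config(storage=StorageFile(path=".output"))
    workflow = DotFlow(config=config)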
{dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/cli/setup.py
@@ -6,18 +6,14 @@ from dotflow import __version__, __description__
  from dotflow.logging import logger
  from dotflow.settings import Settings as settings
  from dotflow.utils.basic_functions import basic_callback
- from dotflow.core.types.execution import TypeExecution
+ from dotflow.core.types import TypeExecution, TypeStorage
  from dotflow.core.exception import (
      MissingActionDecorator,
      ExecutionModeNotExist,
      ImportModuleError,
      MESSAGE_UNKNOWN_ERROR,
  )
- from dotflow.cli.commands import (
-     InitCommand,
-     LogCommand,
-     StartCommand
- )
+ from dotflow.cli.commands import InitCommand, LogCommand, StartCommand
 
 
  class Command:
@@ -56,13 +52,19 @@ class Command:
          self.cmd_start.add_argument("-s", "--step", required=True)
          self.cmd_start.add_argument("-c", "--callback", default=basic_callback)
          self.cmd_start.add_argument("-i", "--initial-context")
-         self.cmd_start.add_argument("-o", "--storage", choices=["default", "file"])
+         self.cmd_start.add_argument(
+             "-o", "--storage", choices=[TypeStorage.DEFAULT, TypeStorage.FILE]
+         )
          self.cmd_start.add_argument("-p", "--path", default=settings.START_PATH)
          self.cmd_start.add_argument(
              "-m",
              "--mode",
              default=TypeExecution.SEQUENTIAL,
-             choices=[TypeExecution.SEQUENTIAL, TypeExecution.BACKGROUND],
+             choices=[
+                 TypeExecution.SEQUENTIAL,
+                 TypeExecution.BACKGROUND,
+                 TypeExecution.PARALLEL,
+             ],
          )
 
          self.cmd_start.set_defaults(exec=StartCommand)
{dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/core/dotflow.py
@@ -73,3 +73,6 @@ class DotFlow:
              list (List[Any]): Returns a list of assorted objects.
          """
          return [task.current_context.storage for task in self.task.queue]
+
+     def result(self):
+         return self.task.schema()
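
Note: `result()` is the new public entry point to the serializers added below; it delegates to `TaskBuilder.schema()` and returns a plain dict. A sketch of how it might be called, assuming the usual add-then-start flow (`start()` itself is taken from the project README, not from this diff):

    from dotflow import DotFlow, action

    @action
    def my_step():
        return {"answer": 42}

    workflow = DotFlow()
    workflow.task.add(step=my_step)
    workflow.start()  # assumed entry point, not part of this diff

    print(workflow.result())  # JSON-serializable summary of every task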
{dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/core/execution.py
@@ -46,14 +46,14 @@ class Execution:
          task: Task,
          workflow_id: UUID,
          previous_context: Context = None,
-         internal_callback: Callable = basic_callback
+         _internal_callback: Callable = basic_callback
      ) -> None:
          self.task = task
          self.task.status = TaskStatus.IN_PROGRESS
          self.task.previous_context = previous_context
          self.task.workflow_id = workflow_id
 
-         self._excution(internal_callback)
+         self._excution(_internal_callback)
 
      def _is_action(self, class_instance: Callable, func: Callable):
          try:
@@ -131,7 +131,7 @@ class Execution:
          return new_context
 
      @time
-     def _excution(self, internal_callback):
+     def _excution(self, _internal_callback):
          try:
              current_context = self.task.step(
                  initial_context=self.task.initial_context,
@@ -157,6 +157,6 @@ class Execution:
 
          finally:
              self.task.callback(task=self.task)
-             internal_callback(task=self.task)
+             _internal_callback(task=self.task)
 
          return self.task
dotflow-0.12.0.dev2/dotflow/core/serializers/task.py
@@ -0,0 +1,47 @@
+ """Task serializer module"""
+
+ import json
+
+ from typing import Any, Optional
+ from uuid import UUID
+
+ from pydantic import BaseModel, Field, ConfigDict, field_validator  # type: ignore
+
+
+ class SerializerTaskError(BaseModel):
+
+     traceback: str
+     message: str
+
+
+ class SerializerTask(BaseModel):
+     model_config = ConfigDict(title="task")
+
+     task_id: int = Field(default=None)
+     workflow_id: Optional[UUID] = Field(default=None)
+     status: str = Field(default=None, alias="_status")
+     error: Optional[SerializerTaskError] = Field(default=None, alias="_error")
+     duration: Optional[float] = Field(default=None, alias="_duration")
+     initial_context: Any = Field(default=None, alias="_initial_context")
+     current_context: Any = Field(default=None, alias="_current_context")
+     previous_context: Any = Field(default=None, alias="_previous_context")
+     group_name: str = Field(default=None)
+
+     @field_validator("error", mode="before")
+     @classmethod
+     def error_validator(cls, value: str) -> str:
+         if value:
+             return SerializerTaskError(**value.__dict__)
+         return value
+
+     @field_validator(
+         "initial_context", "current_context", "previous_context", mode="before"
+     )
+     @classmethod
+     def context_validator(cls, value: str) -> str:
+         if value and value.storage:
+             try:
+                 return json.dumps(value.storage)
+             except Exception:
+                 return str(value)
+         return None
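
Note: the rewritten serializer reads the task's private attributes (`_status`, `_error`, `_duration`, the three contexts) through pydantic aliases, which is what lets `Task.schema()` below pass `self.__dict__` straight into the model. A minimal, dotflow-independent sketch of that alias behavior (pydantic v2):

    from pydantic import BaseModel, Field

    class Demo(BaseModel):
        status: str = Field(default=None, alias="_status")

    # By default, validation input is matched against the alias, so keys
    # coming from an object's __dict__ populate the public field name.
    print(Demo(**{"_status": "completed"}).status)  # -> "completed"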
{dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/core/serializers/workflow.py
@@ -1,11 +1,15 @@
  """Workflow serializer module"""
 
  from uuid import UUID
+ from typing import List
 
  from pydantic import BaseModel, Field, ConfigDict  # type: ignore
 
+ from dotflow.core.serializers.task import SerializerTask
+
 
  class SerializerWorkflow(BaseModel):
      model_config = ConfigDict(title="workflow")
 
      workflow_id: UUID = Field(default=None)
+     tasks: List[SerializerTask] = Field(default=[])
{dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/core/task.py
@@ -1,5 +1,7 @@
  """Task module"""
 
+ import json
+
  from uuid import UUID
  from typing import Any, Callable, List
 
@@ -10,6 +12,8 @@ from dotflow.core.config import Config
  from dotflow.core.action import Action
  from dotflow.core.context import Context
  from dotflow.core.module import Module
+ from dotflow.core.serializers.task import SerializerTask
+ from dotflow.core.serializers.workflow import SerializerWorkflow
  from dotflow.core.exception import MissingActionDecorator, NotCallableObject
  from dotflow.core.types.status import TaskStatus
  from dotflow.utils import (
@@ -240,6 +244,13 @@ class Task(TaskInstance):
      def config(self, value: Config):
          self._config = value
 
+     def schema(self) -> SerializerTask:
+         item = SerializerTask(
+             **self.__dict__
+         ).model_dump_json()
+
+         return json.loads(item)
+
 
  class TaskError:
 
@@ -340,3 +351,11 @@ class TaskBuilder:
 
      def reverse(self) -> None:
          self.queue.reverse()
+
+     def schema(self) -> SerializerWorkflow:
+         item = SerializerWorkflow(
+             workflow_id=self.workflow_id,
+             tasks=[item.schema() for item in self.queue]
+         ).model_dump_json()
+
+         return json.loads(item)
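
Note: dumping with `model_dump_json()` and immediately `json.loads()`-ing the result back is a round-trip that guarantees the returned structure holds only JSON-native types (UUIDs become strings, nested models become dicts). Under these serializers, `TaskBuilder.schema()` should produce roughly the following shape — all values here are illustrative, not taken from the package:

    {
        "workflow_id": "3f2c...-uuid-as-string",
        "tasks": [
            {
                "task_id": 0,
                "workflow_id": "3f2c...-uuid-as-string",
                "status": "completed",
                "error": None,
                "duration": 0.01,
                "initial_context": None,
                "current_context": "{\"answer\": 42}",  # contexts are JSON-dumped strings
                "previous_context": None,
                "group_name": "..."
            }
        ]
    }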
{dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/core/types/__init__.py
@@ -2,9 +2,11 @@
 
  from dotflow.core.types.execution import TypeExecution
  from dotflow.core.types.status import TaskStatus
+ from dotflow.core.types.storage import TypeStorage
 
 
  __all__ = [
      "TypeExecution",
-     "TaskStatus"
+     "TaskStatus",
+     "TypeStorage"
  ]
dotflow-0.12.0.dev2/dotflow/core/types/storage.py
@@ -0,0 +1,15 @@
+ """Type Storage mode"""
+
+ from typing_extensions import Annotated, Doc
+
+
+ class TypeStorage:
+     """
+     Import:
+         You can import the **TypeStorage** class with:
+
+             from dotflow.core.types import TypeStorage
+     """
+
+     DEFAULT: Annotated[str, Doc("Default storage.")] = "default"
+     FILE: Annotated[str, Doc("File storage.")] = "file"
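
Note: `TypeStorage` follows the same pattern as the existing `TypeExecution` and `TaskStatus` types — plain string constants carrying `Doc` annotations — so they compare equal to the raw strings used before. A quick sketch:

    from dotflow.core.types import TypeStorage

    # The constants are ordinary strings, which is why replacing
    # choices=["default", "file"] with the typed constants in
    # cli/setup.py changes nothing for users of the CLI.
    assert TypeStorage.DEFAULT == "default"
    assert TypeStorage.FILE == "file"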
{dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/dotflow/core/workflow.py
@@ -1,6 +1,8 @@
  """Workflow module"""
 
  import threading
+ import warnings
+ import platform
 
  from datetime import datetime
  from multiprocessing import Process, Queue
@@ -17,7 +19,13 @@ from dotflow.core.task import Task
  from dotflow.utils import basic_callback
 
 
+ def is_darwin() -> bool:
+     """Is Darwin"""
+     return platform.system() == "Darwin"
+
+
  def grouper(tasks: List[Task]) -> Dict[str, List[Task]]:
+     """Grouper"""
      groups = {}
      for task in tasks:
          if not groups.get(task.group_name):
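
Note: `grouper` partitions the task list by `group_name` so that `SequentialGroup` further down can run one process per group. A standalone sketch of the same idea, using `setdefault` in place of the `groups.get` check visible in the hunk:

    from typing import Dict, List

    def grouper(tasks: List["Task"]) -> Dict[str, List["Task"]]:
        groups: Dict[str, List["Task"]] = {}
        for task in tasks:
            # the first task of a group creates the bucket, later ones append
            groups.setdefault(task.group_name, []).append(task)
        return groups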
@@ -84,7 +92,7 @@ class Manager:
          failure: Callable = basic_callback,
          mode: TypeExecution = TypeExecution.SEQUENTIAL,
          keep_going: bool = False,
-         workflow_id: UUID = None
+         workflow_id: UUID = None,
      ) -> None:
          self.tasks = tasks
          self.success = success
@@ -101,10 +109,7 @@ class Manager:
              raise ExecutionModeNotExist() from err
 
          self.tasks = execution(
-             tasks=tasks,
-             workflow_id=workflow_id,
-             ignore=keep_going,
-             groups=groups
+             tasks=tasks, workflow_id=workflow_id, ignore=keep_going, groups=groups
          )
 
          self._callback_workflow(tasks=self.tasks)
@@ -118,7 +123,7 @@ class Manager:
          self.success(tasks=tasks)
 
      def sequential(self, **kwargs) -> List[Task]:
-         if len(kwargs.get("groups")) > 1:
+         if len(kwargs.get("groups", {})) > 1 and not is_darwin():
              process = SequentialGroup(**kwargs)
              return process.get_tasks()
 
@@ -134,11 +139,21 @@ class Manager:
          return process.get_tasks()
 
      def parallel(self, **kwargs) -> List[Task]:
+         if is_darwin():
+             warnings.warn(
+                 "Parallel mode does not work with MacOS."
+                 " Running tasks in sequence.",
+                 Warning
+             )
+             process = Sequential(**kwargs)
+             return process.get_tasks()
+
          process = Parallel(**kwargs)
          return process.get_tasks()
 
 
  class Sequential(Flow):
+     """Sequential"""
 
      def setup_queue(self) -> None:
          self.queue = []
@@ -146,39 +161,37 @@ class Sequential(Flow):
      def get_tasks(self) -> List[Task]:
          return self.queue
 
-     def internal_callback(self, task: Task) -> None:
+     def _internal_callback(self, task: Task) -> None:
          self.queue.append(task)
 
      def run(self) -> None:
-         previous_context = Context(
-             workflow_id=self.workflow_id
-         )
+         previous_context = Context(workflow_id=self.workflow_id)
 
          for task in self.tasks:
              Execution(
                  task=task,
                  workflow_id=self.workflow_id,
                  previous_context=previous_context,
-                 internal_callback=self.internal_callback
+                 _internal_callback=self._internal_callback,
              )
 
              previous_context = task.config.storage.get(
                  key=task.config.storage.key(task=task)
              )
 
-             if not self.ignore:
-                 if task.status == TaskStatus.FAILED:
-                     break
+             if not self.ignore and task.status == TaskStatus.FAILED:
+                 break
 
 
  class SequentialGroup(Flow):
+     """SequentialGroup"""
 
      def setup_queue(self) -> None:
          self.queue = Queue()
 
      def get_tasks(self) -> List[Task]:
          contexts = {}
-         while len(contexts) < len(self.groups):
+         while len(contexts) < len(self.tasks):
              if not self.queue.empty():
                  contexts = {**contexts, **self.queue.get()}
@@ -191,41 +204,44 @@ class SequentialGroup(Flow):
 
          return self.tasks
 
-     def internal_callback(self, task: Task) -> None:
+     def _internal_callback(self, task: Task) -> None:
          current_task = {
              task.task_id: {
                  "current_context": task.current_context,
                  "duration": task.duration,
                  "error": task.error,
-                 "status": task.status
+                 "status": task.status,
              }
          }
          self.queue.put(current_task)
 
      def run(self) -> None:
-         thread_list = []
-         process_list = []
-
-         for group in self.groups:
-             def parallel(process_list):
-                 process = Process(
-                     target=self.sequential,
-                     args=(self.groups[group],)
-                 )
-                 process.start()
-                 process_list.append(process)
+         threads = []
+         processes = []
 
+         for _, group_tasks in self.groups.items():
              thread = threading.Thread(
-                 target=parallel,
-                 args=(process_list,)
+                 target=self._launch_group,
+                 args=(processes, group_tasks,)
              )
              thread.start()
-             thread_list.append(thread)
+             threads.append(thread)
+
+         for process in processes:
+             process.join()
+
+         for thread in threads:
+             thread.join()
 
-         [process.join() for process in process_list]
-         [thread.join() for thread in thread_list]
+     def _launch_group(self, processes, group_tasks):
+         process = Process(
+             target=self._run_group,
+             args=(group_tasks,)
+         )
+         process.start()
+         processes.append(process)
 
-     def sequential(self, groups: List[Task]) -> None:
+     def _run_group(self, groups: List[Task]) -> None:
          previous_context = Context(workflow_id=self.workflow_id)
 
          for task in groups:
@@ -233,19 +249,19 @@ class SequentialGroup(Flow):
              Execution(
                  task=task,
                  workflow_id=self.workflow_id,
                  previous_context=previous_context,
-                 internal_callback=self.internal_callback
+                 _internal_callback=self._internal_callback,
              )
 
              previous_context = task.config.storage.get(
                  key=task.config.storage.key(task=task)
              )
 
-             if not self.ignore:
-                 if task.status == TaskStatus.FAILED:
-                     break
+             if not self.ignore and task.status == TaskStatus.FAILED:
+                 break
 
 
  class Background(Flow):
+     """Background"""
 
      def setup_queue(self) -> None:
          self.queue = []
@@ -253,19 +269,25 @@ class Background(Flow):
      def get_tasks(self) -> List[Task]:
          return self.tasks
 
-     def internal_callback(self, task: Task) -> None:
+     def _internal_callback(self, task: Task) -> None:
          pass
 
      def run(self) -> None:
          thread = threading.Thread(
              target=Sequential,
-             args=(self.tasks, self.workflow_id, self.ignore, self.groups,)
+             args=(
+                 self.tasks,
+                 self.workflow_id,
+                 self.ignore,
+                 self.groups,
+             ),
          )
          thread.start()
          thread.join()
 
 
  class Parallel(Flow):
+     """Parallel"""
 
      def setup_queue(self) -> None:
          self.queue = Queue()
@@ -284,34 +306,28 @@ class Parallel(Flow):
 
          return self.tasks
 
-     def internal_callback(self, task: Task) -> None:
+     def _internal_callback(self, task: Task) -> None:
          current_task = {
              task.task_id: {
                  "current_context": task.current_context,
                  "duration": task.duration,
                  "error": task.error,
-                 "status": task.status
+                 "status": task.status,
              }
          }
          self.queue.put(current_task)
 
      def run(self) -> None:
-         process_list = []
-         previous_context = Context(
-             workflow_id=self.workflow_id
-         )
+         processes = []
+         previous_context = Context(workflow_id=self.workflow_id)
 
          for task in self.tasks:
              process = Process(
                  target=Execution,
-                 args=(
-                     task,
-                     self.workflow_id,
-                     previous_context,
-                     self.internal_callback
-                 )
+                 args=(task, self.workflow_id, previous_context, self._internal_callback),
              )
              process.start()
-             process_list.append(process)
+             processes.append(process)
 
-         [process.join() for process in process_list]
+         for process in processes:
+             process.join()
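
Note: the recurring `is_darwin()` guards are a pragmatic workaround. Since Python 3.8, macOS defaults to the `spawn` multiprocessing start method, which launches a fresh interpreter and pickles every process target and its arguments; parts of this plumbing (the locally defined closure in the old `run`, task callables) work under `fork` on Linux but can fail to pickle under `spawn`, which is the likely reason for the sequential fallback here — an inference, not something this diff states. A way to check which start method applies, assuming CPython defaults:

    import multiprocessing as mp

    # "spawn" on macOS and Windows, "fork" on Linux. spawn pickles the
    # target; fork inherits the parent's memory, so bound methods and
    # closures just work.
    print(mp.get_start_method())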
{dotflow-0.12.0.dev1 → dotflow-0.12.0.dev2}/pyproject.toml
@@ -1,6 +1,6 @@
  [project]
  name = "dotflow"
- version = "0.12.0.dev1"
+ version = "0.12.0.dev2"
  authors = [
      { name="Fernando Celmer", email="email@fernandocelmer.com" },
  ]
@@ -35,7 +35,7 @@ mongodb = ["dotflow-mongodb"]
 
  [tool.poetry]
  name = "dotflow"
- version = "0.12.0.dev1"
+ version = "0.12.0.dev2"
  description = "🎲 Dotflow turns an idea into flow!"
  authors = ["Fernando Celmer <email@fernandocelmer.com>"]
  readme = "README.md"
@@ -1,25 +0,0 @@
1
- """Task serializer module"""
2
-
3
- from typing import Any, Optional
4
- from uuid import UUID
5
-
6
- from pydantic import BaseModel, Field, ConfigDict # type: ignore
7
-
8
-
9
- class SerializerTaskError(BaseModel):
10
-
11
- traceback: str
12
- message: str
13
-
14
-
15
- class SerializerTask(BaseModel):
16
- model_config = ConfigDict(title="task")
17
-
18
- task_id: int = Field(default=None)
19
- workflow_id: Optional[UUID] = Field(default=None)
20
- status: str = Field(default=None)
21
- error: Optional[SerializerTaskError] = Field(default=None)
22
- duration: float = Field(default=None)
23
- initial_context: Any = Field(default=None)
24
- current_context: Any = Field(default=None)
25
- previous_context: Any = Field(default=None)