orchestrator-core 2.10.0rc2__py3-none-any.whl → 3.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. orchestrator/__init__.py +1 -1
  2. orchestrator/api/api_v1/endpoints/processes.py +1 -1
  3. orchestrator/api/api_v1/endpoints/settings.py +2 -1
  4. orchestrator/cli/generator/templates/create_product.j2 +2 -1
  5. orchestrator/cli/generator/templates/modify_product.j2 +2 -1
  6. orchestrator/cli/generator/templates/shared_workflows.j2 +2 -1
  7. orchestrator/cli/generator/templates/terminate_product.j2 +1 -1
  8. orchestrator/cli/generator/templates/test_create_workflow.j2 +0 -1
  9. orchestrator/cli/generator/templates/test_modify_workflow.j2 +1 -2
  10. orchestrator/cli/generator/templates/test_terminate_workflow.j2 +1 -1
  11. orchestrator/cli/generator/templates/validate_product.j2 +3 -1
  12. orchestrator/cli/helpers/print_helpers.py +1 -1
  13. orchestrator/config/assignee.py +1 -1
  14. orchestrator/db/models.py +17 -0
  15. orchestrator/devtools/populator.py +1 -1
  16. orchestrator/devtools/scripts/migrate_20.py +11 -106
  17. orchestrator/devtools/scripts/migrate_30.py +61 -0
  18. orchestrator/devtools/scripts/shared.py +108 -0
  19. orchestrator/distlock/managers/redis_distlock_manager.py +3 -2
  20. orchestrator/domain/base.py +1 -2
  21. orchestrator/domain/lifecycle.py +2 -1
  22. orchestrator/graphql/resolvers/settings.py +2 -1
  23. orchestrator/graphql/schemas/product.py +19 -2
  24. orchestrator/migrations/helpers.py +1 -1
  25. orchestrator/migrations/versions/schema/2025-02-12_bac6be6f2b4f_added_input_state_table.py +56 -0
  26. orchestrator/schemas/engine_settings.py +1 -1
  27. orchestrator/schemas/subscription.py +2 -1
  28. orchestrator/services/celery.py +7 -4
  29. orchestrator/services/input_state.py +76 -0
  30. orchestrator/services/processes.py +8 -6
  31. orchestrator/services/products.py +1 -1
  32. orchestrator/services/subscriptions.py +2 -1
  33. orchestrator/services/tasks.py +13 -7
  34. orchestrator/services/workflows.py +13 -0
  35. orchestrator/settings.py +5 -2
  36. orchestrator/targets.py +1 -1
  37. orchestrator/types.py +8 -43
  38. orchestrator/utils/errors.py +2 -1
  39. orchestrator/utils/redis.py +6 -11
  40. orchestrator/utils/redis_client.py +35 -0
  41. orchestrator/utils/state.py +2 -1
  42. orchestrator/workflow.py +3 -1
  43. orchestrator/workflows/modify_note.py +1 -2
  44. orchestrator/workflows/steps.py +2 -1
  45. orchestrator/workflows/tasks/cleanup_tasks_log.py +1 -1
  46. orchestrator/workflows/tasks/resume_workflows.py +1 -1
  47. orchestrator/workflows/tasks/validate_product_type.py +1 -1
  48. orchestrator/workflows/tasks/validate_products.py +1 -1
  49. orchestrator/workflows/utils.py +2 -2
  50. {orchestrator_core-2.10.0rc2.dist-info → orchestrator_core-3.0.0.dist-info}/METADATA +10 -8
  51. {orchestrator_core-2.10.0rc2.dist-info → orchestrator_core-3.0.0.dist-info}/RECORD +53 -48
  52. {orchestrator_core-2.10.0rc2.dist-info → orchestrator_core-3.0.0.dist-info}/WHEEL +1 -1
  53. {orchestrator_core-2.10.0rc2.dist-info → orchestrator_core-3.0.0.dist-info/licenses}/LICENSE +0 -0
orchestrator/__init__.py CHANGED
@@ -13,7 +13,7 @@
 
 """This is the orchestrator workflow engine."""
 
-__version__ = "2.10.0rc2"
+__version__ = "3.0.0"
 
 from orchestrator.app import OrchestratorCore
 from orchestrator.settings import app_settings
@@ -61,7 +61,6 @@ from orchestrator.services.processes import (
 )
 from orchestrator.services.settings import get_engine_settings
 from orchestrator.settings import app_settings
-from orchestrator.types import JSON, State
 from orchestrator.utils.enrich_process import enrich_process
 from orchestrator.websocket import (
     WS_CHANNELS,
@@ -70,6 +69,7 @@ from orchestrator.websocket import (
     websocket_manager,
 )
 from orchestrator.workflow import ProcessStatus
+from pydantic_forms.types import JSON, State
 
 router = APIRouter()
 
@@ -28,6 +28,7 @@ from orchestrator.services import processes, settings
 from orchestrator.settings import ExecutorType, app_settings
 from orchestrator.utils.json import json_dumps
 from orchestrator.utils.redis import delete_keys_matching_pattern
+from orchestrator.utils.redis_client import create_redis_asyncio_client
 from orchestrator.websocket import WS_CHANNELS, broadcast_invalidate_cache, websocket_manager
 
 router = APIRouter()
@@ -41,7 +42,7 @@ CACHE_FLUSH_OPTIONS: dict[str, str] = {
 
 @router.delete("/cache/{name}")
 async def clear_cache(name: str) -> int | None:
-    cache: AIORedis = AIORedis.from_url(str(app_settings.CACHE_URI))
+    cache: AIORedis = create_redis_asyncio_client(app_settings.CACHE_URI)
     if name not in CACHE_FLUSH_OPTIONS:
         raise_status(HTTPStatus.BAD_REQUEST, "Invalid cache name")
 
@@ -7,11 +7,12 @@ from typing import Annotated
 
 import structlog
 from pydantic import AfterValidator, ConfigDict, model_validator
+from pydantic_forms.types import FormGenerator, State, UUIDstr
 
 from orchestrator.forms import FormPage
 from orchestrator.forms.validators import Divider, Label, CustomerId, MigrationSummary
 from orchestrator.targets import Target
-from orchestrator.types import FormGenerator, State, SubscriptionLifecycle, UUIDstr
+from orchestrator.types import SubscriptionLifecycle
 from orchestrator.workflow import StepList, begin, step
 from orchestrator.workflows.steps import store_process_subscription
 from orchestrator.workflows.utils import create_workflow
@@ -6,11 +6,12 @@ from typing import Annotated
 
 import structlog
 from pydantic import AfterValidator, ConfigDict, model_validator
+from pydantic_forms.types import FormGenerator, State, UUIDstr
 from pydantic_forms.validators import ReadOnlyField
 
 from orchestrator.forms import FormPage
 from orchestrator.forms.validators import CustomerId, Divider
-from orchestrator.types import FormGenerator, State, SubscriptionLifecycle, UUIDstr
+from orchestrator.types import SubscriptionLifecycle
 from orchestrator.workflow import StepList, begin, step
 from orchestrator.workflows.steps import set_status
 from orchestrator.workflows.utils import modify_workflow
@@ -1,4 +1,5 @@
-from typing import Generator, List, TypeAlias, cast
+from collections.abc import Generator
+from typing import List, TypeAlias, cast
 
 from pydantic import ConfigDict
 
@@ -2,10 +2,10 @@
 
 import structlog
 from pydantic import AfterValidator, ConfigDict, model_validator
+from pydantic_forms.types import InputForm, State, UUIDstr
 
 from orchestrator.forms import FormPage
 from orchestrator.forms.validators import DisplaySubscription
-from orchestrator.types import InputForm, State, UUIDstr
 from orchestrator.workflow import StepList, begin, step
 from orchestrator.workflows.utils import terminate_workflow
 
@@ -4,7 +4,6 @@ from orchestrator.db import ProductTable
 from orchestrator.forms import FormValidationError
 
 from test.unit_tests.workflows import assert_complete, extract_state, run_workflow
-
 from {{ product_types_module }}.{{ product.variable }} import {{ product.type }}
 
 
@@ -1,9 +1,8 @@
 import pytest
 from orchestrator.forms import FormValidationError
-
 from orchestrator.types import SubscriptionLifecycle
-from test.unit_tests.workflows import assert_complete, extract_state, run_workflow
 
+from test.unit_tests.workflows import assert_complete, extract_state, run_workflow
 from {{ product_types_module }}.{{ product.variable }} import {{ product.type }}
 
 
@@ -4,8 +4,8 @@ import pytest
 from orchestrator.forms import FormValidationError
 {% endif %}
 from orchestrator.types import SubscriptionLifecycle
-from test.unit_tests.workflows import assert_complete, extract_state, run_workflow
 
+from test.unit_tests.workflows import assert_complete, extract_state, run_workflow
 from {{ product_types_module }}.{{ product.variable }} import {{ product.type }}
 
 
@@ -3,9 +3,11 @@
 import structlog
 {% if product.nso_service_id_path %}
 from deepdiff import DeepDiff
+{% endif %}
+from pydantic_forms.types import State
+{% if product.nso_service_id_path %}
 from surf.products.services.nso.nso import build_payload
 {% endif %}
-from orchestrator.types import State
 from orchestrator.workflow import StepList, begin, step
 from orchestrator.workflows.utils import validate_workflow
 
@@ -1,7 +1,7 @@
 from collections.abc import Callable, Iterable
 from typing import Any
 
-from orchestrator.types import strEnum
+from pydantic_forms.types import strEnum
 
 
 def _esc_str(i: int) -> str:
@@ -13,7 +13,7 @@
 
 import strawberry
 
-from orchestrator.types import strEnum
+from pydantic_forms.types import strEnum
 
 
 @strawberry.enum
orchestrator/db/models.py CHANGED
@@ -13,6 +13,7 @@
 
 from __future__ import annotations
 
+import enum
 from datetime import datetime, timezone
 
 import sqlalchemy
@@ -23,6 +24,7 @@ from sqlalchemy import (
     Boolean,
     CheckConstraint,
     Column,
+    Enum,
     ForeignKey,
     Index,
     Integer,
@@ -81,6 +83,20 @@ class UtcTimestamp(TypeDecorator):
         return value.astimezone(timezone.utc) if value else value
 
 
+class InputStateTable(BaseModel):
+    __tablename__ = "input_states"
+
+    class InputType(enum.Enum):
+        user_input = "user_input"
+        initial_state = "initial_state"
+
+    input_state_id = mapped_column(UUIDType, primary_key=True, server_default=text("uuid_generate_v4()"), index=True)
+    process_id = mapped_column("pid", UUIDType, ForeignKey("processes.pid"), nullable=False)
+    input_state = mapped_column(pg.JSONB(), nullable=False)  # type: ignore
+    input_time = mapped_column(UtcTimestamp, server_default=text("current_timestamp()"), nullable=False)
+    input_type = mapped_column(Enum(InputType), nullable=False)
+
+
 class ProcessTable(BaseModel):
     __tablename__ = "processes"
 
@@ -101,6 +117,7 @@ class ProcessTable(BaseModel):
     steps = relationship(
         "ProcessStepTable", cascade="delete", passive_deletes=True, order_by="asc(ProcessStepTable.executed_at)"
     )
+    input_states = relationship("InputStateTable", cascade="delete", order_by="desc(InputStateTable.input_time)")
     process_subscriptions = relationship("ProcessSubscriptionTable", back_populates="process", passive_deletes=True)
     workflow = relationship("WorkflowTable", back_populates="processes")
 
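The new InputStateTable stores the user input or initial state per process (the matching schema migration is the added_input_state_table revision in the file list), and the input_states relationship on ProcessTable returns those rows newest-first via its order_by clause. A minimal sketch of reading them back, assuming the usual db.session from orchestrator.db; the helper function below is hypothetical and not part of this release:

from orchestrator.db import db
from orchestrator.db.models import InputStateTable, ProcessTable


def latest_user_input(process_id):
    """Return the most recent user_input state recorded for a process, if any."""
    process = db.session.get(ProcessTable, process_id)
    if process is None:
        return None
    # input_states is ordered newest-first by the relationship's order_by clause
    for input_state in process.input_states:
        if input_state.input_type == InputStateTable.InputType.user_input:
            return input_state.input_state
    return None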
 
@@ -26,8 +26,8 @@ import structlog
 from more_itertools import first, first_true
 
 from nwastdlib.url import URL
-from orchestrator.types import State
 from pydantic_forms.types import InputForm as LegacyInputForm
+from pydantic_forms.types import State
 
 
 class JSONSubSchema(TypedDict, total=False):
@@ -6,61 +6,17 @@ Refer to the 2.0 migration guide documentation for background.
 import re
 import sys
 from pathlib import Path
-from subprocess import run
 from typing import Iterable
 
-
-def remove_imports(text: str, module: str, symbol: str) -> tuple[str, bool]:
-    """Find imports and remove them.
-
-    Assumes code is formatted through Black to keep the regex somewhat readable.
-    """
-    text_orig = text
-
-    # single import from module (may have a #comment) -> remove line
-    rgx = r"(from %s import \b%s\b(\s*#[^\n]*)*\n)" % (re.escape(module), symbol)
-    text = re.sub(rgx, "", text)
-
-    # middle or last of multiple imports from module -> strip symbol
-    rgx = r"(from %s import .+)(, \b%s\b)" % (re.escape(module), symbol)
-    text = re.sub(rgx, r"\1", text)
-
-    # first of multiple imports from same module -> strip symbol
-    rgx = r"(from %s import )\b%s\b, " % (re.escape(module), symbol)
-    text = re.sub(rgx, r"\1", text)
-
-    # multiline import -> remove line with symbol
-    rgx_verbose = r"""(?P<before>^from\s%s\simport\s*\([^\n]*\n(?:^[^\n]+,\n)*)
-    (^\s*\b%s\b,[^\n]*\n)
-    (?P<after>(?:^[^\n]+,\n)*\)[^\n]*$)"""
-    text = re.sub(rgx_verbose % (re.escape(module), symbol), r"\g<before>\g<after>", text, flags=re.M | re.X)
-    return text, text_orig != text
-
-
-def insert_import(text: str, import_stmt: str) -> str:
-    # Find the first import line and add our line above that
-    # Rely on ruff & black for formatting
-    return re.sub(r"(^(?:from .+|import .+)$)", f"{import_stmt}\n" + r"\1", text, count=1, flags=re.M)
-
-
-def find_and_remove_aliases(text: str, symbol: str) -> tuple[str, list[str]]:
-    """In the given text find aliases of the given symbol and remove them.
-
-    Return updated text and aliases removed.
-    """
-    rgx = r"(\b%s as (\w+))" % (symbol,)
-    aliases = [aliasgroup for fullgroup, aliasgroup in re.findall(rgx, text)]
-    newtext = re.sub(rgx, symbol, text)
-    return newtext, aliases
-
-
-def replace_words(text: str, words: list[str], replace: str) -> str:
-    rgx = r"\b(%s)\b" % ("|".join(words),)
-    return re.sub(rgx, replace, text)
-
-
-def has_word(text: str, word: str) -> bool:
-    return bool(re.search(r"\b%s\b" % (word,), text))
+from orchestrator.devtools.scripts.shared import (
+    find_and_remove_aliases,
+    has_word,
+    insert_import,
+    migrate,
+    move_import,
+    remove_imports,
+    replace_words,
+)
 
 
 def rewrite_subscription_instance_lists(f: Path) -> list[str]:
@@ -124,17 +80,6 @@ def rewrite_subscription_instance_lists(f: Path) -> list[str]:
     return names
 
 
-def move_import(f: Path, symbol: str, old_module: str, new_module: str) -> bool:
-    text = f.read_text()
-    text, changed = remove_imports(text, old_module, symbol)
-    if not changed:
-        return False
-    text = insert_import(text, f"from {new_module} import {symbol}")
-    with f.open(mode="w"):
-        f.write_text(text)
-    return True
-
-
 re_serializable_property = re.compile(r"^(\s+)(@serializable_property)([^\n]*)\n", flags=re.MULTILINE)
 
 
@@ -167,7 +112,7 @@ def replace_serializable_props(f: Path) -> bool:
     return True
 
 
-def migrate_file(f: Path) -> int:
+def migrate_file(f: Path) -> bool:
     imports = {
         "SI": move_import(f, "SI", "orchestrator.domain.base", "orchestrator.types"),
         "VlanRanges": move_import(f, "VlanRanges", "orchestrator.utils.vlans", "nwastdlib.vlans"),
@@ -189,46 +134,6 @@ def migrate_file(f: Path) -> int:
     return bool(lines)
 
 
-def run_tool(*args: str) -> bool:
-    cmd = " ".join(args)
-    try:
-        r = run(args, capture_output=True)  # noqa: S603
-        if r.returncode == 0:
-            return True
-        print(f"{cmd} failed:", r.stdout, r.stderr)
-    except FileNotFoundError:
-        print(f"{cmd }failed: could not find executable in the current venv")
-    return False
-
-
-def migrate(target_dir: Path) -> bool:
-    abs_path = str(target_dir.resolve())
-
-    def run_tools() -> bool:
-        return run_tool("ruff", "--fix", abs_path) and run_tool("black", "--quiet", abs_path)
-
-    print(f"\n### Verifing files in {abs_path}... ", end="")
-    if not run_tools():
-        print("Failed to verify files, aborting migration. Please resolve the errors.")
-        return False
-    print("Ok")
-
-    files_migrated = files_checked = 0
-    print(f"\n### Migrating files in {abs_path}")
-    try:
-        for f in target_dir.glob("**/*.py"):
-            if migrate_file(f):
-                files_migrated += 1
-            files_checked += 1
-    except KeyboardInterrupt:
-        print("Interrupted...")
-
-    print(f"\n### Migrated {files_migrated}/{files_checked} files in {abs_path}")
-
-    print(f"\n### Formatting files in {abs_path}")
-    return run_tools()
-
-
 if __name__ == "__main__":
     try:
         _target_dir = Path(sys.argv[1])
@@ -237,4 +142,4 @@ if __name__ == "__main__":
         print("Need a directory as parameter")
         sys.exit(1)
 
-    sys.exit(0 if migrate(_target_dir) else 1)
+    sys.exit(0 if migrate(_target_dir, migrate_file) else 1)
@@ -0,0 +1,61 @@
+"""Helper script to rewrite import statements in your orchestrator.
+
+Since types have been externalised in `pydantic_forms`, they were re-imported in `orchestrator.types` for backwards
+compatibility. These import statements have been removed, and therefore need to be updated in orchestrator
+implementations.
+"""
+
+import sys
+from pathlib import Path
+
+from orchestrator.devtools.scripts.shared import migrate, move_import
+
+
+def migrate_file(f: Path) -> bool:
+    imports = {
+        "JSON": move_import(f, "JSON", "orchestrator.types", "pydantic_forms.types"),
+        "AcceptData": move_import(f, "AcceptData", "orchestrator.types", "pydantic_forms.types"),
+        "AcceptItemType": move_import(f, "AcceptItemType", "orchestrator.types", "pydantic_forms.types"),
+        "FormGenerator": move_import(f, "FormGenerator", "orchestrator.types", "pydantic_forms.types"),
+        "FormGeneratorAsync": move_import(f, "FormGeneratorAsync", "orchestrator.types", "pydantic_forms.types"),
+        "InputForm": move_import(f, "InputForm", "orchestrator.types", "pydantic_forms.types"),
+        "InputFormGenerator": move_import(f, "InputFormGenerator", "orchestrator.types", "pydantic_forms.types"),
+        "InputStepFunc": move_import(f, "InputStepFunc", "orchestrator.types", "pydantic_forms.types"),
+        "SimpleInputFormGenerator": move_import(
+            f, "SimpleInputFormGenerator", "orchestrator.types", "pydantic_forms.types"
+        ),
+        "State": move_import(f, "State", "orchestrator.types", "pydantic_forms.types"),
+        "StateInputFormGenerator": move_import(
+            f, "StateInputFormGenerator", "orchestrator.types", "pydantic_forms.types"
+        ),
+        "StateInputFormGeneratorAsync": move_import(
+            f, "StateInputFormGeneratorAsync", "orchestrator.types", "pydantic_forms.types"
+        ),
+        "StateInputStepFunc": move_import(f, "StateInputStepFunc", "orchestrator.types", "pydantic_forms.types"),
+        "StateSimpleInputFormGenerator": move_import(
+            f, "StateSimpleInputFormGenerator", "orchestrator.types", "pydantic_forms.types"
+        ),
+        "SubscriptionMapping": move_import(f, "SubscriptionMapping", "orchestrator.types", "pydantic_forms.types"),
+        "SummaryData": move_import(f, "SummaryData", "orchestrator.types", "pydantic_forms.types"),
+        "UUIDstr": move_import(f, "UUIDstr", "orchestrator.types", "pydantic_forms.types"),
+        "strEnum": move_import(f, "strEnum", "orchestrator.types", "pydantic_forms.types"),
+    }
+    lines = []
+    lines.extend([f"Moved {k} import" for k, v in imports.items() if v])
+
+    if lines:
+        formatted_lines = "\n".join(f" - {line}" for line in lines)
+        print(f"Updated {f.name:50s}\n{formatted_lines}")
+
+    return bool(lines)
+
+
+if __name__ == "__main__":
+    try:
+        _target_dir = Path(sys.argv[1])
+        assert _target_dir.is_dir()
+    except Exception:
+        print("Need a directory as parameter")
+        sys.exit(1)
+
+    sys.exit(0 if migrate(_target_dir, migrate_file) else 1)
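Per the file list, this new script is orchestrator/devtools/scripts/migrate_30.py. It takes a single directory argument, rewrites every *.py file beneath it, and runs ruff/black before and after via the shared migrate() helper. A hedged illustration of the rewrite it performs; the module-style invocation and the example function below are assumptions, not part of the package:

# Example invocation (module-style invocation is an assumption):
#   python -m orchestrator.devtools.scripts.migrate_30 my_workflows/
#
# A 2.x-era module that imported form types from orchestrator.types:
#   from orchestrator.types import State, UUIDstr
# ends up importing them from pydantic_forms directly:
from pydantic_forms.types import State, UUIDstr


def example_initial_state(subscription_id: UUIDstr) -> State:
    # Hypothetical step helper: only the import origin changes, usage stays the same.
    return {"subscription_id": subscription_id}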
@@ -0,0 +1,108 @@
+import re
+from pathlib import Path
+from subprocess import run
+from typing import Callable
+
+
+def remove_imports(text: str, module: str, symbol: str) -> tuple[str, bool]:
+    """Find imports and remove them.
+
+    Assumes code is formatted through Black to keep the regex somewhat readable.
+    """
+    text_orig = text
+
+    # single import from module (may have a #comment) -> remove line
+    rgx = r"(from %s import \b%s\b(\s*#[^\n]*)*\n)" % (re.escape(module), symbol)
+    text = re.sub(rgx, "", text)
+
+    # middle or last of multiple imports from module -> strip symbol
+    rgx = r"(from %s import .+)(, \b%s\b)" % (re.escape(module), symbol)
+    text = re.sub(rgx, r"\1", text)
+
+    # first of multiple imports from same module -> strip symbol
+    rgx = r"(from %s import )\b%s\b, " % (re.escape(module), symbol)
+    text = re.sub(rgx, r"\1", text)
+
+    # multiline import -> remove line with symbol
+    rgx_verbose = r"""(?P<before>^from\s%s\simport\s*\([^\n]*\n(?:^[^\n]+,\n)*)
+    (^\s*\b%s\b,[^\n]*\n)
+    (?P<after>(?:^[^\n]+,\n)*\)[^\n]*$)"""
+    text = re.sub(rgx_verbose % (re.escape(module), symbol), r"\g<before>\g<after>", text, flags=re.M | re.X)
+    return text, text_orig != text
+
+
+def insert_import(text: str, import_stmt: str) -> str:
+    # Find the first import line and add our line above that
+    # Rely on ruff & black for formatting
+    return re.sub(r"(^(?:from .+|import .+)$)", f"{import_stmt}\n" + r"\1", text, count=1, flags=re.M)
+
+
+def move_import(f: Path, symbol: str, old_module: str, new_module: str) -> bool:
+    text = f.read_text()
+    text, changed = remove_imports(text, old_module, symbol)
+    if not changed:
+        return False
+    text = insert_import(text, f"from {new_module} import {symbol}")
+    with f.open(mode="w"):
+        f.write_text(text)
+    return True
+
+
+def find_and_remove_aliases(text: str, symbol: str) -> tuple[str, list[str]]:
+    """In the given text find aliases of the given symbol and remove them.
+
+    Return updated text and aliases removed.
+    """
+    rgx = r"(\b%s as (\w+))" % (symbol,)
+    aliases = [aliasgroup for fullgroup, aliasgroup in re.findall(rgx, text)]
+    newtext = re.sub(rgx, symbol, text)
+    return newtext, aliases
+
+
+def replace_words(text: str, words: list[str], replace: str) -> str:
+    rgx = r"\b(%s)\b" % ("|".join(words),)
+    return re.sub(rgx, replace, text)
+
+
+def has_word(text: str, word: str) -> bool:
+    return bool(re.search(r"\b%s\b" % (word,), text))
+
+
+def run_tool(*args: str) -> bool:
+    cmd = " ".join(args)
+    try:
+        r = run(args, capture_output=True)  # noqa: S603
+        if r.returncode == 0:
+            return True
+        print(f"{cmd} failed:", r.stdout, r.stderr)
+    except FileNotFoundError:
+        print(f"{cmd} failed: could not find executable in the current venv")
+    return False
+
+
+def migrate(target_dir: Path, migrate_file: Callable[[Path], bool]) -> bool:
+    abs_path = str(target_dir.resolve())
+
+    def run_tools() -> bool:
+        return run_tool("ruff", "check", "--fix", abs_path) and run_tool("black", "--quiet", abs_path)
+
+    print(f"\n### Verifying files in {abs_path}... ", end="")
+    if not run_tools():
+        print("Failed to verify files, aborting migration. Please resolve errors.")
+        return False
+    print("Ok")
+
+    files_migrated = files_checked = 0
+    print(f"\n### Migrating files in {abs_path}")
+    try:
+        for f in target_dir.glob("**/*.py"):
+            if migrate_file(f):
+                files_migrated += 1
+            files_checked += 1
+    except KeyboardInterrupt:
+        print("Interrupted...")
+
+    print(f"\n### Migrated {files_migrated}/{files_checked} files in {abs_path}")
+
+    print(f"\n### Formatting files in {abs_path}")
+    return run_tools()
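Per the file list, this is the new orchestrator/devtools/scripts/shared.py module used by both migrate_20 and migrate_30. The text-rewriting helpers operate on plain source strings, so the effect of a single symbol move is easy to illustrate; the sample source below is made up for the example:

from orchestrator.devtools.scripts.shared import insert_import, remove_imports

# A made-up module that still imports State from orchestrator.types.
source = "from orchestrator.types import State, UUIDstr\n\n\ndef step(state: State) -> State:\n    return state\n"

# Strip the State symbol from the orchestrator.types import...
source, changed = remove_imports(source, "orchestrator.types", "State")
if changed:
    # ...and re-import it from pydantic_forms.types (ruff/black tidy the result up later).
    source = insert_import(source, "from pydantic_forms.types import State")

print(source)
# from pydantic_forms.types import State
# from orchestrator.types import UUIDstr
# ...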
@@ -20,6 +20,7 @@ from redis.lock import Lock as SyncLock
 from structlog import get_logger
 
 from orchestrator.settings import app_settings
+from orchestrator.utils.redis_client import create_redis_asyncio_client, create_redis_client
 
 logger = get_logger(__name__)
 
@@ -37,7 +38,7 @@ class RedisDistLockManager:
         self.redis_address = redis_address
 
     async def connect_redis(self) -> None:
-        self.redis_conn = AIORedis.from_url(str(self.redis_address))
+        self.redis_conn = create_redis_asyncio_client(self.redis_address)
 
     async def disconnect_redis(self) -> None:
         if self.redis_conn:
@@ -78,7 +79,7 @@ class RedisDistLockManager:
     def release_sync(self, lock: Lock) -> None:
         redis_conn: Redis | None = None
         try:
-            redis_conn = Redis.from_url(str(app_settings.CACHE_URI))
+            redis_conn = create_redis_client(app_settings.CACHE_URI)
             sync_lock: SyncLock = SyncLock(
                 redis=redis_conn,
                 name=lock.name,  # type: ignore
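These call sites now go through the new orchestrator/utils/redis_client.py (+35 lines in the file list), whose contents are not shown in this section. A minimal sketch of what such factory helpers might look like, assuming they simply centralise from_url construction; signatures and any extra options here are assumptions:

# Hypothetical sketch only; the real orchestrator.utils.redis_client may differ.
from redis import Redis
from redis.asyncio import Redis as AIORedis


def create_redis_client(url) -> Redis:
    # One place to build the synchronous client from a settings URI.
    return Redis.from_url(str(url))


def create_redis_asyncio_client(url) -> AIORedis:
    # Async counterpart used by the cache endpoints and the distributed lock manager.
    return AIORedis.from_url(str(url))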
@@ -55,9 +55,7 @@ from orchestrator.domain.lifecycle import (
 from orchestrator.services.products import get_product_by_id
 from orchestrator.types import (
     SAFE_USED_BY_TRANSITIONS_FOR_STATUS,
-    State,
     SubscriptionLifecycle,
-    UUIDstr,
     filter_nonetype,
     get_origin_and_args,
     get_possible_product_block_types,
@@ -69,6 +67,7 @@ from orchestrator.types import (
 )
 from orchestrator.utils.datetime import nowtz
 from orchestrator.utils.docs import make_product_block_docstring, make_subscription_model_docstring
+from pydantic_forms.types import State, UUIDstr
 
 logger = structlog.get_logger(__name__)
 
@@ -16,7 +16,8 @@ from typing import TYPE_CHECKING, TypeVar
 import strawberry
 import structlog
 
-from orchestrator.types import SubscriptionLifecycle, strEnum
+from orchestrator.types import SubscriptionLifecycle
+from pydantic_forms.types import strEnum
 
 if TYPE_CHECKING:
     from orchestrator.domain.base import DomainModel
@@ -21,6 +21,7 @@ from orchestrator.services.processes import SYSTEM_USER, ThreadPoolWorkerStatus,
 from orchestrator.services.settings import get_engine_settings, get_engine_settings_for_update, post_update_to_slack
 from orchestrator.settings import ExecutorType, app_settings
 from orchestrator.utils.redis import delete_keys_matching_pattern
+from orchestrator.utils.redis_client import create_redis_asyncio_client
 
 logger = structlog.get_logger(__name__)
 
@@ -57,7 +58,7 @@ def resolve_settings(info: OrchestratorInfo) -> StatusType:
 
 # Mutations
 async def clear_cache(info: OrchestratorInfo, name: str) -> CacheClearSuccess | Error:
-    cache: AIORedis = AIORedis.from_url(str(app_settings.CACHE_URI))
+    cache: AIORedis = create_redis_asyncio_client(app_settings.CACHE_URI)
     if name not in CACHE_FLUSH_OPTIONS:
         return Error(message="Invalid cache name")
 
@@ -1,11 +1,11 @@
-from typing import TYPE_CHECKING, Annotated
+from typing import TYPE_CHECKING, Annotated, Iterable
 
 import strawberry
 from strawberry import UNSET
 from strawberry.federation.schema_directives import Key
 
 from oauth2_lib.strawberry import authenticated_field
-from orchestrator.db import ProductTable
+from orchestrator.db import ProductBlockTable, ProductTable
 from orchestrator.domain.base import ProductModel
 from orchestrator.graphql.pagination import Connection
 from orchestrator.graphql.schemas.fixed_input import FixedInput
@@ -51,6 +51,23 @@ class ProductType:
         filter_by_with_related_subscriptions = (filter_by or []) + [GraphqlFilter(field="product", value=self.name)]
         return await resolve_subscriptions(info, filter_by_with_related_subscriptions, sort_by, first, after)
 
+    @strawberry.field(description="Returns list of all nested productblock names")  # type: ignore
+    async def all_pb_names(self) -> list[str]:
+
+        model = get_original_model(self, ProductTable)
+
+        def get_all_pb_names(product_blocks: list[ProductBlockTable]) -> Iterable[str]:
+            for product_block in product_blocks:
+                yield product_block.name
+
+                if product_block.depends_on:
+                    yield from get_all_pb_names(product_block.depends_on)
+
+        names: list[str] = list(get_all_pb_names(model.product_blocks))
+        names.sort()
+
+        return names
+
     @strawberry.field(description="Return product blocks")  # type: ignore
     async def product_blocks(self) -> list[Annotated["ProductBlock", strawberry.lazy(".product_block")]]:
         from orchestrator.graphql.schemas.product_block import ProductBlock
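With all_pb_names in place, a product query can ask for the flattened block names directly. An illustrative query string; the products/page connection shape and the camelCase field name follow orchestrator-core's usual GraphQL conventions and are assumptions, not shown in this diff:

# Run against the orchestrator GraphQL endpoint with any client.
ALL_PB_NAMES_QUERY = """
query ProductsWithNestedBlockNames {
  products(first: 10) {
    page {
      name
      allPbNames
    }
  }
}
"""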
@@ -19,7 +19,7 @@ import sqlalchemy as sa
 import structlog
 
 from orchestrator.settings import app_settings
-from orchestrator.types import UUIDstr
+from pydantic_forms.types import UUIDstr
 
 logger = structlog.get_logger(__name__)
 