orchestrator-core 2.10.0rc1__py3-none-any.whl → 3.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64) hide show
  1. orchestrator/__init__.py +1 -1
  2. orchestrator/api/api_v1/api.py +24 -3
  3. orchestrator/api/api_v1/endpoints/processes.py +1 -1
  4. orchestrator/api/api_v1/endpoints/product_blocks.py +56 -0
  5. orchestrator/api/api_v1/endpoints/products.py +28 -1
  6. orchestrator/api/api_v1/endpoints/resource_types.py +56 -0
  7. orchestrator/api/api_v1/endpoints/settings.py +2 -1
  8. orchestrator/api/api_v1/endpoints/workflows.py +54 -0
  9. orchestrator/app.py +3 -2
  10. orchestrator/cli/generator/generator/product_block.py +1 -9
  11. orchestrator/cli/generator/templates/create_product.j2 +2 -1
  12. orchestrator/cli/generator/templates/modify_product.j2 +2 -1
  13. orchestrator/cli/generator/templates/shared_workflows.j2 +2 -1
  14. orchestrator/cli/generator/templates/terminate_product.j2 +1 -1
  15. orchestrator/cli/generator/templates/test_create_workflow.j2 +0 -1
  16. orchestrator/cli/generator/templates/test_modify_workflow.j2 +1 -2
  17. orchestrator/cli/generator/templates/test_terminate_workflow.j2 +1 -1
  18. orchestrator/cli/generator/templates/validate_product.j2 +3 -1
  19. orchestrator/cli/helpers/print_helpers.py +1 -1
  20. orchestrator/config/assignee.py +1 -1
  21. orchestrator/db/models.py +17 -0
  22. orchestrator/devtools/populator.py +1 -1
  23. orchestrator/devtools/scripts/migrate_20.py +11 -106
  24. orchestrator/devtools/scripts/migrate_30.py +61 -0
  25. orchestrator/devtools/scripts/shared.py +108 -0
  26. orchestrator/distlock/managers/redis_distlock_manager.py +3 -2
  27. orchestrator/domain/base.py +1 -2
  28. orchestrator/domain/lifecycle.py +2 -1
  29. orchestrator/graphql/resolvers/settings.py +2 -1
  30. orchestrator/graphql/schemas/product.py +19 -2
  31. orchestrator/migrations/helpers.py +1 -1
  32. orchestrator/migrations/versions/schema/2025-02-12_bac6be6f2b4f_added_input_state_table.py +56 -0
  33. orchestrator/schemas/engine_settings.py +1 -1
  34. orchestrator/schemas/product.py +4 -0
  35. orchestrator/schemas/product_block.py +4 -0
  36. orchestrator/schemas/resource_type.py +4 -0
  37. orchestrator/schemas/subscription.py +2 -1
  38. orchestrator/schemas/workflow.py +4 -0
  39. orchestrator/services/celery.py +7 -4
  40. orchestrator/services/input_state.py +76 -0
  41. orchestrator/services/processes.py +8 -6
  42. orchestrator/services/products.py +1 -1
  43. orchestrator/services/subscriptions.py +2 -1
  44. orchestrator/services/tasks.py +13 -7
  45. orchestrator/services/workflows.py +13 -0
  46. orchestrator/settings.py +5 -2
  47. orchestrator/targets.py +1 -1
  48. orchestrator/types.py +8 -43
  49. orchestrator/utils/errors.py +2 -1
  50. orchestrator/utils/redis.py +6 -11
  51. orchestrator/utils/redis_client.py +35 -0
  52. orchestrator/utils/state.py +2 -1
  53. orchestrator/workflow.py +3 -1
  54. orchestrator/workflows/modify_note.py +1 -2
  55. orchestrator/workflows/steps.py +2 -1
  56. orchestrator/workflows/tasks/cleanup_tasks_log.py +1 -1
  57. orchestrator/workflows/tasks/resume_workflows.py +1 -1
  58. orchestrator/workflows/tasks/validate_product_type.py +1 -1
  59. orchestrator/workflows/tasks/validate_products.py +1 -1
  60. orchestrator/workflows/utils.py +2 -2
  61. {orchestrator_core-2.10.0rc1.dist-info → orchestrator_core-3.0.0.dist-info}/METADATA +10 -8
  62. {orchestrator_core-2.10.0rc1.dist-info → orchestrator_core-3.0.0.dist-info}/RECORD +64 -56
  63. {orchestrator_core-2.10.0rc1.dist-info → orchestrator_core-3.0.0.dist-info}/WHEEL +1 -1
  64. {orchestrator_core-2.10.0rc1.dist-info → orchestrator_core-3.0.0.dist-info/licenses}/LICENSE +0 -0
@@ -6,61 +6,17 @@ Refer to the 2.0 migration guide documentation for background.
6
6
  import re
7
7
  import sys
8
8
  from pathlib import Path
9
- from subprocess import run
10
9
  from typing import Iterable
11
10
 
12
-
13
- def remove_imports(text: str, module: str, symbol: str) -> tuple[str, bool]:
14
- """Find imports and remove them.
15
-
16
- Assumes code is formatted through Black to keep the regex somewhat readable.
17
- """
18
- text_orig = text
19
-
20
- # single import from module (may have a #comment) -> remove line
21
- rgx = r"(from %s import \b%s\b(\s*#[^\n]*)*\n)" % (re.escape(module), symbol)
22
- text = re.sub(rgx, "", text)
23
-
24
- # middle or last of multiple imports from module -> strip symbol
25
- rgx = r"(from %s import .+)(, \b%s\b)" % (re.escape(module), symbol)
26
- text = re.sub(rgx, r"\1", text)
27
-
28
- # first of multiple imports from same module -> strip symbol
29
- rgx = r"(from %s import )\b%s\b, " % (re.escape(module), symbol)
30
- text = re.sub(rgx, r"\1", text)
31
-
32
- # multiline import -> remove line with symbol
33
- rgx_verbose = r"""(?P<before>^from\s%s\simport\s*\([^\n]*\n(?:^[^\n]+,\n)*)
34
- (^\s*\b%s\b,[^\n]*\n)
35
- (?P<after>(?:^[^\n]+,\n)*\)[^\n]*$)"""
36
- text = re.sub(rgx_verbose % (re.escape(module), symbol), r"\g<before>\g<after>", text, flags=re.M | re.X)
37
- return text, text_orig != text
38
-
39
-
40
- def insert_import(text: str, import_stmt: str) -> str:
41
- # Find the first import line and add our line above that
42
- # Rely on ruff & black for formatting
43
- return re.sub(r"(^(?:from .+|import .+)$)", f"{import_stmt}\n" + r"\1", text, count=1, flags=re.M)
44
-
45
-
46
- def find_and_remove_aliases(text: str, symbol: str) -> tuple[str, list[str]]:
47
- """In the given text find aliases of the given symbol and remove them.
48
-
49
- Return updated text and aliases removed.
50
- """
51
- rgx = r"(\b%s as (\w+))" % (symbol,)
52
- aliases = [aliasgroup for fullgroup, aliasgroup in re.findall(rgx, text)]
53
- newtext = re.sub(rgx, symbol, text)
54
- return newtext, aliases
55
-
56
-
57
- def replace_words(text: str, words: list[str], replace: str) -> str:
58
- rgx = r"\b(%s)\b" % ("|".join(words),)
59
- return re.sub(rgx, replace, text)
60
-
61
-
62
- def has_word(text: str, word: str) -> bool:
63
- return bool(re.search(r"\b%s\b" % (word,), text))
11
+ from orchestrator.devtools.scripts.shared import (
12
+ find_and_remove_aliases,
13
+ has_word,
14
+ insert_import,
15
+ migrate,
16
+ move_import,
17
+ remove_imports,
18
+ replace_words,
19
+ )
64
20
 
65
21
 
66
22
  def rewrite_subscription_instance_lists(f: Path) -> list[str]:
@@ -124,17 +80,6 @@ def rewrite_subscription_instance_lists(f: Path) -> list[str]:
124
80
  return names
125
81
 
126
82
 
127
- def move_import(f: Path, symbol: str, old_module: str, new_module: str) -> bool:
128
- text = f.read_text()
129
- text, changed = remove_imports(text, old_module, symbol)
130
- if not changed:
131
- return False
132
- text = insert_import(text, f"from {new_module} import {symbol}")
133
- with f.open(mode="w"):
134
- f.write_text(text)
135
- return True
136
-
137
-
138
83
  re_serializable_property = re.compile(r"^(\s+)(@serializable_property)([^\n]*)\n", flags=re.MULTILINE)
139
84
 
140
85
 
@@ -167,7 +112,7 @@ def replace_serializable_props(f: Path) -> bool:
167
112
  return True
168
113
 
169
114
 
170
- def migrate_file(f: Path) -> int:
115
+ def migrate_file(f: Path) -> bool:
171
116
  imports = {
172
117
  "SI": move_import(f, "SI", "orchestrator.domain.base", "orchestrator.types"),
173
118
  "VlanRanges": move_import(f, "VlanRanges", "orchestrator.utils.vlans", "nwastdlib.vlans"),
@@ -189,46 +134,6 @@ def migrate_file(f: Path) -> int:
189
134
  return bool(lines)
190
135
 
191
136
 
192
- def run_tool(*args: str) -> bool:
193
- cmd = " ".join(args)
194
- try:
195
- r = run(args, capture_output=True) # noqa: S603
196
- if r.returncode == 0:
197
- return True
198
- print(f"{cmd} failed:", r.stdout, r.stderr)
199
- except FileNotFoundError:
200
- print(f"{cmd }failed: could not find executable in the current venv")
201
- return False
202
-
203
-
204
- def migrate(target_dir: Path) -> bool:
205
- abs_path = str(target_dir.resolve())
206
-
207
- def run_tools() -> bool:
208
- return run_tool("ruff", "--fix", abs_path) and run_tool("black", "--quiet", abs_path)
209
-
210
- print(f"\n### Verifing files in {abs_path}... ", end="")
211
- if not run_tools():
212
- print("Failed to verify files, aborting migration. Please resolve the errors.")
213
- return False
214
- print("Ok")
215
-
216
- files_migrated = files_checked = 0
217
- print(f"\n### Migrating files in {abs_path}")
218
- try:
219
- for f in target_dir.glob("**/*.py"):
220
- if migrate_file(f):
221
- files_migrated += 1
222
- files_checked += 1
223
- except KeyboardInterrupt:
224
- print("Interrupted...")
225
-
226
- print(f"\n### Migrated {files_migrated}/{files_checked} files in {abs_path}")
227
-
228
- print(f"\n### Formatting files in {abs_path}")
229
- return run_tools()
230
-
231
-
232
137
  if __name__ == "__main__":
233
138
  try:
234
139
  _target_dir = Path(sys.argv[1])
@@ -237,4 +142,4 @@ if __name__ == "__main__":
237
142
  print("Need a directory as parameter")
238
143
  sys.exit(1)
239
144
 
240
- sys.exit(0 if migrate(_target_dir) else 1)
145
+ sys.exit(0 if migrate(_target_dir, migrate_file) else 1)
@@ -0,0 +1,61 @@
1
+ """Helper script to rewrite import statements in your orchestrator.
2
+
3
+ Since types have been externalised in `pydantic_forms`, they were re-imported in `orchestrator.types` for backwards
4
+ compatibility. These import statements have been removed, and therefore need to be updated in orchestrator
5
+ implementations.
6
+ """
7
+
8
+ import sys
9
+ from pathlib import Path
10
+
11
+ from orchestrator.devtools.scripts.shared import migrate, move_import
12
+
13
+
14
+ def migrate_file(f: Path) -> bool:
15
+ imports = {
16
+ "JSON": move_import(f, "JSON", "orchestrator.types", "pydantic_forms.types"),
17
+ "AcceptData": move_import(f, "AcceptData", "orchestrator.types", "pydantic_forms.types"),
18
+ "AcceptItemType": move_import(f, "AcceptItemType", "orchestrator.types", "pydantic_forms.types"),
19
+ "FormGenerator": move_import(f, "FormGenerator", "orchestrator.types", "pydantic_forms.types"),
20
+ "FormGeneratorAsync": move_import(f, "FormGeneratorAsync", "orchestrator.types", "pydantic_forms.types"),
21
+ "InputForm": move_import(f, "InputForm", "orchestrator.types", "pydantic_forms.types"),
22
+ "InputFormGenerator": move_import(f, "InputFormGenerator", "orchestrator.types", "pydantic_forms.types"),
23
+ "InputStepFunc": move_import(f, "InputStepFunc", "orchestrator.types", "pydantic_forms.types"),
24
+ "SimpleInputFormGenerator": move_import(
25
+ f, "SimpleInputFormGenerator", "orchestrator.types", "pydantic_forms.types"
26
+ ),
27
+ "State": move_import(f, "State", "orchestrator.types", "pydantic_forms.types"),
28
+ "StateInputFormGenerator": move_import(
29
+ f, "StateInputFormGenerator", "orchestrator.types", "pydantic_forms.types"
30
+ ),
31
+ "StateInputFormGeneratorAsync": move_import(
32
+ f, "StateInputFormGeneratorAsync", "orchestrator.types", "pydantic_forms.types"
33
+ ),
34
+ "StateInputStepFunc": move_import(f, "StateInputStepFunc", "orchestrator.types", "pydantic_forms.types"),
35
+ "StateSimpleInputFormGenerator": move_import(
36
+ f, "StateSimpleInputFormGenerator", "orchestrator.types", "pydantic_forms.types"
37
+ ),
38
+ "SubscriptionMapping": move_import(f, "SubscriptionMapping", "orchestrator.types", "pydantic_forms.types"),
39
+ "SummaryData": move_import(f, "SummaryData", "orchestrator.types", "pydantic_forms.types"),
40
+ "UUIDstr": move_import(f, "UUIDstr", "orchestrator.types", "pydantic_forms.types"),
41
+ "strEnum": move_import(f, "strEnum", "orchestrator.types", "pydantic_forms.types"),
42
+ }
43
+ lines = []
44
+ lines.extend([f"Moved {k} import" for k, v in imports.items() if v])
45
+
46
+ if lines:
47
+ formatted_lines = "\n".join(f" - {line}" for line in lines)
48
+ print(f"Updated {f.name:50s}\n{formatted_lines}")
49
+
50
+ return bool(lines)
51
+
52
+
53
+ if __name__ == "__main__":
54
+ try:
55
+ _target_dir = Path(sys.argv[1])
56
+ assert _target_dir.is_dir()
57
+ except Exception:
58
+ print("Need a directory as parameter")
59
+ sys.exit(1)
60
+
61
+ sys.exit(0 if migrate(_target_dir, migrate_file) else 1)
@@ -0,0 +1,108 @@
1
+ import re
2
+ from pathlib import Path
3
+ from subprocess import run
4
+ from typing import Callable
5
+
6
+
7
+ def remove_imports(text: str, module: str, symbol: str) -> tuple[str, bool]:
8
+ """Find imports and remove them.
9
+
10
+ Assumes code is formatted through Black to keep the regex somewhat readable.
11
+ """
12
+ text_orig = text
13
+
14
+ # single import from module (may have a #comment) -> remove line
15
+ rgx = r"(from %s import \b%s\b(\s*#[^\n]*)*\n)" % (re.escape(module), symbol)
16
+ text = re.sub(rgx, "", text)
17
+
18
+ # middle or last of multiple imports from module -> strip symbol
19
+ rgx = r"(from %s import .+)(, \b%s\b)" % (re.escape(module), symbol)
20
+ text = re.sub(rgx, r"\1", text)
21
+
22
+ # first of multiple imports from same module -> strip symbol
23
+ rgx = r"(from %s import )\b%s\b, " % (re.escape(module), symbol)
24
+ text = re.sub(rgx, r"\1", text)
25
+
26
+ # multiline import -> remove line with symbol
27
+ rgx_verbose = r"""(?P<before>^from\s%s\simport\s*\([^\n]*\n(?:^[^\n]+,\n)*)
28
+ (^\s*\b%s\b,[^\n]*\n)
29
+ (?P<after>(?:^[^\n]+,\n)*\)[^\n]*$)"""
30
+ text = re.sub(rgx_verbose % (re.escape(module), symbol), r"\g<before>\g<after>", text, flags=re.M | re.X)
31
+ return text, text_orig != text
32
+
33
+
34
+ def insert_import(text: str, import_stmt: str) -> str:
35
+ # Find the first import line and add our line above that
36
+ # Rely on ruff & black for formatting
37
+ return re.sub(r"(^(?:from .+|import .+)$)", f"{import_stmt}\n" + r"\1", text, count=1, flags=re.M)
38
+
39
+
40
+ def move_import(f: Path, symbol: str, old_module: str, new_module: str) -> bool:
41
+ text = f.read_text()
42
+ text, changed = remove_imports(text, old_module, symbol)
43
+ if not changed:
44
+ return False
45
+ text = insert_import(text, f"from {new_module} import {symbol}")
46
+ with f.open(mode="w"):
47
+ f.write_text(text)
48
+ return True
49
+
50
+
51
+ def find_and_remove_aliases(text: str, symbol: str) -> tuple[str, list[str]]:
52
+ """In the given text find aliases of the given symbol and remove them.
53
+
54
+ Return updated text and aliases removed.
55
+ """
56
+ rgx = r"(\b%s as (\w+))" % (symbol,)
57
+ aliases = [aliasgroup for fullgroup, aliasgroup in re.findall(rgx, text)]
58
+ newtext = re.sub(rgx, symbol, text)
59
+ return newtext, aliases
60
+
61
+
62
+ def replace_words(text: str, words: list[str], replace: str) -> str:
63
+ rgx = r"\b(%s)\b" % ("|".join(words),)
64
+ return re.sub(rgx, replace, text)
65
+
66
+
67
+ def has_word(text: str, word: str) -> bool:
68
+ return bool(re.search(r"\b%s\b" % (word,), text))
69
+
70
+
71
+ def run_tool(*args: str) -> bool:
72
+ cmd = " ".join(args)
73
+ try:
74
+ r = run(args, capture_output=True) # noqa: S603
75
+ if r.returncode == 0:
76
+ return True
77
+ print(f"{cmd} failed:", r.stdout, r.stderr)
78
+ except FileNotFoundError:
79
+ print(f"{cmd} failed: could not find executable in the current venv")
80
+ return False
81
+
82
+
83
+ def migrate(target_dir: Path, migrate_file: Callable[[Path], bool]) -> bool:
84
+ abs_path = str(target_dir.resolve())
85
+
86
+ def run_tools() -> bool:
87
+ return run_tool("ruff", "check", "--fix", abs_path) and run_tool("black", "--quiet", abs_path)
88
+
89
+ print(f"\n### Verifying files in {abs_path}... ", end="")
90
+ if not run_tools():
91
+ print("Failed to verify files, aborting migration. Please resolve errors.")
92
+ return False
93
+ print("Ok")
94
+
95
+ files_migrated = files_checked = 0
96
+ print(f"\n### Migrating files in {abs_path}")
97
+ try:
98
+ for f in target_dir.glob("**/*.py"):
99
+ if migrate_file(f):
100
+ files_migrated += 1
101
+ files_checked += 1
102
+ except KeyboardInterrupt:
103
+ print("Interrupted...")
104
+
105
+ print(f"\n### Migrated {files_migrated}/{files_checked} files in {abs_path}")
106
+
107
+ print(f"\n### Formatting files in {abs_path}")
108
+ return run_tools()
@@ -20,6 +20,7 @@ from redis.lock import Lock as SyncLock
20
20
  from structlog import get_logger
21
21
 
22
22
  from orchestrator.settings import app_settings
23
+ from orchestrator.utils.redis_client import create_redis_asyncio_client, create_redis_client
23
24
 
24
25
  logger = get_logger(__name__)
25
26
 
@@ -37,7 +38,7 @@ class RedisDistLockManager:
37
38
  self.redis_address = redis_address
38
39
 
39
40
  async def connect_redis(self) -> None:
40
- self.redis_conn = AIORedis.from_url(str(self.redis_address))
41
+ self.redis_conn = create_redis_asyncio_client(self.redis_address)
41
42
 
42
43
  async def disconnect_redis(self) -> None:
43
44
  if self.redis_conn:
@@ -78,7 +79,7 @@ class RedisDistLockManager:
78
79
  def release_sync(self, lock: Lock) -> None:
79
80
  redis_conn: Redis | None = None
80
81
  try:
81
- redis_conn = Redis.from_url(str(app_settings.CACHE_URI))
82
+ redis_conn = create_redis_client(app_settings.CACHE_URI)
82
83
  sync_lock: SyncLock = SyncLock(
83
84
  redis=redis_conn,
84
85
  name=lock.name, # type: ignore
@@ -55,9 +55,7 @@ from orchestrator.domain.lifecycle import (
55
55
  from orchestrator.services.products import get_product_by_id
56
56
  from orchestrator.types import (
57
57
  SAFE_USED_BY_TRANSITIONS_FOR_STATUS,
58
- State,
59
58
  SubscriptionLifecycle,
60
- UUIDstr,
61
59
  filter_nonetype,
62
60
  get_origin_and_args,
63
61
  get_possible_product_block_types,
@@ -69,6 +67,7 @@ from orchestrator.types import (
69
67
  )
70
68
  from orchestrator.utils.datetime import nowtz
71
69
  from orchestrator.utils.docs import make_product_block_docstring, make_subscription_model_docstring
70
+ from pydantic_forms.types import State, UUIDstr
72
71
 
73
72
  logger = structlog.get_logger(__name__)
74
73
 
@@ -16,7 +16,8 @@ from typing import TYPE_CHECKING, TypeVar
16
16
  import strawberry
17
17
  import structlog
18
18
 
19
- from orchestrator.types import SubscriptionLifecycle, strEnum
19
+ from orchestrator.types import SubscriptionLifecycle
20
+ from pydantic_forms.types import strEnum
20
21
 
21
22
  if TYPE_CHECKING:
22
23
  from orchestrator.domain.base import DomainModel
@@ -21,6 +21,7 @@ from orchestrator.services.processes import SYSTEM_USER, ThreadPoolWorkerStatus,
21
21
  from orchestrator.services.settings import get_engine_settings, get_engine_settings_for_update, post_update_to_slack
22
22
  from orchestrator.settings import ExecutorType, app_settings
23
23
  from orchestrator.utils.redis import delete_keys_matching_pattern
24
+ from orchestrator.utils.redis_client import create_redis_asyncio_client
24
25
 
25
26
  logger = structlog.get_logger(__name__)
26
27
 
@@ -57,7 +58,7 @@ def resolve_settings(info: OrchestratorInfo) -> StatusType:
57
58
 
58
59
  # Mutations
59
60
  async def clear_cache(info: OrchestratorInfo, name: str) -> CacheClearSuccess | Error:
60
- cache: AIORedis = AIORedis.from_url(str(app_settings.CACHE_URI))
61
+ cache: AIORedis = create_redis_asyncio_client(app_settings.CACHE_URI)
61
62
  if name not in CACHE_FLUSH_OPTIONS:
62
63
  return Error(message="Invalid cache name")
63
64
 
@@ -1,11 +1,11 @@
1
- from typing import TYPE_CHECKING, Annotated
1
+ from typing import TYPE_CHECKING, Annotated, Iterable
2
2
 
3
3
  import strawberry
4
4
  from strawberry import UNSET
5
5
  from strawberry.federation.schema_directives import Key
6
6
 
7
7
  from oauth2_lib.strawberry import authenticated_field
8
- from orchestrator.db import ProductTable
8
+ from orchestrator.db import ProductBlockTable, ProductTable
9
9
  from orchestrator.domain.base import ProductModel
10
10
  from orchestrator.graphql.pagination import Connection
11
11
  from orchestrator.graphql.schemas.fixed_input import FixedInput
@@ -51,6 +51,23 @@ class ProductType:
51
51
  filter_by_with_related_subscriptions = (filter_by or []) + [GraphqlFilter(field="product", value=self.name)]
52
52
  return await resolve_subscriptions(info, filter_by_with_related_subscriptions, sort_by, first, after)
53
53
 
54
+ @strawberry.field(description="Returns list of all nested productblock names") # type: ignore
55
+ async def all_pb_names(self) -> list[str]:
56
+
57
+ model = get_original_model(self, ProductTable)
58
+
59
+ def get_all_pb_names(product_blocks: list[ProductBlockTable]) -> Iterable[str]:
60
+ for product_block in product_blocks:
61
+ yield product_block.name
62
+
63
+ if product_block.depends_on:
64
+ yield from get_all_pb_names(product_block.depends_on)
65
+
66
+ names: list[str] = list(get_all_pb_names(model.product_blocks))
67
+ names.sort()
68
+
69
+ return names
70
+
54
71
  @strawberry.field(description="Return product blocks") # type: ignore
55
72
  async def product_blocks(self) -> list[Annotated["ProductBlock", strawberry.lazy(".product_block")]]:
56
73
  from orchestrator.graphql.schemas.product_block import ProductBlock
@@ -19,7 +19,7 @@ import sqlalchemy as sa
19
19
  import structlog
20
20
 
21
21
  from orchestrator.settings import app_settings
22
- from orchestrator.types import UUIDstr
22
+ from pydantic_forms.types import UUIDstr
23
23
 
24
24
  logger = structlog.get_logger(__name__)
25
25
 
@@ -0,0 +1,56 @@
1
+ """Added Input State Table.
2
+
3
+ Revision ID: bac6be6f2b4f
4
+ Revises: 4fjdn13f83ga
5
+ Create Date: 2025-02-12 14:39:53.664284
6
+
7
+ """
8
+
9
+ import sqlalchemy as sa
10
+ import sqlalchemy_utils
11
+ from alembic import op
12
+ from sqlalchemy.dialects import postgresql
13
+
14
+ from orchestrator import db
15
+
16
+ # revision identifiers, used by Alembic.
17
+ revision = "bac6be6f2b4f"
18
+ down_revision = "4fjdn13f83ga"
19
+ branch_labels = None
20
+ depends_on = None
21
+
22
+
23
+ def upgrade() -> None:
24
+ # ### commands auto generated by Alembic - please adjust! ###
25
+ op.create_table(
26
+ "input_states",
27
+ sa.Column(
28
+ "input_state_id",
29
+ sqlalchemy_utils.types.uuid.UUIDType(),
30
+ server_default=sa.text("uuid_generate_v4()"),
31
+ nullable=False,
32
+ ),
33
+ sa.Column("pid", sqlalchemy_utils.types.uuid.UUIDType(), nullable=False),
34
+ sa.Column("input_state", postgresql.JSONB(astext_type=sa.Text()), nullable=False), # type: ignore
35
+ sa.Column(
36
+ "input_time",
37
+ db.models.UtcTimestamp(timezone=True),
38
+ server_default=sa.text("current_timestamp"),
39
+ nullable=False,
40
+ ),
41
+ sa.Column("input_type", sa.Enum("user_input", "initial_state", name="inputtype"), nullable=False),
42
+ sa.ForeignKeyConstraint(
43
+ ["pid"],
44
+ ["processes.pid"],
45
+ ),
46
+ sa.PrimaryKeyConstraint("input_state_id"),
47
+ )
48
+ op.create_index(op.f("ix_input_state_input_state_id"), "input_states", ["input_state_id"], unique=False)
49
+ # ### end Alembic commands ###
50
+
51
+
52
+ def downgrade() -> None:
53
+ # ### commands auto generated by Alembic - please adjust! ###
54
+ op.drop_index(op.f("ix_input_state_input_state_id"), table_name="input_states")
55
+ op.drop_table("input_states")
56
+ # ### end Alembic commands ###
@@ -16,7 +16,7 @@ import strawberry
16
16
  from pydantic import ConfigDict
17
17
 
18
18
  from orchestrator.schemas.base import OrchestratorBaseModel
19
- from orchestrator.types import strEnum
19
+ from pydantic_forms.types import strEnum
20
20
 
21
21
 
22
22
  @strawberry.enum
@@ -41,3 +41,7 @@ class ProductSchema(ProductBaseSchema):
41
41
  product_blocks: list[ProductBlockSchema]
42
42
  fixed_inputs: list[FixedInputSchema]
43
43
  workflows: list[WorkflowSchema]
44
+
45
+
46
+ class ProductPatchSchema(OrchestratorBaseModel):
47
+ description: str | None = None
@@ -37,3 +37,7 @@ class ProductBlockSchema(ProductBlockBaseSchema):
37
37
  end_date: datetime | None = None
38
38
  resource_types: list[ResourceTypeSchema] | None = None # type: ignore
39
39
  model_config = ConfigDict(from_attributes=True)
40
+
41
+
42
+ class ProductBlockPatchSchema(OrchestratorBaseModel):
43
+ description: str | None = None
@@ -27,3 +27,7 @@ class ResourceTypeBaseSchema(OrchestratorBaseModel):
27
27
  class ResourceTypeSchema(ResourceTypeBaseSchema):
28
28
  resource_type_id: UUID
29
29
  model_config = ConfigDict(from_attributes=True)
30
+
31
+
32
+ class ResourceTypePatchSchema(OrchestratorBaseModel):
33
+ description: str | None = None
@@ -22,7 +22,8 @@ from orchestrator.schemas.product import ProductBaseSchema
22
22
  from orchestrator.schemas.product_block import ProductBlockSchema
23
23
  from orchestrator.schemas.resource_type import ResourceTypeSchema
24
24
  from orchestrator.schemas.subscription_descriptions import SubscriptionDescriptionSchema
25
- from orchestrator.types import SubscriptionLifecycle, strEnum
25
+ from orchestrator.types import SubscriptionLifecycle
26
+ from pydantic_forms.types import strEnum
26
27
 
27
28
 
28
29
  class PortMode(strEnum):
@@ -59,3 +59,7 @@ class SubscriptionWorkflowListsSchema(OrchestratorBaseModel):
59
59
  modify: list[WorkflowListItemSchema]
60
60
  terminate: list[WorkflowListItemSchema]
61
61
  system: list[WorkflowListItemSchema]
62
+
63
+
64
+ class WorkflowPatchSchema(OrchestratorBaseModel):
65
+ description: str | None = None
@@ -22,10 +22,11 @@ from kombu.exceptions import ConnectionError, OperationalError
22
22
  from orchestrator import app_settings
23
23
  from orchestrator.api.error_handling import raise_status
24
24
  from orchestrator.db import ProcessTable, db
25
+ from orchestrator.services.input_state import store_input_state
25
26
  from orchestrator.services.processes import create_process, delete_process
26
27
  from orchestrator.targets import Target
27
- from orchestrator.types import State
28
28
  from orchestrator.workflows import get_workflow
29
+ from pydantic_forms.types import State
29
30
 
30
31
  SYSTEM_USER = "SYSTEM"
31
32
 
@@ -53,9 +54,8 @@ def _celery_start_process(
53
54
  task_name = NEW_TASK if workflow.target == Target.SYSTEM else NEW_WORKFLOW
54
55
  trigger_task = get_celery_task(task_name)
55
56
  pstat = create_process(workflow_key, user_inputs, user)
56
- tasks = pstat.state.s
57
57
  try:
58
- result = trigger_task.delay(pstat.process_id, workflow_key, tasks, user)
58
+ result = trigger_task.delay(pstat.process_id, workflow_key, user)
59
59
  _block_when_testing(result)
60
60
  return pstat.process_id
61
61
  except (ConnectionError, OperationalError) as e:
@@ -82,9 +82,12 @@ def _celery_resume_process(
82
82
 
83
83
  task_name = RESUME_TASK if workflow.target == Target.SYSTEM else RESUME_WORKFLOW
84
84
  trigger_task = get_celery_task(task_name)
85
+
86
+ user_inputs = user_inputs or [{}]
87
+ store_input_state(pstat.process_id, user_inputs, "user_input")
85
88
  try:
86
89
  _celery_set_process_status_resumed(process)
87
- result = trigger_task.delay(pstat.process_id, user_inputs, user)
90
+ result = trigger_task.delay(pstat.process_id, user)
88
91
  _block_when_testing(result)
89
92
 
90
93
  return pstat.process_id
@@ -0,0 +1,76 @@
1
+ # Copyright 2019-2025 SURF.
2
+ # Licensed under the Apache License, Version 2.0 (the "License");
3
+ # you may not use this file except in compliance with the License.
4
+ # You may obtain a copy of the License at
5
+ #
6
+ # http://www.apache.org/licenses/LICENSE-2.0
7
+ #
8
+ # Unless required by applicable law or agreed to in writing, software
9
+ # distributed under the License is distributed on an "AS IS" BASIS,
10
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11
+ # See the License for the specific language governing permissions and
12
+ # limitations under the License.
13
+ from typing import Any, Literal
14
+ from uuid import UUID
15
+
16
+ import structlog
17
+ from sqlalchemy import select
18
+
19
+ from orchestrator.db import db
20
+ from orchestrator.db.models import InputStateTable
21
+
22
+ logger = structlog.get_logger(__name__)
23
+
24
+ InputType = Literal["initial_state", "user_input"]
25
+
26
+
27
+ def retrieve_input_state(process_id: UUID, input_type: InputType) -> InputStateTable:
28
+ """Get user input.
29
+
30
+ Args:
31
+ process_id: Process ID
32
+ input_type: The type of the input.
33
+
34
+ Returns:
35
+ User input table
36
+
37
+ """
38
+
39
+ res: InputStateTable | None = db.session.scalars(
40
+ select(InputStateTable)
41
+ .filter(InputStateTable.process_id == process_id)
42
+ .filter(InputStateTable.input_type == input_type)
43
+ .order_by(InputStateTable.input_time.asc())
44
+ ).first()
45
+
46
+ if res:
47
+ logger.debug("Retrieved input state", process_id=process_id, input_state=res, input_type=input_type)
48
+ return res
49
+ raise ValueError(f"No input state for pid: {process_id}")
50
+
51
+
52
+ def store_input_state(
53
+ process_id: UUID,
54
+ input_state: dict[str, Any] | list[dict[str, Any]],
55
+ input_type: InputType,
56
+ ) -> None:
57
+ """Store user input state.
58
+
59
+ Args:
60
+ process_id: Process ID
61
+ input_state: Dictionary of user input state
62
+ input_type: The type of the input.
63
+
64
+ Returns:
65
+ None
66
+
67
+ """
68
+ logger.debug("Store input state", process_id=process_id, input_state=input_state, input_type=input_type)
69
+ db.session.add(
70
+ InputStateTable(
71
+ process_id=process_id,
72
+ input_state=input_state,
73
+ input_type=input_type,
74
+ )
75
+ )
76
+ db.session.commit()