orchestrator-core 3.0.0rc1__py3-none-any.whl → 3.1.0rc1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- orchestrator/__init__.py +1 -1
- orchestrator/api/api_v1/endpoints/settings.py +2 -1
- orchestrator/cli/generator/generator/migration.py +5 -2
- orchestrator/db/models.py +17 -0
- orchestrator/distlock/managers/redis_distlock_manager.py +3 -2
- orchestrator/graphql/resolvers/settings.py +2 -1
- orchestrator/graphql/schemas/product.py +19 -2
- orchestrator/migrations/helpers.py +5 -5
- orchestrator/migrations/versions/schema/2025-02-12_bac6be6f2b4f_added_input_state_table.py +56 -0
- orchestrator/services/celery.py +6 -3
- orchestrator/services/input_state.py +76 -0
- orchestrator/services/processes.py +6 -5
- orchestrator/services/tasks.py +11 -6
- orchestrator/services/workflows.py +13 -0
- orchestrator/settings.py +26 -2
- orchestrator/utils/redis.py +6 -11
- orchestrator/utils/redis_client.py +35 -0
- {orchestrator_core-3.0.0rc1.dist-info → orchestrator_core-3.1.0rc1.dist-info}/METADATA +11 -9
- {orchestrator_core-3.0.0rc1.dist-info → orchestrator_core-3.1.0rc1.dist-info}/RECORD +21 -18
- {orchestrator_core-3.0.0rc1.dist-info → orchestrator_core-3.1.0rc1.dist-info}/WHEEL +1 -1
- {orchestrator_core-3.0.0rc1.dist-info → orchestrator_core-3.1.0rc1.dist-info/licenses}/LICENSE +0 -0
orchestrator/__init__.py
CHANGED
orchestrator/api/api_v1/endpoints/settings.py
CHANGED

@@ -28,6 +28,7 @@ from orchestrator.services import processes, settings
 from orchestrator.settings import ExecutorType, app_settings
 from orchestrator.utils.json import json_dumps
 from orchestrator.utils.redis import delete_keys_matching_pattern
+from orchestrator.utils.redis_client import create_redis_asyncio_client
 from orchestrator.websocket import WS_CHANNELS, broadcast_invalidate_cache, websocket_manager

 router = APIRouter()
@@ -41,7 +42,7 @@ CACHE_FLUSH_OPTIONS: dict[str, str] = {

 @router.delete("/cache/{name}")
 async def clear_cache(name: str) -> int | None:
-    cache: AIORedis =
+    cache: AIORedis = create_redis_asyncio_client(app_settings.CACHE_URI)
     if name not in CACHE_FLUSH_OPTIONS:
         raise_status(HTTPStatus.BAD_REQUEST, "Invalid cache name")

@@ -31,13 +31,16 @@ from orchestrator.cli.generator.generator.helpers import (
|
|
|
31
31
|
sort_product_blocks_by_dependencies,
|
|
32
32
|
)
|
|
33
33
|
from orchestrator.cli.generator.generator.settings import product_generator_settings as settings
|
|
34
|
+
from orchestrator.settings import convert_database_uri
|
|
34
35
|
|
|
35
36
|
logger = structlog.getLogger(__name__)
|
|
36
37
|
|
|
37
38
|
|
|
38
39
|
def create_migration_file(message: str, head: str) -> Path | None:
|
|
39
|
-
if
|
|
40
|
-
environ.update({"DATABASE_URI": "
|
|
40
|
+
if environ.get("DATABASE_URI"):
|
|
41
|
+
environ.update({"DATABASE_URI": convert_database_uri(environ["DATABASE_URI"])})
|
|
42
|
+
else:
|
|
43
|
+
environ.update({"DATABASE_URI": "postgresql+psycopg://nwa:nwa@localhost/orchestrator-core"})
|
|
41
44
|
if not environ.get("PYTHONPATH"):
|
|
42
45
|
environ.update({"PYTHONPATH": "."})
|
|
43
46
|
logger.info(
|
orchestrator/db/models.py
CHANGED
@@ -13,6 +13,7 @@

 from __future__ import annotations

+import enum
 from datetime import datetime, timezone

 import sqlalchemy
@@ -23,6 +24,7 @@ from sqlalchemy import (
     Boolean,
     CheckConstraint,
     Column,
+    Enum,
     ForeignKey,
     Index,
     Integer,
@@ -81,6 +83,20 @@ class UtcTimestamp(TypeDecorator):
         return value.astimezone(timezone.utc) if value else value


+class InputStateTable(BaseModel):
+    __tablename__ = "input_states"
+
+    class InputType(enum.Enum):
+        user_input = "user_input"
+        initial_state = "initial_state"
+
+    input_state_id = mapped_column(UUIDType, primary_key=True, server_default=text("uuid_generate_v4()"), index=True)
+    process_id = mapped_column("pid", UUIDType, ForeignKey("processes.pid"), nullable=False)
+    input_state = mapped_column(pg.JSONB(), nullable=False)  # type: ignore
+    input_time = mapped_column(UtcTimestamp, server_default=text("current_timestamp()"), nullable=False)
+    input_type = mapped_column(Enum(InputType), nullable=False)
+
+
 class ProcessTable(BaseModel):
     __tablename__ = "processes"

@@ -101,6 +117,7 @@ class ProcessTable(BaseModel):
     steps = relationship(
         "ProcessStepTable", cascade="delete", passive_deletes=True, order_by="asc(ProcessStepTable.executed_at)"
     )
+    input_states = relationship("InputStateTable", cascade="delete", order_by="desc(InputStateTable.input_time)")
     process_subscriptions = relationship("ProcessSubscriptionTable", back_populates="process", passive_deletes=True)
     workflow = relationship("WorkflowTable", back_populates="processes")

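The new InputStateTable rows are reachable from a process through the input_states relationship added above, which is ordered newest-first by input_time. A minimal sketch of reading the most recent stored input for a process; it assumes an initialized orchestrator database session, and the helper name is purely illustrative:

from uuid import UUID

from orchestrator.db import db
from orchestrator.db.models import ProcessTable


def latest_input_state(process_id: UUID) -> dict | list | None:
    # Hypothetical helper: look the process up by primary key and return the
    # newest stored input, relying on the desc(InputStateTable.input_time)
    # ordering of the relationship defined in models.py.
    process = db.session.get(ProcessTable, process_id)
    if process is None or not process.input_states:
        return None
    return process.input_states[0].input_state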
orchestrator/distlock/managers/redis_distlock_manager.py
CHANGED

@@ -20,6 +20,7 @@ from redis.lock import Lock as SyncLock
 from structlog import get_logger

 from orchestrator.settings import app_settings
+from orchestrator.utils.redis_client import create_redis_asyncio_client, create_redis_client

 logger = get_logger(__name__)

@@ -37,7 +38,7 @@ class RedisDistLockManager:
         self.redis_address = redis_address

     async def connect_redis(self) -> None:
-        self.redis_conn =
+        self.redis_conn = create_redis_asyncio_client(self.redis_address)

     async def disconnect_redis(self) -> None:
         if self.redis_conn:
@@ -78,7 +79,7 @@ class RedisDistLockManager:
     def release_sync(self, lock: Lock) -> None:
        redis_conn: Redis | None = None
        try:
-            redis_conn =
+            redis_conn = create_redis_client(app_settings.CACHE_URI)
            sync_lock: SyncLock = SyncLock(
                redis=redis_conn,
                name=lock.name,  # type: ignore
orchestrator/graphql/resolvers/settings.py
CHANGED

@@ -21,6 +21,7 @@ from orchestrator.services.processes import SYSTEM_USER, ThreadPoolWorkerStatus,
 from orchestrator.services.settings import get_engine_settings, get_engine_settings_for_update, post_update_to_slack
 from orchestrator.settings import ExecutorType, app_settings
 from orchestrator.utils.redis import delete_keys_matching_pattern
+from orchestrator.utils.redis_client import create_redis_asyncio_client

 logger = structlog.get_logger(__name__)

@@ -57,7 +58,7 @@ def resolve_settings(info: OrchestratorInfo) -> StatusType:

 # Mutations
 async def clear_cache(info: OrchestratorInfo, name: str) -> CacheClearSuccess | Error:
-    cache: AIORedis =
+    cache: AIORedis = create_redis_asyncio_client(app_settings.CACHE_URI)
     if name not in CACHE_FLUSH_OPTIONS:
         return Error(message="Invalid cache name")

orchestrator/graphql/schemas/product.py
CHANGED

@@ -1,11 +1,11 @@
-from typing import TYPE_CHECKING, Annotated
+from typing import TYPE_CHECKING, Annotated, Iterable

 import strawberry
 from strawberry import UNSET
 from strawberry.federation.schema_directives import Key

 from oauth2_lib.strawberry import authenticated_field
-from orchestrator.db import ProductTable
+from orchestrator.db import ProductBlockTable, ProductTable
 from orchestrator.domain.base import ProductModel
 from orchestrator.graphql.pagination import Connection
 from orchestrator.graphql.schemas.fixed_input import FixedInput
@@ -51,6 +51,23 @@ class ProductType:
         filter_by_with_related_subscriptions = (filter_by or []) + [GraphqlFilter(field="product", value=self.name)]
         return await resolve_subscriptions(info, filter_by_with_related_subscriptions, sort_by, first, after)

+    @strawberry.field(description="Returns list of all nested productblock names")  # type: ignore
+    async def all_pb_names(self) -> list[str]:
+
+        model = get_original_model(self, ProductTable)
+
+        def get_all_pb_names(product_blocks: list[ProductBlockTable]) -> Iterable[str]:
+            for product_block in product_blocks:
+                yield product_block.name
+
+                if product_block.depends_on:
+                    yield from get_all_pb_names(product_block.depends_on)
+
+        names: list[str] = list(get_all_pb_names(model.product_blocks))
+        names.sort()
+
+        return names
+
     @strawberry.field(description="Return product blocks")  # type: ignore
     async def product_blocks(self) -> list[Annotated["ProductBlock", strawberry.lazy(".product_block")]]:
         from orchestrator.graphql.schemas.product_block import ProductBlock
orchestrator/migrations/helpers.py
CHANGED

@@ -880,10 +880,10 @@ def delete_product(conn: sa.engine.Connection, name: str) -> None:
 RETURNING product_id
 ),
 deleted_p_pb AS (
-DELETE FROM product_product_blocks WHERE product_id
+DELETE FROM product_product_blocks WHERE product_id = ANY(SELECT product_id FROM deleted_p)
 ),
 deleted_pb_rt AS (
-DELETE FROM products_workflows WHERE product_id
+DELETE FROM products_workflows WHERE product_id = ANY(SELECT product_id FROM deleted_p)
 )
 SELECT * from deleted_p;
 """
@@ -911,10 +911,10 @@ def delete_product_block(conn: sa.engine.Connection, name: str) -> None:
 RETURNING product_block_id
 ),
 deleted_p_pb AS (
-DELETE FROM product_product_blocks WHERE product_block_id
+DELETE FROM product_product_blocks WHERE product_block_id =ANY(SELECT product_block_id FROM deleted_pb)
 ),
 deleted_pb_rt AS (
-DELETE FROM product_block_resource_types WHERE product_block_id
+DELETE FROM product_block_resource_types WHERE product_block_id =ANY(SELECT product_block_id FROM deleted_pb)
 )
 SELECT * from deleted_pb;
 """
@@ -968,7 +968,7 @@ def delete_resource_type(conn: sa.engine.Connection, resource_type: str) -> None
 RETURNING resource_type_id
 ),
 deleted_pb_rt AS (
-DELETE FROM product_block_resource_types WHERE resource_type_id
+DELETE FROM product_block_resource_types WHERE resource_type_id =ANY(SELECT resource_type_id FROM deleted_pb)
 )
 SELECT * from deleted_pb;
 """
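The corrected statements chain the cascading deletes to the ids returned by the first CTE via = ANY(SELECT ...). A standalone sketch of the same pattern follows; the opening DELETE and its WHERE clause are assumed from the delete_product(conn, name) signature, since they are not shown in the hunk above:

import sqlalchemy as sa

# Sketch only: mirrors the delete_product() CTE chain after the fix. The WHERE
# clause of the first DELETE is an assumption; the follow-up deletes reference
# the product_ids it returns instead of an unconstrained condition.
DELETE_PRODUCT_SQL = sa.text(
    """
    WITH deleted_p AS (
        DELETE FROM products WHERE name = :name
        RETURNING product_id
    ),
    deleted_p_pb AS (
        DELETE FROM product_product_blocks WHERE product_id = ANY(SELECT product_id FROM deleted_p)
    ),
    deleted_pb_rt AS (
        DELETE FROM products_workflows WHERE product_id = ANY(SELECT product_id FROM deleted_p)
    )
    SELECT * from deleted_p;
    """
)
# Usage with an open connection: conn.execute(DELETE_PRODUCT_SQL, {"name": "example product"})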
orchestrator/migrations/versions/schema/2025-02-12_bac6be6f2b4f_added_input_state_table.py
ADDED

@@ -0,0 +1,56 @@
+"""Added Input State Table.
+
+Revision ID: bac6be6f2b4f
+Revises: 4fjdn13f83ga
+Create Date: 2025-02-12 14:39:53.664284
+
+"""
+
+import sqlalchemy as sa
+import sqlalchemy_utils
+from alembic import op
+from sqlalchemy.dialects import postgresql
+
+from orchestrator import db
+
+# revision identifiers, used by Alembic.
+revision = "bac6be6f2b4f"
+down_revision = "4fjdn13f83ga"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table(
+        "input_states",
+        sa.Column(
+            "input_state_id",
+            sqlalchemy_utils.types.uuid.UUIDType(),
+            server_default=sa.text("uuid_generate_v4()"),
+            nullable=False,
+        ),
+        sa.Column("pid", sqlalchemy_utils.types.uuid.UUIDType(), nullable=False),
+        sa.Column("input_state", postgresql.JSONB(astext_type=sa.Text()), nullable=False),  # type: ignore
+        sa.Column(
+            "input_time",
+            db.models.UtcTimestamp(timezone=True),
+            server_default=sa.text("current_timestamp"),
+            nullable=False,
+        ),
+        sa.Column("input_type", sa.Enum("user_input", "initial_state", name="inputtype"), nullable=False),
+        sa.ForeignKeyConstraint(
+            ["pid"],
+            ["processes.pid"],
+        ),
+        sa.PrimaryKeyConstraint("input_state_id"),
+    )
+    op.create_index(op.f("ix_input_state_input_state_id"), "input_states", ["input_state_id"], unique=False)
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_index(op.f("ix_input_state_input_state_id"), table_name="input_states")
+    op.drop_table("input_states")
+    # ### end Alembic commands ###
orchestrator/services/celery.py
CHANGED
@@ -22,6 +22,7 @@ from kombu.exceptions import ConnectionError, OperationalError
 from orchestrator import app_settings
 from orchestrator.api.error_handling import raise_status
 from orchestrator.db import ProcessTable, db
+from orchestrator.services.input_state import store_input_state
 from orchestrator.services.processes import create_process, delete_process
 from orchestrator.targets import Target
 from orchestrator.workflows import get_workflow
@@ -53,9 +54,8 @@ def _celery_start_process(
     task_name = NEW_TASK if workflow.target == Target.SYSTEM else NEW_WORKFLOW
     trigger_task = get_celery_task(task_name)
     pstat = create_process(workflow_key, user_inputs, user)
-    tasks = pstat.state.s
     try:
-        result = trigger_task.delay(pstat.process_id, workflow_key,
+        result = trigger_task.delay(pstat.process_id, workflow_key, user)
         _block_when_testing(result)
         return pstat.process_id
     except (ConnectionError, OperationalError) as e:
@@ -82,9 +82,12 @@ def _celery_resume_process(

     task_name = RESUME_TASK if workflow.target == Target.SYSTEM else RESUME_WORKFLOW
     trigger_task = get_celery_task(task_name)
+
+    user_inputs = user_inputs or [{}]
+    store_input_state(pstat.process_id, user_inputs, "user_input")
     try:
         _celery_set_process_status_resumed(process)
-        result = trigger_task.delay(pstat.process_id,
+        result = trigger_task.delay(pstat.process_id, user)
         _block_when_testing(result)

     return pstat.process_id
orchestrator/services/input_state.py
ADDED

@@ -0,0 +1,76 @@
+# Copyright 2019-2025 SURF.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from typing import Any, Literal
+from uuid import UUID
+
+import structlog
+from sqlalchemy import select
+
+from orchestrator.db import db
+from orchestrator.db.models import InputStateTable
+
+logger = structlog.get_logger(__name__)
+
+InputType = Literal["initial_state", "user_input"]
+
+
+def retrieve_input_state(process_id: UUID, input_type: InputType) -> InputStateTable:
+    """Get user input.
+
+    Args:
+        process_id: Process ID
+        input_type: The type of the input.
+
+    Returns:
+        User input table
+
+    """
+
+    res: InputStateTable | None = db.session.scalars(
+        select(InputStateTable)
+        .filter(InputStateTable.process_id == process_id)
+        .filter(InputStateTable.input_type == input_type)
+        .order_by(InputStateTable.input_time.asc())
+    ).first()
+
+    if res:
+        logger.debug("Retrieved input state", process_id=process_id, input_state=res, input_type=input_type)
+        return res
+    raise ValueError(f"No input state for pid: {process_id}")
+
+
+def store_input_state(
+    process_id: UUID,
+    input_state: dict[str, Any] | list[dict[str, Any]],
+    input_type: InputType,
+) -> None:
+    """Store user input state.
+
+    Args:
+        process_id: Process ID
+        input_state: Dictionary of user input state
+        input_type: The type of the input.
+
+    Returns:
+        None
+
+    """
+    logger.debug("Store input state", process_id=process_id, input_state=input_state, input_type=input_type)
+    db.session.add(
+        InputStateTable(
+            process_id=process_id,
+            input_state=input_state,
+            input_type=input_type,
+        )
+    )
+    db.session.commit()
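A minimal sketch of how the two new service functions are combined elsewhere in this release (create_process and _celery_resume_process store the state, the Celery tasks read it back). It assumes a configured database session and an existing process row, since input_states.pid carries a foreign key to processes.pid; the state dictionary and helper name are illustrative:

from uuid import UUID

from orchestrator.services.input_state import retrieve_input_state, store_input_state


def save_and_reload(process_id: UUID) -> dict | list:
    # process_id must be the pid of an existing ProcessTable row (FK constraint).
    store_input_state(process_id, {"customer_id": "..."}, "initial_state")
    # Worker side: read the stored state back instead of receiving it in the task payload.
    return retrieve_input_state(process_id, "initial_state").input_state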
orchestrator/services/processes.py
CHANGED

@@ -18,7 +18,7 @@ from typing import Any
 from uuid import UUID, uuid4

 import structlog
-from deepmerge import Merger
+from deepmerge.merger import Merger
 from sqlalchemy import delete, select
 from sqlalchemy.exc import SQLAlchemyError
 from sqlalchemy.orm import joinedload
@@ -35,6 +35,7 @@ from orchestrator.db import (
 )
 from orchestrator.distlock import distlock_manager
 from orchestrator.schemas.engine_settings import WorkerStatus
+from orchestrator.services.input_state import store_input_state
 from orchestrator.services.settings import get_engine_settings_for_update
 from orchestrator.services.workflows import get_workflow_by_name
 from orchestrator.settings import ExecutorType, app_settings
@@ -450,7 +451,7 @@ def create_process(
     )

     _db_create_process(pstat)
-
+    store_input_state(process_id, state | initial_state, "initial_state")
     return pstat


@@ -514,7 +515,7 @@ def thread_resume_process(

     if user_input:
         pstat.update(state=pstat.state.map(lambda state: StateMerger.merge(state, user_input)))
-
+    store_input_state(pstat.process_id, user_input, "user_input")
     # enforce an update to the process status to properly show the process
     process.last_status = ProcessStatus.RUNNING
     db.session.add(process)
@@ -529,8 +530,8 @@ def thread_validate_workflow(validation_workflow: str, json: list[State] | None)


 THREADPOOL_EXECUTION_CONTEXT: dict[str, Callable] = {
-    "start":
-    "resume":
+    "start": thread_start_process,
+    "resume": thread_resume_process,
     "validate": thread_validate_workflow,
 }

orchestrator/services/tasks.py
CHANGED
@@ -23,6 +23,7 @@ from kombu.serialization import registry

 from orchestrator.api.error_handling import raise_status
 from orchestrator.schemas.engine_settings import WorkerStatus
+from orchestrator.services.input_state import retrieve_input_state
 from orchestrator.services.processes import (
     _get_process,
     _run_process_async,
@@ -118,24 +119,28 @@ def initialise_celery(celery: Celery) -> None:  # noqa: C901
     celery_task = partial(celery.task, log=local_logger, serializer="orchestrator-json")

     @celery_task(name=NEW_TASK)  # type: ignore
-    def new_task(process_id, workflow_key: str,
+    def new_task(process_id, workflow_key: str, user: str) -> UUID | None:
         local_logger.info("Start task", process_id=process_id, workflow_key=workflow_key)
+        state = retrieve_input_state(process_id, "initial_state").input_state
         return start_process(process_id, workflow_key, state=state, user=user)

     @celery_task(name=NEW_WORKFLOW)  # type: ignore
-    def new_workflow(process_id, workflow_key: str,
+    def new_workflow(process_id, workflow_key: str, user: str) -> UUID | None:
         local_logger.info("Start workflow", process_id=process_id, workflow_key=workflow_key)
+        state = retrieve_input_state(process_id, "initial_state").input_state
         return start_process(process_id, workflow_key, state=state, user=user)

     @celery_task(name=RESUME_TASK)  # type: ignore
-    def resume_task(process_id: UUID,
+    def resume_task(process_id: UUID, user: str) -> UUID | None:
         local_logger.info("Resume task", process_id=process_id)
-
+        state = retrieve_input_state(process_id, "user_input").input_state
+        return resume_process(process_id, user_inputs=state, user=user)

     @celery_task(name=RESUME_WORKFLOW)  # type: ignore
-    def resume_workflow(process_id: UUID,
+    def resume_workflow(process_id: UUID, user: str) -> UUID | None:
         local_logger.info("Resume workflow", process_id=process_id)
-
+        state = retrieve_input_state(process_id, "user_input").input_state
+        return resume_process(process_id, user_inputs=state, user=user)


 class CeleryJobWorkerStatus(WorkerStatus):
orchestrator/services/workflows.py
CHANGED

@@ -1,3 +1,16 @@
+# Copyright 2019-2025 SURF.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 from collections.abc import Iterable

 from sqlalchemy import Select, select
orchestrator/settings.py
CHANGED
@@ -13,16 +13,21 @@

 import secrets
 import string
+import warnings
 from pathlib import Path
 from typing import Literal

-from pydantic import PostgresDsn, RedisDsn
+from pydantic import Field, NonNegativeInt, PostgresDsn, RedisDsn
 from pydantic_settings import BaseSettings

 from oauth2_lib.settings import oauth2lib_settings
 from pydantic_forms.types import strEnum


+class OrchestratorDeprecationWarning(DeprecationWarning):
+    pass
+
+
 class ExecutorType(strEnum):
     WORKER = "celery"
     THREADPOOL = "threadpool"
@@ -49,7 +54,7 @@ class AppSettings(BaseSettings):
     EXECUTOR: str = ExecutorType.THREADPOOL
     WORKFLOWS_SWAGGER_HOST: str = "localhost"
     WORKFLOWS_GUI_URI: str = "http://localhost:3000"
-    DATABASE_URI: PostgresDsn = "postgresql://nwa:nwa@localhost/orchestrator-core"  # type: ignore
+    DATABASE_URI: PostgresDsn = "postgresql+psycopg://nwa:nwa@localhost/orchestrator-core"  # type: ignore
     MAX_WORKERS: int = 5
     MAIL_SERVER: str = "localhost"
     MAIL_PORT: int = 25
@@ -57,6 +62,9 @@ class AppSettings(BaseSettings):
     CACHE_URI: RedisDsn = "redis://localhost:6379/0"  # type: ignore
     CACHE_DOMAIN_MODELS: bool = False
     CACHE_HMAC_SECRET: str | None = None  # HMAC signing key, used when pickling results in the cache
+    REDIS_RETRY_COUNT: NonNegativeInt = Field(
+        2, description="Number of retries for redis connection errors/timeouts, 0 to disable"
+    )  # More info: https://redis-py.readthedocs.io/en/stable/retry.html
     ENABLE_DISTLOCK_MANAGER: bool = True
     DISTLOCK_BACKEND: str = "memory"
     CC_NOC: int = 0
@@ -85,6 +93,22 @@ class AppSettings(BaseSettings):
     VALIDATE_OUT_OF_SYNC_SUBSCRIPTIONS: bool = False
     FILTER_BY_MODE: Literal["partial", "exact"] = "exact"

+    def __init__(self) -> None:
+        super(AppSettings, self).__init__()
+        self.DATABASE_URI = PostgresDsn(convert_database_uri(str(self.DATABASE_URI)))
+
+
+def convert_database_uri(db_uri: str) -> str:
+    if db_uri.startswith(("postgresql://", "postgresql+psycopg2://")):
+        db_uri = "postgresql+psycopg" + db_uri[db_uri.find("://") :]
+        warnings.filterwarnings("always", category=OrchestratorDeprecationWarning)
+        warnings.warn(
+            "DATABASE_URI converted to postgresql+psycopg:// format, please update your enviroment variable",
+            OrchestratorDeprecationWarning,
+            stacklevel=2,
+        )
+    return db_uri
+

 app_settings = AppSettings()

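A small illustration of the conversion helper added above; the DSNs are the defaults from this file and the printed results follow directly from the string rewrite in convert_database_uri:

from orchestrator.settings import convert_database_uri

# Legacy psycopg2-style DSNs are rewritten to the psycopg 3 driver and emit an
# OrchestratorDeprecationWarning; already-converted DSNs pass through unchanged.
print(convert_database_uri("postgresql://nwa:nwa@localhost/orchestrator-core"))
# postgresql+psycopg://nwa:nwa@localhost/orchestrator-core
print(convert_database_uri("postgresql+psycopg://nwa:nwa@localhost/orchestrator-core"))
# postgresql+psycopg://nwa:nwa@localhost/orchestrator-core (unchanged)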
orchestrator/utils/redis.py
CHANGED
@@ -17,22 +17,22 @@ from os import getenv
 from typing import Any, Callable
 from uuid import UUID

-import redis.exceptions
 from anyio import CancelScope, get_cancelled_exc_class
-from redis import Redis
 from redis.asyncio import Redis as AIORedis
 from redis.asyncio.client import Pipeline, PubSub
-from redis.asyncio.retry import Retry
-from redis.backoff import EqualJitterBackoff
 from structlog import get_logger

 from orchestrator.services.subscriptions import _generate_etag
 from orchestrator.settings import app_settings
 from orchestrator.utils.json import PY_JSON_TYPES, json_dumps, json_loads
+from orchestrator.utils.redis_client import (
+    create_redis_asyncio_client,
+    create_redis_client,
+)

 logger = get_logger(__name__)

-cache =
+cache = create_redis_client(app_settings.CACHE_URI)

 ONE_WEEK = 3600 * 24 * 7

@@ -136,12 +136,7 @@ class RedisBroadcast:
     client: AIORedis

     def __init__(self, redis_url: str):
-        self.client =
-            redis_url,
-            retry_on_error=[redis.exceptions.ConnectionError],
-            retry_on_timeout=True,
-            retry=Retry(EqualJitterBackoff(base=0.05), 2),
-        )
+        self.client = create_redis_asyncio_client(redis_url)
         self.redis_url = redis_url

     @asynccontextmanager
orchestrator/utils/redis_client.py
ADDED

@@ -0,0 +1,35 @@
+import redis.asyncio
+import redis.client
+import redis.exceptions
+from pydantic import RedisDsn
+from redis import Redis
+from redis.asyncio import Redis as AIORedis
+from redis.asyncio.retry import Retry as AIORetry
+from redis.backoff import EqualJitterBackoff
+from redis.retry import Retry
+
+from orchestrator.settings import app_settings
+
+REDIS_RETRY_ON_ERROR = [redis.exceptions.ConnectionError]
+REDIS_RETRY_ON_TIMEOUT = True
+REDIS_RETRY_BACKOFF = EqualJitterBackoff(base=0.05)
+
+
+def create_redis_client(redis_url: str | RedisDsn) -> redis.client.Redis:
+    """Create sync Redis client for the given Redis DSN with retry handling for connection errors and timeouts."""
+    return Redis.from_url(
+        str(redis_url),
+        retry_on_error=REDIS_RETRY_ON_ERROR,  # type: ignore[arg-type]
+        retry_on_timeout=REDIS_RETRY_ON_TIMEOUT,
+        retry=Retry(REDIS_RETRY_BACKOFF, app_settings.REDIS_RETRY_COUNT),
+    )
+
+
+def create_redis_asyncio_client(redis_url: str | RedisDsn) -> redis.asyncio.client.Redis:
+    """Create async Redis client for the given Redis DSN with retry handling for connection errors and timeouts."""
+    return AIORedis.from_url(
+        str(redis_url),
+        retry_on_error=REDIS_RETRY_ON_ERROR,  # type: ignore[arg-type]
+        retry_on_timeout=REDIS_RETRY_ON_TIMEOUT,
+        retry=AIORetry(REDIS_RETRY_BACKOFF, app_settings.REDIS_RETRY_COUNT),
+    )
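A minimal usage sketch of the new factory helpers, mirroring the call sites updated in this release; it assumes a reachable Redis instance at app_settings.CACHE_URI:

import asyncio

from orchestrator.settings import app_settings
from orchestrator.utils.redis_client import create_redis_asyncio_client, create_redis_client

# Sync client: retries connection errors/timeouts REDIS_RETRY_COUNT times with
# equal-jitter backoff before raising.
cache = create_redis_client(app_settings.CACHE_URI)
cache.ping()


async def check_async() -> None:
    # Async client with the same retry policy, built on redis.asyncio.
    client = create_redis_asyncio_client(app_settings.CACHE_URI)
    try:
        await client.ping()
    finally:
        await client.aclose()


asyncio.run(check_async())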
{orchestrator_core-3.0.0rc1.dist-info → orchestrator_core-3.1.0rc1.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
 Name: orchestrator-core
-Version: 3.
+Version: 3.1.0rc1
 Summary: This is the orchestrator workflow engine.
 Requires-Python: >=3.11,<3.14
 Classifier: Intended Audience :: Information Technology
@@ -27,26 +27,27 @@ Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Topic :: Internet :: WWW/HTTP :: HTTP Servers
 Classifier: Topic :: Internet :: WWW/HTTP
+License-File: LICENSE
 Requires-Dist: alembic==1.14.1
 Requires-Dist: anyio>=3.7.0
 Requires-Dist: click==8.*
 Requires-Dist: deprecated
-Requires-Dist: deepmerge==
+Requires-Dist: deepmerge==2.0
 Requires-Dist: fastapi~=0.115.2
 Requires-Dist: fastapi-etag==0.4.0
 Requires-Dist: more-itertools~=10.6.0
 Requires-Dist: itsdangerous
 Requires-Dist: Jinja2==3.1.5
 Requires-Dist: orjson==3.10.15
-Requires-Dist:
+Requires-Dist: psycopg[binary]==3.2.5
 Requires-Dist: pydantic[email]~=2.8.2
-Requires-Dist: pydantic-settings~=2.
+Requires-Dist: pydantic-settings~=2.8.0
 Requires-Dist: python-dateutil==2.8.2
 Requires-Dist: python-rapidjson>=1.18,<1.21
-Requires-Dist: pytz==
+Requires-Dist: pytz==2025.1
 Requires-Dist: redis==5.0.3
 Requires-Dist: schedule==1.1.0
-Requires-Dist: sentry-sdk[fastapi]~=2.
+Requires-Dist: sentry-sdk[fastapi]~=2.22.0
 Requires-Dist: SQLAlchemy==2.0.38
 Requires-Dist: SQLAlchemy-Utils==0.41.2
 Requires-Dist: structlog
@@ -56,7 +57,7 @@ Requires-Dist: nwa-stdlib~=1.9.0
 Requires-Dist: oauth2-lib~=2.4.0
 Requires-Dist: tabulate==0.9.0
 Requires-Dist: strawberry-graphql>=0.246.2
-Requires-Dist: pydantic-forms~=1.
+Requires-Dist: pydantic-forms~=1.3.0
 Requires-Dist: celery~=5.4.0 ; extra == "celery"
 Requires-Dist: toml ; extra == "dev"
 Requires-Dist: bumpversion ; extra == "dev"
@@ -81,8 +82,9 @@ Requires-Dist: dirty-equals ; extra == "test"
 Requires-Dist: jsonref ; extra == "test"
 Requires-Dist: mypy==1.9 ; extra == "test"
 Requires-Dist: pyinstrument ; extra == "test"
-Requires-Dist: pytest==8.3.
+Requires-Dist: pytest==8.3.4 ; extra == "test"
 Requires-Dist: pytest-asyncio==0.21.2 ; extra == "test"
+Requires-Dist: pytest-codspeed ; extra == "test"
 Requires-Dist: pytest-cov ; extra == "test"
 Requires-Dist: pytest-httpx ; extra == "test"
 Requires-Dist: pytest-xdist ; extra == "test"
{orchestrator_core-3.0.0rc1.dist-info → orchestrator_core-3.1.0rc1.dist-info}/RECORD
CHANGED

@@ -1,10 +1,10 @@
-orchestrator/__init__.py,sha256=
+orchestrator/__init__.py,sha256=44uEQENUfGzHI2rM8eaZMGP3Zt5u-wn0O2SF5kiHMdk,1058
 orchestrator/app.py,sha256=8GMzoHjdR0bkgRBCejiG8nIUjeo43f12I3WNNZ89pKE,11659
 orchestrator/exception_handlers.py,sha256=UsW3dw8q0QQlNLcV359bIotah8DYjMsj2Ts1LfX4ClY,1268
 orchestrator/log_config.py,sha256=1tPRX5q65e57a6a_zEii_PFK8SzWT0mnA5w2sKg4hh8,1853
 orchestrator/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 orchestrator/security.py,sha256=_W_wFkjmlwVwwHRsql69iMoqRvDCiaA63i5rvRHSrZ0,2414
-orchestrator/settings.py,sha256=
+orchestrator/settings.py,sha256=lrNKPtJMxZtbEZcUZ1MDGEpIDCJv_swWVoVJvwcooCY,4614
 orchestrator/targets.py,sha256=q_IMCdVUUYWcyKHqyls38fJPveJDBNfSzMKj_U2hLsk,768
 orchestrator/types.py,sha256=4vDeL5teRnugXoet3O2dMv8WwTsEyimrIfzagx9jQRo,15451
 orchestrator/version.py,sha256=b58e08lxs47wUNXv0jXFO_ykpksmytuzEXD4La4W-NQ,1366
@@ -21,7 +21,7 @@ orchestrator/api/api_v1/endpoints/processes.py,sha256=VPNqzogjgK9Y-70b9r-tqPSJD-
 orchestrator/api/api_v1/endpoints/product_blocks.py,sha256=kZ6ywIOsS_S2qGq7RvZ4KzjvaS1LmwbGWR37AKRvWOw,2146
 orchestrator/api/api_v1/endpoints/products.py,sha256=BfFtwu9dZXEQbtKxYj9icc73GKGvAGMR5ytyf41nQlQ,3081
 orchestrator/api/api_v1/endpoints/resource_types.py,sha256=gGyuaDyOD0TAVoeFGaGmjDGnQ8eQQArOxKrrk4MaDzA,2145
-orchestrator/api/api_v1/endpoints/settings.py,sha256=
+orchestrator/api/api_v1/endpoints/settings.py,sha256=OVz0VXPAPX2BMiIjixmvPwXowRxbHb9wZ3OrbguIqPc,6218
 orchestrator/api/api_v1/endpoints/subscription_customer_descriptions.py,sha256=Elu4DVJoNtUFq_b3pG1Ws8StrUIo_jTViff2TJqe6ZU,3398
 orchestrator/api/api_v1/endpoints/subscriptions.py,sha256=s0nzWY1n8J1Ep-f6LuhRj_LX3shfCq7PsMmHf0_Rzsw,8716
 orchestrator/api/api_v1/endpoints/translations.py,sha256=dIWh_fCnZZUxJoGiNeJ49DK_xpf75IpR_0EIMSvzIvY,963
@@ -58,7 +58,7 @@ orchestrator/cli/generator/custom_templates/additional_terminate_steps.j2,sha256
 orchestrator/cli/generator/generator/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 orchestrator/cli/generator/generator/enums.py,sha256=ztGxHzpq7l4HDSZswH8UDJlf2374tj_-Rzf8t-sub1s,2007
 orchestrator/cli/generator/generator/helpers.py,sha256=IoHXacEebef7MhUseTVkj05fRryyGMDH94Ai0nGq-nw,9838
-orchestrator/cli/generator/generator/migration.py,sha256=
+orchestrator/cli/generator/generator/migration.py,sha256=zWSZk42AayXj65mPIYKczRKVlPSXTsTM-pBf7lis2F8,7202
 orchestrator/cli/generator/generator/product.py,sha256=W930c-9C8k0kW7I8_SC4mWf045neYcfFpkck5SwHeNQ,2079
 orchestrator/cli/generator/generator/product_block.py,sha256=h552YZTuehtaux6PKw5GKWAmBQ6cStOSY4TbaJ1Kcq8,4802
 orchestrator/cli/generator/generator/settings.py,sha256=_IhRnQ7bpGjqYtFo-OiLky25IKCibdghC6pkHmPIPoI,1379
@@ -106,7 +106,7 @@ orchestrator/db/database.py,sha256=MU_w_e95ho2dVb2JDnt_KFYholx___XDkiQXbc8wCkI,1
 orchestrator/db/helpers.py,sha256=L8kEdnSSNGnUpZhdeGx2arCodakWN8vSpKdfjoLuHdY,831
 orchestrator/db/listeners.py,sha256=UBPYcH0FE3a7aZQu_D0O_JMXpXIRYXC0gjSAvlv5GZo,1142
 orchestrator/db/loaders.py,sha256=escBOUNf5bHmjIuNH37fGgNSeZLzMiJvQgQFy4r4MYY,6244
-orchestrator/db/models.py,sha256=
+orchestrator/db/models.py,sha256=LOdqw2G45TGV2Bkukj2GgXtc4nRYk1rARE0wDk_QZUE,26850
 orchestrator/db/filters/__init__.py,sha256=RUj6P0XxEBhYj0SN5wH5-Vf_Wt_ilZR_n9DSar5m9oM,371
 orchestrator/db/filters/filters.py,sha256=55RtpQwM2rhrk4A6CCSeSXoo-BT9GnQoNTryA8CtLEg,5020
 orchestrator/db/filters/process.py,sha256=xvGhyfo_MZ1xhLvFC6yULjcT4mJk0fKc1glJIYgsWLE,4018
@@ -137,7 +137,7 @@ orchestrator/distlock/__init__.py,sha256=0uCW-4efWGbU4RXSb7t3U1yA2T8z77OGgb9SDNe
 orchestrator/distlock/distlock_manager.py,sha256=VVfBpOnk574JncfHwS6sPavKwPxCgAneDqZNT8fVWNw,2508
 orchestrator/distlock/managers/__init__.py,sha256=ImIkNsrXcyE7-NgRWqEhUXUuUzda9KwcDkhebipfSdI,571
 orchestrator/distlock/managers/memory_distlock_manager.py,sha256=HWQafcVKBF-Cka_wukZZ1GM69AWPVOpJPje3quIebQ8,3114
-orchestrator/distlock/managers/redis_distlock_manager.py,sha256=
+orchestrator/distlock/managers/redis_distlock_manager.py,sha256=DXtMhC8qtxiFO6xU9qYXHZQnCLjlmGBpeyfLA0vbRP0,3369
 orchestrator/domain/__init__.py,sha256=Rnt9XXHasAgieQiLT0JhUFRrysa9EIubvzcd5kk3Gvc,894
 orchestrator/domain/base.py,sha256=cMBJps91vtJJPXQUk0Ejps-K6DaEdkK6GTGdvtPpDnA,61953
 orchestrator/domain/customer_description.py,sha256=v7o6TTN4oc6bWHZU-jCT-fUYvkeYahbpXOwlKXofuI8,3360
@@ -168,7 +168,7 @@ orchestrator/graphql/resolvers/process.py,sha256=8UQaw3Mqjwubq_V7iW4O2KsFpqlPKLy
 orchestrator/graphql/resolvers/product.py,sha256=PQCurvc2NVk5sspdzHUrpcDgEmn7i0wA-jvqpvCMkqk,2742
 orchestrator/graphql/resolvers/product_block.py,sha256=BAYW66KT_1mozNidfBxBI7l3_e8-ozC0g2qv3NLniqE,2909
 orchestrator/graphql/resolvers/resource_type.py,sha256=NRDKPFqwktOvVLLm2QCLxVPwzXIYEMJKUN3DS_kS49o,2926
-orchestrator/graphql/resolvers/settings.py,sha256=
+orchestrator/graphql/resolvers/settings.py,sha256=xVYqxo-EWQ24F4hUHm9OZeN9vsqQXJzIJ1_HF4Ci9Cs,3777
 orchestrator/graphql/resolvers/subscription.py,sha256=57niFv-JCro_wm0peJ5Ne04F2WIPuJ-Lx2h8yd9qubA,6541
 orchestrator/graphql/resolvers/version.py,sha256=qgwe1msPOexeg3RHCscJ8s45vNfMhYh9ZKyCZ3MNw30,809
 orchestrator/graphql/resolvers/workflow.py,sha256=YUwPklwYesgmRS4d0eIQdgVmkyhgGbkQZ9uC1Oe8EyA,2776
@@ -179,7 +179,7 @@ orchestrator/graphql/schemas/errors.py,sha256=VRl-Zd1FHMnscyozhfxzqeEUZ0ERAWum_Y
 orchestrator/graphql/schemas/fixed_input.py,sha256=1yqYHADQRgHz8OIP7ObYsPFS-gmzfkCvEO0a-KKf7zI,513
 orchestrator/graphql/schemas/helpers.py,sha256=Kpj4kIbmoKKN35bdgUSwQvGUIbeg7VJAVMEq65YS_ik,346
 orchestrator/graphql/schemas/process.py,sha256=DGsOJwn2b6sz1q2CZ1aUPiazigqFsMxCkyN0Jv5LKwM,3775
-orchestrator/graphql/schemas/product.py,sha256=
+orchestrator/graphql/schemas/product.py,sha256=Bj9hBYLNuH3O9r3ybU0i0PFEVlMU8F_nOFyasFhaNaw,4199
 orchestrator/graphql/schemas/product_block.py,sha256=Qk9cbA6vm7ZPrhdgPHatKRuy6TytBmxSr97McEOxAu8,2860
 orchestrator/graphql/schemas/resource_type.py,sha256=s5d_FwQXL2-Sc-IDUxTJun5qFQ4zOP4-XcHF9ql-t1g,898
 orchestrator/graphql/schemas/settings.py,sha256=drhm5VcLmUbiYAk6WUSJcyJqjNM96E6GvpxVdPAobnA,999
@@ -199,7 +199,7 @@ orchestrator/graphql/utils/to_graphql_result_page.py,sha256=8ObkJP8reVf-TQOQVPKv
 orchestrator/migrations/README,sha256=heMzebYwlGhnE8_4CWJ4LS74WoEZjBy-S-mIJRxAEKI,39
 orchestrator/migrations/alembic.ini,sha256=kMoADqhGeubU8xanILNaqm4oixLy9m4ngYtdGpZcc7I,873
 orchestrator/migrations/env.py,sha256=AwlgBPYbV2hr5rHNwlOPJ5rs-vRyfmzcWyxae0btpZ4,3382
-orchestrator/migrations/helpers.py,sha256=
+orchestrator/migrations/helpers.py,sha256=U-b64Gp6VBq5sTDN0fqrG8mbXcpncCFVgYObW9y7ffs,43778
 orchestrator/migrations/script.py.mako,sha256=607Zrgp-Z-m9WGLt4wewN1QDOmHeifxcePUdADkSZyM,510
 orchestrator/migrations/templates/alembic.ini.j2,sha256=jA-QykVparwWSNt5XDP0Zk7epLOhK7D87Af-i2shJV4,905
 orchestrator/migrations/templates/env.py.j2,sha256=RfLAQItZ56Jlzwi6LJfBo92m1-th_bdfkFKD1mwTZIE,2821
@@ -223,6 +223,7 @@ orchestrator/migrations/versions/schema/2023-12-06_048219045729_add_workflow_id_
 orchestrator/migrations/versions/schema/2024-09-27_460ec6748e37_add_uuid_search_workaround.py,sha256=GzHBzOwOc6FaO1kYwoSNIhb8sKstXo8Cfxdqy3Rmeg4,972
 orchestrator/migrations/versions/schema/2024-09-27_460ec6748e37_add_uuid_search_workaround.sql,sha256=mhPnqjG5H3W8_BD7w5tYzXUQSxFOM7Rahn_MudEPTIE,5383
 orchestrator/migrations/versions/schema/2025-01-08_4c5859620539_add_version_column_to_subscription.py,sha256=xAhe74U0ZiVRo9Z8Uq7491RBbATMMUnYpTBjbG-BYL0,1690
+orchestrator/migrations/versions/schema/2025-02-12_bac6be6f2b4f_added_input_state_table.py,sha256=BPtx8blYEotaJv9Gnzq9Pf4ihOyzx4tfk9qwr81i8MU,1769
 orchestrator/migrations/versions/schema/2025-10-19_4fjdn13f83ga_add_validate_product_type_task.py,sha256=O0GfCISIDnyohGf3Ot_2HKedGRbMqLVox6t7Wd3PMvo,894
 orchestrator/schedules/__init__.py,sha256=JnnaglfK1qYUBKI6Dd9taV-tCZIPlAdAkHtnkJDMXxY,1066
 orchestrator/schedules/resume_workflows.py,sha256=kSotzTAXjX7p9fpSYiGOpuxuTQfv54eRFAe0YSG0DHc,832
@@ -243,18 +244,19 @@ orchestrator/schemas/subscription.py,sha256=1SKrhw25-HtTp39lM05R9gynr9GprMxQol4G
 orchestrator/schemas/subscription_descriptions.py,sha256=Ft_jw1U0bf9Z0U8O4OWfLlcl0mXCVT_qYVagBP3GbIQ,1262
 orchestrator/schemas/workflow.py,sha256=w-CaRPp9AAddhnd8o_0jPaey1Vnnh-s-A5s5kWlR2pI,1977
 orchestrator/services/__init__.py,sha256=GyHNfEFCGKQwRiN6rQmvSRH2iYX7npjMZn97n8XzmLU,571
-orchestrator/services/celery.py,sha256=
+orchestrator/services/celery.py,sha256=DHruqocnORNZUca9WDIti9GXYk9Q38BFyeJy7-N7l3c,4760
 orchestrator/services/fixed_inputs.py,sha256=kyz7s2HLzyDulvcq-ZqefTw1om86COvyvTjz0_5CmgI,876
+orchestrator/services/input_state.py,sha256=HF7wl9fWdaAW8pdCCqbuYoKyNj8dY0g8Ff8vXis8z5A,2211
 orchestrator/services/process_broadcast_thread.py,sha256=D44YbjF8mRqGuznkRUV4SoRn1J0lfy_x1H508GnSVlU,4649
-orchestrator/services/processes.py,sha256=
+orchestrator/services/processes.py,sha256=pucntLu1f-2ZTgSQG7l3qDX5zQGFhat4-iaq7ecglGo,27618
 orchestrator/services/products.py,sha256=w6b6sSA3MstmbM_YN8xWEvkb_YnuCQFph48wYU3_Lx4,1935
 orchestrator/services/resource_types.py,sha256=_QBy_JOW_X3aSTqH0CuLrq4zBJL0p7Q-UDJUcuK2_qc,884
 orchestrator/services/settings.py,sha256=u-834F4KWloXS8zi7R9mp-D3cjl-rbVjKJRU35IqhXo,2723
 orchestrator/services/subscription_relations.py,sha256=9C126TUfFvyBe7y4x007kH_dvxJ9pZ1zSnaWeH6HC5k,12261
 orchestrator/services/subscriptions.py,sha256=yEeZbgXqrfAgo-l0Tz_NtNJHG5-CkXa8-l76gZ_aw6A,26209
-orchestrator/services/tasks.py,sha256=
+orchestrator/services/tasks.py,sha256=LYThlemDTI5fK-whc7qciJuukTGyprDR-UbQHweKsX8,6567
 orchestrator/services/translations.py,sha256=GyP8soUFGej8AS8uulBsk10CCK6Kwfjv9AHMFm3ElQY,1713
-orchestrator/services/workflows.py,sha256=
+orchestrator/services/workflows.py,sha256=oH7klit4kv2NGo-BACWA0ZtajVMSJAxG5m-kM6TXIMI,3742
 orchestrator/utils/__init__.py,sha256=GyHNfEFCGKQwRiN6rQmvSRH2iYX7npjMZn97n8XzmLU,571
 orchestrator/utils/crypt.py,sha256=18eNamYWMllPkxyRtWIde3FDr3rSF74R5SAL6WsCj9Y,5584
 orchestrator/utils/datetime.py,sha256=a1WQ_yvu7MA0TiaRpC5avwbOSFdrj4eMrV4a7I2sD5Q,1477
@@ -268,7 +270,8 @@ orchestrator/utils/get_subscription_dict.py,sha256=fkgDM54hn5YGUP9_2MOcJApJK1Z6c
 orchestrator/utils/get_updated_properties.py,sha256=egVZ0R5LNJ4e51Z8SXlU8cmb4tXxG-xb1d7OKwh-7xI,1322
 orchestrator/utils/helpers.py,sha256=NjUF3IvWdnLulliP8-JQvGGGpHrh0vs0Vm092ynw-ss,3212
 orchestrator/utils/json.py,sha256=7386sdqkrKYyy4sbn5NscwctH_v1hLyw5172P__rU3g,8341
-orchestrator/utils/redis.py,sha256=
+orchestrator/utils/redis.py,sha256=E2vrMO3uQHb4nJENgA3WnpB0iw2C615YMuaWT-4gqoI,7027
+orchestrator/utils/redis_client.py,sha256=9rhsvedjK_CyClAjUicQyge0mVIViATqKFGZyjBY3XA,1384
 orchestrator/utils/search_query.py,sha256=ji5LHtrzohGz6b1IG41cnPdpWXzLEzz4SGWgHly_yfU,16205
 orchestrator/utils/state.py,sha256=DLHBnpEjhHQNeBGYV6H6geqZclToeMuWwqU26TVy220,13185
 orchestrator/utils/strings.py,sha256=N0gWjmQaMjE9_99VtRvRaU8IBLTKMgBKSXcTZ9TpWAg,1077
@@ -288,7 +291,7 @@ orchestrator/workflows/tasks/resume_workflows.py,sha256=R0I3jxGToiqDr5mF3YjDd6dN
 orchestrator/workflows/tasks/validate_product_type.py,sha256=5FwhRQyMNgtys5DM846EIIY0uXKvnSYy3Orf7lOg0DA,3176
 orchestrator/workflows/tasks/validate_products.py,sha256=5uXX7MXMDDP13cXRvfLDNvvCp4nG7zLQBm_IYdf8BSs,8513
 orchestrator/workflows/translations/en-GB.json,sha256=ST53HxkphFLTMjFHonykDBOZ7-P_KxksktZU3GbxLt0,846
-orchestrator_core-3.
-orchestrator_core-3.
-orchestrator_core-3.
-orchestrator_core-3.
+orchestrator_core-3.1.0rc1.dist-info/licenses/LICENSE,sha256=b-aA5OZQuuBATmLKo_mln8CQrDPPhg3ghLzjPjLn4Tg,11409
+orchestrator_core-3.1.0rc1.dist-info/WHEEL,sha256=_2ozNFCLWc93bK4WKHCO-eDUENDlo-dgc9cU3qokYO4,82
+orchestrator_core-3.1.0rc1.dist-info/METADATA,sha256=HWbNmbgLR-tuB1ifP702BFVr7x4La_ymTtl678oxtwU,4993
+orchestrator_core-3.1.0rc1.dist-info/RECORD,,
{orchestrator_core-3.0.0rc1.dist-info → orchestrator_core-3.1.0rc1.dist-info/licenses}/LICENSE
RENAMED
File without changes