orchestrator-core 2.10.0rc2__py3-none-any.whl → 3.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- orchestrator/__init__.py +1 -1
- orchestrator/api/api_v1/endpoints/processes.py +1 -1
- orchestrator/api/api_v1/endpoints/settings.py +2 -1
- orchestrator/cli/generator/templates/create_product.j2 +2 -1
- orchestrator/cli/generator/templates/modify_product.j2 +2 -1
- orchestrator/cli/generator/templates/shared_workflows.j2 +2 -1
- orchestrator/cli/generator/templates/terminate_product.j2 +1 -1
- orchestrator/cli/generator/templates/test_create_workflow.j2 +0 -1
- orchestrator/cli/generator/templates/test_modify_workflow.j2 +1 -2
- orchestrator/cli/generator/templates/test_terminate_workflow.j2 +1 -1
- orchestrator/cli/generator/templates/validate_product.j2 +3 -1
- orchestrator/cli/helpers/print_helpers.py +1 -1
- orchestrator/config/assignee.py +1 -1
- orchestrator/db/models.py +17 -0
- orchestrator/devtools/populator.py +1 -1
- orchestrator/devtools/scripts/migrate_20.py +11 -106
- orchestrator/devtools/scripts/migrate_30.py +61 -0
- orchestrator/devtools/scripts/shared.py +108 -0
- orchestrator/distlock/managers/redis_distlock_manager.py +3 -2
- orchestrator/domain/base.py +1 -2
- orchestrator/domain/lifecycle.py +2 -1
- orchestrator/graphql/resolvers/settings.py +2 -1
- orchestrator/graphql/schemas/product.py +19 -2
- orchestrator/migrations/helpers.py +1 -1
- orchestrator/migrations/versions/schema/2025-02-12_bac6be6f2b4f_added_input_state_table.py +56 -0
- orchestrator/schemas/engine_settings.py +1 -1
- orchestrator/schemas/subscription.py +2 -1
- orchestrator/services/celery.py +7 -4
- orchestrator/services/input_state.py +76 -0
- orchestrator/services/processes.py +8 -6
- orchestrator/services/products.py +1 -1
- orchestrator/services/subscriptions.py +2 -1
- orchestrator/services/tasks.py +13 -7
- orchestrator/services/workflows.py +13 -0
- orchestrator/settings.py +5 -2
- orchestrator/targets.py +1 -1
- orchestrator/types.py +8 -43
- orchestrator/utils/errors.py +2 -1
- orchestrator/utils/redis.py +6 -11
- orchestrator/utils/redis_client.py +35 -0
- orchestrator/utils/state.py +2 -1
- orchestrator/workflow.py +3 -1
- orchestrator/workflows/modify_note.py +1 -2
- orchestrator/workflows/steps.py +2 -1
- orchestrator/workflows/tasks/cleanup_tasks_log.py +1 -1
- orchestrator/workflows/tasks/resume_workflows.py +1 -1
- orchestrator/workflows/tasks/validate_product_type.py +1 -1
- orchestrator/workflows/tasks/validate_products.py +1 -1
- orchestrator/workflows/utils.py +2 -2
- {orchestrator_core-2.10.0rc2.dist-info → orchestrator_core-3.0.0.dist-info}/METADATA +10 -8
- {orchestrator_core-2.10.0rc2.dist-info → orchestrator_core-3.0.0.dist-info}/RECORD +53 -48
- {orchestrator_core-2.10.0rc2.dist-info → orchestrator_core-3.0.0.dist-info}/WHEEL +1 -1
- {orchestrator_core-2.10.0rc2.dist-info → orchestrator_core-3.0.0.dist-info/licenses}/LICENSE +0 -0
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
"""Added Input State Table.
|
|
2
|
+
|
|
3
|
+
Revision ID: bac6be6f2b4f
|
|
4
|
+
Revises: 4fjdn13f83ga
|
|
5
|
+
Create Date: 2025-02-12 14:39:53.664284
|
|
6
|
+
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
import sqlalchemy as sa
|
|
10
|
+
import sqlalchemy_utils
|
|
11
|
+
from alembic import op
|
|
12
|
+
from sqlalchemy.dialects import postgresql
|
|
13
|
+
|
|
14
|
+
from orchestrator import db
|
|
15
|
+
|
|
16
|
+
# revision identifiers, used by Alembic.
|
|
17
|
+
revision = "bac6be6f2b4f"
|
|
18
|
+
down_revision = "4fjdn13f83ga"
|
|
19
|
+
branch_labels = None
|
|
20
|
+
depends_on = None
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def upgrade() -> None:
|
|
24
|
+
# ### commands auto generated by Alembic - please adjust! ###
|
|
25
|
+
op.create_table(
|
|
26
|
+
"input_states",
|
|
27
|
+
sa.Column(
|
|
28
|
+
"input_state_id",
|
|
29
|
+
sqlalchemy_utils.types.uuid.UUIDType(),
|
|
30
|
+
server_default=sa.text("uuid_generate_v4()"),
|
|
31
|
+
nullable=False,
|
|
32
|
+
),
|
|
33
|
+
sa.Column("pid", sqlalchemy_utils.types.uuid.UUIDType(), nullable=False),
|
|
34
|
+
sa.Column("input_state", postgresql.JSONB(astext_type=sa.Text()), nullable=False), # type: ignore
|
|
35
|
+
sa.Column(
|
|
36
|
+
"input_time",
|
|
37
|
+
db.models.UtcTimestamp(timezone=True),
|
|
38
|
+
server_default=sa.text("current_timestamp"),
|
|
39
|
+
nullable=False,
|
|
40
|
+
),
|
|
41
|
+
sa.Column("input_type", sa.Enum("user_input", "initial_state", name="inputtype"), nullable=False),
|
|
42
|
+
sa.ForeignKeyConstraint(
|
|
43
|
+
["pid"],
|
|
44
|
+
["processes.pid"],
|
|
45
|
+
),
|
|
46
|
+
sa.PrimaryKeyConstraint("input_state_id"),
|
|
47
|
+
)
|
|
48
|
+
op.create_index(op.f("ix_input_state_input_state_id"), "input_states", ["input_state_id"], unique=False)
|
|
49
|
+
# ### end Alembic commands ###
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
def downgrade() -> None:
|
|
53
|
+
# ### commands auto generated by Alembic - please adjust! ###
|
|
54
|
+
op.drop_index(op.f("ix_input_state_input_state_id"), table_name="input_states")
|
|
55
|
+
op.drop_table("input_states")
|
|
56
|
+
# ### end Alembic commands ###
|
|
@@ -22,7 +22,8 @@ from orchestrator.schemas.product import ProductBaseSchema
|
|
|
22
22
|
from orchestrator.schemas.product_block import ProductBlockSchema
|
|
23
23
|
from orchestrator.schemas.resource_type import ResourceTypeSchema
|
|
24
24
|
from orchestrator.schemas.subscription_descriptions import SubscriptionDescriptionSchema
|
|
25
|
-
from orchestrator.types import SubscriptionLifecycle
|
|
25
|
+
from orchestrator.types import SubscriptionLifecycle
|
|
26
|
+
from pydantic_forms.types import strEnum
|
|
26
27
|
|
|
27
28
|
|
|
28
29
|
class PortMode(strEnum):
|
orchestrator/services/celery.py
CHANGED
|
@@ -22,10 +22,11 @@ from kombu.exceptions import ConnectionError, OperationalError
|
|
|
22
22
|
from orchestrator import app_settings
|
|
23
23
|
from orchestrator.api.error_handling import raise_status
|
|
24
24
|
from orchestrator.db import ProcessTable, db
|
|
25
|
+
from orchestrator.services.input_state import store_input_state
|
|
25
26
|
from orchestrator.services.processes import create_process, delete_process
|
|
26
27
|
from orchestrator.targets import Target
|
|
27
|
-
from orchestrator.types import State
|
|
28
28
|
from orchestrator.workflows import get_workflow
|
|
29
|
+
from pydantic_forms.types import State
|
|
29
30
|
|
|
30
31
|
SYSTEM_USER = "SYSTEM"
|
|
31
32
|
|
|
@@ -53,9 +54,8 @@ def _celery_start_process(
|
|
|
53
54
|
task_name = NEW_TASK if workflow.target == Target.SYSTEM else NEW_WORKFLOW
|
|
54
55
|
trigger_task = get_celery_task(task_name)
|
|
55
56
|
pstat = create_process(workflow_key, user_inputs, user)
|
|
56
|
-
tasks = pstat.state.s
|
|
57
57
|
try:
|
|
58
|
-
result = trigger_task.delay(pstat.process_id, workflow_key,
|
|
58
|
+
result = trigger_task.delay(pstat.process_id, workflow_key, user)
|
|
59
59
|
_block_when_testing(result)
|
|
60
60
|
return pstat.process_id
|
|
61
61
|
except (ConnectionError, OperationalError) as e:
|
|
@@ -82,9 +82,12 @@ def _celery_resume_process(
|
|
|
82
82
|
|
|
83
83
|
task_name = RESUME_TASK if workflow.target == Target.SYSTEM else RESUME_WORKFLOW
|
|
84
84
|
trigger_task = get_celery_task(task_name)
|
|
85
|
+
|
|
86
|
+
user_inputs = user_inputs or [{}]
|
|
87
|
+
store_input_state(pstat.process_id, user_inputs, "user_input")
|
|
85
88
|
try:
|
|
86
89
|
_celery_set_process_status_resumed(process)
|
|
87
|
-
result = trigger_task.delay(pstat.process_id,
|
|
90
|
+
result = trigger_task.delay(pstat.process_id, user)
|
|
88
91
|
_block_when_testing(result)
|
|
89
92
|
|
|
90
93
|
return pstat.process_id
|
|
@@ -0,0 +1,76 @@
|
|
|
1
|
+
# Copyright 2019-2025 SURF.
|
|
2
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
3
|
+
# you may not use this file except in compliance with the License.
|
|
4
|
+
# You may obtain a copy of the License at
|
|
5
|
+
#
|
|
6
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
7
|
+
#
|
|
8
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
9
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
10
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
11
|
+
# See the License for the specific language governing permissions and
|
|
12
|
+
# limitations under the License.
|
|
13
|
+
from typing import Any, Literal
|
|
14
|
+
from uuid import UUID
|
|
15
|
+
|
|
16
|
+
import structlog
|
|
17
|
+
from sqlalchemy import select
|
|
18
|
+
|
|
19
|
+
from orchestrator.db import db
|
|
20
|
+
from orchestrator.db.models import InputStateTable
|
|
21
|
+
|
|
22
|
+
logger = structlog.get_logger(__name__)
|
|
23
|
+
|
|
24
|
+
InputType = Literal["initial_state", "user_input"]
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
def retrieve_input_state(process_id: UUID, input_type: InputType) -> InputStateTable:
|
|
28
|
+
"""Get user input.
|
|
29
|
+
|
|
30
|
+
Args:
|
|
31
|
+
process_id: Process ID
|
|
32
|
+
input_type: The type of the input.
|
|
33
|
+
|
|
34
|
+
Returns:
|
|
35
|
+
User input table
|
|
36
|
+
|
|
37
|
+
"""
|
|
38
|
+
|
|
39
|
+
res: InputStateTable | None = db.session.scalars(
|
|
40
|
+
select(InputStateTable)
|
|
41
|
+
.filter(InputStateTable.process_id == process_id)
|
|
42
|
+
.filter(InputStateTable.input_type == input_type)
|
|
43
|
+
.order_by(InputStateTable.input_time.asc())
|
|
44
|
+
).first()
|
|
45
|
+
|
|
46
|
+
if res:
|
|
47
|
+
logger.debug("Retrieved input state", process_id=process_id, input_state=res, input_type=input_type)
|
|
48
|
+
return res
|
|
49
|
+
raise ValueError(f"No input state for pid: {process_id}")
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
def store_input_state(
|
|
53
|
+
process_id: UUID,
|
|
54
|
+
input_state: dict[str, Any] | list[dict[str, Any]],
|
|
55
|
+
input_type: InputType,
|
|
56
|
+
) -> None:
|
|
57
|
+
"""Store user input state.
|
|
58
|
+
|
|
59
|
+
Args:
|
|
60
|
+
process_id: Process ID
|
|
61
|
+
input_state: Dictionary of user input state
|
|
62
|
+
input_type: The type of the input.
|
|
63
|
+
|
|
64
|
+
Returns:
|
|
65
|
+
None
|
|
66
|
+
|
|
67
|
+
"""
|
|
68
|
+
logger.debug("Store input state", process_id=process_id, input_state=input_state, input_type=input_type)
|
|
69
|
+
db.session.add(
|
|
70
|
+
InputStateTable(
|
|
71
|
+
process_id=process_id,
|
|
72
|
+
input_state=input_state,
|
|
73
|
+
input_type=input_type,
|
|
74
|
+
)
|
|
75
|
+
)
|
|
76
|
+
db.session.commit()
|
|
@@ -18,7 +18,7 @@ from typing import Any
|
|
|
18
18
|
from uuid import UUID, uuid4
|
|
19
19
|
|
|
20
20
|
import structlog
|
|
21
|
-
from deepmerge import Merger
|
|
21
|
+
from deepmerge.merger import Merger
|
|
22
22
|
from sqlalchemy import delete, select
|
|
23
23
|
from sqlalchemy.exc import SQLAlchemyError
|
|
24
24
|
from sqlalchemy.orm import joinedload
|
|
@@ -35,11 +35,12 @@ from orchestrator.db import (
|
|
|
35
35
|
)
|
|
36
36
|
from orchestrator.distlock import distlock_manager
|
|
37
37
|
from orchestrator.schemas.engine_settings import WorkerStatus
|
|
38
|
+
from orchestrator.services.input_state import store_input_state
|
|
38
39
|
from orchestrator.services.settings import get_engine_settings_for_update
|
|
39
40
|
from orchestrator.services.workflows import get_workflow_by_name
|
|
40
41
|
from orchestrator.settings import ExecutorType, app_settings
|
|
41
42
|
from orchestrator.targets import Target
|
|
42
|
-
from orchestrator.types import BroadcastFunc
|
|
43
|
+
from orchestrator.types import BroadcastFunc
|
|
43
44
|
from orchestrator.utils.datetime import nowtz
|
|
44
45
|
from orchestrator.utils.errors import error_state_to_dict
|
|
45
46
|
from orchestrator.websocket import broadcast_invalidate_status_counts
|
|
@@ -60,6 +61,7 @@ from orchestrator.workflows import get_workflow
|
|
|
60
61
|
from orchestrator.workflows.removed_workflow import removed_workflow
|
|
61
62
|
from pydantic_forms.core import post_form
|
|
62
63
|
from pydantic_forms.exceptions import FormValidationError
|
|
64
|
+
from pydantic_forms.types import State
|
|
63
65
|
|
|
64
66
|
logger = structlog.get_logger(__name__)
|
|
65
67
|
|
|
@@ -449,7 +451,7 @@ def create_process(
|
|
|
449
451
|
)
|
|
450
452
|
|
|
451
453
|
_db_create_process(pstat)
|
|
452
|
-
|
|
454
|
+
store_input_state(process_id, state | initial_state, "initial_state")
|
|
453
455
|
return pstat
|
|
454
456
|
|
|
455
457
|
|
|
@@ -513,7 +515,7 @@ def thread_resume_process(
|
|
|
513
515
|
|
|
514
516
|
if user_input:
|
|
515
517
|
pstat.update(state=pstat.state.map(lambda state: StateMerger.merge(state, user_input)))
|
|
516
|
-
|
|
518
|
+
store_input_state(pstat.process_id, user_input, "user_input")
|
|
517
519
|
# enforce an update to the process status to properly show the process
|
|
518
520
|
process.last_status = ProcessStatus.RUNNING
|
|
519
521
|
db.session.add(process)
|
|
@@ -528,8 +530,8 @@ def thread_validate_workflow(validation_workflow: str, json: list[State] | None)
|
|
|
528
530
|
|
|
529
531
|
|
|
530
532
|
THREADPOOL_EXECUTION_CONTEXT: dict[str, Callable] = {
|
|
531
|
-
"start":
|
|
532
|
-
"resume":
|
|
533
|
+
"start": thread_start_process,
|
|
534
|
+
"resume": thread_resume_process,
|
|
533
535
|
"validate": thread_validate_workflow,
|
|
534
536
|
}
|
|
535
537
|
|
|
@@ -18,7 +18,7 @@ from sqlalchemy import select
|
|
|
18
18
|
from sqlalchemy.orm import joinedload
|
|
19
19
|
|
|
20
20
|
from orchestrator.db import ProductTable, db
|
|
21
|
-
from orchestrator.types import UUIDstr
|
|
21
|
+
from pydantic_forms.types import UUIDstr
|
|
22
22
|
|
|
23
23
|
|
|
24
24
|
def get_products(*, filters: list | None = None) -> list[ProductTable]:
|
|
@@ -43,9 +43,10 @@ from orchestrator.db.models import (
|
|
|
43
43
|
)
|
|
44
44
|
from orchestrator.domain.base import SubscriptionModel
|
|
45
45
|
from orchestrator.targets import Target
|
|
46
|
-
from orchestrator.types import SubscriptionLifecycle
|
|
46
|
+
from orchestrator.types import SubscriptionLifecycle
|
|
47
47
|
from orchestrator.utils.datetime import nowtz
|
|
48
48
|
from orchestrator.utils.helpers import is_ipaddress_type
|
|
49
|
+
from pydantic_forms.types import UUIDstr
|
|
49
50
|
|
|
50
51
|
logger = structlog.get_logger(__name__)
|
|
51
52
|
|
orchestrator/services/tasks.py
CHANGED
|
@@ -23,6 +23,7 @@ from kombu.serialization import registry
|
|
|
23
23
|
|
|
24
24
|
from orchestrator.api.error_handling import raise_status
|
|
25
25
|
from orchestrator.schemas.engine_settings import WorkerStatus
|
|
26
|
+
from orchestrator.services.input_state import retrieve_input_state
|
|
26
27
|
from orchestrator.services.processes import (
|
|
27
28
|
_get_process,
|
|
28
29
|
_run_process_async,
|
|
@@ -30,10 +31,11 @@ from orchestrator.services.processes import (
|
|
|
30
31
|
safe_logstep,
|
|
31
32
|
thread_resume_process,
|
|
32
33
|
)
|
|
33
|
-
from orchestrator.types import BroadcastFunc
|
|
34
|
+
from orchestrator.types import BroadcastFunc
|
|
34
35
|
from orchestrator.utils.json import json_dumps, json_loads
|
|
35
36
|
from orchestrator.workflow import ProcessStat, ProcessStatus, Success, runwf
|
|
36
37
|
from orchestrator.workflows import get_workflow
|
|
38
|
+
from pydantic_forms.types import State
|
|
37
39
|
|
|
38
40
|
logger = get_task_logger(__name__)
|
|
39
41
|
|
|
@@ -117,24 +119,28 @@ def initialise_celery(celery: Celery) -> None: # noqa: C901
|
|
|
117
119
|
celery_task = partial(celery.task, log=local_logger, serializer="orchestrator-json")
|
|
118
120
|
|
|
119
121
|
@celery_task(name=NEW_TASK) # type: ignore
|
|
120
|
-
def new_task(process_id, workflow_key: str,
|
|
122
|
+
def new_task(process_id, workflow_key: str, user: str) -> UUID | None:
|
|
121
123
|
local_logger.info("Start task", process_id=process_id, workflow_key=workflow_key)
|
|
124
|
+
state = retrieve_input_state(process_id, "initial_state").input_state
|
|
122
125
|
return start_process(process_id, workflow_key, state=state, user=user)
|
|
123
126
|
|
|
124
127
|
@celery_task(name=NEW_WORKFLOW) # type: ignore
|
|
125
|
-
def new_workflow(process_id, workflow_key: str,
|
|
128
|
+
def new_workflow(process_id, workflow_key: str, user: str) -> UUID | None:
|
|
126
129
|
local_logger.info("Start workflow", process_id=process_id, workflow_key=workflow_key)
|
|
130
|
+
state = retrieve_input_state(process_id, "initial_state").input_state
|
|
127
131
|
return start_process(process_id, workflow_key, state=state, user=user)
|
|
128
132
|
|
|
129
133
|
@celery_task(name=RESUME_TASK) # type: ignore
|
|
130
|
-
def resume_task(process_id: UUID,
|
|
134
|
+
def resume_task(process_id: UUID, user: str) -> UUID | None:
|
|
131
135
|
local_logger.info("Resume task", process_id=process_id)
|
|
132
|
-
|
|
136
|
+
state = retrieve_input_state(process_id, "user_input").input_state
|
|
137
|
+
return resume_process(process_id, user_inputs=state, user=user)
|
|
133
138
|
|
|
134
139
|
@celery_task(name=RESUME_WORKFLOW) # type: ignore
|
|
135
|
-
def resume_workflow(process_id: UUID,
|
|
140
|
+
def resume_workflow(process_id: UUID, user: str) -> UUID | None:
|
|
136
141
|
local_logger.info("Resume workflow", process_id=process_id)
|
|
137
|
-
|
|
142
|
+
state = retrieve_input_state(process_id, "user_input").input_state
|
|
143
|
+
return resume_process(process_id, user_inputs=state, user=user)
|
|
138
144
|
|
|
139
145
|
|
|
140
146
|
class CeleryJobWorkerStatus(WorkerStatus):
|
|
@@ -1,3 +1,16 @@
|
|
|
1
|
+
# Copyright 2019-2025 SURF.
|
|
2
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
3
|
+
# you may not use this file except in compliance with the License.
|
|
4
|
+
# You may obtain a copy of the License at
|
|
5
|
+
#
|
|
6
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
7
|
+
#
|
|
8
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
9
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
10
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
11
|
+
# See the License for the specific language governing permissions and
|
|
12
|
+
# limitations under the License.
|
|
13
|
+
|
|
1
14
|
from collections.abc import Iterable
|
|
2
15
|
|
|
3
16
|
from sqlalchemy import Select, select
|
orchestrator/settings.py
CHANGED
|
@@ -16,11 +16,11 @@ import string
|
|
|
16
16
|
from pathlib import Path
|
|
17
17
|
from typing import Literal
|
|
18
18
|
|
|
19
|
-
from pydantic import PostgresDsn, RedisDsn
|
|
19
|
+
from pydantic import Field, NonNegativeInt, PostgresDsn, RedisDsn
|
|
20
20
|
from pydantic_settings import BaseSettings
|
|
21
21
|
|
|
22
22
|
from oauth2_lib.settings import oauth2lib_settings
|
|
23
|
-
from orchestrator.types import strEnum
|
|
23
|
+
from pydantic_forms.types import strEnum
|
|
24
24
|
|
|
25
25
|
|
|
26
26
|
class ExecutorType(strEnum):
|
|
@@ -57,6 +57,9 @@ class AppSettings(BaseSettings):
|
|
|
57
57
|
CACHE_URI: RedisDsn = "redis://localhost:6379/0" # type: ignore
|
|
58
58
|
CACHE_DOMAIN_MODELS: bool = False
|
|
59
59
|
CACHE_HMAC_SECRET: str | None = None # HMAC signing key, used when pickling results in the cache
|
|
60
|
+
REDIS_RETRY_COUNT: NonNegativeInt = Field(
|
|
61
|
+
2, description="Number of retries for redis connection errors/timeouts, 0 to disable"
|
|
62
|
+
) # More info: https://redis-py.readthedocs.io/en/stable/retry.html
|
|
60
63
|
ENABLE_DISTLOCK_MANAGER: bool = True
|
|
61
64
|
DISTLOCK_BACKEND: str = "memory"
|
|
62
65
|
CC_NOC: int = 0
|
orchestrator/targets.py
CHANGED
orchestrator/types.py
CHANGED
|
@@ -34,59 +34,24 @@ from annotated_types import Len, MaxLen, MinLen
|
|
|
34
34
|
from more_itertools import first, last
|
|
35
35
|
from pydantic.fields import FieldInfo
|
|
36
36
|
|
|
37
|
-
|
|
38
|
-
# these types from pydantic_forms themselves
|
|
39
|
-
from pydantic_forms.types import (
|
|
40
|
-
JSON,
|
|
41
|
-
AcceptData,
|
|
42
|
-
AcceptItemType,
|
|
43
|
-
FormGenerator,
|
|
44
|
-
FormGeneratorAsync,
|
|
45
|
-
InputForm,
|
|
46
|
-
InputFormGenerator,
|
|
47
|
-
InputStepFunc,
|
|
48
|
-
SimpleInputFormGenerator,
|
|
49
|
-
State,
|
|
50
|
-
StateInputFormGenerator,
|
|
51
|
-
StateInputFormGeneratorAsync,
|
|
52
|
-
StateInputStepFunc,
|
|
53
|
-
StateSimpleInputFormGenerator,
|
|
54
|
-
SubscriptionMapping,
|
|
55
|
-
SummaryData,
|
|
56
|
-
UUIDstr,
|
|
57
|
-
strEnum,
|
|
58
|
-
)
|
|
37
|
+
from pydantic_forms.types import InputForm, State, strEnum
|
|
59
38
|
|
|
60
39
|
__all__ = [
|
|
61
|
-
"
|
|
40
|
+
"SAFE_USED_BY_TRANSITIONS_FOR_STATUS",
|
|
62
41
|
"BroadcastFunc",
|
|
63
|
-
"AcceptData",
|
|
64
|
-
"AcceptItemType",
|
|
65
42
|
"ErrorDict",
|
|
66
43
|
"ErrorState",
|
|
67
|
-
"FormGenerator",
|
|
68
|
-
"FormGeneratorAsync",
|
|
69
|
-
"InputForm",
|
|
70
|
-
"InputFormGenerator",
|
|
71
|
-
"InputStepFunc",
|
|
72
|
-
"SimpleInputFormGenerator",
|
|
73
|
-
"State",
|
|
74
|
-
"StateInputFormGenerator",
|
|
75
|
-
"StateInputFormGeneratorAsync",
|
|
76
|
-
"StateInputStepFunc",
|
|
77
|
-
"StateSimpleInputFormGenerator",
|
|
78
44
|
"StateStepFunc",
|
|
79
45
|
"StepFunc",
|
|
80
46
|
"SubscriptionLifecycle",
|
|
81
|
-
"
|
|
82
|
-
"
|
|
83
|
-
"
|
|
47
|
+
"filter_nonetype",
|
|
48
|
+
"get_origin_and_args",
|
|
49
|
+
"get_possible_product_block_types",
|
|
84
50
|
"is_list_type",
|
|
85
51
|
"is_of_type",
|
|
86
52
|
"is_optional_type",
|
|
87
53
|
"is_union_type",
|
|
88
|
-
"
|
|
89
|
-
"strEnum",
|
|
54
|
+
"list_factory",
|
|
90
55
|
]
|
|
91
56
|
|
|
92
57
|
if TYPE_CHECKING:
|
|
@@ -97,8 +62,8 @@ def is_union(tp: type[Any] | None) -> bool:
|
|
|
97
62
|
return tp is Union or tp is types.UnionType # type: ignore[comparison-overlap]
|
|
98
63
|
|
|
99
64
|
|
|
100
|
-
# ErrorState is either a string containing an error message, a
|
|
101
|
-
#
|
|
65
|
+
# ErrorState is either a string containing an error message, a caught Exception, or a tuple containing a message and
|
|
66
|
+
# an HTTP status code
|
|
102
67
|
ErrorState = Union[str, Exception, tuple[str, Union[int, HTTPStatus]]]
|
|
103
68
|
# An ErrorDict should have the following keys:
|
|
104
69
|
# error: str # A message describing the error
|
orchestrator/utils/errors.py
CHANGED
|
@@ -18,7 +18,8 @@ from typing import Any, cast
|
|
|
18
18
|
import structlog
|
|
19
19
|
|
|
20
20
|
from nwastdlib.ex import show_ex
|
|
21
|
-
from orchestrator.types import JSON, ErrorDict
|
|
21
|
+
from orchestrator.types import ErrorDict
|
|
22
|
+
from pydantic_forms.types import JSON
|
|
22
23
|
|
|
23
24
|
logger = structlog.get_logger(__name__)
|
|
24
25
|
|
orchestrator/utils/redis.py
CHANGED
|
@@ -17,22 +17,22 @@ from os import getenv
|
|
|
17
17
|
from typing import Any, Callable
|
|
18
18
|
from uuid import UUID
|
|
19
19
|
|
|
20
|
-
import redis.exceptions
|
|
21
20
|
from anyio import CancelScope, get_cancelled_exc_class
|
|
22
|
-
from redis import Redis
|
|
23
21
|
from redis.asyncio import Redis as AIORedis
|
|
24
22
|
from redis.asyncio.client import Pipeline, PubSub
|
|
25
|
-
from redis.asyncio.retry import Retry
|
|
26
|
-
from redis.backoff import EqualJitterBackoff
|
|
27
23
|
from structlog import get_logger
|
|
28
24
|
|
|
29
25
|
from orchestrator.services.subscriptions import _generate_etag
|
|
30
26
|
from orchestrator.settings import app_settings
|
|
31
27
|
from orchestrator.utils.json import PY_JSON_TYPES, json_dumps, json_loads
|
|
28
|
+
from orchestrator.utils.redis_client import (
|
|
29
|
+
create_redis_asyncio_client,
|
|
30
|
+
create_redis_client,
|
|
31
|
+
)
|
|
32
32
|
|
|
33
33
|
logger = get_logger(__name__)
|
|
34
34
|
|
|
35
|
-
cache =
|
|
35
|
+
cache = create_redis_client(app_settings.CACHE_URI)
|
|
36
36
|
|
|
37
37
|
ONE_WEEK = 3600 * 24 * 7
|
|
38
38
|
|
|
@@ -136,12 +136,7 @@ class RedisBroadcast:
|
|
|
136
136
|
client: AIORedis
|
|
137
137
|
|
|
138
138
|
def __init__(self, redis_url: str):
|
|
139
|
-
self.client = AIORedis.from_url(
|
|
140
|
-
redis_url,
|
|
141
|
-
retry_on_error=[redis.exceptions.ConnectionError],
|
|
142
|
-
retry_on_timeout=True,
|
|
143
|
-
retry=Retry(EqualJitterBackoff(base=0.05), 2),
|
|
144
|
-
)
|
|
139
|
+
self.client = create_redis_asyncio_client(redis_url)
|
|
145
140
|
self.redis_url = redis_url
|
|
146
141
|
|
|
147
142
|
@asynccontextmanager
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
import redis.asyncio
|
|
2
|
+
import redis.client
|
|
3
|
+
import redis.exceptions
|
|
4
|
+
from pydantic import RedisDsn
|
|
5
|
+
from redis import Redis
|
|
6
|
+
from redis.asyncio import Redis as AIORedis
|
|
7
|
+
from redis.asyncio.retry import Retry as AIORetry
|
|
8
|
+
from redis.backoff import EqualJitterBackoff
|
|
9
|
+
from redis.retry import Retry
|
|
10
|
+
|
|
11
|
+
from orchestrator.settings import app_settings
|
|
12
|
+
|
|
13
|
+
REDIS_RETRY_ON_ERROR = [redis.exceptions.ConnectionError]
|
|
14
|
+
REDIS_RETRY_ON_TIMEOUT = True
|
|
15
|
+
REDIS_RETRY_BACKOFF = EqualJitterBackoff(base=0.05)
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def create_redis_client(redis_url: str | RedisDsn) -> redis.client.Redis:
|
|
19
|
+
"""Create sync Redis client for the given Redis DSN with retry handling for connection errors and timeouts."""
|
|
20
|
+
return Redis.from_url(
|
|
21
|
+
str(redis_url),
|
|
22
|
+
retry_on_error=REDIS_RETRY_ON_ERROR, # type: ignore[arg-type]
|
|
23
|
+
retry_on_timeout=REDIS_RETRY_ON_TIMEOUT,
|
|
24
|
+
retry=Retry(REDIS_RETRY_BACKOFF, app_settings.REDIS_RETRY_COUNT),
|
|
25
|
+
)
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def create_redis_asyncio_client(redis_url: str | RedisDsn) -> redis.asyncio.client.Redis:
|
|
29
|
+
"""Create async Redis client for the given Redis DSN with retry handling for connection errors and timeouts."""
|
|
30
|
+
return AIORedis.from_url(
|
|
31
|
+
str(redis_url),
|
|
32
|
+
retry_on_error=REDIS_RETRY_ON_ERROR, # type: ignore[arg-type]
|
|
33
|
+
retry_on_timeout=REDIS_RETRY_ON_TIMEOUT,
|
|
34
|
+
retry=AIORetry(REDIS_RETRY_BACKOFF, app_settings.REDIS_RETRY_COUNT),
|
|
35
|
+
)
|
orchestrator/utils/state.py
CHANGED
|
@@ -19,7 +19,7 @@ from typing import Any, cast, get_args
|
|
|
19
19
|
from uuid import UUID
|
|
20
20
|
|
|
21
21
|
from orchestrator.domain.base import SubscriptionModel
|
|
22
|
-
from orchestrator.types import
|
|
22
|
+
from orchestrator.types import StepFunc, is_list_type, is_optional_type
|
|
23
23
|
from orchestrator.utils.functional import logger
|
|
24
24
|
from pydantic_forms.types import (
|
|
25
25
|
FormGenerator,
|
|
@@ -27,6 +27,7 @@ from pydantic_forms.types import (
|
|
|
27
27
|
InputFormGenerator,
|
|
28
28
|
InputStepFunc,
|
|
29
29
|
SimpleInputFormGenerator,
|
|
30
|
+
State,
|
|
30
31
|
StateInputStepFunc,
|
|
31
32
|
)
|
|
32
33
|
|
orchestrator/workflow.py
CHANGED
|
@@ -43,7 +43,7 @@ from orchestrator.config.assignee import Assignee
|
|
|
43
43
|
from orchestrator.db import db, transactional
|
|
44
44
|
from orchestrator.services.settings import get_engine_settings
|
|
45
45
|
from orchestrator.targets import Target
|
|
46
|
-
from orchestrator.types import ErrorDict,
|
|
46
|
+
from orchestrator.types import ErrorDict, StepFunc
|
|
47
47
|
from orchestrator.utils.docs import make_workflow_doc
|
|
48
48
|
from orchestrator.utils.errors import error_state_to_dict
|
|
49
49
|
from orchestrator.utils.state import form_inject_args, inject_args
|
|
@@ -52,9 +52,11 @@ from pydantic_forms.types import (
|
|
|
52
52
|
FormGenerator,
|
|
53
53
|
InputFormGenerator,
|
|
54
54
|
InputStepFunc,
|
|
55
|
+
State,
|
|
55
56
|
StateInputFormGenerator,
|
|
56
57
|
StateInputStepFunc,
|
|
57
58
|
StateSimpleInputFormGenerator,
|
|
59
|
+
strEnum,
|
|
58
60
|
)
|
|
59
61
|
|
|
60
62
|
logger = structlog.get_logger(__name__)
|
|
@@ -15,12 +15,11 @@ from orchestrator.forms import SubmitFormPage
|
|
|
15
15
|
from orchestrator.services import subscriptions
|
|
16
16
|
from orchestrator.settings import app_settings
|
|
17
17
|
from orchestrator.targets import Target
|
|
18
|
-
from orchestrator.types import UUIDstr
|
|
19
18
|
from orchestrator.utils.json import to_serializable
|
|
20
19
|
from orchestrator.workflow import StepList, conditional, done, init, step, workflow
|
|
21
20
|
from orchestrator.workflows.steps import cache_domain_models, store_process_subscription
|
|
22
21
|
from orchestrator.workflows.utils import wrap_modify_initial_input_form
|
|
23
|
-
from pydantic_forms.types import FormGenerator, State
|
|
22
|
+
from pydantic_forms.types import FormGenerator, State, UUIDstr
|
|
24
23
|
from pydantic_forms.validators import LongText
|
|
25
24
|
|
|
26
25
|
|
orchestrator/workflows/steps.py
CHANGED
|
@@ -23,11 +23,12 @@ from orchestrator.domain.base import ProductBlockModel, SubscriptionModel
|
|
|
23
23
|
from orchestrator.services.settings import reset_search_index
|
|
24
24
|
from orchestrator.services.subscriptions import build_extended_domain_model, get_subscription
|
|
25
25
|
from orchestrator.targets import Target
|
|
26
|
-
from orchestrator.types import
|
|
26
|
+
from orchestrator.types import SubscriptionLifecycle
|
|
27
27
|
from orchestrator.utils.json import to_serializable
|
|
28
28
|
from orchestrator.utils.redis import delete_from_redis, to_redis
|
|
29
29
|
from orchestrator.websocket import sync_invalidate_subscription_cache
|
|
30
30
|
from orchestrator.workflow import Step, step
|
|
31
|
+
from pydantic_forms.types import State, UUIDstr
|
|
31
32
|
|
|
32
33
|
logger = structlog.get_logger(__name__)
|
|
33
34
|
|
|
@@ -19,9 +19,9 @@ from sqlalchemy import select
|
|
|
19
19
|
from orchestrator.db import ProcessTable, db
|
|
20
20
|
from orchestrator.settings import app_settings
|
|
21
21
|
from orchestrator.targets import Target
|
|
22
|
-
from orchestrator.types import State
|
|
23
22
|
from orchestrator.utils.datetime import nowtz
|
|
24
23
|
from orchestrator.workflow import ProcessStatus, StepList, done, init, step, workflow
|
|
24
|
+
from pydantic_forms.types import State
|
|
25
25
|
|
|
26
26
|
|
|
27
27
|
@step("Clean up completed tasks older than TASK_LOG_RETENTION_DAYS")
|
|
@@ -18,8 +18,8 @@ from sqlalchemy import select
|
|
|
18
18
|
from orchestrator.db import ProcessTable, db
|
|
19
19
|
from orchestrator.services import processes
|
|
20
20
|
from orchestrator.targets import Target
|
|
21
|
-
from orchestrator.types import State, UUIDstr
|
|
22
21
|
from orchestrator.workflow import ProcessStatus, StepList, done, init, step, workflow
|
|
22
|
+
from pydantic_forms.types import State, UUIDstr
|
|
23
23
|
|
|
24
24
|
logger = structlog.get_logger(__name__)
|
|
25
25
|
|
|
@@ -26,8 +26,8 @@ from orchestrator.services.workflows import (
|
|
|
26
26
|
start_validation_workflow_for_workflows,
|
|
27
27
|
)
|
|
28
28
|
from orchestrator.targets import Target
|
|
29
|
-
from orchestrator.types import FormGenerator, State
|
|
30
29
|
from orchestrator.workflow import StepList, done, init, step, workflow
|
|
30
|
+
from pydantic_forms.types import FormGenerator, State
|
|
31
31
|
|
|
32
32
|
logger = structlog.get_logger(__name__)
|
|
33
33
|
|
|
@@ -27,10 +27,10 @@ from orchestrator.services.products import get_products
|
|
|
27
27
|
from orchestrator.services.translations import generate_translations
|
|
28
28
|
from orchestrator.services.workflows import get_workflow_by_name, get_workflows
|
|
29
29
|
from orchestrator.targets import Target
|
|
30
|
-
from orchestrator.types import State
|
|
31
30
|
from orchestrator.utils.errors import ProcessFailureError
|
|
32
31
|
from orchestrator.utils.fixed_inputs import fixed_input_configuration as fi_configuration
|
|
33
32
|
from orchestrator.workflow import StepList, done, init, step, workflow
|
|
33
|
+
from pydantic_forms.types import State
|
|
34
34
|
|
|
35
35
|
# Since these errors are probably programming failures we should not throw AssertionErrors
|
|
36
36
|
|