orchestrator-core 3.2.3-py3-none-any.whl → 4.0.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- orchestrator/__init__.py +1 -1
- orchestrator/api/api_v1/endpoints/settings.py +3 -13
- orchestrator/api/api_v1/endpoints/subscription_customer_descriptions.py +0 -2
- orchestrator/api/api_v1/endpoints/subscriptions.py +1 -0
- orchestrator/app.py +10 -1
- orchestrator/cli/generator/templates/new_product_migration.j2 +5 -1
- orchestrator/cli/migrate_tasks.py +5 -5
- orchestrator/cli/migrate_workflows.py +1 -2
- orchestrator/db/models.py +3 -1
- orchestrator/domain/base.py +4 -24
- orchestrator/domain/customer_description.py +0 -4
- orchestrator/graphql/mutations/customer_description.py +1 -1
- orchestrator/metrics/__init__.py +3 -0
- orchestrator/metrics/engine.py +49 -0
- orchestrator/metrics/init.py +14 -0
- orchestrator/metrics/processes.py +147 -0
- orchestrator/metrics/subscriptions.py +93 -0
- orchestrator/migrations/helpers.py +14 -6
- orchestrator/migrations/templates/alembic.ini.j2 +1 -2
- orchestrator/migrations/templates/env.py.j2 +4 -7
- orchestrator/migrations/versions/schema/2025-02-20_68d14db1b8da_make_workflow_description_mandatory.py +33 -0
- orchestrator/migrations/versions/schema/2025-05-08_161918133bec_add_is_task_to_workflow.py +28 -0
- orchestrator/schedules/validate_subscriptions.py +4 -4
- orchestrator/schemas/workflow.py +3 -1
- orchestrator/services/celery.py +13 -5
- orchestrator/services/processes.py +2 -2
- orchestrator/services/settings.py +10 -1
- orchestrator/services/subscriptions.py +10 -23
- orchestrator/services/workflows.py +8 -4
- orchestrator/settings.py +1 -4
- orchestrator/targets.py +1 -0
- orchestrator/utils/get_subscription_dict.py +0 -4
- orchestrator/utils/redis.py +1 -67
- orchestrator/workflows/modify_note.py +3 -11
- orchestrator/workflows/steps.py +2 -86
- orchestrator/workflows/tasks/validate_product_type.py +2 -2
- orchestrator/workflows/tasks/validate_products.py +3 -6
- orchestrator/workflows/utils.py +3 -23
- {orchestrator_core-3.2.3.dist-info → orchestrator_core-4.0.0.dist-info}/METADATA +9 -8
- {orchestrator_core-3.2.3.dist-info → orchestrator_core-4.0.0.dist-info}/RECORD +42 -35
- {orchestrator_core-3.2.3.dist-info → orchestrator_core-4.0.0.dist-info}/WHEEL +0 -0
- {orchestrator_core-3.2.3.dist-info → orchestrator_core-4.0.0.dist-info}/licenses/LICENSE +0 -0
orchestrator/migrations/helpers.py
CHANGED

@@ -135,18 +135,21 @@ def create_workflow(conn: sa.engine.Connection, workflow: dict) -> None:
     >>> workflow = {
             "name": "workflow_name",
             "target": "SYSTEM",
+            "is_task": False,
             "description": "workflow description",
             "product_type": "product_type",
         }
     >>> create_workflow(conn, workflow)
     """
+    if not workflow.get("is_task", False):
+        workflow["is_task"] = False
 
     conn.execute(
         sa.text(
             """
             WITH new_workflow AS (
-                INSERT INTO workflows(name, target, description)
-                VALUES (:name, :target, :description)
+                INSERT INTO workflows(name, target, is_task, description)
+                VALUES (:name, :target, :is_task, :description)
                 ON CONFLICT DO NOTHING
                 RETURNING workflow_id)
             INSERT

@@ -184,8 +187,8 @@ def create_task(conn: sa.engine.Connection, task: dict) -> None:
     conn.execute(
         sa.text(
             """
-            INSERT INTO workflows(name, target, description)
-            VALUES (:name, 'SYSTEM', :description)
+            INSERT INTO workflows(name, target, is_task, description)
+            VALUES (:name, 'SYSTEM', TRUE, :description)
             ON CONFLICT DO NOTHING
             RETURNING workflow_id
             """

@@ -206,6 +209,7 @@ def create_workflows(conn: sa.engine.Connection, new: dict) -> None:
         "workflow_name": {
             "workflow_id": "f2702074-3203-454c-b298-6dfa7675423d",
             "target": "CREATE",
+            "is_task": False,
             "description": "Workflow description",
             "tag": "ProductBlockName1",
             "search_phrase": "Search Phrase%",

@@ -214,12 +218,16 @@ def create_workflows(conn: sa.engine.Connection, new: dict) -> None:
     """
     for name, workflow in new.items():
         workflow["name"] = name
+
+        if not workflow.get("is_task", False):
+            workflow["is_task"] = False
+
         conn.execute(
             sa.text(
                 """
                 WITH new_workflow AS (
-                    INSERT INTO workflows(workflow_id, name, target, description)
-                    VALUES (:workflow_id, :name, :target, :description)
+                    INSERT INTO workflows(workflow_id, name, target, is_task, description)
+                    VALUES (:workflow_id, :name, :target, :is_task, :description)
                     RETURNING workflow_id)
                 INSERT
                 INTO products_workflows (product_id, workflow_id)
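The helpers now normalize `is_task` before inserting, so existing product migrations keep working unchanged while new ones can mark a workflow as a task explicitly. A minimal sketch of a product migration calling the updated helper, assuming an Alembic `upgrade()` context; the workflow name, description, and product type below are hypothetical:

from alembic import op

from orchestrator.migrations.helpers import create_workflow


def upgrade() -> None:
    conn = op.get_bind()
    workflow = {
        "name": "nightly_cleanup",  # hypothetical workflow name
        "target": "SYSTEM",
        "is_task": True,  # may be omitted; the helper now defaults it to False
        "description": "Nightly cleanup task",
        "product_type": "MyProduct",  # hypothetical product type
    }
    create_workflow(conn, workflow)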
orchestrator/migrations/templates/alembic.ini.j2
CHANGED

@@ -6,8 +6,7 @@ file_template = %%(year)d-%%(month).2d-%%(day).2d_%%(rev)s_%%(slug)s
 
 # set to 'true' to run the environment during
 # the 'revision' command, regardless of autogenerate
-
-script_location = {{ migrations_dir }}
+script_location = %(here)s/{{ migrations_dir }}
 version_locations = %(here)s/{{ migrations_dir }}/versions/schema
 # Logging configuration
 [loggers]
orchestrator/migrations/templates/env.py.j2
CHANGED

@@ -1,9 +1,7 @@
-import logging
-import os
+import structlog
 from alembic import context
 from sqlalchemy import engine_from_config, pool
 
-import orchestrator
 from orchestrator.db.database import BaseModel
 from orchestrator.settings import app_settings
 

@@ -11,11 +9,10 @@ from orchestrator.settings import app_settings
 # access to the values within the .ini file in use.
 config = context.config
 
-#
-
-logger = logging.getLogger("alembic.env")
+# Setup logging
+logger = structlog.get_logger()
 
-config.set_main_option("sqlalchemy.url", app_settings.DATABASE_URI)
+config.set_main_option("sqlalchemy.url", str(app_settings.DATABASE_URI))
 
 # add your model's MetaData object here
 # for 'autogenerate' support
orchestrator/migrations/versions/schema/2025-02-20_68d14db1b8da_make_workflow_description_mandatory.py
ADDED

@@ -0,0 +1,33 @@
+"""Make workflow description mandatory.
+
+Revision ID: 68d14db1b8da
+Revises: bac6be6f2b4f
+Create Date: 2025-02-20 16:39:34.889953
+
+"""
+
+import sqlalchemy as sa
+from alembic import op
+from structlog import get_logger
+
+logger = get_logger(__name__)
+
+# revision identifiers, used by Alembic.
+revision = "68d14db1b8da"
+down_revision = "fc5c993a4b4a"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    try:
+        op.alter_column("workflows", "description", existing_type=sa.TEXT(), nullable=False)
+    except sa.exc.IntegrityError:
+        logger.error(
+            "Unable to execute migrations due to missing descriptions in workflow table, please create a migration to backfill this column."
+        )
+        raise
+
+
+def downgrade() -> None:
+    op.alter_column("workflows", "description", existing_type=sa.TEXT(), nullable=True)
orchestrator/migrations/versions/schema/2025-05-08_161918133bec_add_is_task_to_workflow.py
ADDED

@@ -0,0 +1,28 @@
+"""Add is_task to workflow.
+
+Revision ID: 161918133bec
+Revises: 68d14db1b8da
+Create Date: 2025-05-08 11:25:51.966410
+
+"""
+
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "161918133bec"
+down_revision = "68d14db1b8da"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column("workflows", sa.Column("is_task", sa.Boolean(), server_default=sa.text("false"), nullable=False))
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_column("workflows", "is_task")
+    # ### end Alembic commands ###
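Because the new column is NOT NULL, the `server_default` is what lets this upgrade succeed on databases that already contain workflow rows: existing rows are backfilled to `false` at DDL time. On PostgreSQL the generated statement amounts to roughly this raw-SQL sketch (illustration only, not part of the package):

from alembic import op


def upgrade() -> None:
    # Equivalent effect to the op.add_column(...) call above: existing rows
    # get is_task = false, new inserts default to false, NULL is never allowed.
    op.execute("ALTER TABLE workflows ADD COLUMN is_task BOOLEAN NOT NULL DEFAULT false")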
|
@@ -22,7 +22,7 @@ from orchestrator.services.subscriptions import (
|
|
|
22
22
|
get_subscriptions_on_product_table_in_sync,
|
|
23
23
|
)
|
|
24
24
|
from orchestrator.services.workflows import (
|
|
25
|
-
|
|
25
|
+
get_validation_product_workflows_for_subscription,
|
|
26
26
|
start_validation_workflow_for_workflows,
|
|
27
27
|
)
|
|
28
28
|
from orchestrator.settings import app_settings
|
|
@@ -42,9 +42,9 @@ def validate_subscriptions() -> None:
|
|
|
42
42
|
subscriptions = get_subscriptions_on_product_table_in_sync()
|
|
43
43
|
|
|
44
44
|
for subscription in subscriptions:
|
|
45
|
-
|
|
45
|
+
validation_product_workflows = get_validation_product_workflows_for_subscription(subscription)
|
|
46
46
|
|
|
47
|
-
if not
|
|
47
|
+
if not validation_product_workflows:
|
|
48
48
|
logger.warning(
|
|
49
49
|
"SubscriptionTable has no validation workflow",
|
|
50
50
|
subscription=subscription,
|
|
@@ -52,4 +52,4 @@ def validate_subscriptions() -> None:
|
|
|
52
52
|
)
|
|
53
53
|
break
|
|
54
54
|
|
|
55
|
-
start_validation_workflow_for_workflows(subscription=subscription, workflows=
|
|
55
|
+
start_validation_workflow_for_workflows(subscription=subscription, workflows=validation_product_workflows)
|
orchestrator/schemas/workflow.py
CHANGED

@@ -15,7 +15,7 @@ from datetime import datetime
 from typing import Any
 from uuid import UUID
 
-from pydantic import ConfigDict
+from pydantic import ConfigDict, Field
 
 from orchestrator.schemas.base import OrchestratorBaseModel
 from orchestrator.targets import Target

@@ -24,6 +24,7 @@ from orchestrator.targets import Target
 class WorkflowBaseSchema(OrchestratorBaseModel):
     name: str
     target: Target
+    is_task: bool = False
     description: str | None = None
     created_at: datetime | None = None

@@ -59,6 +60,7 @@ class SubscriptionWorkflowListsSchema(OrchestratorBaseModel):
     modify: list[WorkflowListItemSchema]
     terminate: list[WorkflowListItemSchema]
     system: list[WorkflowListItemSchema]
+    validate_: list[WorkflowListItemSchema] = Field(default_factory=list, alias="validate")
 
 
 class WorkflowPatchSchema(OrchestratorBaseModel):
orchestrator/services/celery.py
CHANGED

@@ -24,7 +24,7 @@ from orchestrator.api.error_handling import raise_status
 from orchestrator.db import ProcessTable, db
 from orchestrator.services.input_state import store_input_state
 from orchestrator.services.processes import create_process, delete_process
-from orchestrator.
+from orchestrator.services.workflows import get_workflow_by_name
 from orchestrator.workflows import get_workflow
 from pydantic_forms.types import State

@@ -51,7 +51,11 @@ def _celery_start_process(
     if not workflow:
         raise_status(HTTPStatus.NOT_FOUND, "Workflow does not exist")
 
-
+    wf_table = get_workflow_by_name(workflow.name)
+    if not wf_table:
+        raise_status(HTTPStatus.NOT_FOUND, "Workflow in Database does not exist")
+
+    task_name = NEW_TASK if wf_table.is_task else NEW_WORKFLOW
     trigger_task = get_celery_task(task_name)
     pstat = create_process(workflow_key, user_inputs, user)
     try:

@@ -68,8 +72,8 @@ def _celery_start_process(
 def _celery_resume_process(
     process: ProcessTable,
     *,
-    user_inputs: list[State] | None,
-    user: str | None,
+    user_inputs: list[State] | None = None,
+    user: str | None = None,
     **kwargs: Any,
 ) -> UUID:
     """Client side call of Celery."""

@@ -80,7 +84,11 @@ def _celery_resume_process(
     last_process_status = process.last_status
     workflow = pstat.workflow
 
-
+    wf_table = get_workflow_by_name(workflow.name)
+    if not workflow or not wf_table:
+        raise_status(HTTPStatus.NOT_FOUND, "Workflow does not exist")
+
+    task_name = RESUME_TASK if wf_table.is_task else RESUME_WORKFLOW
     trigger_task = get_celery_task(task_name)
 
     user_inputs = user_inputs or [{}]
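Both dispatch paths now consult the `WorkflowTable` row instead of the workflow's target, so whether a process is queued as a task is purely data-driven. A stand-in sketch of the selection rule; the constant values and `WorkflowRow` stand-in are assumptions, only the names `NEW_TASK`/`NEW_WORKFLOW` and the one-liner itself come from the diff:

from dataclasses import dataclass

NEW_TASK = "new_task"  # assumed values; the real constants live in orchestrator
NEW_WORKFLOW = "new_workflow"


@dataclass
class WorkflowRow:
    """Stand-in for the WorkflowTable row returned by get_workflow_by_name."""

    name: str
    target: str
    is_task: bool


def select_task_name(wf_table: WorkflowRow) -> str:
    # Mirrors `task_name = NEW_TASK if wf_table.is_task else NEW_WORKFLOW`.
    return NEW_TASK if wf_table.is_task else NEW_WORKFLOW


# A SYSTEM-target workflow is no longer automatically a task:
assert select_task_name(WorkflowRow("validate_x", "SYSTEM", is_task=True)) == NEW_TASK
assert select_task_name(WorkflowRow("nightly_sync", "SYSTEM", is_task=False)) == NEW_WORKFLOW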
orchestrator/services/processes.py
CHANGED

@@ -34,7 +34,6 @@ from orchestrator.services.input_state import store_input_state
 from orchestrator.services.settings import get_engine_settings_for_update
 from orchestrator.services.workflows import get_workflow_by_name
 from orchestrator.settings import ExecutorType, app_settings
-from orchestrator.targets import Target
 from orchestrator.types import BroadcastFunc
 from orchestrator.utils.datetime import nowtz
 from orchestrator.utils.errors import error_state_to_dict

@@ -108,7 +107,7 @@ def _db_create_process(stat: ProcessStat) -> None:
         workflow_id=wf_table.workflow_id,
         last_status=ProcessStatus.CREATED,
         created_by=stat.current_user,
-        is_task=
+        is_task=wf_table.is_task,
     )
     db.session.add(p)
     db.session.commit()

@@ -441,6 +440,7 @@ def create_process(
     }
 
     try:
+
         state = post_form(workflow.initial_input_form, initial_state, user_inputs)
     except FormValidationError:
         logger.exception("Validation errors", user_inputs=user_inputs)
orchestrator/services/settings.py
CHANGED

@@ -19,7 +19,7 @@ from sqlalchemy import select, text
 from sqlalchemy.exc import SQLAlchemyError
 
 from orchestrator.db import EngineSettingsTable, db
-from orchestrator.schemas.engine_settings import EngineSettingsSchema
+from orchestrator.schemas.engine_settings import EngineSettingsSchema, GlobalStatusEnum
 from orchestrator.settings import app_settings
 
 logger = structlog.get_logger(__name__)

@@ -35,6 +35,15 @@ def get_engine_settings_for_update() -> EngineSettingsTable:
     return db.session.execute(select(EngineSettingsTable).with_for_update()).scalar_one()
 
 
+def generate_engine_global_status(engine_settings: EngineSettingsTable) -> GlobalStatusEnum:
+    """Returns the global status of the engine."""
+    if engine_settings.global_lock and engine_settings.running_processes > 0:
+        return GlobalStatusEnum.PAUSING
+    if engine_settings.global_lock and engine_settings.running_processes == 0:
+        return GlobalStatusEnum.PAUSED
+    return GlobalStatusEnum.RUNNING
+
+
 def post_update_to_slack(engine_status: EngineSettingsSchema, user: str) -> None:
     """Post engine settings update to slack.
 
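`generate_engine_global_status` derives the reported status from only two fields, so it is easy to exercise with a duck-typed stand-in instead of a real `EngineSettingsTable` row; a minimal sketch, assuming an environment with orchestrator-core installed:

from dataclasses import dataclass

from orchestrator.services.settings import generate_engine_global_status


@dataclass
class FakeEngineSettings:
    # Only the two attributes the function reads.
    global_lock: bool
    running_processes: int


assert generate_engine_global_status(FakeEngineSettings(True, 3)).name == "PAUSING"  # locked, still draining
assert generate_engine_global_status(FakeEngineSettings(True, 0)).name == "PAUSED"  # locked, fully drained
assert generate_engine_global_status(FakeEngineSettings(False, 5)).name == "RUNNING"  # no lock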
orchestrator/services/subscriptions.py
CHANGED

@@ -505,6 +505,7 @@ TARGET_DEFAULT_USABLE_MAP: dict[Target, list[str]] = {
     Target.MODIFY: ["active"],
     Target.TERMINATE: ["active", "provisioning"],
     Target.SYSTEM: ["active"],
+    Target.VALIDATE: ["active"],
 }
 
 WF_USABLE_MAP: dict[str, list[str]] = {}

@@ -530,6 +531,7 @@ def subscription_workflows(subscription: SubscriptionTable) -> dict[str, Any]:
     ... "modify": [],
     ... "terminate": [],
     ... "system": [],
+    ... "validate": [],
     ... }
 
     """

@@ -549,9 +551,10 @@ def subscription_workflows(subscription: SubscriptionTable) -> dict[str, Any]:
         "modify": [],
         "terminate": [],
         "system": [],
+        "validate": [],
     }
     for workflow in subscription.product.workflows:
-        if workflow.name in WF_USABLE_WHILE_OUT_OF_SYNC or workflow.
+        if workflow.name in WF_USABLE_WHILE_OUT_OF_SYNC or workflow.is_task:
             # validations and modify note are also possible with: not in sync or locked relations
             workflow_json = {"name": workflow.name, "description": workflow.description}
         else:

@@ -605,7 +608,6 @@ def build_domain_model(subscription_model: SubscriptionModel) -> dict:
 
 def build_extended_domain_model(subscription_model: SubscriptionModel) -> dict:
     """Create a subscription dict from the SubscriptionModel with additional keys."""
-    from orchestrator.settings import app_settings
 
     stmt = select(SubscriptionCustomerDescriptionTable).where(
         SubscriptionCustomerDescriptionTable.subscription_id == subscription_model.subscription_id

@@ -615,27 +617,12 @@ def build_extended_domain_model(subscription_model: SubscriptionModel) -> dict:
     with cache_subscription_models():
         subscription = subscription_model.model_dump()
 
-
-
-
-
-
-
-        update_in(subscription, f"{path_to_block}.in_use_by_ids", in_use_by_ids)
-        update_in(subscription, f"{path_to_block}.in_use_by_relations", in_use_by_relations)
-
-    if app_settings.ENABLE_SUBSCRIPTION_MODEL_OPTIMIZATIONS:
-        # TODO #900 remove toggle and make this path the default
-        # query all subscription instances and inject the in_use_by_ids/in_use_by_relations into the subscription dict.
-        instance_to_in_use_by = {
-            instance.subscription_instance_id: instance.in_use_by
-            for instance in eagerload_all_subscription_instances_only_inuseby(subscription_model.subscription_id)
-        }
-        inject_in_use_by_ids_v2(subscription, instance_to_in_use_by)
-    else:
-        # find all product blocks, check if they have in_use_by and inject the in_use_by_ids into the subscription dict.
-        for path in product_block_paths(subscription):
-            inject_in_use_by_ids(path)
+    # query all subscription instances and inject the in_use_by_ids/in_use_by_relations into the subscription dict.
+    instance_to_in_use_by = {
+        instance.subscription_instance_id: instance.in_use_by
+        for instance in eagerload_all_subscription_instances_only_inuseby(subscription_model.subscription_id)
+    }
+    inject_in_use_by_ids_v2(subscription, instance_to_in_use_by)
 
     subscription["customer_descriptions"] = customer_descriptions
orchestrator/services/workflows.py
CHANGED

@@ -39,6 +39,7 @@ def _to_workflow_schema(workflow: WorkflowTable, include_steps: bool = False) ->
         workflow_id=workflow.workflow_id,
         name=workflow.name,
         target=workflow.target,
+        is_task=workflow.is_task,
         description=workflow.description,
         created_at=workflow.created_at,
         **extra_kwargs,

@@ -63,10 +64,10 @@ def get_workflow_by_name(workflow_name: str) -> WorkflowTable | None:
     return db.session.scalar(select(WorkflowTable).where(WorkflowTable.name == workflow_name))
 
 
-def
+def get_validation_product_workflows_for_subscription(
     subscription: SubscriptionTable,
 ) -> list:
-    return [workflow.name for workflow in subscription.product.workflows if workflow.target == Target.
+    return [workflow.name for workflow in subscription.product.workflows if workflow.target == Target.VALIDATE]
 
 
 def start_validation_workflow_for_workflows(

@@ -78,9 +79,12 @@ def start_validation_workflow_for_workflows(
     result = []
 
     for workflow_name in workflows:
-
-
+        target_system = TARGET_DEFAULT_USABLE_MAP[Target.SYSTEM]
+        system_usable_when = WF_USABLE_MAP.get(workflow_name, target_system)
+        target_validate = TARGET_DEFAULT_USABLE_MAP[Target.VALIDATE]
+        validate_usable_when = WF_USABLE_MAP.get(workflow_name, target_validate)
 
+        usable_when = system_usable_when + validate_usable_when
         if subscription.status in usable_when and (
             product_type_filter is None or subscription.product.product_type == product_type_filter
         ):
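The merged check above means a validation workflow may start when the subscription status appears in either the SYSTEM or the VALIDATE usable-state list, with any `WF_USABLE_MAP` override applied to both lookups. A self-contained sketch of that merge using stand-in dicts (the real maps live in `orchestrator.services.subscriptions` and are keyed by `Target` members, not strings):

TARGET_DEFAULT_USABLE_MAP = {"SYSTEM": ["active"], "VALIDATE": ["active"]}
WF_USABLE_MAP = {"validate_my_product": ["active", "provisioning"]}  # hypothetical override


def usable_states(workflow_name: str) -> list[str]:
    # Mirrors the four lookups plus concatenation from the diff above.
    system_usable_when = WF_USABLE_MAP.get(workflow_name, TARGET_DEFAULT_USABLE_MAP["SYSTEM"])
    validate_usable_when = WF_USABLE_MAP.get(workflow_name, TARGET_DEFAULT_USABLE_MAP["VALIDATE"])
    return system_usable_when + validate_usable_when


# An override applies to both lookups, so its entries appear twice; the
# subsequent `subscription.status in usable_when` membership test is unaffected.
assert usable_states("validate_my_product") == ["active", "provisioning", "active", "provisioning"]
assert usable_states("other_workflow") == ["active", "active"]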
orchestrator/settings.py
CHANGED

@@ -55,7 +55,6 @@ class AppSettings(BaseSettings):
     MAIL_PORT: int = 25
     MAIL_STARTTLS: bool = False
     CACHE_URI: RedisDsn = "redis://localhost:6379/0"  # type: ignore
-    CACHE_DOMAIN_MODELS: bool = False
     CACHE_HMAC_SECRET: str | None = None  # HMAC signing key, used when pickling results in the cache
     REDIS_RETRY_COUNT: NonNegativeInt = Field(
         2, description="Number of retries for redis connection errors/timeouts, 0 to disable"

@@ -85,11 +84,9 @@ class AppSettings(BaseSettings):
     ENABLE_GRAPHQL_DEPRECATION_CHECKER: bool = True
     ENABLE_GRAPHQL_PROFILING_EXTENSION: bool = False
     ENABLE_GRAPHQL_STATS_EXTENSION: bool = False
+    ENABLE_PROMETHEUS_METRICS_ENDPOINT: bool = False
     VALIDATE_OUT_OF_SYNC_SUBSCRIPTIONS: bool = False
     FILTER_BY_MODE: Literal["partial", "exact"] = "exact"
-    ENABLE_SUBSCRIPTION_MODEL_OPTIMIZATIONS: bool = (
-        True  # True=ignore cache + optimized DB queries; False=use cache + unoptimized DB queries. Remove in #900
-    )
 
 
 app_settings = AppSettings()
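`AppSettings` is a pydantic `BaseSettings` class, so the new flag can be flipped from the environment without code changes; the metrics wiring itself lands in `orchestrator/app.py` and the new `orchestrator/metrics/` package per the file list above. A minimal sketch:

import os

# Must be set before orchestrator.settings is imported, because the module
# instantiates app_settings at import time.
os.environ["ENABLE_PROMETHEUS_METRICS_ENDPOINT"] = "True"

from orchestrator.settings import AppSettings

assert AppSettings().ENABLE_PROMETHEUS_METRICS_ENDPOINT is True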
orchestrator/utils/get_subscription_dict.py
CHANGED

@@ -2,15 +2,11 @@ from uuid import UUID
 
 from orchestrator.domain.base import SubscriptionModel
 from orchestrator.services.subscriptions import _generate_etag, build_domain_model, build_extended_domain_model
-from orchestrator.utils.redis import from_redis
 
 
 async def get_subscription_dict(subscription_id: UUID, inject_inuseby: bool = True) -> tuple[dict, str]:
     """Helper function to get subscription dict by uuid from db or cache."""
 
-    if cached_model := from_redis(subscription_id):
-        return cached_model  # type: ignore
-
     subscription_model = SubscriptionModel.from_subscription(subscription_id)
 
     if not inject_inuseby:
orchestrator/utils/redis.py
CHANGED

@@ -10,11 +10,9 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import functools
 from collections.abc import AsyncGenerator
 from contextlib import asynccontextmanager
-from os import getenv
-from typing import Any, Callable
+from typing import Any
 from uuid import UUID
 
 from anyio import CancelScope, get_cancelled_exc_class

@@ -22,9 +20,7 @@ from redis.asyncio import Redis as AIORedis
 from redis.asyncio.client import Pipeline, PubSub
 from structlog import get_logger
 
-from orchestrator.services.subscriptions import _generate_etag
 from orchestrator.settings import app_settings
-from orchestrator.utils.json import PY_JSON_TYPES, json_dumps, json_loads
 from orchestrator.utils.redis_client import (
     create_redis_asyncio_client,
     create_redis_client,

@@ -37,52 +33,6 @@ cache = create_redis_client(app_settings.CACHE_URI)
 ONE_WEEK = 3600 * 24 * 7
 
 
-def caching_models_enabled() -> bool:
-    return getenv("AIOCACHE_DISABLE", "0") == "0" and app_settings.CACHE_DOMAIN_MODELS
-
-
-def to_redis(subscription: dict[str, Any]) -> str | None:
-    if caching_models_enabled():
-        logger.info("Setting cache for subscription", subscription=subscription["subscription_id"])
-        etag = _generate_etag(subscription)
-        cache.set(f"orchestrator:domain:{subscription['subscription_id']}", json_dumps(subscription), ex=ONE_WEEK)
-        cache.set(f"orchestrator:domain:etag:{subscription['subscription_id']}", etag, ex=ONE_WEEK)
-        return etag
-
-    logger.warning("Caching disabled, not caching subscription", subscription=subscription["subscription_id"])
-    return None
-
-
-def from_redis(subscription_id: UUID) -> tuple[PY_JSON_TYPES, str] | None:
-    log = logger.bind(subscription_id=subscription_id)
-
-    if app_settings.ENABLE_SUBSCRIPTION_MODEL_OPTIMIZATIONS:
-        # TODO #900 remove toggle and remove usage of this function in get_subscription_dict
-        log.info("Using SubscriptionModel optimization, not loading subscription from redis cache")
-        return None
-
-    if caching_models_enabled():
-        log.debug("Try to retrieve subscription from cache")
-        obj = cache.get(f"orchestrator:domain:{subscription_id}")
-        etag = cache.get(f"orchestrator:domain:etag:{subscription_id}")
-        if obj and etag:
-            log.info("Retrieved subscription from cache")
-            return json_loads(obj), etag.decode("utf-8")
-        log.info("Subscription not found in cache")
-        return None
-    log.warning("Caching disabled, not loading subscription")
-    return None
-
-
-def delete_from_redis(subscription_id: UUID) -> None:
-    if caching_models_enabled():
-        logger.info("Deleting subscription object from cache", subscription_id=subscription_id)
-        cache.delete(f"orchestrator:domain:{subscription_id}")
-        cache.delete(f"orchestrator:domain:etag:{subscription_id}")
-    else:
-        logger.warning("Caching disabled, not deleting subscription", subscription=subscription_id)
-
-
 def default_get_subscription_id(data: Any) -> UUID:
     if hasattr(data, "subscription_id"):
         return data.subscription_id

@@ -91,22 +41,6 @@ def default_get_subscription_id(data: Any) -> UUID:
     return data
 
 
-def delete_subscription_from_redis(
-    extract_fn: Callable[..., UUID] = default_get_subscription_id,
-) -> Callable[[Callable[..., Any]], Callable[..., Any]]:
-    def _delete_subscription_from_redis(func: Callable[..., Any]) -> Callable[..., Any]:
-        @functools.wraps(func)
-        async def wrapper(*args: tuple, **kwargs: dict[str, Any]) -> Any:
-            data = await func(*args, **kwargs)
-            key = extract_fn(data)
-            delete_from_redis(key)
-            return data
-
-        return wrapper
-
-    return _delete_subscription_from_redis
-
-
 async def delete_keys_matching_pattern(_cache: AIORedis, pattern: str, chunksize: int = 5000) -> int:
     """Delete all keys matching the given pattern.
orchestrator/workflows/modify_note.py
CHANGED

@@ -13,11 +13,10 @@
 from orchestrator.db import db
 from orchestrator.forms import SubmitFormPage
 from orchestrator.services import subscriptions
-from orchestrator.settings import app_settings
 from orchestrator.targets import Target
 from orchestrator.utils.json import to_serializable
-from orchestrator.workflow import StepList,
-from orchestrator.workflows.steps import
+from orchestrator.workflow import StepList, done, init, step, workflow
+from orchestrator.workflows.steps import store_process_subscription
 from orchestrator.workflows.utils import wrap_modify_initial_input_form
 from pydantic_forms.types import FormGenerator, State, UUIDstr
 from pydantic_forms.validators import LongText

@@ -54,11 +53,4 @@ def store_subscription_note(subscription_id: UUIDstr, note: str) -> State:
 
 @workflow("Modify Note", initial_input_form=wrap_modify_initial_input_form(initial_input_form), target=Target.MODIFY)
 def modify_note() -> StepList:
-
-    return (
-        init
-        >> store_process_subscription(Target.MODIFY)
-        >> store_subscription_note
-        >> push_subscriptions(cache_domain_models)
-        >> done
-    )
+    return init >> store_process_subscription(Target.MODIFY) >> store_subscription_note >> done