orchestrator-core 4.5.1a1__py3-none-any.whl → 4.5.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- orchestrator/__init__.py +3 -12
- orchestrator/agentic_app.py +48 -29
- orchestrator/api/api_v1/api.py +8 -6
- orchestrator/api/api_v1/endpoints/processes.py +2 -0
- orchestrator/api/api_v1/endpoints/search.py +26 -7
- orchestrator/cli/main.py +2 -2
- orchestrator/cli/search/__init__.py +32 -0
- orchestrator/devtools/populator.py +16 -0
- orchestrator/domain/base.py +2 -7
- orchestrator/domain/lifecycle.py +24 -7
- orchestrator/llm_settings.py +9 -3
- orchestrator/log_config.py +1 -0
- orchestrator/migrations/helpers.py +7 -1
- orchestrator/schemas/search.py +13 -0
- orchestrator/schemas/workflow.py +1 -0
- orchestrator/search/agent/__init__.py +15 -2
- orchestrator/search/agent/agent.py +30 -15
- orchestrator/search/agent/prompts.py +75 -37
- orchestrator/search/agent/state.py +13 -0
- orchestrator/search/agent/tools.py +148 -11
- orchestrator/search/core/__init__.py +12 -0
- orchestrator/search/core/embedding.py +13 -4
- orchestrator/search/core/exceptions.py +14 -0
- orchestrator/search/core/types.py +15 -0
- orchestrator/search/core/validators.py +13 -0
- orchestrator/search/docs/running_local_text_embedding_inference.md +1 -0
- orchestrator/search/filters/__init__.py +13 -0
- orchestrator/search/filters/base.py +84 -61
- orchestrator/search/filters/date_filters.py +13 -0
- orchestrator/search/filters/definitions.py +16 -2
- orchestrator/search/filters/ltree_filters.py +16 -3
- orchestrator/search/filters/numeric_filter.py +13 -0
- orchestrator/search/indexing/__init__.py +13 -0
- orchestrator/search/indexing/indexer.py +14 -3
- orchestrator/search/indexing/registry.py +13 -0
- orchestrator/search/indexing/tasks.py +17 -1
- orchestrator/search/indexing/traverse.py +17 -5
- orchestrator/search/llm_migration.py +108 -0
- orchestrator/search/retrieval/__init__.py +13 -0
- orchestrator/search/retrieval/builder.py +23 -8
- orchestrator/search/retrieval/engine.py +36 -34
- orchestrator/search/retrieval/exceptions.py +90 -0
- orchestrator/search/retrieval/pagination.py +13 -0
- orchestrator/search/retrieval/retrievers/__init__.py +26 -0
- orchestrator/search/retrieval/retrievers/base.py +123 -0
- orchestrator/search/retrieval/retrievers/fuzzy.py +94 -0
- orchestrator/search/retrieval/retrievers/hybrid.py +277 -0
- orchestrator/search/retrieval/retrievers/semantic.py +94 -0
- orchestrator/search/retrieval/retrievers/structured.py +39 -0
- orchestrator/search/retrieval/utils.py +21 -7
- orchestrator/search/retrieval/validation.py +54 -76
- orchestrator/search/schemas/__init__.py +12 -0
- orchestrator/search/schemas/parameters.py +13 -0
- orchestrator/search/schemas/results.py +15 -1
- orchestrator/services/processes.py +2 -1
- orchestrator/settings.py +7 -0
- orchestrator/utils/state.py +6 -1
- orchestrator/workflows/steps.py +16 -1
- {orchestrator_core-4.5.1a1.dist-info → orchestrator_core-4.5.2.dist-info}/METADATA +13 -11
- {orchestrator_core-4.5.1a1.dist-info → orchestrator_core-4.5.2.dist-info}/RECORD +66 -59
- orchestrator/migrations/versions/schema/2025-08-12_52b37b5b2714_search_index_model_for_llm_integration.py +0 -95
- orchestrator/search/retrieval/retriever.py +0 -447
- /orchestrator/cli/{index_llm.py → search/index_llm.py} +0 -0
- /orchestrator/cli/{resize_embedding.py → search/resize_embedding.py} +0 -0
- /orchestrator/cli/{search_explore.py → search/search_explore.py} +0 -0
- /orchestrator/cli/{speedtest.py → search/speedtest.py} +0 -0
- {orchestrator_core-4.5.1a1.dist-info → orchestrator_core-4.5.2.dist-info}/WHEEL +0 -0
- {orchestrator_core-4.5.1a1.dist-info → orchestrator_core-4.5.2.dist-info}/licenses/LICENSE +0 -0
orchestrator/__init__.py
CHANGED
@@ -13,7 +13,7 @@

 """This is the orchestrator workflow engine."""

-__version__ = "4.5.1a1"
+__version__ = "4.5.2"


 from structlog import get_logger
@@ -25,18 +25,9 @@ logger.info("Starting the orchestrator", version=__version__)
 from orchestrator.llm_settings import llm_settings
 from orchestrator.settings import app_settings

-if llm_settings.
-    try:
-        from importlib import import_module
+if llm_settings.SEARCH_ENABLED or llm_settings.AGENT_ENABLED:

-        from orchestrator.agentic_app import AgenticOrchestratorCore as OrchestratorCore
-
-    except ImportError:
-        logger.error(
-            "Unable to import 'pydantic_ai' module, please install the orchestrator with llm dependencies. `pip install orchestrator-core[llm]",
-        )
-        exit(1)
+    from orchestrator.agentic_app import LLMOrchestratorCore as OrchestratorCore
 else:
     from orchestrator.app import OrchestratorCore  # type: ignore[assignment]

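In short, the lazy import and `try`/`except ImportError` around `AgenticOrchestratorCore` is gone; the module now simply re-exports `LLMOrchestratorCore` when either of the two new flags is set. A minimal sketch of the resulting switch, assuming the flags are supplied as plain environment variables (pydantic `BaseSettings` without an env prefix):

```python
import os

# Hypothetical deployment toggle: either flag makes `orchestrator.OrchestratorCore`
# resolve to LLMOrchestratorCore; with both left unset it stays the plain app class.
os.environ["SEARCH_ENABLED"] = "true"
os.environ["AGENT_ENABLED"] = "false"

from orchestrator import OrchestratorCore  # -> orchestrator.agentic_app.LLMOrchestratorCore
```

Because the `pydantic_ai` imports in `agentic_app.py` move under `TYPE_CHECKING` (next section), this import no longer hard-exits when the llm extras are missing; failures now surface later, when the constructor runs the search migration or mounts the agent.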
orchestrator/agentic_app.py
CHANGED
@@ -1,8 +1,8 @@
 #!/usr/bin/env python3
 """The main application module.

-This module contains the main `
-provides the ability to run the CLI.
+This module contains the main `LLMOrchestratorCore` class for the `FastAPI` backend and
+provides the ability to run the CLI with LLM features (search and/or agent).
 """
 # Copyright 2019-2025 SURF
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,65 +16,84 @@ provides the ability to run the CLI.
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from typing import Any
+from typing import TYPE_CHECKING, Any

 import typer
-from pydantic_ai.models.openai import OpenAIModel
-from pydantic_ai.toolsets import FunctionToolset
 from structlog import get_logger

 from orchestrator.app import OrchestratorCore
 from orchestrator.cli.main import app as cli_app
 from orchestrator.llm_settings import LLMSettings, llm_settings

+if TYPE_CHECKING:
+    from pydantic_ai.models.openai import OpenAIModel
+    from pydantic_ai.toolsets import FunctionToolset
+
 logger = get_logger(__name__)


-class 
+class LLMOrchestratorCore(OrchestratorCore):
     def __init__(
         self,
         *args: Any,
-        llm_model: OpenAIModel | str = "gpt-4o-mini",
         llm_settings: LLMSettings = llm_settings,
+        agent_model: "OpenAIModel | str | None" = None,
+        agent_tools: "list[FunctionToolset] | None" = None,
         **kwargs: Any,
     ) -> None:
-        """Initialize the `
+        """Initialize the `LLMOrchestratorCore` class.

-        This class 
+        This class extends `OrchestratorCore` with LLM features (search and agent).
+        It runs the search migration and mounts the agent endpoint based on feature flags.

         Args:
            *args: All the normal arguments passed to the `OrchestratorCore` class.
-            llm_model: An OpenAI model class or string, not limited to OpenAI models (gpt-4o-mini etc)
            llm_settings: A class of settings for the LLM
+            agent_model: Override the agent model (defaults to llm_settings.AGENT_MODEL)
            agent_tools: A list of tools that can be used by the agent
            **kwargs: Additional arguments passed to the `OrchestratorCore` class.

        Returns:
            None
        """
-        self.llm_model = llm_model
-        self.agent_tools = agent_tools
         self.llm_settings = llm_settings
+        self.agent_model = agent_model or llm_settings.AGENT_MODEL
+        self.agent_tools = agent_tools

         super().__init__(*args, **kwargs)

-        self.
+        # Run search migration if search or agent is enabled
+        if self.llm_settings.SEARCH_ENABLED or self.llm_settings.AGENT_ENABLED:
+            logger.info("Running search migration")
+            try:
+                from orchestrator.db import db
+                from orchestrator.search.llm_migration import run_migration
+
+                with db.engine.begin() as connection:
+                    run_migration(connection)
+            except ImportError as e:
+                logger.error(
+                    "Unable to run search migration. Please install search dependencies: "
+                    "`pip install orchestrator-core[search]`",
+                    error=str(e),
+                )
+                raise
+
+        # Mount agent endpoint if agent is enabled
+        if self.llm_settings.AGENT_ENABLED:
+            logger.info("Initializing agent features", model=self.agent_model)
+            try:
+                from orchestrator.search.agent import build_agent_router
+
+                agent_app = build_agent_router(self.agent_model, self.agent_tools)
+                self.mount("/agent", agent_app)
+            except ImportError as e:
+                logger.error(
+                    "Unable to initialize agent features. Please install agent dependencies: "
+                    "`pip install orchestrator-core[agent]`",
+                    error=str(e),
+                )
+                raise


 main_typer_app = typer.Typer()
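To make the new flow concrete, a hedged construction sketch; the `base_settings=AppSettings()` wiring follows the usual orchestrator-core startup pattern and is not part of this diff:

```python
from orchestrator.agentic_app import LLMOrchestratorCore
from orchestrator.llm_settings import LLMSettings
from orchestrator.settings import AppSettings

# With both flags on, __init__ runs the search migration against the configured
# database and mounts the agent router at /agent; missing optional dependencies
# now raise an ImportError after being logged.
settings = LLMSettings(SEARCH_ENABLED=True, AGENT_ENABLED=True)

app = LLMOrchestratorCore(
    base_settings=AppSettings(),
    llm_settings=settings,
    agent_model="gpt-4o-mini",  # optional; defaults to llm_settings.AGENT_MODEL
)
```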
orchestrator/api/api_v1/api.py
CHANGED
@@ -76,20 +76,22 @@ api_router.include_router(user.router, prefix="/user", tags=["Core", "User"], de
 api_router.include_router(
     settings.router, prefix="/settings", tags=["Core", "Settings"], dependencies=[Depends(authorize)]
 )
-api_router.include_router(
+api_router.include_router(
+    settings.ws_router, prefix="/settings", tags=["Core", "Settings"]
+)  # Auth on the websocket is handled in the Websocket Manager
 api_router.include_router(health.router, prefix="/health", tags=["Core"])
 api_router.include_router(
     translations.router,
     prefix="/translations",
     tags=["Core", "Translations"],
 )
-api_router.include_router(
+api_router.include_router(
+    ws.router, prefix="/ws", tags=["Core", "Events"]
+)  # Auth on the websocket is handled in the Websocket Manager

-if llm_settings.
+if llm_settings.SEARCH_ENABLED:
     from orchestrator.api.api_v1.endpoints import search

     api_router.include_router(
-        search.router,
-        prefix="/search",
-        tags=["Core", "Search"],
+        search.router, prefix="/search", tags=["Core", "Search"], dependencies=[Depends(authorize)]
     )
orchestrator/api/api_v1/endpoints/processes.py
CHANGED
@@ -244,6 +244,8 @@ def continue_awaiting_process_endpoint(
         continue_awaiting_process(process, token=token, input_data=json_data, broadcast_func=broadcast_func)
     except AssertionError as e:
         raise_status(HTTPStatus.NOT_FOUND, str(e))
+    except ValueError as e:
+        raise_status(HTTPStatus.BAD_REQUEST, str(e))


 @router.post(
orchestrator/api/api_v1/endpoints/search.py
CHANGED
@@ -1,3 +1,16 @@
+# Copyright 2019-2025 SURF, GÉANT.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 from typing import Any, Literal, overload

 from fastapi import APIRouter, HTTPException, Query, status
@@ -11,6 +24,7 @@ from orchestrator.db import (
     db,
 )
 from orchestrator.domain.base import SubscriptionModel
+from orchestrator.domain.context_cache import cache_subscription_models
 from orchestrator.schemas.search import (
     PageInfoSchema,
     PathsResponse,
@@ -179,17 +193,22 @@ async def search_subscriptions(
     page_info = PageInfoSchema(has_next_page=has_next_page, next_page_cursor=next_page_cursor)

     search_info_map = {res.entity_id: res for res in search_response.results}
+
+    with cache_subscription_models():
+        subscriptions_data = {
+            sub_id: SubscriptionModel.from_subscription(sub_id).model_dump(exclude_unset=False)
+            for sub_id in search_info_map
+        }
+
+    results_data = [
+        SubscriptionSearchResult(
+            subscription=format_special_types(subscriptions_data[sub_id]),
             score=search_info.score,
             perfect_match=search_info.perfect_match,
             matching_field=search_info.matching_field,
         )
+        for sub_id, search_info in search_info_map.items()
+    ]

     return SearchResultsSchema(data=results_data, page_info=page_info, search_metadata=search_response.metadata)

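For the response assembly, the rewrite first materialises every matched subscription inside `cache_subscription_models()` and only then builds the `SubscriptionSearchResult` objects. A hedged sketch of the same pattern in isolation (the ids list is hypothetical), on the assumption that the context manager memoises `from_subscription` lookups for its duration:

```python
from orchestrator.domain.base import SubscriptionModel
from orchestrator.domain.context_cache import cache_subscription_models

subscription_ids = ["aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee"]  # hypothetical search hits

# Build all domain models within one cache context so repeated lookups for the
# same subscription (and shared product blocks) are not re-fetched per result.
with cache_subscription_models():
    subscriptions_data = {
        sub_id: SubscriptionModel.from_subscription(sub_id).model_dump(exclude_unset=False)
        for sub_id in subscription_ids
    }
```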
orchestrator/cli/main.py
CHANGED
@@ -25,8 +25,8 @@ app.add_typer(scheduler.app, name="scheduler", help="Access all the scheduler fu
 app.add_typer(database.app, name="db", help="Interact with the application database")
 app.add_typer(generate.app, name="generate", help="Generate products, workflows and other artifacts")

-if llm_settings.
-    from orchestrator.cli import index_llm, resize_embedding, search_explore, speedtest
+if llm_settings.SEARCH_ENABLED:
+    from orchestrator.cli.search import index_llm, resize_embedding, search_explore, speedtest

     app.add_typer(index_llm.app, name="index", help="(Re-)Index the search table.")
     app.add_typer(search_explore.app, name="search", help="Try out different search types.")
orchestrator/cli/search/__init__.py
ADDED
@@ -0,0 +1,32 @@
+# Copyright 2019-2020 SURF.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import typer
+
+from orchestrator.cli.search import index_llm, resize_embedding, search_explore, speedtest
+
+
+def register_commands(app: typer.Typer) -> None:
+    """Register all LLM/search related commands to the main app."""
+    app.add_typer(index_llm.app, name="index", help="(Re-)Index the search table.")
+    app.add_typer(search_explore.app, name="search", help="Try out different search types.")
+    app.add_typer(
+        resize_embedding.app,
+        name="embedding",
+        help="Resize the vector dimension of the embedding column in the search table.",
+    )
+    app.add_typer(
+        speedtest.app,
+        name="speedtest",
+        help="Search performance testing and analysis.",
+    )
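Since `register_commands` is exported here, an application with its own Typer CLI can reuse the same sub-commands instead of going through `orchestrator.cli.main`. A small sketch, assuming the search extra is installed:

```python
import typer

from orchestrator.cli.search import register_commands

cli = typer.Typer()      # hypothetical: your application's own CLI
register_commands(cli)   # adds the index, search, embedding and speedtest commands

if __name__ == "__main__":
    cli()
```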
orchestrator/devtools/populator.py
CHANGED
@@ -371,6 +371,22 @@ class Populator:
         self.log.info("Started modify workflow")
         return self._start_workflow(workflow_name, subscription_id=subscription_id, **kwargs)

+    def start_reconcile_workflow(self, workflow_name: str, subscription_id: UUIDstr | UUID, **kwargs: Any) -> UUIDstr:
+        """Start a reconcile workflow for the provided name and subscription_id.
+
+        Args:
+            workflow_name: workflow name
+            subscription_id: uuid of the subscription you want to modify
+            kwargs: values to be used as form input
+
+        Returns: the process_id of the workflow process
+
+        """
+        subscription_id = str(subscription_id)
+        self.log = self.log.bind(subscription_id=subscription_id)
+        self.log.info("Started reconcile workflow")
+        return self._start_workflow(workflow_name, subscription_id=subscription_id, **kwargs)
+
     def start_verify_workflow(self, workflow_name: str, subscription_id: UUIDstr | UUID) -> UUIDstr:
         subscription_id = str(subscription_id)
         self.log = self.log.bind(subscription_id=subscription_id)
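A hedged usage sketch of the new method; the workflow name and subscription id are hypothetical, and the call mirrors the existing `start_modify_workflow` pattern:

```python
# Assuming `populator` is an already-configured Populator for the product under test:
process_id = populator.start_reconcile_workflow(
    "reconcile_my_product",                      # hypothetical workflow name
    "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",      # subscription to reconcile
)
```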
orchestrator/domain/base.py
CHANGED
@@ -614,9 +614,7 @@ class ProductBlockModel(DomainModel):
         product_blocks_in_model = cls._get_depends_on_product_block_types()
         product_blocks_types_in_model = get_depends_on_product_block_type_list(product_blocks_in_model)

-        product_blocks_in_model = set(
-            flatten(map(attrgetter("__names__"), product_blocks_types_in_model))
-        )  # type: ignore
+        product_blocks_in_model = set(flatten(map(attrgetter("__names__"), product_blocks_types_in_model)))  # type: ignore

         missing_product_blocks_in_db = product_blocks_in_model - product_blocks_in_db  # type: ignore
         missing_product_blocks_in_model = product_blocks_in_db - product_blocks_in_model  # type: ignore
@@ -1084,9 +1082,7 @@ class SubscriptionModel(DomainModel):
         product_blocks_in_model = cls._get_depends_on_product_block_types()
         product_blocks_types_in_model = get_depends_on_product_block_type_list(product_blocks_in_model)

-        product_blocks_in_model = set(
-            flatten(map(attrgetter("__names__"), product_blocks_types_in_model))
-        )  # type: ignore
+        product_blocks_in_model = set(flatten(map(attrgetter("__names__"), product_blocks_types_in_model)))  # type: ignore

         missing_product_blocks_in_db = product_blocks_in_model - product_blocks_in_db  # type: ignore
         missing_product_blocks_in_model = product_blocks_in_db - product_blocks_in_model  # type: ignore
@@ -1294,7 +1290,6 @@ class SubscriptionModel(DomainModel):
     # Some common functions shared by from_other_product and from_subscription
     @classmethod
     def _get_subscription(cls: type[S], subscription_id: UUID | UUIDstr) -> SubscriptionTable | None:
-
         if not isinstance(subscription_id, UUID | UUIDstr):
             raise TypeError(f"subscription_id is of type {type(subscription_id)} instead of UUID | UUIDstr")

orchestrator/domain/lifecycle.py
CHANGED
@@ -16,11 +16,16 @@ from typing import TYPE_CHECKING, TypeVar
 import strawberry
 import structlog

+from orchestrator.settings import LifecycleValidationMode, app_settings
 from orchestrator.types import SubscriptionLifecycle
 from pydantic_forms.types import strEnum

 if TYPE_CHECKING:
-    from orchestrator.domain.base import DomainModel
+    from orchestrator.domain.base import DomainModel, SubscriptionModel
+else:
+    SubscriptionModel = None
+    DomainModel = None
+T = TypeVar("T", bound=SubscriptionModel)

 logger = structlog.get_logger(__name__)

@@ -71,9 +76,21 @@ def validate_lifecycle_status(
     )


+def validate_subscription_model_product_type(
+    subscription: SubscriptionModel,
+    validation_mode: LifecycleValidationMode = app_settings.LIFECYCLE_VALIDATION_MODE,
+) -> None:
+    """Validate that a subscription model has been instantiated with the correct product type class for its lifecycle status."""
+
+    actual_class = subscription.__class__
+    expected_class = lookup_specialized_type(actual_class, subscription.status)
+
+    if actual_class != expected_class:
+        msg = f"Subscription of type {actual_class} should use {expected_class} for lifecycle status '{subscription.status}'"
+        if validation_mode == LifecycleValidationMode.STRICT:
+            logger.error(msg)
+            raise ValueError(msg)
+        if validation_mode == LifecycleValidationMode.LOOSE:
+            logger.warning(msg)
+        elif validation_mode == LifecycleValidationMode.IGNORED:
+            pass
orchestrator/llm_settings.py
CHANGED
@@ -18,7 +18,10 @@ logger = get_logger(__name__)


 class LLMSettings(BaseSettings):
+    # Feature flags for LLM functionality
+    SEARCH_ENABLED: bool = False  # Enable search/indexing with embeddings
+    AGENT_ENABLED: bool = False  # Enable agentic functionality
+
     # Pydantic-ai Agent settings
     AGENT_MODEL: str = "gpt-4o-mini"  # See pydantic-ai docs for supported models.
     AGENT_MODEL_VERSION: str = "2025-01-01-preview"
@@ -30,16 +33,19 @@
         0.1, description="Safety margin as a percentage (e.g., 0.1 for 10%) for token budgeting.", ge=0, le=1
     )

-    # The following settings are only needed for local models.
+    # The following settings are only needed for local models or system constraints.
     # By default, they are set conservative assuming a small model like All-MiniLM-L6-V2.
     OPENAI_BASE_URL: str | None = None
     EMBEDDING_FALLBACK_MAX_TOKENS: int | None = 512
-    EMBEDDING_MAX_BATCH_SIZE: int | None =
+    EMBEDDING_MAX_BATCH_SIZE: int | None = None

     # General LiteLLM settings
     LLM_MAX_RETRIES: int = 3
     LLM_TIMEOUT: int = 30

+    # Toggle creation of extensions
+    LLM_FORCE_EXTENTION_MIGRATION: bool = False
+
     @field_validator("EMBEDDING_MODEL")
     def validate_embedding_model_format(cls, v: str) -> str:
         """Validate that embedding model is in 'vendor/model' format."""
orchestrator/migrations/helpers.py
CHANGED
@@ -155,7 +155,7 @@ def create_workflow(conn: sa.engine.Connection, workflow: dict) -> None:
         conn: DB connection as available in migration main file.
         workflow: Dict with data for a new workflow.
             name: Name of the workflow.
-            target: Target of the workflow ("CREATE", "MODIFY", "TERMINATE", "SYSTEM")
+            target: Target of the workflow ("CREATE", "MODIFY", "RECONCILE", "TERMINATE", "SYSTEM")
             description: Description of the workflow.
             product_type: Product type to add the workflow to.

@@ -166,12 +166,16 @@
         "is_task": False,
         "description": "workflow description",
         "product_type": "product_type",
+        "product_tag": "product_tag",
     }
     >>> create_workflow(conn, workflow)
     """
     if not workflow.get("is_task", False):
         workflow["is_task"] = False

+    if not workflow.get("product_tag"):
+        workflow["product_tag"] = None
+
     if has_table_column(table_name="workflows", column_name="is_task", conn=conn):
         query = """
             WITH new_workflow AS (
@@ -186,6 +190,7 @@
             FROM products AS p
             CROSS JOIN new_workflow AS nw
             WHERE p.product_type = :product_type
+            AND (:product_tag IS NULL OR p.tag = :product_tag)
             ON CONFLICT DO NOTHING
         """
     else:
@@ -203,6 +208,7 @@
             FROM products AS p
             CROSS JOIN new_workflow AS nw
             WHERE p.product_type = :product_type
+            AND (:product_tag IS NULL OR p.tag = :product_tag)
             ON CONFLICT DO NOTHING
         """

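Spelled out, a migration that attaches a workflow to a single product tag within a product type might look like this; the workflow, product type, and tag names are hypothetical, and omitting `product_tag` keeps the previous match-by-product-type behaviour:

```python
from alembic import op

from orchestrator.migrations.helpers import create_workflow


def upgrade() -> None:
    conn = op.get_bind()  # Alembic connection, as in other orchestrator migrations
    create_workflow(
        conn,
        {
            "name": "reconcile_node",        # hypothetical workflow name
            "target": "RECONCILE",           # target value added in this release
            "description": "Reconcile a node",
            "product_type": "Node",          # hypothetical product type
            "product_tag": "NODE",           # restrict to products with this tag
        },
    )
```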
orchestrator/schemas/search.py
CHANGED
@@ -1,3 +1,16 @@
+# Copyright 2019-2025 SURF, GÉANT.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 from datetime import datetime
 from typing import Any, Generic, TypeVar
 from uuid import UUID
orchestrator/schemas/workflow.py
CHANGED
@@ -60,6 +60,7 @@ class SubscriptionWorkflowListsSchema(OrchestratorBaseModel):
     modify: list[WorkflowListItemSchema]
     terminate: list[WorkflowListItemSchema]
     system: list[WorkflowListItemSchema]
+    reconcile: list[WorkflowListItemSchema]
     validate_: list[WorkflowListItemSchema] = Field(default_factory=list, alias="validate")


orchestrator/search/agent/__init__.py
CHANGED
@@ -1,8 +1,21 @@
+# Copyright 2019-2025 SURF, GÉANT.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 # This module requires: pydantic-ai==0.7.0, ag-ui-protocol>=0.1.8


-from orchestrator.search.agent.agent import 
+from orchestrator.search.agent.agent import build_agent_router

 __all__ = [
-    "
+    "build_agent_router",
 ]
orchestrator/search/agent/agent.py
CHANGED
@@ -1,13 +1,26 @@
+# Copyright 2019-2025 SURF, GÉANT.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 from typing import Any

 import structlog
-from fastapi import 
-from pydantic_ai.ag_ui import StateDeps
+from fastapi import APIRouter, HTTPException, Request
+from pydantic_ai.ag_ui import StateDeps, handle_ag_ui_request
 from pydantic_ai.agent import Agent
 from pydantic_ai.models.openai import OpenAIModel
 from pydantic_ai.settings import ModelSettings
 from pydantic_ai.toolsets import FunctionToolset
-from starlette.
+from starlette.responses import Response

 from orchestrator.search.agent.prompts import get_base_instructions, get_dynamic_instructions
 from orchestrator.search.agent.state import SearchState
@@ -16,17 +29,9 @@ from orchestrator.search.agent.tools import search_toolset
 logger = structlog.get_logger(__name__)


-def 
-    @app.api_route("/{path:path}", methods=["GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS", "HEAD"])
-    async def _disabled(path: str) -> None:
-        raise HTTPException(status_code=503, detail=f"Agent disabled: {reason}")
-
-    return app
-
+def build_agent_router(model: str | OpenAIModel, toolsets: list[FunctionToolset[Any]] | None = None) -> APIRouter:
+    router = APIRouter()

-def build_agent_app(model: str | OpenAIModel, toolsets: list[FunctionToolset[Any]] | None = None) -> ASGIApp:
     try:
         toolsets = toolsets + [search_toolset] if toolsets else [search_toolset]

@@ -41,7 +46,17 @@ def build_agent_app(model: str | OpenAIModel, toolsets: list[FunctionToolset[Any
         agent.instructions(get_base_instructions)
         agent.instructions(get_dynamic_instructions)

+        @router.post("/")
+        async def agent_endpoint(request: Request) -> Response:
+            return await handle_ag_ui_request(agent, request, deps=StateDeps(SearchState()))
+
+        return router
     except Exception as e:
         logger.error("Agent init failed; serving disabled stub.", error=str(e))
+        error_msg = f"Agent disabled: {str(e)}"
+
+        @router.api_route("/{path:path}", methods=["GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS", "HEAD"])
+        async def _disabled(path: str) -> None:
+            raise HTTPException(status_code=503, detail=error_msg)
+
+        return router