lfx-nightly 0.2.1.dev7__py3-none-any.whl → 0.3.0.dev3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lfx/__main__.py +137 -6
- lfx/_assets/component_index.json +1 -1
- lfx/base/agents/agent.py +9 -5
- lfx/base/agents/altk_base_agent.py +5 -3
- lfx/base/agents/events.py +1 -1
- lfx/base/models/unified_models.py +1 -1
- lfx/base/models/watsonx_constants.py +10 -7
- lfx/base/prompts/api_utils.py +40 -5
- lfx/cli/__init__.py +10 -2
- lfx/cli/script_loader.py +5 -4
- lfx/cli/validation.py +6 -3
- lfx/components/datastax/astradb_assistant_manager.py +4 -2
- lfx/components/docling/docling_remote.py +1 -0
- lfx/components/langchain_utilities/ibm_granite_handler.py +211 -0
- lfx/components/langchain_utilities/tool_calling.py +24 -1
- lfx/components/llm_operations/lambda_filter.py +182 -97
- lfx/components/models_and_agents/mcp_component.py +38 -1
- lfx/components/models_and_agents/prompt.py +105 -18
- lfx/components/ollama/ollama_embeddings.py +109 -28
- lfx/components/processing/text_operations.py +580 -0
- lfx/custom/custom_component/component.py +65 -10
- lfx/events/observability/__init__.py +0 -0
- lfx/events/observability/lifecycle_events.py +111 -0
- lfx/field_typing/__init__.py +57 -58
- lfx/graph/graph/base.py +36 -0
- lfx/graph/utils.py +45 -12
- lfx/graph/vertex/base.py +71 -22
- lfx/graph/vertex/vertex_types.py +0 -5
- lfx/inputs/input_mixin.py +1 -0
- lfx/inputs/inputs.py +5 -0
- lfx/interface/components.py +24 -7
- lfx/run/base.py +47 -77
- lfx/schema/__init__.py +50 -0
- lfx/schema/message.py +85 -8
- lfx/schema/workflow.py +171 -0
- lfx/services/deps.py +12 -0
- lfx/services/interfaces.py +43 -1
- lfx/services/schema.py +1 -0
- lfx/services/settings/auth.py +95 -4
- lfx/services/settings/base.py +4 -0
- lfx/services/settings/utils.py +82 -0
- lfx/services/transaction/__init__.py +5 -0
- lfx/services/transaction/service.py +35 -0
- lfx/tests/unit/components/__init__.py +0 -0
- lfx/utils/constants.py +1 -0
- lfx/utils/mustache_security.py +79 -0
- lfx/utils/validate_cloud.py +67 -0
- {lfx_nightly-0.2.1.dev7.dist-info → lfx_nightly-0.3.0.dev3.dist-info}/METADATA +3 -1
- {lfx_nightly-0.2.1.dev7.dist-info → lfx_nightly-0.3.0.dev3.dist-info}/RECORD +51 -42
- {lfx_nightly-0.2.1.dev7.dist-info → lfx_nightly-0.3.0.dev3.dist-info}/WHEEL +0 -0
- {lfx_nightly-0.2.1.dev7.dist-info → lfx_nightly-0.3.0.dev3.dist-info}/entry_points.txt +0 -0
lfx/graph/vertex/base.py
CHANGED
@@ -8,6 +8,9 @@ from collections.abc import AsyncIterator, Callable, Iterator, Mapping
 from enum import Enum
 from typing import TYPE_CHECKING, Any
 
+from ag_ui.core import StepFinishedEvent, StepStartedEvent
+
+from lfx.events.observability.lifecycle_events import observable
 from lfx.exceptions.component import ComponentBuildError
 from lfx.graph.schema import INPUT_COMPONENTS, OUTPUT_COMPONENTS, InterfaceComponentTypes, ResultData
 from lfx.graph.utils import UnbuiltObject, UnbuiltResult, log_transaction
@@ -105,7 +108,6 @@ class Vertex:
         self.use_result = False
         self.build_times: list[float] = []
         self.state = VertexStates.ACTIVE
-        self.log_transaction_tasks: set[asyncio.Task] = set()
         self.output_names: list[str] = [
             output["name"] for output in self.outputs if isinstance(output, dict) and "name" in output
         ]
@@ -180,6 +182,7 @@ class Vertex:
 
         if isinstance(self.built_result, UnbuiltResult):
             return {}
+
         return self.built_result if isinstance(self.built_result, dict) else {"result": self.built_result}
 
     def set_artifacts(self) -> None:
@@ -381,6 +384,7 @@ class Vertex:
             vertex=self,
         )
 
+    @observable
     async def _build(
         self,
         fallback_to_env_vars,
@@ -390,7 +394,6 @@
         """Initiate the build process."""
         await logger.adebug(f"Building {self.display_name}")
         await self._build_each_vertex_in_params_dict()
-
         if self.base_type is None:
             msg = f"Base type for vertex {self.display_name} not found"
             raise ValueError(msg)
@@ -531,11 +534,12 @@
         self,
         flow_id: str | UUID,
         source: Vertex,
-        status,
+        status: str,
         target: Vertex | None = None,
-        error=None,
+        error: str | Exception | None = None,
+        outputs: dict[str, Any] | None = None,
     ) -> None:
-        """Log a transaction asynchronously
+        """Log a transaction asynchronously.
 
         Args:
             flow_id: The ID of the flow
@@ -543,20 +547,16 @@
             status: Transaction status
             target: Optional target vertex
             error: Optional error information
+            outputs: Optional explicit outputs dict (component execution results)
         """
-
-
-
-
-
-        # Create and track new task
-        task = asyncio.create_task(log_transaction(flow_id, source, status, target, error))
-        self.log_transaction_tasks.add(task)
-        task.add_done_callback(self.log_transaction_tasks.discard)
+        try:
+            await log_transaction(flow_id, source, status, target, error, outputs)
+        except Exception as exc:  # noqa: BLE001
+            logger.debug(f"Error logging transaction: {exc!s}")
 
     async def _get_result(
         self,
-        requester: Vertex,
+        requester: Vertex,  # noqa: ARG002
         target_handle_name: str | None = None,  # noqa: ARG002
     ) -> Any:
         """Retrieves the result of the built component.
@@ -566,17 +566,11 @@
         Returns:
             The built result if use_result is True, else the built object.
         """
-        flow_id = self.graph.flow_id
         if not self.built:
-            if flow_id:
-                await self._log_transaction_async(str(flow_id), source=self, target=requester, status="error")
             msg = f"Component {self.display_name} has not been built yet"
             raise ValueError(msg)
 
-
-        if flow_id:
-            await self._log_transaction_async(str(flow_id), source=self, target=requester, status="success")
-        return result
+        return self.built_result if self.use_result else self.built_object
 
     async def _build_vertex_and_update_params(self, key, vertex: Vertex) -> None:
         """Builds a given vertex and updates the params dictionary accordingly."""
@@ -657,6 +651,12 @@
         except Exception as exc:
             tb = traceback.format_exc()
             await logger.aexception(exc)
+            # Log transaction error
+            flow_id = self.graph.flow_id
+            if flow_id:
+                await self._log_transaction_async(
+                    str(flow_id), source=self, target=None, status="error", error=str(exc)
+                )
             msg = f"Error building Component {self.display_name}: \n\n{exc}"
             raise ComponentBuildError(msg, tb) from exc
 
@@ -772,6 +772,19 @@
 
         self.finalize_build()
 
+        # Log transaction after successful build
+        flow_id = self.graph.flow_id
+        if flow_id:
+            # Extract outputs from outputs_logs for transaction logging
+            outputs_dict = None
+            if self.outputs_logs:
+                outputs_dict = {
+                    k: v.model_dump() if hasattr(v, "model_dump") else v for k, v in self.outputs_logs.items()
+                }
+            await self._log_transaction_async(
+                str(flow_id), source=self, target=None, status="success", outputs=outputs_dict
+            )
+
         return await self.get_requester_result(requester)
 
     async def get_requester_result(self, requester: Vertex | None):
@@ -824,3 +837,39 @@
             return
         # Apply the function to each output
         [func(output) for output in self.custom_component.get_outputs_map().values()]
+
+    # AGUI/AG UI Event Streaming Callbacks/Methods - (Optional, see Observable decorator)
+    def raw_event_metrics(self, optional_fields: dict | None) -> dict:
+        """This method is used to get the metrics of the vertex by the Observable decorator.
+
+        If the vertex has a get_metrics method, it will be called, and the metrics will be captured
+        to stream back to the user in an AGUI compliant format.
+        Additional fields/metrics to be captured can be modified in this method, or in the callback methods,
+        which are before_callback_event and after_callback_event before returning the AGUI event.
+        """
+        if optional_fields is None:
+            optional_fields = {}
+        import time
+
+        return {"timestamp": time.time(), **optional_fields}
+
+    def before_callback_event(self, *args, **kwargs) -> StepStartedEvent:  # noqa: ARG002
+        """Should be a AGUI compatible event.
+
+        VERTEX class generates a StepStartedEvent event.
+        """
+        metrics = {}
+        if hasattr(self, "raw_event_metrics"):
+            metrics = self.raw_event_metrics({"component_id": self.id})
+
+        return StepStartedEvent(step_name=self.display_name, raw_event={"langflow": metrics})
+
+    def after_callback_event(self, result, *args, **kwargs) -> StepFinishedEvent:  # noqa: ARG002
+        """Should be a AGUI compatible event.
+
+        VERTEX class generates a StepFinishedEvent event.
+        """
+        metrics = {}
+        if hasattr(self, "raw_event_metrics"):
+            metrics = self.raw_event_metrics({"component_id": self.id})
+        return StepFinishedEvent(step_name=self.display_name, raw_event={"langflow": metrics})
lfx/graph/vertex/vertex_types.py
CHANGED
@@ -98,7 +98,6 @@ class ComponentVertex(Vertex):
         Returns:
             The built result if use_result is True, else the built object.
         """
-        flow_id = self.graph.flow_id
         if not self.built:
             default_value: Any = UNDEFINED
             for edge in self.get_edge_with_target(requester.id):
@@ -109,8 +108,6 @@
                 else:
                     default_value = requester.get_value_from_template_dict(edge.target_param)
 
-            if flow_id:
-                await self._log_transaction_async(source=self, target=requester, flow_id=str(flow_id), status="error")
             if default_value is not UNDEFINED:
                 return default_value
             msg = f"Component {self.display_name} has not been built yet"
@@ -148,8 +145,6 @@
                     raise ValueError(msg)
                 msg = f"Result not found for {edge.source_handle.name} in {edge}"
                 raise ValueError(msg)
-        if flow_id:
-            await self._log_transaction_async(source=self, target=requester, flow_id=str(flow_id), status="success")
         return result
 
     def extract_messages_from_artifacts(self, artifacts: dict[str, Any]) -> list[dict]:
lfx/inputs/input_mixin.py
CHANGED
lfx/inputs/inputs.py
CHANGED
@@ -120,6 +120,10 @@ class PromptInput(BaseInputMixin, ListableInputMixin, InputTraceMixin, ToolModeMixin):
     field_type: SerializableFieldTypes = FieldTypes.PROMPT
 
 
+class MustachePromptInput(PromptInput):
+    field_type: SerializableFieldTypes = FieldTypes.MUSTACHE_PROMPT
+
+
 class CodeInput(BaseInputMixin, ListableInputMixin, InputTraceMixin, ToolModeMixin):
     field_type: SerializableFieldTypes = FieldTypes.CODE
 
@@ -807,6 +811,7 @@ InputTypes: TypeAlias = (
     | NestedDictInput
     | ToolsInput
     | PromptInput
+    | MustachePromptInput
    | CodeInput
    | SecretStrInput
    | StrInput
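MustachePromptInput changes nothing but the declared field_type, which is enough for the frontend and the template machinery (note the new lfx/utils/mustache_security.py in the file list above) to treat the value as a mustache-style {{variable}} template rather than classic prompt syntax. A hypothetical component declaration for contrast; the field names and values here are illustrative, not taken from the package:

from lfx.inputs.inputs import MustachePromptInput, PromptInput

# Illustrative only: one component exposing both template styles.
inputs = [
    PromptInput(name="prompt", display_name="Prompt", value="Summarize {text}"),
    MustachePromptInput(
        name="mustache_template",
        display_name="Mustache Template",
        value="Hello {{name}}, summarize: {{text}}",
    ),
]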
lfx/interface/components.py
CHANGED
@@ -14,11 +14,16 @@ import orjson
 from lfx.constants import BASE_COMPONENTS_PATH
 from lfx.custom.utils import abuild_custom_components, create_component_template
 from lfx.log.logger import logger
+from lfx.utils.validate_cloud import (
+    filter_disabled_components_from_dict,
+    is_component_disabled_in_astra_cloud,
+)
 
 if TYPE_CHECKING:
     from lfx.services.settings.service import SettingsService
 
 MIN_MODULE_PARTS = 2
+MIN_MODULE_PARTS_WITH_FILENAME = 4  # Minimum parts needed to have a module filename (lfx.components.type.filename)
 EXPECTED_RESULT_LENGTH = 2  # Expected length of the tuple returned by _process_single_module
 
 
@@ -284,6 +289,8 @@ async def _load_from_index_or_cache(
             if top_level not in modules_dict:
                 modules_dict[top_level] = {}
             modules_dict[top_level].update(components)
+        # Filter disabled components for Astra cloud
+        modules_dict = filter_disabled_components_from_dict(modules_dict)
         await logger.adebug(f"Loaded {len(modules_dict)} component categories from index")
         return modules_dict, "builtin"
 
@@ -303,6 +310,8 @@
             if top_level not in modules_dict:
                 modules_dict[top_level] = {}
             modules_dict[top_level].update(components)
+        # Filter disabled components for Astra cloud
+        modules_dict = filter_disabled_components_from_dict(modules_dict)
         await logger.adebug(f"Loaded {len(modules_dict)} component categories from cache")
         return modules_dict, "cache"
 
@@ -335,11 +344,19 @@ async def _load_components_dynamically(
         if "deactivated" in modname:
             continue
 
-        #
-
-
-
-
+        # Parse module name once for all checks
+        parts = modname.split(".")
+        if len(parts) > MIN_MODULE_PARTS:
+            component_type = parts[2]
+
+            # Skip disabled components when ASTRA_CLOUD_DISABLE_COMPONENT is true
+            if len(parts) >= MIN_MODULE_PARTS_WITH_FILENAME:
+                module_filename = parts[3]
+                if is_component_disabled_in_astra_cloud(component_type.lower(), module_filename):
+                    continue
+
+            # If specific modules requested, filter by top-level module name
+            if target_modules and component_type.lower() not in target_modules:
                continue
 
         module_names.append(modname)
@@ -549,7 +566,7 @@ def _process_single_module(modname: str) -> tuple[str, dict] | None:
     return (top_level, module_components)
 
 
-async def _determine_loading_strategy(settings_service: "SettingsService") -> dict:
+async def _determine_loading_strategy(settings_service: "SettingsService") -> dict[str, Any]:
     """Determines and executes the appropriate component loading strategy.
 
     Args:
@@ -577,7 +594,7 @@ async def _determine_loading_strategy(settings_service: "SettingsService") -> dict[str, Any]:
             f"Built {component_count} custom components from {settings_service.settings.components_path}"
         )
 
-        return component_cache.all_types_dict
+        return component_cache.all_types_dict or {}
 
 
async def get_and_cache_all_types_dict(
lfx/run/base.py
CHANGED
@@ -3,7 +3,6 @@
 import json
 import re
 import sys
-import tempfile
 import time
 from io import StringIO
 from pathlib import Path
@@ -125,21 +124,16 @@ async def run_flow(
         output_error(error_msg, verbose=verbose)
         raise RunError(error_msg, None)
 
-
+    # Store parsed JSON dict for direct loading (avoids temp file round-trip)
+    flow_dict: dict | None = None
 
     if flow_json is not None:
         if verbosity > 0:
             sys.stderr.write("Processing inline JSON content...\n")
         try:
-            json_data = json.loads(flow_json)
+            flow_dict = json.loads(flow_json)
             if verbosity > 0:
                 sys.stderr.write("JSON content is valid\n")
-            with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as temp_file:
-                json.dump(json_data, temp_file, indent=2)
-                temp_file_to_cleanup = temp_file.name
-            script_path = Path(temp_file_to_cleanup)
-            if verbosity > 0:
-                sys.stderr.write(f"Created temporary file: {script_path}\n")
         except json.JSONDecodeError as e:
             error_msg = f"Invalid JSON content: {e}"
             output_error(error_msg, verbose=verbose)
@@ -157,15 +151,9 @@
                 error_msg = "No content received from stdin"
                 output_error(error_msg, verbose=verbose)
                 raise RunError(error_msg, None)
-            json_data = json.loads(stdin_content)
+            flow_dict = json.loads(stdin_content)
             if verbosity > 0:
                 sys.stderr.write("JSON content from stdin is valid\n")
-            with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as temp_file:
-                json.dump(json_data, temp_file, indent=2)
-                temp_file_to_cleanup = temp_file.name
-            script_path = Path(temp_file_to_cleanup)
-            if verbosity > 0:
-                sys.stderr.write(f"Created temporary file from stdin: {script_path}\n")
         except json.JSONDecodeError as e:
             error_msg = f"Invalid JSON content from stdin: {e}"
             output_error(error_msg, verbose=verbose)
@@ -176,39 +164,52 @@
     raise RunError(error_msg, e) from e
 
     try:
-        if not script_path.exists():
-            error_msg = f"File '{script_path}' does not exist."
-            raise ValueError(error_msg)
-        if not script_path.is_file():
-            error_msg = f"'{script_path}' is not a file."
-            raise ValueError(error_msg)
-        file_extension = script_path.suffix.lower()
-        if file_extension not in [".py", ".json"]:
-            error_msg = f"'{script_path}' must be a .py or .json file."
-            raise ValueError(error_msg)
-        file_type = "Python script" if file_extension == ".py" else "JSON flow"
-        if verbosity > 0:
-            sys.stderr.write(f"Analyzing {file_type}: {script_path}\n")
-        if file_extension == ".py":
-            graph_info = find_graph_variable(script_path)
-            if not graph_info:
-                error_msg = (
-                    "No 'graph' variable found in the script. Expected to find an assignment like: graph = Graph(...)"
-                )
-                raise ValueError(error_msg)
-            if verbosity > 0:
-                sys.stderr.write(f"Found 'graph' variable at line {graph_info['line_number']}\n")
-                sys.stderr.write(f"Type: {graph_info['type']}\n")
-                sys.stderr.write(f"Source: {graph_info['source_line']}\n")
-                sys.stderr.write("Loading and executing script...\n")
-            graph = await load_graph_from_script(script_path)
-        elif file_extension == ".json":
+        # Handle direct JSON dict (from stdin or --flow-json)
+        if flow_dict is not None:
             if verbosity > 0:
-                sys.stderr.write("Valid JSON flow file detected\n")
-                sys.stderr.write("Loading and executing JSON flow\n")
+                sys.stderr.write("Loading graph from JSON content...\n")
             from lfx.load import aload_flow_from_json
 
-            graph = await aload_flow_from_json(
+            graph = await aload_flow_from_json(flow_dict, disable_logs=not verbose)
+        # Handle file path
+        elif script_path is not None:
+            if not script_path.exists():
+                error_msg = f"File '{script_path}' does not exist."
+                raise ValueError(error_msg)
+            if not script_path.is_file():
+                error_msg = f"'{script_path}' is not a file."
+                raise ValueError(error_msg)
+            file_extension = script_path.suffix.lower()
+            if file_extension not in [".py", ".json"]:
+                error_msg = f"'{script_path}' must be a .py or .json file."
+                raise ValueError(error_msg)
+            file_type = "Python script" if file_extension == ".py" else "JSON flow"
+            if verbosity > 0:
+                sys.stderr.write(f"Analyzing {file_type}: {script_path}\n")
+            if file_extension == ".py":
+                graph_info = find_graph_variable(script_path)
+                if not graph_info:
+                    error_msg = (
+                        "No 'graph' variable found in the script. "
+                        "Expected to find an assignment like: graph = Graph(...)"
+                    )
+                    raise ValueError(error_msg)
+                if verbosity > 0:
+                    sys.stderr.write(f"Found 'graph' variable at line {graph_info['line_number']}\n")
+                    sys.stderr.write(f"Type: {graph_info['type']}\n")
+                    sys.stderr.write(f"Source: {graph_info['source_line']}\n")
+                    sys.stderr.write("Loading and executing script...\n")
+                graph = await load_graph_from_script(script_path)
+            else:  # .json file
+                if verbosity > 0:
+                    sys.stderr.write("Valid JSON flow file detected\n")
+                    sys.stderr.write("Loading and executing JSON flow\n")
+                from lfx.load import aload_flow_from_json
 
+                graph = await aload_flow_from_json(script_path, disable_logs=not verbose)
+        else:
+            error_msg = "No input source provided"
+            raise ValueError(error_msg)
 
         # Inject global variables into graph context
         if global_variables:
@@ -244,12 +245,6 @@
 
         error_msg = f"Failed to load graph. {e}"
         output_error(error_msg, verbose=verbose, exception=e)
-        if temp_file_to_cleanup:
-            try:
-                Path(temp_file_to_cleanup).unlink()
-                logger.info(f"Cleaned up temporary file: {temp_file_to_cleanup}")
-            except OSError:
-                pass
         raise RunError(error_msg, e) from e
 
     inputs = InputValueRequest(input_value=final_input_value) if final_input_value else None
@@ -285,13 +280,6 @@
                 for error in validation_errors:
                     logger.debug(f"Validation error: {error}")
                 output_error(error_details, verbose=verbose)
-                if temp_file_to_cleanup:
-                    try:
-                        Path(temp_file_to_cleanup).unlink()
-                        logger.info(f"Cleaned up temporary file: {temp_file_to_cleanup}")
-                    except OSError:
-                        pass
-                if validation_errors:
                 raise RunError(error_details, None)
             logger.info("Global variable validation passed")
         else:
@@ -308,12 +296,6 @@
 
         error_msg = f"Failed to prepare graph: {e}"
         output_error(error_msg, verbose=verbose, exception=e)
-        if temp_file_to_cleanup:
-            try:
-                Path(temp_file_to_cleanup).unlink()
-                logger.info(f"Cleaned up temporary file: {temp_file_to_cleanup}")
-            except OSError:
-                pass
        raise RunError(error_msg, e) from e
 
    logger.info("Executing graph...")
@@ -426,12 +408,6 @@
 
        logger.exception("Failed to execute graph - full traceback:")
 
-        if temp_file_to_cleanup:
-            try:
-                Path(temp_file_to_cleanup).unlink()
-                logger.info(f"Cleaned up temporary file: {temp_file_to_cleanup}")
-            except OSError:
-                pass
        sys.stdout = original_stdout
        sys.stderr = original_stderr
        error_msg = f"Failed to execute graph: {e}"
@@ -440,12 +416,6 @@
    finally:
        sys.stdout = original_stdout
        sys.stderr = original_stderr
-        if temp_file_to_cleanup:
-            try:
-                Path(temp_file_to_cleanup).unlink()
-                logger.info(f"Cleaned up temporary file: {temp_file_to_cleanup}")
-            except OSError:
-                pass
 
    execution_end_time = time.time() if timing else None
lfx/schema/__init__.py
CHANGED
@@ -1,9 +1,12 @@
 """Schema modules for lfx package."""
 
 __all__ = [
+    "ComponentOutput",
     "Data",
     "DataFrame",
+    "ErrorDetail",
     "InputValue",
+    "JobStatus",
     "Message",
     "OpenAIErrorResponse",
     "OpenAIResponsesRequest",
@@ -11,6 +14,13 @@ __all__ = [
     "OpenAIResponsesStreamChunk",
     "Tweaks",
     "UUIDstr",
+    "WorkflowExecutionRequest",
+    "WorkflowExecutionResponse",
+    "WorkflowJobResponse",
+    "WorkflowStatusResponse",
+    "WorkflowStopRequest",
+    "WorkflowStopResponse",
+    "WorkflowStreamEvent",
     "dotdict",
 ]
 
@@ -61,6 +71,46 @@ def __getattr__(name: str):
         from .openai_responses_schemas import OpenAIErrorResponse
 
         return OpenAIErrorResponse
+    if name == "WorkflowExecutionRequest":
+        from .workflow import WorkflowExecutionRequest
+
+        return WorkflowExecutionRequest
+    if name == "WorkflowExecutionResponse":
+        from .workflow import WorkflowExecutionResponse
+
+        return WorkflowExecutionResponse
+    if name == "WorkflowJobResponse":
+        from .workflow import WorkflowJobResponse
+
+        return WorkflowJobResponse
+    if name == "WorkflowStreamEvent":
+        from .workflow import WorkflowStreamEvent
+
+        return WorkflowStreamEvent
+    if name == "WorkflowStatusResponse":
+        from .workflow import WorkflowStatusResponse
+
+        return WorkflowStatusResponse
+    if name == "WorkflowStopRequest":
+        from .workflow import WorkflowStopRequest
+
+        return WorkflowStopRequest
+    if name == "WorkflowStopResponse":
+        from .workflow import WorkflowStopResponse
+
+        return WorkflowStopResponse
+    if name == "JobStatus":
+        from .workflow import JobStatus
+
+        return JobStatus
+    if name == "ErrorDetail":
+        from .workflow import ErrorDetail
+
+        return ErrorDetail
+    if name == "ComponentOutput":
+        from .workflow import ComponentOutput
+
+        return ComponentOutput
 
     msg = f"module '{__name__}' has no attribute '{name}'"
     raise AttributeError(msg)
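The growing "if name == ..." ladder is the PEP 562 module-level __getattr__ pattern: the names stay listed in __all__, but the import of lfx/schema/workflow.py (also new in this release) is deferred until first attribute access, keeping "import lfx.schema" cheap. The same ladder can be expressed as a lookup table; an equivalent sketch of the idiom, not what the package actually does:

import importlib

# Exported name -> defining submodule (illustrative subset).
_LAZY_IMPORTS = {
    "WorkflowExecutionRequest": ".workflow",
    "WorkflowStreamEvent": ".workflow",
    "JobStatus": ".workflow",
}

def __getattr__(name: str):
    if name in _LAZY_IMPORTS:
        # Resolve the submodule relative to this package on first access.
        module = importlib.import_module(_LAZY_IMPORTS[name], __package__)
        return getattr(module, name)
    msg = f"module '{__name__}' has no attribute '{name}'"
    raise AttributeError(msg)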
|