lfx-nightly 0.1.12.dev34__py3-none-any.whl → 0.1.12.dev36__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of lfx-nightly might be problematic.
- lfx/base/composio/composio_base.py +65 -26
- lfx/base/data/base_file.py +3 -2
- lfx/base/tools/flow_tool.py +1 -1
- lfx/base/tools/run_flow.py +1 -1
- lfx/cli/commands.py +17 -12
- lfx/cli/run.py +156 -95
- lfx/components/__init__.py +3 -0
- lfx/components/deactivated/sub_flow.py +1 -1
- lfx/components/logic/flow_tool.py +1 -1
- lfx/components/logic/run_flow.py +1 -1
- lfx/components/logic/sub_flow.py +1 -1
- lfx/components/vectorstores/local_db.py +0 -1
- lfx/components/vlmrun/__init__.py +34 -0
- lfx/components/vlmrun/vlmrun_transcription.py +224 -0
- lfx/custom/custom_component/custom_component.py +1 -1
- lfx/graph/vertex/param_handler.py +2 -2
- lfx/helpers/__init__.py +129 -1
- lfx/helpers/flow.py +0 -3
- lfx/inputs/input_mixin.py +2 -1
- lfx/inputs/inputs.py +5 -14
- lfx/log/logger.py +5 -1
- lfx/memory/__init__.py +10 -30
- lfx/schema/cross_module.py +80 -0
- lfx/schema/data.py +2 -1
- lfx/services/mcp_composer/service.py +3 -2
- lfx/services/settings/base.py +31 -0
- lfx/utils/langflow_utils.py +52 -0
- {lfx_nightly-0.1.12.dev34.dist-info → lfx_nightly-0.1.12.dev36.dist-info}/METADATA +1 -1
- {lfx_nightly-0.1.12.dev34.dist-info → lfx_nightly-0.1.12.dev36.dist-info}/RECORD +31 -27
- {lfx_nightly-0.1.12.dev34.dist-info → lfx_nightly-0.1.12.dev36.dist-info}/WHEEL +0 -0
- {lfx_nightly-0.1.12.dev34.dist-info → lfx_nightly-0.1.12.dev36.dist-info}/entry_points.txt +0 -0
lfx/cli/run.py
CHANGED
@@ -18,18 +18,29 @@ from lfx.cli.validation import validate_global_variables_for_env
 from lfx.log.logger import logger
 from lfx.schema.schema import InputValueRequest

+# Verbosity level constants
+VERBOSITY_DETAILED = 2
+VERBOSITY_FULL = 3

-def output_error(error_message: str, *, verbose: bool) -> None:
+
+def output_error(error_message: str, *, verbose: bool, exception: Exception | None = None) -> None:
     """Output error in JSON format to stdout when not verbose, or to stderr when verbose."""
     if verbose:
         typer.echo(f"{error_message}", file=sys.stderr)
+
+    error_response = {
+        "success": False,
+        "type": "error",
+    }
+
+    # Add clean exception data if available
+    if exception:
+        error_response["exception_type"] = type(exception).__name__
+        error_response["exception_message"] = str(exception)
     else:
-        error_response = {
-            "success": False,
-            "error": error_message,
-            "type": "error",
-        }
-        typer.echo(json.dumps(error_response))
+        error_response["exception_message"] = error_message
+
+    typer.echo(json.dumps(error_response))


 @partial(syncify, raise_sync_error=False)
@@ -67,9 +78,20 @@ async def run(
         help="Check global variables for environment compatibility",
     ),
     verbose: bool = typer.Option(
-        False,  # noqa: FBT003
-        "--verbose",
-        help="Show diagnostic output and execution details",
+        False,  # noqa: FBT003
+        "-v",
+        "--verbose",
+        help="Show basic progress information",
+    ),
+    verbose_detailed: bool = typer.Option(
+        False,  # noqa: FBT003
+        "-vv",
+        help="Show detailed progress and debug information",
+    ),
+    verbose_full: bool = typer.Option(
+        False,  # noqa: FBT003
+        "-vvv",
+        help="Show full debugging output including component logs",
     ),
     timing: bool = typer.Option(
         default=False,
@@ -88,6 +110,8 @@ async def run(
         input_value: Input value to pass to the graph (positional argument)
         input_value_option: Input value to pass to the graph (alternative option)
         verbose: Show diagnostic output and execution details
+        verbose_detailed: Show detailed progress and debug information (-vv)
+        verbose_full: Show full debugging output including component logs (-vvv)
         output_format: Format for output (json, text, message, or result)
         flow_json: Inline JSON flow content as a string
         stdin: Read JSON flow content from stdin
@@ -96,15 +120,22 @@ async def run(
     """
     # Start timing if requested
     import time
-    from datetime import datetime
-
-    def verbose_print(message: str, level: str = "INFO") -> None:
-        if verbose:
-            timestamp = datetime.now().strftime("%H:%M:%S.%f")[:-3]  # Include milliseconds # noqa: DTZ005
-            typer.echo(f"[{timestamp}] {level}: {message}", file=sys.stderr)

-    def debug_print(message: str) -> None:
-        verbose_print(message, level="DEBUG")
+    # Configure logger based on verbosity level
+    from lfx.log.logger import configure
+
+    if verbose_full:
+        configure(log_level="DEBUG", output_file=sys.stderr)  # Show everything including component debug logs
+        verbosity = 3
+    elif verbose_detailed:
+        configure(log_level="DEBUG", output_file=sys.stderr)  # Show debug and above
+        verbosity = 2
+    elif verbose:
+        configure(log_level="INFO", output_file=sys.stderr)  # Show info and above including our CLI info messages
+        verbosity = 1
+    else:
+        configure(log_level="CRITICAL", output_file=sys.stderr)  # Only critical errors
+        verbosity = 0

     start_time = time.time() if timing else None

@@ -127,15 +158,18 @@ async def run(
     temp_file_to_cleanup = None

     if flow_json is not None:
-        verbose_print("Processing inline JSON content...")
+        if verbosity > 0:
+            typer.echo("Processing inline JSON content...", file=sys.stderr)
         try:
             json_data = json.loads(flow_json)
-            verbose_print("JSON content is valid")
+            if verbosity > 0:
+                typer.echo("JSON content is valid", file=sys.stderr)
             with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as temp_file:
                 json.dump(json_data, temp_file, indent=2)
                 temp_file_to_cleanup = temp_file.name
             script_path = Path(temp_file_to_cleanup)
-            verbose_print(f"Created temporary file: {script_path}")
+            if verbosity > 0:
+                typer.echo(f"Created temporary file: {script_path}", file=sys.stderr)
         except json.JSONDecodeError as e:
             output_error(f"Invalid JSON content: {e}", verbose=verbose)
             raise typer.Exit(1) from e
@@ -143,19 +177,22 @@ async def run(
             output_error(f"Error processing JSON content: {e}", verbose=verbose)
             raise typer.Exit(1) from e
     elif stdin:
-        verbose_print("Reading JSON content from stdin...")
+        if verbosity > 0:
+            typer.echo("Reading JSON content from stdin...", file=sys.stderr)
         try:
             stdin_content = sys.stdin.read().strip()
             if not stdin_content:
                 output_error("No content received from stdin", verbose=verbose)
                 raise typer.Exit(1)
             json_data = json.loads(stdin_content)
-            verbose_print("JSON content from stdin is valid")
+            if verbosity > 0:
+                typer.echo("JSON content from stdin is valid", file=sys.stderr)
             with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as temp_file:
                 json.dump(json_data, temp_file, indent=2)
                 temp_file_to_cleanup = temp_file.name
             script_path = Path(temp_file_to_cleanup)
-            verbose_print(f"Created temporary file from stdin: {script_path}")
+            if verbosity > 0:
+                typer.echo(f"Created temporary file from stdin: {script_path}", file=sys.stderr)
         except json.JSONDecodeError as e:
             output_error(f"Invalid JSON content from stdin: {e}", verbose=verbose)
             raise typer.Exit(1) from e
@@ -175,7 +212,8 @@ async def run(
             error_msg = f"'{script_path}' must be a .py or .json file."
             raise ValueError(error_msg)
         file_type = "Python script" if file_extension == ".py" else "JSON flow"
-        verbose_print(f"Analyzing {file_type}: {script_path}")
+        if verbosity > 0:
+            typer.echo(f"Analyzing {file_type}: {script_path}", file=sys.stderr)
         if file_extension == ".py":
             graph_info = find_graph_variable(script_path)
             if not graph_info:
@@ -183,47 +221,44 @@ async def run(
                     "No 'graph' variable found in the script. Expected to find an assignment like: graph = Graph(...)"
                 )
                 raise ValueError(error_msg)
-            verbose_print(f"Found 'graph' variable at line {graph_info['line_number']}")
-            verbose_print(f"Type: {graph_info['type']}")
-            verbose_print(f"Source: {graph_info['source_line']}")
-            verbose_print("Loading and executing script...")
+            if verbosity > 0:
+                typer.echo(f"Found 'graph' variable at line {graph_info['line_number']}", file=sys.stderr)
+                typer.echo(f"Type: {graph_info['type']}", file=sys.stderr)
+                typer.echo(f"Source: {graph_info['source_line']}", file=sys.stderr)
+                typer.echo("Loading and executing script...", file=sys.stderr)
             graph = load_graph_from_script(script_path)
         elif file_extension == ".json":
-            verbose_print("Valid JSON flow file detected")
-            verbose_print("Loading and executing JSON flow")
+            if verbosity > 0:
+                typer.echo("Valid JSON flow file detected", file=sys.stderr)
+                typer.echo("Loading and executing JSON flow", file=sys.stderr)
             from lfx.load import aload_flow_from_json

             graph = await aload_flow_from_json(script_path, disable_logs=not verbose)
     except Exception as e:
         error_type = type(e).__name__
-        verbose_print(f"Graph loading failed with {error_type}")
-
-        if verbose:
-            # Enhanced error context for better debugging
-            debug_print(f"Exception type: {error_type}")
-            debug_print(f"Exception message: {e!s}")
+        logger.error(f"Graph loading failed with {error_type}")

+        if verbosity > 0:
             # Try to identify common error patterns
             if "ModuleNotFoundError" in str(e) or "No module named" in str(e):
-                debug_print("This appears to be a missing dependency issue")
+                logger.info("This appears to be a missing dependency issue")
                 if "langchain" in str(e).lower():
-                    verbose_print(
+                    logger.info(
                         "Missing LangChain dependency detected. Try: pip install langchain-<provider>",
-                        level="WARN",
                     )
             elif "ImportError" in str(e):
-                debug_print("This appears to be an import issue - check component dependencies")
+                logger.info("This appears to be an import issue - check component dependencies")
             elif "AttributeError" in str(e):
-                debug_print("This appears to be a component configuration issue")
+                logger.info("This appears to be a component configuration issue")

         # Show full traceback in debug mode
-        logger.exception("Failed to load graph - full traceback:")
+        logger.exception("Failed to load graph.")

-        output_error(f"Failed to load graph: {e}", verbose=verbose)
+        output_error(f"Failed to load graph. {e}", verbose=verbose, exception=e)
         if temp_file_to_cleanup:
             try:
                 Path(temp_file_to_cleanup).unlink()
-                verbose_print(f"Cleaned up temporary file: {temp_file_to_cleanup}")
+                logger.info(f"Cleaned up temporary file: {temp_file_to_cleanup}")
             except OSError:
                 pass
         raise typer.Exit(1) from e
@@ -233,70 +268,70 @@ async def run(
     # Mark end of loading phase if timing
     load_end_time = time.time() if timing else None

-    verbose_print("Preparing graph for execution...")
+    if verbosity > 0:
+        typer.echo("Preparing graph for execution...", file=sys.stderr)
     try:
         # Add detailed preparation steps
-        if verbose:
-            debug_print(f"Graph contains {len(graph.vertices)} vertices")
-            debug_print(f"Graph contains {len(graph.edges)} edges")
+        if verbosity > 0:
+            logger.debug(f"Graph contains {len(graph.vertices)} vertices")
+            logger.debug(f"Graph contains {len(graph.edges)} edges")

         # Show component types being used
         component_types = set()
         for vertex in graph.vertices:
             if hasattr(vertex, "display_name"):
                 component_types.add(vertex.display_name)
-        debug_print(f"Component types in graph: {', '.join(sorted(component_types))}")
+        logger.debug(f"Component types in graph: {', '.join(sorted(component_types))}")

         graph.prepare()
-        verbose_print("Graph preparation completed")
+        logger.info("Graph preparation completed")

         # Validate global variables for environment compatibility
         if check_variables:
-            verbose_print("Validating global variables...")
+            logger.info("Validating global variables...")
             validation_errors = validate_global_variables_for_env(graph)
             if validation_errors:
                 error_details = "Global variable validation failed: " + "; ".join(validation_errors)
-                verbose_print(f"Variable validation failed: {len(validation_errors)} errors")
+                logger.info(f"Variable validation failed: {len(validation_errors)} errors")
                 for error in validation_errors:
-                    debug_print(f"Validation error: {error}")
+                    logger.debug(f"Validation error: {error}")
                 output_error(error_details, verbose=verbose)
                 if temp_file_to_cleanup:
                     try:
                         Path(temp_file_to_cleanup).unlink()
-                        verbose_print(f"Cleaned up temporary file: {temp_file_to_cleanup}")
+                        logger.info(f"Cleaned up temporary file: {temp_file_to_cleanup}")
                     except OSError:
                         pass
                 if validation_errors:
                     raise typer.Exit(1)
-            verbose_print("Global variable validation passed")
+            logger.info("Global variable validation passed")
         else:
-            verbose_print("Global variable validation skipped")
+            logger.info("Global variable validation skipped")
     except Exception as e:
         error_type = type(e).__name__
-        verbose_print(f"Graph preparation failed with {error_type}")
+        logger.info(f"Graph preparation failed with {error_type}")

-        if verbose:
-            debug_print(f"Preparation error: {e!s}")
+        if verbosity > 0:
+            logger.debug(f"Preparation error: {e!s}")
             logger.exception("Failed to prepare graph - full traceback:")

-        output_error(f"Failed to prepare graph: {e}", verbose=verbose)
+        output_error(f"Failed to prepare graph: {e}", verbose=verbose, exception=e)
         if temp_file_to_cleanup:
             try:
                 Path(temp_file_to_cleanup).unlink()
-                verbose_print(f"Cleaned up temporary file: {temp_file_to_cleanup}")
+                logger.info(f"Cleaned up temporary file: {temp_file_to_cleanup}")
             except OSError:
                 pass
         raise typer.Exit(1) from e

-    verbose_print("Executing graph...")
+    logger.info("Executing graph...")
     execution_start_time = time.time() if timing else None
-
     if verbose:
-        debug_print("Setting up execution environment")
+        logger.debug("Setting up execution environment")
         if inputs:
-            debug_print(f"Input provided: {inputs.input_value}")
+            logger.debug(f"Input provided: {inputs.input_value}")
         else:
-            debug_print("No input provided")
+            logger.debug("No input provided")

     captured_stdout = StringIO()
     captured_stderr = StringIO()
@@ -309,18 +344,20 @@ async def run(

     try:
         sys.stdout = captured_stdout
-        sys.stderr = captured_stderr
+        # Don't capture stderr at high verbosity levels to avoid duplication with direct logging
+        if verbosity < VERBOSITY_FULL:
+            sys.stderr = captured_stderr
         results = []

-        verbose_print("Starting graph execution...", level="DEBUG")
+        logger.info("Starting graph execution...", level="DEBUG")
         result_count = 0

         async for result in graph.async_start(inputs):
             result_count += 1
-            if verbose:
-                debug_print(f"Processing result #{result_count}")
+            if verbosity > 0:
+                logger.debug(f"Processing result #{result_count}")
                 if hasattr(result, "vertex") and hasattr(result.vertex, "display_name"):
-                    debug_print(f"Component: {result.vertex.display_name}")
+                    logger.debug(f"Component: {result.vertex.display_name}")
             if timing:
                 step_end_time = time.time()
                 step_duration = step_end_time - execution_step_start
@@ -342,46 +379,70 @@ async def run(

             results.append(result)

-        verbose_print(f"Graph execution completed. Processed {result_count} results")
+        logger.info(f"Graph execution completed. Processed {result_count} results")

     except Exception as e:
-        sys.stdout = original_stdout
-        sys.stderr = original_stderr
-
         error_type = type(e).__name__
-        verbose_print(f"Graph execution failed with {error_type}")
+        logger.info(f"Graph execution failed with {error_type}")

-        if verbose:
-            debug_print(f"Execution error: {e!s}")
-            debug_print(f"Failed after processing {result_count} results")
+        if verbosity >= VERBOSITY_DETAILED:  # Only show details at -vv and above
+            logger.debug(f"Failed after processing {result_count} results")

+        # Only show component output at maximum verbosity (-vvv)
+        if verbosity >= VERBOSITY_FULL:
             # Capture any output that was generated before the error
-            captured_content = captured_stdout.getvalue() + captured_stderr.getvalue()
+            # Only show captured stdout since stderr logging is already shown directly in verbose mode
+            captured_content = captured_stdout.getvalue()
             if captured_content.strip():
-                verbose_print("Component output before error:", level="DEBUG")
-                for line in captured_content.strip().split("\n"):
-                    verbose_print(f"  {line}", level="DEBUG")
+                # Check if captured content contains the same error that will be displayed at the end
+                error_text = str(e)
+                captured_lines = captured_content.strip().split("\n")
+
+                # Filter out lines that are duplicates of the final error message
+                unique_lines = [
+                    line
+                    for line in captured_lines
+                    if not any(
+                        error_part.strip() in line for error_part in error_text.split("\n") if error_part.strip()
+                    )
+                ]
+
+                if unique_lines:
+                    logger.info("Component output before error:", level="DEBUG")
+                    for line in unique_lines:
+                        # Log each line directly using the logger to avoid nested formatting
+                        if verbosity > 0:
+                            # Remove any existing timestamp prefix to avoid duplication
+                            clean_line = line
+                            if "] " in line and line.startswith("2025-"):
+                                # Extract just the log message after the timestamp and level
+                                parts = line.split("] ", 1)
+                                if len(parts) > 1:
+                                    clean_line = parts[1]
+                            logger.debug(clean_line)

             # Provide context about common execution errors
             if "list can't be used in 'await' expression" in str(e):
-                debug_print("This appears to be an async/await mismatch in a component")
-                debug_print("Check that async methods are properly awaited")
+                logger.info("This appears to be an async/await mismatch in a component")
+                logger.info("Check that async methods are properly awaited")
             elif "AttributeError" in error_type and "NoneType" in str(e):
-                debug_print("This appears to be a null reference error")
-                debug_print("A component may be receiving unexpected None values")
+                logger.info("This appears to be a null reference error")
+                logger.info("A component may be receiving unexpected None values")
             elif "ConnectionError" in str(e) or "TimeoutError" in str(e):
-                debug_print("This appears to be a network connectivity issue")
-                debug_print("Check API keys and network connectivity")
+                logger.info("This appears to be a network connectivity issue")
+                logger.info("Check API keys and network connectivity")

         logger.exception("Failed to execute graph - full traceback:")

-        output_error(f"Failed to execute graph: {e}", verbose=verbose)
         if temp_file_to_cleanup:
             try:
                 Path(temp_file_to_cleanup).unlink()
-                verbose_print(f"Cleaned up temporary file: {temp_file_to_cleanup}")
+                logger.info(f"Cleaned up temporary file: {temp_file_to_cleanup}")
             except OSError:
                 pass
+        sys.stdout = original_stdout
+        sys.stderr = original_stderr
+        output_error(f"Failed to execute graph: {e}", verbose=verbosity > 0, exception=e)
         raise typer.Exit(1) from e
     finally:
         sys.stdout = original_stdout
@@ -389,7 +450,7 @@ async def run(
     if temp_file_to_cleanup:
         try:
             Path(temp_file_to_cleanup).unlink()
-            verbose_print(f"Cleaned up temporary file: {temp_file_to_cleanup}")
+            logger.info(f"Cleaned up temporary file: {temp_file_to_cleanup}")
         except OSError:
             pass

@@ -424,7 +485,7 @@ async def run(
             result_data["logs"] = captured_logs
         if timing_metadata:
             result_data["timing"] = timing_metadata
-        indent = 2 if verbose else None
+        indent = 2 if verbosity > 0 else None
         typer.echo(json.dumps(result_data, indent=indent))
     elif output_format in {"text", "message"}:
         result_data = extract_structured_result(results)
@@ -437,5 +498,5 @@ async def run(
             result_data["logs"] = captured_logs
         if timing_metadata:
             result_data["timing"] = timing_metadata
-        indent = 2 if verbose else None
+        indent = 2 if verbosity > 0 else None
         typer.echo(json.dumps(result_data, indent=indent))
lfx/components/__init__.py
CHANGED
@@ -96,6 +96,7 @@ if TYPE_CHECKING:
         vectara,
         vectorstores,
         vertexai,
+        vlmrun,
         weaviate,
         wikipedia,
         wolframalpha,
@@ -198,6 +199,7 @@ _dynamic_imports = {
     "vectara": "__module__",
     "vectorstores": "__module__",
     "vertexai": "__module__",
+    "vlmrun": "__module__",
     "weaviate": "__module__",
     "wikipedia": "__module__",
     "wolframalpha": "__module__",
@@ -326,6 +328,7 @@ __all__ = [
     "vectara",
     "vectorstores",
     "vertexai",
+    "vlmrun",
     "weaviate",
     "wikipedia",
     "wolframalpha",
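These three hunks only register the new subpackage with the lazy-import machinery already present in components/__init__.py; nothing under lfx/components/vlmrun/ is imported until first use. A short, hypothetical usage sketch of the effect:

# Hypothetical session, assuming the package is installed.
import lfx.components

print("vlmrun" in lfx.components.__all__)  # True: the name is registered above
vlmrun_pkg = lfx.components.vlmrun         # first access resolves the "__module__" entry lazily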
lfx/components/deactivated/sub_flow.py
CHANGED

@@ -4,7 +4,7 @@ from lfx.base.flow_processing.utils import build_data_from_result_data
 from lfx.custom.custom_component.custom_component import CustomComponent
 from lfx.graph.graph.base import Graph
 from lfx.graph.vertex.base import Vertex
-from lfx.helpers.flow import get_flow_inputs
+from lfx.helpers import get_flow_inputs
 from lfx.log.logger import logger
 from lfx.schema.data import Data
 from lfx.schema.dotdict import dotdict
lfx/components/logic/flow_tool.py
CHANGED

@@ -6,7 +6,7 @@ from lfx.base.langchain_utilities.model import LCToolComponent
 from lfx.base.tools.flow_tool import FlowTool
 from lfx.field_typing import Tool
 from lfx.graph.graph.base import Graph
-from lfx.helpers.flow import get_flow_inputs
+from lfx.helpers import get_flow_inputs
 from lfx.io import BoolInput, DropdownInput, Output, StrInput
 from lfx.log.logger import logger
 from lfx.schema.data import Data
lfx/components/logic/run_flow.py
CHANGED
lfx/components/logic/sub_flow.py
CHANGED
@@ -4,7 +4,7 @@ from lfx.base.flow_processing.utils import build_data_from_result_data
 from lfx.custom.custom_component.component import Component
 from lfx.graph.graph.base import Graph
 from lfx.graph.vertex.base import Vertex
-from lfx.helpers.flow import get_flow_inputs
+from lfx.helpers import get_flow_inputs
 from lfx.io import DropdownInput, Output
 from lfx.log.logger import logger
 from lfx.schema.data import Data
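The repeated one-line hunks in this release (here and in the deactivated sub-flow and flow-tool components above) move get_flow_inputs from lfx.helpers.flow to the package root, which lines up with the lfx/helpers/__init__.py +129 -1 entry in the file list. A minimal re-export that would make the new path resolve; the shipped 129-line change likely does this lazily, so treat this as a sketch only:

# lfx/helpers/__init__.py -- hypothetical minimal equivalent of the re-export
from lfx.helpers.flow import get_flow_inputs  # the original module remains the source of truth

__all__ = ["get_flow_inputs"]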
lfx/components/vlmrun/__init__.py
ADDED

@@ -0,0 +1,34 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+
+from langflow.components._importing import import_mod
+
+if TYPE_CHECKING:
+    from .vlmrun_transcription import VLMRunTranscription
+
+_dynamic_imports = {
+    "VLMRunTranscription": "vlmrun_transcription",
+}
+
+__all__ = [
+    "VLMRunTranscription",
+]
+
+
+def __getattr__(attr_name: str) -> Any:
+    """Lazily import VLMRun components on attribute access."""
+    if attr_name not in _dynamic_imports:
+        msg = f"module '{__name__}' has no attribute '{attr_name}'"
+        raise AttributeError(msg)
+    try:
+        result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent)
+    except (ModuleNotFoundError, ImportError, AttributeError) as e:
+        msg = f"Could not import '{attr_name}' from '{__name__}': {e}"
+        raise AttributeError(msg) from e
+    globals()[attr_name] = result
+    return result
+
+
+def __dir__() -> list[str]:
+    return list(__all__)